// Copyright 2017 The Clspv Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifdef _MSC_VER
#pragma warning(push, 0)
#endif

#include <cassert>
#include <cstring>
#include <iomanip>
#include <list>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <tuple>
#include <unordered_set>
#include <utility>

#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/UniqueVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Cloning.h"

#include "spirv/1.0/spirv.hpp"

#include "clspv/AddressSpace.h"
#include "clspv/DescriptorMap.h"
#include "clspv/Option.h"
#include "clspv/spirv_c_strings.hpp"
#include "clspv/spirv_glsl.hpp"

#include "ArgKind.h"
#include "Builtins.h"
#include "ComputeStructuredOrder.h"
#include "ConstantEmitter.h"
#include "Constants.h"
#include "DescriptorCounter.h"
#include "NormalizeGlobalVariable.h"
#include "Passes.h"
#include "Types.h"

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

using namespace llvm;
using namespace clspv;
using namespace mdconst;

namespace {

cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);

const char *kCompositeConstructFunctionPrefix = "clspv.composite_construct.";

enum SPIRVOperandType {
  NUMBERID,
  LITERAL_INTEGER,
  LITERAL_STRING,
  LITERAL_FLOAT
};

struct SPIRVOperand {
  explicit SPIRVOperand(SPIRVOperandType Ty, uint32_t Num)
      : Type(Ty), LiteralNum(1, Num) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, const char *Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, ArrayRef<uint32_t> NumVec)
      : Type(Ty), LiteralNum(NumVec.begin(), NumVec.end()) {}

  SPIRVOperandType getType() const { return Type; };
  uint32_t getNumID() const { return LiteralNum[0]; };
  std::string getLiteralStr() const { return LiteralStr; };
  ArrayRef<uint32_t> getLiteralNum() const { return LiteralNum; };

  uint32_t GetNumWords() const {
    switch (Type) {
    case NUMBERID:
      return 1;
    case LITERAL_INTEGER:
    case LITERAL_FLOAT:
      return uint32_t(LiteralNum.size());
    case LITERAL_STRING:
      // Account for the terminating null character.
      return uint32_t((LiteralStr.size() + 4) / 4);
    }
    llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
  }

private:
  SPIRVOperandType Type;
  std::string LiteralStr;
  SmallVector<uint32_t, 4> LiteralNum;
};
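
// Word-count examples for the operand kinds above: a NUMBERID is always one
// word; a literal integer or float passed as N 32-bit words counts as N words;
// and a LITERAL_STRING such as "main" counts the terminating null, so its four
// characters occupy (4 + 4) / 4 == 2 words.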

class SPIRVOperandList {
public:
  typedef std::unique_ptr<SPIRVOperand> element_type;
  typedef SmallVector<element_type, 8> container_type;
  typedef container_type::iterator iterator;
  SPIRVOperandList() {}
  SPIRVOperandList(const SPIRVOperandList &other) = delete;
  SPIRVOperandList(SPIRVOperandList &&other) {
    contents_ = std::move(other.contents_);
    other.contents_.clear();
  }
  iterator begin() { return contents_.begin(); }
  iterator end() { return contents_.end(); }
  operator ArrayRef<element_type>() { return contents_; }
  void push_back(element_type op) { contents_.push_back(std::move(op)); }
  void clear() { contents_.clear(); }
  size_t size() const { return contents_.size(); }
  const SPIRVOperand *operator[](size_t i) { return contents_[i].get(); }

  const container_type &getOperands() const { return contents_; }

private:
  container_type contents_;
};

SPIRVOperandList &operator<<(SPIRVOperandList &list,
                             std::unique_ptr<SPIRVOperand> elem) {
  list.push_back(std::move(elem));
  return list;
}

std::unique_ptr<SPIRVOperand> MkNum(uint32_t num) {
  return std::make_unique<SPIRVOperand>(LITERAL_INTEGER, num);
}
std::unique_ptr<SPIRVOperand> MkInteger(ArrayRef<uint32_t> num_vec) {
  return std::make_unique<SPIRVOperand>(LITERAL_INTEGER, num_vec);
}
std::unique_ptr<SPIRVOperand> MkFloat(ArrayRef<uint32_t> num_vec) {
  return std::make_unique<SPIRVOperand>(LITERAL_FLOAT, num_vec);
}
std::unique_ptr<SPIRVOperand> MkId(uint32_t id) {
  return std::make_unique<SPIRVOperand>(NUMBERID, id);
}
std::unique_ptr<SPIRVOperand> MkString(StringRef str) {
  return std::make_unique<SPIRVOperand>(LITERAL_STRING, str);
}
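
// An illustrative (hypothetical) use of the helpers above: the operands of an
// OpTypeInt instruction could be built as
//   SPIRVOperandList Ops;
//   Ops << MkNum(32) << MkNum(1);  // width 32, signedness 1
// and then handed to a SPIRVInstruction below, which takes ownership of the
// operands and clears the list.
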
struct SPIRVInstruction {
  // Creates an instruction with an opcode and no result ID, and with the given
  // operands. This computes its own word count. Takes ownership of the
  // operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandList &Ops)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {
    for (auto &operand : Ops) {
      WordCount += uint16_t(operand->GetNumWords());
    }
    Operands.reserve(Ops.size());
    for (auto &ptr : Ops) {
      Operands.emplace_back(std::move(ptr));
      ptr.reset(nullptr);
    }
    Ops.clear();
  }
  // Creates an instruction with an opcode and a non-zero result ID, and
  // with the given operands. This computes its own word count. Takes ownership
  // of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID, SPIRVOperandList &Ops)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    for (auto &operand : Ops) {
      WordCount += operand->GetNumWords();
    }
    Operands.reserve(Ops.size());
    for (auto &ptr : Ops) {
      Operands.emplace_back(std::move(ptr));
      ptr.reset(nullptr);
    }
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
    Ops.clear();
  }

  // Creates an instruction with an opcode and no result ID, and with the
  // single operand. This computes its own word count.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandList::element_type operand)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {
    WordCount += operand->GetNumWords();
    Operands.emplace_back(std::move(operand));
    operand.reset(nullptr);
  }
  // Creates an instruction with an opcode and a non-zero result ID, and
  // with the single operand. This computes its own word count.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID,
                   SPIRVOperandList::element_type operand)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    WordCount += operand->GetNumWords();
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
    Operands.emplace_back(std::move(operand));
    operand.reset(nullptr);
  }
  // Creates an instruction with an opcode and a non-zero result ID, and no
  // operands.
  SPIRVInstruction(spv::Op Opc, uint32_t ResID)
      : WordCount(2), Opcode(static_cast<uint16_t>(Opc)), ResultID(ResID) {
    if (ResID == 0) {
      llvm_unreachable("Result ID of 0 was provided");
    }
  }
  // Creates an instruction with an opcode, no result ID, no type ID, and no
  // operands.
  SPIRVInstruction(spv::Op Opc)
      : WordCount(1), Opcode(static_cast<uint16_t>(Opc)), ResultID(0) {}

  uint32_t getWordCount() const { return WordCount; }
  uint16_t getOpcode() const { return Opcode; }
  uint32_t getResultID() const { return ResultID; }
  ArrayRef<std::unique_ptr<SPIRVOperand>> getOperands() const {
    return Operands;
  }

private:
  uint32_t WordCount; // Check the 16-bit bound at code generation time.
  uint16_t Opcode;
  uint32_t ResultID;
  SmallVector<std::unique_ptr<SPIRVOperand>, 4> Operands;
};
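
// Encoding note (a sketch of the standard SPIR-V binary form): the first word
// of each emitted instruction packs WordCount in its upper 16 bits and Opcode
// in its lower 16 bits. For example, %int = OpTypeInt 32 1 is four words:
// the count/opcode word, the result ID, and two literal operands, which is
// why the result-ID constructors above start WordCount at 2.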

struct SPIRVProducerPass final : public ModulePass {
  typedef DenseMap<Type *, uint32_t> TypeMapType;
  typedef UniqueVector<Type *> TypeList;
  typedef DenseMap<Value *, uint32_t> ValueMapType;
  typedef UniqueVector<Value *> ValueList;
  typedef std::vector<std::pair<Value *, uint32_t>> EntryPointVecType;
  typedef std::list<SPIRVInstruction *> SPIRVInstructionList;
  // A vector of tuples, each of which is:
  // - the LLVM instruction that we will later generate SPIR-V code for
  // - where the SPIR-V instruction should be inserted
  // - the result ID of the SPIR-V instruction
  typedef std::vector<
      std::tuple<Value *, SPIRVInstructionList::iterator, uint32_t>>
      DeferredInstVecType;
  typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
      GlobalConstFuncMapType;

  explicit SPIRVProducerPass(
      raw_pwrite_stream &out,
      std::vector<clspv::version0::DescriptorMapEntry> *descriptor_map_entries,
      ArrayRef<std::pair<unsigned, std::string>> samplerMap,
      bool outputCInitList)
      : ModulePass(ID), samplerMap(samplerMap), out(out),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(&out),
        descriptorMapEntries(descriptor_map_entries),
        outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0), max_local_spec_id_(0) {}

  virtual ~SPIRVProducerPass() {
    for (auto *Inst : SPIRVInsts) {
      delete Inst;
    }
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<LoopInfoWrapperPass>();
  }

  virtual bool runOnModule(Module &module) override;

  // output the SPIR-V header block
  void outputHeader();

  // patch the SPIR-V header block
  void patchHeader();

  uint32_t lookupType(Type *Ty) {
    if (Ty->isPointerTy() &&
        (Ty->getPointerAddressSpace() != AddressSpace::UniformConstant)) {
      auto PointeeTy = Ty->getPointerElementType();
      if (PointeeTy->isStructTy() &&
          dyn_cast<StructType>(PointeeTy)->isOpaque()) {
        Ty = PointeeTy;
      }
    }

    auto where = TypeMap.find(Ty);
    if (where == TypeMap.end()) {
      if (Ty) {
        errs() << "Unhandled type " << *Ty << "\n";
      } else {
        errs() << "Unhandled type (null)\n";
      }
      llvm_unreachable("\nUnhandled type!");
    }

    return where->second;
  }
  TypeMapType &getImageTypeMap() { return ImageTypeMap; }
  TypeList &getImageTypeList() { return ImageTypeList; }
  TypeList &getTypeList() { return Types; };
  ValueList &getConstantList() { return Constants; };
  ValueMapType &getValueMap() { return ValueMap; }
  ValueMapType &getAllocatedValueMap() { return AllocatedValueMap; }
  SPIRVInstructionList &getSPIRVInstList() { return SPIRVInsts; };
  EntryPointVecType &getEntryPointVec() { return EntryPointVec; };
  DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; };
  ValueList &getEntryPointInterfacesVec() { return EntryPointInterfacesVec; };
  uint32_t &getOpExtInstImportID() { return OpExtInstImportID; };
  std::vector<uint32_t> &getBuiltinDimVec() { return BuiltinDimensionVec; };
  bool hasVariablePointersStorageBuffer() {
    return HasVariablePointersStorageBuffer;
  }
  void setVariablePointersStorageBuffer(bool Val) {
    HasVariablePointersStorageBuffer = Val;
  }
  bool hasVariablePointers() { return HasVariablePointers; };
  void setVariablePointers(bool Val) { HasVariablePointers = Val; };
  ArrayRef<std::pair<unsigned, std::string>> &getSamplerMap() {
    return samplerMap;
  }
  GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
    return GlobalConstFuncTypeMap;
  }
  SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
    return GlobalConstArgumentSet;
  }
  TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }

  void GenerateLLVMIRInfo(Module &M, const DataLayout &DL);
  // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
  // *not* be converted to a storage buffer, replace each such global variable
  // with one in the storage class expected by SPIR-V.
  void FindGlobalConstVars(Module &M, const DataLayout &DL);
  // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
  // ModuleOrderedResourceVars.
  void FindResourceVars(Module &M, const DataLayout &DL);
  void FindWorkgroupVars(Module &M);
  bool FindExtInst(Module &M);
  void FindTypePerGlobalVar(GlobalVariable &GV);
  void FindTypePerFunc(Function &F);
  void FindTypesForSamplerMap(Module &M);
  void FindTypesForResourceVars(Module &M);
  // Inserts |Ty| and relevant sub-types into the |Types| member, indicating
  // that |Ty| and its subtypes will need a corresponding SPIR-V type.
  void FindType(Type *Ty);
  void FindConstantPerGlobalVar(GlobalVariable &GV);
  void FindConstantPerFunc(Function &F);
  void FindConstant(Value *V);
  void GenerateExtInstImport();
  // Generates instructions for SPIR-V types corresponding to the LLVM types
  // saved in the |Types| member. A type follows its subtypes. IDs are
  // allocated sequentially starting with the current value of nextID, and
  // with a type following its subtypes. Also updates nextID to just beyond
  // the last generated ID.
  void GenerateSPIRVTypes(LLVMContext &context, Module &module);
  void GenerateSPIRVConstants();
  void GenerateModuleInfo(Module &M);
  void GeneratePushConstantDescriptormapEntries(Module &M);
  void GenerateGlobalVar(GlobalVariable &GV);
  void GenerateWorkgroupVars();
  // Generate descriptor map entries for resource variables associated with
  // arguments to F.
  void GenerateDescriptorMapInfo(const DataLayout &DL, Function &F);
  void GenerateSamplers(Module &M);
  // Generate OpVariables for %clspv.resource.var.* calls.
  void GenerateResourceVars(Module &M);
  void GenerateFuncPrologue(Function &F);
  void GenerateFuncBody(Function &F);
  void GenerateEntryPointInitialStores();
  spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
  spv::Op GetSPIRVCastOpcode(Instruction &I);
  spv::Op GetSPIRVBinaryOpcode(Instruction &I);
  void GenerateInstruction(Instruction &I);
  void GenerateFuncEpilogue();
  void HandleDeferredInstruction();
  void HandleDeferredDecorations(const DataLayout &DL);
  bool is4xi8vec(Type *Ty) const;
  spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
  spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
  spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
  // Returns the GLSL extended instruction enum that the given function
  // call maps to. If none, then returns the 0 value, i.e. GLSLstd450Bad.
  glsl::ExtInst getExtInstEnum(StringRef Name);
  // Returns the GLSL extended instruction enum indirectly used by the given
  // function. That is, to implement the given function, we use an extended
  // instruction plus one more instruction. If none, then returns the 0 value,
  // i.e. GLSLstd450Bad.
  glsl::ExtInst getIndirectExtInstEnum(StringRef Name);
  // Returns the single GLSL extended instruction used directly or
  // indirectly by the given function call.
  glsl::ExtInst getDirectOrIndirectExtInstEnum(StringRef Name);
  void WriteOneWord(uint32_t Word);
  void WriteResultID(SPIRVInstruction *Inst);
  void WriteWordCountAndOpcode(SPIRVInstruction *Inst);
  void WriteOperand(const std::unique_ptr<SPIRVOperand> &Op);
  void WriteSPIRVBinary();

  // Returns true if |type| is compatible with OpConstantNull.
  bool IsTypeNullable(const Type *type) const;

  // Populate UBO remapped type maps.
  void PopulateUBOTypeMaps(Module &module);

  // Populate the merge and continue block maps.
  void PopulateStructuredCFGMaps(Module &module);

  // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
  // uses the internal map, otherwise it falls back on the data layout.
  uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
  uint64_t GetTypeStoreSize(Type *type, const DataLayout &DL);
  uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);
  uint32_t GetExplicitLayoutStructMemberOffset(StructType *type,
                                               unsigned member,
                                               const DataLayout &DL);

  // Returns the base pointer of |v|.
  Value *GetBasePointer(Value *v);

  // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| based on
  // |address_space|.
  void setVariablePointersCapabilities(unsigned address_space);

  // Returns true if |lhs| and |rhs| represent the same resource or workgroup
  // variable.
  bool sameResource(Value *lhs, Value *rhs) const;

  // Returns true if |inst| is phi or select that selects from the same
  // structure (or null).
  bool selectFromSameObject(Instruction *inst);

  // Returns true if |Arg| is called with a coherent resource.
  bool CalledWithCoherentResource(Argument &Arg);

private:
  static char ID;
  ArrayRef<std::pair<unsigned, std::string>> samplerMap;
  raw_pwrite_stream &out;

  // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
  // convert to other formats on demand?

  // When emitting a C initialization list, the WriteSPIRVBinary method
  // will actually write its words to this vector via binaryTempOut.
  SmallVector<char, 100> binaryTempUnderlyingVector;
  raw_svector_ostream binaryTempOut;

  // Binary output writes to this stream, which might be |out| or
  // |binaryTempOut|. It's the latter when we really want to write a C
  // initializer list.
  raw_pwrite_stream *binaryOut;
  std::vector<version0::DescriptorMapEntry> *descriptorMapEntries;
  const bool outputCInitList; // If true, output looks like {0x7023, ... , 5}
  uint64_t patchBoundOffset;
  uint32_t nextID;

  // ID for OpTypeInt 32 1.
  uint32_t int32ID = 0;
  // ID for OpTypeVector %int 4.
  uint32_t v4int32ID = 0;

  // Maps an LLVM Type pointer to the corresponding SPIR-V Id.
  TypeMapType TypeMap;
  // Maps an LLVM image type to its SPIR-V ID.
  TypeMapType ImageTypeMap;
  // A unique-vector of LLVM image types. This list is used to provide
  // deterministic traversal of image types.
  TypeList ImageTypeList;
  // A unique-vector of LLVM types that map to a SPIR-V type.
  TypeList Types;
  ValueList Constants;
  // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
  ValueMapType ValueMap;
  ValueMapType AllocatedValueMap;
  SPIRVInstructionList SPIRVInsts;

  EntryPointVecType EntryPointVec;
  DeferredInstVecType DeferredInstVec;
  ValueList EntryPointInterfacesVec;
  uint32_t OpExtInstImportID;
  std::vector<uint32_t> BuiltinDimensionVec;
  bool HasVariablePointersStorageBuffer;
  bool HasVariablePointers;
  Type *SamplerTy;
  DenseMap<unsigned, unsigned> SamplerLiteralToIDMap;

  // If a function F has a pointer-to-__constant parameter, then this variable
  // will map F's type to (G, index of the parameter), where in a first phase
  // G is F's type. During FindTypePerFunc, G will be changed to F's type
  // but replacing the pointer-to-constant parameter with
  // pointer-to-ModuleScopePrivate.
  // TODO(dneto): This doesn't seem general enough? A function might have
  // more than one such parameter.
  GlobalConstFuncMapType GlobalConstFuncTypeMap;
  SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
  // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
  // or array types, and which point into transparent memory (StorageBuffer
  // storage class). These will require an ArrayStride decoration.
  // See SPV_KHR_variable_pointers rev 13.
  TypeList TypesNeedingArrayStride;

  // This is truly ugly, but works around what look like driver bugs.
  // For get_local_size, an earlier part of the flow has created a module-scope
  // variable in Private address space to hold the value for the workgroup
  // size. Its initializer is a uint3 value marked as builtin WorkgroupSize.
  // When this is present, save the IDs of the initializer value and variable
  // in these two variables. We only ever do a vector load from it, and
  // when we see one of those, substitute just the value of the initializer.
  // This mimics what Glslang does, and that's what drivers are used to.
  // TODO(dneto): Remove this once drivers are fixed.
  uint32_t WorkgroupSizeValueID;
  uint32_t WorkgroupSizeVarID;

  // Bookkeeping for mapping kernel arguments to resource variables.
  struct ResourceVarInfo {
    ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
                    Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
        : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
          var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
          addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
    const int index; // Index into ResourceVarInfoList
    const unsigned descriptor_set;
    const unsigned binding;
    Function *const var_fn; // The @clspv.resource.var.* function.
    const clspv::ArgKind arg_kind;
    const int coherent;
    const unsigned addr_space; // The LLVM address space
    // The SPIR-V ID of the OpVariable. Not populated at construction time.
    uint32_t var_id = 0;
  };
  // A list of resource var info. Each one corresponds to a module-scope
  // resource variable we will have to create. Resource var indices are
  // indices into this vector.
  SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
  // This is a vector of pointers of all the resource vars, but ordered by
  // kernel function, and then by argument.
  UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
  // Map a function to the ordered list of resource variables it uses, one for
  // each argument. If an argument does not use a resource variable, it
  // will have a null pointer entry.
  using FunctionToResourceVarsMapType =
      DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
  FunctionToResourceVarsMapType FunctionToResourceVarsMap;

  // What LLVM types map to SPIR-V types needing layout? These are the
  // arrays and structures supporting storage buffers and uniform buffers.
  TypeList TypesNeedingLayout;
  // What LLVM struct types map to a SPIR-V struct type with Block decoration?
  UniqueVector<StructType *> StructTypesNeedingBlock;
  // For a call that represents a load from an opaque type (samplers, images),
  // map it to the variable id it should load from.
  DenseMap<CallInst *, uint32_t> ResourceVarDeferredLoadCalls;

  // One larger than the maximum used SpecId for pointer-to-local arguments.
  int max_local_spec_id_;
  // An ordered list of the kernel arguments of type pointer-to-local.
  using LocalArgList = SmallVector<Argument *, 8>;
  LocalArgList LocalArgs;
  // Information about a pointer-to-local argument.
  struct LocalArgInfo {
    // The SPIR-V ID of the array variable.
    uint32_t variable_id;
    // The element type of the array.
    Type *elem_type;
    // The ID of the array size constant.
    uint32_t array_size_id;
    // The ID of the array type.
    uint32_t array_type_id;
    // The ID of the pointer to the array type.
    uint32_t ptr_array_type_id;
    // The specialization constant ID of the array size.
    int spec_id;
  };
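  // A sketch of how these fields are expected to relate (assuming the usual
  // clspv handling of pointer-to-local kernel arguments): the argument becomes
  // a Workgroup-storage array variable (variable_id) whose array type
  // (array_type_id, pointed to by ptr_array_type_id) has a length given by the
  // constant array_size_id, a specialization constant decorated with SpecId
  // spec_id so the host can choose the size at pipeline-creation time.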
  // A mapping from Argument to its assigned SpecId.
  DenseMap<const Argument *, int> LocalArgSpecIds;
  // A mapping from SpecId to its LocalArgInfo.
  DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
  // A mapping from a remapped type to its real offsets.
  DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
  // A mapping from a remapped type to its real sizes.
  DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
      RemappedUBOTypeSizes;

  // Maps basic block to its merge block.
  DenseMap<BasicBlock *, BasicBlock *> MergeBlocks;
  // Maps basic block to its continue block.
  DenseMap<BasicBlock *, BasicBlock *> ContinueBlocks;
};

char SPIRVProducerPass::ID;

} // namespace

namespace clspv {
ModulePass *createSPIRVProducerPass(
    raw_pwrite_stream &out,
    std::vector<version0::DescriptorMapEntry> *descriptor_map_entries,
    ArrayRef<std::pair<unsigned, std::string>> samplerMap,
    bool outputCInitList) {
  return new SPIRVProducerPass(out, descriptor_map_entries, samplerMap,
                               outputCInitList);
}
} // namespace clspv

bool SPIRVProducerPass::runOnModule(Module &module) {
  binaryOut = outputCInitList ? &binaryTempOut : &out;

  PopulateUBOTypeMaps(module);
  PopulateStructuredCFGMaps(module);

  // SPIR-V always begins with its header information
  outputHeader();

  const DataLayout &DL = module.getDataLayout();

  // Gather information from the LLVM IR that we require.
  GenerateLLVMIRInfo(module, DL);

  // Collect information on global variables too.
  for (GlobalVariable &GV : module.globals()) {
    // If the GV is one of our special __spirv_* variables, remove the
    // initializer as it was only placed there to force LLVM to not throw the
    // value away.
    if (GV.getName().startswith("__spirv_") ||
        GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
      GV.setInitializer(nullptr);
    }

    // Collect types' information from global variable.
    FindTypePerGlobalVar(GV);

    // Collect constant information from global variable.
    FindConstantPerGlobalVar(GV);

    // If the variable is an input, entry points need to know about it.
    if (AddressSpace::Input == GV.getType()->getPointerAddressSpace()) {
      getEntryPointInterfacesVec().insert(&GV);
    }
  }

  // If there are extended instructions, generate OpExtInstImport.
  if (FindExtInst(module)) {
    GenerateExtInstImport();
  }

  // Generate SPIRV instructions for types.
  GenerateSPIRVTypes(module.getContext(), module);

  // Generate SPIRV constants.
  GenerateSPIRVConstants();

  // Generate literal samplers if necessary.
  GenerateSamplers(module);

  // Generate descriptor map entries for all push constants
  GeneratePushConstantDescriptormapEntries(module);

  // Generate SPIRV variables.
  for (GlobalVariable &GV : module.globals()) {
    GenerateGlobalVar(GV);
  }
  GenerateResourceVars(module);
  GenerateWorkgroupVars();

  // Generate SPIRV instructions for each function.
  for (Function &F : module) {
    if (F.isDeclaration()) {
      continue;
    }

    GenerateDescriptorMapInfo(DL, F);

    // Generate Function Prologue.
    GenerateFuncPrologue(F);

    // Generate SPIRV instructions for function body.
    GenerateFuncBody(F);

    // Generate Function Epilogue.
    GenerateFuncEpilogue();
  }

  HandleDeferredInstruction();
  HandleDeferredDecorations(DL);

  // Generate SPIRV module information.
  GenerateModuleInfo(module);

  WriteSPIRVBinary();

  // We need to patch the SPIR-V header to set bound correctly.
  patchHeader();

  if (outputCInitList) {
    bool first = true;
    std::ostringstream os;

    auto emit_word = [&os, &first](uint32_t word) {
      if (!first)
        os << ",\n";
      os << word;
      first = false;
    };

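    // Reassemble the byte stream into 32-bit words for the initializer list.
    // On a little-endian host the first four bytes are 0x03 0x02 0x23 0x07,
    // so the first word printed is the magic number 0x07230203 (119734787).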
    os << "{";
    const std::string str(binaryTempOut.str());
    for (unsigned i = 0; i < str.size(); i += 4) {
      const uint32_t a = static_cast<unsigned char>(str[i]);
      const uint32_t b = static_cast<unsigned char>(str[i + 1]);
      const uint32_t c = static_cast<unsigned char>(str[i + 2]);
      const uint32_t d = static_cast<unsigned char>(str[i + 3]);
      emit_word(a | (b << 8) | (c << 16) | (d << 24));
    }
    os << "}\n";
    out << os.str();
  }

  return false;
}

void SPIRVProducerPass::outputHeader() {
  binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
                   sizeof(spv::MagicNumber));
  binaryOut->write(reinterpret_cast<const char *>(&spv::Version),
                   sizeof(spv::Version));

  // use Google's vendor ID
  const uint32_t vendor = 21 << 16;
  binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));

  // we record where we need to come back to and patch in the bound value
  patchBoundOffset = binaryOut->tell();

  // output a bad bound for now
  binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));

  // output the schema (reserved for use and must be 0)
  const uint32_t schema = 0;
  binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
}
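
// The five words written above form the standard SPIR-V module header: the
// magic number, the version, the generator word (vendor ID 21, Google, in the
// upper 16 bits), the ID bound (nextID for now, overwritten by patchHeader
// once the final bound is known), and the reserved schema word, which is zero.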

void SPIRVProducerPass::patchHeader() {
  // for a binary we just write the value of nextID over bound
  binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
                    patchBoundOffset);
}

void SPIRVProducerPass::GenerateLLVMIRInfo(Module &M, const DataLayout &DL) {
  // This function generates artificial LLVM IR that the Vulkan SPIR-V output
  // requires, such as global variables for arguments, constants, and pointer
  // types for argument access. It is executed ahead of FindType and
  // FindConstant.
  LLVMContext &Context = M.getContext();

  FindGlobalConstVars(M, DL);

  FindResourceVars(M, DL);

  bool HasWorkGroupBuiltin = false;
  for (GlobalVariable &GV : M.globals()) {
    const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
    if (spv::BuiltInWorkgroupSize == BuiltinType) {
      HasWorkGroupBuiltin = true;
    }
  }

  FindTypesForSamplerMap(M);
  FindTypesForResourceVars(M);
  FindWorkgroupVars(M);

  for (Function &F : M) {
    if (F.isDeclaration()) {
      continue;
    }

    for (BasicBlock &BB : F) {
      for (Instruction &I : BB) {
        if (I.getOpcode() == Instruction::ZExt ||
            I.getOpcode() == Instruction::SExt ||
            I.getOpcode() == Instruction::UIToFP) {
          // If there is zext with i1 type, it will be changed to OpSelect. The
          // OpSelect needs constant 0 and 1 so the constants are added here.

          auto OpTy = I.getOperand(0)->getType();

          if (OpTy->isIntOrIntVectorTy(1)) {
            if (I.getOpcode() == Instruction::ZExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::get(I.getType(), 1));
            } else if (I.getOpcode() == Instruction::SExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::getSigned(I.getType(), -1));
            } else {
              FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
              FindConstant(ConstantFP::get(Context, APFloat(1.0f)));
            }
          }
        } else if (CallInst *Call = dyn_cast<CallInst>(&I)) {
          StringRef callee_name = Call->getCalledFunction()->getName();

          // Handle image type specially.
          if (clspv::IsImageBuiltin(callee_name)) {
            TypeMapType &OpImageTypeMap = getImageTypeMap();
            Type *ImageTy =
                Call->getArgOperand(0)->getType()->getPointerElementType();
            OpImageTypeMap[ImageTy] = 0;
            getImageTypeList().insert(ImageTy);
          }

          if (clspv::IsSampledImageRead(callee_name)) {
            // All sampled reads need a floating point 0 for the Lod operand.
            FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
          }

          if (clspv::IsUnsampledImageRead(callee_name)) {
            // All unsampled reads need an integer 0 for the Lod operand.
            FindConstant(ConstantInt::get(Context, APInt(32, 0)));
          }

          if (clspv::IsImageQuery(callee_name)) {
            Type *ImageTy = Call->getOperand(0)->getType();
            const uint32_t dim = ImageDimensionality(ImageTy);
            uint32_t components =
                dim + (clspv::IsArrayImageType(ImageTy) ? 1 : 0);
            if (components > 1) {
              // OpImageQuerySize* return |components| components.
              FindType(VectorType::get(Type::getInt32Ty(Context), components));
              if (dim == 3 && IsGetImageDim(callee_name)) {
                // get_image_dim for 3D images returns an int4.
                FindType(
                    VectorType::get(Type::getInt32Ty(Context), components + 1));
              }
            }

            if (clspv::IsSampledImageType(ImageTy)) {
              // All sampled image queries need an integer 0 for the Lod
              // operand.
              FindConstant(ConstantInt::get(Context, APInt(32, 0)));
            }
          }
        }
      }
    }

    // More things to do on kernel functions
    if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
      if (const MDNode *MD =
              dyn_cast<Function>(&F)->getMetadata("reqd_work_group_size")) {
        // We generate constants if the WorkgroupSize builtin is being used.
        if (HasWorkGroupBuiltin) {
          // Collect constant information for work group size.
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(0)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(1)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(2)));
        }
      }
    }

    // TODO(alan-baker): make this better.
    if (M.getTypeByName("opencl.image1d_ro_t.float") ||
        M.getTypeByName("opencl.image1d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image1d_wo_t.float") ||
        M.getTypeByName("opencl.image2d_ro_t.float") ||
        M.getTypeByName("opencl.image2d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image2d_wo_t.float") ||
        M.getTypeByName("opencl.image3d_ro_t.float") ||
        M.getTypeByName("opencl.image3d_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image3d_wo_t.float") ||
        M.getTypeByName("opencl.image1d_array_ro_t.float") ||
        M.getTypeByName("opencl.image1d_array_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image1d_array_wo_t.float") ||
        M.getTypeByName("opencl.image2d_array_ro_t.float") ||
        M.getTypeByName("opencl.image2d_array_ro_t.float.sampled") ||
        M.getTypeByName("opencl.image2d_array_wo_t.float")) {
      FindType(Type::getFloatTy(Context));
    } else if (M.getTypeByName("opencl.image1d_ro_t.uint") ||
               M.getTypeByName("opencl.image1d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image1d_wo_t.uint") ||
               M.getTypeByName("opencl.image2d_ro_t.uint") ||
               M.getTypeByName("opencl.image2d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image2d_wo_t.uint") ||
               M.getTypeByName("opencl.image3d_ro_t.uint") ||
               M.getTypeByName("opencl.image3d_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image3d_wo_t.uint") ||
               M.getTypeByName("opencl.image1d_array_ro_t.uint") ||
               M.getTypeByName("opencl.image1d_array_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image1d_array_wo_t.uint") ||
               M.getTypeByName("opencl.image2d_array_ro_t.uint") ||
               M.getTypeByName("opencl.image2d_array_ro_t.uint.sampled") ||
               M.getTypeByName("opencl.image2d_array_wo_t.uint")) {
      FindType(Type::getInt32Ty(Context));
    } else if (M.getTypeByName("opencl.image1d_ro_t.int") ||
               M.getTypeByName("opencl.image1d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image1d_wo_t.int") ||
               M.getTypeByName("opencl.image2d_ro_t.int") ||
               M.getTypeByName("opencl.image2d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image2d_wo_t.int") ||
               M.getTypeByName("opencl.image3d_ro_t.int") ||
               M.getTypeByName("opencl.image3d_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image3d_wo_t.int") ||
               M.getTypeByName("opencl.image1d_array_ro_t.int") ||
               M.getTypeByName("opencl.image1d_array_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image1d_array_wo_t.int") ||
               M.getTypeByName("opencl.image2d_array_ro_t.int") ||
               M.getTypeByName("opencl.image2d_array_ro_t.int.sampled") ||
               M.getTypeByName("opencl.image2d_array_wo_t.int")) {
      // Nothing for now...
    } else {
      // This was likely an UndefValue.
      FindType(Type::getFloatTy(Context));
    }

    // Collect types' information from function.
    FindTypePerFunc(F);

    // Collect constant information from function.
    FindConstantPerFunc(F);
  }
}

void SPIRVProducerPass::FindGlobalConstVars(Module &M, const DataLayout &DL) {
  clspv::NormalizeGlobalVariables(M);

  SmallVector<GlobalVariable *, 8> GVList;
  SmallVector<GlobalVariable *, 8> DeadGVList;
  for (GlobalVariable &GV : M.globals()) {
    if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
      if (GV.use_empty()) {
        DeadGVList.push_back(&GV);
      } else {
        GVList.push_back(&GV);
      }
    }
  }

  // Remove dead global __constant variables.
  for (auto GV : DeadGVList) {
    GV->eraseFromParent();
  }
  DeadGVList.clear();

  if (clspv::Option::ModuleConstantsInStorageBuffer()) {
    // For now, we only support a single storage buffer.
    if (GVList.size() > 0) {
      assert(GVList.size() == 1);
      const auto *GV = GVList[0];
      const auto constants_byte_size =
          (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
      const size_t kConstantMaxSize = 65536;
      if (constants_byte_size > kConstantMaxSize) {
        outs() << "Max __constant capacity of " << kConstantMaxSize
               << " bytes exceeded: " << constants_byte_size << " bytes used\n";
        llvm_unreachable("Max __constant capacity exceeded");
      }
    }
  } else {
    // Change global constant variable's address space to ModuleScopePrivate.
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    for (auto GV : GVList) {
      // Create new gv with ModuleScopePrivate address space.
      Type *NewGVTy = GV->getType()->getPointerElementType();
      GlobalVariable *NewGV = new GlobalVariable(
          M, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
          nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
      NewGV->takeName(GV);

      const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
      SmallVector<User *, 8> CandidateUsers;

      auto record_called_function_type_as_user =
          [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
            // Find argument index.
            unsigned index = 0;
            for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
              if (gv == call->getOperand(i)) {
                // TODO(dneto): Should we break here?
                index = i;
              }
            }

            // Record function type with global constant.
            GlobalConstFuncTyMap[call->getFunctionType()] =
                std::make_pair(call->getFunctionType(), index);
          };

      for (User *GVU : GVUsers) {
        if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
          record_called_function_type_as_user(GV, Call);
        } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
          // Check GEP users.
          for (User *GEPU : GEP->users()) {
            if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
              record_called_function_type_as_user(GEP, GEPCall);
            }
          }
        }

        CandidateUsers.push_back(GVU);
      }

      for (User *U : CandidateUsers) {
        // Update users of gv with new gv.
        if (!isa<Constant>(U)) {
          // #254: Can't change operands of a constant, but this shouldn't be
          // something that sticks around in the module.
          U->replaceUsesOfWith(GV, NewGV);
        }
      }

      // Delete original gv.
      GV->eraseFromParent();
    }
  }
}

void SPIRVProducerPass::FindResourceVars(Module &M, const DataLayout &) {
  ResourceVarInfoList.clear();
  FunctionToResourceVarsMap.clear();
  ModuleOrderedResourceVars.reset();
  // Normally, there is one resource variable per clspv.resource.var.*
  // function, since that is unique'd by arg type and index. By design,
  // we can share these resource variables across kernels because all
  // kernels use the same descriptor set.
  //
  // But if the user requested distinct descriptor sets per kernel, then
  // the descriptor allocator has made different (set,binding) pairs for
  // the same (type,arg_index) pair. Since we can decorate a resource
  // variable with only exactly one DescriptorSet and Binding, we are
  // forced in this case to make distinct resource variables whenever
  // the same clspv.resource.var.X function is seen with distinct
  // (set,binding) values.
  const bool always_distinct_sets =
      clspv::Option::DistinctKernelDescriptorSets();
  for (Function &F : M) {
    // Rely on the fact the resource var functions have a stable ordering
    // in the module.
    if (F.getName().startswith(clspv::ResourceAccessorFunction())) {
      // Find all calls to this function with distinct set and binding pairs.
      // Save them in ResourceVarInfoList.

1072 // Determine uniqueness of the (set,binding) pairs only within this
1073 // one resource-var builtin function.
1074 using SetAndBinding = std::pair<unsigned, unsigned>;
1075 // Maps set and binding to the resource var info.
1076 DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
1077 bool first_use = true;
1078 for (auto &U : F.uses()) {
1079 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
1080 const auto set = unsigned(
1081 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
1082 const auto binding = unsigned(
1083 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
1084 const auto arg_kind = clspv::ArgKind(
1085 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
1086 const auto arg_index = unsigned(
1087 dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
alan-bakere9308012019-03-15 10:25:13 -04001088 const auto coherent = unsigned(
1089 dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04001090
1091 // Find or make the resource var info for this combination.
1092 ResourceVarInfo *rv = nullptr;
1093 if (always_distinct_sets) {
1094 // Make a new resource var any time we see a different
1095 // (set,binding) pair.
1096 SetAndBinding key{set, binding};
1097 auto where = set_and_binding_map.find(key);
1098 if (where == set_and_binding_map.end()) {
1099 rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
alan-bakere9308012019-03-15 10:25:13 -04001100 binding, &F, arg_kind, coherent);
David Neto862b7d82018-06-14 18:48:37 -04001101 ResourceVarInfoList.emplace_back(rv);
1102 set_and_binding_map[key] = rv;
1103 } else {
1104 rv = where->second;
1105 }
1106 } else {
1107 // The default is to make exactly one resource for each
1108 // clspv.resource.var.* function.
1109 if (first_use) {
1110 first_use = false;
1111 rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
alan-bakere9308012019-03-15 10:25:13 -04001112 binding, &F, arg_kind, coherent);
David Neto862b7d82018-06-14 18:48:37 -04001113 ResourceVarInfoList.emplace_back(rv);
1114 } else {
1115 rv = ResourceVarInfoList.back().get();
1116 }
1117 }
1118
1119 // Now populate FunctionToResourceVarsMap.
1120 auto &mapping =
1121 FunctionToResourceVarsMap[call->getParent()->getParent()];
1122 while (mapping.size() <= arg_index) {
1123 mapping.push_back(nullptr);
1124 }
1125 mapping[arg_index] = rv;
1126 }
1127 }
1128 }
1129 }
1130
1131 // Populate ModuleOrderedResourceVars.
1132 for (Function &F : M) {
1133 auto where = FunctionToResourceVarsMap.find(&F);
1134 if (where != FunctionToResourceVarsMap.end()) {
1135 for (auto &rv : where->second) {
1136 if (rv != nullptr) {
1137 ModuleOrderedResourceVars.insert(rv);
1138 }
1139 }
1140 }
1141 }
1142 if (ShowResourceVars) {
1143 for (auto *info : ModuleOrderedResourceVars) {
1144 outs() << "MORV index " << info->index << " (" << info->descriptor_set
1145 << "," << info->binding << ") " << *(info->var_fn->getReturnType())
1146 << "\n";
1147 }
1148 }
1149}
1150
David Neto22f144c2017-06-12 14:26:21 -04001151bool SPIRVProducerPass::FindExtInst(Module &M) {
1152 LLVMContext &Context = M.getContext();
1153 bool HasExtInst = false;
1154
1155 for (Function &F : M) {
1156 for (BasicBlock &BB : F) {
1157 for (Instruction &I : BB) {
1158 if (CallInst *Call = dyn_cast<CallInst>(&I)) {
1159 Function *Callee = Call->getCalledFunction();
1160 // Check whether this call maps to an extended instruction.
David Neto3fbb4072017-10-16 11:28:14 -04001161 auto callee_name = Callee->getName();
1162 const glsl::ExtInst EInst = getExtInstEnum(callee_name);
1163 const glsl::ExtInst IndirectEInst =
1164 getIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04001165
David Neto3fbb4072017-10-16 11:28:14 -04001166 HasExtInst |=
1167 (EInst != kGlslExtInstBad) || (IndirectEInst != kGlslExtInstBad);
1168
1169 if (IndirectEInst) {
1170 // Register extra constants if needed.
1171
1172 // Registers a type and constant for computing the result of the
1173 // given instruction. If the result of the instruction is a vector,
1174 // then make a splat vector constant with the same number of
1175 // elements.
1176 auto register_constant = [this, &I](Constant *constant) {
1177 FindType(constant->getType());
1178 FindConstant(constant);
1179 if (auto *vectorTy = dyn_cast<VectorType>(I.getType())) {
1180 // Register the splat vector of the value with the same
1181 // width as the result of the instruction.
1182 auto *vec_constant = ConstantVector::getSplat(
1183 static_cast<unsigned>(vectorTy->getNumElements()),
1184 constant);
1185 FindConstant(vec_constant);
1186 FindType(vec_constant->getType());
1187 }
1188 };
1189 switch (IndirectEInst) {
1190 case glsl::ExtInstFindUMsb:
1191 // clz needs OpExtInst and OpISub with constant 31, or splat
1192 // vector of 31. Add it to the constant list here.
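            // Sketch of the eventual lowering (not emitted here): clz(x)
            // becomes
            //   %msb = OpExtInst %uint %glsl_set FindUMsb %x
            //   %res = OpISub %uint %c_31 %msb
            // where %c_31 is the scalar or splat-vector 31 registered here.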
1193 register_constant(
1194 ConstantInt::get(Type::getInt32Ty(Context), 31));
1195 break;
1196 case glsl::ExtInstAcos:
1197 case glsl::ExtInstAsin:
Kévin Petiteb9f90a2018-09-29 12:29:34 +01001198 case glsl::ExtInstAtan:
David Neto3fbb4072017-10-16 11:28:14 -04001199 case glsl::ExtInstAtan2:
1200 // We need 1/pi for acospi, asinpi, atan2pi.
1201 register_constant(
1202 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
1203 break;
1204 default:
1205 assert(false && "internally inconsistent");
1206 }
David Neto22f144c2017-06-12 14:26:21 -04001207 }
1208 }
1209 }
1210 }
1211 }
1212
1213 return HasExtInst;
1214}
1215
1216void SPIRVProducerPass::FindTypePerGlobalVar(GlobalVariable &GV) {
1217 // Investigate global variable's type.
1218 FindType(GV.getType());
1219}
1220
1221void SPIRVProducerPass::FindTypePerFunc(Function &F) {
1222 // Investigate function's type.
1223 FunctionType *FTy = F.getFunctionType();
1224
1225 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
1226 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
David Neto9ed8e2f2018-03-24 06:47:24 -07001227 // Handle a regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04001228 if (GlobalConstFuncTyMap.count(FTy)) {
1229 uint32_t GVCstArgIdx = GlobalConstFuncTypeMap[FTy].second;
1230 SmallVector<Type *, 4> NewFuncParamTys;
1231 for (unsigned i = 0; i < FTy->getNumParams(); i++) {
1232 Type *ParamTy = FTy->getParamType(i);
1233 if (i == GVCstArgIdx) {
1234 Type *EleTy = ParamTy->getPointerElementType();
1235 ParamTy = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1236 }
1237
1238 NewFuncParamTys.push_back(ParamTy);
1239 }
1240
1241 FunctionType *NewFTy =
1242 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1243 GlobalConstFuncTyMap[FTy] = std::make_pair(NewFTy, GVCstArgIdx);
1244 FTy = NewFTy;
1245 }
1246
1247 FindType(FTy);
1248 } else {
1249 // Kernel arguments are mapped to resource variables, so the generated
1250 // kernel function type has no parameters; add that type to the type map.
1251 SmallVector<Type *, 4> NewFuncParamTys;
1252 FunctionType *NewFTy =
1253 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1254 FindType(NewFTy);
1255 }
1256
1257 // Investigate instructions' type in function body.
1258 for (BasicBlock &BB : F) {
1259 for (Instruction &I : BB) {
1260 if (isa<ShuffleVectorInst>(I)) {
1261 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1262 // Ignore type for mask of shuffle vector instruction.
1263 if (i == 2) {
1264 continue;
1265 }
1266
1267 Value *Op = I.getOperand(i);
1268 if (!isa<MetadataAsValue>(Op)) {
1269 FindType(Op->getType());
1270 }
1271 }
1272
1273 FindType(I.getType());
1274 continue;
1275 }
1276
David Neto862b7d82018-06-14 18:48:37 -04001277 CallInst *Call = dyn_cast<CallInst>(&I);
1278
1279 if (Call && Call->getCalledFunction()->getName().startswith(
Alan Baker202c8c72018-08-13 13:47:44 -04001280 clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001281 // This is a fake call representing access to a resource variable.
1282 // We handle that elsewhere.
1283 continue;
1284 }
1285
Alan Baker202c8c72018-08-13 13:47:44 -04001286 if (Call && Call->getCalledFunction()->getName().startswith(
1287 clspv::WorkgroupAccessorFunction())) {
1288 // This is a fake call representing access to a workgroup variable.
1289 // We handle that elsewhere.
1290 continue;
1291 }
1292
alan-bakerf083bed2020-01-29 08:15:42 -05001293 // #497: InsertValue and ExtractValue map to OpCompositeInsert and
1294 // OpCompositeExtract which takes literal values for indices. As a result
1295 // don't map the type of indices.
1296 if (I.getOpcode() == Instruction::ExtractValue) {
1297 FindType(I.getOperand(0)->getType());
1298 continue;
1299 }
1300 if (I.getOpcode() == Instruction::InsertValue) {
1301 FindType(I.getOperand(0)->getType());
1302 FindType(I.getOperand(1)->getType());
1303 continue;
1304 }
1305
1306 // #497: InsertElement and ExtractElement map to OpCompositeExtract if
1307 // the index is a constant. In such a case don't map the index type.
1308 if (I.getOpcode() == Instruction::ExtractElement) {
1309 FindType(I.getOperand(0)->getType());
1310 Value *op1 = I.getOperand(1);
1311 if (!isa<Constant>(op1) || isa<GlobalValue>(op1)) {
1312 FindType(op1->getType());
1313 }
1314 continue;
1315 }
1316 if (I.getOpcode() == Instruction::InsertElement) {
1317 FindType(I.getOperand(0)->getType());
1318 FindType(I.getOperand(1)->getType());
1319 Value *op2 = I.getOperand(2);
1320 if (!isa<Constant>(op2) || isa<GlobalValue>(op2)) {
1321 FindType(op2->getType());
1322 }
1323 continue;
1324 }
1325
David Neto22f144c2017-06-12 14:26:21 -04001326 // Work through the operands of the instruction.
1327 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1328 Value *const Op = I.getOperand(i);
1329 // If any of the operands is a constant, find the type!
1330 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1331 FindType(Op->getType());
1332 }
1333 }
1334
1335 for (Use &Op : I.operands()) {
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001336 if (isa<CallInst>(&I)) {
David Neto22f144c2017-06-12 14:26:21 -04001337 // Don't check a call instruction's operand types.
1338 break;
1339 }
Alan Baker202c8c72018-08-13 13:47:44 -04001340 if (CallInst *OpCall = dyn_cast<CallInst>(Op)) {
1341 if (OpCall && OpCall->getCalledFunction()->getName().startswith(
1342 clspv::WorkgroupAccessorFunction())) {
1343 // This is a fake call representing access to a workgroup variable.
1344 // We handle that elsewhere.
1345 continue;
1346 }
1347 }
David Neto22f144c2017-06-12 14:26:21 -04001348 if (!isa<MetadataAsValue>(&Op)) {
1349 FindType(Op->getType());
1350 continue;
1351 }
1352 }
1353
David Neto22f144c2017-06-12 14:26:21 -04001354 // We don't want to track the type of this call as we are going to replace
1355 // it.
Kévin Petitdf71de32019-04-09 14:09:50 +01001356 if (Call && (clspv::LiteralSamplerFunction() ==
David Neto22f144c2017-06-12 14:26:21 -04001357 Call->getCalledFunction()->getName())) {
1358 continue;
1359 }
1360
1361 if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(&I)) {
1362 // If gep's base operand has ModuleScopePrivate address space, make gep
1363 // return ModuleScopePrivate address space.
1364 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate) {
1365 // Add pointer type with private address space for global constant to
1366 // type list.
1367 Type *EleTy = I.getType()->getPointerElementType();
1368 Type *NewPTy =
1369 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1370
1371 FindType(NewPTy);
1372 continue;
1373 }
1374 }
1375
1376 FindType(I.getType());
1377 }
1378 }
1379}
1380
David Neto862b7d82018-06-14 18:48:37 -04001381void SPIRVProducerPass::FindTypesForSamplerMap(Module &M) {
1382 // If we are using a sampler map, find the type of the sampler.
Kévin Petitdf71de32019-04-09 14:09:50 +01001383 if (M.getFunction(clspv::LiteralSamplerFunction()) ||
David Neto862b7d82018-06-14 18:48:37 -04001384 0 < getSamplerMap().size()) {
1385 auto SamplerStructTy = M.getTypeByName("opencl.sampler_t");
1386 if (!SamplerStructTy) {
1387 SamplerStructTy = StructType::create(M.getContext(), "opencl.sampler_t");
1388 }
1389
1390 SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);
1391
1392 FindType(SamplerTy);
1393 }
1394}
1395
1396void SPIRVProducerPass::FindTypesForResourceVars(Module &M) {
1397 // Record types so they are generated.
1398 TypesNeedingLayout.reset();
1399 StructTypesNeedingBlock.reset();
1400
1401 // To match older clspv codegen, generate the float type first if required
1402 // for images.
1403 for (const auto *info : ModuleOrderedResourceVars) {
1404 if (info->arg_kind == clspv::ArgKind::ReadOnlyImage ||
1405 info->arg_kind == clspv::ArgKind::WriteOnlyImage) {
alan-bakerf67468c2019-11-25 15:51:49 -05001406 if (IsIntImageType(info->var_fn->getReturnType())) {
1407 // Nothing for now...
1408 } else if (IsUintImageType(info->var_fn->getReturnType())) {
1409 FindType(Type::getInt32Ty(M.getContext()));
1410 }
1411
1412 // We need "float" either for the sampled type or for the Lod operand.
David Neto862b7d82018-06-14 18:48:37 -04001413 FindType(Type::getFloatTy(M.getContext()));
David Neto862b7d82018-06-14 18:48:37 -04001414 }
1415 }
1416
1417 for (const auto *info : ModuleOrderedResourceVars) {
1418 Type *type = info->var_fn->getReturnType();
1419
1420 switch (info->arg_kind) {
1421 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04001422 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04001423 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1424 StructTypesNeedingBlock.insert(sty);
1425 } else {
1426 errs() << *type << "\n";
1427 llvm_unreachable("Buffer arguments must map to structures!");
1428 }
1429 break;
1430 case clspv::ArgKind::Pod:
1431 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1432 StructTypesNeedingBlock.insert(sty);
1433 } else {
1434 errs() << *type << "\n";
1435 llvm_unreachable("POD arguments must map to structures!");
1436 }
1437 break;
1438 case clspv::ArgKind::ReadOnlyImage:
1439 case clspv::ArgKind::WriteOnlyImage:
1440 case clspv::ArgKind::Sampler:
1441 // Sampler and image types map to the pointee type but
1442 // in the uniform constant address space.
1443 type = PointerType::get(type->getPointerElementType(),
1444 clspv::AddressSpace::UniformConstant);
1445 break;
1446 default:
1447 break;
1448 }
1449
1450 // The converted type is the type of the OpVariable we will generate.
1451 // If the pointee type is an array of size zero, FindType will convert it
1452 // to a runtime array.
1453 FindType(type);
1454 }
1455
alan-bakerdcd97412019-09-16 15:32:30 -04001456 // If module constants are clustered in a storage buffer then that struct
1457 // needs layout decorations.
1458 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
1459 for (GlobalVariable &GV : M.globals()) {
1460 PointerType *PTy = cast<PointerType>(GV.getType());
1461 const auto AS = PTy->getAddressSpace();
1462 const bool module_scope_constant_external_init =
1463 (AS == AddressSpace::Constant) && GV.hasInitializer();
1464 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
1465 if (module_scope_constant_external_init &&
1466 spv::BuiltInMax == BuiltinType) {
1467 StructTypesNeedingBlock.insert(
1468 cast<StructType>(PTy->getPointerElementType()));
1469 }
1470 }
1471 }
1472
Kévin Petitbbbda972020-03-03 19:16:31 +00001473 for (const GlobalVariable &GV : M.globals()) {
1474 if (GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
1475 auto Ty = cast<PointerType>(GV.getType())->getPointerElementType();
1476 assert(Ty->isStructTy() && "Push constants have to be structures.");
1477 auto STy = cast<StructType>(Ty);
1478 StructTypesNeedingBlock.insert(STy);
1479 }
1480 }
1481
David Neto862b7d82018-06-14 18:48:37 -04001482 // Traverse the arrays and structures underneath each Block, and
1483 // mark them as needing layout.
1484 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1485 StructTypesNeedingBlock.end());
1486 while (!work_list.empty()) {
1487 Type *type = work_list.back();
1488 work_list.pop_back();
1489 TypesNeedingLayout.insert(type);
1490 switch (type->getTypeID()) {
1491 case Type::ArrayTyID:
1492 work_list.push_back(type->getArrayElementType());
1493 if (!Hack_generate_runtime_array_stride_early) {
1494 // Remember this array type for deferred decoration.
1495 TypesNeedingArrayStride.insert(type);
1496 }
1497 break;
1498 case Type::StructTyID:
1499 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1500 work_list.push_back(elem_ty);
1501 }
1502 default:
1503 // This type and its contained types don't get layout.
1504 break;
1505 }
1506 }
1507}
1508
Alan Baker202c8c72018-08-13 13:47:44 -04001509void SPIRVProducerPass::FindWorkgroupVars(Module &M) {
1510 // The SpecId assignment for pointer-to-local arguments is recorded in
1511 // module-level metadata. Translate that information into local argument
1512 // information.
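  // Each metadata entry is expected to look roughly like (illustrative):
  //   !{<function>, i32 <argument index>, i32 <SpecId>}
  // matching the three tuple operands decoded below.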
1513 NamedMDNode *nmd = M.getNamedMetadata(clspv::LocalSpecIdMetadataName());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001514 if (!nmd)
1515 return;
Alan Baker202c8c72018-08-13 13:47:44 -04001516 for (auto operand : nmd->operands()) {
1517 MDTuple *tuple = cast<MDTuple>(operand);
1518 ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
1519 Function *func = cast<Function>(fn_md->getValue());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001520 ConstantAsMetadata *arg_index_md =
1521 cast<ConstantAsMetadata>(tuple->getOperand(1));
1522 int arg_index = static_cast<int>(
1523 cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
1524 Argument *arg = &*(func->arg_begin() + arg_index);
Alan Baker202c8c72018-08-13 13:47:44 -04001525
1526 ConstantAsMetadata *spec_id_md =
1527 cast<ConstantAsMetadata>(tuple->getOperand(2));
alan-bakerb6b09dc2018-11-08 16:59:28 -05001528 int spec_id = static_cast<int>(
1529 cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04001530
1531 max_local_spec_id_ = std::max(max_local_spec_id_, spec_id + 1);
1532 LocalArgSpecIds[arg] = spec_id;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001533 if (LocalSpecIdInfoMap.count(spec_id))
1534 continue;
Alan Baker202c8c72018-08-13 13:47:44 -04001535
1536 // We haven't seen this SpecId yet, so generate the LocalArgInfo for it.
1537 LocalArgInfo info{nextID, arg->getType()->getPointerElementType(),
1538 nextID + 1, nextID + 2,
1539 nextID + 3, spec_id};
1540 LocalSpecIdInfoMap[spec_id] = info;
1541 nextID += 4;
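    // Four consecutive IDs are reserved here. The later pointer-to-local
    // handling (see GenerateSPIRVTypes) consumes the array-size spec
    // constant, array type, and pointer-to-array type IDs from this info;
    // the first reserved ID is presumably the workgroup variable itself.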
1542
1543 // Ensure the types necessary for this argument get generated.
1544 Type *IdxTy = Type::getInt32Ty(M.getContext());
1545 FindConstant(ConstantInt::get(IdxTy, 0));
1546 FindType(IdxTy);
1547 FindType(arg->getType());
1548 }
1549}
1550
David Neto22f144c2017-06-12 14:26:21 -04001551void SPIRVProducerPass::FindType(Type *Ty) {
1552 TypeList &TyList = getTypeList();
1553
1554 if (0 != TyList.idFor(Ty)) {
1555 return;
1556 }
1557
1558 if (Ty->isPointerTy()) {
1559 auto AddrSpace = Ty->getPointerAddressSpace();
1560 if ((AddressSpace::Constant == AddrSpace) ||
1561 (AddressSpace::Global == AddrSpace)) {
1562 auto PointeeTy = Ty->getPointerElementType();
1563
1564 if (PointeeTy->isStructTy() &&
1565 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1566 FindType(PointeeTy);
1567 auto ActualPointerTy =
1568 PointeeTy->getPointerTo(AddressSpace::UniformConstant);
1569 FindType(ActualPointerTy);
1570 return;
1571 }
1572 }
1573 }
1574
David Neto862b7d82018-06-14 18:48:37 -04001575 // By convention, LLVM array type with 0 elements will map to
1576 // OpTypeRuntimeArray. Otherwise, it will map to OpTypeArray, which
1577 // has a constant number of elements. We need to support the type of that
1578 // constant.
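  // For example (illustrative): [0 x float] becomes
  //   OpTypeRuntimeArray %float
  // while [4 x float] becomes
  //   OpTypeArray %float %c_4
  // which is why the i32 type for the length constant is pulled in below.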
1579 if (auto *arrayTy = dyn_cast<ArrayType>(Ty)) {
1580 if (arrayTy->getNumElements() > 0) {
1581 LLVMContext &Context = Ty->getContext();
1582 FindType(Type::getInt32Ty(Context));
1583 }
David Neto22f144c2017-06-12 14:26:21 -04001584 }
1585
1586 for (Type *SubTy : Ty->subtypes()) {
1587 FindType(SubTy);
1588 }
1589
1590 TyList.insert(Ty);
1591}
1592
1593void SPIRVProducerPass::FindConstantPerGlobalVar(GlobalVariable &GV) {
1594 // If the global variable has a (non undef) initializer.
1595 if (GV.hasInitializer() && !isa<UndefValue>(GV.getInitializer())) {
David Neto862b7d82018-06-14 18:48:37 -04001596 // Generate the constant if it's not the initializer to a module scope
1597 // constant that we will expect in a storage buffer.
1598 const bool module_scope_constant_external_init =
1599 (GV.getType()->getPointerAddressSpace() == AddressSpace::Constant) &&
1600 clspv::Option::ModuleConstantsInStorageBuffer();
1601 if (!module_scope_constant_external_init) {
1602 FindConstant(GV.getInitializer());
1603 }
David Neto22f144c2017-06-12 14:26:21 -04001604 }
1605}
1606
1607void SPIRVProducerPass::FindConstantPerFunc(Function &F) {
1608 // Investigate constants in function body.
1609 for (BasicBlock &BB : F) {
1610 for (Instruction &I : BB) {
David Neto862b7d82018-06-14 18:48:37 -04001611 if (auto *call = dyn_cast<CallInst>(&I)) {
1612 auto name = call->getCalledFunction()->getName();
Kévin Petitdf71de32019-04-09 14:09:50 +01001613 if (name == clspv::LiteralSamplerFunction()) {
David Neto862b7d82018-06-14 18:48:37 -04001614 // We've handled these constants elsewhere, so skip it.
1615 continue;
1616 }
Alan Baker202c8c72018-08-13 13:47:44 -04001617 if (name.startswith(clspv::ResourceAccessorFunction())) {
1618 continue;
1619 }
1620 if (name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001621 continue;
1622 }
Kévin Petit617a76d2019-04-04 13:54:16 +01001623 if (name.startswith(clspv::SPIRVOpIntrinsicFunction())) {
1624 // Skip the first operand that has the SPIR-V Opcode
1625 for (unsigned i = 1; i < I.getNumOperands(); i++) {
1626 if (isa<Constant>(I.getOperand(i)) &&
1627 !isa<GlobalValue>(I.getOperand(i))) {
1628 FindConstant(I.getOperand(i));
1629 }
1630 }
1631 continue;
1632 }
David Neto22f144c2017-06-12 14:26:21 -04001633 }
1634
1635 if (isa<AllocaInst>(I)) {
1636 // An alloca instruction has a constant for the number of elements. Ignore it.
1637 continue;
1638 } else if (isa<ShuffleVectorInst>(I)) {
1639 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1640 // Ignore constant for mask of shuffle vector instruction.
1641 if (i == 2) {
1642 continue;
1643 }
1644
1645 if (isa<Constant>(I.getOperand(i)) &&
1646 !isa<GlobalValue>(I.getOperand(i))) {
1647 FindConstant(I.getOperand(i));
1648 }
1649 }
1650
1651 continue;
1652 } else if (isa<InsertElementInst>(I)) {
1653 // Handle InsertElement with <4 x i8> specially.
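        // Since <4 x i8> is modeled as a single i32, inserting byte k is
        // eventually lowered roughly as (sketch only, emitted elsewhere):
        //   (word & ~(0xFF << (8*k))) | ((value & 0xFF) << (8*k))
        // hence the 0xFF mask and 8*k shift constants registered below.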
1654 Type *CompositeTy = I.getOperand(0)->getType();
1655 if (is4xi8vec(CompositeTy)) {
1656 LLVMContext &Context = CompositeTy->getContext();
1657 if (isa<Constant>(I.getOperand(0))) {
1658 FindConstant(I.getOperand(0));
1659 }
1660
1661 if (isa<Constant>(I.getOperand(1))) {
1662 FindConstant(I.getOperand(1));
1663 }
1664
1665 // Add mask constant 0xFF.
1666 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1667 FindConstant(CstFF);
1668
1669 // Add shift amount constant.
1670 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
1671 uint64_t Idx = CI->getZExtValue();
1672 Constant *CstShiftAmount =
1673 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1674 FindConstant(CstShiftAmount);
1675 }
1676
1677 continue;
1678 }
1679
1680 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1681 // Ignore constant for index of InsertElement instruction.
1682 if (i == 2) {
1683 continue;
1684 }
1685
1686 if (isa<Constant>(I.getOperand(i)) &&
1687 !isa<GlobalValue>(I.getOperand(i))) {
1688 FindConstant(I.getOperand(i));
1689 }
1690 }
1691
1692 continue;
1693 } else if (isa<ExtractElementInst>(I)) {
1694 // Handle ExtractElement with <4 x i8> specially.
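        // Likewise, extracting byte k is eventually lowered roughly as
        //   (word >> (8*k)) & 0xFF   (sketch only, emitted elsewhere)
        // so 0xFF plus either 8*k (constant index) or 8 (dynamic index) are
        // registered below.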
1695 Type *CompositeTy = I.getOperand(0)->getType();
1696 if (is4xi8vec(CompositeTy)) {
1697 LLVMContext &Context = CompositeTy->getContext();
1698 if (isa<Constant>(I.getOperand(0))) {
1699 FindConstant(I.getOperand(0));
1700 }
1701
1702 // Add mask constant 0xFF.
1703 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1704 FindConstant(CstFF);
1705
1706 // Add shift amount constant.
1707 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
1708 uint64_t Idx = CI->getZExtValue();
1709 Constant *CstShiftAmount =
1710 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1711 FindConstant(CstShiftAmount);
1712 } else {
1713 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
1714 FindConstant(Cst8);
1715 }
1716
1717 continue;
1718 }
1719
1720 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1721 // Ignore constant for index of ExtractElement instruction.
1722 if (i == 1) {
1723 continue;
1724 }
1725
1726 if (isa<Constant>(I.getOperand(i)) &&
1727 !isa<GlobalValue>(I.getOperand(i))) {
1728 FindConstant(I.getOperand(i));
1729 }
1730 }
1731
1732 continue;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001733 } else if ((Instruction::Xor == I.getOpcode()) &&
1734 I.getType()->isIntegerTy(1)) {
1735 // Special-case Xor where the type is i1 and one of the arguments is a
1736 // constant 1 (true): it becomes an OpLogicalNot in SPIR-V, so we don't
1737 // need the constant.
David Neto22f144c2017-06-12 14:26:21 -04001738 bool foundConstantTrue = false;
1739 for (Use &Op : I.operands()) {
1740 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1741 auto CI = cast<ConstantInt>(Op);
1742
1743 if (CI->isZero() || foundConstantTrue) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05001744 // If we already found the true constant, we might (probably only
1745 // on -O0) have an OpLogicalNot which is taking a constant
1746 // argument, so discover it anyway.
David Neto22f144c2017-06-12 14:26:21 -04001747 FindConstant(Op);
1748 } else {
1749 foundConstantTrue = true;
1750 }
1751 }
1752 }
1753
1754 continue;
David Netod2de94a2017-08-28 17:27:47 -04001755 } else if (isa<TruncInst>(I)) {
alan-bakerb39c8262019-03-08 14:03:37 -05001756 // Special case if i8 is not generally handled.
1757 if (!clspv::Option::Int8Support()) {
1758 // For truncation to i8 we mask against 255.
1759 Type *ToTy = I.getType();
1760 if (8u == ToTy->getPrimitiveSizeInBits()) {
1761 LLVMContext &Context = ToTy->getContext();
1762 Constant *Cst255 =
1763 ConstantInt::get(Type::getInt32Ty(Context), 0xff);
1764 FindConstant(Cst255);
1765 }
David Netod2de94a2017-08-28 17:27:47 -04001766 }
Neil Henning39672102017-09-29 14:33:13 +01001767 } else if (isa<AtomicRMWInst>(I)) {
1768 LLVMContext &Context = I.getContext();
1769
1770 FindConstant(
1771 ConstantInt::get(Type::getInt32Ty(Context), spv::ScopeDevice));
1772 FindConstant(ConstantInt::get(
1773 Type::getInt32Ty(Context),
1774 spv::MemorySemanticsUniformMemoryMask |
1775 spv::MemorySemanticsSequentiallyConsistentMask));
David Neto22f144c2017-06-12 14:26:21 -04001776 }
1777
1778 for (Use &Op : I.operands()) {
1779 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1780 FindConstant(Op);
1781 }
1782 }
1783 }
1784 }
1785}
1786
1787void SPIRVProducerPass::FindConstant(Value *V) {
David Neto22f144c2017-06-12 14:26:21 -04001788 ValueList &CstList = getConstantList();
1789
David Netofb9a7972017-08-25 17:08:24 -04001790 // If V is already tracked, ignore it.
1791 if (0 != CstList.idFor(V)) {
David Neto22f144c2017-06-12 14:26:21 -04001792 return;
1793 }
1794
David Neto862b7d82018-06-14 18:48:37 -04001795 if (isa<GlobalValue>(V) && clspv::Option::ModuleConstantsInStorageBuffer()) {
1796 return;
1797 }
1798
David Neto22f144c2017-06-12 14:26:21 -04001799 Constant *Cst = cast<Constant>(V);
David Neto862b7d82018-06-14 18:48:37 -04001800 Type *CstTy = Cst->getType();
David Neto22f144c2017-06-12 14:26:21 -04001801
1802 // Handle constant with <4 x i8> type specially.
David Neto22f144c2017-06-12 14:26:21 -04001803 if (is4xi8vec(CstTy)) {
1804 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001805 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001806 }
1807 }
1808
1809 if (Cst->getNumOperands()) {
1810 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end(); I != E;
1811 ++I) {
1812 FindConstant(*I);
1813 }
1814
David Netofb9a7972017-08-25 17:08:24 -04001815 CstList.insert(Cst);
David Neto22f144c2017-06-12 14:26:21 -04001816 return;
1817 } else if (const ConstantDataSequential *CDS =
1818 dyn_cast<ConstantDataSequential>(Cst)) {
1819 // Add constants for each element to constant list.
1820 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
1821 Constant *EleCst = CDS->getElementAsConstant(i);
1822 FindConstant(EleCst);
1823 }
1824 }
1825
1826 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001827 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001828 }
1829}
1830
1831spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
1832 switch (AddrSpace) {
1833 default:
1834 llvm_unreachable("Unsupported OpenCL address space");
1835 case AddressSpace::Private:
1836 return spv::StorageClassFunction;
1837 case AddressSpace::Global:
David Neto22f144c2017-06-12 14:26:21 -04001838 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001839 case AddressSpace::Constant:
1840 return clspv::Option::ConstantArgsInUniformBuffer()
1841 ? spv::StorageClassUniform
1842 : spv::StorageClassStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -04001843 case AddressSpace::Input:
1844 return spv::StorageClassInput;
1845 case AddressSpace::Local:
1846 return spv::StorageClassWorkgroup;
1847 case AddressSpace::UniformConstant:
1848 return spv::StorageClassUniformConstant;
David Neto9ed8e2f2018-03-24 06:47:24 -07001849 case AddressSpace::Uniform:
David Netoe439d702018-03-23 13:14:08 -07001850 return spv::StorageClassUniform;
David Neto22f144c2017-06-12 14:26:21 -04001851 case AddressSpace::ModuleScopePrivate:
1852 return spv::StorageClassPrivate;
Kévin Petitbbbda972020-03-03 19:16:31 +00001853 case AddressSpace::PushConstant:
1854 return spv::StorageClassPushConstant;
David Neto22f144c2017-06-12 14:26:21 -04001855 }
1856}
1857
David Neto862b7d82018-06-14 18:48:37 -04001858spv::StorageClass
1859SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
1860 switch (arg_kind) {
1861 case clspv::ArgKind::Buffer:
1862 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001863 case clspv::ArgKind::BufferUBO:
1864 return spv::StorageClassUniform;
David Neto862b7d82018-06-14 18:48:37 -04001865 case clspv::ArgKind::Pod:
1866 return clspv::Option::PodArgsInUniformBuffer()
1867 ? spv::StorageClassUniform
1868 : spv::StorageClassStorageBuffer;
1869 case clspv::ArgKind::Local:
1870 return spv::StorageClassWorkgroup;
1871 case clspv::ArgKind::ReadOnlyImage:
1872 case clspv::ArgKind::WriteOnlyImage:
1873 case clspv::ArgKind::Sampler:
1874 return spv::StorageClassUniformConstant;
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001875 default:
1876 llvm_unreachable("Unsupported storage class for argument kind");
David Neto862b7d82018-06-14 18:48:37 -04001877 }
1878}
1879
David Neto22f144c2017-06-12 14:26:21 -04001880spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1881 return StringSwitch<spv::BuiltIn>(Name)
1882 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1883 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1884 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1885 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1886 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
1887 .Default(spv::BuiltInMax);
1888}
1889
1890void SPIRVProducerPass::GenerateExtInstImport() {
1891 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1892 uint32_t &ExtInstImportID = getOpExtInstImportID();
1893
1894 //
1895 // Generate OpExtInstImport.
1896 //
1897 // Ops[0] ... Ops[n] = Name (Literal String)
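  // The generated instruction is effectively:
  //   %ExtInstImportID = OpExtInstImport "GLSL.std.450"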
David Neto22f144c2017-06-12 14:26:21 -04001898 ExtInstImportID = nextID;
David Neto87846742018-04-11 17:36:22 -04001899 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpExtInstImport, nextID++,
1900 MkString("GLSL.std.450")));
David Neto22f144c2017-06-12 14:26:21 -04001901}
1902
alan-bakerb6b09dc2018-11-08 16:59:28 -05001903void SPIRVProducerPass::GenerateSPIRVTypes(LLVMContext &Context,
1904 Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04001905 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
1906 ValueMapType &VMap = getValueMap();
1907 ValueMapType &AllocatedVMap = getAllocatedValueMap();
Alan Bakerfcda9482018-10-02 17:09:59 -04001908 const auto &DL = module.getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04001909
1910 // Map for OpTypeRuntimeArray. If an argument has pointer type, two SPIR-V
1911 // type instructions are generated: OpTypePointer and OpTypeRuntimeArray.
1912 DenseMap<Type *, uint32_t> OpRuntimeTyMap;
1913
1914 for (Type *Ty : getTypeList()) {
1915 // Update TypeMap with nextID for reference later.
1916 TypeMap[Ty] = nextID;
1917
1918 switch (Ty->getTypeID()) {
1919 default: {
1920 Ty->print(errs());
1921 llvm_unreachable("Unsupported type???");
1922 break;
1923 }
1924 case Type::MetadataTyID:
1925 case Type::LabelTyID: {
1926 // Ignore these types.
1927 break;
1928 }
1929 case Type::PointerTyID: {
1930 PointerType *PTy = cast<PointerType>(Ty);
1931 unsigned AddrSpace = PTy->getAddressSpace();
1932
1933 // For the purposes of our Vulkan SPIR-V type system, constant and global
1934 // are conflated.
1935 bool UseExistingOpTypePointer = false;
1936 if (AddressSpace::Constant == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001937 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1938 AddrSpace = AddressSpace::Global;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001939 // Check to see if we already created this type (for instance, if we
1940 // had a constant <type>* and a global <type>*, the type would be
1941 // created by one of these types, and shared by both).
Alan Bakerfcda9482018-10-02 17:09:59 -04001942 auto GlobalTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
1943 if (0 < TypeMap.count(GlobalTy)) {
1944 TypeMap[PTy] = TypeMap[GlobalTy];
1945 UseExistingOpTypePointer = true;
1946 break;
1947 }
David Neto22f144c2017-06-12 14:26:21 -04001948 }
1949 } else if (AddressSpace::Global == AddrSpace) {
Alan Bakerfcda9482018-10-02 17:09:59 -04001950 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1951 AddrSpace = AddressSpace::Constant;
David Neto22f144c2017-06-12 14:26:21 -04001952
alan-bakerb6b09dc2018-11-08 16:59:28 -05001953 // Check to see if we already created this type (for instance, if we
1954 // had a constant <type>* and a global <type>*, the type would be
1955 // created by one of these types, and shared by both).
1956 auto ConstantTy =
1957 PTy->getPointerElementType()->getPointerTo(AddrSpace);
Alan Bakerfcda9482018-10-02 17:09:59 -04001958 if (0 < TypeMap.count(ConstantTy)) {
1959 TypeMap[PTy] = TypeMap[ConstantTy];
1960 UseExistingOpTypePointer = true;
1961 }
David Neto22f144c2017-06-12 14:26:21 -04001962 }
1963 }
1964
David Neto862b7d82018-06-14 18:48:37 -04001965 const bool HasArgUser = true;
David Neto22f144c2017-06-12 14:26:21 -04001966
David Neto862b7d82018-06-14 18:48:37 -04001967 if (HasArgUser && !UseExistingOpTypePointer) {
David Neto22f144c2017-06-12 14:26:21 -04001968 //
1969 // Generate OpTypePointer.
1970 //
1971
1972 // OpTypePointer
1973 // Ops[0] = Storage Class
1974 // Ops[1] = Element Type ID
1975 SPIRVOperandList Ops;
1976
David Neto257c3892018-04-11 13:19:45 -04001977 Ops << MkNum(GetStorageClass(AddrSpace))
1978 << MkId(lookupType(PTy->getElementType()));
David Neto22f144c2017-06-12 14:26:21 -04001979
David Neto87846742018-04-11 17:36:22 -04001980 auto *Inst = new SPIRVInstruction(spv::OpTypePointer, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001981 SPIRVInstList.push_back(Inst);
1982 }
David Neto22f144c2017-06-12 14:26:21 -04001983 break;
1984 }
1985 case Type::StructTyID: {
David Neto22f144c2017-06-12 14:26:21 -04001986 StructType *STy = cast<StructType>(Ty);
1987
1988 // Handle sampler type.
1989 if (STy->isOpaque()) {
1990 if (STy->getName().equals("opencl.sampler_t")) {
1991 //
1992 // Generate OpTypeSampler
1993 //
1994 // Empty Ops.
1995 SPIRVOperandList Ops;
1996
David Neto87846742018-04-11 17:36:22 -04001997 auto *Inst = new SPIRVInstruction(spv::OpTypeSampler, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04001998 SPIRVInstList.push_back(Inst);
1999 break;
alan-bakerf906d2b2019-12-10 11:26:23 -05002000 } else if (STy->getName().startswith("opencl.image1d_ro_t") ||
2001 STy->getName().startswith("opencl.image1d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05002002 STy->getName().startswith("opencl.image1d_array_ro_t") ||
2003 STy->getName().startswith("opencl.image1d_array_wo_t") ||
alan-bakerf906d2b2019-12-10 11:26:23 -05002004 STy->getName().startswith("opencl.image2d_ro_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05002005 STy->getName().startswith("opencl.image2d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05002006 STy->getName().startswith("opencl.image2d_array_ro_t") ||
2007 STy->getName().startswith("opencl.image2d_array_wo_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05002008 STy->getName().startswith("opencl.image3d_ro_t") ||
2009 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04002010 //
2011 // Generate OpTypeImage
2012 //
2013 // Ops[0] = Sampled Type ID
2014 // Ops[1] = Dim ID
2015 // Ops[2] = Depth (Literal Number)
2016 // Ops[3] = Arrayed (Literal Number)
2017 // Ops[4] = MS (Literal Number)
2018 // Ops[5] = Sampled (Literal Number)
2019 // Ops[6] = Image Format ID
2020 //
2021 SPIRVOperandList Ops;
2022
alan-bakerf67468c2019-11-25 15:51:49 -05002023 uint32_t ImageTyID = nextID++;
2024 uint32_t SampledTyID = 0;
2025 if (STy->getName().contains(".float")) {
2026 SampledTyID = lookupType(Type::getFloatTy(Context));
2027 } else if (STy->getName().contains(".uint")) {
2028 SampledTyID = lookupType(Type::getInt32Ty(Context));
2029 } else if (STy->getName().contains(".int")) {
2030 // Generate a signed 32-bit integer if necessary.
2031 if (int32ID == 0) {
2032 int32ID = nextID++;
2033 SPIRVOperandList intOps;
2034 intOps << MkNum(32);
2035 intOps << MkNum(1);
2036 auto signed_int =
2037 new SPIRVInstruction(spv::OpTypeInt, int32ID, intOps);
2038 SPIRVInstList.push_back(signed_int);
2039 }
2040 SampledTyID = int32ID;
2041
2042 // Generate a vec4 of the signed int if necessary.
2043 if (v4int32ID == 0) {
2044 v4int32ID = nextID++;
2045 SPIRVOperandList vecOps;
2046 vecOps << MkId(int32ID);
2047 vecOps << MkNum(4);
2048 auto int_vec =
2049 new SPIRVInstruction(spv::OpTypeVector, v4int32ID, vecOps);
2050 SPIRVInstList.push_back(int_vec);
2051 }
2052 } else {
2053 // This was likely an UndefValue.
2054 SampledTyID = lookupType(Type::getFloatTy(Context));
2055 }
David Neto257c3892018-04-11 13:19:45 -04002056 Ops << MkId(SampledTyID);
David Neto22f144c2017-06-12 14:26:21 -04002057
2058 spv::Dim DimID = spv::Dim2D;
alan-bakerf906d2b2019-12-10 11:26:23 -05002059 if (STy->getName().startswith("opencl.image1d_ro_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05002060 STy->getName().startswith("opencl.image1d_wo_t") ||
2061 STy->getName().startswith("opencl.image1d_array_ro_t") ||
2062 STy->getName().startswith("opencl.image1d_array_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05002063 DimID = spv::Dim1D;
2064 } else if (STy->getName().startswith("opencl.image3d_ro_t") ||
2065 STy->getName().startswith("opencl.image3d_wo_t")) {
David Neto22f144c2017-06-12 14:26:21 -04002066 DimID = spv::Dim3D;
2067 }
David Neto257c3892018-04-11 13:19:45 -04002068 Ops << MkNum(DimID);
David Neto22f144c2017-06-12 14:26:21 -04002069
2070 // TODO: Set up Depth.
David Neto257c3892018-04-11 13:19:45 -04002071 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04002072
alan-baker7150a1d2020-02-25 08:31:06 -05002073 uint32_t arrayed = STy->getName().contains("_array_") ? 1 : 0;
2074 Ops << MkNum(arrayed);
David Neto22f144c2017-06-12 14:26:21 -04002075
2076 // TODO: Set up MS.
David Neto257c3892018-04-11 13:19:45 -04002077 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04002078
alan-baker7150a1d2020-02-25 08:31:06 -05002079 // Set up Sampled.
David Neto22f144c2017-06-12 14:26:21 -04002080 //
2081 // From Spec
2082 //
2083 // 0 indicates this is only known at run time, not at compile time
2084 // 1 indicates will be used with sampler
2085 // 2 indicates will be used without a sampler (a storage image)
2086 uint32_t Sampled = 1;
alan-bakerf67468c2019-11-25 15:51:49 -05002087 if (!STy->getName().contains(".sampled")) {
David Neto22f144c2017-06-12 14:26:21 -04002088 Sampled = 2;
2089 }
David Neto257c3892018-04-11 13:19:45 -04002090 Ops << MkNum(Sampled);
David Neto22f144c2017-06-12 14:26:21 -04002091
2092 // TODO: Set up Image Format.
David Neto257c3892018-04-11 13:19:45 -04002093 Ops << MkNum(spv::ImageFormatUnknown);
David Neto22f144c2017-06-12 14:26:21 -04002094
alan-bakerf67468c2019-11-25 15:51:49 -05002095 auto *Inst = new SPIRVInstruction(spv::OpTypeImage, ImageTyID, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002096 SPIRVInstList.push_back(Inst);
2097 break;
2098 }
2099 }
2100
2101 //
2102 // Generate OpTypeStruct
2103 //
2104 // Ops[0] ... Ops[n] = Member IDs
2105 SPIRVOperandList Ops;
2106
2107 for (auto *EleTy : STy->elements()) {
David Neto862b7d82018-06-14 18:48:37 -04002108 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002109 }
2110
David Neto22f144c2017-06-12 14:26:21 -04002111 uint32_t STyID = nextID;
2112
alan-bakerb6b09dc2018-11-08 16:59:28 -05002113 auto *Inst = new SPIRVInstruction(spv::OpTypeStruct, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002114 SPIRVInstList.push_back(Inst);
2115
2116 // Generate OpMemberDecorate.
2117 auto DecoInsertPoint =
2118 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2119 [](SPIRVInstruction *Inst) -> bool {
2120 return Inst->getOpcode() != spv::OpDecorate &&
2121 Inst->getOpcode() != spv::OpMemberDecorate &&
2122 Inst->getOpcode() != spv::OpExtInstImport;
2123 });
2124
Kévin Petitbbbda972020-03-03 19:16:31 +00002125 if (TypesNeedingLayout.idFor(STy)) {
2126 for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
2127 MemberIdx++) {
2128 // Ops[0] = Structure Type ID
2129 // Ops[1] = Member Index(Literal Number)
2130 // Ops[2] = Decoration (Offset)
2131 // Ops[3] = Byte Offset (Literal Number)
2132 Ops.clear();
David Netoc463b372017-08-10 15:32:21 -04002133
Kévin Petitbbbda972020-03-03 19:16:31 +00002134 Ops << MkId(STyID) << MkNum(MemberIdx)
2135 << MkNum(spv::DecorationOffset);
David Neto22f144c2017-06-12 14:26:21 -04002136
Kévin Petitbbbda972020-03-03 19:16:31 +00002137 const auto ByteOffset =
2138 GetExplicitLayoutStructMemberOffset(STy, MemberIdx, DL);
David Neto22f144c2017-06-12 14:26:21 -04002139
Kévin Petitbbbda972020-03-03 19:16:31 +00002140 Ops << MkNum(ByteOffset);
2141
2142 auto *DecoInst = new SPIRVInstruction(spv::OpMemberDecorate, Ops);
2143 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
Alan Bakerfcda9482018-10-02 17:09:59 -04002144 }
David Neto22f144c2017-06-12 14:26:21 -04002145 }
2146
2147 // Generate OpDecorate.
David Neto862b7d82018-06-14 18:48:37 -04002148 if (StructTypesNeedingBlock.idFor(STy)) {
2149 Ops.clear();
2150 // Use Block decorations with StorageBuffer storage class.
2151 Ops << MkId(STyID) << MkNum(spv::DecorationBlock);
David Neto22f144c2017-06-12 14:26:21 -04002152
David Neto862b7d82018-06-14 18:48:37 -04002153 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2154 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
David Neto22f144c2017-06-12 14:26:21 -04002155 }
2156 break;
2157 }
2158 case Type::IntegerTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002159 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
David Neto22f144c2017-06-12 14:26:21 -04002160
2161 if (BitWidth == 1) {
David Netoef5ba2b2019-12-20 08:35:54 -05002162 auto *Inst = new SPIRVInstruction(spv::OpTypeBool, nextID++);
David Neto22f144c2017-06-12 14:26:21 -04002163 SPIRVInstList.push_back(Inst);
2164 } else {
alan-bakerb39c8262019-03-08 14:03:37 -05002165 if (!clspv::Option::Int8Support()) {
2166 // i8 is added to TypeMap as i32.
2167 // No matter what LLVM type is requested first, always alias the
2168 // second one's SPIR-V type to be the same as the one we generated
2169 // first.
2170 unsigned aliasToWidth = 0;
2171 if (BitWidth == 8) {
2172 aliasToWidth = 32;
2173 BitWidth = 32;
2174 } else if (BitWidth == 32) {
2175 aliasToWidth = 8;
2176 }
2177 if (aliasToWidth) {
2178 Type *otherType = Type::getIntNTy(Ty->getContext(), aliasToWidth);
2179 auto where = TypeMap.find(otherType);
2180 if (where == TypeMap.end()) {
2181 // Go ahead and make it, but also map the other type to it.
2182 TypeMap[otherType] = nextID;
2183 } else {
2184 // Alias this SPIR-V type to the existing type.
2185 TypeMap[Ty] = where->second;
2186 break;
2187 }
David Neto391aeb12017-08-26 15:51:58 -04002188 }
David Neto22f144c2017-06-12 14:26:21 -04002189 }
2190
David Neto257c3892018-04-11 13:19:45 -04002191 SPIRVOperandList Ops;
2192 Ops << MkNum(BitWidth) << MkNum(0 /* not signed */);
David Neto22f144c2017-06-12 14:26:21 -04002193
2194 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002195 new SPIRVInstruction(spv::OpTypeInt, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002196 }
2197 break;
2198 }
2199 case Type::HalfTyID:
2200 case Type::FloatTyID:
2201 case Type::DoubleTyID: {
alan-baker0e64a592019-11-18 13:36:25 -05002202 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
James Price11010dc2019-12-19 13:53:09 -05002203 auto WidthOp = MkNum(BitWidth);
David Neto22f144c2017-06-12 14:26:21 -04002204
2205 SPIRVInstList.push_back(
David Netoef5ba2b2019-12-20 08:35:54 -05002206 new SPIRVInstruction(spv::OpTypeFloat, nextID++, std::move(WidthOp)));
David Neto22f144c2017-06-12 14:26:21 -04002207 break;
2208 }
2209 case Type::ArrayTyID: {
David Neto22f144c2017-06-12 14:26:21 -04002210 ArrayType *ArrTy = cast<ArrayType>(Ty);
David Neto862b7d82018-06-14 18:48:37 -04002211 const uint64_t Length = ArrTy->getArrayNumElements();
2212 if (Length == 0) {
2213 // By convention, map it to a RuntimeArray.
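        // Illustrative result for, e.g., [0 x float]:
        //   %rta = OpTypeRuntimeArray %float
        // plus, when the stride is generated early,
        //   OpDecorate %rta ArrayStride 4
        // with 4 being the allocation size of the element type.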
David Neto22f144c2017-06-12 14:26:21 -04002214
David Neto862b7d82018-06-14 18:48:37 -04002215 // Only generate the type once.
2216 // TODO(dneto): Can it ever be generated more than once?
2217 // Doesn't LLVM type uniqueness guarantee we'll only see this
2218 // once?
2219 Type *EleTy = ArrTy->getArrayElementType();
2220 if (OpRuntimeTyMap.count(EleTy) == 0) {
2221 uint32_t OpTypeRuntimeArrayID = nextID;
2222 OpRuntimeTyMap[Ty] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002223
David Neto862b7d82018-06-14 18:48:37 -04002224 //
2225 // Generate OpTypeRuntimeArray.
2226 //
David Neto22f144c2017-06-12 14:26:21 -04002227
David Neto862b7d82018-06-14 18:48:37 -04002228 // OpTypeRuntimeArray
2229 // Ops[0] = Element Type ID
2230 SPIRVOperandList Ops;
2231 Ops << MkId(lookupType(EleTy));
David Neto22f144c2017-06-12 14:26:21 -04002232
David Neto862b7d82018-06-14 18:48:37 -04002233 SPIRVInstList.push_back(
2234 new SPIRVInstruction(spv::OpTypeRuntimeArray, nextID++, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002235
David Neto862b7d82018-06-14 18:48:37 -04002236 if (Hack_generate_runtime_array_stride_early) {
2237 // Generate OpDecorate.
2238 auto DecoInsertPoint = std::find_if(
2239 SPIRVInstList.begin(), SPIRVInstList.end(),
2240 [](SPIRVInstruction *Inst) -> bool {
2241 return Inst->getOpcode() != spv::OpDecorate &&
2242 Inst->getOpcode() != spv::OpMemberDecorate &&
2243 Inst->getOpcode() != spv::OpExtInstImport;
2244 });
David Neto22f144c2017-06-12 14:26:21 -04002245
David Neto862b7d82018-06-14 18:48:37 -04002246 // Ops[0] = Target ID
2247 // Ops[1] = Decoration (ArrayStride)
2248 // Ops[2] = Stride Number(Literal Number)
2249 Ops.clear();
David Neto85082642018-03-24 06:55:20 -07002250
David Neto862b7d82018-06-14 18:48:37 -04002251 Ops << MkId(OpTypeRuntimeArrayID)
2252 << MkNum(spv::DecorationArrayStride)
Alan Bakerfcda9482018-10-02 17:09:59 -04002253 << MkNum(static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL)));
David Neto22f144c2017-06-12 14:26:21 -04002254
David Neto862b7d82018-06-14 18:48:37 -04002255 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
2256 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
2257 }
2258 }
David Neto22f144c2017-06-12 14:26:21 -04002259
David Neto862b7d82018-06-14 18:48:37 -04002260 } else {
David Neto22f144c2017-06-12 14:26:21 -04002261
David Neto862b7d82018-06-14 18:48:37 -04002262 //
2263 // Generate OpConstant and OpTypeArray.
2264 //
2265
2266 //
2267 // Generate OpConstant for array length.
2268 //
2269 // Ops[0] = Result Type ID
2270 // Ops[1] .. Ops[n] = Values LiteralNumber
2271 SPIRVOperandList Ops;
2272
2273 Type *LengthTy = Type::getInt32Ty(Context);
2274 uint32_t ResTyID = lookupType(LengthTy);
2275 Ops << MkId(ResTyID);
2276
2277 assert(Length < UINT32_MAX);
2278 Ops << MkNum(static_cast<uint32_t>(Length));
2279
2280 // Add constant for length to constant list.
2281 Constant *CstLength = ConstantInt::get(LengthTy, Length);
2282 AllocatedVMap[CstLength] = nextID;
2283 VMap[CstLength] = nextID;
2284 uint32_t LengthID = nextID;
2285
2286 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
2287 SPIRVInstList.push_back(CstInst);
2288
2289 // Remember to generate ArrayStride later
2290 getTypesNeedingArrayStride().insert(Ty);
2291
2292 //
2293 // Generate OpTypeArray.
2294 //
2295 // Ops[0] = Element Type ID
2296 // Ops[1] = Array Length Constant ID
2297 Ops.clear();
2298
2299 uint32_t EleTyID = lookupType(ArrTy->getElementType());
2300 Ops << MkId(EleTyID) << MkId(LengthID);
2301
2302 // Update TypeMap with nextID.
2303 TypeMap[Ty] = nextID;
2304
2305 auto *ArrayInst = new SPIRVInstruction(spv::OpTypeArray, nextID++, Ops);
2306 SPIRVInstList.push_back(ArrayInst);
2307 }
David Neto22f144c2017-06-12 14:26:21 -04002308 break;
2309 }
2310 case Type::VectorTyID: {
alan-bakerb39c8262019-03-08 14:03:37 -05002311 // <4 x i8> is changed to i32 if i8 is not generally supported.
2312 if (!clspv::Option::Int8Support() &&
2313 Ty->getVectorElementType() == Type::getInt8Ty(Context)) {
David Neto22f144c2017-06-12 14:26:21 -04002314 if (Ty->getVectorNumElements() == 4) {
2315 TypeMap[Ty] = lookupType(Ty->getVectorElementType());
2316 break;
2317 } else {
2318 Ty->print(errs());
2319 llvm_unreachable("Support above i8 vector type");
2320 }
2321 }
2322
2323 // Ops[0] = Component Type ID
2324 // Ops[1] = Component Count (Literal Number)
David Neto257c3892018-04-11 13:19:45 -04002325 SPIRVOperandList Ops;
2326 Ops << MkId(lookupType(Ty->getVectorElementType()))
2327 << MkNum(Ty->getVectorNumElements());
David Neto22f144c2017-06-12 14:26:21 -04002328
alan-bakerb6b09dc2018-11-08 16:59:28 -05002329 SPIRVInstruction *inst =
2330 new SPIRVInstruction(spv::OpTypeVector, nextID++, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04002331 SPIRVInstList.push_back(inst);
David Neto22f144c2017-06-12 14:26:21 -04002332 break;
2333 }
2334 case Type::VoidTyID: {
David Netoef5ba2b2019-12-20 08:35:54 -05002335 auto *Inst = new SPIRVInstruction(spv::OpTypeVoid, nextID++);
David Neto22f144c2017-06-12 14:26:21 -04002336 SPIRVInstList.push_back(Inst);
2337 break;
2338 }
2339 case Type::FunctionTyID: {
2340 // Generate SPIRV instruction for function type.
2341 FunctionType *FTy = cast<FunctionType>(Ty);
2342
2343 // Ops[0] = Return Type ID
2344 // Ops[1] ... Ops[n] = Parameter Type IDs
2345 SPIRVOperandList Ops;
2346
2347 // Find SPIRV instruction for return type
David Netoc6f3ab22018-04-06 18:02:31 -04002348 Ops << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04002349
2350 // Find SPIRV instructions for parameter types
2351 for (unsigned k = 0; k < FTy->getNumParams(); k++) {
2352 // Find SPIRV instruction for parameter type.
2353 auto ParamTy = FTy->getParamType(k);
2354 if (ParamTy->isPointerTy()) {
2355 auto PointeeTy = ParamTy->getPointerElementType();
2356 if (PointeeTy->isStructTy() &&
2357 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
2358 ParamTy = PointeeTy;
2359 }
2360 }
2361
David Netoc6f3ab22018-04-06 18:02:31 -04002362 Ops << MkId(lookupType(ParamTy));
David Neto22f144c2017-06-12 14:26:21 -04002363 }
2364
David Neto87846742018-04-11 17:36:22 -04002365 auto *Inst = new SPIRVInstruction(spv::OpTypeFunction, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002366 SPIRVInstList.push_back(Inst);
2367 break;
2368 }
2369 }
2370 }
2371
2372 // Generate OpTypeSampledImage.
alan-bakerabd82722019-12-03 17:14:51 -05002373 for (auto &ImgTy : getImageTypeList()) {
David Neto22f144c2017-06-12 14:26:21 -04002374 //
2375 // Generate OpTypeSampledImage.
2376 //
2377 // Ops[0] = Image Type ID
2378 //
2379 SPIRVOperandList Ops;
2380
David Netoc6f3ab22018-04-06 18:02:31 -04002381 Ops << MkId(TypeMap[ImgTy]);
David Neto22f144c2017-06-12 14:26:21 -04002382
alan-bakerabd82722019-12-03 17:14:51 -05002383 // Update the image type map.
2384 getImageTypeMap()[ImgTy] = nextID;
David Neto22f144c2017-06-12 14:26:21 -04002385
David Neto87846742018-04-11 17:36:22 -04002386 auto *Inst = new SPIRVInstruction(spv::OpTypeSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002387 SPIRVInstList.push_back(Inst);
2388 }
David Netoc6f3ab22018-04-06 18:02:31 -04002389
2390 // Generate types for pointer-to-local arguments.
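  // Illustrative shape of what the loop below emits for one SpecId, assuming
  // a float element type (names are placeholders):
  //   %size  = OpSpecConstant %uint 1       ; specialized via its SpecId
  //   %array = OpTypeArray %float %size
  //   %ptr   = OpTypePointer Workgroup %array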
Alan Baker202c8c72018-08-13 13:47:44 -04002391 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
2392 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002393 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04002394
2395 // Generate the spec constant.
2396 SPIRVOperandList Ops;
2397 Ops << MkId(lookupType(Type::getInt32Ty(Context))) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04002398 SPIRVInstList.push_back(
2399 new SPIRVInstruction(spv::OpSpecConstant, arg_info.array_size_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002400
2401 // Generate the array type.
2402 Ops.clear();
2403 // The element type must have been created.
2404 uint32_t elem_ty_id = lookupType(arg_info.elem_type);
2405 assert(elem_ty_id);
2406 Ops << MkId(elem_ty_id) << MkId(arg_info.array_size_id);
2407
2408 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04002409 new SPIRVInstruction(spv::OpTypeArray, arg_info.array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002410
2411 Ops.clear();
2412 Ops << MkNum(spv::StorageClassWorkgroup) << MkId(arg_info.array_type_id);
David Neto87846742018-04-11 17:36:22 -04002413 SPIRVInstList.push_back(new SPIRVInstruction(
2414 spv::OpTypePointer, arg_info.ptr_array_type_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04002415 }
David Neto22f144c2017-06-12 14:26:21 -04002416}
2417
2418void SPIRVProducerPass::GenerateSPIRVConstants() {
2419 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2420 ValueMapType &VMap = getValueMap();
2421 ValueMapType &AllocatedVMap = getAllocatedValueMap();
2422 ValueList &CstList = getConstantList();
David Neto482550a2018-03-24 05:21:07 -07002423 const bool hack_undef = clspv::Option::HackUndef();
David Neto22f144c2017-06-12 14:26:21 -04002424
2425 for (uint32_t i = 0; i < CstList.size(); i++) {
David Netofb9a7972017-08-25 17:08:24 -04002426 // UniqueVector ids are 1-based.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002427 Constant *Cst = cast<Constant>(CstList[i + 1]);
David Neto22f144c2017-06-12 14:26:21 -04002428
2429 // OpTypeArray's constant was already generated.
David Netofb9a7972017-08-25 17:08:24 -04002430 if (AllocatedVMap.find_as(Cst) != AllocatedVMap.end()) {
David Neto22f144c2017-06-12 14:26:21 -04002431 continue;
2432 }
2433
David Netofb9a7972017-08-25 17:08:24 -04002434 // Set ValueMap with nextID for reference later.
David Neto22f144c2017-06-12 14:26:21 -04002435 VMap[Cst] = nextID;
2436
2437 //
2438 // Generate OpConstant.
2439 //
2440
2441 // Ops[0] = Result Type ID
2442 // Ops[1] .. Ops[n] = Values LiteralNumber
2443 SPIRVOperandList Ops;
2444
David Neto257c3892018-04-11 13:19:45 -04002445 Ops << MkId(lookupType(Cst->getType()));
David Neto22f144c2017-06-12 14:26:21 -04002446
2447 std::vector<uint32_t> LiteralNum;
David Neto22f144c2017-06-12 14:26:21 -04002448 spv::Op Opcode = spv::OpNop;
2449
2450 if (isa<UndefValue>(Cst)) {
2451 // Ops[0] = Result Type ID
David Netoc66b3352017-10-20 14:28:46 -04002452 Opcode = spv::OpUndef;
Alan Baker9bf93fb2018-08-28 16:59:26 -04002453 if (hack_undef && IsTypeNullable(Cst->getType())) {
2454 Opcode = spv::OpConstantNull;
David Netoc66b3352017-10-20 14:28:46 -04002455 }
David Neto22f144c2017-06-12 14:26:21 -04002456 } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
2457 unsigned BitWidth = CI->getBitWidth();
2458 if (BitWidth == 1) {
2459 // If the bitwidth of constant is 1, generate OpConstantTrue or
2460 // OpConstantFalse.
2461 if (CI->getZExtValue()) {
2462 // Ops[0] = Result Type ID
2463 Opcode = spv::OpConstantTrue;
2464 } else {
2465 // Ops[0] = Result Type ID
2466 Opcode = spv::OpConstantFalse;
2467 }
David Neto22f144c2017-06-12 14:26:21 -04002468 } else {
2469 auto V = CI->getZExtValue();
2470 LiteralNum.push_back(V & 0xFFFFFFFF);
2471
2472 if (BitWidth > 32) {
2473 LiteralNum.push_back(V >> 32);
2474 }
2475
2476 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002477
David Neto257c3892018-04-11 13:19:45 -04002478 Ops << MkInteger(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002479 }
2480 } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
2481 uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
2482 Type *CFPTy = CFP->getType();
2483 if (CFPTy->isFloatTy()) {
2484 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
Kévin Petit02ee34e2019-04-04 19:03:22 +01002485 } else if (CFPTy->isDoubleTy()) {
2486 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2487 LiteralNum.push_back(FPVal >> 32);
alan-baker089bf932020-01-07 16:35:45 -05002488 } else if (CFPTy->isHalfTy()) {
2489 LiteralNum.push_back(FPVal & 0xFFFF);
David Neto22f144c2017-06-12 14:26:21 -04002490 } else {
2491 CFPTy->print(errs());
2492 llvm_unreachable("Implement this ConstantFP Type");
2493 }
2494
2495 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002496
David Neto257c3892018-04-11 13:19:45 -04002497 Ops << MkFloat(LiteralNum);
David Neto22f144c2017-06-12 14:26:21 -04002498 } else if (isa<ConstantDataSequential>(Cst) &&
2499 cast<ConstantDataSequential>(Cst)->isString()) {
2500 Cst->print(errs());
2501 llvm_unreachable("Implement this Constant");
2502
2503 } else if (const ConstantDataSequential *CDS =
2504 dyn_cast<ConstantDataSequential>(Cst)) {
David Neto49351ac2017-08-26 17:32:20 -04002505 // Let's convert <4 x i8> constant to int constant specially.
2506 // This case occurs when all the values are specified as constant
2507 // ints.
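      // For example, <i8 1, i8 2, i8 3, i8 4> is packed with element 0 in the
      // most significant byte, yielding the i32 constant 0x01020304.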
2508 Type *CstTy = Cst->getType();
2509 if (is4xi8vec(CstTy)) {
2510 LLVMContext &Context = CstTy->getContext();
2511
2512 //
2513 // Generate OpConstant with OpTypeInt 32 0.
2514 //
Neil Henning39672102017-09-29 14:33:13 +01002515 uint32_t IntValue = 0;
2516 for (unsigned k = 0; k < 4; k++) {
2517 const uint64_t Val = CDS->getElementAsInteger(k);
David Neto49351ac2017-08-26 17:32:20 -04002518 IntValue = (IntValue << 8) | (Val & 0xffu);
2519 }
2520
2521 Type *i32 = Type::getInt32Ty(Context);
2522 Constant *CstInt = ConstantInt::get(i32, IntValue);
2523 // If this constant is already registered on VMap, use it.
2524 if (VMap.count(CstInt)) {
2525 uint32_t CstID = VMap[CstInt];
2526 VMap[Cst] = CstID;
2527 continue;
2528 }
2529
David Neto257c3892018-04-11 13:19:45 -04002530 Ops << MkNum(IntValue);
David Neto49351ac2017-08-26 17:32:20 -04002531
David Neto87846742018-04-11 17:36:22 -04002532 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto49351ac2017-08-26 17:32:20 -04002533 SPIRVInstList.push_back(CstInst);
2534
2535 continue;
2536 }
2537
2538 // A normal constant-data-sequential case.
David Neto22f144c2017-06-12 14:26:21 -04002539 for (unsigned k = 0; k < CDS->getNumElements(); k++) {
2540 Constant *EleCst = CDS->getElementAsConstant(k);
2541 uint32_t EleCstID = VMap[EleCst];
David Neto257c3892018-04-11 13:19:45 -04002542 Ops << MkId(EleCstID);
David Neto22f144c2017-06-12 14:26:21 -04002543 }
2544
2545 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002546 } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
2547 // Let's convert <4 x i8> constant to int constant specially.
David Neto49351ac2017-08-26 17:32:20 -04002548 // This case occurs when at least one of the values is an undef.
David Neto22f144c2017-06-12 14:26:21 -04002549 Type *CstTy = Cst->getType();
2550 if (is4xi8vec(CstTy)) {
2551 LLVMContext &Context = CstTy->getContext();
2552
2553 //
2554 // Generate OpConstant with OpTypeInt 32 0.
2555 //
Neil Henning39672102017-09-29 14:33:13 +01002556 uint32_t IntValue = 0;
David Neto22f144c2017-06-12 14:26:21 -04002557 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
2558 I != E; ++I) {
2559 uint64_t Val = 0;
alan-bakerb6b09dc2018-11-08 16:59:28 -05002560 const Value *CV = *I;
Neil Henning39672102017-09-29 14:33:13 +01002561 if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
2562 Val = CI2->getZExtValue();
David Neto22f144c2017-06-12 14:26:21 -04002563 }
David Neto49351ac2017-08-26 17:32:20 -04002564 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto22f144c2017-06-12 14:26:21 -04002565 }
2566
David Neto49351ac2017-08-26 17:32:20 -04002567 Type *i32 = Type::getInt32Ty(Context);
2568 Constant *CstInt = ConstantInt::get(i32, IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002569 // If this constant is already registered on VMap, use it.
2570 if (VMap.count(CstInt)) {
2571 uint32_t CstID = VMap[CstInt];
2572 VMap[Cst] = CstID;
David Neto19a1bad2017-08-25 15:01:41 -04002573 continue;
David Neto22f144c2017-06-12 14:26:21 -04002574 }
2575
David Neto257c3892018-04-11 13:19:45 -04002576 Ops << MkNum(IntValue);
David Neto22f144c2017-06-12 14:26:21 -04002577
David Neto87846742018-04-11 17:36:22 -04002578 auto *CstInst = new SPIRVInstruction(spv::OpConstant, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002579 SPIRVInstList.push_back(CstInst);
2580
David Neto19a1bad2017-08-25 15:01:41 -04002581 continue;
David Neto22f144c2017-06-12 14:26:21 -04002582 }
2583
2584 // We use a constant composite in SPIR-V for our constant aggregate in
2585 // LLVM.
2586 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002587
2588 for (unsigned k = 0; k < CA->getNumOperands(); k++) {
2589 // Look up the ID of the element of this aggregate (which we will
2590 // previously have created a constant for).
2591 uint32_t ElementConstantID = VMap[CA->getAggregateElement(k)];
2592
2593 // And add an operand to the composite we are constructing
David Neto257c3892018-04-11 13:19:45 -04002594 Ops << MkId(ElementConstantID);
David Neto22f144c2017-06-12 14:26:21 -04002595 }
2596 } else if (Cst->isNullValue()) {
2597 Opcode = spv::OpConstantNull;
David Neto22f144c2017-06-12 14:26:21 -04002598 } else {
2599 Cst->print(errs());
2600 llvm_unreachable("Unsupported Constant???");
2601 }
2602
alan-baker5b86ed72019-02-15 08:26:50 -05002603 if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
2604 // Null pointer requires variable pointers.
2605 setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
2606 }
2607
David Neto87846742018-04-11 17:36:22 -04002608 auto *CstInst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002609 SPIRVInstList.push_back(CstInst);
2610 }
2611}
2612
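// Emits one OpVariable of sampler type (UniformConstant storage class) for
// each distinct literal sampler referenced through the literal-sampler
// builtin function, decorates it with DescriptorSet and Binding, and records
// descriptor map entries for the samplers that are actually used.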
2613void SPIRVProducerPass::GenerateSamplers(Module &M) {
2614 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
David Neto22f144c2017-06-12 14:26:21 -04002615
alan-bakerb6b09dc2018-11-08 16:59:28 -05002616 auto &sampler_map = getSamplerMap();
alan-baker09cb9802019-12-10 13:16:27 -05002617 SamplerLiteralToIDMap.clear();
David Neto862b7d82018-06-14 18:48:37 -04002618 DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
2619 DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;
David Neto22f144c2017-06-12 14:26:21 -04002620
David Neto862b7d82018-06-14 18:48:37 -04002621 // We might have samplers in the sampler map that are not used
2622 // in the translation unit. We need to allocate variables
2623 // for them and bindings too.
2624 DenseSet<unsigned> used_bindings;
David Neto22f144c2017-06-12 14:26:21 -04002625
Kévin Petitdf71de32019-04-09 14:09:50 +01002626 auto *var_fn = M.getFunction(clspv::LiteralSamplerFunction());
alan-baker09cb9802019-12-10 13:16:27 -05002627 // Return if there are no literal samplers.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002628 if (!var_fn)
2629 return;
alan-baker09cb9802019-12-10 13:16:27 -05002630
David Neto862b7d82018-06-14 18:48:37 -04002631 for (auto user : var_fn->users()) {
2632 // Populate SamplerLiteralToDescriptorSetMap and
2633 // SamplerLiteralToBindingMap.
2634 //
2635 // Look for calls like
2636 // call %opencl.sampler_t addrspace(2)*
2637 // @clspv.sampler.var.literal(
2638 // i32 descriptor,
2639 // i32 binding,
alan-baker09cb9802019-12-10 13:16:27 -05002640 // i32 (index-into-sampler-map|sampler_mask))
alan-bakerb6b09dc2018-11-08 16:59:28 -05002641 if (auto *call = dyn_cast<CallInst>(user)) {
alan-baker09cb9802019-12-10 13:16:27 -05002642 const auto third_param = static_cast<unsigned>(
alan-bakerb6b09dc2018-11-08 16:59:28 -05002643 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
alan-baker09cb9802019-12-10 13:16:27 -05002644 auto sampler_value = third_param;
2645 if (clspv::Option::UseSamplerMap()) {
2646 if (third_param >= sampler_map.size()) {
2647 errs() << "Out of bounds index to sampler map: " << third_param;
2648 llvm_unreachable("bad sampler init: out of bounds");
2649 }
2650 sampler_value = sampler_map[third_param].first;
David Neto862b7d82018-06-14 18:48:37 -04002651 }
2652
David Neto862b7d82018-06-14 18:48:37 -04002653 const auto descriptor_set = static_cast<unsigned>(
2654 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
2655 const auto binding = static_cast<unsigned>(
2656 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
2657
2658 SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
2659 SamplerLiteralToBindingMap[sampler_value] = binding;
2660 used_bindings.insert(binding);
2661 }
2662 }
2663
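  // Second pass over the users: allocate exactly one OpVariable per distinct
  // sampler literal, then decorate it with its descriptor set and binding.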
alan-baker09cb9802019-12-10 13:16:27 -05002664 DenseSet<size_t> seen;
2665 for (auto user : var_fn->users()) {
2666 if (!isa<CallInst>(user))
2667 continue;
2668
2669 auto call = cast<CallInst>(user);
2670 const unsigned third_param = static_cast<unsigned>(
2671 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
2672
2673 // Already allocated a variable for this value.
2674 if (!seen.insert(third_param).second)
2675 continue;
2676
2677 auto sampler_value = third_param;
2678 if (clspv::Option::UseSamplerMap()) {
2679 sampler_value = sampler_map[third_param].first;
2680 }
2681
David Neto22f144c2017-06-12 14:26:21 -04002682 // Generate OpVariable.
2683 //
2684    // Ops[0] : Result Type ID
2685    // Ops[1] : Storage Class
2686 SPIRVOperandList Ops;
2687
David Neto257c3892018-04-11 13:19:45 -04002688 Ops << MkId(lookupType(SamplerTy))
2689 << MkNum(spv::StorageClassUniformConstant);
David Neto22f144c2017-06-12 14:26:21 -04002690
David Neto862b7d82018-06-14 18:48:37 -04002691 auto sampler_var_id = nextID++;
2692 auto *Inst = new SPIRVInstruction(spv::OpVariable, sampler_var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002693 SPIRVInstList.push_back(Inst);
2694
alan-baker09cb9802019-12-10 13:16:27 -05002695 SamplerLiteralToIDMap[sampler_value] = sampler_var_id;
David Neto22f144c2017-06-12 14:26:21 -04002696
2697 // Find Insert Point for OpDecorate.
2698 auto DecoInsertPoint =
2699 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2700 [](SPIRVInstruction *Inst) -> bool {
2701 return Inst->getOpcode() != spv::OpDecorate &&
2702 Inst->getOpcode() != spv::OpMemberDecorate &&
2703 Inst->getOpcode() != spv::OpExtInstImport;
2704 });
2705
2706 // Ops[0] = Target ID
2707 // Ops[1] = Decoration (DescriptorSet)
2708 // Ops[2] = LiteralNumber according to Decoration
2709 Ops.clear();
2710
David Neto862b7d82018-06-14 18:48:37 -04002711 unsigned descriptor_set;
2712 unsigned binding;
alan-baker09cb9802019-12-10 13:16:27 -05002713 if (SamplerLiteralToBindingMap.find(sampler_value) ==
alan-bakerb6b09dc2018-11-08 16:59:28 -05002714 SamplerLiteralToBindingMap.end()) {
David Neto862b7d82018-06-14 18:48:37 -04002715      // This sampler is not actually used. Find the next unused binding for it.
2716 for (binding = 0; used_bindings.count(binding); binding++)
2717 ;
2718 descriptor_set = 0; // Literal samplers always use descriptor set 0.
2719 used_bindings.insert(binding);
2720 } else {
alan-baker09cb9802019-12-10 13:16:27 -05002721 descriptor_set = SamplerLiteralToDescriptorSetMap[sampler_value];
2722 binding = SamplerLiteralToBindingMap[sampler_value];
alan-bakercff80152019-06-15 00:38:00 -04002723
alan-baker09cb9802019-12-10 13:16:27 -05002724 version0::DescriptorMapEntry::SamplerData sampler_data = {sampler_value};
alan-bakercff80152019-06-15 00:38:00 -04002725 descriptorMapEntries->emplace_back(std::move(sampler_data),
2726 descriptor_set, binding);
David Neto862b7d82018-06-14 18:48:37 -04002727 }
2728
2729 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationDescriptorSet)
2730 << MkNum(descriptor_set);
David Neto22f144c2017-06-12 14:26:21 -04002731
David Neto87846742018-04-11 17:36:22 -04002732 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002733 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
2734
2735 // Ops[0] = Target ID
2736 // Ops[1] = Decoration (Binding)
2737 // Ops[2] = LiteralNumber according to Decoration
2738 Ops.clear();
David Neto862b7d82018-06-14 18:48:37 -04002739 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationBinding)
2740 << MkNum(binding);
David Neto22f144c2017-06-12 14:26:21 -04002741
David Neto87846742018-04-11 17:36:22 -04002742 auto *BindDecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002743 SPIRVInstList.insert(DecoInsertPoint, BindDecoInst);
2744 }
David Neto862b7d82018-06-14 18:48:37 -04002745}
David Neto22f144c2017-06-12 14:26:21 -04002746
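// Emits an OpVariable for every resource variable (buffers, images, samplers)
// discovered earlier, maps the corresponding builtin calls either directly to
// those variables or, for samplers and images, to deferred loads, and attaches
// DescriptorSet, Binding, and, where needed, Coherent/NonWritable/NonReadable
// decorations.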
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01002747void SPIRVProducerPass::GenerateResourceVars(Module &) {
David Neto862b7d82018-06-14 18:48:37 -04002748 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
2749 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04002750
David Neto862b7d82018-06-14 18:48:37 -04002751 // Generate variables. Make one for each of resource var info object.
2752 for (auto *info : ModuleOrderedResourceVars) {
2753 Type *type = info->var_fn->getReturnType();
2754 // Remap the address space for opaque types.
2755 switch (info->arg_kind) {
2756 case clspv::ArgKind::Sampler:
2757 case clspv::ArgKind::ReadOnlyImage:
2758 case clspv::ArgKind::WriteOnlyImage:
2759 type = PointerType::get(type->getPointerElementType(),
2760 clspv::AddressSpace::UniformConstant);
2761 break;
2762 default:
2763 break;
2764 }
David Neto22f144c2017-06-12 14:26:21 -04002765
David Neto862b7d82018-06-14 18:48:37 -04002766 info->var_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04002767
David Neto862b7d82018-06-14 18:48:37 -04002768 const auto type_id = lookupType(type);
2769 const auto sc = GetStorageClassForArgKind(info->arg_kind);
2770 SPIRVOperandList Ops;
2771 Ops << MkId(type_id) << MkNum(sc);
David Neto22f144c2017-06-12 14:26:21 -04002772
David Neto862b7d82018-06-14 18:48:37 -04002773 auto *Inst = new SPIRVInstruction(spv::OpVariable, info->var_id, Ops);
2774 SPIRVInstList.push_back(Inst);
2775
2776 // Map calls to the variable-builtin-function.
2777 for (auto &U : info->var_fn->uses()) {
2778 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
2779 const auto set = unsigned(
2780 dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
2781 const auto binding = unsigned(
2782 dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
2783 if (set == info->descriptor_set && binding == info->binding) {
2784 switch (info->arg_kind) {
2785 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002786 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002787 case clspv::ArgKind::Pod:
2788 // The call maps to the variable directly.
2789 VMap[call] = info->var_id;
2790 break;
2791 case clspv::ArgKind::Sampler:
2792 case clspv::ArgKind::ReadOnlyImage:
2793 case clspv::ArgKind::WriteOnlyImage:
2794 // The call maps to a load we generate later.
2795 ResourceVarDeferredLoadCalls[call] = info->var_id;
2796 break;
2797 default:
2798 llvm_unreachable("Unhandled arg kind");
2799 }
2800 }
David Neto22f144c2017-06-12 14:26:21 -04002801 }
David Neto862b7d82018-06-14 18:48:37 -04002802 }
2803 }
David Neto22f144c2017-06-12 14:26:21 -04002804
David Neto862b7d82018-06-14 18:48:37 -04002805 // Generate associated decorations.
David Neto22f144c2017-06-12 14:26:21 -04002806
David Neto862b7d82018-06-14 18:48:37 -04002807 // Find Insert Point for OpDecorate.
2808 auto DecoInsertPoint =
2809 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
2810 [](SPIRVInstruction *Inst) -> bool {
2811 return Inst->getOpcode() != spv::OpDecorate &&
2812 Inst->getOpcode() != spv::OpMemberDecorate &&
2813 Inst->getOpcode() != spv::OpExtInstImport;
2814 });
2815
2816 SPIRVOperandList Ops;
2817 for (auto *info : ModuleOrderedResourceVars) {
2818 // Decorate with DescriptorSet and Binding.
2819 Ops.clear();
2820 Ops << MkId(info->var_id) << MkNum(spv::DecorationDescriptorSet)
2821 << MkNum(info->descriptor_set);
2822 SPIRVInstList.insert(DecoInsertPoint,
2823 new SPIRVInstruction(spv::OpDecorate, Ops));
2824
2825 Ops.clear();
2826 Ops << MkId(info->var_id) << MkNum(spv::DecorationBinding)
2827 << MkNum(info->binding);
2828 SPIRVInstList.insert(DecoInsertPoint,
2829 new SPIRVInstruction(spv::OpDecorate, Ops));
2830
alan-bakere9308012019-03-15 10:25:13 -04002831 if (info->coherent) {
2832 // Decorate with Coherent if required for the variable.
2833 Ops.clear();
2834 Ops << MkId(info->var_id) << MkNum(spv::DecorationCoherent);
2835 SPIRVInstList.insert(DecoInsertPoint,
2836 new SPIRVInstruction(spv::OpDecorate, Ops));
2837 }
2838
David Neto862b7d82018-06-14 18:48:37 -04002839 // Generate NonWritable and NonReadable
2840 switch (info->arg_kind) {
2841 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002842 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002843 if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
2844 clspv::AddressSpace::Constant) {
2845 Ops.clear();
2846 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonWritable);
2847 SPIRVInstList.insert(DecoInsertPoint,
2848 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04002849 }
David Neto862b7d82018-06-14 18:48:37 -04002850 break;
David Neto862b7d82018-06-14 18:48:37 -04002851 case clspv::ArgKind::WriteOnlyImage:
2852 Ops.clear();
2853 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonReadable);
2854 SPIRVInstList.insert(DecoInsertPoint,
2855 new SPIRVInstruction(spv::OpDecorate, Ops));
2856 break;
2857 default:
2858 break;
David Neto22f144c2017-06-12 14:26:21 -04002859 }
2860 }
2861}
2862
Kévin Petitbbbda972020-03-03 19:16:31 +00002863namespace {
2864
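// The helpers below implement the scalar, base, and extended alignment rules
// used for standard buffer layouts in the Vulkan specification. As a worked
// example (assuming a 4-byte float component): a 3-component float vector has
// a scalar alignment of 4, a base alignment of 16 (four times the scalar
// alignment), and an extended alignment equal to its base alignment.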
2865bool isScalarType(Type *type) {
2866 return type->isIntegerTy() || type->isFloatTy();
2867}
2868
2869uint64_t structAlignment(StructType *type,
2870 std::function<uint64_t(Type *)> alignFn) {
2871 uint64_t maxAlign = 1;
2872 for (unsigned i = 0; i < type->getStructNumElements(); i++) {
2873 uint64_t align = alignFn(type->getStructElementType(i));
2874 maxAlign = std::max(align, maxAlign);
2875 }
2876 return maxAlign;
2877}
2878
2879uint64_t scalarAlignment(Type *type) {
2880 // A scalar of size N has a scalar alignment of N.
2881 if (isScalarType(type)) {
2882 return type->getScalarSizeInBits() / 8;
2883 }
2884
2885 // A vector or matrix type has a scalar alignment equal to that of its
2886 // component type.
2887 if (type->isVectorTy()) {
2888 return scalarAlignment(type->getVectorElementType());
2889 }
2890
2891 // An array type has a scalar alignment equal to that of its element type.
2892 if (type->isArrayTy()) {
2893 return scalarAlignment(type->getArrayElementType());
2894 }
2895
2896 // A structure has a scalar alignment equal to the largest scalar alignment of
2897 // any of its members.
2898 if (type->isStructTy()) {
2899 return structAlignment(cast<StructType>(type), scalarAlignment);
2900 }
2901
2902 llvm_unreachable("Unsupported type");
2903}
2904
2905uint64_t baseAlignment(Type *type) {
2906 // A scalar has a base alignment equal to its scalar alignment.
2907 if (isScalarType(type)) {
2908 return scalarAlignment(type);
2909 }
2910
2911 if (type->isVectorTy()) {
2912 unsigned numElems = type->getVectorNumElements();
2913
2914 // A two-component vector has a base alignment equal to twice its scalar
2915 // alignment.
2916 if (numElems == 2) {
2917 return 2 * scalarAlignment(type);
2918 }
2919 // A three- or four-component vector has a base alignment equal to four
2920 // times its scalar alignment.
2921 if ((numElems == 3) || (numElems == 4)) {
2922 return 4 * scalarAlignment(type);
2923 }
2924 }
2925
2926 // An array has a base alignment equal to the base alignment of its element
2927 // type.
2928 if (type->isArrayTy()) {
2929 return baseAlignment(type->getArrayElementType());
2930 }
2931
2932 // A structure has a base alignment equal to the largest base alignment of any
2933 // of its members.
2934 if (type->isStructTy()) {
2935 return structAlignment(cast<StructType>(type), baseAlignment);
2936 }
2937
2938 // TODO A row-major matrix of C columns has a base alignment equal to the base
2939 // alignment of a vector of C matrix components.
2940 // TODO A column-major matrix has a base alignment equal to the base alignment
2941 // of the matrix column type.
2942
2943 llvm_unreachable("Unsupported type");
2944}
2945
2946uint64_t extendedAlignment(Type *type) {
2947 // A scalar, vector or matrix type has an extended alignment equal to its base
2948 // alignment.
2949 // TODO matrix type
2950 if (isScalarType(type) || type->isVectorTy()) {
2951 return baseAlignment(type);
2952 }
2953
2954 // An array or structure type has an extended alignment equal to the largest
2955 // extended alignment of any of its members, rounded up to a multiple of 16
2956 if (type->isStructTy()) {
2957 auto salign = structAlignment(cast<StructType>(type), extendedAlignment);
2958 return alignTo(salign, 16);
2959 }
2960
2961 if (type->isArrayTy()) {
2962 auto salign = extendedAlignment(type->getArrayElementType());
2963 return alignTo(salign, 16);
2964 }
2965
2966 llvm_unreachable("Unsupported type");
2967}
2968
2969uint64_t standardAlignment(Type *type, spv::StorageClass sclass) {
2970 // If the scalarBlockLayout feature is enabled on the device then every member
2971 // must be aligned according to its scalar alignment
2972 if (clspv::Option::ScalarBlockLayout()) {
2973 return scalarAlignment(type);
2974 }
2975
2976 // All vectors must be aligned according to their scalar alignment
2977 if (type->isVectorTy()) {
2978 return scalarAlignment(type);
2979 }
2980
2981 // If the uniformBufferStandardLayout feature is not enabled on the device,
2982 // then any member of an OpTypeStruct with a storage class of Uniform and a
2983 // decoration of Block must be aligned according to its extended alignment.
2984 if (!clspv::Option::Std430UniformBufferLayout() &&
2985 sclass == spv::StorageClassUniform) {
2986 return extendedAlignment(type);
2987 }
2988
2989 // Every other member must be aligned according to its base alignment
2990 return baseAlignment(type);
2991}
2992
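// Example of an improper straddle under the rule below: a 16-byte vector
// placed at offset 8 occupies bytes 8..23, so its first and last bytes fall
// in different 16-byte blocks (floor(8/16) != floor(23/16)).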
2993bool improperlyStraddles(const DataLayout &DL, Type *type, unsigned offset) {
2994 assert(type->isVectorTy());
2995
2996 auto size = DL.getTypeStoreSize(type);
2997
2998 // It is a vector with total size less than or equal to 16 bytes, and has
2999 // Offset decorations placing its first byte at F and its last byte at L,
3000 // where floor(F / 16) != floor(L / 16).
3001 if ((size <= 16) && (offset % 16 + size > 16)) {
3002 return true;
3003 }
3004
3005 // It is a vector with total size greater than 16 bytes and has its Offset
3006 // decorations placing its first byte at a non-integer multiple of 16
3007 if ((size > 16) && (offset % 16 != 0)) {
3008 return true;
3009 }
3010
3011 return false;
3012}
3013
3014// See 14.5 Shader Resource Interface in Vulkan spec
3015bool isValidExplicitLayout(Module &M, StructType *STy, unsigned Member,
3016 spv::StorageClass SClass, unsigned Offset,
3017 unsigned PreviousMemberOffset) {
3018
3019 auto MemberType = STy->getElementType(Member);
3020 auto Align = standardAlignment(MemberType, SClass);
3021 auto &DL = M.getDataLayout();
3022
3023 // The Offset decoration of any member must be a multiple of its alignment
3024 if (Offset % Align != 0) {
3025 return false;
3026 }
3027
3028 // TODO Any ArrayStride or MatrixStride decoration must be a multiple of the
3029 // alignment of the array or matrix as defined above
3030
3031 if (!clspv::Option::ScalarBlockLayout()) {
3032 // Vectors must not improperly straddle, as defined above
3033 if (MemberType->isVectorTy() &&
3034 improperlyStraddles(DL, MemberType, Offset)) {
3035        return false;
3036 }
3037
3038 // The Offset decoration of a member must not place it between the end
3039 // of a structure or an array and the next multiple of the alignment of that
3040 // structure or array
3041 if (Member > 0) {
3042 auto PType = STy->getElementType(Member - 1);
3043 if (PType->isStructTy() || PType->isArrayTy()) {
3044 auto PAlign = standardAlignment(PType, SClass);
3045 if (Offset - PreviousMemberOffset < PAlign) {
3046 return false;
3047 }
3048 }
3049 }
3050 }
3051
3052 return true;
3053}
3054
3055} // namespace
3056
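// Walks the clspv push-constant global (if present), asserts that each member
// satisfies the standard layout rules for the PushConstant storage class, and
// records a PushConstantData descriptor map entry (kind, offset, size) for
// each member.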
3057void SPIRVProducerPass::GeneratePushConstantDescriptormapEntries(Module &M) {
3058
3059 if (auto GV = M.getGlobalVariable(clspv::PushConstantsVariableName())) {
3060 auto const &DL = M.getDataLayout();
3061 auto MD = GV->getMetadata(clspv::PushConstantsMetadataName());
3062 auto STy = cast<StructType>(GV->getValueType());
3063
3064 for (unsigned i = 0; i < STy->getNumElements(); i++) {
3065 auto pc = static_cast<clspv::PushConstant>(
3066 mdconst::extract<ConstantInt>(MD->getOperand(i))->getZExtValue());
3067 auto memberType = STy->getElementType(i);
3068 auto offset = GetExplicitLayoutStructMemberOffset(STy, i, DL);
3069 unsigned previousOffset = 0;
3070 if (i > 0) {
3071 previousOffset = GetExplicitLayoutStructMemberOffset(STy, i - 1, DL);
3072 }
3073 auto size = static_cast<uint32_t>(GetTypeSizeInBits(memberType, DL)) / 8;
3074 assert(isValidExplicitLayout(M, STy, i, spv::StorageClassPushConstant,
3075 offset, previousOffset));
3076 version0::DescriptorMapEntry::PushConstantData data = {pc, offset, size};
3077 descriptorMapEntries->emplace_back(std::move(data));
3078 }
3079 }
3080}
3081
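// Emits the OpVariable for a module-scope global. The WorkgroupSize builtin
// is special-cased: its value becomes an OpConstantComposite (when every
// kernel carries a matching reqd_work_group_size) or an
// OpSpecConstantComposite of three OpSpecConstants otherwise, and the BuiltIn
// decoration is placed on that composite rather than on the variable.
// Module-scope constants placed in a storage buffer additionally get Binding
// and DescriptorSet decorations plus a constant-data descriptor map entry.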
David Neto22f144c2017-06-12 14:26:21 -04003082void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003083 Module &M = *GV.getParent();
David Neto22f144c2017-06-12 14:26:21 -04003084 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3085 ValueMapType &VMap = getValueMap();
3086 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
David Neto85082642018-03-24 06:55:20 -07003087 const DataLayout &DL = GV.getParent()->getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04003088
3089 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
3090 Type *Ty = GV.getType();
3091 PointerType *PTy = cast<PointerType>(Ty);
3092
3093 uint32_t InitializerID = 0;
3094
3095 // Workgroup size is handled differently (it goes into a constant)
3096 if (spv::BuiltInWorkgroupSize == BuiltinType) {
3097 std::vector<bool> HasMDVec;
3098 uint32_t PrevXDimCst = 0xFFFFFFFF;
3099 uint32_t PrevYDimCst = 0xFFFFFFFF;
3100 uint32_t PrevZDimCst = 0xFFFFFFFF;
3101 for (Function &Func : *GV.getParent()) {
3102 if (Func.isDeclaration()) {
3103 continue;
3104 }
3105
3106 // We only need to check kernels.
3107 if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
3108 continue;
3109 }
3110
3111 if (const MDNode *MD =
3112 dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
3113 uint32_t CurXDimCst = static_cast<uint32_t>(
3114 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
3115 uint32_t CurYDimCst = static_cast<uint32_t>(
3116 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
3117 uint32_t CurZDimCst = static_cast<uint32_t>(
3118 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
3119
3120 if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
3121 PrevZDimCst == 0xFFFFFFFF) {
3122 PrevXDimCst = CurXDimCst;
3123 PrevYDimCst = CurYDimCst;
3124 PrevZDimCst = CurZDimCst;
3125 } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
3126 CurZDimCst != PrevZDimCst) {
3127 llvm_unreachable(
3128 "reqd_work_group_size must be the same across all kernels");
3129 } else {
3130 continue;
3131 }
3132
3133 //
3134 // Generate OpConstantComposite.
3135 //
3136 // Ops[0] : Result Type ID
3137 // Ops[1] : Constant size for x dimension.
3138 // Ops[2] : Constant size for y dimension.
3139 // Ops[3] : Constant size for z dimension.
3140 SPIRVOperandList Ops;
3141
3142 uint32_t XDimCstID =
3143 VMap[mdconst::extract<ConstantInt>(MD->getOperand(0))];
3144 uint32_t YDimCstID =
3145 VMap[mdconst::extract<ConstantInt>(MD->getOperand(1))];
3146 uint32_t ZDimCstID =
3147 VMap[mdconst::extract<ConstantInt>(MD->getOperand(2))];
3148
3149 InitializerID = nextID;
3150
David Neto257c3892018-04-11 13:19:45 -04003151 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
3152 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04003153
David Neto87846742018-04-11 17:36:22 -04003154 auto *Inst =
3155 new SPIRVInstruction(spv::OpConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003156 SPIRVInstList.push_back(Inst);
3157
3158 HasMDVec.push_back(true);
3159 } else {
3160 HasMDVec.push_back(false);
3161 }
3162 }
3163
3164    // Check that all kernels have the same work_group_size definition.
3165 bool HasMD = false;
3166 if (!HasMDVec.empty()) {
3167 HasMD = HasMDVec[0];
3168 for (uint32_t i = 1; i < HasMDVec.size(); i++) {
3169 if (HasMD != HasMDVec[i]) {
3170 llvm_unreachable(
3171 "Kernels should have consistent work group size definition");
3172 }
3173 }
3174 }
3175
3176    // If the kernels do not have reqd_work_group_size metadata, generate
3177    // OpSpecConstants for the x/y/z dimensions.
3178 if (!HasMD) {
3179 //
3180 // Generate OpSpecConstants for x/y/z dimension.
3181 //
3182 // Ops[0] : Result Type ID
3183 // Ops[1] : Constant size for x/y/z dimension (Literal Number).
3184 uint32_t XDimCstID = 0;
3185 uint32_t YDimCstID = 0;
3186 uint32_t ZDimCstID = 0;
3187
David Neto22f144c2017-06-12 14:26:21 -04003188 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04003189 uint32_t result_type_id =
3190 lookupType(Ty->getPointerElementType()->getSequentialElementType());
David Neto22f144c2017-06-12 14:26:21 -04003191
David Neto257c3892018-04-11 13:19:45 -04003192 // X Dimension
3193 Ops << MkId(result_type_id) << MkNum(1);
3194 XDimCstID = nextID++;
3195 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04003196 new SPIRVInstruction(spv::OpSpecConstant, XDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003197
3198 // Y Dimension
3199 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003200 Ops << MkId(result_type_id) << MkNum(1);
3201 YDimCstID = nextID++;
3202 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04003203 new SPIRVInstruction(spv::OpSpecConstant, YDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003204
3205 // Z Dimension
3206 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003207 Ops << MkId(result_type_id) << MkNum(1);
3208 ZDimCstID = nextID++;
3209 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04003210 new SPIRVInstruction(spv::OpSpecConstant, ZDimCstID, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003211
David Neto257c3892018-04-11 13:19:45 -04003212 BuiltinDimVec.push_back(XDimCstID);
3213 BuiltinDimVec.push_back(YDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04003214 BuiltinDimVec.push_back(ZDimCstID);
3215
David Neto22f144c2017-06-12 14:26:21 -04003216 //
3217 // Generate OpSpecConstantComposite.
3218 //
3219 // Ops[0] : Result Type ID
3220 // Ops[1] : Constant size for x dimension.
3221 // Ops[2] : Constant size for y dimension.
3222 // Ops[3] : Constant size for z dimension.
3223 InitializerID = nextID;
3224
3225 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003226 Ops << MkId(lookupType(Ty->getPointerElementType())) << MkId(XDimCstID)
3227 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04003228
David Neto87846742018-04-11 17:36:22 -04003229 auto *Inst =
3230 new SPIRVInstruction(spv::OpSpecConstantComposite, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003231 SPIRVInstList.push_back(Inst);
3232 }
3233 }
3234
David Neto22f144c2017-06-12 14:26:21 -04003235 VMap[&GV] = nextID;
3236
3237 //
3238 // Generate OpVariable.
3239 //
3240  // Ops[0] : Result Type ID
3241  // Ops[1] : Storage Class
3242 SPIRVOperandList Ops;
3243
David Neto85082642018-03-24 06:55:20 -07003244 const auto AS = PTy->getAddressSpace();
David Netoc6f3ab22018-04-06 18:02:31 -04003245 Ops << MkId(lookupType(Ty)) << MkNum(GetStorageClass(AS));
David Neto22f144c2017-06-12 14:26:21 -04003246
David Neto85082642018-03-24 06:55:20 -07003247 const bool module_scope_constant_external_init =
David Neto862b7d82018-06-14 18:48:37 -04003248 (AS == AddressSpace::Constant) && GV.hasInitializer() &&
David Neto85082642018-03-24 06:55:20 -07003249 clspv::Option::ModuleConstantsInStorageBuffer();
3250
Kévin Petit23d5f182019-08-13 16:21:29 +01003251 if (GV.hasInitializer()) {
3252 auto GVInit = GV.getInitializer();
3253 if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
3254 assert(VMap.count(GVInit) == 1);
3255 InitializerID = VMap[GVInit];
David Neto85082642018-03-24 06:55:20 -07003256 }
3257 }
Kévin Petit23d5f182019-08-13 16:21:29 +01003258
3259 if (0 != InitializerID) {
Kévin Petitbbbda972020-03-03 19:16:31 +00003260 // Emit the ID of the initializer as part of the variable definition.
Kévin Petit23d5f182019-08-13 16:21:29 +01003261 Ops << MkId(InitializerID);
3262 }
David Neto85082642018-03-24 06:55:20 -07003263 const uint32_t var_id = nextID++;
3264
David Neto87846742018-04-11 17:36:22 -04003265 auto *Inst = new SPIRVInstruction(spv::OpVariable, var_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003266 SPIRVInstList.push_back(Inst);
3267
3268 // If we have a builtin.
3269 if (spv::BuiltInMax != BuiltinType) {
3270 // Find Insert Point for OpDecorate.
3271 auto DecoInsertPoint =
3272 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3273 [](SPIRVInstruction *Inst) -> bool {
3274 return Inst->getOpcode() != spv::OpDecorate &&
3275 Inst->getOpcode() != spv::OpMemberDecorate &&
3276 Inst->getOpcode() != spv::OpExtInstImport;
3277 });
3278 //
3279 // Generate OpDecorate.
3280 //
3281 // DOps[0] = Target ID
3282 // DOps[1] = Decoration (Builtin)
3283 // DOps[2] = BuiltIn ID
3284 uint32_t ResultID;
3285
3286    // WorkgroupSize is different: we decorate the constant composite that has
3287 // its value, rather than the variable that we use to access the value.
3288 if (spv::BuiltInWorkgroupSize == BuiltinType) {
3289 ResultID = InitializerID;
David Netoa60b00b2017-09-15 16:34:09 -04003290 // Save both the value and variable IDs for later.
3291 WorkgroupSizeValueID = InitializerID;
3292 WorkgroupSizeVarID = VMap[&GV];
David Neto22f144c2017-06-12 14:26:21 -04003293 } else {
3294 ResultID = VMap[&GV];
3295 }
3296
3297 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04003298 DOps << MkId(ResultID) << MkNum(spv::DecorationBuiltIn)
3299 << MkNum(BuiltinType);
David Neto22f144c2017-06-12 14:26:21 -04003300
David Neto87846742018-04-11 17:36:22 -04003301 auto *DescDecoInst = new SPIRVInstruction(spv::OpDecorate, DOps);
David Neto22f144c2017-06-12 14:26:21 -04003302 SPIRVInstList.insert(DecoInsertPoint, DescDecoInst);
David Neto85082642018-03-24 06:55:20 -07003303 } else if (module_scope_constant_external_init) {
3304 // This module scope constant is initialized from a storage buffer with data
3305 // provided by the host at binding 0 of the next descriptor set.
David Neto78383442018-06-15 20:31:56 -04003306 const uint32_t descriptor_set = TakeDescriptorIndex(&M);
David Neto85082642018-03-24 06:55:20 -07003307
David Neto862b7d82018-06-14 18:48:37 -04003308    // Emit the initializer to the descriptor map file.
David Neto85082642018-03-24 06:55:20 -07003309 // Use "kind,buffer" to indicate storage buffer. We might want to expand
3310 // that later to other types, like uniform buffer.
alan-bakerf5e5f692018-11-27 08:33:24 -05003311 std::string hexbytes;
3312 llvm::raw_string_ostream str(hexbytes);
3313 clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003314 version0::DescriptorMapEntry::ConstantData constant_data = {ArgKind::Buffer,
3315 str.str()};
3316 descriptorMapEntries->emplace_back(std::move(constant_data), descriptor_set,
3317 0);
David Neto85082642018-03-24 06:55:20 -07003318
3319 // Find Insert Point for OpDecorate.
3320 auto DecoInsertPoint =
3321 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3322 [](SPIRVInstruction *Inst) -> bool {
3323 return Inst->getOpcode() != spv::OpDecorate &&
3324 Inst->getOpcode() != spv::OpMemberDecorate &&
3325 Inst->getOpcode() != spv::OpExtInstImport;
3326 });
3327
David Neto257c3892018-04-11 13:19:45 -04003328 // OpDecorate %var Binding <binding>
David Neto85082642018-03-24 06:55:20 -07003329 SPIRVOperandList DOps;
David Neto257c3892018-04-11 13:19:45 -04003330 DOps << MkId(var_id) << MkNum(spv::DecorationBinding) << MkNum(0);
3331 DecoInsertPoint = SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04003332 DecoInsertPoint, new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto85082642018-03-24 06:55:20 -07003333
3334 // OpDecorate %var DescriptorSet <descriptor_set>
3335 DOps.clear();
David Neto257c3892018-04-11 13:19:45 -04003336 DOps << MkId(var_id) << MkNum(spv::DecorationDescriptorSet)
3337 << MkNum(descriptor_set);
David Netoc6f3ab22018-04-06 18:02:31 -04003338 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04003339 new SPIRVInstruction(spv::OpDecorate, DOps));
David Neto22f144c2017-06-12 14:26:21 -04003340 }
3341}
3342
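// Emits one Workgroup storage class OpVariable for each pointer-to-local spec
// id recorded earlier; each variable uses the pointer-to-array type whose
// length is the OpSpecConstant generated above, so the array length can be
// set through specialization.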
David Netoc6f3ab22018-04-06 18:02:31 -04003343void SPIRVProducerPass::GenerateWorkgroupVars() {
3344 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
Alan Baker202c8c72018-08-13 13:47:44 -04003345 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
3346 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003347 LocalArgInfo &info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04003348
3349 // Generate OpVariable.
3350 //
3351    // Ops[0] : Result Type ID
3352    // Ops[1] : Storage Class
3353 SPIRVOperandList Ops;
3354 Ops << MkId(info.ptr_array_type_id) << MkNum(spv::StorageClassWorkgroup);
3355
3356 SPIRVInstList.push_back(
David Neto87846742018-04-11 17:36:22 -04003357 new SPIRVInstruction(spv::OpVariable, info.variable_id, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04003358 }
3359}
3360
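// Emits descriptor map entries for one kernel's arguments. When POD arguments
// have been clustered, per-argument details (name, ordinal, offset, size,
// kind, spec id) come from the kernel_arg_map metadata; otherwise they are
// reconstructed from the argument list and the resource variable calls.
// Pointer-to-local arguments report their spec id and element size instead of
// a descriptor set and binding.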
David Neto862b7d82018-06-14 18:48:37 -04003361void SPIRVProducerPass::GenerateDescriptorMapInfo(const DataLayout &DL,
3362 Function &F) {
David Netoc5fb5242018-07-30 13:28:31 -04003363 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
3364 return;
3365 }
David Neto862b7d82018-06-14 18:48:37 -04003366 // Gather the list of resources that are used by this function's arguments.
3367 auto &resource_var_at_index = FunctionToResourceVarsMap[&F];
3368
alan-bakerf5e5f692018-11-27 08:33:24 -05003369 // TODO(alan-baker): This should become unnecessary by fixing the rest of the
3370 // flow to generate pod_ubo arguments earlier.
David Neto862b7d82018-06-14 18:48:37 -04003371 auto remap_arg_kind = [](StringRef argKind) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003372 std::string kind =
3373 clspv::Option::PodArgsInUniformBuffer() && argKind.equals("pod")
3374 ? "pod_ubo"
alan-baker21574d32020-01-29 16:00:31 -05003375 : argKind.str();
alan-bakerf5e5f692018-11-27 08:33:24 -05003376 return GetArgKindFromName(kind);
David Neto862b7d82018-06-14 18:48:37 -04003377 };
3378
3379 auto *fty = F.getType()->getPointerElementType();
3380 auto *func_ty = dyn_cast<FunctionType>(fty);
3381
alan-baker038e9242019-04-19 22:14:41 -04003382 // If we've clustered POD arguments, then argument details are in metadata.
David Neto862b7d82018-06-14 18:48:37 -04003383 // If an argument maps to a resource variable, then get descriptor set and
3384  // binding from the resource variable. Other info comes from the metadata.
3385 const auto *arg_map = F.getMetadata("kernel_arg_map");
3386 if (arg_map) {
3387 for (const auto &arg : arg_map->operands()) {
3388 const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
Kévin PETITa353c832018-03-20 23:21:21 +00003389 assert(arg_node->getNumOperands() == 7);
David Neto862b7d82018-06-14 18:48:37 -04003390 const auto name =
3391 dyn_cast<MDString>(arg_node->getOperand(0))->getString();
3392 const auto old_index =
3393 dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
3394 // Remapped argument index
alan-bakerb6b09dc2018-11-08 16:59:28 -05003395 const size_t new_index = static_cast<size_t>(
3396 dyn_extract<ConstantInt>(arg_node->getOperand(2))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04003397 const auto offset =
3398 dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
Kévin PETITa353c832018-03-20 23:21:21 +00003399 const auto arg_size =
3400 dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
David Neto862b7d82018-06-14 18:48:37 -04003401 const auto argKind = remap_arg_kind(
Kévin PETITa353c832018-03-20 23:21:21 +00003402 dyn_cast<MDString>(arg_node->getOperand(5))->getString());
David Neto862b7d82018-06-14 18:48:37 -04003403 const auto spec_id =
Kévin PETITa353c832018-03-20 23:21:21 +00003404 dyn_extract<ConstantInt>(arg_node->getOperand(6))->getSExtValue();
alan-bakerf5e5f692018-11-27 08:33:24 -05003405
3406 uint32_t descriptor_set = 0;
3407 uint32_t binding = 0;
3408 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003409 F.getName().str(), name.str(), static_cast<uint32_t>(old_index),
3410 argKind, static_cast<uint32_t>(spec_id),
alan-bakerf5e5f692018-11-27 08:33:24 -05003411 // This will be set below for pointer-to-local args.
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003412 0, static_cast<uint32_t>(offset), static_cast<uint32_t>(arg_size)};
David Neto862b7d82018-06-14 18:48:37 -04003413 if (spec_id > 0) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003414 kernel_data.local_element_size = static_cast<uint32_t>(GetTypeAllocSize(
3415 func_ty->getParamType(unsigned(new_index))->getPointerElementType(),
3416 DL));
David Neto862b7d82018-06-14 18:48:37 -04003417 } else {
3418 auto *info = resource_var_at_index[new_index];
3419 assert(info);
alan-bakerf5e5f692018-11-27 08:33:24 -05003420 descriptor_set = info->descriptor_set;
3421 binding = info->binding;
David Neto862b7d82018-06-14 18:48:37 -04003422 }
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003423 descriptorMapEntries->emplace_back(std::move(kernel_data), descriptor_set,
3424 binding);
David Neto862b7d82018-06-14 18:48:37 -04003425 }
3426 } else {
3427 // There is no argument map.
3428 // Take descriptor info from the resource variable calls.
Kévin PETITa353c832018-03-20 23:21:21 +00003429 // Take argument name and size from the arguments list.
David Neto862b7d82018-06-14 18:48:37 -04003430
3431 SmallVector<Argument *, 4> arguments;
3432 for (auto &arg : F.args()) {
3433 arguments.push_back(&arg);
3434 }
3435
3436 unsigned arg_index = 0;
3437 for (auto *info : resource_var_at_index) {
3438 if (info) {
Kévin PETITa353c832018-03-20 23:21:21 +00003439 auto arg = arguments[arg_index];
alan-bakerb6b09dc2018-11-08 16:59:28 -05003440 unsigned arg_size = 0;
Kévin PETITa353c832018-03-20 23:21:21 +00003441 if (info->arg_kind == clspv::ArgKind::Pod) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003442 arg_size = static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
Kévin PETITa353c832018-03-20 23:21:21 +00003443 }
3444
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003445 // Local pointer arguments are unused in this case. Offset is always
3446 // zero.
alan-bakerf5e5f692018-11-27 08:33:24 -05003447 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003448 F.getName().str(),
3449 arg->getName().str(),
3450 arg_index,
3451 remap_arg_kind(clspv::GetArgKindName(info->arg_kind)),
3452 0,
3453 0,
3454 0,
3455 arg_size};
alan-bakerf5e5f692018-11-27 08:33:24 -05003456 descriptorMapEntries->emplace_back(std::move(kernel_data),
3457 info->descriptor_set, info->binding);
David Neto862b7d82018-06-14 18:48:37 -04003458 }
3459 arg_index++;
3460 }
3461 // Generate mappings for pointer-to-local arguments.
3462 for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
3463 Argument *arg = arguments[arg_index];
Alan Baker202c8c72018-08-13 13:47:44 -04003464 auto where = LocalArgSpecIds.find(arg);
3465 if (where != LocalArgSpecIds.end()) {
3466 auto &local_arg_info = LocalSpecIdInfoMap[where->second];
alan-bakerf5e5f692018-11-27 08:33:24 -05003467        // The members used for pod arguments (offset and size) are unused in this case.
3468 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003469 F.getName().str(),
3470 arg->getName().str(),
alan-bakerf5e5f692018-11-27 08:33:24 -05003471 arg_index,
3472 ArgKind::Local,
3473 static_cast<uint32_t>(local_arg_info.spec_id),
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003474 static_cast<uint32_t>(
3475 GetTypeAllocSize(local_arg_info.elem_type, DL)),
alan-bakerf5e5f692018-11-27 08:33:24 -05003476 0,
3477 0};
3478 // Pointer-to-local arguments do not utilize descriptor set and binding.
3479 descriptorMapEntries->emplace_back(std::move(kernel_data), 0, 0);
David Neto862b7d82018-06-14 18:48:37 -04003480 }
3481 }
3482 }
3483}
3484
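// Emits the OpFunction header (result type, function control mask derived
// from the function attributes, and function type), records kernels as entry
// points, and, for non-kernel functions, emits one OpFunctionParameter per
// argument; the parameter list is left empty for kernels.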
David Neto22f144c2017-06-12 14:26:21 -04003485void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
3486 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3487 ValueMapType &VMap = getValueMap();
3488 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04003489 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
3490 auto &GlobalConstArgSet = getGlobalConstArgSet();
3491
3492 FunctionType *FTy = F.getFunctionType();
3493
3494 //
David Neto22f144c2017-06-12 14:26:21 -04003495 // Generate OPFunction.
3496  // Generate OpFunction.
3497
3498 // FOps[0] : Result Type ID
3499 // FOps[1] : Function Control
3500 // FOps[2] : Function Type ID
3501 SPIRVOperandList FOps;
3502
3503 // Find SPIRV instruction for return type.
David Neto257c3892018-04-11 13:19:45 -04003504 FOps << MkId(lookupType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04003505
3506 // Check function attributes for SPIRV Function Control.
3507 uint32_t FuncControl = spv::FunctionControlMaskNone;
3508 if (F.hasFnAttribute(Attribute::AlwaysInline)) {
3509 FuncControl |= spv::FunctionControlInlineMask;
3510 }
3511 if (F.hasFnAttribute(Attribute::NoInline)) {
3512 FuncControl |= spv::FunctionControlDontInlineMask;
3513 }
3514 // TODO: Check llvm attribute for Function Control Pure.
3515 if (F.hasFnAttribute(Attribute::ReadOnly)) {
3516 FuncControl |= spv::FunctionControlPureMask;
3517 }
3518 // TODO: Check llvm attribute for Function Control Const.
3519 if (F.hasFnAttribute(Attribute::ReadNone)) {
3520 FuncControl |= spv::FunctionControlConstMask;
3521 }
3522
David Neto257c3892018-04-11 13:19:45 -04003523 FOps << MkNum(FuncControl);
David Neto22f144c2017-06-12 14:26:21 -04003524
3525 uint32_t FTyID;
3526 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3527 SmallVector<Type *, 4> NewFuncParamTys;
3528 FunctionType *NewFTy =
3529 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
3530 FTyID = lookupType(NewFTy);
3531 } else {
David Neto9ed8e2f2018-03-24 06:47:24 -07003532 // Handle regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04003533 if (GlobalConstFuncTyMap.count(FTy)) {
3534 FTyID = lookupType(GlobalConstFuncTyMap[FTy].first);
3535 } else {
3536 FTyID = lookupType(FTy);
3537 }
3538 }
3539
David Neto257c3892018-04-11 13:19:45 -04003540 FOps << MkId(FTyID);
David Neto22f144c2017-06-12 14:26:21 -04003541
3542 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3543 EntryPoints.push_back(std::make_pair(&F, nextID));
3544 }
3545
3546 VMap[&F] = nextID;
3547
David Neto482550a2018-03-24 05:21:07 -07003548 if (clspv::Option::ShowIDs()) {
David Netob05675d2018-02-16 12:37:49 -05003549 errs() << "Function " << F.getName() << " is " << nextID << "\n";
3550 }
David Neto22f144c2017-06-12 14:26:21 -04003551 // Generate SPIRV instruction for function.
David Neto87846742018-04-11 17:36:22 -04003552 auto *FuncInst = new SPIRVInstruction(spv::OpFunction, nextID++, FOps);
David Neto22f144c2017-06-12 14:26:21 -04003553 SPIRVInstList.push_back(FuncInst);
3554
3555 //
3556 // Generate OpFunctionParameter for Normal function.
3557 //
3558
3559 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
alan-bakere9308012019-03-15 10:25:13 -04003560
3561 // Find Insert Point for OpDecorate.
3562 auto DecoInsertPoint =
3563 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
3564 [](SPIRVInstruction *Inst) -> bool {
3565 return Inst->getOpcode() != spv::OpDecorate &&
3566 Inst->getOpcode() != spv::OpMemberDecorate &&
3567 Inst->getOpcode() != spv::OpExtInstImport;
3568 });
3569
David Neto22f144c2017-06-12 14:26:21 -04003570 // Iterate Argument for name instead of param type from function type.
3571 unsigned ArgIdx = 0;
3572 for (Argument &Arg : F.args()) {
alan-bakere9308012019-03-15 10:25:13 -04003573 uint32_t param_id = nextID++;
3574 VMap[&Arg] = param_id;
3575
3576 if (CalledWithCoherentResource(Arg)) {
3577 // If the arg is passed a coherent resource ever, then decorate this
3578 // parameter with Coherent too.
3579 SPIRVOperandList decoration_ops;
3580 decoration_ops << MkId(param_id) << MkNum(spv::DecorationCoherent);
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003581 SPIRVInstList.insert(
3582 DecoInsertPoint,
3583 new SPIRVInstruction(spv::OpDecorate, decoration_ops));
alan-bakere9308012019-03-15 10:25:13 -04003584 }
David Neto22f144c2017-06-12 14:26:21 -04003585
3586 // ParamOps[0] : Result Type ID
3587 SPIRVOperandList ParamOps;
3588
3589 // Find SPIRV instruction for parameter type.
3590 uint32_t ParamTyID = lookupType(Arg.getType());
3591 if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
3592 if (GlobalConstFuncTyMap.count(FTy)) {
3593 if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
3594 Type *EleTy = PTy->getPointerElementType();
3595 Type *ArgTy =
3596 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
3597 ParamTyID = lookupType(ArgTy);
3598 GlobalConstArgSet.insert(&Arg);
3599 }
3600 }
3601 }
David Neto257c3892018-04-11 13:19:45 -04003602 ParamOps << MkId(ParamTyID);
David Neto22f144c2017-06-12 14:26:21 -04003603
3604 // Generate SPIRV instruction for parameter.
David Neto87846742018-04-11 17:36:22 -04003605 auto *ParamInst =
alan-bakere9308012019-03-15 10:25:13 -04003606 new SPIRVInstruction(spv::OpFunctionParameter, param_id, ParamOps);
David Neto22f144c2017-06-12 14:26:21 -04003607 SPIRVInstList.push_back(ParamInst);
3608
3609 ArgIdx++;
3610 }
3611 }
3612}
3613
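// Emits the module-level preamble: OpCapability instructions derived from the
// types and builtins actually used, the SPV_KHR_storage_buffer_storage_class
// extension (always) and SPV_KHR_variable_pointers (when variable pointers
// are used), the Logical/GLSL450 OpMemoryModel, and one OpEntryPoint per
// kernel.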
alan-bakerb6b09dc2018-11-08 16:59:28 -05003614void SPIRVProducerPass::GenerateModuleInfo(Module &module) {
David Neto22f144c2017-06-12 14:26:21 -04003615 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3616 EntryPointVecType &EntryPoints = getEntryPointVec();
3617 ValueMapType &VMap = getValueMap();
3618 ValueList &EntryPointInterfaces = getEntryPointInterfacesVec();
3619 uint32_t &ExtInstImportID = getOpExtInstImportID();
3620 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
3621
3622 // Set up insert point.
3623 auto InsertPoint = SPIRVInstList.begin();
3624
3625 //
3626 // Generate OpCapability
3627 //
3628  // TODO: Which llvm information is mapped to SPIRV Capability?
3629
3630 // Ops[0] = Capability
3631 SPIRVOperandList Ops;
3632
David Neto87846742018-04-11 17:36:22 -04003633 auto *CapInst =
David Netoef5ba2b2019-12-20 08:35:54 -05003634 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityShader));
David Neto22f144c2017-06-12 14:26:21 -04003635 SPIRVInstList.insert(InsertPoint, CapInst);
3636
alan-bakerf906d2b2019-12-10 11:26:23 -05003637 bool write_without_format = false;
3638 bool sampled_1d = false;
3639 bool image_1d = false;
David Neto22f144c2017-06-12 14:26:21 -04003640 for (Type *Ty : getTypeList()) {
alan-bakerb39c8262019-03-08 14:03:37 -05003641 if (clspv::Option::Int8Support() && Ty->isIntegerTy(8)) {
3642 // Generate OpCapability for i8 type.
David Netoef5ba2b2019-12-20 08:35:54 -05003643 SPIRVInstList.insert(
3644 InsertPoint,
3645 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt8)));
alan-bakerb39c8262019-03-08 14:03:37 -05003646 } else if (Ty->isIntegerTy(16)) {
David Neto22f144c2017-06-12 14:26:21 -04003647 // Generate OpCapability for i16 type.
David Netoef5ba2b2019-12-20 08:35:54 -05003648 SPIRVInstList.insert(
3649 InsertPoint,
3650 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt16)));
David Neto22f144c2017-06-12 14:26:21 -04003651 } else if (Ty->isIntegerTy(64)) {
3652 // Generate OpCapability for i64 type.
David Netoef5ba2b2019-12-20 08:35:54 -05003653 SPIRVInstList.insert(
3654 InsertPoint,
3655 new SPIRVInstruction(spv::OpCapability, MkNum(spv::CapabilityInt64)));
David Neto22f144c2017-06-12 14:26:21 -04003656 } else if (Ty->isHalfTy()) {
3657 // Generate OpCapability for half type.
David Netoef5ba2b2019-12-20 08:35:54 -05003658 SPIRVInstList.insert(InsertPoint,
3659 new SPIRVInstruction(spv::OpCapability,
3660 MkNum(spv::CapabilityFloat16)));
David Neto22f144c2017-06-12 14:26:21 -04003661 } else if (Ty->isDoubleTy()) {
3662 // Generate OpCapability for double type.
David Netoef5ba2b2019-12-20 08:35:54 -05003663 SPIRVInstList.insert(InsertPoint,
3664 new SPIRVInstruction(spv::OpCapability,
3665 MkNum(spv::CapabilityFloat64)));
David Neto22f144c2017-06-12 14:26:21 -04003666 } else if (auto *STy = dyn_cast<StructType>(Ty)) {
3667 if (STy->isOpaque()) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003668 if (STy->getName().startswith("opencl.image1d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05003669 STy->getName().startswith("opencl.image1d_array_wo_t") ||
alan-bakerf906d2b2019-12-10 11:26:23 -05003670 STy->getName().startswith("opencl.image2d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05003671 STy->getName().startswith("opencl.image2d_array_wo_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05003672 STy->getName().startswith("opencl.image3d_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003673 write_without_format = true;
3674 }
3675 if (STy->getName().startswith("opencl.image1d_ro_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05003676 STy->getName().startswith("opencl.image1d_wo_t") ||
3677 STy->getName().startswith("opencl.image1d_array_ro_t") ||
3678 STy->getName().startswith("opencl.image1d_array_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003679 if (STy->getName().contains(".sampled"))
3680 sampled_1d = true;
3681 else
3682 image_1d = true;
David Neto22f144c2017-06-12 14:26:21 -04003683 }
3684 }
3685 }
3686 }
3687
alan-bakerf906d2b2019-12-10 11:26:23 -05003688 if (write_without_format) {
3689 // Generate OpCapability for write only image type.
3690 SPIRVInstList.insert(
3691 InsertPoint,
3692 new SPIRVInstruction(
3693 spv::OpCapability,
3694 {MkNum(spv::CapabilityStorageImageWriteWithoutFormat)}));
3695 }
3696 if (image_1d) {
3697 // Generate OpCapability for unsampled 1D image type.
3698 SPIRVInstList.insert(InsertPoint,
3699 new SPIRVInstruction(spv::OpCapability,
3700 {MkNum(spv::CapabilityImage1D)}));
3701 } else if (sampled_1d) {
3702 // Generate OpCapability for sampled 1D image type.
3703 SPIRVInstList.insert(
3704 InsertPoint, new SPIRVInstruction(spv::OpCapability,
3705 {MkNum(spv::CapabilitySampled1D)}));
3706 }
3707
David Neto5c22a252018-03-15 16:07:41 -04003708 { // OpCapability ImageQuery
3709 bool hasImageQuery = false;
alan-bakerf67468c2019-11-25 15:51:49 -05003710 for (const auto &SymVal : module.getValueSymbolTable()) {
3711 if (auto F = dyn_cast<Function>(SymVal.getValue())) {
alan-bakerce179f12019-12-06 19:02:22 -05003712 if (clspv::IsImageQuery(F)) {
alan-bakerf67468c2019-11-25 15:51:49 -05003713 hasImageQuery = true;
3714 break;
3715 }
David Neto5c22a252018-03-15 16:07:41 -04003716 }
3717 }
alan-bakerf67468c2019-11-25 15:51:49 -05003718
David Neto5c22a252018-03-15 16:07:41 -04003719 if (hasImageQuery) {
David Neto87846742018-04-11 17:36:22 -04003720 auto *ImageQueryCapInst = new SPIRVInstruction(
3721 spv::OpCapability, {MkNum(spv::CapabilityImageQuery)});
David Neto5c22a252018-03-15 16:07:41 -04003722 SPIRVInstList.insert(InsertPoint, ImageQueryCapInst);
3723 }
3724 }
3725
David Neto22f144c2017-06-12 14:26:21 -04003726 if (hasVariablePointers()) {
3727 //
David Neto22f144c2017-06-12 14:26:21 -04003728 // Generate OpCapability.
3729 //
3730 // Ops[0] = Capability
3731 //
3732 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003733 Ops << MkNum(spv::CapabilityVariablePointers);
David Neto22f144c2017-06-12 14:26:21 -04003734
David Neto87846742018-04-11 17:36:22 -04003735 SPIRVInstList.insert(InsertPoint,
3736 new SPIRVInstruction(spv::OpCapability, Ops));
alan-baker5b86ed72019-02-15 08:26:50 -05003737 } else if (hasVariablePointersStorageBuffer()) {
3738 //
3739 // Generate OpCapability.
3740 //
3741 // Ops[0] = Capability
3742 //
3743 Ops.clear();
3744 Ops << MkNum(spv::CapabilityVariablePointersStorageBuffer);
David Neto22f144c2017-06-12 14:26:21 -04003745
alan-baker5b86ed72019-02-15 08:26:50 -05003746 SPIRVInstList.insert(InsertPoint,
3747 new SPIRVInstruction(spv::OpCapability, Ops));
3748 }
3749
3750 // Always add the storage buffer extension
3751 {
David Neto22f144c2017-06-12 14:26:21 -04003752 //
3753 // Generate OpExtension.
3754 //
3755 // Ops[0] = Name (Literal String)
3756 //
alan-baker5b86ed72019-02-15 08:26:50 -05003757 auto *ExtensionInst = new SPIRVInstruction(
3758 spv::OpExtension, {MkString("SPV_KHR_storage_buffer_storage_class")});
3759 SPIRVInstList.insert(InsertPoint, ExtensionInst);
3760 }
David Neto22f144c2017-06-12 14:26:21 -04003761
alan-baker5b86ed72019-02-15 08:26:50 -05003762 if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
3763 //
3764 // Generate OpExtension.
3765 //
3766 // Ops[0] = Name (Literal String)
3767 //
3768 auto *ExtensionInst = new SPIRVInstruction(
3769 spv::OpExtension, {MkString("SPV_KHR_variable_pointers")});
3770 SPIRVInstList.insert(InsertPoint, ExtensionInst);
David Neto22f144c2017-06-12 14:26:21 -04003771 }
3772
3773 if (ExtInstImportID) {
3774 ++InsertPoint;
3775 }
3776
3777 //
3778 // Generate OpMemoryModel
3779 //
3780 // Memory model for Vulkan will always be GLSL450.
3781
3782 // Ops[0] = Addressing Model
3783 // Ops[1] = Memory Model
3784 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003785 Ops << MkNum(spv::AddressingModelLogical) << MkNum(spv::MemoryModelGLSL450);
David Neto22f144c2017-06-12 14:26:21 -04003786
David Neto87846742018-04-11 17:36:22 -04003787 auto *MemModelInst = new SPIRVInstruction(spv::OpMemoryModel, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003788 SPIRVInstList.insert(InsertPoint, MemModelInst);
3789
3790 //
3791 // Generate OpEntryPoint
3792 //
3793 for (auto EntryPoint : EntryPoints) {
3794 // Ops[0] = Execution Model
3795 // Ops[1] = EntryPoint ID
3796 // Ops[2] = Name (Literal String)
3797 // ...
3798 //
3799 // TODO: Do we need to consider Interface ID for forward references???
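    // For a kernel named "foo" the emitted instruction looks roughly like
    // (illustrative only; the %names below are invented, not real IDs):
    //   OpEntryPoint GLCompute %foo "foo" %interface_var_0 %interface_var_1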
3800 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003801 const StringRef &name = EntryPoint.first->getName();
David Neto257c3892018-04-11 13:19:45 -04003802 Ops << MkNum(spv::ExecutionModelGLCompute) << MkId(EntryPoint.second)
3803 << MkString(name);
David Neto22f144c2017-06-12 14:26:21 -04003804
David Neto22f144c2017-06-12 14:26:21 -04003805 for (Value *Interface : EntryPointInterfaces) {
David Neto257c3892018-04-11 13:19:45 -04003806 Ops << MkId(VMap[Interface]);
David Neto22f144c2017-06-12 14:26:21 -04003807 }
3808
David Neto87846742018-04-11 17:36:22 -04003809 auto *EntryPointInst = new SPIRVInstruction(spv::OpEntryPoint, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003810 SPIRVInstList.insert(InsertPoint, EntryPointInst);
3811 }
3812
3813 for (auto EntryPoint : EntryPoints) {
3814 if (const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
3815 ->getMetadata("reqd_work_group_size")) {
3816
3817 if (!BuiltinDimVec.empty()) {
3818 llvm_unreachable(
3819 "Kernels should have consistent work group size definition");
3820 }
3821
3822 //
3823 // Generate OpExecutionMode
3824 //
3825
3826 // Ops[0] = Entry Point ID
3827 // Ops[1] = Execution Mode
3828 // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
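      // For example, __attribute__((reqd_work_group_size(8, 8, 1))) yields
      // roughly (illustrative): OpExecutionMode %foo LocalSize 8 8 1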
3829 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003830 Ops << MkId(EntryPoint.second) << MkNum(spv::ExecutionModeLocalSize);
David Neto22f144c2017-06-12 14:26:21 -04003831
3832 uint32_t XDim = static_cast<uint32_t>(
3833 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
3834 uint32_t YDim = static_cast<uint32_t>(
3835 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
3836 uint32_t ZDim = static_cast<uint32_t>(
3837 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
3838
David Neto257c3892018-04-11 13:19:45 -04003839 Ops << MkNum(XDim) << MkNum(YDim) << MkNum(ZDim);
David Neto22f144c2017-06-12 14:26:21 -04003840
David Neto87846742018-04-11 17:36:22 -04003841 auto *ExecModeInst = new SPIRVInstruction(spv::OpExecutionMode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003842 SPIRVInstList.insert(InsertPoint, ExecModeInst);
3843 }
3844 }
3845
3846 //
3847 // Generate OpSource.
3848 //
3849 // Ops[0] = SourceLanguage ID
3850 // Ops[1] = Version (LiteralNum)
3851 //
3852 Ops.clear();
Kévin Petitf0515712020-01-07 18:29:20 +00003853 switch (clspv::Option::Language()) {
3854 case clspv::Option::SourceLanguage::OpenCL_C_10:
3855 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(100);
3856 break;
3857 case clspv::Option::SourceLanguage::OpenCL_C_11:
3858 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(110);
3859 break;
3860 case clspv::Option::SourceLanguage::OpenCL_C_12:
Kévin Petit0fc88042019-04-09 23:25:02 +01003861 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(120);
Kévin Petitf0515712020-01-07 18:29:20 +00003862 break;
3863 case clspv::Option::SourceLanguage::OpenCL_C_20:
3864 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(200);
3865 break;
3866 case clspv::Option::SourceLanguage::OpenCL_CPP:
3867 Ops << MkNum(spv::SourceLanguageOpenCL_CPP) << MkNum(100);
3868 break;
3869 default:
3870 Ops << MkNum(spv::SourceLanguageUnknown) << MkNum(0);
3871 break;
Kévin Petit0fc88042019-04-09 23:25:02 +01003872 }
David Neto22f144c2017-06-12 14:26:21 -04003873
David Neto87846742018-04-11 17:36:22 -04003874 auto *OpenSourceInst = new SPIRVInstruction(spv::OpSource, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003875 SPIRVInstList.insert(InsertPoint, OpenSourceInst);
3876
3877 if (!BuiltinDimVec.empty()) {
3878 //
3879 // Generate OpDecorates for x/y/z dimension.
3880 //
3881 // Ops[0] = Target ID
3882 // Ops[1] = Decoration (SpecId)
David Neto257c3892018-04-11 13:19:45 -04003883 // Ops[2] = Specialization Constant ID (Literal Number)
David Neto22f144c2017-06-12 14:26:21 -04003884
3885 // X Dimension
3886 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003887 Ops << MkId(BuiltinDimVec[0]) << MkNum(spv::DecorationSpecId) << MkNum(0);
David Neto87846742018-04-11 17:36:22 -04003888 SPIRVInstList.insert(InsertPoint,
3889 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003890
3891 // Y Dimension
3892 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003893 Ops << MkId(BuiltinDimVec[1]) << MkNum(spv::DecorationSpecId) << MkNum(1);
David Neto87846742018-04-11 17:36:22 -04003894 SPIRVInstList.insert(InsertPoint,
3895 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003896
3897 // Z Dimension
3898 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003899 Ops << MkId(BuiltinDimVec[2]) << MkNum(spv::DecorationSpecId) << MkNum(2);
David Neto87846742018-04-11 17:36:22 -04003900 SPIRVInstList.insert(InsertPoint,
3901 new SPIRVInstruction(spv::OpDecorate, Ops));
David Neto22f144c2017-06-12 14:26:21 -04003902 }
3903}
3904
David Netob6e2e062018-04-25 10:32:06 -04003905void SPIRVProducerPass::GenerateEntryPointInitialStores() {
3906 // Work around a driver bug. Initializers on Private variables might not
3907 // work. So the start of the kernel should store the initializer value to the
3908 // variables. Yes, *every* entry point pays this cost if *any* entry point
3909 // uses this builtin. At this point I judge this to be an acceptable tradeoff
3910 // of complexity vs. runtime, for a broken driver.
alan-bakerb6b09dc2018-11-08 16:59:28 -05003911 // TODO(dneto): Remove this at some point once fixed drivers are widely
3912 // available.
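  // The emitted store is roughly (illustrative names, not real IDs):
  //   OpStore %workgroup_size_var %workgroup_size_initializer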
David Netob6e2e062018-04-25 10:32:06 -04003913 if (WorkgroupSizeVarID) {
3914 assert(WorkgroupSizeValueID);
3915
3916 SPIRVOperandList Ops;
3917 Ops << MkId(WorkgroupSizeVarID) << MkId(WorkgroupSizeValueID);
3918
3919 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
3920 getSPIRVInstList().push_back(Inst);
3921 }
3922}
3923
David Neto22f144c2017-06-12 14:26:21 -04003924void SPIRVProducerPass::GenerateFuncBody(Function &F) {
3925 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
3926 ValueMapType &VMap = getValueMap();
3927
David Netob6e2e062018-04-25 10:32:06 -04003928 const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;
David Neto22f144c2017-06-12 14:26:21 -04003929
3930 for (BasicBlock &BB : F) {
3931 // Register BasicBlock to ValueMap.
3932 VMap[&BB] = nextID;
3933
3934 //
3935 // Generate OpLabel for Basic Block.
3936 //
3937 SPIRVOperandList Ops;
David Neto87846742018-04-11 17:36:22 -04003938 auto *Inst = new SPIRVInstruction(spv::OpLabel, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003939 SPIRVInstList.push_back(Inst);
3940
David Neto6dcd4712017-06-23 11:06:47 -04003941 // OpVariable instructions must come first.
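    // (SPIR-V expects function-local OpVariables grouped at the start of the
    // function's first block, so this first pass emits only the allocas,
    // ahead of every other instruction in the block.)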
3942 for (Instruction &I : BB) {
alan-baker5b86ed72019-02-15 08:26:50 -05003943 if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
3944 // Allocating a pointer requires variable pointers.
3945 if (alloca->getAllocatedType()->isPointerTy()) {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003946 setVariablePointersCapabilities(
3947 alloca->getAllocatedType()->getPointerAddressSpace());
alan-baker5b86ed72019-02-15 08:26:50 -05003948 }
David Neto6dcd4712017-06-23 11:06:47 -04003949 GenerateInstruction(I);
3950 }
3951 }
3952
David Neto22f144c2017-06-12 14:26:21 -04003953 if (&BB == &F.getEntryBlock() && IsKernel) {
David Netob6e2e062018-04-25 10:32:06 -04003954 if (clspv::Option::HackInitializers()) {
3955 GenerateEntryPointInitialStores();
3956 }
David Neto22f144c2017-06-12 14:26:21 -04003957 }
3958
3959 for (Instruction &I : BB) {
David Neto6dcd4712017-06-23 11:06:47 -04003960 if (!isa<AllocaInst>(I)) {
3961 GenerateInstruction(I);
3962 }
David Neto22f144c2017-06-12 14:26:21 -04003963 }
3964 }
3965}
3966
3967spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
3968 const std::map<CmpInst::Predicate, spv::Op> Map = {
3969 {CmpInst::ICMP_EQ, spv::OpIEqual},
3970 {CmpInst::ICMP_NE, spv::OpINotEqual},
3971 {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
3972 {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
3973 {CmpInst::ICMP_ULT, spv::OpULessThan},
3974 {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
3975 {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
3976 {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
3977 {CmpInst::ICMP_SLT, spv::OpSLessThan},
3978 {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
3979 {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
3980 {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
3981 {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
3982 {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
3983 {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
3984 {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
3985 {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
3986 {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
3987 {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
3988 {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
3989 {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
3990 {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};
3991
3992 assert(0 != Map.count(I->getPredicate()));
3993
3994 return Map.at(I->getPredicate());
3995}
3996
3997spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
3998 const std::map<unsigned, spv::Op> Map{
3999 {Instruction::Trunc, spv::OpUConvert},
4000 {Instruction::ZExt, spv::OpUConvert},
4001 {Instruction::SExt, spv::OpSConvert},
4002 {Instruction::FPToUI, spv::OpConvertFToU},
4003 {Instruction::FPToSI, spv::OpConvertFToS},
4004 {Instruction::UIToFP, spv::OpConvertUToF},
4005 {Instruction::SIToFP, spv::OpConvertSToF},
4006 {Instruction::FPTrunc, spv::OpFConvert},
4007 {Instruction::FPExt, spv::OpFConvert},
4008 {Instruction::BitCast, spv::OpBitcast}};
4009
4010 assert(0 != Map.count(I.getOpcode()));
4011
4012 return Map.at(I.getOpcode());
4013}
4014
4015spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
Kévin Petit24272b62018-10-18 19:16:12 +00004016 if (I.getType()->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04004017 switch (I.getOpcode()) {
4018 default:
4019 break;
4020 case Instruction::Or:
4021 return spv::OpLogicalOr;
4022 case Instruction::And:
4023 return spv::OpLogicalAnd;
4024 case Instruction::Xor:
4025 return spv::OpLogicalNotEqual;
4026 }
4027 }
4028
alan-bakerb6b09dc2018-11-08 16:59:28 -05004029 const std::map<unsigned, spv::Op> Map{
David Neto22f144c2017-06-12 14:26:21 -04004030 {Instruction::Add, spv::OpIAdd},
4031 {Instruction::FAdd, spv::OpFAdd},
4032 {Instruction::Sub, spv::OpISub},
4033 {Instruction::FSub, spv::OpFSub},
4034 {Instruction::Mul, spv::OpIMul},
4035 {Instruction::FMul, spv::OpFMul},
4036 {Instruction::UDiv, spv::OpUDiv},
4037 {Instruction::SDiv, spv::OpSDiv},
4038 {Instruction::FDiv, spv::OpFDiv},
4039 {Instruction::URem, spv::OpUMod},
4040 {Instruction::SRem, spv::OpSRem},
4041 {Instruction::FRem, spv::OpFRem},
4042 {Instruction::Or, spv::OpBitwiseOr},
4043 {Instruction::Xor, spv::OpBitwiseXor},
4044 {Instruction::And, spv::OpBitwiseAnd},
4045 {Instruction::Shl, spv::OpShiftLeftLogical},
4046 {Instruction::LShr, spv::OpShiftRightLogical},
4047 {Instruction::AShr, spv::OpShiftRightArithmetic}};
4048
4049 assert(0 != Map.count(I.getOpcode()));
4050
4051 return Map.at(I.getOpcode());
4052}
4053
4054void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
4055 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
4056 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04004057 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
4058 LLVMContext &Context = I.getParent()->getParent()->getParent()->getContext();
4059
4060 // Register Instruction to ValueMap.
4061 if (0 == VMap[&I]) {
4062 VMap[&I] = nextID;
4063 }
4064
4065 switch (I.getOpcode()) {
4066 default: {
4067 if (Instruction::isCast(I.getOpcode())) {
4068 //
4069 // Generate SPIRV instructions for cast operators.
4070 //
4071
David Netod2de94a2017-08-28 17:27:47 -04004072 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04004073 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04004074 auto toI8 = Ty == Type::getInt8Ty(Context);
4075 auto fromI32 = OpTy == Type::getInt32Ty(Context);
David Neto22f144c2017-06-12 14:26:21 -04004076 // Handle zext, sext and uitofp with i1 type specially.
4077 if ((I.getOpcode() == Instruction::ZExt ||
4078 I.getOpcode() == Instruction::SExt ||
4079 I.getOpcode() == Instruction::UIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05004080 OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04004081 //
4082 // Generate OpSelect.
4083 //
4084
4085 // Ops[0] = Result Type ID
4086 // Ops[1] = Condition ID
4087 // Ops[2] = True Constant ID
4088 // Ops[3] = False Constant ID
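        // For example (illustrative; the %names are invented):
        //   zext i1 %c to i32     ==>  %r = OpSelect %uint %c %uint_1 %uint_0
        //   sext i1 %c to i32     ==>  %r = OpSelect %uint %c %uint_max %uint_0
        //   uitofp i1 %c to float ==>  %r = OpSelect %float %c %float_1 %float_0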
4089 SPIRVOperandList Ops;
4090
David Neto257c3892018-04-11 13:19:45 -04004091 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004092
David Neto22f144c2017-06-12 14:26:21 -04004093 uint32_t CondID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04004094 Ops << MkId(CondID);
David Neto22f144c2017-06-12 14:26:21 -04004095
4096 uint32_t TrueID = 0;
4097 if (I.getOpcode() == Instruction::ZExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00004098 TrueID = VMap[ConstantInt::get(I.getType(), 1)];
David Neto22f144c2017-06-12 14:26:21 -04004099 } else if (I.getOpcode() == Instruction::SExt) {
Kévin Petit7bfb8992019-02-26 13:45:08 +00004100 TrueID = VMap[ConstantInt::getSigned(I.getType(), -1)];
David Neto22f144c2017-06-12 14:26:21 -04004101 } else {
4102 TrueID = VMap[ConstantFP::get(Context, APFloat(1.0f))];
4103 }
David Neto257c3892018-04-11 13:19:45 -04004104 Ops << MkId(TrueID);
David Neto22f144c2017-06-12 14:26:21 -04004105
4106 uint32_t FalseID = 0;
4107 if (I.getOpcode() == Instruction::ZExt) {
4108 FalseID = VMap[Constant::getNullValue(I.getType())];
4109 } else if (I.getOpcode() == Instruction::SExt) {
4110 FalseID = VMap[Constant::getNullValue(I.getType())];
4111 } else {
4112 FalseID = VMap[ConstantFP::get(Context, APFloat(0.0f))];
4113 }
David Neto257c3892018-04-11 13:19:45 -04004114 Ops << MkId(FalseID);
David Neto22f144c2017-06-12 14:26:21 -04004115
David Neto87846742018-04-11 17:36:22 -04004116 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004117 SPIRVInstList.push_back(Inst);
alan-bakerb39c8262019-03-08 14:03:37 -05004118 } else if (!clspv::Option::Int8Support() &&
4119 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04004120 // The SPIR-V target type is a 32-bit int. Keep only the bottom
4121 // 8 bits.
4122 // Before:
4123 // %result = trunc i32 %a to i8
        // After:
4125 // %result = OpBitwiseAnd %uint %a %uint_255
4126
4127 SPIRVOperandList Ops;
4128
David Neto257c3892018-04-11 13:19:45 -04004129 Ops << MkId(lookupType(OpTy)) << MkId(VMap[I.getOperand(0)]);
David Netod2de94a2017-08-28 17:27:47 -04004130
4131 Type *UintTy = Type::getInt32Ty(Context);
4132 uint32_t MaskID = VMap[ConstantInt::get(UintTy, 255)];
David Neto257c3892018-04-11 13:19:45 -04004133 Ops << MkId(MaskID);
David Netod2de94a2017-08-28 17:27:47 -04004134
David Neto87846742018-04-11 17:36:22 -04004135 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Netod2de94a2017-08-28 17:27:47 -04004136 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004137 } else {
4138 // Ops[0] = Result Type ID
4139 // Ops[1] = Source Value ID
4140 SPIRVOperandList Ops;
4141
David Neto257c3892018-04-11 13:19:45 -04004142 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004143
David Neto87846742018-04-11 17:36:22 -04004144 auto *Inst = new SPIRVInstruction(GetSPIRVCastOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004145 SPIRVInstList.push_back(Inst);
4146 }
4147 } else if (isa<BinaryOperator>(I)) {
4148 //
4149 // Generate SPIRV instructions for binary operators.
4150 //
4151
4152 // Handle xor with i1 type specially.
4153 if (I.getOpcode() == Instruction::Xor &&
4154 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00004155 ((isa<ConstantInt>(I.getOperand(0)) &&
4156 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
4157 (isa<ConstantInt>(I.getOperand(1)) &&
4158 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04004159 //
4160 // Generate OpLogicalNot.
4161 //
4162 // Ops[0] = Result Type ID
4163 // Ops[1] = Operand
4164 SPIRVOperandList Ops;
4165
David Neto257c3892018-04-11 13:19:45 -04004166 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004167
4168 Value *CondV = I.getOperand(0);
4169 if (isa<Constant>(I.getOperand(0))) {
4170 CondV = I.getOperand(1);
4171 }
David Neto257c3892018-04-11 13:19:45 -04004172 Ops << MkId(VMap[CondV]);
David Neto22f144c2017-06-12 14:26:21 -04004173
David Neto87846742018-04-11 17:36:22 -04004174 auto *Inst = new SPIRVInstruction(spv::OpLogicalNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004175 SPIRVInstList.push_back(Inst);
4176 } else {
4177 // Ops[0] = Result Type ID
4178 // Ops[1] = Operand 0
4179 // Ops[2] = Operand 1
4180 SPIRVOperandList Ops;
4181
David Neto257c3892018-04-11 13:19:45 -04004182 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
4183 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004184
David Neto87846742018-04-11 17:36:22 -04004185 auto *Inst =
4186 new SPIRVInstruction(GetSPIRVBinaryOpcode(I), nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004187 SPIRVInstList.push_back(Inst);
4188 }
alan-bakerc9c55ae2019-12-02 16:01:27 -05004189 } else if (I.getOpcode() == Instruction::FNeg) {
4190 // The only unary operator.
4191 //
4192 // Ops[0] = Result Type ID
4193 // Ops[1] = Operand 0
4194 SPIRVOperandList ops;
4195
4196 ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
4197 auto *Inst = new SPIRVInstruction(spv::OpFNegate, nextID++, ops);
4198 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004199 } else {
4200 I.print(errs());
4201 llvm_unreachable("Unsupported instruction???");
4202 }
4203 break;
4204 }
4205 case Instruction::GetElementPtr: {
4206 auto &GlobalConstArgSet = getGlobalConstArgSet();
4207
4208 //
4209 // Generate OpAccessChain.
4210 //
4211 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
4212
4217 // Ops[0] = Result Type ID
4218 // Ops[1] = Base ID
4219 // Ops[2] ... Ops[n] = Indexes ID
4220 SPIRVOperandList Ops;
4221
alan-bakerb6b09dc2018-11-08 16:59:28 -05004222 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04004223 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
4224 GlobalConstArgSet.count(GEP->getPointerOperand())) {
4225 // Use pointer type with private address space for global constant.
4226 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04004227 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04004228 }
David Neto257c3892018-04-11 13:19:45 -04004229
4230 Ops << MkId(lookupType(ResultType));
David Neto22f144c2017-06-12 14:26:21 -04004231
David Neto862b7d82018-06-14 18:48:37 -04004232 // Generate the base pointer.
4233 Ops << MkId(VMap[GEP->getPointerOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04004234
David Neto862b7d82018-06-14 18:48:37 -04004235 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04004236
4237 //
4238 // Follows below rules for gep.
4239 //
    // 1. If gep's first index is the constant 0, generate OpAccessChain and
    //    ignore gep's first index.
    // 2. If gep's first index is a non-zero constant, generate
    //    OpPtrAccessChain and use gep's first index.
    // 3. If gep's first index is not a constant, generate OpPtrAccessChain
    //    and use gep's first index.
    // 4. In any case not covered by 1, 2, or 3, generate OpAccessChain and
    //    use gep's first index.
4248 //
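    // For example (illustrative; %names invented):
    //   gep %p, 0, 2, 1  ==>  OpAccessChain    %result_ty %p %uint_2 %uint_1
    //   gep %p, 3, 2     ==>  OpPtrAccessChain %result_ty %p %uint_3 %uint_2
    //   gep %p, %i       ==>  OpPtrAccessChain %result_ty %p %i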
4249 spv::Op Opcode = spv::OpAccessChain;
4250 unsigned offset = 0;
4251 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04004252 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04004253 offset = 1;
      } else {
David Neto22f144c2017-06-12 14:26:21 -04004255 Opcode = spv::OpPtrAccessChain;
David Neto22f144c2017-06-12 14:26:21 -04004256 }
David Neto862b7d82018-06-14 18:48:37 -04004257 } else {
David Neto22f144c2017-06-12 14:26:21 -04004258 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04004259 }
4260
4261 if (Opcode == spv::OpPtrAccessChain) {
David Neto1a1a0582017-07-07 12:01:44 -04004262 // Do we need to generate ArrayStride? Check against the GEP result type
4263 // rather than the pointer type of the base because when indexing into
4264 // an OpenCL program-scope constant, we'll swap out the LLVM base pointer
4265 // for something else in the SPIR-V.
4266 // E.g. see test/PointerAccessChain/pointer_index_is_constant_1.cl
alan-baker5b86ed72019-02-15 08:26:50 -05004267 auto address_space = ResultType->getAddressSpace();
4268 setVariablePointersCapabilities(address_space);
4269 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04004270 case spv::StorageClassStorageBuffer:
4271 case spv::StorageClassUniform:
David Neto1a1a0582017-07-07 12:01:44 -04004272 // Save the need to generate an ArrayStride decoration. But defer
4273 // generation until later, so we only make one decoration.
David Neto85082642018-03-24 06:55:20 -07004274 getTypesNeedingArrayStride().insert(ResultType);
Alan Bakerfcda9482018-10-02 17:09:59 -04004275 break;
4276 default:
4277 break;
David Neto1a1a0582017-07-07 12:01:44 -04004278 }
David Neto22f144c2017-06-12 14:26:21 -04004279 }
4280
4281 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
David Neto257c3892018-04-11 13:19:45 -04004282 Ops << MkId(VMap[*II]);
David Neto22f144c2017-06-12 14:26:21 -04004283 }
4284
David Neto87846742018-04-11 17:36:22 -04004285 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004286 SPIRVInstList.push_back(Inst);
4287 break;
4288 }
4289 case Instruction::ExtractValue: {
4290 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
4291 // Ops[0] = Result Type ID
4292 // Ops[1] = Composite ID
4293 // Ops[2] ... Ops[n] = Indexes (Literal Number)
4294 SPIRVOperandList Ops;
4295
David Neto257c3892018-04-11 13:19:45 -04004296 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004297
4298 uint32_t CompositeID = VMap[EVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04004299 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04004300
4301 for (auto &Index : EVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04004302 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04004303 }
4304
David Neto87846742018-04-11 17:36:22 -04004305 auto *Inst = new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004306 SPIRVInstList.push_back(Inst);
4307 break;
4308 }
4309 case Instruction::InsertValue: {
4310 InsertValueInst *IVI = cast<InsertValueInst>(&I);
4311 // Ops[0] = Result Type ID
4312 // Ops[1] = Object ID
4313 // Ops[2] = Composite ID
4314 // Ops[3] ... Ops[n] = Indexes (Literal Number)
4315 SPIRVOperandList Ops;
4316
4317 uint32_t ResTyID = lookupType(I.getType());
David Neto257c3892018-04-11 13:19:45 -04004318 Ops << MkId(ResTyID);
David Neto22f144c2017-06-12 14:26:21 -04004319
4320 uint32_t ObjectID = VMap[IVI->getInsertedValueOperand()];
David Neto257c3892018-04-11 13:19:45 -04004321 Ops << MkId(ObjectID);
David Neto22f144c2017-06-12 14:26:21 -04004322
4323 uint32_t CompositeID = VMap[IVI->getAggregateOperand()];
David Neto257c3892018-04-11 13:19:45 -04004324 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04004325
4326 for (auto &Index : IVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04004327 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04004328 }
4329
David Neto87846742018-04-11 17:36:22 -04004330 auto *Inst = new SPIRVInstruction(spv::OpCompositeInsert, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004331 SPIRVInstList.push_back(Inst);
4332 break;
4333 }
4334 case Instruction::Select: {
4335 //
4336 // Generate OpSelect.
4337 //
4338
4339 // Ops[0] = Result Type ID
4340 // Ops[1] = Condition ID
4341 // Ops[2] = True Constant ID
4342 // Ops[3] = False Constant ID
4343 SPIRVOperandList Ops;
4344
4345 // Find SPIRV instruction for parameter type.
4346 auto Ty = I.getType();
4347 if (Ty->isPointerTy()) {
4348 auto PointeeTy = Ty->getPointerElementType();
4349 if (PointeeTy->isStructTy() &&
4350 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
4351 Ty = PointeeTy;
alan-baker5b86ed72019-02-15 08:26:50 -05004352 } else {
4353 // Selecting between pointers requires variable pointers.
4354 setVariablePointersCapabilities(Ty->getPointerAddressSpace());
4355 if (!hasVariablePointers() && !selectFromSameObject(&I)) {
4356 setVariablePointers(true);
4357 }
David Neto22f144c2017-06-12 14:26:21 -04004358 }
4359 }
4360
David Neto257c3892018-04-11 13:19:45 -04004361 Ops << MkId(lookupType(Ty)) << MkId(VMap[I.getOperand(0)])
4362 << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004363
David Neto87846742018-04-11 17:36:22 -04004364 auto *Inst = new SPIRVInstruction(spv::OpSelect, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004365 SPIRVInstList.push_back(Inst);
4366 break;
4367 }
4368 case Instruction::ExtractElement: {
4369 // Handle <4 x i8> type manually.
4370 Type *CompositeTy = I.getOperand(0)->getType();
4371 if (is4xi8vec(CompositeTy)) {
4372 //
4373 // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
4374 // <4 x i8>.
4375 //
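      // Extracting byte i of the packed word amounts to, schematically
      // (illustrative; %names invented):
      //   %shifted = OpShiftRightLogical %uint %vec %shift   ; %shift = i * 8
      //   %result  = OpBitwiseAnd %uint %shifted %uint_255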
4376
4377 //
4378 // Generate OpShiftRightLogical
4379 //
4380 // Ops[0] = Result Type ID
4381 // Ops[1] = Operand 0
4382 // Ops[2] = Operand 1
4383 //
4384 SPIRVOperandList Ops;
4385
David Neto257c3892018-04-11 13:19:45 -04004386 Ops << MkId(lookupType(CompositeTy));
David Neto22f144c2017-06-12 14:26:21 -04004387
4388 uint32_t Op0ID = VMap[I.getOperand(0)];
David Neto257c3892018-04-11 13:19:45 -04004389 Ops << MkId(Op0ID);
David Neto22f144c2017-06-12 14:26:21 -04004390
4391 uint32_t Op1ID = 0;
4392 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
4393 // Handle constant index.
4394 uint64_t Idx = CI->getZExtValue();
4395 Value *ShiftAmount =
4396 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4397 Op1ID = VMap[ShiftAmount];
4398 } else {
4399 // Handle variable index.
4400 SPIRVOperandList TmpOps;
4401
David Neto257c3892018-04-11 13:19:45 -04004402 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4403 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004404
4405 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004406 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004407
4408 Op1ID = nextID;
4409
David Neto87846742018-04-11 17:36:22 -04004410 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004411 SPIRVInstList.push_back(TmpInst);
4412 }
David Neto257c3892018-04-11 13:19:45 -04004413 Ops << MkId(Op1ID);
David Neto22f144c2017-06-12 14:26:21 -04004414
4415 uint32_t ShiftID = nextID;
4416
David Neto87846742018-04-11 17:36:22 -04004417 auto *Inst =
4418 new SPIRVInstruction(spv::OpShiftRightLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004419 SPIRVInstList.push_back(Inst);
4420
4421 //
4422 // Generate OpBitwiseAnd
4423 //
4424 // Ops[0] = Result Type ID
4425 // Ops[1] = Operand 0
4426 // Ops[2] = Operand 1
4427 //
4428 Ops.clear();
4429
David Neto257c3892018-04-11 13:19:45 -04004430 Ops << MkId(lookupType(CompositeTy)) << MkId(ShiftID);
David Neto22f144c2017-06-12 14:26:21 -04004431
4432 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
David Neto257c3892018-04-11 13:19:45 -04004433 Ops << MkId(VMap[CstFF]);
David Neto22f144c2017-06-12 14:26:21 -04004434
David Neto9b2d6252017-09-06 15:47:37 -04004435 // Reset mapping for this value to the result of the bitwise and.
4436 VMap[&I] = nextID;
4437
David Neto87846742018-04-11 17:36:22 -04004438 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004439 SPIRVInstList.push_back(Inst);
4440 break;
4441 }
4442
4443 // Ops[0] = Result Type ID
4444 // Ops[1] = Composite ID
4445 // Ops[2] ... Ops[n] = Indexes (Literal Number)
4446 SPIRVOperandList Ops;
4447
David Neto257c3892018-04-11 13:19:45 -04004448 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04004449
4450 spv::Op Opcode = spv::OpCompositeExtract;
4451 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
David Neto257c3892018-04-11 13:19:45 -04004452 Ops << MkNum(static_cast<uint32_t>(CI->getZExtValue()));
David Neto22f144c2017-06-12 14:26:21 -04004453 } else {
David Neto257c3892018-04-11 13:19:45 -04004454 Ops << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004455 Opcode = spv::OpVectorExtractDynamic;
4456 }
4457
David Neto87846742018-04-11 17:36:22 -04004458 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004459 SPIRVInstList.push_back(Inst);
4460 break;
4461 }
4462 case Instruction::InsertElement: {
4463 // Handle <4 x i8> type manually.
4464 Type *CompositeTy = I.getOperand(0)->getType();
4465 if (is4xi8vec(CompositeTy)) {
4466 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
4467 uint32_t CstFFID = VMap[CstFF];
4468
4469 uint32_t ShiftAmountID = 0;
4470 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
4471 // Handle constant index.
4472 uint64_t Idx = CI->getZExtValue();
4473 Value *ShiftAmount =
4474 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
4475 ShiftAmountID = VMap[ShiftAmount];
4476 } else {
4477 // Handle variable index.
4478 SPIRVOperandList TmpOps;
4479
David Neto257c3892018-04-11 13:19:45 -04004480 TmpOps << MkId(lookupType(Type::getInt32Ty(Context)))
4481 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004482
4483 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
David Neto257c3892018-04-11 13:19:45 -04004484 TmpOps << MkId(VMap[Cst8]);
David Neto22f144c2017-06-12 14:26:21 -04004485
4486 ShiftAmountID = nextID;
4487
David Neto87846742018-04-11 17:36:22 -04004488 auto *TmpInst = new SPIRVInstruction(spv::OpIMul, nextID++, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004489 SPIRVInstList.push_back(TmpInst);
4490 }
4491
4492 //
4493 // Generate mask operations.
4494 //
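      // Schematically, the sequence below is (illustrative; %names invented):
      //   %mask    = OpShiftLeftLogical %uint %uint_255 %shift  ; %shift = i * 8
      //   %invmask = OpNot %uint %mask
      //   %cleared = OpBitwiseAnd %uint %vec %invmask
      //   %byte    = OpShiftLeftLogical %uint %newval %shift
      //   %result  = OpBitwiseOr %uint %cleared %byte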
4495
4496 // ShiftLeft mask according to index of insertelement.
4497 SPIRVOperandList Ops;
4498
David Neto257c3892018-04-11 13:19:45 -04004499 const uint32_t ResTyID = lookupType(CompositeTy);
4500 Ops << MkId(ResTyID) << MkId(CstFFID) << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004501
4502 uint32_t MaskID = nextID;
4503
David Neto87846742018-04-11 17:36:22 -04004504 auto *Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004505 SPIRVInstList.push_back(Inst);
4506
4507 // Inverse mask.
4508 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004509 Ops << MkId(ResTyID) << MkId(MaskID);
David Neto22f144c2017-06-12 14:26:21 -04004510
4511 uint32_t InvMaskID = nextID;
4512
David Neto87846742018-04-11 17:36:22 -04004513 Inst = new SPIRVInstruction(spv::OpNot, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004514 SPIRVInstList.push_back(Inst);
4515
4516 // Apply mask.
4517 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004518 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(0)]) << MkId(InvMaskID);
David Neto22f144c2017-06-12 14:26:21 -04004519
4520 uint32_t OrgValID = nextID;
4521
David Neto87846742018-04-11 17:36:22 -04004522 Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004523 SPIRVInstList.push_back(Inst);
4524
4525 // Create correct value according to index of insertelement.
4526 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05004527 Ops << MkId(ResTyID) << MkId(VMap[I.getOperand(1)])
4528 << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004529
4530 uint32_t InsertValID = nextID;
4531
David Neto87846742018-04-11 17:36:22 -04004532 Inst = new SPIRVInstruction(spv::OpShiftLeftLogical, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004533 SPIRVInstList.push_back(Inst);
4534
4535 // Insert value to original value.
4536 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004537 Ops << MkId(ResTyID) << MkId(OrgValID) << MkId(InsertValID);
David Neto22f144c2017-06-12 14:26:21 -04004538
David Netoa394f392017-08-26 20:45:29 -04004539 VMap[&I] = nextID;
4540
David Neto87846742018-04-11 17:36:22 -04004541 Inst = new SPIRVInstruction(spv::OpBitwiseOr, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004542 SPIRVInstList.push_back(Inst);
4543
4544 break;
4545 }
4546
David Neto22f144c2017-06-12 14:26:21 -04004547 SPIRVOperandList Ops;
4548
James Priced26efea2018-06-09 23:28:32 +01004549 // Ops[0] = Result Type ID
4550 Ops << MkId(lookupType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004551
4552 spv::Op Opcode = spv::OpCompositeInsert;
4553 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
David Neto257c3892018-04-11 13:19:45 -04004554 const auto value = CI->getZExtValue();
4555 assert(value <= UINT32_MAX);
James Priced26efea2018-06-09 23:28:32 +01004556 // Ops[1] = Object ID
4557 // Ops[2] = Composite ID
4558 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004559 Ops << MkId(VMap[I.getOperand(1)]) << MkId(VMap[I.getOperand(0)])
James Priced26efea2018-06-09 23:28:32 +01004560 << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004561 } else {
James Priced26efea2018-06-09 23:28:32 +01004562 // Ops[1] = Composite ID
4563 // Ops[2] = Object ID
4564 // Ops[3] ... Ops[n] = Indexes (Literal Number)
alan-bakerb6b09dc2018-11-08 16:59:28 -05004565 Ops << MkId(VMap[I.getOperand(0)]) << MkId(VMap[I.getOperand(1)])
James Priced26efea2018-06-09 23:28:32 +01004566 << MkId(VMap[I.getOperand(2)]);
David Neto22f144c2017-06-12 14:26:21 -04004567 Opcode = spv::OpVectorInsertDynamic;
4568 }
4569
David Neto87846742018-04-11 17:36:22 -04004570 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004571 SPIRVInstList.push_back(Inst);
4572 break;
4573 }
4574 case Instruction::ShuffleVector: {
4575 // Ops[0] = Result Type ID
4576 // Ops[1] = Vector 1 ID
4577 // Ops[2] = Vector 2 ID
4578 // Ops[3] ... Ops[n] = Components (Literal Number)
4579 SPIRVOperandList Ops;
4580
David Neto257c3892018-04-11 13:19:45 -04004581 Ops << MkId(lookupType(I.getType())) << MkId(VMap[I.getOperand(0)])
4582 << MkId(VMap[I.getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004583
4584 uint64_t NumElements = 0;
4585 if (Constant *Cst = dyn_cast<Constant>(I.getOperand(2))) {
4586 NumElements = cast<VectorType>(Cst->getType())->getNumElements();
4587
4588 if (Cst->isNullValue()) {
4589 for (unsigned i = 0; i < NumElements; i++) {
David Neto257c3892018-04-11 13:19:45 -04004590 Ops << MkNum(0);
David Neto22f144c2017-06-12 14:26:21 -04004591 }
4592 } else if (const ConstantDataSequential *CDS =
4593 dyn_cast<ConstantDataSequential>(Cst)) {
4594 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
4595 std::vector<uint32_t> LiteralNum;
David Neto257c3892018-04-11 13:19:45 -04004596 const auto value = CDS->getElementAsInteger(i);
4597 assert(value <= UINT32_MAX);
4598 Ops << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004599 }
4600 } else if (const ConstantVector *CV = dyn_cast<ConstantVector>(Cst)) {
4601 for (unsigned i = 0; i < CV->getNumOperands(); i++) {
4602 auto Op = CV->getOperand(i);
4603
4604 uint32_t literal = 0;
4605
4606 if (auto CI = dyn_cast<ConstantInt>(Op)) {
4607 literal = static_cast<uint32_t>(CI->getZExtValue());
4608 } else if (auto UI = dyn_cast<UndefValue>(Op)) {
4609 literal = 0xFFFFFFFFu;
4610 } else {
4611 Op->print(errs());
4612 llvm_unreachable("Unsupported element in ConstantVector!");
4613 }
4614
David Neto257c3892018-04-11 13:19:45 -04004615 Ops << MkNum(literal);
David Neto22f144c2017-06-12 14:26:21 -04004616 }
4617 } else {
4618 Cst->print(errs());
4619 llvm_unreachable("Unsupported constant mask in ShuffleVector!");
4620 }
4621 }
4622
David Neto87846742018-04-11 17:36:22 -04004623 auto *Inst = new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004624 SPIRVInstList.push_back(Inst);
4625 break;
4626 }
4627 case Instruction::ICmp:
4628 case Instruction::FCmp: {
4629 CmpInst *CmpI = cast<CmpInst>(&I);
4630
David Netod4ca2e62017-07-06 18:47:35 -04004631 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004632 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04004633 if (isa<PointerType>(ArgTy)) {
4634 CmpI->print(errs());
alan-baker21574d32020-01-29 16:00:31 -05004635 std::string name = I.getParent()->getParent()->getName().str();
David Netod4ca2e62017-07-06 18:47:35 -04004636 errs()
4637 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
4638 << "in function " << name << "\n";
4639 llvm_unreachable("Pointer equality check is invalid");
4640 break;
4641 }
4642
David Neto257c3892018-04-11 13:19:45 -04004643 // Ops[0] = Result Type ID
4644 // Ops[1] = Operand 1 ID
4645 // Ops[2] = Operand 2 ID
4646 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04004647
David Neto257c3892018-04-11 13:19:45 -04004648 Ops << MkId(lookupType(CmpI->getType())) << MkId(VMap[CmpI->getOperand(0)])
4649 << MkId(VMap[CmpI->getOperand(1)]);
David Neto22f144c2017-06-12 14:26:21 -04004650
4651 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
David Neto87846742018-04-11 17:36:22 -04004652 auto *Inst = new SPIRVInstruction(Opcode, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004653 SPIRVInstList.push_back(Inst);
4654 break;
4655 }
4656 case Instruction::Br: {
    // A branch instruction is deferred because it needs the label IDs of its
    // successors. Record this slot's location in the SPIRVInstructionList.
4659 DeferredInsts.push_back(
4660 std::make_tuple(&I, --SPIRVInstList.end(), 0 /* No id */));
4661 break;
4662 }
4663 case Instruction::Switch: {
4664 I.print(errs());
4665 llvm_unreachable("Unsupported instruction???");
4666 break;
4667 }
4668 case Instruction::IndirectBr: {
4669 I.print(errs());
4670 llvm_unreachable("Unsupported instruction???");
4671 break;
4672 }
4673 case Instruction::PHI: {
    // A PHI instruction is deferred because it needs the IDs of its incoming
    // values and blocks. Record this slot's location in the
    // SPIRVInstructionList.
4676 DeferredInsts.push_back(
4677 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
4678 break;
4679 }
4680 case Instruction::Alloca: {
4681 //
4682 // Generate OpVariable.
4683 //
4684 // Ops[0] : Result Type ID
4685 // Ops[1] : Storage Class
4686 SPIRVOperandList Ops;
4687
David Neto257c3892018-04-11 13:19:45 -04004688 Ops << MkId(lookupType(I.getType())) << MkNum(spv::StorageClassFunction);
David Neto22f144c2017-06-12 14:26:21 -04004689
David Neto87846742018-04-11 17:36:22 -04004690 auto *Inst = new SPIRVInstruction(spv::OpVariable, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004691 SPIRVInstList.push_back(Inst);
4692 break;
4693 }
4694 case Instruction::Load: {
4695 LoadInst *LD = cast<LoadInst>(&I);
4696 //
4697 // Generate OpLoad.
4698 //
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004699
alan-baker5b86ed72019-02-15 08:26:50 -05004700 if (LD->getType()->isPointerTy()) {
4701 // Loading a pointer requires variable pointers.
4702 setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
4703 }
David Neto22f144c2017-06-12 14:26:21 -04004704
David Neto0a2f98d2017-09-15 19:38:40 -04004705 uint32_t ResTyID = lookupType(LD->getType());
David Netoa60b00b2017-09-15 16:34:09 -04004706 uint32_t PointerID = VMap[LD->getPointerOperand()];
4707
4708 // This is a hack to work around what looks like a driver bug.
4709 // When we're loading from the special variable holding the WorkgroupSize
    // builtin value, use an OpBitwiseAnd of the value's ID rather than
4711 // generating a load.
David Neto66cfe642018-03-24 06:13:56 -07004712 // TODO(dneto): Remove this awful hack once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -04004713 if (PointerID == WorkgroupSizeVarID) {
David Neto0a2f98d2017-09-15 19:38:40 -04004714 // Generate a bitwise-and of the original value with itself.
4715 // We should have been able to get away with just an OpCopyObject,
4716 // but we need something more complex to get past certain driver bugs.
4717 // This is ridiculous, but necessary.
4718 // TODO(dneto): Revisit this once drivers fix their bugs.
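      // Schematically (illustrative; %names invented): instead of
      //   %v = OpLoad %v3uint %workgroup_size_var
      // we emit
      //   %v = OpBitwiseAnd %v3uint %workgroup_size_value %workgroup_size_value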
4719
4720 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004721 Ops << MkId(ResTyID) << MkId(WorkgroupSizeValueID)
4722 << MkId(WorkgroupSizeValueID);
David Neto0a2f98d2017-09-15 19:38:40 -04004723
David Neto87846742018-04-11 17:36:22 -04004724 auto *Inst = new SPIRVInstruction(spv::OpBitwiseAnd, nextID++, Ops);
David Neto0a2f98d2017-09-15 19:38:40 -04004725 SPIRVInstList.push_back(Inst);
David Netoa60b00b2017-09-15 16:34:09 -04004726 break;
4727 }
4728
4729 // This is the normal path. Generate a load.
4730
David Neto22f144c2017-06-12 14:26:21 -04004731 // Ops[0] = Result Type ID
4732 // Ops[1] = Pointer ID
4733 // Ops[2] ... Ops[n] = Optional Memory Access
4734 //
4735 // TODO: Do we need to implement Optional Memory Access???
David Neto0a2f98d2017-09-15 19:38:40 -04004736
David Neto22f144c2017-06-12 14:26:21 -04004737 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04004738 Ops << MkId(ResTyID) << MkId(PointerID);
David Neto22f144c2017-06-12 14:26:21 -04004739
David Neto87846742018-04-11 17:36:22 -04004740 auto *Inst = new SPIRVInstruction(spv::OpLoad, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004741 SPIRVInstList.push_back(Inst);
4742 break;
4743 }
4744 case Instruction::Store: {
4745 StoreInst *ST = cast<StoreInst>(&I);
4746 //
4747 // Generate OpStore.
4748 //
4749
alan-baker5b86ed72019-02-15 08:26:50 -05004750 if (ST->getValueOperand()->getType()->isPointerTy()) {
4751 // Storing a pointer requires variable pointers.
4752 setVariablePointersCapabilities(
4753 ST->getValueOperand()->getType()->getPointerAddressSpace());
4754 }
4755
David Neto22f144c2017-06-12 14:26:21 -04004756 // Ops[0] = Pointer ID
4757 // Ops[1] = Object ID
4758 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
4759 //
4760 // TODO: Do we need to implement Optional Memory Access???
David Neto257c3892018-04-11 13:19:45 -04004761 SPIRVOperandList Ops;
4762 Ops << MkId(VMap[ST->getPointerOperand()])
4763 << MkId(VMap[ST->getValueOperand()]);
David Neto22f144c2017-06-12 14:26:21 -04004764
David Neto87846742018-04-11 17:36:22 -04004765 auto *Inst = new SPIRVInstruction(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004766 SPIRVInstList.push_back(Inst);
4767 break;
4768 }
4769 case Instruction::AtomicCmpXchg: {
4770 I.print(errs());
4771 llvm_unreachable("Unsupported instruction???");
4772 break;
4773 }
4774 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01004775 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
4776
4777 spv::Op opcode;
4778
4779 switch (AtomicRMW->getOperation()) {
4780 default:
4781 I.print(errs());
4782 llvm_unreachable("Unsupported instruction???");
4783 case llvm::AtomicRMWInst::Add:
4784 opcode = spv::OpAtomicIAdd;
4785 break;
4786 case llvm::AtomicRMWInst::Sub:
4787 opcode = spv::OpAtomicISub;
4788 break;
4789 case llvm::AtomicRMWInst::Xchg:
4790 opcode = spv::OpAtomicExchange;
4791 break;
4792 case llvm::AtomicRMWInst::Min:
4793 opcode = spv::OpAtomicSMin;
4794 break;
4795 case llvm::AtomicRMWInst::Max:
4796 opcode = spv::OpAtomicSMax;
4797 break;
4798 case llvm::AtomicRMWInst::UMin:
4799 opcode = spv::OpAtomicUMin;
4800 break;
4801 case llvm::AtomicRMWInst::UMax:
4802 opcode = spv::OpAtomicUMax;
4803 break;
4804 case llvm::AtomicRMWInst::And:
4805 opcode = spv::OpAtomicAnd;
4806 break;
4807 case llvm::AtomicRMWInst::Or:
4808 opcode = spv::OpAtomicOr;
4809 break;
4810 case llvm::AtomicRMWInst::Xor:
4811 opcode = spv::OpAtomicXor;
4812 break;
4813 }
4814
4815 //
4816 // Generate OpAtomic*.
4817 //
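    // For example (illustrative; %names invented):
    //   atomicrmw add i32* %p, i32 %v seq_cst
    //     ==>  %r = OpAtomicIAdd %uint %p %scope_device %semantics %v
    // where %scope_device and %semantics are the constants set up below.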
4818 SPIRVOperandList Ops;
4819
David Neto257c3892018-04-11 13:19:45 -04004820 Ops << MkId(lookupType(I.getType()))
4821 << MkId(VMap[AtomicRMW->getPointerOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004822
4823 auto IntTy = Type::getInt32Ty(I.getContext());
Neil Henning39672102017-09-29 14:33:13 +01004824 const auto ConstantScopeDevice = ConstantInt::get(IntTy, spv::ScopeDevice);
David Neto257c3892018-04-11 13:19:45 -04004825 Ops << MkId(VMap[ConstantScopeDevice]);
Neil Henning39672102017-09-29 14:33:13 +01004826
4827 const auto ConstantMemorySemantics = ConstantInt::get(
4828 IntTy, spv::MemorySemanticsUniformMemoryMask |
4829 spv::MemorySemanticsSequentiallyConsistentMask);
David Neto257c3892018-04-11 13:19:45 -04004830 Ops << MkId(VMap[ConstantMemorySemantics]);
Neil Henning39672102017-09-29 14:33:13 +01004831
David Neto257c3892018-04-11 13:19:45 -04004832 Ops << MkId(VMap[AtomicRMW->getValOperand()]);
Neil Henning39672102017-09-29 14:33:13 +01004833
4834 VMap[&I] = nextID;
4835
David Neto87846742018-04-11 17:36:22 -04004836 auto *Inst = new SPIRVInstruction(opcode, nextID++, Ops);
Neil Henning39672102017-09-29 14:33:13 +01004837 SPIRVInstList.push_back(Inst);
David Neto22f144c2017-06-12 14:26:21 -04004838 break;
4839 }
4840 case Instruction::Fence: {
4841 I.print(errs());
4842 llvm_unreachable("Unsupported instruction???");
4843 break;
4844 }
4845 case Instruction::Call: {
4846 CallInst *Call = dyn_cast<CallInst>(&I);
4847 Function *Callee = Call->getCalledFunction();
4848
Alan Baker202c8c72018-08-13 13:47:44 -04004849 if (Callee->getName().startswith(clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004850 if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
4851 // Generate an OpLoad
4852 SPIRVOperandList Ops;
4853 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004854
David Neto862b7d82018-06-14 18:48:37 -04004855 Ops << MkId(lookupType(Call->getType()->getPointerElementType()))
4856 << MkId(ResourceVarDeferredLoadCalls[Call]);
4857
4858 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
4859 SPIRVInstList.push_back(Inst);
4860 VMap[Call] = load_id;
4861 break;
4862
4863 } else {
4864 // This maps to an OpVariable we've already generated.
4865 // No code is generated for the call.
4866 }
4867 break;
alan-bakerb6b09dc2018-11-08 16:59:28 -05004868 } else if (Callee->getName().startswith(
4869 clspv::WorkgroupAccessorFunction())) {
Alan Baker202c8c72018-08-13 13:47:44 -04004870 // Don't codegen an instruction here, but instead map this call directly
4871 // to the workgroup variable id.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004872 int spec_id = static_cast<int>(
4873 cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04004874 const auto &info = LocalSpecIdInfoMap[spec_id];
4875 VMap[Call] = info.variable_id;
4876 break;
David Neto862b7d82018-06-14 18:48:37 -04004877 }
4878
4879 // Sampler initializers become a load of the corresponding sampler.
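    // For example, a literal sampler such as CLK_ADDRESS_CLAMP |
    // CLK_FILTER_NEAREST ends up as roughly (illustrative; %names invented):
    //   %s = OpLoad %sampler %sampler_var_for_that_literal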
4880
Kévin Petitdf71de32019-04-09 14:09:50 +01004881 if (Callee->getName().equals(clspv::LiteralSamplerFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004882 // Map this to a load from the variable.
alan-baker09cb9802019-12-10 13:16:27 -05004883 const auto third_param = static_cast<unsigned>(
4884 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue());
4885 auto sampler_value = third_param;
4886 if (clspv::Option::UseSamplerMap()) {
4887 sampler_value = getSamplerMap()[third_param].first;
4888 }
David Neto862b7d82018-06-14 18:48:37 -04004889
4890 // Generate an OpLoad
David Neto22f144c2017-06-12 14:26:21 -04004891 SPIRVOperandList Ops;
David Neto862b7d82018-06-14 18:48:37 -04004892 const auto load_id = nextID++;
David Neto22f144c2017-06-12 14:26:21 -04004893
David Neto257c3892018-04-11 13:19:45 -04004894 Ops << MkId(lookupType(SamplerTy->getPointerElementType()))
alan-baker09cb9802019-12-10 13:16:27 -05004895 << MkId(SamplerLiteralToIDMap[sampler_value]);
David Neto22f144c2017-06-12 14:26:21 -04004896
David Neto862b7d82018-06-14 18:48:37 -04004897 auto *Inst = new SPIRVInstruction(spv::OpLoad, load_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004898 SPIRVInstList.push_back(Inst);
David Neto862b7d82018-06-14 18:48:37 -04004899 VMap[Call] = load_id;
David Neto22f144c2017-06-12 14:26:21 -04004900 break;
4901 }
4902
Kévin Petit349c9502019-03-28 17:24:14 +00004903 // Handle SPIR-V intrinsics
Kévin Petit9b340262019-06-19 18:31:11 +01004904 spv::Op opcode = StringSwitch<spv::Op>(Callee->getName())
4905 .Case("spirv.atomic_xor", spv::OpAtomicXor)
4906 .Default(spv::OpNop);
David Neto22f144c2017-06-12 14:26:21 -04004907
    // If the switch above didn't find an entry, the intrinsic may be using
    // the name-mangling logic instead.
4910 bool usesMangler = false;
4911 if (opcode == spv::OpNop) {
4912 if (Callee->getName().startswith(clspv::SPIRVOpIntrinsicFunction())) {
4913 auto OpCst = cast<ConstantInt>(Call->getOperand(0));
4914 opcode = static_cast<spv::Op>(OpCst->getZExtValue());
4915 usesMangler = true;
4916 }
4917 }
4918
Kévin Petit349c9502019-03-28 17:24:14 +00004919 if (opcode != spv::OpNop) {
4920
David Neto22f144c2017-06-12 14:26:21 -04004921 SPIRVOperandList Ops;
4922
Kévin Petit349c9502019-03-28 17:24:14 +00004923 if (!I.getType()->isVoidTy()) {
4924 Ops << MkId(lookupType(I.getType()));
4925 }
David Neto22f144c2017-06-12 14:26:21 -04004926
Kévin Petit617a76d2019-04-04 13:54:16 +01004927 unsigned firstOperand = usesMangler ? 1 : 0;
4928 for (unsigned i = firstOperand; i < Call->getNumArgOperands(); i++) {
David Neto257c3892018-04-11 13:19:45 -04004929 Ops << MkId(VMap[Call->getArgOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04004930 }
4931
Kévin Petit349c9502019-03-28 17:24:14 +00004932 if (!I.getType()->isVoidTy()) {
4933 VMap[&I] = nextID;
Kévin Petit8a560882019-03-21 15:24:34 +00004934 }
4935
Kévin Petit349c9502019-03-28 17:24:14 +00004936 SPIRVInstruction *Inst;
4937 if (!I.getType()->isVoidTy()) {
4938 Inst = new SPIRVInstruction(opcode, nextID++, Ops);
4939 } else {
4940 Inst = new SPIRVInstruction(opcode, Ops);
4941 }
Kévin Petit8a560882019-03-21 15:24:34 +00004942 SPIRVInstList.push_back(Inst);
4943 break;
4944 }
4945
    // spirv.copy_memory.* intrinsics become OpCopyMemory instructions.
4947 if (Callee->getName().startswith("spirv.copy_memory")) {
4948 //
4949 // Generate OpCopyMemory.
4950 //
4951
4952 // Ops[0] = Dst ID
4953 // Ops[1] = Src ID
4954 // Ops[2] = Memory Access
4955 // Ops[3] = Alignment
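      // For example (illustrative): a call with dst %d, src %s, alignment 4
      // and a zero volatile flag becomes roughly:
      //   OpCopyMemory %d %s Aligned 4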
4956
4957 auto IsVolatile =
4958 dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue() != 0;
4959
4960 auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
4961 : spv::MemoryAccessMaskNone;
4962
4963 auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;
4964
4965 auto Alignment =
4966 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4967
David Neto257c3892018-04-11 13:19:45 -04004968 SPIRVOperandList Ops;
4969 Ops << MkId(VMap[Call->getArgOperand(0)])
4970 << MkId(VMap[Call->getArgOperand(1)]) << MkNum(MemoryAccess)
4971 << MkNum(static_cast<uint32_t>(Alignment));
David Neto22f144c2017-06-12 14:26:21 -04004972
David Neto87846742018-04-11 17:36:22 -04004973 auto *Inst = new SPIRVInstruction(spv::OpCopyMemory, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004974
4975 SPIRVInstList.push_back(Inst);
4976
4977 break;
4978 }
4979
alan-baker75090e42020-02-20 11:21:04 -05004980 // read_image (with a sampler) is converted to OpSampledImage and
4981 // OpImageSampleExplicitLod. Additionally, OpTypeSampledImage is
4982 // generated.
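    // Schematically (illustrative; %names invented):
    //   %si = OpSampledImage %sampled_image_ty %image %sampler
    //   %px = OpImageSampleExplicitLod %v4float %si %coord Lod %float_0
    // For integer images the sample is produced as a v4int and then OpBitcast
    // to the call's result type.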
alan-bakerf67468c2019-11-25 15:51:49 -05004983 if (clspv::IsSampledImageRead(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004984 //
4985 // Generate OpSampledImage.
4986 //
4987 // Ops[0] = Result Type ID
4988 // Ops[1] = Image ID
4989 // Ops[2] = Sampler ID
4990 //
4991 SPIRVOperandList Ops;
4992
4993 Value *Image = Call->getArgOperand(0);
4994 Value *Sampler = Call->getArgOperand(1);
4995 Value *Coordinate = Call->getArgOperand(2);
4996
4997 TypeMapType &OpImageTypeMap = getImageTypeMap();
4998 Type *ImageTy = Image->getType()->getPointerElementType();
4999 uint32_t ImageTyID = OpImageTypeMap[ImageTy];
David Neto22f144c2017-06-12 14:26:21 -04005000 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04005001 uint32_t SamplerID = VMap[Sampler];
David Neto257c3892018-04-11 13:19:45 -04005002
5003 Ops << MkId(ImageTyID) << MkId(ImageID) << MkId(SamplerID);
David Neto22f144c2017-06-12 14:26:21 -04005004
5005 uint32_t SampledImageID = nextID;
5006
David Neto87846742018-04-11 17:36:22 -04005007 auto *Inst = new SPIRVInstruction(spv::OpSampledImage, nextID++, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005008 SPIRVInstList.push_back(Inst);
5009
5010 //
5011 // Generate OpImageSampleExplicitLod.
5012 //
5013 // Ops[0] = Result Type ID
5014 // Ops[1] = Sampled Image ID
5015 // Ops[2] = Coordinate ID
5016 // Ops[3] = Image Operands Type ID
5017 // Ops[4] ... Ops[n] = Operands ID
5018 //
5019 Ops.clear();
5020
alan-bakerf67468c2019-11-25 15:51:49 -05005021 const bool is_int_image = IsIntImageType(Image->getType());
5022 uint32_t result_type = 0;
5023 if (is_int_image) {
5024 result_type = v4int32ID;
5025 } else {
5026 result_type = lookupType(Call->getType());
5027 }
5028
5029 Ops << MkId(result_type) << MkId(SampledImageID) << MkId(VMap[Coordinate])
5030 << MkNum(spv::ImageOperandsLodMask);
David Neto22f144c2017-06-12 14:26:21 -04005031
5032 Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
David Neto257c3892018-04-11 13:19:45 -04005033 Ops << MkId(VMap[CstFP0]);
David Neto22f144c2017-06-12 14:26:21 -04005034
alan-bakerf67468c2019-11-25 15:51:49 -05005035 uint32_t final_id = nextID++;
5036 VMap[&I] = final_id;
David Neto22f144c2017-06-12 14:26:21 -04005037
alan-bakerf67468c2019-11-25 15:51:49 -05005038 uint32_t image_id = final_id;
5039 if (is_int_image) {
5040 // Int image requires a bitcast from v4int to v4uint.
5041 image_id = nextID++;
5042 }
5043
5044 Inst = new SPIRVInstruction(spv::OpImageSampleExplicitLod, image_id, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005045 SPIRVInstList.push_back(Inst);
alan-bakerf67468c2019-11-25 15:51:49 -05005046
5047 if (is_int_image) {
5048 // Generate the bitcast.
5049 Ops.clear();
5050 Ops << MkId(lookupType(Call->getType())) << MkId(image_id);
5051 Inst = new SPIRVInstruction(spv::OpBitcast, final_id, Ops);
5052 SPIRVInstList.push_back(Inst);
5053 }
David Neto22f144c2017-06-12 14:26:21 -04005054 break;
5055 }
5056
alan-baker75090e42020-02-20 11:21:04 -05005057 // read_image (without a sampler) is mapped to OpImageFetch.
5058 if (clspv::IsUnsampledImageRead(Callee)) {
5059 Value *Image = Call->getArgOperand(0);
5060 Value *Coordinate = Call->getArgOperand(1);
5061
5062 //
5063 // Generate OpImageFetch
5064 //
5065 // Ops[0] = Result Type ID
5066 // Ops[1] = Image ID
5067 // Ops[2] = Coordinate ID
5068 // Ops[3] = Lod
5069 // Ops[4] = 0
5070 //
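      // Illustrative result (ids are placeholders):
      //   %texel = OpImageFetch %v4type %image %coord Lod %uint_0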
5071 SPIRVOperandList Ops;
5072
5073 const bool is_int_image = IsIntImageType(Image->getType());
5074 uint32_t result_type = 0;
5075 if (is_int_image) {
5076 result_type = v4int32ID;
5077 } else {
5078 result_type = lookupType(Call->getType());
5079 }
5080
5081 Ops << MkId(result_type) << MkId(VMap[Image]) << MkId(VMap[Coordinate])
5082 << MkNum(spv::ImageOperandsLodMask);
5083
5084 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
5085 Ops << MkId(VMap[CstInt0]);
5086
5087 uint32_t final_id = nextID++;
5088 VMap[&I] = final_id;
5089
5090 uint32_t image_id = final_id;
5091 if (is_int_image) {
5092 // Int image requires a bitcast from v4int to v4uint.
5093 image_id = nextID++;
5094 }
5095
5096 auto *Inst = new SPIRVInstruction(spv::OpImageFetch, image_id, Ops);
5097 SPIRVInstList.push_back(Inst);
5098
5099 if (is_int_image) {
5100 // Generate the bitcast.
5101 Ops.clear();
5102 Ops << MkId(lookupType(Call->getType())) << MkId(image_id);
5103 Inst = new SPIRVInstruction(spv::OpBitcast, final_id, Ops);
5104 SPIRVInstList.push_back(Inst);
5105 }
5106 break;
5107 }
5108
alan-bakerf67468c2019-11-25 15:51:49 -05005109 // write_image is mapped to OpImageWrite.
5110 if (clspv::IsImageWrite(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04005111 //
5112 // Generate OpImageWrite.
5113 //
5114 // Ops[0] = Image ID
5115 // Ops[1] = Coordinate ID
5116 // Ops[2] = Texel ID
5117 // Ops[3] = (Optional) Image Operands Type (Literal Number)
5118 // Ops[4] ... Ops[n] = (Optional) Operands ID
5119 //
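      // Illustrative result (ids are placeholders; no optional image operands
      // are emitted here):
      //   OpImageWrite %image %coord %texel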
5120 SPIRVOperandList Ops;
5121
5122 Value *Image = Call->getArgOperand(0);
5123 Value *Coordinate = Call->getArgOperand(1);
5124 Value *Texel = Call->getArgOperand(2);
5125
5126 uint32_t ImageID = VMap[Image];
David Neto22f144c2017-06-12 14:26:21 -04005127 uint32_t CoordinateID = VMap[Coordinate];
David Neto22f144c2017-06-12 14:26:21 -04005128 uint32_t TexelID = VMap[Texel];
alan-bakerf67468c2019-11-25 15:51:49 -05005129
5130 const bool is_int_image = IsIntImageType(Image->getType());
5131 if (is_int_image) {
5132 // Generate a bitcast to v4int and use it as the texel value.
5133 uint32_t castID = nextID++;
5134 Ops << MkId(v4int32ID) << MkId(TexelID);
5135 auto cast = new SPIRVInstruction(spv::OpBitcast, castID, Ops);
5136 SPIRVInstList.push_back(cast);
5137 Ops.clear();
5138 TexelID = castID;
5139 }
David Neto257c3892018-04-11 13:19:45 -04005140 Ops << MkId(ImageID) << MkId(CoordinateID) << MkId(TexelID);
David Neto22f144c2017-06-12 14:26:21 -04005141
David Neto87846742018-04-11 17:36:22 -04005142 auto *Inst = new SPIRVInstruction(spv::OpImageWrite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005143 SPIRVInstList.push_back(Inst);
5144 break;
5145 }
5146
alan-bakerce179f12019-12-06 19:02:22 -05005147 // get_image_* is mapped to OpImageQuerySize or OpImageQuerySizeLod.
5148 if (clspv::IsImageQuery(Callee)) {
David Neto5c22a252018-03-15 16:07:41 -04005149 //
alan-bakerce179f12019-12-06 19:02:22 -05005150 // Generate OpImageQuerySize[Lod]
David Neto5c22a252018-03-15 16:07:41 -04005151 //
5152 // Ops[0] = Image ID
5153 //
alan-bakerce179f12019-12-06 19:02:22 -05005154 // Result type has components equal to the dimensionality of the image,
5155 // plus 1 if the image is arrayed.
5156 //
alan-bakerf906d2b2019-12-10 11:26:23 -05005157 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
David Neto5c22a252018-03-15 16:07:41 -04005158 SPIRVOperandList Ops;
5159
5160 // Implement:
alan-bakerce179f12019-12-06 19:02:22 -05005161 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
5162 uint32_t SizesTypeID = 0;
5163
David Neto5c22a252018-03-15 16:07:41 -04005164 Value *Image = Call->getArgOperand(0);
alan-bakerce179f12019-12-06 19:02:22 -05005165 const uint32_t dim = ImageDimensionality(Image->getType());
alan-baker7150a1d2020-02-25 08:31:06 -05005166 const uint32_t components =
5167 dim + (IsArrayImageType(Image->getType()) ? 1 : 0);
alan-bakerce179f12019-12-06 19:02:22 -05005168 if (components == 1) {
alan-bakerce179f12019-12-06 19:02:22 -05005169 SizesTypeID = TypeMap[Type::getInt32Ty(Context)];
5170 } else {
alan-baker7150a1d2020-02-25 08:31:06 -05005171 SizesTypeID =
5172 TypeMap[VectorType::get(Type::getInt32Ty(Context), components)];
alan-bakerce179f12019-12-06 19:02:22 -05005173 }
David Neto5c22a252018-03-15 16:07:41 -04005174 uint32_t ImageID = VMap[Image];
David Neto257c3892018-04-11 13:19:45 -04005175 Ops << MkId(SizesTypeID) << MkId(ImageID);
alan-bakerce179f12019-12-06 19:02:22 -05005176 spv::Op query_opcode = spv::OpImageQuerySize;
5177 if (clspv::IsSampledImageType(Image->getType())) {
5178 query_opcode = spv::OpImageQuerySizeLod;
5179 // Need explicit 0 for Lod operand.
5180 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
5181 Ops << MkId(VMap[CstInt0]);
5182 }
David Neto5c22a252018-03-15 16:07:41 -04005183
5184 uint32_t SizesID = nextID++;
alan-bakerce179f12019-12-06 19:02:22 -05005185 auto *QueryInst = new SPIRVInstruction(query_opcode, SizesID, Ops);
David Neto5c22a252018-03-15 16:07:41 -04005186 SPIRVInstList.push_back(QueryInst);
5187
alan-bakerce179f12019-12-06 19:02:22 -05005188 // May require an extra instruction to create the appropriate result of
5189 // the builtin function.
5190 if (clspv::IsGetImageDim(Callee)) {
5191 if (dim == 3) {
5192 // get_image_dim returns an int4 for 3D images.
5193 //
5194 // Reset value map entry since we generated an intermediate
5195 // instruction.
5196 VMap[&I] = nextID;
David Neto5c22a252018-03-15 16:07:41 -04005197
alan-bakerce179f12019-12-06 19:02:22 -05005198 // Implement:
5199 // %result = OpCompositeConstruct %uint4 %sizes %uint_0
5200 Ops.clear();
5201 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 4)))
5202 << MkId(SizesID);
David Neto5c22a252018-03-15 16:07:41 -04005203
alan-bakerce179f12019-12-06 19:02:22 -05005204 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
5205 Ops << MkId(VMap[CstInt0]);
David Neto5c22a252018-03-15 16:07:41 -04005206
alan-bakerce179f12019-12-06 19:02:22 -05005207 auto *Inst =
5208 new SPIRVInstruction(spv::OpCompositeConstruct, nextID++, Ops);
5209 SPIRVInstList.push_back(Inst);
5210 } else if (dim != components) {
5211 // get_image_dim returns an int2 regardless of the arrayedness of the
5212 // image. If the image is arrayed, an element must be dropped from the
5213 // query result.
5214 //
5215 // Reset value map entry since we generated an intermediate
5216 // instruction.
5217 VMap[&I] = nextID;
5218
5219 // Implement:
5220 // %result = OpVectorShuffle %uint2 %sizes %sizes 0 1
5221 Ops.clear();
5222 Ops << MkId(lookupType(VectorType::get(Type::getInt32Ty(Context), 2)))
5223 << MkId(SizesID) << MkId(SizesID) << MkNum(0) << MkNum(1);
5224
5225 auto *Inst =
5226 new SPIRVInstruction(spv::OpVectorShuffle, nextID++, Ops);
5227 SPIRVInstList.push_back(Inst);
5228 }
5229 } else if (components > 1) {
5230 // Reset value map entry since we generated an intermediate instruction.
5231 VMap[&I] = nextID;
5232
5233 // Implement:
5234 // %result = OpCompositeExtract %uint %sizes <component number>
5235 Ops.clear();
5236 Ops << MkId(TypeMap[I.getType()]) << MkId(SizesID);
5237
5238 uint32_t component = 0;
5239 if (IsGetImageHeight(Callee))
5240 component = 1;
5241 else if (IsGetImageDepth(Callee))
5242 component = 2;
5243 Ops << MkNum(component);
5244
5245 auto *Inst =
5246 new SPIRVInstruction(spv::OpCompositeExtract, nextID++, Ops);
5247 SPIRVInstList.push_back(Inst);
5248 }
David Neto5c22a252018-03-15 16:07:41 -04005249 break;
5250 }
5251
David Neto22f144c2017-06-12 14:26:21 -04005252 // A call instruction is deferred because it needs the function's ID. Record
5253 // the slot's location in the SPIRVInstructionList.
5254 DeferredInsts.push_back(
5255 std::make_tuple(&I, --SPIRVInstList.end(), nextID++));
5256
David Neto3fbb4072017-10-16 11:28:14 -04005257 // Check whether the implementation of this call uses an extended
5258 // instruction plus one more value-producing instruction. If so, then
5259 // reserve the id for the extra value-producing slot.
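    // For example, clz is implemented as FindUMsb followed by an OpISub
    // from 31, so it consumes two result ids.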
5260 glsl::ExtInst EInst = getIndirectExtInstEnum(Callee->getName());
5261 if (EInst != kGlslExtInstBad) {
5262 // Reserve a spot for the extra value.
David Neto4d02a532017-09-17 12:57:44 -04005263 // Increase nextID.
David Neto22f144c2017-06-12 14:26:21 -04005264 VMap[&I] = nextID;
5265 nextID++;
5266 }
5267 break;
5268 }
5269 case Instruction::Ret: {
5270 unsigned NumOps = I.getNumOperands();
5271 if (NumOps == 0) {
5272 //
5273 // Generate OpReturn.
5274 //
David Netoef5ba2b2019-12-20 08:35:54 -05005275 SPIRVInstList.push_back(new SPIRVInstruction(spv::OpReturn));
David Neto22f144c2017-06-12 14:26:21 -04005276 } else {
5277 //
5278 // Generate OpReturnValue.
5279 //
5280
5281 // Ops[0] = Return Value ID
5282 SPIRVOperandList Ops;
David Neto257c3892018-04-11 13:19:45 -04005283
5284 Ops << MkId(VMap[I.getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04005285
David Neto87846742018-04-11 17:36:22 -04005286 auto *Inst = new SPIRVInstruction(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005287 SPIRVInstList.push_back(Inst);
5288 break;
5289 }
5290 break;
5291 }
5292 }
5293}
5294
5295void SPIRVProducerPass::GenerateFuncEpilogue() {
5296 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
5297
5298 //
5299 // Generate OpFunctionEnd
5300 //
5301
David Netoef5ba2b2019-12-20 08:35:54 -05005302 auto *Inst = new SPIRVInstruction(spv::OpFunctionEnd);
David Neto22f144c2017-06-12 14:26:21 -04005303 SPIRVInstList.push_back(Inst);
5304}
5305
5306bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05005307 // Don't specialize <4 x i8> if i8 is generally supported.
5308 if (clspv::Option::Int8Support())
5309 return false;
5310
David Neto22f144c2017-06-12 14:26:21 -04005311 LLVMContext &Context = Ty->getContext();
5312 if (Ty->isVectorTy()) {
5313 if (Ty->getVectorElementType() == Type::getInt8Ty(Context) &&
5314 Ty->getVectorNumElements() == 4) {
5315 return true;
5316 }
5317 }
5318
5319 return false;
5320}
5321
5322void SPIRVProducerPass::HandleDeferredInstruction() {
5323 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
5324 ValueMapType &VMap = getValueMap();
5325 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
5326
5327 for (auto DeferredInst = DeferredInsts.rbegin();
5328 DeferredInst != DeferredInsts.rend(); ++DeferredInst) {
5329 Value *Inst = std::get<0>(*DeferredInst);
5330 SPIRVInstructionList::iterator InsertPoint = ++std::get<1>(*DeferredInst);
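    // SPIR-V requires OpPhi instructions to appear at the start of a block,
    // so skip past any phis before choosing the insertion point.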
5331 if (InsertPoint != SPIRVInstList.end()) {
5332 while ((*InsertPoint)->getOpcode() == spv::OpPhi) {
5333 ++InsertPoint;
5334 }
5335 }
5336
5337 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
alan-baker06cad652019-12-03 17:56:47 -05005338 // Check whether this branch needs to be preceded by a merge instruction.
David Neto22f144c2017-06-12 14:26:21 -04005339 BasicBlock *BrBB = Br->getParent();
alan-baker06cad652019-12-03 17:56:47 -05005340 if (ContinueBlocks.count(BrBB)) {
David Neto22f144c2017-06-12 14:26:21 -04005341 //
5342 // Generate OpLoopMerge.
5343 //
5344 // Ops[0] = Merge Block ID
5345 // Ops[1] = Continue Target ID
5346 // Ops[2] = Loop Control
5347 SPIRVOperandList Ops;
5348
alan-baker06cad652019-12-03 17:56:47 -05005349 auto MergeBB = MergeBlocks[BrBB];
5350 auto ContinueBB = ContinueBlocks[BrBB];
David Neto22f144c2017-06-12 14:26:21 -04005351 uint32_t MergeBBID = VMap[MergeBB];
David Neto22f144c2017-06-12 14:26:21 -04005352 uint32_t ContinueBBID = VMap[ContinueBB];
David Neto257c3892018-04-11 13:19:45 -04005353 Ops << MkId(MergeBBID) << MkId(ContinueBBID)
alan-baker06cad652019-12-03 17:56:47 -05005354 << MkNum(spv::LoopControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04005355
David Neto87846742018-04-11 17:36:22 -04005356 auto *MergeInst = new SPIRVInstruction(spv::OpLoopMerge, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005357 SPIRVInstList.insert(InsertPoint, MergeInst);
alan-baker06cad652019-12-03 17:56:47 -05005358 } else if (MergeBlocks.count(BrBB)) {
5359 //
5360 // Generate OpSelectionMerge.
5361 //
5362 // Ops[0] = Merge Block ID
5363 // Ops[1] = Selection Control
5364 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04005365
alan-baker06cad652019-12-03 17:56:47 -05005366 auto MergeBB = MergeBlocks[BrBB];
5367 uint32_t MergeBBID = VMap[MergeBB];
5368 Ops << MkId(MergeBBID) << MkNum(spv::SelectionControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04005369
alan-baker06cad652019-12-03 17:56:47 -05005370 auto *MergeInst = new SPIRVInstruction(spv::OpSelectionMerge, Ops);
5371 SPIRVInstList.insert(InsertPoint, MergeInst);
David Neto22f144c2017-06-12 14:26:21 -04005372 }
5373
5374 if (Br->isConditional()) {
5375 //
5376 // Generate OpBranchConditional.
5377 //
5378 // Ops[0] = Condition ID
5379 // Ops[1] = True Label ID
5380 // Ops[2] = False Label ID
5381 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
5382 SPIRVOperandList Ops;
5383
5384 uint32_t CondID = VMap[Br->getCondition()];
David Neto22f144c2017-06-12 14:26:21 -04005385 uint32_t TrueBBID = VMap[Br->getSuccessor(0)];
David Neto22f144c2017-06-12 14:26:21 -04005386 uint32_t FalseBBID = VMap[Br->getSuccessor(1)];
David Neto257c3892018-04-11 13:19:45 -04005387
5388 Ops << MkId(CondID) << MkId(TrueBBID) << MkId(FalseBBID);
David Neto22f144c2017-06-12 14:26:21 -04005389
David Neto87846742018-04-11 17:36:22 -04005390 auto *BrInst = new SPIRVInstruction(spv::OpBranchConditional, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005391 SPIRVInstList.insert(InsertPoint, BrInst);
5392 } else {
5393 //
5394 // Generate OpBranch.
5395 //
5396 // Ops[0] = Target Label ID
5397 SPIRVOperandList Ops;
5398
5399 uint32_t TargetID = VMap[Br->getSuccessor(0)];
David Neto257c3892018-04-11 13:19:45 -04005400 Ops << MkId(TargetID);
David Neto22f144c2017-06-12 14:26:21 -04005401
David Neto87846742018-04-11 17:36:22 -04005402 SPIRVInstList.insert(InsertPoint,
5403 new SPIRVInstruction(spv::OpBranch, Ops));
David Neto22f144c2017-06-12 14:26:21 -04005404 }
5405 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5b86ed72019-02-15 08:26:50 -05005406 if (PHI->getType()->isPointerTy()) {
5407 // OpPhi on pointers requires variable pointers.
5408 setVariablePointersCapabilities(
5409 PHI->getType()->getPointerAddressSpace());
5410 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
5411 setVariablePointers(true);
5412 }
5413 }
5414
David Neto22f144c2017-06-12 14:26:21 -04005415 //
5416 // Generate OpPhi.
5417 //
5418 // Ops[0] = Result Type ID
5419 // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
5420 SPIRVOperandList Ops;
5421
David Neto257c3892018-04-11 13:19:45 -04005422 Ops << MkId(lookupType(PHI->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005423
David Neto22f144c2017-06-12 14:26:21 -04005424 for (unsigned i = 0; i < PHI->getNumIncomingValues(); i++) {
5425 uint32_t VarID = VMap[PHI->getIncomingValue(i)];
David Neto22f144c2017-06-12 14:26:21 -04005426 uint32_t ParentID = VMap[PHI->getIncomingBlock(i)];
David Neto257c3892018-04-11 13:19:45 -04005427 Ops << MkId(VarID) << MkId(ParentID);
David Neto22f144c2017-06-12 14:26:21 -04005428 }
5429
5430 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005431 InsertPoint,
5432 new SPIRVInstruction(spv::OpPhi, std::get<2>(*DeferredInst), Ops));
David Neto22f144c2017-06-12 14:26:21 -04005433 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
5434 Function *Callee = Call->getCalledFunction();
David Neto3fbb4072017-10-16 11:28:14 -04005435 auto callee_name = Callee->getName();
5436 glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04005437
5438 if (EInst) {
5439 uint32_t &ExtInstImportID = getOpExtInstImportID();
5440
5441 //
5442 // Generate OpExtInst.
5443 //
5444
5445 // Ops[0] = Result Type ID
5446 // Ops[1] = Set ID (OpExtInstImport ID)
5447 // Ops[2] = Instruction Number (Literal Number)
5448 // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
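        // Illustrative result (ids are placeholders):
        //   %result = OpExtInst %float %glsl_ext_import Sqrt %x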
5449 SPIRVOperandList Ops;
5450
David Neto862b7d82018-06-14 18:48:37 -04005451 Ops << MkId(lookupType(Call->getType())) << MkId(ExtInstImportID)
5452 << MkNum(EInst);
David Neto22f144c2017-06-12 14:26:21 -04005453
David Neto22f144c2017-06-12 14:26:21 -04005454 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5455 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
David Neto257c3892018-04-11 13:19:45 -04005456 Ops << MkId(VMap[Call->getOperand(i)]);
David Neto22f144c2017-06-12 14:26:21 -04005457 }
5458
David Neto87846742018-04-11 17:36:22 -04005459 auto *ExtInst = new SPIRVInstruction(spv::OpExtInst,
5460 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005461 SPIRVInstList.insert(InsertPoint, ExtInst);
5462
David Neto3fbb4072017-10-16 11:28:14 -04005463 const auto IndirectExtInst = getIndirectExtInstEnum(callee_name);
5464 if (IndirectExtInst != kGlslExtInstBad) {
5465 // Generate one more instruction that uses the result of the extended
5466 // instruction. Its result id is one more than the id of the
5467 // extended instruction.
David Neto22f144c2017-06-12 14:26:21 -04005468 LLVMContext &Context =
5469 Call->getParent()->getParent()->getParent()->getContext();
David Neto22f144c2017-06-12 14:26:21 -04005470
David Neto3fbb4072017-10-16 11:28:14 -04005471 auto generate_extra_inst = [this, &Context, &Call, &DeferredInst,
5472 &VMap, &SPIRVInstList, &InsertPoint](
5473 spv::Op opcode, Constant *constant) {
5474 //
5475 // Generate instruction like:
5476 // result = opcode constant <extinst-result>
5477 //
5478 // Ops[0] = Result Type ID
5479 // Ops[1] = Operand 0 ;; the constant, suitably splatted
5480 // Ops[2] = Operand 1 ;; the result of the extended instruction
5481 SPIRVOperandList Ops;
David Neto22f144c2017-06-12 14:26:21 -04005482
David Neto3fbb4072017-10-16 11:28:14 -04005483 Type *resultTy = Call->getType();
David Neto257c3892018-04-11 13:19:45 -04005484 Ops << MkId(lookupType(resultTy));
David Neto3fbb4072017-10-16 11:28:14 -04005485
5486 if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
5487 constant = ConstantVector::getSplat(
5488 static_cast<unsigned>(vectorTy->getNumElements()), constant);
5489 }
David Neto257c3892018-04-11 13:19:45 -04005490 Ops << MkId(VMap[constant]) << MkId(std::get<2>(*DeferredInst));
David Neto3fbb4072017-10-16 11:28:14 -04005491
5492 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005493 InsertPoint, new SPIRVInstruction(
5494 opcode, std::get<2>(*DeferredInst) + 1, Ops));
David Neto3fbb4072017-10-16 11:28:14 -04005495 };
5496
5497 switch (IndirectExtInst) {
5498 case glsl::ExtInstFindUMsb: // Implementing clz
5499 generate_extra_inst(
5500 spv::OpISub, ConstantInt::get(Type::getInt32Ty(Context), 31));
5501 break;
5502 case glsl::ExtInstAcos: // Implementing acospi
5503 case glsl::ExtInstAsin: // Implementing asinpi
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005504 case glsl::ExtInstAtan: // Implementing atanpi
David Neto3fbb4072017-10-16 11:28:14 -04005505 case glsl::ExtInstAtan2: // Implementing atan2pi
5506 generate_extra_inst(
5507 spv::OpFMul,
5508 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
5509 break;
5510
5511 default:
5512 assert(false && "internally inconsistent");
David Neto4d02a532017-09-17 12:57:44 -04005513 }
David Neto22f144c2017-06-12 14:26:21 -04005514 }
David Neto3fbb4072017-10-16 11:28:14 -04005515
alan-bakerb39c8262019-03-08 14:03:37 -05005516 } else if (callee_name.startswith("_Z8popcount")) {
David Neto22f144c2017-06-12 14:26:21 -04005517 //
5518 // Generate OpBitCount
5519 //
5520 // Ops[0] = Result Type ID
5521 // Ops[1] = Base ID
David Neto257c3892018-04-11 13:19:45 -04005522 SPIRVOperandList Ops;
5523 Ops << MkId(lookupType(Call->getType()))
5524 << MkId(VMap[Call->getOperand(0)]);
David Neto22f144c2017-06-12 14:26:21 -04005525
5526 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005527 InsertPoint, new SPIRVInstruction(spv::OpBitCount,
David Neto22f144c2017-06-12 14:26:21 -04005528 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005529
David Neto862b7d82018-06-14 18:48:37 -04005530 } else if (callee_name.startswith(kCompositeConstructFunctionPrefix)) {
David Netoab03f432017-11-03 17:00:44 -04005531
5532 // Generate an OpCompositeConstruct
5533 SPIRVOperandList Ops;
5534
5535 // The result type.
David Neto257c3892018-04-11 13:19:45 -04005536 Ops << MkId(lookupType(Call->getType()));
David Netoab03f432017-11-03 17:00:44 -04005537
5538 for (Use &use : Call->arg_operands()) {
David Neto257c3892018-04-11 13:19:45 -04005539 Ops << MkId(VMap[use.get()]);
David Netoab03f432017-11-03 17:00:44 -04005540 }
5541
5542 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005543 InsertPoint, new SPIRVInstruction(spv::OpCompositeConstruct,
5544 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005545
Alan Baker202c8c72018-08-13 13:47:44 -04005546 } else if (callee_name.startswith(clspv::ResourceAccessorFunction())) {
5547
5548 // We have already mapped the call's result value to an ID.
5549 // Don't generate any code now.
5550
5551 } else if (callee_name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04005552
5553 // We have already mapped the call's result value to an ID.
5554 // Don't generate any code now.
5555
David Neto22f144c2017-06-12 14:26:21 -04005556 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05005557 if (Call->getType()->isPointerTy()) {
5558 // Functions returning pointers require variable pointers.
5559 setVariablePointersCapabilities(
5560 Call->getType()->getPointerAddressSpace());
5561 }
5562
David Neto22f144c2017-06-12 14:26:21 -04005563 //
5564 // Generate OpFunctionCall.
5565 //
5566
5567 // Ops[0] = Result Type ID
5568 // Ops[1] = Callee Function ID
5569 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
5570 SPIRVOperandList Ops;
5571
David Neto862b7d82018-06-14 18:48:37 -04005572 Ops << MkId(lookupType(Call->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005573
5574 uint32_t CalleeID = VMap[Callee];
David Neto43568eb2017-10-13 18:25:25 -04005575 if (CalleeID == 0) {
5576 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04005577 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04005578 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
5579 // causes an infinite loop. Instead, go ahead and generate
5580 // the bad function call. A validator will catch the 0-Id.
5581 // llvm_unreachable("Can't translate function call");
5582 }
David Neto22f144c2017-06-12 14:26:21 -04005583
David Neto257c3892018-04-11 13:19:45 -04005584 Ops << MkId(CalleeID);
David Neto22f144c2017-06-12 14:26:21 -04005585
David Neto22f144c2017-06-12 14:26:21 -04005586 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5587 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
alan-baker5b86ed72019-02-15 08:26:50 -05005588 auto *operand = Call->getOperand(i);
alan-bakerd4d50652019-12-03 17:17:15 -05005589 auto *operand_type = operand->getType();
5590 // Images and samplers can be passed as function parameters without
5591 // variable pointers.
5592 if (operand_type->isPointerTy() && !IsImageType(operand_type) &&
5593 !IsSamplerType(operand_type)) {
alan-baker5b86ed72019-02-15 08:26:50 -05005594 auto sc =
5595 GetStorageClass(operand->getType()->getPointerAddressSpace());
5596 if (sc == spv::StorageClassStorageBuffer) {
5597 // Passing SSBO by reference requires variable pointers storage
5598 // buffer.
5599 setVariablePointersStorageBuffer(true);
5600 } else if (sc == spv::StorageClassWorkgroup) {
5601 // Workgroup references require variable pointers if they are not
5602 // memory object declarations.
5603 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
5604 // Workgroup accessor represents a variable reference.
5605 if (!operand_call->getCalledFunction()->getName().startswith(
5606 clspv::WorkgroupAccessorFunction()))
5607 setVariablePointers(true);
5608 } else {
5609 // Arguments are function parameters.
5610 if (!isa<Argument>(operand))
5611 setVariablePointers(true);
5612 }
5613 }
5614 }
5615 Ops << MkId(VMap[operand]);
David Neto22f144c2017-06-12 14:26:21 -04005616 }
5617
David Neto87846742018-04-11 17:36:22 -04005618 auto *CallInst = new SPIRVInstruction(spv::OpFunctionCall,
5619 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005620 SPIRVInstList.insert(InsertPoint, CallInst);
5621 }
5622 }
5623 }
5624}
5625
David Neto1a1a0582017-07-07 12:01:44 -04005626void SPIRVProducerPass::HandleDeferredDecorations(const DataLayout &DL) {
Alan Baker202c8c72018-08-13 13:47:44 -04005627 if (getTypesNeedingArrayStride().empty() && LocalArgSpecIds.empty()) {
David Neto1a1a0582017-07-07 12:01:44 -04005628 return;
David Netoc6f3ab22018-04-06 18:02:31 -04005629 }
David Neto1a1a0582017-07-07 12:01:44 -04005630
5631 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
David Neto1a1a0582017-07-07 12:01:44 -04005632
5633 // Find an iterator pointing just past the last decoration.
5634 bool seen_decorations = false;
5635 auto DecoInsertPoint =
5636 std::find_if(SPIRVInstList.begin(), SPIRVInstList.end(),
5637 [&seen_decorations](SPIRVInstruction *Inst) -> bool {
5638 const bool is_decoration =
5639 Inst->getOpcode() == spv::OpDecorate ||
5640 Inst->getOpcode() == spv::OpMemberDecorate;
5641 if (is_decoration) {
5642 seen_decorations = true;
5643 return false;
5644 } else {
5645 return seen_decorations;
5646 }
5647 });
5648
David Netoc6f3ab22018-04-06 18:02:31 -04005649 // Insert ArrayStride decorations on pointer types, due to OpPtrAccessChain
5650 // instructions we generated earlier.
David Neto85082642018-03-24 06:55:20 -07005651 for (auto *type : getTypesNeedingArrayStride()) {
5652 Type *elemTy = nullptr;
5653 if (auto *ptrTy = dyn_cast<PointerType>(type)) {
5654 elemTy = ptrTy->getElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005655 } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005656 elemTy = arrayTy->getArrayElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005657 } else if (auto *seqTy = dyn_cast<SequentialType>(type)) {
David Neto85082642018-03-24 06:55:20 -07005658 elemTy = seqTy->getSequentialElementType();
5659 } else {
5660 errs() << "Unhandled strided type " << *type << "\n";
5661 llvm_unreachable("Unhandled strided type");
5662 }
David Neto1a1a0582017-07-07 12:01:44 -04005663
5664 // Ops[0] = Target ID
5665 // Ops[1] = Decoration (ArrayStride)
5666 // Ops[2] = Stride number (Literal Number)
5667 SPIRVOperandList Ops;
5668
David Neto85082642018-03-24 06:55:20 -07005669 // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
Alan Bakerfcda9482018-10-02 17:09:59 -04005670 const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));
David Neto257c3892018-04-11 13:19:45 -04005671
5672 Ops << MkId(lookupType(type)) << MkNum(spv::DecorationArrayStride)
5673 << MkNum(stride);
David Neto1a1a0582017-07-07 12:01:44 -04005674
David Neto87846742018-04-11 17:36:22 -04005675 auto *DecoInst = new SPIRVInstruction(spv::OpDecorate, Ops);
David Neto1a1a0582017-07-07 12:01:44 -04005676 SPIRVInstList.insert(DecoInsertPoint, DecoInst);
5677 }
David Netoc6f3ab22018-04-06 18:02:31 -04005678
5679 // Emit SpecId decorations targeting the array size value.
Alan Baker202c8c72018-08-13 13:47:44 -04005680 for (auto spec_id = clspv::FirstLocalSpecId(); spec_id < max_local_spec_id_;
5681 ++spec_id) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005682 LocalArgInfo &arg_info = LocalSpecIdInfoMap[spec_id];
David Netoc6f3ab22018-04-06 18:02:31 -04005683 SPIRVOperandList Ops;
5684 Ops << MkId(arg_info.array_size_id) << MkNum(spv::DecorationSpecId)
5685 << MkNum(arg_info.spec_id);
5686 SPIRVInstList.insert(DecoInsertPoint,
David Neto87846742018-04-11 17:36:22 -04005687 new SPIRVInstruction(spv::OpDecorate, Ops));
David Netoc6f3ab22018-04-06 18:02:31 -04005688 }
David Neto1a1a0582017-07-07 12:01:44 -04005689}
5690
David Neto22f144c2017-06-12 14:26:21 -04005691glsl::ExtInst SPIRVProducerPass::getExtInstEnum(StringRef Name) {
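  // Maps Itanium-mangled OpenCL builtin names (e.g. "_Z3maxff" is
  // max(float, float), and "Dv4_f" encodes a float4 argument) plus a few
  // clspv/llvm helper names to GLSL.std.450 extended instructions.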
5692 return StringSwitch<glsl::ExtInst>(Name)
alan-bakerb39c8262019-03-08 14:03:37 -05005693 .Case("_Z3absc", glsl::ExtInst::ExtInstSAbs)
5694 .Case("_Z3absDv2_c", glsl::ExtInst::ExtInstSAbs)
5695 .Case("_Z3absDv3_c", glsl::ExtInst::ExtInstSAbs)
5696 .Case("_Z3absDv4_c", glsl::ExtInst::ExtInstSAbs)
Kévin Petit2444e9b2018-11-09 14:14:37 +00005697 .Case("_Z3abss", glsl::ExtInst::ExtInstSAbs)
5698 .Case("_Z3absDv2_s", glsl::ExtInst::ExtInstSAbs)
5699 .Case("_Z3absDv3_s", glsl::ExtInst::ExtInstSAbs)
5700 .Case("_Z3absDv4_s", glsl::ExtInst::ExtInstSAbs)
David Neto22f144c2017-06-12 14:26:21 -04005701 .Case("_Z3absi", glsl::ExtInst::ExtInstSAbs)
5702 .Case("_Z3absDv2_i", glsl::ExtInst::ExtInstSAbs)
5703 .Case("_Z3absDv3_i", glsl::ExtInst::ExtInstSAbs)
5704 .Case("_Z3absDv4_i", glsl::ExtInst::ExtInstSAbs)
Kévin Petit2444e9b2018-11-09 14:14:37 +00005705 .Case("_Z3absl", glsl::ExtInst::ExtInstSAbs)
5706 .Case("_Z3absDv2_l", glsl::ExtInst::ExtInstSAbs)
5707 .Case("_Z3absDv3_l", glsl::ExtInst::ExtInstSAbs)
5708 .Case("_Z3absDv4_l", glsl::ExtInst::ExtInstSAbs)
alan-bakerb39c8262019-03-08 14:03:37 -05005709 .Case("_Z5clampccc", glsl::ExtInst::ExtInstSClamp)
5710 .Case("_Z5clampDv2_cS_S_", glsl::ExtInst::ExtInstSClamp)
5711 .Case("_Z5clampDv3_cS_S_", glsl::ExtInst::ExtInstSClamp)
5712 .Case("_Z5clampDv4_cS_S_", glsl::ExtInst::ExtInstSClamp)
5713 .Case("_Z5clamphhh", glsl::ExtInst::ExtInstUClamp)
5714 .Case("_Z5clampDv2_hS_S_", glsl::ExtInst::ExtInstUClamp)
5715 .Case("_Z5clampDv3_hS_S_", glsl::ExtInst::ExtInstUClamp)
5716 .Case("_Z5clampDv4_hS_S_", glsl::ExtInst::ExtInstUClamp)
Kévin Petit495255d2019-03-06 13:56:48 +00005717 .Case("_Z5clampsss", glsl::ExtInst::ExtInstSClamp)
5718 .Case("_Z5clampDv2_sS_S_", glsl::ExtInst::ExtInstSClamp)
5719 .Case("_Z5clampDv3_sS_S_", glsl::ExtInst::ExtInstSClamp)
5720 .Case("_Z5clampDv4_sS_S_", glsl::ExtInst::ExtInstSClamp)
5721 .Case("_Z5clampttt", glsl::ExtInst::ExtInstUClamp)
5722 .Case("_Z5clampDv2_tS_S_", glsl::ExtInst::ExtInstUClamp)
5723 .Case("_Z5clampDv3_tS_S_", glsl::ExtInst::ExtInstUClamp)
5724 .Case("_Z5clampDv4_tS_S_", glsl::ExtInst::ExtInstUClamp)
David Neto22f144c2017-06-12 14:26:21 -04005725 .Case("_Z5clampiii", glsl::ExtInst::ExtInstSClamp)
5726 .Case("_Z5clampDv2_iS_S_", glsl::ExtInst::ExtInstSClamp)
5727 .Case("_Z5clampDv3_iS_S_", glsl::ExtInst::ExtInstSClamp)
5728 .Case("_Z5clampDv4_iS_S_", glsl::ExtInst::ExtInstSClamp)
5729 .Case("_Z5clampjjj", glsl::ExtInst::ExtInstUClamp)
5730 .Case("_Z5clampDv2_jS_S_", glsl::ExtInst::ExtInstUClamp)
5731 .Case("_Z5clampDv3_jS_S_", glsl::ExtInst::ExtInstUClamp)
5732 .Case("_Z5clampDv4_jS_S_", glsl::ExtInst::ExtInstUClamp)
Kévin Petit495255d2019-03-06 13:56:48 +00005733 .Case("_Z5clamplll", glsl::ExtInst::ExtInstSClamp)
5734 .Case("_Z5clampDv2_lS_S_", glsl::ExtInst::ExtInstSClamp)
5735 .Case("_Z5clampDv3_lS_S_", glsl::ExtInst::ExtInstSClamp)
5736 .Case("_Z5clampDv4_lS_S_", glsl::ExtInst::ExtInstSClamp)
5737 .Case("_Z5clampmmm", glsl::ExtInst::ExtInstUClamp)
5738 .Case("_Z5clampDv2_mS_S_", glsl::ExtInst::ExtInstUClamp)
5739 .Case("_Z5clampDv3_mS_S_", glsl::ExtInst::ExtInstUClamp)
5740 .Case("_Z5clampDv4_mS_S_", glsl::ExtInst::ExtInstUClamp)
David Neto22f144c2017-06-12 14:26:21 -04005741 .Case("_Z5clampfff", glsl::ExtInst::ExtInstFClamp)
5742 .Case("_Z5clampDv2_fS_S_", glsl::ExtInst::ExtInstFClamp)
5743 .Case("_Z5clampDv3_fS_S_", glsl::ExtInst::ExtInstFClamp)
5744 .Case("_Z5clampDv4_fS_S_", glsl::ExtInst::ExtInstFClamp)
alan-baker49bb5fb2020-01-15 08:22:13 -05005745 .Case("_Z5clampDhDhDh", glsl::ExtInst::ExtInstFClamp)
5746 .Case("_Z5clampDv2_DhS_S_", glsl::ExtInst::ExtInstFClamp)
5747 .Case("_Z5clampDv3_DhS_S_", glsl::ExtInst::ExtInstFClamp)
5748 .Case("_Z5clampDv4_DhS_S_", glsl::ExtInst::ExtInstFClamp)
alan-bakerb39c8262019-03-08 14:03:37 -05005749 .Case("_Z3maxcc", glsl::ExtInst::ExtInstSMax)
5750 .Case("_Z3maxDv2_cS_", glsl::ExtInst::ExtInstSMax)
5751 .Case("_Z3maxDv3_cS_", glsl::ExtInst::ExtInstSMax)
5752 .Case("_Z3maxDv4_cS_", glsl::ExtInst::ExtInstSMax)
5753 .Case("_Z3maxhh", glsl::ExtInst::ExtInstUMax)
5754 .Case("_Z3maxDv2_hS_", glsl::ExtInst::ExtInstUMax)
5755 .Case("_Z3maxDv3_hS_", glsl::ExtInst::ExtInstUMax)
5756 .Case("_Z3maxDv4_hS_", glsl::ExtInst::ExtInstUMax)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005757 .Case("_Z3maxss", glsl::ExtInst::ExtInstSMax)
5758 .Case("_Z3maxDv2_sS_", glsl::ExtInst::ExtInstSMax)
5759 .Case("_Z3maxDv3_sS_", glsl::ExtInst::ExtInstSMax)
5760 .Case("_Z3maxDv4_sS_", glsl::ExtInst::ExtInstSMax)
5761 .Case("_Z3maxtt", glsl::ExtInst::ExtInstUMax)
5762 .Case("_Z3maxDv2_tS_", glsl::ExtInst::ExtInstUMax)
5763 .Case("_Z3maxDv3_tS_", glsl::ExtInst::ExtInstUMax)
5764 .Case("_Z3maxDv4_tS_", glsl::ExtInst::ExtInstUMax)
David Neto22f144c2017-06-12 14:26:21 -04005765 .Case("_Z3maxii", glsl::ExtInst::ExtInstSMax)
5766 .Case("_Z3maxDv2_iS_", glsl::ExtInst::ExtInstSMax)
5767 .Case("_Z3maxDv3_iS_", glsl::ExtInst::ExtInstSMax)
5768 .Case("_Z3maxDv4_iS_", glsl::ExtInst::ExtInstSMax)
5769 .Case("_Z3maxjj", glsl::ExtInst::ExtInstUMax)
5770 .Case("_Z3maxDv2_jS_", glsl::ExtInst::ExtInstUMax)
5771 .Case("_Z3maxDv3_jS_", glsl::ExtInst::ExtInstUMax)
5772 .Case("_Z3maxDv4_jS_", glsl::ExtInst::ExtInstUMax)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005773 .Case("_Z3maxll", glsl::ExtInst::ExtInstSMax)
5774 .Case("_Z3maxDv2_lS_", glsl::ExtInst::ExtInstSMax)
5775 .Case("_Z3maxDv3_lS_", glsl::ExtInst::ExtInstSMax)
5776 .Case("_Z3maxDv4_lS_", glsl::ExtInst::ExtInstSMax)
5777 .Case("_Z3maxmm", glsl::ExtInst::ExtInstUMax)
5778 .Case("_Z3maxDv2_mS_", glsl::ExtInst::ExtInstUMax)
5779 .Case("_Z3maxDv3_mS_", glsl::ExtInst::ExtInstUMax)
5780 .Case("_Z3maxDv4_mS_", glsl::ExtInst::ExtInstUMax)
David Neto22f144c2017-06-12 14:26:21 -04005781 .Case("_Z3maxff", glsl::ExtInst::ExtInstFMax)
5782 .Case("_Z3maxDv2_fS_", glsl::ExtInst::ExtInstFMax)
5783 .Case("_Z3maxDv3_fS_", glsl::ExtInst::ExtInstFMax)
5784 .Case("_Z3maxDv4_fS_", glsl::ExtInst::ExtInstFMax)
alan-baker49bb5fb2020-01-15 08:22:13 -05005785 .Case("_Z3maxDhDh", glsl::ExtInst::ExtInstFMax)
5786 .Case("_Z3maxDv2_DhS_", glsl::ExtInst::ExtInstFMax)
5787 .Case("_Z3maxDv3_DhS_", glsl::ExtInst::ExtInstFMax)
5788 .Case("_Z3maxDv4_DhS_", glsl::ExtInst::ExtInstFMax)
David Neto22f144c2017-06-12 14:26:21 -04005789 .StartsWith("_Z4fmax", glsl::ExtInst::ExtInstFMax)
alan-bakerb39c8262019-03-08 14:03:37 -05005790 .Case("_Z3mincc", glsl::ExtInst::ExtInstSMin)
5791 .Case("_Z3minDv2_cS_", glsl::ExtInst::ExtInstSMin)
5792 .Case("_Z3minDv3_cS_", glsl::ExtInst::ExtInstSMin)
5793 .Case("_Z3minDv4_cS_", glsl::ExtInst::ExtInstSMin)
5794 .Case("_Z3minhh", glsl::ExtInst::ExtInstUMin)
5795 .Case("_Z3minDv2_hS_", glsl::ExtInst::ExtInstUMin)
5796 .Case("_Z3minDv3_hS_", glsl::ExtInst::ExtInstUMin)
5797 .Case("_Z3minDv4_hS_", glsl::ExtInst::ExtInstUMin)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005798 .Case("_Z3minss", glsl::ExtInst::ExtInstSMin)
5799 .Case("_Z3minDv2_sS_", glsl::ExtInst::ExtInstSMin)
5800 .Case("_Z3minDv3_sS_", glsl::ExtInst::ExtInstSMin)
5801 .Case("_Z3minDv4_sS_", glsl::ExtInst::ExtInstSMin)
5802 .Case("_Z3mintt", glsl::ExtInst::ExtInstUMin)
5803 .Case("_Z3minDv2_tS_", glsl::ExtInst::ExtInstUMin)
5804 .Case("_Z3minDv3_tS_", glsl::ExtInst::ExtInstUMin)
5805 .Case("_Z3minDv4_tS_", glsl::ExtInst::ExtInstUMin)
David Neto22f144c2017-06-12 14:26:21 -04005806 .Case("_Z3minii", glsl::ExtInst::ExtInstSMin)
5807 .Case("_Z3minDv2_iS_", glsl::ExtInst::ExtInstSMin)
5808 .Case("_Z3minDv3_iS_", glsl::ExtInst::ExtInstSMin)
5809 .Case("_Z3minDv4_iS_", glsl::ExtInst::ExtInstSMin)
5810 .Case("_Z3minjj", glsl::ExtInst::ExtInstUMin)
5811 .Case("_Z3minDv2_jS_", glsl::ExtInst::ExtInstUMin)
5812 .Case("_Z3minDv3_jS_", glsl::ExtInst::ExtInstUMin)
5813 .Case("_Z3minDv4_jS_", glsl::ExtInst::ExtInstUMin)
Kévin Petit9e1971c2018-11-09 14:17:18 +00005814 .Case("_Z3minll", glsl::ExtInst::ExtInstSMin)
5815 .Case("_Z3minDv2_lS_", glsl::ExtInst::ExtInstSMin)
5816 .Case("_Z3minDv3_lS_", glsl::ExtInst::ExtInstSMin)
5817 .Case("_Z3minDv4_lS_", glsl::ExtInst::ExtInstSMin)
5818 .Case("_Z3minmm", glsl::ExtInst::ExtInstUMin)
5819 .Case("_Z3minDv2_mS_", glsl::ExtInst::ExtInstUMin)
5820 .Case("_Z3minDv3_mS_", glsl::ExtInst::ExtInstUMin)
5821 .Case("_Z3minDv4_mS_", glsl::ExtInst::ExtInstUMin)
David Neto22f144c2017-06-12 14:26:21 -04005822 .Case("_Z3minff", glsl::ExtInst::ExtInstFMin)
5823 .Case("_Z3minDv2_fS_", glsl::ExtInst::ExtInstFMin)
5824 .Case("_Z3minDv3_fS_", glsl::ExtInst::ExtInstFMin)
5825 .Case("_Z3minDv4_fS_", glsl::ExtInst::ExtInstFMin)
alan-baker49bb5fb2020-01-15 08:22:13 -05005826 .Case("_Z3minDhDh", glsl::ExtInst::ExtInstFMin)
5827 .Case("_Z3minDv2_DhS_", glsl::ExtInst::ExtInstFMin)
5828 .Case("_Z3minDv3_DhS_", glsl::ExtInst::ExtInstFMin)
5829 .Case("_Z3minDv4_DhS_", glsl::ExtInst::ExtInstFMin)
David Neto22f144c2017-06-12 14:26:21 -04005830 .StartsWith("_Z4fmin", glsl::ExtInst::ExtInstFMin)
5831 .StartsWith("_Z7degrees", glsl::ExtInst::ExtInstDegrees)
5832 .StartsWith("_Z7radians", glsl::ExtInst::ExtInstRadians)
5833 .StartsWith("_Z3mix", glsl::ExtInst::ExtInstFMix)
5834 .StartsWith("_Z4acos", glsl::ExtInst::ExtInstAcos)
5835 .StartsWith("_Z5acosh", glsl::ExtInst::ExtInstAcosh)
5836 .StartsWith("_Z4asin", glsl::ExtInst::ExtInstAsin)
5837 .StartsWith("_Z5asinh", glsl::ExtInst::ExtInstAsinh)
5838 .StartsWith("_Z4atan", glsl::ExtInst::ExtInstAtan)
5839 .StartsWith("_Z5atan2", glsl::ExtInst::ExtInstAtan2)
5840 .StartsWith("_Z5atanh", glsl::ExtInst::ExtInstAtanh)
5841 .StartsWith("_Z4ceil", glsl::ExtInst::ExtInstCeil)
5842 .StartsWith("_Z3sin", glsl::ExtInst::ExtInstSin)
5843 .StartsWith("_Z4sinh", glsl::ExtInst::ExtInstSinh)
5844 .StartsWith("_Z8half_sin", glsl::ExtInst::ExtInstSin)
5845 .StartsWith("_Z10native_sin", glsl::ExtInst::ExtInstSin)
5846 .StartsWith("_Z3cos", glsl::ExtInst::ExtInstCos)
5847 .StartsWith("_Z4cosh", glsl::ExtInst::ExtInstCosh)
5848 .StartsWith("_Z8half_cos", glsl::ExtInst::ExtInstCos)
5849 .StartsWith("_Z10native_cos", glsl::ExtInst::ExtInstCos)
5850 .StartsWith("_Z3tan", glsl::ExtInst::ExtInstTan)
5851 .StartsWith("_Z4tanh", glsl::ExtInst::ExtInstTanh)
5852 .StartsWith("_Z8half_tan", glsl::ExtInst::ExtInstTan)
5853 .StartsWith("_Z10native_tan", glsl::ExtInst::ExtInstTan)
5854 .StartsWith("_Z3exp", glsl::ExtInst::ExtInstExp)
5855 .StartsWith("_Z8half_exp", glsl::ExtInst::ExtInstExp)
5856 .StartsWith("_Z10native_exp", glsl::ExtInst::ExtInstExp)
5857 .StartsWith("_Z4exp2", glsl::ExtInst::ExtInstExp2)
5858 .StartsWith("_Z9half_exp2", glsl::ExtInst::ExtInstExp2)
5859 .StartsWith("_Z11native_exp2", glsl::ExtInst::ExtInstExp2)
5860 .StartsWith("_Z3log", glsl::ExtInst::ExtInstLog)
5861 .StartsWith("_Z8half_log", glsl::ExtInst::ExtInstLog)
5862 .StartsWith("_Z10native_log", glsl::ExtInst::ExtInstLog)
5863 .StartsWith("_Z4log2", glsl::ExtInst::ExtInstLog2)
5864 .StartsWith("_Z9half_log2", glsl::ExtInst::ExtInstLog2)
5865 .StartsWith("_Z11native_log2", glsl::ExtInst::ExtInstLog2)
5866 .StartsWith("_Z4fabs", glsl::ExtInst::ExtInstFAbs)
kpet3458e942018-10-03 14:35:21 +01005867 .StartsWith("_Z3fma", glsl::ExtInst::ExtInstFma)
David Neto22f144c2017-06-12 14:26:21 -04005868 .StartsWith("_Z5floor", glsl::ExtInst::ExtInstFloor)
5869 .StartsWith("_Z5ldexp", glsl::ExtInst::ExtInstLdexp)
5870 .StartsWith("_Z3pow", glsl::ExtInst::ExtInstPow)
5871 .StartsWith("_Z4powr", glsl::ExtInst::ExtInstPow)
5872 .StartsWith("_Z9half_powr", glsl::ExtInst::ExtInstPow)
5873 .StartsWith("_Z11native_powr", glsl::ExtInst::ExtInstPow)
5874 .StartsWith("_Z5round", glsl::ExtInst::ExtInstRound)
5875 .StartsWith("_Z4sqrt", glsl::ExtInst::ExtInstSqrt)
5876 .StartsWith("_Z9half_sqrt", glsl::ExtInst::ExtInstSqrt)
5877 .StartsWith("_Z11native_sqrt", glsl::ExtInst::ExtInstSqrt)
5878 .StartsWith("_Z5rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5879 .StartsWith("_Z10half_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5880 .StartsWith("_Z12native_rsqrt", glsl::ExtInst::ExtInstInverseSqrt)
5881 .StartsWith("_Z5trunc", glsl::ExtInst::ExtInstTrunc)
5882 .StartsWith("_Z5frexp", glsl::ExtInst::ExtInstFrexp)
5883 .StartsWith("_Z4sign", glsl::ExtInst::ExtInstFSign)
5884 .StartsWith("_Z6length", glsl::ExtInst::ExtInstLength)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005885 .StartsWith("_Z11fast_length", glsl::ExtInst::ExtInstLength)
David Neto22f144c2017-06-12 14:26:21 -04005886 .StartsWith("_Z8distance", glsl::ExtInst::ExtInstDistance)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005887 .StartsWith("_Z13fast_distance", glsl::ExtInst::ExtInstDistance)
David Netoe9a03512017-10-16 10:08:27 -04005888 .StartsWith("_Z4step", glsl::ExtInst::ExtInstStep)
kpet6fd2a262018-10-03 14:48:01 +01005889 .StartsWith("_Z10smoothstep", glsl::ExtInst::ExtInstSmoothStep)
David Neto22f144c2017-06-12 14:26:21 -04005890 .Case("_Z5crossDv3_fS_", glsl::ExtInst::ExtInstCross)
5891 .StartsWith("_Z9normalize", glsl::ExtInst::ExtInstNormalize)
Kévin Petit7d09cec2018-09-22 15:43:38 +01005892 .StartsWith("_Z14fast_normalize", glsl::ExtInst::ExtInstNormalize)
David Neto22f144c2017-06-12 14:26:21 -04005893 .StartsWith("llvm.fmuladd.", glsl::ExtInst::ExtInstFma)
5894 .Case("spirv.unpack.v2f16", glsl::ExtInst::ExtInstUnpackHalf2x16)
5895 .Case("spirv.pack.v2f16", glsl::ExtInst::ExtInstPackHalf2x16)
David Neto62653202017-10-16 19:05:18 -04005896 .Case("clspv.fract.f", glsl::ExtInst::ExtInstFract)
5897 .Case("clspv.fract.v2f", glsl::ExtInst::ExtInstFract)
5898 .Case("clspv.fract.v3f", glsl::ExtInst::ExtInstFract)
5899 .Case("clspv.fract.v4f", glsl::ExtInst::ExtInstFract)
David Neto3fbb4072017-10-16 11:28:14 -04005900 .Default(kGlslExtInstBad);
5901}
5902
5903glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(StringRef Name) {
5904 // Check indirect cases.
5905 return StringSwitch<glsl::ExtInst>(Name)
5906 .StartsWith("_Z3clz", glsl::ExtInst::ExtInstFindUMsb)
5907 // Use an exact match on the float arg because these need a multiply
5908 // by a constant of the right floating point type.
5909 .Case("_Z6acospif", glsl::ExtInst::ExtInstAcos)
5910 .Case("_Z6acospiDv2_f", glsl::ExtInst::ExtInstAcos)
5911 .Case("_Z6acospiDv3_f", glsl::ExtInst::ExtInstAcos)
5912 .Case("_Z6acospiDv4_f", glsl::ExtInst::ExtInstAcos)
5913 .Case("_Z6asinpif", glsl::ExtInst::ExtInstAsin)
5914 .Case("_Z6asinpiDv2_f", glsl::ExtInst::ExtInstAsin)
5915 .Case("_Z6asinpiDv3_f", glsl::ExtInst::ExtInstAsin)
5916 .Case("_Z6asinpiDv4_f", glsl::ExtInst::ExtInstAsin)
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005917 .Case("_Z6atanpif", glsl::ExtInst::ExtInstAtan)
5918 .Case("_Z6atanpiDv2_f", glsl::ExtInst::ExtInstAtan)
5919 .Case("_Z6atanpiDv3_f", glsl::ExtInst::ExtInstAtan)
5920 .Case("_Z6atanpiDv4_f", glsl::ExtInst::ExtInstAtan)
David Neto3fbb4072017-10-16 11:28:14 -04005921 .Case("_Z7atan2piff", glsl::ExtInst::ExtInstAtan2)
5922 .Case("_Z7atan2piDv2_fS_", glsl::ExtInst::ExtInstAtan2)
5923 .Case("_Z7atan2piDv3_fS_", glsl::ExtInst::ExtInstAtan2)
5924 .Case("_Z7atan2piDv4_fS_", glsl::ExtInst::ExtInstAtan2)
5925 .Default(kGlslExtInstBad);
5926}
5927
alan-bakerb6b09dc2018-11-08 16:59:28 -05005928glsl::ExtInst
5929SPIRVProducerPass::getDirectOrIndirectExtInstEnum(StringRef Name) {
David Neto3fbb4072017-10-16 11:28:14 -04005930 auto direct = getExtInstEnum(Name);
5931 if (direct != kGlslExtInstBad)
5932 return direct;
5933 return getIndirectExtInstEnum(Name);
David Neto22f144c2017-06-12 14:26:21 -04005934}
5935
David Neto22f144c2017-06-12 14:26:21 -04005936void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
David Neto0676e6f2017-07-11 18:47:44 -04005937 binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
David Neto22f144c2017-06-12 14:26:21 -04005938}
5939
5940void SPIRVProducerPass::WriteResultID(SPIRVInstruction *Inst) {
5941 WriteOneWord(Inst->getResultID());
5942}
5943
5944void SPIRVProducerPass::WriteWordCountAndOpcode(SPIRVInstruction *Inst) {
5945 // High 16 bits: Word Count
5946 // Low 16 bits: Opcode
5947 uint32_t Word = Inst->getOpcode();
David Netoee2660d2018-06-28 16:31:29 -04005948 const uint32_t count = Inst->getWordCount();
5949 if (count > 65535) {
5950 errs() << "Word count limit of 65535 exceeded: " << count << "\n";
5951 llvm_unreachable("Word count too high");
5952 }
David Neto22f144c2017-06-12 14:26:21 -04005953 Word |= Inst->getWordCount() << 16;
5954 WriteOneWord(Word);
5955}
5956
David Netoef5ba2b2019-12-20 08:35:54 -05005957void SPIRVProducerPass::WriteOperand(const std::unique_ptr<SPIRVOperand> &Op) {
David Neto22f144c2017-06-12 14:26:21 -04005958 SPIRVOperandType OpTy = Op->getType();
5959 switch (OpTy) {
5960 default: {
5961 llvm_unreachable("Unsupported SPIRV Operand Type???");
5962 break;
5963 }
5964 case SPIRVOperandType::NUMBERID: {
5965 WriteOneWord(Op->getNumID());
5966 break;
5967 }
5968 case SPIRVOperandType::LITERAL_STRING: {
5969 std::string Str = Op->getLiteralStr();
5970 const char *Data = Str.c_str();
5971 size_t WordSize = Str.size() / 4;
5972 for (unsigned Idx = 0; Idx < WordSize; Idx++) {
5973 WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
5974 }
5975
5976 uint32_t Remainder = Str.size() % 4;
5977 uint32_t LastWord = 0;
5978 if (Remainder) {
5979 for (unsigned Idx = 0; Idx < Remainder; Idx++) {
5980 LastWord |= Data[4 * WordSize + Idx] << 8 * Idx;
5981 }
5982 }
5983
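  // The final word carries any remaining bytes, zero-padded. It is written
  // even when the length is a multiple of four so the literal string is
  // always nul-terminated.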
5984 WriteOneWord(LastWord);
5985 break;
5986 }
5987 case SPIRVOperandType::LITERAL_INTEGER:
5988 case SPIRVOperandType::LITERAL_FLOAT: {
5989 auto LiteralNum = Op->getLiteralNum();
5990 // TODO: Handle LiteralNum carefully.
5991 for (auto Word : LiteralNum) {
5992 WriteOneWord(Word);
5993 }
5994 break;
5995 }
5996 }
5997}
5998
5999void SPIRVProducerPass::WriteSPIRVBinary() {
6000 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList();
6001
6002 for (auto Inst : SPIRVInstList) {
David Netoef5ba2b2019-12-20 08:35:54 -05006003 const auto &Ops = Inst->getOperands();
David Neto22f144c2017-06-12 14:26:21 -04006004 spv::Op Opcode = static_cast<spv::Op>(Inst->getOpcode());
6005
6006 switch (Opcode) {
6007 default: {
David Neto5c22a252018-03-15 16:07:41 -04006008 errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
David Neto22f144c2017-06-12 14:26:21 -04006009 llvm_unreachable("Unsupported SPIRV instruction");
6010 break;
6011 }
6012 case spv::OpCapability:
6013 case spv::OpExtension:
6014 case spv::OpMemoryModel:
6015 case spv::OpEntryPoint:
6016 case spv::OpExecutionMode:
6017 case spv::OpSource:
6018 case spv::OpDecorate:
6019 case spv::OpMemberDecorate:
6020 case spv::OpBranch:
6021 case spv::OpBranchConditional:
6022 case spv::OpSelectionMerge:
6023 case spv::OpLoopMerge:
6024 case spv::OpStore:
6025 case spv::OpImageWrite:
6026 case spv::OpReturnValue:
6027 case spv::OpControlBarrier:
6028 case spv::OpMemoryBarrier:
6029 case spv::OpReturn:
6030 case spv::OpFunctionEnd:
6031 case spv::OpCopyMemory: {
6032 WriteWordCountAndOpcode(Inst);
6033 for (uint32_t i = 0; i < Ops.size(); i++) {
6034 WriteOperand(Ops[i]);
6035 }
6036 break;
6037 }
6038 case spv::OpTypeBool:
6039 case spv::OpTypeVoid:
6040 case spv::OpTypeSampler:
6041 case spv::OpLabel:
6042 case spv::OpExtInstImport:
6043 case spv::OpTypePointer:
6044 case spv::OpTypeRuntimeArray:
6045 case spv::OpTypeStruct:
6046 case spv::OpTypeImage:
6047 case spv::OpTypeSampledImage:
6048 case spv::OpTypeInt:
6049 case spv::OpTypeFloat:
6050 case spv::OpTypeArray:
6051 case spv::OpTypeVector:
6052 case spv::OpTypeFunction: {
6053 WriteWordCountAndOpcode(Inst);
6054 WriteResultID(Inst);
6055 for (uint32_t i = 0; i < Ops.size(); i++) {
6056 WriteOperand(Ops[i]);
6057 }
6058 break;
6059 }
6060 case spv::OpFunction:
6061 case spv::OpFunctionParameter:
6062 case spv::OpAccessChain:
6063 case spv::OpPtrAccessChain:
6064 case spv::OpInBoundsAccessChain:
6065 case spv::OpUConvert:
6066 case spv::OpSConvert:
6067 case spv::OpConvertFToU:
6068 case spv::OpConvertFToS:
6069 case spv::OpConvertUToF:
6070 case spv::OpConvertSToF:
6071 case spv::OpFConvert:
6072 case spv::OpConvertPtrToU:
6073 case spv::OpConvertUToPtr:
6074 case spv::OpBitcast:
alan-bakerc9c55ae2019-12-02 16:01:27 -05006075 case spv::OpFNegate:
David Neto22f144c2017-06-12 14:26:21 -04006076 case spv::OpIAdd:
6077 case spv::OpFAdd:
6078 case spv::OpISub:
6079 case spv::OpFSub:
6080 case spv::OpIMul:
6081 case spv::OpFMul:
6082 case spv::OpUDiv:
6083 case spv::OpSDiv:
6084 case spv::OpFDiv:
6085 case spv::OpUMod:
6086 case spv::OpSRem:
6087 case spv::OpFRem:
Kévin Petit8a560882019-03-21 15:24:34 +00006088 case spv::OpUMulExtended:
6089 case spv::OpSMulExtended:
David Neto22f144c2017-06-12 14:26:21 -04006090 case spv::OpBitwiseOr:
6091 case spv::OpBitwiseXor:
6092 case spv::OpBitwiseAnd:
David Netoa394f392017-08-26 20:45:29 -04006093 case spv::OpNot:
David Neto22f144c2017-06-12 14:26:21 -04006094 case spv::OpShiftLeftLogical:
6095 case spv::OpShiftRightLogical:
6096 case spv::OpShiftRightArithmetic:
6097 case spv::OpBitCount:
David Netoab03f432017-11-03 17:00:44 -04006098 case spv::OpCompositeConstruct:
David Neto22f144c2017-06-12 14:26:21 -04006099 case spv::OpCompositeExtract:
6100 case spv::OpVectorExtractDynamic:
6101 case spv::OpCompositeInsert:
David Neto0a2f98d2017-09-15 19:38:40 -04006102 case spv::OpCopyObject:
David Neto22f144c2017-06-12 14:26:21 -04006103 case spv::OpVectorInsertDynamic:
6104 case spv::OpVectorShuffle:
6105 case spv::OpIEqual:
6106 case spv::OpINotEqual:
6107 case spv::OpUGreaterThan:
6108 case spv::OpUGreaterThanEqual:
6109 case spv::OpULessThan:
6110 case spv::OpULessThanEqual:
6111 case spv::OpSGreaterThan:
6112 case spv::OpSGreaterThanEqual:
6113 case spv::OpSLessThan:
6114 case spv::OpSLessThanEqual:
6115 case spv::OpFOrdEqual:
6116 case spv::OpFOrdGreaterThan:
6117 case spv::OpFOrdGreaterThanEqual:
6118 case spv::OpFOrdLessThan:
6119 case spv::OpFOrdLessThanEqual:
6120 case spv::OpFOrdNotEqual:
6121 case spv::OpFUnordEqual:
6122 case spv::OpFUnordGreaterThan:
6123 case spv::OpFUnordGreaterThanEqual:
6124 case spv::OpFUnordLessThan:
6125 case spv::OpFUnordLessThanEqual:
6126 case spv::OpFUnordNotEqual:
6127 case spv::OpExtInst:
6128 case spv::OpIsInf:
6129 case spv::OpIsNan:
6130 case spv::OpAny:
6131 case spv::OpAll:
6132 case spv::OpUndef:
6133 case spv::OpConstantNull:
6134 case spv::OpLogicalOr:
6135 case spv::OpLogicalAnd:
6136 case spv::OpLogicalNot:
6137 case spv::OpLogicalNotEqual:
6138 case spv::OpConstantComposite:
6139 case spv::OpSpecConstantComposite:
6140 case spv::OpConstantTrue:
6141 case spv::OpConstantFalse:
6142 case spv::OpConstant:
6143 case spv::OpSpecConstant:
6144 case spv::OpVariable:
6145 case spv::OpFunctionCall:
6146 case spv::OpSampledImage:
alan-baker75090e42020-02-20 11:21:04 -05006147 case spv::OpImageFetch:
David Neto22f144c2017-06-12 14:26:21 -04006148 case spv::OpImageSampleExplicitLod:
David Neto5c22a252018-03-15 16:07:41 -04006149 case spv::OpImageQuerySize:
alan-bakerce179f12019-12-06 19:02:22 -05006150 case spv::OpImageQuerySizeLod:
David Neto22f144c2017-06-12 14:26:21 -04006151 case spv::OpSelect:
6152 case spv::OpPhi:
6153 case spv::OpLoad:
6154 case spv::OpAtomicIAdd:
6155 case spv::OpAtomicISub:
6156 case spv::OpAtomicExchange:
6157 case spv::OpAtomicIIncrement:
6158 case spv::OpAtomicIDecrement:
6159 case spv::OpAtomicCompareExchange:
6160 case spv::OpAtomicUMin:
6161 case spv::OpAtomicSMin:
6162 case spv::OpAtomicUMax:
6163 case spv::OpAtomicSMax:
6164 case spv::OpAtomicAnd:
6165 case spv::OpAtomicOr:
6166 case spv::OpAtomicXor:
6167 case spv::OpDot: {
6168 WriteWordCountAndOpcode(Inst);
6169 WriteOperand(Ops[0]);
6170 WriteResultID(Inst);
6171 for (uint32_t i = 1; i < Ops.size(); i++) {
6172 WriteOperand(Ops[i]);
6173 }
6174 break;
6175 }
6176 }
6177 }
6178}
Alan Baker9bf93fb2018-08-28 16:59:26 -04006179
alan-bakerb6b09dc2018-11-08 16:59:28 -05006180bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
Alan Baker9bf93fb2018-08-28 16:59:26 -04006181 switch (type->getTypeID()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05006182 case Type::HalfTyID:
6183 case Type::FloatTyID:
6184 case Type::DoubleTyID:
6185 case Type::IntegerTyID:
6186 case Type::VectorTyID:
6187 return true;
6188 case Type::PointerTyID: {
6189 const PointerType *pointer_type = cast<PointerType>(type);
6190 if (pointer_type->getPointerAddressSpace() !=
6191 AddressSpace::UniformConstant) {
6192 auto pointee_type = pointer_type->getPointerElementType();
6193 if (pointee_type->isStructTy() &&
6194 cast<StructType>(pointee_type)->isOpaque()) {
6195 // Images and samplers are not nullable.
6196 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04006197 }
Alan Baker9bf93fb2018-08-28 16:59:26 -04006198 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05006199 return true;
6200 }
6201 case Type::ArrayTyID:
6202 return IsTypeNullable(cast<CompositeType>(type)->getTypeAtIndex(0u));
6203 case Type::StructTyID: {
6204 const StructType *struct_type = cast<StructType>(type);
6205 // Images and samplers are not nullable.
6206 if (struct_type->isOpaque())
Alan Baker9bf93fb2018-08-28 16:59:26 -04006207 return false;
alan-bakerb6b09dc2018-11-08 16:59:28 -05006208 for (const auto element : struct_type->elements()) {
6209 if (!IsTypeNullable(element))
6210 return false;
6211 }
6212 return true;
6213 }
6214 default:
6215 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04006216 }
6217}
Alan Bakerfcda9482018-10-02 17:09:59 -04006218
6219void SPIRVProducerPass::PopulateUBOTypeMaps(Module &module) {
6220 if (auto *offsets_md =
6221 module.getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
6222 // Metdata is stored as key-value pair operands. The first element of each
6223 // operand is the type and the second is a vector of offsets.
6224 for (const auto *operand : offsets_md->operands()) {
6225 const auto *pair = cast<MDTuple>(operand);
6226 auto *type =
6227 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
6228 const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
6229 std::vector<uint32_t> offsets;
6230 for (const Metadata *offset_md : offset_vector->operands()) {
6231 const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
alan-bakerb6b09dc2018-11-08 16:59:28 -05006232 offsets.push_back(static_cast<uint32_t>(
6233 cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
Alan Bakerfcda9482018-10-02 17:09:59 -04006234 }
6235 RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
6236 }
6237 }
6238
6239 if (auto *sizes_md =
6240 module.getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
6241 // Metadata is stored as key-value pair operands. The first element of each
6242 // operand is the type and the second is a triple of sizes: type size in
6243 // bits, store size and alloc size.
6244 for (const auto *operand : sizes_md->operands()) {
6245 const auto *pair = cast<MDTuple>(operand);
6246 auto *type =
6247 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
6248 const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
6249 uint64_t type_size_in_bits =
6250 cast<ConstantInt>(
6251 cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
6252 ->getZExtValue();
6253 uint64_t type_store_size =
6254 cast<ConstantInt>(
6255 cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
6256 ->getZExtValue();
6257 uint64_t type_alloc_size =
6258 cast<ConstantInt>(
6259 cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
6260 ->getZExtValue();
6261 RemappedUBOTypeSizes.insert(std::make_pair(
6262 type, std::make_tuple(type_size_in_bits, type_store_size,
6263 type_alloc_size)));
6264 }
6265 }
6266}
6267
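// Returns the size of |type| in bits, preferring the remapped UBO size over
// the data layout when one was recorded.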
uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
                                              const DataLayout &DL) {
  auto iter = RemappedUBOTypeSizes.find(type);
  if (iter != RemappedUBOTypeSizes.end()) {
    return std::get<0>(iter->second);
  }

  return DL.getTypeSizeInBits(type);
}

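// Returns the store size of |type| in bytes, preferring the remapped UBO size
// over the data layout when one was recorded.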
uint64_t SPIRVProducerPass::GetTypeStoreSize(Type *type, const DataLayout &DL) {
  auto iter = RemappedUBOTypeSizes.find(type);
  if (iter != RemappedUBOTypeSizes.end()) {
    return std::get<1>(iter->second);
  }

  return DL.getTypeStoreSize(type);
}

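// Returns the alloc size of |type| in bytes, preferring the remapped UBO size
// over the data layout when one was recorded.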
uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
  auto iter = RemappedUBOTypeSizes.find(type);
  if (iter != RemappedUBOTypeSizes.end()) {
    return std::get<2>(iter->second);
  }

  return DL.getTypeAllocSize(type);
}

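// Returns the byte offset of |member| within the explicitly laid out struct
// |type|, using the remapped offsets when the struct was rewritten for UBO
// layout and falling back to the data layout otherwise.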
uint32_t SPIRVProducerPass::GetExplicitLayoutStructMemberOffset(
    StructType *type, unsigned member, const DataLayout &DL) {
  const auto StructLayout = DL.getStructLayout(type);
  // Search for the correct offsets if this type was remapped.
  std::vector<uint32_t> *offsets = nullptr;
  auto iter = RemappedUBOTypeOffsets.find(type);
  if (iter != RemappedUBOTypeOffsets.end()) {
    offsets = &iter->second;
  }
  auto ByteOffset =
      static_cast<uint32_t>(StructLayout->getElementOffset(member));
  if (offsets) {
    ByteOffset = (*offsets)[member];
  }

  return ByteOffset;
}

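// Records which variable-pointers capability a pointer in |address_space|
// requires: VariablePointersStorageBuffer for storage buffer pointers, the
// full VariablePointers capability otherwise.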
void SPIRVProducerPass::setVariablePointersCapabilities(
    unsigned address_space) {
  if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
    setVariablePointersStorageBuffer(true);
  } else {
    setVariablePointers(true);
  }
}

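// Walks through GEPs to find the base pointer that |v| is derived from.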
Value *SPIRVProducerPass::GetBasePointer(Value *v) {
  if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
    return GetBasePointer(gep->getPointerOperand());
  }

  // Conservatively return |v|.
  return v;
}

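// Returns true if |lhs| and |rhs| can be shown to access the same resource:
// either both are resource accessor calls with matching descriptor set and
// binding, or both are workgroup accessor calls with matching spec id.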
bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
  if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
    if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
      if (lhs_call->getCalledFunction()->getName().startswith(
              clspv::ResourceAccessorFunction()) &&
          rhs_call->getCalledFunction()->getName().startswith(
              clspv::ResourceAccessorFunction())) {
        // For resource accessors, match descriptor set and binding.
        if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
            lhs_call->getOperand(1) == rhs_call->getOperand(1))
          return true;
      } else if (lhs_call->getCalledFunction()->getName().startswith(
                     clspv::WorkgroupAccessorFunction()) &&
                 rhs_call->getCalledFunction()->getName().startswith(
                     clspv::WorkgroupAccessorFunction())) {
        // For workgroup resources, match spec id.
        if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
          return true;
      }
    }
  }

  return false;
}

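// Returns true if the select or phi |inst| provably chooses between pointers
// into the same storage buffer object (null and, with -hack-undef, undef
// operands also qualify), satisfying the same-object constraint on variable
// pointers into storage buffers.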
bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
  assert(inst->getType()->isPointerTy());
  assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
         spv::StorageClassStorageBuffer);
  const bool hack_undef = clspv::Option::HackUndef();
  if (auto *select = dyn_cast<SelectInst>(inst)) {
    auto *true_base = GetBasePointer(select->getTrueValue());
    auto *false_base = GetBasePointer(select->getFalseValue());

    if (true_base == false_base)
      return true;

    // If either the true or false operand is null, then we satisfy the
    // same-object constraint.
    if (auto *true_cst = dyn_cast<Constant>(true_base)) {
      if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
        return true;
    }

    if (auto *false_cst = dyn_cast<Constant>(false_base)) {
      if (false_cst->isNullValue() ||
          (hack_undef && isa<UndefValue>(false_base)))
        return true;
    }

    if (sameResource(true_base, false_base))
      return true;
  } else if (auto *phi = dyn_cast<PHINode>(inst)) {
    Value *value = nullptr;
    bool ok = true;
    for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
      auto *base = GetBasePointer(phi->getIncomingValue(i));
      // Null values satisfy the constraint of selecting from the same object.
      if (!value) {
        if (auto *cst = dyn_cast<Constant>(base)) {
          if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
            value = base;
        } else {
          value = base;
        }
      } else if (base != value) {
        if (auto *base_cst = dyn_cast<Constant>(base)) {
          if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
            continue;
        }

        if (sameResource(value, base))
          continue;

        // Values don't represent the same base.
        ok = false;
      }
    }

    return ok;
  }

  // Conservatively return false.
  return false;
}

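// Returns true if any caller of |Arg|'s function passes a value for |Arg|
// that is derived from a coherent resource variable. Traces backwards through
// callers, users, and global pointer operands until a resource accessor call
// with its coherent operand set is found.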
bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
  if (!Arg.getType()->isPointerTy() ||
      Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
    // Only SSBOs need to be annotated as coherent.
    return false;
  }

  DenseSet<Value *> visited;
  std::vector<Value *> stack;
  for (auto *U : Arg.getParent()->users()) {
    if (auto *call = dyn_cast<CallInst>(U)) {
      stack.push_back(call->getOperand(Arg.getArgNo()));
    }
  }

  while (!stack.empty()) {
    Value *v = stack.back();
    stack.pop_back();

    if (!visited.insert(v).second)
      continue;

    auto *resource_call = dyn_cast<CallInst>(v);
    if (resource_call &&
        resource_call->getCalledFunction()->getName().startswith(
            clspv::ResourceAccessorFunction())) {
      // If this is a resource accessor function, check if the coherent operand
      // is set.
      const auto coherent =
          unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
                       ->getZExtValue());
      if (coherent == 1)
        return true;
    } else if (auto *arg = dyn_cast<Argument>(v)) {
      // If this is a function argument, trace through its callers.
      for (auto U : arg->getParent()->users()) {
        if (auto *call = dyn_cast<CallInst>(U)) {
          stack.push_back(call->getOperand(arg->getArgNo()));
        }
      }
    } else if (auto *user = dyn_cast<User>(v)) {
      // If this is a user, traverse all operands that could lead to resource
      // variables.
      for (unsigned i = 0; i != user->getNumOperands(); ++i) {
        Value *operand = user->getOperand(i);
        if (operand->getType()->isPointerTy() &&
            operand->getType()->getPointerAddressSpace() ==
                clspv::AddressSpace::Global) {
          stack.push_back(operand);
        }
      }
    }
  }

  // No coherent resource variables encountered.
  return false;
}

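// Computes the structured CFG information needed for SPIR-V: for each loop
// header, its merge and continue blocks, and for each conditional branch that
// is not a loop back-edge, its merge block. Results are recorded in
// MergeBlocks and ContinueBlocks.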
void SPIRVProducerPass::PopulateStructuredCFGMaps(Module &module) {
  // First, track loop merges and continues.
  DenseSet<BasicBlock *> LoopMergesAndContinues;
  for (auto &F : module) {
    if (F.isDeclaration())
      continue;

    DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
    const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>(F).getLoopInfo();
    std::deque<BasicBlock *> order;
    DenseSet<BasicBlock *> visited;
    clspv::ComputeStructuredOrder(&*F.begin(), &DT, LI, &order, &visited);

    for (auto BB : order) {
      auto terminator = BB->getTerminator();
      auto branch = dyn_cast<BranchInst>(terminator);
      if (LI.isLoopHeader(BB)) {
        auto L = LI.getLoopFor(BB);
        BasicBlock *ContinueBB = nullptr;
        BasicBlock *MergeBB = nullptr;

        MergeBB = L->getExitBlock();
        if (!MergeBB) {
          // The StructurizeCFG pass converts the CFG into a triangle shape
          // where each region has a single entry and exit. As a result, a
          // loop should not have multiple exits.
          llvm_unreachable("Loop has multiple exits???");
        }

        if (L->isLoopLatch(BB)) {
          ContinueBB = BB;
        } else {
          // From SPIR-V spec section 2.11, the Continue Target must dominate
          // the back-edge block.
          BasicBlock *Header = L->getHeader();
          BasicBlock *Latch = L->getLoopLatch();
          for (auto *loop_block : L->blocks()) {
            if (loop_block == Header) {
              continue;
            }

            // Check whether this block dominates the block with the
            // back-edge. The loop latch is the single block with a back-edge.
            // If it was possible, StructurizeCFG made the loop conform to
            // this requirement; otherwise |Latch| is a nullptr.
            if (DT.dominates(loop_block, Latch)) {
              ContinueBB = loop_block;
            }
          }

          if (!ContinueBB) {
            llvm_unreachable("Wrong continue block from loop");
          }
        }

        // Record the continue and merge blocks.
        MergeBlocks[BB] = MergeBB;
        ContinueBlocks[BB] = ContinueBB;
        LoopMergesAndContinues.insert(MergeBB);
        LoopMergesAndContinues.insert(ContinueBB);
      } else if (branch && branch->isConditional()) {
        auto L = LI.getLoopFor(BB);
        bool HasBackedge = false;
        while (L && !HasBackedge) {
          if (L->isLoopLatch(BB)) {
            HasBackedge = true;
          }
          L = L->getParentLoop();
        }

        if (!HasBackedge) {
          // Only need a merge if the branch doesn't include a loop break or
          // continue.
          auto true_bb = branch->getSuccessor(0);
          auto false_bb = branch->getSuccessor(1);
          if (!LoopMergesAndContinues.count(true_bb) &&
              !LoopMergesAndContinues.count(false_bb)) {
            // The StructurizeCFG pass has already manipulated the CFG. Just
            // use the false block of the branch instruction as the merge
            // block.
            MergeBlocks[BB] = false_bb;
          }
        }
      }
    }
  }
}