// Copyright 2017 The Clspv Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifdef _MSC_VER
#pragma warning(push, 0)
#endif

#include <cassert>
#include <cstring>
#include <iomanip>
#include <list>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <tuple>
#include <unordered_set>
#include <utility>

#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/UniqueVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Cloning.h"

// enable spv::HasResultAndType
#define SPV_ENABLE_UTILITY_CODE
#include "spirv/unified1/spirv.hpp"

#include "clspv/AddressSpace.h"
#include "clspv/DescriptorMap.h"
#include "clspv/Option.h"
#include "clspv/spirv_c_strings.hpp"
#include "clspv/spirv_glsl.hpp"

#include "ArgKind.h"
#include "Builtins.h"
#include "ComputeStructuredOrder.h"
#include "ConstantEmitter.h"
#include "Constants.h"
#include "DescriptorCounter.h"
#include "Layout.h"
#include "NormalizeGlobalVariable.h"
#include "Passes.h"
#include "SpecConstant.h"
#include "Types.h"

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

using namespace llvm;
using namespace clspv;
using namespace clspv::Builtins;
using namespace mdconst;

namespace {

cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

cl::opt<bool>
    ShowProducerIR("show-producer-ir", cl::init(false), cl::ReallyHidden,
                   cl::desc("Dump the IR at the start of SPIRVProducer"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);

const char *kCompositeConstructFunctionPrefix = "clspv.composite_construct.";

// SPIRV Module Sections (per 2.4 of the SPIRV spec)
// These are used to collect SPIRVInstructions by type on-the-fly.
enum SPIRVSection {
  kCapabilities,
  kExtensions,
  kImports,
  kMemoryModel,
  kEntryPoints,
  kExecutionModes,

  kDebug,
  kAnnotations,

  kTypes,
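  // Note: constants are collected into the same section as types, since
  // SPIR-V's logical layout puts types, constants, and module-scope variables
  // in a single section.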
  kConstants = kTypes,
  kGlobalVariables,

  kFunctions,

  kSectionCount
};

typedef uint32_t SPIRVID;

enum SPIRVOperandType {
  NUMBERID,
  LITERAL_INTEGER,
  LITERAL_STRING,
  LITERAL_FLOAT
};

struct SPIRVOperand {
  explicit SPIRVOperand(SPIRVOperandType Ty, uint32_t Num)
      : Type(Ty), LiteralNum(1, Num) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, const char *Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, ArrayRef<uint32_t> NumVec)
      : Type(Ty), LiteralNum(NumVec.begin(), NumVec.end()) {}

  SPIRVOperandType getType() const { return Type; };
  uint32_t getNumID() const { return LiteralNum[0]; };
  std::string getLiteralStr() const { return LiteralStr; };
  ArrayRef<uint32_t> getLiteralNum() const { return LiteralNum; };

  uint32_t GetNumWords() const {
    switch (Type) {
    case NUMBERID:
      return 1;
    case LITERAL_INTEGER:
    case LITERAL_FLOAT:
      return uint32_t(LiteralNum.size());
    case LITERAL_STRING:
      // Account for the terminating null character.
      return uint32_t((LiteralStr.size() + 4) / 4);
    }
    llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
  }

private:
  SPIRVOperandType Type;
  std::string LiteralStr;
  SmallVector<uint32_t, 4> LiteralNum;
};

typedef SmallVector<std::unique_ptr<SPIRVOperand>, 4> SPIRVOperandVec;

SPIRVOperandVec &operator<<(SPIRVOperandVec &list,
                            std::unique_ptr<SPIRVOperand> elem) {
  list.push_back(std::move(elem));
  return list;
}

std::unique_ptr<SPIRVOperand> MkNum(uint32_t num) {
  return std::make_unique<SPIRVOperand>(LITERAL_INTEGER, num);
}
std::unique_ptr<SPIRVOperand> MkInteger(ArrayRef<uint32_t> num_vec) {
  return std::make_unique<SPIRVOperand>(LITERAL_INTEGER, num_vec);
}
std::unique_ptr<SPIRVOperand> MkFloat(ArrayRef<uint32_t> num_vec) {
  return std::make_unique<SPIRVOperand>(LITERAL_FLOAT, num_vec);
}
std::unique_ptr<SPIRVOperand> MkId(uint32_t id) {
  return std::make_unique<SPIRVOperand>(NUMBERID, id);
}
std::unique_ptr<SPIRVOperand> MkString(StringRef str) {
  return std::make_unique<SPIRVOperand>(LITERAL_STRING, str);
}
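
// For example, an operand list can be built up with the streaming operator and
// the Mk* helpers above:
//   SPIRVOperandVec Ops;
//   Ops << MkId(some_type_id) << MkNum(42) << MkString("name");
// (some_type_id is just an illustrative SPIR-V ID here.)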

struct SPIRVInstruction {
  // Primary constructor must have Opcode, initializes WordCount based on ResID.
  SPIRVInstruction(spv::Op Opc, SPIRVID ResID = 0)
      : Opcode(static_cast<uint16_t>(Opc)) {
    setResult(ResID);
  }

  // Creates an instruction with an opcode and no result ID, and with the given
  // operands. This calls primary constructor to initialize Opcode, WordCount.
  // Takes ownership of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandVec &Ops) : SPIRVInstruction(Opc) {
    setOperands(Ops);
  }
  // Creates an instruction with an opcode, a result ID, and the given
  // operands. This calls primary constructor to initialize Opcode, WordCount.
  // Takes ownership of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVID ResID, SPIRVOperandVec &Ops)
      : SPIRVInstruction(Opc, ResID) {
    setOperands(Ops);
  }

  uint32_t getWordCount() const { return WordCount; }
  uint16_t getOpcode() const { return Opcode; }
  uint32_t getResultID() const { return ResultID; }
  ArrayRef<std::unique_ptr<SPIRVOperand>> getOperands() const {
    return Operands;
  }

private:
  void setResult(uint32_t ResID = 0) {
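    // One word for the opcode/word-count word, plus one for the result ID if
    // the instruction produces a result.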
    WordCount = 1 + (ResID != 0 ? 1 : 0);
    ResultID = ResID;
  }

  void setOperands(SPIRVOperandVec &Ops) {
    assert(Operands.empty());
    Operands = std::move(Ops);
    for (auto &opd : Operands) {
      WordCount += uint16_t(opd->GetNumWords());
    }
  }

private:
  uint32_t WordCount; // Check the 16-bit bound at code generation time.
  uint16_t Opcode;
  uint32_t ResultID;
  SPIRVOperandVec Operands;
};

struct SPIRVProducerPass final : public ModulePass {
  typedef DenseMap<Type *, uint32_t> TypeMapType;
  typedef UniqueVector<Type *> TypeList;
  typedef DenseMap<Value *, uint32_t> ValueMapType;
  typedef UniqueVector<Value *> ValueList;
  typedef std::vector<std::pair<Value *, uint32_t>> EntryPointVecType;
  typedef std::list<SPIRVInstruction *> SPIRVInstructionList;
  // A vector of tuples, each of which is:
  // - the LLVM instruction that we will later generate SPIR-V code for
  // - where the SPIR-V instruction should be inserted
  // - the result ID of the SPIR-V instruction
  typedef std::vector<
      std::tuple<Value *, SPIRVInstructionList::iterator, uint32_t>>
      DeferredInstVecType;
  typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
      GlobalConstFuncMapType;

  explicit SPIRVProducerPass(
      raw_pwrite_stream &out,
      std::vector<clspv::version0::DescriptorMapEntry> *descriptor_map_entries,
      ArrayRef<std::pair<unsigned, std::string>> samplerMap,
      bool outputCInitList)
      : ModulePass(ID), samplerMap(samplerMap), out(out),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(&out),
        descriptorMapEntries(descriptor_map_entries),
        outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0) {}

  virtual ~SPIRVProducerPass() {
    for (int i = 0; i < kSectionCount; ++i) {
      for (auto *Inst : SPIRVSections[i]) {
        delete Inst;
      }
    }
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<LoopInfoWrapperPass>();
  }

  virtual bool runOnModule(Module &module) override;

  // output the SPIR-V header block
  void outputHeader();

  // patch the SPIR-V header block
  void patchHeader();

  TypeMapType &getImageTypeMap() { return ImageTypeMap; }
  TypeList &getTypeList() { return Types; };
  ValueList &getConstantList() { return Constants; };
  ValueMapType &getValueMap() { return ValueMap; }
  SPIRVInstructionList &getSPIRVInstList(SPIRVSection Section) {
    return SPIRVSections[Section];
  };
  EntryPointVecType &getEntryPointVec() { return EntryPointVec; };
  DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; };
  ValueList &getEntryPointInterfacesVec() { return EntryPointInterfacesVec; };
  uint32_t &getOpExtInstImportID() { return OpExtInstImportID; };
  std::vector<uint32_t> &getBuiltinDimVec() { return BuiltinDimensionVec; };

  bool hasVariablePointersStorageBuffer() {
    return HasVariablePointersStorageBuffer;
  }
  void setVariablePointersStorageBuffer(bool Val) {
    HasVariablePointersStorageBuffer = Val;
  }
  bool hasVariablePointers() { return HasVariablePointers; };
  void setVariablePointers(bool Val) { HasVariablePointers = Val; };
  ArrayRef<std::pair<unsigned, std::string>> &getSamplerMap() {
    return samplerMap;
  }
  GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
    return GlobalConstFuncTypeMap;
  }
  SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
    return GlobalConstArgumentSet;
  }
  TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }

  void GenerateLLVMIRInfo();
  // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
  // *not* be converted to a storage buffer, replace each such global variable
  // with one in the storage class expected by SPIR-V.
  void FindGlobalConstVars();
  // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
  // ModuleOrderedResourceVars.
  void FindResourceVars();
  bool FindExtInst();
  void FindTypePerGlobalVar(GlobalVariable &GV);
  void FindTypePerFunc(Function &F);
  void FindTypesForSamplerMap();
  void FindTypesForResourceVars();
  // Inserts |Ty| and relevant sub-types into the |Types| member, indicating
  // that |Ty| and its subtypes will need a corresponding SPIR-V type.
  void FindType(Type *Ty);
  void FindConstantPerGlobalVar(GlobalVariable &GV);
  void FindConstantPerFunc(Function &F);
  void FindConstant(Value *V);

  // Lookup or create Types, Constants.
  // Returns SPIRVID once it has been created.
  SPIRVID getSPIRVType(Type *Ty);
  SPIRVID getSPIRVConstant(Constant *Cst);
  // Lookup SPIRVID of llvm::Value, may create Constant.
  SPIRVID getSPIRVValue(Value *V);

  void GenerateExtInstImport();
  // Generates instructions for SPIR-V types corresponding to the LLVM types
  // saved in the |Types| member. A type follows its subtypes. IDs are
  // allocated sequentially starting with the current value of nextID, and
  // with a type following its subtypes. Also updates nextID to just beyond
  // the last generated ID.
  void GenerateSPIRVTypes();
  void GenerateSPIRVConstants();
  void GenerateModuleInfo();
  void GeneratePushConstantDescriptorMapEntries();
  void GenerateSpecConstantDescriptorMapEntries();
  void GenerateGlobalVar(GlobalVariable &GV);
  void GenerateWorkgroupVars();
  // Generate descriptor map entries for resource variables associated with
  // arguments to F.
  void GenerateDescriptorMapInfo(Function &F);
  void GenerateSamplers();
  // Generate OpVariables for %clspv.resource.var.* calls.
  void GenerateResourceVars();
  void GenerateFuncPrologue(Function &F);
  void GenerateFuncBody(Function &F);
  void GenerateEntryPointInitialStores();
  spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
  spv::Op GetSPIRVCastOpcode(Instruction &I);
  spv::Op GetSPIRVBinaryOpcode(Instruction &I);
  void GenerateInstruction(Instruction &I);
  void GenerateFuncEpilogue();
  void HandleDeferredInstruction();
  void HandleDeferredDecorations();
  bool is4xi8vec(Type *Ty) const;
  spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
  spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
  spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
  // Returns the GLSL extended instruction enum that the given function
  // call maps to. If none, then returns the 0 value, i.e. GLSLstd450Bad.
  glsl::ExtInst getExtInstEnum(StringRef Name);
  // Returns the GLSL extended instruction enum indirectly used by the given
  // function. That is, to implement the given function, we use an extended
  // instruction plus one more instruction. If none, then returns the 0 value,
  // i.e. GLSLstd450Bad.
  glsl::ExtInst getIndirectExtInstEnum(StringRef Name);
  // Returns the single GLSL extended instruction used directly or
  // indirectly by the given function call.
  glsl::ExtInst getDirectOrIndirectExtInstEnum(StringRef Name);
  void WriteOneWord(uint32_t Word);
  void WriteResultID(SPIRVInstruction *Inst);
  void WriteWordCountAndOpcode(SPIRVInstruction *Inst);
  void WriteOperand(const std::unique_ptr<SPIRVOperand> &Op);
  void WriteSPIRVBinary();
  void WriteSPIRVBinary(SPIRVInstructionList &SPIRVInstList);

  // Returns true if |type| is compatible with OpConstantNull.
  bool IsTypeNullable(const Type *type) const;

  // Populate UBO remapped type maps.
  void PopulateUBOTypeMaps();

  // Populate the merge and continue block maps.
  void PopulateStructuredCFGMaps();

  // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
  // uses the internal map, otherwise it falls back on the data layout.
  uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
  uint64_t GetTypeStoreSize(Type *type, const DataLayout &DL);
  uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);
  uint32_t GetExplicitLayoutStructMemberOffset(StructType *type,
                                               unsigned member,
                                               const DataLayout &DL);

  // Returns the base pointer of |v|.
  Value *GetBasePointer(Value *v);

  // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| based on
  // |address_space|.
  void setVariablePointersCapabilities(unsigned address_space);

  // Returns true if |lhs| and |rhs| represent the same resource or workgroup
  // variable.
  bool sameResource(Value *lhs, Value *rhs) const;

  // Returns true if |inst| is phi or select that selects from the same
  // structure (or null).
  bool selectFromSameObject(Instruction *inst);

  // Returns true if |Arg| is called with a coherent resource.
  bool CalledWithCoherentResource(Argument &Arg);

  //
  // Primary interface for adding SPIRVInstructions to a SPIRVSection.
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Opcode, SPIRVOperandVec &Operands) {
    bool has_result, has_result_type;
    spv::HasResultAndType(Opcode, &has_result, &has_result_type);
    SPIRVID RID = has_result ? incrNextID() : 0;
    SPIRVInstruction *I = new SPIRVInstruction(Opcode, RID, Operands);
    SPIRVSections[TSection].push_back(I);
    return RID;
  }
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Op) {
    SPIRVOperandVec Ops;
    return addSPIRVInst<TSection>(Op, Ops);
  }
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Op, uint32_t V) {
    SPIRVOperandVec Ops;
    Ops << MkNum(V);
    return addSPIRVInst<TSection>(Op, Ops);
  }
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Op, const char *V) {
    SPIRVOperandVec Ops;
    Ops << MkString(V);
    return addSPIRVInst<TSection>(Op, Ops);
  }
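  // For example (a sketch with illustrative IDs only), a decoration could be
  // added to the annotations section as:
  //   SPIRVOperandVec Ops;
  //   Ops << MkId(struct_type_id) << MkNum(spv::DecorationBlock);
  //   addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);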

private:
  static char ID;

  Module *module;

  ArrayRef<std::pair<unsigned, std::string>> samplerMap;
  raw_pwrite_stream &out;

  // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
  // convert to other formats on demand?

  // When emitting a C initialization list, the WriteSPIRVBinary method
  // will actually write its words to this vector via binaryTempOut.
  SmallVector<char, 100> binaryTempUnderlyingVector;
  raw_svector_ostream binaryTempOut;

  // Binary output writes to this stream, which might be |out| or
  // |binaryTempOut|. It's the latter when we really want to write a C
  // initializer list.
  raw_pwrite_stream *binaryOut;
  std::vector<version0::DescriptorMapEntry> *descriptorMapEntries;
  const bool outputCInitList; // If true, output looks like {0x7023, ... , 5}
  uint64_t patchBoundOffset;
  uint32_t nextID;

  SPIRVID incrNextID() { return nextID++; }

  // ID for OpTypeInt 32 1.
  uint32_t int32ID = 0;
  // ID for OpTypeVector %int 4.
  uint32_t v4int32ID = 0;

  // Maps an LLVM Type pointer to the corresponding SPIR-V ID.
  TypeMapType TypeMap;
  // Maps an LLVM image type to its SPIR-V ID.
  TypeMapType ImageTypeMap;
  // A unique-vector of LLVM types that map to a SPIR-V type.
  TypeList Types;
  ValueList Constants;
  // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
  ValueMapType ValueMap;
  SPIRVInstructionList SPIRVSections[kSectionCount];

  EntryPointVecType EntryPointVec;
  DeferredInstVecType DeferredInstVec;
  ValueList EntryPointInterfacesVec;
  uint32_t OpExtInstImportID;
  std::vector<uint32_t> BuiltinDimensionVec;
  bool HasVariablePointersStorageBuffer;
  bool HasVariablePointers;
  Type *SamplerTy;
  DenseMap<unsigned, unsigned> SamplerLiteralToIDMap;

  // If a function F has a pointer-to-__constant parameter, then this variable
  // will map F's type to (G, index of the parameter), where in a first phase
  // G is F's type. During FindTypePerFunc, G will be changed to F's type
  // but replacing the pointer-to-constant parameter with
  // pointer-to-ModuleScopePrivate.
  // TODO(dneto): This doesn't seem general enough? A function might have
  // more than one such parameter.
  GlobalConstFuncMapType GlobalConstFuncTypeMap;
  SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
  // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
  // or array types, and which point into transparent memory (StorageBuffer
  // storage class). These will require an ArrayStride decoration.
  // See SPV_KHR_variable_pointers rev 13.
  TypeList TypesNeedingArrayStride;

  // This is truly ugly, but works around what look like driver bugs.
  // For get_local_size, an earlier part of the flow has created a module-scope
  // variable in Private address space to hold the value for the workgroup
  // size. Its initializer is a uint3 value marked as builtin WorkgroupSize.
  // When this is present, save the IDs of the initializer value and variable
  // in these two variables. We only ever do a vector load from it, and
  // when we see one of those, substitute just the value of the initializer.
  // This mimics what Glslang does, and that's what drivers are used to.
  // TODO(dneto): Remove this once drivers are fixed.
  uint32_t WorkgroupSizeValueID;
  uint32_t WorkgroupSizeVarID;

  // Bookkeeping for mapping kernel arguments to resource variables.
  struct ResourceVarInfo {
    ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
                    Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
        : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
          var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
          addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
    const int index; // Index into ResourceVarInfoList
    const unsigned descriptor_set;
    const unsigned binding;
    Function *const var_fn; // The @clspv.resource.var.* function.
    const clspv::ArgKind arg_kind;
    const int coherent;
    const unsigned addr_space; // The LLVM address space
    // The SPIR-V ID of the OpVariable. Not populated at construction time.
    uint32_t var_id = 0;
  };
  // A list of resource var info. Each one corresponds to a module-scope
  // resource variable we will have to create. Resource var indices are
  // indices into this vector.
  SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
  // This is a vector of pointers of all the resource vars, but ordered by
  // kernel function, and then by argument.
  UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
  // Map a function to the ordered list of resource variables it uses, one for
  // each argument. If an argument does not use a resource variable, it
  // will have a null pointer entry.
  using FunctionToResourceVarsMapType =
      DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
  FunctionToResourceVarsMapType FunctionToResourceVarsMap;

  // What LLVM types map to SPIR-V types needing layout? These are the
  // arrays and structures supporting storage buffers and uniform buffers.
  TypeList TypesNeedingLayout;
  // What LLVM struct types map to a SPIR-V struct type with Block decoration?
  UniqueVector<StructType *> StructTypesNeedingBlock;
  // For a call that represents a load from an opaque type (samplers, images),
  // map it to the variable id it should load from.
  DenseMap<CallInst *, uint32_t> ResourceVarDeferredLoadCalls;

  // An ordered list of the kernel arguments of type pointer-to-local.
  using LocalArgList = SmallVector<Argument *, 8>;
  LocalArgList LocalArgs;
  // Information about a pointer-to-local argument.
  struct LocalArgInfo {
    // The SPIR-V ID of the array variable.
    uint32_t variable_id;
    // The element type of the array.
    Type *elem_type;
    // The ID of the array size constant.
    uint32_t array_size_id;
    // The ID of the array type.
    uint32_t array_type_id;
    // The ID of the pointer to the array type.
    uint32_t ptr_array_type_id;
    // The specialization constant ID of the array size.
    int spec_id;
  };
  // A mapping from Argument to its assigned SpecId.
  DenseMap<const Argument *, int> LocalArgSpecIds;
  // A mapping from SpecId to its LocalArgInfo.
  DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
  // A mapping from a remapped type to its real offsets.
  DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
  // A mapping from a remapped type to its real sizes.
  DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
      RemappedUBOTypeSizes;

  // Maps basic block to its merge block.
  DenseMap<BasicBlock *, BasicBlock *> MergeBlocks;
  // Maps basic block to its continue block.
  DenseMap<BasicBlock *, BasicBlock *> ContinueBlocks;
};

char SPIRVProducerPass::ID;

} // namespace

namespace clspv {
ModulePass *createSPIRVProducerPass(
    raw_pwrite_stream &out,
    std::vector<version0::DescriptorMapEntry> *descriptor_map_entries,
    ArrayRef<std::pair<unsigned, std::string>> samplerMap,
    bool outputCInitList) {
  return new SPIRVProducerPass(out, descriptor_map_entries, samplerMap,
                               outputCInitList);
}
} // namespace clspv

bool SPIRVProducerPass::runOnModule(Module &M) {
  module = &M;
  if (ShowProducerIR) {
    llvm::outs() << *module << "\n";
  }
  binaryOut = outputCInitList ? &binaryTempOut : &out;

  PopulateUBOTypeMaps();
  PopulateStructuredCFGMaps();

  // SPIR-V always begins with its header information
  outputHeader();

  // Gather information from the LLVM IR that we require.
  GenerateLLVMIRInfo();

  // Collect information on global variables too.
  for (GlobalVariable &GV : module->globals()) {
    // If the GV is one of our special __spirv_* variables, remove the
    // initializer as it was only placed there to force LLVM to not throw the
    // value away.
    if (GV.getName().startswith("__spirv_") ||
        GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
      GV.setInitializer(nullptr);
    }

    // Collect types' information from global variable.
    FindTypePerGlobalVar(GV);

    // Collect constant information from global variable.
    FindConstantPerGlobalVar(GV);

    // If the variable is an input, entry points need to know about it.
    if (AddressSpace::Input == GV.getType()->getPointerAddressSpace()) {
      getEntryPointInterfacesVec().insert(&GV);
    }
  }

  // If there are extended instructions, generate OpExtInstImport.
  if (FindExtInst()) {
    GenerateExtInstImport();
  }

  // Generate SPIRV instructions for types.
  GenerateSPIRVTypes();

  // Generate SPIRV constants.
  GenerateSPIRVConstants();

  // Generate literal samplers if necessary.
  GenerateSamplers();

  // Generate descriptor map entries for all push constants
  GeneratePushConstantDescriptorMapEntries();

  // Generate SPIRV variables.
  for (GlobalVariable &GV : module->globals()) {
    GenerateGlobalVar(GV);
  }
  GenerateResourceVars();
  GenerateWorkgroupVars();

  // Generate SPIRV instructions for each function.
  for (Function &F : *module) {
    if (F.isDeclaration()) {
      continue;
    }

    GenerateDescriptorMapInfo(F);

    // Generate Function Prologue.
    GenerateFuncPrologue(F);

    // Generate SPIRV instructions for function body.
    GenerateFuncBody(F);

    // Generate Function Epilogue.
    GenerateFuncEpilogue();
  }

  HandleDeferredInstruction();
  HandleDeferredDecorations();

  // Generate descriptor map entries for module scope specialization constants.
  GenerateSpecConstantDescriptorMapEntries();

  // Generate SPIRV module information.
  GenerateModuleInfo();

  WriteSPIRVBinary();

  // We need to patch the SPIR-V header to set bound correctly.
  patchHeader();

  if (outputCInitList) {
    bool first = true;
    std::ostringstream os;

    auto emit_word = [&os, &first](uint32_t word) {
      if (!first)
        os << ",\n";
      os << word;
      first = false;
    };

    os << "{";
    const std::string str(binaryTempOut.str());
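    // Reassemble each group of four bytes into a little-endian 32-bit word.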
    for (unsigned i = 0; i < str.size(); i += 4) {
      const uint32_t a = static_cast<unsigned char>(str[i]);
      const uint32_t b = static_cast<unsigned char>(str[i + 1]);
      const uint32_t c = static_cast<unsigned char>(str[i + 2]);
      const uint32_t d = static_cast<unsigned char>(str[i + 3]);
      emit_word(a | (b << 8) | (c << 16) | (d << 24));
    }
    os << "}\n";
    out << os.str();
  }

  return false;
}

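// The SPIR-V header is five words: magic number, version, generator word,
// ID bound, and the reserved schema word (zero). The bound is only known once
// all IDs are allocated, so outputHeader() writes a placeholder that
// patchHeader() later overwrites.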
void SPIRVProducerPass::outputHeader() {
  binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
                   sizeof(spv::MagicNumber));
  const uint32_t spv_version = 0x10000; // SPIR-V 1.0
  binaryOut->write(reinterpret_cast<const char *>(&spv_version),
                   sizeof(spv_version));

  // use Google's vendor ID
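  // The generator word carries the registered tool ID in its upper 16 bits;
  // the lower 16 bits (the tool version) are left as zero here.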
  const uint32_t vendor = 21 << 16;
  binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));

  // we record where we need to come back to and patch in the bound value
  patchBoundOffset = binaryOut->tell();

  // output a bad bound for now
  binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));

  // output the schema (reserved for use and must be 0)
  const uint32_t schema = 0;
  binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
}

void SPIRVProducerPass::patchHeader() {
  // for a binary we just write the value of nextID over bound
  binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
                    patchBoundOffset);
}

void SPIRVProducerPass::GenerateLLVMIRInfo() {
  // This function generates LLVM IR for a function, such as global variables
  // for arguments, constants, and pointer types for argument access. This
  // information is artificial because we need Vulkan SPIR-V output. This
  // function is executed ahead of FindType and FindConstant.
  LLVMContext &Context = module->getContext();

  FindGlobalConstVars();

  FindResourceVars();

  bool HasWorkGroupBuiltin = false;
  for (GlobalVariable &GV : module->globals()) {
    const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
    if (spv::BuiltInWorkgroupSize == BuiltinType) {
      HasWorkGroupBuiltin = true;
    }
  }

  FindTypesForSamplerMap();
  FindTypesForResourceVars();

  for (Function &F : *module) {
    if (F.isDeclaration()) {
      continue;
    }

    for (BasicBlock &BB : F) {
      for (Instruction &I : BB) {
        if (I.getOpcode() == Instruction::ZExt ||
            I.getOpcode() == Instruction::SExt ||
            I.getOpcode() == Instruction::UIToFP) {
          // A zext, sext, or uitofp from an i1 type will be lowered to
          // OpSelect, which needs the two selectable constants, so they are
          // registered here.

          auto OpTy = I.getOperand(0)->getType();

          if (OpTy->isIntOrIntVectorTy(1)) {
            if (I.getOpcode() == Instruction::ZExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::get(I.getType(), 1));
            } else if (I.getOpcode() == Instruction::SExt) {
              FindConstant(Constant::getNullValue(I.getType()));
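              // A sign-extended i1 true value is all ones, i.e. -1.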
              FindConstant(ConstantInt::getSigned(I.getType(), -1));
            } else {
              FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
              FindConstant(ConstantFP::get(Context, APFloat(1.0f)));
            }
          }
        } else if (CallInst *Call = dyn_cast<CallInst>(&I)) {
          StringRef callee_name = Call->getCalledFunction()->getName();

          if (IsSampledImageRead(callee_name)) {
            // All sampled reads need a floating point 0 for the Lod operand.
            FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
          } else if (IsUnsampledImageRead(callee_name)) {
            // All unsampled reads need an integer 0 for the Lod operand.
            FindConstant(ConstantInt::get(Context, APInt(32, 0)));
          } else if (IsImageQuery(callee_name)) {
            Type *ImageTy = Call->getOperand(0)->getType();
            const uint32_t dim = ImageDimensionality(ImageTy);
            uint32_t components =
                dim + (clspv::IsArrayImageType(ImageTy) ? 1 : 0);
            if (components > 1) {
              // OpImageQuerySize* return |components| components.
              FindType(VectorType::get(Type::getInt32Ty(Context), components));
              if (dim == 3 && IsGetImageDim(callee_name)) {
                // get_image_dim for 3D images returns an int4.
                FindType(
                    VectorType::get(Type::getInt32Ty(Context), components + 1));
              }
            }

            if (IsSampledImageType(ImageTy)) {
              // All sampled image queries need an integer 0 for the Lod
              // operand.
              FindConstant(ConstantInt::get(Context, APInt(32, 0)));
            }
          }
        }
      }
    }

    // More things to do on kernel functions
    if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
      if (const MDNode *MD =
              dyn_cast<Function>(&F)->getMetadata("reqd_work_group_size")) {
        // We generate constants if the WorkgroupSize builtin is being used.
        if (HasWorkGroupBuiltin) {
          // Collect constant information for work group size.
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(0)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(1)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(2)));
        }
      }
    }

    // TODO(alan-baker): make this better.
    if (module->getTypeByName("opencl.image1d_ro_t.float") ||
        module->getTypeByName("opencl.image1d_ro_t.float.sampled") ||
        module->getTypeByName("opencl.image1d_wo_t.float") ||
        module->getTypeByName("opencl.image2d_ro_t.float") ||
        module->getTypeByName("opencl.image2d_ro_t.float.sampled") ||
        module->getTypeByName("opencl.image2d_wo_t.float") ||
        module->getTypeByName("opencl.image3d_ro_t.float") ||
        module->getTypeByName("opencl.image3d_ro_t.float.sampled") ||
        module->getTypeByName("opencl.image3d_wo_t.float") ||
        module->getTypeByName("opencl.image1d_array_ro_t.float") ||
        module->getTypeByName("opencl.image1d_array_ro_t.float.sampled") ||
        module->getTypeByName("opencl.image1d_array_wo_t.float") ||
        module->getTypeByName("opencl.image2d_array_ro_t.float") ||
        module->getTypeByName("opencl.image2d_array_ro_t.float.sampled") ||
        module->getTypeByName("opencl.image2d_array_wo_t.float")) {
      FindType(Type::getFloatTy(Context));
    } else if (module->getTypeByName("opencl.image1d_ro_t.uint") ||
               module->getTypeByName("opencl.image1d_ro_t.uint.sampled") ||
               module->getTypeByName("opencl.image1d_wo_t.uint") ||
               module->getTypeByName("opencl.image2d_ro_t.uint") ||
               module->getTypeByName("opencl.image2d_ro_t.uint.sampled") ||
               module->getTypeByName("opencl.image2d_wo_t.uint") ||
               module->getTypeByName("opencl.image3d_ro_t.uint") ||
               module->getTypeByName("opencl.image3d_ro_t.uint.sampled") ||
               module->getTypeByName("opencl.image3d_wo_t.uint") ||
               module->getTypeByName("opencl.image1d_array_ro_t.uint") ||
               module->getTypeByName(
                   "opencl.image1d_array_ro_t.uint.sampled") ||
               module->getTypeByName("opencl.image1d_array_wo_t.uint") ||
               module->getTypeByName("opencl.image2d_array_ro_t.uint") ||
               module->getTypeByName(
                   "opencl.image2d_array_ro_t.uint.sampled") ||
               module->getTypeByName("opencl.image2d_array_wo_t.uint")) {
      FindType(Type::getInt32Ty(Context));
    } else if (module->getTypeByName("opencl.image1d_ro_t.int") ||
               module->getTypeByName("opencl.image1d_ro_t.int.sampled") ||
               module->getTypeByName("opencl.image1d_wo_t.int") ||
               module->getTypeByName("opencl.image2d_ro_t.int") ||
               module->getTypeByName("opencl.image2d_ro_t.int.sampled") ||
               module->getTypeByName("opencl.image2d_wo_t.int") ||
               module->getTypeByName("opencl.image3d_ro_t.int") ||
               module->getTypeByName("opencl.image3d_ro_t.int.sampled") ||
               module->getTypeByName("opencl.image3d_wo_t.int") ||
               module->getTypeByName("opencl.image1d_array_ro_t.int") ||
               module->getTypeByName("opencl.image1d_array_ro_t.int.sampled") ||
               module->getTypeByName("opencl.image1d_array_wo_t.int") ||
               module->getTypeByName("opencl.image2d_array_ro_t.int") ||
               module->getTypeByName("opencl.image2d_array_ro_t.int.sampled") ||
               module->getTypeByName("opencl.image2d_array_wo_t.int")) {
      // Nothing for now...
    } else {
      // This was likely an UndefValue.
      FindType(Type::getFloatTy(Context));
    }

    // Collect types' information from function.
    FindTypePerFunc(F);

    // Collect constant information from function.
    FindConstantPerFunc(F);
  }
}

void SPIRVProducerPass::FindGlobalConstVars() {
  clspv::NormalizeGlobalVariables(*module);
  const DataLayout &DL = module->getDataLayout();

  SmallVector<GlobalVariable *, 8> GVList;
  SmallVector<GlobalVariable *, 8> DeadGVList;
  for (GlobalVariable &GV : module->globals()) {
    if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
      if (GV.use_empty()) {
        DeadGVList.push_back(&GV);
      } else {
        GVList.push_back(&GV);
      }
    }
  }

  // Remove dead global __constant variables.
  for (auto GV : DeadGVList) {
    GV->eraseFromParent();
  }
  DeadGVList.clear();

  if (clspv::Option::ModuleConstantsInStorageBuffer()) {
    // For now, we only support a single storage buffer.
    if (GVList.size() > 0) {
      assert(GVList.size() == 1);
      const auto *GV = GVList[0];
      const auto constants_byte_size =
          (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
      const size_t kConstantMaxSize = 65536;
      if (constants_byte_size > kConstantMaxSize) {
        outs() << "Max __constant capacity of " << kConstantMaxSize
               << " bytes exceeded: " << constants_byte_size << " bytes used\n";
        llvm_unreachable("Max __constant capacity exceeded");
      }
    }
  } else {
    // Change global constant variable's address space to ModuleScopePrivate.
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    for (auto GV : GVList) {
      // Create new gv with ModuleScopePrivate address space.
      Type *NewGVTy = GV->getType()->getPointerElementType();
      GlobalVariable *NewGV = new GlobalVariable(
          *module, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
          nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
      NewGV->takeName(GV);

      const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
      SmallVector<User *, 8> CandidateUsers;

      auto record_called_function_type_as_user =
          [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
            // Find argument index.
            unsigned index = 0;
            for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
              if (gv == call->getOperand(i)) {
                // TODO(dneto): Should we break here?
                index = i;
              }
            }

            // Record function type with global constant.
            GlobalConstFuncTyMap[call->getFunctionType()] =
                std::make_pair(call->getFunctionType(), index);
          };

      for (User *GVU : GVUsers) {
        if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
          record_called_function_type_as_user(GV, Call);
        } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
          // Check GEP users.
          for (User *GEPU : GEP->users()) {
            if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
              record_called_function_type_as_user(GEP, GEPCall);
            }
          }
        }

        CandidateUsers.push_back(GVU);
      }

      for (User *U : CandidateUsers) {
        // Update users of gv with new gv.
        if (!isa<Constant>(U)) {
          // #254: Can't change operands of a constant, but this shouldn't be
          // something that sticks around in the module.
          U->replaceUsesOfWith(GV, NewGV);
        }
      }

      // Delete original gv.
      GV->eraseFromParent();
    }
  }
}

void SPIRVProducerPass::FindResourceVars() {
  ResourceVarInfoList.clear();
  FunctionToResourceVarsMap.clear();
  ModuleOrderedResourceVars.reset();
  // Normally, there is one resource variable per clspv.resource.var.*
  // function, since that is unique'd by arg type and index. By design,
  // we can share these resource variables across kernels because all
  // kernels use the same descriptor set.
  //
  // But if the user requested distinct descriptor sets per kernel, then
  // the descriptor allocator has made different (set,binding) pairs for
  // the same (type,arg_index) pair. Since we can decorate a resource
  // variable with only exactly one DescriptorSet and Binding, we are
  // forced in this case to make distinct resource variables whenever
  // the same clspv.resource.var.X function is seen with distinct
  // (set,binding) values.
  const bool always_distinct_sets =
      clspv::Option::DistinctKernelDescriptorSets();
  for (Function &F : *module) {
    // Rely on the fact the resource var functions have a stable ordering
    // in the module.
    if (F.getName().startswith(clspv::ResourceAccessorFunction())) {
      // Find all calls to this function with distinct set and binding pairs.
      // Save them in ResourceVarInfoList.

      // Determine uniqueness of the (set,binding) pairs only within this
      // one resource-var builtin function.
      using SetAndBinding = std::pair<unsigned, unsigned>;
      // Maps set and binding to the resource var info.
      DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
      bool first_use = true;
      for (auto &U : F.uses()) {
        if (auto *call = dyn_cast<CallInst>(U.getUser())) {
          const auto set = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
          const auto binding = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
          const auto arg_kind = clspv::ArgKind(
              dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
          const auto arg_index = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
          const auto coherent = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());

          // Find or make the resource var info for this combination.
          ResourceVarInfo *rv = nullptr;
          if (always_distinct_sets) {
            // Make a new resource var any time we see a different
            // (set,binding) pair.
            SetAndBinding key{set, binding};
            auto where = set_and_binding_map.find(key);
            if (where == set_and_binding_map.end()) {
              rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
                                       binding, &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
              set_and_binding_map[key] = rv;
            } else {
              rv = where->second;
            }
          } else {
            // The default is to make exactly one resource for each
            // clspv.resource.var.* function.
            if (first_use) {
              first_use = false;
              rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
                                       binding, &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
            } else {
              rv = ResourceVarInfoList.back().get();
            }
          }

          // Now populate FunctionToResourceVarsMap.
          auto &mapping =
              FunctionToResourceVarsMap[call->getParent()->getParent()];
          while (mapping.size() <= arg_index) {
            mapping.push_back(nullptr);
          }
          mapping[arg_index] = rv;
        }
      }
    }
  }

  // Populate ModuleOrderedResourceVars.
  for (Function &F : *module) {
    auto where = FunctionToResourceVarsMap.find(&F);
    if (where != FunctionToResourceVarsMap.end()) {
      for (auto &rv : where->second) {
        if (rv != nullptr) {
          ModuleOrderedResourceVars.insert(rv);
        }
      }
    }
  }
  if (ShowResourceVars) {
    for (auto *info : ModuleOrderedResourceVars) {
      outs() << "MORV index " << info->index << " (" << info->descriptor_set
             << "," << info->binding << ") " << *(info->var_fn->getReturnType())
             << "\n";
    }
  }
}

bool SPIRVProducerPass::FindExtInst() {
  LLVMContext &Context = module->getContext();
  bool HasExtInst = false;

  for (Function &F : *module) {
    for (BasicBlock &BB : F) {
      for (Instruction &I : BB) {
        if (CallInst *Call = dyn_cast<CallInst>(&I)) {
          Function *Callee = Call->getCalledFunction();
1150 // Check whether this call is to an extended instruction.
David Neto3fbb4072017-10-16 11:28:14 -04001151 auto callee_name = Callee->getName();
1152 const glsl::ExtInst EInst = getExtInstEnum(callee_name);
1153 const glsl::ExtInst IndirectEInst =
1154 getIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04001155
David Neto3fbb4072017-10-16 11:28:14 -04001156 HasExtInst |=
1157 (EInst != kGlslExtInstBad) || (IndirectEInst != kGlslExtInstBad);
1158
1159 if (IndirectEInst) {
1160 // Register extra constants if needed.
1161
1162 // Registers a type and constant for computing the result of the
1163 // given instruction. If the result of the instruction is a vector,
1164 // then make a splat vector constant with the same number of
1165 // elements.
1166 auto register_constant = [this, &I](Constant *constant) {
1167 FindType(constant->getType());
1168 FindConstant(constant);
1169 if (auto *vectorTy = dyn_cast<VectorType>(I.getType())) {
1170 // Register the splat vector of the value with the same
1171 // width as the result of the instruction.
1172 auto *vec_constant = ConstantVector::getSplat(
alan-baker7261e062020-03-15 14:35:48 -04001173 {static_cast<unsigned>(vectorTy->getNumElements()), false},
David Neto3fbb4072017-10-16 11:28:14 -04001174 constant);
1175 FindConstant(vec_constant);
1176 FindType(vec_constant->getType());
1177 }
1178 };
1179 switch (IndirectEInst) {
1180 case glsl::ExtInstFindUMsb:
1181 // clz needs OpExtInst and OpISub with constant 31, or splat
1182 // vector of 31. Add it to the constant list here.
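            // For illustration (a sketch): a 32-bit clz(x) is later emitted
            // roughly as
            //   %msb = OpExtInst FindUMsb %x
            //   %clz = OpISub 31 %msb
            // with the splat-of-31 constant used for the vector form.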
1183 register_constant(
1184 ConstantInt::get(Type::getInt32Ty(Context), 31));
1185 break;
1186 case glsl::ExtInstAcos:
1187 case glsl::ExtInstAsin:
Kévin Petiteb9f90a2018-09-29 12:29:34 +01001188 case glsl::ExtInstAtan:
David Neto3fbb4072017-10-16 11:28:14 -04001189 case glsl::ExtInstAtan2:
1190 // We need 1/pi for acospi, asinpi, atanpi and atan2pi.
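            // For illustration (a sketch): acospi(x) is later emitted roughly
            // as OpFMul (OpExtInst Acos %x) <1/pi>; the other *pi builtins
            // follow the same pattern.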
1191 register_constant(
1192 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
1193 break;
1194 default:
1195 assert(false && "internally inconsistent");
1196 }
David Neto22f144c2017-06-12 14:26:21 -04001197 }
1198 }
1199 }
1200 }
1201 }
1202
1203 return HasExtInst;
1204}
1205
1206void SPIRVProducerPass::FindTypePerGlobalVar(GlobalVariable &GV) {
1207 // Investigate global variable's type.
1208 FindType(GV.getType());
1209}
1210
1211void SPIRVProducerPass::FindTypePerFunc(Function &F) {
1212 // Investigate function's type.
1213 FunctionType *FTy = F.getFunctionType();
1214
1215 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
1216 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
David Neto9ed8e2f2018-03-24 06:47:24 -07001217 // Handle a regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04001218 if (GlobalConstFuncTyMap.count(FTy)) {
1219 uint32_t GVCstArgIdx = GlobalConstFuncTyMap[FTy].second;
1220 SmallVector<Type *, 4> NewFuncParamTys;
1221 for (unsigned i = 0; i < FTy->getNumParams(); i++) {
1222 Type *ParamTy = FTy->getParamType(i);
1223 if (i == GVCstArgIdx) {
1224 Type *EleTy = ParamTy->getPointerElementType();
1225 ParamTy = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1226 }
1227
1228 NewFuncParamTys.push_back(ParamTy);
1229 }
1230
1231 FunctionType *NewFTy =
1232 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1233 GlobalConstFuncTyMap[FTy] = std::make_pair(NewFTy, GVCstArgIdx);
1234 FTy = NewFTy;
1235 }
1236
1237 FindType(FTy);
1238 } else {
1239 // Kernel functions are emitted with no parameters in SPIR-V, so create a
1240 // new parameterless function type and add it to the type map.
1241 SmallVector<Type *, 4> NewFuncParamTys;
1242 FunctionType *NewFTy =
1243 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1244 FindType(NewFTy);
1245 }
1246
1247 // Investigate instructions' type in function body.
1248 for (BasicBlock &BB : F) {
1249 for (Instruction &I : BB) {
1250 if (isa<ShuffleVectorInst>(I)) {
1251 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1252 // Ignore type for mask of shuffle vector instruction.
1253 if (i == 2) {
1254 continue;
1255 }
1256
1257 Value *Op = I.getOperand(i);
1258 if (!isa<MetadataAsValue>(Op)) {
1259 FindType(Op->getType());
1260 }
1261 }
1262
1263 FindType(I.getType());
1264 continue;
1265 }
1266
David Neto862b7d82018-06-14 18:48:37 -04001267 CallInst *Call = dyn_cast<CallInst>(&I);
1268
1269 if (Call && Call->getCalledFunction()->getName().startswith(
Alan Baker202c8c72018-08-13 13:47:44 -04001270 clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001271 // This is a fake call representing access to a resource variable.
1272 // We handle that elsewhere.
1273 continue;
1274 }
1275
Alan Baker202c8c72018-08-13 13:47:44 -04001276 if (Call && Call->getCalledFunction()->getName().startswith(
1277 clspv::WorkgroupAccessorFunction())) {
1278 // This is a fake call representing access to a workgroup variable.
1279 // We handle that elsewhere.
1280 continue;
1281 }
1282
alan-bakerf083bed2020-01-29 08:15:42 -05001283 // #497: InsertValue and ExtractValue map to OpCompositeInsert and
1284 // OpCompositeExtract, which take literal values for indices. As a result,
1285 // don't map the types of the indices.
1286 if (I.getOpcode() == Instruction::ExtractValue) {
1287 FindType(I.getOperand(0)->getType());
1288 continue;
1289 }
1290 if (I.getOpcode() == Instruction::InsertValue) {
1291 FindType(I.getOperand(0)->getType());
1292 FindType(I.getOperand(1)->getType());
1293 continue;
1294 }
1295
1296 // #497: InsertElement and ExtractElement map to OpCompositeInsert/Extract
1297 // when the index is a constant. In such a case don't map the index type.
1298 if (I.getOpcode() == Instruction::ExtractElement) {
1299 FindType(I.getOperand(0)->getType());
1300 Value *op1 = I.getOperand(1);
1301 if (!isa<Constant>(op1) || isa<GlobalValue>(op1)) {
1302 FindType(op1->getType());
1303 }
1304 continue;
1305 }
1306 if (I.getOpcode() == Instruction::InsertElement) {
1307 FindType(I.getOperand(0)->getType());
1308 FindType(I.getOperand(1)->getType());
1309 Value *op2 = I.getOperand(2);
1310 if (!isa<Constant>(op2) || isa<GlobalValue>(op2)) {
1311 FindType(op2->getType());
1312 }
1313 continue;
1314 }
1315
David Neto22f144c2017-06-12 14:26:21 -04001316 // Work through the operands of the instruction.
1317 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1318 Value *const Op = I.getOperand(i);
1319 // If any of the operands is a constant, find the type!
1320 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1321 FindType(Op->getType());
1322 }
1323 }
1324
1325 for (Use &Op : I.operands()) {
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001326 if (isa<CallInst>(&I)) {
David Neto22f144c2017-06-12 14:26:21 -04001327 // Don't check operand types for call instructions.
1328 break;
1329 }
Alan Baker202c8c72018-08-13 13:47:44 -04001330 if (CallInst *OpCall = dyn_cast<CallInst>(Op)) {
1331 if (OpCall && OpCall->getCalledFunction()->getName().startswith(
1332 clspv::WorkgroupAccessorFunction())) {
1333 // This is a fake call representing access to a workgroup variable.
1334 // We handle that elsewhere.
1335 continue;
1336 }
1337 }
David Neto22f144c2017-06-12 14:26:21 -04001338 if (!isa<MetadataAsValue>(&Op)) {
1339 FindType(Op->getType());
1340 continue;
1341 }
1342 }
1343
David Neto22f144c2017-06-12 14:26:21 -04001344 // We don't want to track the type of this call as we are going to replace
1345 // it.
Kévin Petitdf71de32019-04-09 14:09:50 +01001346 if (Call && (clspv::LiteralSamplerFunction() ==
David Neto22f144c2017-06-12 14:26:21 -04001347 Call->getCalledFunction()->getName())) {
1348 continue;
1349 }
1350
1351 if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(&I)) {
1352 // If gep's base operand has ModuleScopePrivate address space, make gep
1353 // return ModuleScopePrivate address space.
1354 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate) {
1355 // Add pointer type with private address space for global constant to
1356 // type list.
1357 Type *EleTy = I.getType()->getPointerElementType();
1358 Type *NewPTy =
1359 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1360
1361 FindType(NewPTy);
1362 continue;
1363 }
1364 }
1365
1366 FindType(I.getType());
1367 }
1368 }
1369}
1370
SJW77b87ad2020-04-21 14:37:52 -05001371void SPIRVProducerPass::FindTypesForSamplerMap() {
David Neto862b7d82018-06-14 18:48:37 -04001372 // If we are using a sampler map, find the type of the sampler.
SJW77b87ad2020-04-21 14:37:52 -05001373 if (module->getFunction(clspv::LiteralSamplerFunction()) ||
David Neto862b7d82018-06-14 18:48:37 -04001374 0 < getSamplerMap().size()) {
SJW77b87ad2020-04-21 14:37:52 -05001375 auto SamplerStructTy = module->getTypeByName("opencl.sampler_t");
David Neto862b7d82018-06-14 18:48:37 -04001376 if (!SamplerStructTy) {
SJW77b87ad2020-04-21 14:37:52 -05001377 SamplerStructTy =
1378 StructType::create(module->getContext(), "opencl.sampler_t");
David Neto862b7d82018-06-14 18:48:37 -04001379 }
1380
1381 SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);
1382
1383 FindType(SamplerTy);
1384 }
1385}
1386
SJW77b87ad2020-04-21 14:37:52 -05001387void SPIRVProducerPass::FindTypesForResourceVars() {
David Neto862b7d82018-06-14 18:48:37 -04001388 // Record types so they are generated.
1389 TypesNeedingLayout.reset();
1390 StructTypesNeedingBlock.reset();
1391
1392 // To match older clspv codegen, generate the float type first if required
1393 // for images.
1394 for (const auto *info : ModuleOrderedResourceVars) {
1395 if (info->arg_kind == clspv::ArgKind::ReadOnlyImage ||
1396 info->arg_kind == clspv::ArgKind::WriteOnlyImage) {
alan-bakerf67468c2019-11-25 15:51:49 -05001397 if (IsIntImageType(info->var_fn->getReturnType())) {
1398 // Nothing for now...
1399 } else if (IsUintImageType(info->var_fn->getReturnType())) {
SJW77b87ad2020-04-21 14:37:52 -05001400 FindType(Type::getInt32Ty(module->getContext()));
alan-bakerf67468c2019-11-25 15:51:49 -05001401 }
1402
1403 // We need "float" either for the sampled type or for the Lod operand.
SJW77b87ad2020-04-21 14:37:52 -05001404 FindType(Type::getFloatTy(module->getContext()));
David Neto862b7d82018-06-14 18:48:37 -04001405 }
1406 }
1407
1408 for (const auto *info : ModuleOrderedResourceVars) {
1409 Type *type = info->var_fn->getReturnType();
1410
1411 switch (info->arg_kind) {
1412 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04001413 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04001414 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1415 StructTypesNeedingBlock.insert(sty);
1416 } else {
1417 errs() << *type << "\n";
1418 llvm_unreachable("Buffer arguments must map to structures!");
1419 }
1420 break;
1421 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001422 case clspv::ArgKind::PodUBO:
1423 case clspv::ArgKind::PodPushConstant:
David Neto862b7d82018-06-14 18:48:37 -04001424 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1425 StructTypesNeedingBlock.insert(sty);
1426 } else {
1427 errs() << *type << "\n";
1428 llvm_unreachable("POD arguments must map to structures!");
1429 }
1430 break;
1431 case clspv::ArgKind::ReadOnlyImage:
1432 case clspv::ArgKind::WriteOnlyImage:
1433 case clspv::ArgKind::Sampler:
1434 // Sampler and image types map to the pointee type but
1435 // in the uniform constant address space.
1436 type = PointerType::get(type->getPointerElementType(),
1437 clspv::AddressSpace::UniformConstant);
1438 break;
1439 default:
1440 break;
1441 }
1442
1443 // The converted type is the type of the OpVariable we will generate.
1444 // If the pointee type is an array of size zero, FindType will convert it
1445 // to a runtime array.
1446 FindType(type);
1447 }
1448
alan-bakerdcd97412019-09-16 15:32:30 -04001449 // If module constants are clustered in a storage buffer then that struct
1450 // needs layout decorations.
1451 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
SJW77b87ad2020-04-21 14:37:52 -05001452 for (GlobalVariable &GV : module->globals()) {
alan-bakerdcd97412019-09-16 15:32:30 -04001453 PointerType *PTy = cast<PointerType>(GV.getType());
1454 const auto AS = PTy->getAddressSpace();
1455 const bool module_scope_constant_external_init =
1456 (AS == AddressSpace::Constant) && GV.hasInitializer();
1457 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
1458 if (module_scope_constant_external_init &&
1459 spv::BuiltInMax == BuiltinType) {
1460 StructTypesNeedingBlock.insert(
1461 cast<StructType>(PTy->getPointerElementType()));
1462 }
1463 }
1464 }
1465
SJW77b87ad2020-04-21 14:37:52 -05001466 for (const GlobalVariable &GV : module->globals()) {
Kévin Petitbbbda972020-03-03 19:16:31 +00001467 if (GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
1468 auto Ty = cast<PointerType>(GV.getType())->getPointerElementType();
1469 assert(Ty->isStructTy() && "Push constants have to be structures.");
1470 auto STy = cast<StructType>(Ty);
1471 StructTypesNeedingBlock.insert(STy);
1472 }
1473 }
1474
David Neto862b7d82018-06-14 18:48:37 -04001475 // Traverse the arrays and structures underneath each Block, and
1476 // mark them as needing layout.
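  // For example (illustrative): a Block struct { [4 x float], struct Inner }
  // puts both the array type (which also gets an ArrayStride decoration
  // unless generated early) and Inner into TypesNeedingLayout.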
1477 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1478 StructTypesNeedingBlock.end());
1479 while (!work_list.empty()) {
1480 Type *type = work_list.back();
1481 work_list.pop_back();
1482 TypesNeedingLayout.insert(type);
1483 switch (type->getTypeID()) {
1484 case Type::ArrayTyID:
1485 work_list.push_back(type->getArrayElementType());
1486 if (!Hack_generate_runtime_array_stride_early) {
1487 // Remember this array type for deferred decoration.
1488 TypesNeedingArrayStride.insert(type);
1489 }
1490 break;
1491 case Type::StructTyID:
1492 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1493 work_list.push_back(elem_ty);
1494 }
1495 default:
1496 // This type and its contained types don't get layout.
1497 break;
1498 }
1499 }
1500}
1501
SJWf93f5f32020-05-05 07:27:56 -05001502void SPIRVProducerPass::GenerateWorkgroupVars() {
Alan Baker202c8c72018-08-13 13:47:44 -04001503 // The SpecId assignment for pointer-to-local arguments is recorded in
1504 // module-level metadata. Translate that information into local argument
1505 // information.
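  // Each operand of that named metadata is a tuple of roughly the form
  //   !{ <kernel function>, i32 <arg index>, i32 <spec id> }
  // (a sketch; see the operand decoding below). For each new spec id this
  // emits an OpSpecConstant array size, an OpTypeArray of the element type,
  // a Workgroup OpTypePointer and OpVariable, and a SpecId decoration.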
SJWf93f5f32020-05-05 07:27:56 -05001506 LLVMContext &Context = module->getContext();
SJW77b87ad2020-04-21 14:37:52 -05001507 NamedMDNode *nmd = module->getNamedMetadata(clspv::LocalSpecIdMetadataName());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001508 if (!nmd)
1509 return;
Alan Baker202c8c72018-08-13 13:47:44 -04001510 for (auto operand : nmd->operands()) {
1511 MDTuple *tuple = cast<MDTuple>(operand);
1512 ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
1513 Function *func = cast<Function>(fn_md->getValue());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001514 ConstantAsMetadata *arg_index_md =
1515 cast<ConstantAsMetadata>(tuple->getOperand(1));
1516 int arg_index = static_cast<int>(
1517 cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
1518 Argument *arg = &*(func->arg_begin() + arg_index);
Alan Baker202c8c72018-08-13 13:47:44 -04001519
1520 ConstantAsMetadata *spec_id_md =
1521 cast<ConstantAsMetadata>(tuple->getOperand(2));
alan-bakerb6b09dc2018-11-08 16:59:28 -05001522 int spec_id = static_cast<int>(
1523 cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04001524
Alan Baker202c8c72018-08-13 13:47:44 -04001525 LocalArgSpecIds[arg] = spec_id;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001526 if (LocalSpecIdInfoMap.count(spec_id))
1527 continue;
Alan Baker202c8c72018-08-13 13:47:44 -04001528
SJWf93f5f32020-05-05 07:27:56 -05001529 // Generate the spec constant.
1530 SPIRVOperandVec Ops;
1531 Ops << MkId(getSPIRVType(Type::getInt32Ty(Context))) << MkNum(1);
1532 SPIRVID ArraySizeID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
Alan Baker202c8c72018-08-13 13:47:44 -04001533
SJWf93f5f32020-05-05 07:27:56 -05001534 // Generate the array type.
1535 Type *ElemTy = arg->getType()->getPointerElementType();
1536 Ops.clear();
1537 // The element type must have been created.
1538 uint32_t elem_ty_id = getSPIRVType(ElemTy);
1539 Ops << MkId(elem_ty_id) << MkId(ArraySizeID);
1540
1541 SPIRVID ArrayTypeID = addSPIRVInst<kTypes>(spv::OpTypeArray, Ops);
1542
1543 Ops.clear();
1544 Ops << MkNum(spv::StorageClassWorkgroup) << MkId(ArrayTypeID);
1545 SPIRVID PtrArrayTypeID = addSPIRVInst<kTypes>(spv::OpTypePointer, Ops);
1546
1547 // Generate OpVariable.
1548 //
1549 // Ops[0] : Result Type ID
1550 // Ops[1] : Storage Class
1551 Ops.clear();
1552 Ops << MkId(PtrArrayTypeID) << MkNum(spv::StorageClassWorkgroup);
1553
1554 SPIRVID VariableID = addSPIRVInst<kGlobalVariables>(spv::OpVariable, Ops);
1555
1556 Ops.clear();
1557 Ops << MkId(ArraySizeID) << MkNum(spv::DecorationSpecId) << MkNum(spec_id);
1558 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
1559
1560 LocalArgInfo info{VariableID, ElemTy, ArraySizeID,
1561 ArrayTypeID, PtrArrayTypeID, spec_id};
1562 LocalSpecIdInfoMap[spec_id] = info;
Alan Baker202c8c72018-08-13 13:47:44 -04001563 }
1564}
1565
David Neto22f144c2017-06-12 14:26:21 -04001566void SPIRVProducerPass::FindType(Type *Ty) {
1567 TypeList &TyList = getTypeList();
1568
1569 if (0 != TyList.idFor(Ty)) {
1570 return;
1571 }
1572
1573 if (Ty->isPointerTy()) {
1574 auto AddrSpace = Ty->getPointerAddressSpace();
1575 if ((AddressSpace::Constant == AddrSpace) ||
1576 (AddressSpace::Global == AddrSpace)) {
1577 auto PointeeTy = Ty->getPointerElementType();
1578
1579 if (PointeeTy->isStructTy() &&
1580 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1581 FindType(PointeeTy);
1582 auto ActualPointerTy =
1583 PointeeTy->getPointerTo(AddressSpace::UniformConstant);
1584 FindType(ActualPointerTy);
1585 return;
1586 }
1587 }
1588 }
1589
David Neto862b7d82018-06-14 18:48:37 -04001590 // By convention, LLVM array type with 0 elements will map to
1591 // OpTypeRuntimeArray. Otherwise, it will map to OpTypeArray, which
1592 // has a constant number of elements. We also need the type of that
1593 // length constant (i32).
1594 if (auto *arrayTy = dyn_cast<ArrayType>(Ty)) {
1595 if (arrayTy->getNumElements() > 0) {
1596 LLVMContext &Context = Ty->getContext();
1597 FindType(Type::getInt32Ty(Context));
1598 }
David Neto22f144c2017-06-12 14:26:21 -04001599 }
1600
1601 for (Type *SubTy : Ty->subtypes()) {
1602 FindType(SubTy);
1603 }
1604
1605 TyList.insert(Ty);
1606}
1607
1608void SPIRVProducerPass::FindConstantPerGlobalVar(GlobalVariable &GV) {
1609 // If the global variable has a (non-undef) initializer.
1610 if (GV.hasInitializer() && !isa<UndefValue>(GV.getInitializer())) {
David Neto862b7d82018-06-14 18:48:37 -04001611 // Generate the constant if it's not the initializer to a module scope
1612 // constant that we will expect in a storage buffer.
1613 const bool module_scope_constant_external_init =
1614 (GV.getType()->getPointerAddressSpace() == AddressSpace::Constant) &&
1615 clspv::Option::ModuleConstantsInStorageBuffer();
1616 if (!module_scope_constant_external_init) {
1617 FindConstant(GV.getInitializer());
1618 }
David Neto22f144c2017-06-12 14:26:21 -04001619 }
1620}
1621
1622void SPIRVProducerPass::FindConstantPerFunc(Function &F) {
1623 // Investigate constants in function body.
1624 for (BasicBlock &BB : F) {
1625 for (Instruction &I : BB) {
David Neto862b7d82018-06-14 18:48:37 -04001626 if (auto *call = dyn_cast<CallInst>(&I)) {
1627 auto name = call->getCalledFunction()->getName();
Kévin Petitdf71de32019-04-09 14:09:50 +01001628 if (name == clspv::LiteralSamplerFunction()) {
David Neto862b7d82018-06-14 18:48:37 -04001629 // We've handled these constants elsewhere, so skip it.
1630 continue;
1631 }
Alan Baker202c8c72018-08-13 13:47:44 -04001632 if (name.startswith(clspv::ResourceAccessorFunction())) {
1633 continue;
1634 }
1635 if (name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001636 continue;
1637 }
Kévin Petit617a76d2019-04-04 13:54:16 +01001638 if (name.startswith(clspv::SPIRVOpIntrinsicFunction())) {
1639 // Skip the first operand that has the SPIR-V Opcode
1640 for (unsigned i = 1; i < I.getNumOperands(); i++) {
1641 if (isa<Constant>(I.getOperand(i)) &&
1642 !isa<GlobalValue>(I.getOperand(i))) {
1643 FindConstant(I.getOperand(i));
1644 }
1645 }
1646 continue;
1647 }
David Neto22f144c2017-06-12 14:26:21 -04001648 }
1649
1650 if (isa<AllocaInst>(I)) {
1651 // An alloca instruction has a constant element count. Ignore it.
1652 continue;
1653 } else if (isa<ShuffleVectorInst>(I)) {
1654 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1655 // Ignore constant for mask of shuffle vector instruction.
1656 if (i == 2) {
1657 continue;
1658 }
1659
1660 if (isa<Constant>(I.getOperand(i)) &&
1661 !isa<GlobalValue>(I.getOperand(i))) {
1662 FindConstant(I.getOperand(i));
1663 }
1664 }
1665
1666 continue;
1667 } else if (isa<InsertElementInst>(I)) {
1668 // Handle InsertElement with <4 x i8> specially.
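        // A <4 x i8> is modelled as a packed i32 (when i8 is not supported),
        // so inserting byte <idx> is later lowered to shift/mask arithmetic,
        // roughly ((packed & ~(0xff << (8*idx))) | ((val & 0xff) << (8*idx)));
        // register the 0xFF mask and the (idx * 8) shift amount it needs.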
1669 Type *CompositeTy = I.getOperand(0)->getType();
1670 if (is4xi8vec(CompositeTy)) {
1671 LLVMContext &Context = CompositeTy->getContext();
1672 if (isa<Constant>(I.getOperand(0))) {
1673 FindConstant(I.getOperand(0));
1674 }
1675
1676 if (isa<Constant>(I.getOperand(1))) {
1677 FindConstant(I.getOperand(1));
1678 }
1679
1680 // Add mask constant 0xFF.
1681 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1682 FindConstant(CstFF);
1683
1684 // Add shift amount constant.
1685 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
1686 uint64_t Idx = CI->getZExtValue();
1687 Constant *CstShiftAmount =
1688 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1689 FindConstant(CstShiftAmount);
1690 }
1691
1692 continue;
1693 }
1694
1695 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1696 // Ignore constant for index of InsertElement instruction.
1697 if (i == 2) {
1698 continue;
1699 }
1700
1701 if (isa<Constant>(I.getOperand(i)) &&
1702 !isa<GlobalValue>(I.getOperand(i))) {
1703 FindConstant(I.getOperand(i));
1704 }
1705 }
1706
1707 continue;
1708 } else if (isa<ExtractElementInst>(I)) {
1709 // Handle ExtractElement with <4 x i8> specially.
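        // Extracting byte <idx> from the packed i32 form is later lowered
        // roughly to ((packed >> (8*idx)) & 0xff), so register the 0xFF mask
        // and either the constant shift amount or the constant 8 used to
        // scale a dynamic index.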
1710 Type *CompositeTy = I.getOperand(0)->getType();
1711 if (is4xi8vec(CompositeTy)) {
1712 LLVMContext &Context = CompositeTy->getContext();
1713 if (isa<Constant>(I.getOperand(0))) {
1714 FindConstant(I.getOperand(0));
1715 }
1716
1717 // Add mask constant 0xFF.
1718 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1719 FindConstant(CstFF);
1720
1721 // Add shift amount constant.
1722 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
1723 uint64_t Idx = CI->getZExtValue();
1724 Constant *CstShiftAmount =
1725 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1726 FindConstant(CstShiftAmount);
1727 } else {
1728 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
1729 FindConstant(Cst8);
1730 }
1731
1732 continue;
1733 }
1734
1735 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1736 // Ignore constant for index of ExtractElement instruction.
1737 if (i == 1) {
1738 continue;
1739 }
1740
1741 if (isa<Constant>(I.getOperand(i)) &&
1742 !isa<GlobalValue>(I.getOperand(i))) {
1743 FindConstant(I.getOperand(i));
1744 }
1745 }
1746
1747 continue;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001748 } else if ((Instruction::Xor == I.getOpcode()) &&
1749 I.getType()->isIntegerTy(1)) {
1750 // Special case: an i1 Xor with a constant true operand maps to
1751 // OpLogicalNot in SPIR-V, so that true constant does not need to be
1752 // tracked.
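        // For illustration: %not = xor i1 %b, true is emitted as
        // OpLogicalNot %b.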
David Neto22f144c2017-06-12 14:26:21 -04001753 bool foundConstantTrue = false;
1754 for (Use &Op : I.operands()) {
1755 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1756 auto CI = cast<ConstantInt>(Op);
1757
1758 if (CI->isZero() || foundConstantTrue) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05001759 // If we already found the true constant, we might (probably only
1760 // on -O0) have an OpLogicalNot which is taking a constant
1761 // argument, so discover it anyway.
David Neto22f144c2017-06-12 14:26:21 -04001762 FindConstant(Op);
1763 } else {
1764 foundConstantTrue = true;
1765 }
1766 }
1767 }
1768
1769 continue;
David Netod2de94a2017-08-28 17:27:47 -04001770 } else if (isa<TruncInst>(I)) {
alan-bakerb39c8262019-03-08 14:03:37 -05001771 // Special case if i8 is not generally handled.
1772 if (!clspv::Option::Int8Support()) {
1773 // For truncation to i8 we mask against 255.
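          // For illustration (a sketch): trunc i32 %x to i8 is later emitted
          // roughly as OpBitwiseAnd %x 255, so that mask constant must exist.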
1774 Type *ToTy = I.getType();
1775 if (8u == ToTy->getPrimitiveSizeInBits()) {
1776 LLVMContext &Context = ToTy->getContext();
1777 Constant *Cst255 =
1778 ConstantInt::get(Type::getInt32Ty(Context), 0xff);
1779 FindConstant(Cst255);
1780 }
David Netod2de94a2017-08-28 17:27:47 -04001781 }
Neil Henning39672102017-09-29 14:33:13 +01001782 } else if (isa<AtomicRMWInst>(I)) {
1783 LLVMContext &Context = I.getContext();
1784
1785 FindConstant(
1786 ConstantInt::get(Type::getInt32Ty(Context), spv::ScopeDevice));
1787 FindConstant(ConstantInt::get(
1788 Type::getInt32Ty(Context),
1789 spv::MemorySemanticsUniformMemoryMask |
1790 spv::MemorySemanticsSequentiallyConsistentMask));
David Neto22f144c2017-06-12 14:26:21 -04001791 }
1792
1793 for (Use &Op : I.operands()) {
1794 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1795 FindConstant(Op);
1796 }
1797 }
1798 }
1799 }
1800}
1801
1802void SPIRVProducerPass::FindConstant(Value *V) {
David Neto22f144c2017-06-12 14:26:21 -04001803 ValueList &CstList = getConstantList();
1804
David Netofb9a7972017-08-25 17:08:24 -04001805 // If V is already tracked, ignore it.
1806 if (0 != CstList.idFor(V)) {
David Neto22f144c2017-06-12 14:26:21 -04001807 return;
1808 }
1809
David Neto862b7d82018-06-14 18:48:37 -04001810 if (isa<GlobalValue>(V) && clspv::Option::ModuleConstantsInStorageBuffer()) {
1811 return;
1812 }
1813
David Neto22f144c2017-06-12 14:26:21 -04001814 Constant *Cst = cast<Constant>(V);
David Neto862b7d82018-06-14 18:48:37 -04001815 Type *CstTy = Cst->getType();
David Neto22f144c2017-06-12 14:26:21 -04001816
1817 // Handle constant with <4 x i8> type specially.
David Neto22f144c2017-06-12 14:26:21 -04001818 if (is4xi8vec(CstTy)) {
1819 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001820 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001821 }
1822 }
1823
1824 if (Cst->getNumOperands()) {
1825 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end(); I != E;
1826 ++I) {
1827 FindConstant(*I);
1828 }
1829
David Netofb9a7972017-08-25 17:08:24 -04001830 CstList.insert(Cst);
David Neto22f144c2017-06-12 14:26:21 -04001831 return;
1832 } else if (const ConstantDataSequential *CDS =
1833 dyn_cast<ConstantDataSequential>(Cst)) {
1834 // Add constants for each element to constant list.
1835 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
1836 Constant *EleCst = CDS->getElementAsConstant(i);
1837 FindConstant(EleCst);
1838 }
1839 }
1840
1841 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001842 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001843 }
1844}
1845
1846spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
1847 switch (AddrSpace) {
1848 default:
1849 llvm_unreachable("Unsupported OpenCL address space");
1850 case AddressSpace::Private:
1851 return spv::StorageClassFunction;
1852 case AddressSpace::Global:
David Neto22f144c2017-06-12 14:26:21 -04001853 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001854 case AddressSpace::Constant:
1855 return clspv::Option::ConstantArgsInUniformBuffer()
1856 ? spv::StorageClassUniform
1857 : spv::StorageClassStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -04001858 case AddressSpace::Input:
1859 return spv::StorageClassInput;
1860 case AddressSpace::Local:
1861 return spv::StorageClassWorkgroup;
1862 case AddressSpace::UniformConstant:
1863 return spv::StorageClassUniformConstant;
David Neto9ed8e2f2018-03-24 06:47:24 -07001864 case AddressSpace::Uniform:
David Netoe439d702018-03-23 13:14:08 -07001865 return spv::StorageClassUniform;
David Neto22f144c2017-06-12 14:26:21 -04001866 case AddressSpace::ModuleScopePrivate:
1867 return spv::StorageClassPrivate;
Kévin Petitbbbda972020-03-03 19:16:31 +00001868 case AddressSpace::PushConstant:
1869 return spv::StorageClassPushConstant;
David Neto22f144c2017-06-12 14:26:21 -04001870 }
1871}
1872
David Neto862b7d82018-06-14 18:48:37 -04001873spv::StorageClass
1874SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
1875 switch (arg_kind) {
1876 case clspv::ArgKind::Buffer:
1877 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001878 case clspv::ArgKind::BufferUBO:
1879 return spv::StorageClassUniform;
David Neto862b7d82018-06-14 18:48:37 -04001880 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001881 return spv::StorageClassStorageBuffer;
1882 case clspv::ArgKind::PodUBO:
1883 return spv::StorageClassUniform;
1884 case clspv::ArgKind::PodPushConstant:
1885 return spv::StorageClassPushConstant;
David Neto862b7d82018-06-14 18:48:37 -04001886 case clspv::ArgKind::Local:
1887 return spv::StorageClassWorkgroup;
1888 case clspv::ArgKind::ReadOnlyImage:
1889 case clspv::ArgKind::WriteOnlyImage:
1890 case clspv::ArgKind::Sampler:
1891 return spv::StorageClassUniformConstant;
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001892 default:
1893 llvm_unreachable("Unsupported storage class for argument kind");
David Neto862b7d82018-06-14 18:48:37 -04001894 }
1895}
1896
David Neto22f144c2017-06-12 14:26:21 -04001897spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1898 return StringSwitch<spv::BuiltIn>(Name)
1899 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1900 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1901 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1902 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1903 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
alan-bakerbed3a882020-04-21 14:42:41 -04001904 .Case("__spirv_WorkDim", spv::BuiltInWorkDim)
alan-bakere1996972020-05-04 08:38:12 -04001905 .Case("__spirv_GlobalOffset", spv::BuiltInGlobalOffset)
David Neto22f144c2017-06-12 14:26:21 -04001906 .Default(spv::BuiltInMax);
1907}
1908
1909void SPIRVProducerPass::GenerateExtInstImport() {
David Neto22f144c2017-06-12 14:26:21 -04001910 uint32_t &ExtInstImportID = getOpExtInstImportID();
1911
1912 //
1913 // Generate OpExtInstImport.
1914 //
1915 // Ops[0] ... Ops[n] = Name (Literal String)
SJWf93f5f32020-05-05 07:27:56 -05001916
1917 ExtInstImportID =
1918 addSPIRVInst<kImports>(spv::OpExtInstImport, "GLSL.std.450");
1919}
1920
1921uint32_t SPIRVProducerPass::getSPIRVType(Type *Ty) {
1922 auto TI = TypeMap.find(Ty);
1923 if (TI != TypeMap.end()) {
1924 assert(TI->second);
1925 return TI->second;
1926 }
1927
1928 const auto &DL = module->getDataLayout();
1929
1930 uint32_t RID = 0;
1931
1932 switch (Ty->getTypeID()) {
1933 default: {
1934 Ty->print(errs());
1935 llvm_unreachable("Unsupported type???");
1936 break;
1937 }
1938 case Type::MetadataTyID:
1939 case Type::LabelTyID: {
1940 // Ignore these types.
1941 break;
1942 }
1943 case Type::PointerTyID: {
1944 PointerType *PTy = cast<PointerType>(Ty);
1945 unsigned AddrSpace = PTy->getAddressSpace();
1946
1947 if (AddrSpace != AddressSpace::UniformConstant) {
1948 auto PointeeTy = PTy->getElementType();
1949 if (PointeeTy->isStructTy() &&
1950 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1951 // TODO(sjw): assert always an image?
1952 RID = getSPIRVType(PointeeTy);
1953 break;
1954 }
1955 }
1956
1957 // For the purposes of our Vulkan SPIR-V type system, constant and global
1958 // are conflated.
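    // For example (when ConstantArgsInUniformBuffer() is false): a
    // float addrspace(constant)* and a float addrspace(global)* share a
    // single OpTypePointer StorageBuffer %float, whichever is created first.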
1959 if (AddressSpace::Constant == AddrSpace) {
1960 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1961 AddrSpace = AddressSpace::Global;
1962 // Check to see if we already created this type (for instance, if we
1963 // had a constant <type>* and a global <type>*, the type would be
1964 // created by one of these types, and shared by both).
1965 auto GlobalTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
1966 if (0 < TypeMap.count(GlobalTy)) {
1967 RID = TypeMap[GlobalTy];
1968 break;
1969 }
1970 }
1971 } else if (AddressSpace::Global == AddrSpace) {
1972 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1973 AddrSpace = AddressSpace::Constant;
1974
1975 // Check to see if we already created this type (for instance, if we
1976 // had a constant <type>* and a global <type>*, the type would be
1977 // created by one of these types, and shared by both).
1978 auto ConstantTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
1979 if (0 < TypeMap.count(ConstantTy)) {
1980 RID = TypeMap[ConstantTy];
1981 break;
1982 }
1983 }
1984 }
1985
1986 //
1987 // Generate OpTypePointer.
1988 //
1989
1990 // OpTypePointer
1991 // Ops[0] = Storage Class
1992 // Ops[1] = Element Type ID
1993 SPIRVOperandVec Ops;
1994
1995 Ops << MkNum(GetStorageClass(AddrSpace))
1996 << MkId(getSPIRVType(PTy->getElementType()));
1997
1998 RID = addSPIRVInst<kTypes>(spv::OpTypePointer, Ops);
1999 break;
2000 }
2001 case Type::StructTyID: {
2002 StructType *STy = cast<StructType>(Ty);
2003
2004 // Handle sampler type.
2005 if (STy->isOpaque()) {
2006 if (STy->getName().equals("opencl.sampler_t")) {
2007 //
2008 // Generate OpTypeSampler
2009 //
2010 // Empty Ops.
2011
2012 RID = addSPIRVInst<kTypes>(spv::OpTypeSampler);
2013 break;
2014 } else if (STy->getName().startswith("opencl.image1d_ro_t") ||
2015 STy->getName().startswith("opencl.image1d_wo_t") ||
2016 STy->getName().startswith("opencl.image1d_array_ro_t") ||
2017 STy->getName().startswith("opencl.image1d_array_wo_t") ||
2018 STy->getName().startswith("opencl.image2d_ro_t") ||
2019 STy->getName().startswith("opencl.image2d_wo_t") ||
2020 STy->getName().startswith("opencl.image2d_array_ro_t") ||
2021 STy->getName().startswith("opencl.image2d_array_wo_t") ||
2022 STy->getName().startswith("opencl.image3d_ro_t") ||
2023 STy->getName().startswith("opencl.image3d_wo_t")) {
2024 //
2025 // Generate OpTypeImage
2026 //
2027 // Ops[0] = Sampled Type ID
2028 // Ops[1] = Dim ID
2029 // Ops[2] = Depth (Literal Number)
2030 // Ops[3] = Arrayed (Literal Number)
2031 // Ops[4] = MS (Literal Number)
2032 // Ops[5] = Sampled (Literal Number)
2033 // Ops[6] = Image Format ID
2034 //
2035 SPIRVOperandVec Ops;
2036
2037 uint32_t SampledTyID = 0;
2038 if (STy->getName().contains(".float")) {
2039 SampledTyID = getSPIRVType(Type::getFloatTy(Ty->getContext()));
2040 } else if (STy->getName().contains(".uint")) {
2041 SampledTyID = getSPIRVType(Type::getInt32Ty(Ty->getContext()));
2042 } else if (STy->getName().contains(".int")) {
2043 // Generate a signed 32-bit integer if necessary.
2044 if (int32ID == 0) {
2045 SPIRVOperandVec intOps;
2046 intOps << MkNum(32);
2047 intOps << MkNum(1);
2048 int32ID = addSPIRVInst<kTypes>(spv::OpTypeInt, intOps);
2049 }
2050 SampledTyID = int32ID;
2051
2052 // Generate a vec4 of the signed int if necessary.
2053 if (v4int32ID == 0) {
2054 SPIRVOperandVec vecOps;
2055 vecOps << MkId(int32ID);
2056 vecOps << MkNum(4);
2057 v4int32ID = addSPIRVInst<kTypes>(spv::OpTypeVector, vecOps);
2058 }
2059 } else {
2060 // This was likely an UndefValue.
2061 SampledTyID = getSPIRVType(Type::getFloatTy(Ty->getContext()));
2062 }
2063 Ops << MkId(SampledTyID);
2064
2065 spv::Dim DimID = spv::Dim2D;
2066 if (STy->getName().startswith("opencl.image1d_ro_t") ||
2067 STy->getName().startswith("opencl.image1d_wo_t") ||
2068 STy->getName().startswith("opencl.image1d_array_ro_t") ||
2069 STy->getName().startswith("opencl.image1d_array_wo_t")) {
2070 DimID = spv::Dim1D;
2071 } else if (STy->getName().startswith("opencl.image3d_ro_t") ||
2072 STy->getName().startswith("opencl.image3d_wo_t")) {
2073 DimID = spv::Dim3D;
2074 }
2075 Ops << MkNum(DimID);
2076
2077 // TODO: Set up Depth.
2078 Ops << MkNum(0);
2079
2080 uint32_t arrayed = STy->getName().contains("_array_") ? 1 : 0;
2081 Ops << MkNum(arrayed);
2082
2083 // TODO: Set up MS.
2084 Ops << MkNum(0);
2085
2086 // Set up Sampled.
2087 //
2088 // From Spec
2089 //
2090 // 0 indicates this is only known at run time, not at compile time
2091 // 1 indicates will be used with sampler
2092 // 2 indicates will be used without a sampler (a storage image)
2093 uint32_t Sampled = 1;
2094 if (!STy->getName().contains(".sampled")) {
2095 Sampled = 2;
2096 }
2097 Ops << MkNum(Sampled);
2098
2099 // TODO: Set up Image Format.
2100 Ops << MkNum(spv::ImageFormatUnknown);
2101
2102 RID = addSPIRVInst<kTypes>(spv::OpTypeImage, Ops);
2103
2104 Ops.clear();
2105 Ops << MkId(RID);
2106
2107 getImageTypeMap()[Ty] =
2108 addSPIRVInst<kTypes>(spv::OpTypeSampledImage, Ops);
2109 break;
2110 }
2111 }
2112
2113 //
2114 // Generate OpTypeStruct
2115 //
2116 // Ops[0] ... Ops[n] = Member IDs
2117 SPIRVOperandVec Ops;
2118
2119 for (auto *EleTy : STy->elements()) {
2120 Ops << MkId(getSPIRVType(EleTy));
2121 }
2122
2123 RID = addSPIRVInst<kTypes>(spv::OpTypeStruct, Ops);
2124
2125 // Generate OpMemberDecorate.
2126 if (TypesNeedingLayout.idFor(STy)) {
2127 for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
2128 MemberIdx++) {
2129 // Ops[0] = Structure Type ID
2130 // Ops[1] = Member Index(Literal Number)
2131 // Ops[2] = Decoration (Offset)
2132 // Ops[3] = Byte Offset (Literal Number)
2133 Ops.clear();
2134
2135 Ops << MkId(RID) << MkNum(MemberIdx) << MkNum(spv::DecorationOffset);
2136
2137 const auto ByteOffset =
2138 GetExplicitLayoutStructMemberOffset(STy, MemberIdx, DL);
2139
2140 Ops << MkNum(ByteOffset);
2141
2142 addSPIRVInst<kAnnotations>(spv::OpMemberDecorate, Ops);
2143 }
2144 }
2145
2146 // Generate OpDecorate.
2147 if (StructTypesNeedingBlock.idFor(STy)) {
2148 Ops.clear();
2149 // Use Block decorations with StorageBuffer storage class.
2150 Ops << MkId(RID) << MkNum(spv::DecorationBlock);
2151
2152 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
2153 }
2154 break;
2155 }
2156 case Type::IntegerTyID: {
2157 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
2158
2159 if (BitWidth == 1) {
2160 RID = addSPIRVInst<kTypes>(spv::OpTypeBool);
2161 } else {
2162 if (!clspv::Option::Int8Support() && BitWidth == 8) {
2163 // i8 is added to TypeMap as i32.
2164 RID = getSPIRVType(Type::getIntNTy(Ty->getContext(), 32));
2165 } else {
2166 SPIRVOperandVec Ops;
2167 Ops << MkNum(BitWidth) << MkNum(0 /* not signed */);
2168 RID = addSPIRVInst<kTypes>(spv::OpTypeInt, Ops);
2169 }
2170 }
2171 break;
2172 }
2173 case Type::HalfTyID:
2174 case Type::FloatTyID:
2175 case Type::DoubleTyID: {
2176 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
2177 SPIRVOperandVec Ops;
2178 Ops << MkNum(BitWidth);
2179
2180 RID = addSPIRVInst<kTypes>(spv::OpTypeFloat, Ops);
2181 break;
2182 }
2183 case Type::ArrayTyID: {
2184 ArrayType *ArrTy = cast<ArrayType>(Ty);
2185 const uint64_t Length = ArrTy->getArrayNumElements();
2186 if (Length == 0) {
2187 // By convention, map it to a RuntimeArray.
2188
2189 Type *EleTy = ArrTy->getArrayElementType();
2190
2191 //
2192 // Generate OpTypeRuntimeArray.
2193 //
2194 // OpTypeRuntimeArray
2195 // Ops[0] = Element Type ID
2196 SPIRVOperandVec Ops;
2197 Ops << MkId(getSPIRVType(EleTy));
2198
2199 RID = addSPIRVInst<kTypes>(spv::OpTypeRuntimeArray, Ops);
2200
2201 if (Hack_generate_runtime_array_stride_early) {
2202 // Generate OpDecorate.
2203
2204 // Ops[0] = Target ID
2205 // Ops[1] = Decoration (ArrayStride)
2206 // Ops[2] = Stride Number(Literal Number)
2207 Ops.clear();
2208
2209 Ops << MkId(RID) << MkNum(spv::DecorationArrayStride)
2210 << MkNum(static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL)));
2211
2212 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
2213 }
2214
2215 } else {
2216
2217 //
2218 // Generate OpConstant and OpTypeArray.
2219 //
2220
2221 //
2222 // Generate OpConstant for array length.
2223 //
2224 // Add constant for length to constant list.
2225 Constant *CstLength =
2226 ConstantInt::get(Type::getInt32Ty(module->getContext()), Length);
2227 uint32_t LengthID = getSPIRVValue(CstLength);
2228
2229 // Remember to generate ArrayStride later
2230 getTypesNeedingArrayStride().insert(Ty);
2231
2232 //
2233 // Generate OpTypeArray.
2234 //
2235 // Ops[0] = Element Type ID
2236 // Ops[1] = Array Length Constant ID
2237 SPIRVOperandVec Ops;
2238
2239 uint32_t EleTyID = getSPIRVType(ArrTy->getElementType());
2240 Ops << MkId(EleTyID) << MkId(LengthID);
2241
2242 RID = addSPIRVInst<kTypes>(spv::OpTypeArray, Ops);
2243 }
2244 break;
2245 }
2246 case Type::FixedVectorTyID: {
2247 auto VecTy = cast<VectorType>(Ty);
2248 // <4 x i8> is changed to i32 if i8 is not generally supported.
2249 if (!clspv::Option::Int8Support() &&
2250 VecTy->getElementType() == Type::getInt8Ty(module->getContext())) {
2251 if (VecTy->getNumElements() == 4) {
2252 RID = getSPIRVType(VecTy->getElementType());
2253 break;
2254 } else {
2255 Ty->print(errs());
2256 llvm_unreachable("Support above i8 vector type");
2257 }
2258 }
2259
2260 // Ops[0] = Component Type ID
2261 // Ops[1] = Component Count (Literal Number)
2262 SPIRVOperandVec Ops;
2263 Ops << MkId(getSPIRVType(VecTy->getElementType()))
2264 << MkNum(VecTy->getNumElements());
2265
2266 RID = addSPIRVInst<kTypes>(spv::OpTypeVector, Ops);
2267 break;
2268 }
2269 case Type::VoidTyID: {
2270 RID = addSPIRVInst<kTypes>(spv::OpTypeVoid);
2271 break;
2272 }
2273 case Type::FunctionTyID: {
2274 // Generate SPIRV instruction for function type.
2275 FunctionType *FTy = cast<FunctionType>(Ty);
2276
2277 // Ops[0] = Return Type ID
2278 // Ops[1] ... Ops[n] = Parameter Type IDs
2279 SPIRVOperandVec Ops;
2280
2281 // Find SPIRV instruction for return type
2282 Ops << MkId(getSPIRVType(FTy->getReturnType()));
2283
2284 // Find SPIRV instructions for parameter types
2285 for (unsigned k = 0; k < FTy->getNumParams(); k++) {
2286 // Find SPIRV instruction for parameter type.
2287 auto ParamTy = FTy->getParamType(k);
2288 if (ParamTy->isPointerTy()) {
2289 auto PointeeTy = ParamTy->getPointerElementType();
2290 if (PointeeTy->isStructTy() &&
2291 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
2292 ParamTy = PointeeTy;
2293 }
2294 }
2295
2296 Ops << MkId(getSPIRVType(ParamTy));
2297 }
2298
2299 RID = addSPIRVInst<kTypes>(spv::OpTypeFunction, Ops);
2300 break;
2301 }
2302 }
2303
2304 if (RID) {
2305 TypeMap[Ty] = RID;
2306 }
2307 return RID;
David Neto22f144c2017-06-12 14:26:21 -04002308}
2309
SJW77b87ad2020-04-21 14:37:52 -05002310void SPIRVProducerPass::GenerateSPIRVTypes() {
David Neto22f144c2017-06-12 14:26:21 -04002311 for (Type *Ty : getTypeList()) {
SJWf93f5f32020-05-05 07:27:56 -05002312 getSPIRVType(Ty);
David Netoc6f3ab22018-04-06 18:02:31 -04002313 }
David Neto22f144c2017-06-12 14:26:21 -04002314}
2315
SJWf93f5f32020-05-05 07:27:56 -05002316SPIRVID SPIRVProducerPass::getSPIRVConstant(Constant *Cst) {
David Neto22f144c2017-06-12 14:26:21 -04002317 ValueMapType &VMap = getValueMap();
David Neto482550a2018-03-24 05:21:07 -07002318 const bool hack_undef = clspv::Option::HackUndef();
David Neto22f144c2017-06-12 14:26:21 -04002319
SJWf93f5f32020-05-05 07:27:56 -05002320 SPIRVID RID = 0;
David Neto22f144c2017-06-12 14:26:21 -04002321
SJWf93f5f32020-05-05 07:27:56 -05002322 //
2323 // Generate OpConstant.
2324 //
2325 // Ops[0] = Result Type ID
2326 // Ops[1] .. Ops[n] = Values LiteralNumber
2327 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002328
SJWf93f5f32020-05-05 07:27:56 -05002329 Ops << MkId(getSPIRVType(Cst->getType()));
David Neto22f144c2017-06-12 14:26:21 -04002330
SJWf93f5f32020-05-05 07:27:56 -05002331 std::vector<uint32_t> LiteralNum;
2332 spv::Op Opcode = spv::OpNop;
David Neto22f144c2017-06-12 14:26:21 -04002333
SJWf93f5f32020-05-05 07:27:56 -05002334 if (isa<UndefValue>(Cst)) {
David Neto22f144c2017-06-12 14:26:21 -04002335 // Ops[0] = Result Type ID
SJWf93f5f32020-05-05 07:27:56 -05002336 Opcode = spv::OpUndef;
2337 if (hack_undef && IsTypeNullable(Cst->getType())) {
2338 Opcode = spv::OpConstantNull;
2339 }
2340 } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
2341 unsigned BitWidth = CI->getBitWidth();
2342 if (BitWidth == 1) {
2343 // If the bitwidth of constant is 1, generate OpConstantTrue or
2344 // OpConstantFalse.
2345 if (CI->getZExtValue()) {
2346 // Ops[0] = Result Type ID
2347 Opcode = spv::OpConstantTrue;
David Neto22f144c2017-06-12 14:26:21 -04002348 } else {
SJWf93f5f32020-05-05 07:27:56 -05002349 // Ops[0] = Result Type ID
2350 Opcode = spv::OpConstantFalse;
David Neto22f144c2017-06-12 14:26:21 -04002351 }
SJWf93f5f32020-05-05 07:27:56 -05002352 } else {
2353 auto V = CI->getZExtValue();
2354 LiteralNum.push_back(V & 0xFFFFFFFF);
2355
2356 if (BitWidth > 32) {
2357 LiteralNum.push_back(V >> 32);
David Neto22f144c2017-06-12 14:26:21 -04002358 }
2359
2360 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002361
SJWf93f5f32020-05-05 07:27:56 -05002362 Ops << MkInteger(LiteralNum);
2363 }
2364 } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
2365 uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
2366 Type *CFPTy = CFP->getType();
2367 if (CFPTy->isFloatTy()) {
2368 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2369 } else if (CFPTy->isDoubleTy()) {
2370 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2371 LiteralNum.push_back(FPVal >> 32);
2372 } else if (CFPTy->isHalfTy()) {
2373 LiteralNum.push_back(FPVal & 0xFFFF);
2374 } else {
2375 CFPTy->print(errs());
2376 llvm_unreachable("Implement this ConstantFP Type");
2377 }
David Neto22f144c2017-06-12 14:26:21 -04002378
SJWf93f5f32020-05-05 07:27:56 -05002379 Opcode = spv::OpConstant;
David Neto49351ac2017-08-26 17:32:20 -04002380
SJWf93f5f32020-05-05 07:27:56 -05002381 Ops << MkFloat(LiteralNum);
2382 } else if (isa<ConstantDataSequential>(Cst) &&
2383 cast<ConstantDataSequential>(Cst)->isString()) {
2384 Cst->print(errs());
2385 llvm_unreachable("Implement this Constant");
David Neto49351ac2017-08-26 17:32:20 -04002386
SJWf93f5f32020-05-05 07:27:56 -05002387 } else if (const ConstantDataSequential *CDS =
2388 dyn_cast<ConstantDataSequential>(Cst)) {
2389 // Let's convert <4 x i8> constant to int constant specially.
2390 // This case occurs when all the values are specified as constant
2391 // ints.
2392 Type *CstTy = Cst->getType();
2393 if (is4xi8vec(CstTy)) {
2394 LLVMContext &Context = CstTy->getContext();
David Neto49351ac2017-08-26 17:32:20 -04002395
SJWf93f5f32020-05-05 07:27:56 -05002396 //
2397 // Generate OpConstant with OpTypeInt 32 0.
2398 //
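      // The loop below folds the four bytes most-significant-first, e.g.
      // <i8 1, i8 2, i8 3, i8 4> becomes the i32 constant 0x01020304.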
2399 uint32_t IntValue = 0;
2400 for (unsigned k = 0; k < 4; k++) {
2401 const uint64_t Val = CDS->getElementAsInteger(k);
2402 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto49351ac2017-08-26 17:32:20 -04002403 }
2404
SJWf93f5f32020-05-05 07:27:56 -05002405 Type *i32 = Type::getInt32Ty(Context);
2406 Constant *CstInt = ConstantInt::get(i32, IntValue);
2407 RID = getSPIRVValue(CstInt);
2408 } else {
2409
David Neto49351ac2017-08-26 17:32:20 -04002410 // A normal constant-data-sequential case.
David Neto22f144c2017-06-12 14:26:21 -04002411 for (unsigned k = 0; k < CDS->getNumElements(); k++) {
2412 Constant *EleCst = CDS->getElementAsConstant(k);
SJWf93f5f32020-05-05 07:27:56 -05002413 uint32_t EleCstID = getSPIRVValue(EleCst);
David Neto257c3892018-04-11 13:19:45 -04002414 Ops << MkId(EleCstID);
David Neto22f144c2017-06-12 14:26:21 -04002415 }
2416
2417 Opcode = spv::OpConstantComposite;
SJWf93f5f32020-05-05 07:27:56 -05002418 }
2419 } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
2420 // Let's convert <4 x i8> constant to int constant specially.
2421 // This case occurs when at least one of the values is an undef.
2422 Type *CstTy = Cst->getType();
2423 if (is4xi8vec(CstTy)) {
2424 LLVMContext &Context = CstTy->getContext();
David Neto22f144c2017-06-12 14:26:21 -04002425
SJWf93f5f32020-05-05 07:27:56 -05002426 //
2427 // Generate OpConstant with OpTypeInt 32 0.
2428 //
2429 uint32_t IntValue = 0;
2430 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
2431 I != E; ++I) {
2432 uint64_t Val = 0;
2433 const Value *CV = *I;
2434 if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
2435 Val = CI2->getZExtValue();
David Neto22f144c2017-06-12 14:26:21 -04002436 }
SJWf93f5f32020-05-05 07:27:56 -05002437 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto22f144c2017-06-12 14:26:21 -04002438 }
2439
SJWf93f5f32020-05-05 07:27:56 -05002440 Type *i32 = Type::getInt32Ty(Context);
2441 Constant *CstInt = ConstantInt::get(i32, IntValue);
2442 RID = getSPIRVValue(CstInt);
2443 } else {
2444
David Neto22f144c2017-06-12 14:26:21 -04002445 // We use a constant composite in SPIR-V for our constant aggregate in
2446 // LLVM.
2447 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002448
2449 for (unsigned k = 0; k < CA->getNumOperands(); k++) {
SJWf93f5f32020-05-05 07:27:56 -05002450 uint32_t ElementConstantID = getSPIRVValue(CA->getAggregateElement(k));
David Neto22f144c2017-06-12 14:26:21 -04002451
2452 // And add an operand to the composite we are constructing
David Neto257c3892018-04-11 13:19:45 -04002453 Ops << MkId(ElementConstantID);
David Neto22f144c2017-06-12 14:26:21 -04002454 }
David Neto22f144c2017-06-12 14:26:21 -04002455 }
SJWf93f5f32020-05-05 07:27:56 -05002456 } else if (Cst->isNullValue()) {
2457 Opcode = spv::OpConstantNull;
2458 } else {
2459 Cst->print(errs());
2460 llvm_unreachable("Unsupported Constant???");
2461 }
David Neto22f144c2017-06-12 14:26:21 -04002462
SJWf93f5f32020-05-05 07:27:56 -05002463 if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
2464 // Null pointer requires variable pointers.
2465 setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
2466 }
alan-baker5b86ed72019-02-15 08:26:50 -05002467
SJWf93f5f32020-05-05 07:27:56 -05002468 if (RID == 0) {
2469 RID = addSPIRVInst<kConstants>(Opcode, Ops);
2470 }
2471
2472 VMap[Cst] = RID;
2473
2474 return RID;
2475}
2476
2477SPIRVID SPIRVProducerPass::getSPIRVValue(Value *V) {
2478 auto II = ValueMap.find(V);
2479 if (II != ValueMap.end()) {
2480 assert(II->second);
2481 return II->second;
2482 }
2483 if (Constant *Cst = dyn_cast<Constant>(V)) {
2484 return getSPIRVConstant(Cst);
2485 } else {
2486 llvm_unreachable("Variable not found");
2487 }
2488}
2489
2490void SPIRVProducerPass::GenerateSPIRVConstants() {
2491 ValueList &CstList = getConstantList();
2492
2493 for (uint32_t i = 0; i < CstList.size(); i++) {
2494 // UniqueVector ids are 1-based.
2495 Constant *Cst = cast<Constant>(CstList[i + 1]);
2496 getSPIRVValue(Cst);
David Neto22f144c2017-06-12 14:26:21 -04002497 }
2498}
2499
SJW77b87ad2020-04-21 14:37:52 -05002500void SPIRVProducerPass::GenerateSamplers() {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002501 auto &sampler_map = getSamplerMap();
alan-baker09cb9802019-12-10 13:16:27 -05002502 SamplerLiteralToIDMap.clear();
David Neto862b7d82018-06-14 18:48:37 -04002503 DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
2504 DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;
David Neto22f144c2017-06-12 14:26:21 -04002505
David Neto862b7d82018-06-14 18:48:37 -04002506 // We might have samplers in the sampler map that are not used
2507 // in the translation unit. We need to allocate variables
2508 // for them and bindings too.
2509 DenseSet<unsigned> used_bindings;
David Neto22f144c2017-06-12 14:26:21 -04002510
SJW77b87ad2020-04-21 14:37:52 -05002511 auto *var_fn = module->getFunction(clspv::LiteralSamplerFunction());
alan-baker09cb9802019-12-10 13:16:27 -05002512 // Return if there are no literal samplers.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002513 if (!var_fn)
2514 return;
alan-baker09cb9802019-12-10 13:16:27 -05002515
David Neto862b7d82018-06-14 18:48:37 -04002516 for (auto user : var_fn->users()) {
2517 // Populate SamplerLiteralToDescriptorSetMap and
2518 // SamplerLiteralToBindingMap.
2519 //
2520 // Look for calls like
2521 // call %opencl.sampler_t addrspace(2)*
2522 // @clspv.sampler.var.literal(
2523 // i32 descriptor,
2524 // i32 binding,
alan-baker09cb9802019-12-10 13:16:27 -05002525 // i32 (index-into-sampler-map|sampler_mask))
alan-bakerb6b09dc2018-11-08 16:59:28 -05002526 if (auto *call = dyn_cast<CallInst>(user)) {
alan-baker09cb9802019-12-10 13:16:27 -05002527 const auto third_param = static_cast<unsigned>(
alan-bakerb6b09dc2018-11-08 16:59:28 -05002528 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
alan-baker09cb9802019-12-10 13:16:27 -05002529 auto sampler_value = third_param;
2530 if (clspv::Option::UseSamplerMap()) {
2531 if (third_param >= sampler_map.size()) {
2532 errs() << "Out of bounds index to sampler map: " << third_param;
2533 llvm_unreachable("bad sampler init: out of bounds");
2534 }
2535 sampler_value = sampler_map[third_param].first;
David Neto862b7d82018-06-14 18:48:37 -04002536 }
2537
David Neto862b7d82018-06-14 18:48:37 -04002538 const auto descriptor_set = static_cast<unsigned>(
2539 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
2540 const auto binding = static_cast<unsigned>(
2541 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
2542
2543 SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
2544 SamplerLiteralToBindingMap[sampler_value] = binding;
2545 used_bindings.insert(binding);
2546 }
2547 }
2548
alan-baker09cb9802019-12-10 13:16:27 -05002549 DenseSet<size_t> seen;
2550 for (auto user : var_fn->users()) {
2551 if (!isa<CallInst>(user))
2552 continue;
2553
2554 auto call = cast<CallInst>(user);
2555 const unsigned third_param = static_cast<unsigned>(
2556 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
2557
2558 // Already allocated a variable for this value.
2559 if (!seen.insert(third_param).second)
2560 continue;
2561
2562 auto sampler_value = third_param;
2563 if (clspv::Option::UseSamplerMap()) {
2564 sampler_value = sampler_map[third_param].first;
2565 }
2566
David Neto22f144c2017-06-12 14:26:21 -04002567 // Generate OpVariable.
2568 //
2569 // GIDOps[0] : Result Type ID
2570 // GIDOps[1] : Storage Class
SJWf93f5f32020-05-05 07:27:56 -05002571 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002572
SJWf93f5f32020-05-05 07:27:56 -05002573 Ops << MkId(getSPIRVType(SamplerTy))
David Neto257c3892018-04-11 13:19:45 -04002574 << MkNum(spv::StorageClassUniformConstant);
David Neto22f144c2017-06-12 14:26:21 -04002575
SJWf93f5f32020-05-05 07:27:56 -05002576 auto sampler_var_id = addSPIRVInst<kGlobalVariables>(spv::OpVariable, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002577
alan-baker09cb9802019-12-10 13:16:27 -05002578 SamplerLiteralToIDMap[sampler_value] = sampler_var_id;
David Neto22f144c2017-06-12 14:26:21 -04002579
David Neto862b7d82018-06-14 18:48:37 -04002580 unsigned descriptor_set;
2581 unsigned binding;
alan-baker09cb9802019-12-10 13:16:27 -05002582 if (SamplerLiteralToBindingMap.find(sampler_value) ==
alan-bakerb6b09dc2018-11-08 16:59:28 -05002583 SamplerLiteralToBindingMap.end()) {
David Neto862b7d82018-06-14 18:48:37 -04002584 // This sampler is not actually used. Find the next unused binding.
2585 for (binding = 0; used_bindings.count(binding); binding++)
2586 ;
2587 descriptor_set = 0; // Literal samplers always use descriptor set 0.
2588 used_bindings.insert(binding);
2589 } else {
alan-baker09cb9802019-12-10 13:16:27 -05002590 descriptor_set = SamplerLiteralToDescriptorSetMap[sampler_value];
2591 binding = SamplerLiteralToBindingMap[sampler_value];
alan-bakercff80152019-06-15 00:38:00 -04002592
alan-baker09cb9802019-12-10 13:16:27 -05002593 version0::DescriptorMapEntry::SamplerData sampler_data = {sampler_value};
alan-bakercff80152019-06-15 00:38:00 -04002594 descriptorMapEntries->emplace_back(std::move(sampler_data),
2595 descriptor_set, binding);
David Neto862b7d82018-06-14 18:48:37 -04002596 }
2597
SJW69939d52020-04-16 07:29:07 -05002598 // Ops[0] = Target ID
2599 // Ops[1] = Decoration (DescriptorSet)
2600 // Ops[2] = LiteralNumber according to Decoration
2601 Ops.clear();
2602
David Neto862b7d82018-06-14 18:48:37 -04002603 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationDescriptorSet)
2604 << MkNum(descriptor_set);
David Neto22f144c2017-06-12 14:26:21 -04002605
SJWf93f5f32020-05-05 07:27:56 -05002606 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002607
2608 // Ops[0] = Target ID
2609 // Ops[1] = Decoration (Binding)
2610 // Ops[2] = LiteralNumber according to Decoration
2611 Ops.clear();
David Neto862b7d82018-06-14 18:48:37 -04002612 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationBinding)
2613 << MkNum(binding);
David Neto22f144c2017-06-12 14:26:21 -04002614
SJWf93f5f32020-05-05 07:27:56 -05002615 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002616 }
David Neto862b7d82018-06-14 18:48:37 -04002617}
David Neto22f144c2017-06-12 14:26:21 -04002618
SJW77b87ad2020-04-21 14:37:52 -05002619void SPIRVProducerPass::GenerateResourceVars() {
David Neto862b7d82018-06-14 18:48:37 -04002620 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04002621
David Neto862b7d82018-06-14 18:48:37 -04002622 // Generate variables. Make one for each resource var info object.
2623 for (auto *info : ModuleOrderedResourceVars) {
2624 Type *type = info->var_fn->getReturnType();
2625 // Remap the address space for opaque types.
2626 switch (info->arg_kind) {
2627 case clspv::ArgKind::Sampler:
2628 case clspv::ArgKind::ReadOnlyImage:
2629 case clspv::ArgKind::WriteOnlyImage:
2630 type = PointerType::get(type->getPointerElementType(),
2631 clspv::AddressSpace::UniformConstant);
2632 break;
2633 default:
2634 break;
2635 }
David Neto22f144c2017-06-12 14:26:21 -04002636
SJWf93f5f32020-05-05 07:27:56 -05002637 const auto type_id = getSPIRVType(type);
David Neto862b7d82018-06-14 18:48:37 -04002638 const auto sc = GetStorageClassForArgKind(info->arg_kind);
SJWf93f5f32020-05-05 07:27:56 -05002639 SPIRVOperandVec Ops;
David Neto862b7d82018-06-14 18:48:37 -04002640 Ops << MkId(type_id) << MkNum(sc);
David Neto22f144c2017-06-12 14:26:21 -04002641
SJWf93f5f32020-05-05 07:27:56 -05002642 info->var_id = addSPIRVInst<kGlobalVariables>(spv::OpVariable, Ops);
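    // For example, a storage buffer resource yields roughly (illustrative
    // names): %resource_var = OpVariable %_ptr_StorageBuffer_type StorageBuffer
    // The DescriptorSet/Binding decorations are added further below.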
David Neto862b7d82018-06-14 18:48:37 -04002643
2644 // Map calls to the variable-builtin-function.
2645 for (auto &U : info->var_fn->uses()) {
2646 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
2647 const auto set = unsigned(
2648 dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
2649 const auto binding = unsigned(
2650 dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
2651 if (set == info->descriptor_set && binding == info->binding) {
2652 switch (info->arg_kind) {
2653 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002654 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002655 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04002656 case clspv::ArgKind::PodUBO:
2657 case clspv::ArgKind::PodPushConstant:
David Neto862b7d82018-06-14 18:48:37 -04002658 // The call maps to the variable directly.
2659 VMap[call] = info->var_id;
2660 break;
2661 case clspv::ArgKind::Sampler:
2662 case clspv::ArgKind::ReadOnlyImage:
2663 case clspv::ArgKind::WriteOnlyImage:
2664 // The call maps to a load we generate later.
2665 ResourceVarDeferredLoadCalls[call] = info->var_id;
2666 break;
2667 default:
2668 llvm_unreachable("Unhandled arg kind");
2669 }
2670 }
David Neto22f144c2017-06-12 14:26:21 -04002671 }
David Neto862b7d82018-06-14 18:48:37 -04002672 }
2673 }
David Neto22f144c2017-06-12 14:26:21 -04002674
David Neto862b7d82018-06-14 18:48:37 -04002675 // Generate associated decorations.
SJWf93f5f32020-05-05 07:27:56 -05002676 SPIRVOperandVec Ops;
David Neto862b7d82018-06-14 18:48:37 -04002677 for (auto *info : ModuleOrderedResourceVars) {
alan-baker9b0ec3c2020-04-06 14:45:34 -04002678 // Push constants don't need descriptor set or binding decorations.
2679 if (info->arg_kind == clspv::ArgKind::PodPushConstant)
2680 continue;
2681
David Neto862b7d82018-06-14 18:48:37 -04002682 // Decorate with DescriptorSet and Binding.
2683 Ops.clear();
2684 Ops << MkId(info->var_id) << MkNum(spv::DecorationDescriptorSet)
2685 << MkNum(info->descriptor_set);
SJWf93f5f32020-05-05 07:27:56 -05002686 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto862b7d82018-06-14 18:48:37 -04002687
2688 Ops.clear();
2689 Ops << MkId(info->var_id) << MkNum(spv::DecorationBinding)
2690 << MkNum(info->binding);
SJWf93f5f32020-05-05 07:27:56 -05002691 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto862b7d82018-06-14 18:48:37 -04002692
alan-bakere9308012019-03-15 10:25:13 -04002693 if (info->coherent) {
2694 // Decorate with Coherent if required for the variable.
2695 Ops.clear();
2696 Ops << MkId(info->var_id) << MkNum(spv::DecorationCoherent);
SJWf93f5f32020-05-05 07:27:56 -05002697 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere9308012019-03-15 10:25:13 -04002698 }
2699
David Neto862b7d82018-06-14 18:48:37 -04002700 // Generate NonWritable and NonReadable
2701 switch (info->arg_kind) {
2702 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002703 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002704 if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
2705 clspv::AddressSpace::Constant) {
2706 Ops.clear();
2707 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonWritable);
SJWf93f5f32020-05-05 07:27:56 -05002708 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002709 }
David Neto862b7d82018-06-14 18:48:37 -04002710 break;
David Neto862b7d82018-06-14 18:48:37 -04002711 case clspv::ArgKind::WriteOnlyImage:
2712 Ops.clear();
2713 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonReadable);
SJWf93f5f32020-05-05 07:27:56 -05002714 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto862b7d82018-06-14 18:48:37 -04002715 break;
2716 default:
2717 break;
David Neto22f144c2017-06-12 14:26:21 -04002718 }
2719 }
2720}
2721
SJW77b87ad2020-04-21 14:37:52 -05002722void SPIRVProducerPass::GeneratePushConstantDescriptorMapEntries() {
Kévin Petitbbbda972020-03-03 19:16:31 +00002723
SJW77b87ad2020-04-21 14:37:52 -05002724 if (auto GV = module->getGlobalVariable(clspv::PushConstantsVariableName())) {
2725 auto const &DL = module->getDataLayout();
Kévin Petitbbbda972020-03-03 19:16:31 +00002726 auto MD = GV->getMetadata(clspv::PushConstantsMetadataName());
2727 auto STy = cast<StructType>(GV->getValueType());
2728
2729 for (unsigned i = 0; i < STy->getNumElements(); i++) {
2730 auto pc = static_cast<clspv::PushConstant>(
2731 mdconst::extract<ConstantInt>(MD->getOperand(i))->getZExtValue());
2732 auto memberType = STy->getElementType(i);
2733 auto offset = GetExplicitLayoutStructMemberOffset(STy, i, DL);
2734 unsigned previousOffset = 0;
2735 if (i > 0) {
2736 previousOffset = GetExplicitLayoutStructMemberOffset(STy, i - 1, DL);
2737 }
2738 auto size = static_cast<uint32_t>(GetTypeSizeInBits(memberType, DL)) / 8;
SJW77b87ad2020-04-21 14:37:52 -05002739 assert(isValidExplicitLayout(*module, STy, i,
2740 spv::StorageClassPushConstant, offset,
2741 previousOffset));
Kévin Petitbbbda972020-03-03 19:16:31 +00002742 version0::DescriptorMapEntry::PushConstantData data = {pc, offset, size};
2743 descriptorMapEntries->emplace_back(std::move(data));
2744 }
2745 }
2746}
2747
SJW77b87ad2020-04-21 14:37:52 -05002748void SPIRVProducerPass::GenerateSpecConstantDescriptorMapEntries() {
2749 for (auto pair : clspv::GetSpecConstants(module)) {
alan-bakera1be3322020-04-20 12:48:18 -04002750 auto kind = pair.first;
2751 auto id = pair.second;
2752
2753 // Local memory size is only used for kernel arguments.
2754 if (kind == SpecConstant::kLocalMemorySize)
2755 continue;
2756
2757 version0::DescriptorMapEntry::SpecConstantData data = {kind, id};
2758 descriptorMapEntries->emplace_back(std::move(data));
2759 }
2760}
2761
David Neto22f144c2017-06-12 14:26:21 -04002762void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
David Neto22f144c2017-06-12 14:26:21 -04002763 ValueMapType &VMap = getValueMap();
2764 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
David Neto85082642018-03-24 06:55:20 -07002765 const DataLayout &DL = GV.getParent()->getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04002766
2767 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
2768 Type *Ty = GV.getType();
2769 PointerType *PTy = cast<PointerType>(Ty);
2770
2771 uint32_t InitializerID = 0;
2772
2773 // Workgroup size is handled differently (it goes into a constant)
2774 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2775 std::vector<bool> HasMDVec;
2776 uint32_t PrevXDimCst = 0xFFFFFFFF;
2777 uint32_t PrevYDimCst = 0xFFFFFFFF;
2778 uint32_t PrevZDimCst = 0xFFFFFFFF;
2779 for (Function &Func : *GV.getParent()) {
2780 if (Func.isDeclaration()) {
2781 continue;
2782 }
2783
2784 // We only need to check kernels.
2785 if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
2786 continue;
2787 }
2788
2789 if (const MDNode *MD =
2790 dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
2791 uint32_t CurXDimCst = static_cast<uint32_t>(
2792 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
2793 uint32_t CurYDimCst = static_cast<uint32_t>(
2794 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
2795 uint32_t CurZDimCst = static_cast<uint32_t>(
2796 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
2797
2798 if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
2799 PrevZDimCst == 0xFFFFFFFF) {
2800 PrevXDimCst = CurXDimCst;
2801 PrevYDimCst = CurYDimCst;
2802 PrevZDimCst = CurZDimCst;
2803 } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
2804 CurZDimCst != PrevZDimCst) {
2805 llvm_unreachable(
2806 "reqd_work_group_size must be the same across all kernels");
2807 } else {
2808 continue;
2809 }
2810
2811 //
2812 // Generate OpConstantComposite.
2813 //
2814 // Ops[0] : Result Type ID
2815 // Ops[1] : Constant size for x dimension.
2816 // Ops[2] : Constant size for y dimension.
2817 // Ops[3] : Constant size for z dimension.
SJWf93f5f32020-05-05 07:27:56 -05002818 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002819
2820 uint32_t XDimCstID =
SJWf93f5f32020-05-05 07:27:56 -05002821 getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(0)));
David Neto22f144c2017-06-12 14:26:21 -04002822 uint32_t YDimCstID =
SJWf93f5f32020-05-05 07:27:56 -05002823 getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(1)));
David Neto22f144c2017-06-12 14:26:21 -04002824 uint32_t ZDimCstID =
SJWf93f5f32020-05-05 07:27:56 -05002825 getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(2)));
David Neto22f144c2017-06-12 14:26:21 -04002826
SJWf93f5f32020-05-05 07:27:56 -05002827 Ops << MkId(getSPIRVType(Ty->getPointerElementType()))
2828 << MkId(XDimCstID) << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002829
SJWf93f5f32020-05-05 07:27:56 -05002830 InitializerID =
2831 addSPIRVInst<kGlobalVariables>(spv::OpConstantComposite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002832
2833 HasMDVec.push_back(true);
2834 } else {
2835 HasMDVec.push_back(false);
2836 }
2837 }
2838
2839 // Check that all kernels have the same reqd_work_group_size definition.
2840 bool HasMD = false;
2841 if (!HasMDVec.empty()) {
2842 HasMD = HasMDVec[0];
2843 for (uint32_t i = 1; i < HasMDVec.size(); i++) {
2844 if (HasMD != HasMDVec[i]) {
2845 llvm_unreachable(
2846 "Kernels should have consistent work group size definition");
2847 }
2848 }
2849 }
2850
2851 // If the kernels do not have reqd_work_group_size metadata, or non-uniform
2852 // NDRanges are supported, generate OpSpecConstants for the x/y/z dimensions.
Kévin Petit21c23c62020-04-29 01:38:28 +01002853 if (!HasMD || clspv::Option::NonUniformNDRangeSupported()) {
David Neto22f144c2017-06-12 14:26:21 -04002854 //
2855 // Generate OpSpecConstants for x/y/z dimension.
2856 //
2857 // Ops[0] : Result Type ID
2858 // Ops[1] : Constant size for x/y/z dimension (Literal Number).
2859 uint32_t XDimCstID = 0;
2860 uint32_t YDimCstID = 0;
2861 uint32_t ZDimCstID = 0;
2862
alan-bakera1be3322020-04-20 12:48:18 -04002863 // Allocate spec constants for workgroup size.
SJW77b87ad2020-04-21 14:37:52 -05002864 clspv::AddWorkgroupSpecConstants(module);
alan-bakera1be3322020-04-20 12:48:18 -04002865
SJWf93f5f32020-05-05 07:27:56 -05002866 SPIRVOperandVec Ops;
2867 uint32_t result_type_id = getSPIRVType(
2868 dyn_cast<VectorType>(Ty->getPointerElementType())->getElementType());
David Neto22f144c2017-06-12 14:26:21 -04002869
David Neto257c3892018-04-11 13:19:45 -04002870 // X Dimension
2871 Ops << MkId(result_type_id) << MkNum(1);
SJWf93f5f32020-05-05 07:27:56 -05002872 XDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002873
2874 // Y Dimension
2875 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002876 Ops << MkId(result_type_id) << MkNum(1);
SJWf93f5f32020-05-05 07:27:56 -05002877 YDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002878
2879 // Z Dimension
2880 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002881 Ops << MkId(result_type_id) << MkNum(1);
SJWf93f5f32020-05-05 07:27:56 -05002882 ZDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002883
David Neto257c3892018-04-11 13:19:45 -04002884 BuiltinDimVec.push_back(XDimCstID);
2885 BuiltinDimVec.push_back(YDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002886 BuiltinDimVec.push_back(ZDimCstID);
2887
David Neto22f144c2017-06-12 14:26:21 -04002888 //
2889 // Generate OpSpecConstantComposite.
2890 //
2891 // Ops[0] : Result Type ID
2892 // Ops[1] : Constant size for x dimension.
2893 // Ops[2] : Constant size for y dimension.
2894 // Ops[3] : Constant size for z dimension.
David Neto22f144c2017-06-12 14:26:21 -04002895 Ops.clear();
SJWf93f5f32020-05-05 07:27:56 -05002896 Ops << MkId(getSPIRVType(Ty->getPointerElementType())) << MkId(XDimCstID)
David Neto257c3892018-04-11 13:19:45 -04002897 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002898
SJWf93f5f32020-05-05 07:27:56 -05002899 InitializerID =
2900 addSPIRVInst<kConstants>(spv::OpSpecConstantComposite, Ops);
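      // Taken together, this branch emits roughly (illustrative names only):
      //   %x = OpSpecConstant %uint 1
      //   %y = OpSpecConstant %uint 1
      //   %z = OpSpecConstant %uint 1
      //   %wgsize = OpSpecConstantComposite %v3uint %x %y %z
      // The SpecId decorations for %x/%y/%z are emitted later from
      // BuiltinDimVec in GenerateModuleInfo().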
David Neto22f144c2017-06-12 14:26:21 -04002901 }
alan-bakerbed3a882020-04-21 14:42:41 -04002902 } else if (BuiltinType == spv::BuiltInWorkDim) {
2903 // 1. Generate a specialization constant with a default of 3.
2904 // 2. Allocate and annotate a SpecId for the constant.
2905 // 3. Use the spec constant as the initializer for the variable.
SJWf93f5f32020-05-05 07:27:56 -05002906 SPIRVOperandVec Ops;
alan-bakerbed3a882020-04-21 14:42:41 -04002907
2908 //
2909 // Generate OpSpecConstant.
2910 //
2911 // Ops[0] : Result Type ID
2912 // Ops[1] : Default literal value
alan-bakerbed3a882020-04-21 14:42:41 -04002913
SJWf93f5f32020-05-05 07:27:56 -05002914 Ops << MkId(getSPIRVType(IntegerType::get(GV.getContext(), 32)))
2915 << MkNum(3);
alan-bakerbed3a882020-04-21 14:42:41 -04002916
SJWf93f5f32020-05-05 07:27:56 -05002917 InitializerID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakerbed3a882020-04-21 14:42:41 -04002918
2919 //
2920 // Generate SpecId decoration.
2921 //
2922 // Ops[0] : target
2923 // Ops[1] : decoration
2924 // Ops[2] : SpecId
Alan Baker75ccc252020-04-21 17:11:52 -04002925 auto spec_id = AllocateSpecConstant(module, SpecConstant::kWorkDim);
alan-bakerbed3a882020-04-21 14:42:41 -04002926 Ops.clear();
2927 Ops << MkId(InitializerID) << MkNum(spv::DecorationSpecId)
2928 << MkNum(spec_id);
2929
SJWf93f5f32020-05-05 07:27:56 -05002930 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
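    // Roughly (illustrative name): %work_dim = OpSpecConstant %uint 3
    //                              OpDecorate %work_dim SpecId <spec_id>
    // The OpVariable generated below uses %work_dim as its initializer.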
alan-bakere1996972020-05-04 08:38:12 -04002931 } else if (BuiltinType == spv::BuiltInGlobalOffset) {
2932 // 1. Generate a spec constant with a default of {0, 0, 0}.
2933 // 2. Allocate and annotate SpecIds for the constants.
2934 // 3. Use the spec constant as the initializer for the variable.
SJWf93f5f32020-05-05 07:27:56 -05002935 SPIRVOperandVec Ops;
alan-bakere1996972020-05-04 08:38:12 -04002936
2937 //
2938 // Generate OpSpecConstant for each dimension.
2939 //
2940 // Ops[0] : Result Type ID
2941 // Ops[1] : Default literal value
2942 //
SJWf93f5f32020-05-05 07:27:56 -05002943 Ops << MkId(getSPIRVType(IntegerType::get(GV.getContext(), 32)))
2944 << MkNum(0);
2945 uint32_t x_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002946
alan-bakere1996972020-05-04 08:38:12 -04002947 Ops.clear();
SJWf93f5f32020-05-05 07:27:56 -05002948 Ops << MkId(getSPIRVType(IntegerType::get(GV.getContext(), 32)))
2949 << MkNum(0);
2950 uint32_t y_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002951
alan-bakere1996972020-05-04 08:38:12 -04002952 Ops.clear();
SJWf93f5f32020-05-05 07:27:56 -05002953 Ops << MkId(getSPIRVType(IntegerType::get(GV.getContext(), 32)))
2954 << MkNum(0);
2955 uint32_t z_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002956
2957 //
2958 // Generate SpecId decoration for each dimension.
2959 //
2960 // Ops[0] : target
2961 // Ops[1] : decoration
2962 // Ops[2] : SpecId
2963 //
2964 auto spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetX);
2965 Ops.clear();
2966 Ops << MkId(x_id) << MkNum(spv::DecorationSpecId) << MkNum(spec_id);
SJWf93f5f32020-05-05 07:27:56 -05002967 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002968
2969 spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetY);
2970 Ops.clear();
2971 Ops << MkId(y_id) << MkNum(spv::DecorationSpecId) << MkNum(spec_id);
SJWf93f5f32020-05-05 07:27:56 -05002972 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002973
2974 spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetZ);
2975 Ops.clear();
2976 Ops << MkId(z_id) << MkNum(spv::DecorationSpecId) << MkNum(spec_id);
SJWf93f5f32020-05-05 07:27:56 -05002977 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002978
2979 //
2980 // Generate OpSpecConstantComposite.
2981 //
2982 // Ops[0] : type id
2983 // Ops[1..n-1] : elements
2984 //
alan-bakere1996972020-05-04 08:38:12 -04002985 Ops.clear();
SJWf93f5f32020-05-05 07:27:56 -05002986 Ops << MkId(getSPIRVType(GV.getType()->getPointerElementType()))
2987 << MkId(x_id) << MkId(y_id) << MkId(z_id);
2988 InitializerID = addSPIRVInst<kConstants>(spv::OpSpecConstantComposite, Ops);
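    // Taken together, this branch emits roughly (illustrative names only):
    //   %off_x = OpSpecConstant %uint 0   ; likewise %off_y and %off_z
    //   OpDecorate %off_x SpecId <id_x>   ; likewise for y and z
    //   %global_offset = OpSpecConstantComposite %v3uint %off_x %off_y %off_z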
David Neto22f144c2017-06-12 14:26:21 -04002989 }
2990
David Neto22f144c2017-06-12 14:26:21 -04002991 //
2992 // Generate OpVariable.
2993 //
2994 // Ops[0] : Result Type ID
2995 // Ops[1] : Storage Class
SJWf93f5f32020-05-05 07:27:56 -05002996 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002997
David Neto85082642018-03-24 06:55:20 -07002998 const auto AS = PTy->getAddressSpace();
SJWf93f5f32020-05-05 07:27:56 -05002999 Ops << MkId(getSPIRVType(Ty)) << MkNum(GetStorageClass(AS));
David Neto22f144c2017-06-12 14:26:21 -04003000
David Neto85082642018-03-24 06:55:20 -07003001 const bool module_scope_constant_external_init =
David Neto862b7d82018-06-14 18:48:37 -04003002 (AS == AddressSpace::Constant) && GV.hasInitializer() &&
David Neto85082642018-03-24 06:55:20 -07003003 clspv::Option::ModuleConstantsInStorageBuffer();
3004
Kévin Petit23d5f182019-08-13 16:21:29 +01003005 if (GV.hasInitializer()) {
3006 auto GVInit = GV.getInitializer();
3007 if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
SJWf93f5f32020-05-05 07:27:56 -05003008 InitializerID = getSPIRVValue(GVInit);
David Neto85082642018-03-24 06:55:20 -07003009 }
3010 }
Kévin Petit23d5f182019-08-13 16:21:29 +01003011
3012 if (0 != InitializerID) {
Kévin Petitbbbda972020-03-03 19:16:31 +00003013 // Emit the ID of the initializer as part of the variable definition.
Kévin Petit23d5f182019-08-13 16:21:29 +01003014 Ops << MkId(InitializerID);
3015 }
SJWf93f5f32020-05-05 07:27:56 -05003016 const uint32_t var_id = addSPIRVInst<kGlobalVariables>(spv::OpVariable, Ops);
David Neto85082642018-03-24 06:55:20 -07003017
SJWf93f5f32020-05-05 07:27:56 -05003018 VMap[&GV] = var_id;
David Neto22f144c2017-06-12 14:26:21 -04003019
alan-bakere1996972020-05-04 08:38:12 -04003020 auto IsOpenCLBuiltin = [](spv::BuiltIn builtin) {
3021 return builtin == spv::BuiltInWorkDim ||
3022 builtin == spv::BuiltInGlobalOffset;
3023 };
3024
alan-bakere1996972020-05-04 08:38:12 -04003025 // If we have a builtin (not an OpenCL builtin).
3026 if (spv::BuiltInMax != BuiltinType && !IsOpenCLBuiltin(BuiltinType)) {
David Neto22f144c2017-06-12 14:26:21 -04003027 //
3028 // Generate OpDecorate.
3029 //
3030 // DOps[0] = Target ID
3031 // DOps[1] = Decoration (Builtin)
3032 // DOps[2] = BuiltIn ID
3033 uint32_t ResultID;
3034
3035 // WorkgroupSize is different: we decorate the constant composite that holds
3036 // its value, rather than the variable that we use to access the value.
3037 if (spv::BuiltInWorkgroupSize == BuiltinType) {
3038 ResultID = InitializerID;
David Netoa60b00b2017-09-15 16:34:09 -04003039 // Save both the value and variable IDs for later.
3040 WorkgroupSizeValueID = InitializerID;
SJWf93f5f32020-05-05 07:27:56 -05003041 WorkgroupSizeVarID = getSPIRVValue(&GV);
David Neto22f144c2017-06-12 14:26:21 -04003042 } else {
SJWf93f5f32020-05-05 07:27:56 -05003043 ResultID = getSPIRVValue(&GV);
David Neto22f144c2017-06-12 14:26:21 -04003044 }
3045
SJWf93f5f32020-05-05 07:27:56 -05003046 SPIRVOperandVec DOps;
David Neto257c3892018-04-11 13:19:45 -04003047 DOps << MkId(ResultID) << MkNum(spv::DecorationBuiltIn)
3048 << MkNum(BuiltinType);
David Neto22f144c2017-06-12 14:26:21 -04003049
SJWf93f5f32020-05-05 07:27:56 -05003050 addSPIRVInst<kAnnotations>(spv::OpDecorate, DOps);
David Neto85082642018-03-24 06:55:20 -07003051 } else if (module_scope_constant_external_init) {
3052 // This module scope constant is initialized from a storage buffer with data
3053 // provided by the host at binding 0 of the next descriptor set.
SJW77b87ad2020-04-21 14:37:52 -05003054 const uint32_t descriptor_set = TakeDescriptorIndex(module);
David Neto85082642018-03-24 06:55:20 -07003055
David Neto862b7d82018-06-14 18:48:37 -04003056 // Emit the initializer to the descriptor map file.
David Neto85082642018-03-24 06:55:20 -07003057 // Use "kind,buffer" to indicate storage buffer. We might want to expand
3058 // that later to other types, like uniform buffer.
alan-bakerf5e5f692018-11-27 08:33:24 -05003059 std::string hexbytes;
3060 llvm::raw_string_ostream str(hexbytes);
3061 clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003062 version0::DescriptorMapEntry::ConstantData constant_data = {ArgKind::Buffer,
3063 str.str()};
3064 descriptorMapEntries->emplace_back(std::move(constant_data), descriptor_set,
3065 0);
David Neto85082642018-03-24 06:55:20 -07003066
SJWf93f5f32020-05-05 07:27:56 -05003067 SPIRVOperandVec DOps;
David Neto85082642018-03-24 06:55:20 -07003068
3069 // OpDecorate %var DescriptorSet <descriptor_set>
David Neto257c3892018-04-11 13:19:45 -04003070 DOps << MkId(var_id) << MkNum(spv::DecorationDescriptorSet)
3071 << MkNum(descriptor_set);
SJWf93f5f32020-05-05 07:27:56 -05003072 addSPIRVInst<kAnnotations>(spv::OpDecorate, DOps);
SJW69939d52020-04-16 07:29:07 -05003073
3074 // OpDecorate %var Binding <binding>
3075 DOps.clear();
3076 DOps << MkId(var_id) << MkNum(spv::DecorationBinding) << MkNum(0);
SJWf93f5f32020-05-05 07:27:56 -05003077 addSPIRVInst<kAnnotations>(spv::OpDecorate, DOps);
David Netoc6f3ab22018-04-06 18:02:31 -04003078 }
3079}
3080
SJW77b87ad2020-04-21 14:37:52 -05003081void SPIRVProducerPass::GenerateDescriptorMapInfo(Function &F) {
3082 const auto &DL = module->getDataLayout();
David Netoc5fb5242018-07-30 13:28:31 -04003083 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
3084 return;
3085 }
Kévin Petit717f8572020-04-06 17:31:53 +01003086 // Add entries for each kernel
3087 version0::DescriptorMapEntry::KernelDeclData kernel_decl_data = {
3088 F.getName().str()};
3089 descriptorMapEntries->emplace_back(std::move(kernel_decl_data));
3090
David Neto862b7d82018-06-14 18:48:37 -04003091 // Gather the list of resources that are used by this function's arguments.
3092 auto &resource_var_at_index = FunctionToResourceVarsMap[&F];
3093
David Neto862b7d82018-06-14 18:48:37 -04003094 auto *fty = F.getType()->getPointerElementType();
3095 auto *func_ty = dyn_cast<FunctionType>(fty);
3096
alan-baker038e9242019-04-19 22:14:41 -04003097 // If we've clustered POD arguments, then argument details are in metadata.
David Neto862b7d82018-06-14 18:48:37 -04003098 // If an argument maps to a resource variable, then get descriptor set and
3099 // binding from the resource variable. Other info comes from the metadata.
alan-bakerff6c9292020-05-04 08:32:09 -04003100 const auto *arg_map = F.getMetadata(clspv::KernelArgMapMetadataName());
3101 auto local_spec_id_md =
3102 module->getNamedMetadata(clspv::LocalSpecIdMetadataName());
David Neto862b7d82018-06-14 18:48:37 -04003103 if (arg_map) {
3104 for (const auto &arg : arg_map->operands()) {
3105 const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
alan-bakerff6c9292020-05-04 08:32:09 -04003106 assert(arg_node->getNumOperands() == 6);
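      // Each operand of the kernel arg map metadata is expected to be the
      // tuple (name, old_index, new_index, offset, size, arg_kind), which is
      // what the extraction code below relies on.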
David Neto862b7d82018-06-14 18:48:37 -04003107 const auto name =
3108 dyn_cast<MDString>(arg_node->getOperand(0))->getString();
3109 const auto old_index =
3110 dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
3111 // Remapped argument index
alan-bakerb6b09dc2018-11-08 16:59:28 -05003112 const size_t new_index = static_cast<size_t>(
3113 dyn_extract<ConstantInt>(arg_node->getOperand(2))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04003114 const auto offset =
3115 dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
Kévin PETITa353c832018-03-20 23:21:21 +00003116 const auto arg_size =
3117 dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
alan-bakerc4579bb2020-04-29 14:15:50 -04003118 const auto argKind = clspv::GetArgKindFromName(
3119 dyn_cast<MDString>(arg_node->getOperand(5))->getString().str());
alan-bakerf5e5f692018-11-27 08:33:24 -05003120
alan-bakerff6c9292020-05-04 08:32:09 -04003121 // If this is a local memory argument, find the right spec id for this
3122 // argument.
3123 int64_t spec_id = -1;
3124 if (argKind == clspv::ArgKind::Local) {
3125 for (auto spec_id_arg : local_spec_id_md->operands()) {
3126 if ((&F == dyn_cast<Function>(
3127 dyn_cast<ValueAsMetadata>(spec_id_arg->getOperand(0))
3128 ->getValue())) &&
3129 (new_index ==
3130 mdconst::extract<ConstantInt>(spec_id_arg->getOperand(1))
3131 ->getZExtValue())) {
3132 spec_id = mdconst::extract<ConstantInt>(spec_id_arg->getOperand(2))
3133 ->getSExtValue();
3134 break;
3135 }
3136 }
3137 }
alan-bakerf5e5f692018-11-27 08:33:24 -05003138 uint32_t descriptor_set = 0;
3139 uint32_t binding = 0;
3140 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003141 F.getName().str(), name.str(), static_cast<uint32_t>(old_index),
3142 argKind, static_cast<uint32_t>(spec_id),
alan-bakerf5e5f692018-11-27 08:33:24 -05003143 // This will be set below for pointer-to-local args.
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003144 0, static_cast<uint32_t>(offset), static_cast<uint32_t>(arg_size)};
David Neto862b7d82018-06-14 18:48:37 -04003145 if (spec_id > 0) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003146 kernel_data.local_element_size = static_cast<uint32_t>(GetTypeAllocSize(
3147 func_ty->getParamType(unsigned(new_index))->getPointerElementType(),
3148 DL));
David Neto862b7d82018-06-14 18:48:37 -04003149 } else {
3150 auto *info = resource_var_at_index[new_index];
3151 assert(info);
alan-bakerf5e5f692018-11-27 08:33:24 -05003152 descriptor_set = info->descriptor_set;
3153 binding = info->binding;
David Neto862b7d82018-06-14 18:48:37 -04003154 }
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003155 descriptorMapEntries->emplace_back(std::move(kernel_data), descriptor_set,
3156 binding);
David Neto862b7d82018-06-14 18:48:37 -04003157 }
3158 } else {
3159 // There is no argument map.
3160 // Take descriptor info from the resource variable calls.
Kévin PETITa353c832018-03-20 23:21:21 +00003161 // Take argument name and size from the arguments list.
David Neto862b7d82018-06-14 18:48:37 -04003162
3163 SmallVector<Argument *, 4> arguments;
3164 for (auto &arg : F.args()) {
3165 arguments.push_back(&arg);
3166 }
3167
3168 unsigned arg_index = 0;
3169 for (auto *info : resource_var_at_index) {
3170 if (info) {
Kévin PETITa353c832018-03-20 23:21:21 +00003171 auto arg = arguments[arg_index];
alan-bakerb6b09dc2018-11-08 16:59:28 -05003172 unsigned arg_size = 0;
alan-baker9b0ec3c2020-04-06 14:45:34 -04003173 if (info->arg_kind == clspv::ArgKind::Pod ||
3174 info->arg_kind == clspv::ArgKind::PodUBO ||
3175 info->arg_kind == clspv::ArgKind::PodPushConstant) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003176 arg_size = static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
Kévin PETITa353c832018-03-20 23:21:21 +00003177 }
3178
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003179 // The pointer-to-local members are unused in this case, and the offset is
3180 // always zero.
alan-bakerf5e5f692018-11-27 08:33:24 -05003181 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003182 F.getName().str(),
3183 arg->getName().str(),
3184 arg_index,
alan-bakerc4579bb2020-04-29 14:15:50 -04003185 info->arg_kind,
alan-baker21574d32020-01-29 16:00:31 -05003186 0,
3187 0,
3188 0,
3189 arg_size};
alan-bakerf5e5f692018-11-27 08:33:24 -05003190 descriptorMapEntries->emplace_back(std::move(kernel_data),
3191 info->descriptor_set, info->binding);
David Neto862b7d82018-06-14 18:48:37 -04003192 }
3193 arg_index++;
3194 }
3195 // Generate mappings for pointer-to-local arguments.
3196 for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
3197 Argument *arg = arguments[arg_index];
Alan Baker202c8c72018-08-13 13:47:44 -04003198 auto where = LocalArgSpecIds.find(arg);
3199 if (where != LocalArgSpecIds.end()) {
3200 auto &local_arg_info = LocalSpecIdInfoMap[where->second];
alan-bakerf5e5f692018-11-27 08:33:24 -05003201 // The POD-argument members are unused in this case.
3202 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003203 F.getName().str(),
3204 arg->getName().str(),
alan-bakerf5e5f692018-11-27 08:33:24 -05003205 arg_index,
3206 ArgKind::Local,
3207 static_cast<uint32_t>(local_arg_info.spec_id),
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003208 static_cast<uint32_t>(
3209 GetTypeAllocSize(local_arg_info.elem_type, DL)),
alan-bakerf5e5f692018-11-27 08:33:24 -05003210 0,
3211 0};
3212 // Pointer-to-local arguments do not utilize descriptor set and binding.
3213 descriptorMapEntries->emplace_back(std::move(kernel_data), 0, 0);
David Neto862b7d82018-06-14 18:48:37 -04003214 }
3215 }
3216 }
3217}
3218
David Neto22f144c2017-06-12 14:26:21 -04003219void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
David Neto22f144c2017-06-12 14:26:21 -04003220 ValueMapType &VMap = getValueMap();
3221 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04003222 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
3223 auto &GlobalConstArgSet = getGlobalConstArgSet();
3224
3225 FunctionType *FTy = F.getFunctionType();
3226
3227 //
David Neto22f144c2017-06-12 14:26:21 -04003228 // Generate OpFunction.
3229 //
3230
3231 // FOps[0] : Result Type ID
3232 // FOps[1] : Function Control
3233 // FOps[2] : Function Type ID
SJWf93f5f32020-05-05 07:27:56 -05003234 SPIRVOperandVec FOps;
David Neto22f144c2017-06-12 14:26:21 -04003235
3236 // Find SPIRV instruction for return type.
SJWf93f5f32020-05-05 07:27:56 -05003237 FOps << MkId(getSPIRVType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04003238
3239 // Check function attributes for SPIRV Function Control.
3240 uint32_t FuncControl = spv::FunctionControlMaskNone;
3241 if (F.hasFnAttribute(Attribute::AlwaysInline)) {
3242 FuncControl |= spv::FunctionControlInlineMask;
3243 }
3244 if (F.hasFnAttribute(Attribute::NoInline)) {
3245 FuncControl |= spv::FunctionControlDontInlineMask;
3246 }
3247 // TODO: Check llvm attribute for Function Control Pure.
3248 if (F.hasFnAttribute(Attribute::ReadOnly)) {
3249 FuncControl |= spv::FunctionControlPureMask;
3250 }
3251 // TODO: Check llvm attribute for Function Control Const.
3252 if (F.hasFnAttribute(Attribute::ReadNone)) {
3253 FuncControl |= spv::FunctionControlConstMask;
3254 }
3255
David Neto257c3892018-04-11 13:19:45 -04003256 FOps << MkNum(FuncControl);
David Neto22f144c2017-06-12 14:26:21 -04003257
3258 uint32_t FTyID;
3259 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3260 SmallVector<Type *, 4> NewFuncParamTys;
3261 FunctionType *NewFTy =
3262 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
SJWf93f5f32020-05-05 07:27:56 -05003263 FTyID = getSPIRVType(NewFTy);
David Neto22f144c2017-06-12 14:26:21 -04003264 } else {
David Neto9ed8e2f2018-03-24 06:47:24 -07003265 // Handle regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04003266 if (GlobalConstFuncTyMap.count(FTy)) {
SJWf93f5f32020-05-05 07:27:56 -05003267 FTyID = getSPIRVType(GlobalConstFuncTyMap[FTy].first);
David Neto22f144c2017-06-12 14:26:21 -04003268 } else {
SJWf93f5f32020-05-05 07:27:56 -05003269 FTyID = getSPIRVType(FTy);
David Neto22f144c2017-06-12 14:26:21 -04003270 }
3271 }
3272
David Neto257c3892018-04-11 13:19:45 -04003273 FOps << MkId(FTyID);
David Neto22f144c2017-06-12 14:26:21 -04003274
SJWf93f5f32020-05-05 07:27:56 -05003275 // Generate SPIRV instruction for function.
3276 SPIRVID FID = addSPIRVInst(spv::OpFunction, FOps);
3277 VMap[&F] = FID;
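  // Roughly (illustrative name): %fn = OpFunction %ret_type <control> %fn_type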
David Neto22f144c2017-06-12 14:26:21 -04003278
SJWf93f5f32020-05-05 07:27:56 -05003279 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3280 EntryPoints.push_back(std::make_pair(&F, FID));
3281 }
David Neto22f144c2017-06-12 14:26:21 -04003282
David Neto482550a2018-03-24 05:21:07 -07003283 if (clspv::Option::ShowIDs()) {
SJWf93f5f32020-05-05 07:27:56 -05003284 errs() << "Function " << F.getName() << " is " << FID << "\n";
David Netob05675d2018-02-16 12:37:49 -05003285 }
David Neto22f144c2017-06-12 14:26:21 -04003286
3287 //
3288 // Generate OpFunctionParameter for Normal function.
3289 //
3290
3291 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
alan-bakere9308012019-03-15 10:25:13 -04003292
David Neto22f144c2017-06-12 14:26:21 -04003293 // Iterate Argument for name instead of param type from function type.
3294 unsigned ArgIdx = 0;
3295 for (Argument &Arg : F.args()) {
David Neto22f144c2017-06-12 14:26:21 -04003296 // ParamOps[0] : Result Type ID
SJWf93f5f32020-05-05 07:27:56 -05003297 SPIRVOperandVec ParamOps;
David Neto22f144c2017-06-12 14:26:21 -04003298
3299 // Find SPIRV instruction for parameter type.
SJWf93f5f32020-05-05 07:27:56 -05003300 uint32_t ParamTyID = getSPIRVType(Arg.getType());
David Neto22f144c2017-06-12 14:26:21 -04003301 if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
3302 if (GlobalConstFuncTyMap.count(FTy)) {
3303 if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
3304 Type *EleTy = PTy->getPointerElementType();
3305 Type *ArgTy =
3306 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
SJWf93f5f32020-05-05 07:27:56 -05003307 ParamTyID = getSPIRVType(ArgTy);
David Neto22f144c2017-06-12 14:26:21 -04003308 GlobalConstArgSet.insert(&Arg);
3309 }
3310 }
3311 }
David Neto257c3892018-04-11 13:19:45 -04003312 ParamOps << MkId(ParamTyID);
David Neto22f144c2017-06-12 14:26:21 -04003313
3314 // Generate SPIRV instruction for parameter.
SJWf93f5f32020-05-05 07:27:56 -05003315 uint32_t param_id = addSPIRVInst(spv::OpFunctionParameter, ParamOps);
3316 VMap[&Arg] = param_id;
3317
3318 if (CalledWithCoherentResource(Arg)) {
3319 // If the arg is passed a coherent resource ever, then decorate this
3320 // parameter with Coherent too.
3321 SPIRVOperandVec decoration_ops;
3322 decoration_ops << MkId(param_id) << MkNum(spv::DecorationCoherent);
3323 addSPIRVInst<kAnnotations>(spv::OpDecorate, decoration_ops);
3324 }
David Neto22f144c2017-06-12 14:26:21 -04003325
3326 ArgIdx++;
3327 }
3328 }
3329}
3330
SJW77b87ad2020-04-21 14:37:52 -05003331void SPIRVProducerPass::GenerateModuleInfo() {
David Neto22f144c2017-06-12 14:26:21 -04003332 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04003333 ValueList &EntryPointInterfaces = getEntryPointInterfacesVec();
David Neto22f144c2017-06-12 14:26:21 -04003334 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
3335
David Neto22f144c2017-06-12 14:26:21 -04003336 //
3337 // Generate OpCapability
3338 //
3339 // TODO: Which llvm information is mapped to SPIRV Capability?
3340
3341 // Ops[0] = Capability
SJWf93f5f32020-05-05 07:27:56 -05003342 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003343
SJWf93f5f32020-05-05 07:27:56 -05003344 addSPIRVInst<kCapabilities>(spv::OpCapability, spv::CapabilityShader);
David Neto22f144c2017-06-12 14:26:21 -04003345
alan-bakerf906d2b2019-12-10 11:26:23 -05003346 bool write_without_format = false;
3347 bool sampled_1d = false;
3348 bool image_1d = false;
David Neto22f144c2017-06-12 14:26:21 -04003349 for (Type *Ty : getTypeList()) {
alan-bakerb39c8262019-03-08 14:03:37 -05003350 if (clspv::Option::Int8Support() && Ty->isIntegerTy(8)) {
3351 // Generate OpCapability for i8 type.
SJWf93f5f32020-05-05 07:27:56 -05003352 addSPIRVInst<kCapabilities>(spv::OpCapability, spv::CapabilityInt8);
alan-bakerb39c8262019-03-08 14:03:37 -05003353 } else if (Ty->isIntegerTy(16)) {
David Neto22f144c2017-06-12 14:26:21 -04003354 // Generate OpCapability for i16 type.
SJWf93f5f32020-05-05 07:27:56 -05003355 addSPIRVInst<kCapabilities>(spv::OpCapability, spv::CapabilityInt16);
David Neto22f144c2017-06-12 14:26:21 -04003356 } else if (Ty->isIntegerTy(64)) {
3357 // Generate OpCapability for i64 type.
SJWf93f5f32020-05-05 07:27:56 -05003358 addSPIRVInst<kCapabilities>(spv::OpCapability, spv::CapabilityInt64);
David Neto22f144c2017-06-12 14:26:21 -04003359 } else if (Ty->isHalfTy()) {
3360 // Generate OpCapability for half type.
SJWf93f5f32020-05-05 07:27:56 -05003361 addSPIRVInst<kCapabilities>(spv::OpCapability, spv::CapabilityFloat16);
David Neto22f144c2017-06-12 14:26:21 -04003362 } else if (Ty->isDoubleTy()) {
3363 // Generate OpCapability for double type.
SJWf93f5f32020-05-05 07:27:56 -05003364 addSPIRVInst<kCapabilities>(spv::OpCapability, spv::CapabilityFloat64);
David Neto22f144c2017-06-12 14:26:21 -04003365 } else if (auto *STy = dyn_cast<StructType>(Ty)) {
3366 if (STy->isOpaque()) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003367 if (STy->getName().startswith("opencl.image1d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05003368 STy->getName().startswith("opencl.image1d_array_wo_t") ||
alan-bakerf906d2b2019-12-10 11:26:23 -05003369 STy->getName().startswith("opencl.image2d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05003370 STy->getName().startswith("opencl.image2d_array_wo_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05003371 STy->getName().startswith("opencl.image3d_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003372 write_without_format = true;
3373 }
3374 if (STy->getName().startswith("opencl.image1d_ro_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05003375 STy->getName().startswith("opencl.image1d_wo_t") ||
3376 STy->getName().startswith("opencl.image1d_array_ro_t") ||
3377 STy->getName().startswith("opencl.image1d_array_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003378 if (STy->getName().contains(".sampled"))
3379 sampled_1d = true;
3380 else
3381 image_1d = true;
David Neto22f144c2017-06-12 14:26:21 -04003382 }
3383 }
3384 }
3385 }
3386
alan-bakerf906d2b2019-12-10 11:26:23 -05003387 if (write_without_format) {
3388 // Generate OpCapability for write only image type.
SJWf93f5f32020-05-05 07:27:56 -05003389 addSPIRVInst<kCapabilities>(spv::OpCapability,
3390 spv::CapabilityStorageImageWriteWithoutFormat);
alan-bakerf906d2b2019-12-10 11:26:23 -05003391 }
3392 if (image_1d) {
3393 // Generate OpCapability for unsampled 1D image type.
SJWf93f5f32020-05-05 07:27:56 -05003394 addSPIRVInst<kCapabilities>(spv::OpCapability, spv::CapabilityImage1D);
alan-bakerf906d2b2019-12-10 11:26:23 -05003395 } else if (sampled_1d) {
3396 // Generate OpCapability for sampled 1D image type.
SJWf93f5f32020-05-05 07:27:56 -05003397 addSPIRVInst<kCapabilities>(spv::OpCapability, spv::CapabilitySampled1D);
alan-bakerf906d2b2019-12-10 11:26:23 -05003398 }
3399
David Neto5c22a252018-03-15 16:07:41 -04003400 { // OpCapability ImageQuery
3401 bool hasImageQuery = false;
SJW77b87ad2020-04-21 14:37:52 -05003402 for (const auto &SymVal : module->getValueSymbolTable()) {
alan-bakerf67468c2019-11-25 15:51:49 -05003403 if (auto F = dyn_cast<Function>(SymVal.getValue())) {
SJW173c7e92020-03-16 08:44:47 -05003404 if (IsImageQuery(F)) {
alan-bakerf67468c2019-11-25 15:51:49 -05003405 hasImageQuery = true;
3406 break;
3407 }
David Neto5c22a252018-03-15 16:07:41 -04003408 }
3409 }
alan-bakerf67468c2019-11-25 15:51:49 -05003410
David Neto5c22a252018-03-15 16:07:41 -04003411 if (hasImageQuery) {
SJWf93f5f32020-05-05 07:27:56 -05003412 addSPIRVInst<kCapabilities>(spv::OpCapability, spv::CapabilityImageQuery);
David Neto5c22a252018-03-15 16:07:41 -04003413 }
3414 }
3415
David Neto22f144c2017-06-12 14:26:21 -04003416 if (hasVariablePointers()) {
3417 //
David Neto22f144c2017-06-12 14:26:21 -04003418 // Generate OpCapability.
3419 //
3420 // Ops[0] = Capability
3421 //
SJWf93f5f32020-05-05 07:27:56 -05003422 addSPIRVInst<kCapabilities>(spv::OpCapability,
3423 spv::CapabilityVariablePointers);
alan-baker5b86ed72019-02-15 08:26:50 -05003424 } else if (hasVariablePointersStorageBuffer()) {
3425 //
3426 // Generate OpCapability.
3427 //
3428 // Ops[0] = Capability
3429 //
SJWf93f5f32020-05-05 07:27:56 -05003430 addSPIRVInst<kCapabilities>(spv::OpCapability,
3431 spv::CapabilityVariablePointersStorageBuffer);
alan-baker5b86ed72019-02-15 08:26:50 -05003432 }
3433
3434 // Always add the storage buffer extension
3435 {
David Neto22f144c2017-06-12 14:26:21 -04003436 //
3437 // Generate OpExtension.
3438 //
3439 // Ops[0] = Name (Literal String)
3440 //
SJWf93f5f32020-05-05 07:27:56 -05003441 addSPIRVInst<kExtensions>(spv::OpExtension,
3442 "SPV_KHR_storage_buffer_storage_class");
alan-baker5b86ed72019-02-15 08:26:50 -05003443 }
David Neto22f144c2017-06-12 14:26:21 -04003444
alan-baker5b86ed72019-02-15 08:26:50 -05003445 if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
3446 //
3447 // Generate OpExtension.
3448 //
3449 // Ops[0] = Name (Literal String)
3450 //
SJWf93f5f32020-05-05 07:27:56 -05003451 addSPIRVInst<kExtensions>(spv::OpExtension, "SPV_KHR_variable_pointers");
David Neto22f144c2017-06-12 14:26:21 -04003452 }
3453
3454 //
3455 // Generate OpMemoryModel
3456 //
3457 // Memory model for Vulkan will always be GLSL450.
3458
3459 // Ops[0] = Addressing Model
3460 // Ops[1] = Memory Model
3461 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003462 Ops << MkNum(spv::AddressingModelLogical) << MkNum(spv::MemoryModelGLSL450);
David Neto22f144c2017-06-12 14:26:21 -04003463
SJWf93f5f32020-05-05 07:27:56 -05003464 addSPIRVInst<kMemoryModel>(spv::OpMemoryModel, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003465
3466 //
3467 // Generate OpEntryPoint
3468 //
3469 for (auto EntryPoint : EntryPoints) {
3470 // Ops[0] = Execution Model
3471 // Ops[1] = EntryPoint ID
3472 // Ops[2] = Name (Literal String)
3473 // ...
3474 //
3475 // TODO: Do we need to consider Interface ID for forward references???
3476 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003477 const StringRef &name = EntryPoint.first->getName();
David Neto257c3892018-04-11 13:19:45 -04003478 Ops << MkNum(spv::ExecutionModelGLCompute) << MkId(EntryPoint.second)
3479 << MkString(name);
David Neto22f144c2017-06-12 14:26:21 -04003480
David Neto22f144c2017-06-12 14:26:21 -04003481 for (Value *Interface : EntryPointInterfaces) {
SJWf93f5f32020-05-05 07:27:56 -05003482 Ops << MkId(getSPIRVValue(Interface));
David Neto22f144c2017-06-12 14:26:21 -04003483 }
3484
SJWf93f5f32020-05-05 07:27:56 -05003485 addSPIRVInst<kEntryPoints>(spv::OpEntryPoint, Ops);
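    // Roughly (illustrative names):
    //   OpEntryPoint GLCompute %kernel "kernel_name" %interface_var_0 ...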
David Neto22f144c2017-06-12 14:26:21 -04003486 }
3487
3488 for (auto EntryPoint : EntryPoints) {
Kévin Petit21c23c62020-04-29 01:38:28 +01003489 const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
3490 ->getMetadata("reqd_work_group_size");
3491 if ((MD != nullptr) && !clspv::Option::NonUniformNDRangeSupported()) {
David Neto22f144c2017-06-12 14:26:21 -04003492
3493 if (!BuiltinDimVec.empty()) {
3494 llvm_unreachable(
3495 "Kernels should have consistent work group size definition");
3496 }
3497
3498 //
3499 // Generate OpExecutionMode
3500 //
3501
3502 // Ops[0] = Entry Point ID
3503 // Ops[1] = Execution Mode
3504 // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
3505 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003506 Ops << MkId(EntryPoint.second) << MkNum(spv::ExecutionModeLocalSize);
David Neto22f144c2017-06-12 14:26:21 -04003507
3508 uint32_t XDim = static_cast<uint32_t>(
3509 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
3510 uint32_t YDim = static_cast<uint32_t>(
3511 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
3512 uint32_t ZDim = static_cast<uint32_t>(
3513 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
3514
David Neto257c3892018-04-11 13:19:45 -04003515 Ops << MkNum(XDim) << MkNum(YDim) << MkNum(ZDim);
David Neto22f144c2017-06-12 14:26:21 -04003516
SJWf93f5f32020-05-05 07:27:56 -05003517 addSPIRVInst<kExecutionModes>(spv::OpExecutionMode, Ops);
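      // Roughly: OpExecutionMode %kernel LocalSize <XDim> <YDim> <ZDim>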
David Neto22f144c2017-06-12 14:26:21 -04003518 }
3519 }
3520
3521 //
3522 // Generate OpSource.
3523 //
3524 // Ops[0] = SourceLanguage ID
3525 // Ops[1] = Version (LiteralNum)
3526 //
3527 Ops.clear();
Kévin Petitf0515712020-01-07 18:29:20 +00003528 switch (clspv::Option::Language()) {
3529 case clspv::Option::SourceLanguage::OpenCL_C_10:
3530 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(100);
3531 break;
3532 case clspv::Option::SourceLanguage::OpenCL_C_11:
3533 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(110);
3534 break;
3535 case clspv::Option::SourceLanguage::OpenCL_C_12:
Kévin Petit0fc88042019-04-09 23:25:02 +01003536 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(120);
Kévin Petitf0515712020-01-07 18:29:20 +00003537 break;
3538 case clspv::Option::SourceLanguage::OpenCL_C_20:
3539 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(200);
3540 break;
3541 case clspv::Option::SourceLanguage::OpenCL_CPP:
3542 Ops << MkNum(spv::SourceLanguageOpenCL_CPP) << MkNum(100);
3543 break;
3544 default:
3545 Ops << MkNum(spv::SourceLanguageUnknown) << MkNum(0);
3546 break;
Kévin Petit0fc88042019-04-09 23:25:02 +01003547 }
David Neto22f144c2017-06-12 14:26:21 -04003548
SJWf93f5f32020-05-05 07:27:56 -05003549 addSPIRVInst<kDebug>(spv::OpSource, Ops);
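  // For example, OpenCL C 1.2 input emits roughly: OpSource OpenCL_C 120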
David Neto22f144c2017-06-12 14:26:21 -04003550
3551 if (!BuiltinDimVec.empty()) {
3552 //
3553 // Generate OpDecorates for x/y/z dimension.
3554 //
3555 // Ops[0] = Target ID
3556 // Ops[1] = Decoration (SpecId)
David Neto257c3892018-04-11 13:19:45 -04003557 // Ops[2] = Specialization Constant ID (Literal Number)
David Neto22f144c2017-06-12 14:26:21 -04003558
3559 // X Dimension
3560 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003561 Ops << MkId(BuiltinDimVec[0]) << MkNum(spv::DecorationSpecId) << MkNum(0);
SJWf93f5f32020-05-05 07:27:56 -05003562 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003563
3564 // Y Dimension
3565 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003566 Ops << MkId(BuiltinDimVec[1]) << MkNum(spv::DecorationSpecId) << MkNum(1);
SJWf93f5f32020-05-05 07:27:56 -05003567 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003568
3569 // Z Dimension
3570 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003571 Ops << MkId(BuiltinDimVec[2]) << MkNum(spv::DecorationSpecId) << MkNum(2);
SJWf93f5f32020-05-05 07:27:56 -05003572 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003573 }
3574}
3575
David Netob6e2e062018-04-25 10:32:06 -04003576void SPIRVProducerPass::GenerateEntryPointInitialStores() {
3577 // Work around a driver bug. Initializers on Private variables might not
3578 // work. So the start of the kernel should store the initializer value to the
3579 // variables. Yes, *every* entry point pays this cost if *any* entry point
3580 // uses this builtin. At this point I judge this to be an acceptable tradeoff
3581 // of complexity vs. runtime, for a broken driver.
alan-bakerb6b09dc2018-11-08 16:59:28 -05003582 // TODO(dneto): Remove this at some point once fixed drivers are widely
3583 // available.
David Netob6e2e062018-04-25 10:32:06 -04003584 if (WorkgroupSizeVarID) {
3585 assert(WorkgroupSizeValueID);
3586
SJWf93f5f32020-05-05 07:27:56 -05003587 SPIRVOperandVec Ops;
David Netob6e2e062018-04-25 10:32:06 -04003588 Ops << MkId(WorkgroupSizeVarID) << MkId(WorkgroupSizeValueID);
3589
SJWf93f5f32020-05-05 07:27:56 -05003590 addSPIRVInst(spv::OpStore, Ops);
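    // Roughly: OpStore %workgroup_size_var %workgroup_size_const
    // (illustrative names for WorkgroupSizeVarID and WorkgroupSizeValueID).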
David Netob6e2e062018-04-25 10:32:06 -04003591 }
3592}
3593
David Neto22f144c2017-06-12 14:26:21 -04003594void SPIRVProducerPass::GenerateFuncBody(Function &F) {
David Neto22f144c2017-06-12 14:26:21 -04003595 ValueMapType &VMap = getValueMap();
3596
David Netob6e2e062018-04-25 10:32:06 -04003597 const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;
David Neto22f144c2017-06-12 14:26:21 -04003598
3599 for (BasicBlock &BB : F) {
3600 // Register BasicBlock to ValueMap.
David Neto22f144c2017-06-12 14:26:21 -04003601
3602 //
3603 // Generate OpLabel for Basic Block.
3604 //
SJWf93f5f32020-05-05 07:27:56 -05003605 VMap[&BB] = addSPIRVInst(spv::OpLabel);
David Neto22f144c2017-06-12 14:26:21 -04003606
David Neto6dcd4712017-06-23 11:06:47 -04003607 // OpVariable instructions must come first.
3608 for (Instruction &I : BB) {
alan-baker5b86ed72019-02-15 08:26:50 -05003609 if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
3610 // Allocating a pointer requires variable pointers.
3611 if (alloca->getAllocatedType()->isPointerTy()) {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003612 setVariablePointersCapabilities(
3613 alloca->getAllocatedType()->getPointerAddressSpace());
alan-baker5b86ed72019-02-15 08:26:50 -05003614 }
David Neto6dcd4712017-06-23 11:06:47 -04003615 GenerateInstruction(I);
3616 }
3617 }
3618
David Neto22f144c2017-06-12 14:26:21 -04003619 if (&BB == &F.getEntryBlock() && IsKernel) {
David Netob6e2e062018-04-25 10:32:06 -04003620 if (clspv::Option::HackInitializers()) {
3621 GenerateEntryPointInitialStores();
3622 }
David Neto22f144c2017-06-12 14:26:21 -04003623 }
3624
3625 for (Instruction &I : BB) {
David Neto6dcd4712017-06-23 11:06:47 -04003626 if (!isa<AllocaInst>(I)) {
3627 GenerateInstruction(I);
3628 }
David Neto22f144c2017-06-12 14:26:21 -04003629 }
3630 }
3631}
3632
3633spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
3634 const std::map<CmpInst::Predicate, spv::Op> Map = {
3635 {CmpInst::ICMP_EQ, spv::OpIEqual},
3636 {CmpInst::ICMP_NE, spv::OpINotEqual},
3637 {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
3638 {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
3639 {CmpInst::ICMP_ULT, spv::OpULessThan},
3640 {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
3641 {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
3642 {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
3643 {CmpInst::ICMP_SLT, spv::OpSLessThan},
3644 {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
3645 {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
3646 {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
3647 {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
3648 {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
3649 {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
3650 {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
3651 {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
3652 {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
3653 {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
3654 {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
3655 {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
3656 {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};
3657
3658 assert(0 != Map.count(I->getPredicate()));
3659
3660 return Map.at(I->getPredicate());
3661}
3662
3663spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
3664 const std::map<unsigned, spv::Op> Map{
3665 {Instruction::Trunc, spv::OpUConvert},
3666 {Instruction::ZExt, spv::OpUConvert},
3667 {Instruction::SExt, spv::OpSConvert},
3668 {Instruction::FPToUI, spv::OpConvertFToU},
3669 {Instruction::FPToSI, spv::OpConvertFToS},
3670 {Instruction::UIToFP, spv::OpConvertUToF},
3671 {Instruction::SIToFP, spv::OpConvertSToF},
3672 {Instruction::FPTrunc, spv::OpFConvert},
3673 {Instruction::FPExt, spv::OpFConvert},
3674 {Instruction::BitCast, spv::OpBitcast}};
3675
3676 assert(0 != Map.count(I.getOpcode()));
3677
3678 return Map.at(I.getOpcode());
3679}
3680
3681spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
Kévin Petit24272b62018-10-18 19:16:12 +00003682 if (I.getType()->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003683 switch (I.getOpcode()) {
3684 default:
3685 break;
3686 case Instruction::Or:
3687 return spv::OpLogicalOr;
3688 case Instruction::And:
3689 return spv::OpLogicalAnd;
3690 case Instruction::Xor:
3691 return spv::OpLogicalNotEqual;
3692 }
3693 }
3694
alan-bakerb6b09dc2018-11-08 16:59:28 -05003695 const std::map<unsigned, spv::Op> Map{
David Neto22f144c2017-06-12 14:26:21 -04003696 {Instruction::Add, spv::OpIAdd},
3697 {Instruction::FAdd, spv::OpFAdd},
3698 {Instruction::Sub, spv::OpISub},
3699 {Instruction::FSub, spv::OpFSub},
3700 {Instruction::Mul, spv::OpIMul},
3701 {Instruction::FMul, spv::OpFMul},
3702 {Instruction::UDiv, spv::OpUDiv},
3703 {Instruction::SDiv, spv::OpSDiv},
3704 {Instruction::FDiv, spv::OpFDiv},
3705 {Instruction::URem, spv::OpUMod},
3706 {Instruction::SRem, spv::OpSRem},
3707 {Instruction::FRem, spv::OpFRem},
3708 {Instruction::Or, spv::OpBitwiseOr},
3709 {Instruction::Xor, spv::OpBitwiseXor},
3710 {Instruction::And, spv::OpBitwiseAnd},
3711 {Instruction::Shl, spv::OpShiftLeftLogical},
3712 {Instruction::LShr, spv::OpShiftRightLogical},
3713 {Instruction::AShr, spv::OpShiftRightArithmetic}};
3714
3715 assert(0 != Map.count(I.getOpcode()));
3716
3717 return Map.at(I.getOpcode());
3718}
3719
3720void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
SJW69939d52020-04-16 07:29:07 -05003721 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kFunctions);
David Neto22f144c2017-06-12 14:26:21 -04003722 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04003723 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
3724 LLVMContext &Context = I.getParent()->getParent()->getParent()->getContext();
3725
SJWf93f5f32020-05-05 07:27:56 -05003726 SPIRVID RID = 0;
David Neto22f144c2017-06-12 14:26:21 -04003727
3728 switch (I.getOpcode()) {
3729 default: {
3730 if (Instruction::isCast(I.getOpcode())) {
3731 //
3732 // Generate SPIRV instructions for cast operators.
3733 //
3734
David Netod2de94a2017-08-28 17:27:47 -04003735 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003736 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04003737 auto toI8 = Ty == Type::getInt8Ty(Context);
3738 auto fromI32 = OpTy == Type::getInt32Ty(Context);
David Neto22f144c2017-06-12 14:26:21 -04003739 // Handle zext, sext and uitofp with i1 type specially.
3740 if ((I.getOpcode() == Instruction::ZExt ||
3741 I.getOpcode() == Instruction::SExt ||
3742 I.getOpcode() == Instruction::UIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05003743 OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003744 //
3745 // Generate OpSelect.
3746 //
3747
3748 // Ops[0] = Result Type ID
3749 // Ops[1] = Condition ID
3750 // Ops[2] = True Constant ID
3751 // Ops[3] = False Constant ID
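// For example (illustrative): "%r = zext i1 %c to i32" becomes
//   %r = OpSelect %uint %c %uint_1 %uint_0
// while sext selects -1 instead of 1, and uitofp selects 1.0f / 0.0f.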
SJWf93f5f32020-05-05 07:27:56 -05003752 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003753
SJWf93f5f32020-05-05 07:27:56 -05003754 Ops << MkId(getSPIRVType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003755
SJWf93f5f32020-05-05 07:27:56 -05003756 uint32_t CondID = getSPIRVValue(I.getOperand(0));
David Neto257c3892018-04-11 13:19:45 -04003757 Ops << MkId(CondID);
David Neto22f144c2017-06-12 14:26:21 -04003758
3759 uint32_t TrueID = 0;
3760 if (I.getOpcode() == Instruction::ZExt) {
SJWf93f5f32020-05-05 07:27:56 -05003761 TrueID = getSPIRVValue(ConstantInt::get(I.getType(), 1));
David Neto22f144c2017-06-12 14:26:21 -04003762 } else if (I.getOpcode() == Instruction::SExt) {
SJWf93f5f32020-05-05 07:27:56 -05003763 TrueID = getSPIRVValue(ConstantInt::getSigned(I.getType(), -1));
David Neto22f144c2017-06-12 14:26:21 -04003764 } else {
SJWf93f5f32020-05-05 07:27:56 -05003765 TrueID = getSPIRVValue(ConstantFP::get(Context, APFloat(1.0f)));
David Neto22f144c2017-06-12 14:26:21 -04003766 }
David Neto257c3892018-04-11 13:19:45 -04003767 Ops << MkId(TrueID);
David Neto22f144c2017-06-12 14:26:21 -04003768
3769 uint32_t FalseID = 0;
3770 if (I.getOpcode() == Instruction::ZExt) {
SJWf93f5f32020-05-05 07:27:56 -05003771 FalseID = getSPIRVValue(Constant::getNullValue(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003772 } else if (I.getOpcode() == Instruction::SExt) {
SJWf93f5f32020-05-05 07:27:56 -05003773 FalseID = getSPIRVValue(Constant::getNullValue(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003774 } else {
SJWf93f5f32020-05-05 07:27:56 -05003775 FalseID = getSPIRVValue(ConstantFP::get(Context, APFloat(0.0f)));
David Neto22f144c2017-06-12 14:26:21 -04003776 }
David Neto257c3892018-04-11 13:19:45 -04003777 Ops << MkId(FalseID);
David Neto22f144c2017-06-12 14:26:21 -04003778
SJWf93f5f32020-05-05 07:27:56 -05003779 RID = addSPIRVInst(spv::OpSelect, Ops);
alan-bakerb39c8262019-03-08 14:03:37 -05003780 } else if (!clspv::Option::Int8Support() &&
3781 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04003782 // The SPIR-V target type is a 32-bit int. Keep only the bottom
3783 // 8 bits.
3784 // Before:
3785 // %result = trunc i32 %a to i8
3786 // After
3787 // %result = OpBitwiseAnd %uint %a %uint_255
3788
SJWf93f5f32020-05-05 07:27:56 -05003789 SPIRVOperandVec Ops;
David Netod2de94a2017-08-28 17:27:47 -04003790
SJWf93f5f32020-05-05 07:27:56 -05003791 Ops << MkId(getSPIRVType(OpTy)) << MkId(getSPIRVValue(I.getOperand(0)));
David Netod2de94a2017-08-28 17:27:47 -04003792
3793 Type *UintTy = Type::getInt32Ty(Context);
SJWf93f5f32020-05-05 07:27:56 -05003794 uint32_t MaskID = getSPIRVValue(ConstantInt::get(UintTy, 255));
David Neto257c3892018-04-11 13:19:45 -04003795 Ops << MkId(MaskID);
David Netod2de94a2017-08-28 17:27:47 -04003796
SJWf93f5f32020-05-05 07:27:56 -05003797 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003798 } else {
3799 // Ops[0] = Result Type ID
3800 // Ops[1] = Source Value ID
SJWf93f5f32020-05-05 07:27:56 -05003801 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003802
SJWf93f5f32020-05-05 07:27:56 -05003803 Ops << MkId(getSPIRVType(I.getType()))
3804 << MkId(getSPIRVValue(I.getOperand(0)));
David Neto22f144c2017-06-12 14:26:21 -04003805
SJWf93f5f32020-05-05 07:27:56 -05003806 RID = addSPIRVInst(GetSPIRVCastOpcode(I), Ops);
David Neto22f144c2017-06-12 14:26:21 -04003807 }
3808 } else if (isa<BinaryOperator>(I)) {
3809 //
3810 // Generate SPIRV instructions for binary operators.
3811 //
3812
3813 // Handle xor with i1 type specially.
3814 if (I.getOpcode() == Instruction::Xor &&
3815 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00003816 ((isa<ConstantInt>(I.getOperand(0)) &&
3817 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
3818 (isa<ConstantInt>(I.getOperand(1)) &&
3819 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04003820 //
3821 // Generate OpLogicalNot.
3822 //
3823 // Ops[0] = Result Type ID
3824 // Ops[1] = Operand
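// For example (illustrative): "%r = xor i1 %a, true" becomes
// "%r = OpLogicalNot %bool %a".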
SJWf93f5f32020-05-05 07:27:56 -05003825 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003826
SJWf93f5f32020-05-05 07:27:56 -05003827 Ops << MkId(getSPIRVType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003828
3829 Value *CondV = I.getOperand(0);
3830 if (isa<Constant>(I.getOperand(0))) {
3831 CondV = I.getOperand(1);
3832 }
SJWf93f5f32020-05-05 07:27:56 -05003833 Ops << MkId(getSPIRVValue(CondV));
David Neto22f144c2017-06-12 14:26:21 -04003834
SJWf93f5f32020-05-05 07:27:56 -05003835 RID = addSPIRVInst(spv::OpLogicalNot, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003836 } else {
3837 // Ops[0] = Result Type ID
3838 // Ops[1] = Operand 0
3839 // Ops[2] = Operand 1
SJWf93f5f32020-05-05 07:27:56 -05003840 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003841
SJWf93f5f32020-05-05 07:27:56 -05003842 Ops << MkId(getSPIRVType(I.getType()))
3843 << MkId(getSPIRVValue(I.getOperand(0)))
3844 << MkId(getSPIRVValue(I.getOperand(1)));
David Neto22f144c2017-06-12 14:26:21 -04003845
SJWf93f5f32020-05-05 07:27:56 -05003846 RID = addSPIRVInst(GetSPIRVBinaryOpcode(I), Ops);
David Neto22f144c2017-06-12 14:26:21 -04003847 }
alan-bakerc9c55ae2019-12-02 16:01:27 -05003848 } else if (I.getOpcode() == Instruction::FNeg) {
3849 // The only unary operator.
3850 //
3851 // Ops[0] = Result Type ID
3852 // Ops[1] = Operand 0
SJWf93f5f32020-05-05 07:27:56 -05003853 SPIRVOperandVec ops;
alan-bakerc9c55ae2019-12-02 16:01:27 -05003854
SJWf93f5f32020-05-05 07:27:56 -05003855 ops << MkId(getSPIRVType(I.getType()))
3856 << MkId(getSPIRVValue(I.getOperand(0)));
3857 RID = addSPIRVInst(spv::OpFNegate, ops);
David Neto22f144c2017-06-12 14:26:21 -04003858 } else {
3859 I.print(errs());
3860 llvm_unreachable("Unsupported instruction???");
3861 }
3862 break;
3863 }
3864 case Instruction::GetElementPtr: {
3865 auto &GlobalConstArgSet = getGlobalConstArgSet();
3866
3867 //
3868 // Generate OpAccessChain.
3869 //
3870 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
3875
3876 // Ops[0] = Result Type ID
3877 // Ops[1] = Base ID
3878 // Ops[2] ... Ops[n] = Indexes ID
SJWf93f5f32020-05-05 07:27:56 -05003879 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003880
alan-bakerb6b09dc2018-11-08 16:59:28 -05003881 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04003882 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
3883 GlobalConstArgSet.count(GEP->getPointerOperand())) {
3884 // Use pointer type with private address space for global constant.
3885 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04003886 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04003887 }
David Neto257c3892018-04-11 13:19:45 -04003888
SJWf93f5f32020-05-05 07:27:56 -05003889 Ops << MkId(getSPIRVType(ResultType));
David Neto22f144c2017-06-12 14:26:21 -04003890
David Neto862b7d82018-06-14 18:48:37 -04003891 // Generate the base pointer.
SJWf93f5f32020-05-05 07:27:56 -05003892 Ops << MkId(getSPIRVValue(GEP->getPointerOperand()));
David Neto22f144c2017-06-12 14:26:21 -04003893
David Neto862b7d82018-06-14 18:48:37 -04003894 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04003895
3896 //
3897 // The opcode and operands for the gep are chosen by the following rules.
3898 //
David Neto862b7d82018-06-14 18:48:37 -04003899 // 1. If gep's first index is 0 generate OpAccessChain and ignore gep's
3900 // first index.
David Neto22f144c2017-06-12 14:26:21 -04003901 // 2. If gep's first index is not 0, generate OpPtrAccessChain and use gep's
3902 // first index.
3903 // 3. If gep's first index is not constant, generate OpPtrAccessChain and
3904 // use gep's first index.
3905 // 4. Otherwise (none of the cases above apply), generate OpAccessChain and
3906 // use gep's first index.
3907 //
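// Illustrative sketch: "getelementptr %T, %T* %p, i32 0, i32 2" becomes
// "OpAccessChain %ptr %p %uint_2" (case 1 drops the leading zero index),
// while a non-zero or non-constant first index becomes OpPtrAccessChain
// and that first index is kept (cases 2 and 3).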
3908 spv::Op Opcode = spv::OpAccessChain;
3909 unsigned offset = 0;
3910 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04003911 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04003912 offset = 1;
David Neto862b7d82018-06-14 18:48:37 -04003913 } else {
David Neto22f144c2017-06-12 14:26:21 -04003914 Opcode = spv::OpPtrAccessChain;
David Neto22f144c2017-06-12 14:26:21 -04003915 }
David Neto862b7d82018-06-14 18:48:37 -04003916 } else {
David Neto22f144c2017-06-12 14:26:21 -04003917 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04003918 }
3919
3920 if (Opcode == spv::OpPtrAccessChain) {
David Neto1a1a0582017-07-07 12:01:44 -04003921 // Do we need to generate ArrayStride? Check against the GEP result type
3922 // rather than the pointer type of the base because when indexing into
3923 // an OpenCL program-scope constant, we'll swap out the LLVM base pointer
3924 // for something else in the SPIR-V.
3925 // E.g. see test/PointerAccessChain/pointer_index_is_constant_1.cl
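// Only explicitly laid-out storage classes (StorageBuffer and Uniform)
// need an ArrayStride decoration, so the switch below records the type
// only for those classes.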
alan-baker5b86ed72019-02-15 08:26:50 -05003926 auto address_space = ResultType->getAddressSpace();
3927 setVariablePointersCapabilities(address_space);
3928 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04003929 case spv::StorageClassStorageBuffer:
3930 case spv::StorageClassUniform:
David Neto1a1a0582017-07-07 12:01:44 -04003931 // Save the need to generate an ArrayStride decoration. But defer
3932 // generation until later, so we only make one decoration.
David Neto85082642018-03-24 06:55:20 -07003933 getTypesNeedingArrayStride().insert(ResultType);
Alan Bakerfcda9482018-10-02 17:09:59 -04003934 break;
3935 default:
3936 break;
David Neto1a1a0582017-07-07 12:01:44 -04003937 }
David Neto22f144c2017-06-12 14:26:21 -04003938 }
3939
3940 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
SJWf93f5f32020-05-05 07:27:56 -05003941 Ops << MkId(getSPIRVValue(*II));
David Neto22f144c2017-06-12 14:26:21 -04003942 }
3943
SJWf93f5f32020-05-05 07:27:56 -05003944 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003945 break;
3946 }
3947 case Instruction::ExtractValue: {
3948 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
3949 // Ops[0] = Result Type ID
3950 // Ops[1] = Composite ID
3951 // Ops[2] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05003952 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003953
SJWf93f5f32020-05-05 07:27:56 -05003954 Ops << MkId(getSPIRVType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003955
SJWf93f5f32020-05-05 07:27:56 -05003956 uint32_t CompositeID = getSPIRVValue(EVI->getAggregateOperand());
David Neto257c3892018-04-11 13:19:45 -04003957 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04003958
3959 for (auto &Index : EVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04003960 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04003961 }
3962
SJWf93f5f32020-05-05 07:27:56 -05003963 RID = addSPIRVInst(spv::OpCompositeExtract, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003964 break;
3965 }
3966 case Instruction::InsertValue: {
3967 InsertValueInst *IVI = cast<InsertValueInst>(&I);
3968 // Ops[0] = Result Type ID
3969 // Ops[1] = Object ID
3970 // Ops[2] = Composite ID
3971 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05003972 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003973
SJWf93f5f32020-05-05 07:27:56 -05003974 uint32_t ResTyID = getSPIRVType(I.getType());
David Neto257c3892018-04-11 13:19:45 -04003975 Ops << MkId(ResTyID);
David Neto22f144c2017-06-12 14:26:21 -04003976
SJWf93f5f32020-05-05 07:27:56 -05003977 uint32_t ObjectID = getSPIRVValue(IVI->getInsertedValueOperand());
David Neto257c3892018-04-11 13:19:45 -04003978 Ops << MkId(ObjectID);
David Neto22f144c2017-06-12 14:26:21 -04003979
SJWf93f5f32020-05-05 07:27:56 -05003980 uint32_t CompositeID = getSPIRVValue(IVI->getAggregateOperand());
David Neto257c3892018-04-11 13:19:45 -04003981 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04003982
3983 for (auto &Index : IVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04003984 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04003985 }
3986
SJWf93f5f32020-05-05 07:27:56 -05003987 RID = addSPIRVInst(spv::OpCompositeInsert, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003988 break;
3989 }
3990 case Instruction::Select: {
3991 //
3992 // Generate OpSelect.
3993 //
3994
3995 // Ops[0] = Result Type ID
3996 // Ops[1] = Condition ID
3997 // Ops[2] = True Constant ID
3998 // Ops[3] = False Constant ID
SJWf93f5f32020-05-05 07:27:56 -05003999 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004000
4001 // Find SPIRV instruction for parameter type.
4002 auto Ty = I.getType();
4003 if (Ty->isPointerTy()) {
4004 auto PointeeTy = Ty->getPointerElementType();
4005 if (PointeeTy->isStructTy() &&
4006 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
4007 Ty = PointeeTy;
alan-baker5b86ed72019-02-15 08:26:50 -05004008 } else {
4009 // Selecting between pointers requires variable pointers.
4010 setVariablePointersCapabilities(Ty->getPointerAddressSpace());
4011 if (!hasVariablePointers() && !selectFromSameObject(&I)) {
4012 setVariablePointers(true);
4013 }
David Neto22f144c2017-06-12 14:26:21 -04004014 }
4015 }
4016
SJWf93f5f32020-05-05 07:27:56 -05004017 Ops << MkId(getSPIRVType(Ty)) << MkId(getSPIRVValue(I.getOperand(0)))
4018 << MkId(getSPIRVValue(I.getOperand(1)))
4019 << MkId(getSPIRVValue(I.getOperand(2)));
David Neto22f144c2017-06-12 14:26:21 -04004020
SJWf93f5f32020-05-05 07:27:56 -05004021 RID = addSPIRVInst(spv::OpSelect, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004022 break;
4023 }
4024 case Instruction::ExtractElement: {
4025 // Handle <4 x i8> type manually.
4026 Type *CompositeTy = I.getOperand(0)->getType();
4027 if (is4xi8vec(CompositeTy)) {
4028 //
4029 // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
4030 // <4 x i8>.
4031 //
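// The <4 x i8> is packed into a 32-bit word, so element i is recovered as
// (word >> (8 * i)) & 0xFF; the two instructions below implement that.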
4032
4033 //
4034 // Generate OpShiftRightLogical
4035 //
4036 // Ops[0] = Result Type ID
4037 // Ops[1] = Operand 0
4038 // Ops[2] = Operand 1
4039 //
SJWf93f5f32020-05-05 07:27:56 -05004040 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004041
SJWf93f5f32020-05-05 07:27:56 -05004042 Ops << MkId(getSPIRVType(CompositeTy));
David Neto22f144c2017-06-12 14:26:21 -04004043
SJWf93f5f32020-05-05 07:27:56 -05004044 uint32_t Op0ID = getSPIRVValue(I.getOperand(0));
David Neto257c3892018-04-11 13:19:45 -04004045 Ops << MkId(Op0ID);
David Neto22f144c2017-06-12 14:26:21 -04004046
4047 uint32_t Op1ID = 0;
4048 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
4049 // Handle constant index.
4050 uint64_t Idx = CI->getZExtValue();
4051 Value *ShiftAmount =
4052 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
SJWf93f5f32020-05-05 07:27:56 -05004053 Op1ID = getSPIRVValue(ShiftAmount);
David Neto22f144c2017-06-12 14:26:21 -04004054 } else {
4055 // Handle variable index.
SJWf93f5f32020-05-05 07:27:56 -05004056 SPIRVOperandVec TmpOps;
David Neto22f144c2017-06-12 14:26:21 -04004057
SJWf93f5f32020-05-05 07:27:56 -05004058 TmpOps << MkId(getSPIRVType(Type::getInt32Ty(Context)))
4059 << MkId(getSPIRVValue(I.getOperand(1)));
David Neto22f144c2017-06-12 14:26:21 -04004060
4061 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
SJWf93f5f32020-05-05 07:27:56 -05004062 TmpOps << MkId(getSPIRVValue(Cst8));
David Neto22f144c2017-06-12 14:26:21 -04004063
SJWf93f5f32020-05-05 07:27:56 -05004064 Op1ID = addSPIRVInst(spv::OpIMul, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004065 }
David Neto257c3892018-04-11 13:19:45 -04004066 Ops << MkId(Op1ID);
David Neto22f144c2017-06-12 14:26:21 -04004067
SJWf93f5f32020-05-05 07:27:56 -05004068 uint32_t ShiftID = addSPIRVInst(spv::OpShiftRightLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004069
4070 //
4071 // Generate OpBitwiseAnd
4072 //
4073 // Ops[0] = Result Type ID
4074 // Ops[1] = Operand 0
4075 // Ops[2] = Operand 1
4076 //
4077 Ops.clear();
4078
SJWf93f5f32020-05-05 07:27:56 -05004079 Ops << MkId(getSPIRVType(CompositeTy)) << MkId(ShiftID);
David Neto22f144c2017-06-12 14:26:21 -04004080
4081 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
SJWf93f5f32020-05-05 07:27:56 -05004082 Ops << MkId(getSPIRVValue(CstFF));
David Neto22f144c2017-06-12 14:26:21 -04004083
SJWf93f5f32020-05-05 07:27:56 -05004084 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004085 break;
4086 }
4087
4088 // Ops[0] = Result Type ID
4089 // Ops[1] = Composite ID
4090 // Ops[2] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004091 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004092
SJWf93f5f32020-05-05 07:27:56 -05004093 Ops << MkId(getSPIRVType(I.getType()))
4094 << MkId(getSPIRVValue(I.getOperand(0)));
David Neto22f144c2017-06-12 14:26:21 -04004095
4096 spv::Op Opcode = spv::OpCompositeExtract;
4097 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
David Neto257c3892018-04-11 13:19:45 -04004098 Ops << MkNum(static_cast<uint32_t>(CI->getZExtValue()));
David Neto22f144c2017-06-12 14:26:21 -04004099 } else {
SJWf93f5f32020-05-05 07:27:56 -05004100 Ops << MkId(getSPIRVValue(I.getOperand(1)));
David Neto22f144c2017-06-12 14:26:21 -04004101 Opcode = spv::OpVectorExtractDynamic;
4102 }
4103
SJWf93f5f32020-05-05 07:27:56 -05004104 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004105 break;
4106 }
4107 case Instruction::InsertElement: {
4108 // Handle <4 x i8> type manually.
4109 Type *CompositeTy = I.getOperand(0)->getType();
4110 if (is4xi8vec(CompositeTy)) {
4111 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
SJWf93f5f32020-05-05 07:27:56 -05004112 uint32_t CstFFID = getSPIRVValue(CstFF);
David Neto22f144c2017-06-12 14:26:21 -04004113
4114 uint32_t ShiftAmountID = 0;
4115 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
4116 // Handle constant index.
4117 uint64_t Idx = CI->getZExtValue();
4118 Value *ShiftAmount =
4119 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
SJWf93f5f32020-05-05 07:27:56 -05004120 ShiftAmountID = getSPIRVValue(ShiftAmount);
David Neto22f144c2017-06-12 14:26:21 -04004121 } else {
4122 // Handle variable index.
SJWf93f5f32020-05-05 07:27:56 -05004123 SPIRVOperandVec TmpOps;
David Neto22f144c2017-06-12 14:26:21 -04004124
SJWf93f5f32020-05-05 07:27:56 -05004125 TmpOps << MkId(getSPIRVType(Type::getInt32Ty(Context)))
4126 << MkId(getSPIRVValue(I.getOperand(2)));
David Neto22f144c2017-06-12 14:26:21 -04004127
4128 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
SJWf93f5f32020-05-05 07:27:56 -05004129 TmpOps << MkId(getSPIRVValue(Cst8));
David Neto22f144c2017-06-12 14:26:21 -04004130
SJWf93f5f32020-05-05 07:27:56 -05004131 ShiftAmountID = addSPIRVInst(spv::OpIMul, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004132 }
4133
4134 //
4135 // Generate mask operations.
4136 //
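// Illustrative summary of the five instructions below:
//   new_word = (word & ~(0xFF << (8 * i))) | (value << (8 * i))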
4137
4138 // ShiftLeft mask according to index of insertelement.
SJWf93f5f32020-05-05 07:27:56 -05004139 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004140
SJWf93f5f32020-05-05 07:27:56 -05004141 const uint32_t ResTyID = getSPIRVType(CompositeTy);
David Neto257c3892018-04-11 13:19:45 -04004142 Ops << MkId(ResTyID) << MkId(CstFFID) << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004143
SJWf93f5f32020-05-05 07:27:56 -05004144 uint32_t MaskID = addSPIRVInst(spv::OpShiftLeftLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004145
4146 // Inverse mask.
4147 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004148 Ops << MkId(ResTyID) << MkId(MaskID);
David Neto22f144c2017-06-12 14:26:21 -04004149
SJWf93f5f32020-05-05 07:27:56 -05004150 uint32_t InvMaskID = addSPIRVInst(spv::OpNot, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004151
4152 // Apply mask.
4153 Ops.clear();
SJWf93f5f32020-05-05 07:27:56 -05004154 Ops << MkId(ResTyID) << MkId(getSPIRVValue(I.getOperand(0)))
4155 << MkId(InvMaskID);
David Neto22f144c2017-06-12 14:26:21 -04004156
SJWf93f5f32020-05-05 07:27:56 -05004157 uint32_t OrgValID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004158
4159 // Create correct value according to index of insertelement.
4160 Ops.clear();
SJWf93f5f32020-05-05 07:27:56 -05004161 Ops << MkId(ResTyID) << MkId(getSPIRVValue(I.getOperand(1)))
alan-bakerb6b09dc2018-11-08 16:59:28 -05004162 << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004163
SJWf93f5f32020-05-05 07:27:56 -05004164 uint32_t InsertValID = addSPIRVInst(spv::OpShiftLeftLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004165
4166 // Insert value to original value.
4167 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004168 Ops << MkId(ResTyID) << MkId(OrgValID) << MkId(InsertValID);
David Neto22f144c2017-06-12 14:26:21 -04004169
SJWf93f5f32020-05-05 07:27:56 -05004170 RID = addSPIRVInst(spv::OpBitwiseOr, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004171 break;
4172 }
4173
SJWf93f5f32020-05-05 07:27:56 -05004174 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004175
James Priced26efea2018-06-09 23:28:32 +01004176 // Ops[0] = Result Type ID
SJWf93f5f32020-05-05 07:27:56 -05004177 Ops << MkId(getSPIRVType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004178
4179 spv::Op Opcode = spv::OpCompositeInsert;
4180 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
David Neto257c3892018-04-11 13:19:45 -04004181 const auto value = CI->getZExtValue();
4182 assert(value <= UINT32_MAX);
James Priced26efea2018-06-09 23:28:32 +01004183 // Ops[1] = Object ID
4184 // Ops[2] = Composite ID
4185 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004186 Ops << MkId(getSPIRVValue(I.getOperand(1)))
4187 << MkId(getSPIRVValue(I.getOperand(0)))
James Priced26efea2018-06-09 23:28:32 +01004188 << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004189 } else {
James Priced26efea2018-06-09 23:28:32 +01004190 // Ops[1] = Composite ID
4191 // Ops[2] = Object ID
4192 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004193 Ops << MkId(getSPIRVValue(I.getOperand(0)))
4194 << MkId(getSPIRVValue(I.getOperand(1)))
4195 << MkId(getSPIRVValue(I.getOperand(2)));
David Neto22f144c2017-06-12 14:26:21 -04004196 Opcode = spv::OpVectorInsertDynamic;
4197 }
4198
SJWf93f5f32020-05-05 07:27:56 -05004199 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004200 break;
4201 }
4202 case Instruction::ShuffleVector: {
4203 // Ops[0] = Result Type ID
4204 // Ops[1] = Vector 1 ID
4205 // Ops[2] = Vector 2 ID
4206 // Ops[3] ... Ops[n] = Components (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004207 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004208
SJWf93f5f32020-05-05 07:27:56 -05004209 Ops << MkId(getSPIRVType(I.getType()))
4210 << MkId(getSPIRVValue(I.getOperand(0)))
4211 << MkId(getSPIRVValue(I.getOperand(1)));
David Neto22f144c2017-06-12 14:26:21 -04004212
alan-bakerc9666712020-04-01 16:31:21 -04004213 auto shuffle = cast<ShuffleVectorInst>(&I);
4214 SmallVector<int, 4> mask;
4215 shuffle->getShuffleMask(mask);
4216 for (auto i : mask) {
4217 if (i == UndefMaskElem) {
4218 if (clspv::Option::HackUndef())
4219 // Use 0 instead of undef.
David Neto257c3892018-04-11 13:19:45 -04004220 Ops << MkNum(0);
alan-bakerc9666712020-04-01 16:31:21 -04004221 else
4222 // Undef for shuffle in SPIR-V.
4223 Ops << MkNum(0xffffffff);
David Neto22f144c2017-06-12 14:26:21 -04004224 } else {
alan-bakerc9666712020-04-01 16:31:21 -04004225 Ops << MkNum(i);
David Neto22f144c2017-06-12 14:26:21 -04004226 }
4227 }
4228
SJWf93f5f32020-05-05 07:27:56 -05004229 RID = addSPIRVInst(spv::OpVectorShuffle, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004230 break;
4231 }
4232 case Instruction::ICmp:
4233 case Instruction::FCmp: {
4234 CmpInst *CmpI = cast<CmpInst>(&I);
4235
David Netod4ca2e62017-07-06 18:47:35 -04004236 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004237 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04004238 if (isa<PointerType>(ArgTy)) {
4239 CmpI->print(errs());
alan-baker21574d32020-01-29 16:00:31 -05004240 std::string name = I.getParent()->getParent()->getName().str();
David Netod4ca2e62017-07-06 18:47:35 -04004241 errs()
4242 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
4243 << "in function " << name << "\n";
4244 llvm_unreachable("Pointer equality check is invalid");
4245 break;
4246 }
4247
David Neto257c3892018-04-11 13:19:45 -04004248 // Ops[0] = Result Type ID
4249 // Ops[1] = Operand 1 ID
4250 // Ops[2] = Operand 2 ID
SJWf93f5f32020-05-05 07:27:56 -05004251 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004252
SJWf93f5f32020-05-05 07:27:56 -05004253 Ops << MkId(getSPIRVType(CmpI->getType()))
4254 << MkId(getSPIRVValue(CmpI->getOperand(0)))
4255 << MkId(getSPIRVValue(CmpI->getOperand(1)));
David Neto22f144c2017-06-12 14:26:21 -04004256
4257 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
SJWf93f5f32020-05-05 07:27:56 -05004258 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004259 break;
4260 }
4261 case Instruction::Br: {
4262 // The branch instruction is deferred because it needs the label's ID. Record
4263 // the slot's location in the SPIRV instruction list.
4264 DeferredInsts.push_back(
4265 std::make_tuple(&I, --SPIRVInstList.end(), 0 /* No id */));
4266 break;
4267 }
4268 case Instruction::Switch: {
4269 I.print(errs());
4270 llvm_unreachable("Unsupported instruction???");
4271 break;
4272 }
4273 case Instruction::IndirectBr: {
4274 I.print(errs());
4275 llvm_unreachable("Unsupported instruction???");
4276 break;
4277 }
4278 case Instruction::PHI: {
4279 // The PHI instruction is deferred because it needs the IDs of its incoming
4280 // values and blocks. Record the slot's location in the SPIRV instruction list.
SJWf93f5f32020-05-05 07:27:56 -05004281 RID = incrNextID();
4282 DeferredInsts.push_back(std::make_tuple(&I, --SPIRVInstList.end(), RID));
David Neto22f144c2017-06-12 14:26:21 -04004283 break;
4284 }
4285 case Instruction::Alloca: {
4286 //
4287 // Generate OpVariable.
4288 //
4289 // Ops[0] : Result Type ID
4290 // Ops[1] : Storage Class
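// An alloca always becomes an OpVariable in the Function storage class.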
SJWf93f5f32020-05-05 07:27:56 -05004291 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004292
SJWf93f5f32020-05-05 07:27:56 -05004293 Ops << MkId(getSPIRVType(I.getType())) << MkNum(spv::StorageClassFunction);
David Neto22f144c2017-06-12 14:26:21 -04004294
SJWf93f5f32020-05-05 07:27:56 -05004295 RID = addSPIRVInst(spv::OpVariable, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004296 break;
4297 }
4298 case Instruction::Load: {
4299 LoadInst *LD = cast<LoadInst>(&I);
4300 //
4301 // Generate OpLoad.
4302 //
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004303
alan-baker5b86ed72019-02-15 08:26:50 -05004304 if (LD->getType()->isPointerTy()) {
4305 // Loading a pointer requires variable pointers.
4306 setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
4307 }
David Neto22f144c2017-06-12 14:26:21 -04004308
SJWf93f5f32020-05-05 07:27:56 -05004309 uint32_t ResTyID = getSPIRVType(LD->getType());
4310 uint32_t PointerID = getSPIRVValue(LD->getPointerOperand());
David Netoa60b00b2017-09-15 16:34:09 -04004311
4312 // This is a hack to work around what looks like a driver bug.
4313 // When we're loading from the special variable holding the WorkgroupSize
David Neto0a2f98d2017-09-15 19:38:40 -04004314 // builtin value, use an OpBitwiseAnd of the value's ID rather than
4315 // generating a load.
David Neto66cfe642018-03-24 06:13:56 -07004316 // TODO(dneto): Remove this awful hack once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -04004317 if (PointerID == WorkgroupSizeVarID) {
David Neto0a2f98d2017-09-15 19:38:40 -04004318 // Generate a bitwise-and of the original value with itself.
4319 // We should have been able to get away with just an OpCopyObject,
4320 // but we need something more complex to get past certain driver bugs.
4321 // This is ridiculous, but necessary.
4322 // TODO(dneto): Revisit this once drivers fix their bugs.
4323
SJWf93f5f32020-05-05 07:27:56 -05004324 SPIRVOperandVec Ops;
David Neto257c3892018-04-11 13:19:45 -04004325 Ops << MkId(ResTyID) << MkId(WorkgroupSizeValueID)
4326 << MkId(WorkgroupSizeValueID);
David Neto0a2f98d2017-09-15 19:38:40 -04004327
SJWf93f5f32020-05-05 07:27:56 -05004328 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Netoa60b00b2017-09-15 16:34:09 -04004329 break;
4330 }
4331
4332 // This is the normal path. Generate a load.
4333
David Neto22f144c2017-06-12 14:26:21 -04004334 // Ops[0] = Result Type ID
4335 // Ops[1] = Pointer ID
4336 // Ops[2] ... Ops[n] = Optional Memory Access
4337 //
4338 // TODO: Do we need to implement Optional Memory Access???
David Neto0a2f98d2017-09-15 19:38:40 -04004339
SJWf93f5f32020-05-05 07:27:56 -05004340 SPIRVOperandVec Ops;
David Neto257c3892018-04-11 13:19:45 -04004341 Ops << MkId(ResTyID) << MkId(PointerID);
David Neto22f144c2017-06-12 14:26:21 -04004342
SJWf93f5f32020-05-05 07:27:56 -05004343 RID = addSPIRVInst(spv::OpLoad, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004344 break;
4345 }
4346 case Instruction::Store: {
4347 StoreInst *ST = cast<StoreInst>(&I);
4348 //
4349 // Generate OpStore.
4350 //
4351
alan-baker5b86ed72019-02-15 08:26:50 -05004352 if (ST->getValueOperand()->getType()->isPointerTy()) {
4353 // Storing a pointer requires variable pointers.
4354 setVariablePointersCapabilities(
4355 ST->getValueOperand()->getType()->getPointerAddressSpace());
4356 }
4357
David Neto22f144c2017-06-12 14:26:21 -04004358 // Ops[0] = Pointer ID
4359 // Ops[1] = Object ID
4360 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
4361 //
4362 // TODO: Do we need to implement Optional Memory Access???
SJWf93f5f32020-05-05 07:27:56 -05004363 SPIRVOperandVec Ops;
4364 Ops << MkId(getSPIRVValue(ST->getPointerOperand()))
4365 << MkId(getSPIRVValue(ST->getValueOperand()));
David Neto22f144c2017-06-12 14:26:21 -04004366
SJWf93f5f32020-05-05 07:27:56 -05004367 RID = addSPIRVInst(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004368 break;
4369 }
4370 case Instruction::AtomicCmpXchg: {
4371 I.print(errs());
4372 llvm_unreachable("Unsupported instruction???");
4373 break;
4374 }
4375 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01004376 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
4377
4378 spv::Op opcode;
4379
4380 switch (AtomicRMW->getOperation()) {
4381 default:
4382 I.print(errs());
4383 llvm_unreachable("Unsupported instruction???");
4384 case llvm::AtomicRMWInst::Add:
4385 opcode = spv::OpAtomicIAdd;
4386 break;
4387 case llvm::AtomicRMWInst::Sub:
4388 opcode = spv::OpAtomicISub;
4389 break;
4390 case llvm::AtomicRMWInst::Xchg:
4391 opcode = spv::OpAtomicExchange;
4392 break;
4393 case llvm::AtomicRMWInst::Min:
4394 opcode = spv::OpAtomicSMin;
4395 break;
4396 case llvm::AtomicRMWInst::Max:
4397 opcode = spv::OpAtomicSMax;
4398 break;
4399 case llvm::AtomicRMWInst::UMin:
4400 opcode = spv::OpAtomicUMin;
4401 break;
4402 case llvm::AtomicRMWInst::UMax:
4403 opcode = spv::OpAtomicUMax;
4404 break;
4405 case llvm::AtomicRMWInst::And:
4406 opcode = spv::OpAtomicAnd;
4407 break;
4408 case llvm::AtomicRMWInst::Or:
4409 opcode = spv::OpAtomicOr;
4410 break;
4411 case llvm::AtomicRMWInst::Xor:
4412 opcode = spv::OpAtomicXor;
4413 break;
4414 }
4415
4416 //
4417 // Generate OpAtomic*.
4418 //
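// Operand order, as built below: result type, pointer, scope (Device),
// memory semantics (UniformMemory | SequentiallyConsistent), value.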
SJWf93f5f32020-05-05 07:27:56 -05004419 SPIRVOperandVec Ops;
Neil Henning39672102017-09-29 14:33:13 +01004420
SJWf93f5f32020-05-05 07:27:56 -05004421 Ops << MkId(getSPIRVType(I.getType()))
4422 << MkId(getSPIRVValue(AtomicRMW->getPointerOperand()));
Neil Henning39672102017-09-29 14:33:13 +01004423
4424 auto IntTy = Type::getInt32Ty(I.getContext());
Neil Henning39672102017-09-29 14:33:13 +01004425 const auto ConstantScopeDevice = ConstantInt::get(IntTy, spv::ScopeDevice);
SJWf93f5f32020-05-05 07:27:56 -05004426 Ops << MkId(getSPIRVValue(ConstantScopeDevice));
Neil Henning39672102017-09-29 14:33:13 +01004427
4428 const auto ConstantMemorySemantics = ConstantInt::get(
4429 IntTy, spv::MemorySemanticsUniformMemoryMask |
4430 spv::MemorySemanticsSequentiallyConsistentMask);
SJWf93f5f32020-05-05 07:27:56 -05004431 Ops << MkId(getSPIRVValue(ConstantMemorySemantics));
Neil Henning39672102017-09-29 14:33:13 +01004432
SJWf93f5f32020-05-05 07:27:56 -05004433 Ops << MkId(getSPIRVValue(AtomicRMW->getValOperand()));
Neil Henning39672102017-09-29 14:33:13 +01004434
SJWf93f5f32020-05-05 07:27:56 -05004435 RID = addSPIRVInst(opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004436 break;
4437 }
4438 case Instruction::Fence: {
4439 I.print(errs());
4440 llvm_unreachable("Unsupported instruction???");
4441 break;
4442 }
4443 case Instruction::Call: {
4444 CallInst *Call = dyn_cast<CallInst>(&I);
4445 Function *Callee = Call->getCalledFunction();
4446
Alan Baker202c8c72018-08-13 13:47:44 -04004447 if (Callee->getName().startswith(clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004448 if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
4449 // Generate an OpLoad
SJWf93f5f32020-05-05 07:27:56 -05004450 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004451
SJWf93f5f32020-05-05 07:27:56 -05004452 Ops << MkId(getSPIRVType(Call->getType()->getPointerElementType()))
David Neto862b7d82018-06-14 18:48:37 -04004453 << MkId(ResourceVarDeferredLoadCalls[Call]);
4454
SJWf93f5f32020-05-05 07:27:56 -05004455 RID = addSPIRVInst(spv::OpLoad, Ops);
David Neto862b7d82018-06-14 18:48:37 -04004456 break;
4457
4458 } else {
4459 // This maps to an OpVariable we've already generated.
4460 // No code is generated for the call.
4461 }
4462 break;
alan-bakerb6b09dc2018-11-08 16:59:28 -05004463 } else if (Callee->getName().startswith(
4464 clspv::WorkgroupAccessorFunction())) {
Alan Baker202c8c72018-08-13 13:47:44 -04004465 // Don't codegen an instruction here, but instead map this call directly
4466 // to the workgroup variable id.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004467 int spec_id = static_cast<int>(
4468 cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04004469 const auto &info = LocalSpecIdInfoMap[spec_id];
SJWf93f5f32020-05-05 07:27:56 -05004470 RID = info.variable_id;
Alan Baker202c8c72018-08-13 13:47:44 -04004471 break;
David Neto862b7d82018-06-14 18:48:37 -04004472 }
4473
4474 // Sampler initializers become a load of the corresponding sampler.
4475
Kévin Petitdf71de32019-04-09 14:09:50 +01004476 if (Callee->getName().equals(clspv::LiteralSamplerFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004477 // Map this to a load from the variable.
alan-baker09cb9802019-12-10 13:16:27 -05004478 const auto third_param = static_cast<unsigned>(
4479 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue());
4480 auto sampler_value = third_param;
4481 if (clspv::Option::UseSamplerMap()) {
4482 sampler_value = getSamplerMap()[third_param].first;
4483 }
David Neto862b7d82018-06-14 18:48:37 -04004484
4485 // Generate an OpLoad
SJWf93f5f32020-05-05 07:27:56 -05004486 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004487
SJWf93f5f32020-05-05 07:27:56 -05004488 Ops << MkId(getSPIRVType(SamplerTy->getPointerElementType()))
alan-baker09cb9802019-12-10 13:16:27 -05004489 << MkId(SamplerLiteralToIDMap[sampler_value]);
David Neto22f144c2017-06-12 14:26:21 -04004490
SJWf93f5f32020-05-05 07:27:56 -05004491 RID = addSPIRVInst(spv::OpLoad, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004492 break;
4493 }
4494
Kévin Petit349c9502019-03-28 17:24:14 +00004495 // Handle SPIR-V intrinsics
Kévin Petit9b340262019-06-19 18:31:11 +01004496 spv::Op opcode = StringSwitch<spv::Op>(Callee->getName())
4497 .Case("spirv.atomic_xor", spv::OpAtomicXor)
4498 .Default(spv::OpNop);
David Neto22f144c2017-06-12 14:26:21 -04004499
Kévin Petit617a76d2019-04-04 13:54:16 +01004500 // If the switch above didn't have an entry maybe the intrinsic
4501 // is using the name mangling logic.
4502 bool usesMangler = false;
4503 if (opcode == spv::OpNop) {
4504 if (Callee->getName().startswith(clspv::SPIRVOpIntrinsicFunction())) {
4505 auto OpCst = cast<ConstantInt>(Call->getOperand(0));
4506 opcode = static_cast<spv::Op>(OpCst->getZExtValue());
4507 usesMangler = true;
4508 }
4509 }
4510
Kévin Petit349c9502019-03-28 17:24:14 +00004511 if (opcode != spv::OpNop) {
4512
SJWf93f5f32020-05-05 07:27:56 -05004513 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004514
Kévin Petit349c9502019-03-28 17:24:14 +00004515 if (!I.getType()->isVoidTy()) {
SJWf93f5f32020-05-05 07:27:56 -05004516 Ops << MkId(getSPIRVType(I.getType()));
Kévin Petit349c9502019-03-28 17:24:14 +00004517 }
David Neto22f144c2017-06-12 14:26:21 -04004518
Kévin Petit617a76d2019-04-04 13:54:16 +01004519 unsigned firstOperand = usesMangler ? 1 : 0;
4520 for (unsigned i = firstOperand; i < Call->getNumArgOperands(); i++) {
SJWf93f5f32020-05-05 07:27:56 -05004521 Ops << MkId(getSPIRVValue(Call->getArgOperand(i)));
David Neto22f144c2017-06-12 14:26:21 -04004522 }
4523
SJWf93f5f32020-05-05 07:27:56 -05004524 RID = addSPIRVInst(opcode, Ops);
Kévin Petit8a560882019-03-21 15:24:34 +00004525 break;
4526 }
4527
David Neto22f144c2017-06-12 14:26:21 -04004528 // spirv.copy_memory.* intrinsics become OpCopyMemory instructions.
4529 if (Callee->getName().startswith("spirv.copy_memory")) {
4530 //
4531 // Generate OpCopyMemory.
4532 //
4533
4534 // Ops[0] = Dst ID
4535 // Ops[1] = Src ID
4536 // Ops[2] = Memory Access
4537 // Ops[3] = Alignment
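// As used below, the intrinsic's arguments are (dst, src, alignment,
// is_volatile); the volatile flag and the alignment are folded into the
// Memory Access operands.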
4538
4539 auto IsVolatile =
4540 dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue() != 0;
4541
4542 auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
4543 : spv::MemoryAccessMaskNone;
4544
4545 auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;
4546
4547 auto Alignment =
4548 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4549
SJWf93f5f32020-05-05 07:27:56 -05004550 SPIRVOperandVec Ops;
4551 Ops << MkId(getSPIRVValue(Call->getArgOperand(0)))
4552 << MkId(getSPIRVValue(Call->getArgOperand(1))) << MkNum(MemoryAccess)
David Neto257c3892018-04-11 13:19:45 -04004553 << MkNum(static_cast<uint32_t>(Alignment));
David Neto22f144c2017-06-12 14:26:21 -04004554
SJWf93f5f32020-05-05 07:27:56 -05004555 RID = addSPIRVInst(spv::OpCopyMemory, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004556 break;
4557 }
4558
SJW2c317da2020-03-23 07:39:13 -05004559 // read_image is converted to OpSampledImage and OpImageSampleExplicitLod.
4560 // Additionally, OpTypeSampledImage is generated.
SJW173c7e92020-03-16 08:44:47 -05004561 if (IsSampledImageRead(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004562 //
4563 // Generate OpSampledImage.
4564 //
4565 // Ops[0] = Result Type ID
4566 // Ops[1] = Image ID
4567 // Ops[2] = Sampler ID
4568 //
SJWf93f5f32020-05-05 07:27:56 -05004569 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004570
4571 Value *Image = Call->getArgOperand(0);
4572 Value *Sampler = Call->getArgOperand(1);
4573 Value *Coordinate = Call->getArgOperand(2);
4574
4575 TypeMapType &OpImageTypeMap = getImageTypeMap();
4576 Type *ImageTy = Image->getType()->getPointerElementType();
4577 uint32_t ImageTyID = OpImageTypeMap[ImageTy];
SJWf93f5f32020-05-05 07:27:56 -05004578 uint32_t ImageID = getSPIRVValue(Image);
4579 uint32_t SamplerID = getSPIRVValue(Sampler);
David Neto257c3892018-04-11 13:19:45 -04004580
4581 Ops << MkId(ImageTyID) << MkId(ImageID) << MkId(SamplerID);
David Neto22f144c2017-06-12 14:26:21 -04004582
SJWf93f5f32020-05-05 07:27:56 -05004583 uint32_t SampledImageID = addSPIRVInst(spv::OpSampledImage, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004584
4585 //
4586 // Generate OpImageSampleExplicitLod.
4587 //
4588 // Ops[0] = Result Type ID
4589 // Ops[1] = Sampled Image ID
4590 // Ops[2] = Coordinate ID
4591 // Ops[3] = Image Operands (Literal Number)
4592 // Ops[4] ... Ops[n] = Image Operand IDs
4593 //
4594 Ops.clear();
4595
alan-bakerf67468c2019-11-25 15:51:49 -05004596 const bool is_int_image = IsIntImageType(Image->getType());
4597 uint32_t result_type = 0;
4598 if (is_int_image) {
4599 result_type = v4int32ID;
4600 } else {
SJWf93f5f32020-05-05 07:27:56 -05004601 result_type = getSPIRVType(Call->getType());
alan-bakerf67468c2019-11-25 15:51:49 -05004602 }
4603
SJWf93f5f32020-05-05 07:27:56 -05004604 Ops << MkId(result_type) << MkId(SampledImageID)
4605 << MkId(getSPIRVValue(Coordinate))
alan-bakerf67468c2019-11-25 15:51:49 -05004606 << MkNum(spv::ImageOperandsLodMask);
David Neto22f144c2017-06-12 14:26:21 -04004607
4608 Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
SJWf93f5f32020-05-05 07:27:56 -05004609 Ops << MkId(getSPIRVValue(CstFP0));
David Neto22f144c2017-06-12 14:26:21 -04004610
SJWf93f5f32020-05-05 07:27:56 -05004611 RID = addSPIRVInst(spv::OpImageSampleExplicitLod, Ops);
alan-bakerf67468c2019-11-25 15:51:49 -05004612
4613 if (is_int_image) {
4614 // Generate the bitcast.
4615 Ops.clear();
SJWf93f5f32020-05-05 07:27:56 -05004616 Ops << MkId(getSPIRVType(Call->getType())) << MkId(RID);
4617 RID = addSPIRVInst(spv::OpBitcast, Ops);
alan-bakerf67468c2019-11-25 15:51:49 -05004618 }
David Neto22f144c2017-06-12 14:26:21 -04004619 break;
4620 }
4621
alan-baker75090e42020-02-20 11:21:04 -05004622 // read_image (without a sampler) is mapped to OpImageFetch.
SJW173c7e92020-03-16 08:44:47 -05004623 if (IsUnsampledImageRead(Callee)) {
alan-baker75090e42020-02-20 11:21:04 -05004624 Value *Image = Call->getArgOperand(0);
4625 Value *Coordinate = Call->getArgOperand(1);
4626
4627 //
4628 // Generate OpImageFetch
4629 //
4630 // Ops[0] = Result Type ID
4631 // Ops[1] = Image ID
4632 // Ops[2] = Coordinate ID
4633 // Ops[3] = Image Operands (Lod)
4634 // Ops[4] = Lod value (0)
4635 //
SJWf93f5f32020-05-05 07:27:56 -05004636 SPIRVOperandVec Ops;
alan-baker75090e42020-02-20 11:21:04 -05004637
4638 const bool is_int_image = IsIntImageType(Image->getType());
4639 uint32_t result_type = 0;
4640 if (is_int_image) {
4641 result_type = v4int32ID;
4642 } else {
SJWf93f5f32020-05-05 07:27:56 -05004643 result_type = getSPIRVType(Call->getType());
alan-baker75090e42020-02-20 11:21:04 -05004644 }
4645
SJWf93f5f32020-05-05 07:27:56 -05004646 Ops << MkId(result_type) << MkId(getSPIRVValue(Image))
4647 << MkId(getSPIRVValue(Coordinate))
alan-baker75090e42020-02-20 11:21:04 -05004648 << MkNum(spv::ImageOperandsLodMask);
4649
4650 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
SJWf93f5f32020-05-05 07:27:56 -05004651 Ops << MkId(getSPIRVValue(CstInt0));
alan-baker75090e42020-02-20 11:21:04 -05004652
SJWf93f5f32020-05-05 07:27:56 -05004653 RID = addSPIRVInst(spv::OpImageFetch, Ops);
alan-baker75090e42020-02-20 11:21:04 -05004654
4655 if (is_int_image) {
4656 // Generate the bitcast.
4657 Ops.clear();
SJWf93f5f32020-05-05 07:27:56 -05004658 Ops << MkId(getSPIRVType(Call->getType())) << MkId(RID);
4659 RID = addSPIRVInst(spv::OpBitcast, Ops);
alan-baker75090e42020-02-20 11:21:04 -05004660 }
4661 break;
4662 }
4663
alan-bakerf67468c2019-11-25 15:51:49 -05004664 // write_image is mapped to OpImageWrite.
SJW173c7e92020-03-16 08:44:47 -05004665 if (IsImageWrite(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004666 //
4667 // Generate OpImageWrite.
4668 //
4669 // Ops[0] = Image ID
4670 // Ops[1] = Coordinate ID
4671 // Ops[2] = Texel ID
4672 // Ops[3] = (Optional) Image Operands Type (Literal Number)
4673 // Ops[4] ... Ops[n] = (Optional) Operands ID
4674 //
SJWf93f5f32020-05-05 07:27:56 -05004675 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004676
4677 Value *Image = Call->getArgOperand(0);
4678 Value *Coordinate = Call->getArgOperand(1);
4679 Value *Texel = Call->getArgOperand(2);
4680
SJWf93f5f32020-05-05 07:27:56 -05004681 uint32_t ImageID = getSPIRVValue(Image);
4682 uint32_t CoordinateID = getSPIRVValue(Coordinate);
4683 uint32_t TexelID = getSPIRVValue(Texel);
alan-bakerf67468c2019-11-25 15:51:49 -05004684
4685 const bool is_int_image = IsIntImageType(Image->getType());
4686 if (is_int_image) {
4687 // Generate a bitcast to v4int and use it as the texel value.
alan-bakerf67468c2019-11-25 15:51:49 -05004688 Ops << MkId(v4int32ID) << MkId(TexelID);
SJWf93f5f32020-05-05 07:27:56 -05004689 TexelID = addSPIRVInst(spv::OpBitcast, Ops);
alan-bakerf67468c2019-11-25 15:51:49 -05004690 Ops.clear();
alan-bakerf67468c2019-11-25 15:51:49 -05004691 }
David Neto257c3892018-04-11 13:19:45 -04004692 Ops << MkId(ImageID) << MkId(CoordinateID) << MkId(TexelID);
David Neto22f144c2017-06-12 14:26:21 -04004693
SJWf93f5f32020-05-05 07:27:56 -05004694 RID = addSPIRVInst(spv::OpImageWrite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004695 break;
4696 }
4697
alan-bakerce179f12019-12-06 19:02:22 -05004698 // get_image_* is mapped to OpImageQuerySize or OpImageQuerySizeLod
SJW173c7e92020-03-16 08:44:47 -05004699 if (IsImageQuery(Callee)) {
David Neto5c22a252018-03-15 16:07:41 -04004700 //
alan-bakerce179f12019-12-06 19:02:22 -05004701 // Generate OpImageQuerySize[Lod]
David Neto5c22a252018-03-15 16:07:41 -04004702 //
4703 // Ops[0] = Image ID
4704 //
alan-bakerce179f12019-12-06 19:02:22 -05004705 // Result type has components equal to the dimensionality of the image,
4706 // plus 1 if the image is arrayed.
4707 //
alan-bakerf906d2b2019-12-10 11:26:23 -05004708 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
SJWf93f5f32020-05-05 07:27:56 -05004709 SPIRVOperandVec Ops;
David Neto5c22a252018-03-15 16:07:41 -04004710
4711 // Implement:
alan-bakerce179f12019-12-06 19:02:22 -05004712 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
4713 uint32_t SizesTypeID = 0;
4714
David Neto5c22a252018-03-15 16:07:41 -04004715 Value *Image = Call->getArgOperand(0);
alan-bakerce179f12019-12-06 19:02:22 -05004716 const uint32_t dim = ImageDimensionality(Image->getType());
alan-baker7150a1d2020-02-25 08:31:06 -05004717 const uint32_t components =
4718 dim + (IsArrayImageType(Image->getType()) ? 1 : 0);
alan-bakerce179f12019-12-06 19:02:22 -05004719 if (components == 1) {
SJWf93f5f32020-05-05 07:27:56 -05004720 SizesTypeID = getSPIRVType(Type::getInt32Ty(Context));
alan-bakerce179f12019-12-06 19:02:22 -05004721 } else {
SJWf93f5f32020-05-05 07:27:56 -05004722 SizesTypeID = getSPIRVType(
4723 VectorType::get(Type::getInt32Ty(Context), components));
alan-bakerce179f12019-12-06 19:02:22 -05004724 }
SJWf93f5f32020-05-05 07:27:56 -05004725 uint32_t ImageID = getSPIRVValue(Image);
David Neto257c3892018-04-11 13:19:45 -04004726 Ops << MkId(SizesTypeID) << MkId(ImageID);
alan-bakerce179f12019-12-06 19:02:22 -05004727 spv::Op query_opcode = spv::OpImageQuerySize;
SJW173c7e92020-03-16 08:44:47 -05004728 if (IsSampledImageType(Image->getType())) {
alan-bakerce179f12019-12-06 19:02:22 -05004729 query_opcode = spv::OpImageQuerySizeLod;
4730 // Need explicit 0 for Lod operand.
4731 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
SJWf93f5f32020-05-05 07:27:56 -05004732 Ops << MkId(getSPIRVValue(CstInt0));
alan-bakerce179f12019-12-06 19:02:22 -05004733 }
David Neto5c22a252018-03-15 16:07:41 -04004734
SJWf93f5f32020-05-05 07:27:56 -05004735 RID = addSPIRVInst(query_opcode, Ops);
David Neto5c22a252018-03-15 16:07:41 -04004736
alan-bakerce179f12019-12-06 19:02:22 -05004737 // May require an extra instruction to create the appropriate result of
4738 // the builtin function.
SJW173c7e92020-03-16 08:44:47 -05004739 if (IsGetImageDim(Callee)) {
alan-bakerce179f12019-12-06 19:02:22 -05004740 if (dim == 3) {
4741 // get_image_dim returns an int4 for 3D images.
4742 //
David Neto5c22a252018-03-15 16:07:41 -04004743
alan-bakerce179f12019-12-06 19:02:22 -05004744 // Implement:
4745 // %result = OpCompositeConstruct %uint4 %sizes %uint_0
4746 Ops.clear();
SJWf93f5f32020-05-05 07:27:56 -05004747 Ops << MkId(getSPIRVType(
4748 VectorType::get(Type::getInt32Ty(Context), 4)))
4749 << MkId(RID);
David Neto5c22a252018-03-15 16:07:41 -04004750
alan-bakerce179f12019-12-06 19:02:22 -05004751 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
SJWf93f5f32020-05-05 07:27:56 -05004752 Ops << MkId(getSPIRVValue(CstInt0));
David Neto5c22a252018-03-15 16:07:41 -04004753
SJWf93f5f32020-05-05 07:27:56 -05004754 RID = addSPIRVInst(spv::OpCompositeConstruct, Ops);
alan-bakerce179f12019-12-06 19:02:22 -05004755 } else if (dim != components) {
4756 // get_image_dim returns an int2 regardless of the arrayedness of the
4757 // image. If the image is arrayed, an element must be dropped from the
4758 // query result.
4759 //
alan-bakerce179f12019-12-06 19:02:22 -05004760
4761 // Implement:
4762 // %result = OpVectorShuffle %uint2 %sizes %sizes 0 1
4763 Ops.clear();
SJWf93f5f32020-05-05 07:27:56 -05004764 Ops << MkId(getSPIRVType(
4765 VectorType::get(Type::getInt32Ty(Context), 2)))
4766 << MkId(RID) << MkId(RID) << MkNum(0) << MkNum(1);
alan-bakerce179f12019-12-06 19:02:22 -05004767
SJWf93f5f32020-05-05 07:27:56 -05004768 RID = addSPIRVInst(spv::OpVectorShuffle, Ops);
alan-bakerce179f12019-12-06 19:02:22 -05004769 }
4770 } else if (components > 1) {
alan-bakerce179f12019-12-06 19:02:22 -05004771 // Implement:
4772 // %result = OpCompositeExtract %uint %sizes <component number>
4773 Ops.clear();
SJWf93f5f32020-05-05 07:27:56 -05004774 Ops << MkId(TypeMap[I.getType()]) << MkId(RID);
alan-bakerce179f12019-12-06 19:02:22 -05004775
4776 uint32_t component = 0;
4777 if (IsGetImageHeight(Callee))
4778 component = 1;
4779 else if (IsGetImageDepth(Callee))
4780 component = 2;
4781 Ops << MkNum(component);
4782
SJWf93f5f32020-05-05 07:27:56 -05004783 RID = addSPIRVInst(spv::OpCompositeExtract, Ops);
alan-bakerce179f12019-12-06 19:02:22 -05004784 }
David Neto5c22a252018-03-15 16:07:41 -04004785 break;
4786 }
4787
David Neto22f144c2017-06-12 14:26:21 -04004788 // The call instruction is deferred because it needs the callee function's ID.
4789 // Record the slot's location in the SPIRV instruction list.
SJWf93f5f32020-05-05 07:27:56 -05004790 RID = incrNextID();
4791 DeferredInsts.push_back(std::make_tuple(&I, --SPIRVInstList.end(), RID));
David Neto22f144c2017-06-12 14:26:21 -04004792
David Neto3fbb4072017-10-16 11:28:14 -04004793 // Check whether the implementation of this call uses an extended
4794 // instruction plus one more value-producing instruction. If so, then
4795 // reserve the id for the extra value-producing slot.
4796 glsl::ExtInst EInst = getIndirectExtInstEnum(Callee->getName());
4797 if (EInst != kGlslExtInstBad) {
4798 // Reserve a spot for the extra value.
David Neto4d02a532017-09-17 12:57:44 -04004799 // Increase nextID.
SJWf93f5f32020-05-05 07:27:56 -05004800 RID = incrNextID();
David Neto22f144c2017-06-12 14:26:21 -04004801 }
4802 break;
4803 }
4804 case Instruction::Ret: {
4805 unsigned NumOps = I.getNumOperands();
4806 if (NumOps == 0) {
4807 //
4808 // Generate OpReturn.
4809 //
SJWf93f5f32020-05-05 07:27:56 -05004810 RID = addSPIRVInst(spv::OpReturn);
David Neto22f144c2017-06-12 14:26:21 -04004811 } else {
4812 //
4813 // Generate OpReturnValue.
4814 //
4815
4816 // Ops[0] = Return Value ID
SJWf93f5f32020-05-05 07:27:56 -05004817 SPIRVOperandVec Ops;
David Neto257c3892018-04-11 13:19:45 -04004818
SJWf93f5f32020-05-05 07:27:56 -05004819 Ops << MkId(getSPIRVValue(I.getOperand(0)));
David Neto22f144c2017-06-12 14:26:21 -04004820
SJWf93f5f32020-05-05 07:27:56 -05004821 RID = addSPIRVInst(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004822 break;
4823 }
4824 break;
4825 }
4826 }
SJWf93f5f32020-05-05 07:27:56 -05004827
4828 // Register Instruction to ValueMap.
4829 if (0 != RID) {
4830 VMap[&I] = RID;
4831 }
David Neto22f144c2017-06-12 14:26:21 -04004832}
4833
4834void SPIRVProducerPass::GenerateFuncEpilogue() {
David Neto22f144c2017-06-12 14:26:21 -04004835
4836 //
4837 // Generate OpFunctionEnd
4838 //
SJWf93f5f32020-05-05 07:27:56 -05004839 addSPIRVInst(spv::OpFunctionEnd);
David Neto22f144c2017-06-12 14:26:21 -04004840}
4841
4842bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05004843 // Don't specialize <4 x i8> if i8 is generally supported.
4844 if (clspv::Option::Int8Support())
4845 return false;
4846
David Neto22f144c2017-06-12 14:26:21 -04004847 LLVMContext &Context = Ty->getContext();
James Pricecf53df42020-04-20 14:41:24 -04004848 if (auto VecTy = dyn_cast<VectorType>(Ty)) {
4849 if (VecTy->getElementType() == Type::getInt8Ty(Context) &&
4850 VecTy->getNumElements() == 4) {
David Neto22f144c2017-06-12 14:26:21 -04004851 return true;
4852 }
4853 }
4854
4855 return false;
4856}
4857
4858void SPIRVProducerPass::HandleDeferredInstruction() {
SJW69939d52020-04-16 07:29:07 -05004859 SPIRVInstructionList &SPIRVInstList = getSPIRVInstList(kFunctions);
David Neto22f144c2017-06-12 14:26:21 -04004860 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
4861
4862 for (auto DeferredInst = DeferredInsts.rbegin();
4863 DeferredInst != DeferredInsts.rend(); ++DeferredInst) {
4864 Value *Inst = std::get<0>(*DeferredInst);
4865 SPIRVInstructionList::iterator InsertPoint = ++std::get<1>(*DeferredInst);
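// Skip over any OpPhi instructions at the insertion point: phis must stay
// at the start of their block, so merge and branch instructions are
// inserted after them.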
4866 if (InsertPoint != SPIRVInstList.end()) {
4867 while ((*InsertPoint)->getOpcode() == spv::OpPhi) {
4868 ++InsertPoint;
4869 }
4870 }
4871
4872 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
alan-baker06cad652019-12-03 17:56:47 -05004873 // Check whether this branch needs to be preceded by a merge instruction.
David Neto22f144c2017-06-12 14:26:21 -04004874 BasicBlock *BrBB = Br->getParent();
alan-baker06cad652019-12-03 17:56:47 -05004875 if (ContinueBlocks.count(BrBB)) {
David Neto22f144c2017-06-12 14:26:21 -04004876 //
4877 // Generate OpLoopMerge.
4878 //
4879 // Ops[0] = Merge Block ID
4880 // Ops[1] = Continue Target ID
4881 // Ops[2] = Loop Control
SJWf93f5f32020-05-05 07:27:56 -05004882 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004883
alan-baker06cad652019-12-03 17:56:47 -05004884 auto MergeBB = MergeBlocks[BrBB];
4885 auto ContinueBB = ContinueBlocks[BrBB];
SJWf93f5f32020-05-05 07:27:56 -05004886 uint32_t MergeBBID = getSPIRVValue(MergeBB);
4887 uint32_t ContinueBBID = getSPIRVValue(ContinueBB);
David Neto257c3892018-04-11 13:19:45 -04004888 Ops << MkId(MergeBBID) << MkId(ContinueBBID)
alan-baker06cad652019-12-03 17:56:47 -05004889 << MkNum(spv::LoopControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04004890
David Neto87846742018-04-11 17:36:22 -04004891 auto *MergeInst = new SPIRVInstruction(spv::OpLoopMerge, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004892 SPIRVInstList.insert(InsertPoint, MergeInst);
alan-baker06cad652019-12-03 17:56:47 -05004893 } else if (MergeBlocks.count(BrBB)) {
4894 //
4895 // Generate OpSelectionMerge.
4896 //
4897 // Ops[0] = Merge Block ID
4898 // Ops[1] = Selection Control
SJWf93f5f32020-05-05 07:27:56 -05004899 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004900
alan-baker06cad652019-12-03 17:56:47 -05004901 auto MergeBB = MergeBlocks[BrBB];
SJWf93f5f32020-05-05 07:27:56 -05004902 uint32_t MergeBBID = getSPIRVValue(MergeBB);
alan-baker06cad652019-12-03 17:56:47 -05004903 Ops << MkId(MergeBBID) << MkNum(spv::SelectionControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04004904
alan-baker06cad652019-12-03 17:56:47 -05004905 auto *MergeInst = new SPIRVInstruction(spv::OpSelectionMerge, Ops);
4906 SPIRVInstList.insert(InsertPoint, MergeInst);
David Neto22f144c2017-06-12 14:26:21 -04004907 }
4908
4909 if (Br->isConditional()) {
4910 //
4911 // Generate OpBranchConditional.
4912 //
4913 // Ops[0] = Condition ID
4914 // Ops[1] = True Label ID
4915 // Ops[2] = False Label ID
4916 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004917 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004918
SJWf93f5f32020-05-05 07:27:56 -05004919 uint32_t CondID = getSPIRVValue(Br->getCondition());
4920 uint32_t TrueBBID = getSPIRVValue(Br->getSuccessor(0));
4921 uint32_t FalseBBID = getSPIRVValue(Br->getSuccessor(1));
David Neto257c3892018-04-11 13:19:45 -04004922
4923 Ops << MkId(CondID) << MkId(TrueBBID) << MkId(FalseBBID);
David Neto22f144c2017-06-12 14:26:21 -04004924
David Neto87846742018-04-11 17:36:22 -04004925 auto *BrInst = new SPIRVInstruction(spv::OpBranchConditional, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004926 SPIRVInstList.insert(InsertPoint, BrInst);
4927 } else {
4928 //
4929 // Generate OpBranch.
4930 //
4931 // Ops[0] = Target Label ID
SJWf93f5f32020-05-05 07:27:56 -05004932 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004933
SJWf93f5f32020-05-05 07:27:56 -05004934 uint32_t TargetID = getSPIRVValue(Br->getSuccessor(0));
David Neto257c3892018-04-11 13:19:45 -04004935 Ops << MkId(TargetID);
David Neto22f144c2017-06-12 14:26:21 -04004936
David Neto87846742018-04-11 17:36:22 -04004937 SPIRVInstList.insert(InsertPoint,
4938 new SPIRVInstruction(spv::OpBranch, Ops));
David Neto22f144c2017-06-12 14:26:21 -04004939 }
4940 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5ed87542020-03-23 11:05:22 -04004941 if (PHI->getType()->isPointerTy() && !IsSamplerType(PHI->getType()) &&
4942 !IsImageType(PHI->getType())) {
alan-baker5b86ed72019-02-15 08:26:50 -05004943 // OpPhi on pointers requires variable pointers.
4944 setVariablePointersCapabilities(
4945 PHI->getType()->getPointerAddressSpace());
4946 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
4947 setVariablePointers(true);
4948 }
4949 }
4950
David Neto22f144c2017-06-12 14:26:21 -04004951 //
4952 // Generate OpPhi.
4953 //
4954 // Ops[0] = Result Type ID
4955 // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
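      // e.g. (illustrative) %p = OpPhi %float %val_a %block_a %val_b %block_b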
SJWf93f5f32020-05-05 07:27:56 -05004956 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004957
SJWf93f5f32020-05-05 07:27:56 -05004958 Ops << MkId(getSPIRVType(PHI->getType()));
David Neto22f144c2017-06-12 14:26:21 -04004959
David Neto22f144c2017-06-12 14:26:21 -04004960 for (unsigned i = 0; i < PHI->getNumIncomingValues(); i++) {
SJWf93f5f32020-05-05 07:27:56 -05004961 uint32_t VarID = getSPIRVValue(PHI->getIncomingValue(i));
4962 uint32_t ParentID = getSPIRVValue(PHI->getIncomingBlock(i));
David Neto257c3892018-04-11 13:19:45 -04004963 Ops << MkId(VarID) << MkId(ParentID);
David Neto22f144c2017-06-12 14:26:21 -04004964 }
4965
4966 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04004967 InsertPoint,
4968 new SPIRVInstruction(spv::OpPhi, std::get<2>(*DeferredInst), Ops));
David Neto22f144c2017-06-12 14:26:21 -04004969 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
4970 Function *Callee = Call->getCalledFunction();
SJW2c317da2020-03-23 07:39:13 -05004971 LLVMContext &Context = Callee->getContext();
4972 auto IntTy = Type::getInt32Ty(Context);
4973 auto callee_code = Builtins::Lookup(Callee);
David Neto3fbb4072017-10-16 11:28:14 -04004974 auto callee_name = Callee->getName();
4975 glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04004976
4977 if (EInst) {
4978 uint32_t &ExtInstImportID = getOpExtInstImportID();
4979
4980 //
4981 // Generate OpExtInst.
4982 //
4983
4984 // Ops[0] = Result Type ID
4985 // Ops[1] = Set ID (OpExtInstImport ID)
4986 // Ops[2] = Instruction Number (Literal Number)
4987 // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
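        // e.g. (illustrative) %r = OpExtInst %float %glsl_import Fma %a %b %c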
SJWf93f5f32020-05-05 07:27:56 -05004988 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004989
SJWf93f5f32020-05-05 07:27:56 -05004990 Ops << MkId(getSPIRVType(Call->getType())) << MkId(ExtInstImportID)
David Neto862b7d82018-06-14 18:48:37 -04004991 << MkNum(EInst);
David Neto22f144c2017-06-12 14:26:21 -04004992
David Neto22f144c2017-06-12 14:26:21 -04004993 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
4994 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
SJWf93f5f32020-05-05 07:27:56 -05004995 Ops << MkId(getSPIRVValue(Call->getOperand(i)));
David Neto22f144c2017-06-12 14:26:21 -04004996 }
4997
David Neto87846742018-04-11 17:36:22 -04004998 auto *ExtInst = new SPIRVInstruction(spv::OpExtInst,
4999 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005000 SPIRVInstList.insert(InsertPoint, ExtInst);
5001
David Neto3fbb4072017-10-16 11:28:14 -04005002 const auto IndirectExtInst = getIndirectExtInstEnum(callee_name);
5003 if (IndirectExtInst != kGlslExtInstBad) {
5004 // Generate one more instruction that uses the result of the extended
5005 // instruction. Its result id is one more than the id of the
5006 // extended instruction.
David Neto3fbb4072017-10-16 11:28:14 -04005007 auto generate_extra_inst = [this, &Context, &Call, &DeferredInst,
SJWf93f5f32020-05-05 07:27:56 -05005008 &SPIRVInstList, &InsertPoint](
David Neto3fbb4072017-10-16 11:28:14 -04005009 spv::Op opcode, Constant *constant) {
5010 //
5011 // Generate instruction like:
5012 // result = opcode constant <extinst-result>
5013 //
5014 // Ops[0] = Result Type ID
5015 // Ops[1] = Operand 0 ;; the constant, suitably splatted
5016 // Ops[2] = Operand 1 ;; the result of the extended instruction
SJWf93f5f32020-05-05 07:27:56 -05005017 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04005018
David Neto3fbb4072017-10-16 11:28:14 -04005019 Type *resultTy = Call->getType();
SJWf93f5f32020-05-05 07:27:56 -05005020 Ops << MkId(getSPIRVType(resultTy));
David Neto3fbb4072017-10-16 11:28:14 -04005021
5022 if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
5023 constant = ConstantVector::getSplat(
alan-baker7261e062020-03-15 14:35:48 -04005024 {static_cast<unsigned>(vectorTy->getNumElements()), false},
5025 constant);
David Neto3fbb4072017-10-16 11:28:14 -04005026 }
SJWf93f5f32020-05-05 07:27:56 -05005027 Ops << MkId(getSPIRVValue(constant))
5028 << MkId(std::get<2>(*DeferredInst));
David Neto3fbb4072017-10-16 11:28:14 -04005029
5030 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005031 InsertPoint, new SPIRVInstruction(
5032 opcode, std::get<2>(*DeferredInst) + 1, Ops));
David Neto3fbb4072017-10-16 11:28:14 -04005033 };
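        // Worked example: clz on a 32-bit value maps to FindUMsb plus an
        // OpISub computing 31 - msb, so clz(1) becomes 31 - 0 = 31. The
        // acospi/asinpi/atanpi/atan2pi variants map to the plain trig ExtInst
        // followed by an OpFMul by 1/pi (kOneOverPi).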
5034
5035 switch (IndirectExtInst) {
5036 case glsl::ExtInstFindUMsb: // Implementing clz
SJW2c317da2020-03-23 07:39:13 -05005037 generate_extra_inst(spv::OpISub, ConstantInt::get(IntTy, 31));
David Neto3fbb4072017-10-16 11:28:14 -04005038 break;
5039 case glsl::ExtInstAcos: // Implementing acospi
5040 case glsl::ExtInstAsin: // Implementing asinpi
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005041 case glsl::ExtInstAtan: // Implementing atanpi
David Neto3fbb4072017-10-16 11:28:14 -04005042 case glsl::ExtInstAtan2: // Implementing atan2pi
5043 generate_extra_inst(
5044 spv::OpFMul,
5045 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
5046 break;
5047
5048 default:
5049 assert(false && "internally inconsistent");
David Neto4d02a532017-09-17 12:57:44 -04005050 }
David Neto22f144c2017-06-12 14:26:21 -04005051 }
David Neto3fbb4072017-10-16 11:28:14 -04005052
SJW2c317da2020-03-23 07:39:13 -05005053 } else if (callee_code == Builtins::kPopcount) {
David Neto22f144c2017-06-12 14:26:21 -04005054 //
5055 // Generate OpBitCount
5056 //
5057 // Ops[0] = Result Type ID
5058 // Ops[1] = Base ID
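        // e.g. (illustrative) %r = OpBitCount %uint %arg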
SJWf93f5f32020-05-05 07:27:56 -05005059 SPIRVOperandVec Ops;
5060 Ops << MkId(getSPIRVType(Call->getType()))
5061 << MkId(getSPIRVValue(Call->getOperand(0)));
David Neto22f144c2017-06-12 14:26:21 -04005062
5063 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005064 InsertPoint, new SPIRVInstruction(spv::OpBitCount,
David Neto22f144c2017-06-12 14:26:21 -04005065 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005066
David Neto862b7d82018-06-14 18:48:37 -04005067 } else if (callee_name.startswith(kCompositeConstructFunctionPrefix)) {
David Netoab03f432017-11-03 17:00:44 -04005068
5069 // Generate an OpCompositeConstruct
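        // e.g. (illustrative) %v = OpCompositeConstruct %v4float %x %y %z %w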
SJWf93f5f32020-05-05 07:27:56 -05005070 SPIRVOperandVec Ops;
David Netoab03f432017-11-03 17:00:44 -04005071
5072 // The result type.
SJWf93f5f32020-05-05 07:27:56 -05005073 Ops << MkId(getSPIRVType(Call->getType()));
David Netoab03f432017-11-03 17:00:44 -04005074
5075 for (Use &use : Call->arg_operands()) {
SJWf93f5f32020-05-05 07:27:56 -05005076 Ops << MkId(getSPIRVValue(use.get()));
David Netoab03f432017-11-03 17:00:44 -04005077 }
5078
5079 SPIRVInstList.insert(
David Neto87846742018-04-11 17:36:22 -04005080 InsertPoint, new SPIRVInstruction(spv::OpCompositeConstruct,
5081 std::get<2>(*DeferredInst), Ops));
David Netoab03f432017-11-03 17:00:44 -04005082
Alan Baker202c8c72018-08-13 13:47:44 -04005083 } else if (callee_name.startswith(clspv::ResourceAccessorFunction())) {
5084
5085 // We have already mapped the call's result value to an ID.
5086 // Don't generate any code now.
5087
5088 } else if (callee_name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04005089
5090 // We have already mapped the call's result value to an ID.
5091 // Don't generate any code now.
5092
David Neto22f144c2017-06-12 14:26:21 -04005093 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05005094 if (Call->getType()->isPointerTy()) {
5095 // Functions returning pointers require variable pointers.
5096 setVariablePointersCapabilities(
5097 Call->getType()->getPointerAddressSpace());
5098 }
5099
David Neto22f144c2017-06-12 14:26:21 -04005100 //
5101 // Generate OpFunctionCall.
5102 //
5103
5104 // Ops[0] = Result Type ID
5105 // Ops[1] = Callee Function ID
5106 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
SJWf93f5f32020-05-05 07:27:56 -05005107 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04005108
SJWf93f5f32020-05-05 07:27:56 -05005109 Ops << MkId(getSPIRVType(Call->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005110
SJWf93f5f32020-05-05 07:27:56 -05005111 uint32_t CalleeID = getSPIRVValue(Callee);
David Neto43568eb2017-10-13 18:25:25 -04005112 if (CalleeID == 0) {
5113 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04005114 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04005115 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
5116 // causes an infinite loop. Instead, go ahead and generate
5117 // the bad function call. A validator will catch the 0-Id.
5118 // llvm_unreachable("Can't translate function call");
5119 }
David Neto22f144c2017-06-12 14:26:21 -04005120
David Neto257c3892018-04-11 13:19:45 -04005121 Ops << MkId(CalleeID);
David Neto22f144c2017-06-12 14:26:21 -04005122
David Neto22f144c2017-06-12 14:26:21 -04005123 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
5124 for (unsigned i = 0; i < CalleeFTy->getNumParams(); i++) {
alan-baker5b86ed72019-02-15 08:26:50 -05005125 auto *operand = Call->getOperand(i);
alan-bakerd4d50652019-12-03 17:17:15 -05005126 auto *operand_type = operand->getType();
5127 // Images and samplers can be passed as function parameters without
5128 // variable pointers.
5129 if (operand_type->isPointerTy() && !IsImageType(operand_type) &&
5130 !IsSamplerType(operand_type)) {
alan-baker5b86ed72019-02-15 08:26:50 -05005131 auto sc =
5132 GetStorageClass(operand->getType()->getPointerAddressSpace());
5133 if (sc == spv::StorageClassStorageBuffer) {
5134 // Passing SSBO by reference requires variable pointers storage
5135 // buffer.
5136 setVariablePointersStorageBuffer(true);
5137 } else if (sc == spv::StorageClassWorkgroup) {
5138 // Workgroup references require variable pointers if they are not
5139 // memory object declarations.
5140 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
5141 // Workgroup accessor represents a variable reference.
5142 if (!operand_call->getCalledFunction()->getName().startswith(
5143 clspv::WorkgroupAccessorFunction()))
5144 setVariablePointers(true);
5145 } else {
5146 // Arguments are function parameters.
5147 if (!isa<Argument>(operand))
5148 setVariablePointers(true);
5149 }
5150 }
5151 }
SJWf93f5f32020-05-05 07:27:56 -05005152 Ops << MkId(getSPIRVValue(operand));
David Neto22f144c2017-06-12 14:26:21 -04005153 }
5154
David Neto87846742018-04-11 17:36:22 -04005155 auto *CallInst = new SPIRVInstruction(spv::OpFunctionCall,
5156 std::get<2>(*DeferredInst), Ops);
David Neto22f144c2017-06-12 14:26:21 -04005157 SPIRVInstList.insert(InsertPoint, CallInst);
5158 }
5159 }
5160 }
5161}
5162
SJW77b87ad2020-04-21 14:37:52 -05005163void SPIRVProducerPass::HandleDeferredDecorations() {
5164 const auto &DL = module->getDataLayout();
Alan Baker202c8c72018-08-13 13:47:44 -04005165 if (getTypesNeedingArrayStride().empty() && LocalArgSpecIds.empty()) {
David Neto1a1a0582017-07-07 12:01:44 -04005166 return;
David Netoc6f3ab22018-04-06 18:02:31 -04005167 }
David Neto1a1a0582017-07-07 12:01:44 -04005168
David Netoc6f3ab22018-04-06 18:02:31 -04005169 // Insert ArrayStride decorations on pointer types, due to OpPtrAccessChain
5170 // instructions we generated earlier.
David Neto85082642018-03-24 06:55:20 -07005171 for (auto *type : getTypesNeedingArrayStride()) {
5172 Type *elemTy = nullptr;
5173 if (auto *ptrTy = dyn_cast<PointerType>(type)) {
5174 elemTy = ptrTy->getElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005175 } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
alan-baker8eb435a2020-04-08 00:42:06 -04005176 elemTy = arrayTy->getElementType();
5177 } else if (auto *vecTy = dyn_cast<VectorType>(type)) {
5178 elemTy = vecTy->getElementType();
David Neto85082642018-03-24 06:55:20 -07005179 } else {
5180 errs() << "Unhandled strided type " << *type << "\n";
5181 llvm_unreachable("Unhandled strided type");
5182 }
David Neto1a1a0582017-07-07 12:01:44 -04005183
5184 // Ops[0] = Target ID
5185 // Ops[1] = Decoration (ArrayStride)
5186 // Ops[2] = Stride number (Literal Number)
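    // e.g. (illustrative) a pointer-to-float used with OpPtrAccessChain gets:
    //   OpDecorate %ptr ArrayStride 4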
SJWf93f5f32020-05-05 07:27:56 -05005187 SPIRVOperandVec Ops;
David Neto1a1a0582017-07-07 12:01:44 -04005188
David Neto85082642018-03-24 06:55:20 -07005189 // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
Alan Bakerfcda9482018-10-02 17:09:59 -04005190 const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));
David Neto257c3892018-04-11 13:19:45 -04005191
SJWf93f5f32020-05-05 07:27:56 -05005192 Ops << MkId(getSPIRVType(type)) << MkNum(spv::DecorationArrayStride)
David Neto257c3892018-04-11 13:19:45 -04005193 << MkNum(stride);
David Neto1a1a0582017-07-07 12:01:44 -04005194
SJWf93f5f32020-05-05 07:27:56 -05005195 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04005196 }
David Neto1a1a0582017-07-07 12:01:44 -04005197}
5198
David Neto22f144c2017-06-12 14:26:21 -04005199glsl::ExtInst SPIRVProducerPass::getExtInstEnum(StringRef Name) {
SJW2c317da2020-03-23 07:39:13 -05005200
5201 const auto &fi = Builtins::Lookup(Name);
5202 switch (fi) {
5203 case Builtins::kClamp: {
5204 auto param_type = fi.getParameter(0);
5205 if (param_type.type_id == Type::FloatTyID) {
5206 return glsl::ExtInst::ExtInstFClamp;
5207 }
5208 return param_type.is_signed ? glsl::ExtInst::ExtInstSClamp
5209 : glsl::ExtInst::ExtInstUClamp;
5210 }
5211 case Builtins::kMax: {
5212 auto param_type = fi.getParameter(0);
5213 if (param_type.type_id == Type::FloatTyID) {
5214 return glsl::ExtInst::ExtInstFMax;
5215 }
5216 return param_type.is_signed ? glsl::ExtInst::ExtInstSMax
5217 : glsl::ExtInst::ExtInstUMax;
5218 }
5219 case Builtins::kMin: {
5220 auto param_type = fi.getParameter(0);
5221 if (param_type.type_id == Type::FloatTyID) {
5222 return glsl::ExtInst::ExtInstFMin;
5223 }
5224 return param_type.is_signed ? glsl::ExtInst::ExtInstSMin
5225 : glsl::ExtInst::ExtInstUMin;
5226 }
5227 case Builtins::kAbs:
5228 return glsl::ExtInst::ExtInstSAbs;
5229 case Builtins::kFmax:
5230 return glsl::ExtInst::ExtInstFMax;
5231 case Builtins::kFmin:
5232 return glsl::ExtInst::ExtInstFMin;
5233 case Builtins::kDegrees:
5234 return glsl::ExtInst::ExtInstDegrees;
5235 case Builtins::kRadians:
5236 return glsl::ExtInst::ExtInstRadians;
5237 case Builtins::kMix:
5238 return glsl::ExtInst::ExtInstFMix;
5239 case Builtins::kAcos:
5240 case Builtins::kAcospi:
5241 return glsl::ExtInst::ExtInstAcos;
5242 case Builtins::kAcosh:
5243 return glsl::ExtInst::ExtInstAcosh;
5244 case Builtins::kAsin:
5245 case Builtins::kAsinpi:
5246 return glsl::ExtInst::ExtInstAsin;
5247 case Builtins::kAsinh:
5248 return glsl::ExtInst::ExtInstAsinh;
5249 case Builtins::kAtan:
5250 case Builtins::kAtanpi:
5251 return glsl::ExtInst::ExtInstAtan;
5252 case Builtins::kAtanh:
5253 return glsl::ExtInst::ExtInstAtanh;
5254 case Builtins::kAtan2:
5255 case Builtins::kAtan2pi:
5256 return glsl::ExtInst::ExtInstAtan2;
5257 case Builtins::kCeil:
5258 return glsl::ExtInst::ExtInstCeil;
5259 case Builtins::kSin:
5260 case Builtins::kHalfSin:
5261 case Builtins::kNativeSin:
5262 return glsl::ExtInst::ExtInstSin;
5263 case Builtins::kSinh:
5264 return glsl::ExtInst::ExtInstSinh;
5265 case Builtins::kCos:
5266 case Builtins::kHalfCos:
5267 case Builtins::kNativeCos:
5268 return glsl::ExtInst::ExtInstCos;
5269 case Builtins::kCosh:
5270 return glsl::ExtInst::ExtInstCosh;
5271 case Builtins::kTan:
5272 case Builtins::kHalfTan:
5273 case Builtins::kNativeTan:
5274 return glsl::ExtInst::ExtInstTan;
5275 case Builtins::kTanh:
5276 return glsl::ExtInst::ExtInstTanh;
5277 case Builtins::kExp:
5278 case Builtins::kHalfExp:
5279 case Builtins::kNativeExp:
5280 return glsl::ExtInst::ExtInstExp;
5281 case Builtins::kExp2:
5282 case Builtins::kHalfExp2:
5283 case Builtins::kNativeExp2:
5284 return glsl::ExtInst::ExtInstExp2;
5285 case Builtins::kLog:
5286 case Builtins::kHalfLog:
5287 case Builtins::kNativeLog:
5288 return glsl::ExtInst::ExtInstLog;
5289 case Builtins::kLog2:
5290 case Builtins::kHalfLog2:
5291 case Builtins::kNativeLog2:
5292 return glsl::ExtInst::ExtInstLog2;
5293 case Builtins::kFabs:
5294 return glsl::ExtInst::ExtInstFAbs;
5295 case Builtins::kFma:
5296 return glsl::ExtInst::ExtInstFma;
5297 case Builtins::kFloor:
5298 return glsl::ExtInst::ExtInstFloor;
5299 case Builtins::kLdexp:
5300 return glsl::ExtInst::ExtInstLdexp;
5301 case Builtins::kPow:
5302 case Builtins::kPowr:
5303 case Builtins::kHalfPowr:
5304 case Builtins::kNativePowr:
5305 return glsl::ExtInst::ExtInstPow;
5306 case Builtins::kRound:
5307 return glsl::ExtInst::ExtInstRound;
5308 case Builtins::kSqrt:
5309 case Builtins::kHalfSqrt:
5310 case Builtins::kNativeSqrt:
5311 return glsl::ExtInst::ExtInstSqrt;
5312 case Builtins::kRsqrt:
5313 case Builtins::kHalfRsqrt:
5314 case Builtins::kNativeRsqrt:
5315 return glsl::ExtInst::ExtInstInverseSqrt;
5316 case Builtins::kTrunc:
5317 return glsl::ExtInst::ExtInstTrunc;
5318 case Builtins::kFrexp:
5319 return glsl::ExtInst::ExtInstFrexp;
5320 case Builtins::kFract:
5321 return glsl::ExtInst::ExtInstFract;
5322 case Builtins::kSign:
5323 return glsl::ExtInst::ExtInstFSign;
5324 case Builtins::kLength:
5325 case Builtins::kFastLength:
5326 return glsl::ExtInst::ExtInstLength;
5327 case Builtins::kDistance:
5328 case Builtins::kFastDistance:
5329 return glsl::ExtInst::ExtInstDistance;
5330 case Builtins::kStep:
5331 return glsl::ExtInst::ExtInstStep;
5332 case Builtins::kSmoothstep:
5333 return glsl::ExtInst::ExtInstSmoothStep;
5334 case Builtins::kCross:
5335 return glsl::ExtInst::ExtInstCross;
5336 case Builtins::kNormalize:
5337 case Builtins::kFastNormalize:
5338 return glsl::ExtInst::ExtInstNormalize;
5339 default:
5340 break;
5341 }
5342
David Neto22f144c2017-06-12 14:26:21 -04005343 return StringSwitch<glsl::ExtInst>(Name)
David Neto22f144c2017-06-12 14:26:21 -04005344 .StartsWith("llvm.fmuladd.", glsl::ExtInst::ExtInstFma)
5345 .Case("spirv.unpack.v2f16", glsl::ExtInst::ExtInstUnpackHalf2x16)
5346 .Case("spirv.pack.v2f16", glsl::ExtInst::ExtInstPackHalf2x16)
David Neto3fbb4072017-10-16 11:28:14 -04005347 .Default(kGlslExtInstBad);
5348}
5349
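// Maps builtins that are implemented "indirectly": the extended instruction is
// emitted first and HandleDeferredInstruction then appends a fixup instruction
// (e.g. the subtract for clz, the 1/pi multiply for atanpi).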
5350glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(StringRef Name) {
SJW2c317da2020-03-23 07:39:13 -05005351 switch (Builtins::Lookup(Name)) {
5352 case Builtins::kClz:
5353 return glsl::ExtInst::ExtInstFindUMsb;
5354 case Builtins::kAcospi:
5355 return glsl::ExtInst::ExtInstAcos;
5356 case Builtins::kAsinpi:
5357 return glsl::ExtInst::ExtInstAsin;
5358 case Builtins::kAtanpi:
5359 return glsl::ExtInst::ExtInstAtan;
5360 case Builtins::kAtan2pi:
5361 return glsl::ExtInst::ExtInstAtan2;
5362 default:
5363 break;
5364 }
5365 return kGlslExtInstBad;
David Neto3fbb4072017-10-16 11:28:14 -04005366}
5367
alan-bakerb6b09dc2018-11-08 16:59:28 -05005368glsl::ExtInst
5369SPIRVProducerPass::getDirectOrIndirectExtInstEnum(StringRef Name) {
David Neto3fbb4072017-10-16 11:28:14 -04005370 auto direct = getExtInstEnum(Name);
5371 if (direct != kGlslExtInstBad)
5372 return direct;
5373 return getIndirectExtInstEnum(Name);
David Neto22f144c2017-06-12 14:26:21 -04005374}
5375
David Neto22f144c2017-06-12 14:26:21 -04005376void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
David Neto0676e6f2017-07-11 18:47:44 -04005377 binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
David Neto22f144c2017-06-12 14:26:21 -04005378}
5379
5380void SPIRVProducerPass::WriteResultID(SPIRVInstruction *Inst) {
5381 WriteOneWord(Inst->getResultID());
5382}
5383
5384void SPIRVProducerPass::WriteWordCountAndOpcode(SPIRVInstruction *Inst) {
5385 // High 16 bit : Word Count
5386 // Low 16 bit : Opcode
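  // e.g. OpTypeFloat (opcode 22) with a word count of 3 encodes as 0x00030016.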
5387 uint32_t Word = Inst->getOpcode();
David Netoee2660d2018-06-28 16:31:29 -04005388 const uint32_t count = Inst->getWordCount();
5389 if (count > 65535) {
5390 errs() << "Word count limit of 65535 exceeded: " << count << "\n";
5391 llvm_unreachable("Word count too high");
5392 }
David Neto22f144c2017-06-12 14:26:21 -04005393 Word |= Inst->getWordCount() << 16;
5394 WriteOneWord(Word);
5395}
5396
David Netoef5ba2b2019-12-20 08:35:54 -05005397void SPIRVProducerPass::WriteOperand(const std::unique_ptr<SPIRVOperand> &Op) {
David Neto22f144c2017-06-12 14:26:21 -04005398 SPIRVOperandType OpTy = Op->getType();
5399 switch (OpTy) {
5400 default: {
5401 llvm_unreachable("Unsupported SPIRV Operand Type???");
5402 break;
5403 }
5404 case SPIRVOperandType::NUMBERID: {
5405 WriteOneWord(Op->getNumID());
5406 break;
5407 }
5408 case SPIRVOperandType::LITERAL_STRING: {
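    // Literal strings are packed four UTF-8 octets per word, first character
    // in the lowest-order byte, and are always terminated by at least one NUL
    // octet, which is why LastWord is written unconditionally below.
    // e.g. "foo" -> 0x006F6F66 and "main" -> 0x6E69616D, 0x00000000.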
5409 std::string Str = Op->getLiteralStr();
5410 const char *Data = Str.c_str();
5411 size_t WordSize = Str.size() / 4;
5412 for (unsigned Idx = 0; Idx < WordSize; Idx++) {
5413 WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
5414 }
5415
5416 uint32_t Remainder = Str.size() % 4;
5417 uint32_t LastWord = 0;
5418 if (Remainder) {
5419 for (unsigned Idx = 0; Idx < Remainder; Idx++) {
5420 LastWord |= Data[4 * WordSize + Idx] << 8 * Idx;
5421 }
5422 }
5423
5424 WriteOneWord(LastWord);
5425 break;
5426 }
5427 case SPIRVOperandType::LITERAL_INTEGER:
5428 case SPIRVOperandType::LITERAL_FLOAT: {
5429 auto LiteralNum = Op->getLiteralNum();
    // TODO: Handle LiteralNum carefully.
5431 for (auto Word : LiteralNum) {
5432 WriteOneWord(Word);
5433 }
5434 break;
5435 }
5436 }
5437}
5438
5439void SPIRVProducerPass::WriteSPIRVBinary() {
SJW69939d52020-04-16 07:29:07 -05005440 for (int i = 0; i < kSectionCount; ++i) {
5441 WriteSPIRVBinary(SPIRVSections[i]);
5442 }
5443}
5444
5445void SPIRVProducerPass::WriteSPIRVBinary(SPIRVInstructionList &SPIRVInstList) {
David Neto22f144c2017-06-12 14:26:21 -04005446
5447 for (auto Inst : SPIRVInstList) {
David Netoef5ba2b2019-12-20 08:35:54 -05005448 const auto &Ops = Inst->getOperands();
David Neto22f144c2017-06-12 14:26:21 -04005449 spv::Op Opcode = static_cast<spv::Op>(Inst->getOpcode());
5450
5451 switch (Opcode) {
5452 default: {
David Neto5c22a252018-03-15 16:07:41 -04005453 errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
David Neto22f144c2017-06-12 14:26:21 -04005454 llvm_unreachable("Unsupported SPIRV instruction");
5455 break;
5456 }
5457 case spv::OpCapability:
5458 case spv::OpExtension:
5459 case spv::OpMemoryModel:
5460 case spv::OpEntryPoint:
5461 case spv::OpExecutionMode:
5462 case spv::OpSource:
5463 case spv::OpDecorate:
5464 case spv::OpMemberDecorate:
5465 case spv::OpBranch:
5466 case spv::OpBranchConditional:
5467 case spv::OpSelectionMerge:
5468 case spv::OpLoopMerge:
5469 case spv::OpStore:
5470 case spv::OpImageWrite:
5471 case spv::OpReturnValue:
5472 case spv::OpControlBarrier:
5473 case spv::OpMemoryBarrier:
5474 case spv::OpReturn:
5475 case spv::OpFunctionEnd:
5476 case spv::OpCopyMemory: {
5477 WriteWordCountAndOpcode(Inst);
5478 for (uint32_t i = 0; i < Ops.size(); i++) {
5479 WriteOperand(Ops[i]);
5480 }
5481 break;
5482 }
5483 case spv::OpTypeBool:
5484 case spv::OpTypeVoid:
5485 case spv::OpTypeSampler:
5486 case spv::OpLabel:
5487 case spv::OpExtInstImport:
5488 case spv::OpTypePointer:
5489 case spv::OpTypeRuntimeArray:
5490 case spv::OpTypeStruct:
5491 case spv::OpTypeImage:
5492 case spv::OpTypeSampledImage:
5493 case spv::OpTypeInt:
5494 case spv::OpTypeFloat:
5495 case spv::OpTypeArray:
5496 case spv::OpTypeVector:
5497 case spv::OpTypeFunction: {
5498 WriteWordCountAndOpcode(Inst);
5499 WriteResultID(Inst);
5500 for (uint32_t i = 0; i < Ops.size(); i++) {
5501 WriteOperand(Ops[i]);
5502 }
5503 break;
5504 }
5505 case spv::OpFunction:
5506 case spv::OpFunctionParameter:
5507 case spv::OpAccessChain:
5508 case spv::OpPtrAccessChain:
5509 case spv::OpInBoundsAccessChain:
5510 case spv::OpUConvert:
5511 case spv::OpSConvert:
5512 case spv::OpConvertFToU:
5513 case spv::OpConvertFToS:
5514 case spv::OpConvertUToF:
5515 case spv::OpConvertSToF:
5516 case spv::OpFConvert:
5517 case spv::OpConvertPtrToU:
5518 case spv::OpConvertUToPtr:
5519 case spv::OpBitcast:
alan-bakerc9c55ae2019-12-02 16:01:27 -05005520 case spv::OpFNegate:
David Neto22f144c2017-06-12 14:26:21 -04005521 case spv::OpIAdd:
5522 case spv::OpFAdd:
5523 case spv::OpISub:
5524 case spv::OpFSub:
5525 case spv::OpIMul:
5526 case spv::OpFMul:
5527 case spv::OpUDiv:
5528 case spv::OpSDiv:
5529 case spv::OpFDiv:
5530 case spv::OpUMod:
5531 case spv::OpSRem:
5532 case spv::OpFRem:
Kévin Petit8a560882019-03-21 15:24:34 +00005533 case spv::OpUMulExtended:
5534 case spv::OpSMulExtended:
David Neto22f144c2017-06-12 14:26:21 -04005535 case spv::OpBitwiseOr:
5536 case spv::OpBitwiseXor:
5537 case spv::OpBitwiseAnd:
David Netoa394f392017-08-26 20:45:29 -04005538 case spv::OpNot:
David Neto22f144c2017-06-12 14:26:21 -04005539 case spv::OpShiftLeftLogical:
5540 case spv::OpShiftRightLogical:
5541 case spv::OpShiftRightArithmetic:
5542 case spv::OpBitCount:
David Netoab03f432017-11-03 17:00:44 -04005543 case spv::OpCompositeConstruct:
David Neto22f144c2017-06-12 14:26:21 -04005544 case spv::OpCompositeExtract:
5545 case spv::OpVectorExtractDynamic:
5546 case spv::OpCompositeInsert:
David Neto0a2f98d2017-09-15 19:38:40 -04005547 case spv::OpCopyObject:
David Neto22f144c2017-06-12 14:26:21 -04005548 case spv::OpVectorInsertDynamic:
5549 case spv::OpVectorShuffle:
5550 case spv::OpIEqual:
5551 case spv::OpINotEqual:
5552 case spv::OpUGreaterThan:
5553 case spv::OpUGreaterThanEqual:
5554 case spv::OpULessThan:
5555 case spv::OpULessThanEqual:
5556 case spv::OpSGreaterThan:
5557 case spv::OpSGreaterThanEqual:
5558 case spv::OpSLessThan:
5559 case spv::OpSLessThanEqual:
5560 case spv::OpFOrdEqual:
5561 case spv::OpFOrdGreaterThan:
5562 case spv::OpFOrdGreaterThanEqual:
5563 case spv::OpFOrdLessThan:
5564 case spv::OpFOrdLessThanEqual:
5565 case spv::OpFOrdNotEqual:
5566 case spv::OpFUnordEqual:
5567 case spv::OpFUnordGreaterThan:
5568 case spv::OpFUnordGreaterThanEqual:
5569 case spv::OpFUnordLessThan:
5570 case spv::OpFUnordLessThanEqual:
5571 case spv::OpFUnordNotEqual:
5572 case spv::OpExtInst:
5573 case spv::OpIsInf:
5574 case spv::OpIsNan:
5575 case spv::OpAny:
5576 case spv::OpAll:
5577 case spv::OpUndef:
5578 case spv::OpConstantNull:
5579 case spv::OpLogicalOr:
5580 case spv::OpLogicalAnd:
5581 case spv::OpLogicalNot:
5582 case spv::OpLogicalNotEqual:
5583 case spv::OpConstantComposite:
5584 case spv::OpSpecConstantComposite:
5585 case spv::OpConstantTrue:
5586 case spv::OpConstantFalse:
5587 case spv::OpConstant:
5588 case spv::OpSpecConstant:
5589 case spv::OpVariable:
5590 case spv::OpFunctionCall:
5591 case spv::OpSampledImage:
alan-baker75090e42020-02-20 11:21:04 -05005592 case spv::OpImageFetch:
David Neto22f144c2017-06-12 14:26:21 -04005593 case spv::OpImageSampleExplicitLod:
David Neto5c22a252018-03-15 16:07:41 -04005594 case spv::OpImageQuerySize:
alan-bakerce179f12019-12-06 19:02:22 -05005595 case spv::OpImageQuerySizeLod:
David Neto22f144c2017-06-12 14:26:21 -04005596 case spv::OpSelect:
5597 case spv::OpPhi:
5598 case spv::OpLoad:
5599 case spv::OpAtomicIAdd:
5600 case spv::OpAtomicISub:
5601 case spv::OpAtomicExchange:
5602 case spv::OpAtomicIIncrement:
5603 case spv::OpAtomicIDecrement:
5604 case spv::OpAtomicCompareExchange:
5605 case spv::OpAtomicUMin:
5606 case spv::OpAtomicSMin:
5607 case spv::OpAtomicUMax:
5608 case spv::OpAtomicSMax:
5609 case spv::OpAtomicAnd:
5610 case spv::OpAtomicOr:
5611 case spv::OpAtomicXor:
5612 case spv::OpDot: {
5613 WriteWordCountAndOpcode(Inst);
5614 WriteOperand(Ops[0]);
5615 WriteResultID(Inst);
5616 for (uint32_t i = 1; i < Ops.size(); i++) {
5617 WriteOperand(Ops[i]);
5618 }
5619 break;
5620 }
5621 }
5622 }
5623}
Alan Baker9bf93fb2018-08-28 16:59:26 -04005624
alan-bakerb6b09dc2018-11-08 16:59:28 -05005625bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
Alan Baker9bf93fb2018-08-28 16:59:26 -04005626 switch (type->getTypeID()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005627 case Type::HalfTyID:
5628 case Type::FloatTyID:
5629 case Type::DoubleTyID:
5630 case Type::IntegerTyID:
James Price59a1c752020-04-23 23:06:16 -04005631 case Type::FixedVectorTyID:
alan-bakerb6b09dc2018-11-08 16:59:28 -05005632 return true;
5633 case Type::PointerTyID: {
5634 const PointerType *pointer_type = cast<PointerType>(type);
5635 if (pointer_type->getPointerAddressSpace() !=
5636 AddressSpace::UniformConstant) {
5637 auto pointee_type = pointer_type->getPointerElementType();
5638 if (pointee_type->isStructTy() &&
5639 cast<StructType>(pointee_type)->isOpaque()) {
5640 // Images and samplers are not nullable.
5641 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005642 }
Alan Baker9bf93fb2018-08-28 16:59:26 -04005643 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05005644 return true;
5645 }
5646 case Type::ArrayTyID:
alan-baker8eb435a2020-04-08 00:42:06 -04005647 return IsTypeNullable(type->getArrayElementType());
alan-bakerb6b09dc2018-11-08 16:59:28 -05005648 case Type::StructTyID: {
5649 const StructType *struct_type = cast<StructType>(type);
5650 // Images and samplers are not nullable.
5651 if (struct_type->isOpaque())
Alan Baker9bf93fb2018-08-28 16:59:26 -04005652 return false;
alan-bakerb6b09dc2018-11-08 16:59:28 -05005653 for (const auto element : struct_type->elements()) {
5654 if (!IsTypeNullable(element))
5655 return false;
5656 }
5657 return true;
5658 }
5659 default:
5660 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005661 }
5662}
Alan Bakerfcda9482018-10-02 17:09:59 -04005663
SJW77b87ad2020-04-21 14:37:52 -05005664void SPIRVProducerPass::PopulateUBOTypeMaps() {
Alan Bakerfcda9482018-10-02 17:09:59 -04005665 if (auto *offsets_md =
SJW77b87ad2020-04-21 14:37:52 -05005666 module->getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
    // Metadata is stored as key-value pair operands. The first element of each
5668 // operand is the type and the second is a vector of offsets.
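    // Hypothetical shape of one operand (type name and offsets illustrative):
    //   !0 = !{ %struct.S undef, !1 }
    //   !1 = !{ i32 0, i32 16, i32 32 }
    // i.e. the members of %struct.S were remapped to byte offsets 0, 16, 32.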
5669 for (const auto *operand : offsets_md->operands()) {
5670 const auto *pair = cast<MDTuple>(operand);
5671 auto *type =
5672 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5673 const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
5674 std::vector<uint32_t> offsets;
5675 for (const Metadata *offset_md : offset_vector->operands()) {
5676 const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
alan-bakerb6b09dc2018-11-08 16:59:28 -05005677 offsets.push_back(static_cast<uint32_t>(
5678 cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
Alan Bakerfcda9482018-10-02 17:09:59 -04005679 }
5680 RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
5681 }
5682 }
5683
5684 if (auto *sizes_md =
SJW77b87ad2020-04-21 14:37:52 -05005685 module->getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
Alan Bakerfcda9482018-10-02 17:09:59 -04005686 // Metadata is stored as key-value pair operands. The first element of each
5687 // operand is the type and the second is a triple of sizes: type size in
5688 // bits, store size and alloc size.
5689 for (const auto *operand : sizes_md->operands()) {
5690 const auto *pair = cast<MDTuple>(operand);
5691 auto *type =
5692 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5693 const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
5694 uint64_t type_size_in_bits =
5695 cast<ConstantInt>(
5696 cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
5697 ->getZExtValue();
5698 uint64_t type_store_size =
5699 cast<ConstantInt>(
5700 cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
5701 ->getZExtValue();
5702 uint64_t type_alloc_size =
5703 cast<ConstantInt>(
5704 cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
5705 ->getZExtValue();
5706 RemappedUBOTypeSizes.insert(std::make_pair(
5707 type, std::make_tuple(type_size_in_bits, type_store_size,
5708 type_alloc_size)));
5709 }
5710 }
5711}
5712
5713uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
5714 const DataLayout &DL) {
5715 auto iter = RemappedUBOTypeSizes.find(type);
5716 if (iter != RemappedUBOTypeSizes.end()) {
5717 return std::get<0>(iter->second);
5718 }
5719
5720 return DL.getTypeSizeInBits(type);
5721}
5722
5723uint64_t SPIRVProducerPass::GetTypeStoreSize(Type *type, const DataLayout &DL) {
5724 auto iter = RemappedUBOTypeSizes.find(type);
5725 if (iter != RemappedUBOTypeSizes.end()) {
5726 return std::get<1>(iter->second);
5727 }
5728
5729 return DL.getTypeStoreSize(type);
5730}
5731
5732uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
5733 auto iter = RemappedUBOTypeSizes.find(type);
5734 if (iter != RemappedUBOTypeSizes.end()) {
5735 return std::get<2>(iter->second);
5736 }
5737
5738 return DL.getTypeAllocSize(type);
5739}
alan-baker5b86ed72019-02-15 08:26:50 -05005740
Kévin Petitbbbda972020-03-03 19:16:31 +00005741uint32_t SPIRVProducerPass::GetExplicitLayoutStructMemberOffset(
5742 StructType *type, unsigned member, const DataLayout &DL) {
5743 const auto StructLayout = DL.getStructLayout(type);
5744 // Search for the correct offsets if this type was remapped.
5745 std::vector<uint32_t> *offsets = nullptr;
5746 auto iter = RemappedUBOTypeOffsets.find(type);
5747 if (iter != RemappedUBOTypeOffsets.end()) {
5748 offsets = &iter->second;
5749 }
5750 auto ByteOffset =
5751 static_cast<uint32_t>(StructLayout->getElementOffset(member));
5752 if (offsets) {
5753 ByteOffset = (*offsets)[member];
5754 }
5755
5756 return ByteOffset;
5757}
5758
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005759void SPIRVProducerPass::setVariablePointersCapabilities(
5760 unsigned address_space) {
alan-baker5b86ed72019-02-15 08:26:50 -05005761 if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
5762 setVariablePointersStorageBuffer(true);
5763 } else {
5764 setVariablePointers(true);
5765 }
5766}
5767
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005768Value *SPIRVProducerPass::GetBasePointer(Value *v) {
alan-baker5b86ed72019-02-15 08:26:50 -05005769 if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
5770 return GetBasePointer(gep->getPointerOperand());
5771 }
5772
5773 // Conservatively return |v|.
5774 return v;
5775}
5776
5777bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
5778 if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
5779 if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
5780 if (lhs_call->getCalledFunction()->getName().startswith(
5781 clspv::ResourceAccessorFunction()) &&
5782 rhs_call->getCalledFunction()->getName().startswith(
5783 clspv::ResourceAccessorFunction())) {
5784 // For resource accessors, match descriptor set and binding.
5785 if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
5786 lhs_call->getOperand(1) == rhs_call->getOperand(1))
5787 return true;
5788 } else if (lhs_call->getCalledFunction()->getName().startswith(
5789 clspv::WorkgroupAccessorFunction()) &&
5790 rhs_call->getCalledFunction()->getName().startswith(
5791 clspv::WorkgroupAccessorFunction())) {
5792 // For workgroup resources, match spec id.
5793 if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
5794 return true;
5795 }
5796 }
5797 }
5798
5799 return false;
5800}
5801
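// As used by this pass: an OpSelect/OpPhi of storage-buffer pointers that all
// point into the same object (or are null/undef) only needs the
// VariablePointersStorageBuffer capability; selecting between pointers into
// different objects forces the full VariablePointers capability (see the
// callers of this helper).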
5802bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
5803 assert(inst->getType()->isPointerTy());
5804 assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
5805 spv::StorageClassStorageBuffer);
5806 const bool hack_undef = clspv::Option::HackUndef();
5807 if (auto *select = dyn_cast<SelectInst>(inst)) {
5808 auto *true_base = GetBasePointer(select->getTrueValue());
5809 auto *false_base = GetBasePointer(select->getFalseValue());
5810
5811 if (true_base == false_base)
5812 return true;
5813
5814 // If either the true or false operand is a null, then we satisfy the same
5815 // object constraint.
5816 if (auto *true_cst = dyn_cast<Constant>(true_base)) {
5817 if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
5818 return true;
5819 }
5820
5821 if (auto *false_cst = dyn_cast<Constant>(false_base)) {
5822 if (false_cst->isNullValue() ||
5823 (hack_undef && isa<UndefValue>(false_base)))
5824 return true;
5825 }
5826
5827 if (sameResource(true_base, false_base))
5828 return true;
5829 } else if (auto *phi = dyn_cast<PHINode>(inst)) {
5830 Value *value = nullptr;
5831 bool ok = true;
5832 for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
5833 auto *base = GetBasePointer(phi->getIncomingValue(i));
      // Null values satisfy the constraint of selecting from the same object.
5836 if (!value) {
5837 if (auto *cst = dyn_cast<Constant>(base)) {
5838 if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
5839 value = base;
5840 } else {
5841 value = base;
5842 }
5843 } else if (base != value) {
5844 if (auto *base_cst = dyn_cast<Constant>(base)) {
5845 if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
5846 continue;
5847 }
5848
5849 if (sameResource(value, base))
5850 continue;
5851
5852 // Values don't represent the same base.
5853 ok = false;
5854 }
5855 }
5856
5857 return ok;
5858 }
5859
5860 // Conservatively return false.
5861 return false;
5862}
alan-bakere9308012019-03-15 10:25:13 -04005863
5864bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
5865 if (!Arg.getType()->isPointerTy() ||
5866 Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
5867 // Only SSBOs need to be annotated as coherent.
5868 return false;
5869 }
5870
5871 DenseSet<Value *> visited;
5872 std::vector<Value *> stack;
5873 for (auto *U : Arg.getParent()->users()) {
5874 if (auto *call = dyn_cast<CallInst>(U)) {
5875 stack.push_back(call->getOperand(Arg.getArgNo()));
5876 }
5877 }
5878
5879 while (!stack.empty()) {
5880 Value *v = stack.back();
5881 stack.pop_back();
5882
5883 if (!visited.insert(v).second)
5884 continue;
5885
5886 auto *resource_call = dyn_cast<CallInst>(v);
5887 if (resource_call &&
5888 resource_call->getCalledFunction()->getName().startswith(
5889 clspv::ResourceAccessorFunction())) {
5890 // If this is a resource accessor function, check if the coherent operand
5891 // is set.
5892 const auto coherent =
5893 unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
5894 ->getZExtValue());
5895 if (coherent == 1)
5896 return true;
5897 } else if (auto *arg = dyn_cast<Argument>(v)) {
5898 // If this is a function argument, trace through its callers.
alan-bakere98f3f92019-04-08 15:06:36 -04005899 for (auto U : arg->getParent()->users()) {
alan-bakere9308012019-03-15 10:25:13 -04005900 if (auto *call = dyn_cast<CallInst>(U)) {
5901 stack.push_back(call->getOperand(arg->getArgNo()));
5902 }
5903 }
5904 } else if (auto *user = dyn_cast<User>(v)) {
5905 // If this is a user, traverse all operands that could lead to resource
5906 // variables.
5907 for (unsigned i = 0; i != user->getNumOperands(); ++i) {
5908 Value *operand = user->getOperand(i);
5909 if (operand->getType()->isPointerTy() &&
5910 operand->getType()->getPointerAddressSpace() ==
5911 clspv::AddressSpace::Global) {
5912 stack.push_back(operand);
5913 }
5914 }
5915 }
5916 }
5917
5918 // No coherent resource variables encountered.
5919 return false;
5920}
alan-baker06cad652019-12-03 17:56:47 -05005921
SJW77b87ad2020-04-21 14:37:52 -05005922void SPIRVProducerPass::PopulateStructuredCFGMaps() {
alan-baker06cad652019-12-03 17:56:47 -05005923 // First, track loop merges and continues.
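  // For each loop header, record its merge block (the loop exit) and its
  // continue target (the latch, or a block that dominates it); these become
  // the OpLoopMerge operands in HandleDeferredInstruction. Illustrative CFG:
  //   header -> body -> latch -> header   (latch is the continue target)
  //   header -> exit                      (exit is the merge block)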
5924 DenseSet<BasicBlock *> LoopMergesAndContinues;
SJW77b87ad2020-04-21 14:37:52 -05005925 for (auto &F : *module) {
alan-baker06cad652019-12-03 17:56:47 -05005926 if (F.isDeclaration())
5927 continue;
5928
5929 DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
5930 const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>(F).getLoopInfo();
5931 std::deque<BasicBlock *> order;
5932 DenseSet<BasicBlock *> visited;
5933 clspv::ComputeStructuredOrder(&*F.begin(), &DT, LI, &order, &visited);
5934
5935 for (auto BB : order) {
5936 auto terminator = BB->getTerminator();
5937 auto branch = dyn_cast<BranchInst>(terminator);
5938 if (LI.isLoopHeader(BB)) {
5939 auto L = LI.getLoopFor(BB);
5940 BasicBlock *ContinueBB = nullptr;
5941 BasicBlock *MergeBB = nullptr;
5942
5943 MergeBB = L->getExitBlock();
5944 if (!MergeBB) {
          // The StructurizeCFG pass converts the CFG into a triangle shape in
          // which each region has a single entry and a single exit, so a loop
          // should not have multiple exits.
5948 llvm_unreachable("Loop has multiple exits???");
5949 }
5950
5951 if (L->isLoopLatch(BB)) {
5952 ContinueBB = BB;
5953 } else {
5954 // From SPIR-V spec 2.11, Continue Target must dominate that back-edge
5955 // block.
5956 BasicBlock *Header = L->getHeader();
5957 BasicBlock *Latch = L->getLoopLatch();
5958 for (auto *loop_block : L->blocks()) {
5959 if (loop_block == Header) {
5960 continue;
5961 }
5962
            // Check whether this block dominates the block with the back-edge.
5964 // The loop latch is the single block with a back-edge. If it was
5965 // possible, StructurizeCFG made the loop conform to this
5966 // requirement, otherwise |Latch| is a nullptr.
5967 if (DT.dominates(loop_block, Latch)) {
5968 ContinueBB = loop_block;
5969 }
5970 }
5971
5972 if (!ContinueBB) {
5973 llvm_unreachable("Wrong continue block from loop");
5974 }
5975 }
5976
5977 // Record the continue and merge blocks.
5978 MergeBlocks[BB] = MergeBB;
5979 ContinueBlocks[BB] = ContinueBB;
5980 LoopMergesAndContinues.insert(MergeBB);
5981 LoopMergesAndContinues.insert(ContinueBB);
5982 } else if (branch && branch->isConditional()) {
5983 auto L = LI.getLoopFor(BB);
5984 bool HasBackedge = false;
5985 while (L && !HasBackedge) {
5986 if (L->isLoopLatch(BB)) {
5987 HasBackedge = true;
5988 }
5989 L = L->getParentLoop();
5990 }
5991
5992 if (!HasBackedge) {
5993 // Only need a merge if the branch doesn't include a loop break or
5994 // continue.
5995 auto true_bb = branch->getSuccessor(0);
5996 auto false_bb = branch->getSuccessor(1);
5997 if (!LoopMergesAndContinues.count(true_bb) &&
5998 !LoopMergesAndContinues.count(false_bb)) {
            // The StructurizeCFG pass has already shaped the CFG, so just use
            // the branch's false block as the merge block.
6001 MergeBlocks[BB] = false_bb;
6002 }
6003 }
6004 }
6005 }
6006 }
6007}