// Copyright 2017 The Clspv Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifdef _MSC_VER
#pragma warning(push, 0)
#endif

#include <cassert>
#include <cstring>
#include <iomanip>
#include <list>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <tuple>
#include <unordered_set>
#include <utility>

#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/UniqueVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Cloning.h"

// enable spv::HasResultAndType
#define SPV_ENABLE_UTILITY_CODE
#include "spirv/unified1/spirv.hpp"

#include "clspv/AddressSpace.h"
#include "clspv/DescriptorMap.h"
#include "clspv/Option.h"
#include "clspv/spirv_c_strings.hpp"
#include "clspv/spirv_glsl.hpp"

#include "ArgKind.h"
#include "Builtins.h"
#include "ComputeStructuredOrder.h"
#include "ConstantEmitter.h"
#include "Constants.h"
#include "DescriptorCounter.h"
#include "Layout.h"
#include "NormalizeGlobalVariable.h"
#include "Passes.h"
#include "SpecConstant.h"
#include "Types.h"

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

using namespace llvm;
using namespace clspv;
using namespace clspv::Builtins;
using namespace mdconst;

namespace {

cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

cl::opt<bool>
    ShowProducerIR("show-producer-ir", cl::init(false), cl::ReallyHidden,
                   cl::desc("Dump the IR at the start of SPIRVProducer"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);

const char *kCompositeConstructFunctionPrefix = "clspv.composite_construct.";

// SPIRV Module Sections (per 2.4 of the SPIRV spec)
// These are used to collect SPIRVInstructions by type on-the-fly.
enum SPIRVSection {
  kCapabilities,
  kExtensions,
  kImports,
  kMemoryModel,
  kEntryPoints,
  kExecutionModes,

  kDebug,
  kAnnotations,

  kTypes,
  kConstants = kTypes,
  kGlobalVariables,

  kFunctions,

  kSectionCount
};

typedef uint32_t SPIRVID;

enum SPIRVOperandType { NUMBERID, LITERAL_WORD, LITERAL_DWORD, LITERAL_STRING };

struct SPIRVOperand {
  explicit SPIRVOperand(SPIRVOperandType Ty, uint32_t Num) : Type(Ty) {
    LiteralNum[0] = Num;
  }
  explicit SPIRVOperand(SPIRVOperandType Ty, const char *Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(ArrayRef<uint32_t> NumVec) {
    auto sz = NumVec.size();
    assert(sz >= 1 && sz <= 2);
    Type = sz == 1 ? LITERAL_WORD : LITERAL_DWORD;
    LiteralNum[0] = NumVec[0];
    if (sz == 2) {
      LiteralNum[1] = NumVec[1];
    }
  }

  SPIRVOperandType getType() const { return Type; };
  uint32_t getNumID() const { return LiteralNum[0]; };
  std::string getLiteralStr() const { return LiteralStr; };
  const uint32_t *getLiteralNum() const { return LiteralNum; };

  uint32_t GetNumWords() const {
    switch (Type) {
    case NUMBERID:
    case LITERAL_WORD:
      return 1;
    case LITERAL_DWORD:
      return 2;
    case LITERAL_STRING:
      // Account for the terminating null character.
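      // For example (illustrative): "abc" is 3 chars + 1 NUL and rounds up to
      // 1 word, while "abcd" is 4 chars + 1 NUL and rounds up to 2 words.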
      return uint32_t((LiteralStr.size() + 4) / 4);
    }
    llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
  }

private:
  SPIRVOperandType Type;
  std::string LiteralStr;
  uint32_t LiteralNum[2];
};

typedef SmallVector<SPIRVOperand, 4> SPIRVOperandVec;

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, SPIRVOperand elem) {
  list.push_back(std::move(elem));
  return list;
}

SPIRVOperand MkNum(uint32_t num) { return SPIRVOperand(LITERAL_WORD, num); }
SPIRVOperand MkInteger(ArrayRef<uint32_t> num_vec) {
  return SPIRVOperand(num_vec);
}
SPIRVOperand MkFloat(ArrayRef<uint32_t> num_vec) {
  return SPIRVOperand(num_vec);
}
SPIRVOperand MkId(uint32_t id) { return SPIRVOperand(NUMBERID, id); }
SPIRVOperand MkString(StringRef str) {
  return SPIRVOperand(LITERAL_STRING, str);
}
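
// Illustrative usage of the helpers above (a sketch, not taken from code later
// in this file): each MkXxx call wraps a raw value in a SPIRVOperand, and
// operator<< appends it to the operand list, e.g.
//   SPIRVOperandVec Ops;
//   Ops << MkId(result_type_id) << MkNum(32) << MkString("entry");
// where result_type_id stands in for a previously created SPIRVID.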

struct SPIRVInstruction {
  // Primary constructor must have Opcode, initializes WordCount based on ResID.
  SPIRVInstruction(spv::Op Opc, SPIRVID ResID = 0)
      : Opcode(static_cast<uint16_t>(Opc)) {
    setResult(ResID);
  }

  // Creates an instruction with an opcode and no result ID, and with the given
  // operands. This calls primary constructor to initialize Opcode, WordCount.
  // Takes ownership of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandVec &Ops) : SPIRVInstruction(Opc) {
    setOperands(Ops);
  }
  // Creates an instruction with an opcode and a result ID, and with the given
  // operands. This calls primary constructor to initialize Opcode, WordCount.
  // Takes ownership of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVID ResID, SPIRVOperandVec &Ops)
      : SPIRVInstruction(Opc, ResID) {
    setOperands(Ops);
  }

  uint32_t getWordCount() const { return WordCount; }
  uint16_t getOpcode() const { return Opcode; }
  SPIRVID getResultID() const { return ResultID; }
  const SPIRVOperandVec &getOperands() const { return Operands; }

private:
  void setResult(uint32_t ResID = 0) {
    WordCount = 1 + (ResID != 0 ? 1 : 0);
    ResultID = ResID;
  }

  void setOperands(SPIRVOperandVec &Ops) {
    assert(Operands.empty());
    Operands = std::move(Ops);
    for (auto &opd : Operands) {
      WordCount += uint16_t(opd.GetNumWords());
    }
  }

private:
  uint32_t WordCount; // Check the 16-bit bound at code generation time.
  uint16_t Opcode;
  SPIRVID ResultID;
  SPIRVOperandVec Operands;
};
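
// A minimal word-count sketch (illustrative, not from the original file): an
// instruction with a result ID and two LITERAL_WORD operands, e.g.
// SPIRVInstruction(spv::OpTypeInt, some_id, Ops) with Ops holding {32, 0},
// ends up with WordCount = 1 (opcode word) + 1 (result ID) + 1 + 1 = 4.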

struct SPIRVProducerPass final : public ModulePass {
  typedef DenseMap<Type *, uint32_t> TypeMapType;
  typedef UniqueVector<Type *> TypeList;
  typedef DenseMap<Value *, SPIRVID> ValueMapType;
  typedef UniqueVector<Value *> ValueList;
  typedef std::vector<std::pair<Value *, uint32_t>> EntryPointVecType;
  typedef std::list<SPIRVInstruction> SPIRVInstructionList;
  // A vector of pairs, each of which is:
  // - the LLVM instruction that we will later generate SPIR-V code for
  // - the SPIR-V instruction placeholder that will be replaced
  typedef std::vector<std::pair<Value *, SPIRVInstruction *>>
      DeferredInstVecType;
  typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
      GlobalConstFuncMapType;

  explicit SPIRVProducerPass(
      raw_pwrite_stream &out,
      std::vector<clspv::version0::DescriptorMapEntry> *descriptor_map_entries,
      ArrayRef<std::pair<unsigned, std::string>> samplerMap,
      bool outputCInitList)
      : ModulePass(ID), samplerMap(samplerMap), out(out),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(&out),
        descriptorMapEntries(descriptor_map_entries),
        outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0) {}

  virtual ~SPIRVProducerPass() {
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<LoopInfoWrapperPass>();
  }

  virtual bool runOnModule(Module &module) override;

  // output the SPIR-V header block
  void outputHeader();

  // patch the SPIR-V header block
  void patchHeader();

  TypeMapType &getImageTypeMap() { return ImageTypeMap; }
  TypeList &getTypeList() { return Types; };
  ValueList &getConstantList() { return Constants; };
  ValueMapType &getValueMap() { return ValueMap; }
  SPIRVInstructionList &getSPIRVInstList(SPIRVSection Section) {
    return SPIRVSections[Section];
  };
  EntryPointVecType &getEntryPointVec() { return EntryPointVec; };
  DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; };
  ValueList &getEntryPointInterfacesVec() { return EntryPointInterfacesVec; };
  uint32_t &getOpExtInstImportID() { return OpExtInstImportID; };
  std::vector<uint32_t> &getBuiltinDimVec() { return BuiltinDimensionVec; };

  bool hasVariablePointersStorageBuffer() {
    return HasVariablePointersStorageBuffer;
  }
  void setVariablePointersStorageBuffer(bool Val) {
    HasVariablePointersStorageBuffer = Val;
  }
  bool hasVariablePointers() { return HasVariablePointers; };
  void setVariablePointers(bool Val) { HasVariablePointers = Val; };
  ArrayRef<std::pair<unsigned, std::string>> &getSamplerMap() {
    return samplerMap;
  }
  GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
    return GlobalConstFuncTypeMap;
  }
  SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
    return GlobalConstArgumentSet;
  }
  TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }

  void GenerateLLVMIRInfo();
  // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
  // *not* be converted to a storage buffer, replace each such global variable
  // with one in the storage class expected by SPIR-V.
  void FindGlobalConstVars();
  // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
  // ModuleOrderedResourceVars.
  void FindResourceVars();
  bool FindExtInst();
  void FindTypePerGlobalVar(GlobalVariable &GV);
  void FindTypePerFunc(Function &F);
  void FindTypesForSamplerMap();
  void FindTypesForResourceVars();
  // Inserts |Ty| and relevant sub-types into the |Types| member, indicating
  // that |Ty| and its subtypes will need a corresponding SPIR-V type.
  void FindType(Type *Ty);
  void FindConstantPerGlobalVar(GlobalVariable &GV);
  void FindConstantPerFunc(Function &F);
  void FindConstant(Value *V);

  // Lookup or create Types, Constants.
  // Returns SPIRVID once it has been created.
  SPIRVID getSPIRVType(Type *Ty);
  SPIRVID getSPIRVConstant(Constant *Cst);
  // Lookup SPIRVID of llvm::Value, may create Constant.
  SPIRVID getSPIRVValue(Value *V);

  void GenerateExtInstImport();
  // Generates instructions for SPIR-V types corresponding to the LLVM types
  // saved in the |Types| member. A type follows its subtypes. IDs are
  // allocated sequentially starting with the current value of nextID. Also
  // updates nextID to just beyond the last generated ID.
  void GenerateSPIRVTypes();
  void GenerateSPIRVConstants();
  void GenerateModuleInfo();
  void GeneratePushConstantDescriptorMapEntries();
  void GenerateSpecConstantDescriptorMapEntries();
  void GenerateGlobalVar(GlobalVariable &GV);
  void GenerateWorkgroupVars();
  // Generate descriptor map entries for resource variables associated with
  // arguments to F.
  void GenerateDescriptorMapInfo(Function &F);
  void GenerateSamplers();
  // Generate OpVariables for %clspv.resource.var.* calls.
  void GenerateResourceVars();
  void GenerateFuncPrologue(Function &F);
  void GenerateFuncBody(Function &F);
  void GenerateEntryPointInitialStores();
  spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
  spv::Op GetSPIRVCastOpcode(Instruction &I);
  spv::Op GetSPIRVBinaryOpcode(Instruction &I);
  void GenerateInstruction(Instruction &I);
  void GenerateFuncEpilogue();
  void HandleDeferredInstruction();
  void HandleDeferredDecorations();
  bool is4xi8vec(Type *Ty) const;
  spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
  spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
  spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
  // Returns the GLSL extended instruction enum that the given function
  // call maps to. If none, then returns the 0 value, i.e. GLSLstd450Bad.
  glsl::ExtInst getExtInstEnum(StringRef Name);
  // Returns the GLSL extended instruction enum indirectly used by the given
  // function. That is, to implement the given function, we use an extended
  // instruction plus one more instruction. If none, then returns the 0 value,
  // i.e. GLSLstd450Bad.
  glsl::ExtInst getIndirectExtInstEnum(StringRef Name);
  // Returns the single GLSL extended instruction used directly or
  // indirectly by the given function call.
  glsl::ExtInst getDirectOrIndirectExtInstEnum(StringRef Name);
  void WriteOneWord(uint32_t Word);
  void WriteResultID(const SPIRVInstruction &Inst);
  void WriteWordCountAndOpcode(const SPIRVInstruction &Inst);
  void WriteOperand(const SPIRVOperand &Op);
  void WriteSPIRVBinary();
  void WriteSPIRVBinary(SPIRVInstructionList &SPIRVInstList);

  // Returns true if |type| is compatible with OpConstantNull.
  bool IsTypeNullable(const Type *type) const;

  // Populate UBO remapped type maps.
  void PopulateUBOTypeMaps();

  // Populate the merge and continue block maps.
  void PopulateStructuredCFGMaps();

  // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
  // uses the internal map, otherwise it falls back on the data layout.
  uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
  uint64_t GetTypeStoreSize(Type *type, const DataLayout &DL);
  uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);
  uint32_t GetExplicitLayoutStructMemberOffset(StructType *type,
                                               unsigned member,
                                               const DataLayout &DL);

  // Returns the base pointer of |v|.
  Value *GetBasePointer(Value *v);

  // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| based on
  // |address_space|.
  void setVariablePointersCapabilities(unsigned address_space);

  // Returns true if |lhs| and |rhs| represent the same resource or workgroup
  // variable.
  bool sameResource(Value *lhs, Value *rhs) const;

  // Returns true if |inst| is phi or select that selects from the same
  // structure (or null).
  bool selectFromSameObject(Instruction *inst);

  // Returns true if |Arg| is called with a coherent resource.
  bool CalledWithCoherentResource(Argument &Arg);

  //
  // Primary interface for adding SPIRVInstructions to a SPIRVSection.
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Opcode, SPIRVOperandVec &Operands) {
    bool has_result, has_result_type;
    spv::HasResultAndType(Opcode, &has_result, &has_result_type);
    SPIRVID RID = has_result ? incrNextID() : 0;
    SPIRVSections[TSection].emplace_back(Opcode, RID, Operands);
    return RID;
  }
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Op) {
    SPIRVOperandVec Ops;
    return addSPIRVInst<TSection>(Op, Ops);
  }
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Op, uint32_t V) {
    SPIRVOperandVec Ops;
    Ops << MkNum(V);
    return addSPIRVInst<TSection>(Op, Ops);
  }
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Op, const char *V) {
    SPIRVOperandVec Ops;
    Ops << MkString(V);
    return addSPIRVInst<TSection>(Op, Ops);
  }
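
  // Illustrative usage (a sketch, not from the original file): emitting a
  // 32-bit unsigned integer type declaration into the types section.
  //   SPIRVOperandVec Ops;
  //   Ops << MkNum(32) << MkNum(0);
  //   SPIRVID IntTyID = addSPIRVInst<kTypes>(spv::OpTypeInt, Ops);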

  //
  // Add placeholder for llvm::Value that references future values.
  // Must have a result ID in case the final SPIRVInstruction requires one.
  SPIRVID addSPIRVPlaceholder(Value *I) {
    SPIRVID RID = incrNextID();
    SPIRVOperandVec Ops;
    SPIRVSections[kFunctions].emplace_back(spv::OpExtInst, RID, Ops);
    DeferredInstVec.push_back({I, &SPIRVSections[kFunctions].back()});
    return RID;
  }
  // Replace placeholder with actual SPIRVInstruction on the final pass
  // (HandleDeferredInstruction).
  SPIRVID replaceSPIRVInst(SPIRVInstruction *I, spv::Op Opcode,
                           SPIRVOperandVec &Operands) {
    bool has_result, has_result_type;
    spv::HasResultAndType(Opcode, &has_result, &has_result_type);
    SPIRVID RID = has_result ? I->getResultID() : 0;
    *I = SPIRVInstruction(Opcode, RID, Operands);
    return RID;
  }

private:
  static char ID;

  Module *module;

  ArrayRef<std::pair<unsigned, std::string>> samplerMap;
  raw_pwrite_stream &out;

  // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
  // convert to other formats on demand?

  // When emitting a C initialization list, the WriteSPIRVBinary method
  // will actually write its words to this vector via binaryTempOut.
  SmallVector<char, 100> binaryTempUnderlyingVector;
  raw_svector_ostream binaryTempOut;

  // Binary output writes to this stream, which might be |out| or
  // |binaryTempOut|. It's the latter when we really want to write a C
  // initializer list.
  raw_pwrite_stream *binaryOut;
  std::vector<version0::DescriptorMapEntry> *descriptorMapEntries;
  const bool outputCInitList; // If true, output looks like {0x7023, ... , 5}
  uint64_t patchBoundOffset;
  uint32_t nextID;

  SPIRVID incrNextID() { return nextID++; }

  // ID for OpTypeInt 32 1.
  uint32_t int32ID = 0;
  // ID for OpTypeVector %int 4.
  uint32_t v4int32ID = 0;

  // Maps an LLVM Type pointer to the corresponding SPIR-V Id.
  TypeMapType TypeMap;
  // Maps an LLVM image type to its SPIR-V ID.
  TypeMapType ImageTypeMap;
  // A unique-vector of LLVM types that map to a SPIR-V type.
  TypeList Types;
  ValueList Constants;
  // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
  ValueMapType ValueMap;
  SPIRVInstructionList SPIRVSections[kSectionCount];

  EntryPointVecType EntryPointVec;
  DeferredInstVecType DeferredInstVec;
  ValueList EntryPointInterfacesVec;
  uint32_t OpExtInstImportID;
  std::vector<uint32_t> BuiltinDimensionVec;
  bool HasVariablePointersStorageBuffer;
  bool HasVariablePointers;
  Type *SamplerTy;
  DenseMap<unsigned, unsigned> SamplerLiteralToIDMap;

  // If a function F has a pointer-to-__constant parameter, then this variable
  // will map F's type to (G, index of the parameter), where in a first phase
  // G is F's type. During FindTypePerFunc, G will be changed to F's type but
  // with the pointer-to-constant parameter replaced by
  // pointer-to-ModuleScopePrivate.
  // TODO(dneto): This doesn't seem general enough? A function might have
  // more than one such parameter.
  GlobalConstFuncMapType GlobalConstFuncTypeMap;
  SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
  // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
  // or array types, and which point into transparent memory (StorageBuffer
  // storage class). These will require an ArrayStride decoration.
  // See SPV_KHR_variable_pointers rev 13.
  TypeList TypesNeedingArrayStride;

  // This is truly ugly, but works around what look like driver bugs.
  // For get_local_size, an earlier part of the flow has created a module-scope
  // variable in Private address space to hold the value for the workgroup
  // size. Its initializer is a uint3 value marked as builtin WorkgroupSize.
  // When this is present, save the IDs of the initializer value and variable
  // in these two variables. We only ever do a vector load from it, and
  // when we see one of those, substitute just the value of the initializer.
  // This mimics what Glslang does, and that's what drivers are used to.
  // TODO(dneto): Remove this once drivers are fixed.
  uint32_t WorkgroupSizeValueID;
  uint32_t WorkgroupSizeVarID;

  // Bookkeeping for mapping kernel arguments to resource variables.
  struct ResourceVarInfo {
    ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
                    Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
        : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
          var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
          addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
    const int index; // Index into ResourceVarInfoList
    const unsigned descriptor_set;
    const unsigned binding;
    Function *const var_fn; // The @clspv.resource.var.* function.
    const clspv::ArgKind arg_kind;
    const int coherent;
    const unsigned addr_space; // The LLVM address space
    // The SPIR-V ID of the OpVariable. Not populated at construction time.
    uint32_t var_id = 0;
  };
568 // A list of resource var info. Each one correponds to a module-scope
569 // resource variable we will have to create. Resource var indices are
570 // indices into this vector.
571 SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
572 // This is a vector of pointers of all the resource vars, but ordered by
573 // kernel function, and then by argument.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500574 UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
David Neto862b7d82018-06-14 18:48:37 -0400575 // Map a function to the ordered list of resource variables it uses, one for
576 // each argument. If an argument does not use a resource variable, it
577 // will have a null pointer entry.
578 using FunctionToResourceVarsMapType =
579 DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
580 FunctionToResourceVarsMapType FunctionToResourceVarsMap;
581
582 // What LLVM types map to SPIR-V types needing layout? These are the
583 // arrays and structures supporting storage buffers and uniform buffers.
584 TypeList TypesNeedingLayout;
585 // What LLVM struct types map to a SPIR-V struct type with Block decoration?
586 UniqueVector<StructType *> StructTypesNeedingBlock;
587 // For a call that represents a load from an opaque type (samplers, images),
588 // map it to the variable id it should load from.
589 DenseMap<CallInst *, uint32_t> ResourceVarDeferredLoadCalls;
David Neto85082642018-03-24 06:55:20 -0700590
David Netoc6f3ab22018-04-06 18:02:31 -0400591 // An ordered list of the kernel arguments of type pointer-to-local.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500592 using LocalArgList = SmallVector<Argument *, 8>;
David Netoc6f3ab22018-04-06 18:02:31 -0400593 LocalArgList LocalArgs;
594 // Information about a pointer-to-local argument.
595 struct LocalArgInfo {
596 // The SPIR-V ID of the array variable.
597 uint32_t variable_id;
598 // The element type of the
alan-bakerb6b09dc2018-11-08 16:59:28 -0500599 Type *elem_type;
David Netoc6f3ab22018-04-06 18:02:31 -0400600 // The ID of the array type.
601 uint32_t array_size_id;
602 // The ID of the array type.
603 uint32_t array_type_id;
604 // The ID of the pointer to the array type.
605 uint32_t ptr_array_type_id;
David Netoc6f3ab22018-04-06 18:02:31 -0400606 // The specialization constant ID of the array size.
607 int spec_id;
608 };
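  // For example (illustrative): a kernel argument declared as
  //   __local float *scratch
  // is represented as a module-scope Workgroup-storage array whose length is a
  // specialization constant; the LocalArgInfo fields above record the IDs
  // involved.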
  // A mapping from Argument to its assigned SpecId.
  DenseMap<const Argument *, int> LocalArgSpecIds;
  // A mapping from SpecId to its LocalArgInfo.
  DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
  // A mapping from a remapped type to its real offsets.
  DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
  // A mapping from a remapped type to its real sizes.
  DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
      RemappedUBOTypeSizes;

  // Maps basic block to its merge block.
  DenseMap<BasicBlock *, BasicBlock *> MergeBlocks;
  // Maps basic block to its continue block.
  DenseMap<BasicBlock *, BasicBlock *> ContinueBlocks;
};

char SPIRVProducerPass::ID;

} // namespace

namespace clspv {
ModulePass *createSPIRVProducerPass(
    raw_pwrite_stream &out,
    std::vector<version0::DescriptorMapEntry> *descriptor_map_entries,
    ArrayRef<std::pair<unsigned, std::string>> samplerMap,
    bool outputCInitList) {
  return new SPIRVProducerPass(out, descriptor_map_entries, samplerMap,
                               outputCInitList);
}
} // namespace clspv

bool SPIRVProducerPass::runOnModule(Module &M) {
  module = &M;
  if (ShowProducerIR) {
    llvm::outs() << *module << "\n";
  }
  binaryOut = outputCInitList ? &binaryTempOut : &out;

  PopulateUBOTypeMaps();
  PopulateStructuredCFGMaps();

  // SPIR-V always begins with its header information
  outputHeader();

  // Gather information from the LLVM IR that we require.
  GenerateLLVMIRInfo();

  // Collect information on global variables too.
  for (GlobalVariable &GV : module->globals()) {
    // If the GV is one of our special __spirv_* variables, remove the
    // initializer as it was only placed there to force LLVM to not throw the
    // value away.
    if (GV.getName().startswith("__spirv_") ||
        GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
      GV.setInitializer(nullptr);
    }

    // Collect types' information from global variable.
    FindTypePerGlobalVar(GV);

    // Collect constant information from global variable.
    FindConstantPerGlobalVar(GV);

    // If the variable is an input, entry points need to know about it.
    if (AddressSpace::Input == GV.getType()->getPointerAddressSpace()) {
      getEntryPointInterfacesVec().insert(&GV);
    }
  }

  // If there are extended instructions, generate OpExtInstImport.
  if (FindExtInst()) {
    GenerateExtInstImport();
  }

  // Generate SPIRV instructions for types.
  GenerateSPIRVTypes();

  // Generate SPIRV constants.
  GenerateSPIRVConstants();

  // Generate literal samplers if necessary.
  GenerateSamplers();

  // Generate descriptor map entries for all push constants
  GeneratePushConstantDescriptorMapEntries();

  // Generate SPIRV variables.
  for (GlobalVariable &GV : module->globals()) {
    GenerateGlobalVar(GV);
  }
  GenerateResourceVars();
  GenerateWorkgroupVars();

  // Generate SPIRV instructions for each function.
  for (Function &F : *module) {
    if (F.isDeclaration()) {
      continue;
    }

    GenerateDescriptorMapInfo(F);

    // Generate Function Prologue.
    GenerateFuncPrologue(F);

    // Generate SPIRV instructions for function body.
    GenerateFuncBody(F);

    // Generate Function Epilogue.
    GenerateFuncEpilogue();
  }

  HandleDeferredInstruction();
  HandleDeferredDecorations();

  // Generate descriptor map entries for module scope specialization constants.
  GenerateSpecConstantDescriptorMapEntries();

  // Generate SPIRV module information.
  GenerateModuleInfo();

  WriteSPIRVBinary();

  // We need to patch the SPIR-V header to set bound correctly.
  patchHeader();

  if (outputCInitList) {
    bool first = true;
    std::ostringstream os;

    auto emit_word = [&os, &first](uint32_t word) {
      if (!first)
        os << ",\n";
      os << word;
      first = false;
    };

    os << "{";
    const std::string str(binaryTempOut.str());
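    // Reassemble each group of four bytes into a 32-bit word, with byte 0 as
    // the least significant byte. On a little-endian host, for example, the
    // leading bytes 03 02 23 07 become 0x07230203, the SPIR-V magic number.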
    for (unsigned i = 0; i < str.size(); i += 4) {
      const uint32_t a = static_cast<unsigned char>(str[i]);
      const uint32_t b = static_cast<unsigned char>(str[i + 1]);
      const uint32_t c = static_cast<unsigned char>(str[i + 2]);
      const uint32_t d = static_cast<unsigned char>(str[i + 3]);
      emit_word(a | (b << 8) | (c << 16) | (d << 24));
    }
    os << "}\n";
    out << os.str();
  }

  return false;
}

void SPIRVProducerPass::outputHeader() {
  binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
                   sizeof(spv::MagicNumber));
  const uint32_t spv_version = 0x10000; // SPIR-V 1.0
  binaryOut->write(reinterpret_cast<const char *>(&spv_version),
                   sizeof(spv_version));

  // use Google's vendor ID
  const uint32_t vendor = 21 << 16;
  binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));

  // we record where we need to come back to and patch in the bound value
  patchBoundOffset = binaryOut->tell();

  // output a bad bound for now
  binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));

  // output the schema (reserved for use and must be 0)
  const uint32_t schema = 0;
  binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
}
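
// For reference, the five words written above form the standard SPIR-V module
// header:
//   word 0: magic number (0x07230203)
//   word 1: version (0x00010000 for SPIR-V 1.0)
//   word 2: generator magic (tool/vendor ID in the upper 16 bits)
//   word 3: bound (patched later with the final value of nextID)
//   word 4: schema (reserved, must be 0)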

void SPIRVProducerPass::patchHeader() {
  // for a binary we just write the value of nextID over bound
  binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
                    patchBoundOffset);
}

void SPIRVProducerPass::GenerateLLVMIRInfo() {
  // This function generates LLVM IR artifacts, such as global variables for
  // arguments, constants, and pointer types for argument access. This
  // information is artificial because we need Vulkan SPIR-V output. This
  // function is executed ahead of FindType and FindConstant.
  LLVMContext &Context = module->getContext();

  FindGlobalConstVars();

  FindResourceVars();

  bool HasWorkGroupBuiltin = false;
  for (GlobalVariable &GV : module->globals()) {
    const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
    if (spv::BuiltInWorkgroupSize == BuiltinType) {
      HasWorkGroupBuiltin = true;
    }
  }

  FindTypesForSamplerMap();
  FindTypesForResourceVars();

  for (Function &F : *module) {
    if (F.isDeclaration()) {
      continue;
    }

    for (BasicBlock &BB : F) {
      for (Instruction &I : BB) {
        if (I.getOpcode() == Instruction::ZExt ||
            I.getOpcode() == Instruction::SExt ||
            I.getOpcode() == Instruction::UIToFP) {
          // If there is a zext with i1 type, it will be changed to OpSelect.
          // The OpSelect needs constants 0 and 1, so the constants are added
          // here.

          auto OpTy = I.getOperand(0)->getType();

          if (OpTy->isIntOrIntVectorTy(1)) {
            if (I.getOpcode() == Instruction::ZExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::get(I.getType(), 1));
            } else if (I.getOpcode() == Instruction::SExt) {
              FindConstant(Constant::getNullValue(I.getType()));
              FindConstant(ConstantInt::getSigned(I.getType(), -1));
            } else {
              FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
              FindConstant(ConstantFP::get(Context, APFloat(1.0f)));
            }
          }
        } else if (CallInst *Call = dyn_cast<CallInst>(&I)) {
          StringRef callee_name = Call->getCalledFunction()->getName();

          if (IsSampledImageRead(callee_name)) {
            // All sampled reads need a floating point 0 for the Lod operand.
            FindConstant(ConstantFP::get(Context, APFloat(0.0f)));
          } else if (IsUnsampledImageRead(callee_name)) {
            // All unsampled reads need an integer 0 for the Lod operand.
            FindConstant(ConstantInt::get(Context, APInt(32, 0)));
          } else if (IsImageQuery(callee_name)) {
            Type *ImageTy = Call->getOperand(0)->getType();
            const uint32_t dim = ImageDimensionality(ImageTy);
            uint32_t components =
                dim + (clspv::IsArrayImageType(ImageTy) ? 1 : 0);
            if (components > 1) {
              // OpImageQuerySize* return |components| components.
              FindType(VectorType::get(Type::getInt32Ty(Context), components));
              if (dim == 3 && IsGetImageDim(callee_name)) {
                // get_image_dim for 3D images returns an int4.
                FindType(
                    VectorType::get(Type::getInt32Ty(Context), components + 1));
              }
            }

            if (IsSampledImageType(ImageTy)) {
              // All sampled image queries need an integer 0 for the Lod
              // operand.
              FindConstant(ConstantInt::get(Context, APInt(32, 0)));
            }
          }
        }
      }
    }

    // More things to do on kernel functions
    if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
      if (const MDNode *MD =
              dyn_cast<Function>(&F)->getMetadata("reqd_work_group_size")) {
        // We generate constants if the WorkgroupSize builtin is being used.
        if (HasWorkGroupBuiltin) {
          // Collect constant information for work group size.
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(0)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(1)));
          FindConstant(mdconst::extract<ConstantInt>(MD->getOperand(2)));
        }
      }
    }

    // TODO(alan-baker): make this better.
    if (module->getTypeByName("opencl.image1d_ro_t.float") ||
        module->getTypeByName("opencl.image1d_ro_t.float.sampled") ||
        module->getTypeByName("opencl.image1d_wo_t.float") ||
        module->getTypeByName("opencl.image2d_ro_t.float") ||
        module->getTypeByName("opencl.image2d_ro_t.float.sampled") ||
        module->getTypeByName("opencl.image2d_wo_t.float") ||
        module->getTypeByName("opencl.image3d_ro_t.float") ||
        module->getTypeByName("opencl.image3d_ro_t.float.sampled") ||
        module->getTypeByName("opencl.image3d_wo_t.float") ||
        module->getTypeByName("opencl.image1d_array_ro_t.float") ||
        module->getTypeByName("opencl.image1d_array_ro_t.float.sampled") ||
        module->getTypeByName("opencl.image1d_array_wo_t.float") ||
        module->getTypeByName("opencl.image2d_array_ro_t.float") ||
        module->getTypeByName("opencl.image2d_array_ro_t.float.sampled") ||
        module->getTypeByName("opencl.image2d_array_wo_t.float")) {
      FindType(Type::getFloatTy(Context));
    } else if (module->getTypeByName("opencl.image1d_ro_t.uint") ||
               module->getTypeByName("opencl.image1d_ro_t.uint.sampled") ||
               module->getTypeByName("opencl.image1d_wo_t.uint") ||
               module->getTypeByName("opencl.image2d_ro_t.uint") ||
               module->getTypeByName("opencl.image2d_ro_t.uint.sampled") ||
               module->getTypeByName("opencl.image2d_wo_t.uint") ||
               module->getTypeByName("opencl.image3d_ro_t.uint") ||
               module->getTypeByName("opencl.image3d_ro_t.uint.sampled") ||
               module->getTypeByName("opencl.image3d_wo_t.uint") ||
               module->getTypeByName("opencl.image1d_array_ro_t.uint") ||
               module->getTypeByName(
                   "opencl.image1d_array_ro_t.uint.sampled") ||
               module->getTypeByName("opencl.image1d_array_wo_t.uint") ||
               module->getTypeByName("opencl.image2d_array_ro_t.uint") ||
               module->getTypeByName(
                   "opencl.image2d_array_ro_t.uint.sampled") ||
               module->getTypeByName("opencl.image2d_array_wo_t.uint")) {
      FindType(Type::getInt32Ty(Context));
    } else if (module->getTypeByName("opencl.image1d_ro_t.int") ||
               module->getTypeByName("opencl.image1d_ro_t.int.sampled") ||
               module->getTypeByName("opencl.image1d_wo_t.int") ||
               module->getTypeByName("opencl.image2d_ro_t.int") ||
               module->getTypeByName("opencl.image2d_ro_t.int.sampled") ||
               module->getTypeByName("opencl.image2d_wo_t.int") ||
               module->getTypeByName("opencl.image3d_ro_t.int") ||
               module->getTypeByName("opencl.image3d_ro_t.int.sampled") ||
               module->getTypeByName("opencl.image3d_wo_t.int") ||
               module->getTypeByName("opencl.image1d_array_ro_t.int") ||
               module->getTypeByName("opencl.image1d_array_ro_t.int.sampled") ||
               module->getTypeByName("opencl.image1d_array_wo_t.int") ||
               module->getTypeByName("opencl.image2d_array_ro_t.int") ||
               module->getTypeByName("opencl.image2d_array_ro_t.int.sampled") ||
               module->getTypeByName("opencl.image2d_array_wo_t.int")) {
      // Nothing for now...
    } else {
      // This was likely an UndefValue.
      FindType(Type::getFloatTy(Context));
    }

    // Collect types' information from function.
    FindTypePerFunc(F);

    // Collect constant information from function.
    FindConstantPerFunc(F);
  }
}

void SPIRVProducerPass::FindGlobalConstVars() {
  clspv::NormalizeGlobalVariables(*module);
  const DataLayout &DL = module->getDataLayout();

  SmallVector<GlobalVariable *, 8> GVList;
  SmallVector<GlobalVariable *, 8> DeadGVList;
  for (GlobalVariable &GV : module->globals()) {
    if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
      if (GV.use_empty()) {
        DeadGVList.push_back(&GV);
      } else {
        GVList.push_back(&GV);
      }
    }
  }

  // Remove dead global __constant variables.
  for (auto GV : DeadGVList) {
    GV->eraseFromParent();
  }
  DeadGVList.clear();

  if (clspv::Option::ModuleConstantsInStorageBuffer()) {
    // For now, we only support a single storage buffer.
    if (GVList.size() > 0) {
      assert(GVList.size() == 1);
      const auto *GV = GVList[0];
      const auto constants_byte_size =
          (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
      const size_t kConstantMaxSize = 65536;
      if (constants_byte_size > kConstantMaxSize) {
        outs() << "Max __constant capacity of " << kConstantMaxSize
               << " bytes exceeded: " << constants_byte_size << " bytes used\n";
        llvm_unreachable("Max __constant capacity exceeded");
      }
    }
  } else {
    // Change global constant variable's address space to ModuleScopePrivate.
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    for (auto GV : GVList) {
      // Create new gv with ModuleScopePrivate address space.
      Type *NewGVTy = GV->getType()->getPointerElementType();
      GlobalVariable *NewGV = new GlobalVariable(
          *module, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
          nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
      NewGV->takeName(GV);

      const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
      SmallVector<User *, 8> CandidateUsers;

      auto record_called_function_type_as_user =
          [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
            // Find argument index.
            unsigned index = 0;
            for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
              if (gv == call->getOperand(i)) {
                // TODO(dneto): Should we break here?
                index = i;
              }
            }

            // Record function type with global constant.
            GlobalConstFuncTyMap[call->getFunctionType()] =
                std::make_pair(call->getFunctionType(), index);
          };

      for (User *GVU : GVUsers) {
        if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
          record_called_function_type_as_user(GV, Call);
        } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
          // Check GEP users.
          for (User *GEPU : GEP->users()) {
            if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
              record_called_function_type_as_user(GEP, GEPCall);
            }
          }
        }

        CandidateUsers.push_back(GVU);
      }

      for (User *U : CandidateUsers) {
        // Update users of gv with new gv.
        if (!isa<Constant>(U)) {
          // #254: Can't change operands of a constant, but this shouldn't be
          // something that sticks around in the module.
          U->replaceUsesOfWith(GV, NewGV);
        }
      }

      // Delete original gv.
      GV->eraseFromParent();
    }
  }
}

void SPIRVProducerPass::FindResourceVars() {
  ResourceVarInfoList.clear();
  FunctionToResourceVarsMap.clear();
  ModuleOrderedResourceVars.reset();
  // Normally, there is one resource variable per clspv.resource.var.*
  // function, since that is unique'd by arg type and index. By design,
  // we can share these resource variables across kernels because all
  // kernels use the same descriptor set.
  //
  // But if the user requested distinct descriptor sets per kernel, then
  // the descriptor allocator has made different (set,binding) pairs for
  // the same (type,arg_index) pair. Since we can decorate a resource
  // variable with only exactly one DescriptorSet and Binding, we are
  // forced in this case to make distinct resource variables whenever
  // the same clspv.resource.var.X function is seen with distinct
  // (set,binding) values.
  const bool always_distinct_sets =
      clspv::Option::DistinctKernelDescriptorSets();
  for (Function &F : *module) {
    // Rely on the fact the resource var functions have a stable ordering
    // in the module.
    if (F.getName().startswith(clspv::ResourceAccessorFunction())) {
      // Find all calls to this function with distinct set and binding pairs.
      // Save them in ResourceVarInfoList.

      // Determine uniqueness of the (set,binding) pairs only within this
1072 // one resource-var builtin function.
1073 using SetAndBinding = std::pair<unsigned, unsigned>;
1074 // Maps set and binding to the resource var info.
1075 DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
1076 bool first_use = true;
1077 for (auto &U : F.uses()) {
1078 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
1079 const auto set = unsigned(
1080 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
1081 const auto binding = unsigned(
1082 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
1083 const auto arg_kind = clspv::ArgKind(
1084 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
1085 const auto arg_index = unsigned(
1086 dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
alan-bakere9308012019-03-15 10:25:13 -04001087 const auto coherent = unsigned(
1088 dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04001089
1090 // Find or make the resource var info for this combination.
1091 ResourceVarInfo *rv = nullptr;
1092 if (always_distinct_sets) {
1093 // Make a new resource var any time we see a different
1094 // (set,binding) pair.
1095 SetAndBinding key{set, binding};
1096 auto where = set_and_binding_map.find(key);
1097 if (where == set_and_binding_map.end()) {
1098 rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
alan-bakere9308012019-03-15 10:25:13 -04001099 binding, &F, arg_kind, coherent);
David Neto862b7d82018-06-14 18:48:37 -04001100 ResourceVarInfoList.emplace_back(rv);
1101 set_and_binding_map[key] = rv;
1102 } else {
1103 rv = where->second;
1104 }
1105 } else {
1106 // The default is to make exactly one resource for each
1107 // clspv.resource.var.* function.
1108 if (first_use) {
1109 first_use = false;
1110 rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
alan-bakere9308012019-03-15 10:25:13 -04001111 binding, &F, arg_kind, coherent);
David Neto862b7d82018-06-14 18:48:37 -04001112 ResourceVarInfoList.emplace_back(rv);
1113 } else {
1114 rv = ResourceVarInfoList.back().get();
1115 }
1116 }
1117
1118 // Now populate FunctionToResourceVarsMap.
1119 auto &mapping =
1120 FunctionToResourceVarsMap[call->getParent()->getParent()];
1121 while (mapping.size() <= arg_index) {
1122 mapping.push_back(nullptr);
1123 }
1124 mapping[arg_index] = rv;
1125 }
1126 }
1127 }
1128 }
1129
1130 // Populate ModuleOrderedResourceVars.
SJW77b87ad2020-04-21 14:37:52 -05001131 for (Function &F : *module) {
David Neto862b7d82018-06-14 18:48:37 -04001132 auto where = FunctionToResourceVarsMap.find(&F);
1133 if (where != FunctionToResourceVarsMap.end()) {
1134 for (auto &rv : where->second) {
1135 if (rv != nullptr) {
1136 ModuleOrderedResourceVars.insert(rv);
1137 }
1138 }
1139 }
1140 }
1141 if (ShowResourceVars) {
1142 for (auto *info : ModuleOrderedResourceVars) {
1143 outs() << "MORV index " << info->index << " (" << info->descriptor_set
1144 << "," << info->binding << ") " << *(info->var_fn->getReturnType())
1145 << "\n";
1146 }
1147 }
1148}
1149
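// Returns true if any call in the module maps to a GLSL.std.450 extended
// instruction, either directly or indirectly (e.g. clz via FindUMsb, or the
// *pi trigonometric variants). For the indirect cases this also
// pre-registers the helper constants (31 or 1/pi, splatted for vectors)
// that the later codegen will need.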
SJW77b87ad2020-04-21 14:37:52 -05001150bool SPIRVProducerPass::FindExtInst() {
1151 LLVMContext &Context = module->getContext();
David Neto22f144c2017-06-12 14:26:21 -04001152 bool HasExtInst = false;
1153
SJW77b87ad2020-04-21 14:37:52 -05001154 for (Function &F : *module) {
David Neto22f144c2017-06-12 14:26:21 -04001155 for (BasicBlock &BB : F) {
1156 for (Instruction &I : BB) {
1157 if (CallInst *Call = dyn_cast<CallInst>(&I)) {
1158 Function *Callee = Call->getCalledFunction();
1159 // Check whether this call maps to an extended instruction.
David Neto3fbb4072017-10-16 11:28:14 -04001160 auto callee_name = Callee->getName();
1161 const glsl::ExtInst EInst = getExtInstEnum(callee_name);
1162 const glsl::ExtInst IndirectEInst =
1163 getIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04001164
David Neto3fbb4072017-10-16 11:28:14 -04001165 HasExtInst |=
1166 (EInst != kGlslExtInstBad) || (IndirectEInst != kGlslExtInstBad);
1167
1168 if (IndirectEInst) {
1169 // Register extra constants if needed.
1170
1171 // Registers a type and constant for computing the result of the
1172 // given instruction. If the result of the instruction is a vector,
1173 // then make a splat vector constant with the same number of
1174 // elements.
1175 auto register_constant = [this, &I](Constant *constant) {
1176 FindType(constant->getType());
1177 FindConstant(constant);
1178 if (auto *vectorTy = dyn_cast<VectorType>(I.getType())) {
1179 // Register the splat vector of the value with the same
1180 // width as the result of the instruction.
1181 auto *vec_constant = ConstantVector::getSplat(
alan-baker7261e062020-03-15 14:35:48 -04001182 {static_cast<unsigned>(vectorTy->getNumElements()), false},
David Neto3fbb4072017-10-16 11:28:14 -04001183 constant);
1184 FindConstant(vec_constant);
1185 FindType(vec_constant->getType());
1186 }
1187 };
1188 switch (IndirectEInst) {
1189 case glsl::ExtInstFindUMsb:
1190 // clz needs OpExtInst and OpISub with constant 31, or splat
1191 // vector of 31. Add it to the constant list here.
1192 register_constant(
1193 ConstantInt::get(Type::getInt32Ty(Context), 31));
1194 break;
1195 case glsl::ExtInstAcos:
1196 case glsl::ExtInstAsin:
Kévin Petiteb9f90a2018-09-29 12:29:34 +01001197 case glsl::ExtInstAtan:
David Neto3fbb4072017-10-16 11:28:14 -04001198 case glsl::ExtInstAtan2:
1199 // We need 1/pi for acospi, asinpi, atanpi, and atan2pi.
1200 register_constant(
1201 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
1202 break;
1203 default:
1204 assert(false && "internally inconsistent");
1205 }
David Neto22f144c2017-06-12 14:26:21 -04001206 }
1207 }
1208 }
1209 }
1210 }
1211
1212 return HasExtInst;
1213}
1214
1215void SPIRVProducerPass::FindTypePerGlobalVar(GlobalVariable &GV) {
1216 // Investigate global variable's type.
1217 FindType(GV.getType());
1218}
1219
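// Records every type needed for a function: its (possibly rewritten)
// function type plus the types of its instructions and their operands.
// Calls to the clspv resource/workgroup accessor builtins and the literal
// indices of composite insert/extract operations are deliberately skipped.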
1220void SPIRVProducerPass::FindTypePerFunc(Function &F) {
1221 // Investigate function's type.
1222 FunctionType *FTy = F.getFunctionType();
1223
1224 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
1225 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
David Neto9ed8e2f2018-03-24 06:47:24 -07001226 // Handle a regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04001227 if (GlobalConstFuncTyMap.count(FTy)) {
1228 uint32_t GVCstArgIdx = GlobalConstFuncTypeMap[FTy].second;
1229 SmallVector<Type *, 4> NewFuncParamTys;
1230 for (unsigned i = 0; i < FTy->getNumParams(); i++) {
1231 Type *ParamTy = FTy->getParamType(i);
1232 if (i == GVCstArgIdx) {
1233 Type *EleTy = ParamTy->getPointerElementType();
1234 ParamTy = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1235 }
1236
1237 NewFuncParamTys.push_back(ParamTy);
1238 }
1239
1240 FunctionType *NewFTy =
1241 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1242 GlobalConstFuncTyMap[FTy] = std::make_pair(NewFTy, GVCstArgIdx);
1243 FTy = NewFTy;
1244 }
1245
1246 FindType(FTy);
1247 } else {
1248 // Kernel functions take no parameters in the generated SPIR-V, so create
1249 // a new parameterless function type and add it to the type map.
1250 SmallVector<Type *, 4> NewFuncParamTys;
1251 FunctionType *NewFTy =
1252 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1253 FindType(NewFTy);
1254 }
1255
1256 // Investigate instructions' type in function body.
1257 for (BasicBlock &BB : F) {
1258 for (Instruction &I : BB) {
1259 if (isa<ShuffleVectorInst>(I)) {
1260 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1261 // Ignore type for mask of shuffle vector instruction.
1262 if (i == 2) {
1263 continue;
1264 }
1265
1266 Value *Op = I.getOperand(i);
1267 if (!isa<MetadataAsValue>(Op)) {
1268 FindType(Op->getType());
1269 }
1270 }
1271
1272 FindType(I.getType());
1273 continue;
1274 }
1275
David Neto862b7d82018-06-14 18:48:37 -04001276 CallInst *Call = dyn_cast<CallInst>(&I);
1277
1278 if (Call && Call->getCalledFunction()->getName().startswith(
Alan Baker202c8c72018-08-13 13:47:44 -04001279 clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001280 // This is a fake call representing access to a resource variable.
1281 // We handle that elsewhere.
1282 continue;
1283 }
1284
Alan Baker202c8c72018-08-13 13:47:44 -04001285 if (Call && Call->getCalledFunction()->getName().startswith(
1286 clspv::WorkgroupAccessorFunction())) {
1287 // This is a fake call representing access to a workgroup variable.
1288 // We handle that elsewhere.
1289 continue;
1290 }
1291
alan-bakerf083bed2020-01-29 08:15:42 -05001292 // #497: InsertValue and ExtractValue map to OpCompositeInsert and
1293 // OpCompositeExtract, which take literal values for indices. As a result,
1294 // don't map the types of the indices.
1295 if (I.getOpcode() == Instruction::ExtractValue) {
1296 FindType(I.getOperand(0)->getType());
1297 continue;
1298 }
1299 if (I.getOpcode() == Instruction::InsertValue) {
1300 FindType(I.getOperand(0)->getType());
1301 FindType(I.getOperand(1)->getType());
1302 continue;
1303 }
1304
1305 // #497: InsertElement/ExtractElement map to OpCompositeInsert/Extract when
1306 // the index is a constant. In such a case don't map the index type.
1307 if (I.getOpcode() == Instruction::ExtractElement) {
1308 FindType(I.getOperand(0)->getType());
1309 Value *op1 = I.getOperand(1);
1310 if (!isa<Constant>(op1) || isa<GlobalValue>(op1)) {
1311 FindType(op1->getType());
1312 }
1313 continue;
1314 }
1315 if (I.getOpcode() == Instruction::InsertElement) {
1316 FindType(I.getOperand(0)->getType());
1317 FindType(I.getOperand(1)->getType());
1318 Value *op2 = I.getOperand(2);
1319 if (!isa<Constant>(op2) || isa<GlobalValue>(op2)) {
1320 FindType(op2->getType());
1321 }
1322 continue;
1323 }
1324
David Neto22f144c2017-06-12 14:26:21 -04001325 // Work through the operands of the instruction.
1326 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1327 Value *const Op = I.getOperand(i);
1328 // If any of the operands is a constant, find the type!
1329 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1330 FindType(Op->getType());
1331 }
1332 }
1333
1334 for (Use &Op : I.operands()) {
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001335 if (isa<CallInst>(&I)) {
David Neto22f144c2017-06-12 14:26:21 -04001336 // Avoid checking the call instruction's type.
1337 break;
1338 }
Alan Baker202c8c72018-08-13 13:47:44 -04001339 if (CallInst *OpCall = dyn_cast<CallInst>(Op)) {
1340 if (OpCall && OpCall->getCalledFunction()->getName().startswith(
1341 clspv::WorkgroupAccessorFunction())) {
1342 // This is a fake call representing access to a workgroup variable.
1343 // We handle that elsewhere.
1344 continue;
1345 }
1346 }
David Neto22f144c2017-06-12 14:26:21 -04001347 if (!isa<MetadataAsValue>(&Op)) {
1348 FindType(Op->getType());
1349 continue;
1350 }
1351 }
1352
David Neto22f144c2017-06-12 14:26:21 -04001353 // We don't want to track the type of this call as we are going to replace
1354 // it.
Kévin Petitdf71de32019-04-09 14:09:50 +01001355 if (Call && (clspv::LiteralSamplerFunction() ==
David Neto22f144c2017-06-12 14:26:21 -04001356 Call->getCalledFunction()->getName())) {
1357 continue;
1358 }
1359
1360 if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(&I)) {
1361 // If gep's base operand has ModuleScopePrivate address space, make gep
1362 // return ModuleScopePrivate address space.
1363 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate) {
1364 // Add pointer type with private address space for global constant to
1365 // type list.
1366 Type *EleTy = I.getType()->getPointerElementType();
1367 Type *NewPTy =
1368 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1369
1370 FindType(NewPTy);
1371 continue;
1372 }
1373 }
1374
1375 FindType(I.getType());
1376 }
1377 }
1378}
1379
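// If literal samplers are used (via the sampler map or the literal-sampler
// builtin), ensure the opencl.sampler_t struct type exists and record the
// UniformConstant pointer type that its OpVariables will use.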
SJW77b87ad2020-04-21 14:37:52 -05001380void SPIRVProducerPass::FindTypesForSamplerMap() {
David Neto862b7d82018-06-14 18:48:37 -04001381 // If we are using a sampler map, find the type of the sampler.
SJW77b87ad2020-04-21 14:37:52 -05001382 if (module->getFunction(clspv::LiteralSamplerFunction()) ||
David Neto862b7d82018-06-14 18:48:37 -04001383 0 < getSamplerMap().size()) {
SJW77b87ad2020-04-21 14:37:52 -05001384 auto SamplerStructTy = module->getTypeByName("opencl.sampler_t");
David Neto862b7d82018-06-14 18:48:37 -04001385 if (!SamplerStructTy) {
SJW77b87ad2020-04-21 14:37:52 -05001386 SamplerStructTy =
1387 StructType::create(module->getContext(), "opencl.sampler_t");
David Neto862b7d82018-06-14 18:48:37 -04001388 }
1389
1390 SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);
1391
1392 FindType(SamplerTy);
1393 }
1394}
1395
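// Records the types needed by the resource variables discovered earlier:
// which struct types get Block decorations, and which arrays and structs
// need explicit layout (offsets and array strides). Push constant globals
// and clustered module-scope constants are included as well.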
SJW77b87ad2020-04-21 14:37:52 -05001396void SPIRVProducerPass::FindTypesForResourceVars() {
David Neto862b7d82018-06-14 18:48:37 -04001397 // Record types so they are generated.
1398 TypesNeedingLayout.reset();
1399 StructTypesNeedingBlock.reset();
1400
1401 // To match older clspv codegen, generate the float type first if required
1402 // for images.
1403 for (const auto *info : ModuleOrderedResourceVars) {
1404 if (info->arg_kind == clspv::ArgKind::ReadOnlyImage ||
1405 info->arg_kind == clspv::ArgKind::WriteOnlyImage) {
alan-bakerf67468c2019-11-25 15:51:49 -05001406 if (IsIntImageType(info->var_fn->getReturnType())) {
1407 // Nothing for now...
1408 } else if (IsUintImageType(info->var_fn->getReturnType())) {
SJW77b87ad2020-04-21 14:37:52 -05001409 FindType(Type::getInt32Ty(module->getContext()));
alan-bakerf67468c2019-11-25 15:51:49 -05001410 }
1411
1412 // We need "float" either for the sampled type or for the Lod operand.
SJW77b87ad2020-04-21 14:37:52 -05001413 FindType(Type::getFloatTy(module->getContext()));
David Neto862b7d82018-06-14 18:48:37 -04001414 }
1415 }
1416
1417 for (const auto *info : ModuleOrderedResourceVars) {
1418 Type *type = info->var_fn->getReturnType();
1419
1420 switch (info->arg_kind) {
1421 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04001422 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04001423 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1424 StructTypesNeedingBlock.insert(sty);
1425 } else {
1426 errs() << *type << "\n";
1427 llvm_unreachable("Buffer arguments must map to structures!");
1428 }
1429 break;
1430 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001431 case clspv::ArgKind::PodUBO:
1432 case clspv::ArgKind::PodPushConstant:
David Neto862b7d82018-06-14 18:48:37 -04001433 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1434 StructTypesNeedingBlock.insert(sty);
1435 } else {
1436 errs() << *type << "\n";
1437 llvm_unreachable("POD arguments must map to structures!");
1438 }
1439 break;
1440 case clspv::ArgKind::ReadOnlyImage:
1441 case clspv::ArgKind::WriteOnlyImage:
1442 case clspv::ArgKind::Sampler:
1443 // Sampler and image types map to the pointee type but
1444 // in the uniform constant address space.
1445 type = PointerType::get(type->getPointerElementType(),
1446 clspv::AddressSpace::UniformConstant);
1447 break;
1448 default:
1449 break;
1450 }
1451
1452 // The converted type is the type of the OpVariable we will generate.
1453 // If the pointee type is an array of size zero, FindType will convert it
1454 // to a runtime array.
1455 FindType(type);
1456 }
1457
alan-bakerdcd97412019-09-16 15:32:30 -04001458 // If module constants are clustered in a storage buffer then that struct
1459 // needs layout decorations.
1460 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
SJW77b87ad2020-04-21 14:37:52 -05001461 for (GlobalVariable &GV : module->globals()) {
alan-bakerdcd97412019-09-16 15:32:30 -04001462 PointerType *PTy = cast<PointerType>(GV.getType());
1463 const auto AS = PTy->getAddressSpace();
1464 const bool module_scope_constant_external_init =
1465 (AS == AddressSpace::Constant) && GV.hasInitializer();
1466 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
1467 if (module_scope_constant_external_init &&
1468 spv::BuiltInMax == BuiltinType) {
1469 StructTypesNeedingBlock.insert(
1470 cast<StructType>(PTy->getPointerElementType()));
1471 }
1472 }
1473 }
1474
SJW77b87ad2020-04-21 14:37:52 -05001475 for (const GlobalVariable &GV : module->globals()) {
Kévin Petitbbbda972020-03-03 19:16:31 +00001476 if (GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
1477 auto Ty = cast<PointerType>(GV.getType())->getPointerElementType();
1478 assert(Ty->isStructTy() && "Push constants have to be structures.");
1479 auto STy = cast<StructType>(Ty);
1480 StructTypesNeedingBlock.insert(STy);
1481 }
1482 }
1483
David Neto862b7d82018-06-14 18:48:37 -04001484 // Traverse the arrays and structures underneath each Block, and
1485 // mark them as needing layout.
1486 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1487 StructTypesNeedingBlock.end());
1488 while (!work_list.empty()) {
1489 Type *type = work_list.back();
1490 work_list.pop_back();
1491 TypesNeedingLayout.insert(type);
1492 switch (type->getTypeID()) {
1493 case Type::ArrayTyID:
1494 work_list.push_back(type->getArrayElementType());
1495 if (!Hack_generate_runtime_array_stride_early) {
1496 // Remember this array type for deferred decoration.
1497 TypesNeedingArrayStride.insert(type);
1498 }
1499 break;
1500 case Type::StructTyID:
1501 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1502 work_list.push_back(elem_ty);
1503 }
1504 default:
1505 // This type and its contained types don't get layout.
1506 break;
1507 }
1508 }
1509}
1510
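// Translates the module-level local spec-id metadata into one
// Workgroup-storage variable per spec id, cached in LocalSpecIdInfoMap.
// For a pointer-to-local argument with element type float and spec id 3,
// the emitted SPIR-V is expected to look roughly like:
//   %size = OpSpecConstant %uint 1        ; decorated with SpecId 3
//   %arr  = OpTypeArray %float %size
//   %ptr  = OpTypePointer Workgroup %arr
//   %var  = OpVariable %ptr Workgroup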
SJWf93f5f32020-05-05 07:27:56 -05001511void SPIRVProducerPass::GenerateWorkgroupVars() {
Alan Baker202c8c72018-08-13 13:47:44 -04001512 // The SpecId assignment for pointer-to-local arguments is recorded in
1513 // module-level metadata. Translate that information into local argument
1514 // information.
SJWf93f5f32020-05-05 07:27:56 -05001515 LLVMContext &Context = module->getContext();
SJW77b87ad2020-04-21 14:37:52 -05001516 NamedMDNode *nmd = module->getNamedMetadata(clspv::LocalSpecIdMetadataName());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001517 if (!nmd)
1518 return;
Alan Baker202c8c72018-08-13 13:47:44 -04001519 for (auto operand : nmd->operands()) {
1520 MDTuple *tuple = cast<MDTuple>(operand);
1521 ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
1522 Function *func = cast<Function>(fn_md->getValue());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001523 ConstantAsMetadata *arg_index_md =
1524 cast<ConstantAsMetadata>(tuple->getOperand(1));
1525 int arg_index = static_cast<int>(
1526 cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
1527 Argument *arg = &*(func->arg_begin() + arg_index);
Alan Baker202c8c72018-08-13 13:47:44 -04001528
1529 ConstantAsMetadata *spec_id_md =
1530 cast<ConstantAsMetadata>(tuple->getOperand(2));
alan-bakerb6b09dc2018-11-08 16:59:28 -05001531 int spec_id = static_cast<int>(
1532 cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04001533
Alan Baker202c8c72018-08-13 13:47:44 -04001534 LocalArgSpecIds[arg] = spec_id;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001535 if (LocalSpecIdInfoMap.count(spec_id))
1536 continue;
Alan Baker202c8c72018-08-13 13:47:44 -04001537
SJWf93f5f32020-05-05 07:27:56 -05001538 // Generate the spec constant.
1539 SPIRVOperandVec Ops;
1540 Ops << MkId(getSPIRVType(Type::getInt32Ty(Context))) << MkNum(1);
1541 SPIRVID ArraySizeID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
Alan Baker202c8c72018-08-13 13:47:44 -04001542
SJWf93f5f32020-05-05 07:27:56 -05001543 // Generate the array type.
1544 Type *ElemTy = arg->getType()->getPointerElementType();
1545 Ops.clear();
1546 // The element type must have been created.
1547 uint32_t elem_ty_id = getSPIRVType(ElemTy);
1548 Ops << MkId(elem_ty_id) << MkId(ArraySizeID);
1549
1550 SPIRVID ArrayTypeID = addSPIRVInst<kTypes>(spv::OpTypeArray, Ops);
1551
1552 Ops.clear();
1553 Ops << MkNum(spv::StorageClassWorkgroup) << MkId(ArrayTypeID);
1554 SPIRVID PtrArrayTypeID = addSPIRVInst<kTypes>(spv::OpTypePointer, Ops);
1555
1556 // Generate OpVariable.
1557 //
1558 // Ops[0] : Result Type ID
1559 // Ops[1] : Storage Class
1560 Ops.clear();
1561 Ops << MkId(PtrArrayTypeID) << MkNum(spv::StorageClassWorkgroup);
1562
1563 SPIRVID VariableID = addSPIRVInst<kGlobalVariables>(spv::OpVariable, Ops);
1564
1565 Ops.clear();
1566 Ops << MkId(ArraySizeID) << MkNum(spv::DecorationSpecId) << MkNum(spec_id);
1567 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
1568
1569 LocalArgInfo info{VariableID, ElemTy, ArraySizeID,
1570 ArrayTypeID, PtrArrayTypeID, spec_id};
1571 LocalSpecIdInfoMap[spec_id] = info;
Alan Baker202c8c72018-08-13 13:47:44 -04001572 }
1573}
1574
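// Adds Ty and its contained types to the ordered type list if not already
// present. Pointers to opaque structs in the Constant/Global address spaces
// are registered as UniformConstant pointers instead, and non-zero-length
// arrays also pull in i32 for their length constant.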
David Neto22f144c2017-06-12 14:26:21 -04001575void SPIRVProducerPass::FindType(Type *Ty) {
1576 TypeList &TyList = getTypeList();
1577
1578 if (0 != TyList.idFor(Ty)) {
1579 return;
1580 }
1581
1582 if (Ty->isPointerTy()) {
1583 auto AddrSpace = Ty->getPointerAddressSpace();
1584 if ((AddressSpace::Constant == AddrSpace) ||
1585 (AddressSpace::Global == AddrSpace)) {
1586 auto PointeeTy = Ty->getPointerElementType();
1587
1588 if (PointeeTy->isStructTy() &&
1589 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1590 FindType(PointeeTy);
1591 auto ActualPointerTy =
1592 PointeeTy->getPointerTo(AddressSpace::UniformConstant);
1593 FindType(ActualPointerTy);
1594 return;
1595 }
1596 }
1597 }
1598
David Neto862b7d82018-06-14 18:48:37 -04001599 // By convention, LLVM array type with 0 elements will map to
1600 // OpTypeRuntimeArray. Otherwise, it will map to OpTypeArray, which
1601 // has a constant number of elements. We need to support the type of that
1602 // length constant.
1603 if (auto *arrayTy = dyn_cast<ArrayType>(Ty)) {
1604 if (arrayTy->getNumElements() > 0) {
1605 LLVMContext &Context = Ty->getContext();
1606 FindType(Type::getInt32Ty(Context));
1607 }
David Neto22f144c2017-06-12 14:26:21 -04001608 }
1609
1610 for (Type *SubTy : Ty->subtypes()) {
1611 FindType(SubTy);
1612 }
1613
1614 TyList.insert(Ty);
1615}
1616
1617void SPIRVProducerPass::FindConstantPerGlobalVar(GlobalVariable &GV) {
1618 // If the global variable has a (non undef) initializer.
1619 if (GV.hasInitializer() && !isa<UndefValue>(GV.getInitializer())) {
David Neto862b7d82018-06-14 18:48:37 -04001620 // Generate the constant if it's not the initializer to a module scope
1621 // constant that we will expect in a storage buffer.
1622 const bool module_scope_constant_external_init =
1623 (GV.getType()->getPointerAddressSpace() == AddressSpace::Constant) &&
1624 clspv::Option::ModuleConstantsInStorageBuffer();
1625 if (!module_scope_constant_external_init) {
1626 FindConstant(GV.getInitializer());
1627 }
David Neto22f144c2017-06-12 14:26:21 -04001628 }
1629}
1630
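// Collects the constants used in a function body, skipping operands that
// never become SPIR-V constants (alloca sizes, shuffle masks,
// insert/extract indices, clspv builtin bookkeeping) and adding the helper
// masks, shift amounts, and scope/memory-semantics constants needed by the
// <4 x i8>, truncation, and atomic special cases.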
1631void SPIRVProducerPass::FindConstantPerFunc(Function &F) {
1632 // Investigate constants in function body.
1633 for (BasicBlock &BB : F) {
1634 for (Instruction &I : BB) {
David Neto862b7d82018-06-14 18:48:37 -04001635 if (auto *call = dyn_cast<CallInst>(&I)) {
1636 auto name = call->getCalledFunction()->getName();
Kévin Petitdf71de32019-04-09 14:09:50 +01001637 if (name == clspv::LiteralSamplerFunction()) {
David Neto862b7d82018-06-14 18:48:37 -04001638 // We've handled these constants elsewhere, so skip it.
1639 continue;
1640 }
Alan Baker202c8c72018-08-13 13:47:44 -04001641 if (name.startswith(clspv::ResourceAccessorFunction())) {
1642 continue;
1643 }
1644 if (name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04001645 continue;
1646 }
Kévin Petit617a76d2019-04-04 13:54:16 +01001647 if (name.startswith(clspv::SPIRVOpIntrinsicFunction())) {
1648 // Skip the first operand that has the SPIR-V Opcode
1649 for (unsigned i = 1; i < I.getNumOperands(); i++) {
1650 if (isa<Constant>(I.getOperand(i)) &&
1651 !isa<GlobalValue>(I.getOperand(i))) {
1652 FindConstant(I.getOperand(i));
1653 }
1654 }
1655 continue;
1656 }
David Neto22f144c2017-06-12 14:26:21 -04001657 }
1658
1659 if (isa<AllocaInst>(I)) {
1660 // An alloca instruction has a constant for the number of elements. Ignore it.
1661 continue;
1662 } else if (isa<ShuffleVectorInst>(I)) {
1663 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1664 // Ignore constant for mask of shuffle vector instruction.
1665 if (i == 2) {
1666 continue;
1667 }
1668
1669 if (isa<Constant>(I.getOperand(i)) &&
1670 !isa<GlobalValue>(I.getOperand(i))) {
1671 FindConstant(I.getOperand(i));
1672 }
1673 }
1674
1675 continue;
1676 } else if (isa<InsertElementInst>(I)) {
1677 // Handle InsertElement with <4 x i8> specially.
1678 Type *CompositeTy = I.getOperand(0)->getType();
1679 if (is4xi8vec(CompositeTy)) {
1680 LLVMContext &Context = CompositeTy->getContext();
1681 if (isa<Constant>(I.getOperand(0))) {
1682 FindConstant(I.getOperand(0));
1683 }
1684
1685 if (isa<Constant>(I.getOperand(1))) {
1686 FindConstant(I.getOperand(1));
1687 }
1688
1689 // Add mask constant 0xFF.
1690 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1691 FindConstant(CstFF);
1692
1693 // Add shift amount constant.
1694 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
1695 uint64_t Idx = CI->getZExtValue();
1696 Constant *CstShiftAmount =
1697 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1698 FindConstant(CstShiftAmount);
1699 }
1700
1701 continue;
1702 }
1703
1704 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1705 // Ignore constant for index of InsertElement instruction.
1706 if (i == 2) {
1707 continue;
1708 }
1709
1710 if (isa<Constant>(I.getOperand(i)) &&
1711 !isa<GlobalValue>(I.getOperand(i))) {
1712 FindConstant(I.getOperand(i));
1713 }
1714 }
1715
1716 continue;
1717 } else if (isa<ExtractElementInst>(I)) {
1718 // Handle ExtractElement with <4 x i8> specially.
1719 Type *CompositeTy = I.getOperand(0)->getType();
1720 if (is4xi8vec(CompositeTy)) {
1721 LLVMContext &Context = CompositeTy->getContext();
1722 if (isa<Constant>(I.getOperand(0))) {
1723 FindConstant(I.getOperand(0));
1724 }
1725
1726 // Add mask constant 0xFF.
1727 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
1728 FindConstant(CstFF);
1729
1730 // Add shift amount constant.
1731 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
1732 uint64_t Idx = CI->getZExtValue();
1733 Constant *CstShiftAmount =
1734 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
1735 FindConstant(CstShiftAmount);
1736 } else {
1737 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
1738 FindConstant(Cst8);
1739 }
1740
1741 continue;
1742 }
1743
1744 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1745 // Ignore constant for index of ExtractElement instruction.
1746 if (i == 1) {
1747 continue;
1748 }
1749
1750 if (isa<Constant>(I.getOperand(i)) &&
1751 !isa<GlobalValue>(I.getOperand(i))) {
1752 FindConstant(I.getOperand(i));
1753 }
1754 }
1755
1756 continue;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001757 } else if ((Instruction::Xor == I.getOpcode()) &&
1758 I.getType()->isIntegerTy(1)) {
1759 // We special-case Xor where the type is i1 and one of the arguments is a
1760 // constant 1 (true): this becomes an OpLogicalNot in SPIR-V, so we don't
1761 // need the constant.
David Neto22f144c2017-06-12 14:26:21 -04001762 bool foundConstantTrue = false;
1763 for (Use &Op : I.operands()) {
1764 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1765 auto CI = cast<ConstantInt>(Op);
1766
1767 if (CI->isZero() || foundConstantTrue) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05001768 // If we already found the true constant, we might (probably only
1769 // at -O0) have an OpLogicalNot that takes a constant argument, so
1770 // record that constant anyway.
David Neto22f144c2017-06-12 14:26:21 -04001771 FindConstant(Op);
1772 } else {
1773 foundConstantTrue = true;
1774 }
1775 }
1776 }
1777
1778 continue;
David Netod2de94a2017-08-28 17:27:47 -04001779 } else if (isa<TruncInst>(I)) {
alan-bakerb39c8262019-03-08 14:03:37 -05001780 // Special case if i8 is not generally handled.
1781 if (!clspv::Option::Int8Support()) {
1782 // For truncation to i8 we mask against 255.
1783 Type *ToTy = I.getType();
1784 if (8u == ToTy->getPrimitiveSizeInBits()) {
1785 LLVMContext &Context = ToTy->getContext();
1786 Constant *Cst255 =
1787 ConstantInt::get(Type::getInt32Ty(Context), 0xff);
1788 FindConstant(Cst255);
1789 }
David Netod2de94a2017-08-28 17:27:47 -04001790 }
Neil Henning39672102017-09-29 14:33:13 +01001791 } else if (isa<AtomicRMWInst>(I)) {
1792 LLVMContext &Context = I.getContext();
1793
1794 FindConstant(
1795 ConstantInt::get(Type::getInt32Ty(Context), spv::ScopeDevice));
1796 FindConstant(ConstantInt::get(
1797 Type::getInt32Ty(Context),
1798 spv::MemorySemanticsUniformMemoryMask |
1799 spv::MemorySemanticsSequentiallyConsistentMask));
David Neto22f144c2017-06-12 14:26:21 -04001800 }
1801
1802 for (Use &Op : I.operands()) {
1803 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1804 FindConstant(Op);
1805 }
1806 }
1807 }
1808 }
1809}
1810
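// Adds a constant and its transitive operands to the ordered constant list.
// Globals are skipped when module-scope constants are clustered in a
// storage buffer, and each element of a constant-data sequence is
// registered individually.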
1811void SPIRVProducerPass::FindConstant(Value *V) {
David Neto22f144c2017-06-12 14:26:21 -04001812 ValueList &CstList = getConstantList();
1813
David Netofb9a7972017-08-25 17:08:24 -04001814 // If V is already tracked, ignore it.
1815 if (0 != CstList.idFor(V)) {
David Neto22f144c2017-06-12 14:26:21 -04001816 return;
1817 }
1818
David Neto862b7d82018-06-14 18:48:37 -04001819 if (isa<GlobalValue>(V) && clspv::Option::ModuleConstantsInStorageBuffer()) {
1820 return;
1821 }
1822
David Neto22f144c2017-06-12 14:26:21 -04001823 Constant *Cst = cast<Constant>(V);
David Neto862b7d82018-06-14 18:48:37 -04001824 Type *CstTy = Cst->getType();
David Neto22f144c2017-06-12 14:26:21 -04001825
1826 // Handle constant with <4 x i8> type specially.
David Neto22f144c2017-06-12 14:26:21 -04001827 if (is4xi8vec(CstTy)) {
1828 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001829 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001830 }
1831 }
1832
1833 if (Cst->getNumOperands()) {
1834 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end(); I != E;
1835 ++I) {
1836 FindConstant(*I);
1837 }
1838
David Netofb9a7972017-08-25 17:08:24 -04001839 CstList.insert(Cst);
David Neto22f144c2017-06-12 14:26:21 -04001840 return;
1841 } else if (const ConstantDataSequential *CDS =
1842 dyn_cast<ConstantDataSequential>(Cst)) {
1843 // Add constants for each element to constant list.
1844 for (unsigned i = 0; i < CDS->getNumElements(); i++) {
1845 Constant *EleCst = CDS->getElementAsConstant(i);
1846 FindConstant(EleCst);
1847 }
1848 }
1849
1850 if (!isa<GlobalValue>(V)) {
David Netofb9a7972017-08-25 17:08:24 -04001851 CstList.insert(V);
David Neto22f144c2017-06-12 14:26:21 -04001852 }
1853}
1854
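// Maps an OpenCL address space to the SPIR-V storage class this producer
// uses for it; Constant maps to Uniform or StorageBuffer depending on
// whether constant args are placed in a uniform buffer.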
1855spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
1856 switch (AddrSpace) {
1857 default:
1858 llvm_unreachable("Unsupported OpenCL address space");
1859 case AddressSpace::Private:
1860 return spv::StorageClassFunction;
1861 case AddressSpace::Global:
David Neto22f144c2017-06-12 14:26:21 -04001862 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001863 case AddressSpace::Constant:
1864 return clspv::Option::ConstantArgsInUniformBuffer()
1865 ? spv::StorageClassUniform
1866 : spv::StorageClassStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -04001867 case AddressSpace::Input:
1868 return spv::StorageClassInput;
1869 case AddressSpace::Local:
1870 return spv::StorageClassWorkgroup;
1871 case AddressSpace::UniformConstant:
1872 return spv::StorageClassUniformConstant;
David Neto9ed8e2f2018-03-24 06:47:24 -07001873 case AddressSpace::Uniform:
David Netoe439d702018-03-23 13:14:08 -07001874 return spv::StorageClassUniform;
David Neto22f144c2017-06-12 14:26:21 -04001875 case AddressSpace::ModuleScopePrivate:
1876 return spv::StorageClassPrivate;
Kévin Petitbbbda972020-03-03 19:16:31 +00001877 case AddressSpace::PushConstant:
1878 return spv::StorageClassPushConstant;
David Neto22f144c2017-06-12 14:26:21 -04001879 }
1880}
1881
David Neto862b7d82018-06-14 18:48:37 -04001882spv::StorageClass
1883SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
1884 switch (arg_kind) {
1885 case clspv::ArgKind::Buffer:
1886 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001887 case clspv::ArgKind::BufferUBO:
1888 return spv::StorageClassUniform;
David Neto862b7d82018-06-14 18:48:37 -04001889 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001890 return spv::StorageClassStorageBuffer;
1891 case clspv::ArgKind::PodUBO:
1892 return spv::StorageClassUniform;
1893 case clspv::ArgKind::PodPushConstant:
1894 return spv::StorageClassPushConstant;
David Neto862b7d82018-06-14 18:48:37 -04001895 case clspv::ArgKind::Local:
1896 return spv::StorageClassWorkgroup;
1897 case clspv::ArgKind::ReadOnlyImage:
1898 case clspv::ArgKind::WriteOnlyImage:
1899 case clspv::ArgKind::Sampler:
1900 return spv::StorageClassUniformConstant;
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001901 default:
1902 llvm_unreachable("Unsupported storage class for argument kind");
David Neto862b7d82018-06-14 18:48:37 -04001903 }
1904}
1905
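// Maps the reserved __spirv_* global variable names onto their SPIR-V
// BuiltIn enums; any other name returns spv::BuiltInMax.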
David Neto22f144c2017-06-12 14:26:21 -04001906spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1907 return StringSwitch<spv::BuiltIn>(Name)
1908 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1909 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1910 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1911 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1912 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
alan-bakerbed3a882020-04-21 14:42:41 -04001913 .Case("__spirv_WorkDim", spv::BuiltInWorkDim)
alan-bakere1996972020-05-04 08:38:12 -04001914 .Case("__spirv_GlobalOffset", spv::BuiltInGlobalOffset)
David Neto22f144c2017-06-12 14:26:21 -04001915 .Default(spv::BuiltInMax);
1916}
1917
1918void SPIRVProducerPass::GenerateExtInstImport() {
David Neto22f144c2017-06-12 14:26:21 -04001919 uint32_t &ExtInstImportID = getOpExtInstImportID();
1920
1921 //
1922 // Generate OpExtInstImport.
1923 //
1924 // Ops[0] ... Ops[n] = Name (Literal String)
SJWf93f5f32020-05-05 07:27:56 -05001925
1926 ExtInstImportID =
1927 addSPIRVInst<kImports>(spv::OpExtInstImport, "GLSL.std.450");
1928}
1929
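// Returns the SPIR-V id for an LLVM type, emitting the OpType* instruction
// (plus any Block/Offset/ArrayStride decorations) on first use and caching
// the result in TypeMap. Constant and Global pointers are deliberately
// conflated when constant args are not in a uniform buffer, and opaque
// OpenCL image/sampler structs become OpTypeImage/OpTypeSampler.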
1930uint32_t SPIRVProducerPass::getSPIRVType(Type *Ty) {
1931 auto TI = TypeMap.find(Ty);
1932 if (TI != TypeMap.end()) {
1933 assert(TI->second);
1934 return TI->second;
1935 }
1936
1937 const auto &DL = module->getDataLayout();
1938
1939 uint32_t RID = 0;
1940
1941 switch (Ty->getTypeID()) {
1942 default: {
1943 Ty->print(errs());
1944 llvm_unreachable("Unsupported type???");
1945 break;
1946 }
1947 case Type::MetadataTyID:
1948 case Type::LabelTyID: {
1949 // Ignore these types.
1950 break;
1951 }
1952 case Type::PointerTyID: {
1953 PointerType *PTy = cast<PointerType>(Ty);
1954 unsigned AddrSpace = PTy->getAddressSpace();
1955
1956 if (AddrSpace != AddressSpace::UniformConstant) {
1957 auto PointeeTy = PTy->getElementType();
1958 if (PointeeTy->isStructTy() &&
1959 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1960 // TODO(sjw): assert always an image?
1961 RID = getSPIRVType(PointeeTy);
1962 break;
1963 }
1964 }
1965
1966 // For the purposes of our Vulkan SPIR-V type system, constant and global
1967 // are conflated.
1968 if (AddressSpace::Constant == AddrSpace) {
1969 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1970 AddrSpace = AddressSpace::Global;
1971 // Check to see if we already created this type (for instance, if we
1972 // had a constant <type>* and a global <type>*, the type would be
1973 // created by one of these types, and shared by both).
1974 auto GlobalTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
1975 if (0 < TypeMap.count(GlobalTy)) {
1976 RID = TypeMap[GlobalTy];
1977 break;
1978 }
1979 }
1980 } else if (AddressSpace::Global == AddrSpace) {
1981 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1982 AddrSpace = AddressSpace::Constant;
1983
1984 // Check to see if we already created this type (for instance, if we
1985 // had a constant <type>* and a global <type>*, the type would be
1986 // created by one of these types, and shared by both).
1987 auto ConstantTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
1988 if (0 < TypeMap.count(ConstantTy)) {
1989 RID = TypeMap[ConstantTy];
1990 break;
1991 }
1992 }
1993 }
1994
1995 //
1996 // Generate OpTypePointer.
1997 //
1998
1999 // OpTypePointer
2000 // Ops[0] = Storage Class
2001 // Ops[1] = Element Type ID
2002 SPIRVOperandVec Ops;
2003
2004 Ops << MkNum(GetStorageClass(AddrSpace))
2005 << MkId(getSPIRVType(PTy->getElementType()));
2006
2007 RID = addSPIRVInst<kTypes>(spv::OpTypePointer, Ops);
2008 break;
2009 }
2010 case Type::StructTyID: {
2011 StructType *STy = cast<StructType>(Ty);
2012
2013 // Handle sampler type.
2014 if (STy->isOpaque()) {
2015 if (STy->getName().equals("opencl.sampler_t")) {
2016 //
2017 // Generate OpTypeSampler
2018 //
2019 // Empty Ops.
2020
2021 RID = addSPIRVInst<kTypes>(spv::OpTypeSampler);
2022 break;
2023 } else if (STy->getName().startswith("opencl.image1d_ro_t") ||
2024 STy->getName().startswith("opencl.image1d_wo_t") ||
2025 STy->getName().startswith("opencl.image1d_array_ro_t") ||
2026 STy->getName().startswith("opencl.image1d_array_wo_t") ||
2027 STy->getName().startswith("opencl.image2d_ro_t") ||
2028 STy->getName().startswith("opencl.image2d_wo_t") ||
2029 STy->getName().startswith("opencl.image2d_array_ro_t") ||
2030 STy->getName().startswith("opencl.image2d_array_wo_t") ||
2031 STy->getName().startswith("opencl.image3d_ro_t") ||
2032 STy->getName().startswith("opencl.image3d_wo_t")) {
2033 //
2034 // Generate OpTypeImage
2035 //
2036 // Ops[0] = Sampled Type ID
2037 // Ops[1] = Dim ID
2038 // Ops[2] = Depth (Literal Number)
2039 // Ops[3] = Arrayed (Literal Number)
2040 // Ops[4] = MS (Literal Number)
2041 // Ops[5] = Sampled (Literal Number)
2042 // Ops[6] = Image Format ID
2043 //
2044 SPIRVOperandVec Ops;
2045
2046 uint32_t SampledTyID = 0;
2047 if (STy->getName().contains(".float")) {
2048 SampledTyID = getSPIRVType(Type::getFloatTy(Ty->getContext()));
2049 } else if (STy->getName().contains(".uint")) {
2050 SampledTyID = getSPIRVType(Type::getInt32Ty(Ty->getContext()));
2051 } else if (STy->getName().contains(".int")) {
2052 // Generate the signed 32-bit integer type if necessary.
2053 if (int32ID == 0) {
2054 SPIRVOperandVec intOps;
2055 intOps << MkNum(32);
2056 intOps << MkNum(1);
2057 int32ID = addSPIRVInst<kTypes>(spv::OpTypeInt, intOps);
2058 }
2059 SampledTyID = int32ID;
2060
2061 // Generate a vec4 of the signed int if necessary.
2062 if (v4int32ID == 0) {
2063 SPIRVOperandVec vecOps;
2064 vecOps << MkId(int32ID);
2065 vecOps << MkNum(4);
2066 v4int32ID = addSPIRVInst<kTypes>(spv::OpTypeVector, vecOps);
2067 }
2068 } else {
2069 // This was likely an UndefValue.
2070 SampledTyID = getSPIRVType(Type::getFloatTy(Ty->getContext()));
2071 }
2072 Ops << MkId(SampledTyID);
2073
2074 spv::Dim DimID = spv::Dim2D;
2075 if (STy->getName().startswith("opencl.image1d_ro_t") ||
2076 STy->getName().startswith("opencl.image1d_wo_t") ||
2077 STy->getName().startswith("opencl.image1d_array_ro_t") ||
2078 STy->getName().startswith("opencl.image1d_array_wo_t")) {
2079 DimID = spv::Dim1D;
2080 } else if (STy->getName().startswith("opencl.image3d_ro_t") ||
2081 STy->getName().startswith("opencl.image3d_wo_t")) {
2082 DimID = spv::Dim3D;
2083 }
2084 Ops << MkNum(DimID);
2085
2086 // TODO: Set up Depth.
2087 Ops << MkNum(0);
2088
2089 uint32_t arrayed = STy->getName().contains("_array_") ? 1 : 0;
2090 Ops << MkNum(arrayed);
2091
2092 // TODO: Set up MS.
2093 Ops << MkNum(0);
2094
2095 // Set up Sampled.
2096 //
2097 // From Spec
2098 //
2099 // 0 indicates this is only known at run time, not at compile time
2100 // 1 indicates will be used with sampler
2101 // 2 indicates will be used without a sampler (a storage image)
2102 uint32_t Sampled = 1;
2103 if (!STy->getName().contains(".sampled")) {
2104 Sampled = 2;
2105 }
2106 Ops << MkNum(Sampled);
2107
2108 // TODO: Set up Image Format.
2109 Ops << MkNum(spv::ImageFormatUnknown);
2110
2111 RID = addSPIRVInst<kTypes>(spv::OpTypeImage, Ops);
2112
2113 Ops.clear();
2114 Ops << MkId(RID);
2115
2116 getImageTypeMap()[Ty] =
2117 addSPIRVInst<kTypes>(spv::OpTypeSampledImage, Ops);
2118 break;
2119 }
2120 }
2121
2122 //
2123 // Generate OpTypeStruct
2124 //
2125 // Ops[0] ... Ops[n] = Member IDs
2126 SPIRVOperandVec Ops;
2127
2128 for (auto *EleTy : STy->elements()) {
2129 Ops << MkId(getSPIRVType(EleTy));
2130 }
2131
2132 RID = addSPIRVInst<kTypes>(spv::OpTypeStruct, Ops);
2133
2134 // Generate OpMemberDecorate.
2135 if (TypesNeedingLayout.idFor(STy)) {
2136 for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
2137 MemberIdx++) {
2138 // Ops[0] = Structure Type ID
2139 // Ops[1] = Member Index(Literal Number)
2140 // Ops[2] = Decoration (Offset)
2141 // Ops[3] = Byte Offset (Literal Number)
2142 Ops.clear();
2143
2144 Ops << MkId(RID) << MkNum(MemberIdx) << MkNum(spv::DecorationOffset);
2145
2146 const auto ByteOffset =
2147 GetExplicitLayoutStructMemberOffset(STy, MemberIdx, DL);
2148
2149 Ops << MkNum(ByteOffset);
2150
2151 addSPIRVInst<kAnnotations>(spv::OpMemberDecorate, Ops);
2152 }
2153 }
2154
2155 // Generate OpDecorate.
2156 if (StructTypesNeedingBlock.idFor(STy)) {
2157 Ops.clear();
2158 // Use Block decorations with StorageBuffer storage class.
2159 Ops << MkId(RID) << MkNum(spv::DecorationBlock);
2160
2161 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
2162 }
2163 break;
2164 }
2165 case Type::IntegerTyID: {
2166 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
2167
2168 if (BitWidth == 1) {
2169 RID = addSPIRVInst<kTypes>(spv::OpTypeBool);
2170 } else {
2171 if (!clspv::Option::Int8Support() && BitWidth == 8) {
2172 // i8 is added to TypeMap as i32.
2173 RID = getSPIRVType(Type::getIntNTy(Ty->getContext(), 32));
2174 } else {
2175 SPIRVOperandVec Ops;
2176 Ops << MkNum(BitWidth) << MkNum(0 /* not signed */);
2177 RID = addSPIRVInst<kTypes>(spv::OpTypeInt, Ops);
2178 }
2179 }
2180 break;
2181 }
2182 case Type::HalfTyID:
2183 case Type::FloatTyID:
2184 case Type::DoubleTyID: {
2185 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
2186 SPIRVOperandVec Ops;
2187 Ops << MkNum(BitWidth);
2188
2189 RID = addSPIRVInst<kTypes>(spv::OpTypeFloat, Ops);
2190 break;
2191 }
2192 case Type::ArrayTyID: {
2193 ArrayType *ArrTy = cast<ArrayType>(Ty);
2194 const uint64_t Length = ArrTy->getArrayNumElements();
2195 if (Length == 0) {
2196 // By convention, map it to a RuntimeArray.
2197
2198 Type *EleTy = ArrTy->getArrayElementType();
2199
2200 //
2201 // Generate OpTypeRuntimeArray.
2202 //
2203 // OpTypeRuntimeArray
2204 // Ops[0] = Element Type ID
2205 SPIRVOperandVec Ops;
2206 Ops << MkId(getSPIRVType(EleTy));
2207
2208 RID = addSPIRVInst<kTypes>(spv::OpTypeRuntimeArray, Ops);
2209
2210 if (Hack_generate_runtime_array_stride_early) {
2211 // Generate OpDecorate.
2212
2213 // Ops[0] = Target ID
2214 // Ops[1] = Decoration (ArrayStride)
2215 // Ops[2] = Stride Number(Literal Number)
2216 Ops.clear();
2217
2218 Ops << MkId(RID) << MkNum(spv::DecorationArrayStride)
2219 << MkNum(static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL)));
2220
2221 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
2222 }
2223
2224 } else {
2225
2226 //
2227 // Generate OpConstant and OpTypeArray.
2228 //
2229
2230 //
2231 // Generate OpConstant for array length.
2232 //
2233 // Add constant for length to constant list.
2234 Constant *CstLength =
2235 ConstantInt::get(Type::getInt32Ty(module->getContext()), Length);
2236 uint32_t LengthID = getSPIRVValue(CstLength);
2237
2238 // Remember to generate ArrayStride later
2239 getTypesNeedingArrayStride().insert(Ty);
2240
2241 //
2242 // Generate OpTypeArray.
2243 //
2244 // Ops[0] = Element Type ID
2245 // Ops[1] = Array Length Constant ID
2246 SPIRVOperandVec Ops;
2247
2248 uint32_t EleTyID = getSPIRVType(ArrTy->getElementType());
2249 Ops << MkId(EleTyID) << MkId(LengthID);
2250
2251 RID = addSPIRVInst<kTypes>(spv::OpTypeArray, Ops);
2252 }
2253 break;
2254 }
2255 case Type::FixedVectorTyID: {
2256 auto VecTy = cast<VectorType>(Ty);
2257 // <4 x i8> is changed to i32 if i8 is not generally supported.
2258 if (!clspv::Option::Int8Support() &&
2259 VecTy->getElementType() == Type::getInt8Ty(module->getContext())) {
2260 if (VecTy->getNumElements() == 4) {
2261 RID = getSPIRVType(VecTy->getElementType());
2262 break;
2263 } else {
2264 Ty->print(errs());
2265 llvm_unreachable("Support above i8 vector type");
2266 }
2267 }
2268
2269 // Ops[0] = Component Type ID
2270 // Ops[1] = Component Count (Literal Number)
2271 SPIRVOperandVec Ops;
2272 Ops << MkId(getSPIRVType(VecTy->getElementType()))
2273 << MkNum(VecTy->getNumElements());
2274
2275 RID = addSPIRVInst<kTypes>(spv::OpTypeVector, Ops);
2276 break;
2277 }
2278 case Type::VoidTyID: {
2279 RID = addSPIRVInst<kTypes>(spv::OpTypeVoid);
2280 break;
2281 }
2282 case Type::FunctionTyID: {
2283 // Generate SPIRV instruction for function type.
2284 FunctionType *FTy = cast<FunctionType>(Ty);
2285
2286 // Ops[0] = Return Type ID
2287 // Ops[1] ... Ops[n] = Parameter Type IDs
2288 SPIRVOperandVec Ops;
2289
2290 // Find SPIRV instruction for return type
2291 Ops << MkId(getSPIRVType(FTy->getReturnType()));
2292
2293 // Find SPIRV instructions for parameter types
2294 for (unsigned k = 0; k < FTy->getNumParams(); k++) {
2295 // Find SPIRV instruction for parameter type.
2296 auto ParamTy = FTy->getParamType(k);
2297 if (ParamTy->isPointerTy()) {
2298 auto PointeeTy = ParamTy->getPointerElementType();
2299 if (PointeeTy->isStructTy() &&
2300 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
2301 ParamTy = PointeeTy;
2302 }
2303 }
2304
2305 Ops << MkId(getSPIRVType(ParamTy));
2306 }
2307
2308 RID = addSPIRVInst<kTypes>(spv::OpTypeFunction, Ops);
2309 break;
2310 }
2311 }
2312
2313 if (RID) {
2314 TypeMap[Ty] = RID;
2315 }
2316 return RID;
David Neto22f144c2017-06-12 14:26:21 -04002317}
2318
SJW77b87ad2020-04-21 14:37:52 -05002319void SPIRVProducerPass::GenerateSPIRVTypes() {
David Neto22f144c2017-06-12 14:26:21 -04002320 for (Type *Ty : getTypeList()) {
SJWf93f5f32020-05-05 07:27:56 -05002321 getSPIRVType(Ty);
David Netoc6f3ab22018-04-06 18:02:31 -04002322 }
David Neto22f144c2017-06-12 14:26:21 -04002323}
2324
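// Emits (or reuses) the SPIR-V instruction for a constant: OpConstant,
// OpConstantTrue/False, OpConstantComposite, OpConstantNull, or OpUndef.
// As written, a <4 x i8> constant is packed into a single i32 with element
// 0 in the most significant byte, e.g. <1, 2, 3, 4> becomes 0x01020304.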
SJWf93f5f32020-05-05 07:27:56 -05002325SPIRVID SPIRVProducerPass::getSPIRVConstant(Constant *Cst) {
David Neto22f144c2017-06-12 14:26:21 -04002326 ValueMapType &VMap = getValueMap();
David Neto482550a2018-03-24 05:21:07 -07002327 const bool hack_undef = clspv::Option::HackUndef();
David Neto22f144c2017-06-12 14:26:21 -04002328
SJWf93f5f32020-05-05 07:27:56 -05002329 SPIRVID RID = 0;
David Neto22f144c2017-06-12 14:26:21 -04002330
SJWf93f5f32020-05-05 07:27:56 -05002331 //
2332 // Generate OpConstant.
2333 //
2334 // Ops[0] = Result Type ID
2335 // Ops[1] .. Ops[n] = Values LiteralNumber
2336 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002337
SJWf93f5f32020-05-05 07:27:56 -05002338 Ops << MkId(getSPIRVType(Cst->getType()));
David Neto22f144c2017-06-12 14:26:21 -04002339
SJWf93f5f32020-05-05 07:27:56 -05002340 std::vector<uint32_t> LiteralNum;
2341 spv::Op Opcode = spv::OpNop;
David Neto22f144c2017-06-12 14:26:21 -04002342
SJWf93f5f32020-05-05 07:27:56 -05002343 if (isa<UndefValue>(Cst)) {
David Neto22f144c2017-06-12 14:26:21 -04002344 // Ops[0] = Result Type ID
SJWf93f5f32020-05-05 07:27:56 -05002345 Opcode = spv::OpUndef;
2346 if (hack_undef && IsTypeNullable(Cst->getType())) {
2347 Opcode = spv::OpConstantNull;
2348 }
2349 } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
2350 unsigned BitWidth = CI->getBitWidth();
2351 if (BitWidth == 1) {
2352 // If the bitwidth of the constant is 1, generate OpConstantTrue or
2353 // OpConstantFalse.
2354 if (CI->getZExtValue()) {
2355 // Ops[0] = Result Type ID
2356 Opcode = spv::OpConstantTrue;
David Neto22f144c2017-06-12 14:26:21 -04002357 } else {
SJWf93f5f32020-05-05 07:27:56 -05002358 // Ops[0] = Result Type ID
2359 Opcode = spv::OpConstantFalse;
David Neto22f144c2017-06-12 14:26:21 -04002360 }
SJWf93f5f32020-05-05 07:27:56 -05002361 } else {
2362 auto V = CI->getZExtValue();
2363 LiteralNum.push_back(V & 0xFFFFFFFF);
2364
2365 if (BitWidth > 32) {
2366 LiteralNum.push_back(V >> 32);
David Neto22f144c2017-06-12 14:26:21 -04002367 }
2368
2369 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002370
SJWf93f5f32020-05-05 07:27:56 -05002371 Ops << MkInteger(LiteralNum);
2372 }
2373 } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
2374 uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
2375 Type *CFPTy = CFP->getType();
2376 if (CFPTy->isFloatTy()) {
2377 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2378 } else if (CFPTy->isDoubleTy()) {
2379 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2380 LiteralNum.push_back(FPVal >> 32);
2381 } else if (CFPTy->isHalfTy()) {
2382 LiteralNum.push_back(FPVal & 0xFFFF);
2383 } else {
2384 CFPTy->print(errs());
2385 llvm_unreachable("Implement this ConstantFP Type");
2386 }
David Neto22f144c2017-06-12 14:26:21 -04002387
SJWf93f5f32020-05-05 07:27:56 -05002388 Opcode = spv::OpConstant;
David Neto49351ac2017-08-26 17:32:20 -04002389
SJWf93f5f32020-05-05 07:27:56 -05002390 Ops << MkFloat(LiteralNum);
2391 } else if (isa<ConstantDataSequential>(Cst) &&
2392 cast<ConstantDataSequential>(Cst)->isString()) {
2393 Cst->print(errs());
2394 llvm_unreachable("Implement this Constant");
David Neto49351ac2017-08-26 17:32:20 -04002395
SJWf93f5f32020-05-05 07:27:56 -05002396 } else if (const ConstantDataSequential *CDS =
2397 dyn_cast<ConstantDataSequential>(Cst)) {
2398 // Let's convert <4 x i8> constant to int constant specially.
2399 // This case occurs when all the values are specified as constant
2400 // ints.
2401 Type *CstTy = Cst->getType();
2402 if (is4xi8vec(CstTy)) {
2403 LLVMContext &Context = CstTy->getContext();
David Neto49351ac2017-08-26 17:32:20 -04002404
SJWf93f5f32020-05-05 07:27:56 -05002405 //
2406 // Generate OpConstant with OpTypeInt 32 0.
2407 //
2408 uint32_t IntValue = 0;
2409 for (unsigned k = 0; k < 4; k++) {
2410 const uint64_t Val = CDS->getElementAsInteger(k);
2411 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto49351ac2017-08-26 17:32:20 -04002412 }
2413
SJWf93f5f32020-05-05 07:27:56 -05002414 Type *i32 = Type::getInt32Ty(Context);
2415 Constant *CstInt = ConstantInt::get(i32, IntValue);
2416 RID = getSPIRVValue(CstInt);
2417 } else {
2418
David Neto49351ac2017-08-26 17:32:20 -04002419 // A normal constant-data-sequential case.
David Neto22f144c2017-06-12 14:26:21 -04002420 for (unsigned k = 0; k < CDS->getNumElements(); k++) {
2421 Constant *EleCst = CDS->getElementAsConstant(k);
SJWf93f5f32020-05-05 07:27:56 -05002422 uint32_t EleCstID = getSPIRVValue(EleCst);
David Neto257c3892018-04-11 13:19:45 -04002423 Ops << MkId(EleCstID);
David Neto22f144c2017-06-12 14:26:21 -04002424 }
2425
2426 Opcode = spv::OpConstantComposite;
SJWf93f5f32020-05-05 07:27:56 -05002427 }
2428 } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
2429 // Let's convert <4 x i8> constant to int constant specially.
2430 // This case occurs when at least one of the values is an undef.
2431 Type *CstTy = Cst->getType();
2432 if (is4xi8vec(CstTy)) {
2433 LLVMContext &Context = CstTy->getContext();
David Neto22f144c2017-06-12 14:26:21 -04002434
SJWf93f5f32020-05-05 07:27:56 -05002435 //
2436 // Generate OpConstant with OpTypeInt 32 0.
2437 //
2438 uint32_t IntValue = 0;
2439 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
2440 I != E; ++I) {
2441 uint64_t Val = 0;
2442 const Value *CV = *I;
2443 if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
2444 Val = CI2->getZExtValue();
David Neto22f144c2017-06-12 14:26:21 -04002445 }
SJWf93f5f32020-05-05 07:27:56 -05002446 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto22f144c2017-06-12 14:26:21 -04002447 }
2448
SJWf93f5f32020-05-05 07:27:56 -05002449 Type *i32 = Type::getInt32Ty(Context);
2450 Constant *CstInt = ConstantInt::get(i32, IntValue);
2451 RID = getSPIRVValue(CstInt);
2452 } else {
2453
David Neto22f144c2017-06-12 14:26:21 -04002454 // We use a constant composite in SPIR-V for our constant aggregate in
2455 // LLVM.
2456 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002457
2458 for (unsigned k = 0; k < CA->getNumOperands(); k++) {
SJWf93f5f32020-05-05 07:27:56 -05002459 uint32_t ElementConstantID = getSPIRVValue(CA->getAggregateElement(k));
David Neto22f144c2017-06-12 14:26:21 -04002460
2461 // And add an operand to the composite we are constructing
David Neto257c3892018-04-11 13:19:45 -04002462 Ops << MkId(ElementConstantID);
David Neto22f144c2017-06-12 14:26:21 -04002463 }
David Neto22f144c2017-06-12 14:26:21 -04002464 }
SJWf93f5f32020-05-05 07:27:56 -05002465 } else if (Cst->isNullValue()) {
2466 Opcode = spv::OpConstantNull;
2467 } else {
2468 Cst->print(errs());
2469 llvm_unreachable("Unsupported Constant???");
2470 }
David Neto22f144c2017-06-12 14:26:21 -04002471
SJWf93f5f32020-05-05 07:27:56 -05002472 if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
2473 // Null pointer requires variable pointers.
2474 setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
2475 }
alan-baker5b86ed72019-02-15 08:26:50 -05002476
SJWf93f5f32020-05-05 07:27:56 -05002477 if (RID == 0) {
2478 RID = addSPIRVInst<kConstants>(Opcode, Ops);
2479 }
2480
2481 VMap[Cst] = RID;
2482
2483 return RID;
2484}
2485
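// Returns the SPIR-V id already assigned to a value, generating it on the
// fly for constants; any other unmapped value is an internal error.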
2486SPIRVID SPIRVProducerPass::getSPIRVValue(Value *V) {
2487 auto II = ValueMap.find(V);
2488 if (II != ValueMap.end()) {
2489 assert(II->second);
2490 return II->second;
2491 }
2492 if (Constant *Cst = dyn_cast<Constant>(V)) {
2493 return getSPIRVConstant(Cst);
2494 } else {
2495 llvm_unreachable("Variable not found");
2496 }
2497}
2498
2499void SPIRVProducerPass::GenerateSPIRVConstants() {
2500 ValueList &CstList = getConstantList();
2501
2502 for (uint32_t i = 0; i < CstList.size(); i++) {
2503 // UniqueVector ids are 1-based.
2504 Constant *Cst = cast<Constant>(CstList[i + 1]);
2505 getSPIRVValue(Cst);
David Neto22f144c2017-06-12 14:26:21 -04002506 }
2507}
2508
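// Emits one UniformConstant OpVariable per distinct literal sampler value,
// decorated with the DescriptorSet and Binding recorded from the
// literal-sampler builtin calls (allocating fresh bindings for sampler-map
// entries that are never referenced), and records used samplers in the
// descriptor map.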
SJW77b87ad2020-04-21 14:37:52 -05002509void SPIRVProducerPass::GenerateSamplers() {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002510 auto &sampler_map = getSamplerMap();
alan-baker09cb9802019-12-10 13:16:27 -05002511 SamplerLiteralToIDMap.clear();
David Neto862b7d82018-06-14 18:48:37 -04002512 DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
2513 DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;
David Neto22f144c2017-06-12 14:26:21 -04002514
David Neto862b7d82018-06-14 18:48:37 -04002515 // We might have samplers in the sampler map that are not used
2516 // in the translation unit. We need to allocate variables
2517 // for them and bindings too.
2518 DenseSet<unsigned> used_bindings;
David Neto22f144c2017-06-12 14:26:21 -04002519
SJW77b87ad2020-04-21 14:37:52 -05002520 auto *var_fn = module->getFunction(clspv::LiteralSamplerFunction());
alan-baker09cb9802019-12-10 13:16:27 -05002521 // Return if there are no literal samplers.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002522 if (!var_fn)
2523 return;
alan-baker09cb9802019-12-10 13:16:27 -05002524
David Neto862b7d82018-06-14 18:48:37 -04002525 for (auto user : var_fn->users()) {
2526 // Populate SamplerLiteralToDescriptorSetMap and
2527 // SamplerLiteralToBindingMap.
2528 //
2529 // Look for calls like
2530 // call %opencl.sampler_t addrspace(2)*
2531 // @clspv.sampler.var.literal(
2532 // i32 descriptor,
2533 // i32 binding,
alan-baker09cb9802019-12-10 13:16:27 -05002534 // i32 (index-into-sampler-map|sampler_mask))
alan-bakerb6b09dc2018-11-08 16:59:28 -05002535 if (auto *call = dyn_cast<CallInst>(user)) {
alan-baker09cb9802019-12-10 13:16:27 -05002536 const auto third_param = static_cast<unsigned>(
alan-bakerb6b09dc2018-11-08 16:59:28 -05002537 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
alan-baker09cb9802019-12-10 13:16:27 -05002538 auto sampler_value = third_param;
2539 if (clspv::Option::UseSamplerMap()) {
2540 if (third_param >= sampler_map.size()) {
2541 errs() << "Out of bounds index to sampler map: " << third_param;
2542 llvm_unreachable("bad sampler init: out of bounds");
2543 }
2544 sampler_value = sampler_map[third_param].first;
David Neto862b7d82018-06-14 18:48:37 -04002545 }
2546
David Neto862b7d82018-06-14 18:48:37 -04002547 const auto descriptor_set = static_cast<unsigned>(
2548 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
2549 const auto binding = static_cast<unsigned>(
2550 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
2551
2552 SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
2553 SamplerLiteralToBindingMap[sampler_value] = binding;
2554 used_bindings.insert(binding);
2555 }
2556 }
2557
alan-baker09cb9802019-12-10 13:16:27 -05002558 DenseSet<size_t> seen;
2559 for (auto user : var_fn->users()) {
2560 if (!isa<CallInst>(user))
2561 continue;
2562
2563 auto call = cast<CallInst>(user);
2564 const unsigned third_param = static_cast<unsigned>(
2565 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
2566
2567 // Already allocated a variable for this value.
2568 if (!seen.insert(third_param).second)
2569 continue;
2570
2571 auto sampler_value = third_param;
2572 if (clspv::Option::UseSamplerMap()) {
2573 sampler_value = sampler_map[third_param].first;
2574 }
2575
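    // A sketch of what this loop emits for each distinct literal sampler
    // (result IDs below are illustrative, not the ones actually allocated):
    //   %sampler = OpVariable %_ptr_UniformConstant_sampler_t UniformConstant
    //   OpDecorate %sampler DescriptorSet 0   ; literal samplers normally use set 0
    //   OpDecorate %sampler Binding <binding>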
David Neto22f144c2017-06-12 14:26:21 -04002576 // Generate OpVariable.
2577 //
2578 // Ops[0] : Result Type ID
2579 // Ops[1] : Storage Class
SJWf93f5f32020-05-05 07:27:56 -05002580 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002581
SJWf93f5f32020-05-05 07:27:56 -05002582 Ops << MkId(getSPIRVType(SamplerTy))
David Neto257c3892018-04-11 13:19:45 -04002583 << MkNum(spv::StorageClassUniformConstant);
David Neto22f144c2017-06-12 14:26:21 -04002584
SJWf93f5f32020-05-05 07:27:56 -05002585 auto sampler_var_id = addSPIRVInst<kGlobalVariables>(spv::OpVariable, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002586
alan-baker09cb9802019-12-10 13:16:27 -05002587 SamplerLiteralToIDMap[sampler_value] = sampler_var_id;
David Neto22f144c2017-06-12 14:26:21 -04002588
David Neto862b7d82018-06-14 18:48:37 -04002589 unsigned descriptor_set;
2590 unsigned binding;
alan-baker09cb9802019-12-10 13:16:27 -05002591 if (SamplerLiteralToBindingMap.find(sampler_value) ==
alan-bakerb6b09dc2018-11-08 16:59:28 -05002592 SamplerLiteralToBindingMap.end()) {
David Neto862b7d82018-06-14 18:48:37 -04002593 // This sampler is not actually used. Find the next one.
2594 for (binding = 0; used_bindings.count(binding); binding++)
2595 ;
2596 descriptor_set = 0; // Literal samplers always use descriptor set 0.
2597 used_bindings.insert(binding);
2598 } else {
alan-baker09cb9802019-12-10 13:16:27 -05002599 descriptor_set = SamplerLiteralToDescriptorSetMap[sampler_value];
2600 binding = SamplerLiteralToBindingMap[sampler_value];
alan-bakercff80152019-06-15 00:38:00 -04002601
alan-baker09cb9802019-12-10 13:16:27 -05002602 version0::DescriptorMapEntry::SamplerData sampler_data = {sampler_value};
alan-bakercff80152019-06-15 00:38:00 -04002603 descriptorMapEntries->emplace_back(std::move(sampler_data),
2604 descriptor_set, binding);
David Neto862b7d82018-06-14 18:48:37 -04002605 }
2606
SJW69939d52020-04-16 07:29:07 -05002607 // Ops[0] = Target ID
2608 // Ops[1] = Decoration (DescriptorSet)
2609 // Ops[2] = LiteralNumber according to Decoration
2610 Ops.clear();
2611
David Neto862b7d82018-06-14 18:48:37 -04002612 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationDescriptorSet)
2613 << MkNum(descriptor_set);
David Neto22f144c2017-06-12 14:26:21 -04002614
SJWf93f5f32020-05-05 07:27:56 -05002615 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002616
2617 // Ops[0] = Target ID
2618 // Ops[1] = Decoration (Binding)
2619 // Ops[2] = LiteralNumber according to Decoration
2620 Ops.clear();
David Neto862b7d82018-06-14 18:48:37 -04002621 Ops << MkId(sampler_var_id) << MkNum(spv::DecorationBinding)
2622 << MkNum(binding);
David Neto22f144c2017-06-12 14:26:21 -04002623
SJWf93f5f32020-05-05 07:27:56 -05002624 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002625 }
David Neto862b7d82018-06-14 18:48:37 -04002626}
David Neto22f144c2017-06-12 14:26:21 -04002627
SJW77b87ad2020-04-21 14:37:52 -05002628void SPIRVProducerPass::GenerateResourceVars() {
David Neto862b7d82018-06-14 18:48:37 -04002629 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04002630
David Neto862b7d82018-06-14 18:48:37 -04002631 // Generate variables. Make one for each resource var info object.
2632 for (auto *info : ModuleOrderedResourceVars) {
2633 Type *type = info->var_fn->getReturnType();
2634 // Remap the address space for opaque types.
2635 switch (info->arg_kind) {
2636 case clspv::ArgKind::Sampler:
2637 case clspv::ArgKind::ReadOnlyImage:
2638 case clspv::ArgKind::WriteOnlyImage:
2639 type = PointerType::get(type->getPointerElementType(),
2640 clspv::AddressSpace::UniformConstant);
2641 break;
2642 default:
2643 break;
2644 }
David Neto22f144c2017-06-12 14:26:21 -04002645
SJWf93f5f32020-05-05 07:27:56 -05002646 const auto type_id = getSPIRVType(type);
David Neto862b7d82018-06-14 18:48:37 -04002647 const auto sc = GetStorageClassForArgKind(info->arg_kind);
SJWf93f5f32020-05-05 07:27:56 -05002648 SPIRVOperandVec Ops;
David Neto862b7d82018-06-14 18:48:37 -04002649 Ops << MkId(type_id) << MkNum(sc);
David Neto22f144c2017-06-12 14:26:21 -04002650
SJWf93f5f32020-05-05 07:27:56 -05002651 info->var_id = addSPIRVInst<kGlobalVariables>(spv::OpVariable, Ops);
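    // For a typical storage-buffer argument this is roughly (IDs and pointee
    // type illustrative):
    //   %resource = OpVariable %_ptr_StorageBuffer_S StorageBuffer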
David Neto862b7d82018-06-14 18:48:37 -04002652
2653 // Map calls to the variable-builtin-function.
2654 for (auto &U : info->var_fn->uses()) {
2655 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
2656 const auto set = unsigned(
2657 dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
2658 const auto binding = unsigned(
2659 dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
2660 if (set == info->descriptor_set && binding == info->binding) {
2661 switch (info->arg_kind) {
2662 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002663 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002664 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04002665 case clspv::ArgKind::PodUBO:
2666 case clspv::ArgKind::PodPushConstant:
David Neto862b7d82018-06-14 18:48:37 -04002667 // The call maps to the variable directly.
2668 VMap[call] = info->var_id;
2669 break;
2670 case clspv::ArgKind::Sampler:
2671 case clspv::ArgKind::ReadOnlyImage:
2672 case clspv::ArgKind::WriteOnlyImage:
2673 // The call maps to a load we generate later.
2674 ResourceVarDeferredLoadCalls[call] = info->var_id;
2675 break;
2676 default:
2677 llvm_unreachable("Unhandled arg kind");
2678 }
2679 }
David Neto22f144c2017-06-12 14:26:21 -04002680 }
David Neto862b7d82018-06-14 18:48:37 -04002681 }
2682 }
David Neto22f144c2017-06-12 14:26:21 -04002683
David Neto862b7d82018-06-14 18:48:37 -04002684 // Generate associated decorations.
SJWf93f5f32020-05-05 07:27:56 -05002685 SPIRVOperandVec Ops;
David Neto862b7d82018-06-14 18:48:37 -04002686 for (auto *info : ModuleOrderedResourceVars) {
alan-baker9b0ec3c2020-04-06 14:45:34 -04002687 // Push constants don't need descriptor set or binding decorations.
2688 if (info->arg_kind == clspv::ArgKind::PodPushConstant)
2689 continue;
2690
David Neto862b7d82018-06-14 18:48:37 -04002691 // Decorate with DescriptorSet and Binding.
2692 Ops.clear();
2693 Ops << MkId(info->var_id) << MkNum(spv::DecorationDescriptorSet)
2694 << MkNum(info->descriptor_set);
SJWf93f5f32020-05-05 07:27:56 -05002695 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto862b7d82018-06-14 18:48:37 -04002696
2697 Ops.clear();
2698 Ops << MkId(info->var_id) << MkNum(spv::DecorationBinding)
2699 << MkNum(info->binding);
SJWf93f5f32020-05-05 07:27:56 -05002700 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto862b7d82018-06-14 18:48:37 -04002701
alan-bakere9308012019-03-15 10:25:13 -04002702 if (info->coherent) {
2703 // Decorate with Coherent if required for the variable.
2704 Ops.clear();
2705 Ops << MkId(info->var_id) << MkNum(spv::DecorationCoherent);
SJWf93f5f32020-05-05 07:27:56 -05002706 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere9308012019-03-15 10:25:13 -04002707 }
2708
David Neto862b7d82018-06-14 18:48:37 -04002709 // Generate NonWritable and NonReadable
2710 switch (info->arg_kind) {
2711 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002712 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002713 if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
2714 clspv::AddressSpace::Constant) {
2715 Ops.clear();
2716 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonWritable);
SJWf93f5f32020-05-05 07:27:56 -05002717 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002718 }
David Neto862b7d82018-06-14 18:48:37 -04002719 break;
David Neto862b7d82018-06-14 18:48:37 -04002720 case clspv::ArgKind::WriteOnlyImage:
2721 Ops.clear();
2722 Ops << MkId(info->var_id) << MkNum(spv::DecorationNonReadable);
SJWf93f5f32020-05-05 07:27:56 -05002723 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto862b7d82018-06-14 18:48:37 -04002724 break;
2725 default:
2726 break;
David Neto22f144c2017-06-12 14:26:21 -04002727 }
2728 }
2729}
2730
SJW77b87ad2020-04-21 14:37:52 -05002731void SPIRVProducerPass::GeneratePushConstantDescriptorMapEntries() {
Kévin Petitbbbda972020-03-03 19:16:31 +00002732
SJW77b87ad2020-04-21 14:37:52 -05002733 if (auto GV = module->getGlobalVariable(clspv::PushConstantsVariableName())) {
2734 auto const &DL = module->getDataLayout();
Kévin Petitbbbda972020-03-03 19:16:31 +00002735 auto MD = GV->getMetadata(clspv::PushConstantsMetadataName());
2736 auto STy = cast<StructType>(GV->getValueType());
2737
2738 for (unsigned i = 0; i < STy->getNumElements(); i++) {
2739 auto pc = static_cast<clspv::PushConstant>(
2740 mdconst::extract<ConstantInt>(MD->getOperand(i))->getZExtValue());
2741 auto memberType = STy->getElementType(i);
2742 auto offset = GetExplicitLayoutStructMemberOffset(STy, i, DL);
2743 unsigned previousOffset = 0;
2744 if (i > 0) {
2745 previousOffset = GetExplicitLayoutStructMemberOffset(STy, i - 1, DL);
2746 }
2747 auto size = static_cast<uint32_t>(GetTypeSizeInBits(memberType, DL)) / 8;
SJW77b87ad2020-04-21 14:37:52 -05002748 assert(isValidExplicitLayout(*module, STy, i,
2749 spv::StorageClassPushConstant, offset,
2750 previousOffset));
Kévin Petitbbbda972020-03-03 19:16:31 +00002751 version0::DescriptorMapEntry::PushConstantData data = {pc, offset, size};
2752 descriptorMapEntries->emplace_back(std::move(data));
2753 }
2754 }
2755}
2756
SJW77b87ad2020-04-21 14:37:52 -05002757void SPIRVProducerPass::GenerateSpecConstantDescriptorMapEntries() {
2758 for (auto pair : clspv::GetSpecConstants(module)) {
alan-bakera1be3322020-04-20 12:48:18 -04002759 auto kind = pair.first;
2760 auto id = pair.second;
2761
2762 // Local memory size is only used for kernel arguments.
2763 if (kind == SpecConstant::kLocalMemorySize)
2764 continue;
2765
2766 version0::DescriptorMapEntry::SpecConstantData data = {kind, id};
2767 descriptorMapEntries->emplace_back(std::move(data));
2768 }
2769}
2770
David Neto22f144c2017-06-12 14:26:21 -04002771void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
David Neto22f144c2017-06-12 14:26:21 -04002772 ValueMapType &VMap = getValueMap();
2773 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
David Neto85082642018-03-24 06:55:20 -07002774 const DataLayout &DL = GV.getParent()->getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04002775
2776 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
2777 Type *Ty = GV.getType();
2778 PointerType *PTy = cast<PointerType>(Ty);
2779
2780 uint32_t InitializerID = 0;
2781
2782 // Workgroup size is handled differently (it goes into a constant)
2783 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2784 std::vector<bool> HasMDVec;
2785 uint32_t PrevXDimCst = 0xFFFFFFFF;
2786 uint32_t PrevYDimCst = 0xFFFFFFFF;
2787 uint32_t PrevZDimCst = 0xFFFFFFFF;
2788 for (Function &Func : *GV.getParent()) {
2789 if (Func.isDeclaration()) {
2790 continue;
2791 }
2792
2793 // We only need to check kernels.
2794 if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
2795 continue;
2796 }
2797
2798 if (const MDNode *MD =
2799 dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
2800 uint32_t CurXDimCst = static_cast<uint32_t>(
2801 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
2802 uint32_t CurYDimCst = static_cast<uint32_t>(
2803 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
2804 uint32_t CurZDimCst = static_cast<uint32_t>(
2805 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
2806
2807 if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
2808 PrevZDimCst == 0xFFFFFFFF) {
2809 PrevXDimCst = CurXDimCst;
2810 PrevYDimCst = CurYDimCst;
2811 PrevZDimCst = CurZDimCst;
2812 } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
2813 CurZDimCst != PrevZDimCst) {
2814 llvm_unreachable(
2815 "reqd_work_group_size must be the same across all kernels");
2816 } else {
2817 continue;
2818 }
2819
2820 //
2821 // Generate OpConstantComposite.
2822 //
2823 // Ops[0] : Result Type ID
2824 // Ops[1] : Constant size for x dimension.
2825 // Ops[2] : Constant size for y dimension.
2826 // Ops[3] : Constant size for z dimension.
SJWf93f5f32020-05-05 07:27:56 -05002827 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002828
2829 uint32_t XDimCstID =
SJWf93f5f32020-05-05 07:27:56 -05002830 getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(0)));
David Neto22f144c2017-06-12 14:26:21 -04002831 uint32_t YDimCstID =
SJWf93f5f32020-05-05 07:27:56 -05002832 getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(1)));
David Neto22f144c2017-06-12 14:26:21 -04002833 uint32_t ZDimCstID =
SJWf93f5f32020-05-05 07:27:56 -05002834 getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(2)));
David Neto22f144c2017-06-12 14:26:21 -04002835
SJWf93f5f32020-05-05 07:27:56 -05002836 Ops << MkId(getSPIRVType(Ty->getPointerElementType()))
2837 << MkId(XDimCstID) << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002838
SJWf93f5f32020-05-05 07:27:56 -05002839 InitializerID =
2840 addSPIRVInst<kGlobalVariables>(spv::OpConstantComposite, Ops);
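        // For example, reqd_work_group_size(8, 4, 2) yields roughly (IDs
        // illustrative):
        //   %wgsize = OpConstantComposite %v3uint %uint_8 %uint_4 %uint_2
        // and %wgsize is decorated BuiltIn WorkgroupSize further below.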
David Neto22f144c2017-06-12 14:26:21 -04002841
2842 HasMDVec.push_back(true);
2843 } else {
2844 HasMDVec.push_back(false);
2845 }
2846 }
2847
2848 // Check all kernels have same definitions for work_group_size.
2849 bool HasMD = false;
2850 if (!HasMDVec.empty()) {
2851 HasMD = HasMDVec[0];
2852 for (uint32_t i = 1; i < HasMDVec.size(); i++) {
2853 if (HasMD != HasMDVec[i]) {
2854 llvm_unreachable(
2855 "Kernels should have consistent work group size definition");
2856 }
2857 }
2858 }
2859
2860 // If the kernels do not have reqd_work_group_size metadata, or non-uniform
2861 // NDRanges are supported, generate OpSpecConstants for the x/y/z dimensions.
Kévin Petit21c23c62020-04-29 01:38:28 +01002862 if (!HasMD || clspv::Option::NonUniformNDRangeSupported()) {
David Neto22f144c2017-06-12 14:26:21 -04002863 //
2864 // Generate OpSpecConstants for x/y/z dimension.
2865 //
2866 // Ops[0] : Result Type ID
2867 // Ops[1] : Constant size for x/y/z dimension (Literal Number).
2868 uint32_t XDimCstID = 0;
2869 uint32_t YDimCstID = 0;
2870 uint32_t ZDimCstID = 0;
2871
alan-bakera1be3322020-04-20 12:48:18 -04002872 // Allocate spec constants for workgroup size.
SJW77b87ad2020-04-21 14:37:52 -05002873 clspv::AddWorkgroupSpecConstants(module);
alan-bakera1be3322020-04-20 12:48:18 -04002874
SJWf93f5f32020-05-05 07:27:56 -05002875 SPIRVOperandVec Ops;
2876 uint32_t result_type_id = getSPIRVType(
2877 dyn_cast<VectorType>(Ty->getPointerElementType())->getElementType());
David Neto22f144c2017-06-12 14:26:21 -04002878
David Neto257c3892018-04-11 13:19:45 -04002879 // X Dimension
2880 Ops << MkId(result_type_id) << MkNum(1);
SJWf93f5f32020-05-05 07:27:56 -05002881 XDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002882
2883 // Y Dimension
2884 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002885 Ops << MkId(result_type_id) << MkNum(1);
SJWf93f5f32020-05-05 07:27:56 -05002886 YDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002887
2888 // Z Dimension
2889 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04002890 Ops << MkId(result_type_id) << MkNum(1);
SJWf93f5f32020-05-05 07:27:56 -05002891 ZDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002892
David Neto257c3892018-04-11 13:19:45 -04002893 BuiltinDimVec.push_back(XDimCstID);
2894 BuiltinDimVec.push_back(YDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002895 BuiltinDimVec.push_back(ZDimCstID);
2896
David Neto22f144c2017-06-12 14:26:21 -04002897 //
2898 // Generate OpSpecConstantComposite.
2899 //
2900 // Ops[0] : Result Type ID
2901 // Ops[1] : Constant size for x dimension.
2902 // Ops[2] : Constant size for y dimension.
2903 // Ops[3] : Constant size for z dimension.
David Neto22f144c2017-06-12 14:26:21 -04002904 Ops.clear();
SJWf93f5f32020-05-05 07:27:56 -05002905 Ops << MkId(getSPIRVType(Ty->getPointerElementType())) << MkId(XDimCstID)
David Neto257c3892018-04-11 13:19:45 -04002906 << MkId(YDimCstID) << MkId(ZDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002907
SJWf93f5f32020-05-05 07:27:56 -05002908 InitializerID =
2909 addSPIRVInst<kConstants>(spv::OpSpecConstantComposite, Ops);
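      // Sketch of the spec-constant form (IDs illustrative):
      //   %x = OpSpecConstant %uint 1
      //   %y = OpSpecConstant %uint 1
      //   %z = OpSpecConstant %uint 1
      //   %wgsize = OpSpecConstantComposite %v3uint %x %y %z
      // The SpecId decorations for %x/%y/%z are emitted in GenerateModuleInfo().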
David Neto22f144c2017-06-12 14:26:21 -04002910 }
alan-bakerbed3a882020-04-21 14:42:41 -04002911 } else if (BuiltinType == spv::BuiltInWorkDim) {
2912 // 1. Generate a specialization constant with a default of 3.
2913 // 2. Allocate and annotate a SpecId for the constant.
2914 // 3. Use the spec constant as the initializer for the variable.
SJWf93f5f32020-05-05 07:27:56 -05002915 SPIRVOperandVec Ops;
alan-bakerbed3a882020-04-21 14:42:41 -04002916
2917 //
2918 // Generate OpSpecConstant.
2919 //
2920 // Ops[0] : Result Type ID
2921 // Ops[1] : Default literal value
alan-bakerbed3a882020-04-21 14:42:41 -04002922
SJWf93f5f32020-05-05 07:27:56 -05002923 Ops << MkId(getSPIRVType(IntegerType::get(GV.getContext(), 32)))
2924 << MkNum(3);
alan-bakerbed3a882020-04-21 14:42:41 -04002925
SJWf93f5f32020-05-05 07:27:56 -05002926 InitializerID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakerbed3a882020-04-21 14:42:41 -04002927
2928 //
2929 // Generate SpecId decoration.
2930 //
2931 // Ops[0] : target
2932 // Ops[1] : decoration
2933 // Ops[2] : SpecId
Alan Baker75ccc252020-04-21 17:11:52 -04002934 auto spec_id = AllocateSpecConstant(module, SpecConstant::kWorkDim);
alan-bakerbed3a882020-04-21 14:42:41 -04002935 Ops.clear();
2936 Ops << MkId(InitializerID) << MkNum(spv::DecorationSpecId)
2937 << MkNum(spec_id);
2938
SJWf93f5f32020-05-05 07:27:56 -05002939 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
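    // Net effect, roughly (IDs illustrative):
    //   %workdim = OpSpecConstant %uint 3
    //   OpDecorate %workdim SpecId <allocated id>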
alan-bakere1996972020-05-04 08:38:12 -04002940 } else if (BuiltinType == spv::BuiltInGlobalOffset) {
2941 // 1. Generate a spec constant with a default of {0, 0, 0}.
2942 // 2. Allocate and annotate SpecIds for the constants.
2943 // 3. Use the spec constant as the initializer for the variable.
SJWf93f5f32020-05-05 07:27:56 -05002944 SPIRVOperandVec Ops;
alan-bakere1996972020-05-04 08:38:12 -04002945
2946 //
2947 // Generate OpSpecConstant for each dimension.
2948 //
2949 // Ops[0] : Result Type ID
2950 // Ops[1] : Default literal value
2951 //
SJWf93f5f32020-05-05 07:27:56 -05002952 Ops << MkId(getSPIRVType(IntegerType::get(GV.getContext(), 32)))
2953 << MkNum(0);
2954 uint32_t x_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002955
alan-bakere1996972020-05-04 08:38:12 -04002956 Ops.clear();
SJWf93f5f32020-05-05 07:27:56 -05002957 Ops << MkId(getSPIRVType(IntegerType::get(GV.getContext(), 32)))
2958 << MkNum(0);
2959 uint32_t y_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002960
alan-bakere1996972020-05-04 08:38:12 -04002961 Ops.clear();
SJWf93f5f32020-05-05 07:27:56 -05002962 Ops << MkId(getSPIRVType(IntegerType::get(GV.getContext(), 32)))
2963 << MkNum(0);
2964 uint32_t z_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002965
2966 //
2967 // Generate SpecId decoration for each dimension.
2968 //
2969 // Ops[0] : target
2970 // Ops[1] : decoration
2971 // Ops[2] : SpecId
2972 //
2973 auto spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetX);
2974 Ops.clear();
2975 Ops << MkId(x_id) << MkNum(spv::DecorationSpecId) << MkNum(spec_id);
SJWf93f5f32020-05-05 07:27:56 -05002976 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002977
2978 spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetY);
2979 Ops.clear();
2980 Ops << MkId(y_id) << MkNum(spv::DecorationSpecId) << MkNum(spec_id);
SJWf93f5f32020-05-05 07:27:56 -05002981 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002982
2983 spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetZ);
2984 Ops.clear();
2985 Ops << MkId(z_id) << MkNum(spv::DecorationSpecId) << MkNum(spec_id);
SJWf93f5f32020-05-05 07:27:56 -05002986 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002987
2988 //
2989 // Generate OpSpecConstantComposite.
2990 //
2991 // Ops[0] : type id
2992 // Ops[1..n-1] : elements
2993 //
alan-bakere1996972020-05-04 08:38:12 -04002994 Ops.clear();
SJWf93f5f32020-05-05 07:27:56 -05002995 Ops << MkId(getSPIRVType(GV.getType()->getPointerElementType()))
2996 << MkId(x_id) << MkId(y_id) << MkId(z_id);
2997 InitializerID = addSPIRVInst<kConstants>(spv::OpSpecConstantComposite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002998 }
2999
David Neto22f144c2017-06-12 14:26:21 -04003000 //
3001 // Generate OpVariable.
3002 //
3003 // Ops[0] : Result Type ID
3004 // Ops[1] : Storage Class
SJWf93f5f32020-05-05 07:27:56 -05003005 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003006
David Neto85082642018-03-24 06:55:20 -07003007 const auto AS = PTy->getAddressSpace();
SJWf93f5f32020-05-05 07:27:56 -05003008 Ops << MkId(getSPIRVType(Ty)) << MkNum(GetStorageClass(AS));
David Neto22f144c2017-06-12 14:26:21 -04003009
David Neto85082642018-03-24 06:55:20 -07003010 const bool module_scope_constant_external_init =
David Neto862b7d82018-06-14 18:48:37 -04003011 (AS == AddressSpace::Constant) && GV.hasInitializer() &&
David Neto85082642018-03-24 06:55:20 -07003012 clspv::Option::ModuleConstantsInStorageBuffer();
3013
Kévin Petit23d5f182019-08-13 16:21:29 +01003014 if (GV.hasInitializer()) {
3015 auto GVInit = GV.getInitializer();
3016 if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
SJWf93f5f32020-05-05 07:27:56 -05003017 InitializerID = getSPIRVValue(GVInit);
David Neto85082642018-03-24 06:55:20 -07003018 }
3019 }
Kévin Petit23d5f182019-08-13 16:21:29 +01003020
3021 if (0 != InitializerID) {
Kévin Petitbbbda972020-03-03 19:16:31 +00003022 // Emit the ID of the initializer as part of the variable definition.
Kévin Petit23d5f182019-08-13 16:21:29 +01003023 Ops << MkId(InitializerID);
3024 }
SJWf93f5f32020-05-05 07:27:56 -05003025 const uint32_t var_id = addSPIRVInst<kGlobalVariables>(spv::OpVariable, Ops);
David Neto85082642018-03-24 06:55:20 -07003026
SJWf93f5f32020-05-05 07:27:56 -05003027 VMap[&GV] = var_id;
David Neto22f144c2017-06-12 14:26:21 -04003028
alan-bakere1996972020-05-04 08:38:12 -04003029 auto IsOpenCLBuiltin = [](spv::BuiltIn builtin) {
3030 return builtin == spv::BuiltInWorkDim ||
3031 builtin == spv::BuiltInGlobalOffset;
3032 };
3033
alan-bakere1996972020-05-04 08:38:12 -04003034 // If we have a builtin (not an OpenCL builtin).
3035 if (spv::BuiltInMax != BuiltinType && !IsOpenCLBuiltin(BuiltinType)) {
David Neto22f144c2017-06-12 14:26:21 -04003036 //
3037 // Generate OpDecorate.
3038 //
3039 // DOps[0] = Target ID
3040 // DOps[1] = Decoration (Builtin)
3041 // DOps[2] = BuiltIn ID
3042 uint32_t ResultID;
3043
3044 // WorkgroupSize is different: we decorate the constant composite that has
3045 // its value, rather than the variable that we use to access the value.
3046 if (spv::BuiltInWorkgroupSize == BuiltinType) {
3047 ResultID = InitializerID;
David Netoa60b00b2017-09-15 16:34:09 -04003048 // Save both the value and variable IDs for later.
3049 WorkgroupSizeValueID = InitializerID;
SJWf93f5f32020-05-05 07:27:56 -05003050 WorkgroupSizeVarID = getSPIRVValue(&GV);
David Neto22f144c2017-06-12 14:26:21 -04003051 } else {
SJWf93f5f32020-05-05 07:27:56 -05003052 ResultID = getSPIRVValue(&GV);
David Neto22f144c2017-06-12 14:26:21 -04003053 }
3054
SJWf93f5f32020-05-05 07:27:56 -05003055 SPIRVOperandVec DOps;
David Neto257c3892018-04-11 13:19:45 -04003056 DOps << MkId(ResultID) << MkNum(spv::DecorationBuiltIn)
3057 << MkNum(BuiltinType);
David Neto22f144c2017-06-12 14:26:21 -04003058
SJWf93f5f32020-05-05 07:27:56 -05003059 addSPIRVInst<kAnnotations>(spv::OpDecorate, DOps);
David Neto85082642018-03-24 06:55:20 -07003060 } else if (module_scope_constant_external_init) {
3061 // This module scope constant is initialized from a storage buffer with data
3062 // provided by the host at binding 0 of the next descriptor set.
SJW77b87ad2020-04-21 14:37:52 -05003063 const uint32_t descriptor_set = TakeDescriptorIndex(module);
David Neto85082642018-03-24 06:55:20 -07003064
David Neto862b7d82018-06-14 18:48:37 -04003065 // Emit the initializer to the descriptor map file.
David Neto85082642018-03-24 06:55:20 -07003066 // Use "kind,buffer" to indicate storage buffer. We might want to expand
3067 // that later to other types, like uniform buffer.
alan-bakerf5e5f692018-11-27 08:33:24 -05003068 std::string hexbytes;
3069 llvm::raw_string_ostream str(hexbytes);
3070 clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003071 version0::DescriptorMapEntry::ConstantData constant_data = {ArgKind::Buffer,
3072 str.str()};
3073 descriptorMapEntries->emplace_back(std::move(constant_data), descriptor_set,
3074 0);
David Neto85082642018-03-24 06:55:20 -07003075
SJWf93f5f32020-05-05 07:27:56 -05003076 SPIRVOperandVec DOps;
David Neto85082642018-03-24 06:55:20 -07003077
3078 // OpDecorate %var DescriptorSet <descriptor_set>
David Neto257c3892018-04-11 13:19:45 -04003079 DOps << MkId(var_id) << MkNum(spv::DecorationDescriptorSet)
3080 << MkNum(descriptor_set);
SJWf93f5f32020-05-05 07:27:56 -05003081 addSPIRVInst<kAnnotations>(spv::OpDecorate, DOps);
SJW69939d52020-04-16 07:29:07 -05003082
3083 // OpDecorate %var Binding <binding>
3084 DOps.clear();
3085 DOps << MkId(var_id) << MkNum(spv::DecorationBinding) << MkNum(0);
SJWf93f5f32020-05-05 07:27:56 -05003086 addSPIRVInst<kAnnotations>(spv::OpDecorate, DOps);
David Netoc6f3ab22018-04-06 18:02:31 -04003087 }
3088}
3089
SJW77b87ad2020-04-21 14:37:52 -05003090void SPIRVProducerPass::GenerateDescriptorMapInfo(Function &F) {
3091 const auto &DL = module->getDataLayout();
David Netoc5fb5242018-07-30 13:28:31 -04003092 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
3093 return;
3094 }
Kévin Petit717f8572020-04-06 17:31:53 +01003095 // Add entries for each kernel
3096 version0::DescriptorMapEntry::KernelDeclData kernel_decl_data = {
3097 F.getName().str()};
3098 descriptorMapEntries->emplace_back(std::move(kernel_decl_data));
3099
David Neto862b7d82018-06-14 18:48:37 -04003100 // Gather the list of resources that are used by this function's arguments.
3101 auto &resource_var_at_index = FunctionToResourceVarsMap[&F];
3102
David Neto862b7d82018-06-14 18:48:37 -04003103 auto *fty = F.getType()->getPointerElementType();
3104 auto *func_ty = dyn_cast<FunctionType>(fty);
3105
alan-baker038e9242019-04-19 22:14:41 -04003106 // If we've clustered POD arguments, then argument details are in metadata.
David Neto862b7d82018-06-14 18:48:37 -04003107 // If an argument maps to a resource variable, then get descriptor set and
3108 // binding from the resource variable. Other info comes from the metadata.
alan-bakerff6c9292020-05-04 08:32:09 -04003109 const auto *arg_map = F.getMetadata(clspv::KernelArgMapMetadataName());
3110 auto local_spec_id_md =
3111 module->getNamedMetadata(clspv::LocalSpecIdMetadataName());
David Neto862b7d82018-06-14 18:48:37 -04003112 if (arg_map) {
3113 for (const auto &arg : arg_map->operands()) {
3114 const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
alan-bakerff6c9292020-05-04 08:32:09 -04003115 assert(arg_node->getNumOperands() == 6);
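      // Each entry is expected to have the shape (values illustrative):
      //   !{!"arg_name", i32 old_index, i32 new_index, i32 offset, i32 size,
      //     !"arg_kind"}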
David Neto862b7d82018-06-14 18:48:37 -04003116 const auto name =
3117 dyn_cast<MDString>(arg_node->getOperand(0))->getString();
3118 const auto old_index =
3119 dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
3120 // Remapped argument index
alan-bakerb6b09dc2018-11-08 16:59:28 -05003121 const size_t new_index = static_cast<size_t>(
3122 dyn_extract<ConstantInt>(arg_node->getOperand(2))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04003123 const auto offset =
3124 dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
Kévin PETITa353c832018-03-20 23:21:21 +00003125 const auto arg_size =
3126 dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
alan-bakerc4579bb2020-04-29 14:15:50 -04003127 const auto argKind = clspv::GetArgKindFromName(
3128 dyn_cast<MDString>(arg_node->getOperand(5))->getString().str());
alan-bakerf5e5f692018-11-27 08:33:24 -05003129
alan-bakerff6c9292020-05-04 08:32:09 -04003130 // If this is a local memory argument, find the right spec id for this
3131 // argument.
3132 int64_t spec_id = -1;
3133 if (argKind == clspv::ArgKind::Local) {
3134 for (auto spec_id_arg : local_spec_id_md->operands()) {
3135 if ((&F == dyn_cast<Function>(
3136 dyn_cast<ValueAsMetadata>(spec_id_arg->getOperand(0))
3137 ->getValue())) &&
3138 (new_index ==
3139 mdconst::extract<ConstantInt>(spec_id_arg->getOperand(1))
3140 ->getZExtValue())) {
3141 spec_id = mdconst::extract<ConstantInt>(spec_id_arg->getOperand(2))
3142 ->getSExtValue();
3143 break;
3144 }
3145 }
3146 }
alan-bakerf5e5f692018-11-27 08:33:24 -05003147 uint32_t descriptor_set = 0;
3148 uint32_t binding = 0;
3149 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003150 F.getName().str(), name.str(), static_cast<uint32_t>(old_index),
3151 argKind, static_cast<uint32_t>(spec_id),
alan-bakerf5e5f692018-11-27 08:33:24 -05003152 // This will be set below for pointer-to-local args.
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003153 0, static_cast<uint32_t>(offset), static_cast<uint32_t>(arg_size)};
David Neto862b7d82018-06-14 18:48:37 -04003154 if (spec_id > 0) {
alan-bakerf5e5f692018-11-27 08:33:24 -05003155 kernel_data.local_element_size = static_cast<uint32_t>(GetTypeAllocSize(
3156 func_ty->getParamType(unsigned(new_index))->getPointerElementType(),
3157 DL));
David Neto862b7d82018-06-14 18:48:37 -04003158 } else {
3159 auto *info = resource_var_at_index[new_index];
3160 assert(info);
alan-bakerf5e5f692018-11-27 08:33:24 -05003161 descriptor_set = info->descriptor_set;
3162 binding = info->binding;
David Neto862b7d82018-06-14 18:48:37 -04003163 }
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003164 descriptorMapEntries->emplace_back(std::move(kernel_data), descriptor_set,
3165 binding);
David Neto862b7d82018-06-14 18:48:37 -04003166 }
3167 } else {
3168 // There is no argument map.
3169 // Take descriptor info from the resource variable calls.
Kévin PETITa353c832018-03-20 23:21:21 +00003170 // Take argument name and size from the arguments list.
David Neto862b7d82018-06-14 18:48:37 -04003171
3172 SmallVector<Argument *, 4> arguments;
3173 for (auto &arg : F.args()) {
3174 arguments.push_back(&arg);
3175 }
3176
3177 unsigned arg_index = 0;
3178 for (auto *info : resource_var_at_index) {
3179 if (info) {
Kévin PETITa353c832018-03-20 23:21:21 +00003180 auto arg = arguments[arg_index];
alan-bakerb6b09dc2018-11-08 16:59:28 -05003181 unsigned arg_size = 0;
alan-baker9b0ec3c2020-04-06 14:45:34 -04003182 if (info->arg_kind == clspv::ArgKind::Pod ||
3183 info->arg_kind == clspv::ArgKind::PodUBO ||
3184 info->arg_kind == clspv::ArgKind::PodPushConstant) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05003185 arg_size = static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
Kévin PETITa353c832018-03-20 23:21:21 +00003186 }
3187
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003188 // Local pointer arguments are unused in this case. Offset is always
3189 // zero.
alan-bakerf5e5f692018-11-27 08:33:24 -05003190 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003191 F.getName().str(),
3192 arg->getName().str(),
3193 arg_index,
alan-bakerc4579bb2020-04-29 14:15:50 -04003194 info->arg_kind,
alan-baker21574d32020-01-29 16:00:31 -05003195 0,
3196 0,
3197 0,
3198 arg_size};
alan-bakerf5e5f692018-11-27 08:33:24 -05003199 descriptorMapEntries->emplace_back(std::move(kernel_data),
3200 info->descriptor_set, info->binding);
David Neto862b7d82018-06-14 18:48:37 -04003201 }
3202 arg_index++;
3203 }
3204 // Generate mappings for pointer-to-local arguments.
3205 for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
3206 Argument *arg = arguments[arg_index];
Alan Baker202c8c72018-08-13 13:47:44 -04003207 auto where = LocalArgSpecIds.find(arg);
3208 if (where != LocalArgSpecIds.end()) {
3209 auto &local_arg_info = LocalSpecIdInfoMap[where->second];
alan-bakerf5e5f692018-11-27 08:33:24 -05003210 // Pod argument members are unused in this case.
3211 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05003212 F.getName().str(),
3213 arg->getName().str(),
alan-bakerf5e5f692018-11-27 08:33:24 -05003214 arg_index,
3215 ArgKind::Local,
3216 static_cast<uint32_t>(local_arg_info.spec_id),
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003217 static_cast<uint32_t>(
3218 GetTypeAllocSize(local_arg_info.elem_type, DL)),
alan-bakerf5e5f692018-11-27 08:33:24 -05003219 0,
3220 0};
3221 // Pointer-to-local arguments do not utilize descriptor set and binding.
3222 descriptorMapEntries->emplace_back(std::move(kernel_data), 0, 0);
David Neto862b7d82018-06-14 18:48:37 -04003223 }
3224 }
3225 }
3226}
3227
David Neto22f144c2017-06-12 14:26:21 -04003228void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
David Neto22f144c2017-06-12 14:26:21 -04003229 ValueMapType &VMap = getValueMap();
3230 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04003231 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
3232 auto &GlobalConstArgSet = getGlobalConstArgSet();
3233
3234 FunctionType *FTy = F.getFunctionType();
3235
3236 //
David Neto22f144c2017-06-12 14:26:21 -04003237 // Generate OPFunction.
3238 //
3239
3240 // FOps[0] : Result Type ID
3241 // FOps[1] : Function Control
3242 // FOps[2] : Function Type ID
SJWf93f5f32020-05-05 07:27:56 -05003243 SPIRVOperandVec FOps;
David Neto22f144c2017-06-12 14:26:21 -04003244
3245 // Find SPIRV instruction for return type.
SJWf93f5f32020-05-05 07:27:56 -05003246 FOps << MkId(getSPIRVType(FTy->getReturnType()));
David Neto22f144c2017-06-12 14:26:21 -04003247
3248 // Check function attributes for SPIRV Function Control.
3249 uint32_t FuncControl = spv::FunctionControlMaskNone;
3250 if (F.hasFnAttribute(Attribute::AlwaysInline)) {
3251 FuncControl |= spv::FunctionControlInlineMask;
3252 }
3253 if (F.hasFnAttribute(Attribute::NoInline)) {
3254 FuncControl |= spv::FunctionControlDontInlineMask;
3255 }
3256 // TODO: Check llvm attribute for Function Control Pure.
3257 if (F.hasFnAttribute(Attribute::ReadOnly)) {
3258 FuncControl |= spv::FunctionControlPureMask;
3259 }
3260 // TODO: Check llvm attribute for Function Control Const.
3261 if (F.hasFnAttribute(Attribute::ReadNone)) {
3262 FuncControl |= spv::FunctionControlConstMask;
3263 }
3264
David Neto257c3892018-04-11 13:19:45 -04003265 FOps << MkNum(FuncControl);
David Neto22f144c2017-06-12 14:26:21 -04003266
3267 uint32_t FTyID;
3268 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3269 SmallVector<Type *, 4> NewFuncParamTys;
3270 FunctionType *NewFTy =
3271 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
SJWf93f5f32020-05-05 07:27:56 -05003272 FTyID = getSPIRVType(NewFTy);
David Neto22f144c2017-06-12 14:26:21 -04003273 } else {
David Neto9ed8e2f2018-03-24 06:47:24 -07003274 // Handle regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04003275 if (GlobalConstFuncTyMap.count(FTy)) {
SJWf93f5f32020-05-05 07:27:56 -05003276 FTyID = getSPIRVType(GlobalConstFuncTyMap[FTy].first);
David Neto22f144c2017-06-12 14:26:21 -04003277 } else {
SJWf93f5f32020-05-05 07:27:56 -05003278 FTyID = getSPIRVType(FTy);
David Neto22f144c2017-06-12 14:26:21 -04003279 }
3280 }
3281
David Neto257c3892018-04-11 13:19:45 -04003282 FOps << MkId(FTyID);
David Neto22f144c2017-06-12 14:26:21 -04003283
SJWf93f5f32020-05-05 07:27:56 -05003284 // Generate SPIRV instruction for function.
3285 SPIRVID FID = addSPIRVInst(spv::OpFunction, FOps);
3286 VMap[&F] = FID;
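  // e.g. (IDs illustrative, function control shown as None for brevity):
  //   %foo = OpFunction %void None %fnty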
David Neto22f144c2017-06-12 14:26:21 -04003287
SJWf93f5f32020-05-05 07:27:56 -05003288 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
3289 EntryPoints.push_back(std::make_pair(&F, FID));
3290 }
David Neto22f144c2017-06-12 14:26:21 -04003291
David Neto482550a2018-03-24 05:21:07 -07003292 if (clspv::Option::ShowIDs()) {
SJWf93f5f32020-05-05 07:27:56 -05003293 errs() << "Function " << F.getName() << " is " << FID << "\n";
David Netob05675d2018-02-16 12:37:49 -05003294 }
David Neto22f144c2017-06-12 14:26:21 -04003295
3296 //
3297 // Generate OpFunctionParameter for Normal function.
3298 //
3299
3300 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
alan-bakere9308012019-03-15 10:25:13 -04003301
David Neto22f144c2017-06-12 14:26:21 -04003302 // Iterate Argument for name instead of param type from function type.
3303 unsigned ArgIdx = 0;
3304 for (Argument &Arg : F.args()) {
David Neto22f144c2017-06-12 14:26:21 -04003305 // ParamOps[0] : Result Type ID
SJWf93f5f32020-05-05 07:27:56 -05003306 SPIRVOperandVec ParamOps;
David Neto22f144c2017-06-12 14:26:21 -04003307
3308 // Find SPIRV instruction for parameter type.
SJWf93f5f32020-05-05 07:27:56 -05003309 uint32_t ParamTyID = getSPIRVType(Arg.getType());
David Neto22f144c2017-06-12 14:26:21 -04003310 if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
3311 if (GlobalConstFuncTyMap.count(FTy)) {
3312 if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
3313 Type *EleTy = PTy->getPointerElementType();
3314 Type *ArgTy =
3315 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
SJWf93f5f32020-05-05 07:27:56 -05003316 ParamTyID = getSPIRVType(ArgTy);
David Neto22f144c2017-06-12 14:26:21 -04003317 GlobalConstArgSet.insert(&Arg);
3318 }
3319 }
3320 }
David Neto257c3892018-04-11 13:19:45 -04003321 ParamOps << MkId(ParamTyID);
David Neto22f144c2017-06-12 14:26:21 -04003322
3323 // Generate SPIRV instruction for parameter.
SJWf93f5f32020-05-05 07:27:56 -05003324 uint32_t param_id = addSPIRVInst(spv::OpFunctionParameter, ParamOps);
3325 VMap[&Arg] = param_id;
3326
3327 if (CalledWithCoherentResource(Arg)) {
3328 // If the arg is passed a coherent resource ever, then decorate this
3329 // parameter with Coherent too.
3330 SPIRVOperandVec decoration_ops;
3331 decoration_ops << MkId(param_id) << MkNum(spv::DecorationCoherent);
3332 addSPIRVInst<kAnnotations>(spv::OpDecorate, decoration_ops);
3333 }
David Neto22f144c2017-06-12 14:26:21 -04003334
3335 ArgIdx++;
3336 }
3337 }
3338}
3339
SJW77b87ad2020-04-21 14:37:52 -05003340void SPIRVProducerPass::GenerateModuleInfo() {
David Neto22f144c2017-06-12 14:26:21 -04003341 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04003342 ValueList &EntryPointInterfaces = getEntryPointInterfacesVec();
David Neto22f144c2017-06-12 14:26:21 -04003343 std::vector<uint32_t> &BuiltinDimVec = getBuiltinDimVec();
3344
David Neto22f144c2017-06-12 14:26:21 -04003345 //
3346 // Generate OpCapability
3347 //
3348 // TODO: Which llvm information is mapped to SPIRV Capapbility?
3349
3350 // Ops[0] = Capability
SJWf93f5f32020-05-05 07:27:56 -05003351 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003352
SJWf93f5f32020-05-05 07:27:56 -05003353 addSPIRVInst<kCapabilities>(spv::OpCapability, spv::CapabilityShader);
David Neto22f144c2017-06-12 14:26:21 -04003354
alan-bakerf906d2b2019-12-10 11:26:23 -05003355 bool write_without_format = false;
3356 bool sampled_1d = false;
3357 bool image_1d = false;
David Neto22f144c2017-06-12 14:26:21 -04003358 for (Type *Ty : getTypeList()) {
alan-bakerb39c8262019-03-08 14:03:37 -05003359 if (clspv::Option::Int8Support() && Ty->isIntegerTy(8)) {
3360 // Generate OpCapability for i8 type.
SJWf93f5f32020-05-05 07:27:56 -05003361 addSPIRVInst<kCapabilities>(spv::OpCapability, spv::CapabilityInt8);
alan-bakerb39c8262019-03-08 14:03:37 -05003362 } else if (Ty->isIntegerTy(16)) {
David Neto22f144c2017-06-12 14:26:21 -04003363 // Generate OpCapability for i16 type.
SJWf93f5f32020-05-05 07:27:56 -05003364 addSPIRVInst<kCapabilities>(spv::OpCapability, spv::CapabilityInt16);
David Neto22f144c2017-06-12 14:26:21 -04003365 } else if (Ty->isIntegerTy(64)) {
3366 // Generate OpCapability for i64 type.
SJWf93f5f32020-05-05 07:27:56 -05003367 addSPIRVInst<kCapabilities>(spv::OpCapability, spv::CapabilityInt64);
David Neto22f144c2017-06-12 14:26:21 -04003368 } else if (Ty->isHalfTy()) {
3369 // Generate OpCapability for half type.
SJWf93f5f32020-05-05 07:27:56 -05003370 addSPIRVInst<kCapabilities>(spv::OpCapability, spv::CapabilityFloat16);
David Neto22f144c2017-06-12 14:26:21 -04003371 } else if (Ty->isDoubleTy()) {
3372 // Generate OpCapability for double type.
SJWf93f5f32020-05-05 07:27:56 -05003373 addSPIRVInst<kCapabilities>(spv::OpCapability, spv::CapabilityFloat64);
David Neto22f144c2017-06-12 14:26:21 -04003374 } else if (auto *STy = dyn_cast<StructType>(Ty)) {
3375 if (STy->isOpaque()) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003376 if (STy->getName().startswith("opencl.image1d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05003377 STy->getName().startswith("opencl.image1d_array_wo_t") ||
alan-bakerf906d2b2019-12-10 11:26:23 -05003378 STy->getName().startswith("opencl.image2d_wo_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05003379 STy->getName().startswith("opencl.image2d_array_wo_t") ||
alan-bakerf67468c2019-11-25 15:51:49 -05003380 STy->getName().startswith("opencl.image3d_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003381 write_without_format = true;
3382 }
3383 if (STy->getName().startswith("opencl.image1d_ro_t") ||
alan-baker7150a1d2020-02-25 08:31:06 -05003384 STy->getName().startswith("opencl.image1d_wo_t") ||
3385 STy->getName().startswith("opencl.image1d_array_ro_t") ||
3386 STy->getName().startswith("opencl.image1d_array_wo_t")) {
alan-bakerf906d2b2019-12-10 11:26:23 -05003387 if (STy->getName().contains(".sampled"))
3388 sampled_1d = true;
3389 else
3390 image_1d = true;
David Neto22f144c2017-06-12 14:26:21 -04003391 }
3392 }
3393 }
3394 }
3395
alan-bakerf906d2b2019-12-10 11:26:23 -05003396 if (write_without_format) {
3397 // Generate OpCapability for write only image type.
SJWf93f5f32020-05-05 07:27:56 -05003398 addSPIRVInst<kCapabilities>(spv::OpCapability,
3399 spv::CapabilityStorageImageWriteWithoutFormat);
alan-bakerf906d2b2019-12-10 11:26:23 -05003400 }
3401 if (image_1d) {
3402 // Generate OpCapability for unsampled 1D image type.
SJWf93f5f32020-05-05 07:27:56 -05003403 addSPIRVInst<kCapabilities>(spv::OpCapability, spv::CapabilityImage1D);
alan-bakerf906d2b2019-12-10 11:26:23 -05003404 } else if (sampled_1d) {
3405 // Generate OpCapability for sampled 1D image type.
SJWf93f5f32020-05-05 07:27:56 -05003406 addSPIRVInst<kCapabilities>(spv::OpCapability, spv::CapabilitySampled1D);
alan-bakerf906d2b2019-12-10 11:26:23 -05003407 }
3408
David Neto5c22a252018-03-15 16:07:41 -04003409 { // OpCapability ImageQuery
3410 bool hasImageQuery = false;
SJW77b87ad2020-04-21 14:37:52 -05003411 for (const auto &SymVal : module->getValueSymbolTable()) {
alan-bakerf67468c2019-11-25 15:51:49 -05003412 if (auto F = dyn_cast<Function>(SymVal.getValue())) {
SJW173c7e92020-03-16 08:44:47 -05003413 if (IsImageQuery(F)) {
alan-bakerf67468c2019-11-25 15:51:49 -05003414 hasImageQuery = true;
3415 break;
3416 }
David Neto5c22a252018-03-15 16:07:41 -04003417 }
3418 }
alan-bakerf67468c2019-11-25 15:51:49 -05003419
David Neto5c22a252018-03-15 16:07:41 -04003420 if (hasImageQuery) {
SJWf93f5f32020-05-05 07:27:56 -05003421 addSPIRVInst<kCapabilities>(spv::OpCapability, spv::CapabilityImageQuery);
David Neto5c22a252018-03-15 16:07:41 -04003422 }
3423 }
3424
David Neto22f144c2017-06-12 14:26:21 -04003425 if (hasVariablePointers()) {
3426 //
David Neto22f144c2017-06-12 14:26:21 -04003427 // Generate OpCapability.
3428 //
3429 // Ops[0] = Capability
3430 //
SJWf93f5f32020-05-05 07:27:56 -05003431 addSPIRVInst<kCapabilities>(spv::OpCapability,
3432 spv::CapabilityVariablePointers);
alan-baker5b86ed72019-02-15 08:26:50 -05003433 } else if (hasVariablePointersStorageBuffer()) {
3434 //
3435 // Generate OpCapability.
3436 //
3437 // Ops[0] = Capability
3438 //
SJWf93f5f32020-05-05 07:27:56 -05003439 addSPIRVInst<kCapabilities>(spv::OpCapability,
3440 spv::CapabilityVariablePointersStorageBuffer);
alan-baker5b86ed72019-02-15 08:26:50 -05003441 }
3442
3443 // Always add the storage buffer extension
3444 {
David Neto22f144c2017-06-12 14:26:21 -04003445 //
3446 // Generate OpExtension.
3447 //
3448 // Ops[0] = Name (Literal String)
3449 //
SJWf93f5f32020-05-05 07:27:56 -05003450 addSPIRVInst<kExtensions>(spv::OpExtension,
3451 "SPV_KHR_storage_buffer_storage_class");
alan-baker5b86ed72019-02-15 08:26:50 -05003452 }
David Neto22f144c2017-06-12 14:26:21 -04003453
alan-baker5b86ed72019-02-15 08:26:50 -05003454 if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
3455 //
3456 // Generate OpExtension.
3457 //
3458 // Ops[0] = Name (Literal String)
3459 //
SJWf93f5f32020-05-05 07:27:56 -05003460 addSPIRVInst<kExtensions>(spv::OpExtension, "SPV_KHR_variable_pointers");
David Neto22f144c2017-06-12 14:26:21 -04003461 }
3462
3463 //
3464 // Generate OpMemoryModel
3465 //
3466 // Memory model for Vulkan will always be GLSL450.
3467
3468 // Ops[0] = Addressing Model
3469 // Ops[1] = Memory Model
3470 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003471 Ops << MkNum(spv::AddressingModelLogical) << MkNum(spv::MemoryModelGLSL450);
David Neto22f144c2017-06-12 14:26:21 -04003472
SJWf93f5f32020-05-05 07:27:56 -05003473 addSPIRVInst<kMemoryModel>(spv::OpMemoryModel, Ops);
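  // i.e. every module carries: OpMemoryModel Logical GLSL450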
David Neto22f144c2017-06-12 14:26:21 -04003474
3475 //
3476 // Generate OpEntryPoint
3477 //
3478 for (auto EntryPoint : EntryPoints) {
3479 // Ops[0] = Execution Model
3480 // Ops[1] = EntryPoint ID
3481 // Ops[2] = Name (Literal String)
3482 // ...
3483 //
3484 // TODO: Do we need to consider Interface ID for forward references???
3485 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003486 const StringRef &name = EntryPoint.first->getName();
David Neto257c3892018-04-11 13:19:45 -04003487 Ops << MkNum(spv::ExecutionModelGLCompute) << MkId(EntryPoint.second)
3488 << MkString(name);
David Neto22f144c2017-06-12 14:26:21 -04003489
David Neto22f144c2017-06-12 14:26:21 -04003490 for (Value *Interface : EntryPointInterfaces) {
SJWf93f5f32020-05-05 07:27:56 -05003491 Ops << MkId(getSPIRVValue(Interface));
David Neto22f144c2017-06-12 14:26:21 -04003492 }
3493
SJWf93f5f32020-05-05 07:27:56 -05003494 addSPIRVInst<kEntryPoints>(spv::OpEntryPoint, Ops);
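    // e.g. (IDs and name illustrative):
    //   OpEntryPoint GLCompute %foo "foo" %interface_var_0 %interface_var_1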
David Neto22f144c2017-06-12 14:26:21 -04003495 }
3496
3497 for (auto EntryPoint : EntryPoints) {
Kévin Petit21c23c62020-04-29 01:38:28 +01003498 const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
3499 ->getMetadata("reqd_work_group_size");
3500 if ((MD != nullptr) && !clspv::Option::NonUniformNDRangeSupported()) {
David Neto22f144c2017-06-12 14:26:21 -04003501
3502 if (!BuiltinDimVec.empty()) {
3503 llvm_unreachable(
3504 "Kernels should have consistent work group size definition");
3505 }
3506
3507 //
3508 // Generate OpExecutionMode
3509 //
3510
3511 // Ops[0] = Entry Point ID
3512 // Ops[1] = Execution Mode
3513 // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
3514 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05003515 Ops << MkId(EntryPoint.second) << MkNum(spv::ExecutionModeLocalSize);
David Neto22f144c2017-06-12 14:26:21 -04003516
3517 uint32_t XDim = static_cast<uint32_t>(
3518 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
3519 uint32_t YDim = static_cast<uint32_t>(
3520 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
3521 uint32_t ZDim = static_cast<uint32_t>(
3522 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
3523
David Neto257c3892018-04-11 13:19:45 -04003524 Ops << MkNum(XDim) << MkNum(YDim) << MkNum(ZDim);
David Neto22f144c2017-06-12 14:26:21 -04003525
SJWf93f5f32020-05-05 07:27:56 -05003526 addSPIRVInst<kExecutionModes>(spv::OpExecutionMode, Ops);
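      // e.g. for reqd_work_group_size(8, 4, 2) this is (ID illustrative):
      //   OpExecutionMode %foo LocalSize 8 4 2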
David Neto22f144c2017-06-12 14:26:21 -04003527 }
3528 }
3529
3530 //
3531 // Generate OpSource.
3532 //
3533 // Ops[0] = SourceLanguage ID
3534 // Ops[1] = Version (LiteralNum)
3535 //
3536 Ops.clear();
Kévin Petitf0515712020-01-07 18:29:20 +00003537 switch (clspv::Option::Language()) {
3538 case clspv::Option::SourceLanguage::OpenCL_C_10:
3539 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(100);
3540 break;
3541 case clspv::Option::SourceLanguage::OpenCL_C_11:
3542 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(110);
3543 break;
3544 case clspv::Option::SourceLanguage::OpenCL_C_12:
Kévin Petit0fc88042019-04-09 23:25:02 +01003545 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(120);
Kévin Petitf0515712020-01-07 18:29:20 +00003546 break;
3547 case clspv::Option::SourceLanguage::OpenCL_C_20:
3548 Ops << MkNum(spv::SourceLanguageOpenCL_C) << MkNum(200);
3549 break;
3550 case clspv::Option::SourceLanguage::OpenCL_CPP:
3551 Ops << MkNum(spv::SourceLanguageOpenCL_CPP) << MkNum(100);
3552 break;
3553 default:
3554 Ops << MkNum(spv::SourceLanguageUnknown) << MkNum(0);
3555 break;
Kévin Petit0fc88042019-04-09 23:25:02 +01003556 }
David Neto22f144c2017-06-12 14:26:21 -04003557
SJWf93f5f32020-05-05 07:27:56 -05003558 addSPIRVInst<kDebug>(spv::OpSource, Ops);
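  // e.g. OpenCL C 1.2 input produces: OpSource OpenCL_C 120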
David Neto22f144c2017-06-12 14:26:21 -04003559
3560 if (!BuiltinDimVec.empty()) {
3561 //
3562 // Generate OpDecorates for x/y/z dimension.
3563 //
3564 // Ops[0] = Target ID
3565 // Ops[1] = Decoration (SpecId)
David Neto257c3892018-04-11 13:19:45 -04003566 // Ops[2] = Specialization Constant ID (Literal Number)
David Neto22f144c2017-06-12 14:26:21 -04003567
3568 // X Dimension
3569 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003570 Ops << MkId(BuiltinDimVec[0]) << MkNum(spv::DecorationSpecId) << MkNum(0);
SJWf93f5f32020-05-05 07:27:56 -05003571 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003572
3573 // Y Dimension
3574 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003575 Ops << MkId(BuiltinDimVec[1]) << MkNum(spv::DecorationSpecId) << MkNum(1);
SJWf93f5f32020-05-05 07:27:56 -05003576 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003577
3578 // Z Dimension
3579 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04003580 Ops << MkId(BuiltinDimVec[2]) << MkNum(spv::DecorationSpecId) << MkNum(2);
SJWf93f5f32020-05-05 07:27:56 -05003581 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
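    // i.e. the workgroup-size spec constants always receive SpecIds 0, 1 and 2
    // (IDs illustrative):
    //   OpDecorate %x SpecId 0
    //   OpDecorate %y SpecId 1
    //   OpDecorate %z SpecId 2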
David Neto22f144c2017-06-12 14:26:21 -04003582 }
3583}
3584
David Netob6e2e062018-04-25 10:32:06 -04003585void SPIRVProducerPass::GenerateEntryPointInitialStores() {
3586 // Work around a driver bug. Initializers on Private variables might not
3587 // work. So the start of the kernel should store the initializer value to the
3588 // variables. Yes, *every* entry point pays this cost if *any* entry point
3589 // uses this builtin. At this point I judge this to be an acceptable tradeoff
3590 // of complexity vs. runtime, for a broken driver.
alan-bakerb6b09dc2018-11-08 16:59:28 -05003591 // TODO(dneto): Remove this at some point once fixed drivers are widely
3592 // available.
David Netob6e2e062018-04-25 10:32:06 -04003593 if (WorkgroupSizeVarID) {
3594 assert(WorkgroupSizeValueID);
3595
SJWf93f5f32020-05-05 07:27:56 -05003596 SPIRVOperandVec Ops;
David Netob6e2e062018-04-25 10:32:06 -04003597 Ops << MkId(WorkgroupSizeVarID) << MkId(WorkgroupSizeValueID);
3598
SJWf93f5f32020-05-05 07:27:56 -05003599 addSPIRVInst(spv::OpStore, Ops);
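    // i.e. each kernel's entry block then begins with roughly (IDs
    // illustrative):
    //   OpStore %workgroup_size_var %workgroup_size_const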
David Netob6e2e062018-04-25 10:32:06 -04003600 }
3601}
3602
David Neto22f144c2017-06-12 14:26:21 -04003603void SPIRVProducerPass::GenerateFuncBody(Function &F) {
David Neto22f144c2017-06-12 14:26:21 -04003604 ValueMapType &VMap = getValueMap();
3605
David Netob6e2e062018-04-25 10:32:06 -04003606 const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;
David Neto22f144c2017-06-12 14:26:21 -04003607
3608 for (BasicBlock &BB : F) {
3609 // Register BasicBlock to ValueMap.
David Neto22f144c2017-06-12 14:26:21 -04003610
3611 //
3612 // Generate OpLabel for Basic Block.
3613 //
SJWf93f5f32020-05-05 07:27:56 -05003614 VMap[&BB] = addSPIRVInst(spv::OpLabel);
David Neto22f144c2017-06-12 14:26:21 -04003615
David Neto6dcd4712017-06-23 11:06:47 -04003616 // OpVariable instructions must come first.
3617 for (Instruction &I : BB) {
alan-baker5b86ed72019-02-15 08:26:50 -05003618 if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
3619 // Allocating a pointer requires variable pointers.
3620 if (alloca->getAllocatedType()->isPointerTy()) {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003621 setVariablePointersCapabilities(
3622 alloca->getAllocatedType()->getPointerAddressSpace());
alan-baker5b86ed72019-02-15 08:26:50 -05003623 }
David Neto6dcd4712017-06-23 11:06:47 -04003624 GenerateInstruction(I);
3625 }
3626 }
3627
David Neto22f144c2017-06-12 14:26:21 -04003628 if (&BB == &F.getEntryBlock() && IsKernel) {
David Netob6e2e062018-04-25 10:32:06 -04003629 if (clspv::Option::HackInitializers()) {
3630 GenerateEntryPointInitialStores();
3631 }
David Neto22f144c2017-06-12 14:26:21 -04003632 }
3633
3634 for (Instruction &I : BB) {
David Neto6dcd4712017-06-23 11:06:47 -04003635 if (!isa<AllocaInst>(I)) {
3636 GenerateInstruction(I);
3637 }
David Neto22f144c2017-06-12 14:26:21 -04003638 }
3639 }
3640}
3641
3642spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
3643 const std::map<CmpInst::Predicate, spv::Op> Map = {
3644 {CmpInst::ICMP_EQ, spv::OpIEqual},
3645 {CmpInst::ICMP_NE, spv::OpINotEqual},
3646 {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
3647 {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
3648 {CmpInst::ICMP_ULT, spv::OpULessThan},
3649 {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
3650 {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
3651 {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
3652 {CmpInst::ICMP_SLT, spv::OpSLessThan},
3653 {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
3654 {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
3655 {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
3656 {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
3657 {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
3658 {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
3659 {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
3660 {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
3661 {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
3662 {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
3663 {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
3664 {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
3665 {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};
3666
3667 assert(0 != Map.count(I->getPredicate()));
3668
3669 return Map.at(I->getPredicate());
3670}
3671
3672spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
3673 const std::map<unsigned, spv::Op> Map{
3674 {Instruction::Trunc, spv::OpUConvert},
3675 {Instruction::ZExt, spv::OpUConvert},
3676 {Instruction::SExt, spv::OpSConvert},
3677 {Instruction::FPToUI, spv::OpConvertFToU},
3678 {Instruction::FPToSI, spv::OpConvertFToS},
3679 {Instruction::UIToFP, spv::OpConvertUToF},
3680 {Instruction::SIToFP, spv::OpConvertSToF},
3681 {Instruction::FPTrunc, spv::OpFConvert},
3682 {Instruction::FPExt, spv::OpFConvert},
3683 {Instruction::BitCast, spv::OpBitcast}};
3684
3685 assert(0 != Map.count(I.getOpcode()));
3686
3687 return Map.at(I.getOpcode());
3688}
3689
3690spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
Kévin Petit24272b62018-10-18 19:16:12 +00003691 if (I.getType()->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003692 switch (I.getOpcode()) {
3693 default:
3694 break;
3695 case Instruction::Or:
3696 return spv::OpLogicalOr;
3697 case Instruction::And:
3698 return spv::OpLogicalAnd;
3699 case Instruction::Xor:
3700 return spv::OpLogicalNotEqual;
3701 }
3702 }
3703
alan-bakerb6b09dc2018-11-08 16:59:28 -05003704 const std::map<unsigned, spv::Op> Map{
David Neto22f144c2017-06-12 14:26:21 -04003705 {Instruction::Add, spv::OpIAdd},
3706 {Instruction::FAdd, spv::OpFAdd},
3707 {Instruction::Sub, spv::OpISub},
3708 {Instruction::FSub, spv::OpFSub},
3709 {Instruction::Mul, spv::OpIMul},
3710 {Instruction::FMul, spv::OpFMul},
3711 {Instruction::UDiv, spv::OpUDiv},
3712 {Instruction::SDiv, spv::OpSDiv},
3713 {Instruction::FDiv, spv::OpFDiv},
3714 {Instruction::URem, spv::OpUMod},
3715 {Instruction::SRem, spv::OpSRem},
3716 {Instruction::FRem, spv::OpFRem},
3717 {Instruction::Or, spv::OpBitwiseOr},
3718 {Instruction::Xor, spv::OpBitwiseXor},
3719 {Instruction::And, spv::OpBitwiseAnd},
3720 {Instruction::Shl, spv::OpShiftLeftLogical},
3721 {Instruction::LShr, spv::OpShiftRightLogical},
3722 {Instruction::AShr, spv::OpShiftRightArithmetic}};
3723
3724 assert(0 != Map.count(I.getOpcode()));
3725
3726 return Map.at(I.getOpcode());
3727}
3728
3729void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
David Neto22f144c2017-06-12 14:26:21 -04003730 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04003731 LLVMContext &Context = I.getParent()->getParent()->getParent()->getContext();
3732
SJWf93f5f32020-05-05 07:27:56 -05003733 SPIRVID RID = 0;
David Neto22f144c2017-06-12 14:26:21 -04003734
3735 switch (I.getOpcode()) {
3736 default: {
3737 if (Instruction::isCast(I.getOpcode())) {
3738 //
3739 // Generate SPIRV instructions for cast operators.
3740 //
3741
David Netod2de94a2017-08-28 17:27:47 -04003742 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003743 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04003744 auto toI8 = Ty == Type::getInt8Ty(Context);
3745 auto fromI32 = OpTy == Type::getInt32Ty(Context);
David Neto22f144c2017-06-12 14:26:21 -04003746 // Handle zext, sext and uitofp with i1 type specially.
3747 if ((I.getOpcode() == Instruction::ZExt ||
3748 I.getOpcode() == Instruction::SExt ||
3749 I.getOpcode() == Instruction::UIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05003750 OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003751 //
3752 // Generate OpSelect.
3753 //
3754
3755 // Ops[0] = Result Type ID
3756 // Ops[1] = Condition ID
3757 // Ops[2] = True Constant ID
3758 // Ops[3] = False Constant ID
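        // Illustrative sketch of the lowering (names and IDs are made up):
        //   %r = zext i1 %c to i32     ==>  %r = OpSelect %uint %c %uint_1 %uint_0
        //   %r = sext i1 %c to i32     ==>  %r = OpSelect %uint %c %uint_4294967295 %uint_0
        //   %r = uitofp i1 %c to float ==>  %r = OpSelect %float %c %float_1 %float_0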
SJWf93f5f32020-05-05 07:27:56 -05003759 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003760
SJWf93f5f32020-05-05 07:27:56 -05003761 Ops << MkId(getSPIRVType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003762
SJWf93f5f32020-05-05 07:27:56 -05003763 uint32_t CondID = getSPIRVValue(I.getOperand(0));
David Neto257c3892018-04-11 13:19:45 -04003764 Ops << MkId(CondID);
David Neto22f144c2017-06-12 14:26:21 -04003765
3766 uint32_t TrueID = 0;
3767 if (I.getOpcode() == Instruction::ZExt) {
SJWf93f5f32020-05-05 07:27:56 -05003768 TrueID = getSPIRVValue(ConstantInt::get(I.getType(), 1));
David Neto22f144c2017-06-12 14:26:21 -04003769 } else if (I.getOpcode() == Instruction::SExt) {
SJWf93f5f32020-05-05 07:27:56 -05003770 TrueID = getSPIRVValue(ConstantInt::getSigned(I.getType(), -1));
David Neto22f144c2017-06-12 14:26:21 -04003771 } else {
SJWf93f5f32020-05-05 07:27:56 -05003772 TrueID = getSPIRVValue(ConstantFP::get(Context, APFloat(1.0f)));
David Neto22f144c2017-06-12 14:26:21 -04003773 }
David Neto257c3892018-04-11 13:19:45 -04003774 Ops << MkId(TrueID);
David Neto22f144c2017-06-12 14:26:21 -04003775
3776 uint32_t FalseID = 0;
3777 if (I.getOpcode() == Instruction::ZExt) {
SJWf93f5f32020-05-05 07:27:56 -05003778 FalseID = getSPIRVValue(Constant::getNullValue(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003779 } else if (I.getOpcode() == Instruction::SExt) {
SJWf93f5f32020-05-05 07:27:56 -05003780 FalseID = getSPIRVValue(Constant::getNullValue(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003781 } else {
SJWf93f5f32020-05-05 07:27:56 -05003782 FalseID = getSPIRVValue(ConstantFP::get(Context, APFloat(0.0f)));
David Neto22f144c2017-06-12 14:26:21 -04003783 }
David Neto257c3892018-04-11 13:19:45 -04003784 Ops << MkId(FalseID);
David Neto22f144c2017-06-12 14:26:21 -04003785
SJWf93f5f32020-05-05 07:27:56 -05003786 RID = addSPIRVInst(spv::OpSelect, Ops);
alan-bakerb39c8262019-03-08 14:03:37 -05003787 } else if (!clspv::Option::Int8Support() &&
3788 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04003789 // The SPIR-V target type is a 32-bit int. Keep only the bottom
3790 // 8 bits.
3791 // Before:
3792 // %result = trunc i32 %a to i8
3793 // After
3794 // %result = OpBitwiseAnd %uint %a %uint_255
3795
SJWf93f5f32020-05-05 07:27:56 -05003796 SPIRVOperandVec Ops;
David Netod2de94a2017-08-28 17:27:47 -04003797
SJWf93f5f32020-05-05 07:27:56 -05003798 Ops << MkId(getSPIRVType(OpTy)) << MkId(getSPIRVValue(I.getOperand(0)));
David Netod2de94a2017-08-28 17:27:47 -04003799
3800 Type *UintTy = Type::getInt32Ty(Context);
SJWf93f5f32020-05-05 07:27:56 -05003801 uint32_t MaskID = getSPIRVValue(ConstantInt::get(UintTy, 255));
David Neto257c3892018-04-11 13:19:45 -04003802 Ops << MkId(MaskID);
David Netod2de94a2017-08-28 17:27:47 -04003803
SJWf93f5f32020-05-05 07:27:56 -05003804 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003805 } else {
3806 // Ops[0] = Result Type ID
3807 // Ops[1] = Source Value ID
SJWf93f5f32020-05-05 07:27:56 -05003808 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003809
SJWf93f5f32020-05-05 07:27:56 -05003810 Ops << MkId(getSPIRVType(I.getType()))
3811 << MkId(getSPIRVValue(I.getOperand(0)));
David Neto22f144c2017-06-12 14:26:21 -04003812
SJWf93f5f32020-05-05 07:27:56 -05003813 RID = addSPIRVInst(GetSPIRVCastOpcode(I), Ops);
David Neto22f144c2017-06-12 14:26:21 -04003814 }
3815 } else if (isa<BinaryOperator>(I)) {
3816 //
3817 // Generate SPIRV instructions for binary operators.
3818 //
3819
3820 // Handle xor with i1 type specially.
3821 if (I.getOpcode() == Instruction::Xor &&
3822 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00003823 ((isa<ConstantInt>(I.getOperand(0)) &&
3824 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
3825 (isa<ConstantInt>(I.getOperand(1)) &&
3826 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04003827 //
3828 // Generate OpLogicalNot.
3829 //
3830 // Ops[0] = Result Type ID
3831 // Ops[1] = Operand
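        // Illustrative sketch (names are made up):
        //   %r = xor i1 %a, true  ==>  %r = OpLogicalNot %bool %a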
SJWf93f5f32020-05-05 07:27:56 -05003832 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003833
SJWf93f5f32020-05-05 07:27:56 -05003834 Ops << MkId(getSPIRVType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003835
3836 Value *CondV = I.getOperand(0);
3837 if (isa<Constant>(I.getOperand(0))) {
3838 CondV = I.getOperand(1);
3839 }
SJWf93f5f32020-05-05 07:27:56 -05003840 Ops << MkId(getSPIRVValue(CondV));
David Neto22f144c2017-06-12 14:26:21 -04003841
SJWf93f5f32020-05-05 07:27:56 -05003842 RID = addSPIRVInst(spv::OpLogicalNot, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003843 } else {
3844 // Ops[0] = Result Type ID
3845 // Ops[1] = Operand 0
3846 // Ops[2] = Operand 1
SJWf93f5f32020-05-05 07:27:56 -05003847 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003848
SJWf93f5f32020-05-05 07:27:56 -05003849 Ops << MkId(getSPIRVType(I.getType()))
3850 << MkId(getSPIRVValue(I.getOperand(0)))
3851 << MkId(getSPIRVValue(I.getOperand(1)));
David Neto22f144c2017-06-12 14:26:21 -04003852
SJWf93f5f32020-05-05 07:27:56 -05003853 RID = addSPIRVInst(GetSPIRVBinaryOpcode(I), Ops);
David Neto22f144c2017-06-12 14:26:21 -04003854 }
alan-bakerc9c55ae2019-12-02 16:01:27 -05003855 } else if (I.getOpcode() == Instruction::FNeg) {
3856 // The only unary operator.
3857 //
3858 // Ops[0] = Result Type ID
3859 // Ops[1] = Operand 0
SJWf93f5f32020-05-05 07:27:56 -05003860 SPIRVOperandVec ops;
alan-bakerc9c55ae2019-12-02 16:01:27 -05003861
SJWf93f5f32020-05-05 07:27:56 -05003862 ops << MkId(getSPIRVType(I.getType()))
3863 << MkId(getSPIRVValue(I.getOperand(0)));
3864 RID = addSPIRVInst(spv::OpFNegate, ops);
David Neto22f144c2017-06-12 14:26:21 -04003865 } else {
3866 I.print(errs());
3867 llvm_unreachable("Unsupported instruction???");
3868 }
3869 break;
3870 }
3871 case Instruction::GetElementPtr: {
3872 auto &GlobalConstArgSet = getGlobalConstArgSet();
3873
3874 //
3875 // Generate OpAccessChain.
3876 //
3877 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
3882
3883 // Ops[0] = Result Type ID
3884 // Ops[1] = Base ID
3885 // Ops[2] ... Ops[n] = Indexes ID
SJWf93f5f32020-05-05 07:27:56 -05003886 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003887
alan-bakerb6b09dc2018-11-08 16:59:28 -05003888 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04003889 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
3890 GlobalConstArgSet.count(GEP->getPointerOperand())) {
3891 // Use pointer type with private address space for global constant.
3892 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04003893 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04003894 }
David Neto257c3892018-04-11 13:19:45 -04003895
SJWf93f5f32020-05-05 07:27:56 -05003896 Ops << MkId(getSPIRVType(ResultType));
David Neto22f144c2017-06-12 14:26:21 -04003897
David Neto862b7d82018-06-14 18:48:37 -04003898 // Generate the base pointer.
SJWf93f5f32020-05-05 07:27:56 -05003899 Ops << MkId(getSPIRVValue(GEP->getPointerOperand()));
David Neto22f144c2017-06-12 14:26:21 -04003900
David Neto862b7d82018-06-14 18:48:37 -04003901 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04003902
3903 //
3904 // The following rules are used to lower the gep.
3905 //
David Neto862b7d82018-06-14 18:48:37 -04003906 // 1. If gep's first index is 0, generate OpAccessChain and ignore gep's
3907 // first index.
David Neto22f144c2017-06-12 14:26:21 -04003908 // 2. If gep's first index is not 0, generate OpPtrAccessChain and use gep's
3909 // first index.
3910 // 3. If gep's first index is not constant, generate OpPtrAccessChain and
3911 // use gep's first index.
3912 // 4. Otherwise (none of cases 1, 2 or 3 apply), generate OpAccessChain and
3913 // use gep's first index.
3914 //
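    // Illustrative sketch of rules 1 and 3 (types and names are made up):
    //   getelementptr %S, %S* %p, i32 0, i32 2  ==>  OpAccessChain %ptr %p %uint_2
    //   getelementptr i32, i32* %p, i32 %n      ==>  OpPtrAccessChain %ptr %p %n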
3915 spv::Op Opcode = spv::OpAccessChain;
3916 unsigned offset = 0;
3917 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04003918 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04003919 offset = 1;
David Neto862b7d82018-06-14 18:48:37 -04003920 } else {
David Neto22f144c2017-06-12 14:26:21 -04003921 Opcode = spv::OpPtrAccessChain;
David Neto22f144c2017-06-12 14:26:21 -04003922 }
David Neto862b7d82018-06-14 18:48:37 -04003923 } else {
David Neto22f144c2017-06-12 14:26:21 -04003924 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04003925 }
3926
3927 if (Opcode == spv::OpPtrAccessChain) {
David Neto1a1a0582017-07-07 12:01:44 -04003928 // Do we need to generate ArrayStride? Check against the GEP result type
3929 // rather than the pointer type of the base because when indexing into
3930 // an OpenCL program-scope constant, we'll swap out the LLVM base pointer
3931 // for something else in the SPIR-V.
3932 // E.g. see test/PointerAccessChain/pointer_index_is_constant_1.cl
alan-baker5b86ed72019-02-15 08:26:50 -05003933 auto address_space = ResultType->getAddressSpace();
3934 setVariablePointersCapabilities(address_space);
3935 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04003936 case spv::StorageClassStorageBuffer:
3937 case spv::StorageClassUniform:
David Neto1a1a0582017-07-07 12:01:44 -04003938 // Save the need to generate an ArrayStride decoration. But defer
3939 // generation until later, so we only make one decoration.
David Neto85082642018-03-24 06:55:20 -07003940 getTypesNeedingArrayStride().insert(ResultType);
Alan Bakerfcda9482018-10-02 17:09:59 -04003941 break;
3942 default:
3943 break;
David Neto1a1a0582017-07-07 12:01:44 -04003944 }
David Neto22f144c2017-06-12 14:26:21 -04003945 }
3946
3947 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
SJWf93f5f32020-05-05 07:27:56 -05003948 Ops << MkId(getSPIRVValue(*II));
David Neto22f144c2017-06-12 14:26:21 -04003949 }
3950
SJWf93f5f32020-05-05 07:27:56 -05003951 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003952 break;
3953 }
3954 case Instruction::ExtractValue: {
3955 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
3956 // Ops[0] = Result Type ID
3957 // Ops[1] = Composite ID
3958 // Ops[2] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05003959 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003960
SJWf93f5f32020-05-05 07:27:56 -05003961 Ops << MkId(getSPIRVType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04003962
SJWf93f5f32020-05-05 07:27:56 -05003963 uint32_t CompositeID = getSPIRVValue(EVI->getAggregateOperand());
David Neto257c3892018-04-11 13:19:45 -04003964 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04003965
3966 for (auto &Index : EVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04003967 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04003968 }
3969
SJWf93f5f32020-05-05 07:27:56 -05003970 RID = addSPIRVInst(spv::OpCompositeExtract, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003971 break;
3972 }
3973 case Instruction::InsertValue: {
3974 InsertValueInst *IVI = cast<InsertValueInst>(&I);
3975 // Ops[0] = Result Type ID
3976 // Ops[1] = Object ID
3977 // Ops[2] = Composite ID
3978 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05003979 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003980
SJWf93f5f32020-05-05 07:27:56 -05003981 uint32_t ResTyID = getSPIRVType(I.getType());
David Neto257c3892018-04-11 13:19:45 -04003982 Ops << MkId(ResTyID);
David Neto22f144c2017-06-12 14:26:21 -04003983
SJWf93f5f32020-05-05 07:27:56 -05003984 uint32_t ObjectID = getSPIRVValue(IVI->getInsertedValueOperand());
David Neto257c3892018-04-11 13:19:45 -04003985 Ops << MkId(ObjectID);
David Neto22f144c2017-06-12 14:26:21 -04003986
SJWf93f5f32020-05-05 07:27:56 -05003987 uint32_t CompositeID = getSPIRVValue(IVI->getAggregateOperand());
David Neto257c3892018-04-11 13:19:45 -04003988 Ops << MkId(CompositeID);
David Neto22f144c2017-06-12 14:26:21 -04003989
3990 for (auto &Index : IVI->indices()) {
David Neto257c3892018-04-11 13:19:45 -04003991 Ops << MkNum(Index);
David Neto22f144c2017-06-12 14:26:21 -04003992 }
3993
SJWf93f5f32020-05-05 07:27:56 -05003994 RID = addSPIRVInst(spv::OpCompositeInsert, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003995 break;
3996 }
3997 case Instruction::Select: {
3998 //
3999 // Generate OpSelect.
4000 //
4001
4002 // Ops[0] = Result Type ID
4003 // Ops[1] = Condition ID
4004 // Ops[2] = True Constant ID
4005 // Ops[3] = False Constant ID
SJWf93f5f32020-05-05 07:27:56 -05004006 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004007
4008 // Find SPIRV instruction for parameter type.
4009 auto Ty = I.getType();
4010 if (Ty->isPointerTy()) {
4011 auto PointeeTy = Ty->getPointerElementType();
4012 if (PointeeTy->isStructTy() &&
4013 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
4014 Ty = PointeeTy;
alan-baker5b86ed72019-02-15 08:26:50 -05004015 } else {
4016 // Selecting between pointers requires variable pointers.
4017 setVariablePointersCapabilities(Ty->getPointerAddressSpace());
4018 if (!hasVariablePointers() && !selectFromSameObject(&I)) {
4019 setVariablePointers(true);
4020 }
David Neto22f144c2017-06-12 14:26:21 -04004021 }
4022 }
4023
SJWf93f5f32020-05-05 07:27:56 -05004024 Ops << MkId(getSPIRVType(Ty)) << MkId(getSPIRVValue(I.getOperand(0)))
4025 << MkId(getSPIRVValue(I.getOperand(1)))
4026 << MkId(getSPIRVValue(I.getOperand(2)));
David Neto22f144c2017-06-12 14:26:21 -04004027
SJWf93f5f32020-05-05 07:27:56 -05004028 RID = addSPIRVInst(spv::OpSelect, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004029 break;
4030 }
4031 case Instruction::ExtractElement: {
4032 // Handle <4 x i8> type manually.
4033 Type *CompositeTy = I.getOperand(0)->getType();
4034 if (is4xi8vec(CompositeTy)) {
4035 //
4036 // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
4037 // <4 x i8>.
4038 //
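      // Illustrative sketch, where the <4 x i8> lives in a 32-bit word
      // (names are made up):
      //   %r = extractelement <4 x i8> %v, i32 2
      //     ==>  %shifted = OpShiftRightLogical %uint %v %uint_16
      //          %r       = OpBitwiseAnd %uint %shifted %uint_255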
4039
4040 //
4041 // Generate OpShiftRightLogical
4042 //
4043 // Ops[0] = Result Type ID
4044 // Ops[1] = Operand 0
4045 // Ops[2] = Operand 1
4046 //
SJWf93f5f32020-05-05 07:27:56 -05004047 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004048
SJWf93f5f32020-05-05 07:27:56 -05004049 Ops << MkId(getSPIRVType(CompositeTy));
David Neto22f144c2017-06-12 14:26:21 -04004050
SJWf93f5f32020-05-05 07:27:56 -05004051 uint32_t Op0ID = getSPIRVValue(I.getOperand(0));
David Neto257c3892018-04-11 13:19:45 -04004052 Ops << MkId(Op0ID);
David Neto22f144c2017-06-12 14:26:21 -04004053
4054 uint32_t Op1ID = 0;
4055 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
4056 // Handle constant index.
4057 uint64_t Idx = CI->getZExtValue();
4058 Value *ShiftAmount =
4059 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
SJWf93f5f32020-05-05 07:27:56 -05004060 Op1ID = getSPIRVValue(ShiftAmount);
David Neto22f144c2017-06-12 14:26:21 -04004061 } else {
4062 // Handle variable index.
SJWf93f5f32020-05-05 07:27:56 -05004063 SPIRVOperandVec TmpOps;
David Neto22f144c2017-06-12 14:26:21 -04004064
SJWf93f5f32020-05-05 07:27:56 -05004065 TmpOps << MkId(getSPIRVType(Type::getInt32Ty(Context)))
4066 << MkId(getSPIRVValue(I.getOperand(1)));
David Neto22f144c2017-06-12 14:26:21 -04004067
4068 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
SJWf93f5f32020-05-05 07:27:56 -05004069 TmpOps << MkId(getSPIRVValue(Cst8));
David Neto22f144c2017-06-12 14:26:21 -04004070
SJWf93f5f32020-05-05 07:27:56 -05004071 Op1ID = addSPIRVInst(spv::OpIMul, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004072 }
David Neto257c3892018-04-11 13:19:45 -04004073 Ops << MkId(Op1ID);
David Neto22f144c2017-06-12 14:26:21 -04004074
SJWf93f5f32020-05-05 07:27:56 -05004075 uint32_t ShiftID = addSPIRVInst(spv::OpShiftRightLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004076
4077 //
4078 // Generate OpBitwiseAnd
4079 //
4080 // Ops[0] = Result Type ID
4081 // Ops[1] = Operand 0
4082 // Ops[2] = Operand 1
4083 //
4084 Ops.clear();
4085
SJWf93f5f32020-05-05 07:27:56 -05004086 Ops << MkId(getSPIRVType(CompositeTy)) << MkId(ShiftID);
David Neto22f144c2017-06-12 14:26:21 -04004087
4088 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
SJWf93f5f32020-05-05 07:27:56 -05004089 Ops << MkId(getSPIRVValue(CstFF));
David Neto22f144c2017-06-12 14:26:21 -04004090
SJWf93f5f32020-05-05 07:27:56 -05004091 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004092 break;
4093 }
4094
4095 // Ops[0] = Result Type ID
4096 // Ops[1] = Composite ID
4097 // Ops[2] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004098 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004099
SJWf93f5f32020-05-05 07:27:56 -05004100 Ops << MkId(getSPIRVType(I.getType()))
4101 << MkId(getSPIRVValue(I.getOperand(0)));
David Neto22f144c2017-06-12 14:26:21 -04004102
4103 spv::Op Opcode = spv::OpCompositeExtract;
4104 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
David Neto257c3892018-04-11 13:19:45 -04004105 Ops << MkNum(static_cast<uint32_t>(CI->getZExtValue()));
David Neto22f144c2017-06-12 14:26:21 -04004106 } else {
SJWf93f5f32020-05-05 07:27:56 -05004107 Ops << MkId(getSPIRVValue(I.getOperand(1)));
David Neto22f144c2017-06-12 14:26:21 -04004108 Opcode = spv::OpVectorExtractDynamic;
4109 }
4110
SJWf93f5f32020-05-05 07:27:56 -05004111 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004112 break;
4113 }
4114 case Instruction::InsertElement: {
4115 // Handle <4 x i8> type manually.
4116 Type *CompositeTy = I.getOperand(0)->getType();
4117 if (is4xi8vec(CompositeTy)) {
4118 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
SJWf93f5f32020-05-05 07:27:56 -05004119 uint32_t CstFFID = getSPIRVValue(CstFF);
David Neto22f144c2017-06-12 14:26:21 -04004120
4121 uint32_t ShiftAmountID = 0;
4122 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
4123 // Handle constant index.
4124 uint64_t Idx = CI->getZExtValue();
4125 Value *ShiftAmount =
4126 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
SJWf93f5f32020-05-05 07:27:56 -05004127 ShiftAmountID = getSPIRVValue(ShiftAmount);
David Neto22f144c2017-06-12 14:26:21 -04004128 } else {
4129 // Handle variable index.
SJWf93f5f32020-05-05 07:27:56 -05004130 SPIRVOperandVec TmpOps;
David Neto22f144c2017-06-12 14:26:21 -04004131
SJWf93f5f32020-05-05 07:27:56 -05004132 TmpOps << MkId(getSPIRVType(Type::getInt32Ty(Context)))
4133 << MkId(getSPIRVValue(I.getOperand(2)));
David Neto22f144c2017-06-12 14:26:21 -04004134
4135 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
SJWf93f5f32020-05-05 07:27:56 -05004136 TmpOps << MkId(getSPIRVValue(Cst8));
David Neto22f144c2017-06-12 14:26:21 -04004137
SJWf93f5f32020-05-05 07:27:56 -05004138 ShiftAmountID = addSPIRVInst(spv::OpIMul, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004139 }
4140
4141 //
4142 // Generate mask operations.
4143 //
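      // Illustrative sketch for inserting byte i (names are made up; %shift
      // holds i*8):
      //   %mask    = OpShiftLeftLogical %uint %uint_255 %shift
      //   %invmask = OpNot %uint %mask
      //   %orig    = OpBitwiseAnd %uint %v %invmask
      //   %newval  = OpShiftLeftLogical %uint %byte %shift
      //   %r       = OpBitwiseOr %uint %orig %newval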
4144
4145 // Shift the 0xFF mask left according to the insertelement index.
SJWf93f5f32020-05-05 07:27:56 -05004146 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004147
SJWf93f5f32020-05-05 07:27:56 -05004148 const uint32_t ResTyID = getSPIRVType(CompositeTy);
David Neto257c3892018-04-11 13:19:45 -04004149 Ops << MkId(ResTyID) << MkId(CstFFID) << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004150
SJWf93f5f32020-05-05 07:27:56 -05004151 uint32_t MaskID = addSPIRVInst(spv::OpShiftLeftLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004152
4153 // Inverse mask.
4154 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004155 Ops << MkId(ResTyID) << MkId(MaskID);
David Neto22f144c2017-06-12 14:26:21 -04004156
SJWf93f5f32020-05-05 07:27:56 -05004157 uint32_t InvMaskID = addSPIRVInst(spv::OpNot, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004158
4159 // Apply mask.
4160 Ops.clear();
SJWf93f5f32020-05-05 07:27:56 -05004161 Ops << MkId(ResTyID) << MkId(getSPIRVValue(I.getOperand(0)))
4162 << MkId(InvMaskID);
David Neto22f144c2017-06-12 14:26:21 -04004163
SJWf93f5f32020-05-05 07:27:56 -05004164 uint32_t OrgValID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004165
4166 // Shift the new value into position according to the insertelement index.
4167 Ops.clear();
SJWf93f5f32020-05-05 07:27:56 -05004168 Ops << MkId(ResTyID) << MkId(getSPIRVValue(I.getOperand(1)))
alan-bakerb6b09dc2018-11-08 16:59:28 -05004169 << MkId(ShiftAmountID);
David Neto22f144c2017-06-12 14:26:21 -04004170
SJWf93f5f32020-05-05 07:27:56 -05004171 uint32_t InsertValID = addSPIRVInst(spv::OpShiftLeftLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004172
4173 // Merge the shifted value into the masked original value.
4174 Ops.clear();
David Neto257c3892018-04-11 13:19:45 -04004175 Ops << MkId(ResTyID) << MkId(OrgValID) << MkId(InsertValID);
David Neto22f144c2017-06-12 14:26:21 -04004176
SJWf93f5f32020-05-05 07:27:56 -05004177 RID = addSPIRVInst(spv::OpBitwiseOr, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004178 break;
4179 }
4180
SJWf93f5f32020-05-05 07:27:56 -05004181 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004182
James Priced26efea2018-06-09 23:28:32 +01004183 // Ops[0] = Result Type ID
SJWf93f5f32020-05-05 07:27:56 -05004184 Ops << MkId(getSPIRVType(I.getType()));
David Neto22f144c2017-06-12 14:26:21 -04004185
4186 spv::Op Opcode = spv::OpCompositeInsert;
4187 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
David Neto257c3892018-04-11 13:19:45 -04004188 const auto value = CI->getZExtValue();
4189 assert(value <= UINT32_MAX);
James Priced26efea2018-06-09 23:28:32 +01004190 // Ops[1] = Object ID
4191 // Ops[2] = Composite ID
4192 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004193 Ops << MkId(getSPIRVValue(I.getOperand(1)))
4194 << MkId(getSPIRVValue(I.getOperand(0)))
James Priced26efea2018-06-09 23:28:32 +01004195 << MkNum(static_cast<uint32_t>(value));
David Neto22f144c2017-06-12 14:26:21 -04004196 } else {
James Priced26efea2018-06-09 23:28:32 +01004197 // Ops[1] = Composite ID
4198 // Ops[2] = Object ID
4199 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004200 Ops << MkId(getSPIRVValue(I.getOperand(0)))
4201 << MkId(getSPIRVValue(I.getOperand(1)))
4202 << MkId(getSPIRVValue(I.getOperand(2)));
David Neto22f144c2017-06-12 14:26:21 -04004203 Opcode = spv::OpVectorInsertDynamic;
4204 }
4205
SJWf93f5f32020-05-05 07:27:56 -05004206 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004207 break;
4208 }
4209 case Instruction::ShuffleVector: {
4210 // Ops[0] = Result Type ID
4211 // Ops[1] = Vector 1 ID
4212 // Ops[2] = Vector 2 ID
4213 // Ops[3] ... Ops[n] = Components (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004214 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004215
SJWf93f5f32020-05-05 07:27:56 -05004216 Ops << MkId(getSPIRVType(I.getType()))
4217 << MkId(getSPIRVValue(I.getOperand(0)))
4218 << MkId(getSPIRVValue(I.getOperand(1)));
David Neto22f144c2017-06-12 14:26:21 -04004219
alan-bakerc9666712020-04-01 16:31:21 -04004220 auto shuffle = cast<ShuffleVectorInst>(&I);
4221 SmallVector<int, 4> mask;
4222 shuffle->getShuffleMask(mask);
4223 for (auto i : mask) {
4224 if (i == UndefMaskElem) {
4225 if (clspv::Option::HackUndef())
4226 // Use 0 instead of undef.
David Neto257c3892018-04-11 13:19:45 -04004227 Ops << MkNum(0);
alan-bakerc9666712020-04-01 16:31:21 -04004228 else
4229 // Undef for shuffle in SPIR-V.
4230 Ops << MkNum(0xffffffff);
David Neto22f144c2017-06-12 14:26:21 -04004231 } else {
alan-bakerc9666712020-04-01 16:31:21 -04004232 Ops << MkNum(i);
David Neto22f144c2017-06-12 14:26:21 -04004233 }
4234 }
4235
SJWf93f5f32020-05-05 07:27:56 -05004236 RID = addSPIRVInst(spv::OpVectorShuffle, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004237 break;
4238 }
4239 case Instruction::ICmp:
4240 case Instruction::FCmp: {
4241 CmpInst *CmpI = cast<CmpInst>(&I);
4242
David Netod4ca2e62017-07-06 18:47:35 -04004243 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004244 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04004245 if (isa<PointerType>(ArgTy)) {
4246 CmpI->print(errs());
alan-baker21574d32020-01-29 16:00:31 -05004247 std::string name = I.getParent()->getParent()->getName().str();
David Netod4ca2e62017-07-06 18:47:35 -04004248 errs()
4249 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
4250 << "in function " << name << "\n";
4251 llvm_unreachable("Pointer equality check is invalid");
4252 break;
4253 }
4254
David Neto257c3892018-04-11 13:19:45 -04004255 // Ops[0] = Result Type ID
4256 // Ops[1] = Operand 1 ID
4257 // Ops[2] = Operand 2 ID
SJWf93f5f32020-05-05 07:27:56 -05004258 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004259
SJWf93f5f32020-05-05 07:27:56 -05004260 Ops << MkId(getSPIRVType(CmpI->getType()))
4261 << MkId(getSPIRVValue(CmpI->getOperand(0)))
4262 << MkId(getSPIRVValue(CmpI->getOperand(1)));
David Neto22f144c2017-06-12 14:26:21 -04004263
4264 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
SJWf93f5f32020-05-05 07:27:56 -05004265 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004266 break;
4267 }
4268 case Instruction::Br: {
SJW88ed5fe2020-05-11 12:40:57 -05004269 // Branch instructions are deferred because they need the IDs of their target labels.
4270 BasicBlock *BrBB = I.getParent();
4271 if (ContinueBlocks.count(BrBB) || MergeBlocks.count(BrBB)) {
4272 // Placeholder for Merge operation
4273 RID = addSPIRVPlaceholder(&I);
4274 }
4275 RID = addSPIRVPlaceholder(&I);
David Neto22f144c2017-06-12 14:26:21 -04004276 break;
4277 }
4278 case Instruction::Switch: {
4279 I.print(errs());
4280 llvm_unreachable("Unsupported instruction???");
4281 break;
4282 }
4283 case Instruction::IndirectBr: {
4284 I.print(errs());
4285 llvm_unreachable("Unsupported instruction???");
4286 break;
4287 }
4288 case Instruction::PHI: {
SJW88ed5fe2020-05-11 12:40:57 -05004289 // PHI instructions are deferred because they need the IDs of their incoming values and blocks.
4290 RID = addSPIRVPlaceholder(&I);
David Neto22f144c2017-06-12 14:26:21 -04004291 break;
4292 }
4293 case Instruction::Alloca: {
4294 //
4295 // Generate OpVariable.
4296 //
4297 // Ops[0] : Result Type ID
4298 // Ops[1] : Storage Class
SJWf93f5f32020-05-05 07:27:56 -05004299 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004300
SJWf93f5f32020-05-05 07:27:56 -05004301 Ops << MkId(getSPIRVType(I.getType())) << MkNum(spv::StorageClassFunction);
David Neto22f144c2017-06-12 14:26:21 -04004302
SJWf93f5f32020-05-05 07:27:56 -05004303 RID = addSPIRVInst(spv::OpVariable, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004304 break;
4305 }
4306 case Instruction::Load: {
4307 LoadInst *LD = cast<LoadInst>(&I);
4308 //
4309 // Generate OpLoad.
4310 //
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004311
alan-baker5b86ed72019-02-15 08:26:50 -05004312 if (LD->getType()->isPointerTy()) {
4313 // Loading a pointer requires variable pointers.
4314 setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
4315 }
David Neto22f144c2017-06-12 14:26:21 -04004316
SJWf93f5f32020-05-05 07:27:56 -05004317 uint32_t ResTyID = getSPIRVType(LD->getType());
4318 uint32_t PointerID = getSPIRVValue(LD->getPointerOperand());
David Netoa60b00b2017-09-15 16:34:09 -04004319
4320 // This is a hack to work around what looks like a driver bug.
4321 // When we're loading from the special variable holding the WorkgroupSize
David Neto0a2f98d2017-09-15 19:38:40 -04004322 // builtin value, use an OpBitwiseAnd of the value's ID rather than
4323 // generating a load.
David Neto66cfe642018-03-24 06:13:56 -07004324 // TODO(dneto): Remove this awful hack once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -04004325 if (PointerID == WorkgroupSizeVarID) {
David Neto0a2f98d2017-09-15 19:38:40 -04004326 // Generate a bitwise-and of the original value with itself.
4327 // We should have been able to get away with just an OpCopyObject,
4328 // but we need something more complex to get past certain driver bugs.
4329 // This is ridiculous, but necessary.
4330 // TODO(dneto): Revisit this once drivers fix their bugs.
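      // Illustrative sketch (IDs are made up):
      //   %wgsize = OpBitwiseAnd %v3uint %wgsize_value %wgsize_value
      // is emitted instead of:
      //   %wgsize = OpLoad %v3uint %wgsize_var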
4331
SJWf93f5f32020-05-05 07:27:56 -05004332 SPIRVOperandVec Ops;
David Neto257c3892018-04-11 13:19:45 -04004333 Ops << MkId(ResTyID) << MkId(WorkgroupSizeValueID)
4334 << MkId(WorkgroupSizeValueID);
David Neto0a2f98d2017-09-15 19:38:40 -04004335
SJWf93f5f32020-05-05 07:27:56 -05004336 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Netoa60b00b2017-09-15 16:34:09 -04004337 break;
4338 }
4339
4340 // This is the normal path. Generate a load.
4341
David Neto22f144c2017-06-12 14:26:21 -04004342 // Ops[0] = Result Type ID
4343 // Ops[1] = Pointer ID
4344 // Ops[2] ... Ops[n] = Optional Memory Access
4345 //
4346 // TODO: Do we need to implement Optional Memory Access???
David Neto0a2f98d2017-09-15 19:38:40 -04004347
SJWf93f5f32020-05-05 07:27:56 -05004348 SPIRVOperandVec Ops;
David Neto257c3892018-04-11 13:19:45 -04004349 Ops << MkId(ResTyID) << MkId(PointerID);
David Neto22f144c2017-06-12 14:26:21 -04004350
SJWf93f5f32020-05-05 07:27:56 -05004351 RID = addSPIRVInst(spv::OpLoad, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004352 break;
4353 }
4354 case Instruction::Store: {
4355 StoreInst *ST = cast<StoreInst>(&I);
4356 //
4357 // Generate OpStore.
4358 //
4359
alan-baker5b86ed72019-02-15 08:26:50 -05004360 if (ST->getValueOperand()->getType()->isPointerTy()) {
4361 // Storing a pointer requires variable pointers.
4362 setVariablePointersCapabilities(
4363 ST->getValueOperand()->getType()->getPointerAddressSpace());
4364 }
4365
David Neto22f144c2017-06-12 14:26:21 -04004366 // Ops[0] = Pointer ID
4367 // Ops[1] = Object ID
4368 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
4369 //
4370 // TODO: Do we need to implement Optional Memory Access???
SJWf93f5f32020-05-05 07:27:56 -05004371 SPIRVOperandVec Ops;
4372 Ops << MkId(getSPIRVValue(ST->getPointerOperand()))
4373 << MkId(getSPIRVValue(ST->getValueOperand()));
David Neto22f144c2017-06-12 14:26:21 -04004374
SJWf93f5f32020-05-05 07:27:56 -05004375 RID = addSPIRVInst(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004376 break;
4377 }
4378 case Instruction::AtomicCmpXchg: {
4379 I.print(errs());
4380 llvm_unreachable("Unsupported instruction???");
4381 break;
4382 }
4383 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01004384 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
4385
4386 spv::Op opcode;
4387
4388 switch (AtomicRMW->getOperation()) {
4389 default:
4390 I.print(errs());
4391 llvm_unreachable("Unsupported instruction???");
4392 case llvm::AtomicRMWInst::Add:
4393 opcode = spv::OpAtomicIAdd;
4394 break;
4395 case llvm::AtomicRMWInst::Sub:
4396 opcode = spv::OpAtomicISub;
4397 break;
4398 case llvm::AtomicRMWInst::Xchg:
4399 opcode = spv::OpAtomicExchange;
4400 break;
4401 case llvm::AtomicRMWInst::Min:
4402 opcode = spv::OpAtomicSMin;
4403 break;
4404 case llvm::AtomicRMWInst::Max:
4405 opcode = spv::OpAtomicSMax;
4406 break;
4407 case llvm::AtomicRMWInst::UMin:
4408 opcode = spv::OpAtomicUMin;
4409 break;
4410 case llvm::AtomicRMWInst::UMax:
4411 opcode = spv::OpAtomicUMax;
4412 break;
4413 case llvm::AtomicRMWInst::And:
4414 opcode = spv::OpAtomicAnd;
4415 break;
4416 case llvm::AtomicRMWInst::Or:
4417 opcode = spv::OpAtomicOr;
4418 break;
4419 case llvm::AtomicRMWInst::Xor:
4420 opcode = spv::OpAtomicXor;
4421 break;
4422 }
4423
4424 //
4425 // Generate OpAtomic*.
4426 //
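    // Illustrative sketch (names are made up; the scope and semantics operands
    // are the Device scope and UniformMemory|SequentiallyConsistent constants
    // built below):
    //   atomicrmw add i32* %p, i32 %v  ==>  %r = OpAtomicIAdd %uint %p %scope %semantics %v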
SJWf93f5f32020-05-05 07:27:56 -05004427 SPIRVOperandVec Ops;
Neil Henning39672102017-09-29 14:33:13 +01004428
SJWf93f5f32020-05-05 07:27:56 -05004429 Ops << MkId(getSPIRVType(I.getType()))
4430 << MkId(getSPIRVValue(AtomicRMW->getPointerOperand()));
Neil Henning39672102017-09-29 14:33:13 +01004431
4432 auto IntTy = Type::getInt32Ty(I.getContext());
Neil Henning39672102017-09-29 14:33:13 +01004433 const auto ConstantScopeDevice = ConstantInt::get(IntTy, spv::ScopeDevice);
SJWf93f5f32020-05-05 07:27:56 -05004434 Ops << MkId(getSPIRVValue(ConstantScopeDevice));
Neil Henning39672102017-09-29 14:33:13 +01004435
4436 const auto ConstantMemorySemantics = ConstantInt::get(
4437 IntTy, spv::MemorySemanticsUniformMemoryMask |
4438 spv::MemorySemanticsSequentiallyConsistentMask);
SJWf93f5f32020-05-05 07:27:56 -05004439 Ops << MkId(getSPIRVValue(ConstantMemorySemantics));
Neil Henning39672102017-09-29 14:33:13 +01004440
SJWf93f5f32020-05-05 07:27:56 -05004441 Ops << MkId(getSPIRVValue(AtomicRMW->getValOperand()));
Neil Henning39672102017-09-29 14:33:13 +01004442
SJWf93f5f32020-05-05 07:27:56 -05004443 RID = addSPIRVInst(opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004444 break;
4445 }
4446 case Instruction::Fence: {
4447 I.print(errs());
4448 llvm_unreachable("Unsupported instruction???");
4449 break;
4450 }
4451 case Instruction::Call: {
4452 CallInst *Call = dyn_cast<CallInst>(&I);
4453 Function *Callee = Call->getCalledFunction();
4454
Alan Baker202c8c72018-08-13 13:47:44 -04004455 if (Callee->getName().startswith(clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004456 if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
4457 // Generate an OpLoad
SJWf93f5f32020-05-05 07:27:56 -05004458 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004459
SJWf93f5f32020-05-05 07:27:56 -05004460 Ops << MkId(getSPIRVType(Call->getType()->getPointerElementType()))
David Neto862b7d82018-06-14 18:48:37 -04004461 << MkId(ResourceVarDeferredLoadCalls[Call]);
4462
SJWf93f5f32020-05-05 07:27:56 -05004463 RID = addSPIRVInst(spv::OpLoad, Ops);
David Neto862b7d82018-06-14 18:48:37 -04004464 break;
4465
4466 } else {
4467 // This maps to an OpVariable we've already generated.
4468 // No code is generated for the call.
4469 }
4470 break;
alan-bakerb6b09dc2018-11-08 16:59:28 -05004471 } else if (Callee->getName().startswith(
4472 clspv::WorkgroupAccessorFunction())) {
Alan Baker202c8c72018-08-13 13:47:44 -04004473 // Don't codegen an instruction here, but instead map this call directly
4474 // to the workgroup variable id.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004475 int spec_id = static_cast<int>(
4476 cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04004477 const auto &info = LocalSpecIdInfoMap[spec_id];
SJWf93f5f32020-05-05 07:27:56 -05004478 RID = info.variable_id;
Alan Baker202c8c72018-08-13 13:47:44 -04004479 break;
David Neto862b7d82018-06-14 18:48:37 -04004480 }
4481
4482 // Sampler initializers become a load of the corresponding sampler.
4483
Kévin Petitdf71de32019-04-09 14:09:50 +01004484 if (Callee->getName().equals(clspv::LiteralSamplerFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004485 // Map this to a load from the variable.
alan-baker09cb9802019-12-10 13:16:27 -05004486 const auto third_param = static_cast<unsigned>(
4487 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue());
4488 auto sampler_value = third_param;
4489 if (clspv::Option::UseSamplerMap()) {
4490 sampler_value = getSamplerMap()[third_param].first;
4491 }
David Neto862b7d82018-06-14 18:48:37 -04004492
4493 // Generate an OpLoad
SJWf93f5f32020-05-05 07:27:56 -05004494 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004495
SJWf93f5f32020-05-05 07:27:56 -05004496 Ops << MkId(getSPIRVType(SamplerTy->getPointerElementType()))
alan-baker09cb9802019-12-10 13:16:27 -05004497 << MkId(SamplerLiteralToIDMap[sampler_value]);
David Neto22f144c2017-06-12 14:26:21 -04004498
SJWf93f5f32020-05-05 07:27:56 -05004499 RID = addSPIRVInst(spv::OpLoad, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004500 break;
4501 }
4502
Kévin Petit349c9502019-03-28 17:24:14 +00004503 // Handle SPIR-V intrinsics
Kévin Petit9b340262019-06-19 18:31:11 +01004504 spv::Op opcode = StringSwitch<spv::Op>(Callee->getName())
4505 .Case("spirv.atomic_xor", spv::OpAtomicXor)
4506 .Default(spv::OpNop);
David Neto22f144c2017-06-12 14:26:21 -04004507
Kévin Petit617a76d2019-04-04 13:54:16 +01004508 // If the switch above didn't have an entry, the intrinsic may be using
4509 // the name mangling logic.
4510 bool usesMangler = false;
4511 if (opcode == spv::OpNop) {
4512 if (Callee->getName().startswith(clspv::SPIRVOpIntrinsicFunction())) {
4513 auto OpCst = cast<ConstantInt>(Call->getOperand(0));
4514 opcode = static_cast<spv::Op>(OpCst->getZExtValue());
4515 usesMangler = true;
4516 }
4517 }
4518
Kévin Petit349c9502019-03-28 17:24:14 +00004519 if (opcode != spv::OpNop) {
4520
SJWf93f5f32020-05-05 07:27:56 -05004521 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004522
Kévin Petit349c9502019-03-28 17:24:14 +00004523 if (!I.getType()->isVoidTy()) {
SJWf93f5f32020-05-05 07:27:56 -05004524 Ops << MkId(getSPIRVType(I.getType()));
Kévin Petit349c9502019-03-28 17:24:14 +00004525 }
David Neto22f144c2017-06-12 14:26:21 -04004526
Kévin Petit617a76d2019-04-04 13:54:16 +01004527 unsigned firstOperand = usesMangler ? 1 : 0;
4528 for (unsigned i = firstOperand; i < Call->getNumArgOperands(); i++) {
SJWf93f5f32020-05-05 07:27:56 -05004529 Ops << MkId(getSPIRVValue(Call->getArgOperand(i)));
David Neto22f144c2017-06-12 14:26:21 -04004530 }
4531
SJWf93f5f32020-05-05 07:27:56 -05004532 RID = addSPIRVInst(opcode, Ops);
Kévin Petit8a560882019-03-21 15:24:34 +00004533 break;
4534 }
4535
David Neto22f144c2017-06-12 14:26:21 -04004536 // spirv.copy_memory.* intrinsics become OpCopyMemory instructions.
4537 if (Callee->getName().startswith("spirv.copy_memory")) {
4538 //
4539 // Generate OpCopyMemory.
4540 //
4541
4542 // Ops[0] = Dst ID
4543 // Ops[1] = Src ID
4544 // Ops[2] = Memory Access
4545 // Ops[3] = Alignment
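      // Illustrative sketch of a non-volatile copy with alignment 4
      // (names are made up):
      //   OpCopyMemory %dst %src Aligned 4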
4546
4547 auto IsVolatile =
4548 dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue() != 0;
4549
4550 auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
4551 : spv::MemoryAccessMaskNone;
4552
4553 auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;
4554
4555 auto Alignment =
4556 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4557
SJWf93f5f32020-05-05 07:27:56 -05004558 SPIRVOperandVec Ops;
4559 Ops << MkId(getSPIRVValue(Call->getArgOperand(0)))
4560 << MkId(getSPIRVValue(Call->getArgOperand(1))) << MkNum(MemoryAccess)
David Neto257c3892018-04-11 13:19:45 -04004561 << MkNum(static_cast<uint32_t>(Alignment));
David Neto22f144c2017-06-12 14:26:21 -04004562
SJWf93f5f32020-05-05 07:27:56 -05004563 RID = addSPIRVInst(spv::OpCopyMemory, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004564 break;
4565 }
4566
SJW2c317da2020-03-23 07:39:13 -05004567 // read_image is converted to OpSampledImage and OpImageSampleExplicitLod.
4568 // Additionally, OpTypeSampledImage is generated.
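    // Illustrative sketch of the generated pair (types and names are made up):
    //   %si = OpSampledImage %sampled_image_ty %image %sampler
    //   %r  = OpImageSampleExplicitLod %v4float %si %coord Lod %float_0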
SJW173c7e92020-03-16 08:44:47 -05004569 if (IsSampledImageRead(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004570 //
4571 // Generate OpSampledImage.
4572 //
4573 // Ops[0] = Result Type ID
4574 // Ops[1] = Image ID
4575 // Ops[2] = Sampler ID
4576 //
SJWf93f5f32020-05-05 07:27:56 -05004577 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004578
4579 Value *Image = Call->getArgOperand(0);
4580 Value *Sampler = Call->getArgOperand(1);
4581 Value *Coordinate = Call->getArgOperand(2);
4582
4583 TypeMapType &OpImageTypeMap = getImageTypeMap();
4584 Type *ImageTy = Image->getType()->getPointerElementType();
4585 uint32_t ImageTyID = OpImageTypeMap[ImageTy];
SJWf93f5f32020-05-05 07:27:56 -05004586 uint32_t ImageID = getSPIRVValue(Image);
4587 uint32_t SamplerID = getSPIRVValue(Sampler);
David Neto257c3892018-04-11 13:19:45 -04004588
4589 Ops << MkId(ImageTyID) << MkId(ImageID) << MkId(SamplerID);
David Neto22f144c2017-06-12 14:26:21 -04004590
SJWf93f5f32020-05-05 07:27:56 -05004591 uint32_t SampledImageID = addSPIRVInst(spv::OpSampledImage, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004592
4593 //
4594 // Generate OpImageSampleExplicitLod.
4595 //
4596 // Ops[0] = Result Type ID
4597 // Ops[1] = Sampled Image ID
4598 // Ops[2] = Coordinate ID
4599 // Ops[3] = Image Operands Type ID
4600 // Ops[4] ... Ops[n] = Operands ID
4601 //
4602 Ops.clear();
4603
alan-bakerf67468c2019-11-25 15:51:49 -05004604 const bool is_int_image = IsIntImageType(Image->getType());
4605 uint32_t result_type = 0;
4606 if (is_int_image) {
4607 result_type = v4int32ID;
4608 } else {
SJWf93f5f32020-05-05 07:27:56 -05004609 result_type = getSPIRVType(Call->getType());
alan-bakerf67468c2019-11-25 15:51:49 -05004610 }
4611
SJWf93f5f32020-05-05 07:27:56 -05004612 Ops << MkId(result_type) << MkId(SampledImageID)
4613 << MkId(getSPIRVValue(Coordinate))
alan-bakerf67468c2019-11-25 15:51:49 -05004614 << MkNum(spv::ImageOperandsLodMask);
David Neto22f144c2017-06-12 14:26:21 -04004615
4616 Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
SJWf93f5f32020-05-05 07:27:56 -05004617 Ops << MkId(getSPIRVValue(CstFP0));
David Neto22f144c2017-06-12 14:26:21 -04004618
SJWf93f5f32020-05-05 07:27:56 -05004619 RID = addSPIRVInst(spv::OpImageSampleExplicitLod, Ops);
alan-bakerf67468c2019-11-25 15:51:49 -05004620
4621 if (is_int_image) {
4622 // Generate the bitcast.
4623 Ops.clear();
SJWf93f5f32020-05-05 07:27:56 -05004624 Ops << MkId(getSPIRVType(Call->getType())) << MkId(RID);
4625 RID = addSPIRVInst(spv::OpBitcast, Ops);
alan-bakerf67468c2019-11-25 15:51:49 -05004626 }
David Neto22f144c2017-06-12 14:26:21 -04004627 break;
4628 }
4629
alan-baker75090e42020-02-20 11:21:04 -05004630 // read_image (without a sampler) is mapped to OpImageFetch.
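    // Illustrative sketch (types and names are made up):
    //   %r = OpImageFetch %v4float %image %coord Lod %uint_0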
SJW173c7e92020-03-16 08:44:47 -05004631 if (IsUnsampledImageRead(Callee)) {
alan-baker75090e42020-02-20 11:21:04 -05004632 Value *Image = Call->getArgOperand(0);
4633 Value *Coordinate = Call->getArgOperand(1);
4634
4635 //
4636 // Generate OpImageFetch
4637 //
4638 // Ops[0] = Result Type ID
4639 // Ops[1] = Image ID
4640 // Ops[2] = Coordinate ID
4641 // Ops[3] = Lod
4642 // Ops[4] = 0
4643 //
SJWf93f5f32020-05-05 07:27:56 -05004644 SPIRVOperandVec Ops;
alan-baker75090e42020-02-20 11:21:04 -05004645
4646 const bool is_int_image = IsIntImageType(Image->getType());
4647 uint32_t result_type = 0;
4648 if (is_int_image) {
4649 result_type = v4int32ID;
4650 } else {
SJWf93f5f32020-05-05 07:27:56 -05004651 result_type = getSPIRVType(Call->getType());
alan-baker75090e42020-02-20 11:21:04 -05004652 }
4653
SJWf93f5f32020-05-05 07:27:56 -05004654 Ops << MkId(result_type) << MkId(getSPIRVValue(Image))
4655 << MkId(getSPIRVValue(Coordinate))
alan-baker75090e42020-02-20 11:21:04 -05004656 << MkNum(spv::ImageOperandsLodMask);
4657
4658 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
SJWf93f5f32020-05-05 07:27:56 -05004659 Ops << MkId(getSPIRVValue(CstInt0));
alan-baker75090e42020-02-20 11:21:04 -05004660
SJWf93f5f32020-05-05 07:27:56 -05004661 RID = addSPIRVInst(spv::OpImageFetch, Ops);
alan-baker75090e42020-02-20 11:21:04 -05004662
4663 if (is_int_image) {
4664 // Generate the bitcast.
4665 Ops.clear();
SJWf93f5f32020-05-05 07:27:56 -05004666 Ops << MkId(getSPIRVType(Call->getType())) << MkId(RID);
4667 RID = addSPIRVInst(spv::OpBitcast, Ops);
alan-baker75090e42020-02-20 11:21:04 -05004668 }
4669 break;
4670 }
4671
alan-bakerf67468c2019-11-25 15:51:49 -05004672 // write_image is mapped to OpImageWrite.
SJW173c7e92020-03-16 08:44:47 -05004673 if (IsImageWrite(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004674 //
4675 // Generate OpImageWrite.
4676 //
4677 // Ops[0] = Image ID
4678 // Ops[1] = Coordinate ID
4679 // Ops[2] = Texel ID
4680 // Ops[3] = (Optional) Image Operands Type (Literal Number)
4681 // Ops[4] ... Ops[n] = (Optional) Operands ID
4682 //
SJWf93f5f32020-05-05 07:27:56 -05004683 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004684
4685 Value *Image = Call->getArgOperand(0);
4686 Value *Coordinate = Call->getArgOperand(1);
4687 Value *Texel = Call->getArgOperand(2);
4688
SJWf93f5f32020-05-05 07:27:56 -05004689 uint32_t ImageID = getSPIRVValue(Image);
4690 uint32_t CoordinateID = getSPIRVValue(Coordinate);
4691 uint32_t TexelID = getSPIRVValue(Texel);
alan-bakerf67468c2019-11-25 15:51:49 -05004692
4693 const bool is_int_image = IsIntImageType(Image->getType());
4694 if (is_int_image) {
4695 // Generate a bitcast to v4int and use it as the texel value.
alan-bakerf67468c2019-11-25 15:51:49 -05004696 Ops << MkId(v4int32ID) << MkId(TexelID);
SJWf93f5f32020-05-05 07:27:56 -05004697 TexelID = addSPIRVInst(spv::OpBitcast, Ops);
alan-bakerf67468c2019-11-25 15:51:49 -05004698 Ops.clear();
alan-bakerf67468c2019-11-25 15:51:49 -05004699 }
David Neto257c3892018-04-11 13:19:45 -04004700 Ops << MkId(ImageID) << MkId(CoordinateID) << MkId(TexelID);
David Neto22f144c2017-06-12 14:26:21 -04004701
SJWf93f5f32020-05-05 07:27:56 -05004702 RID = addSPIRVInst(spv::OpImageWrite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004703 break;
4704 }
4705
alan-bakerce179f12019-12-06 19:02:22 -05004706 // get_image_* is mapped to OpImageQuerySize or OpImageQuerySizeLod
SJW173c7e92020-03-16 08:44:47 -05004707 if (IsImageQuery(Callee)) {
David Neto5c22a252018-03-15 16:07:41 -04004708 //
alan-bakerce179f12019-12-06 19:02:22 -05004709 // Generate OpImageQuerySize[Lod]
David Neto5c22a252018-03-15 16:07:41 -04004710 //
4711 // Ops[0] = Image ID
4712 //
alan-bakerce179f12019-12-06 19:02:22 -05004713 // Result type has components equal to the dimensionality of the image,
4714 // plus 1 if the image is arrayed.
4715 //
alan-bakerf906d2b2019-12-10 11:26:23 -05004716 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
SJWf93f5f32020-05-05 07:27:56 -05004717 SPIRVOperandVec Ops;
David Neto5c22a252018-03-15 16:07:41 -04004718
4719 // Implement:
alan-bakerce179f12019-12-06 19:02:22 -05004720 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
4721 uint32_t SizesTypeID = 0;
4722
David Neto5c22a252018-03-15 16:07:41 -04004723 Value *Image = Call->getArgOperand(0);
alan-bakerce179f12019-12-06 19:02:22 -05004724 const uint32_t dim = ImageDimensionality(Image->getType());
alan-baker7150a1d2020-02-25 08:31:06 -05004725 const uint32_t components =
4726 dim + (IsArrayImageType(Image->getType()) ? 1 : 0);
alan-bakerce179f12019-12-06 19:02:22 -05004727 if (components == 1) {
SJWf93f5f32020-05-05 07:27:56 -05004728 SizesTypeID = getSPIRVType(Type::getInt32Ty(Context));
alan-bakerce179f12019-12-06 19:02:22 -05004729 } else {
SJWf93f5f32020-05-05 07:27:56 -05004730 SizesTypeID = getSPIRVType(
4731 VectorType::get(Type::getInt32Ty(Context), components));
alan-bakerce179f12019-12-06 19:02:22 -05004732 }
SJWf93f5f32020-05-05 07:27:56 -05004733 uint32_t ImageID = getSPIRVValue(Image);
David Neto257c3892018-04-11 13:19:45 -04004734 Ops << MkId(SizesTypeID) << MkId(ImageID);
alan-bakerce179f12019-12-06 19:02:22 -05004735 spv::Op query_opcode = spv::OpImageQuerySize;
SJW173c7e92020-03-16 08:44:47 -05004736 if (IsSampledImageType(Image->getType())) {
alan-bakerce179f12019-12-06 19:02:22 -05004737 query_opcode = spv::OpImageQuerySizeLod;
4738 // Need explicit 0 for Lod operand.
4739 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
SJWf93f5f32020-05-05 07:27:56 -05004740 Ops << MkId(getSPIRVValue(CstInt0));
alan-bakerce179f12019-12-06 19:02:22 -05004741 }
David Neto5c22a252018-03-15 16:07:41 -04004742
SJWf93f5f32020-05-05 07:27:56 -05004743 RID = addSPIRVInst(query_opcode, Ops);
David Neto5c22a252018-03-15 16:07:41 -04004744
alan-bakerce179f12019-12-06 19:02:22 -05004745 // May require an extra instruction to create the appropriate result of
4746 // the builtin function.
SJW173c7e92020-03-16 08:44:47 -05004747 if (IsGetImageDim(Callee)) {
alan-bakerce179f12019-12-06 19:02:22 -05004748 if (dim == 3) {
4749 // get_image_dim returns an int4 for 3D images.
4750 //
David Neto5c22a252018-03-15 16:07:41 -04004751
alan-bakerce179f12019-12-06 19:02:22 -05004752 // Implement:
4753 // %result = OpCompositeConstruct %uint4 %sizes %uint_0
4754 Ops.clear();
SJWf93f5f32020-05-05 07:27:56 -05004755 Ops << MkId(getSPIRVType(
4756 VectorType::get(Type::getInt32Ty(Context), 4)))
4757 << MkId(RID);
David Neto5c22a252018-03-15 16:07:41 -04004758
alan-bakerce179f12019-12-06 19:02:22 -05004759 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
SJWf93f5f32020-05-05 07:27:56 -05004760 Ops << MkId(getSPIRVValue(CstInt0));
David Neto5c22a252018-03-15 16:07:41 -04004761
SJWf93f5f32020-05-05 07:27:56 -05004762 RID = addSPIRVInst(spv::OpCompositeConstruct, Ops);
alan-bakerce179f12019-12-06 19:02:22 -05004763 } else if (dim != components) {
4764 // get_image_dim return an int2 regardless of the arrayedness of the
4765 // image. If the image is arrayed an element must be dropped from the
4766 // query result.
4767 //
alan-bakerce179f12019-12-06 19:02:22 -05004768
4769 // Implement:
4770 // %result = OpVectorShuffle %uint2 %sizes %sizes 0 1
4771 Ops.clear();
SJWf93f5f32020-05-05 07:27:56 -05004772 Ops << MkId(getSPIRVType(
4773 VectorType::get(Type::getInt32Ty(Context), 2)))
4774 << MkId(RID) << MkId(RID) << MkNum(0) << MkNum(1);
alan-bakerce179f12019-12-06 19:02:22 -05004775
SJWf93f5f32020-05-05 07:27:56 -05004776 RID = addSPIRVInst(spv::OpVectorShuffle, Ops);
alan-bakerce179f12019-12-06 19:02:22 -05004777 }
4778 } else if (components > 1) {
alan-bakerce179f12019-12-06 19:02:22 -05004779 // Implement:
4780 // %result = OpCompositeExtract %uint %sizes <component number>
4781 Ops.clear();
SJWf93f5f32020-05-05 07:27:56 -05004782 Ops << MkId(TypeMap[I.getType()]) << MkId(RID);
alan-bakerce179f12019-12-06 19:02:22 -05004783
4784 uint32_t component = 0;
4785 if (IsGetImageHeight(Callee))
4786 component = 1;
4787 else if (IsGetImageDepth(Callee))
4788 component = 2;
4789 Ops << MkNum(component);
4790
SJWf93f5f32020-05-05 07:27:56 -05004791 RID = addSPIRVInst(spv::OpCompositeExtract, Ops);
alan-bakerce179f12019-12-06 19:02:22 -05004792 }
David Neto5c22a252018-03-15 16:07:41 -04004793 break;
4794 }
4795
SJW88ed5fe2020-05-11 12:40:57 -05004796 // Call instructions are deferred because they need the callee function's ID.
4797 RID = addSPIRVPlaceholder(&I);
David Neto22f144c2017-06-12 14:26:21 -04004798
David Neto3fbb4072017-10-16 11:28:14 -04004799 // Check whether the implementation of this call uses an extended
4800 // instruction plus one more value-producing instruction. If so, then
4801 // reserve the id for the extra value-producing slot.
4802 glsl::ExtInst EInst = getIndirectExtInstEnum(Callee->getName());
4803 if (EInst != kGlslExtInstBad) {
4804 // Reserve a spot for the extra value.
SJW88ed5fe2020-05-11 12:40:57 -05004805 RID = addSPIRVPlaceholder(&I);
David Neto22f144c2017-06-12 14:26:21 -04004806 }
4807 break;
4808 }
4809 case Instruction::Ret: {
4810 unsigned NumOps = I.getNumOperands();
4811 if (NumOps == 0) {
4812 //
4813 // Generate OpReturn.
4814 //
SJWf93f5f32020-05-05 07:27:56 -05004815 RID = addSPIRVInst(spv::OpReturn);
David Neto22f144c2017-06-12 14:26:21 -04004816 } else {
4817 //
4818 // Generate OpReturnValue.
4819 //
4820
4821 // Ops[0] = Return Value ID
SJWf93f5f32020-05-05 07:27:56 -05004822 SPIRVOperandVec Ops;
David Neto257c3892018-04-11 13:19:45 -04004823
SJWf93f5f32020-05-05 07:27:56 -05004824 Ops << MkId(getSPIRVValue(I.getOperand(0)));
David Neto22f144c2017-06-12 14:26:21 -04004825
SJWf93f5f32020-05-05 07:27:56 -05004826 RID = addSPIRVInst(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004827 break;
4828 }
4829 break;
4830 }
4831 }
SJWf93f5f32020-05-05 07:27:56 -05004832
4833 // Register Instruction to ValueMap.
4834 if (0 != RID) {
4835 VMap[&I] = RID;
4836 }
David Neto22f144c2017-06-12 14:26:21 -04004837}
4838
4839void SPIRVProducerPass::GenerateFuncEpilogue() {
David Neto22f144c2017-06-12 14:26:21 -04004840
4841 //
4842 // Generate OpFunctionEnd
4843 //
SJWf93f5f32020-05-05 07:27:56 -05004844 addSPIRVInst(spv::OpFunctionEnd);
David Neto22f144c2017-06-12 14:26:21 -04004845}
4846
4847bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05004848 // Don't specialize <4 x i8> if i8 is generally supported.
4849 if (clspv::Option::Int8Support())
4850 return false;
4851
David Neto22f144c2017-06-12 14:26:21 -04004852 LLVMContext &Context = Ty->getContext();
James Pricecf53df42020-04-20 14:41:24 -04004853 if (auto VecTy = dyn_cast<VectorType>(Ty)) {
4854 if (VecTy->getElementType() == Type::getInt8Ty(Context) &&
4855 VecTy->getNumElements() == 4) {
David Neto22f144c2017-06-12 14:26:21 -04004856 return true;
4857 }
4858 }
4859
4860 return false;
4861}
4862
4863void SPIRVProducerPass::HandleDeferredInstruction() {
David Neto22f144c2017-06-12 14:26:21 -04004864 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
4865
SJW88ed5fe2020-05-11 12:40:57 -05004866 for (size_t i = 0; i < DeferredInsts.size(); ++i) {
4867 Value *Inst = DeferredInsts[i].first;
4868 SPIRVInstruction *Placeholder = DeferredInsts[i].second;
4869 SPIRVOperandVec Operands;
4870
4871 auto nextDeferred = [&i, &Inst, &DeferredInsts, &Placeholder]() {
4872 ++i;
4873 assert(DeferredInsts.size() > i);
4874 assert(Inst == DeferredInsts[i].first);
4875 Placeholder = DeferredInsts[i].second;
4876 };
David Neto22f144c2017-06-12 14:26:21 -04004877
4878 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
alan-baker06cad652019-12-03 17:56:47 -05004879 // Check whether this branch needs to be preceded by a merge instruction.
David Neto22f144c2017-06-12 14:26:21 -04004880 BasicBlock *BrBB = Br->getParent();
alan-baker06cad652019-12-03 17:56:47 -05004881 if (ContinueBlocks.count(BrBB)) {
David Neto22f144c2017-06-12 14:26:21 -04004882 //
4883 // Generate OpLoopMerge.
4884 //
4885 // Ops[0] = Merge Block ID
4886 // Ops[1] = Continue Target ID
4887 // Ops[2] = Loop Control
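        // Illustrative sketch of the structured loop header emitted here
        // (names are made up):
        //   OpLoopMerge %merge_bb %continue_bb None
        //   OpBranchConditional %cond %body_bb %merge_bb   (or an OpBranch)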
SJWf93f5f32020-05-05 07:27:56 -05004888 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004889
alan-baker06cad652019-12-03 17:56:47 -05004890 auto MergeBB = MergeBlocks[BrBB];
4891 auto ContinueBB = ContinueBlocks[BrBB];
SJWf93f5f32020-05-05 07:27:56 -05004892 uint32_t MergeBBID = getSPIRVValue(MergeBB);
4893 uint32_t ContinueBBID = getSPIRVValue(ContinueBB);
David Neto257c3892018-04-11 13:19:45 -04004894 Ops << MkId(MergeBBID) << MkId(ContinueBBID)
alan-baker06cad652019-12-03 17:56:47 -05004895 << MkNum(spv::LoopControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04004896
SJW88ed5fe2020-05-11 12:40:57 -05004897 replaceSPIRVInst(Placeholder, spv::OpLoopMerge, Ops);
4898
4899 nextDeferred();
4900
alan-baker06cad652019-12-03 17:56:47 -05004901 } else if (MergeBlocks.count(BrBB)) {
4902 //
4903 // Generate OpSelectionMerge.
4904 //
4905 // Ops[0] = Merge Block ID
4906 // Ops[1] = Selection Control
SJWf93f5f32020-05-05 07:27:56 -05004907 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004908
alan-baker06cad652019-12-03 17:56:47 -05004909 auto MergeBB = MergeBlocks[BrBB];
SJWf93f5f32020-05-05 07:27:56 -05004910 uint32_t MergeBBID = getSPIRVValue(MergeBB);
alan-baker06cad652019-12-03 17:56:47 -05004911 Ops << MkId(MergeBBID) << MkNum(spv::SelectionControlMaskNone);
David Neto22f144c2017-06-12 14:26:21 -04004912
SJW88ed5fe2020-05-11 12:40:57 -05004913 replaceSPIRVInst(Placeholder, spv::OpSelectionMerge, Ops);
4914
4915 nextDeferred();
David Neto22f144c2017-06-12 14:26:21 -04004916 }
4917
4918 if (Br->isConditional()) {
4919 //
4920 // Generate OpBranchConditional.
4921 //
4922 // Ops[0] = Condition ID
4923 // Ops[1] = True Label ID
4924 // Ops[2] = False Label ID
4925 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
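        // For example:  OpBranchConditional %cond %true_bb %false_bb
        // Branch weights are optional and are not emitted here.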
SJWf93f5f32020-05-05 07:27:56 -05004926 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004927
SJWf93f5f32020-05-05 07:27:56 -05004928 uint32_t CondID = getSPIRVValue(Br->getCondition());
4929 uint32_t TrueBBID = getSPIRVValue(Br->getSuccessor(0));
4930 uint32_t FalseBBID = getSPIRVValue(Br->getSuccessor(1));
David Neto257c3892018-04-11 13:19:45 -04004931
4932 Ops << MkId(CondID) << MkId(TrueBBID) << MkId(FalseBBID);
David Neto22f144c2017-06-12 14:26:21 -04004933
SJW88ed5fe2020-05-11 12:40:57 -05004934 replaceSPIRVInst(Placeholder, spv::OpBranchConditional, Ops);
4935
David Neto22f144c2017-06-12 14:26:21 -04004936 } else {
4937 //
4938 // Generate OpBranch.
4939 //
4940 // Ops[0] = Target Label ID
SJWf93f5f32020-05-05 07:27:56 -05004941 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004942
SJWf93f5f32020-05-05 07:27:56 -05004943 uint32_t TargetID = getSPIRVValue(Br->getSuccessor(0));
David Neto257c3892018-04-11 13:19:45 -04004944 Ops << MkId(TargetID);
David Neto22f144c2017-06-12 14:26:21 -04004945
SJW88ed5fe2020-05-11 12:40:57 -05004946 replaceSPIRVInst(Placeholder, spv::OpBranch, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004947 }
4948 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5ed87542020-03-23 11:05:22 -04004949 if (PHI->getType()->isPointerTy() && !IsSamplerType(PHI->getType()) &&
4950 !IsImageType(PHI->getType())) {
alan-baker5b86ed72019-02-15 08:26:50 -05004951 // OpPhi on pointers requires variable pointers.
4952 setVariablePointersCapabilities(
4953 PHI->getType()->getPointerAddressSpace());
4954 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
4955 setVariablePointers(true);
4956 }
4957 }
4958
David Neto22f144c2017-06-12 14:26:21 -04004959 //
4960 // Generate OpPhi.
4961 //
4962 // Ops[0] = Result Type ID
4963 // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
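      // For example:  %p = OpPhi %float %a %then_bb %b %else_bb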
SJWf93f5f32020-05-05 07:27:56 -05004964 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004965
SJWf93f5f32020-05-05 07:27:56 -05004966 Ops << MkId(getSPIRVType(PHI->getType()));
David Neto22f144c2017-06-12 14:26:21 -04004967
SJW88ed5fe2020-05-11 12:40:57 -05004968 for (unsigned j = 0; j < PHI->getNumIncomingValues(); j++) {
4969 uint32_t VarID = getSPIRVValue(PHI->getIncomingValue(j));
4970 uint32_t ParentID = getSPIRVValue(PHI->getIncomingBlock(j));
David Neto257c3892018-04-11 13:19:45 -04004971 Ops << MkId(VarID) << MkId(ParentID);
David Neto22f144c2017-06-12 14:26:21 -04004972 }
4973
SJW88ed5fe2020-05-11 12:40:57 -05004974 replaceSPIRVInst(Placeholder, spv::OpPhi, Ops);
4975
David Neto22f144c2017-06-12 14:26:21 -04004976 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
4977 Function *Callee = Call->getCalledFunction();
SJW2c317da2020-03-23 07:39:13 -05004978 LLVMContext &Context = Callee->getContext();
4979 auto IntTy = Type::getInt32Ty(Context);
4980 auto callee_code = Builtins::Lookup(Callee);
David Neto3fbb4072017-10-16 11:28:14 -04004981 auto callee_name = Callee->getName();
4982 glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04004983
4984 if (EInst) {
4985 uint32_t &ExtInstImportID = getOpExtInstImportID();
4986
4987 //
4988 // Generate OpExtInst.
4989 //
4990
4991 // Ops[0] = Result Type ID
4992 // Ops[1] = Set ID (OpExtInstImport ID)
4993 // Ops[2] = Instruction Number (Literal Number)
4994 // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
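        // For example, a call to sqrt() is emitted as something like:
        //   %result = OpExtInst %float %glsl_import Sqrt %x
        // where %glsl_import is the GLSL.std.450 OpExtInstImport result id.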
SJWf93f5f32020-05-05 07:27:56 -05004995 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004996
SJWf93f5f32020-05-05 07:27:56 -05004997 Ops << MkId(getSPIRVType(Call->getType())) << MkId(ExtInstImportID)
David Neto862b7d82018-06-14 18:48:37 -04004998 << MkNum(EInst);
David Neto22f144c2017-06-12 14:26:21 -04004999
David Neto22f144c2017-06-12 14:26:21 -04005000 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
SJW88ed5fe2020-05-11 12:40:57 -05005001 for (unsigned j = 0; j < CalleeFTy->getNumParams(); j++) {
5002 Ops << MkId(getSPIRVValue(Call->getOperand(j)));
David Neto22f144c2017-06-12 14:26:21 -04005003 }
5004
SJW88ed5fe2020-05-11 12:40:57 -05005005 SPIRVID RID = replaceSPIRVInst(Placeholder, spv::OpExtInst, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005006
David Neto3fbb4072017-10-16 11:28:14 -04005007 const auto IndirectExtInst = getIndirectExtInstEnum(callee_name);
5008 if (IndirectExtInst != kGlslExtInstBad) {
SJW88ed5fe2020-05-11 12:40:57 -05005009
5010 nextDeferred();
5011
David Neto3fbb4072017-10-16 11:28:14 -04005012 // Generate one more instruction that uses the result of the extended
5013 // instruction. Its result id is one more than the id of the
5014 // extended instruction.
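          // For example (per the switch below), clz(x) is lowered to
          //   31 - FindUMsb(x)
          // and acospi/asinpi/atanpi/atan2pi multiply the corresponding GLSL
          // extended-instruction result by kOneOverPi (1/pi).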
SJW88ed5fe2020-05-11 12:40:57 -05005015 auto generate_extra_inst = [this, &Context, &Call, &Placeholder,
5016 RID](spv::Op opcode, Constant *constant) {
David Neto3fbb4072017-10-16 11:28:14 -04005017 //
5018 // Generate instruction like:
5019 // result = opcode constant <extinst-result>
5020 //
5021 // Ops[0] = Result Type ID
5022 // Ops[1] = Operand 0 ;; the constant, suitably splatted
5023 // Ops[2] = Operand 1 ;; the result of the extended instruction
SJWf93f5f32020-05-05 07:27:56 -05005024 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04005025
David Neto3fbb4072017-10-16 11:28:14 -04005026 Type *resultTy = Call->getType();
SJWf93f5f32020-05-05 07:27:56 -05005027 Ops << MkId(getSPIRVType(resultTy));
David Neto3fbb4072017-10-16 11:28:14 -04005028
5029 if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
5030 constant = ConstantVector::getSplat(
alan-baker7261e062020-03-15 14:35:48 -04005031 {static_cast<unsigned>(vectorTy->getNumElements()), false},
5032 constant);
David Neto3fbb4072017-10-16 11:28:14 -04005033 }
SJW88ed5fe2020-05-11 12:40:57 -05005034 Ops << MkId(getSPIRVValue(constant)) << MkId(RID);
David Neto3fbb4072017-10-16 11:28:14 -04005035
SJW88ed5fe2020-05-11 12:40:57 -05005036 replaceSPIRVInst(Placeholder, opcode, Ops);
David Neto3fbb4072017-10-16 11:28:14 -04005037 };
5038
5039 switch (IndirectExtInst) {
5040 case glsl::ExtInstFindUMsb: // Implementing clz
SJW2c317da2020-03-23 07:39:13 -05005041 generate_extra_inst(spv::OpISub, ConstantInt::get(IntTy, 31));
David Neto3fbb4072017-10-16 11:28:14 -04005042 break;
5043 case glsl::ExtInstAcos: // Implementing acospi
5044 case glsl::ExtInstAsin: // Implementing asinpi
Kévin Petiteb9f90a2018-09-29 12:29:34 +01005045 case glsl::ExtInstAtan: // Implementing atanpi
David Neto3fbb4072017-10-16 11:28:14 -04005046 case glsl::ExtInstAtan2: // Implementing atan2pi
5047 generate_extra_inst(
5048 spv::OpFMul,
5049 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
5050 break;
5051
5052 default:
5053 assert(false && "internally inconsistent");
David Neto4d02a532017-09-17 12:57:44 -04005054 }
David Neto22f144c2017-06-12 14:26:21 -04005055 }
David Neto3fbb4072017-10-16 11:28:14 -04005056
SJW2c317da2020-03-23 07:39:13 -05005057 } else if (callee_code == Builtins::kPopcount) {
David Neto22f144c2017-06-12 14:26:21 -04005058 //
5059 // Generate OpBitCount
5060 //
5061 // Ops[0] = Result Type ID
5062 // Ops[1] = Base ID
SJWf93f5f32020-05-05 07:27:56 -05005063 SPIRVOperandVec Ops;
5064 Ops << MkId(getSPIRVType(Call->getType()))
5065 << MkId(getSPIRVValue(Call->getOperand(0)));
David Neto22f144c2017-06-12 14:26:21 -04005066
SJW88ed5fe2020-05-11 12:40:57 -05005067 replaceSPIRVInst(Placeholder, spv::OpBitCount, Ops);
David Netoab03f432017-11-03 17:00:44 -04005068
David Neto862b7d82018-06-14 18:48:37 -04005069 } else if (callee_name.startswith(kCompositeConstructFunctionPrefix)) {
David Netoab03f432017-11-03 17:00:44 -04005070
5071 // Generate an OpCompositeConstruct
SJWf93f5f32020-05-05 07:27:56 -05005072 SPIRVOperandVec Ops;
David Netoab03f432017-11-03 17:00:44 -04005073
5074 // The result type.
SJWf93f5f32020-05-05 07:27:56 -05005075 Ops << MkId(getSPIRVType(Call->getType()));
David Netoab03f432017-11-03 17:00:44 -04005076
5077 for (Use &use : Call->arg_operands()) {
SJWf93f5f32020-05-05 07:27:56 -05005078 Ops << MkId(getSPIRVValue(use.get()));
David Netoab03f432017-11-03 17:00:44 -04005079 }
5080
SJW88ed5fe2020-05-11 12:40:57 -05005081 replaceSPIRVInst(Placeholder, spv::OpCompositeConstruct, Ops);
David Netoab03f432017-11-03 17:00:44 -04005082
Alan Baker202c8c72018-08-13 13:47:44 -04005083 } else if (callee_name.startswith(clspv::ResourceAccessorFunction())) {
5084
5085 // We have already mapped the call's result value to an ID.
5086 // Don't generate any code now.
5087
5088 } else if (callee_name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04005089
5090 // We have already mapped the call's result value to an ID.
5091 // Don't generate any code now.
5092
David Neto22f144c2017-06-12 14:26:21 -04005093 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05005094 if (Call->getType()->isPointerTy()) {
5095 // Functions returning pointers require variable pointers.
5096 setVariablePointersCapabilities(
5097 Call->getType()->getPointerAddressSpace());
5098 }
5099
David Neto22f144c2017-06-12 14:26:21 -04005100 //
5101 // Generate OpFunctionCall.
5102 //
5103
5104 // Ops[0] = Result Type ID
5105 // Ops[1] = Callee Function ID
5106 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
SJWf93f5f32020-05-05 07:27:56 -05005107 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04005108
SJWf93f5f32020-05-05 07:27:56 -05005109 Ops << MkId(getSPIRVType(Call->getType()));
David Neto22f144c2017-06-12 14:26:21 -04005110
SJWf93f5f32020-05-05 07:27:56 -05005111 uint32_t CalleeID = getSPIRVValue(Callee);
David Neto43568eb2017-10-13 18:25:25 -04005112 if (CalleeID == 0) {
5113 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04005114 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04005115 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
5116 // causes an infinite loop. Instead, go ahead and generate
5117 // the bad function call. A validator will catch the 0-Id.
5118 // llvm_unreachable("Can't translate function call");
5119 }
David Neto22f144c2017-06-12 14:26:21 -04005120
David Neto257c3892018-04-11 13:19:45 -04005121 Ops << MkId(CalleeID);
David Neto22f144c2017-06-12 14:26:21 -04005122
David Neto22f144c2017-06-12 14:26:21 -04005123 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
SJW88ed5fe2020-05-11 12:40:57 -05005124 for (unsigned j = 0; j < CalleeFTy->getNumParams(); j++) {
5125 auto *operand = Call->getOperand(j);
alan-bakerd4d50652019-12-03 17:17:15 -05005126 auto *operand_type = operand->getType();
5127 // Images and samplers can be passed as function parameters without
5128 // variable pointers.
5129 if (operand_type->isPointerTy() && !IsImageType(operand_type) &&
5130 !IsSamplerType(operand_type)) {
alan-baker5b86ed72019-02-15 08:26:50 -05005131 auto sc =
5132 GetStorageClass(operand->getType()->getPointerAddressSpace());
5133 if (sc == spv::StorageClassStorageBuffer) {
5134 // Passing SSBO by reference requires variable pointers storage
5135 // buffer.
5136 setVariablePointersStorageBuffer(true);
5137 } else if (sc == spv::StorageClassWorkgroup) {
5138 // Workgroup references require variable pointers if they are not
5139 // memory object declarations.
5140 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
5141 // Workgroup accessor represents a variable reference.
5142 if (!operand_call->getCalledFunction()->getName().startswith(
5143 clspv::WorkgroupAccessorFunction()))
5144 setVariablePointers(true);
5145 } else {
5146 // Arguments are function parameters.
5147 if (!isa<Argument>(operand))
5148 setVariablePointers(true);
5149 }
5150 }
5151 }
SJWf93f5f32020-05-05 07:27:56 -05005152 Ops << MkId(getSPIRVValue(operand));
David Neto22f144c2017-06-12 14:26:21 -04005153 }
5154
SJW88ed5fe2020-05-11 12:40:57 -05005155 replaceSPIRVInst(Placeholder, spv::OpFunctionCall, Ops);
David Neto22f144c2017-06-12 14:26:21 -04005156 }
5157 }
5158 }
5159}
5160
SJW77b87ad2020-04-21 14:37:52 -05005161void SPIRVProducerPass::HandleDeferredDecorations() {
5162 const auto &DL = module->getDataLayout();
Alan Baker202c8c72018-08-13 13:47:44 -04005163 if (getTypesNeedingArrayStride().empty() && LocalArgSpecIds.empty()) {
David Neto1a1a0582017-07-07 12:01:44 -04005164 return;
David Netoc6f3ab22018-04-06 18:02:31 -04005165 }
David Neto1a1a0582017-07-07 12:01:44 -04005166
David Netoc6f3ab22018-04-06 18:02:31 -04005167 // Insert ArrayStride decorations on pointer types, due to OpPtrAccessChain
5168 // instructions we generated earlier.
David Neto85082642018-03-24 06:55:20 -07005169 for (auto *type : getTypesNeedingArrayStride()) {
5170 Type *elemTy = nullptr;
5171 if (auto *ptrTy = dyn_cast<PointerType>(type)) {
5172 elemTy = ptrTy->getElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05005173 } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
alan-baker8eb435a2020-04-08 00:42:06 -04005174 elemTy = arrayTy->getElementType();
5175 } else if (auto *vecTy = dyn_cast<VectorType>(type)) {
5176 elemTy = vecTy->getElementType();
David Neto85082642018-03-24 06:55:20 -07005177 } else {
5178 errs() << "Unhandled strided type " << *type << "\n";
5179 llvm_unreachable("Unhandled strided type");
5180 }
David Neto1a1a0582017-07-07 12:01:44 -04005181
5182 // Ops[0] = Target ID
5183 // Ops[1] = Decoration (ArrayStride)
5184 // Ops[2] = Stride number (Literal Number)
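    // For example, a type whose elements are float4 (16-byte) values gets:
    //   OpDecorate %ty ArrayStride 16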
SJWf93f5f32020-05-05 07:27:56 -05005185 SPIRVOperandVec Ops;
David Neto1a1a0582017-07-07 12:01:44 -04005186
David Neto85082642018-03-24 06:55:20 -07005187 // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
Alan Bakerfcda9482018-10-02 17:09:59 -04005188 const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));
David Neto257c3892018-04-11 13:19:45 -04005189
SJWf93f5f32020-05-05 07:27:56 -05005190 Ops << MkId(getSPIRVType(type)) << MkNum(spv::DecorationArrayStride)
David Neto257c3892018-04-11 13:19:45 -04005191 << MkNum(stride);
David Neto1a1a0582017-07-07 12:01:44 -04005192
SJWf93f5f32020-05-05 07:27:56 -05005193 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04005194 }
David Neto1a1a0582017-07-07 12:01:44 -04005195}
5196
David Neto22f144c2017-06-12 14:26:21 -04005197glsl::ExtInst SPIRVProducerPass::getExtInstEnum(StringRef Name) {
SJW2c317da2020-03-23 07:39:13 -05005198
5199 const auto &fi = Builtins::Lookup(Name);
5200 switch (fi) {
5201 case Builtins::kClamp: {
5202 auto param_type = fi.getParameter(0);
5203 if (param_type.type_id == Type::FloatTyID) {
5204 return glsl::ExtInst::ExtInstFClamp;
5205 }
5206 return param_type.is_signed ? glsl::ExtInst::ExtInstSClamp
5207 : glsl::ExtInst::ExtInstUClamp;
5208 }
5209 case Builtins::kMax: {
5210 auto param_type = fi.getParameter(0);
5211 if (param_type.type_id == Type::FloatTyID) {
5212 return glsl::ExtInst::ExtInstFMax;
5213 }
5214 return param_type.is_signed ? glsl::ExtInst::ExtInstSMax
5215 : glsl::ExtInst::ExtInstUMax;
5216 }
5217 case Builtins::kMin: {
5218 auto param_type = fi.getParameter(0);
5219 if (param_type.type_id == Type::FloatTyID) {
5220 return glsl::ExtInst::ExtInstFMin;
5221 }
5222 return param_type.is_signed ? glsl::ExtInst::ExtInstSMin
5223 : glsl::ExtInst::ExtInstUMin;
5224 }
5225 case Builtins::kAbs:
5226 return glsl::ExtInst::ExtInstSAbs;
5227 case Builtins::kFmax:
5228 return glsl::ExtInst::ExtInstFMax;
5229 case Builtins::kFmin:
5230 return glsl::ExtInst::ExtInstFMin;
5231 case Builtins::kDegrees:
5232 return glsl::ExtInst::ExtInstDegrees;
5233 case Builtins::kRadians:
5234 return glsl::ExtInst::ExtInstRadians;
5235 case Builtins::kMix:
5236 return glsl::ExtInst::ExtInstFMix;
5237 case Builtins::kAcos:
5238 case Builtins::kAcospi:
5239 return glsl::ExtInst::ExtInstAcos;
5240 case Builtins::kAcosh:
5241 return glsl::ExtInst::ExtInstAcosh;
5242 case Builtins::kAsin:
5243 case Builtins::kAsinpi:
5244 return glsl::ExtInst::ExtInstAsin;
5245 case Builtins::kAsinh:
5246 return glsl::ExtInst::ExtInstAsinh;
5247 case Builtins::kAtan:
5248 case Builtins::kAtanpi:
5249 return glsl::ExtInst::ExtInstAtan;
5250 case Builtins::kAtanh:
5251 return glsl::ExtInst::ExtInstAtanh;
5252 case Builtins::kAtan2:
5253 case Builtins::kAtan2pi:
5254 return glsl::ExtInst::ExtInstAtan2;
5255 case Builtins::kCeil:
5256 return glsl::ExtInst::ExtInstCeil;
5257 case Builtins::kSin:
5258 case Builtins::kHalfSin:
5259 case Builtins::kNativeSin:
5260 return glsl::ExtInst::ExtInstSin;
5261 case Builtins::kSinh:
5262 return glsl::ExtInst::ExtInstSinh;
5263 case Builtins::kCos:
5264 case Builtins::kHalfCos:
5265 case Builtins::kNativeCos:
5266 return glsl::ExtInst::ExtInstCos;
5267 case Builtins::kCosh:
5268 return glsl::ExtInst::ExtInstCosh;
5269 case Builtins::kTan:
5270 case Builtins::kHalfTan:
5271 case Builtins::kNativeTan:
5272 return glsl::ExtInst::ExtInstTan;
5273 case Builtins::kTanh:
5274 return glsl::ExtInst::ExtInstTanh;
5275 case Builtins::kExp:
5276 case Builtins::kHalfExp:
5277 case Builtins::kNativeExp:
5278 return glsl::ExtInst::ExtInstExp;
5279 case Builtins::kExp2:
5280 case Builtins::kHalfExp2:
5281 case Builtins::kNativeExp2:
5282 return glsl::ExtInst::ExtInstExp2;
5283 case Builtins::kLog:
5284 case Builtins::kHalfLog:
5285 case Builtins::kNativeLog:
5286 return glsl::ExtInst::ExtInstLog;
5287 case Builtins::kLog2:
5288 case Builtins::kHalfLog2:
5289 case Builtins::kNativeLog2:
5290 return glsl::ExtInst::ExtInstLog2;
5291 case Builtins::kFabs:
5292 return glsl::ExtInst::ExtInstFAbs;
5293 case Builtins::kFma:
5294 return glsl::ExtInst::ExtInstFma;
5295 case Builtins::kFloor:
5296 return glsl::ExtInst::ExtInstFloor;
5297 case Builtins::kLdexp:
5298 return glsl::ExtInst::ExtInstLdexp;
5299 case Builtins::kPow:
5300 case Builtins::kPowr:
5301 case Builtins::kHalfPowr:
5302 case Builtins::kNativePowr:
5303 return glsl::ExtInst::ExtInstPow;
5304 case Builtins::kRound:
5305 return glsl::ExtInst::ExtInstRound;
5306 case Builtins::kSqrt:
5307 case Builtins::kHalfSqrt:
5308 case Builtins::kNativeSqrt:
5309 return glsl::ExtInst::ExtInstSqrt;
5310 case Builtins::kRsqrt:
5311 case Builtins::kHalfRsqrt:
5312 case Builtins::kNativeRsqrt:
5313 return glsl::ExtInst::ExtInstInverseSqrt;
5314 case Builtins::kTrunc:
5315 return glsl::ExtInst::ExtInstTrunc;
5316 case Builtins::kFrexp:
5317 return glsl::ExtInst::ExtInstFrexp;
5318 case Builtins::kFract:
5319 return glsl::ExtInst::ExtInstFract;
5320 case Builtins::kSign:
5321 return glsl::ExtInst::ExtInstFSign;
5322 case Builtins::kLength:
5323 case Builtins::kFastLength:
5324 return glsl::ExtInst::ExtInstLength;
5325 case Builtins::kDistance:
5326 case Builtins::kFastDistance:
5327 return glsl::ExtInst::ExtInstDistance;
5328 case Builtins::kStep:
5329 return glsl::ExtInst::ExtInstStep;
5330 case Builtins::kSmoothstep:
5331 return glsl::ExtInst::ExtInstSmoothStep;
5332 case Builtins::kCross:
5333 return glsl::ExtInst::ExtInstCross;
5334 case Builtins::kNormalize:
5335 case Builtins::kFastNormalize:
5336 return glsl::ExtInst::ExtInstNormalize;
5337 default:
5338 break;
5339 }
5340
David Neto22f144c2017-06-12 14:26:21 -04005341 return StringSwitch<glsl::ExtInst>(Name)
David Neto22f144c2017-06-12 14:26:21 -04005342 .StartsWith("llvm.fmuladd.", glsl::ExtInst::ExtInstFma)
5343 .Case("spirv.unpack.v2f16", glsl::ExtInst::ExtInstUnpackHalf2x16)
5344 .Case("spirv.pack.v2f16", glsl::ExtInst::ExtInstPackHalf2x16)
David Neto3fbb4072017-10-16 11:28:14 -04005345 .Default(kGlslExtInstBad);
5346}
5347
5348glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(StringRef Name) {
SJW2c317da2020-03-23 07:39:13 -05005349 switch (Builtins::Lookup(Name)) {
5350 case Builtins::kClz:
5351 return glsl::ExtInst::ExtInstFindUMsb;
5352 case Builtins::kAcospi:
5353 return glsl::ExtInst::ExtInstAcos;
5354 case Builtins::kAsinpi:
5355 return glsl::ExtInst::ExtInstAsin;
5356 case Builtins::kAtanpi:
5357 return glsl::ExtInst::ExtInstAtan;
5358 case Builtins::kAtan2pi:
5359 return glsl::ExtInst::ExtInstAtan2;
5360 default:
5361 break;
5362 }
5363 return kGlslExtInstBad;
David Neto3fbb4072017-10-16 11:28:14 -04005364}
5365
alan-bakerb6b09dc2018-11-08 16:59:28 -05005366glsl::ExtInst
5367SPIRVProducerPass::getDirectOrIndirectExtInstEnum(StringRef Name) {
David Neto3fbb4072017-10-16 11:28:14 -04005368 auto direct = getExtInstEnum(Name);
5369 if (direct != kGlslExtInstBad)
5370 return direct;
5371 return getIndirectExtInstEnum(Name);
David Neto22f144c2017-06-12 14:26:21 -04005372}
5373
David Neto22f144c2017-06-12 14:26:21 -04005374void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
David Neto0676e6f2017-07-11 18:47:44 -04005375 binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
David Neto22f144c2017-06-12 14:26:21 -04005376}
5377
SJW88ed5fe2020-05-11 12:40:57 -05005378void SPIRVProducerPass::WriteResultID(const SPIRVInstruction &Inst) {
5379 WriteOneWord(Inst.getResultID());
David Neto22f144c2017-06-12 14:26:21 -04005380}
5381
SJW88ed5fe2020-05-11 12:40:57 -05005382void SPIRVProducerPass::WriteWordCountAndOpcode(const SPIRVInstruction &Inst) {
David Neto22f144c2017-06-12 14:26:21 -04005383  // High 16 bits : Word Count
5384  // Low 16 bits  : Opcode
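  // For example, a 3-word OpTypeFloat instruction (opcode 22) is encoded as
  //   (3 << 16) | 22 == 0x00030016.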
SJW88ed5fe2020-05-11 12:40:57 -05005385 uint32_t Word = Inst.getOpcode();
5386 const uint32_t count = Inst.getWordCount();
David Netoee2660d2018-06-28 16:31:29 -04005387 if (count > 65535) {
5388 errs() << "Word count limit of 65535 exceeded: " << count << "\n";
5389 llvm_unreachable("Word count too high");
5390 }
SJW88ed5fe2020-05-11 12:40:57 -05005391 Word |= Inst.getWordCount() << 16;
David Neto22f144c2017-06-12 14:26:21 -04005392 WriteOneWord(Word);
5393}
5394
SJW88ed5fe2020-05-11 12:40:57 -05005395void SPIRVProducerPass::WriteOperand(const SPIRVOperand &Op) {
5396 SPIRVOperandType OpTy = Op.getType();
David Neto22f144c2017-06-12 14:26:21 -04005397 switch (OpTy) {
5398 default: {
5399 llvm_unreachable("Unsupported SPIRV Operand Type???");
5400 break;
5401 }
5402 case SPIRVOperandType::NUMBERID: {
SJW88ed5fe2020-05-11 12:40:57 -05005403 WriteOneWord(Op.getNumID());
David Neto22f144c2017-06-12 14:26:21 -04005404 break;
5405 }
5406 case SPIRVOperandType::LITERAL_STRING: {
SJW88ed5fe2020-05-11 12:40:57 -05005407 std::string Str = Op.getLiteralStr();
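    // SPIR-V packs literal strings as UTF-8, four bytes per word in
    // little-endian byte order, with a terminating NUL. A string whose length
    // is a multiple of four therefore gets an extra all-zero word, which is
    // what LastWord provides below when Remainder is zero.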
David Neto22f144c2017-06-12 14:26:21 -04005408 const char *Data = Str.c_str();
5409 size_t WordSize = Str.size() / 4;
5410 for (unsigned Idx = 0; Idx < WordSize; Idx++) {
5411 WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
5412 }
5413
5414 uint32_t Remainder = Str.size() % 4;
5415 uint32_t LastWord = 0;
5416 if (Remainder) {
5417 for (unsigned Idx = 0; Idx < Remainder; Idx++) {
5418 LastWord |= Data[4 * WordSize + Idx] << 8 * Idx;
5419 }
5420 }
5421
5422 WriteOneWord(LastWord);
5423 break;
5424 }
SJW88ed5fe2020-05-11 12:40:57 -05005425 case SPIRVOperandType::LITERAL_WORD: {
5426 WriteOneWord(Op.getLiteralNum()[0]);
5427 break;
5428 }
5429 case SPIRVOperandType::LITERAL_DWORD: {
5430 WriteOneWord(Op.getLiteralNum()[0]);
5431 WriteOneWord(Op.getLiteralNum()[1]);
David Neto22f144c2017-06-12 14:26:21 -04005432 break;
5433 }
5434 }
5435}
5436
5437void SPIRVProducerPass::WriteSPIRVBinary() {
SJW69939d52020-04-16 07:29:07 -05005438 for (int i = 0; i < kSectionCount; ++i) {
5439 WriteSPIRVBinary(SPIRVSections[i]);
5440 }
5441}
5442
5443void SPIRVProducerPass::WriteSPIRVBinary(SPIRVInstructionList &SPIRVInstList) {
David Neto22f144c2017-06-12 14:26:21 -04005444
SJW88ed5fe2020-05-11 12:40:57 -05005445 for (const auto &Inst : SPIRVInstList) {
5446 const auto &Ops = Inst.getOperands();
5447 spv::Op Opcode = static_cast<spv::Op>(Inst.getOpcode());
David Neto22f144c2017-06-12 14:26:21 -04005448
5449 switch (Opcode) {
5450 default: {
David Neto5c22a252018-03-15 16:07:41 -04005451 errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
David Neto22f144c2017-06-12 14:26:21 -04005452 llvm_unreachable("Unsupported SPIRV instruction");
5453 break;
5454 }
5455 case spv::OpCapability:
5456 case spv::OpExtension:
5457 case spv::OpMemoryModel:
5458 case spv::OpEntryPoint:
5459 case spv::OpExecutionMode:
5460 case spv::OpSource:
5461 case spv::OpDecorate:
5462 case spv::OpMemberDecorate:
5463 case spv::OpBranch:
5464 case spv::OpBranchConditional:
5465 case spv::OpSelectionMerge:
5466 case spv::OpLoopMerge:
5467 case spv::OpStore:
5468 case spv::OpImageWrite:
5469 case spv::OpReturnValue:
5470 case spv::OpControlBarrier:
5471 case spv::OpMemoryBarrier:
5472 case spv::OpReturn:
5473 case spv::OpFunctionEnd:
5474 case spv::OpCopyMemory: {
5475 WriteWordCountAndOpcode(Inst);
5476 for (uint32_t i = 0; i < Ops.size(); i++) {
5477 WriteOperand(Ops[i]);
5478 }
5479 break;
5480 }
5481 case spv::OpTypeBool:
5482 case spv::OpTypeVoid:
5483 case spv::OpTypeSampler:
5484 case spv::OpLabel:
5485 case spv::OpExtInstImport:
5486 case spv::OpTypePointer:
5487 case spv::OpTypeRuntimeArray:
5488 case spv::OpTypeStruct:
5489 case spv::OpTypeImage:
5490 case spv::OpTypeSampledImage:
5491 case spv::OpTypeInt:
5492 case spv::OpTypeFloat:
5493 case spv::OpTypeArray:
5494 case spv::OpTypeVector:
5495 case spv::OpTypeFunction: {
5496 WriteWordCountAndOpcode(Inst);
5497 WriteResultID(Inst);
5498 for (uint32_t i = 0; i < Ops.size(); i++) {
5499 WriteOperand(Ops[i]);
5500 }
5501 break;
5502 }
5503 case spv::OpFunction:
5504 case spv::OpFunctionParameter:
5505 case spv::OpAccessChain:
5506 case spv::OpPtrAccessChain:
5507 case spv::OpInBoundsAccessChain:
5508 case spv::OpUConvert:
5509 case spv::OpSConvert:
5510 case spv::OpConvertFToU:
5511 case spv::OpConvertFToS:
5512 case spv::OpConvertUToF:
5513 case spv::OpConvertSToF:
5514 case spv::OpFConvert:
5515 case spv::OpConvertPtrToU:
5516 case spv::OpConvertUToPtr:
5517 case spv::OpBitcast:
alan-bakerc9c55ae2019-12-02 16:01:27 -05005518 case spv::OpFNegate:
David Neto22f144c2017-06-12 14:26:21 -04005519 case spv::OpIAdd:
5520 case spv::OpFAdd:
5521 case spv::OpISub:
5522 case spv::OpFSub:
5523 case spv::OpIMul:
5524 case spv::OpFMul:
5525 case spv::OpUDiv:
5526 case spv::OpSDiv:
5527 case spv::OpFDiv:
5528 case spv::OpUMod:
5529 case spv::OpSRem:
5530 case spv::OpFRem:
Kévin Petit8a560882019-03-21 15:24:34 +00005531 case spv::OpUMulExtended:
5532 case spv::OpSMulExtended:
David Neto22f144c2017-06-12 14:26:21 -04005533 case spv::OpBitwiseOr:
5534 case spv::OpBitwiseXor:
5535 case spv::OpBitwiseAnd:
David Netoa394f392017-08-26 20:45:29 -04005536 case spv::OpNot:
David Neto22f144c2017-06-12 14:26:21 -04005537 case spv::OpShiftLeftLogical:
5538 case spv::OpShiftRightLogical:
5539 case spv::OpShiftRightArithmetic:
5540 case spv::OpBitCount:
David Netoab03f432017-11-03 17:00:44 -04005541 case spv::OpCompositeConstruct:
David Neto22f144c2017-06-12 14:26:21 -04005542 case spv::OpCompositeExtract:
5543 case spv::OpVectorExtractDynamic:
5544 case spv::OpCompositeInsert:
David Neto0a2f98d2017-09-15 19:38:40 -04005545 case spv::OpCopyObject:
David Neto22f144c2017-06-12 14:26:21 -04005546 case spv::OpVectorInsertDynamic:
5547 case spv::OpVectorShuffle:
5548 case spv::OpIEqual:
5549 case spv::OpINotEqual:
5550 case spv::OpUGreaterThan:
5551 case spv::OpUGreaterThanEqual:
5552 case spv::OpULessThan:
5553 case spv::OpULessThanEqual:
5554 case spv::OpSGreaterThan:
5555 case spv::OpSGreaterThanEqual:
5556 case spv::OpSLessThan:
5557 case spv::OpSLessThanEqual:
5558 case spv::OpFOrdEqual:
5559 case spv::OpFOrdGreaterThan:
5560 case spv::OpFOrdGreaterThanEqual:
5561 case spv::OpFOrdLessThan:
5562 case spv::OpFOrdLessThanEqual:
5563 case spv::OpFOrdNotEqual:
5564 case spv::OpFUnordEqual:
5565 case spv::OpFUnordGreaterThan:
5566 case spv::OpFUnordGreaterThanEqual:
5567 case spv::OpFUnordLessThan:
5568 case spv::OpFUnordLessThanEqual:
5569 case spv::OpFUnordNotEqual:
5570 case spv::OpExtInst:
5571 case spv::OpIsInf:
5572 case spv::OpIsNan:
5573 case spv::OpAny:
5574 case spv::OpAll:
5575 case spv::OpUndef:
5576 case spv::OpConstantNull:
5577 case spv::OpLogicalOr:
5578 case spv::OpLogicalAnd:
5579 case spv::OpLogicalNot:
5580 case spv::OpLogicalNotEqual:
5581 case spv::OpConstantComposite:
5582 case spv::OpSpecConstantComposite:
5583 case spv::OpConstantTrue:
5584 case spv::OpConstantFalse:
5585 case spv::OpConstant:
5586 case spv::OpSpecConstant:
5587 case spv::OpVariable:
5588 case spv::OpFunctionCall:
5589 case spv::OpSampledImage:
alan-baker75090e42020-02-20 11:21:04 -05005590 case spv::OpImageFetch:
David Neto22f144c2017-06-12 14:26:21 -04005591 case spv::OpImageSampleExplicitLod:
David Neto5c22a252018-03-15 16:07:41 -04005592 case spv::OpImageQuerySize:
alan-bakerce179f12019-12-06 19:02:22 -05005593 case spv::OpImageQuerySizeLod:
David Neto22f144c2017-06-12 14:26:21 -04005594 case spv::OpSelect:
5595 case spv::OpPhi:
5596 case spv::OpLoad:
5597 case spv::OpAtomicIAdd:
5598 case spv::OpAtomicISub:
5599 case spv::OpAtomicExchange:
5600 case spv::OpAtomicIIncrement:
5601 case spv::OpAtomicIDecrement:
5602 case spv::OpAtomicCompareExchange:
5603 case spv::OpAtomicUMin:
5604 case spv::OpAtomicSMin:
5605 case spv::OpAtomicUMax:
5606 case spv::OpAtomicSMax:
5607 case spv::OpAtomicAnd:
5608 case spv::OpAtomicOr:
5609 case spv::OpAtomicXor:
5610 case spv::OpDot: {
5611 WriteWordCountAndOpcode(Inst);
5612 WriteOperand(Ops[0]);
5613 WriteResultID(Inst);
5614 for (uint32_t i = 1; i < Ops.size(); i++) {
5615 WriteOperand(Ops[i]);
5616 }
5617 break;
5618 }
5619 }
5620 }
5621}
Alan Baker9bf93fb2018-08-28 16:59:26 -04005622
alan-bakerb6b09dc2018-11-08 16:59:28 -05005623bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
Alan Baker9bf93fb2018-08-28 16:59:26 -04005624 switch (type->getTypeID()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005625 case Type::HalfTyID:
5626 case Type::FloatTyID:
5627 case Type::DoubleTyID:
5628 case Type::IntegerTyID:
James Price59a1c752020-04-23 23:06:16 -04005629 case Type::FixedVectorTyID:
alan-bakerb6b09dc2018-11-08 16:59:28 -05005630 return true;
5631 case Type::PointerTyID: {
5632 const PointerType *pointer_type = cast<PointerType>(type);
5633 if (pointer_type->getPointerAddressSpace() !=
5634 AddressSpace::UniformConstant) {
5635 auto pointee_type = pointer_type->getPointerElementType();
5636 if (pointee_type->isStructTy() &&
5637 cast<StructType>(pointee_type)->isOpaque()) {
5638 // Images and samplers are not nullable.
5639 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005640 }
Alan Baker9bf93fb2018-08-28 16:59:26 -04005641 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05005642 return true;
5643 }
5644 case Type::ArrayTyID:
alan-baker8eb435a2020-04-08 00:42:06 -04005645 return IsTypeNullable(type->getArrayElementType());
alan-bakerb6b09dc2018-11-08 16:59:28 -05005646 case Type::StructTyID: {
5647 const StructType *struct_type = cast<StructType>(type);
5648 // Images and samplers are not nullable.
5649 if (struct_type->isOpaque())
Alan Baker9bf93fb2018-08-28 16:59:26 -04005650 return false;
alan-bakerb6b09dc2018-11-08 16:59:28 -05005651 for (const auto element : struct_type->elements()) {
5652 if (!IsTypeNullable(element))
5653 return false;
5654 }
5655 return true;
5656 }
5657 default:
5658 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005659 }
5660}
Alan Bakerfcda9482018-10-02 17:09:59 -04005661
SJW77b87ad2020-04-21 14:37:52 -05005662void SPIRVProducerPass::PopulateUBOTypeMaps() {
Alan Bakerfcda9482018-10-02 17:09:59 -04005663 if (auto *offsets_md =
SJW77b87ad2020-04-21 14:37:52 -05005664 module->getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
Alan Bakerfcda9482018-10-02 17:09:59 -04005665    // Metadata is stored as key-value pair operands. The first element of each
5666 // operand is the type and the second is a vector of offsets.
5667 for (const auto *operand : offsets_md->operands()) {
5668 const auto *pair = cast<MDTuple>(operand);
5669 auto *type =
5670 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5671 const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
5672 std::vector<uint32_t> offsets;
5673 for (const Metadata *offset_md : offset_vector->operands()) {
5674 const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
alan-bakerb6b09dc2018-11-08 16:59:28 -05005675 offsets.push_back(static_cast<uint32_t>(
5676 cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
Alan Bakerfcda9482018-10-02 17:09:59 -04005677 }
5678 RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
5679 }
5680 }
5681
5682 if (auto *sizes_md =
SJW77b87ad2020-04-21 14:37:52 -05005683 module->getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
Alan Bakerfcda9482018-10-02 17:09:59 -04005684 // Metadata is stored as key-value pair operands. The first element of each
5685 // operand is the type and the second is a triple of sizes: type size in
5686 // bits, store size and alloc size.
5687 for (const auto *operand : sizes_md->operands()) {
5688 const auto *pair = cast<MDTuple>(operand);
5689 auto *type =
5690 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5691 const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
5692 uint64_t type_size_in_bits =
5693 cast<ConstantInt>(
5694 cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
5695 ->getZExtValue();
5696 uint64_t type_store_size =
5697 cast<ConstantInt>(
5698 cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
5699 ->getZExtValue();
5700 uint64_t type_alloc_size =
5701 cast<ConstantInt>(
5702 cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
5703 ->getZExtValue();
5704 RemappedUBOTypeSizes.insert(std::make_pair(
5705 type, std::make_tuple(type_size_in_bits, type_store_size,
5706 type_alloc_size)));
5707 }
5708 }
5709}
5710
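// The three size helpers below prefer the remapped UBO sizes populated above
// and fall back to the DataLayout otherwise. As a rough illustration (assuming
// a typical data layout), a <3 x float> vector has a bit size of 96, a store
// size of 12 bytes, and an alloc size of 16 bytes.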
5711uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
5712 const DataLayout &DL) {
5713 auto iter = RemappedUBOTypeSizes.find(type);
5714 if (iter != RemappedUBOTypeSizes.end()) {
5715 return std::get<0>(iter->second);
5716 }
5717
5718 return DL.getTypeSizeInBits(type);
5719}
5720
5721uint64_t SPIRVProducerPass::GetTypeStoreSize(Type *type, const DataLayout &DL) {
5722 auto iter = RemappedUBOTypeSizes.find(type);
5723 if (iter != RemappedUBOTypeSizes.end()) {
5724 return std::get<1>(iter->second);
5725 }
5726
5727 return DL.getTypeStoreSize(type);
5728}
5729
5730uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
5731 auto iter = RemappedUBOTypeSizes.find(type);
5732 if (iter != RemappedUBOTypeSizes.end()) {
5733 return std::get<2>(iter->second);
5734 }
5735
5736 return DL.getTypeAllocSize(type);
5737}
alan-baker5b86ed72019-02-15 08:26:50 -05005738
Kévin Petitbbbda972020-03-03 19:16:31 +00005739uint32_t SPIRVProducerPass::GetExplicitLayoutStructMemberOffset(
5740 StructType *type, unsigned member, const DataLayout &DL) {
5741 const auto StructLayout = DL.getStructLayout(type);
5742 // Search for the correct offsets if this type was remapped.
5743 std::vector<uint32_t> *offsets = nullptr;
5744 auto iter = RemappedUBOTypeOffsets.find(type);
5745 if (iter != RemappedUBOTypeOffsets.end()) {
5746 offsets = &iter->second;
5747 }
5748 auto ByteOffset =
5749 static_cast<uint32_t>(StructLayout->getElementOffset(member));
5750 if (offsets) {
5751 ByteOffset = (*offsets)[member];
5752 }
5753
5754 return ByteOffset;
5755}
5756
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005757void SPIRVProducerPass::setVariablePointersCapabilities(
5758 unsigned address_space) {
alan-baker5b86ed72019-02-15 08:26:50 -05005759 if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
5760 setVariablePointersStorageBuffer(true);
5761 } else {
5762 setVariablePointers(true);
5763 }
5764}
5765
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005766Value *SPIRVProducerPass::GetBasePointer(Value *v) {
alan-baker5b86ed72019-02-15 08:26:50 -05005767 if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
5768 return GetBasePointer(gep->getPointerOperand());
5769 }
5770
5771 // Conservatively return |v|.
5772 return v;
5773}
5774
5775bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
5776 if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
5777 if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
5778 if (lhs_call->getCalledFunction()->getName().startswith(
5779 clspv::ResourceAccessorFunction()) &&
5780 rhs_call->getCalledFunction()->getName().startswith(
5781 clspv::ResourceAccessorFunction())) {
5782 // For resource accessors, match descriptor set and binding.
5783 if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
5784 lhs_call->getOperand(1) == rhs_call->getOperand(1))
5785 return true;
5786 } else if (lhs_call->getCalledFunction()->getName().startswith(
5787 clspv::WorkgroupAccessorFunction()) &&
5788 rhs_call->getCalledFunction()->getName().startswith(
5789 clspv::WorkgroupAccessorFunction())) {
5790 // For workgroup resources, match spec id.
5791 if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
5792 return true;
5793 }
5794 }
5795 }
5796
5797 return false;
5798}
5799
5800bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
5801 assert(inst->getType()->isPointerTy());
5802 assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
5803 spv::StorageClassStorageBuffer);
5804 const bool hack_undef = clspv::Option::HackUndef();
5805 if (auto *select = dyn_cast<SelectInst>(inst)) {
5806 auto *true_base = GetBasePointer(select->getTrueValue());
5807 auto *false_base = GetBasePointer(select->getFalseValue());
5808
5809 if (true_base == false_base)
5810 return true;
5811
5812 // If either the true or false operand is a null, then we satisfy the same
5813 // object constraint.
5814 if (auto *true_cst = dyn_cast<Constant>(true_base)) {
5815 if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
5816 return true;
5817 }
5818
5819 if (auto *false_cst = dyn_cast<Constant>(false_base)) {
5820 if (false_cst->isNullValue() ||
5821 (hack_undef && isa<UndefValue>(false_base)))
5822 return true;
5823 }
5824
5825 if (sameResource(true_base, false_base))
5826 return true;
5827 } else if (auto *phi = dyn_cast<PHINode>(inst)) {
5828 Value *value = nullptr;
5829 bool ok = true;
5830 for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
5831 auto *base = GetBasePointer(phi->getIncomingValue(i));
5832      // Null values satisfy the constraint of selecting from the
5833 // same object.
5834 if (!value) {
5835 if (auto *cst = dyn_cast<Constant>(base)) {
5836 if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
5837 value = base;
5838 } else {
5839 value = base;
5840 }
5841 } else if (base != value) {
5842 if (auto *base_cst = dyn_cast<Constant>(base)) {
5843 if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
5844 continue;
5845 }
5846
5847 if (sameResource(value, base))
5848 continue;
5849
5850 // Values don't represent the same base.
5851 ok = false;
5852 }
5853 }
5854
5855 return ok;
5856 }
5857
5858 // Conservatively return false.
5859 return false;
5860}
alan-bakere9308012019-03-15 10:25:13 -04005861
5862bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
5863 if (!Arg.getType()->isPointerTy() ||
5864 Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
5865 // Only SSBOs need to be annotated as coherent.
5866 return false;
5867 }
5868
5869 DenseSet<Value *> visited;
5870 std::vector<Value *> stack;
5871 for (auto *U : Arg.getParent()->users()) {
5872 if (auto *call = dyn_cast<CallInst>(U)) {
5873 stack.push_back(call->getOperand(Arg.getArgNo()));
5874 }
5875 }
5876
5877 while (!stack.empty()) {
5878 Value *v = stack.back();
5879 stack.pop_back();
5880
5881 if (!visited.insert(v).second)
5882 continue;
5883
5884 auto *resource_call = dyn_cast<CallInst>(v);
5885 if (resource_call &&
5886 resource_call->getCalledFunction()->getName().startswith(
5887 clspv::ResourceAccessorFunction())) {
5888 // If this is a resource accessor function, check if the coherent operand
5889 // is set.
5890 const auto coherent =
5891 unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
5892 ->getZExtValue());
5893 if (coherent == 1)
5894 return true;
5895 } else if (auto *arg = dyn_cast<Argument>(v)) {
5896 // If this is a function argument, trace through its callers.
alan-bakere98f3f92019-04-08 15:06:36 -04005897 for (auto U : arg->getParent()->users()) {
alan-bakere9308012019-03-15 10:25:13 -04005898 if (auto *call = dyn_cast<CallInst>(U)) {
5899 stack.push_back(call->getOperand(arg->getArgNo()));
5900 }
5901 }
5902 } else if (auto *user = dyn_cast<User>(v)) {
5903 // If this is a user, traverse all operands that could lead to resource
5904 // variables.
5905 for (unsigned i = 0; i != user->getNumOperands(); ++i) {
5906 Value *operand = user->getOperand(i);
5907 if (operand->getType()->isPointerTy() &&
5908 operand->getType()->getPointerAddressSpace() ==
5909 clspv::AddressSpace::Global) {
5910 stack.push_back(operand);
5911 }
5912 }
5913 }
5914 }
5915
5916 // No coherent resource variables encountered.
5917 return false;
5918}
alan-baker06cad652019-12-03 17:56:47 -05005919
SJW77b87ad2020-04-21 14:37:52 -05005920void SPIRVProducerPass::PopulateStructuredCFGMaps() {
alan-baker06cad652019-12-03 17:56:47 -05005921 // First, track loop merges and continues.
5922 DenseSet<BasicBlock *> LoopMergesAndContinues;
SJW77b87ad2020-04-21 14:37:52 -05005923 for (auto &F : *module) {
alan-baker06cad652019-12-03 17:56:47 -05005924 if (F.isDeclaration())
5925 continue;
5926
5927 DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
5928 const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>(F).getLoopInfo();
5929 std::deque<BasicBlock *> order;
5930 DenseSet<BasicBlock *> visited;
5931 clspv::ComputeStructuredOrder(&*F.begin(), &DT, LI, &order, &visited);
5932
5933 for (auto BB : order) {
5934 auto terminator = BB->getTerminator();
5935 auto branch = dyn_cast<BranchInst>(terminator);
5936 if (LI.isLoopHeader(BB)) {
5937 auto L = LI.getLoopFor(BB);
5938 BasicBlock *ContinueBB = nullptr;
5939 BasicBlock *MergeBB = nullptr;
5940
5941 MergeBB = L->getExitBlock();
5942 if (!MergeBB) {
5943          // The StructurizeCFG pass converts the CFG into a triangle shape whose
5944          // regions have a single entry and exit. As a result, a loop should not
5945          // have multiple exits.
5946 llvm_unreachable("Loop has multiple exits???");
5947 }
5948
5949 if (L->isLoopLatch(BB)) {
5950 ContinueBB = BB;
5951 } else {
5952 // From SPIR-V spec 2.11, Continue Target must dominate that back-edge
5953 // block.
5954 BasicBlock *Header = L->getHeader();
5955 BasicBlock *Latch = L->getLoopLatch();
5956 for (auto *loop_block : L->blocks()) {
5957 if (loop_block == Header) {
5958 continue;
5959 }
5960
5961            // Check whether this block dominates the block with the back-edge.
5962            // The loop latch is the single block with a back-edge. If it was
5963            // possible, StructurizeCFG made the loop conform to this
5964            // requirement; otherwise |Latch| is null.
5965 if (DT.dominates(loop_block, Latch)) {
5966 ContinueBB = loop_block;
5967 }
5968 }
5969
5970 if (!ContinueBB) {
5971 llvm_unreachable("Wrong continue block from loop");
5972 }
5973 }
5974
5975 // Record the continue and merge blocks.
5976 MergeBlocks[BB] = MergeBB;
5977 ContinueBlocks[BB] = ContinueBB;
5978 LoopMergesAndContinues.insert(MergeBB);
5979 LoopMergesAndContinues.insert(ContinueBB);
5980 } else if (branch && branch->isConditional()) {
5981 auto L = LI.getLoopFor(BB);
5982 bool HasBackedge = false;
5983 while (L && !HasBackedge) {
5984 if (L->isLoopLatch(BB)) {
5985 HasBackedge = true;
5986 }
5987 L = L->getParentLoop();
5988 }
5989
5990 if (!HasBackedge) {
5991 // Only need a merge if the branch doesn't include a loop break or
5992 // continue.
5993 auto true_bb = branch->getSuccessor(0);
5994 auto false_bb = branch->getSuccessor(1);
5995 if (!LoopMergesAndContinues.count(true_bb) &&
5996 !LoopMergesAndContinues.count(false_bb)) {
5997            // The StructurizeCFG pass has already restructured the CFG. Just use
5998            // the false block of the branch instruction as the merge block.
5999 MergeBlocks[BB] = false_bb;
6000 }
6001 }
6002 }
6003 }
6004 }
6005}