// Copyright 2017 The Clspv Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifdef _MSC_VER
#pragma warning(push, 0)
#endif

#include <cassert>
#include <cstring>
#include <iomanip>
#include <list>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <tuple>
#include <unordered_set>
#include <utility>

#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/UniqueVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Cloning.h"

// enable spv::HasResultAndType
#define SPV_ENABLE_UTILITY_CODE
#include "spirv/unified1/spirv.hpp"

#include "clspv/AddressSpace.h"
#include "clspv/DescriptorMap.h"
#include "clspv/Option.h"
#include "clspv/spirv_c_strings.hpp"
#include "clspv/spirv_glsl.hpp"

#include "ArgKind.h"
#include "Builtins.h"
#include "ComputeStructuredOrder.h"
#include "ConstantEmitter.h"
#include "Constants.h"
#include "DescriptorCounter.h"
#include "Layout.h"
#include "NormalizeGlobalVariable.h"
#include "Passes.h"
#include "SpecConstant.h"
#include "Types.h"

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

using namespace llvm;
using namespace clspv;
using namespace clspv::Builtins;
using namespace mdconst;

namespace {

cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

cl::opt<bool>
    ShowProducerIR("show-producer-ir", cl::init(false), cl::ReallyHidden,
                   cl::desc("Dump the IR at the start of SPIRVProducer"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);

const char *kCompositeConstructFunctionPrefix = "clspv.composite_construct.";

// SPIRV Module Sections (per 2.4 of the SPIRV spec)
// These are used to collect SPIRVInstructions by type on-the-fly.
enum SPIRVSection {
  kCapabilities,
  kExtensions,
  kImports,
  kMemoryModel,
  kEntryPoints,
  kExecutionModes,

  kDebug,
  kAnnotations,

  kTypes,
  kConstants = kTypes,
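  // Constants are interleaved with types in the emitted binary, so kConstants
  // aliases kTypes and both collect into the same instruction list.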
  kGlobalVariables,

  kFunctions,

  kSectionCount
};

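// A thin wrapper around a SPIR-V result ID. The default value 0 means
// "no ID assigned yet"; see isValid().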
class SPIRVID {
  uint32_t id;

public:
  SPIRVID(uint32_t _id = 0) : id(_id) {}
  uint32_t get() const { return id; }
  bool isValid() const { return id != 0; }
  bool operator==(const SPIRVID &that) const { return id == that.id; }
};

enum SPIRVOperandType { NUMBERID, LITERAL_WORD, LITERAL_DWORD, LITERAL_STRING };

struct SPIRVOperand {
  explicit SPIRVOperand(SPIRVOperandType Ty, uint32_t Num) : Type(Ty) {
    LiteralNum[0] = Num;
  }
  explicit SPIRVOperand(SPIRVOperandType Ty, const char *Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
      : Type(Ty), LiteralStr(Str) {}
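  // Builds a literal operand from one or two 32-bit words: one word becomes a
  // LITERAL_WORD, two words become a LITERAL_DWORD (e.g. the low and high
  // halves of a 64-bit literal).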
  explicit SPIRVOperand(ArrayRef<uint32_t> NumVec) {
    auto sz = NumVec.size();
    assert(sz >= 1 && sz <= 2);
    Type = sz == 1 ? LITERAL_WORD : LITERAL_DWORD;
    LiteralNum[0] = NumVec[0];
    if (sz == 2) {
      LiteralNum[1] = NumVec[1];
    }
  }

  SPIRVOperandType getType() const { return Type; };
  uint32_t getNumID() const { return LiteralNum[0]; };
  std::string getLiteralStr() const { return LiteralStr; };
  const uint32_t *getLiteralNum() const { return LiteralNum; };

  uint32_t GetNumWords() const {
    switch (Type) {
    case NUMBERID:
    case LITERAL_WORD:
      return 1;
    case LITERAL_DWORD:
      return 2;
    case LITERAL_STRING:
      // Account for the terminating null character.
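      // e.g. "abc" takes 4 bytes including the NUL and packs into 1 word,
      // while "abcd" takes 5 bytes and needs 2 words.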
      return uint32_t((LiteralStr.size() + 4) / 4);
    }
    llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
  }

private:
  SPIRVOperandType Type;
  std::string LiteralStr;
  uint32_t LiteralNum[2];
};

typedef SmallVector<SPIRVOperand, 4> SPIRVOperandVec;

struct SPIRVInstruction {
  // Primary constructor: requires an Opcode and initializes WordCount based on
  // whether ResID is valid.
  SPIRVInstruction(spv::Op Opc, SPIRVID ResID = 0)
      : Opcode(static_cast<uint16_t>(Opc)) {
    setResult(ResID);
  }

  // Creates an instruction with an opcode and no result ID, and with the given
  // operands. This calls the primary constructor to initialize Opcode and
  // WordCount. Takes ownership of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandVec &Ops) : SPIRVInstruction(Opc) {
    setOperands(Ops);
  }
  // Creates an instruction with an opcode, a result ID, and the given
  // operands. This calls the primary constructor to initialize Opcode and
  // WordCount. Takes ownership of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVID ResID, SPIRVOperandVec &Ops)
      : SPIRVInstruction(Opc, ResID) {
    setOperands(Ops);
  }

  uint32_t getWordCount() const { return WordCount; }
  uint16_t getOpcode() const { return Opcode; }
  SPIRVID getResultID() const { return ResultID; }
  const SPIRVOperandVec &getOperands() const { return Operands; }

private:
  void setResult(SPIRVID ResID = 0) {
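    // One word for the opcode/word-count slot, plus one more when the
    // instruction produces a result ID; setOperands() adds the operand words.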
    WordCount = 1 + (ResID.isValid() ? 1 : 0);
    ResultID = ResID;
  }

  void setOperands(SPIRVOperandVec &Ops) {
    assert(Operands.empty());
    Operands = std::move(Ops);
    for (auto &opd : Operands) {
      WordCount += uint16_t(opd.GetNumWords());
    }
  }

private:
  uint32_t WordCount; // Check the 16-bit bound at code generation time.
  uint16_t Opcode;
  SPIRVID ResultID;
  SPIRVOperandVec Operands;
};

struct SPIRVProducerPass final : public ModulePass {
  typedef DenseMap<Type *, SPIRVID> TypeMapType;
  typedef UniqueVector<Type *> TypeList;
  typedef DenseMap<Value *, SPIRVID> ValueMapType;
  typedef UniqueVector<Value *> ValueList;
  typedef std::vector<std::pair<Value *, SPIRVID>> EntryPointVecType;
  typedef std::set<uint32_t> CapabilitySetType;
  typedef std::list<SPIRVInstruction> SPIRVInstructionList;
  // A vector of pairs, each of which is:
  // - the LLVM instruction that we will later generate SPIR-V code for
  // - the SPIR-V instruction placeholder that will be replaced
  typedef std::vector<std::pair<Value *, SPIRVInstruction *>>
      DeferredInstVecType;
  typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
      GlobalConstFuncMapType;

  explicit SPIRVProducerPass(
      raw_pwrite_stream &out,
      std::vector<clspv::version0::DescriptorMapEntry> *descriptor_map_entries,
      ArrayRef<std::pair<unsigned, std::string>> samplerMap,
      bool outputCInitList)
      : ModulePass(ID), module(nullptr), samplerMap(samplerMap), out(out),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(&out),
        descriptorMapEntries(descriptor_map_entries),
        outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0) {
    addCapability(spv::CapabilityShader);
    Ptr = this;
  }

  virtual ~SPIRVProducerPass() {
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<LoopInfoWrapperPass>();
  }

  virtual bool runOnModule(Module &module) override;

  // output the SPIR-V header block
  void outputHeader();

  // patch the SPIR-V header block
  void patchHeader();

  CapabilitySetType &getCapabilitySet() { return CapabilitySet; }
  TypeMapType &getImageTypeMap() { return ImageTypeMap; }
  TypeList &getTypeList() { return Types; };
  ValueMapType &getValueMap() { return ValueMap; }
  SPIRVInstructionList &getSPIRVInstList(SPIRVSection Section) {
    return SPIRVSections[Section];
  };
  EntryPointVecType &getEntryPointVec() { return EntryPointVec; };
  DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; };
  ValueList &getEntryPointInterfacesVec() { return EntryPointInterfacesVec; };
  SPIRVID getOpExtInstImportID();
  std::vector<SPIRVID> &getBuiltinDimVec() { return BuiltinDimensionVec; };

  bool hasVariablePointersStorageBuffer() {
    return HasVariablePointersStorageBuffer;
  }
  void setVariablePointersStorageBuffer() {
    if (!HasVariablePointersStorageBuffer) {
      addCapability(spv::CapabilityVariablePointersStorageBuffer);
      HasVariablePointersStorageBuffer = true;
    }
  }
  bool hasVariablePointers() { return HasVariablePointers; };
  void setVariablePointers() {
    if (!HasVariablePointers) {
      addCapability(spv::CapabilityVariablePointers);
      HasVariablePointers = true;
    }
  };
  ArrayRef<std::pair<unsigned, std::string>> &getSamplerMap() {
    return samplerMap;
  }
  GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
    return GlobalConstFuncTypeMap;
  }
  SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
    return GlobalConstArgumentSet;
  }
  TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }

  void GenerateLLVMIRInfo();
  // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
  // *not* be converted to a storage buffer, replace each such global variable
  // with one in the storage class expected by SPIR-V.
  void FindGlobalConstVars();
  // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
  // ModuleOrderedResourceVars.
  void FindResourceVars();
  void FindTypePerGlobalVar(GlobalVariable &GV);
  void FindTypePerFunc(Function &F);
  void FindTypesForSamplerMap();
  void FindTypesForResourceVars();
  // Inserts |Ty| and relevant sub-types into the |Types| member, indicating
  // that |Ty| and its subtypes will need a corresponding SPIR-V type.
  void FindType(Type *Ty);

  // Lookup or create Types, Constants.
  // Returns SPIRVID once it has been created.
  SPIRVID getSPIRVType(Type *Ty);
  SPIRVID getSPIRVConstant(Constant *Cst);
  // Lookup SPIRVID of llvm::Value, may create Constant.
  SPIRVID getSPIRVValue(Value *V);

  // Generates instructions for SPIR-V types corresponding to the LLVM types
  // saved in the |Types| member. A type follows its subtypes. IDs are
  // allocated sequentially starting with the current value of nextID, and
  // with a type following its subtypes. Also updates nextID to just beyond
  // the last generated ID.
  void GenerateSPIRVTypes();
  void GenerateModuleInfo();
  void GeneratePushConstantDescriptorMapEntries();
  void GenerateSpecConstantDescriptorMapEntries();
  void GenerateGlobalVar(GlobalVariable &GV);
  void GenerateWorkgroupVars();
  // Generate descriptor map entries for resource variables associated with
  // arguments to F.
  void GenerateDescriptorMapInfo(Function &F);
  void GenerateSamplers();
  // Generate OpVariables for %clspv.resource.var.* calls.
  void GenerateResourceVars();
  void GenerateFuncPrologue(Function &F);
  void GenerateFuncBody(Function &F);
  void GenerateEntryPointInitialStores();
  spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
  spv::Op GetSPIRVCastOpcode(Instruction &I);
  spv::Op GetSPIRVBinaryOpcode(Instruction &I);
  void GenerateInstruction(Instruction &I);
  void GenerateFuncEpilogue();
  void HandleDeferredInstruction();
  void HandleDeferredDecorations();
  bool is4xi8vec(Type *Ty) const;
  spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
  spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
  spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
  // Returns the GLSL extended instruction enum that the given function
  // call maps to. If none, then returns the 0 value, i.e. GLSLstd450Bad.
  glsl::ExtInst getExtInstEnum(StringRef Name);
  // Returns the GLSL extended instruction enum indirectly used by the given
  // function. That is, to implement the given function, we use an extended
  // instruction plus one more instruction. If none, then returns the 0 value,
  // i.e. GLSLstd450Bad.
  glsl::ExtInst getIndirectExtInstEnum(StringRef Name);
  // Returns the single GLSL extended instruction used directly or
  // indirectly by the given function call.
  glsl::ExtInst getDirectOrIndirectExtInstEnum(StringRef Name);
  void WriteOneWord(uint32_t Word);
  void WriteResultID(const SPIRVInstruction &Inst);
  void WriteWordCountAndOpcode(const SPIRVInstruction &Inst);
  void WriteOperand(const SPIRVOperand &Op);
  void WriteSPIRVBinary();
  void WriteSPIRVBinary(SPIRVInstructionList &SPIRVInstList);

  // Returns true if |type| is compatible with OpConstantNull.
  bool IsTypeNullable(const Type *type) const;

  // Populate UBO remapped type maps.
  void PopulateUBOTypeMaps();

  // Populate the merge and continue block maps.
  void PopulateStructuredCFGMaps();

  // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
  // uses the internal map, otherwise it falls back on the data layout.
  uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
  uint64_t GetTypeStoreSize(Type *type, const DataLayout &DL);
  uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);
  uint32_t GetExplicitLayoutStructMemberOffset(StructType *type,
                                               unsigned member,
                                               const DataLayout &DL);

  // Returns the base pointer of |v|.
  Value *GetBasePointer(Value *v);

  // Add Capability if not already (e.g. CapabilityGroupNonUniformBroadcast)
  void addCapability(uint32_t c) { CapabilitySet.emplace(c); }

  // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| based on
  // |address_space|.
  void setVariablePointersCapabilities(unsigned address_space);

  // Returns true if |lhs| and |rhs| represent the same resource or workgroup
  // variable.
  bool sameResource(Value *lhs, Value *rhs) const;

  // Returns true if |inst| is phi or select that selects from the same
  // structure (or null).
  bool selectFromSameObject(Instruction *inst);

  // Returns true if |Arg| is called with a coherent resource.
  bool CalledWithCoherentResource(Argument &Arg);

  //
  // Primary interface for adding SPIRVInstructions to a SPIRVSection.
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Opcode, SPIRVOperandVec &Operands) {
    bool has_result, has_result_type;
    spv::HasResultAndType(Opcode, &has_result, &has_result_type);
    SPIRVID RID = has_result ? incrNextID() : 0;
    SPIRVSections[TSection].emplace_back(Opcode, RID, Operands);
    return RID;
  }
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Op) {
    SPIRVOperandVec Ops;
    return addSPIRVInst<TSection>(Op, Ops);
  }
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Op, uint32_t V) {
    SPIRVOperandVec Ops;
    Ops.emplace_back(LITERAL_WORD, V);
    return addSPIRVInst<TSection>(Op, Ops);
  }
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Op, const char *V) {
    SPIRVOperandVec Ops;
    Ops.emplace_back(LITERAL_STRING, V);
    return addSPIRVInst<TSection>(Op, Ops);
  }
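  // Illustrative use (hypothetical names): a debug name could be emitted with
  //   SPIRVOperandVec Ops;
  //   Ops << some_result_id << "my_name";
  //   addSPIRVInst<kDebug>(spv::OpName, Ops);
  // using the SPIRVOperandVec streaming helpers defined after this class.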

  //
  // Add a placeholder for an llvm::Value that references future values.
  // Must have a result ID just in case the final SPIRVInstruction requires one.
  SPIRVID addSPIRVPlaceholder(Value *I) {
    SPIRVID RID = incrNextID();
    SPIRVOperandVec Ops;
    SPIRVSections[kFunctions].emplace_back(spv::OpExtInst, RID, Ops);
    DeferredInstVec.push_back({I, &SPIRVSections[kFunctions].back()});
    return RID;
  }
  // Replace the placeholder with the actual SPIRVInstruction on the final pass
  // (HandleDeferredInstruction).
  SPIRVID replaceSPIRVInst(SPIRVInstruction *I, spv::Op Opcode,
                           SPIRVOperandVec &Operands) {
    bool has_result, has_result_type;
    spv::HasResultAndType(Opcode, &has_result, &has_result_type);
    SPIRVID RID = has_result ? I->getResultID() : 0;
    *I = SPIRVInstruction(Opcode, RID, Operands);
    return RID;
  }
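  // Typical flow (sketch): a forward reference, such as a branch to a block
  // whose label ID is not yet known, gets addSPIRVPlaceholder() now, and
  // HandleDeferredInstruction() later rewrites it via replaceSPIRVInst() with
  // the real opcode and operands.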

private:
  static char ID;

  Module *module;

  // Set of Capabilities required
  CapabilitySetType CapabilitySet;

  ArrayRef<std::pair<unsigned, std::string>> samplerMap;
  raw_pwrite_stream &out;

  // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
  // convert to other formats on demand?

  // When emitting a C initialization list, the WriteSPIRVBinary method
  // will actually write its words to this vector via binaryTempOut.
  SmallVector<char, 100> binaryTempUnderlyingVector;
  raw_svector_ostream binaryTempOut;

  // Binary output writes to this stream, which might be |out| or
  // |binaryTempOut|. It's the latter when we really want to write a C
  // initializer list.
  raw_pwrite_stream *binaryOut;
  std::vector<version0::DescriptorMapEntry> *descriptorMapEntries;
  const bool outputCInitList; // If true, output looks like {0x7023, ... , 5}
  uint64_t patchBoundOffset;
  uint32_t nextID;

  SPIRVID incrNextID() { return nextID++; }

  // ID for OpTypeInt 32 1.
  SPIRVID int32ID;
  // ID for OpTypeVector %int 4.
  SPIRVID v4int32ID;

  // Maps an LLVM Type pointer to the corresponding SPIR-V Id.
  TypeMapType TypeMap;
  // Maps an LLVM image type to its SPIR-V ID.
  TypeMapType ImageTypeMap;
  // A unique-vector of LLVM types that map to a SPIR-V type.
  TypeList Types;
  // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
  ValueMapType ValueMap;
  SPIRVInstructionList SPIRVSections[kSectionCount];

  EntryPointVecType EntryPointVec;
  DeferredInstVecType DeferredInstVec;
  ValueList EntryPointInterfacesVec;
  SPIRVID OpExtInstImportID;
  std::vector<SPIRVID> BuiltinDimensionVec;
  bool HasVariablePointersStorageBuffer;
  bool HasVariablePointers;
  Type *SamplerTy;
  DenseMap<unsigned, SPIRVID> SamplerLiteralToIDMap;

  // If a function F has a pointer-to-__constant parameter, then this variable
  // will map F's type to (G, index of the parameter), where in a first phase
  // G is F's type. During FindTypePerFunc, G will be changed to F's type
  // but replacing the pointer-to-constant parameter with
  // pointer-to-ModuleScopePrivate.
  // TODO(dneto): This doesn't seem general enough? A function might have
  // more than one such parameter.
  GlobalConstFuncMapType GlobalConstFuncTypeMap;
  SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
  // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
  // or array types, and which point into transparent memory (StorageBuffer
  // storage class). These will require an ArrayStride decoration.
  // See SPV_KHR_variable_pointers rev 13.
  TypeList TypesNeedingArrayStride;

  // This is truly ugly, but works around what look like driver bugs.
  // For get_local_size, an earlier part of the flow has created a module-scope
  // variable in Private address space to hold the value for the workgroup
  // size. Its initializer is a uint3 value marked as builtin WorkgroupSize.
  // When this is present, save the IDs of the initializer value and variable
  // in these two variables. We only ever do a vector load from it, and
  // when we see one of those, substitute just the value of the initializer.
  // This mimics what Glslang does, and that's what drivers are used to.
  // TODO(dneto): Remove this once drivers are fixed.
  SPIRVID WorkgroupSizeValueID;
  SPIRVID WorkgroupSizeVarID;

  // Bookkeeping for mapping kernel arguments to resource variables.
  struct ResourceVarInfo {
    ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
                    Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
        : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
          var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
          addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
    const int index; // Index into ResourceVarInfoList
    const unsigned descriptor_set;
    const unsigned binding;
    Function *const var_fn; // The @clspv.resource.var.* function.
    const clspv::ArgKind arg_kind;
    const int coherent;
    const unsigned addr_space; // The LLVM address space
    // The SPIR-V ID of the OpVariable. Not populated at construction time.
    SPIRVID var_id;
  };
570 // A list of resource var info. Each one correponds to a module-scope
  // resource variable we will have to create. Resource var indices are
  // indices into this vector.
  SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
  // This is a vector of pointers of all the resource vars, but ordered by
  // kernel function, and then by argument.
  UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
  // Map a function to the ordered list of resource variables it uses, one for
  // each argument. If an argument does not use a resource variable, it
  // will have a null pointer entry.
  using FunctionToResourceVarsMapType =
      DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
  FunctionToResourceVarsMapType FunctionToResourceVarsMap;

  // What LLVM types map to SPIR-V types needing layout? These are the
  // arrays and structures supporting storage buffers and uniform buffers.
  TypeList TypesNeedingLayout;
  // What LLVM struct types map to a SPIR-V struct type with Block decoration?
  UniqueVector<StructType *> StructTypesNeedingBlock;
  // For a call that represents a load from an opaque type (samplers, images),
  // map it to the variable id it should load from.
  DenseMap<CallInst *, SPIRVID> ResourceVarDeferredLoadCalls;

  // An ordered list of the kernel arguments of type pointer-to-local.
  using LocalArgList = SmallVector<Argument *, 8>;
  LocalArgList LocalArgs;
  // Information about a pointer-to-local argument.
  struct LocalArgInfo {
    // The SPIR-V ID of the array variable.
    SPIRVID variable_id;
    // The element type of the array.
    Type *elem_type;
    // The ID of the array size constant.
    SPIRVID array_size_id;
    // The ID of the array type.
    SPIRVID array_type_id;
    // The ID of the pointer to the array type.
    SPIRVID ptr_array_type_id;
    // The specialization constant ID of the array size.
    int spec_id;
  };
  // A mapping from Argument to its assigned SpecId.
  DenseMap<const Argument *, int> LocalArgSpecIds;
  // A mapping from SpecId to its LocalArgInfo.
  DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
  // A mapping from a remapped type to its real offsets.
  DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
  // A mapping from a remapped type to its real sizes.
  DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
      RemappedUBOTypeSizes;

  // Maps basic block to its merge block.
  DenseMap<BasicBlock *, BasicBlock *> MergeBlocks;
  // Maps basic block to its continue block.
  DenseMap<BasicBlock *, BasicBlock *> ContinueBlocks;

public:
  static SPIRVProducerPass *Ptr;
};

char SPIRVProducerPass::ID;
SPIRVProducerPass *SPIRVProducerPass::Ptr = nullptr;

} // namespace

namespace clspv {
ModulePass *createSPIRVProducerPass(
    raw_pwrite_stream &out,
    std::vector<version0::DescriptorMapEntry> *descriptor_map_entries,
    ArrayRef<std::pair<unsigned, std::string>> samplerMap,
    bool outputCInitList) {
  return new SPIRVProducerPass(out, descriptor_map_entries, samplerMap,
                               outputCInitList);
}
} // namespace clspv

namespace {
SPIRVOperandVec &operator<<(SPIRVOperandVec &list, uint32_t num) {
  list.emplace_back(LITERAL_WORD, num);
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, int32_t num) {
  list.emplace_back(LITERAL_WORD, static_cast<uint32_t>(num));
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, ArrayRef<uint32_t> num_vec) {
  list.emplace_back(num_vec);
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, StringRef str) {
  list.emplace_back(LITERAL_STRING, str);
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, Type *t) {
  list.emplace_back(NUMBERID, SPIRVProducerPass::Ptr->getSPIRVType(t).get());
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, Value *v) {
  list.emplace_back(NUMBERID, SPIRVProducerPass::Ptr->getSPIRVValue(v).get());
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, SPIRVID &v) {
  list.emplace_back(NUMBERID, v.get());
  return list;
}
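
// Illustrative use of the streaming helpers above (hypothetical names):
//   SPIRVOperandVec Ops;
//   Ops << result_type_id << base_value << uint32_t(literal_index);
// Each overload appends one operand; the Type* and Value* overloads resolve
// their argument to an ID through the producer's type and value maps.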
} // namespace

bool SPIRVProducerPass::runOnModule(Module &M) {
  // TODO(sjw): Need to reset all data members for each Module, or better
  // yet create a new SPIRVProducer for every module. For now only
  // allow 1 call.
  assert(module == nullptr);
  module = &M;
  if (ShowProducerIR) {
    llvm::outs() << *module << "\n";
  }
  binaryOut = outputCInitList ? &binaryTempOut : &out;

  PopulateUBOTypeMaps();
  PopulateStructuredCFGMaps();

  // SPIR-V always begins with its header information
  outputHeader();

  // Gather information from the LLVM IR that we require.
  GenerateLLVMIRInfo();

  // Collect information on global variables too.
  for (GlobalVariable &GV : module->globals()) {
    // If the GV is one of our special __spirv_* variables, remove the
    // initializer as it was only placed there to force LLVM to not throw the
    // value away.
    if (GV.getName().startswith("__spirv_") ||
        GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
      GV.setInitializer(nullptr);
    }

    // Collect types' information from global variable.
    FindTypePerGlobalVar(GV);

    // If the variable is an input, entry points need to know about it.
    if (AddressSpace::Input == GV.getType()->getPointerAddressSpace()) {
      getEntryPointInterfacesVec().insert(&GV);
    }
  }

  // Generate SPIRV instructions for types.
  GenerateSPIRVTypes();

  // Generate literal samplers if necessary.
  GenerateSamplers();

  // Generate descriptor map entries for all push constants
  GeneratePushConstantDescriptorMapEntries();

  // Generate SPIRV variables.
  for (GlobalVariable &GV : module->globals()) {
    GenerateGlobalVar(GV);
  }
  GenerateResourceVars();
  GenerateWorkgroupVars();

  // Generate SPIRV instructions for each function.
  for (Function &F : *module) {
    if (F.isDeclaration()) {
      continue;
    }

    GenerateDescriptorMapInfo(F);

    // Generate Function Prologue.
    GenerateFuncPrologue(F);

    // Generate SPIRV instructions for function body.
    GenerateFuncBody(F);

    // Generate Function Epilogue.
    GenerateFuncEpilogue();
  }

  HandleDeferredInstruction();
  HandleDeferredDecorations();

  // Generate descriptor map entries for module scope specialization constants.
  GenerateSpecConstantDescriptorMapEntries();

  // Generate SPIRV module information.
  GenerateModuleInfo();

  WriteSPIRVBinary();

  // We need to patch the SPIR-V header to set bound correctly.
  patchHeader();

  if (outputCInitList) {
    bool first = true;
    std::ostringstream os;

    auto emit_word = [&os, &first](uint32_t word) {
      if (!first)
        os << ",\n";
      os << word;
      first = false;
    };

    os << "{";
    const std::string str(binaryTempOut.str());
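    // Reassemble the raw byte stream into 32-bit words (little-endian) for the
    // C initializer list.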
    for (unsigned i = 0; i < str.size(); i += 4) {
      const uint32_t a = static_cast<unsigned char>(str[i]);
      const uint32_t b = static_cast<unsigned char>(str[i + 1]);
      const uint32_t c = static_cast<unsigned char>(str[i + 2]);
      const uint32_t d = static_cast<unsigned char>(str[i + 3]);
      emit_word(a | (b << 8) | (c << 16) | (d << 24));
    }
    os << "}\n";
    out << os.str();
  }

  return false;
}

void SPIRVProducerPass::outputHeader() {
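  // The SPIR-V header is five 32-bit words: magic number, version, generator
  // ID, ID bound, and schema (0). The bound written here is provisional and
  // gets patched in patchHeader() once the final nextID is known.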
  binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
                   sizeof(spv::MagicNumber));
  const uint32_t spv_version = 0x10000; // SPIR-V 1.0
  binaryOut->write(reinterpret_cast<const char *>(&spv_version),
                   sizeof(spv_version));

  // use Google's vendor ID
  const uint32_t vendor = 21 << 16;
  binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));

  // we record where we need to come back to and patch in the bound value
  patchBoundOffset = binaryOut->tell();

  // output a bad bound for now
  binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));

  // output the schema (reserved for use and must be 0)
  const uint32_t schema = 0;
  binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
}

void SPIRVProducerPass::patchHeader() {
  // for a binary we just write the value of nextID over bound
  binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
                    patchBoundOffset);
}

void SPIRVProducerPass::GenerateLLVMIRInfo() {
David Neto22f144c2017-06-12 14:26:21 -0400826 // This function generates LLVM IR for function such as global variable for
827 // argument, constant and pointer type for argument access. These information
828 // is artificial one because we need Vulkan SPIR-V output. This function is
829 // executed ahead of FindType and FindConstant.
David Neto22f144c2017-06-12 14:26:21 -0400830
SJW77b87ad2020-04-21 14:37:52 -0500831 FindGlobalConstVars();
David Neto5c22a252018-03-15 16:07:41 -0400832
SJW77b87ad2020-04-21 14:37:52 -0500833 FindResourceVars();
David Neto22f144c2017-06-12 14:26:21 -0400834
835 bool HasWorkGroupBuiltin = false;
SJW77b87ad2020-04-21 14:37:52 -0500836 for (GlobalVariable &GV : module->globals()) {
David Neto22f144c2017-06-12 14:26:21 -0400837 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
838 if (spv::BuiltInWorkgroupSize == BuiltinType) {
839 HasWorkGroupBuiltin = true;
840 }
841 }
842
SJW77b87ad2020-04-21 14:37:52 -0500843 FindTypesForSamplerMap();
844 FindTypesForResourceVars();
David Neto22f144c2017-06-12 14:26:21 -0400845}
846
SJW77b87ad2020-04-21 14:37:52 -0500847void SPIRVProducerPass::FindGlobalConstVars() {
848 clspv::NormalizeGlobalVariables(*module);
849 const DataLayout &DL = module->getDataLayout();
alan-baker56f7aff2019-05-22 08:06:42 -0400850
David Neto862b7d82018-06-14 18:48:37 -0400851 SmallVector<GlobalVariable *, 8> GVList;
852 SmallVector<GlobalVariable *, 8> DeadGVList;
SJW77b87ad2020-04-21 14:37:52 -0500853 for (GlobalVariable &GV : module->globals()) {
David Neto862b7d82018-06-14 18:48:37 -0400854 if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
855 if (GV.use_empty()) {
856 DeadGVList.push_back(&GV);
857 } else {
858 GVList.push_back(&GV);
859 }
860 }
861 }
862
863 // Remove dead global __constant variables.
864 for (auto GV : DeadGVList) {
865 GV->eraseFromParent();
866 }
867 DeadGVList.clear();
868
869 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
870 // For now, we only support a single storage buffer.
871 if (GVList.size() > 0) {
872 assert(GVList.size() == 1);
873 const auto *GV = GVList[0];
874 const auto constants_byte_size =
Alan Bakerfcda9482018-10-02 17:09:59 -0400875 (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
David Neto862b7d82018-06-14 18:48:37 -0400876 const size_t kConstantMaxSize = 65536;
877 if (constants_byte_size > kConstantMaxSize) {
878 outs() << "Max __constant capacity of " << kConstantMaxSize
879 << " bytes exceeded: " << constants_byte_size << " bytes used\n";
880 llvm_unreachable("Max __constant capacity exceeded");
881 }
882 }
883 } else {
884 // Change global constant variable's address space to ModuleScopePrivate.
885 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
886 for (auto GV : GVList) {
887 // Create new gv with ModuleScopePrivate address space.
888 Type *NewGVTy = GV->getType()->getPointerElementType();
889 GlobalVariable *NewGV = new GlobalVariable(
SJW77b87ad2020-04-21 14:37:52 -0500890 *module, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
David Neto862b7d82018-06-14 18:48:37 -0400891 nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
892 NewGV->takeName(GV);
893
894 const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
895 SmallVector<User *, 8> CandidateUsers;
896
897 auto record_called_function_type_as_user =
898 [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
899 // Find argument index.
900 unsigned index = 0;
901 for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
902 if (gv == call->getOperand(i)) {
903 // TODO(dneto): Should we break here?
904 index = i;
905 }
906 }
907
908 // Record function type with global constant.
909 GlobalConstFuncTyMap[call->getFunctionType()] =
910 std::make_pair(call->getFunctionType(), index);
911 };
912
913 for (User *GVU : GVUsers) {
914 if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
915 record_called_function_type_as_user(GV, Call);
916 } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
917 // Check GEP users.
918 for (User *GEPU : GEP->users()) {
919 if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
920 record_called_function_type_as_user(GEP, GEPCall);
921 }
922 }
923 }
924
925 CandidateUsers.push_back(GVU);
926 }
927
928 for (User *U : CandidateUsers) {
929 // Update users of gv with new gv.
alan-bakered80f572019-02-11 17:28:26 -0500930 if (!isa<Constant>(U)) {
931 // #254: Can't change operands of a constant, but this shouldn't be
932 // something that sticks around in the module.
933 U->replaceUsesOfWith(GV, NewGV);
934 }
David Neto862b7d82018-06-14 18:48:37 -0400935 }
936
937 // Delete original gv.
938 GV->eraseFromParent();
939 }
940 }
941}
942
SJW77b87ad2020-04-21 14:37:52 -0500943void SPIRVProducerPass::FindResourceVars() {
David Neto862b7d82018-06-14 18:48:37 -0400944 ResourceVarInfoList.clear();
945 FunctionToResourceVarsMap.clear();
946 ModuleOrderedResourceVars.reset();
947 // Normally, there is one resource variable per clspv.resource.var.*
948 // function, since that is unique'd by arg type and index. By design,
949 // we can share these resource variables across kernels because all
950 // kernels use the same descriptor set.
951 //
952 // But if the user requested distinct descriptor sets per kernel, then
953 // the descriptor allocator has made different (set,binding) pairs for
954 // the same (type,arg_index) pair. Since we can decorate a resource
955 // variable with only exactly one DescriptorSet and Binding, we are
956 // forced in this case to make distinct resource variables whenever
  // the same clspv.resource.var.X function is seen with distinct
  // (set,binding) values.
  const bool always_distinct_sets =
      clspv::Option::DistinctKernelDescriptorSets();
  for (Function &F : *module) {
    // Rely on the fact the resource var functions have a stable ordering
    // in the module.
    if (F.getName().startswith(clspv::ResourceAccessorFunction())) {
      // Find all calls to this function with distinct set and binding pairs.
      // Save them in ResourceVarInfoList.

      // Determine uniqueness of the (set,binding) pairs only within this
      // one resource-var builtin function.
      using SetAndBinding = std::pair<unsigned, unsigned>;
      // Maps set and binding to the resource var info.
      DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
      bool first_use = true;
      for (auto &U : F.uses()) {
        if (auto *call = dyn_cast<CallInst>(U.getUser())) {
          const auto set = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
          const auto binding = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
          const auto arg_kind = clspv::ArgKind(
              dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
          const auto arg_index = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
          const auto coherent = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());

          // Find or make the resource var info for this combination.
          ResourceVarInfo *rv = nullptr;
          if (always_distinct_sets) {
            // Make a new resource var any time we see a different
            // (set,binding) pair.
            SetAndBinding key{set, binding};
            auto where = set_and_binding_map.find(key);
            if (where == set_and_binding_map.end()) {
              rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
                                       binding, &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
              set_and_binding_map[key] = rv;
            } else {
              rv = where->second;
            }
          } else {
            // The default is to make exactly one resource for each
            // clspv.resource.var.* function.
            if (first_use) {
              first_use = false;
              rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
                                       binding, &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
            } else {
              rv = ResourceVarInfoList.back().get();
            }
          }

          // Now populate FunctionToResourceVarsMap.
          auto &mapping =
              FunctionToResourceVarsMap[call->getParent()->getParent()];
          while (mapping.size() <= arg_index) {
            mapping.push_back(nullptr);
          }
          mapping[arg_index] = rv;
        }
      }
    }
  }

  // Populate ModuleOrderedResourceVars.
  for (Function &F : *module) {
    auto where = FunctionToResourceVarsMap.find(&F);
    if (where != FunctionToResourceVarsMap.end()) {
      for (auto &rv : where->second) {
        if (rv != nullptr) {
          ModuleOrderedResourceVars.insert(rv);
        }
      }
    }
  }
  if (ShowResourceVars) {
    for (auto *info : ModuleOrderedResourceVars) {
      outs() << "MORV index " << info->index << " (" << info->descriptor_set
             << "," << info->binding << ") " << *(info->var_fn->getReturnType())
             << "\n";
    }
  }
}

void SPIRVProducerPass::FindTypePerGlobalVar(GlobalVariable &GV) {
  // Investigate global variable's type.
  FindType(GV.getType());
}

void SPIRVProducerPass::FindTypePerFunc(Function &F) {
  // Investigate function's type.
  FunctionType *FTy = F.getFunctionType();

  if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    // Handle a regular function with global constant parameters.
    if (GlobalConstFuncTyMap.count(FTy)) {
      uint32_t GVCstArgIdx = GlobalConstFuncTypeMap[FTy].second;
      SmallVector<Type *, 4> NewFuncParamTys;
      for (unsigned i = 0; i < FTy->getNumParams(); i++) {
        Type *ParamTy = FTy->getParamType(i);
        if (i == GVCstArgIdx) {
          Type *EleTy = ParamTy->getPointerElementType();
          ParamTy = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
        }

        NewFuncParamTys.push_back(ParamTy);
      }

      FunctionType *NewFTy =
          FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
      GlobalConstFuncTyMap[FTy] = std::make_pair(NewFTy, GVCstArgIdx);
      FTy = NewFTy;
    }

    FindType(FTy);
  } else {
    // As kernel functions do not have parameters, create new function type and
    // add it to type map.
    SmallVector<Type *, 4> NewFuncParamTys;
    FunctionType *NewFTy =
        FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
    FindType(NewFTy);
  }

  // Investigate instructions' type in function body.
  for (BasicBlock &BB : F) {
    for (Instruction &I : BB) {
      if (isa<ShuffleVectorInst>(I)) {
        for (unsigned i = 0; i < I.getNumOperands(); i++) {
          // Ignore type for mask of shuffle vector instruction.
          if (i == 2) {
            continue;
          }

          Value *Op = I.getOperand(i);
          if (!isa<MetadataAsValue>(Op)) {
            FindType(Op->getType());
          }
        }

        FindType(I.getType());
        continue;
      }

      CallInst *Call = dyn_cast<CallInst>(&I);

      if (Call && Call->getCalledFunction()->getName().startswith(
                      clspv::ResourceAccessorFunction())) {
        // This is a fake call representing access to a resource variable.
        // We handle that elsewhere.
        continue;
      }

      if (Call && Call->getCalledFunction()->getName().startswith(
                      clspv::WorkgroupAccessorFunction())) {
        // This is a fake call representing access to a workgroup variable.
        // We handle that elsewhere.
        continue;
      }

      // #497: InsertValue and ExtractValue map to OpCompositeInsert and
      // OpCompositeExtract which takes literal values for indices. As a result
      // don't map the type of indices.
      if (I.getOpcode() == Instruction::ExtractValue) {
        FindType(I.getOperand(0)->getType());
        continue;
      }
      if (I.getOpcode() == Instruction::InsertValue) {
        FindType(I.getOperand(0)->getType());
        FindType(I.getOperand(1)->getType());
        continue;
      }

      // #497: InsertElement and ExtractElement map to OpCompositeExtract if
      // the index is a constant. In such a case don't map the index type.
      if (I.getOpcode() == Instruction::ExtractElement) {
        FindType(I.getOperand(0)->getType());
        Value *op1 = I.getOperand(1);
        if (!isa<Constant>(op1) || isa<GlobalValue>(op1)) {
          FindType(op1->getType());
        }
        continue;
      }
      if (I.getOpcode() == Instruction::InsertElement) {
        FindType(I.getOperand(0)->getType());
        FindType(I.getOperand(1)->getType());
        Value *op2 = I.getOperand(2);
        if (!isa<Constant>(op2) || isa<GlobalValue>(op2)) {
          FindType(op2->getType());
        }
        continue;
      }

      // Work through the operands of the instruction.
      for (unsigned i = 0; i < I.getNumOperands(); i++) {
        Value *const Op = I.getOperand(i);
        // If any of the operands is a constant, find the type!
        if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
          FindType(Op->getType());
        }
      }

      for (Use &Op : I.operands()) {
        if (isa<CallInst>(&I)) {
David Neto22f144c2017-06-12 14:26:21 -04001168 // Avoid checking the call instruction's type.
1169 break;
1170 }
Alan Baker202c8c72018-08-13 13:47:44 -04001171 if (CallInst *OpCall = dyn_cast<CallInst>(Op)) {
1172 if (OpCall && OpCall->getCalledFunction()->getName().startswith(
1173 clspv::WorkgroupAccessorFunction())) {
1174 // This is a fake call representing access to a workgroup variable.
1175 // We handle that elsewhere.
1176 continue;
1177 }
1178 }
David Neto22f144c2017-06-12 14:26:21 -04001179 if (!isa<MetadataAsValue>(&Op)) {
1180 FindType(Op->getType());
1181 continue;
1182 }
1183 }
1184
David Neto22f144c2017-06-12 14:26:21 -04001185 // We don't want to track the type of this call as we are going to replace
1186 // it.
Kévin Petitdf71de32019-04-09 14:09:50 +01001187 if (Call && (clspv::LiteralSamplerFunction() ==
David Neto22f144c2017-06-12 14:26:21 -04001188 Call->getCalledFunction()->getName())) {
1189 continue;
1190 }
1191
1192 if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(&I)) {
1193 // If the GEP's base operand is in the ModuleScopePrivate address space,
1194 // make the GEP result use the ModuleScopePrivate address space as well.
1195 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate) {
1196 // Add pointer type with private address space for global constant to
1197 // type list.
1198 Type *EleTy = I.getType()->getPointerElementType();
1199 Type *NewPTy =
1200 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1201
1202 FindType(NewPTy);
1203 continue;
1204 }
1205 }
1206
1207 FindType(I.getType());
1208 }
1209 }
1210}
1211
SJW77b87ad2020-04-21 14:37:52 -05001212void SPIRVProducerPass::FindTypesForSamplerMap() {
David Neto862b7d82018-06-14 18:48:37 -04001213 // If we are using a sampler map, find the type of the sampler.
SJW77b87ad2020-04-21 14:37:52 -05001214 if (module->getFunction(clspv::LiteralSamplerFunction()) ||
David Neto862b7d82018-06-14 18:48:37 -04001215 0 < getSamplerMap().size()) {
SJW77b87ad2020-04-21 14:37:52 -05001216 auto SamplerStructTy = module->getTypeByName("opencl.sampler_t");
David Neto862b7d82018-06-14 18:48:37 -04001217 if (!SamplerStructTy) {
SJW77b87ad2020-04-21 14:37:52 -05001218 SamplerStructTy =
1219 StructType::create(module->getContext(), "opencl.sampler_t");
David Neto862b7d82018-06-14 18:48:37 -04001220 }
1221
1222 SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);
1223
1224 FindType(SamplerTy);
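// Illustrative note: SamplerTy is now the LLVM type "%opencl.sampler_t*" in
// the UniformConstant address space; getSPIRVType() later emits OpTypeSampler
// (and a pointer type to it) when it encounters this type.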
1225 }
1226}
1227
SJW77b87ad2020-04-21 14:37:52 -05001228void SPIRVProducerPass::FindTypesForResourceVars() {
David Neto862b7d82018-06-14 18:48:37 -04001229 // Record types so they are generated.
1230 TypesNeedingLayout.reset();
1231 StructTypesNeedingBlock.reset();
1232
1233 // To match older clspv codegen, generate the float type first if required
1234 // for images.
1235 for (const auto *info : ModuleOrderedResourceVars) {
1236 if (info->arg_kind == clspv::ArgKind::ReadOnlyImage ||
1237 info->arg_kind == clspv::ArgKind::WriteOnlyImage) {
alan-bakerf67468c2019-11-25 15:51:49 -05001238 if (IsIntImageType(info->var_fn->getReturnType())) {
1239 // Nothing for now...
1240 } else if (IsUintImageType(info->var_fn->getReturnType())) {
SJW77b87ad2020-04-21 14:37:52 -05001241 FindType(Type::getInt32Ty(module->getContext()));
alan-bakerf67468c2019-11-25 15:51:49 -05001242 }
1243
1244 // We need "float" either for the sampled type or for the Lod operand.
SJW77b87ad2020-04-21 14:37:52 -05001245 FindType(Type::getFloatTy(module->getContext()));
David Neto862b7d82018-06-14 18:48:37 -04001246 }
1247 }
1248
1249 for (const auto *info : ModuleOrderedResourceVars) {
1250 Type *type = info->var_fn->getReturnType();
1251
1252 switch (info->arg_kind) {
1253 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04001254 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04001255 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1256 StructTypesNeedingBlock.insert(sty);
1257 } else {
1258 errs() << *type << "\n";
1259 llvm_unreachable("Buffer arguments must map to structures!");
1260 }
1261 break;
1262 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001263 case clspv::ArgKind::PodUBO:
1264 case clspv::ArgKind::PodPushConstant:
David Neto862b7d82018-06-14 18:48:37 -04001265 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1266 StructTypesNeedingBlock.insert(sty);
1267 } else {
1268 errs() << *type << "\n";
1269 llvm_unreachable("POD arguments must map to structures!");
1270 }
1271 break;
1272 case clspv::ArgKind::ReadOnlyImage:
1273 case clspv::ArgKind::WriteOnlyImage:
1274 case clspv::ArgKind::Sampler:
1275 // Sampler and image types map to the pointee type but
1276 // in the uniform constant address space.
1277 type = PointerType::get(type->getPointerElementType(),
1278 clspv::AddressSpace::UniformConstant);
1279 break;
1280 default:
1281 break;
1282 }
1283
1284 // The converted type is the type of the OpVariable we will generate.
1285 // If the pointee type is an array of size zero, FindType will convert it
1286 // to a runtime array.
1287 FindType(type);
1288 }
1289
alan-bakerdcd97412019-09-16 15:32:30 -04001290 // If module constants are clustered in a storage buffer then that struct
1291 // needs layout decorations.
1292 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
SJW77b87ad2020-04-21 14:37:52 -05001293 for (GlobalVariable &GV : module->globals()) {
alan-bakerdcd97412019-09-16 15:32:30 -04001294 PointerType *PTy = cast<PointerType>(GV.getType());
1295 const auto AS = PTy->getAddressSpace();
1296 const bool module_scope_constant_external_init =
1297 (AS == AddressSpace::Constant) && GV.hasInitializer();
1298 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
1299 if (module_scope_constant_external_init &&
1300 spv::BuiltInMax == BuiltinType) {
1301 StructTypesNeedingBlock.insert(
1302 cast<StructType>(PTy->getPointerElementType()));
1303 }
1304 }
1305 }
1306
SJW77b87ad2020-04-21 14:37:52 -05001307 for (const GlobalVariable &GV : module->globals()) {
Kévin Petitbbbda972020-03-03 19:16:31 +00001308 if (GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
1309 auto Ty = cast<PointerType>(GV.getType())->getPointerElementType();
1310 assert(Ty->isStructTy() && "Push constants have to be structures.");
1311 auto STy = cast<StructType>(Ty);
1312 StructTypesNeedingBlock.insert(STy);
1313 }
1314 }
1315
David Neto862b7d82018-06-14 18:48:37 -04001316 // Traverse the arrays and structures underneath each Block, and
1317 // mark them as needing layout.
1318 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1319 StructTypesNeedingBlock.end());
1320 while (!work_list.empty()) {
1321 Type *type = work_list.back();
1322 work_list.pop_back();
1323 TypesNeedingLayout.insert(type);
1324 switch (type->getTypeID()) {
1325 case Type::ArrayTyID:
1326 work_list.push_back(type->getArrayElementType());
1327 if (!Hack_generate_runtime_array_stride_early) {
1328 // Remember this array type for deferred decoration.
1329 TypesNeedingArrayStride.insert(type);
1330 }
1331 break;
1332 case Type::StructTyID:
1333 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1334 work_list.push_back(elem_ty);
1335 }
1336 default:
1337 // This type and its contained types don't get layout.
1338 break;
1339 }
1340 }
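// Illustrative example (assumed kernel, not taken from a test): for
//   kernel void foo(global float* data) { ... }
// the buffer argument is typically wrapped as a block struct like
//   { [0 x float] }
// so that struct lands in StructTypesNeedingBlock, and the walk above adds
// both the struct and the inner array to TypesNeedingLayout (and, without the
// early-stride hack, the array to TypesNeedingArrayStride).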
1341}
1342
SJWf93f5f32020-05-05 07:27:56 -05001343void SPIRVProducerPass::GenerateWorkgroupVars() {
Alan Baker202c8c72018-08-13 13:47:44 -04001344 // The SpecId assignment for pointer-to-local arguments is recorded in
1345 // module-level metadata. Translate that information into local argument
1346 // information.
SJWf93f5f32020-05-05 07:27:56 -05001347 LLVMContext &Context = module->getContext();
SJW77b87ad2020-04-21 14:37:52 -05001348 NamedMDNode *nmd = module->getNamedMetadata(clspv::LocalSpecIdMetadataName());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001349 if (!nmd)
1350 return;
Alan Baker202c8c72018-08-13 13:47:44 -04001351 for (auto operand : nmd->operands()) {
1352 MDTuple *tuple = cast<MDTuple>(operand);
1353 ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
1354 Function *func = cast<Function>(fn_md->getValue());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001355 ConstantAsMetadata *arg_index_md =
1356 cast<ConstantAsMetadata>(tuple->getOperand(1));
1357 int arg_index = static_cast<int>(
1358 cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
1359 Argument *arg = &*(func->arg_begin() + arg_index);
Alan Baker202c8c72018-08-13 13:47:44 -04001360
1361 ConstantAsMetadata *spec_id_md =
1362 cast<ConstantAsMetadata>(tuple->getOperand(2));
alan-bakerb6b09dc2018-11-08 16:59:28 -05001363 int spec_id = static_cast<int>(
1364 cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04001365
Alan Baker202c8c72018-08-13 13:47:44 -04001366 LocalArgSpecIds[arg] = spec_id;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001367 if (LocalSpecIdInfoMap.count(spec_id))
1368 continue;
Alan Baker202c8c72018-08-13 13:47:44 -04001369
SJWf93f5f32020-05-05 07:27:56 -05001370 // Generate the spec constant.
1371 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05001372 Ops << Type::getInt32Ty(Context) << 1;
SJWf93f5f32020-05-05 07:27:56 -05001373 SPIRVID ArraySizeID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
Alan Baker202c8c72018-08-13 13:47:44 -04001374
SJWf93f5f32020-05-05 07:27:56 -05001375 // Generate the array type.
1376 Type *ElemTy = arg->getType()->getPointerElementType();
1377 Ops.clear();
1378 // The element type must have been created.
SJW01901d92020-05-21 08:58:31 -05001379 Ops << ElemTy << ArraySizeID;
SJWf93f5f32020-05-05 07:27:56 -05001380
1381 SPIRVID ArrayTypeID = addSPIRVInst<kTypes>(spv::OpTypeArray, Ops);
1382
1383 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05001384 Ops << spv::StorageClassWorkgroup << ArrayTypeID;
SJWf93f5f32020-05-05 07:27:56 -05001385 SPIRVID PtrArrayTypeID = addSPIRVInst<kTypes>(spv::OpTypePointer, Ops);
1386
1387 // Generate OpVariable.
1388 //
1389 // Ops[0] : Result Type ID
1390 // Ops[1] : Storage Class
1391 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05001392 Ops << PtrArrayTypeID << spv::StorageClassWorkgroup;
SJWf93f5f32020-05-05 07:27:56 -05001393
1394 SPIRVID VariableID = addSPIRVInst<kGlobalVariables>(spv::OpVariable, Ops);
1395
1396 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05001397 Ops << ArraySizeID << spv::DecorationSpecId << spec_id;
SJWf93f5f32020-05-05 07:27:56 -05001398 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
1399
1400 LocalArgInfo info{VariableID, ElemTy, ArraySizeID,
1401 ArrayTypeID, PtrArrayTypeID, spec_id};
1402 LocalSpecIdInfoMap[spec_id] = info;
Alan Baker202c8c72018-08-13 13:47:44 -04001403 }
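// Roughly, for a kernel argument declared as "local float* tmp" with spec id
// N, each loop iteration above emits SPIR-V along these lines (result names
// invented for illustration):
//   %size = OpSpecConstant %uint 1        ; decorated SpecId N
//   %arr  = OpTypeArray %float %size
//   %ptr  = OpTypePointer Workgroup %arr
//   %var  = OpVariable %ptr Workgroup
// and records the pieces in LocalSpecIdInfoMap for later use.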
1404}
1405
David Neto22f144c2017-06-12 14:26:21 -04001406void SPIRVProducerPass::FindType(Type *Ty) {
1407 TypeList &TyList = getTypeList();
1408
1409 if (0 != TyList.idFor(Ty)) {
1410 return;
1411 }
1412
1413 if (Ty->isPointerTy()) {
1414 auto AddrSpace = Ty->getPointerAddressSpace();
1415 if ((AddressSpace::Constant == AddrSpace) ||
1416 (AddressSpace::Global == AddrSpace)) {
1417 auto PointeeTy = Ty->getPointerElementType();
1418
1419 if (PointeeTy->isStructTy() &&
1420 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1421 FindType(PointeeTy);
1422 auto ActualPointerTy =
1423 PointeeTy->getPointerTo(AddressSpace::UniformConstant);
1424 FindType(ActualPointerTy);
1425 return;
1426 }
1427 }
1428 }
1429
David Neto862b7d82018-06-14 18:48:37 -04001430 // By convention, an LLVM array type with 0 elements maps to
1431 // OpTypeRuntimeArray. Otherwise, it maps to OpTypeArray, which
1432 // has a constant number of elements. We need to support the type of that
1433 // length constant.
1434 if (auto *arrayTy = dyn_cast<ArrayType>(Ty)) {
1435 if (arrayTy->getNumElements() > 0) {
1436 LLVMContext &Context = Ty->getContext();
1437 FindType(Type::getInt32Ty(Context));
1438 }
David Neto22f144c2017-06-12 14:26:21 -04001439 }
1440
1441 for (Type *SubTy : Ty->subtypes()) {
1442 FindType(SubTy);
1443 }
1444
1445 TyList.insert(Ty);
1446}
1447
David Neto22f144c2017-06-12 14:26:21 -04001448spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
1449 switch (AddrSpace) {
1450 default:
1451 llvm_unreachable("Unsupported OpenCL address space");
1452 case AddressSpace::Private:
1453 return spv::StorageClassFunction;
1454 case AddressSpace::Global:
David Neto22f144c2017-06-12 14:26:21 -04001455 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001456 case AddressSpace::Constant:
1457 return clspv::Option::ConstantArgsInUniformBuffer()
1458 ? spv::StorageClassUniform
1459 : spv::StorageClassStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -04001460 case AddressSpace::Input:
1461 return spv::StorageClassInput;
1462 case AddressSpace::Local:
1463 return spv::StorageClassWorkgroup;
1464 case AddressSpace::UniformConstant:
1465 return spv::StorageClassUniformConstant;
David Neto9ed8e2f2018-03-24 06:47:24 -07001466 case AddressSpace::Uniform:
David Netoe439d702018-03-23 13:14:08 -07001467 return spv::StorageClassUniform;
David Neto22f144c2017-06-12 14:26:21 -04001468 case AddressSpace::ModuleScopePrivate:
1469 return spv::StorageClassPrivate;
Kévin Petitbbbda972020-03-03 19:16:31 +00001470 case AddressSpace::PushConstant:
1471 return spv::StorageClassPushConstant;
David Neto22f144c2017-06-12 14:26:21 -04001472 }
1473}
1474
David Neto862b7d82018-06-14 18:48:37 -04001475spv::StorageClass
1476SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
1477 switch (arg_kind) {
1478 case clspv::ArgKind::Buffer:
1479 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001480 case clspv::ArgKind::BufferUBO:
1481 return spv::StorageClassUniform;
David Neto862b7d82018-06-14 18:48:37 -04001482 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001483 return spv::StorageClassStorageBuffer;
1484 case clspv::ArgKind::PodUBO:
1485 return spv::StorageClassUniform;
1486 case clspv::ArgKind::PodPushConstant:
1487 return spv::StorageClassPushConstant;
David Neto862b7d82018-06-14 18:48:37 -04001488 case clspv::ArgKind::Local:
1489 return spv::StorageClassWorkgroup;
1490 case clspv::ArgKind::ReadOnlyImage:
1491 case clspv::ArgKind::WriteOnlyImage:
1492 case clspv::ArgKind::Sampler:
1493 return spv::StorageClassUniformConstant;
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001494 default:
1495 llvm_unreachable("Unsupported storage class for argument kind");
David Neto862b7d82018-06-14 18:48:37 -04001496 }
1497}
1498
David Neto22f144c2017-06-12 14:26:21 -04001499spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1500 return StringSwitch<spv::BuiltIn>(Name)
1501 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1502 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1503 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1504 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1505 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
alan-bakerbed3a882020-04-21 14:42:41 -04001506 .Case("__spirv_WorkDim", spv::BuiltInWorkDim)
alan-bakere1996972020-05-04 08:38:12 -04001507 .Case("__spirv_GlobalOffset", spv::BuiltInGlobalOffset)
David Neto22f144c2017-06-12 14:26:21 -04001508 .Default(spv::BuiltInMax);
1509}
1510
SJW01901d92020-05-21 08:58:31 -05001511SPIRVID SPIRVProducerPass::getOpExtInstImportID() {
1512 if (OpExtInstImportID == 0) {
1513 //
1514 // Generate OpExtInstImport.
1515 //
1516 // Ops[0] ... Ops[n] = Name (Literal String)
David Neto22f144c2017-06-12 14:26:21 -04001517
SJW01901d92020-05-21 08:58:31 -05001518 OpExtInstImportID =
1519 addSPIRVInst<kImports>(spv::OpExtInstImport, "GLSL.std.450");
1520 }
1521 return OpExtInstImportID;
SJWf93f5f32020-05-05 07:27:56 -05001522}
1523
SJW01901d92020-05-21 08:58:31 -05001524SPIRVID SPIRVProducerPass::getSPIRVType(Type *Ty) {
SJWf93f5f32020-05-05 07:27:56 -05001525 auto TI = TypeMap.find(Ty);
1526 if (TI != TypeMap.end()) {
SJW01901d92020-05-21 08:58:31 -05001527 assert(TI->second.isValid());
SJWf93f5f32020-05-05 07:27:56 -05001528 return TI->second;
1529 }
1530
1531 const auto &DL = module->getDataLayout();
1532
SJW01901d92020-05-21 08:58:31 -05001533 SPIRVID RID;
SJWf93f5f32020-05-05 07:27:56 -05001534
1535 switch (Ty->getTypeID()) {
1536 default: {
1537 Ty->print(errs());
1538 llvm_unreachable("Unsupported type???");
1539 break;
1540 }
1541 case Type::MetadataTyID:
1542 case Type::LabelTyID: {
1543 // Ignore these types.
1544 break;
1545 }
1546 case Type::PointerTyID: {
1547 PointerType *PTy = cast<PointerType>(Ty);
1548 unsigned AddrSpace = PTy->getAddressSpace();
1549
1550 if (AddrSpace != AddressSpace::UniformConstant) {
1551 auto PointeeTy = PTy->getElementType();
1552 if (PointeeTy->isStructTy() &&
1553 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1554 // TODO(sjw): assert always an image?
1555 RID = getSPIRVType(PointeeTy);
1556 break;
1557 }
1558 }
1559
1560 // For the purposes of our Vulkan SPIR-V type system, constant and global
1561 // are conflated.
1562 if (AddressSpace::Constant == AddrSpace) {
1563 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1564 AddrSpace = AddressSpace::Global;
1565 // Check to see if we already created this type (for instance, if we
1566 // had both a constant <type>* and a global <type>*, the SPIR-V type
1567 // would have been created for one of them and is shared by both).
1568 auto GlobalTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
1569 if (0 < TypeMap.count(GlobalTy)) {
1570 RID = TypeMap[GlobalTy];
1571 break;
1572 }
1573 }
1574 } else if (AddressSpace::Global == AddrSpace) {
1575 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1576 AddrSpace = AddressSpace::Constant;
1577
1578 // Check to see if we already created this type (for instance, if we
1579 // had both a constant <type>* and a global <type>*, the SPIR-V type
1580 // would have been created for one of them and is shared by both).
1581 auto ConstantTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
1582 if (0 < TypeMap.count(ConstantTy)) {
1583 RID = TypeMap[ConstantTy];
1584 break;
1585 }
1586 }
1587 }
1588
1589 //
1590 // Generate OpTypePointer.
1591 //
1592
1593 // OpTypePointer
1594 // Ops[0] = Storage Class
1595 // Ops[1] = Element Type ID
1596 SPIRVOperandVec Ops;
1597
SJW01901d92020-05-21 08:58:31 -05001598 Ops << GetStorageClass(AddrSpace) << PTy->getElementType();
SJWf93f5f32020-05-05 07:27:56 -05001599
1600 RID = addSPIRVInst<kTypes>(spv::OpTypePointer, Ops);
1601 break;
1602 }
1603 case Type::StructTyID: {
1604 StructType *STy = cast<StructType>(Ty);
1605
1606 // Handle sampler type.
1607 if (STy->isOpaque()) {
1608 if (STy->getName().equals("opencl.sampler_t")) {
1609 //
1610 // Generate OpTypeSampler
1611 //
1612 // Empty Ops.
1613
1614 RID = addSPIRVInst<kTypes>(spv::OpTypeSampler);
1615 break;
1616 } else if (STy->getName().startswith("opencl.image1d_ro_t") ||
1617 STy->getName().startswith("opencl.image1d_wo_t") ||
1618 STy->getName().startswith("opencl.image1d_array_ro_t") ||
1619 STy->getName().startswith("opencl.image1d_array_wo_t") ||
1620 STy->getName().startswith("opencl.image2d_ro_t") ||
1621 STy->getName().startswith("opencl.image2d_wo_t") ||
1622 STy->getName().startswith("opencl.image2d_array_ro_t") ||
1623 STy->getName().startswith("opencl.image2d_array_wo_t") ||
1624 STy->getName().startswith("opencl.image3d_ro_t") ||
1625 STy->getName().startswith("opencl.image3d_wo_t")) {
SJW01901d92020-05-21 08:58:31 -05001626
1627 if (STy->getName().contains("_wo_t")) {
1628 addCapability(spv::CapabilityStorageImageWriteWithoutFormat);
1629 }
1630 if (STy->getName().startswith("opencl.image1d_")) {
1631 if (STy->getName().contains(".sampled"))
1632 addCapability(spv::CapabilitySampled1D);
1633 else
1634 addCapability(spv::CapabilityImage1D);
1635 }
1636
SJWf93f5f32020-05-05 07:27:56 -05001637 //
1638 // Generate OpTypeImage
1639 //
1640 // Ops[0] = Sampled Type ID
1641 // Ops[1] = Dim ID
1642 // Ops[2] = Depth (Literal Number)
1643 // Ops[3] = Arrayed (Literal Number)
1644 // Ops[4] = MS (Literal Number)
1645 // Ops[5] = Sampled (Literal Number)
1646 // Ops[6] = Image Format ID
1647 //
1648 SPIRVOperandVec Ops;
1649
SJW01901d92020-05-21 08:58:31 -05001650 SPIRVID SampledTyID;
SJWf93f5f32020-05-05 07:27:56 -05001651 if (STy->getName().contains(".float")) {
1652 SampledTyID = getSPIRVType(Type::getFloatTy(Ty->getContext()));
1653 } else if (STy->getName().contains(".uint")) {
1654 SampledTyID = getSPIRVType(Type::getInt32Ty(Ty->getContext()));
1655 } else if (STy->getName().contains(".int")) {
1656 // Generate a signed 32-bit integer if necessary.
1657 if (int32ID == 0) {
1658 SPIRVOperandVec intOps;
SJW01901d92020-05-21 08:58:31 -05001659 intOps << 32 << 1;
SJWf93f5f32020-05-05 07:27:56 -05001660 int32ID = addSPIRVInst<kTypes>(spv::OpTypeInt, intOps);
1661 }
1662 SampledTyID = int32ID;
1663
1664 // Generate a vec4 of the signed int if necessary.
1665 if (v4int32ID == 0) {
1666 SPIRVOperandVec vecOps;
SJW01901d92020-05-21 08:58:31 -05001667 vecOps << int32ID << 4;
SJWf93f5f32020-05-05 07:27:56 -05001668 v4int32ID = addSPIRVInst<kTypes>(spv::OpTypeVector, vecOps);
1669 }
1670 } else {
1671 // This was likely an UndefValue.
1672 SampledTyID = getSPIRVType(Type::getFloatTy(Ty->getContext()));
1673 }
SJW01901d92020-05-21 08:58:31 -05001674 Ops << SampledTyID;
SJWf93f5f32020-05-05 07:27:56 -05001675
1676 spv::Dim DimID = spv::Dim2D;
1677 if (STy->getName().startswith("opencl.image1d_ro_t") ||
1678 STy->getName().startswith("opencl.image1d_wo_t") ||
1679 STy->getName().startswith("opencl.image1d_array_ro_t") ||
1680 STy->getName().startswith("opencl.image1d_array_wo_t")) {
1681 DimID = spv::Dim1D;
1682 } else if (STy->getName().startswith("opencl.image3d_ro_t") ||
1683 STy->getName().startswith("opencl.image3d_wo_t")) {
1684 DimID = spv::Dim3D;
1685 }
SJW01901d92020-05-21 08:58:31 -05001686 Ops << DimID;
SJWf93f5f32020-05-05 07:27:56 -05001687
1688 // TODO: Set up Depth.
SJW01901d92020-05-21 08:58:31 -05001689 Ops << 0;
SJWf93f5f32020-05-05 07:27:56 -05001690
1691 uint32_t arrayed = STy->getName().contains("_array_") ? 1 : 0;
SJW01901d92020-05-21 08:58:31 -05001692 Ops << arrayed;
SJWf93f5f32020-05-05 07:27:56 -05001693
1694 // TODO: Set up MS.
SJW01901d92020-05-21 08:58:31 -05001695 Ops << 0;
SJWf93f5f32020-05-05 07:27:56 -05001696
1697 // Set up Sampled.
1698 //
1699 // From Spec
1700 //
1701 // 0 indicates this is only known at run time, not at compile time
1702 // 1 indicates will be used with sampler
1703 // 2 indicates will be used without a sampler (a storage image)
1704 uint32_t Sampled = 1;
1705 if (!STy->getName().contains(".sampled")) {
1706 Sampled = 2;
1707 }
SJW01901d92020-05-21 08:58:31 -05001708 Ops << Sampled;
SJWf93f5f32020-05-05 07:27:56 -05001709
1710 // TODO: Set up Image Format.
SJW01901d92020-05-21 08:58:31 -05001711 Ops << spv::ImageFormatUnknown;
SJWf93f5f32020-05-05 07:27:56 -05001712
1713 RID = addSPIRVInst<kTypes>(spv::OpTypeImage, Ops);
1714
1715 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05001716 Ops << RID;
SJWf93f5f32020-05-05 07:27:56 -05001717
1718 getImageTypeMap()[Ty] =
1719 addSPIRVInst<kTypes>(spv::OpTypeSampledImage, Ops);
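// For illustration only: a struct named "opencl.image2d_ro_t.float.sampled"
// is expected to produce roughly
//   %img        = OpTypeImage %float 2D 0 0 0 1 Unknown
//   %sampledimg = OpTypeSampledImage %img
// (result names invented; the exact sampled type follows the name-suffix
// handling above).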
1720 break;
1721 }
1722 }
1723
1724 //
1725 // Generate OpTypeStruct
1726 //
1727 // Ops[0] ... Ops[n] = Member IDs
1728 SPIRVOperandVec Ops;
1729
1730 for (auto *EleTy : STy->elements()) {
SJW01901d92020-05-21 08:58:31 -05001731 Ops << EleTy;
SJWf93f5f32020-05-05 07:27:56 -05001732 }
1733
1734 RID = addSPIRVInst<kTypes>(spv::OpTypeStruct, Ops);
1735
1736 // Generate OpMemberDecorate.
1737 if (TypesNeedingLayout.idFor(STy)) {
1738 for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
1739 MemberIdx++) {
1740 // Ops[0] = Structure Type ID
1741 // Ops[1] = Member Index(Literal Number)
1742 // Ops[2] = Decoration (Offset)
1743 // Ops[3] = Byte Offset (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05001744 const auto ByteOffset =
1745 GetExplicitLayoutStructMemberOffset(STy, MemberIdx, DL);
1746
SJW01901d92020-05-21 08:58:31 -05001747 Ops.clear();
1748 Ops << RID << MemberIdx << spv::DecorationOffset << ByteOffset;
SJWf93f5f32020-05-05 07:27:56 -05001749
1750 addSPIRVInst<kAnnotations>(spv::OpMemberDecorate, Ops);
1751 }
1752 }
1753
1754 // Generate OpDecorate.
1755 if (StructTypesNeedingBlock.idFor(STy)) {
1756 Ops.clear();
1757 // Use Block decorations with StorageBuffer storage class.
SJW01901d92020-05-21 08:58:31 -05001758 Ops << RID << spv::DecorationBlock;
SJWf93f5f32020-05-05 07:27:56 -05001759
1760 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
1761 }
1762 break;
1763 }
1764 case Type::IntegerTyID: {
1765 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
1766
SJW01901d92020-05-21 08:58:31 -05001767 if (clspv::Option::Int8Support() && BitWidth == 8) {
1768 addCapability(spv::CapabilityInt8);
1769 } else if (BitWidth == 16) {
1770 addCapability(spv::CapabilityInt16);
1771 } else if (BitWidth == 64) {
1772 addCapability(spv::CapabilityInt64);
1773 }
1774
SJWf93f5f32020-05-05 07:27:56 -05001775 if (BitWidth == 1) {
1776 RID = addSPIRVInst<kTypes>(spv::OpTypeBool);
1777 } else {
1778 if (!clspv::Option::Int8Support() && BitWidth == 8) {
1779 // i8 is added to TypeMap as i32.
1780 RID = getSPIRVType(Type::getIntNTy(Ty->getContext(), 32));
1781 } else {
1782 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05001783 Ops << BitWidth << 0 /* not signed */;
SJWf93f5f32020-05-05 07:27:56 -05001784 RID = addSPIRVInst<kTypes>(spv::OpTypeInt, Ops);
1785 }
1786 }
1787 break;
1788 }
1789 case Type::HalfTyID:
1790 case Type::FloatTyID:
1791 case Type::DoubleTyID: {
1792 uint32_t BitWidth = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
SJW01901d92020-05-21 08:58:31 -05001793 if (BitWidth == 16) {
1794 addCapability(spv::CapabilityFloat16);
1795 } else if (BitWidth == 64) {
1796 addCapability(spv::CapabilityFloat64);
1797 }
1798
SJWf93f5f32020-05-05 07:27:56 -05001799 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05001800 Ops << BitWidth;
SJWf93f5f32020-05-05 07:27:56 -05001801
1802 RID = addSPIRVInst<kTypes>(spv::OpTypeFloat, Ops);
1803 break;
1804 }
1805 case Type::ArrayTyID: {
1806 ArrayType *ArrTy = cast<ArrayType>(Ty);
1807 const uint64_t Length = ArrTy->getArrayNumElements();
1808 if (Length == 0) {
1809 // By convention, map it to a RuntimeArray.
1810
1811 Type *EleTy = ArrTy->getArrayElementType();
1812
1813 //
1814 // Generate OpTypeRuntimeArray.
1815 //
1816 // OpTypeRuntimeArray
1817 // Ops[0] = Element Type ID
1818 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05001819 Ops << EleTy;
SJWf93f5f32020-05-05 07:27:56 -05001820
1821 RID = addSPIRVInst<kTypes>(spv::OpTypeRuntimeArray, Ops);
1822
1823 if (Hack_generate_runtime_array_stride_early) {
1824 // Generate OpDecorate.
1825
1826 // Ops[0] = Target ID
1827 // Ops[1] = Decoration (ArrayStride)
1828 // Ops[2] = Stride Number(Literal Number)
1829 Ops.clear();
1830
SJW01901d92020-05-21 08:58:31 -05001831 Ops << RID << spv::DecorationArrayStride
1832 << static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL));
SJWf93f5f32020-05-05 07:27:56 -05001833
1834 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
1835 }
1836
1837 } else {
1838
1839 //
1840 // Generate OpConstant and OpTypeArray.
1841 //
1842
1843 //
1844 // Generate OpConstant for array length.
1845 //
1846 // Add constant for length to constant list.
1847 Constant *CstLength =
1848 ConstantInt::get(Type::getInt32Ty(module->getContext()), Length);
SJWf93f5f32020-05-05 07:27:56 -05001849
1850 // Remember to generate ArrayStride later
1851 getTypesNeedingArrayStride().insert(Ty);
1852
1853 //
1854 // Generate OpTypeArray.
1855 //
1856 // Ops[0] = Element Type ID
1857 // Ops[1] = Array Length Constant ID
1858 SPIRVOperandVec Ops;
1859
SJW01901d92020-05-21 08:58:31 -05001860 Ops << ArrTy->getElementType() << CstLength;
SJWf93f5f32020-05-05 07:27:56 -05001861
1862 RID = addSPIRVInst<kTypes>(spv::OpTypeArray, Ops);
1863 }
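// Illustrative mapping (result names invented): [0 x float] becomes
//   %rta = OpTypeRuntimeArray %float
// with its ArrayStride emitted here or deferred, depending on the hack flag,
// while [4 x i32] becomes
//   %len = OpConstant %uint 4
//   %arr = OpTypeArray %uint %len
// with the ArrayStride decoration always deferred via TypesNeedingArrayStride.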
1864 break;
1865 }
1866 case Type::FixedVectorTyID: {
1867 auto VecTy = cast<VectorType>(Ty);
1868 // <4 x i8> is changed to i32 if i8 is not generally supported.
1869 if (!clspv::Option::Int8Support() &&
1870 VecTy->getElementType() == Type::getInt8Ty(module->getContext())) {
1871 if (VecTy->getNumElements() == 4) {
1872 RID = getSPIRVType(VecTy->getElementType());
1873 break;
1874 } else {
1875 Ty->print(errs());
1876 llvm_unreachable("Support above i8 vector type");
1877 }
1878 }
1879
1880 // Ops[0] = Component Type ID
1881 // Ops[1] = Component Count (Literal Number)
1882 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05001883 Ops << VecTy->getElementType() << VecTy->getNumElements();
SJWf93f5f32020-05-05 07:27:56 -05001884
1885 RID = addSPIRVInst<kTypes>(spv::OpTypeVector, Ops);
1886 break;
1887 }
1888 case Type::VoidTyID: {
1889 RID = addSPIRVInst<kTypes>(spv::OpTypeVoid);
1890 break;
1891 }
1892 case Type::FunctionTyID: {
1893 // Generate SPIRV instruction for function type.
1894 FunctionType *FTy = cast<FunctionType>(Ty);
1895
1896 // Ops[0] = Return Type ID
1897 // Ops[1] ... Ops[n] = Parameter Type IDs
1898 SPIRVOperandVec Ops;
1899
1900 // Find SPIRV instruction for return type
SJW01901d92020-05-21 08:58:31 -05001901 Ops << FTy->getReturnType();
SJWf93f5f32020-05-05 07:27:56 -05001902
1903 // Find SPIRV instructions for parameter types
1904 for (unsigned k = 0; k < FTy->getNumParams(); k++) {
1905 // Find SPIRV instruction for parameter type.
1906 auto ParamTy = FTy->getParamType(k);
1907 if (ParamTy->isPointerTy()) {
1908 auto PointeeTy = ParamTy->getPointerElementType();
1909 if (PointeeTy->isStructTy() &&
1910 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1911 ParamTy = PointeeTy;
1912 }
1913 }
1914
SJW01901d92020-05-21 08:58:31 -05001915 Ops << ParamTy;
SJWf93f5f32020-05-05 07:27:56 -05001916 }
1917
1918 RID = addSPIRVInst<kTypes>(spv::OpTypeFunction, Ops);
1919 break;
1920 }
1921 }
1922
SJW01901d92020-05-21 08:58:31 -05001923 if (RID.isValid()) {
SJWf93f5f32020-05-05 07:27:56 -05001924 TypeMap[Ty] = RID;
1925 }
1926 return RID;
David Neto22f144c2017-06-12 14:26:21 -04001927}
1928
SJW77b87ad2020-04-21 14:37:52 -05001929void SPIRVProducerPass::GenerateSPIRVTypes() {
David Neto22f144c2017-06-12 14:26:21 -04001930 for (Type *Ty : getTypeList()) {
SJWf93f5f32020-05-05 07:27:56 -05001931 getSPIRVType(Ty);
David Netoc6f3ab22018-04-06 18:02:31 -04001932 }
David Neto22f144c2017-06-12 14:26:21 -04001933}
1934
SJWf93f5f32020-05-05 07:27:56 -05001935SPIRVID SPIRVProducerPass::getSPIRVConstant(Constant *Cst) {
David Neto22f144c2017-06-12 14:26:21 -04001936 ValueMapType &VMap = getValueMap();
David Neto482550a2018-03-24 05:21:07 -07001937 const bool hack_undef = clspv::Option::HackUndef();
David Neto22f144c2017-06-12 14:26:21 -04001938
SJW01901d92020-05-21 08:58:31 -05001939 SPIRVID RID;
David Neto22f144c2017-06-12 14:26:21 -04001940
SJWf93f5f32020-05-05 07:27:56 -05001941 //
1942 // Generate OpConstant.
1943 //
1944 // Ops[0] = Result Type ID
1945 // Ops[1] .. Ops[n] = Values LiteralNumber
1946 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04001947
SJW01901d92020-05-21 08:58:31 -05001948 Ops << Cst->getType();
David Neto22f144c2017-06-12 14:26:21 -04001949
SJWf93f5f32020-05-05 07:27:56 -05001950 std::vector<uint32_t> LiteralNum;
1951 spv::Op Opcode = spv::OpNop;
David Neto22f144c2017-06-12 14:26:21 -04001952
SJWf93f5f32020-05-05 07:27:56 -05001953 if (isa<UndefValue>(Cst)) {
David Neto22f144c2017-06-12 14:26:21 -04001954 // Ops[0] = Result Type ID
SJWf93f5f32020-05-05 07:27:56 -05001955 Opcode = spv::OpUndef;
1956 if (hack_undef && IsTypeNullable(Cst->getType())) {
1957 Opcode = spv::OpConstantNull;
1958 }
1959 } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
1960 unsigned BitWidth = CI->getBitWidth();
1961 if (BitWidth == 1) {
1962 // If the bitwidth of constant is 1, generate OpConstantTrue or
1963 // OpConstantFalse.
1964 if (CI->getZExtValue()) {
1965 // Ops[0] = Result Type ID
1966 Opcode = spv::OpConstantTrue;
David Neto22f144c2017-06-12 14:26:21 -04001967 } else {
SJWf93f5f32020-05-05 07:27:56 -05001968 // Ops[0] = Result Type ID
1969 Opcode = spv::OpConstantFalse;
David Neto22f144c2017-06-12 14:26:21 -04001970 }
SJWf93f5f32020-05-05 07:27:56 -05001971 } else {
1972 auto V = CI->getZExtValue();
1973 LiteralNum.push_back(V & 0xFFFFFFFF);
1974
1975 if (BitWidth > 32) {
1976 LiteralNum.push_back(V >> 32);
David Neto22f144c2017-06-12 14:26:21 -04001977 }
1978
1979 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04001980
SJW01901d92020-05-21 08:58:31 -05001981 Ops << LiteralNum;
SJWf93f5f32020-05-05 07:27:56 -05001982 }
1983 } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
1984 uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
1985 Type *CFPTy = CFP->getType();
1986 if (CFPTy->isFloatTy()) {
1987 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
1988 } else if (CFPTy->isDoubleTy()) {
1989 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
1990 LiteralNum.push_back(FPVal >> 32);
1991 } else if (CFPTy->isHalfTy()) {
1992 LiteralNum.push_back(FPVal & 0xFFFF);
1993 } else {
1994 CFPTy->print(errs());
1995 llvm_unreachable("Implement this ConstantFP Type");
1996 }
David Neto22f144c2017-06-12 14:26:21 -04001997
SJWf93f5f32020-05-05 07:27:56 -05001998 Opcode = spv::OpConstant;
David Neto49351ac2017-08-26 17:32:20 -04001999
SJW01901d92020-05-21 08:58:31 -05002000 Ops << LiteralNum;
SJWf93f5f32020-05-05 07:27:56 -05002001 } else if (isa<ConstantDataSequential>(Cst) &&
2002 cast<ConstantDataSequential>(Cst)->isString()) {
2003 Cst->print(errs());
2004 llvm_unreachable("Implement this Constant");
David Neto49351ac2017-08-26 17:32:20 -04002005
SJWf93f5f32020-05-05 07:27:56 -05002006 } else if (const ConstantDataSequential *CDS =
2007 dyn_cast<ConstantDataSequential>(Cst)) {
2008 // Let's convert a <4 x i8> constant to an i32 constant specially.
2009 // This case occurs when all the values are specified as constant
2010 // ints.
2011 Type *CstTy = Cst->getType();
2012 if (is4xi8vec(CstTy)) {
2013 LLVMContext &Context = CstTy->getContext();
David Neto49351ac2017-08-26 17:32:20 -04002014
SJWf93f5f32020-05-05 07:27:56 -05002015 //
2016 // Generate OpConstant with OpTypeInt 32 0.
2017 //
2018 uint32_t IntValue = 0;
2019 for (unsigned k = 0; k < 4; k++) {
2020 const uint64_t Val = CDS->getElementAsInteger(k);
2021 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto49351ac2017-08-26 17:32:20 -04002022 }
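// Worked example: <i8 1, i8 2, i8 3, i8 4> packs element 0 into the most
// significant byte, so IntValue ends up as 0x01020304.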
2023
SJWf93f5f32020-05-05 07:27:56 -05002024 Type *i32 = Type::getInt32Ty(Context);
2025 Constant *CstInt = ConstantInt::get(i32, IntValue);
2026 RID = getSPIRVValue(CstInt);
2027 } else {
2028
David Neto49351ac2017-08-26 17:32:20 -04002029 // A normal constant-data-sequential case.
David Neto22f144c2017-06-12 14:26:21 -04002030 for (unsigned k = 0; k < CDS->getNumElements(); k++) {
SJW01901d92020-05-21 08:58:31 -05002031 Ops << CDS->getElementAsConstant(k);
David Neto22f144c2017-06-12 14:26:21 -04002032 }
2033
2034 Opcode = spv::OpConstantComposite;
SJWf93f5f32020-05-05 07:27:56 -05002035 }
2036 } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
2037 // Let's convert a <4 x i8> constant to an i32 constant specially.
2038 // This case occurs when at least one of the values is an undef.
2039 Type *CstTy = Cst->getType();
2040 if (is4xi8vec(CstTy)) {
2041 LLVMContext &Context = CstTy->getContext();
David Neto22f144c2017-06-12 14:26:21 -04002042
SJWf93f5f32020-05-05 07:27:56 -05002043 //
2044 // Generate OpConstant with OpTypeInt 32 0.
2045 //
2046 uint32_t IntValue = 0;
2047 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
2048 I != E; ++I) {
2049 uint64_t Val = 0;
2050 const Value *CV = *I;
2051 if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
2052 Val = CI2->getZExtValue();
David Neto22f144c2017-06-12 14:26:21 -04002053 }
SJWf93f5f32020-05-05 07:27:56 -05002054 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto22f144c2017-06-12 14:26:21 -04002055 }
2056
SJWf93f5f32020-05-05 07:27:56 -05002057 Type *i32 = Type::getInt32Ty(Context);
2058 Constant *CstInt = ConstantInt::get(i32, IntValue);
2059 RID = getSPIRVValue(CstInt);
2060 } else {
2061
David Neto22f144c2017-06-12 14:26:21 -04002062 // We use a constant composite in SPIR-V for our constant aggregate in
2063 // LLVM.
2064 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002065
2066 for (unsigned k = 0; k < CA->getNumOperands(); k++) {
David Neto22f144c2017-06-12 14:26:21 -04002067 // And add an operand to the composite we are constructing
SJW01901d92020-05-21 08:58:31 -05002068 Ops << CA->getAggregateElement(k);
David Neto22f144c2017-06-12 14:26:21 -04002069 }
David Neto22f144c2017-06-12 14:26:21 -04002070 }
SJWf93f5f32020-05-05 07:27:56 -05002071 } else if (Cst->isNullValue()) {
2072 Opcode = spv::OpConstantNull;
2073 } else {
2074 Cst->print(errs());
2075 llvm_unreachable("Unsupported Constant???");
2076 }
David Neto22f144c2017-06-12 14:26:21 -04002077
SJWf93f5f32020-05-05 07:27:56 -05002078 if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
2079 // Null pointer requires variable pointers.
2080 setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
2081 }
alan-baker5b86ed72019-02-15 08:26:50 -05002082
SJWf93f5f32020-05-05 07:27:56 -05002083 if (RID == 0) {
2084 RID = addSPIRVInst<kConstants>(Opcode, Ops);
2085 }
2086
2087 VMap[Cst] = RID;
2088
2089 return RID;
2090}
2091
2092SPIRVID SPIRVProducerPass::getSPIRVValue(Value *V) {
2093 auto II = ValueMap.find(V);
2094 if (II != ValueMap.end()) {
SJW01901d92020-05-21 08:58:31 -05002095 assert(II->second.isValid());
SJWf93f5f32020-05-05 07:27:56 -05002096 return II->second;
2097 }
2098 if (Constant *Cst = dyn_cast<Constant>(V)) {
2099 return getSPIRVConstant(Cst);
2100 } else {
2101 llvm_unreachable("Variable not found");
2102 }
2103}
2104
SJW77b87ad2020-04-21 14:37:52 -05002105void SPIRVProducerPass::GenerateSamplers() {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002106 auto &sampler_map = getSamplerMap();
alan-baker09cb9802019-12-10 13:16:27 -05002107 SamplerLiteralToIDMap.clear();
David Neto862b7d82018-06-14 18:48:37 -04002108 DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
2109 DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;
David Neto22f144c2017-06-12 14:26:21 -04002110
David Neto862b7d82018-06-14 18:48:37 -04002111 // We might have samplers in the sampler map that are not used
2112 // in the translation unit. We still need to allocate variables
2113 // and bindings for them.
2114 DenseSet<unsigned> used_bindings;
David Neto22f144c2017-06-12 14:26:21 -04002115
SJW77b87ad2020-04-21 14:37:52 -05002116 auto *var_fn = module->getFunction(clspv::LiteralSamplerFunction());
alan-baker09cb9802019-12-10 13:16:27 -05002117 // Return if there are no literal samplers.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002118 if (!var_fn)
2119 return;
alan-baker09cb9802019-12-10 13:16:27 -05002120
David Neto862b7d82018-06-14 18:48:37 -04002121 for (auto user : var_fn->users()) {
2122 // Populate SamplerLiteralToDescriptorSetMap and
2123 // SamplerLiteralToBindingMap.
2124 //
2125 // Look for calls like
2126 // call %opencl.sampler_t addrspace(2)*
2127 // @clspv.sampler.var.literal(
2128 // i32 descriptor,
2129 // i32 binding,
alan-baker09cb9802019-12-10 13:16:27 -05002130 // i32 (index-into-sampler-map|sampler_mask))
alan-bakerb6b09dc2018-11-08 16:59:28 -05002131 if (auto *call = dyn_cast<CallInst>(user)) {
alan-baker09cb9802019-12-10 13:16:27 -05002132 const auto third_param = static_cast<unsigned>(
alan-bakerb6b09dc2018-11-08 16:59:28 -05002133 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
alan-baker09cb9802019-12-10 13:16:27 -05002134 auto sampler_value = third_param;
2135 if (clspv::Option::UseSamplerMap()) {
2136 if (third_param >= sampler_map.size()) {
2137 errs() << "Out of bounds index to sampler map: " << third_param;
2138 llvm_unreachable("bad sampler init: out of bounds");
2139 }
2140 sampler_value = sampler_map[third_param].first;
David Neto862b7d82018-06-14 18:48:37 -04002141 }
2142
David Neto862b7d82018-06-14 18:48:37 -04002143 const auto descriptor_set = static_cast<unsigned>(
2144 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
2145 const auto binding = static_cast<unsigned>(
2146 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
2147
2148 SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
2149 SamplerLiteralToBindingMap[sampler_value] = binding;
2150 used_bindings.insert(binding);
2151 }
2152 }
2153
alan-baker09cb9802019-12-10 13:16:27 -05002154 DenseSet<size_t> seen;
2155 for (auto user : var_fn->users()) {
2156 if (!isa<CallInst>(user))
2157 continue;
2158
2159 auto call = cast<CallInst>(user);
2160 const unsigned third_param = static_cast<unsigned>(
2161 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
2162
2163 // Already allocated a variable for this value.
2164 if (!seen.insert(third_param).second)
2165 continue;
2166
2167 auto sampler_value = third_param;
2168 if (clspv::Option::UseSamplerMap()) {
2169 sampler_value = sampler_map[third_param].first;
2170 }
2171
David Neto22f144c2017-06-12 14:26:21 -04002172 // Generate OpVariable.
2173 //
2174 // GIDOps[0] : Result Type ID
2175 // GIDOps[1] : Storage Class
SJWf93f5f32020-05-05 07:27:56 -05002176 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002177
SJW01901d92020-05-21 08:58:31 -05002178 Ops << SamplerTy << spv::StorageClassUniformConstant;
David Neto22f144c2017-06-12 14:26:21 -04002179
SJWf93f5f32020-05-05 07:27:56 -05002180 auto sampler_var_id = addSPIRVInst<kGlobalVariables>(spv::OpVariable, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002181
alan-baker09cb9802019-12-10 13:16:27 -05002182 SamplerLiteralToIDMap[sampler_value] = sampler_var_id;
David Neto22f144c2017-06-12 14:26:21 -04002183
David Neto862b7d82018-06-14 18:48:37 -04002184 unsigned descriptor_set;
2185 unsigned binding;
alan-baker09cb9802019-12-10 13:16:27 -05002186 if (SamplerLiteralToBindingMap.find(sampler_value) ==
alan-bakerb6b09dc2018-11-08 16:59:28 -05002187 SamplerLiteralToBindingMap.end()) {
David Neto862b7d82018-06-14 18:48:37 -04002188 // This sampler is not actually used. Find the next free binding for it.
2189 for (binding = 0; used_bindings.count(binding); binding++)
2190 ;
2191 descriptor_set = 0; // Literal samplers always use descriptor set 0.
2192 used_bindings.insert(binding);
2193 } else {
alan-baker09cb9802019-12-10 13:16:27 -05002194 descriptor_set = SamplerLiteralToDescriptorSetMap[sampler_value];
2195 binding = SamplerLiteralToBindingMap[sampler_value];
alan-bakercff80152019-06-15 00:38:00 -04002196
alan-baker09cb9802019-12-10 13:16:27 -05002197 version0::DescriptorMapEntry::SamplerData sampler_data = {sampler_value};
alan-bakercff80152019-06-15 00:38:00 -04002198 descriptorMapEntries->emplace_back(std::move(sampler_data),
2199 descriptor_set, binding);
David Neto862b7d82018-06-14 18:48:37 -04002200 }
2201
SJW69939d52020-04-16 07:29:07 -05002202 // Ops[0] = Target ID
2203 // Ops[1] = Decoration (DescriptorSet)
2204 // Ops[2] = LiteralNumber according to Decoration
2205 Ops.clear();
2206
SJW01901d92020-05-21 08:58:31 -05002207 Ops << sampler_var_id << spv::DecorationDescriptorSet << descriptor_set;
David Neto22f144c2017-06-12 14:26:21 -04002208
SJWf93f5f32020-05-05 07:27:56 -05002209 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002210
2211 // Ops[0] = Target ID
2212 // Ops[1] = Decoration (Binding)
2213 // Ops[2] = LiteralNumber according to Decoration
2214 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002215 Ops << sampler_var_id << spv::DecorationBinding << binding;
David Neto22f144c2017-06-12 14:26:21 -04002216
SJWf93f5f32020-05-05 07:27:56 -05002217 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002218 }
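// Illustrative output (IDs and literals invented): a literal sampler call
//   call ... @clspv.sampler.var.literal(i32 0, i32 1, i32 <mask>)
// yields roughly
//   %s = OpVariable %ptr_UniformConstant_sampler UniformConstant
//   OpDecorate %s DescriptorSet 0
//   OpDecorate %s Binding 1
// plus, for literals that came from actual uses, a sampler entry in the
// descriptor map.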
David Neto862b7d82018-06-14 18:48:37 -04002219}
David Neto22f144c2017-06-12 14:26:21 -04002220
SJW77b87ad2020-04-21 14:37:52 -05002221void SPIRVProducerPass::GenerateResourceVars() {
David Neto862b7d82018-06-14 18:48:37 -04002222 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04002223
David Neto862b7d82018-06-14 18:48:37 -04002224 // Generate variables. Make one for each of resource var info object.
2225 for (auto *info : ModuleOrderedResourceVars) {
2226 Type *type = info->var_fn->getReturnType();
2227 // Remap the address space for opaque types.
2228 switch (info->arg_kind) {
2229 case clspv::ArgKind::Sampler:
2230 case clspv::ArgKind::ReadOnlyImage:
2231 case clspv::ArgKind::WriteOnlyImage:
2232 type = PointerType::get(type->getPointerElementType(),
2233 clspv::AddressSpace::UniformConstant);
2234 break;
2235 default:
2236 break;
2237 }
David Neto22f144c2017-06-12 14:26:21 -04002238
David Neto862b7d82018-06-14 18:48:37 -04002239 const auto sc = GetStorageClassForArgKind(info->arg_kind);
SJWf93f5f32020-05-05 07:27:56 -05002240 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05002241 Ops << type << sc;
David Neto22f144c2017-06-12 14:26:21 -04002242
SJWf93f5f32020-05-05 07:27:56 -05002243 info->var_id = addSPIRVInst<kGlobalVariables>(spv::OpVariable, Ops);
David Neto862b7d82018-06-14 18:48:37 -04002244
2245 // Map calls to the variable-builtin-function.
2246 for (auto &U : info->var_fn->uses()) {
2247 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
2248 const auto set = unsigned(
2249 dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
2250 const auto binding = unsigned(
2251 dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
2252 if (set == info->descriptor_set && binding == info->binding) {
2253 switch (info->arg_kind) {
2254 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002255 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002256 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04002257 case clspv::ArgKind::PodUBO:
2258 case clspv::ArgKind::PodPushConstant:
David Neto862b7d82018-06-14 18:48:37 -04002259 // The call maps to the variable directly.
2260 VMap[call] = info->var_id;
2261 break;
2262 case clspv::ArgKind::Sampler:
2263 case clspv::ArgKind::ReadOnlyImage:
2264 case clspv::ArgKind::WriteOnlyImage:
2265 // The call maps to a load we generate later.
2266 ResourceVarDeferredLoadCalls[call] = info->var_id;
2267 break;
2268 default:
2269 llvm_unreachable("Unhandled arg kind");
2270 }
2271 }
David Neto22f144c2017-06-12 14:26:21 -04002272 }
David Neto862b7d82018-06-14 18:48:37 -04002273 }
2274 }
David Neto22f144c2017-06-12 14:26:21 -04002275
David Neto862b7d82018-06-14 18:48:37 -04002276 // Generate associated decorations.
SJWf93f5f32020-05-05 07:27:56 -05002277 SPIRVOperandVec Ops;
David Neto862b7d82018-06-14 18:48:37 -04002278 for (auto *info : ModuleOrderedResourceVars) {
alan-baker9b0ec3c2020-04-06 14:45:34 -04002279 // Push constants don't need descriptor set or binding decorations.
2280 if (info->arg_kind == clspv::ArgKind::PodPushConstant)
2281 continue;
2282
David Neto862b7d82018-06-14 18:48:37 -04002283 // Decorate with DescriptorSet and Binding.
2284 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002285 Ops << info->var_id << spv::DecorationDescriptorSet << info->descriptor_set;
SJWf93f5f32020-05-05 07:27:56 -05002286 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto862b7d82018-06-14 18:48:37 -04002287
2288 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002289 Ops << info->var_id << spv::DecorationBinding << info->binding;
SJWf93f5f32020-05-05 07:27:56 -05002290 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto862b7d82018-06-14 18:48:37 -04002291
alan-bakere9308012019-03-15 10:25:13 -04002292 if (info->coherent) {
2293 // Decorate with Coherent if required for the variable.
2294 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002295 Ops << info->var_id << spv::DecorationCoherent;
SJWf93f5f32020-05-05 07:27:56 -05002296 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere9308012019-03-15 10:25:13 -04002297 }
2298
David Neto862b7d82018-06-14 18:48:37 -04002299 // Generate NonWritable and NonReadable
2300 switch (info->arg_kind) {
2301 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002302 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002303 if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
2304 clspv::AddressSpace::Constant) {
2305 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002306 Ops << info->var_id << spv::DecorationNonWritable;
SJWf93f5f32020-05-05 07:27:56 -05002307 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002308 }
David Neto862b7d82018-06-14 18:48:37 -04002309 break;
David Neto862b7d82018-06-14 18:48:37 -04002310 case clspv::ArgKind::WriteOnlyImage:
2311 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002312 Ops << info->var_id << spv::DecorationNonReadable;
SJWf93f5f32020-05-05 07:27:56 -05002313 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto862b7d82018-06-14 18:48:37 -04002314 break;
2315 default:
2316 break;
David Neto22f144c2017-06-12 14:26:21 -04002317 }
2318 }
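// Sketch of the result for a storage-buffer resource at set 1, binding 2
// (numbers chosen for illustration):
//   %res = OpVariable %ptr_StorageBuffer_type StorageBuffer
//   OpDecorate %res DescriptorSet 1
//   OpDecorate %res Binding 2
// with Coherent / NonWritable / NonReadable added as the argument kind and
// address space require.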
2319}
2320
SJW77b87ad2020-04-21 14:37:52 -05002321void SPIRVProducerPass::GeneratePushConstantDescriptorMapEntries() {
Kévin Petitbbbda972020-03-03 19:16:31 +00002322
SJW77b87ad2020-04-21 14:37:52 -05002323 if (auto GV = module->getGlobalVariable(clspv::PushConstantsVariableName())) {
2324 auto const &DL = module->getDataLayout();
Kévin Petitbbbda972020-03-03 19:16:31 +00002325 auto MD = GV->getMetadata(clspv::PushConstantsMetadataName());
2326 auto STy = cast<StructType>(GV->getValueType());
2327
2328 for (unsigned i = 0; i < STy->getNumElements(); i++) {
2329 auto pc = static_cast<clspv::PushConstant>(
2330 mdconst::extract<ConstantInt>(MD->getOperand(i))->getZExtValue());
2331 auto memberType = STy->getElementType(i);
2332 auto offset = GetExplicitLayoutStructMemberOffset(STy, i, DL);
2333 unsigned previousOffset = 0;
2334 if (i > 0) {
2335 previousOffset = GetExplicitLayoutStructMemberOffset(STy, i - 1, DL);
2336 }
2337 auto size = static_cast<uint32_t>(GetTypeSizeInBits(memberType, DL)) / 8;
SJW77b87ad2020-04-21 14:37:52 -05002338 assert(isValidExplicitLayout(*module, STy, i,
2339 spv::StorageClassPushConstant, offset,
2340 previousOffset));
Kévin Petitbbbda972020-03-03 19:16:31 +00002341 version0::DescriptorMapEntry::PushConstantData data = {pc, offset, size};
2342 descriptorMapEntries->emplace_back(std::move(data));
2343 }
2344 }
2345}
2346
SJW77b87ad2020-04-21 14:37:52 -05002347void SPIRVProducerPass::GenerateSpecConstantDescriptorMapEntries() {
2348 for (auto pair : clspv::GetSpecConstants(module)) {
alan-bakera1be3322020-04-20 12:48:18 -04002349 auto kind = pair.first;
2350 auto id = pair.second;
2351
2352 // Local memory size is only used for kernel arguments.
2353 if (kind == SpecConstant::kLocalMemorySize)
2354 continue;
2355
2356 version0::DescriptorMapEntry::SpecConstantData data = {kind, id};
2357 descriptorMapEntries->emplace_back(std::move(data));
2358 }
2359}
2360
David Neto22f144c2017-06-12 14:26:21 -04002361void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
David Neto22f144c2017-06-12 14:26:21 -04002362 ValueMapType &VMap = getValueMap();
SJW01901d92020-05-21 08:58:31 -05002363 std::vector<SPIRVID> &BuiltinDimVec = getBuiltinDimVec();
David Neto85082642018-03-24 06:55:20 -07002364 const DataLayout &DL = GV.getParent()->getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04002365
2366 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
2367 Type *Ty = GV.getType();
2368 PointerType *PTy = cast<PointerType>(Ty);
2369
SJW01901d92020-05-21 08:58:31 -05002370 SPIRVID InitializerID;
David Neto22f144c2017-06-12 14:26:21 -04002371
2372 // Workgroup size is handled differently (it goes into a constant)
2373 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2374 std::vector<bool> HasMDVec;
2375 uint32_t PrevXDimCst = 0xFFFFFFFF;
2376 uint32_t PrevYDimCst = 0xFFFFFFFF;
2377 uint32_t PrevZDimCst = 0xFFFFFFFF;
2378 for (Function &Func : *GV.getParent()) {
2379 if (Func.isDeclaration()) {
2380 continue;
2381 }
2382
2383 // We only need to check kernels.
2384 if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
2385 continue;
2386 }
2387
2388 if (const MDNode *MD =
2389 dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
2390 uint32_t CurXDimCst = static_cast<uint32_t>(
2391 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
2392 uint32_t CurYDimCst = static_cast<uint32_t>(
2393 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
2394 uint32_t CurZDimCst = static_cast<uint32_t>(
2395 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
2396
2397 if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
2398 PrevZDimCst == 0xFFFFFFFF) {
2399 PrevXDimCst = CurXDimCst;
2400 PrevYDimCst = CurYDimCst;
2401 PrevZDimCst = CurZDimCst;
2402 } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
2403 CurZDimCst != PrevZDimCst) {
2404 llvm_unreachable(
2405 "reqd_work_group_size must be the same across all kernels");
2406 } else {
2407 continue;
2408 }
2409
2410 //
2411 // Generate OpConstantComposite.
2412 //
2413 // Ops[0] : Result Type ID
2414 // Ops[1] : Constant size for x dimension.
2415 // Ops[2] : Constant size for y dimension.
2416 // Ops[3] : Constant size for z dimension.
SJWf93f5f32020-05-05 07:27:56 -05002417 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002418
SJW01901d92020-05-21 08:58:31 -05002419 SPIRVID XDimCstID =
SJWf93f5f32020-05-05 07:27:56 -05002420 getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(0)));
SJW01901d92020-05-21 08:58:31 -05002421 SPIRVID YDimCstID =
SJWf93f5f32020-05-05 07:27:56 -05002422 getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(1)));
SJW01901d92020-05-21 08:58:31 -05002423 SPIRVID ZDimCstID =
SJWf93f5f32020-05-05 07:27:56 -05002424 getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(2)));
David Neto22f144c2017-06-12 14:26:21 -04002425
SJW01901d92020-05-21 08:58:31 -05002426 Ops << Ty->getPointerElementType() << XDimCstID << YDimCstID
2427 << ZDimCstID;
David Neto22f144c2017-06-12 14:26:21 -04002428
SJWf93f5f32020-05-05 07:27:56 -05002429 InitializerID =
2430 addSPIRVInst<kGlobalVariables>(spv::OpConstantComposite, Ops);
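// For example (assumed kernel attribute, not taken from a test):
// __attribute__((reqd_work_group_size(8, 4, 1))) produces the metadata
// !{i32 8, i32 4, i32 1}, and the initializer above becomes roughly
//   %wgsize = OpConstantComposite %v3uint %uint_8 %uint_4 %uint_1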
David Neto22f144c2017-06-12 14:26:21 -04002431
2432 HasMDVec.push_back(true);
2433 } else {
2434 HasMDVec.push_back(false);
2435 }
2436 }
2437
2438 // Check that all kernels have the same work_group_size definition.
2439 bool HasMD = false;
2440 if (!HasMDVec.empty()) {
2441 HasMD = HasMDVec[0];
2442 for (uint32_t i = 1; i < HasMDVec.size(); i++) {
2443 if (HasMD != HasMDVec[i]) {
2444 llvm_unreachable(
2445 "Kernels should have consistent work group size definition");
2446 }
2447 }
2448 }
2449
2450 // If the kernels lack reqd_work_group_size metadata (or non-uniform NDRanges
2451 // are supported), generate OpSpecConstants for the x/y/z dimensions.
Kévin Petit21c23c62020-04-29 01:38:28 +01002452 if (!HasMD || clspv::Option::NonUniformNDRangeSupported()) {
David Neto22f144c2017-06-12 14:26:21 -04002453 //
2454 // Generate OpSpecConstants for x/y/z dimension.
2455 //
2456 // Ops[0] : Result Type ID
2457 // Ops[1] : Constant size for x/y/z dimension (Literal Number).
David Neto22f144c2017-06-12 14:26:21 -04002458
alan-bakera1be3322020-04-20 12:48:18 -04002459 // Allocate spec constants for workgroup size.
SJW77b87ad2020-04-21 14:37:52 -05002460 clspv::AddWorkgroupSpecConstants(module);
alan-bakera1be3322020-04-20 12:48:18 -04002461
SJWf93f5f32020-05-05 07:27:56 -05002462 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05002463 SPIRVID result_type_id = getSPIRVType(
SJWf93f5f32020-05-05 07:27:56 -05002464 dyn_cast<VectorType>(Ty->getPointerElementType())->getElementType());
David Neto22f144c2017-06-12 14:26:21 -04002465
David Neto257c3892018-04-11 13:19:45 -04002466 // X Dimension
SJW01901d92020-05-21 08:58:31 -05002467 Ops << result_type_id << 1;
2468 SPIRVID XDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002469
2470 // Y Dimension
2471 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002472 Ops << result_type_id << 1;
2473 SPIRVID YDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002474
2475 // Z Dimension
2476 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002477 Ops << result_type_id << 1;
2478 SPIRVID ZDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002479
David Neto257c3892018-04-11 13:19:45 -04002480 BuiltinDimVec.push_back(XDimCstID);
2481 BuiltinDimVec.push_back(YDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002482 BuiltinDimVec.push_back(ZDimCstID);
2483
David Neto22f144c2017-06-12 14:26:21 -04002484 //
2485 // Generate OpSpecConstantComposite.
2486 //
2487 // Ops[0] : Result Type ID
2488 // Ops[1] : Constant size for x dimension.
2489 // Ops[2] : Constant size for y dimension.
2490 // Ops[3] : Constant size for z dimension.
David Neto22f144c2017-06-12 14:26:21 -04002491 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002492 Ops << Ty->getPointerElementType() << XDimCstID << YDimCstID << ZDimCstID;
David Neto22f144c2017-06-12 14:26:21 -04002493
SJWf93f5f32020-05-05 07:27:56 -05002494 InitializerID =
2495 addSPIRVInst<kConstants>(spv::OpSpecConstantComposite, Ops);
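    // Roughly, this path emits (result names are made up):
    //   %x = OpSpecConstant %uint 1
    //   %y = OpSpecConstant %uint 1
    //   %z = OpSpecConstant %uint 1
    //   %wgsize = OpSpecConstantComposite %v3uint %x %y %z
    // The SpecId 0/1/2 decorations for %x/%y/%z are added later in
    // GenerateModuleInfo().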
David Neto22f144c2017-06-12 14:26:21 -04002496 }
alan-bakerbed3a882020-04-21 14:42:41 -04002497 } else if (BuiltinType == spv::BuiltInWorkDim) {
2498 // 1. Generate a specialization constant with a default of 3.
2499 // 2. Allocate and annotate a SpecId for the constant.
2500 // 3. Use the spec constant as the initializer for the variable.
SJWf93f5f32020-05-05 07:27:56 -05002501 SPIRVOperandVec Ops;
alan-bakerbed3a882020-04-21 14:42:41 -04002502
2503 //
2504 // Generate OpSpecConstant.
2505 //
2506 // Ops[0] : Result Type ID
2507 // Ops[1] : Default literal value
alan-bakerbed3a882020-04-21 14:42:41 -04002508
SJW01901d92020-05-21 08:58:31 -05002509 Ops << IntegerType::get(GV.getContext(), 32) << 3;
alan-bakerbed3a882020-04-21 14:42:41 -04002510
SJWf93f5f32020-05-05 07:27:56 -05002511 InitializerID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakerbed3a882020-04-21 14:42:41 -04002512
2513 //
2514 // Generate SpecId decoration.
2515 //
2516 // Ops[0] : target
2517 // Ops[1] : decoration
2518 // Ops[2] : SpecId
Alan Baker75ccc252020-04-21 17:11:52 -04002519 auto spec_id = AllocateSpecConstant(module, SpecConstant::kWorkDim);
alan-bakerbed3a882020-04-21 14:42:41 -04002520 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002521 Ops << InitializerID << spv::DecorationSpecId << spec_id;
alan-bakerbed3a882020-04-21 14:42:41 -04002522
SJWf93f5f32020-05-05 07:27:56 -05002523 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
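    // Net effect, roughly (names are made up):
    //   %workdim = OpSpecConstant %uint 3
    //   OpDecorate %workdim SpecId <spec_id>
    // and %workdim becomes the initializer of the variable emitted below.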
alan-bakere1996972020-05-04 08:38:12 -04002524 } else if (BuiltinType == spv::BuiltInGlobalOffset) {
2525 // 1. Generate a spec constant with a default of {0, 0, 0}.
2526 // 2. Allocate and annotate SpecIds for the constants.
2527 // 3. Use the spec constant as the initializer for the variable.
SJWf93f5f32020-05-05 07:27:56 -05002528 SPIRVOperandVec Ops;
alan-bakere1996972020-05-04 08:38:12 -04002529
2530 //
2531 // Generate OpSpecConstant for each dimension.
2532 //
2533 // Ops[0] : Result Type ID
2534 // Ops[1] : Default literal value
2535 //
SJW01901d92020-05-21 08:58:31 -05002536 Ops << IntegerType::get(GV.getContext(), 32) << 0;
2537 SPIRVID x_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002538
alan-bakere1996972020-05-04 08:38:12 -04002539 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002540 Ops << IntegerType::get(GV.getContext(), 32) << 0;
2541 SPIRVID y_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002542
alan-bakere1996972020-05-04 08:38:12 -04002543 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002544 Ops << IntegerType::get(GV.getContext(), 32) << 0;
2545 SPIRVID z_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002546
2547 //
2548 // Generate SpecId decoration for each dimension.
2549 //
2550 // Ops[0] : target
2551 // Ops[1] : decoration
2552 // Ops[2] : SpecId
2553 //
2554 auto spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetX);
2555 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002556 Ops << x_id << spv::DecorationSpecId << spec_id;
SJWf93f5f32020-05-05 07:27:56 -05002557 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002558
2559 spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetY);
2560 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002561 Ops << y_id << spv::DecorationSpecId << spec_id;
SJWf93f5f32020-05-05 07:27:56 -05002562 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002563
2564 spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetZ);
2565 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002566 Ops << z_id << spv::DecorationSpecId << spec_id;
SJWf93f5f32020-05-05 07:27:56 -05002567 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002568
2569 //
2570 // Generate OpSpecConstantComposite.
2571 //
2572 // Ops[0] : type id
2573 // Ops[1..n-1] : elements
2574 //
alan-bakere1996972020-05-04 08:38:12 -04002575 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002576 Ops << GV.getType()->getPointerElementType() << x_id << y_id << z_id;
SJWf93f5f32020-05-05 07:27:56 -05002577 InitializerID = addSPIRVInst<kConstants>(spv::OpSpecConstantComposite, Ops);
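    // Roughly (names are made up):
    //   %gx = OpSpecConstant %uint 0    ; OpDecorate %gx SpecId <id_x>
    //   %gy = OpSpecConstant %uint 0    ; OpDecorate %gy SpecId <id_y>
    //   %gz = OpSpecConstant %uint 0    ; OpDecorate %gz SpecId <id_z>
    //   %goffset = OpSpecConstantComposite %v3uint %gx %gy %gz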
David Neto22f144c2017-06-12 14:26:21 -04002578 }
2579
David Neto22f144c2017-06-12 14:26:21 -04002580 //
2581 // Generate OpVariable.
2582 //
2583 // GIDOps[0] : Result Type ID
2584 // GIDOps[1] : Storage Class
SJWf93f5f32020-05-05 07:27:56 -05002585 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002586
David Neto85082642018-03-24 06:55:20 -07002587 const auto AS = PTy->getAddressSpace();
SJW01901d92020-05-21 08:58:31 -05002588 Ops << Ty << GetStorageClass(AS);
David Neto22f144c2017-06-12 14:26:21 -04002589
David Neto85082642018-03-24 06:55:20 -07002590 const bool module_scope_constant_external_init =
David Neto862b7d82018-06-14 18:48:37 -04002591 (AS == AddressSpace::Constant) && GV.hasInitializer() &&
David Neto85082642018-03-24 06:55:20 -07002592 clspv::Option::ModuleConstantsInStorageBuffer();
2593
Kévin Petit23d5f182019-08-13 16:21:29 +01002594 if (GV.hasInitializer()) {
2595 auto GVInit = GV.getInitializer();
2596 if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
SJWf93f5f32020-05-05 07:27:56 -05002597 InitializerID = getSPIRVValue(GVInit);
David Neto85082642018-03-24 06:55:20 -07002598 }
2599 }
Kévin Petit23d5f182019-08-13 16:21:29 +01002600
SJW01901d92020-05-21 08:58:31 -05002601 if (InitializerID.isValid()) {
Kévin Petitbbbda972020-03-03 19:16:31 +00002602 // Emit the ID of the initializer as part of the variable definition.
SJW01901d92020-05-21 08:58:31 -05002603 Ops << InitializerID;
Kévin Petit23d5f182019-08-13 16:21:29 +01002604 }
SJW01901d92020-05-21 08:58:31 -05002605 SPIRVID var_id = addSPIRVInst<kGlobalVariables>(spv::OpVariable, Ops);
David Neto85082642018-03-24 06:55:20 -07002606
SJWf93f5f32020-05-05 07:27:56 -05002607 VMap[&GV] = var_id;
David Neto22f144c2017-06-12 14:26:21 -04002608
alan-bakere1996972020-05-04 08:38:12 -04002609 auto IsOpenCLBuiltin = [](spv::BuiltIn builtin) {
2610 return builtin == spv::BuiltInWorkDim ||
2611 builtin == spv::BuiltInGlobalOffset;
2612 };
2613
alan-bakere1996972020-05-04 08:38:12 -04002614 // If we have a builtin (not an OpenCL builtin).
2615 if (spv::BuiltInMax != BuiltinType && !IsOpenCLBuiltin(BuiltinType)) {
David Neto22f144c2017-06-12 14:26:21 -04002616 //
2617 // Generate OpDecorate.
2618 //
2619 // DOps[0] = Target ID
2620 // DOps[1] = Decoration (Builtin)
2621 // DOps[2] = BuiltIn ID
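    // For example, for a global invocation id variable this ends up as,
    // roughly: OpDecorate %var BuiltIn GlobalInvocationId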
SJW01901d92020-05-21 08:58:31 -05002622 SPIRVID ResultID;
David Neto22f144c2017-06-12 14:26:21 -04002623
2624 // WorkgroupSize is different, we decorate the constant composite that has
2625 // its value, rather than the variable that we use to access the value.
2626 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2627 ResultID = InitializerID;
David Netoa60b00b2017-09-15 16:34:09 -04002628 // Save both the value and variable IDs for later.
2629 WorkgroupSizeValueID = InitializerID;
SJWf93f5f32020-05-05 07:27:56 -05002630 WorkgroupSizeVarID = getSPIRVValue(&GV);
David Neto22f144c2017-06-12 14:26:21 -04002631 } else {
SJWf93f5f32020-05-05 07:27:56 -05002632 ResultID = getSPIRVValue(&GV);
David Neto22f144c2017-06-12 14:26:21 -04002633 }
2634
SJW01901d92020-05-21 08:58:31 -05002635 Ops.clear();
2636 Ops << ResultID << spv::DecorationBuiltIn << BuiltinType;
David Neto22f144c2017-06-12 14:26:21 -04002637
SJW01901d92020-05-21 08:58:31 -05002638 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto85082642018-03-24 06:55:20 -07002639 } else if (module_scope_constant_external_init) {
2640 // This module scope constant is initialized from a storage buffer with data
2641 // provided by the host at binding 0 of the next descriptor set.
SJW77b87ad2020-04-21 14:37:52 -05002642 const uint32_t descriptor_set = TakeDescriptorIndex(module);
David Neto85082642018-03-24 06:55:20 -07002643
David Neto862b7d82018-06-14 18:48:37 -04002644    // Emit the initializer to the descriptor map file.
David Neto85082642018-03-24 06:55:20 -07002645 // Use "kind,buffer" to indicate storage buffer. We might want to expand
2646 // that later to other types, like uniform buffer.
alan-bakerf5e5f692018-11-27 08:33:24 -05002647 std::string hexbytes;
2648 llvm::raw_string_ostream str(hexbytes);
2649 clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04002650 version0::DescriptorMapEntry::ConstantData constant_data = {ArgKind::Buffer,
2651 str.str()};
2652 descriptorMapEntries->emplace_back(std::move(constant_data), descriptor_set,
2653 0);
David Neto85082642018-03-24 06:55:20 -07002654
David Neto85082642018-03-24 06:55:20 -07002655 // OpDecorate %var DescriptorSet <descriptor_set>
SJW01901d92020-05-21 08:58:31 -05002656 Ops.clear();
2657 Ops << var_id << spv::DecorationDescriptorSet << descriptor_set;
2658 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
SJW69939d52020-04-16 07:29:07 -05002659
2660 // OpDecorate %var Binding <binding>
SJW01901d92020-05-21 08:58:31 -05002661 Ops.clear();
2662 Ops << var_id << spv::DecorationBinding << 0;
2663 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04002664 }
2665}
2666
SJW77b87ad2020-04-21 14:37:52 -05002667void SPIRVProducerPass::GenerateDescriptorMapInfo(Function &F) {
2668 const auto &DL = module->getDataLayout();
David Netoc5fb5242018-07-30 13:28:31 -04002669 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
2670 return;
2671 }
Kévin Petit717f8572020-04-06 17:31:53 +01002672  // Add a kernel declaration entry for this kernel.
2673 version0::DescriptorMapEntry::KernelDeclData kernel_decl_data = {
2674 F.getName().str()};
2675 descriptorMapEntries->emplace_back(std::move(kernel_decl_data));
2676
David Neto862b7d82018-06-14 18:48:37 -04002677 // Gather the list of resources that are used by this function's arguments.
2678 auto &resource_var_at_index = FunctionToResourceVarsMap[&F];
2679
David Neto862b7d82018-06-14 18:48:37 -04002680 auto *fty = F.getType()->getPointerElementType();
2681 auto *func_ty = dyn_cast<FunctionType>(fty);
2682
alan-baker038e9242019-04-19 22:14:41 -04002683 // If we've clustered POD arguments, then argument details are in metadata.
David Neto862b7d82018-06-14 18:48:37 -04002684 // If an argument maps to a resource variable, then get descriptor set and
2685  // binding from the resource variable. Other info comes from the metadata.
alan-bakerff6c9292020-05-04 08:32:09 -04002686 const auto *arg_map = F.getMetadata(clspv::KernelArgMapMetadataName());
2687 auto local_spec_id_md =
2688 module->getNamedMetadata(clspv::LocalSpecIdMetadataName());
David Neto862b7d82018-06-14 18:48:37 -04002689 if (arg_map) {
2690 for (const auto &arg : arg_map->operands()) {
2691 const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
alan-bakerff6c9292020-05-04 08:32:09 -04002692 assert(arg_node->getNumOperands() == 6);
David Neto862b7d82018-06-14 18:48:37 -04002693 const auto name =
2694 dyn_cast<MDString>(arg_node->getOperand(0))->getString();
2695 const auto old_index =
2696 dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
2697 // Remapped argument index
alan-bakerb6b09dc2018-11-08 16:59:28 -05002698 const size_t new_index = static_cast<size_t>(
2699 dyn_extract<ConstantInt>(arg_node->getOperand(2))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04002700 const auto offset =
2701 dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
Kévin PETITa353c832018-03-20 23:21:21 +00002702 const auto arg_size =
2703 dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
alan-bakerc4579bb2020-04-29 14:15:50 -04002704 const auto argKind = clspv::GetArgKindFromName(
2705 dyn_cast<MDString>(arg_node->getOperand(5))->getString().str());
alan-bakerf5e5f692018-11-27 08:33:24 -05002706
alan-bakerff6c9292020-05-04 08:32:09 -04002707 // If this is a local memory argument, find the right spec id for this
2708 // argument.
2709 int64_t spec_id = -1;
2710 if (argKind == clspv::ArgKind::Local) {
2711 for (auto spec_id_arg : local_spec_id_md->operands()) {
2712 if ((&F == dyn_cast<Function>(
2713 dyn_cast<ValueAsMetadata>(spec_id_arg->getOperand(0))
2714 ->getValue())) &&
2715 (new_index ==
2716 mdconst::extract<ConstantInt>(spec_id_arg->getOperand(1))
2717 ->getZExtValue())) {
2718 spec_id = mdconst::extract<ConstantInt>(spec_id_arg->getOperand(2))
2719 ->getSExtValue();
2720 break;
2721 }
2722 }
2723 }
alan-bakerf5e5f692018-11-27 08:33:24 -05002724 uint32_t descriptor_set = 0;
2725 uint32_t binding = 0;
2726 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05002727 F.getName().str(), name.str(), static_cast<uint32_t>(old_index),
2728 argKind, static_cast<uint32_t>(spec_id),
alan-bakerf5e5f692018-11-27 08:33:24 -05002729 // This will be set below for pointer-to-local args.
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04002730 0, static_cast<uint32_t>(offset), static_cast<uint32_t>(arg_size)};
David Neto862b7d82018-06-14 18:48:37 -04002731 if (spec_id > 0) {
alan-bakerf5e5f692018-11-27 08:33:24 -05002732 kernel_data.local_element_size = static_cast<uint32_t>(GetTypeAllocSize(
2733 func_ty->getParamType(unsigned(new_index))->getPointerElementType(),
2734 DL));
David Neto862b7d82018-06-14 18:48:37 -04002735 } else {
2736 auto *info = resource_var_at_index[new_index];
2737 assert(info);
alan-bakerf5e5f692018-11-27 08:33:24 -05002738 descriptor_set = info->descriptor_set;
2739 binding = info->binding;
David Neto862b7d82018-06-14 18:48:37 -04002740 }
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04002741 descriptorMapEntries->emplace_back(std::move(kernel_data), descriptor_set,
2742 binding);
David Neto862b7d82018-06-14 18:48:37 -04002743 }
2744 } else {
2745 // There is no argument map.
2746 // Take descriptor info from the resource variable calls.
Kévin PETITa353c832018-03-20 23:21:21 +00002747 // Take argument name and size from the arguments list.
David Neto862b7d82018-06-14 18:48:37 -04002748
2749 SmallVector<Argument *, 4> arguments;
2750 for (auto &arg : F.args()) {
2751 arguments.push_back(&arg);
2752 }
2753
2754 unsigned arg_index = 0;
2755 for (auto *info : resource_var_at_index) {
2756 if (info) {
Kévin PETITa353c832018-03-20 23:21:21 +00002757 auto arg = arguments[arg_index];
alan-bakerb6b09dc2018-11-08 16:59:28 -05002758 unsigned arg_size = 0;
alan-baker9b0ec3c2020-04-06 14:45:34 -04002759 if (info->arg_kind == clspv::ArgKind::Pod ||
2760 info->arg_kind == clspv::ArgKind::PodUBO ||
2761 info->arg_kind == clspv::ArgKind::PodPushConstant) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002762 arg_size = static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
Kévin PETITa353c832018-03-20 23:21:21 +00002763 }
2764
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04002765 // Local pointer arguments are unused in this case. Offset is always
2766 // zero.
alan-bakerf5e5f692018-11-27 08:33:24 -05002767 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05002768 F.getName().str(),
2769 arg->getName().str(),
2770 arg_index,
alan-bakerc4579bb2020-04-29 14:15:50 -04002771 info->arg_kind,
alan-baker21574d32020-01-29 16:00:31 -05002772 0,
2773 0,
2774 0,
2775 arg_size};
alan-bakerf5e5f692018-11-27 08:33:24 -05002776 descriptorMapEntries->emplace_back(std::move(kernel_data),
2777 info->descriptor_set, info->binding);
David Neto862b7d82018-06-14 18:48:37 -04002778 }
2779 arg_index++;
2780 }
2781 // Generate mappings for pointer-to-local arguments.
2782 for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
2783 Argument *arg = arguments[arg_index];
Alan Baker202c8c72018-08-13 13:47:44 -04002784 auto where = LocalArgSpecIds.find(arg);
2785 if (where != LocalArgSpecIds.end()) {
2786 auto &local_arg_info = LocalSpecIdInfoMap[where->second];
alan-bakerf5e5f692018-11-27 08:33:24 -05002787        // POD argument members are unused in this case.
2788 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05002789 F.getName().str(),
2790 arg->getName().str(),
alan-bakerf5e5f692018-11-27 08:33:24 -05002791 arg_index,
2792 ArgKind::Local,
2793 static_cast<uint32_t>(local_arg_info.spec_id),
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04002794 static_cast<uint32_t>(
2795 GetTypeAllocSize(local_arg_info.elem_type, DL)),
alan-bakerf5e5f692018-11-27 08:33:24 -05002796 0,
2797 0};
2798 // Pointer-to-local arguments do not utilize descriptor set and binding.
2799 descriptorMapEntries->emplace_back(std::move(kernel_data), 0, 0);
David Neto862b7d82018-06-14 18:48:37 -04002800 }
2801 }
2802 }
2803}
2804
David Neto22f144c2017-06-12 14:26:21 -04002805void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
David Neto22f144c2017-06-12 14:26:21 -04002806 ValueMapType &VMap = getValueMap();
2807 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04002808 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
2809 auto &GlobalConstArgSet = getGlobalConstArgSet();
2810
2811 FunctionType *FTy = F.getFunctionType();
2812
2813 //
David Neto22f144c2017-06-12 14:26:21 -04002814  // Generate OpFunction.
2815 //
2816
2817 // FOps[0] : Result Type ID
2818 // FOps[1] : Function Control
2819 // FOps[2] : Function Type ID
SJWf93f5f32020-05-05 07:27:56 -05002820 SPIRVOperandVec FOps;
David Neto22f144c2017-06-12 14:26:21 -04002821
2822 // Find SPIRV instruction for return type.
SJW01901d92020-05-21 08:58:31 -05002823 FOps << FTy->getReturnType();
David Neto22f144c2017-06-12 14:26:21 -04002824
2825 // Check function attributes for SPIRV Function Control.
2826 uint32_t FuncControl = spv::FunctionControlMaskNone;
2827 if (F.hasFnAttribute(Attribute::AlwaysInline)) {
2828 FuncControl |= spv::FunctionControlInlineMask;
2829 }
2830 if (F.hasFnAttribute(Attribute::NoInline)) {
2831 FuncControl |= spv::FunctionControlDontInlineMask;
2832 }
2833 // TODO: Check llvm attribute for Function Control Pure.
2834 if (F.hasFnAttribute(Attribute::ReadOnly)) {
2835 FuncControl |= spv::FunctionControlPureMask;
2836 }
2837 // TODO: Check llvm attribute for Function Control Const.
2838 if (F.hasFnAttribute(Attribute::ReadNone)) {
2839 FuncControl |= spv::FunctionControlConstMask;
2840 }
2841
SJW01901d92020-05-21 08:58:31 -05002842 FOps << FuncControl;
David Neto22f144c2017-06-12 14:26:21 -04002843
SJW01901d92020-05-21 08:58:31 -05002844 SPIRVID FTyID;
David Neto22f144c2017-06-12 14:26:21 -04002845 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
2846 SmallVector<Type *, 4> NewFuncParamTys;
2847 FunctionType *NewFTy =
2848 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
SJWf93f5f32020-05-05 07:27:56 -05002849 FTyID = getSPIRVType(NewFTy);
David Neto22f144c2017-06-12 14:26:21 -04002850 } else {
David Neto9ed8e2f2018-03-24 06:47:24 -07002851 // Handle regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04002852 if (GlobalConstFuncTyMap.count(FTy)) {
SJWf93f5f32020-05-05 07:27:56 -05002853 FTyID = getSPIRVType(GlobalConstFuncTyMap[FTy].first);
David Neto22f144c2017-06-12 14:26:21 -04002854 } else {
SJWf93f5f32020-05-05 07:27:56 -05002855 FTyID = getSPIRVType(FTy);
David Neto22f144c2017-06-12 14:26:21 -04002856 }
2857 }
2858
SJW01901d92020-05-21 08:58:31 -05002859 FOps << FTyID;
David Neto22f144c2017-06-12 14:26:21 -04002860
SJWf93f5f32020-05-05 07:27:56 -05002861 // Generate SPIRV instruction for function.
2862 SPIRVID FID = addSPIRVInst(spv::OpFunction, FOps);
2863 VMap[&F] = FID;
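  // For example, a kernel with no inline attributes emits, roughly
  // (names are made up):
  //   %f = OpFunction %void None %kernel_fn_type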
David Neto22f144c2017-06-12 14:26:21 -04002864
SJWf93f5f32020-05-05 07:27:56 -05002865 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
2866 EntryPoints.push_back(std::make_pair(&F, FID));
2867 }
David Neto22f144c2017-06-12 14:26:21 -04002868
David Neto482550a2018-03-24 05:21:07 -07002869 if (clspv::Option::ShowIDs()) {
SJW01901d92020-05-21 08:58:31 -05002870 errs() << "Function " << F.getName() << " is " << FID.get() << "\n";
David Netob05675d2018-02-16 12:37:49 -05002871 }
David Neto22f144c2017-06-12 14:26:21 -04002872
2873 //
2874 // Generate OpFunctionParameter for Normal function.
2875 //
2876
2877 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
alan-bakere9308012019-03-15 10:25:13 -04002878
David Neto22f144c2017-06-12 14:26:21 -04002879 // Iterate Argument for name instead of param type from function type.
2880 unsigned ArgIdx = 0;
2881 for (Argument &Arg : F.args()) {
David Neto22f144c2017-06-12 14:26:21 -04002882 // ParamOps[0] : Result Type ID
SJW01901d92020-05-21 08:58:31 -05002883 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002884
2885 // Find SPIRV instruction for parameter type.
SJW01901d92020-05-21 08:58:31 -05002886 SPIRVID ParamTyID = getSPIRVType(Arg.getType());
David Neto22f144c2017-06-12 14:26:21 -04002887 if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
2888 if (GlobalConstFuncTyMap.count(FTy)) {
2889 if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
2890 Type *EleTy = PTy->getPointerElementType();
2891 Type *ArgTy =
2892 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
SJWf93f5f32020-05-05 07:27:56 -05002893 ParamTyID = getSPIRVType(ArgTy);
David Neto22f144c2017-06-12 14:26:21 -04002894 GlobalConstArgSet.insert(&Arg);
2895 }
2896 }
2897 }
SJW01901d92020-05-21 08:58:31 -05002898 Ops << ParamTyID;
David Neto22f144c2017-06-12 14:26:21 -04002899
2900 // Generate SPIRV instruction for parameter.
SJW01901d92020-05-21 08:58:31 -05002901 SPIRVID param_id = addSPIRVInst(spv::OpFunctionParameter, Ops);
SJWf93f5f32020-05-05 07:27:56 -05002902 VMap[&Arg] = param_id;
2903
2904 if (CalledWithCoherentResource(Arg)) {
2905 // If the arg is passed a coherent resource ever, then decorate this
2906 // parameter with Coherent too.
SJW01901d92020-05-21 08:58:31 -05002907 Ops.clear();
2908 Ops << param_id << spv::DecorationCoherent;
2909 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
SJWf93f5f32020-05-05 07:27:56 -05002910 }
David Neto22f144c2017-06-12 14:26:21 -04002911
2912 ArgIdx++;
2913 }
2914 }
2915}
2916
SJW77b87ad2020-04-21 14:37:52 -05002917void SPIRVProducerPass::GenerateModuleInfo() {
David Neto22f144c2017-06-12 14:26:21 -04002918 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04002919 ValueList &EntryPointInterfaces = getEntryPointInterfacesVec();
SJW01901d92020-05-21 08:58:31 -05002920 std::vector<SPIRVID> &BuiltinDimVec = getBuiltinDimVec();
David Neto22f144c2017-06-12 14:26:21 -04002921
SJWf93f5f32020-05-05 07:27:56 -05002922 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002923
SJW01901d92020-05-21 08:58:31 -05002924 for (auto Capability : CapabilitySet) {
David Neto22f144c2017-06-12 14:26:21 -04002925 //
SJW01901d92020-05-21 08:58:31 -05002926 // Generate OpCapability
David Neto22f144c2017-06-12 14:26:21 -04002927 //
2928 // Ops[0] = Capability
SJW01901d92020-05-21 08:58:31 -05002929 addSPIRVInst<kCapabilities>(spv::OpCapability, Capability);
alan-baker5b86ed72019-02-15 08:26:50 -05002930 }
2931
2932 // Always add the storage buffer extension
2933 {
David Neto22f144c2017-06-12 14:26:21 -04002934 //
2935 // Generate OpExtension.
2936 //
2937 // Ops[0] = Name (Literal String)
2938 //
SJWf93f5f32020-05-05 07:27:56 -05002939 addSPIRVInst<kExtensions>(spv::OpExtension,
2940 "SPV_KHR_storage_buffer_storage_class");
alan-baker5b86ed72019-02-15 08:26:50 -05002941 }
David Neto22f144c2017-06-12 14:26:21 -04002942
alan-baker5b86ed72019-02-15 08:26:50 -05002943 if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
2944 //
2945 // Generate OpExtension.
2946 //
2947 // Ops[0] = Name (Literal String)
2948 //
SJWf93f5f32020-05-05 07:27:56 -05002949 addSPIRVInst<kExtensions>(spv::OpExtension, "SPV_KHR_variable_pointers");
David Neto22f144c2017-06-12 14:26:21 -04002950 }
2951
2952 //
2953 // Generate OpMemoryModel
2954 //
2955 // Memory model for Vulkan will always be GLSL450.
2956
2957 // Ops[0] = Addressing Model
2958 // Ops[1] = Memory Model
2959 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002960 Ops << spv::AddressingModelLogical << spv::MemoryModelGLSL450;
David Neto22f144c2017-06-12 14:26:21 -04002961
SJWf93f5f32020-05-05 07:27:56 -05002962 addSPIRVInst<kMemoryModel>(spv::OpMemoryModel, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002963
2964 //
2965 // Generate OpEntryPoint
2966 //
2967 for (auto EntryPoint : EntryPoints) {
2968 // Ops[0] = Execution Model
2969 // Ops[1] = EntryPoint ID
2970 // Ops[2] = Name (Literal String)
2971 // ...
2972 //
2973 // TODO: Do we need to consider Interface ID for forward references???
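    // e.g., roughly: OpEntryPoint GLCompute %foo_id "foo" %iface0 %iface1 ...
    // (names are made up)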
2974 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05002975 const StringRef &name = EntryPoint.first->getName();
SJW01901d92020-05-21 08:58:31 -05002976 Ops << spv::ExecutionModelGLCompute << EntryPoint.second << name;
David Neto22f144c2017-06-12 14:26:21 -04002977
David Neto22f144c2017-06-12 14:26:21 -04002978 for (Value *Interface : EntryPointInterfaces) {
SJW01901d92020-05-21 08:58:31 -05002979 Ops << Interface;
David Neto22f144c2017-06-12 14:26:21 -04002980 }
2981
SJWf93f5f32020-05-05 07:27:56 -05002982 addSPIRVInst<kEntryPoints>(spv::OpEntryPoint, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002983 }
2984
2985 for (auto EntryPoint : EntryPoints) {
Kévin Petit21c23c62020-04-29 01:38:28 +01002986 const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
2987 ->getMetadata("reqd_work_group_size");
2988 if ((MD != nullptr) && !clspv::Option::NonUniformNDRangeSupported()) {
David Neto22f144c2017-06-12 14:26:21 -04002989
2990 if (!BuiltinDimVec.empty()) {
2991 llvm_unreachable(
2992 "Kernels should have consistent work group size definition");
2993 }
2994
2995 //
2996 // Generate OpExecutionMode
2997 //
2998
2999 // Ops[0] = Entry Point ID
3000 // Ops[1] = Execution Mode
3001 // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
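      // e.g., for reqd_work_group_size(8, 4, 1), roughly:
      //   OpExecutionMode %foo_id LocalSize 8 4 1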
3002 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05003003 Ops << EntryPoint.second << spv::ExecutionModeLocalSize;
David Neto22f144c2017-06-12 14:26:21 -04003004
3005 uint32_t XDim = static_cast<uint32_t>(
3006 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
3007 uint32_t YDim = static_cast<uint32_t>(
3008 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
3009 uint32_t ZDim = static_cast<uint32_t>(
3010 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
3011
SJW01901d92020-05-21 08:58:31 -05003012 Ops << XDim << YDim << ZDim;
David Neto22f144c2017-06-12 14:26:21 -04003013
SJWf93f5f32020-05-05 07:27:56 -05003014 addSPIRVInst<kExecutionModes>(spv::OpExecutionMode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003015 }
3016 }
3017
3018 //
3019 // Generate OpSource.
3020 //
3021 // Ops[0] = SourceLanguage ID
3022 // Ops[1] = Version (LiteralNum)
3023 //
SJW01901d92020-05-21 08:58:31 -05003024 uint32_t LangID = spv::SourceLanguageUnknown;
3025 uint32_t LangVer = 0;
Kévin Petitf0515712020-01-07 18:29:20 +00003026 switch (clspv::Option::Language()) {
3027 case clspv::Option::SourceLanguage::OpenCL_C_10:
SJW01901d92020-05-21 08:58:31 -05003028 LangID = spv::SourceLanguageOpenCL_C;
3029 LangVer = 100;
Kévin Petitf0515712020-01-07 18:29:20 +00003030 break;
3031 case clspv::Option::SourceLanguage::OpenCL_C_11:
SJW01901d92020-05-21 08:58:31 -05003032 LangID = spv::SourceLanguageOpenCL_C;
3033 LangVer = 110;
Kévin Petitf0515712020-01-07 18:29:20 +00003034 break;
3035 case clspv::Option::SourceLanguage::OpenCL_C_12:
SJW01901d92020-05-21 08:58:31 -05003036 LangID = spv::SourceLanguageOpenCL_C;
3037 LangVer = 120;
Kévin Petitf0515712020-01-07 18:29:20 +00003038 break;
3039 case clspv::Option::SourceLanguage::OpenCL_C_20:
SJW01901d92020-05-21 08:58:31 -05003040 LangID = spv::SourceLanguageOpenCL_C;
3041 LangVer = 200;
Kévin Petitf0515712020-01-07 18:29:20 +00003042 break;
3043 case clspv::Option::SourceLanguage::OpenCL_CPP:
SJW01901d92020-05-21 08:58:31 -05003044 LangID = spv::SourceLanguageOpenCL_CPP;
3045 LangVer = 100;
Kévin Petitf0515712020-01-07 18:29:20 +00003046 break;
3047 default:
Kévin Petitf0515712020-01-07 18:29:20 +00003048 break;
Kévin Petit0fc88042019-04-09 23:25:02 +01003049 }
David Neto22f144c2017-06-12 14:26:21 -04003050
SJW01901d92020-05-21 08:58:31 -05003051 Ops.clear();
3052 Ops << LangID << LangVer;
SJWf93f5f32020-05-05 07:27:56 -05003053 addSPIRVInst<kDebug>(spv::OpSource, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003054
3055 if (!BuiltinDimVec.empty()) {
3056 //
3057 // Generate OpDecorates for x/y/z dimension.
3058 //
3059 // Ops[0] = Target ID
3060 // Ops[1] = Decoration (SpecId)
David Neto257c3892018-04-11 13:19:45 -04003061 // Ops[2] = Specialization Constant ID (Literal Number)
David Neto22f144c2017-06-12 14:26:21 -04003062
3063 // X Dimension
3064 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05003065 Ops << BuiltinDimVec[0] << spv::DecorationSpecId << 0;
SJWf93f5f32020-05-05 07:27:56 -05003066 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003067
3068 // Y Dimension
3069 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05003070 Ops << BuiltinDimVec[1] << spv::DecorationSpecId << 1;
SJWf93f5f32020-05-05 07:27:56 -05003071 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003072
3073 // Z Dimension
3074 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05003075 Ops << BuiltinDimVec[2] << spv::DecorationSpecId << 2;
SJWf93f5f32020-05-05 07:27:56 -05003076 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003077 }
3078}
3079
David Netob6e2e062018-04-25 10:32:06 -04003080void SPIRVProducerPass::GenerateEntryPointInitialStores() {
3081 // Work around a driver bug. Initializers on Private variables might not
3082 // work. So the start of the kernel should store the initializer value to the
3083 // variables. Yes, *every* entry point pays this cost if *any* entry point
3084 // uses this builtin. At this point I judge this to be an acceptable tradeoff
3085 // of complexity vs. runtime, for a broken driver.
alan-bakerb6b09dc2018-11-08 16:59:28 -05003086 // TODO(dneto): Remove this at some point once fixed drivers are widely
3087 // available.
SJW01901d92020-05-21 08:58:31 -05003088 if (WorkgroupSizeVarID.isValid()) {
3089 assert(WorkgroupSizeValueID.isValid());
David Netob6e2e062018-04-25 10:32:06 -04003090
SJWf93f5f32020-05-05 07:27:56 -05003091 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05003092 Ops << WorkgroupSizeVarID << WorkgroupSizeValueID;
David Netob6e2e062018-04-25 10:32:06 -04003093
SJWf93f5f32020-05-05 07:27:56 -05003094 addSPIRVInst(spv::OpStore, Ops);
David Netob6e2e062018-04-25 10:32:06 -04003095 }
3096}
3097
David Neto22f144c2017-06-12 14:26:21 -04003098void SPIRVProducerPass::GenerateFuncBody(Function &F) {
David Neto22f144c2017-06-12 14:26:21 -04003099 ValueMapType &VMap = getValueMap();
3100
David Netob6e2e062018-04-25 10:32:06 -04003101 const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;
David Neto22f144c2017-06-12 14:26:21 -04003102
3103 for (BasicBlock &BB : F) {
3104 // Register BasicBlock to ValueMap.
David Neto22f144c2017-06-12 14:26:21 -04003105
3106 //
3107 // Generate OpLabel for Basic Block.
3108 //
SJWf93f5f32020-05-05 07:27:56 -05003109 VMap[&BB] = addSPIRVInst(spv::OpLabel);
David Neto22f144c2017-06-12 14:26:21 -04003110
David Neto6dcd4712017-06-23 11:06:47 -04003111 // OpVariable instructions must come first.
3112 for (Instruction &I : BB) {
alan-baker5b86ed72019-02-15 08:26:50 -05003113 if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
3114 // Allocating a pointer requires variable pointers.
3115 if (alloca->getAllocatedType()->isPointerTy()) {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003116 setVariablePointersCapabilities(
3117 alloca->getAllocatedType()->getPointerAddressSpace());
alan-baker5b86ed72019-02-15 08:26:50 -05003118 }
David Neto6dcd4712017-06-23 11:06:47 -04003119 GenerateInstruction(I);
3120 }
3121 }
3122
David Neto22f144c2017-06-12 14:26:21 -04003123 if (&BB == &F.getEntryBlock() && IsKernel) {
David Netob6e2e062018-04-25 10:32:06 -04003124 if (clspv::Option::HackInitializers()) {
3125 GenerateEntryPointInitialStores();
3126 }
David Neto22f144c2017-06-12 14:26:21 -04003127 }
3128
3129 for (Instruction &I : BB) {
David Neto6dcd4712017-06-23 11:06:47 -04003130 if (!isa<AllocaInst>(I)) {
3131 GenerateInstruction(I);
3132 }
David Neto22f144c2017-06-12 14:26:21 -04003133 }
3134 }
3135}
3136
3137spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
3138 const std::map<CmpInst::Predicate, spv::Op> Map = {
3139 {CmpInst::ICMP_EQ, spv::OpIEqual},
3140 {CmpInst::ICMP_NE, spv::OpINotEqual},
3141 {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
3142 {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
3143 {CmpInst::ICMP_ULT, spv::OpULessThan},
3144 {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
3145 {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
3146 {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
3147 {CmpInst::ICMP_SLT, spv::OpSLessThan},
3148 {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
3149 {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
3150 {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
3151 {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
3152 {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
3153 {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
3154 {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
3155 {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
3156 {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
3157 {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
3158 {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
3159 {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
3160 {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};
3161
3162 assert(0 != Map.count(I->getPredicate()));
3163
3164 return Map.at(I->getPredicate());
3165}
3166
3167spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
3168 const std::map<unsigned, spv::Op> Map{
3169 {Instruction::Trunc, spv::OpUConvert},
3170 {Instruction::ZExt, spv::OpUConvert},
3171 {Instruction::SExt, spv::OpSConvert},
3172 {Instruction::FPToUI, spv::OpConvertFToU},
3173 {Instruction::FPToSI, spv::OpConvertFToS},
3174 {Instruction::UIToFP, spv::OpConvertUToF},
3175 {Instruction::SIToFP, spv::OpConvertSToF},
3176 {Instruction::FPTrunc, spv::OpFConvert},
3177 {Instruction::FPExt, spv::OpFConvert},
3178 {Instruction::BitCast, spv::OpBitcast}};
3179
3180 assert(0 != Map.count(I.getOpcode()));
3181
3182 return Map.at(I.getOpcode());
3183}
3184
3185spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
Kévin Petit24272b62018-10-18 19:16:12 +00003186 if (I.getType()->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003187 switch (I.getOpcode()) {
3188 default:
3189 break;
3190 case Instruction::Or:
3191 return spv::OpLogicalOr;
3192 case Instruction::And:
3193 return spv::OpLogicalAnd;
3194 case Instruction::Xor:
3195 return spv::OpLogicalNotEqual;
3196 }
3197 }
3198
alan-bakerb6b09dc2018-11-08 16:59:28 -05003199 const std::map<unsigned, spv::Op> Map{
David Neto22f144c2017-06-12 14:26:21 -04003200 {Instruction::Add, spv::OpIAdd},
3201 {Instruction::FAdd, spv::OpFAdd},
3202 {Instruction::Sub, spv::OpISub},
3203 {Instruction::FSub, spv::OpFSub},
3204 {Instruction::Mul, spv::OpIMul},
3205 {Instruction::FMul, spv::OpFMul},
3206 {Instruction::UDiv, spv::OpUDiv},
3207 {Instruction::SDiv, spv::OpSDiv},
3208 {Instruction::FDiv, spv::OpFDiv},
3209 {Instruction::URem, spv::OpUMod},
3210 {Instruction::SRem, spv::OpSRem},
3211 {Instruction::FRem, spv::OpFRem},
3212 {Instruction::Or, spv::OpBitwiseOr},
3213 {Instruction::Xor, spv::OpBitwiseXor},
3214 {Instruction::And, spv::OpBitwiseAnd},
3215 {Instruction::Shl, spv::OpShiftLeftLogical},
3216 {Instruction::LShr, spv::OpShiftRightLogical},
3217 {Instruction::AShr, spv::OpShiftRightArithmetic}};
3218
3219 assert(0 != Map.count(I.getOpcode()));
3220
3221 return Map.at(I.getOpcode());
3222}
3223
3224void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
David Neto22f144c2017-06-12 14:26:21 -04003225 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04003226 LLVMContext &Context = I.getParent()->getParent()->getParent()->getContext();
3227
SJWf93f5f32020-05-05 07:27:56 -05003228 SPIRVID RID = 0;
David Neto22f144c2017-06-12 14:26:21 -04003229
3230 switch (I.getOpcode()) {
3231 default: {
3232 if (Instruction::isCast(I.getOpcode())) {
3233 //
3234 // Generate SPIRV instructions for cast operators.
3235 //
3236
David Netod2de94a2017-08-28 17:27:47 -04003237 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003238 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04003239 auto toI8 = Ty == Type::getInt8Ty(Context);
3240 auto fromI32 = OpTy == Type::getInt32Ty(Context);
David Neto22f144c2017-06-12 14:26:21 -04003241 // Handle zext, sext and uitofp with i1 type specially.
3242 if ((I.getOpcode() == Instruction::ZExt ||
3243 I.getOpcode() == Instruction::SExt ||
3244 I.getOpcode() == Instruction::UIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05003245 OpTy->isIntOrIntVectorTy(1)) {
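        // SPIR-V cannot convert directly from a boolean, so lower these casts
        // to a select. For example, "%r = zext i1 %c to i32" becomes, roughly:
        //   %r = OpSelect %uint %c %uint_1 %uint_0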
David Neto22f144c2017-06-12 14:26:21 -04003246 //
3247 // Generate OpSelect.
3248 //
3249
3250 // Ops[0] = Result Type ID
3251 // Ops[1] = Condition ID
3252 // Ops[2] = True Constant ID
3253 // Ops[3] = False Constant ID
SJWf93f5f32020-05-05 07:27:56 -05003254 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003255
SJW01901d92020-05-21 08:58:31 -05003256 Ops << I.getType() << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04003257
David Neto22f144c2017-06-12 14:26:21 -04003258 if (I.getOpcode() == Instruction::ZExt) {
SJW01901d92020-05-21 08:58:31 -05003259 Ops << ConstantInt::get(I.getType(), 1);
David Neto22f144c2017-06-12 14:26:21 -04003260 } else if (I.getOpcode() == Instruction::SExt) {
SJW01901d92020-05-21 08:58:31 -05003261 Ops << ConstantInt::getSigned(I.getType(), -1);
David Neto22f144c2017-06-12 14:26:21 -04003262 } else {
SJW01901d92020-05-21 08:58:31 -05003263 Ops << ConstantFP::get(Context, APFloat(1.0f));
David Neto22f144c2017-06-12 14:26:21 -04003264 }
David Neto22f144c2017-06-12 14:26:21 -04003265
David Neto22f144c2017-06-12 14:26:21 -04003266 if (I.getOpcode() == Instruction::ZExt) {
SJW01901d92020-05-21 08:58:31 -05003267 Ops << Constant::getNullValue(I.getType());
David Neto22f144c2017-06-12 14:26:21 -04003268 } else if (I.getOpcode() == Instruction::SExt) {
SJW01901d92020-05-21 08:58:31 -05003269 Ops << Constant::getNullValue(I.getType());
David Neto22f144c2017-06-12 14:26:21 -04003270 } else {
SJW01901d92020-05-21 08:58:31 -05003271 Ops << ConstantFP::get(Context, APFloat(0.0f));
David Neto22f144c2017-06-12 14:26:21 -04003272 }
David Neto22f144c2017-06-12 14:26:21 -04003273
SJWf93f5f32020-05-05 07:27:56 -05003274 RID = addSPIRVInst(spv::OpSelect, Ops);
alan-bakerb39c8262019-03-08 14:03:37 -05003275 } else if (!clspv::Option::Int8Support() &&
3276 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04003277 // The SPIR-V target type is a 32-bit int. Keep only the bottom
3278 // 8 bits.
3279 // Before:
3280 // %result = trunc i32 %a to i8
3281 // After
3282 // %result = OpBitwiseAnd %uint %a %uint_255
3283
SJWf93f5f32020-05-05 07:27:56 -05003284 SPIRVOperandVec Ops;
David Netod2de94a2017-08-28 17:27:47 -04003285
David Netod2de94a2017-08-28 17:27:47 -04003286 Type *UintTy = Type::getInt32Ty(Context);
SJW01901d92020-05-21 08:58:31 -05003287 Ops << OpTy << I.getOperand(0) << ConstantInt::get(UintTy, 255);
David Netod2de94a2017-08-28 17:27:47 -04003288
SJWf93f5f32020-05-05 07:27:56 -05003289 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003290 } else {
3291 // Ops[0] = Result Type ID
3292 // Ops[1] = Source Value ID
SJWf93f5f32020-05-05 07:27:56 -05003293 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003294
SJW01901d92020-05-21 08:58:31 -05003295 Ops << I.getType() << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04003296
SJWf93f5f32020-05-05 07:27:56 -05003297 RID = addSPIRVInst(GetSPIRVCastOpcode(I), Ops);
David Neto22f144c2017-06-12 14:26:21 -04003298 }
3299 } else if (isa<BinaryOperator>(I)) {
3300 //
3301 // Generate SPIRV instructions for binary operators.
3302 //
3303
3304 // Handle xor with i1 type specially.
3305 if (I.getOpcode() == Instruction::Xor &&
3306 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00003307 ((isa<ConstantInt>(I.getOperand(0)) &&
3308 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
3309 (isa<ConstantInt>(I.getOperand(1)) &&
3310 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04003311 //
3312 // Generate OpLogicalNot.
3313 //
3314 // Ops[0] = Result Type ID
3315 // Ops[1] = Operand
SJWf93f5f32020-05-05 07:27:56 -05003316 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003317
SJW01901d92020-05-21 08:58:31 -05003318 Ops << I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003319
3320 Value *CondV = I.getOperand(0);
3321 if (isa<Constant>(I.getOperand(0))) {
3322 CondV = I.getOperand(1);
3323 }
SJW01901d92020-05-21 08:58:31 -05003324 Ops << CondV;
David Neto22f144c2017-06-12 14:26:21 -04003325
SJWf93f5f32020-05-05 07:27:56 -05003326 RID = addSPIRVInst(spv::OpLogicalNot, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003327 } else {
3328 // Ops[0] = Result Type ID
3329 // Ops[1] = Operand 0
3330 // Ops[2] = Operand 1
SJWf93f5f32020-05-05 07:27:56 -05003331 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003332
SJW01901d92020-05-21 08:58:31 -05003333 Ops << I.getType() << I.getOperand(0) << I.getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04003334
SJWf93f5f32020-05-05 07:27:56 -05003335 RID = addSPIRVInst(GetSPIRVBinaryOpcode(I), Ops);
David Neto22f144c2017-06-12 14:26:21 -04003336 }
alan-bakerc9c55ae2019-12-02 16:01:27 -05003337 } else if (I.getOpcode() == Instruction::FNeg) {
3338 // The only unary operator.
3339 //
3340 // Ops[0] = Result Type ID
3341 // Ops[1] = Operand 0
SJW01901d92020-05-21 08:58:31 -05003342 SPIRVOperandVec Ops;
alan-bakerc9c55ae2019-12-02 16:01:27 -05003343
SJW01901d92020-05-21 08:58:31 -05003344 Ops << I.getType() << I.getOperand(0);
3345 RID = addSPIRVInst(spv::OpFNegate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003346 } else {
3347 I.print(errs());
3348 llvm_unreachable("Unsupported instruction???");
3349 }
3350 break;
3351 }
3352 case Instruction::GetElementPtr: {
3353 auto &GlobalConstArgSet = getGlobalConstArgSet();
3354
3355 //
3356 // Generate OpAccessChain.
3357 //
3358 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
3359
3360 //
3361 // Generate OpAccessChain.
3362 //
3363
3364 // Ops[0] = Result Type ID
3365 // Ops[1] = Base ID
3366 // Ops[2] ... Ops[n] = Indexes ID
SJWf93f5f32020-05-05 07:27:56 -05003367 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003368
alan-bakerb6b09dc2018-11-08 16:59:28 -05003369 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04003370 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
3371 GlobalConstArgSet.count(GEP->getPointerOperand())) {
3372 // Use pointer type with private address space for global constant.
3373 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04003374 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04003375 }
David Neto257c3892018-04-11 13:19:45 -04003376
SJW01901d92020-05-21 08:58:31 -05003377 Ops << ResultType;
David Neto22f144c2017-06-12 14:26:21 -04003378
David Neto862b7d82018-06-14 18:48:37 -04003379 // Generate the base pointer.
SJW01901d92020-05-21 08:58:31 -05003380 Ops << GEP->getPointerOperand();
David Neto22f144c2017-06-12 14:26:21 -04003381
David Neto862b7d82018-06-14 18:48:37 -04003382 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04003383
3384 //
3385 // Follows below rules for gep.
3386 //
David Neto862b7d82018-06-14 18:48:37 -04003387 // 1. If gep's first index is 0 generate OpAccessChain and ignore gep's
3388 // first index.
David Neto22f144c2017-06-12 14:26:21 -04003389 // 2. If gep's first index is not 0, generate OpPtrAccessChain and use gep's
3390 // first index.
3391 // 3. If gep's first index is not constant, generate OpPtrAccessChain and
3392 // use gep's first index.
3393    // 4. If none of cases 1-3 apply, generate OpAccessChain and use
3394    // gep's first index.
3395 //
3396 spv::Op Opcode = spv::OpAccessChain;
3397 unsigned offset = 0;
3398 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04003399 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04003400 offset = 1;
David Neto862b7d82018-06-14 18:48:37 -04003401 } else if (CstInt->getZExtValue() != 0) {
David Neto22f144c2017-06-12 14:26:21 -04003402 Opcode = spv::OpPtrAccessChain;
David Neto22f144c2017-06-12 14:26:21 -04003403 }
David Neto862b7d82018-06-14 18:48:37 -04003404 } else {
David Neto22f144c2017-06-12 14:26:21 -04003405 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04003406 }
3407
3408 if (Opcode == spv::OpPtrAccessChain) {
David Neto1a1a0582017-07-07 12:01:44 -04003409 // Do we need to generate ArrayStride? Check against the GEP result type
3410 // rather than the pointer type of the base because when indexing into
3411 // an OpenCL program-scope constant, we'll swap out the LLVM base pointer
3412 // for something else in the SPIR-V.
3413 // E.g. see test/PointerAccessChain/pointer_index_is_constant_1.cl
alan-baker5b86ed72019-02-15 08:26:50 -05003414 auto address_space = ResultType->getAddressSpace();
3415 setVariablePointersCapabilities(address_space);
3416 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04003417 case spv::StorageClassStorageBuffer:
3418 case spv::StorageClassUniform:
David Neto1a1a0582017-07-07 12:01:44 -04003419 // Save the need to generate an ArrayStride decoration. But defer
3420 // generation until later, so we only make one decoration.
David Neto85082642018-03-24 06:55:20 -07003421 getTypesNeedingArrayStride().insert(ResultType);
Alan Bakerfcda9482018-10-02 17:09:59 -04003422 break;
3423 default:
3424 break;
David Neto1a1a0582017-07-07 12:01:44 -04003425 }
David Neto22f144c2017-06-12 14:26:21 -04003426 }
3427
3428 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
SJW01901d92020-05-21 08:58:31 -05003429 Ops << *II;
David Neto22f144c2017-06-12 14:26:21 -04003430 }
3431
SJWf93f5f32020-05-05 07:27:56 -05003432 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003433 break;
3434 }
3435 case Instruction::ExtractValue: {
3436 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
3437 // Ops[0] = Result Type ID
3438 // Ops[1] = Composite ID
3439 // Ops[2] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05003440 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003441
SJW01901d92020-05-21 08:58:31 -05003442 Ops << I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003443
SJW01901d92020-05-21 08:58:31 -05003444 Ops << EVI->getAggregateOperand();
David Neto22f144c2017-06-12 14:26:21 -04003445
3446 for (auto &Index : EVI->indices()) {
SJW01901d92020-05-21 08:58:31 -05003447 Ops << Index;
David Neto22f144c2017-06-12 14:26:21 -04003448 }
3449
SJWf93f5f32020-05-05 07:27:56 -05003450 RID = addSPIRVInst(spv::OpCompositeExtract, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003451 break;
3452 }
3453 case Instruction::InsertValue: {
3454 InsertValueInst *IVI = cast<InsertValueInst>(&I);
3455 // Ops[0] = Result Type ID
3456 // Ops[1] = Object ID
3457 // Ops[2] = Composite ID
3458 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05003459 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003460
SJW01901d92020-05-21 08:58:31 -05003461 Ops << I.getType() << IVI->getInsertedValueOperand()
3462 << IVI->getAggregateOperand();
David Neto22f144c2017-06-12 14:26:21 -04003463
3464 for (auto &Index : IVI->indices()) {
SJW01901d92020-05-21 08:58:31 -05003465 Ops << Index;
David Neto22f144c2017-06-12 14:26:21 -04003466 }
3467
SJWf93f5f32020-05-05 07:27:56 -05003468 RID = addSPIRVInst(spv::OpCompositeInsert, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003469 break;
3470 }
3471 case Instruction::Select: {
3472 //
3473 // Generate OpSelect.
3474 //
3475
3476 // Ops[0] = Result Type ID
3477 // Ops[1] = Condition ID
3478 // Ops[2] = True Constant ID
3479 // Ops[3] = False Constant ID
SJWf93f5f32020-05-05 07:27:56 -05003480 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003481
3482 // Find SPIRV instruction for parameter type.
3483 auto Ty = I.getType();
3484 if (Ty->isPointerTy()) {
3485 auto PointeeTy = Ty->getPointerElementType();
3486 if (PointeeTy->isStructTy() &&
3487 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
3488 Ty = PointeeTy;
alan-baker5b86ed72019-02-15 08:26:50 -05003489 } else {
3490 // Selecting between pointers requires variable pointers.
3491 setVariablePointersCapabilities(Ty->getPointerAddressSpace());
3492 if (!hasVariablePointers() && !selectFromSameObject(&I)) {
SJW01901d92020-05-21 08:58:31 -05003493 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05003494 }
David Neto22f144c2017-06-12 14:26:21 -04003495 }
3496 }
3497
SJW01901d92020-05-21 08:58:31 -05003498 Ops << Ty << I.getOperand(0) << I.getOperand(1) << I.getOperand(2);
David Neto22f144c2017-06-12 14:26:21 -04003499
SJWf93f5f32020-05-05 07:27:56 -05003500 RID = addSPIRVInst(spv::OpSelect, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003501 break;
3502 }
3503 case Instruction::ExtractElement: {
3504 // Handle <4 x i8> type manually.
3505 Type *CompositeTy = I.getOperand(0)->getType();
3506 if (is4xi8vec(CompositeTy)) {
3507 //
3508 // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
3509 // <4 x i8>.
3510 //
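      // For example, "extractelement <4 x i8> %v, i32 2" becomes, roughly:
      //   %shifted = OpShiftRightLogical %uint %v %uint_16
      //   %result  = OpBitwiseAnd %uint %shifted %uint_255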
3511
3512 //
3513 // Generate OpShiftRightLogical
3514 //
3515 // Ops[0] = Result Type ID
3516 // Ops[1] = Operand 0
3517 // Ops[2] = Operand 1
3518 //
SJWf93f5f32020-05-05 07:27:56 -05003519 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003520
SJW01901d92020-05-21 08:58:31 -05003521 Ops << CompositeTy << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04003522
SJW01901d92020-05-21 08:58:31 -05003523 SPIRVID Op1ID = 0;
David Neto22f144c2017-06-12 14:26:21 -04003524 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
3525 // Handle constant index.
3526 uint64_t Idx = CI->getZExtValue();
3527 Value *ShiftAmount =
3528 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
SJWf93f5f32020-05-05 07:27:56 -05003529 Op1ID = getSPIRVValue(ShiftAmount);
David Neto22f144c2017-06-12 14:26:21 -04003530 } else {
3531 // Handle variable index.
SJWf93f5f32020-05-05 07:27:56 -05003532 SPIRVOperandVec TmpOps;
David Neto22f144c2017-06-12 14:26:21 -04003533
David Neto22f144c2017-06-12 14:26:21 -04003534 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
SJW01901d92020-05-21 08:58:31 -05003535 TmpOps << Type::getInt32Ty(Context) << I.getOperand(1) << Cst8;
David Neto22f144c2017-06-12 14:26:21 -04003536
SJWf93f5f32020-05-05 07:27:56 -05003537 Op1ID = addSPIRVInst(spv::OpIMul, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04003538 }
SJW01901d92020-05-21 08:58:31 -05003539 Ops << Op1ID;
David Neto22f144c2017-06-12 14:26:21 -04003540
SJW01901d92020-05-21 08:58:31 -05003541 SPIRVID ShiftID = addSPIRVInst(spv::OpShiftRightLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003542
3543 //
3544 // Generate OpBitwiseAnd
3545 //
3546 // Ops[0] = Result Type ID
3547 // Ops[1] = Operand 0
3548 // Ops[2] = Operand 1
3549 //
3550 Ops.clear();
3551
David Neto22f144c2017-06-12 14:26:21 -04003552 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
SJW01901d92020-05-21 08:58:31 -05003553 Ops << CompositeTy << ShiftID << CstFF;
David Neto22f144c2017-06-12 14:26:21 -04003554
SJWf93f5f32020-05-05 07:27:56 -05003555 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003556 break;
3557 }
3558
3559 // Ops[0] = Result Type ID
3560 // Ops[1] = Composite ID
3561 // Ops[2] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05003562 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003563
SJW01901d92020-05-21 08:58:31 -05003564 Ops << I.getType() << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04003565
3566 spv::Op Opcode = spv::OpCompositeExtract;
3567 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
SJW01901d92020-05-21 08:58:31 -05003568 Ops << static_cast<uint32_t>(CI->getZExtValue());
David Neto22f144c2017-06-12 14:26:21 -04003569 } else {
SJW01901d92020-05-21 08:58:31 -05003570 Ops << I.getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04003571 Opcode = spv::OpVectorExtractDynamic;
3572 }
3573
SJWf93f5f32020-05-05 07:27:56 -05003574 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003575 break;
3576 }
3577 case Instruction::InsertElement: {
3578 // Handle <4 x i8> type manually.
3579 Type *CompositeTy = I.getOperand(0)->getType();
3580 if (is4xi8vec(CompositeTy)) {
3581 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
SJW01901d92020-05-21 08:58:31 -05003582 SPIRVID CstFFID = getSPIRVValue(CstFF);
David Neto22f144c2017-06-12 14:26:21 -04003583
SJW01901d92020-05-21 08:58:31 -05003584 SPIRVID ShiftAmountID = 0;
David Neto22f144c2017-06-12 14:26:21 -04003585 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
3586 // Handle constant index.
3587 uint64_t Idx = CI->getZExtValue();
3588 Value *ShiftAmount =
3589 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
SJWf93f5f32020-05-05 07:27:56 -05003590 ShiftAmountID = getSPIRVValue(ShiftAmount);
David Neto22f144c2017-06-12 14:26:21 -04003591 } else {
3592 // Handle variable index.
SJWf93f5f32020-05-05 07:27:56 -05003593 SPIRVOperandVec TmpOps;
David Neto22f144c2017-06-12 14:26:21 -04003594
David Neto22f144c2017-06-12 14:26:21 -04003595 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
SJW01901d92020-05-21 08:58:31 -05003596 TmpOps << Type::getInt32Ty(Context) << I.getOperand(2) << Cst8;
David Neto22f144c2017-06-12 14:26:21 -04003597
SJWf93f5f32020-05-05 07:27:56 -05003598 ShiftAmountID = addSPIRVInst(spv::OpIMul, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04003599 }
3600
3601 //
3602 // Generate mask operations.
3603 //
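      // Net effect, roughly:
      //   result = (vec & ~(0xFF << (idx * 8))) | (value << (idx * 8))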
3604
3605 // ShiftLeft mask according to index of insertelement.
SJWf93f5f32020-05-05 07:27:56 -05003606 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003607
SJW01901d92020-05-21 08:58:31 -05003608 Ops << CompositeTy << CstFFID << ShiftAmountID;
David Neto22f144c2017-06-12 14:26:21 -04003609
SJW01901d92020-05-21 08:58:31 -05003610 SPIRVID MaskID = addSPIRVInst(spv::OpShiftLeftLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003611
3612 // Inverse mask.
3613 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05003614 Ops << CompositeTy << MaskID;
David Neto22f144c2017-06-12 14:26:21 -04003615
SJW01901d92020-05-21 08:58:31 -05003616 SPIRVID InvMaskID = addSPIRVInst(spv::OpNot, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003617
3618 // Apply mask.
3619 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05003620 Ops << CompositeTy << I.getOperand(0) << InvMaskID;
David Neto22f144c2017-06-12 14:26:21 -04003621
SJW01901d92020-05-21 08:58:31 -05003622 SPIRVID OrgValID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003623
3624 // Create correct value according to index of insertelement.
3625 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05003626 Ops << CompositeTy << I.getOperand(1) << ShiftAmountID;
David Neto22f144c2017-06-12 14:26:21 -04003627
SJW01901d92020-05-21 08:58:31 -05003628 SPIRVID InsertValID = addSPIRVInst(spv::OpShiftLeftLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003629
3630 // Insert value to original value.
3631 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05003632 Ops << CompositeTy << OrgValID << InsertValID;
David Neto22f144c2017-06-12 14:26:21 -04003633
SJWf93f5f32020-05-05 07:27:56 -05003634 RID = addSPIRVInst(spv::OpBitwiseOr, Ops);
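    // Illustrative sketch only (schematic names): inserting byte %b at index i
    // of a packed <4 x i8> value %v emits roughly
    //   %mask   = OpShiftLeftLogical %uint %uint_255 %shift   ; %shift == i * 8
    //   %inv    = OpNot              %uint %mask
    //   %keep   = OpBitwiseAnd       %uint %v %inv
    //   %newval = OpShiftLeftLogical %uint %b %shift
    //   %result = OpBitwiseOr        %uint %keep %newval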
David Neto22f144c2017-06-12 14:26:21 -04003635 break;
3636 }
3637
SJWf93f5f32020-05-05 07:27:56 -05003638 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003639
James Priced26efea2018-06-09 23:28:32 +01003640 // Ops[0] = Result Type ID
SJW01901d92020-05-21 08:58:31 -05003641 Ops << I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003642
3643 spv::Op Opcode = spv::OpCompositeInsert;
3644 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
David Neto257c3892018-04-11 13:19:45 -04003645 const auto value = CI->getZExtValue();
3646 assert(value <= UINT32_MAX);
James Priced26efea2018-06-09 23:28:32 +01003647 // Ops[1] = Object ID
3648 // Ops[2] = Composite ID
3649 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJW01901d92020-05-21 08:58:31 -05003650 Ops << I.getOperand(1) << I.getOperand(0) << static_cast<uint32_t>(value);
David Neto22f144c2017-06-12 14:26:21 -04003651 } else {
James Priced26efea2018-06-09 23:28:32 +01003652 // Ops[1] = Composite ID
3653 // Ops[2] = Object ID
3654 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJW01901d92020-05-21 08:58:31 -05003655 Ops << I.getOperand(0) << I.getOperand(1) << I.getOperand(2);
David Neto22f144c2017-06-12 14:26:21 -04003656 Opcode = spv::OpVectorInsertDynamic;
3657 }
3658
SJWf93f5f32020-05-05 07:27:56 -05003659 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003660 break;
3661 }
3662 case Instruction::ShuffleVector: {
3663 // Ops[0] = Result Type ID
3664 // Ops[1] = Vector 1 ID
3665 // Ops[2] = Vector 2 ID
3666 // Ops[3] ... Ops[n] = Components (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05003667 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003668
SJW01901d92020-05-21 08:58:31 -05003669 Ops << I.getType() << I.getOperand(0) << I.getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04003670
alan-bakerc9666712020-04-01 16:31:21 -04003671 auto shuffle = cast<ShuffleVectorInst>(&I);
3672 SmallVector<int, 4> mask;
3673 shuffle->getShuffleMask(mask);
3674 for (auto i : mask) {
3675 if (i == UndefMaskElem) {
3676 if (clspv::Option::HackUndef())
3677 // Use 0 instead of undef.
SJW01901d92020-05-21 08:58:31 -05003678 Ops << 0;
alan-bakerc9666712020-04-01 16:31:21 -04003679 else
3680 // Undef for shuffle in SPIR-V.
SJW01901d92020-05-21 08:58:31 -05003681 Ops << 0xffffffff;
David Neto22f144c2017-06-12 14:26:21 -04003682 } else {
SJW01901d92020-05-21 08:58:31 -05003683 Ops << i;
David Neto22f144c2017-06-12 14:26:21 -04003684 }
3685 }
3686
SJWf93f5f32020-05-05 07:27:56 -05003687 RID = addSPIRVInst(spv::OpVectorShuffle, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003688 break;
3689 }
3690 case Instruction::ICmp:
3691 case Instruction::FCmp: {
3692 CmpInst *CmpI = cast<CmpInst>(&I);
3693
David Netod4ca2e62017-07-06 18:47:35 -04003694 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05003695 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04003696 if (isa<PointerType>(ArgTy)) {
3697 CmpI->print(errs());
alan-baker21574d32020-01-29 16:00:31 -05003698 std::string name = I.getParent()->getParent()->getName().str();
David Netod4ca2e62017-07-06 18:47:35 -04003699 errs()
3700 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
3701 << "in function " << name << "\n";
3702 llvm_unreachable("Pointer equality check is invalid");
3703 break;
3704 }
3705
David Neto257c3892018-04-11 13:19:45 -04003706 // Ops[0] = Result Type ID
3707 // Ops[1] = Operand 1 ID
3708 // Ops[2] = Operand 2 ID
SJWf93f5f32020-05-05 07:27:56 -05003709 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003710
SJW01901d92020-05-21 08:58:31 -05003711 Ops << CmpI->getType() << CmpI->getOperand(0) << CmpI->getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04003712
3713 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
SJWf93f5f32020-05-05 07:27:56 -05003714 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003715 break;
3716 }
3717 case Instruction::Br: {
SJW88ed5fe2020-05-11 12:40:57 -05003718 // Branch instructions are deferred because they need the IDs of their target labels.

3719 BasicBlock *BrBB = I.getParent();
3720 if (ContinueBlocks.count(BrBB) || MergeBlocks.count(BrBB)) {
3721 // Placeholder for Merge operation
3722 RID = addSPIRVPlaceholder(&I);
3723 }
3724 RID = addSPIRVPlaceholder(&I);
David Neto22f144c2017-06-12 14:26:21 -04003725 break;
3726 }
3727 case Instruction::Switch: {
3728 I.print(errs());
3729 llvm_unreachable("Unsupported instruction???");
3730 break;
3731 }
3732 case Instruction::IndirectBr: {
3733 I.print(errs());
3734 llvm_unreachable("Unsupported instruction???");
3735 break;
3736 }
3737 case Instruction::PHI: {
SJW88ed5fe2020-05-11 12:40:57 -05003738 // PHI instructions are deferred because they need the IDs of the incoming basic blocks.
3739 RID = addSPIRVPlaceholder(&I);
David Neto22f144c2017-06-12 14:26:21 -04003740 break;
3741 }
3742 case Instruction::Alloca: {
3743 //
3744 // Generate OpVariable.
3745 //
3746 // Ops[0] : Result Type ID
3747 // Ops[1] : Storage Class
SJWf93f5f32020-05-05 07:27:56 -05003748 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003749
SJW01901d92020-05-21 08:58:31 -05003750 Ops << I.getType() << spv::StorageClassFunction;
David Neto22f144c2017-06-12 14:26:21 -04003751
SJWf93f5f32020-05-05 07:27:56 -05003752 RID = addSPIRVInst(spv::OpVariable, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003753 break;
3754 }
3755 case Instruction::Load: {
3756 LoadInst *LD = cast<LoadInst>(&I);
3757 //
3758 // Generate OpLoad.
3759 //
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003760
alan-baker5b86ed72019-02-15 08:26:50 -05003761 if (LD->getType()->isPointerTy()) {
3762 // Loading a pointer requires variable pointers.
3763 setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
3764 }
David Neto22f144c2017-06-12 14:26:21 -04003765
SJW01901d92020-05-21 08:58:31 -05003766 SPIRVID PointerID = getSPIRVValue(LD->getPointerOperand());
David Netoa60b00b2017-09-15 16:34:09 -04003767 // This is a hack to work around what looks like a driver bug.
3768 // When we're loading from the special variable holding the WorkgroupSize
David Neto0a2f98d2017-09-15 19:38:40 -04003769 // builtin value, use an OpBitwiseAnd of the value's ID rather than
3770 // generating a load.
David Neto66cfe642018-03-24 06:13:56 -07003771 // TODO(dneto): Remove this awful hack once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -04003772 if (PointerID == WorkgroupSizeVarID) {
David Neto0a2f98d2017-09-15 19:38:40 -04003773 // Generate a bitwise-and of the original value with itself.
3774 // We should have been able to get away with just an OpCopyObject,
3775 // but we need something more complex to get past certain driver bugs.
3776 // This is ridiculous, but necessary.
3777 // TODO(dneto): Revisit this once drivers fix their bugs.
3778
SJWf93f5f32020-05-05 07:27:56 -05003779 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05003780 Ops << LD->getType() << WorkgroupSizeValueID << WorkgroupSizeValueID;
David Neto0a2f98d2017-09-15 19:38:40 -04003781
SJWf93f5f32020-05-05 07:27:56 -05003782 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Netoa60b00b2017-09-15 16:34:09 -04003783 break;
3784 }
3785
3786 // This is the normal path. Generate a load.
3787
David Neto22f144c2017-06-12 14:26:21 -04003788 // Ops[0] = Result Type ID
3789 // Ops[1] = Pointer ID
3790 // Ops[2] ... Ops[n] = Optional Memory Access
3791 //
3792 // TODO: Do we need to implement Optional Memory Access???
David Neto0a2f98d2017-09-15 19:38:40 -04003793
SJWf93f5f32020-05-05 07:27:56 -05003794 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05003795 Ops << LD->getType() << LD->getPointerOperand();
David Neto22f144c2017-06-12 14:26:21 -04003796
SJWf93f5f32020-05-05 07:27:56 -05003797 RID = addSPIRVInst(spv::OpLoad, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003798 break;
3799 }
3800 case Instruction::Store: {
3801 StoreInst *ST = cast<StoreInst>(&I);
3802 //
3803 // Generate OpStore.
3804 //
3805
alan-baker5b86ed72019-02-15 08:26:50 -05003806 if (ST->getValueOperand()->getType()->isPointerTy()) {
3807 // Storing a pointer requires variable pointers.
3808 setVariablePointersCapabilities(
3809 ST->getValueOperand()->getType()->getPointerAddressSpace());
3810 }
3811
David Neto22f144c2017-06-12 14:26:21 -04003812 // Ops[0] = Pointer ID
3813 // Ops[1] = Object ID
3814 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
3815 //
3816 // TODO: Do we need to implement Optional Memory Access???
SJWf93f5f32020-05-05 07:27:56 -05003817 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05003818 Ops << ST->getPointerOperand() << ST->getValueOperand();
David Neto22f144c2017-06-12 14:26:21 -04003819
SJWf93f5f32020-05-05 07:27:56 -05003820 RID = addSPIRVInst(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003821 break;
3822 }
3823 case Instruction::AtomicCmpXchg: {
3824 I.print(errs());
3825 llvm_unreachable("Unsupported instruction???");
3826 break;
3827 }
3828 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01003829 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
3830
3831 spv::Op opcode;
3832
3833 switch (AtomicRMW->getOperation()) {
3834 default:
3835 I.print(errs());
3836 llvm_unreachable("Unsupported instruction???");
3837 case llvm::AtomicRMWInst::Add:
3838 opcode = spv::OpAtomicIAdd;
3839 break;
3840 case llvm::AtomicRMWInst::Sub:
3841 opcode = spv::OpAtomicISub;
3842 break;
3843 case llvm::AtomicRMWInst::Xchg:
3844 opcode = spv::OpAtomicExchange;
3845 break;
3846 case llvm::AtomicRMWInst::Min:
3847 opcode = spv::OpAtomicSMin;
3848 break;
3849 case llvm::AtomicRMWInst::Max:
3850 opcode = spv::OpAtomicSMax;
3851 break;
3852 case llvm::AtomicRMWInst::UMin:
3853 opcode = spv::OpAtomicUMin;
3854 break;
3855 case llvm::AtomicRMWInst::UMax:
3856 opcode = spv::OpAtomicUMax;
3857 break;
3858 case llvm::AtomicRMWInst::And:
3859 opcode = spv::OpAtomicAnd;
3860 break;
3861 case llvm::AtomicRMWInst::Or:
3862 opcode = spv::OpAtomicOr;
3863 break;
3864 case llvm::AtomicRMWInst::Xor:
3865 opcode = spv::OpAtomicXor;
3866 break;
3867 }
3868
3869 //
3870 // Generate OpAtomic*.
3871 //
SJWf93f5f32020-05-05 07:27:56 -05003872 SPIRVOperandVec Ops;
Neil Henning39672102017-09-29 14:33:13 +01003873
SJW01901d92020-05-21 08:58:31 -05003874 Ops << I.getType() << AtomicRMW->getPointerOperand();
Neil Henning39672102017-09-29 14:33:13 +01003875
3876 auto IntTy = Type::getInt32Ty(I.getContext());
Neil Henning39672102017-09-29 14:33:13 +01003877 const auto ConstantScopeDevice = ConstantInt::get(IntTy, spv::ScopeDevice);
SJW01901d92020-05-21 08:58:31 -05003878 Ops << ConstantScopeDevice;
Neil Henning39672102017-09-29 14:33:13 +01003879
3880 const auto ConstantMemorySemantics = ConstantInt::get(
3881 IntTy, spv::MemorySemanticsUniformMemoryMask |
3882 spv::MemorySemanticsSequentiallyConsistentMask);
SJW01901d92020-05-21 08:58:31 -05003883 Ops << ConstantMemorySemantics << AtomicRMW->getValOperand();
Neil Henning39672102017-09-29 14:33:13 +01003884
SJWf93f5f32020-05-05 07:27:56 -05003885 RID = addSPIRVInst(opcode, Ops);
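    // Illustrative sketch only (schematic names): e.g. an atomic add lowers to
    //   %old = OpAtomicIAdd %uint %ptr %scope %semantics %value
    // where %scope is the ScopeDevice constant and %semantics is the constant
    // for UniformMemory | SequentiallyConsistent, as built above.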
David Neto22f144c2017-06-12 14:26:21 -04003886 break;
3887 }
3888 case Instruction::Fence: {
3889 I.print(errs());
3890 llvm_unreachable("Unsupported instruction???");
3891 break;
3892 }
3893 case Instruction::Call: {
3894 CallInst *Call = dyn_cast<CallInst>(&I);
3895 Function *Callee = Call->getCalledFunction();
3896
Alan Baker202c8c72018-08-13 13:47:44 -04003897 if (Callee->getName().startswith(clspv::ResourceAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04003898 if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
3899 // Generate an OpLoad
SJWf93f5f32020-05-05 07:27:56 -05003900 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003901
SJW01901d92020-05-21 08:58:31 -05003902 Ops << Call->getType()->getPointerElementType()
3903 << ResourceVarDeferredLoadCalls[Call];
David Neto862b7d82018-06-14 18:48:37 -04003904
SJWf93f5f32020-05-05 07:27:56 -05003905 RID = addSPIRVInst(spv::OpLoad, Ops);
David Neto862b7d82018-06-14 18:48:37 -04003906 break;
3907
3908 } else {
3909 // This maps to an OpVariable we've already generated.
3910 // No code is generated for the call.
3911 }
3912 break;
alan-bakerb6b09dc2018-11-08 16:59:28 -05003913 } else if (Callee->getName().startswith(
3914 clspv::WorkgroupAccessorFunction())) {
Alan Baker202c8c72018-08-13 13:47:44 -04003915 // Don't codegen an instruction here, but instead map this call directly
3916 // to the workgroup variable id.
alan-bakerb6b09dc2018-11-08 16:59:28 -05003917 int spec_id = static_cast<int>(
3918 cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04003919 const auto &info = LocalSpecIdInfoMap[spec_id];
SJWf93f5f32020-05-05 07:27:56 -05003920 RID = info.variable_id;
Alan Baker202c8c72018-08-13 13:47:44 -04003921 break;
David Neto862b7d82018-06-14 18:48:37 -04003922 }
3923
3924 // Sampler initializers become a load of the corresponding sampler.
3925
Kévin Petitdf71de32019-04-09 14:09:50 +01003926 if (Callee->getName().equals(clspv::LiteralSamplerFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04003927 // Map this to a load from the variable.
alan-baker09cb9802019-12-10 13:16:27 -05003928 const auto third_param = static_cast<unsigned>(
3929 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue());
3930 auto sampler_value = third_param;
3931 if (clspv::Option::UseSamplerMap()) {
3932 sampler_value = getSamplerMap()[third_param].first;
3933 }
David Neto862b7d82018-06-14 18:48:37 -04003934
3935 // Generate an OpLoad
SJWf93f5f32020-05-05 07:27:56 -05003936 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003937
SJW01901d92020-05-21 08:58:31 -05003938 Ops << SamplerTy->getPointerElementType()
3939 << SamplerLiteralToIDMap[sampler_value];
David Neto22f144c2017-06-12 14:26:21 -04003940
SJWf93f5f32020-05-05 07:27:56 -05003941 RID = addSPIRVInst(spv::OpLoad, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003942 break;
3943 }
3944
Kévin Petit349c9502019-03-28 17:24:14 +00003945 // Handle SPIR-V intrinsics
Kévin Petit9b340262019-06-19 18:31:11 +01003946 spv::Op opcode = StringSwitch<spv::Op>(Callee->getName())
3947 .Case("spirv.atomic_xor", spv::OpAtomicXor)
3948 .Default(spv::OpNop);
David Neto22f144c2017-06-12 14:26:21 -04003949
Kévin Petit617a76d2019-04-04 13:54:16 +01003950 // If the switch above didn't have an entry, the intrinsic may be using
 3951 // the name mangling logic.
3952 bool usesMangler = false;
3953 if (opcode == spv::OpNop) {
3954 if (Callee->getName().startswith(clspv::SPIRVOpIntrinsicFunction())) {
3955 auto OpCst = cast<ConstantInt>(Call->getOperand(0));
3956 opcode = static_cast<spv::Op>(OpCst->getZExtValue());
3957 usesMangler = true;
3958 }
3959 }
3960
Kévin Petit349c9502019-03-28 17:24:14 +00003961 if (opcode != spv::OpNop) {
3962
SJWf93f5f32020-05-05 07:27:56 -05003963 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003964
Kévin Petit349c9502019-03-28 17:24:14 +00003965 if (!I.getType()->isVoidTy()) {
SJW01901d92020-05-21 08:58:31 -05003966 Ops << I.getType();
Kévin Petit349c9502019-03-28 17:24:14 +00003967 }
David Neto22f144c2017-06-12 14:26:21 -04003968
Kévin Petit617a76d2019-04-04 13:54:16 +01003969 unsigned firstOperand = usesMangler ? 1 : 0;
3970 for (unsigned i = firstOperand; i < Call->getNumArgOperands(); i++) {
SJW01901d92020-05-21 08:58:31 -05003971 Ops << Call->getArgOperand(i);
David Neto22f144c2017-06-12 14:26:21 -04003972 }
3973
SJWf93f5f32020-05-05 07:27:56 -05003974 RID = addSPIRVInst(opcode, Ops);
Kévin Petit8a560882019-03-21 15:24:34 +00003975 break;
3976 }
3977
David Neto22f144c2017-06-12 14:26:21 -04003978 // spirv.copy_memory.* intrinsics become OpCopyMemory instructions.
3979 if (Callee->getName().startswith("spirv.copy_memory")) {
3980 //
3981 // Generate OpCopyMemory.
3982 //
3983
3984 // Ops[0] = Dst ID
3985 // Ops[1] = Src ID
3986 // Ops[2] = Memory Access
3987 // Ops[3] = Alignment
3988
3989 auto IsVolatile =
3990 dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue() != 0;
3991
3992 auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
3993 : spv::MemoryAccessMaskNone;
3994
3995 auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;
3996
3997 auto Alignment =
3998 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
3999
SJWf93f5f32020-05-05 07:27:56 -05004000 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05004001 Ops << Call->getArgOperand(0) << Call->getArgOperand(1) << MemoryAccess
4002 << static_cast<uint32_t>(Alignment);
David Neto22f144c2017-06-12 14:26:21 -04004003
SJWf93f5f32020-05-05 07:27:56 -05004004 RID = addSPIRVInst(spv::OpCopyMemory, Ops);
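    // Illustrative sketch only (schematic names): a call
    // spirv.copy_memory.*(dst, src, /*align*/ 4, /*volatile*/ 0) becomes roughly
    //   OpCopyMemory %dst %src Aligned 4
    // with Volatile OR'd into the memory-access mask when the fourth argument
    // is non-zero.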
David Neto22f144c2017-06-12 14:26:21 -04004005 break;
4006 }
4007
SJW2c317da2020-03-23 07:39:13 -05004008 // read_image is converted to OpSampledImage and OpImageSampleExplicitLod.
4009 // Additionally, OpTypeSampledImage is generated.
SJW173c7e92020-03-16 08:44:47 -05004010 if (IsSampledImageRead(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004011 //
4012 // Generate OpSampledImage.
4013 //
4014 // Ops[0] = Result Type ID
4015 // Ops[1] = Image ID
4016 // Ops[2] = Sampler ID
4017 //
SJWf93f5f32020-05-05 07:27:56 -05004018 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004019
4020 Value *Image = Call->getArgOperand(0);
4021 Value *Sampler = Call->getArgOperand(1);
4022 Value *Coordinate = Call->getArgOperand(2);
4023
4024 TypeMapType &OpImageTypeMap = getImageTypeMap();
4025 Type *ImageTy = Image->getType()->getPointerElementType();
SJW01901d92020-05-21 08:58:31 -05004026 SPIRVID ImageTyID = OpImageTypeMap[ImageTy];
David Neto257c3892018-04-11 13:19:45 -04004027
SJW01901d92020-05-21 08:58:31 -05004028 Ops << ImageTyID << Image << Sampler;
David Neto22f144c2017-06-12 14:26:21 -04004029
SJW01901d92020-05-21 08:58:31 -05004030 SPIRVID SampledImageID = addSPIRVInst(spv::OpSampledImage, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004031
4032 //
4033 // Generate OpImageSampleExplicitLod.
4034 //
4035 // Ops[0] = Result Type ID
4036 // Ops[1] = Sampled Image ID
4037 // Ops[2] = Coordinate ID
4038 // Ops[3] = Image Operands Type ID
4039 // Ops[4] ... Ops[n] = Operands ID
4040 //
4041 Ops.clear();
4042
alan-bakerf67468c2019-11-25 15:51:49 -05004043 const bool is_int_image = IsIntImageType(Image->getType());
SJW01901d92020-05-21 08:58:31 -05004044 SPIRVID result_type;
alan-bakerf67468c2019-11-25 15:51:49 -05004045 if (is_int_image) {
4046 result_type = v4int32ID;
4047 } else {
SJWf93f5f32020-05-05 07:27:56 -05004048 result_type = getSPIRVType(Call->getType());
alan-bakerf67468c2019-11-25 15:51:49 -05004049 }
4050
David Neto22f144c2017-06-12 14:26:21 -04004051 Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
SJW01901d92020-05-21 08:58:31 -05004052 Ops << result_type << SampledImageID << Coordinate
4053 << spv::ImageOperandsLodMask << CstFP0;
David Neto22f144c2017-06-12 14:26:21 -04004054
SJWf93f5f32020-05-05 07:27:56 -05004055 RID = addSPIRVInst(spv::OpImageSampleExplicitLod, Ops);
alan-bakerf67468c2019-11-25 15:51:49 -05004056
4057 if (is_int_image) {
4058 // Generate the bitcast.
4059 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004060 Ops << Call->getType() << RID;
SJWf93f5f32020-05-05 07:27:56 -05004061 RID = addSPIRVInst(spv::OpBitcast, Ops);
alan-bakerf67468c2019-11-25 15:51:49 -05004062 }
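    // Illustrative sketch only (schematic names): a sampled read lowers to
    //   %si = OpSampledImage           %sampled_image_ty %image %sampler
    //   %px = OpImageSampleExplicitLod %v4float %si %coord Lod %float_0
    // For integer images the sample is produced as a 4-element 32-bit integer
    // vector and then OpBitcast to the call's result type, as above.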
David Neto22f144c2017-06-12 14:26:21 -04004063 break;
4064 }
4065
alan-baker75090e42020-02-20 11:21:04 -05004066 // read_image (without a sampler) is mapped to OpImageFetch.
SJW173c7e92020-03-16 08:44:47 -05004067 if (IsUnsampledImageRead(Callee)) {
alan-baker75090e42020-02-20 11:21:04 -05004068 Value *Image = Call->getArgOperand(0);
4069 Value *Coordinate = Call->getArgOperand(1);
4070
4071 //
4072 // Generate OpImageFetch
4073 //
4074 // Ops[0] = Result Type ID
4075 // Ops[1] = Image ID
4076 // Ops[2] = Coordinate ID
4077 // Ops[3] = Lod
4078 // Ops[4] = 0
4079 //
SJWf93f5f32020-05-05 07:27:56 -05004080 SPIRVOperandVec Ops;
alan-baker75090e42020-02-20 11:21:04 -05004081
4082 const bool is_int_image = IsIntImageType(Image->getType());
SJW01901d92020-05-21 08:58:31 -05004083 SPIRVID result_type;
alan-baker75090e42020-02-20 11:21:04 -05004084 if (is_int_image) {
4085 result_type = v4int32ID;
4086 } else {
SJWf93f5f32020-05-05 07:27:56 -05004087 result_type = getSPIRVType(Call->getType());
alan-baker75090e42020-02-20 11:21:04 -05004088 }
alan-baker75090e42020-02-20 11:21:04 -05004089 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
SJW01901d92020-05-21 08:58:31 -05004090
4091 Ops << result_type << Image << Coordinate << spv::ImageOperandsLodMask
4092 << CstInt0;
alan-baker75090e42020-02-20 11:21:04 -05004093
SJWf93f5f32020-05-05 07:27:56 -05004094 RID = addSPIRVInst(spv::OpImageFetch, Ops);
alan-baker75090e42020-02-20 11:21:04 -05004095
4096 if (is_int_image) {
4097 // Generate the bitcast.
4098 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004099 Ops << Call->getType() << RID;
SJWf93f5f32020-05-05 07:27:56 -05004100 RID = addSPIRVInst(spv::OpBitcast, Ops);
alan-baker75090e42020-02-20 11:21:04 -05004101 }
4102 break;
4103 }
4104
alan-bakerf67468c2019-11-25 15:51:49 -05004105 // write_image is mapped to OpImageWrite.
SJW173c7e92020-03-16 08:44:47 -05004106 if (IsImageWrite(Callee)) {
David Neto22f144c2017-06-12 14:26:21 -04004107 //
4108 // Generate OpImageWrite.
4109 //
4110 // Ops[0] = Image ID
4111 // Ops[1] = Coordinate ID
4112 // Ops[2] = Texel ID
4113 // Ops[3] = (Optional) Image Operands Type (Literal Number)
4114 // Ops[4] ... Ops[n] = (Optional) Operands ID
4115 //
SJWf93f5f32020-05-05 07:27:56 -05004116 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004117
4118 Value *Image = Call->getArgOperand(0);
4119 Value *Coordinate = Call->getArgOperand(1);
4120 Value *Texel = Call->getArgOperand(2);
4121
SJW01901d92020-05-21 08:58:31 -05004122 SPIRVID TexelID = getSPIRVValue(Texel);
alan-bakerf67468c2019-11-25 15:51:49 -05004123
4124 const bool is_int_image = IsIntImageType(Image->getType());
4125 if (is_int_image) {
4126 // Generate a bitcast to v4int and use it as the texel value.
SJW01901d92020-05-21 08:58:31 -05004127 Ops << v4int32ID << TexelID;
SJWf93f5f32020-05-05 07:27:56 -05004128 TexelID = addSPIRVInst(spv::OpBitcast, Ops);
alan-bakerf67468c2019-11-25 15:51:49 -05004129 Ops.clear();
alan-bakerf67468c2019-11-25 15:51:49 -05004130 }
SJW01901d92020-05-21 08:58:31 -05004131 Ops << Image << Coordinate << TexelID;
David Neto22f144c2017-06-12 14:26:21 -04004132
SJWf93f5f32020-05-05 07:27:56 -05004133 RID = addSPIRVInst(spv::OpImageWrite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004134 break;
4135 }
4136
alan-bakerce179f12019-12-06 19:02:22 -05004137 // get_image_* is mapped to OpImageQuerySize or OpImageQuerySizeLod
SJW173c7e92020-03-16 08:44:47 -05004138 if (IsImageQuery(Callee)) {
SJW01901d92020-05-21 08:58:31 -05004139
4140 addCapability(spv::CapabilityImageQuery);
4141
David Neto5c22a252018-03-15 16:07:41 -04004142 //
alan-bakerce179f12019-12-06 19:02:22 -05004143 // Generate OpImageQuerySize[Lod]
David Neto5c22a252018-03-15 16:07:41 -04004144 //
4145 // Ops[0] = Image ID
4146 //
alan-bakerce179f12019-12-06 19:02:22 -05004147 // Result type has components equal to the dimensionality of the image,
4148 // plus 1 if the image is arrayed.
4149 //
alan-bakerf906d2b2019-12-10 11:26:23 -05004150 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
SJWf93f5f32020-05-05 07:27:56 -05004151 SPIRVOperandVec Ops;
David Neto5c22a252018-03-15 16:07:41 -04004152
4153 // Implement:
alan-bakerce179f12019-12-06 19:02:22 -05004154 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
SJW01901d92020-05-21 08:58:31 -05004155 SPIRVID SizesTypeID;
alan-bakerce179f12019-12-06 19:02:22 -05004156
David Neto5c22a252018-03-15 16:07:41 -04004157 Value *Image = Call->getArgOperand(0);
alan-bakerce179f12019-12-06 19:02:22 -05004158 const uint32_t dim = ImageDimensionality(Image->getType());
alan-baker7150a1d2020-02-25 08:31:06 -05004159 const uint32_t components =
4160 dim + (IsArrayImageType(Image->getType()) ? 1 : 0);
alan-bakerce179f12019-12-06 19:02:22 -05004161 if (components == 1) {
SJWf93f5f32020-05-05 07:27:56 -05004162 SizesTypeID = getSPIRVType(Type::getInt32Ty(Context));
alan-bakerce179f12019-12-06 19:02:22 -05004163 } else {
SJWf93f5f32020-05-05 07:27:56 -05004164 SizesTypeID = getSPIRVType(
4165 VectorType::get(Type::getInt32Ty(Context), components));
alan-bakerce179f12019-12-06 19:02:22 -05004166 }
SJW01901d92020-05-21 08:58:31 -05004167 Ops << SizesTypeID << Image;
alan-bakerce179f12019-12-06 19:02:22 -05004168 spv::Op query_opcode = spv::OpImageQuerySize;
SJW173c7e92020-03-16 08:44:47 -05004169 if (IsSampledImageType(Image->getType())) {
alan-bakerce179f12019-12-06 19:02:22 -05004170 query_opcode = spv::OpImageQuerySizeLod;
4171 // Need explicit 0 for Lod operand.
4172 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
SJW01901d92020-05-21 08:58:31 -05004173 Ops << CstInt0;
alan-bakerce179f12019-12-06 19:02:22 -05004174 }
David Neto5c22a252018-03-15 16:07:41 -04004175
SJWf93f5f32020-05-05 07:27:56 -05004176 RID = addSPIRVInst(query_opcode, Ops);
David Neto5c22a252018-03-15 16:07:41 -04004177
alan-bakerce179f12019-12-06 19:02:22 -05004178 // May require an extra instruction to create the appropriate result of
4179 // the builtin function.
SJW173c7e92020-03-16 08:44:47 -05004180 if (IsGetImageDim(Callee)) {
alan-bakerce179f12019-12-06 19:02:22 -05004181 if (dim == 3) {
4182 // get_image_dim returns an int4 for 3D images.
4183 //
David Neto5c22a252018-03-15 16:07:41 -04004184
alan-bakerce179f12019-12-06 19:02:22 -05004185 // Implement:
4186 // %result = OpCompositeConstruct %uint4 %sizes %uint_0
alan-bakerce179f12019-12-06 19:02:22 -05004187 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
SJW01901d92020-05-21 08:58:31 -05004188
4189 Ops.clear();
4190 Ops << VectorType::get(Type::getInt32Ty(Context), 4) << RID
4191 << CstInt0;
David Neto5c22a252018-03-15 16:07:41 -04004192
SJWf93f5f32020-05-05 07:27:56 -05004193 RID = addSPIRVInst(spv::OpCompositeConstruct, Ops);
alan-bakerce179f12019-12-06 19:02:22 -05004194 } else if (dim != components) {
 4195 // get_image_dim returns an int2 regardless of the arrayedness of the
 4196 // image. If the image is arrayed, an element must be dropped from the
4197 // query result.
4198 //
alan-bakerce179f12019-12-06 19:02:22 -05004199
4200 // Implement:
4201 // %result = OpVectorShuffle %uint2 %sizes %sizes 0 1
4202 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004203 Ops << VectorType::get(Type::getInt32Ty(Context), 2) << RID << RID
4204 << 0 << 1;
alan-bakerce179f12019-12-06 19:02:22 -05004205
SJWf93f5f32020-05-05 07:27:56 -05004206 RID = addSPIRVInst(spv::OpVectorShuffle, Ops);
alan-bakerce179f12019-12-06 19:02:22 -05004207 }
4208 } else if (components > 1) {
alan-bakerce179f12019-12-06 19:02:22 -05004209 // Implement:
4210 // %result = OpCompositeExtract %uint %sizes <component number>
4211 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004212 Ops << I.getType() << RID;
alan-bakerce179f12019-12-06 19:02:22 -05004213
4214 uint32_t component = 0;
4215 if (IsGetImageHeight(Callee))
4216 component = 1;
4217 else if (IsGetImageDepth(Callee))
4218 component = 2;
SJW01901d92020-05-21 08:58:31 -05004219 Ops << component;
alan-bakerce179f12019-12-06 19:02:22 -05004220
SJWf93f5f32020-05-05 07:27:56 -05004221 RID = addSPIRVInst(spv::OpCompositeExtract, Ops);
alan-bakerce179f12019-12-06 19:02:22 -05004222 }
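    // Illustrative sketch only (schematic names): e.g. get_image_height on a
    // sampled 2D image lowers to
    //   %sizes = OpImageQuerySizeLod %v2uint %image %uint_0
    //   %h     = OpCompositeExtract  %uint   %sizes 1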
David Neto5c22a252018-03-15 16:07:41 -04004223 break;
4224 }
4225
SJW88ed5fe2020-05-11 12:40:57 -05004226 // Call instructions are deferred because they need the callee function's ID.
4227 RID = addSPIRVPlaceholder(&I);
David Neto22f144c2017-06-12 14:26:21 -04004228
David Neto3fbb4072017-10-16 11:28:14 -04004229 // Check whether the implementation of this call uses an extended
4230 // instruction plus one more value-producing instruction. If so, then
4231 // reserve the id for the extra value-producing slot.
4232 glsl::ExtInst EInst = getIndirectExtInstEnum(Callee->getName());
4233 if (EInst != kGlslExtInstBad) {
4234 // Reserve a spot for the extra value.
SJW88ed5fe2020-05-11 12:40:57 -05004235 RID = addSPIRVPlaceholder(&I);
David Neto22f144c2017-06-12 14:26:21 -04004236 }
4237 break;
4238 }
4239 case Instruction::Ret: {
4240 unsigned NumOps = I.getNumOperands();
4241 if (NumOps == 0) {
4242 //
4243 // Generate OpReturn.
4244 //
SJWf93f5f32020-05-05 07:27:56 -05004245 RID = addSPIRVInst(spv::OpReturn);
David Neto22f144c2017-06-12 14:26:21 -04004246 } else {
4247 //
4248 // Generate OpReturnValue.
4249 //
4250
4251 // Ops[0] = Return Value ID
SJWf93f5f32020-05-05 07:27:56 -05004252 SPIRVOperandVec Ops;
David Neto257c3892018-04-11 13:19:45 -04004253
SJW01901d92020-05-21 08:58:31 -05004254 Ops << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04004255
SJWf93f5f32020-05-05 07:27:56 -05004256 RID = addSPIRVInst(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004257 break;
4258 }
4259 break;
4260 }
4261 }
SJWf93f5f32020-05-05 07:27:56 -05004262
4263 // Register Instruction to ValueMap.
SJW01901d92020-05-21 08:58:31 -05004264 if (RID.isValid()) {
SJWf93f5f32020-05-05 07:27:56 -05004265 VMap[&I] = RID;
4266 }
David Neto22f144c2017-06-12 14:26:21 -04004267}
4268
4269void SPIRVProducerPass::GenerateFuncEpilogue() {
David Neto22f144c2017-06-12 14:26:21 -04004270
4271 //
4272 // Generate OpFunctionEnd
4273 //
SJWf93f5f32020-05-05 07:27:56 -05004274 addSPIRVInst(spv::OpFunctionEnd);
David Neto22f144c2017-06-12 14:26:21 -04004275}
4276
4277bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05004278 // Don't specialize <4 x i8> if i8 is generally supported.
4279 if (clspv::Option::Int8Support())
4280 return false;
4281
David Neto22f144c2017-06-12 14:26:21 -04004282 LLVMContext &Context = Ty->getContext();
James Pricecf53df42020-04-20 14:41:24 -04004283 if (auto VecTy = dyn_cast<VectorType>(Ty)) {
4284 if (VecTy->getElementType() == Type::getInt8Ty(Context) &&
4285 VecTy->getNumElements() == 4) {
David Neto22f144c2017-06-12 14:26:21 -04004286 return true;
4287 }
4288 }
4289
4290 return false;
4291}
4292
4293void SPIRVProducerPass::HandleDeferredInstruction() {
David Neto22f144c2017-06-12 14:26:21 -04004294 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
4295
SJW88ed5fe2020-05-11 12:40:57 -05004296 for (size_t i = 0; i < DeferredInsts.size(); ++i) {
4297 Value *Inst = DeferredInsts[i].first;
4298 SPIRVInstruction *Placeholder = DeferredInsts[i].second;
4299 SPIRVOperandVec Operands;
4300
4301 auto nextDeferred = [&i, &Inst, &DeferredInsts, &Placeholder]() {
4302 ++i;
4303 assert(DeferredInsts.size() > i);
4304 assert(Inst == DeferredInsts[i].first);
4305 Placeholder = DeferredInsts[i].second;
4306 };
David Neto22f144c2017-06-12 14:26:21 -04004307
4308 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
alan-baker06cad652019-12-03 17:56:47 -05004309 // Check whether this branch needs to be preceded by a merge instruction.
David Neto22f144c2017-06-12 14:26:21 -04004310 BasicBlock *BrBB = Br->getParent();
alan-baker06cad652019-12-03 17:56:47 -05004311 if (ContinueBlocks.count(BrBB)) {
David Neto22f144c2017-06-12 14:26:21 -04004312 //
4313 // Generate OpLoopMerge.
4314 //
4315 // Ops[0] = Merge Block ID
4316 // Ops[1] = Continue Target ID
4317 // Ops[2] = Selection Control
SJWf93f5f32020-05-05 07:27:56 -05004318 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004319
SJW01901d92020-05-21 08:58:31 -05004320 Ops << MergeBlocks[BrBB] << ContinueBlocks[BrBB]
4321 << spv::LoopControlMaskNone;
David Neto22f144c2017-06-12 14:26:21 -04004322
SJW88ed5fe2020-05-11 12:40:57 -05004323 replaceSPIRVInst(Placeholder, spv::OpLoopMerge, Ops);
4324
4325 nextDeferred();
4326
alan-baker06cad652019-12-03 17:56:47 -05004327 } else if (MergeBlocks.count(BrBB)) {
4328 //
4329 // Generate OpSelectionMerge.
4330 //
4331 // Ops[0] = Merge Block ID
4332 // Ops[1] = Selection Control
SJWf93f5f32020-05-05 07:27:56 -05004333 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004334
alan-baker06cad652019-12-03 17:56:47 -05004335 auto MergeBB = MergeBlocks[BrBB];
SJW01901d92020-05-21 08:58:31 -05004336 Ops << MergeBB << spv::SelectionControlMaskNone;
David Neto22f144c2017-06-12 14:26:21 -04004337
SJW88ed5fe2020-05-11 12:40:57 -05004338 replaceSPIRVInst(Placeholder, spv::OpSelectionMerge, Ops);
4339
4340 nextDeferred();
David Neto22f144c2017-06-12 14:26:21 -04004341 }
4342
4343 if (Br->isConditional()) {
4344 //
4345 // Generate OpBranchConditional.
4346 //
4347 // Ops[0] = Condition ID
4348 // Ops[1] = True Label ID
4349 // Ops[2] = False Label ID
4350 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004351 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004352
SJW01901d92020-05-21 08:58:31 -05004353 Ops << Br->getCondition() << Br->getSuccessor(0) << Br->getSuccessor(1);
David Neto22f144c2017-06-12 14:26:21 -04004354
SJW88ed5fe2020-05-11 12:40:57 -05004355 replaceSPIRVInst(Placeholder, spv::OpBranchConditional, Ops);
4356
David Neto22f144c2017-06-12 14:26:21 -04004357 } else {
4358 //
4359 // Generate OpBranch.
4360 //
4361 // Ops[0] = Target Label ID
SJWf93f5f32020-05-05 07:27:56 -05004362 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004363
SJW01901d92020-05-21 08:58:31 -05004364 Ops << Br->getSuccessor(0);
David Neto22f144c2017-06-12 14:26:21 -04004365
SJW88ed5fe2020-05-11 12:40:57 -05004366 replaceSPIRVInst(Placeholder, spv::OpBranch, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004367 }
4368 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5ed87542020-03-23 11:05:22 -04004369 if (PHI->getType()->isPointerTy() && !IsSamplerType(PHI->getType()) &&
4370 !IsImageType(PHI->getType())) {
alan-baker5b86ed72019-02-15 08:26:50 -05004371 // OpPhi on pointers requires variable pointers.
4372 setVariablePointersCapabilities(
4373 PHI->getType()->getPointerAddressSpace());
4374 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
SJW01901d92020-05-21 08:58:31 -05004375 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004376 }
4377 }
4378
David Neto22f144c2017-06-12 14:26:21 -04004379 //
4380 // Generate OpPhi.
4381 //
4382 // Ops[0] = Result Type ID
4383 // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
SJWf93f5f32020-05-05 07:27:56 -05004384 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004385
SJW01901d92020-05-21 08:58:31 -05004386 Ops << PHI->getType();
David Neto22f144c2017-06-12 14:26:21 -04004387
SJW88ed5fe2020-05-11 12:40:57 -05004388 for (unsigned j = 0; j < PHI->getNumIncomingValues(); j++) {
SJW01901d92020-05-21 08:58:31 -05004389 Ops << PHI->getIncomingValue(j) << PHI->getIncomingBlock(j);
David Neto22f144c2017-06-12 14:26:21 -04004390 }
4391
SJW88ed5fe2020-05-11 12:40:57 -05004392 replaceSPIRVInst(Placeholder, spv::OpPhi, Ops);
4393
David Neto22f144c2017-06-12 14:26:21 -04004394 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
4395 Function *Callee = Call->getCalledFunction();
SJW2c317da2020-03-23 07:39:13 -05004396 LLVMContext &Context = Callee->getContext();
4397 auto IntTy = Type::getInt32Ty(Context);
4398 auto callee_code = Builtins::Lookup(Callee);
David Neto3fbb4072017-10-16 11:28:14 -04004399 auto callee_name = Callee->getName();
4400 glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(callee_name);
David Neto22f144c2017-06-12 14:26:21 -04004401
4402 if (EInst) {
SJW01901d92020-05-21 08:58:31 -05004403 SPIRVID ExtInstImportID = getOpExtInstImportID();
David Neto22f144c2017-06-12 14:26:21 -04004404
4405 //
4406 // Generate OpExtInst.
4407 //
4408
4409 // Ops[0] = Result Type ID
4410 // Ops[1] = Set ID (OpExtInstImport ID)
4411 // Ops[2] = Instruction Number (Literal Number)
4412 // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
SJWf93f5f32020-05-05 07:27:56 -05004413 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004414
SJW01901d92020-05-21 08:58:31 -05004415 Ops << Call->getType() << ExtInstImportID << EInst;
David Neto22f144c2017-06-12 14:26:21 -04004416
David Neto22f144c2017-06-12 14:26:21 -04004417 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
SJW88ed5fe2020-05-11 12:40:57 -05004418 for (unsigned j = 0; j < CalleeFTy->getNumParams(); j++) {
SJW01901d92020-05-21 08:58:31 -05004419 Ops << Call->getOperand(j);
David Neto22f144c2017-06-12 14:26:21 -04004420 }
4421
SJW88ed5fe2020-05-11 12:40:57 -05004422 SPIRVID RID = replaceSPIRVInst(Placeholder, spv::OpExtInst, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004423
David Neto3fbb4072017-10-16 11:28:14 -04004424 const auto IndirectExtInst = getIndirectExtInstEnum(callee_name);
4425 if (IndirectExtInst != kGlslExtInstBad) {
SJW88ed5fe2020-05-11 12:40:57 -05004426
4427 nextDeferred();
4428
David Neto3fbb4072017-10-16 11:28:14 -04004429 // Generate one more instruction that uses the result of the extended
4430 // instruction. Its result id is one more than the id of the
4431 // extended instruction.
SJW88ed5fe2020-05-11 12:40:57 -05004432 auto generate_extra_inst = [this, &Context, &Call, &Placeholder,
SJW01901d92020-05-21 08:58:31 -05004433 &RID](spv::Op opcode,
4434 Constant *constant) {
David Neto3fbb4072017-10-16 11:28:14 -04004435 //
4436 // Generate instruction like:
4437 // result = opcode constant <extinst-result>
4438 //
4439 // Ops[0] = Result Type ID
4440 // Ops[1] = Operand 0 ;; the constant, suitably splatted
4441 // Ops[2] = Operand 1 ;; the result of the extended instruction
SJWf93f5f32020-05-05 07:27:56 -05004442 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004443
David Neto3fbb4072017-10-16 11:28:14 -04004444 Type *resultTy = Call->getType();
David Neto3fbb4072017-10-16 11:28:14 -04004445
4446 if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
4447 constant = ConstantVector::getSplat(
alan-baker7261e062020-03-15 14:35:48 -04004448 {static_cast<unsigned>(vectorTy->getNumElements()), false},
4449 constant);
David Neto3fbb4072017-10-16 11:28:14 -04004450 }
SJW01901d92020-05-21 08:58:31 -05004451 Ops << resultTy << constant << RID;
David Neto3fbb4072017-10-16 11:28:14 -04004452
SJW88ed5fe2020-05-11 12:40:57 -05004453 replaceSPIRVInst(Placeholder, opcode, Ops);
David Neto3fbb4072017-10-16 11:28:14 -04004454 };
4455
4456 switch (IndirectExtInst) {
4457 case glsl::ExtInstFindUMsb: // Implementing clz
SJW2c317da2020-03-23 07:39:13 -05004458 generate_extra_inst(spv::OpISub, ConstantInt::get(IntTy, 31));
David Neto3fbb4072017-10-16 11:28:14 -04004459 break;
4460 case glsl::ExtInstAcos: // Implementing acospi
4461 case glsl::ExtInstAsin: // Implementing asinpi
Kévin Petiteb9f90a2018-09-29 12:29:34 +01004462 case glsl::ExtInstAtan: // Implementing atanpi
David Neto3fbb4072017-10-16 11:28:14 -04004463 case glsl::ExtInstAtan2: // Implementing atan2pi
4464 generate_extra_inst(
4465 spv::OpFMul,
4466 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
4467 break;
4468
4469 default:
4470 assert(false && "internally inconsistent");
David Neto4d02a532017-09-17 12:57:44 -04004471 }
David Neto22f144c2017-06-12 14:26:21 -04004472 }
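      // Illustrative sketch only (schematic names): clz(x) becomes
      //   %msb = OpExtInst %uint %glsl_std_450 FindUMsb %x
      //   %clz = OpISub    %uint %uint_31 %msb
      // and acospi/asinpi/atanpi/atan2pi multiply the corresponding GLSL
      // result by 1/pi with OpFMul.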
David Neto3fbb4072017-10-16 11:28:14 -04004473
SJW2c317da2020-03-23 07:39:13 -05004474 } else if (callee_code == Builtins::kPopcount) {
David Neto22f144c2017-06-12 14:26:21 -04004475 //
4476 // Generate OpBitCount
4477 //
4478 // Ops[0] = Result Type ID
4479 // Ops[1] = Base ID
SJWf93f5f32020-05-05 07:27:56 -05004480 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05004481 Ops << Call->getType() << Call->getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04004482
SJW88ed5fe2020-05-11 12:40:57 -05004483 replaceSPIRVInst(Placeholder, spv::OpBitCount, Ops);
David Netoab03f432017-11-03 17:00:44 -04004484
David Neto862b7d82018-06-14 18:48:37 -04004485 } else if (callee_name.startswith(kCompositeConstructFunctionPrefix)) {
David Netoab03f432017-11-03 17:00:44 -04004486
4487 // Generate an OpCompositeConstruct
SJWf93f5f32020-05-05 07:27:56 -05004488 SPIRVOperandVec Ops;
David Netoab03f432017-11-03 17:00:44 -04004489
4490 // The result type.
SJW01901d92020-05-21 08:58:31 -05004491 Ops << Call->getType();
David Netoab03f432017-11-03 17:00:44 -04004492
4493 for (Use &use : Call->arg_operands()) {
SJW01901d92020-05-21 08:58:31 -05004494 Ops << use.get();
David Netoab03f432017-11-03 17:00:44 -04004495 }
4496
SJW88ed5fe2020-05-11 12:40:57 -05004497 replaceSPIRVInst(Placeholder, spv::OpCompositeConstruct, Ops);
David Netoab03f432017-11-03 17:00:44 -04004498
Alan Baker202c8c72018-08-13 13:47:44 -04004499 } else if (callee_name.startswith(clspv::ResourceAccessorFunction())) {
4500
4501 // We have already mapped the call's result value to an ID.
4502 // Don't generate any code now.
4503
4504 } else if (callee_name.startswith(clspv::WorkgroupAccessorFunction())) {
David Neto862b7d82018-06-14 18:48:37 -04004505
4506 // We have already mapped the call's result value to an ID.
4507 // Don't generate any code now.
4508
David Neto22f144c2017-06-12 14:26:21 -04004509 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05004510 if (Call->getType()->isPointerTy()) {
4511 // Functions returning pointers require variable pointers.
4512 setVariablePointersCapabilities(
4513 Call->getType()->getPointerAddressSpace());
4514 }
4515
David Neto22f144c2017-06-12 14:26:21 -04004516 //
4517 // Generate OpFunctionCall.
4518 //
4519
4520 // Ops[0] = Result Type ID
4521 // Ops[1] = Callee Function ID
4522 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
SJWf93f5f32020-05-05 07:27:56 -05004523 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004524
SJW01901d92020-05-21 08:58:31 -05004525 Ops << Call->getType();
David Neto22f144c2017-06-12 14:26:21 -04004526
SJW01901d92020-05-21 08:58:31 -05004527 SPIRVID CalleeID = getSPIRVValue(Callee);
David Neto43568eb2017-10-13 18:25:25 -04004528 if (CalleeID == 0) {
4529 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04004530 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04004531 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
4532 // causes an infinite loop. Instead, go ahead and generate
4533 // the bad function call. A validator will catch the 0-Id.
4534 // llvm_unreachable("Can't translate function call");
4535 }
David Neto22f144c2017-06-12 14:26:21 -04004536
SJW01901d92020-05-21 08:58:31 -05004537 Ops << CalleeID;
David Neto22f144c2017-06-12 14:26:21 -04004538
David Neto22f144c2017-06-12 14:26:21 -04004539 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
SJW88ed5fe2020-05-11 12:40:57 -05004540 for (unsigned j = 0; j < CalleeFTy->getNumParams(); j++) {
4541 auto *operand = Call->getOperand(j);
alan-bakerd4d50652019-12-03 17:17:15 -05004542 auto *operand_type = operand->getType();
4543 // Images and samplers can be passed as function parameters without
4544 // variable pointers.
4545 if (operand_type->isPointerTy() && !IsImageType(operand_type) &&
4546 !IsSamplerType(operand_type)) {
alan-baker5b86ed72019-02-15 08:26:50 -05004547 auto sc =
4548 GetStorageClass(operand->getType()->getPointerAddressSpace());
4549 if (sc == spv::StorageClassStorageBuffer) {
 4550 // Passing an SSBO by reference requires the VariablePointersStorageBuffer
 4551 // capability.
SJW01901d92020-05-21 08:58:31 -05004552 setVariablePointersStorageBuffer();
alan-baker5b86ed72019-02-15 08:26:50 -05004553 } else if (sc == spv::StorageClassWorkgroup) {
4554 // Workgroup references require variable pointers if they are not
4555 // memory object declarations.
4556 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
4557 // Workgroup accessor represents a variable reference.
4558 if (!operand_call->getCalledFunction()->getName().startswith(
4559 clspv::WorkgroupAccessorFunction()))
SJW01901d92020-05-21 08:58:31 -05004560 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004561 } else {
4562 // Arguments are function parameters.
4563 if (!isa<Argument>(operand))
SJW01901d92020-05-21 08:58:31 -05004564 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004565 }
4566 }
4567 }
SJW01901d92020-05-21 08:58:31 -05004568 Ops << operand;
David Neto22f144c2017-06-12 14:26:21 -04004569 }
4570
SJW88ed5fe2020-05-11 12:40:57 -05004571 replaceSPIRVInst(Placeholder, spv::OpFunctionCall, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004572 }
4573 }
4574 }
4575}
4576
SJW77b87ad2020-04-21 14:37:52 -05004577void SPIRVProducerPass::HandleDeferredDecorations() {
4578 const auto &DL = module->getDataLayout();
Alan Baker202c8c72018-08-13 13:47:44 -04004579 if (getTypesNeedingArrayStride().empty() && LocalArgSpecIds.empty()) {
David Neto1a1a0582017-07-07 12:01:44 -04004580 return;
David Netoc6f3ab22018-04-06 18:02:31 -04004581 }
David Neto1a1a0582017-07-07 12:01:44 -04004582
David Netoc6f3ab22018-04-06 18:02:31 -04004583 // Insert ArrayStride decorations on the recorded pointer, array, and vector
 4584 // types, due to the OpPtrAccessChain instructions we generated earlier.
David Neto85082642018-03-24 06:55:20 -07004585 for (auto *type : getTypesNeedingArrayStride()) {
4586 Type *elemTy = nullptr;
4587 if (auto *ptrTy = dyn_cast<PointerType>(type)) {
4588 elemTy = ptrTy->getElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05004589 } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
alan-baker8eb435a2020-04-08 00:42:06 -04004590 elemTy = arrayTy->getElementType();
4591 } else if (auto *vecTy = dyn_cast<VectorType>(type)) {
4592 elemTy = vecTy->getElementType();
David Neto85082642018-03-24 06:55:20 -07004593 } else {
4594 errs() << "Unhandled strided type " << *type << "\n";
4595 llvm_unreachable("Unhandled strided type");
4596 }
David Neto1a1a0582017-07-07 12:01:44 -04004597
4598 // Ops[0] = Target ID
4599 // Ops[1] = Decoration (ArrayStride)
4600 // Ops[2] = Stride number (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004601 SPIRVOperandVec Ops;
David Neto1a1a0582017-07-07 12:01:44 -04004602
David Neto85082642018-03-24 06:55:20 -07004603 // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
Alan Bakerfcda9482018-10-02 17:09:59 -04004604 const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));
David Neto257c3892018-04-11 13:19:45 -04004605
SJW01901d92020-05-21 08:58:31 -05004606 Ops << type << spv::DecorationArrayStride << stride;
David Neto1a1a0582017-07-07 12:01:44 -04004607
SJWf93f5f32020-05-05 07:27:56 -05004608 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
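    // Illustrative sketch only: for a StorageBuffer pointer to float this
    // emits, e.g.,
    //   OpDecorate %ptr_StorageBuffer_float ArrayStride 4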
David Netoc6f3ab22018-04-06 18:02:31 -04004609 }
David Neto1a1a0582017-07-07 12:01:44 -04004610}
4611
David Neto22f144c2017-06-12 14:26:21 -04004612glsl::ExtInst SPIRVProducerPass::getExtInstEnum(StringRef Name) {
SJW2c317da2020-03-23 07:39:13 -05004613
4614 const auto &fi = Builtins::Lookup(Name);
4615 switch (fi) {
4616 case Builtins::kClamp: {
4617 auto param_type = fi.getParameter(0);
4618 if (param_type.type_id == Type::FloatTyID) {
4619 return glsl::ExtInst::ExtInstFClamp;
4620 }
4621 return param_type.is_signed ? glsl::ExtInst::ExtInstSClamp
4622 : glsl::ExtInst::ExtInstUClamp;
4623 }
4624 case Builtins::kMax: {
4625 auto param_type = fi.getParameter(0);
4626 if (param_type.type_id == Type::FloatTyID) {
4627 return glsl::ExtInst::ExtInstFMax;
4628 }
4629 return param_type.is_signed ? glsl::ExtInst::ExtInstSMax
4630 : glsl::ExtInst::ExtInstUMax;
4631 }
4632 case Builtins::kMin: {
4633 auto param_type = fi.getParameter(0);
4634 if (param_type.type_id == Type::FloatTyID) {
4635 return glsl::ExtInst::ExtInstFMin;
4636 }
4637 return param_type.is_signed ? glsl::ExtInst::ExtInstSMin
4638 : glsl::ExtInst::ExtInstUMin;
4639 }
4640 case Builtins::kAbs:
4641 return glsl::ExtInst::ExtInstSAbs;
4642 case Builtins::kFmax:
4643 return glsl::ExtInst::ExtInstFMax;
4644 case Builtins::kFmin:
4645 return glsl::ExtInst::ExtInstFMin;
4646 case Builtins::kDegrees:
4647 return glsl::ExtInst::ExtInstDegrees;
4648 case Builtins::kRadians:
4649 return glsl::ExtInst::ExtInstRadians;
4650 case Builtins::kMix:
4651 return glsl::ExtInst::ExtInstFMix;
4652 case Builtins::kAcos:
4653 case Builtins::kAcospi:
4654 return glsl::ExtInst::ExtInstAcos;
4655 case Builtins::kAcosh:
4656 return glsl::ExtInst::ExtInstAcosh;
4657 case Builtins::kAsin:
4658 case Builtins::kAsinpi:
4659 return glsl::ExtInst::ExtInstAsin;
4660 case Builtins::kAsinh:
4661 return glsl::ExtInst::ExtInstAsinh;
4662 case Builtins::kAtan:
4663 case Builtins::kAtanpi:
4664 return glsl::ExtInst::ExtInstAtan;
4665 case Builtins::kAtanh:
4666 return glsl::ExtInst::ExtInstAtanh;
4667 case Builtins::kAtan2:
4668 case Builtins::kAtan2pi:
4669 return glsl::ExtInst::ExtInstAtan2;
4670 case Builtins::kCeil:
4671 return glsl::ExtInst::ExtInstCeil;
4672 case Builtins::kSin:
4673 case Builtins::kHalfSin:
4674 case Builtins::kNativeSin:
4675 return glsl::ExtInst::ExtInstSin;
4676 case Builtins::kSinh:
4677 return glsl::ExtInst::ExtInstSinh;
4678 case Builtins::kCos:
4679 case Builtins::kHalfCos:
4680 case Builtins::kNativeCos:
4681 return glsl::ExtInst::ExtInstCos;
4682 case Builtins::kCosh:
4683 return glsl::ExtInst::ExtInstCosh;
4684 case Builtins::kTan:
4685 case Builtins::kHalfTan:
4686 case Builtins::kNativeTan:
4687 return glsl::ExtInst::ExtInstTan;
4688 case Builtins::kTanh:
4689 return glsl::ExtInst::ExtInstTanh;
4690 case Builtins::kExp:
4691 case Builtins::kHalfExp:
4692 case Builtins::kNativeExp:
4693 return glsl::ExtInst::ExtInstExp;
4694 case Builtins::kExp2:
4695 case Builtins::kHalfExp2:
4696 case Builtins::kNativeExp2:
4697 return glsl::ExtInst::ExtInstExp2;
4698 case Builtins::kLog:
4699 case Builtins::kHalfLog:
4700 case Builtins::kNativeLog:
4701 return glsl::ExtInst::ExtInstLog;
4702 case Builtins::kLog2:
4703 case Builtins::kHalfLog2:
4704 case Builtins::kNativeLog2:
4705 return glsl::ExtInst::ExtInstLog2;
4706 case Builtins::kFabs:
4707 return glsl::ExtInst::ExtInstFAbs;
4708 case Builtins::kFma:
4709 return glsl::ExtInst::ExtInstFma;
4710 case Builtins::kFloor:
4711 return glsl::ExtInst::ExtInstFloor;
4712 case Builtins::kLdexp:
4713 return glsl::ExtInst::ExtInstLdexp;
4714 case Builtins::kPow:
4715 case Builtins::kPowr:
4716 case Builtins::kHalfPowr:
4717 case Builtins::kNativePowr:
4718 return glsl::ExtInst::ExtInstPow;
4719 case Builtins::kRound:
4720 return glsl::ExtInst::ExtInstRound;
4721 case Builtins::kSqrt:
4722 case Builtins::kHalfSqrt:
4723 case Builtins::kNativeSqrt:
4724 return glsl::ExtInst::ExtInstSqrt;
4725 case Builtins::kRsqrt:
4726 case Builtins::kHalfRsqrt:
4727 case Builtins::kNativeRsqrt:
4728 return glsl::ExtInst::ExtInstInverseSqrt;
4729 case Builtins::kTrunc:
4730 return glsl::ExtInst::ExtInstTrunc;
4731 case Builtins::kFrexp:
4732 return glsl::ExtInst::ExtInstFrexp;
4733 case Builtins::kFract:
4734 return glsl::ExtInst::ExtInstFract;
4735 case Builtins::kSign:
4736 return glsl::ExtInst::ExtInstFSign;
4737 case Builtins::kLength:
4738 case Builtins::kFastLength:
4739 return glsl::ExtInst::ExtInstLength;
4740 case Builtins::kDistance:
4741 case Builtins::kFastDistance:
4742 return glsl::ExtInst::ExtInstDistance;
4743 case Builtins::kStep:
4744 return glsl::ExtInst::ExtInstStep;
4745 case Builtins::kSmoothstep:
4746 return glsl::ExtInst::ExtInstSmoothStep;
4747 case Builtins::kCross:
4748 return glsl::ExtInst::ExtInstCross;
4749 case Builtins::kNormalize:
4750 case Builtins::kFastNormalize:
4751 return glsl::ExtInst::ExtInstNormalize;
4752 default:
4753 break;
4754 }
4755
David Neto22f144c2017-06-12 14:26:21 -04004756 return StringSwitch<glsl::ExtInst>(Name)
David Neto22f144c2017-06-12 14:26:21 -04004757 .StartsWith("llvm.fmuladd.", glsl::ExtInst::ExtInstFma)
4758 .Case("spirv.unpack.v2f16", glsl::ExtInst::ExtInstUnpackHalf2x16)
4759 .Case("spirv.pack.v2f16", glsl::ExtInst::ExtInstPackHalf2x16)
David Neto3fbb4072017-10-16 11:28:14 -04004760 .Default(kGlslExtInstBad);
4761}
4762
4763glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(StringRef Name) {
SJW2c317da2020-03-23 07:39:13 -05004764 switch (Builtins::Lookup(Name)) {
4765 case Builtins::kClz:
4766 return glsl::ExtInst::ExtInstFindUMsb;
4767 case Builtins::kAcospi:
4768 return glsl::ExtInst::ExtInstAcos;
4769 case Builtins::kAsinpi:
4770 return glsl::ExtInst::ExtInstAsin;
4771 case Builtins::kAtanpi:
4772 return glsl::ExtInst::ExtInstAtan;
4773 case Builtins::kAtan2pi:
4774 return glsl::ExtInst::ExtInstAtan2;
4775 default:
4776 break;
4777 }
4778 return kGlslExtInstBad;
David Neto3fbb4072017-10-16 11:28:14 -04004779}
4780
alan-bakerb6b09dc2018-11-08 16:59:28 -05004781glsl::ExtInst
4782SPIRVProducerPass::getDirectOrIndirectExtInstEnum(StringRef Name) {
David Neto3fbb4072017-10-16 11:28:14 -04004783 auto direct = getExtInstEnum(Name);
4784 if (direct != kGlslExtInstBad)
4785 return direct;
4786 return getIndirectExtInstEnum(Name);
David Neto22f144c2017-06-12 14:26:21 -04004787}
4788
David Neto22f144c2017-06-12 14:26:21 -04004789void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
David Neto0676e6f2017-07-11 18:47:44 -04004790 binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
David Neto22f144c2017-06-12 14:26:21 -04004791}
4792
SJW88ed5fe2020-05-11 12:40:57 -05004793void SPIRVProducerPass::WriteResultID(const SPIRVInstruction &Inst) {
SJW01901d92020-05-21 08:58:31 -05004794 WriteOneWord(Inst.getResultID().get());
David Neto22f144c2017-06-12 14:26:21 -04004795}
4796
SJW88ed5fe2020-05-11 12:40:57 -05004797void SPIRVProducerPass::WriteWordCountAndOpcode(const SPIRVInstruction &Inst) {
David Neto22f144c2017-06-12 14:26:21 -04004798 // High 16 bit : Word Count
4799 // Low 16 bit : Opcode
SJW88ed5fe2020-05-11 12:40:57 -05004800 uint32_t Word = Inst.getOpcode();
4801 const uint32_t count = Inst.getWordCount();
David Netoee2660d2018-06-28 16:31:29 -04004802 if (count > 65535) {
4803 errs() << "Word count limit of 65535 exceeded: " << count << "\n";
4804 llvm_unreachable("Word count too high");
4805 }
SJW88ed5fe2020-05-11 12:40:57 -05004806 Word |= Inst.getWordCount() << 16;
David Neto22f144c2017-06-12 14:26:21 -04004807 WriteOneWord(Word);
4808}
4809
SJW88ed5fe2020-05-11 12:40:57 -05004810void SPIRVProducerPass::WriteOperand(const SPIRVOperand &Op) {
4811 SPIRVOperandType OpTy = Op.getType();
David Neto22f144c2017-06-12 14:26:21 -04004812 switch (OpTy) {
4813 default: {
4814 llvm_unreachable("Unsupported SPIRV Operand Type???");
4815 break;
4816 }
4817 case SPIRVOperandType::NUMBERID: {
SJW88ed5fe2020-05-11 12:40:57 -05004818 WriteOneWord(Op.getNumID());
David Neto22f144c2017-06-12 14:26:21 -04004819 break;
4820 }
4821 case SPIRVOperandType::LITERAL_STRING: {
SJW88ed5fe2020-05-11 12:40:57 -05004822 std::string Str = Op.getLiteralStr();
David Neto22f144c2017-06-12 14:26:21 -04004823 const char *Data = Str.c_str();
4824 size_t WordSize = Str.size() / 4;
4825 for (unsigned Idx = 0; Idx < WordSize; Idx++) {
4826 WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
4827 }
4828
4829 uint32_t Remainder = Str.size() % 4;
4830 uint32_t LastWord = 0;
4831 if (Remainder) {
4832 for (unsigned Idx = 0; Idx < Remainder; Idx++) {
4833 LastWord |= Data[4 * WordSize + Idx] << 8 * Idx;
4834 }
4835 }
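    // The final word is always written: if the string length is a multiple of
    // four it is an all-zero word, otherwise it carries the trailing bytes
    // padded with zeros. Either way it supplies the null terminator SPIR-V
    // requires for literal strings.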
4836
4837 WriteOneWord(LastWord);
4838 break;
4839 }
SJW88ed5fe2020-05-11 12:40:57 -05004840 case SPIRVOperandType::LITERAL_WORD: {
4841 WriteOneWord(Op.getLiteralNum()[0]);
4842 break;
4843 }
4844 case SPIRVOperandType::LITERAL_DWORD: {
4845 WriteOneWord(Op.getLiteralNum()[0]);
4846 WriteOneWord(Op.getLiteralNum()[1]);
David Neto22f144c2017-06-12 14:26:21 -04004847 break;
4848 }
4849 }
4850}
4851
4852void SPIRVProducerPass::WriteSPIRVBinary() {
SJW69939d52020-04-16 07:29:07 -05004853 for (int i = 0; i < kSectionCount; ++i) {
4854 WriteSPIRVBinary(SPIRVSections[i]);
4855 }
4856}
4857
void SPIRVProducerPass::WriteSPIRVBinary(SPIRVInstructionList &SPIRVInstList) {

  for (const auto &Inst : SPIRVInstList) {
    const auto &Ops = Inst.getOperands();
    spv::Op Opcode = static_cast<spv::Op>(Inst.getOpcode());

    switch (Opcode) {
    default: {
      errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
      llvm_unreachable("Unsupported SPIRV instruction");
      break;
    }
    case spv::OpCapability:
    case spv::OpExtension:
    case spv::OpMemoryModel:
    case spv::OpEntryPoint:
    case spv::OpExecutionMode:
    case spv::OpSource:
    case spv::OpDecorate:
    case spv::OpMemberDecorate:
    case spv::OpBranch:
    case spv::OpBranchConditional:
    case spv::OpSelectionMerge:
    case spv::OpLoopMerge:
    case spv::OpStore:
    case spv::OpImageWrite:
    case spv::OpReturnValue:
    case spv::OpControlBarrier:
    case spv::OpMemoryBarrier:
    case spv::OpReturn:
    case spv::OpFunctionEnd:
    case spv::OpCopyMemory: {
      WriteWordCountAndOpcode(Inst);
      for (uint32_t i = 0; i < Ops.size(); i++) {
        WriteOperand(Ops[i]);
      }
      break;
    }
    case spv::OpTypeBool:
    case spv::OpTypeVoid:
    case spv::OpTypeSampler:
    case spv::OpLabel:
    case spv::OpExtInstImport:
    case spv::OpTypePointer:
    case spv::OpTypeRuntimeArray:
    case spv::OpTypeStruct:
    case spv::OpTypeImage:
    case spv::OpTypeSampledImage:
    case spv::OpTypeInt:
    case spv::OpTypeFloat:
    case spv::OpTypeArray:
    case spv::OpTypeVector:
    case spv::OpTypeFunction: {
      WriteWordCountAndOpcode(Inst);
      WriteResultID(Inst);
      for (uint32_t i = 0; i < Ops.size(); i++) {
        WriteOperand(Ops[i]);
      }
      break;
    }
    case spv::OpFunction:
    case spv::OpFunctionParameter:
    case spv::OpAccessChain:
    case spv::OpPtrAccessChain:
    case spv::OpInBoundsAccessChain:
    case spv::OpUConvert:
    case spv::OpSConvert:
    case spv::OpConvertFToU:
    case spv::OpConvertFToS:
    case spv::OpConvertUToF:
    case spv::OpConvertSToF:
    case spv::OpFConvert:
    case spv::OpConvertPtrToU:
    case spv::OpConvertUToPtr:
    case spv::OpBitcast:
    case spv::OpFNegate:
    case spv::OpIAdd:
    case spv::OpFAdd:
    case spv::OpISub:
    case spv::OpFSub:
    case spv::OpIMul:
    case spv::OpFMul:
    case spv::OpUDiv:
    case spv::OpSDiv:
    case spv::OpFDiv:
    case spv::OpUMod:
    case spv::OpSRem:
    case spv::OpFRem:
    case spv::OpUMulExtended:
    case spv::OpSMulExtended:
    case spv::OpBitwiseOr:
    case spv::OpBitwiseXor:
    case spv::OpBitwiseAnd:
    case spv::OpNot:
    case spv::OpShiftLeftLogical:
    case spv::OpShiftRightLogical:
    case spv::OpShiftRightArithmetic:
    case spv::OpBitCount:
    case spv::OpCompositeConstruct:
    case spv::OpCompositeExtract:
    case spv::OpVectorExtractDynamic:
    case spv::OpCompositeInsert:
    case spv::OpCopyObject:
    case spv::OpVectorInsertDynamic:
    case spv::OpVectorShuffle:
    case spv::OpIEqual:
    case spv::OpINotEqual:
    case spv::OpUGreaterThan:
    case spv::OpUGreaterThanEqual:
    case spv::OpULessThan:
    case spv::OpULessThanEqual:
    case spv::OpSGreaterThan:
    case spv::OpSGreaterThanEqual:
    case spv::OpSLessThan:
    case spv::OpSLessThanEqual:
    case spv::OpFOrdEqual:
    case spv::OpFOrdGreaterThan:
    case spv::OpFOrdGreaterThanEqual:
    case spv::OpFOrdLessThan:
    case spv::OpFOrdLessThanEqual:
    case spv::OpFOrdNotEqual:
    case spv::OpFUnordEqual:
    case spv::OpFUnordGreaterThan:
    case spv::OpFUnordGreaterThanEqual:
    case spv::OpFUnordLessThan:
    case spv::OpFUnordLessThanEqual:
    case spv::OpFUnordNotEqual:
    case spv::OpExtInst:
    case spv::OpIsInf:
    case spv::OpIsNan:
    case spv::OpAny:
    case spv::OpAll:
    case spv::OpUndef:
    case spv::OpConstantNull:
    case spv::OpLogicalOr:
    case spv::OpLogicalAnd:
    case spv::OpLogicalNot:
    case spv::OpLogicalNotEqual:
    case spv::OpConstantComposite:
    case spv::OpSpecConstantComposite:
    case spv::OpConstantTrue:
    case spv::OpConstantFalse:
    case spv::OpConstant:
    case spv::OpSpecConstant:
    case spv::OpVariable:
    case spv::OpFunctionCall:
    case spv::OpSampledImage:
    case spv::OpImageFetch:
    case spv::OpImageSampleExplicitLod:
    case spv::OpImageQuerySize:
    case spv::OpImageQuerySizeLod:
    case spv::OpSelect:
    case spv::OpPhi:
    case spv::OpLoad:
    case spv::OpAtomicIAdd:
    case spv::OpAtomicISub:
    case spv::OpAtomicExchange:
    case spv::OpAtomicIIncrement:
    case spv::OpAtomicIDecrement:
    case spv::OpAtomicCompareExchange:
    case spv::OpAtomicUMin:
    case spv::OpAtomicSMin:
    case spv::OpAtomicUMax:
    case spv::OpAtomicSMax:
    case spv::OpAtomicAnd:
    case spv::OpAtomicOr:
    case spv::OpAtomicXor:
    case spv::OpDot: {
      WriteWordCountAndOpcode(Inst);
      WriteOperand(Ops[0]);
      WriteResultID(Inst);
      for (uint32_t i = 1; i < Ops.size(); i++) {
        WriteOperand(Ops[i]);
      }
      break;
    }
    }
  }
}

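// Returns true if |type| can meaningfully be given a null (zero) value.
// Opaque image and sampler types are not nullable; aggregates are nullable
// only if all of their contained types are.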
bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
  switch (type->getTypeID()) {
  case Type::HalfTyID:
  case Type::FloatTyID:
  case Type::DoubleTyID:
  case Type::IntegerTyID:
  case Type::FixedVectorTyID:
    return true;
  case Type::PointerTyID: {
    const PointerType *pointer_type = cast<PointerType>(type);
    if (pointer_type->getPointerAddressSpace() !=
        AddressSpace::UniformConstant) {
      auto pointee_type = pointer_type->getPointerElementType();
      if (pointee_type->isStructTy() &&
          cast<StructType>(pointee_type)->isOpaque()) {
        // Images and samplers are not nullable.
        return false;
      }
    }
    return true;
  }
  case Type::ArrayTyID:
    return IsTypeNullable(type->getArrayElementType());
  case Type::StructTyID: {
    const StructType *struct_type = cast<StructType>(type);
    // Images and samplers are not nullable.
    if (struct_type->isOpaque())
      return false;
    for (const auto element : struct_type->elements()) {
      if (!IsTypeNullable(element))
        return false;
    }
    return true;
  }
  default:
    return false;
  }
}

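// Reads the remapped-type offset and size metadata attached to the module
// into RemappedUBOTypeOffsets and RemappedUBOTypeSizes so later layout
// queries can prefer the remapped values.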
void SPIRVProducerPass::PopulateUBOTypeMaps() {
  if (auto *offsets_md =
          module->getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
    // Metadata is stored as key-value pair operands. The first element of each
    // operand is the type and the second is a vector of offsets.
    for (const auto *operand : offsets_md->operands()) {
      const auto *pair = cast<MDTuple>(operand);
      auto *type =
          cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
      const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
      std::vector<uint32_t> offsets;
      for (const Metadata *offset_md : offset_vector->operands()) {
        const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
        offsets.push_back(static_cast<uint32_t>(
            cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
      }
      RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
    }
  }

  if (auto *sizes_md =
          module->getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
    // Metadata is stored as key-value pair operands. The first element of each
    // operand is the type and the second is a triple of sizes: type size in
    // bits, store size and alloc size.
    for (const auto *operand : sizes_md->operands()) {
      const auto *pair = cast<MDTuple>(operand);
      auto *type =
          cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
      const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
      uint64_t type_size_in_bits =
          cast<ConstantInt>(
              cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
              ->getZExtValue();
      uint64_t type_store_size =
          cast<ConstantInt>(
              cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
              ->getZExtValue();
      uint64_t type_alloc_size =
          cast<ConstantInt>(
              cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
              ->getZExtValue();
      RemappedUBOTypeSizes.insert(std::make_pair(
          type, std::make_tuple(type_size_in_bits, type_store_size,
                                type_alloc_size)));
    }
  }
}

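// The size queries below consult the remapped UBO sizes first and fall back
// to the module's DataLayout when the type was not remapped.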
uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
                                              const DataLayout &DL) {
  auto iter = RemappedUBOTypeSizes.find(type);
  if (iter != RemappedUBOTypeSizes.end()) {
    return std::get<0>(iter->second);
  }

  return DL.getTypeSizeInBits(type);
}

uint64_t SPIRVProducerPass::GetTypeStoreSize(Type *type, const DataLayout &DL) {
  auto iter = RemappedUBOTypeSizes.find(type);
  if (iter != RemappedUBOTypeSizes.end()) {
    return std::get<1>(iter->second);
  }

  return DL.getTypeStoreSize(type);
}

uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
  auto iter = RemappedUBOTypeSizes.find(type);
  if (iter != RemappedUBOTypeSizes.end()) {
    return std::get<2>(iter->second);
  }

  return DL.getTypeAllocSize(type);
}

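// Returns the byte offset of |member| within |type|, preferring the remapped
// offsets when this struct's layout was rewritten, otherwise the DataLayout
// struct layout.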
uint32_t SPIRVProducerPass::GetExplicitLayoutStructMemberOffset(
    StructType *type, unsigned member, const DataLayout &DL) {
  const auto StructLayout = DL.getStructLayout(type);
  // Search for the correct offsets if this type was remapped.
  std::vector<uint32_t> *offsets = nullptr;
  auto iter = RemappedUBOTypeOffsets.find(type);
  if (iter != RemappedUBOTypeOffsets.end()) {
    offsets = &iter->second;
  }
  auto ByteOffset =
      static_cast<uint32_t>(StructLayout->getElementOffset(member));
  if (offsets) {
    ByteOffset = (*offsets)[member];
  }

  return ByteOffset;
}

void SPIRVProducerPass::setVariablePointersCapabilities(
    unsigned address_space) {
  if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
    setVariablePointersStorageBuffer();
  } else {
    setVariablePointers();
  }
}

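// Walks through GEPs to find the base pointer |v| is derived from.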
Value *SPIRVProducerPass::GetBasePointer(Value *v) {
  if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
    return GetBasePointer(gep->getPointerOperand());
  }

  // Conservatively return |v|.
  return v;
}

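// Returns true if |lhs| and |rhs| resolve to the same resource: resource
// accessor calls must match on descriptor set and binding, and workgroup
// accessor calls must match on spec id.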
bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
  if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
    if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
      if (lhs_call->getCalledFunction()->getName().startswith(
              clspv::ResourceAccessorFunction()) &&
          rhs_call->getCalledFunction()->getName().startswith(
              clspv::ResourceAccessorFunction())) {
        // For resource accessors, match descriptor set and binding.
        if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
            lhs_call->getOperand(1) == rhs_call->getOperand(1))
          return true;
      } else if (lhs_call->getCalledFunction()->getName().startswith(
                     clspv::WorkgroupAccessorFunction()) &&
                 rhs_call->getCalledFunction()->getName().startswith(
                     clspv::WorkgroupAccessorFunction())) {
        // For workgroup resources, match spec id.
        if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
          return true;
      }
    }
  }

  return false;
}

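// Returns true if the storage-buffer pointer select or phi |inst| only ever
// chooses between pointers based on the same object, where null (and undef,
// under the HackUndef option) operands are treated as compatible with
// anything.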
bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
  assert(inst->getType()->isPointerTy());
  assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
         spv::StorageClassStorageBuffer);
  const bool hack_undef = clspv::Option::HackUndef();
  if (auto *select = dyn_cast<SelectInst>(inst)) {
    auto *true_base = GetBasePointer(select->getTrueValue());
    auto *false_base = GetBasePointer(select->getFalseValue());

    if (true_base == false_base)
      return true;

    // If either the true or false operand is a null, then we satisfy the same
    // object constraint.
    if (auto *true_cst = dyn_cast<Constant>(true_base)) {
      if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
        return true;
    }

    if (auto *false_cst = dyn_cast<Constant>(false_base)) {
      if (false_cst->isNullValue() ||
          (hack_undef && isa<UndefValue>(false_base)))
        return true;
    }

    if (sameResource(true_base, false_base))
      return true;
  } else if (auto *phi = dyn_cast<PHINode>(inst)) {
    Value *value = nullptr;
    bool ok = true;
    for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
      auto *base = GetBasePointer(phi->getIncomingValue(i));
      // Null values satisfy the constraint of selecting from the same object.
      if (!value) {
        if (auto *cst = dyn_cast<Constant>(base)) {
          if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
            value = base;
        } else {
          value = base;
        }
      } else if (base != value) {
        if (auto *base_cst = dyn_cast<Constant>(base)) {
          if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
            continue;
        }

        if (sameResource(value, base))
          continue;

        // Values don't represent the same base.
        ok = false;
      }
    }

    return ok;
  }

  // Conservatively return false.
  return false;
}

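// Returns true if any call to |Arg|'s function can pass it a coherent storage
// buffer. Traces call sites, intermediate arguments, and pointer operands back
// to resource accessor calls and checks their coherent operand (operand 5).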
bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
  if (!Arg.getType()->isPointerTy() ||
      Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
    // Only SSBOs need to be annotated as coherent.
    return false;
  }

  DenseSet<Value *> visited;
  std::vector<Value *> stack;
  for (auto *U : Arg.getParent()->users()) {
    if (auto *call = dyn_cast<CallInst>(U)) {
      stack.push_back(call->getOperand(Arg.getArgNo()));
    }
  }

  while (!stack.empty()) {
    Value *v = stack.back();
    stack.pop_back();

    if (!visited.insert(v).second)
      continue;

    auto *resource_call = dyn_cast<CallInst>(v);
    if (resource_call &&
        resource_call->getCalledFunction()->getName().startswith(
            clspv::ResourceAccessorFunction())) {
      // If this is a resource accessor function, check if the coherent operand
      // is set.
      const auto coherent =
          unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
                       ->getZExtValue());
      if (coherent == 1)
        return true;
    } else if (auto *arg = dyn_cast<Argument>(v)) {
      // If this is a function argument, trace through its callers.
      for (auto U : arg->getParent()->users()) {
        if (auto *call = dyn_cast<CallInst>(U)) {
          stack.push_back(call->getOperand(arg->getArgNo()));
        }
      }
    } else if (auto *user = dyn_cast<User>(v)) {
      // If this is a user, traverse all operands that could lead to resource
      // variables.
      for (unsigned i = 0; i != user->getNumOperands(); ++i) {
        Value *operand = user->getOperand(i);
        if (operand->getType()->isPointerTy() &&
            operand->getType()->getPointerAddressSpace() ==
                clspv::AddressSpace::Global) {
          stack.push_back(operand);
        }
      }
    }
  }

  // No coherent resource variables encountered.
  return false;
}

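// Precomputes structured control-flow information: for each loop header the
// merge and continue blocks, and for other conditional branches the merge
// block, relying on the shape StructurizeCFG already gave the CFG.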
void SPIRVProducerPass::PopulateStructuredCFGMaps() {
  // First, track loop merges and continues.
  DenseSet<BasicBlock *> LoopMergesAndContinues;
  for (auto &F : *module) {
    if (F.isDeclaration())
      continue;

    DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
    const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>(F).getLoopInfo();
    std::deque<BasicBlock *> order;
    DenseSet<BasicBlock *> visited;
    clspv::ComputeStructuredOrder(&*F.begin(), &DT, LI, &order, &visited);

    for (auto BB : order) {
      auto terminator = BB->getTerminator();
      auto branch = dyn_cast<BranchInst>(terminator);
      if (LI.isLoopHeader(BB)) {
        auto L = LI.getLoopFor(BB);
        BasicBlock *ContinueBB = nullptr;
        BasicBlock *MergeBB = nullptr;

        MergeBB = L->getExitBlock();
        if (!MergeBB) {
          // StructurizeCFG pass converts CFG into triangle shape and the cfg
          // has regions with single entry/exit. As a result, loop should not
          // have multiple exits.
          llvm_unreachable("Loop has multiple exits???");
        }

        if (L->isLoopLatch(BB)) {
          ContinueBB = BB;
        } else {
          // From SPIR-V spec 2.11, Continue Target must dominate that
          // back-edge block.
          BasicBlock *Header = L->getHeader();
          BasicBlock *Latch = L->getLoopLatch();
          for (auto *loop_block : L->blocks()) {
            if (loop_block == Header) {
              continue;
            }

            // Check whether block dominates block with back-edge.
            // The loop latch is the single block with a back-edge. If it was
            // possible, StructurizeCFG made the loop conform to this
            // requirement, otherwise |Latch| is a nullptr.
            if (DT.dominates(loop_block, Latch)) {
              ContinueBB = loop_block;
            }
          }

          if (!ContinueBB) {
            llvm_unreachable("Wrong continue block from loop");
          }
        }

        // Record the continue and merge blocks.
        MergeBlocks[BB] = MergeBB;
        ContinueBlocks[BB] = ContinueBB;
        LoopMergesAndContinues.insert(MergeBB);
        LoopMergesAndContinues.insert(ContinueBB);
      } else if (branch && branch->isConditional()) {
        auto L = LI.getLoopFor(BB);
        bool HasBackedge = false;
        while (L && !HasBackedge) {
          if (L->isLoopLatch(BB)) {
            HasBackedge = true;
          }
          L = L->getParentLoop();
        }

        if (!HasBackedge) {
          // Only need a merge if the branch doesn't include a loop break or
          // continue.
          auto true_bb = branch->getSuccessor(0);
          auto false_bb = branch->getSuccessor(1);
          if (!LoopMergesAndContinues.count(true_bb) &&
              !LoopMergesAndContinues.count(false_bb)) {
            // StructurizeCFG pass already manipulated CFG. Just use false
            // block of branch instruction as merge block.
            MergeBlocks[BB] = false_bb;
          }
        }
      }
    }
  }
}