// Copyright 2017 The Clspv Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifdef _MSC_VER
#pragma warning(push, 0)
#endif

#include <cassert>
#include <cstring>
#include <iomanip>
#include <list>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <tuple>
#include <unordered_set>
#include <utility>

#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/UniqueVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Cloning.h"

// enable spv::HasResultAndType
#define SPV_ENABLE_UTILITY_CODE
#include "spirv/unified1/spirv.hpp"

#include "clspv/AddressSpace.h"
#include "clspv/DescriptorMap.h"
#include "clspv/Option.h"
#include "clspv/spirv_c_strings.hpp"
#include "clspv/spirv_glsl.hpp"

#include "ArgKind.h"
#include "Builtins.h"
#include "ComputeStructuredOrder.h"
#include "ConstantEmitter.h"
#include "Constants.h"
#include "DescriptorCounter.h"
#include "Layout.h"
#include "NormalizeGlobalVariable.h"
#include "Passes.h"
#include "SpecConstant.h"
#include "Types.h"

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

using namespace llvm;
using namespace clspv;
using namespace clspv::Builtins;
using namespace mdconst;

namespace {

cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

cl::opt<bool>
    ShowProducerIR("show-producer-ir", cl::init(false), cl::ReallyHidden,
                   cl::desc("Dump the IR at the start of SPIRVProducer"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
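// A sentinel meaning "no GLSL.std.450 extended instruction applies"; the
// lookup helpers below return this zero value when there is no mapping.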
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);

// SPIRV Module Sections (per 2.4 of the SPIRV spec)
// These are used to collect SPIRVInstructions by type on-the-fly.
enum SPIRVSection {
  kCapabilities,
  kExtensions,
  kImports,
  kMemoryModel,
  kEntryPoints,
  kExecutionModes,

  kDebug,
  kAnnotations,

  kTypes,
  kConstants = kTypes,
  kGlobalVariables,

  kFunctions,

  kSectionCount
};

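// A SPIR-V result <id>. Id 0 is reserved, so a default-constructed SPIRVID
// acts as "invalid / not yet assigned".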
class SPIRVID {
  uint32_t id;

public:
  SPIRVID(uint32_t _id = 0) : id(_id) {}
  uint32_t get() const { return id; }
  bool isValid() const { return id != 0; }
  bool operator==(const SPIRVID &that) const { return id == that.id; }
};

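// Kinds of operand payload: a SPIR-V <id> reference, a one-word literal, a
// two-word literal, or a nul-terminated literal string.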
enum SPIRVOperandType { NUMBERID, LITERAL_WORD, LITERAL_DWORD, LITERAL_STRING };

struct SPIRVOperand {
  explicit SPIRVOperand(SPIRVOperandType Ty, uint32_t Num) : Type(Ty) {
    LiteralNum[0] = Num;
  }
  explicit SPIRVOperand(SPIRVOperandType Ty, const char *Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(ArrayRef<uint32_t> NumVec) {
    auto sz = NumVec.size();
    assert(sz >= 1 && sz <= 2);
    Type = sz == 1 ? LITERAL_WORD : LITERAL_DWORD;
    LiteralNum[0] = NumVec[0];
    if (sz == 2) {
      LiteralNum[1] = NumVec[1];
    }
  }

  SPIRVOperandType getType() const { return Type; };
  uint32_t getNumID() const { return LiteralNum[0]; };
  std::string getLiteralStr() const { return LiteralStr; };
  const uint32_t *getLiteralNum() const { return LiteralNum; };

  uint32_t GetNumWords() const {
    switch (Type) {
    case NUMBERID:
    case LITERAL_WORD:
      return 1;
    case LITERAL_DWORD:
      return 2;
    case LITERAL_STRING:
      // Account for the terminating null character.
      return uint32_t((LiteralStr.size() + 4) / 4);
    }
    llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
  }

private:
  SPIRVOperandType Type;
  std::string LiteralStr;
  uint32_t LiteralNum[2];
};

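// Most instructions carry only a handful of operands, so they are kept inline
// in a SmallVector to avoid a heap allocation in the common case.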
typedef SmallVector<SPIRVOperand, 4> SPIRVOperandVec;

struct SPIRVInstruction {
  // Primary constructor must have Opcode, initializes WordCount based on ResID.
  SPIRVInstruction(spv::Op Opc, SPIRVID ResID = 0)
      : Opcode(static_cast<uint16_t>(Opc)) {
    setResult(ResID);
  }

  // Creates an instruction with an opcode and no result ID, and with the given
  // operands. This calls primary constructor to initialize Opcode, WordCount.
  // Takes ownership of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandVec &Ops) : SPIRVInstruction(Opc) {
    setOperands(Ops);
  }
  // Creates an instruction with an opcode and a result ID, and with the given
  // operands. This calls primary constructor to initialize Opcode, WordCount.
  // Takes ownership of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVID ResID, SPIRVOperandVec &Ops)
      : SPIRVInstruction(Opc, ResID) {
    setOperands(Ops);
  }

  uint32_t getWordCount() const { return WordCount; }
  uint16_t getOpcode() const { return Opcode; }
  SPIRVID getResultID() const { return ResultID; }
  const SPIRVOperandVec &getOperands() const { return Operands; }

private:
  void setResult(SPIRVID ResID = 0) {
    WordCount = 1 + (ResID.isValid() ? 1 : 0);
    ResultID = ResID;
  }

  void setOperands(SPIRVOperandVec &Ops) {
    assert(Operands.empty());
    Operands = std::move(Ops);
    for (auto &opd : Operands) {
      WordCount += uint16_t(opd.GetNumWords());
    }
  }

private:
  uint32_t WordCount; // Check the 16-bit bound at code generation time.
  uint16_t Opcode;
  SPIRVID ResultID;
  SPIRVOperandVec Operands;
};

struct SPIRVProducerPass final : public ModulePass {
  typedef DenseMap<Type *, SPIRVID> TypeMapType;
  typedef UniqueVector<Type *> TypeList;
  typedef DenseMap<Value *, SPIRVID> ValueMapType;
  typedef UniqueVector<Value *> ValueList;
  typedef std::vector<std::pair<Value *, SPIRVID>> EntryPointVecType;
  typedef std::set<uint32_t> CapabilitySetType;
  typedef std::list<SPIRVInstruction> SPIRVInstructionList;
  // A vector of pairs, each of which is:
  // - the LLVM instruction that we will later generate SPIR-V code for
  // - the SPIR-V instruction placeholder that will be replaced
  typedef std::vector<std::pair<Value *, SPIRVInstruction *>>
      DeferredInstVecType;
  typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
      GlobalConstFuncMapType;

  explicit SPIRVProducerPass(
      raw_pwrite_stream &out,
      std::vector<clspv::version0::DescriptorMapEntry> *descriptor_map_entries,
      ArrayRef<std::pair<unsigned, std::string>> samplerMap,
      bool outputCInitList)
      : ModulePass(ID), module(nullptr), samplerMap(samplerMap), out(out),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(&out),
        descriptorMapEntries(descriptor_map_entries),
        outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0) {
    addCapability(spv::CapabilityShader);
    Ptr = this;
  }

  virtual ~SPIRVProducerPass() {}

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<LoopInfoWrapperPass>();
  }

  virtual bool runOnModule(Module &module) override;

  // output the SPIR-V header block
  void outputHeader();

  // patch the SPIR-V header block
  void patchHeader();

  CapabilitySetType &getCapabilitySet() { return CapabilitySet; }
  TypeMapType &getImageTypeMap() { return ImageTypeMap; }
  TypeList &getTypeList() { return Types; };
  ValueMapType &getValueMap() { return ValueMap; }
  SPIRVInstructionList &getSPIRVInstList(SPIRVSection Section) {
    return SPIRVSections[Section];
  };
  EntryPointVecType &getEntryPointVec() { return EntryPointVec; };
  DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; };
  ValueList &getEntryPointInterfacesVec() { return EntryPointInterfacesVec; };
  SPIRVID getOpExtInstImportID();
  std::vector<SPIRVID> &getBuiltinDimVec() { return BuiltinDimensionVec; };

  bool hasVariablePointersStorageBuffer() {
    return HasVariablePointersStorageBuffer;
  }
  void setVariablePointersStorageBuffer() {
    if (!HasVariablePointersStorageBuffer) {
      addCapability(spv::CapabilityVariablePointersStorageBuffer);
      HasVariablePointersStorageBuffer = true;
    }
  }
  bool hasVariablePointers() { return HasVariablePointers; };
  void setVariablePointers() {
    if (!HasVariablePointers) {
      addCapability(spv::CapabilityVariablePointers);
      HasVariablePointers = true;
    }
  };
  ArrayRef<std::pair<unsigned, std::string>> &getSamplerMap() {
    return samplerMap;
  }
  GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
    return GlobalConstFuncTypeMap;
  }
  SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
    return GlobalConstArgumentSet;
  }
  TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }

  void GenerateLLVMIRInfo();
  // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
  // *not* be converted to a storage buffer, replace each such global variable
  // with one in the storage class expected by SPIR-V.
  void FindGlobalConstVars();
  // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
  // ModuleOrderedResourceVars.
  void FindResourceVars();
  void FindTypePerGlobalVar(GlobalVariable &GV);
  void FindTypePerFunc(Function &F);
  void FindTypesForSamplerMap();
  void FindTypesForResourceVars();
  // Inserts |Ty| and relevant sub-types into the |Types| member, indicating
  // that |Ty| and its subtypes will need a corresponding SPIR-V type.
  void FindType(Type *Ty);

  // Lookup or create Types, Constants.
  // Returns SPIRVID once it has been created.
  SPIRVID getSPIRVType(Type *Ty);
  SPIRVID getSPIRVConstant(Constant *Cst);
  // Lookup SPIRVID of llvm::Value, may create Constant.
  SPIRVID getSPIRVValue(Value *V);

  // Generates instructions for SPIR-V types corresponding to the LLVM types
  // saved in the |Types| member. A type follows its subtypes. IDs are
  // allocated sequentially starting with the current value of nextID, and
  // with a type following its subtypes. Also updates nextID to just beyond
  // the last generated ID.
  void GenerateSPIRVTypes();
  void GenerateModuleInfo();
  void GeneratePushConstantDescriptorMapEntries();
  void GenerateSpecConstantDescriptorMapEntries();
  void GenerateGlobalVar(GlobalVariable &GV);
  void GenerateWorkgroupVars();
  // Generate descriptor map entries for resource variables associated with
  // arguments to F.
  void GenerateDescriptorMapInfo(Function &F);
  void GenerateSamplers();
  // Generate OpVariables for %clspv.resource.var.* calls.
  void GenerateResourceVars();
  void GenerateFuncPrologue(Function &F);
  void GenerateFuncBody(Function &F);
  void GenerateEntryPointInitialStores();
  spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
  spv::Op GetSPIRVCastOpcode(Instruction &I);
  spv::Op GetSPIRVBinaryOpcode(Instruction &I);
  void GenerateInstruction(Instruction &I);
  void GenerateFuncEpilogue();
  void HandleDeferredInstruction();
  void HandleDeferredDecorations();
  bool is4xi8vec(Type *Ty) const;
  spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
  spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
  spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
  // Returns the GLSL extended instruction enum that the given function
  // call maps to. If none, then returns the 0 value, i.e. GLSLstd450Bad.
  glsl::ExtInst getExtInstEnum(const Builtins::FunctionInfo &func_info);
  // Returns the GLSL extended instruction enum indirectly used by the given
  // function. That is, to implement the given function, we use an extended
  // instruction plus one more instruction. If none, then returns the 0 value,
  // i.e. GLSLstd450Bad.
  glsl::ExtInst getIndirectExtInstEnum(const Builtins::FunctionInfo &func_info);
  // Returns the single GLSL extended instruction used directly or
  // indirectly by the given function call.
  glsl::ExtInst
  getDirectOrIndirectExtInstEnum(const Builtins::FunctionInfo &func_info);
  void WriteOneWord(uint32_t Word);
  void WriteResultID(const SPIRVInstruction &Inst);
  void WriteWordCountAndOpcode(const SPIRVInstruction &Inst);
  void WriteOperand(const SPIRVOperand &Op);
  void WriteSPIRVBinary();
  void WriteSPIRVBinary(SPIRVInstructionList &SPIRVInstList);

  // Returns true if |type| is compatible with OpConstantNull.
  bool IsTypeNullable(const Type *type) const;

  // Populate UBO remapped type maps.
  void PopulateUBOTypeMaps();

  // Populate the merge and continue block maps.
  void PopulateStructuredCFGMaps();

  // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
  // uses the internal map, otherwise it falls back on the data layout.
  uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
  uint64_t GetTypeStoreSize(Type *type, const DataLayout &DL);
  uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);
  uint32_t GetExplicitLayoutStructMemberOffset(StructType *type,
                                               unsigned member,
                                               const DataLayout &DL);

  // Returns the base pointer of |v|.
  Value *GetBasePointer(Value *v);

  // Adds a Capability if not already present (e.g. CapabilityGroupNonUniformBroadcast).
  void addCapability(uint32_t c) { CapabilitySet.emplace(c); }

  // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| based on
  // |address_space|.
  void setVariablePointersCapabilities(unsigned address_space);

  // Returns true if |lhs| and |rhs| represent the same resource or workgroup
  // variable.
  bool sameResource(Value *lhs, Value *rhs) const;

  // Returns true if |inst| is phi or select that selects from the same
  // structure (or null).
  bool selectFromSameObject(Instruction *inst);

  // Returns true if |Arg| is called with a coherent resource.
  bool CalledWithCoherentResource(Argument &Arg);

  //
  // Primary interface for adding SPIRVInstructions to a SPIRVSection.
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Opcode, SPIRVOperandVec &Operands) {
    bool has_result, has_result_type;
    spv::HasResultAndType(Opcode, &has_result, &has_result_type);
    SPIRVID RID = has_result ? incrNextID() : 0;
    SPIRVSections[TSection].emplace_back(Opcode, RID, Operands);
    return RID;
  }
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Op) {
    SPIRVOperandVec Ops;
    return addSPIRVInst<TSection>(Op, Ops);
  }
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Op, uint32_t V) {
    SPIRVOperandVec Ops;
    Ops.emplace_back(LITERAL_WORD, V);
    return addSPIRVInst<TSection>(Op, Ops);
  }
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Op, const char *V) {
    SPIRVOperandVec Ops;
    Ops.emplace_back(LITERAL_STRING, V);
    return addSPIRVInst<TSection>(Op, Ops);
  }

  //
  // Add placeholder for llvm::Value that references future values.
  // Must have result ID just in case final SPIRVInstruction requires.
  SPIRVID addSPIRVPlaceholder(Value *I) {
    SPIRVID RID = incrNextID();
    SPIRVOperandVec Ops;
    SPIRVSections[kFunctions].emplace_back(spv::OpExtInst, RID, Ops);
    DeferredInstVec.push_back({I, &SPIRVSections[kFunctions].back()});
    return RID;
  }
  // Replace placeholder with actual SPIRVInstruction on the final pass
  // (HandleDeferredInstruction).
  SPIRVID replaceSPIRVInst(SPIRVInstruction *I, spv::Op Opcode,
                           SPIRVOperandVec &Operands) {
    bool has_result, has_result_type;
    spv::HasResultAndType(Opcode, &has_result, &has_result_type);
    SPIRVID RID = has_result ? I->getResultID() : 0;
    *I = SPIRVInstruction(Opcode, RID, Operands);
    return RID;
  }

private:
  static char ID;

  Module *module;

  // Set of Capabilities required
  CapabilitySetType CapabilitySet;

  ArrayRef<std::pair<unsigned, std::string>> samplerMap;
  raw_pwrite_stream &out;

  // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
  // convert to other formats on demand?

  // When emitting a C initialization list, the WriteSPIRVBinary method
  // will actually write its words to this vector via binaryTempOut.
  SmallVector<char, 100> binaryTempUnderlyingVector;
  raw_svector_ostream binaryTempOut;

  // Binary output writes to this stream, which might be |out| or
  // |binaryTempOut|. It's the latter when we really want to write a C
  // initializer list.
  raw_pwrite_stream *binaryOut;
  std::vector<version0::DescriptorMapEntry> *descriptorMapEntries;
  const bool outputCInitList; // If true, output looks like {0x7023, ... , 5}
  uint64_t patchBoundOffset;
  uint32_t nextID;

  SPIRVID incrNextID() { return nextID++; }

  // ID for OpTypeInt 32 1.
  SPIRVID int32ID;
  // ID for OpTypeVector %int 4.
  SPIRVID v4int32ID;

  // Maps an LLVM Type pointer to the corresponding SPIR-V Id.
  TypeMapType TypeMap;
  // Maps an LLVM image type to its SPIR-V ID.
  TypeMapType ImageTypeMap;
  // A unique-vector of LLVM types that map to a SPIR-V type.
  TypeList Types;
  // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
  ValueMapType ValueMap;
  SPIRVInstructionList SPIRVSections[kSectionCount];

  EntryPointVecType EntryPointVec;
  DeferredInstVecType DeferredInstVec;
  ValueList EntryPointInterfacesVec;
  SPIRVID OpExtInstImportID;
  std::vector<SPIRVID> BuiltinDimensionVec;
  bool HasVariablePointersStorageBuffer;
  bool HasVariablePointers;
  Type *SamplerTy;
  DenseMap<unsigned, SPIRVID> SamplerLiteralToIDMap;

  // If a function F has a pointer-to-__constant parameter, then this variable
  // will map F's type to (G, index of the parameter), where in a first phase
  // G is F's type. During FindTypePerFunc, G will be changed to F's type
  // but replacing the pointer-to-constant parameter with
  // pointer-to-ModuleScopePrivate.
  // TODO(dneto): This doesn't seem general enough? A function might have
  // more than one such parameter.
  GlobalConstFuncMapType GlobalConstFuncTypeMap;
  SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
  // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
  // or array types, and which point into transparent memory (StorageBuffer
  // storage class). These will require an ArrayStride decoration.
  // See SPV_KHR_variable_pointers rev 13.
  TypeList TypesNeedingArrayStride;

  // This is truly ugly, but works around what look like driver bugs.
  // For get_local_size, an earlier part of the flow has created a module-scope
  // variable in Private address space to hold the value for the workgroup
  // size. Its initializer is a uint3 value marked as builtin WorkgroupSize.
  // When this is present, save the IDs of the initializer value and variable
  // in these two variables. We only ever do a vector load from it, and
  // when we see one of those, substitute just the value of the initializer.
  // This mimics what Glslang does, and that's what drivers are used to.
  // TODO(dneto): Remove this once drivers are fixed.
  SPIRVID WorkgroupSizeValueID;
  SPIRVID WorkgroupSizeVarID;

  // Bookkeeping for mapping kernel arguments to resource variables.
  struct ResourceVarInfo {
    ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
                    Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
        : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
          var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
          addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
    const int index; // Index into ResourceVarInfoList
    const unsigned descriptor_set;
    const unsigned binding;
    Function *const var_fn; // The @clspv.resource.var.* function.
    const clspv::ArgKind arg_kind;
    const int coherent;
    const unsigned addr_space; // The LLVM address space
    // The SPIR-V ID of the OpVariable. Not populated at construction time.
    SPIRVID var_id;
  };
  // A list of resource var info. Each one corresponds to a module-scope
  // resource variable we will have to create. Resource var indices are
  // indices into this vector.
  SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
  // This is a vector of pointers of all the resource vars, but ordered by
  // kernel function, and then by argument.
  UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
  // Map a function to the ordered list of resource variables it uses, one for
  // each argument. If an argument does not use a resource variable, it
  // will have a null pointer entry.
  using FunctionToResourceVarsMapType =
      DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
  FunctionToResourceVarsMapType FunctionToResourceVarsMap;

  // What LLVM types map to SPIR-V types needing layout? These are the
  // arrays and structures supporting storage buffers and uniform buffers.
  TypeList TypesNeedingLayout;
  // What LLVM struct types map to a SPIR-V struct type with Block decoration?
  UniqueVector<StructType *> StructTypesNeedingBlock;
  // For a call that represents a load from an opaque type (samplers, images),
  // map it to the variable id it should load from.
  DenseMap<CallInst *, SPIRVID> ResourceVarDeferredLoadCalls;

  // An ordered list of the kernel arguments of type pointer-to-local.
  using LocalArgList = SmallVector<Argument *, 8>;
  LocalArgList LocalArgs;
  // Information about a pointer-to-local argument.
  struct LocalArgInfo {
    // The SPIR-V ID of the array variable.
    SPIRVID variable_id;
    // The element type of the array.
    Type *elem_type;
    // The ID of the array size.
    SPIRVID array_size_id;
    // The ID of the array type.
    SPIRVID array_type_id;
    // The ID of the pointer to the array type.
    SPIRVID ptr_array_type_id;
    // The specialization constant ID of the array size.
    int spec_id;
  };
  // A mapping from Argument to its assigned SpecId.
  DenseMap<const Argument *, int> LocalArgSpecIds;
  // A mapping from SpecId to its LocalArgInfo.
  DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
  // A mapping from a remapped type to its real offsets.
  DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
  // A mapping from a remapped type to its real sizes.
  DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
      RemappedUBOTypeSizes;

  // Maps basic block to its merge block.
  DenseMap<BasicBlock *, BasicBlock *> MergeBlocks;
  // Maps basic block to its continue block.
  DenseMap<BasicBlock *, BasicBlock *> ContinueBlocks;

public:
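  // Pointer to the live pass instance. The SPIRVOperandVec streaming helpers
  // defined below use it to resolve LLVM Types and Values to SPIR-V ids.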
  static SPIRVProducerPass *Ptr;
};

char SPIRVProducerPass::ID;
SPIRVProducerPass *SPIRVProducerPass::Ptr = nullptr;

} // namespace

namespace clspv {
ModulePass *createSPIRVProducerPass(
    raw_pwrite_stream &out,
    std::vector<version0::DescriptorMapEntry> *descriptor_map_entries,
    ArrayRef<std::pair<unsigned, std::string>> samplerMap,
    bool outputCInitList) {
  return new SPIRVProducerPass(out, descriptor_map_entries, samplerMap,
                               outputCInitList);
}
} // namespace clspv

namespace {
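// Streaming helpers that append operands to a SPIRVOperandVec: integer
// literals, literal strings, and <id>s derived from LLVM Types and Values.
// Each returns the list so appends can be chained, for example (illustrative
// names): Ops << some_type << some_value << 42;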
SPIRVOperandVec &operator<<(SPIRVOperandVec &list, uint32_t num) {
  list.emplace_back(LITERAL_WORD, num);
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, int32_t num) {
  list.emplace_back(LITERAL_WORD, static_cast<uint32_t>(num));
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, ArrayRef<uint32_t> num_vec) {
  list.emplace_back(num_vec);
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, StringRef str) {
  list.emplace_back(LITERAL_STRING, str);
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, Type *t) {
  list.emplace_back(NUMBERID, SPIRVProducerPass::Ptr->getSPIRVType(t).get());
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, Value *v) {
  list.emplace_back(NUMBERID, SPIRVProducerPass::Ptr->getSPIRVValue(v).get());
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, SPIRVID &v) {
  list.emplace_back(NUMBERID, v.get());
  return list;
}
} // namespace

bool SPIRVProducerPass::runOnModule(Module &M) {
  // TODO(sjw): Need to reset all data members for each Module, or better
  // yet create a new SPIRVProducer for every module. For now only
  // allow 1 call.
  assert(module == nullptr);
  module = &M;
  if (ShowProducerIR) {
    llvm::outs() << *module << "\n";
  }
  binaryOut = outputCInitList ? &binaryTempOut : &out;

  PopulateUBOTypeMaps();
  PopulateStructuredCFGMaps();

  // SPIR-V always begins with its header information
  outputHeader();

  // Gather information from the LLVM IR that we require.
  GenerateLLVMIRInfo();

  // Collect information on global variables too.
  for (GlobalVariable &GV : module->globals()) {
    // If the GV is one of our special __spirv_* variables, remove the
    // initializer as it was only placed there to force LLVM to not throw the
    // value away.
    if (GV.getName().startswith("__spirv_") ||
        GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
      GV.setInitializer(nullptr);
    }

    // Collect types' information from global variable.
    FindTypePerGlobalVar(GV);

    // If the variable is an input, entry points need to know about it.
    if (AddressSpace::Input == GV.getType()->getPointerAddressSpace()) {
      getEntryPointInterfacesVec().insert(&GV);
    }
  }

  // Generate SPIRV instructions for types.
  GenerateSPIRVTypes();

  // Generate literal samplers if necessary.
  GenerateSamplers();

  // Generate descriptor map entries for all push constants
  GeneratePushConstantDescriptorMapEntries();

  // Generate SPIRV variables.
  for (GlobalVariable &GV : module->globals()) {
    GenerateGlobalVar(GV);
  }
  GenerateResourceVars();
  GenerateWorkgroupVars();

  // Generate SPIRV instructions for each function.
  for (Function &F : *module) {
    if (F.isDeclaration()) {
      continue;
    }

    GenerateDescriptorMapInfo(F);

    // Generate Function Prologue.
    GenerateFuncPrologue(F);

    // Generate SPIRV instructions for function body.
    GenerateFuncBody(F);

    // Generate Function Epilogue.
    GenerateFuncEpilogue();
  }

  HandleDeferredInstruction();
  HandleDeferredDecorations();

  // Generate descriptor map entries for module scope specialization constants.
  GenerateSpecConstantDescriptorMapEntries();

  // Generate SPIRV module information.
  GenerateModuleInfo();

  WriteSPIRVBinary();

  // We need to patch the SPIR-V header to set bound correctly.
  patchHeader();

  if (outputCInitList) {
    bool first = true;
    std::ostringstream os;

    auto emit_word = [&os, &first](uint32_t word) {
      if (!first)
        os << ",\n";
      os << word;
      first = false;
    };

    os << "{";
    const std::string str(binaryTempOut.str());
    for (unsigned i = 0; i < str.size(); i += 4) {
      const uint32_t a = static_cast<unsigned char>(str[i]);
      const uint32_t b = static_cast<unsigned char>(str[i + 1]);
      const uint32_t c = static_cast<unsigned char>(str[i + 2]);
      const uint32_t d = static_cast<unsigned char>(str[i + 3]);
      emit_word(a | (b << 8) | (c << 16) | (d << 24));
    }
    os << "}\n";
    out << os.str();
  }

  return false;
}

void SPIRVProducerPass::outputHeader() {
  binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
                   sizeof(spv::MagicNumber));
  const uint32_t spv_version = 0x10000; // SPIR-V 1.0
  binaryOut->write(reinterpret_cast<const char *>(&spv_version),
                   sizeof(spv_version));

  // use Google's vendor ID
  const uint32_t vendor = 21 << 16;
  binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));

  // we record where we need to come back to and patch in the bound value
  patchBoundOffset = binaryOut->tell();

  // output a bad bound for now
  binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));

  // output the schema (reserved for use and must be 0)
  const uint32_t schema = 0;
  binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
}

void SPIRVProducerPass::patchHeader() {
  // for a binary we just write the value of nextID over bound
  binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
                    patchBoundOffset);
}

void SPIRVProducerPass::GenerateLLVMIRInfo() {
  // This function generates LLVM IR for functions, such as global variables
  // for arguments, constants, and pointer types for argument access. This
  // information is artificial because we need Vulkan SPIR-V output. This
  // function is executed ahead of FindType and FindConstant.

  FindGlobalConstVars();

  FindResourceVars();

  bool HasWorkGroupBuiltin = false;
  for (GlobalVariable &GV : module->globals()) {
    const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
    if (spv::BuiltInWorkgroupSize == BuiltinType) {
      HasWorkGroupBuiltin = true;
    }
  }

  FindTypesForSamplerMap();
  FindTypesForResourceVars();
}

void SPIRVProducerPass::FindGlobalConstVars() {
  clspv::NormalizeGlobalVariables(*module);
  const DataLayout &DL = module->getDataLayout();

  SmallVector<GlobalVariable *, 8> GVList;
  SmallVector<GlobalVariable *, 8> DeadGVList;
  for (GlobalVariable &GV : module->globals()) {
    if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
      if (GV.use_empty()) {
        DeadGVList.push_back(&GV);
      } else {
        GVList.push_back(&GV);
      }
    }
  }

  // Remove dead global __constant variables.
  for (auto GV : DeadGVList) {
    GV->eraseFromParent();
  }
  DeadGVList.clear();

  if (clspv::Option::ModuleConstantsInStorageBuffer()) {
    // For now, we only support a single storage buffer.
    if (GVList.size() > 0) {
      assert(GVList.size() == 1);
      const auto *GV = GVList[0];
      const auto constants_byte_size =
          (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
      const size_t kConstantMaxSize = 65536;
      if (constants_byte_size > kConstantMaxSize) {
        outs() << "Max __constant capacity of " << kConstantMaxSize
               << " bytes exceeded: " << constants_byte_size << " bytes used\n";
        llvm_unreachable("Max __constant capacity exceeded");
      }
    }
  } else {
    // Change global constant variable's address space to ModuleScopePrivate.
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    for (auto GV : GVList) {
      // Create new gv with ModuleScopePrivate address space.
      Type *NewGVTy = GV->getType()->getPointerElementType();
      GlobalVariable *NewGV = new GlobalVariable(
          *module, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
          nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
      NewGV->takeName(GV);

      const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
      SmallVector<User *, 8> CandidateUsers;

      auto record_called_function_type_as_user =
          [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
            // Find argument index.
            unsigned index = 0;
            for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
              if (gv == call->getOperand(i)) {
                // TODO(dneto): Should we break here?
                index = i;
              }
            }

            // Record function type with global constant.
            GlobalConstFuncTyMap[call->getFunctionType()] =
                std::make_pair(call->getFunctionType(), index);
          };

      for (User *GVU : GVUsers) {
        if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
          record_called_function_type_as_user(GV, Call);
        } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
          // Check GEP users.
          for (User *GEPU : GEP->users()) {
            if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
              record_called_function_type_as_user(GEP, GEPCall);
            }
          }
        }

        CandidateUsers.push_back(GVU);
      }

      for (User *U : CandidateUsers) {
        // Update users of gv with new gv.
        if (!isa<Constant>(U)) {
          // #254: Can't change operands of a constant, but this shouldn't be
          // something that sticks around in the module.
          U->replaceUsesOfWith(GV, NewGV);
        }
      }

      // Delete original gv.
      GV->eraseFromParent();
    }
  }
}

void SPIRVProducerPass::FindResourceVars() {
  ResourceVarInfoList.clear();
  FunctionToResourceVarsMap.clear();
  ModuleOrderedResourceVars.reset();
  // Normally, there is one resource variable per clspv.resource.var.*
  // function, since that is unique'd by arg type and index. By design,
  // we can share these resource variables across kernels because all
  // kernels use the same descriptor set.
  //
  // But if the user requested distinct descriptor sets per kernel, then
  // the descriptor allocator has made different (set,binding) pairs for
  // the same (type,arg_index) pair. Since we can decorate a resource
  // variable with only exactly one DescriptorSet and Binding, we are
  // forced in this case to make distinct resource variables whenever
  // the same clspv.resource.var.X function is seen with distinct
  // (set,binding) values.
  const bool always_distinct_sets =
      clspv::Option::DistinctKernelDescriptorSets();
  for (Function &F : *module) {
    // Rely on the fact the resource var functions have a stable ordering
    // in the module.
    if (Builtins::Lookup(&F) == Builtins::kClspvResource) {
      // Find all calls to this function with distinct set and binding pairs.
      // Save them in ResourceVarInfoList.

      // Determine uniqueness of the (set,binding) pairs only within this
968 // one resource-var builtin function.
969 using SetAndBinding = std::pair<unsigned, unsigned>;
970 // Maps set and binding to the resource var info.
971 DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
972 bool first_use = true;
973 for (auto &U : F.uses()) {
974 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
975 const auto set = unsigned(
976 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
977 const auto binding = unsigned(
978 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
979 const auto arg_kind = clspv::ArgKind(
980 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
981 const auto arg_index = unsigned(
982 dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
alan-bakere9308012019-03-15 10:25:13 -0400983 const auto coherent = unsigned(
984 dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -0400985
986 // Find or make the resource var info for this combination.
987 ResourceVarInfo *rv = nullptr;
988 if (always_distinct_sets) {
989 // Make a new resource var any time we see a different
990 // (set,binding) pair.
991 SetAndBinding key{set, binding};
992 auto where = set_and_binding_map.find(key);
993 if (where == set_and_binding_map.end()) {
994 rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
alan-bakere9308012019-03-15 10:25:13 -0400995 binding, &F, arg_kind, coherent);
David Neto862b7d82018-06-14 18:48:37 -0400996 ResourceVarInfoList.emplace_back(rv);
997 set_and_binding_map[key] = rv;
998 } else {
999 rv = where->second;
1000 }
1001 } else {
1002 // The default is to make exactly one resource for each
1003 // clspv.resource.var.* function.
1004 if (first_use) {
1005 first_use = false;
1006 rv = new ResourceVarInfo(int(ResourceVarInfoList.size()), set,
alan-bakere9308012019-03-15 10:25:13 -04001007 binding, &F, arg_kind, coherent);
David Neto862b7d82018-06-14 18:48:37 -04001008 ResourceVarInfoList.emplace_back(rv);
1009 } else {
1010 rv = ResourceVarInfoList.back().get();
1011 }
1012 }
1013
1014 // Now populate FunctionToResourceVarsMap.
1015 auto &mapping =
1016 FunctionToResourceVarsMap[call->getParent()->getParent()];
1017 while (mapping.size() <= arg_index) {
1018 mapping.push_back(nullptr);
1019 }
1020 mapping[arg_index] = rv;
1021 }
1022 }
1023 }
1024 }
1025
1026 // Populate ModuleOrderedResourceVars.
SJW77b87ad2020-04-21 14:37:52 -05001027 for (Function &F : *module) {
David Neto862b7d82018-06-14 18:48:37 -04001028 auto where = FunctionToResourceVarsMap.find(&F);
1029 if (where != FunctionToResourceVarsMap.end()) {
1030 for (auto &rv : where->second) {
1031 if (rv != nullptr) {
1032 ModuleOrderedResourceVars.insert(rv);
1033 }
1034 }
1035 }
1036 }
1037 if (ShowResourceVars) {
1038 for (auto *info : ModuleOrderedResourceVars) {
1039 outs() << "MORV index " << info->index << " (" << info->descriptor_set
1040 << "," << info->binding << ") " << *(info->var_fn->getReturnType())
1041 << "\n";
1042 }
1043 }
1044}
1045
David Neto22f144c2017-06-12 14:26:21 -04001046void SPIRVProducerPass::FindTypePerGlobalVar(GlobalVariable &GV) {
1047 // Investigate global variable's type.
1048 FindType(GV.getType());
1049}
1050
1051void SPIRVProducerPass::FindTypePerFunc(Function &F) {
1052 // Investigate function's type.
1053 FunctionType *FTy = F.getFunctionType();
1054
1055 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
1056 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
David Neto9ed8e2f2018-03-24 06:47:24 -07001057 // Handle a regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04001058 if (GlobalConstFuncTyMap.count(FTy)) {
1059 uint32_t GVCstArgIdx = GlobalConstFuncTypeMap[FTy].second;
1060 SmallVector<Type *, 4> NewFuncParamTys;
1061 for (unsigned i = 0; i < FTy->getNumParams(); i++) {
1062 Type *ParamTy = FTy->getParamType(i);
1063 if (i == GVCstArgIdx) {
1064 Type *EleTy = ParamTy->getPointerElementType();
1065 ParamTy = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1066 }
1067
1068 NewFuncParamTys.push_back(ParamTy);
1069 }
1070
1071 FunctionType *NewFTy =
1072 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1073 GlobalConstFuncTyMap[FTy] = std::make_pair(NewFTy, GVCstArgIdx);
1074 FTy = NewFTy;
1075 }
1076
1077 FindType(FTy);
1078 } else {
1079 // As kernel functions do not have parameters, create new function type and
1080 // add it to type map.
1081 SmallVector<Type *, 4> NewFuncParamTys;
1082 FunctionType *NewFTy =
1083 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1084 FindType(NewFTy);
1085 }
1086
1087 // Investigate instructions' type in function body.
1088 for (BasicBlock &BB : F) {
1089 for (Instruction &I : BB) {
1090 if (isa<ShuffleVectorInst>(I)) {
1091 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1092 // Ignore type for mask of shuffle vector instruction.
1093 if (i == 2) {
1094 continue;
1095 }
1096
1097 Value *Op = I.getOperand(i);
1098 if (!isa<MetadataAsValue>(Op)) {
1099 FindType(Op->getType());
1100 }
1101 }
1102
1103 FindType(I.getType());
1104 continue;
1105 }
1106
David Neto862b7d82018-06-14 18:48:37 -04001107 CallInst *Call = dyn_cast<CallInst>(&I);
1108
SJW61531372020-06-09 07:31:08 -05001109 if (Call) {
1110 auto &func_info = Builtins::Lookup(Call->getCalledFunction());
1111 if (func_info.getType() == Builtins::kClspvResource ||
1112 func_info.getType() == Builtins::kClspvLocal) {
1113 // This is a fake call representing access to a resource/workgroup
1114 // variable. We handle that elsewhere.
1115 continue;
1116 }
Alan Baker202c8c72018-08-13 13:47:44 -04001117 }
1118
alan-bakerf083bed2020-01-29 08:15:42 -05001119 // #497: InsertValue and ExtractValue map to OpCompositeInsert and
1120 // OpCompositeExtract which takes literal values for indices. As a result
1121 // don't map the type of indices.
1122 if (I.getOpcode() == Instruction::ExtractValue) {
1123 FindType(I.getOperand(0)->getType());
1124 continue;
1125 }
1126 if (I.getOpcode() == Instruction::InsertValue) {
1127 FindType(I.getOperand(0)->getType());
1128 FindType(I.getOperand(1)->getType());
1129 continue;
1130 }
1131
1132      // #497: InsertElement and ExtractElement map to OpCompositeInsert and
1133      // OpCompositeExtract when the index is a constant; don't map the index type.
1134 if (I.getOpcode() == Instruction::ExtractElement) {
1135 FindType(I.getOperand(0)->getType());
1136 Value *op1 = I.getOperand(1);
1137 if (!isa<Constant>(op1) || isa<GlobalValue>(op1)) {
1138 FindType(op1->getType());
1139 }
1140 continue;
1141 }
1142 if (I.getOpcode() == Instruction::InsertElement) {
1143 FindType(I.getOperand(0)->getType());
1144 FindType(I.getOperand(1)->getType());
1145 Value *op2 = I.getOperand(2);
1146 if (!isa<Constant>(op2) || isa<GlobalValue>(op2)) {
1147 FindType(op2->getType());
1148 }
1149 continue;
1150 }
1151
David Neto22f144c2017-06-12 14:26:21 -04001152 // Work through the operands of the instruction.
1153 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1154 Value *const Op = I.getOperand(i);
1155 // If any of the operands is a constant, find the type!
1156 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1157 FindType(Op->getType());
1158 }
1159 }
1160
1161 for (Use &Op : I.operands()) {
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001162 if (isa<CallInst>(&I)) {
David Neto22f144c2017-06-12 14:26:21 -04001163          // Don't examine the operand types of call instructions.
1164 break;
1165 }
Alan Baker202c8c72018-08-13 13:47:44 -04001166 if (CallInst *OpCall = dyn_cast<CallInst>(Op)) {
SJW61531372020-06-09 07:31:08 -05001167 if (Builtins::Lookup(OpCall->getCalledFunction()) ==
1168 Builtins::kClspvLocal) {
Alan Baker202c8c72018-08-13 13:47:44 -04001169 // This is a fake call representing access to a workgroup variable.
1170 // We handle that elsewhere.
1171 continue;
1172 }
1173 }
David Neto22f144c2017-06-12 14:26:21 -04001174 if (!isa<MetadataAsValue>(&Op)) {
1175 FindType(Op->getType());
1176 continue;
1177 }
1178 }
1179
David Neto22f144c2017-06-12 14:26:21 -04001180 // We don't want to track the type of this call as we are going to replace
1181 // it.
SJW61531372020-06-09 07:31:08 -05001182 if (Call && Builtins::Lookup(Call->getCalledFunction()) ==
1183 Builtins::kClspvSamplerVarLiteral) {
David Neto22f144c2017-06-12 14:26:21 -04001184 continue;
1185 }
1186
1187 if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(&I)) {
1188      // If the GEP's base operand is in the ModuleScopePrivate address space,
1189      // make the GEP's result use the ModuleScopePrivate address space too.
1190 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate) {
1191 // Add pointer type with private address space for global constant to
1192 // type list.
1193 Type *EleTy = I.getType()->getPointerElementType();
1194 Type *NewPTy =
1195 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1196
1197 FindType(NewPTy);
1198 continue;
1199 }
1200 }
1201
1202 FindType(I.getType());
1203 }
1204 }
1205}
1206
SJW77b87ad2020-04-21 14:37:52 -05001207void SPIRVProducerPass::FindTypesForSamplerMap() {
David Neto862b7d82018-06-14 18:48:37 -04001208 // If we are using a sampler map, find the type of the sampler.
SJW77b87ad2020-04-21 14:37:52 -05001209 if (module->getFunction(clspv::LiteralSamplerFunction()) ||
David Neto862b7d82018-06-14 18:48:37 -04001210 0 < getSamplerMap().size()) {
SJW77b87ad2020-04-21 14:37:52 -05001211 auto SamplerStructTy = module->getTypeByName("opencl.sampler_t");
David Neto862b7d82018-06-14 18:48:37 -04001212 if (!SamplerStructTy) {
SJW77b87ad2020-04-21 14:37:52 -05001213 SamplerStructTy =
1214 StructType::create(module->getContext(), "opencl.sampler_t");
David Neto862b7d82018-06-14 18:48:37 -04001215 }
1216
1217 SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);
1218
1219 FindType(SamplerTy);
1220 }
1221}
1222
SJW77b87ad2020-04-21 14:37:52 -05001223void SPIRVProducerPass::FindTypesForResourceVars() {
David Neto862b7d82018-06-14 18:48:37 -04001224 // Record types so they are generated.
1225 TypesNeedingLayout.reset();
1226 StructTypesNeedingBlock.reset();
1227
1228 // To match older clspv codegen, generate the float type first if required
1229 // for images.
1230 for (const auto *info : ModuleOrderedResourceVars) {
1231 if (info->arg_kind == clspv::ArgKind::ReadOnlyImage ||
1232 info->arg_kind == clspv::ArgKind::WriteOnlyImage) {
alan-bakerf67468c2019-11-25 15:51:49 -05001233 if (IsIntImageType(info->var_fn->getReturnType())) {
1234 // Nothing for now...
1235 } else if (IsUintImageType(info->var_fn->getReturnType())) {
SJW77b87ad2020-04-21 14:37:52 -05001236 FindType(Type::getInt32Ty(module->getContext()));
alan-bakerf67468c2019-11-25 15:51:49 -05001237 }
1238
1239 // We need "float" either for the sampled type or for the Lod operand.
SJW77b87ad2020-04-21 14:37:52 -05001240 FindType(Type::getFloatTy(module->getContext()));
David Neto862b7d82018-06-14 18:48:37 -04001241 }
1242 }
1243
1244 for (const auto *info : ModuleOrderedResourceVars) {
1245 Type *type = info->var_fn->getReturnType();
1246
1247 switch (info->arg_kind) {
1248 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04001249 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04001250 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1251 StructTypesNeedingBlock.insert(sty);
1252 } else {
1253 errs() << *type << "\n";
1254 llvm_unreachable("Buffer arguments must map to structures!");
1255 }
1256 break;
1257 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001258 case clspv::ArgKind::PodUBO:
1259 case clspv::ArgKind::PodPushConstant:
David Neto862b7d82018-06-14 18:48:37 -04001260 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1261 StructTypesNeedingBlock.insert(sty);
1262 } else {
1263 errs() << *type << "\n";
1264 llvm_unreachable("POD arguments must map to structures!");
1265 }
1266 break;
1267 case clspv::ArgKind::ReadOnlyImage:
1268 case clspv::ArgKind::WriteOnlyImage:
1269 case clspv::ArgKind::Sampler:
1270 // Sampler and image types map to the pointee type but
1271 // in the uniform constant address space.
1272 type = PointerType::get(type->getPointerElementType(),
1273 clspv::AddressSpace::UniformConstant);
1274 break;
1275 default:
1276 break;
1277 }
1278
1279 // The converted type is the type of the OpVariable we will generate.
1280 // If the pointee type is an array of size zero, FindType will convert it
1281 // to a runtime array.
1282 FindType(type);
1283 }
1284
alan-bakerdcd97412019-09-16 15:32:30 -04001285 // If module constants are clustered in a storage buffer then that struct
1286 // needs layout decorations.
1287 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
SJW77b87ad2020-04-21 14:37:52 -05001288 for (GlobalVariable &GV : module->globals()) {
alan-bakerdcd97412019-09-16 15:32:30 -04001289 PointerType *PTy = cast<PointerType>(GV.getType());
1290 const auto AS = PTy->getAddressSpace();
1291 const bool module_scope_constant_external_init =
1292 (AS == AddressSpace::Constant) && GV.hasInitializer();
1293 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
1294 if (module_scope_constant_external_init &&
1295 spv::BuiltInMax == BuiltinType) {
1296 StructTypesNeedingBlock.insert(
1297 cast<StructType>(PTy->getPointerElementType()));
1298 }
1299 }
1300 }
1301
SJW77b87ad2020-04-21 14:37:52 -05001302 for (const GlobalVariable &GV : module->globals()) {
Kévin Petitbbbda972020-03-03 19:16:31 +00001303 if (GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
1304 auto Ty = cast<PointerType>(GV.getType())->getPointerElementType();
1305 assert(Ty->isStructTy() && "Push constants have to be structures.");
1306 auto STy = cast<StructType>(Ty);
1307 StructTypesNeedingBlock.insert(STy);
1308 }
1309 }
1310
David Neto862b7d82018-06-14 18:48:37 -04001311 // Traverse the arrays and structures underneath each Block, and
1312 // mark them as needing layout.
1313 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1314 StructTypesNeedingBlock.end());
1315 while (!work_list.empty()) {
1316 Type *type = work_list.back();
1317 work_list.pop_back();
1318 TypesNeedingLayout.insert(type);
1319 switch (type->getTypeID()) {
1320 case Type::ArrayTyID:
1321 work_list.push_back(type->getArrayElementType());
1322 if (!Hack_generate_runtime_array_stride_early) {
1323 // Remember this array type for deferred decoration.
1324 TypesNeedingArrayStride.insert(type);
1325 }
1326 break;
1327 case Type::StructTyID:
1328 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1329 work_list.push_back(elem_ty);
1330 }
1331 default:
1332 // This type and its contained types don't get layout.
1333 break;
1334 }
1335 }
1336}
1337
SJWf93f5f32020-05-05 07:27:56 -05001338void SPIRVProducerPass::GenerateWorkgroupVars() {
Alan Baker202c8c72018-08-13 13:47:44 -04001339 // The SpecId assignment for pointer-to-local arguments is recorded in
1340 // module-level metadata. Translate that information into local argument
1341 // information.
SJWf93f5f32020-05-05 07:27:56 -05001342 LLVMContext &Context = module->getContext();
SJW77b87ad2020-04-21 14:37:52 -05001343 NamedMDNode *nmd = module->getNamedMetadata(clspv::LocalSpecIdMetadataName());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001344 if (!nmd)
1345 return;
Alan Baker202c8c72018-08-13 13:47:44 -04001346 for (auto operand : nmd->operands()) {
1347 MDTuple *tuple = cast<MDTuple>(operand);
1348 ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
1349 Function *func = cast<Function>(fn_md->getValue());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001350 ConstantAsMetadata *arg_index_md =
1351 cast<ConstantAsMetadata>(tuple->getOperand(1));
1352 int arg_index = static_cast<int>(
1353 cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
1354 Argument *arg = &*(func->arg_begin() + arg_index);
Alan Baker202c8c72018-08-13 13:47:44 -04001355
1356 ConstantAsMetadata *spec_id_md =
1357 cast<ConstantAsMetadata>(tuple->getOperand(2));
alan-bakerb6b09dc2018-11-08 16:59:28 -05001358 int spec_id = static_cast<int>(
1359 cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04001360
Alan Baker202c8c72018-08-13 13:47:44 -04001361 LocalArgSpecIds[arg] = spec_id;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001362 if (LocalSpecIdInfoMap.count(spec_id))
1363 continue;
Alan Baker202c8c72018-08-13 13:47:44 -04001364
SJWf93f5f32020-05-05 07:27:56 -05001365 // Generate the spec constant.
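    // The array length defaults to 1; clients can override it through the
    // SpecId decoration attached below.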
1366 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05001367 Ops << Type::getInt32Ty(Context) << 1;
SJWf93f5f32020-05-05 07:27:56 -05001368 SPIRVID ArraySizeID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
Alan Baker202c8c72018-08-13 13:47:44 -04001369
SJWf93f5f32020-05-05 07:27:56 -05001370 // Generate the array type.
1371 Type *ElemTy = arg->getType()->getPointerElementType();
1372 Ops.clear();
1373 // The element type must have been created.
SJW01901d92020-05-21 08:58:31 -05001374 Ops << ElemTy << ArraySizeID;
SJWf93f5f32020-05-05 07:27:56 -05001375
1376 SPIRVID ArrayTypeID = addSPIRVInst<kTypes>(spv::OpTypeArray, Ops);
1377
1378 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05001379 Ops << spv::StorageClassWorkgroup << ArrayTypeID;
SJWf93f5f32020-05-05 07:27:56 -05001380 SPIRVID PtrArrayTypeID = addSPIRVInst<kTypes>(spv::OpTypePointer, Ops);
1381
1382 // Generate OpVariable.
1383 //
1384 // Ops[0] : Result Type ID
1385 // Ops[1] : Storage Class
1386 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05001387 Ops << PtrArrayTypeID << spv::StorageClassWorkgroup;
SJWf93f5f32020-05-05 07:27:56 -05001388
1389 SPIRVID VariableID = addSPIRVInst<kGlobalVariables>(spv::OpVariable, Ops);
1390
1391 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05001392 Ops << ArraySizeID << spv::DecorationSpecId << spec_id;
SJWf93f5f32020-05-05 07:27:56 -05001393 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
1394
1395 LocalArgInfo info{VariableID, ElemTy, ArraySizeID,
1396 ArrayTypeID, PtrArrayTypeID, spec_id};
1397 LocalSpecIdInfoMap[spec_id] = info;
Alan Baker202c8c72018-08-13 13:47:44 -04001398 }
1399}
1400
David Neto22f144c2017-06-12 14:26:21 -04001401void SPIRVProducerPass::FindType(Type *Ty) {
1402 TypeList &TyList = getTypeList();
1403
1404 if (0 != TyList.idFor(Ty)) {
1405 return;
1406 }
1407
1408 if (Ty->isPointerTy()) {
1409 auto AddrSpace = Ty->getPointerAddressSpace();
1410 if ((AddressSpace::Constant == AddrSpace) ||
1411 (AddressSpace::Global == AddrSpace)) {
1412 auto PointeeTy = Ty->getPointerElementType();
1413
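      // Opaque struct pointees (sampler and image types) are recorded with a
      // pointer type in the UniformConstant address space instead.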
1414 if (PointeeTy->isStructTy() &&
1415 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1416 FindType(PointeeTy);
1417 auto ActualPointerTy =
1418 PointeeTy->getPointerTo(AddressSpace::UniformConstant);
1419 FindType(ActualPointerTy);
1420 return;
1421 }
1422 }
1423 }
1424
David Neto862b7d82018-06-14 18:48:37 -04001425  // By convention, an LLVM array type with 0 elements maps to
1426  // OpTypeRuntimeArray. Otherwise, it maps to OpTypeArray, which has a
1427  // constant number of elements, so we also need the i32 type used for
1428  // that length constant.
1429 if (auto *arrayTy = dyn_cast<ArrayType>(Ty)) {
1430 if (arrayTy->getNumElements() > 0) {
1431 LLVMContext &Context = Ty->getContext();
1432 FindType(Type::getInt32Ty(Context));
1433 }
David Neto22f144c2017-06-12 14:26:21 -04001434 }
1435
1436 for (Type *SubTy : Ty->subtypes()) {
1437 FindType(SubTy);
1438 }
1439
1440 TyList.insert(Ty);
1441}
1442
David Neto22f144c2017-06-12 14:26:21 -04001443spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
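  // Map an OpenCL address space onto the SPIR-V storage class used when
  // targeting Vulkan.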
1444 switch (AddrSpace) {
1445 default:
1446 llvm_unreachable("Unsupported OpenCL address space");
1447 case AddressSpace::Private:
1448 return spv::StorageClassFunction;
1449 case AddressSpace::Global:
David Neto22f144c2017-06-12 14:26:21 -04001450 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001451 case AddressSpace::Constant:
1452 return clspv::Option::ConstantArgsInUniformBuffer()
1453 ? spv::StorageClassUniform
1454 : spv::StorageClassStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -04001455 case AddressSpace::Input:
1456 return spv::StorageClassInput;
1457 case AddressSpace::Local:
1458 return spv::StorageClassWorkgroup;
1459 case AddressSpace::UniformConstant:
1460 return spv::StorageClassUniformConstant;
David Neto9ed8e2f2018-03-24 06:47:24 -07001461 case AddressSpace::Uniform:
David Netoe439d702018-03-23 13:14:08 -07001462 return spv::StorageClassUniform;
David Neto22f144c2017-06-12 14:26:21 -04001463 case AddressSpace::ModuleScopePrivate:
1464 return spv::StorageClassPrivate;
Kévin Petitbbbda972020-03-03 19:16:31 +00001465 case AddressSpace::PushConstant:
1466 return spv::StorageClassPushConstant;
David Neto22f144c2017-06-12 14:26:21 -04001467 }
1468}
1469
David Neto862b7d82018-06-14 18:48:37 -04001470spv::StorageClass
1471SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
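  // Map a kernel argument kind to the storage class of the resource variable
  // that backs it.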
1472 switch (arg_kind) {
1473 case clspv::ArgKind::Buffer:
1474 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001475 case clspv::ArgKind::BufferUBO:
1476 return spv::StorageClassUniform;
David Neto862b7d82018-06-14 18:48:37 -04001477 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001478 return spv::StorageClassStorageBuffer;
1479 case clspv::ArgKind::PodUBO:
1480 return spv::StorageClassUniform;
1481 case clspv::ArgKind::PodPushConstant:
1482 return spv::StorageClassPushConstant;
David Neto862b7d82018-06-14 18:48:37 -04001483 case clspv::ArgKind::Local:
1484 return spv::StorageClassWorkgroup;
1485 case clspv::ArgKind::ReadOnlyImage:
1486 case clspv::ArgKind::WriteOnlyImage:
1487 case clspv::ArgKind::Sampler:
1488 return spv::StorageClassUniformConstant;
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001489 default:
1490 llvm_unreachable("Unsupported storage class for argument kind");
David Neto862b7d82018-06-14 18:48:37 -04001491 }
1492}
1493
David Neto22f144c2017-06-12 14:26:21 -04001494spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1495 return StringSwitch<spv::BuiltIn>(Name)
1496 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1497 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1498 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1499 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1500 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
alan-bakerbed3a882020-04-21 14:42:41 -04001501 .Case("__spirv_WorkDim", spv::BuiltInWorkDim)
alan-bakere1996972020-05-04 08:38:12 -04001502 .Case("__spirv_GlobalOffset", spv::BuiltInGlobalOffset)
David Neto22f144c2017-06-12 14:26:21 -04001503 .Default(spv::BuiltInMax);
1504}
1505
SJW01901d92020-05-21 08:58:31 -05001506SPIRVID SPIRVProducerPass::getOpExtInstImportID() {
1507 if (OpExtInstImportID == 0) {
1508 //
1509 // Generate OpExtInstImport.
1510 //
1511 // Ops[0] ... Ops[n] = Name (Literal String)
David Neto22f144c2017-06-12 14:26:21 -04001512
SJW01901d92020-05-21 08:58:31 -05001513 OpExtInstImportID =
1514 addSPIRVInst<kImports>(spv::OpExtInstImport, "GLSL.std.450");
1515 }
1516 return OpExtInstImportID;
SJWf93f5f32020-05-05 07:27:56 -05001517}
1518
SJW01901d92020-05-21 08:58:31 -05001519SPIRVID SPIRVProducerPass::getSPIRVType(Type *Ty) {
SJWf93f5f32020-05-05 07:27:56 -05001520 auto TI = TypeMap.find(Ty);
1521 if (TI != TypeMap.end()) {
SJW01901d92020-05-21 08:58:31 -05001522 assert(TI->second.isValid());
SJWf93f5f32020-05-05 07:27:56 -05001523 return TI->second;
1524 }
1525
1526 const auto &DL = module->getDataLayout();
1527
SJW01901d92020-05-21 08:58:31 -05001528 SPIRVID RID;
SJWf93f5f32020-05-05 07:27:56 -05001529
1530 switch (Ty->getTypeID()) {
1531 default: {
1532 Ty->print(errs());
1533 llvm_unreachable("Unsupported type???");
1534 break;
1535 }
1536 case Type::MetadataTyID:
1537 case Type::LabelTyID: {
1538 // Ignore these types.
1539 break;
1540 }
1541 case Type::PointerTyID: {
1542 PointerType *PTy = cast<PointerType>(Ty);
1543 unsigned AddrSpace = PTy->getAddressSpace();
1544
1545 if (AddrSpace != AddressSpace::UniformConstant) {
1546 auto PointeeTy = PTy->getElementType();
1547 if (PointeeTy->isStructTy() &&
1548 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1549 // TODO(sjw): assert always an image?
1550 RID = getSPIRVType(PointeeTy);
1551 break;
1552 }
1553 }
1554
1555 // For the purposes of our Vulkan SPIR-V type system, constant and global
1556 // are conflated.
1557 if (AddressSpace::Constant == AddrSpace) {
1558 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1559 AddrSpace = AddressSpace::Global;
1560        // Check to see if we already created this type (for instance, if we
1561        // had a constant <type>* and a global <type>*, the SPIR-V type would
1562        // be created for one of them and shared by both).
1563 auto GlobalTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
1564 if (0 < TypeMap.count(GlobalTy)) {
1565 RID = TypeMap[GlobalTy];
1566 break;
1567 }
1568 }
1569 } else if (AddressSpace::Global == AddrSpace) {
1570 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1571 AddrSpace = AddressSpace::Constant;
1572
1573        // Check to see if we already created this type (for instance, if we
1574        // had a constant <type>* and a global <type>*, the SPIR-V type would
1575        // be created for one of them and shared by both).
1576 auto ConstantTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
1577 if (0 < TypeMap.count(ConstantTy)) {
1578 RID = TypeMap[ConstantTy];
1579 break;
1580 }
1581 }
1582 }
1583
1584 //
1585 // Generate OpTypePointer.
1586 //
1587
1588 // OpTypePointer
1589 // Ops[0] = Storage Class
1590 // Ops[1] = Element Type ID
1591 SPIRVOperandVec Ops;
1592
SJW01901d92020-05-21 08:58:31 -05001593 Ops << GetStorageClass(AddrSpace) << PTy->getElementType();
SJWf93f5f32020-05-05 07:27:56 -05001594
1595 RID = addSPIRVInst<kTypes>(spv::OpTypePointer, Ops);
1596 break;
1597 }
1598 case Type::StructTyID: {
1599 StructType *STy = cast<StructType>(Ty);
1600
1601 // Handle sampler type.
1602 if (STy->isOpaque()) {
1603 if (STy->getName().equals("opencl.sampler_t")) {
1604 //
1605 // Generate OpTypeSampler
1606 //
1607 // Empty Ops.
1608
1609 RID = addSPIRVInst<kTypes>(spv::OpTypeSampler);
1610 break;
1611 } else if (STy->getName().startswith("opencl.image1d_ro_t") ||
1612 STy->getName().startswith("opencl.image1d_wo_t") ||
1613 STy->getName().startswith("opencl.image1d_array_ro_t") ||
1614 STy->getName().startswith("opencl.image1d_array_wo_t") ||
1615 STy->getName().startswith("opencl.image2d_ro_t") ||
1616 STy->getName().startswith("opencl.image2d_wo_t") ||
1617 STy->getName().startswith("opencl.image2d_array_ro_t") ||
1618 STy->getName().startswith("opencl.image2d_array_wo_t") ||
1619 STy->getName().startswith("opencl.image3d_ro_t") ||
1620 STy->getName().startswith("opencl.image3d_wo_t")) {
SJW01901d92020-05-21 08:58:31 -05001621
1622 if (STy->getName().contains("_wo_t")) {
1623 addCapability(spv::CapabilityStorageImageWriteWithoutFormat);
1624 }
1625 if (STy->getName().startswith("opencl.image1d_")) {
1626 if (STy->getName().contains(".sampled"))
1627 addCapability(spv::CapabilitySampled1D);
1628 else
1629 addCapability(spv::CapabilityImage1D);
1630 }
1631
SJWf93f5f32020-05-05 07:27:56 -05001632 //
1633 // Generate OpTypeImage
1634 //
1635 // Ops[0] = Sampled Type ID
1636 // Ops[1] = Dim ID
1637 // Ops[2] = Depth (Literal Number)
1638 // Ops[3] = Arrayed (Literal Number)
1639 // Ops[4] = MS (Literal Number)
1640 // Ops[5] = Sampled (Literal Number)
1641 // Ops[6] = Image Format ID
1642 //
1643 SPIRVOperandVec Ops;
1644
SJW01901d92020-05-21 08:58:31 -05001645 SPIRVID SampledTyID;
SJWf93f5f32020-05-05 07:27:56 -05001646 if (STy->getName().contains(".float")) {
1647 SampledTyID = getSPIRVType(Type::getFloatTy(Ty->getContext()));
1648 } else if (STy->getName().contains(".uint")) {
1649 SampledTyID = getSPIRVType(Type::getInt32Ty(Ty->getContext()));
1650 } else if (STy->getName().contains(".int")) {
1651 // Generate a signed 32-bit integer if necessary.
1652 if (int32ID == 0) {
1653 SPIRVOperandVec intOps;
SJW01901d92020-05-21 08:58:31 -05001654 intOps << 32 << 1;
SJWf93f5f32020-05-05 07:27:56 -05001655 int32ID = addSPIRVInst<kTypes>(spv::OpTypeInt, intOps);
1656 }
1657 SampledTyID = int32ID;
1658
1659 // Generate a vec4 of the signed int if necessary.
1660 if (v4int32ID == 0) {
1661 SPIRVOperandVec vecOps;
SJW01901d92020-05-21 08:58:31 -05001662 vecOps << int32ID << 4;
SJWf93f5f32020-05-05 07:27:56 -05001663 v4int32ID = addSPIRVInst<kTypes>(spv::OpTypeVector, vecOps);
1664 }
1665 } else {
1666 // This was likely an UndefValue.
1667 SampledTyID = getSPIRVType(Type::getFloatTy(Ty->getContext()));
1668 }
SJW01901d92020-05-21 08:58:31 -05001669 Ops << SampledTyID;
SJWf93f5f32020-05-05 07:27:56 -05001670
1671 spv::Dim DimID = spv::Dim2D;
1672 if (STy->getName().startswith("opencl.image1d_ro_t") ||
1673 STy->getName().startswith("opencl.image1d_wo_t") ||
1674 STy->getName().startswith("opencl.image1d_array_ro_t") ||
1675 STy->getName().startswith("opencl.image1d_array_wo_t")) {
1676 DimID = spv::Dim1D;
1677 } else if (STy->getName().startswith("opencl.image3d_ro_t") ||
1678 STy->getName().startswith("opencl.image3d_wo_t")) {
1679 DimID = spv::Dim3D;
1680 }
SJW01901d92020-05-21 08:58:31 -05001681 Ops << DimID;
SJWf93f5f32020-05-05 07:27:56 -05001682
1683 // TODO: Set up Depth.
SJW01901d92020-05-21 08:58:31 -05001684 Ops << 0;
SJWf93f5f32020-05-05 07:27:56 -05001685
1686 uint32_t arrayed = STy->getName().contains("_array_") ? 1 : 0;
SJW01901d92020-05-21 08:58:31 -05001687 Ops << arrayed;
SJWf93f5f32020-05-05 07:27:56 -05001688
1689 // TODO: Set up MS.
SJW01901d92020-05-21 08:58:31 -05001690 Ops << 0;
SJWf93f5f32020-05-05 07:27:56 -05001691
1692 // Set up Sampled.
1693 //
1694 // From Spec
1695 //
1696 // 0 indicates this is only known at run time, not at compile time
1697 // 1 indicates will be used with sampler
1698 // 2 indicates will be used without a sampler (a storage image)
1699 uint32_t Sampled = 1;
1700 if (!STy->getName().contains(".sampled")) {
1701 Sampled = 2;
1702 }
SJW01901d92020-05-21 08:58:31 -05001703 Ops << Sampled;
SJWf93f5f32020-05-05 07:27:56 -05001704
1705 // TODO: Set up Image Format.
SJW01901d92020-05-21 08:58:31 -05001706 Ops << spv::ImageFormatUnknown;
SJWf93f5f32020-05-05 07:27:56 -05001707
1708 RID = addSPIRVInst<kTypes>(spv::OpTypeImage, Ops);
1709
1710 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05001711 Ops << RID;
SJWf93f5f32020-05-05 07:27:56 -05001712
1713 getImageTypeMap()[Ty] =
1714 addSPIRVInst<kTypes>(spv::OpTypeSampledImage, Ops);
1715 break;
1716 }
1717 }
1718
1719 //
1720 // Generate OpTypeStruct
1721 //
1722 // Ops[0] ... Ops[n] = Member IDs
1723 SPIRVOperandVec Ops;
1724
1725 for (auto *EleTy : STy->elements()) {
SJW01901d92020-05-21 08:58:31 -05001726 Ops << EleTy;
SJWf93f5f32020-05-05 07:27:56 -05001727 }
1728
1729 RID = addSPIRVInst<kTypes>(spv::OpTypeStruct, Ops);
1730
1731 // Generate OpMemberDecorate.
1732 if (TypesNeedingLayout.idFor(STy)) {
1733 for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
1734 MemberIdx++) {
1735 // Ops[0] = Structure Type ID
1736 // Ops[1] = Member Index(Literal Number)
1737 // Ops[2] = Decoration (Offset)
1738 // Ops[3] = Byte Offset (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05001739 const auto ByteOffset =
1740 GetExplicitLayoutStructMemberOffset(STy, MemberIdx, DL);
1741
SJW01901d92020-05-21 08:58:31 -05001742 Ops.clear();
1743 Ops << RID << MemberIdx << spv::DecorationOffset << ByteOffset;
SJWf93f5f32020-05-05 07:27:56 -05001744
1745 addSPIRVInst<kAnnotations>(spv::OpMemberDecorate, Ops);
1746 }
1747 }
1748
1749 // Generate OpDecorate.
1750 if (StructTypesNeedingBlock.idFor(STy)) {
1751 Ops.clear();
1752 // Use Block decorations with StorageBuffer storage class.
SJW01901d92020-05-21 08:58:31 -05001753 Ops << RID << spv::DecorationBlock;
SJWf93f5f32020-05-05 07:27:56 -05001754
1755 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
1756 }
1757 break;
1758 }
1759 case Type::IntegerTyID: {
alan-bakere2a62752020-07-09 22:53:23 -04001760 uint32_t bit_width = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
SJWf93f5f32020-05-05 07:27:56 -05001761
alan-bakere2a62752020-07-09 22:53:23 -04001762 if (clspv::Option::Int8Support() && bit_width == 8) {
SJW01901d92020-05-21 08:58:31 -05001763 addCapability(spv::CapabilityInt8);
alan-bakere2a62752020-07-09 22:53:23 -04001764 } else if (bit_width == 16) {
SJW01901d92020-05-21 08:58:31 -05001765 addCapability(spv::CapabilityInt16);
alan-bakere2a62752020-07-09 22:53:23 -04001766 } else if (bit_width == 64) {
SJW01901d92020-05-21 08:58:31 -05001767 addCapability(spv::CapabilityInt64);
1768 }
1769
alan-bakere2a62752020-07-09 22:53:23 -04001770 if (bit_width == 1) {
SJWf93f5f32020-05-05 07:27:56 -05001771 RID = addSPIRVInst<kTypes>(spv::OpTypeBool);
1772 } else {
alan-bakere2a62752020-07-09 22:53:23 -04001773 if (!clspv::Option::Int8Support() && bit_width == 8) {
SJWf93f5f32020-05-05 07:27:56 -05001774 // i8 is added to TypeMap as i32.
1775 RID = getSPIRVType(Type::getIntNTy(Ty->getContext(), 32));
1776 } else {
1777 SPIRVOperandVec Ops;
alan-bakere2a62752020-07-09 22:53:23 -04001778 Ops << bit_width << 0 /* not signed */;
SJWf93f5f32020-05-05 07:27:56 -05001779 RID = addSPIRVInst<kTypes>(spv::OpTypeInt, Ops);
1780 }
1781 }
1782 break;
1783 }
1784 case Type::HalfTyID:
1785 case Type::FloatTyID:
1786 case Type::DoubleTyID: {
alan-bakere2a62752020-07-09 22:53:23 -04001787 uint32_t bit_width = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
1788 if (bit_width == 16) {
SJW01901d92020-05-21 08:58:31 -05001789 addCapability(spv::CapabilityFloat16);
alan-bakere2a62752020-07-09 22:53:23 -04001790 } else if (bit_width == 64) {
SJW01901d92020-05-21 08:58:31 -05001791 addCapability(spv::CapabilityFloat64);
1792 }
1793
SJWf93f5f32020-05-05 07:27:56 -05001794 SPIRVOperandVec Ops;
alan-bakere2a62752020-07-09 22:53:23 -04001795 Ops << bit_width;
SJWf93f5f32020-05-05 07:27:56 -05001796
1797 RID = addSPIRVInst<kTypes>(spv::OpTypeFloat, Ops);
1798 break;
1799 }
1800 case Type::ArrayTyID: {
1801 ArrayType *ArrTy = cast<ArrayType>(Ty);
1802 const uint64_t Length = ArrTy->getArrayNumElements();
1803 if (Length == 0) {
1804 // By convention, map it to a RuntimeArray.
1805
1806 Type *EleTy = ArrTy->getArrayElementType();
1807
1808 //
1809 // Generate OpTypeRuntimeArray.
1810 //
1811 // OpTypeRuntimeArray
1812 // Ops[0] = Element Type ID
1813 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05001814 Ops << EleTy;
SJWf93f5f32020-05-05 07:27:56 -05001815
1816 RID = addSPIRVInst<kTypes>(spv::OpTypeRuntimeArray, Ops);
1817
1818 if (Hack_generate_runtime_array_stride_early) {
1819 // Generate OpDecorate.
1820
1821 // Ops[0] = Target ID
1822 // Ops[1] = Decoration (ArrayStride)
1823 // Ops[2] = Stride Number(Literal Number)
1824 Ops.clear();
1825
SJW01901d92020-05-21 08:58:31 -05001826 Ops << RID << spv::DecorationArrayStride
1827 << static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL));
SJWf93f5f32020-05-05 07:27:56 -05001828
1829 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
1830 }
1831
1832 } else {
1833
1834 //
1835 // Generate OpConstant and OpTypeArray.
1836 //
1837
1838 //
1839 // Generate OpConstant for array length.
1840 //
1841 // Add constant for length to constant list.
1842 Constant *CstLength =
1843 ConstantInt::get(Type::getInt32Ty(module->getContext()), Length);
SJWf93f5f32020-05-05 07:27:56 -05001844
1845 // Remember to generate ArrayStride later
1846 getTypesNeedingArrayStride().insert(Ty);
1847
1848 //
1849 // Generate OpTypeArray.
1850 //
1851 // Ops[0] = Element Type ID
1852 // Ops[1] = Array Length Constant ID
1853 SPIRVOperandVec Ops;
1854
SJW01901d92020-05-21 08:58:31 -05001855 Ops << ArrTy->getElementType() << CstLength;
SJWf93f5f32020-05-05 07:27:56 -05001856
1857 RID = addSPIRVInst<kTypes>(spv::OpTypeArray, Ops);
1858 }
1859 break;
1860 }
1861 case Type::FixedVectorTyID: {
1862 auto VecTy = cast<VectorType>(Ty);
1863 // <4 x i8> is changed to i32 if i8 is not generally supported.
1864 if (!clspv::Option::Int8Support() &&
1865 VecTy->getElementType() == Type::getInt8Ty(module->getContext())) {
1866 if (VecTy->getNumElements() == 4) {
1867 RID = getSPIRVType(VecTy->getElementType());
1868 break;
1869 } else {
1870 Ty->print(errs());
1871        llvm_unreachable("Unsupported i8 vector width");
1872 }
1873 }
1874
1875 // Ops[0] = Component Type ID
1876 // Ops[1] = Component Count (Literal Number)
1877 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05001878 Ops << VecTy->getElementType() << VecTy->getNumElements();
SJWf93f5f32020-05-05 07:27:56 -05001879
1880 RID = addSPIRVInst<kTypes>(spv::OpTypeVector, Ops);
1881 break;
1882 }
1883 case Type::VoidTyID: {
1884 RID = addSPIRVInst<kTypes>(spv::OpTypeVoid);
1885 break;
1886 }
1887 case Type::FunctionTyID: {
1888 // Generate SPIRV instruction for function type.
1889 FunctionType *FTy = cast<FunctionType>(Ty);
1890
1891 // Ops[0] = Return Type ID
1892 // Ops[1] ... Ops[n] = Parameter Type IDs
1893 SPIRVOperandVec Ops;
1894
1895 // Find SPIRV instruction for return type
SJW01901d92020-05-21 08:58:31 -05001896 Ops << FTy->getReturnType();
SJWf93f5f32020-05-05 07:27:56 -05001897
1898 // Find SPIRV instructions for parameter types
1899 for (unsigned k = 0; k < FTy->getNumParams(); k++) {
1900 // Find SPIRV instruction for parameter type.
1901 auto ParamTy = FTy->getParamType(k);
1902 if (ParamTy->isPointerTy()) {
1903 auto PointeeTy = ParamTy->getPointerElementType();
1904 if (PointeeTy->isStructTy() &&
1905 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1906 ParamTy = PointeeTy;
1907 }
1908 }
1909
SJW01901d92020-05-21 08:58:31 -05001910 Ops << ParamTy;
SJWf93f5f32020-05-05 07:27:56 -05001911 }
1912
1913 RID = addSPIRVInst<kTypes>(spv::OpTypeFunction, Ops);
1914 break;
1915 }
1916 }
1917
SJW01901d92020-05-21 08:58:31 -05001918 if (RID.isValid()) {
SJWf93f5f32020-05-05 07:27:56 -05001919 TypeMap[Ty] = RID;
1920 }
1921 return RID;
David Neto22f144c2017-06-12 14:26:21 -04001922}
1923
SJW77b87ad2020-04-21 14:37:52 -05001924void SPIRVProducerPass::GenerateSPIRVTypes() {
David Neto22f144c2017-06-12 14:26:21 -04001925 for (Type *Ty : getTypeList()) {
SJWf93f5f32020-05-05 07:27:56 -05001926 getSPIRVType(Ty);
David Netoc6f3ab22018-04-06 18:02:31 -04001927 }
David Neto22f144c2017-06-12 14:26:21 -04001928}
1929
SJWf93f5f32020-05-05 07:27:56 -05001930SPIRVID SPIRVProducerPass::getSPIRVConstant(Constant *Cst) {
David Neto22f144c2017-06-12 14:26:21 -04001931 ValueMapType &VMap = getValueMap();
David Neto482550a2018-03-24 05:21:07 -07001932 const bool hack_undef = clspv::Option::HackUndef();
David Neto22f144c2017-06-12 14:26:21 -04001933
SJW01901d92020-05-21 08:58:31 -05001934 SPIRVID RID;
David Neto22f144c2017-06-12 14:26:21 -04001935
SJWf93f5f32020-05-05 07:27:56 -05001936 //
1937 // Generate OpConstant.
1938 //
1939 // Ops[0] = Result Type ID
1940 // Ops[1] .. Ops[n] = Values LiteralNumber
1941 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04001942
SJW01901d92020-05-21 08:58:31 -05001943 Ops << Cst->getType();
David Neto22f144c2017-06-12 14:26:21 -04001944
SJWf93f5f32020-05-05 07:27:56 -05001945 std::vector<uint32_t> LiteralNum;
1946 spv::Op Opcode = spv::OpNop;
David Neto22f144c2017-06-12 14:26:21 -04001947
SJWf93f5f32020-05-05 07:27:56 -05001948 if (isa<UndefValue>(Cst)) {
David Neto22f144c2017-06-12 14:26:21 -04001949 // Ops[0] = Result Type ID
SJWf93f5f32020-05-05 07:27:56 -05001950 Opcode = spv::OpUndef;
1951 if (hack_undef && IsTypeNullable(Cst->getType())) {
1952 Opcode = spv::OpConstantNull;
1953 }
1954 } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
alan-bakere2a62752020-07-09 22:53:23 -04001955 unsigned bit_width = CI->getBitWidth();
1956 if (bit_width == 1) {
SJWf93f5f32020-05-05 07:27:56 -05001957 // If the bitwidth of constant is 1, generate OpConstantTrue or
1958 // OpConstantFalse.
1959 if (CI->getZExtValue()) {
1960 // Ops[0] = Result Type ID
1961 Opcode = spv::OpConstantTrue;
David Neto22f144c2017-06-12 14:26:21 -04001962 } else {
SJWf93f5f32020-05-05 07:27:56 -05001963 // Ops[0] = Result Type ID
1964 Opcode = spv::OpConstantFalse;
David Neto22f144c2017-06-12 14:26:21 -04001965 }
SJWf93f5f32020-05-05 07:27:56 -05001966 } else {
1967 auto V = CI->getZExtValue();
1968 LiteralNum.push_back(V & 0xFFFFFFFF);
1969
alan-bakere2a62752020-07-09 22:53:23 -04001970 if (bit_width > 32) {
SJWf93f5f32020-05-05 07:27:56 -05001971 LiteralNum.push_back(V >> 32);
David Neto22f144c2017-06-12 14:26:21 -04001972 }
1973
1974 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04001975
SJW01901d92020-05-21 08:58:31 -05001976 Ops << LiteralNum;
SJWf93f5f32020-05-05 07:27:56 -05001977 }
1978 } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
1979 uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
1980 Type *CFPTy = CFP->getType();
1981 if (CFPTy->isFloatTy()) {
1982 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
1983 } else if (CFPTy->isDoubleTy()) {
1984 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
1985 LiteralNum.push_back(FPVal >> 32);
1986 } else if (CFPTy->isHalfTy()) {
1987 LiteralNum.push_back(FPVal & 0xFFFF);
1988 } else {
1989 CFPTy->print(errs());
1990 llvm_unreachable("Implement this ConstantFP Type");
1991 }
David Neto22f144c2017-06-12 14:26:21 -04001992
SJWf93f5f32020-05-05 07:27:56 -05001993 Opcode = spv::OpConstant;
David Neto49351ac2017-08-26 17:32:20 -04001994
SJW01901d92020-05-21 08:58:31 -05001995 Ops << LiteralNum;
SJWf93f5f32020-05-05 07:27:56 -05001996 } else if (isa<ConstantDataSequential>(Cst) &&
1997 cast<ConstantDataSequential>(Cst)->isString()) {
1998 Cst->print(errs());
1999 llvm_unreachable("Implement this Constant");
David Neto49351ac2017-08-26 17:32:20 -04002000
SJWf93f5f32020-05-05 07:27:56 -05002001 } else if (const ConstantDataSequential *CDS =
2002 dyn_cast<ConstantDataSequential>(Cst)) {
2003 // Let's convert <4 x i8> constant to int constant specially.
2004 // This case occurs when all the values are specified as constant
2005 // ints.
2006 Type *CstTy = Cst->getType();
2007 if (is4xi8vec(CstTy)) {
2008 LLVMContext &Context = CstTy->getContext();
David Neto49351ac2017-08-26 17:32:20 -04002009
SJWf93f5f32020-05-05 07:27:56 -05002010 //
2011 // Generate OpConstant with OpTypeInt 32 0.
2012 //
2013 uint32_t IntValue = 0;
2014 for (unsigned k = 0; k < 4; k++) {
2015 const uint64_t Val = CDS->getElementAsInteger(k);
2016 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto49351ac2017-08-26 17:32:20 -04002017 }
2018
SJWf93f5f32020-05-05 07:27:56 -05002019 Type *i32 = Type::getInt32Ty(Context);
2020 Constant *CstInt = ConstantInt::get(i32, IntValue);
2021 RID = getSPIRVValue(CstInt);
2022 } else {
2023
David Neto49351ac2017-08-26 17:32:20 -04002024 // A normal constant-data-sequential case.
David Neto22f144c2017-06-12 14:26:21 -04002025 for (unsigned k = 0; k < CDS->getNumElements(); k++) {
SJW01901d92020-05-21 08:58:31 -05002026 Ops << CDS->getElementAsConstant(k);
David Neto22f144c2017-06-12 14:26:21 -04002027 }
2028
2029 Opcode = spv::OpConstantComposite;
SJWf93f5f32020-05-05 07:27:56 -05002030 }
2031 } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
2032 // Let's convert <4 x i8> constant to int constant specially.
2033 // This case occurs when at least one of the values is an undef.
2034 Type *CstTy = Cst->getType();
2035 if (is4xi8vec(CstTy)) {
2036 LLVMContext &Context = CstTy->getContext();
David Neto22f144c2017-06-12 14:26:21 -04002037
SJWf93f5f32020-05-05 07:27:56 -05002038 //
2039 // Generate OpConstant with OpTypeInt 32 0.
2040 //
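      // Undef elements contribute zero bytes; element 0 again ends up in the
      // most significant byte.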
2041 uint32_t IntValue = 0;
2042 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
2043 I != E; ++I) {
2044 uint64_t Val = 0;
2045 const Value *CV = *I;
2046 if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
2047 Val = CI2->getZExtValue();
David Neto22f144c2017-06-12 14:26:21 -04002048 }
SJWf93f5f32020-05-05 07:27:56 -05002049 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto22f144c2017-06-12 14:26:21 -04002050 }
2051
SJWf93f5f32020-05-05 07:27:56 -05002052 Type *i32 = Type::getInt32Ty(Context);
2053 Constant *CstInt = ConstantInt::get(i32, IntValue);
2054 RID = getSPIRVValue(CstInt);
2055 } else {
2056
David Neto22f144c2017-06-12 14:26:21 -04002057 // We use a constant composite in SPIR-V for our constant aggregate in
2058 // LLVM.
2059 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002060
2061 for (unsigned k = 0; k < CA->getNumOperands(); k++) {
David Neto22f144c2017-06-12 14:26:21 -04002062 // And add an operand to the composite we are constructing
SJW01901d92020-05-21 08:58:31 -05002063 Ops << CA->getAggregateElement(k);
David Neto22f144c2017-06-12 14:26:21 -04002064 }
David Neto22f144c2017-06-12 14:26:21 -04002065 }
SJWf93f5f32020-05-05 07:27:56 -05002066 } else if (Cst->isNullValue()) {
2067 Opcode = spv::OpConstantNull;
2068 } else {
2069 Cst->print(errs());
2070 llvm_unreachable("Unsupported Constant???");
2071 }
David Neto22f144c2017-06-12 14:26:21 -04002072
SJWf93f5f32020-05-05 07:27:56 -05002073 if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
2074 // Null pointer requires variable pointers.
2075 setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
2076 }
alan-baker5b86ed72019-02-15 08:26:50 -05002077
SJWf93f5f32020-05-05 07:27:56 -05002078 if (RID == 0) {
2079 RID = addSPIRVInst<kConstants>(Opcode, Ops);
2080 }
2081
2082 VMap[Cst] = RID;
2083
2084 return RID;
2085}
2086
2087SPIRVID SPIRVProducerPass::getSPIRVValue(Value *V) {
2088 auto II = ValueMap.find(V);
2089 if (II != ValueMap.end()) {
SJW01901d92020-05-21 08:58:31 -05002090 assert(II->second.isValid());
SJWf93f5f32020-05-05 07:27:56 -05002091 return II->second;
2092 }
2093 if (Constant *Cst = dyn_cast<Constant>(V)) {
2094 return getSPIRVConstant(Cst);
2095 } else {
2096 llvm_unreachable("Variable not found");
2097 }
2098}
2099
SJW77b87ad2020-04-21 14:37:52 -05002100void SPIRVProducerPass::GenerateSamplers() {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002101 auto &sampler_map = getSamplerMap();
alan-baker09cb9802019-12-10 13:16:27 -05002102 SamplerLiteralToIDMap.clear();
David Neto862b7d82018-06-14 18:48:37 -04002103 DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
2104 DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;
David Neto22f144c2017-06-12 14:26:21 -04002105
David Neto862b7d82018-06-14 18:48:37 -04002106  // The sampler map may contain samplers that are not used in the
2107  // translation unit. We still need to allocate variables and bindings
2108  // for them.
2109 DenseSet<unsigned> used_bindings;
David Neto22f144c2017-06-12 14:26:21 -04002110
SJW77b87ad2020-04-21 14:37:52 -05002111 auto *var_fn = module->getFunction(clspv::LiteralSamplerFunction());
alan-baker09cb9802019-12-10 13:16:27 -05002112 // Return if there are no literal samplers.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002113 if (!var_fn)
2114 return;
alan-baker09cb9802019-12-10 13:16:27 -05002115
David Neto862b7d82018-06-14 18:48:37 -04002116 for (auto user : var_fn->users()) {
2117 // Populate SamplerLiteralToDescriptorSetMap and
2118 // SamplerLiteralToBindingMap.
2119 //
2120 // Look for calls like
2121 // call %opencl.sampler_t addrspace(2)*
2122 // @clspv.sampler.var.literal(
2123 // i32 descriptor,
2124 // i32 binding,
alan-baker09cb9802019-12-10 13:16:27 -05002125 // i32 (index-into-sampler-map|sampler_mask))
alan-bakerb6b09dc2018-11-08 16:59:28 -05002126 if (auto *call = dyn_cast<CallInst>(user)) {
alan-baker09cb9802019-12-10 13:16:27 -05002127 const auto third_param = static_cast<unsigned>(
alan-bakerb6b09dc2018-11-08 16:59:28 -05002128 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
alan-baker09cb9802019-12-10 13:16:27 -05002129 auto sampler_value = third_param;
2130 if (clspv::Option::UseSamplerMap()) {
2131 if (third_param >= sampler_map.size()) {
2132 errs() << "Out of bounds index to sampler map: " << third_param;
2133 llvm_unreachable("bad sampler init: out of bounds");
2134 }
2135 sampler_value = sampler_map[third_param].first;
David Neto862b7d82018-06-14 18:48:37 -04002136 }
2137
David Neto862b7d82018-06-14 18:48:37 -04002138 const auto descriptor_set = static_cast<unsigned>(
2139 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
2140 const auto binding = static_cast<unsigned>(
2141 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
2142
2143 SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
2144 SamplerLiteralToBindingMap[sampler_value] = binding;
2145 used_bindings.insert(binding);
2146 }
2147 }
2148
alan-baker09cb9802019-12-10 13:16:27 -05002149 DenseSet<size_t> seen;
2150 for (auto user : var_fn->users()) {
2151 if (!isa<CallInst>(user))
2152 continue;
2153
2154 auto call = cast<CallInst>(user);
2155 const unsigned third_param = static_cast<unsigned>(
2156 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
2157
2158 // Already allocated a variable for this value.
2159 if (!seen.insert(third_param).second)
2160 continue;
2161
2162 auto sampler_value = third_param;
2163 if (clspv::Option::UseSamplerMap()) {
2164 sampler_value = sampler_map[third_param].first;
2165 }
2166
David Neto22f144c2017-06-12 14:26:21 -04002167 // Generate OpVariable.
2168 //
2169 // GIDOps[0] : Result Type ID
2170 // GIDOps[1] : Storage Class
SJWf93f5f32020-05-05 07:27:56 -05002171 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002172
SJW01901d92020-05-21 08:58:31 -05002173 Ops << SamplerTy << spv::StorageClassUniformConstant;
David Neto22f144c2017-06-12 14:26:21 -04002174
SJWf93f5f32020-05-05 07:27:56 -05002175 auto sampler_var_id = addSPIRVInst<kGlobalVariables>(spv::OpVariable, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002176
alan-baker09cb9802019-12-10 13:16:27 -05002177 SamplerLiteralToIDMap[sampler_value] = sampler_var_id;
David Neto22f144c2017-06-12 14:26:21 -04002178
David Neto862b7d82018-06-14 18:48:37 -04002179 unsigned descriptor_set;
2180 unsigned binding;
alan-baker09cb9802019-12-10 13:16:27 -05002181 if (SamplerLiteralToBindingMap.find(sampler_value) ==
alan-bakerb6b09dc2018-11-08 16:59:28 -05002182 SamplerLiteralToBindingMap.end()) {
David Neto862b7d82018-06-14 18:48:37 -04002183      // This sampler is not actually used. Assign it the next unused binding.
2184 for (binding = 0; used_bindings.count(binding); binding++)
2185 ;
2186 descriptor_set = 0; // Literal samplers always use descriptor set 0.
2187 used_bindings.insert(binding);
2188 } else {
alan-baker09cb9802019-12-10 13:16:27 -05002189 descriptor_set = SamplerLiteralToDescriptorSetMap[sampler_value];
2190 binding = SamplerLiteralToBindingMap[sampler_value];
alan-bakercff80152019-06-15 00:38:00 -04002191
alan-baker09cb9802019-12-10 13:16:27 -05002192 version0::DescriptorMapEntry::SamplerData sampler_data = {sampler_value};
alan-bakercff80152019-06-15 00:38:00 -04002193 descriptorMapEntries->emplace_back(std::move(sampler_data),
2194 descriptor_set, binding);
David Neto862b7d82018-06-14 18:48:37 -04002195 }
2196
SJW69939d52020-04-16 07:29:07 -05002197 // Ops[0] = Target ID
2198 // Ops[1] = Decoration (DescriptorSet)
2199 // Ops[2] = LiteralNumber according to Decoration
2200 Ops.clear();
2201
SJW01901d92020-05-21 08:58:31 -05002202 Ops << sampler_var_id << spv::DecorationDescriptorSet << descriptor_set;
David Neto22f144c2017-06-12 14:26:21 -04002203
SJWf93f5f32020-05-05 07:27:56 -05002204 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002205
2206 // Ops[0] = Target ID
2207 // Ops[1] = Decoration (Binding)
2208 // Ops[2] = LiteralNumber according to Decoration
2209 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002210 Ops << sampler_var_id << spv::DecorationBinding << binding;
David Neto22f144c2017-06-12 14:26:21 -04002211
SJWf93f5f32020-05-05 07:27:56 -05002212 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002213 }
David Neto862b7d82018-06-14 18:48:37 -04002214}
David Neto22f144c2017-06-12 14:26:21 -04002215
SJW77b87ad2020-04-21 14:37:52 -05002216void SPIRVProducerPass::GenerateResourceVars() {
David Neto862b7d82018-06-14 18:48:37 -04002217 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04002218
David Neto862b7d82018-06-14 18:48:37 -04002219 // Generate variables. Make one for each of resource var info object.
2220 for (auto *info : ModuleOrderedResourceVars) {
2221 Type *type = info->var_fn->getReturnType();
2222 // Remap the address space for opaque types.
2223 switch (info->arg_kind) {
2224 case clspv::ArgKind::Sampler:
2225 case clspv::ArgKind::ReadOnlyImage:
2226 case clspv::ArgKind::WriteOnlyImage:
2227 type = PointerType::get(type->getPointerElementType(),
2228 clspv::AddressSpace::UniformConstant);
2229 break;
2230 default:
2231 break;
2232 }
David Neto22f144c2017-06-12 14:26:21 -04002233
David Neto862b7d82018-06-14 18:48:37 -04002234 const auto sc = GetStorageClassForArgKind(info->arg_kind);
SJWf93f5f32020-05-05 07:27:56 -05002235 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05002236 Ops << type << sc;
David Neto22f144c2017-06-12 14:26:21 -04002237
SJWf93f5f32020-05-05 07:27:56 -05002238 info->var_id = addSPIRVInst<kGlobalVariables>(spv::OpVariable, Ops);
David Neto862b7d82018-06-14 18:48:37 -04002239
2240 // Map calls to the variable-builtin-function.
2241 for (auto &U : info->var_fn->uses()) {
2242 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
2243 const auto set = unsigned(
2244 dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
2245 const auto binding = unsigned(
2246 dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
2247 if (set == info->descriptor_set && binding == info->binding) {
2248 switch (info->arg_kind) {
2249 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002250 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002251 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04002252 case clspv::ArgKind::PodUBO:
2253 case clspv::ArgKind::PodPushConstant:
David Neto862b7d82018-06-14 18:48:37 -04002254 // The call maps to the variable directly.
2255 VMap[call] = info->var_id;
2256 break;
2257 case clspv::ArgKind::Sampler:
2258 case clspv::ArgKind::ReadOnlyImage:
2259 case clspv::ArgKind::WriteOnlyImage:
2260 // The call maps to a load we generate later.
2261 ResourceVarDeferredLoadCalls[call] = info->var_id;
2262 break;
2263 default:
2264 llvm_unreachable("Unhandled arg kind");
2265 }
2266 }
David Neto22f144c2017-06-12 14:26:21 -04002267 }
David Neto862b7d82018-06-14 18:48:37 -04002268 }
2269 }
David Neto22f144c2017-06-12 14:26:21 -04002270
David Neto862b7d82018-06-14 18:48:37 -04002271 // Generate associated decorations.
SJWf93f5f32020-05-05 07:27:56 -05002272 SPIRVOperandVec Ops;
David Neto862b7d82018-06-14 18:48:37 -04002273 for (auto *info : ModuleOrderedResourceVars) {
alan-baker9b0ec3c2020-04-06 14:45:34 -04002274 // Push constants don't need descriptor set or binding decorations.
2275 if (info->arg_kind == clspv::ArgKind::PodPushConstant)
2276 continue;
2277
David Neto862b7d82018-06-14 18:48:37 -04002278 // Decorate with DescriptorSet and Binding.
2279 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002280 Ops << info->var_id << spv::DecorationDescriptorSet << info->descriptor_set;
SJWf93f5f32020-05-05 07:27:56 -05002281 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto862b7d82018-06-14 18:48:37 -04002282
2283 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002284 Ops << info->var_id << spv::DecorationBinding << info->binding;
SJWf93f5f32020-05-05 07:27:56 -05002285 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto862b7d82018-06-14 18:48:37 -04002286
alan-bakere9308012019-03-15 10:25:13 -04002287 if (info->coherent) {
2288 // Decorate with Coherent if required for the variable.
2289 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002290 Ops << info->var_id << spv::DecorationCoherent;
SJWf93f5f32020-05-05 07:27:56 -05002291 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere9308012019-03-15 10:25:13 -04002292 }
2293
David Neto862b7d82018-06-14 18:48:37 -04002294 // Generate NonWritable and NonReadable
2295 switch (info->arg_kind) {
2296 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002297 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002298 if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
2299 clspv::AddressSpace::Constant) {
2300 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002301 Ops << info->var_id << spv::DecorationNonWritable;
SJWf93f5f32020-05-05 07:27:56 -05002302 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002303 }
David Neto862b7d82018-06-14 18:48:37 -04002304 break;
David Neto862b7d82018-06-14 18:48:37 -04002305 case clspv::ArgKind::WriteOnlyImage:
2306 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002307 Ops << info->var_id << spv::DecorationNonReadable;
SJWf93f5f32020-05-05 07:27:56 -05002308 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto862b7d82018-06-14 18:48:37 -04002309 break;
2310 default:
2311 break;
David Neto22f144c2017-06-12 14:26:21 -04002312 }
2313 }
2314}
2315
SJW77b87ad2020-04-21 14:37:52 -05002316void SPIRVProducerPass::GeneratePushConstantDescriptorMapEntries() {
Kévin Petitbbbda972020-03-03 19:16:31 +00002317
SJW77b87ad2020-04-21 14:37:52 -05002318 if (auto GV = module->getGlobalVariable(clspv::PushConstantsVariableName())) {
2319 auto const &DL = module->getDataLayout();
Kévin Petitbbbda972020-03-03 19:16:31 +00002320 auto MD = GV->getMetadata(clspv::PushConstantsMetadataName());
2321 auto STy = cast<StructType>(GV->getValueType());
2322
2323 for (unsigned i = 0; i < STy->getNumElements(); i++) {
2324 auto pc = static_cast<clspv::PushConstant>(
2325 mdconst::extract<ConstantInt>(MD->getOperand(i))->getZExtValue());
alan-baker6a3930b2020-05-21 10:09:11 -04002326 if (pc != clspv::PushConstant::KernelArgument) {
2327 auto memberType = STy->getElementType(i);
2328 auto offset = GetExplicitLayoutStructMemberOffset(STy, i, DL);
2329 unsigned previousOffset = 0;
2330 if (i > 0) {
2331 previousOffset = GetExplicitLayoutStructMemberOffset(STy, i - 1, DL);
2332 }
2333 auto size =
2334 static_cast<uint32_t>(GetTypeSizeInBits(memberType, DL)) / 8;
2335 assert(isValidExplicitLayout(*module, STy, i,
2336 spv::StorageClassPushConstant, offset,
2337 previousOffset));
2338 version0::DescriptorMapEntry::PushConstantData data = {pc, offset,
2339 size};
2340 descriptorMapEntries->emplace_back(std::move(data));
Kévin Petitbbbda972020-03-03 19:16:31 +00002341 }
Kévin Petitbbbda972020-03-03 19:16:31 +00002342 }
2343 }
2344}
2345
SJW77b87ad2020-04-21 14:37:52 -05002346void SPIRVProducerPass::GenerateSpecConstantDescriptorMapEntries() {
2347 for (auto pair : clspv::GetSpecConstants(module)) {
alan-bakera1be3322020-04-20 12:48:18 -04002348 auto kind = pair.first;
2349 auto id = pair.second;
2350
2351 // Local memory size is only used for kernel arguments.
2352 if (kind == SpecConstant::kLocalMemorySize)
2353 continue;
2354
2355 version0::DescriptorMapEntry::SpecConstantData data = {kind, id};
2356 descriptorMapEntries->emplace_back(std::move(data));
2357 }
2358}
2359
David Neto22f144c2017-06-12 14:26:21 -04002360void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
David Neto22f144c2017-06-12 14:26:21 -04002361 ValueMapType &VMap = getValueMap();
SJW01901d92020-05-21 08:58:31 -05002362 std::vector<SPIRVID> &BuiltinDimVec = getBuiltinDimVec();
David Neto85082642018-03-24 06:55:20 -07002363 const DataLayout &DL = GV.getParent()->getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04002364
2365 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
2366 Type *Ty = GV.getType();
2367 PointerType *PTy = cast<PointerType>(Ty);
2368
SJW01901d92020-05-21 08:58:31 -05002369 SPIRVID InitializerID;
David Neto22f144c2017-06-12 14:26:21 -04002370
2371 // Workgroup size is handled differently (it goes into a constant)
2372 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2373 std::vector<bool> HasMDVec;
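    // 0xFFFFFFFF is a sentinel meaning the dimension has not been seen yet.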
2374 uint32_t PrevXDimCst = 0xFFFFFFFF;
2375 uint32_t PrevYDimCst = 0xFFFFFFFF;
2376 uint32_t PrevZDimCst = 0xFFFFFFFF;
2377 for (Function &Func : *GV.getParent()) {
2378 if (Func.isDeclaration()) {
2379 continue;
2380 }
2381
2382 // We only need to check kernels.
2383 if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
2384 continue;
2385 }
2386
2387 if (const MDNode *MD =
2388 dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
2389 uint32_t CurXDimCst = static_cast<uint32_t>(
2390 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
2391 uint32_t CurYDimCst = static_cast<uint32_t>(
2392 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
2393 uint32_t CurZDimCst = static_cast<uint32_t>(
2394 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
2395
2396 if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
2397 PrevZDimCst == 0xFFFFFFFF) {
2398 PrevXDimCst = CurXDimCst;
2399 PrevYDimCst = CurYDimCst;
2400 PrevZDimCst = CurZDimCst;
2401 } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
2402 CurZDimCst != PrevZDimCst) {
2403 llvm_unreachable(
2404 "reqd_work_group_size must be the same across all kernels");
2405 } else {
2406 continue;
2407 }
2408
2409 //
2410 // Generate OpConstantComposite.
2411 //
2412 // Ops[0] : Result Type ID
2413 // Ops[1] : Constant size for x dimension.
2414 // Ops[2] : Constant size for y dimension.
2415 // Ops[3] : Constant size for z dimension.
SJWf93f5f32020-05-05 07:27:56 -05002416 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002417
SJW01901d92020-05-21 08:58:31 -05002418 SPIRVID XDimCstID =
SJWf93f5f32020-05-05 07:27:56 -05002419 getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(0)));
SJW01901d92020-05-21 08:58:31 -05002420 SPIRVID YDimCstID =
SJWf93f5f32020-05-05 07:27:56 -05002421 getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(1)));
SJW01901d92020-05-21 08:58:31 -05002422 SPIRVID ZDimCstID =
SJWf93f5f32020-05-05 07:27:56 -05002423 getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(2)));
David Neto22f144c2017-06-12 14:26:21 -04002424
SJW01901d92020-05-21 08:58:31 -05002425 Ops << Ty->getPointerElementType() << XDimCstID << YDimCstID
2426 << ZDimCstID;
David Neto22f144c2017-06-12 14:26:21 -04002427
SJWf93f5f32020-05-05 07:27:56 -05002428 InitializerID =
2429 addSPIRVInst<kGlobalVariables>(spv::OpConstantComposite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002430
2431 HasMDVec.push_back(true);
2432 } else {
2433 HasMDVec.push_back(false);
2434 }
2435 }
2436
2437    // Check that all kernels have the same work_group_size definition.
2438 bool HasMD = false;
2439 if (!HasMDVec.empty()) {
2440 HasMD = HasMDVec[0];
2441 for (uint32_t i = 1; i < HasMDVec.size(); i++) {
2442 if (HasMD != HasMDVec[i]) {
2443 llvm_unreachable(
2444 "Kernels should have consistent work group size definition");
2445 }
2446 }
2447 }
2448
2449 // If the kernels do not carry reqd_work_group_size metadata (or non-uniform
2450 // NDRanges are supported), generate OpSpecConstants for the x/y/z dimensions.
Kévin Petit21c23c62020-04-29 01:38:28 +01002451 if (!HasMD || clspv::Option::NonUniformNDRangeSupported()) {
David Neto22f144c2017-06-12 14:26:21 -04002452 //
2453 // Generate OpSpecConstants for x/y/z dimension.
2454 //
2455 // Ops[0] : Result Type ID
2456 // Ops[1] : Constant size for x/y/z dimension (Literal Number).
David Neto22f144c2017-06-12 14:26:21 -04002457
alan-bakera1be3322020-04-20 12:48:18 -04002458 // Allocate spec constants for workgroup size.
SJW77b87ad2020-04-21 14:37:52 -05002459 clspv::AddWorkgroupSpecConstants(module);
alan-bakera1be3322020-04-20 12:48:18 -04002460
SJWf93f5f32020-05-05 07:27:56 -05002461 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05002462 SPIRVID result_type_id = getSPIRVType(
SJWf93f5f32020-05-05 07:27:56 -05002463 dyn_cast<VectorType>(Ty->getPointerElementType())->getElementType());
David Neto22f144c2017-06-12 14:26:21 -04002464
David Neto257c3892018-04-11 13:19:45 -04002465 // X Dimension
SJW01901d92020-05-21 08:58:31 -05002466 Ops << result_type_id << 1;
2467 SPIRVID XDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002468
2469 // Y Dimension
2470 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002471 Ops << result_type_id << 1;
2472 SPIRVID YDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002473
2474 // Z Dimension
2475 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002476 Ops << result_type_id << 1;
2477 SPIRVID ZDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002478
David Neto257c3892018-04-11 13:19:45 -04002479 BuiltinDimVec.push_back(XDimCstID);
2480 BuiltinDimVec.push_back(YDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002481 BuiltinDimVec.push_back(ZDimCstID);
2482
David Neto22f144c2017-06-12 14:26:21 -04002483 //
2484 // Generate OpSpecConstantComposite.
2485 //
2486 // Ops[0] : Result Type ID
2487 // Ops[1] : Constant size for x dimension.
2488 // Ops[2] : Constant size for y dimension.
2489 // Ops[3] : Constant size for z dimension.
David Neto22f144c2017-06-12 14:26:21 -04002490 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002491 Ops << Ty->getPointerElementType() << XDimCstID << YDimCstID << ZDimCstID;
David Neto22f144c2017-06-12 14:26:21 -04002492
SJWf93f5f32020-05-05 07:27:56 -05002493 InitializerID =
2494 addSPIRVInst<kConstants>(spv::OpSpecConstantComposite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002495 }
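// As a rough sketch (illustrative names; the real output uses numeric IDs),
// this spec-constant path produces:
//   %wgsize_x = OpSpecConstant %uint 1
//   %wgsize_y = OpSpecConstant %uint 1
//   %wgsize_z = OpSpecConstant %uint 1
//   %wgsize   = OpSpecConstantComposite %v3uint %wgsize_x %wgsize_y %wgsize_z
// The SpecId decorations on the three scalars and the BuiltIn WorkgroupSize
// decoration on the composite are added later.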
alan-bakerbed3a882020-04-21 14:42:41 -04002496 } else if (BuiltinType == spv::BuiltInWorkDim) {
2497 // 1. Generate a specialization constant with a default of 3.
2498 // 2. Allocate and annotate a SpecId for the constant.
2499 // 3. Use the spec constant as the initializer for the variable.
SJWf93f5f32020-05-05 07:27:56 -05002500 SPIRVOperandVec Ops;
alan-bakerbed3a882020-04-21 14:42:41 -04002501
2502 //
2503 // Generate OpSpecConstant.
2504 //
2505 // Ops[0] : Result Type ID
2506 // Ops[1] : Default literal value
alan-bakerbed3a882020-04-21 14:42:41 -04002507
SJW01901d92020-05-21 08:58:31 -05002508 Ops << IntegerType::get(GV.getContext(), 32) << 3;
alan-bakerbed3a882020-04-21 14:42:41 -04002509
SJWf93f5f32020-05-05 07:27:56 -05002510 InitializerID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakerbed3a882020-04-21 14:42:41 -04002511
2512 //
2513 // Generate SpecId decoration.
2514 //
2515 // Ops[0] : target
2516 // Ops[1] : decoration
2517 // Ops[2] : SpecId
Alan Baker75ccc252020-04-21 17:11:52 -04002518 auto spec_id = AllocateSpecConstant(module, SpecConstant::kWorkDim);
alan-bakerbed3a882020-04-21 14:42:41 -04002519 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002520 Ops << InitializerID << spv::DecorationSpecId << spec_id;
alan-bakerbed3a882020-04-21 14:42:41 -04002521
SJWf93f5f32020-05-05 07:27:56 -05002522 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
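// As a sketch (illustrative names; the SpecId value comes from
// AllocateSpecConstant):
//   %workdim = OpSpecConstant %uint 3
//   OpDecorate %workdim SpecId <id>
// The OpVariable emitted below then uses %workdim as its initializer.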
alan-bakere1996972020-05-04 08:38:12 -04002523 } else if (BuiltinType == spv::BuiltInGlobalOffset) {
2524 // 1. Generate a spec constant with a default of {0, 0, 0}.
2525 // 2. Allocate and annotate SpecIds for the constants.
2526 // 3. Use the spec constant as the initializer for the variable.
SJWf93f5f32020-05-05 07:27:56 -05002527 SPIRVOperandVec Ops;
alan-bakere1996972020-05-04 08:38:12 -04002528
2529 //
2530 // Generate OpSpecConstant for each dimension.
2531 //
2532 // Ops[0] : Result Type ID
2533 // Ops[1] : Default literal value
2534 //
SJW01901d92020-05-21 08:58:31 -05002535 Ops << IntegerType::get(GV.getContext(), 32) << 0;
2536 SPIRVID x_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002537
alan-bakere1996972020-05-04 08:38:12 -04002538 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002539 Ops << IntegerType::get(GV.getContext(), 32) << 0;
2540 SPIRVID y_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002541
alan-bakere1996972020-05-04 08:38:12 -04002542 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002543 Ops << IntegerType::get(GV.getContext(), 32) << 0;
2544 SPIRVID z_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002545
2546 //
2547 // Generate SpecId decoration for each dimension.
2548 //
2549 // Ops[0] : target
2550 // Ops[1] : decoration
2551 // Ops[2] : SpecId
2552 //
2553 auto spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetX);
2554 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002555 Ops << x_id << spv::DecorationSpecId << spec_id;
SJWf93f5f32020-05-05 07:27:56 -05002556 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002557
2558 spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetY);
2559 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002560 Ops << y_id << spv::DecorationSpecId << spec_id;
SJWf93f5f32020-05-05 07:27:56 -05002561 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002562
2563 spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetZ);
2564 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002565 Ops << z_id << spv::DecorationSpecId << spec_id;
SJWf93f5f32020-05-05 07:27:56 -05002566 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002567
2568 //
2569 // Generate OpSpecConstantComposite.
2570 //
2571 // Ops[0] : type id
2572 // Ops[1..n-1] : elements
2573 //
alan-bakere1996972020-05-04 08:38:12 -04002574 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002575 Ops << GV.getType()->getPointerElementType() << x_id << y_id << z_id;
SJWf93f5f32020-05-05 07:27:56 -05002576 InitializerID = addSPIRVInst<kConstants>(spv::OpSpecConstantComposite, Ops);
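// As a sketch (illustrative names; SpecIds come from AllocateSpecConstant):
//   %gofs_x = OpSpecConstant %uint 0
//   %gofs_y = OpSpecConstant %uint 0
//   %gofs_z = OpSpecConstant %uint 0
//   OpDecorate %gofs_x SpecId <x>   ; likewise for y and z
//   %gofs = OpSpecConstantComposite %v3uint %gofs_x %gofs_y %gofs_z
// %gofs then initializes the global-offset variable emitted below.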
David Neto22f144c2017-06-12 14:26:21 -04002577 }
2578
David Neto22f144c2017-06-12 14:26:21 -04002579 //
2580 // Generate OpVariable.
2581 //
2582 // Ops[0] : Result Type ID
2583 // Ops[1] : Storage Class
SJWf93f5f32020-05-05 07:27:56 -05002584 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002585
David Neto85082642018-03-24 06:55:20 -07002586 const auto AS = PTy->getAddressSpace();
SJW01901d92020-05-21 08:58:31 -05002587 Ops << Ty << GetStorageClass(AS);
David Neto22f144c2017-06-12 14:26:21 -04002588
David Neto85082642018-03-24 06:55:20 -07002589 const bool module_scope_constant_external_init =
David Neto862b7d82018-06-14 18:48:37 -04002590 (AS == AddressSpace::Constant) && GV.hasInitializer() &&
David Neto85082642018-03-24 06:55:20 -07002591 clspv::Option::ModuleConstantsInStorageBuffer();
2592
Kévin Petit23d5f182019-08-13 16:21:29 +01002593 if (GV.hasInitializer()) {
2594 auto GVInit = GV.getInitializer();
2595 if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
SJWf93f5f32020-05-05 07:27:56 -05002596 InitializerID = getSPIRVValue(GVInit);
David Neto85082642018-03-24 06:55:20 -07002597 }
2598 }
Kévin Petit23d5f182019-08-13 16:21:29 +01002599
SJW01901d92020-05-21 08:58:31 -05002600 if (InitializerID.isValid()) {
Kévin Petitbbbda972020-03-03 19:16:31 +00002601 // Emit the ID of the initializer as part of the variable definition.
SJW01901d92020-05-21 08:58:31 -05002602 Ops << InitializerID;
Kévin Petit23d5f182019-08-13 16:21:29 +01002603 }
SJW01901d92020-05-21 08:58:31 -05002604 SPIRVID var_id = addSPIRVInst<kGlobalVariables>(spv::OpVariable, Ops);
David Neto85082642018-03-24 06:55:20 -07002605
SJWf93f5f32020-05-05 07:27:56 -05002606 VMap[&GV] = var_id;
David Neto22f144c2017-06-12 14:26:21 -04002607
alan-bakere1996972020-05-04 08:38:12 -04002608 auto IsOpenCLBuiltin = [](spv::BuiltIn builtin) {
2609 return builtin == spv::BuiltInWorkDim ||
2610 builtin == spv::BuiltInGlobalOffset;
2611 };
2612
alan-bakere1996972020-05-04 08:38:12 -04002613 // If we have a builtin (not an OpenCL builtin).
2614 if (spv::BuiltInMax != BuiltinType && !IsOpenCLBuiltin(BuiltinType)) {
David Neto22f144c2017-06-12 14:26:21 -04002615 //
2616 // Generate OpDecorate.
2617 //
2618 // Ops[0] = Target ID
2619 // Ops[1] = Decoration (Builtin)
2620 // Ops[2] = BuiltIn ID
SJW01901d92020-05-21 08:58:31 -05002621 SPIRVID ResultID;
David Neto22f144c2017-06-12 14:26:21 -04002622
2623 // WorkgroupSize is different: we decorate the constant composite that holds
2624 // its value, rather than the variable that we use to access the value.
2625 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2626 ResultID = InitializerID;
David Netoa60b00b2017-09-15 16:34:09 -04002627 // Save both the value and variable IDs for later.
2628 WorkgroupSizeValueID = InitializerID;
SJWf93f5f32020-05-05 07:27:56 -05002629 WorkgroupSizeVarID = getSPIRVValue(&GV);
David Neto22f144c2017-06-12 14:26:21 -04002630 } else {
SJWf93f5f32020-05-05 07:27:56 -05002631 ResultID = getSPIRVValue(&GV);
David Neto22f144c2017-06-12 14:26:21 -04002632 }
2633
SJW01901d92020-05-21 08:58:31 -05002634 Ops.clear();
2635 Ops << ResultID << spv::DecorationBuiltIn << BuiltinType;
David Neto22f144c2017-06-12 14:26:21 -04002636
SJW01901d92020-05-21 08:58:31 -05002637 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto85082642018-03-24 06:55:20 -07002638 } else if (module_scope_constant_external_init) {
2639 // This module scope constant is initialized from a storage buffer with data
2640 // provided by the host at binding 0 of the next descriptor set.
SJW77b87ad2020-04-21 14:37:52 -05002641 const uint32_t descriptor_set = TakeDescriptorIndex(module);
David Neto85082642018-03-24 06:55:20 -07002642
David Neto862b7d82018-06-14 18:48:37 -04002643 // Emit the initializer to the descriptor map file.
David Neto85082642018-03-24 06:55:20 -07002644 // Use "kind,buffer" to indicate storage buffer. We might want to expand
2645 // that later to other types, like uniform buffer.
alan-bakerf5e5f692018-11-27 08:33:24 -05002646 std::string hexbytes;
2647 llvm::raw_string_ostream str(hexbytes);
2648 clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04002649 version0::DescriptorMapEntry::ConstantData constant_data = {ArgKind::Buffer,
2650 str.str()};
2651 descriptorMapEntries->emplace_back(std::move(constant_data), descriptor_set,
2652 0);
David Neto85082642018-03-24 06:55:20 -07002653
David Neto85082642018-03-24 06:55:20 -07002654 // OpDecorate %var DescriptorSet <descriptor_set>
SJW01901d92020-05-21 08:58:31 -05002655 Ops.clear();
2656 Ops << var_id << spv::DecorationDescriptorSet << descriptor_set;
2657 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
SJW69939d52020-04-16 07:29:07 -05002658
2659 // OpDecorate %var Binding <binding>
SJW01901d92020-05-21 08:58:31 -05002660 Ops.clear();
2661 Ops << var_id << spv::DecorationBinding << 0;
2662 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04002663 }
2664}
2665
SJW77b87ad2020-04-21 14:37:52 -05002666void SPIRVProducerPass::GenerateDescriptorMapInfo(Function &F) {
2667 const auto &DL = module->getDataLayout();
David Netoc5fb5242018-07-30 13:28:31 -04002668 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
2669 return;
2670 }
Kévin Petit717f8572020-04-06 17:31:53 +01002671 // Add a declaration entry for this kernel.
2672 version0::DescriptorMapEntry::KernelDeclData kernel_decl_data = {
2673 F.getName().str()};
2674 descriptorMapEntries->emplace_back(std::move(kernel_decl_data));
2675
David Neto862b7d82018-06-14 18:48:37 -04002676 // Gather the list of resources that are used by this function's arguments.
2677 auto &resource_var_at_index = FunctionToResourceVarsMap[&F];
2678
David Neto862b7d82018-06-14 18:48:37 -04002679 auto *fty = F.getType()->getPointerElementType();
2680 auto *func_ty = dyn_cast<FunctionType>(fty);
2681
alan-baker038e9242019-04-19 22:14:41 -04002682 // If we've clustered POD arguments, then argument details are in metadata.
David Neto862b7d82018-06-14 18:48:37 -04002683 // If an argument maps to a resource variable, then get descriptor set and
2684 // binding from the resource variable. Other info comes from the metadata.
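// A sketch of the expected shape of each map entry (the operand order mirrors
// the extraction code below; names are illustrative):
//   !{!"argName", i32 <ordinal in source>, i32 <remapped index>,
//     i32 <offset>, i32 <size>, !"<arg kind string>"}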
alan-bakerff6c9292020-05-04 08:32:09 -04002685 const auto *arg_map = F.getMetadata(clspv::KernelArgMapMetadataName());
2686 auto local_spec_id_md =
2687 module->getNamedMetadata(clspv::LocalSpecIdMetadataName());
David Neto862b7d82018-06-14 18:48:37 -04002688 if (arg_map) {
2689 for (const auto &arg : arg_map->operands()) {
2690 const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
alan-bakerff6c9292020-05-04 08:32:09 -04002691 assert(arg_node->getNumOperands() == 6);
David Neto862b7d82018-06-14 18:48:37 -04002692 const auto name =
2693 dyn_cast<MDString>(arg_node->getOperand(0))->getString();
2694 const auto old_index =
2695 dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
2696 // Remapped argument index
alan-baker6a3930b2020-05-21 10:09:11 -04002697 const int new_index = static_cast<int>(
2698 dyn_extract<ConstantInt>(arg_node->getOperand(2))->getSExtValue());
David Neto862b7d82018-06-14 18:48:37 -04002699 const auto offset =
2700 dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
Kévin PETITa353c832018-03-20 23:21:21 +00002701 const auto arg_size =
2702 dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
alan-bakerc4579bb2020-04-29 14:15:50 -04002703 const auto argKind = clspv::GetArgKindFromName(
2704 dyn_cast<MDString>(arg_node->getOperand(5))->getString().str());
alan-bakerf5e5f692018-11-27 08:33:24 -05002705
alan-bakerff6c9292020-05-04 08:32:09 -04002706 // If this is a local memory argument, find the right spec id for this
2707 // argument.
2708 int64_t spec_id = -1;
2709 if (argKind == clspv::ArgKind::Local) {
2710 for (auto spec_id_arg : local_spec_id_md->operands()) {
2711 if ((&F == dyn_cast<Function>(
2712 dyn_cast<ValueAsMetadata>(spec_id_arg->getOperand(0))
2713 ->getValue())) &&
alan-baker6a3930b2020-05-21 10:09:11 -04002714 (static_cast<uint64_t>(new_index) ==
alan-bakerff6c9292020-05-04 08:32:09 -04002715 mdconst::extract<ConstantInt>(spec_id_arg->getOperand(1))
2716 ->getZExtValue())) {
2717 spec_id = mdconst::extract<ConstantInt>(spec_id_arg->getOperand(2))
2718 ->getSExtValue();
2719 break;
2720 }
2721 }
2722 }
alan-bakerf5e5f692018-11-27 08:33:24 -05002723 uint32_t descriptor_set = 0;
2724 uint32_t binding = 0;
2725 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05002726 F.getName().str(), name.str(), static_cast<uint32_t>(old_index),
2727 argKind, static_cast<uint32_t>(spec_id),
alan-bakerf5e5f692018-11-27 08:33:24 -05002728 // This will be set below for pointer-to-local args.
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04002729 0, static_cast<uint32_t>(offset), static_cast<uint32_t>(arg_size)};
David Neto862b7d82018-06-14 18:48:37 -04002730 if (spec_id > 0) {
alan-bakerf5e5f692018-11-27 08:33:24 -05002731 kernel_data.local_element_size = static_cast<uint32_t>(GetTypeAllocSize(
2732 func_ty->getParamType(unsigned(new_index))->getPointerElementType(),
2733 DL));
alan-baker6a3930b2020-05-21 10:09:11 -04002734 } else if (new_index >= 0) {
David Neto862b7d82018-06-14 18:48:37 -04002735 auto *info = resource_var_at_index[new_index];
2736 assert(info);
alan-bakerf5e5f692018-11-27 08:33:24 -05002737 descriptor_set = info->descriptor_set;
2738 binding = info->binding;
David Neto862b7d82018-06-14 18:48:37 -04002739 }
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04002740 descriptorMapEntries->emplace_back(std::move(kernel_data), descriptor_set,
2741 binding);
David Neto862b7d82018-06-14 18:48:37 -04002742 }
2743 } else {
2744 // There is no argument map.
2745 // Take descriptor info from the resource variable calls.
Kévin PETITa353c832018-03-20 23:21:21 +00002746 // Take argument name and size from the arguments list.
David Neto862b7d82018-06-14 18:48:37 -04002747
2748 SmallVector<Argument *, 4> arguments;
2749 for (auto &arg : F.args()) {
2750 arguments.push_back(&arg);
2751 }
2752
2753 unsigned arg_index = 0;
2754 for (auto *info : resource_var_at_index) {
2755 if (info) {
Kévin PETITa353c832018-03-20 23:21:21 +00002756 auto arg = arguments[arg_index];
alan-bakerb6b09dc2018-11-08 16:59:28 -05002757 unsigned arg_size = 0;
alan-baker9b0ec3c2020-04-06 14:45:34 -04002758 if (info->arg_kind == clspv::ArgKind::Pod ||
2759 info->arg_kind == clspv::ArgKind::PodUBO ||
2760 info->arg_kind == clspv::ArgKind::PodPushConstant) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002761 arg_size = static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
Kévin PETITa353c832018-03-20 23:21:21 +00002762 }
2763
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04002764 // Local pointer arguments are unused in this case. Offset is always
2765 // zero.
alan-bakerf5e5f692018-11-27 08:33:24 -05002766 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05002767 F.getName().str(),
2768 arg->getName().str(),
2769 arg_index,
alan-bakerc4579bb2020-04-29 14:15:50 -04002770 info->arg_kind,
alan-baker21574d32020-01-29 16:00:31 -05002771 0,
2772 0,
2773 0,
2774 arg_size};
alan-bakerf5e5f692018-11-27 08:33:24 -05002775 descriptorMapEntries->emplace_back(std::move(kernel_data),
2776 info->descriptor_set, info->binding);
David Neto862b7d82018-06-14 18:48:37 -04002777 }
2778 arg_index++;
2779 }
2780 // Generate mappings for pointer-to-local arguments.
2781 for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
2782 Argument *arg = arguments[arg_index];
Alan Baker202c8c72018-08-13 13:47:44 -04002783 auto where = LocalArgSpecIds.find(arg);
2784 if (where != LocalArgSpecIds.end()) {
2785 auto &local_arg_info = LocalSpecIdInfoMap[where->second];
alan-bakerf5e5f692018-11-27 08:33:24 -05002786 // POD argument members are unused in this case.
2787 version0::DescriptorMapEntry::KernelArgData kernel_data = {
alan-baker21574d32020-01-29 16:00:31 -05002788 F.getName().str(),
2789 arg->getName().str(),
alan-bakerf5e5f692018-11-27 08:33:24 -05002790 arg_index,
2791 ArgKind::Local,
2792 static_cast<uint32_t>(local_arg_info.spec_id),
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04002793 static_cast<uint32_t>(
2794 GetTypeAllocSize(local_arg_info.elem_type, DL)),
alan-bakerf5e5f692018-11-27 08:33:24 -05002795 0,
2796 0};
2797 // Pointer-to-local arguments do not utilize descriptor set and binding.
2798 descriptorMapEntries->emplace_back(std::move(kernel_data), 0, 0);
David Neto862b7d82018-06-14 18:48:37 -04002799 }
2800 }
2801 }
2802}
2803
David Neto22f144c2017-06-12 14:26:21 -04002804void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
David Neto22f144c2017-06-12 14:26:21 -04002805 ValueMapType &VMap = getValueMap();
2806 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04002807 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
2808 auto &GlobalConstArgSet = getGlobalConstArgSet();
2809
2810 FunctionType *FTy = F.getFunctionType();
2811
2812 //
David Neto22f144c2017-06-12 14:26:21 -04002813 // Generate OpFunction.
2814 //
2815
2816 // FOps[0] : Result Type ID
2817 // FOps[1] : Function Control
2818 // FOps[2] : Function Type ID
SJWf93f5f32020-05-05 07:27:56 -05002819 SPIRVOperandVec FOps;
David Neto22f144c2017-06-12 14:26:21 -04002820
2821 // Find SPIRV instruction for return type.
SJW01901d92020-05-21 08:58:31 -05002822 FOps << FTy->getReturnType();
David Neto22f144c2017-06-12 14:26:21 -04002823
2824 // Check function attributes for SPIRV Function Control.
2825 uint32_t FuncControl = spv::FunctionControlMaskNone;
2826 if (F.hasFnAttribute(Attribute::AlwaysInline)) {
2827 FuncControl |= spv::FunctionControlInlineMask;
2828 }
2829 if (F.hasFnAttribute(Attribute::NoInline)) {
2830 FuncControl |= spv::FunctionControlDontInlineMask;
2831 }
2832 // TODO: Check llvm attribute for Function Control Pure.
2833 if (F.hasFnAttribute(Attribute::ReadOnly)) {
2834 FuncControl |= spv::FunctionControlPureMask;
2835 }
2836 // TODO: Check llvm attribute for Function Control Const.
2837 if (F.hasFnAttribute(Attribute::ReadNone)) {
2838 FuncControl |= spv::FunctionControlConstMask;
2839 }
2840
SJW01901d92020-05-21 08:58:31 -05002841 FOps << FuncControl;
David Neto22f144c2017-06-12 14:26:21 -04002842
SJW01901d92020-05-21 08:58:31 -05002843 SPIRVID FTyID;
David Neto22f144c2017-06-12 14:26:21 -04002844 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
2845 SmallVector<Type *, 4> NewFuncParamTys;
2846 FunctionType *NewFTy =
2847 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
SJWf93f5f32020-05-05 07:27:56 -05002848 FTyID = getSPIRVType(NewFTy);
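// Kernels take no OpFunctionParameters: as a rough sketch, a kernel "foo"
// becomes
//   %foo = OpFunction %void <function control> %fn_void
// where %fn_void is the parameterless function type built here; the kernel's
// arguments are reached through resource variables instead.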
David Neto22f144c2017-06-12 14:26:21 -04002849 } else {
David Neto9ed8e2f2018-03-24 06:47:24 -07002850 // Handle regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04002851 if (GlobalConstFuncTyMap.count(FTy)) {
SJWf93f5f32020-05-05 07:27:56 -05002852 FTyID = getSPIRVType(GlobalConstFuncTyMap[FTy].first);
David Neto22f144c2017-06-12 14:26:21 -04002853 } else {
SJWf93f5f32020-05-05 07:27:56 -05002854 FTyID = getSPIRVType(FTy);
David Neto22f144c2017-06-12 14:26:21 -04002855 }
2856 }
2857
SJW01901d92020-05-21 08:58:31 -05002858 FOps << FTyID;
David Neto22f144c2017-06-12 14:26:21 -04002859
SJWf93f5f32020-05-05 07:27:56 -05002860 // Generate SPIRV instruction for function.
2861 SPIRVID FID = addSPIRVInst(spv::OpFunction, FOps);
2862 VMap[&F] = FID;
David Neto22f144c2017-06-12 14:26:21 -04002863
SJWf93f5f32020-05-05 07:27:56 -05002864 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
2865 EntryPoints.push_back(std::make_pair(&F, FID));
2866 }
David Neto22f144c2017-06-12 14:26:21 -04002867
David Neto482550a2018-03-24 05:21:07 -07002868 if (clspv::Option::ShowIDs()) {
SJW01901d92020-05-21 08:58:31 -05002869 errs() << "Function " << F.getName() << " is " << FID.get() << "\n";
David Netob05675d2018-02-16 12:37:49 -05002870 }
David Neto22f144c2017-06-12 14:26:21 -04002871
2872 //
2873 // Generate OpFunctionParameter for Normal function.
2874 //
2875
2876 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
alan-bakere9308012019-03-15 10:25:13 -04002877
David Neto22f144c2017-06-12 14:26:21 -04002878 // Iterate over the Arguments (to keep their names) rather than the param types from the function type.
2879 unsigned ArgIdx = 0;
2880 for (Argument &Arg : F.args()) {
David Neto22f144c2017-06-12 14:26:21 -04002881 // Ops[0] : Result Type ID
SJW01901d92020-05-21 08:58:31 -05002882 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002883
2884 // Find SPIRV instruction for parameter type.
SJW01901d92020-05-21 08:58:31 -05002885 SPIRVID ParamTyID = getSPIRVType(Arg.getType());
David Neto22f144c2017-06-12 14:26:21 -04002886 if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
2887 if (GlobalConstFuncTyMap.count(FTy)) {
2888 if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
2889 Type *EleTy = PTy->getPointerElementType();
2890 Type *ArgTy =
2891 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
SJWf93f5f32020-05-05 07:27:56 -05002892 ParamTyID = getSPIRVType(ArgTy);
David Neto22f144c2017-06-12 14:26:21 -04002893 GlobalConstArgSet.insert(&Arg);
2894 }
2895 }
2896 }
SJW01901d92020-05-21 08:58:31 -05002897 Ops << ParamTyID;
David Neto22f144c2017-06-12 14:26:21 -04002898
2899 // Generate SPIRV instruction for parameter.
SJW01901d92020-05-21 08:58:31 -05002900 SPIRVID param_id = addSPIRVInst(spv::OpFunctionParameter, Ops);
SJWf93f5f32020-05-05 07:27:56 -05002901 VMap[&Arg] = param_id;
2902
2903 if (CalledWithCoherentResource(Arg)) {
2904 // If the arg is ever passed a coherent resource, then decorate this
2905 // parameter with Coherent too.
SJW01901d92020-05-21 08:58:31 -05002906 Ops.clear();
2907 Ops << param_id << spv::DecorationCoherent;
2908 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
SJWf93f5f32020-05-05 07:27:56 -05002909 }
David Neto22f144c2017-06-12 14:26:21 -04002910
2911 ArgIdx++;
2912 }
2913 }
2914}
2915
SJW77b87ad2020-04-21 14:37:52 -05002916void SPIRVProducerPass::GenerateModuleInfo() {
David Neto22f144c2017-06-12 14:26:21 -04002917 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04002918 ValueList &EntryPointInterfaces = getEntryPointInterfacesVec();
SJW01901d92020-05-21 08:58:31 -05002919 std::vector<SPIRVID> &BuiltinDimVec = getBuiltinDimVec();
David Neto22f144c2017-06-12 14:26:21 -04002920
SJWf93f5f32020-05-05 07:27:56 -05002921 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002922
SJW01901d92020-05-21 08:58:31 -05002923 for (auto Capability : CapabilitySet) {
David Neto22f144c2017-06-12 14:26:21 -04002924 //
SJW01901d92020-05-21 08:58:31 -05002925 // Generate OpCapability
David Neto22f144c2017-06-12 14:26:21 -04002926 //
2927 // Ops[0] = Capability
SJW01901d92020-05-21 08:58:31 -05002928 addSPIRVInst<kCapabilities>(spv::OpCapability, Capability);
alan-baker5b86ed72019-02-15 08:26:50 -05002929 }
2930
2931 // Always add the storage buffer extension
2932 {
David Neto22f144c2017-06-12 14:26:21 -04002933 //
2934 // Generate OpExtension.
2935 //
2936 // Ops[0] = Name (Literal String)
2937 //
SJWf93f5f32020-05-05 07:27:56 -05002938 addSPIRVInst<kExtensions>(spv::OpExtension,
2939 "SPV_KHR_storage_buffer_storage_class");
alan-baker5b86ed72019-02-15 08:26:50 -05002940 }
David Neto22f144c2017-06-12 14:26:21 -04002941
alan-baker5b86ed72019-02-15 08:26:50 -05002942 if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
2943 //
2944 // Generate OpExtension.
2945 //
2946 // Ops[0] = Name (Literal String)
2947 //
SJWf93f5f32020-05-05 07:27:56 -05002948 addSPIRVInst<kExtensions>(spv::OpExtension, "SPV_KHR_variable_pointers");
David Neto22f144c2017-06-12 14:26:21 -04002949 }
2950
2951 //
2952 // Generate OpMemoryModel
2953 //
2954 // Memory model for Vulkan will always be GLSL450.
2955
2956 // Ops[0] = Addressing Model
2957 // Ops[1] = Memory Model
2958 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002959 Ops << spv::AddressingModelLogical << spv::MemoryModelGLSL450;
David Neto22f144c2017-06-12 14:26:21 -04002960
SJWf93f5f32020-05-05 07:27:56 -05002961 addSPIRVInst<kMemoryModel>(spv::OpMemoryModel, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002962
2963 //
2964 // Generate OpEntryPoint
2965 //
2966 for (auto EntryPoint : EntryPoints) {
2967 // Ops[0] = Execution Model
2968 // Ops[1] = EntryPoint ID
2969 // Ops[2] = Name (Literal String)
2970 // ...
2971 //
2972 // TODO: Do we need to consider Interface ID for forward references???
2973 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05002974 const StringRef &name = EntryPoint.first->getName();
SJW01901d92020-05-21 08:58:31 -05002975 Ops << spv::ExecutionModelGLCompute << EntryPoint.second << name;
David Neto22f144c2017-06-12 14:26:21 -04002976
David Neto22f144c2017-06-12 14:26:21 -04002977 for (Value *Interface : EntryPointInterfaces) {
SJW01901d92020-05-21 08:58:31 -05002978 Ops << Interface;
David Neto22f144c2017-06-12 14:26:21 -04002979 }
2980
SJWf93f5f32020-05-05 07:27:56 -05002981 addSPIRVInst<kEntryPoints>(spv::OpEntryPoint, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002982 }
2983
2984 for (auto EntryPoint : EntryPoints) {
Kévin Petit21c23c62020-04-29 01:38:28 +01002985 const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
2986 ->getMetadata("reqd_work_group_size");
2987 if ((MD != nullptr) && !clspv::Option::NonUniformNDRangeSupported()) {
David Neto22f144c2017-06-12 14:26:21 -04002988
2989 if (!BuiltinDimVec.empty()) {
2990 llvm_unreachable(
2991 "Kernels should have consistent work group size definition");
2992 }
2993
2994 //
2995 // Generate OpExecutionMode
2996 //
2997
2998 // Ops[0] = Entry Point ID
2999 // Ops[1] = Execution Mode
3000 // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
3001 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05003002 Ops << EntryPoint.second << spv::ExecutionModeLocalSize;
David Neto22f144c2017-06-12 14:26:21 -04003003
3004 uint32_t XDim = static_cast<uint32_t>(
3005 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
3006 uint32_t YDim = static_cast<uint32_t>(
3007 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
3008 uint32_t ZDim = static_cast<uint32_t>(
3009 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
3010
SJW01901d92020-05-21 08:58:31 -05003011 Ops << XDim << YDim << ZDim;
David Neto22f144c2017-06-12 14:26:21 -04003012
SJWf93f5f32020-05-05 07:27:56 -05003013 addSPIRVInst<kExecutionModes>(spv::OpExecutionMode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003014 }
3015 }
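// As a sketch (illustrative names), a kernel "foo" declared with
// reqd_work_group_size(8, 8, 1) and compiled without non-uniform NDRange
// support yields roughly:
//   OpEntryPoint GLCompute %foo "foo" %interface_0 %interface_1 ...
//   OpExecutionMode %foo LocalSize 8 8 1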
3016
3017 //
3018 // Generate OpSource.
3019 //
3020 // Ops[0] = SourceLanguage ID
3021 // Ops[1] = Version (LiteralNum)
3022 //
SJW01901d92020-05-21 08:58:31 -05003023 uint32_t LangID = spv::SourceLanguageUnknown;
3024 uint32_t LangVer = 0;
Kévin Petitf0515712020-01-07 18:29:20 +00003025 switch (clspv::Option::Language()) {
3026 case clspv::Option::SourceLanguage::OpenCL_C_10:
SJW01901d92020-05-21 08:58:31 -05003027 LangID = spv::SourceLanguageOpenCL_C;
3028 LangVer = 100;
Kévin Petitf0515712020-01-07 18:29:20 +00003029 break;
3030 case clspv::Option::SourceLanguage::OpenCL_C_11:
SJW01901d92020-05-21 08:58:31 -05003031 LangID = spv::SourceLanguageOpenCL_C;
3032 LangVer = 110;
Kévin Petitf0515712020-01-07 18:29:20 +00003033 break;
3034 case clspv::Option::SourceLanguage::OpenCL_C_12:
SJW01901d92020-05-21 08:58:31 -05003035 LangID = spv::SourceLanguageOpenCL_C;
3036 LangVer = 120;
Kévin Petitf0515712020-01-07 18:29:20 +00003037 break;
3038 case clspv::Option::SourceLanguage::OpenCL_C_20:
SJW01901d92020-05-21 08:58:31 -05003039 LangID = spv::SourceLanguageOpenCL_C;
3040 LangVer = 200;
Kévin Petitf0515712020-01-07 18:29:20 +00003041 break;
3042 case clspv::Option::SourceLanguage::OpenCL_CPP:
SJW01901d92020-05-21 08:58:31 -05003043 LangID = spv::SourceLanguageOpenCL_CPP;
3044 LangVer = 100;
Kévin Petitf0515712020-01-07 18:29:20 +00003045 break;
3046 default:
Kévin Petitf0515712020-01-07 18:29:20 +00003047 break;
Kévin Petit0fc88042019-04-09 23:25:02 +01003048 }
David Neto22f144c2017-06-12 14:26:21 -04003049
SJW01901d92020-05-21 08:58:31 -05003050 Ops.clear();
3051 Ops << LangID << LangVer;
SJWf93f5f32020-05-05 07:27:56 -05003052 addSPIRVInst<kDebug>(spv::OpSource, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003053
3054 if (!BuiltinDimVec.empty()) {
3055 //
3056 // Generate OpDecorates for x/y/z dimension.
3057 //
3058 // Ops[0] = Target ID
3059 // Ops[1] = Decoration (SpecId)
David Neto257c3892018-04-11 13:19:45 -04003060 // Ops[2] = Specialization Constant ID (Literal Number)
David Neto22f144c2017-06-12 14:26:21 -04003061
3062 // X Dimension
3063 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05003064 Ops << BuiltinDimVec[0] << spv::DecorationSpecId << 0;
SJWf93f5f32020-05-05 07:27:56 -05003065 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003066
3067 // Y Dimension
3068 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05003069 Ops << BuiltinDimVec[1] << spv::DecorationSpecId << 1;
SJWf93f5f32020-05-05 07:27:56 -05003070 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003071
3072 // Z Dimension
3073 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05003074 Ops << BuiltinDimVec[2] << spv::DecorationSpecId << 2;
SJWf93f5f32020-05-05 07:27:56 -05003075 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003076 }
3077}
3078
David Netob6e2e062018-04-25 10:32:06 -04003079void SPIRVProducerPass::GenerateEntryPointInitialStores() {
3080 // Work around a driver bug. Initializers on Private variables might not
3081 // work. So the start of the kernel should store the initializer value to the
3082 // variables. Yes, *every* entry point pays this cost if *any* entry point
3083 // uses this builtin. At this point I judge this to be an acceptable tradeoff
3084 // of complexity vs. runtime, for a broken driver.
alan-bakerb6b09dc2018-11-08 16:59:28 -05003085 // TODO(dneto): Remove this at some point once fixed drivers are widely
3086 // available.
SJW01901d92020-05-21 08:58:31 -05003087 if (WorkgroupSizeVarID.isValid()) {
3088 assert(WorkgroupSizeValueID.isValid());
David Netob6e2e062018-04-25 10:32:06 -04003089
SJWf93f5f32020-05-05 07:27:56 -05003090 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05003091 Ops << WorkgroupSizeVarID << WorkgroupSizeValueID;
David Netob6e2e062018-04-25 10:32:06 -04003092
SJWf93f5f32020-05-05 07:27:56 -05003093 addSPIRVInst(spv::OpStore, Ops);
David Netob6e2e062018-04-25 10:32:06 -04003094 }
3095}
3096
David Neto22f144c2017-06-12 14:26:21 -04003097void SPIRVProducerPass::GenerateFuncBody(Function &F) {
David Neto22f144c2017-06-12 14:26:21 -04003098 ValueMapType &VMap = getValueMap();
3099
David Netob6e2e062018-04-25 10:32:06 -04003100 const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;
David Neto22f144c2017-06-12 14:26:21 -04003101
3102 for (BasicBlock &BB : F) {
3103 // Register BasicBlock to ValueMap.
David Neto22f144c2017-06-12 14:26:21 -04003104
3105 //
3106 // Generate OpLabel for Basic Block.
3107 //
SJWf93f5f32020-05-05 07:27:56 -05003108 VMap[&BB] = addSPIRVInst(spv::OpLabel);
David Neto22f144c2017-06-12 14:26:21 -04003109
David Neto6dcd4712017-06-23 11:06:47 -04003110 // OpVariable instructions must come first.
3111 for (Instruction &I : BB) {
alan-baker5b86ed72019-02-15 08:26:50 -05003112 if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
3113 // Allocating a pointer requires variable pointers.
3114 if (alloca->getAllocatedType()->isPointerTy()) {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003115 setVariablePointersCapabilities(
3116 alloca->getAllocatedType()->getPointerAddressSpace());
alan-baker5b86ed72019-02-15 08:26:50 -05003117 }
David Neto6dcd4712017-06-23 11:06:47 -04003118 GenerateInstruction(I);
3119 }
3120 }
3121
David Neto22f144c2017-06-12 14:26:21 -04003122 if (&BB == &F.getEntryBlock() && IsKernel) {
David Netob6e2e062018-04-25 10:32:06 -04003123 if (clspv::Option::HackInitializers()) {
3124 GenerateEntryPointInitialStores();
3125 }
David Neto22f144c2017-06-12 14:26:21 -04003126 }
3127
3128 for (Instruction &I : BB) {
David Neto6dcd4712017-06-23 11:06:47 -04003129 if (!isa<AllocaInst>(I)) {
3130 GenerateInstruction(I);
3131 }
David Neto22f144c2017-06-12 14:26:21 -04003132 }
3133 }
3134}
3135
3136spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
3137 const std::map<CmpInst::Predicate, spv::Op> Map = {
3138 {CmpInst::ICMP_EQ, spv::OpIEqual},
3139 {CmpInst::ICMP_NE, spv::OpINotEqual},
3140 {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
3141 {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
3142 {CmpInst::ICMP_ULT, spv::OpULessThan},
3143 {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
3144 {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
3145 {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
3146 {CmpInst::ICMP_SLT, spv::OpSLessThan},
3147 {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
3148 {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
3149 {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
3150 {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
3151 {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
3152 {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
3153 {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
3154 {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
3155 {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
3156 {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
3157 {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
3158 {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
3159 {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};
3160
3161 assert(0 != Map.count(I->getPredicate()));
3162
3163 return Map.at(I->getPredicate());
3164}
3165
3166spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
3167 const std::map<unsigned, spv::Op> Map{
3168 {Instruction::Trunc, spv::OpUConvert},
3169 {Instruction::ZExt, spv::OpUConvert},
3170 {Instruction::SExt, spv::OpSConvert},
3171 {Instruction::FPToUI, spv::OpConvertFToU},
3172 {Instruction::FPToSI, spv::OpConvertFToS},
3173 {Instruction::UIToFP, spv::OpConvertUToF},
3174 {Instruction::SIToFP, spv::OpConvertSToF},
3175 {Instruction::FPTrunc, spv::OpFConvert},
3176 {Instruction::FPExt, spv::OpFConvert},
3177 {Instruction::BitCast, spv::OpBitcast}};
3178
3179 assert(0 != Map.count(I.getOpcode()));
3180
3181 return Map.at(I.getOpcode());
3182}
3183
3184spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
Kévin Petit24272b62018-10-18 19:16:12 +00003185 if (I.getType()->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003186 switch (I.getOpcode()) {
3187 default:
3188 break;
3189 case Instruction::Or:
3190 return spv::OpLogicalOr;
3191 case Instruction::And:
3192 return spv::OpLogicalAnd;
3193 case Instruction::Xor:
3194 return spv::OpLogicalNotEqual;
3195 }
3196 }
3197
alan-bakerb6b09dc2018-11-08 16:59:28 -05003198 const std::map<unsigned, spv::Op> Map{
David Neto22f144c2017-06-12 14:26:21 -04003199 {Instruction::Add, spv::OpIAdd},
3200 {Instruction::FAdd, spv::OpFAdd},
3201 {Instruction::Sub, spv::OpISub},
3202 {Instruction::FSub, spv::OpFSub},
3203 {Instruction::Mul, spv::OpIMul},
3204 {Instruction::FMul, spv::OpFMul},
3205 {Instruction::UDiv, spv::OpUDiv},
3206 {Instruction::SDiv, spv::OpSDiv},
3207 {Instruction::FDiv, spv::OpFDiv},
3208 {Instruction::URem, spv::OpUMod},
3209 {Instruction::SRem, spv::OpSRem},
3210 {Instruction::FRem, spv::OpFRem},
3211 {Instruction::Or, spv::OpBitwiseOr},
3212 {Instruction::Xor, spv::OpBitwiseXor},
3213 {Instruction::And, spv::OpBitwiseAnd},
3214 {Instruction::Shl, spv::OpShiftLeftLogical},
3215 {Instruction::LShr, spv::OpShiftRightLogical},
3216 {Instruction::AShr, spv::OpShiftRightArithmetic}};
3217
3218 assert(0 != Map.count(I.getOpcode()));
3219
3220 return Map.at(I.getOpcode());
3221}
3222
3223void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
David Neto22f144c2017-06-12 14:26:21 -04003224 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04003225 LLVMContext &Context = I.getParent()->getParent()->getParent()->getContext();
3226
SJWf93f5f32020-05-05 07:27:56 -05003227 SPIRVID RID = 0;
David Neto22f144c2017-06-12 14:26:21 -04003228
3229 switch (I.getOpcode()) {
3230 default: {
3231 if (Instruction::isCast(I.getOpcode())) {
3232 //
3233 // Generate SPIRV instructions for cast operators.
3234 //
3235
David Netod2de94a2017-08-28 17:27:47 -04003236 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003237 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04003238 auto toI8 = Ty == Type::getInt8Ty(Context);
3239 auto fromI32 = OpTy == Type::getInt32Ty(Context);
David Neto22f144c2017-06-12 14:26:21 -04003240 // Handle zext, sext and uitofp with i1 type specially.
3241 if ((I.getOpcode() == Instruction::ZExt ||
3242 I.getOpcode() == Instruction::SExt ||
3243 I.getOpcode() == Instruction::UIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05003244 OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003245 //
3246 // Generate OpSelect.
3247 //
3248
3249 // Ops[0] = Result Type ID
3250 // Ops[1] = Condition ID
3251 // Ops[2] = True Constant ID
3252 // Ops[3] = False Constant ID
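// For example (a sketch): "%r = zext i1 %c to i32" becomes roughly
//   %r = OpSelect %uint %c %uint_1 %uint_0
// sext selects -1/0 instead, and uitofp selects 1.0f/0.0f.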
SJWf93f5f32020-05-05 07:27:56 -05003253 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003254
SJW01901d92020-05-21 08:58:31 -05003255 Ops << I.getType() << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04003256
David Neto22f144c2017-06-12 14:26:21 -04003257 if (I.getOpcode() == Instruction::ZExt) {
SJW01901d92020-05-21 08:58:31 -05003258 Ops << ConstantInt::get(I.getType(), 1);
David Neto22f144c2017-06-12 14:26:21 -04003259 } else if (I.getOpcode() == Instruction::SExt) {
SJW01901d92020-05-21 08:58:31 -05003260 Ops << ConstantInt::getSigned(I.getType(), -1);
David Neto22f144c2017-06-12 14:26:21 -04003261 } else {
SJW01901d92020-05-21 08:58:31 -05003262 Ops << ConstantFP::get(Context, APFloat(1.0f));
David Neto22f144c2017-06-12 14:26:21 -04003263 }
David Neto22f144c2017-06-12 14:26:21 -04003264
David Neto22f144c2017-06-12 14:26:21 -04003265 if (I.getOpcode() == Instruction::ZExt) {
SJW01901d92020-05-21 08:58:31 -05003266 Ops << Constant::getNullValue(I.getType());
David Neto22f144c2017-06-12 14:26:21 -04003267 } else if (I.getOpcode() == Instruction::SExt) {
SJW01901d92020-05-21 08:58:31 -05003268 Ops << Constant::getNullValue(I.getType());
David Neto22f144c2017-06-12 14:26:21 -04003269 } else {
SJW01901d92020-05-21 08:58:31 -05003270 Ops << ConstantFP::get(Context, APFloat(0.0f));
David Neto22f144c2017-06-12 14:26:21 -04003271 }
David Neto22f144c2017-06-12 14:26:21 -04003272
SJWf93f5f32020-05-05 07:27:56 -05003273 RID = addSPIRVInst(spv::OpSelect, Ops);
alan-bakerb39c8262019-03-08 14:03:37 -05003274 } else if (!clspv::Option::Int8Support() &&
3275 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04003276 // The SPIR-V target type is a 32-bit int. Keep only the bottom
3277 // 8 bits.
3278 // Before:
3279 // %result = trunc i32 %a to i8
3280 // After
3281 // %result = OpBitwiseAnd %uint %a %uint_255
3282
SJWf93f5f32020-05-05 07:27:56 -05003283 SPIRVOperandVec Ops;
David Netod2de94a2017-08-28 17:27:47 -04003284
David Netod2de94a2017-08-28 17:27:47 -04003285 Type *UintTy = Type::getInt32Ty(Context);
SJW01901d92020-05-21 08:58:31 -05003286 Ops << OpTy << I.getOperand(0) << ConstantInt::get(UintTy, 255);
David Netod2de94a2017-08-28 17:27:47 -04003287
SJWf93f5f32020-05-05 07:27:56 -05003288 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003289 } else {
3290 // Ops[0] = Result Type ID
3291 // Ops[1] = Source Value ID
SJWf93f5f32020-05-05 07:27:56 -05003292 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003293
SJW01901d92020-05-21 08:58:31 -05003294 Ops << I.getType() << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04003295
SJWf93f5f32020-05-05 07:27:56 -05003296 RID = addSPIRVInst(GetSPIRVCastOpcode(I), Ops);
David Neto22f144c2017-06-12 14:26:21 -04003297 }
3298 } else if (isa<BinaryOperator>(I)) {
3299 //
3300 // Generate SPIRV instructions for binary operators.
3301 //
3302
3303 // Handle xor with i1 type specially.
3304 if (I.getOpcode() == Instruction::Xor &&
3305 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00003306 ((isa<ConstantInt>(I.getOperand(0)) &&
3307 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
3308 (isa<ConstantInt>(I.getOperand(1)) &&
3309 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04003310 //
3311 // Generate OpLogicalNot.
3312 //
3313 // Ops[0] = Result Type ID
3314 // Ops[1] = Operand
SJWf93f5f32020-05-05 07:27:56 -05003315 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003316
SJW01901d92020-05-21 08:58:31 -05003317 Ops << I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003318
3319 Value *CondV = I.getOperand(0);
3320 if (isa<Constant>(I.getOperand(0))) {
3321 CondV = I.getOperand(1);
3322 }
SJW01901d92020-05-21 08:58:31 -05003323 Ops << CondV;
David Neto22f144c2017-06-12 14:26:21 -04003324
SJWf93f5f32020-05-05 07:27:56 -05003325 RID = addSPIRVInst(spv::OpLogicalNot, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003326 } else {
3327 // Ops[0] = Result Type ID
3328 // Ops[1] = Operand 0
3329 // Ops[2] = Operand 1
SJWf93f5f32020-05-05 07:27:56 -05003330 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003331
SJW01901d92020-05-21 08:58:31 -05003332 Ops << I.getType() << I.getOperand(0) << I.getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04003333
SJWf93f5f32020-05-05 07:27:56 -05003334 RID = addSPIRVInst(GetSPIRVBinaryOpcode(I), Ops);
David Neto22f144c2017-06-12 14:26:21 -04003335 }
alan-bakerc9c55ae2019-12-02 16:01:27 -05003336 } else if (I.getOpcode() == Instruction::FNeg) {
3337 // The only unary operator.
3338 //
3339 // Ops[0] = Result Type ID
3340 // Ops[1] = Operand 0
SJW01901d92020-05-21 08:58:31 -05003341 SPIRVOperandVec Ops;
alan-bakerc9c55ae2019-12-02 16:01:27 -05003342
SJW01901d92020-05-21 08:58:31 -05003343 Ops << I.getType() << I.getOperand(0);
3344 RID = addSPIRVInst(spv::OpFNegate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003345 } else {
3346 I.print(errs());
3347 llvm_unreachable("Unsupported instruction???");
3348 }
3349 break;
3350 }
3351 case Instruction::GetElementPtr: {
3352 auto &GlobalConstArgSet = getGlobalConstArgSet();
3353
3357 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
3358
3359 //
3360 // Generate OpAccessChain.
3361 //
3362
3363 // Ops[0] = Result Type ID
3364 // Ops[1] = Base ID
3365 // Ops[2] ... Ops[n] = Indexes ID
SJWf93f5f32020-05-05 07:27:56 -05003366 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003367
alan-bakerb6b09dc2018-11-08 16:59:28 -05003368 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04003369 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
3370 GlobalConstArgSet.count(GEP->getPointerOperand())) {
3371 // Use pointer type with private address space for global constant.
3372 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04003373 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04003374 }
David Neto257c3892018-04-11 13:19:45 -04003375
SJW01901d92020-05-21 08:58:31 -05003376 Ops << ResultType;
David Neto22f144c2017-06-12 14:26:21 -04003377
David Neto862b7d82018-06-14 18:48:37 -04003378 // Generate the base pointer.
SJW01901d92020-05-21 08:58:31 -05003379 Ops << GEP->getPointerOperand();
David Neto22f144c2017-06-12 14:26:21 -04003380
David Neto862b7d82018-06-14 18:48:37 -04003381 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04003382
3383 //
3384 // GEP translation follows these rules:
3385 //
David Neto862b7d82018-06-14 18:48:37 -04003386 // 1. If gep's first index is 0, generate OpAccessChain and ignore gep's
3387 // first index.
David Neto22f144c2017-06-12 14:26:21 -04003388 // 2. If gep's first index is not 0, generate OpPtrAccessChain and use gep's
3389 // first index.
3390 // 3. If gep's first index is not constant, generate OpPtrAccessChain and
3391 // use gep's first index.
3392 // 4. If it is not above case 1, 2 and 3, generate OpAccessChain and use
3393 // gep's first index.
3394 //
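// For illustration (a sketch; operands shown symbolically, not actual IDs):
//   gep %S, %S* %p, i32 0, i32 2  ->  OpAccessChain    %ptr %p %uint_2         (rule 1)
//   gep %S, %S* %p, i32 1, i32 2  ->  OpPtrAccessChain %ptr %p %uint_1 %uint_2 (rule 2)
//   gep i32, i32* %p, i32 %n      ->  OpPtrAccessChain %ptr %p %n              (rule 3)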
3395 spv::Op Opcode = spv::OpAccessChain;
3396 unsigned offset = 0;
3397 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04003398 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04003399 offset = 1;
David Neto862b7d82018-06-14 18:48:37 -04003400 } else if (CstInt->getZExtValue() != 0) {
David Neto22f144c2017-06-12 14:26:21 -04003401 Opcode = spv::OpPtrAccessChain;
David Neto22f144c2017-06-12 14:26:21 -04003402 }
David Neto862b7d82018-06-14 18:48:37 -04003403 } else {
David Neto22f144c2017-06-12 14:26:21 -04003404 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04003405 }
3406
3407 if (Opcode == spv::OpPtrAccessChain) {
David Neto1a1a0582017-07-07 12:01:44 -04003408 // Do we need to generate ArrayStride? Check against the GEP result type
3409 // rather than the pointer type of the base because when indexing into
3410 // an OpenCL program-scope constant, we'll swap out the LLVM base pointer
3411 // for something else in the SPIR-V.
3412 // E.g. see test/PointerAccessChain/pointer_index_is_constant_1.cl
alan-baker5b86ed72019-02-15 08:26:50 -05003413 auto address_space = ResultType->getAddressSpace();
3414 setVariablePointersCapabilities(address_space);
3415 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04003416 case spv::StorageClassStorageBuffer:
3417 case spv::StorageClassUniform:
David Neto1a1a0582017-07-07 12:01:44 -04003418 // Save the need to generate an ArrayStride decoration. But defer
3419 // generation until later, so we only make one decoration.
David Neto85082642018-03-24 06:55:20 -07003420 getTypesNeedingArrayStride().insert(ResultType);
Alan Bakerfcda9482018-10-02 17:09:59 -04003421 break;
3422 default:
3423 break;
David Neto1a1a0582017-07-07 12:01:44 -04003424 }
David Neto22f144c2017-06-12 14:26:21 -04003425 }
3426
3427 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
SJW01901d92020-05-21 08:58:31 -05003428 Ops << *II;
David Neto22f144c2017-06-12 14:26:21 -04003429 }
3430
SJWf93f5f32020-05-05 07:27:56 -05003431 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003432 break;
3433 }
3434 case Instruction::ExtractValue: {
3435 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
3436 // Ops[0] = Result Type ID
3437 // Ops[1] = Composite ID
3438 // Ops[2] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05003439 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003440
SJW01901d92020-05-21 08:58:31 -05003441 Ops << I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003442
SJW01901d92020-05-21 08:58:31 -05003443 Ops << EVI->getAggregateOperand();
David Neto22f144c2017-06-12 14:26:21 -04003444
3445 for (auto &Index : EVI->indices()) {
SJW01901d92020-05-21 08:58:31 -05003446 Ops << Index;
David Neto22f144c2017-06-12 14:26:21 -04003447 }
3448
SJWf93f5f32020-05-05 07:27:56 -05003449 RID = addSPIRVInst(spv::OpCompositeExtract, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003450 break;
3451 }
3452 case Instruction::InsertValue: {
3453 InsertValueInst *IVI = cast<InsertValueInst>(&I);
3454 // Ops[0] = Result Type ID
3455 // Ops[1] = Object ID
3456 // Ops[2] = Composite ID
3457 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05003458 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003459
SJW01901d92020-05-21 08:58:31 -05003460 Ops << I.getType() << IVI->getInsertedValueOperand()
3461 << IVI->getAggregateOperand();
David Neto22f144c2017-06-12 14:26:21 -04003462
3463 for (auto &Index : IVI->indices()) {
SJW01901d92020-05-21 08:58:31 -05003464 Ops << Index;
David Neto22f144c2017-06-12 14:26:21 -04003465 }
3466
SJWf93f5f32020-05-05 07:27:56 -05003467 RID = addSPIRVInst(spv::OpCompositeInsert, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003468 break;
3469 }
3470 case Instruction::Select: {
3471 //
3472 // Generate OpSelect.
3473 //
3474
3475 // Ops[0] = Result Type ID
3476 // Ops[1] = Condition ID
3477 // Ops[2] = True Constant ID
3478 // Ops[3] = False Constant ID
SJWf93f5f32020-05-05 07:27:56 -05003479 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003480
3481 // Find the SPIR-V type for the result.
3482 auto Ty = I.getType();
3483 if (Ty->isPointerTy()) {
3484 auto PointeeTy = Ty->getPointerElementType();
3485 if (PointeeTy->isStructTy() &&
3486 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
3487 Ty = PointeeTy;
alan-baker5b86ed72019-02-15 08:26:50 -05003488 } else {
3489 // Selecting between pointers requires variable pointers.
3490 setVariablePointersCapabilities(Ty->getPointerAddressSpace());
3491 if (!hasVariablePointers() && !selectFromSameObject(&I)) {
SJW01901d92020-05-21 08:58:31 -05003492 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05003493 }
David Neto22f144c2017-06-12 14:26:21 -04003494 }
3495 }
3496
SJW01901d92020-05-21 08:58:31 -05003497 Ops << Ty << I.getOperand(0) << I.getOperand(1) << I.getOperand(2);
David Neto22f144c2017-06-12 14:26:21 -04003498
SJWf93f5f32020-05-05 07:27:56 -05003499 RID = addSPIRVInst(spv::OpSelect, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003500 break;
3501 }
3502 case Instruction::ExtractElement: {
3503 // Handle <4 x i8> type manually.
3504 Type *CompositeTy = I.getOperand(0)->getType();
3505 if (is4xi8vec(CompositeTy)) {
3506 //
3507 // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
3508 // <4 x i8>.
3509 //
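// In scalar terms (a sketch), extracting element i from the packed word v is
//   (v >> (8 * i)) & 0xFF
// i.e. an OpShiftRightLogical followed by an OpBitwiseAnd with 255.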
3510
3511 //
3512 // Generate OpShiftRightLogical
3513 //
3514 // Ops[0] = Result Type ID
3515 // Ops[1] = Operand 0
3516 // Ops[2] = Operand 1
3517 //
SJWf93f5f32020-05-05 07:27:56 -05003518 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003519
SJW01901d92020-05-21 08:58:31 -05003520 Ops << CompositeTy << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04003521
SJW01901d92020-05-21 08:58:31 -05003522 SPIRVID Op1ID = 0;
David Neto22f144c2017-06-12 14:26:21 -04003523 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
3524 // Handle constant index.
3525 uint64_t Idx = CI->getZExtValue();
3526 Value *ShiftAmount =
3527 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
SJWf93f5f32020-05-05 07:27:56 -05003528 Op1ID = getSPIRVValue(ShiftAmount);
David Neto22f144c2017-06-12 14:26:21 -04003529 } else {
3530 // Handle variable index.
SJWf93f5f32020-05-05 07:27:56 -05003531 SPIRVOperandVec TmpOps;
David Neto22f144c2017-06-12 14:26:21 -04003532
David Neto22f144c2017-06-12 14:26:21 -04003533 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
SJW01901d92020-05-21 08:58:31 -05003534 TmpOps << Type::getInt32Ty(Context) << I.getOperand(1) << Cst8;
David Neto22f144c2017-06-12 14:26:21 -04003535
SJWf93f5f32020-05-05 07:27:56 -05003536 Op1ID = addSPIRVInst(spv::OpIMul, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04003537 }
SJW01901d92020-05-21 08:58:31 -05003538 Ops << Op1ID;
David Neto22f144c2017-06-12 14:26:21 -04003539
SJW01901d92020-05-21 08:58:31 -05003540 SPIRVID ShiftID = addSPIRVInst(spv::OpShiftRightLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003541
3542 //
3543 // Generate OpBitwiseAnd
3544 //
3545 // Ops[0] = Result Type ID
3546 // Ops[1] = Operand 0
3547 // Ops[2] = Operand 1
3548 //
3549 Ops.clear();
3550
David Neto22f144c2017-06-12 14:26:21 -04003551 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
SJW01901d92020-05-21 08:58:31 -05003552 Ops << CompositeTy << ShiftID << CstFF;
David Neto22f144c2017-06-12 14:26:21 -04003553
SJWf93f5f32020-05-05 07:27:56 -05003554 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003555 break;
3556 }
3557
3558 // Ops[0] = Result Type ID
3559 // Ops[1] = Composite ID
3560 // Ops[2] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05003561 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003562
SJW01901d92020-05-21 08:58:31 -05003563 Ops << I.getType() << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04003564
3565 spv::Op Opcode = spv::OpCompositeExtract;
3566 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
SJW01901d92020-05-21 08:58:31 -05003567 Ops << static_cast<uint32_t>(CI->getZExtValue());
David Neto22f144c2017-06-12 14:26:21 -04003568 } else {
SJW01901d92020-05-21 08:58:31 -05003569 Ops << I.getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04003570 Opcode = spv::OpVectorExtractDynamic;
3571 }
3572
SJWf93f5f32020-05-05 07:27:56 -05003573 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003574 break;
3575 }
3576 case Instruction::InsertElement: {
3577 // Handle <4 x i8> type manually.
3578 Type *CompositeTy = I.getOperand(0)->getType();
3579 if (is4xi8vec(CompositeTy)) {
3580 Constant *CstFF = ConstantInt::get(Type::getInt32Ty(Context), 0xFF);
SJW01901d92020-05-21 08:58:31 -05003581 SPIRVID CstFFID = getSPIRVValue(CstFF);
David Neto22f144c2017-06-12 14:26:21 -04003582
SJW01901d92020-05-21 08:58:31 -05003583 SPIRVID ShiftAmountID = 0;
David Neto22f144c2017-06-12 14:26:21 -04003584 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
3585 // Handle constant index.
3586 uint64_t Idx = CI->getZExtValue();
3587 Value *ShiftAmount =
3588 ConstantInt::get(Type::getInt32Ty(Context), Idx * 8);
SJWf93f5f32020-05-05 07:27:56 -05003589 ShiftAmountID = getSPIRVValue(ShiftAmount);
David Neto22f144c2017-06-12 14:26:21 -04003590 } else {
3591 // Handle variable index.
SJWf93f5f32020-05-05 07:27:56 -05003592 SPIRVOperandVec TmpOps;
David Neto22f144c2017-06-12 14:26:21 -04003593
David Neto22f144c2017-06-12 14:26:21 -04003594 ConstantInt *Cst8 = ConstantInt::get(Type::getInt32Ty(Context), 8);
SJW01901d92020-05-21 08:58:31 -05003595 TmpOps << Type::getInt32Ty(Context) << I.getOperand(2) << Cst8;
David Neto22f144c2017-06-12 14:26:21 -04003596
SJWf93f5f32020-05-05 07:27:56 -05003597 ShiftAmountID = addSPIRVInst(spv::OpIMul, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04003598 }
3599
3600 //
3601 // Generate mask operations.
3602 //
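    // A minimal sketch of the lowering for inserting byte i (IDs are arbitrary):
    //   %mask    = OpShiftLeftLogical %uint %uint_255 %shift    ; 0xFF << (i * 8)
    //   %invmask = OpNot              %uint %mask
    //   %keep    = OpBitwiseAnd       %uint %original %invmask
    //   %newbits = OpShiftLeftLogical %uint %newbyte  %shift
    //   %result  = OpBitwiseOr        %uint %keep     %newbits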
3603
3604 // Shift the 0xFF mask left according to the insertelement index.
SJWf93f5f32020-05-05 07:27:56 -05003605 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003606
SJW01901d92020-05-21 08:58:31 -05003607 Ops << CompositeTy << CstFFID << ShiftAmountID;
David Neto22f144c2017-06-12 14:26:21 -04003608
SJW01901d92020-05-21 08:58:31 -05003609 SPIRVID MaskID = addSPIRVInst(spv::OpShiftLeftLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003610
3611 // Inverse mask.
3612 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05003613 Ops << CompositeTy << MaskID;
David Neto22f144c2017-06-12 14:26:21 -04003614
SJW01901d92020-05-21 08:58:31 -05003615 SPIRVID InvMaskID = addSPIRVInst(spv::OpNot, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003616
3617 // Apply mask.
3618 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05003619 Ops << CompositeTy << I.getOperand(0) << InvMaskID;
David Neto22f144c2017-06-12 14:26:21 -04003620
SJW01901d92020-05-21 08:58:31 -05003621 SPIRVID OrgValID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003622
3623 // Shift the new element value into position according to the insertelement index.
3624 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05003625 Ops << CompositeTy << I.getOperand(1) << ShiftAmountID;
David Neto22f144c2017-06-12 14:26:21 -04003626
SJW01901d92020-05-21 08:58:31 -05003627 SPIRVID InsertValID = addSPIRVInst(spv::OpShiftLeftLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003628
3629 // Merge the shifted value into the masked original value.
3630 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05003631 Ops << CompositeTy << OrgValID << InsertValID;
David Neto22f144c2017-06-12 14:26:21 -04003632
SJWf93f5f32020-05-05 07:27:56 -05003633 RID = addSPIRVInst(spv::OpBitwiseOr, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003634 break;
3635 }
3636
SJWf93f5f32020-05-05 07:27:56 -05003637 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003638
James Priced26efea2018-06-09 23:28:32 +01003639 // Ops[0] = Result Type ID
SJW01901d92020-05-21 08:58:31 -05003640 Ops << I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003641
3642 spv::Op Opcode = spv::OpCompositeInsert;
3643 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
David Neto257c3892018-04-11 13:19:45 -04003644 const auto value = CI->getZExtValue();
3645 assert(value <= UINT32_MAX);
James Priced26efea2018-06-09 23:28:32 +01003646 // Ops[1] = Object ID
3647 // Ops[2] = Composite ID
3648 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJW01901d92020-05-21 08:58:31 -05003649 Ops << I.getOperand(1) << I.getOperand(0) << static_cast<uint32_t>(value);
David Neto22f144c2017-06-12 14:26:21 -04003650 } else {
James Priced26efea2018-06-09 23:28:32 +01003651 // Ops[1] = Composite ID
3652 // Ops[2] = Object ID
3653 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJW01901d92020-05-21 08:58:31 -05003654 Ops << I.getOperand(0) << I.getOperand(1) << I.getOperand(2);
David Neto22f144c2017-06-12 14:26:21 -04003655 Opcode = spv::OpVectorInsertDynamic;
3656 }
3657
SJWf93f5f32020-05-05 07:27:56 -05003658 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003659 break;
3660 }
3661 case Instruction::ShuffleVector: {
3662 // Ops[0] = Result Type ID
3663 // Ops[1] = Vector 1 ID
3664 // Ops[2] = Vector 2 ID
3665 // Ops[3] ... Ops[n] = Components (Literal Number)
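    // For example (IDs are arbitrary), interleaving two 2-element vectors emits:
    //   %r = OpVectorShuffle %v4float %a %b 0 2 1 3
    // Undef mask elements are written as 0xFFFFFFFF, or as 0 under the
    // HackUndef() option.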
SJWf93f5f32020-05-05 07:27:56 -05003666 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003667
SJW01901d92020-05-21 08:58:31 -05003668 Ops << I.getType() << I.getOperand(0) << I.getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04003669
alan-bakerc9666712020-04-01 16:31:21 -04003670 auto shuffle = cast<ShuffleVectorInst>(&I);
3671 SmallVector<int, 4> mask;
3672 shuffle->getShuffleMask(mask);
3673 for (auto i : mask) {
3674 if (i == UndefMaskElem) {
3675 if (clspv::Option::HackUndef())
3676 // Use 0 instead of undef.
SJW01901d92020-05-21 08:58:31 -05003677 Ops << 0;
alan-bakerc9666712020-04-01 16:31:21 -04003678 else
3679 // Undef for shuffle in SPIR-V.
SJW01901d92020-05-21 08:58:31 -05003680 Ops << 0xffffffff;
David Neto22f144c2017-06-12 14:26:21 -04003681 } else {
SJW01901d92020-05-21 08:58:31 -05003682 Ops << i;
David Neto22f144c2017-06-12 14:26:21 -04003683 }
3684 }
3685
SJWf93f5f32020-05-05 07:27:56 -05003686 RID = addSPIRVInst(spv::OpVectorShuffle, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003687 break;
3688 }
3689 case Instruction::ICmp:
3690 case Instruction::FCmp: {
3691 CmpInst *CmpI = cast<CmpInst>(&I);
3692
David Netod4ca2e62017-07-06 18:47:35 -04003693 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05003694 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04003695 if (isa<PointerType>(ArgTy)) {
3696 CmpI->print(errs());
alan-baker21574d32020-01-29 16:00:31 -05003697 std::string name = I.getParent()->getParent()->getName().str();
David Netod4ca2e62017-07-06 18:47:35 -04003698 errs()
3699 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
3700 << "in function " << name << "\n";
3701 llvm_unreachable("Pointer equality check is invalid");
3702 break;
3703 }
3704
David Neto257c3892018-04-11 13:19:45 -04003705 // Ops[0] = Result Type ID
3706 // Ops[1] = Operand 1 ID
3707 // Ops[2] = Operand 2 ID
SJWf93f5f32020-05-05 07:27:56 -05003708 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003709
SJW01901d92020-05-21 08:58:31 -05003710 Ops << CmpI->getType() << CmpI->getOperand(0) << CmpI->getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04003711
3712 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
SJWf93f5f32020-05-05 07:27:56 -05003713 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003714 break;
3715 }
3716 case Instruction::Br: {
SJW88ed5fe2020-05-11 12:40:57 -05003717 // Branch instructions are deferred because they need the IDs of their target labels.
3718 BasicBlock *BrBB = I.getParent();
3719 if (ContinueBlocks.count(BrBB) || MergeBlocks.count(BrBB)) {
3720 // Placeholder for Merge operation
3721 RID = addSPIRVPlaceholder(&I);
3722 }
3723 RID = addSPIRVPlaceholder(&I);
David Neto22f144c2017-06-12 14:26:21 -04003724 break;
3725 }
3726 case Instruction::Switch: {
3727 I.print(errs());
3728 llvm_unreachable("Unsupported instruction???");
3729 break;
3730 }
3731 case Instruction::IndirectBr: {
3732 I.print(errs());
3733 llvm_unreachable("Unsupported instruction???");
3734 break;
3735 }
3736 case Instruction::PHI: {
SJW88ed5fe2020-05-11 12:40:57 -05003737 // PHI instructions are deferred because they need the IDs of their incoming labels.
3738 RID = addSPIRVPlaceholder(&I);
David Neto22f144c2017-06-12 14:26:21 -04003739 break;
3740 }
3741 case Instruction::Alloca: {
3742 //
3743 // Generate OpVariable.
3744 //
3745 // Ops[0] : Result Type ID
3746 // Ops[1] : Storage Class
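    // e.g. (IDs are arbitrary): %p = OpVariable %_ptr_Function_float Function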
SJWf93f5f32020-05-05 07:27:56 -05003747 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003748
SJW01901d92020-05-21 08:58:31 -05003749 Ops << I.getType() << spv::StorageClassFunction;
David Neto22f144c2017-06-12 14:26:21 -04003750
SJWf93f5f32020-05-05 07:27:56 -05003751 RID = addSPIRVInst(spv::OpVariable, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003752 break;
3753 }
3754 case Instruction::Load: {
3755 LoadInst *LD = cast<LoadInst>(&I);
3756 //
3757 // Generate OpLoad.
3758 //
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003759
alan-baker5b86ed72019-02-15 08:26:50 -05003760 if (LD->getType()->isPointerTy()) {
3761 // Loading a pointer requires variable pointers.
3762 setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
3763 }
David Neto22f144c2017-06-12 14:26:21 -04003764
SJW01901d92020-05-21 08:58:31 -05003765 SPIRVID PointerID = getSPIRVValue(LD->getPointerOperand());
David Netoa60b00b2017-09-15 16:34:09 -04003766 // This is a hack to work around what looks like a driver bug.
3767 // When we're loading from the special variable holding the WorkgroupSize
David Neto0a2f98d2017-09-15 19:38:40 -04003768 // builtin value, use an OpBitwiseAnd of the value's ID rather than
3769 // generating a load.
David Neto66cfe642018-03-24 06:13:56 -07003770 // TODO(dneto): Remove this awful hack once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -04003771 if (PointerID == WorkgroupSizeVarID) {
David Neto0a2f98d2017-09-15 19:38:40 -04003772 // Generate a bitwise-and of the original value with itself.
3773 // We should have been able to get away with just an OpCopyObject,
3774 // but we need something more complex to get past certain driver bugs.
3775 // This is ridiculous, but necessary.
3776 // TODO(dneto): Revisit this once drivers fix their bugs.
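      // The emitted instruction is effectively a no-op copy, e.g. (IDs are
      // arbitrary, assuming the builtin is a 3-component uint vector):
      //   %size = OpBitwiseAnd %v3uint %wgsize_value %wgsize_value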
3777
SJWf93f5f32020-05-05 07:27:56 -05003778 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05003779 Ops << LD->getType() << WorkgroupSizeValueID << WorkgroupSizeValueID;
David Neto0a2f98d2017-09-15 19:38:40 -04003780
SJWf93f5f32020-05-05 07:27:56 -05003781 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Netoa60b00b2017-09-15 16:34:09 -04003782 break;
3783 }
3784
3785 // This is the normal path. Generate a load.
3786
David Neto22f144c2017-06-12 14:26:21 -04003787 // Ops[0] = Result Type ID
3788 // Ops[1] = Pointer ID
3789 // Ops[2] ... Ops[n] = Optional Memory Access
3790 //
3791 // TODO: Do we need to implement Optional Memory Access???
David Neto0a2f98d2017-09-15 19:38:40 -04003792
SJWf93f5f32020-05-05 07:27:56 -05003793 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05003794 Ops << LD->getType() << LD->getPointerOperand();
David Neto22f144c2017-06-12 14:26:21 -04003795
SJWf93f5f32020-05-05 07:27:56 -05003796 RID = addSPIRVInst(spv::OpLoad, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003797 break;
3798 }
3799 case Instruction::Store: {
3800 StoreInst *ST = cast<StoreInst>(&I);
3801 //
3802 // Generate OpStore.
3803 //
3804
alan-baker5b86ed72019-02-15 08:26:50 -05003805 if (ST->getValueOperand()->getType()->isPointerTy()) {
3806 // Storing a pointer requires variable pointers.
3807 setVariablePointersCapabilities(
3808 ST->getValueOperand()->getType()->getPointerAddressSpace());
3809 }
3810
David Neto22f144c2017-06-12 14:26:21 -04003811 // Ops[0] = Pointer ID
3812 // Ops[1] = Object ID
3813 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
3814 //
3815 // TODO: Do we need to implement Optional Memory Access???
SJWf93f5f32020-05-05 07:27:56 -05003816 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05003817 Ops << ST->getPointerOperand() << ST->getValueOperand();
David Neto22f144c2017-06-12 14:26:21 -04003818
SJWf93f5f32020-05-05 07:27:56 -05003819 RID = addSPIRVInst(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003820 break;
3821 }
3822 case Instruction::AtomicCmpXchg: {
3823 I.print(errs());
3824 llvm_unreachable("Unsupported instruction???");
3825 break;
3826 }
3827 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01003828 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
3829
3830 spv::Op opcode;
3831
3832 switch (AtomicRMW->getOperation()) {
3833 default:
3834 I.print(errs());
3835 llvm_unreachable("Unsupported instruction???");
3836 case llvm::AtomicRMWInst::Add:
3837 opcode = spv::OpAtomicIAdd;
3838 break;
3839 case llvm::AtomicRMWInst::Sub:
3840 opcode = spv::OpAtomicISub;
3841 break;
3842 case llvm::AtomicRMWInst::Xchg:
3843 opcode = spv::OpAtomicExchange;
3844 break;
3845 case llvm::AtomicRMWInst::Min:
3846 opcode = spv::OpAtomicSMin;
3847 break;
3848 case llvm::AtomicRMWInst::Max:
3849 opcode = spv::OpAtomicSMax;
3850 break;
3851 case llvm::AtomicRMWInst::UMin:
3852 opcode = spv::OpAtomicUMin;
3853 break;
3854 case llvm::AtomicRMWInst::UMax:
3855 opcode = spv::OpAtomicUMax;
3856 break;
3857 case llvm::AtomicRMWInst::And:
3858 opcode = spv::OpAtomicAnd;
3859 break;
3860 case llvm::AtomicRMWInst::Or:
3861 opcode = spv::OpAtomicOr;
3862 break;
3863 case llvm::AtomicRMWInst::Xor:
3864 opcode = spv::OpAtomicXor;
3865 break;
3866 }
3867
3868 //
3869 // Generate OpAtomic*.
3870 //
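    // For example, an LLVM `atomicrmw add` on a 32-bit integer is emitted
    // roughly as (IDs are arbitrary):
    //   %old = OpAtomicIAdd %uint %ptr %scope_device %semantics %value
    // where %semantics is UniformMemory | SequentiallyConsistent.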
SJWf93f5f32020-05-05 07:27:56 -05003871 SPIRVOperandVec Ops;
Neil Henning39672102017-09-29 14:33:13 +01003872
SJW01901d92020-05-21 08:58:31 -05003873 Ops << I.getType() << AtomicRMW->getPointerOperand();
Neil Henning39672102017-09-29 14:33:13 +01003874
3875 auto IntTy = Type::getInt32Ty(I.getContext());
Neil Henning39672102017-09-29 14:33:13 +01003876 const auto ConstantScopeDevice = ConstantInt::get(IntTy, spv::ScopeDevice);
SJW01901d92020-05-21 08:58:31 -05003877 Ops << ConstantScopeDevice;
Neil Henning39672102017-09-29 14:33:13 +01003878
3879 const auto ConstantMemorySemantics = ConstantInt::get(
3880 IntTy, spv::MemorySemanticsUniformMemoryMask |
3881 spv::MemorySemanticsSequentiallyConsistentMask);
SJW01901d92020-05-21 08:58:31 -05003882 Ops << ConstantMemorySemantics << AtomicRMW->getValOperand();
Neil Henning39672102017-09-29 14:33:13 +01003883
SJWf93f5f32020-05-05 07:27:56 -05003884 RID = addSPIRVInst(opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003885 break;
3886 }
3887 case Instruction::Fence: {
3888 I.print(errs());
3889 llvm_unreachable("Unsupported instruction???");
3890 break;
3891 }
3892 case Instruction::Call: {
3893 CallInst *Call = dyn_cast<CallInst>(&I);
3894 Function *Callee = Call->getCalledFunction();
SJW61531372020-06-09 07:31:08 -05003895 auto &func_info = Builtins::Lookup(Callee);
David Neto22f144c2017-06-12 14:26:21 -04003896
SJW61531372020-06-09 07:31:08 -05003897 glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(func_info);
3898
3899 if (EInst) {
3900 SPIRVID ExtInstImportID = getOpExtInstImportID();
3901
3902 //
3903 // Generate OpExtInst.
3904 //
3905
3906 // Ops[0] = Result Type ID
3907 // Ops[1] = Set ID (OpExtInstImport ID)
3908 // Ops[2] = Instruction Number (Literal Number)
3909 // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
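      // For example, sqrt(x) maps to (IDs are arbitrary):
      //   %r = OpExtInst %float %glsl_std_450 Sqrt %x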
3910 SPIRVOperandVec Ops;
3911
3912 Ops << Call->getType() << ExtInstImportID << EInst;
3913
3914 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
3915 for (unsigned j = 0; j < CalleeFTy->getNumParams(); j++) {
3916 Ops << Call->getOperand(j);
3917 }
3918
3919 RID = addSPIRVInst(spv::OpExtInst, Ops);
3920
3921 const auto IndirectExtInst = getIndirectExtInstEnum(func_info);
3922 if (IndirectExtInst != kGlslExtInstBad) {
3923
3924 // Generate one more instruction that uses the result of the extended
3925 // instruction. Its result id is one more than the id of the
3926 // extended instruction.
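      // For example, clz(x) becomes (IDs are arbitrary):
      //   %msb = OpExtInst %uint %glsl_std_450 FindUMsb %x
      //   %clz = OpISub    %uint %uint_31 %msb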
3927 auto generate_extra_inst = [this, &Context, &Call,
3928 &RID](spv::Op opcode, Constant *constant) {
3929 //
3930 // Generate instruction like:
3931 // result = opcode constant <extinst-result>
3932 //
3933 // Ops[0] = Result Type ID
3934 // Ops[1] = Operand 0 ;; the constant, suitably splatted
3935 // Ops[2] = Operand 1 ;; the result of the extended instruction
3936 SPIRVOperandVec Ops;
3937
3938 Type *resultTy = Call->getType();
3939
3940 if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
3941 constant = ConstantVector::getSplat(
3942 {static_cast<unsigned>(vectorTy->getNumElements()), false},
3943 constant);
3944 }
3945 Ops << resultTy << constant << RID;
3946
3947 RID = addSPIRVInst(opcode, Ops);
3948 };
3949
3950 auto IntTy = Type::getInt32Ty(Context);
3951 switch (IndirectExtInst) {
3952 case glsl::ExtInstFindUMsb: // Implementing clz
3953 generate_extra_inst(spv::OpISub, ConstantInt::get(IntTy, 31));
3954 break;
3955 case glsl::ExtInstAcos: // Implementing acospi
3956 case glsl::ExtInstAsin: // Implementing asinpi
3957 case glsl::ExtInstAtan: // Implementing atanpi
3958 case glsl::ExtInstAtan2: // Implementing atan2pi
3959 generate_extra_inst(
3960 spv::OpFMul,
3961 ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
3962 break;
3963
3964 default:
3965 assert(false && "internally inconsistent");
3966 }
3967 }
3968 break;
3969 }
3970
3971 switch (func_info.getType()) {
3972 case Builtins::kClspvResource: {
David Neto862b7d82018-06-14 18:48:37 -04003973 if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
3974 // Generate an OpLoad
SJWf93f5f32020-05-05 07:27:56 -05003975 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003976
SJW01901d92020-05-21 08:58:31 -05003977 Ops << Call->getType()->getPointerElementType()
3978 << ResourceVarDeferredLoadCalls[Call];
David Neto862b7d82018-06-14 18:48:37 -04003979
SJWf93f5f32020-05-05 07:27:56 -05003980 RID = addSPIRVInst(spv::OpLoad, Ops);
David Neto862b7d82018-06-14 18:48:37 -04003981
3982 } else {
3983 // This maps to an OpVariable we've already generated.
3984 // No code is generated for the call.
3985 }
3986 break;
SJW61531372020-06-09 07:31:08 -05003987 }
3988 case Builtins::kClspvLocal: {
Alan Baker202c8c72018-08-13 13:47:44 -04003989 // Don't codegen an instruction here, but instead map this call directly
3990 // to the workgroup variable id.
alan-bakerb6b09dc2018-11-08 16:59:28 -05003991 int spec_id = static_cast<int>(
3992 cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04003993 const auto &info = LocalSpecIdInfoMap[spec_id];
SJWf93f5f32020-05-05 07:27:56 -05003994 RID = info.variable_id;
Alan Baker202c8c72018-08-13 13:47:44 -04003995 break;
David Neto862b7d82018-06-14 18:48:37 -04003996 }
SJW61531372020-06-09 07:31:08 -05003997 case Builtins::kClspvSamplerVarLiteral: {
3998 // Sampler initializers become a load of the corresponding sampler.
David Neto862b7d82018-06-14 18:48:37 -04003999 // Map this to a load from the variable.
alan-baker09cb9802019-12-10 13:16:27 -05004000 const auto third_param = static_cast<unsigned>(
4001 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue());
4002 auto sampler_value = third_param;
4003 if (clspv::Option::UseSamplerMap()) {
4004 sampler_value = getSamplerMap()[third_param].first;
4005 }
David Neto862b7d82018-06-14 18:48:37 -04004006
4007 // Generate an OpLoad
SJWf93f5f32020-05-05 07:27:56 -05004008 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004009
SJW01901d92020-05-21 08:58:31 -05004010 Ops << SamplerTy->getPointerElementType()
4011 << SamplerLiteralToIDMap[sampler_value];
David Neto22f144c2017-06-12 14:26:21 -04004012
SJWf93f5f32020-05-05 07:27:56 -05004013 RID = addSPIRVInst(spv::OpLoad, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004014 break;
4015 }
SJW61531372020-06-09 07:31:08 -05004016 case Builtins::kSpirvAtomicXor: {
4017 // Handle SPIR-V intrinsics
SJWf93f5f32020-05-05 07:27:56 -05004018 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004019
Kévin Petit349c9502019-03-28 17:24:14 +00004020 if (!I.getType()->isVoidTy()) {
SJW01901d92020-05-21 08:58:31 -05004021 Ops << I.getType();
Kévin Petit349c9502019-03-28 17:24:14 +00004022 }
David Neto22f144c2017-06-12 14:26:21 -04004023
SJW61531372020-06-09 07:31:08 -05004024 for (unsigned i = 0; i < Call->getNumArgOperands(); i++) {
SJW01901d92020-05-21 08:58:31 -05004025 Ops << Call->getArgOperand(i);
David Neto22f144c2017-06-12 14:26:21 -04004026 }
4027
SJW61531372020-06-09 07:31:08 -05004028 RID = addSPIRVInst(spv::OpAtomicXor, Ops);
Kévin Petit8a560882019-03-21 15:24:34 +00004029 break;
4030 }
SJW61531372020-06-09 07:31:08 -05004031 case Builtins::kSpirvOp: {
4032 // Handle SPIR-V intrinsics
4033 auto *arg0 = dyn_cast<ConstantInt>(Call->getArgOperand(0));
4034 spv::Op opcode = static_cast<spv::Op>(arg0->getZExtValue());
4035 if (opcode != spv::OpNop) {
4036 SPIRVOperandVec Ops;
Kévin Petit8a560882019-03-21 15:24:34 +00004037
SJW61531372020-06-09 07:31:08 -05004038 if (!I.getType()->isVoidTy()) {
4039 Ops << I.getType();
4040 }
4041
4042 for (unsigned i = 1; i < Call->getNumArgOperands(); i++) {
4043 Ops << Call->getArgOperand(i);
4044 }
4045
4046 RID = addSPIRVInst(opcode, Ops);
4047 }
4048 break;
4049 }
4050 case Builtins::kPopcount: {
4051 //
4052 // Generate OpBitCount
4053 //
4054 // Ops[0] = Result Type ID
4055 // Ops[1] = Base ID
4056 SPIRVOperandVec Ops;
4057 Ops << Call->getType() << Call->getOperand(0);
4058
4059 RID = addSPIRVInst(spv::OpBitCount, Ops);
4060 break;
4061 }
4062 case Builtins::kSpirvCopyMemory: {
David Neto22f144c2017-06-12 14:26:21 -04004063 //
4064 // Generate OpCopyMemory.
4065 //
4066
4067 // Ops[0] = Dst ID
4068 // Ops[1] = Src ID
4069 // Ops[2] = Memory Access
4070 // Ops[3] = Alignment
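      // e.g. (IDs are arbitrary): OpCopyMemory %dst %src Aligned 4
      // with Volatile OR'd into the memory access mask when requested.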
4071
4072 auto IsVolatile =
4073 dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue() != 0;
4074
4075 auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
4076 : spv::MemoryAccessMaskNone;
4077
4078 auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;
4079
4080 auto Alignment =
4081 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
4082
SJWf93f5f32020-05-05 07:27:56 -05004083 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05004084 Ops << Call->getArgOperand(0) << Call->getArgOperand(1) << MemoryAccess
4085 << static_cast<uint32_t>(Alignment);
David Neto22f144c2017-06-12 14:26:21 -04004086
SJWf93f5f32020-05-05 07:27:56 -05004087 RID = addSPIRVInst(spv::OpCopyMemory, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004088 break;
4089 }
SJW61531372020-06-09 07:31:08 -05004090 case Builtins::kReadImagef:
4091 case Builtins::kReadImageh:
4092 case Builtins::kReadImagei:
4093 case Builtins::kReadImageui: {
4094 // read_image is converted to OpSampledImage and OpImageSampleExplicitLod.
4095 // Additionally, OpTypeSampledImage is generated.
4096 const auto &pi = func_info.getParameter(1);
4097 if (pi.isSampler()) {
4098 //
4099 // Generate OpSampledImage.
4100 //
4101 // Ops[0] = Result Type ID
4102 // Ops[1] = Image ID
4103 // Ops[2] = Sampler ID
4104 //
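      // A sketch of the generated pair (IDs are arbitrary, float image case):
      //   %si = OpSampledImage           %sampled_image_ty %image %sampler
      //   %px = OpImageSampleExplicitLod %v4float %si %coord Lod %float_0
      // Integer images sample into a 4 x i32 result and append an OpBitcast.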
4105 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004106
SJW61531372020-06-09 07:31:08 -05004107 Value *Image = Call->getArgOperand(0);
4108 Value *Sampler = Call->getArgOperand(1);
4109 Value *Coordinate = Call->getArgOperand(2);
David Neto22f144c2017-06-12 14:26:21 -04004110
SJW61531372020-06-09 07:31:08 -05004111 TypeMapType &OpImageTypeMap = getImageTypeMap();
4112 Type *ImageTy = Image->getType()->getPointerElementType();
4113 SPIRVID ImageTyID = OpImageTypeMap[ImageTy];
David Neto22f144c2017-06-12 14:26:21 -04004114
SJW61531372020-06-09 07:31:08 -05004115 Ops << ImageTyID << Image << Sampler;
David Neto257c3892018-04-11 13:19:45 -04004116
SJW61531372020-06-09 07:31:08 -05004117 SPIRVID SampledImageID = addSPIRVInst(spv::OpSampledImage, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004118
SJW61531372020-06-09 07:31:08 -05004119 //
4120 // Generate OpImageSampleExplicitLod.
4121 //
4122 // Ops[0] = Result Type ID
4123 // Ops[1] = Sampled Image ID
4124 // Ops[2] = Coordinate ID
4125 // Ops[3] = Image Operands Type ID
4126 // Ops[4] ... Ops[n] = Operands ID
4127 //
alan-bakerf67468c2019-11-25 15:51:49 -05004128 Ops.clear();
SJW61531372020-06-09 07:31:08 -05004129
4130 const bool is_int_image = IsIntImageType(Image->getType());
4131 SPIRVID result_type;
4132 if (is_int_image) {
4133 result_type = v4int32ID;
4134 } else {
4135 result_type = getSPIRVType(Call->getType());
4136 }
4137
4138 Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
4139 Ops << result_type << SampledImageID << Coordinate
4140 << spv::ImageOperandsLodMask << CstFP0;
4141
4142 RID = addSPIRVInst(spv::OpImageSampleExplicitLod, Ops);
4143
4144 if (is_int_image) {
4145 // Generate the bitcast.
4146 Ops.clear();
4147 Ops << Call->getType() << RID;
4148 RID = addSPIRVInst(spv::OpBitcast, Ops);
4149 }
4150 } else {
4151
4152 // read_image (without a sampler) is mapped to OpImageFetch.
4153 Value *Image = Call->getArgOperand(0);
4154 Value *Coordinate = Call->getArgOperand(1);
4155
4156 //
4157 // Generate OpImageFetch
4158 //
4159 // Ops[0] = Result Type ID
4160 // Ops[1] = Image ID
4161 // Ops[2] = Coordinate ID
4162 // Ops[3] = Lod
4163 // Ops[4] = 0
4164 //
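      // e.g. (IDs are arbitrary):
      //   %px = OpImageFetch %v4float %image %coord Lod %int_0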
4165 SPIRVOperandVec Ops;
4166
4167 const bool is_int_image = IsIntImageType(Image->getType());
4168 SPIRVID result_type;
4169 if (is_int_image) {
4170 result_type = v4int32ID;
4171 } else {
4172 result_type = getSPIRVType(Call->getType());
4173 }
4174 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
4175
4176 Ops << result_type << Image << Coordinate << spv::ImageOperandsLodMask
4177 << CstInt0;
4178
4179 RID = addSPIRVInst(spv::OpImageFetch, Ops);
4180
4181 if (is_int_image) {
4182 // Generate the bitcast.
4183 Ops.clear();
4184 Ops << Call->getType() << RID;
4185 RID = addSPIRVInst(spv::OpBitcast, Ops);
4186 }
alan-bakerf67468c2019-11-25 15:51:49 -05004187 }
David Neto22f144c2017-06-12 14:26:21 -04004188 break;
4189 }
4190
SJW61531372020-06-09 07:31:08 -05004191 case Builtins::kWriteImagef:
4192 case Builtins::kWriteImageh:
4193 case Builtins::kWriteImagei:
4194 case Builtins::kWriteImageui: {
4195 // write_image is mapped to OpImageWrite.
David Neto22f144c2017-06-12 14:26:21 -04004196 //
4197 // Generate OpImageWrite.
4198 //
4199 // Ops[0] = Image ID
4200 // Ops[1] = Coordinate ID
4201 // Ops[2] = Texel ID
4202 // Ops[3] = (Optional) Image Operands Type (Literal Number)
4203 // Ops[4] ... Ops[n] = (Optional) Operands ID
4204 //
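    // e.g. (IDs are arbitrary): OpImageWrite %image %coord %texel
    // For integer images the texel is first bitcast to a 4 x i32 value.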
SJWf93f5f32020-05-05 07:27:56 -05004205 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004206
4207 Value *Image = Call->getArgOperand(0);
4208 Value *Coordinate = Call->getArgOperand(1);
4209 Value *Texel = Call->getArgOperand(2);
4210
SJW01901d92020-05-21 08:58:31 -05004211 SPIRVID TexelID = getSPIRVValue(Texel);
alan-bakerf67468c2019-11-25 15:51:49 -05004212
4213 const bool is_int_image = IsIntImageType(Image->getType());
4214 if (is_int_image) {
4215 // Generate a bitcast to v4int and use it as the texel value.
SJW01901d92020-05-21 08:58:31 -05004216 Ops << v4int32ID << TexelID;
SJWf93f5f32020-05-05 07:27:56 -05004217 TexelID = addSPIRVInst(spv::OpBitcast, Ops);
alan-bakerf67468c2019-11-25 15:51:49 -05004218 Ops.clear();
alan-bakerf67468c2019-11-25 15:51:49 -05004219 }
SJW01901d92020-05-21 08:58:31 -05004220 Ops << Image << Coordinate << TexelID;
David Neto22f144c2017-06-12 14:26:21 -04004221
SJWf93f5f32020-05-05 07:27:56 -05004222 RID = addSPIRVInst(spv::OpImageWrite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004223 break;
4224 }
4225
SJW61531372020-06-09 07:31:08 -05004226 case Builtins::kGetImageHeight:
4227 case Builtins::kGetImageWidth:
4228 case Builtins::kGetImageDepth:
4229 case Builtins::kGetImageDim: {
4230 // get_image_* is mapped to OpImageQuerySize or OpImageQuerySizeLod
SJW01901d92020-05-21 08:58:31 -05004231 addCapability(spv::CapabilityImageQuery);
4232
David Neto5c22a252018-03-15 16:07:41 -04004233 //
alan-bakerce179f12019-12-06 19:02:22 -05004234 // Generate OpImageQuerySize[Lod]
David Neto5c22a252018-03-15 16:07:41 -04004235 //
4236 // Ops[0] = Image ID
4237 //
alan-bakerce179f12019-12-06 19:02:22 -05004238 // Result type has components equal to the dimensionality of the image,
4239 // plus 1 if the image is arrayed.
4240 //
alan-bakerf906d2b2019-12-10 11:26:23 -05004241 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
SJWf93f5f32020-05-05 07:27:56 -05004242 SPIRVOperandVec Ops;
David Neto5c22a252018-03-15 16:07:41 -04004243
4244 // Implement:
alan-bakerce179f12019-12-06 19:02:22 -05004245 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
SJW01901d92020-05-21 08:58:31 -05004246 SPIRVID SizesTypeID;
alan-bakerce179f12019-12-06 19:02:22 -05004247
David Neto5c22a252018-03-15 16:07:41 -04004248 Value *Image = Call->getArgOperand(0);
alan-bakerce179f12019-12-06 19:02:22 -05004249 const uint32_t dim = ImageDimensionality(Image->getType());
alan-baker7150a1d2020-02-25 08:31:06 -05004250 const uint32_t components =
4251 dim + (IsArrayImageType(Image->getType()) ? 1 : 0);
alan-bakerce179f12019-12-06 19:02:22 -05004252 if (components == 1) {
SJWf93f5f32020-05-05 07:27:56 -05004253 SizesTypeID = getSPIRVType(Type::getInt32Ty(Context));
alan-bakerce179f12019-12-06 19:02:22 -05004254 } else {
SJWf93f5f32020-05-05 07:27:56 -05004255 SizesTypeID = getSPIRVType(
alan-bakerb3e2b6d2020-06-24 23:59:57 -04004256 FixedVectorType::get(Type::getInt32Ty(Context), components));
alan-bakerce179f12019-12-06 19:02:22 -05004257 }
SJW01901d92020-05-21 08:58:31 -05004258 Ops << SizesTypeID << Image;
alan-bakerce179f12019-12-06 19:02:22 -05004259 spv::Op query_opcode = spv::OpImageQuerySize;
SJW173c7e92020-03-16 08:44:47 -05004260 if (IsSampledImageType(Image->getType())) {
alan-bakerce179f12019-12-06 19:02:22 -05004261 query_opcode = spv::OpImageQuerySizeLod;
4262 // Need explicit 0 for Lod operand.
4263 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
SJW01901d92020-05-21 08:58:31 -05004264 Ops << CstInt0;
alan-bakerce179f12019-12-06 19:02:22 -05004265 }
David Neto5c22a252018-03-15 16:07:41 -04004266
SJWf93f5f32020-05-05 07:27:56 -05004267 RID = addSPIRVInst(query_opcode, Ops);
David Neto5c22a252018-03-15 16:07:41 -04004268
alan-bakerce179f12019-12-06 19:02:22 -05004269 // May require an extra instruction to create the appropriate result of
4270 // the builtin function.
SJW61531372020-06-09 07:31:08 -05004271 if (func_info.getType() == Builtins::kGetImageDim) {
alan-bakerce179f12019-12-06 19:02:22 -05004272 if (dim == 3) {
4273 // get_image_dim returns an int4 for 3D images.
4274 //
David Neto5c22a252018-03-15 16:07:41 -04004275
alan-bakerce179f12019-12-06 19:02:22 -05004276 // Implement:
4277 // %result = OpCompositeConstruct %uint4 %sizes %uint_0
alan-bakerce179f12019-12-06 19:02:22 -05004278 Constant *CstInt0 = ConstantInt::get(Context, APInt(32, 0));
SJW01901d92020-05-21 08:58:31 -05004279
4280 Ops.clear();
alan-bakerb3e2b6d2020-06-24 23:59:57 -04004281 Ops << FixedVectorType::get(Type::getInt32Ty(Context), 4) << RID
SJW01901d92020-05-21 08:58:31 -05004282 << CstInt0;
David Neto5c22a252018-03-15 16:07:41 -04004283
SJWf93f5f32020-05-05 07:27:56 -05004284 RID = addSPIRVInst(spv::OpCompositeConstruct, Ops);
alan-bakerce179f12019-12-06 19:02:22 -05004285 } else if (dim != components) {
4286 // get_image_dim return an int2 regardless of the arrayedness of the
4287 // image. If the image is arrayed an element must be dropped from the
4288 // query result.
4289 //
alan-bakerce179f12019-12-06 19:02:22 -05004290
4291 // Implement:
4292 // %result = OpVectorShuffle %uint2 %sizes %sizes 0 1
4293 Ops.clear();
alan-bakerb3e2b6d2020-06-24 23:59:57 -04004294 Ops << FixedVectorType::get(Type::getInt32Ty(Context), 2) << RID
4295 << RID << 0 << 1;
alan-bakerce179f12019-12-06 19:02:22 -05004296
SJWf93f5f32020-05-05 07:27:56 -05004297 RID = addSPIRVInst(spv::OpVectorShuffle, Ops);
alan-bakerce179f12019-12-06 19:02:22 -05004298 }
4299 } else if (components > 1) {
alan-bakerce179f12019-12-06 19:02:22 -05004300 // Implement:
4301 // %result = OpCompositeExtract %uint %sizes <component number>
4302 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004303 Ops << I.getType() << RID;
alan-bakerce179f12019-12-06 19:02:22 -05004304
4305 uint32_t component = 0;
SJW61531372020-06-09 07:31:08 -05004306 if (func_info.getType() == Builtins::kGetImageHeight)
alan-bakerce179f12019-12-06 19:02:22 -05004307 component = 1;
SJW61531372020-06-09 07:31:08 -05004308 else if (func_info.getType() == Builtins::kGetImageDepth)
alan-bakerce179f12019-12-06 19:02:22 -05004309 component = 2;
SJW01901d92020-05-21 08:58:31 -05004310 Ops << component;
alan-bakerce179f12019-12-06 19:02:22 -05004311
SJWf93f5f32020-05-05 07:27:56 -05004312 RID = addSPIRVInst(spv::OpCompositeExtract, Ops);
alan-bakerce179f12019-12-06 19:02:22 -05004313 }
David Neto5c22a252018-03-15 16:07:41 -04004314 break;
4315 }
4316
SJW61531372020-06-09 07:31:08 -05004317 default: {
4318 // Call instructions are deferred because they need the callee function's ID.
SJW88ed5fe2020-05-11 12:40:57 -05004319 RID = addSPIRVPlaceholder(&I);
SJW61531372020-06-09 07:31:08 -05004320
4321 break;
David Neto22f144c2017-06-12 14:26:21 -04004322 }
SJW61531372020-06-09 07:31:08 -05004323 }
4324
David Neto22f144c2017-06-12 14:26:21 -04004325 break;
4326 }
4327 case Instruction::Ret: {
4328 unsigned NumOps = I.getNumOperands();
4329 if (NumOps == 0) {
4330 //
4331 // Generate OpReturn.
4332 //
SJWf93f5f32020-05-05 07:27:56 -05004333 RID = addSPIRVInst(spv::OpReturn);
David Neto22f144c2017-06-12 14:26:21 -04004334 } else {
4335 //
4336 // Generate OpReturnValue.
4337 //
4338
4339 // Ops[0] = Return Value ID
SJWf93f5f32020-05-05 07:27:56 -05004340 SPIRVOperandVec Ops;
David Neto257c3892018-04-11 13:19:45 -04004341
SJW01901d92020-05-21 08:58:31 -05004342 Ops << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04004343
SJWf93f5f32020-05-05 07:27:56 -05004344 RID = addSPIRVInst(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004345 break;
4346 }
4347 break;
4348 }
4349 }
SJWf93f5f32020-05-05 07:27:56 -05004350
4351 // Register Instruction to ValueMap.
SJW01901d92020-05-21 08:58:31 -05004352 if (RID.isValid()) {
SJWf93f5f32020-05-05 07:27:56 -05004353 VMap[&I] = RID;
4354 }
David Neto22f144c2017-06-12 14:26:21 -04004355}
4356
4357void SPIRVProducerPass::GenerateFuncEpilogue() {
David Neto22f144c2017-06-12 14:26:21 -04004358
4359 //
4360 // Generate OpFunctionEnd
4361 //
SJWf93f5f32020-05-05 07:27:56 -05004362 addSPIRVInst(spv::OpFunctionEnd);
David Neto22f144c2017-06-12 14:26:21 -04004363}
4364
4365bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05004366 // Don't specialize <4 x i8> if i8 is generally supported.
4367 if (clspv::Option::Int8Support())
4368 return false;
4369
David Neto22f144c2017-06-12 14:26:21 -04004370 LLVMContext &Context = Ty->getContext();
James Pricecf53df42020-04-20 14:41:24 -04004371 if (auto VecTy = dyn_cast<VectorType>(Ty)) {
4372 if (VecTy->getElementType() == Type::getInt8Ty(Context) &&
4373 VecTy->getNumElements() == 4) {
David Neto22f144c2017-06-12 14:26:21 -04004374 return true;
4375 }
4376 }
4377
4378 return false;
4379}
4380
4381void SPIRVProducerPass::HandleDeferredInstruction() {
David Neto22f144c2017-06-12 14:26:21 -04004382 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
4383
SJW88ed5fe2020-05-11 12:40:57 -05004384 for (size_t i = 0; i < DeferredInsts.size(); ++i) {
4385 Value *Inst = DeferredInsts[i].first;
4386 SPIRVInstruction *Placeholder = DeferredInsts[i].second;
4387 SPIRVOperandVec Operands;
4388
4389 auto nextDeferred = [&i, &Inst, &DeferredInsts, &Placeholder]() {
4390 ++i;
4391 assert(DeferredInsts.size() > i);
4392 assert(Inst == DeferredInsts[i].first);
4393 Placeholder = DeferredInsts[i].second;
4394 };
David Neto22f144c2017-06-12 14:26:21 -04004395
4396 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
alan-baker06cad652019-12-03 17:56:47 -05004397 // Check whether this branch needs to be preceded by a merge instruction.
David Neto22f144c2017-06-12 14:26:21 -04004398 BasicBlock *BrBB = Br->getParent();
alan-baker06cad652019-12-03 17:56:47 -05004399 if (ContinueBlocks.count(BrBB)) {
David Neto22f144c2017-06-12 14:26:21 -04004400 //
4401 // Generate OpLoopMerge.
4402 //
4403 // Ops[0] = Merge Block ID
4404 // Ops[1] = Continue Target ID
4405 // Ops[2] = Loop Control
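        // e.g. a loop header block might end with (IDs are arbitrary):
        //   OpLoopMerge %merge_bb %continue_bb None
        //   OpBranchConditional %cond %body_bb %merge_bb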
SJWf93f5f32020-05-05 07:27:56 -05004406 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004407
SJW01901d92020-05-21 08:58:31 -05004408 Ops << MergeBlocks[BrBB] << ContinueBlocks[BrBB]
4409 << spv::LoopControlMaskNone;
David Neto22f144c2017-06-12 14:26:21 -04004410
SJW88ed5fe2020-05-11 12:40:57 -05004411 replaceSPIRVInst(Placeholder, spv::OpLoopMerge, Ops);
4412
4413 nextDeferred();
4414
alan-baker06cad652019-12-03 17:56:47 -05004415 } else if (MergeBlocks.count(BrBB)) {
4416 //
4417 // Generate OpSelectionMerge.
4418 //
4419 // Ops[0] = Merge Block ID
4420 // Ops[1] = Selection Control
SJWf93f5f32020-05-05 07:27:56 -05004421 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004422
alan-baker06cad652019-12-03 17:56:47 -05004423 auto MergeBB = MergeBlocks[BrBB];
SJW01901d92020-05-21 08:58:31 -05004424 Ops << MergeBB << spv::SelectionControlMaskNone;
David Neto22f144c2017-06-12 14:26:21 -04004425
SJW88ed5fe2020-05-11 12:40:57 -05004426 replaceSPIRVInst(Placeholder, spv::OpSelectionMerge, Ops);
4427
4428 nextDeferred();
David Neto22f144c2017-06-12 14:26:21 -04004429 }
4430
4431 if (Br->isConditional()) {
4432 //
4433 // Generate OpBranchConditional.
4434 //
4435 // Ops[0] = Condition ID
4436 // Ops[1] = True Label ID
4437 // Ops[2] = False Label ID
4438 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004439 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004440
SJW01901d92020-05-21 08:58:31 -05004441 Ops << Br->getCondition() << Br->getSuccessor(0) << Br->getSuccessor(1);
David Neto22f144c2017-06-12 14:26:21 -04004442
SJW88ed5fe2020-05-11 12:40:57 -05004443 replaceSPIRVInst(Placeholder, spv::OpBranchConditional, Ops);
4444
David Neto22f144c2017-06-12 14:26:21 -04004445 } else {
4446 //
4447 // Generate OpBranch.
4448 //
4449 // Ops[0] = Target Label ID
SJWf93f5f32020-05-05 07:27:56 -05004450 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004451
SJW01901d92020-05-21 08:58:31 -05004452 Ops << Br->getSuccessor(0);
David Neto22f144c2017-06-12 14:26:21 -04004453
SJW88ed5fe2020-05-11 12:40:57 -05004454 replaceSPIRVInst(Placeholder, spv::OpBranch, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004455 }
4456 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5ed87542020-03-23 11:05:22 -04004457 if (PHI->getType()->isPointerTy() && !IsSamplerType(PHI->getType()) &&
4458 !IsImageType(PHI->getType())) {
alan-baker5b86ed72019-02-15 08:26:50 -05004459 // OpPhi on pointers requires variable pointers.
4460 setVariablePointersCapabilities(
4461 PHI->getType()->getPointerAddressSpace());
4462 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
SJW01901d92020-05-21 08:58:31 -05004463 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004464 }
4465 }
4466
David Neto22f144c2017-06-12 14:26:21 -04004467 //
4468 // Generate OpPhi.
4469 //
4470 // Ops[0] = Result Type ID
4471 // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
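      // e.g. (IDs are arbitrary): %r = OpPhi %float %val_a %bb_a %val_b %bb_b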
SJWf93f5f32020-05-05 07:27:56 -05004472 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004473
SJW01901d92020-05-21 08:58:31 -05004474 Ops << PHI->getType();
David Neto22f144c2017-06-12 14:26:21 -04004475
SJW88ed5fe2020-05-11 12:40:57 -05004476 for (unsigned j = 0; j < PHI->getNumIncomingValues(); j++) {
SJW01901d92020-05-21 08:58:31 -05004477 Ops << PHI->getIncomingValue(j) << PHI->getIncomingBlock(j);
David Neto22f144c2017-06-12 14:26:21 -04004478 }
4479
SJW88ed5fe2020-05-11 12:40:57 -05004480 replaceSPIRVInst(Placeholder, spv::OpPhi, Ops);
4481
David Neto22f144c2017-06-12 14:26:21 -04004482 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
4483 Function *Callee = Call->getCalledFunction();
David Neto3fbb4072017-10-16 11:28:14 -04004484 auto callee_name = Callee->getName();
David Neto22f144c2017-06-12 14:26:21 -04004485
SJW61531372020-06-09 07:31:08 -05004486 if (Builtins::Lookup(Callee) == Builtins::kClspvCompositeConstruct) {
David Netoab03f432017-11-03 17:00:44 -04004487
4488 // Generate an OpCompositeConstruct
SJWf93f5f32020-05-05 07:27:56 -05004489 SPIRVOperandVec Ops;
David Netoab03f432017-11-03 17:00:44 -04004490
4491 // The result type.
SJW01901d92020-05-21 08:58:31 -05004492 Ops << Call->getType();
David Netoab03f432017-11-03 17:00:44 -04004493
4494 for (Use &use : Call->arg_operands()) {
SJW01901d92020-05-21 08:58:31 -05004495 Ops << use.get();
David Netoab03f432017-11-03 17:00:44 -04004496 }
4497
SJW88ed5fe2020-05-11 12:40:57 -05004498 replaceSPIRVInst(Placeholder, spv::OpCompositeConstruct, Ops);
David Netoab03f432017-11-03 17:00:44 -04004499
David Neto22f144c2017-06-12 14:26:21 -04004500 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05004501 if (Call->getType()->isPointerTy()) {
4502 // Functions returning pointers require variable pointers.
4503 setVariablePointersCapabilities(
4504 Call->getType()->getPointerAddressSpace());
4505 }
4506
David Neto22f144c2017-06-12 14:26:21 -04004507 //
4508 // Generate OpFunctionCall.
4509 //
4510
4511 // Ops[0] = Result Type ID
4512 // Ops[1] = Callee Function ID
4513 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
SJWf93f5f32020-05-05 07:27:56 -05004514 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004515
SJW01901d92020-05-21 08:58:31 -05004516 Ops << Call->getType();
David Neto22f144c2017-06-12 14:26:21 -04004517
SJW01901d92020-05-21 08:58:31 -05004518 SPIRVID CalleeID = getSPIRVValue(Callee);
David Neto43568eb2017-10-13 18:25:25 -04004519 if (CalleeID == 0) {
4520 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04004521 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04004522 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
4523 // causes an infinite loop. Instead, go ahead and generate
4524 // the bad function call. A validator will catch the 0-Id.
4525 // llvm_unreachable("Can't translate function call");
4526 }
David Neto22f144c2017-06-12 14:26:21 -04004527
SJW01901d92020-05-21 08:58:31 -05004528 Ops << CalleeID;
David Neto22f144c2017-06-12 14:26:21 -04004529
David Neto22f144c2017-06-12 14:26:21 -04004530 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
SJW88ed5fe2020-05-11 12:40:57 -05004531 for (unsigned j = 0; j < CalleeFTy->getNumParams(); j++) {
4532 auto *operand = Call->getOperand(j);
alan-bakerd4d50652019-12-03 17:17:15 -05004533 auto *operand_type = operand->getType();
4534 // Images and samplers can be passed as function parameters without
4535 // variable pointers.
4536 if (operand_type->isPointerTy() && !IsImageType(operand_type) &&
4537 !IsSamplerType(operand_type)) {
alan-baker5b86ed72019-02-15 08:26:50 -05004538 auto sc =
4539 GetStorageClass(operand->getType()->getPointerAddressSpace());
4540 if (sc == spv::StorageClassStorageBuffer) {
4541 // Passing SSBO by reference requires variable pointers storage
4542 // buffer.
SJW01901d92020-05-21 08:58:31 -05004543 setVariablePointersStorageBuffer();
alan-baker5b86ed72019-02-15 08:26:50 -05004544 } else if (sc == spv::StorageClassWorkgroup) {
4545 // Workgroup references require variable pointers if they are not
4546 // memory object declarations.
4547 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
4548 // Workgroup accessor represents a variable reference.
SJW61531372020-06-09 07:31:08 -05004549 if (Builtins::Lookup(operand_call->getCalledFunction()) !=
4550 Builtins::kClspvLocal)
SJW01901d92020-05-21 08:58:31 -05004551 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004552 } else {
4553 // Arguments are function parameters.
4554 if (!isa<Argument>(operand))
SJW01901d92020-05-21 08:58:31 -05004555 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004556 }
4557 }
4558 }
SJW01901d92020-05-21 08:58:31 -05004559 Ops << operand;
David Neto22f144c2017-06-12 14:26:21 -04004560 }
4561
SJW88ed5fe2020-05-11 12:40:57 -05004562 replaceSPIRVInst(Placeholder, spv::OpFunctionCall, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004563 }
4564 }
4565 }
4566}
4567
SJW77b87ad2020-04-21 14:37:52 -05004568void SPIRVProducerPass::HandleDeferredDecorations() {
4569 const auto &DL = module->getDataLayout();
Alan Baker202c8c72018-08-13 13:47:44 -04004570 if (getTypesNeedingArrayStride().empty() && LocalArgSpecIds.empty()) {
David Neto1a1a0582017-07-07 12:01:44 -04004571 return;
David Netoc6f3ab22018-04-06 18:02:31 -04004572 }
David Neto1a1a0582017-07-07 12:01:44 -04004573
David Netoc6f3ab22018-04-06 18:02:31 -04004574 // Insert ArrayStride decorations on types that need them (pointers, arrays,
4575 // and vectors), due to OpPtrAccessChain instructions we generated earlier.
David Neto85082642018-03-24 06:55:20 -07004576 for (auto *type : getTypesNeedingArrayStride()) {
4577 Type *elemTy = nullptr;
4578 if (auto *ptrTy = dyn_cast<PointerType>(type)) {
4579 elemTy = ptrTy->getElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05004580 } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
alan-baker8eb435a2020-04-08 00:42:06 -04004581 elemTy = arrayTy->getElementType();
4582 } else if (auto *vecTy = dyn_cast<VectorType>(type)) {
4583 elemTy = vecTy->getElementType();
David Neto85082642018-03-24 06:55:20 -07004584 } else {
4585 errs() << "Unhandled strided type " << *type << "\n";
4586 llvm_unreachable("Unhandled strided type");
4587 }
David Neto1a1a0582017-07-07 12:01:44 -04004588
4589 // Ops[0] = Target ID
4590 // Ops[1] = Decoration (ArrayStride)
4591 // Ops[2] = Stride number (Literal Number)
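    // e.g. a pointer to 32-bit float elements gets (IDs are arbitrary):
    //   OpDecorate %ptr_ty ArrayStride 4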
SJWf93f5f32020-05-05 07:27:56 -05004592 SPIRVOperandVec Ops;
David Neto1a1a0582017-07-07 12:01:44 -04004593
David Neto85082642018-03-24 06:55:20 -07004594 // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
Alan Bakerfcda9482018-10-02 17:09:59 -04004595 const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));
David Neto257c3892018-04-11 13:19:45 -04004596
SJW01901d92020-05-21 08:58:31 -05004597 Ops << type << spv::DecorationArrayStride << stride;
David Neto1a1a0582017-07-07 12:01:44 -04004598
SJWf93f5f32020-05-05 07:27:56 -05004599 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04004600 }
David Neto1a1a0582017-07-07 12:01:44 -04004601}
4602
SJW61531372020-06-09 07:31:08 -05004603glsl::ExtInst
4604SPIRVProducerPass::getExtInstEnum(const Builtins::FunctionInfo &func_info) {
SJW2c317da2020-03-23 07:39:13 -05004605
SJW61531372020-06-09 07:31:08 -05004606 switch (func_info.getType()) {
SJW2c317da2020-03-23 07:39:13 -05004607 case Builtins::kClamp: {
SJW61531372020-06-09 07:31:08 -05004608 auto param_type = func_info.getParameter(0);
SJW2c317da2020-03-23 07:39:13 -05004609 if (param_type.type_id == Type::FloatTyID) {
4610 return glsl::ExtInst::ExtInstFClamp;
4611 }
4612 return param_type.is_signed ? glsl::ExtInst::ExtInstSClamp
4613 : glsl::ExtInst::ExtInstUClamp;
4614 }
4615 case Builtins::kMax: {
SJW61531372020-06-09 07:31:08 -05004616 auto param_type = func_info.getParameter(0);
SJW2c317da2020-03-23 07:39:13 -05004617 if (param_type.type_id == Type::FloatTyID) {
4618 return glsl::ExtInst::ExtInstFMax;
4619 }
4620 return param_type.is_signed ? glsl::ExtInst::ExtInstSMax
4621 : glsl::ExtInst::ExtInstUMax;
4622 }
4623 case Builtins::kMin: {
SJW61531372020-06-09 07:31:08 -05004624 auto param_type = func_info.getParameter(0);
SJW2c317da2020-03-23 07:39:13 -05004625 if (param_type.type_id == Type::FloatTyID) {
4626 return glsl::ExtInst::ExtInstFMin;
4627 }
4628 return param_type.is_signed ? glsl::ExtInst::ExtInstSMin
4629 : glsl::ExtInst::ExtInstUMin;
4630 }
4631 case Builtins::kAbs:
4632 return glsl::ExtInst::ExtInstSAbs;
4633 case Builtins::kFmax:
4634 return glsl::ExtInst::ExtInstFMax;
4635 case Builtins::kFmin:
4636 return glsl::ExtInst::ExtInstFMin;
4637 case Builtins::kDegrees:
4638 return glsl::ExtInst::ExtInstDegrees;
4639 case Builtins::kRadians:
4640 return glsl::ExtInst::ExtInstRadians;
4641 case Builtins::kMix:
4642 return glsl::ExtInst::ExtInstFMix;
4643 case Builtins::kAcos:
4644 case Builtins::kAcospi:
4645 return glsl::ExtInst::ExtInstAcos;
4646 case Builtins::kAcosh:
4647 return glsl::ExtInst::ExtInstAcosh;
4648 case Builtins::kAsin:
4649 case Builtins::kAsinpi:
4650 return glsl::ExtInst::ExtInstAsin;
4651 case Builtins::kAsinh:
4652 return glsl::ExtInst::ExtInstAsinh;
4653 case Builtins::kAtan:
4654 case Builtins::kAtanpi:
4655 return glsl::ExtInst::ExtInstAtan;
4656 case Builtins::kAtanh:
4657 return glsl::ExtInst::ExtInstAtanh;
4658 case Builtins::kAtan2:
4659 case Builtins::kAtan2pi:
4660 return glsl::ExtInst::ExtInstAtan2;
4661 case Builtins::kCeil:
4662 return glsl::ExtInst::ExtInstCeil;
4663 case Builtins::kSin:
4664 case Builtins::kHalfSin:
4665 case Builtins::kNativeSin:
4666 return glsl::ExtInst::ExtInstSin;
4667 case Builtins::kSinh:
4668 return glsl::ExtInst::ExtInstSinh;
4669 case Builtins::kCos:
4670 case Builtins::kHalfCos:
4671 case Builtins::kNativeCos:
4672 return glsl::ExtInst::ExtInstCos;
4673 case Builtins::kCosh:
4674 return glsl::ExtInst::ExtInstCosh;
4675 case Builtins::kTan:
4676 case Builtins::kHalfTan:
4677 case Builtins::kNativeTan:
4678 return glsl::ExtInst::ExtInstTan;
4679 case Builtins::kTanh:
4680 return glsl::ExtInst::ExtInstTanh;
4681 case Builtins::kExp:
4682 case Builtins::kHalfExp:
4683 case Builtins::kNativeExp:
4684 return glsl::ExtInst::ExtInstExp;
4685 case Builtins::kExp2:
4686 case Builtins::kHalfExp2:
4687 case Builtins::kNativeExp2:
4688 return glsl::ExtInst::ExtInstExp2;
4689 case Builtins::kLog:
4690 case Builtins::kHalfLog:
4691 case Builtins::kNativeLog:
4692 return glsl::ExtInst::ExtInstLog;
4693 case Builtins::kLog2:
4694 case Builtins::kHalfLog2:
4695 case Builtins::kNativeLog2:
4696 return glsl::ExtInst::ExtInstLog2;
4697 case Builtins::kFabs:
4698 return glsl::ExtInst::ExtInstFAbs;
4699 case Builtins::kFma:
4700 return glsl::ExtInst::ExtInstFma;
4701 case Builtins::kFloor:
4702 return glsl::ExtInst::ExtInstFloor;
4703 case Builtins::kLdexp:
4704 return glsl::ExtInst::ExtInstLdexp;
4705 case Builtins::kPow:
4706 case Builtins::kPowr:
4707 case Builtins::kHalfPowr:
4708 case Builtins::kNativePowr:
4709 return glsl::ExtInst::ExtInstPow;
4710 case Builtins::kRound:
4711 return glsl::ExtInst::ExtInstRound;
4712 case Builtins::kSqrt:
4713 case Builtins::kHalfSqrt:
4714 case Builtins::kNativeSqrt:
4715 return glsl::ExtInst::ExtInstSqrt;
4716 case Builtins::kRsqrt:
4717 case Builtins::kHalfRsqrt:
4718 case Builtins::kNativeRsqrt:
4719 return glsl::ExtInst::ExtInstInverseSqrt;
4720 case Builtins::kTrunc:
4721 return glsl::ExtInst::ExtInstTrunc;
4722 case Builtins::kFrexp:
4723 return glsl::ExtInst::ExtInstFrexp;
SJW61531372020-06-09 07:31:08 -05004724 case Builtins::kClspvFract:
SJW2c317da2020-03-23 07:39:13 -05004725 case Builtins::kFract:
4726 return glsl::ExtInst::ExtInstFract;
4727 case Builtins::kSign:
4728 return glsl::ExtInst::ExtInstFSign;
4729 case Builtins::kLength:
4730 case Builtins::kFastLength:
4731 return glsl::ExtInst::ExtInstLength;
4732 case Builtins::kDistance:
4733 case Builtins::kFastDistance:
4734 return glsl::ExtInst::ExtInstDistance;
4735 case Builtins::kStep:
4736 return glsl::ExtInst::ExtInstStep;
4737 case Builtins::kSmoothstep:
4738 return glsl::ExtInst::ExtInstSmoothStep;
4739 case Builtins::kCross:
4740 return glsl::ExtInst::ExtInstCross;
4741 case Builtins::kNormalize:
4742 case Builtins::kFastNormalize:
4743 return glsl::ExtInst::ExtInstNormalize;
SJW61531372020-06-09 07:31:08 -05004744 case Builtins::kSpirvPack:
4745 return glsl::ExtInst::ExtInstPackHalf2x16;
4746 case Builtins::kSpirvUnpack:
4747 return glsl::ExtInst::ExtInstUnpackHalf2x16;
SJW2c317da2020-03-23 07:39:13 -05004748 default:
4749 break;
4750 }
4751
SJW61531372020-06-09 07:31:08 -05004752 if (func_info.getName().find("llvm.fmuladd.") == 0) {
4753 return glsl::ExtInst::ExtInstFma;
4754 }
4755 return kGlslExtInstBad;
David Neto3fbb4072017-10-16 11:28:14 -04004756}
4757
SJW61531372020-06-09 07:31:08 -05004758glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(
4759 const Builtins::FunctionInfo &func_info) {
4760 switch (func_info.getType()) {
SJW2c317da2020-03-23 07:39:13 -05004761 case Builtins::kClz:
4762 return glsl::ExtInst::ExtInstFindUMsb;
4763 case Builtins::kAcospi:
4764 return glsl::ExtInst::ExtInstAcos;
4765 case Builtins::kAsinpi:
4766 return glsl::ExtInst::ExtInstAsin;
4767 case Builtins::kAtanpi:
4768 return glsl::ExtInst::ExtInstAtan;
4769 case Builtins::kAtan2pi:
4770 return glsl::ExtInst::ExtInstAtan2;
4771 default:
4772 break;
4773 }
4774 return kGlslExtInstBad;
David Neto3fbb4072017-10-16 11:28:14 -04004775}
4776
SJW61531372020-06-09 07:31:08 -05004777glsl::ExtInst SPIRVProducerPass::getDirectOrIndirectExtInstEnum(
4778 const Builtins::FunctionInfo &func_info) {
4779 auto direct = getExtInstEnum(func_info);
David Neto3fbb4072017-10-16 11:28:14 -04004780 if (direct != kGlslExtInstBad)
4781 return direct;
SJW61531372020-06-09 07:31:08 -05004782 return getIndirectExtInstEnum(func_info);
David Neto22f144c2017-06-12 14:26:21 -04004783}
4784
David Neto22f144c2017-06-12 14:26:21 -04004785void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
David Neto0676e6f2017-07-11 18:47:44 -04004786 binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
David Neto22f144c2017-06-12 14:26:21 -04004787}
4788
SJW88ed5fe2020-05-11 12:40:57 -05004789void SPIRVProducerPass::WriteResultID(const SPIRVInstruction &Inst) {
SJW01901d92020-05-21 08:58:31 -05004790 WriteOneWord(Inst.getResultID().get());
David Neto22f144c2017-06-12 14:26:21 -04004791}
4792
SJW88ed5fe2020-05-11 12:40:57 -05004793void SPIRVProducerPass::WriteWordCountAndOpcode(const SPIRVInstruction &Inst) {
David Neto22f144c2017-06-12 14:26:21 -04004794 // High 16 bits : Word Count
4795 // Low 16 bits : Opcode
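  // For example, an OpTypeVoid instruction (opcode 19) with its result id is
  // 2 words long, so its leading word is (2 << 16) | 19 = 0x00020013.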
SJW88ed5fe2020-05-11 12:40:57 -05004796 uint32_t Word = Inst.getOpcode();
4797 const uint32_t count = Inst.getWordCount();
David Netoee2660d2018-06-28 16:31:29 -04004798 if (count > 65535) {
4799 errs() << "Word count limit of 65535 exceeded: " << count << "\n";
4800 llvm_unreachable("Word count too high");
4801 }
SJW88ed5fe2020-05-11 12:40:57 -05004802 Word |= Inst.getWordCount() << 16;
David Neto22f144c2017-06-12 14:26:21 -04004803 WriteOneWord(Word);
4804}
4805
SJW88ed5fe2020-05-11 12:40:57 -05004806void SPIRVProducerPass::WriteOperand(const SPIRVOperand &Op) {
4807 SPIRVOperandType OpTy = Op.getType();
David Neto22f144c2017-06-12 14:26:21 -04004808 switch (OpTy) {
4809 default: {
4810 llvm_unreachable("Unsupported SPIRV Operand Type???");
4811 break;
4812 }
4813 case SPIRVOperandType::NUMBERID: {
SJW88ed5fe2020-05-11 12:40:57 -05004814 WriteOneWord(Op.getNumID());
David Neto22f144c2017-06-12 14:26:21 -04004815 break;
4816 }
4817 case SPIRVOperandType::LITERAL_STRING: {
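    // Strings are packed four characters per word and always terminated with
    // at least one null byte, e.g. "GLSL.std.450" (12 characters) is written
    // as three data words followed by one all-zero word.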
SJW88ed5fe2020-05-11 12:40:57 -05004818 std::string Str = Op.getLiteralStr();
David Neto22f144c2017-06-12 14:26:21 -04004819 const char *Data = Str.c_str();
4820 size_t WordSize = Str.size() / 4;
4821 for (unsigned Idx = 0; Idx < WordSize; Idx++) {
4822 WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
4823 }
4824
4825 uint32_t Remainder = Str.size() % 4;
4826 uint32_t LastWord = 0;
4827 if (Remainder) {
4828 for (unsigned Idx = 0; Idx < Remainder; Idx++) {
4829 LastWord |= Data[4 * WordSize + Idx] << 8 * Idx;
4830 }
4831 }
4832
4833 WriteOneWord(LastWord);
4834 break;
4835 }
SJW88ed5fe2020-05-11 12:40:57 -05004836 case SPIRVOperandType::LITERAL_WORD: {
4837 WriteOneWord(Op.getLiteralNum()[0]);
4838 break;
4839 }
4840 case SPIRVOperandType::LITERAL_DWORD: {
4841 WriteOneWord(Op.getLiteralNum()[0]);
4842 WriteOneWord(Op.getLiteralNum()[1]);
David Neto22f144c2017-06-12 14:26:21 -04004843 break;
4844 }
4845 }
4846}
4847
4848void SPIRVProducerPass::WriteSPIRVBinary() {
SJW69939d52020-04-16 07:29:07 -05004849 for (int i = 0; i < kSectionCount; ++i) {
4850 WriteSPIRVBinary(SPIRVSections[i]);
4851 }
4852}
4853
4854void SPIRVProducerPass::WriteSPIRVBinary(SPIRVInstructionList &SPIRVInstList) {
David Neto22f144c2017-06-12 14:26:21 -04004855
SJW88ed5fe2020-05-11 12:40:57 -05004856 for (const auto &Inst : SPIRVInstList) {
4857 const auto &Ops = Inst.getOperands();
4858 spv::Op Opcode = static_cast<spv::Op>(Inst.getOpcode());
David Neto22f144c2017-06-12 14:26:21 -04004859
4860 switch (Opcode) {
4861 default: {
David Neto5c22a252018-03-15 16:07:41 -04004862 errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
David Neto22f144c2017-06-12 14:26:21 -04004863 llvm_unreachable("Unsupported SPIRV instruction");
4864 break;
4865 }
4866 case spv::OpCapability:
4867 case spv::OpExtension:
4868 case spv::OpMemoryModel:
4869 case spv::OpEntryPoint:
4870 case spv::OpExecutionMode:
4871 case spv::OpSource:
4872 case spv::OpDecorate:
4873 case spv::OpMemberDecorate:
4874 case spv::OpBranch:
4875 case spv::OpBranchConditional:
4876 case spv::OpSelectionMerge:
4877 case spv::OpLoopMerge:
4878 case spv::OpStore:
4879 case spv::OpImageWrite:
4880 case spv::OpReturnValue:
4881 case spv::OpControlBarrier:
4882 case spv::OpMemoryBarrier:
4883 case spv::OpReturn:
4884 case spv::OpFunctionEnd:
4885 case spv::OpCopyMemory: {
4886 WriteWordCountAndOpcode(Inst);
4887 for (uint32_t i = 0; i < Ops.size(); i++) {
4888 WriteOperand(Ops[i]);
4889 }
4890 break;
4891 }
4892 case spv::OpTypeBool:
4893 case spv::OpTypeVoid:
4894 case spv::OpTypeSampler:
4895 case spv::OpLabel:
4896 case spv::OpExtInstImport:
4897 case spv::OpTypePointer:
4898 case spv::OpTypeRuntimeArray:
4899 case spv::OpTypeStruct:
4900 case spv::OpTypeImage:
4901 case spv::OpTypeSampledImage:
4902 case spv::OpTypeInt:
4903 case spv::OpTypeFloat:
4904 case spv::OpTypeArray:
4905 case spv::OpTypeVector:
4906 case spv::OpTypeFunction: {
4907 WriteWordCountAndOpcode(Inst);
4908 WriteResultID(Inst);
4909 for (uint32_t i = 0; i < Ops.size(); i++) {
4910 WriteOperand(Ops[i]);
4911 }
4912 break;
4913 }
4914 case spv::OpFunction:
4915 case spv::OpFunctionParameter:
4916 case spv::OpAccessChain:
4917 case spv::OpPtrAccessChain:
4918 case spv::OpInBoundsAccessChain:
4919 case spv::OpUConvert:
4920 case spv::OpSConvert:
4921 case spv::OpConvertFToU:
4922 case spv::OpConvertFToS:
4923 case spv::OpConvertUToF:
4924 case spv::OpConvertSToF:
4925 case spv::OpFConvert:
4926 case spv::OpConvertPtrToU:
4927 case spv::OpConvertUToPtr:
4928 case spv::OpBitcast:
alan-bakerc9c55ae2019-12-02 16:01:27 -05004929 case spv::OpFNegate:
David Neto22f144c2017-06-12 14:26:21 -04004930 case spv::OpIAdd:
4931 case spv::OpFAdd:
4932 case spv::OpISub:
4933 case spv::OpFSub:
4934 case spv::OpIMul:
4935 case spv::OpFMul:
4936 case spv::OpUDiv:
4937 case spv::OpSDiv:
4938 case spv::OpFDiv:
4939 case spv::OpUMod:
4940 case spv::OpSRem:
4941 case spv::OpFRem:
Kévin Petit8a560882019-03-21 15:24:34 +00004942 case spv::OpUMulExtended:
4943 case spv::OpSMulExtended:
David Neto22f144c2017-06-12 14:26:21 -04004944 case spv::OpBitwiseOr:
4945 case spv::OpBitwiseXor:
4946 case spv::OpBitwiseAnd:
David Netoa394f392017-08-26 20:45:29 -04004947 case spv::OpNot:
David Neto22f144c2017-06-12 14:26:21 -04004948 case spv::OpShiftLeftLogical:
4949 case spv::OpShiftRightLogical:
4950 case spv::OpShiftRightArithmetic:
4951 case spv::OpBitCount:
David Netoab03f432017-11-03 17:00:44 -04004952 case spv::OpCompositeConstruct:
David Neto22f144c2017-06-12 14:26:21 -04004953 case spv::OpCompositeExtract:
4954 case spv::OpVectorExtractDynamic:
4955 case spv::OpCompositeInsert:
David Neto0a2f98d2017-09-15 19:38:40 -04004956 case spv::OpCopyObject:
David Neto22f144c2017-06-12 14:26:21 -04004957 case spv::OpVectorInsertDynamic:
4958 case spv::OpVectorShuffle:
4959 case spv::OpIEqual:
4960 case spv::OpINotEqual:
4961 case spv::OpUGreaterThan:
4962 case spv::OpUGreaterThanEqual:
4963 case spv::OpULessThan:
4964 case spv::OpULessThanEqual:
4965 case spv::OpSGreaterThan:
4966 case spv::OpSGreaterThanEqual:
4967 case spv::OpSLessThan:
4968 case spv::OpSLessThanEqual:
4969 case spv::OpFOrdEqual:
4970 case spv::OpFOrdGreaterThan:
4971 case spv::OpFOrdGreaterThanEqual:
4972 case spv::OpFOrdLessThan:
4973 case spv::OpFOrdLessThanEqual:
4974 case spv::OpFOrdNotEqual:
4975 case spv::OpFUnordEqual:
4976 case spv::OpFUnordGreaterThan:
4977 case spv::OpFUnordGreaterThanEqual:
4978 case spv::OpFUnordLessThan:
4979 case spv::OpFUnordLessThanEqual:
4980 case spv::OpFUnordNotEqual:
4981 case spv::OpExtInst:
4982 case spv::OpIsInf:
4983 case spv::OpIsNan:
4984 case spv::OpAny:
4985 case spv::OpAll:
4986 case spv::OpUndef:
4987 case spv::OpConstantNull:
4988 case spv::OpLogicalOr:
4989 case spv::OpLogicalAnd:
4990 case spv::OpLogicalNot:
4991 case spv::OpLogicalNotEqual:
4992 case spv::OpConstantComposite:
4993 case spv::OpSpecConstantComposite:
4994 case spv::OpConstantTrue:
4995 case spv::OpConstantFalse:
4996 case spv::OpConstant:
4997 case spv::OpSpecConstant:
4998 case spv::OpVariable:
4999 case spv::OpFunctionCall:
5000 case spv::OpSampledImage:
alan-baker75090e42020-02-20 11:21:04 -05005001 case spv::OpImageFetch:
David Neto22f144c2017-06-12 14:26:21 -04005002 case spv::OpImageSampleExplicitLod:
David Neto5c22a252018-03-15 16:07:41 -04005003 case spv::OpImageQuerySize:
alan-bakerce179f12019-12-06 19:02:22 -05005004 case spv::OpImageQuerySizeLod:
David Neto22f144c2017-06-12 14:26:21 -04005005 case spv::OpSelect:
5006 case spv::OpPhi:
5007 case spv::OpLoad:
5008 case spv::OpAtomicIAdd:
5009 case spv::OpAtomicISub:
5010 case spv::OpAtomicExchange:
5011 case spv::OpAtomicIIncrement:
5012 case spv::OpAtomicIDecrement:
5013 case spv::OpAtomicCompareExchange:
5014 case spv::OpAtomicUMin:
5015 case spv::OpAtomicSMin:
5016 case spv::OpAtomicUMax:
5017 case spv::OpAtomicSMax:
5018 case spv::OpAtomicAnd:
5019 case spv::OpAtomicOr:
5020 case spv::OpAtomicXor:
5021 case spv::OpDot: {
5022 WriteWordCountAndOpcode(Inst);
5023 WriteOperand(Ops[0]);
5024 WriteResultID(Inst);
5025 for (uint32_t i = 1; i < Ops.size(); i++) {
5026 WriteOperand(Ops[i]);
5027 }
5028 break;
5029 }
5030 }
5031 }
5032}
Alan Baker9bf93fb2018-08-28 16:59:26 -04005033
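// Returns true when a null/zero value can be used for |type|; opaque struct
// types, which model images and samplers, are the notable exception whether
// they appear directly, behind a non-constant pointer, or nested inside an
// array or struct.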
alan-bakerb6b09dc2018-11-08 16:59:28 -05005034bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
Alan Baker9bf93fb2018-08-28 16:59:26 -04005035 switch (type->getTypeID()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005036 case Type::HalfTyID:
5037 case Type::FloatTyID:
5038 case Type::DoubleTyID:
5039 case Type::IntegerTyID:
James Price59a1c752020-04-23 23:06:16 -04005040 case Type::FixedVectorTyID:
alan-bakerb6b09dc2018-11-08 16:59:28 -05005041 return true;
5042 case Type::PointerTyID: {
5043 const PointerType *pointer_type = cast<PointerType>(type);
5044 if (pointer_type->getPointerAddressSpace() !=
5045 AddressSpace::UniformConstant) {
5046 auto pointee_type = pointer_type->getPointerElementType();
5047 if (pointee_type->isStructTy() &&
5048 cast<StructType>(pointee_type)->isOpaque()) {
5049 // Images and samplers are not nullable.
5050 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005051 }
Alan Baker9bf93fb2018-08-28 16:59:26 -04005052 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05005053 return true;
5054 }
5055 case Type::ArrayTyID:
alan-baker8eb435a2020-04-08 00:42:06 -04005056 return IsTypeNullable(type->getArrayElementType());
alan-bakerb6b09dc2018-11-08 16:59:28 -05005057 case Type::StructTyID: {
5058 const StructType *struct_type = cast<StructType>(type);
5059 // Images and samplers are not nullable.
5060 if (struct_type->isOpaque())
Alan Baker9bf93fb2018-08-28 16:59:26 -04005061 return false;
alan-bakerb6b09dc2018-11-08 16:59:28 -05005062 for (const auto element : struct_type->elements()) {
5063 if (!IsTypeNullable(element))
5064 return false;
5065 }
5066 return true;
5067 }
5068 default:
5069 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005070 }
5071}
Alan Bakerfcda9482018-10-02 17:09:59 -04005072
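// Reads the remapped-type metadata recorded earlier in the pass pipeline (see
// clspv::RemappedTypeOffsetMetadataName and RemappedTypeSizesMetadataName)
// and caches the remapped member offsets and type sizes. The queries defined
// below consult these caches before falling back to the DataLayout.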
SJW77b87ad2020-04-21 14:37:52 -05005073void SPIRVProducerPass::PopulateUBOTypeMaps() {
Alan Bakerfcda9482018-10-02 17:09:59 -04005074 if (auto *offsets_md =
SJW77b87ad2020-04-21 14:37:52 -05005075 module->getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
Alan Bakerfcda9482018-10-02 17:09:59 -04005076    // Metadata is stored as key-value pair operands. The first element of each
5077 // operand is the type and the second is a vector of offsets.
5078 for (const auto *operand : offsets_md->operands()) {
5079 const auto *pair = cast<MDTuple>(operand);
5080 auto *type =
5081 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5082 const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
5083 std::vector<uint32_t> offsets;
5084 for (const Metadata *offset_md : offset_vector->operands()) {
5085 const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
alan-bakerb6b09dc2018-11-08 16:59:28 -05005086 offsets.push_back(static_cast<uint32_t>(
5087 cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
Alan Bakerfcda9482018-10-02 17:09:59 -04005088 }
5089 RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
5090 }
5091 }
5092
5093 if (auto *sizes_md =
SJW77b87ad2020-04-21 14:37:52 -05005094 module->getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
Alan Bakerfcda9482018-10-02 17:09:59 -04005095 // Metadata is stored as key-value pair operands. The first element of each
5096 // operand is the type and the second is a triple of sizes: type size in
5097 // bits, store size and alloc size.
5098 for (const auto *operand : sizes_md->operands()) {
5099 const auto *pair = cast<MDTuple>(operand);
5100 auto *type =
5101 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5102 const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
5103 uint64_t type_size_in_bits =
5104 cast<ConstantInt>(
5105 cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
5106 ->getZExtValue();
5107 uint64_t type_store_size =
5108 cast<ConstantInt>(
5109 cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
5110 ->getZExtValue();
5111 uint64_t type_alloc_size =
5112 cast<ConstantInt>(
5113 cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
5114 ->getZExtValue();
5115 RemappedUBOTypeSizes.insert(std::make_pair(
5116 type, std::make_tuple(type_size_in_bits, type_store_size,
5117 type_alloc_size)));
5118 }
5119 }
5120}
5121
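// The three size queries below prefer the remapped UBO sizes captured in
// PopulateUBOTypeMaps and fall back to the DataLayout when a type was not
// remapped.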
5122uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
5123 const DataLayout &DL) {
5124 auto iter = RemappedUBOTypeSizes.find(type);
5125 if (iter != RemappedUBOTypeSizes.end()) {
5126 return std::get<0>(iter->second);
5127 }
5128
5129 return DL.getTypeSizeInBits(type);
5130}
5131
5132uint64_t SPIRVProducerPass::GetTypeStoreSize(Type *type, const DataLayout &DL) {
5133 auto iter = RemappedUBOTypeSizes.find(type);
5134 if (iter != RemappedUBOTypeSizes.end()) {
5135 return std::get<1>(iter->second);
5136 }
5137
5138 return DL.getTypeStoreSize(type);
5139}
5140
5141uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
5142 auto iter = RemappedUBOTypeSizes.find(type);
5143 if (iter != RemappedUBOTypeSizes.end()) {
5144 return std::get<2>(iter->second);
5145 }
5146
5147 return DL.getTypeAllocSize(type);
5148}
alan-baker5b86ed72019-02-15 08:26:50 -05005149
Kévin Petitbbbda972020-03-03 19:16:31 +00005150uint32_t SPIRVProducerPass::GetExplicitLayoutStructMemberOffset(
5151 StructType *type, unsigned member, const DataLayout &DL) {
5152 const auto StructLayout = DL.getStructLayout(type);
5153 // Search for the correct offsets if this type was remapped.
5154 std::vector<uint32_t> *offsets = nullptr;
5155 auto iter = RemappedUBOTypeOffsets.find(type);
5156 if (iter != RemappedUBOTypeOffsets.end()) {
5157 offsets = &iter->second;
5158 }
5159 auto ByteOffset =
5160 static_cast<uint32_t>(StructLayout->getElementOffset(member));
5161 if (offsets) {
5162 ByteOffset = (*offsets)[member];
5163 }
5164
5165 return ByteOffset;
5166}
5167
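// Requests the weakest variable-pointers capability that covers
// |address_space|: StorageBuffer pointers only need
// VariablePointersStorageBuffer, while any other storage class requires the
// full VariablePointers capability.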
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005168void SPIRVProducerPass::setVariablePointersCapabilities(
5169 unsigned address_space) {
alan-baker5b86ed72019-02-15 08:26:50 -05005170 if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
SJW01901d92020-05-21 08:58:31 -05005171 setVariablePointersStorageBuffer();
alan-baker5b86ed72019-02-15 08:26:50 -05005172 } else {
SJW01901d92020-05-21 08:58:31 -05005173 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05005174 }
5175}
5176
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005177Value *SPIRVProducerPass::GetBasePointer(Value *v) {
alan-baker5b86ed72019-02-15 08:26:50 -05005178 if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
5179 return GetBasePointer(gep->getPointerOperand());
5180 }
5181
5182 // Conservatively return |v|.
5183 return v;
5184}
5185
5186bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
5187 if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
5188 if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
SJW61531372020-06-09 07:31:08 -05005189 auto lhs_func_info = Builtins::Lookup(lhs_call->getCalledFunction());
5190 auto rhs_func_info = Builtins::Lookup(rhs_call->getCalledFunction());
5191 if (lhs_func_info.getType() == Builtins::kClspvResource &&
5192 rhs_func_info.getType() == Builtins::kClspvResource) {
alan-baker5b86ed72019-02-15 08:26:50 -05005193 // For resource accessors, match descriptor set and binding.
5194 if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
5195 lhs_call->getOperand(1) == rhs_call->getOperand(1))
5196 return true;
SJW61531372020-06-09 07:31:08 -05005197 } else if (lhs_func_info.getType() == Builtins::kClspvLocal &&
5198 rhs_func_info.getType() == Builtins::kClspvLocal) {
alan-baker5b86ed72019-02-15 08:26:50 -05005199 // For workgroup resources, match spec id.
5200 if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
5201 return true;
5202 }
5203 }
5204 }
5205
5206 return false;
5207}
5208
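// Determines whether a pointer-valued select or phi (over storage buffer
// pointers, per the asserts below) only ever picks between pointers based on
// the same underlying object. Null constants and, when the HackUndef option
// is enabled, undef values trivially satisfy the rule.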
5209bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
5210 assert(inst->getType()->isPointerTy());
5211 assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
5212 spv::StorageClassStorageBuffer);
5213 const bool hack_undef = clspv::Option::HackUndef();
5214 if (auto *select = dyn_cast<SelectInst>(inst)) {
5215 auto *true_base = GetBasePointer(select->getTrueValue());
5216 auto *false_base = GetBasePointer(select->getFalseValue());
5217
5218 if (true_base == false_base)
5219 return true;
5220
5221 // If either the true or false operand is a null, then we satisfy the same
5222 // object constraint.
5223 if (auto *true_cst = dyn_cast<Constant>(true_base)) {
5224 if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
5225 return true;
5226 }
5227
5228 if (auto *false_cst = dyn_cast<Constant>(false_base)) {
5229 if (false_cst->isNullValue() ||
5230 (hack_undef && isa<UndefValue>(false_base)))
5231 return true;
5232 }
5233
5234 if (sameResource(true_base, false_base))
5235 return true;
5236 } else if (auto *phi = dyn_cast<PHINode>(inst)) {
5237 Value *value = nullptr;
5238 bool ok = true;
5239 for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
5240 auto *base = GetBasePointer(phi->getIncomingValue(i));
5241       // Null values satisfy the constraint of selecting from the same
5242       // object.
5243 if (!value) {
5244 if (auto *cst = dyn_cast<Constant>(base)) {
5245 if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
5246 value = base;
5247 } else {
5248 value = base;
5249 }
5250 } else if (base != value) {
5251 if (auto *base_cst = dyn_cast<Constant>(base)) {
5252 if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
5253 continue;
5254 }
5255
5256 if (sameResource(value, base))
5257 continue;
5258
5259 // Values don't represent the same base.
5260 ok = false;
5261 }
5262 }
5263
5264 return ok;
5265 }
5266
5267 // Conservatively return false.
5268 return false;
5269}
alan-bakere9308012019-03-15 10:25:13 -04005270
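// Returns true if, at any call site, the value bound to |Arg| traces back to
// a clspv resource accessor call whose coherent operand is set. Only global
// address space (SSBO) pointer arguments can be coherent; the search follows
// pointer operands and nested function arguments transitively.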
5271bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
5272 if (!Arg.getType()->isPointerTy() ||
5273 Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
5274 // Only SSBOs need to be annotated as coherent.
5275 return false;
5276 }
5277
5278 DenseSet<Value *> visited;
5279 std::vector<Value *> stack;
5280 for (auto *U : Arg.getParent()->users()) {
5281 if (auto *call = dyn_cast<CallInst>(U)) {
5282 stack.push_back(call->getOperand(Arg.getArgNo()));
5283 }
5284 }
5285
5286 while (!stack.empty()) {
5287 Value *v = stack.back();
5288 stack.pop_back();
5289
5290 if (!visited.insert(v).second)
5291 continue;
5292
5293 auto *resource_call = dyn_cast<CallInst>(v);
5294 if (resource_call &&
SJW61531372020-06-09 07:31:08 -05005295 Builtins::Lookup(resource_call->getCalledFunction()).getType() ==
5296 Builtins::kClspvResource) {
alan-bakere9308012019-03-15 10:25:13 -04005297 // If this is a resource accessor function, check if the coherent operand
5298 // is set.
5299 const auto coherent =
5300 unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
5301 ->getZExtValue());
5302 if (coherent == 1)
5303 return true;
5304 } else if (auto *arg = dyn_cast<Argument>(v)) {
5305 // If this is a function argument, trace through its callers.
alan-bakere98f3f92019-04-08 15:06:36 -04005306 for (auto U : arg->getParent()->users()) {
alan-bakere9308012019-03-15 10:25:13 -04005307 if (auto *call = dyn_cast<CallInst>(U)) {
5308 stack.push_back(call->getOperand(arg->getArgNo()));
5309 }
5310 }
5311 } else if (auto *user = dyn_cast<User>(v)) {
5312 // If this is a user, traverse all operands that could lead to resource
5313 // variables.
5314 for (unsigned i = 0; i != user->getNumOperands(); ++i) {
5315 Value *operand = user->getOperand(i);
5316 if (operand->getType()->isPointerTy() &&
5317 operand->getType()->getPointerAddressSpace() ==
5318 clspv::AddressSpace::Global) {
5319 stack.push_back(operand);
5320 }
5321 }
5322 }
5323 }
5324
5325 // No coherent resource variables encountered.
5326 return false;
5327}
alan-baker06cad652019-12-03 17:56:47 -05005328
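// Precomputes the structured control flow information used when emitting
// structured merge instructions (OpLoopMerge / OpSelectionMerge): every loop
// header gets its merge and continue blocks recorded, and other conditional
// branches take their merge block from the already-structurized CFG.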
SJW77b87ad2020-04-21 14:37:52 -05005329void SPIRVProducerPass::PopulateStructuredCFGMaps() {
alan-baker06cad652019-12-03 17:56:47 -05005330 // First, track loop merges and continues.
5331 DenseSet<BasicBlock *> LoopMergesAndContinues;
SJW77b87ad2020-04-21 14:37:52 -05005332 for (auto &F : *module) {
alan-baker06cad652019-12-03 17:56:47 -05005333 if (F.isDeclaration())
5334 continue;
5335
5336 DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
5337 const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>(F).getLoopInfo();
5338 std::deque<BasicBlock *> order;
5339 DenseSet<BasicBlock *> visited;
5340 clspv::ComputeStructuredOrder(&*F.begin(), &DT, LI, &order, &visited);
5341
5342 for (auto BB : order) {
5343 auto terminator = BB->getTerminator();
5344 auto branch = dyn_cast<BranchInst>(terminator);
5345 if (LI.isLoopHeader(BB)) {
5346 auto L = LI.getLoopFor(BB);
5347 BasicBlock *ContinueBB = nullptr;
5348 BasicBlock *MergeBB = nullptr;
5349
5350 MergeBB = L->getExitBlock();
5351 if (!MergeBB) {
5352           // The StructurizeCFG pass converts the CFG into a triangle shape
5353           // whose regions have a single entry and exit. As a result, a loop
5354           // should not have multiple exits.
5355 llvm_unreachable("Loop has multiple exits???");
5356 }
5357
5358 if (L->isLoopLatch(BB)) {
5359 ContinueBB = BB;
5360 } else {
5361           // Per SPIR-V spec section 2.11, the Continue Target must dominate
5362           // the back-edge block.
5363 BasicBlock *Header = L->getHeader();
5364 BasicBlock *Latch = L->getLoopLatch();
5365 for (auto *loop_block : L->blocks()) {
5366 if (loop_block == Header) {
5367 continue;
5368 }
5369
5370             // Check whether this block dominates the block with the back-edge.
5371             // The loop latch is the single block with a back-edge. If it was
5372             // possible, StructurizeCFG made the loop conform to this
5373             // requirement; otherwise |Latch| is a nullptr.
5374 if (DT.dominates(loop_block, Latch)) {
5375 ContinueBB = loop_block;
5376 }
5377 }
5378
5379 if (!ContinueBB) {
5380 llvm_unreachable("Wrong continue block from loop");
5381 }
5382 }
5383
5384 // Record the continue and merge blocks.
5385 MergeBlocks[BB] = MergeBB;
5386 ContinueBlocks[BB] = ContinueBB;
5387 LoopMergesAndContinues.insert(MergeBB);
5388 LoopMergesAndContinues.insert(ContinueBB);
5389 } else if (branch && branch->isConditional()) {
5390 auto L = LI.getLoopFor(BB);
5391 bool HasBackedge = false;
5392 while (L && !HasBackedge) {
5393 if (L->isLoopLatch(BB)) {
5394 HasBackedge = true;
5395 }
5396 L = L->getParentLoop();
5397 }
5398
5399 if (!HasBackedge) {
5400 // Only need a merge if the branch doesn't include a loop break or
5401 // continue.
5402 auto true_bb = branch->getSuccessor(0);
5403 auto false_bb = branch->getSuccessor(1);
5404 if (!LoopMergesAndContinues.count(true_bb) &&
5405 !LoopMergesAndContinues.count(false_bb)) {
5406           // The StructurizeCFG pass has already manipulated the CFG. Just use
5407           // the false block of the branch instruction as the merge block.
5408 MergeBlocks[BB] = false_bb;
5409 }
5410 }
5411 }
5412 }
5413 }
5414}