// Copyright 2017 The Clspv Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifdef _MSC_VER
#pragma warning(push, 0)
#endif

#include <cassert>
#include <cstring>
#include <iomanip>
#include <list>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <tuple>
#include <unordered_set>
#include <utility>

#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/UniqueVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Cloning.h"

// enable spv::HasResultAndType
#define SPV_ENABLE_UTILITY_CODE
#include "spirv/unified1/spirv.hpp"

#include "clspv/AddressSpace.h"
#include "clspv/Option.h"
#include "clspv/PushConstant.h"
#include "clspv/SpecConstant.h"
#include "clspv/spirv_c_strings.hpp"
#include "clspv/spirv_glsl.hpp"
#include "clspv/spirv_reflection.hpp"

#include "ArgKind.h"
#include "Builtins.h"
#include "ComputeStructuredOrder.h"
#include "ConstantEmitter.h"
#include "Constants.h"
#include "DescriptorCounter.h"
#include "Layout.h"
#include "NormalizeGlobalVariable.h"
#include "Passes.h"
#include "SpecConstant.h"
#include "Types.h"

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

using namespace llvm;
using namespace clspv;
using namespace clspv::Builtins;
using namespace clspv::Option;
using namespace mdconst;

namespace {

cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

cl::opt<bool>
    ShowProducerIR("show-producer-ir", cl::init(false), cl::ReallyHidden,
                   cl::desc("Dump the IR at the start of SPIRVProducer"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);

// SPIRV Module Sections (per 2.4 of the SPIR-V spec)
// These are used to collect SPIRVInstructions by type on-the-fly.
enum SPIRVSection {
  kCapabilities,
  kExtensions,
  kImports,
  kMemoryModel,
  kEntryPoints,
  kExecutionModes,

  kDebug,
  kAnnotations,

  kTypes,
  kConstants = kTypes,
  kGlobalVariables,

  kFunctions,

  // This is not a section of the SPIR-V spec and should always immediately
  // precede kSectionCount. It is a convenient place for the embedded
  // reflection data.
  kReflection,
  kSectionCount
};

class SPIRVID {
  uint32_t id;

public:
  SPIRVID(uint32_t _id = 0) : id(_id) {}
  uint32_t get() const { return id; }
  bool isValid() const { return id != 0; }
  bool operator==(const SPIRVID &that) const { return id == that.id; }
  bool operator<(const SPIRVID &that) const { return id < that.id; }
};

enum SPIRVOperandType { NUMBERID, LITERAL_WORD, LITERAL_DWORD, LITERAL_STRING };

struct SPIRVOperand {
  explicit SPIRVOperand(SPIRVOperandType Ty, uint32_t Num) : Type(Ty) {
    LiteralNum[0] = Num;
  }
  explicit SPIRVOperand(SPIRVOperandType Ty, const char *Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(ArrayRef<uint32_t> NumVec) {
    auto sz = NumVec.size();
    assert(sz >= 1 && sz <= 2);
    Type = sz == 1 ? LITERAL_WORD : LITERAL_DWORD;
    LiteralNum[0] = NumVec[0];
    if (sz == 2) {
      LiteralNum[1] = NumVec[1];
    }
  }

  SPIRVOperandType getType() const { return Type; }
  uint32_t getNumID() const { return LiteralNum[0]; }
  std::string getLiteralStr() const { return LiteralStr; }
  const uint32_t *getLiteralNum() const { return LiteralNum; }

  uint32_t GetNumWords() const {
    switch (Type) {
    case NUMBERID:
    case LITERAL_WORD:
      return 1;
    case LITERAL_DWORD:
      return 2;
    case LITERAL_STRING:
      // Account for the terminating null character.
      return uint32_t((LiteralStr.size() + 4) / 4);
    }
    llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
  }

private:
  SPIRVOperandType Type;
  std::string LiteralStr;
  uint32_t LiteralNum[2];
};
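
// For illustration only (not used by the pass): a NUMBERID or LITERAL_WORD
// operand occupies one word, a LITERAL_DWORD two, and a LITERAL_STRING is
// rounded up to whole words including its terminating null. For example, a
// hypothetical SPIRVOperand(LITERAL_STRING, "main") has string size 4, so it
// takes (4 + 4) / 4 = 2 words: 'm','a','i','n' plus a null-padded word.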

typedef SmallVector<SPIRVOperand, 4> SPIRVOperandVec;

struct SPIRVInstruction {
  // Primary constructor must have Opcode; initializes WordCount based on
  // ResID.
  SPIRVInstruction(spv::Op Opc, SPIRVID ResID = 0)
      : Opcode(static_cast<uint16_t>(Opc)) {
    setResult(ResID);
  }

  // Creates an instruction with an opcode and no result ID, and with the given
  // operands. This calls the primary constructor to initialize Opcode and
  // WordCount. Takes ownership of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandVec &Ops) : SPIRVInstruction(Opc) {
    setOperands(Ops);
  }
  // Creates an instruction with an opcode, a result ID, and the given
  // operands. This calls the primary constructor to initialize Opcode and
  // WordCount. Takes ownership of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVID ResID, SPIRVOperandVec &Ops)
      : SPIRVInstruction(Opc, ResID) {
    setOperands(Ops);
  }

  uint32_t getWordCount() const { return WordCount; }
  uint16_t getOpcode() const { return Opcode; }
  SPIRVID getResultID() const { return ResultID; }
  const SPIRVOperandVec &getOperands() const { return Operands; }

private:
  void setResult(SPIRVID ResID = 0) {
    WordCount = 1 + (ResID.isValid() ? 1 : 0);
    ResultID = ResID;
  }

  void setOperands(SPIRVOperandVec &Ops) {
    assert(Operands.empty());
    Operands = std::move(Ops);
    for (auto &opd : Operands) {
      WordCount += uint16_t(opd.GetNumWords());
    }
  }

private:
  uint32_t WordCount; // Check the 16-bit bound at code generation time.
  uint16_t Opcode;
  SPIRVID ResultID;
  SPIRVOperandVec Operands;
};
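
// For illustration only: the final word count of an instruction is
// 1 (the combined word-count/opcode word) + 1 if it has a result ID + the sum
// of its operand word counts. For example, a hypothetical
// SPIRVInstruction(spv::OpName, Ops), where Ops holds one NUMBERID operand and
// the LITERAL_STRING "foo", has WordCount 1 + 1 + 1 = 3 (OpName has no result
// ID word).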

struct SPIRVProducerPass final : public ModulePass {
  typedef DenseMap<Type *, SPIRVID> TypeMapType;
  typedef UniqueVector<Type *> TypeList;
  typedef DenseMap<Value *, SPIRVID> ValueMapType;
  typedef std::list<SPIRVID> SPIRVIDListType;
  typedef std::vector<std::pair<Value *, SPIRVID>> EntryPointVecType;
  typedef std::set<uint32_t> CapabilitySetType;
  typedef std::list<SPIRVInstruction> SPIRVInstructionList;
  typedef std::map<spv::BuiltIn, SPIRVID> BuiltinConstantMapType;
  // A vector of pairs, each of which is:
  // - the LLVM instruction that we will later generate SPIR-V code for
  // - the SPIR-V instruction placeholder that will be replaced
  typedef std::vector<std::pair<Value *, SPIRVInstruction *>>
      DeferredInstVecType;
  typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
      GlobalConstFuncMapType;

  explicit SPIRVProducerPass(
      raw_pwrite_stream &out,
      ArrayRef<std::pair<unsigned, std::string>> samplerMap,
      bool outputCInitList)
      : ModulePass(ID), module(nullptr), samplerMap(samplerMap), out(out),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(&out),
        outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0) {
    addCapability(spv::CapabilityShader);
    Ptr = this;
  }

  virtual ~SPIRVProducerPass() {}

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<LoopInfoWrapperPass>();
  }

  virtual bool runOnModule(Module &module) override;

  // output the SPIR-V header block
  void outputHeader();

  // patch the SPIR-V header block
  void patchHeader();

  CapabilitySetType &getCapabilitySet() { return CapabilitySet; }
  TypeMapType &getImageTypeMap() { return ImageTypeMap; }
  TypeList &getTypeList() { return Types; }
  ValueMapType &getValueMap() { return ValueMap; }
  SPIRVInstructionList &getSPIRVInstList(SPIRVSection Section) {
    return SPIRVSections[Section];
  };
  EntryPointVecType &getEntryPointVec() { return EntryPointVec; }
  DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; }
  SPIRVIDListType &getEntryPointInterfacesList() {
    return EntryPointInterfacesList;
  }
  SPIRVID getOpExtInstImportID();
  std::vector<SPIRVID> &getBuiltinDimVec() { return BuiltinDimensionVec; }

  bool hasVariablePointersStorageBuffer() {
    return HasVariablePointersStorageBuffer;
  }
  void setVariablePointersStorageBuffer() {
    if (!HasVariablePointersStorageBuffer) {
      addCapability(spv::CapabilityVariablePointersStorageBuffer);
      HasVariablePointersStorageBuffer = true;
    }
  }
  bool hasVariablePointers() { return HasVariablePointers; }
  void setVariablePointers() {
    if (!HasVariablePointers) {
      addCapability(spv::CapabilityVariablePointers);
      HasVariablePointers = true;
    }
  }
  ArrayRef<std::pair<unsigned, std::string>> &getSamplerMap() {
    return samplerMap;
  }
  GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
    return GlobalConstFuncTypeMap;
  }
  SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
    return GlobalConstArgumentSet;
  }
  TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }

  void GenerateLLVMIRInfo();
  // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
  // *not* be converted to a storage buffer, replace each such global variable
  // with one in the storage class expected by SPIR-V.
  void FindGlobalConstVars();
  // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
  // ModuleOrderedResourceVars.
  void FindResourceVars();
  void FindTypePerGlobalVar(GlobalVariable &GV);
  void FindTypePerFunc(Function &F);
  void FindTypesForSamplerMap();
  void FindTypesForResourceVars();
  // Inserts |Ty| and relevant sub-types into the |Types| member, indicating
  // that |Ty| and its subtypes will need a corresponding SPIR-V type.
  void FindType(Type *Ty);

  // Returns the canonical type of |type|.
  //
  // By default, clspv maps both __constant and __global address space pointers
  // to StorageBuffer storage class. In order to prevent duplicate types from
  // being generated, clspv uses the canonical type as a representative.
  Type *CanonicalType(Type *type);

  // Lookup or create Types, Constants.
  // Returns SPIRVID once it has been created.
  SPIRVID getSPIRVType(Type *Ty);
  SPIRVID getSPIRVConstant(Constant *Cst);
  SPIRVID getSPIRVInt32Constant(uint32_t CstVal);
  // Lookup SPIRVID of llvm::Value, may create Constant.
  SPIRVID getSPIRVValue(Value *V);

  SPIRVID getSPIRVBuiltin(spv::BuiltIn BID, spv::Capability Cap);

  // Generates instructions for SPIR-V types corresponding to the LLVM types
  // saved in the |Types| member. A type follows its subtypes. IDs are
  // allocated sequentially starting with the current value of nextID, and
  // with a type following its subtypes. Also updates nextID to just beyond
  // the last generated ID.
  void GenerateSPIRVTypes();
  void GenerateModuleInfo();
  void GenerateGlobalVar(GlobalVariable &GV);
  void GenerateWorkgroupVars();
  // Generate reflection instructions for resource variables associated with
  // arguments to F.
  void GenerateSamplers();
  // Generate OpVariables for %clspv.resource.var.* calls.
  void GenerateResourceVars();
  void GenerateFuncPrologue(Function &F);
  void GenerateFuncBody(Function &F);
  void GenerateEntryPointInitialStores();
  spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
  spv::Op GetSPIRVCastOpcode(Instruction &I);
  spv::Op GetSPIRVBinaryOpcode(Instruction &I);
  SPIRVID GenerateClspvInstruction(CallInst *Call,
                                   const FunctionInfo &FuncInfo);
  SPIRVID GenerateImageInstruction(CallInst *Call,
                                   const FunctionInfo &FuncInfo);
  SPIRVID GenerateSubgroupInstruction(CallInst *Call,
                                      const FunctionInfo &FuncInfo);
  SPIRVID GenerateInstructionFromCall(CallInst *Call);
  void GenerateInstruction(Instruction &I);
  void GenerateFuncEpilogue();
  void HandleDeferredInstruction();
  void HandleDeferredDecorations();
  bool is4xi8vec(Type *Ty) const;
  spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
  spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
  spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
  // Returns the GLSL extended instruction enum that the given function
  // call maps to. If none, then returns the 0 value, i.e. GLSLstd450Bad.
  glsl::ExtInst getExtInstEnum(const Builtins::FunctionInfo &func_info);
  // Returns the GLSL extended instruction enum indirectly used by the given
  // function. That is, to implement the given function, we use an extended
  // instruction plus one more instruction. If none, then returns the 0 value,
  // i.e. GLSLstd450Bad.
  glsl::ExtInst getIndirectExtInstEnum(const Builtins::FunctionInfo &func_info);
  // Returns the single GLSL extended instruction used directly or
  // indirectly by the given function call.
  glsl::ExtInst
  getDirectOrIndirectExtInstEnum(const Builtins::FunctionInfo &func_info);
  void WriteOneWord(uint32_t Word);
  void WriteResultID(const SPIRVInstruction &Inst);
  void WriteWordCountAndOpcode(const SPIRVInstruction &Inst);
  void WriteOperand(const SPIRVOperand &Op);
  void WriteSPIRVBinary();
  void WriteSPIRVBinary(SPIRVInstructionList &SPIRVInstList);

  // Returns true if |type| is compatible with OpConstantNull.
  bool IsTypeNullable(const Type *type) const;

  // Populate UBO remapped type maps.
  void PopulateUBOTypeMaps();

  // Populate the merge and continue block maps.
  void PopulateStructuredCFGMaps();

  // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
  // uses the internal map, otherwise it falls back on the data layout.
  uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
  uint64_t GetTypeStoreSize(Type *type, const DataLayout &DL);
  uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);
  uint32_t GetExplicitLayoutStructMemberOffset(StructType *type,
                                               unsigned member,
                                               const DataLayout &DL);

  // Returns the base pointer of |v|.
  Value *GetBasePointer(Value *v);

  // Add Capability if not already present
  // (e.g. CapabilityGroupNonUniformBroadcast).
  void addCapability(uint32_t c) { CapabilitySet.emplace(c); }

  // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| based on
  // |address_space|.
  void setVariablePointersCapabilities(unsigned address_space);

  // Returns true if |lhs| and |rhs| represent the same resource or workgroup
  // variable.
  bool sameResource(Value *lhs, Value *rhs) const;

  // Returns true if |inst| is a phi or select that selects from the same
  // structure (or null).
  bool selectFromSameObject(Instruction *inst);

  // Returns true if |Arg| is called with a coherent resource.
  bool CalledWithCoherentResource(Argument &Arg);

  //
  // Primary interface for adding SPIRVInstructions to a SPIRVSection.
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Opcode, SPIRVOperandVec &Operands) {
    bool has_result, has_result_type;
    spv::HasResultAndType(Opcode, &has_result, &has_result_type);
    SPIRVID RID = has_result ? incrNextID() : 0;
    SPIRVSections[TSection].emplace_back(Opcode, RID, Operands);
    return RID;
  }
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Op) {
    SPIRVOperandVec Ops;
    return addSPIRVInst<TSection>(Op, Ops);
  }
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Op, uint32_t V) {
    SPIRVOperandVec Ops;
    Ops.emplace_back(LITERAL_WORD, V);
    return addSPIRVInst<TSection>(Op, Ops);
  }
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Op, const char *V) {
    SPIRVOperandVec Ops;
    Ops.emplace_back(LITERAL_STRING, V);
    return addSPIRVInst<TSection>(Op, Ops);
  }
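
  // For illustration only, a hypothetical use of the helpers above: a
  // decoration is emitted into the kAnnotations section by building an
  // operand list and calling the primary overload, e.g.
  //   SPIRVOperandVec Ops;
  //   Ops.emplace_back(NUMBERID, target_id.get());
  //   Ops.emplace_back(LITERAL_WORD, uint32_t(spv::DecorationArrayStride));
  //   Ops.emplace_back(LITERAL_WORD, stride_in_bytes);
  //   addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
  // OpDecorate has no result, so the returned SPIRVID is the invalid ID (0).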

  //
  // Add placeholder for llvm::Value that references future values.
  // Must have a result ID just in case the final SPIRVInstruction requires
  // one.
  SPIRVID addSPIRVPlaceholder(Value *I) {
    SPIRVID RID = incrNextID();
    SPIRVOperandVec Ops;
    SPIRVSections[kFunctions].emplace_back(spv::OpExtInst, RID, Ops);
    DeferredInstVec.push_back({I, &SPIRVSections[kFunctions].back()});
    return RID;
  }
  // Replace placeholder with actual SPIRVInstruction on the final pass
  // (HandleDeferredInstruction).
  SPIRVID replaceSPIRVInst(SPIRVInstruction *I, spv::Op Opcode,
                           SPIRVOperandVec &Operands) {
    bool has_result, has_result_type;
    spv::HasResultAndType(Opcode, &has_result, &has_result_type);
    SPIRVID RID = has_result ? I->getResultID() : 0;
    *I = SPIRVInstruction(Opcode, RID, Operands);
    return RID;
  }

  //
  // Add global variable and capture entry point interface
  SPIRVID addSPIRVGlobalVariable(const SPIRVID &TypeID, spv::StorageClass SC,
                                 const SPIRVID &InitID = SPIRVID());

  SPIRVID getReflectionImport();
  void GenerateReflection();
  void GenerateKernelReflection();
  void GeneratePushConstantReflection();
  void GenerateSpecConstantReflection();
  void AddArgumentReflection(SPIRVID kernel_decl, const std::string &name,
                             clspv::ArgKind arg_kind, uint32_t ordinal,
                             uint32_t descriptor_set, uint32_t binding,
                             uint32_t offset, uint32_t size, uint32_t spec_id,
                             uint32_t elem_size);

private:
  static char ID;

  Module *module;

  // Set of Capabilities required
  CapabilitySetType CapabilitySet;

  // Map from a SPIR-V builtin to the SPIRV ID of its global variable.
  BuiltinConstantMapType BuiltinConstantMap;

  ArrayRef<std::pair<unsigned, std::string>> samplerMap;
  raw_pwrite_stream &out;

  // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
  // convert to other formats on demand?

  // When emitting a C initialization list, the WriteSPIRVBinary method
  // will actually write its words to this vector via binaryTempOut.
  SmallVector<char, 100> binaryTempUnderlyingVector;
  raw_svector_ostream binaryTempOut;

  // Binary output writes to this stream, which might be |out| or
  // |binaryTempOut|. It's the latter when we really want to write a C
  // initializer list.
  raw_pwrite_stream *binaryOut;
  const bool outputCInitList; // If true, output looks like {0x7023, ... , 5}
  uint64_t patchBoundOffset;
  uint32_t nextID;

  SPIRVID incrNextID() { return nextID++; }

  // ID for OpTypeInt 32 1.
  SPIRVID int32ID;
  // ID for OpTypeVector %int 4.
  SPIRVID v4int32ID;

  // Maps an LLVM Type pointer to the corresponding SPIR-V Id.
  TypeMapType TypeMap;
  // Maps an LLVM image type to its SPIR-V ID.
  TypeMapType ImageTypeMap;
  // A unique-vector of LLVM types that map to a SPIR-V type.
  TypeList Types;
  // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
  ValueMapType ValueMap;
  SPIRVInstructionList SPIRVSections[kSectionCount];

  EntryPointVecType EntryPointVec;
  DeferredInstVecType DeferredInstVec;
  SPIRVIDListType EntryPointInterfacesList;
  SPIRVID OpExtInstImportID;
  std::vector<SPIRVID> BuiltinDimensionVec;
  bool HasVariablePointersStorageBuffer;
  bool HasVariablePointers;
  Type *SamplerTy;
  DenseMap<unsigned, SPIRVID> SamplerLiteralToIDMap;

  // If a function F has a pointer-to-__constant parameter, then this variable
  // will map F's type to (G, index of the parameter), where in a first phase
  // G is F's type. During FindTypePerFunc, G will be changed to F's type
  // but with the pointer-to-constant parameter replaced by a
  // pointer-to-ModuleScopePrivate parameter.
  // TODO(dneto): This doesn't seem general enough? A function might have
  // more than one such parameter.
  GlobalConstFuncMapType GlobalConstFuncTypeMap;
  SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
  // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
  // or array types, and which point into transparent memory (StorageBuffer
  // storage class). These will require an ArrayStride decoration.
  // See SPV_KHR_variable_pointers rev 13.
  TypeList TypesNeedingArrayStride;

  // This is truly ugly, but works around what look like driver bugs.
  // For get_local_size, an earlier part of the flow has created a module-scope
  // variable in Private address space to hold the value for the workgroup
  // size. Its initializer is a uint3 value marked as builtin WorkgroupSize.
  // When this is present, save the IDs of the initializer value and variable
  // in these two variables. We only ever do a vector load from it, and
  // when we see one of those, substitute just the value of the initializer.
  // This mimics what Glslang does, and that's what drivers are used to.
  // TODO(dneto): Remove this once drivers are fixed.
  SPIRVID WorkgroupSizeValueID;
  SPIRVID WorkgroupSizeVarID;

  // Bookkeeping for mapping kernel arguments to resource variables.
  struct ResourceVarInfo {
    ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
                    Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
        : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
          var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
          addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
    const int index; // Index into ResourceVarInfoList
    const unsigned descriptor_set;
    const unsigned binding;
    Function *const var_fn; // The @clspv.resource.var.* function.
    const clspv::ArgKind arg_kind;
    const int coherent;
    const unsigned addr_space; // The LLVM address space
    // The SPIR-V ID of the OpVariable. Not populated at construction time.
    SPIRVID var_id;
  };
  // A list of resource var info. Each one corresponds to a module-scope
  // resource variable we will have to create. Resource var indices are
  // indices into this vector.
  SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
  // This is a vector of pointers of all the resource vars, but ordered by
  // kernel function, and then by argument.
  UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
  // Map a function to the ordered list of resource variables it uses, one for
  // each argument. If an argument does not use a resource variable, it
  // will have a null pointer entry.
  using FunctionToResourceVarsMapType =
      DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
  FunctionToResourceVarsMapType FunctionToResourceVarsMap;

  // What LLVM types map to SPIR-V types needing layout? These are the
  // arrays and structures supporting storage buffers and uniform buffers.
  TypeList TypesNeedingLayout;
  // What LLVM struct types map to a SPIR-V struct type with Block decoration?
  UniqueVector<StructType *> StructTypesNeedingBlock;
  // For a call that represents a load from an opaque type (samplers, images),
  // map it to the variable id it should load from.
  DenseMap<CallInst *, SPIRVID> ResourceVarDeferredLoadCalls;

  // An ordered list of the kernel arguments of type pointer-to-local.
  using LocalArgList = SmallVector<Argument *, 8>;
  LocalArgList LocalArgs;
  // Information about a pointer-to-local argument.
  struct LocalArgInfo {
    // The SPIR-V ID of the array variable.
    SPIRVID variable_id;
    // The element type of the array.
    Type *elem_type;
    // The ID of the array size constant.
    SPIRVID array_size_id;
    // The ID of the array type.
    SPIRVID array_type_id;
    // The ID of the pointer to the array type.
    SPIRVID ptr_array_type_id;
    // The specialization constant ID of the array size.
    int spec_id;
  };
  // A mapping from Argument to its assigned SpecId.
  DenseMap<const Argument *, int> LocalArgSpecIds;
  // A mapping from SpecId to its LocalArgInfo.
  DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
  // A mapping from a remapped type to its real offsets.
  DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
  // A mapping from a remapped type to its real sizes.
  DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
      RemappedUBOTypeSizes;

  // Maps basic block to its merge block.
  DenseMap<BasicBlock *, BasicBlock *> MergeBlocks;
  // Maps basic block to its continue block.
  DenseMap<BasicBlock *, BasicBlock *> ContinueBlocks;

  SPIRVID ReflectionID;
  DenseMap<Function *, SPIRVID> KernelDeclarations;

public:
  static SPIRVProducerPass *Ptr;
};

char SPIRVProducerPass::ID;
SPIRVProducerPass *SPIRVProducerPass::Ptr = nullptr;

} // namespace

namespace clspv {
ModulePass *
createSPIRVProducerPass(raw_pwrite_stream &out,
                        ArrayRef<std::pair<unsigned, std::string>> samplerMap,
                        bool outputCInitList) {
  return new SPIRVProducerPass(out, samplerMap, outputCInitList);
}
} // namespace clspv

namespace {
SPIRVOperandVec &operator<<(SPIRVOperandVec &list, uint32_t num) {
  list.emplace_back(LITERAL_WORD, num);
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, int32_t num) {
  list.emplace_back(LITERAL_WORD, static_cast<uint32_t>(num));
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, ArrayRef<uint32_t> num_vec) {
  list.emplace_back(num_vec);
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, StringRef str) {
  list.emplace_back(LITERAL_STRING, str);
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, Type *t) {
  list.emplace_back(NUMBERID, SPIRVProducerPass::Ptr->getSPIRVType(t).get());
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, Value *v) {
  list.emplace_back(NUMBERID, SPIRVProducerPass::Ptr->getSPIRVValue(v).get());
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, const SPIRVID &v) {
  list.emplace_back(NUMBERID, v.get());
  return list;
}
} // namespace

bool SPIRVProducerPass::runOnModule(Module &M) {
  // TODO(sjw): Need to reset all data members for each Module, or better
  // yet create a new SPIRVProducer for every module. For now only
  // allow 1 call.
  assert(module == nullptr);
  module = &M;
  if (ShowProducerIR) {
    llvm::outs() << *module << "\n";
  }
  binaryOut = outputCInitList ? &binaryTempOut : &out;

  PopulateUBOTypeMaps();
  PopulateStructuredCFGMaps();

  // SPIR-V always begins with its header information
  outputHeader();

  // Gather information from the LLVM IR that we require.
  GenerateLLVMIRInfo();

  // Collect information on global variables too.
  for (GlobalVariable &GV : module->globals()) {
    // If the GV is one of our special __spirv_* variables, remove the
    // initializer as it was only placed there to force LLVM to not throw the
    // value away.
    if (GV.getName().startswith("__spirv_") ||
        GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
      GV.setInitializer(nullptr);
    }

    // Collect types' information from global variable.
    FindTypePerGlobalVar(GV);
  }

  // Generate SPIRV instructions for types.
  GenerateSPIRVTypes();

  // Generate literal samplers if necessary.
  GenerateSamplers();

  // Generate SPIRV variables.
  for (GlobalVariable &GV : module->globals()) {
    GenerateGlobalVar(GV);
  }
  GenerateResourceVars();
  GenerateWorkgroupVars();

  // Generate SPIRV instructions for each function.
  for (Function &F : *module) {
    if (F.isDeclaration()) {
      continue;
    }

    // Generate Function Prologue.
    GenerateFuncPrologue(F);

    // Generate SPIRV instructions for function body.
    GenerateFuncBody(F);

    // Generate Function Epilogue.
    GenerateFuncEpilogue();
  }

  HandleDeferredInstruction();
  HandleDeferredDecorations();

  // Generate SPIRV module information.
  GenerateModuleInfo();

  // Generate embedded reflection information.
  GenerateReflection();

  WriteSPIRVBinary();

  // We need to patch the SPIR-V header to set bound correctly.
  patchHeader();

  if (outputCInitList) {
    bool first = true;
    std::ostringstream os;

    auto emit_word = [&os, &first](uint32_t word) {
      if (!first)
        os << ",\n";
      os << word;
      first = false;
    };

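    // Each group of four bytes from the binary stream is packed into one
    // 32-bit word, least-significant byte first. For example (illustration
    // only), the first four bytes of a SPIR-V binary are 0x03 0x02 0x23 0x07,
    // which pack into the word 0x07230203, i.e. spv::MagicNumber.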
    os << "{";
    const std::string str(binaryTempOut.str());
    for (unsigned i = 0; i < str.size(); i += 4) {
      const uint32_t a = static_cast<unsigned char>(str[i]);
      const uint32_t b = static_cast<unsigned char>(str[i + 1]);
      const uint32_t c = static_cast<unsigned char>(str[i + 2]);
      const uint32_t d = static_cast<unsigned char>(str[i + 3]);
      emit_word(a | (b << 8) | (c << 16) | (d << 24));
    }
    os << "}\n";
    out << os.str();
  }

  return false;
}

void SPIRVProducerPass::outputHeader() {
  binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
                   sizeof(spv::MagicNumber));
  uint32_t minor = 0;
  if (SpvVersion() == SPIRVVersion::SPIRV_1_3) {
    minor = 3;
  }
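  // The version word has the layout 0x00MMmm00 (major, minor); e.g. this
  // yields 0x00010000 for SPIR-V 1.0 and 0x00010300 for SPIR-V 1.3.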
  uint32_t version = (1 << 16) | (minor << 8);
  binaryOut->write(reinterpret_cast<const char *>(&version), sizeof(version));

  // use Google's vendor ID
  const uint32_t vendor = 21 << 16;
  binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));

  // we record where we need to come back to and patch in the bound value
  patchBoundOffset = binaryOut->tell();

  // output a bad bound for now
  binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));

  // output the schema (reserved for use and must be 0)
  const uint32_t schema = 0;
  binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
}

void SPIRVProducerPass::patchHeader() {
  // for a binary we just write the value of nextID over bound
  binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
                    patchBoundOffset);
}

void SPIRVProducerPass::GenerateLLVMIRInfo() {
  // This function generates LLVM IR for the module, such as global variables
  // for arguments, constants, and pointer types for argument access. This
  // information is artificial because we need Vulkan SPIR-V output. This
  // function is executed ahead of FindType and FindConstant.

  FindGlobalConstVars();

  FindResourceVars();

  bool HasWorkGroupBuiltin = false;
  for (GlobalVariable &GV : module->globals()) {
    const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
    if (spv::BuiltInWorkgroupSize == BuiltinType) {
      HasWorkGroupBuiltin = true;
    }
  }

  FindTypesForSamplerMap();
  FindTypesForResourceVars();
}

void SPIRVProducerPass::FindGlobalConstVars() {
  clspv::NormalizeGlobalVariables(*module);
  const DataLayout &DL = module->getDataLayout();

  SmallVector<GlobalVariable *, 8> GVList;
  SmallVector<GlobalVariable *, 8> DeadGVList;
  for (GlobalVariable &GV : module->globals()) {
    if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
      if (GV.use_empty()) {
        DeadGVList.push_back(&GV);
      } else {
        GVList.push_back(&GV);
      }
    }
  }

  // Remove dead global __constant variables.
  for (auto GV : DeadGVList) {
    GV->eraseFromParent();
  }
  DeadGVList.clear();

  if (clspv::Option::ModuleConstantsInStorageBuffer()) {
    // For now, we only support a single storage buffer.
    if (!GVList.empty()) {
      assert(GVList.size() == 1);
      const auto *GV = GVList[0];
      const auto constants_byte_size =
          (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
      const size_t kConstantMaxSize = 65536;
      if (constants_byte_size > kConstantMaxSize) {
        outs() << "Max __constant capacity of " << kConstantMaxSize
               << " bytes exceeded: " << constants_byte_size << " bytes used\n";
        llvm_unreachable("Max __constant capacity exceeded");
      }
    }
  } else {
    // Change global constant variable's address space to ModuleScopePrivate.
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    for (auto GV : GVList) {
      // Create new gv with ModuleScopePrivate address space.
      Type *NewGVTy = GV->getType()->getPointerElementType();
      GlobalVariable *NewGV = new GlobalVariable(
          *module, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
          nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
      NewGV->takeName(GV);

      const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
      SmallVector<User *, 8> CandidateUsers;

      auto record_called_function_type_as_user =
          [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
            // Find argument index.
            unsigned index = 0;
            for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
              if (gv == call->getOperand(i)) {
                // TODO(dneto): Should we break here?
                index = i;
              }
            }

            // Record function type with global constant.
            GlobalConstFuncTyMap[call->getFunctionType()] =
                std::make_pair(call->getFunctionType(), index);
          };

      for (User *GVU : GVUsers) {
        if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
          record_called_function_type_as_user(GV, Call);
        } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
          // Check GEP users.
          for (User *GEPU : GEP->users()) {
            if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
              record_called_function_type_as_user(GEP, GEPCall);
            }
          }
        }

        CandidateUsers.push_back(GVU);
      }

      for (User *U : CandidateUsers) {
        // Update users of gv with new gv.
        if (!isa<Constant>(U)) {
          // #254: Can't change operands of a constant, but this shouldn't be
          // something that sticks around in the module.
          U->replaceUsesOfWith(GV, NewGV);
        }
      }

      // Delete original gv.
      GV->eraseFromParent();
    }
  }
}

void SPIRVProducerPass::FindResourceVars() {
  ResourceVarInfoList.clear();
  FunctionToResourceVarsMap.clear();
  ModuleOrderedResourceVars.reset();
  // Normally, there is one resource variable per clspv.resource.var.*
  // function, since that is unique'd by arg type and index. By design,
  // we can share these resource variables across kernels because all
  // kernels use the same descriptor set.
  //
  // But if the user requested distinct descriptor sets per kernel, then
  // the descriptor allocator has made different (set,binding) pairs for
  // the same (type,arg_index) pair. Since we can decorate a resource
  // variable with only exactly one DescriptorSet and Binding, we are
  // forced in this case to make distinct resource variables whenever
  // the same clspv.resource.var.X function is seen with distinct
  // (set,binding) values.
  const bool always_distinct_sets =
      clspv::Option::DistinctKernelDescriptorSets();
  for (Function &F : *module) {
    // Rely on the fact the resource var functions have a stable ordering
    // in the module.
    if (Builtins::Lookup(&F) == Builtins::kClspvResource) {
      // Find all calls to this function with distinct set and binding pairs.
      // Save them in ResourceVarInfoList.

      // Determine uniqueness of the (set,binding) pairs only within this
      // one resource-var builtin function.
      using SetAndBinding = std::pair<unsigned, unsigned>;
      // Maps set and binding to the resource var info.
      DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
      bool first_use = true;
      for (auto &U : F.uses()) {
        if (auto *call = dyn_cast<CallInst>(U.getUser())) {
          const auto set = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
          const auto binding = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
          const auto arg_kind = clspv::ArgKind(
              dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
          const auto arg_index = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
          const auto coherent = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());

          // Find or make the resource var info for this combination.
          ResourceVarInfo *rv = nullptr;
          if (always_distinct_sets) {
            // Make a new resource var any time we see a different
            // (set,binding) pair.
            SetAndBinding key{set, binding};
            auto where = set_and_binding_map.find(key);
            if (where == set_and_binding_map.end()) {
              rv = new ResourceVarInfo(
                  static_cast<int>(ResourceVarInfoList.size()), set, binding,
                  &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
              set_and_binding_map[key] = rv;
            } else {
              rv = where->second;
            }
          } else {
            // The default is to make exactly one resource for each
            // clspv.resource.var.* function.
            if (first_use) {
              first_use = false;
              rv = new ResourceVarInfo(
                  static_cast<int>(ResourceVarInfoList.size()), set, binding,
                  &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
            } else {
              rv = ResourceVarInfoList.back().get();
            }
          }

          // Now populate FunctionToResourceVarsMap.
          auto &mapping =
              FunctionToResourceVarsMap[call->getParent()->getParent()];
          while (mapping.size() <= arg_index) {
            mapping.push_back(nullptr);
          }
          mapping[arg_index] = rv;
        }
      }
    }
  }

  // Populate ModuleOrderedResourceVars.
  for (Function &F : *module) {
    auto where = FunctionToResourceVarsMap.find(&F);
    if (where != FunctionToResourceVarsMap.end()) {
      for (auto &rv : where->second) {
        if (rv != nullptr) {
          ModuleOrderedResourceVars.insert(rv);
        }
      }
    }
  }
  if (ShowResourceVars) {
    for (auto *info : ModuleOrderedResourceVars) {
      outs() << "MORV index " << info->index << " (" << info->descriptor_set
             << "," << info->binding << ") " << *(info->var_fn->getReturnType())
             << "\n";
    }
  }
}

David Neto22f144c2017-06-12 14:26:21 -04001084void SPIRVProducerPass::FindTypePerGlobalVar(GlobalVariable &GV) {
1085 // Investigate global variable's type.
1086 FindType(GV.getType());
1087}
1088
1089void SPIRVProducerPass::FindTypePerFunc(Function &F) {
1090 // Investigate function's type.
1091 FunctionType *FTy = F.getFunctionType();
1092
1093 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
1094 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
David Neto9ed8e2f2018-03-24 06:47:24 -07001095 // Handle a regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04001096 if (GlobalConstFuncTyMap.count(FTy)) {
1097 uint32_t GVCstArgIdx = GlobalConstFuncTypeMap[FTy].second;
1098 SmallVector<Type *, 4> NewFuncParamTys;
1099 for (unsigned i = 0; i < FTy->getNumParams(); i++) {
1100 Type *ParamTy = FTy->getParamType(i);
1101 if (i == GVCstArgIdx) {
1102 Type *EleTy = ParamTy->getPointerElementType();
1103 ParamTy = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1104 }
1105
1106 NewFuncParamTys.push_back(ParamTy);
1107 }
1108
1109 FunctionType *NewFTy =
1110 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1111 GlobalConstFuncTyMap[FTy] = std::make_pair(NewFTy, GVCstArgIdx);
1112 FTy = NewFTy;
1113 }
1114
1115 FindType(FTy);
1116 } else {
1117 // As kernel functions do not have parameters, create a new function type
1118 // and add it to the type map.
1119 SmallVector<Type *, 4> NewFuncParamTys;
1120 FunctionType *NewFTy =
1121 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1122 FindType(NewFTy);
1123 }
1124
1125 // Investigate instructions' type in function body.
1126 for (BasicBlock &BB : F) {
1127 for (Instruction &I : BB) {
1128 if (isa<ShuffleVectorInst>(I)) {
1129 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1130 // Ignore type for mask of shuffle vector instruction.
1131 if (i == 2) {
1132 continue;
1133 }
1134
1135 Value *Op = I.getOperand(i);
1136 if (!isa<MetadataAsValue>(Op)) {
1137 FindType(Op->getType());
1138 }
1139 }
1140
1141 FindType(I.getType());
1142 continue;
1143 }
1144
David Neto862b7d82018-06-14 18:48:37 -04001145 CallInst *Call = dyn_cast<CallInst>(&I);
1146
SJW61531372020-06-09 07:31:08 -05001147 if (Call) {
1148 auto &func_info = Builtins::Lookup(Call->getCalledFunction());
1149 if (func_info.getType() == Builtins::kClspvResource ||
1150 func_info.getType() == Builtins::kClspvLocal) {
1151 // This is a fake call representing access to a resource/workgroup
1152 // variable. We handle that elsewhere.
1153 continue;
1154 }
Alan Baker202c8c72018-08-13 13:47:44 -04001155 }
1156
alan-bakerf083bed2020-01-29 08:15:42 -05001157 // #497: InsertValue and ExtractValue map to OpCompositeInsert and
1158 // OpCompositeExtract which takes literal values for indices. As a result
1159 // don't map the type of indices.
1160 if (I.getOpcode() == Instruction::ExtractValue) {
1161 FindType(I.getOperand(0)->getType());
1162 continue;
1163 }
1164 if (I.getOpcode() == Instruction::InsertValue) {
1165 FindType(I.getOperand(0)->getType());
1166 FindType(I.getOperand(1)->getType());
1167 continue;
1168 }
1169
1170 // #497: InsertElement and ExtractElement map to OpCompositeExtract if
1171 // the index is a constant. In such a case don't map the index type.
1172 if (I.getOpcode() == Instruction::ExtractElement) {
1173 FindType(I.getOperand(0)->getType());
1174 Value *op1 = I.getOperand(1);
1175 if (!isa<Constant>(op1) || isa<GlobalValue>(op1)) {
1176 FindType(op1->getType());
1177 }
1178 continue;
1179 }
1180 if (I.getOpcode() == Instruction::InsertElement) {
1181 FindType(I.getOperand(0)->getType());
1182 FindType(I.getOperand(1)->getType());
1183 Value *op2 = I.getOperand(2);
1184 if (!isa<Constant>(op2) || isa<GlobalValue>(op2)) {
1185 FindType(op2->getType());
1186 }
1187 continue;
1188 }
1189
David Neto22f144c2017-06-12 14:26:21 -04001190 // Work through the operands of the instruction.
1191 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1192 Value *const Op = I.getOperand(i);
1193 // If any of the operands is a constant, find the type!
1194 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1195 FindType(Op->getType());
1196 }
1197 }
1198
1199 for (Use &Op : I.operands()) {
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001200 if (isa<CallInst>(&I)) {
David Neto22f144c2017-06-12 14:26:21 -04001201 // Avoid checking the call instruction's type.
1202 break;
1203 }
Alan Baker202c8c72018-08-13 13:47:44 -04001204 if (CallInst *OpCall = dyn_cast<CallInst>(Op)) {
SJW61531372020-06-09 07:31:08 -05001205 if (Builtins::Lookup(OpCall->getCalledFunction()) ==
1206 Builtins::kClspvLocal) {
Alan Baker202c8c72018-08-13 13:47:44 -04001207 // This is a fake call representing access to a workgroup variable.
1208 // We handle that elsewhere.
1209 continue;
1210 }
1211 }
David Neto22f144c2017-06-12 14:26:21 -04001212 if (!isa<MetadataAsValue>(&Op)) {
1213 FindType(Op->getType());
1214 continue;
1215 }
1216 }
1217
David Neto22f144c2017-06-12 14:26:21 -04001218 // We don't want to track the type of this call as we are going to replace
1219 // it.
SJW61531372020-06-09 07:31:08 -05001220 if (Call && Builtins::Lookup(Call->getCalledFunction()) ==
1221 Builtins::kClspvSamplerVarLiteral) {
David Neto22f144c2017-06-12 14:26:21 -04001222 continue;
1223 }
1224
1225 if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(&I)) {
1226 // If gep's base operand has ModuleScopePrivate address space, make gep
1227 // return ModuleScopePrivate address space.
1228 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate) {
1229 // Add a pointer type in the ModuleScopePrivate address space for the
1230 // global constant to the type list.
1231 Type *EleTy = I.getType()->getPointerElementType();
1232 Type *NewPTy =
1233 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1234
1235 FindType(NewPTy);
1236 continue;
1237 }
1238 }
1239
1240 FindType(I.getType());
1241 }
1242 }
1243}
1244
SJW77b87ad2020-04-21 14:37:52 -05001245void SPIRVProducerPass::FindTypesForSamplerMap() {
David Neto862b7d82018-06-14 18:48:37 -04001246 // If we are using a sampler map, find the type of the sampler.
SJW77b87ad2020-04-21 14:37:52 -05001247 if (module->getFunction(clspv::LiteralSamplerFunction()) ||
alan-baker7506abb2020-09-10 15:02:55 -04001248 !getSamplerMap().empty()) {
James Pricecbe834f2020-12-01 13:42:25 -05001249 auto SamplerStructTy =
1250 StructType::getTypeByName(module->getContext(), "opencl.sampler_t");
David Neto862b7d82018-06-14 18:48:37 -04001251 if (!SamplerStructTy) {
SJW77b87ad2020-04-21 14:37:52 -05001252 SamplerStructTy =
1253 StructType::create(module->getContext(), "opencl.sampler_t");
David Neto862b7d82018-06-14 18:48:37 -04001254 }
1255
1256 SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);
1257
1258 FindType(SamplerTy);
1259 }
1260}
1261
SJW77b87ad2020-04-21 14:37:52 -05001262void SPIRVProducerPass::FindTypesForResourceVars() {
David Neto862b7d82018-06-14 18:48:37 -04001263 // Record types so they are generated.
1264 TypesNeedingLayout.reset();
1265 StructTypesNeedingBlock.reset();
1266
1267 // To match older clspv codegen, generate the float type first if required
1268 // for images.
1269 for (const auto *info : ModuleOrderedResourceVars) {
alan-bakerf6bc8252020-09-23 14:58:55 -04001270 if (info->arg_kind == clspv::ArgKind::SampledImage ||
1271 info->arg_kind == clspv::ArgKind::StorageImage) {
alan-bakerf67468c2019-11-25 15:51:49 -05001272 if (IsIntImageType(info->var_fn->getReturnType())) {
1273 // Nothing for now...
1274 } else if (IsUintImageType(info->var_fn->getReturnType())) {
SJW77b87ad2020-04-21 14:37:52 -05001275 FindType(Type::getInt32Ty(module->getContext()));
alan-bakerf67468c2019-11-25 15:51:49 -05001276 }
1277
1278 // We need "float" either for the sampled type or for the Lod operand.
SJW77b87ad2020-04-21 14:37:52 -05001279 FindType(Type::getFloatTy(module->getContext()));
David Neto862b7d82018-06-14 18:48:37 -04001280 }
1281 }
1282
1283 for (const auto *info : ModuleOrderedResourceVars) {
1284 Type *type = info->var_fn->getReturnType();
1285
1286 switch (info->arg_kind) {
1287 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04001288 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04001289 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1290 StructTypesNeedingBlock.insert(sty);
1291 } else {
1292 errs() << *type << "\n";
1293 llvm_unreachable("Buffer arguments must map to structures!");
1294 }
1295 break;
1296 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001297 case clspv::ArgKind::PodUBO:
1298 case clspv::ArgKind::PodPushConstant:
David Neto862b7d82018-06-14 18:48:37 -04001299 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1300 StructTypesNeedingBlock.insert(sty);
1301 } else {
1302 errs() << *type << "\n";
1303 llvm_unreachable("POD arguments must map to structures!");
1304 }
1305 break;
alan-bakerf6bc8252020-09-23 14:58:55 -04001306 case clspv::ArgKind::SampledImage:
1307 case clspv::ArgKind::StorageImage:
David Neto862b7d82018-06-14 18:48:37 -04001308 case clspv::ArgKind::Sampler:
1309 // Sampler and image types map to the pointee type but
1310 // in the uniform constant address space.
1311 type = PointerType::get(type->getPointerElementType(),
1312 clspv::AddressSpace::UniformConstant);
1313 break;
1314 default:
1315 break;
1316 }
1317
1318 // The converted type is the type of the OpVariable we will generate.
1319 // If the pointee type is an array of size zero, type generation will map
1320 // it to OpTypeRuntimeArray.
1321 FindType(type);
1322 }
1323
alan-bakerdcd97412019-09-16 15:32:30 -04001324 // If module constants are clustered in a storage buffer then that struct
1325 // needs layout decorations.
1326 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
SJW77b87ad2020-04-21 14:37:52 -05001327 for (GlobalVariable &GV : module->globals()) {
alan-bakerdcd97412019-09-16 15:32:30 -04001328 PointerType *PTy = cast<PointerType>(GV.getType());
1329 const auto AS = PTy->getAddressSpace();
1330 const bool module_scope_constant_external_init =
1331 (AS == AddressSpace::Constant) && GV.hasInitializer();
1332 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
1333 if (module_scope_constant_external_init &&
1334 spv::BuiltInMax == BuiltinType) {
1335 StructTypesNeedingBlock.insert(
1336 cast<StructType>(PTy->getPointerElementType()));
1337 }
1338 }
1339 }
1340
SJW77b87ad2020-04-21 14:37:52 -05001341 for (const GlobalVariable &GV : module->globals()) {
Kévin Petitbbbda972020-03-03 19:16:31 +00001342 if (GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
1343 auto Ty = cast<PointerType>(GV.getType())->getPointerElementType();
1344 assert(Ty->isStructTy() && "Push constants have to be structures.");
1345 auto STy = cast<StructType>(Ty);
1346 StructTypesNeedingBlock.insert(STy);
1347 }
1348 }
1349
David Neto862b7d82018-06-14 18:48:37 -04001350 // Traverse the arrays and structures underneath each Block, and
1351 // mark them as needing layout.
1352 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1353 StructTypesNeedingBlock.end());
1354 while (!work_list.empty()) {
1355 Type *type = work_list.back();
1356 work_list.pop_back();
1357 TypesNeedingLayout.insert(type);
1358 switch (type->getTypeID()) {
1359 case Type::ArrayTyID:
1360 work_list.push_back(type->getArrayElementType());
1361 if (!Hack_generate_runtime_array_stride_early) {
1362 // Remember this array type for deferred decoration.
1363 TypesNeedingArrayStride.insert(type);
1364 }
1365 break;
1366 case Type::StructTyID:
1367 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1368 work_list.push_back(elem_ty);
1369 }
1370 default:
1371 // This type and its contained types don't get layout.
1372 break;
1373 }
1374 }
1375}
1376
SJWf93f5f32020-05-05 07:27:56 -05001377void SPIRVProducerPass::GenerateWorkgroupVars() {
Alan Baker202c8c72018-08-13 13:47:44 -04001378 // The SpecId assignment for pointer-to-local arguments is recorded in
1379 // module-level metadata. Translate that information into local argument
1380 // information.
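  // Each operand of that named metadata is a tuple
  //   { kernel function, argument index, SpecId },
  // e.g. (illustrative only): !{void (float addrspace(3)*)* @foo, i32 0, i32 3}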
SJWf93f5f32020-05-05 07:27:56 -05001381 LLVMContext &Context = module->getContext();
SJW77b87ad2020-04-21 14:37:52 -05001382 NamedMDNode *nmd = module->getNamedMetadata(clspv::LocalSpecIdMetadataName());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001383 if (!nmd)
1384 return;
Alan Baker202c8c72018-08-13 13:47:44 -04001385 for (auto operand : nmd->operands()) {
1386 MDTuple *tuple = cast<MDTuple>(operand);
1387 ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
1388 Function *func = cast<Function>(fn_md->getValue());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001389 ConstantAsMetadata *arg_index_md =
1390 cast<ConstantAsMetadata>(tuple->getOperand(1));
1391 int arg_index = static_cast<int>(
1392 cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
1393 Argument *arg = &*(func->arg_begin() + arg_index);
Alan Baker202c8c72018-08-13 13:47:44 -04001394
1395 ConstantAsMetadata *spec_id_md =
1396 cast<ConstantAsMetadata>(tuple->getOperand(2));
alan-bakerb6b09dc2018-11-08 16:59:28 -05001397 int spec_id = static_cast<int>(
1398 cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04001399
Alan Baker202c8c72018-08-13 13:47:44 -04001400 LocalArgSpecIds[arg] = spec_id;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001401 if (LocalSpecIdInfoMap.count(spec_id))
1402 continue;
Alan Baker202c8c72018-08-13 13:47:44 -04001403
SJWf93f5f32020-05-05 07:27:56 -05001404 // Generate the spec constant.
1405 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05001406 Ops << Type::getInt32Ty(Context) << 1;
SJWf93f5f32020-05-05 07:27:56 -05001407 SPIRVID ArraySizeID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
Alan Baker202c8c72018-08-13 13:47:44 -04001408
SJWf93f5f32020-05-05 07:27:56 -05001409 // Generate the array type.
1410 Type *ElemTy = arg->getType()->getPointerElementType();
1411 Ops.clear();
1412 // The element type must have been created.
SJW01901d92020-05-21 08:58:31 -05001413 Ops << ElemTy << ArraySizeID;
SJWf93f5f32020-05-05 07:27:56 -05001414
1415 SPIRVID ArrayTypeID = addSPIRVInst<kTypes>(spv::OpTypeArray, Ops);
1416
1417 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05001418 Ops << spv::StorageClassWorkgroup << ArrayTypeID;
SJWf93f5f32020-05-05 07:27:56 -05001419 SPIRVID PtrArrayTypeID = addSPIRVInst<kTypes>(spv::OpTypePointer, Ops);
1420
1421 // Generate OpVariable.
1422 //
1423 // Ops[0] : Result Type ID
1424 // Ops[1] : Storage Class
SJW806a5d82020-07-15 12:51:38 -05001425 SPIRVID VariableID =
1426 addSPIRVGlobalVariable(PtrArrayTypeID, spv::StorageClassWorkgroup);
SJWf93f5f32020-05-05 07:27:56 -05001427
1428 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05001429 Ops << ArraySizeID << spv::DecorationSpecId << spec_id;
SJWf93f5f32020-05-05 07:27:56 -05001430 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
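    // Rough shape of what was just emitted (IDs and names are illustrative):
    //   %size = OpSpecConstant %uint 1      ; OpDecorate %size SpecId <spec_id>
    //   %arr  = OpTypeArray %elem %size
    //   %ptr  = OpTypePointer Workgroup %arr
    //   %var  = OpVariable %ptr Workgroup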
1431
1432 LocalArgInfo info{VariableID, ElemTy, ArraySizeID,
1433 ArrayTypeID, PtrArrayTypeID, spec_id};
1434 LocalSpecIdInfoMap[spec_id] = info;
Alan Baker202c8c72018-08-13 13:47:44 -04001435 }
1436}
1437
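// Records Ty and the types it transitively contains in the type list that
// GenerateSPIRVTypes() later walks, so every needed OpType* gets emitted.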
David Neto22f144c2017-06-12 14:26:21 -04001438void SPIRVProducerPass::FindType(Type *Ty) {
1439 TypeList &TyList = getTypeList();
1440
1441 if (0 != TyList.idFor(Ty)) {
1442 return;
1443 }
1444
1445 if (Ty->isPointerTy()) {
1446 auto AddrSpace = Ty->getPointerAddressSpace();
1447 if ((AddressSpace::Constant == AddrSpace) ||
1448 (AddressSpace::Global == AddrSpace)) {
1449 auto PointeeTy = Ty->getPointerElementType();
1450
1451 if (PointeeTy->isStructTy() &&
1452 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1453 FindType(PointeeTy);
1454 auto ActualPointerTy =
1455 PointeeTy->getPointerTo(AddressSpace::UniformConstant);
1456 FindType(ActualPointerTy);
1457 return;
1458 }
1459 }
1460 }
1461
David Neto862b7d82018-06-14 18:48:37 -04001462 // By convention, an LLVM array type with 0 elements will map to
1463 // OpTypeRuntimeArray. Otherwise, it will map to OpTypeArray, which
1464 // has a constant number of elements, so we also need the type of
1465 // that length constant (i32).
1466 if (auto *arrayTy = dyn_cast<ArrayType>(Ty)) {
1467 if (arrayTy->getNumElements() > 0) {
1468 LLVMContext &Context = Ty->getContext();
1469 FindType(Type::getInt32Ty(Context));
1470 }
David Neto22f144c2017-06-12 14:26:21 -04001471 }
1472
1473 for (Type *SubTy : Ty->subtypes()) {
1474 FindType(SubTy);
1475 }
1476
1477 TyList.insert(Ty);
1478}
1479
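// Maps an OpenCL/clspv address space to the SPIR-V storage class it is
// emitted with, e.g. (illustrative) __global buffers land in StorageBuffer
// and __local arrays in Workgroup.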
David Neto22f144c2017-06-12 14:26:21 -04001480spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
1481 switch (AddrSpace) {
1482 default:
1483 llvm_unreachable("Unsupported OpenCL address space");
1484 case AddressSpace::Private:
1485 return spv::StorageClassFunction;
1486 case AddressSpace::Global:
David Neto22f144c2017-06-12 14:26:21 -04001487 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001488 case AddressSpace::Constant:
1489 return clspv::Option::ConstantArgsInUniformBuffer()
1490 ? spv::StorageClassUniform
1491 : spv::StorageClassStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -04001492 case AddressSpace::Input:
1493 return spv::StorageClassInput;
1494 case AddressSpace::Local:
1495 return spv::StorageClassWorkgroup;
1496 case AddressSpace::UniformConstant:
1497 return spv::StorageClassUniformConstant;
David Neto9ed8e2f2018-03-24 06:47:24 -07001498 case AddressSpace::Uniform:
David Netoe439d702018-03-23 13:14:08 -07001499 return spv::StorageClassUniform;
David Neto22f144c2017-06-12 14:26:21 -04001500 case AddressSpace::ModuleScopePrivate:
1501 return spv::StorageClassPrivate;
Kévin Petitbbbda972020-03-03 19:16:31 +00001502 case AddressSpace::PushConstant:
1503 return spv::StorageClassPushConstant;
David Neto22f144c2017-06-12 14:26:21 -04001504 }
1505}
1506
David Neto862b7d82018-06-14 18:48:37 -04001507spv::StorageClass
1508SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
1509 switch (arg_kind) {
1510 case clspv::ArgKind::Buffer:
1511 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001512 case clspv::ArgKind::BufferUBO:
1513 return spv::StorageClassUniform;
David Neto862b7d82018-06-14 18:48:37 -04001514 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001515 return spv::StorageClassStorageBuffer;
1516 case clspv::ArgKind::PodUBO:
1517 return spv::StorageClassUniform;
1518 case clspv::ArgKind::PodPushConstant:
1519 return spv::StorageClassPushConstant;
David Neto862b7d82018-06-14 18:48:37 -04001520 case clspv::ArgKind::Local:
1521 return spv::StorageClassWorkgroup;
alan-bakerf6bc8252020-09-23 14:58:55 -04001522 case clspv::ArgKind::SampledImage:
1523 case clspv::ArgKind::StorageImage:
David Neto862b7d82018-06-14 18:48:37 -04001524 case clspv::ArgKind::Sampler:
1525 return spv::StorageClassUniformConstant;
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001526 default:
1527 llvm_unreachable("Unsupported storage class for argument kind");
David Neto862b7d82018-06-14 18:48:37 -04001528 }
1529}
1530
David Neto22f144c2017-06-12 14:26:21 -04001531spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1532 return StringSwitch<spv::BuiltIn>(Name)
1533 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1534 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1535 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1536 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1537 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
alan-bakerbed3a882020-04-21 14:42:41 -04001538 .Case("__spirv_WorkDim", spv::BuiltInWorkDim)
alan-bakere1996972020-05-04 08:38:12 -04001539 .Case("__spirv_GlobalOffset", spv::BuiltInGlobalOffset)
David Neto22f144c2017-06-12 14:26:21 -04001540 .Default(spv::BuiltInMax);
1541}
1542
SJW01901d92020-05-21 08:58:31 -05001543SPIRVID SPIRVProducerPass::getOpExtInstImportID() {
1544 if (OpExtInstImportID == 0) {
1545 //
1546 // Generate OpExtInstImport.
1547 //
1548 // Ops[0] ... Ops[n] = Name (Literal String)
David Neto22f144c2017-06-12 14:26:21 -04001549
SJW01901d92020-05-21 08:58:31 -05001550 OpExtInstImportID =
1551 addSPIRVInst<kImports>(spv::OpExtInstImport, "GLSL.std.450");
1552 }
1553 return OpExtInstImportID;
SJWf93f5f32020-05-05 07:27:56 -05001554}
1555
SJW806a5d82020-07-15 12:51:38 -05001556SPIRVID SPIRVProducerPass::addSPIRVGlobalVariable(const SPIRVID &TypeID,
1557 spv::StorageClass SC,
1558 const SPIRVID &InitID) {
1559 // Generate OpVariable.
1560 //
1561 // Ops[0] : Result Type ID
1562 // Ops[1] : Storage Class
1563 // Ops[2] : Initialization Value ID (optional)
1564
1565 SPIRVOperandVec Ops;
1566 Ops << TypeID << SC;
1567 if (InitID.isValid()) {
1568 Ops << InitID;
1569 }
1570
1571 SPIRVID VID = addSPIRVInst<kGlobalVariables>(spv::OpVariable, Ops);
1572
1573 if (SC == spv::StorageClassInput) {
1574 getEntryPointInterfacesList().push_back(VID);
1575 }
1576
1577 return VID;
1578}
1579
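// Returns the type used to represent |type| in the generated SPIR-V.
// Illustrative example: unless clspv::Option::ConstantArgsInUniformBuffer()
// is set, a __constant pointer is treated as the corresponding __global
// pointer so both share a single OpTypePointer; struct, array and function
// types are rebuilt around their canonicalized components.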
alan-bakerc3fd07f2020-10-22 09:48:49 -04001580Type *SPIRVProducerPass::CanonicalType(Type *type) {
1581 if (type->getNumContainedTypes() != 0) {
1582 switch (type->getTypeID()) {
1583 case Type::PointerTyID: {
1584 // For the purposes of our Vulkan SPIR-V type system, constant and global
1585 // are conflated.
1586 auto *ptr_ty = cast<PointerType>(type);
1587 unsigned AddrSpace = ptr_ty->getAddressSpace();
1588 if (AddressSpace::Constant == AddrSpace) {
1589 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1590 AddrSpace = AddressSpace::Global;
1591 // The canonical type of __constant is __global unless constants are
1592 // passed in uniform buffers.
1593 auto *GlobalTy =
1594 ptr_ty->getPointerElementType()->getPointerTo(AddrSpace);
1595 return GlobalTy;
1596 }
1597 }
1598 break;
1599 }
1600 case Type::StructTyID: {
1601 SmallVector<Type *, 8> subtypes;
1602 bool changed = false;
1603 for (auto *subtype : type->subtypes()) {
1604 auto canonical = CanonicalType(subtype);
1605 subtypes.push_back(canonical);
1606 if (canonical != subtype) {
1607 changed = true;
1608 }
1609 }
1610 if (changed) {
1611 return StructType::get(type->getContext(), subtypes,
1612 cast<StructType>(type)->isPacked());
1613 }
1614 break;
1615 }
1616 case Type::ArrayTyID: {
1617 auto *elem_ty = type->getArrayElementType();
1618 auto *equiv_elem_ty = CanonicalType(elem_ty);
1619 if (equiv_elem_ty != elem_ty) {
1620 return ArrayType::get(equiv_elem_ty,
1621 cast<ArrayType>(type)->getNumElements());
1622 }
1623 break;
1624 }
1625 case Type::FunctionTyID: {
1626 auto *func_ty = cast<FunctionType>(type);
1627 auto *return_ty = CanonicalType(func_ty->getReturnType());
1628 SmallVector<Type *, 8> params;
1629 for (unsigned i = 0; i < func_ty->getNumParams(); ++i) {
1630 params.push_back(CanonicalType(func_ty->getParamType(i)));
1631 }
1632 return FunctionType::get(return_ty, params, func_ty->isVarArg());
1633 }
1634 default:
1635 break;
1636 }
1637 }
1638
1639 return type;
1640}
1641
SJW01901d92020-05-21 08:58:31 -05001642SPIRVID SPIRVProducerPass::getSPIRVType(Type *Ty) {
SJWf93f5f32020-05-05 07:27:56 -05001643 auto TI = TypeMap.find(Ty);
1644 if (TI != TypeMap.end()) {
SJW01901d92020-05-21 08:58:31 -05001645 assert(TI->second.isValid());
SJWf93f5f32020-05-05 07:27:56 -05001646 return TI->second;
1647 }
1648
alan-bakerc3fd07f2020-10-22 09:48:49 -04001649 auto Canonical = CanonicalType(Ty);
1650 if (Canonical != Ty) {
1651 auto CanonicalTI = TypeMap.find(Canonical);
1652 if (CanonicalTI != TypeMap.end()) {
1653 assert(CanonicalTI->second.isValid());
1654 return CanonicalTI->second;
1655 }
1656 }
1657
1658 // Perform the mapping with the canonical type.
1659
SJWf93f5f32020-05-05 07:27:56 -05001660 const auto &DL = module->getDataLayout();
1661
SJW01901d92020-05-21 08:58:31 -05001662 SPIRVID RID;
SJWf93f5f32020-05-05 07:27:56 -05001663
alan-bakerc3fd07f2020-10-22 09:48:49 -04001664 switch (Canonical->getTypeID()) {
SJWf93f5f32020-05-05 07:27:56 -05001665 default: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001666 Canonical->print(errs());
SJWf93f5f32020-05-05 07:27:56 -05001667 llvm_unreachable("Unsupported type???");
1668 break;
1669 }
1670 case Type::MetadataTyID:
1671 case Type::LabelTyID: {
1672 // Ignore these types.
1673 break;
1674 }
1675 case Type::PointerTyID: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001676 PointerType *PTy = cast<PointerType>(Canonical);
SJWf93f5f32020-05-05 07:27:56 -05001677 unsigned AddrSpace = PTy->getAddressSpace();
1678
1679 if (AddrSpace != AddressSpace::UniformConstant) {
1680 auto PointeeTy = PTy->getElementType();
1681 if (PointeeTy->isStructTy() &&
1682 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1683 // TODO(sjw): assert always an image?
1684 RID = getSPIRVType(PointeeTy);
1685 break;
1686 }
1687 }
1688
SJWf93f5f32020-05-05 07:27:56 -05001689 //
1690 // Generate OpTypePointer.
1691 //
1692
1693 // OpTypePointer
1694 // Ops[0] = Storage Class
1695 // Ops[1] = Element Type ID
1696 SPIRVOperandVec Ops;
1697
SJW01901d92020-05-21 08:58:31 -05001698 Ops << GetStorageClass(AddrSpace) << PTy->getElementType();
SJWf93f5f32020-05-05 07:27:56 -05001699
1700 RID = addSPIRVInst<kTypes>(spv::OpTypePointer, Ops);
1701 break;
1702 }
1703 case Type::StructTyID: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001704 StructType *STy = cast<StructType>(Canonical);
SJWf93f5f32020-05-05 07:27:56 -05001705
1706 // Handle sampler type.
1707 if (STy->isOpaque()) {
1708 if (STy->getName().equals("opencl.sampler_t")) {
1709 //
1710 // Generate OpTypeSampler
1711 //
1712 // Empty Ops.
1713
1714 RID = addSPIRVInst<kTypes>(spv::OpTypeSampler);
1715 break;
1716 } else if (STy->getName().startswith("opencl.image1d_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001717 STy->getName().startswith("opencl.image1d_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001718 STy->getName().startswith("opencl.image1d_wo_t") ||
1719 STy->getName().startswith("opencl.image1d_array_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001720 STy->getName().startswith("opencl.image1d_array_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001721 STy->getName().startswith("opencl.image1d_array_wo_t") ||
1722 STy->getName().startswith("opencl.image2d_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001723 STy->getName().startswith("opencl.image2d_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001724 STy->getName().startswith("opencl.image2d_wo_t") ||
1725 STy->getName().startswith("opencl.image2d_array_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001726 STy->getName().startswith("opencl.image2d_array_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001727 STy->getName().startswith("opencl.image2d_array_wo_t") ||
1728 STy->getName().startswith("opencl.image3d_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001729 STy->getName().startswith("opencl.image3d_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001730 STy->getName().startswith("opencl.image3d_wo_t")) {
SJW01901d92020-05-21 08:58:31 -05001731 if (STy->getName().startswith("opencl.image1d_")) {
1732 if (STy->getName().contains(".sampled"))
1733 addCapability(spv::CapabilitySampled1D);
1734 else
1735 addCapability(spv::CapabilityImage1D);
1736 }
1737
SJWf93f5f32020-05-05 07:27:56 -05001738 //
1739 // Generate OpTypeImage
1740 //
1741 // Ops[0] = Sampled Type ID
1742 // Ops[1] = Dim ID
1743 // Ops[2] = Depth (Literal Number)
1744 // Ops[3] = Arrayed (Literal Number)
1745 // Ops[4] = MS (Literal Number)
1746 // Ops[5] = Sampled (Literal Number)
1747 // Ops[6] = Image Format ID
1748 //
1749 SPIRVOperandVec Ops;
1750
SJW01901d92020-05-21 08:58:31 -05001751 SPIRVID SampledTyID;
SJWf93f5f32020-05-05 07:27:56 -05001752 if (STy->getName().contains(".float")) {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001753 SampledTyID = getSPIRVType(Type::getFloatTy(Canonical->getContext()));
SJWf93f5f32020-05-05 07:27:56 -05001754 } else if (STy->getName().contains(".uint")) {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001755 SampledTyID = getSPIRVType(Type::getInt32Ty(Canonical->getContext()));
SJWf93f5f32020-05-05 07:27:56 -05001756 } else if (STy->getName().contains(".int")) {
1757 // Generate a signed 32-bit integer if necessary.
1758 if (int32ID == 0) {
1759 SPIRVOperandVec intOps;
SJW01901d92020-05-21 08:58:31 -05001760 intOps << 32 << 1;
SJWf93f5f32020-05-05 07:27:56 -05001761 int32ID = addSPIRVInst<kTypes>(spv::OpTypeInt, intOps);
1762 }
1763 SampledTyID = int32ID;
1764
1765 // Generate a vec4 of the signed int if necessary.
1766 if (v4int32ID == 0) {
1767 SPIRVOperandVec vecOps;
SJW01901d92020-05-21 08:58:31 -05001768 vecOps << int32ID << 4;
SJWf93f5f32020-05-05 07:27:56 -05001769 v4int32ID = addSPIRVInst<kTypes>(spv::OpTypeVector, vecOps);
1770 }
1771 } else {
1772 // This was likely an UndefValue.
alan-bakerc3fd07f2020-10-22 09:48:49 -04001773 SampledTyID = getSPIRVType(Type::getFloatTy(Canonical->getContext()));
SJWf93f5f32020-05-05 07:27:56 -05001774 }
SJW01901d92020-05-21 08:58:31 -05001775 Ops << SampledTyID;
SJWf93f5f32020-05-05 07:27:56 -05001776
1777 spv::Dim DimID = spv::Dim2D;
1778 if (STy->getName().startswith("opencl.image1d_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001779 STy->getName().startswith("opencl.image1d_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001780 STy->getName().startswith("opencl.image1d_wo_t") ||
1781 STy->getName().startswith("opencl.image1d_array_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001782 STy->getName().startswith("opencl.image1d_array_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001783 STy->getName().startswith("opencl.image1d_array_wo_t")) {
1784 DimID = spv::Dim1D;
1785 } else if (STy->getName().startswith("opencl.image3d_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001786 STy->getName().startswith("opencl.image3d_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001787 STy->getName().startswith("opencl.image3d_wo_t")) {
1788 DimID = spv::Dim3D;
1789 }
SJW01901d92020-05-21 08:58:31 -05001790 Ops << DimID;
SJWf93f5f32020-05-05 07:27:56 -05001791
1792 // TODO: Set up Depth.
SJW01901d92020-05-21 08:58:31 -05001793 Ops << 0;
SJWf93f5f32020-05-05 07:27:56 -05001794
1795 uint32_t arrayed = STy->getName().contains("_array_") ? 1 : 0;
SJW01901d92020-05-21 08:58:31 -05001796 Ops << arrayed;
SJWf93f5f32020-05-05 07:27:56 -05001797
1798 // TODO: Set up MS.
SJW01901d92020-05-21 08:58:31 -05001799 Ops << 0;
SJWf93f5f32020-05-05 07:27:56 -05001800
1801 // Set up Sampled.
1802 //
1803 // From Spec
1804 //
1805 // 0 indicates this is only known at run time, not at compile time
1806 // 1 indicates will be used with sampler
1807 // 2 indicates will be used without a sampler (a storage image)
1808 uint32_t Sampled = 1;
1809 if (!STy->getName().contains(".sampled")) {
1810 Sampled = 2;
1811 }
SJW01901d92020-05-21 08:58:31 -05001812 Ops << Sampled;
SJWf93f5f32020-05-05 07:27:56 -05001813
1814 // TODO: Set up Image Format.
SJW01901d92020-05-21 08:58:31 -05001815 Ops << spv::ImageFormatUnknown;
SJWf93f5f32020-05-05 07:27:56 -05001816 RID = addSPIRVInst<kTypes>(spv::OpTypeImage, Ops);
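          // e.g. a sampled, read-only, non-arrayed 2D image with float data
          // becomes roughly: OpTypeImage %float 2D 0 0 0 1 Unknown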
1817
alan-bakerf6bc8252020-09-23 14:58:55 -04001818 // Only need a sampled version of the type if it is used with a sampler.
1819 if (Sampled == 1) {
1820 Ops.clear();
1821 Ops << RID;
alan-bakerc3fd07f2020-10-22 09:48:49 -04001822 getImageTypeMap()[Canonical] =
alan-bakerf6bc8252020-09-23 14:58:55 -04001823 addSPIRVInst<kTypes>(spv::OpTypeSampledImage, Ops);
1824 }
SJWf93f5f32020-05-05 07:27:56 -05001825 break;
1826 }
1827 }
1828
1829 //
1830 // Generate OpTypeStruct
1831 //
1832 // Ops[0] ... Ops[n] = Member IDs
1833 SPIRVOperandVec Ops;
1834
1835 for (auto *EleTy : STy->elements()) {
SJW01901d92020-05-21 08:58:31 -05001836 Ops << EleTy;
SJWf93f5f32020-05-05 07:27:56 -05001837 }
1838
1839 RID = addSPIRVInst<kTypes>(spv::OpTypeStruct, Ops);
1840
alan-bakerc3fd07f2020-10-22 09:48:49 -04001841 // Generate OpMemberDecorate unless we are generating it for the canonical
1842 // type.
1843 StructType *canonical = cast<StructType>(CanonicalType(STy));
1844 if (TypesNeedingLayout.idFor(STy) &&
1845 (canonical == STy || !TypesNeedingLayout.idFor(canonical))) {
SJWf93f5f32020-05-05 07:27:56 -05001846 for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
1847 MemberIdx++) {
1848 // Ops[0] = Structure Type ID
1849 // Ops[1] = Member Index(Literal Number)
1850 // Ops[2] = Decoration (Offset)
1851 // Ops[3] = Byte Offset (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05001852 const auto ByteOffset =
1853 GetExplicitLayoutStructMemberOffset(STy, MemberIdx, DL);
1854
SJW01901d92020-05-21 08:58:31 -05001855 Ops.clear();
1856 Ops << RID << MemberIdx << spv::DecorationOffset << ByteOffset;
SJWf93f5f32020-05-05 07:27:56 -05001857
1858 addSPIRVInst<kAnnotations>(spv::OpMemberDecorate, Ops);
1859 }
1860 }
1861
alan-bakerc3fd07f2020-10-22 09:48:49 -04001862 // Generate OpDecorate unless we are generating it for the canonical type.
1863 if (StructTypesNeedingBlock.idFor(STy) &&
1864 (canonical == STy || !StructTypesNeedingBlock.idFor(canonical))) {
SJWf93f5f32020-05-05 07:27:56 -05001865 Ops.clear();
1866 // Use Block decorations with StorageBuffer storage class.
SJW01901d92020-05-21 08:58:31 -05001867 Ops << RID << spv::DecorationBlock;
SJWf93f5f32020-05-05 07:27:56 -05001868
1869 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
1870 }
1871 break;
1872 }
1873 case Type::IntegerTyID: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001874 uint32_t bit_width =
1875 static_cast<uint32_t>(Canonical->getPrimitiveSizeInBits());
SJWf93f5f32020-05-05 07:27:56 -05001876
alan-bakere2a62752020-07-09 22:53:23 -04001877 if (clspv::Option::Int8Support() && bit_width == 8) {
SJW01901d92020-05-21 08:58:31 -05001878 addCapability(spv::CapabilityInt8);
alan-bakere2a62752020-07-09 22:53:23 -04001879 } else if (bit_width == 16) {
SJW01901d92020-05-21 08:58:31 -05001880 addCapability(spv::CapabilityInt16);
alan-bakere2a62752020-07-09 22:53:23 -04001881 } else if (bit_width == 64) {
SJW01901d92020-05-21 08:58:31 -05001882 addCapability(spv::CapabilityInt64);
1883 }
1884
alan-bakere2a62752020-07-09 22:53:23 -04001885 if (bit_width == 1) {
SJWf93f5f32020-05-05 07:27:56 -05001886 RID = addSPIRVInst<kTypes>(spv::OpTypeBool);
1887 } else {
alan-bakere2a62752020-07-09 22:53:23 -04001888 if (!clspv::Option::Int8Support() && bit_width == 8) {
SJWf93f5f32020-05-05 07:27:56 -05001889 // i8 is added to TypeMap as i32.
alan-bakerc3fd07f2020-10-22 09:48:49 -04001890 RID = getSPIRVType(Type::getIntNTy(Canonical->getContext(), 32));
SJWf93f5f32020-05-05 07:27:56 -05001891 } else {
1892 SPIRVOperandVec Ops;
alan-bakere2a62752020-07-09 22:53:23 -04001893 Ops << bit_width << 0 /* not signed */;
SJWf93f5f32020-05-05 07:27:56 -05001894 RID = addSPIRVInst<kTypes>(spv::OpTypeInt, Ops);
1895 }
1896 }
1897 break;
1898 }
1899 case Type::HalfTyID:
1900 case Type::FloatTyID:
1901 case Type::DoubleTyID: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001902 uint32_t bit_width =
1903 static_cast<uint32_t>(Canonical->getPrimitiveSizeInBits());
alan-bakere2a62752020-07-09 22:53:23 -04001904 if (bit_width == 16) {
SJW01901d92020-05-21 08:58:31 -05001905 addCapability(spv::CapabilityFloat16);
alan-bakere2a62752020-07-09 22:53:23 -04001906 } else if (bit_width == 64) {
SJW01901d92020-05-21 08:58:31 -05001907 addCapability(spv::CapabilityFloat64);
1908 }
1909
SJWf93f5f32020-05-05 07:27:56 -05001910 SPIRVOperandVec Ops;
alan-bakere2a62752020-07-09 22:53:23 -04001911 Ops << bit_width;
SJWf93f5f32020-05-05 07:27:56 -05001912
1913 RID = addSPIRVInst<kTypes>(spv::OpTypeFloat, Ops);
1914 break;
1915 }
1916 case Type::ArrayTyID: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001917 ArrayType *ArrTy = cast<ArrayType>(Canonical);
SJWf93f5f32020-05-05 07:27:56 -05001918 const uint64_t Length = ArrTy->getArrayNumElements();
1919 if (Length == 0) {
1920 // By convention, map it to a RuntimeArray.
1921
1922 Type *EleTy = ArrTy->getArrayElementType();
1923
1924 //
1925 // Generate OpTypeRuntimeArray.
1926 //
1927 // OpTypeRuntimeArray
1928 // Ops[0] = Element Type ID
1929 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05001930 Ops << EleTy;
SJWf93f5f32020-05-05 07:27:56 -05001931
1932 RID = addSPIRVInst<kTypes>(spv::OpTypeRuntimeArray, Ops);
1933
1934 if (Hack_generate_runtime_array_stride_early) {
1935 // Generate OpDecorate.
1936
1937 // Ops[0] = Target ID
1938 // Ops[1] = Decoration (ArrayStride)
1939 // Ops[2] = Stride Number(Literal Number)
1940 Ops.clear();
1941
SJW01901d92020-05-21 08:58:31 -05001942 Ops << RID << spv::DecorationArrayStride
1943 << static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL));
SJWf93f5f32020-05-05 07:27:56 -05001944
1945 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
1946 }
1947
1948 } else {
1949
1950 //
1951 // Generate OpConstant and OpTypeArray.
1952 //
1953
1954 //
1955 // Generate OpConstant for array length.
1956 //
1957 // Add constant for length to constant list.
1958 Constant *CstLength =
1959 ConstantInt::get(Type::getInt32Ty(module->getContext()), Length);
SJWf93f5f32020-05-05 07:27:56 -05001960
1961 // Remember to generate ArrayStride later
alan-bakerc3fd07f2020-10-22 09:48:49 -04001962 getTypesNeedingArrayStride().insert(Canonical);
SJWf93f5f32020-05-05 07:27:56 -05001963
1964 //
1965 // Generate OpTypeArray.
1966 //
1967 // Ops[0] = Element Type ID
1968 // Ops[1] = Array Length Constant ID
1969 SPIRVOperandVec Ops;
1970
SJW01901d92020-05-21 08:58:31 -05001971 Ops << ArrTy->getElementType() << CstLength;
SJWf93f5f32020-05-05 07:27:56 -05001972
1973 RID = addSPIRVInst<kTypes>(spv::OpTypeArray, Ops);
1974 }
1975 break;
1976 }
1977 case Type::FixedVectorTyID: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001978 auto VecTy = cast<VectorType>(Canonical);
SJWf93f5f32020-05-05 07:27:56 -05001979 // <4 x i8> is changed to i32 if i8 is not generally supported.
1980 if (!clspv::Option::Int8Support() &&
1981 VecTy->getElementType() == Type::getInt8Ty(module->getContext())) {
alan-baker5a8c3be2020-09-09 13:44:26 -04001982 if (VecTy->getElementCount().getKnownMinValue() == 4) {
SJWf93f5f32020-05-05 07:27:56 -05001983 RID = getSPIRVType(VecTy->getElementType());
1984 break;
1985 } else {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001986 Canonical->print(errs());
SJWf93f5f32020-05-05 07:27:56 -05001987 llvm_unreachable("Support above i8 vector type");
1988 }
1989 }
1990
1991 // Ops[0] = Component Type ID
1992 // Ops[1] = Component Count (Literal Number)
1993 SPIRVOperandVec Ops;
alan-baker5a8c3be2020-09-09 13:44:26 -04001994 Ops << VecTy->getElementType()
1995 << VecTy->getElementCount().getKnownMinValue();
SJWf93f5f32020-05-05 07:27:56 -05001996
1997 RID = addSPIRVInst<kTypes>(spv::OpTypeVector, Ops);
1998 break;
1999 }
2000 case Type::VoidTyID: {
2001 RID = addSPIRVInst<kTypes>(spv::OpTypeVoid);
2002 break;
2003 }
2004 case Type::FunctionTyID: {
2005 // Generate SPIRV instruction for function type.
alan-bakerc3fd07f2020-10-22 09:48:49 -04002006 FunctionType *FTy = cast<FunctionType>(Canonical);
SJWf93f5f32020-05-05 07:27:56 -05002007
2008 // Ops[0] = Return Type ID
2009 // Ops[1] ... Ops[n] = Parameter Type IDs
2010 SPIRVOperandVec Ops;
2011
2012 // Find SPIRV instruction for return type
SJW01901d92020-05-21 08:58:31 -05002013 Ops << FTy->getReturnType();
SJWf93f5f32020-05-05 07:27:56 -05002014
2015 // Find SPIRV instructions for parameter types
2016 for (unsigned k = 0; k < FTy->getNumParams(); k++) {
2017 // Find SPIRV instruction for parameter type.
2018 auto ParamTy = FTy->getParamType(k);
2019 if (ParamTy->isPointerTy()) {
2020 auto PointeeTy = ParamTy->getPointerElementType();
2021 if (PointeeTy->isStructTy() &&
2022 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
2023 ParamTy = PointeeTy;
2024 }
2025 }
2026
SJW01901d92020-05-21 08:58:31 -05002027 Ops << ParamTy;
SJWf93f5f32020-05-05 07:27:56 -05002028 }
2029
2030 RID = addSPIRVInst<kTypes>(spv::OpTypeFunction, Ops);
2031 break;
2032 }
2033 }
2034
SJW01901d92020-05-21 08:58:31 -05002035 if (RID.isValid()) {
alan-bakerc3fd07f2020-10-22 09:48:49 -04002036 TypeMap[Canonical] = RID;
2037 if (Ty != Canonical) {
2038 // Speed up future lookups of this type by also caching the non-canonical
2039 // type.
2040 TypeMap[Ty] = RID;
2041 }
SJWf93f5f32020-05-05 07:27:56 -05002042 }
2043 return RID;
David Neto22f144c2017-06-12 14:26:21 -04002044}
2045
SJW77b87ad2020-04-21 14:37:52 -05002046void SPIRVProducerPass::GenerateSPIRVTypes() {
David Neto22f144c2017-06-12 14:26:21 -04002047 for (Type *Ty : getTypeList()) {
SJWf93f5f32020-05-05 07:27:56 -05002048 getSPIRVType(Ty);
David Netoc6f3ab22018-04-06 18:02:31 -04002049 }
David Neto22f144c2017-06-12 14:26:21 -04002050}
2051
SJW806a5d82020-07-15 12:51:38 -05002052SPIRVID SPIRVProducerPass::getSPIRVInt32Constant(uint32_t CstVal) {
2053 Type *i32 = Type::getInt32Ty(module->getContext());
2054 Constant *Cst = ConstantInt::get(i32, CstVal);
2055 return getSPIRVValue(Cst);
2056}
2057
SJWf93f5f32020-05-05 07:27:56 -05002058SPIRVID SPIRVProducerPass::getSPIRVConstant(Constant *Cst) {
David Neto22f144c2017-06-12 14:26:21 -04002059 ValueMapType &VMap = getValueMap();
David Neto482550a2018-03-24 05:21:07 -07002060 const bool hack_undef = clspv::Option::HackUndef();
David Neto22f144c2017-06-12 14:26:21 -04002061
SJW01901d92020-05-21 08:58:31 -05002062 SPIRVID RID;
David Neto22f144c2017-06-12 14:26:21 -04002063
SJWf93f5f32020-05-05 07:27:56 -05002064 //
2065 // Generate OpConstant.
2066 //
2067 // Ops[0] = Result Type ID
2068 // Ops[1] .. Ops[n] = Values LiteralNumber
2069 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002070
SJW01901d92020-05-21 08:58:31 -05002071 Ops << Cst->getType();
David Neto22f144c2017-06-12 14:26:21 -04002072
SJWf93f5f32020-05-05 07:27:56 -05002073 std::vector<uint32_t> LiteralNum;
2074 spv::Op Opcode = spv::OpNop;
David Neto22f144c2017-06-12 14:26:21 -04002075
SJWf93f5f32020-05-05 07:27:56 -05002076 if (isa<UndefValue>(Cst)) {
David Neto22f144c2017-06-12 14:26:21 -04002077 // Ops[0] = Result Type ID
SJWf93f5f32020-05-05 07:27:56 -05002078 Opcode = spv::OpUndef;
2079 if (hack_undef && IsTypeNullable(Cst->getType())) {
2080 Opcode = spv::OpConstantNull;
2081 }
2082 } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
alan-bakere2a62752020-07-09 22:53:23 -04002083 unsigned bit_width = CI->getBitWidth();
2084 if (bit_width == 1) {
SJWf93f5f32020-05-05 07:27:56 -05002085 // If the bitwidth of constant is 1, generate OpConstantTrue or
2086 // OpConstantFalse.
2087 if (CI->getZExtValue()) {
2088 // Ops[0] = Result Type ID
2089 Opcode = spv::OpConstantTrue;
David Neto22f144c2017-06-12 14:26:21 -04002090 } else {
SJWf93f5f32020-05-05 07:27:56 -05002091 // Ops[0] = Result Type ID
2092 Opcode = spv::OpConstantFalse;
David Neto22f144c2017-06-12 14:26:21 -04002093 }
SJWf93f5f32020-05-05 07:27:56 -05002094 } else {
2095 auto V = CI->getZExtValue();
2096 LiteralNum.push_back(V & 0xFFFFFFFF);
2097
alan-bakere2a62752020-07-09 22:53:23 -04002098 if (bit_width > 32) {
SJWf93f5f32020-05-05 07:27:56 -05002099 LiteralNum.push_back(V >> 32);
David Neto22f144c2017-06-12 14:26:21 -04002100 }
2101
2102 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002103
SJW01901d92020-05-21 08:58:31 -05002104 Ops << LiteralNum;
SJWf93f5f32020-05-05 07:27:56 -05002105 }
2106 } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
2107 uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
2108 Type *CFPTy = CFP->getType();
2109 if (CFPTy->isFloatTy()) {
2110 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2111 } else if (CFPTy->isDoubleTy()) {
2112 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2113 LiteralNum.push_back(FPVal >> 32);
2114 } else if (CFPTy->isHalfTy()) {
2115 LiteralNum.push_back(FPVal & 0xFFFF);
2116 } else {
2117 CFPTy->print(errs());
2118 llvm_unreachable("Implement this ConstantFP Type");
2119 }
David Neto22f144c2017-06-12 14:26:21 -04002120
SJWf93f5f32020-05-05 07:27:56 -05002121 Opcode = spv::OpConstant;
David Neto49351ac2017-08-26 17:32:20 -04002122
SJW01901d92020-05-21 08:58:31 -05002123 Ops << LiteralNum;
SJWf93f5f32020-05-05 07:27:56 -05002124 } else if (isa<ConstantDataSequential>(Cst) &&
2125 cast<ConstantDataSequential>(Cst)->isString()) {
2126 Cst->print(errs());
2127 llvm_unreachable("Implement this Constant");
David Neto49351ac2017-08-26 17:32:20 -04002128
SJWf93f5f32020-05-05 07:27:56 -05002129 } else if (const ConstantDataSequential *CDS =
2130 dyn_cast<ConstantDataSequential>(Cst)) {
2131 // Let's convert <4 x i8> constant to int constant specially.
2132 // This case occurs when all the values are specified as constant
2133 // ints.
2134 Type *CstTy = Cst->getType();
2135 if (is4xi8vec(CstTy)) {
SJWf93f5f32020-05-05 07:27:56 -05002136 //
2137 // Generate OpConstant with OpTypeInt 32 0.
2138 //
2139 uint32_t IntValue = 0;
2140 for (unsigned k = 0; k < 4; k++) {
2141 const uint64_t Val = CDS->getElementAsInteger(k);
2142 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto49351ac2017-08-26 17:32:20 -04002143 }
2144
SJW806a5d82020-07-15 12:51:38 -05002145 RID = getSPIRVInt32Constant(IntValue);
SJWf93f5f32020-05-05 07:27:56 -05002146 } else {
2147
David Neto49351ac2017-08-26 17:32:20 -04002148 // A normal constant-data-sequential case.
David Neto22f144c2017-06-12 14:26:21 -04002149 for (unsigned k = 0; k < CDS->getNumElements(); k++) {
SJW01901d92020-05-21 08:58:31 -05002150 Ops << CDS->getElementAsConstant(k);
David Neto22f144c2017-06-12 14:26:21 -04002151 }
2152
2153 Opcode = spv::OpConstantComposite;
SJWf93f5f32020-05-05 07:27:56 -05002154 }
2155 } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
2156 // Let's convert <4 x i8> constant to int constant specially.
2157 // This case occurs when at least one of the values is an undef.
2158 Type *CstTy = Cst->getType();
2159 if (is4xi8vec(CstTy)) {
SJWf93f5f32020-05-05 07:27:56 -05002160 //
2161 // Generate OpConstant with OpTypeInt 32 0.
2162 //
2163 uint32_t IntValue = 0;
2164 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
2165 I != E; ++I) {
2166 uint64_t Val = 0;
2167 const Value *CV = *I;
2168 if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
2169 Val = CI2->getZExtValue();
David Neto22f144c2017-06-12 14:26:21 -04002170 }
SJWf93f5f32020-05-05 07:27:56 -05002171 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto22f144c2017-06-12 14:26:21 -04002172 }
2173
SJW806a5d82020-07-15 12:51:38 -05002174 RID = getSPIRVInt32Constant(IntValue);
SJWf93f5f32020-05-05 07:27:56 -05002175 } else {
2176
David Neto22f144c2017-06-12 14:26:21 -04002177 // We use a constant composite in SPIR-V for our constant aggregate in
2178 // LLVM.
2179 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002180
2181 for (unsigned k = 0; k < CA->getNumOperands(); k++) {
David Neto22f144c2017-06-12 14:26:21 -04002182 // And add an operand to the composite we are constructing
SJW01901d92020-05-21 08:58:31 -05002183 Ops << CA->getAggregateElement(k);
David Neto22f144c2017-06-12 14:26:21 -04002184 }
David Neto22f144c2017-06-12 14:26:21 -04002185 }
SJWf93f5f32020-05-05 07:27:56 -05002186 } else if (Cst->isNullValue()) {
2187 Opcode = spv::OpConstantNull;
2188 } else {
2189 Cst->print(errs());
2190 llvm_unreachable("Unsupported Constant???");
2191 }
David Neto22f144c2017-06-12 14:26:21 -04002192
SJWf93f5f32020-05-05 07:27:56 -05002193 if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
2194 // Null pointer requires variable pointers.
2195 setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
2196 }
alan-baker5b86ed72019-02-15 08:26:50 -05002197
SJWf93f5f32020-05-05 07:27:56 -05002198 if (RID == 0) {
2199 RID = addSPIRVInst<kConstants>(Opcode, Ops);
2200 }
2201
2202 VMap[Cst] = RID;
2203
2204 return RID;
2205}
2206
2207SPIRVID SPIRVProducerPass::getSPIRVValue(Value *V) {
2208 auto II = ValueMap.find(V);
2209 if (II != ValueMap.end()) {
SJW01901d92020-05-21 08:58:31 -05002210 assert(II->second.isValid());
SJWf93f5f32020-05-05 07:27:56 -05002211 return II->second;
2212 }
2213 if (Constant *Cst = dyn_cast<Constant>(V)) {
2214 return getSPIRVConstant(Cst);
2215 } else {
2216 llvm_unreachable("Variable not found");
2217 }
2218}
2219
SJW77b87ad2020-04-21 14:37:52 -05002220void SPIRVProducerPass::GenerateSamplers() {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002221 auto &sampler_map = getSamplerMap();
alan-baker09cb9802019-12-10 13:16:27 -05002222 SamplerLiteralToIDMap.clear();
David Neto862b7d82018-06-14 18:48:37 -04002223 DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
2224 DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;
David Neto22f144c2017-06-12 14:26:21 -04002225
David Neto862b7d82018-06-14 18:48:37 -04002226 // We might have samplers in the sampler map that are not used
2227 // in the translation unit. We still need to allocate variables
2228 // and bindings for them.
2229 DenseSet<unsigned> used_bindings;
David Neto22f144c2017-06-12 14:26:21 -04002230
SJW77b87ad2020-04-21 14:37:52 -05002231 auto *var_fn = module->getFunction(clspv::LiteralSamplerFunction());
alan-baker09cb9802019-12-10 13:16:27 -05002232 // Return if there are no literal samplers.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002233 if (!var_fn)
2234 return;
alan-baker09cb9802019-12-10 13:16:27 -05002235
David Neto862b7d82018-06-14 18:48:37 -04002236 for (auto user : var_fn->users()) {
2237 // Populate SamplerLiteralToDescriptorSetMap and
2238 // SamplerLiteralToBindingMap.
2239 //
2240 // Look for calls like
2241 // call %opencl.sampler_t addrspace(2)*
2242 // @clspv.sampler.var.literal(
2243 // i32 descriptor,
2244 // i32 binding,
alan-baker09cb9802019-12-10 13:16:27 -05002245 // i32 (index-into-sampler-map|sampler_mask))
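      // When the sampler map is not used, the third operand is the OpenCL
      // sampler literal itself, e.g. (illustrative)
      //   CLK_NORMALIZED_COORDS_FALSE | CLK_ADDRESS_CLAMP_TO_EDGE |
      //   CLK_FILTER_NEAREST
      // already folded into a single i32 mask.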
alan-bakerb6b09dc2018-11-08 16:59:28 -05002246 if (auto *call = dyn_cast<CallInst>(user)) {
alan-baker09cb9802019-12-10 13:16:27 -05002247 const auto third_param = static_cast<unsigned>(
alan-bakerb6b09dc2018-11-08 16:59:28 -05002248 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
alan-baker09cb9802019-12-10 13:16:27 -05002249 auto sampler_value = third_param;
2250 if (clspv::Option::UseSamplerMap()) {
2251 if (third_param >= sampler_map.size()) {
2252 errs() << "Out of bounds index to sampler map: " << third_param;
2253 llvm_unreachable("bad sampler init: out of bounds");
2254 }
2255 sampler_value = sampler_map[third_param].first;
David Neto862b7d82018-06-14 18:48:37 -04002256 }
2257
David Neto862b7d82018-06-14 18:48:37 -04002258 const auto descriptor_set = static_cast<unsigned>(
2259 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
2260 const auto binding = static_cast<unsigned>(
2261 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
2262
2263 SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
2264 SamplerLiteralToBindingMap[sampler_value] = binding;
2265 used_bindings.insert(binding);
2266 }
2267 }
2268
alan-baker09cb9802019-12-10 13:16:27 -05002269 DenseSet<size_t> seen;
2270 for (auto user : var_fn->users()) {
2271 if (!isa<CallInst>(user))
2272 continue;
2273
2274 auto call = cast<CallInst>(user);
2275 const unsigned third_param = static_cast<unsigned>(
2276 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
2277
2278 // Already allocated a variable for this value.
2279 if (!seen.insert(third_param).second)
2280 continue;
2281
2282 auto sampler_value = third_param;
2283 if (clspv::Option::UseSamplerMap()) {
2284 sampler_value = sampler_map[third_param].first;
2285 }
2286
SJW806a5d82020-07-15 12:51:38 -05002287 auto sampler_var_id = addSPIRVGlobalVariable(
2288 getSPIRVType(SamplerTy), spv::StorageClassUniformConstant);
David Neto22f144c2017-06-12 14:26:21 -04002289
alan-baker09cb9802019-12-10 13:16:27 -05002290 SamplerLiteralToIDMap[sampler_value] = sampler_var_id;
David Neto22f144c2017-06-12 14:26:21 -04002291
David Neto862b7d82018-06-14 18:48:37 -04002292 unsigned descriptor_set;
2293 unsigned binding;
alan-baker09cb9802019-12-10 13:16:27 -05002294 if (SamplerLiteralToBindingMap.find(sampler_value) ==
alan-bakerb6b09dc2018-11-08 16:59:28 -05002295 SamplerLiteralToBindingMap.end()) {
David Neto862b7d82018-06-14 18:48:37 -04002296 // This sampler is not actually used. Find the next unused binding.
alan-baker7506abb2020-09-10 15:02:55 -04002297 for (binding = 0; used_bindings.count(binding); binding++) {
2298 }
David Neto862b7d82018-06-14 18:48:37 -04002299 descriptor_set = 0; // Literal samplers always use descriptor set 0.
2300 used_bindings.insert(binding);
2301 } else {
alan-baker09cb9802019-12-10 13:16:27 -05002302 descriptor_set = SamplerLiteralToDescriptorSetMap[sampler_value];
2303 binding = SamplerLiteralToBindingMap[sampler_value];
alan-bakercff80152019-06-15 00:38:00 -04002304
alan-baker86ce19c2020-08-05 13:09:19 -04002305 auto import_id = getReflectionImport();
2306 SPIRVOperandVec Ops;
2307 Ops << getSPIRVType(Type::getVoidTy(module->getContext())) << import_id
2308 << reflection::ExtInstLiteralSampler
2309 << getSPIRVInt32Constant(descriptor_set)
2310 << getSPIRVInt32Constant(binding)
2311 << getSPIRVInt32Constant(sampler_value);
2312 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
David Neto862b7d82018-06-14 18:48:37 -04002313 }
2314
SJW69939d52020-04-16 07:29:07 -05002315 // Ops[0] = Target ID
2316 // Ops[1] = Decoration (DescriptorSet)
2317 // Ops[2] = LiteralNumber according to Decoration
SJW806a5d82020-07-15 12:51:38 -05002318 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05002319 Ops << sampler_var_id << spv::DecorationDescriptorSet << descriptor_set;
David Neto22f144c2017-06-12 14:26:21 -04002320
SJWf93f5f32020-05-05 07:27:56 -05002321 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002322
2323 // Ops[0] = Target ID
2324 // Ops[1] = Decoration (Binding)
2325 // Ops[2] = LiteralNumber according to Decoration
2326 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002327 Ops << sampler_var_id << spv::DecorationBinding << binding;
David Neto22f144c2017-06-12 14:26:21 -04002328
SJWf93f5f32020-05-05 07:27:56 -05002329 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002330 }
David Neto862b7d82018-06-14 18:48:37 -04002331}
David Neto22f144c2017-06-12 14:26:21 -04002332
SJW77b87ad2020-04-21 14:37:52 -05002333void SPIRVProducerPass::GenerateResourceVars() {
David Neto862b7d82018-06-14 18:48:37 -04002334 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04002335
David Neto862b7d82018-06-14 18:48:37 -04002336 // Generate variables. Make one for each resource var info object.
2337 for (auto *info : ModuleOrderedResourceVars) {
2338 Type *type = info->var_fn->getReturnType();
2339 // Remap the address space for opaque types.
2340 switch (info->arg_kind) {
2341 case clspv::ArgKind::Sampler:
alan-bakerf6bc8252020-09-23 14:58:55 -04002342 case clspv::ArgKind::SampledImage:
2343 case clspv::ArgKind::StorageImage:
David Neto862b7d82018-06-14 18:48:37 -04002344 type = PointerType::get(type->getPointerElementType(),
2345 clspv::AddressSpace::UniformConstant);
2346 break;
2347 default:
2348 break;
2349 }
David Neto22f144c2017-06-12 14:26:21 -04002350
David Neto862b7d82018-06-14 18:48:37 -04002351 const auto sc = GetStorageClassForArgKind(info->arg_kind);
David Neto22f144c2017-06-12 14:26:21 -04002352
SJW806a5d82020-07-15 12:51:38 -05002353 info->var_id = addSPIRVGlobalVariable(getSPIRVType(type), sc);
David Neto862b7d82018-06-14 18:48:37 -04002354
2355 // Map calls to the variable-builtin-function.
2356 for (auto &U : info->var_fn->uses()) {
2357 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
2358 const auto set = unsigned(
2359 dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
2360 const auto binding = unsigned(
2361 dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
2362 if (set == info->descriptor_set && binding == info->binding) {
2363 switch (info->arg_kind) {
2364 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002365 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002366 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04002367 case clspv::ArgKind::PodUBO:
2368 case clspv::ArgKind::PodPushConstant:
David Neto862b7d82018-06-14 18:48:37 -04002369 // The call maps to the variable directly.
2370 VMap[call] = info->var_id;
2371 break;
2372 case clspv::ArgKind::Sampler:
alan-bakerf6bc8252020-09-23 14:58:55 -04002373 case clspv::ArgKind::SampledImage:
2374 case clspv::ArgKind::StorageImage:
David Neto862b7d82018-06-14 18:48:37 -04002375 // The call maps to a load we generate later.
2376 ResourceVarDeferredLoadCalls[call] = info->var_id;
2377 break;
2378 default:
2379 llvm_unreachable("Unhandled arg kind");
2380 }
2381 }
David Neto22f144c2017-06-12 14:26:21 -04002382 }
David Neto862b7d82018-06-14 18:48:37 -04002383 }
2384 }
David Neto22f144c2017-06-12 14:26:21 -04002385
David Neto862b7d82018-06-14 18:48:37 -04002386 // Generate associated decorations.
SJWf93f5f32020-05-05 07:27:56 -05002387 SPIRVOperandVec Ops;
David Neto862b7d82018-06-14 18:48:37 -04002388 for (auto *info : ModuleOrderedResourceVars) {
alan-baker9b0ec3c2020-04-06 14:45:34 -04002389 // Push constants don't need descriptor set or binding decorations.
2390 if (info->arg_kind == clspv::ArgKind::PodPushConstant)
2391 continue;
2392
David Neto862b7d82018-06-14 18:48:37 -04002393 // Decorate with DescriptorSet and Binding.
2394 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002395 Ops << info->var_id << spv::DecorationDescriptorSet << info->descriptor_set;
SJWf93f5f32020-05-05 07:27:56 -05002396 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto862b7d82018-06-14 18:48:37 -04002397
2398 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002399 Ops << info->var_id << spv::DecorationBinding << info->binding;
SJWf93f5f32020-05-05 07:27:56 -05002400 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto862b7d82018-06-14 18:48:37 -04002401
alan-bakere9308012019-03-15 10:25:13 -04002402 if (info->coherent) {
2403 // Decorate with Coherent if required for the variable.
2404 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002405 Ops << info->var_id << spv::DecorationCoherent;
SJWf93f5f32020-05-05 07:27:56 -05002406 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere9308012019-03-15 10:25:13 -04002407 }
2408
David Neto862b7d82018-06-14 18:48:37 -04002409 // Generate NonWritable and NonReadable
2410 switch (info->arg_kind) {
2411 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002412 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002413 if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
2414 clspv::AddressSpace::Constant) {
2415 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002416 Ops << info->var_id << spv::DecorationNonWritable;
SJWf93f5f32020-05-05 07:27:56 -05002417 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002418 }
David Neto862b7d82018-06-14 18:48:37 -04002419 break;
alan-bakerf6bc8252020-09-23 14:58:55 -04002420 case clspv::ArgKind::StorageImage: {
2421 auto *type = info->var_fn->getReturnType();
2422 auto *struct_ty = cast<StructType>(type->getPointerElementType());
2423 // TODO(alan-baker): This is conservative. If compiling for OpenCL 2.0 or
2424 // above, the compiler treats all write_only images as read_write images.
2425 if (struct_ty->getName().contains("_wo_t")) {
2426 Ops.clear();
2427 Ops << info->var_id << spv::DecorationNonReadable;
2428 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
2429 }
David Neto862b7d82018-06-14 18:48:37 -04002430 break;
alan-bakerf6bc8252020-09-23 14:58:55 -04002431 }
David Neto862b7d82018-06-14 18:48:37 -04002432 default:
2433 break;
David Neto22f144c2017-06-12 14:26:21 -04002434 }
2435 }
2436}
2437
2438void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
David Neto22f144c2017-06-12 14:26:21 -04002439 ValueMapType &VMap = getValueMap();
SJW01901d92020-05-21 08:58:31 -05002440 std::vector<SPIRVID> &BuiltinDimVec = getBuiltinDimVec();
David Neto85082642018-03-24 06:55:20 -07002441 const DataLayout &DL = GV.getParent()->getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04002442
2443 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
2444 Type *Ty = GV.getType();
2445 PointerType *PTy = cast<PointerType>(Ty);
2446
SJW01901d92020-05-21 08:58:31 -05002447 SPIRVID InitializerID;
David Neto22f144c2017-06-12 14:26:21 -04002448
2449 // Workgroup size is handled differently (it goes into a constant)
2450 if (spv::BuiltInWorkgroupSize == BuiltinType) {
David Neto22f144c2017-06-12 14:26:21 -04002451 uint32_t PrevXDimCst = 0xFFFFFFFF;
2452 uint32_t PrevYDimCst = 0xFFFFFFFF;
2453 uint32_t PrevZDimCst = 0xFFFFFFFF;
alan-baker3b609772020-09-03 19:10:17 -04002454 bool HasMD = true;
David Neto22f144c2017-06-12 14:26:21 -04002455 for (Function &Func : *GV.getParent()) {
2456 if (Func.isDeclaration()) {
2457 continue;
2458 }
2459
2460 // We only need to check kernels.
2461 if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
2462 continue;
2463 }
2464
2465 if (const MDNode *MD =
2466 dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
2467 uint32_t CurXDimCst = static_cast<uint32_t>(
2468 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
2469 uint32_t CurYDimCst = static_cast<uint32_t>(
2470 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
2471 uint32_t CurZDimCst = static_cast<uint32_t>(
2472 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
2473
2474 if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
2475 PrevZDimCst == 0xFFFFFFFF) {
2476 PrevXDimCst = CurXDimCst;
2477 PrevYDimCst = CurYDimCst;
2478 PrevZDimCst = CurZDimCst;
2479 } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
2480 CurZDimCst != PrevZDimCst) {
alan-baker3b609772020-09-03 19:10:17 -04002481 HasMD = false;
2482 continue;
David Neto22f144c2017-06-12 14:26:21 -04002483 } else {
2484 continue;
2485 }
2486
2487 //
2488 // Generate OpConstantComposite.
2489 //
2490 // Ops[0] : Result Type ID
2491 // Ops[1] : Constant size for x dimension.
2492 // Ops[2] : Constant size for y dimension.
2493 // Ops[3] : Constant size for z dimension.
SJWf93f5f32020-05-05 07:27:56 -05002494 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002495
SJW01901d92020-05-21 08:58:31 -05002496 SPIRVID XDimCstID =
SJWf93f5f32020-05-05 07:27:56 -05002497 getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(0)));
SJW01901d92020-05-21 08:58:31 -05002498 SPIRVID YDimCstID =
SJWf93f5f32020-05-05 07:27:56 -05002499 getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(1)));
SJW01901d92020-05-21 08:58:31 -05002500 SPIRVID ZDimCstID =
SJWf93f5f32020-05-05 07:27:56 -05002501 getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(2)));
David Neto22f144c2017-06-12 14:26:21 -04002502
SJW01901d92020-05-21 08:58:31 -05002503 Ops << Ty->getPointerElementType() << XDimCstID << YDimCstID
2504 << ZDimCstID;
David Neto22f144c2017-06-12 14:26:21 -04002505
SJWf93f5f32020-05-05 07:27:56 -05002506 InitializerID =
2507 addSPIRVInst<kGlobalVariables>(spv::OpConstantComposite, Ops);
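          // Illustrative sketch (ids are placeholders): a kernel declaring
          // reqd_work_group_size(8, 4, 1) produces roughly
          //   %wgsize = OpConstantComposite %v3uint %uint_8 %uint_4 %uint_1
          // as the initializer for the WorkgroupSize variable.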
David Neto22f144c2017-06-12 14:26:21 -04002508 } else {
alan-baker3b609772020-09-03 19:10:17 -04002509 HasMD = false;
David Neto22f144c2017-06-12 14:26:21 -04002510 }
2511 }
2512
2513     // If not every kernel has reqd_work_group_size metadata, or non-uniform
2514     // NDRanges are supported, generate OpSpecConstants for the x/y/z dimensions.
Kévin Petit21c23c62020-04-29 01:38:28 +01002515 if (!HasMD || clspv::Option::NonUniformNDRangeSupported()) {
David Neto22f144c2017-06-12 14:26:21 -04002516 //
2517 // Generate OpSpecConstants for x/y/z dimension.
2518 //
2519 // Ops[0] : Result Type ID
2520 // Ops[1] : Constant size for x/y/z dimension (Literal Number).
David Neto22f144c2017-06-12 14:26:21 -04002521
alan-bakera1be3322020-04-20 12:48:18 -04002522 // Allocate spec constants for workgroup size.
SJW77b87ad2020-04-21 14:37:52 -05002523 clspv::AddWorkgroupSpecConstants(module);
alan-bakera1be3322020-04-20 12:48:18 -04002524
SJWf93f5f32020-05-05 07:27:56 -05002525 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05002526 SPIRVID result_type_id = getSPIRVType(
SJWf93f5f32020-05-05 07:27:56 -05002527 dyn_cast<VectorType>(Ty->getPointerElementType())->getElementType());
David Neto22f144c2017-06-12 14:26:21 -04002528
David Neto257c3892018-04-11 13:19:45 -04002529 // X Dimension
SJW01901d92020-05-21 08:58:31 -05002530 Ops << result_type_id << 1;
2531 SPIRVID XDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002532
2533 // Y Dimension
2534 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002535 Ops << result_type_id << 1;
2536 SPIRVID YDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002537
2538 // Z Dimension
2539 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002540 Ops << result_type_id << 1;
2541 SPIRVID ZDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002542
David Neto257c3892018-04-11 13:19:45 -04002543 BuiltinDimVec.push_back(XDimCstID);
2544 BuiltinDimVec.push_back(YDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002545 BuiltinDimVec.push_back(ZDimCstID);
2546
David Neto22f144c2017-06-12 14:26:21 -04002547 //
2548 // Generate OpSpecConstantComposite.
2549 //
2550 // Ops[0] : Result Type ID
2551 // Ops[1] : Constant size for x dimension.
2552 // Ops[2] : Constant size for y dimension.
2553 // Ops[3] : Constant size for z dimension.
David Neto22f144c2017-06-12 14:26:21 -04002554 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002555 Ops << Ty->getPointerElementType() << XDimCstID << YDimCstID << ZDimCstID;
David Neto22f144c2017-06-12 14:26:21 -04002556
SJWf93f5f32020-05-05 07:27:56 -05002557 InitializerID =
2558 addSPIRVInst<kConstants>(spv::OpSpecConstantComposite, Ops);
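      // Illustrative sketch of the result (ids are placeholders):
      //   %x = OpSpecConstant %uint 1
      //   %y = OpSpecConstant %uint 1
      //   %z = OpSpecConstant %uint 1
      //   %wgsize = OpSpecConstantComposite %v3uint %x %y %z
      // The SpecId decorations for %x/%y/%z are added later from
      // BuiltinDimVec in GenerateModuleInfo().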
David Neto22f144c2017-06-12 14:26:21 -04002559 }
alan-bakerbed3a882020-04-21 14:42:41 -04002560 } else if (BuiltinType == spv::BuiltInWorkDim) {
2561 // 1. Generate a specialization constant with a default of 3.
2562 // 2. Allocate and annotate a SpecId for the constant.
2563 // 3. Use the spec constant as the initializer for the variable.
SJWf93f5f32020-05-05 07:27:56 -05002564 SPIRVOperandVec Ops;
alan-bakerbed3a882020-04-21 14:42:41 -04002565
2566 //
2567 // Generate OpSpecConstant.
2568 //
2569 // Ops[0] : Result Type ID
2570 // Ops[1] : Default literal value
alan-bakerbed3a882020-04-21 14:42:41 -04002571
SJW01901d92020-05-21 08:58:31 -05002572 Ops << IntegerType::get(GV.getContext(), 32) << 3;
alan-bakerbed3a882020-04-21 14:42:41 -04002573
SJWf93f5f32020-05-05 07:27:56 -05002574 InitializerID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakerbed3a882020-04-21 14:42:41 -04002575
2576 //
2577 // Generate SpecId decoration.
2578 //
2579 // Ops[0] : target
2580 // Ops[1] : decoration
2581 // Ops[2] : SpecId
Alan Baker75ccc252020-04-21 17:11:52 -04002582 auto spec_id = AllocateSpecConstant(module, SpecConstant::kWorkDim);
alan-bakerbed3a882020-04-21 14:42:41 -04002583 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002584 Ops << InitializerID << spv::DecorationSpecId << spec_id;
alan-bakerbed3a882020-04-21 14:42:41 -04002585
SJWf93f5f32020-05-05 07:27:56 -05002586 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
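    // Illustrative sketch (ids and the allocated SpecId are placeholders):
    //   %work_dim = OpSpecConstant %uint 3
    //   OpDecorate %work_dim SpecId <spec_id>
    // The variable backing get_work_dim() is then initialized with %work_dim.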
alan-bakere1996972020-05-04 08:38:12 -04002587 } else if (BuiltinType == spv::BuiltInGlobalOffset) {
2588 // 1. Generate a spec constant with a default of {0, 0, 0}.
2589 // 2. Allocate and annotate SpecIds for the constants.
2590 // 3. Use the spec constant as the initializer for the variable.
SJWf93f5f32020-05-05 07:27:56 -05002591 SPIRVOperandVec Ops;
alan-bakere1996972020-05-04 08:38:12 -04002592
2593 //
2594 // Generate OpSpecConstant for each dimension.
2595 //
2596 // Ops[0] : Result Type ID
2597 // Ops[1] : Default literal value
2598 //
SJW01901d92020-05-21 08:58:31 -05002599 Ops << IntegerType::get(GV.getContext(), 32) << 0;
2600 SPIRVID x_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002601
alan-bakere1996972020-05-04 08:38:12 -04002602 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002603 Ops << IntegerType::get(GV.getContext(), 32) << 0;
2604 SPIRVID y_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002605
alan-bakere1996972020-05-04 08:38:12 -04002606 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002607 Ops << IntegerType::get(GV.getContext(), 32) << 0;
2608 SPIRVID z_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002609
2610 //
2611 // Generate SpecId decoration for each dimension.
2612 //
2613 // Ops[0] : target
2614 // Ops[1] : decoration
2615 // Ops[2] : SpecId
2616 //
2617 auto spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetX);
2618 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002619 Ops << x_id << spv::DecorationSpecId << spec_id;
SJWf93f5f32020-05-05 07:27:56 -05002620 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002621
2622 spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetY);
2623 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002624 Ops << y_id << spv::DecorationSpecId << spec_id;
SJWf93f5f32020-05-05 07:27:56 -05002625 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002626
2627 spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetZ);
2628 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002629 Ops << z_id << spv::DecorationSpecId << spec_id;
SJWf93f5f32020-05-05 07:27:56 -05002630 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002631
2632 //
2633 // Generate OpSpecConstantComposite.
2634 //
2635 // Ops[0] : type id
2636 // Ops[1..n-1] : elements
2637 //
alan-bakere1996972020-05-04 08:38:12 -04002638 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002639 Ops << GV.getType()->getPointerElementType() << x_id << y_id << z_id;
SJWf93f5f32020-05-05 07:27:56 -05002640 InitializerID = addSPIRVInst<kConstants>(spv::OpSpecConstantComposite, Ops);
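    // Illustrative sketch (ids and SpecIds are placeholders):
    //   %gox = OpSpecConstant %uint 0
    //   %goy = OpSpecConstant %uint 0
    //   %goz = OpSpecConstant %uint 0
    //   OpDecorate %gox SpecId <x>  ; likewise for %goy and %goz
    //   %goff = OpSpecConstantComposite %v3uint %gox %goy %goz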
David Neto22f144c2017-06-12 14:26:21 -04002641 }
2642
David Neto85082642018-03-24 06:55:20 -07002643 const auto AS = PTy->getAddressSpace();
SJW806a5d82020-07-15 12:51:38 -05002644 const auto spvSC = GetStorageClass(AS);
David Neto22f144c2017-06-12 14:26:21 -04002645
David Neto85082642018-03-24 06:55:20 -07002646 const bool module_scope_constant_external_init =
David Neto862b7d82018-06-14 18:48:37 -04002647 (AS == AddressSpace::Constant) && GV.hasInitializer() &&
David Neto85082642018-03-24 06:55:20 -07002648 clspv::Option::ModuleConstantsInStorageBuffer();
2649
Kévin Petit23d5f182019-08-13 16:21:29 +01002650 if (GV.hasInitializer()) {
2651 auto GVInit = GV.getInitializer();
2652 if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
SJWf93f5f32020-05-05 07:27:56 -05002653 InitializerID = getSPIRVValue(GVInit);
David Neto85082642018-03-24 06:55:20 -07002654 }
2655 }
Kévin Petit23d5f182019-08-13 16:21:29 +01002656
SJW806a5d82020-07-15 12:51:38 -05002657 SPIRVID var_id =
2658 addSPIRVGlobalVariable(getSPIRVType(Ty), spvSC, InitializerID);
David Neto85082642018-03-24 06:55:20 -07002659
SJWf93f5f32020-05-05 07:27:56 -05002660 VMap[&GV] = var_id;
David Neto22f144c2017-06-12 14:26:21 -04002661
alan-bakere1996972020-05-04 08:38:12 -04002662 auto IsOpenCLBuiltin = [](spv::BuiltIn builtin) {
2663 return builtin == spv::BuiltInWorkDim ||
2664 builtin == spv::BuiltInGlobalOffset;
2665 };
2666
alan-bakere1996972020-05-04 08:38:12 -04002667  // If we have a builtin (but not one of the OpenCL builtins handled above).
2668 if (spv::BuiltInMax != BuiltinType && !IsOpenCLBuiltin(BuiltinType)) {
David Neto22f144c2017-06-12 14:26:21 -04002669 //
2670 // Generate OpDecorate.
2671 //
2672    // Ops[0] = Target ID
2673    // Ops[1] = Decoration (Builtin)
2674    // Ops[2] = BuiltIn ID
SJW01901d92020-05-21 08:58:31 -05002675 SPIRVID ResultID;
David Neto22f144c2017-06-12 14:26:21 -04002676
2677 // WorkgroupSize is different, we decorate the constant composite that has
2678 // its value, rather than the variable that we use to access the value.
2679 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2680 ResultID = InitializerID;
David Netoa60b00b2017-09-15 16:34:09 -04002681 // Save both the value and variable IDs for later.
2682 WorkgroupSizeValueID = InitializerID;
SJWf93f5f32020-05-05 07:27:56 -05002683 WorkgroupSizeVarID = getSPIRVValue(&GV);
David Neto22f144c2017-06-12 14:26:21 -04002684 } else {
SJWf93f5f32020-05-05 07:27:56 -05002685 ResultID = getSPIRVValue(&GV);
David Neto22f144c2017-06-12 14:26:21 -04002686 }
2687
SJW806a5d82020-07-15 12:51:38 -05002688 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05002689 Ops << ResultID << spv::DecorationBuiltIn << BuiltinType;
David Neto22f144c2017-06-12 14:26:21 -04002690
SJW01901d92020-05-21 08:58:31 -05002691 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto85082642018-03-24 06:55:20 -07002692 } else if (module_scope_constant_external_init) {
2693 // This module scope constant is initialized from a storage buffer with data
2694 // provided by the host at binding 0 of the next descriptor set.
SJW77b87ad2020-04-21 14:37:52 -05002695 const uint32_t descriptor_set = TakeDescriptorIndex(module);
David Neto85082642018-03-24 06:55:20 -07002696
alan-baker86ce19c2020-08-05 13:09:19 -04002697    // Emit the initializer as a reflection instruction.
David Neto85082642018-03-24 06:55:20 -07002698 // Use "kind,buffer" to indicate storage buffer. We might want to expand
2699 // that later to other types, like uniform buffer.
alan-bakerf5e5f692018-11-27 08:33:24 -05002700 std::string hexbytes;
2701 llvm::raw_string_ostream str(hexbytes);
2702 clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());
alan-baker86ce19c2020-08-05 13:09:19 -04002703
2704 // Reflection instruction for constant data.
2705 SPIRVOperandVec Ops;
2706 auto data_id = addSPIRVInst<kDebug>(spv::OpString, str.str().c_str());
2707 Ops << getSPIRVType(Type::getVoidTy(module->getContext()))
2708 << getReflectionImport() << reflection::ExtInstConstantDataStorageBuffer
2709 << getSPIRVInt32Constant(descriptor_set) << getSPIRVInt32Constant(0)
2710 << data_id;
2711 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
David Neto85082642018-03-24 06:55:20 -07002712
David Neto85082642018-03-24 06:55:20 -07002713 // OpDecorate %var DescriptorSet <descriptor_set>
alan-baker86ce19c2020-08-05 13:09:19 -04002714 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002715 Ops << var_id << spv::DecorationDescriptorSet << descriptor_set;
2716 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
SJW69939d52020-04-16 07:29:07 -05002717
2718 // OpDecorate %var Binding <binding>
SJW01901d92020-05-21 08:58:31 -05002719 Ops.clear();
2720 Ops << var_id << spv::DecorationBinding << 0;
2721 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04002722 }
2723}
2724
David Neto22f144c2017-06-12 14:26:21 -04002725void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
David Neto22f144c2017-06-12 14:26:21 -04002726 ValueMapType &VMap = getValueMap();
2727 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04002728 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
2729 auto &GlobalConstArgSet = getGlobalConstArgSet();
2730
2731 FunctionType *FTy = F.getFunctionType();
2732
2733 //
David Neto22f144c2017-06-12 14:26:21 -04002734  // Generate OpFunction.
2735 //
2736
2737 // FOps[0] : Result Type ID
2738 // FOps[1] : Function Control
2739 // FOps[2] : Function Type ID
SJWf93f5f32020-05-05 07:27:56 -05002740 SPIRVOperandVec FOps;
David Neto22f144c2017-06-12 14:26:21 -04002741
2742 // Find SPIRV instruction for return type.
SJW01901d92020-05-21 08:58:31 -05002743 FOps << FTy->getReturnType();
David Neto22f144c2017-06-12 14:26:21 -04002744
2745 // Check function attributes for SPIRV Function Control.
2746 uint32_t FuncControl = spv::FunctionControlMaskNone;
2747 if (F.hasFnAttribute(Attribute::AlwaysInline)) {
2748 FuncControl |= spv::FunctionControlInlineMask;
2749 }
2750 if (F.hasFnAttribute(Attribute::NoInline)) {
2751 FuncControl |= spv::FunctionControlDontInlineMask;
2752 }
2753 // TODO: Check llvm attribute for Function Control Pure.
2754 if (F.hasFnAttribute(Attribute::ReadOnly)) {
2755 FuncControl |= spv::FunctionControlPureMask;
2756 }
2757 // TODO: Check llvm attribute for Function Control Const.
2758 if (F.hasFnAttribute(Attribute::ReadNone)) {
2759 FuncControl |= spv::FunctionControlConstMask;
2760 }
2761
SJW01901d92020-05-21 08:58:31 -05002762 FOps << FuncControl;
David Neto22f144c2017-06-12 14:26:21 -04002763
SJW01901d92020-05-21 08:58:31 -05002764 SPIRVID FTyID;
David Neto22f144c2017-06-12 14:26:21 -04002765 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
2766 SmallVector<Type *, 4> NewFuncParamTys;
2767 FunctionType *NewFTy =
2768 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
SJWf93f5f32020-05-05 07:27:56 -05002769 FTyID = getSPIRVType(NewFTy);
David Neto22f144c2017-06-12 14:26:21 -04002770 } else {
David Neto9ed8e2f2018-03-24 06:47:24 -07002771 // Handle regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04002772 if (GlobalConstFuncTyMap.count(FTy)) {
SJWf93f5f32020-05-05 07:27:56 -05002773 FTyID = getSPIRVType(GlobalConstFuncTyMap[FTy].first);
David Neto22f144c2017-06-12 14:26:21 -04002774 } else {
SJWf93f5f32020-05-05 07:27:56 -05002775 FTyID = getSPIRVType(FTy);
David Neto22f144c2017-06-12 14:26:21 -04002776 }
2777 }
2778
SJW01901d92020-05-21 08:58:31 -05002779 FOps << FTyID;
David Neto22f144c2017-06-12 14:26:21 -04002780
SJWf93f5f32020-05-05 07:27:56 -05002781 // Generate SPIRV instruction for function.
2782 SPIRVID FID = addSPIRVInst(spv::OpFunction, FOps);
2783 VMap[&F] = FID;
David Neto22f144c2017-06-12 14:26:21 -04002784
SJWf93f5f32020-05-05 07:27:56 -05002785 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
2786 EntryPoints.push_back(std::make_pair(&F, FID));
2787 }
David Neto22f144c2017-06-12 14:26:21 -04002788
David Neto482550a2018-03-24 05:21:07 -07002789 if (clspv::Option::ShowIDs()) {
SJW01901d92020-05-21 08:58:31 -05002790 errs() << "Function " << F.getName() << " is " << FID.get() << "\n";
David Netob05675d2018-02-16 12:37:49 -05002791 }
David Neto22f144c2017-06-12 14:26:21 -04002792
2793 //
2794 // Generate OpFunctionParameter for Normal function.
2795 //
David Neto22f144c2017-06-12 14:26:21 -04002796 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
alan-bakere9308012019-03-15 10:25:13 -04002797
David Neto22f144c2017-06-12 14:26:21 -04002798    // Iterate over the Arguments (for their names) instead of the param types from the function type.
2799 unsigned ArgIdx = 0;
2800 for (Argument &Arg : F.args()) {
David Neto22f144c2017-06-12 14:26:21 -04002801      // Ops[0] : Result Type ID
SJW01901d92020-05-21 08:58:31 -05002802 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002803
2804 // Find SPIRV instruction for parameter type.
SJW01901d92020-05-21 08:58:31 -05002805 SPIRVID ParamTyID = getSPIRVType(Arg.getType());
David Neto22f144c2017-06-12 14:26:21 -04002806 if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
2807 if (GlobalConstFuncTyMap.count(FTy)) {
2808 if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
2809 Type *EleTy = PTy->getPointerElementType();
2810 Type *ArgTy =
2811 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
SJWf93f5f32020-05-05 07:27:56 -05002812 ParamTyID = getSPIRVType(ArgTy);
David Neto22f144c2017-06-12 14:26:21 -04002813 GlobalConstArgSet.insert(&Arg);
2814 }
2815 }
2816 }
SJW01901d92020-05-21 08:58:31 -05002817 Ops << ParamTyID;
David Neto22f144c2017-06-12 14:26:21 -04002818
2819 // Generate SPIRV instruction for parameter.
SJW01901d92020-05-21 08:58:31 -05002820 SPIRVID param_id = addSPIRVInst(spv::OpFunctionParameter, Ops);
SJWf93f5f32020-05-05 07:27:56 -05002821 VMap[&Arg] = param_id;
2822
2823 if (CalledWithCoherentResource(Arg)) {
2824 // If the arg is passed a coherent resource ever, then decorate this
2825 // parameter with Coherent too.
SJW01901d92020-05-21 08:58:31 -05002826 Ops.clear();
2827 Ops << param_id << spv::DecorationCoherent;
2828 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
SJWf93f5f32020-05-05 07:27:56 -05002829 }
David Neto22f144c2017-06-12 14:26:21 -04002830
2831 ArgIdx++;
2832 }
2833 }
2834}
2835
SJW77b87ad2020-04-21 14:37:52 -05002836void SPIRVProducerPass::GenerateModuleInfo() {
David Neto22f144c2017-06-12 14:26:21 -04002837 EntryPointVecType &EntryPoints = getEntryPointVec();
SJW806a5d82020-07-15 12:51:38 -05002838 auto &EntryPointInterfaces = getEntryPointInterfacesList();
SJW01901d92020-05-21 08:58:31 -05002839 std::vector<SPIRVID> &BuiltinDimVec = getBuiltinDimVec();
David Neto22f144c2017-06-12 14:26:21 -04002840
SJWf93f5f32020-05-05 07:27:56 -05002841 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002842
SJW01901d92020-05-21 08:58:31 -05002843 for (auto Capability : CapabilitySet) {
David Neto22f144c2017-06-12 14:26:21 -04002844 //
SJW01901d92020-05-21 08:58:31 -05002845 // Generate OpCapability
David Neto22f144c2017-06-12 14:26:21 -04002846 //
2847 // Ops[0] = Capability
SJW01901d92020-05-21 08:58:31 -05002848 addSPIRVInst<kCapabilities>(spv::OpCapability, Capability);
alan-baker5b86ed72019-02-15 08:26:50 -05002849 }
2850
2851 // Always add the storage buffer extension
2852 {
David Neto22f144c2017-06-12 14:26:21 -04002853 //
2854 // Generate OpExtension.
2855 //
2856 // Ops[0] = Name (Literal String)
2857 //
SJWf93f5f32020-05-05 07:27:56 -05002858 addSPIRVInst<kExtensions>(spv::OpExtension,
2859 "SPV_KHR_storage_buffer_storage_class");
alan-baker5b86ed72019-02-15 08:26:50 -05002860 }
David Neto22f144c2017-06-12 14:26:21 -04002861
alan-baker5b86ed72019-02-15 08:26:50 -05002862 if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
2863 //
2864 // Generate OpExtension.
2865 //
2866 // Ops[0] = Name (Literal String)
2867 //
SJWf93f5f32020-05-05 07:27:56 -05002868 addSPIRVInst<kExtensions>(spv::OpExtension, "SPV_KHR_variable_pointers");
David Neto22f144c2017-06-12 14:26:21 -04002869 }
2870
2871 //
2872 // Generate OpMemoryModel
2873 //
2874 // Memory model for Vulkan will always be GLSL450.
2875
2876 // Ops[0] = Addressing Model
2877 // Ops[1] = Memory Model
2878 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002879 Ops << spv::AddressingModelLogical << spv::MemoryModelGLSL450;
David Neto22f144c2017-06-12 14:26:21 -04002880
SJWf93f5f32020-05-05 07:27:56 -05002881 addSPIRVInst<kMemoryModel>(spv::OpMemoryModel, Ops);
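  // i.e. every module declares:
  //   OpMemoryModel Logical GLSL450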
David Neto22f144c2017-06-12 14:26:21 -04002882
2883 //
2884 // Generate OpEntryPoint
2885 //
2886 for (auto EntryPoint : EntryPoints) {
2887 // Ops[0] = Execution Model
2888 // Ops[1] = EntryPoint ID
2889 // Ops[2] = Name (Literal String)
2890 // ...
2891 //
2892 // TODO: Do we need to consider Interface ID for forward references???
2893 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05002894 const StringRef &name = EntryPoint.first->getName();
SJW01901d92020-05-21 08:58:31 -05002895 Ops << spv::ExecutionModelGLCompute << EntryPoint.second << name;
David Neto22f144c2017-06-12 14:26:21 -04002896
SJW806a5d82020-07-15 12:51:38 -05002897 for (auto &Interface : EntryPointInterfaces) {
SJW01901d92020-05-21 08:58:31 -05002898 Ops << Interface;
David Neto22f144c2017-06-12 14:26:21 -04002899 }
2900
SJWf93f5f32020-05-05 07:27:56 -05002901 addSPIRVInst<kEntryPoints>(spv::OpEntryPoint, Ops);
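    // Illustrative sketch for a kernel named "foo" (interface ids are
    // placeholders):
    //   OpEntryPoint GLCompute %foo "foo" %interface_0 %interface_1 ...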
David Neto22f144c2017-06-12 14:26:21 -04002902 }
2903
alan-baker3b609772020-09-03 19:10:17 -04002904 if (BuiltinDimVec.empty()) {
2905 for (auto EntryPoint : EntryPoints) {
2906 const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
2907 ->getMetadata("reqd_work_group_size");
2908 if ((MD != nullptr) && !clspv::Option::NonUniformNDRangeSupported()) {
2909 //
2910 // Generate OpExecutionMode
2911 //
David Neto22f144c2017-06-12 14:26:21 -04002912
alan-baker3b609772020-09-03 19:10:17 -04002913 // Ops[0] = Entry Point ID
2914 // Ops[1] = Execution Mode
2915 // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
2916 Ops.clear();
2917 Ops << EntryPoint.second << spv::ExecutionModeLocalSize;
2918
2919 uint32_t XDim = static_cast<uint32_t>(
2920 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
2921 uint32_t YDim = static_cast<uint32_t>(
2922 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
2923 uint32_t ZDim = static_cast<uint32_t>(
2924 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
2925
2926 Ops << XDim << YDim << ZDim;
2927
2928 addSPIRVInst<kExecutionModes>(spv::OpExecutionMode, Ops);
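        // e.g. a kernel "foo" with reqd_work_group_size(8, 4, 1) yields:
        //   OpExecutionMode %foo LocalSize 8 4 1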
David Neto22f144c2017-06-12 14:26:21 -04002929 }
David Neto22f144c2017-06-12 14:26:21 -04002930 }
2931 }
2932
2933 //
2934 // Generate OpSource.
2935 //
2936 // Ops[0] = SourceLanguage ID
2937 // Ops[1] = Version (LiteralNum)
2938 //
SJW01901d92020-05-21 08:58:31 -05002939 uint32_t LangID = spv::SourceLanguageUnknown;
2940 uint32_t LangVer = 0;
Kévin Petitf0515712020-01-07 18:29:20 +00002941 switch (clspv::Option::Language()) {
2942 case clspv::Option::SourceLanguage::OpenCL_C_10:
SJW01901d92020-05-21 08:58:31 -05002943 LangID = spv::SourceLanguageOpenCL_C;
2944 LangVer = 100;
Kévin Petitf0515712020-01-07 18:29:20 +00002945 break;
2946 case clspv::Option::SourceLanguage::OpenCL_C_11:
SJW01901d92020-05-21 08:58:31 -05002947 LangID = spv::SourceLanguageOpenCL_C;
2948 LangVer = 110;
Kévin Petitf0515712020-01-07 18:29:20 +00002949 break;
2950 case clspv::Option::SourceLanguage::OpenCL_C_12:
SJW01901d92020-05-21 08:58:31 -05002951 LangID = spv::SourceLanguageOpenCL_C;
2952 LangVer = 120;
Kévin Petitf0515712020-01-07 18:29:20 +00002953 break;
2954 case clspv::Option::SourceLanguage::OpenCL_C_20:
SJW01901d92020-05-21 08:58:31 -05002955 LangID = spv::SourceLanguageOpenCL_C;
2956 LangVer = 200;
Kévin Petitf0515712020-01-07 18:29:20 +00002957 break;
Kévin Petit77838ff2020-10-19 18:54:51 +01002958 case clspv::Option::SourceLanguage::OpenCL_C_30:
2959 LangID = spv::SourceLanguageOpenCL_C;
2960 LangVer = 300;
2961 break;
Kévin Petitf0515712020-01-07 18:29:20 +00002962 case clspv::Option::SourceLanguage::OpenCL_CPP:
SJW01901d92020-05-21 08:58:31 -05002963 LangID = spv::SourceLanguageOpenCL_CPP;
2964 LangVer = 100;
Kévin Petitf0515712020-01-07 18:29:20 +00002965 break;
2966 default:
Kévin Petitf0515712020-01-07 18:29:20 +00002967 break;
Kévin Petit0fc88042019-04-09 23:25:02 +01002968 }
David Neto22f144c2017-06-12 14:26:21 -04002969
SJW01901d92020-05-21 08:58:31 -05002970 Ops.clear();
2971 Ops << LangID << LangVer;
SJWf93f5f32020-05-05 07:27:56 -05002972 addSPIRVInst<kDebug>(spv::OpSource, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002973
2974 if (!BuiltinDimVec.empty()) {
2975 //
2976 // Generate OpDecorates for x/y/z dimension.
2977 //
2978 // Ops[0] = Target ID
2979 // Ops[1] = Decoration (SpecId)
David Neto257c3892018-04-11 13:19:45 -04002980 // Ops[2] = Specialization Constant ID (Literal Number)
David Neto22f144c2017-06-12 14:26:21 -04002981
2982 // X Dimension
2983 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002984 Ops << BuiltinDimVec[0] << spv::DecorationSpecId << 0;
SJWf93f5f32020-05-05 07:27:56 -05002985 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002986
2987 // Y Dimension
2988 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002989 Ops << BuiltinDimVec[1] << spv::DecorationSpecId << 1;
SJWf93f5f32020-05-05 07:27:56 -05002990 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002991
2992 // Z Dimension
2993 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002994 Ops << BuiltinDimVec[2] << spv::DecorationSpecId << 2;
SJWf93f5f32020-05-05 07:27:56 -05002995 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002996 }
2997}
2998
David Netob6e2e062018-04-25 10:32:06 -04002999void SPIRVProducerPass::GenerateEntryPointInitialStores() {
3000 // Work around a driver bug. Initializers on Private variables might not
3001 // work. So the start of the kernel should store the initializer value to the
3002 // variables. Yes, *every* entry point pays this cost if *any* entry point
3003 // uses this builtin. At this point I judge this to be an acceptable tradeoff
3004 // of complexity vs. runtime, for a broken driver.
alan-bakerb6b09dc2018-11-08 16:59:28 -05003005 // TODO(dneto): Remove this at some point once fixed drivers are widely
3006 // available.
SJW01901d92020-05-21 08:58:31 -05003007 if (WorkgroupSizeVarID.isValid()) {
3008 assert(WorkgroupSizeValueID.isValid());
David Netob6e2e062018-04-25 10:32:06 -04003009
SJWf93f5f32020-05-05 07:27:56 -05003010 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05003011 Ops << WorkgroupSizeVarID << WorkgroupSizeValueID;
David Netob6e2e062018-04-25 10:32:06 -04003012
SJWf93f5f32020-05-05 07:27:56 -05003013 addSPIRVInst(spv::OpStore, Ops);
David Netob6e2e062018-04-25 10:32:06 -04003014 }
3015}
3016
David Neto22f144c2017-06-12 14:26:21 -04003017void SPIRVProducerPass::GenerateFuncBody(Function &F) {
David Neto22f144c2017-06-12 14:26:21 -04003018 ValueMapType &VMap = getValueMap();
3019
David Netob6e2e062018-04-25 10:32:06 -04003020 const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;
David Neto22f144c2017-06-12 14:26:21 -04003021
3022 for (BasicBlock &BB : F) {
3023 // Register BasicBlock to ValueMap.
David Neto22f144c2017-06-12 14:26:21 -04003024
3025 //
3026 // Generate OpLabel for Basic Block.
3027 //
SJWf93f5f32020-05-05 07:27:56 -05003028 VMap[&BB] = addSPIRVInst(spv::OpLabel);
David Neto22f144c2017-06-12 14:26:21 -04003029
David Neto6dcd4712017-06-23 11:06:47 -04003030 // OpVariable instructions must come first.
3031 for (Instruction &I : BB) {
alan-baker5b86ed72019-02-15 08:26:50 -05003032 if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
3033 // Allocating a pointer requires variable pointers.
3034 if (alloca->getAllocatedType()->isPointerTy()) {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003035 setVariablePointersCapabilities(
3036 alloca->getAllocatedType()->getPointerAddressSpace());
alan-baker5b86ed72019-02-15 08:26:50 -05003037 }
David Neto6dcd4712017-06-23 11:06:47 -04003038 GenerateInstruction(I);
3039 }
3040 }
3041
David Neto22f144c2017-06-12 14:26:21 -04003042 if (&BB == &F.getEntryBlock() && IsKernel) {
David Netob6e2e062018-04-25 10:32:06 -04003043 if (clspv::Option::HackInitializers()) {
3044 GenerateEntryPointInitialStores();
3045 }
David Neto22f144c2017-06-12 14:26:21 -04003046 }
3047
3048 for (Instruction &I : BB) {
David Neto6dcd4712017-06-23 11:06:47 -04003049 if (!isa<AllocaInst>(I)) {
3050 GenerateInstruction(I);
3051 }
David Neto22f144c2017-06-12 14:26:21 -04003052 }
3053 }
3054}
3055
3056spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
3057 const std::map<CmpInst::Predicate, spv::Op> Map = {
3058 {CmpInst::ICMP_EQ, spv::OpIEqual},
3059 {CmpInst::ICMP_NE, spv::OpINotEqual},
3060 {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
3061 {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
3062 {CmpInst::ICMP_ULT, spv::OpULessThan},
3063 {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
3064 {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
3065 {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
3066 {CmpInst::ICMP_SLT, spv::OpSLessThan},
3067 {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
3068 {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
3069 {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
3070 {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
3071 {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
3072 {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
3073 {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
3074 {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
3075 {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
3076 {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
3077 {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
3078 {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
3079 {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};
3080
3081 assert(0 != Map.count(I->getPredicate()));
3082
3083 return Map.at(I->getPredicate());
3084}
3085
3086spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
3087 const std::map<unsigned, spv::Op> Map{
3088 {Instruction::Trunc, spv::OpUConvert},
3089 {Instruction::ZExt, spv::OpUConvert},
3090 {Instruction::SExt, spv::OpSConvert},
3091 {Instruction::FPToUI, spv::OpConvertFToU},
3092 {Instruction::FPToSI, spv::OpConvertFToS},
3093 {Instruction::UIToFP, spv::OpConvertUToF},
3094 {Instruction::SIToFP, spv::OpConvertSToF},
3095 {Instruction::FPTrunc, spv::OpFConvert},
3096 {Instruction::FPExt, spv::OpFConvert},
3097 {Instruction::BitCast, spv::OpBitcast}};
3098
3099 assert(0 != Map.count(I.getOpcode()));
3100
3101 return Map.at(I.getOpcode());
3102}
3103
3104spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
Kévin Petit24272b62018-10-18 19:16:12 +00003105 if (I.getType()->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003106 switch (I.getOpcode()) {
3107 default:
3108 break;
3109 case Instruction::Or:
3110 return spv::OpLogicalOr;
3111 case Instruction::And:
3112 return spv::OpLogicalAnd;
3113 case Instruction::Xor:
3114 return spv::OpLogicalNotEqual;
3115 }
3116 }
3117
alan-bakerb6b09dc2018-11-08 16:59:28 -05003118 const std::map<unsigned, spv::Op> Map{
David Neto22f144c2017-06-12 14:26:21 -04003119 {Instruction::Add, spv::OpIAdd},
3120 {Instruction::FAdd, spv::OpFAdd},
3121 {Instruction::Sub, spv::OpISub},
3122 {Instruction::FSub, spv::OpFSub},
3123 {Instruction::Mul, spv::OpIMul},
3124 {Instruction::FMul, spv::OpFMul},
3125 {Instruction::UDiv, spv::OpUDiv},
3126 {Instruction::SDiv, spv::OpSDiv},
3127 {Instruction::FDiv, spv::OpFDiv},
3128 {Instruction::URem, spv::OpUMod},
3129 {Instruction::SRem, spv::OpSRem},
3130 {Instruction::FRem, spv::OpFRem},
3131 {Instruction::Or, spv::OpBitwiseOr},
3132 {Instruction::Xor, spv::OpBitwiseXor},
3133 {Instruction::And, spv::OpBitwiseAnd},
3134 {Instruction::Shl, spv::OpShiftLeftLogical},
3135 {Instruction::LShr, spv::OpShiftRightLogical},
3136 {Instruction::AShr, spv::OpShiftRightArithmetic}};
3137
3138 assert(0 != Map.count(I.getOpcode()));
3139
3140 return Map.at(I.getOpcode());
3141}
3142
SJW806a5d82020-07-15 12:51:38 -05003143SPIRVID SPIRVProducerPass::getSPIRVBuiltin(spv::BuiltIn BID,
3144 spv::Capability Cap) {
3145 SPIRVID RID;
3146
3147 auto ii = BuiltinConstantMap.find(BID);
3148
3149 if (ii != BuiltinConstantMap.end()) {
3150 return ii->second;
3151 } else {
SJW806a5d82020-07-15 12:51:38 -05003152 addCapability(Cap);
3153
3154 Type *type = PointerType::get(IntegerType::get(module->getContext(), 32),
3155 AddressSpace::Input);
3156
3157 RID = addSPIRVGlobalVariable(getSPIRVType(type), spv::StorageClassInput);
3158
3159 BuiltinConstantMap[BID] = RID;
3160
3161 //
3162 // Generate OpDecorate.
3163 //
3164 // Ops[0] : target
3165 // Ops[1] : decoration
3166 // Ops[2] : SpecId
3167 SPIRVOperandVec Ops;
3168 Ops << RID << spv::DecorationBuiltIn << static_cast<int>(BID);
3169
3170 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
3171 }
3172
3173 return RID;
3174}
3175
3176SPIRVID
3177SPIRVProducerPass::GenerateClspvInstruction(CallInst *Call,
3178 const FunctionInfo &FuncInfo) {
3179 SPIRVID RID;
3180
3181 switch (FuncInfo.getType()) {
3182 case Builtins::kClspvCompositeConstruct:
3183 RID = addSPIRVPlaceholder(Call);
3184 break;
3185 case Builtins::kClspvResource: {
3186 if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
3187 // Generate an OpLoad
3188 SPIRVOperandVec Ops;
3189
3190 Ops << Call->getType()->getPointerElementType()
3191 << ResourceVarDeferredLoadCalls[Call];
3192
3193 RID = addSPIRVInst(spv::OpLoad, Ops);
3194
3195 } else {
3196 // This maps to an OpVariable we've already generated.
3197 // No code is generated for the call.
3198 }
3199 break;
3200 }
3201 case Builtins::kClspvLocal: {
3202 // Don't codegen an instruction here, but instead map this call directly
3203 // to the workgroup variable id.
3204 int spec_id = static_cast<int>(
3205 cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
3206 const auto &info = LocalSpecIdInfoMap[spec_id];
3207 RID = info.variable_id;
3208 break;
3209 }
3210 case Builtins::kClspvSamplerVarLiteral: {
3211 // Sampler initializers become a load of the corresponding sampler.
3212 // Map this to a load from the variable.
3213 const auto third_param = static_cast<unsigned>(
3214 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue());
3215 auto sampler_value = third_param;
3216 if (clspv::Option::UseSamplerMap()) {
3217 sampler_value = getSamplerMap()[third_param].first;
3218 }
3219
3220 // Generate an OpLoad
3221 SPIRVOperandVec Ops;
3222
3223 Ops << SamplerTy->getPointerElementType()
3224 << SamplerLiteralToIDMap[sampler_value];
3225
3226 RID = addSPIRVInst(spv::OpLoad, Ops);
3227 break;
3228 }
3229 case Builtins::kSpirvAtomicXor: {
3230 // Handle SPIR-V intrinsics
3231 SPIRVOperandVec Ops;
3232
3233 if (!Call->getType()->isVoidTy()) {
3234 Ops << Call->getType();
3235 }
3236
3237 for (unsigned i = 0; i < Call->getNumArgOperands(); i++) {
3238 Ops << Call->getArgOperand(i);
3239 }
3240
3241 RID = addSPIRVInst(spv::OpAtomicXor, Ops);
3242 break;
3243 }
3244 case Builtins::kSpirvOp: {
3245 // Handle SPIR-V intrinsics
3246 auto *arg0 = dyn_cast<ConstantInt>(Call->getArgOperand(0));
3247 spv::Op opcode = static_cast<spv::Op>(arg0->getZExtValue());
3248 if (opcode != spv::OpNop) {
3249 SPIRVOperandVec Ops;
3250
3251 if (!Call->getType()->isVoidTy()) {
3252 Ops << Call->getType();
3253 }
3254
3255 for (unsigned i = 1; i < Call->getNumArgOperands(); i++) {
3256 Ops << Call->getArgOperand(i);
3257 }
3258
3259 RID = addSPIRVInst(opcode, Ops);
3260 }
3261 break;
3262 }
3263 case Builtins::kSpirvCopyMemory: {
3264 //
3265 // Generate OpCopyMemory.
3266 //
3267
3268 // Ops[0] = Dst ID
3269 // Ops[1] = Src ID
3270 // Ops[2] = Memory Access
3271 // Ops[3] = Alignment
3272
3273 auto IsVolatile =
3274 dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue() != 0;
3275
3276 auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
3277 : spv::MemoryAccessMaskNone;
3278
3279 auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;
3280
3281 auto Alignment =
3282 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
3283
3284 SPIRVOperandVec Ops;
3285 Ops << Call->getArgOperand(0) << Call->getArgOperand(1) << MemoryAccess
3286 << static_cast<uint32_t>(Alignment);
3287
3288 RID = addSPIRVInst(spv::OpCopyMemory, Ops);
3289 break;
3290 }
3291 default:
3292 llvm_unreachable("Unknown CLSPV Instruction");
3293 break;
3294 }
3295 return RID;
3296}
3297
3298SPIRVID
3299SPIRVProducerPass::GenerateImageInstruction(CallInst *Call,
3300 const FunctionInfo &FuncInfo) {
3301 SPIRVID RID;
3302
3303 LLVMContext &Context = module->getContext();
3304 switch (FuncInfo.getType()) {
3305 case Builtins::kReadImagef:
3306 case Builtins::kReadImageh:
3307 case Builtins::kReadImagei:
3308 case Builtins::kReadImageui: {
3309 // read_image is converted to OpSampledImage and OpImageSampleExplicitLod.
3310 // Additionally, OpTypeSampledImage is generated.
alan-bakerf6bc8252020-09-23 14:58:55 -04003311 const auto image_ty = Call->getArgOperand(0)->getType();
SJW806a5d82020-07-15 12:51:38 -05003312 const auto &pi = FuncInfo.getParameter(1);
3313 if (pi.isSampler()) {
3314 //
3315 // Generate OpSampledImage.
3316 //
3317 // Ops[0] = Result Type ID
3318 // Ops[1] = Image ID
3319 // Ops[2] = Sampler ID
3320 //
3321 SPIRVOperandVec Ops;
3322
3323 Value *Image = Call->getArgOperand(0);
3324 Value *Sampler = Call->getArgOperand(1);
3325 Value *Coordinate = Call->getArgOperand(2);
3326
3327 TypeMapType &OpImageTypeMap = getImageTypeMap();
3328 Type *ImageTy = Image->getType()->getPointerElementType();
3329 SPIRVID ImageTyID = OpImageTypeMap[ImageTy];
3330
3331 Ops << ImageTyID << Image << Sampler;
3332
3333 SPIRVID SampledImageID = addSPIRVInst(spv::OpSampledImage, Ops);
3334
3335 //
3336 // Generate OpImageSampleExplicitLod.
3337 //
3338 // Ops[0] = Result Type ID
3339 // Ops[1] = Sampled Image ID
3340 // Ops[2] = Coordinate ID
3341 // Ops[3] = Image Operands Type ID
3342 // Ops[4] ... Ops[n] = Operands ID
3343 //
3344 Ops.clear();
3345
3346 const bool is_int_image = IsIntImageType(Image->getType());
3347 SPIRVID result_type;
3348 if (is_int_image) {
3349 result_type = v4int32ID;
3350 } else {
3351 result_type = getSPIRVType(Call->getType());
3352 }
3353
3354 Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
3355 Ops << result_type << SampledImageID << Coordinate
3356 << spv::ImageOperandsLodMask << CstFP0;
3357
3358 RID = addSPIRVInst(spv::OpImageSampleExplicitLod, Ops);
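      // Illustrative sketch of the emitted pair (ids are placeholders; the
      // result type is %v4int for integer images):
      //   %sampled = OpSampledImage %sampled_image_ty %image %sampler
      //   %texel = OpImageSampleExplicitLod %v4float %sampled %coord Lod %float_0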
3359
3360 if (is_int_image) {
3361 // Generate the bitcast.
3362 Ops.clear();
3363 Ops << Call->getType() << RID;
3364 RID = addSPIRVInst(spv::OpBitcast, Ops);
3365 }
alan-bakerf6bc8252020-09-23 14:58:55 -04003366 } else if (IsStorageImageType(image_ty)) {
3367 // read_image on a storage image is mapped to OpImageRead.
3368 Value *Image = Call->getArgOperand(0);
3369 Value *Coordinate = Call->getArgOperand(1);
3370
3371 //
3372 // Generate OpImageRead
3373 //
3374 // Ops[0] = Result Type ID
3375 // Ops[1] = Image ID
3376 // Ops[2] = Coordinate
3377 // No optional image operands.
3378 //
3379 SPIRVOperandVec Ops;
3380
3381 const bool is_int_image = IsIntImageType(Image->getType());
3382 SPIRVID result_type;
3383 if (is_int_image) {
3384 result_type = v4int32ID;
3385 } else {
3386 result_type = getSPIRVType(Call->getType());
3387 }
3388
3389 Ops << result_type << Image << Coordinate;
3390 RID = addSPIRVInst(spv::OpImageRead, Ops);
3391
3392 if (is_int_image) {
3393 // Generate the bitcast.
3394 Ops.clear();
3395 Ops << Call->getType() << RID;
3396 RID = addSPIRVInst(spv::OpBitcast, Ops);
3397 }
3398
3399 // OpImageRead requires StorageImageReadWithoutFormat.
3400 addCapability(spv::CapabilityStorageImageReadWithoutFormat);
SJW806a5d82020-07-15 12:51:38 -05003401 } else {
alan-bakerf6bc8252020-09-23 14:58:55 -04003402 // read_image on a sampled image (without a sampler) is mapped to
3403 // OpImageFetch.
SJW806a5d82020-07-15 12:51:38 -05003404 Value *Image = Call->getArgOperand(0);
3405 Value *Coordinate = Call->getArgOperand(1);
3406
3407 //
3408 // Generate OpImageFetch
3409 //
3410 // Ops[0] = Result Type ID
3411 // Ops[1] = Image ID
3412 // Ops[2] = Coordinate ID
3413 // Ops[3] = Lod
3414 // Ops[4] = 0
3415 //
3416 SPIRVOperandVec Ops;
3417
3418 const bool is_int_image = IsIntImageType(Image->getType());
3419 SPIRVID result_type;
3420 if (is_int_image) {
3421 result_type = v4int32ID;
3422 } else {
3423 result_type = getSPIRVType(Call->getType());
3424 }
3425
3426 Ops << result_type << Image << Coordinate << spv::ImageOperandsLodMask
3427 << getSPIRVInt32Constant(0);
3428
3429 RID = addSPIRVInst(spv::OpImageFetch, Ops);
3430
3431 if (is_int_image) {
3432 // Generate the bitcast.
3433 Ops.clear();
3434 Ops << Call->getType() << RID;
3435 RID = addSPIRVInst(spv::OpBitcast, Ops);
3436 }
3437 }
3438 break;
3439 }
3440
3441 case Builtins::kWriteImagef:
3442 case Builtins::kWriteImageh:
3443 case Builtins::kWriteImagei:
3444 case Builtins::kWriteImageui: {
3445 // write_image is mapped to OpImageWrite.
3446 //
3447 // Generate OpImageWrite.
3448 //
3449 // Ops[0] = Image ID
3450 // Ops[1] = Coordinate ID
3451 // Ops[2] = Texel ID
3452 // Ops[3] = (Optional) Image Operands Type (Literal Number)
3453 // Ops[4] ... Ops[n] = (Optional) Operands ID
3454 //
3455 SPIRVOperandVec Ops;
3456
3457 Value *Image = Call->getArgOperand(0);
3458 Value *Coordinate = Call->getArgOperand(1);
3459 Value *Texel = Call->getArgOperand(2);
3460
3461 SPIRVID TexelID = getSPIRVValue(Texel);
3462
3463 const bool is_int_image = IsIntImageType(Image->getType());
3464 if (is_int_image) {
3465 // Generate a bitcast to v4int and use it as the texel value.
3466 Ops << v4int32ID << TexelID;
3467 TexelID = addSPIRVInst(spv::OpBitcast, Ops);
3468 Ops.clear();
3469 }
3470 Ops << Image << Coordinate << TexelID;
SJW806a5d82020-07-15 12:51:38 -05003471 RID = addSPIRVInst(spv::OpImageWrite, Ops);
alan-bakerf6bc8252020-09-23 14:58:55 -04003472
3473 // Image writes require StorageImageWriteWithoutFormat.
3474 addCapability(spv::CapabilityStorageImageWriteWithoutFormat);
SJW806a5d82020-07-15 12:51:38 -05003475 break;
3476 }
3477
3478 case Builtins::kGetImageHeight:
3479 case Builtins::kGetImageWidth:
3480 case Builtins::kGetImageDepth:
3481 case Builtins::kGetImageDim: {
3482 // get_image_* is mapped to OpImageQuerySize or OpImageQuerySizeLod
3483 addCapability(spv::CapabilityImageQuery);
3484
3485 //
3486 // Generate OpImageQuerySize[Lod]
3487 //
3488 // Ops[0] = Image ID
3489 //
3490 // Result type has components equal to the dimensionality of the image,
3491 // plus 1 if the image is arrayed.
3492 //
3493 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
3494 SPIRVOperandVec Ops;
3495
3496 // Implement:
3497 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
3498 SPIRVID SizesTypeID;
3499
3500 Value *Image = Call->getArgOperand(0);
3501 const uint32_t dim = ImageDimensionality(Image->getType());
3502 const uint32_t components =
3503 dim + (IsArrayImageType(Image->getType()) ? 1 : 0);
3504 if (components == 1) {
3505 SizesTypeID = getSPIRVType(Type::getInt32Ty(Context));
3506 } else {
3507 SizesTypeID = getSPIRVType(
3508 FixedVectorType::get(Type::getInt32Ty(Context), components));
3509 }
3510 Ops << SizesTypeID << Image;
3511 spv::Op query_opcode = spv::OpImageQuerySize;
3512 if (IsSampledImageType(Image->getType())) {
3513 query_opcode = spv::OpImageQuerySizeLod;
3514 // Need explicit 0 for Lod operand.
3515 Ops << getSPIRVInt32Constant(0);
3516 }
3517
3518 RID = addSPIRVInst(query_opcode, Ops);
3519
3520 // May require an extra instruction to create the appropriate result of
3521 // the builtin function.
3522 if (FuncInfo.getType() == Builtins::kGetImageDim) {
3523 if (dim == 3) {
3524 // get_image_dim returns an int4 for 3D images.
3525 //
3526
3527 // Implement:
3528 // %result = OpCompositeConstruct %uint4 %sizes %uint_0
3529 Ops.clear();
3530 Ops << FixedVectorType::get(Type::getInt32Ty(Context), 4) << RID
3531 << getSPIRVInt32Constant(0);
3532
3533 RID = addSPIRVInst(spv::OpCompositeConstruct, Ops);
3534 } else if (dim != components) {
3535        // get_image_dim returns an int2 regardless of the arrayedness of the
3536        // image. If the image is arrayed, an element must be dropped from the
3537 // query result.
3538 //
3539
3540 // Implement:
3541 // %result = OpVectorShuffle %uint2 %sizes %sizes 0 1
3542 Ops.clear();
3543 Ops << FixedVectorType::get(Type::getInt32Ty(Context), 2) << RID << RID
3544 << 0 << 1;
3545
3546 RID = addSPIRVInst(spv::OpVectorShuffle, Ops);
3547 }
3548 } else if (components > 1) {
3549 // Implement:
3550 // %result = OpCompositeExtract %uint %sizes <component number>
3551 Ops.clear();
3552 Ops << Call->getType() << RID;
3553
3554 uint32_t component = 0;
3555 if (FuncInfo.getType() == Builtins::kGetImageHeight)
3556 component = 1;
3557 else if (FuncInfo.getType() == Builtins::kGetImageDepth)
3558 component = 2;
3559 Ops << component;
3560
3561 RID = addSPIRVInst(spv::OpCompositeExtract, Ops);
3562 }
3563 break;
3564 }
3565 default:
3566 llvm_unreachable("Unsupported Image builtin");
3567 }
3568
3569 return RID;
3570}
3571
3572SPIRVID
3573SPIRVProducerPass::GenerateSubgroupInstruction(CallInst *Call,
3574 const FunctionInfo &FuncInfo) {
3575 SPIRVID RID;
3576
3577 // requires SPIRV version 1.3 or greater
3578 if (SpvVersion() != SPIRVVersion::SPIRV_1_3) {
3579 // llvm_unreachable("SubGroups extension requires SPIRV 1.3 or greater");
3580 // TODO(sjw): error out gracefully
3581 }
3582
3583 auto loadBuiltin = [this, Call](spv::BuiltIn spvBI,
3584 spv::Capability spvCap =
3585 spv::CapabilityGroupNonUniform) {
3586 SPIRVOperandVec Ops;
3587 Ops << Call->getType() << this->getSPIRVBuiltin(spvBI, spvCap);
3588
3589 return addSPIRVInst(spv::OpLoad, Ops);
3590 };
3591
3592 spv::Op op = spv::OpNop;
3593 switch (FuncInfo.getType()) {
3594 case Builtins::kGetSubGroupSize:
3595 return loadBuiltin(spv::BuiltInSubgroupSize);
3596 case Builtins::kGetNumSubGroups:
3597 return loadBuiltin(spv::BuiltInNumSubgroups);
3598 case Builtins::kGetSubGroupId:
3599 return loadBuiltin(spv::BuiltInSubgroupId);
3600 case Builtins::kGetSubGroupLocalId:
3601 return loadBuiltin(spv::BuiltInSubgroupLocalInvocationId);
3602
3603 case Builtins::kSubGroupBroadcast:
3604 if (SpvVersion() < SPIRVVersion::SPIRV_1_5 &&
3605 !dyn_cast<ConstantInt>(Call->getOperand(1))) {
3606 llvm_unreachable("sub_group_broadcast requires constant lane Id for "
3607 "SPIRV version < 1.5");
3608 }
3609 addCapability(spv::CapabilityGroupNonUniformBallot);
3610 op = spv::OpGroupNonUniformBroadcast;
3611 break;
3612
3613 case Builtins::kSubGroupAll:
3614 addCapability(spv::CapabilityGroupNonUniformVote);
3615 op = spv::OpGroupNonUniformAll;
3616 break;
3617 case Builtins::kSubGroupAny:
3618 addCapability(spv::CapabilityGroupNonUniformVote);
3619 op = spv::OpGroupNonUniformAny;
3620 break;
3621 case Builtins::kSubGroupReduceAdd:
3622 case Builtins::kSubGroupScanExclusiveAdd:
3623 case Builtins::kSubGroupScanInclusiveAdd: {
3624 addCapability(spv::CapabilityGroupNonUniformArithmetic);
3625 if (FuncInfo.getParameter(0).type_id == Type::IntegerTyID) {
3626 op = spv::OpGroupNonUniformIAdd;
3627 } else {
3628 op = spv::OpGroupNonUniformFAdd;
3629 }
3630 break;
3631 }
3632 case Builtins::kSubGroupReduceMin:
3633 case Builtins::kSubGroupScanExclusiveMin:
3634 case Builtins::kSubGroupScanInclusiveMin: {
3635 addCapability(spv::CapabilityGroupNonUniformArithmetic);
3636 auto &param = FuncInfo.getParameter(0);
3637 if (param.type_id == Type::IntegerTyID) {
3638 op = param.is_signed ? spv::OpGroupNonUniformSMin
3639 : spv::OpGroupNonUniformUMin;
3640 } else {
3641 op = spv::OpGroupNonUniformFMin;
3642 }
3643 break;
3644 }
3645 case Builtins::kSubGroupReduceMax:
3646 case Builtins::kSubGroupScanExclusiveMax:
3647 case Builtins::kSubGroupScanInclusiveMax: {
3648 addCapability(spv::CapabilityGroupNonUniformArithmetic);
3649 auto &param = FuncInfo.getParameter(0);
3650 if (param.type_id == Type::IntegerTyID) {
3651 op = param.is_signed ? spv::OpGroupNonUniformSMax
3652 : spv::OpGroupNonUniformUMax;
3653 } else {
3654 op = spv::OpGroupNonUniformFMax;
3655 }
3656 break;
3657 }
3658
3659 case Builtins::kGetEnqueuedNumSubGroups:
3660 // TODO(sjw): requires CapabilityKernel (incompatible with Shader)
3661 case Builtins::kGetMaxSubGroupSize:
3662 // TODO(sjw): use SpecConstant, capability Kernel (incompatible with Shader)
3663 case Builtins::kSubGroupBarrier:
3664 case Builtins::kSubGroupReserveReadPipe:
3665 case Builtins::kSubGroupReserveWritePipe:
3666 case Builtins::kSubGroupCommitReadPipe:
3667 case Builtins::kSubGroupCommitWritePipe:
3668 case Builtins::kGetKernelSubGroupCountForNdrange:
3669 case Builtins::kGetKernelMaxSubGroupSizeForNdrange:
3670 default:
3671 Call->print(errs());
3672 llvm_unreachable("Unsupported sub_group operation");
3673 break;
3674 }
3675
3676 assert(op != spv::OpNop);
3677
3678 SPIRVOperandVec Operands;
3679
3680 //
3681 // Generate OpGroupNonUniform*
3682 //
3683 // Ops[0] = Result Type ID
3684 // Ops[1] = ScopeSubgroup
3685 // Ops[2] = Value ID
3686 // Ops[3] = Local ID
3687
3688 // The result type.
3689 Operands << Call->getType();
3690
3691 // Subgroup Scope
3692 Operands << getSPIRVInt32Constant(spv::ScopeSubgroup);
3693
3694 switch (FuncInfo.getType()) {
3695 case Builtins::kSubGroupReduceAdd:
3696 case Builtins::kSubGroupReduceMin:
3697 case Builtins::kSubGroupReduceMax:
3698 Operands << spv::GroupOperationReduce;
3699 break;
3700 case Builtins::kSubGroupScanExclusiveAdd:
3701 case Builtins::kSubGroupScanExclusiveMin:
3702 case Builtins::kSubGroupScanExclusiveMax:
3703 Operands << spv::GroupOperationExclusiveScan;
3704 break;
3705 case Builtins::kSubGroupScanInclusiveAdd:
3706 case Builtins::kSubGroupScanInclusiveMin:
3707 case Builtins::kSubGroupScanInclusiveMax:
3708 Operands << spv::GroupOperationInclusiveScan;
3709 break;
3710 default:
3711 break;
3712 }
3713
3714 for (Use &use : Call->arg_operands()) {
3715 Operands << use.get();
3716 }
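  // Illustrative sketch for sub_group_reduce_add on a 32-bit int (ids are
  // placeholders; %uint_3 is the ScopeSubgroup constant):
  //   %r = OpGroupNonUniformIAdd %int %uint_3 Reduce %x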
3717
3718 return addSPIRVInst(op, Operands);
3719}
3720
3721SPIRVID SPIRVProducerPass::GenerateInstructionFromCall(CallInst *Call) {
3722 LLVMContext &Context = module->getContext();
3723
3724 auto &func_info = Builtins::Lookup(Call->getCalledFunction());
3725 auto func_type = func_info.getType();
3726
3727 if (BUILTIN_IN_GROUP(func_type, Clspv)) {
3728 return GenerateClspvInstruction(Call, func_info);
3729 } else if (BUILTIN_IN_GROUP(func_type, Image)) {
3730 return GenerateImageInstruction(Call, func_info);
3731 } else if (BUILTIN_IN_GROUP(func_type, SubgroupsKHR)) {
3732 return GenerateSubgroupInstruction(Call, func_info);
3733 }
3734
3735 SPIRVID RID;
3736
alan-baker5f2e88e2020-12-07 15:24:04 -05003737 switch (Call->getCalledFunction()->getIntrinsicID()) {
3738 case Intrinsic::ctlz: {
3739    // Implement as (bit width - 1) - FindUMsb. Ignore the second operand of llvm.ctlz.
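    // Worked example for 32-bit x = 8: FindUMsb(8) = 3, so ctlz(8) = 31 - 3 = 28.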
3740 SPIRVOperandVec Ops;
3741 Ops << Call->getType() << getOpExtInstImportID()
3742 << glsl::ExtInst::ExtInstFindUMsb << Call->getArgOperand(0);
3743 auto find_msb = addSPIRVInst(spv::OpExtInst, Ops);
3744
3745 Constant *thirty_one = ConstantInt::get(
3746 Call->getType(), Call->getType()->getScalarSizeInBits() - 1);
3747 Ops.clear();
3748 Ops << Call->getType() << thirty_one << find_msb;
3749 return addSPIRVInst(spv::OpISub, Ops);
3750 }
3751 case Intrinsic::cttz: {
3752 // Implement as:
3753 // lsb = FindILsb x
3754 // res = lsb == -1 ? width : lsb
3755 //
3756 // Ignore the second operand of llvm.cttz.
3757 SPIRVOperandVec Ops;
3758 Ops << Call->getType() << getOpExtInstImportID()
3759 << glsl::ExtInst::ExtInstFindILsb << Call->getArgOperand(0);
3760 auto find_lsb = addSPIRVInst(spv::OpExtInst, Ops);
3761
3762 auto neg_one = Constant::getAllOnesValue(Call->getType());
3763 auto i1_ty = Call->getType()->getWithNewBitWidth(1);
3764 auto width = ConstantInt::get(Call->getType(),
3765 Call->getType()->getScalarSizeInBits());
3766
3767 Ops.clear();
3768 Ops << i1_ty << find_lsb << neg_one;
3769 auto cmp = addSPIRVInst(spv::OpIEqual, Ops);
3770
3771 Ops.clear();
3772 Ops << Call->getType() << cmp << width << find_lsb;
3773 return addSPIRVInst(spv::OpSelect, Ops);
3774 }
3775
3776 default:
3777 break;
3778 }
3779
SJW806a5d82020-07-15 12:51:38 -05003780 switch (func_type) {
3781 case Builtins::kPopcount: {
3782 //
3783 // Generate OpBitCount
3784 //
3785 // Ops[0] = Result Type ID
3786 // Ops[1] = Base ID
3787 SPIRVOperandVec Ops;
3788 Ops << Call->getType() << Call->getOperand(0);
3789
3790 RID = addSPIRVInst(spv::OpBitCount, Ops);
3791 break;
3792 }
3793 default: {
3794 glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(func_info);
3795
alan-baker5f2e88e2020-12-07 15:24:04 -05003796    // Only lower to an extended instruction if the callee is a declaration; do not replace functions that have implementations.
3797 if (EInst && Call->getCalledFunction()->isDeclaration()) {
SJW806a5d82020-07-15 12:51:38 -05003798 SPIRVID ExtInstImportID = getOpExtInstImportID();
3799
3800 //
3801 // Generate OpExtInst.
3802 //
3803
3804 // Ops[0] = Result Type ID
3805 // Ops[1] = Set ID (OpExtInstImport ID)
3806 // Ops[2] = Instruction Number (Literal Number)
3807 // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
3808 SPIRVOperandVec Ops;
3809
3810 Ops << Call->getType() << ExtInstImportID << EInst;
3811
3812 for (auto &use : Call->arg_operands()) {
3813 Ops << use.get();
3814 }
3815
3816 RID = addSPIRVInst(spv::OpExtInst, Ops);
3817
3818 const auto IndirectExtInst = getIndirectExtInstEnum(func_info);
3819 if (IndirectExtInst != kGlslExtInstBad) {
SJW806a5d82020-07-15 12:51:38 -05003820 // Generate one more instruction that uses the result of the extended
3821 // instruction. Its result id is one more than the id of the
3822 // extended instruction.
3823 auto generate_extra_inst = [this, &Context, &Call,
3824 &RID](spv::Op opcode, Constant *constant) {
3825 //
3826 // Generate instruction like:
3827 // result = opcode constant <extinst-result>
3828 //
3829 // Ops[0] = Result Type ID
3830 // Ops[1] = Operand 0 ;; the constant, suitably splatted
3831 // Ops[2] = Operand 1 ;; the result of the extended instruction
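        // Sketch for atanpi(x) (hypothetical IDs): the extended instruction
        // result is scaled by 1/pi, i.e.
        //   %a = OpExtInst %float %glsl_ext_set Atan %x
        //   %r = OpFMul %float %float_one_over_pi %a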
3832 SPIRVOperandVec Ops;
3833
3834 Type *resultTy = Call->getType();
3835
3836 if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
alan-baker931253b2020-08-20 17:15:38 -04003837 constant =
3838 ConstantVector::getSplat(vectorTy->getElementCount(), constant);
SJW806a5d82020-07-15 12:51:38 -05003839 }
3840 Ops << resultTy << constant << RID;
3841
3842 RID = addSPIRVInst(opcode, Ops);
3843 };
3844
SJW806a5d82020-07-15 12:51:38 -05003845 switch (IndirectExtInst) {
SJW806a5d82020-07-15 12:51:38 -05003846 case glsl::ExtInstAcos: // Implementing acospi
3847 case glsl::ExtInstAsin: // Implementing asinpi
3848 case glsl::ExtInstAtan: // Implementing atanpi
3849 case glsl::ExtInstAtan2: // Implementing atan2pi
3850 generate_extra_inst(
3851 spv::OpFMul,
alan-bakercc2bafb2020-11-02 08:30:18 -05003852 ConstantFP::get(Call->getType()->getScalarType(), kOneOverPi));
SJW806a5d82020-07-15 12:51:38 -05003853 break;
3854
3855 default:
3856 assert(false && "internally inconsistent");
3857 }
3858 }
3859 } else {
SJW806a5d82020-07-15 12:51:38 -05003860 // A real function call (not builtin)
3861 // Call instruction is deferred because it needs function's ID.
3862 RID = addSPIRVPlaceholder(Call);
3863 }
3864
3865 break;
3866 }
3867 }
3868
3869 return RID;
3870}
3871
David Neto22f144c2017-06-12 14:26:21 -04003872void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
David Neto22f144c2017-06-12 14:26:21 -04003873 ValueMapType &VMap = getValueMap();
SJW806a5d82020-07-15 12:51:38 -05003874 LLVMContext &Context = module->getContext();
David Neto22f144c2017-06-12 14:26:21 -04003875
SJW806a5d82020-07-15 12:51:38 -05003876 SPIRVID RID;
David Neto22f144c2017-06-12 14:26:21 -04003877
3878 switch (I.getOpcode()) {
3879 default: {
3880 if (Instruction::isCast(I.getOpcode())) {
3881 //
3882 // Generate SPIRV instructions for cast operators.
3883 //
3884
David Netod2de94a2017-08-28 17:27:47 -04003885 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003886 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04003887 auto toI8 = Ty == Type::getInt8Ty(Context);
3888 auto fromI32 = OpTy == Type::getInt32Ty(Context);
James Price757dea82021-01-11 13:42:39 -05003889 // Handle zext, sext, uitofp, and sitofp with i1 type specially.
David Neto22f144c2017-06-12 14:26:21 -04003890 if ((I.getOpcode() == Instruction::ZExt ||
3891 I.getOpcode() == Instruction::SExt ||
James Price757dea82021-01-11 13:42:39 -05003892 I.getOpcode() == Instruction::UIToFP ||
3893 I.getOpcode() == Instruction::SIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05003894 OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003895 //
3896 // Generate OpSelect.
3897 //
3898
3899 // Ops[0] = Result Type ID
3900 // Ops[1] = Condition ID
3901 // Ops[2] = True Constant ID
3902 // Ops[3] = False Constant ID
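        // Sketch (hypothetical IDs): zext i1 %c to i32 becomes
        //   %r = OpSelect %uint %c %uint_1 %uint_0
        // while sitofp i1 %c to float selects between -1.0 and 0.0.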
SJWf93f5f32020-05-05 07:27:56 -05003903 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003904
SJW01901d92020-05-21 08:58:31 -05003905 Ops << I.getType() << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04003906
David Neto22f144c2017-06-12 14:26:21 -04003907 if (I.getOpcode() == Instruction::ZExt) {
SJW01901d92020-05-21 08:58:31 -05003908 Ops << ConstantInt::get(I.getType(), 1);
David Neto22f144c2017-06-12 14:26:21 -04003909 } else if (I.getOpcode() == Instruction::SExt) {
SJW01901d92020-05-21 08:58:31 -05003910 Ops << ConstantInt::getSigned(I.getType(), -1);
James Price757dea82021-01-11 13:42:39 -05003911 } else if (I.getOpcode() == Instruction::UIToFP) {
James Price96bd3d92020-11-23 09:01:57 -05003912 Ops << ConstantFP::get(I.getType(), 1.0);
James Price757dea82021-01-11 13:42:39 -05003913 } else if (I.getOpcode() == Instruction::SIToFP) {
3914 Ops << ConstantFP::get(I.getType(), -1.0);
David Neto22f144c2017-06-12 14:26:21 -04003915 }
David Neto22f144c2017-06-12 14:26:21 -04003916
David Neto22f144c2017-06-12 14:26:21 -04003917 if (I.getOpcode() == Instruction::ZExt) {
SJW01901d92020-05-21 08:58:31 -05003918 Ops << Constant::getNullValue(I.getType());
David Neto22f144c2017-06-12 14:26:21 -04003919 } else if (I.getOpcode() == Instruction::SExt) {
SJW01901d92020-05-21 08:58:31 -05003920 Ops << Constant::getNullValue(I.getType());
David Neto22f144c2017-06-12 14:26:21 -04003921 } else {
James Price96bd3d92020-11-23 09:01:57 -05003922 Ops << ConstantFP::get(I.getType(), 0.0);
David Neto22f144c2017-06-12 14:26:21 -04003923 }
David Neto22f144c2017-06-12 14:26:21 -04003924
SJWf93f5f32020-05-05 07:27:56 -05003925 RID = addSPIRVInst(spv::OpSelect, Ops);
alan-bakerb39c8262019-03-08 14:03:37 -05003926 } else if (!clspv::Option::Int8Support() &&
3927 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04003928 // The SPIR-V target type is a 32-bit int. Keep only the bottom
3929 // 8 bits.
3930 // Before:
3931 // %result = trunc i32 %a to i8
3932        // After:
3933 // %result = OpBitwiseAnd %uint %a %uint_255
3934
SJWf93f5f32020-05-05 07:27:56 -05003935 SPIRVOperandVec Ops;
David Netod2de94a2017-08-28 17:27:47 -04003936
SJW806a5d82020-07-15 12:51:38 -05003937 Ops << OpTy << I.getOperand(0) << getSPIRVInt32Constant(255);
David Netod2de94a2017-08-28 17:27:47 -04003938
SJWf93f5f32020-05-05 07:27:56 -05003939 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003940 } else {
3941 // Ops[0] = Result Type ID
3942 // Ops[1] = Source Value ID
SJWf93f5f32020-05-05 07:27:56 -05003943 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003944
SJW01901d92020-05-21 08:58:31 -05003945 Ops << I.getType() << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04003946
SJWf93f5f32020-05-05 07:27:56 -05003947 RID = addSPIRVInst(GetSPIRVCastOpcode(I), Ops);
David Neto22f144c2017-06-12 14:26:21 -04003948 }
3949 } else if (isa<BinaryOperator>(I)) {
3950 //
3951 // Generate SPIRV instructions for binary operators.
3952 //
3953
3954 // Handle xor with i1 type specially.
3955 if (I.getOpcode() == Instruction::Xor &&
3956 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00003957 ((isa<ConstantInt>(I.getOperand(0)) &&
3958 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
3959 (isa<ConstantInt>(I.getOperand(1)) &&
3960 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04003961 //
3962 // Generate OpLogicalNot.
3963 //
3964 // Ops[0] = Result Type ID
3965 // Ops[1] = Operand
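        // Sketch (hypothetical IDs): xor i1 %b, true becomes
        //   %r = OpLogicalNot %bool %b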
SJWf93f5f32020-05-05 07:27:56 -05003966 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003967
SJW01901d92020-05-21 08:58:31 -05003968 Ops << I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003969
3970 Value *CondV = I.getOperand(0);
3971 if (isa<Constant>(I.getOperand(0))) {
3972 CondV = I.getOperand(1);
3973 }
SJW01901d92020-05-21 08:58:31 -05003974 Ops << CondV;
David Neto22f144c2017-06-12 14:26:21 -04003975
SJWf93f5f32020-05-05 07:27:56 -05003976 RID = addSPIRVInst(spv::OpLogicalNot, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003977 } else {
3978 // Ops[0] = Result Type ID
3979 // Ops[1] = Operand 0
3980 // Ops[2] = Operand 1
SJWf93f5f32020-05-05 07:27:56 -05003981 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003982
SJW01901d92020-05-21 08:58:31 -05003983 Ops << I.getType() << I.getOperand(0) << I.getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04003984
SJWf93f5f32020-05-05 07:27:56 -05003985 RID = addSPIRVInst(GetSPIRVBinaryOpcode(I), Ops);
David Neto22f144c2017-06-12 14:26:21 -04003986 }
alan-bakerc9c55ae2019-12-02 16:01:27 -05003987 } else if (I.getOpcode() == Instruction::FNeg) {
3988 // The only unary operator.
3989 //
3990 // Ops[0] = Result Type ID
3991 // Ops[1] = Operand 0
SJW01901d92020-05-21 08:58:31 -05003992 SPIRVOperandVec Ops;
alan-bakerc9c55ae2019-12-02 16:01:27 -05003993
SJW01901d92020-05-21 08:58:31 -05003994 Ops << I.getType() << I.getOperand(0);
3995 RID = addSPIRVInst(spv::OpFNegate, Ops);
Marco Antognini68e5c512020-09-09 16:08:57 +01003996 } else if (I.getOpcode() == Instruction::Unreachable) {
3997 RID = addSPIRVInst(spv::OpUnreachable);
David Neto22f144c2017-06-12 14:26:21 -04003998 } else {
3999 I.print(errs());
4000 llvm_unreachable("Unsupported instruction???");
4001 }
4002 break;
4003 }
4004 case Instruction::GetElementPtr: {
4005 auto &GlobalConstArgSet = getGlobalConstArgSet();
4006
4007 //
4008 // Generate OpAccessChain.
4009 //
4010 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
4011
4016 // Ops[0] = Result Type ID
4017 // Ops[1] = Base ID
4018 // Ops[2] ... Ops[n] = Indexes ID
SJWf93f5f32020-05-05 07:27:56 -05004019 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004020
alan-bakerb6b09dc2018-11-08 16:59:28 -05004021 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04004022 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
4023 GlobalConstArgSet.count(GEP->getPointerOperand())) {
4024 // Use pointer type with private address space for global constant.
4025 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04004026 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04004027 }
David Neto257c3892018-04-11 13:19:45 -04004028
SJW01901d92020-05-21 08:58:31 -05004029 Ops << ResultType;
David Neto22f144c2017-06-12 14:26:21 -04004030
David Neto862b7d82018-06-14 18:48:37 -04004031 // Generate the base pointer.
SJW01901d92020-05-21 08:58:31 -05004032 Ops << GEP->getPointerOperand();
David Neto22f144c2017-06-12 14:26:21 -04004033
David Neto862b7d82018-06-14 18:48:37 -04004034 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04004035
4036 //
4037    // The gep is translated using the rules below (see the sketch after the list).
4038 //
David Neto862b7d82018-06-14 18:48:37 -04004039 // 1. If gep's first index is 0 generate OpAccessChain and ignore gep's
4040 // first index.
David Neto22f144c2017-06-12 14:26:21 -04004041 // 2. If gep's first index is not 0, generate OpPtrAccessChain and use gep's
4042 // first index.
4043 // 3. If gep's first index is not constant, generate OpPtrAccessChain and
4044 // use gep's first index.
4045 // 4. If it is not above case 1, 2 and 3, generate OpAccessChain and use
4046 // gep's first index.
4047 //
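    // Illustration only (hypothetical IR and IDs):
    //   getelementptr [4 x float], [4 x float]* %p, i32 0, i32 %i
    //     -> OpAccessChain, dropping the leading 0 index (rule 1), while
    //   getelementptr float, float* %p, i32 %j
    //     -> OpPtrAccessChain, keeping %j as the Element operand (rule 3).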
4048 spv::Op Opcode = spv::OpAccessChain;
4049 unsigned offset = 0;
4050 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04004051 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04004052 offset = 1;
David Neto862b7d82018-06-14 18:48:37 -04004053    } else {
David Neto22f144c2017-06-12 14:26:21 -04004054 Opcode = spv::OpPtrAccessChain;
David Neto22f144c2017-06-12 14:26:21 -04004055 }
David Neto862b7d82018-06-14 18:48:37 -04004056 } else {
David Neto22f144c2017-06-12 14:26:21 -04004057 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04004058 }
4059
4060 if (Opcode == spv::OpPtrAccessChain) {
alan-baker7506abb2020-09-10 15:02:55 -04004061 // Shader validation in the SPIR-V spec requires that the base pointer to
4062 // OpPtrAccessChain (in StorageBuffer storage class) be decorated with
4063 // ArrayStride.
alan-baker5b86ed72019-02-15 08:26:50 -05004064 auto address_space = ResultType->getAddressSpace();
4065 setVariablePointersCapabilities(address_space);
4066 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04004067 case spv::StorageClassStorageBuffer:
David Neto1a1a0582017-07-07 12:01:44 -04004068 // Save the need to generate an ArrayStride decoration. But defer
4069 // generation until later, so we only make one decoration.
alan-baker7506abb2020-09-10 15:02:55 -04004070 getTypesNeedingArrayStride().insert(GEP->getPointerOperandType());
4071 break;
4072 case spv::StorageClassWorkgroup:
Alan Bakerfcda9482018-10-02 17:09:59 -04004073 break;
4074 default:
alan-baker7506abb2020-09-10 15:02:55 -04004075 llvm_unreachable(
4076 "OpPtrAccessChain is not supported for this storage class");
Alan Bakerfcda9482018-10-02 17:09:59 -04004077 break;
David Neto1a1a0582017-07-07 12:01:44 -04004078 }
David Neto22f144c2017-06-12 14:26:21 -04004079 }
4080
4081 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
SJW01901d92020-05-21 08:58:31 -05004082 Ops << *II;
David Neto22f144c2017-06-12 14:26:21 -04004083 }
4084
SJWf93f5f32020-05-05 07:27:56 -05004085 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004086 break;
4087 }
4088 case Instruction::ExtractValue: {
4089 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
4090 // Ops[0] = Result Type ID
4091 // Ops[1] = Composite ID
4092 // Ops[2] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004093 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004094
SJW01901d92020-05-21 08:58:31 -05004095 Ops << I.getType();
David Neto22f144c2017-06-12 14:26:21 -04004096
SJW01901d92020-05-21 08:58:31 -05004097 Ops << EVI->getAggregateOperand();
David Neto22f144c2017-06-12 14:26:21 -04004098
4099 for (auto &Index : EVI->indices()) {
SJW01901d92020-05-21 08:58:31 -05004100 Ops << Index;
David Neto22f144c2017-06-12 14:26:21 -04004101 }
4102
SJWf93f5f32020-05-05 07:27:56 -05004103 RID = addSPIRVInst(spv::OpCompositeExtract, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004104 break;
4105 }
4106 case Instruction::InsertValue: {
4107 InsertValueInst *IVI = cast<InsertValueInst>(&I);
4108 // Ops[0] = Result Type ID
4109 // Ops[1] = Object ID
4110 // Ops[2] = Composite ID
4111 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004112 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004113
SJW01901d92020-05-21 08:58:31 -05004114 Ops << I.getType() << IVI->getInsertedValueOperand()
4115 << IVI->getAggregateOperand();
David Neto22f144c2017-06-12 14:26:21 -04004116
4117 for (auto &Index : IVI->indices()) {
SJW01901d92020-05-21 08:58:31 -05004118 Ops << Index;
David Neto22f144c2017-06-12 14:26:21 -04004119 }
4120
SJWf93f5f32020-05-05 07:27:56 -05004121 RID = addSPIRVInst(spv::OpCompositeInsert, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004122 break;
4123 }
4124 case Instruction::Select: {
4125 //
4126 // Generate OpSelect.
4127 //
4128
4129 // Ops[0] = Result Type ID
4130 // Ops[1] = Condition ID
4131    // Ops[2] = True Value ID
4132    // Ops[3] = False Value ID
SJWf93f5f32020-05-05 07:27:56 -05004133 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004134
4135 // Find SPIRV instruction for parameter type.
4136 auto Ty = I.getType();
4137 if (Ty->isPointerTy()) {
4138 auto PointeeTy = Ty->getPointerElementType();
4139 if (PointeeTy->isStructTy() &&
4140 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
4141 Ty = PointeeTy;
alan-baker5b86ed72019-02-15 08:26:50 -05004142 } else {
4143 // Selecting between pointers requires variable pointers.
4144 setVariablePointersCapabilities(Ty->getPointerAddressSpace());
4145 if (!hasVariablePointers() && !selectFromSameObject(&I)) {
SJW01901d92020-05-21 08:58:31 -05004146 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004147 }
David Neto22f144c2017-06-12 14:26:21 -04004148 }
4149 }
4150
SJW01901d92020-05-21 08:58:31 -05004151 Ops << Ty << I.getOperand(0) << I.getOperand(1) << I.getOperand(2);
David Neto22f144c2017-06-12 14:26:21 -04004152
SJWf93f5f32020-05-05 07:27:56 -05004153 RID = addSPIRVInst(spv::OpSelect, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004154 break;
4155 }
4156 case Instruction::ExtractElement: {
4157 // Handle <4 x i8> type manually.
4158 Type *CompositeTy = I.getOperand(0)->getType();
4159 if (is4xi8vec(CompositeTy)) {
4160 //
4161 // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
4162 // <4 x i8>.
4163 //
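      // Sketch, assuming a constant index i (hypothetical IDs): the byte is
      // recovered from the packed 32-bit word as
      //   %shifted = OpShiftRightLogical %uint %vec %uint_shift  ; shift = 8 * i
      //   %elem    = OpBitwiseAnd %uint %shifted %uint_255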
4164
4165 //
4166 // Generate OpShiftRightLogical
4167 //
4168 // Ops[0] = Result Type ID
4169 // Ops[1] = Operand 0
4170 // Ops[2] = Operand 1
4171 //
SJWf93f5f32020-05-05 07:27:56 -05004172 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004173
SJW01901d92020-05-21 08:58:31 -05004174 Ops << CompositeTy << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04004175
SJW01901d92020-05-21 08:58:31 -05004176 SPIRVID Op1ID = 0;
David Neto22f144c2017-06-12 14:26:21 -04004177 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
4178 // Handle constant index.
SJW806a5d82020-07-15 12:51:38 -05004179 uint32_t Idx = static_cast<uint32_t>(CI->getZExtValue());
4180 Op1ID = getSPIRVInt32Constant(Idx * 8);
David Neto22f144c2017-06-12 14:26:21 -04004181 } else {
4182 // Handle variable index.
SJWf93f5f32020-05-05 07:27:56 -05004183 SPIRVOperandVec TmpOps;
David Neto22f144c2017-06-12 14:26:21 -04004184
SJW806a5d82020-07-15 12:51:38 -05004185 TmpOps << Type::getInt32Ty(Context) << I.getOperand(1)
4186 << getSPIRVInt32Constant(8);
David Neto22f144c2017-06-12 14:26:21 -04004187
SJWf93f5f32020-05-05 07:27:56 -05004188 Op1ID = addSPIRVInst(spv::OpIMul, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004189 }
SJW01901d92020-05-21 08:58:31 -05004190 Ops << Op1ID;
David Neto22f144c2017-06-12 14:26:21 -04004191
SJW01901d92020-05-21 08:58:31 -05004192 SPIRVID ShiftID = addSPIRVInst(spv::OpShiftRightLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004193
4194 //
4195 // Generate OpBitwiseAnd
4196 //
4197 // Ops[0] = Result Type ID
4198 // Ops[1] = Operand 0
4199 // Ops[2] = Operand 1
4200 //
4201 Ops.clear();
4202
SJW806a5d82020-07-15 12:51:38 -05004203 Ops << CompositeTy << ShiftID << getSPIRVInt32Constant(0xFF);
David Neto22f144c2017-06-12 14:26:21 -04004204
SJWf93f5f32020-05-05 07:27:56 -05004205 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004206 break;
4207 }
4208
4209 // Ops[0] = Result Type ID
4210 // Ops[1] = Composite ID
4211 // Ops[2] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004212 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004213
SJW01901d92020-05-21 08:58:31 -05004214 Ops << I.getType() << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04004215
4216 spv::Op Opcode = spv::OpCompositeExtract;
4217 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
SJW01901d92020-05-21 08:58:31 -05004218 Ops << static_cast<uint32_t>(CI->getZExtValue());
David Neto22f144c2017-06-12 14:26:21 -04004219 } else {
SJW01901d92020-05-21 08:58:31 -05004220 Ops << I.getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04004221 Opcode = spv::OpVectorExtractDynamic;
4222 }
4223
SJWf93f5f32020-05-05 07:27:56 -05004224 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004225 break;
4226 }
4227 case Instruction::InsertElement: {
4228 // Handle <4 x i8> type manually.
4229 Type *CompositeTy = I.getOperand(0)->getType();
4230 if (is4xi8vec(CompositeTy)) {
SJW806a5d82020-07-15 12:51:38 -05004231 SPIRVID CstFFID = getSPIRVInt32Constant(0xFF);
David Neto22f144c2017-06-12 14:26:21 -04004232
SJW01901d92020-05-21 08:58:31 -05004233 SPIRVID ShiftAmountID = 0;
David Neto22f144c2017-06-12 14:26:21 -04004234 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
4235 // Handle constant index.
SJW806a5d82020-07-15 12:51:38 -05004236 uint32_t Idx = static_cast<uint32_t>(CI->getZExtValue());
4237 ShiftAmountID = getSPIRVInt32Constant(Idx * 8);
David Neto22f144c2017-06-12 14:26:21 -04004238 } else {
4239 // Handle variable index.
SJWf93f5f32020-05-05 07:27:56 -05004240 SPIRVOperandVec TmpOps;
David Neto22f144c2017-06-12 14:26:21 -04004241
SJW806a5d82020-07-15 12:51:38 -05004242 TmpOps << Type::getInt32Ty(Context) << I.getOperand(2)
4243 << getSPIRVInt32Constant(8);
David Neto22f144c2017-06-12 14:26:21 -04004244
SJWf93f5f32020-05-05 07:27:56 -05004245 ShiftAmountID = addSPIRVInst(spv::OpIMul, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004246 }
4247
4248 //
4249 // Generate mask operations.
4250 //
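      // Sketch for inserting byte %v at index i into the packed word %vec
      // (hypothetical IDs):
      //   %mask    = OpShiftLeftLogical %uint %uint_255 %shift   ; 0xFF << (8*i)
      //   %invmask = OpNot %uint %mask
      //   %cleared = OpBitwiseAnd %uint %vec %invmask
      //   %newval  = OpShiftLeftLogical %uint %v %shift
      //   %result  = OpBitwiseOr %uint %cleared %newval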
4251
4252 // ShiftLeft mask according to index of insertelement.
SJWf93f5f32020-05-05 07:27:56 -05004253 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004254
SJW01901d92020-05-21 08:58:31 -05004255 Ops << CompositeTy << CstFFID << ShiftAmountID;
David Neto22f144c2017-06-12 14:26:21 -04004256
SJW01901d92020-05-21 08:58:31 -05004257 SPIRVID MaskID = addSPIRVInst(spv::OpShiftLeftLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004258
4259 // Inverse mask.
4260 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004261 Ops << CompositeTy << MaskID;
David Neto22f144c2017-06-12 14:26:21 -04004262
SJW01901d92020-05-21 08:58:31 -05004263 SPIRVID InvMaskID = addSPIRVInst(spv::OpNot, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004264
4265 // Apply mask.
4266 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004267 Ops << CompositeTy << I.getOperand(0) << InvMaskID;
David Neto22f144c2017-06-12 14:26:21 -04004268
SJW01901d92020-05-21 08:58:31 -05004269 SPIRVID OrgValID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004270
4271 // Create correct value according to index of insertelement.
4272 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004273 Ops << CompositeTy << I.getOperand(1) << ShiftAmountID;
David Neto22f144c2017-06-12 14:26:21 -04004274
SJW01901d92020-05-21 08:58:31 -05004275 SPIRVID InsertValID = addSPIRVInst(spv::OpShiftLeftLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004276
4277 // Insert value to original value.
4278 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004279 Ops << CompositeTy << OrgValID << InsertValID;
David Neto22f144c2017-06-12 14:26:21 -04004280
SJWf93f5f32020-05-05 07:27:56 -05004281 RID = addSPIRVInst(spv::OpBitwiseOr, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004282 break;
4283 }
4284
SJWf93f5f32020-05-05 07:27:56 -05004285 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004286
James Priced26efea2018-06-09 23:28:32 +01004287 // Ops[0] = Result Type ID
SJW01901d92020-05-21 08:58:31 -05004288 Ops << I.getType();
David Neto22f144c2017-06-12 14:26:21 -04004289
4290 spv::Op Opcode = spv::OpCompositeInsert;
4291 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
David Neto257c3892018-04-11 13:19:45 -04004292 const auto value = CI->getZExtValue();
4293 assert(value <= UINT32_MAX);
James Priced26efea2018-06-09 23:28:32 +01004294 // Ops[1] = Object ID
4295 // Ops[2] = Composite ID
4296 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJW01901d92020-05-21 08:58:31 -05004297 Ops << I.getOperand(1) << I.getOperand(0) << static_cast<uint32_t>(value);
David Neto22f144c2017-06-12 14:26:21 -04004298 } else {
James Priced26efea2018-06-09 23:28:32 +01004299 // Ops[1] = Composite ID
4300 // Ops[2] = Object ID
4301 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJW01901d92020-05-21 08:58:31 -05004302 Ops << I.getOperand(0) << I.getOperand(1) << I.getOperand(2);
David Neto22f144c2017-06-12 14:26:21 -04004303 Opcode = spv::OpVectorInsertDynamic;
4304 }
4305
SJWf93f5f32020-05-05 07:27:56 -05004306 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004307 break;
4308 }
4309 case Instruction::ShuffleVector: {
4310 // Ops[0] = Result Type ID
4311 // Ops[1] = Vector 1 ID
4312 // Ops[2] = Vector 2 ID
4313 // Ops[3] ... Ops[n] = Components (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004314 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004315
SJW01901d92020-05-21 08:58:31 -05004316 Ops << I.getType() << I.getOperand(0) << I.getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04004317
alan-bakerc9666712020-04-01 16:31:21 -04004318 auto shuffle = cast<ShuffleVectorInst>(&I);
4319 SmallVector<int, 4> mask;
4320 shuffle->getShuffleMask(mask);
4321 for (auto i : mask) {
4322 if (i == UndefMaskElem) {
4323 if (clspv::Option::HackUndef())
4324 // Use 0 instead of undef.
SJW01901d92020-05-21 08:58:31 -05004325 Ops << 0;
alan-bakerc9666712020-04-01 16:31:21 -04004326 else
4327 // Undef for shuffle in SPIR-V.
SJW01901d92020-05-21 08:58:31 -05004328 Ops << 0xffffffff;
David Neto22f144c2017-06-12 14:26:21 -04004329 } else {
SJW01901d92020-05-21 08:58:31 -05004330 Ops << i;
David Neto22f144c2017-06-12 14:26:21 -04004331 }
4332 }
4333
SJWf93f5f32020-05-05 07:27:56 -05004334 RID = addSPIRVInst(spv::OpVectorShuffle, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004335 break;
4336 }
4337 case Instruction::ICmp:
4338 case Instruction::FCmp: {
4339 CmpInst *CmpI = cast<CmpInst>(&I);
4340
David Netod4ca2e62017-07-06 18:47:35 -04004341 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004342 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04004343 if (isa<PointerType>(ArgTy)) {
4344 CmpI->print(errs());
alan-baker21574d32020-01-29 16:00:31 -05004345 std::string name = I.getParent()->getParent()->getName().str();
David Netod4ca2e62017-07-06 18:47:35 -04004346 errs()
4347 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
4348 << "in function " << name << "\n";
4349 llvm_unreachable("Pointer equality check is invalid");
4350 break;
4351 }
4352
SJWf93f5f32020-05-05 07:27:56 -05004353 SPIRVOperandVec Ops;
alan-baker15106572020-11-06 15:08:10 -05004354 if (CmpI->getPredicate() == CmpInst::FCMP_ORD ||
4355 CmpI->getPredicate() == CmpInst::FCMP_UNO) {
4356      // Implement ordered and unordered comparisons as OpIsNan instructions.
4357 // Optimize the constants to simplify the resulting code.
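      // Sketch (hypothetical IDs): fcmp uno float %a, %b becomes
      //   %na = OpIsNan %bool %a
      //   %nb = OpIsNan %bool %b
      //   %r  = OpLogicalOr %bool %na %nb
      // and the ordered form (FCMP_ORD) wraps the result in OpLogicalNot.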
4358 auto lhs = CmpI->getOperand(0);
4359 auto rhs = CmpI->getOperand(1);
4360 auto const_lhs = dyn_cast_or_null<Constant>(lhs);
4361 auto const_rhs = dyn_cast_or_null<Constant>(rhs);
4362 if ((const_lhs && const_lhs->isNaN()) ||
4363 (const_rhs && const_rhs->isNaN())) {
4364        // Result is a constant: false for ordered, true for unordered.
4365 if (CmpI->getPredicate() == CmpInst::FCMP_ORD) {
4366 RID = getSPIRVConstant(ConstantInt::getFalse(CmpI->getType()));
4367 } else {
4368 RID = getSPIRVConstant(ConstantInt::getTrue(CmpI->getType()));
4369 }
4370 break;
4371 }
4372 SPIRVID lhs_id;
4373 SPIRVID rhs_id;
4374 if (!const_lhs) {
4375 // Generate OpIsNan for the lhs.
4376 Ops.clear();
4377 Ops << CmpI->getType() << lhs;
4378 lhs_id = addSPIRVInst(spv::OpIsNan, Ops);
4379 }
4380 if (!const_rhs) {
4381 // Generate OpIsNan for the rhs.
4382 Ops.clear();
4383 Ops << CmpI->getType() << rhs;
4384 rhs_id = addSPIRVInst(spv::OpIsNan, Ops);
4385 }
4386 if (lhs_id.isValid() && rhs_id.isValid()) {
4387 // Or the results for the lhs and rhs.
4388 Ops.clear();
4389 Ops << CmpI->getType() << lhs_id << rhs_id;
4390 RID = addSPIRVInst(spv::OpLogicalOr, Ops);
4391 } else {
4392 RID = lhs_id.isValid() ? lhs_id : rhs_id;
4393 }
4394 if (CmpI->getPredicate() == CmpInst::FCMP_ORD) {
4395 // For ordered comparisons, invert the intermediate result.
4396 Ops.clear();
4397 Ops << CmpI->getType() << RID;
4398 RID = addSPIRVInst(spv::OpLogicalNot, Ops);
4399 }
4400 break;
4401 } else {
4402 // Remaining comparisons map directly to SPIR-V opcodes.
4403 // Ops[0] = Result Type ID
4404 // Ops[1] = Operand 1 ID
4405 // Ops[2] = Operand 2 ID
4406 Ops << CmpI->getType() << CmpI->getOperand(0) << CmpI->getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04004407
alan-baker15106572020-11-06 15:08:10 -05004408 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
4409 RID = addSPIRVInst(Opcode, Ops);
4410 }
David Neto22f144c2017-06-12 14:26:21 -04004411 break;
4412 }
4413 case Instruction::Br: {
SJW88ed5fe2020-05-11 12:40:57 -05004414 // Branch instruction is deferred because it needs label's ID.
4415 BasicBlock *BrBB = I.getParent();
4416 if (ContinueBlocks.count(BrBB) || MergeBlocks.count(BrBB)) {
4417 // Placeholder for Merge operation
4418 RID = addSPIRVPlaceholder(&I);
4419 }
4420 RID = addSPIRVPlaceholder(&I);
David Neto22f144c2017-06-12 14:26:21 -04004421 break;
4422 }
4423 case Instruction::Switch: {
4424 I.print(errs());
4425 llvm_unreachable("Unsupported instruction???");
4426 break;
4427 }
4428 case Instruction::IndirectBr: {
4429 I.print(errs());
4430 llvm_unreachable("Unsupported instruction???");
4431 break;
4432 }
4433 case Instruction::PHI: {
SJW88ed5fe2020-05-11 12:40:57 -05004434 // PHI instruction is deferred because it needs label's ID.
4435 RID = addSPIRVPlaceholder(&I);
David Neto22f144c2017-06-12 14:26:21 -04004436 break;
4437 }
4438 case Instruction::Alloca: {
4439 //
4440 // Generate OpVariable.
4441 //
4442 // Ops[0] : Result Type ID
4443 // Ops[1] : Storage Class
SJWf93f5f32020-05-05 07:27:56 -05004444 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004445
SJW01901d92020-05-21 08:58:31 -05004446 Ops << I.getType() << spv::StorageClassFunction;
David Neto22f144c2017-06-12 14:26:21 -04004447
SJWf93f5f32020-05-05 07:27:56 -05004448 RID = addSPIRVInst(spv::OpVariable, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004449 break;
4450 }
4451 case Instruction::Load: {
4452 LoadInst *LD = cast<LoadInst>(&I);
4453 //
4454 // Generate OpLoad.
4455 //
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004456
alan-baker5b86ed72019-02-15 08:26:50 -05004457 if (LD->getType()->isPointerTy()) {
4458 // Loading a pointer requires variable pointers.
4459 setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
4460 }
David Neto22f144c2017-06-12 14:26:21 -04004461
SJW01901d92020-05-21 08:58:31 -05004462 SPIRVID PointerID = getSPIRVValue(LD->getPointerOperand());
David Netoa60b00b2017-09-15 16:34:09 -04004463 // This is a hack to work around what looks like a driver bug.
4464 // When we're loading from the special variable holding the WorkgroupSize
David Neto0a2f98d2017-09-15 19:38:40 -04004465    // builtin value, use an OpBitwiseAnd of the value's ID rather than
4466 // generating a load.
David Neto66cfe642018-03-24 06:13:56 -07004467 // TODO(dneto): Remove this awful hack once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -04004468 if (PointerID == WorkgroupSizeVarID) {
David Neto0a2f98d2017-09-15 19:38:40 -04004469 // Generate a bitwise-and of the original value with itself.
4470 // We should have been able to get away with just an OpCopyObject,
4471 // but we need something more complex to get past certain driver bugs.
4472 // This is ridiculous, but necessary.
4473 // TODO(dneto): Revisit this once drivers fix their bugs.
4474
SJWf93f5f32020-05-05 07:27:56 -05004475 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05004476 Ops << LD->getType() << WorkgroupSizeValueID << WorkgroupSizeValueID;
David Neto0a2f98d2017-09-15 19:38:40 -04004477
SJWf93f5f32020-05-05 07:27:56 -05004478 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Netoa60b00b2017-09-15 16:34:09 -04004479 break;
4480 }
4481
4482 // This is the normal path. Generate a load.
4483
David Neto22f144c2017-06-12 14:26:21 -04004484 // Ops[0] = Result Type ID
4485 // Ops[1] = Pointer ID
4486 // Ops[2] ... Ops[n] = Optional Memory Access
4487 //
4488 // TODO: Do we need to implement Optional Memory Access???
David Neto0a2f98d2017-09-15 19:38:40 -04004489
SJWf93f5f32020-05-05 07:27:56 -05004490 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05004491 Ops << LD->getType() << LD->getPointerOperand();
David Neto22f144c2017-06-12 14:26:21 -04004492
SJWf93f5f32020-05-05 07:27:56 -05004493 RID = addSPIRVInst(spv::OpLoad, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004494 break;
4495 }
4496 case Instruction::Store: {
4497 StoreInst *ST = cast<StoreInst>(&I);
4498 //
4499 // Generate OpStore.
4500 //
4501
alan-baker5b86ed72019-02-15 08:26:50 -05004502 if (ST->getValueOperand()->getType()->isPointerTy()) {
4503 // Storing a pointer requires variable pointers.
4504 setVariablePointersCapabilities(
4505 ST->getValueOperand()->getType()->getPointerAddressSpace());
4506 }
4507
David Neto22f144c2017-06-12 14:26:21 -04004508 // Ops[0] = Pointer ID
4509 // Ops[1] = Object ID
4510 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
4511 //
4512 // TODO: Do we need to implement Optional Memory Access???
SJWf93f5f32020-05-05 07:27:56 -05004513 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05004514 Ops << ST->getPointerOperand() << ST->getValueOperand();
David Neto22f144c2017-06-12 14:26:21 -04004515
SJWf93f5f32020-05-05 07:27:56 -05004516 RID = addSPIRVInst(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004517 break;
4518 }
4519 case Instruction::AtomicCmpXchg: {
4520 I.print(errs());
4521 llvm_unreachable("Unsupported instruction???");
4522 break;
4523 }
4524 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01004525 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
4526
4527 spv::Op opcode;
4528
4529 switch (AtomicRMW->getOperation()) {
4530 default:
4531 I.print(errs());
4532 llvm_unreachable("Unsupported instruction???");
4533 case llvm::AtomicRMWInst::Add:
4534 opcode = spv::OpAtomicIAdd;
4535 break;
4536 case llvm::AtomicRMWInst::Sub:
4537 opcode = spv::OpAtomicISub;
4538 break;
4539 case llvm::AtomicRMWInst::Xchg:
4540 opcode = spv::OpAtomicExchange;
4541 break;
4542 case llvm::AtomicRMWInst::Min:
4543 opcode = spv::OpAtomicSMin;
4544 break;
4545 case llvm::AtomicRMWInst::Max:
4546 opcode = spv::OpAtomicSMax;
4547 break;
4548 case llvm::AtomicRMWInst::UMin:
4549 opcode = spv::OpAtomicUMin;
4550 break;
4551 case llvm::AtomicRMWInst::UMax:
4552 opcode = spv::OpAtomicUMax;
4553 break;
4554 case llvm::AtomicRMWInst::And:
4555 opcode = spv::OpAtomicAnd;
4556 break;
4557 case llvm::AtomicRMWInst::Or:
4558 opcode = spv::OpAtomicOr;
4559 break;
4560 case llvm::AtomicRMWInst::Xor:
4561 opcode = spv::OpAtomicXor;
4562 break;
4563 }
4564
4565 //
4566 // Generate OpAtomic*.
4567 //
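    // Sketch (hypothetical IDs): atomicrmw add i32* %p, i32 %v becomes
    //   %r = OpAtomicIAdd %uint %p %scope_device %semantics %v
    // where %semantics is UniformMemory | SequentiallyConsistent.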
SJWf93f5f32020-05-05 07:27:56 -05004568 SPIRVOperandVec Ops;
Neil Henning39672102017-09-29 14:33:13 +01004569
SJW01901d92020-05-21 08:58:31 -05004570 Ops << I.getType() << AtomicRMW->getPointerOperand();
Neil Henning39672102017-09-29 14:33:13 +01004571
SJW806a5d82020-07-15 12:51:38 -05004572 const auto ConstantScopeDevice = getSPIRVInt32Constant(spv::ScopeDevice);
SJW01901d92020-05-21 08:58:31 -05004573 Ops << ConstantScopeDevice;
Neil Henning39672102017-09-29 14:33:13 +01004574
SJW806a5d82020-07-15 12:51:38 -05004575 const auto ConstantMemorySemantics =
4576 getSPIRVInt32Constant(spv::MemorySemanticsUniformMemoryMask |
4577 spv::MemorySemanticsSequentiallyConsistentMask);
SJW01901d92020-05-21 08:58:31 -05004578 Ops << ConstantMemorySemantics << AtomicRMW->getValOperand();
Neil Henning39672102017-09-29 14:33:13 +01004579
SJWf93f5f32020-05-05 07:27:56 -05004580 RID = addSPIRVInst(opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004581 break;
4582 }
4583 case Instruction::Fence: {
4584 I.print(errs());
4585 llvm_unreachable("Unsupported instruction???");
4586 break;
4587 }
4588 case Instruction::Call: {
4589 CallInst *Call = dyn_cast<CallInst>(&I);
SJW806a5d82020-07-15 12:51:38 -05004590 RID = GenerateInstructionFromCall(Call);
David Neto22f144c2017-06-12 14:26:21 -04004591 break;
4592 }
4593 case Instruction::Ret: {
4594 unsigned NumOps = I.getNumOperands();
4595 if (NumOps == 0) {
4596 //
4597 // Generate OpReturn.
4598 //
SJWf93f5f32020-05-05 07:27:56 -05004599 RID = addSPIRVInst(spv::OpReturn);
David Neto22f144c2017-06-12 14:26:21 -04004600 } else {
4601 //
4602 // Generate OpReturnValue.
4603 //
4604
4605 // Ops[0] = Return Value ID
SJWf93f5f32020-05-05 07:27:56 -05004606 SPIRVOperandVec Ops;
David Neto257c3892018-04-11 13:19:45 -04004607
SJW01901d92020-05-21 08:58:31 -05004608 Ops << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04004609
SJWf93f5f32020-05-05 07:27:56 -05004610 RID = addSPIRVInst(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004611 break;
4612 }
4613 break;
4614 }
4615 }
SJWf93f5f32020-05-05 07:27:56 -05004616
4617 // Register Instruction to ValueMap.
SJW01901d92020-05-21 08:58:31 -05004618 if (RID.isValid()) {
SJWf93f5f32020-05-05 07:27:56 -05004619 VMap[&I] = RID;
4620 }
David Neto22f144c2017-06-12 14:26:21 -04004621}
4622
4623void SPIRVProducerPass::GenerateFuncEpilogue() {
David Neto22f144c2017-06-12 14:26:21 -04004624 //
4625 // Generate OpFunctionEnd
4626 //
SJWf93f5f32020-05-05 07:27:56 -05004627 addSPIRVInst(spv::OpFunctionEnd);
David Neto22f144c2017-06-12 14:26:21 -04004628}
4629
4630bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05004631 // Don't specialize <4 x i8> if i8 is generally supported.
4632 if (clspv::Option::Int8Support())
4633 return false;
4634
David Neto22f144c2017-06-12 14:26:21 -04004635 LLVMContext &Context = Ty->getContext();
James Pricecf53df42020-04-20 14:41:24 -04004636 if (auto VecTy = dyn_cast<VectorType>(Ty)) {
4637 if (VecTy->getElementType() == Type::getInt8Ty(Context) &&
alan-baker5a8c3be2020-09-09 13:44:26 -04004638 VecTy->getElementCount().getKnownMinValue() == 4) {
David Neto22f144c2017-06-12 14:26:21 -04004639 return true;
4640 }
4641 }
4642
4643 return false;
4644}
4645
4646void SPIRVProducerPass::HandleDeferredInstruction() {
David Neto22f144c2017-06-12 14:26:21 -04004647 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
4648
SJW88ed5fe2020-05-11 12:40:57 -05004649 for (size_t i = 0; i < DeferredInsts.size(); ++i) {
4650 Value *Inst = DeferredInsts[i].first;
4651 SPIRVInstruction *Placeholder = DeferredInsts[i].second;
4652 SPIRVOperandVec Operands;
4653
4654 auto nextDeferred = [&i, &Inst, &DeferredInsts, &Placeholder]() {
4655 ++i;
4656 assert(DeferredInsts.size() > i);
4657 assert(Inst == DeferredInsts[i].first);
4658 Placeholder = DeferredInsts[i].second;
4659 };
David Neto22f144c2017-06-12 14:26:21 -04004660
4661 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
alan-baker06cad652019-12-03 17:56:47 -05004662      // Check whether this branch needs to be preceded by a merge instruction.
David Neto22f144c2017-06-12 14:26:21 -04004663 BasicBlock *BrBB = Br->getParent();
alan-baker06cad652019-12-03 17:56:47 -05004664 if (ContinueBlocks.count(BrBB)) {
David Neto22f144c2017-06-12 14:26:21 -04004665 //
4666 // Generate OpLoopMerge.
4667 //
4668 // Ops[0] = Merge Block ID
4669 // Ops[1] = Continue Target ID
4670      // Ops[2] = Loop Control
SJWf93f5f32020-05-05 07:27:56 -05004671 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004672
SJW01901d92020-05-21 08:58:31 -05004673 Ops << MergeBlocks[BrBB] << ContinueBlocks[BrBB]
4674 << spv::LoopControlMaskNone;
David Neto22f144c2017-06-12 14:26:21 -04004675
SJW88ed5fe2020-05-11 12:40:57 -05004676 replaceSPIRVInst(Placeholder, spv::OpLoopMerge, Ops);
4677
4678 nextDeferred();
4679
alan-baker06cad652019-12-03 17:56:47 -05004680 } else if (MergeBlocks.count(BrBB)) {
4681 //
4682 // Generate OpSelectionMerge.
4683 //
4684 // Ops[0] = Merge Block ID
4685 // Ops[1] = Selection Control
SJWf93f5f32020-05-05 07:27:56 -05004686 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004687
alan-baker06cad652019-12-03 17:56:47 -05004688 auto MergeBB = MergeBlocks[BrBB];
SJW01901d92020-05-21 08:58:31 -05004689 Ops << MergeBB << spv::SelectionControlMaskNone;
David Neto22f144c2017-06-12 14:26:21 -04004690
SJW88ed5fe2020-05-11 12:40:57 -05004691 replaceSPIRVInst(Placeholder, spv::OpSelectionMerge, Ops);
4692
4693 nextDeferred();
David Neto22f144c2017-06-12 14:26:21 -04004694 }
4695
4696 if (Br->isConditional()) {
4697 //
4698 // Generate OpBranchConditional.
4699 //
4700 // Ops[0] = Condition ID
4701 // Ops[1] = True Label ID
4702 // Ops[2] = False Label ID
4703 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004704 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004705
SJW01901d92020-05-21 08:58:31 -05004706 Ops << Br->getCondition() << Br->getSuccessor(0) << Br->getSuccessor(1);
David Neto22f144c2017-06-12 14:26:21 -04004707
SJW88ed5fe2020-05-11 12:40:57 -05004708 replaceSPIRVInst(Placeholder, spv::OpBranchConditional, Ops);
4709
David Neto22f144c2017-06-12 14:26:21 -04004710 } else {
4711 //
4712 // Generate OpBranch.
4713 //
4714 // Ops[0] = Target Label ID
SJWf93f5f32020-05-05 07:27:56 -05004715 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004716
SJW01901d92020-05-21 08:58:31 -05004717 Ops << Br->getSuccessor(0);
David Neto22f144c2017-06-12 14:26:21 -04004718
SJW88ed5fe2020-05-11 12:40:57 -05004719 replaceSPIRVInst(Placeholder, spv::OpBranch, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004720 }
4721 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5ed87542020-03-23 11:05:22 -04004722 if (PHI->getType()->isPointerTy() && !IsSamplerType(PHI->getType()) &&
4723 !IsImageType(PHI->getType())) {
alan-baker5b86ed72019-02-15 08:26:50 -05004724 // OpPhi on pointers requires variable pointers.
4725 setVariablePointersCapabilities(
4726 PHI->getType()->getPointerAddressSpace());
4727 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
SJW01901d92020-05-21 08:58:31 -05004728 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004729 }
4730 }
4731
David Neto22f144c2017-06-12 14:26:21 -04004732 //
4733 // Generate OpPhi.
4734 //
4735 // Ops[0] = Result Type ID
4736 // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
SJWf93f5f32020-05-05 07:27:56 -05004737 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004738
SJW01901d92020-05-21 08:58:31 -05004739 Ops << PHI->getType();
David Neto22f144c2017-06-12 14:26:21 -04004740
SJW88ed5fe2020-05-11 12:40:57 -05004741 for (unsigned j = 0; j < PHI->getNumIncomingValues(); j++) {
SJW01901d92020-05-21 08:58:31 -05004742 Ops << PHI->getIncomingValue(j) << PHI->getIncomingBlock(j);
David Neto22f144c2017-06-12 14:26:21 -04004743 }
4744
SJW88ed5fe2020-05-11 12:40:57 -05004745 replaceSPIRVInst(Placeholder, spv::OpPhi, Ops);
4746
David Neto22f144c2017-06-12 14:26:21 -04004747 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
4748 Function *Callee = Call->getCalledFunction();
David Neto3fbb4072017-10-16 11:28:14 -04004749 auto callee_name = Callee->getName();
David Neto22f144c2017-06-12 14:26:21 -04004750
SJW61531372020-06-09 07:31:08 -05004751 if (Builtins::Lookup(Callee) == Builtins::kClspvCompositeConstruct) {
David Netoab03f432017-11-03 17:00:44 -04004752 // Generate an OpCompositeConstruct
SJWf93f5f32020-05-05 07:27:56 -05004753 SPIRVOperandVec Ops;
David Netoab03f432017-11-03 17:00:44 -04004754
4755 // The result type.
SJW01901d92020-05-21 08:58:31 -05004756 Ops << Call->getType();
David Netoab03f432017-11-03 17:00:44 -04004757
4758 for (Use &use : Call->arg_operands()) {
SJW01901d92020-05-21 08:58:31 -05004759 Ops << use.get();
David Netoab03f432017-11-03 17:00:44 -04004760 }
4761
SJW88ed5fe2020-05-11 12:40:57 -05004762 replaceSPIRVInst(Placeholder, spv::OpCompositeConstruct, Ops);
David Netoab03f432017-11-03 17:00:44 -04004763
David Neto22f144c2017-06-12 14:26:21 -04004764 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05004765 if (Call->getType()->isPointerTy()) {
4766 // Functions returning pointers require variable pointers.
4767 setVariablePointersCapabilities(
4768 Call->getType()->getPointerAddressSpace());
4769 }
4770
David Neto22f144c2017-06-12 14:26:21 -04004771 //
4772 // Generate OpFunctionCall.
4773 //
4774
4775 // Ops[0] = Result Type ID
4776 // Ops[1] = Callee Function ID
4777 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
SJWf93f5f32020-05-05 07:27:56 -05004778 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004779
SJW01901d92020-05-21 08:58:31 -05004780 Ops << Call->getType();
David Neto22f144c2017-06-12 14:26:21 -04004781
SJW01901d92020-05-21 08:58:31 -05004782 SPIRVID CalleeID = getSPIRVValue(Callee);
SJW806a5d82020-07-15 12:51:38 -05004783 if (!CalleeID.isValid()) {
David Neto43568eb2017-10-13 18:25:25 -04004784 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04004785 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04004786 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
4787 // causes an infinite loop. Instead, go ahead and generate
4788 // the bad function call. A validator will catch the 0-Id.
4789 // llvm_unreachable("Can't translate function call");
4790 }
David Neto22f144c2017-06-12 14:26:21 -04004791
SJW01901d92020-05-21 08:58:31 -05004792 Ops << CalleeID;
David Neto22f144c2017-06-12 14:26:21 -04004793
David Neto22f144c2017-06-12 14:26:21 -04004794 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
SJW88ed5fe2020-05-11 12:40:57 -05004795 for (unsigned j = 0; j < CalleeFTy->getNumParams(); j++) {
4796 auto *operand = Call->getOperand(j);
alan-bakerd4d50652019-12-03 17:17:15 -05004797 auto *operand_type = operand->getType();
4798 // Images and samplers can be passed as function parameters without
4799 // variable pointers.
4800 if (operand_type->isPointerTy() && !IsImageType(operand_type) &&
4801 !IsSamplerType(operand_type)) {
alan-baker5b86ed72019-02-15 08:26:50 -05004802 auto sc =
4803 GetStorageClass(operand->getType()->getPointerAddressSpace());
4804 if (sc == spv::StorageClassStorageBuffer) {
4805 // Passing SSBO by reference requires variable pointers storage
4806 // buffer.
SJW01901d92020-05-21 08:58:31 -05004807 setVariablePointersStorageBuffer();
alan-baker5b86ed72019-02-15 08:26:50 -05004808 } else if (sc == spv::StorageClassWorkgroup) {
4809 // Workgroup references require variable pointers if they are not
4810 // memory object declarations.
4811 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
4812 // Workgroup accessor represents a variable reference.
SJW61531372020-06-09 07:31:08 -05004813 if (Builtins::Lookup(operand_call->getCalledFunction()) !=
4814 Builtins::kClspvLocal)
SJW01901d92020-05-21 08:58:31 -05004815 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004816 } else {
4817 // Arguments are function parameters.
4818 if (!isa<Argument>(operand))
SJW01901d92020-05-21 08:58:31 -05004819 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004820 }
4821 }
4822 }
SJW01901d92020-05-21 08:58:31 -05004823 Ops << operand;
David Neto22f144c2017-06-12 14:26:21 -04004824 }
4825
SJW88ed5fe2020-05-11 12:40:57 -05004826 replaceSPIRVInst(Placeholder, spv::OpFunctionCall, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004827 }
4828 }
4829 }
4830}
4831
SJW77b87ad2020-04-21 14:37:52 -05004832void SPIRVProducerPass::HandleDeferredDecorations() {
4833 const auto &DL = module->getDataLayout();
alan-baker5a8c3be2020-09-09 13:44:26 -04004834 if (getTypesNeedingArrayStride().empty()) {
David Neto1a1a0582017-07-07 12:01:44 -04004835 return;
David Netoc6f3ab22018-04-06 18:02:31 -04004836 }
David Neto1a1a0582017-07-07 12:01:44 -04004837
David Netoc6f3ab22018-04-06 18:02:31 -04004838 // Insert ArrayStride decorations on pointer types, due to OpPtrAccessChain
4839 // instructions we generated earlier.
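  // Sketch (hypothetical IDs): a StorageBuffer pointer-to-float type would
  // receive a decoration of the form
  //   OpDecorate %ptr_sb_float ArrayStride 4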
alan-bakerc3fd07f2020-10-22 09:48:49 -04004840 DenseSet<uint32_t> seen;
David Neto85082642018-03-24 06:55:20 -07004841 for (auto *type : getTypesNeedingArrayStride()) {
alan-bakerc3fd07f2020-10-22 09:48:49 -04004842 auto id = getSPIRVType(type);
4843 if (!seen.insert(id.get()).second)
4844 continue;
4845
David Neto85082642018-03-24 06:55:20 -07004846 Type *elemTy = nullptr;
4847 if (auto *ptrTy = dyn_cast<PointerType>(type)) {
4848 elemTy = ptrTy->getElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05004849 } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
alan-baker8eb435a2020-04-08 00:42:06 -04004850 elemTy = arrayTy->getElementType();
4851 } else if (auto *vecTy = dyn_cast<VectorType>(type)) {
4852 elemTy = vecTy->getElementType();
David Neto85082642018-03-24 06:55:20 -07004853 } else {
4854 errs() << "Unhandled strided type " << *type << "\n";
4855 llvm_unreachable("Unhandled strided type");
4856 }
David Neto1a1a0582017-07-07 12:01:44 -04004857
4858 // Ops[0] = Target ID
4859 // Ops[1] = Decoration (ArrayStride)
4860 // Ops[2] = Stride number (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004861 SPIRVOperandVec Ops;
David Neto1a1a0582017-07-07 12:01:44 -04004862
David Neto85082642018-03-24 06:55:20 -07004863 // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
Alan Bakerfcda9482018-10-02 17:09:59 -04004864 const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));
David Neto257c3892018-04-11 13:19:45 -04004865
alan-bakerc3fd07f2020-10-22 09:48:49 -04004866 Ops << id << spv::DecorationArrayStride << stride;
David Neto1a1a0582017-07-07 12:01:44 -04004867
SJWf93f5f32020-05-05 07:27:56 -05004868 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04004869 }
David Neto1a1a0582017-07-07 12:01:44 -04004870}
4871
SJW61531372020-06-09 07:31:08 -05004872glsl::ExtInst
4873SPIRVProducerPass::getExtInstEnum(const Builtins::FunctionInfo &func_info) {
SJW61531372020-06-09 07:31:08 -05004874 switch (func_info.getType()) {
SJW2c317da2020-03-23 07:39:13 -05004875 case Builtins::kClamp: {
SJW61531372020-06-09 07:31:08 -05004876 auto param_type = func_info.getParameter(0);
SJW2c317da2020-03-23 07:39:13 -05004877 if (param_type.type_id == Type::FloatTyID) {
alan-bakerecc9c942020-12-07 13:13:32 -05004878 return glsl::ExtInst::ExtInstNClamp;
SJW2c317da2020-03-23 07:39:13 -05004879 }
4880 return param_type.is_signed ? glsl::ExtInst::ExtInstSClamp
4881 : glsl::ExtInst::ExtInstUClamp;
4882 }
4883 case Builtins::kMax: {
SJW61531372020-06-09 07:31:08 -05004884 auto param_type = func_info.getParameter(0);
SJW2c317da2020-03-23 07:39:13 -05004885 if (param_type.type_id == Type::FloatTyID) {
4886 return glsl::ExtInst::ExtInstFMax;
4887 }
4888 return param_type.is_signed ? glsl::ExtInst::ExtInstSMax
4889 : glsl::ExtInst::ExtInstUMax;
4890 }
4891 case Builtins::kMin: {
SJW61531372020-06-09 07:31:08 -05004892 auto param_type = func_info.getParameter(0);
SJW2c317da2020-03-23 07:39:13 -05004893 if (param_type.type_id == Type::FloatTyID) {
4894 return glsl::ExtInst::ExtInstFMin;
4895 }
4896 return param_type.is_signed ? glsl::ExtInst::ExtInstSMin
4897 : glsl::ExtInst::ExtInstUMin;
4898 }
4899 case Builtins::kAbs:
4900 return glsl::ExtInst::ExtInstSAbs;
4901 case Builtins::kFmax:
Marco Antognini55d51862020-07-21 17:50:07 +01004902 return glsl::ExtInst::ExtInstNMax;
SJW2c317da2020-03-23 07:39:13 -05004903 case Builtins::kFmin:
Marco Antognini55d51862020-07-21 17:50:07 +01004904 return glsl::ExtInst::ExtInstNMin;
SJW2c317da2020-03-23 07:39:13 -05004905 case Builtins::kDegrees:
4906 return glsl::ExtInst::ExtInstDegrees;
4907 case Builtins::kRadians:
4908 return glsl::ExtInst::ExtInstRadians;
4909 case Builtins::kMix:
4910 return glsl::ExtInst::ExtInstFMix;
4911 case Builtins::kAcos:
4912 case Builtins::kAcospi:
4913 return glsl::ExtInst::ExtInstAcos;
4914 case Builtins::kAcosh:
4915 return glsl::ExtInst::ExtInstAcosh;
4916 case Builtins::kAsin:
4917 case Builtins::kAsinpi:
4918 return glsl::ExtInst::ExtInstAsin;
4919 case Builtins::kAsinh:
4920 return glsl::ExtInst::ExtInstAsinh;
4921 case Builtins::kAtan:
4922 case Builtins::kAtanpi:
4923 return glsl::ExtInst::ExtInstAtan;
4924 case Builtins::kAtanh:
4925 return glsl::ExtInst::ExtInstAtanh;
4926 case Builtins::kAtan2:
4927 case Builtins::kAtan2pi:
4928 return glsl::ExtInst::ExtInstAtan2;
4929 case Builtins::kCeil:
4930 return glsl::ExtInst::ExtInstCeil;
4931 case Builtins::kSin:
4932 case Builtins::kHalfSin:
4933 case Builtins::kNativeSin:
4934 return glsl::ExtInst::ExtInstSin;
4935 case Builtins::kSinh:
4936 return glsl::ExtInst::ExtInstSinh;
4937 case Builtins::kCos:
4938 case Builtins::kHalfCos:
4939 case Builtins::kNativeCos:
4940 return glsl::ExtInst::ExtInstCos;
4941 case Builtins::kCosh:
4942 return glsl::ExtInst::ExtInstCosh;
4943 case Builtins::kTan:
4944 case Builtins::kHalfTan:
4945 case Builtins::kNativeTan:
4946 return glsl::ExtInst::ExtInstTan;
4947 case Builtins::kTanh:
4948 return glsl::ExtInst::ExtInstTanh;
4949 case Builtins::kExp:
4950 case Builtins::kHalfExp:
4951 case Builtins::kNativeExp:
4952 return glsl::ExtInst::ExtInstExp;
4953 case Builtins::kExp2:
4954 case Builtins::kHalfExp2:
4955 case Builtins::kNativeExp2:
4956 return glsl::ExtInst::ExtInstExp2;
4957 case Builtins::kLog:
4958 case Builtins::kHalfLog:
4959 case Builtins::kNativeLog:
4960 return glsl::ExtInst::ExtInstLog;
4961 case Builtins::kLog2:
4962 case Builtins::kHalfLog2:
4963 case Builtins::kNativeLog2:
4964 return glsl::ExtInst::ExtInstLog2;
  case Builtins::kFabs:
    return glsl::ExtInst::ExtInstFAbs;
  case Builtins::kFma:
    return glsl::ExtInst::ExtInstFma;
  case Builtins::kFloor:
    return glsl::ExtInst::ExtInstFloor;
  case Builtins::kLdexp:
    return glsl::ExtInst::ExtInstLdexp;
  case Builtins::kPow:
  case Builtins::kPowr:
  case Builtins::kHalfPowr:
  case Builtins::kNativePowr:
    return glsl::ExtInst::ExtInstPow;
  case Builtins::kRint:
    return glsl::ExtInst::ExtInstRoundEven;
  case Builtins::kRound:
    return glsl::ExtInst::ExtInstRound;
  case Builtins::kSqrt:
  case Builtins::kHalfSqrt:
  case Builtins::kNativeSqrt:
    return glsl::ExtInst::ExtInstSqrt;
  case Builtins::kRsqrt:
  case Builtins::kHalfRsqrt:
  case Builtins::kNativeRsqrt:
    return glsl::ExtInst::ExtInstInverseSqrt;
  case Builtins::kTrunc:
    return glsl::ExtInst::ExtInstTrunc;
  case Builtins::kFrexp:
    return glsl::ExtInst::ExtInstFrexp;
  case Builtins::kClspvFract:
  case Builtins::kFract:
    return glsl::ExtInst::ExtInstFract;
  case Builtins::kSign:
    return glsl::ExtInst::ExtInstFSign;
  case Builtins::kLength:
  case Builtins::kFastLength:
    return glsl::ExtInst::ExtInstLength;
  case Builtins::kDistance:
  case Builtins::kFastDistance:
    return glsl::ExtInst::ExtInstDistance;
  case Builtins::kStep:
    return glsl::ExtInst::ExtInstStep;
  case Builtins::kSmoothstep:
    return glsl::ExtInst::ExtInstSmoothStep;
  case Builtins::kCross:
    return glsl::ExtInst::ExtInstCross;
  case Builtins::kNormalize:
  case Builtins::kFastNormalize:
    return glsl::ExtInst::ExtInstNormalize;
  case Builtins::kSpirvPack:
    return glsl::ExtInst::ExtInstPackHalf2x16;
  case Builtins::kSpirvUnpack:
    return glsl::ExtInst::ExtInstUnpackHalf2x16;
  default:
    break;
  }

  // TODO: improve this by checking the intrinsic id.
  if (func_info.getName().find("llvm.fmuladd.") == 0) {
    return glsl::ExtInst::ExtInstFma;
  }
  if (func_info.getName().find("llvm.sqrt.") == 0) {
    return glsl::ExtInst::ExtInstSqrt;
  }
  if (func_info.getName().find("llvm.trunc.") == 0) {
    return glsl::ExtInst::ExtInstTrunc;
  }
  if (func_info.getName().find("llvm.ctlz.") == 0) {
    return glsl::ExtInst::ExtInstFindUMsb;
  }
  if (func_info.getName().find("llvm.cttz.") == 0) {
    return glsl::ExtInst::ExtInstFindILsb;
  }
  if (func_info.getName().find("llvm.ceil.") == 0) {
    return glsl::ExtInst::ExtInstCeil;
  }
  if (func_info.getName().find("llvm.rint.") == 0) {
    return glsl::ExtInst::ExtInstRoundEven;
  }
  if (func_info.getName().find("llvm.fabs.") == 0) {
    return glsl::ExtInst::ExtInstFAbs;
  }
  if (func_info.getName().find("llvm.floor.") == 0) {
    return glsl::ExtInst::ExtInstFloor;
  }
  if (func_info.getName().find("llvm.sin.") == 0) {
    return glsl::ExtInst::ExtInstSin;
  }
  if (func_info.getName().find("llvm.cos.") == 0) {
    return glsl::ExtInst::ExtInstCos;
  }
  if (func_info.getName().find("llvm.exp.") == 0) {
    return glsl::ExtInst::ExtInstExp;
  }
  if (func_info.getName().find("llvm.log.") == 0) {
    return glsl::ExtInst::ExtInstLog;
  }
  if (func_info.getName().find("llvm.pow.") == 0) {
    return glsl::ExtInst::ExtInstPow;
  }
  return kGlslExtInstBad;
}

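// Returns a GLSL extended instruction that computes a closely related value
// for builtins with no direct GLSL equivalent. For example, acospi(x) maps to
// Acos; the remaining scaling (for acospi, a division by pi) is applied
// around the extended instruction by the caller.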
glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(
    const Builtins::FunctionInfo &func_info) {
  switch (func_info.getType()) {
  case Builtins::kAcospi:
    return glsl::ExtInst::ExtInstAcos;
  case Builtins::kAsinpi:
    return glsl::ExtInst::ExtInstAsin;
  case Builtins::kAtanpi:
    return glsl::ExtInst::ExtInstAtan;
  case Builtins::kAtan2pi:
    return glsl::ExtInst::ExtInstAtan2;
  default:
    break;
  }
  return kGlslExtInstBad;
}

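// Prefers a direct translation of the builtin to a GLSL extended instruction,
// falling back to the indirect mapping above when no direct one exists.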
glsl::ExtInst SPIRVProducerPass::getDirectOrIndirectExtInstEnum(
    const Builtins::FunctionInfo &func_info) {
  auto direct = getExtInstEnum(func_info);
  if (direct != kGlslExtInstBad)
    return direct;
  return getIndirectExtInstEnum(func_info);
}

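// Emits a single 32-bit word to the output binary stream.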
void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
  binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
}

void SPIRVProducerPass::WriteResultID(const SPIRVInstruction &Inst) {
  WriteOneWord(Inst.getResultID().get());
}

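// Writes the instruction header word: the total word count in the high 16
// bits and the opcode in the low 16 bits. For example, an OpTypeInt
// (opcode 21) encoded in 4 words yields the header word 0x00040015.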
void SPIRVProducerPass::WriteWordCountAndOpcode(const SPIRVInstruction &Inst) {
  // High 16 bit : Word Count
  // Low 16 bit : Opcode
  uint32_t Word = Inst.getOpcode();
  const uint32_t count = Inst.getWordCount();
  if (count > 65535) {
    errs() << "Word count limit of 65535 exceeded: " << count << "\n";
    llvm_unreachable("Word count too high");
  }
  Word |= Inst.getWordCount() << 16;
  WriteOneWord(Word);
}

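// Writes a single operand. Literal strings are packed four bytes per word and
// are always followed by at least one terminating nul byte; for example, the
// three-character string "abc" is emitted as the single word 0x00636261,
// while a string whose length is a multiple of four gains an extra all-zero
// terminator word.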
void SPIRVProducerPass::WriteOperand(const SPIRVOperand &Op) {
  SPIRVOperandType OpTy = Op.getType();
  switch (OpTy) {
  default: {
    llvm_unreachable("Unsupported SPIRV Operand Type???");
    break;
  }
  case SPIRVOperandType::NUMBERID: {
    WriteOneWord(Op.getNumID());
    break;
  }
  case SPIRVOperandType::LITERAL_STRING: {
    std::string Str = Op.getLiteralStr();
    const char *Data = Str.c_str();
    size_t WordSize = Str.size() / 4;
    for (unsigned Idx = 0; Idx < WordSize; Idx++) {
      WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
    }

    uint32_t Remainder = Str.size() % 4;
    uint32_t LastWord = 0;
    if (Remainder) {
      for (unsigned Idx = 0; Idx < Remainder; Idx++) {
        LastWord |= Data[4 * WordSize + Idx] << 8 * Idx;
      }
    }

    WriteOneWord(LastWord);
    break;
  }
  case SPIRVOperandType::LITERAL_WORD: {
    WriteOneWord(Op.getLiteralNum()[0]);
    break;
  }
  case SPIRVOperandType::LITERAL_DWORD: {
    WriteOneWord(Op.getLiteralNum()[0]);
    WriteOneWord(Op.getLiteralNum()[1]);
    break;
  }
  }
}

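// Writes each instruction section to the output stream in section order.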
void SPIRVProducerPass::WriteSPIRVBinary() {
  for (int i = 0; i < kSectionCount; ++i) {
    WriteSPIRVBinary(SPIRVSections[i]);
  }
}

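// Serializes one instruction list. Opcodes are grouped by encoding shape:
// instructions with no result id, instructions whose result id immediately
// follows the opcode word (types, OpString, OpLabel, etc.), and instructions
// that carry a result type followed by a result id before their remaining
// operands.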
void SPIRVProducerPass::WriteSPIRVBinary(SPIRVInstructionList &SPIRVInstList) {
  for (const auto &Inst : SPIRVInstList) {
    const auto &Ops = Inst.getOperands();
    spv::Op Opcode = static_cast<spv::Op>(Inst.getOpcode());

    switch (Opcode) {
    default: {
      errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
      llvm_unreachable("Unsupported SPIRV instruction");
      break;
    }
    case spv::OpUnreachable:
    case spv::OpCapability:
    case spv::OpExtension:
    case spv::OpMemoryModel:
    case spv::OpEntryPoint:
    case spv::OpExecutionMode:
    case spv::OpSource:
    case spv::OpDecorate:
    case spv::OpMemberDecorate:
    case spv::OpBranch:
    case spv::OpBranchConditional:
    case spv::OpSelectionMerge:
    case spv::OpLoopMerge:
    case spv::OpStore:
    case spv::OpImageWrite:
    case spv::OpReturnValue:
    case spv::OpControlBarrier:
    case spv::OpMemoryBarrier:
    case spv::OpReturn:
    case spv::OpFunctionEnd:
    case spv::OpCopyMemory:
    case spv::OpAtomicStore: {
      WriteWordCountAndOpcode(Inst);
      for (uint32_t i = 0; i < Ops.size(); i++) {
        WriteOperand(Ops[i]);
      }
      break;
    }
    case spv::OpTypeBool:
    case spv::OpTypeVoid:
    case spv::OpTypeSampler:
    case spv::OpLabel:
    case spv::OpExtInstImport:
    case spv::OpTypePointer:
    case spv::OpTypeRuntimeArray:
    case spv::OpTypeStruct:
    case spv::OpTypeImage:
    case spv::OpTypeSampledImage:
    case spv::OpTypeInt:
    case spv::OpTypeFloat:
    case spv::OpTypeArray:
    case spv::OpTypeVector:
    case spv::OpTypeFunction:
    case spv::OpString: {
      WriteWordCountAndOpcode(Inst);
      WriteResultID(Inst);
      for (uint32_t i = 0; i < Ops.size(); i++) {
        WriteOperand(Ops[i]);
      }
      break;
    }
    case spv::OpFunction:
    case spv::OpFunctionParameter:
    case spv::OpAccessChain:
    case spv::OpPtrAccessChain:
    case spv::OpInBoundsAccessChain:
    case spv::OpUConvert:
    case spv::OpSConvert:
    case spv::OpConvertFToU:
    case spv::OpConvertFToS:
    case spv::OpConvertUToF:
    case spv::OpConvertSToF:
    case spv::OpFConvert:
    case spv::OpConvertPtrToU:
    case spv::OpConvertUToPtr:
    case spv::OpBitcast:
    case spv::OpFNegate:
    case spv::OpIAdd:
    case spv::OpIAddCarry:
    case spv::OpFAdd:
    case spv::OpISub:
    case spv::OpISubBorrow:
    case spv::OpFSub:
    case spv::OpIMul:
    case spv::OpFMul:
    case spv::OpUDiv:
    case spv::OpSDiv:
    case spv::OpFDiv:
    case spv::OpUMod:
    case spv::OpSRem:
    case spv::OpFRem:
    case spv::OpUMulExtended:
    case spv::OpSMulExtended:
    case spv::OpBitwiseOr:
    case spv::OpBitwiseXor:
    case spv::OpBitwiseAnd:
    case spv::OpNot:
    case spv::OpShiftLeftLogical:
    case spv::OpShiftRightLogical:
    case spv::OpShiftRightArithmetic:
    case spv::OpBitCount:
    case spv::OpCompositeConstruct:
    case spv::OpCompositeExtract:
    case spv::OpVectorExtractDynamic:
    case spv::OpCompositeInsert:
    case spv::OpCopyObject:
    case spv::OpVectorInsertDynamic:
    case spv::OpVectorShuffle:
    case spv::OpIEqual:
    case spv::OpINotEqual:
    case spv::OpUGreaterThan:
    case spv::OpUGreaterThanEqual:
    case spv::OpULessThan:
    case spv::OpULessThanEqual:
    case spv::OpSGreaterThan:
    case spv::OpSGreaterThanEqual:
    case spv::OpSLessThan:
    case spv::OpSLessThanEqual:
    case spv::OpFOrdEqual:
    case spv::OpFOrdGreaterThan:
    case spv::OpFOrdGreaterThanEqual:
    case spv::OpFOrdLessThan:
    case spv::OpFOrdLessThanEqual:
    case spv::OpFOrdNotEqual:
    case spv::OpFUnordEqual:
    case spv::OpFUnordGreaterThan:
    case spv::OpFUnordGreaterThanEqual:
    case spv::OpFUnordLessThan:
    case spv::OpFUnordLessThanEqual:
    case spv::OpFUnordNotEqual:
    case spv::OpExtInst:
    case spv::OpIsInf:
    case spv::OpIsNan:
    case spv::OpAny:
    case spv::OpAll:
    case spv::OpUndef:
    case spv::OpConstantNull:
    case spv::OpLogicalOr:
    case spv::OpLogicalAnd:
    case spv::OpLogicalNot:
    case spv::OpLogicalNotEqual:
    case spv::OpConstantComposite:
    case spv::OpSpecConstantComposite:
    case spv::OpConstantTrue:
    case spv::OpConstantFalse:
    case spv::OpConstant:
    case spv::OpSpecConstant:
    case spv::OpVariable:
    case spv::OpFunctionCall:
    case spv::OpSampledImage:
    case spv::OpImageFetch:
    case spv::OpImageRead:
    case spv::OpImageSampleExplicitLod:
    case spv::OpImageQuerySize:
    case spv::OpImageQuerySizeLod:
    case spv::OpSelect:
    case spv::OpPhi:
    case spv::OpLoad:
    case spv::OpAtomicLoad:
    case spv::OpAtomicIAdd:
    case spv::OpAtomicISub:
    case spv::OpAtomicExchange:
    case spv::OpAtomicIIncrement:
    case spv::OpAtomicIDecrement:
    case spv::OpAtomicCompareExchange:
    case spv::OpAtomicUMin:
    case spv::OpAtomicSMin:
    case spv::OpAtomicUMax:
    case spv::OpAtomicSMax:
    case spv::OpAtomicAnd:
    case spv::OpAtomicOr:
    case spv::OpAtomicXor:
    case spv::OpDot:
    case spv::OpGroupNonUniformAll:
    case spv::OpGroupNonUniformAny:
    case spv::OpGroupNonUniformBroadcast:
    case spv::OpGroupNonUniformIAdd:
    case spv::OpGroupNonUniformFAdd:
    case spv::OpGroupNonUniformSMin:
    case spv::OpGroupNonUniformUMin:
    case spv::OpGroupNonUniformFMin:
    case spv::OpGroupNonUniformSMax:
    case spv::OpGroupNonUniformUMax:
    case spv::OpGroupNonUniformFMax: {
      WriteWordCountAndOpcode(Inst);
      WriteOperand(Ops[0]);
      WriteResultID(Inst);
      for (uint32_t i = 1; i < Ops.size(); i++) {
        WriteOperand(Ops[i]);
      }
      break;
    }
    }
  }
}

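// Returns true if |type| can be represented by a null constant. Opaque struct
// types (images and samplers), and non-constant-address-space pointers to
// them, are not nullable; arrays and structs are nullable only when all of
// their elements are.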
bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
  switch (type->getTypeID()) {
  case Type::HalfTyID:
  case Type::FloatTyID:
  case Type::DoubleTyID:
  case Type::IntegerTyID:
  case Type::FixedVectorTyID:
    return true;
  case Type::PointerTyID: {
    const PointerType *pointer_type = cast<PointerType>(type);
    if (pointer_type->getPointerAddressSpace() !=
        AddressSpace::UniformConstant) {
      auto pointee_type = pointer_type->getPointerElementType();
      if (pointee_type->isStructTy() &&
          cast<StructType>(pointee_type)->isOpaque()) {
        // Images and samplers are not nullable.
        return false;
      }
    }
    return true;
  }
  case Type::ArrayTyID:
    return IsTypeNullable(type->getArrayElementType());
  case Type::StructTyID: {
    const StructType *struct_type = cast<StructType>(type);
    // Images and samplers are not nullable.
    if (struct_type->isOpaque())
      return false;
    for (const auto element : struct_type->elements()) {
      if (!IsTypeNullable(element))
        return false;
    }
    return true;
  }
  default:
    return false;
  }
}

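// Caches the struct member offsets and type sizes recorded under the
// remapped-type module metadata so later layout queries do not need to
// re-read the metadata.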
void SPIRVProducerPass::PopulateUBOTypeMaps() {
  if (auto *offsets_md =
          module->getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
    // Metadata is stored as key-value pair operands. The first element of each
    // operand is the type and the second is a vector of offsets.
    for (const auto *operand : offsets_md->operands()) {
      const auto *pair = cast<MDTuple>(operand);
      auto *type =
          cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
      const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
      std::vector<uint32_t> offsets;
      for (const Metadata *offset_md : offset_vector->operands()) {
        const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
        offsets.push_back(static_cast<uint32_t>(
            cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
      }
      RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
    }
  }

  if (auto *sizes_md =
          module->getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
    // Metadata is stored as key-value pair operands. The first element of each
    // operand is the type and the second is a triple of sizes: type size in
    // bits, store size and alloc size.
    for (const auto *operand : sizes_md->operands()) {
      const auto *pair = cast<MDTuple>(operand);
      auto *type =
          cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
      const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
      uint64_t type_size_in_bits =
          cast<ConstantInt>(
              cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
              ->getZExtValue();
      uint64_t type_store_size =
          cast<ConstantInt>(
              cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
              ->getZExtValue();
      uint64_t type_alloc_size =
          cast<ConstantInt>(
              cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
              ->getZExtValue();
      RemappedUBOTypeSizes.insert(std::make_pair(
          type, std::make_tuple(type_size_in_bits, type_store_size,
                                type_alloc_size)));
    }
  }
}

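// The following three helpers return type sizes, preferring the remapped UBO
// sizes gathered above and falling back to the module's DataLayout when the
// type was not remapped.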
uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
                                              const DataLayout &DL) {
  auto iter = RemappedUBOTypeSizes.find(type);
  if (iter != RemappedUBOTypeSizes.end()) {
    return std::get<0>(iter->second);
  }

  return DL.getTypeSizeInBits(type);
}

uint64_t SPIRVProducerPass::GetTypeStoreSize(Type *type, const DataLayout &DL) {
  auto iter = RemappedUBOTypeSizes.find(type);
  if (iter != RemappedUBOTypeSizes.end()) {
    return std::get<1>(iter->second);
  }

  return DL.getTypeStoreSize(type);
}

uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
  auto iter = RemappedUBOTypeSizes.find(type);
  if (iter != RemappedUBOTypeSizes.end()) {
    return std::get<2>(iter->second);
  }

  return DL.getTypeAllocSize(type);
}

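// Returns the byte offset of |member| within |type|, preferring the remapped
// offsets over the DataLayout's struct layout when the type was remapped for
// explicit layout rules.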
uint32_t SPIRVProducerPass::GetExplicitLayoutStructMemberOffset(
    StructType *type, unsigned member, const DataLayout &DL) {
  const auto StructLayout = DL.getStructLayout(type);
  // Search for the correct offsets if this type was remapped.
  std::vector<uint32_t> *offsets = nullptr;
  auto iter = RemappedUBOTypeOffsets.find(type);
  if (iter != RemappedUBOTypeOffsets.end()) {
    offsets = &iter->second;
  }
  auto ByteOffset =
      static_cast<uint32_t>(StructLayout->getElementOffset(member));
  if (offsets) {
    ByteOffset = (*offsets)[member];
  }

  return ByteOffset;
}

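// Records that variable pointers are needed for pointers in |address_space|:
// only the storage buffer flavour for storage buffer pointers, the full
// VariablePointers capability otherwise.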
void SPIRVProducerPass::setVariablePointersCapabilities(
    unsigned address_space) {
  if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
    setVariablePointersStorageBuffer();
  } else {
    setVariablePointers();
  }
}

Value *SPIRVProducerPass::GetBasePointer(Value *v) {
  if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
    return GetBasePointer(gep->getPointerOperand());
  }

  // Conservatively return |v|.
  return v;
}

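// Returns true if |lhs| and |rhs| can be shown to access the same resource:
// either resource accessor calls with matching descriptor set and binding, or
// workgroup variable accessors with matching spec id.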
bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
  if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
    if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
      const auto &lhs_func_info =
          Builtins::Lookup(lhs_call->getCalledFunction());
      const auto &rhs_func_info =
          Builtins::Lookup(rhs_call->getCalledFunction());
      if (lhs_func_info.getType() == Builtins::kClspvResource &&
          rhs_func_info.getType() == Builtins::kClspvResource) {
        // For resource accessors, match descriptor set and binding.
        if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
            lhs_call->getOperand(1) == rhs_call->getOperand(1))
          return true;
      } else if (lhs_func_info.getType() == Builtins::kClspvLocal &&
                 rhs_func_info.getType() == Builtins::kClspvLocal) {
        // For workgroup resources, match spec id.
        if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
          return true;
      }
    }
  }

  return false;
}

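// Returns true when a select or phi over storage buffer pointers can be shown
// to choose between pointers into the same underlying object; null (and,
// with the HackUndef option, undef) incoming values are treated as compatible
// with any object. Conservatively returns false otherwise.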
bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
  assert(inst->getType()->isPointerTy());
  assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
         spv::StorageClassStorageBuffer);
  const bool hack_undef = clspv::Option::HackUndef();
  if (auto *select = dyn_cast<SelectInst>(inst)) {
    auto *true_base = GetBasePointer(select->getTrueValue());
    auto *false_base = GetBasePointer(select->getFalseValue());

    if (true_base == false_base)
      return true;

    // If either the true or false operand is a null, then we satisfy the same
    // object constraint.
    if (auto *true_cst = dyn_cast<Constant>(true_base)) {
      if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
        return true;
    }

    if (auto *false_cst = dyn_cast<Constant>(false_base)) {
      if (false_cst->isNullValue() ||
          (hack_undef && isa<UndefValue>(false_base)))
        return true;
    }

    if (sameResource(true_base, false_base))
      return true;
  } else if (auto *phi = dyn_cast<PHINode>(inst)) {
    Value *value = nullptr;
    bool ok = true;
    for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
      auto *base = GetBasePointer(phi->getIncomingValue(i));
      // Null values satisfy the constraint of selecting from the same object.
      if (!value) {
        if (auto *cst = dyn_cast<Constant>(base)) {
          if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
            value = base;
        } else {
          value = base;
        }
      } else if (base != value) {
        if (auto *base_cst = dyn_cast<Constant>(base)) {
          if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
            continue;
        }

        if (sameResource(value, base))
          continue;

        // Values don't represent the same base.
        ok = false;
      }
    }

    return ok;
  }

  // Conservatively return false.
  return false;
}

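// Returns true if the global-address-space pointer argument |Arg| may ever be
// bound to a coherent resource. The search walks backwards through call sites
// and pointer-producing users until it reaches the clspv resource accessor
// calls and inspects their coherent operand.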
bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
  if (!Arg.getType()->isPointerTy() ||
      Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
    // Only SSBOs need to be annotated as coherent.
    return false;
  }

  DenseSet<Value *> visited;
  std::vector<Value *> stack;
  for (auto *U : Arg.getParent()->users()) {
    if (auto *call = dyn_cast<CallInst>(U)) {
      stack.push_back(call->getOperand(Arg.getArgNo()));
    }
  }

  while (!stack.empty()) {
    Value *v = stack.back();
    stack.pop_back();

    if (!visited.insert(v).second)
      continue;

    auto *resource_call = dyn_cast<CallInst>(v);
    if (resource_call &&
        Builtins::Lookup(resource_call->getCalledFunction()).getType() ==
            Builtins::kClspvResource) {
      // If this is a resource accessor function, check if the coherent operand
      // is set.
      const auto coherent =
          unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
                       ->getZExtValue());
      if (coherent == 1)
        return true;
    } else if (auto *arg = dyn_cast<Argument>(v)) {
      // If this is a function argument, trace through its callers.
      for (auto U : arg->getParent()->users()) {
        if (auto *call = dyn_cast<CallInst>(U)) {
          stack.push_back(call->getOperand(arg->getArgNo()));
        }
      }
    } else if (auto *user = dyn_cast<User>(v)) {
      // If this is a user, traverse all operands that could lead to resource
      // variables.
      for (unsigned i = 0; i != user->getNumOperands(); ++i) {
        Value *operand = user->getOperand(i);
        if (operand->getType()->isPointerTy() &&
            operand->getType()->getPointerAddressSpace() ==
                clspv::AddressSpace::Global) {
          stack.push_back(operand);
        }
      }
    }
  }

  // No coherent resource variables encountered.
  return false;
}

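// Precomputes the merge and continue blocks for each loop header and
// conditional branch, which are used when emitting structured control flow
// (loop and selection merges) over the CFG shaped by StructurizeCFG.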
void SPIRVProducerPass::PopulateStructuredCFGMaps() {
  // First, track loop merges and continues.
  DenseSet<BasicBlock *> LoopMergesAndContinues;
  for (auto &F : *module) {
    if (F.isDeclaration())
      continue;

    DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
    const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>(F).getLoopInfo();
    std::deque<BasicBlock *> order;
    DenseSet<BasicBlock *> visited;
    clspv::ComputeStructuredOrder(&*F.begin(), &DT, LI, &order, &visited);

    for (auto BB : order) {
      auto terminator = BB->getTerminator();
      auto branch = dyn_cast<BranchInst>(terminator);
      if (LI.isLoopHeader(BB)) {
        auto L = LI.getLoopFor(BB);
        BasicBlock *ContinueBB = nullptr;
        BasicBlock *MergeBB = nullptr;

        MergeBB = L->getExitBlock();
        if (!MergeBB) {
          // The StructurizeCFG pass converts the CFG into a triangle shape
          // with single-entry/single-exit regions. As a result, a loop should
          // not have multiple exits.
          llvm_unreachable("Loop has multiple exits???");
        }

        if (L->isLoopLatch(BB)) {
          ContinueBB = BB;
        } else {
          // From SPIR-V spec 2.11, the Continue Target must dominate the
          // back-edge block.
          BasicBlock *Header = L->getHeader();
          BasicBlock *Latch = L->getLoopLatch();
          for (auto *loop_block : L->blocks()) {
            if (loop_block == Header) {
              continue;
            }

            // Check whether this block dominates the block with the back-edge.
            // The loop latch is the single block with a back-edge. If it was
            // possible, StructurizeCFG made the loop conform to this
            // requirement, otherwise |Latch| is a nullptr.
            if (DT.dominates(loop_block, Latch)) {
              ContinueBB = loop_block;
            }
          }

          if (!ContinueBB) {
            llvm_unreachable("Wrong continue block from loop");
          }
        }

        // Record the continue and merge blocks.
        MergeBlocks[BB] = MergeBB;
        ContinueBlocks[BB] = ContinueBB;
        LoopMergesAndContinues.insert(MergeBB);
        LoopMergesAndContinues.insert(ContinueBB);
      } else if (branch && branch->isConditional()) {
        auto L = LI.getLoopFor(BB);
        bool HasBackedge = false;
        while (L && !HasBackedge) {
          if (L->isLoopLatch(BB)) {
            HasBackedge = true;
          }
          L = L->getParentLoop();
        }

        if (!HasBackedge) {
          // Only need a merge if the branch doesn't include a loop break or
          // continue.
          auto true_bb = branch->getSuccessor(0);
          auto false_bb = branch->getSuccessor(1);
          if (!LoopMergesAndContinues.count(true_bb) &&
              !LoopMergesAndContinues.count(false_bb)) {
            // The StructurizeCFG pass has already manipulated the CFG, so the
            // false block of the branch instruction can be used as the merge
            // block.
            MergeBlocks[BB] = false_bb;
          }
        }
      }
    }
  }
}

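// Lazily declares the SPV_KHR_non_semantic_info extension and imports the
// NonSemantic.ClspvReflection.1 extended instruction set, returning its id.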
SPIRVID SPIRVProducerPass::getReflectionImport() {
  if (!ReflectionID.isValid()) {
    addSPIRVInst<kExtensions>(spv::OpExtension, "SPV_KHR_non_semantic_info");
    ReflectionID = addSPIRVInst<kImports>(spv::OpExtInstImport,
                                          "NonSemantic.ClspvReflection.1");
  }
  return ReflectionID;
}

void SPIRVProducerPass::GenerateReflection() {
  GenerateKernelReflection();
  GeneratePushConstantReflection();
  GenerateSpecConstantReflection();
}

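// Emits one reflection instruction per member of the module-scope push
// constant block, recording each member's byte offset and size. Push
// constants that implement kernel arguments are skipped here and reported
// through argument reflection instead.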
void SPIRVProducerPass::GeneratePushConstantReflection() {
  if (auto GV = module->getGlobalVariable(clspv::PushConstantsVariableName())) {
    auto const &DL = module->getDataLayout();
    auto MD = GV->getMetadata(clspv::PushConstantsMetadataName());
    auto STy = cast<StructType>(GV->getValueType());

    for (unsigned i = 0; i < STy->getNumElements(); i++) {
      auto pc = static_cast<clspv::PushConstant>(
          mdconst::extract<ConstantInt>(MD->getOperand(i))->getZExtValue());
      if (pc == PushConstant::KernelArgument)
        continue;

      auto memberType = STy->getElementType(i);
      auto offset = GetExplicitLayoutStructMemberOffset(STy, i, DL);
      unsigned previousOffset = 0;
      if (i > 0) {
        previousOffset = GetExplicitLayoutStructMemberOffset(STy, i - 1, DL);
      }
      auto size = static_cast<uint32_t>(GetTypeSizeInBits(memberType, DL)) / 8;
      assert(isValidExplicitLayout(*module, STy, i,
                                   spv::StorageClassPushConstant, offset,
                                   previousOffset));

      reflection::ExtInst pc_inst = reflection::ExtInstMax;
      switch (pc) {
      case PushConstant::GlobalOffset:
        pc_inst = reflection::ExtInstPushConstantGlobalOffset;
        break;
      case PushConstant::EnqueuedLocalSize:
        pc_inst = reflection::ExtInstPushConstantEnqueuedLocalSize;
        break;
      case PushConstant::GlobalSize:
        pc_inst = reflection::ExtInstPushConstantGlobalSize;
        break;
      case PushConstant::RegionOffset:
        pc_inst = reflection::ExtInstPushConstantRegionOffset;
        break;
      case PushConstant::NumWorkgroups:
        pc_inst = reflection::ExtInstPushConstantNumWorkgroups;
        break;
      case PushConstant::RegionGroupOffset:
        pc_inst = reflection::ExtInstPushConstantRegionGroupOffset;
        break;
      default:
        llvm_unreachable("Unhandled push constant");
        break;
      }

      auto import_id = getReflectionImport();
      SPIRVOperandVec Ops;
      Ops << getSPIRVType(Type::getVoidTy(module->getContext())) << import_id
          << pc_inst << getSPIRVInt32Constant(offset)
          << getSPIRVInt32Constant(size);
      addSPIRVInst(spv::OpExtInst, Ops);
    }
  }
}

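// Emits reflection instructions reporting which spec constant ids were
// assigned to the workgroup size, global offset, and work dimension, so a
// consumer can specialize them without re-deriving the mapping. Local memory
// size spec ids are reported per kernel argument instead.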
void SPIRVProducerPass::GenerateSpecConstantReflection() {
  const uint32_t kMax = std::numeric_limits<uint32_t>::max();
  uint32_t wgsize_id[3] = {kMax, kMax, kMax};
  uint32_t global_offset_id[3] = {kMax, kMax, kMax};
  uint32_t work_dim_id = kMax;
  for (auto pair : clspv::GetSpecConstants(module)) {
    auto kind = pair.first;
    auto id = pair.second;

    // Local memory size is only used for kernel arguments.
    if (kind == SpecConstant::kLocalMemorySize)
      continue;

    switch (kind) {
    case SpecConstant::kWorkgroupSizeX:
      wgsize_id[0] = id;
      break;
    case SpecConstant::kWorkgroupSizeY:
      wgsize_id[1] = id;
      break;
    case SpecConstant::kWorkgroupSizeZ:
      wgsize_id[2] = id;
      break;
    case SpecConstant::kGlobalOffsetX:
      global_offset_id[0] = id;
      break;
    case SpecConstant::kGlobalOffsetY:
      global_offset_id[1] = id;
      break;
    case SpecConstant::kGlobalOffsetZ:
      global_offset_id[2] = id;
      break;
    case SpecConstant::kWorkDim:
      work_dim_id = id;
      break;
    default:
      llvm_unreachable("Unhandled spec constant");
    }
  }

  auto import_id = getReflectionImport();
  auto void_id = getSPIRVType(Type::getVoidTy(module->getContext()));
  SPIRVOperandVec Ops;
  if (wgsize_id[0] != kMax) {
    assert(wgsize_id[1] != kMax);
    assert(wgsize_id[2] != kMax);
    Ops.clear();
    Ops << void_id << import_id << reflection::ExtInstSpecConstantWorkgroupSize
        << getSPIRVInt32Constant(wgsize_id[0])
        << getSPIRVInt32Constant(wgsize_id[1])
        << getSPIRVInt32Constant(wgsize_id[2]);
    addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
  }
  if (global_offset_id[0] != kMax) {
    assert(global_offset_id[1] != kMax);
    assert(global_offset_id[2] != kMax);
    Ops.clear();
    Ops << void_id << import_id << reflection::ExtInstSpecConstantGlobalOffset
        << getSPIRVInt32Constant(global_offset_id[0])
        << getSPIRVInt32Constant(global_offset_id[1])
        << getSPIRVInt32Constant(global_offset_id[2]);
    addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
  }
  if (work_dim_id != kMax) {
    Ops.clear();
    Ops << void_id << import_id << reflection::ExtInstSpecConstantWorkDim
        << getSPIRVInt32Constant(work_dim_id);
    addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
  }
}

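// Emits the per-kernel reflection stream: a kernel declaration, the required
// workgroup size property (when specified), and one argument reflection
// instruction per kernel argument, using either the clustered-argument
// metadata or the resource variable info when no argument map is present.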
void SPIRVProducerPass::GenerateKernelReflection() {
  const auto &DL = module->getDataLayout();
  auto import_id = getReflectionImport();
  auto void_id = getSPIRVType(Type::getVoidTy(module->getContext()));

  for (auto &F : *module) {
    if (F.isDeclaration() || F.getCallingConv() != CallingConv::SPIR_KERNEL) {
      continue;
    }

    // OpString for the kernel name.
    auto kernel_name =
        addSPIRVInst<kDebug>(spv::OpString, F.getName().str().c_str());

    // Kernel declaration
    // Ops[0] = void type
    // Ops[1] = reflection ext import
    // Ops[2] = function id
    // Ops[3] = kernel name
    SPIRVOperandVec Ops;
    Ops << void_id << import_id << reflection::ExtInstKernel << ValueMap[&F]
        << kernel_name;
    auto kernel_decl = addSPIRVInst<kReflection>(spv::OpExtInst, Ops);

    // Generate the required workgroup size property if it was specified.
    if (const MDNode *MD = F.getMetadata("reqd_work_group_size")) {
      uint32_t CurXDimCst = static_cast<uint32_t>(
          mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
      uint32_t CurYDimCst = static_cast<uint32_t>(
          mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
      uint32_t CurZDimCst = static_cast<uint32_t>(
          mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());

      Ops.clear();
      Ops << void_id << import_id
          << reflection::ExtInstPropertyRequiredWorkgroupSize << kernel_decl
          << getSPIRVInt32Constant(CurXDimCst)
          << getSPIRVInt32Constant(CurYDimCst)
          << getSPIRVInt32Constant(CurZDimCst);
      addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
    }

    auto &resource_var_at_index = FunctionToResourceVarsMap[&F];
    auto *func_ty = F.getFunctionType();

    // If we've clustered POD arguments, then argument details are in metadata.
    // If an argument maps to a resource variable, then get descriptor set and
    // binding from the resource variable. Other info comes from the metadata.
    const auto *arg_map = F.getMetadata(clspv::KernelArgMapMetadataName());
    auto local_spec_id_md =
        module->getNamedMetadata(clspv::LocalSpecIdMetadataName());
    if (arg_map) {
      for (const auto &arg : arg_map->operands()) {
        const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
        assert(arg_node->getNumOperands() == 6);
        const auto name =
            dyn_cast<MDString>(arg_node->getOperand(0))->getString();
        const auto old_index =
            dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
        // Remapped argument index
        const int new_index = static_cast<int>(
            dyn_extract<ConstantInt>(arg_node->getOperand(2))->getSExtValue());
        const auto offset =
            dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
        const auto size =
            dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
        const auto argKind = clspv::GetArgKindFromName(
            dyn_cast<MDString>(arg_node->getOperand(5))->getString().str());

        // If this is a local memory argument, find the right spec id for this
        // argument.
        int64_t spec_id = -1;
        if (argKind == clspv::ArgKind::Local) {
          for (auto spec_id_arg : local_spec_id_md->operands()) {
            if ((&F == dyn_cast<Function>(
                           dyn_cast<ValueAsMetadata>(spec_id_arg->getOperand(0))
                               ->getValue())) &&
                (static_cast<uint64_t>(new_index) ==
                 mdconst::extract<ConstantInt>(spec_id_arg->getOperand(1))
                     ->getZExtValue())) {
              spec_id =
                  mdconst::extract<ConstantInt>(spec_id_arg->getOperand(2))
                      ->getSExtValue();
              break;
            }
          }
        }

        // Generate the specific argument instruction.
        const uint32_t ordinal = static_cast<uint32_t>(old_index);
        const uint32_t arg_offset = static_cast<uint32_t>(offset);
        const uint32_t arg_size = static_cast<uint32_t>(size);
        uint32_t elem_size = 0;
        uint32_t descriptor_set = 0;
        uint32_t binding = 0;
        if (spec_id > 0) {
          elem_size = static_cast<uint32_t>(
              GetTypeAllocSize(func_ty->getParamType(unsigned(new_index))
                                   ->getPointerElementType(),
                               DL));
        } else if (new_index >= 0) {
          auto *info = resource_var_at_index[new_index];
          assert(info);
          descriptor_set = info->descriptor_set;
          binding = info->binding;
        }
        AddArgumentReflection(kernel_decl, name.str(), argKind, ordinal,
                              descriptor_set, binding, arg_offset, arg_size,
                              static_cast<uint32_t>(spec_id), elem_size);
      }
    } else {
      // There is no argument map.
      // Take descriptor info from the resource variable calls.
      // Take argument name and size from the arguments list.

      SmallVector<Argument *, 4> arguments;
      for (auto &arg : F.args()) {
        arguments.push_back(&arg);
      }

      unsigned arg_index = 0;
      for (auto *info : resource_var_at_index) {
        if (info) {
          auto arg = arguments[arg_index];
          unsigned arg_size = 0;
          if (info->arg_kind == clspv::ArgKind::Pod ||
              info->arg_kind == clspv::ArgKind::PodUBO ||
              info->arg_kind == clspv::ArgKind::PodPushConstant) {
            arg_size =
                static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
          }

          // Local pointer arguments are unused in this case.
          // offset, spec_id and elem_size always 0.
          AddArgumentReflection(kernel_decl, arg->getName().str(),
                                info->arg_kind, arg_index, info->descriptor_set,
                                info->binding, 0, arg_size, 0, 0);
        }
        arg_index++;
      }
      // Generate mappings for pointer-to-local arguments.
      for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
        Argument *arg = arguments[arg_index];
        auto where = LocalArgSpecIds.find(arg);
        if (where != LocalArgSpecIds.end()) {
          auto &local_arg_info = LocalSpecIdInfoMap[where->second];

          // descriptor_set, binding, offset and size are always 0.
          AddArgumentReflection(kernel_decl, arg->getName().str(),
                                ArgKind::Local, arg_index, 0, 0, 0, 0,
                                static_cast<uint32_t>(local_arg_info.spec_id),
                                static_cast<uint32_t>(GetTypeAllocSize(
                                    local_arg_info.elem_type, DL)));
        }
      }
    }
  }
}

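// Emits the ArgumentInfo and kind-specific argument reflection instructions
// for a single kernel argument. Descriptor set and binding are only attached
// for descriptor-backed kinds; workgroup arguments carry the spec id and
// element size, while POD arguments carry the offset and size.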
void SPIRVProducerPass::AddArgumentReflection(
    SPIRVID kernel_decl, const std::string &name, clspv::ArgKind arg_kind,
    uint32_t ordinal, uint32_t descriptor_set, uint32_t binding,
    uint32_t offset, uint32_t size, uint32_t spec_id, uint32_t elem_size) {
  // Generate ArgumentInfo for this argument.
  // TODO: generate remaining optional operands.
  auto import_id = getReflectionImport();
  auto arg_name = addSPIRVInst<kDebug>(spv::OpString, name.c_str());
  auto void_id = getSPIRVType(Type::getVoidTy(module->getContext()));
  SPIRVOperandVec Ops;
  Ops << void_id << import_id << reflection::ExtInstArgumentInfo << arg_name;
  auto arg_info = addSPIRVInst<kReflection>(spv::OpExtInst, Ops);

  Ops.clear();
  Ops << void_id << import_id;
  reflection::ExtInst ext_inst = reflection::ExtInstMax;
  // Determine the extended instruction.
  switch (arg_kind) {
  case clspv::ArgKind::Buffer:
    ext_inst = reflection::ExtInstArgumentStorageBuffer;
    break;
  case clspv::ArgKind::BufferUBO:
    ext_inst = reflection::ExtInstArgumentUniform;
    break;
  case clspv::ArgKind::Local:
    ext_inst = reflection::ExtInstArgumentWorkgroup;
    break;
  case clspv::ArgKind::Pod:
    ext_inst = reflection::ExtInstArgumentPodStorageBuffer;
    break;
  case clspv::ArgKind::PodUBO:
    ext_inst = reflection::ExtInstArgumentPodUniform;
    break;
  case clspv::ArgKind::PodPushConstant:
    ext_inst = reflection::ExtInstArgumentPodPushConstant;
    break;
  case clspv::ArgKind::SampledImage:
    ext_inst = reflection::ExtInstArgumentSampledImage;
    break;
  case clspv::ArgKind::StorageImage:
    ext_inst = reflection::ExtInstArgumentStorageImage;
    break;
  case clspv::ArgKind::Sampler:
    ext_inst = reflection::ExtInstArgumentSampler;
    break;
  default:
    llvm_unreachable("Unhandled argument reflection");
    break;
  }
  Ops << ext_inst << kernel_decl << getSPIRVInt32Constant(ordinal);

  // Add descriptor set and binding for applicable arguments.
  switch (arg_kind) {
  case clspv::ArgKind::Buffer:
  case clspv::ArgKind::BufferUBO:
  case clspv::ArgKind::Pod:
  case clspv::ArgKind::PodUBO:
  case clspv::ArgKind::SampledImage:
  case clspv::ArgKind::StorageImage:
  case clspv::ArgKind::Sampler:
    Ops << getSPIRVInt32Constant(descriptor_set)
        << getSPIRVInt32Constant(binding);
    break;
  default:
    break;
  }

  // Add remaining operands for arguments.
  switch (arg_kind) {
  case clspv::ArgKind::Local:
    Ops << getSPIRVInt32Constant(spec_id) << getSPIRVInt32Constant(elem_size);
    break;
  case clspv::ArgKind::Pod:
  case clspv::ArgKind::PodUBO:
  case clspv::ArgKind::PodPushConstant:
    Ops << getSPIRVInt32Constant(offset) << getSPIRVInt32Constant(size);
    break;
  default:
    break;
  }
  Ops << arg_info;
  addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
}