// Copyright 2017 The Clspv Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifdef _MSC_VER
#pragma warning(push, 0)
#endif

#include <cassert>
#include <cstring>
#include <iomanip>
#include <list>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <tuple>
#include <unordered_set>
#include <utility>

#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/UniqueVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/FileSystem.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Cloning.h"

// enable spv::HasResultAndType
#define SPV_ENABLE_UTILITY_CODE
#include "spirv/unified1/spirv.hpp"

#include "clspv/AddressSpace.h"
#include "clspv/Option.h"
#include "clspv/PushConstant.h"
#include "clspv/SpecConstant.h"
#include "clspv/spirv_c_strings.hpp"
#include "clspv/spirv_glsl.hpp"
#include "clspv/spirv_reflection.hpp"

#include "ArgKind.h"
#include "Builtins.h"
#include "ComputeStructuredOrder.h"
#include "ConstantEmitter.h"
#include "Constants.h"
#include "DescriptorCounter.h"
#include "Layout.h"
#include "NormalizeGlobalVariable.h"
#include "Passes.h"
#include "SpecConstant.h"
#include "Types.h"

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

using namespace llvm;
using namespace clspv;
using namespace clspv::Builtins;
using namespace clspv::Option;
using namespace mdconst;

namespace {

cl::opt<std::string> TestOutFile("producer-out-file", cl::init("test.spv"),
                                 cl::ReallyHidden,
                                 cl::desc("SPIRVProducer testing output file"));

cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

cl::opt<bool>
    ShowProducerIR("show-producer-ir", cl::init(false), cl::ReallyHidden,
                   cl::desc("Dump the IR at the start of SPIRVProducer"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);

// SPIRV Module Sections (per 2.4 of the SPIR-V spec)
// These are used to collect SPIRVInstructions by type on-the-fly.
enum SPIRVSection {
  kCapabilities,
  kExtensions,
  kImports,
  kMemoryModel,
  kEntryPoints,
  kExecutionModes,

  kDebug,
  kAnnotations,

  kTypes,
  kConstants = kTypes,
  kGlobalVariables,

  kFunctions,

  // This is not a section of the SPIR-V spec and should always immediately
  // precede kSectionCount. It is a convenient place for the embedded
  // reflection data.
  kReflection,
  kSectionCount
};
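// Note: kConstants deliberately aliases kTypes because the SPIR-V module
// layout keeps type declarations and constant instructions in one combined
// section (they may reference each other), so both streams are collected
// into the same bucket here.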

class SPIRVID {
  uint32_t id;

public:
  SPIRVID(uint32_t _id = 0) : id(_id) {}
  uint32_t get() const { return id; }
  bool isValid() const { return id != 0; }
  bool operator==(const SPIRVID &that) const { return id == that.id; }
  bool operator<(const SPIRVID &that) const { return id < that.id; }
};
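// A default-constructed SPIRVID holds 0, which is never a valid SPIR-V result
// ID (valid IDs start at 1), so 0 doubles as the "unassigned" sentinel tested
// by isValid().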

enum SPIRVOperandType { NUMBERID, LITERAL_WORD, LITERAL_DWORD, LITERAL_STRING };

struct SPIRVOperand {
  SPIRVOperand(SPIRVOperandType Ty, uint32_t Num) : Type(Ty) {
    LiteralNum[0] = Num;
  }
  SPIRVOperand(SPIRVOperandType Ty, const char *Str)
      : Type(Ty), LiteralStr(Str) {}
  SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(ArrayRef<uint32_t> NumVec) {
    auto sz = NumVec.size();
    assert(sz >= 1 && sz <= 2);
    Type = sz == 1 ? LITERAL_WORD : LITERAL_DWORD;
    LiteralNum[0] = NumVec[0];
    if (sz == 2) {
      LiteralNum[1] = NumVec[1];
    }
  }

  SPIRVOperandType getType() const { return Type; }
  uint32_t getNumID() const { return LiteralNum[0]; }
  std::string getLiteralStr() const { return LiteralStr; }
  const uint32_t *getLiteralNum() const { return LiteralNum; }

  uint32_t GetNumWords() const {
    switch (Type) {
    case NUMBERID:
    case LITERAL_WORD:
      return 1;
    case LITERAL_DWORD:
      return 2;
    case LITERAL_STRING:
      // Account for the terminating null character.
      return uint32_t((LiteralStr.size() + 4) / 4);
    }
    llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
  }
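  // For example, the literal string "abc" packs into a single word
  // ((3 + 4) / 4 == 1, three characters plus the terminating null), while
  // "data" needs two words ((4 + 4) / 4 == 2).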

private:
  SPIRVOperandType Type;
  std::string LiteralStr;
  uint32_t LiteralNum[2];
};

typedef SmallVector<SPIRVOperand, 4> SPIRVOperandVec;

struct SPIRVInstruction {
  // Primary constructor must have Opcode, initializes WordCount based on ResID.
  SPIRVInstruction(spv::Op Opc, SPIRVID ResID = 0)
      : Opcode(static_cast<uint16_t>(Opc)) {
    setResult(ResID);
  }

  // Creates an instruction with an opcode and no result ID, and with the given
  // operands. This calls primary constructor to initialize Opcode, WordCount.
  // Takes ownership of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandVec &Ops) : SPIRVInstruction(Opc) {
    setOperands(Ops);
  }
  // Creates an instruction with an opcode and a result ID, and with the given
  // operands. This calls primary constructor to initialize Opcode, WordCount.
  // Takes ownership of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVID ResID, SPIRVOperandVec &Ops)
      : SPIRVInstruction(Opc, ResID) {
    setOperands(Ops);
  }

  uint32_t getWordCount() const { return WordCount; }
  uint16_t getOpcode() const { return Opcode; }
  SPIRVID getResultID() const { return ResultID; }
  const SPIRVOperandVec &getOperands() const { return Operands; }

private:
  void setResult(SPIRVID ResID = 0) {
    WordCount = 1 + (ResID.isValid() ? 1 : 0);
    ResultID = ResID;
  }

  void setOperands(SPIRVOperandVec &Ops) {
    assert(Operands.empty());
    Operands = std::move(Ops);
    for (auto &opd : Operands) {
      WordCount += uint16_t(opd.GetNumWords());
    }
  }

private:
  uint32_t WordCount; // Check the 16-bit bound at code generation time.
  uint16_t Opcode;
  SPIRVID ResultID;
  SPIRVOperandVec Operands;
};
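// In the emitted binary each instruction starts with a single word holding
// (WordCount << 16) | Opcode, per the SPIR-V physical layout; that is why
// WordCount must ultimately fit in 16 bits (see WriteWordCountAndOpcode,
// declared below).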

struct SPIRVProducerPass final : public ModulePass {
  static char ID;

  typedef DenseMap<Type *, SPIRVID> TypeMapType;
  typedef DenseMap<Type *, SmallVector<SPIRVID, 2>> LayoutTypeMapType;
  typedef UniqueVector<Type *> TypeList;
  typedef DenseMap<Value *, SPIRVID> ValueMapType;
  typedef std::list<SPIRVID> SPIRVIDListType;
  typedef std::vector<std::pair<Value *, SPIRVID>> EntryPointVecType;
  typedef std::set<uint32_t> CapabilitySetType;
  typedef std::list<SPIRVInstruction> SPIRVInstructionList;
  typedef std::map<spv::BuiltIn, SPIRVID> BuiltinConstantMapType;
  // A vector of pairs, each of which is:
  // - the LLVM instruction that we will later generate SPIR-V code for
  // - the SPIR-V instruction placeholder that will be replaced
  typedef std::vector<std::pair<Value *, SPIRVInstruction *>>
      DeferredInstVecType;
  typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
      GlobalConstFuncMapType;

  SPIRVProducerPass(
      raw_pwrite_stream *out,
      SmallVectorImpl<std::pair<unsigned, std::string>> *samplerMap,
      bool outputCInitList)
      : ModulePass(ID), module(nullptr), samplerMap(samplerMap), out(out),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(out),
        outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0), TestOutput(false) {
    addCapability(spv::CapabilityShader);
    Ptr = this;
  }

  SPIRVProducerPass()
      : ModulePass(ID), module(nullptr), samplerMap(nullptr), out(nullptr),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(nullptr),
        outputCInitList(false), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0), TestOutput(true) {
    addCapability(spv::CapabilityShader);
    Ptr = this;
  }

  virtual ~SPIRVProducerPass() {}

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<LoopInfoWrapperPass>();
  }

  virtual bool runOnModule(Module &module) override;

  // output the SPIR-V header block
  void outputHeader();

  // patch the SPIR-V header block
  void patchHeader();

  CapabilitySetType &getCapabilitySet() { return CapabilitySet; }
  TypeMapType &getImageTypeMap() { return ImageTypeMap; }
  TypeList &getTypeList() { return Types; }
  ValueMapType &getValueMap() { return ValueMap; }
  SPIRVInstructionList &getSPIRVInstList(SPIRVSection Section) {
    return SPIRVSections[Section];
  };
  EntryPointVecType &getEntryPointVec() { return EntryPointVec; }
  DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; }
  SPIRVIDListType &getEntryPointInterfacesList() {
    return EntryPointInterfacesList;
  }
  SPIRVID getOpExtInstImportID();
  std::vector<SPIRVID> &getBuiltinDimVec() { return BuiltinDimensionVec; }

  bool hasVariablePointersStorageBuffer() {
    return HasVariablePointersStorageBuffer;
  }
  void setVariablePointersStorageBuffer() {
    if (!HasVariablePointersStorageBuffer) {
      addCapability(spv::CapabilityVariablePointersStorageBuffer);
      HasVariablePointersStorageBuffer = true;
    }
  }
  bool hasVariablePointers() { return HasVariablePointers; }
  void setVariablePointers() {
    if (!HasVariablePointers) {
      addCapability(spv::CapabilityVariablePointers);
      HasVariablePointers = true;
    }
  }
  SmallVectorImpl<std::pair<unsigned, std::string>> *getSamplerMap() {
    return samplerMap;
  }
  GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
    return GlobalConstFuncTypeMap;
  }
  SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
    return GlobalConstArgumentSet;
  }
  TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }

  void GenerateLLVMIRInfo();
  // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
  // *not* be converted to a storage buffer, replace each such global variable
  // with one in the storage class expected by SPIR-V.
  void FindGlobalConstVars();
  // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
  // ModuleOrderedResourceVars.
  void FindResourceVars();
  void FindTypePerGlobalVar(GlobalVariable &GV);
  void FindTypesForSamplerMap();
  void FindTypesForResourceVars();
  // Inserts |Ty| and relevant sub-types into the |Types| member, indicating
  // that |Ty| and its subtypes will need a corresponding SPIR-V type.
  void FindType(Type *Ty);

  // Returns the canonical type of |type|.
  //
  // By default, clspv maps both __constant and __global address space pointers
  // to StorageBuffer storage class. In order to prevent duplicate types from
  // being generated, clspv uses the canonical type as a representative.
  Type *CanonicalType(Type *type);

  // Lookup or create Types, Constants.
  // Returns SPIRVID once it has been created.
  SPIRVID getSPIRVType(Type *Ty, bool needs_layout);
  SPIRVID getSPIRVType(Type *Ty);
  SPIRVID getSPIRVConstant(Constant *Cst);
  SPIRVID getSPIRVInt32Constant(uint32_t CstVal);
  // Lookup SPIRVID of llvm::Value, may create Constant.
  SPIRVID getSPIRVValue(Value *V);

  bool PointerRequiresLayout(unsigned aspace);

  SPIRVID getSPIRVBuiltin(spv::BuiltIn BID, spv::Capability Cap);

  // Generates instructions for SPIR-V types corresponding to the LLVM types
  // saved in the |Types| member. A type follows its subtypes. IDs are
  // allocated sequentially starting with the current value of nextID, and
  // with a type following its subtypes. Also updates nextID to just beyond
  // the last generated ID.
  void GenerateSPIRVTypes();
  void GenerateModuleInfo();
  void GenerateGlobalVar(GlobalVariable &GV);
  void GenerateWorkgroupVars();
  // Generate reflection instructions for resource variables associated with
  // arguments to F.
  void GenerateSamplers();
  // Generate OpVariables for %clspv.resource.var.* calls.
  void GenerateResourceVars();
  void GenerateFuncPrologue(Function &F);
  void GenerateFuncBody(Function &F);
  void GenerateEntryPointInitialStores();
  spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
  spv::Op GetSPIRVCastOpcode(Instruction &I);
  spv::Op GetSPIRVBinaryOpcode(Instruction &I);
  SPIRVID GenerateClspvInstruction(CallInst *Call,
                                   const FunctionInfo &FuncInfo);
  SPIRVID GenerateImageInstruction(CallInst *Call,
                                   const FunctionInfo &FuncInfo);
  SPIRVID GenerateSubgroupInstruction(CallInst *Call,
                                      const FunctionInfo &FuncInfo);
  SPIRVID GenerateInstructionFromCall(CallInst *Call);
  void GenerateInstruction(Instruction &I);
  void GenerateFuncEpilogue();
  void HandleDeferredInstruction();
  void HandleDeferredDecorations();
  bool is4xi8vec(Type *Ty) const;
  spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
  spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
  spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
  // Returns the GLSL extended instruction enum that the given function
  // call maps to. If none, then returns the 0 value, i.e. GLSLstd450Bad.
  glsl::ExtInst getExtInstEnum(const Builtins::FunctionInfo &func_info);
  // Returns the GLSL extended instruction enum indirectly used by the given
  // function. That is, to implement the given function, we use an extended
  // instruction plus one more instruction. If none, then returns the 0 value,
  // i.e. GLSLstd450Bad.
  glsl::ExtInst getIndirectExtInstEnum(const Builtins::FunctionInfo &func_info);
  // Returns the single GLSL extended instruction used directly or
  // indirectly by the given function call.
  glsl::ExtInst
  getDirectOrIndirectExtInstEnum(const Builtins::FunctionInfo &func_info);
  void WriteOneWord(uint32_t Word);
  void WriteResultID(const SPIRVInstruction &Inst);
  void WriteWordCountAndOpcode(const SPIRVInstruction &Inst);
  void WriteOperand(const SPIRVOperand &Op);
  void WriteSPIRVBinary();
  void WriteSPIRVBinary(SPIRVInstructionList &SPIRVInstList);

  // Returns true if |type| is compatible with OpConstantNull.
  bool IsTypeNullable(const Type *type) const;

  // Populate UBO remapped type maps.
  void PopulateUBOTypeMaps();

  // Populate the merge and continue block maps.
  void PopulateStructuredCFGMaps();

  // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
  // uses the internal map, otherwise it falls back on the data layout.
  uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
  uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);
  uint32_t GetExplicitLayoutStructMemberOffset(StructType *type,
                                               unsigned member,
                                               const DataLayout &DL);

  // Returns the base pointer of |v|.
  Value *GetBasePointer(Value *v);

  // Add Capability if not already (e.g. CapabilityGroupNonUniformBroadcast)
  void addCapability(uint32_t c) { CapabilitySet.emplace(c); }

  // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| based on
  // |address_space|.
  void setVariablePointersCapabilities(unsigned address_space);

  // Returns true if |lhs| and |rhs| represent the same resource or workgroup
  // variable.
  bool sameResource(Value *lhs, Value *rhs) const;

  // Returns true if |inst| is phi or select that selects from the same
  // structure (or null).
  bool selectFromSameObject(Instruction *inst);

  // Returns true if |Arg| is called with a coherent resource.
  bool CalledWithCoherentResource(Argument &Arg);

  //
  // Primary interface for adding SPIRVInstructions to a SPIRVSection.
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Opcode, SPIRVOperandVec &Operands) {
    bool has_result, has_result_type;
    spv::HasResultAndType(Opcode, &has_result, &has_result_type);
    SPIRVID RID = has_result ? incrNextID() : 0;
    SPIRVSections[TSection].emplace_back(Opcode, RID, Operands);
    return RID;
  }
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Op) {
    SPIRVOperandVec Ops;
    return addSPIRVInst<TSection>(Op, Ops);
  }
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Op, uint32_t V) {
    SPIRVOperandVec Ops;
    Ops.emplace_back(LITERAL_WORD, V);
    return addSPIRVInst<TSection>(Op, Ops);
  }
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Op, const char *V) {
    SPIRVOperandVec Ops;
    Ops.emplace_back(LITERAL_STRING, V);
    return addSPIRVInst<TSection>(Op, Ops);
  }

  //
  // Add placeholder for llvm::Value that references future values.
  // Must have result ID just in case final SPIRVInstruction requires.
  SPIRVID addSPIRVPlaceholder(Value *I) {
    SPIRVID RID = incrNextID();
    SPIRVOperandVec Ops;
    SPIRVSections[kFunctions].emplace_back(spv::OpExtInst, RID, Ops);
    DeferredInstVec.push_back({I, &SPIRVSections[kFunctions].back()});
    return RID;
  }
  // Replace placeholder with actual SPIRVInstruction on the final pass
  // (HandleDeferredInstruction).
  SPIRVID replaceSPIRVInst(SPIRVInstruction *I, spv::Op Opcode,
                           SPIRVOperandVec &Operands) {
    bool has_result, has_result_type;
    spv::HasResultAndType(Opcode, &has_result, &has_result_type);
    SPIRVID RID = has_result ? I->getResultID() : 0;
    *I = SPIRVInstruction(Opcode, RID, Operands);
    return RID;
  }
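  // Note: the placeholder above uses OpExtInst purely as a dummy opcode that
  // carries a result ID; the reserved ID keeps forward references valid until
  // HandleDeferredInstruction swaps in the real opcode and operands via
  // replaceSPIRVInst.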

  //
  // Add global variable and capture entry point interface
  SPIRVID addSPIRVGlobalVariable(const SPIRVID &TypeID, spv::StorageClass SC,
                                 const SPIRVID &InitID = SPIRVID(),
                                 bool add_interface = false);

  SPIRVID getReflectionImport();
  void GenerateReflection();
  void GenerateKernelReflection();
  void GeneratePushConstantReflection();
  void GenerateSpecConstantReflection();
  void AddArgumentReflection(SPIRVID kernel_decl, const std::string &name,
                             clspv::ArgKind arg_kind, uint32_t ordinal,
                             uint32_t descriptor_set, uint32_t binding,
                             uint32_t offset, uint32_t size, uint32_t spec_id,
                             uint32_t elem_size);

private:
  Module *module;

  // Set of Capabilities required
  CapabilitySetType CapabilitySet;

  // Map from clspv::BuiltinType to SPIRV Global Variable
  BuiltinConstantMapType BuiltinConstantMap;

  SmallVectorImpl<std::pair<unsigned, std::string>> *samplerMap;
  raw_pwrite_stream *out;

  // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
  // convert to other formats on demand?

  // When emitting a C initialization list, the WriteSPIRVBinary method
  // will actually write its words to this vector via binaryTempOut.
  SmallVector<char, 100> binaryTempUnderlyingVector;
  raw_svector_ostream binaryTempOut;

  // Binary output writes to this stream, which might be |out| or
  // |binaryTempOut|. It's the latter when we really want to write a C
  // initializer list.
  raw_pwrite_stream *binaryOut;
  const bool outputCInitList; // If true, output looks like {0x7023, ... , 5}
  uint64_t patchBoundOffset;
  uint32_t nextID;

  SPIRVID incrNextID() { return nextID++; }

  // ID for OpTypeInt 32 1.
  SPIRVID int32ID;
  // ID for OpTypeVector %int 4.
  SPIRVID v4int32ID;

  // Maps an LLVM type to the corresponding SPIR-V type Id(s).
  LayoutTypeMapType TypeMap;
  // Maps an LLVM image type to its SPIR-V ID.
  TypeMapType ImageTypeMap;
  // A unique-vector of LLVM types that map to a SPIR-V type.
  TypeList Types;
  // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
  ValueMapType ValueMap;
  SPIRVInstructionList SPIRVSections[kSectionCount];

  EntryPointVecType EntryPointVec;
  DeferredInstVecType DeferredInstVec;
  SPIRVIDListType EntryPointInterfacesList;
  SPIRVID OpExtInstImportID;
  std::vector<SPIRVID> BuiltinDimensionVec;
  bool HasVariablePointersStorageBuffer;
  bool HasVariablePointers;
  Type *SamplerTy;
  DenseMap<unsigned, SPIRVID> SamplerLiteralToIDMap;

  // If a function F has a pointer-to-__constant parameter, then this variable
  // will map F's type to (G, index of the parameter), where in a first phase
  // G is F's type.
  // TODO(dneto): This doesn't seem general enough? A function might have
  // more than one such parameter.
  GlobalConstFuncMapType GlobalConstFuncTypeMap;
  SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
  // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
  // or array types, and which point into transparent memory (StorageBuffer
  // storage class). These will require an ArrayStride decoration.
  // See SPV_KHR_variable_pointers rev 13.
  TypeList TypesNeedingArrayStride;

  // This is truly ugly, but works around what look like driver bugs.
  // For get_local_size, an earlier part of the flow has created a module-scope
  // variable in Private address space to hold the value for the workgroup
  // size. Its initializer is a uint3 value marked as builtin WorkgroupSize.
  // When this is present, save the IDs of the initializer value and variable
  // in these two variables. We only ever do a vector load from it, and
  // when we see one of those, substitute just the value of the initializer.
  // This mimics what Glslang does, and that's what drivers are used to.
  // TODO(dneto): Remove this once drivers are fixed.
  SPIRVID WorkgroupSizeValueID;
  SPIRVID WorkgroupSizeVarID;

  bool TestOutput;

  // Bookkeeping for mapping kernel arguments to resource variables.
  struct ResourceVarInfo {
    ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
                    Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
        : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
          var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
          addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
    const int index; // Index into ResourceVarInfoList
    const unsigned descriptor_set;
    const unsigned binding;
    Function *const var_fn; // The @clspv.resource.var.* function.
    const clspv::ArgKind arg_kind;
    const int coherent;
    const unsigned addr_space; // The LLVM address space
    // The SPIR-V ID of the OpVariable. Not populated at construction time.
    SPIRVID var_id;
  };
631 // A list of resource var info. Each one correponds to a module-scope
  // resource variable we will have to create. Resource var indices are
  // indices into this vector.
  SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
  // This is a vector of pointers of all the resource vars, but ordered by
  // kernel function, and then by argument.
  UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
  // Map a function to the ordered list of resource variables it uses, one for
  // each argument. If an argument does not use a resource variable, it
  // will have a null pointer entry.
  using FunctionToResourceVarsMapType =
      DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
  FunctionToResourceVarsMapType FunctionToResourceVarsMap;

  // What LLVM types map to SPIR-V types needing layout? These are the
  // arrays and structures supporting storage buffers and uniform buffers.
  TypeList TypesNeedingLayout;
  // What LLVM struct types map to a SPIR-V struct type with Block decoration?
  UniqueVector<StructType *> StructTypesNeedingBlock;
  // For a call that represents a load from an opaque type (samplers, images),
  // map it to the variable id it should load from.
  DenseMap<CallInst *, SPIRVID> ResourceVarDeferredLoadCalls;

  // An ordered list of the kernel arguments of type pointer-to-local.
  using LocalArgList = SmallVector<Argument *, 8>;
  LocalArgList LocalArgs;
  // Information about a pointer-to-local argument.
  struct LocalArgInfo {
    // The SPIR-V ID of the array variable.
    SPIRVID variable_id;
    // The element type of the array.
    Type *elem_type;
    // The ID of the array size.
    SPIRVID array_size_id;
    // The ID of the array type.
    SPIRVID array_type_id;
    // The ID of the pointer to the array type.
    SPIRVID ptr_array_type_id;
    // The specialization constant ID of the array size.
    int spec_id;
  };
  // A mapping from Argument to its assigned SpecId.
  DenseMap<const Argument *, int> LocalArgSpecIds;
  // A mapping from SpecId to its LocalArgInfo.
  DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
  // A mapping from a remapped type to its real offsets.
  DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
  // A mapping from a remapped type to its real sizes.
  DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
      RemappedUBOTypeSizes;

  // Maps basic block to its merge block.
  DenseMap<BasicBlock *, BasicBlock *> MergeBlocks;
  // Maps basic block to its continue block.
  DenseMap<BasicBlock *, BasicBlock *> ContinueBlocks;

  SPIRVID ReflectionID;
  DenseMap<Function *, SPIRVID> KernelDeclarations;

public:
  static SPIRVProducerPass *Ptr;
};

} // namespace

char SPIRVProducerPass::ID = 0;
SPIRVProducerPass *SPIRVProducerPass::Ptr = nullptr;
INITIALIZE_PASS(SPIRVProducerPass, "SPIRVProducerPass", "SPIR-V output pass",
                false, false)

namespace clspv {
ModulePass *createSPIRVProducerPass(
    raw_pwrite_stream *out,
    SmallVectorImpl<std::pair<unsigned, std::string>> *samplerMap,
    bool outputCInitList) {
  return new SPIRVProducerPass(out, samplerMap, outputCInitList);
}

ModulePass *createSPIRVProducerPass() { return new SPIRVProducerPass(); }
} // namespace clspv

namespace {
SPIRVOperandVec &operator<<(SPIRVOperandVec &list, uint32_t num) {
  list.emplace_back(LITERAL_WORD, num);
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, int32_t num) {
  list.emplace_back(LITERAL_WORD, static_cast<uint32_t>(num));
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, ArrayRef<uint32_t> num_vec) {
  list.emplace_back(num_vec);
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, StringRef str) {
  list.emplace_back(LITERAL_STRING, str);
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, Type *t) {
  list.emplace_back(NUMBERID, SPIRVProducerPass::Ptr->getSPIRVType(t).get());
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, Value *v) {
  list.emplace_back(NUMBERID, SPIRVProducerPass::Ptr->getSPIRVValue(v).get());
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, const SPIRVID &v) {
  list.emplace_back(NUMBERID, v.get());
  return list;
}
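
// These operator<< overloads let operand lists be assembled fluently. A
// hypothetical sketch (the identifiers below are invented for illustration):
//   SPIRVOperandVec Ops;
//   Ops << result_type_id << base_value << uint32_t(member_index);
// appends a NUMBERID operand for the SPIRVID, a NUMBERID operand resolved
// through getSPIRVValue for the llvm::Value, and a LITERAL_WORD operand for
// the integer literal.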
} // namespace

bool SPIRVProducerPass::runOnModule(Module &M) {
  // TODO(sjw): Need to reset all data members for each Module, or better
  // yet create a new SPIRVProducer for every module.. For now only
  // allow 1 call.
  assert(module == nullptr);
  module = &M;
  if (ShowProducerIR) {
    llvm::outs() << *module << "\n";
  }

  SmallVector<char, 10000> *binary = nullptr;
  if (TestOutput) {
    binary = new SmallVector<char, 10000>();
    out = new raw_svector_ostream(*binary);
  }

  binaryOut = outputCInitList ? &binaryTempOut : out;

  PopulateUBOTypeMaps();
  PopulateStructuredCFGMaps();

  // SPIR-V always begins with its header information
  outputHeader();

  // Gather information from the LLVM IR that we require.
  GenerateLLVMIRInfo();

  // Collect information on global variables too.
  for (GlobalVariable &GV : module->globals()) {
    // If the GV is one of our special __spirv_* variables, remove the
    // initializer as it was only placed there to force LLVM to not throw the
    // value away.
    if (GV.getName().startswith("__spirv_") ||
        GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
      GV.setInitializer(nullptr);
    }

    // Collect types' information from global variable.
    FindTypePerGlobalVar(GV);
  }

  // Generate SPIRV instructions for types.
  GenerateSPIRVTypes();

  // Generate literal samplers if necessary.
  GenerateSamplers();

  // Generate SPIRV variables.
  for (GlobalVariable &GV : module->globals()) {
    GenerateGlobalVar(GV);
  }
  GenerateResourceVars();
  GenerateWorkgroupVars();

  // Generate SPIRV instructions for each function.
  for (Function &F : *module) {
    if (F.isDeclaration()) {
      continue;
    }

    // Generate Function Prologue.
    GenerateFuncPrologue(F);

    // Generate SPIRV instructions for function body.
    GenerateFuncBody(F);

    // Generate Function Epilogue.
    GenerateFuncEpilogue();
  }

  HandleDeferredInstruction();
  HandleDeferredDecorations();

  // Generate SPIRV module information.
  GenerateModuleInfo();

  // Generate embedded reflection information.
  GenerateReflection();

  WriteSPIRVBinary();

  // We need to patch the SPIR-V header to set bound correctly.
  patchHeader();

  if (outputCInitList) {
    bool first = true;
    std::ostringstream os;

    auto emit_word = [&os, &first](uint32_t word) {
      if (!first)
        os << ",\n";
      os << word;
      first = false;
    };

    os << "{";
    const std::string str(binaryTempOut.str());
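    // Repack the byte stream into 32-bit words, least-significant byte first;
    // on a little-endian host the leading bytes {0x03, 0x02, 0x23, 0x07}
    // re-form the SPIR-V magic number 0x07230203.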
    for (unsigned i = 0; i < str.size(); i += 4) {
      const uint32_t a = static_cast<unsigned char>(str[i]);
      const uint32_t b = static_cast<unsigned char>(str[i + 1]);
      const uint32_t c = static_cast<unsigned char>(str[i + 2]);
      const uint32_t d = static_cast<unsigned char>(str[i + 3]);
      emit_word(a | (b << 8) | (c << 16) | (d << 24));
    }
    os << "}\n";
    *out << os.str();
  }

  if (TestOutput) {
    std::error_code error;
    raw_fd_ostream test_output(TestOutFile, error, llvm::sys::fs::FA_Write);
    test_output << static_cast<raw_svector_ostream *>(out)->str();
    delete out;
    delete binary;
  }

  return false;
}

void SPIRVProducerPass::outputHeader() {
  binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
                   sizeof(spv::MagicNumber));
  uint32_t minor = 0;
  switch (SpvVersion()) {
  case SPIRVVersion::SPIRV_1_0:
    minor = 0;
    break;
  case SPIRVVersion::SPIRV_1_3:
    minor = 3;
    break;
  case SPIRVVersion::SPIRV_1_4:
    minor = 4;
    break;
  case SPIRVVersion::SPIRV_1_5:
    minor = 5;
    break;
  default:
    llvm_unreachable("unhandled spir-v version");
    break;
  }
  uint32_t version = (1 << 16) | (minor << 8);
  binaryOut->write(reinterpret_cast<const char *>(&version), sizeof(version));

  // use Google's vendor ID
  const uint32_t vendor = 21 << 16;
  binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));

  // we record where we need to come back to and patch in the bound value
  patchBoundOffset = binaryOut->tell();

  // output a bad bound for now
  binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));

  // output the schema (reserved for use and must be 0)
  const uint32_t schema = 0;
  binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
}
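
// For reference, the five-word SPIR-V module header written above is:
//   word 0: magic number (spv::MagicNumber, 0x07230203)
//   word 1: version, packed as (major << 16) | (minor << 8)
//   word 2: generator ID (Google's vendor ID 21 in the upper 16 bits)
//   word 3: bound, a placeholder patched later by patchHeader()
//   word 4: schema, reserved and written as 0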

void SPIRVProducerPass::patchHeader() {
  // for a binary we just write the value of nextID over bound
  binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
                    patchBoundOffset);
}

void SPIRVProducerPass::GenerateLLVMIRInfo() {
  // This function generates LLVM IR for functions, such as global variables
  // for arguments, and constant and pointer types for argument access. This
  // information is artificial because we need Vulkan SPIR-V output. It is
  // executed ahead of FindType and FindConstant.

  FindGlobalConstVars();

  FindResourceVars();

  FindTypesForSamplerMap();
  FindTypesForResourceVars();
}

void SPIRVProducerPass::FindGlobalConstVars() {
  clspv::NormalizeGlobalVariables(*module);
  const DataLayout &DL = module->getDataLayout();

  SmallVector<GlobalVariable *, 8> GVList;
  SmallVector<GlobalVariable *, 8> DeadGVList;
  for (GlobalVariable &GV : module->globals()) {
    if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
      if (GV.use_empty()) {
        DeadGVList.push_back(&GV);
      } else {
        GVList.push_back(&GV);
      }
    }
  }

  // Remove dead global __constant variables.
  for (auto GV : DeadGVList) {
    GV->eraseFromParent();
  }
  DeadGVList.clear();

  if (clspv::Option::ModuleConstantsInStorageBuffer()) {
    // For now, we only support a single storage buffer.
    if (!GVList.empty()) {
      assert(GVList.size() == 1);
      const auto *GV = GVList[0];
      const auto constants_byte_size =
          (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
      const size_t kConstantMaxSize = 65536;
      if (constants_byte_size > kConstantMaxSize) {
        outs() << "Max __constant capacity of " << kConstantMaxSize
               << " bytes exceeded: " << constants_byte_size << " bytes used\n";
        llvm_unreachable("Max __constant capacity exceeded");
      }
    }
  } else {
    // Change global constant variable's address space to ModuleScopePrivate.
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    for (auto GV : GVList) {
      // Create new gv with ModuleScopePrivate address space.
      Type *NewGVTy = GV->getType()->getPointerElementType();
      GlobalVariable *NewGV = new GlobalVariable(
          *module, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
          nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
      NewGV->takeName(GV);

      const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
      SmallVector<User *, 8> CandidateUsers;

      auto record_called_function_type_as_user =
          [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
            // Find argument index.
            unsigned index = 0;
            for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
              if (gv == call->getOperand(i)) {
                // TODO(dneto): Should we break here?
                index = i;
              }
            }

            // Record function type with global constant.
            GlobalConstFuncTyMap[call->getFunctionType()] =
                std::make_pair(call->getFunctionType(), index);
          };

      for (User *GVU : GVUsers) {
        if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
          record_called_function_type_as_user(GV, Call);
        } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
          // Check GEP users.
          for (User *GEPU : GEP->users()) {
            if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
              record_called_function_type_as_user(GEP, GEPCall);
            }
          }
        }

        CandidateUsers.push_back(GVU);
      }

      for (User *U : CandidateUsers) {
        // Update users of gv with new gv.
        if (!isa<Constant>(U)) {
          // #254: Can't change operands of a constant, but this shouldn't be
          // something that sticks around in the module.
          U->replaceUsesOfWith(GV, NewGV);
        }
      }

      // Delete original gv.
      GV->eraseFromParent();
    }
  }
}

void SPIRVProducerPass::FindResourceVars() {
  ResourceVarInfoList.clear();
  FunctionToResourceVarsMap.clear();
  ModuleOrderedResourceVars.reset();
  // Normally, there is one resource variable per clspv.resource.var.*
  // function, since that is unique'd by arg type and index. By design,
  // we can share these resource variables across kernels because all
  // kernels use the same descriptor set.
  //
  // But if the user requested distinct descriptor sets per kernel, then
  // the descriptor allocator has made different (set,binding) pairs for
  // the same (type,arg_index) pair. Since we can decorate a resource
  // variable with only exactly one DescriptorSet and Binding, we are
  // forced in this case to make distinct resource variables whenever
  // the same clspv.resource.var.X function is seen with distinct
  // (set,binding) values.
  const bool always_distinct_sets =
      clspv::Option::DistinctKernelDescriptorSets();
  for (Function &F : *module) {
    // Rely on the fact the resource var functions have a stable ordering
    // in the module.
    if (Builtins::Lookup(&F) == Builtins::kClspvResource) {
      // Find all calls to this function with distinct set and binding pairs.
      // Save them in ResourceVarInfoList.

      // Determine uniqueness of the (set,binding) pairs only within this
1049 // one resource-var builtin function.
1050 using SetAndBinding = std::pair<unsigned, unsigned>;
1051 // Maps set and binding to the resource var info.
1052 DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
1053 bool first_use = true;
1054 for (auto &U : F.uses()) {
1055 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
1056 const auto set = unsigned(
1057 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
1058 const auto binding = unsigned(
1059 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
1060 const auto arg_kind = clspv::ArgKind(
1061 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
1062 const auto arg_index = unsigned(
1063 dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
alan-bakere9308012019-03-15 10:25:13 -04001064 const auto coherent = unsigned(
1065 dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());
David Neto862b7d82018-06-14 18:48:37 -04001066
1067 // Find or make the resource var info for this combination.
1068 ResourceVarInfo *rv = nullptr;
1069 if (always_distinct_sets) {
1070 // Make a new resource var any time we see a different
1071 // (set,binding) pair.
1072 SetAndBinding key{set, binding};
1073 auto where = set_and_binding_map.find(key);
1074 if (where == set_and_binding_map.end()) {
alan-baker7506abb2020-09-10 15:02:55 -04001075 rv = new ResourceVarInfo(
1076 static_cast<int>(ResourceVarInfoList.size()), set, binding,
1077 &F, arg_kind, coherent);
David Neto862b7d82018-06-14 18:48:37 -04001078 ResourceVarInfoList.emplace_back(rv);
1079 set_and_binding_map[key] = rv;
1080 } else {
1081 rv = where->second;
1082 }
1083 } else {
1084 // The default is to make exactly one resource for each
1085 // clspv.resource.var.* function.
1086 if (first_use) {
1087 first_use = false;
alan-baker7506abb2020-09-10 15:02:55 -04001088 rv = new ResourceVarInfo(
1089 static_cast<int>(ResourceVarInfoList.size()), set, binding,
1090 &F, arg_kind, coherent);
David Neto862b7d82018-06-14 18:48:37 -04001091 ResourceVarInfoList.emplace_back(rv);
1092 } else {
1093 rv = ResourceVarInfoList.back().get();
1094 }
1095 }
1096
1097 // Now populate FunctionToResourceVarsMap.
1098 auto &mapping =
1099 FunctionToResourceVarsMap[call->getParent()->getParent()];
1100 while (mapping.size() <= arg_index) {
1101 mapping.push_back(nullptr);
1102 }
1103 mapping[arg_index] = rv;
1104 }
1105 }
1106 }
1107 }
1108
1109 // Populate ModuleOrderedResourceVars.
SJW77b87ad2020-04-21 14:37:52 -05001110 for (Function &F : *module) {
David Neto862b7d82018-06-14 18:48:37 -04001111 auto where = FunctionToResourceVarsMap.find(&F);
1112 if (where != FunctionToResourceVarsMap.end()) {
1113 for (auto &rv : where->second) {
1114 if (rv != nullptr) {
1115 ModuleOrderedResourceVars.insert(rv);
1116 }
1117 }
1118 }
1119 }
1120 if (ShowResourceVars) {
1121 for (auto *info : ModuleOrderedResourceVars) {
1122 outs() << "MORV index " << info->index << " (" << info->descriptor_set
1123 << "," << info->binding << ") " << *(info->var_fn->getReturnType())
1124 << "\n";
1125 }
1126 }
1127}
1128
David Neto22f144c2017-06-12 14:26:21 -04001129void SPIRVProducerPass::FindTypePerGlobalVar(GlobalVariable &GV) {
1130 // Investigate global variable's type.
1131 FindType(GV.getType());
1132}
1133
SJW77b87ad2020-04-21 14:37:52 -05001134void SPIRVProducerPass::FindTypesForSamplerMap() {
David Neto862b7d82018-06-14 18:48:37 -04001135 // If we are using a sampler map, find the type of the sampler.
SJW77b87ad2020-04-21 14:37:52 -05001136 if (module->getFunction(clspv::LiteralSamplerFunction()) ||
alan-baker3f772c02021-06-15 22:18:11 -04001137 (getSamplerMap() && !getSamplerMap()->empty())) {
James Pricecbe834f2020-12-01 13:42:25 -05001138 auto SamplerStructTy =
1139 StructType::getTypeByName(module->getContext(), "opencl.sampler_t");
David Neto862b7d82018-06-14 18:48:37 -04001140 if (!SamplerStructTy) {
SJW77b87ad2020-04-21 14:37:52 -05001141 SamplerStructTy =
1142 StructType::create(module->getContext(), "opencl.sampler_t");
David Neto862b7d82018-06-14 18:48:37 -04001143 }
1144
1145 SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);
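    // Note (sketch): getSPIRVType() later lowers this opaque
    // %opencl.sampler_t pointee to OpTypeSampler (see the StructTyID case
    // in getSPIRVType).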
1146
1147 FindType(SamplerTy);
1148 }
1149}
1150
SJW77b87ad2020-04-21 14:37:52 -05001151void SPIRVProducerPass::FindTypesForResourceVars() {
David Neto862b7d82018-06-14 18:48:37 -04001152 // Record types so they are generated.
1153 TypesNeedingLayout.reset();
1154 StructTypesNeedingBlock.reset();
1155
1156 // To match older clspv codegen, generate the float type first if required
1157 // for images.
1158 for (const auto *info : ModuleOrderedResourceVars) {
alan-bakerf6bc8252020-09-23 14:58:55 -04001159 if (info->arg_kind == clspv::ArgKind::SampledImage ||
1160 info->arg_kind == clspv::ArgKind::StorageImage) {
alan-bakerf67468c2019-11-25 15:51:49 -05001161 if (IsIntImageType(info->var_fn->getReturnType())) {
1162 // Nothing for now...
1163 } else if (IsUintImageType(info->var_fn->getReturnType())) {
SJW77b87ad2020-04-21 14:37:52 -05001164 FindType(Type::getInt32Ty(module->getContext()));
alan-bakerf67468c2019-11-25 15:51:49 -05001165 }
1166
1167 // We need "float" either for the sampled type or for the Lod operand.
SJW77b87ad2020-04-21 14:37:52 -05001168 FindType(Type::getFloatTy(module->getContext()));
David Neto862b7d82018-06-14 18:48:37 -04001169 }
1170 }
1171
1172 for (const auto *info : ModuleOrderedResourceVars) {
1173 Type *type = info->var_fn->getReturnType();
1174
1175 switch (info->arg_kind) {
1176 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04001177 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04001178 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1179 StructTypesNeedingBlock.insert(sty);
1180 } else {
1181 errs() << *type << "\n";
1182 llvm_unreachable("Buffer arguments must map to structures!");
1183 }
1184 break;
1185 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001186 case clspv::ArgKind::PodUBO:
1187 case clspv::ArgKind::PodPushConstant:
David Neto862b7d82018-06-14 18:48:37 -04001188 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1189 StructTypesNeedingBlock.insert(sty);
1190 } else {
1191 errs() << *type << "\n";
1192 llvm_unreachable("POD arguments must map to structures!");
1193 }
1194 break;
alan-bakerf6bc8252020-09-23 14:58:55 -04001195 case clspv::ArgKind::SampledImage:
1196 case clspv::ArgKind::StorageImage:
David Neto862b7d82018-06-14 18:48:37 -04001197 case clspv::ArgKind::Sampler:
1198 // Sampler and image types map to the pointee type but
1199 // in the uniform constant address space.
1200 type = PointerType::get(type->getPointerElementType(),
1201 clspv::AddressSpace::UniformConstant);
1202 break;
1203 default:
1204 break;
1205 }
1206
1207 // The converted type is the type of the OpVariable we will generate.
1208 // If the pointee type is an array of size zero, FindType will convert it
1209 // to a runtime array.
1210 FindType(type);
1211 }
1212
alan-bakerdcd97412019-09-16 15:32:30 -04001213 // If module constants are clustered in a storage buffer then that struct
1214 // needs layout decorations.
1215 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
SJW77b87ad2020-04-21 14:37:52 -05001216 for (GlobalVariable &GV : module->globals()) {
alan-bakerdcd97412019-09-16 15:32:30 -04001217 PointerType *PTy = cast<PointerType>(GV.getType());
1218 const auto AS = PTy->getAddressSpace();
1219 const bool module_scope_constant_external_init =
1220 (AS == AddressSpace::Constant) && GV.hasInitializer();
1221 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
1222 if (module_scope_constant_external_init &&
1223 spv::BuiltInMax == BuiltinType) {
1224 StructTypesNeedingBlock.insert(
1225 cast<StructType>(PTy->getPointerElementType()));
1226 }
1227 }
1228 }
1229
SJW77b87ad2020-04-21 14:37:52 -05001230 for (const GlobalVariable &GV : module->globals()) {
Kévin Petitbbbda972020-03-03 19:16:31 +00001231 if (GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
1232 auto Ty = cast<PointerType>(GV.getType())->getPointerElementType();
1233 assert(Ty->isStructTy() && "Push constants have to be structures.");
1234 auto STy = cast<StructType>(Ty);
1235 StructTypesNeedingBlock.insert(STy);
1236 }
1237 }
1238
David Neto862b7d82018-06-14 18:48:37 -04001239 // Traverse the arrays and structures underneath each Block, and
1240 // mark them as needing layout.
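  // For example (sketch), a Block struct containing a runtime array of
  // structs ends up with the struct, the array, and the element struct all
  // in TypesNeedingLayout, and the array is also queued in
  // TypesNeedingArrayStride unless the early-stride hack already handled it.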
1241 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1242 StructTypesNeedingBlock.end());
1243 while (!work_list.empty()) {
1244 Type *type = work_list.back();
1245 work_list.pop_back();
1246 TypesNeedingLayout.insert(type);
1247 switch (type->getTypeID()) {
1248 case Type::ArrayTyID:
1249 work_list.push_back(type->getArrayElementType());
1250 if (!Hack_generate_runtime_array_stride_early) {
1251 // Remember this array type for deferred decoration.
1252 TypesNeedingArrayStride.insert(type);
1253 }
1254 break;
1255 case Type::StructTyID:
1256 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1257 work_list.push_back(elem_ty);
1258 }
1259 default:
1260 // This type and its contained types don't get layout.
1261 break;
1262 }
1263 }
1264}
1265
SJWf93f5f32020-05-05 07:27:56 -05001266void SPIRVProducerPass::GenerateWorkgroupVars() {
Alan Baker202c8c72018-08-13 13:47:44 -04001267 // The SpecId assignment for pointer-to-local arguments is recorded in
1268 // module-level metadata. Translate that information into local argument
1269 // information.
SJWf93f5f32020-05-05 07:27:56 -05001270 LLVMContext &Context = module->getContext();
SJW77b87ad2020-04-21 14:37:52 -05001271 NamedMDNode *nmd = module->getNamedMetadata(clspv::LocalSpecIdMetadataName());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001272 if (!nmd)
1273 return;
Alan Baker202c8c72018-08-13 13:47:44 -04001274 for (auto operand : nmd->operands()) {
1275 MDTuple *tuple = cast<MDTuple>(operand);
1276 ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
1277 Function *func = cast<Function>(fn_md->getValue());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001278 ConstantAsMetadata *arg_index_md =
1279 cast<ConstantAsMetadata>(tuple->getOperand(1));
1280 int arg_index = static_cast<int>(
1281 cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
1282 Argument *arg = &*(func->arg_begin() + arg_index);
Alan Baker202c8c72018-08-13 13:47:44 -04001283
1284 ConstantAsMetadata *spec_id_md =
1285 cast<ConstantAsMetadata>(tuple->getOperand(2));
alan-bakerb6b09dc2018-11-08 16:59:28 -05001286 int spec_id = static_cast<int>(
1287 cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04001288
Alan Baker202c8c72018-08-13 13:47:44 -04001289 LocalArgSpecIds[arg] = spec_id;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001290 if (LocalSpecIdInfoMap.count(spec_id))
1291 continue;
Alan Baker202c8c72018-08-13 13:47:44 -04001292
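    // Illustrative shape of what gets emitted per spec id (sketch only,
    // names invented):
    //   %size = OpSpecConstant %uint 1      ; decorated with SpecId below
    //   %arr  = OpTypeArray %elem %size
    //   %ptr  = OpTypePointer Workgroup %arr
    //   %var  = OpVariable %ptr Workgroup
    //           OpDecorate %size SpecId <spec_id>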
SJWf93f5f32020-05-05 07:27:56 -05001293 // Generate the spec constant.
1294 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05001295 Ops << Type::getInt32Ty(Context) << 1;
SJWf93f5f32020-05-05 07:27:56 -05001296 SPIRVID ArraySizeID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
Alan Baker202c8c72018-08-13 13:47:44 -04001297
SJWf93f5f32020-05-05 07:27:56 -05001298 // Generate the array type.
1299 Type *ElemTy = arg->getType()->getPointerElementType();
1300 Ops.clear();
1301 // The element type must have been created.
SJW01901d92020-05-21 08:58:31 -05001302 Ops << ElemTy << ArraySizeID;
SJWf93f5f32020-05-05 07:27:56 -05001303
1304 SPIRVID ArrayTypeID = addSPIRVInst<kTypes>(spv::OpTypeArray, Ops);
1305
1306 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05001307 Ops << spv::StorageClassWorkgroup << ArrayTypeID;
SJWf93f5f32020-05-05 07:27:56 -05001308 SPIRVID PtrArrayTypeID = addSPIRVInst<kTypes>(spv::OpTypePointer, Ops);
1309
1310 // Generate OpVariable.
1311 //
1312 // Ops[0] : Result Type ID
1313 // Ops[1] : Storage Class
SJW806a5d82020-07-15 12:51:38 -05001314 SPIRVID VariableID =
1315 addSPIRVGlobalVariable(PtrArrayTypeID, spv::StorageClassWorkgroup);
SJWf93f5f32020-05-05 07:27:56 -05001316
1317 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05001318 Ops << ArraySizeID << spv::DecorationSpecId << spec_id;
SJWf93f5f32020-05-05 07:27:56 -05001319 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
1320
1321 LocalArgInfo info{VariableID, ElemTy, ArraySizeID,
1322 ArrayTypeID, PtrArrayTypeID, spec_id};
1323 LocalSpecIdInfoMap[spec_id] = info;
Alan Baker202c8c72018-08-13 13:47:44 -04001324 }
1325}
1326
David Neto22f144c2017-06-12 14:26:21 -04001327void SPIRVProducerPass::FindType(Type *Ty) {
1328 TypeList &TyList = getTypeList();
1329
1330 if (0 != TyList.idFor(Ty)) {
1331 return;
1332 }
1333
1334 if (Ty->isPointerTy()) {
1335 auto AddrSpace = Ty->getPointerAddressSpace();
1336 if ((AddressSpace::Constant == AddrSpace) ||
1337 (AddressSpace::Global == AddrSpace)) {
1338 auto PointeeTy = Ty->getPointerElementType();
1339
1340 if (PointeeTy->isStructTy() &&
1341 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1342 FindType(PointeeTy);
1343 auto ActualPointerTy =
1344 PointeeTy->getPointerTo(AddressSpace::UniformConstant);
1345 FindType(ActualPointerTy);
1346 return;
1347 }
1348 }
1349 }
1350
David Neto862b7d82018-06-14 18:48:37 -04001351 // By convention, LLVM array type with 0 elements will map to
1352 // OpTypeRuntimeArray. Otherwise, it will map to OpTypeArray, which
1353 // has a constant number of elements. We also need the i32 type used
1354 // for the length constant.
1355 if (auto *arrayTy = dyn_cast<ArrayType>(Ty)) {
1356 if (arrayTy->getNumElements() > 0) {
1357 LLVMContext &Context = Ty->getContext();
1358 FindType(Type::getInt32Ty(Context));
1359 }
David Neto22f144c2017-06-12 14:26:21 -04001360 }
1361
1362 for (Type *SubTy : Ty->subtypes()) {
1363 FindType(SubTy);
1364 }
1365
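  // Subtypes were recorded above, so the resulting list ends up in
  // dependency order: each inserted type appears after the types it refers
  // to.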
1366 TyList.insert(Ty);
1367}
1368
David Neto22f144c2017-06-12 14:26:21 -04001369spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
1370 switch (AddrSpace) {
1371 default:
1372 llvm_unreachable("Unsupported OpenCL address space");
1373 case AddressSpace::Private:
1374 return spv::StorageClassFunction;
1375 case AddressSpace::Global:
David Neto22f144c2017-06-12 14:26:21 -04001376 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001377 case AddressSpace::Constant:
1378 return clspv::Option::ConstantArgsInUniformBuffer()
1379 ? spv::StorageClassUniform
1380 : spv::StorageClassStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -04001381 case AddressSpace::Input:
1382 return spv::StorageClassInput;
1383 case AddressSpace::Local:
1384 return spv::StorageClassWorkgroup;
1385 case AddressSpace::UniformConstant:
1386 return spv::StorageClassUniformConstant;
David Neto9ed8e2f2018-03-24 06:47:24 -07001387 case AddressSpace::Uniform:
David Netoe439d702018-03-23 13:14:08 -07001388 return spv::StorageClassUniform;
David Neto22f144c2017-06-12 14:26:21 -04001389 case AddressSpace::ModuleScopePrivate:
1390 return spv::StorageClassPrivate;
Kévin Petitbbbda972020-03-03 19:16:31 +00001391 case AddressSpace::PushConstant:
1392 return spv::StorageClassPushConstant;
David Neto22f144c2017-06-12 14:26:21 -04001393 }
1394}
1395
David Neto862b7d82018-06-14 18:48:37 -04001396spv::StorageClass
1397SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
1398 switch (arg_kind) {
1399 case clspv::ArgKind::Buffer:
1400 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001401 case clspv::ArgKind::BufferUBO:
1402 return spv::StorageClassUniform;
David Neto862b7d82018-06-14 18:48:37 -04001403 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001404 return spv::StorageClassStorageBuffer;
1405 case clspv::ArgKind::PodUBO:
1406 return spv::StorageClassUniform;
1407 case clspv::ArgKind::PodPushConstant:
1408 return spv::StorageClassPushConstant;
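  // Note: plain Pod args thus bind as storage buffers, PodUBO args as
  // uniform buffers, and PodPushConstant args via the push constant block.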
David Neto862b7d82018-06-14 18:48:37 -04001409 case clspv::ArgKind::Local:
1410 return spv::StorageClassWorkgroup;
alan-bakerf6bc8252020-09-23 14:58:55 -04001411 case clspv::ArgKind::SampledImage:
1412 case clspv::ArgKind::StorageImage:
David Neto862b7d82018-06-14 18:48:37 -04001413 case clspv::ArgKind::Sampler:
1414 return spv::StorageClassUniformConstant;
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001415 default:
1416 llvm_unreachable("Unsupported storage class for argument kind");
David Neto862b7d82018-06-14 18:48:37 -04001417 }
1418}
1419
David Neto22f144c2017-06-12 14:26:21 -04001420spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1421 return StringSwitch<spv::BuiltIn>(Name)
1422 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1423 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1424 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1425 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1426 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
alan-bakerbed3a882020-04-21 14:42:41 -04001427 .Case("__spirv_WorkDim", spv::BuiltInWorkDim)
alan-bakere1996972020-05-04 08:38:12 -04001428 .Case("__spirv_GlobalOffset", spv::BuiltInGlobalOffset)
David Neto22f144c2017-06-12 14:26:21 -04001429 .Default(spv::BuiltInMax);
1430}
1431
SJW01901d92020-05-21 08:58:31 -05001432SPIRVID SPIRVProducerPass::getOpExtInstImportID() {
1433 if (OpExtInstImportID == 0) {
1434 //
1435 // Generate OpExtInstImport.
1436 //
1437 // Ops[0] ... Ops[n] = Name (Literal String)
David Neto22f144c2017-06-12 14:26:21 -04001438
SJW01901d92020-05-21 08:58:31 -05001439 OpExtInstImportID =
1440 addSPIRVInst<kImports>(spv::OpExtInstImport, "GLSL.std.450");
1441 }
1442 return OpExtInstImportID;
SJWf93f5f32020-05-05 07:27:56 -05001443}
1444
SJW806a5d82020-07-15 12:51:38 -05001445SPIRVID SPIRVProducerPass::addSPIRVGlobalVariable(const SPIRVID &TypeID,
1446 spv::StorageClass SC,
alan-baker3f772c02021-06-15 22:18:11 -04001447 const SPIRVID &InitID,
1448 bool add_interface) {
SJW806a5d82020-07-15 12:51:38 -05001449 // Generate OpVariable.
1450 //
1451 // Ops[0] : Result Type ID
1452 // Ops[1] : Storage Class
1453 // Ops[2] : Initialization Value ID (optional)
1454
1455 SPIRVOperandVec Ops;
1456 Ops << TypeID << SC;
1457 if (InitID.isValid()) {
1458 Ops << InitID;
1459 }
1460
1461 SPIRVID VID = addSPIRVInst<kGlobalVariables>(spv::OpVariable, Ops);
1462
alan-baker3f772c02021-06-15 22:18:11 -04001463 if (SC == spv::StorageClassInput ||
1464 (add_interface && SpvVersion() >= SPIRVVersion::SPIRV_1_4)) {
SJW806a5d82020-07-15 12:51:38 -05001465 getEntryPointInterfacesList().push_back(VID);
1466 }
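  // SPIR-V 1.4 widened the OpEntryPoint interface to cover variables in all
  // storage classes, so for 1.4+ globals are appended here when
  // add_interface is set; Input variables are always part of the interface.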
1467
1468 return VID;
1469}
1470
alan-bakerc3fd07f2020-10-22 09:48:49 -04001471Type *SPIRVProducerPass::CanonicalType(Type *type) {
1472 if (type->getNumContainedTypes() != 0) {
1473 switch (type->getTypeID()) {
1474 case Type::PointerTyID: {
1475 // For the purposes of our Vulkan SPIR-V type system, constant and global
1476 // are conflated.
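      // e.g. an i32 __constant* is treated as i32 __global* here (unless
      // ConstantArgsInUniformBuffer() keeps __constant distinct), and
      // composite types containing such pointers are rebuilt member-wise
      // below.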
1477 auto *ptr_ty = cast<PointerType>(type);
1478 unsigned AddrSpace = ptr_ty->getAddressSpace();
1479 if (AddressSpace::Constant == AddrSpace) {
1480 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1481 AddrSpace = AddressSpace::Global;
1482 // The canonical type of __constant is __global unless constants are
1483 // passed in uniform buffers.
1484 auto *GlobalTy =
1485 ptr_ty->getPointerElementType()->getPointerTo(AddrSpace);
1486 return GlobalTy;
1487 }
1488 }
1489 break;
1490 }
1491 case Type::StructTyID: {
1492 SmallVector<Type *, 8> subtypes;
1493 bool changed = false;
1494 for (auto *subtype : type->subtypes()) {
1495 auto canonical = CanonicalType(subtype);
1496 subtypes.push_back(canonical);
1497 if (canonical != subtype) {
1498 changed = true;
1499 }
1500 }
1501 if (changed) {
1502 return StructType::get(type->getContext(), subtypes,
1503 cast<StructType>(type)->isPacked());
1504 }
1505 break;
1506 }
1507 case Type::ArrayTyID: {
1508 auto *elem_ty = type->getArrayElementType();
1509 auto *equiv_elem_ty = CanonicalType(elem_ty);
1510 if (equiv_elem_ty != elem_ty) {
1511 return ArrayType::get(equiv_elem_ty,
1512 cast<ArrayType>(type)->getNumElements());
1513 }
1514 break;
1515 }
1516 case Type::FunctionTyID: {
1517 auto *func_ty = cast<FunctionType>(type);
1518 auto *return_ty = CanonicalType(func_ty->getReturnType());
1519 SmallVector<Type *, 8> params;
1520 for (unsigned i = 0; i < func_ty->getNumParams(); ++i) {
1521 params.push_back(CanonicalType(func_ty->getParamType(i)));
1522 }
1523 return FunctionType::get(return_ty, params, func_ty->isVarArg());
1524 }
1525 default:
1526 break;
1527 }
1528 }
1529
1530 return type;
1531}
1532
alan-baker3f772c02021-06-15 22:18:11 -04001533bool SPIRVProducerPass::PointerRequiresLayout(unsigned aspace) {
1534 if (Option::SpvVersion() >= SPIRVVersion::SPIRV_1_4) {
1535 switch (aspace) {
1536 case AddressSpace::PushConstant:
1537 case AddressSpace::Global:
1538 case AddressSpace::Constant:
1539 return true;
1540 default:
1541 break;
1542 }
1543 }
1544 return false;
1545}
1546
SJW01901d92020-05-21 08:58:31 -05001547SPIRVID SPIRVProducerPass::getSPIRVType(Type *Ty) {
alan-baker3f772c02021-06-15 22:18:11 -04001548 // Prior to 1.4, layout decorations are more relaxed so we can reuse a laid
1549 // out type in non-laid out storage classes.
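  // From 1.4 on, only pointers into PushConstant, Global (StorageBuffer) or
  // Constant storage request the laid-out flavour (see PointerRequiresLayout
  // above); other storage classes reuse the undecorated copy.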
1550 bool needs_layout = false;
1551 if (auto ptr_ty = dyn_cast<PointerType>(Ty)) {
1552 needs_layout = PointerRequiresLayout(ptr_ty->getPointerAddressSpace());
1553 }
1554 return getSPIRVType(Ty, needs_layout);
1555}
1556
1557SPIRVID SPIRVProducerPass::getSPIRVType(Type *Ty, bool needs_layout) {
1558 // Only pointers, structs and arrays should have layout decorations.
1559 if (!(isa<PointerType>(Ty) || isa<ArrayType>(Ty) || isa<StructType>(Ty))) {
1560 needs_layout = false;
1561 }
1562 // |layout| is the index used for |Ty|'s entry in the type map. Each type
1563 // stores a laid out and non-laid out version of the type.
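  // e.g. TypeMap[S] may hold { <plain OpTypeStruct id>, <Offset/Block
  // decorated OpTypeStruct id> } -- slot 0 for non-laid-out uses, slot 1 for
  // laid-out uses (illustrative).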
1564 const unsigned layout = needs_layout ? 1 : 0;
1565
SJWf93f5f32020-05-05 07:27:56 -05001566 auto TI = TypeMap.find(Ty);
1567 if (TI != TypeMap.end()) {
alan-baker3f772c02021-06-15 22:18:11 -04001568 assert(layout < TI->second.size());
1569 if (TI->second[layout].isValid()) {
1570 return TI->second[layout];
1571 }
SJWf93f5f32020-05-05 07:27:56 -05001572 }
1573
alan-bakerc3fd07f2020-10-22 09:48:49 -04001574 auto Canonical = CanonicalType(Ty);
1575 if (Canonical != Ty) {
1576 auto CanonicalTI = TypeMap.find(Canonical);
1577 if (CanonicalTI != TypeMap.end()) {
alan-baker3f772c02021-06-15 22:18:11 -04001578 assert(layout < CanonicalTI->second.size());
1579 if (CanonicalTI->second[layout].isValid()) {
1580 auto id = CanonicalTI->second[layout];
1581 auto &base = TypeMap[Ty];
1582 if (base.empty()) {
1583 base.resize(2);
1584 }
1585 base[layout] = id;
1586 return id;
1587 }
alan-bakerc3fd07f2020-10-22 09:48:49 -04001588 }
1589 }
1590
1591 // Perform the mapping with the canonical type.
1592
SJWf93f5f32020-05-05 07:27:56 -05001593 const auto &DL = module->getDataLayout();
1594
SJW01901d92020-05-21 08:58:31 -05001595 SPIRVID RID;
SJWf93f5f32020-05-05 07:27:56 -05001596
alan-bakerc3fd07f2020-10-22 09:48:49 -04001597 switch (Canonical->getTypeID()) {
SJWf93f5f32020-05-05 07:27:56 -05001598 default: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001599 Canonical->print(errs());
SJWf93f5f32020-05-05 07:27:56 -05001600 llvm_unreachable("Unsupported type???");
1601 break;
1602 }
1603 case Type::MetadataTyID:
1604 case Type::LabelTyID: {
1605 // Ignore these types.
1606 break;
1607 }
1608 case Type::PointerTyID: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001609 PointerType *PTy = cast<PointerType>(Canonical);
SJWf93f5f32020-05-05 07:27:56 -05001610 unsigned AddrSpace = PTy->getAddressSpace();
1611
1612 if (AddrSpace != AddressSpace::UniformConstant) {
1613 auto PointeeTy = PTy->getElementType();
1614 if (PointeeTy->isStructTy() &&
1615 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
alan-baker3f772c02021-06-15 22:18:11 -04001616 RID = getSPIRVType(PointeeTy, needs_layout);
SJWf93f5f32020-05-05 07:27:56 -05001617 break;
1618 }
1619 }
1620
SJWf93f5f32020-05-05 07:27:56 -05001621 //
1622 // Generate OpTypePointer.
1623 //
1624
1625 // OpTypePointer
1626 // Ops[0] = Storage Class
1627 // Ops[1] = Element Type ID
1628 SPIRVOperandVec Ops;
1629
alan-baker3f772c02021-06-15 22:18:11 -04001630 Ops << GetStorageClass(AddrSpace)
1631 << getSPIRVType(PTy->getElementType(), needs_layout);
SJWf93f5f32020-05-05 07:27:56 -05001632
1633 RID = addSPIRVInst<kTypes>(spv::OpTypePointer, Ops);
1634 break;
1635 }
1636 case Type::StructTyID: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001637 StructType *STy = cast<StructType>(Canonical);
SJWf93f5f32020-05-05 07:27:56 -05001638
1639 // Handle sampler type.
1640 if (STy->isOpaque()) {
1641 if (STy->getName().equals("opencl.sampler_t")) {
1642 //
1643 // Generate OpTypeSampler
1644 //
1645 // Empty Ops.
1646
1647 RID = addSPIRVInst<kTypes>(spv::OpTypeSampler);
1648 break;
1649 } else if (STy->getName().startswith("opencl.image1d_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001650 STy->getName().startswith("opencl.image1d_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001651 STy->getName().startswith("opencl.image1d_wo_t") ||
1652 STy->getName().startswith("opencl.image1d_array_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001653 STy->getName().startswith("opencl.image1d_array_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001654 STy->getName().startswith("opencl.image1d_array_wo_t") ||
1655 STy->getName().startswith("opencl.image2d_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001656 STy->getName().startswith("opencl.image2d_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001657 STy->getName().startswith("opencl.image2d_wo_t") ||
1658 STy->getName().startswith("opencl.image2d_array_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001659 STy->getName().startswith("opencl.image2d_array_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001660 STy->getName().startswith("opencl.image2d_array_wo_t") ||
1661 STy->getName().startswith("opencl.image3d_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001662 STy->getName().startswith("opencl.image3d_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001663 STy->getName().startswith("opencl.image3d_wo_t")) {
SJW01901d92020-05-21 08:58:31 -05001664 if (STy->getName().startswith("opencl.image1d_")) {
1665 if (STy->getName().contains(".sampled"))
1666 addCapability(spv::CapabilitySampled1D);
1667 else
1668 addCapability(spv::CapabilityImage1D);
1669 }
1670
SJWf93f5f32020-05-05 07:27:56 -05001671 //
1672 // Generate OpTypeImage
1673 //
1674 // Ops[0] = Sampled Type ID
1675 // Ops[1] = Dim ID
1676 // Ops[2] = Depth (Literal Number)
1677 // Ops[3] = Arrayed (Literal Number)
1678 // Ops[4] = MS (Literal Number)
1679 // Ops[5] = Sampled (Literal Number)
1680 // Ops[6] = Image Format ID
1681 //
1682 SPIRVOperandVec Ops;
1683
SJW01901d92020-05-21 08:58:31 -05001684 SPIRVID SampledTyID;
alan-baker3f772c02021-06-15 22:18:11 -04001685 // None of the sampled types have a layout.
SJWf93f5f32020-05-05 07:27:56 -05001686 if (STy->getName().contains(".float")) {
alan-baker3f772c02021-06-15 22:18:11 -04001687 SampledTyID =
1688 getSPIRVType(Type::getFloatTy(Canonical->getContext()), false);
SJWf93f5f32020-05-05 07:27:56 -05001689 } else if (STy->getName().contains(".uint")) {
alan-baker3f772c02021-06-15 22:18:11 -04001690 SampledTyID =
1691 getSPIRVType(Type::getInt32Ty(Canonical->getContext()), false);
SJWf93f5f32020-05-05 07:27:56 -05001692 } else if (STy->getName().contains(".int")) {
1693 // Generate a signed 32-bit integer if necessary.
1694 if (int32ID == 0) {
1695 SPIRVOperandVec intOps;
SJW01901d92020-05-21 08:58:31 -05001696 intOps << 32 << 1;
SJWf93f5f32020-05-05 07:27:56 -05001697 int32ID = addSPIRVInst<kTypes>(spv::OpTypeInt, intOps);
1698 }
1699 SampledTyID = int32ID;
1700
1701 // Generate a vec4 of the signed int if necessary.
1702 if (v4int32ID == 0) {
1703 SPIRVOperandVec vecOps;
SJW01901d92020-05-21 08:58:31 -05001704 vecOps << int32ID << 4;
SJWf93f5f32020-05-05 07:27:56 -05001705 v4int32ID = addSPIRVInst<kTypes>(spv::OpTypeVector, vecOps);
1706 }
1707 } else {
1708 // This was likely an UndefValue.
alan-baker3f772c02021-06-15 22:18:11 -04001709 SampledTyID =
1710 getSPIRVType(Type::getFloatTy(Canonical->getContext()), false);
SJWf93f5f32020-05-05 07:27:56 -05001711 }
SJW01901d92020-05-21 08:58:31 -05001712 Ops << SampledTyID;
SJWf93f5f32020-05-05 07:27:56 -05001713
1714 spv::Dim DimID = spv::Dim2D;
1715 if (STy->getName().startswith("opencl.image1d_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001716 STy->getName().startswith("opencl.image1d_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001717 STy->getName().startswith("opencl.image1d_wo_t") ||
1718 STy->getName().startswith("opencl.image1d_array_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001719 STy->getName().startswith("opencl.image1d_array_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001720 STy->getName().startswith("opencl.image1d_array_wo_t")) {
1721 DimID = spv::Dim1D;
1722 } else if (STy->getName().startswith("opencl.image3d_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001723 STy->getName().startswith("opencl.image3d_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001724 STy->getName().startswith("opencl.image3d_wo_t")) {
1725 DimID = spv::Dim3D;
1726 }
SJW01901d92020-05-21 08:58:31 -05001727 Ops << DimID;
SJWf93f5f32020-05-05 07:27:56 -05001728
1729 // TODO: Set up Depth.
SJW01901d92020-05-21 08:58:31 -05001730 Ops << 0;
SJWf93f5f32020-05-05 07:27:56 -05001731
1732 uint32_t arrayed = STy->getName().contains("_array_") ? 1 : 0;
SJW01901d92020-05-21 08:58:31 -05001733 Ops << arrayed;
SJWf93f5f32020-05-05 07:27:56 -05001734
1735 // TODO: Set up MS.
SJW01901d92020-05-21 08:58:31 -05001736 Ops << 0;
SJWf93f5f32020-05-05 07:27:56 -05001737
1738 // Set up Sampled.
1739 //
1740 // From Spec
1741 //
1742 // 0 indicates this is only known at run time, not at compile time
1743 // 1 indicates will be used with sampler
1744 // 2 indicates will be used without a sampler (a storage image)
1745 uint32_t Sampled = 1;
1746 if (!STy->getName().contains(".sampled")) {
1747 Sampled = 2;
1748 }
SJW01901d92020-05-21 08:58:31 -05001749 Ops << Sampled;
SJWf93f5f32020-05-05 07:27:56 -05001750
1751 // TODO: Set up Image Format.
SJW01901d92020-05-21 08:58:31 -05001752 Ops << spv::ImageFormatUnknown;
SJWf93f5f32020-05-05 07:27:56 -05001753 RID = addSPIRVInst<kTypes>(spv::OpTypeImage, Ops);
1754
alan-bakerf6bc8252020-09-23 14:58:55 -04001755 // Only need a sampled version of the type if it is used with a sampler.
1756 if (Sampled == 1) {
1757 Ops.clear();
1758 Ops << RID;
alan-bakerc3fd07f2020-10-22 09:48:49 -04001759 getImageTypeMap()[Canonical] =
alan-bakerf6bc8252020-09-23 14:58:55 -04001760 addSPIRVInst<kTypes>(spv::OpTypeSampledImage, Ops);
1761 }
SJWf93f5f32020-05-05 07:27:56 -05001762 break;
1763 }
1764 }
1765
1766 //
1767 // Generate OpTypeStruct
1768 //
1769 // Ops[0] ... Ops[n] = Member IDs
1770 SPIRVOperandVec Ops;
1771
1772 for (auto *EleTy : STy->elements()) {
alan-baker3f772c02021-06-15 22:18:11 -04001773 Ops << getSPIRVType(EleTy, needs_layout);
SJWf93f5f32020-05-05 07:27:56 -05001774 }
1775
1776 RID = addSPIRVInst<kTypes>(spv::OpTypeStruct, Ops);
1777
alan-bakerc3fd07f2020-10-22 09:48:49 -04001778 // Generate OpMemberDecorate unless we are generating it for the canonical
1779 // type.
1780 StructType *canonical = cast<StructType>(CanonicalType(STy));
alan-baker3f772c02021-06-15 22:18:11 -04001781 bool use_layout =
1782 (Option::SpvVersion() < SPIRVVersion::SPIRV_1_4) || needs_layout;
alan-bakerc3fd07f2020-10-22 09:48:49 -04001783 if (TypesNeedingLayout.idFor(STy) &&
alan-baker3f772c02021-06-15 22:18:11 -04001784 (canonical == STy || !TypesNeedingLayout.idFor(canonical)) &&
1785 use_layout) {
SJWf93f5f32020-05-05 07:27:56 -05001786 for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
1787 MemberIdx++) {
1788 // Ops[0] = Structure Type ID
1789 // Ops[1] = Member Index(Literal Number)
1790 // Ops[2] = Decoration (Offset)
1791 // Ops[3] = Byte Offset (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05001792 const auto ByteOffset =
1793 GetExplicitLayoutStructMemberOffset(STy, MemberIdx, DL);
1794
SJW01901d92020-05-21 08:58:31 -05001795 Ops.clear();
1796 Ops << RID << MemberIdx << spv::DecorationOffset << ByteOffset;
SJWf93f5f32020-05-05 07:27:56 -05001797
1798 addSPIRVInst<kAnnotations>(spv::OpMemberDecorate, Ops);
1799 }
1800 }
1801
alan-bakerc3fd07f2020-10-22 09:48:49 -04001802 // Generate OpDecorate unless we are generating it for the canonical type.
1803 if (StructTypesNeedingBlock.idFor(STy) &&
alan-baker3f772c02021-06-15 22:18:11 -04001804 (canonical == STy || !StructTypesNeedingBlock.idFor(canonical)) &&
1805 use_layout) {
SJWf93f5f32020-05-05 07:27:56 -05001806 Ops.clear();
1807 // Use Block decorations with StorageBuffer storage class.
SJW01901d92020-05-21 08:58:31 -05001808 Ops << RID << spv::DecorationBlock;
SJWf93f5f32020-05-05 07:27:56 -05001809
1810 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
1811 }
1812 break;
1813 }
1814 case Type::IntegerTyID: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001815 uint32_t bit_width =
1816 static_cast<uint32_t>(Canonical->getPrimitiveSizeInBits());
SJWf93f5f32020-05-05 07:27:56 -05001817
alan-bakere2a62752020-07-09 22:53:23 -04001818 if (clspv::Option::Int8Support() && bit_width == 8) {
SJW01901d92020-05-21 08:58:31 -05001819 addCapability(spv::CapabilityInt8);
alan-bakere2a62752020-07-09 22:53:23 -04001820 } else if (bit_width == 16) {
SJW01901d92020-05-21 08:58:31 -05001821 addCapability(spv::CapabilityInt16);
alan-bakere2a62752020-07-09 22:53:23 -04001822 } else if (bit_width == 64) {
SJW01901d92020-05-21 08:58:31 -05001823 addCapability(spv::CapabilityInt64);
1824 }
1825
alan-bakere2a62752020-07-09 22:53:23 -04001826 if (bit_width == 1) {
SJWf93f5f32020-05-05 07:27:56 -05001827 RID = addSPIRVInst<kTypes>(spv::OpTypeBool);
1828 } else {
alan-bakere2a62752020-07-09 22:53:23 -04001829 if (!clspv::Option::Int8Support() && bit_width == 8) {
SJWf93f5f32020-05-05 07:27:56 -05001830 // i8 is added to TypeMap as i32.
alan-baker3f772c02021-06-15 22:18:11 -04001831 RID = getSPIRVType(Type::getIntNTy(Canonical->getContext(), 32), false);
SJWf93f5f32020-05-05 07:27:56 -05001832 } else {
1833 SPIRVOperandVec Ops;
alan-bakere2a62752020-07-09 22:53:23 -04001834 Ops << bit_width << 0 /* not signed */;
SJWf93f5f32020-05-05 07:27:56 -05001835 RID = addSPIRVInst<kTypes>(spv::OpTypeInt, Ops);
1836 }
1837 }
1838 break;
1839 }
1840 case Type::HalfTyID:
1841 case Type::FloatTyID:
1842 case Type::DoubleTyID: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001843 uint32_t bit_width =
1844 static_cast<uint32_t>(Canonical->getPrimitiveSizeInBits());
alan-bakere2a62752020-07-09 22:53:23 -04001845 if (bit_width == 16) {
SJW01901d92020-05-21 08:58:31 -05001846 addCapability(spv::CapabilityFloat16);
alan-bakere2a62752020-07-09 22:53:23 -04001847 } else if (bit_width == 64) {
SJW01901d92020-05-21 08:58:31 -05001848 addCapability(spv::CapabilityFloat64);
1849 }
1850
SJWf93f5f32020-05-05 07:27:56 -05001851 SPIRVOperandVec Ops;
alan-bakere2a62752020-07-09 22:53:23 -04001852 Ops << bit_width;
SJWf93f5f32020-05-05 07:27:56 -05001853
1854 RID = addSPIRVInst<kTypes>(spv::OpTypeFloat, Ops);
1855 break;
1856 }
1857 case Type::ArrayTyID: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001858 ArrayType *ArrTy = cast<ArrayType>(Canonical);
SJWf93f5f32020-05-05 07:27:56 -05001859 const uint64_t Length = ArrTy->getArrayNumElements();
1860 if (Length == 0) {
1861 // By convention, map it to a RuntimeArray.
1862
1863 Type *EleTy = ArrTy->getArrayElementType();
1864
1865 //
1866 // Generate OpTypeRuntimeArray.
1867 //
1868 // OpTypeRuntimeArray
1869 // Ops[0] = Element Type ID
1870 SPIRVOperandVec Ops;
alan-baker3f772c02021-06-15 22:18:11 -04001871 Ops << getSPIRVType(EleTy, needs_layout);
SJWf93f5f32020-05-05 07:27:56 -05001872
1873 RID = addSPIRVInst<kTypes>(spv::OpTypeRuntimeArray, Ops);
1874
alan-baker3f772c02021-06-15 22:18:11 -04001875 if (Hack_generate_runtime_array_stride_early &&
1876 (Option::SpvVersion() < SPIRVVersion::SPIRV_1_4 || needs_layout)) {
SJWf93f5f32020-05-05 07:27:56 -05001877 // Generate OpDecorate.
1878
1879 // Ops[0] = Target ID
1880 // Ops[1] = Decoration (ArrayStride)
1881 // Ops[2] = Stride Number(Literal Number)
1882 Ops.clear();
1883
SJW01901d92020-05-21 08:58:31 -05001884 Ops << RID << spv::DecorationArrayStride
1885 << static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL));
SJWf93f5f32020-05-05 07:27:56 -05001886
1887 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
1888 }
1889
1890 } else {
1891
1892 //
1893 // Generate OpConstant and OpTypeArray.
1894 //
1895
1896 //
1897 // Generate OpConstant for array length.
1898 //
1899 // Add constant for length to constant list.
1900 Constant *CstLength =
1901 ConstantInt::get(Type::getInt32Ty(module->getContext()), Length);
SJWf93f5f32020-05-05 07:27:56 -05001902
1903 // Remember to generate ArrayStride later
alan-bakerc3fd07f2020-10-22 09:48:49 -04001904 getTypesNeedingArrayStride().insert(Canonical);
SJWf93f5f32020-05-05 07:27:56 -05001905
1906 //
1907 // Generate OpTypeArray.
1908 //
1909 // Ops[0] = Element Type ID
1910 // Ops[1] = Array Length Constant ID
1911 SPIRVOperandVec Ops;
1912
alan-baker3f772c02021-06-15 22:18:11 -04001913 Ops << getSPIRVType(ArrTy->getElementType(), needs_layout) << CstLength;
SJWf93f5f32020-05-05 07:27:56 -05001914
1915 RID = addSPIRVInst<kTypes>(spv::OpTypeArray, Ops);
1916 }
1917 break;
1918 }
1919 case Type::FixedVectorTyID: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001920 auto VecTy = cast<VectorType>(Canonical);
SJWf93f5f32020-05-05 07:27:56 -05001921 // <4 x i8> is changed to i32 if i8 is not generally supported.
1922 if (!clspv::Option::Int8Support() &&
1923 VecTy->getElementType() == Type::getInt8Ty(module->getContext())) {
alan-baker5a8c3be2020-09-09 13:44:26 -04001924 if (VecTy->getElementCount().getKnownMinValue() == 4) {
SJWf93f5f32020-05-05 07:27:56 -05001925 RID = getSPIRVType(VecTy->getElementType());
1926 break;
1927 } else {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001928 Canonical->print(errs());
SJWf93f5f32020-05-05 07:27:56 -05001929 llvm_unreachable("Support above i8 vector type");
1930 }
1931 }
1932
1933 // Ops[0] = Component Type ID
1934 // Ops[1] = Component Count (Literal Number)
1935 SPIRVOperandVec Ops;
alan-baker5a8c3be2020-09-09 13:44:26 -04001936 Ops << VecTy->getElementType()
1937 << VecTy->getElementCount().getKnownMinValue();
SJWf93f5f32020-05-05 07:27:56 -05001938
1939 RID = addSPIRVInst<kTypes>(spv::OpTypeVector, Ops);
1940 break;
1941 }
1942 case Type::VoidTyID: {
1943 RID = addSPIRVInst<kTypes>(spv::OpTypeVoid);
1944 break;
1945 }
1946 case Type::FunctionTyID: {
1947 // Generate SPIRV instruction for function type.
alan-bakerc3fd07f2020-10-22 09:48:49 -04001948 FunctionType *FTy = cast<FunctionType>(Canonical);
SJWf93f5f32020-05-05 07:27:56 -05001949
1950 // Ops[0] = Return Type ID
1951 // Ops[1] ... Ops[n] = Parameter Type IDs
1952 SPIRVOperandVec Ops;
1953
1954 // Find SPIRV instruction for return type
SJW01901d92020-05-21 08:58:31 -05001955 Ops << FTy->getReturnType();
SJWf93f5f32020-05-05 07:27:56 -05001956
1957 // Find SPIRV instructions for parameter types
1958 for (unsigned k = 0; k < FTy->getNumParams(); k++) {
1959 // Find SPIRV instruction for parameter type.
1960 auto ParamTy = FTy->getParamType(k);
1961 if (ParamTy->isPointerTy()) {
1962 auto PointeeTy = ParamTy->getPointerElementType();
1963 if (PointeeTy->isStructTy() &&
1964 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1965 ParamTy = PointeeTy;
1966 }
1967 }
1968
alan-baker3f772c02021-06-15 22:18:11 -04001969 Ops << getSPIRVType(ParamTy, needs_layout);
SJWf93f5f32020-05-05 07:27:56 -05001970 }
1971
1972 RID = addSPIRVInst<kTypes>(spv::OpTypeFunction, Ops);
1973 break;
1974 }
1975 }
1976
SJW01901d92020-05-21 08:58:31 -05001977 if (RID.isValid()) {
alan-baker3f772c02021-06-15 22:18:11 -04001978 auto &entry = TypeMap[Canonical];
1979 if (entry.empty()) {
1980 entry.resize(2);
1981 }
1982 entry[layout] = RID;
1983
1984 if (Canonical != Ty) {
1985 // Also cache the original type.
1986 auto &base_entry = TypeMap[Ty];
1987 if (base_entry.empty()) {
1988 base_entry.resize(2);
1989 }
1990 base_entry[layout] = RID;
alan-bakerc3fd07f2020-10-22 09:48:49 -04001991 }
SJWf93f5f32020-05-05 07:27:56 -05001992 }
1993 return RID;
David Neto22f144c2017-06-12 14:26:21 -04001994}
1995
SJW77b87ad2020-04-21 14:37:52 -05001996void SPIRVProducerPass::GenerateSPIRVTypes() {
David Neto22f144c2017-06-12 14:26:21 -04001997 for (Type *Ty : getTypeList()) {
SJWf93f5f32020-05-05 07:27:56 -05001998 getSPIRVType(Ty);
David Netoc6f3ab22018-04-06 18:02:31 -04001999 }
David Neto22f144c2017-06-12 14:26:21 -04002000}
2001
SJW806a5d82020-07-15 12:51:38 -05002002SPIRVID SPIRVProducerPass::getSPIRVInt32Constant(uint32_t CstVal) {
2003 Type *i32 = Type::getInt32Ty(module->getContext());
2004 Constant *Cst = ConstantInt::get(i32, CstVal);
2005 return getSPIRVValue(Cst);
2006}
2007
alan-baker1b333b62021-05-31 14:55:32 -04002008SPIRVID SPIRVProducerPass::getSPIRVConstant(Constant *C) {
David Neto22f144c2017-06-12 14:26:21 -04002009 ValueMapType &VMap = getValueMap();
David Neto482550a2018-03-24 05:21:07 -07002010 const bool hack_undef = clspv::Option::HackUndef();
David Neto22f144c2017-06-12 14:26:21 -04002011
alan-baker1b333b62021-05-31 14:55:32 -04002012 // Treat poison as an undef.
2013 auto *Cst = C;
2014 if (isa<PoisonValue>(Cst)) {
2015 Cst = UndefValue::get(Cst->getType());
2016 }
2017
2018 auto VI = VMap.find(Cst);
2019 if (VI != VMap.end()) {
2020 assert(VI->second.isValid());
2021 return VI->second;
2022 }
2023
SJW01901d92020-05-21 08:58:31 -05002024 SPIRVID RID;
David Neto22f144c2017-06-12 14:26:21 -04002025
SJWf93f5f32020-05-05 07:27:56 -05002026 //
2027 // Generate OpConstant.
2028 //
2029 // Ops[0] = Result Type ID
2030 // Ops[1] .. Ops[n] = Values LiteralNumber
2031 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002032
SJW01901d92020-05-21 08:58:31 -05002033 Ops << Cst->getType();
David Neto22f144c2017-06-12 14:26:21 -04002034
SJWf93f5f32020-05-05 07:27:56 -05002035 std::vector<uint32_t> LiteralNum;
2036 spv::Op Opcode = spv::OpNop;
David Neto22f144c2017-06-12 14:26:21 -04002037
SJWf93f5f32020-05-05 07:27:56 -05002038 if (isa<UndefValue>(Cst)) {
David Neto22f144c2017-06-12 14:26:21 -04002039 // Ops[0] = Result Type ID
SJWf93f5f32020-05-05 07:27:56 -05002040 Opcode = spv::OpUndef;
2041 if (hack_undef && IsTypeNullable(Cst->getType())) {
2042 Opcode = spv::OpConstantNull;
2043 }
2044 } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
alan-bakere2a62752020-07-09 22:53:23 -04002045 unsigned bit_width = CI->getBitWidth();
2046 if (bit_width == 1) {
SJWf93f5f32020-05-05 07:27:56 -05002047 // If the bitwidth of constant is 1, generate OpConstantTrue or
2048 // OpConstantFalse.
2049 if (CI->getZExtValue()) {
2050 // Ops[0] = Result Type ID
2051 Opcode = spv::OpConstantTrue;
David Neto22f144c2017-06-12 14:26:21 -04002052 } else {
SJWf93f5f32020-05-05 07:27:56 -05002053 // Ops[0] = Result Type ID
2054 Opcode = spv::OpConstantFalse;
David Neto22f144c2017-06-12 14:26:21 -04002055 }
SJWf93f5f32020-05-05 07:27:56 -05002056 } else {
2057 auto V = CI->getZExtValue();
2058 LiteralNum.push_back(V & 0xFFFFFFFF);
2059
alan-bakere2a62752020-07-09 22:53:23 -04002060 if (bit_width > 32) {
SJWf93f5f32020-05-05 07:27:56 -05002061 LiteralNum.push_back(V >> 32);
David Neto22f144c2017-06-12 14:26:21 -04002062 }
2063
2064 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002065
SJW01901d92020-05-21 08:58:31 -05002066 Ops << LiteralNum;
SJWf93f5f32020-05-05 07:27:56 -05002067 }
2068 } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
2069 uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
2070 Type *CFPTy = CFP->getType();
2071 if (CFPTy->isFloatTy()) {
2072 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2073 } else if (CFPTy->isDoubleTy()) {
2074 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2075 LiteralNum.push_back(FPVal >> 32);
2076 } else if (CFPTy->isHalfTy()) {
2077 LiteralNum.push_back(FPVal & 0xFFFF);
2078 } else {
2079 CFPTy->print(errs());
2080 llvm_unreachable("Implement this ConstantFP Type");
2081 }
David Neto22f144c2017-06-12 14:26:21 -04002082
SJWf93f5f32020-05-05 07:27:56 -05002083 Opcode = spv::OpConstant;
David Neto49351ac2017-08-26 17:32:20 -04002084
SJW01901d92020-05-21 08:58:31 -05002085 Ops << LiteralNum;
SJWf93f5f32020-05-05 07:27:56 -05002086 } else if (isa<ConstantDataSequential>(Cst) &&
2087 cast<ConstantDataSequential>(Cst)->isString()) {
2088 Cst->print(errs());
2089 llvm_unreachable("Implement this Constant");
David Neto49351ac2017-08-26 17:32:20 -04002090
SJWf93f5f32020-05-05 07:27:56 -05002091 } else if (const ConstantDataSequential *CDS =
2092 dyn_cast<ConstantDataSequential>(Cst)) {
2093 // Let's convert <4 x i8> constant to int constant specially.
2094 // This case occurs when all the values are specified as constant
2095 // ints.
2096 Type *CstTy = Cst->getType();
2097 if (is4xi8vec(CstTy)) {
SJWf93f5f32020-05-05 07:27:56 -05002098 //
2099 // Generate OpConstant with OpTypeInt 32 0.
2100 //
2101 uint32_t IntValue = 0;
2102 for (unsigned k = 0; k < 4; k++) {
2103 const uint64_t Val = CDS->getElementAsInteger(k);
2104 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto49351ac2017-08-26 17:32:20 -04002105 }
2106
SJW806a5d82020-07-15 12:51:38 -05002107 RID = getSPIRVInt32Constant(IntValue);
SJWf93f5f32020-05-05 07:27:56 -05002108 } else {
2109
David Neto49351ac2017-08-26 17:32:20 -04002110 // A normal constant-data-sequential case.
David Neto22f144c2017-06-12 14:26:21 -04002111 for (unsigned k = 0; k < CDS->getNumElements(); k++) {
SJW01901d92020-05-21 08:58:31 -05002112 Ops << CDS->getElementAsConstant(k);
David Neto22f144c2017-06-12 14:26:21 -04002113 }
2114
2115 Opcode = spv::OpConstantComposite;
SJWf93f5f32020-05-05 07:27:56 -05002116 }
2117 } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
2118 // Let's convert <4 x i8> constant to int constant specially.
2119 // This case occurs when at least one of the values is an undef.
2120 Type *CstTy = Cst->getType();
2121 if (is4xi8vec(CstTy)) {
SJWf93f5f32020-05-05 07:27:56 -05002122 //
2123 // Generate OpConstant with OpTypeInt 32 0.
2124 //
2125 uint32_t IntValue = 0;
2126 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
2127 I != E; ++I) {
2128 uint64_t Val = 0;
2129 const Value *CV = *I;
2130 if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
2131 Val = CI2->getZExtValue();
David Neto22f144c2017-06-12 14:26:21 -04002132 }
SJWf93f5f32020-05-05 07:27:56 -05002133 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto22f144c2017-06-12 14:26:21 -04002134 }
2135
SJW806a5d82020-07-15 12:51:38 -05002136 RID = getSPIRVInt32Constant(IntValue);
SJWf93f5f32020-05-05 07:27:56 -05002137 } else {
2138
David Neto22f144c2017-06-12 14:26:21 -04002139 // We use a constant composite in SPIR-V for our constant aggregate in
2140 // LLVM.
2141 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002142
2143 for (unsigned k = 0; k < CA->getNumOperands(); k++) {
David Neto22f144c2017-06-12 14:26:21 -04002144 // And add an operand to the composite we are constructing
SJW01901d92020-05-21 08:58:31 -05002145 Ops << CA->getAggregateElement(k);
David Neto22f144c2017-06-12 14:26:21 -04002146 }
David Neto22f144c2017-06-12 14:26:21 -04002147 }
SJWf93f5f32020-05-05 07:27:56 -05002148 } else if (Cst->isNullValue()) {
2149 Opcode = spv::OpConstantNull;
2150 } else {
2151 Cst->print(errs());
2152 llvm_unreachable("Unsupported Constant???");
2153 }
David Neto22f144c2017-06-12 14:26:21 -04002154
SJWf93f5f32020-05-05 07:27:56 -05002155 if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
2156 // Null pointer requires variable pointers.
2157 setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
2158 }
alan-baker5b86ed72019-02-15 08:26:50 -05002159
SJWf93f5f32020-05-05 07:27:56 -05002160 if (RID == 0) {
2161 RID = addSPIRVInst<kConstants>(Opcode, Ops);
2162 }
2163
2164 VMap[Cst] = RID;
2165
2166 return RID;
2167}
2168
2169SPIRVID SPIRVProducerPass::getSPIRVValue(Value *V) {
2170 auto II = ValueMap.find(V);
2171 if (II != ValueMap.end()) {
SJW01901d92020-05-21 08:58:31 -05002172 assert(II->second.isValid());
SJWf93f5f32020-05-05 07:27:56 -05002173 return II->second;
2174 }
2175 if (Constant *Cst = dyn_cast<Constant>(V)) {
2176 return getSPIRVConstant(Cst);
2177 } else {
2178 llvm_unreachable("Variable not found");
2179 }
2180}
2181
SJW77b87ad2020-04-21 14:37:52 -05002182void SPIRVProducerPass::GenerateSamplers() {
alan-baker09cb9802019-12-10 13:16:27 -05002183 SamplerLiteralToIDMap.clear();
David Neto862b7d82018-06-14 18:48:37 -04002184 DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
2185 DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;
David Neto22f144c2017-06-12 14:26:21 -04002186
David Neto862b7d82018-06-14 18:48:37 -04002187 // We might have samplers in the sampler map that are not used
2188 // in the translation unit. We need to allocate variables
2189 // for them and bindings too.
2190 DenseSet<unsigned> used_bindings;
David Neto22f144c2017-06-12 14:26:21 -04002191
SJW77b87ad2020-04-21 14:37:52 -05002192 auto *var_fn = module->getFunction(clspv::LiteralSamplerFunction());
alan-baker09cb9802019-12-10 13:16:27 -05002193 // Return if there are no literal samplers.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002194 if (!var_fn)
2195 return;
alan-baker09cb9802019-12-10 13:16:27 -05002196
David Neto862b7d82018-06-14 18:48:37 -04002197 for (auto user : var_fn->users()) {
2198 // Populate SamplerLiteralToDescriptorSetMap and
2199 // SamplerLiteralToBindingMap.
2200 //
2201 // Look for calls like
2202 // call %opencl.sampler_t addrspace(2)*
2203 // @clspv.sampler.var.literal(
2204 // i32 descriptor,
2205 // i32 binding,
alan-baker09cb9802019-12-10 13:16:27 -05002206 // i32 (index-into-sampler-map|sampler_mask))
alan-bakerb6b09dc2018-11-08 16:59:28 -05002207 if (auto *call = dyn_cast<CallInst>(user)) {
alan-baker09cb9802019-12-10 13:16:27 -05002208 const auto third_param = static_cast<unsigned>(
alan-bakerb6b09dc2018-11-08 16:59:28 -05002209 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
alan-baker09cb9802019-12-10 13:16:27 -05002210 auto sampler_value = third_param;
2211 if (clspv::Option::UseSamplerMap()) {
alan-baker3f772c02021-06-15 22:18:11 -04002212 auto &sampler_map = *getSamplerMap();
alan-baker09cb9802019-12-10 13:16:27 -05002213 if (third_param >= sampler_map.size()) {
2214 errs() << "Out of bounds index to sampler map: " << third_param;
2215 llvm_unreachable("bad sampler init: out of bounds");
2216 }
2217 sampler_value = sampler_map[third_param].first;
David Neto862b7d82018-06-14 18:48:37 -04002218 }
2219
David Neto862b7d82018-06-14 18:48:37 -04002220 const auto descriptor_set = static_cast<unsigned>(
2221 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
2222 const auto binding = static_cast<unsigned>(
2223 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
2224
2225 SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
2226 SamplerLiteralToBindingMap[sampler_value] = binding;
2227 used_bindings.insert(binding);
2228 }
2229 }
2230
alan-baker09cb9802019-12-10 13:16:27 -05002231 DenseSet<size_t> seen;
2232 for (auto user : var_fn->users()) {
2233 if (!isa<CallInst>(user))
2234 continue;
2235
2236 auto call = cast<CallInst>(user);
2237 const unsigned third_param = static_cast<unsigned>(
2238 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
2239
2240 // Already allocated a variable for this value.
2241 if (!seen.insert(third_param).second)
2242 continue;
2243
2244 auto sampler_value = third_param;
2245 if (clspv::Option::UseSamplerMap()) {
alan-baker3f772c02021-06-15 22:18:11 -04002246 sampler_value = (*getSamplerMap())[third_param].first;
alan-baker09cb9802019-12-10 13:16:27 -05002247 }
2248
SJW806a5d82020-07-15 12:51:38 -05002249 auto sampler_var_id = addSPIRVGlobalVariable(
2250 getSPIRVType(SamplerTy), spv::StorageClassUniformConstant);
David Neto22f144c2017-06-12 14:26:21 -04002251
alan-baker09cb9802019-12-10 13:16:27 -05002252 SamplerLiteralToIDMap[sampler_value] = sampler_var_id;
David Neto22f144c2017-06-12 14:26:21 -04002253
David Neto862b7d82018-06-14 18:48:37 -04002254 unsigned descriptor_set;
2255 unsigned binding;
alan-baker09cb9802019-12-10 13:16:27 -05002256 if (SamplerLiteralToBindingMap.find(sampler_value) ==
alan-bakerb6b09dc2018-11-08 16:59:28 -05002257 SamplerLiteralToBindingMap.end()) {
David Neto862b7d82018-06-14 18:48:37 -04002258 // This sampler is not actually used. Find the next one.
alan-baker7506abb2020-09-10 15:02:55 -04002259 for (binding = 0; used_bindings.count(binding); binding++) {
2260 }
David Neto862b7d82018-06-14 18:48:37 -04002261 descriptor_set = 0; // Literal samplers always use descriptor set 0.
2262 used_bindings.insert(binding);
2263 } else {
alan-baker09cb9802019-12-10 13:16:27 -05002264 descriptor_set = SamplerLiteralToDescriptorSetMap[sampler_value];
2265 binding = SamplerLiteralToBindingMap[sampler_value];
alan-bakercff80152019-06-15 00:38:00 -04002266
alan-baker86ce19c2020-08-05 13:09:19 -04002267 auto import_id = getReflectionImport();
2268 SPIRVOperandVec Ops;
2269 Ops << getSPIRVType(Type::getVoidTy(module->getContext())) << import_id
2270 << reflection::ExtInstLiteralSampler
2271 << getSPIRVInt32Constant(descriptor_set)
2272 << getSPIRVInt32Constant(binding)
2273 << getSPIRVInt32Constant(sampler_value);
2274 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
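      // This reflection record (sketch) lets a consumer recover the
      // (descriptor set, binding, sampler mask/index) triple for each
      // literal sampler without re-deriving it from the module.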
David Neto862b7d82018-06-14 18:48:37 -04002275 }
2276
SJW69939d52020-04-16 07:29:07 -05002277 // Ops[0] = Target ID
2278 // Ops[1] = Decoration (DescriptorSet)
2279 // Ops[2] = LiteralNumber according to Decoration
SJW806a5d82020-07-15 12:51:38 -05002280 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05002281 Ops << sampler_var_id << spv::DecorationDescriptorSet << descriptor_set;
David Neto22f144c2017-06-12 14:26:21 -04002282
SJWf93f5f32020-05-05 07:27:56 -05002283 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002284
2285 // Ops[0] = Target ID
2286 // Ops[1] = Decoration (Binding)
2287 // Ops[2] = LiteralNumber according to Decoration
2288 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002289 Ops << sampler_var_id << spv::DecorationBinding << binding;
David Neto22f144c2017-06-12 14:26:21 -04002290
SJWf93f5f32020-05-05 07:27:56 -05002291 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002292 }
David Neto862b7d82018-06-14 18:48:37 -04002293}
David Neto22f144c2017-06-12 14:26:21 -04002294
SJW77b87ad2020-04-21 14:37:52 -05002295void SPIRVProducerPass::GenerateResourceVars() {
David Neto862b7d82018-06-14 18:48:37 -04002296 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04002297
David Neto862b7d82018-06-14 18:48:37 -04002298 // Generate variables. Make one for each of resource var info object.
2299 for (auto *info : ModuleOrderedResourceVars) {
2300 Type *type = info->var_fn->getReturnType();
2301 // Remap the address space for opaque types.
2302 switch (info->arg_kind) {
2303 case clspv::ArgKind::Sampler:
alan-bakerf6bc8252020-09-23 14:58:55 -04002304 case clspv::ArgKind::SampledImage:
2305 case clspv::ArgKind::StorageImage:
David Neto862b7d82018-06-14 18:48:37 -04002306 type = PointerType::get(type->getPointerElementType(),
2307 clspv::AddressSpace::UniformConstant);
2308 break;
2309 default:
2310 break;
2311 }
David Neto22f144c2017-06-12 14:26:21 -04002312
David Neto862b7d82018-06-14 18:48:37 -04002313 const auto sc = GetStorageClassForArgKind(info->arg_kind);
David Neto22f144c2017-06-12 14:26:21 -04002314
SJW806a5d82020-07-15 12:51:38 -05002315 info->var_id = addSPIRVGlobalVariable(getSPIRVType(type), sc);
David Neto862b7d82018-06-14 18:48:37 -04002316
2317 // Map calls to the variable-builtin-function.
2318 for (auto &U : info->var_fn->uses()) {
2319 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
2320 const auto set = unsigned(
2321 dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
2322 const auto binding = unsigned(
2323 dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
2324 if (set == info->descriptor_set && binding == info->binding) {
2325 switch (info->arg_kind) {
2326 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002327 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002328 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04002329 case clspv::ArgKind::PodUBO:
2330 case clspv::ArgKind::PodPushConstant:
David Neto862b7d82018-06-14 18:48:37 -04002331 // The call maps to the variable directly.
2332 VMap[call] = info->var_id;
2333 break;
2334 case clspv::ArgKind::Sampler:
alan-bakerf6bc8252020-09-23 14:58:55 -04002335 case clspv::ArgKind::SampledImage:
2336 case clspv::ArgKind::StorageImage:
David Neto862b7d82018-06-14 18:48:37 -04002337 // The call maps to a load we generate later.
2338 ResourceVarDeferredLoadCalls[call] = info->var_id;
2339 break;
2340 default:
2341 llvm_unreachable("Unhandled arg kind");
2342 }
2343 }
David Neto22f144c2017-06-12 14:26:21 -04002344 }
David Neto862b7d82018-06-14 18:48:37 -04002345 }
2346 }
David Neto22f144c2017-06-12 14:26:21 -04002347
David Neto862b7d82018-06-14 18:48:37 -04002348 // Generate associated decorations.
SJWf93f5f32020-05-05 07:27:56 -05002349 SPIRVOperandVec Ops;
David Neto862b7d82018-06-14 18:48:37 -04002350 for (auto *info : ModuleOrderedResourceVars) {
alan-baker9b0ec3c2020-04-06 14:45:34 -04002351 // Push constants don't need descriptor set or binding decorations.
2352 if (info->arg_kind == clspv::ArgKind::PodPushConstant)
2353 continue;
2354
David Neto862b7d82018-06-14 18:48:37 -04002355 // Decorate with DescriptorSet and Binding.
2356 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002357 Ops << info->var_id << spv::DecorationDescriptorSet << info->descriptor_set;
SJWf93f5f32020-05-05 07:27:56 -05002358 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto862b7d82018-06-14 18:48:37 -04002359
2360 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002361 Ops << info->var_id << spv::DecorationBinding << info->binding;
SJWf93f5f32020-05-05 07:27:56 -05002362 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto862b7d82018-06-14 18:48:37 -04002363
alan-bakere9308012019-03-15 10:25:13 -04002364 if (info->coherent) {
2365 // Decorate with Coherent if required for the variable.
2366 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002367 Ops << info->var_id << spv::DecorationCoherent;
SJWf93f5f32020-05-05 07:27:56 -05002368 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere9308012019-03-15 10:25:13 -04002369 }
2370
David Neto862b7d82018-06-14 18:48:37 -04002371 // Generate NonWritable and NonReadable
2372 switch (info->arg_kind) {
2373 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002374 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002375 if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
2376 clspv::AddressSpace::Constant) {
2377 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002378 Ops << info->var_id << spv::DecorationNonWritable;
SJWf93f5f32020-05-05 07:27:56 -05002379 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002380 }
David Neto862b7d82018-06-14 18:48:37 -04002381 break;
alan-bakerf6bc8252020-09-23 14:58:55 -04002382 case clspv::ArgKind::StorageImage: {
2383 auto *type = info->var_fn->getReturnType();
2384 auto *struct_ty = cast<StructType>(type->getPointerElementType());
2385 // TODO(alan-baker): This is conservative. If compiling for OpenCL 2.0 or
2386 // above, the compiler treats all write_only images as read_write images.
2387 if (struct_ty->getName().contains("_wo_t")) {
2388 Ops.clear();
2389 Ops << info->var_id << spv::DecorationNonReadable;
2390 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
2391 }
David Neto862b7d82018-06-14 18:48:37 -04002392 break;
alan-bakerf6bc8252020-09-23 14:58:55 -04002393 }
David Neto862b7d82018-06-14 18:48:37 -04002394 default:
2395 break;
David Neto22f144c2017-06-12 14:26:21 -04002396 }
2397 }
2398}
2399
2400void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
David Neto22f144c2017-06-12 14:26:21 -04002401 ValueMapType &VMap = getValueMap();
SJW01901d92020-05-21 08:58:31 -05002402 std::vector<SPIRVID> &BuiltinDimVec = getBuiltinDimVec();
David Neto85082642018-03-24 06:55:20 -07002403 const DataLayout &DL = GV.getParent()->getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04002404
2405 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
2406 Type *Ty = GV.getType();
2407 PointerType *PTy = cast<PointerType>(Ty);
2408
SJW01901d92020-05-21 08:58:31 -05002409 SPIRVID InitializerID;
David Neto22f144c2017-06-12 14:26:21 -04002410
2411 // Workgroup size is handled differently (it goes into a constant)
2412 if (spv::BuiltInWorkgroupSize == BuiltinType) {
David Neto22f144c2017-06-12 14:26:21 -04002413 uint32_t PrevXDimCst = 0xFFFFFFFF;
2414 uint32_t PrevYDimCst = 0xFFFFFFFF;
2415 uint32_t PrevZDimCst = 0xFFFFFFFF;
alan-baker3b609772020-09-03 19:10:17 -04002416 bool HasMD = true;
David Neto22f144c2017-06-12 14:26:21 -04002417 for (Function &Func : *GV.getParent()) {
2418 if (Func.isDeclaration()) {
2419 continue;
2420 }
2421
2422 // We only need to check kernels.
2423 if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
2424 continue;
2425 }
2426
2427 if (const MDNode *MD =
2428 dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
2429 uint32_t CurXDimCst = static_cast<uint32_t>(
2430 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
2431 uint32_t CurYDimCst = static_cast<uint32_t>(
2432 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
2433 uint32_t CurZDimCst = static_cast<uint32_t>(
2434 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
2435
2436 if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
2437 PrevZDimCst == 0xFFFFFFFF) {
2438 PrevXDimCst = CurXDimCst;
2439 PrevYDimCst = CurYDimCst;
2440 PrevZDimCst = CurZDimCst;
2441 } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
2442 CurZDimCst != PrevZDimCst) {
alan-baker3b609772020-09-03 19:10:17 -04002443 HasMD = false;
2444 continue;
David Neto22f144c2017-06-12 14:26:21 -04002445 } else {
2446 continue;
2447 }
2448
2449 //
2450 // Generate OpConstantComposite.
2451 //
2452 // Ops[0] : Result Type ID
2453 // Ops[1] : Constant size for x dimension.
2454 // Ops[2] : Constant size for y dimension.
2455 // Ops[3] : Constant size for z dimension.
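// For example (illustrative), reqd_work_group_size(8, 4, 1) with a v3uint
// element type would yield roughly:
//   %wgsize = OpConstantComposite %v3uint %uint_8 %uint_4 %uint_1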
SJWf93f5f32020-05-05 07:27:56 -05002456 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002457
SJW01901d92020-05-21 08:58:31 -05002458 SPIRVID XDimCstID =
SJWf93f5f32020-05-05 07:27:56 -05002459 getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(0)));
SJW01901d92020-05-21 08:58:31 -05002460 SPIRVID YDimCstID =
SJWf93f5f32020-05-05 07:27:56 -05002461 getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(1)));
SJW01901d92020-05-21 08:58:31 -05002462 SPIRVID ZDimCstID =
SJWf93f5f32020-05-05 07:27:56 -05002463 getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(2)));
David Neto22f144c2017-06-12 14:26:21 -04002464
SJW01901d92020-05-21 08:58:31 -05002465 Ops << Ty->getPointerElementType() << XDimCstID << YDimCstID
2466 << ZDimCstID;
David Neto22f144c2017-06-12 14:26:21 -04002467
SJWf93f5f32020-05-05 07:27:56 -05002468 InitializerID =
2469 addSPIRVInst<kGlobalVariables>(spv::OpConstantComposite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002470 } else {
alan-baker3b609772020-09-03 19:10:17 -04002471 HasMD = false;
David Neto22f144c2017-06-12 14:26:21 -04002472 }
2473 }
2474
2475 // If reqd_work_group_size metadata is missing or inconsistent across kernels,
2476 // or non-uniform NDRanges are supported, generate OpSpecConstants for x/y/z.
Kévin Petit21c23c62020-04-29 01:38:28 +01002477 if (!HasMD || clspv::Option::NonUniformNDRangeSupported()) {
David Neto22f144c2017-06-12 14:26:21 -04002478 //
2479 // Generate OpSpecConstants for x/y/z dimension.
2480 //
2481 // Ops[0] : Result Type ID
2482 // Ops[1] : Constant size for x/y/z dimension (Literal Number).
David Neto22f144c2017-06-12 14:26:21 -04002483
alan-bakera1be3322020-04-20 12:48:18 -04002484 // Allocate spec constants for workgroup size.
SJW77b87ad2020-04-21 14:37:52 -05002485 clspv::AddWorkgroupSpecConstants(module);
alan-bakera1be3322020-04-20 12:48:18 -04002486
SJWf93f5f32020-05-05 07:27:56 -05002487 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05002488 SPIRVID result_type_id = getSPIRVType(
SJWf93f5f32020-05-05 07:27:56 -05002489 dyn_cast<VectorType>(Ty->getPointerElementType())->getElementType());
David Neto22f144c2017-06-12 14:26:21 -04002490
David Neto257c3892018-04-11 13:19:45 -04002491 // X Dimension
SJW01901d92020-05-21 08:58:31 -05002492 Ops << result_type_id << 1;
2493 SPIRVID XDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002494
2495 // Y Dimension
2496 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002497 Ops << result_type_id << 1;
2498 SPIRVID YDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002499
2500 // Z Dimension
2501 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002502 Ops << result_type_id << 1;
2503 SPIRVID ZDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002504
David Neto257c3892018-04-11 13:19:45 -04002505 BuiltinDimVec.push_back(XDimCstID);
2506 BuiltinDimVec.push_back(YDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002507 BuiltinDimVec.push_back(ZDimCstID);
2508
David Neto22f144c2017-06-12 14:26:21 -04002509 //
2510 // Generate OpSpecConstantComposite.
2511 //
2512 // Ops[0] : Result Type ID
2513 // Ops[1] : Constant size for x dimension.
2514 // Ops[2] : Constant size for y dimension.
2515 // Ops[3] : Constant size for z dimension.
David Neto22f144c2017-06-12 14:26:21 -04002516 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002517 Ops << Ty->getPointerElementType() << XDimCstID << YDimCstID << ZDimCstID;
David Neto22f144c2017-06-12 14:26:21 -04002518
SJWf93f5f32020-05-05 07:27:56 -05002519 InitializerID =
2520 addSPIRVInst<kConstants>(spv::OpSpecConstantComposite, Ops);
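// The module then contains, approximately:
//   %x = OpSpecConstant %uint 1
//   %y = OpSpecConstant %uint 1
//   %z = OpSpecConstant %uint 1
//   %wgsize = OpSpecConstantComposite %v3uint %x %y %z
// The SpecId decorations (0, 1, 2) are attached later from BuiltinDimVec in
// GenerateModuleInfo().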
David Neto22f144c2017-06-12 14:26:21 -04002521 }
alan-bakerbed3a882020-04-21 14:42:41 -04002522 } else if (BuiltinType == spv::BuiltInWorkDim) {
2523 // 1. Generate a specialization constant with a default of 3.
2524 // 2. Allocate and annotate a SpecId for the constant.
2525 // 3. Use the spec constant as the initializer for the variable.
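// A hedged sketch of what this path emits:
//   %workdim = OpSpecConstant %uint 3
//   OpDecorate %workdim SpecId <allocated id>
// with %workdim used as the initializer of the variable created below.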
SJWf93f5f32020-05-05 07:27:56 -05002526 SPIRVOperandVec Ops;
alan-bakerbed3a882020-04-21 14:42:41 -04002527
2528 //
2529 // Generate OpSpecConstant.
2530 //
2531 // Ops[0] : Result Type ID
2532 // Ops[1] : Default literal value
alan-bakerbed3a882020-04-21 14:42:41 -04002533
SJW01901d92020-05-21 08:58:31 -05002534 Ops << IntegerType::get(GV.getContext(), 32) << 3;
alan-bakerbed3a882020-04-21 14:42:41 -04002535
SJWf93f5f32020-05-05 07:27:56 -05002536 InitializerID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakerbed3a882020-04-21 14:42:41 -04002537
2538 //
2539 // Generate SpecId decoration.
2540 //
2541 // Ops[0] : target
2542 // Ops[1] : decoration
2543 // Ops[2] : SpecId
Alan Baker75ccc252020-04-21 17:11:52 -04002544 auto spec_id = AllocateSpecConstant(module, SpecConstant::kWorkDim);
alan-bakerbed3a882020-04-21 14:42:41 -04002545 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002546 Ops << InitializerID << spv::DecorationSpecId << spec_id;
alan-bakerbed3a882020-04-21 14:42:41 -04002547
SJWf93f5f32020-05-05 07:27:56 -05002548 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002549 } else if (BuiltinType == spv::BuiltInGlobalOffset) {
2550 // 1. Generate a spec constant with a default of {0, 0, 0}.
2551 // 2. Allocate and annotate SpecIds for the constants.
2552 // 3. Use the spec constant as the initializer for the variable.
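// Roughly (illustrative ids):
//   %gx = OpSpecConstant %uint 0   ; SpecId for kGlobalOffsetX
//   %gy = OpSpecConstant %uint 0   ; SpecId for kGlobalOffsetY
//   %gz = OpSpecConstant %uint 0   ; SpecId for kGlobalOffsetZ
//   %goff = OpSpecConstantComposite %v3uint %gx %gy %gz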
SJWf93f5f32020-05-05 07:27:56 -05002553 SPIRVOperandVec Ops;
alan-bakere1996972020-05-04 08:38:12 -04002554
2555 //
2556 // Generate OpSpecConstant for each dimension.
2557 //
2558 // Ops[0] : Result Type ID
2559 // Ops[1] : Default literal value
2560 //
SJW01901d92020-05-21 08:58:31 -05002561 Ops << IntegerType::get(GV.getContext(), 32) << 0;
2562 SPIRVID x_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002563
alan-bakere1996972020-05-04 08:38:12 -04002564 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002565 Ops << IntegerType::get(GV.getContext(), 32) << 0;
2566 SPIRVID y_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002567
alan-bakere1996972020-05-04 08:38:12 -04002568 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002569 Ops << IntegerType::get(GV.getContext(), 32) << 0;
2570 SPIRVID z_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002571
2572 //
2573 // Generate SpecId decoration for each dimension.
2574 //
2575 // Ops[0] : target
2576 // Ops[1] : decoration
2577 // Ops[2] : SpecId
2578 //
2579 auto spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetX);
2580 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002581 Ops << x_id << spv::DecorationSpecId << spec_id;
SJWf93f5f32020-05-05 07:27:56 -05002582 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002583
2584 spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetY);
2585 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002586 Ops << y_id << spv::DecorationSpecId << spec_id;
SJWf93f5f32020-05-05 07:27:56 -05002587 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002588
2589 spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetZ);
2590 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002591 Ops << z_id << spv::DecorationSpecId << spec_id;
SJWf93f5f32020-05-05 07:27:56 -05002592 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002593
2594 //
2595 // Generate OpSpecConstantComposite.
2596 //
2597 // Ops[0] : type id
2598 // Ops[1..n-1] : elements
2599 //
alan-bakere1996972020-05-04 08:38:12 -04002600 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002601 Ops << GV.getType()->getPointerElementType() << x_id << y_id << z_id;
SJWf93f5f32020-05-05 07:27:56 -05002602 InitializerID = addSPIRVInst<kConstants>(spv::OpSpecConstantComposite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002603 }
2604
David Neto85082642018-03-24 06:55:20 -07002605 const auto AS = PTy->getAddressSpace();
SJW806a5d82020-07-15 12:51:38 -05002606 const auto spvSC = GetStorageClass(AS);
David Neto22f144c2017-06-12 14:26:21 -04002607
David Neto85082642018-03-24 06:55:20 -07002608 const bool module_scope_constant_external_init =
David Neto862b7d82018-06-14 18:48:37 -04002609 (AS == AddressSpace::Constant) && GV.hasInitializer() &&
David Neto85082642018-03-24 06:55:20 -07002610 clspv::Option::ModuleConstantsInStorageBuffer();
2611
Kévin Petit23d5f182019-08-13 16:21:29 +01002612 if (GV.hasInitializer()) {
2613 auto GVInit = GV.getInitializer();
2614 if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
SJWf93f5f32020-05-05 07:27:56 -05002615 InitializerID = getSPIRVValue(GVInit);
David Neto85082642018-03-24 06:55:20 -07002616 }
2617 }
Kévin Petit23d5f182019-08-13 16:21:29 +01002618
alan-baker3f772c02021-06-15 22:18:11 -04002619 // All private, module private, and local global variables can be added to
2620 // interfaces conservatively.
2621 const bool interface =
2622 (AS == AddressSpace::Private || AS == AddressSpace::ModuleScopePrivate ||
2623 AS == AddressSpace::Local);
SJW806a5d82020-07-15 12:51:38 -05002624 SPIRVID var_id =
alan-baker3f772c02021-06-15 22:18:11 -04002625 addSPIRVGlobalVariable(getSPIRVType(Ty), spvSC, InitializerID, interface);
David Neto85082642018-03-24 06:55:20 -07002626
SJWf93f5f32020-05-05 07:27:56 -05002627 VMap[&GV] = var_id;
David Neto22f144c2017-06-12 14:26:21 -04002628
alan-bakere1996972020-05-04 08:38:12 -04002629 auto IsOpenCLBuiltin = [](spv::BuiltIn builtin) {
2630 return builtin == spv::BuiltInWorkDim ||
2631 builtin == spv::BuiltInGlobalOffset;
2632 };
2633
alan-bakere1996972020-05-04 08:38:12 -04002634 // If we have a builtin (not an OpenCL builtin).
2635 if (spv::BuiltInMax != BuiltinType && !IsOpenCLBuiltin(BuiltinType)) {
David Neto22f144c2017-06-12 14:26:21 -04002636 //
2637 // Generate OpDecorate.
2638 //
2639 // Ops[0] = Target ID
2640 // Ops[1] = Decoration (BuiltIn)
2641 // Ops[2] = BuiltIn ID
SJW01901d92020-05-21 08:58:31 -05002642 SPIRVID ResultID;
David Neto22f144c2017-06-12 14:26:21 -04002643
2644 // WorkgroupSize is different: we decorate the constant composite that holds
2645 // its value, rather than the variable that we use to access the value.
2646 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2647 ResultID = InitializerID;
David Netoa60b00b2017-09-15 16:34:09 -04002648 // Save both the value and variable IDs for later.
2649 WorkgroupSizeValueID = InitializerID;
SJWf93f5f32020-05-05 07:27:56 -05002650 WorkgroupSizeVarID = getSPIRVValue(&GV);
David Neto22f144c2017-06-12 14:26:21 -04002651 } else {
SJWf93f5f32020-05-05 07:27:56 -05002652 ResultID = getSPIRVValue(&GV);
David Neto22f144c2017-06-12 14:26:21 -04002653 }
2654
SJW806a5d82020-07-15 12:51:38 -05002655 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05002656 Ops << ResultID << spv::DecorationBuiltIn << BuiltinType;
David Neto22f144c2017-06-12 14:26:21 -04002657
SJW01901d92020-05-21 08:58:31 -05002658 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto85082642018-03-24 06:55:20 -07002659 } else if (module_scope_constant_external_init) {
2660 // This module scope constant is initialized from a storage buffer with data
2661 // provided by the host at binding 0 of the next descriptor set.
SJW77b87ad2020-04-21 14:37:52 -05002662 const uint32_t descriptor_set = TakeDescriptorIndex(module);
David Neto85082642018-03-24 06:55:20 -07002663
alan-baker86ce19c2020-08-05 13:09:19 -04002664 // Emit the initializer as a reflection instruction.
David Neto85082642018-03-24 06:55:20 -07002665 // Use "kind,buffer" to indicate storage buffer. We might want to expand
2666 // that later to other types, like uniform buffer.
alan-bakerf5e5f692018-11-27 08:33:24 -05002667 std::string hexbytes;
2668 llvm::raw_string_ostream str(hexbytes);
2669 clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());
alan-baker86ce19c2020-08-05 13:09:19 -04002670
2671 // Reflection instruction for constant data.
2672 SPIRVOperandVec Ops;
2673 auto data_id = addSPIRVInst<kDebug>(spv::OpString, str.str().c_str());
2674 Ops << getSPIRVType(Type::getVoidTy(module->getContext()))
2675 << getReflectionImport() << reflection::ExtInstConstantDataStorageBuffer
2676 << getSPIRVInt32Constant(descriptor_set) << getSPIRVInt32Constant(0)
2677 << data_id;
2678 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
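// A hedged sketch of the reflection instruction emitted above:
//   %r = OpExtInst %void %clspv_reflection ConstantDataStorageBuffer
//        %uint_<descriptor_set> %uint_0 %data_string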
David Neto85082642018-03-24 06:55:20 -07002679
David Neto85082642018-03-24 06:55:20 -07002680 // OpDecorate %var DescriptorSet <descriptor_set>
alan-baker86ce19c2020-08-05 13:09:19 -04002681 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002682 Ops << var_id << spv::DecorationDescriptorSet << descriptor_set;
2683 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
SJW69939d52020-04-16 07:29:07 -05002684
2685 // OpDecorate %var Binding <binding>
SJW01901d92020-05-21 08:58:31 -05002686 Ops.clear();
2687 Ops << var_id << spv::DecorationBinding << 0;
2688 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04002689 }
2690}
2691
David Neto22f144c2017-06-12 14:26:21 -04002692void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
David Neto22f144c2017-06-12 14:26:21 -04002693 ValueMapType &VMap = getValueMap();
2694 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04002695 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
2696 auto &GlobalConstArgSet = getGlobalConstArgSet();
2697
2698 FunctionType *FTy = F.getFunctionType();
2699
2700 //
David Neto22f144c2017-06-12 14:26:21 -04002701 // Generate OpFunction.
2702 //
2703
2704 // FOps[0] : Result Type ID
2705 // FOps[1] : Function Control
2706 // FOps[2] : Function Type ID
SJWf93f5f32020-05-05 07:27:56 -05002707 SPIRVOperandVec FOps;
David Neto22f144c2017-06-12 14:26:21 -04002708
2709 // Find the SPIR-V type id for the return type.
SJW01901d92020-05-21 08:58:31 -05002710 FOps << FTy->getReturnType();
David Neto22f144c2017-06-12 14:26:21 -04002711
2712 // Check function attributes for SPIRV Function Control.
2713 uint32_t FuncControl = spv::FunctionControlMaskNone;
2714 if (F.hasFnAttribute(Attribute::AlwaysInline)) {
2715 FuncControl |= spv::FunctionControlInlineMask;
2716 }
2717 if (F.hasFnAttribute(Attribute::NoInline)) {
2718 FuncControl |= spv::FunctionControlDontInlineMask;
2719 }
2720 // TODO: Check llvm attribute for Function Control Pure.
2721 if (F.hasFnAttribute(Attribute::ReadOnly)) {
2722 FuncControl |= spv::FunctionControlPureMask;
2723 }
2724 // TODO: Check llvm attribute for Function Control Const.
2725 if (F.hasFnAttribute(Attribute::ReadNone)) {
2726 FuncControl |= spv::FunctionControlConstMask;
2727 }
2728
SJW01901d92020-05-21 08:58:31 -05002729 FOps << FuncControl;
David Neto22f144c2017-06-12 14:26:21 -04002730
SJW01901d92020-05-21 08:58:31 -05002731 SPIRVID FTyID;
David Neto22f144c2017-06-12 14:26:21 -04002732 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
2733 SmallVector<Type *, 4> NewFuncParamTys;
2734 FunctionType *NewFTy =
2735 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
SJWf93f5f32020-05-05 07:27:56 -05002736 FTyID = getSPIRVType(NewFTy);
David Neto22f144c2017-06-12 14:26:21 -04002737 } else {
David Neto9ed8e2f2018-03-24 06:47:24 -07002738 // Handle regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04002739 if (GlobalConstFuncTyMap.count(FTy)) {
SJWf93f5f32020-05-05 07:27:56 -05002740 FTyID = getSPIRVType(GlobalConstFuncTyMap[FTy].first);
David Neto22f144c2017-06-12 14:26:21 -04002741 } else {
SJWf93f5f32020-05-05 07:27:56 -05002742 FTyID = getSPIRVType(FTy);
David Neto22f144c2017-06-12 14:26:21 -04002743 }
2744 }
2745
SJW01901d92020-05-21 08:58:31 -05002746 FOps << FTyID;
David Neto22f144c2017-06-12 14:26:21 -04002747
SJWf93f5f32020-05-05 07:27:56 -05002748 // Generate SPIRV instruction for function.
2749 SPIRVID FID = addSPIRVInst(spv::OpFunction, FOps);
2750 VMap[&F] = FID;
David Neto22f144c2017-06-12 14:26:21 -04002751
SJWf93f5f32020-05-05 07:27:56 -05002752 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
2753 EntryPoints.push_back(std::make_pair(&F, FID));
2754 }
David Neto22f144c2017-06-12 14:26:21 -04002755
David Neto482550a2018-03-24 05:21:07 -07002756 if (clspv::Option::ShowIDs()) {
SJW01901d92020-05-21 08:58:31 -05002757 errs() << "Function " << F.getName() << " is " << FID.get() << "\n";
David Netob05675d2018-02-16 12:37:49 -05002758 }
David Neto22f144c2017-06-12 14:26:21 -04002759
2760 //
2761 // Generate OpFunctionParameter for Normal function.
2762 //
David Neto22f144c2017-06-12 14:26:21 -04002763 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
alan-bakere9308012019-03-15 10:25:13 -04002764
David Neto22f144c2017-06-12 14:26:21 -04002765 // Iterate over the Arguments (rather than the function type's parameters) so names are available.
2766 unsigned ArgIdx = 0;
2767 for (Argument &Arg : F.args()) {
David Neto22f144c2017-06-12 14:26:21 -04002768 // Ops[0] : Result Type ID
SJW01901d92020-05-21 08:58:31 -05002769 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002770
2771 // Find the SPIR-V type id for the parameter type.
SJW01901d92020-05-21 08:58:31 -05002772 SPIRVID ParamTyID = getSPIRVType(Arg.getType());
David Neto22f144c2017-06-12 14:26:21 -04002773 if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
2774 if (GlobalConstFuncTyMap.count(FTy)) {
2775 if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
2776 Type *EleTy = PTy->getPointerElementType();
2777 Type *ArgTy =
2778 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
SJWf93f5f32020-05-05 07:27:56 -05002779 ParamTyID = getSPIRVType(ArgTy);
David Neto22f144c2017-06-12 14:26:21 -04002780 GlobalConstArgSet.insert(&Arg);
2781 }
2782 }
2783 }
SJW01901d92020-05-21 08:58:31 -05002784 Ops << ParamTyID;
David Neto22f144c2017-06-12 14:26:21 -04002785
2786 // Generate SPIRV instruction for parameter.
SJW01901d92020-05-21 08:58:31 -05002787 SPIRVID param_id = addSPIRVInst(spv::OpFunctionParameter, Ops);
SJWf93f5f32020-05-05 07:27:56 -05002788 VMap[&Arg] = param_id;
2789
2790 if (CalledWithCoherentResource(Arg)) {
2791 // If the arg is ever passed a coherent resource, then decorate this
2792 // parameter with Coherent too.
SJW01901d92020-05-21 08:58:31 -05002793 Ops.clear();
2794 Ops << param_id << spv::DecorationCoherent;
2795 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
SJWf93f5f32020-05-05 07:27:56 -05002796 }
David Neto22f144c2017-06-12 14:26:21 -04002797
2798 ArgIdx++;
2799 }
2800 }
2801}
2802
SJW77b87ad2020-04-21 14:37:52 -05002803void SPIRVProducerPass::GenerateModuleInfo() {
David Neto22f144c2017-06-12 14:26:21 -04002804 EntryPointVecType &EntryPoints = getEntryPointVec();
SJW806a5d82020-07-15 12:51:38 -05002805 auto &EntryPointInterfaces = getEntryPointInterfacesList();
SJW01901d92020-05-21 08:58:31 -05002806 std::vector<SPIRVID> &BuiltinDimVec = getBuiltinDimVec();
David Neto22f144c2017-06-12 14:26:21 -04002807
SJWf93f5f32020-05-05 07:27:56 -05002808 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002809
SJW01901d92020-05-21 08:58:31 -05002810 for (auto Capability : CapabilitySet) {
David Neto22f144c2017-06-12 14:26:21 -04002811 //
SJW01901d92020-05-21 08:58:31 -05002812 // Generate OpCapability
David Neto22f144c2017-06-12 14:26:21 -04002813 //
2814 // Ops[0] = Capability
SJW01901d92020-05-21 08:58:31 -05002815 addSPIRVInst<kCapabilities>(spv::OpCapability, Capability);
alan-baker5b86ed72019-02-15 08:26:50 -05002816 }
2817
alan-baker3f772c02021-06-15 22:18:11 -04002818 // Storage buffer and variable pointer extensions were made core in SPIR-V
2819 // 1.3.
2820 if (SpvVersion() < SPIRVVersion::SPIRV_1_3) {
David Neto22f144c2017-06-12 14:26:21 -04002821 //
2822 // Generate OpExtension.
2823 //
2824 // Ops[0] = Name (Literal String)
2825 //
SJWf93f5f32020-05-05 07:27:56 -05002826 addSPIRVInst<kExtensions>(spv::OpExtension,
2827 "SPV_KHR_storage_buffer_storage_class");
David Neto22f144c2017-06-12 14:26:21 -04002828
alan-baker3f772c02021-06-15 22:18:11 -04002829 if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
2830 //
2831 // Generate OpExtension.
2832 //
2833 // Ops[0] = Name (Literal String)
2834 //
2835 addSPIRVInst<kExtensions>(spv::OpExtension, "SPV_KHR_variable_pointers");
2836 }
David Neto22f144c2017-06-12 14:26:21 -04002837 }
2838
2839 //
2840 // Generate OpMemoryModel
2841 //
2842 // Memory model for Vulkan will always be GLSL450.
2843
2844 // Ops[0] = Addressing Model
2845 // Ops[1] = Memory Model
2846 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002847 Ops << spv::AddressingModelLogical << spv::MemoryModelGLSL450;
David Neto22f144c2017-06-12 14:26:21 -04002848
SJWf93f5f32020-05-05 07:27:56 -05002849 addSPIRVInst<kMemoryModel>(spv::OpMemoryModel, Ops);
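// i.e. every module declares: OpMemoryModel Logical GLSL450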
David Neto22f144c2017-06-12 14:26:21 -04002850
2851 //
2852 // Generate OpEntryPoint
2853 //
2854 for (auto EntryPoint : EntryPoints) {
2855 // Ops[0] = Execution Model
2856 // Ops[1] = EntryPoint ID
2857 // Ops[2] = Name (Literal String)
2858 // ...
2859 //
2860 // TODO: Do we need to consider Interface ID for forward references???
2861 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05002862 const StringRef &name = EntryPoint.first->getName();
SJW01901d92020-05-21 08:58:31 -05002863 Ops << spv::ExecutionModelGLCompute << EntryPoint.second << name;
David Neto22f144c2017-06-12 14:26:21 -04002864
SJW806a5d82020-07-15 12:51:38 -05002865 for (auto &Interface : EntryPointInterfaces) {
SJW01901d92020-05-21 08:58:31 -05002866 Ops << Interface;
David Neto22f144c2017-06-12 14:26:21 -04002867 }
2868
alan-baker3f772c02021-06-15 22:18:11 -04002869 // Starting in SPIR-V 1.4, all statically used global variables must be
2870 // included in the interface. Private and statically-sized workgroup
2871 // variables are added to all entry points. Kernel arguments are handled
2872 // here.
2873 if (SpvVersion() >= SPIRVVersion::SPIRV_1_4) {
2874 auto *F = dyn_cast<Function>(EntryPoint.first);
2875 assert(F);
2876 assert(F->getCallingConv() == CallingConv::SPIR_KERNEL);
2877
2878 auto &resource_var_at_index = FunctionToResourceVarsMap[F];
2879 for (auto *info : resource_var_at_index) {
2880 if (info) {
2881 Ops << info->var_id;
2882 }
2883 }
2884
2885 auto local_spec_id_md =
2886 module->getNamedMetadata(clspv::LocalSpecIdMetadataName());
2887 if (local_spec_id_md) {
2888 for (auto spec_id_op : local_spec_id_md->operands()) {
2889 if (dyn_cast<Function>(
2890 dyn_cast<ValueAsMetadata>(spec_id_op->getOperand(0))
2891 ->getValue()) == F) {
2892 int64_t spec_id =
2893 mdconst::extract<ConstantInt>(spec_id_op->getOperand(2))
2894 ->getSExtValue();
2895 if (spec_id > 0) {
2896 auto &info = LocalSpecIdInfoMap[spec_id];
2897 Ops << info.variable_id;
2898 }
2899 }
2900 }
2901 }
2902
2903 // If the kernel uses the global push constant interface it will not be
2904 // covered by the resource variable iteration above.
2905 if (GetPodArgsImpl(*F) == PodArgImpl::kGlobalPushConstant) {
2906 auto *PC =
2907 module->getGlobalVariable(clspv::PushConstantsVariableName());
2908 assert(PC);
2909 Ops << getValueMap()[PC];
2910 }
2911 }
2912
SJWf93f5f32020-05-05 07:27:56 -05002913 addSPIRVInst<kEntryPoints>(spv::OpEntryPoint, Ops);
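// For a kernel "foo" this emits, approximately:
//   OpEntryPoint GLCompute %foo "foo" <interface variable ids...>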
David Neto22f144c2017-06-12 14:26:21 -04002914 }
2915
alan-baker3b609772020-09-03 19:10:17 -04002916 if (BuiltinDimVec.empty()) {
2917 for (auto EntryPoint : EntryPoints) {
2918 const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
2919 ->getMetadata("reqd_work_group_size");
2920 if ((MD != nullptr) && !clspv::Option::NonUniformNDRangeSupported()) {
2921 //
2922 // Generate OpExecutionMode
2923 //
David Neto22f144c2017-06-12 14:26:21 -04002924
alan-baker3b609772020-09-03 19:10:17 -04002925 // Ops[0] = Entry Point ID
2926 // Ops[1] = Execution Mode
2927 // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
2928 Ops.clear();
2929 Ops << EntryPoint.second << spv::ExecutionModeLocalSize;
2930
2931 uint32_t XDim = static_cast<uint32_t>(
2932 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
2933 uint32_t YDim = static_cast<uint32_t>(
2934 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
2935 uint32_t ZDim = static_cast<uint32_t>(
2936 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
2937
2938 Ops << XDim << YDim << ZDim;
2939
2940 addSPIRVInst<kExecutionModes>(spv::OpExecutionMode, Ops);
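// e.g. reqd_work_group_size(8, 8, 1) produces:
//   OpExecutionMode %kernel LocalSize 8 8 1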
David Neto22f144c2017-06-12 14:26:21 -04002941 }
David Neto22f144c2017-06-12 14:26:21 -04002942 }
2943 }
2944
2945 //
2946 // Generate OpSource.
2947 //
2948 // Ops[0] = SourceLanguage ID
2949 // Ops[1] = Version (LiteralNum)
2950 //
SJW01901d92020-05-21 08:58:31 -05002951 uint32_t LangID = spv::SourceLanguageUnknown;
2952 uint32_t LangVer = 0;
Kévin Petitf0515712020-01-07 18:29:20 +00002953 switch (clspv::Option::Language()) {
2954 case clspv::Option::SourceLanguage::OpenCL_C_10:
SJW01901d92020-05-21 08:58:31 -05002955 LangID = spv::SourceLanguageOpenCL_C;
2956 LangVer = 100;
Kévin Petitf0515712020-01-07 18:29:20 +00002957 break;
2958 case clspv::Option::SourceLanguage::OpenCL_C_11:
SJW01901d92020-05-21 08:58:31 -05002959 LangID = spv::SourceLanguageOpenCL_C;
2960 LangVer = 110;
Kévin Petitf0515712020-01-07 18:29:20 +00002961 break;
2962 case clspv::Option::SourceLanguage::OpenCL_C_12:
SJW01901d92020-05-21 08:58:31 -05002963 LangID = spv::SourceLanguageOpenCL_C;
2964 LangVer = 120;
Kévin Petitf0515712020-01-07 18:29:20 +00002965 break;
2966 case clspv::Option::SourceLanguage::OpenCL_C_20:
SJW01901d92020-05-21 08:58:31 -05002967 LangID = spv::SourceLanguageOpenCL_C;
2968 LangVer = 200;
Kévin Petitf0515712020-01-07 18:29:20 +00002969 break;
Kévin Petit77838ff2020-10-19 18:54:51 +01002970 case clspv::Option::SourceLanguage::OpenCL_C_30:
2971 LangID = spv::SourceLanguageOpenCL_C;
2972 LangVer = 300;
2973 break;
Kévin Petitf0515712020-01-07 18:29:20 +00002974 case clspv::Option::SourceLanguage::OpenCL_CPP:
SJW01901d92020-05-21 08:58:31 -05002975 LangID = spv::SourceLanguageOpenCL_CPP;
2976 LangVer = 100;
Kévin Petitf0515712020-01-07 18:29:20 +00002977 break;
2978 default:
Kévin Petitf0515712020-01-07 18:29:20 +00002979 break;
Kévin Petit0fc88042019-04-09 23:25:02 +01002980 }
David Neto22f144c2017-06-12 14:26:21 -04002981
SJW01901d92020-05-21 08:58:31 -05002982 Ops.clear();
2983 Ops << LangID << LangVer;
SJWf93f5f32020-05-05 07:27:56 -05002984 addSPIRVInst<kDebug>(spv::OpSource, Ops);
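// e.g. an OpenCL C 1.2 input is recorded as: OpSource OpenCL_C 120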
David Neto22f144c2017-06-12 14:26:21 -04002985
2986 if (!BuiltinDimVec.empty()) {
2987 //
2988 // Generate OpDecorates for x/y/z dimension.
2989 //
2990 // Ops[0] = Target ID
2991 // Ops[1] = Decoration (SpecId)
David Neto257c3892018-04-11 13:19:45 -04002992 // Ops[2] = Specialization Constant ID (Literal Number)
David Neto22f144c2017-06-12 14:26:21 -04002993
2994 // X Dimension
2995 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002996 Ops << BuiltinDimVec[0] << spv::DecorationSpecId << 0;
SJWf93f5f32020-05-05 07:27:56 -05002997 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002998
2999 // Y Dimension
3000 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05003001 Ops << BuiltinDimVec[1] << spv::DecorationSpecId << 1;
SJWf93f5f32020-05-05 07:27:56 -05003002 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003003
3004 // Z Dimension
3005 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05003006 Ops << BuiltinDimVec[2] << spv::DecorationSpecId << 2;
SJWf93f5f32020-05-05 07:27:56 -05003007 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003008 }
3009}
3010
David Netob6e2e062018-04-25 10:32:06 -04003011void SPIRVProducerPass::GenerateEntryPointInitialStores() {
3012 // Work around a driver bug. Initializers on Private variables might not
3013 // work. So the start of the kernel should store the initializer value to the
3014 // variables. Yes, *every* entry point pays this cost if *any* entry point
3015 // uses this builtin. At this point I judge this to be an acceptable tradeoff
3016 // of complexity vs. runtime, for a broken driver.
alan-bakerb6b09dc2018-11-08 16:59:28 -05003017 // TODO(dneto): Remove this at some point once fixed drivers are widely
3018 // available.
SJW01901d92020-05-21 08:58:31 -05003019 if (WorkgroupSizeVarID.isValid()) {
3020 assert(WorkgroupSizeValueID.isValid());
David Netob6e2e062018-04-25 10:32:06 -04003021
SJWf93f5f32020-05-05 07:27:56 -05003022 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05003023 Ops << WorkgroupSizeVarID << WorkgroupSizeValueID;
David Netob6e2e062018-04-25 10:32:06 -04003024
SJWf93f5f32020-05-05 07:27:56 -05003025 addSPIRVInst(spv::OpStore, Ops);
David Netob6e2e062018-04-25 10:32:06 -04003026 }
3027}
3028
David Neto22f144c2017-06-12 14:26:21 -04003029void SPIRVProducerPass::GenerateFuncBody(Function &F) {
David Neto22f144c2017-06-12 14:26:21 -04003030 ValueMapType &VMap = getValueMap();
3031
David Netob6e2e062018-04-25 10:32:06 -04003032 const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;
David Neto22f144c2017-06-12 14:26:21 -04003033
3034 for (BasicBlock &BB : F) {
3035 // Register BasicBlock to ValueMap.
David Neto22f144c2017-06-12 14:26:21 -04003036
3037 //
3038 // Generate OpLabel for Basic Block.
3039 //
SJWf93f5f32020-05-05 07:27:56 -05003040 VMap[&BB] = addSPIRVInst(spv::OpLabel);
David Neto22f144c2017-06-12 14:26:21 -04003041
David Neto6dcd4712017-06-23 11:06:47 -04003042 // OpVariable instructions must come first.
3043 for (Instruction &I : BB) {
alan-baker5b86ed72019-02-15 08:26:50 -05003044 if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
3045 // Allocating a pointer requires variable pointers.
3046 if (alloca->getAllocatedType()->isPointerTy()) {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003047 setVariablePointersCapabilities(
3048 alloca->getAllocatedType()->getPointerAddressSpace());
alan-baker5b86ed72019-02-15 08:26:50 -05003049 }
David Neto6dcd4712017-06-23 11:06:47 -04003050 GenerateInstruction(I);
3051 }
3052 }
3053
David Neto22f144c2017-06-12 14:26:21 -04003054 if (&BB == &F.getEntryBlock() && IsKernel) {
David Netob6e2e062018-04-25 10:32:06 -04003055 if (clspv::Option::HackInitializers()) {
3056 GenerateEntryPointInitialStores();
3057 }
David Neto22f144c2017-06-12 14:26:21 -04003058 }
3059
3060 for (Instruction &I : BB) {
David Neto6dcd4712017-06-23 11:06:47 -04003061 if (!isa<AllocaInst>(I)) {
3062 GenerateInstruction(I);
3063 }
David Neto22f144c2017-06-12 14:26:21 -04003064 }
3065 }
3066}
3067
3068spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
3069 const std::map<CmpInst::Predicate, spv::Op> Map = {
3070 {CmpInst::ICMP_EQ, spv::OpIEqual},
3071 {CmpInst::ICMP_NE, spv::OpINotEqual},
3072 {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
3073 {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
3074 {CmpInst::ICMP_ULT, spv::OpULessThan},
3075 {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
3076 {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
3077 {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
3078 {CmpInst::ICMP_SLT, spv::OpSLessThan},
3079 {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
3080 {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
3081 {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
3082 {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
3083 {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
3084 {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
3085 {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
3086 {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
3087 {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
3088 {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
3089 {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
3090 {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
3091 {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};
3092
3093 assert(0 != Map.count(I->getPredicate()));
3094
3095 return Map.at(I->getPredicate());
3096}
3097
3098spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
3099 const std::map<unsigned, spv::Op> Map{
3100 {Instruction::Trunc, spv::OpUConvert},
3101 {Instruction::ZExt, spv::OpUConvert},
3102 {Instruction::SExt, spv::OpSConvert},
3103 {Instruction::FPToUI, spv::OpConvertFToU},
3104 {Instruction::FPToSI, spv::OpConvertFToS},
3105 {Instruction::UIToFP, spv::OpConvertUToF},
3106 {Instruction::SIToFP, spv::OpConvertSToF},
3107 {Instruction::FPTrunc, spv::OpFConvert},
3108 {Instruction::FPExt, spv::OpFConvert},
3109 {Instruction::BitCast, spv::OpBitcast}};
3110
3111 assert(0 != Map.count(I.getOpcode()));
3112
3113 return Map.at(I.getOpcode());
3114}
3115
3116spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
Kévin Petit24272b62018-10-18 19:16:12 +00003117 if (I.getType()->isIntOrIntVectorTy(1)) {
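// For 1-bit (boolean) values the LLVM bitwise ops map to SPIR-V logical
// ops; xor becomes OpLogicalNotEqual since SPIR-V has no logical-xor opcode.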
David Neto22f144c2017-06-12 14:26:21 -04003118 switch (I.getOpcode()) {
3119 default:
3120 break;
3121 case Instruction::Or:
3122 return spv::OpLogicalOr;
3123 case Instruction::And:
3124 return spv::OpLogicalAnd;
3125 case Instruction::Xor:
3126 return spv::OpLogicalNotEqual;
3127 }
3128 }
3129
alan-bakerb6b09dc2018-11-08 16:59:28 -05003130 const std::map<unsigned, spv::Op> Map{
David Neto22f144c2017-06-12 14:26:21 -04003131 {Instruction::Add, spv::OpIAdd},
3132 {Instruction::FAdd, spv::OpFAdd},
3133 {Instruction::Sub, spv::OpISub},
3134 {Instruction::FSub, spv::OpFSub},
3135 {Instruction::Mul, spv::OpIMul},
3136 {Instruction::FMul, spv::OpFMul},
3137 {Instruction::UDiv, spv::OpUDiv},
3138 {Instruction::SDiv, spv::OpSDiv},
3139 {Instruction::FDiv, spv::OpFDiv},
3140 {Instruction::URem, spv::OpUMod},
3141 {Instruction::SRem, spv::OpSRem},
3142 {Instruction::FRem, spv::OpFRem},
3143 {Instruction::Or, spv::OpBitwiseOr},
3144 {Instruction::Xor, spv::OpBitwiseXor},
3145 {Instruction::And, spv::OpBitwiseAnd},
3146 {Instruction::Shl, spv::OpShiftLeftLogical},
3147 {Instruction::LShr, spv::OpShiftRightLogical},
3148 {Instruction::AShr, spv::OpShiftRightArithmetic}};
3149
3150 assert(0 != Map.count(I.getOpcode()));
3151
3152 return Map.at(I.getOpcode());
3153}
3154
SJW806a5d82020-07-15 12:51:38 -05003155SPIRVID SPIRVProducerPass::getSPIRVBuiltin(spv::BuiltIn BID,
3156 spv::Capability Cap) {
3157 SPIRVID RID;
3158
3159 auto ii = BuiltinConstantMap.find(BID);
3160
3161 if (ii != BuiltinConstantMap.end()) {
3162 return ii->second;
3163 } else {
SJW806a5d82020-07-15 12:51:38 -05003164 addCapability(Cap);
3165
3166 Type *type = PointerType::get(IntegerType::get(module->getContext(), 32),
3167 AddressSpace::Input);
3168
3169 RID = addSPIRVGlobalVariable(getSPIRVType(type), spv::StorageClassInput);
3170
3171 BuiltinConstantMap[BID] = RID;
3172
3173 //
3174 // Generate OpDecorate.
3175 //
3176 // Ops[0] : target
3177 // Ops[1] : decoration
3178 // Ops[2] : BuiltIn
3179 SPIRVOperandVec Ops;
3180 Ops << RID << spv::DecorationBuiltIn << static_cast<int>(BID);
3181
3182 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
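// Sketch for, e.g., BuiltInSubgroupSize:
//   %var = OpVariable %_ptr_Input_uint Input
//   OpDecorate %var BuiltIn SubgroupSize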
3183 }
3184
3185 return RID;
3186}
3187
3188SPIRVID
3189SPIRVProducerPass::GenerateClspvInstruction(CallInst *Call,
3190 const FunctionInfo &FuncInfo) {
3191 SPIRVID RID;
3192
3193 switch (FuncInfo.getType()) {
3194 case Builtins::kClspvCompositeConstruct:
3195 RID = addSPIRVPlaceholder(Call);
3196 break;
3197 case Builtins::kClspvResource: {
3198 if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
3199 // Generate an OpLoad
3200 SPIRVOperandVec Ops;
3201
3202 Ops << Call->getType()->getPointerElementType()
3203 << ResourceVarDeferredLoadCalls[Call];
3204
3205 RID = addSPIRVInst(spv::OpLoad, Ops);
3206
3207 } else {
3208 // This maps to an OpVariable we've already generated.
3209 // No code is generated for the call.
3210 }
3211 break;
3212 }
3213 case Builtins::kClspvLocal: {
3214 // Don't codegen an instruction here, but instead map this call directly
3215 // to the workgroup variable id.
3216 int spec_id = static_cast<int>(
3217 cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
3218 const auto &info = LocalSpecIdInfoMap[spec_id];
3219 RID = info.variable_id;
3220 break;
3221 }
3222 case Builtins::kClspvSamplerVarLiteral: {
3223 // Sampler initializers become a load of the corresponding sampler.
3224 // Map this to a load from the variable.
3225 const auto third_param = static_cast<unsigned>(
3226 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue());
3227 auto sampler_value = third_param;
3228 if (clspv::Option::UseSamplerMap()) {
alan-baker3f772c02021-06-15 22:18:11 -04003229 sampler_value = (*getSamplerMap())[third_param].first;
SJW806a5d82020-07-15 12:51:38 -05003230 }
3231
3232 // Generate an OpLoad
3233 SPIRVOperandVec Ops;
3234
3235 Ops << SamplerTy->getPointerElementType()
3236 << SamplerLiteralToIDMap[sampler_value];
3237
3238 RID = addSPIRVInst(spv::OpLoad, Ops);
3239 break;
3240 }
3241 case Builtins::kSpirvAtomicXor: {
3242 // Handle SPIR-V intrinsics
3243 SPIRVOperandVec Ops;
3244
3245 if (!Call->getType()->isVoidTy()) {
3246 Ops << Call->getType();
3247 }
3248
3249 for (unsigned i = 0; i < Call->getNumArgOperands(); i++) {
3250 Ops << Call->getArgOperand(i);
3251 }
3252
3253 RID = addSPIRVInst(spv::OpAtomicXor, Ops);
3254 break;
3255 }
3256 case Builtins::kSpirvOp: {
3257 // Handle SPIR-V intrinsics
3258 auto *arg0 = dyn_cast<ConstantInt>(Call->getArgOperand(0));
3259 spv::Op opcode = static_cast<spv::Op>(arg0->getZExtValue());
3260 if (opcode != spv::OpNop) {
3261 SPIRVOperandVec Ops;
3262
3263 if (!Call->getType()->isVoidTy()) {
3264 Ops << Call->getType();
3265 }
3266
3267 for (unsigned i = 1; i < Call->getNumArgOperands(); i++) {
3268 Ops << Call->getArgOperand(i);
3269 }
3270
3271 RID = addSPIRVInst(opcode, Ops);
3272 }
3273 break;
3274 }
3275 case Builtins::kSpirvCopyMemory: {
3276 //
3277 // Generate OpCopyMemory.
3278 //
3279
3280 // Ops[0] = Dst ID
3281 // Ops[1] = Src ID
3282 // Ops[2] = Memory Access
3283 // Ops[3] = Alignment
3284
alan-baker3f772c02021-06-15 22:18:11 -04003285 const auto volatile_arg = SpvVersion() >= SPIRVVersion::SPIRV_1_4 ? 4 : 3;
3286 auto IsVolatile = dyn_cast<ConstantInt>(Call->getArgOperand(volatile_arg))
3287 ->getZExtValue() != 0;
SJW806a5d82020-07-15 12:51:38 -05003288
3289 auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
3290 : spv::MemoryAccessMaskNone;
3291
3292 auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;
3293
alan-baker3f772c02021-06-15 22:18:11 -04003294 auto DstAlignment =
SJW806a5d82020-07-15 12:51:38 -05003295 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
alan-baker3f772c02021-06-15 22:18:11 -04003296 auto SrcAlignment = DstAlignment;
3297 if (SpvVersion() >= SPIRVVersion::SPIRV_1_4) {
3298 SrcAlignment =
3299 dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue();
3300 }
SJW806a5d82020-07-15 12:51:38 -05003301
alan-baker3f772c02021-06-15 22:18:11 -04003302 // OpCopyMemory only works if the pointer element types map to the same type
3303 // id. When generating code for SPIR-V 1.4 or later, this may not be the
3304 // case.
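// Fallback sketch used below when the two pointee type ids differ (1.4+):
//   %tmp = OpLoad %SrcTy %src Aligned <src_align>
//   %cpy = OpCopyLogical %DstTy %tmp
//          OpStore %dst %cpy Aligned <dst_align>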
3305 auto dst = Call->getArgOperand(0);
3306 auto src = Call->getArgOperand(1);
3307 auto dst_layout =
3308 PointerRequiresLayout(dst->getType()->getPointerAddressSpace());
3309 auto src_layout =
3310 PointerRequiresLayout(src->getType()->getPointerAddressSpace());
3311 auto dst_id =
3312 getSPIRVType(dst->getType()->getPointerElementType(), dst_layout);
3313 auto src_id =
3314 getSPIRVType(src->getType()->getPointerElementType(), src_layout);
SJW806a5d82020-07-15 12:51:38 -05003315 SPIRVOperandVec Ops;
alan-baker3f772c02021-06-15 22:18:11 -04003316 if (dst_id.get() != src_id.get()) {
3317 assert(Option::SpvVersion() >= SPIRVVersion::SPIRV_1_4);
3318 // Types differ so generate:
3319 // OpLoad
3320 // OpCopyLogical
3321 // OpStore
3322 auto load_type_id =
3323 getSPIRVType(src->getType()->getPointerElementType(), src_layout);
3324 Ops << load_type_id << src << MemoryAccess
3325 << static_cast<uint32_t>(SrcAlignment);
3326 auto load = addSPIRVInst(spv::OpLoad, Ops);
SJW806a5d82020-07-15 12:51:38 -05003327
alan-baker3f772c02021-06-15 22:18:11 -04003328 auto copy_type_id =
3329 getSPIRVType(dst->getType()->getPointerElementType(), dst_layout);
3330 Ops.clear();
3331 Ops << copy_type_id << load;
3332 auto copy = addSPIRVInst(spv::OpCopyLogical, Ops);
3333
3334 Ops.clear();
3335 Ops << dst << copy << MemoryAccess << static_cast<uint32_t>(DstAlignment);
3336 RID = addSPIRVInst(spv::OpStore, Ops);
3337 } else {
3338 Ops << dst << src << MemoryAccess << static_cast<uint32_t>(DstAlignment);
3339 if (SpvVersion() >= SPIRVVersion::SPIRV_1_4) {
3340 Ops << MemoryAccess << static_cast<uint32_t>(SrcAlignment);
3341 }
3342
3343 RID = addSPIRVInst(spv::OpCopyMemory, Ops);
3344 }
SJW806a5d82020-07-15 12:51:38 -05003345 break;
3346 }
3347 default:
3348 llvm_unreachable("Unknown CLSPV Instruction");
3349 break;
3350 }
3351 return RID;
3352}
3353
3354SPIRVID
3355SPIRVProducerPass::GenerateImageInstruction(CallInst *Call,
3356 const FunctionInfo &FuncInfo) {
3357 SPIRVID RID;
3358
alan-baker3f772c02021-06-15 22:18:11 -04003359 auto GetExtendMask = [this](Type *sample_type,
3360 bool is_int_image) -> uint32_t {
3361 if (SpvVersion() >= SPIRVVersion::SPIRV_1_4 &&
3362 sample_type->getScalarType()->isIntegerTy()) {
3363 if (is_int_image)
3364 return spv::ImageOperandsSignExtendMask;
3365 else
3366 return spv::ImageOperandsZeroExtendMask;
3367 }
3368 return 0;
3369 };
3370
SJW806a5d82020-07-15 12:51:38 -05003371 LLVMContext &Context = module->getContext();
3372 switch (FuncInfo.getType()) {
3373 case Builtins::kReadImagef:
3374 case Builtins::kReadImageh:
3375 case Builtins::kReadImagei:
3376 case Builtins::kReadImageui: {
3377 // read_image is converted to OpSampledImage and OpImageSampleExplicitLod.
3378 // Additionally, OpTypeSampledImage is generated.
alan-bakerf6bc8252020-09-23 14:58:55 -04003379 const auto image_ty = Call->getArgOperand(0)->getType();
SJW806a5d82020-07-15 12:51:38 -05003380 const auto &pi = FuncInfo.getParameter(1);
3381 if (pi.isSampler()) {
3382 //
3383 // Generate OpSampledImage.
3384 //
3385 // Ops[0] = Result Type ID
3386 // Ops[1] = Image ID
3387 // Ops[2] = Sampler ID
3388 //
3389 SPIRVOperandVec Ops;
3390
3391 Value *Image = Call->getArgOperand(0);
3392 Value *Sampler = Call->getArgOperand(1);
3393 Value *Coordinate = Call->getArgOperand(2);
3394
3395 TypeMapType &OpImageTypeMap = getImageTypeMap();
3396 Type *ImageTy = Image->getType()->getPointerElementType();
3397 SPIRVID ImageTyID = OpImageTypeMap[ImageTy];
3398
3399 Ops << ImageTyID << Image << Sampler;
3400
3401 SPIRVID SampledImageID = addSPIRVInst(spv::OpSampledImage, Ops);
3402
3403 //
3404 // Generate OpImageSampleExplicitLod.
3405 //
3406 // Ops[0] = Result Type ID
3407 // Ops[1] = Sampled Image ID
3408 // Ops[2] = Coordinate ID
3409 // Ops[3] = Image Operands Type ID
3410 // Ops[4] ... Ops[n] = Operands ID
3411 //
3412 Ops.clear();
3413
3414 const bool is_int_image = IsIntImageType(Image->getType());
3415 SPIRVID result_type;
3416 if (is_int_image) {
3417 result_type = v4int32ID;
3418 } else {
3419 result_type = getSPIRVType(Call->getType());
3420 }
3421
alan-baker3f772c02021-06-15 22:18:11 -04003422 uint32_t mask = spv::ImageOperandsLodMask |
3423 GetExtendMask(Call->getType(), is_int_image);
SJW806a5d82020-07-15 12:51:38 -05003424 Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
alan-baker3f772c02021-06-15 22:18:11 -04003425 Ops << result_type << SampledImageID << Coordinate << mask << CstFP0;
SJW806a5d82020-07-15 12:51:38 -05003426
3427 RID = addSPIRVInst(spv::OpImageSampleExplicitLod, Ops);
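// Combined (illustrative, float sample type):
//   %si = OpSampledImage %SampledImageTy %image %sampler
//   %px = OpImageSampleExplicitLod %v4float %si %coord Lod %float_0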
3428
3429 if (is_int_image) {
3430 // Generate the bitcast.
3431 Ops.clear();
3432 Ops << Call->getType() << RID;
3433 RID = addSPIRVInst(spv::OpBitcast, Ops);
3434 }
alan-bakerf6bc8252020-09-23 14:58:55 -04003435 } else if (IsStorageImageType(image_ty)) {
3436 // read_image on a storage image is mapped to OpImageRead.
3437 Value *Image = Call->getArgOperand(0);
3438 Value *Coordinate = Call->getArgOperand(1);
3439
3440 //
3441 // Generate OpImageRead
3442 //
3443 // Ops[0] = Result Type ID
3444 // Ops[1] = Image ID
3445 // Ops[2] = Coordinate
3446 // No optional image operands.
3447 //
3448 SPIRVOperandVec Ops;
3449
3450 const bool is_int_image = IsIntImageType(Image->getType());
3451 SPIRVID result_type;
3452 if (is_int_image) {
3453 result_type = v4int32ID;
3454 } else {
3455 result_type = getSPIRVType(Call->getType());
3456 }
3457
3458 Ops << result_type << Image << Coordinate;
alan-baker3f772c02021-06-15 22:18:11 -04003459 uint32_t mask = GetExtendMask(Call->getType(), is_int_image);
3460 if (mask != 0)
3461 Ops << mask;
alan-bakerf6bc8252020-09-23 14:58:55 -04003462 RID = addSPIRVInst(spv::OpImageRead, Ops);
3463
3464 if (is_int_image) {
3465 // Generate the bitcast.
3466 Ops.clear();
3467 Ops << Call->getType() << RID;
3468 RID = addSPIRVInst(spv::OpBitcast, Ops);
3469 }
3470
3471 // OpImageRead requires StorageImageReadWithoutFormat.
3472 addCapability(spv::CapabilityStorageImageReadWithoutFormat);
SJW806a5d82020-07-15 12:51:38 -05003473 } else {
alan-bakerf6bc8252020-09-23 14:58:55 -04003474 // read_image on a sampled image (without a sampler) is mapped to
3475 // OpImageFetch.
SJW806a5d82020-07-15 12:51:38 -05003476 Value *Image = Call->getArgOperand(0);
3477 Value *Coordinate = Call->getArgOperand(1);
3478
3479 //
3480 // Generate OpImageFetch
3481 //
3482 // Ops[0] = Result Type ID
3483 // Ops[1] = Image ID
3484 // Ops[2] = Coordinate ID
3485 // Ops[3] = Lod
3486 // Ops[4] = 0
3487 //
3488 SPIRVOperandVec Ops;
3489
3490 const bool is_int_image = IsIntImageType(Image->getType());
3491 SPIRVID result_type;
3492 if (is_int_image) {
3493 result_type = v4int32ID;
3494 } else {
3495 result_type = getSPIRVType(Call->getType());
3496 }
3497
alan-baker3f772c02021-06-15 22:18:11 -04003498 uint32_t mask = spv::ImageOperandsLodMask |
3499 GetExtendMask(Call->getType(), is_int_image);
3500 Ops << result_type << Image << Coordinate << mask
SJW806a5d82020-07-15 12:51:38 -05003501 << getSPIRVInt32Constant(0);
3502
3503 RID = addSPIRVInst(spv::OpImageFetch, Ops);
3504
3505 if (is_int_image) {
3506 // Generate the bitcast.
3507 Ops.clear();
3508 Ops << Call->getType() << RID;
3509 RID = addSPIRVInst(spv::OpBitcast, Ops);
3510 }
3511 }
3512 break;
3513 }
3514
3515 case Builtins::kWriteImagef:
3516 case Builtins::kWriteImageh:
3517 case Builtins::kWriteImagei:
3518 case Builtins::kWriteImageui: {
3519 // write_image is mapped to OpImageWrite.
3520 //
3521 // Generate OpImageWrite.
3522 //
3523 // Ops[0] = Image ID
3524 // Ops[1] = Coordinate ID
3525 // Ops[2] = Texel ID
3526 // Ops[3] = (Optional) Image Operands Type (Literal Number)
3527 // Ops[4] ... Ops[n] = (Optional) Operands ID
3528 //
3529 SPIRVOperandVec Ops;
3530
3531 Value *Image = Call->getArgOperand(0);
3532 Value *Coordinate = Call->getArgOperand(1);
3533 Value *Texel = Call->getArgOperand(2);
3534
3535 SPIRVID TexelID = getSPIRVValue(Texel);
3536
3537 const bool is_int_image = IsIntImageType(Image->getType());
3538 if (is_int_image) {
3539 // Generate a bitcast to v4int and use it as the texel value.
3540 Ops << v4int32ID << TexelID;
3541 TexelID = addSPIRVInst(spv::OpBitcast, Ops);
3542 Ops.clear();
3543 }
3544 Ops << Image << Coordinate << TexelID;
alan-baker3f772c02021-06-15 22:18:11 -04003545 uint32_t mask = GetExtendMask(Texel->getType(), is_int_image);
3546 if (mask != 0)
3547 Ops << mask;
SJW806a5d82020-07-15 12:51:38 -05003548 RID = addSPIRVInst(spv::OpImageWrite, Ops);
alan-bakerf6bc8252020-09-23 14:58:55 -04003549
3550 // Image writes require StorageImageWriteWithoutFormat.
3551 addCapability(spv::CapabilityStorageImageWriteWithoutFormat);
SJW806a5d82020-07-15 12:51:38 -05003552 break;
3553 }
3554
3555 case Builtins::kGetImageHeight:
3556 case Builtins::kGetImageWidth:
3557 case Builtins::kGetImageDepth:
3558 case Builtins::kGetImageDim: {
3559 // get_image_* is mapped to OpImageQuerySize or OpImageQuerySizeLod
3560 addCapability(spv::CapabilityImageQuery);
3561
3562 //
3563 // Generate OpImageQuerySize[Lod]
3564 //
3565 // Ops[0] = Image ID
3566 //
3567 // Result type has components equal to the dimensionality of the image,
3568 // plus 1 if the image is arrayed.
3569 //
3570 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
3571 SPIRVOperandVec Ops;
3572
3573 // Implement:
3574 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
3575 SPIRVID SizesTypeID;
3576
3577 Value *Image = Call->getArgOperand(0);
3578 const uint32_t dim = ImageDimensionality(Image->getType());
3579 const uint32_t components =
3580 dim + (IsArrayImageType(Image->getType()) ? 1 : 0);
3581 if (components == 1) {
3582 SizesTypeID = getSPIRVType(Type::getInt32Ty(Context));
3583 } else {
3584 SizesTypeID = getSPIRVType(
3585 FixedVectorType::get(Type::getInt32Ty(Context), components));
3586 }
3587 Ops << SizesTypeID << Image;
3588 spv::Op query_opcode = spv::OpImageQuerySize;
3589 if (IsSampledImageType(Image->getType())) {
3590 query_opcode = spv::OpImageQuerySizeLod;
3591 // Need explicit 0 for Lod operand.
3592 Ops << getSPIRVInt32Constant(0);
3593 }
3594
3595 RID = addSPIRVInst(query_opcode, Ops);
3596
3597 // May require an extra instruction to create the appropriate result of
3598 // the builtin function.
3599 if (FuncInfo.getType() == Builtins::kGetImageDim) {
3600 if (dim == 3) {
3601 // get_image_dim returns an int4 for 3D images.
3602 //
3603
3604 // Implement:
3605 // %result = OpCompositeConstruct %uint4 %sizes %uint_0
3606 Ops.clear();
3607 Ops << FixedVectorType::get(Type::getInt32Ty(Context), 4) << RID
3608 << getSPIRVInt32Constant(0);
3609
3610 RID = addSPIRVInst(spv::OpCompositeConstruct, Ops);
3611 } else if (dim != components) {
3612 // get_image_dim returns an int2 regardless of the arrayedness of the
3613 // image. If the image is arrayed, an element must be dropped from the
3614 // query result.
3615 //
3616
3617 // Implement:
3618 // %result = OpVectorShuffle %uint2 %sizes %sizes 0 1
3619 Ops.clear();
3620 Ops << FixedVectorType::get(Type::getInt32Ty(Context), 2) << RID << RID
3621 << 0 << 1;
3622
3623 RID = addSPIRVInst(spv::OpVectorShuffle, Ops);
3624 }
3625 } else if (components > 1) {
3626 // Implement:
3627 // %result = OpCompositeExtract %uint %sizes <component number>
3628 Ops.clear();
3629 Ops << Call->getType() << RID;
3630
3631 uint32_t component = 0;
3632 if (FuncInfo.getType() == Builtins::kGetImageHeight)
3633 component = 1;
3634 else if (FuncInfo.getType() == Builtins::kGetImageDepth)
3635 component = 2;
3636 Ops << component;
3637
3638 RID = addSPIRVInst(spv::OpCompositeExtract, Ops);
3639 }
3640 break;
3641 }
3642 default:
3643 llvm_unreachable("Unsupported Image builtin");
3644 }
3645
3646 return RID;
3647}
3648
3649SPIRVID
3650SPIRVProducerPass::GenerateSubgroupInstruction(CallInst *Call,
3651 const FunctionInfo &FuncInfo) {
3652 SPIRVID RID;
3653
3654 // Sub-group operations require SPIR-V version 1.3 or greater.
3655 if (SpvVersion() < SPIRVVersion::SPIRV_1_3) {
3656 // llvm_unreachable("SubGroups extension requires SPIRV 1.3 or greater");
3657 // TODO(sjw): error out gracefully
3658 }
3659
3660 auto loadBuiltin = [this, Call](spv::BuiltIn spvBI,
3661 spv::Capability spvCap =
3662 spv::CapabilityGroupNonUniform) {
3663 SPIRVOperandVec Ops;
3664 Ops << Call->getType() << this->getSPIRVBuiltin(spvBI, spvCap);
3665
3666 return addSPIRVInst(spv::OpLoad, Ops);
3667 };
3668
3669 spv::Op op = spv::OpNop;
3670 switch (FuncInfo.getType()) {
3671 case Builtins::kGetSubGroupSize:
3672 return loadBuiltin(spv::BuiltInSubgroupSize);
3673 case Builtins::kGetNumSubGroups:
3674 return loadBuiltin(spv::BuiltInNumSubgroups);
3675 case Builtins::kGetSubGroupId:
3676 return loadBuiltin(spv::BuiltInSubgroupId);
3677 case Builtins::kGetSubGroupLocalId:
3678 return loadBuiltin(spv::BuiltInSubgroupLocalInvocationId);
3679
3680 case Builtins::kSubGroupBroadcast:
3681 if (SpvVersion() < SPIRVVersion::SPIRV_1_5 &&
3682 !dyn_cast<ConstantInt>(Call->getOperand(1))) {
3683 llvm_unreachable("sub_group_broadcast requires constant lane Id for "
3684 "SPIRV version < 1.5");
3685 }
3686 addCapability(spv::CapabilityGroupNonUniformBallot);
3687 op = spv::OpGroupNonUniformBroadcast;
3688 break;
3689
3690 case Builtins::kSubGroupAll:
3691 addCapability(spv::CapabilityGroupNonUniformVote);
3692 op = spv::OpGroupNonUniformAll;
3693 break;
3694 case Builtins::kSubGroupAny:
3695 addCapability(spv::CapabilityGroupNonUniformVote);
3696 op = spv::OpGroupNonUniformAny;
3697 break;
3698 case Builtins::kSubGroupReduceAdd:
3699 case Builtins::kSubGroupScanExclusiveAdd:
3700 case Builtins::kSubGroupScanInclusiveAdd: {
3701 addCapability(spv::CapabilityGroupNonUniformArithmetic);
3702 if (FuncInfo.getParameter(0).type_id == Type::IntegerTyID) {
3703 op = spv::OpGroupNonUniformIAdd;
3704 } else {
3705 op = spv::OpGroupNonUniformFAdd;
3706 }
3707 break;
3708 }
3709 case Builtins::kSubGroupReduceMin:
3710 case Builtins::kSubGroupScanExclusiveMin:
3711 case Builtins::kSubGroupScanInclusiveMin: {
3712 addCapability(spv::CapabilityGroupNonUniformArithmetic);
3713 auto &param = FuncInfo.getParameter(0);
3714 if (param.type_id == Type::IntegerTyID) {
3715 op = param.is_signed ? spv::OpGroupNonUniformSMin
3716 : spv::OpGroupNonUniformUMin;
3717 } else {
3718 op = spv::OpGroupNonUniformFMin;
3719 }
3720 break;
3721 }
3722 case Builtins::kSubGroupReduceMax:
3723 case Builtins::kSubGroupScanExclusiveMax:
3724 case Builtins::kSubGroupScanInclusiveMax: {
3725 addCapability(spv::CapabilityGroupNonUniformArithmetic);
3726 auto &param = FuncInfo.getParameter(0);
3727 if (param.type_id == Type::IntegerTyID) {
3728 op = param.is_signed ? spv::OpGroupNonUniformSMax
3729 : spv::OpGroupNonUniformUMax;
3730 } else {
3731 op = spv::OpGroupNonUniformFMax;
3732 }
3733 break;
3734 }
3735
3736 case Builtins::kGetEnqueuedNumSubGroups:
3737 // TODO(sjw): requires CapabilityKernel (incompatible with Shader)
3738 case Builtins::kGetMaxSubGroupSize:
3739 // TODO(sjw): use SpecConstant, capability Kernel (incompatible with Shader)
3740 case Builtins::kSubGroupBarrier:
3741 case Builtins::kSubGroupReserveReadPipe:
3742 case Builtins::kSubGroupReserveWritePipe:
3743 case Builtins::kSubGroupCommitReadPipe:
3744 case Builtins::kSubGroupCommitWritePipe:
3745 case Builtins::kGetKernelSubGroupCountForNdrange:
3746 case Builtins::kGetKernelMaxSubGroupSizeForNdrange:
3747 default:
3748 Call->print(errs());
3749 llvm_unreachable("Unsupported sub_group operation");
3750 break;
3751 }
3752
3753 assert(op != spv::OpNop);
3754
3755 SPIRVOperandVec Operands;
3756
3757 //
3758 // Generate OpGroupNonUniform*
3759 //
3760 // Ops[0] = Result Type ID
3761 // Ops[1] = ScopeSubgroup
3762 // Ops[2] = Value ID
3763 // Ops[3] = Local ID
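  // For example, sub_group_reduce_add on floats lowers roughly to
  //   %r = OpGroupNonUniformFAdd %float %uint_3 Reduce %x
  // where %uint_3 is a constant holding ScopeSubgroup (a sketch; the IDs and
  // names are illustrative, not the ones actually assigned).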
3764
3765 // The result type.
3766 Operands << Call->getType();
3767
3768 // Subgroup Scope
3769 Operands << getSPIRVInt32Constant(spv::ScopeSubgroup);
3770
3771 switch (FuncInfo.getType()) {
3772 case Builtins::kSubGroupReduceAdd:
3773 case Builtins::kSubGroupReduceMin:
3774 case Builtins::kSubGroupReduceMax:
3775 Operands << spv::GroupOperationReduce;
3776 break;
3777 case Builtins::kSubGroupScanExclusiveAdd:
3778 case Builtins::kSubGroupScanExclusiveMin:
3779 case Builtins::kSubGroupScanExclusiveMax:
3780 Operands << spv::GroupOperationExclusiveScan;
3781 break;
3782 case Builtins::kSubGroupScanInclusiveAdd:
3783 case Builtins::kSubGroupScanInclusiveMin:
3784 case Builtins::kSubGroupScanInclusiveMax:
3785 Operands << spv::GroupOperationInclusiveScan;
3786 break;
3787 default:
3788 break;
3789 }
3790
3791 for (Use &use : Call->arg_operands()) {
3792 Operands << use.get();
3793 }
3794
3795 return addSPIRVInst(op, Operands);
3796}
3797
3798SPIRVID SPIRVProducerPass::GenerateInstructionFromCall(CallInst *Call) {
3799 LLVMContext &Context = module->getContext();
3800
3801 auto &func_info = Builtins::Lookup(Call->getCalledFunction());
3802 auto func_type = func_info.getType();
3803
3804 if (BUILTIN_IN_GROUP(func_type, Clspv)) {
3805 return GenerateClspvInstruction(Call, func_info);
3806 } else if (BUILTIN_IN_GROUP(func_type, Image)) {
3807 return GenerateImageInstruction(Call, func_info);
3808 } else if (BUILTIN_IN_GROUP(func_type, SubgroupsKHR)) {
3809 return GenerateSubgroupInstruction(Call, func_info);
3810 }
3811
3812 SPIRVID RID;
3813
alan-baker5f2e88e2020-12-07 15:24:04 -05003814 switch (Call->getCalledFunction()->getIntrinsicID()) {
3815 case Intrinsic::ctlz: {
3816 // Implement as (bit width - 1) - FindUMsb, i.e. 31 - FindUMsb for i32.
3816 // Ignore the second operand of llvm.ctlz.
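    // As a rough sketch, for a scalar i32 input this emits something like
    // (IDs and names illustrative only):
    //   %msb    = OpExtInst %uint %glsl_std_450 FindUMsb %x
    //   %result = OpISub %uint %uint_31 %msb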
3817 SPIRVOperandVec Ops;
3818 Ops << Call->getType() << getOpExtInstImportID()
3819 << glsl::ExtInst::ExtInstFindUMsb << Call->getArgOperand(0);
3820 auto find_msb = addSPIRVInst(spv::OpExtInst, Ops);
3821
3822 Constant *thirty_one = ConstantInt::get(
3823 Call->getType(), Call->getType()->getScalarSizeInBits() - 1);
3824 Ops.clear();
3825 Ops << Call->getType() << thirty_one << find_msb;
3826 return addSPIRVInst(spv::OpISub, Ops);
3827 }
3828 case Intrinsic::cttz: {
3829 // Implement as:
3830 // lsb = FindILsb x
3831 // res = lsb == -1 ? width : lsb
3832 //
3833 // Ignore the second operand of llvm.cttz.
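    // As a rough sketch, for a scalar i32 input (IDs illustrative only):
    //   %lsb = OpExtInst %uint %glsl_std_450 FindILsb %x
    //   %cmp = OpIEqual %bool %lsb %uint_4294967295   ; lsb == -1
    //   %res = OpSelect %uint %cmp %uint_32 %lsb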
3834 SPIRVOperandVec Ops;
3835 Ops << Call->getType() << getOpExtInstImportID()
3836 << glsl::ExtInst::ExtInstFindILsb << Call->getArgOperand(0);
3837 auto find_lsb = addSPIRVInst(spv::OpExtInst, Ops);
3838
3839 auto neg_one = Constant::getAllOnesValue(Call->getType());
3840 auto i1_ty = Call->getType()->getWithNewBitWidth(1);
3841 auto width = ConstantInt::get(Call->getType(),
3842 Call->getType()->getScalarSizeInBits());
3843
3844 Ops.clear();
3845 Ops << i1_ty << find_lsb << neg_one;
3846 auto cmp = addSPIRVInst(spv::OpIEqual, Ops);
3847
3848 Ops.clear();
3849 Ops << Call->getType() << cmp << width << find_lsb;
3850 return addSPIRVInst(spv::OpSelect, Ops);
3851 }
3852
3853 default:
3854 break;
3855 }
3856
SJW806a5d82020-07-15 12:51:38 -05003857 switch (func_type) {
3858 case Builtins::kPopcount: {
3859 //
3860 // Generate OpBitCount
3861 //
3862 // Ops[0] = Result Type ID
3863 // Ops[1] = Base ID
3864 SPIRVOperandVec Ops;
3865 Ops << Call->getType() << Call->getOperand(0);
3866
3867 RID = addSPIRVInst(spv::OpBitCount, Ops);
3868 break;
3869 }
3870 default: {
3871 glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(func_info);
3872
alan-baker5f2e88e2020-12-07 15:24:04 -05003873 // Do not replace functions that have implementations.
3874 if (EInst && Call->getCalledFunction()->isDeclaration()) {
SJW806a5d82020-07-15 12:51:38 -05003875 SPIRVID ExtInstImportID = getOpExtInstImportID();
3876
3877 //
3878 // Generate OpExtInst.
3879 //
3880
3881 // Ops[0] = Result Type ID
3882 // Ops[1] = Set ID (OpExtInstImport ID)
3883 // Ops[2] = Instruction Number (Literal Number)
3884 // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
3885 SPIRVOperandVec Ops;
3886
3887 Ops << Call->getType() << ExtInstImportID << EInst;
3888
3889 for (auto &use : Call->arg_operands()) {
3890 Ops << use.get();
3891 }
3892
3893 RID = addSPIRVInst(spv::OpExtInst, Ops);
3894
3895 const auto IndirectExtInst = getIndirectExtInstEnum(func_info);
3896 if (IndirectExtInst != kGlslExtInstBad) {
SJW806a5d82020-07-15 12:51:38 -05003897 // Generate one more instruction that uses the result of the extended
3898 // instruction. Its result id is one more than the id of the
3899 // extended instruction.
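        // For example, acospi(x) is emitted as the Acos extended instruction
        // followed by an OpFMul with a 1/pi constant (splatted for vector
        // results), i.e. roughly acos(x) * (1/pi). This is a sketch of what
        // the helper below produces, not the exact IDs emitted.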
3900 auto generate_extra_inst = [this, &Context, &Call,
3901 &RID](spv::Op opcode, Constant *constant) {
3902 //
3903 // Generate instruction like:
3904 // result = opcode constant <extinst-result>
3905 //
3906 // Ops[0] = Result Type ID
3907 // Ops[1] = Operand 0 ;; the constant, suitably splatted
3908 // Ops[2] = Operand 1 ;; the result of the extended instruction
3909 SPIRVOperandVec Ops;
3910
3911 Type *resultTy = Call->getType();
3912
3913 if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
alan-baker931253b2020-08-20 17:15:38 -04003914 constant =
3915 ConstantVector::getSplat(vectorTy->getElementCount(), constant);
SJW806a5d82020-07-15 12:51:38 -05003916 }
3917 Ops << resultTy << constant << RID;
3918
3919 RID = addSPIRVInst(opcode, Ops);
3920 };
3921
SJW806a5d82020-07-15 12:51:38 -05003922 switch (IndirectExtInst) {
SJW806a5d82020-07-15 12:51:38 -05003923 case glsl::ExtInstAcos: // Implementing acospi
3924 case glsl::ExtInstAsin: // Implementing asinpi
3925 case glsl::ExtInstAtan: // Implementing atanpi
3926 case glsl::ExtInstAtan2: // Implementing atan2pi
3927 generate_extra_inst(
3928 spv::OpFMul,
alan-bakercc2bafb2020-11-02 08:30:18 -05003929 ConstantFP::get(Call->getType()->getScalarType(), kOneOverPi));
SJW806a5d82020-07-15 12:51:38 -05003930 break;
3931
3932 default:
3933 assert(false && "internally inconsistent");
3934 }
3935 }
3936 } else {
Pedro Olsen Ferreira208d1e72021-06-17 19:24:48 +01003937 switch (Call->getIntrinsicID()) {
3938 // These LLVM intrinsics have no SPV equivalent.
3939 // Because they are optimiser hints, we can safely discard them.
3940 case Intrinsic::experimental_noalias_scope_decl:
3941 break;
3942 default:
3943 // A real function call (not builtin)
3944 // Call instruction is deferred because it needs function's ID.
3945 RID = addSPIRVPlaceholder(Call);
3946 break;
3947 }
SJW806a5d82020-07-15 12:51:38 -05003948 }
3949
3950 break;
3951 }
3952 }
3953
3954 return RID;
3955}
3956
David Neto22f144c2017-06-12 14:26:21 -04003957void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
David Neto22f144c2017-06-12 14:26:21 -04003958 ValueMapType &VMap = getValueMap();
SJW806a5d82020-07-15 12:51:38 -05003959 LLVMContext &Context = module->getContext();
David Neto22f144c2017-06-12 14:26:21 -04003960
SJW806a5d82020-07-15 12:51:38 -05003961 SPIRVID RID;
David Neto22f144c2017-06-12 14:26:21 -04003962
3963 switch (I.getOpcode()) {
3964 default: {
3965 if (Instruction::isCast(I.getOpcode())) {
3966 //
3967 // Generate SPIRV instructions for cast operators.
3968 //
3969
David Netod2de94a2017-08-28 17:27:47 -04003970 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003971 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04003972 auto toI8 = Ty == Type::getInt8Ty(Context);
3973 auto fromI32 = OpTy == Type::getInt32Ty(Context);
James Price757dea82021-01-11 13:42:39 -05003974 // Handle zext, sext, uitofp, and sitofp with i1 type specially.
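      // For example, "zext i1 %c to i32" becomes roughly
      //   %r = OpSelect %uint %c %uint_1 %uint_0
      // while sext selects -1/0 and uitofp/sitofp select 1.0/-1.0 against 0.0
      // (a sketch; IDs illustrative).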
David Neto22f144c2017-06-12 14:26:21 -04003975 if ((I.getOpcode() == Instruction::ZExt ||
3976 I.getOpcode() == Instruction::SExt ||
James Price757dea82021-01-11 13:42:39 -05003977 I.getOpcode() == Instruction::UIToFP ||
3978 I.getOpcode() == Instruction::SIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05003979 OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003980 //
3981 // Generate OpSelect.
3982 //
3983
3984 // Ops[0] = Result Type ID
3985 // Ops[1] = Condition ID
3986 // Ops[2] = True Constant ID
3987 // Ops[3] = False Constant ID
SJWf93f5f32020-05-05 07:27:56 -05003988 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003989
SJW01901d92020-05-21 08:58:31 -05003990 Ops << I.getType() << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04003991
David Neto22f144c2017-06-12 14:26:21 -04003992 if (I.getOpcode() == Instruction::ZExt) {
SJW01901d92020-05-21 08:58:31 -05003993 Ops << ConstantInt::get(I.getType(), 1);
David Neto22f144c2017-06-12 14:26:21 -04003994 } else if (I.getOpcode() == Instruction::SExt) {
SJW01901d92020-05-21 08:58:31 -05003995 Ops << ConstantInt::getSigned(I.getType(), -1);
James Price757dea82021-01-11 13:42:39 -05003996 } else if (I.getOpcode() == Instruction::UIToFP) {
James Price96bd3d92020-11-23 09:01:57 -05003997 Ops << ConstantFP::get(I.getType(), 1.0);
James Price757dea82021-01-11 13:42:39 -05003998 } else if (I.getOpcode() == Instruction::SIToFP) {
3999 Ops << ConstantFP::get(I.getType(), -1.0);
David Neto22f144c2017-06-12 14:26:21 -04004000 }
David Neto22f144c2017-06-12 14:26:21 -04004001
David Neto22f144c2017-06-12 14:26:21 -04004002 if (I.getOpcode() == Instruction::ZExt) {
SJW01901d92020-05-21 08:58:31 -05004003 Ops << Constant::getNullValue(I.getType());
David Neto22f144c2017-06-12 14:26:21 -04004004 } else if (I.getOpcode() == Instruction::SExt) {
SJW01901d92020-05-21 08:58:31 -05004005 Ops << Constant::getNullValue(I.getType());
David Neto22f144c2017-06-12 14:26:21 -04004006 } else {
James Price96bd3d92020-11-23 09:01:57 -05004007 Ops << ConstantFP::get(I.getType(), 0.0);
David Neto22f144c2017-06-12 14:26:21 -04004008 }
David Neto22f144c2017-06-12 14:26:21 -04004009
SJWf93f5f32020-05-05 07:27:56 -05004010 RID = addSPIRVInst(spv::OpSelect, Ops);
alan-bakerb39c8262019-03-08 14:03:37 -05004011 } else if (!clspv::Option::Int8Support() &&
4012 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04004013 // The SPIR-V target type is a 32-bit int. Keep only the bottom
4014 // 8 bits.
4015 // Before:
4016 // %result = trunc i32 %a to i8
4017 // After
4018 // %result = OpBitwiseAnd %uint %a %uint_255
4019
SJWf93f5f32020-05-05 07:27:56 -05004020 SPIRVOperandVec Ops;
David Netod2de94a2017-08-28 17:27:47 -04004021
SJW806a5d82020-07-15 12:51:38 -05004022 Ops << OpTy << I.getOperand(0) << getSPIRVInt32Constant(255);
David Netod2de94a2017-08-28 17:27:47 -04004023
SJWf93f5f32020-05-05 07:27:56 -05004024 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004025 } else {
4026 // Ops[0] = Result Type ID
4027 // Ops[1] = Source Value ID
SJWf93f5f32020-05-05 07:27:56 -05004028 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004029
SJW01901d92020-05-21 08:58:31 -05004030 Ops << I.getType() << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04004031
SJWf93f5f32020-05-05 07:27:56 -05004032 RID = addSPIRVInst(GetSPIRVCastOpcode(I), Ops);
David Neto22f144c2017-06-12 14:26:21 -04004033 }
4034 } else if (isa<BinaryOperator>(I)) {
4035 //
4036 // Generate SPIRV instructions for binary operators.
4037 //
4038
4039 // Handle xor with i1 type specially.
4040 if (I.getOpcode() == Instruction::Xor &&
4041 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00004042 ((isa<ConstantInt>(I.getOperand(0)) &&
4043 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
4044 (isa<ConstantInt>(I.getOperand(1)) &&
4045 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04004046 //
4047 // Generate OpLogicalNot.
4048 //
4049 // Ops[0] = Result Type ID
4050 // Ops[1] = Operand
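        // For example, "xor i1 %c, true" is emitted roughly as
        //   %r = OpLogicalNot %bool %c
        // (a sketch; IDs illustrative).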
SJWf93f5f32020-05-05 07:27:56 -05004051 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004052
SJW01901d92020-05-21 08:58:31 -05004053 Ops << I.getType();
David Neto22f144c2017-06-12 14:26:21 -04004054
4055 Value *CondV = I.getOperand(0);
4056 if (isa<Constant>(I.getOperand(0))) {
4057 CondV = I.getOperand(1);
4058 }
SJW01901d92020-05-21 08:58:31 -05004059 Ops << CondV;
David Neto22f144c2017-06-12 14:26:21 -04004060
SJWf93f5f32020-05-05 07:27:56 -05004061 RID = addSPIRVInst(spv::OpLogicalNot, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004062 } else {
4063 // Ops[0] = Result Type ID
4064 // Ops[1] = Operand 0
4065 // Ops[2] = Operand 1
SJWf93f5f32020-05-05 07:27:56 -05004066 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004067
SJW01901d92020-05-21 08:58:31 -05004068 Ops << I.getType() << I.getOperand(0) << I.getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04004069
SJWf93f5f32020-05-05 07:27:56 -05004070 RID = addSPIRVInst(GetSPIRVBinaryOpcode(I), Ops);
David Neto22f144c2017-06-12 14:26:21 -04004071 }
alan-bakerc9c55ae2019-12-02 16:01:27 -05004072 } else if (I.getOpcode() == Instruction::FNeg) {
4073 // The only unary operator.
4074 //
4075 // Ops[0] = Result Type ID
4076 // Ops[1] = Operand 0
SJW01901d92020-05-21 08:58:31 -05004077 SPIRVOperandVec Ops;
alan-bakerc9c55ae2019-12-02 16:01:27 -05004078
SJW01901d92020-05-21 08:58:31 -05004079 Ops << I.getType() << I.getOperand(0);
4080 RID = addSPIRVInst(spv::OpFNegate, Ops);
Marco Antognini68e5c512020-09-09 16:08:57 +01004081 } else if (I.getOpcode() == Instruction::Unreachable) {
4082 RID = addSPIRVInst(spv::OpUnreachable);
David Neto22f144c2017-06-12 14:26:21 -04004083 } else {
4084 I.print(errs());
4085 llvm_unreachable("Unsupported instruction???");
4086 }
4087 break;
4088 }
4089 case Instruction::GetElementPtr: {
4090 auto &GlobalConstArgSet = getGlobalConstArgSet();
4091
4092 //
4093 // Generate OpAccessChain.
4094 //
4095 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
4096
4097 //
4098 // Generate OpAccessChain.
4099 //
4100
4101 // Ops[0] = Result Type ID
4102 // Ops[1] = Base ID
4103 // Ops[2] ... Ops[n] = Indexes ID
SJWf93f5f32020-05-05 07:27:56 -05004104 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004105
alan-bakerb6b09dc2018-11-08 16:59:28 -05004106 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04004107 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
4108 GlobalConstArgSet.count(GEP->getPointerOperand())) {
4109 // Use pointer type with private address space for global constant.
4110 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04004111 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04004112 }
David Neto257c3892018-04-11 13:19:45 -04004113
SJW01901d92020-05-21 08:58:31 -05004114 Ops << ResultType;
David Neto22f144c2017-06-12 14:26:21 -04004115
David Neto862b7d82018-06-14 18:48:37 -04004116 // Generate the base pointer.
SJW01901d92020-05-21 08:58:31 -05004117 Ops << GEP->getPointerOperand();
David Neto22f144c2017-06-12 14:26:21 -04004118
David Neto862b7d82018-06-14 18:48:37 -04004119 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04004120
4121 //
4122 // Follows below rules for gep.
4123 //
David Neto862b7d82018-06-14 18:48:37 -04004124 // 1. If gep's first index is 0 generate OpAccessChain and ignore gep's
4125 // first index.
David Neto22f144c2017-06-12 14:26:21 -04004126 // 2. If gep's first index is not 0, generate OpPtrAccessChain and use gep's
4127 // first index.
4128 // 3. If gep's first index is not constant, generate OpPtrAccessChain and
4129 // use gep's first index.
4130 // 4. If it is not above case 1, 2 and 3, generate OpAccessChain and use
4131 // gep's first index.
4132 //
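    // For example, "getelementptr %T, %T* %p, i32 0, i32 3" maps roughly to
    //   OpAccessChain %result_ptr %p %uint_3
    // while a non-zero or non-constant first index is kept and emitted with
    // OpPtrAccessChain instead (a sketch; IDs illustrative).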
4133 spv::Op Opcode = spv::OpAccessChain;
4134 unsigned offset = 0;
4135 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04004136 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04004137 offset = 1;
David Neto862b7d82018-06-14 18:48:37 -04004138 } else {
David Neto22f144c2017-06-12 14:26:21 -04004139 Opcode = spv::OpPtrAccessChain;
David Neto22f144c2017-06-12 14:26:21 -04004140 }
David Neto862b7d82018-06-14 18:48:37 -04004141 } else {
David Neto22f144c2017-06-12 14:26:21 -04004142 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04004143 }
4144
4145 if (Opcode == spv::OpPtrAccessChain) {
alan-baker7506abb2020-09-10 15:02:55 -04004146 // Shader validation in the SPIR-V spec requires that the base pointer to
4147 // OpPtrAccessChain (in StorageBuffer storage class) be decorated with
4148 // ArrayStride.
alan-baker5b86ed72019-02-15 08:26:50 -05004149 auto address_space = ResultType->getAddressSpace();
4150 setVariablePointersCapabilities(address_space);
4151 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04004152 case spv::StorageClassStorageBuffer:
David Neto1a1a0582017-07-07 12:01:44 -04004153 // Save the need to generate an ArrayStride decoration. But defer
4154 // generation until later, so we only make one decoration.
alan-baker7506abb2020-09-10 15:02:55 -04004155 getTypesNeedingArrayStride().insert(GEP->getPointerOperandType());
4156 break;
4157 case spv::StorageClassWorkgroup:
Alan Bakerfcda9482018-10-02 17:09:59 -04004158 break;
4159 default:
alan-baker7506abb2020-09-10 15:02:55 -04004160 llvm_unreachable(
4161 "OpPtrAccessChain is not supported for this storage class");
Alan Bakerfcda9482018-10-02 17:09:59 -04004162 break;
David Neto1a1a0582017-07-07 12:01:44 -04004163 }
David Neto22f144c2017-06-12 14:26:21 -04004164 }
4165
4166 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
SJW01901d92020-05-21 08:58:31 -05004167 Ops << *II;
David Neto22f144c2017-06-12 14:26:21 -04004168 }
4169
SJWf93f5f32020-05-05 07:27:56 -05004170 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004171 break;
4172 }
4173 case Instruction::ExtractValue: {
4174 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
4175 // Ops[0] = Result Type ID
4176 // Ops[1] = Composite ID
4177 // Ops[2] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004178 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004179
SJW01901d92020-05-21 08:58:31 -05004180 Ops << I.getType();
David Neto22f144c2017-06-12 14:26:21 -04004181
SJW01901d92020-05-21 08:58:31 -05004182 Ops << EVI->getAggregateOperand();
David Neto22f144c2017-06-12 14:26:21 -04004183
4184 for (auto &Index : EVI->indices()) {
SJW01901d92020-05-21 08:58:31 -05004185 Ops << Index;
David Neto22f144c2017-06-12 14:26:21 -04004186 }
4187
SJWf93f5f32020-05-05 07:27:56 -05004188 RID = addSPIRVInst(spv::OpCompositeExtract, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004189 break;
4190 }
4191 case Instruction::InsertValue: {
4192 InsertValueInst *IVI = cast<InsertValueInst>(&I);
4193 // Ops[0] = Result Type ID
4194 // Ops[1] = Object ID
4195 // Ops[2] = Composite ID
4196 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004197 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004198
SJW01901d92020-05-21 08:58:31 -05004199 Ops << I.getType() << IVI->getInsertedValueOperand()
4200 << IVI->getAggregateOperand();
David Neto22f144c2017-06-12 14:26:21 -04004201
4202 for (auto &Index : IVI->indices()) {
SJW01901d92020-05-21 08:58:31 -05004203 Ops << Index;
David Neto22f144c2017-06-12 14:26:21 -04004204 }
4205
SJWf93f5f32020-05-05 07:27:56 -05004206 RID = addSPIRVInst(spv::OpCompositeInsert, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004207 break;
4208 }
4209 case Instruction::Select: {
4210 //
4211 // Generate OpSelect.
4212 //
4213
4214 // Ops[0] = Result Type ID
4215 // Ops[1] = Condition ID
4216 // Ops[2] = True Constant ID
4217 // Ops[3] = False Constant ID
SJWf93f5f32020-05-05 07:27:56 -05004218 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004219
4220 // Find SPIRV instruction for parameter type.
4221 auto Ty = I.getType();
4222 if (Ty->isPointerTy()) {
4223 auto PointeeTy = Ty->getPointerElementType();
4224 if (PointeeTy->isStructTy() &&
4225 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
4226 Ty = PointeeTy;
alan-baker5b86ed72019-02-15 08:26:50 -05004227 } else {
4228 // Selecting between pointers requires variable pointers.
4229 setVariablePointersCapabilities(Ty->getPointerAddressSpace());
4230 if (!hasVariablePointers() && !selectFromSameObject(&I)) {
SJW01901d92020-05-21 08:58:31 -05004231 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004232 }
David Neto22f144c2017-06-12 14:26:21 -04004233 }
4234 }
4235
SJW01901d92020-05-21 08:58:31 -05004236 Ops << Ty << I.getOperand(0) << I.getOperand(1) << I.getOperand(2);
David Neto22f144c2017-06-12 14:26:21 -04004237
SJWf93f5f32020-05-05 07:27:56 -05004238 RID = addSPIRVInst(spv::OpSelect, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004239 break;
4240 }
4241 case Instruction::ExtractElement: {
4242 // Handle <4 x i8> type manually.
4243 Type *CompositeTy = I.getOperand(0)->getType();
4244 if (is4xi8vec(CompositeTy)) {
4245 //
4246 // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
4247 // <4 x i8>.
4248 //
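      // The <4 x i8> vector is modelled as a single 32-bit word, so
      // extracting element i is roughly (word >> (i * 8)) & 0xFF, which is
      // what the OpShiftRightLogical / OpBitwiseAnd pair below produces.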
4249
4250 //
4251 // Generate OpShiftRightLogical
4252 //
4253 // Ops[0] = Result Type ID
4254 // Ops[1] = Operand 0
4255 // Ops[2] = Operand 1
4256 //
SJWf93f5f32020-05-05 07:27:56 -05004257 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004258
SJW01901d92020-05-21 08:58:31 -05004259 Ops << CompositeTy << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04004260
SJW01901d92020-05-21 08:58:31 -05004261 SPIRVID Op1ID = 0;
David Neto22f144c2017-06-12 14:26:21 -04004262 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
4263 // Handle constant index.
SJW806a5d82020-07-15 12:51:38 -05004264 uint32_t Idx = static_cast<uint32_t>(CI->getZExtValue());
4265 Op1ID = getSPIRVInt32Constant(Idx * 8);
David Neto22f144c2017-06-12 14:26:21 -04004266 } else {
4267 // Handle variable index.
SJWf93f5f32020-05-05 07:27:56 -05004268 SPIRVOperandVec TmpOps;
David Neto22f144c2017-06-12 14:26:21 -04004269
SJW806a5d82020-07-15 12:51:38 -05004270 TmpOps << Type::getInt32Ty(Context) << I.getOperand(1)
4271 << getSPIRVInt32Constant(8);
David Neto22f144c2017-06-12 14:26:21 -04004272
SJWf93f5f32020-05-05 07:27:56 -05004273 Op1ID = addSPIRVInst(spv::OpIMul, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004274 }
SJW01901d92020-05-21 08:58:31 -05004275 Ops << Op1ID;
David Neto22f144c2017-06-12 14:26:21 -04004276
SJW01901d92020-05-21 08:58:31 -05004277 SPIRVID ShiftID = addSPIRVInst(spv::OpShiftRightLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004278
4279 //
4280 // Generate OpBitwiseAnd
4281 //
4282 // Ops[0] = Result Type ID
4283 // Ops[1] = Operand 0
4284 // Ops[2] = Operand 1
4285 //
4286 Ops.clear();
4287
SJW806a5d82020-07-15 12:51:38 -05004288 Ops << CompositeTy << ShiftID << getSPIRVInt32Constant(0xFF);
David Neto22f144c2017-06-12 14:26:21 -04004289
SJWf93f5f32020-05-05 07:27:56 -05004290 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004291 break;
4292 }
4293
4294 // Ops[0] = Result Type ID
4295 // Ops[1] = Composite ID
4296 // Ops[2] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004297 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004298
SJW01901d92020-05-21 08:58:31 -05004299 Ops << I.getType() << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04004300
4301 spv::Op Opcode = spv::OpCompositeExtract;
4302 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
SJW01901d92020-05-21 08:58:31 -05004303 Ops << static_cast<uint32_t>(CI->getZExtValue());
David Neto22f144c2017-06-12 14:26:21 -04004304 } else {
SJW01901d92020-05-21 08:58:31 -05004305 Ops << I.getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04004306 Opcode = spv::OpVectorExtractDynamic;
4307 }
4308
SJWf93f5f32020-05-05 07:27:56 -05004309 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004310 break;
4311 }
4312 case Instruction::InsertElement: {
4313 // Handle <4 x i8> type manually.
4314 Type *CompositeTy = I.getOperand(0)->getType();
4315 if (is4xi8vec(CompositeTy)) {
SJW806a5d82020-07-15 12:51:38 -05004316 SPIRVID CstFFID = getSPIRVInt32Constant(0xFF);
David Neto22f144c2017-06-12 14:26:21 -04004317
SJW01901d92020-05-21 08:58:31 -05004318 SPIRVID ShiftAmountID = 0;
David Neto22f144c2017-06-12 14:26:21 -04004319 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
4320 // Handle constant index.
SJW806a5d82020-07-15 12:51:38 -05004321 uint32_t Idx = static_cast<uint32_t>(CI->getZExtValue());
4322 ShiftAmountID = getSPIRVInt32Constant(Idx * 8);
David Neto22f144c2017-06-12 14:26:21 -04004323 } else {
4324 // Handle variable index.
SJWf93f5f32020-05-05 07:27:56 -05004325 SPIRVOperandVec TmpOps;
David Neto22f144c2017-06-12 14:26:21 -04004326
SJW806a5d82020-07-15 12:51:38 -05004327 TmpOps << Type::getInt32Ty(Context) << I.getOperand(2)
4328 << getSPIRVInt32Constant(8);
David Neto22f144c2017-06-12 14:26:21 -04004329
SJWf93f5f32020-05-05 07:27:56 -05004330 ShiftAmountID = addSPIRVInst(spv::OpIMul, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004331 }
4332
4333 //
4334 // Generate mask operations.
4335 //
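      // Roughly, for element index i and replacement byte v this computes
      //   (word & ~(0xFF << (i * 8))) | (v << (i * 8))
      // via the shift/mask sequence below (a sketch of the emitted code).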
4336
4337 // ShiftLeft mask according to index of insertelement.
SJWf93f5f32020-05-05 07:27:56 -05004338 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004339
SJW01901d92020-05-21 08:58:31 -05004340 Ops << CompositeTy << CstFFID << ShiftAmountID;
David Neto22f144c2017-06-12 14:26:21 -04004341
SJW01901d92020-05-21 08:58:31 -05004342 SPIRVID MaskID = addSPIRVInst(spv::OpShiftLeftLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004343
4344 // Inverse mask.
4345 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004346 Ops << CompositeTy << MaskID;
David Neto22f144c2017-06-12 14:26:21 -04004347
SJW01901d92020-05-21 08:58:31 -05004348 SPIRVID InvMaskID = addSPIRVInst(spv::OpNot, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004349
4350 // Apply mask.
4351 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004352 Ops << CompositeTy << I.getOperand(0) << InvMaskID;
David Neto22f144c2017-06-12 14:26:21 -04004353
SJW01901d92020-05-21 08:58:31 -05004354 SPIRVID OrgValID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004355
4356 // Create correct value according to index of insertelement.
4357 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004358 Ops << CompositeTy << I.getOperand(1) << ShiftAmountID;
David Neto22f144c2017-06-12 14:26:21 -04004359
SJW01901d92020-05-21 08:58:31 -05004360 SPIRVID InsertValID = addSPIRVInst(spv::OpShiftLeftLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004361
4362 // Insert value to original value.
4363 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004364 Ops << CompositeTy << OrgValID << InsertValID;
David Neto22f144c2017-06-12 14:26:21 -04004365
SJWf93f5f32020-05-05 07:27:56 -05004366 RID = addSPIRVInst(spv::OpBitwiseOr, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004367 break;
4368 }
4369
SJWf93f5f32020-05-05 07:27:56 -05004370 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004371
James Priced26efea2018-06-09 23:28:32 +01004372 // Ops[0] = Result Type ID
SJW01901d92020-05-21 08:58:31 -05004373 Ops << I.getType();
David Neto22f144c2017-06-12 14:26:21 -04004374
4375 spv::Op Opcode = spv::OpCompositeInsert;
4376 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
David Neto257c3892018-04-11 13:19:45 -04004377 const auto value = CI->getZExtValue();
4378 assert(value <= UINT32_MAX);
James Priced26efea2018-06-09 23:28:32 +01004379 // Ops[1] = Object ID
4380 // Ops[2] = Composite ID
4381 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJW01901d92020-05-21 08:58:31 -05004382 Ops << I.getOperand(1) << I.getOperand(0) << static_cast<uint32_t>(value);
David Neto22f144c2017-06-12 14:26:21 -04004383 } else {
James Priced26efea2018-06-09 23:28:32 +01004384 // Ops[1] = Composite ID
4385 // Ops[2] = Object ID
4386 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJW01901d92020-05-21 08:58:31 -05004387 Ops << I.getOperand(0) << I.getOperand(1) << I.getOperand(2);
David Neto22f144c2017-06-12 14:26:21 -04004388 Opcode = spv::OpVectorInsertDynamic;
4389 }
4390
SJWf93f5f32020-05-05 07:27:56 -05004391 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004392 break;
4393 }
4394 case Instruction::ShuffleVector: {
4395 // Ops[0] = Result Type ID
4396 // Ops[1] = Vector 1 ID
4397 // Ops[2] = Vector 2 ID
4398 // Ops[3] ... Ops[n] = Components (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004399 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004400
SJW01901d92020-05-21 08:58:31 -05004401 Ops << I.getType() << I.getOperand(0) << I.getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04004402
alan-bakerc9666712020-04-01 16:31:21 -04004403 auto shuffle = cast<ShuffleVectorInst>(&I);
4404 SmallVector<int, 4> mask;
4405 shuffle->getShuffleMask(mask);
4406 for (auto i : mask) {
4407 if (i == UndefMaskElem) {
4408 if (clspv::Option::HackUndef())
4409 // Use 0 instead of undef.
SJW01901d92020-05-21 08:58:31 -05004410 Ops << 0;
alan-bakerc9666712020-04-01 16:31:21 -04004411 else
4412 // Undef for shuffle in SPIR-V.
SJW01901d92020-05-21 08:58:31 -05004413 Ops << 0xffffffff;
David Neto22f144c2017-06-12 14:26:21 -04004414 } else {
SJW01901d92020-05-21 08:58:31 -05004415 Ops << i;
David Neto22f144c2017-06-12 14:26:21 -04004416 }
4417 }
4418
SJWf93f5f32020-05-05 07:27:56 -05004419 RID = addSPIRVInst(spv::OpVectorShuffle, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004420 break;
4421 }
4422 case Instruction::ICmp:
4423 case Instruction::FCmp: {
4424 CmpInst *CmpI = cast<CmpInst>(&I);
4425
David Netod4ca2e62017-07-06 18:47:35 -04004426 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004427 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04004428 if (isa<PointerType>(ArgTy)) {
4429 CmpI->print(errs());
alan-baker21574d32020-01-29 16:00:31 -05004430 std::string name = I.getParent()->getParent()->getName().str();
David Netod4ca2e62017-07-06 18:47:35 -04004431 errs()
4432 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
4433 << "in function " << name << "\n";
4434 llvm_unreachable("Pointer equality check is invalid");
4435 break;
4436 }
4437
SJWf93f5f32020-05-05 07:27:56 -05004438 SPIRVOperandVec Ops;
alan-baker15106572020-11-06 15:08:10 -05004439 if (CmpI->getPredicate() == CmpInst::FCMP_ORD ||
4440 CmpI->getPredicate() == CmpInst::FCMP_UNO) {
4441 // Implement ordered and unordered comparisons using OpIsNan instructions.
4442 // Optimize the constants to simplify the resulting code.
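      // For example, "fcmp ord %x, %y" lowers roughly to
      //   !(OpIsNan(%x) || OpIsNan(%y))
      // and "fcmp uno" is the same expression without the final negation
      // (a sketch; constant NaN operands are folded to a constant result
      // below).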
4443 auto lhs = CmpI->getOperand(0);
4444 auto rhs = CmpI->getOperand(1);
4445 auto const_lhs = dyn_cast_or_null<Constant>(lhs);
4446 auto const_rhs = dyn_cast_or_null<Constant>(rhs);
4447 if ((const_lhs && const_lhs->isNaN()) ||
4448 (const_rhs && const_rhs->isNaN())) {
4449 // Result is a constant: false for ordered, true for unordered.
4450 if (CmpI->getPredicate() == CmpInst::FCMP_ORD) {
4451 RID = getSPIRVConstant(ConstantInt::getFalse(CmpI->getType()));
4452 } else {
4453 RID = getSPIRVConstant(ConstantInt::getTrue(CmpI->getType()));
4454 }
4455 break;
4456 }
4457 SPIRVID lhs_id;
4458 SPIRVID rhs_id;
4459 if (!const_lhs) {
4460 // Generate OpIsNan for the lhs.
4461 Ops.clear();
4462 Ops << CmpI->getType() << lhs;
4463 lhs_id = addSPIRVInst(spv::OpIsNan, Ops);
4464 }
4465 if (!const_rhs) {
4466 // Generate OpIsNan for the rhs.
4467 Ops.clear();
4468 Ops << CmpI->getType() << rhs;
4469 rhs_id = addSPIRVInst(spv::OpIsNan, Ops);
4470 }
4471 if (lhs_id.isValid() && rhs_id.isValid()) {
4472 // Or the results for the lhs and rhs.
4473 Ops.clear();
4474 Ops << CmpI->getType() << lhs_id << rhs_id;
4475 RID = addSPIRVInst(spv::OpLogicalOr, Ops);
4476 } else {
4477 RID = lhs_id.isValid() ? lhs_id : rhs_id;
4478 }
4479 if (CmpI->getPredicate() == CmpInst::FCMP_ORD) {
4480 // For ordered comparisons, invert the intermediate result.
4481 Ops.clear();
4482 Ops << CmpI->getType() << RID;
4483 RID = addSPIRVInst(spv::OpLogicalNot, Ops);
4484 }
4485 break;
4486 } else {
4487 // Remaining comparisons map directly to SPIR-V opcodes.
4488 // Ops[0] = Result Type ID
4489 // Ops[1] = Operand 1 ID
4490 // Ops[2] = Operand 2 ID
4491 Ops << CmpI->getType() << CmpI->getOperand(0) << CmpI->getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04004492
alan-baker15106572020-11-06 15:08:10 -05004493 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
4494 RID = addSPIRVInst(Opcode, Ops);
4495 }
David Neto22f144c2017-06-12 14:26:21 -04004496 break;
4497 }
4498 case Instruction::Br: {
SJW88ed5fe2020-05-11 12:40:57 -05004499 // Branch instruction is deferred because it needs label's ID.
4500 BasicBlock *BrBB = I.getParent();
4501 if (ContinueBlocks.count(BrBB) || MergeBlocks.count(BrBB)) {
4502 // Placeholder for Merge operation
4503 RID = addSPIRVPlaceholder(&I);
4504 }
4505 RID = addSPIRVPlaceholder(&I);
David Neto22f144c2017-06-12 14:26:21 -04004506 break;
4507 }
4508 case Instruction::Switch: {
4509 I.print(errs());
4510 llvm_unreachable("Unsupported instruction???");
4511 break;
4512 }
4513 case Instruction::IndirectBr: {
4514 I.print(errs());
4515 llvm_unreachable("Unsupported instruction???");
4516 break;
4517 }
4518 case Instruction::PHI: {
SJW88ed5fe2020-05-11 12:40:57 -05004519 // PHI instruction is deferred because it needs label's ID.
4520 RID = addSPIRVPlaceholder(&I);
David Neto22f144c2017-06-12 14:26:21 -04004521 break;
4522 }
4523 case Instruction::Alloca: {
4524 //
4525 // Generate OpVariable.
4526 //
4527 // Ops[0] : Result Type ID
4528 // Ops[1] : Storage Class
SJWf93f5f32020-05-05 07:27:56 -05004529 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004530
SJW01901d92020-05-21 08:58:31 -05004531 Ops << I.getType() << spv::StorageClassFunction;
David Neto22f144c2017-06-12 14:26:21 -04004532
SJWf93f5f32020-05-05 07:27:56 -05004533 RID = addSPIRVInst(spv::OpVariable, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004534 break;
4535 }
4536 case Instruction::Load: {
4537 LoadInst *LD = cast<LoadInst>(&I);
4538 //
4539 // Generate OpLoad.
4540 //
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004541
alan-baker5b86ed72019-02-15 08:26:50 -05004542 if (LD->getType()->isPointerTy()) {
4543 // Loading a pointer requires variable pointers.
4544 setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
4545 }
David Neto22f144c2017-06-12 14:26:21 -04004546
SJW01901d92020-05-21 08:58:31 -05004547 SPIRVID PointerID = getSPIRVValue(LD->getPointerOperand());
David Netoa60b00b2017-09-15 16:34:09 -04004548 // This is a hack to work around what looks like a driver bug.
4549 // When we're loading from the special variable holding the WorkgroupSize
David Neto0a2f98d2017-09-15 19:38:40 -04004550 // builtin value, use an OpBitWiseAnd of the value's ID rather than
4551 // generating a load.
David Neto66cfe642018-03-24 06:13:56 -07004552 // TODO(dneto): Remove this awful hack once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -04004553 if (PointerID == WorkgroupSizeVarID) {
David Neto0a2f98d2017-09-15 19:38:40 -04004554 // Generate a bitwise-and of the original value with itself.
4555 // We should have been able to get away with just an OpCopyObject,
4556 // but we need something more complex to get past certain driver bugs.
4557 // This is ridiculous, but necessary.
4558 // TODO(dneto): Revisit this once drivers fix their bugs.
4559
SJWf93f5f32020-05-05 07:27:56 -05004560 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05004561 Ops << LD->getType() << WorkgroupSizeValueID << WorkgroupSizeValueID;
David Neto0a2f98d2017-09-15 19:38:40 -04004562
SJWf93f5f32020-05-05 07:27:56 -05004563 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Netoa60b00b2017-09-15 16:34:09 -04004564 break;
4565 }
4566
4567 // This is the normal path. Generate a load.
4568
David Neto22f144c2017-06-12 14:26:21 -04004569 // Ops[0] = Result Type ID
4570 // Ops[1] = Pointer ID
4571 // Ops[2] ... Ops[n] = Optional Memory Access
4572 //
4573 // TODO: Do we need to implement Optional Memory Access???
David Neto0a2f98d2017-09-15 19:38:40 -04004574
alan-baker3f772c02021-06-15 22:18:11 -04004575 auto ptr = LD->getPointerOperand();
4576 auto ptr_ty = ptr->getType();
4577 SPIRVID result_type_id;
4578 if (LD->getType()->isPointerTy()) {
4579 result_type_id = getSPIRVType(LD->getType());
4580 } else {
4581 auto layout = PointerRequiresLayout(ptr_ty->getPointerAddressSpace());
4582 result_type_id = getSPIRVType(LD->getType(), layout);
4583 }
SJWf93f5f32020-05-05 07:27:56 -05004584 SPIRVOperandVec Ops;
alan-baker3f772c02021-06-15 22:18:11 -04004585 Ops << result_type_id << ptr;
David Neto22f144c2017-06-12 14:26:21 -04004586
SJWf93f5f32020-05-05 07:27:56 -05004587 RID = addSPIRVInst(spv::OpLoad, Ops);
alan-baker3f772c02021-06-15 22:18:11 -04004588
4589 auto no_layout_id = getSPIRVType(LD->getType());
4590 if (Option::SpvVersion() >= SPIRVVersion::SPIRV_1_4 &&
4591 no_layout_id.get() != result_type_id.get()) {
4592 // Generate an OpCopyLogical to convert from the laid out type to a
4593 // non-laid out type.
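      // For example, loading a struct through a StorageBuffer pointer yields
      // a value of the explicitly laid-out struct type; the OpCopyLogical
      // below then produces the matching struct type without layout
      // decorations, so later uses agree on a single type (SPIR-V 1.4 or
      // later only).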
4594 Ops.clear();
4595 Ops << no_layout_id << RID;
4596 RID = addSPIRVInst(spv::OpCopyLogical, Ops);
4597 }
David Neto22f144c2017-06-12 14:26:21 -04004598 break;
4599 }
4600 case Instruction::Store: {
4601 StoreInst *ST = cast<StoreInst>(&I);
4602 //
4603 // Generate OpStore.
4604 //
4605
alan-baker5b86ed72019-02-15 08:26:50 -05004606 if (ST->getValueOperand()->getType()->isPointerTy()) {
4607 // Storing a pointer requires variable pointers.
4608 setVariablePointersCapabilities(
4609 ST->getValueOperand()->getType()->getPointerAddressSpace());
4610 }
4611
alan-baker3f772c02021-06-15 22:18:11 -04004612 SPIRVOperandVec Ops;
4613 auto ptr = ST->getPointerOperand();
4614 auto ptr_ty = ptr->getType();
4615 auto value = ST->getValueOperand();
4616 auto value_ty = value->getType();
4617 auto needs_layout = PointerRequiresLayout(ptr_ty->getPointerAddressSpace());
4618 if (Option::SpvVersion() >= SPIRVVersion::SPIRV_1_4 && needs_layout &&
4619 (value_ty->isArrayTy() || value_ty->isStructTy())) {
4620 // Generate an OpCopyLogical to convert from the non-laid type to the
4621 // laid out type.
4622 Ops << getSPIRVType(value_ty, needs_layout) << value;
4623 RID = addSPIRVInst(spv::OpCopyLogical, Ops);
4624 Ops.clear();
4625 }
4626
David Neto22f144c2017-06-12 14:26:21 -04004627 // Ops[0] = Pointer ID
4628 // Ops[1] = Object ID
4629 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
4630 //
4631 // TODO: Do we need to implement Optional Memory Access???
alan-baker3f772c02021-06-15 22:18:11 -04004632 Ops << ST->getPointerOperand();
4633 if (RID.isValid()) {
4634 Ops << RID;
4635 } else {
4636 Ops << ST->getValueOperand();
4637 }
SJWf93f5f32020-05-05 07:27:56 -05004638 RID = addSPIRVInst(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004639 break;
4640 }
4641 case Instruction::AtomicCmpXchg: {
4642 I.print(errs());
4643 llvm_unreachable("Unsupported instruction???");
4644 break;
4645 }
4646 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01004647 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
4648
4649 spv::Op opcode;
4650
4651 switch (AtomicRMW->getOperation()) {
4652 default:
4653 I.print(errs());
4654 llvm_unreachable("Unsupported instruction???");
4655 case llvm::AtomicRMWInst::Add:
4656 opcode = spv::OpAtomicIAdd;
4657 break;
4658 case llvm::AtomicRMWInst::Sub:
4659 opcode = spv::OpAtomicISub;
4660 break;
4661 case llvm::AtomicRMWInst::Xchg:
4662 opcode = spv::OpAtomicExchange;
4663 break;
4664 case llvm::AtomicRMWInst::Min:
4665 opcode = spv::OpAtomicSMin;
4666 break;
4667 case llvm::AtomicRMWInst::Max:
4668 opcode = spv::OpAtomicSMax;
4669 break;
4670 case llvm::AtomicRMWInst::UMin:
4671 opcode = spv::OpAtomicUMin;
4672 break;
4673 case llvm::AtomicRMWInst::UMax:
4674 opcode = spv::OpAtomicUMax;
4675 break;
4676 case llvm::AtomicRMWInst::And:
4677 opcode = spv::OpAtomicAnd;
4678 break;
4679 case llvm::AtomicRMWInst::Or:
4680 opcode = spv::OpAtomicOr;
4681 break;
4682 case llvm::AtomicRMWInst::Xor:
4683 opcode = spv::OpAtomicXor;
4684 break;
4685 }
4686
4687 //
4688 // Generate OpAtomic*.
4689 //
SJWf93f5f32020-05-05 07:27:56 -05004690 SPIRVOperandVec Ops;
Neil Henning39672102017-09-29 14:33:13 +01004691
SJW01901d92020-05-21 08:58:31 -05004692 Ops << I.getType() << AtomicRMW->getPointerOperand();
Neil Henning39672102017-09-29 14:33:13 +01004693
SJW806a5d82020-07-15 12:51:38 -05004694 const auto ConstantScopeDevice = getSPIRVInt32Constant(spv::ScopeDevice);
SJW01901d92020-05-21 08:58:31 -05004695 Ops << ConstantScopeDevice;
Neil Henning39672102017-09-29 14:33:13 +01004696
SJW806a5d82020-07-15 12:51:38 -05004697 const auto ConstantMemorySemantics =
4698 getSPIRVInt32Constant(spv::MemorySemanticsUniformMemoryMask |
4699 spv::MemorySemanticsSequentiallyConsistentMask);
SJW01901d92020-05-21 08:58:31 -05004700 Ops << ConstantMemorySemantics << AtomicRMW->getValOperand();
Neil Henning39672102017-09-29 14:33:13 +01004701
SJWf93f5f32020-05-05 07:27:56 -05004702 RID = addSPIRVInst(opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004703 break;
4704 }
4705 case Instruction::Fence: {
4706 I.print(errs());
4707 llvm_unreachable("Unsupported instruction???");
4708 break;
4709 }
4710 case Instruction::Call: {
4711 CallInst *Call = dyn_cast<CallInst>(&I);
SJW806a5d82020-07-15 12:51:38 -05004712 RID = GenerateInstructionFromCall(Call);
David Neto22f144c2017-06-12 14:26:21 -04004713 break;
4714 }
4715 case Instruction::Ret: {
4716 unsigned NumOps = I.getNumOperands();
4717 if (NumOps == 0) {
4718 //
4719 // Generate OpReturn.
4720 //
SJWf93f5f32020-05-05 07:27:56 -05004721 RID = addSPIRVInst(spv::OpReturn);
David Neto22f144c2017-06-12 14:26:21 -04004722 } else {
4723 //
4724 // Generate OpReturnValue.
4725 //
4726
4727 // Ops[0] = Return Value ID
SJWf93f5f32020-05-05 07:27:56 -05004728 SPIRVOperandVec Ops;
David Neto257c3892018-04-11 13:19:45 -04004729
SJW01901d92020-05-21 08:58:31 -05004730 Ops << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04004731
SJWf93f5f32020-05-05 07:27:56 -05004732 RID = addSPIRVInst(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004733 break;
4734 }
4735 break;
4736 }
4737 }
SJWf93f5f32020-05-05 07:27:56 -05004738
4739 // Register Instruction to ValueMap.
SJW01901d92020-05-21 08:58:31 -05004740 if (RID.isValid()) {
SJWf93f5f32020-05-05 07:27:56 -05004741 VMap[&I] = RID;
4742 }
David Neto22f144c2017-06-12 14:26:21 -04004743}
4744
4745void SPIRVProducerPass::GenerateFuncEpilogue() {
David Neto22f144c2017-06-12 14:26:21 -04004746 //
4747 // Generate OpFunctionEnd
4748 //
SJWf93f5f32020-05-05 07:27:56 -05004749 addSPIRVInst(spv::OpFunctionEnd);
David Neto22f144c2017-06-12 14:26:21 -04004750}
4751
4752bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05004753 // Don't specialize <4 x i8> if i8 is generally supported.
4754 if (clspv::Option::Int8Support())
4755 return false;
4756
David Neto22f144c2017-06-12 14:26:21 -04004757 LLVMContext &Context = Ty->getContext();
James Pricecf53df42020-04-20 14:41:24 -04004758 if (auto VecTy = dyn_cast<VectorType>(Ty)) {
4759 if (VecTy->getElementType() == Type::getInt8Ty(Context) &&
alan-baker5a8c3be2020-09-09 13:44:26 -04004760 VecTy->getElementCount().getKnownMinValue() == 4) {
David Neto22f144c2017-06-12 14:26:21 -04004761 return true;
4762 }
4763 }
4764
4765 return false;
4766}
4767
4768void SPIRVProducerPass::HandleDeferredInstruction() {
David Neto22f144c2017-06-12 14:26:21 -04004769 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
4770
SJW88ed5fe2020-05-11 12:40:57 -05004771 for (size_t i = 0; i < DeferredInsts.size(); ++i) {
4772 Value *Inst = DeferredInsts[i].first;
4773 SPIRVInstruction *Placeholder = DeferredInsts[i].second;
4774 SPIRVOperandVec Operands;
4775
4776 auto nextDeferred = [&i, &Inst, &DeferredInsts, &Placeholder]() {
4777 ++i;
4778 assert(DeferredInsts.size() > i);
4779 assert(Inst == DeferredInsts[i].first);
4780 Placeholder = DeferredInsts[i].second;
4781 };
David Neto22f144c2017-06-12 14:26:21 -04004782
4783 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
alan-baker06cad652019-12-03 17:56:47 -05004784 // Check whether this branch needs to be preceded by a merge instruction.
David Neto22f144c2017-06-12 14:26:21 -04004785 BasicBlock *BrBB = Br->getParent();
alan-baker06cad652019-12-03 17:56:47 -05004786 if (ContinueBlocks.count(BrBB)) {
David Neto22f144c2017-06-12 14:26:21 -04004787 //
4788 // Generate OpLoopMerge.
4789 //
4790 // Ops[0] = Merge Block ID
4791 // Ops[1] = Continue Target ID
4792 // Ops[2] = Loop Control
SJWf93f5f32020-05-05 07:27:56 -05004793 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004794
SJW01901d92020-05-21 08:58:31 -05004795 Ops << MergeBlocks[BrBB] << ContinueBlocks[BrBB]
4796 << spv::LoopControlMaskNone;
David Neto22f144c2017-06-12 14:26:21 -04004797
SJW88ed5fe2020-05-11 12:40:57 -05004798 replaceSPIRVInst(Placeholder, spv::OpLoopMerge, Ops);
4799
4800 nextDeferred();
4801
alan-baker06cad652019-12-03 17:56:47 -05004802 } else if (MergeBlocks.count(BrBB)) {
4803 //
4804 // Generate OpSelectionMerge.
4805 //
4806 // Ops[0] = Merge Block ID
4807 // Ops[1] = Selection Control
SJWf93f5f32020-05-05 07:27:56 -05004808 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004809
alan-baker06cad652019-12-03 17:56:47 -05004810 auto MergeBB = MergeBlocks[BrBB];
SJW01901d92020-05-21 08:58:31 -05004811 Ops << MergeBB << spv::SelectionControlMaskNone;
David Neto22f144c2017-06-12 14:26:21 -04004812
SJW88ed5fe2020-05-11 12:40:57 -05004813 replaceSPIRVInst(Placeholder, spv::OpSelectionMerge, Ops);
4814
4815 nextDeferred();
David Neto22f144c2017-06-12 14:26:21 -04004816 }
4817
4818 if (Br->isConditional()) {
4819 //
4820 // Generate OpBranchConditional.
4821 //
4822 // Ops[0] = Condition ID
4823 // Ops[1] = True Label ID
4824 // Ops[2] = False Label ID
4825 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004826 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004827
SJW01901d92020-05-21 08:58:31 -05004828 Ops << Br->getCondition() << Br->getSuccessor(0) << Br->getSuccessor(1);
David Neto22f144c2017-06-12 14:26:21 -04004829
SJW88ed5fe2020-05-11 12:40:57 -05004830 replaceSPIRVInst(Placeholder, spv::OpBranchConditional, Ops);
4831
David Neto22f144c2017-06-12 14:26:21 -04004832 } else {
4833 //
4834 // Generate OpBranch.
4835 //
4836 // Ops[0] = Target Label ID
SJWf93f5f32020-05-05 07:27:56 -05004837 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004838
SJW01901d92020-05-21 08:58:31 -05004839 Ops << Br->getSuccessor(0);
David Neto22f144c2017-06-12 14:26:21 -04004840
SJW88ed5fe2020-05-11 12:40:57 -05004841 replaceSPIRVInst(Placeholder, spv::OpBranch, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004842 }
4843 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5ed87542020-03-23 11:05:22 -04004844 if (PHI->getType()->isPointerTy() && !IsSamplerType(PHI->getType()) &&
4845 !IsImageType(PHI->getType())) {
alan-baker5b86ed72019-02-15 08:26:50 -05004846 // OpPhi on pointers requires variable pointers.
4847 setVariablePointersCapabilities(
4848 PHI->getType()->getPointerAddressSpace());
4849 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
SJW01901d92020-05-21 08:58:31 -05004850 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004851 }
4852 }
4853
David Neto22f144c2017-06-12 14:26:21 -04004854 //
4855 // Generate OpPhi.
4856 //
4857 // Ops[0] = Result Type ID
4858 // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
SJWf93f5f32020-05-05 07:27:56 -05004859 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004860
SJW01901d92020-05-21 08:58:31 -05004861 Ops << PHI->getType();
David Neto22f144c2017-06-12 14:26:21 -04004862
SJW88ed5fe2020-05-11 12:40:57 -05004863 for (unsigned j = 0; j < PHI->getNumIncomingValues(); j++) {
SJW01901d92020-05-21 08:58:31 -05004864 Ops << PHI->getIncomingValue(j) << PHI->getIncomingBlock(j);
David Neto22f144c2017-06-12 14:26:21 -04004865 }
4866
SJW88ed5fe2020-05-11 12:40:57 -05004867 replaceSPIRVInst(Placeholder, spv::OpPhi, Ops);
4868
David Neto22f144c2017-06-12 14:26:21 -04004869 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
4870 Function *Callee = Call->getCalledFunction();
David Neto3fbb4072017-10-16 11:28:14 -04004871 auto callee_name = Callee->getName();
David Neto22f144c2017-06-12 14:26:21 -04004872
SJW61531372020-06-09 07:31:08 -05004873 if (Builtins::Lookup(Callee) == Builtins::kClspvCompositeConstruct) {
David Netoab03f432017-11-03 17:00:44 -04004874 // Generate an OpCompositeConstruct
SJWf93f5f32020-05-05 07:27:56 -05004875 SPIRVOperandVec Ops;
David Netoab03f432017-11-03 17:00:44 -04004876
4877 // The result type.
SJW01901d92020-05-21 08:58:31 -05004878 Ops << Call->getType();
David Netoab03f432017-11-03 17:00:44 -04004879
4880 for (Use &use : Call->arg_operands()) {
SJW01901d92020-05-21 08:58:31 -05004881 Ops << use.get();
David Netoab03f432017-11-03 17:00:44 -04004882 }
4883
SJW88ed5fe2020-05-11 12:40:57 -05004884 replaceSPIRVInst(Placeholder, spv::OpCompositeConstruct, Ops);
David Netoab03f432017-11-03 17:00:44 -04004885
David Neto22f144c2017-06-12 14:26:21 -04004886 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05004887 if (Call->getType()->isPointerTy()) {
4888 // Functions returning pointers require variable pointers.
4889 setVariablePointersCapabilities(
4890 Call->getType()->getPointerAddressSpace());
4891 }
4892
David Neto22f144c2017-06-12 14:26:21 -04004893 //
4894 // Generate OpFunctionCall.
4895 //
4896
4897 // Ops[0] = Result Type ID
4898 // Ops[1] = Callee Function ID
4899 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
SJWf93f5f32020-05-05 07:27:56 -05004900 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004901
SJW01901d92020-05-21 08:58:31 -05004902 Ops << Call->getType();
David Neto22f144c2017-06-12 14:26:21 -04004903
SJW01901d92020-05-21 08:58:31 -05004904 SPIRVID CalleeID = getSPIRVValue(Callee);
SJW806a5d82020-07-15 12:51:38 -05004905 if (!CalleeID.isValid()) {
David Neto43568eb2017-10-13 18:25:25 -04004906 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04004907 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04004908 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
4909 // causes an infinite loop. Instead, go ahead and generate
4910 // the bad function call. A validator will catch the 0-Id.
4911 // llvm_unreachable("Can't translate function call");
4912 }
David Neto22f144c2017-06-12 14:26:21 -04004913
SJW01901d92020-05-21 08:58:31 -05004914 Ops << CalleeID;
David Neto22f144c2017-06-12 14:26:21 -04004915
David Neto22f144c2017-06-12 14:26:21 -04004916 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
SJW88ed5fe2020-05-11 12:40:57 -05004917 for (unsigned j = 0; j < CalleeFTy->getNumParams(); j++) {
4918 auto *operand = Call->getOperand(j);
alan-bakerd4d50652019-12-03 17:17:15 -05004919 auto *operand_type = operand->getType();
4920 // Images and samplers can be passed as function parameters without
4921 // variable pointers.
4922 if (operand_type->isPointerTy() && !IsImageType(operand_type) &&
4923 !IsSamplerType(operand_type)) {
alan-baker5b86ed72019-02-15 08:26:50 -05004924 auto sc =
4925 GetStorageClass(operand->getType()->getPointerAddressSpace());
4926 if (sc == spv::StorageClassStorageBuffer) {
4927 // Passing SSBO by reference requires variable pointers storage
4928 // buffer.
SJW01901d92020-05-21 08:58:31 -05004929 setVariablePointersStorageBuffer();
alan-baker5b86ed72019-02-15 08:26:50 -05004930 } else if (sc == spv::StorageClassWorkgroup) {
4931 // Workgroup references require variable pointers if they are not
4932 // memory object declarations.
4933 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
4934 // Workgroup accessor represents a variable reference.
SJW61531372020-06-09 07:31:08 -05004935 if (Builtins::Lookup(operand_call->getCalledFunction()) !=
4936 Builtins::kClspvLocal)
SJW01901d92020-05-21 08:58:31 -05004937 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004938 } else {
4939 // Arguments are function parameters.
4940 if (!isa<Argument>(operand))
SJW01901d92020-05-21 08:58:31 -05004941 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004942 }
4943 }
4944 }
SJW01901d92020-05-21 08:58:31 -05004945 Ops << operand;
David Neto22f144c2017-06-12 14:26:21 -04004946 }
4947
SJW88ed5fe2020-05-11 12:40:57 -05004948 replaceSPIRVInst(Placeholder, spv::OpFunctionCall, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004949 }
4950 }
4951 }
4952}
4953
SJW77b87ad2020-04-21 14:37:52 -05004954void SPIRVProducerPass::HandleDeferredDecorations() {
4955 const auto &DL = module->getDataLayout();
alan-baker5a8c3be2020-09-09 13:44:26 -04004956 if (getTypesNeedingArrayStride().empty()) {
David Neto1a1a0582017-07-07 12:01:44 -04004957 return;
David Netoc6f3ab22018-04-06 18:02:31 -04004958 }
David Neto1a1a0582017-07-07 12:01:44 -04004959
David Netoc6f3ab22018-04-06 18:02:31 -04004960 // Insert ArrayStride decorations on pointer types, due to OpPtrAccessChain
4961 // instructions we generated earlier.
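  // For example, a pointer type used as the base of an OpPtrAccessChain in
  // the StorageBuffer storage class ends up with something like
  //   OpDecorate %ptr_type ArrayStride 4
  // where the stride is the element type's allocation size (4 assumes a
  // float element; a sketch, not the exact IDs emitted).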
alan-bakerc3fd07f2020-10-22 09:48:49 -04004962 DenseSet<uint32_t> seen;
David Neto85082642018-03-24 06:55:20 -07004963 for (auto *type : getTypesNeedingArrayStride()) {
alan-baker3f772c02021-06-15 22:18:11 -04004964 auto TI = TypeMap.find(type);
4965 unsigned index = SpvVersion() < SPIRVVersion::SPIRV_1_4 ? 0 : 1;
4966 assert(TI != TypeMap.end());
4967 assert(index < TI->second.size());
4968 if (!TI->second[index].isValid())
4969 continue;
4970
4971 auto id = TI->second[index];
alan-bakerc3fd07f2020-10-22 09:48:49 -04004972 if (!seen.insert(id.get()).second)
4973 continue;
4974
David Neto85082642018-03-24 06:55:20 -07004975 Type *elemTy = nullptr;
4976 if (auto *ptrTy = dyn_cast<PointerType>(type)) {
4977 elemTy = ptrTy->getElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05004978 } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
alan-baker8eb435a2020-04-08 00:42:06 -04004979 elemTy = arrayTy->getElementType();
4980 } else if (auto *vecTy = dyn_cast<VectorType>(type)) {
4981 elemTy = vecTy->getElementType();
David Neto85082642018-03-24 06:55:20 -07004982 } else {
4983 errs() << "Unhandled strided type " << *type << "\n";
4984 llvm_unreachable("Unhandled strided type");
4985 }
David Neto1a1a0582017-07-07 12:01:44 -04004986
4987 // Ops[0] = Target ID
4988 // Ops[1] = Decoration (ArrayStride)
4989 // Ops[2] = Stride number (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004990 SPIRVOperandVec Ops;
David Neto1a1a0582017-07-07 12:01:44 -04004991
David Neto85082642018-03-24 06:55:20 -07004992 // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
Alan Bakerfcda9482018-10-02 17:09:59 -04004993 const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));
David Neto257c3892018-04-11 13:19:45 -04004994
alan-bakerc3fd07f2020-10-22 09:48:49 -04004995 Ops << id << spv::DecorationArrayStride << stride;
David Neto1a1a0582017-07-07 12:01:44 -04004996
SJWf93f5f32020-05-05 07:27:56 -05004997 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04004998 }
David Neto1a1a0582017-07-07 12:01:44 -04004999}
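
// Editorial sketch (not from the original source): for a storage-buffer
// pointer to a <4 x float> element, GetTypeAllocSize reports 16 bytes, so the
// loop above emits the equivalent of
//   OpDecorate %ptr_StorageBuffer_v4float ArrayStride 16
// where the %ptr_StorageBuffer_v4float name is hypothetical; only the stride
// value follows from the code above.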
5000
SJW61531372020-06-09 07:31:08 -05005001glsl::ExtInst
5002SPIRVProducerPass::getExtInstEnum(const Builtins::FunctionInfo &func_info) {
SJW61531372020-06-09 07:31:08 -05005003 switch (func_info.getType()) {
SJW2c317da2020-03-23 07:39:13 -05005004 case Builtins::kClamp: {
SJW61531372020-06-09 07:31:08 -05005005 auto param_type = func_info.getParameter(0);
SJW2c317da2020-03-23 07:39:13 -05005006 if (param_type.type_id == Type::FloatTyID) {
alan-bakerecc9c942020-12-07 13:13:32 -05005007 return glsl::ExtInst::ExtInstNClamp;
SJW2c317da2020-03-23 07:39:13 -05005008 }
5009 return param_type.is_signed ? glsl::ExtInst::ExtInstSClamp
5010 : glsl::ExtInst::ExtInstUClamp;
5011 }
5012 case Builtins::kMax: {
SJW61531372020-06-09 07:31:08 -05005013 auto param_type = func_info.getParameter(0);
SJW2c317da2020-03-23 07:39:13 -05005014 if (param_type.type_id == Type::FloatTyID) {
5015 return glsl::ExtInst::ExtInstFMax;
5016 }
5017 return param_type.is_signed ? glsl::ExtInst::ExtInstSMax
5018 : glsl::ExtInst::ExtInstUMax;
5019 }
5020 case Builtins::kMin: {
SJW61531372020-06-09 07:31:08 -05005021 auto param_type = func_info.getParameter(0);
SJW2c317da2020-03-23 07:39:13 -05005022 if (param_type.type_id == Type::FloatTyID) {
5023 return glsl::ExtInst::ExtInstFMin;
5024 }
5025 return param_type.is_signed ? glsl::ExtInst::ExtInstSMin
5026 : glsl::ExtInst::ExtInstUMin;
5027 }
5028 case Builtins::kAbs:
5029 return glsl::ExtInst::ExtInstSAbs;
5030 case Builtins::kFmax:
Marco Antognini55d51862020-07-21 17:50:07 +01005031 return glsl::ExtInst::ExtInstNMax;
SJW2c317da2020-03-23 07:39:13 -05005032 case Builtins::kFmin:
Marco Antognini55d51862020-07-21 17:50:07 +01005033 return glsl::ExtInst::ExtInstNMin;
SJW2c317da2020-03-23 07:39:13 -05005034 case Builtins::kDegrees:
5035 return glsl::ExtInst::ExtInstDegrees;
5036 case Builtins::kRadians:
5037 return glsl::ExtInst::ExtInstRadians;
5038 case Builtins::kMix:
5039 return glsl::ExtInst::ExtInstFMix;
5040 case Builtins::kAcos:
5041 case Builtins::kAcospi:
5042 return glsl::ExtInst::ExtInstAcos;
5043 case Builtins::kAcosh:
5044 return glsl::ExtInst::ExtInstAcosh;
5045 case Builtins::kAsin:
5046 case Builtins::kAsinpi:
5047 return glsl::ExtInst::ExtInstAsin;
5048 case Builtins::kAsinh:
5049 return glsl::ExtInst::ExtInstAsinh;
5050 case Builtins::kAtan:
5051 case Builtins::kAtanpi:
5052 return glsl::ExtInst::ExtInstAtan;
5053 case Builtins::kAtanh:
5054 return glsl::ExtInst::ExtInstAtanh;
5055 case Builtins::kAtan2:
5056 case Builtins::kAtan2pi:
5057 return glsl::ExtInst::ExtInstAtan2;
5058 case Builtins::kCeil:
5059 return glsl::ExtInst::ExtInstCeil;
5060 case Builtins::kSin:
5061 case Builtins::kHalfSin:
5062 case Builtins::kNativeSin:
5063 return glsl::ExtInst::ExtInstSin;
5064 case Builtins::kSinh:
5065 return glsl::ExtInst::ExtInstSinh;
5066 case Builtins::kCos:
5067 case Builtins::kHalfCos:
5068 case Builtins::kNativeCos:
5069 return glsl::ExtInst::ExtInstCos;
5070 case Builtins::kCosh:
5071 return glsl::ExtInst::ExtInstCosh;
5072 case Builtins::kTan:
5073 case Builtins::kHalfTan:
5074 case Builtins::kNativeTan:
5075 return glsl::ExtInst::ExtInstTan;
5076 case Builtins::kTanh:
5077 return glsl::ExtInst::ExtInstTanh;
5078 case Builtins::kExp:
5079 case Builtins::kHalfExp:
5080 case Builtins::kNativeExp:
5081 return glsl::ExtInst::ExtInstExp;
5082 case Builtins::kExp2:
5083 case Builtins::kHalfExp2:
5084 case Builtins::kNativeExp2:
5085 return glsl::ExtInst::ExtInstExp2;
5086 case Builtins::kLog:
5087 case Builtins::kHalfLog:
5088 case Builtins::kNativeLog:
5089 return glsl::ExtInst::ExtInstLog;
5090 case Builtins::kLog2:
5091 case Builtins::kHalfLog2:
5092 case Builtins::kNativeLog2:
5093 return glsl::ExtInst::ExtInstLog2;
5094 case Builtins::kFabs:
5095 return glsl::ExtInst::ExtInstFAbs;
5096 case Builtins::kFma:
5097 return glsl::ExtInst::ExtInstFma;
5098 case Builtins::kFloor:
5099 return glsl::ExtInst::ExtInstFloor;
5100 case Builtins::kLdexp:
5101 return glsl::ExtInst::ExtInstLdexp;
5102 case Builtins::kPow:
5103 case Builtins::kPowr:
5104 case Builtins::kHalfPowr:
5105 case Builtins::kNativePowr:
5106 return glsl::ExtInst::ExtInstPow;
James Price38553362020-09-03 18:30:40 -04005107 case Builtins::kRint:
5108 return glsl::ExtInst::ExtInstRoundEven;
SJW2c317da2020-03-23 07:39:13 -05005109 case Builtins::kRound:
5110 return glsl::ExtInst::ExtInstRound;
5111 case Builtins::kSqrt:
5112 case Builtins::kHalfSqrt:
5113 case Builtins::kNativeSqrt:
5114 return glsl::ExtInst::ExtInstSqrt;
5115 case Builtins::kRsqrt:
5116 case Builtins::kHalfRsqrt:
5117 case Builtins::kNativeRsqrt:
5118 return glsl::ExtInst::ExtInstInverseSqrt;
5119 case Builtins::kTrunc:
5120 return glsl::ExtInst::ExtInstTrunc;
5121 case Builtins::kFrexp:
5122 return glsl::ExtInst::ExtInstFrexp;
SJW61531372020-06-09 07:31:08 -05005123 case Builtins::kClspvFract:
SJW2c317da2020-03-23 07:39:13 -05005124 case Builtins::kFract:
5125 return glsl::ExtInst::ExtInstFract;
5126 case Builtins::kSign:
5127 return glsl::ExtInst::ExtInstFSign;
5128 case Builtins::kLength:
5129 case Builtins::kFastLength:
5130 return glsl::ExtInst::ExtInstLength;
5131 case Builtins::kDistance:
5132 case Builtins::kFastDistance:
5133 return glsl::ExtInst::ExtInstDistance;
5134 case Builtins::kStep:
5135 return glsl::ExtInst::ExtInstStep;
5136 case Builtins::kSmoothstep:
5137 return glsl::ExtInst::ExtInstSmoothStep;
5138 case Builtins::kCross:
5139 return glsl::ExtInst::ExtInstCross;
5140 case Builtins::kNormalize:
5141 case Builtins::kFastNormalize:
5142 return glsl::ExtInst::ExtInstNormalize;
SJW61531372020-06-09 07:31:08 -05005143 case Builtins::kSpirvPack:
5144 return glsl::ExtInst::ExtInstPackHalf2x16;
5145 case Builtins::kSpirvUnpack:
5146 return glsl::ExtInst::ExtInstUnpackHalf2x16;
SJW2c317da2020-03-23 07:39:13 -05005147 default:
5148 break;
5149 }
5150
alan-baker5f2e88e2020-12-07 15:24:04 -05005151 // TODO: improve this by checking the intrinsic id.
SJW61531372020-06-09 07:31:08 -05005152 if (func_info.getName().find("llvm.fmuladd.") == 0) {
5153 return glsl::ExtInst::ExtInstFma;
5154 }
alan-baker5f2e88e2020-12-07 15:24:04 -05005155 if (func_info.getName().find("llvm.sqrt.") == 0) {
5156 return glsl::ExtInst::ExtInstSqrt;
5157 }
5158 if (func_info.getName().find("llvm.trunc.") == 0) {
5159 return glsl::ExtInst::ExtInstTrunc;
5160 }
5161 if (func_info.getName().find("llvm.ctlz.") == 0) {
5162 return glsl::ExtInst::ExtInstFindUMsb;
5163 }
5164 if (func_info.getName().find("llvm.cttz.") == 0) {
5165 return glsl::ExtInst::ExtInstFindILsb;
5166 }
alan-baker3e0de472020-12-08 15:57:17 -05005167 if (func_info.getName().find("llvm.ceil.") == 0) {
5168 return glsl::ExtInst::ExtInstCeil;
5169 }
5170 if (func_info.getName().find("llvm.rint.") == 0) {
5171 return glsl::ExtInst::ExtInstRoundEven;
5172 }
5173 if (func_info.getName().find("llvm.fabs.") == 0) {
5174 return glsl::ExtInst::ExtInstFAbs;
5175 }
5176 if (func_info.getName().find("llvm.floor.") == 0) {
5177 return glsl::ExtInst::ExtInstFloor;
5178 }
5179 if (func_info.getName().find("llvm.sin.") == 0) {
5180 return glsl::ExtInst::ExtInstSin;
5181 }
5182 if (func_info.getName().find("llvm.cos.") == 0) {
5183 return glsl::ExtInst::ExtInstCos;
5184 }
alan-baker8b968112020-12-15 15:53:29 -05005185 if (func_info.getName().find("llvm.exp.") == 0) {
5186 return glsl::ExtInst::ExtInstExp;
5187 }
5188 if (func_info.getName().find("llvm.log.") == 0) {
5189 return glsl::ExtInst::ExtInstLog;
5190 }
5191 if (func_info.getName().find("llvm.pow.") == 0) {
5192 return glsl::ExtInst::ExtInstPow;
5193 }
James Price8cc3bb12021-05-05 10:20:58 -04005194 if (func_info.getName().find("llvm.smax.") == 0) {
5195 return glsl::ExtInst::ExtInstSMax;
5196 }
5197 if (func_info.getName().find("llvm.smin.") == 0) {
5198 return glsl::ExtInst::ExtInstSMin;
5199 }
SJW61531372020-06-09 07:31:08 -05005200 return kGlslExtInstBad;
David Neto3fbb4072017-10-16 11:28:14 -04005201}
5202
SJW61531372020-06-09 07:31:08 -05005203glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(
5204 const Builtins::FunctionInfo &func_info) {
5205 switch (func_info.getType()) {
SJW2c317da2020-03-23 07:39:13 -05005206 case Builtins::kAcospi:
5207 return glsl::ExtInst::ExtInstAcos;
5208 case Builtins::kAsinpi:
5209 return glsl::ExtInst::ExtInstAsin;
5210 case Builtins::kAtanpi:
5211 return glsl::ExtInst::ExtInstAtan;
5212 case Builtins::kAtan2pi:
5213 return glsl::ExtInst::ExtInstAtan2;
5214 default:
5215 break;
5216 }
5217 return kGlslExtInstBad;
David Neto3fbb4072017-10-16 11:28:14 -04005218}
5219
SJW61531372020-06-09 07:31:08 -05005220glsl::ExtInst SPIRVProducerPass::getDirectOrIndirectExtInstEnum(
5221 const Builtins::FunctionInfo &func_info) {
5222 auto direct = getExtInstEnum(func_info);
David Neto3fbb4072017-10-16 11:28:14 -04005223 if (direct != kGlslExtInstBad)
5224 return direct;
SJW61531372020-06-09 07:31:08 -05005225 return getIndirectExtInstEnum(func_info);
David Neto22f144c2017-06-12 14:26:21 -04005226}
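
// Worked example (editorial, not in the original source): for the OpenCL
// builtin clamp(), the lookup above yields NClamp for floating-point
// arguments and SClamp/UClamp for signed/unsigned integers; the *pi variants
// (acospi, asinpi, atanpi, atan2pi) map to the plain Acos/Asin/Atan/Atan2
// instructions, with any 1/pi scaling presumably handled by the caller.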
5227
David Neto22f144c2017-06-12 14:26:21 -04005228void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
David Neto0676e6f2017-07-11 18:47:44 -04005229 binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
David Neto22f144c2017-06-12 14:26:21 -04005230}
5231
SJW88ed5fe2020-05-11 12:40:57 -05005232void SPIRVProducerPass::WriteResultID(const SPIRVInstruction &Inst) {
SJW01901d92020-05-21 08:58:31 -05005233 WriteOneWord(Inst.getResultID().get());
David Neto22f144c2017-06-12 14:26:21 -04005234}
5235
SJW88ed5fe2020-05-11 12:40:57 -05005236void SPIRVProducerPass::WriteWordCountAndOpcode(const SPIRVInstruction &Inst) {
David Neto22f144c2017-06-12 14:26:21 -04005237 // High 16 bit : Word Count
5238 // Low 16 bit : Opcode
SJW88ed5fe2020-05-11 12:40:57 -05005239 uint32_t Word = Inst.getOpcode();
5240 const uint32_t count = Inst.getWordCount();
David Netoee2660d2018-06-28 16:31:29 -04005241 if (count > 65535) {
5242 errs() << "Word count limit of 65535 exceeded: " << count << "\n";
5243 llvm_unreachable("Word count too high");
5244 }
SJW88ed5fe2020-05-11 12:40:57 -05005245 Word |= Inst.getWordCount() << 16;
David Neto22f144c2017-06-12 14:26:21 -04005246 WriteOneWord(Word);
5247}
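
// Illustrative example (editorial): an OpTypeInt instruction carrying a
// result id, a width, and a signedness operand occupies 4 words, so its
// first word is (4 << 16) | spv::OpTypeInt, i.e. 0x00040015.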
5248
SJW88ed5fe2020-05-11 12:40:57 -05005249void SPIRVProducerPass::WriteOperand(const SPIRVOperand &Op) {
5250 SPIRVOperandType OpTy = Op.getType();
David Neto22f144c2017-06-12 14:26:21 -04005251 switch (OpTy) {
5252 default: {
5253 llvm_unreachable("Unsupported SPIRV Operand Type???");
5254 break;
5255 }
5256 case SPIRVOperandType::NUMBERID: {
SJW88ed5fe2020-05-11 12:40:57 -05005257 WriteOneWord(Op.getNumID());
David Neto22f144c2017-06-12 14:26:21 -04005258 break;
5259 }
5260 case SPIRVOperandType::LITERAL_STRING: {
SJW88ed5fe2020-05-11 12:40:57 -05005261 std::string Str = Op.getLiteralStr();
David Neto22f144c2017-06-12 14:26:21 -04005262 const char *Data = Str.c_str();
5263 size_t WordSize = Str.size() / 4;
5264 for (unsigned Idx = 0; Idx < WordSize; Idx++) {
5265 WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
5266 }
5267
5268 uint32_t Remainder = Str.size() % 4;
5269 uint32_t LastWord = 0;
5270 if (Remainder) {
5271 for (unsigned Idx = 0; Idx < Remainder; Idx++) {
5272 LastWord |= Data[4 * WordSize + Idx] << 8 * Idx;
5273 }
5274 }
5275
5276 WriteOneWord(LastWord);
5277 break;
5278 }
SJW88ed5fe2020-05-11 12:40:57 -05005279 case SPIRVOperandType::LITERAL_WORD: {
5280 WriteOneWord(Op.getLiteralNum()[0]);
5281 break;
5282 }
5283 case SPIRVOperandType::LITERAL_DWORD: {
5284 WriteOneWord(Op.getLiteralNum()[0]);
5285 WriteOneWord(Op.getLiteralNum()[1]);
David Neto22f144c2017-06-12 14:26:21 -04005286 break;
5287 }
5288 }
5289}
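
// Illustrative example (editorial): on a little-endian host the literal
// string "abc" packs into the single word 0x00636261 ('a', 'b', 'c', NUL),
// while "abcd" is written as 0x64636261 followed by an all-zero word that
// carries the terminating NUL, matching the SPIR-V literal-string encoding.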
5290
5291void SPIRVProducerPass::WriteSPIRVBinary() {
SJW69939d52020-04-16 07:29:07 -05005292 for (int i = 0; i < kSectionCount; ++i) {
5293 WriteSPIRVBinary(SPIRVSections[i]);
5294 }
5295}
5296
5297void SPIRVProducerPass::WriteSPIRVBinary(SPIRVInstructionList &SPIRVInstList) {
SJW88ed5fe2020-05-11 12:40:57 -05005298 for (const auto &Inst : SPIRVInstList) {
5299 const auto &Ops = Inst.getOperands();
5300 spv::Op Opcode = static_cast<spv::Op>(Inst.getOpcode());
David Neto22f144c2017-06-12 14:26:21 -04005301
5302 switch (Opcode) {
5303 default: {
David Neto5c22a252018-03-15 16:07:41 -04005304 errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
David Neto22f144c2017-06-12 14:26:21 -04005305 llvm_unreachable("Unsupported SPIRV instruction");
5306 break;
5307 }
Marco Antognini68e5c512020-09-09 16:08:57 +01005308 case spv::OpUnreachable:
David Neto22f144c2017-06-12 14:26:21 -04005309 case spv::OpCapability:
5310 case spv::OpExtension:
5311 case spv::OpMemoryModel:
5312 case spv::OpEntryPoint:
5313 case spv::OpExecutionMode:
5314 case spv::OpSource:
5315 case spv::OpDecorate:
5316 case spv::OpMemberDecorate:
5317 case spv::OpBranch:
5318 case spv::OpBranchConditional:
5319 case spv::OpSelectionMerge:
5320 case spv::OpLoopMerge:
5321 case spv::OpStore:
5322 case spv::OpImageWrite:
5323 case spv::OpReturnValue:
5324 case spv::OpControlBarrier:
5325 case spv::OpMemoryBarrier:
5326 case spv::OpReturn:
5327 case spv::OpFunctionEnd:
alan-baker4986eff2020-10-29 13:38:00 -04005328 case spv::OpCopyMemory:
5329 case spv::OpAtomicStore: {
David Neto22f144c2017-06-12 14:26:21 -04005330 WriteWordCountAndOpcode(Inst);
5331 for (uint32_t i = 0; i < Ops.size(); i++) {
5332 WriteOperand(Ops[i]);
5333 }
5334 break;
5335 }
5336 case spv::OpTypeBool:
5337 case spv::OpTypeVoid:
5338 case spv::OpTypeSampler:
5339 case spv::OpLabel:
5340 case spv::OpExtInstImport:
5341 case spv::OpTypePointer:
5342 case spv::OpTypeRuntimeArray:
5343 case spv::OpTypeStruct:
5344 case spv::OpTypeImage:
5345 case spv::OpTypeSampledImage:
5346 case spv::OpTypeInt:
5347 case spv::OpTypeFloat:
5348 case spv::OpTypeArray:
5349 case spv::OpTypeVector:
alan-baker86ce19c2020-08-05 13:09:19 -04005350 case spv::OpTypeFunction:
5351 case spv::OpString: {
David Neto22f144c2017-06-12 14:26:21 -04005352 WriteWordCountAndOpcode(Inst);
5353 WriteResultID(Inst);
5354 for (uint32_t i = 0; i < Ops.size(); i++) {
5355 WriteOperand(Ops[i]);
5356 }
5357 break;
5358 }
5359 case spv::OpFunction:
5360 case spv::OpFunctionParameter:
5361 case spv::OpAccessChain:
5362 case spv::OpPtrAccessChain:
5363 case spv::OpInBoundsAccessChain:
5364 case spv::OpUConvert:
5365 case spv::OpSConvert:
5366 case spv::OpConvertFToU:
5367 case spv::OpConvertFToS:
5368 case spv::OpConvertUToF:
5369 case spv::OpConvertSToF:
5370 case spv::OpFConvert:
5371 case spv::OpConvertPtrToU:
5372 case spv::OpConvertUToPtr:
5373 case spv::OpBitcast:
alan-bakerc9c55ae2019-12-02 16:01:27 -05005374 case spv::OpFNegate:
David Neto22f144c2017-06-12 14:26:21 -04005375 case spv::OpIAdd:
alan-bakera52b7312020-10-26 08:58:51 -04005376 case spv::OpIAddCarry:
David Neto22f144c2017-06-12 14:26:21 -04005377 case spv::OpFAdd:
5378 case spv::OpISub:
alan-baker3f1bf492020-11-05 09:07:36 -05005379 case spv::OpISubBorrow:
David Neto22f144c2017-06-12 14:26:21 -04005380 case spv::OpFSub:
5381 case spv::OpIMul:
5382 case spv::OpFMul:
5383 case spv::OpUDiv:
5384 case spv::OpSDiv:
5385 case spv::OpFDiv:
5386 case spv::OpUMod:
5387 case spv::OpSRem:
5388 case spv::OpFRem:
Kévin Petit8a560882019-03-21 15:24:34 +00005389 case spv::OpUMulExtended:
5390 case spv::OpSMulExtended:
David Neto22f144c2017-06-12 14:26:21 -04005391 case spv::OpBitwiseOr:
5392 case spv::OpBitwiseXor:
5393 case spv::OpBitwiseAnd:
David Netoa394f392017-08-26 20:45:29 -04005394 case spv::OpNot:
David Neto22f144c2017-06-12 14:26:21 -04005395 case spv::OpShiftLeftLogical:
5396 case spv::OpShiftRightLogical:
5397 case spv::OpShiftRightArithmetic:
5398 case spv::OpBitCount:
David Netoab03f432017-11-03 17:00:44 -04005399 case spv::OpCompositeConstruct:
David Neto22f144c2017-06-12 14:26:21 -04005400 case spv::OpCompositeExtract:
5401 case spv::OpVectorExtractDynamic:
5402 case spv::OpCompositeInsert:
alan-baker3f772c02021-06-15 22:18:11 -04005403 case spv::OpCopyLogical:
David Neto0a2f98d2017-09-15 19:38:40 -04005404 case spv::OpCopyObject:
David Neto22f144c2017-06-12 14:26:21 -04005405 case spv::OpVectorInsertDynamic:
5406 case spv::OpVectorShuffle:
5407 case spv::OpIEqual:
5408 case spv::OpINotEqual:
5409 case spv::OpUGreaterThan:
5410 case spv::OpUGreaterThanEqual:
5411 case spv::OpULessThan:
5412 case spv::OpULessThanEqual:
5413 case spv::OpSGreaterThan:
5414 case spv::OpSGreaterThanEqual:
5415 case spv::OpSLessThan:
5416 case spv::OpSLessThanEqual:
5417 case spv::OpFOrdEqual:
5418 case spv::OpFOrdGreaterThan:
5419 case spv::OpFOrdGreaterThanEqual:
5420 case spv::OpFOrdLessThan:
5421 case spv::OpFOrdLessThanEqual:
5422 case spv::OpFOrdNotEqual:
5423 case spv::OpFUnordEqual:
5424 case spv::OpFUnordGreaterThan:
5425 case spv::OpFUnordGreaterThanEqual:
5426 case spv::OpFUnordLessThan:
5427 case spv::OpFUnordLessThanEqual:
5428 case spv::OpFUnordNotEqual:
5429 case spv::OpExtInst:
5430 case spv::OpIsInf:
5431 case spv::OpIsNan:
5432 case spv::OpAny:
5433 case spv::OpAll:
5434 case spv::OpUndef:
5435 case spv::OpConstantNull:
5436 case spv::OpLogicalOr:
5437 case spv::OpLogicalAnd:
5438 case spv::OpLogicalNot:
5439 case spv::OpLogicalNotEqual:
5440 case spv::OpConstantComposite:
5441 case spv::OpSpecConstantComposite:
5442 case spv::OpConstantTrue:
5443 case spv::OpConstantFalse:
5444 case spv::OpConstant:
5445 case spv::OpSpecConstant:
5446 case spv::OpVariable:
5447 case spv::OpFunctionCall:
5448 case spv::OpSampledImage:
alan-baker75090e42020-02-20 11:21:04 -05005449 case spv::OpImageFetch:
alan-bakerf6bc8252020-09-23 14:58:55 -04005450 case spv::OpImageRead:
David Neto22f144c2017-06-12 14:26:21 -04005451 case spv::OpImageSampleExplicitLod:
David Neto5c22a252018-03-15 16:07:41 -04005452 case spv::OpImageQuerySize:
alan-bakerce179f12019-12-06 19:02:22 -05005453 case spv::OpImageQuerySizeLod:
David Neto22f144c2017-06-12 14:26:21 -04005454 case spv::OpSelect:
5455 case spv::OpPhi:
5456 case spv::OpLoad:
alan-baker4986eff2020-10-29 13:38:00 -04005457 case spv::OpAtomicLoad:
David Neto22f144c2017-06-12 14:26:21 -04005458 case spv::OpAtomicIAdd:
5459 case spv::OpAtomicISub:
5460 case spv::OpAtomicExchange:
5461 case spv::OpAtomicIIncrement:
5462 case spv::OpAtomicIDecrement:
5463 case spv::OpAtomicCompareExchange:
5464 case spv::OpAtomicUMin:
5465 case spv::OpAtomicSMin:
5466 case spv::OpAtomicUMax:
5467 case spv::OpAtomicSMax:
5468 case spv::OpAtomicAnd:
5469 case spv::OpAtomicOr:
5470 case spv::OpAtomicXor:
SJW806a5d82020-07-15 12:51:38 -05005471 case spv::OpDot:
5472 case spv::OpGroupNonUniformAll:
5473 case spv::OpGroupNonUniformAny:
5474 case spv::OpGroupNonUniformBroadcast:
5475 case spv::OpGroupNonUniformIAdd:
5476 case spv::OpGroupNonUniformFAdd:
5477 case spv::OpGroupNonUniformSMin:
5478 case spv::OpGroupNonUniformUMin:
5479 case spv::OpGroupNonUniformFMin:
5480 case spv::OpGroupNonUniformSMax:
5481 case spv::OpGroupNonUniformUMax:
5482 case spv::OpGroupNonUniformFMax: {
David Neto22f144c2017-06-12 14:26:21 -04005483 WriteWordCountAndOpcode(Inst);
5484 WriteOperand(Ops[0]);
5485 WriteResultID(Inst);
5486 for (uint32_t i = 1; i < Ops.size(); i++) {
5487 WriteOperand(Ops[i]);
5488 }
5489 break;
5490 }
5491 }
5492 }
5493}
Alan Baker9bf93fb2018-08-28 16:59:26 -04005494
alan-bakerb6b09dc2018-11-08 16:59:28 -05005495bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
Alan Baker9bf93fb2018-08-28 16:59:26 -04005496 switch (type->getTypeID()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005497 case Type::HalfTyID:
5498 case Type::FloatTyID:
5499 case Type::DoubleTyID:
5500 case Type::IntegerTyID:
James Price59a1c752020-04-23 23:06:16 -04005501 case Type::FixedVectorTyID:
alan-bakerb6b09dc2018-11-08 16:59:28 -05005502 return true;
5503 case Type::PointerTyID: {
5504 const PointerType *pointer_type = cast<PointerType>(type);
5505 if (pointer_type->getPointerAddressSpace() !=
5506 AddressSpace::UniformConstant) {
5507 auto pointee_type = pointer_type->getPointerElementType();
5508 if (pointee_type->isStructTy() &&
5509 cast<StructType>(pointee_type)->isOpaque()) {
5510 // Images and samplers are not nullable.
5511 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005512 }
Alan Baker9bf93fb2018-08-28 16:59:26 -04005513 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05005514 return true;
5515 }
5516 case Type::ArrayTyID:
alan-baker8eb435a2020-04-08 00:42:06 -04005517 return IsTypeNullable(type->getArrayElementType());
alan-bakerb6b09dc2018-11-08 16:59:28 -05005518 case Type::StructTyID: {
5519 const StructType *struct_type = cast<StructType>(type);
5520 // Images and samplers are not nullable.
5521 if (struct_type->isOpaque())
Alan Baker9bf93fb2018-08-28 16:59:26 -04005522 return false;
alan-bakerb6b09dc2018-11-08 16:59:28 -05005523 for (const auto element : struct_type->elements()) {
5524 if (!IsTypeNullable(element))
5525 return false;
5526 }
5527 return true;
5528 }
5529 default:
5530 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005531 }
5532}
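
// Editorial example of the rule above: a POD struct such as
// struct { float x; int i; } is nullable and can be zero-initialized with
// OpConstantNull, whereas an opaque image or sampler struct type is not,
// and neither is any struct or array that contains one.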
Alan Bakerfcda9482018-10-02 17:09:59 -04005533
SJW77b87ad2020-04-21 14:37:52 -05005534void SPIRVProducerPass::PopulateUBOTypeMaps() {
Alan Bakerfcda9482018-10-02 17:09:59 -04005535 if (auto *offsets_md =
SJW77b87ad2020-04-21 14:37:52 -05005536 module->getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
Alan Bakerfcda9482018-10-02 17:09:59 -04005537    // Metadata is stored as key-value pair operands. The first element of each
5538 // operand is the type and the second is a vector of offsets.
5539 for (const auto *operand : offsets_md->operands()) {
5540 const auto *pair = cast<MDTuple>(operand);
5541 auto *type =
5542 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5543 const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
5544 std::vector<uint32_t> offsets;
5545 for (const Metadata *offset_md : offset_vector->operands()) {
5546 const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
alan-bakerb6b09dc2018-11-08 16:59:28 -05005547 offsets.push_back(static_cast<uint32_t>(
5548 cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
Alan Bakerfcda9482018-10-02 17:09:59 -04005549 }
5550 RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
5551 }
5552 }
5553
5554 if (auto *sizes_md =
SJW77b87ad2020-04-21 14:37:52 -05005555 module->getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
Alan Bakerfcda9482018-10-02 17:09:59 -04005556 // Metadata is stored as key-value pair operands. The first element of each
5557 // operand is the type and the second is a triple of sizes: type size in
5558 // bits, store size and alloc size.
5559 for (const auto *operand : sizes_md->operands()) {
5560 const auto *pair = cast<MDTuple>(operand);
5561 auto *type =
5562 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5563 const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
5564 uint64_t type_size_in_bits =
5565 cast<ConstantInt>(
5566 cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
5567 ->getZExtValue();
5568 uint64_t type_store_size =
5569 cast<ConstantInt>(
5570 cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
5571 ->getZExtValue();
5572 uint64_t type_alloc_size =
5573 cast<ConstantInt>(
5574 cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
5575 ->getZExtValue();
5576 RemappedUBOTypeSizes.insert(std::make_pair(
5577 type, std::make_tuple(type_size_in_bits, type_store_size,
5578 type_alloc_size)));
5579 }
5580 }
5581}
5582
5583uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
5584 const DataLayout &DL) {
5585 auto iter = RemappedUBOTypeSizes.find(type);
5586 if (iter != RemappedUBOTypeSizes.end()) {
5587 return std::get<0>(iter->second);
5588 }
5589
5590 return DL.getTypeSizeInBits(type);
5591}
5592
Alan Bakerfcda9482018-10-02 17:09:59 -04005593uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
5594 auto iter = RemappedUBOTypeSizes.find(type);
5595 if (iter != RemappedUBOTypeSizes.end()) {
5596 return std::get<2>(iter->second);
5597 }
5598
5599 return DL.getTypeAllocSize(type);
5600}
alan-baker5b86ed72019-02-15 08:26:50 -05005601
Kévin Petitbbbda972020-03-03 19:16:31 +00005602uint32_t SPIRVProducerPass::GetExplicitLayoutStructMemberOffset(
5603 StructType *type, unsigned member, const DataLayout &DL) {
5604 const auto StructLayout = DL.getStructLayout(type);
5605 // Search for the correct offsets if this type was remapped.
5606 std::vector<uint32_t> *offsets = nullptr;
5607 auto iter = RemappedUBOTypeOffsets.find(type);
5608 if (iter != RemappedUBOTypeOffsets.end()) {
5609 offsets = &iter->second;
5610 }
5611 auto ByteOffset =
5612 static_cast<uint32_t>(StructLayout->getElementOffset(member));
5613 if (offsets) {
5614 ByteOffset = (*offsets)[member];
5615 }
5616
5617 return ByteOffset;
5618}
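
// Editorial sketch of why the remapping matters, assuming a std140-style UBO
// layout: for a hypothetical struct { float a; float b[2]; }, LLVM's
// DataLayout places |b| at byte offset 4, but the remapped layout rounds the
// array up to a 16-byte boundary, so the offset recorded in the metadata
// (16) is returned instead of the DataLayout value.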
5619
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005620void SPIRVProducerPass::setVariablePointersCapabilities(
5621 unsigned address_space) {
alan-baker5b86ed72019-02-15 08:26:50 -05005622 if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
SJW01901d92020-05-21 08:58:31 -05005623 setVariablePointersStorageBuffer();
alan-baker5b86ed72019-02-15 08:26:50 -05005624 } else {
SJW01901d92020-05-21 08:58:31 -05005625 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05005626 }
5627}
5628
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005629Value *SPIRVProducerPass::GetBasePointer(Value *v) {
alan-baker5b86ed72019-02-15 08:26:50 -05005630 if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
5631 return GetBasePointer(gep->getPointerOperand());
5632 }
5633
5634 // Conservatively return |v|.
5635 return v;
5636}
5637
5638bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
5639 if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
5640 if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
alan-baker7506abb2020-09-10 15:02:55 -04005641 const auto &lhs_func_info =
5642 Builtins::Lookup(lhs_call->getCalledFunction());
5643 const auto &rhs_func_info =
5644 Builtins::Lookup(rhs_call->getCalledFunction());
SJW61531372020-06-09 07:31:08 -05005645 if (lhs_func_info.getType() == Builtins::kClspvResource &&
5646 rhs_func_info.getType() == Builtins::kClspvResource) {
alan-baker5b86ed72019-02-15 08:26:50 -05005647 // For resource accessors, match descriptor set and binding.
5648 if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
5649 lhs_call->getOperand(1) == rhs_call->getOperand(1))
5650 return true;
SJW61531372020-06-09 07:31:08 -05005651 } else if (lhs_func_info.getType() == Builtins::kClspvLocal &&
5652 rhs_func_info.getType() == Builtins::kClspvLocal) {
alan-baker5b86ed72019-02-15 08:26:50 -05005653 // For workgroup resources, match spec id.
5654 if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
5655 return true;
5656 }
5657 }
5658 }
5659
5660 return false;
5661}
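
// Editorial example: two distinct calls to the clspv resource-accessor
// builtin that pass the same descriptor-set and binding operands name the
// same underlying resource, so sameResource() returns true for them; for the
// workgroup (local) accessor it is the spec-id operand that must match.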
5662
5663bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
5664 assert(inst->getType()->isPointerTy());
5665 assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
5666 spv::StorageClassStorageBuffer);
5667 const bool hack_undef = clspv::Option::HackUndef();
5668 if (auto *select = dyn_cast<SelectInst>(inst)) {
5669 auto *true_base = GetBasePointer(select->getTrueValue());
5670 auto *false_base = GetBasePointer(select->getFalseValue());
5671
5672 if (true_base == false_base)
5673 return true;
5674
5675     // If either the true or false operand is null, then we satisfy the same
5676     // object constraint.
5677 if (auto *true_cst = dyn_cast<Constant>(true_base)) {
5678 if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
5679 return true;
5680 }
5681
5682 if (auto *false_cst = dyn_cast<Constant>(false_base)) {
5683 if (false_cst->isNullValue() ||
5684 (hack_undef && isa<UndefValue>(false_base)))
5685 return true;
5686 }
5687
5688 if (sameResource(true_base, false_base))
5689 return true;
5690 } else if (auto *phi = dyn_cast<PHINode>(inst)) {
5691 Value *value = nullptr;
5692 bool ok = true;
5693 for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
5694 auto *base = GetBasePointer(phi->getIncomingValue(i));
5695       // Null values satisfy the constraint of selecting from the same
5696       // object.
5697 if (!value) {
5698 if (auto *cst = dyn_cast<Constant>(base)) {
5699 if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
5700 value = base;
5701 } else {
5702 value = base;
5703 }
5704 } else if (base != value) {
5705 if (auto *base_cst = dyn_cast<Constant>(base)) {
5706 if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
5707 continue;
5708 }
5709
5710 if (sameResource(value, base))
5711 continue;
5712
5713 // Values don't represent the same base.
5714 ok = false;
5715 }
5716 }
5717
5718 return ok;
5719 }
5720
5721 // Conservatively return false.
5722 return false;
5723}
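
// Editorial example: a select between two pointers into the same storage
// buffer (or between such a pointer and a null/undef value) satisfies the
// same-object rule because both bases trace back to one resource accessor,
// whereas a select between pointers into two different storage buffers makes
// this function conservatively return false.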
alan-bakere9308012019-03-15 10:25:13 -04005724
5725bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
5726 if (!Arg.getType()->isPointerTy() ||
5727 Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
5728 // Only SSBOs need to be annotated as coherent.
5729 return false;
5730 }
5731
5732 DenseSet<Value *> visited;
5733 std::vector<Value *> stack;
5734 for (auto *U : Arg.getParent()->users()) {
5735 if (auto *call = dyn_cast<CallInst>(U)) {
5736 stack.push_back(call->getOperand(Arg.getArgNo()));
5737 }
5738 }
5739
5740 while (!stack.empty()) {
5741 Value *v = stack.back();
5742 stack.pop_back();
5743
5744 if (!visited.insert(v).second)
5745 continue;
5746
5747 auto *resource_call = dyn_cast<CallInst>(v);
5748 if (resource_call &&
SJW61531372020-06-09 07:31:08 -05005749 Builtins::Lookup(resource_call->getCalledFunction()).getType() ==
5750 Builtins::kClspvResource) {
alan-bakere9308012019-03-15 10:25:13 -04005751 // If this is a resource accessor function, check if the coherent operand
5752 // is set.
5753 const auto coherent =
5754 unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
5755 ->getZExtValue());
5756 if (coherent == 1)
5757 return true;
5758 } else if (auto *arg = dyn_cast<Argument>(v)) {
5759 // If this is a function argument, trace through its callers.
alan-bakere98f3f92019-04-08 15:06:36 -04005760 for (auto U : arg->getParent()->users()) {
alan-bakere9308012019-03-15 10:25:13 -04005761 if (auto *call = dyn_cast<CallInst>(U)) {
5762 stack.push_back(call->getOperand(arg->getArgNo()));
5763 }
5764 }
5765 } else if (auto *user = dyn_cast<User>(v)) {
5766 // If this is a user, traverse all operands that could lead to resource
5767 // variables.
5768 for (unsigned i = 0; i != user->getNumOperands(); ++i) {
5769 Value *operand = user->getOperand(i);
5770 if (operand->getType()->isPointerTy() &&
5771 operand->getType()->getPointerAddressSpace() ==
5772 clspv::AddressSpace::Global) {
5773 stack.push_back(operand);
5774 }
5775 }
5776 }
5777 }
5778
5779 // No coherent resource variables encountered.
5780 return false;
5781}
alan-baker06cad652019-12-03 17:56:47 -05005782
SJW77b87ad2020-04-21 14:37:52 -05005783void SPIRVProducerPass::PopulateStructuredCFGMaps() {
alan-baker06cad652019-12-03 17:56:47 -05005784 // First, track loop merges and continues.
5785 DenseSet<BasicBlock *> LoopMergesAndContinues;
SJW77b87ad2020-04-21 14:37:52 -05005786 for (auto &F : *module) {
alan-baker06cad652019-12-03 17:56:47 -05005787 if (F.isDeclaration())
5788 continue;
5789
5790 DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
5791 const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>(F).getLoopInfo();
5792 std::deque<BasicBlock *> order;
5793 DenseSet<BasicBlock *> visited;
5794 clspv::ComputeStructuredOrder(&*F.begin(), &DT, LI, &order, &visited);
5795
5796 for (auto BB : order) {
5797 auto terminator = BB->getTerminator();
5798 auto branch = dyn_cast<BranchInst>(terminator);
5799 if (LI.isLoopHeader(BB)) {
5800 auto L = LI.getLoopFor(BB);
5801 BasicBlock *ContinueBB = nullptr;
5802 BasicBlock *MergeBB = nullptr;
5803
5804 MergeBB = L->getExitBlock();
5805 if (!MergeBB) {
5806           // The StructurizeCFG pass converts the CFG into a triangle shape in
5807           // which every region has a single entry and exit. As a result, a
5808           // loop should not have multiple exits.
5809 llvm_unreachable("Loop has multiple exits???");
5810 }
5811
5812 if (L->isLoopLatch(BB)) {
5813 ContinueBB = BB;
5814 } else {
5815           // From SPIR-V spec 2.11, the Continue Target must dominate the
5816           // back-edge block.
5817 BasicBlock *Header = L->getHeader();
5818 BasicBlock *Latch = L->getLoopLatch();
5819 for (auto *loop_block : L->blocks()) {
5820 if (loop_block == Header) {
5821 continue;
5822 }
5823
5824             // Check whether this block dominates the block with the back-edge.
5825             // The loop latch is the single block with a back-edge. Where
5826             // possible, StructurizeCFG made the loop conform to this
5827             // requirement; otherwise |Latch| is nullptr.
5828 if (DT.dominates(loop_block, Latch)) {
5829 ContinueBB = loop_block;
5830 }
5831 }
5832
5833 if (!ContinueBB) {
5834 llvm_unreachable("Wrong continue block from loop");
5835 }
5836 }
5837
5838 // Record the continue and merge blocks.
5839 MergeBlocks[BB] = MergeBB;
5840 ContinueBlocks[BB] = ContinueBB;
5841 LoopMergesAndContinues.insert(MergeBB);
5842 LoopMergesAndContinues.insert(ContinueBB);
5843 } else if (branch && branch->isConditional()) {
5844 auto L = LI.getLoopFor(BB);
5845 bool HasBackedge = false;
5846 while (L && !HasBackedge) {
5847 if (L->isLoopLatch(BB)) {
5848 HasBackedge = true;
5849 }
5850 L = L->getParentLoop();
5851 }
5852
5853 if (!HasBackedge) {
5854 // Only need a merge if the branch doesn't include a loop break or
5855 // continue.
5856 auto true_bb = branch->getSuccessor(0);
5857 auto false_bb = branch->getSuccessor(1);
5858 if (!LoopMergesAndContinues.count(true_bb) &&
5859 !LoopMergesAndContinues.count(false_bb)) {
5860             // The StructurizeCFG pass has already manipulated the CFG. Just use
5861             // the false block of the branch instruction as the merge block.
5862 MergeBlocks[BB] = false_bb;
5863 }
5864 }
5865 }
5866 }
5867 }
5868}
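
// Editorial example: for a simple counted loop the header's merge block is
// the loop's unique exit block and its continue target is the latch, so the
// code above records MergeBlocks[header] = exit and
// ContinueBlocks[header] = latch; a conditional branch that is not a loop
// back-edge, break, or continue records only MergeBlocks[bb] = false_successor.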
alan-baker86ce19c2020-08-05 13:09:19 -04005869
5870SPIRVID SPIRVProducerPass::getReflectionImport() {
5871 if (!ReflectionID.isValid()) {
5872 addSPIRVInst<kExtensions>(spv::OpExtension, "SPV_KHR_non_semantic_info");
5873 ReflectionID = addSPIRVInst<kImports>(spv::OpExtInstImport,
5874 "NonSemantic.ClspvReflection.1");
5875 }
5876 return ReflectionID;
5877}
5878
5879void SPIRVProducerPass::GenerateReflection() {
5880 GenerateKernelReflection();
5881 GeneratePushConstantReflection();
5882 GenerateSpecConstantReflection();
5883}
5884
5885void SPIRVProducerPass::GeneratePushConstantReflection() {
5886 if (auto GV = module->getGlobalVariable(clspv::PushConstantsVariableName())) {
5887 auto const &DL = module->getDataLayout();
5888 auto MD = GV->getMetadata(clspv::PushConstantsMetadataName());
5889 auto STy = cast<StructType>(GV->getValueType());
5890
5891 for (unsigned i = 0; i < STy->getNumElements(); i++) {
5892 auto pc = static_cast<clspv::PushConstant>(
5893 mdconst::extract<ConstantInt>(MD->getOperand(i))->getZExtValue());
5894 if (pc == PushConstant::KernelArgument)
5895 continue;
5896
5897 auto memberType = STy->getElementType(i);
5898 auto offset = GetExplicitLayoutStructMemberOffset(STy, i, DL);
Marco Antognini7e338402021-03-15 12:48:37 +00005899#ifndef NDEBUG
alan-baker86ce19c2020-08-05 13:09:19 -04005900 unsigned previousOffset = 0;
5901 if (i > 0) {
5902 previousOffset = GetExplicitLayoutStructMemberOffset(STy, i - 1, DL);
5903 }
alan-baker86ce19c2020-08-05 13:09:19 -04005904 assert(isValidExplicitLayout(*module, STy, i,
5905 spv::StorageClassPushConstant, offset,
5906 previousOffset));
Marco Antognini7e338402021-03-15 12:48:37 +00005907#endif
alan-baker86ce19c2020-08-05 13:09:19 -04005908
5909 reflection::ExtInst pc_inst = reflection::ExtInstMax;
5910 switch (pc) {
5911 case PushConstant::GlobalOffset:
5912 pc_inst = reflection::ExtInstPushConstantGlobalOffset;
5913 break;
5914 case PushConstant::EnqueuedLocalSize:
5915 pc_inst = reflection::ExtInstPushConstantEnqueuedLocalSize;
5916 break;
5917 case PushConstant::GlobalSize:
5918 pc_inst = reflection::ExtInstPushConstantGlobalSize;
5919 break;
5920 case PushConstant::RegionOffset:
5921 pc_inst = reflection::ExtInstPushConstantRegionOffset;
5922 break;
5923 case PushConstant::NumWorkgroups:
5924 pc_inst = reflection::ExtInstPushConstantNumWorkgroups;
5925 break;
5926 case PushConstant::RegionGroupOffset:
5927 pc_inst = reflection::ExtInstPushConstantRegionGroupOffset;
5928 break;
5929 default:
5930 llvm_unreachable("Unhandled push constant");
5931 break;
5932 }
5933
5934 auto import_id = getReflectionImport();
Marco Antognini7e338402021-03-15 12:48:37 +00005935 auto size = static_cast<uint32_t>(GetTypeSizeInBits(memberType, DL)) / 8;
alan-baker86ce19c2020-08-05 13:09:19 -04005936 SPIRVOperandVec Ops;
5937 Ops << getSPIRVType(Type::getVoidTy(module->getContext())) << import_id
5938 << pc_inst << getSPIRVInt32Constant(offset)
5939 << getSPIRVInt32Constant(size);
5940 addSPIRVInst(spv::OpExtInst, Ops);
5941 }
5942 }
5943}
5944
5945void SPIRVProducerPass::GenerateSpecConstantReflection() {
5946 const uint32_t kMax = std::numeric_limits<uint32_t>::max();
5947 uint32_t wgsize_id[3] = {kMax, kMax, kMax};
5948 uint32_t global_offset_id[3] = {kMax, kMax, kMax};
5949 uint32_t work_dim_id = kMax;
5950 for (auto pair : clspv::GetSpecConstants(module)) {
5951 auto kind = pair.first;
5952 auto id = pair.second;
5953
5954 // Local memory size is only used for kernel arguments.
5955 if (kind == SpecConstant::kLocalMemorySize)
5956 continue;
5957
5958 switch (kind) {
5959 case SpecConstant::kWorkgroupSizeX:
5960 wgsize_id[0] = id;
5961 break;
5962 case SpecConstant::kWorkgroupSizeY:
5963 wgsize_id[1] = id;
5964 break;
5965 case SpecConstant::kWorkgroupSizeZ:
5966 wgsize_id[2] = id;
5967 break;
5968 case SpecConstant::kGlobalOffsetX:
5969 global_offset_id[0] = id;
5970 break;
5971 case SpecConstant::kGlobalOffsetY:
5972 global_offset_id[1] = id;
5973 break;
5974 case SpecConstant::kGlobalOffsetZ:
5975 global_offset_id[2] = id;
5976 break;
5977 case SpecConstant::kWorkDim:
5978 work_dim_id = id;
5979 break;
5980 default:
5981 llvm_unreachable("Unhandled spec constant");
5982 }
5983 }
5984
5985 auto import_id = getReflectionImport();
5986 auto void_id = getSPIRVType(Type::getVoidTy(module->getContext()));
5987 SPIRVOperandVec Ops;
5988 if (wgsize_id[0] != kMax) {
5989 assert(wgsize_id[1] != kMax);
5990 assert(wgsize_id[2] != kMax);
5991 Ops.clear();
5992 Ops << void_id << import_id << reflection::ExtInstSpecConstantWorkgroupSize
5993 << getSPIRVInt32Constant(wgsize_id[0])
5994 << getSPIRVInt32Constant(wgsize_id[1])
5995 << getSPIRVInt32Constant(wgsize_id[2]);
5996 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
5997 }
5998 if (global_offset_id[0] != kMax) {
5999 assert(global_offset_id[1] != kMax);
6000 assert(global_offset_id[2] != kMax);
6001 Ops.clear();
6002 Ops << void_id << import_id << reflection::ExtInstSpecConstantGlobalOffset
6003 << getSPIRVInt32Constant(global_offset_id[0])
6004 << getSPIRVInt32Constant(global_offset_id[1])
6005 << getSPIRVInt32Constant(global_offset_id[2]);
6006 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
6007 }
6008 if (work_dim_id != kMax) {
6009 Ops.clear();
6010 Ops << void_id << import_id << reflection::ExtInstSpecConstantWorkDim
6011 << getSPIRVInt32Constant(work_dim_id);
6012 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
6013 }
6014}
6015
6016void SPIRVProducerPass::GenerateKernelReflection() {
6017 const auto &DL = module->getDataLayout();
6018 auto import_id = getReflectionImport();
6019 auto void_id = getSPIRVType(Type::getVoidTy(module->getContext()));
6020
6021 for (auto &F : *module) {
6022 if (F.isDeclaration() || F.getCallingConv() != CallingConv::SPIR_KERNEL) {
6023 continue;
6024 }
6025
6026 // OpString for the kernel name.
6027 auto kernel_name =
6028 addSPIRVInst<kDebug>(spv::OpString, F.getName().str().c_str());
6029
6030 // Kernel declaration
6031 // Ops[0] = void type
6032 // Ops[1] = reflection ext import
6033 // Ops[2] = function id
6034 // Ops[3] = kernel name
6035 SPIRVOperandVec Ops;
6036 Ops << void_id << import_id << reflection::ExtInstKernel << ValueMap[&F]
6037 << kernel_name;
6038 auto kernel_decl = addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
6039
6040 // Generate the required workgroup size property if it was specified.
6041 if (const MDNode *MD = F.getMetadata("reqd_work_group_size")) {
6042 uint32_t CurXDimCst = static_cast<uint32_t>(
6043 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
6044 uint32_t CurYDimCst = static_cast<uint32_t>(
6045 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
6046 uint32_t CurZDimCst = static_cast<uint32_t>(
6047 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
6048
6049 Ops.clear();
6050 Ops << void_id << import_id
6051 << reflection::ExtInstPropertyRequiredWorkgroupSize << kernel_decl
6052 << getSPIRVInt32Constant(CurXDimCst)
6053 << getSPIRVInt32Constant(CurYDimCst)
6054 << getSPIRVInt32Constant(CurZDimCst);
6055 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
6056 }
6057
6058 auto &resource_var_at_index = FunctionToResourceVarsMap[&F];
6059 auto *func_ty = F.getFunctionType();
6060
6061 // If we've clustered POD arguments, then argument details are in metadata.
6062 // If an argument maps to a resource variable, then get descriptor set and
6063 // binding from the resource variable. Other info comes from the metadata.
6064 const auto *arg_map = F.getMetadata(clspv::KernelArgMapMetadataName());
6065 auto local_spec_id_md =
6066 module->getNamedMetadata(clspv::LocalSpecIdMetadataName());
6067 if (arg_map) {
6068 for (const auto &arg : arg_map->operands()) {
6069 const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
6070 assert(arg_node->getNumOperands() == 6);
6071 const auto name =
6072 dyn_cast<MDString>(arg_node->getOperand(0))->getString();
6073 const auto old_index =
6074 dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
6075 // Remapped argument index
6076 const int new_index = static_cast<int>(
6077 dyn_extract<ConstantInt>(arg_node->getOperand(2))->getSExtValue());
6078 const auto offset =
6079 dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
6080 const auto size =
6081 dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
6082 const auto argKind = clspv::GetArgKindFromName(
6083 dyn_cast<MDString>(arg_node->getOperand(5))->getString().str());
6084
6085 // If this is a local memory argument, find the right spec id for this
6086 // argument.
6087 int64_t spec_id = -1;
6088 if (argKind == clspv::ArgKind::Local) {
6089 for (auto spec_id_arg : local_spec_id_md->operands()) {
6090 if ((&F == dyn_cast<Function>(
6091 dyn_cast<ValueAsMetadata>(spec_id_arg->getOperand(0))
6092 ->getValue())) &&
6093 (static_cast<uint64_t>(new_index) ==
6094 mdconst::extract<ConstantInt>(spec_id_arg->getOperand(1))
6095 ->getZExtValue())) {
6096 spec_id =
6097 mdconst::extract<ConstantInt>(spec_id_arg->getOperand(2))
6098 ->getSExtValue();
6099 break;
6100 }
6101 }
6102 }
6103
6104 // Generate the specific argument instruction.
6105 const uint32_t ordinal = static_cast<uint32_t>(old_index);
6106 const uint32_t arg_offset = static_cast<uint32_t>(offset);
6107 const uint32_t arg_size = static_cast<uint32_t>(size);
6108 uint32_t elem_size = 0;
6109 uint32_t descriptor_set = 0;
6110 uint32_t binding = 0;
6111 if (spec_id > 0) {
6112 elem_size = static_cast<uint32_t>(
6113 GetTypeAllocSize(func_ty->getParamType(unsigned(new_index))
6114 ->getPointerElementType(),
6115 DL));
6116 } else if (new_index >= 0) {
6117 auto *info = resource_var_at_index[new_index];
6118 assert(info);
6119 descriptor_set = info->descriptor_set;
6120 binding = info->binding;
6121 }
6122 AddArgumentReflection(kernel_decl, name.str(), argKind, ordinal,
6123 descriptor_set, binding, arg_offset, arg_size,
6124 static_cast<uint32_t>(spec_id), elem_size);
6125 }
6126 } else {
6127 // There is no argument map.
6128 // Take descriptor info from the resource variable calls.
6129 // Take argument name and size from the arguments list.
6130
6131 SmallVector<Argument *, 4> arguments;
6132 for (auto &arg : F.args()) {
6133 arguments.push_back(&arg);
6134 }
6135
6136 unsigned arg_index = 0;
6137 for (auto *info : resource_var_at_index) {
6138 if (info) {
6139 auto arg = arguments[arg_index];
6140 unsigned arg_size = 0;
6141 if (info->arg_kind == clspv::ArgKind::Pod ||
6142 info->arg_kind == clspv::ArgKind::PodUBO ||
6143 info->arg_kind == clspv::ArgKind::PodPushConstant) {
6144 arg_size =
6145 static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
6146 }
6147
6148           // Local pointer arguments are unused in this case;
6149           // offset, spec_id and elem_size are always 0.
6150 AddArgumentReflection(kernel_decl, arg->getName().str(),
6151 info->arg_kind, arg_index, info->descriptor_set,
6152 info->binding, 0, arg_size, 0, 0);
6153 }
6154 arg_index++;
6155 }
6156 // Generate mappings for pointer-to-local arguments.
6157 for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
6158 Argument *arg = arguments[arg_index];
6159 auto where = LocalArgSpecIds.find(arg);
6160 if (where != LocalArgSpecIds.end()) {
6161 auto &local_arg_info = LocalSpecIdInfoMap[where->second];
6162
6163 // descriptor_set, binding, offset and size are always 0.
6164 AddArgumentReflection(kernel_decl, arg->getName().str(),
6165 ArgKind::Local, arg_index, 0, 0, 0, 0,
6166 static_cast<uint32_t>(local_arg_info.spec_id),
6167 static_cast<uint32_t>(GetTypeAllocSize(
6168 local_arg_info.elem_type, DL)));
6169 }
6170 }
6171 }
6172 }
6173}
6174
6175void SPIRVProducerPass::AddArgumentReflection(
6176 SPIRVID kernel_decl, const std::string &name, clspv::ArgKind arg_kind,
6177 uint32_t ordinal, uint32_t descriptor_set, uint32_t binding,
6178 uint32_t offset, uint32_t size, uint32_t spec_id, uint32_t elem_size) {
6179 // Generate ArgumentInfo for this argument.
6180 // TODO: generate remaining optional operands.
6181 auto import_id = getReflectionImport();
6182 auto arg_name = addSPIRVInst<kDebug>(spv::OpString, name.c_str());
6183 auto void_id = getSPIRVType(Type::getVoidTy(module->getContext()));
6184 SPIRVOperandVec Ops;
6185 Ops << void_id << import_id << reflection::ExtInstArgumentInfo << arg_name;
6186 auto arg_info = addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
6187
6188 Ops.clear();
6189 Ops << void_id << import_id;
6190 reflection::ExtInst ext_inst = reflection::ExtInstMax;
6191 // Determine the extended instruction.
6192 switch (arg_kind) {
6193 case clspv::ArgKind::Buffer:
6194 ext_inst = reflection::ExtInstArgumentStorageBuffer;
6195 break;
6196 case clspv::ArgKind::BufferUBO:
6197 ext_inst = reflection::ExtInstArgumentUniform;
6198 break;
6199 case clspv::ArgKind::Local:
6200 ext_inst = reflection::ExtInstArgumentWorkgroup;
6201 break;
6202 case clspv::ArgKind::Pod:
6203 ext_inst = reflection::ExtInstArgumentPodStorageBuffer;
6204 break;
6205 case clspv::ArgKind::PodUBO:
6206 ext_inst = reflection::ExtInstArgumentPodUniform;
6207 break;
6208 case clspv::ArgKind::PodPushConstant:
6209 ext_inst = reflection::ExtInstArgumentPodPushConstant;
6210 break;
alan-bakerf6bc8252020-09-23 14:58:55 -04006211 case clspv::ArgKind::SampledImage:
alan-baker86ce19c2020-08-05 13:09:19 -04006212 ext_inst = reflection::ExtInstArgumentSampledImage;
6213 break;
alan-bakerf6bc8252020-09-23 14:58:55 -04006214 case clspv::ArgKind::StorageImage:
alan-baker86ce19c2020-08-05 13:09:19 -04006215 ext_inst = reflection::ExtInstArgumentStorageImage;
6216 break;
6217 case clspv::ArgKind::Sampler:
6218 ext_inst = reflection::ExtInstArgumentSampler;
6219 break;
6220 default:
6221 llvm_unreachable("Unhandled argument reflection");
6222 break;
6223 }
6224 Ops << ext_inst << kernel_decl << getSPIRVInt32Constant(ordinal);
6225
6226 // Add descriptor set and binding for applicable arguments.
6227 switch (arg_kind) {
6228 case clspv::ArgKind::Buffer:
6229 case clspv::ArgKind::BufferUBO:
6230 case clspv::ArgKind::Pod:
6231 case clspv::ArgKind::PodUBO:
alan-bakerf6bc8252020-09-23 14:58:55 -04006232 case clspv::ArgKind::SampledImage:
6233 case clspv::ArgKind::StorageImage:
alan-baker86ce19c2020-08-05 13:09:19 -04006234 case clspv::ArgKind::Sampler:
6235 Ops << getSPIRVInt32Constant(descriptor_set)
6236 << getSPIRVInt32Constant(binding);
6237 break;
6238 default:
6239 break;
6240 }
6241
6242 // Add remaining operands for arguments.
6243 switch (arg_kind) {
6244 case clspv::ArgKind::Local:
6245 Ops << getSPIRVInt32Constant(spec_id) << getSPIRVInt32Constant(elem_size);
6246 break;
6247 case clspv::ArgKind::Pod:
6248 case clspv::ArgKind::PodUBO:
6249 case clspv::ArgKind::PodPushConstant:
6250 Ops << getSPIRVInt32Constant(offset) << getSPIRVInt32Constant(size);
6251 break;
6252 default:
6253 break;
6254 }
6255 Ops << arg_info;
6256 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
6257}
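
// Editorial sketch of the extended instruction assembled above for a
// storage-buffer argument; the operand order is
//   %void %reflection_import ArgumentStorageBuffer
//   %kernel_decl %ordinal %descriptor_set %binding %arg_info
// POD arguments additionally carry offset and size, and workgroup (local)
// arguments carry a spec id and element size in place of a descriptor set
// and binding.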