// Copyright 2017 The Clspv Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifdef _MSC_VER
#pragma warning(push, 0)
#endif

#include <cassert>
#include <cstring>
#include <iomanip>
#include <list>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <tuple>
#include <unordered_set>
#include <utility>

#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/UniqueVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/FileSystem.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Cloning.h"

// enable spv::HasResultAndType
#define SPV_ENABLE_UTILITY_CODE
#include "spirv/unified1/spirv.hpp"

#include "clspv/AddressSpace.h"
#include "clspv/Option.h"
#include "clspv/PushConstant.h"
#include "clspv/SpecConstant.h"
#include "clspv/spirv_c_strings.hpp"
#include "clspv/spirv_glsl.hpp"
#include "clspv/spirv_reflection.hpp"

#include "ArgKind.h"
#include "Builtins.h"
#include "ComputeStructuredOrder.h"
#include "ConstantEmitter.h"
#include "Constants.h"
#include "DescriptorCounter.h"
#include "Layout.h"
#include "NormalizeGlobalVariable.h"
#include "Passes.h"
#include "SpecConstant.h"
#include "Types.h"

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

using namespace llvm;
using namespace clspv;
using namespace clspv::Builtins;
using namespace clspv::Option;
using namespace mdconst;

namespace {

cl::opt<std::string> TestOutFile("producer-out-file", cl::init("test.spv"),
                                 cl::ReallyHidden,
                                 cl::desc("SPIRVProducer testing output file"));

cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

cl::opt<bool>
    ShowProducerIR("show-producer-ir", cl::init(false), cl::ReallyHidden,
                   cl::desc("Dump the IR at the start of SPIRVProducer"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);

// SPIRV Module Sections (per 2.4 of the SPIR-V spec)
// These are used to collect SPIRVInstructions by type on-the-fly.
enum SPIRVSection {
  kCapabilities,
  kExtensions,
  kImports,
  kMemoryModel,
  kEntryPoints,
  kExecutionModes,

  kDebug,
  kAnnotations,

  kTypes,
  kConstants = kTypes,
  kGlobalVariables,

  kFunctions,

  // This is not a section of the SPIR-V spec and should always immediately
  // precede kSectionCount. It is a convenient place for the embedded
  // reflection data.
  kReflection,
  kSectionCount
};

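// A thin wrapper around a SPIR-V result id. A default-constructed SPIRVID
// holds 0, which is never a valid id, so isValid() distinguishes "assigned"
// from "not yet assigned".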
class SPIRVID {
  uint32_t id;

public:
  SPIRVID(uint32_t _id = 0) : id(_id) {}
  uint32_t get() const { return id; }
  bool isValid() const { return id != 0; }
  bool operator==(const SPIRVID &that) const { return id == that.id; }
  bool operator<(const SPIRVID &that) const { return id < that.id; }
};

enum SPIRVOperandType { NUMBERID, LITERAL_WORD, LITERAL_DWORD, LITERAL_STRING };

struct SPIRVOperand {
  SPIRVOperand(SPIRVOperandType Ty, uint32_t Num) : Type(Ty) {
    LiteralNum[0] = Num;
  }
  SPIRVOperand(SPIRVOperandType Ty, const char *Str)
      : Type(Ty), LiteralStr(Str) {}
  SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(ArrayRef<uint32_t> NumVec) {
    auto sz = NumVec.size();
    assert(sz >= 1 && sz <= 2);
    Type = sz == 1 ? LITERAL_WORD : LITERAL_DWORD;
    LiteralNum[0] = NumVec[0];
    if (sz == 2) {
      LiteralNum[1] = NumVec[1];
    }
  }

  SPIRVOperandType getType() const { return Type; }
  uint32_t getNumID() const { return LiteralNum[0]; }
  std::string getLiteralStr() const { return LiteralStr; }
  const uint32_t *getLiteralNum() const { return LiteralNum; }

  uint32_t GetNumWords() const {
    switch (Type) {
    case NUMBERID:
    case LITERAL_WORD:
      return 1;
    case LITERAL_DWORD:
      return 2;
    case LITERAL_STRING:
      // Account for the terminating null character.
      return uint32_t((LiteralStr.size() + 4) / 4);
    }
    llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
  }

private:
  SPIRVOperandType Type;
  std::string LiteralStr;
  uint32_t LiteralNum[2];
};

typedef SmallVector<SPIRVOperand, 4> SPIRVOperandVec;

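// An in-memory SPIR-V instruction: an opcode, an optional result id, and its
// operands. WordCount tracks the encoded size in 32-bit words so the binary
// writer can emit the word-count/opcode header.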
struct SPIRVInstruction {
  // Primary constructor must have Opcode, initializes WordCount based on ResID.
  SPIRVInstruction(spv::Op Opc, SPIRVID ResID = 0)
      : Opcode(static_cast<uint16_t>(Opc)) {
    setResult(ResID);
  }

  // Creates an instruction with an opcode and no result ID, and with the given
  // operands. This calls the primary constructor to initialize Opcode and
  // WordCount. Takes ownership of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandVec &Ops) : SPIRVInstruction(Opc) {
    setOperands(Ops);
  }
  // Creates an instruction with an opcode and a result ID, and with the given
  // operands. This calls the primary constructor to initialize Opcode and
  // WordCount. Takes ownership of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVID ResID, SPIRVOperandVec &Ops)
      : SPIRVInstruction(Opc, ResID) {
    setOperands(Ops);
  }

  uint32_t getWordCount() const { return WordCount; }
  uint16_t getOpcode() const { return Opcode; }
  SPIRVID getResultID() const { return ResultID; }
  const SPIRVOperandVec &getOperands() const { return Operands; }

private:
  void setResult(SPIRVID ResID = 0) {
    WordCount = 1 + (ResID.isValid() ? 1 : 0);
    ResultID = ResID;
  }

  void setOperands(SPIRVOperandVec &Ops) {
    assert(Operands.empty());
    Operands = std::move(Ops);
    for (auto &opd : Operands) {
      WordCount += uint16_t(opd.GetNumWords());
    }
  }

private:
  uint32_t WordCount; // Check the 16-bit bound at code generation time.
  uint16_t Opcode;
  SPIRVID ResultID;
  SPIRVOperandVec Operands;
};

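// The pass that lowers an LLVM module to a SPIR-V binary. Instructions are
// collected per SPIRVSection as they are generated and serialized to |out|
// (or to a C initializer list) at the end of runOnModule.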
struct SPIRVProducerPass final : public ModulePass {
  static char ID;

  typedef DenseMap<Type *, SPIRVID> TypeMapType;
  typedef DenseMap<Type *, SmallVector<SPIRVID, 2>> LayoutTypeMapType;
  typedef UniqueVector<Type *> TypeList;
  typedef DenseMap<Value *, SPIRVID> ValueMapType;
  typedef std::list<SPIRVID> SPIRVIDListType;
  typedef std::vector<std::pair<Value *, SPIRVID>> EntryPointVecType;
  typedef std::set<uint32_t> CapabilitySetType;
  typedef std::list<SPIRVInstruction> SPIRVInstructionList;
  typedef std::map<spv::BuiltIn, SPIRVID> BuiltinConstantMapType;
  // A vector of pairs, each of which is:
  // - the LLVM instruction that we will later generate SPIR-V code for
  // - the SPIR-V instruction placeholder that will be replaced
  typedef std::vector<std::pair<Value *, SPIRVInstruction *>>
      DeferredInstVecType;
  typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
      GlobalConstFuncMapType;

  SPIRVProducerPass(
      raw_pwrite_stream *out,
      SmallVectorImpl<std::pair<unsigned, std::string>> *samplerMap,
      bool outputCInitList)
      : ModulePass(ID), module(nullptr), samplerMap(samplerMap), out(out),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(out),
        outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0), TestOutput(false) {
    addCapability(spv::CapabilityShader);
    Ptr = this;
  }

  SPIRVProducerPass()
      : ModulePass(ID), module(nullptr), samplerMap(nullptr), out(nullptr),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(nullptr),
        outputCInitList(false), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0), TestOutput(true) {
    addCapability(spv::CapabilityShader);
    Ptr = this;
  }

  virtual ~SPIRVProducerPass() {}

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<LoopInfoWrapperPass>();
  }

  virtual bool runOnModule(Module &module) override;

  // output the SPIR-V header block
  void outputHeader();

  // patch the SPIR-V header block
  void patchHeader();

  CapabilitySetType &getCapabilitySet() { return CapabilitySet; }
  TypeMapType &getImageTypeMap() { return ImageTypeMap; }
  ValueMapType &getValueMap() { return ValueMap; }
  SPIRVInstructionList &getSPIRVInstList(SPIRVSection Section) {
    return SPIRVSections[Section];
  };
  EntryPointVecType &getEntryPointVec() { return EntryPointVec; }
  DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; }
  SPIRVIDListType &getEntryPointInterfacesList() {
    return EntryPointInterfacesList;
  }
  SPIRVID getOpExtInstImportID();
  std::vector<SPIRVID> &getBuiltinDimVec() { return BuiltinDimensionVec; }

  bool hasVariablePointersStorageBuffer() {
    return HasVariablePointersStorageBuffer;
  }
  void setVariablePointersStorageBuffer() {
    if (!HasVariablePointersStorageBuffer) {
      addCapability(spv::CapabilityVariablePointersStorageBuffer);
      HasVariablePointersStorageBuffer = true;
    }
  }
  bool hasVariablePointers() { return HasVariablePointers; }
  void setVariablePointers() {
    if (!HasVariablePointers) {
      addCapability(spv::CapabilityVariablePointers);
      HasVariablePointers = true;
    }
  }
  SmallVectorImpl<std::pair<unsigned, std::string>> *getSamplerMap() {
    return samplerMap;
  }
  GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
    return GlobalConstFuncTypeMap;
  }
  SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
    return GlobalConstArgumentSet;
  }
  TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }

  void GenerateLLVMIRInfo();
  // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
  // *not* be converted to a storage buffer, replace each such global variable
  // with one in the storage class expected by SPIR-V.
  void FindGlobalConstVars();
  // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
  // ModuleOrderedResourceVars.
  void FindResourceVars();
  void FindTypesForSamplerMap();
  void FindTypesForResourceVars();

  // Returns the canonical type of |type|.
  //
  // By default, clspv maps both __constant and __global address space pointers
  // to StorageBuffer storage class. In order to prevent duplicate types from
  // being generated, clspv uses the canonical type as a representative.
  Type *CanonicalType(Type *type);

  // Lookup or create Types, Constants.
  // Returns SPIRVID once it has been created.
  SPIRVID getSPIRVType(Type *Ty, bool needs_layout);
  SPIRVID getSPIRVType(Type *Ty);
  SPIRVID getSPIRVConstant(Constant *Cst);
  SPIRVID getSPIRVInt32Constant(uint32_t CstVal);
  // Lookup SPIRVID of llvm::Value, may create Constant.
  SPIRVID getSPIRVValue(Value *V);

  bool PointerRequiresLayout(unsigned aspace);

  SPIRVID getSPIRVBuiltin(spv::BuiltIn BID, spv::Capability Cap);

  void GenerateModuleInfo();
  void GenerateGlobalVar(GlobalVariable &GV);
  void GenerateWorkgroupVars();
  // Generate reflection instructions for resource variables associated with
  // arguments to F.
  void GenerateSamplers();
  // Generate OpVariables for %clspv.resource.var.* calls.
  void GenerateResourceVars();
  void GenerateFuncPrologue(Function &F);
  void GenerateFuncBody(Function &F);
  void GenerateEntryPointInitialStores();
  spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
  spv::Op GetSPIRVCastOpcode(Instruction &I);
  spv::Op GetSPIRVBinaryOpcode(Instruction &I);
  SPIRVID GenerateClspvInstruction(CallInst *Call,
                                   const FunctionInfo &FuncInfo);
  SPIRVID GenerateImageInstruction(CallInst *Call,
                                   const FunctionInfo &FuncInfo);
  SPIRVID GenerateSubgroupInstruction(CallInst *Call,
                                      const FunctionInfo &FuncInfo);
  SPIRVID GenerateInstructionFromCall(CallInst *Call);
  void GenerateInstruction(Instruction &I);
  void GenerateFuncEpilogue();
  void HandleDeferredInstruction();
  void HandleDeferredDecorations();
  bool is4xi8vec(Type *Ty) const;
  spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
  spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
  spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
  // Returns the GLSL extended instruction enum that the given function
  // call maps to. If none, then returns the 0 value, i.e. GLSLstd450Bad.
  glsl::ExtInst getExtInstEnum(const Builtins::FunctionInfo &func_info);
  // Returns the GLSL extended instruction enum indirectly used by the given
  // function. That is, to implement the given function, we use an extended
  // instruction plus one more instruction. If none, then returns the 0 value,
  // i.e. GLSLstd450Bad.
  glsl::ExtInst getIndirectExtInstEnum(const Builtins::FunctionInfo &func_info);
  // Returns the single GLSL extended instruction used directly or
  // indirectly by the given function call.
  glsl::ExtInst
  getDirectOrIndirectExtInstEnum(const Builtins::FunctionInfo &func_info);
  void WriteOneWord(uint32_t Word);
  void WriteResultID(const SPIRVInstruction &Inst);
  void WriteWordCountAndOpcode(const SPIRVInstruction &Inst);
  void WriteOperand(const SPIRVOperand &Op);
  void WriteSPIRVBinary();
  void WriteSPIRVBinary(SPIRVInstructionList &SPIRVInstList);

  // Returns true if |type| is compatible with OpConstantNull.
  bool IsTypeNullable(const Type *type) const;

  // Populate UBO remapped type maps.
  void PopulateUBOTypeMaps();

  // Populate the merge and continue block maps.
  void PopulateStructuredCFGMaps();

  // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
  // uses the internal map, otherwise it falls back on the data layout.
  uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
  uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);
  uint32_t GetExplicitLayoutStructMemberOffset(StructType *type,
                                               unsigned member,
                                               const DataLayout &DL);

  // Returns the base pointer of |v|.
  Value *GetBasePointer(Value *v);

  // Add Capability if not already (e.g. CapabilityGroupNonUniformBroadcast)
  void addCapability(uint32_t c) { CapabilitySet.emplace(c); }

  // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| based on
  // |address_space|.
  void setVariablePointersCapabilities(unsigned address_space);

  // Returns true if |lhs| and |rhs| represent the same resource or workgroup
  // variable.
  bool sameResource(Value *lhs, Value *rhs) const;

  // Returns true if |inst| is phi or select that selects from the same
  // structure (or null).
  bool selectFromSameObject(Instruction *inst);

  // Returns true if |Arg| is called with a coherent resource.
  bool CalledWithCoherentResource(Argument &Arg);

  //
  // Primary interface for adding SPIRVInstructions to a SPIRVSection.
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Opcode, SPIRVOperandVec &Operands) {
    bool has_result, has_result_type;
    spv::HasResultAndType(Opcode, &has_result, &has_result_type);
    SPIRVID RID = has_result ? incrNextID() : 0;
    SPIRVSections[TSection].emplace_back(Opcode, RID, Operands);
    return RID;
  }
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Op) {
    SPIRVOperandVec Ops;
    return addSPIRVInst<TSection>(Op, Ops);
  }
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Op, uint32_t V) {
    SPIRVOperandVec Ops;
    Ops.emplace_back(LITERAL_WORD, V);
    return addSPIRVInst<TSection>(Op, Ops);
  }
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Op, const char *V) {
    SPIRVOperandVec Ops;
    Ops.emplace_back(LITERAL_STRING, V);
    return addSPIRVInst<TSection>(Op, Ops);
  }

  //
  // Add placeholder for llvm::Value that references future values.
  // Must have result ID just in case final SPIRVInstruction requires.
  SPIRVID addSPIRVPlaceholder(Value *I) {
    SPIRVID RID = incrNextID();
    SPIRVOperandVec Ops;
    SPIRVSections[kFunctions].emplace_back(spv::OpExtInst, RID, Ops);
    DeferredInstVec.push_back({I, &SPIRVSections[kFunctions].back()});
    return RID;
  }
  // Replace placeholder with actual SPIRVInstruction on the final pass
  // (HandleDeferredInstruction).
  SPIRVID replaceSPIRVInst(SPIRVInstruction *I, spv::Op Opcode,
                           SPIRVOperandVec &Operands) {
    bool has_result, has_result_type;
    spv::HasResultAndType(Opcode, &has_result, &has_result_type);
    SPIRVID RID = has_result ? I->getResultID() : 0;
    *I = SPIRVInstruction(Opcode, RID, Operands);
    return RID;
  }

  //
  // Add global variable and capture entry point interface
  SPIRVID addSPIRVGlobalVariable(const SPIRVID &TypeID, spv::StorageClass SC,
                                 const SPIRVID &InitID = SPIRVID(),
                                 bool add_interface = false);

  SPIRVID getReflectionImport();
  void GenerateReflection();
  void GenerateKernelReflection();
  void GeneratePushConstantReflection();
  void GenerateSpecConstantReflection();
  void AddArgumentReflection(SPIRVID kernel_decl, const std::string &name,
                             clspv::ArgKind arg_kind, uint32_t ordinal,
                             uint32_t descriptor_set, uint32_t binding,
                             uint32_t offset, uint32_t size, uint32_t spec_id,
                             uint32_t elem_size);

private:
  Module *module;

  // Set of Capabilities required
  CapabilitySetType CapabilitySet;

  // Map from clspv::BuiltinType to SPIRV Global Variable
  BuiltinConstantMapType BuiltinConstantMap;

  SmallVectorImpl<std::pair<unsigned, std::string>> *samplerMap;
  raw_pwrite_stream *out;

  // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
  // convert to other formats on demand?

  // When emitting a C initialization list, the WriteSPIRVBinary method
  // will actually write its words to this vector via binaryTempOut.
  SmallVector<char, 100> binaryTempUnderlyingVector;
  raw_svector_ostream binaryTempOut;

  // Binary output writes to this stream, which might be |out| or
  // |binaryTempOut|. It's the latter when we really want to write a C
  // initializer list.
  raw_pwrite_stream *binaryOut;
  const bool outputCInitList; // If true, output looks like {0x7023, ... , 5}
  uint64_t patchBoundOffset;
  uint32_t nextID;

  SPIRVID incrNextID() { return nextID++; }

  // ID for OpTypeInt 32 1.
  SPIRVID int32ID;
  // ID for OpTypeVector %int 4.
  SPIRVID v4int32ID;

  // Maps an LLVM Type pointer to the corresponding SPIR-V Ids (with and
  // without explicit layout).
  LayoutTypeMapType TypeMap;
  // Maps an LLVM image type to its SPIR-V ID.
  TypeMapType ImageTypeMap;
  // A unique-vector of LLVM types that map to a SPIR-V type.
  TypeList Types;
  // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
  ValueMapType ValueMap;
  SPIRVInstructionList SPIRVSections[kSectionCount];

  EntryPointVecType EntryPointVec;
  DeferredInstVecType DeferredInstVec;
  SPIRVIDListType EntryPointInterfacesList;
  SPIRVID OpExtInstImportID;
  std::vector<SPIRVID> BuiltinDimensionVec;
  bool HasVariablePointersStorageBuffer;
  bool HasVariablePointers;
  Type *SamplerTy;
  DenseMap<unsigned, SPIRVID> SamplerLiteralToIDMap;

  // If a function F has a pointer-to-__constant parameter, then this variable
  // will map F's type to (G, index of the parameter), where in a first phase
  // G is F's type.
  // TODO(dneto): This doesn't seem general enough? A function might have
  // more than one such parameter.
  GlobalConstFuncMapType GlobalConstFuncTypeMap;
  SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
  // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
  // or array types, and which point into transparent memory (StorageBuffer
  // storage class). These will require an ArrayStride decoration.
  // See SPV_KHR_variable_pointers rev 13.
  TypeList TypesNeedingArrayStride;

  // This is truly ugly, but works around what look like driver bugs.
  // For get_local_size, an earlier part of the flow has created a module-scope
  // variable in Private address space to hold the value for the workgroup
  // size. Its initializer is a uint3 value marked as builtin WorkgroupSize.
  // When this is present, save the IDs of the initializer value and variable
  // in these two variables. We only ever do a vector load from it, and
  // when we see one of those, substitute just the value of the initializer.
  // This mimics what Glslang does, and that's what drivers are used to.
  // TODO(dneto): Remove this once drivers are fixed.
  SPIRVID WorkgroupSizeValueID;
  SPIRVID WorkgroupSizeVarID;

  bool TestOutput;

  // Bookkeeping for mapping kernel arguments to resource variables.
  struct ResourceVarInfo {
    ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
                    Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
        : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
          var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
          addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
    const int index; // Index into ResourceVarInfoList
    const unsigned descriptor_set;
    const unsigned binding;
    Function *const var_fn; // The @clspv.resource.var.* function.
    const clspv::ArgKind arg_kind;
    const int coherent;
    const unsigned addr_space; // The LLVM address space
    // The SPIR-V ID of the OpVariable. Not populated at construction time.
    SPIRVID var_id;
  };
  // A list of resource var info. Each one corresponds to a module-scope
  // resource variable we will have to create. Resource var indices are
  // indices into this vector.
  SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
  // This is a vector of pointers of all the resource vars, but ordered by
  // kernel function, and then by argument.
  UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
  // Map a function to the ordered list of resource variables it uses, one for
  // each argument. If an argument does not use a resource variable, it
  // will have a null pointer entry.
  using FunctionToResourceVarsMapType =
      DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
  FunctionToResourceVarsMapType FunctionToResourceVarsMap;

  // What LLVM types map to SPIR-V types needing layout? These are the
  // arrays and structures supporting storage buffers and uniform buffers.
  TypeList TypesNeedingLayout;
  // What LLVM struct types map to a SPIR-V struct type with Block decoration?
  UniqueVector<StructType *> StructTypesNeedingBlock;
  // For a call that represents a load from an opaque type (samplers, images),
  // map it to the variable id it should load from.
  DenseMap<CallInst *, SPIRVID> ResourceVarDeferredLoadCalls;

  // An ordered list of the kernel arguments of type pointer-to-local.
  using LocalArgList = SmallVector<Argument *, 8>;
  LocalArgList LocalArgs;
  // Information about a pointer-to-local argument.
  struct LocalArgInfo {
    // The SPIR-V ID of the array variable.
    SPIRVID variable_id;
    // The element type of the array.
    Type *elem_type;
    // The ID of the array size.
    SPIRVID array_size_id;
    // The ID of the array type.
    SPIRVID array_type_id;
    // The ID of the pointer to the array type.
    SPIRVID ptr_array_type_id;
    // The specialization constant ID of the array size.
    int spec_id;
  };
  // A mapping from Argument to its assigned SpecId.
  DenseMap<const Argument *, int> LocalArgSpecIds;
  // A mapping from SpecId to its LocalArgInfo.
  DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
  // A mapping from a remapped type to its real offsets.
  DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
  // A mapping from a remapped type to its real sizes.
  DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
      RemappedUBOTypeSizes;

  // Maps basic block to its merge block.
  DenseMap<BasicBlock *, BasicBlock *> MergeBlocks;
  // Maps basic block to its continue block.
  DenseMap<BasicBlock *, BasicBlock *> ContinueBlocks;

  SPIRVID ReflectionID;
  DenseMap<Function *, SPIRVID> KernelDeclarations;

public:
  static SPIRVProducerPass *Ptr;
};

} // namespace

char SPIRVProducerPass::ID = 0;
SPIRVProducerPass *SPIRVProducerPass::Ptr = nullptr;
INITIALIZE_PASS(SPIRVProducerPass, "SPIRVProducerPass", "SPIR-V output pass",
                false, false)

namespace clspv {
ModulePass *createSPIRVProducerPass(
    raw_pwrite_stream *out,
    SmallVectorImpl<std::pair<unsigned, std::string>> *samplerMap,
    bool outputCInitList) {
  return new SPIRVProducerPass(out, samplerMap, outputCInitList);
}

ModulePass *createSPIRVProducerPass() { return new SPIRVProducerPass(); }
} // namespace clspv

namespace {
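// Streaming helpers for building operand lists: each operator<< appends one
// operand to a SPIRVOperandVec. Types and Values are resolved to their SPIR-V
// ids through the SPIRVProducerPass singleton.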
SPIRVOperandVec &operator<<(SPIRVOperandVec &list, uint32_t num) {
  list.emplace_back(LITERAL_WORD, num);
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, int32_t num) {
  list.emplace_back(LITERAL_WORD, static_cast<uint32_t>(num));
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, ArrayRef<uint32_t> num_vec) {
  list.emplace_back(num_vec);
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, StringRef str) {
  list.emplace_back(LITERAL_STRING, str);
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, Type *t) {
  list.emplace_back(NUMBERID, SPIRVProducerPass::Ptr->getSPIRVType(t).get());
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, Value *v) {
  list.emplace_back(NUMBERID, SPIRVProducerPass::Ptr->getSPIRVValue(v).get());
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, const SPIRVID &v) {
  list.emplace_back(NUMBERID, v.get());
  return list;
}
} // namespace

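// Top-level driver: emits the header, gathers IR info, generates variables,
// functions, module info and reflection, then writes (and patches) the binary.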
bool SPIRVProducerPass::runOnModule(Module &M) {
  // TODO(sjw): Need to reset all data members for each Module, or better
  // yet create a new SPIRVProducer for every module. For now, only
  // allow one call.
  assert(module == nullptr);
  module = &M;
  if (ShowProducerIR) {
    llvm::outs() << *module << "\n";
  }

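  // In test mode, capture the binary in an in-memory buffer; it is written to
  // TestOutFile at the end of this function.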
  SmallVector<char, 10000> *binary = nullptr;
  if (TestOutput) {
    binary = new SmallVector<char, 10000>();
    out = new raw_svector_ostream(*binary);
  }

  binaryOut = outputCInitList ? &binaryTempOut : out;

  PopulateUBOTypeMaps();
  PopulateStructuredCFGMaps();

  // SPIR-V always begins with its header information
  outputHeader();

  // Gather information from the LLVM IR that we require.
  GenerateLLVMIRInfo();

  // Collect information on global variables too.
  for (GlobalVariable &GV : module->globals()) {
    // If the GV is one of our special __spirv_* variables, remove the
    // initializer as it was only placed there to force LLVM to not throw the
    // value away.
    if (GV.getName().startswith("__spirv_") ||
        GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
      GV.setInitializer(nullptr);
    }
  }

  // Generate literal samplers if necessary.
  GenerateSamplers();

  // Generate SPIRV variables.
  for (GlobalVariable &GV : module->globals()) {
    GenerateGlobalVar(GV);
  }
  GenerateResourceVars();
  GenerateWorkgroupVars();

  // Generate SPIRV instructions for each function.
  for (Function &F : *module) {
    if (F.isDeclaration()) {
      continue;
    }

    // Generate Function Prologue.
    GenerateFuncPrologue(F);

    // Generate SPIRV instructions for function body.
    GenerateFuncBody(F);

    // Generate Function Epilogue.
    GenerateFuncEpilogue();
  }

  HandleDeferredInstruction();
  HandleDeferredDecorations();

  // Generate SPIRV module information.
  GenerateModuleInfo();

  // Generate embedded reflection information.
  GenerateReflection();

  WriteSPIRVBinary();

  // We need to patch the SPIR-V header to set bound correctly.
  patchHeader();

  if (outputCInitList) {
    bool first = true;
    std::ostringstream os;

    auto emit_word = [&os, &first](uint32_t word) {
      if (!first)
        os << ",\n";
      os << word;
      first = false;
    };

    os << "{";
    const std::string str(binaryTempOut.str());
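    // Reassemble the byte stream into 32-bit words (little-endian) and emit
    // them as a comma-separated list.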
    for (unsigned i = 0; i < str.size(); i += 4) {
      const uint32_t a = static_cast<unsigned char>(str[i]);
      const uint32_t b = static_cast<unsigned char>(str[i + 1]);
      const uint32_t c = static_cast<unsigned char>(str[i + 2]);
      const uint32_t d = static_cast<unsigned char>(str[i + 3]);
      emit_word(a | (b << 8) | (c << 16) | (d << 24));
    }
    os << "}\n";
    *out << os.str();
  }

  if (TestOutput) {
    std::error_code error;
    raw_fd_ostream test_output(TestOutFile, error, llvm::sys::fs::FA_Write);
    test_output << static_cast<raw_svector_ostream *>(out)->str();
    delete out;
    delete binary;
  }

  return false;
}

void SPIRVProducerPass::outputHeader() {
  binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
                   sizeof(spv::MagicNumber));
  uint32_t minor = 0;
  switch (SpvVersion()) {
  case SPIRVVersion::SPIRV_1_0:
    minor = 0;
    break;
  case SPIRVVersion::SPIRV_1_3:
    minor = 3;
    break;
  case SPIRVVersion::SPIRV_1_4:
    minor = 4;
    break;
  case SPIRVVersion::SPIRV_1_5:
    minor = 5;
    break;
  default:
    llvm_unreachable("unhandled spir-v version");
    break;
  }
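  // The SPIR-V version word is laid out as bytes 0 | major | minor | 0;
  // the major version is always 1 here.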
  uint32_t version = (1 << 16) | (minor << 8);
  binaryOut->write(reinterpret_cast<const char *>(&version), sizeof(version));

  // use Google's vendor ID
  const uint32_t vendor = 21 << 16;
  binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));

  // we record where we need to come back to and patch in the bound value
  patchBoundOffset = binaryOut->tell();

  // output a bad bound for now
  binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));

  // output the schema (reserved for use and must be 0)
  const uint32_t schema = 0;
  binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
}

void SPIRVProducerPass::patchHeader() {
  // for a binary we just write the value of nextID over bound
  binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
                    patchBoundOffset);
}

void SPIRVProducerPass::GenerateLLVMIRInfo() {
  // This function generates artificial LLVM IR that we need for Vulkan SPIR-V
  // output, such as global variables for arguments, constants, and pointer
  // types for argument access. It is executed ahead of FindType and
  // FindConstant.

  FindGlobalConstVars();

  FindResourceVars();

  FindTypesForSamplerMap();
  FindTypesForResourceVars();
}

void SPIRVProducerPass::FindGlobalConstVars() {
  clspv::NormalizeGlobalVariables(*module);
  const DataLayout &DL = module->getDataLayout();

  SmallVector<GlobalVariable *, 8> GVList;
  SmallVector<GlobalVariable *, 8> DeadGVList;
  for (GlobalVariable &GV : module->globals()) {
    if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
      if (GV.use_empty()) {
        DeadGVList.push_back(&GV);
      } else {
        GVList.push_back(&GV);
      }
    }
  }

  // Remove dead global __constant variables.
  for (auto GV : DeadGVList) {
    GV->eraseFromParent();
  }
  DeadGVList.clear();

  if (clspv::Option::ModuleConstantsInStorageBuffer()) {
    // For now, we only support a single storage buffer.
    if (!GVList.empty()) {
      assert(GVList.size() == 1);
      const auto *GV = GVList[0];
      const auto constants_byte_size =
          (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
      const size_t kConstantMaxSize = 65536;
      if (constants_byte_size > kConstantMaxSize) {
        outs() << "Max __constant capacity of " << kConstantMaxSize
               << " bytes exceeded: " << constants_byte_size << " bytes used\n";
        llvm_unreachable("Max __constant capacity exceeded");
      }
    }
  } else {
    // Change global constant variable's address space to ModuleScopePrivate.
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    for (auto GV : GVList) {
      // Create new gv with ModuleScopePrivate address space.
      Type *NewGVTy = GV->getType()->getPointerElementType();
      GlobalVariable *NewGV = new GlobalVariable(
          *module, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
          nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
      NewGV->takeName(GV);

      const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
      SmallVector<User *, 8> CandidateUsers;

      auto record_called_function_type_as_user =
          [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
            // Find argument index.
            unsigned index = 0;
            for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
              if (gv == call->getOperand(i)) {
                // TODO(dneto): Should we break here?
                index = i;
              }
            }

            // Record function type with global constant.
            GlobalConstFuncTyMap[call->getFunctionType()] =
                std::make_pair(call->getFunctionType(), index);
          };

      for (User *GVU : GVUsers) {
        if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
          record_called_function_type_as_user(GV, Call);
        } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
          // Check GEP users.
          for (User *GEPU : GEP->users()) {
            if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
              record_called_function_type_as_user(GEP, GEPCall);
            }
          }
        }

        CandidateUsers.push_back(GVU);
      }

      for (User *U : CandidateUsers) {
        // Update users of gv with new gv.
        if (!isa<Constant>(U)) {
          // #254: Can't change operands of a constant, but this shouldn't be
          // something that sticks around in the module.
          U->replaceUsesOfWith(GV, NewGV);
        }
      }

      // Delete original gv.
      GV->eraseFromParent();
    }
  }
}

void SPIRVProducerPass::FindResourceVars() {
  ResourceVarInfoList.clear();
  FunctionToResourceVarsMap.clear();
  ModuleOrderedResourceVars.reset();
  // Normally, there is one resource variable per clspv.resource.var.*
  // function, since that is unique'd by arg type and index. By design,
  // we can share these resource variables across kernels because all
  // kernels use the same descriptor set.
  //
  // But if the user requested distinct descriptor sets per kernel, then
  // the descriptor allocator has made different (set,binding) pairs for
  // the same (type,arg_index) pair. Since we can decorate a resource
  // variable with only exactly one DescriptorSet and Binding, we are
  // forced in this case to make distinct resource variables whenever
  // the same clspv.resource.var.X function is seen with distinct
  // (set,binding) values.
  const bool always_distinct_sets =
      clspv::Option::DistinctKernelDescriptorSets();
  for (Function &F : *module) {
    // Rely on the fact the resource var functions have a stable ordering
    // in the module.
    if (Builtins::Lookup(&F) == Builtins::kClspvResource) {
      // Find all calls to this function with distinct set and binding pairs.
      // Save them in ResourceVarInfoList.

      // Determine uniqueness of the (set,binding) pairs only within this
      // one resource-var builtin function.
      using SetAndBinding = std::pair<unsigned, unsigned>;
      // Maps set and binding to the resource var info.
      DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
      bool first_use = true;
      for (auto &U : F.uses()) {
        if (auto *call = dyn_cast<CallInst>(U.getUser())) {
          const auto set = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
          const auto binding = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
          const auto arg_kind = clspv::ArgKind(
              dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
          const auto arg_index = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
          const auto coherent = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());

          // Find or make the resource var info for this combination.
          ResourceVarInfo *rv = nullptr;
          if (always_distinct_sets) {
            // Make a new resource var any time we see a different
            // (set,binding) pair.
            SetAndBinding key{set, binding};
            auto where = set_and_binding_map.find(key);
            if (where == set_and_binding_map.end()) {
              rv = new ResourceVarInfo(
                  static_cast<int>(ResourceVarInfoList.size()), set, binding,
                  &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
              set_and_binding_map[key] = rv;
            } else {
              rv = where->second;
            }
          } else {
            // The default is to make exactly one resource for each
            // clspv.resource.var.* function.
            if (first_use) {
              first_use = false;
              rv = new ResourceVarInfo(
                  static_cast<int>(ResourceVarInfoList.size()), set, binding,
                  &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
            } else {
              rv = ResourceVarInfoList.back().get();
            }
          }

          // Now populate FunctionToResourceVarsMap.
          auto &mapping =
              FunctionToResourceVarsMap[call->getParent()->getParent()];
          while (mapping.size() <= arg_index) {
            mapping.push_back(nullptr);
          }
          mapping[arg_index] = rv;
        }
      }
    }
  }

  // Populate ModuleOrderedResourceVars.
  for (Function &F : *module) {
    auto where = FunctionToResourceVarsMap.find(&F);
    if (where != FunctionToResourceVarsMap.end()) {
      for (auto &rv : where->second) {
        if (rv != nullptr) {
          ModuleOrderedResourceVars.insert(rv);
        }
      }
    }
  }
  if (ShowResourceVars) {
    for (auto *info : ModuleOrderedResourceVars) {
      outs() << "MORV index " << info->index << " (" << info->descriptor_set
             << "," << info->binding << ") " << *(info->var_fn->getReturnType())
             << "\n";
    }
  }
}
1111
SJW77b87ad2020-04-21 14:37:52 -05001112void SPIRVProducerPass::FindTypesForSamplerMap() {
David Neto862b7d82018-06-14 18:48:37 -04001113 // If we are using a sampler map, find the type of the sampler.
SJW77b87ad2020-04-21 14:37:52 -05001114 if (module->getFunction(clspv::LiteralSamplerFunction()) ||
alan-baker3f772c02021-06-15 22:18:11 -04001115 (getSamplerMap() && !getSamplerMap()->empty())) {
James Pricecbe834f2020-12-01 13:42:25 -05001116 auto SamplerStructTy =
1117 StructType::getTypeByName(module->getContext(), "opencl.sampler_t");
David Neto862b7d82018-06-14 18:48:37 -04001118 if (!SamplerStructTy) {
SJW77b87ad2020-04-21 14:37:52 -05001119 SamplerStructTy =
1120 StructType::create(module->getContext(), "opencl.sampler_t");
David Neto862b7d82018-06-14 18:48:37 -04001121 }
David Neto862b7d82018-06-14 18:48:37 -04001122 SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);
David Neto862b7d82018-06-14 18:48:37 -04001123 }
1124}
1125
SJW77b87ad2020-04-21 14:37:52 -05001126void SPIRVProducerPass::FindTypesForResourceVars() {
David Neto862b7d82018-06-14 18:48:37 -04001127 // Record types so they are generated.
1128 TypesNeedingLayout.reset();
1129 StructTypesNeedingBlock.reset();
1130
David Neto862b7d82018-06-14 18:48:37 -04001131 for (const auto *info : ModuleOrderedResourceVars) {
1132 Type *type = info->var_fn->getReturnType();
1133
1134 switch (info->arg_kind) {
1135 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04001136 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04001137 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1138 StructTypesNeedingBlock.insert(sty);
1139 } else {
1140 errs() << *type << "\n";
1141 llvm_unreachable("Buffer arguments must map to structures!");
1142 }
1143 break;
1144 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001145 case clspv::ArgKind::PodUBO:
1146 case clspv::ArgKind::PodPushConstant:
David Neto862b7d82018-06-14 18:48:37 -04001147 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1148 StructTypesNeedingBlock.insert(sty);
1149 } else {
1150 errs() << *type << "\n";
1151 llvm_unreachable("POD arguments must map to structures!");
1152 }
1153 break;
alan-bakerf6bc8252020-09-23 14:58:55 -04001154 case clspv::ArgKind::SampledImage:
1155 case clspv::ArgKind::StorageImage:
David Neto862b7d82018-06-14 18:48:37 -04001156 case clspv::ArgKind::Sampler:
1157 // Sampler and image types map to the pointee type but
1158 // in the uniform constant address space.
1159 type = PointerType::get(type->getPointerElementType(),
1160 clspv::AddressSpace::UniformConstant);
1161 break;
1162 default:
1163 break;
1164 }
David Neto862b7d82018-06-14 18:48:37 -04001165 }
1166
alan-bakerdcd97412019-09-16 15:32:30 -04001167 // If module constants are clustered in a storage buffer then that struct
1168 // needs layout decorations.
1169 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
SJW77b87ad2020-04-21 14:37:52 -05001170 for (GlobalVariable &GV : module->globals()) {
alan-bakerdcd97412019-09-16 15:32:30 -04001171 PointerType *PTy = cast<PointerType>(GV.getType());
1172 const auto AS = PTy->getAddressSpace();
1173 const bool module_scope_constant_external_init =
1174 (AS == AddressSpace::Constant) && GV.hasInitializer();
1175 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
1176 if (module_scope_constant_external_init &&
1177 spv::BuiltInMax == BuiltinType) {
1178 StructTypesNeedingBlock.insert(
1179 cast<StructType>(PTy->getPointerElementType()));
1180 }
1181 }
1182 }
1183
SJW77b87ad2020-04-21 14:37:52 -05001184 for (const GlobalVariable &GV : module->globals()) {
Kévin Petitbbbda972020-03-03 19:16:31 +00001185 if (GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
1186 auto Ty = cast<PointerType>(GV.getType())->getPointerElementType();
1187 assert(Ty->isStructTy() && "Push constants have to be structures.");
1188 auto STy = cast<StructType>(Ty);
1189 StructTypesNeedingBlock.insert(STy);
1190 }
1191 }
1192
David Neto862b7d82018-06-14 18:48:37 -04001193 // Traverse the arrays and structures underneath each Block, and
1194 // mark them as needing layout.
1195 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1196 StructTypesNeedingBlock.end());
1197 while (!work_list.empty()) {
1198 Type *type = work_list.back();
1199 work_list.pop_back();
1200 TypesNeedingLayout.insert(type);
1201 switch (type->getTypeID()) {
1202 case Type::ArrayTyID:
1203 work_list.push_back(type->getArrayElementType());
1204 if (!Hack_generate_runtime_array_stride_early) {
1205 // Remember this array type for deferred decoration.
1206 TypesNeedingArrayStride.insert(type);
1207 }
1208 break;
1209 case Type::StructTyID:
1210 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1211 work_list.push_back(elem_ty);
1212 }
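      // Struct members have been queued; fall through to the break below.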
1213 default:
1214 // This type and its contained types don't get layout.
1215 break;
1216 }
1217 }
1218}
1219
SJWf93f5f32020-05-05 07:27:56 -05001220void SPIRVProducerPass::GenerateWorkgroupVars() {
Alan Baker202c8c72018-08-13 13:47:44 -04001221 // The SpecId assignment for pointer-to-local arguments is recorded in
1222 // module-level metadata. Translate that information into local argument
1223 // information.
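  // Each metadata operand is expected to be a tuple of the form
  // (kernel function, argument index, SpecId), matching the operand order
  // read below.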
SJWf93f5f32020-05-05 07:27:56 -05001224 LLVMContext &Context = module->getContext();
SJW77b87ad2020-04-21 14:37:52 -05001225 NamedMDNode *nmd = module->getNamedMetadata(clspv::LocalSpecIdMetadataName());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001226 if (!nmd)
1227 return;
Alan Baker202c8c72018-08-13 13:47:44 -04001228 for (auto operand : nmd->operands()) {
1229 MDTuple *tuple = cast<MDTuple>(operand);
1230 ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
1231 Function *func = cast<Function>(fn_md->getValue());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001232 ConstantAsMetadata *arg_index_md =
1233 cast<ConstantAsMetadata>(tuple->getOperand(1));
1234 int arg_index = static_cast<int>(
1235 cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
1236 Argument *arg = &*(func->arg_begin() + arg_index);
Alan Baker202c8c72018-08-13 13:47:44 -04001237
1238 ConstantAsMetadata *spec_id_md =
1239 cast<ConstantAsMetadata>(tuple->getOperand(2));
alan-bakerb6b09dc2018-11-08 16:59:28 -05001240 int spec_id = static_cast<int>(
1241 cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04001242
Alan Baker202c8c72018-08-13 13:47:44 -04001243 LocalArgSpecIds[arg] = spec_id;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001244 if (LocalSpecIdInfoMap.count(spec_id))
1245 continue;
Alan Baker202c8c72018-08-13 13:47:44 -04001246
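    // For one pointer-to-local argument the emitted SPIR-V is roughly:
    //   %size = OpSpecConstant %uint 1   ; decorated with SpecId spec_id
    //   %arr  = OpTypeArray %elem %size
    //   %ptr  = OpTypePointer Workgroup %arr
    //   %var  = OpVariable %ptr Workgroup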
SJWf93f5f32020-05-05 07:27:56 -05001247 // Generate the spec constant.
1248 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05001249 Ops << Type::getInt32Ty(Context) << 1;
SJWf93f5f32020-05-05 07:27:56 -05001250 SPIRVID ArraySizeID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
Alan Baker202c8c72018-08-13 13:47:44 -04001251
SJWf93f5f32020-05-05 07:27:56 -05001252 // Generate the array type.
1253 Type *ElemTy = arg->getType()->getPointerElementType();
1254 Ops.clear();
1255 // The element type must have been created.
SJW01901d92020-05-21 08:58:31 -05001256 Ops << ElemTy << ArraySizeID;
SJWf93f5f32020-05-05 07:27:56 -05001257
1258 SPIRVID ArrayTypeID = addSPIRVInst<kTypes>(spv::OpTypeArray, Ops);
1259
1260 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05001261 Ops << spv::StorageClassWorkgroup << ArrayTypeID;
SJWf93f5f32020-05-05 07:27:56 -05001262 SPIRVID PtrArrayTypeID = addSPIRVInst<kTypes>(spv::OpTypePointer, Ops);
1263
1264 // Generate OpVariable.
1265 //
1266 // Ops[0] : Result Type ID
1267 // Ops[1] : Storage Class
SJW806a5d82020-07-15 12:51:38 -05001268 SPIRVID VariableID =
1269 addSPIRVGlobalVariable(PtrArrayTypeID, spv::StorageClassWorkgroup);
SJWf93f5f32020-05-05 07:27:56 -05001270
1271 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05001272 Ops << ArraySizeID << spv::DecorationSpecId << spec_id;
SJWf93f5f32020-05-05 07:27:56 -05001273 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
1274
1275 LocalArgInfo info{VariableID, ElemTy, ArraySizeID,
1276 ArrayTypeID, PtrArrayTypeID, spec_id};
1277 LocalSpecIdInfoMap[spec_id] = info;
Alan Baker202c8c72018-08-13 13:47:44 -04001278 }
1279}
1280
David Neto22f144c2017-06-12 14:26:21 -04001281spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
1282 switch (AddrSpace) {
1283 default:
1284 llvm_unreachable("Unsupported OpenCL address space");
1285 case AddressSpace::Private:
1286 return spv::StorageClassFunction;
1287 case AddressSpace::Global:
David Neto22f144c2017-06-12 14:26:21 -04001288 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001289 case AddressSpace::Constant:
1290 return clspv::Option::ConstantArgsInUniformBuffer()
1291 ? spv::StorageClassUniform
1292 : spv::StorageClassStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -04001293 case AddressSpace::Input:
1294 return spv::StorageClassInput;
1295 case AddressSpace::Local:
1296 return spv::StorageClassWorkgroup;
1297 case AddressSpace::UniformConstant:
1298 return spv::StorageClassUniformConstant;
David Neto9ed8e2f2018-03-24 06:47:24 -07001299 case AddressSpace::Uniform:
David Netoe439d702018-03-23 13:14:08 -07001300 return spv::StorageClassUniform;
David Neto22f144c2017-06-12 14:26:21 -04001301 case AddressSpace::ModuleScopePrivate:
1302 return spv::StorageClassPrivate;
Kévin Petitbbbda972020-03-03 19:16:31 +00001303 case AddressSpace::PushConstant:
1304 return spv::StorageClassPushConstant;
David Neto22f144c2017-06-12 14:26:21 -04001305 }
1306}
1307
David Neto862b7d82018-06-14 18:48:37 -04001308spv::StorageClass
1309SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
1310 switch (arg_kind) {
1311 case clspv::ArgKind::Buffer:
1312 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001313 case clspv::ArgKind::BufferUBO:
1314 return spv::StorageClassUniform;
David Neto862b7d82018-06-14 18:48:37 -04001315 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001316 return spv::StorageClassStorageBuffer;
1317 case clspv::ArgKind::PodUBO:
1318 return spv::StorageClassUniform;
1319 case clspv::ArgKind::PodPushConstant:
1320 return spv::StorageClassPushConstant;
David Neto862b7d82018-06-14 18:48:37 -04001321 case clspv::ArgKind::Local:
1322 return spv::StorageClassWorkgroup;
alan-bakerf6bc8252020-09-23 14:58:55 -04001323 case clspv::ArgKind::SampledImage:
1324 case clspv::ArgKind::StorageImage:
David Neto862b7d82018-06-14 18:48:37 -04001325 case clspv::ArgKind::Sampler:
1326 return spv::StorageClassUniformConstant;
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001327 default:
1328 llvm_unreachable("Unsupported storage class for argument kind");
David Neto862b7d82018-06-14 18:48:37 -04001329 }
1330}
1331
David Neto22f144c2017-06-12 14:26:21 -04001332spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1333 return StringSwitch<spv::BuiltIn>(Name)
1334 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1335 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1336 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1337 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1338 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
alan-bakerbed3a882020-04-21 14:42:41 -04001339 .Case("__spirv_WorkDim", spv::BuiltInWorkDim)
alan-bakere1996972020-05-04 08:38:12 -04001340 .Case("__spirv_GlobalOffset", spv::BuiltInGlobalOffset)
David Neto22f144c2017-06-12 14:26:21 -04001341 .Default(spv::BuiltInMax);
1342}
1343
SJW01901d92020-05-21 08:58:31 -05001344SPIRVID SPIRVProducerPass::getOpExtInstImportID() {
1345 if (OpExtInstImportID == 0) {
1346 //
1347 // Generate OpExtInstImport.
1348 //
1349 // Ops[0] ... Ops[n] = Name (Literal String)
David Neto22f144c2017-06-12 14:26:21 -04001350
SJW01901d92020-05-21 08:58:31 -05001351 OpExtInstImportID =
1352 addSPIRVInst<kImports>(spv::OpExtInstImport, "GLSL.std.450");
1353 }
1354 return OpExtInstImportID;
SJWf93f5f32020-05-05 07:27:56 -05001355}
1356
SJW806a5d82020-07-15 12:51:38 -05001357SPIRVID SPIRVProducerPass::addSPIRVGlobalVariable(const SPIRVID &TypeID,
1358 spv::StorageClass SC,
alan-baker3f772c02021-06-15 22:18:11 -04001359 const SPIRVID &InitID,
1360 bool add_interface) {
SJW806a5d82020-07-15 12:51:38 -05001361 // Generate OpVariable.
1362 //
1363 // Ops[0] : Result Type ID
1364 // Ops[1] : Storage Class
1365 // Ops[2] : Initialization Value ID (optional)
1366
1367 SPIRVOperandVec Ops;
1368 Ops << TypeID << SC;
1369 if (InitID.isValid()) {
1370 Ops << InitID;
1371 }
1372
1373 SPIRVID VID = addSPIRVInst<kGlobalVariables>(spv::OpVariable, Ops);
1374
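  // From SPIR-V 1.4 onward the OpEntryPoint interface list must include every
  // global variable referenced by the entry point, not just Input and Output
  // variables, so record this variable for that list when requested.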
alan-baker3f772c02021-06-15 22:18:11 -04001375 if (SC == spv::StorageClassInput ||
1376 (add_interface && SpvVersion() >= SPIRVVersion::SPIRV_1_4)) {
SJW806a5d82020-07-15 12:51:38 -05001377 getEntryPointInterfacesList().push_back(VID);
1378 }
1379
1380 return VID;
1381}
1382
alan-bakerc3fd07f2020-10-22 09:48:49 -04001383Type *SPIRVProducerPass::CanonicalType(Type *type) {
1384 if (type->getNumContainedTypes() != 0) {
1385 switch (type->getTypeID()) {
1386 case Type::PointerTyID: {
1387 // For the purposes of our Vulkan SPIR-V type system, constant and global
1388 // are conflated.
1389 auto *ptr_ty = cast<PointerType>(type);
1390 unsigned AddrSpace = ptr_ty->getAddressSpace();
1391 if (AddressSpace::Constant == AddrSpace) {
1392 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1393 AddrSpace = AddressSpace::Global;
1394 // The canonical type of __constant is __global unless constants are
1395 // passed in uniform buffers.
1396 auto *GlobalTy =
1397 ptr_ty->getPointerElementType()->getPointerTo(AddrSpace);
1398 return GlobalTy;
1399 }
1400 }
1401 break;
1402 }
1403 case Type::StructTyID: {
1404 SmallVector<Type *, 8> subtypes;
1405 bool changed = false;
1406 for (auto *subtype : type->subtypes()) {
1407 auto canonical = CanonicalType(subtype);
1408 subtypes.push_back(canonical);
1409 if (canonical != subtype) {
1410 changed = true;
1411 }
1412 }
1413 if (changed) {
1414 return StructType::get(type->getContext(), subtypes,
1415 cast<StructType>(type)->isPacked());
1416 }
1417 break;
1418 }
1419 case Type::ArrayTyID: {
1420 auto *elem_ty = type->getArrayElementType();
1421 auto *equiv_elem_ty = CanonicalType(elem_ty);
1422 if (equiv_elem_ty != elem_ty) {
1423 return ArrayType::get(equiv_elem_ty,
1424 cast<ArrayType>(type)->getNumElements());
1425 }
1426 break;
1427 }
1428 case Type::FunctionTyID: {
1429 auto *func_ty = cast<FunctionType>(type);
1430 auto *return_ty = CanonicalType(func_ty->getReturnType());
1431 SmallVector<Type *, 8> params;
1432 for (unsigned i = 0; i < func_ty->getNumParams(); ++i) {
1433 params.push_back(CanonicalType(func_ty->getParamType(i)));
1434 }
1435 return FunctionType::get(return_ty, params, func_ty->isVarArg());
1436 }
1437 default:
1438 break;
1439 }
1440 }
1441
1442 return type;
1443}
1444
alan-baker3f772c02021-06-15 22:18:11 -04001445bool SPIRVProducerPass::PointerRequiresLayout(unsigned aspace) {
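  // Explicit layout decorations (Offset, ArrayStride, Block) are only required
  // for storage classes with explicit layout rules; these address spaces map
  // to the PushConstant, StorageBuffer and Uniform storage classes.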
1446 if (Option::SpvVersion() >= SPIRVVersion::SPIRV_1_4) {
1447 switch (aspace) {
1448 case AddressSpace::PushConstant:
1449 case AddressSpace::Global:
1450 case AddressSpace::Constant:
1451 return true;
1452 default:
1453 break;
1454 }
1455 }
1456 return false;
1457}
1458
SJW01901d92020-05-21 08:58:31 -05001459SPIRVID SPIRVProducerPass::getSPIRVType(Type *Ty) {
alan-baker3f772c02021-06-15 22:18:11 -04001460 // Prior to SPIR-V 1.4, layout decoration rules are more relaxed, so a laid
1461 // out type can also be reused in storage classes that do not require layout.
1462 bool needs_layout = false;
1463 if (auto ptr_ty = dyn_cast<PointerType>(Ty)) {
1464 needs_layout = PointerRequiresLayout(ptr_ty->getPointerAddressSpace());
1465 }
1466 return getSPIRVType(Ty, needs_layout);
1467}
1468
1469SPIRVID SPIRVProducerPass::getSPIRVType(Type *Ty, bool needs_layout) {
1470 // Only pointers, structs and arrays should have layout decorations.
1471 if (!(isa<PointerType>(Ty) || isa<ArrayType>(Ty) || isa<StructType>(Ty))) {
1472 needs_layout = false;
1473 }
1474 // |layout| is the index used for |Ty|'s entry in the type map. Each type
1475 // stores a laid out and non-laid out version of the type.
1476 const unsigned layout = needs_layout ? 1 : 0;
1477
SJWf93f5f32020-05-05 07:27:56 -05001478 auto TI = TypeMap.find(Ty);
1479 if (TI != TypeMap.end()) {
alan-baker3f772c02021-06-15 22:18:11 -04001480 assert(layout < TI->second.size());
1481 if (TI->second[layout].isValid()) {
1482 return TI->second[layout];
1483 }
SJWf93f5f32020-05-05 07:27:56 -05001484 }
1485
alan-bakerc3fd07f2020-10-22 09:48:49 -04001486 auto Canonical = CanonicalType(Ty);
1487 if (Canonical != Ty) {
1488 auto CanonicalTI = TypeMap.find(Canonical);
1489 if (CanonicalTI != TypeMap.end()) {
alan-baker3f772c02021-06-15 22:18:11 -04001490 assert(layout < CanonicalTI->second.size());
1491 if (CanonicalTI->second[layout].isValid()) {
1492 auto id = CanonicalTI->second[layout];
1493 auto &base = TypeMap[Ty];
1494 if (base.empty()) {
1495 base.resize(2);
1496 }
1497 base[layout] = id;
1498 return id;
1499 }
alan-bakerc3fd07f2020-10-22 09:48:49 -04001500 }
1501 }
1502
1503 // Perform the mapping with the canonical type.
1504
SJWf93f5f32020-05-05 07:27:56 -05001505 const auto &DL = module->getDataLayout();
1506
SJW01901d92020-05-21 08:58:31 -05001507 SPIRVID RID;
SJWf93f5f32020-05-05 07:27:56 -05001508
alan-bakerc3fd07f2020-10-22 09:48:49 -04001509 switch (Canonical->getTypeID()) {
SJWf93f5f32020-05-05 07:27:56 -05001510 default: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001511 Canonical->print(errs());
SJWf93f5f32020-05-05 07:27:56 -05001512 llvm_unreachable("Unsupported type???");
1513 break;
1514 }
1515 case Type::MetadataTyID:
1516 case Type::LabelTyID: {
1517 // Ignore these types.
1518 break;
1519 }
1520 case Type::PointerTyID: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001521 PointerType *PTy = cast<PointerType>(Canonical);
SJWf93f5f32020-05-05 07:27:56 -05001522 unsigned AddrSpace = PTy->getAddressSpace();
1523
1524 if (AddrSpace != AddressSpace::UniformConstant) {
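      // Pointers to opaque struct types (samplers and images) are represented
      // by the SPIR-V type of the pointee rather than an OpTypePointer.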
1525 auto PointeeTy = PTy->getElementType();
1526 if (PointeeTy->isStructTy() &&
1527 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
alan-baker3f772c02021-06-15 22:18:11 -04001528 RID = getSPIRVType(PointeeTy, needs_layout);
SJWf93f5f32020-05-05 07:27:56 -05001529 break;
1530 }
1531 }
1532
SJWf93f5f32020-05-05 07:27:56 -05001533 //
1534 // Generate OpTypePointer.
1535 //
1536
1537 // OpTypePointer
1538 // Ops[0] = Storage Class
1539 // Ops[1] = Element Type ID
1540 SPIRVOperandVec Ops;
1541
alan-baker3f772c02021-06-15 22:18:11 -04001542 Ops << GetStorageClass(AddrSpace)
1543 << getSPIRVType(PTy->getElementType(), needs_layout);
SJWf93f5f32020-05-05 07:27:56 -05001544
1545 RID = addSPIRVInst<kTypes>(spv::OpTypePointer, Ops);
1546 break;
1547 }
1548 case Type::StructTyID: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001549 StructType *STy = cast<StructType>(Canonical);
SJWf93f5f32020-05-05 07:27:56 -05001550
1551 // Handle sampler type.
1552 if (STy->isOpaque()) {
1553 if (STy->getName().equals("opencl.sampler_t")) {
1554 //
1555 // Generate OpTypeSampler
1556 //
1557 // Empty Ops.
1558
1559 RID = addSPIRVInst<kTypes>(spv::OpTypeSampler);
1560 break;
1561 } else if (STy->getName().startswith("opencl.image1d_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001562 STy->getName().startswith("opencl.image1d_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001563 STy->getName().startswith("opencl.image1d_wo_t") ||
1564 STy->getName().startswith("opencl.image1d_array_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001565 STy->getName().startswith("opencl.image1d_array_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001566 STy->getName().startswith("opencl.image1d_array_wo_t") ||
1567 STy->getName().startswith("opencl.image2d_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001568 STy->getName().startswith("opencl.image2d_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001569 STy->getName().startswith("opencl.image2d_wo_t") ||
1570 STy->getName().startswith("opencl.image2d_array_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001571 STy->getName().startswith("opencl.image2d_array_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001572 STy->getName().startswith("opencl.image2d_array_wo_t") ||
1573 STy->getName().startswith("opencl.image3d_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001574 STy->getName().startswith("opencl.image3d_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001575 STy->getName().startswith("opencl.image3d_wo_t")) {
SJW01901d92020-05-21 08:58:31 -05001576 if (STy->getName().startswith("opencl.image1d_")) {
1577 if (STy->getName().contains(".sampled"))
1578 addCapability(spv::CapabilitySampled1D);
1579 else
1580 addCapability(spv::CapabilityImage1D);
1581 }
1582
SJWf93f5f32020-05-05 07:27:56 -05001583 //
1584 // Generate OpTypeImage
1585 //
1586 // Ops[0] = Sampled Type ID
1587 // Ops[1] = Dim ID
1588 // Ops[2] = Depth (Literal Number)
1589 // Ops[3] = Arrayed (Literal Number)
1590 // Ops[4] = MS (Literal Number)
1591 // Ops[5] = Sampled (Literal Number)
1592 // Ops[6] = Image Format ID
1593 //
1594 SPIRVOperandVec Ops;
1595
SJW01901d92020-05-21 08:58:31 -05001596 SPIRVID SampledTyID;
alan-baker3f772c02021-06-15 22:18:11 -04001597 // None of the sampled types have a layout.
SJWf93f5f32020-05-05 07:27:56 -05001598 if (STy->getName().contains(".float")) {
alan-baker3f772c02021-06-15 22:18:11 -04001599 SampledTyID =
1600 getSPIRVType(Type::getFloatTy(Canonical->getContext()), false);
SJWf93f5f32020-05-05 07:27:56 -05001601 } else if (STy->getName().contains(".uint")) {
alan-baker3f772c02021-06-15 22:18:11 -04001602 SampledTyID =
1603 getSPIRVType(Type::getInt32Ty(Canonical->getContext()), false);
SJWf93f5f32020-05-05 07:27:56 -05001604 } else if (STy->getName().contains(".int")) {
1605 // Generate a signed 32-bit integer if necessary.
1606 if (int32ID == 0) {
1607 SPIRVOperandVec intOps;
SJW01901d92020-05-21 08:58:31 -05001608 intOps << 32 << 1;
SJWf93f5f32020-05-05 07:27:56 -05001609 int32ID = addSPIRVInst<kTypes>(spv::OpTypeInt, intOps);
1610 }
1611 SampledTyID = int32ID;
1612
1613 // Generate a vec4 of the signed int if necessary.
1614 if (v4int32ID == 0) {
1615 SPIRVOperandVec vecOps;
SJW01901d92020-05-21 08:58:31 -05001616 vecOps << int32ID << 4;
SJWf93f5f32020-05-05 07:27:56 -05001617 v4int32ID = addSPIRVInst<kTypes>(spv::OpTypeVector, vecOps);
1618 }
1619 } else {
1620 // This was likely an UndefValue.
alan-baker3f772c02021-06-15 22:18:11 -04001621 SampledTyID =
1622 getSPIRVType(Type::getFloatTy(Canonical->getContext()), false);
SJWf93f5f32020-05-05 07:27:56 -05001623 }
SJW01901d92020-05-21 08:58:31 -05001624 Ops << SampledTyID;
SJWf93f5f32020-05-05 07:27:56 -05001625
1626 spv::Dim DimID = spv::Dim2D;
1627 if (STy->getName().startswith("opencl.image1d_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001628 STy->getName().startswith("opencl.image1d_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001629 STy->getName().startswith("opencl.image1d_wo_t") ||
1630 STy->getName().startswith("opencl.image1d_array_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001631 STy->getName().startswith("opencl.image1d_array_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001632 STy->getName().startswith("opencl.image1d_array_wo_t")) {
1633 DimID = spv::Dim1D;
1634 } else if (STy->getName().startswith("opencl.image3d_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001635 STy->getName().startswith("opencl.image3d_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001636 STy->getName().startswith("opencl.image3d_wo_t")) {
1637 DimID = spv::Dim3D;
1638 }
SJW01901d92020-05-21 08:58:31 -05001639 Ops << DimID;
SJWf93f5f32020-05-05 07:27:56 -05001640
1641 // TODO: Set up Depth.
SJW01901d92020-05-21 08:58:31 -05001642 Ops << 0;
SJWf93f5f32020-05-05 07:27:56 -05001643
1644 uint32_t arrayed = STy->getName().contains("_array_") ? 1 : 0;
SJW01901d92020-05-21 08:58:31 -05001645 Ops << arrayed;
SJWf93f5f32020-05-05 07:27:56 -05001646
1647 // TODO: Set up MS.
SJW01901d92020-05-21 08:58:31 -05001648 Ops << 0;
SJWf93f5f32020-05-05 07:27:56 -05001649
1650 // Set up Sampled.
1651 //
1652 // From Spec
1653 //
1654 // 0 indicates this is only known at run time, not at compile time
1655 // 1 indicates will be used with sampler
1656 // 2 indicates will be used without a sampler (a storage image)
1657 uint32_t Sampled = 1;
1658 if (!STy->getName().contains(".sampled")) {
1659 Sampled = 2;
1660 }
SJW01901d92020-05-21 08:58:31 -05001661 Ops << Sampled;
SJWf93f5f32020-05-05 07:27:56 -05001662
1663 // TODO: Set up Image Format.
SJW01901d92020-05-21 08:58:31 -05001664 Ops << spv::ImageFormatUnknown;
SJWf93f5f32020-05-05 07:27:56 -05001665 RID = addSPIRVInst<kTypes>(spv::OpTypeImage, Ops);
1666
alan-bakerf6bc8252020-09-23 14:58:55 -04001667 // Only need a sampled version of the type if it is used with a sampler.
1668 if (Sampled == 1) {
1669 Ops.clear();
1670 Ops << RID;
alan-bakerc3fd07f2020-10-22 09:48:49 -04001671 getImageTypeMap()[Canonical] =
alan-bakerf6bc8252020-09-23 14:58:55 -04001672 addSPIRVInst<kTypes>(spv::OpTypeSampledImage, Ops);
1673 }
SJWf93f5f32020-05-05 07:27:56 -05001674 break;
1675 }
1676 }
1677
1678 //
1679 // Generate OpTypeStruct
1680 //
1681 // Ops[0] ... Ops[n] = Member IDs
1682 SPIRVOperandVec Ops;
1683
1684 for (auto *EleTy : STy->elements()) {
alan-baker3f772c02021-06-15 22:18:11 -04001685 Ops << getSPIRVType(EleTy, needs_layout);
SJWf93f5f32020-05-05 07:27:56 -05001686 }
1687
1688 RID = addSPIRVInst<kTypes>(spv::OpTypeStruct, Ops);
1689
alan-bakerc3fd07f2020-10-22 09:48:49 -04001690 // Generate OpMemberDecorate offsets, unless the layout decorations will be
1691 // generated for the canonical type instead.
1692 StructType *canonical = cast<StructType>(CanonicalType(STy));
alan-baker3f772c02021-06-15 22:18:11 -04001693 bool use_layout =
1694 (Option::SpvVersion() < SPIRVVersion::SPIRV_1_4) || needs_layout;
alan-bakerc3fd07f2020-10-22 09:48:49 -04001695 if (TypesNeedingLayout.idFor(STy) &&
alan-baker3f772c02021-06-15 22:18:11 -04001696 (canonical == STy || !TypesNeedingLayout.idFor(canonical)) &&
1697 use_layout) {
SJWf93f5f32020-05-05 07:27:56 -05001698 for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
1699 MemberIdx++) {
1700 // Ops[0] = Structure Type ID
1701 // Ops[1] = Member Index(Literal Number)
1702 // Ops[2] = Decoration (Offset)
1703 // Ops[3] = Byte Offset (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05001704 const auto ByteOffset =
1705 GetExplicitLayoutStructMemberOffset(STy, MemberIdx, DL);
1706
SJW01901d92020-05-21 08:58:31 -05001707 Ops.clear();
1708 Ops << RID << MemberIdx << spv::DecorationOffset << ByteOffset;
SJWf93f5f32020-05-05 07:27:56 -05001709
1710 addSPIRVInst<kAnnotations>(spv::OpMemberDecorate, Ops);
1711 }
1712 }
1713
alan-bakerc3fd07f2020-10-22 09:48:49 -04001714 // Generate the Block OpDecorate, unless it will be generated for the canonical type instead.
1715 if (StructTypesNeedingBlock.idFor(STy) &&
alan-baker3f772c02021-06-15 22:18:11 -04001716 (canonical == STy || !StructTypesNeedingBlock.idFor(canonical)) &&
1717 use_layout) {
SJWf93f5f32020-05-05 07:27:56 -05001718 Ops.clear();
1719 // Use Block decorations with StorageBuffer storage class.
SJW01901d92020-05-21 08:58:31 -05001720 Ops << RID << spv::DecorationBlock;
SJWf93f5f32020-05-05 07:27:56 -05001721
1722 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
1723 }
1724 break;
1725 }
1726 case Type::IntegerTyID: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001727 uint32_t bit_width =
1728 static_cast<uint32_t>(Canonical->getPrimitiveSizeInBits());
SJWf93f5f32020-05-05 07:27:56 -05001729
alan-bakere2a62752020-07-09 22:53:23 -04001730 if (clspv::Option::Int8Support() && bit_width == 8) {
SJW01901d92020-05-21 08:58:31 -05001731 addCapability(spv::CapabilityInt8);
alan-bakere2a62752020-07-09 22:53:23 -04001732 } else if (bit_width == 16) {
SJW01901d92020-05-21 08:58:31 -05001733 addCapability(spv::CapabilityInt16);
alan-bakere2a62752020-07-09 22:53:23 -04001734 } else if (bit_width == 64) {
SJW01901d92020-05-21 08:58:31 -05001735 addCapability(spv::CapabilityInt64);
1736 }
1737
alan-bakere2a62752020-07-09 22:53:23 -04001738 if (bit_width == 1) {
SJWf93f5f32020-05-05 07:27:56 -05001739 RID = addSPIRVInst<kTypes>(spv::OpTypeBool);
1740 } else {
alan-bakere2a62752020-07-09 22:53:23 -04001741 if (!clspv::Option::Int8Support() && bit_width == 8) {
SJWf93f5f32020-05-05 07:27:56 -05001742 // i8 is added to TypeMap as i32.
alan-baker3f772c02021-06-15 22:18:11 -04001743 RID = getSPIRVType(Type::getIntNTy(Canonical->getContext(), 32), false);
SJWf93f5f32020-05-05 07:27:56 -05001744 } else {
1745 SPIRVOperandVec Ops;
alan-bakere2a62752020-07-09 22:53:23 -04001746 Ops << bit_width << 0 /* not signed */;
SJWf93f5f32020-05-05 07:27:56 -05001747 RID = addSPIRVInst<kTypes>(spv::OpTypeInt, Ops);
1748 }
1749 }
1750 break;
1751 }
1752 case Type::HalfTyID:
1753 case Type::FloatTyID:
1754 case Type::DoubleTyID: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001755 uint32_t bit_width =
1756 static_cast<uint32_t>(Canonical->getPrimitiveSizeInBits());
alan-bakere2a62752020-07-09 22:53:23 -04001757 if (bit_width == 16) {
SJW01901d92020-05-21 08:58:31 -05001758 addCapability(spv::CapabilityFloat16);
alan-bakere2a62752020-07-09 22:53:23 -04001759 } else if (bit_width == 64) {
SJW01901d92020-05-21 08:58:31 -05001760 addCapability(spv::CapabilityFloat64);
1761 }
1762
SJWf93f5f32020-05-05 07:27:56 -05001763 SPIRVOperandVec Ops;
alan-bakere2a62752020-07-09 22:53:23 -04001764 Ops << bit_width;
SJWf93f5f32020-05-05 07:27:56 -05001765
1766 RID = addSPIRVInst<kTypes>(spv::OpTypeFloat, Ops);
1767 break;
1768 }
1769 case Type::ArrayTyID: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001770 ArrayType *ArrTy = cast<ArrayType>(Canonical);
SJWf93f5f32020-05-05 07:27:56 -05001771 const uint64_t Length = ArrTy->getArrayNumElements();
1772 if (Length == 0) {
1773 // By convention, map it to a RuntimeArray.
1774
1775 Type *EleTy = ArrTy->getArrayElementType();
1776
1777 //
1778 // Generate OpTypeRuntimeArray.
1779 //
1780 // OpTypeRuntimeArray
1781 // Ops[0] = Element Type ID
1782 SPIRVOperandVec Ops;
alan-baker3f772c02021-06-15 22:18:11 -04001783 Ops << getSPIRVType(EleTy, needs_layout);
SJWf93f5f32020-05-05 07:27:56 -05001784
1785 RID = addSPIRVInst<kTypes>(spv::OpTypeRuntimeArray, Ops);
1786
alan-baker3f772c02021-06-15 22:18:11 -04001787 if (Hack_generate_runtime_array_stride_early &&
1788 (Option::SpvVersion() < SPIRVVersion::SPIRV_1_4 || needs_layout)) {
SJWf93f5f32020-05-05 07:27:56 -05001789 // Generate OpDecorate.
1790
1791 // Ops[0] = Target ID
1792 // Ops[1] = Decoration (ArrayStride)
1793 // Ops[2] = Stride Number(Literal Number)
1794 Ops.clear();
1795
SJW01901d92020-05-21 08:58:31 -05001796 Ops << RID << spv::DecorationArrayStride
1797 << static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL));
SJWf93f5f32020-05-05 07:27:56 -05001798
1799 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
1800 }
1801
1802 } else {
1803
1804 //
1805 // Generate OpConstant and OpTypeArray.
1806 //
1807
1808 //
1809 // Generate OpConstant for array length.
1810 //
1811 // Add constant for length to constant list.
1812 Constant *CstLength =
1813 ConstantInt::get(Type::getInt32Ty(module->getContext()), Length);
SJWf93f5f32020-05-05 07:27:56 -05001814
1815 // Remember to generate ArrayStride later
alan-bakerc3fd07f2020-10-22 09:48:49 -04001816 getTypesNeedingArrayStride().insert(Canonical);
SJWf93f5f32020-05-05 07:27:56 -05001817
1818 //
1819 // Generate OpTypeArray.
1820 //
1821 // Ops[0] = Element Type ID
1822 // Ops[1] = Array Length Constant ID
1823 SPIRVOperandVec Ops;
1824
alan-baker3f772c02021-06-15 22:18:11 -04001825 Ops << getSPIRVType(ArrTy->getElementType(), needs_layout) << CstLength;
SJWf93f5f32020-05-05 07:27:56 -05001826
1827 RID = addSPIRVInst<kTypes>(spv::OpTypeArray, Ops);
1828 }
1829 break;
1830 }
1831 case Type::FixedVectorTyID: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001832 auto VecTy = cast<VectorType>(Canonical);
SJWf93f5f32020-05-05 07:27:56 -05001833 // <4 x i8> is changed to i32 if i8 is not generally supported.
1834 if (!clspv::Option::Int8Support() &&
1835 VecTy->getElementType() == Type::getInt8Ty(module->getContext())) {
alan-baker5a8c3be2020-09-09 13:44:26 -04001836 if (VecTy->getElementCount().getKnownMinValue() == 4) {
SJWf93f5f32020-05-05 07:27:56 -05001837 RID = getSPIRVType(VecTy->getElementType());
1838 break;
1839 } else {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001840 Canonical->print(errs());
SJWf93f5f32020-05-05 07:27:56 -05001841 llvm_unreachable("Support above i8 vector type");
1842 }
1843 }
1844
1845 // Ops[0] = Component Type ID
1846 // Ops[1] = Component Count (Literal Number)
1847 SPIRVOperandVec Ops;
alan-baker5a8c3be2020-09-09 13:44:26 -04001848 Ops << VecTy->getElementType()
1849 << VecTy->getElementCount().getKnownMinValue();
SJWf93f5f32020-05-05 07:27:56 -05001850
1851 RID = addSPIRVInst<kTypes>(spv::OpTypeVector, Ops);
1852 break;
1853 }
1854 case Type::VoidTyID: {
1855 RID = addSPIRVInst<kTypes>(spv::OpTypeVoid);
1856 break;
1857 }
1858 case Type::FunctionTyID: {
1859 // Generate SPIRV instruction for function type.
alan-bakerc3fd07f2020-10-22 09:48:49 -04001860 FunctionType *FTy = cast<FunctionType>(Canonical);
SJWf93f5f32020-05-05 07:27:56 -05001861
1862 // Ops[0] = Return Type ID
1863 // Ops[1] ... Ops[n] = Parameter Type IDs
1864 SPIRVOperandVec Ops;
1865
1866 // Find SPIRV instruction for return type
SJW01901d92020-05-21 08:58:31 -05001867 Ops << FTy->getReturnType();
SJWf93f5f32020-05-05 07:27:56 -05001868
1869 // Find SPIRV instructions for parameter types
1870 for (unsigned k = 0; k < FTy->getNumParams(); k++) {
1871 // Find SPIRV instruction for parameter type.
1872 auto ParamTy = FTy->getParamType(k);
1873 if (ParamTy->isPointerTy()) {
1874 auto PointeeTy = ParamTy->getPointerElementType();
1875 if (PointeeTy->isStructTy() &&
1876 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1877 ParamTy = PointeeTy;
1878 }
1879 }
1880
alan-baker3f772c02021-06-15 22:18:11 -04001881 Ops << getSPIRVType(ParamTy, needs_layout);
SJWf93f5f32020-05-05 07:27:56 -05001882 }
1883
1884 RID = addSPIRVInst<kTypes>(spv::OpTypeFunction, Ops);
1885 break;
1886 }
1887 }
1888
SJW01901d92020-05-21 08:58:31 -05001889 if (RID.isValid()) {
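    // Cache the result in the slot selected by |layout|
    // (0 = no explicit layout, 1 = laid out).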
alan-baker3f772c02021-06-15 22:18:11 -04001890 auto &entry = TypeMap[Canonical];
1891 if (entry.empty()) {
1892 entry.resize(2);
1893 }
1894 entry[layout] = RID;
1895
1896 if (Canonical != Ty) {
1897 // Also cache the original type.
1898 auto &base_entry = TypeMap[Ty];
1899 if (base_entry.empty()) {
1900 base_entry.resize(2);
1901 }
1902 base_entry[layout] = RID;
alan-bakerc3fd07f2020-10-22 09:48:49 -04001903 }
SJWf93f5f32020-05-05 07:27:56 -05001904 }
1905 return RID;
David Neto22f144c2017-06-12 14:26:21 -04001906}
1907
SJW806a5d82020-07-15 12:51:38 -05001908SPIRVID SPIRVProducerPass::getSPIRVInt32Constant(uint32_t CstVal) {
1909 Type *i32 = Type::getInt32Ty(module->getContext());
1910 Constant *Cst = ConstantInt::get(i32, CstVal);
1911 return getSPIRVValue(Cst);
1912}
1913
alan-baker1b333b62021-05-31 14:55:32 -04001914SPIRVID SPIRVProducerPass::getSPIRVConstant(Constant *C) {
David Neto22f144c2017-06-12 14:26:21 -04001915 ValueMapType &VMap = getValueMap();
David Neto482550a2018-03-24 05:21:07 -07001916 const bool hack_undef = clspv::Option::HackUndef();
David Neto22f144c2017-06-12 14:26:21 -04001917
alan-baker1b333b62021-05-31 14:55:32 -04001918 // Treat poison as an undef.
1919 auto *Cst = C;
1920 if (isa<PoisonValue>(Cst)) {
1921 Cst = UndefValue::get(Cst->getType());
1922 }
1923
1924 auto VI = VMap.find(Cst);
1925 if (VI != VMap.end()) {
1926 assert(VI->second.isValid());
1927 return VI->second;
1928 }
1929
SJW01901d92020-05-21 08:58:31 -05001930 SPIRVID RID;
David Neto22f144c2017-06-12 14:26:21 -04001931
SJWf93f5f32020-05-05 07:27:56 -05001932 //
1933 // Generate OpConstant.
1934 //
1935 // Ops[0] = Result Type ID
1936 // Ops[1] .. Ops[n] = Values LiteralNumber
1937 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04001938
SJW01901d92020-05-21 08:58:31 -05001939 Ops << Cst->getType();
David Neto22f144c2017-06-12 14:26:21 -04001940
SJWf93f5f32020-05-05 07:27:56 -05001941 std::vector<uint32_t> LiteralNum;
1942 spv::Op Opcode = spv::OpNop;
David Neto22f144c2017-06-12 14:26:21 -04001943
SJWf93f5f32020-05-05 07:27:56 -05001944 if (isa<UndefValue>(Cst)) {
David Neto22f144c2017-06-12 14:26:21 -04001945 // Ops[0] = Result Type ID
SJWf93f5f32020-05-05 07:27:56 -05001946 Opcode = spv::OpUndef;
1947 if (hack_undef && IsTypeNullable(Cst->getType())) {
1948 Opcode = spv::OpConstantNull;
1949 }
1950 } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
alan-bakere2a62752020-07-09 22:53:23 -04001951 unsigned bit_width = CI->getBitWidth();
1952 if (bit_width == 1) {
SJWf93f5f32020-05-05 07:27:56 -05001953 // If the bitwidth of constant is 1, generate OpConstantTrue or
1954 // OpConstantFalse.
1955 if (CI->getZExtValue()) {
1956 // Ops[0] = Result Type ID
1957 Opcode = spv::OpConstantTrue;
David Neto22f144c2017-06-12 14:26:21 -04001958 } else {
SJWf93f5f32020-05-05 07:27:56 -05001959 // Ops[0] = Result Type ID
1960 Opcode = spv::OpConstantFalse;
David Neto22f144c2017-06-12 14:26:21 -04001961 }
SJWf93f5f32020-05-05 07:27:56 -05001962 } else {
1963 auto V = CI->getZExtValue();
1964 LiteralNum.push_back(V & 0xFFFFFFFF);
1965
alan-bakere2a62752020-07-09 22:53:23 -04001966 if (bit_width > 32) {
SJWf93f5f32020-05-05 07:27:56 -05001967 LiteralNum.push_back(V >> 32);
David Neto22f144c2017-06-12 14:26:21 -04001968 }
1969
1970 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04001971
SJW01901d92020-05-21 08:58:31 -05001972 Ops << LiteralNum;
SJWf93f5f32020-05-05 07:27:56 -05001973 }
1974 } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
1975 uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
1976 Type *CFPTy = CFP->getType();
1977 if (CFPTy->isFloatTy()) {
1978 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
1979 } else if (CFPTy->isDoubleTy()) {
1980 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
1981 LiteralNum.push_back(FPVal >> 32);
1982 } else if (CFPTy->isHalfTy()) {
1983 LiteralNum.push_back(FPVal & 0xFFFF);
1984 } else {
1985 CFPTy->print(errs());
1986 llvm_unreachable("Implement this ConstantFP Type");
1987 }
David Neto22f144c2017-06-12 14:26:21 -04001988
SJWf93f5f32020-05-05 07:27:56 -05001989 Opcode = spv::OpConstant;
David Neto49351ac2017-08-26 17:32:20 -04001990
SJW01901d92020-05-21 08:58:31 -05001991 Ops << LiteralNum;
SJWf93f5f32020-05-05 07:27:56 -05001992 } else if (isa<ConstantDataSequential>(Cst) &&
1993 cast<ConstantDataSequential>(Cst)->isString()) {
1994 Cst->print(errs());
1995 llvm_unreachable("Implement this Constant");
David Neto49351ac2017-08-26 17:32:20 -04001996
SJWf93f5f32020-05-05 07:27:56 -05001997 } else if (const ConstantDataSequential *CDS =
1998 dyn_cast<ConstantDataSequential>(Cst)) {
1999 // Convert a <4 x i8> constant to an i32 constant as a special case.
2000 // This case occurs when all the values are specified as constant
2001 // ints.
2002 Type *CstTy = Cst->getType();
2003 if (is4xi8vec(CstTy)) {
SJWf93f5f32020-05-05 07:27:56 -05002004 //
2005 // Generate OpConstant with OpTypeInt 32 0.
2006 //
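      // The four byte elements are packed into a single 32-bit literal;
      // element 0 of the vector ends up in the most significant byte.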
2007 uint32_t IntValue = 0;
2008 for (unsigned k = 0; k < 4; k++) {
2009 const uint64_t Val = CDS->getElementAsInteger(k);
2010 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto49351ac2017-08-26 17:32:20 -04002011 }
2012
SJW806a5d82020-07-15 12:51:38 -05002013 RID = getSPIRVInt32Constant(IntValue);
SJWf93f5f32020-05-05 07:27:56 -05002014 } else {
2015
David Neto49351ac2017-08-26 17:32:20 -04002016 // A normal constant-data-sequential case.
David Neto22f144c2017-06-12 14:26:21 -04002017 for (unsigned k = 0; k < CDS->getNumElements(); k++) {
SJW01901d92020-05-21 08:58:31 -05002018 Ops << CDS->getElementAsConstant(k);
David Neto22f144c2017-06-12 14:26:21 -04002019 }
2020
2021 Opcode = spv::OpConstantComposite;
SJWf93f5f32020-05-05 07:27:56 -05002022 }
2023 } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
2024 // Convert a <4 x i8> constant to an i32 constant as a special case.
2025 // This case occurs when at least one of the values is an undef.
2026 Type *CstTy = Cst->getType();
2027 if (is4xi8vec(CstTy)) {
SJWf93f5f32020-05-05 07:27:56 -05002028 //
2029 // Generate OpConstant with OpTypeInt 32 0.
2030 //
2031 uint32_t IntValue = 0;
2032 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
2033 I != E; ++I) {
2034 uint64_t Val = 0;
2035 const Value *CV = *I;
2036 if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
2037 Val = CI2->getZExtValue();
David Neto22f144c2017-06-12 14:26:21 -04002038 }
SJWf93f5f32020-05-05 07:27:56 -05002039 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto22f144c2017-06-12 14:26:21 -04002040 }
2041
SJW806a5d82020-07-15 12:51:38 -05002042 RID = getSPIRVInt32Constant(IntValue);
SJWf93f5f32020-05-05 07:27:56 -05002043 } else {
2044
David Neto22f144c2017-06-12 14:26:21 -04002045 // We use a constant composite in SPIR-V for our constant aggregate in
2046 // LLVM.
2047 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002048
2049 for (unsigned k = 0; k < CA->getNumOperands(); k++) {
David Neto22f144c2017-06-12 14:26:21 -04002050 // And add an operand to the composite we are constructing
SJW01901d92020-05-21 08:58:31 -05002051 Ops << CA->getAggregateElement(k);
David Neto22f144c2017-06-12 14:26:21 -04002052 }
David Neto22f144c2017-06-12 14:26:21 -04002053 }
SJWf93f5f32020-05-05 07:27:56 -05002054 } else if (Cst->isNullValue()) {
2055 Opcode = spv::OpConstantNull;
2056 } else {
2057 Cst->print(errs());
2058 llvm_unreachable("Unsupported Constant???");
2059 }
David Neto22f144c2017-06-12 14:26:21 -04002060
SJWf93f5f32020-05-05 07:27:56 -05002061 if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
2062 // Null pointer requires variable pointers.
2063 setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
2064 }
alan-baker5b86ed72019-02-15 08:26:50 -05002065
SJWf93f5f32020-05-05 07:27:56 -05002066 if (RID == 0) {
2067 RID = addSPIRVInst<kConstants>(Opcode, Ops);
2068 }
2069
2070 VMap[Cst] = RID;
2071
2072 return RID;
2073}
2074
2075SPIRVID SPIRVProducerPass::getSPIRVValue(Value *V) {
2076 auto II = ValueMap.find(V);
2077 if (II != ValueMap.end()) {
SJW01901d92020-05-21 08:58:31 -05002078 assert(II->second.isValid());
SJWf93f5f32020-05-05 07:27:56 -05002079 return II->second;
2080 }
2081 if (Constant *Cst = dyn_cast<Constant>(V)) {
2082 return getSPIRVConstant(Cst);
2083 } else {
2084 llvm_unreachable("Variable not found");
2085 }
2086}
2087
SJW77b87ad2020-04-21 14:37:52 -05002088void SPIRVProducerPass::GenerateSamplers() {
alan-baker09cb9802019-12-10 13:16:27 -05002089 SamplerLiteralToIDMap.clear();
David Neto862b7d82018-06-14 18:48:37 -04002090 DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
2091 DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;
David Neto22f144c2017-06-12 14:26:21 -04002092
David Neto862b7d82018-06-14 18:48:37 -04002093 // We might have samplers in the sampler map that are not used
2094 // in the translation unit. We still need to allocate variables and
2095 // bindings for them.
2096 DenseSet<unsigned> used_bindings;
David Neto22f144c2017-06-12 14:26:21 -04002097
SJW77b87ad2020-04-21 14:37:52 -05002098 auto *var_fn = module->getFunction(clspv::LiteralSamplerFunction());
alan-baker09cb9802019-12-10 13:16:27 -05002099 // Return if there are no literal samplers.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002100 if (!var_fn)
2101 return;
alan-baker09cb9802019-12-10 13:16:27 -05002102
David Neto862b7d82018-06-14 18:48:37 -04002103 for (auto user : var_fn->users()) {
2104 // Populate SamplerLiteralToDescriptorSetMap and
2105 // SamplerLiteralToBindingMap.
2106 //
2107 // Look for calls like
2108 // call %opencl.sampler_t addrspace(2)*
2109 // @clspv.sampler.var.literal(
2110 // i32 descriptor,
2111 // i32 binding,
alan-baker09cb9802019-12-10 13:16:27 -05002112 // i32 (index-into-sampler-map|sampler_mask))
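    // When the sampler map is in use, the third operand is an index into the
    // map; otherwise it is the literal sampler mask value itself.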
alan-bakerb6b09dc2018-11-08 16:59:28 -05002113 if (auto *call = dyn_cast<CallInst>(user)) {
alan-baker09cb9802019-12-10 13:16:27 -05002114 const auto third_param = static_cast<unsigned>(
alan-bakerb6b09dc2018-11-08 16:59:28 -05002115 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
alan-baker09cb9802019-12-10 13:16:27 -05002116 auto sampler_value = third_param;
2117 if (clspv::Option::UseSamplerMap()) {
alan-baker3f772c02021-06-15 22:18:11 -04002118 auto &sampler_map = *getSamplerMap();
alan-baker09cb9802019-12-10 13:16:27 -05002119 if (third_param >= sampler_map.size()) {
2120 errs() << "Out of bounds index to sampler map: " << third_param;
2121 llvm_unreachable("bad sampler init: out of bounds");
2122 }
2123 sampler_value = sampler_map[third_param].first;
David Neto862b7d82018-06-14 18:48:37 -04002124 }
2125
David Neto862b7d82018-06-14 18:48:37 -04002126 const auto descriptor_set = static_cast<unsigned>(
2127 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
2128 const auto binding = static_cast<unsigned>(
2129 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
2130
2131 SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
2132 SamplerLiteralToBindingMap[sampler_value] = binding;
2133 used_bindings.insert(binding);
2134 }
2135 }
2136
alan-baker09cb9802019-12-10 13:16:27 -05002137 DenseSet<size_t> seen;
2138 for (auto user : var_fn->users()) {
2139 if (!isa<CallInst>(user))
2140 continue;
2141
2142 auto call = cast<CallInst>(user);
2143 const unsigned third_param = static_cast<unsigned>(
2144 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
2145
2146 // Already allocated a variable for this value.
2147 if (!seen.insert(third_param).second)
2148 continue;
2149
2150 auto sampler_value = third_param;
2151 if (clspv::Option::UseSamplerMap()) {
alan-baker3f772c02021-06-15 22:18:11 -04002152 sampler_value = (*getSamplerMap())[third_param].first;
alan-baker09cb9802019-12-10 13:16:27 -05002153 }
2154
SJW806a5d82020-07-15 12:51:38 -05002155 auto sampler_var_id = addSPIRVGlobalVariable(
2156 getSPIRVType(SamplerTy), spv::StorageClassUniformConstant);
David Neto22f144c2017-06-12 14:26:21 -04002157
alan-baker09cb9802019-12-10 13:16:27 -05002158 SamplerLiteralToIDMap[sampler_value] = sampler_var_id;
David Neto22f144c2017-06-12 14:26:21 -04002159
David Neto862b7d82018-06-14 18:48:37 -04002160 unsigned descriptor_set;
2161 unsigned binding;
alan-baker09cb9802019-12-10 13:16:27 -05002162 if (SamplerLiteralToBindingMap.find(sampler_value) ==
alan-bakerb6b09dc2018-11-08 16:59:28 -05002163 SamplerLiteralToBindingMap.end()) {
David Neto862b7d82018-06-14 18:48:37 -04002164 // This sampler is not actually used. Assign it the next unused binding.
alan-baker7506abb2020-09-10 15:02:55 -04002165 for (binding = 0; used_bindings.count(binding); binding++) {
2166 }
David Neto862b7d82018-06-14 18:48:37 -04002167 descriptor_set = 0; // Literal samplers always use descriptor set 0.
2168 used_bindings.insert(binding);
2169 } else {
alan-baker09cb9802019-12-10 13:16:27 -05002170 descriptor_set = SamplerLiteralToDescriptorSetMap[sampler_value];
2171 binding = SamplerLiteralToBindingMap[sampler_value];
alan-bakercff80152019-06-15 00:38:00 -04002172
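      // Emit a reflection instruction recording this literal sampler's
      // descriptor set, binding, and sampler mask value.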
alan-baker86ce19c2020-08-05 13:09:19 -04002173 auto import_id = getReflectionImport();
2174 SPIRVOperandVec Ops;
2175 Ops << getSPIRVType(Type::getVoidTy(module->getContext())) << import_id
2176 << reflection::ExtInstLiteralSampler
2177 << getSPIRVInt32Constant(descriptor_set)
2178 << getSPIRVInt32Constant(binding)
2179 << getSPIRVInt32Constant(sampler_value);
2180 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
David Neto862b7d82018-06-14 18:48:37 -04002181 }
2182
SJW69939d52020-04-16 07:29:07 -05002183 // Ops[0] = Target ID
2184 // Ops[1] = Decoration (DescriptorSet)
2185 // Ops[2] = LiteralNumber according to Decoration
SJW806a5d82020-07-15 12:51:38 -05002186 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05002187 Ops << sampler_var_id << spv::DecorationDescriptorSet << descriptor_set;
David Neto22f144c2017-06-12 14:26:21 -04002188
SJWf93f5f32020-05-05 07:27:56 -05002189 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002190
2191 // Ops[0] = Target ID
2192 // Ops[1] = Decoration (Binding)
2193 // Ops[2] = LiteralNumber according to Decoration
2194 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002195 Ops << sampler_var_id << spv::DecorationBinding << binding;
David Neto22f144c2017-06-12 14:26:21 -04002196
SJWf93f5f32020-05-05 07:27:56 -05002197 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002198 }
David Neto862b7d82018-06-14 18:48:37 -04002199}
David Neto22f144c2017-06-12 14:26:21 -04002200
SJW77b87ad2020-04-21 14:37:52 -05002201void SPIRVProducerPass::GenerateResourceVars() {
David Neto862b7d82018-06-14 18:48:37 -04002202 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04002203
David Neto862b7d82018-06-14 18:48:37 -04002204 // Generate variables. Make one for each resource var info object.
2205 for (auto *info : ModuleOrderedResourceVars) {
2206 Type *type = info->var_fn->getReturnType();
2207 // Remap the address space for opaque types.
2208 switch (info->arg_kind) {
2209 case clspv::ArgKind::Sampler:
alan-bakerf6bc8252020-09-23 14:58:55 -04002210 case clspv::ArgKind::SampledImage:
2211 case clspv::ArgKind::StorageImage:
David Neto862b7d82018-06-14 18:48:37 -04002212 type = PointerType::get(type->getPointerElementType(),
2213 clspv::AddressSpace::UniformConstant);
2214 break;
2215 default:
2216 break;
2217 }
David Neto22f144c2017-06-12 14:26:21 -04002218
David Neto862b7d82018-06-14 18:48:37 -04002219 const auto sc = GetStorageClassForArgKind(info->arg_kind);
David Neto22f144c2017-06-12 14:26:21 -04002220
SJW806a5d82020-07-15 12:51:38 -05002221 info->var_id = addSPIRVGlobalVariable(getSPIRVType(type), sc);
David Neto862b7d82018-06-14 18:48:37 -04002222
2223 // Map calls to the variable-builtin-function.
2224 for (auto &U : info->var_fn->uses()) {
2225 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
2226 const auto set = unsigned(
2227 dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
2228 const auto binding = unsigned(
2229 dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
2230 if (set == info->descriptor_set && binding == info->binding) {
2231 switch (info->arg_kind) {
2232 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002233 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002234 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04002235 case clspv::ArgKind::PodUBO:
2236 case clspv::ArgKind::PodPushConstant:
David Neto862b7d82018-06-14 18:48:37 -04002237 // The call maps to the variable directly.
2238 VMap[call] = info->var_id;
2239 break;
2240 case clspv::ArgKind::Sampler:
alan-bakerf6bc8252020-09-23 14:58:55 -04002241 case clspv::ArgKind::SampledImage:
2242 case clspv::ArgKind::StorageImage:
David Neto862b7d82018-06-14 18:48:37 -04002243 // The call maps to a load we generate later.
2244 ResourceVarDeferredLoadCalls[call] = info->var_id;
2245 break;
2246 default:
2247 llvm_unreachable("Unhandled arg kind");
2248 }
2249 }
David Neto22f144c2017-06-12 14:26:21 -04002250 }
David Neto862b7d82018-06-14 18:48:37 -04002251 }
2252 }
David Neto22f144c2017-06-12 14:26:21 -04002253
David Neto862b7d82018-06-14 18:48:37 -04002254 // Generate associated decorations.
SJWf93f5f32020-05-05 07:27:56 -05002255 SPIRVOperandVec Ops;
David Neto862b7d82018-06-14 18:48:37 -04002256 for (auto *info : ModuleOrderedResourceVars) {
alan-baker9b0ec3c2020-04-06 14:45:34 -04002257 // Push constants don't need descriptor set or binding decorations.
2258 if (info->arg_kind == clspv::ArgKind::PodPushConstant)
2259 continue;
2260
David Neto862b7d82018-06-14 18:48:37 -04002261 // Decorate with DescriptorSet and Binding.
2262 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002263 Ops << info->var_id << spv::DecorationDescriptorSet << info->descriptor_set;
SJWf93f5f32020-05-05 07:27:56 -05002264 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto862b7d82018-06-14 18:48:37 -04002265
2266 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002267 Ops << info->var_id << spv::DecorationBinding << info->binding;
SJWf93f5f32020-05-05 07:27:56 -05002268 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto862b7d82018-06-14 18:48:37 -04002269
alan-bakere9308012019-03-15 10:25:13 -04002270 if (info->coherent) {
2271 // Decorate with Coherent if required for the variable.
2272 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002273 Ops << info->var_id << spv::DecorationCoherent;
SJWf93f5f32020-05-05 07:27:56 -05002274 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere9308012019-03-15 10:25:13 -04002275 }
2276
David Neto862b7d82018-06-14 18:48:37 -04002277 // Generate NonWritable and NonReadable
2278 switch (info->arg_kind) {
2279 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002280 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002281 if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
2282 clspv::AddressSpace::Constant) {
2283 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002284 Ops << info->var_id << spv::DecorationNonWritable;
SJWf93f5f32020-05-05 07:27:56 -05002285 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002286 }
David Neto862b7d82018-06-14 18:48:37 -04002287 break;
alan-bakerf6bc8252020-09-23 14:58:55 -04002288 case clspv::ArgKind::StorageImage: {
2289 auto *type = info->var_fn->getReturnType();
2290 auto *struct_ty = cast<StructType>(type->getPointerElementType());
2291 // TODO(alan-baker): This is conservative. If compiling for OpenCL 2.0 or
2292 // above, the compiler treats all write_only images as read_write images.
2293 if (struct_ty->getName().contains("_wo_t")) {
2294 Ops.clear();
2295 Ops << info->var_id << spv::DecorationNonReadable;
2296 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
2297 }
David Neto862b7d82018-06-14 18:48:37 -04002298 break;
alan-bakerf6bc8252020-09-23 14:58:55 -04002299 }
David Neto862b7d82018-06-14 18:48:37 -04002300 default:
2301 break;
David Neto22f144c2017-06-12 14:26:21 -04002302 }
2303 }
2304}
2305
2306void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
David Neto22f144c2017-06-12 14:26:21 -04002307 ValueMapType &VMap = getValueMap();
SJW01901d92020-05-21 08:58:31 -05002308 std::vector<SPIRVID> &BuiltinDimVec = getBuiltinDimVec();
David Neto85082642018-03-24 06:55:20 -07002309 const DataLayout &DL = GV.getParent()->getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04002310
2311 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
2312 Type *Ty = GV.getType();
2313 PointerType *PTy = cast<PointerType>(Ty);
2314
SJW01901d92020-05-21 08:58:31 -05002315 SPIRVID InitializerID;
David Neto22f144c2017-06-12 14:26:21 -04002316
2317 // Workgroup size is handled differently (it goes into a constant)
2318 if (spv::BuiltInWorkgroupSize == BuiltinType) {
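    // Scan every kernel's reqd_work_group_size. If they all agree, the builtin
    // can be emitted as an OpConstantComposite; otherwise (or when non-uniform
    // NDRanges are supported) it is built from three OpSpecConstants so the
    // client can specialize the workgroup size.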
David Neto22f144c2017-06-12 14:26:21 -04002319 uint32_t PrevXDimCst = 0xFFFFFFFF;
2320 uint32_t PrevYDimCst = 0xFFFFFFFF;
2321 uint32_t PrevZDimCst = 0xFFFFFFFF;
alan-baker3b609772020-09-03 19:10:17 -04002322 bool HasMD = true;
David Neto22f144c2017-06-12 14:26:21 -04002323 for (Function &Func : *GV.getParent()) {
2324 if (Func.isDeclaration()) {
2325 continue;
2326 }
2327
2328 // We only need to check kernels.
2329 if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
2330 continue;
2331 }
2332
2333 if (const MDNode *MD =
2334 dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
2335 uint32_t CurXDimCst = static_cast<uint32_t>(
2336 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
2337 uint32_t CurYDimCst = static_cast<uint32_t>(
2338 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
2339 uint32_t CurZDimCst = static_cast<uint32_t>(
2340 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
2341
2342 if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
2343 PrevZDimCst == 0xFFFFFFFF) {
2344 PrevXDimCst = CurXDimCst;
2345 PrevYDimCst = CurYDimCst;
2346 PrevZDimCst = CurZDimCst;
2347 } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
2348 CurZDimCst != PrevZDimCst) {
alan-baker3b609772020-09-03 19:10:17 -04002349 HasMD = false;
2350 continue;
David Neto22f144c2017-06-12 14:26:21 -04002351 } else {
2352 continue;
2353 }
2354
2355 //
2356 // Generate OpConstantComposite.
2357 //
2358 // Ops[0] : Result Type ID
2359 // Ops[1] : Constant size for x dimension.
2360 // Ops[2] : Constant size for y dimension.
2361 // Ops[3] : Constant size for z dimension.
SJWf93f5f32020-05-05 07:27:56 -05002362 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002363
SJW01901d92020-05-21 08:58:31 -05002364 SPIRVID XDimCstID =
SJWf93f5f32020-05-05 07:27:56 -05002365 getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(0)));
SJW01901d92020-05-21 08:58:31 -05002366 SPIRVID YDimCstID =
SJWf93f5f32020-05-05 07:27:56 -05002367 getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(1)));
SJW01901d92020-05-21 08:58:31 -05002368 SPIRVID ZDimCstID =
SJWf93f5f32020-05-05 07:27:56 -05002369 getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(2)));
David Neto22f144c2017-06-12 14:26:21 -04002370
SJW01901d92020-05-21 08:58:31 -05002371 Ops << Ty->getPointerElementType() << XDimCstID << YDimCstID
2372 << ZDimCstID;
David Neto22f144c2017-06-12 14:26:21 -04002373
SJWf93f5f32020-05-05 07:27:56 -05002374 InitializerID =
2375 addSPIRVInst<kGlobalVariables>(spv::OpConstantComposite, Ops);
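          // Illustrative result when every kernel agrees on
          // reqd_work_group_size (ids and literals below are examples only):
          //   %wgsize = OpConstantComposite %v3uint %uint_8 %uint_4 %uint_1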
David Neto22f144c2017-06-12 14:26:21 -04002376 } else {
alan-baker3b609772020-09-03 19:10:17 -04002377 HasMD = false;
David Neto22f144c2017-06-12 14:26:21 -04002378 }
2379 }
2380
2381 // If not all kernels have matching reqd_work_group_size metadata, generate
2382 // OpSpecConstants for the x/y/z dimensions.
Kévin Petit21c23c62020-04-29 01:38:28 +01002383 if (!HasMD || clspv::Option::NonUniformNDRangeSupported()) {
David Neto22f144c2017-06-12 14:26:21 -04002384 //
2385 // Generate OpSpecConstants for x/y/z dimension.
2386 //
2387 // Ops[0] : Result Type ID
2388 // Ops[1] : Constant size for x/y/z dimension (Literal Number).
David Neto22f144c2017-06-12 14:26:21 -04002389
alan-bakera1be3322020-04-20 12:48:18 -04002390 // Allocate spec constants for workgroup size.
SJW77b87ad2020-04-21 14:37:52 -05002391 clspv::AddWorkgroupSpecConstants(module);
alan-bakera1be3322020-04-20 12:48:18 -04002392
SJWf93f5f32020-05-05 07:27:56 -05002393 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05002394 SPIRVID result_type_id = getSPIRVType(
SJWf93f5f32020-05-05 07:27:56 -05002395 dyn_cast<VectorType>(Ty->getPointerElementType())->getElementType());
David Neto22f144c2017-06-12 14:26:21 -04002396
David Neto257c3892018-04-11 13:19:45 -04002397 // X Dimension
SJW01901d92020-05-21 08:58:31 -05002398 Ops << result_type_id << 1;
2399 SPIRVID XDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002400
2401 // Y Dimension
2402 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002403 Ops << result_type_id << 1;
2404 SPIRVID YDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002405
2406 // Z Dimension
2407 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002408 Ops << result_type_id << 1;
2409 SPIRVID ZDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002410
David Neto257c3892018-04-11 13:19:45 -04002411 BuiltinDimVec.push_back(XDimCstID);
2412 BuiltinDimVec.push_back(YDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002413 BuiltinDimVec.push_back(ZDimCstID);
2414
David Neto22f144c2017-06-12 14:26:21 -04002415 //
2416 // Generate OpSpecConstantComposite.
2417 //
2418 // Ops[0] : Result Type ID
2419 // Ops[1] : Constant size for x dimension.
2420 // Ops[2] : Constant size for y dimension.
2421 // Ops[3] : Constant size for z dimension.
David Neto22f144c2017-06-12 14:26:21 -04002422 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002423 Ops << Ty->getPointerElementType() << XDimCstID << YDimCstID << ZDimCstID;
David Neto22f144c2017-06-12 14:26:21 -04002424
SJWf93f5f32020-05-05 07:27:56 -05002425 InitializerID =
2426 addSPIRVInst<kConstants>(spv::OpSpecConstantComposite, Ops);
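      // Illustrative result (ids are examples; the SpecId decorations for
      // these constants are emitted later from BuiltinDimVec):
      //   %x = OpSpecConstant %uint 1
      //   %y = OpSpecConstant %uint 1
      //   %z = OpSpecConstant %uint 1
      //   %wgsize = OpSpecConstantComposite %v3uint %x %y %z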
David Neto22f144c2017-06-12 14:26:21 -04002427 }
alan-bakerbed3a882020-04-21 14:42:41 -04002428 } else if (BuiltinType == spv::BuiltInWorkDim) {
2429 // 1. Generate a specialization constant with a default of 3.
2430 // 2. Allocate and annotate a SpecId for the constant.
2431 // 3. Use the spec constant as the initializer for the variable.
SJWf93f5f32020-05-05 07:27:56 -05002432 SPIRVOperandVec Ops;
alan-bakerbed3a882020-04-21 14:42:41 -04002433
2434 //
2435 // Generate OpSpecConstant.
2436 //
2437 // Ops[0] : Result Type ID
2438 // Ops[1] : Default literal value
alan-bakerbed3a882020-04-21 14:42:41 -04002439
SJW01901d92020-05-21 08:58:31 -05002440 Ops << IntegerType::get(GV.getContext(), 32) << 3;
alan-bakerbed3a882020-04-21 14:42:41 -04002441
SJWf93f5f32020-05-05 07:27:56 -05002442 InitializerID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakerbed3a882020-04-21 14:42:41 -04002443
2444 //
2445 // Generate SpecId decoration.
2446 //
2447 // Ops[0] : target
2448 // Ops[1] : decoration
2449 // Ops[2] : SpecId
Alan Baker75ccc252020-04-21 17:11:52 -04002450 auto spec_id = AllocateSpecConstant(module, SpecConstant::kWorkDim);
alan-bakerbed3a882020-04-21 14:42:41 -04002451 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002452 Ops << InitializerID << spv::DecorationSpecId << spec_id;
alan-bakerbed3a882020-04-21 14:42:41 -04002453
SJWf93f5f32020-05-05 07:27:56 -05002454 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
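    // Illustrative result (ids and the SpecId literal are examples):
    //   %workdim = OpSpecConstant %uint 3
    //   OpDecorate %workdim SpecId <allocated id>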
alan-bakere1996972020-05-04 08:38:12 -04002455 } else if (BuiltinType == spv::BuiltInGlobalOffset) {
2456 // 1. Generate a spec constant with a default of {0, 0, 0}.
2457 // 2. Allocate and annotate SpecIds for the constants.
2458 // 3. Use the spec constant as the initializer for the variable.
SJWf93f5f32020-05-05 07:27:56 -05002459 SPIRVOperandVec Ops;
alan-bakere1996972020-05-04 08:38:12 -04002460
2461 //
2462 // Generate OpSpecConstant for each dimension.
2463 //
2464 // Ops[0] : Result Type ID
2465 // Ops[1] : Default literal value
2466 //
SJW01901d92020-05-21 08:58:31 -05002467 Ops << IntegerType::get(GV.getContext(), 32) << 0;
2468 SPIRVID x_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002469
alan-bakere1996972020-05-04 08:38:12 -04002470 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002471 Ops << IntegerType::get(GV.getContext(), 32) << 0;
2472 SPIRVID y_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002473
alan-bakere1996972020-05-04 08:38:12 -04002474 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002475 Ops << IntegerType::get(GV.getContext(), 32) << 0;
2476 SPIRVID z_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002477
2478 //
2479 // Generate SpecId decoration for each dimension.
2480 //
2481 // Ops[0] : target
2482 // Ops[1] : decoration
2483 // Ops[2] : SpecId
2484 //
2485 auto spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetX);
2486 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002487 Ops << x_id << spv::DecorationSpecId << spec_id;
SJWf93f5f32020-05-05 07:27:56 -05002488 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002489
2490 spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetY);
2491 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002492 Ops << y_id << spv::DecorationSpecId << spec_id;
SJWf93f5f32020-05-05 07:27:56 -05002493 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002494
2495 spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetZ);
2496 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002497 Ops << z_id << spv::DecorationSpecId << spec_id;
SJWf93f5f32020-05-05 07:27:56 -05002498 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002499
2500 //
2501 // Generate OpSpecConstantComposite.
2502 //
2503 // Ops[0] : type id
2504 // Ops[1..n-1] : elements
2505 //
alan-bakere1996972020-05-04 08:38:12 -04002506 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002507 Ops << GV.getType()->getPointerElementType() << x_id << y_id << z_id;
SJWf93f5f32020-05-05 07:27:56 -05002508 InitializerID = addSPIRVInst<kConstants>(spv::OpSpecConstantComposite, Ops);
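    // Illustrative result (ids and SpecId literals are examples):
    //   %gx = OpSpecConstant %uint 0   ; likewise %gy and %gz
    //   OpDecorate %gx SpecId <allocated id>
    //   %goff = OpSpecConstantComposite %v3uint %gx %gy %gz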
David Neto22f144c2017-06-12 14:26:21 -04002509 }
2510
David Neto85082642018-03-24 06:55:20 -07002511 const auto AS = PTy->getAddressSpace();
SJW806a5d82020-07-15 12:51:38 -05002512 const auto spvSC = GetStorageClass(AS);
David Neto22f144c2017-06-12 14:26:21 -04002513
David Neto85082642018-03-24 06:55:20 -07002514 const bool module_scope_constant_external_init =
David Neto862b7d82018-06-14 18:48:37 -04002515 (AS == AddressSpace::Constant) && GV.hasInitializer() &&
David Neto85082642018-03-24 06:55:20 -07002516 clspv::Option::ModuleConstantsInStorageBuffer();
2517
Kévin Petit23d5f182019-08-13 16:21:29 +01002518 if (GV.hasInitializer()) {
2519 auto GVInit = GV.getInitializer();
2520 if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
SJWf93f5f32020-05-05 07:27:56 -05002521 InitializerID = getSPIRVValue(GVInit);
David Neto85082642018-03-24 06:55:20 -07002522 }
2523 }
Kévin Petit23d5f182019-08-13 16:21:29 +01002524
alan-baker3f772c02021-06-15 22:18:11 -04002525 // Private, module-scope private, and local global variables are
2526 // conservatively added to entry point interfaces.
2527 const bool interface =
2528 (AS == AddressSpace::Private || AS == AddressSpace::ModuleScopePrivate ||
2529 AS == AddressSpace::Local);
SJW806a5d82020-07-15 12:51:38 -05002530 SPIRVID var_id =
alan-baker3f772c02021-06-15 22:18:11 -04002531 addSPIRVGlobalVariable(getSPIRVType(Ty), spvSC, InitializerID, interface);
David Neto85082642018-03-24 06:55:20 -07002532
SJWf93f5f32020-05-05 07:27:56 -05002533 VMap[&GV] = var_id;
David Neto22f144c2017-06-12 14:26:21 -04002534
alan-bakere1996972020-05-04 08:38:12 -04002535 auto IsOpenCLBuiltin = [](spv::BuiltIn builtin) {
2536 return builtin == spv::BuiltInWorkDim ||
2537 builtin == spv::BuiltInGlobalOffset;
2538 };
2539
alan-bakere1996972020-05-04 08:38:12 -04002540 // If we have a builtin (not an OpenCL builtin).
2541 if (spv::BuiltInMax != BuiltinType && !IsOpenCLBuiltin(BuiltinType)) {
David Neto22f144c2017-06-12 14:26:21 -04002542 //
2543 // Generate OpDecorate.
2544 //
2545 // DOps[0] = Target ID
2546 // DOps[1] = Decoration (Builtin)
2547 // DOps[2] = BuiltIn ID
SJW01901d92020-05-21 08:58:31 -05002548 SPIRVID ResultID;
David Neto22f144c2017-06-12 14:26:21 -04002549
2550 // WorkgroupSize is different: we decorate the constant composite that holds
2551 // its value, rather than the variable used to access the value.
2552 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2553 ResultID = InitializerID;
David Netoa60b00b2017-09-15 16:34:09 -04002554 // Save both the value and variable IDs for later.
2555 WorkgroupSizeValueID = InitializerID;
SJWf93f5f32020-05-05 07:27:56 -05002556 WorkgroupSizeVarID = getSPIRVValue(&GV);
David Neto22f144c2017-06-12 14:26:21 -04002557 } else {
SJWf93f5f32020-05-05 07:27:56 -05002558 ResultID = getSPIRVValue(&GV);
David Neto22f144c2017-06-12 14:26:21 -04002559 }
2560
SJW806a5d82020-07-15 12:51:38 -05002561 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05002562 Ops << ResultID << spv::DecorationBuiltIn << BuiltinType;
David Neto22f144c2017-06-12 14:26:21 -04002563
SJW01901d92020-05-21 08:58:31 -05002564 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
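    // Illustrative result (id and builtin are examples):
    //   OpDecorate %var BuiltIn GlobalInvocationId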
David Neto85082642018-03-24 06:55:20 -07002565 } else if (module_scope_constant_external_init) {
2566 // This module scope constant is initialized from a storage buffer with data
2567 // provided by the host at binding 0 of the next descriptor set.
SJW77b87ad2020-04-21 14:37:52 -05002568 const uint32_t descriptor_set = TakeDescriptorIndex(module);
David Neto85082642018-03-24 06:55:20 -07002569
alan-baker86ce19c2020-08-05 13:09:19 -04002570 // Emit the initializer as a reflection instruction.
David Neto85082642018-03-24 06:55:20 -07002571 // Use "kind,buffer" to indicate storage buffer. We might want to expand
2572 // that later to other types, like uniform buffer.
alan-bakerf5e5f692018-11-27 08:33:24 -05002573 std::string hexbytes;
2574 llvm::raw_string_ostream str(hexbytes);
2575 clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());
alan-baker86ce19c2020-08-05 13:09:19 -04002576
2577 // Reflection instruction for constant data.
2578 SPIRVOperandVec Ops;
2579 auto data_id = addSPIRVInst<kDebug>(spv::OpString, str.str().c_str());
2580 Ops << getSPIRVType(Type::getVoidTy(module->getContext()))
2581 << getReflectionImport() << reflection::ExtInstConstantDataStorageBuffer
2582 << getSPIRVInt32Constant(descriptor_set) << getSPIRVInt32Constant(0)
2583 << data_id;
2584 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
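    // The net effect is an OpString holding the hex-encoded initializer bytes
    // plus a ConstantDataStorageBuffer reflection OpExtInst that records the
    // descriptor set, binding 0, and that string.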
David Neto85082642018-03-24 06:55:20 -07002585
David Neto85082642018-03-24 06:55:20 -07002586 // OpDecorate %var DescriptorSet <descriptor_set>
alan-baker86ce19c2020-08-05 13:09:19 -04002587 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002588 Ops << var_id << spv::DecorationDescriptorSet << descriptor_set;
2589 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
SJW69939d52020-04-16 07:29:07 -05002590
2591 // OpDecorate %var Binding <binding>
SJW01901d92020-05-21 08:58:31 -05002592 Ops.clear();
2593 Ops << var_id << spv::DecorationBinding << 0;
2594 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04002595 }
2596}
2597
David Neto22f144c2017-06-12 14:26:21 -04002598void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
David Neto22f144c2017-06-12 14:26:21 -04002599 ValueMapType &VMap = getValueMap();
2600 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04002601 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
2602 auto &GlobalConstArgSet = getGlobalConstArgSet();
2603
2604 FunctionType *FTy = F.getFunctionType();
2605
2606 //
David Neto22f144c2017-06-12 14:26:21 -04002607 // Generate OPFunction.
2608 //
2609
2610 // FOps[0] : Result Type ID
2611 // FOps[1] : Function Control
2612 // FOps[2] : Function Type ID
SJWf93f5f32020-05-05 07:27:56 -05002613 SPIRVOperandVec FOps;
David Neto22f144c2017-06-12 14:26:21 -04002614
2615 // Find SPIRV instruction for return type.
SJW01901d92020-05-21 08:58:31 -05002616 FOps << FTy->getReturnType();
David Neto22f144c2017-06-12 14:26:21 -04002617
2618 // Check function attributes for SPIRV Function Control.
2619 uint32_t FuncControl = spv::FunctionControlMaskNone;
2620 if (F.hasFnAttribute(Attribute::AlwaysInline)) {
2621 FuncControl |= spv::FunctionControlInlineMask;
2622 }
2623 if (F.hasFnAttribute(Attribute::NoInline)) {
2624 FuncControl |= spv::FunctionControlDontInlineMask;
2625 }
2626 // TODO: Check llvm attribute for Function Control Pure.
2627 if (F.hasFnAttribute(Attribute::ReadOnly)) {
2628 FuncControl |= spv::FunctionControlPureMask;
2629 }
2630 // TODO: Check llvm attribute for Function Control Const.
2631 if (F.hasFnAttribute(Attribute::ReadNone)) {
2632 FuncControl |= spv::FunctionControlConstMask;
2633 }
2634
SJW01901d92020-05-21 08:58:31 -05002635 FOps << FuncControl;
David Neto22f144c2017-06-12 14:26:21 -04002636
SJW01901d92020-05-21 08:58:31 -05002637 SPIRVID FTyID;
David Neto22f144c2017-06-12 14:26:21 -04002638 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
2639 SmallVector<Type *, 4> NewFuncParamTys;
2640 FunctionType *NewFTy =
2641 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
SJWf93f5f32020-05-05 07:27:56 -05002642 FTyID = getSPIRVType(NewFTy);
David Neto22f144c2017-06-12 14:26:21 -04002643 } else {
David Neto9ed8e2f2018-03-24 06:47:24 -07002644 // Handle regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04002645 if (GlobalConstFuncTyMap.count(FTy)) {
SJWf93f5f32020-05-05 07:27:56 -05002646 FTyID = getSPIRVType(GlobalConstFuncTyMap[FTy].first);
David Neto22f144c2017-06-12 14:26:21 -04002647 } else {
SJWf93f5f32020-05-05 07:27:56 -05002648 FTyID = getSPIRVType(FTy);
David Neto22f144c2017-06-12 14:26:21 -04002649 }
2650 }
2651
SJW01901d92020-05-21 08:58:31 -05002652 FOps << FTyID;
David Neto22f144c2017-06-12 14:26:21 -04002653
SJWf93f5f32020-05-05 07:27:56 -05002654 // Generate SPIRV instruction for function.
2655 SPIRVID FID = addSPIRVInst(spv::OpFunction, FOps);
2656 VMap[&F] = FID;
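  // Illustrative result (ids are examples; the function control mask may also
  // carry Inline/DontInline/Pure/Const bits):
  //   %f = OpFunction %ret_ty None %fn_ty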
David Neto22f144c2017-06-12 14:26:21 -04002657
SJWf93f5f32020-05-05 07:27:56 -05002658 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
2659 EntryPoints.push_back(std::make_pair(&F, FID));
2660 }
David Neto22f144c2017-06-12 14:26:21 -04002661
David Neto482550a2018-03-24 05:21:07 -07002662 if (clspv::Option::ShowIDs()) {
SJW01901d92020-05-21 08:58:31 -05002663 errs() << "Function " << F.getName() << " is " << FID.get() << "\n";
David Netob05675d2018-02-16 12:37:49 -05002664 }
David Neto22f144c2017-06-12 14:26:21 -04002665
2666 //
2667 // Generate OpFunctionParameter for Normal function.
2668 //
David Neto22f144c2017-06-12 14:26:21 -04002669 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
alan-bakere9308012019-03-15 10:25:13 -04002670
David Neto22f144c2017-06-12 14:26:21 -04002671 // Iterate Argument for name instead of param type from function type.
2672 unsigned ArgIdx = 0;
2673 for (Argument &Arg : F.args()) {
David Neto22f144c2017-06-12 14:26:21 -04002674 // ParamOps[0] : Result Type ID
SJW01901d92020-05-21 08:58:31 -05002675 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002676
2677 // Find SPIRV instruction for parameter type.
SJW01901d92020-05-21 08:58:31 -05002678 SPIRVID ParamTyID = getSPIRVType(Arg.getType());
David Neto22f144c2017-06-12 14:26:21 -04002679 if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
2680 if (GlobalConstFuncTyMap.count(FTy)) {
2681 if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
2682 Type *EleTy = PTy->getPointerElementType();
2683 Type *ArgTy =
2684 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
SJWf93f5f32020-05-05 07:27:56 -05002685 ParamTyID = getSPIRVType(ArgTy);
David Neto22f144c2017-06-12 14:26:21 -04002686 GlobalConstArgSet.insert(&Arg);
2687 }
2688 }
2689 }
SJW01901d92020-05-21 08:58:31 -05002690 Ops << ParamTyID;
David Neto22f144c2017-06-12 14:26:21 -04002691
2692 // Generate SPIRV instruction for parameter.
SJW01901d92020-05-21 08:58:31 -05002693 SPIRVID param_id = addSPIRVInst(spv::OpFunctionParameter, Ops);
SJWf93f5f32020-05-05 07:27:56 -05002694 VMap[&Arg] = param_id;
2695
2696 if (CalledWithCoherentResource(Arg)) {
2697 // If the arg is passed a coherent resource ever, then decorate this
2698 // parameter with Coherent too.
SJW01901d92020-05-21 08:58:31 -05002699 Ops.clear();
2700 Ops << param_id << spv::DecorationCoherent;
2701 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
SJWf93f5f32020-05-05 07:27:56 -05002702 }
David Neto22f144c2017-06-12 14:26:21 -04002703
2704 ArgIdx++;
2705 }
2706 }
2707}
2708
SJW77b87ad2020-04-21 14:37:52 -05002709void SPIRVProducerPass::GenerateModuleInfo() {
David Neto22f144c2017-06-12 14:26:21 -04002710 EntryPointVecType &EntryPoints = getEntryPointVec();
SJW806a5d82020-07-15 12:51:38 -05002711 auto &EntryPointInterfaces = getEntryPointInterfacesList();
SJW01901d92020-05-21 08:58:31 -05002712 std::vector<SPIRVID> &BuiltinDimVec = getBuiltinDimVec();
David Neto22f144c2017-06-12 14:26:21 -04002713
SJWf93f5f32020-05-05 07:27:56 -05002714 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002715
SJW01901d92020-05-21 08:58:31 -05002716 for (auto Capability : CapabilitySet) {
David Neto22f144c2017-06-12 14:26:21 -04002717 //
SJW01901d92020-05-21 08:58:31 -05002718 // Generate OpCapability
David Neto22f144c2017-06-12 14:26:21 -04002719 //
2720 // Ops[0] = Capability
SJW01901d92020-05-21 08:58:31 -05002721 addSPIRVInst<kCapabilities>(spv::OpCapability, Capability);
alan-baker5b86ed72019-02-15 08:26:50 -05002722 }
2723
alan-baker3f772c02021-06-15 22:18:11 -04002724 // Storage buffer and variable pointer extensions were made core in SPIR-V
2725 // 1.3.
2726 if (SpvVersion() < SPIRVVersion::SPIRV_1_3) {
David Neto22f144c2017-06-12 14:26:21 -04002727 //
2728 // Generate OpExtension.
2729 //
2730 // Ops[0] = Name (Literal String)
2731 //
SJWf93f5f32020-05-05 07:27:56 -05002732 addSPIRVInst<kExtensions>(spv::OpExtension,
2733 "SPV_KHR_storage_buffer_storage_class");
David Neto22f144c2017-06-12 14:26:21 -04002734
alan-baker3f772c02021-06-15 22:18:11 -04002735 if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
2736 //
2737 // Generate OpExtension.
2738 //
2739 // Ops[0] = Name (Literal String)
2740 //
2741 addSPIRVInst<kExtensions>(spv::OpExtension, "SPV_KHR_variable_pointers");
2742 }
David Neto22f144c2017-06-12 14:26:21 -04002743 }
2744
2745 //
2746 // Generate OpMemoryModel
2747 //
2748 // Memory model for Vulkan will always be GLSL450.
2749
2750 // Ops[0] = Addressing Model
2751 // Ops[1] = Memory Model
2752 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002753 Ops << spv::AddressingModelLogical << spv::MemoryModelGLSL450;
David Neto22f144c2017-06-12 14:26:21 -04002754
SJWf93f5f32020-05-05 07:27:56 -05002755 addSPIRVInst<kMemoryModel>(spv::OpMemoryModel, Ops);
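  // Emitted as: OpMemoryModel Logical GLSL450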
David Neto22f144c2017-06-12 14:26:21 -04002756
2757 //
2758 // Generate OpEntryPoint
2759 //
2760 for (auto EntryPoint : EntryPoints) {
2761 // Ops[0] = Execution Model
2762 // Ops[1] = EntryPoint ID
2763 // Ops[2] = Name (Literal String)
2764 // ...
2765 //
2766 // TODO: Do we need to consider Interface ID for forward references???
2767 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05002768 const StringRef &name = EntryPoint.first->getName();
SJW01901d92020-05-21 08:58:31 -05002769 Ops << spv::ExecutionModelGLCompute << EntryPoint.second << name;
David Neto22f144c2017-06-12 14:26:21 -04002770
SJW806a5d82020-07-15 12:51:38 -05002771 for (auto &Interface : EntryPointInterfaces) {
SJW01901d92020-05-21 08:58:31 -05002772 Ops << Interface;
David Neto22f144c2017-06-12 14:26:21 -04002773 }
2774
alan-baker3f772c02021-06-15 22:18:11 -04002775 // Starting in SPIR-V 1.4, all statically used global variables must be
2776 // included in the interface. Private and statically-sized workgroup
2777 // variables are added to all entry points. Kernel arguments are handled
2778 // here.
2779 if (SpvVersion() >= SPIRVVersion::SPIRV_1_4) {
2780 auto *F = dyn_cast<Function>(EntryPoint.first);
2781 assert(F);
2782 assert(F->getCallingConv() == CallingConv::SPIR_KERNEL);
2783
2784 auto &resource_var_at_index = FunctionToResourceVarsMap[F];
2785 for (auto *info : resource_var_at_index) {
2786 if (info) {
2787 Ops << info->var_id;
2788 }
2789 }
2790
2791 auto local_spec_id_md =
2792 module->getNamedMetadata(clspv::LocalSpecIdMetadataName());
2793 if (local_spec_id_md) {
2794 for (auto spec_id_op : local_spec_id_md->operands()) {
2795 if (dyn_cast<Function>(
2796 dyn_cast<ValueAsMetadata>(spec_id_op->getOperand(0))
2797 ->getValue()) == F) {
2798 int64_t spec_id =
2799 mdconst::extract<ConstantInt>(spec_id_op->getOperand(2))
2800 ->getSExtValue();
2801 if (spec_id > 0) {
2802 auto &info = LocalSpecIdInfoMap[spec_id];
2803 Ops << info.variable_id;
2804 }
2805 }
2806 }
2807 }
2808
2809 // If the kernel uses the global push constant interface it will not be
2810 // covered by the resource variable iteration above.
2811 if (GetPodArgsImpl(*F) == PodArgImpl::kGlobalPushConstant) {
2812 auto *PC =
2813 module->getGlobalVariable(clspv::PushConstantsVariableName());
2814 assert(PC);
2815 Ops << getValueMap()[PC];
2816 }
2817 }
2818
SJWf93f5f32020-05-05 07:27:56 -05002819 addSPIRVInst<kEntryPoints>(spv::OpEntryPoint, Ops);
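    // Illustrative result (name and ids are examples):
    //   OpEntryPoint GLCompute %kernel "kernel_name" %var0 %var1 ...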
David Neto22f144c2017-06-12 14:26:21 -04002820 }
2821
alan-baker3b609772020-09-03 19:10:17 -04002822 if (BuiltinDimVec.empty()) {
2823 for (auto EntryPoint : EntryPoints) {
2824 const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
2825 ->getMetadata("reqd_work_group_size");
2826 if ((MD != nullptr) && !clspv::Option::NonUniformNDRangeSupported()) {
2827 //
2828 // Generate OpExecutionMode
2829 //
David Neto22f144c2017-06-12 14:26:21 -04002830
alan-baker3b609772020-09-03 19:10:17 -04002831 // Ops[0] = Entry Point ID
2832 // Ops[1] = Execution Mode
2833 // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
2834 Ops.clear();
2835 Ops << EntryPoint.second << spv::ExecutionModeLocalSize;
2836
2837 uint32_t XDim = static_cast<uint32_t>(
2838 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
2839 uint32_t YDim = static_cast<uint32_t>(
2840 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
2841 uint32_t ZDim = static_cast<uint32_t>(
2842 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
2843
2844 Ops << XDim << YDim << ZDim;
2845
2846 addSPIRVInst<kExecutionModes>(spv::OpExecutionMode, Ops);
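        // Illustrative result (id and literals are examples):
        //   OpExecutionMode %kernel LocalSize 8 4 1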
David Neto22f144c2017-06-12 14:26:21 -04002847 }
David Neto22f144c2017-06-12 14:26:21 -04002848 }
2849 }
2850
2851 //
2852 // Generate OpSource.
2853 //
2854 // Ops[0] = SourceLanguage ID
2855 // Ops[1] = Version (LiteralNum)
2856 //
SJW01901d92020-05-21 08:58:31 -05002857 uint32_t LangID = spv::SourceLanguageUnknown;
2858 uint32_t LangVer = 0;
Kévin Petitf0515712020-01-07 18:29:20 +00002859 switch (clspv::Option::Language()) {
2860 case clspv::Option::SourceLanguage::OpenCL_C_10:
SJW01901d92020-05-21 08:58:31 -05002861 LangID = spv::SourceLanguageOpenCL_C;
2862 LangVer = 100;
Kévin Petitf0515712020-01-07 18:29:20 +00002863 break;
2864 case clspv::Option::SourceLanguage::OpenCL_C_11:
SJW01901d92020-05-21 08:58:31 -05002865 LangID = spv::SourceLanguageOpenCL_C;
2866 LangVer = 110;
Kévin Petitf0515712020-01-07 18:29:20 +00002867 break;
2868 case clspv::Option::SourceLanguage::OpenCL_C_12:
SJW01901d92020-05-21 08:58:31 -05002869 LangID = spv::SourceLanguageOpenCL_C;
2870 LangVer = 120;
Kévin Petitf0515712020-01-07 18:29:20 +00002871 break;
2872 case clspv::Option::SourceLanguage::OpenCL_C_20:
SJW01901d92020-05-21 08:58:31 -05002873 LangID = spv::SourceLanguageOpenCL_C;
2874 LangVer = 200;
Kévin Petitf0515712020-01-07 18:29:20 +00002875 break;
Kévin Petit77838ff2020-10-19 18:54:51 +01002876 case clspv::Option::SourceLanguage::OpenCL_C_30:
2877 LangID = spv::SourceLanguageOpenCL_C;
2878 LangVer = 300;
2879 break;
Kévin Petitf0515712020-01-07 18:29:20 +00002880 case clspv::Option::SourceLanguage::OpenCL_CPP:
SJW01901d92020-05-21 08:58:31 -05002881 LangID = spv::SourceLanguageOpenCL_CPP;
2882 LangVer = 100;
Kévin Petitf0515712020-01-07 18:29:20 +00002883 break;
2884 default:
Kévin Petitf0515712020-01-07 18:29:20 +00002885 break;
Kévin Petit0fc88042019-04-09 23:25:02 +01002886 }
David Neto22f144c2017-06-12 14:26:21 -04002887
SJW01901d92020-05-21 08:58:31 -05002888 Ops.clear();
2889 Ops << LangID << LangVer;
SJWf93f5f32020-05-05 07:27:56 -05002890 addSPIRVInst<kDebug>(spv::OpSource, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002891
2892 if (!BuiltinDimVec.empty()) {
2893 //
2894 // Generate OpDecorates for x/y/z dimension.
2895 //
2896 // Ops[0] = Target ID
2897 // Ops[1] = Decoration (SpecId)
David Neto257c3892018-04-11 13:19:45 -04002898 // Ops[2] = Specialization Constant ID (Literal Number)
David Neto22f144c2017-06-12 14:26:21 -04002899
2900 // X Dimension
2901 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002902 Ops << BuiltinDimVec[0] << spv::DecorationSpecId << 0;
SJWf93f5f32020-05-05 07:27:56 -05002903 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002904
2905 // Y Dimension
2906 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002907 Ops << BuiltinDimVec[1] << spv::DecorationSpecId << 1;
SJWf93f5f32020-05-05 07:27:56 -05002908 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002909
2910 // Z Dimension
2911 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002912 Ops << BuiltinDimVec[2] << spv::DecorationSpecId << 2;
SJWf93f5f32020-05-05 07:27:56 -05002913 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
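    // Net result (ids are examples):
    //   OpDecorate %x SpecId 0
    //   OpDecorate %y SpecId 1
    //   OpDecorate %z SpecId 2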
David Neto22f144c2017-06-12 14:26:21 -04002914 }
2915}
2916
David Netob6e2e062018-04-25 10:32:06 -04002917void SPIRVProducerPass::GenerateEntryPointInitialStores() {
2918 // Work around a driver bug. Initializers on Private variables might not
2919 // work. So the start of the kernel should store the initializer value to the
2920 // variables. Yes, *every* entry point pays this cost if *any* entry point
2921 // uses this builtin. At this point I judge this to be an acceptable tradeoff
2922 // of complexity vs. runtime, for a broken driver.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002923 // TODO(dneto): Remove this at some point once fixed drivers are widely
2924 // available.
SJW01901d92020-05-21 08:58:31 -05002925 if (WorkgroupSizeVarID.isValid()) {
2926 assert(WorkgroupSizeValueID.isValid());
David Netob6e2e062018-04-25 10:32:06 -04002927
SJWf93f5f32020-05-05 07:27:56 -05002928 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05002929 Ops << WorkgroupSizeVarID << WorkgroupSizeValueID;
David Netob6e2e062018-04-25 10:32:06 -04002930
SJWf93f5f32020-05-05 07:27:56 -05002931 addSPIRVInst(spv::OpStore, Ops);
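    // Emitted at the start of each kernel as (ids are examples):
    //   OpStore %workgroup_size_var %workgroup_size_const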
David Netob6e2e062018-04-25 10:32:06 -04002932 }
2933}
2934
David Neto22f144c2017-06-12 14:26:21 -04002935void SPIRVProducerPass::GenerateFuncBody(Function &F) {
David Neto22f144c2017-06-12 14:26:21 -04002936 ValueMapType &VMap = getValueMap();
2937
David Netob6e2e062018-04-25 10:32:06 -04002938 const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;
David Neto22f144c2017-06-12 14:26:21 -04002939
2940 for (BasicBlock &BB : F) {
2941 // Register BasicBlock to ValueMap.
David Neto22f144c2017-06-12 14:26:21 -04002942
2943 //
2944 // Generate OpLabel for Basic Block.
2945 //
SJWf93f5f32020-05-05 07:27:56 -05002946 VMap[&BB] = addSPIRVInst(spv::OpLabel);
David Neto22f144c2017-06-12 14:26:21 -04002947
David Neto6dcd4712017-06-23 11:06:47 -04002948 // OpVariable instructions must come first.
2949 for (Instruction &I : BB) {
alan-baker5b86ed72019-02-15 08:26:50 -05002950 if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
2951 // Allocating a pointer requires variable pointers.
2952 if (alloca->getAllocatedType()->isPointerTy()) {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04002953 setVariablePointersCapabilities(
2954 alloca->getAllocatedType()->getPointerAddressSpace());
alan-baker5b86ed72019-02-15 08:26:50 -05002955 }
David Neto6dcd4712017-06-23 11:06:47 -04002956 GenerateInstruction(I);
2957 }
2958 }
2959
David Neto22f144c2017-06-12 14:26:21 -04002960 if (&BB == &F.getEntryBlock() && IsKernel) {
David Netob6e2e062018-04-25 10:32:06 -04002961 if (clspv::Option::HackInitializers()) {
2962 GenerateEntryPointInitialStores();
2963 }
David Neto22f144c2017-06-12 14:26:21 -04002964 }
2965
2966 for (Instruction &I : BB) {
David Neto6dcd4712017-06-23 11:06:47 -04002967 if (!isa<AllocaInst>(I)) {
2968 GenerateInstruction(I);
2969 }
David Neto22f144c2017-06-12 14:26:21 -04002970 }
2971 }
2972}
2973
2974spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
2975 const std::map<CmpInst::Predicate, spv::Op> Map = {
2976 {CmpInst::ICMP_EQ, spv::OpIEqual},
2977 {CmpInst::ICMP_NE, spv::OpINotEqual},
2978 {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
2979 {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
2980 {CmpInst::ICMP_ULT, spv::OpULessThan},
2981 {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
2982 {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
2983 {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
2984 {CmpInst::ICMP_SLT, spv::OpSLessThan},
2985 {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
2986 {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
2987 {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
2988 {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
2989 {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
2990 {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
2991 {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
2992 {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
2993 {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
2994 {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
2995 {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
2996 {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
2997 {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};
2998
2999 assert(0 != Map.count(I->getPredicate()));
3000
3001 return Map.at(I->getPredicate());
3002}
3003
3004spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
3005 const std::map<unsigned, spv::Op> Map{
3006 {Instruction::Trunc, spv::OpUConvert},
3007 {Instruction::ZExt, spv::OpUConvert},
3008 {Instruction::SExt, spv::OpSConvert},
3009 {Instruction::FPToUI, spv::OpConvertFToU},
3010 {Instruction::FPToSI, spv::OpConvertFToS},
3011 {Instruction::UIToFP, spv::OpConvertUToF},
3012 {Instruction::SIToFP, spv::OpConvertSToF},
3013 {Instruction::FPTrunc, spv::OpFConvert},
3014 {Instruction::FPExt, spv::OpFConvert},
3015 {Instruction::BitCast, spv::OpBitcast}};
3016
3017 assert(0 != Map.count(I.getOpcode()));
3018
3019 return Map.at(I.getOpcode());
3020}
3021
3022spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
Kévin Petit24272b62018-10-18 19:16:12 +00003023 if (I.getType()->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003024 switch (I.getOpcode()) {
3025 default:
3026 break;
3027 case Instruction::Or:
3028 return spv::OpLogicalOr;
3029 case Instruction::And:
3030 return spv::OpLogicalAnd;
3031 case Instruction::Xor:
3032 return spv::OpLogicalNotEqual;
3033 }
3034 }
3035
alan-bakerb6b09dc2018-11-08 16:59:28 -05003036 const std::map<unsigned, spv::Op> Map{
David Neto22f144c2017-06-12 14:26:21 -04003037 {Instruction::Add, spv::OpIAdd},
3038 {Instruction::FAdd, spv::OpFAdd},
3039 {Instruction::Sub, spv::OpISub},
3040 {Instruction::FSub, spv::OpFSub},
3041 {Instruction::Mul, spv::OpIMul},
3042 {Instruction::FMul, spv::OpFMul},
3043 {Instruction::UDiv, spv::OpUDiv},
3044 {Instruction::SDiv, spv::OpSDiv},
3045 {Instruction::FDiv, spv::OpFDiv},
3046 {Instruction::URem, spv::OpUMod},
3047 {Instruction::SRem, spv::OpSRem},
3048 {Instruction::FRem, spv::OpFRem},
3049 {Instruction::Or, spv::OpBitwiseOr},
3050 {Instruction::Xor, spv::OpBitwiseXor},
3051 {Instruction::And, spv::OpBitwiseAnd},
3052 {Instruction::Shl, spv::OpShiftLeftLogical},
3053 {Instruction::LShr, spv::OpShiftRightLogical},
3054 {Instruction::AShr, spv::OpShiftRightArithmetic}};
3055
3056 assert(0 != Map.count(I.getOpcode()));
3057
3058 return Map.at(I.getOpcode());
3059}
3060
SJW806a5d82020-07-15 12:51:38 -05003061SPIRVID SPIRVProducerPass::getSPIRVBuiltin(spv::BuiltIn BID,
3062 spv::Capability Cap) {
3063 SPIRVID RID;
3064
3065 auto ii = BuiltinConstantMap.find(BID);
3066
3067 if (ii != BuiltinConstantMap.end()) {
3068 return ii->second;
3069 } else {
SJW806a5d82020-07-15 12:51:38 -05003070 addCapability(Cap);
3071
3072 Type *type = PointerType::get(IntegerType::get(module->getContext(), 32),
3073 AddressSpace::Input);
3074
3075 RID = addSPIRVGlobalVariable(getSPIRVType(type), spv::StorageClassInput);
3076
3077 BuiltinConstantMap[BID] = RID;
3078
3079 //
3080 // Generate OpDecorate.
3081 //
3082 // Ops[0] : target
3083 // Ops[1] : decoration
3084 // Ops[2] : SpecId
3085 SPIRVOperandVec Ops;
3086 Ops << RID << spv::DecorationBuiltIn << static_cast<int>(BID);
3087
3088 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
3089 }
3090
3091 return RID;
3092}
3093
3094SPIRVID
3095SPIRVProducerPass::GenerateClspvInstruction(CallInst *Call,
3096 const FunctionInfo &FuncInfo) {
3097 SPIRVID RID;
3098
3099 switch (FuncInfo.getType()) {
3100 case Builtins::kClspvCompositeConstruct:
3101 RID = addSPIRVPlaceholder(Call);
3102 break;
3103 case Builtins::kClspvResource: {
3104 if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
3105 // Generate an OpLoad
3106 SPIRVOperandVec Ops;
3107
3108 Ops << Call->getType()->getPointerElementType()
3109 << ResourceVarDeferredLoadCalls[Call];
3110
3111 RID = addSPIRVInst(spv::OpLoad, Ops);
3112
3113 } else {
3114 // This maps to an OpVariable we've already generated.
3115 // No code is generated for the call.
3116 }
3117 break;
3118 }
3119 case Builtins::kClspvLocal: {
3120 // Don't codegen an instruction here, but instead map this call directly
3121 // to the workgroup variable id.
3122 int spec_id = static_cast<int>(
3123 cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
3124 const auto &info = LocalSpecIdInfoMap[spec_id];
3125 RID = info.variable_id;
3126 break;
3127 }
3128 case Builtins::kClspvSamplerVarLiteral: {
3129 // Sampler initializers become a load of the corresponding sampler.
3130 // Map this to a load from the variable.
3131 const auto third_param = static_cast<unsigned>(
3132 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue());
3133 auto sampler_value = third_param;
3134 if (clspv::Option::UseSamplerMap()) {
alan-baker3f772c02021-06-15 22:18:11 -04003135 sampler_value = (*getSamplerMap())[third_param].first;
SJW806a5d82020-07-15 12:51:38 -05003136 }
3137
3138 // Generate an OpLoad
3139 SPIRVOperandVec Ops;
3140
3141 Ops << SamplerTy->getPointerElementType()
3142 << SamplerLiteralToIDMap[sampler_value];
3143
3144 RID = addSPIRVInst(spv::OpLoad, Ops);
3145 break;
3146 }
3147 case Builtins::kSpirvAtomicXor: {
3148 // Handle SPIR-V intrinsics
3149 SPIRVOperandVec Ops;
3150
3151 if (!Call->getType()->isVoidTy()) {
3152 Ops << Call->getType();
3153 }
3154
3155 for (unsigned i = 0; i < Call->getNumArgOperands(); i++) {
3156 Ops << Call->getArgOperand(i);
3157 }
3158
3159 RID = addSPIRVInst(spv::OpAtomicXor, Ops);
3160 break;
3161 }
3162 case Builtins::kSpirvOp: {
3163 // Handle SPIR-V intrinsics
3164 auto *arg0 = dyn_cast<ConstantInt>(Call->getArgOperand(0));
3165 spv::Op opcode = static_cast<spv::Op>(arg0->getZExtValue());
3166 if (opcode != spv::OpNop) {
3167 SPIRVOperandVec Ops;
3168
3169 if (!Call->getType()->isVoidTy()) {
3170 Ops << Call->getType();
3171 }
3172
3173 for (unsigned i = 1; i < Call->getNumArgOperands(); i++) {
3174 Ops << Call->getArgOperand(i);
3175 }
3176
3177 RID = addSPIRVInst(opcode, Ops);
3178 }
3179 break;
3180 }
3181 case Builtins::kSpirvCopyMemory: {
3182 //
3183 // Generate OpCopyMemory.
3184 //
3185
3186 // Ops[0] = Dst ID
3187 // Ops[1] = Src ID
3188 // Ops[2] = Memory Access
3189 // Ops[3] = Alignment
3190
alan-baker3f772c02021-06-15 22:18:11 -04003191 const auto volatile_arg = SpvVersion() >= SPIRVVersion::SPIRV_1_4 ? 4 : 3;
3192 auto IsVolatile = dyn_cast<ConstantInt>(Call->getArgOperand(volatile_arg))
3193 ->getZExtValue() != 0;
SJW806a5d82020-07-15 12:51:38 -05003194
3195 auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
3196 : spv::MemoryAccessMaskNone;
3197
3198 auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;
3199
alan-baker3f772c02021-06-15 22:18:11 -04003200 auto DstAlignment =
SJW806a5d82020-07-15 12:51:38 -05003201 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
alan-baker3f772c02021-06-15 22:18:11 -04003202 auto SrcAlignment = DstAlignment;
3203 if (SpvVersion() >= SPIRVVersion::SPIRV_1_4) {
3204 SrcAlignment =
3205 dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue();
3206 }
SJW806a5d82020-07-15 12:51:38 -05003207
alan-baker3f772c02021-06-15 22:18:11 -04003208 // OpCopyMemory only works if the source and destination pointer element
3209 // types map to the same type id. When generating SPIR-V 1.4 or later, this
3210 // may not be the case.
3211 auto dst = Call->getArgOperand(0);
3212 auto src = Call->getArgOperand(1);
3213 auto dst_layout =
3214 PointerRequiresLayout(dst->getType()->getPointerAddressSpace());
3215 auto src_layout =
3216 PointerRequiresLayout(src->getType()->getPointerAddressSpace());
3217 auto dst_id =
3218 getSPIRVType(dst->getType()->getPointerElementType(), dst_layout);
3219 auto src_id =
3220 getSPIRVType(src->getType()->getPointerElementType(), src_layout);
SJW806a5d82020-07-15 12:51:38 -05003221 SPIRVOperandVec Ops;
alan-baker3f772c02021-06-15 22:18:11 -04003222 if (dst_id.get() != src_id.get()) {
3223 assert(Option::SpvVersion() >= SPIRVVersion::SPIRV_1_4);
3224 // Types differ so generate:
3225 // OpLoad
3226 // OpCopyLogical
3227 // OpStore
3228 auto load_type_id =
3229 getSPIRVType(src->getType()->getPointerElementType(), src_layout);
3230 Ops << load_type_id << src << MemoryAccess
3231 << static_cast<uint32_t>(SrcAlignment);
3232 auto load = addSPIRVInst(spv::OpLoad, Ops);
SJW806a5d82020-07-15 12:51:38 -05003233
alan-baker3f772c02021-06-15 22:18:11 -04003234 auto copy_type_id =
3235 getSPIRVType(dst->getType()->getPointerElementType(), dst_layout);
3236 Ops.clear();
3237 Ops << copy_type_id << load;
3238 auto copy = addSPIRVInst(spv::OpCopyLogical, Ops);
3239
3240 Ops.clear();
3241 Ops << dst << copy << MemoryAccess << static_cast<uint32_t>(DstAlignment);
3242 RID = addSPIRVInst(spv::OpStore, Ops);
3243 } else {
3244 Ops << dst << src << MemoryAccess << static_cast<uint32_t>(DstAlignment);
3245 if (SpvVersion() >= SPIRVVersion::SPIRV_1_4) {
3246 Ops << MemoryAccess << static_cast<uint32_t>(SrcAlignment);
3247 }
3248
3249 RID = addSPIRVInst(spv::OpCopyMemory, Ops);
3250 }
SJW806a5d82020-07-15 12:51:38 -05003251 break;
3252 }
3253 default:
3254 llvm_unreachable("Unknown CLSPV Instruction");
3255 break;
3256 }
3257 return RID;
3258}
3259
3260SPIRVID
3261SPIRVProducerPass::GenerateImageInstruction(CallInst *Call,
3262 const FunctionInfo &FuncInfo) {
3263 SPIRVID RID;
3264
alan-baker3f772c02021-06-15 22:18:11 -04003265 auto GetExtendMask = [this](Type *sample_type,
3266 bool is_int_image) -> uint32_t {
3267 if (SpvVersion() >= SPIRVVersion::SPIRV_1_4 &&
3268 sample_type->getScalarType()->isIntegerTy()) {
3269 if (is_int_image)
3270 return spv::ImageOperandsSignExtendMask;
3271 else
3272 return spv::ImageOperandsZeroExtendMask;
3273 }
3274 return 0;
3275 };
3276
SJW806a5d82020-07-15 12:51:38 -05003277 LLVMContext &Context = module->getContext();
3278 switch (FuncInfo.getType()) {
3279 case Builtins::kReadImagef:
3280 case Builtins::kReadImageh:
3281 case Builtins::kReadImagei:
3282 case Builtins::kReadImageui: {
3283 // read_image is converted to OpSampledImage and OpImageSampleExplicitLod.
3284 // Additionally, OpTypeSampledImage is generated.
alan-bakerf6bc8252020-09-23 14:58:55 -04003285 const auto image_ty = Call->getArgOperand(0)->getType();
SJW806a5d82020-07-15 12:51:38 -05003286 const auto &pi = FuncInfo.getParameter(1);
3287 if (pi.isSampler()) {
3288 //
3289 // Generate OpSampledImage.
3290 //
3291 // Ops[0] = Result Type ID
3292 // Ops[1] = Image ID
3293 // Ops[2] = Sampler ID
3294 //
3295 SPIRVOperandVec Ops;
3296
3297 Value *Image = Call->getArgOperand(0);
3298 Value *Sampler = Call->getArgOperand(1);
3299 Value *Coordinate = Call->getArgOperand(2);
3300
3301 TypeMapType &OpImageTypeMap = getImageTypeMap();
3302 Type *ImageTy = Image->getType()->getPointerElementType();
3303 SPIRVID ImageTyID = OpImageTypeMap[ImageTy];
3304
3305 Ops << ImageTyID << Image << Sampler;
3306
3307 SPIRVID SampledImageID = addSPIRVInst(spv::OpSampledImage, Ops);
3308
3309 //
3310 // Generate OpImageSampleExplicitLod.
3311 //
3312 // Ops[0] = Result Type ID
3313 // Ops[1] = Sampled Image ID
3314 // Ops[2] = Coordinate ID
3315 // Ops[3] = Image Operands Type ID
3316 // Ops[4] ... Ops[n] = Operands ID
3317 //
3318 Ops.clear();
3319
3320 const bool is_int_image = IsIntImageType(Image->getType());
3321 SPIRVID result_type;
3322 if (is_int_image) {
3323 result_type = v4int32ID;
3324 } else {
3325 result_type = getSPIRVType(Call->getType());
3326 }
3327
alan-baker3f772c02021-06-15 22:18:11 -04003328 uint32_t mask = spv::ImageOperandsLodMask |
3329 GetExtendMask(Call->getType(), is_int_image);
SJW806a5d82020-07-15 12:51:38 -05003330 Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
alan-baker3f772c02021-06-15 22:18:11 -04003331 Ops << result_type << SampledImageID << Coordinate << mask << CstFP0;
SJW806a5d82020-07-15 12:51:38 -05003332
3333 RID = addSPIRVInst(spv::OpImageSampleExplicitLod, Ops);
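      // Illustrative result for the sampled path (ids are examples; integer
      // images additionally get a Sign/ZeroExtend operand on SPIR-V 1.4+):
      //   %si = OpSampledImage %sampled_image_ty %image %sampler
      //   %r  = OpImageSampleExplicitLod %v4float %si %coord Lod %float_0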
3334
3335 if (is_int_image) {
3336 // Generate the bitcast.
3337 Ops.clear();
3338 Ops << Call->getType() << RID;
3339 RID = addSPIRVInst(spv::OpBitcast, Ops);
3340 }
alan-bakerf6bc8252020-09-23 14:58:55 -04003341 } else if (IsStorageImageType(image_ty)) {
3342 // read_image on a storage image is mapped to OpImageRead.
3343 Value *Image = Call->getArgOperand(0);
3344 Value *Coordinate = Call->getArgOperand(1);
3345
3346 //
3347 // Generate OpImageRead
3348 //
3349 // Ops[0] = Result Type ID
3350 // Ops[1] = Image ID
3351 // Ops[2] = Coordinate
3352 // No optional image operands.
3353 //
3354 SPIRVOperandVec Ops;
3355
3356 const bool is_int_image = IsIntImageType(Image->getType());
3357 SPIRVID result_type;
3358 if (is_int_image) {
3359 result_type = v4int32ID;
3360 } else {
3361 result_type = getSPIRVType(Call->getType());
3362 }
3363
3364 Ops << result_type << Image << Coordinate;
alan-baker3f772c02021-06-15 22:18:11 -04003365 uint32_t mask = GetExtendMask(Call->getType(), is_int_image);
3366 if (mask != 0)
3367 Ops << mask;
alan-bakerf6bc8252020-09-23 14:58:55 -04003368 RID = addSPIRVInst(spv::OpImageRead, Ops);
3369
3370 if (is_int_image) {
3371 // Generate the bitcast.
3372 Ops.clear();
3373 Ops << Call->getType() << RID;
3374 RID = addSPIRVInst(spv::OpBitcast, Ops);
3375 }
3376
3377 // OpImageRead requires StorageImageReadWithoutFormat.
3378 addCapability(spv::CapabilityStorageImageReadWithoutFormat);
SJW806a5d82020-07-15 12:51:38 -05003379 } else {
alan-bakerf6bc8252020-09-23 14:58:55 -04003380 // read_image on a sampled image (without a sampler) is mapped to
3381 // OpImageFetch.
SJW806a5d82020-07-15 12:51:38 -05003382 Value *Image = Call->getArgOperand(0);
3383 Value *Coordinate = Call->getArgOperand(1);
3384
3385 //
3386 // Generate OpImageFetch
3387 //
3388 // Ops[0] = Result Type ID
3389 // Ops[1] = Image ID
3390 // Ops[2] = Coordinate ID
3391 // Ops[3] = Lod
3392 // Ops[4] = 0
3393 //
3394 SPIRVOperandVec Ops;
3395
3396 const bool is_int_image = IsIntImageType(Image->getType());
3397 SPIRVID result_type;
3398 if (is_int_image) {
3399 result_type = v4int32ID;
3400 } else {
3401 result_type = getSPIRVType(Call->getType());
3402 }
3403
alan-baker3f772c02021-06-15 22:18:11 -04003404 uint32_t mask = spv::ImageOperandsLodMask |
3405 GetExtendMask(Call->getType(), is_int_image);
3406 Ops << result_type << Image << Coordinate << mask
SJW806a5d82020-07-15 12:51:38 -05003407 << getSPIRVInt32Constant(0);
3408
3409 RID = addSPIRVInst(spv::OpImageFetch, Ops);
3410
3411 if (is_int_image) {
3412 // Generate the bitcast.
3413 Ops.clear();
3414 Ops << Call->getType() << RID;
3415 RID = addSPIRVInst(spv::OpBitcast, Ops);
3416 }
3417 }
3418 break;
3419 }
3420
3421 case Builtins::kWriteImagef:
3422 case Builtins::kWriteImageh:
3423 case Builtins::kWriteImagei:
3424 case Builtins::kWriteImageui: {
3425 // write_image is mapped to OpImageWrite.
3426 //
3427 // Generate OpImageWrite.
3428 //
3429 // Ops[0] = Image ID
3430 // Ops[1] = Coordinate ID
3431 // Ops[2] = Texel ID
3432 // Ops[3] = (Optional) Image Operands Type (Literal Number)
3433 // Ops[4] ... Ops[n] = (Optional) Operands ID
3434 //
3435 SPIRVOperandVec Ops;
3436
3437 Value *Image = Call->getArgOperand(0);
3438 Value *Coordinate = Call->getArgOperand(1);
3439 Value *Texel = Call->getArgOperand(2);
3440
3441 SPIRVID TexelID = getSPIRVValue(Texel);
3442
3443 const bool is_int_image = IsIntImageType(Image->getType());
3444 if (is_int_image) {
3445 // Generate a bitcast to v4int and use it as the texel value.
3446 Ops << v4int32ID << TexelID;
3447 TexelID = addSPIRVInst(spv::OpBitcast, Ops);
3448 Ops.clear();
3449 }
3450 Ops << Image << Coordinate << TexelID;
alan-baker3f772c02021-06-15 22:18:11 -04003451 uint32_t mask = GetExtendMask(Texel->getType(), is_int_image);
3452 if (mask != 0)
3453 Ops << mask;
SJW806a5d82020-07-15 12:51:38 -05003454 RID = addSPIRVInst(spv::OpImageWrite, Ops);
alan-bakerf6bc8252020-09-23 14:58:55 -04003455
3456 // Image writes require StorageImageWriteWithoutFormat.
3457 addCapability(spv::CapabilityStorageImageWriteWithoutFormat);
SJW806a5d82020-07-15 12:51:38 -05003458 break;
3459 }
3460
3461 case Builtins::kGetImageHeight:
3462 case Builtins::kGetImageWidth:
3463 case Builtins::kGetImageDepth:
3464 case Builtins::kGetImageDim: {
3465 // get_image_* is mapped to OpImageQuerySize or OpImageQuerySizeLod
3466 addCapability(spv::CapabilityImageQuery);
3467
3468 //
3469 // Generate OpImageQuerySize[Lod]
3470 //
3471 // Ops[0] = Image ID
3472 //
3473 // Result type has components equal to the dimensionality of the image,
3474 // plus 1 if the image is arrayed.
3475 //
3476 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
3477 SPIRVOperandVec Ops;
3478
3479 // Implement:
3480 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
3481 SPIRVID SizesTypeID;
3482
3483 Value *Image = Call->getArgOperand(0);
3484 const uint32_t dim = ImageDimensionality(Image->getType());
3485 const uint32_t components =
3486 dim + (IsArrayImageType(Image->getType()) ? 1 : 0);
3487 if (components == 1) {
3488 SizesTypeID = getSPIRVType(Type::getInt32Ty(Context));
3489 } else {
3490 SizesTypeID = getSPIRVType(
3491 FixedVectorType::get(Type::getInt32Ty(Context), components));
3492 }
3493 Ops << SizesTypeID << Image;
3494 spv::Op query_opcode = spv::OpImageQuerySize;
3495 if (IsSampledImageType(Image->getType())) {
3496 query_opcode = spv::OpImageQuerySizeLod;
3497 // Need explicit 0 for Lod operand.
3498 Ops << getSPIRVInt32Constant(0);
3499 }
3500
3501 RID = addSPIRVInst(query_opcode, Ops);
3502
3503 // May require an extra instruction to create the appropriate result of
3504 // the builtin function.
3505 if (FuncInfo.getType() == Builtins::kGetImageDim) {
3506 if (dim == 3) {
3507 // get_image_dim returns an int4 for 3D images.
3508 //
3509
3510 // Implement:
3511 // %result = OpCompositeConstruct %uint4 %sizes %uint_0
3512 Ops.clear();
3513 Ops << FixedVectorType::get(Type::getInt32Ty(Context), 4) << RID
3514 << getSPIRVInt32Constant(0);
3515
3516 RID = addSPIRVInst(spv::OpCompositeConstruct, Ops);
3517 } else if (dim != components) {
3518 // get_image_dim returns an int2 regardless of the arrayedness of the
3519 // image. If the image is arrayed an element must be dropped from the
3520 // query result.
3521 //
3522
3523 // Implement:
3524 // %result = OpVectorShuffle %uint2 %sizes %sizes 0 1
3525 Ops.clear();
3526 Ops << FixedVectorType::get(Type::getInt32Ty(Context), 2) << RID << RID
3527 << 0 << 1;
3528
3529 RID = addSPIRVInst(spv::OpVectorShuffle, Ops);
3530 }
3531 } else if (components > 1) {
3532 // Implement:
3533 // %result = OpCompositeExtract %uint %sizes <component number>
3534 Ops.clear();
3535 Ops << Call->getType() << RID;
3536
3537 uint32_t component = 0;
3538 if (FuncInfo.getType() == Builtins::kGetImageHeight)
3539 component = 1;
3540 else if (FuncInfo.getType() == Builtins::kGetImageDepth)
3541 component = 2;
3542 Ops << component;
3543
3544 RID = addSPIRVInst(spv::OpCompositeExtract, Ops);
3545 }
3546 break;
3547 }
3548 default:
3549 llvm_unreachable("Unsupported Image builtin");
3550 }
3551
3552 return RID;
3553}
3554
3555SPIRVID
3556SPIRVProducerPass::GenerateSubgroupInstruction(CallInst *Call,
3557 const FunctionInfo &FuncInfo) {
3558 SPIRVID RID;
3559
3560 // requires SPIRV version 1.3 or greater
3561 if (SpvVersion() < SPIRVVersion::SPIRV_1_3) {
3562 // llvm_unreachable("SubGroups extension requires SPIRV 1.3 or greater");
3563 // TODO(sjw): error out gracefully
3564 }
3565
3566 auto loadBuiltin = [this, Call](spv::BuiltIn spvBI,
3567 spv::Capability spvCap =
3568 spv::CapabilityGroupNonUniform) {
3569 SPIRVOperandVec Ops;
3570 Ops << Call->getType() << this->getSPIRVBuiltin(spvBI, spvCap);
3571
3572 return addSPIRVInst(spv::OpLoad, Ops);
3573 };
3574
3575 spv::Op op = spv::OpNop;
3576 switch (FuncInfo.getType()) {
3577 case Builtins::kGetSubGroupSize:
3578 return loadBuiltin(spv::BuiltInSubgroupSize);
3579 case Builtins::kGetNumSubGroups:
3580 return loadBuiltin(spv::BuiltInNumSubgroups);
3581 case Builtins::kGetSubGroupId:
3582 return loadBuiltin(spv::BuiltInSubgroupId);
3583 case Builtins::kGetSubGroupLocalId:
3584 return loadBuiltin(spv::BuiltInSubgroupLocalInvocationId);
3585
3586 case Builtins::kSubGroupBroadcast:
3587 if (SpvVersion() < SPIRVVersion::SPIRV_1_5 &&
3588 !dyn_cast<ConstantInt>(Call->getOperand(1))) {
3589 llvm_unreachable("sub_group_broadcast requires constant lane Id for "
3590 "SPIRV version < 1.5");
3591 }
3592 addCapability(spv::CapabilityGroupNonUniformBallot);
3593 op = spv::OpGroupNonUniformBroadcast;
3594 break;
3595
3596 case Builtins::kSubGroupAll:
3597 addCapability(spv::CapabilityGroupNonUniformVote);
3598 op = spv::OpGroupNonUniformAll;
3599 break;
3600 case Builtins::kSubGroupAny:
3601 addCapability(spv::CapabilityGroupNonUniformVote);
3602 op = spv::OpGroupNonUniformAny;
3603 break;
3604 case Builtins::kSubGroupReduceAdd:
3605 case Builtins::kSubGroupScanExclusiveAdd:
3606 case Builtins::kSubGroupScanInclusiveAdd: {
3607 addCapability(spv::CapabilityGroupNonUniformArithmetic);
3608 if (FuncInfo.getParameter(0).type_id == Type::IntegerTyID) {
3609 op = spv::OpGroupNonUniformIAdd;
3610 } else {
3611 op = spv::OpGroupNonUniformFAdd;
3612 }
3613 break;
3614 }
3615 case Builtins::kSubGroupReduceMin:
3616 case Builtins::kSubGroupScanExclusiveMin:
3617 case Builtins::kSubGroupScanInclusiveMin: {
3618 addCapability(spv::CapabilityGroupNonUniformArithmetic);
3619 auto &param = FuncInfo.getParameter(0);
3620 if (param.type_id == Type::IntegerTyID) {
3621 op = param.is_signed ? spv::OpGroupNonUniformSMin
3622 : spv::OpGroupNonUniformUMin;
3623 } else {
3624 op = spv::OpGroupNonUniformFMin;
3625 }
3626 break;
3627 }
3628 case Builtins::kSubGroupReduceMax:
3629 case Builtins::kSubGroupScanExclusiveMax:
3630 case Builtins::kSubGroupScanInclusiveMax: {
3631 addCapability(spv::CapabilityGroupNonUniformArithmetic);
3632 auto &param = FuncInfo.getParameter(0);
3633 if (param.type_id == Type::IntegerTyID) {
3634 op = param.is_signed ? spv::OpGroupNonUniformSMax
3635 : spv::OpGroupNonUniformUMax;
3636 } else {
3637 op = spv::OpGroupNonUniformFMax;
3638 }
3639 break;
3640 }
3641
3642 case Builtins::kGetEnqueuedNumSubGroups:
3643 // TODO(sjw): requires CapabilityKernel (incompatible with Shader)
3644 case Builtins::kGetMaxSubGroupSize:
3645 // TODO(sjw): use SpecConstant, capability Kernel (incompatible with Shader)
3646 case Builtins::kSubGroupBarrier:
3647 case Builtins::kSubGroupReserveReadPipe:
3648 case Builtins::kSubGroupReserveWritePipe:
3649 case Builtins::kSubGroupCommitReadPipe:
3650 case Builtins::kSubGroupCommitWritePipe:
3651 case Builtins::kGetKernelSubGroupCountForNdrange:
3652 case Builtins::kGetKernelMaxSubGroupSizeForNdrange:
3653 default:
3654 Call->print(errs());
3655 llvm_unreachable("Unsupported sub_group operation");
3656 break;
3657 }
3658
3659 assert(op != spv::OpNop);
3660
3661 SPIRVOperandVec Operands;
3662
3663 //
3664 // Generate OpGroupNonUniform*
3665 //
3666 // Ops[0] = Result Type ID
3667 // Ops[1] = ScopeSubgroup
3668 // Ops[2] = Value ID
3669 // Ops[3] = Local ID
3670
3671 // The result type.
3672 Operands << Call->getType();
3673
3674 // Subgroup Scope
3675 Operands << getSPIRVInt32Constant(spv::ScopeSubgroup);
3676
3677 switch (FuncInfo.getType()) {
3678 case Builtins::kSubGroupReduceAdd:
3679 case Builtins::kSubGroupReduceMin:
3680 case Builtins::kSubGroupReduceMax:
3681 Operands << spv::GroupOperationReduce;
3682 break;
3683 case Builtins::kSubGroupScanExclusiveAdd:
3684 case Builtins::kSubGroupScanExclusiveMin:
3685 case Builtins::kSubGroupScanExclusiveMax:
3686 Operands << spv::GroupOperationExclusiveScan;
3687 break;
3688 case Builtins::kSubGroupScanInclusiveAdd:
3689 case Builtins::kSubGroupScanInclusiveMin:
3690 case Builtins::kSubGroupScanInclusiveMax:
3691 Operands << spv::GroupOperationInclusiveScan;
3692 break;
3693 default:
3694 break;
3695 }
3696
3697 for (Use &use : Call->arg_operands()) {
3698 Operands << use.get();
3699 }
3700
3701 return addSPIRVInst(op, Operands);
3702}
3703
3704SPIRVID SPIRVProducerPass::GenerateInstructionFromCall(CallInst *Call) {
3705 LLVMContext &Context = module->getContext();
3706
3707 auto &func_info = Builtins::Lookup(Call->getCalledFunction());
3708 auto func_type = func_info.getType();
3709
3710 if (BUILTIN_IN_GROUP(func_type, Clspv)) {
3711 return GenerateClspvInstruction(Call, func_info);
3712 } else if (BUILTIN_IN_GROUP(func_type, Image)) {
3713 return GenerateImageInstruction(Call, func_info);
3714 } else if (BUILTIN_IN_GROUP(func_type, SubgroupsKHR)) {
3715 return GenerateSubgroupInstruction(Call, func_info);
3716 }
3717
3718 SPIRVID RID;
3719
alan-baker5f2e88e2020-12-07 15:24:04 -05003720 switch (Call->getCalledFunction()->getIntrinsicID()) {
3721 case Intrinsic::ctlz: {
3722 // Implement as 31 - FindUMsb. Ignore the second operand of llvm.ctlz.
3723 SPIRVOperandVec Ops;
3724 Ops << Call->getType() << getOpExtInstImportID()
3725 << glsl::ExtInst::ExtInstFindUMsb << Call->getArgOperand(0);
3726 auto find_msb = addSPIRVInst(spv::OpExtInst, Ops);
3727
3728 Constant *thirty_one = ConstantInt::get(
3729 Call->getType(), Call->getType()->getScalarSizeInBits() - 1);
3730 Ops.clear();
3731 Ops << Call->getType() << thirty_one << find_msb;
3732 return addSPIRVInst(spv::OpISub, Ops);
3733 }
3734 case Intrinsic::cttz: {
3735 // Implement as:
3736 // lsb = FindILsb x
3737 // res = lsb == -1 ? width : lsb
3738 //
3739 // Ignore the second operand of llvm.cttz.
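    // Illustrative sketch (assumed result names) for an i32 input %x:
    //   %lsb = OpExtInst %uint %glsl FindILsb %x
    //   %cmp = OpIEqual %bool %lsb %uint_4294967295  ; lsb == -1
    //   %res = OpSelect %uint %cmp %uint_32 %lsb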
3740 SPIRVOperandVec Ops;
3741 Ops << Call->getType() << getOpExtInstImportID()
3742 << glsl::ExtInst::ExtInstFindILsb << Call->getArgOperand(0);
3743 auto find_lsb = addSPIRVInst(spv::OpExtInst, Ops);
3744
3745 auto neg_one = Constant::getAllOnesValue(Call->getType());
3746 auto i1_ty = Call->getType()->getWithNewBitWidth(1);
3747 auto width = ConstantInt::get(Call->getType(),
3748 Call->getType()->getScalarSizeInBits());
3749
3750 Ops.clear();
3751 Ops << i1_ty << find_lsb << neg_one;
3752 auto cmp = addSPIRVInst(spv::OpIEqual, Ops);
3753
3754 Ops.clear();
3755 Ops << Call->getType() << cmp << width << find_lsb;
3756 return addSPIRVInst(spv::OpSelect, Ops);
3757 }
3758
3759 default:
3760 break;
3761 }
3762
SJW806a5d82020-07-15 12:51:38 -05003763 switch (func_type) {
3764 case Builtins::kPopcount: {
3765 //
3766 // Generate OpBitCount
3767 //
3768 // Ops[0] = Result Type ID
3769 // Ops[1] = Base ID
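    // e.g. popcount(x) on a 32-bit int is assumed to become:
    //   %r = OpBitCount %uint %x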
3770 SPIRVOperandVec Ops;
3771 Ops << Call->getType() << Call->getOperand(0);
3772
3773 RID = addSPIRVInst(spv::OpBitCount, Ops);
3774 break;
3775 }
3776 default: {
3777 glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(func_info);
3778
alan-baker5f2e88e2020-12-07 15:24:04 -05003779 // Do not replace functions with implementations.
3780 if (EInst && Call->getCalledFunction()->isDeclaration()) {
SJW806a5d82020-07-15 12:51:38 -05003781 SPIRVID ExtInstImportID = getOpExtInstImportID();
3782
3783 //
3784 // Generate OpExtInst.
3785 //
3786
3787 // Ops[0] = Result Type ID
3788 // Ops[1] = Set ID (OpExtInstImport ID)
3789 // Ops[2] = Instruction Number (Literal Number)
3790 // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
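      // For example, a call to clamp() on floats is assumed to become roughly
      // (operand names illustrative):
      //   %r = OpExtInst %float %glsl NClamp %x %minval %maxval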
3791 SPIRVOperandVec Ops;
3792
3793 Ops << Call->getType() << ExtInstImportID << EInst;
3794
3795 for (auto &use : Call->arg_operands()) {
3796 Ops << use.get();
3797 }
3798
3799 RID = addSPIRVInst(spv::OpExtInst, Ops);
3800
3801 const auto IndirectExtInst = getIndirectExtInstEnum(func_info);
3802 if (IndirectExtInst != kGlslExtInstBad) {
SJW806a5d82020-07-15 12:51:38 -05003803 // Generate one more instruction that uses the result of the extended
3804 // instruction. Its result id is one more than the id of the
3805 // extended instruction.
3806 auto generate_extra_inst = [this, &Context, &Call,
3807 &RID](spv::Op opcode, Constant *constant) {
3808 //
3809 // Generate instruction like:
3810 // result = opcode constant <extinst-result>
3811 //
3812 // Ops[0] = Result Type ID
3813 // Ops[1] = Operand 0 ;; the constant, suitably splatted
3814 // Ops[2] = Operand 1 ;; the result of the extended instruction
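        // For example, acospi(x) is assumed to lower roughly as:
        //   %acos = OpExtInst %float %glsl Acos %x
        //   %res  = OpFMul %float %float_0_31830987 %acos  ; constant is 1/pi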
3815 SPIRVOperandVec Ops;
3816
3817 Type *resultTy = Call->getType();
3818
3819 if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
alan-baker931253b2020-08-20 17:15:38 -04003820 constant =
3821 ConstantVector::getSplat(vectorTy->getElementCount(), constant);
SJW806a5d82020-07-15 12:51:38 -05003822 }
3823 Ops << resultTy << constant << RID;
3824
3825 RID = addSPIRVInst(opcode, Ops);
3826 };
3827
SJW806a5d82020-07-15 12:51:38 -05003828 switch (IndirectExtInst) {
SJW806a5d82020-07-15 12:51:38 -05003829 case glsl::ExtInstAcos: // Implementing acospi
3830 case glsl::ExtInstAsin: // Implementing asinpi
3831 case glsl::ExtInstAtan: // Implementing atanpi
3832 case glsl::ExtInstAtan2: // Implementing atan2pi
3833 generate_extra_inst(
3834 spv::OpFMul,
alan-bakercc2bafb2020-11-02 08:30:18 -05003835 ConstantFP::get(Call->getType()->getScalarType(), kOneOverPi));
SJW806a5d82020-07-15 12:51:38 -05003836 break;
3837
3838 default:
3839 assert(false && "internally inconsistent");
3840 }
3841 }
3842 } else {
Pedro Olsen Ferreira208d1e72021-06-17 19:24:48 +01003843 switch (Call->getIntrinsicID()) {
3844 // These LLVM intrinsics have no SPV equivalent.
3845 // Because they are optimiser hints, we can safely discard them.
3846 case Intrinsic::experimental_noalias_scope_decl:
3847 break;
3848 default:
3849 // A real function call (not builtin)
3850 // Call instruction is deferred because it needs function's ID.
3851 RID = addSPIRVPlaceholder(Call);
3852 break;
3853 }
SJW806a5d82020-07-15 12:51:38 -05003854 }
3855
3856 break;
3857 }
3858 }
3859
3860 return RID;
3861}
3862
David Neto22f144c2017-06-12 14:26:21 -04003863void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
David Neto22f144c2017-06-12 14:26:21 -04003864 ValueMapType &VMap = getValueMap();
SJW806a5d82020-07-15 12:51:38 -05003865 LLVMContext &Context = module->getContext();
David Neto22f144c2017-06-12 14:26:21 -04003866
SJW806a5d82020-07-15 12:51:38 -05003867 SPIRVID RID;
David Neto22f144c2017-06-12 14:26:21 -04003868
3869 switch (I.getOpcode()) {
3870 default: {
3871 if (Instruction::isCast(I.getOpcode())) {
3872 //
3873 // Generate SPIRV instructions for cast operators.
3874 //
3875
David Netod2de94a2017-08-28 17:27:47 -04003876 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003877 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04003878 auto toI8 = Ty == Type::getInt8Ty(Context);
3879 auto fromI32 = OpTy == Type::getInt32Ty(Context);
James Price757dea82021-01-11 13:42:39 -05003880 // Handle zext, sext, uitofp, and sitofp with i1 type specially.
David Neto22f144c2017-06-12 14:26:21 -04003881 if ((I.getOpcode() == Instruction::ZExt ||
3882 I.getOpcode() == Instruction::SExt ||
James Price757dea82021-01-11 13:42:39 -05003883 I.getOpcode() == Instruction::UIToFP ||
3884 I.getOpcode() == Instruction::SIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05003885 OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003886 //
3887 // Generate OpSelect.
3888 //
3889
3890 // Ops[0] = Result Type ID
3891 // Ops[1] = Condition ID
3892 // Ops[2] = True Constant ID
3893 // Ops[3] = False Constant ID
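        // e.g. zext i1 %c to i32 is assumed to become:
        //   %r = OpSelect %uint %c %uint_1 %uint_0
        // and sitofp i1 %c to float selects between -1.0 and 0.0.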
SJWf93f5f32020-05-05 07:27:56 -05003894 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003895
SJW01901d92020-05-21 08:58:31 -05003896 Ops << I.getType() << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04003897
David Neto22f144c2017-06-12 14:26:21 -04003898 if (I.getOpcode() == Instruction::ZExt) {
SJW01901d92020-05-21 08:58:31 -05003899 Ops << ConstantInt::get(I.getType(), 1);
David Neto22f144c2017-06-12 14:26:21 -04003900 } else if (I.getOpcode() == Instruction::SExt) {
SJW01901d92020-05-21 08:58:31 -05003901 Ops << ConstantInt::getSigned(I.getType(), -1);
James Price757dea82021-01-11 13:42:39 -05003902 } else if (I.getOpcode() == Instruction::UIToFP) {
James Price96bd3d92020-11-23 09:01:57 -05003903 Ops << ConstantFP::get(I.getType(), 1.0);
James Price757dea82021-01-11 13:42:39 -05003904 } else if (I.getOpcode() == Instruction::SIToFP) {
3905 Ops << ConstantFP::get(I.getType(), -1.0);
David Neto22f144c2017-06-12 14:26:21 -04003906 }
David Neto22f144c2017-06-12 14:26:21 -04003907
David Neto22f144c2017-06-12 14:26:21 -04003908 if (I.getOpcode() == Instruction::ZExt) {
SJW01901d92020-05-21 08:58:31 -05003909 Ops << Constant::getNullValue(I.getType());
David Neto22f144c2017-06-12 14:26:21 -04003910 } else if (I.getOpcode() == Instruction::SExt) {
SJW01901d92020-05-21 08:58:31 -05003911 Ops << Constant::getNullValue(I.getType());
David Neto22f144c2017-06-12 14:26:21 -04003912 } else {
James Price96bd3d92020-11-23 09:01:57 -05003913 Ops << ConstantFP::get(I.getType(), 0.0);
David Neto22f144c2017-06-12 14:26:21 -04003914 }
David Neto22f144c2017-06-12 14:26:21 -04003915
SJWf93f5f32020-05-05 07:27:56 -05003916 RID = addSPIRVInst(spv::OpSelect, Ops);
alan-bakerb39c8262019-03-08 14:03:37 -05003917 } else if (!clspv::Option::Int8Support() &&
3918 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04003919 // The SPIR-V target type is a 32-bit int. Keep only the bottom
3920 // 8 bits.
3921 // Before:
3922 // %result = trunc i32 %a to i8
3923 // After
3924 // %result = OpBitwiseAnd %uint %a %uint_255
3925
SJWf93f5f32020-05-05 07:27:56 -05003926 SPIRVOperandVec Ops;
David Netod2de94a2017-08-28 17:27:47 -04003927
SJW806a5d82020-07-15 12:51:38 -05003928 Ops << OpTy << I.getOperand(0) << getSPIRVInt32Constant(255);
David Netod2de94a2017-08-28 17:27:47 -04003929
SJWf93f5f32020-05-05 07:27:56 -05003930 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003931 } else {
3932 // Ops[0] = Result Type ID
3933 // Ops[1] = Source Value ID
SJWf93f5f32020-05-05 07:27:56 -05003934 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003935
SJW01901d92020-05-21 08:58:31 -05003936 Ops << I.getType() << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04003937
SJWf93f5f32020-05-05 07:27:56 -05003938 RID = addSPIRVInst(GetSPIRVCastOpcode(I), Ops);
David Neto22f144c2017-06-12 14:26:21 -04003939 }
3940 } else if (isa<BinaryOperator>(I)) {
3941 //
3942 // Generate SPIRV instructions for binary operators.
3943 //
3944
3945 // Handle xor with i1 type specially.
3946 if (I.getOpcode() == Instruction::Xor &&
3947 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00003948 ((isa<ConstantInt>(I.getOperand(0)) &&
3949 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
3950 (isa<ConstantInt>(I.getOperand(1)) &&
3951 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04003952 //
3953 // Generate OpLogicalNot.
3954 //
3955 // Ops[0] = Result Type ID
3956 // Ops[1] = Operand
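        // e.g. %r = xor i1 %p, true is assumed to become:
        //   %r = OpLogicalNot %bool %p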
SJWf93f5f32020-05-05 07:27:56 -05003957 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003958
SJW01901d92020-05-21 08:58:31 -05003959 Ops << I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003960
3961 Value *CondV = I.getOperand(0);
3962 if (isa<Constant>(I.getOperand(0))) {
3963 CondV = I.getOperand(1);
3964 }
SJW01901d92020-05-21 08:58:31 -05003965 Ops << CondV;
David Neto22f144c2017-06-12 14:26:21 -04003966
SJWf93f5f32020-05-05 07:27:56 -05003967 RID = addSPIRVInst(spv::OpLogicalNot, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003968 } else {
3969 // Ops[0] = Result Type ID
3970 // Ops[1] = Operand 0
3971 // Ops[2] = Operand 1
SJWf93f5f32020-05-05 07:27:56 -05003972 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003973
SJW01901d92020-05-21 08:58:31 -05003974 Ops << I.getType() << I.getOperand(0) << I.getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04003975
SJWf93f5f32020-05-05 07:27:56 -05003976 RID = addSPIRVInst(GetSPIRVBinaryOpcode(I), Ops);
David Neto22f144c2017-06-12 14:26:21 -04003977 }
alan-bakerc9c55ae2019-12-02 16:01:27 -05003978 } else if (I.getOpcode() == Instruction::FNeg) {
3979 // The only unary operator.
3980 //
3981 // Ops[0] = Result Type ID
3982 // Ops[1] = Operand 0
SJW01901d92020-05-21 08:58:31 -05003983 SPIRVOperandVec Ops;
alan-bakerc9c55ae2019-12-02 16:01:27 -05003984
SJW01901d92020-05-21 08:58:31 -05003985 Ops << I.getType() << I.getOperand(0);
3986 RID = addSPIRVInst(spv::OpFNegate, Ops);
Marco Antognini68e5c512020-09-09 16:08:57 +01003987 } else if (I.getOpcode() == Instruction::Unreachable) {
3988 RID = addSPIRVInst(spv::OpUnreachable);
David Neto22f144c2017-06-12 14:26:21 -04003989 } else {
3990 I.print(errs());
3991 llvm_unreachable("Unsupported instruction???");
3992 }
3993 break;
3994 }
3995 case Instruction::GetElementPtr: {
3996 auto &GlobalConstArgSet = getGlobalConstArgSet();
3997
3998 //
3999 // Generate OpAccessChain.
4000 //
4001 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
4002
4006
4007 // Ops[0] = Result Type ID
4008 // Ops[1] = Base ID
4009 // Ops[2] ... Ops[n] = Indexes ID
SJWf93f5f32020-05-05 07:27:56 -05004010 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004011
alan-bakerb6b09dc2018-11-08 16:59:28 -05004012 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04004013 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
4014 GlobalConstArgSet.count(GEP->getPointerOperand())) {
4015 // Use pointer type with private address space for global constant.
4016 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04004017 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04004018 }
David Neto257c3892018-04-11 13:19:45 -04004019
SJW01901d92020-05-21 08:58:31 -05004020 Ops << ResultType;
David Neto22f144c2017-06-12 14:26:21 -04004021
David Neto862b7d82018-06-14 18:48:37 -04004022 // Generate the base pointer.
SJW01901d92020-05-21 08:58:31 -05004023 Ops << GEP->getPointerOperand();
David Neto22f144c2017-06-12 14:26:21 -04004024
David Neto862b7d82018-06-14 18:48:37 -04004025 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04004026
4027 //
4028      // The gep is translated using the following rules; an example follows
          // the list.
4029 //
David Neto862b7d82018-06-14 18:48:37 -04004030    // 1. If gep's first index is 0, generate OpAccessChain and ignore gep's
4031 // first index.
David Neto22f144c2017-06-12 14:26:21 -04004032 // 2. If gep's first index is not 0, generate OpPtrAccessChain and use gep's
4033 // first index.
4034 // 3. If gep's first index is not constant, generate OpPtrAccessChain and
4035 // use gep's first index.
4036    // 4. Otherwise (none of cases 1, 2 and 3 apply), generate OpAccessChain
4037    //    and use gep's first index.
4038 //
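    // For example, a gep whose first index is the constant 0, such as
    //   %p = getelementptr %struct.S, %struct.S* %base, i32 0, i32 2
    // is assumed to become an OpAccessChain using only the %uint_2 index,
    // while a non-zero or non-constant first index yields an OpPtrAccessChain
    // that keeps the first index.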
4039 spv::Op Opcode = spv::OpAccessChain;
4040 unsigned offset = 0;
4041 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04004042 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04004043 offset = 1;
David Neto862b7d82018-06-14 18:48:37 -04004044 } else if (CstInt->getZExtValue() != 0) {
David Neto22f144c2017-06-12 14:26:21 -04004045 Opcode = spv::OpPtrAccessChain;
David Neto22f144c2017-06-12 14:26:21 -04004046 }
David Neto862b7d82018-06-14 18:48:37 -04004047 } else {
David Neto22f144c2017-06-12 14:26:21 -04004048 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04004049 }
4050
4051 if (Opcode == spv::OpPtrAccessChain) {
alan-baker7506abb2020-09-10 15:02:55 -04004052 // Shader validation in the SPIR-V spec requires that the base pointer to
4053 // OpPtrAccessChain (in StorageBuffer storage class) be decorated with
4054 // ArrayStride.
alan-baker5b86ed72019-02-15 08:26:50 -05004055 auto address_space = ResultType->getAddressSpace();
4056 setVariablePointersCapabilities(address_space);
4057 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04004058 case spv::StorageClassStorageBuffer:
David Neto1a1a0582017-07-07 12:01:44 -04004059 // Save the need to generate an ArrayStride decoration. But defer
4060 // generation until later, so we only make one decoration.
alan-baker7506abb2020-09-10 15:02:55 -04004061 getTypesNeedingArrayStride().insert(GEP->getPointerOperandType());
4062 break;
4063 case spv::StorageClassWorkgroup:
Alan Bakerfcda9482018-10-02 17:09:59 -04004064 break;
4065 default:
alan-baker7506abb2020-09-10 15:02:55 -04004066 llvm_unreachable(
4067 "OpPtrAccessChain is not supported for this storage class");
Alan Bakerfcda9482018-10-02 17:09:59 -04004068 break;
David Neto1a1a0582017-07-07 12:01:44 -04004069 }
David Neto22f144c2017-06-12 14:26:21 -04004070 }
4071
4072 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
SJW01901d92020-05-21 08:58:31 -05004073 Ops << *II;
David Neto22f144c2017-06-12 14:26:21 -04004074 }
4075
SJWf93f5f32020-05-05 07:27:56 -05004076 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004077 break;
4078 }
4079 case Instruction::ExtractValue: {
4080 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
4081 // Ops[0] = Result Type ID
4082 // Ops[1] = Composite ID
4083 // Ops[2] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004084 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004085
SJW01901d92020-05-21 08:58:31 -05004086 Ops << I.getType();
David Neto22f144c2017-06-12 14:26:21 -04004087
SJW01901d92020-05-21 08:58:31 -05004088 Ops << EVI->getAggregateOperand();
David Neto22f144c2017-06-12 14:26:21 -04004089
4090 for (auto &Index : EVI->indices()) {
SJW01901d92020-05-21 08:58:31 -05004091 Ops << Index;
David Neto22f144c2017-06-12 14:26:21 -04004092 }
4093
SJWf93f5f32020-05-05 07:27:56 -05004094 RID = addSPIRVInst(spv::OpCompositeExtract, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004095 break;
4096 }
4097 case Instruction::InsertValue: {
4098 InsertValueInst *IVI = cast<InsertValueInst>(&I);
4099 // Ops[0] = Result Type ID
4100 // Ops[1] = Object ID
4101 // Ops[2] = Composite ID
4102 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004103 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004104
SJW01901d92020-05-21 08:58:31 -05004105 Ops << I.getType() << IVI->getInsertedValueOperand()
4106 << IVI->getAggregateOperand();
David Neto22f144c2017-06-12 14:26:21 -04004107
4108 for (auto &Index : IVI->indices()) {
SJW01901d92020-05-21 08:58:31 -05004109 Ops << Index;
David Neto22f144c2017-06-12 14:26:21 -04004110 }
4111
SJWf93f5f32020-05-05 07:27:56 -05004112 RID = addSPIRVInst(spv::OpCompositeInsert, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004113 break;
4114 }
4115 case Instruction::Select: {
4116 //
4117 // Generate OpSelect.
4118 //
4119
4120 // Ops[0] = Result Type ID
4121 // Ops[1] = Condition ID
4122 // Ops[2] = True Constant ID
4123 // Ops[3] = False Constant ID
SJWf93f5f32020-05-05 07:27:56 -05004124 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004125
4126 // Find SPIRV instruction for parameter type.
4127 auto Ty = I.getType();
4128 if (Ty->isPointerTy()) {
4129 auto PointeeTy = Ty->getPointerElementType();
4130 if (PointeeTy->isStructTy() &&
4131 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
4132 Ty = PointeeTy;
alan-baker5b86ed72019-02-15 08:26:50 -05004133 } else {
4134 // Selecting between pointers requires variable pointers.
4135 setVariablePointersCapabilities(Ty->getPointerAddressSpace());
4136 if (!hasVariablePointers() && !selectFromSameObject(&I)) {
SJW01901d92020-05-21 08:58:31 -05004137 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004138 }
David Neto22f144c2017-06-12 14:26:21 -04004139 }
4140 }
4141
SJW01901d92020-05-21 08:58:31 -05004142 Ops << Ty << I.getOperand(0) << I.getOperand(1) << I.getOperand(2);
David Neto22f144c2017-06-12 14:26:21 -04004143
SJWf93f5f32020-05-05 07:27:56 -05004144 RID = addSPIRVInst(spv::OpSelect, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004145 break;
4146 }
4147 case Instruction::ExtractElement: {
4148 // Handle <4 x i8> type manually.
4149 Type *CompositeTy = I.getOperand(0)->getType();
4150 if (is4xi8vec(CompositeTy)) {
4151 //
4152 // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
4153 // <4 x i8>.
4154 //
4155
4156 //
4157 // Generate OpShiftRightLogical
4158 //
4159 // Ops[0] = Result Type ID
4160 // Ops[1] = Operand 0
4161 // Ops[2] = Operand 1
4162 //
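      // Illustrative sketch (assumed names) for extracting element %i from a
      // <4 x i8> value packed into 32-bit %v:
      //   %off = OpIMul %uint %i %uint_8               ; bit offset
      //   %tmp = OpShiftRightLogical %uint %v %off
      //   %res = OpBitwiseAnd %uint %tmp %uint_255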
SJWf93f5f32020-05-05 07:27:56 -05004163 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004164
SJW01901d92020-05-21 08:58:31 -05004165 Ops << CompositeTy << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04004166
SJW01901d92020-05-21 08:58:31 -05004167 SPIRVID Op1ID = 0;
David Neto22f144c2017-06-12 14:26:21 -04004168 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
4169 // Handle constant index.
SJW806a5d82020-07-15 12:51:38 -05004170 uint32_t Idx = static_cast<uint32_t>(CI->getZExtValue());
4171 Op1ID = getSPIRVInt32Constant(Idx * 8);
David Neto22f144c2017-06-12 14:26:21 -04004172 } else {
4173 // Handle variable index.
SJWf93f5f32020-05-05 07:27:56 -05004174 SPIRVOperandVec TmpOps;
David Neto22f144c2017-06-12 14:26:21 -04004175
SJW806a5d82020-07-15 12:51:38 -05004176 TmpOps << Type::getInt32Ty(Context) << I.getOperand(1)
4177 << getSPIRVInt32Constant(8);
David Neto22f144c2017-06-12 14:26:21 -04004178
SJWf93f5f32020-05-05 07:27:56 -05004179 Op1ID = addSPIRVInst(spv::OpIMul, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004180 }
SJW01901d92020-05-21 08:58:31 -05004181 Ops << Op1ID;
David Neto22f144c2017-06-12 14:26:21 -04004182
SJW01901d92020-05-21 08:58:31 -05004183 SPIRVID ShiftID = addSPIRVInst(spv::OpShiftRightLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004184
4185 //
4186 // Generate OpBitwiseAnd
4187 //
4188 // Ops[0] = Result Type ID
4189 // Ops[1] = Operand 0
4190 // Ops[2] = Operand 1
4191 //
4192 Ops.clear();
4193
SJW806a5d82020-07-15 12:51:38 -05004194 Ops << CompositeTy << ShiftID << getSPIRVInt32Constant(0xFF);
David Neto22f144c2017-06-12 14:26:21 -04004195
SJWf93f5f32020-05-05 07:27:56 -05004196 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004197 break;
4198 }
4199
4200 // Ops[0] = Result Type ID
4201 // Ops[1] = Composite ID
4202 // Ops[2] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004203 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004204
SJW01901d92020-05-21 08:58:31 -05004205 Ops << I.getType() << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04004206
4207 spv::Op Opcode = spv::OpCompositeExtract;
4208 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
SJW01901d92020-05-21 08:58:31 -05004209 Ops << static_cast<uint32_t>(CI->getZExtValue());
David Neto22f144c2017-06-12 14:26:21 -04004210 } else {
SJW01901d92020-05-21 08:58:31 -05004211 Ops << I.getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04004212 Opcode = spv::OpVectorExtractDynamic;
4213 }
4214
SJWf93f5f32020-05-05 07:27:56 -05004215 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004216 break;
4217 }
4218 case Instruction::InsertElement: {
4219 // Handle <4 x i8> type manually.
4220 Type *CompositeTy = I.getOperand(0)->getType();
4221 if (is4xi8vec(CompositeTy)) {
SJW806a5d82020-07-15 12:51:38 -05004222 SPIRVID CstFFID = getSPIRVInt32Constant(0xFF);
David Neto22f144c2017-06-12 14:26:21 -04004223
SJW01901d92020-05-21 08:58:31 -05004224 SPIRVID ShiftAmountID = 0;
David Neto22f144c2017-06-12 14:26:21 -04004225 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
4226 // Handle constant index.
SJW806a5d82020-07-15 12:51:38 -05004227 uint32_t Idx = static_cast<uint32_t>(CI->getZExtValue());
4228 ShiftAmountID = getSPIRVInt32Constant(Idx * 8);
David Neto22f144c2017-06-12 14:26:21 -04004229 } else {
4230 // Handle variable index.
SJWf93f5f32020-05-05 07:27:56 -05004231 SPIRVOperandVec TmpOps;
David Neto22f144c2017-06-12 14:26:21 -04004232
SJW806a5d82020-07-15 12:51:38 -05004233 TmpOps << Type::getInt32Ty(Context) << I.getOperand(2)
4234 << getSPIRVInt32Constant(8);
David Neto22f144c2017-06-12 14:26:21 -04004235
SJWf93f5f32020-05-05 07:27:56 -05004236 ShiftAmountID = addSPIRVInst(spv::OpIMul, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004237 }
4238
4239 //
4240 // Generate mask operations.
4241 //
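      // Illustrative sketch (assumed names) for inserting byte %b at element
      // %i of a <4 x i8> value packed into 32-bit %v:
      //   %off  = OpIMul %uint %i %uint_8
      //   %mask = OpShiftLeftLogical %uint %uint_255 %off
      //   %inv  = OpNot %uint %mask
      //   %keep = OpBitwiseAnd %uint %v %inv
      //   %ins  = OpShiftLeftLogical %uint %b %off
      //   %res  = OpBitwiseOr %uint %keep %ins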
4242
4243 // ShiftLeft mask according to index of insertelement.
SJWf93f5f32020-05-05 07:27:56 -05004244 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004245
SJW01901d92020-05-21 08:58:31 -05004246 Ops << CompositeTy << CstFFID << ShiftAmountID;
David Neto22f144c2017-06-12 14:26:21 -04004247
SJW01901d92020-05-21 08:58:31 -05004248 SPIRVID MaskID = addSPIRVInst(spv::OpShiftLeftLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004249
4250 // Inverse mask.
4251 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004252 Ops << CompositeTy << MaskID;
David Neto22f144c2017-06-12 14:26:21 -04004253
SJW01901d92020-05-21 08:58:31 -05004254 SPIRVID InvMaskID = addSPIRVInst(spv::OpNot, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004255
4256 // Apply mask.
4257 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004258 Ops << CompositeTy << I.getOperand(0) << InvMaskID;
David Neto22f144c2017-06-12 14:26:21 -04004259
SJW01901d92020-05-21 08:58:31 -05004260 SPIRVID OrgValID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004261
4262 // Create correct value according to index of insertelement.
4263 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004264 Ops << CompositeTy << I.getOperand(1) << ShiftAmountID;
David Neto22f144c2017-06-12 14:26:21 -04004265
SJW01901d92020-05-21 08:58:31 -05004266 SPIRVID InsertValID = addSPIRVInst(spv::OpShiftLeftLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004267
4268 // Insert value to original value.
4269 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004270 Ops << CompositeTy << OrgValID << InsertValID;
David Neto22f144c2017-06-12 14:26:21 -04004271
SJWf93f5f32020-05-05 07:27:56 -05004272 RID = addSPIRVInst(spv::OpBitwiseOr, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004273 break;
4274 }
4275
SJWf93f5f32020-05-05 07:27:56 -05004276 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004277
James Priced26efea2018-06-09 23:28:32 +01004278 // Ops[0] = Result Type ID
SJW01901d92020-05-21 08:58:31 -05004279 Ops << I.getType();
David Neto22f144c2017-06-12 14:26:21 -04004280
4281 spv::Op Opcode = spv::OpCompositeInsert;
4282 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
David Neto257c3892018-04-11 13:19:45 -04004283 const auto value = CI->getZExtValue();
4284 assert(value <= UINT32_MAX);
James Priced26efea2018-06-09 23:28:32 +01004285 // Ops[1] = Object ID
4286 // Ops[2] = Composite ID
4287 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJW01901d92020-05-21 08:58:31 -05004288 Ops << I.getOperand(1) << I.getOperand(0) << static_cast<uint32_t>(value);
David Neto22f144c2017-06-12 14:26:21 -04004289 } else {
James Priced26efea2018-06-09 23:28:32 +01004290 // Ops[1] = Composite ID
4291 // Ops[2] = Object ID
4292 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJW01901d92020-05-21 08:58:31 -05004293 Ops << I.getOperand(0) << I.getOperand(1) << I.getOperand(2);
David Neto22f144c2017-06-12 14:26:21 -04004294 Opcode = spv::OpVectorInsertDynamic;
4295 }
4296
SJWf93f5f32020-05-05 07:27:56 -05004297 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004298 break;
4299 }
4300 case Instruction::ShuffleVector: {
4301 // Ops[0] = Result Type ID
4302 // Ops[1] = Vector 1 ID
4303 // Ops[2] = Vector 2 ID
4304 // Ops[3] ... Ops[n] = Components (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004305 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004306
SJW01901d92020-05-21 08:58:31 -05004307 Ops << I.getType() << I.getOperand(0) << I.getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04004308
alan-bakerc9666712020-04-01 16:31:21 -04004309 auto shuffle = cast<ShuffleVectorInst>(&I);
4310 SmallVector<int, 4> mask;
4311 shuffle->getShuffleMask(mask);
4312 for (auto i : mask) {
4313 if (i == UndefMaskElem) {
4314 if (clspv::Option::HackUndef())
4315 // Use 0 instead of undef.
SJW01901d92020-05-21 08:58:31 -05004316 Ops << 0;
alan-bakerc9666712020-04-01 16:31:21 -04004317 else
4318 // Undef for shuffle in SPIR-V.
SJW01901d92020-05-21 08:58:31 -05004319 Ops << 0xffffffff;
David Neto22f144c2017-06-12 14:26:21 -04004320 } else {
SJW01901d92020-05-21 08:58:31 -05004321 Ops << i;
David Neto22f144c2017-06-12 14:26:21 -04004322 }
4323 }
4324
SJWf93f5f32020-05-05 07:27:56 -05004325 RID = addSPIRVInst(spv::OpVectorShuffle, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004326 break;
4327 }
4328 case Instruction::ICmp:
4329 case Instruction::FCmp: {
4330 CmpInst *CmpI = cast<CmpInst>(&I);
4331
David Netod4ca2e62017-07-06 18:47:35 -04004332 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004333 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04004334 if (isa<PointerType>(ArgTy)) {
4335 CmpI->print(errs());
alan-baker21574d32020-01-29 16:00:31 -05004336 std::string name = I.getParent()->getParent()->getName().str();
David Netod4ca2e62017-07-06 18:47:35 -04004337 errs()
4338 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
4339 << "in function " << name << "\n";
4340 llvm_unreachable("Pointer equality check is invalid");
4341 break;
4342 }
4343
SJWf93f5f32020-05-05 07:27:56 -05004344 SPIRVOperandVec Ops;
alan-baker15106572020-11-06 15:08:10 -05004345 if (CmpI->getPredicate() == CmpInst::FCMP_ORD ||
4346 CmpI->getPredicate() == CmpInst::FCMP_UNO) {
4347 // Implement ordered and unordered comparisons are OpIsNan instructions.
4348 // Optimize the constants to simplify the resulting code.
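      // e.g. fcmp uno float %a, %b is assumed to become:
      //   %na  = OpIsNan %bool %a
      //   %nb  = OpIsNan %bool %b
      //   %res = OpLogicalOr %bool %na %nb
      // and the ordered form (fcmp ord) adds an OpLogicalNot of that result.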
4349 auto lhs = CmpI->getOperand(0);
4350 auto rhs = CmpI->getOperand(1);
4351 auto const_lhs = dyn_cast_or_null<Constant>(lhs);
4352 auto const_rhs = dyn_cast_or_null<Constant>(rhs);
4353 if ((const_lhs && const_lhs->isNaN()) ||
4354 (const_rhs && const_rhs->isNaN())) {
4355      // Result is a constant: false for ordered, true for unordered.
4356 if (CmpI->getPredicate() == CmpInst::FCMP_ORD) {
4357 RID = getSPIRVConstant(ConstantInt::getFalse(CmpI->getType()));
4358 } else {
4359 RID = getSPIRVConstant(ConstantInt::getTrue(CmpI->getType()));
4360 }
4361 break;
4362 }
4363 SPIRVID lhs_id;
4364 SPIRVID rhs_id;
4365 if (!const_lhs) {
4366 // Generate OpIsNan for the lhs.
4367 Ops.clear();
4368 Ops << CmpI->getType() << lhs;
4369 lhs_id = addSPIRVInst(spv::OpIsNan, Ops);
4370 }
4371 if (!const_rhs) {
4372 // Generate OpIsNan for the rhs.
4373 Ops.clear();
4374 Ops << CmpI->getType() << rhs;
4375 rhs_id = addSPIRVInst(spv::OpIsNan, Ops);
4376 }
4377 if (lhs_id.isValid() && rhs_id.isValid()) {
4378 // Or the results for the lhs and rhs.
4379 Ops.clear();
4380 Ops << CmpI->getType() << lhs_id << rhs_id;
4381 RID = addSPIRVInst(spv::OpLogicalOr, Ops);
4382 } else {
4383 RID = lhs_id.isValid() ? lhs_id : rhs_id;
4384 }
4385 if (CmpI->getPredicate() == CmpInst::FCMP_ORD) {
4386 // For ordered comparisons, invert the intermediate result.
4387 Ops.clear();
4388 Ops << CmpI->getType() << RID;
4389 RID = addSPIRVInst(spv::OpLogicalNot, Ops);
4390 }
4391 break;
4392 } else {
4393 // Remaining comparisons map directly to SPIR-V opcodes.
4394 // Ops[0] = Result Type ID
4395 // Ops[1] = Operand 1 ID
4396 // Ops[2] = Operand 2 ID
4397 Ops << CmpI->getType() << CmpI->getOperand(0) << CmpI->getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04004398
alan-baker15106572020-11-06 15:08:10 -05004399 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
4400 RID = addSPIRVInst(Opcode, Ops);
4401 }
David Neto22f144c2017-06-12 14:26:21 -04004402 break;
4403 }
4404 case Instruction::Br: {
SJW88ed5fe2020-05-11 12:40:57 -05004405 // Branch instruction is deferred because it needs label's ID.
4406 BasicBlock *BrBB = I.getParent();
4407 if (ContinueBlocks.count(BrBB) || MergeBlocks.count(BrBB)) {
4408 // Placeholder for Merge operation
4409 RID = addSPIRVPlaceholder(&I);
4410 }
4411 RID = addSPIRVPlaceholder(&I);
David Neto22f144c2017-06-12 14:26:21 -04004412 break;
4413 }
4414 case Instruction::Switch: {
4415 I.print(errs());
4416 llvm_unreachable("Unsupported instruction???");
4417 break;
4418 }
4419 case Instruction::IndirectBr: {
4420 I.print(errs());
4421 llvm_unreachable("Unsupported instruction???");
4422 break;
4423 }
4424 case Instruction::PHI: {
SJW88ed5fe2020-05-11 12:40:57 -05004425 // PHI instruction is deferred because it needs label's ID.
4426 RID = addSPIRVPlaceholder(&I);
David Neto22f144c2017-06-12 14:26:21 -04004427 break;
4428 }
4429 case Instruction::Alloca: {
4430 //
4431 // Generate OpVariable.
4432 //
4433 // Ops[0] : Result Type ID
4434 // Ops[1] : Storage Class
SJWf93f5f32020-05-05 07:27:56 -05004435 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004436
SJW01901d92020-05-21 08:58:31 -05004437 Ops << I.getType() << spv::StorageClassFunction;
David Neto22f144c2017-06-12 14:26:21 -04004438
SJWf93f5f32020-05-05 07:27:56 -05004439 RID = addSPIRVInst(spv::OpVariable, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004440 break;
4441 }
4442 case Instruction::Load: {
4443 LoadInst *LD = cast<LoadInst>(&I);
4444 //
4445 // Generate OpLoad.
4446 //
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004447
alan-baker5b86ed72019-02-15 08:26:50 -05004448 if (LD->getType()->isPointerTy()) {
4449 // Loading a pointer requires variable pointers.
4450 setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
4451 }
David Neto22f144c2017-06-12 14:26:21 -04004452
SJW01901d92020-05-21 08:58:31 -05004453 SPIRVID PointerID = getSPIRVValue(LD->getPointerOperand());
David Netoa60b00b2017-09-15 16:34:09 -04004454 // This is a hack to work around what looks like a driver bug.
4455 // When we're loading from the special variable holding the WorkgroupSize
David Neto0a2f98d2017-09-15 19:38:40 -04004456    // builtin value, use an OpBitwiseAnd of the value's ID rather than
4457 // generating a load.
David Neto66cfe642018-03-24 06:13:56 -07004458 // TODO(dneto): Remove this awful hack once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -04004459 if (PointerID == WorkgroupSizeVarID) {
David Neto0a2f98d2017-09-15 19:38:40 -04004460 // Generate a bitwise-and of the original value with itself.
4461 // We should have been able to get away with just an OpCopyObject,
4462 // but we need something more complex to get past certain driver bugs.
4463 // This is ridiculous, but necessary.
4464 // TODO(dneto): Revisit this once drivers fix their bugs.
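      // i.e. the load is assumed to be replaced by something like:
      //   %r = OpBitwiseAnd %v3uint %wgsize_value %wgsize_value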
4465
SJWf93f5f32020-05-05 07:27:56 -05004466 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05004467 Ops << LD->getType() << WorkgroupSizeValueID << WorkgroupSizeValueID;
David Neto0a2f98d2017-09-15 19:38:40 -04004468
SJWf93f5f32020-05-05 07:27:56 -05004469 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Netoa60b00b2017-09-15 16:34:09 -04004470 break;
4471 }
4472
4473 // This is the normal path. Generate a load.
4474
David Neto22f144c2017-06-12 14:26:21 -04004475 // Ops[0] = Result Type ID
4476 // Ops[1] = Pointer ID
4477 // Ops[2] ... Ops[n] = Optional Memory Access
4478 //
4479 // TODO: Do we need to implement Optional Memory Access???
David Neto0a2f98d2017-09-15 19:38:40 -04004480
alan-baker3f772c02021-06-15 22:18:11 -04004481 auto ptr = LD->getPointerOperand();
4482 auto ptr_ty = ptr->getType();
4483 SPIRVID result_type_id;
4484 if (LD->getType()->isPointerTy()) {
4485 result_type_id = getSPIRVType(LD->getType());
4486 } else {
4487 auto layout = PointerRequiresLayout(ptr_ty->getPointerAddressSpace());
4488 result_type_id = getSPIRVType(LD->getType(), layout);
4489 }
SJWf93f5f32020-05-05 07:27:56 -05004490 SPIRVOperandVec Ops;
alan-baker3f772c02021-06-15 22:18:11 -04004491 Ops << result_type_id << ptr;
David Neto22f144c2017-06-12 14:26:21 -04004492
SJWf93f5f32020-05-05 07:27:56 -05004493 RID = addSPIRVInst(spv::OpLoad, Ops);
alan-baker3f772c02021-06-15 22:18:11 -04004494
4495 auto no_layout_id = getSPIRVType(LD->getType());
4496 if (Option::SpvVersion() >= SPIRVVersion::SPIRV_1_4 &&
4497 no_layout_id.get() != result_type_id.get()) {
4498 // Generate an OpCopyLogical to convert from the laid out type to a
4499 // non-laid out type.
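      // e.g. loading a struct through a StorageBuffer pointer is assumed to
      // produce:
      //   %laid = OpLoad %S_laid_out %ptr
      //   %r    = OpCopyLogical %S %laid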
4500 Ops.clear();
4501 Ops << no_layout_id << RID;
4502 RID = addSPIRVInst(spv::OpCopyLogical, Ops);
4503 }
David Neto22f144c2017-06-12 14:26:21 -04004504 break;
4505 }
4506 case Instruction::Store: {
4507 StoreInst *ST = cast<StoreInst>(&I);
4508 //
4509 // Generate OpStore.
4510 //
4511
alan-baker5b86ed72019-02-15 08:26:50 -05004512 if (ST->getValueOperand()->getType()->isPointerTy()) {
4513 // Storing a pointer requires variable pointers.
4514 setVariablePointersCapabilities(
4515 ST->getValueOperand()->getType()->getPointerAddressSpace());
4516 }
4517
alan-baker3f772c02021-06-15 22:18:11 -04004518 SPIRVOperandVec Ops;
4519 auto ptr = ST->getPointerOperand();
4520 auto ptr_ty = ptr->getType();
4521 auto value = ST->getValueOperand();
4522 auto value_ty = value->getType();
4523 auto needs_layout = PointerRequiresLayout(ptr_ty->getPointerAddressSpace());
4524 if (Option::SpvVersion() >= SPIRVVersion::SPIRV_1_4 && needs_layout &&
4525 (value_ty->isArrayTy() || value_ty->isStructTy())) {
4526      // Generate an OpCopyLogical to convert from the non-laid out type to
4527      // the laid out type.
4528 Ops << getSPIRVType(value_ty, needs_layout) << value;
4529 RID = addSPIRVInst(spv::OpCopyLogical, Ops);
4530 Ops.clear();
4531 }
4532
David Neto22f144c2017-06-12 14:26:21 -04004533 // Ops[0] = Pointer ID
4534 // Ops[1] = Object ID
4535 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
4536 //
4537 // TODO: Do we need to implement Optional Memory Access???
alan-baker3f772c02021-06-15 22:18:11 -04004538 Ops << ST->getPointerOperand();
4539 if (RID.isValid()) {
4540 Ops << RID;
4541 } else {
4542 Ops << ST->getValueOperand();
4543 }
SJWf93f5f32020-05-05 07:27:56 -05004544 RID = addSPIRVInst(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004545 break;
4546 }
4547 case Instruction::AtomicCmpXchg: {
4548 I.print(errs());
4549 llvm_unreachable("Unsupported instruction???");
4550 break;
4551 }
4552 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01004553 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
4554
4555 spv::Op opcode;
4556
4557 switch (AtomicRMW->getOperation()) {
4558 default:
4559 I.print(errs());
4560 llvm_unreachable("Unsupported instruction???");
4561 case llvm::AtomicRMWInst::Add:
4562 opcode = spv::OpAtomicIAdd;
4563 break;
4564 case llvm::AtomicRMWInst::Sub:
4565 opcode = spv::OpAtomicISub;
4566 break;
4567 case llvm::AtomicRMWInst::Xchg:
4568 opcode = spv::OpAtomicExchange;
4569 break;
4570 case llvm::AtomicRMWInst::Min:
4571 opcode = spv::OpAtomicSMin;
4572 break;
4573 case llvm::AtomicRMWInst::Max:
4574 opcode = spv::OpAtomicSMax;
4575 break;
4576 case llvm::AtomicRMWInst::UMin:
4577 opcode = spv::OpAtomicUMin;
4578 break;
4579 case llvm::AtomicRMWInst::UMax:
4580 opcode = spv::OpAtomicUMax;
4581 break;
4582 case llvm::AtomicRMWInst::And:
4583 opcode = spv::OpAtomicAnd;
4584 break;
4585 case llvm::AtomicRMWInst::Or:
4586 opcode = spv::OpAtomicOr;
4587 break;
4588 case llvm::AtomicRMWInst::Xor:
4589 opcode = spv::OpAtomicXor;
4590 break;
4591 }
4592
4593 //
4594 // Generate OpAtomic*.
4595 //
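    // Ops[0] = Result Type ID
    // Ops[1] = Pointer ID
    // Ops[2] = Scope ID (Device)
    // Ops[3] = Memory Semantics ID
    // Ops[4] = Value ID
    // e.g. atomicrmw add is assumed to become roughly:
    //   %r = OpAtomicIAdd %uint %ptr %scope_device %semantics %val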
SJWf93f5f32020-05-05 07:27:56 -05004596 SPIRVOperandVec Ops;
Neil Henning39672102017-09-29 14:33:13 +01004597
SJW01901d92020-05-21 08:58:31 -05004598 Ops << I.getType() << AtomicRMW->getPointerOperand();
Neil Henning39672102017-09-29 14:33:13 +01004599
SJW806a5d82020-07-15 12:51:38 -05004600 const auto ConstantScopeDevice = getSPIRVInt32Constant(spv::ScopeDevice);
SJW01901d92020-05-21 08:58:31 -05004601 Ops << ConstantScopeDevice;
Neil Henning39672102017-09-29 14:33:13 +01004602
SJW806a5d82020-07-15 12:51:38 -05004603 const auto ConstantMemorySemantics =
4604 getSPIRVInt32Constant(spv::MemorySemanticsUniformMemoryMask |
4605 spv::MemorySemanticsSequentiallyConsistentMask);
SJW01901d92020-05-21 08:58:31 -05004606 Ops << ConstantMemorySemantics << AtomicRMW->getValOperand();
Neil Henning39672102017-09-29 14:33:13 +01004607
SJWf93f5f32020-05-05 07:27:56 -05004608 RID = addSPIRVInst(opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004609 break;
4610 }
4611 case Instruction::Fence: {
4612 I.print(errs());
4613 llvm_unreachable("Unsupported instruction???");
4614 break;
4615 }
4616 case Instruction::Call: {
4617 CallInst *Call = dyn_cast<CallInst>(&I);
SJW806a5d82020-07-15 12:51:38 -05004618 RID = GenerateInstructionFromCall(Call);
David Neto22f144c2017-06-12 14:26:21 -04004619 break;
4620 }
4621 case Instruction::Ret: {
4622 unsigned NumOps = I.getNumOperands();
4623 if (NumOps == 0) {
4624 //
4625 // Generate OpReturn.
4626 //
SJWf93f5f32020-05-05 07:27:56 -05004627 RID = addSPIRVInst(spv::OpReturn);
David Neto22f144c2017-06-12 14:26:21 -04004628 } else {
4629 //
4630 // Generate OpReturnValue.
4631 //
4632
4633 // Ops[0] = Return Value ID
SJWf93f5f32020-05-05 07:27:56 -05004634 SPIRVOperandVec Ops;
David Neto257c3892018-04-11 13:19:45 -04004635
SJW01901d92020-05-21 08:58:31 -05004636 Ops << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04004637
SJWf93f5f32020-05-05 07:27:56 -05004638 RID = addSPIRVInst(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004639 break;
4640 }
4641 break;
4642 }
4643 }
SJWf93f5f32020-05-05 07:27:56 -05004644
4645 // Register Instruction to ValueMap.
SJW01901d92020-05-21 08:58:31 -05004646 if (RID.isValid()) {
SJWf93f5f32020-05-05 07:27:56 -05004647 VMap[&I] = RID;
4648 }
David Neto22f144c2017-06-12 14:26:21 -04004649}
4650
4651void SPIRVProducerPass::GenerateFuncEpilogue() {
David Neto22f144c2017-06-12 14:26:21 -04004652 //
4653 // Generate OpFunctionEnd
4654 //
SJWf93f5f32020-05-05 07:27:56 -05004655 addSPIRVInst(spv::OpFunctionEnd);
David Neto22f144c2017-06-12 14:26:21 -04004656}
4657
4658bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05004659 // Don't specialize <4 x i8> if i8 is generally supported.
4660 if (clspv::Option::Int8Support())
4661 return false;
4662
David Neto22f144c2017-06-12 14:26:21 -04004663 LLVMContext &Context = Ty->getContext();
James Pricecf53df42020-04-20 14:41:24 -04004664 if (auto VecTy = dyn_cast<VectorType>(Ty)) {
4665 if (VecTy->getElementType() == Type::getInt8Ty(Context) &&
alan-baker5a8c3be2020-09-09 13:44:26 -04004666 VecTy->getElementCount().getKnownMinValue() == 4) {
David Neto22f144c2017-06-12 14:26:21 -04004667 return true;
4668 }
4669 }
4670
4671 return false;
4672}
4673
4674void SPIRVProducerPass::HandleDeferredInstruction() {
David Neto22f144c2017-06-12 14:26:21 -04004675 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
4676
SJW88ed5fe2020-05-11 12:40:57 -05004677 for (size_t i = 0; i < DeferredInsts.size(); ++i) {
4678 Value *Inst = DeferredInsts[i].first;
4679 SPIRVInstruction *Placeholder = DeferredInsts[i].second;
4680 SPIRVOperandVec Operands;
4681
4682 auto nextDeferred = [&i, &Inst, &DeferredInsts, &Placeholder]() {
4683 ++i;
4684 assert(DeferredInsts.size() > i);
4685 assert(Inst == DeferredInsts[i].first);
4686 Placeholder = DeferredInsts[i].second;
4687 };
David Neto22f144c2017-06-12 14:26:21 -04004688
4689 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
alan-baker06cad652019-12-03 17:56:47 -05004690      // Check whether this branch needs to be preceded by a merge instruction.
David Neto22f144c2017-06-12 14:26:21 -04004691 BasicBlock *BrBB = Br->getParent();
alan-baker06cad652019-12-03 17:56:47 -05004692 if (ContinueBlocks.count(BrBB)) {
David Neto22f144c2017-06-12 14:26:21 -04004693 //
4694 // Generate OpLoopMerge.
4695 //
4696 // Ops[0] = Merge Block ID
4697 // Ops[1] = Continue Target ID
4698 // Ops[2] = Selection Control
SJWf93f5f32020-05-05 07:27:56 -05004699 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004700
SJW01901d92020-05-21 08:58:31 -05004701 Ops << MergeBlocks[BrBB] << ContinueBlocks[BrBB]
4702 << spv::LoopControlMaskNone;
David Neto22f144c2017-06-12 14:26:21 -04004703
SJW88ed5fe2020-05-11 12:40:57 -05004704 replaceSPIRVInst(Placeholder, spv::OpLoopMerge, Ops);
4705
4706 nextDeferred();
4707
alan-baker06cad652019-12-03 17:56:47 -05004708 } else if (MergeBlocks.count(BrBB)) {
4709 //
4710 // Generate OpSelectionMerge.
4711 //
4712 // Ops[0] = Merge Block ID
4713 // Ops[1] = Selection Control
SJWf93f5f32020-05-05 07:27:56 -05004714 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004715
alan-baker06cad652019-12-03 17:56:47 -05004716 auto MergeBB = MergeBlocks[BrBB];
SJW01901d92020-05-21 08:58:31 -05004717 Ops << MergeBB << spv::SelectionControlMaskNone;
David Neto22f144c2017-06-12 14:26:21 -04004718
SJW88ed5fe2020-05-11 12:40:57 -05004719 replaceSPIRVInst(Placeholder, spv::OpSelectionMerge, Ops);
4720
4721 nextDeferred();
David Neto22f144c2017-06-12 14:26:21 -04004722 }
4723
4724 if (Br->isConditional()) {
4725 //
4726 // Generate OpBranchConditional.
4727 //
4728 // Ops[0] = Condition ID
4729 // Ops[1] = True Label ID
4730 // Ops[2] = False Label ID
4731 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004732 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004733
SJW01901d92020-05-21 08:58:31 -05004734 Ops << Br->getCondition() << Br->getSuccessor(0) << Br->getSuccessor(1);
David Neto22f144c2017-06-12 14:26:21 -04004735
SJW88ed5fe2020-05-11 12:40:57 -05004736 replaceSPIRVInst(Placeholder, spv::OpBranchConditional, Ops);
4737
David Neto22f144c2017-06-12 14:26:21 -04004738 } else {
4739 //
4740 // Generate OpBranch.
4741 //
4742 // Ops[0] = Target Label ID
SJWf93f5f32020-05-05 07:27:56 -05004743 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004744
SJW01901d92020-05-21 08:58:31 -05004745 Ops << Br->getSuccessor(0);
David Neto22f144c2017-06-12 14:26:21 -04004746
SJW88ed5fe2020-05-11 12:40:57 -05004747 replaceSPIRVInst(Placeholder, spv::OpBranch, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004748 }
4749 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5ed87542020-03-23 11:05:22 -04004750 if (PHI->getType()->isPointerTy() && !IsSamplerType(PHI->getType()) &&
4751 !IsImageType(PHI->getType())) {
alan-baker5b86ed72019-02-15 08:26:50 -05004752 // OpPhi on pointers requires variable pointers.
4753 setVariablePointersCapabilities(
4754 PHI->getType()->getPointerAddressSpace());
4755 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
SJW01901d92020-05-21 08:58:31 -05004756 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004757 }
4758 }
4759
David Neto22f144c2017-06-12 14:26:21 -04004760 //
4761 // Generate OpPhi.
4762 //
4763 // Ops[0] = Result Type ID
4764 // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
SJWf93f5f32020-05-05 07:27:56 -05004765 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004766
SJW01901d92020-05-21 08:58:31 -05004767 Ops << PHI->getType();
David Neto22f144c2017-06-12 14:26:21 -04004768
SJW88ed5fe2020-05-11 12:40:57 -05004769 for (unsigned j = 0; j < PHI->getNumIncomingValues(); j++) {
SJW01901d92020-05-21 08:58:31 -05004770 Ops << PHI->getIncomingValue(j) << PHI->getIncomingBlock(j);
David Neto22f144c2017-06-12 14:26:21 -04004771 }
4772
SJW88ed5fe2020-05-11 12:40:57 -05004773 replaceSPIRVInst(Placeholder, spv::OpPhi, Ops);
4774
David Neto22f144c2017-06-12 14:26:21 -04004775 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
4776 Function *Callee = Call->getCalledFunction();
David Neto3fbb4072017-10-16 11:28:14 -04004777 auto callee_name = Callee->getName();
David Neto22f144c2017-06-12 14:26:21 -04004778
SJW61531372020-06-09 07:31:08 -05004779 if (Builtins::Lookup(Callee) == Builtins::kClspvCompositeConstruct) {
David Netoab03f432017-11-03 17:00:44 -04004780 // Generate an OpCompositeConstruct
SJWf93f5f32020-05-05 07:27:56 -05004781 SPIRVOperandVec Ops;
David Netoab03f432017-11-03 17:00:44 -04004782
4783 // The result type.
SJW01901d92020-05-21 08:58:31 -05004784 Ops << Call->getType();
David Netoab03f432017-11-03 17:00:44 -04004785
4786 for (Use &use : Call->arg_operands()) {
SJW01901d92020-05-21 08:58:31 -05004787 Ops << use.get();
David Netoab03f432017-11-03 17:00:44 -04004788 }
4789
SJW88ed5fe2020-05-11 12:40:57 -05004790 replaceSPIRVInst(Placeholder, spv::OpCompositeConstruct, Ops);
David Netoab03f432017-11-03 17:00:44 -04004791
David Neto22f144c2017-06-12 14:26:21 -04004792 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05004793 if (Call->getType()->isPointerTy()) {
4794 // Functions returning pointers require variable pointers.
4795 setVariablePointersCapabilities(
4796 Call->getType()->getPointerAddressSpace());
4797 }
4798
David Neto22f144c2017-06-12 14:26:21 -04004799 //
4800 // Generate OpFunctionCall.
4801 //
4802
4803 // Ops[0] = Result Type ID
4804 // Ops[1] = Callee Function ID
4805 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
SJWf93f5f32020-05-05 07:27:56 -05004806 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004807
SJW01901d92020-05-21 08:58:31 -05004808 Ops << Call->getType();
David Neto22f144c2017-06-12 14:26:21 -04004809
SJW01901d92020-05-21 08:58:31 -05004810 SPIRVID CalleeID = getSPIRVValue(Callee);
SJW806a5d82020-07-15 12:51:38 -05004811 if (!CalleeID.isValid()) {
David Neto43568eb2017-10-13 18:25:25 -04004812 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04004813 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04004814 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
4815 // causes an infinite loop. Instead, go ahead and generate
4816 // the bad function call. A validator will catch the 0-Id.
4817 // llvm_unreachable("Can't translate function call");
4818 }
David Neto22f144c2017-06-12 14:26:21 -04004819
SJW01901d92020-05-21 08:58:31 -05004820 Ops << CalleeID;
David Neto22f144c2017-06-12 14:26:21 -04004821
David Neto22f144c2017-06-12 14:26:21 -04004822 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
SJW88ed5fe2020-05-11 12:40:57 -05004823 for (unsigned j = 0; j < CalleeFTy->getNumParams(); j++) {
4824 auto *operand = Call->getOperand(j);
alan-bakerd4d50652019-12-03 17:17:15 -05004825 auto *operand_type = operand->getType();
4826 // Images and samplers can be passed as function parameters without
4827 // variable pointers.
4828 if (operand_type->isPointerTy() && !IsImageType(operand_type) &&
4829 !IsSamplerType(operand_type)) {
alan-baker5b86ed72019-02-15 08:26:50 -05004830 auto sc =
4831 GetStorageClass(operand->getType()->getPointerAddressSpace());
4832 if (sc == spv::StorageClassStorageBuffer) {
4833 // Passing SSBO by reference requires variable pointers storage
4834 // buffer.
SJW01901d92020-05-21 08:58:31 -05004835 setVariablePointersStorageBuffer();
alan-baker5b86ed72019-02-15 08:26:50 -05004836 } else if (sc == spv::StorageClassWorkgroup) {
4837 // Workgroup references require variable pointers if they are not
4838 // memory object declarations.
4839 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
4840 // Workgroup accessor represents a variable reference.
SJW61531372020-06-09 07:31:08 -05004841 if (Builtins::Lookup(operand_call->getCalledFunction()) !=
4842 Builtins::kClspvLocal)
SJW01901d92020-05-21 08:58:31 -05004843 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004844 } else {
4845 // Arguments are function parameters.
4846 if (!isa<Argument>(operand))
SJW01901d92020-05-21 08:58:31 -05004847 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004848 }
4849 }
4850 }
SJW01901d92020-05-21 08:58:31 -05004851 Ops << operand;
David Neto22f144c2017-06-12 14:26:21 -04004852 }
4853
SJW88ed5fe2020-05-11 12:40:57 -05004854 replaceSPIRVInst(Placeholder, spv::OpFunctionCall, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004855 }
4856 }
4857 }
4858}
4859
SJW77b87ad2020-04-21 14:37:52 -05004860void SPIRVProducerPass::HandleDeferredDecorations() {
4861 const auto &DL = module->getDataLayout();
alan-baker5a8c3be2020-09-09 13:44:26 -04004862 if (getTypesNeedingArrayStride().empty()) {
David Neto1a1a0582017-07-07 12:01:44 -04004863 return;
David Netoc6f3ab22018-04-06 18:02:31 -04004864 }
David Neto1a1a0582017-07-07 12:01:44 -04004865
David Netoc6f3ab22018-04-06 18:02:31 -04004866 // Insert ArrayStride decorations on pointer types, due to OpPtrAccessChain
4867 // instructions we generated earlier.
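  // e.g. a StorageBuffer pointer-to-float used as an OpPtrAccessChain base is
  // assumed to receive:
  //   OpDecorate %_ptr_StorageBuffer_float ArrayStride 4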
alan-bakerc3fd07f2020-10-22 09:48:49 -04004868 DenseSet<uint32_t> seen;
David Neto85082642018-03-24 06:55:20 -07004869 for (auto *type : getTypesNeedingArrayStride()) {
alan-baker3f772c02021-06-15 22:18:11 -04004870 auto TI = TypeMap.find(type);
4871 unsigned index = SpvVersion() < SPIRVVersion::SPIRV_1_4 ? 0 : 1;
4872 assert(TI != TypeMap.end());
4873 assert(index < TI->second.size());
4874 if (!TI->second[index].isValid())
4875 continue;
4876
4877 auto id = TI->second[index];
alan-bakerc3fd07f2020-10-22 09:48:49 -04004878 if (!seen.insert(id.get()).second)
4879 continue;
4880
David Neto85082642018-03-24 06:55:20 -07004881 Type *elemTy = nullptr;
4882 if (auto *ptrTy = dyn_cast<PointerType>(type)) {
4883 elemTy = ptrTy->getElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05004884 } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
alan-baker8eb435a2020-04-08 00:42:06 -04004885 elemTy = arrayTy->getElementType();
4886 } else if (auto *vecTy = dyn_cast<VectorType>(type)) {
4887 elemTy = vecTy->getElementType();
David Neto85082642018-03-24 06:55:20 -07004888 } else {
4889 errs() << "Unhandled strided type " << *type << "\n";
4890 llvm_unreachable("Unhandled strided type");
4891 }
David Neto1a1a0582017-07-07 12:01:44 -04004892
4893 // Ops[0] = Target ID
4894 // Ops[1] = Decoration (ArrayStride)
4895 // Ops[2] = Stride number (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004896 SPIRVOperandVec Ops;
David Neto1a1a0582017-07-07 12:01:44 -04004897
David Neto85082642018-03-24 06:55:20 -07004898 // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
Alan Bakerfcda9482018-10-02 17:09:59 -04004899 const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));
David Neto257c3892018-04-11 13:19:45 -04004900
alan-bakerc3fd07f2020-10-22 09:48:49 -04004901 Ops << id << spv::DecorationArrayStride << stride;
David Neto1a1a0582017-07-07 12:01:44 -04004902
SJWf93f5f32020-05-05 07:27:56 -05004903 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04004904 }
David Neto1a1a0582017-07-07 12:01:44 -04004905}
4906
SJW61531372020-06-09 07:31:08 -05004907glsl::ExtInst
4908SPIRVProducerPass::getExtInstEnum(const Builtins::FunctionInfo &func_info) {
SJW61531372020-06-09 07:31:08 -05004909 switch (func_info.getType()) {
SJW2c317da2020-03-23 07:39:13 -05004910 case Builtins::kClamp: {
SJW61531372020-06-09 07:31:08 -05004911 auto param_type = func_info.getParameter(0);
SJW2c317da2020-03-23 07:39:13 -05004912 if (param_type.type_id == Type::FloatTyID) {
alan-bakerecc9c942020-12-07 13:13:32 -05004913 return glsl::ExtInst::ExtInstNClamp;
SJW2c317da2020-03-23 07:39:13 -05004914 }
4915 return param_type.is_signed ? glsl::ExtInst::ExtInstSClamp
4916 : glsl::ExtInst::ExtInstUClamp;
4917 }
4918 case Builtins::kMax: {
SJW61531372020-06-09 07:31:08 -05004919 auto param_type = func_info.getParameter(0);
SJW2c317da2020-03-23 07:39:13 -05004920 if (param_type.type_id == Type::FloatTyID) {
4921 return glsl::ExtInst::ExtInstFMax;
4922 }
4923 return param_type.is_signed ? glsl::ExtInst::ExtInstSMax
4924 : glsl::ExtInst::ExtInstUMax;
4925 }
4926 case Builtins::kMin: {
SJW61531372020-06-09 07:31:08 -05004927 auto param_type = func_info.getParameter(0);
SJW2c317da2020-03-23 07:39:13 -05004928 if (param_type.type_id == Type::FloatTyID) {
4929 return glsl::ExtInst::ExtInstFMin;
4930 }
4931 return param_type.is_signed ? glsl::ExtInst::ExtInstSMin
4932 : glsl::ExtInst::ExtInstUMin;
4933 }
4934 case Builtins::kAbs:
4935 return glsl::ExtInst::ExtInstSAbs;
4936 case Builtins::kFmax:
Marco Antognini55d51862020-07-21 17:50:07 +01004937 return glsl::ExtInst::ExtInstNMax;
SJW2c317da2020-03-23 07:39:13 -05004938 case Builtins::kFmin:
Marco Antognini55d51862020-07-21 17:50:07 +01004939 return glsl::ExtInst::ExtInstNMin;
SJW2c317da2020-03-23 07:39:13 -05004940 case Builtins::kDegrees:
4941 return glsl::ExtInst::ExtInstDegrees;
4942 case Builtins::kRadians:
4943 return glsl::ExtInst::ExtInstRadians;
4944 case Builtins::kMix:
4945 return glsl::ExtInst::ExtInstFMix;
4946 case Builtins::kAcos:
4947 case Builtins::kAcospi:
4948 return glsl::ExtInst::ExtInstAcos;
4949 case Builtins::kAcosh:
4950 return glsl::ExtInst::ExtInstAcosh;
4951 case Builtins::kAsin:
4952 case Builtins::kAsinpi:
4953 return glsl::ExtInst::ExtInstAsin;
4954 case Builtins::kAsinh:
4955 return glsl::ExtInst::ExtInstAsinh;
4956 case Builtins::kAtan:
4957 case Builtins::kAtanpi:
4958 return glsl::ExtInst::ExtInstAtan;
4959 case Builtins::kAtanh:
4960 return glsl::ExtInst::ExtInstAtanh;
4961 case Builtins::kAtan2:
4962 case Builtins::kAtan2pi:
4963 return glsl::ExtInst::ExtInstAtan2;
4964 case Builtins::kCeil:
4965 return glsl::ExtInst::ExtInstCeil;
4966 case Builtins::kSin:
4967 case Builtins::kHalfSin:
4968 case Builtins::kNativeSin:
4969 return glsl::ExtInst::ExtInstSin;
4970 case Builtins::kSinh:
4971 return glsl::ExtInst::ExtInstSinh;
4972 case Builtins::kCos:
4973 case Builtins::kHalfCos:
4974 case Builtins::kNativeCos:
4975 return glsl::ExtInst::ExtInstCos;
4976 case Builtins::kCosh:
4977 return glsl::ExtInst::ExtInstCosh;
4978 case Builtins::kTan:
4979 case Builtins::kHalfTan:
4980 case Builtins::kNativeTan:
4981 return glsl::ExtInst::ExtInstTan;
4982 case Builtins::kTanh:
4983 return glsl::ExtInst::ExtInstTanh;
4984 case Builtins::kExp:
4985 case Builtins::kHalfExp:
4986 case Builtins::kNativeExp:
4987 return glsl::ExtInst::ExtInstExp;
4988 case Builtins::kExp2:
4989 case Builtins::kHalfExp2:
4990 case Builtins::kNativeExp2:
4991 return glsl::ExtInst::ExtInstExp2;
4992 case Builtins::kLog:
4993 case Builtins::kHalfLog:
4994 case Builtins::kNativeLog:
4995 return glsl::ExtInst::ExtInstLog;
4996 case Builtins::kLog2:
4997 case Builtins::kHalfLog2:
4998 case Builtins::kNativeLog2:
4999 return glsl::ExtInst::ExtInstLog2;
5000 case Builtins::kFabs:
5001 return glsl::ExtInst::ExtInstFAbs;
5002 case Builtins::kFma:
5003 return glsl::ExtInst::ExtInstFma;
5004 case Builtins::kFloor:
5005 return glsl::ExtInst::ExtInstFloor;
5006 case Builtins::kLdexp:
5007 return glsl::ExtInst::ExtInstLdexp;
5008 case Builtins::kPow:
5009 case Builtins::kPowr:
5010 case Builtins::kHalfPowr:
5011 case Builtins::kNativePowr:
5012 return glsl::ExtInst::ExtInstPow;
James Price38553362020-09-03 18:30:40 -04005013 case Builtins::kRint:
5014 return glsl::ExtInst::ExtInstRoundEven;
SJW2c317da2020-03-23 07:39:13 -05005015 case Builtins::kRound:
5016 return glsl::ExtInst::ExtInstRound;
5017 case Builtins::kSqrt:
5018 case Builtins::kHalfSqrt:
5019 case Builtins::kNativeSqrt:
5020 return glsl::ExtInst::ExtInstSqrt;
5021 case Builtins::kRsqrt:
5022 case Builtins::kHalfRsqrt:
5023 case Builtins::kNativeRsqrt:
5024 return glsl::ExtInst::ExtInstInverseSqrt;
5025 case Builtins::kTrunc:
5026 return glsl::ExtInst::ExtInstTrunc;
5027 case Builtins::kFrexp:
5028 return glsl::ExtInst::ExtInstFrexp;
SJW61531372020-06-09 07:31:08 -05005029 case Builtins::kClspvFract:
SJW2c317da2020-03-23 07:39:13 -05005030 case Builtins::kFract:
5031 return glsl::ExtInst::ExtInstFract;
5032 case Builtins::kSign:
5033 return glsl::ExtInst::ExtInstFSign;
5034 case Builtins::kLength:
5035 case Builtins::kFastLength:
5036 return glsl::ExtInst::ExtInstLength;
5037 case Builtins::kDistance:
5038 case Builtins::kFastDistance:
5039 return glsl::ExtInst::ExtInstDistance;
5040 case Builtins::kStep:
5041 return glsl::ExtInst::ExtInstStep;
5042 case Builtins::kSmoothstep:
5043 return glsl::ExtInst::ExtInstSmoothStep;
5044 case Builtins::kCross:
5045 return glsl::ExtInst::ExtInstCross;
5046 case Builtins::kNormalize:
5047 case Builtins::kFastNormalize:
5048 return glsl::ExtInst::ExtInstNormalize;
SJW61531372020-06-09 07:31:08 -05005049 case Builtins::kSpirvPack:
5050 return glsl::ExtInst::ExtInstPackHalf2x16;
5051 case Builtins::kSpirvUnpack:
5052 return glsl::ExtInst::ExtInstUnpackHalf2x16;
SJW2c317da2020-03-23 07:39:13 -05005053 default:
5054 break;
5055 }
5056
alan-baker5f2e88e2020-12-07 15:24:04 -05005057 // TODO: improve this by checking the intrinsic id.
SJW61531372020-06-09 07:31:08 -05005058 if (func_info.getName().find("llvm.fmuladd.") == 0) {
5059 return glsl::ExtInst::ExtInstFma;
5060 }
alan-baker5f2e88e2020-12-07 15:24:04 -05005061 if (func_info.getName().find("llvm.sqrt.") == 0) {
5062 return glsl::ExtInst::ExtInstSqrt;
5063 }
5064 if (func_info.getName().find("llvm.trunc.") == 0) {
5065 return glsl::ExtInst::ExtInstTrunc;
5066 }
5067 if (func_info.getName().find("llvm.ctlz.") == 0) {
5068 return glsl::ExtInst::ExtInstFindUMsb;
5069 }
5070 if (func_info.getName().find("llvm.cttz.") == 0) {
5071 return glsl::ExtInst::ExtInstFindILsb;
5072 }
alan-baker3e0de472020-12-08 15:57:17 -05005073 if (func_info.getName().find("llvm.ceil.") == 0) {
5074 return glsl::ExtInst::ExtInstCeil;
5075 }
5076 if (func_info.getName().find("llvm.rint.") == 0) {
5077 return glsl::ExtInst::ExtInstRoundEven;
5078 }
5079 if (func_info.getName().find("llvm.fabs.") == 0) {
5080 return glsl::ExtInst::ExtInstFAbs;
5081 }
5082 if (func_info.getName().find("llvm.floor.") == 0) {
5083 return glsl::ExtInst::ExtInstFloor;
5084 }
5085 if (func_info.getName().find("llvm.sin.") == 0) {
5086 return glsl::ExtInst::ExtInstSin;
5087 }
5088 if (func_info.getName().find("llvm.cos.") == 0) {
5089 return glsl::ExtInst::ExtInstCos;
5090 }
alan-baker8b968112020-12-15 15:53:29 -05005091 if (func_info.getName().find("llvm.exp.") == 0) {
5092 return glsl::ExtInst::ExtInstExp;
5093 }
5094 if (func_info.getName().find("llvm.log.") == 0) {
5095 return glsl::ExtInst::ExtInstLog;
5096 }
5097 if (func_info.getName().find("llvm.pow.") == 0) {
5098 return glsl::ExtInst::ExtInstPow;
5099 }
James Price8cc3bb12021-05-05 10:20:58 -04005100 if (func_info.getName().find("llvm.smax.") == 0) {
5101 return glsl::ExtInst::ExtInstSMax;
5102 }
5103 if (func_info.getName().find("llvm.smin.") == 0) {
5104 return glsl::ExtInst::ExtInstSMin;
5105 }
Kévin Petit0c0c3882021-07-27 17:01:14 +01005106 if (func_info.getName().find("llvm.umax.") == 0) {
5107 return glsl::ExtInst::ExtInstUMax;
5108 }
5109 if (func_info.getName().find("llvm.umin.") == 0) {
5110 return glsl::ExtInst::ExtInstUMin;
5111 }
SJW61531372020-06-09 07:31:08 -05005112 return kGlslExtInstBad;
David Neto3fbb4072017-10-16 11:28:14 -04005113}
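// For example, the switch above selects the GLSL.std.450 variant from the
// scalar type of the first parameter: clamp(float,...) maps to NClamp,
// clamp(int,...) to SClamp and clamp(uint,...) to UClamp. Builtins with no
// direct equivalent fall through to the llvm.* name checks at the end and
// finally to kGlslExtInstBad.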
5114
SJW61531372020-06-09 07:31:08 -05005115glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(
5116 const Builtins::FunctionInfo &func_info) {
5117 switch (func_info.getType()) {
SJW2c317da2020-03-23 07:39:13 -05005118 case Builtins::kAcospi:
5119 return glsl::ExtInst::ExtInstAcos;
5120 case Builtins::kAsinpi:
5121 return glsl::ExtInst::ExtInstAsin;
5122 case Builtins::kAtanpi:
5123 return glsl::ExtInst::ExtInstAtan;
5124 case Builtins::kAtan2pi:
5125 return glsl::ExtInst::ExtInstAtan2;
5126 default:
5127 break;
5128 }
5129 return kGlslExtInstBad;
David Neto3fbb4072017-10-16 11:28:14 -04005130}
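// The "indirect" cases map the *pi builtins to the corresponding non-pi GLSL
// instruction, e.g. acospi(x) resolves to Acos even though acospi(x) equals
// acos(x) / pi; the follow-up scaling by 1/pi is presumably emitted by the
// caller and is not part of this lookup.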
5131
SJW61531372020-06-09 07:31:08 -05005132glsl::ExtInst SPIRVProducerPass::getDirectOrIndirectExtInstEnum(
5133 const Builtins::FunctionInfo &func_info) {
5134 auto direct = getExtInstEnum(func_info);
David Neto3fbb4072017-10-16 11:28:14 -04005135 if (direct != kGlslExtInstBad)
5136 return direct;
SJW61531372020-06-09 07:31:08 -05005137 return getIndirectExtInstEnum(func_info);
David Neto22f144c2017-06-12 14:26:21 -04005138}
5139
David Neto22f144c2017-06-12 14:26:21 -04005140void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
David Neto0676e6f2017-07-11 18:47:44 -04005141 binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
David Neto22f144c2017-06-12 14:26:21 -04005142}
5143
SJW88ed5fe2020-05-11 12:40:57 -05005144void SPIRVProducerPass::WriteResultID(const SPIRVInstruction &Inst) {
SJW01901d92020-05-21 08:58:31 -05005145 WriteOneWord(Inst.getResultID().get());
David Neto22f144c2017-06-12 14:26:21 -04005146}
5147
SJW88ed5fe2020-05-11 12:40:57 -05005148void SPIRVProducerPass::WriteWordCountAndOpcode(const SPIRVInstruction &Inst) {
David Neto22f144c2017-06-12 14:26:21 -04005149 // High 16 bits: word count
5150 // Low 16 bits: opcode
SJW88ed5fe2020-05-11 12:40:57 -05005151 uint32_t Word = Inst.getOpcode();
5152 const uint32_t count = Inst.getWordCount();
David Netoee2660d2018-06-28 16:31:29 -04005153 if (count > 65535) {
5154 errs() << "Word count limit of 65535 exceeded: " << count << "\n";
5155 llvm_unreachable("Word count too high");
5156 }
SJW88ed5fe2020-05-11 12:40:57 -05005157 Word |= Inst.getWordCount() << 16;
David Neto22f144c2017-06-12 14:26:21 -04005158 WriteOneWord(Word);
5159}
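// Worked example: an OpTypeFloat instruction (opcode 22) with a word count of
// 3 is encoded as (3 << 16) | 22 == 0x00030016, i.e. word count in the high
// 16 bits and opcode in the low 16 bits.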
5160
SJW88ed5fe2020-05-11 12:40:57 -05005161void SPIRVProducerPass::WriteOperand(const SPIRVOperand &Op) {
5162 SPIRVOperandType OpTy = Op.getType();
David Neto22f144c2017-06-12 14:26:21 -04005163 switch (OpTy) {
5164 default: {
5165 llvm_unreachable("Unsupported SPIRV Operand Type???");
5166 break;
5167 }
5168 case SPIRVOperandType::NUMBERID: {
SJW88ed5fe2020-05-11 12:40:57 -05005169 WriteOneWord(Op.getNumID());
David Neto22f144c2017-06-12 14:26:21 -04005170 break;
5171 }
5172 case SPIRVOperandType::LITERAL_STRING: {
SJW88ed5fe2020-05-11 12:40:57 -05005173 std::string Str = Op.getLiteralStr();
David Neto22f144c2017-06-12 14:26:21 -04005174 const char *Data = Str.c_str();
5175 size_t WordSize = Str.size() / 4;
5176 for (unsigned Idx = 0; Idx < WordSize; Idx++) {
5177 WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
5178 }
5179
5180 uint32_t Remainder = Str.size() % 4;
5181 uint32_t LastWord = 0;
5182 if (Remainder) {
5183 for (unsigned Idx = 0; Idx < Remainder; Idx++) {
5184 LastWord |= Data[4 * WordSize + Idx] << 8 * Idx;
5185 }
5186 }
5187
5188 WriteOneWord(LastWord);
5189 break;
5190 }
SJW88ed5fe2020-05-11 12:40:57 -05005191 case SPIRVOperandType::LITERAL_WORD: {
5192 WriteOneWord(Op.getLiteralNum()[0]);
5193 break;
5194 }
5195 case SPIRVOperandType::LITERAL_DWORD: {
5196 WriteOneWord(Op.getLiteralNum()[0]);
5197 WriteOneWord(Op.getLiteralNum()[1]);
David Neto22f144c2017-06-12 14:26:21 -04005198 break;
5199 }
5200 }
5201}
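// Note on LITERAL_STRING: the literal is nul-terminated and padded to a
// 32-bit boundary. "abc" becomes the single word 0x00636261 (the zero top
// byte doubles as the terminating nul), while a 4-character string such as
// "abcd" is followed by an extra all-zero word so the terminator is still
// emitted.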
5202
5203void SPIRVProducerPass::WriteSPIRVBinary() {
SJW69939d52020-04-16 07:29:07 -05005204 for (int i = 0; i < kSectionCount; ++i) {
5205 WriteSPIRVBinary(SPIRVSections[i]);
5206 }
5207}
5208
5209void SPIRVProducerPass::WriteSPIRVBinary(SPIRVInstructionList &SPIRVInstList) {
SJW88ed5fe2020-05-11 12:40:57 -05005210 for (const auto &Inst : SPIRVInstList) {
5211 const auto &Ops = Inst.getOperands();
5212 spv::Op Opcode = static_cast<spv::Op>(Inst.getOpcode());
David Neto22f144c2017-06-12 14:26:21 -04005213
5214 switch (Opcode) {
5215 default: {
David Neto5c22a252018-03-15 16:07:41 -04005216 errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
David Neto22f144c2017-06-12 14:26:21 -04005217 llvm_unreachable("Unsupported SPIRV instruction");
5218 break;
5219 }
Marco Antognini68e5c512020-09-09 16:08:57 +01005220 case spv::OpUnreachable:
David Neto22f144c2017-06-12 14:26:21 -04005221 case spv::OpCapability:
5222 case spv::OpExtension:
5223 case spv::OpMemoryModel:
5224 case spv::OpEntryPoint:
5225 case spv::OpExecutionMode:
5226 case spv::OpSource:
5227 case spv::OpDecorate:
5228 case spv::OpMemberDecorate:
5229 case spv::OpBranch:
5230 case spv::OpBranchConditional:
5231 case spv::OpSelectionMerge:
5232 case spv::OpLoopMerge:
5233 case spv::OpStore:
5234 case spv::OpImageWrite:
5235 case spv::OpReturnValue:
5236 case spv::OpControlBarrier:
5237 case spv::OpMemoryBarrier:
5238 case spv::OpReturn:
5239 case spv::OpFunctionEnd:
alan-baker4986eff2020-10-29 13:38:00 -04005240 case spv::OpCopyMemory:
5241 case spv::OpAtomicStore: {
David Neto22f144c2017-06-12 14:26:21 -04005242 WriteWordCountAndOpcode(Inst);
5243 for (uint32_t i = 0; i < Ops.size(); i++) {
5244 WriteOperand(Ops[i]);
5245 }
5246 break;
5247 }
5248 case spv::OpTypeBool:
5249 case spv::OpTypeVoid:
5250 case spv::OpTypeSampler:
5251 case spv::OpLabel:
5252 case spv::OpExtInstImport:
5253 case spv::OpTypePointer:
5254 case spv::OpTypeRuntimeArray:
5255 case spv::OpTypeStruct:
5256 case spv::OpTypeImage:
5257 case spv::OpTypeSampledImage:
5258 case spv::OpTypeInt:
5259 case spv::OpTypeFloat:
5260 case spv::OpTypeArray:
5261 case spv::OpTypeVector:
alan-baker86ce19c2020-08-05 13:09:19 -04005262 case spv::OpTypeFunction:
5263 case spv::OpString: {
David Neto22f144c2017-06-12 14:26:21 -04005264 WriteWordCountAndOpcode(Inst);
5265 WriteResultID(Inst);
5266 for (uint32_t i = 0; i < Ops.size(); i++) {
5267 WriteOperand(Ops[i]);
5268 }
5269 break;
5270 }
5271 case spv::OpFunction:
5272 case spv::OpFunctionParameter:
5273 case spv::OpAccessChain:
5274 case spv::OpPtrAccessChain:
5275 case spv::OpInBoundsAccessChain:
5276 case spv::OpUConvert:
5277 case spv::OpSConvert:
5278 case spv::OpConvertFToU:
5279 case spv::OpConvertFToS:
5280 case spv::OpConvertUToF:
5281 case spv::OpConvertSToF:
5282 case spv::OpFConvert:
5283 case spv::OpConvertPtrToU:
5284 case spv::OpConvertUToPtr:
5285 case spv::OpBitcast:
alan-bakerc9c55ae2019-12-02 16:01:27 -05005286 case spv::OpFNegate:
David Neto22f144c2017-06-12 14:26:21 -04005287 case spv::OpIAdd:
alan-bakera52b7312020-10-26 08:58:51 -04005288 case spv::OpIAddCarry:
David Neto22f144c2017-06-12 14:26:21 -04005289 case spv::OpFAdd:
5290 case spv::OpISub:
alan-baker3f1bf492020-11-05 09:07:36 -05005291 case spv::OpISubBorrow:
David Neto22f144c2017-06-12 14:26:21 -04005292 case spv::OpFSub:
5293 case spv::OpIMul:
5294 case spv::OpFMul:
5295 case spv::OpUDiv:
5296 case spv::OpSDiv:
5297 case spv::OpFDiv:
5298 case spv::OpUMod:
5299 case spv::OpSRem:
5300 case spv::OpFRem:
Kévin Petit8a560882019-03-21 15:24:34 +00005301 case spv::OpUMulExtended:
5302 case spv::OpSMulExtended:
David Neto22f144c2017-06-12 14:26:21 -04005303 case spv::OpBitwiseOr:
5304 case spv::OpBitwiseXor:
5305 case spv::OpBitwiseAnd:
David Netoa394f392017-08-26 20:45:29 -04005306 case spv::OpNot:
David Neto22f144c2017-06-12 14:26:21 -04005307 case spv::OpShiftLeftLogical:
5308 case spv::OpShiftRightLogical:
5309 case spv::OpShiftRightArithmetic:
5310 case spv::OpBitCount:
David Netoab03f432017-11-03 17:00:44 -04005311 case spv::OpCompositeConstruct:
David Neto22f144c2017-06-12 14:26:21 -04005312 case spv::OpCompositeExtract:
5313 case spv::OpVectorExtractDynamic:
5314 case spv::OpCompositeInsert:
alan-baker3f772c02021-06-15 22:18:11 -04005315 case spv::OpCopyLogical:
David Neto0a2f98d2017-09-15 19:38:40 -04005316 case spv::OpCopyObject:
David Neto22f144c2017-06-12 14:26:21 -04005317 case spv::OpVectorInsertDynamic:
5318 case spv::OpVectorShuffle:
5319 case spv::OpIEqual:
5320 case spv::OpINotEqual:
5321 case spv::OpUGreaterThan:
5322 case spv::OpUGreaterThanEqual:
5323 case spv::OpULessThan:
5324 case spv::OpULessThanEqual:
5325 case spv::OpSGreaterThan:
5326 case spv::OpSGreaterThanEqual:
5327 case spv::OpSLessThan:
5328 case spv::OpSLessThanEqual:
5329 case spv::OpFOrdEqual:
5330 case spv::OpFOrdGreaterThan:
5331 case spv::OpFOrdGreaterThanEqual:
5332 case spv::OpFOrdLessThan:
5333 case spv::OpFOrdLessThanEqual:
5334 case spv::OpFOrdNotEqual:
5335 case spv::OpFUnordEqual:
5336 case spv::OpFUnordGreaterThan:
5337 case spv::OpFUnordGreaterThanEqual:
5338 case spv::OpFUnordLessThan:
5339 case spv::OpFUnordLessThanEqual:
5340 case spv::OpFUnordNotEqual:
5341 case spv::OpExtInst:
5342 case spv::OpIsInf:
5343 case spv::OpIsNan:
5344 case spv::OpAny:
5345 case spv::OpAll:
5346 case spv::OpUndef:
5347 case spv::OpConstantNull:
5348 case spv::OpLogicalOr:
5349 case spv::OpLogicalAnd:
5350 case spv::OpLogicalNot:
5351 case spv::OpLogicalNotEqual:
5352 case spv::OpConstantComposite:
5353 case spv::OpSpecConstantComposite:
5354 case spv::OpConstantTrue:
5355 case spv::OpConstantFalse:
5356 case spv::OpConstant:
5357 case spv::OpSpecConstant:
5358 case spv::OpVariable:
5359 case spv::OpFunctionCall:
5360 case spv::OpSampledImage:
alan-baker75090e42020-02-20 11:21:04 -05005361 case spv::OpImageFetch:
alan-bakerf6bc8252020-09-23 14:58:55 -04005362 case spv::OpImageRead:
David Neto22f144c2017-06-12 14:26:21 -04005363 case spv::OpImageSampleExplicitLod:
David Neto5c22a252018-03-15 16:07:41 -04005364 case spv::OpImageQuerySize:
alan-bakerce179f12019-12-06 19:02:22 -05005365 case spv::OpImageQuerySizeLod:
David Neto22f144c2017-06-12 14:26:21 -04005366 case spv::OpSelect:
5367 case spv::OpPhi:
5368 case spv::OpLoad:
alan-baker4986eff2020-10-29 13:38:00 -04005369 case spv::OpAtomicLoad:
David Neto22f144c2017-06-12 14:26:21 -04005370 case spv::OpAtomicIAdd:
5371 case spv::OpAtomicISub:
5372 case spv::OpAtomicExchange:
5373 case spv::OpAtomicIIncrement:
5374 case spv::OpAtomicIDecrement:
5375 case spv::OpAtomicCompareExchange:
5376 case spv::OpAtomicUMin:
5377 case spv::OpAtomicSMin:
5378 case spv::OpAtomicUMax:
5379 case spv::OpAtomicSMax:
5380 case spv::OpAtomicAnd:
5381 case spv::OpAtomicOr:
5382 case spv::OpAtomicXor:
SJW806a5d82020-07-15 12:51:38 -05005383 case spv::OpDot:
5384 case spv::OpGroupNonUniformAll:
5385 case spv::OpGroupNonUniformAny:
5386 case spv::OpGroupNonUniformBroadcast:
5387 case spv::OpGroupNonUniformIAdd:
5388 case spv::OpGroupNonUniformFAdd:
5389 case spv::OpGroupNonUniformSMin:
5390 case spv::OpGroupNonUniformUMin:
5391 case spv::OpGroupNonUniformFMin:
5392 case spv::OpGroupNonUniformSMax:
5393 case spv::OpGroupNonUniformUMax:
5394 case spv::OpGroupNonUniformFMax: {
David Neto22f144c2017-06-12 14:26:21 -04005395 WriteWordCountAndOpcode(Inst);
5396 WriteOperand(Ops[0]);
5397 WriteResultID(Inst);
5398 for (uint32_t i = 1; i < Ops.size(); i++) {
5399 WriteOperand(Ops[i]);
5400 }
5401 break;
5402 }
5403 }
5404 }
5405}
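// The switch above covers three encoding shapes: instructions without a
// result write all operands in order; type/constant-style instructions write
// their result ID immediately after the opcode word; and value-producing
// instructions write operand 0 (the result type), then the result ID, then
// the remaining operands.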
Alan Baker9bf93fb2018-08-28 16:59:26 -04005406
alan-bakerb6b09dc2018-11-08 16:59:28 -05005407bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
Alan Baker9bf93fb2018-08-28 16:59:26 -04005408 switch (type->getTypeID()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005409 case Type::HalfTyID:
5410 case Type::FloatTyID:
5411 case Type::DoubleTyID:
5412 case Type::IntegerTyID:
James Price59a1c752020-04-23 23:06:16 -04005413 case Type::FixedVectorTyID:
alan-bakerb6b09dc2018-11-08 16:59:28 -05005414 return true;
5415 case Type::PointerTyID: {
5416 const PointerType *pointer_type = cast<PointerType>(type);
5417 if (pointer_type->getPointerAddressSpace() !=
5418 AddressSpace::UniformConstant) {
5419 auto pointee_type = pointer_type->getPointerElementType();
5420 if (pointee_type->isStructTy() &&
5421 cast<StructType>(pointee_type)->isOpaque()) {
5422 // Images and samplers are not nullable.
5423 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005424 }
Alan Baker9bf93fb2018-08-28 16:59:26 -04005425 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05005426 return true;
5427 }
5428 case Type::ArrayTyID:
alan-baker8eb435a2020-04-08 00:42:06 -04005429 return IsTypeNullable(type->getArrayElementType());
alan-bakerb6b09dc2018-11-08 16:59:28 -05005430 case Type::StructTyID: {
5431 const StructType *struct_type = cast<StructType>(type);
5432 // Images and samplers are not nullable.
5433 if (struct_type->isOpaque())
Alan Baker9bf93fb2018-08-28 16:59:26 -04005434 return false;
alan-bakerb6b09dc2018-11-08 16:59:28 -05005435 for (const auto element : struct_type->elements()) {
5436 if (!IsTypeNullable(element))
5437 return false;
5438 }
5439 return true;
5440 }
5441 default:
5442 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005443 }
5444}
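// In effect, plain aggregates of scalars and vectors (e.g. a struct of float4
// and int) are nullable, while any type that contains an opaque image or
// sampler is not.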
Alan Bakerfcda9482018-10-02 17:09:59 -04005445
SJW77b87ad2020-04-21 14:37:52 -05005446void SPIRVProducerPass::PopulateUBOTypeMaps() {
Alan Bakerfcda9482018-10-02 17:09:59 -04005447 if (auto *offsets_md =
SJW77b87ad2020-04-21 14:37:52 -05005448 module->getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
Alan Bakerfcda9482018-10-02 17:09:59 -04005449 // Metadata is stored as key-value pair operands. The first element of each
5450 // operand is the type and the second is a vector of offsets.
5451 for (const auto *operand : offsets_md->operands()) {
5452 const auto *pair = cast<MDTuple>(operand);
5453 auto *type =
5454 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5455 const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
5456 std::vector<uint32_t> offsets;
5457 for (const Metadata *offset_md : offset_vector->operands()) {
5458 const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
alan-bakerb6b09dc2018-11-08 16:59:28 -05005459 offsets.push_back(static_cast<uint32_t>(
5460 cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
Alan Bakerfcda9482018-10-02 17:09:59 -04005461 }
5462 RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
5463 }
5464 }
5465
5466 if (auto *sizes_md =
SJW77b87ad2020-04-21 14:37:52 -05005467 module->getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
Alan Bakerfcda9482018-10-02 17:09:59 -04005468 // Metadata is stored as key-value pair operands. The first element of each
5469 // operand is the type and the second is a triple of sizes: type size in
5470 // bits, store size and alloc size.
5471 for (const auto *operand : sizes_md->operands()) {
5472 const auto *pair = cast<MDTuple>(operand);
5473 auto *type =
5474 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5475 const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
5476 uint64_t type_size_in_bits =
5477 cast<ConstantInt>(
5478 cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
5479 ->getZExtValue();
5480 uint64_t type_store_size =
5481 cast<ConstantInt>(
5482 cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
5483 ->getZExtValue();
5484 uint64_t type_alloc_size =
5485 cast<ConstantInt>(
5486 cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
5487 ->getZExtValue();
5488 RemappedUBOTypeSizes.insert(std::make_pair(
5489 type, std::make_tuple(type_size_in_bits, type_store_size,
5490 type_alloc_size)));
5491 }
5492 }
5493}
5494
5495uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
5496 const DataLayout &DL) {
5497 auto iter = RemappedUBOTypeSizes.find(type);
5498 if (iter != RemappedUBOTypeSizes.end()) {
5499 return std::get<0>(iter->second);
5500 }
5501
5502 return DL.getTypeSizeInBits(type);
5503}
5504
Alan Bakerfcda9482018-10-02 17:09:59 -04005505uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
5506 auto iter = RemappedUBOTypeSizes.find(type);
5507 if (iter != RemappedUBOTypeSizes.end()) {
5508 return std::get<2>(iter->second);
5509 }
5510
5511 return DL.getTypeAllocSize(type);
5512}
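// Both size helpers prefer the remapped explicit-layout sizes recorded in the
// module metadata over the native DataLayout answer, so e.g. a struct that
// was padded for UBO layout reports its padded size here.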
alan-baker5b86ed72019-02-15 08:26:50 -05005513
Kévin Petitbbbda972020-03-03 19:16:31 +00005514uint32_t SPIRVProducerPass::GetExplicitLayoutStructMemberOffset(
5515 StructType *type, unsigned member, const DataLayout &DL) {
5516 const auto StructLayout = DL.getStructLayout(type);
5517 // Search for the correct offsets if this type was remapped.
5518 std::vector<uint32_t> *offsets = nullptr;
5519 auto iter = RemappedUBOTypeOffsets.find(type);
5520 if (iter != RemappedUBOTypeOffsets.end()) {
5521 offsets = &iter->second;
5522 }
5523 auto ByteOffset =
5524 static_cast<uint32_t>(StructLayout->getElementOffset(member));
5525 if (offsets) {
5526 ByteOffset = (*offsets)[member];
5527 }
5528
5529 return ByteOffset;
5530}
5531
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005532void SPIRVProducerPass::setVariablePointersCapabilities(
5533 unsigned address_space) {
alan-baker5b86ed72019-02-15 08:26:50 -05005534 if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
SJW01901d92020-05-21 08:58:31 -05005535 setVariablePointersStorageBuffer();
alan-baker5b86ed72019-02-15 08:26:50 -05005536 } else {
SJW01901d92020-05-21 08:58:31 -05005537 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05005538 }
5539}
5540
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005541Value *SPIRVProducerPass::GetBasePointer(Value *v) {
alan-baker5b86ed72019-02-15 08:26:50 -05005542 if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
5543 return GetBasePointer(gep->getPointerOperand());
5544 }
5545
5546 // Conservatively return |v|.
5547 return v;
5548}
5549
5550bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
5551 if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
5552 if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
alan-baker7506abb2020-09-10 15:02:55 -04005553 const auto &lhs_func_info =
5554 Builtins::Lookup(lhs_call->getCalledFunction());
5555 const auto &rhs_func_info =
5556 Builtins::Lookup(rhs_call->getCalledFunction());
SJW61531372020-06-09 07:31:08 -05005557 if (lhs_func_info.getType() == Builtins::kClspvResource &&
5558 rhs_func_info.getType() == Builtins::kClspvResource) {
alan-baker5b86ed72019-02-15 08:26:50 -05005559 // For resource accessors, match descriptor set and binding.
5560 if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
5561 lhs_call->getOperand(1) == rhs_call->getOperand(1))
5562 return true;
SJW61531372020-06-09 07:31:08 -05005563 } else if (lhs_func_info.getType() == Builtins::kClspvLocal &&
5564 rhs_func_info.getType() == Builtins::kClspvLocal) {
alan-baker5b86ed72019-02-15 08:26:50 -05005565 // For workgroup resources, match spec id.
5566 if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
5567 return true;
5568 }
5569 }
5570 }
5571
5572 return false;
5573}
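// Example: two distinct resource accessor calls (Builtins::kClspvResource)
// are treated as the same resource when their descriptor-set and binding
// operands match, and two workgroup variable accessors
// (Builtins::kClspvLocal) match when they share a spec id.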
5574
5575bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
5576 assert(inst->getType()->isPointerTy());
5577 assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
5578 spv::StorageClassStorageBuffer);
5579 const bool hack_undef = clspv::Option::HackUndef();
5580 if (auto *select = dyn_cast<SelectInst>(inst)) {
5581 auto *true_base = GetBasePointer(select->getTrueValue());
5582 auto *false_base = GetBasePointer(select->getFalseValue());
5583
5584 if (true_base == false_base)
5585 return true;
5586
5587 // If either the true or false operand is null, then we satisfy the
5588 // same-object constraint.
5589 if (auto *true_cst = dyn_cast<Constant>(true_base)) {
5590 if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
5591 return true;
5592 }
5593
5594 if (auto *false_cst = dyn_cast<Constant>(false_base)) {
5595 if (false_cst->isNullValue() ||
5596 (hack_undef && isa<UndefValue>(false_base)))
5597 return true;
5598 }
5599
5600 if (sameResource(true_base, false_base))
5601 return true;
5602 } else if (auto *phi = dyn_cast<PHINode>(inst)) {
5603 Value *value = nullptr;
5604 bool ok = true;
5605 for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
5606 auto *base = GetBasePointer(phi->getIncomingValue(i));
5607 // Null values satisfy the constraint of selecting from the same
5608 // object.
5609 if (!value) {
5610 if (auto *cst = dyn_cast<Constant>(base)) {
5611 if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
5612 value = base;
5613 } else {
5614 value = base;
5615 }
5616 } else if (base != value) {
5617 if (auto *base_cst = dyn_cast<Constant>(base)) {
5618 if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
5619 continue;
5620 }
5621
5622 if (sameResource(value, base))
5623 continue;
5624
5625 // Values don't represent the same base.
5626 ok = false;
5627 }
5628 }
5629
5630 return ok;
5631 }
5632
5633 // Conservatively return false.
5634 return false;
5635}
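// Illustrative OpenCL C sketch (assumed, not taken from the source): a select
// such as
//   global float *p = cond ? &buf[i] : &buf[j];
// satisfies the same-object rule because both arms reduce to the base |buf|,
// whereas selecting between pointers into two different SSBO arguments would
// not.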
alan-bakere9308012019-03-15 10:25:13 -04005636
5637bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
5638 if (!Arg.getType()->isPointerTy() ||
5639 Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
5640 // Only SSBOs need to be annotated as coherent.
5641 return false;
5642 }
5643
5644 DenseSet<Value *> visited;
5645 std::vector<Value *> stack;
5646 for (auto *U : Arg.getParent()->users()) {
5647 if (auto *call = dyn_cast<CallInst>(U)) {
5648 stack.push_back(call->getOperand(Arg.getArgNo()));
5649 }
5650 }
5651
5652 while (!stack.empty()) {
5653 Value *v = stack.back();
5654 stack.pop_back();
5655
5656 if (!visited.insert(v).second)
5657 continue;
5658
5659 auto *resource_call = dyn_cast<CallInst>(v);
5660 if (resource_call &&
SJW61531372020-06-09 07:31:08 -05005661 Builtins::Lookup(resource_call->getCalledFunction()).getType() ==
5662 Builtins::kClspvResource) {
alan-bakere9308012019-03-15 10:25:13 -04005663 // If this is a resource accessor function, check if the coherent operand
5664 // is set.
5665 const auto coherent =
5666 unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
5667 ->getZExtValue());
5668 if (coherent == 1)
5669 return true;
5670 } else if (auto *arg = dyn_cast<Argument>(v)) {
5671 // If this is a function argument, trace through its callers.
alan-bakere98f3f92019-04-08 15:06:36 -04005672 for (auto U : arg->getParent()->users()) {
alan-bakere9308012019-03-15 10:25:13 -04005673 if (auto *call = dyn_cast<CallInst>(U)) {
5674 stack.push_back(call->getOperand(arg->getArgNo()));
5675 }
5676 }
5677 } else if (auto *user = dyn_cast<User>(v)) {
5678 // If this is a user, traverse all operands that could lead to resource
5679 // variables.
5680 for (unsigned i = 0; i != user->getNumOperands(); ++i) {
5681 Value *operand = user->getOperand(i);
5682 if (operand->getType()->isPointerTy() &&
5683 operand->getType()->getPointerAddressSpace() ==
5684 clspv::AddressSpace::Global) {
5685 stack.push_back(operand);
5686 }
5687 }
5688 }
5689 }
5690
5691 // No coherent resource variables encountered.
5692 return false;
5693}
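// The traversal above starts from each call site's value for |Arg|, follows
// function arguments back through their callers and pointer-typed operands,
// and returns true as soon as a resource accessor call is found whose
// coherent operand (operand 5) is 1.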
alan-baker06cad652019-12-03 17:56:47 -05005694
SJW77b87ad2020-04-21 14:37:52 -05005695void SPIRVProducerPass::PopulateStructuredCFGMaps() {
alan-baker06cad652019-12-03 17:56:47 -05005696 // First, track loop merges and continues.
5697 DenseSet<BasicBlock *> LoopMergesAndContinues;
SJW77b87ad2020-04-21 14:37:52 -05005698 for (auto &F : *module) {
alan-baker06cad652019-12-03 17:56:47 -05005699 if (F.isDeclaration())
5700 continue;
5701
5702 DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
5703 const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>(F).getLoopInfo();
5704 std::deque<BasicBlock *> order;
5705 DenseSet<BasicBlock *> visited;
5706 clspv::ComputeStructuredOrder(&*F.begin(), &DT, LI, &order, &visited);
5707
5708 for (auto BB : order) {
5709 auto terminator = BB->getTerminator();
5710 auto branch = dyn_cast<BranchInst>(terminator);
5711 if (LI.isLoopHeader(BB)) {
5712 auto L = LI.getLoopFor(BB);
5713 BasicBlock *ContinueBB = nullptr;
5714 BasicBlock *MergeBB = nullptr;
5715
5716 MergeBB = L->getExitBlock();
5717 if (!MergeBB) {
5718 // The StructurizeCFG pass converts the CFG into a triangle shape whose
5719 // regions have a single entry and exit. As a result, the loop should
5720 // not have multiple exits.
5721 llvm_unreachable("Loop has multiple exits???");
5722 }
5723
5724 if (L->isLoopLatch(BB)) {
5725 ContinueBB = BB;
5726 } else {
5727 // From SPIR-V spec section 2.11, the Continue Target must dominate the
5728 // back-edge block.
5729 BasicBlock *Header = L->getHeader();
5730 BasicBlock *Latch = L->getLoopLatch();
5731 for (auto *loop_block : L->blocks()) {
5732 if (loop_block == Header) {
5733 continue;
5734 }
5735
5736 // Check whether this block dominates the block with the back-edge.
5737 // The loop latch is the single block with a back-edge. Where possible,
5738 // StructurizeCFG made the loop conform to this requirement; otherwise
5739 // |Latch| is nullptr.
5740 if (DT.dominates(loop_block, Latch)) {
5741 ContinueBB = loop_block;
5742 }
5743 }
5744
5745 if (!ContinueBB) {
5746 llvm_unreachable("Wrong continue block from loop");
5747 }
5748 }
5749
5750 // Record the continue and merge blocks.
5751 MergeBlocks[BB] = MergeBB;
5752 ContinueBlocks[BB] = ContinueBB;
5753 LoopMergesAndContinues.insert(MergeBB);
5754 LoopMergesAndContinues.insert(ContinueBB);
5755 } else if (branch && branch->isConditional()) {
5756 auto L = LI.getLoopFor(BB);
5757 bool HasBackedge = false;
5758 while (L && !HasBackedge) {
5759 if (L->isLoopLatch(BB)) {
5760 HasBackedge = true;
5761 }
5762 L = L->getParentLoop();
5763 }
5764
5765 if (!HasBackedge) {
5766 // Only need a merge if the branch doesn't include a loop break or
5767 // continue.
5768 auto true_bb = branch->getSuccessor(0);
5769 auto false_bb = branch->getSuccessor(1);
5770 if (!LoopMergesAndContinues.count(true_bb) &&
5771 !LoopMergesAndContinues.count(false_bb)) {
5772 // The StructurizeCFG pass has already manipulated the CFG, so just use
5773 // the false block of the branch instruction as the merge block.
5774 MergeBlocks[BB] = false_bb;
5775 }
5776 }
5777 }
5778 }
5779 }
5780}
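// For a simple counted loop this records MergeBlocks[header] as the loop's
// single exit block and ContinueBlocks[header] as the latch (or the block
// dominating the back-edge); these maps feed the OpLoopMerge and
// OpSelectionMerge operands emitted elsewhere in this pass.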
alan-baker86ce19c2020-08-05 13:09:19 -04005781
5782SPIRVID SPIRVProducerPass::getReflectionImport() {
5783 if (!ReflectionID.isValid()) {
5784 addSPIRVInst<kExtensions>(spv::OpExtension, "SPV_KHR_non_semantic_info");
5785 ReflectionID = addSPIRVInst<kImports>(spv::OpExtInstImport,
5786 "NonSemantic.ClspvReflection.1");
5787 }
5788 return ReflectionID;
5789}
5790
5791void SPIRVProducerPass::GenerateReflection() {
5792 GenerateKernelReflection();
5793 GeneratePushConstantReflection();
5794 GenerateSpecConstantReflection();
5795}
5796
5797void SPIRVProducerPass::GeneratePushConstantReflection() {
5798 if (auto GV = module->getGlobalVariable(clspv::PushConstantsVariableName())) {
5799 auto const &DL = module->getDataLayout();
5800 auto MD = GV->getMetadata(clspv::PushConstantsMetadataName());
5801 auto STy = cast<StructType>(GV->getValueType());
5802
5803 for (unsigned i = 0; i < STy->getNumElements(); i++) {
5804 auto pc = static_cast<clspv::PushConstant>(
5805 mdconst::extract<ConstantInt>(MD->getOperand(i))->getZExtValue());
5806 if (pc == PushConstant::KernelArgument)
5807 continue;
5808
5809 auto memberType = STy->getElementType(i);
5810 auto offset = GetExplicitLayoutStructMemberOffset(STy, i, DL);
Marco Antognini7e338402021-03-15 12:48:37 +00005811#ifndef NDEBUG
alan-baker86ce19c2020-08-05 13:09:19 -04005812 unsigned previousOffset = 0;
5813 if (i > 0) {
5814 previousOffset = GetExplicitLayoutStructMemberOffset(STy, i - 1, DL);
5815 }
alan-baker86ce19c2020-08-05 13:09:19 -04005816 assert(isValidExplicitLayout(*module, STy, i,
5817 spv::StorageClassPushConstant, offset,
5818 previousOffset));
Marco Antognini7e338402021-03-15 12:48:37 +00005819#endif
alan-baker86ce19c2020-08-05 13:09:19 -04005820
5821 reflection::ExtInst pc_inst = reflection::ExtInstMax;
5822 switch (pc) {
5823 case PushConstant::GlobalOffset:
5824 pc_inst = reflection::ExtInstPushConstantGlobalOffset;
5825 break;
5826 case PushConstant::EnqueuedLocalSize:
5827 pc_inst = reflection::ExtInstPushConstantEnqueuedLocalSize;
5828 break;
5829 case PushConstant::GlobalSize:
5830 pc_inst = reflection::ExtInstPushConstantGlobalSize;
5831 break;
5832 case PushConstant::RegionOffset:
5833 pc_inst = reflection::ExtInstPushConstantRegionOffset;
5834 break;
5835 case PushConstant::NumWorkgroups:
5836 pc_inst = reflection::ExtInstPushConstantNumWorkgroups;
5837 break;
5838 case PushConstant::RegionGroupOffset:
5839 pc_inst = reflection::ExtInstPushConstantRegionGroupOffset;
5840 break;
5841 default:
5842 llvm_unreachable("Unhandled push constant");
5843 break;
5844 }
5845
5846 auto import_id = getReflectionImport();
Marco Antognini7e338402021-03-15 12:48:37 +00005847 auto size = static_cast<uint32_t>(GetTypeSizeInBits(memberType, DL)) / 8;
alan-baker86ce19c2020-08-05 13:09:19 -04005848 SPIRVOperandVec Ops;
5849 Ops << getSPIRVType(Type::getVoidTy(module->getContext())) << import_id
5850 << pc_inst << getSPIRVInt32Constant(offset)
5851 << getSPIRVInt32Constant(size);
5852 addSPIRVInst(spv::OpExtInst, Ops);
5853 }
5854 }
5855}
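// Each reported push constant becomes, roughly, the following in disassembled
// form (names abbreviated):
//   OpExtInst %void %NonSemantic.ClspvReflection.1 PushConstantGlobalOffset
//             %offset %size
// where %offset and %size are 32-bit integer constants measured in bytes.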
5856
5857void SPIRVProducerPass::GenerateSpecConstantReflection() {
5858 const uint32_t kMax = std::numeric_limits<uint32_t>::max();
5859 uint32_t wgsize_id[3] = {kMax, kMax, kMax};
5860 uint32_t global_offset_id[3] = {kMax, kMax, kMax};
5861 uint32_t work_dim_id = kMax;
5862 for (auto pair : clspv::GetSpecConstants(module)) {
5863 auto kind = pair.first;
5864 auto id = pair.second;
5865
5866 // Local memory size is only used for kernel arguments.
5867 if (kind == SpecConstant::kLocalMemorySize)
5868 continue;
5869
5870 switch (kind) {
5871 case SpecConstant::kWorkgroupSizeX:
5872 wgsize_id[0] = id;
5873 break;
5874 case SpecConstant::kWorkgroupSizeY:
5875 wgsize_id[1] = id;
5876 break;
5877 case SpecConstant::kWorkgroupSizeZ:
5878 wgsize_id[2] = id;
5879 break;
5880 case SpecConstant::kGlobalOffsetX:
5881 global_offset_id[0] = id;
5882 break;
5883 case SpecConstant::kGlobalOffsetY:
5884 global_offset_id[1] = id;
5885 break;
5886 case SpecConstant::kGlobalOffsetZ:
5887 global_offset_id[2] = id;
5888 break;
5889 case SpecConstant::kWorkDim:
5890 work_dim_id = id;
5891 break;
5892 default:
5893 llvm_unreachable("Unhandled spec constant");
5894 }
5895 }
5896
5897 auto import_id = getReflectionImport();
5898 auto void_id = getSPIRVType(Type::getVoidTy(module->getContext()));
5899 SPIRVOperandVec Ops;
5900 if (wgsize_id[0] != kMax) {
5901 assert(wgsize_id[1] != kMax);
5902 assert(wgsize_id[2] != kMax);
5903 Ops.clear();
5904 Ops << void_id << import_id << reflection::ExtInstSpecConstantWorkgroupSize
5905 << getSPIRVInt32Constant(wgsize_id[0])
5906 << getSPIRVInt32Constant(wgsize_id[1])
5907 << getSPIRVInt32Constant(wgsize_id[2]);
5908 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
5909 }
5910 if (global_offset_id[0] != kMax) {
5911 assert(global_offset_id[1] != kMax);
5912 assert(global_offset_id[2] != kMax);
5913 Ops.clear();
5914 Ops << void_id << import_id << reflection::ExtInstSpecConstantGlobalOffset
5915 << getSPIRVInt32Constant(global_offset_id[0])
5916 << getSPIRVInt32Constant(global_offset_id[1])
5917 << getSPIRVInt32Constant(global_offset_id[2]);
5918 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
5919 }
5920 if (work_dim_id != kMax) {
5921 Ops.clear();
5922 Ops << void_id << import_id << reflection::ExtInstSpecConstantWorkDim
5923 << getSPIRVInt32Constant(work_dim_id);
5924 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
5925 }
5926}
5927
5928void SPIRVProducerPass::GenerateKernelReflection() {
5929 const auto &DL = module->getDataLayout();
5930 auto import_id = getReflectionImport();
5931 auto void_id = getSPIRVType(Type::getVoidTy(module->getContext()));
5932
5933 for (auto &F : *module) {
5934 if (F.isDeclaration() || F.getCallingConv() != CallingConv::SPIR_KERNEL) {
5935 continue;
5936 }
5937
5938 // OpString for the kernel name.
5939 auto kernel_name =
5940 addSPIRVInst<kDebug>(spv::OpString, F.getName().str().c_str());
5941
5942 // Kernel declaration
5943 // Ops[0] = void type
5944 // Ops[1] = reflection ext import
5945 // Ops[2] = function id
5946 // Ops[3] = kernel name
5947 SPIRVOperandVec Ops;
5948 Ops << void_id << import_id << reflection::ExtInstKernel << ValueMap[&F]
5949 << kernel_name;
5950 auto kernel_decl = addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
5951
5952 // Generate the required workgroup size property if it was specified.
5953 if (const MDNode *MD = F.getMetadata("reqd_work_group_size")) {
5954 uint32_t CurXDimCst = static_cast<uint32_t>(
5955 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
5956 uint32_t CurYDimCst = static_cast<uint32_t>(
5957 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
5958 uint32_t CurZDimCst = static_cast<uint32_t>(
5959 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
5960
5961 Ops.clear();
5962 Ops << void_id << import_id
5963 << reflection::ExtInstPropertyRequiredWorkgroupSize << kernel_decl
5964 << getSPIRVInt32Constant(CurXDimCst)
5965 << getSPIRVInt32Constant(CurYDimCst)
5966 << getSPIRVInt32Constant(CurZDimCst);
5967 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
5968 }
5969
5970 auto &resource_var_at_index = FunctionToResourceVarsMap[&F];
5971 auto *func_ty = F.getFunctionType();
5972
5973 // If we've clustered POD arguments, then argument details are in metadata.
5974 // If an argument maps to a resource variable, then get descriptor set and
5975 // binding from the resource variable. Other info comes from the metadata.
5976 const auto *arg_map = F.getMetadata(clspv::KernelArgMapMetadataName());
5977 auto local_spec_id_md =
5978 module->getNamedMetadata(clspv::LocalSpecIdMetadataName());
5979 if (arg_map) {
5980 for (const auto &arg : arg_map->operands()) {
5981 const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
5982 assert(arg_node->getNumOperands() == 6);
5983 const auto name =
5984 dyn_cast<MDString>(arg_node->getOperand(0))->getString();
5985 const auto old_index =
5986 dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
5987 // Remapped argument index
5988 const int new_index = static_cast<int>(
5989 dyn_extract<ConstantInt>(arg_node->getOperand(2))->getSExtValue());
5990 const auto offset =
5991 dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
5992 const auto size =
5993 dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
5994 const auto argKind = clspv::GetArgKindFromName(
5995 dyn_cast<MDString>(arg_node->getOperand(5))->getString().str());
5996
5997 // If this is a local memory argument, find the right spec id for this
5998 // argument.
5999 int64_t spec_id = -1;
6000 if (argKind == clspv::ArgKind::Local) {
6001 for (auto spec_id_arg : local_spec_id_md->operands()) {
6002 if ((&F == dyn_cast<Function>(
6003 dyn_cast<ValueAsMetadata>(spec_id_arg->getOperand(0))
6004 ->getValue())) &&
6005 (static_cast<uint64_t>(new_index) ==
6006 mdconst::extract<ConstantInt>(spec_id_arg->getOperand(1))
6007 ->getZExtValue())) {
6008 spec_id =
6009 mdconst::extract<ConstantInt>(spec_id_arg->getOperand(2))
6010 ->getSExtValue();
6011 break;
6012 }
6013 }
6014 }
6015
6016 // Generate the specific argument instruction.
6017 const uint32_t ordinal = static_cast<uint32_t>(old_index);
6018 const uint32_t arg_offset = static_cast<uint32_t>(offset);
6019 const uint32_t arg_size = static_cast<uint32_t>(size);
6020 uint32_t elem_size = 0;
6021 uint32_t descriptor_set = 0;
6022 uint32_t binding = 0;
6023 if (spec_id > 0) {
6024 elem_size = static_cast<uint32_t>(
6025 GetTypeAllocSize(func_ty->getParamType(unsigned(new_index))
6026 ->getPointerElementType(),
6027 DL));
6028 } else if (new_index >= 0) {
6029 auto *info = resource_var_at_index[new_index];
6030 assert(info);
6031 descriptor_set = info->descriptor_set;
6032 binding = info->binding;
6033 }
6034 AddArgumentReflection(kernel_decl, name.str(), argKind, ordinal,
6035 descriptor_set, binding, arg_offset, arg_size,
6036 static_cast<uint32_t>(spec_id), elem_size);
6037 }
6038 } else {
6039 // There is no argument map.
6040 // Take descriptor info from the resource variable calls.
6041 // Take argument name and size from the arguments list.
6042
6043 SmallVector<Argument *, 4> arguments;
6044 for (auto &arg : F.args()) {
6045 arguments.push_back(&arg);
6046 }
6047
6048 unsigned arg_index = 0;
6049 for (auto *info : resource_var_at_index) {
6050 if (info) {
6051 auto arg = arguments[arg_index];
6052 unsigned arg_size = 0;
6053 if (info->arg_kind == clspv::ArgKind::Pod ||
6054 info->arg_kind == clspv::ArgKind::PodUBO ||
6055 info->arg_kind == clspv::ArgKind::PodPushConstant) {
6056 arg_size =
6057 static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
6058 }
6059
6060 // Local pointer arguments are unused in this case.
6061 // offset, spec_id and elem_size are always 0.
6062 AddArgumentReflection(kernel_decl, arg->getName().str(),
6063 info->arg_kind, arg_index, info->descriptor_set,
6064 info->binding, 0, arg_size, 0, 0);
6065 }
6066 arg_index++;
6067 }
6068 // Generate mappings for pointer-to-local arguments.
6069 for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
6070 Argument *arg = arguments[arg_index];
6071 auto where = LocalArgSpecIds.find(arg);
6072 if (where != LocalArgSpecIds.end()) {
6073 auto &local_arg_info = LocalSpecIdInfoMap[where->second];
6074
6075 // descriptor_set, binding, offset and size are always 0.
6076 AddArgumentReflection(kernel_decl, arg->getName().str(),
6077 ArgKind::Local, arg_index, 0, 0, 0, 0,
6078 static_cast<uint32_t>(local_arg_info.spec_id),
6079 static_cast<uint32_t>(GetTypeAllocSize(
6080 local_arg_info.elem_type, DL)));
6081 }
6082 }
6083 }
6084 }
6085}
6086
6087void SPIRVProducerPass::AddArgumentReflection(
6088 SPIRVID kernel_decl, const std::string &name, clspv::ArgKind arg_kind,
6089 uint32_t ordinal, uint32_t descriptor_set, uint32_t binding,
6090 uint32_t offset, uint32_t size, uint32_t spec_id, uint32_t elem_size) {
6091 // Generate ArgumentInfo for this argument.
6092 // TODO: generate remaining optional operands.
6093 auto import_id = getReflectionImport();
6094 auto arg_name = addSPIRVInst<kDebug>(spv::OpString, name.c_str());
6095 auto void_id = getSPIRVType(Type::getVoidTy(module->getContext()));
6096 SPIRVOperandVec Ops;
6097 Ops << void_id << import_id << reflection::ExtInstArgumentInfo << arg_name;
6098 auto arg_info = addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
6099
6100 Ops.clear();
6101 Ops << void_id << import_id;
6102 reflection::ExtInst ext_inst = reflection::ExtInstMax;
6103 // Determine the extended instruction.
6104 switch (arg_kind) {
6105 case clspv::ArgKind::Buffer:
6106 ext_inst = reflection::ExtInstArgumentStorageBuffer;
6107 break;
6108 case clspv::ArgKind::BufferUBO:
6109 ext_inst = reflection::ExtInstArgumentUniform;
6110 break;
6111 case clspv::ArgKind::Local:
6112 ext_inst = reflection::ExtInstArgumentWorkgroup;
6113 break;
6114 case clspv::ArgKind::Pod:
6115 ext_inst = reflection::ExtInstArgumentPodStorageBuffer;
6116 break;
6117 case clspv::ArgKind::PodUBO:
6118 ext_inst = reflection::ExtInstArgumentPodUniform;
6119 break;
6120 case clspv::ArgKind::PodPushConstant:
6121 ext_inst = reflection::ExtInstArgumentPodPushConstant;
6122 break;
alan-bakerf6bc8252020-09-23 14:58:55 -04006123 case clspv::ArgKind::SampledImage:
alan-baker86ce19c2020-08-05 13:09:19 -04006124 ext_inst = reflection::ExtInstArgumentSampledImage;
6125 break;
alan-bakerf6bc8252020-09-23 14:58:55 -04006126 case clspv::ArgKind::StorageImage:
alan-baker86ce19c2020-08-05 13:09:19 -04006127 ext_inst = reflection::ExtInstArgumentStorageImage;
6128 break;
6129 case clspv::ArgKind::Sampler:
6130 ext_inst = reflection::ExtInstArgumentSampler;
6131 break;
6132 default:
6133 llvm_unreachable("Unhandled argument reflection");
6134 break;
6135 }
6136 Ops << ext_inst << kernel_decl << getSPIRVInt32Constant(ordinal);
6137
6138 // Add descriptor set and binding for applicable arguments.
6139 switch (arg_kind) {
6140 case clspv::ArgKind::Buffer:
6141 case clspv::ArgKind::BufferUBO:
6142 case clspv::ArgKind::Pod:
6143 case clspv::ArgKind::PodUBO:
alan-bakerf6bc8252020-09-23 14:58:55 -04006144 case clspv::ArgKind::SampledImage:
6145 case clspv::ArgKind::StorageImage:
alan-baker86ce19c2020-08-05 13:09:19 -04006146 case clspv::ArgKind::Sampler:
6147 Ops << getSPIRVInt32Constant(descriptor_set)
6148 << getSPIRVInt32Constant(binding);
6149 break;
6150 default:
6151 break;
6152 }
6153
6154 // Add remaining operands for arguments.
6155 switch (arg_kind) {
6156 case clspv::ArgKind::Local:
6157 Ops << getSPIRVInt32Constant(spec_id) << getSPIRVInt32Constant(elem_size);
6158 break;
6159 case clspv::ArgKind::Pod:
6160 case clspv::ArgKind::PodUBO:
6161 case clspv::ArgKind::PodPushConstant:
6162 Ops << getSPIRVInt32Constant(offset) << getSPIRVInt32Constant(size);
6163 break;
6164 default:
6165 break;
6166 }
6167 Ops << arg_info;
6168 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
6169}