// Copyright 2017 The Clspv Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifdef _MSC_VER
#pragma warning(push, 0)
#endif

#include <cassert>
#include <cstring>
#include <iomanip>
#include <list>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <tuple>
#include <unordered_set>
#include <utility>

#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/UniqueVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/FileSystem.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Cloning.h"

// enable spv::HasResultAndType
#define SPV_ENABLE_UTILITY_CODE
#include "spirv/unified1/spirv.hpp"

#include "clspv/AddressSpace.h"
#include "clspv/Option.h"
#include "clspv/PushConstant.h"
#include "clspv/SpecConstant.h"
#include "clspv/spirv_c_strings.hpp"
#include "clspv/spirv_glsl.hpp"
#include "clspv/spirv_reflection.hpp"

#include "ArgKind.h"
#include "Builtins.h"
#include "ComputeStructuredOrder.h"
#include "ConstantEmitter.h"
#include "Constants.h"
#include "DescriptorCounter.h"
#include "Layout.h"
#include "NormalizeGlobalVariable.h"
#include "Passes.h"
#include "SpecConstant.h"
#include "Types.h"

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

using namespace llvm;
using namespace clspv;
using namespace clspv::Builtins;
using namespace clspv::Option;
using namespace mdconst;

namespace {

cl::opt<std::string> TestOutFile("producer-out-file", cl::init("test.spv"),
                                 cl::ReallyHidden,
                                 cl::desc("SPIRVProducer testing output file"));

cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

cl::opt<bool>
    ShowProducerIR("show-producer-ir", cl::init(false), cl::ReallyHidden,
                   cl::desc("Dump the IR at the start of SPIRVProducer"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);

// SPIRV Module Sections (per 2.4 of the SPIR-V spec)
// These are used to collect SPIRVInstructions by type on-the-fly.
enum SPIRVSection {
  kCapabilities,
  kExtensions,
  kImports,
  kMemoryModel,
  kEntryPoints,
  kExecutionModes,

  kDebug,
  kAnnotations,

  kTypes,
  kConstants = kTypes,
  kGlobalVariables,

  kFunctions,

  // This is not a section of the SPIR-V spec and should always immediately
  // precede kSectionCount. It is a convenient place for the embedded
  // reflection data.
  kReflection,
  kSectionCount
};

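// Note (added commentary): kConstants deliberately aliases kTypes, since the
// SPIR-V logical layout puts type declarations and the constants that use
// them in a single section; collecting both into one list keeps their
// dependency order intact.
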
class SPIRVID {
  uint32_t id;

public:
  SPIRVID(uint32_t _id = 0) : id(_id) {}
  uint32_t get() const { return id; }
  bool isValid() const { return id != 0; }
  bool operator==(const SPIRVID &that) const { return id == that.id; }
  bool operator<(const SPIRVID &that) const { return id < that.id; }
};

enum SPIRVOperandType { NUMBERID, LITERAL_WORD, LITERAL_DWORD, LITERAL_STRING };

struct SPIRVOperand {
  SPIRVOperand(SPIRVOperandType Ty, uint32_t Num) : Type(Ty) {
    LiteralNum[0] = Num;
  }
  SPIRVOperand(SPIRVOperandType Ty, const char *Str)
      : Type(Ty), LiteralStr(Str) {}
  SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(ArrayRef<uint32_t> NumVec) {
    auto sz = NumVec.size();
    assert(sz >= 1 && sz <= 2);
    Type = sz == 1 ? LITERAL_WORD : LITERAL_DWORD;
    LiteralNum[0] = NumVec[0];
    if (sz == 2) {
      LiteralNum[1] = NumVec[1];
    }
  }

  SPIRVOperandType getType() const { return Type; }
  uint32_t getNumID() const { return LiteralNum[0]; }
  std::string getLiteralStr() const { return LiteralStr; }
  const uint32_t *getLiteralNum() const { return LiteralNum; }

  uint32_t GetNumWords() const {
    switch (Type) {
    case NUMBERID:
    case LITERAL_WORD:
      return 1;
    case LITERAL_DWORD:
      return 2;
    case LITERAL_STRING:
      // Account for the terminating null character.
      return uint32_t((LiteralStr.size() + 4) / 4);
    }
    llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
  }

private:
  SPIRVOperandType Type;
  std::string LiteralStr;
  uint32_t LiteralNum[2];
};

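// Note (added commentary): for LITERAL_STRING operands, GetNumWords() rounds
// the string plus its NUL terminator up to whole 32-bit words. For example,
// "main" (4 characters) needs (4 + 4) / 4 = 2 words, and a 7-character
// string also needs 2 words.
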
typedef SmallVector<SPIRVOperand, 4> SPIRVOperandVec;

struct SPIRVInstruction {
  // Primary constructor must have Opcode, initializes WordCount based on ResID.
  SPIRVInstruction(spv::Op Opc, SPIRVID ResID = 0)
      : Opcode(static_cast<uint16_t>(Opc)) {
    setResult(ResID);
  }

  // Creates an instruction with an opcode and no result ID, and with the given
  // operands. This calls the primary constructor to initialize Opcode and
  // WordCount. Takes ownership of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandVec &Ops) : SPIRVInstruction(Opc) {
    setOperands(Ops);
  }
  // Creates an instruction with an opcode, a result ID, and the given
  // operands. This calls the primary constructor to initialize Opcode and
  // WordCount. Takes ownership of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVID ResID, SPIRVOperandVec &Ops)
      : SPIRVInstruction(Opc, ResID) {
    setOperands(Ops);
  }

  uint32_t getWordCount() const { return WordCount; }
  uint16_t getOpcode() const { return Opcode; }
  SPIRVID getResultID() const { return ResultID; }
  const SPIRVOperandVec &getOperands() const { return Operands; }

private:
  void setResult(SPIRVID ResID = 0) {
    WordCount = 1 + (ResID.isValid() ? 1 : 0);
    ResultID = ResID;
  }

  void setOperands(SPIRVOperandVec &Ops) {
    assert(Operands.empty());
    Operands = std::move(Ops);
    for (auto &opd : Operands) {
      WordCount += uint16_t(opd.GetNumWords());
    }
  }

private:
  uint32_t WordCount; // Check the 16-bit bound at code generation time.
  uint16_t Opcode;
  SPIRVID ResultID;
  SPIRVOperandVec Operands;
};

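// Note (added commentary): WordCount counts the opcode/word-count word, the
// optional result ID, and every operand word. For example, an OpTypeInt with
// literal operands 32 and 0 has WordCount 4: 1 (opcode) + 1 (result ID) +
// 2 (literal words).
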
struct SPIRVProducerPass final : public ModulePass {
  static char ID;

  typedef DenseMap<Type *, SPIRVID> TypeMapType;
  typedef DenseMap<Type *, SmallVector<SPIRVID, 2>> LayoutTypeMapType;
  typedef UniqueVector<Type *> TypeList;
  typedef DenseMap<Value *, SPIRVID> ValueMapType;
  typedef std::list<SPIRVID> SPIRVIDListType;
  typedef std::vector<std::pair<Value *, SPIRVID>> EntryPointVecType;
  typedef std::set<uint32_t> CapabilitySetType;
  typedef std::list<SPIRVInstruction> SPIRVInstructionList;
  typedef std::map<spv::BuiltIn, SPIRVID> BuiltinConstantMapType;
  // A vector of pairs, each of which is:
  // - the LLVM instruction that we will later generate SPIR-V code for
  // - the SPIR-V instruction placeholder that will be replaced
  typedef std::vector<std::pair<Value *, SPIRVInstruction *>>
      DeferredInstVecType;
  typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
      GlobalConstFuncMapType;

  SPIRVProducerPass(
      raw_pwrite_stream *out,
      SmallVectorImpl<std::pair<unsigned, std::string>> *samplerMap,
      bool outputCInitList)
      : ModulePass(ID), module(nullptr), samplerMap(samplerMap), out(out),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(out),
        outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0), TestOutput(false) {
    addCapability(spv::CapabilityShader);
    Ptr = this;
  }

  SPIRVProducerPass()
      : ModulePass(ID), module(nullptr), samplerMap(nullptr), out(nullptr),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(nullptr),
        outputCInitList(false), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0), TestOutput(true) {
    addCapability(spv::CapabilityShader);
    Ptr = this;
  }

  virtual ~SPIRVProducerPass() {
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<LoopInfoWrapperPass>();
  }

  virtual bool runOnModule(Module &module) override;

  // output the SPIR-V header block
  void outputHeader();

  // patch the SPIR-V header block
  void patchHeader();

  CapabilitySetType &getCapabilitySet() { return CapabilitySet; }
  TypeMapType &getImageTypeMap() { return ImageTypeMap; }
  ValueMapType &getValueMap() { return ValueMap; }
  SPIRVInstructionList &getSPIRVInstList(SPIRVSection Section) {
    return SPIRVSections[Section];
  };
  EntryPointVecType &getEntryPointVec() { return EntryPointVec; }
  DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; }
  SPIRVIDListType &getEntryPointInterfacesList() {
    return EntryPointInterfacesList;
  }
  SPIRVID getOpExtInstImportID();
  std::vector<SPIRVID> &getBuiltinDimVec() { return BuiltinDimensionVec; }

  bool hasVariablePointersStorageBuffer() {
    return HasVariablePointersStorageBuffer;
  }
  void setVariablePointersStorageBuffer() {
    if (!HasVariablePointersStorageBuffer) {
      addCapability(spv::CapabilityVariablePointersStorageBuffer);
      HasVariablePointersStorageBuffer = true;
    }
  }
  bool hasVariablePointers() { return HasVariablePointers; }
  void setVariablePointers() {
    if (!HasVariablePointers) {
      addCapability(spv::CapabilityVariablePointers);
      HasVariablePointers = true;
    }
  }
  SmallVectorImpl<std::pair<unsigned, std::string>> *getSamplerMap() {
    return samplerMap;
  }
  GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
    return GlobalConstFuncTypeMap;
  }
  SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
    return GlobalConstArgumentSet;
  }
  TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }

  void GenerateLLVMIRInfo();
  // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
  // *not* be converted to a storage buffer, replace each such global variable
  // with one in the storage class expected by SPIR-V.
  void FindGlobalConstVars();
  // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
  // ModuleOrderedResourceVars.
  void FindResourceVars();
  void FindTypesForSamplerMap();
  void FindTypesForResourceVars();

  // Returns the canonical type of |type|.
  //
  // By default, clspv maps both __constant and __global address space pointers
  // to StorageBuffer storage class. In order to prevent duplicate types from
  // being generated, clspv uses the canonical type as a representative.
  Type *CanonicalType(Type *type);

  // Lookup or create Types, Constants.
  // Returns SPIRVID once it has been created.
  SPIRVID getSPIRVType(Type *Ty, bool needs_layout);
  SPIRVID getSPIRVType(Type *Ty);
  SPIRVID getSPIRVConstant(Constant *Cst);
  SPIRVID getSPIRVInt32Constant(uint32_t CstVal);
  // Lookup SPIRVID of llvm::Value, may create Constant.
  SPIRVID getSPIRVValue(Value *V);

  bool PointerRequiresLayout(unsigned aspace);

  SPIRVID getSPIRVBuiltin(spv::BuiltIn BID, spv::Capability Cap);

  void GenerateModuleInfo();
  void GenerateGlobalVar(GlobalVariable &GV);
  void GenerateWorkgroupVars();
  // Generate reflection instructions for resource variables associated with
  // arguments to F.
  void GenerateSamplers();
  // Generate OpVariables for %clspv.resource.var.* calls.
  void GenerateResourceVars();
  void GenerateFuncPrologue(Function &F);
  void GenerateFuncBody(Function &F);
  void GenerateEntryPointInitialStores();
  spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
  spv::Op GetSPIRVCastOpcode(Instruction &I);
  spv::Op GetSPIRVBinaryOpcode(Instruction &I);
  SPIRVID GenerateClspvInstruction(CallInst *Call,
                                   const FunctionInfo &FuncInfo);
  SPIRVID GenerateImageInstruction(CallInst *Call,
                                   const FunctionInfo &FuncInfo);
  SPIRVID GenerateSubgroupInstruction(CallInst *Call,
                                      const FunctionInfo &FuncInfo);
  SPIRVID GenerateInstructionFromCall(CallInst *Call);
  void GenerateInstruction(Instruction &I);
  void GenerateFuncEpilogue();
  void HandleDeferredInstruction();
  void HandleDeferredDecorations();
  bool is4xi8vec(Type *Ty) const;
  spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
  spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
  spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
  // Returns the GLSL extended instruction enum that the given function
  // call maps to. If none, then returns the 0 value, i.e. GLSLstd450Bad.
  glsl::ExtInst getExtInstEnum(const Builtins::FunctionInfo &func_info);
  // Returns the GLSL extended instruction enum indirectly used by the given
  // function. That is, to implement the given function, we use an extended
  // instruction plus one more instruction. If none, then returns the 0 value,
  // i.e. GLSLstd450Bad.
  glsl::ExtInst getIndirectExtInstEnum(const Builtins::FunctionInfo &func_info);
  // Returns the single GLSL extended instruction used directly or
  // indirectly by the given function call.
  glsl::ExtInst
  getDirectOrIndirectExtInstEnum(const Builtins::FunctionInfo &func_info);
  void WriteOneWord(uint32_t Word);
  void WriteResultID(const SPIRVInstruction &Inst);
  void WriteWordCountAndOpcode(const SPIRVInstruction &Inst);
  void WriteOperand(const SPIRVOperand &Op);
  void WriteSPIRVBinary();
  void WriteSPIRVBinary(SPIRVInstructionList &SPIRVInstList);

  // Returns true if |type| is compatible with OpConstantNull.
  bool IsTypeNullable(const Type *type) const;

  // Populate UBO remapped type maps.
  void PopulateUBOTypeMaps();

  // Populate the merge and continue block maps.
  void PopulateStructuredCFGMaps();

  // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
  // uses the internal map, otherwise it falls back on the data layout.
  uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
  uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);
  uint32_t GetExplicitLayoutStructMemberOffset(StructType *type,
                                               unsigned member,
                                               const DataLayout &DL);

  // Returns the base pointer of |v|.
  Value *GetBasePointer(Value *v);

  // Add a capability if not already present
  // (e.g. CapabilityGroupNonUniformBroadcast).
  void addCapability(uint32_t c) { CapabilitySet.emplace(c); }

  // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| based on
  // |address_space|.
  void setVariablePointersCapabilities(unsigned address_space);

  // Returns true if |lhs| and |rhs| represent the same resource or workgroup
  // variable.
  bool sameResource(Value *lhs, Value *rhs) const;

  // Returns true if |inst| is phi or select that selects from the same
  // structure (or null).
  bool selectFromSameObject(Instruction *inst);

  // Returns true if |Arg| is called with a coherent resource.
  bool CalledWithCoherentResource(Argument &Arg);

  //
  // Primary interface for adding SPIRVInstructions to a SPIRVSection.
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Opcode, SPIRVOperandVec &Operands) {
    bool has_result, has_result_type;
    spv::HasResultAndType(Opcode, &has_result, &has_result_type);
    SPIRVID RID = has_result ? incrNextID() : 0;
    SPIRVSections[TSection].emplace_back(Opcode, RID, Operands);
    return RID;
  }
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Op) {
    SPIRVOperandVec Ops;
    return addSPIRVInst<TSection>(Op, Ops);
  }
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Op, uint32_t V) {
    SPIRVOperandVec Ops;
    Ops.emplace_back(LITERAL_WORD, V);
    return addSPIRVInst<TSection>(Op, Ops);
  }
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Op, const char *V) {
    SPIRVOperandVec Ops;
    Ops.emplace_back(LITERAL_STRING, V);
    return addSPIRVInst<TSection>(Op, Ops);
  }

  //
  // Add placeholder for llvm::Value that references future values.
  // Must have result ID just in case final SPIRVInstruction requires.
  SPIRVID addSPIRVPlaceholder(Value *I) {
    SPIRVID RID = incrNextID();
    SPIRVOperandVec Ops;
    SPIRVSections[kFunctions].emplace_back(spv::OpExtInst, RID, Ops);
    DeferredInstVec.push_back({I, &SPIRVSections[kFunctions].back()});
    return RID;
  }
  // Replace placeholder with actual SPIRVInstruction on the final pass
  // (HandleDeferredInstruction).
  SPIRVID replaceSPIRVInst(SPIRVInstruction *I, spv::Op Opcode,
                           SPIRVOperandVec &Operands) {
    bool has_result, has_result_type;
    spv::HasResultAndType(Opcode, &has_result, &has_result_type);
    SPIRVID RID = has_result ? I->getResultID() : 0;
    *I = SPIRVInstruction(Opcode, RID, Operands);
    return RID;
  }

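  // Note (added commentary): the placeholder mechanism above handles forward
  // references. A value that cannot be emitted yet gets a dummy OpExtInst
  // with a pre-allocated result ID, recorded in DeferredInstVec;
  // HandleDeferredInstruction later calls replaceSPIRVInst to overwrite that
  // placeholder in place once all of its operand IDs are known.
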
  //
  // Add global variable and capture entry point interface
  SPIRVID addSPIRVGlobalVariable(const SPIRVID &TypeID, spv::StorageClass SC,
                                 const SPIRVID &InitID = SPIRVID(),
                                 bool add_interface = false);

  SPIRVID getReflectionImport();
  void GenerateReflection();
  void GenerateKernelReflection();
  void GeneratePushConstantReflection();
  void GenerateSpecConstantReflection();
  void AddArgumentReflection(SPIRVID kernel_decl, const std::string &name,
                             clspv::ArgKind arg_kind, uint32_t ordinal,
                             uint32_t descriptor_set, uint32_t binding,
                             uint32_t offset, uint32_t size, uint32_t spec_id,
                             uint32_t elem_size);

private:

  Module *module;

  // Set of Capabilities required
  CapabilitySetType CapabilitySet;

  // Map from clspv::BuiltinType to SPIRV Global Variable
  BuiltinConstantMapType BuiltinConstantMap;

  SmallVectorImpl<std::pair<unsigned, std::string>> *samplerMap;
  raw_pwrite_stream *out;

  // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
  // convert to other formats on demand?

  // When emitting a C initialization list, the WriteSPIRVBinary method
  // will actually write its words to this vector via binaryTempOut.
  SmallVector<char, 100> binaryTempUnderlyingVector;
  raw_svector_ostream binaryTempOut;

  // Binary output writes to this stream, which might be |out| or
  // |binaryTempOut|. It's the latter when we really want to write a C
  // initializer list.
  raw_pwrite_stream *binaryOut;
  const bool outputCInitList; // If true, output looks like {0x7023, ... , 5}
  uint64_t patchBoundOffset;
  uint32_t nextID;

  SPIRVID incrNextID() { return nextID++; }

  // ID for OpTypeInt 32 1.
  SPIRVID int32ID;
  // ID for OpTypeVector %int 4.
  SPIRVID v4int32ID;

  // Maps an LLVM Type pointer to its SPIR-V IDs (with and without layout).
  LayoutTypeMapType TypeMap;
  // Maps an LLVM image type to its SPIR-V ID.
  TypeMapType ImageTypeMap;
  // A unique-vector of LLVM types that map to a SPIR-V type.
  TypeList Types;
  // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
  ValueMapType ValueMap;
  SPIRVInstructionList SPIRVSections[kSectionCount];

  EntryPointVecType EntryPointVec;
  DeferredInstVecType DeferredInstVec;
  SPIRVIDListType EntryPointInterfacesList;
  SPIRVID OpExtInstImportID;
  std::vector<SPIRVID> BuiltinDimensionVec;
  bool HasVariablePointersStorageBuffer;
  bool HasVariablePointers;
  Type *SamplerTy;
  DenseMap<unsigned, SPIRVID> SamplerLiteralToIDMap;

  // If a function F has a pointer-to-__constant parameter, then this variable
  // will map F's type to (G, index of the parameter), where in a first phase
  // G is F's type.
  // TODO(dneto): This doesn't seem general enough? A function might have
  // more than one such parameter.
  GlobalConstFuncMapType GlobalConstFuncTypeMap;
  SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
  // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
  // or array types, and which point into transparent memory (StorageBuffer
  // storage class). These will require an ArrayStride decoration.
  // See SPV_KHR_variable_pointers rev 13.
  TypeList TypesNeedingArrayStride;

  // This is truly ugly, but works around what look like driver bugs.
  // For get_local_size, an earlier part of the flow has created a module-scope
  // variable in Private address space to hold the value for the workgroup
  // size. Its initializer is a uint3 value marked as builtin WorkgroupSize.
  // When this is present, save the IDs of the initializer value and variable
  // in these two variables. We only ever do a vector load from it, and
  // when we see one of those, substitute just the value of the initializer.
  // This mimics what Glslang does, and that's what drivers are used to.
  // TODO(dneto): Remove this once drivers are fixed.
  SPIRVID WorkgroupSizeValueID;
  SPIRVID WorkgroupSizeVarID;

  bool TestOutput;

  // Bookkeeping for mapping kernel arguments to resource variables.
  struct ResourceVarInfo {
    ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
                    Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
        : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
          var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
          addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
    const int index; // Index into ResourceVarInfoList
    const unsigned descriptor_set;
    const unsigned binding;
    Function *const var_fn; // The @clspv.resource.var.* function.
    const clspv::ArgKind arg_kind;
    const int coherent;
    const unsigned addr_space; // The LLVM address space
    // The SPIR-V ID of the OpVariable. Not populated at construction time.
    SPIRVID var_id;
  };
  // A list of resource var info. Each one corresponds to a module-scope
  // resource variable we will have to create. Resource var indices are
  // indices into this vector.
  SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
  // This is a vector of pointers of all the resource vars, but ordered by
  // kernel function, and then by argument.
  UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
  // Map a function to the ordered list of resource variables it uses, one for
  // each argument. If an argument does not use a resource variable, it
  // will have a null pointer entry.
  using FunctionToResourceVarsMapType =
      DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
  FunctionToResourceVarsMapType FunctionToResourceVarsMap;
  // Map of functions and the literal samplers they use. Built during sampler
  // generation and used to create entry point interfaces.
  DenseMap<Function *, SmallVector<SPIRVID, 8>> FunctionToLiteralSamplersMap;

  // What LLVM types map to SPIR-V types needing layout? These are the
  // arrays and structures supporting storage buffers and uniform buffers.
  TypeList TypesNeedingLayout;
  // What LLVM struct types map to a SPIR-V struct type with Block decoration?
  UniqueVector<StructType *> StructTypesNeedingBlock;
  // For a call that represents a load from an opaque type (samplers, images),
  // map it to the variable id it should load from.
  DenseMap<CallInst *, SPIRVID> ResourceVarDeferredLoadCalls;

  // An ordered list of the kernel arguments of type pointer-to-local.
  using LocalArgList = SmallVector<Argument *, 8>;
  LocalArgList LocalArgs;
  // Information about a pointer-to-local argument.
  struct LocalArgInfo {
    // The SPIR-V ID of the array variable.
    SPIRVID variable_id;
    // The element type of the array.
    Type *elem_type;
    // The ID of the array size.
    SPIRVID array_size_id;
    // The ID of the array type.
    SPIRVID array_type_id;
    // The ID of the pointer to the array type.
    SPIRVID ptr_array_type_id;
    // The specialization constant ID of the array size.
    int spec_id;
  };
  // A mapping from Argument to its assigned SpecId.
  DenseMap<const Argument *, int> LocalArgSpecIds;
  // A mapping from SpecId to its LocalArgInfo.
  DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
  // A mapping from a remapped type to its real offsets.
  DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
  // A mapping from a remapped type to its real sizes.
  DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
      RemappedUBOTypeSizes;

  // Maps basic block to its merge block.
  DenseMap<BasicBlock *, BasicBlock *> MergeBlocks;
  // Maps basic block to its continue block.
  DenseMap<BasicBlock *, BasicBlock *> ContinueBlocks;

  SPIRVID ReflectionID;
  DenseMap<Function *, SPIRVID> KernelDeclarations;

public:
  static SPIRVProducerPass *Ptr;
};

} // namespace

char SPIRVProducerPass::ID = 0;
SPIRVProducerPass *SPIRVProducerPass::Ptr = nullptr;
INITIALIZE_PASS(SPIRVProducerPass, "SPIRVProducerPass", "SPIR-V output pass",
                false, false)

namespace clspv {
ModulePass *createSPIRVProducerPass(
    raw_pwrite_stream *out,
    SmallVectorImpl<std::pair<unsigned, std::string>> *samplerMap,
    bool outputCInitList) {
  return new SPIRVProducerPass(out, samplerMap, outputCInitList);
}

ModulePass *createSPIRVProducerPass() { return new SPIRVProducerPass(); }
} // namespace clspv

namespace {
SPIRVOperandVec &operator<<(SPIRVOperandVec &list, uint32_t num) {
  list.emplace_back(LITERAL_WORD, num);
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, int32_t num) {
  list.emplace_back(LITERAL_WORD, static_cast<uint32_t>(num));
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, ArrayRef<uint32_t> num_vec) {
  list.emplace_back(num_vec);
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, StringRef str) {
  list.emplace_back(LITERAL_STRING, str);
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, Type *t) {
  list.emplace_back(NUMBERID, SPIRVProducerPass::Ptr->getSPIRVType(t).get());
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, Value *v) {
  list.emplace_back(NUMBERID, SPIRVProducerPass::Ptr->getSPIRVValue(v).get());
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, const SPIRVID &v) {
  list.emplace_back(NUMBERID, v.get());
  return list;
}
} // namespace

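// Note (added commentary): the operator<< overloads above convert LLVM types
// and values into SPIR-V ID operands on the fly, so emission inside the pass
// typically looks roughly like this illustrative sketch, where |result_type|
// and |pointer_value| are hypothetical names:
//
//   SPIRVOperandVec Ops;
//   Ops << result_type << pointer_value;   // operands in SPIR-V word order
//   SPIRVID rid = addSPIRVInst(spv::OpLoad, Ops);
//
// When the opcode defines a result, its ID is allocated internally by
// addSPIRVInst via incrNextID().
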
bool SPIRVProducerPass::runOnModule(Module &M) {
  // TODO(sjw): Need to reset all data members for each Module, or better
  // yet create a new SPIRVProducer for every module. For now only
  // allow 1 call.
  assert(module == nullptr);
  module = &M;
  if (ShowProducerIR) {
    llvm::outs() << *module << "\n";
  }

  SmallVector<char, 10000> *binary = nullptr;
  if (TestOutput) {
    binary = new SmallVector<char, 10000>();
    out = new raw_svector_ostream(*binary);
  }

  binaryOut = outputCInitList ? &binaryTempOut : out;

  PopulateUBOTypeMaps();
  PopulateStructuredCFGMaps();

  // SPIR-V always begins with its header information
  outputHeader();

  // Gather information from the LLVM IR that we require.
  GenerateLLVMIRInfo();

  // Collect information on global variables too.
  for (GlobalVariable &GV : module->globals()) {
    // If the GV is one of our special __spirv_* variables, remove the
    // initializer as it was only placed there to force LLVM to not throw the
    // value away.
    if (GV.getName().startswith("__spirv_") ||
        GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
      GV.setInitializer(nullptr);
    }
  }

  // Generate literal samplers if necessary.
  GenerateSamplers();

  // Generate SPIRV variables.
  for (GlobalVariable &GV : module->globals()) {
    GenerateGlobalVar(GV);
  }
  GenerateResourceVars();
  GenerateWorkgroupVars();

  // Generate SPIRV instructions for each function.
  for (Function &F : *module) {
    if (F.isDeclaration()) {
      continue;
    }

    // Generate Function Prologue.
    GenerateFuncPrologue(F);

    // Generate SPIRV instructions for function body.
    GenerateFuncBody(F);

    // Generate Function Epilogue.
    GenerateFuncEpilogue();
  }

  HandleDeferredInstruction();
  HandleDeferredDecorations();

  // Generate SPIRV module information.
  GenerateModuleInfo();

  // Generate embedded reflection information.
  GenerateReflection();

  WriteSPIRVBinary();

  // We need to patch the SPIR-V header to set bound correctly.
  patchHeader();

  if (outputCInitList) {
    bool first = true;
    std::ostringstream os;

    auto emit_word = [&os, &first](uint32_t word) {
      if (!first)
        os << ",\n";
      os << word;
      first = false;
    };

    os << "{";
    const std::string str(binaryTempOut.str());
    for (unsigned i = 0; i < str.size(); i += 4) {
      const uint32_t a = static_cast<unsigned char>(str[i]);
      const uint32_t b = static_cast<unsigned char>(str[i + 1]);
      const uint32_t c = static_cast<unsigned char>(str[i + 2]);
      const uint32_t d = static_cast<unsigned char>(str[i + 3]);
      emit_word(a | (b << 8) | (c << 16) | (d << 24));
    }
    os << "}\n";
    *out << os.str();
  }

  if (TestOutput) {
    std::error_code error;
    raw_fd_ostream test_output(TestOutFile, error, llvm::sys::fs::FA_Write);
    test_output << static_cast<raw_svector_ostream *>(out)->str();
    delete out;
    delete binary;
  }

  return false;
}

void SPIRVProducerPass::outputHeader() {
  binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
                   sizeof(spv::MagicNumber));
  uint32_t minor = 0;
  switch (SpvVersion()) {
  case SPIRVVersion::SPIRV_1_0:
    minor = 0;
    break;
  case SPIRVVersion::SPIRV_1_3:
    minor = 3;
    break;
  case SPIRVVersion::SPIRV_1_4:
    minor = 4;
    break;
  case SPIRVVersion::SPIRV_1_5:
    minor = 5;
    break;
  default:
    llvm_unreachable("unhandled spir-v version");
    break;
  }
  uint32_t version = (1 << 16) | (minor << 8);
  binaryOut->write(reinterpret_cast<const char *>(&version), sizeof(version));

  // use Google's vendor ID
  const uint32_t vendor = 21 << 16;
  binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));

  // we record where we need to come back to and patch in the bound value
  patchBoundOffset = binaryOut->tell();

  // output a bad bound for now
  binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));

  // output the schema (reserved for use and must be 0)
  const uint32_t schema = 0;
  binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
}

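// Note (added commentary): outputHeader emits the five words of the SPIR-V
// module header in order: the magic number, the version word (major 1, minor
// per SpvVersion()), the generator word (vendor ID 21 in the upper half), the
// ID bound (a placeholder patched later by patchHeader), and the reserved
// schema word, which must be zero.
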
void SPIRVProducerPass::patchHeader() {
  // for a binary we just write the value of nextID over bound
  binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
                    patchBoundOffset);
}

void SPIRVProducerPass::GenerateLLVMIRInfo() {
  // This function generates LLVM IR for functions, such as global variables
  // for arguments, and constant and pointer types for argument access. This
  // information is artificial because we need Vulkan SPIR-V output. It is
  // executed ahead of FindType and FindConstant.

  FindGlobalConstVars();

  FindResourceVars();

  FindTypesForSamplerMap();
  FindTypesForResourceVars();
}

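// Note (added commentary): FindGlobalConstVars below either leaves __constant
// data to be clustered into a single storage buffer (capped at 64 KiB by
// kConstantMaxSize) or, when clustering is disabled, rewrites each __constant
// global into an equivalent ModuleScopePrivate global and redirects its users.
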
void SPIRVProducerPass::FindGlobalConstVars() {
  clspv::NormalizeGlobalVariables(*module);
  const DataLayout &DL = module->getDataLayout();

  SmallVector<GlobalVariable *, 8> GVList;
  SmallVector<GlobalVariable *, 8> DeadGVList;
  for (GlobalVariable &GV : module->globals()) {
    if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
      if (GV.use_empty()) {
        DeadGVList.push_back(&GV);
      } else {
        GVList.push_back(&GV);
      }
    }
  }

  // Remove dead global __constant variables.
  for (auto GV : DeadGVList) {
    GV->eraseFromParent();
  }
  DeadGVList.clear();

  if (clspv::Option::ModuleConstantsInStorageBuffer()) {
    // For now, we only support a single storage buffer.
    if (!GVList.empty()) {
      assert(GVList.size() == 1);
      const auto *GV = GVList[0];
      const auto constants_byte_size =
          (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
      const size_t kConstantMaxSize = 65536;
      if (constants_byte_size > kConstantMaxSize) {
        outs() << "Max __constant capacity of " << kConstantMaxSize
               << " bytes exceeded: " << constants_byte_size << " bytes used\n";
        llvm_unreachable("Max __constant capacity exceeded");
      }
    }
  } else {
    // Change global constant variable's address space to ModuleScopePrivate.
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    for (auto GV : GVList) {
      // Create new gv with ModuleScopePrivate address space.
      Type *NewGVTy = GV->getType()->getPointerElementType();
      GlobalVariable *NewGV = new GlobalVariable(
          *module, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
          nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
      NewGV->takeName(GV);

      const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
      SmallVector<User *, 8> CandidateUsers;

      auto record_called_function_type_as_user =
          [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
            // Find argument index.
            unsigned index = 0;
            for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
              if (gv == call->getOperand(i)) {
                // TODO(dneto): Should we break here?
                index = i;
              }
            }

            // Record function type with global constant.
            GlobalConstFuncTyMap[call->getFunctionType()] =
                std::make_pair(call->getFunctionType(), index);
          };

      for (User *GVU : GVUsers) {
        if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
          record_called_function_type_as_user(GV, Call);
        } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
          // Check GEP users.
          for (User *GEPU : GEP->users()) {
            if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
              record_called_function_type_as_user(GEP, GEPCall);
            }
          }
        }

        CandidateUsers.push_back(GVU);
      }

      for (User *U : CandidateUsers) {
        // Update users of gv with new gv.
        if (!isa<Constant>(U)) {
          // #254: Can't change operands of a constant, but this shouldn't be
          // something that sticks around in the module.
          U->replaceUsesOfWith(GV, NewGV);
        }
      }

      // Delete original gv.
      GV->eraseFromParent();
    }
  }
}

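// Note (added commentary): as used below, calls to the @clspv.resource.var.*
// builtins carry their configuration as constant operands: operand 0 is the
// descriptor set, 1 the binding, 2 the argument kind, 3 the argument index,
// and 5 the coherent flag.
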
void SPIRVProducerPass::FindResourceVars() {
  ResourceVarInfoList.clear();
  FunctionToResourceVarsMap.clear();
  ModuleOrderedResourceVars.reset();
  // Normally, there is one resource variable per clspv.resource.var.*
  // function, since that is unique'd by arg type and index. By design,
  // we can share these resource variables across kernels because all
  // kernels use the same descriptor set.
  //
  // But if the user requested distinct descriptor sets per kernel, then
  // the descriptor allocator has made different (set,binding) pairs for
  // the same (type,arg_index) pair. Since we can decorate a resource
  // variable with only exactly one DescriptorSet and Binding, we are
  // forced in this case to make distinct resource variables whenever
  // the same clspv.resource.var.X function is seen with distinct
  // (set,binding) values.
  const bool always_distinct_sets =
      clspv::Option::DistinctKernelDescriptorSets();
  for (Function &F : *module) {
    // Rely on the fact the resource var functions have a stable ordering
    // in the module.
    if (Builtins::Lookup(&F) == Builtins::kClspvResource) {
      // Find all calls to this function with distinct set and binding pairs.
      // Save them in ResourceVarInfoList.

      // Determine uniqueness of the (set,binding) pairs only within this
      // one resource-var builtin function.
      using SetAndBinding = std::pair<unsigned, unsigned>;
      // Maps set and binding to the resource var info.
      DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
      bool first_use = true;
      for (auto &U : F.uses()) {
        if (auto *call = dyn_cast<CallInst>(U.getUser())) {
          const auto set = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
          const auto binding = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
          const auto arg_kind = clspv::ArgKind(
              dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
          const auto arg_index = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
          const auto coherent = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());

          // Find or make the resource var info for this combination.
          ResourceVarInfo *rv = nullptr;
          if (always_distinct_sets) {
            // Make a new resource var any time we see a different
            // (set,binding) pair.
            SetAndBinding key{set, binding};
            auto where = set_and_binding_map.find(key);
            if (where == set_and_binding_map.end()) {
              rv = new ResourceVarInfo(
                  static_cast<int>(ResourceVarInfoList.size()), set, binding,
                  &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
              set_and_binding_map[key] = rv;
            } else {
              rv = where->second;
            }
          } else {
            // The default is to make exactly one resource for each
            // clspv.resource.var.* function.
            if (first_use) {
              first_use = false;
              rv = new ResourceVarInfo(
                  static_cast<int>(ResourceVarInfoList.size()), set, binding,
                  &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
            } else {
              rv = ResourceVarInfoList.back().get();
            }
          }

          // Now populate FunctionToResourceVarsMap.
          auto &mapping =
              FunctionToResourceVarsMap[call->getParent()->getParent()];
          while (mapping.size() <= arg_index) {
            mapping.push_back(nullptr);
          }
          mapping[arg_index] = rv;
        }
      }
    }
  }

  // Populate ModuleOrderedResourceVars.
  for (Function &F : *module) {
    auto where = FunctionToResourceVarsMap.find(&F);
    if (where != FunctionToResourceVarsMap.end()) {
      for (auto &rv : where->second) {
        if (rv != nullptr) {
          ModuleOrderedResourceVars.insert(rv);
        }
      }
    }
  }
  if (ShowResourceVars) {
    for (auto *info : ModuleOrderedResourceVars) {
      outs() << "MORV index " << info->index << " (" << info->descriptor_set
             << "," << info->binding << ") " << *(info->var_fn->getReturnType())
             << "\n";
    }
  }
}

SJW77b87ad2020-04-21 14:37:52 -05001115void SPIRVProducerPass::FindTypesForSamplerMap() {
David Neto862b7d82018-06-14 18:48:37 -04001116 // If we are using a sampler map, find the type of the sampler.
SJW77b87ad2020-04-21 14:37:52 -05001117 if (module->getFunction(clspv::LiteralSamplerFunction()) ||
alan-baker3f772c02021-06-15 22:18:11 -04001118 (getSamplerMap() && !getSamplerMap()->empty())) {
James Pricecbe834f2020-12-01 13:42:25 -05001119 auto SamplerStructTy =
1120 StructType::getTypeByName(module->getContext(), "opencl.sampler_t");
David Neto862b7d82018-06-14 18:48:37 -04001121 if (!SamplerStructTy) {
SJW77b87ad2020-04-21 14:37:52 -05001122 SamplerStructTy =
1123 StructType::create(module->getContext(), "opencl.sampler_t");
David Neto862b7d82018-06-14 18:48:37 -04001124 }
David Neto862b7d82018-06-14 18:48:37 -04001125 SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);
David Neto862b7d82018-06-14 18:48:37 -04001126 }
1127}
1128
SJW77b87ad2020-04-21 14:37:52 -05001129void SPIRVProducerPass::FindTypesForResourceVars() {
David Neto862b7d82018-06-14 18:48:37 -04001130 // Record types so they are generated.
1131 TypesNeedingLayout.reset();
1132 StructTypesNeedingBlock.reset();
1133
David Neto862b7d82018-06-14 18:48:37 -04001134 for (const auto *info : ModuleOrderedResourceVars) {
1135 Type *type = info->var_fn->getReturnType();
1136
1137 switch (info->arg_kind) {
1138 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04001139 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04001140 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1141 StructTypesNeedingBlock.insert(sty);
1142 } else {
1143 errs() << *type << "\n";
1144 llvm_unreachable("Buffer arguments must map to structures!");
1145 }
1146 break;
1147 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001148 case clspv::ArgKind::PodUBO:
1149 case clspv::ArgKind::PodPushConstant:
David Neto862b7d82018-06-14 18:48:37 -04001150 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1151 StructTypesNeedingBlock.insert(sty);
1152 } else {
1153 errs() << *type << "\n";
1154 llvm_unreachable("POD arguments must map to structures!");
1155 }
1156 break;
alan-bakerf6bc8252020-09-23 14:58:55 -04001157 case clspv::ArgKind::SampledImage:
1158 case clspv::ArgKind::StorageImage:
David Neto862b7d82018-06-14 18:48:37 -04001159 case clspv::ArgKind::Sampler:
1160 // Sampler and image types map to the pointee type but
1161 // in the uniform constant address space.
1162 type = PointerType::get(type->getPointerElementType(),
1163 clspv::AddressSpace::UniformConstant);
1164 break;
1165 default:
1166 break;
1167 }
David Neto862b7d82018-06-14 18:48:37 -04001168 }
1169
alan-bakerdcd97412019-09-16 15:32:30 -04001170 // If module constants are clustered in a storage buffer then that struct
1171 // needs layout decorations.
1172 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
SJW77b87ad2020-04-21 14:37:52 -05001173 for (GlobalVariable &GV : module->globals()) {
alan-bakerdcd97412019-09-16 15:32:30 -04001174 PointerType *PTy = cast<PointerType>(GV.getType());
1175 const auto AS = PTy->getAddressSpace();
1176 const bool module_scope_constant_external_init =
1177 (AS == AddressSpace::Constant) && GV.hasInitializer();
1178 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
1179 if (module_scope_constant_external_init &&
1180 spv::BuiltInMax == BuiltinType) {
1181 StructTypesNeedingBlock.insert(
1182 cast<StructType>(PTy->getPointerElementType()));
1183 }
1184 }
1185 }
1186
SJW77b87ad2020-04-21 14:37:52 -05001187 for (const GlobalVariable &GV : module->globals()) {
Kévin Petitbbbda972020-03-03 19:16:31 +00001188 if (GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
1189 auto Ty = cast<PointerType>(GV.getType())->getPointerElementType();
1190 assert(Ty->isStructTy() && "Push constants have to be structures.");
1191 auto STy = cast<StructType>(Ty);
1192 StructTypesNeedingBlock.insert(STy);
1193 }
1194 }
1195
David Neto862b7d82018-06-14 18:48:37 -04001196 // Traverse the arrays and structures underneath each Block, and
1197 // mark them as needing layout.
1198 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1199 StructTypesNeedingBlock.end());
1200 while (!work_list.empty()) {
1201 Type *type = work_list.back();
1202 work_list.pop_back();
1203 TypesNeedingLayout.insert(type);
1204 switch (type->getTypeID()) {
1205 case Type::ArrayTyID:
1206 work_list.push_back(type->getArrayElementType());
1207 if (!Hack_generate_runtime_array_stride_early) {
1208 // Remember this array type for deferred decoration.
1209 TypesNeedingArrayStride.insert(type);
1210 }
1211 break;
1212 case Type::StructTyID:
1213 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1214 work_list.push_back(elem_ty);
1215 }
1216 default:
1217 // This type and its contained types don't get layout.
1218 break;
1219 }
1220 }
1221}
1222
SJWf93f5f32020-05-05 07:27:56 -05001223void SPIRVProducerPass::GenerateWorkgroupVars() {
Alan Baker202c8c72018-08-13 13:47:44 -04001224 // The SpecId assignment for pointer-to-local arguments is recorded in
1225 // module-level metadata. Translate that information into local argument
1226 // information.
SJWf93f5f32020-05-05 07:27:56 -05001227 LLVMContext &Context = module->getContext();
SJW77b87ad2020-04-21 14:37:52 -05001228 NamedMDNode *nmd = module->getNamedMetadata(clspv::LocalSpecIdMetadataName());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001229 if (!nmd)
1230 return;
Alan Baker202c8c72018-08-13 13:47:44 -04001231 for (auto operand : nmd->operands()) {
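// Each metadata tuple is (function, argument index, spec id).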
1232 MDTuple *tuple = cast<MDTuple>(operand);
1233 ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
1234 Function *func = cast<Function>(fn_md->getValue());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001235 ConstantAsMetadata *arg_index_md =
1236 cast<ConstantAsMetadata>(tuple->getOperand(1));
1237 int arg_index = static_cast<int>(
1238 cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
1239 Argument *arg = &*(func->arg_begin() + arg_index);
Alan Baker202c8c72018-08-13 13:47:44 -04001240
1241 ConstantAsMetadata *spec_id_md =
1242 cast<ConstantAsMetadata>(tuple->getOperand(2));
alan-bakerb6b09dc2018-11-08 16:59:28 -05001243 int spec_id = static_cast<int>(
1244 cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04001245
Alan Baker202c8c72018-08-13 13:47:44 -04001246 LocalArgSpecIds[arg] = spec_id;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001247 if (LocalSpecIdInfoMap.count(spec_id))
1248 continue;
Alan Baker202c8c72018-08-13 13:47:44 -04001249
SJWf93f5f32020-05-05 07:27:56 -05001250 // Generate the spec constant.
1251 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05001252 Ops << Type::getInt32Ty(Context) << 1;
SJWf93f5f32020-05-05 07:27:56 -05001253 SPIRVID ArraySizeID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
Alan Baker202c8c72018-08-13 13:47:44 -04001254
SJWf93f5f32020-05-05 07:27:56 -05001255 // Generate the array type.
1256 Type *ElemTy = arg->getType()->getPointerElementType();
1257 Ops.clear();
1258 // The element type must have been created.
SJW01901d92020-05-21 08:58:31 -05001259 Ops << ElemTy << ArraySizeID;
SJWf93f5f32020-05-05 07:27:56 -05001260
1261 SPIRVID ArrayTypeID = addSPIRVInst<kTypes>(spv::OpTypeArray, Ops);
1262
1263 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05001264 Ops << spv::StorageClassWorkgroup << ArrayTypeID;
SJWf93f5f32020-05-05 07:27:56 -05001265 SPIRVID PtrArrayTypeID = addSPIRVInst<kTypes>(spv::OpTypePointer, Ops);
1266
1267 // Generate OpVariable.
1268 //
1269 // Ops[0] : Result Type ID
1270 // Ops[1] : Storage Class
SJW806a5d82020-07-15 12:51:38 -05001271 SPIRVID VariableID =
1272 addSPIRVGlobalVariable(PtrArrayTypeID, spv::StorageClassWorkgroup);
SJWf93f5f32020-05-05 07:27:56 -05001273
1274 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05001275 Ops << ArraySizeID << spv::DecorationSpecId << spec_id;
SJWf93f5f32020-05-05 07:27:56 -05001276 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
1277
1278 LocalArgInfo info{VariableID, ElemTy, ArraySizeID,
1279 ArrayTypeID, PtrArrayTypeID, spec_id};
1280 LocalSpecIdInfoMap[spec_id] = info;
Alan Baker202c8c72018-08-13 13:47:44 -04001281 }
1282}
1283
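// Map an OpenCL/LLVM address space to the corresponding SPIR-V storage class.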
David Neto22f144c2017-06-12 14:26:21 -04001284spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
1285 switch (AddrSpace) {
1286 default:
1287 llvm_unreachable("Unsupported OpenCL address space");
1288 case AddressSpace::Private:
1289 return spv::StorageClassFunction;
1290 case AddressSpace::Global:
David Neto22f144c2017-06-12 14:26:21 -04001291 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001292 case AddressSpace::Constant:
1293 return clspv::Option::ConstantArgsInUniformBuffer()
1294 ? spv::StorageClassUniform
1295 : spv::StorageClassStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -04001296 case AddressSpace::Input:
1297 return spv::StorageClassInput;
1298 case AddressSpace::Local:
1299 return spv::StorageClassWorkgroup;
1300 case AddressSpace::UniformConstant:
1301 return spv::StorageClassUniformConstant;
David Neto9ed8e2f2018-03-24 06:47:24 -07001302 case AddressSpace::Uniform:
David Netoe439d702018-03-23 13:14:08 -07001303 return spv::StorageClassUniform;
David Neto22f144c2017-06-12 14:26:21 -04001304 case AddressSpace::ModuleScopePrivate:
1305 return spv::StorageClassPrivate;
Kévin Petitbbbda972020-03-03 19:16:31 +00001306 case AddressSpace::PushConstant:
1307 return spv::StorageClassPushConstant;
David Neto22f144c2017-06-12 14:26:21 -04001308 }
1309}
1310
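// Map a kernel argument kind to the storage class of the resource that backs
// it.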
David Neto862b7d82018-06-14 18:48:37 -04001311spv::StorageClass
1312SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
1313 switch (arg_kind) {
1314 case clspv::ArgKind::Buffer:
1315 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001316 case clspv::ArgKind::BufferUBO:
1317 return spv::StorageClassUniform;
David Neto862b7d82018-06-14 18:48:37 -04001318 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001319 return spv::StorageClassStorageBuffer;
1320 case clspv::ArgKind::PodUBO:
1321 return spv::StorageClassUniform;
1322 case clspv::ArgKind::PodPushConstant:
1323 return spv::StorageClassPushConstant;
David Neto862b7d82018-06-14 18:48:37 -04001324 case clspv::ArgKind::Local:
1325 return spv::StorageClassWorkgroup;
alan-bakerf6bc8252020-09-23 14:58:55 -04001326 case clspv::ArgKind::SampledImage:
1327 case clspv::ArgKind::StorageImage:
David Neto862b7d82018-06-14 18:48:37 -04001328 case clspv::ArgKind::Sampler:
1329 return spv::StorageClassUniformConstant;
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001330 default:
1331 llvm_unreachable("Unsupported storage class for argument kind");
David Neto862b7d82018-06-14 18:48:37 -04001332 }
1333}
1334
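// Translate a clspv builtin variable name (e.g. __spirv_WorkgroupSize) into
// the corresponding SPIR-V BuiltIn, or BuiltInMax if the name is not one of
// the recognized builtins.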
David Neto22f144c2017-06-12 14:26:21 -04001335spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1336 return StringSwitch<spv::BuiltIn>(Name)
1337 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1338 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1339 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1340 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1341 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
alan-bakerbed3a882020-04-21 14:42:41 -04001342 .Case("__spirv_WorkDim", spv::BuiltInWorkDim)
alan-bakere1996972020-05-04 08:38:12 -04001343 .Case("__spirv_GlobalOffset", spv::BuiltInGlobalOffset)
David Neto22f144c2017-06-12 14:26:21 -04001344 .Default(spv::BuiltInMax);
1345}
1346
SJW01901d92020-05-21 08:58:31 -05001347SPIRVID SPIRVProducerPass::getOpExtInstImportID() {
1348 if (OpExtInstImportID == 0) {
1349 //
1350 // Generate OpExtInstImport.
1351 //
1352 // Ops[0] ... Ops[n] = Name (Literal String)
David Neto22f144c2017-06-12 14:26:21 -04001353
SJW01901d92020-05-21 08:58:31 -05001354 OpExtInstImportID =
1355 addSPIRVInst<kImports>(spv::OpExtInstImport, "GLSL.std.450");
1356 }
1357 return OpExtInstImportID;
SJWf93f5f32020-05-05 07:27:56 -05001358}
1359
SJW806a5d82020-07-15 12:51:38 -05001360SPIRVID SPIRVProducerPass::addSPIRVGlobalVariable(const SPIRVID &TypeID,
1361 spv::StorageClass SC,
alan-baker3f772c02021-06-15 22:18:11 -04001362 const SPIRVID &InitID,
1363 bool add_interface) {
SJW806a5d82020-07-15 12:51:38 -05001364 // Generate OpVariable.
1365 //
1366 // Ops[0] : Result Type ID
1367 // Ops[1] : Storage Class
1368 // Ops[2] : Initialization Value ID (optional)
1369
1370 SPIRVOperandVec Ops;
1371 Ops << TypeID << SC;
1372 if (InitID.isValid()) {
1373 Ops << InitID;
1374 }
1375
1376 SPIRVID VID = addSPIRVInst<kGlobalVariables>(spv::OpVariable, Ops);
1377
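// Input variables (and, when requested, other interface variables for
// SPIR-V 1.4 and later) must be listed on the OpEntryPoint interface.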
alan-baker3f772c02021-06-15 22:18:11 -04001378 if (SC == spv::StorageClassInput ||
1379 (add_interface && SpvVersion() >= SPIRVVersion::SPIRV_1_4)) {
SJW806a5d82020-07-15 12:51:38 -05001380 getEntryPointInterfacesList().push_back(VID);
1381 }
1382
1383 return VID;
1384}
1385
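// Return the type used in place of |type| for SPIR-V generation: __constant
// pointers are folded into __global (unless constant args are passed in
// uniform buffers), and the rewrite is applied recursively through struct,
// array and function types. Returns |type| unchanged when nothing needs
// rewriting.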
alan-bakerc3fd07f2020-10-22 09:48:49 -04001386Type *SPIRVProducerPass::CanonicalType(Type *type) {
1387 if (type->getNumContainedTypes() != 0) {
1388 switch (type->getTypeID()) {
1389 case Type::PointerTyID: {
1390 // For the purposes of our Vulkan SPIR-V type system, constant and global
1391 // are conflated.
1392 auto *ptr_ty = cast<PointerType>(type);
1393 unsigned AddrSpace = ptr_ty->getAddressSpace();
1394 if (AddressSpace::Constant == AddrSpace) {
1395 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1396 AddrSpace = AddressSpace::Global;
1397 // The canonical type of __constant is __global unless constants are
1398 // passed in uniform buffers.
1399 auto *GlobalTy =
1400 ptr_ty->getPointerElementType()->getPointerTo(AddrSpace);
1401 return GlobalTy;
1402 }
1403 }
1404 break;
1405 }
1406 case Type::StructTyID: {
1407 SmallVector<Type *, 8> subtypes;
1408 bool changed = false;
1409 for (auto *subtype : type->subtypes()) {
1410 auto canonical = CanonicalType(subtype);
1411 subtypes.push_back(canonical);
1412 if (canonical != subtype) {
1413 changed = true;
1414 }
1415 }
1416 if (changed) {
1417 return StructType::get(type->getContext(), subtypes,
1418 cast<StructType>(type)->isPacked());
1419 }
1420 break;
1421 }
1422 case Type::ArrayTyID: {
1423 auto *elem_ty = type->getArrayElementType();
1424 auto *equiv_elem_ty = CanonicalType(elem_ty);
1425 if (equiv_elem_ty != elem_ty) {
1426 return ArrayType::get(equiv_elem_ty,
1427 cast<ArrayType>(type)->getNumElements());
1428 }
1429 break;
1430 }
1431 case Type::FunctionTyID: {
1432 auto *func_ty = cast<FunctionType>(type);
1433 auto *return_ty = CanonicalType(func_ty->getReturnType());
1434 SmallVector<Type *, 8> params;
1435 for (unsigned i = 0; i < func_ty->getNumParams(); ++i) {
1436 params.push_back(CanonicalType(func_ty->getParamType(i)));
1437 }
1438 return FunctionType::get(return_ty, params, func_ty->isVarArg());
1439 }
1440 default:
1441 break;
1442 }
1443 }
1444
1445 return type;
1446}
1447
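// Returns true if a pointer in address space |aspace| requires explicit
// layout decorations. This only applies when targeting SPIR-V 1.4 or later.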
alan-baker3f772c02021-06-15 22:18:11 -04001448bool SPIRVProducerPass::PointerRequiresLayout(unsigned aspace) {
1449 if (Option::SpvVersion() >= SPIRVVersion::SPIRV_1_4) {
1450 switch (aspace) {
1451 case AddressSpace::PushConstant:
Kévin Petit85bcee02021-08-03 18:21:30 +01001452 case AddressSpace::Uniform:
alan-baker3f772c02021-06-15 22:18:11 -04001453 case AddressSpace::Global:
1454 case AddressSpace::Constant:
1455 return true;
1456 default:
1457 break;
1458 }
1459 }
1460 return false;
1461}
1462
SJW01901d92020-05-21 08:58:31 -05001463SPIRVID SPIRVProducerPass::getSPIRVType(Type *Ty) {
alan-baker3f772c02021-06-15 22:18:11 -04001464 // Prior to SPIR-V 1.4, layout decorations are more relaxed, so a laid-out
1465 // type can be reused in non-laid-out storage classes.
1466 bool needs_layout = false;
1467 if (auto ptr_ty = dyn_cast<PointerType>(Ty)) {
1468 needs_layout = PointerRequiresLayout(ptr_ty->getPointerAddressSpace());
1469 }
1470 return getSPIRVType(Ty, needs_layout);
1471}
1472
1473SPIRVID SPIRVProducerPass::getSPIRVType(Type *Ty, bool needs_layout) {
1474 // Only pointers, structs and arrays should have layout decorations.
1475 if (!(isa<PointerType>(Ty) || isa<ArrayType>(Ty) || isa<StructType>(Ty))) {
1476 needs_layout = false;
1477 }
1478 // |layout| is the index used for |Ty|'s entry in the type map. Each type
1479 // stores a laid out and non-laid out version of the type.
1480 const unsigned layout = needs_layout ? 1 : 0;
1481
SJWf93f5f32020-05-05 07:27:56 -05001482 auto TI = TypeMap.find(Ty);
1483 if (TI != TypeMap.end()) {
alan-baker3f772c02021-06-15 22:18:11 -04001484 assert(layout < TI->second.size());
1485 if (TI->second[layout].isValid()) {
1486 return TI->second[layout];
1487 }
SJWf93f5f32020-05-05 07:27:56 -05001488 }
1489
alan-bakerc3fd07f2020-10-22 09:48:49 -04001490 auto Canonical = CanonicalType(Ty);
1491 if (Canonical != Ty) {
1492 auto CanonicalTI = TypeMap.find(Canonical);
1493 if (CanonicalTI != TypeMap.end()) {
alan-baker3f772c02021-06-15 22:18:11 -04001494 assert(layout < CanonicalTI->second.size());
1495 if (CanonicalTI->second[layout].isValid()) {
1496 auto id = CanonicalTI->second[layout];
1497 auto &base = TypeMap[Ty];
1498 if (base.empty()) {
1499 base.resize(2);
1500 }
1501 base[layout] = id;
1502 return id;
1503 }
alan-bakerc3fd07f2020-10-22 09:48:49 -04001504 }
1505 }
1506
1507 // Perform the mapping with the canonical type.
1508
SJWf93f5f32020-05-05 07:27:56 -05001509 const auto &DL = module->getDataLayout();
1510
SJW01901d92020-05-21 08:58:31 -05001511 SPIRVID RID;
SJWf93f5f32020-05-05 07:27:56 -05001512
alan-bakerc3fd07f2020-10-22 09:48:49 -04001513 switch (Canonical->getTypeID()) {
SJWf93f5f32020-05-05 07:27:56 -05001514 default: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001515 Canonical->print(errs());
SJWf93f5f32020-05-05 07:27:56 -05001516 llvm_unreachable("Unsupported type???");
1517 break;
1518 }
1519 case Type::MetadataTyID:
1520 case Type::LabelTyID: {
1521 // Ignore these types.
1522 break;
1523 }
1524 case Type::PointerTyID: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001525 PointerType *PTy = cast<PointerType>(Canonical);
SJWf93f5f32020-05-05 07:27:56 -05001526 unsigned AddrSpace = PTy->getAddressSpace();
1527
1528 if (AddrSpace != AddressSpace::UniformConstant) {
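// Pointers to opaque structs (samplers and images) are represented by the
// pointee's type directly rather than by an OpTypePointer.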
1529 auto PointeeTy = PTy->getElementType();
1530 if (PointeeTy->isStructTy() &&
1531 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
alan-baker3f772c02021-06-15 22:18:11 -04001532 RID = getSPIRVType(PointeeTy, needs_layout);
SJWf93f5f32020-05-05 07:27:56 -05001533 break;
1534 }
1535 }
1536
SJWf93f5f32020-05-05 07:27:56 -05001537 //
1538 // Generate OpTypePointer.
1539 //
1540
1541 // OpTypePointer
1542 // Ops[0] = Storage Class
1543 // Ops[1] = Element Type ID
1544 SPIRVOperandVec Ops;
1545
alan-baker3f772c02021-06-15 22:18:11 -04001546 Ops << GetStorageClass(AddrSpace)
1547 << getSPIRVType(PTy->getElementType(), needs_layout);
SJWf93f5f32020-05-05 07:27:56 -05001548
1549 RID = addSPIRVInst<kTypes>(spv::OpTypePointer, Ops);
1550 break;
1551 }
1552 case Type::StructTyID: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001553 StructType *STy = cast<StructType>(Canonical);
SJWf93f5f32020-05-05 07:27:56 -05001554
1555 // Handle sampler type.
1556 if (STy->isOpaque()) {
1557 if (STy->getName().equals("opencl.sampler_t")) {
1558 //
1559 // Generate OpTypeSampler
1560 //
1561 // Empty Ops.
1562
1563 RID = addSPIRVInst<kTypes>(spv::OpTypeSampler);
1564 break;
1565 } else if (STy->getName().startswith("opencl.image1d_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001566 STy->getName().startswith("opencl.image1d_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001567 STy->getName().startswith("opencl.image1d_wo_t") ||
1568 STy->getName().startswith("opencl.image1d_array_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001569 STy->getName().startswith("opencl.image1d_array_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001570 STy->getName().startswith("opencl.image1d_array_wo_t") ||
1571 STy->getName().startswith("opencl.image2d_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001572 STy->getName().startswith("opencl.image2d_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001573 STy->getName().startswith("opencl.image2d_wo_t") ||
1574 STy->getName().startswith("opencl.image2d_array_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001575 STy->getName().startswith("opencl.image2d_array_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001576 STy->getName().startswith("opencl.image2d_array_wo_t") ||
1577 STy->getName().startswith("opencl.image3d_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001578 STy->getName().startswith("opencl.image3d_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001579 STy->getName().startswith("opencl.image3d_wo_t")) {
SJW01901d92020-05-21 08:58:31 -05001580 if (STy->getName().startswith("opencl.image1d_")) {
1581 if (STy->getName().contains(".sampled"))
1582 addCapability(spv::CapabilitySampled1D);
1583 else
1584 addCapability(spv::CapabilityImage1D);
1585 }
1586
SJWf93f5f32020-05-05 07:27:56 -05001587 //
1588 // Generate OpTypeImage
1589 //
1590 // Ops[0] = Sampled Type ID
1591 // Ops[1] = Dim ID
1592 // Ops[2] = Depth (Literal Number)
1593 // Ops[3] = Arrayed (Literal Number)
1594 // Ops[4] = MS (Literal Number)
1595 // Ops[5] = Sampled (Literal Number)
1596 // Ops[6] = Image Format ID
1597 //
1598 SPIRVOperandVec Ops;
1599
SJW01901d92020-05-21 08:58:31 -05001600 SPIRVID SampledTyID;
alan-baker3f772c02021-06-15 22:18:11 -04001601 // None of the sampled types have a layout.
SJWf93f5f32020-05-05 07:27:56 -05001602 if (STy->getName().contains(".float")) {
alan-baker3f772c02021-06-15 22:18:11 -04001603 SampledTyID =
1604 getSPIRVType(Type::getFloatTy(Canonical->getContext()), false);
SJWf93f5f32020-05-05 07:27:56 -05001605 } else if (STy->getName().contains(".uint")) {
alan-baker3f772c02021-06-15 22:18:11 -04001606 SampledTyID =
1607 getSPIRVType(Type::getInt32Ty(Canonical->getContext()), false);
SJWf93f5f32020-05-05 07:27:56 -05001608 } else if (STy->getName().contains(".int")) {
1609 // Generate a signed 32-bit integer if necessary.
1610 if (int32ID == 0) {
1611 SPIRVOperandVec intOps;
SJW01901d92020-05-21 08:58:31 -05001612 intOps << 32 << 1;
SJWf93f5f32020-05-05 07:27:56 -05001613 int32ID = addSPIRVInst<kTypes>(spv::OpTypeInt, intOps);
1614 }
1615 SampledTyID = int32ID;
1616
1617 // Generate a vec4 of the signed int if necessary.
1618 if (v4int32ID == 0) {
1619 SPIRVOperandVec vecOps;
SJW01901d92020-05-21 08:58:31 -05001620 vecOps << int32ID << 4;
SJWf93f5f32020-05-05 07:27:56 -05001621 v4int32ID = addSPIRVInst<kTypes>(spv::OpTypeVector, vecOps);
1622 }
1623 } else {
1624 // This was likely an UndefValue.
alan-baker3f772c02021-06-15 22:18:11 -04001625 SampledTyID =
1626 getSPIRVType(Type::getFloatTy(Canonical->getContext()), false);
SJWf93f5f32020-05-05 07:27:56 -05001627 }
SJW01901d92020-05-21 08:58:31 -05001628 Ops << SampledTyID;
SJWf93f5f32020-05-05 07:27:56 -05001629
1630 spv::Dim DimID = spv::Dim2D;
1631 if (STy->getName().startswith("opencl.image1d_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001632 STy->getName().startswith("opencl.image1d_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001633 STy->getName().startswith("opencl.image1d_wo_t") ||
1634 STy->getName().startswith("opencl.image1d_array_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001635 STy->getName().startswith("opencl.image1d_array_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001636 STy->getName().startswith("opencl.image1d_array_wo_t")) {
1637 DimID = spv::Dim1D;
1638 } else if (STy->getName().startswith("opencl.image3d_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001639 STy->getName().startswith("opencl.image3d_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001640 STy->getName().startswith("opencl.image3d_wo_t")) {
1641 DimID = spv::Dim3D;
1642 }
SJW01901d92020-05-21 08:58:31 -05001643 Ops << DimID;
SJWf93f5f32020-05-05 07:27:56 -05001644
1645 // TODO: Set up Depth.
SJW01901d92020-05-21 08:58:31 -05001646 Ops << 0;
SJWf93f5f32020-05-05 07:27:56 -05001647
1648 uint32_t arrayed = STy->getName().contains("_array_") ? 1 : 0;
SJW01901d92020-05-21 08:58:31 -05001649 Ops << arrayed;
SJWf93f5f32020-05-05 07:27:56 -05001650
1651 // TODO: Set up MS.
SJW01901d92020-05-21 08:58:31 -05001652 Ops << 0;
SJWf93f5f32020-05-05 07:27:56 -05001653
1654 // Set up Sampled.
1655 //
1656 // From Spec
1657 //
1658 // 0 indicates this is only known at run time, not at compile time
1659 // 1 indicates will be used with sampler
1660 // 2 indicates will be used without a sampler (a storage image)
1661 uint32_t Sampled = 1;
1662 if (!STy->getName().contains(".sampled")) {
1663 Sampled = 2;
1664 }
SJW01901d92020-05-21 08:58:31 -05001665 Ops << Sampled;
SJWf93f5f32020-05-05 07:27:56 -05001666
1667 // TODO: Set up Image Format.
SJW01901d92020-05-21 08:58:31 -05001668 Ops << spv::ImageFormatUnknown;
SJWf93f5f32020-05-05 07:27:56 -05001669 RID = addSPIRVInst<kTypes>(spv::OpTypeImage, Ops);
1670
alan-bakerf6bc8252020-09-23 14:58:55 -04001671 // Only need a sampled version of the type if it is used with a sampler.
1672 if (Sampled == 1) {
1673 Ops.clear();
1674 Ops << RID;
alan-bakerc3fd07f2020-10-22 09:48:49 -04001675 getImageTypeMap()[Canonical] =
alan-bakerf6bc8252020-09-23 14:58:55 -04001676 addSPIRVInst<kTypes>(spv::OpTypeSampledImage, Ops);
1677 }
SJWf93f5f32020-05-05 07:27:56 -05001678 break;
1679 }
1680 }
1681
1682 //
1683 // Generate OpTypeStruct
1684 //
1685 // Ops[0] ... Ops[n] = Member IDs
1686 SPIRVOperandVec Ops;
1687
1688 for (auto *EleTy : STy->elements()) {
alan-baker3f772c02021-06-15 22:18:11 -04001689 Ops << getSPIRVType(EleTy, needs_layout);
SJWf93f5f32020-05-05 07:27:56 -05001690 }
1691
1692 RID = addSPIRVInst<kTypes>(spv::OpTypeStruct, Ops);
1693
alan-bakerc3fd07f2020-10-22 09:48:49 -04001694 // Generate OpMemberDecorate unless we are generating it for the canonical
1695 // type.
1696 StructType *canonical = cast<StructType>(CanonicalType(STy));
alan-baker3f772c02021-06-15 22:18:11 -04001697 bool use_layout =
1698 (Option::SpvVersion() < SPIRVVersion::SPIRV_1_4) || needs_layout;
alan-bakerc3fd07f2020-10-22 09:48:49 -04001699 if (TypesNeedingLayout.idFor(STy) &&
alan-baker3f772c02021-06-15 22:18:11 -04001700 (canonical == STy || !TypesNeedingLayout.idFor(canonical)) &&
1701 use_layout) {
SJWf93f5f32020-05-05 07:27:56 -05001702 for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
1703 MemberIdx++) {
1704 // Ops[0] = Structure Type ID
1705 // Ops[1] = Member Index(Literal Number)
1706 // Ops[2] = Decoration (Offset)
1707 // Ops[3] = Byte Offset (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05001708 const auto ByteOffset =
1709 GetExplicitLayoutStructMemberOffset(STy, MemberIdx, DL);
1710
SJW01901d92020-05-21 08:58:31 -05001711 Ops.clear();
1712 Ops << RID << MemberIdx << spv::DecorationOffset << ByteOffset;
SJWf93f5f32020-05-05 07:27:56 -05001713
1714 addSPIRVInst<kAnnotations>(spv::OpMemberDecorate, Ops);
1715 }
1716 }
1717
alan-bakerc3fd07f2020-10-22 09:48:49 -04001718 // Generate OpDecorate unless we are generating it for the canonical type.
1719 if (StructTypesNeedingBlock.idFor(STy) &&
alan-baker3f772c02021-06-15 22:18:11 -04001720 (canonical == STy || !StructTypesNeedingBlock.idFor(canonical)) &&
1721 use_layout) {
SJWf93f5f32020-05-05 07:27:56 -05001722 Ops.clear();
1723 // Use Block decorations with StorageBuffer storage class.
SJW01901d92020-05-21 08:58:31 -05001724 Ops << RID << spv::DecorationBlock;
SJWf93f5f32020-05-05 07:27:56 -05001725
1726 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
1727 }
1728 break;
1729 }
1730 case Type::IntegerTyID: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001731 uint32_t bit_width =
1732 static_cast<uint32_t>(Canonical->getPrimitiveSizeInBits());
SJWf93f5f32020-05-05 07:27:56 -05001733
alan-bakere2a62752020-07-09 22:53:23 -04001734 if (clspv::Option::Int8Support() && bit_width == 8) {
SJW01901d92020-05-21 08:58:31 -05001735 addCapability(spv::CapabilityInt8);
alan-bakere2a62752020-07-09 22:53:23 -04001736 } else if (bit_width == 16) {
SJW01901d92020-05-21 08:58:31 -05001737 addCapability(spv::CapabilityInt16);
alan-bakere2a62752020-07-09 22:53:23 -04001738 } else if (bit_width == 64) {
SJW01901d92020-05-21 08:58:31 -05001739 addCapability(spv::CapabilityInt64);
1740 }
1741
alan-bakere2a62752020-07-09 22:53:23 -04001742 if (bit_width == 1) {
SJWf93f5f32020-05-05 07:27:56 -05001743 RID = addSPIRVInst<kTypes>(spv::OpTypeBool);
1744 } else {
alan-bakere2a62752020-07-09 22:53:23 -04001745 if (!clspv::Option::Int8Support() && bit_width == 8) {
SJWf93f5f32020-05-05 07:27:56 -05001746 // i8 is added to TypeMap as i32.
alan-baker3f772c02021-06-15 22:18:11 -04001747 RID = getSPIRVType(Type::getIntNTy(Canonical->getContext(), 32), false);
SJWf93f5f32020-05-05 07:27:56 -05001748 } else {
1749 SPIRVOperandVec Ops;
alan-bakere2a62752020-07-09 22:53:23 -04001750 Ops << bit_width << 0 /* not signed */;
SJWf93f5f32020-05-05 07:27:56 -05001751 RID = addSPIRVInst<kTypes>(spv::OpTypeInt, Ops);
1752 }
1753 }
1754 break;
1755 }
1756 case Type::HalfTyID:
1757 case Type::FloatTyID:
1758 case Type::DoubleTyID: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001759 uint32_t bit_width =
1760 static_cast<uint32_t>(Canonical->getPrimitiveSizeInBits());
alan-bakere2a62752020-07-09 22:53:23 -04001761 if (bit_width == 16) {
SJW01901d92020-05-21 08:58:31 -05001762 addCapability(spv::CapabilityFloat16);
alan-bakere2a62752020-07-09 22:53:23 -04001763 } else if (bit_width == 64) {
SJW01901d92020-05-21 08:58:31 -05001764 addCapability(spv::CapabilityFloat64);
1765 }
1766
SJWf93f5f32020-05-05 07:27:56 -05001767 SPIRVOperandVec Ops;
alan-bakere2a62752020-07-09 22:53:23 -04001768 Ops << bit_width;
SJWf93f5f32020-05-05 07:27:56 -05001769
1770 RID = addSPIRVInst<kTypes>(spv::OpTypeFloat, Ops);
1771 break;
1772 }
1773 case Type::ArrayTyID: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001774 ArrayType *ArrTy = cast<ArrayType>(Canonical);
SJWf93f5f32020-05-05 07:27:56 -05001775 const uint64_t Length = ArrTy->getArrayNumElements();
1776 if (Length == 0) {
1777 // By convention, map it to a RuntimeArray.
1778
1779 Type *EleTy = ArrTy->getArrayElementType();
1780
1781 //
1782 // Generate OpTypeRuntimeArray.
1783 //
1784 // OpTypeRuntimeArray
1785 // Ops[0] = Element Type ID
1786 SPIRVOperandVec Ops;
alan-baker3f772c02021-06-15 22:18:11 -04001787 Ops << getSPIRVType(EleTy, needs_layout);
SJWf93f5f32020-05-05 07:27:56 -05001788
1789 RID = addSPIRVInst<kTypes>(spv::OpTypeRuntimeArray, Ops);
1790
alan-baker3f772c02021-06-15 22:18:11 -04001791 if (Hack_generate_runtime_array_stride_early &&
1792 (Option::SpvVersion() < SPIRVVersion::SPIRV_1_4 || needs_layout)) {
SJWf93f5f32020-05-05 07:27:56 -05001793 // Generate OpDecorate.
1794
1795 // Ops[0] = Target ID
1796 // Ops[1] = Decoration (ArrayStride)
1797 // Ops[2] = Stride Number(Literal Number)
1798 Ops.clear();
1799
SJW01901d92020-05-21 08:58:31 -05001800 Ops << RID << spv::DecorationArrayStride
1801 << static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL));
SJWf93f5f32020-05-05 07:27:56 -05001802
1803 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
1804 }
1805
1806 } else {
1807
1808 //
1809 // Generate OpConstant and OpTypeArray.
1810 //
1811
1812 //
1813 // Generate OpConstant for array length.
1814 //
1815 // Add constant for length to constant list.
1816 Constant *CstLength =
1817 ConstantInt::get(Type::getInt32Ty(module->getContext()), Length);
SJWf93f5f32020-05-05 07:27:56 -05001818
1819 // Remember to generate ArrayStride later
alan-bakerc3fd07f2020-10-22 09:48:49 -04001820 getTypesNeedingArrayStride().insert(Canonical);
SJWf93f5f32020-05-05 07:27:56 -05001821
1822 //
1823 // Generate OpTypeArray.
1824 //
1825 // Ops[0] = Element Type ID
1826 // Ops[1] = Array Length Constant ID
1827 SPIRVOperandVec Ops;
1828
alan-baker3f772c02021-06-15 22:18:11 -04001829 Ops << getSPIRVType(ArrTy->getElementType(), needs_layout) << CstLength;
SJWf93f5f32020-05-05 07:27:56 -05001830
1831 RID = addSPIRVInst<kTypes>(spv::OpTypeArray, Ops);
1832 }
1833 break;
1834 }
1835 case Type::FixedVectorTyID: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001836 auto VecTy = cast<VectorType>(Canonical);
SJWf93f5f32020-05-05 07:27:56 -05001837 // <4 x i8> is changed to i32 if i8 is not generally supported.
1838 if (!clspv::Option::Int8Support() &&
1839 VecTy->getElementType() == Type::getInt8Ty(module->getContext())) {
alan-baker5a8c3be2020-09-09 13:44:26 -04001840 if (VecTy->getElementCount().getKnownMinValue() == 4) {
SJWf93f5f32020-05-05 07:27:56 -05001841 RID = getSPIRVType(VecTy->getElementType());
1842 break;
1843 } else {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001844 Canonical->print(errs());
SJWf93f5f32020-05-05 07:27:56 -05001845 llvm_unreachable("Support above i8 vector type");
1846 }
1847 }
1848
1849 // Ops[0] = Component Type ID
1850 // Ops[1] = Component Count (Literal Number)
1851 SPIRVOperandVec Ops;
alan-baker5a8c3be2020-09-09 13:44:26 -04001852 Ops << VecTy->getElementType()
1853 << VecTy->getElementCount().getKnownMinValue();
SJWf93f5f32020-05-05 07:27:56 -05001854
1855 RID = addSPIRVInst<kTypes>(spv::OpTypeVector, Ops);
1856 break;
1857 }
1858 case Type::VoidTyID: {
1859 RID = addSPIRVInst<kTypes>(spv::OpTypeVoid);
1860 break;
1861 }
1862 case Type::FunctionTyID: {
1863 // Generate SPIRV instruction for function type.
alan-bakerc3fd07f2020-10-22 09:48:49 -04001864 FunctionType *FTy = cast<FunctionType>(Canonical);
SJWf93f5f32020-05-05 07:27:56 -05001865
1866 // Ops[0] = Return Type ID
1867 // Ops[1] ... Ops[n] = Parameter Type IDs
1868 SPIRVOperandVec Ops;
1869
1870 // Find SPIRV instruction for return type
SJW01901d92020-05-21 08:58:31 -05001871 Ops << FTy->getReturnType();
SJWf93f5f32020-05-05 07:27:56 -05001872
1873 // Find SPIRV instructions for parameter types
1874 for (unsigned k = 0; k < FTy->getNumParams(); k++) {
1875 // Find SPIRV instruction for parameter type.
1876 auto ParamTy = FTy->getParamType(k);
1877 if (ParamTy->isPointerTy()) {
1878 auto PointeeTy = ParamTy->getPointerElementType();
1879 if (PointeeTy->isStructTy() &&
1880 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1881 ParamTy = PointeeTy;
1882 }
1883 }
1884
Kévin Petit85bcee02021-08-03 18:21:30 +01001885 Ops << getSPIRVType(ParamTy);
SJWf93f5f32020-05-05 07:27:56 -05001886 }
1887
1888 RID = addSPIRVInst<kTypes>(spv::OpTypeFunction, Ops);
1889 break;
1890 }
1891 }
1892
SJW01901d92020-05-21 08:58:31 -05001893 if (RID.isValid()) {
alan-baker3f772c02021-06-15 22:18:11 -04001894 auto &entry = TypeMap[Canonical];
1895 if (entry.empty()) {
1896 entry.resize(2);
1897 }
1898 entry[layout] = RID;
1899
1900 if (Canonical != Ty) {
1901 // Also cache the original type.
1902 auto &base_entry = TypeMap[Ty];
1903 if (base_entry.empty()) {
1904 base_entry.resize(2);
1905 }
1906 base_entry[layout] = RID;
alan-bakerc3fd07f2020-10-22 09:48:49 -04001907 }
SJWf93f5f32020-05-05 07:27:56 -05001908 }
1909 return RID;
David Neto22f144c2017-06-12 14:26:21 -04001910}
1911
SJW806a5d82020-07-15 12:51:38 -05001912SPIRVID SPIRVProducerPass::getSPIRVInt32Constant(uint32_t CstVal) {
1913 Type *i32 = Type::getInt32Ty(module->getContext());
1914 Constant *Cst = ConstantInt::get(i32, CstVal);
1915 return getSPIRVValue(Cst);
1916}
1917
alan-baker1b333b62021-05-31 14:55:32 -04001918SPIRVID SPIRVProducerPass::getSPIRVConstant(Constant *C) {
David Neto22f144c2017-06-12 14:26:21 -04001919 ValueMapType &VMap = getValueMap();
David Neto482550a2018-03-24 05:21:07 -07001920 const bool hack_undef = clspv::Option::HackUndef();
David Neto22f144c2017-06-12 14:26:21 -04001921
alan-baker1b333b62021-05-31 14:55:32 -04001922 // Treat poison as an undef.
1923 auto *Cst = C;
1924 if (isa<PoisonValue>(Cst)) {
1925 Cst = UndefValue::get(Cst->getType());
1926 }
1927
1928 auto VI = VMap.find(Cst);
1929 if (VI != VMap.end()) {
1930 assert(VI->second.isValid());
1931 return VI->second;
1932 }
1933
SJW01901d92020-05-21 08:58:31 -05001934 SPIRVID RID;
David Neto22f144c2017-06-12 14:26:21 -04001935
SJWf93f5f32020-05-05 07:27:56 -05001936 //
1937 // Generate OpConstant.
1938 //
1939 // Ops[0] = Result Type ID
1940 // Ops[1] .. Ops[n] = Values LiteralNumber
1941 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04001942
SJW01901d92020-05-21 08:58:31 -05001943 Ops << Cst->getType();
David Neto22f144c2017-06-12 14:26:21 -04001944
SJWf93f5f32020-05-05 07:27:56 -05001945 std::vector<uint32_t> LiteralNum;
1946 spv::Op Opcode = spv::OpNop;
David Neto22f144c2017-06-12 14:26:21 -04001947
SJWf93f5f32020-05-05 07:27:56 -05001948 if (isa<UndefValue>(Cst)) {
David Neto22f144c2017-06-12 14:26:21 -04001949 // Ops[0] = Result Type ID
SJWf93f5f32020-05-05 07:27:56 -05001950 Opcode = spv::OpUndef;
1951 if (hack_undef && IsTypeNullable(Cst->getType())) {
1952 Opcode = spv::OpConstantNull;
1953 }
1954 } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
alan-bakere2a62752020-07-09 22:53:23 -04001955 unsigned bit_width = CI->getBitWidth();
1956 if (bit_width == 1) {
SJWf93f5f32020-05-05 07:27:56 -05001957 // If the bitwidth of constant is 1, generate OpConstantTrue or
1958 // OpConstantFalse.
1959 if (CI->getZExtValue()) {
1960 // Ops[0] = Result Type ID
1961 Opcode = spv::OpConstantTrue;
David Neto22f144c2017-06-12 14:26:21 -04001962 } else {
SJWf93f5f32020-05-05 07:27:56 -05001963 // Ops[0] = Result Type ID
1964 Opcode = spv::OpConstantFalse;
David Neto22f144c2017-06-12 14:26:21 -04001965 }
SJWf93f5f32020-05-05 07:27:56 -05001966 } else {
1967 auto V = CI->getZExtValue();
1968 LiteralNum.push_back(V & 0xFFFFFFFF);
1969
alan-bakere2a62752020-07-09 22:53:23 -04001970 if (bit_width > 32) {
SJWf93f5f32020-05-05 07:27:56 -05001971 LiteralNum.push_back(V >> 32);
David Neto22f144c2017-06-12 14:26:21 -04001972 }
1973
1974 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04001975
SJW01901d92020-05-21 08:58:31 -05001976 Ops << LiteralNum;
SJWf93f5f32020-05-05 07:27:56 -05001977 }
1978 } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
1979 uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
1980 Type *CFPTy = CFP->getType();
1981 if (CFPTy->isFloatTy()) {
1982 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
1983 } else if (CFPTy->isDoubleTy()) {
1984 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
1985 LiteralNum.push_back(FPVal >> 32);
1986 } else if (CFPTy->isHalfTy()) {
1987 LiteralNum.push_back(FPVal & 0xFFFF);
1988 } else {
1989 CFPTy->print(errs());
1990 llvm_unreachable("Implement this ConstantFP Type");
1991 }
David Neto22f144c2017-06-12 14:26:21 -04001992
SJWf93f5f32020-05-05 07:27:56 -05001993 Opcode = spv::OpConstant;
David Neto49351ac2017-08-26 17:32:20 -04001994
SJW01901d92020-05-21 08:58:31 -05001995 Ops << LiteralNum;
SJWf93f5f32020-05-05 07:27:56 -05001996 } else if (isa<ConstantDataSequential>(Cst) &&
1997 cast<ConstantDataSequential>(Cst)->isString()) {
1998 Cst->print(errs());
1999 llvm_unreachable("Implement this Constant");
David Neto49351ac2017-08-26 17:32:20 -04002000
SJWf93f5f32020-05-05 07:27:56 -05002001 } else if (const ConstantDataSequential *CDS =
2002 dyn_cast<ConstantDataSequential>(Cst)) {
2003 // Let's convert <4 x i8> constant to int constant specially.
2004 // This case occurs when all the values are specified as constant
2005 // ints.
2006 Type *CstTy = Cst->getType();
2007 if (is4xi8vec(CstTy)) {
SJWf93f5f32020-05-05 07:27:56 -05002008 //
2009 // Generate OpConstant with OpTypeInt 32 0.
2010 //
2011 uint32_t IntValue = 0;
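// Pack the four byte elements into a single 32-bit word; element 0 ends
// up in the most significant byte.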
2012 for (unsigned k = 0; k < 4; k++) {
2013 const uint64_t Val = CDS->getElementAsInteger(k);
2014 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto49351ac2017-08-26 17:32:20 -04002015 }
2016
SJW806a5d82020-07-15 12:51:38 -05002017 RID = getSPIRVInt32Constant(IntValue);
SJWf93f5f32020-05-05 07:27:56 -05002018 } else {
2019
David Neto49351ac2017-08-26 17:32:20 -04002020 // A normal constant-data-sequential case.
David Neto22f144c2017-06-12 14:26:21 -04002021 for (unsigned k = 0; k < CDS->getNumElements(); k++) {
SJW01901d92020-05-21 08:58:31 -05002022 Ops << CDS->getElementAsConstant(k);
David Neto22f144c2017-06-12 14:26:21 -04002023 }
2024
2025 Opcode = spv::OpConstantComposite;
SJWf93f5f32020-05-05 07:27:56 -05002026 }
2027 } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
2028 // Let's convert <4 x i8> constant to int constant specially.
2029 // This case occurs when at least one of the values is an undef.
2030 Type *CstTy = Cst->getType();
2031 if (is4xi8vec(CstTy)) {
SJWf93f5f32020-05-05 07:27:56 -05002032 //
2033 // Generate OpConstant with OpTypeInt 32 0.
2034 //
2035 uint32_t IntValue = 0;
2036 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
2037 I != E; ++I) {
2038 uint64_t Val = 0;
2039 const Value *CV = *I;
2040 if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
2041 Val = CI2->getZExtValue();
David Neto22f144c2017-06-12 14:26:21 -04002042 }
SJWf93f5f32020-05-05 07:27:56 -05002043 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto22f144c2017-06-12 14:26:21 -04002044 }
2045
SJW806a5d82020-07-15 12:51:38 -05002046 RID = getSPIRVInt32Constant(IntValue);
SJWf93f5f32020-05-05 07:27:56 -05002047 } else {
2048
David Neto22f144c2017-06-12 14:26:21 -04002049 // We use a constant composite in SPIR-V for our constant aggregate in
2050 // LLVM.
2051 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002052
2053 for (unsigned k = 0; k < CA->getNumOperands(); k++) {
David Neto22f144c2017-06-12 14:26:21 -04002054 // And add an operand to the composite we are constructing
SJW01901d92020-05-21 08:58:31 -05002055 Ops << CA->getAggregateElement(k);
David Neto22f144c2017-06-12 14:26:21 -04002056 }
David Neto22f144c2017-06-12 14:26:21 -04002057 }
SJWf93f5f32020-05-05 07:27:56 -05002058 } else if (Cst->isNullValue()) {
2059 Opcode = spv::OpConstantNull;
2060 } else {
2061 Cst->print(errs());
2062 llvm_unreachable("Unsupported Constant???");
2063 }
David Neto22f144c2017-06-12 14:26:21 -04002064
SJWf93f5f32020-05-05 07:27:56 -05002065 if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
2066 // Null pointer requires variable pointers.
2067 setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
2068 }
alan-baker5b86ed72019-02-15 08:26:50 -05002069
SJWf93f5f32020-05-05 07:27:56 -05002070 if (RID == 0) {
2071 RID = addSPIRVInst<kConstants>(Opcode, Ops);
2072 }
2073
2074 VMap[Cst] = RID;
2075
2076 return RID;
2077}
2078
2079SPIRVID SPIRVProducerPass::getSPIRVValue(Value *V) {
2080 auto II = ValueMap.find(V);
2081 if (II != ValueMap.end()) {
SJW01901d92020-05-21 08:58:31 -05002082 assert(II->second.isValid());
SJWf93f5f32020-05-05 07:27:56 -05002083 return II->second;
2084 }
2085 if (Constant *Cst = dyn_cast<Constant>(V)) {
2086 return getSPIRVConstant(Cst);
2087 } else {
2088 llvm_unreachable("Variable not found");
2089 }
2090}
2091
SJW77b87ad2020-04-21 14:37:52 -05002092void SPIRVProducerPass::GenerateSamplers() {
alan-baker09cb9802019-12-10 13:16:27 -05002093 SamplerLiteralToIDMap.clear();
David Neto862b7d82018-06-14 18:48:37 -04002094 DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
2095 DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;
David Neto22f144c2017-06-12 14:26:21 -04002096
David Neto862b7d82018-06-14 18:48:37 -04002097 // We might have samplers in the sampler map that are not used
2098 // in the translation unit. We still need to allocate variables and
2099 // bindings for them.
2100 DenseSet<unsigned> used_bindings;
David Neto22f144c2017-06-12 14:26:21 -04002101
SJW77b87ad2020-04-21 14:37:52 -05002102 auto *var_fn = module->getFunction(clspv::LiteralSamplerFunction());
alan-baker09cb9802019-12-10 13:16:27 -05002103 // Return if there are no literal samplers.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002104 if (!var_fn)
2105 return;
alan-baker09cb9802019-12-10 13:16:27 -05002106
David Neto862b7d82018-06-14 18:48:37 -04002107 for (auto user : var_fn->users()) {
2108 // Populate SamplerLiteralToDescriptorSetMap and
2109 // SamplerLiteralToBindingMap.
2110 //
2111 // Look for calls like
2112 // call %opencl.sampler_t addrspace(2)*
2113 // @clspv.sampler.var.literal(
2114 // i32 descriptor,
2115 // i32 binding,
alan-baker09cb9802019-12-10 13:16:27 -05002116 // i32 (index-into-sampler-map|sampler_mask))
alan-bakerb6b09dc2018-11-08 16:59:28 -05002117 if (auto *call = dyn_cast<CallInst>(user)) {
alan-baker09cb9802019-12-10 13:16:27 -05002118 const auto third_param = static_cast<unsigned>(
alan-bakerb6b09dc2018-11-08 16:59:28 -05002119 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
alan-baker09cb9802019-12-10 13:16:27 -05002120 auto sampler_value = third_param;
2121 if (clspv::Option::UseSamplerMap()) {
alan-baker3f772c02021-06-15 22:18:11 -04002122 auto &sampler_map = *getSamplerMap();
alan-baker09cb9802019-12-10 13:16:27 -05002123 if (third_param >= sampler_map.size()) {
2124 errs() << "Out of bounds index to sampler map: " << third_param;
2125 llvm_unreachable("bad sampler init: out of bounds");
2126 }
2127 sampler_value = sampler_map[third_param].first;
David Neto862b7d82018-06-14 18:48:37 -04002128 }
2129
David Neto862b7d82018-06-14 18:48:37 -04002130 const auto descriptor_set = static_cast<unsigned>(
2131 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
2132 const auto binding = static_cast<unsigned>(
2133 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
2134
2135 SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
2136 SamplerLiteralToBindingMap[sampler_value] = binding;
2137 used_bindings.insert(binding);
2138 }
2139 }
2140
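// Second pass: create one sampler variable per distinct literal sampler
// value and decorate it with its descriptor set and binding.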
alan-baker09cb9802019-12-10 13:16:27 -05002141 DenseSet<size_t> seen;
2142 for (auto user : var_fn->users()) {
2143 if (!isa<CallInst>(user))
2144 continue;
2145
2146 auto call = cast<CallInst>(user);
2147 const unsigned third_param = static_cast<unsigned>(
2148 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
2149
2150 // Already allocated a variable for this value.
2151 if (!seen.insert(third_param).second)
2152 continue;
2153
2154 auto sampler_value = third_param;
2155 if (clspv::Option::UseSamplerMap()) {
alan-baker3f772c02021-06-15 22:18:11 -04002156 sampler_value = (*getSamplerMap())[third_param].first;
alan-baker09cb9802019-12-10 13:16:27 -05002157 }
2158
SJW806a5d82020-07-15 12:51:38 -05002159 auto sampler_var_id = addSPIRVGlobalVariable(
2160 getSPIRVType(SamplerTy), spv::StorageClassUniformConstant);
David Neto22f144c2017-06-12 14:26:21 -04002161
alan-baker09cb9802019-12-10 13:16:27 -05002162 SamplerLiteralToIDMap[sampler_value] = sampler_var_id;
David Neto22f144c2017-06-12 14:26:21 -04002163
Kévin Petit85bcee02021-08-03 18:21:30 +01002164 // Record the mapping between the parent function and the sampler variables
2165 // it uses so we can add them to its interface list.
2166 auto F = call->getParent()->getParent();
2167 FunctionToLiteralSamplersMap[F].push_back(sampler_var_id);
2168
David Neto862b7d82018-06-14 18:48:37 -04002169 unsigned descriptor_set;
2170 unsigned binding;
alan-baker09cb9802019-12-10 13:16:27 -05002171 if (SamplerLiteralToBindingMap.find(sampler_value) ==
alan-bakerb6b09dc2018-11-08 16:59:28 -05002172 SamplerLiteralToBindingMap.end()) {
David Neto862b7d82018-06-14 18:48:37 -04002173 // This sampler is not actually used. Find the next one.
alan-baker7506abb2020-09-10 15:02:55 -04002174 for (binding = 0; used_bindings.count(binding); binding++) {
2175 }
David Neto862b7d82018-06-14 18:48:37 -04002176 descriptor_set = 0; // Literal samplers always use descriptor set 0.
2177 used_bindings.insert(binding);
2178 } else {
alan-baker09cb9802019-12-10 13:16:27 -05002179 descriptor_set = SamplerLiteralToDescriptorSetMap[sampler_value];
2180 binding = SamplerLiteralToBindingMap[sampler_value];
alan-bakercff80152019-06-15 00:38:00 -04002181
alan-baker86ce19c2020-08-05 13:09:19 -04002182 auto import_id = getReflectionImport();
2183 SPIRVOperandVec Ops;
2184 Ops << getSPIRVType(Type::getVoidTy(module->getContext())) << import_id
2185 << reflection::ExtInstLiteralSampler
2186 << getSPIRVInt32Constant(descriptor_set)
2187 << getSPIRVInt32Constant(binding)
2188 << getSPIRVInt32Constant(sampler_value);
2189 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
David Neto862b7d82018-06-14 18:48:37 -04002190 }
2191
SJW69939d52020-04-16 07:29:07 -05002192 // Ops[0] = Target ID
2193 // Ops[1] = Decoration (DescriptorSet)
2194 // Ops[2] = LiteralNumber according to Decoration
SJW806a5d82020-07-15 12:51:38 -05002195 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05002196 Ops << sampler_var_id << spv::DecorationDescriptorSet << descriptor_set;
David Neto22f144c2017-06-12 14:26:21 -04002197
SJWf93f5f32020-05-05 07:27:56 -05002198 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002199
2200 // Ops[0] = Target ID
2201 // Ops[1] = Decoration (Binding)
2202 // Ops[2] = LiteralNumber according to Decoration
2203 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002204 Ops << sampler_var_id << spv::DecorationBinding << binding;
David Neto22f144c2017-06-12 14:26:21 -04002205
SJWf93f5f32020-05-05 07:27:56 -05002206 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002207 }
David Neto862b7d82018-06-14 18:48:37 -04002208}
David Neto22f144c2017-06-12 14:26:21 -04002209
SJW77b87ad2020-04-21 14:37:52 -05002210void SPIRVProducerPass::GenerateResourceVars() {
David Neto862b7d82018-06-14 18:48:37 -04002211 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04002212
David Neto862b7d82018-06-14 18:48:37 -04002213 // Generate variables. Make one for each resource var info object.
2214 for (auto *info : ModuleOrderedResourceVars) {
2215 Type *type = info->var_fn->getReturnType();
2216 // Remap the address space for opaque types.
2217 switch (info->arg_kind) {
2218 case clspv::ArgKind::Sampler:
alan-bakerf6bc8252020-09-23 14:58:55 -04002219 case clspv::ArgKind::SampledImage:
2220 case clspv::ArgKind::StorageImage:
David Neto862b7d82018-06-14 18:48:37 -04002221 type = PointerType::get(type->getPointerElementType(),
2222 clspv::AddressSpace::UniformConstant);
2223 break;
2224 default:
2225 break;
2226 }
David Neto22f144c2017-06-12 14:26:21 -04002227
David Neto862b7d82018-06-14 18:48:37 -04002228 const auto sc = GetStorageClassForArgKind(info->arg_kind);
David Neto22f144c2017-06-12 14:26:21 -04002229
SJW806a5d82020-07-15 12:51:38 -05002230 info->var_id = addSPIRVGlobalVariable(getSPIRVType(type), sc);
David Neto862b7d82018-06-14 18:48:37 -04002231
2232 // Map calls to the variable-builtin-function.
2233 for (auto &U : info->var_fn->uses()) {
2234 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
2235 const auto set = unsigned(
2236 dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
2237 const auto binding = unsigned(
2238 dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
2239 if (set == info->descriptor_set && binding == info->binding) {
2240 switch (info->arg_kind) {
2241 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002242 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002243 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04002244 case clspv::ArgKind::PodUBO:
2245 case clspv::ArgKind::PodPushConstant:
David Neto862b7d82018-06-14 18:48:37 -04002246 // The call maps to the variable directly.
2247 VMap[call] = info->var_id;
2248 break;
2249 case clspv::ArgKind::Sampler:
alan-bakerf6bc8252020-09-23 14:58:55 -04002250 case clspv::ArgKind::SampledImage:
2251 case clspv::ArgKind::StorageImage:
David Neto862b7d82018-06-14 18:48:37 -04002252 // The call maps to a load we generate later.
2253 ResourceVarDeferredLoadCalls[call] = info->var_id;
2254 break;
2255 default:
2256 llvm_unreachable("Unhandled arg kind");
2257 }
2258 }
David Neto22f144c2017-06-12 14:26:21 -04002259 }
David Neto862b7d82018-06-14 18:48:37 -04002260 }
2261 }
David Neto22f144c2017-06-12 14:26:21 -04002262
David Neto862b7d82018-06-14 18:48:37 -04002263 // Generate associated decorations.
SJWf93f5f32020-05-05 07:27:56 -05002264 SPIRVOperandVec Ops;
David Neto862b7d82018-06-14 18:48:37 -04002265 for (auto *info : ModuleOrderedResourceVars) {
alan-baker9b0ec3c2020-04-06 14:45:34 -04002266 // Push constants don't need descriptor set or binding decorations.
2267 if (info->arg_kind == clspv::ArgKind::PodPushConstant)
2268 continue;
2269
David Neto862b7d82018-06-14 18:48:37 -04002270 // Decorate with DescriptorSet and Binding.
2271 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002272 Ops << info->var_id << spv::DecorationDescriptorSet << info->descriptor_set;
SJWf93f5f32020-05-05 07:27:56 -05002273 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto862b7d82018-06-14 18:48:37 -04002274
2275 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002276 Ops << info->var_id << spv::DecorationBinding << info->binding;
SJWf93f5f32020-05-05 07:27:56 -05002277 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto862b7d82018-06-14 18:48:37 -04002278
alan-bakere9308012019-03-15 10:25:13 -04002279 if (info->coherent) {
2280 // Decorate with Coherent if required for the variable.
2281 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002282 Ops << info->var_id << spv::DecorationCoherent;
SJWf93f5f32020-05-05 07:27:56 -05002283 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere9308012019-03-15 10:25:13 -04002284 }
2285
David Neto862b7d82018-06-14 18:48:37 -04002286 // Generate NonWritable and NonReadable
2287 switch (info->arg_kind) {
2288 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002289 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002290 if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
2291 clspv::AddressSpace::Constant) {
2292 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002293 Ops << info->var_id << spv::DecorationNonWritable;
SJWf93f5f32020-05-05 07:27:56 -05002294 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002295 }
David Neto862b7d82018-06-14 18:48:37 -04002296 break;
alan-bakerf6bc8252020-09-23 14:58:55 -04002297 case clspv::ArgKind::StorageImage: {
2298 auto *type = info->var_fn->getReturnType();
2299 auto *struct_ty = cast<StructType>(type->getPointerElementType());
2300 // TODO(alan-baker): This is conservative. If compiling for OpenCL 2.0 or
2301 // above, the compiler treats all write_only images as read_write images.
2302 if (struct_ty->getName().contains("_wo_t")) {
2303 Ops.clear();
2304 Ops << info->var_id << spv::DecorationNonReadable;
2305 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
2306 }
David Neto862b7d82018-06-14 18:48:37 -04002307 break;
alan-bakerf6bc8252020-09-23 14:58:55 -04002308 }
David Neto862b7d82018-06-14 18:48:37 -04002309 default:
2310 break;
David Neto22f144c2017-06-12 14:26:21 -04002311 }
2312 }
2313}
2314
2315void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
David Neto22f144c2017-06-12 14:26:21 -04002316 ValueMapType &VMap = getValueMap();
SJW01901d92020-05-21 08:58:31 -05002317 std::vector<SPIRVID> &BuiltinDimVec = getBuiltinDimVec();
David Neto85082642018-03-24 06:55:20 -07002318 const DataLayout &DL = GV.getParent()->getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04002319
2320 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
2321 Type *Ty = GV.getType();
2322 PointerType *PTy = cast<PointerType>(Ty);
2323
SJW01901d92020-05-21 08:58:31 -05002324 SPIRVID InitializerID;
David Neto22f144c2017-06-12 14:26:21 -04002325
2326 // Workgroup size is handled differently (it goes into a constant)
2327 if (spv::BuiltInWorkgroupSize == BuiltinType) {
David Neto22f144c2017-06-12 14:26:21 -04002328 uint32_t PrevXDimCst = 0xFFFFFFFF;
2329 uint32_t PrevYDimCst = 0xFFFFFFFF;
2330 uint32_t PrevZDimCst = 0xFFFFFFFF;
alan-baker3b609772020-09-03 19:10:17 -04002331 bool HasMD = true;
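// Scan every kernel: if they all carry matching reqd_work_group_size
// metadata the size is emitted as a constant composite; otherwise (or when
// non-uniform NDRange support is enabled) spec constants are generated
// below.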
David Neto22f144c2017-06-12 14:26:21 -04002332 for (Function &Func : *GV.getParent()) {
2333 if (Func.isDeclaration()) {
2334 continue;
2335 }
2336
2337 // We only need to check kernels.
2338 if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
2339 continue;
2340 }
2341
2342 if (const MDNode *MD =
2343 dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
2344 uint32_t CurXDimCst = static_cast<uint32_t>(
2345 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
2346 uint32_t CurYDimCst = static_cast<uint32_t>(
2347 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
2348 uint32_t CurZDimCst = static_cast<uint32_t>(
2349 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
2350
2351 if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
2352 PrevZDimCst == 0xFFFFFFFF) {
2353 PrevXDimCst = CurXDimCst;
2354 PrevYDimCst = CurYDimCst;
2355 PrevZDimCst = CurZDimCst;
2356 } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
2357 CurZDimCst != PrevZDimCst) {
alan-baker3b609772020-09-03 19:10:17 -04002358 HasMD = false;
2359 continue;
David Neto22f144c2017-06-12 14:26:21 -04002360 } else {
2361 continue;
2362 }
2363
2364 //
2365 // Generate OpConstantComposite.
2366 //
2367 // Ops[0] : Result Type ID
2368 // Ops[1] : Constant size for x dimension.
2369 // Ops[2] : Constant size for y dimension.
2370 // Ops[3] : Constant size for z dimension.
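        // For illustration only: a kernel declared with reqd_work_group_size(8, 4, 1)
        // produces roughly the following (the %names are placeholders, not IDs the
        // pass actually assigns):
        //   %wgsize = OpConstantComposite %v3uint %uint_8 %uint_4 %uint_1
        // and %wgsize later receives the BuiltIn WorkgroupSize decoration.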
SJWf93f5f32020-05-05 07:27:56 -05002371 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002372
SJW01901d92020-05-21 08:58:31 -05002373 SPIRVID XDimCstID =
SJWf93f5f32020-05-05 07:27:56 -05002374 getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(0)));
SJW01901d92020-05-21 08:58:31 -05002375 SPIRVID YDimCstID =
SJWf93f5f32020-05-05 07:27:56 -05002376 getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(1)));
SJW01901d92020-05-21 08:58:31 -05002377 SPIRVID ZDimCstID =
SJWf93f5f32020-05-05 07:27:56 -05002378 getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(2)));
David Neto22f144c2017-06-12 14:26:21 -04002379
SJW01901d92020-05-21 08:58:31 -05002380 Ops << Ty->getPointerElementType() << XDimCstID << YDimCstID
2381 << ZDimCstID;
David Neto22f144c2017-06-12 14:26:21 -04002382
SJWf93f5f32020-05-05 07:27:56 -05002383 InitializerID =
2384 addSPIRVInst<kGlobalVariables>(spv::OpConstantComposite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002385 } else {
alan-baker3b609772020-09-03 19:10:17 -04002386 HasMD = false;
David Neto22f144c2017-06-12 14:26:21 -04002387 }
2388 }
2389
2390 // If the kernels do not all have consistent reqd_work_group_size metadata,
2391 // or non-uniform NDRange is supported, generate OpSpecConstants for x/y/z.
Kévin Petit21c23c62020-04-29 01:38:28 +01002392 if (!HasMD || clspv::Option::NonUniformNDRangeSupported()) {
David Neto22f144c2017-06-12 14:26:21 -04002393 //
2394 // Generate OpSpecConstants for x/y/z dimension.
2395 //
2396 // Ops[0] : Result Type ID
2397 // Ops[1] : Constant size for x/y/z dimension (Literal Number).
David Neto22f144c2017-06-12 14:26:21 -04002398
alan-bakera1be3322020-04-20 12:48:18 -04002399 // Allocate spec constants for workgroup size.
SJW77b87ad2020-04-21 14:37:52 -05002400 clspv::AddWorkgroupSpecConstants(module);
alan-bakera1be3322020-04-20 12:48:18 -04002401
SJWf93f5f32020-05-05 07:27:56 -05002402 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05002403 SPIRVID result_type_id = getSPIRVType(
SJWf93f5f32020-05-05 07:27:56 -05002404 dyn_cast<VectorType>(Ty->getPointerElementType())->getElementType());
David Neto22f144c2017-06-12 14:26:21 -04002405
David Neto257c3892018-04-11 13:19:45 -04002406 // X Dimension
SJW01901d92020-05-21 08:58:31 -05002407 Ops << result_type_id << 1;
2408 SPIRVID XDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002409
2410 // Y Dimension
2411 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002412 Ops << result_type_id << 1;
2413 SPIRVID YDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002414
2415 // Z Dimension
2416 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002417 Ops << result_type_id << 1;
2418 SPIRVID ZDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002419
David Neto257c3892018-04-11 13:19:45 -04002420 BuiltinDimVec.push_back(XDimCstID);
2421 BuiltinDimVec.push_back(YDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002422 BuiltinDimVec.push_back(ZDimCstID);
2423
David Neto22f144c2017-06-12 14:26:21 -04002424 //
2425 // Generate OpSpecConstantComposite.
2426 //
2427 // Ops[0] : Result Type ID
2428 // Ops[1] : Constant size for x dimension.
2429 // Ops[2] : Constant size for y dimension.
2430 // Ops[3] : Constant size for z dimension.
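      // Sketch of the emitted SPIR-V (the %names are illustrative only):
      //   %wgsize_x = OpSpecConstant %uint 1
      //   %wgsize_y = OpSpecConstant %uint 1
      //   %wgsize_z = OpSpecConstant %uint 1
      //   %wgsize = OpSpecConstantComposite %v3uint %wgsize_x %wgsize_y %wgsize_z
      // The SpecId 0/1/2 decorations for the scalar constants are emitted later,
      // from BuiltinDimVec, in GenerateModuleInfo().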
David Neto22f144c2017-06-12 14:26:21 -04002431 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002432 Ops << Ty->getPointerElementType() << XDimCstID << YDimCstID << ZDimCstID;
David Neto22f144c2017-06-12 14:26:21 -04002433
SJWf93f5f32020-05-05 07:27:56 -05002434 InitializerID =
2435 addSPIRVInst<kConstants>(spv::OpSpecConstantComposite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002436 }
alan-bakerbed3a882020-04-21 14:42:41 -04002437 } else if (BuiltinType == spv::BuiltInWorkDim) {
2438 // 1. Generate a specialization constant with a default of 3.
2439 // 2. Allocate and annotate a SpecId for the constant.
2440 // 3. Use the spec constant as the initializer for the variable.
SJWf93f5f32020-05-05 07:27:56 -05002441 SPIRVOperandVec Ops;
alan-bakerbed3a882020-04-21 14:42:41 -04002442
2443 //
2444 // Generate OpSpecConstant.
2445 //
2446 // Ops[0] : Result Type ID
2447 // Ops[1] : Default literal value
alan-bakerbed3a882020-04-21 14:42:41 -04002448
SJW01901d92020-05-21 08:58:31 -05002449 Ops << IntegerType::get(GV.getContext(), 32) << 3;
alan-bakerbed3a882020-04-21 14:42:41 -04002450
SJWf93f5f32020-05-05 07:27:56 -05002451 InitializerID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakerbed3a882020-04-21 14:42:41 -04002452
2453 //
2454 // Generate SpecId decoration.
2455 //
2456 // Ops[0] : target
2457 // Ops[1] : decoration
2458 // Ops[2] : SpecId
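    // Net effect, roughly (illustrative names):
    //   %workdim = OpSpecConstant %uint 3
    //   OpDecorate %workdim SpecId <allocated id>
    // and %workdim becomes the initializer of the WorkDim variable.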
Alan Baker75ccc252020-04-21 17:11:52 -04002459 auto spec_id = AllocateSpecConstant(module, SpecConstant::kWorkDim);
alan-bakerbed3a882020-04-21 14:42:41 -04002460 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002461 Ops << InitializerID << spv::DecorationSpecId << spec_id;
alan-bakerbed3a882020-04-21 14:42:41 -04002462
SJWf93f5f32020-05-05 07:27:56 -05002463 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002464 } else if (BuiltinType == spv::BuiltInGlobalOffset) {
2465 // 1. Generate a spec constant with a default of {0, 0, 0}.
2466 // 2. Allocate and annotate SpecIds for the constants.
2467 // 3. Use the spec constant as the initializer for the variable.
SJWf93f5f32020-05-05 07:27:56 -05002468 SPIRVOperandVec Ops;
alan-bakere1996972020-05-04 08:38:12 -04002469
2470 //
2471 // Generate OpSpecConstant for each dimension.
2472 //
2473 // Ops[0] : Result Type ID
2474 // Ops[1] : Default literal value
2475 //
SJW01901d92020-05-21 08:58:31 -05002476 Ops << IntegerType::get(GV.getContext(), 32) << 0;
2477 SPIRVID x_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002478
alan-bakere1996972020-05-04 08:38:12 -04002479 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002480 Ops << IntegerType::get(GV.getContext(), 32) << 0;
2481 SPIRVID y_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002482
alan-bakere1996972020-05-04 08:38:12 -04002483 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002484 Ops << IntegerType::get(GV.getContext(), 32) << 0;
2485 SPIRVID z_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002486
2487 //
2488 // Generate SpecId decoration for each dimension.
2489 //
2490 // Ops[0] : target
2491 // Ops[1] : decoration
2492 // Ops[2] : SpecId
2493 //
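    // Sketch of the decorations emitted below (illustrative names):
    //   OpDecorate %goff_x SpecId <id_x>
    //   OpDecorate %goff_y SpecId <id_y>
    //   OpDecorate %goff_z SpecId <id_z>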
2494 auto spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetX);
2495 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002496 Ops << x_id << spv::DecorationSpecId << spec_id;
SJWf93f5f32020-05-05 07:27:56 -05002497 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002498
2499 spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetY);
2500 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002501 Ops << y_id << spv::DecorationSpecId << spec_id;
SJWf93f5f32020-05-05 07:27:56 -05002502 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002503
2504 spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetZ);
2505 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002506 Ops << z_id << spv::DecorationSpecId << spec_id;
SJWf93f5f32020-05-05 07:27:56 -05002507 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002508
2509 //
2510 // Generate OpSpecConstantComposite.
2511 //
2512 // Ops[0] : type id
2513 // Ops[1..n-1] : elements
2514 //
alan-bakere1996972020-05-04 08:38:12 -04002515 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002516 Ops << GV.getType()->getPointerElementType() << x_id << y_id << z_id;
SJWf93f5f32020-05-05 07:27:56 -05002517 InitializerID = addSPIRVInst<kConstants>(spv::OpSpecConstantComposite, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002518 }
2519
David Neto85082642018-03-24 06:55:20 -07002520 const auto AS = PTy->getAddressSpace();
SJW806a5d82020-07-15 12:51:38 -05002521 const auto spvSC = GetStorageClass(AS);
David Neto22f144c2017-06-12 14:26:21 -04002522
David Neto85082642018-03-24 06:55:20 -07002523 const bool module_scope_constant_external_init =
David Neto862b7d82018-06-14 18:48:37 -04002524 (AS == AddressSpace::Constant) && GV.hasInitializer() &&
David Neto85082642018-03-24 06:55:20 -07002525 clspv::Option::ModuleConstantsInStorageBuffer();
2526
Kévin Petit23d5f182019-08-13 16:21:29 +01002527 if (GV.hasInitializer()) {
2528 auto GVInit = GV.getInitializer();
2529 if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
SJWf93f5f32020-05-05 07:27:56 -05002530 InitializerID = getSPIRVValue(GVInit);
David Neto85082642018-03-24 06:55:20 -07002531 }
2532 }
Kévin Petit23d5f182019-08-13 16:21:29 +01002533
alan-baker3f772c02021-06-15 22:18:11 -04002534 // All private, module private, and local global variables can be added to
2535 // interfaces conservatively.
2536 const bool interface =
2537 (AS == AddressSpace::Private || AS == AddressSpace::ModuleScopePrivate ||
2538 AS == AddressSpace::Local);
SJW806a5d82020-07-15 12:51:38 -05002539 SPIRVID var_id =
alan-baker3f772c02021-06-15 22:18:11 -04002540 addSPIRVGlobalVariable(getSPIRVType(Ty), spvSC, InitializerID, interface);
David Neto85082642018-03-24 06:55:20 -07002541
SJWf93f5f32020-05-05 07:27:56 -05002542 VMap[&GV] = var_id;
David Neto22f144c2017-06-12 14:26:21 -04002543
alan-bakere1996972020-05-04 08:38:12 -04002544 auto IsOpenCLBuiltin = [](spv::BuiltIn builtin) {
2545 return builtin == spv::BuiltInWorkDim ||
2546 builtin == spv::BuiltInGlobalOffset;
2547 };
2548
alan-bakere1996972020-05-04 08:38:12 -04002549 // If we have a builtin (not an OpenCL builtin).
2550 if (spv::BuiltInMax != BuiltinType && !IsOpenCLBuiltin(BuiltinType)) {
David Neto22f144c2017-06-12 14:26:21 -04002551 //
2552 // Generate OpDecorate.
2553 //
2554 // DOps[0] = Target ID
2555 // DOps[1] = Decoration (Builtin)
2556 // DOps[2] = BuiltIn ID
SJW01901d92020-05-21 08:58:31 -05002557 SPIRVID ResultID;
David Neto22f144c2017-06-12 14:26:21 -04002558
2559 // WorkgroupSize is different: we decorate the constant composite that has
2560 // its value, rather than the variable that we use to access the value.
2561 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2562 ResultID = InitializerID;
David Netoa60b00b2017-09-15 16:34:09 -04002563 // Save both the value and variable IDs for later.
2564 WorkgroupSizeValueID = InitializerID;
SJWf93f5f32020-05-05 07:27:56 -05002565 WorkgroupSizeVarID = getSPIRVValue(&GV);
David Neto22f144c2017-06-12 14:26:21 -04002566 } else {
SJWf93f5f32020-05-05 07:27:56 -05002567 ResultID = getSPIRVValue(&GV);
David Neto22f144c2017-06-12 14:26:21 -04002568 }
2569
SJW806a5d82020-07-15 12:51:38 -05002570 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05002571 Ops << ResultID << spv::DecorationBuiltIn << BuiltinType;
David Neto22f144c2017-06-12 14:26:21 -04002572
SJW01901d92020-05-21 08:58:31 -05002573 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto85082642018-03-24 06:55:20 -07002574 } else if (module_scope_constant_external_init) {
2575 // This module scope constant is initialized from a storage buffer with data
2576 // provided by the host at binding 0 of the next descriptor set.
SJW77b87ad2020-04-21 14:37:52 -05002577 const uint32_t descriptor_set = TakeDescriptorIndex(module);
David Neto85082642018-03-24 06:55:20 -07002578
alan-baker86ce19c2020-08-05 13:09:19 -04002579 // Emit the initializer as a reflection instruction.
David Neto85082642018-03-24 06:55:20 -07002580 // Use "kind,buffer" to indicate storage buffer. We might want to expand
2581 // that later to other types, like uniform buffer.
alan-bakerf5e5f692018-11-27 08:33:24 -05002582 std::string hexbytes;
2583 llvm::raw_string_ostream str(hexbytes);
2584 clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());
alan-baker86ce19c2020-08-05 13:09:19 -04002585
2586 // Reflection instruction for constant data.
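    // A sketch of what this emits (names are illustrative; the extended
    // instruction comes from the clspv reflection instruction set):
    //   %data = OpString "<hex-encoded initializer bytes>"
    //   %r = OpExtInst %void %reflection ConstantDataStorageBuffer
    //            %uint_<descriptor_set> %uint_0 %data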
2587 SPIRVOperandVec Ops;
2588 auto data_id = addSPIRVInst<kDebug>(spv::OpString, str.str().c_str());
2589 Ops << getSPIRVType(Type::getVoidTy(module->getContext()))
2590 << getReflectionImport() << reflection::ExtInstConstantDataStorageBuffer
2591 << getSPIRVInt32Constant(descriptor_set) << getSPIRVInt32Constant(0)
2592 << data_id;
2593 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
David Neto85082642018-03-24 06:55:20 -07002594
David Neto85082642018-03-24 06:55:20 -07002595 // OpDecorate %var DescriptorSet <descriptor_set>
alan-baker86ce19c2020-08-05 13:09:19 -04002596 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002597 Ops << var_id << spv::DecorationDescriptorSet << descriptor_set;
2598 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
SJW69939d52020-04-16 07:29:07 -05002599
2600 // OpDecorate %var Binding <binding>
SJW01901d92020-05-21 08:58:31 -05002601 Ops.clear();
2602 Ops << var_id << spv::DecorationBinding << 0;
2603 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04002604 }
2605}
2606
David Neto22f144c2017-06-12 14:26:21 -04002607void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
David Neto22f144c2017-06-12 14:26:21 -04002608 ValueMapType &VMap = getValueMap();
2609 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04002610 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
2611 auto &GlobalConstArgSet = getGlobalConstArgSet();
2612
2613 FunctionType *FTy = F.getFunctionType();
2614
2615 //
David Neto22f144c2017-06-12 14:26:21 -04002616 // Generate OpFunction.
2617 //
2618
2619 // FOps[0] : Result Type ID
2620 // FOps[1] : Function Control
2621 // FOps[2] : Function Type ID
SJWf93f5f32020-05-05 07:27:56 -05002622 SPIRVOperandVec FOps;
David Neto22f144c2017-06-12 14:26:21 -04002623
2624 // Find SPIRV instruction for return type.
SJW01901d92020-05-21 08:58:31 -05002625 FOps << FTy->getReturnType();
David Neto22f144c2017-06-12 14:26:21 -04002626
2627 // Check function attributes for SPIRV Function Control.
2628 uint32_t FuncControl = spv::FunctionControlMaskNone;
2629 if (F.hasFnAttribute(Attribute::AlwaysInline)) {
2630 FuncControl |= spv::FunctionControlInlineMask;
2631 }
2632 if (F.hasFnAttribute(Attribute::NoInline)) {
2633 FuncControl |= spv::FunctionControlDontInlineMask;
2634 }
2635 // TODO: Check llvm attribute for Function Control Pure.
2636 if (F.hasFnAttribute(Attribute::ReadOnly)) {
2637 FuncControl |= spv::FunctionControlPureMask;
2638 }
2639 // TODO: Check llvm attribute for Function Control Const.
2640 if (F.hasFnAttribute(Attribute::ReadNone)) {
2641 FuncControl |= spv::FunctionControlConstMask;
2642 }
2643
SJW01901d92020-05-21 08:58:31 -05002644 FOps << FuncControl;
David Neto22f144c2017-06-12 14:26:21 -04002645
SJW01901d92020-05-21 08:58:31 -05002646 SPIRVID FTyID;
David Neto22f144c2017-06-12 14:26:21 -04002647 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
2648 SmallVector<Type *, 4> NewFuncParamTys;
2649 FunctionType *NewFTy =
2650 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
SJWf93f5f32020-05-05 07:27:56 -05002651 FTyID = getSPIRVType(NewFTy);
David Neto22f144c2017-06-12 14:26:21 -04002652 } else {
David Neto9ed8e2f2018-03-24 06:47:24 -07002653 // Handle regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04002654 if (GlobalConstFuncTyMap.count(FTy)) {
SJWf93f5f32020-05-05 07:27:56 -05002655 FTyID = getSPIRVType(GlobalConstFuncTyMap[FTy].first);
David Neto22f144c2017-06-12 14:26:21 -04002656 } else {
SJWf93f5f32020-05-05 07:27:56 -05002657 FTyID = getSPIRVType(FTy);
David Neto22f144c2017-06-12 14:26:21 -04002658 }
2659 }
2660
SJW01901d92020-05-21 08:58:31 -05002661 FOps << FTyID;
David Neto22f144c2017-06-12 14:26:21 -04002662
SJWf93f5f32020-05-05 07:27:56 -05002663 // Generate SPIRV instruction for function.
2664 SPIRVID FID = addSPIRVInst(spv::OpFunction, FOps);
2665 VMap[&F] = FID;
David Neto22f144c2017-06-12 14:26:21 -04002666
SJWf93f5f32020-05-05 07:27:56 -05002667 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
2668 EntryPoints.push_back(std::make_pair(&F, FID));
2669 }
David Neto22f144c2017-06-12 14:26:21 -04002670
David Neto482550a2018-03-24 05:21:07 -07002671 if (clspv::Option::ShowIDs()) {
SJW01901d92020-05-21 08:58:31 -05002672 errs() << "Function " << F.getName() << " is " << FID.get() << "\n";
David Netob05675d2018-02-16 12:37:49 -05002673 }
David Neto22f144c2017-06-12 14:26:21 -04002674
2675 //
2676 // Generate OpFunctionParameter for Normal function.
2677 //
David Neto22f144c2017-06-12 14:26:21 -04002678 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
alan-bakere9308012019-03-15 10:25:13 -04002679
David Neto22f144c2017-06-12 14:26:21 -04002680 // Iterate over the Arguments for their names, instead of the param types from the function type.
2681 unsigned ArgIdx = 0;
2682 for (Argument &Arg : F.args()) {
David Neto22f144c2017-06-12 14:26:21 -04002683 // ParamOps[0] : Result Type ID
SJW01901d92020-05-21 08:58:31 -05002684 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002685
2686 // Find SPIRV instruction for parameter type.
SJW01901d92020-05-21 08:58:31 -05002687 SPIRVID ParamTyID = getSPIRVType(Arg.getType());
David Neto22f144c2017-06-12 14:26:21 -04002688 if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
2689 if (GlobalConstFuncTyMap.count(FTy)) {
2690 if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
2691 Type *EleTy = PTy->getPointerElementType();
2692 Type *ArgTy =
2693 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
SJWf93f5f32020-05-05 07:27:56 -05002694 ParamTyID = getSPIRVType(ArgTy);
David Neto22f144c2017-06-12 14:26:21 -04002695 GlobalConstArgSet.insert(&Arg);
2696 }
2697 }
2698 }
SJW01901d92020-05-21 08:58:31 -05002699 Ops << ParamTyID;
David Neto22f144c2017-06-12 14:26:21 -04002700
2701 // Generate SPIRV instruction for parameter.
SJW01901d92020-05-21 08:58:31 -05002702 SPIRVID param_id = addSPIRVInst(spv::OpFunctionParameter, Ops);
SJWf93f5f32020-05-05 07:27:56 -05002703 VMap[&Arg] = param_id;
2704
2705 if (CalledWithCoherentResource(Arg)) {
2706 // If the arg is passed a coherent resource ever, then decorate this
2707 // parameter with Coherent too.
SJW01901d92020-05-21 08:58:31 -05002708 Ops.clear();
2709 Ops << param_id << spv::DecorationCoherent;
2710 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
SJWf93f5f32020-05-05 07:27:56 -05002711 }
David Neto22f144c2017-06-12 14:26:21 -04002712
2713 ArgIdx++;
2714 }
2715 }
2716}
2717
SJW77b87ad2020-04-21 14:37:52 -05002718void SPIRVProducerPass::GenerateModuleInfo() {
David Neto22f144c2017-06-12 14:26:21 -04002719 EntryPointVecType &EntryPoints = getEntryPointVec();
SJW806a5d82020-07-15 12:51:38 -05002720 auto &EntryPointInterfaces = getEntryPointInterfacesList();
SJW01901d92020-05-21 08:58:31 -05002721 std::vector<SPIRVID> &BuiltinDimVec = getBuiltinDimVec();
David Neto22f144c2017-06-12 14:26:21 -04002722
SJWf93f5f32020-05-05 07:27:56 -05002723 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002724
SJW01901d92020-05-21 08:58:31 -05002725 for (auto Capability : CapabilitySet) {
David Neto22f144c2017-06-12 14:26:21 -04002726 //
SJW01901d92020-05-21 08:58:31 -05002727 // Generate OpCapability
David Neto22f144c2017-06-12 14:26:21 -04002728 //
2729 // Ops[0] = Capability
SJW01901d92020-05-21 08:58:31 -05002730 addSPIRVInst<kCapabilities>(spv::OpCapability, Capability);
alan-baker5b86ed72019-02-15 08:26:50 -05002731 }
2732
alan-baker3f772c02021-06-15 22:18:11 -04002733 // Storage buffer and variable pointer extensions were made core in SPIR-V
2734 // 1.3.
2735 if (SpvVersion() < SPIRVVersion::SPIRV_1_3) {
David Neto22f144c2017-06-12 14:26:21 -04002736 //
2737 // Generate OpExtension.
2738 //
2739 // Ops[0] = Name (Literal String)
2740 //
SJWf93f5f32020-05-05 07:27:56 -05002741 addSPIRVInst<kExtensions>(spv::OpExtension,
2742 "SPV_KHR_storage_buffer_storage_class");
David Neto22f144c2017-06-12 14:26:21 -04002743
alan-baker3f772c02021-06-15 22:18:11 -04002744 if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
2745 //
2746 // Generate OpExtension.
2747 //
2748 // Ops[0] = Name (Literal String)
2749 //
2750 addSPIRVInst<kExtensions>(spv::OpExtension, "SPV_KHR_variable_pointers");
2751 }
David Neto22f144c2017-06-12 14:26:21 -04002752 }
2753
2754 //
2755 // Generate OpMemoryModel
2756 //
2757 // Memory model for Vulkan will always be GLSL450.
2758
2759 // Ops[0] = Addressing Model
2760 // Ops[1] = Memory Model
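  // Concretely, every module emitted by this pass contains:
  //   OpMemoryModel Logical GLSL450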
2761 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002762 Ops << spv::AddressingModelLogical << spv::MemoryModelGLSL450;
David Neto22f144c2017-06-12 14:26:21 -04002763
SJWf93f5f32020-05-05 07:27:56 -05002764 addSPIRVInst<kMemoryModel>(spv::OpMemoryModel, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002765
2766 //
2767 // Generate OpEntryPoint
2768 //
2769 for (auto EntryPoint : EntryPoints) {
2770 // Ops[0] = Execution Model
2771 // Ops[1] = EntryPoint ID
2772 // Ops[2] = Name (Literal String)
2773 // ...
2774 //
2775 // TODO: Do we need to consider Interface ID for forward references???
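    // For illustration, a kernel "foo" yields something like:
    //   OpEntryPoint GLCompute %foo "foo" %var0 %var1 ...
    // where the trailing IDs are the interface variables gathered below.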
2776 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05002777 const StringRef &name = EntryPoint.first->getName();
SJW01901d92020-05-21 08:58:31 -05002778 Ops << spv::ExecutionModelGLCompute << EntryPoint.second << name;
David Neto22f144c2017-06-12 14:26:21 -04002779
SJW806a5d82020-07-15 12:51:38 -05002780 for (auto &Interface : EntryPointInterfaces) {
SJW01901d92020-05-21 08:58:31 -05002781 Ops << Interface;
David Neto22f144c2017-06-12 14:26:21 -04002782 }
2783
alan-baker3f772c02021-06-15 22:18:11 -04002784 // Starting in SPIR-V 1.4, all statically used global variables must be
2785 // included in the interface. Private and statically-sized workgroup
2786 // variables are added to all entry points. Kernel arguments are handled
2787 // here.
2788 if (SpvVersion() >= SPIRVVersion::SPIRV_1_4) {
2789 auto *F = dyn_cast<Function>(EntryPoint.first);
2790 assert(F);
2791 assert(F->getCallingConv() == CallingConv::SPIR_KERNEL);
2792
2793 auto &resource_var_at_index = FunctionToResourceVarsMap[F];
2794 for (auto *info : resource_var_at_index) {
2795 if (info) {
2796 Ops << info->var_id;
2797 }
2798 }
2799
Kévin Petit85bcee02021-08-03 18:21:30 +01002800 for (auto sampler_id : FunctionToLiteralSamplersMap[F]) {
2801 Ops << sampler_id;
2802 }
2803
2804 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
2805 auto *V = module->getGlobalVariable(
2806 clspv::ClusteredConstantsVariableName(), true);
2807 if (V) {
2808 Ops << getValueMap()[V];
2809 }
2810 }
2811
alan-baker3f772c02021-06-15 22:18:11 -04002812 auto local_spec_id_md =
2813 module->getNamedMetadata(clspv::LocalSpecIdMetadataName());
2814 if (local_spec_id_md) {
2815 for (auto spec_id_op : local_spec_id_md->operands()) {
2816 if (dyn_cast<Function>(
2817 dyn_cast<ValueAsMetadata>(spec_id_op->getOperand(0))
2818 ->getValue()) == F) {
2819 int64_t spec_id =
2820 mdconst::extract<ConstantInt>(spec_id_op->getOperand(2))
2821 ->getSExtValue();
2822 if (spec_id > 0) {
2823 auto &info = LocalSpecIdInfoMap[spec_id];
2824 Ops << info.variable_id;
2825 }
2826 }
2827 }
2828 }
2829
2830 // If the kernel uses the global push constant interface it will not be
2831 // covered by the resource variable iteration above.
2832 if (GetPodArgsImpl(*F) == PodArgImpl::kGlobalPushConstant) {
2833 auto *PC =
2834 module->getGlobalVariable(clspv::PushConstantsVariableName());
2835 assert(PC);
2836 Ops << getValueMap()[PC];
2837 }
2838 }
2839
SJWf93f5f32020-05-05 07:27:56 -05002840 addSPIRVInst<kEntryPoints>(spv::OpEntryPoint, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002841 }
2842
alan-baker3b609772020-09-03 19:10:17 -04002843 if (BuiltinDimVec.empty()) {
2844 for (auto EntryPoint : EntryPoints) {
2845 const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
2846 ->getMetadata("reqd_work_group_size");
2847 if ((MD != nullptr) && !clspv::Option::NonUniformNDRangeSupported()) {
2848 //
2849 // Generate OpExecutionMode
2850 //
David Neto22f144c2017-06-12 14:26:21 -04002851
alan-baker3b609772020-09-03 19:10:17 -04002852 // Ops[0] = Entry Point ID
2853 // Ops[1] = Execution Mode
2854 // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
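        // e.g. reqd_work_group_size(8, 4, 1) yields:
        //   OpExecutionMode %kernel LocalSize 8 4 1
        // (%kernel is a placeholder for the entry point's ID).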
2855 Ops.clear();
2856 Ops << EntryPoint.second << spv::ExecutionModeLocalSize;
2857
2858 uint32_t XDim = static_cast<uint32_t>(
2859 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
2860 uint32_t YDim = static_cast<uint32_t>(
2861 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
2862 uint32_t ZDim = static_cast<uint32_t>(
2863 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
2864
2865 Ops << XDim << YDim << ZDim;
2866
2867 addSPIRVInst<kExecutionModes>(spv::OpExecutionMode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002868 }
David Neto22f144c2017-06-12 14:26:21 -04002869 }
2870 }
2871
2872 //
2873 // Generate OpSource.
2874 //
2875 // Ops[0] = SourceLanguage ID
2876 // Ops[1] = Version (LiteralNum)
2877 //
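  // For example, compiling OpenCL C 1.2 source results in:
  //   OpSource OpenCL_C 120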
SJW01901d92020-05-21 08:58:31 -05002878 uint32_t LangID = spv::SourceLanguageUnknown;
2879 uint32_t LangVer = 0;
Kévin Petitf0515712020-01-07 18:29:20 +00002880 switch (clspv::Option::Language()) {
2881 case clspv::Option::SourceLanguage::OpenCL_C_10:
SJW01901d92020-05-21 08:58:31 -05002882 LangID = spv::SourceLanguageOpenCL_C;
2883 LangVer = 100;
Kévin Petitf0515712020-01-07 18:29:20 +00002884 break;
2885 case clspv::Option::SourceLanguage::OpenCL_C_11:
SJW01901d92020-05-21 08:58:31 -05002886 LangID = spv::SourceLanguageOpenCL_C;
2887 LangVer = 110;
Kévin Petitf0515712020-01-07 18:29:20 +00002888 break;
2889 case clspv::Option::SourceLanguage::OpenCL_C_12:
SJW01901d92020-05-21 08:58:31 -05002890 LangID = spv::SourceLanguageOpenCL_C;
2891 LangVer = 120;
Kévin Petitf0515712020-01-07 18:29:20 +00002892 break;
2893 case clspv::Option::SourceLanguage::OpenCL_C_20:
SJW01901d92020-05-21 08:58:31 -05002894 LangID = spv::SourceLanguageOpenCL_C;
2895 LangVer = 200;
Kévin Petitf0515712020-01-07 18:29:20 +00002896 break;
Kévin Petit77838ff2020-10-19 18:54:51 +01002897 case clspv::Option::SourceLanguage::OpenCL_C_30:
2898 LangID = spv::SourceLanguageOpenCL_C;
2899 LangVer = 300;
2900 break;
Kévin Petitf0515712020-01-07 18:29:20 +00002901 case clspv::Option::SourceLanguage::OpenCL_CPP:
SJW01901d92020-05-21 08:58:31 -05002902 LangID = spv::SourceLanguageOpenCL_CPP;
2903 LangVer = 100;
Kévin Petitf0515712020-01-07 18:29:20 +00002904 break;
2905 default:
Kévin Petitf0515712020-01-07 18:29:20 +00002906 break;
Kévin Petit0fc88042019-04-09 23:25:02 +01002907 }
David Neto22f144c2017-06-12 14:26:21 -04002908
SJW01901d92020-05-21 08:58:31 -05002909 Ops.clear();
2910 Ops << LangID << LangVer;
SJWf93f5f32020-05-05 07:27:56 -05002911 addSPIRVInst<kDebug>(spv::OpSource, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002912
2913 if (!BuiltinDimVec.empty()) {
2914 //
2915 // Generate OpDecorates for x/y/z dimension.
2916 //
2917 // Ops[0] = Target ID
2918 // Ops[1] = Decoration (SpecId)
David Neto257c3892018-04-11 13:19:45 -04002919 // Ops[2] = Specialization Constant ID (Literal Number)
David Neto22f144c2017-06-12 14:26:21 -04002920
2921 // X Dimension
2922 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002923 Ops << BuiltinDimVec[0] << spv::DecorationSpecId << 0;
SJWf93f5f32020-05-05 07:27:56 -05002924 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002925
2926 // Y Dimension
2927 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002928 Ops << BuiltinDimVec[1] << spv::DecorationSpecId << 1;
SJWf93f5f32020-05-05 07:27:56 -05002929 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002930
2931 // Z Dimension
2932 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002933 Ops << BuiltinDimVec[2] << spv::DecorationSpecId << 2;
SJWf93f5f32020-05-05 07:27:56 -05002934 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002935 }
2936}
2937
David Netob6e2e062018-04-25 10:32:06 -04002938void SPIRVProducerPass::GenerateEntryPointInitialStores() {
2939 // Work around a driver bug. Initializers on Private variables might not
2940 // work. So the start of the kernel should store the initializer value to the
2941 // variables. Yes, *every* entry point pays this cost if *any* entry point
2942 // uses this builtin. At this point I judge this to be an acceptable tradeoff
2943 // of complexity vs. runtime, for a broken driver.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002944 // TODO(dneto): Remove this at some point once fixed drivers are widely
2945 // available.
SJW01901d92020-05-21 08:58:31 -05002946 if (WorkgroupSizeVarID.isValid()) {
2947 assert(WorkgroupSizeValueID.isValid());
David Netob6e2e062018-04-25 10:32:06 -04002948
SJWf93f5f32020-05-05 07:27:56 -05002949 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05002950 Ops << WorkgroupSizeVarID << WorkgroupSizeValueID;
David Netob6e2e062018-04-25 10:32:06 -04002951
SJWf93f5f32020-05-05 07:27:56 -05002952 addSPIRVInst(spv::OpStore, Ops);
David Netob6e2e062018-04-25 10:32:06 -04002953 }
2954}
2955
David Neto22f144c2017-06-12 14:26:21 -04002956void SPIRVProducerPass::GenerateFuncBody(Function &F) {
David Neto22f144c2017-06-12 14:26:21 -04002957 ValueMapType &VMap = getValueMap();
2958
David Netob6e2e062018-04-25 10:32:06 -04002959 const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;
David Neto22f144c2017-06-12 14:26:21 -04002960
2961 for (BasicBlock &BB : F) {
2962 // Register the BasicBlock in the ValueMap.
David Neto22f144c2017-06-12 14:26:21 -04002963
2964 //
2965 // Generate OpLabel for Basic Block.
2966 //
SJWf93f5f32020-05-05 07:27:56 -05002967 VMap[&BB] = addSPIRVInst(spv::OpLabel);
David Neto22f144c2017-06-12 14:26:21 -04002968
David Neto6dcd4712017-06-23 11:06:47 -04002969 // OpVariable instructions must come first.
2970 for (Instruction &I : BB) {
alan-baker5b86ed72019-02-15 08:26:50 -05002971 if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
2972 // Allocating a pointer requires variable pointers.
2973 if (alloca->getAllocatedType()->isPointerTy()) {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04002974 setVariablePointersCapabilities(
2975 alloca->getAllocatedType()->getPointerAddressSpace());
alan-baker5b86ed72019-02-15 08:26:50 -05002976 }
David Neto6dcd4712017-06-23 11:06:47 -04002977 GenerateInstruction(I);
2978 }
2979 }
2980
David Neto22f144c2017-06-12 14:26:21 -04002981 if (&BB == &F.getEntryBlock() && IsKernel) {
David Netob6e2e062018-04-25 10:32:06 -04002982 if (clspv::Option::HackInitializers()) {
2983 GenerateEntryPointInitialStores();
2984 }
David Neto22f144c2017-06-12 14:26:21 -04002985 }
2986
2987 for (Instruction &I : BB) {
David Neto6dcd4712017-06-23 11:06:47 -04002988 if (!isa<AllocaInst>(I)) {
2989 GenerateInstruction(I);
2990 }
David Neto22f144c2017-06-12 14:26:21 -04002991 }
2992 }
2993}
2994
2995spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
2996 const std::map<CmpInst::Predicate, spv::Op> Map = {
2997 {CmpInst::ICMP_EQ, spv::OpIEqual},
2998 {CmpInst::ICMP_NE, spv::OpINotEqual},
2999 {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
3000 {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
3001 {CmpInst::ICMP_ULT, spv::OpULessThan},
3002 {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
3003 {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
3004 {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
3005 {CmpInst::ICMP_SLT, spv::OpSLessThan},
3006 {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
3007 {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
3008 {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
3009 {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
3010 {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
3011 {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
3012 {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
3013 {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
3014 {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
3015 {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
3016 {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
3017 {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
3018 {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};
3019
3020 assert(0 != Map.count(I->getPredicate()));
3021
3022 return Map.at(I->getPredicate());
3023}
3024
3025spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
3026 const std::map<unsigned, spv::Op> Map{
3027 {Instruction::Trunc, spv::OpUConvert},
3028 {Instruction::ZExt, spv::OpUConvert},
3029 {Instruction::SExt, spv::OpSConvert},
3030 {Instruction::FPToUI, spv::OpConvertFToU},
3031 {Instruction::FPToSI, spv::OpConvertFToS},
3032 {Instruction::UIToFP, spv::OpConvertUToF},
3033 {Instruction::SIToFP, spv::OpConvertSToF},
3034 {Instruction::FPTrunc, spv::OpFConvert},
3035 {Instruction::FPExt, spv::OpFConvert},
3036 {Instruction::BitCast, spv::OpBitcast}};
3037
3038 assert(0 != Map.count(I.getOpcode()));
3039
3040 return Map.at(I.getOpcode());
3041}
3042
3043spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
Kévin Petit24272b62018-10-18 19:16:12 +00003044 if (I.getType()->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003045 switch (I.getOpcode()) {
3046 default:
3047 break;
3048 case Instruction::Or:
3049 return spv::OpLogicalOr;
3050 case Instruction::And:
3051 return spv::OpLogicalAnd;
3052 case Instruction::Xor:
3053 return spv::OpLogicalNotEqual;
3054 }
3055 }
3056
alan-bakerb6b09dc2018-11-08 16:59:28 -05003057 const std::map<unsigned, spv::Op> Map{
David Neto22f144c2017-06-12 14:26:21 -04003058 {Instruction::Add, spv::OpIAdd},
3059 {Instruction::FAdd, spv::OpFAdd},
3060 {Instruction::Sub, spv::OpISub},
3061 {Instruction::FSub, spv::OpFSub},
3062 {Instruction::Mul, spv::OpIMul},
3063 {Instruction::FMul, spv::OpFMul},
3064 {Instruction::UDiv, spv::OpUDiv},
3065 {Instruction::SDiv, spv::OpSDiv},
3066 {Instruction::FDiv, spv::OpFDiv},
3067 {Instruction::URem, spv::OpUMod},
3068 {Instruction::SRem, spv::OpSRem},
3069 {Instruction::FRem, spv::OpFRem},
3070 {Instruction::Or, spv::OpBitwiseOr},
3071 {Instruction::Xor, spv::OpBitwiseXor},
3072 {Instruction::And, spv::OpBitwiseAnd},
3073 {Instruction::Shl, spv::OpShiftLeftLogical},
3074 {Instruction::LShr, spv::OpShiftRightLogical},
3075 {Instruction::AShr, spv::OpShiftRightArithmetic}};
3076
3077 assert(0 != Map.count(I.getOpcode()));
3078
3079 return Map.at(I.getOpcode());
3080}
3081
SJW806a5d82020-07-15 12:51:38 -05003082SPIRVID SPIRVProducerPass::getSPIRVBuiltin(spv::BuiltIn BID,
3083 spv::Capability Cap) {
3084 SPIRVID RID;
3085
3086 auto ii = BuiltinConstantMap.find(BID);
3087
3088 if (ii != BuiltinConstantMap.end()) {
3089 return ii->second;
3090 } else {
SJW806a5d82020-07-15 12:51:38 -05003091 addCapability(Cap);
3092
3093 Type *type = PointerType::get(IntegerType::get(module->getContext(), 32),
3094 AddressSpace::Input);
3095
3096 RID = addSPIRVGlobalVariable(getSPIRVType(type), spv::StorageClassInput);
3097
3098 BuiltinConstantMap[BID] = RID;
3099
3100 //
3101 // Generate OpDecorate.
3102 //
3103 // Ops[0] : target
3104 // Ops[1] : decoration
3105 // Ops[2] : SpecId
3106 SPIRVOperandVec Ops;
3107 Ops << RID << spv::DecorationBuiltIn << static_cast<int>(BID);
3108
3109 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
3110 }
3111
3112 return RID;
3113}
3114
3115SPIRVID
3116SPIRVProducerPass::GenerateClspvInstruction(CallInst *Call,
3117 const FunctionInfo &FuncInfo) {
3118 SPIRVID RID;
3119
3120 switch (FuncInfo.getType()) {
3121 case Builtins::kClspvCompositeConstruct:
3122 RID = addSPIRVPlaceholder(Call);
3123 break;
3124 case Builtins::kClspvResource: {
3125 if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
3126 // Generate an OpLoad
3127 SPIRVOperandVec Ops;
3128
3129 Ops << Call->getType()->getPointerElementType()
3130 << ResourceVarDeferredLoadCalls[Call];
3131
3132 RID = addSPIRVInst(spv::OpLoad, Ops);
3133
3134 } else {
3135 // This maps to an OpVariable we've already generated.
3136 // No code is generated for the call.
3137 }
3138 break;
3139 }
3140 case Builtins::kClspvLocal: {
3141 // Don't codegen an instruction here, but instead map this call directly
3142 // to the workgroup variable id.
3143 int spec_id = static_cast<int>(
3144 cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
3145 const auto &info = LocalSpecIdInfoMap[spec_id];
3146 RID = info.variable_id;
3147 break;
3148 }
3149 case Builtins::kClspvSamplerVarLiteral: {
3150 // Sampler initializers become a load of the corresponding sampler.
3151 // Map this to a load from the variable.
3152 const auto third_param = static_cast<unsigned>(
3153 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue());
3154 auto sampler_value = third_param;
3155 if (clspv::Option::UseSamplerMap()) {
alan-baker3f772c02021-06-15 22:18:11 -04003156 sampler_value = (*getSamplerMap())[third_param].first;
SJW806a5d82020-07-15 12:51:38 -05003157 }
3158
3159 // Generate an OpLoad
3160 SPIRVOperandVec Ops;
3161
3162 Ops << SamplerTy->getPointerElementType()
3163 << SamplerLiteralToIDMap[sampler_value];
3164
3165 RID = addSPIRVInst(spv::OpLoad, Ops);
3166 break;
3167 }
3168 case Builtins::kSpirvAtomicXor: {
3169 // Handle SPIR-V intrinsics
3170 SPIRVOperandVec Ops;
3171
3172 if (!Call->getType()->isVoidTy()) {
3173 Ops << Call->getType();
3174 }
3175
3176 for (unsigned i = 0; i < Call->getNumArgOperands(); i++) {
3177 Ops << Call->getArgOperand(i);
3178 }
3179
3180 RID = addSPIRVInst(spv::OpAtomicXor, Ops);
3181 break;
3182 }
3183 case Builtins::kSpirvOp: {
3184 // Handle SPIR-V intrinsics
3185 auto *arg0 = dyn_cast<ConstantInt>(Call->getArgOperand(0));
3186 spv::Op opcode = static_cast<spv::Op>(arg0->getZExtValue());
3187 if (opcode != spv::OpNop) {
3188 SPIRVOperandVec Ops;
3189
3190 if (!Call->getType()->isVoidTy()) {
3191 Ops << Call->getType();
3192 }
3193
3194 for (unsigned i = 1; i < Call->getNumArgOperands(); i++) {
3195 Ops << Call->getArgOperand(i);
3196 }
3197
3198 RID = addSPIRVInst(opcode, Ops);
3199 }
3200 break;
3201 }
3202 case Builtins::kSpirvCopyMemory: {
3203 //
3204 // Generate OpCopyMemory.
3205 //
3206
3207 // Ops[0] = Dst ID
3208 // Ops[1] = Src ID
3209 // Ops[2] = Memory Access
3210 // Ops[3] = Alignment
3211
alan-baker3f772c02021-06-15 22:18:11 -04003212 const auto volatile_arg = SpvVersion() >= SPIRVVersion::SPIRV_1_4 ? 4 : 3;
3213 auto IsVolatile = dyn_cast<ConstantInt>(Call->getArgOperand(volatile_arg))
3214 ->getZExtValue() != 0;
SJW806a5d82020-07-15 12:51:38 -05003215
3216 auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
3217 : spv::MemoryAccessMaskNone;
3218
3219 auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;
3220
alan-baker3f772c02021-06-15 22:18:11 -04003221 auto DstAlignment =
SJW806a5d82020-07-15 12:51:38 -05003222 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
alan-baker3f772c02021-06-15 22:18:11 -04003223 auto SrcAlignment = DstAlignment;
3224 if (SpvVersion() >= SPIRVVersion::SPIRV_1_4) {
3225 SrcAlignment =
3226 dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue();
3227 }
SJW806a5d82020-07-15 12:51:38 -05003228
alan-baker3f772c02021-06-15 22:18:11 -04003229 // OpCopyMemory only works if the pointer element types resolve to the same id. If
3230 // we are generating code for SPIR-V 1.4 or later, this may not be the
3231 // case.
3232 auto dst = Call->getArgOperand(0);
3233 auto src = Call->getArgOperand(1);
3234 auto dst_layout =
3235 PointerRequiresLayout(dst->getType()->getPointerAddressSpace());
3236 auto src_layout =
3237 PointerRequiresLayout(src->getType()->getPointerAddressSpace());
3238 auto dst_id =
3239 getSPIRVType(dst->getType()->getPointerElementType(), dst_layout);
3240 auto src_id =
3241 getSPIRVType(src->getType()->getPointerElementType(), src_layout);
SJW806a5d82020-07-15 12:51:38 -05003242 SPIRVOperandVec Ops;
alan-baker3f772c02021-06-15 22:18:11 -04003243 if (dst_id.get() != src_id.get()) {
3244 assert(Option::SpvVersion() >= SPIRVVersion::SPIRV_1_4);
3245 // Types differ so generate:
3246 // OpLoad
3247 // OpCopyLogical
3248 // OpStore
3249 auto load_type_id =
3250 getSPIRVType(src->getType()->getPointerElementType(), src_layout);
3251 Ops << load_type_id << src << MemoryAccess
3252 << static_cast<uint32_t>(SrcAlignment);
3253 auto load = addSPIRVInst(spv::OpLoad, Ops);
SJW806a5d82020-07-15 12:51:38 -05003254
alan-baker3f772c02021-06-15 22:18:11 -04003255 auto copy_type_id =
3256 getSPIRVType(dst->getType()->getPointerElementType(), dst_layout);
3257 Ops.clear();
3258 Ops << copy_type_id << load;
3259 auto copy = addSPIRVInst(spv::OpCopyLogical, Ops);
3260
3261 Ops.clear();
3262 Ops << dst << copy << MemoryAccess << static_cast<uint32_t>(DstAlignment);
3263 RID = addSPIRVInst(spv::OpStore, Ops);
3264 } else {
3265 Ops << dst << src << MemoryAccess << static_cast<uint32_t>(DstAlignment);
3266 if (SpvVersion() >= SPIRVVersion::SPIRV_1_4) {
3267 Ops << MemoryAccess << static_cast<uint32_t>(SrcAlignment);
3268 }
3269
3270 RID = addSPIRVInst(spv::OpCopyMemory, Ops);
3271 }
SJW806a5d82020-07-15 12:51:38 -05003272 break;
3273 }
3274 default:
3275 llvm_unreachable("Unknown CLSPV Instruction");
3276 break;
3277 }
3278 return RID;
3279}
3280
3281SPIRVID
3282SPIRVProducerPass::GenerateImageInstruction(CallInst *Call,
3283 const FunctionInfo &FuncInfo) {
3284 SPIRVID RID;
3285
alan-baker3f772c02021-06-15 22:18:11 -04003286 auto GetExtendMask = [this](Type *sample_type,
3287 bool is_int_image) -> uint32_t {
3288 if (SpvVersion() >= SPIRVVersion::SPIRV_1_4 &&
3289 sample_type->getScalarType()->isIntegerTy()) {
3290 if (is_int_image)
3291 return spv::ImageOperandsSignExtendMask;
3292 else
3293 return spv::ImageOperandsZeroExtendMask;
3294 }
3295 return 0;
3296 };
3297
SJW806a5d82020-07-15 12:51:38 -05003298 LLVMContext &Context = module->getContext();
3299 switch (FuncInfo.getType()) {
3300 case Builtins::kReadImagef:
3301 case Builtins::kReadImageh:
3302 case Builtins::kReadImagei:
3303 case Builtins::kReadImageui: {
3304 // read_image is converted to OpSampledImage and OpImageSampleExplicitLod.
3305 // Additionally, OpTypeSampledImage is generated.
alan-bakerf6bc8252020-09-23 14:58:55 -04003306 const auto image_ty = Call->getArgOperand(0)->getType();
SJW806a5d82020-07-15 12:51:38 -05003307 const auto &pi = FuncInfo.getParameter(1);
3308 if (pi.isSampler()) {
3309 //
3310 // Generate OpSampledImage.
3311 //
3312 // Ops[0] = Result Type ID
3313 // Ops[1] = Image ID
3314 // Ops[2] = Sampler ID
3315 //
3316 SPIRVOperandVec Ops;
3317
3318 Value *Image = Call->getArgOperand(0);
3319 Value *Sampler = Call->getArgOperand(1);
3320 Value *Coordinate = Call->getArgOperand(2);
3321
3322 TypeMapType &OpImageTypeMap = getImageTypeMap();
3323 Type *ImageTy = Image->getType()->getPointerElementType();
3324 SPIRVID ImageTyID = OpImageTypeMap[ImageTy];
3325
3326 Ops << ImageTyID << Image << Sampler;
3327
3328 SPIRVID SampledImageID = addSPIRVInst(spv::OpSampledImage, Ops);
3329
3330 //
3331 // Generate OpImageSampleExplicitLod.
3332 //
3333 // Ops[0] = Result Type ID
3334 // Ops[1] = Sampled Image ID
3335 // Ops[2] = Coordinate ID
3336 // Ops[3] = Image Operands Type ID
3337 // Ops[4] ... Ops[n] = Operands ID
3338 //
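      // The resulting pair of instructions looks roughly like this (names are
      // illustrative; int images use a %v4uint/%v4int result and a bitcast follows):
      //   %sampled = OpSampledImage %sampled_image_ty %image %sampler
      //   %texel = OpImageSampleExplicitLod %v4float %sampled %coord Lod %float_0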
3339 Ops.clear();
3340
3341 const bool is_int_image = IsIntImageType(Image->getType());
3342 SPIRVID result_type;
3343 if (is_int_image) {
3344 result_type = v4int32ID;
3345 } else {
3346 result_type = getSPIRVType(Call->getType());
3347 }
3348
alan-baker3f772c02021-06-15 22:18:11 -04003349 uint32_t mask = spv::ImageOperandsLodMask |
3350 GetExtendMask(Call->getType(), is_int_image);
SJW806a5d82020-07-15 12:51:38 -05003351 Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
alan-baker3f772c02021-06-15 22:18:11 -04003352 Ops << result_type << SampledImageID << Coordinate << mask << CstFP0;
SJW806a5d82020-07-15 12:51:38 -05003353
3354 RID = addSPIRVInst(spv::OpImageSampleExplicitLod, Ops);
3355
3356 if (is_int_image) {
3357 // Generate the bitcast.
3358 Ops.clear();
3359 Ops << Call->getType() << RID;
3360 RID = addSPIRVInst(spv::OpBitcast, Ops);
3361 }
alan-bakerf6bc8252020-09-23 14:58:55 -04003362 } else if (IsStorageImageType(image_ty)) {
3363 // read_image on a storage image is mapped to OpImageRead.
3364 Value *Image = Call->getArgOperand(0);
3365 Value *Coordinate = Call->getArgOperand(1);
3366
3367 //
3368 // Generate OpImageRead
3369 //
3370 // Ops[0] = Result Type ID
3371 // Ops[1] = Image ID
3372 // Ops[2] = Coordinate
3373 // No optional image operands.
3374 //
3375 SPIRVOperandVec Ops;
3376
3377 const bool is_int_image = IsIntImageType(Image->getType());
3378 SPIRVID result_type;
3379 if (is_int_image) {
3380 result_type = v4int32ID;
3381 } else {
3382 result_type = getSPIRVType(Call->getType());
3383 }
3384
3385 Ops << result_type << Image << Coordinate;
alan-baker3f772c02021-06-15 22:18:11 -04003386 uint32_t mask = GetExtendMask(Call->getType(), is_int_image);
3387 if (mask != 0)
3388 Ops << mask;
alan-bakerf6bc8252020-09-23 14:58:55 -04003389 RID = addSPIRVInst(spv::OpImageRead, Ops);
3390
3391 if (is_int_image) {
3392 // Generate the bitcast.
3393 Ops.clear();
3394 Ops << Call->getType() << RID;
3395 RID = addSPIRVInst(spv::OpBitcast, Ops);
3396 }
3397
3398 // OpImageRead requires StorageImageReadWithoutFormat.
3399 addCapability(spv::CapabilityStorageImageReadWithoutFormat);
SJW806a5d82020-07-15 12:51:38 -05003400 } else {
alan-bakerf6bc8252020-09-23 14:58:55 -04003401 // read_image on a sampled image (without a sampler) is mapped to
3402 // OpImageFetch.
SJW806a5d82020-07-15 12:51:38 -05003403 Value *Image = Call->getArgOperand(0);
3404 Value *Coordinate = Call->getArgOperand(1);
3405
3406 //
3407 // Generate OpImageFetch
3408 //
3409 // Ops[0] = Result Type ID
3410 // Ops[1] = Image ID
3411 // Ops[2] = Coordinate ID
3412 // Ops[3] = Lod
3413 // Ops[4] = 0
3414 //
3415 SPIRVOperandVec Ops;
3416
3417 const bool is_int_image = IsIntImageType(Image->getType());
3418 SPIRVID result_type;
3419 if (is_int_image) {
3420 result_type = v4int32ID;
3421 } else {
3422 result_type = getSPIRVType(Call->getType());
3423 }
3424
alan-baker3f772c02021-06-15 22:18:11 -04003425 uint32_t mask = spv::ImageOperandsLodMask |
3426 GetExtendMask(Call->getType(), is_int_image);
3427 Ops << result_type << Image << Coordinate << mask
SJW806a5d82020-07-15 12:51:38 -05003428 << getSPIRVInt32Constant(0);
3429
3430 RID = addSPIRVInst(spv::OpImageFetch, Ops);
3431
3432 if (is_int_image) {
3433 // Generate the bitcast.
3434 Ops.clear();
3435 Ops << Call->getType() << RID;
3436 RID = addSPIRVInst(spv::OpBitcast, Ops);
3437 }
3438 }
3439 break;
3440 }
3441
3442 case Builtins::kWriteImagef:
3443 case Builtins::kWriteImageh:
3444 case Builtins::kWriteImagei:
3445 case Builtins::kWriteImageui: {
3446 // write_image is mapped to OpImageWrite.
3447 //
3448 // Generate OpImageWrite.
3449 //
3450 // Ops[0] = Image ID
3451 // Ops[1] = Coordinate ID
3452 // Ops[2] = Texel ID
3453 // Ops[3] = (Optional) Image Operands Type (Literal Number)
3454 // Ops[4] ... Ops[n] = (Optional) Operands ID
3455 //
3456 SPIRVOperandVec Ops;
3457
3458 Value *Image = Call->getArgOperand(0);
3459 Value *Coordinate = Call->getArgOperand(1);
3460 Value *Texel = Call->getArgOperand(2);
3461
3462 SPIRVID TexelID = getSPIRVValue(Texel);
3463
3464 const bool is_int_image = IsIntImageType(Image->getType());
3465 if (is_int_image) {
3466 // Generate a bitcast to v4int and use it as the texel value.
3467 Ops << v4int32ID << TexelID;
3468 TexelID = addSPIRVInst(spv::OpBitcast, Ops);
3469 Ops.clear();
3470 }
3471 Ops << Image << Coordinate << TexelID;
alan-baker3f772c02021-06-15 22:18:11 -04003472 uint32_t mask = GetExtendMask(Texel->getType(), is_int_image);
3473 if (mask != 0)
3474 Ops << mask;
SJW806a5d82020-07-15 12:51:38 -05003475 RID = addSPIRVInst(spv::OpImageWrite, Ops);
alan-bakerf6bc8252020-09-23 14:58:55 -04003476
3477 // Image writes require StorageImageWriteWithoutFormat.
3478 addCapability(spv::CapabilityStorageImageWriteWithoutFormat);
SJW806a5d82020-07-15 12:51:38 -05003479 break;
3480 }
3481
3482 case Builtins::kGetImageHeight:
3483 case Builtins::kGetImageWidth:
3484 case Builtins::kGetImageDepth:
3485 case Builtins::kGetImageDim: {
3486 // get_image_* is mapped to OpImageQuerySize or OpImageQuerySizeLod
3487 addCapability(spv::CapabilityImageQuery);
3488
3489 //
3490 // Generate OpImageQuerySize[Lod]
3491 //
3492 // Ops[0] = Image ID
3493 //
3494 // Result type has components equal to the dimensionality of the image,
3495 // plus 1 if the image is arrayed.
3496 //
3497 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
3498 SPIRVOperandVec Ops;
3499
3500 // Implement:
3501 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
3502 SPIRVID SizesTypeID;
3503
3504 Value *Image = Call->getArgOperand(0);
3505 const uint32_t dim = ImageDimensionality(Image->getType());
3506 const uint32_t components =
3507 dim + (IsArrayImageType(Image->getType()) ? 1 : 0);
3508 if (components == 1) {
3509 SizesTypeID = getSPIRVType(Type::getInt32Ty(Context));
3510 } else {
3511 SizesTypeID = getSPIRVType(
3512 FixedVectorType::get(Type::getInt32Ty(Context), components));
3513 }
3514 Ops << SizesTypeID << Image;
3515 spv::Op query_opcode = spv::OpImageQuerySize;
3516 if (IsSampledImageType(Image->getType())) {
3517 query_opcode = spv::OpImageQuerySizeLod;
3518 // Need explicit 0 for Lod operand.
3519 Ops << getSPIRVInt32Constant(0);
3520 }
3521
3522 RID = addSPIRVInst(query_opcode, Ops);
3523
3524 // May require an extra instruction to create the appropriate result of
3525 // the builtin function.
3526 if (FuncInfo.getType() == Builtins::kGetImageDim) {
3527 if (dim == 3) {
3528 // get_image_dim returns an int4 for 3D images.
3529 //
3530
3531 // Implement:
3532 // %result = OpCompositeConstruct %uint4 %sizes %uint_0
3533 Ops.clear();
3534 Ops << FixedVectorType::get(Type::getInt32Ty(Context), 4) << RID
3535 << getSPIRVInt32Constant(0);
3536
3537 RID = addSPIRVInst(spv::OpCompositeConstruct, Ops);
3538 } else if (dim != components) {
3539 // get_image_dim returns an int2 regardless of the arrayedness of the
3540 // image. If the image is arrayed, an element must be dropped from the
3541 // query result.
3542 //
3543
3544 // Implement:
3545 // %result = OpVectorShuffle %uint2 %sizes %sizes 0 1
3546 Ops.clear();
3547 Ops << FixedVectorType::get(Type::getInt32Ty(Context), 2) << RID << RID
3548 << 0 << 1;
3549
3550 RID = addSPIRVInst(spv::OpVectorShuffle, Ops);
3551 }
3552 } else if (components > 1) {
3553 // Implement:
3554 // %result = OpCompositeExtract %uint %sizes <component number>
3555 Ops.clear();
3556 Ops << Call->getType() << RID;
3557
3558 uint32_t component = 0;
3559 if (FuncInfo.getType() == Builtins::kGetImageHeight)
3560 component = 1;
3561 else if (FuncInfo.getType() == Builtins::kGetImageDepth)
3562 component = 2;
3563 Ops << component;
3564
3565 RID = addSPIRVInst(spv::OpCompositeExtract, Ops);
3566 }
3567 break;
3568 }
3569 default:
3570 llvm_unreachable("Unsupported Image builtin");
3571 }
3572
3573 return RID;
3574}
3575
3576SPIRVID
3577SPIRVProducerPass::GenerateSubgroupInstruction(CallInst *Call,
3578 const FunctionInfo &FuncInfo) {
3579 SPIRVID RID;
3580
3581 // requires SPIRV version 1.3 or greater
3582 if (SpvVersion() != SPIRVVersion::SPIRV_1_3) {
3583 // llvm_unreachable("SubGroups extension requires SPIRV 1.3 or greater");
3584 // TODO(sjw): error out gracefully
3585 }
3586
3587 auto loadBuiltin = [this, Call](spv::BuiltIn spvBI,
3588 spv::Capability spvCap =
3589 spv::CapabilityGroupNonUniform) {
3590 SPIRVOperandVec Ops;
3591 Ops << Call->getType() << this->getSPIRVBuiltin(spvBI, spvCap);
3592
3593 return addSPIRVInst(spv::OpLoad, Ops);
3594 };
3595
3596 spv::Op op = spv::OpNop;
3597 switch (FuncInfo.getType()) {
3598 case Builtins::kGetSubGroupSize:
3599 return loadBuiltin(spv::BuiltInSubgroupSize);
3600 case Builtins::kGetNumSubGroups:
3601 return loadBuiltin(spv::BuiltInNumSubgroups);
3602 case Builtins::kGetSubGroupId:
3603 return loadBuiltin(spv::BuiltInSubgroupId);
3604 case Builtins::kGetSubGroupLocalId:
3605 return loadBuiltin(spv::BuiltInSubgroupLocalInvocationId);
3606
3607 case Builtins::kSubGroupBroadcast:
3608 if (SpvVersion() < SPIRVVersion::SPIRV_1_5 &&
3609 !dyn_cast<ConstantInt>(Call->getOperand(1))) {
3610 llvm_unreachable("sub_group_broadcast requires constant lane Id for "
3611 "SPIRV version < 1.5");
3612 }
3613 addCapability(spv::CapabilityGroupNonUniformBallot);
3614 op = spv::OpGroupNonUniformBroadcast;
3615 break;
3616
3617 case Builtins::kSubGroupAll:
3618 addCapability(spv::CapabilityGroupNonUniformVote);
3619 op = spv::OpGroupNonUniformAll;
3620 break;
3621 case Builtins::kSubGroupAny:
3622 addCapability(spv::CapabilityGroupNonUniformVote);
3623 op = spv::OpGroupNonUniformAny;
3624 break;
3625 case Builtins::kSubGroupReduceAdd:
3626 case Builtins::kSubGroupScanExclusiveAdd:
3627 case Builtins::kSubGroupScanInclusiveAdd: {
3628 addCapability(spv::CapabilityGroupNonUniformArithmetic);
3629 if (FuncInfo.getParameter(0).type_id == Type::IntegerTyID) {
3630 op = spv::OpGroupNonUniformIAdd;
3631 } else {
3632 op = spv::OpGroupNonUniformFAdd;
3633 }
3634 break;
3635 }
3636 case Builtins::kSubGroupReduceMin:
3637 case Builtins::kSubGroupScanExclusiveMin:
3638 case Builtins::kSubGroupScanInclusiveMin: {
3639 addCapability(spv::CapabilityGroupNonUniformArithmetic);
3640 auto &param = FuncInfo.getParameter(0);
3641 if (param.type_id == Type::IntegerTyID) {
3642 op = param.is_signed ? spv::OpGroupNonUniformSMin
3643 : spv::OpGroupNonUniformUMin;
3644 } else {
3645 op = spv::OpGroupNonUniformFMin;
3646 }
3647 break;
3648 }
3649 case Builtins::kSubGroupReduceMax:
3650 case Builtins::kSubGroupScanExclusiveMax:
3651 case Builtins::kSubGroupScanInclusiveMax: {
3652 addCapability(spv::CapabilityGroupNonUniformArithmetic);
3653 auto &param = FuncInfo.getParameter(0);
3654 if (param.type_id == Type::IntegerTyID) {
3655 op = param.is_signed ? spv::OpGroupNonUniformSMax
3656 : spv::OpGroupNonUniformUMax;
3657 } else {
3658 op = spv::OpGroupNonUniformFMax;
3659 }
3660 break;
3661 }
3662
3663 case Builtins::kGetEnqueuedNumSubGroups:
3664 // TODO(sjw): requires CapabilityKernel (incompatible with Shader)
3665 case Builtins::kGetMaxSubGroupSize:
3666 // TODO(sjw): use SpecConstant, capability Kernel (incompatible with Shader)
3667 case Builtins::kSubGroupBarrier:
3668 case Builtins::kSubGroupReserveReadPipe:
3669 case Builtins::kSubGroupReserveWritePipe:
3670 case Builtins::kSubGroupCommitReadPipe:
3671 case Builtins::kSubGroupCommitWritePipe:
3672 case Builtins::kGetKernelSubGroupCountForNdrange:
3673 case Builtins::kGetKernelMaxSubGroupSizeForNdrange:
3674 default:
3675 Call->print(errs());
3676 llvm_unreachable("Unsupported sub_group operation");
3677 break;
3678 }
3679
3680 assert(op != spv::OpNop);
3681
3682 SPIRVOperandVec Operands;
3683
3684 //
3685 // Generate OpGroupNonUniform*
3686 //
3687 // Ops[0] = Result Type ID
3688 // Ops[1] = ScopeSubgroup
3689 // Ops[2] = Value ID
3690 // Ops[3] = Local ID
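  // For example, sub_group_reduce_add(x) on a 32-bit int lowers to (sketch):
  //   %r = OpGroupNonUniformIAdd %uint %scope_subgroup Reduce %x
  // where %scope_subgroup is an OpConstant holding ScopeSubgroup (3).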
3691
3692 // The result type.
3693 Operands << Call->getType();
3694
3695 // Subgroup Scope
3696 Operands << getSPIRVInt32Constant(spv::ScopeSubgroup);
3697
3698 switch (FuncInfo.getType()) {
3699 case Builtins::kSubGroupReduceAdd:
3700 case Builtins::kSubGroupReduceMin:
3701 case Builtins::kSubGroupReduceMax:
3702 Operands << spv::GroupOperationReduce;
3703 break;
3704 case Builtins::kSubGroupScanExclusiveAdd:
3705 case Builtins::kSubGroupScanExclusiveMin:
3706 case Builtins::kSubGroupScanExclusiveMax:
3707 Operands << spv::GroupOperationExclusiveScan;
3708 break;
3709 case Builtins::kSubGroupScanInclusiveAdd:
3710 case Builtins::kSubGroupScanInclusiveMin:
3711 case Builtins::kSubGroupScanInclusiveMax:
3712 Operands << spv::GroupOperationInclusiveScan;
3713 break;
3714 default:
3715 break;
3716 }
3717
3718 for (Use &use : Call->arg_operands()) {
3719 Operands << use.get();
3720 }
3721
3722 return addSPIRVInst(op, Operands);
3723}
3724
3725SPIRVID SPIRVProducerPass::GenerateInstructionFromCall(CallInst *Call) {
3726 LLVMContext &Context = module->getContext();
3727
3728 auto &func_info = Builtins::Lookup(Call->getCalledFunction());
3729 auto func_type = func_info.getType();
3730
3731 if (BUILTIN_IN_GROUP(func_type, Clspv)) {
3732 return GenerateClspvInstruction(Call, func_info);
3733 } else if (BUILTIN_IN_GROUP(func_type, Image)) {
3734 return GenerateImageInstruction(Call, func_info);
3735 } else if (BUILTIN_IN_GROUP(func_type, SubgroupsKHR)) {
3736 return GenerateSubgroupInstruction(Call, func_info);
3737 }
3738
3739 SPIRVID RID;
3740
alan-baker5f2e88e2020-12-07 15:24:04 -05003741 switch (Call->getCalledFunction()->getIntrinsicID()) {
3742 case Intrinsic::ctlz: {
3743    // Implement as (width - 1) - FindUMsb. Ignore the second operand of llvm.ctlz.
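    // Illustrative sketch, assuming a 32-bit scalar input: for %x = 0x000000F0
    // FindUMsb returns 7 (the index of the highest set bit), so the OpISub
    // below yields 31 - 7 = 24 leading zeros.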
3744 SPIRVOperandVec Ops;
3745 Ops << Call->getType() << getOpExtInstImportID()
3746 << glsl::ExtInst::ExtInstFindUMsb << Call->getArgOperand(0);
3747 auto find_msb = addSPIRVInst(spv::OpExtInst, Ops);
3748
3749 Constant *thirty_one = ConstantInt::get(
3750 Call->getType(), Call->getType()->getScalarSizeInBits() - 1);
3751 Ops.clear();
3752 Ops << Call->getType() << thirty_one << find_msb;
3753 return addSPIRVInst(spv::OpISub, Ops);
3754 }
3755 case Intrinsic::cttz: {
3756 // Implement as:
3757 // lsb = FindILsb x
3758 // res = lsb == -1 ? width : lsb
3759 //
3760 // Ignore the second operand of llvm.cttz.
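    // Illustrative sketch, assuming a 32-bit scalar input: for x = 8 (0b1000)
    // FindILsb returns 3, so the result is 3; for x = 0 FindILsb returns -1,
    // and the OpSelect below picks the bit width (32) instead.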
3761 SPIRVOperandVec Ops;
3762 Ops << Call->getType() << getOpExtInstImportID()
3763 << glsl::ExtInst::ExtInstFindILsb << Call->getArgOperand(0);
3764 auto find_lsb = addSPIRVInst(spv::OpExtInst, Ops);
3765
3766 auto neg_one = Constant::getAllOnesValue(Call->getType());
3767 auto i1_ty = Call->getType()->getWithNewBitWidth(1);
3768 auto width = ConstantInt::get(Call->getType(),
3769 Call->getType()->getScalarSizeInBits());
3770
3771 Ops.clear();
3772 Ops << i1_ty << find_lsb << neg_one;
3773 auto cmp = addSPIRVInst(spv::OpIEqual, Ops);
3774
3775 Ops.clear();
3776 Ops << Call->getType() << cmp << width << find_lsb;
3777 return addSPIRVInst(spv::OpSelect, Ops);
3778 }
3779
3780 default:
3781 break;
3782 }
3783
SJW806a5d82020-07-15 12:51:38 -05003784 switch (func_type) {
3785 case Builtins::kPopcount: {
3786 //
3787 // Generate OpBitCount
3788 //
3789 // Ops[0] = Result Type ID
3790 // Ops[1] = Base ID
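    // Illustrative sketch: popcount maps directly onto OpBitCount, e.g. an
    // input of 0x000000F0 produces 4.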
3791 SPIRVOperandVec Ops;
3792 Ops << Call->getType() << Call->getOperand(0);
3793
3794 RID = addSPIRVInst(spv::OpBitCount, Ops);
3795 break;
3796 }
3797 default: {
3798 glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(func_info);
3799
alan-baker5f2e88e2020-12-07 15:24:04 -05003800 // Do not replace functions with implementations.
3801 if (EInst && Call->getCalledFunction()->isDeclaration()) {
SJW806a5d82020-07-15 12:51:38 -05003802 SPIRVID ExtInstImportID = getOpExtInstImportID();
3803
3804 //
3805 // Generate OpExtInst.
3806 //
3807
3808 // Ops[0] = Result Type ID
3809 // Ops[1] = Set ID (OpExtInstImport ID)
3810 // Ops[2] = Instruction Number (Literal Number)
3811 // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
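      // Illustrative sketch of the emitted form (names are examples only):
      // a call such as mix(a, b, t) becomes
      //   %result = OpExtInst %float %glsl_std_450 FMix %a %b %t
      // where %glsl_std_450 is the OpExtInstImport id obtained above.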
3812 SPIRVOperandVec Ops;
3813
3814 Ops << Call->getType() << ExtInstImportID << EInst;
3815
3816 for (auto &use : Call->arg_operands()) {
3817 Ops << use.get();
3818 }
3819
3820 RID = addSPIRVInst(spv::OpExtInst, Ops);
3821
3822 const auto IndirectExtInst = getIndirectExtInstEnum(func_info);
3823 if (IndirectExtInst != kGlslExtInstBad) {
SJW806a5d82020-07-15 12:51:38 -05003824 // Generate one more instruction that uses the result of the extended
3825 // instruction. Its result id is one more than the id of the
3826 // extended instruction.
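        // Illustrative sketch: acospi(x) is emitted as the Acos extended
        // instruction followed by an OpFMul that scales the result by 1/pi,
        // splatted across the vector width when the result is a vector.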
3827 auto generate_extra_inst = [this, &Context, &Call,
3828 &RID](spv::Op opcode, Constant *constant) {
3829 //
3830 // Generate instruction like:
3831 // result = opcode constant <extinst-result>
3832 //
3833 // Ops[0] = Result Type ID
3834 // Ops[1] = Operand 0 ;; the constant, suitably splatted
3835 // Ops[2] = Operand 1 ;; the result of the extended instruction
3836 SPIRVOperandVec Ops;
3837
3838 Type *resultTy = Call->getType();
3839
3840 if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
alan-baker931253b2020-08-20 17:15:38 -04003841 constant =
3842 ConstantVector::getSplat(vectorTy->getElementCount(), constant);
SJW806a5d82020-07-15 12:51:38 -05003843 }
3844 Ops << resultTy << constant << RID;
3845
3846 RID = addSPIRVInst(opcode, Ops);
3847 };
3848
SJW806a5d82020-07-15 12:51:38 -05003849 switch (IndirectExtInst) {
SJW806a5d82020-07-15 12:51:38 -05003850 case glsl::ExtInstAcos: // Implementing acospi
3851 case glsl::ExtInstAsin: // Implementing asinpi
3852 case glsl::ExtInstAtan: // Implementing atanpi
3853 case glsl::ExtInstAtan2: // Implementing atan2pi
3854 generate_extra_inst(
3855 spv::OpFMul,
alan-bakercc2bafb2020-11-02 08:30:18 -05003856 ConstantFP::get(Call->getType()->getScalarType(), kOneOverPi));
SJW806a5d82020-07-15 12:51:38 -05003857 break;
3858
3859 default:
3860 assert(false && "internally inconsistent");
3861 }
3862 }
3863 } else {
Pedro Olsen Ferreira208d1e72021-06-17 19:24:48 +01003864 switch (Call->getIntrinsicID()) {
3865 // These LLVM intrinsics have no SPV equivalent.
3866 // Because they are optimiser hints, we can safely discard them.
3867 case Intrinsic::experimental_noalias_scope_decl:
3868 break;
3869 default:
3870 // A real function call (not builtin)
3871 // Call instruction is deferred because it needs function's ID.
3872 RID = addSPIRVPlaceholder(Call);
3873 break;
3874 }
SJW806a5d82020-07-15 12:51:38 -05003875 }
3876
3877 break;
3878 }
3879 }
3880
3881 return RID;
3882}
3883
David Neto22f144c2017-06-12 14:26:21 -04003884void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
David Neto22f144c2017-06-12 14:26:21 -04003885 ValueMapType &VMap = getValueMap();
SJW806a5d82020-07-15 12:51:38 -05003886 LLVMContext &Context = module->getContext();
David Neto22f144c2017-06-12 14:26:21 -04003887
SJW806a5d82020-07-15 12:51:38 -05003888 SPIRVID RID;
David Neto22f144c2017-06-12 14:26:21 -04003889
3890 switch (I.getOpcode()) {
3891 default: {
3892 if (Instruction::isCast(I.getOpcode())) {
3893 //
3894 // Generate SPIRV instructions for cast operators.
3895 //
3896
David Netod2de94a2017-08-28 17:27:47 -04003897 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003898 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04003899 auto toI8 = Ty == Type::getInt8Ty(Context);
3900 auto fromI32 = OpTy == Type::getInt32Ty(Context);
James Price757dea82021-01-11 13:42:39 -05003901 // Handle zext, sext, uitofp, and sitofp with i1 type specially.
David Neto22f144c2017-06-12 14:26:21 -04003902 if ((I.getOpcode() == Instruction::ZExt ||
3903 I.getOpcode() == Instruction::SExt ||
James Price757dea82021-01-11 13:42:39 -05003904 I.getOpcode() == Instruction::UIToFP ||
3905 I.getOpcode() == Instruction::SIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05003906 OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003907 //
3908 // Generate OpSelect.
3909 //
3910
3911 // Ops[0] = Result Type ID
3912 // Ops[1] = Condition ID
3913 // Ops[2] = True Constant ID
3914 // Ops[3] = False Constant ID
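        // Illustrative sketch: %r = zext i1 %c to i32 becomes
        //   OpSelect %uint %c %uint_1 %uint_0
        // while sext selects -1 or 0, uitofp selects 1.0 or 0.0, and sitofp
        // selects -1.0 or 0.0.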
SJWf93f5f32020-05-05 07:27:56 -05003915 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003916
SJW01901d92020-05-21 08:58:31 -05003917 Ops << I.getType() << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04003918
David Neto22f144c2017-06-12 14:26:21 -04003919 if (I.getOpcode() == Instruction::ZExt) {
SJW01901d92020-05-21 08:58:31 -05003920 Ops << ConstantInt::get(I.getType(), 1);
David Neto22f144c2017-06-12 14:26:21 -04003921 } else if (I.getOpcode() == Instruction::SExt) {
SJW01901d92020-05-21 08:58:31 -05003922 Ops << ConstantInt::getSigned(I.getType(), -1);
James Price757dea82021-01-11 13:42:39 -05003923 } else if (I.getOpcode() == Instruction::UIToFP) {
James Price96bd3d92020-11-23 09:01:57 -05003924 Ops << ConstantFP::get(I.getType(), 1.0);
James Price757dea82021-01-11 13:42:39 -05003925 } else if (I.getOpcode() == Instruction::SIToFP) {
3926 Ops << ConstantFP::get(I.getType(), -1.0);
David Neto22f144c2017-06-12 14:26:21 -04003927 }
David Neto22f144c2017-06-12 14:26:21 -04003928
David Neto22f144c2017-06-12 14:26:21 -04003929 if (I.getOpcode() == Instruction::ZExt) {
SJW01901d92020-05-21 08:58:31 -05003930 Ops << Constant::getNullValue(I.getType());
David Neto22f144c2017-06-12 14:26:21 -04003931 } else if (I.getOpcode() == Instruction::SExt) {
SJW01901d92020-05-21 08:58:31 -05003932 Ops << Constant::getNullValue(I.getType());
David Neto22f144c2017-06-12 14:26:21 -04003933 } else {
James Price96bd3d92020-11-23 09:01:57 -05003934 Ops << ConstantFP::get(I.getType(), 0.0);
David Neto22f144c2017-06-12 14:26:21 -04003935 }
David Neto22f144c2017-06-12 14:26:21 -04003936
SJWf93f5f32020-05-05 07:27:56 -05003937 RID = addSPIRVInst(spv::OpSelect, Ops);
alan-bakerb39c8262019-03-08 14:03:37 -05003938 } else if (!clspv::Option::Int8Support() &&
3939 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04003940 // The SPIR-V target type is a 32-bit int. Keep only the bottom
3941 // 8 bits.
3942 // Before:
3943 // %result = trunc i32 %a to i8
3944 // After
3945 // %result = OpBitwiseAnd %uint %a %uint_255
3946
SJWf93f5f32020-05-05 07:27:56 -05003947 SPIRVOperandVec Ops;
David Netod2de94a2017-08-28 17:27:47 -04003948
SJW806a5d82020-07-15 12:51:38 -05003949 Ops << OpTy << I.getOperand(0) << getSPIRVInt32Constant(255);
David Netod2de94a2017-08-28 17:27:47 -04003950
SJWf93f5f32020-05-05 07:27:56 -05003951 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003952 } else {
3953 // Ops[0] = Result Type ID
3954 // Ops[1] = Source Value ID
SJWf93f5f32020-05-05 07:27:56 -05003955 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003956
SJW01901d92020-05-21 08:58:31 -05003957 Ops << I.getType() << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04003958
SJWf93f5f32020-05-05 07:27:56 -05003959 RID = addSPIRVInst(GetSPIRVCastOpcode(I), Ops);
David Neto22f144c2017-06-12 14:26:21 -04003960 }
3961 } else if (isa<BinaryOperator>(I)) {
3962 //
3963 // Generate SPIRV instructions for binary operators.
3964 //
3965
3966 // Handle xor with i1 type specially.
3967 if (I.getOpcode() == Instruction::Xor &&
3968 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00003969 ((isa<ConstantInt>(I.getOperand(0)) &&
3970 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
3971 (isa<ConstantInt>(I.getOperand(1)) &&
3972 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04003973 //
3974 // Generate OpLogicalNot.
3975 //
3976 // Ops[0] = Result Type ID
3977 // Ops[1] = Operand
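        // Illustrative sketch: %r = xor i1 %a, true is emitted as
        //   OpLogicalNot %bool %a
        // using the non-constant operand as the argument.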
SJWf93f5f32020-05-05 07:27:56 -05003978 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003979
SJW01901d92020-05-21 08:58:31 -05003980 Ops << I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003981
3982 Value *CondV = I.getOperand(0);
3983 if (isa<Constant>(I.getOperand(0))) {
3984 CondV = I.getOperand(1);
3985 }
SJW01901d92020-05-21 08:58:31 -05003986 Ops << CondV;
David Neto22f144c2017-06-12 14:26:21 -04003987
SJWf93f5f32020-05-05 07:27:56 -05003988 RID = addSPIRVInst(spv::OpLogicalNot, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003989 } else {
3990 // Ops[0] = Result Type ID
3991 // Ops[1] = Operand 0
3992 // Ops[2] = Operand 1
SJWf93f5f32020-05-05 07:27:56 -05003993 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003994
SJW01901d92020-05-21 08:58:31 -05003995 Ops << I.getType() << I.getOperand(0) << I.getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04003996
SJWf93f5f32020-05-05 07:27:56 -05003997 RID = addSPIRVInst(GetSPIRVBinaryOpcode(I), Ops);
David Neto22f144c2017-06-12 14:26:21 -04003998 }
alan-bakerc9c55ae2019-12-02 16:01:27 -05003999 } else if (I.getOpcode() == Instruction::FNeg) {
4000 // The only unary operator.
4001 //
4002 // Ops[0] = Result Type ID
4003 // Ops[1] = Operand 0
SJW01901d92020-05-21 08:58:31 -05004004 SPIRVOperandVec Ops;
alan-bakerc9c55ae2019-12-02 16:01:27 -05004005
SJW01901d92020-05-21 08:58:31 -05004006 Ops << I.getType() << I.getOperand(0);
4007 RID = addSPIRVInst(spv::OpFNegate, Ops);
Marco Antognini68e5c512020-09-09 16:08:57 +01004008 } else if (I.getOpcode() == Instruction::Unreachable) {
4009 RID = addSPIRVInst(spv::OpUnreachable);
David Neto22f144c2017-06-12 14:26:21 -04004010 } else {
4011 I.print(errs());
4012 llvm_unreachable("Unsupported instruction???");
4013 }
4014 break;
4015 }
4016 case Instruction::GetElementPtr: {
4017 auto &GlobalConstArgSet = getGlobalConstArgSet();
4018
4019 //
4020 // Generate OpAccessChain.
4021 //
4022 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
4023
4028 // Ops[0] = Result Type ID
4029 // Ops[1] = Base ID
4030 // Ops[2] ... Ops[n] = Indexes ID
SJWf93f5f32020-05-05 07:27:56 -05004031 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004032
alan-bakerb6b09dc2018-11-08 16:59:28 -05004033 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04004034 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
4035 GlobalConstArgSet.count(GEP->getPointerOperand())) {
4036 // Use pointer type with private address space for global constant.
4037 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04004038 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04004039 }
David Neto257c3892018-04-11 13:19:45 -04004040
SJW01901d92020-05-21 08:58:31 -05004041 Ops << ResultType;
David Neto22f144c2017-06-12 14:26:21 -04004042
David Neto862b7d82018-06-14 18:48:37 -04004043 // Generate the base pointer.
SJW01901d92020-05-21 08:58:31 -05004044 Ops << GEP->getPointerOperand();
David Neto22f144c2017-06-12 14:26:21 -04004045
David Neto862b7d82018-06-14 18:48:37 -04004046 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04004047
4048 //
4049    // The GEP is translated according to the rules below (example follows).
4050 //
David Neto862b7d82018-06-14 18:48:37 -04004051 // 1. If gep's first index is 0 generate OpAccessChain and ignore gep's
4052 // first index.
David Neto22f144c2017-06-12 14:26:21 -04004053 // 2. If gep's first index is not 0, generate OpPtrAccessChain and use gep's
4054 // first index.
4055 // 3. If gep's first index is not constant, generate OpPtrAccessChain and
4056 // use gep's first index.
4057 // 4. If it is not above case 1, 2 and 3, generate OpAccessChain and use
4058 // gep's first index.
4059 //
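    // Illustrative sketch (names are examples only):
    //   getelementptr %S, %S* %p, i32 0, i32 2
    // drops the leading zero index and becomes OpAccessChain %ptr %p %uint_2,
    // while a non-zero or non-constant first index, e.g.
    //   getelementptr i32, i32* %p, i32 %n
    // becomes OpPtrAccessChain %ptr %p %n.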
4060 spv::Op Opcode = spv::OpAccessChain;
4061 unsigned offset = 0;
4062 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04004063 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04004064 offset = 1;
David Neto862b7d82018-06-14 18:48:37 -04004065 } else if (CstInt->getZExtValue() != 0) {
David Neto22f144c2017-06-12 14:26:21 -04004066 Opcode = spv::OpPtrAccessChain;
David Neto22f144c2017-06-12 14:26:21 -04004067 }
David Neto862b7d82018-06-14 18:48:37 -04004068 } else {
David Neto22f144c2017-06-12 14:26:21 -04004069 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04004070 }
4071
4072 if (Opcode == spv::OpPtrAccessChain) {
alan-baker7506abb2020-09-10 15:02:55 -04004073 // Shader validation in the SPIR-V spec requires that the base pointer to
4074 // OpPtrAccessChain (in StorageBuffer storage class) be decorated with
4075 // ArrayStride.
alan-baker5b86ed72019-02-15 08:26:50 -05004076 auto address_space = ResultType->getAddressSpace();
4077 setVariablePointersCapabilities(address_space);
4078 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04004079 case spv::StorageClassStorageBuffer:
David Neto1a1a0582017-07-07 12:01:44 -04004080 // Save the need to generate an ArrayStride decoration. But defer
4081 // generation until later, so we only make one decoration.
alan-baker7506abb2020-09-10 15:02:55 -04004082 getTypesNeedingArrayStride().insert(GEP->getPointerOperandType());
4083 break;
4084 case spv::StorageClassWorkgroup:
Alan Bakerfcda9482018-10-02 17:09:59 -04004085 break;
4086 default:
alan-baker7506abb2020-09-10 15:02:55 -04004087 llvm_unreachable(
4088 "OpPtrAccessChain is not supported for this storage class");
Alan Bakerfcda9482018-10-02 17:09:59 -04004089 break;
David Neto1a1a0582017-07-07 12:01:44 -04004090 }
David Neto22f144c2017-06-12 14:26:21 -04004091 }
4092
4093 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
SJW01901d92020-05-21 08:58:31 -05004094 Ops << *II;
David Neto22f144c2017-06-12 14:26:21 -04004095 }
4096
SJWf93f5f32020-05-05 07:27:56 -05004097 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004098 break;
4099 }
4100 case Instruction::ExtractValue: {
4101 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
4102 // Ops[0] = Result Type ID
4103 // Ops[1] = Composite ID
4104 // Ops[2] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004105 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004106
SJW01901d92020-05-21 08:58:31 -05004107 Ops << I.getType();
David Neto22f144c2017-06-12 14:26:21 -04004108
SJW01901d92020-05-21 08:58:31 -05004109 Ops << EVI->getAggregateOperand();
David Neto22f144c2017-06-12 14:26:21 -04004110
4111 for (auto &Index : EVI->indices()) {
SJW01901d92020-05-21 08:58:31 -05004112 Ops << Index;
David Neto22f144c2017-06-12 14:26:21 -04004113 }
4114
SJWf93f5f32020-05-05 07:27:56 -05004115 RID = addSPIRVInst(spv::OpCompositeExtract, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004116 break;
4117 }
4118 case Instruction::InsertValue: {
4119 InsertValueInst *IVI = cast<InsertValueInst>(&I);
4120 // Ops[0] = Result Type ID
4121 // Ops[1] = Object ID
4122 // Ops[2] = Composite ID
4123 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004124 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004125
SJW01901d92020-05-21 08:58:31 -05004126 Ops << I.getType() << IVI->getInsertedValueOperand()
4127 << IVI->getAggregateOperand();
David Neto22f144c2017-06-12 14:26:21 -04004128
4129 for (auto &Index : IVI->indices()) {
SJW01901d92020-05-21 08:58:31 -05004130 Ops << Index;
David Neto22f144c2017-06-12 14:26:21 -04004131 }
4132
SJWf93f5f32020-05-05 07:27:56 -05004133 RID = addSPIRVInst(spv::OpCompositeInsert, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004134 break;
4135 }
4136 case Instruction::Select: {
4137 //
4138 // Generate OpSelect.
4139 //
4140
4141 // Ops[0] = Result Type ID
4142 // Ops[1] = Condition ID
4143 // Ops[2] = True Constant ID
4144 // Ops[3] = False Constant ID
SJWf93f5f32020-05-05 07:27:56 -05004145 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004146
4147 // Find SPIRV instruction for parameter type.
4148 auto Ty = I.getType();
4149 if (Ty->isPointerTy()) {
4150 auto PointeeTy = Ty->getPointerElementType();
4151 if (PointeeTy->isStructTy() &&
4152 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
4153 Ty = PointeeTy;
alan-baker5b86ed72019-02-15 08:26:50 -05004154 } else {
4155 // Selecting between pointers requires variable pointers.
4156 setVariablePointersCapabilities(Ty->getPointerAddressSpace());
4157 if (!hasVariablePointers() && !selectFromSameObject(&I)) {
SJW01901d92020-05-21 08:58:31 -05004158 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004159 }
David Neto22f144c2017-06-12 14:26:21 -04004160 }
4161 }
4162
SJW01901d92020-05-21 08:58:31 -05004163 Ops << Ty << I.getOperand(0) << I.getOperand(1) << I.getOperand(2);
David Neto22f144c2017-06-12 14:26:21 -04004164
SJWf93f5f32020-05-05 07:27:56 -05004165 RID = addSPIRVInst(spv::OpSelect, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004166 break;
4167 }
4168 case Instruction::ExtractElement: {
4169 // Handle <4 x i8> type manually.
4170 Type *CompositeTy = I.getOperand(0)->getType();
4171 if (is4xi8vec(CompositeTy)) {
4172 //
4173 // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
4174 // <4 x i8>.
4175 //
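      // Illustrative sketch: extracting element 2 of the packed <4 x i8> word
      // shifts right by 2 * 8 = 16 bits and then masks with 0xFF.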
4176
4177 //
4178 // Generate OpShiftRightLogical
4179 //
4180 // Ops[0] = Result Type ID
4181 // Ops[1] = Operand 0
4182 // Ops[2] = Operand 1
4183 //
SJWf93f5f32020-05-05 07:27:56 -05004184 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004185
SJW01901d92020-05-21 08:58:31 -05004186 Ops << CompositeTy << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04004187
SJW01901d92020-05-21 08:58:31 -05004188 SPIRVID Op1ID = 0;
David Neto22f144c2017-06-12 14:26:21 -04004189 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
4190 // Handle constant index.
SJW806a5d82020-07-15 12:51:38 -05004191 uint32_t Idx = static_cast<uint32_t>(CI->getZExtValue());
4192 Op1ID = getSPIRVInt32Constant(Idx * 8);
David Neto22f144c2017-06-12 14:26:21 -04004193 } else {
4194 // Handle variable index.
SJWf93f5f32020-05-05 07:27:56 -05004195 SPIRVOperandVec TmpOps;
David Neto22f144c2017-06-12 14:26:21 -04004196
SJW806a5d82020-07-15 12:51:38 -05004197 TmpOps << Type::getInt32Ty(Context) << I.getOperand(1)
4198 << getSPIRVInt32Constant(8);
David Neto22f144c2017-06-12 14:26:21 -04004199
SJWf93f5f32020-05-05 07:27:56 -05004200 Op1ID = addSPIRVInst(spv::OpIMul, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004201 }
SJW01901d92020-05-21 08:58:31 -05004202 Ops << Op1ID;
David Neto22f144c2017-06-12 14:26:21 -04004203
SJW01901d92020-05-21 08:58:31 -05004204 SPIRVID ShiftID = addSPIRVInst(spv::OpShiftRightLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004205
4206 //
4207 // Generate OpBitwiseAnd
4208 //
4209 // Ops[0] = Result Type ID
4210 // Ops[1] = Operand 0
4211 // Ops[2] = Operand 1
4212 //
4213 Ops.clear();
4214
SJW806a5d82020-07-15 12:51:38 -05004215 Ops << CompositeTy << ShiftID << getSPIRVInt32Constant(0xFF);
David Neto22f144c2017-06-12 14:26:21 -04004216
SJWf93f5f32020-05-05 07:27:56 -05004217 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004218 break;
4219 }
4220
4221 // Ops[0] = Result Type ID
4222 // Ops[1] = Composite ID
4223 // Ops[2] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004224 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004225
SJW01901d92020-05-21 08:58:31 -05004226 Ops << I.getType() << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04004227
4228 spv::Op Opcode = spv::OpCompositeExtract;
4229 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
SJW01901d92020-05-21 08:58:31 -05004230 Ops << static_cast<uint32_t>(CI->getZExtValue());
David Neto22f144c2017-06-12 14:26:21 -04004231 } else {
SJW01901d92020-05-21 08:58:31 -05004232 Ops << I.getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04004233 Opcode = spv::OpVectorExtractDynamic;
4234 }
4235
SJWf93f5f32020-05-05 07:27:56 -05004236 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004237 break;
4238 }
4239 case Instruction::InsertElement: {
4240 // Handle <4 x i8> type manually.
4241 Type *CompositeTy = I.getOperand(0)->getType();
4242 if (is4xi8vec(CompositeTy)) {
SJW806a5d82020-07-15 12:51:38 -05004243 SPIRVID CstFFID = getSPIRVInt32Constant(0xFF);
David Neto22f144c2017-06-12 14:26:21 -04004244
SJW01901d92020-05-21 08:58:31 -05004245 SPIRVID ShiftAmountID = 0;
David Neto22f144c2017-06-12 14:26:21 -04004246 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
4247 // Handle constant index.
SJW806a5d82020-07-15 12:51:38 -05004248 uint32_t Idx = static_cast<uint32_t>(CI->getZExtValue());
4249 ShiftAmountID = getSPIRVInt32Constant(Idx * 8);
David Neto22f144c2017-06-12 14:26:21 -04004250 } else {
4251 // Handle variable index.
SJWf93f5f32020-05-05 07:27:56 -05004252 SPIRVOperandVec TmpOps;
David Neto22f144c2017-06-12 14:26:21 -04004253
SJW806a5d82020-07-15 12:51:38 -05004254 TmpOps << Type::getInt32Ty(Context) << I.getOperand(2)
4255 << getSPIRVInt32Constant(8);
David Neto22f144c2017-06-12 14:26:21 -04004256
SJWf93f5f32020-05-05 07:27:56 -05004257 ShiftAmountID = addSPIRVInst(spv::OpIMul, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004258 }
4259
4260 //
4261 // Generate mask operations.
4262 //
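      // Illustrative sketch for inserting a byte at index 1 of the packed
      // word %v: mask = 0xFF << 8, then
      //   result = (%v & ~mask) | (%byte << 8)
      // built from the shift/and/or instructions below.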
4263
4264 // ShiftLeft mask according to index of insertelement.
SJWf93f5f32020-05-05 07:27:56 -05004265 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004266
SJW01901d92020-05-21 08:58:31 -05004267 Ops << CompositeTy << CstFFID << ShiftAmountID;
David Neto22f144c2017-06-12 14:26:21 -04004268
SJW01901d92020-05-21 08:58:31 -05004269 SPIRVID MaskID = addSPIRVInst(spv::OpShiftLeftLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004270
4271 // Inverse mask.
4272 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004273 Ops << CompositeTy << MaskID;
David Neto22f144c2017-06-12 14:26:21 -04004274
SJW01901d92020-05-21 08:58:31 -05004275 SPIRVID InvMaskID = addSPIRVInst(spv::OpNot, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004276
4277 // Apply mask.
4278 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004279 Ops << CompositeTy << I.getOperand(0) << InvMaskID;
David Neto22f144c2017-06-12 14:26:21 -04004280
SJW01901d92020-05-21 08:58:31 -05004281 SPIRVID OrgValID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004282
4283 // Create correct value according to index of insertelement.
4284 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004285 Ops << CompositeTy << I.getOperand(1) << ShiftAmountID;
David Neto22f144c2017-06-12 14:26:21 -04004286
SJW01901d92020-05-21 08:58:31 -05004287 SPIRVID InsertValID = addSPIRVInst(spv::OpShiftLeftLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004288
4289 // Insert value to original value.
4290 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004291 Ops << CompositeTy << OrgValID << InsertValID;
David Neto22f144c2017-06-12 14:26:21 -04004292
SJWf93f5f32020-05-05 07:27:56 -05004293 RID = addSPIRVInst(spv::OpBitwiseOr, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004294 break;
4295 }
4296
SJWf93f5f32020-05-05 07:27:56 -05004297 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004298
James Priced26efea2018-06-09 23:28:32 +01004299 // Ops[0] = Result Type ID
SJW01901d92020-05-21 08:58:31 -05004300 Ops << I.getType();
David Neto22f144c2017-06-12 14:26:21 -04004301
4302 spv::Op Opcode = spv::OpCompositeInsert;
4303 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
David Neto257c3892018-04-11 13:19:45 -04004304 const auto value = CI->getZExtValue();
4305 assert(value <= UINT32_MAX);
James Priced26efea2018-06-09 23:28:32 +01004306 // Ops[1] = Object ID
4307 // Ops[2] = Composite ID
4308 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJW01901d92020-05-21 08:58:31 -05004309 Ops << I.getOperand(1) << I.getOperand(0) << static_cast<uint32_t>(value);
David Neto22f144c2017-06-12 14:26:21 -04004310 } else {
James Priced26efea2018-06-09 23:28:32 +01004311 // Ops[1] = Composite ID
4312 // Ops[2] = Object ID
4313 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJW01901d92020-05-21 08:58:31 -05004314 Ops << I.getOperand(0) << I.getOperand(1) << I.getOperand(2);
David Neto22f144c2017-06-12 14:26:21 -04004315 Opcode = spv::OpVectorInsertDynamic;
4316 }
4317
SJWf93f5f32020-05-05 07:27:56 -05004318 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004319 break;
4320 }
4321 case Instruction::ShuffleVector: {
4322 // Ops[0] = Result Type ID
4323 // Ops[1] = Vector 1 ID
4324 // Ops[2] = Vector 2 ID
4325 // Ops[3] ... Ops[n] = Components (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004326 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004327
SJW01901d92020-05-21 08:58:31 -05004328 Ops << I.getType() << I.getOperand(0) << I.getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04004329
alan-bakerc9666712020-04-01 16:31:21 -04004330 auto shuffle = cast<ShuffleVectorInst>(&I);
4331 SmallVector<int, 4> mask;
4332 shuffle->getShuffleMask(mask);
4333 for (auto i : mask) {
4334 if (i == UndefMaskElem) {
4335 if (clspv::Option::HackUndef())
4336 // Use 0 instead of undef.
SJW01901d92020-05-21 08:58:31 -05004337 Ops << 0;
alan-bakerc9666712020-04-01 16:31:21 -04004338 else
4339 // Undef for shuffle in SPIR-V.
SJW01901d92020-05-21 08:58:31 -05004340 Ops << 0xffffffff;
David Neto22f144c2017-06-12 14:26:21 -04004341 } else {
SJW01901d92020-05-21 08:58:31 -05004342 Ops << i;
David Neto22f144c2017-06-12 14:26:21 -04004343 }
4344 }
4345
SJWf93f5f32020-05-05 07:27:56 -05004346 RID = addSPIRVInst(spv::OpVectorShuffle, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004347 break;
4348 }
4349 case Instruction::ICmp:
4350 case Instruction::FCmp: {
4351 CmpInst *CmpI = cast<CmpInst>(&I);
4352
David Netod4ca2e62017-07-06 18:47:35 -04004353 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004354 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04004355 if (isa<PointerType>(ArgTy)) {
4356 CmpI->print(errs());
alan-baker21574d32020-01-29 16:00:31 -05004357 std::string name = I.getParent()->getParent()->getName().str();
David Netod4ca2e62017-07-06 18:47:35 -04004358 errs()
4359 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
4360 << "in function " << name << "\n";
4361 llvm_unreachable("Pointer equality check is invalid");
4362 break;
4363 }
4364
SJWf93f5f32020-05-05 07:27:56 -05004365 SPIRVOperandVec Ops;
alan-baker15106572020-11-06 15:08:10 -05004366 if (CmpI->getPredicate() == CmpInst::FCMP_ORD ||
4367 CmpI->getPredicate() == CmpInst::FCMP_UNO) {
4368      // Implement ordered and unordered comparisons using OpIsNan instructions.
4369 // Optimize the constants to simplify the resulting code.
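      // Illustrative sketch: fcmp uno %x, %y becomes OpIsNan %x, OpIsNan %y,
      // then OpLogicalOr; fcmp ord additionally inverts that result with
      // OpLogicalNot. A constant NaN operand folds the whole comparison to
      // false (ordered) or true (unordered).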
4370 auto lhs = CmpI->getOperand(0);
4371 auto rhs = CmpI->getOperand(1);
4372 auto const_lhs = dyn_cast_or_null<Constant>(lhs);
4373 auto const_rhs = dyn_cast_or_null<Constant>(rhs);
4374 if ((const_lhs && const_lhs->isNaN()) ||
4375 (const_rhs && const_rhs->isNaN())) {
4376        // Result is a constant: false for ordered, true for unordered.
4377 if (CmpI->getPredicate() == CmpInst::FCMP_ORD) {
4378 RID = getSPIRVConstant(ConstantInt::getFalse(CmpI->getType()));
4379 } else {
4380 RID = getSPIRVConstant(ConstantInt::getTrue(CmpI->getType()));
4381 }
4382 break;
4383 }
4384 SPIRVID lhs_id;
4385 SPIRVID rhs_id;
4386 if (!const_lhs) {
4387 // Generate OpIsNan for the lhs.
4388 Ops.clear();
4389 Ops << CmpI->getType() << lhs;
4390 lhs_id = addSPIRVInst(spv::OpIsNan, Ops);
4391 }
4392 if (!const_rhs) {
4393 // Generate OpIsNan for the rhs.
4394 Ops.clear();
4395 Ops << CmpI->getType() << rhs;
4396 rhs_id = addSPIRVInst(spv::OpIsNan, Ops);
4397 }
4398 if (lhs_id.isValid() && rhs_id.isValid()) {
4399 // Or the results for the lhs and rhs.
4400 Ops.clear();
4401 Ops << CmpI->getType() << lhs_id << rhs_id;
4402 RID = addSPIRVInst(spv::OpLogicalOr, Ops);
4403 } else {
4404 RID = lhs_id.isValid() ? lhs_id : rhs_id;
4405 }
4406 if (CmpI->getPredicate() == CmpInst::FCMP_ORD) {
4407 // For ordered comparisons, invert the intermediate result.
4408 Ops.clear();
4409 Ops << CmpI->getType() << RID;
4410 RID = addSPIRVInst(spv::OpLogicalNot, Ops);
4411 }
4412 break;
4413 } else {
4414 // Remaining comparisons map directly to SPIR-V opcodes.
4415 // Ops[0] = Result Type ID
4416 // Ops[1] = Operand 1 ID
4417 // Ops[2] = Operand 2 ID
4418 Ops << CmpI->getType() << CmpI->getOperand(0) << CmpI->getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04004419
alan-baker15106572020-11-06 15:08:10 -05004420 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
4421 RID = addSPIRVInst(Opcode, Ops);
4422 }
David Neto22f144c2017-06-12 14:26:21 -04004423 break;
4424 }
4425 case Instruction::Br: {
SJW88ed5fe2020-05-11 12:40:57 -05004426 // Branch instruction is deferred because it needs label's ID.
4427 BasicBlock *BrBB = I.getParent();
4428 if (ContinueBlocks.count(BrBB) || MergeBlocks.count(BrBB)) {
4429 // Placeholder for Merge operation
4430 RID = addSPIRVPlaceholder(&I);
4431 }
4432 RID = addSPIRVPlaceholder(&I);
David Neto22f144c2017-06-12 14:26:21 -04004433 break;
4434 }
4435 case Instruction::Switch: {
4436 I.print(errs());
4437 llvm_unreachable("Unsupported instruction???");
4438 break;
4439 }
4440 case Instruction::IndirectBr: {
4441 I.print(errs());
4442 llvm_unreachable("Unsupported instruction???");
4443 break;
4444 }
4445 case Instruction::PHI: {
SJW88ed5fe2020-05-11 12:40:57 -05004446 // PHI instruction is deferred because it needs label's ID.
4447 RID = addSPIRVPlaceholder(&I);
David Neto22f144c2017-06-12 14:26:21 -04004448 break;
4449 }
4450 case Instruction::Alloca: {
4451 //
4452 // Generate OpVariable.
4453 //
4454 // Ops[0] : Result Type ID
4455 // Ops[1] : Storage Class
SJWf93f5f32020-05-05 07:27:56 -05004456 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004457
SJW01901d92020-05-21 08:58:31 -05004458 Ops << I.getType() << spv::StorageClassFunction;
David Neto22f144c2017-06-12 14:26:21 -04004459
SJWf93f5f32020-05-05 07:27:56 -05004460 RID = addSPIRVInst(spv::OpVariable, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004461 break;
4462 }
4463 case Instruction::Load: {
4464 LoadInst *LD = cast<LoadInst>(&I);
4465 //
4466 // Generate OpLoad.
4467 //
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004468
alan-baker5b86ed72019-02-15 08:26:50 -05004469 if (LD->getType()->isPointerTy()) {
4470 // Loading a pointer requires variable pointers.
4471 setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
4472 }
David Neto22f144c2017-06-12 14:26:21 -04004473
SJW01901d92020-05-21 08:58:31 -05004474 SPIRVID PointerID = getSPIRVValue(LD->getPointerOperand());
David Netoa60b00b2017-09-15 16:34:09 -04004475 // This is a hack to work around what looks like a driver bug.
4476 // When we're loading from the special variable holding the WorkgroupSize
David Neto0a2f98d2017-09-15 19:38:40 -04004477 // builtin value, use an OpBitWiseAnd of the value's ID rather than
4478 // generating a load.
David Neto66cfe642018-03-24 06:13:56 -07004479 // TODO(dneto): Remove this awful hack once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -04004480 if (PointerID == WorkgroupSizeVarID) {
David Neto0a2f98d2017-09-15 19:38:40 -04004481 // Generate a bitwise-and of the original value with itself.
4482 // We should have been able to get away with just an OpCopyObject,
4483 // but we need something more complex to get past certain driver bugs.
4484 // This is ridiculous, but necessary.
4485 // TODO(dneto): Revisit this once drivers fix their bugs.
4486
SJWf93f5f32020-05-05 07:27:56 -05004487 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05004488 Ops << LD->getType() << WorkgroupSizeValueID << WorkgroupSizeValueID;
David Neto0a2f98d2017-09-15 19:38:40 -04004489
SJWf93f5f32020-05-05 07:27:56 -05004490 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Netoa60b00b2017-09-15 16:34:09 -04004491 break;
4492 }
4493
4494 // This is the normal path. Generate a load.
4495
David Neto22f144c2017-06-12 14:26:21 -04004496 // Ops[0] = Result Type ID
4497 // Ops[1] = Pointer ID
4498 // Ops[2] ... Ops[n] = Optional Memory Access
4499 //
4500 // TODO: Do we need to implement Optional Memory Access???
David Neto0a2f98d2017-09-15 19:38:40 -04004501
alan-baker3f772c02021-06-15 22:18:11 -04004502 auto ptr = LD->getPointerOperand();
4503 auto ptr_ty = ptr->getType();
4504 SPIRVID result_type_id;
4505 if (LD->getType()->isPointerTy()) {
4506 result_type_id = getSPIRVType(LD->getType());
4507 } else {
4508 auto layout = PointerRequiresLayout(ptr_ty->getPointerAddressSpace());
4509 result_type_id = getSPIRVType(LD->getType(), layout);
4510 }
SJWf93f5f32020-05-05 07:27:56 -05004511 SPIRVOperandVec Ops;
alan-baker3f772c02021-06-15 22:18:11 -04004512 Ops << result_type_id << ptr;
David Neto22f144c2017-06-12 14:26:21 -04004513
SJWf93f5f32020-05-05 07:27:56 -05004514 RID = addSPIRVInst(spv::OpLoad, Ops);
alan-baker3f772c02021-06-15 22:18:11 -04004515
4516 auto no_layout_id = getSPIRVType(LD->getType());
4517 if (Option::SpvVersion() >= SPIRVVersion::SPIRV_1_4 &&
4518 no_layout_id.get() != result_type_id.get()) {
4519 // Generate an OpCopyLogical to convert from the laid out type to a
4520 // non-laid out type.
4521 Ops.clear();
4522 Ops << no_layout_id << RID;
4523 RID = addSPIRVInst(spv::OpCopyLogical, Ops);
4524 }
David Neto22f144c2017-06-12 14:26:21 -04004525 break;
4526 }
4527 case Instruction::Store: {
4528 StoreInst *ST = cast<StoreInst>(&I);
4529 //
4530 // Generate OpStore.
4531 //
4532
alan-baker5b86ed72019-02-15 08:26:50 -05004533 if (ST->getValueOperand()->getType()->isPointerTy()) {
4534 // Storing a pointer requires variable pointers.
4535 setVariablePointersCapabilities(
4536 ST->getValueOperand()->getType()->getPointerAddressSpace());
4537 }
4538
alan-baker3f772c02021-06-15 22:18:11 -04004539 SPIRVOperandVec Ops;
4540 auto ptr = ST->getPointerOperand();
4541 auto ptr_ty = ptr->getType();
4542 auto value = ST->getValueOperand();
4543 auto value_ty = value->getType();
4544 auto needs_layout = PointerRequiresLayout(ptr_ty->getPointerAddressSpace());
4545 if (Option::SpvVersion() >= SPIRVVersion::SPIRV_1_4 && needs_layout &&
4546 (value_ty->isArrayTy() || value_ty->isStructTy())) {
4547      // Generate an OpCopyLogical to convert from the non-laid-out type to the
4548 // laid out type.
4549 Ops << getSPIRVType(value_ty, needs_layout) << value;
4550 RID = addSPIRVInst(spv::OpCopyLogical, Ops);
4551 Ops.clear();
4552 }
4553
David Neto22f144c2017-06-12 14:26:21 -04004554 // Ops[0] = Pointer ID
4555 // Ops[1] = Object ID
4556 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
4557 //
4558 // TODO: Do we need to implement Optional Memory Access???
alan-baker3f772c02021-06-15 22:18:11 -04004559 Ops << ST->getPointerOperand();
4560 if (RID.isValid()) {
4561 Ops << RID;
4562 } else {
4563 Ops << ST->getValueOperand();
4564 }
SJWf93f5f32020-05-05 07:27:56 -05004565 RID = addSPIRVInst(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004566 break;
4567 }
4568 case Instruction::AtomicCmpXchg: {
4569 I.print(errs());
4570 llvm_unreachable("Unsupported instruction???");
4571 break;
4572 }
4573 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01004574 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
4575
4576 spv::Op opcode;
4577
4578 switch (AtomicRMW->getOperation()) {
4579 default:
4580 I.print(errs());
4581 llvm_unreachable("Unsupported instruction???");
4582 case llvm::AtomicRMWInst::Add:
4583 opcode = spv::OpAtomicIAdd;
4584 break;
4585 case llvm::AtomicRMWInst::Sub:
4586 opcode = spv::OpAtomicISub;
4587 break;
4588 case llvm::AtomicRMWInst::Xchg:
4589 opcode = spv::OpAtomicExchange;
4590 break;
4591 case llvm::AtomicRMWInst::Min:
4592 opcode = spv::OpAtomicSMin;
4593 break;
4594 case llvm::AtomicRMWInst::Max:
4595 opcode = spv::OpAtomicSMax;
4596 break;
4597 case llvm::AtomicRMWInst::UMin:
4598 opcode = spv::OpAtomicUMin;
4599 break;
4600 case llvm::AtomicRMWInst::UMax:
4601 opcode = spv::OpAtomicUMax;
4602 break;
4603 case llvm::AtomicRMWInst::And:
4604 opcode = spv::OpAtomicAnd;
4605 break;
4606 case llvm::AtomicRMWInst::Or:
4607 opcode = spv::OpAtomicOr;
4608 break;
4609 case llvm::AtomicRMWInst::Xor:
4610 opcode = spv::OpAtomicXor;
4611 break;
4612 }
4613
4614 //
4615 // Generate OpAtomic*.
4616 //
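    // Illustrative sketch (names are examples only):
    //   atomicrmw add i32* %p, i32 %v
    // becomes OpAtomicIAdd %uint %p %scope_device %semantics %v, where the
    // semantics constant requests sequentially consistent, uniform-memory
    // ordering.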
SJWf93f5f32020-05-05 07:27:56 -05004617 SPIRVOperandVec Ops;
Neil Henning39672102017-09-29 14:33:13 +01004618
SJW01901d92020-05-21 08:58:31 -05004619 Ops << I.getType() << AtomicRMW->getPointerOperand();
Neil Henning39672102017-09-29 14:33:13 +01004620
SJW806a5d82020-07-15 12:51:38 -05004621 const auto ConstantScopeDevice = getSPIRVInt32Constant(spv::ScopeDevice);
SJW01901d92020-05-21 08:58:31 -05004622 Ops << ConstantScopeDevice;
Neil Henning39672102017-09-29 14:33:13 +01004623
SJW806a5d82020-07-15 12:51:38 -05004624 const auto ConstantMemorySemantics =
4625 getSPIRVInt32Constant(spv::MemorySemanticsUniformMemoryMask |
4626 spv::MemorySemanticsSequentiallyConsistentMask);
SJW01901d92020-05-21 08:58:31 -05004627 Ops << ConstantMemorySemantics << AtomicRMW->getValOperand();
Neil Henning39672102017-09-29 14:33:13 +01004628
SJWf93f5f32020-05-05 07:27:56 -05004629 RID = addSPIRVInst(opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004630 break;
4631 }
4632 case Instruction::Fence: {
4633 I.print(errs());
4634 llvm_unreachable("Unsupported instruction???");
4635 break;
4636 }
4637 case Instruction::Call: {
4638 CallInst *Call = dyn_cast<CallInst>(&I);
SJW806a5d82020-07-15 12:51:38 -05004639 RID = GenerateInstructionFromCall(Call);
David Neto22f144c2017-06-12 14:26:21 -04004640 break;
4641 }
4642 case Instruction::Ret: {
4643 unsigned NumOps = I.getNumOperands();
4644 if (NumOps == 0) {
4645 //
4646 // Generate OpReturn.
4647 //
SJWf93f5f32020-05-05 07:27:56 -05004648 RID = addSPIRVInst(spv::OpReturn);
David Neto22f144c2017-06-12 14:26:21 -04004649 } else {
4650 //
4651 // Generate OpReturnValue.
4652 //
4653
4654 // Ops[0] = Return Value ID
SJWf93f5f32020-05-05 07:27:56 -05004655 SPIRVOperandVec Ops;
David Neto257c3892018-04-11 13:19:45 -04004656
SJW01901d92020-05-21 08:58:31 -05004657 Ops << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04004658
SJWf93f5f32020-05-05 07:27:56 -05004659 RID = addSPIRVInst(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004660 break;
4661 }
4662 break;
4663 }
4664 }
SJWf93f5f32020-05-05 07:27:56 -05004665
4666 // Register Instruction to ValueMap.
SJW01901d92020-05-21 08:58:31 -05004667 if (RID.isValid()) {
SJWf93f5f32020-05-05 07:27:56 -05004668 VMap[&I] = RID;
4669 }
David Neto22f144c2017-06-12 14:26:21 -04004670}
4671
4672void SPIRVProducerPass::GenerateFuncEpilogue() {
David Neto22f144c2017-06-12 14:26:21 -04004673 //
4674 // Generate OpFunctionEnd
4675 //
SJWf93f5f32020-05-05 07:27:56 -05004676 addSPIRVInst(spv::OpFunctionEnd);
David Neto22f144c2017-06-12 14:26:21 -04004677}
4678
4679bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05004680 // Don't specialize <4 x i8> if i8 is generally supported.
4681 if (clspv::Option::Int8Support())
4682 return false;
4683
David Neto22f144c2017-06-12 14:26:21 -04004684 LLVMContext &Context = Ty->getContext();
James Pricecf53df42020-04-20 14:41:24 -04004685 if (auto VecTy = dyn_cast<VectorType>(Ty)) {
4686 if (VecTy->getElementType() == Type::getInt8Ty(Context) &&
alan-baker5a8c3be2020-09-09 13:44:26 -04004687 VecTy->getElementCount().getKnownMinValue() == 4) {
David Neto22f144c2017-06-12 14:26:21 -04004688 return true;
4689 }
4690 }
4691
4692 return false;
4693}
4694
4695void SPIRVProducerPass::HandleDeferredInstruction() {
David Neto22f144c2017-06-12 14:26:21 -04004696 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
4697
SJW88ed5fe2020-05-11 12:40:57 -05004698 for (size_t i = 0; i < DeferredInsts.size(); ++i) {
4699 Value *Inst = DeferredInsts[i].first;
4700 SPIRVInstruction *Placeholder = DeferredInsts[i].second;
4701 SPIRVOperandVec Operands;
4702
4703 auto nextDeferred = [&i, &Inst, &DeferredInsts, &Placeholder]() {
4704 ++i;
4705 assert(DeferredInsts.size() > i);
4706 assert(Inst == DeferredInsts[i].first);
4707 Placeholder = DeferredInsts[i].second;
4708 };
David Neto22f144c2017-06-12 14:26:21 -04004709
4710 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
alan-baker06cad652019-12-03 17:56:47 -05004711      // Check whether this branch needs to be preceded by a merge instruction.
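      // Illustrative sketch: the branch in a structured loop header is
      // preceded by OpLoopMerge (merge block + continue target), while the
      // branch opening a selection construct is preceded by OpSelectionMerge.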
David Neto22f144c2017-06-12 14:26:21 -04004712 BasicBlock *BrBB = Br->getParent();
alan-baker06cad652019-12-03 17:56:47 -05004713 if (ContinueBlocks.count(BrBB)) {
David Neto22f144c2017-06-12 14:26:21 -04004714 //
4715 // Generate OpLoopMerge.
4716 //
4717 // Ops[0] = Merge Block ID
4718 // Ops[1] = Continue Target ID
4719 // Ops[2] = Selection Control
SJWf93f5f32020-05-05 07:27:56 -05004720 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004721
SJW01901d92020-05-21 08:58:31 -05004722 Ops << MergeBlocks[BrBB] << ContinueBlocks[BrBB]
4723 << spv::LoopControlMaskNone;
David Neto22f144c2017-06-12 14:26:21 -04004724
SJW88ed5fe2020-05-11 12:40:57 -05004725 replaceSPIRVInst(Placeholder, spv::OpLoopMerge, Ops);
4726
4727 nextDeferred();
4728
alan-baker06cad652019-12-03 17:56:47 -05004729 } else if (MergeBlocks.count(BrBB)) {
4730 //
4731 // Generate OpSelectionMerge.
4732 //
4733 // Ops[0] = Merge Block ID
4734 // Ops[1] = Selection Control
SJWf93f5f32020-05-05 07:27:56 -05004735 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004736
alan-baker06cad652019-12-03 17:56:47 -05004737 auto MergeBB = MergeBlocks[BrBB];
SJW01901d92020-05-21 08:58:31 -05004738 Ops << MergeBB << spv::SelectionControlMaskNone;
David Neto22f144c2017-06-12 14:26:21 -04004739
SJW88ed5fe2020-05-11 12:40:57 -05004740 replaceSPIRVInst(Placeholder, spv::OpSelectionMerge, Ops);
4741
4742 nextDeferred();
David Neto22f144c2017-06-12 14:26:21 -04004743 }
4744
4745 if (Br->isConditional()) {
4746 //
4747 // Generate OpBranchConditional.
4748 //
4749 // Ops[0] = Condition ID
4750 // Ops[1] = True Label ID
4751 // Ops[2] = False Label ID
4752 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004753 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004754
SJW01901d92020-05-21 08:58:31 -05004755 Ops << Br->getCondition() << Br->getSuccessor(0) << Br->getSuccessor(1);
David Neto22f144c2017-06-12 14:26:21 -04004756
SJW88ed5fe2020-05-11 12:40:57 -05004757 replaceSPIRVInst(Placeholder, spv::OpBranchConditional, Ops);
4758
David Neto22f144c2017-06-12 14:26:21 -04004759 } else {
4760 //
4761 // Generate OpBranch.
4762 //
4763 // Ops[0] = Target Label ID
SJWf93f5f32020-05-05 07:27:56 -05004764 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004765
SJW01901d92020-05-21 08:58:31 -05004766 Ops << Br->getSuccessor(0);
David Neto22f144c2017-06-12 14:26:21 -04004767
SJW88ed5fe2020-05-11 12:40:57 -05004768 replaceSPIRVInst(Placeholder, spv::OpBranch, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004769 }
4770 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5ed87542020-03-23 11:05:22 -04004771 if (PHI->getType()->isPointerTy() && !IsSamplerType(PHI->getType()) &&
4772 !IsImageType(PHI->getType())) {
alan-baker5b86ed72019-02-15 08:26:50 -05004773 // OpPhi on pointers requires variable pointers.
4774 setVariablePointersCapabilities(
4775 PHI->getType()->getPointerAddressSpace());
4776 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
SJW01901d92020-05-21 08:58:31 -05004777 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004778 }
4779 }
4780
David Neto22f144c2017-06-12 14:26:21 -04004781 //
4782 // Generate OpPhi.
4783 //
4784 // Ops[0] = Result Type ID
4785      // Ops[1] ... Ops[n] = (Incoming Value ID, Parent Block ID) pairs
SJWf93f5f32020-05-05 07:27:56 -05004786 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004787
SJW01901d92020-05-21 08:58:31 -05004788 Ops << PHI->getType();
David Neto22f144c2017-06-12 14:26:21 -04004789
SJW88ed5fe2020-05-11 12:40:57 -05004790 for (unsigned j = 0; j < PHI->getNumIncomingValues(); j++) {
SJW01901d92020-05-21 08:58:31 -05004791 Ops << PHI->getIncomingValue(j) << PHI->getIncomingBlock(j);
David Neto22f144c2017-06-12 14:26:21 -04004792 }
4793
SJW88ed5fe2020-05-11 12:40:57 -05004794 replaceSPIRVInst(Placeholder, spv::OpPhi, Ops);
4795
David Neto22f144c2017-06-12 14:26:21 -04004796 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
4797 Function *Callee = Call->getCalledFunction();
David Neto3fbb4072017-10-16 11:28:14 -04004798 auto callee_name = Callee->getName();
David Neto22f144c2017-06-12 14:26:21 -04004799
SJW61531372020-06-09 07:31:08 -05004800 if (Builtins::Lookup(Callee) == Builtins::kClspvCompositeConstruct) {
David Netoab03f432017-11-03 17:00:44 -04004801 // Generate an OpCompositeConstruct
SJWf93f5f32020-05-05 07:27:56 -05004802 SPIRVOperandVec Ops;
David Netoab03f432017-11-03 17:00:44 -04004803
4804 // The result type.
SJW01901d92020-05-21 08:58:31 -05004805 Ops << Call->getType();
David Netoab03f432017-11-03 17:00:44 -04004806
4807 for (Use &use : Call->arg_operands()) {
SJW01901d92020-05-21 08:58:31 -05004808 Ops << use.get();
David Netoab03f432017-11-03 17:00:44 -04004809 }
4810
SJW88ed5fe2020-05-11 12:40:57 -05004811 replaceSPIRVInst(Placeholder, spv::OpCompositeConstruct, Ops);
David Netoab03f432017-11-03 17:00:44 -04004812
David Neto22f144c2017-06-12 14:26:21 -04004813 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05004814 if (Call->getType()->isPointerTy()) {
4815 // Functions returning pointers require variable pointers.
4816 setVariablePointersCapabilities(
4817 Call->getType()->getPointerAddressSpace());
4818 }
4819
David Neto22f144c2017-06-12 14:26:21 -04004820 //
4821 // Generate OpFunctionCall.
4822 //
4823
4824 // Ops[0] = Result Type ID
4825 // Ops[1] = Callee Function ID
4826 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
SJWf93f5f32020-05-05 07:27:56 -05004827 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004828
SJW01901d92020-05-21 08:58:31 -05004829 Ops << Call->getType();
David Neto22f144c2017-06-12 14:26:21 -04004830
SJW01901d92020-05-21 08:58:31 -05004831 SPIRVID CalleeID = getSPIRVValue(Callee);
SJW806a5d82020-07-15 12:51:38 -05004832 if (!CalleeID.isValid()) {
David Neto43568eb2017-10-13 18:25:25 -04004833 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04004834 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04004835 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
4836 // causes an infinite loop. Instead, go ahead and generate
4837 // the bad function call. A validator will catch the 0-Id.
4838 // llvm_unreachable("Can't translate function call");
4839 }
David Neto22f144c2017-06-12 14:26:21 -04004840
SJW01901d92020-05-21 08:58:31 -05004841 Ops << CalleeID;
David Neto22f144c2017-06-12 14:26:21 -04004842
David Neto22f144c2017-06-12 14:26:21 -04004843 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
SJW88ed5fe2020-05-11 12:40:57 -05004844 for (unsigned j = 0; j < CalleeFTy->getNumParams(); j++) {
4845 auto *operand = Call->getOperand(j);
alan-bakerd4d50652019-12-03 17:17:15 -05004846 auto *operand_type = operand->getType();
4847 // Images and samplers can be passed as function parameters without
4848 // variable pointers.
4849 if (operand_type->isPointerTy() && !IsImageType(operand_type) &&
4850 !IsSamplerType(operand_type)) {
alan-baker5b86ed72019-02-15 08:26:50 -05004851 auto sc =
4852 GetStorageClass(operand->getType()->getPointerAddressSpace());
4853 if (sc == spv::StorageClassStorageBuffer) {
4854 // Passing SSBO by reference requires variable pointers storage
4855 // buffer.
SJW01901d92020-05-21 08:58:31 -05004856 setVariablePointersStorageBuffer();
alan-baker5b86ed72019-02-15 08:26:50 -05004857 } else if (sc == spv::StorageClassWorkgroup) {
4858 // Workgroup references require variable pointers if they are not
4859 // memory object declarations.
4860 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
4861 // Workgroup accessor represents a variable reference.
SJW61531372020-06-09 07:31:08 -05004862 if (Builtins::Lookup(operand_call->getCalledFunction()) !=
4863 Builtins::kClspvLocal)
SJW01901d92020-05-21 08:58:31 -05004864 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004865 } else {
4866 // Arguments are function parameters.
4867 if (!isa<Argument>(operand))
SJW01901d92020-05-21 08:58:31 -05004868 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004869 }
4870 }
4871 }
SJW01901d92020-05-21 08:58:31 -05004872 Ops << operand;
David Neto22f144c2017-06-12 14:26:21 -04004873 }
4874
SJW88ed5fe2020-05-11 12:40:57 -05004875 replaceSPIRVInst(Placeholder, spv::OpFunctionCall, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004876 }
4877 }
4878 }
4879}
4880
SJW77b87ad2020-04-21 14:37:52 -05004881void SPIRVProducerPass::HandleDeferredDecorations() {
4882 const auto &DL = module->getDataLayout();
alan-baker5a8c3be2020-09-09 13:44:26 -04004883 if (getTypesNeedingArrayStride().empty()) {
David Neto1a1a0582017-07-07 12:01:44 -04004884 return;
David Netoc6f3ab22018-04-06 18:02:31 -04004885 }
David Neto1a1a0582017-07-07 12:01:44 -04004886
David Netoc6f3ab22018-04-06 18:02:31 -04004887 // Insert ArrayStride decorations on pointer types, due to OpPtrAccessChain
4888 // instructions we generated earlier.
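  // Illustrative sketch (names are examples only): a StorageBuffer
  // pointer-to-float type used as the base of an OpPtrAccessChain ends up
  // decorated as
  //   OpDecorate %ptr_ssbo_float ArrayStride 4
  // where 4 is the element's allocation size from the data layout.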
alan-bakerc3fd07f2020-10-22 09:48:49 -04004889 DenseSet<uint32_t> seen;
David Neto85082642018-03-24 06:55:20 -07004890 for (auto *type : getTypesNeedingArrayStride()) {
alan-baker3f772c02021-06-15 22:18:11 -04004891 auto TI = TypeMap.find(type);
4892 unsigned index = SpvVersion() < SPIRVVersion::SPIRV_1_4 ? 0 : 1;
4893 assert(TI != TypeMap.end());
4894 assert(index < TI->second.size());
4895 if (!TI->second[index].isValid())
4896 continue;
4897
4898 auto id = TI->second[index];
alan-bakerc3fd07f2020-10-22 09:48:49 -04004899 if (!seen.insert(id.get()).second)
4900 continue;
4901
David Neto85082642018-03-24 06:55:20 -07004902 Type *elemTy = nullptr;
4903 if (auto *ptrTy = dyn_cast<PointerType>(type)) {
4904 elemTy = ptrTy->getElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05004905 } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
alan-baker8eb435a2020-04-08 00:42:06 -04004906 elemTy = arrayTy->getElementType();
4907 } else if (auto *vecTy = dyn_cast<VectorType>(type)) {
4908 elemTy = vecTy->getElementType();
David Neto85082642018-03-24 06:55:20 -07004909 } else {
4910 errs() << "Unhandled strided type " << *type << "\n";
4911 llvm_unreachable("Unhandled strided type");
4912 }
David Neto1a1a0582017-07-07 12:01:44 -04004913
4914 // Ops[0] = Target ID
4915 // Ops[1] = Decoration (ArrayStride)
4916 // Ops[2] = Stride number (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004917 SPIRVOperandVec Ops;
David Neto1a1a0582017-07-07 12:01:44 -04004918
David Neto85082642018-03-24 06:55:20 -07004919 // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
Alan Bakerfcda9482018-10-02 17:09:59 -04004920 const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));
David Neto257c3892018-04-11 13:19:45 -04004921
alan-bakerc3fd07f2020-10-22 09:48:49 -04004922 Ops << id << spv::DecorationArrayStride << stride;
David Neto1a1a0582017-07-07 12:01:44 -04004923
SJWf93f5f32020-05-05 07:27:56 -05004924 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04004925 }
David Neto1a1a0582017-07-07 12:01:44 -04004926}
4927
SJW61531372020-06-09 07:31:08 -05004928glsl::ExtInst
4929SPIRVProducerPass::getExtInstEnum(const Builtins::FunctionInfo &func_info) {
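  // Example of the mapping below (illustrative): clamp() selects NClamp for
  // floats, SClamp for signed integers, and UClamp for unsigned integers,
  // based on the first parameter's type.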
SJW61531372020-06-09 07:31:08 -05004930 switch (func_info.getType()) {
SJW2c317da2020-03-23 07:39:13 -05004931 case Builtins::kClamp: {
SJW61531372020-06-09 07:31:08 -05004932 auto param_type = func_info.getParameter(0);
SJW2c317da2020-03-23 07:39:13 -05004933 if (param_type.type_id == Type::FloatTyID) {
alan-bakerecc9c942020-12-07 13:13:32 -05004934 return glsl::ExtInst::ExtInstNClamp;
SJW2c317da2020-03-23 07:39:13 -05004935 }
4936 return param_type.is_signed ? glsl::ExtInst::ExtInstSClamp
4937 : glsl::ExtInst::ExtInstUClamp;
4938 }
4939 case Builtins::kMax: {
SJW61531372020-06-09 07:31:08 -05004940 auto param_type = func_info.getParameter(0);
SJW2c317da2020-03-23 07:39:13 -05004941 if (param_type.type_id == Type::FloatTyID) {
4942 return glsl::ExtInst::ExtInstFMax;
4943 }
4944 return param_type.is_signed ? glsl::ExtInst::ExtInstSMax
4945 : glsl::ExtInst::ExtInstUMax;
4946 }
4947 case Builtins::kMin: {
SJW61531372020-06-09 07:31:08 -05004948 auto param_type = func_info.getParameter(0);
SJW2c317da2020-03-23 07:39:13 -05004949 if (param_type.type_id == Type::FloatTyID) {
4950 return glsl::ExtInst::ExtInstFMin;
4951 }
4952 return param_type.is_signed ? glsl::ExtInst::ExtInstSMin
4953 : glsl::ExtInst::ExtInstUMin;
4954 }
4955 case Builtins::kAbs:
4956 return glsl::ExtInst::ExtInstSAbs;
4957 case Builtins::kFmax:
Marco Antognini55d51862020-07-21 17:50:07 +01004958 return glsl::ExtInst::ExtInstNMax;
SJW2c317da2020-03-23 07:39:13 -05004959 case Builtins::kFmin:
Marco Antognini55d51862020-07-21 17:50:07 +01004960 return glsl::ExtInst::ExtInstNMin;
SJW2c317da2020-03-23 07:39:13 -05004961 case Builtins::kDegrees:
4962 return glsl::ExtInst::ExtInstDegrees;
4963 case Builtins::kRadians:
4964 return glsl::ExtInst::ExtInstRadians;
4965 case Builtins::kMix:
4966 return glsl::ExtInst::ExtInstFMix;
4967 case Builtins::kAcos:
4968 case Builtins::kAcospi:
4969 return glsl::ExtInst::ExtInstAcos;
4970 case Builtins::kAcosh:
4971 return glsl::ExtInst::ExtInstAcosh;
4972 case Builtins::kAsin:
4973 case Builtins::kAsinpi:
4974 return glsl::ExtInst::ExtInstAsin;
4975 case Builtins::kAsinh:
4976 return glsl::ExtInst::ExtInstAsinh;
4977 case Builtins::kAtan:
4978 case Builtins::kAtanpi:
4979 return glsl::ExtInst::ExtInstAtan;
4980 case Builtins::kAtanh:
4981 return glsl::ExtInst::ExtInstAtanh;
4982 case Builtins::kAtan2:
4983 case Builtins::kAtan2pi:
4984 return glsl::ExtInst::ExtInstAtan2;
4985 case Builtins::kCeil:
4986 return glsl::ExtInst::ExtInstCeil;
4987 case Builtins::kSin:
4988 case Builtins::kHalfSin:
4989 case Builtins::kNativeSin:
4990 return glsl::ExtInst::ExtInstSin;
4991 case Builtins::kSinh:
4992 return glsl::ExtInst::ExtInstSinh;
4993 case Builtins::kCos:
4994 case Builtins::kHalfCos:
4995 case Builtins::kNativeCos:
4996 return glsl::ExtInst::ExtInstCos;
4997 case Builtins::kCosh:
4998 return glsl::ExtInst::ExtInstCosh;
4999 case Builtins::kTan:
5000 case Builtins::kHalfTan:
5001 case Builtins::kNativeTan:
5002 return glsl::ExtInst::ExtInstTan;
5003 case Builtins::kTanh:
5004 return glsl::ExtInst::ExtInstTanh;
5005 case Builtins::kExp:
5006 case Builtins::kHalfExp:
5007 case Builtins::kNativeExp:
5008 return glsl::ExtInst::ExtInstExp;
5009 case Builtins::kExp2:
5010 case Builtins::kHalfExp2:
5011 case Builtins::kNativeExp2:
5012 return glsl::ExtInst::ExtInstExp2;
5013 case Builtins::kLog:
5014 case Builtins::kHalfLog:
5015 case Builtins::kNativeLog:
5016 return glsl::ExtInst::ExtInstLog;
5017 case Builtins::kLog2:
5018 case Builtins::kHalfLog2:
5019 case Builtins::kNativeLog2:
5020 return glsl::ExtInst::ExtInstLog2;
5021 case Builtins::kFabs:
5022 return glsl::ExtInst::ExtInstFAbs;
5023 case Builtins::kFma:
5024 return glsl::ExtInst::ExtInstFma;
5025 case Builtins::kFloor:
5026 return glsl::ExtInst::ExtInstFloor;
5027 case Builtins::kLdexp:
5028 return glsl::ExtInst::ExtInstLdexp;
5029 case Builtins::kPow:
5030 case Builtins::kPowr:
5031 case Builtins::kHalfPowr:
5032 case Builtins::kNativePowr:
5033 return glsl::ExtInst::ExtInstPow;
James Price38553362020-09-03 18:30:40 -04005034 case Builtins::kRint:
5035 return glsl::ExtInst::ExtInstRoundEven;
SJW2c317da2020-03-23 07:39:13 -05005036 case Builtins::kRound:
5037 return glsl::ExtInst::ExtInstRound;
5038 case Builtins::kSqrt:
5039 case Builtins::kHalfSqrt:
5040 case Builtins::kNativeSqrt:
5041 return glsl::ExtInst::ExtInstSqrt;
5042 case Builtins::kRsqrt:
5043 case Builtins::kHalfRsqrt:
5044 case Builtins::kNativeRsqrt:
5045 return glsl::ExtInst::ExtInstInverseSqrt;
5046 case Builtins::kTrunc:
5047 return glsl::ExtInst::ExtInstTrunc;
5048 case Builtins::kFrexp:
5049 return glsl::ExtInst::ExtInstFrexp;
SJW61531372020-06-09 07:31:08 -05005050 case Builtins::kClspvFract:
SJW2c317da2020-03-23 07:39:13 -05005051 case Builtins::kFract:
5052 return glsl::ExtInst::ExtInstFract;
5053 case Builtins::kSign:
5054 return glsl::ExtInst::ExtInstFSign;
5055 case Builtins::kLength:
5056 case Builtins::kFastLength:
5057 return glsl::ExtInst::ExtInstLength;
5058 case Builtins::kDistance:
5059 case Builtins::kFastDistance:
5060 return glsl::ExtInst::ExtInstDistance;
5061 case Builtins::kStep:
5062 return glsl::ExtInst::ExtInstStep;
5063 case Builtins::kSmoothstep:
5064 return glsl::ExtInst::ExtInstSmoothStep;
5065 case Builtins::kCross:
5066 return glsl::ExtInst::ExtInstCross;
5067 case Builtins::kNormalize:
5068 case Builtins::kFastNormalize:
5069 return glsl::ExtInst::ExtInstNormalize;
SJW61531372020-06-09 07:31:08 -05005070 case Builtins::kSpirvPack:
5071 return glsl::ExtInst::ExtInstPackHalf2x16;
5072 case Builtins::kSpirvUnpack:
5073 return glsl::ExtInst::ExtInstUnpackHalf2x16;
SJW2c317da2020-03-23 07:39:13 -05005074 default:
5075 break;
5076 }
5077
alan-baker5f2e88e2020-12-07 15:24:04 -05005078 // TODO: improve this by checking the intrinsic id.
SJW61531372020-06-09 07:31:08 -05005079 if (func_info.getName().find("llvm.fmuladd.") == 0) {
5080 return glsl::ExtInst::ExtInstFma;
5081 }
alan-baker5f2e88e2020-12-07 15:24:04 -05005082 if (func_info.getName().find("llvm.sqrt.") == 0) {
5083 return glsl::ExtInst::ExtInstSqrt;
5084 }
5085 if (func_info.getName().find("llvm.trunc.") == 0) {
5086 return glsl::ExtInst::ExtInstTrunc;
5087 }
5088 if (func_info.getName().find("llvm.ctlz.") == 0) {
5089 return glsl::ExtInst::ExtInstFindUMsb;
5090 }
5091 if (func_info.getName().find("llvm.cttz.") == 0) {
5092 return glsl::ExtInst::ExtInstFindILsb;
5093 }
alan-baker3e0de472020-12-08 15:57:17 -05005094 if (func_info.getName().find("llvm.ceil.") == 0) {
5095 return glsl::ExtInst::ExtInstCeil;
5096 }
5097 if (func_info.getName().find("llvm.rint.") == 0) {
5098 return glsl::ExtInst::ExtInstRoundEven;
5099 }
5100 if (func_info.getName().find("llvm.fabs.") == 0) {
5101 return glsl::ExtInst::ExtInstFAbs;
5102 }
5103 if (func_info.getName().find("llvm.floor.") == 0) {
5104 return glsl::ExtInst::ExtInstFloor;
5105 }
5106 if (func_info.getName().find("llvm.sin.") == 0) {
5107 return glsl::ExtInst::ExtInstSin;
5108 }
5109 if (func_info.getName().find("llvm.cos.") == 0) {
5110 return glsl::ExtInst::ExtInstCos;
5111 }
alan-baker8b968112020-12-15 15:53:29 -05005112 if (func_info.getName().find("llvm.exp.") == 0) {
5113 return glsl::ExtInst::ExtInstExp;
5114 }
5115 if (func_info.getName().find("llvm.log.") == 0) {
5116 return glsl::ExtInst::ExtInstLog;
5117 }
5118 if (func_info.getName().find("llvm.pow.") == 0) {
5119 return glsl::ExtInst::ExtInstPow;
5120 }
James Price8cc3bb12021-05-05 10:20:58 -04005121 if (func_info.getName().find("llvm.smax.") == 0) {
5122 return glsl::ExtInst::ExtInstSMax;
5123 }
5124 if (func_info.getName().find("llvm.smin.") == 0) {
5125 return glsl::ExtInst::ExtInstSMin;
5126 }
Kévin Petit0c0c3882021-07-27 17:01:14 +01005127 if (func_info.getName().find("llvm.umax.") == 0) {
5128 return glsl::ExtInst::ExtInstUMax;
5129 }
5130 if (func_info.getName().find("llvm.umin.") == 0) {
5131 return glsl::ExtInst::ExtInstUMin;
5132 }
SJW61531372020-06-09 07:31:08 -05005133 return kGlslExtInstBad;
David Neto3fbb4072017-10-16 11:28:14 -04005134}
5135
SJW61531372020-06-09 07:31:08 -05005136glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(
5137 const Builtins::FunctionInfo &func_info) {
5138 switch (func_info.getType()) {
SJW2c317da2020-03-23 07:39:13 -05005139 case Builtins::kAcospi:
5140 return glsl::ExtInst::ExtInstAcos;
5141 case Builtins::kAsinpi:
5142 return glsl::ExtInst::ExtInstAsin;
5143 case Builtins::kAtanpi:
5144 return glsl::ExtInst::ExtInstAtan;
5145 case Builtins::kAtan2pi:
5146 return glsl::ExtInst::ExtInstAtan2;
5147 default:
5148 break;
5149 }
5150 return kGlslExtInstBad;
David Neto3fbb4072017-10-16 11:28:14 -04005151}
5152
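// Note on the mapping above: for the *pi builtins (acospi, asinpi, atanpi,
// atan2pi) the plain trigonometric instruction is returned and the call-site
// lowering is expected to add the extra scaling by 1/pi (mathematically,
// acospi(x) == acos(x) / pi); hence the "indirect" name.
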
SJW61531372020-06-09 07:31:08 -05005153glsl::ExtInst SPIRVProducerPass::getDirectOrIndirectExtInstEnum(
5154 const Builtins::FunctionInfo &func_info) {
5155 auto direct = getExtInstEnum(func_info);
David Neto3fbb4072017-10-16 11:28:14 -04005156 if (direct != kGlslExtInstBad)
5157 return direct;
SJW61531372020-06-09 07:31:08 -05005158 return getIndirectExtInstEnum(func_info);
David Neto22f144c2017-06-12 14:26:21 -04005159}
5160
David Neto22f144c2017-06-12 14:26:21 -04005161void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
David Neto0676e6f2017-07-11 18:47:44 -04005162 binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
David Neto22f144c2017-06-12 14:26:21 -04005163}
5164
SJW88ed5fe2020-05-11 12:40:57 -05005165void SPIRVProducerPass::WriteResultID(const SPIRVInstruction &Inst) {
SJW01901d92020-05-21 08:58:31 -05005166 WriteOneWord(Inst.getResultID().get());
David Neto22f144c2017-06-12 14:26:21 -04005167}
5168
SJW88ed5fe2020-05-11 12:40:57 -05005169void SPIRVProducerPass::WriteWordCountAndOpcode(const SPIRVInstruction &Inst) {
David Neto22f144c2017-06-12 14:26:21 -04005170 // High 16 bits : Word Count
 5171 // Low 16 bits : Opcode
SJW88ed5fe2020-05-11 12:40:57 -05005172 uint32_t Word = Inst.getOpcode();
5173 const uint32_t count = Inst.getWordCount();
David Netoee2660d2018-06-28 16:31:29 -04005174 if (count > 65535) {
5175 errs() << "Word count limit of 65535 exceeded: " << count << "\n";
5176 llvm_unreachable("Word count too high");
5177 }
SJW88ed5fe2020-05-11 12:40:57 -05005178 Word |= Inst.getWordCount() << 16;
David Neto22f144c2017-06-12 14:26:21 -04005179 WriteOneWord(Word);
5180}
5181
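// A minimal sketch of the packing done above (the helper name is
// illustrative and not part of the pass): the low 16 bits hold the opcode
// and the high 16 bits hold the total word count, so a 4-word OpTypeInt is
// encoded as (4u << 16) | spv::OpTypeInt.
inline uint32_t ExampleMakeInstructionHeader(spv::Op Opcode,
                                             uint32_t WordCount) {
  assert(WordCount <= 65535 && "SPIR-V limits an instruction to 65535 words");
  return static_cast<uint32_t>(Opcode) | (WordCount << 16);
}
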
SJW88ed5fe2020-05-11 12:40:57 -05005182void SPIRVProducerPass::WriteOperand(const SPIRVOperand &Op) {
5183 SPIRVOperandType OpTy = Op.getType();
David Neto22f144c2017-06-12 14:26:21 -04005184 switch (OpTy) {
5185 default: {
5186 llvm_unreachable("Unsupported SPIRV Operand Type???");
5187 break;
5188 }
5189 case SPIRVOperandType::NUMBERID: {
SJW88ed5fe2020-05-11 12:40:57 -05005190 WriteOneWord(Op.getNumID());
David Neto22f144c2017-06-12 14:26:21 -04005191 break;
5192 }
5193 case SPIRVOperandType::LITERAL_STRING: {
SJW88ed5fe2020-05-11 12:40:57 -05005194 std::string Str = Op.getLiteralStr();
David Neto22f144c2017-06-12 14:26:21 -04005195 const char *Data = Str.c_str();
5196 size_t WordSize = Str.size() / 4;
5197 for (unsigned Idx = 0; Idx < WordSize; Idx++) {
5198 WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
5199 }
5200
5201 uint32_t Remainder = Str.size() % 4;
5202 uint32_t LastWord = 0;
5203 if (Remainder) {
5204 for (unsigned Idx = 0; Idx < Remainder; Idx++) {
 5205 LastWord |= static_cast<unsigned char>(Data[4 * WordSize + Idx]) << 8 * Idx;
5206 }
5207 }
5208
5209 WriteOneWord(LastWord);
5210 break;
5211 }
SJW88ed5fe2020-05-11 12:40:57 -05005212 case SPIRVOperandType::LITERAL_WORD: {
5213 WriteOneWord(Op.getLiteralNum()[0]);
5214 break;
5215 }
5216 case SPIRVOperandType::LITERAL_DWORD: {
5217 WriteOneWord(Op.getLiteralNum()[0]);
5218 WriteOneWord(Op.getLiteralNum()[1]);
David Neto22f144c2017-06-12 14:26:21 -04005219 break;
5220 }
5221 }
5222}
5223
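// A minimal sketch of the LITERAL_STRING packing above, assuming a
// little-endian host (as the code above effectively does). Bytes are packed
// four per word and the string is always nul-terminated, so "foo" becomes
// the single word 0x006f6f66 and "abcd" becomes 0x64636261 followed by
// 0x00000000. The helper name is illustrative and not part of the pass.
inline std::vector<uint32_t> ExamplePackLiteralString(const std::string &Str) {
  std::vector<uint32_t> Words(Str.size() / 4 + 1, 0u);
  std::memcpy(Words.data(), Str.data(), Str.size());
  return Words;
}
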
5224void SPIRVProducerPass::WriteSPIRVBinary() {
SJW69939d52020-04-16 07:29:07 -05005225 for (int i = 0; i < kSectionCount; ++i) {
5226 WriteSPIRVBinary(SPIRVSections[i]);
5227 }
5228}
5229
5230void SPIRVProducerPass::WriteSPIRVBinary(SPIRVInstructionList &SPIRVInstList) {
SJW88ed5fe2020-05-11 12:40:57 -05005231 for (const auto &Inst : SPIRVInstList) {
5232 const auto &Ops = Inst.getOperands();
5233 spv::Op Opcode = static_cast<spv::Op>(Inst.getOpcode());
David Neto22f144c2017-06-12 14:26:21 -04005234
5235 switch (Opcode) {
5236 default: {
David Neto5c22a252018-03-15 16:07:41 -04005237 errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
David Neto22f144c2017-06-12 14:26:21 -04005238 llvm_unreachable("Unsupported SPIRV instruction");
5239 break;
5240 }
Marco Antognini68e5c512020-09-09 16:08:57 +01005241 case spv::OpUnreachable:
David Neto22f144c2017-06-12 14:26:21 -04005242 case spv::OpCapability:
5243 case spv::OpExtension:
5244 case spv::OpMemoryModel:
5245 case spv::OpEntryPoint:
5246 case spv::OpExecutionMode:
5247 case spv::OpSource:
5248 case spv::OpDecorate:
5249 case spv::OpMemberDecorate:
5250 case spv::OpBranch:
5251 case spv::OpBranchConditional:
5252 case spv::OpSelectionMerge:
5253 case spv::OpLoopMerge:
5254 case spv::OpStore:
5255 case spv::OpImageWrite:
5256 case spv::OpReturnValue:
5257 case spv::OpControlBarrier:
5258 case spv::OpMemoryBarrier:
5259 case spv::OpReturn:
5260 case spv::OpFunctionEnd:
alan-baker4986eff2020-10-29 13:38:00 -04005261 case spv::OpCopyMemory:
5262 case spv::OpAtomicStore: {
David Neto22f144c2017-06-12 14:26:21 -04005263 WriteWordCountAndOpcode(Inst);
5264 for (uint32_t i = 0; i < Ops.size(); i++) {
5265 WriteOperand(Ops[i]);
5266 }
5267 break;
5268 }
5269 case spv::OpTypeBool:
5270 case spv::OpTypeVoid:
5271 case spv::OpTypeSampler:
5272 case spv::OpLabel:
5273 case spv::OpExtInstImport:
5274 case spv::OpTypePointer:
5275 case spv::OpTypeRuntimeArray:
5276 case spv::OpTypeStruct:
5277 case spv::OpTypeImage:
5278 case spv::OpTypeSampledImage:
5279 case spv::OpTypeInt:
5280 case spv::OpTypeFloat:
5281 case spv::OpTypeArray:
5282 case spv::OpTypeVector:
alan-baker86ce19c2020-08-05 13:09:19 -04005283 case spv::OpTypeFunction:
5284 case spv::OpString: {
David Neto22f144c2017-06-12 14:26:21 -04005285 WriteWordCountAndOpcode(Inst);
5286 WriteResultID(Inst);
5287 for (uint32_t i = 0; i < Ops.size(); i++) {
5288 WriteOperand(Ops[i]);
5289 }
5290 break;
5291 }
5292 case spv::OpFunction:
5293 case spv::OpFunctionParameter:
5294 case spv::OpAccessChain:
5295 case spv::OpPtrAccessChain:
5296 case spv::OpInBoundsAccessChain:
5297 case spv::OpUConvert:
5298 case spv::OpSConvert:
5299 case spv::OpConvertFToU:
5300 case spv::OpConvertFToS:
5301 case spv::OpConvertUToF:
5302 case spv::OpConvertSToF:
5303 case spv::OpFConvert:
5304 case spv::OpConvertPtrToU:
5305 case spv::OpConvertUToPtr:
5306 case spv::OpBitcast:
alan-bakerc9c55ae2019-12-02 16:01:27 -05005307 case spv::OpFNegate:
David Neto22f144c2017-06-12 14:26:21 -04005308 case spv::OpIAdd:
alan-bakera52b7312020-10-26 08:58:51 -04005309 case spv::OpIAddCarry:
David Neto22f144c2017-06-12 14:26:21 -04005310 case spv::OpFAdd:
5311 case spv::OpISub:
alan-baker3f1bf492020-11-05 09:07:36 -05005312 case spv::OpISubBorrow:
David Neto22f144c2017-06-12 14:26:21 -04005313 case spv::OpFSub:
5314 case spv::OpIMul:
5315 case spv::OpFMul:
5316 case spv::OpUDiv:
5317 case spv::OpSDiv:
5318 case spv::OpFDiv:
5319 case spv::OpUMod:
5320 case spv::OpSRem:
5321 case spv::OpFRem:
Kévin Petit8a560882019-03-21 15:24:34 +00005322 case spv::OpUMulExtended:
5323 case spv::OpSMulExtended:
David Neto22f144c2017-06-12 14:26:21 -04005324 case spv::OpBitwiseOr:
5325 case spv::OpBitwiseXor:
5326 case spv::OpBitwiseAnd:
David Netoa394f392017-08-26 20:45:29 -04005327 case spv::OpNot:
David Neto22f144c2017-06-12 14:26:21 -04005328 case spv::OpShiftLeftLogical:
5329 case spv::OpShiftRightLogical:
5330 case spv::OpShiftRightArithmetic:
5331 case spv::OpBitCount:
David Netoab03f432017-11-03 17:00:44 -04005332 case spv::OpCompositeConstruct:
David Neto22f144c2017-06-12 14:26:21 -04005333 case spv::OpCompositeExtract:
5334 case spv::OpVectorExtractDynamic:
5335 case spv::OpCompositeInsert:
alan-baker3f772c02021-06-15 22:18:11 -04005336 case spv::OpCopyLogical:
David Neto0a2f98d2017-09-15 19:38:40 -04005337 case spv::OpCopyObject:
David Neto22f144c2017-06-12 14:26:21 -04005338 case spv::OpVectorInsertDynamic:
5339 case spv::OpVectorShuffle:
5340 case spv::OpIEqual:
5341 case spv::OpINotEqual:
5342 case spv::OpUGreaterThan:
5343 case spv::OpUGreaterThanEqual:
5344 case spv::OpULessThan:
5345 case spv::OpULessThanEqual:
5346 case spv::OpSGreaterThan:
5347 case spv::OpSGreaterThanEqual:
5348 case spv::OpSLessThan:
5349 case spv::OpSLessThanEqual:
5350 case spv::OpFOrdEqual:
5351 case spv::OpFOrdGreaterThan:
5352 case spv::OpFOrdGreaterThanEqual:
5353 case spv::OpFOrdLessThan:
5354 case spv::OpFOrdLessThanEqual:
5355 case spv::OpFOrdNotEqual:
5356 case spv::OpFUnordEqual:
5357 case spv::OpFUnordGreaterThan:
5358 case spv::OpFUnordGreaterThanEqual:
5359 case spv::OpFUnordLessThan:
5360 case spv::OpFUnordLessThanEqual:
5361 case spv::OpFUnordNotEqual:
5362 case spv::OpExtInst:
5363 case spv::OpIsInf:
5364 case spv::OpIsNan:
5365 case spv::OpAny:
5366 case spv::OpAll:
5367 case spv::OpUndef:
5368 case spv::OpConstantNull:
5369 case spv::OpLogicalOr:
5370 case spv::OpLogicalAnd:
5371 case spv::OpLogicalNot:
5372 case spv::OpLogicalNotEqual:
5373 case spv::OpConstantComposite:
5374 case spv::OpSpecConstantComposite:
5375 case spv::OpConstantTrue:
5376 case spv::OpConstantFalse:
5377 case spv::OpConstant:
5378 case spv::OpSpecConstant:
5379 case spv::OpVariable:
5380 case spv::OpFunctionCall:
5381 case spv::OpSampledImage:
alan-baker75090e42020-02-20 11:21:04 -05005382 case spv::OpImageFetch:
alan-bakerf6bc8252020-09-23 14:58:55 -04005383 case spv::OpImageRead:
David Neto22f144c2017-06-12 14:26:21 -04005384 case spv::OpImageSampleExplicitLod:
David Neto5c22a252018-03-15 16:07:41 -04005385 case spv::OpImageQuerySize:
alan-bakerce179f12019-12-06 19:02:22 -05005386 case spv::OpImageQuerySizeLod:
David Neto22f144c2017-06-12 14:26:21 -04005387 case spv::OpSelect:
5388 case spv::OpPhi:
5389 case spv::OpLoad:
alan-baker4986eff2020-10-29 13:38:00 -04005390 case spv::OpAtomicLoad:
David Neto22f144c2017-06-12 14:26:21 -04005391 case spv::OpAtomicIAdd:
5392 case spv::OpAtomicISub:
5393 case spv::OpAtomicExchange:
5394 case spv::OpAtomicIIncrement:
5395 case spv::OpAtomicIDecrement:
5396 case spv::OpAtomicCompareExchange:
5397 case spv::OpAtomicUMin:
5398 case spv::OpAtomicSMin:
5399 case spv::OpAtomicUMax:
5400 case spv::OpAtomicSMax:
5401 case spv::OpAtomicAnd:
5402 case spv::OpAtomicOr:
5403 case spv::OpAtomicXor:
SJW806a5d82020-07-15 12:51:38 -05005404 case spv::OpDot:
5405 case spv::OpGroupNonUniformAll:
5406 case spv::OpGroupNonUniformAny:
5407 case spv::OpGroupNonUniformBroadcast:
5408 case spv::OpGroupNonUniformIAdd:
5409 case spv::OpGroupNonUniformFAdd:
5410 case spv::OpGroupNonUniformSMin:
5411 case spv::OpGroupNonUniformUMin:
5412 case spv::OpGroupNonUniformFMin:
5413 case spv::OpGroupNonUniformSMax:
5414 case spv::OpGroupNonUniformUMax:
5415 case spv::OpGroupNonUniformFMax: {
David Neto22f144c2017-06-12 14:26:21 -04005416 WriteWordCountAndOpcode(Inst);
5417 WriteOperand(Ops[0]);
5418 WriteResultID(Inst);
5419 for (uint32_t i = 1; i < Ops.size(); i++) {
5420 WriteOperand(Ops[i]);
5421 }
5422 break;
5423 }
5424 }
5425 }
5426}
Alan Baker9bf93fb2018-08-28 16:59:26 -04005427
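// Worked example of the two result-bearing shapes handled above (ids are
// illustrative):
//   %6 = OpTypeInt 32 0   -> words { (4u << 16) | OpTypeInt, 6, 32, 0 }
//   %9 = OpIAdd %6 %7 %8  -> words { (5u << 16) | OpIAdd, 6, 9, 7, 8 }
// For value-producing instructions Ops[0] is the result type, which is
// written before the result id, as in the last case group above.
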
alan-bakerb6b09dc2018-11-08 16:59:28 -05005428bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
Alan Baker9bf93fb2018-08-28 16:59:26 -04005429 switch (type->getTypeID()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005430 case Type::HalfTyID:
5431 case Type::FloatTyID:
5432 case Type::DoubleTyID:
5433 case Type::IntegerTyID:
James Price59a1c752020-04-23 23:06:16 -04005434 case Type::FixedVectorTyID:
alan-bakerb6b09dc2018-11-08 16:59:28 -05005435 return true;
5436 case Type::PointerTyID: {
5437 const PointerType *pointer_type = cast<PointerType>(type);
5438 if (pointer_type->getPointerAddressSpace() !=
5439 AddressSpace::UniformConstant) {
5440 auto pointee_type = pointer_type->getPointerElementType();
5441 if (pointee_type->isStructTy() &&
5442 cast<StructType>(pointee_type)->isOpaque()) {
5443 // Images and samplers are not nullable.
5444 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005445 }
Alan Baker9bf93fb2018-08-28 16:59:26 -04005446 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05005447 return true;
5448 }
5449 case Type::ArrayTyID:
alan-baker8eb435a2020-04-08 00:42:06 -04005450 return IsTypeNullable(type->getArrayElementType());
alan-bakerb6b09dc2018-11-08 16:59:28 -05005451 case Type::StructTyID: {
5452 const StructType *struct_type = cast<StructType>(type);
5453 // Images and samplers are not nullable.
5454 if (struct_type->isOpaque())
Alan Baker9bf93fb2018-08-28 16:59:26 -04005455 return false;
alan-bakerb6b09dc2018-11-08 16:59:28 -05005456 for (const auto element : struct_type->elements()) {
5457 if (!IsTypeNullable(element))
5458 return false;
5459 }
5460 return true;
5461 }
5462 default:
5463 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005464 }
5465}
Alan Bakerfcda9482018-10-02 17:09:59 -04005466
SJW77b87ad2020-04-21 14:37:52 -05005467void SPIRVProducerPass::PopulateUBOTypeMaps() {
Alan Bakerfcda9482018-10-02 17:09:59 -04005468 if (auto *offsets_md =
SJW77b87ad2020-04-21 14:37:52 -05005469 module->getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
Alan Bakerfcda9482018-10-02 17:09:59 -04005470 // Metadata is stored as key-value pair operands. The first element of each
5471 // operand is the type and the second is a vector of offsets.
5472 for (const auto *operand : offsets_md->operands()) {
5473 const auto *pair = cast<MDTuple>(operand);
5474 auto *type =
5475 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5476 const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
5477 std::vector<uint32_t> offsets;
5478 for (const Metadata *offset_md : offset_vector->operands()) {
5479 const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
alan-bakerb6b09dc2018-11-08 16:59:28 -05005480 offsets.push_back(static_cast<uint32_t>(
5481 cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
Alan Bakerfcda9482018-10-02 17:09:59 -04005482 }
5483 RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
5484 }
5485 }
5486
5487 if (auto *sizes_md =
SJW77b87ad2020-04-21 14:37:52 -05005488 module->getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
Alan Bakerfcda9482018-10-02 17:09:59 -04005489 // Metadata is stored as key-value pair operands. The first element of each
5490 // operand is the type and the second is a triple of sizes: type size in
5491 // bits, store size and alloc size.
5492 for (const auto *operand : sizes_md->operands()) {
5493 const auto *pair = cast<MDTuple>(operand);
5494 auto *type =
5495 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5496 const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
5497 uint64_t type_size_in_bits =
5498 cast<ConstantInt>(
5499 cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
5500 ->getZExtValue();
5501 uint64_t type_store_size =
5502 cast<ConstantInt>(
5503 cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
5504 ->getZExtValue();
5505 uint64_t type_alloc_size =
5506 cast<ConstantInt>(
5507 cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
5508 ->getZExtValue();
5509 RemappedUBOTypeSizes.insert(std::make_pair(
5510 type, std::make_tuple(type_size_in_bits, type_store_size,
5511 type_alloc_size)));
5512 }
5513 }
5514}
5515
5516uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
5517 const DataLayout &DL) {
5518 auto iter = RemappedUBOTypeSizes.find(type);
5519 if (iter != RemappedUBOTypeSizes.end()) {
5520 return std::get<0>(iter->second);
5521 }
5522
5523 return DL.getTypeSizeInBits(type);
5524}
5525
Alan Bakerfcda9482018-10-02 17:09:59 -04005526uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
5527 auto iter = RemappedUBOTypeSizes.find(type);
5528 if (iter != RemappedUBOTypeSizes.end()) {
5529 return std::get<2>(iter->second);
5530 }
5531
5532 return DL.getTypeAllocSize(type);
5533}
alan-baker5b86ed72019-02-15 08:26:50 -05005534
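// Usage note: both helpers above prefer the sizes recorded in the UBO
// remapping metadata and only then fall back to the module's DataLayout,
// so for a type that was never remapped,
//   GetTypeAllocSize(Ty, DL) == DL.getTypeAllocSize(Ty).
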
Kévin Petitbbbda972020-03-03 19:16:31 +00005535uint32_t SPIRVProducerPass::GetExplicitLayoutStructMemberOffset(
5536 StructType *type, unsigned member, const DataLayout &DL) {
5537 const auto StructLayout = DL.getStructLayout(type);
5538 // Search for the correct offsets if this type was remapped.
5539 std::vector<uint32_t> *offsets = nullptr;
5540 auto iter = RemappedUBOTypeOffsets.find(type);
5541 if (iter != RemappedUBOTypeOffsets.end()) {
5542 offsets = &iter->second;
5543 }
5544 auto ByteOffset =
5545 static_cast<uint32_t>(StructLayout->getElementOffset(member));
5546 if (offsets) {
5547 ByteOffset = (*offsets)[member];
5548 }
5549
5550 return ByteOffset;
5551}
5552
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005553void SPIRVProducerPass::setVariablePointersCapabilities(
5554 unsigned address_space) {
alan-baker5b86ed72019-02-15 08:26:50 -05005555 if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
SJW01901d92020-05-21 08:58:31 -05005556 setVariablePointersStorageBuffer();
alan-baker5b86ed72019-02-15 08:26:50 -05005557 } else {
SJW01901d92020-05-21 08:58:31 -05005558 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05005559 }
5560}
5561
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005562Value *SPIRVProducerPass::GetBasePointer(Value *v) {
alan-baker5b86ed72019-02-15 08:26:50 -05005563 if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
5564 return GetBasePointer(gep->getPointerOperand());
5565 }
5566
5567 // Conservatively return |v|.
5568 return v;
5569}
5570
5571bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
5572 if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
5573 if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
alan-baker7506abb2020-09-10 15:02:55 -04005574 const auto &lhs_func_info =
5575 Builtins::Lookup(lhs_call->getCalledFunction());
5576 const auto &rhs_func_info =
5577 Builtins::Lookup(rhs_call->getCalledFunction());
SJW61531372020-06-09 07:31:08 -05005578 if (lhs_func_info.getType() == Builtins::kClspvResource &&
5579 rhs_func_info.getType() == Builtins::kClspvResource) {
alan-baker5b86ed72019-02-15 08:26:50 -05005580 // For resource accessors, match descriptor set and binding.
5581 if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
5582 lhs_call->getOperand(1) == rhs_call->getOperand(1))
5583 return true;
SJW61531372020-06-09 07:31:08 -05005584 } else if (lhs_func_info.getType() == Builtins::kClspvLocal &&
5585 rhs_func_info.getType() == Builtins::kClspvLocal) {
alan-baker5b86ed72019-02-15 08:26:50 -05005586 // For workgroup resources, match spec id.
5587 if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
5588 return true;
5589 }
5590 }
5591 }
5592
5593 return false;
5594}
5595
5596bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
5597 assert(inst->getType()->isPointerTy());
5598 assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
5599 spv::StorageClassStorageBuffer);
5600 const bool hack_undef = clspv::Option::HackUndef();
5601 if (auto *select = dyn_cast<SelectInst>(inst)) {
5602 auto *true_base = GetBasePointer(select->getTrueValue());
5603 auto *false_base = GetBasePointer(select->getFalseValue());
5604
5605 if (true_base == false_base)
5606 return true;
5607
 5608 // If either the true or false operand is null, then we satisfy the
 5609 // same-object constraint.
5610 if (auto *true_cst = dyn_cast<Constant>(true_base)) {
5611 if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
5612 return true;
5613 }
5614
5615 if (auto *false_cst = dyn_cast<Constant>(false_base)) {
5616 if (false_cst->isNullValue() ||
5617 (hack_undef && isa<UndefValue>(false_base)))
5618 return true;
5619 }
5620
5621 if (sameResource(true_base, false_base))
5622 return true;
5623 } else if (auto *phi = dyn_cast<PHINode>(inst)) {
5624 Value *value = nullptr;
5625 bool ok = true;
5626 for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
5627 auto *base = GetBasePointer(phi->getIncomingValue(i));
 5628 // Null values satisfy the constraint of selecting from the same
 5629 // object.
5630 if (!value) {
5631 if (auto *cst = dyn_cast<Constant>(base)) {
5632 if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
5633 value = base;
5634 } else {
5635 value = base;
5636 }
5637 } else if (base != value) {
5638 if (auto *base_cst = dyn_cast<Constant>(base)) {
5639 if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
5640 continue;
5641 }
5642
5643 if (sameResource(value, base))
5644 continue;
5645
5646 // Values don't represent the same base.
5647 ok = false;
5648 }
5649 }
5650
5651 return ok;
5652 }
5653
5654 // Conservatively return false.
5655 return false;
5656}
alan-bakere9308012019-03-15 10:25:13 -04005657
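// Illustrative example (OpenCL C source shown as a comment, assumed input):
//   __global float *p = cond ? &buf[i] : &buf[j];
// Both select operands are based on the same storage buffer, so the
// same-object check above succeeds; selecting between pointers into two
// different buffers would not satisfy it.
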
5658bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
5659 if (!Arg.getType()->isPointerTy() ||
5660 Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
5661 // Only SSBOs need to be annotated as coherent.
5662 return false;
5663 }
5664
5665 DenseSet<Value *> visited;
5666 std::vector<Value *> stack;
5667 for (auto *U : Arg.getParent()->users()) {
5668 if (auto *call = dyn_cast<CallInst>(U)) {
5669 stack.push_back(call->getOperand(Arg.getArgNo()));
5670 }
5671 }
5672
5673 while (!stack.empty()) {
5674 Value *v = stack.back();
5675 stack.pop_back();
5676
5677 if (!visited.insert(v).second)
5678 continue;
5679
5680 auto *resource_call = dyn_cast<CallInst>(v);
5681 if (resource_call &&
SJW61531372020-06-09 07:31:08 -05005682 Builtins::Lookup(resource_call->getCalledFunction()).getType() ==
5683 Builtins::kClspvResource) {
alan-bakere9308012019-03-15 10:25:13 -04005684 // If this is a resource accessor function, check if the coherent operand
5685 // is set.
5686 const auto coherent =
5687 unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
5688 ->getZExtValue());
5689 if (coherent == 1)
5690 return true;
5691 } else if (auto *arg = dyn_cast<Argument>(v)) {
5692 // If this is a function argument, trace through its callers.
alan-bakere98f3f92019-04-08 15:06:36 -04005693 for (auto U : arg->getParent()->users()) {
alan-bakere9308012019-03-15 10:25:13 -04005694 if (auto *call = dyn_cast<CallInst>(U)) {
5695 stack.push_back(call->getOperand(arg->getArgNo()));
5696 }
5697 }
5698 } else if (auto *user = dyn_cast<User>(v)) {
5699 // If this is a user, traverse all operands that could lead to resource
5700 // variables.
5701 for (unsigned i = 0; i != user->getNumOperands(); ++i) {
5702 Value *operand = user->getOperand(i);
5703 if (operand->getType()->isPointerTy() &&
5704 operand->getType()->getPointerAddressSpace() ==
5705 clspv::AddressSpace::Global) {
5706 stack.push_back(operand);
5707 }
5708 }
5709 }
5710 }
5711
5712 // No coherent resource variables encountered.
5713 return false;
5714}
alan-baker06cad652019-12-03 17:56:47 -05005715
SJW77b87ad2020-04-21 14:37:52 -05005716void SPIRVProducerPass::PopulateStructuredCFGMaps() {
alan-baker06cad652019-12-03 17:56:47 -05005717 // First, track loop merges and continues.
5718 DenseSet<BasicBlock *> LoopMergesAndContinues;
SJW77b87ad2020-04-21 14:37:52 -05005719 for (auto &F : *module) {
alan-baker06cad652019-12-03 17:56:47 -05005720 if (F.isDeclaration())
5721 continue;
5722
5723 DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
5724 const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>(F).getLoopInfo();
5725 std::deque<BasicBlock *> order;
5726 DenseSet<BasicBlock *> visited;
5727 clspv::ComputeStructuredOrder(&*F.begin(), &DT, LI, &order, &visited);
5728
5729 for (auto BB : order) {
5730 auto terminator = BB->getTerminator();
5731 auto branch = dyn_cast<BranchInst>(terminator);
5732 if (LI.isLoopHeader(BB)) {
5733 auto L = LI.getLoopFor(BB);
5734 BasicBlock *ContinueBB = nullptr;
5735 BasicBlock *MergeBB = nullptr;
5736
5737 MergeBB = L->getExitBlock();
5738 if (!MergeBB) {
 5739 // The StructurizeCFG pass converts the CFG into a triangle shape in
 5740 // which every region has a single entry and exit. As a result, a
 5741 // loop should not have multiple exits.
5742 llvm_unreachable("Loop has multiple exits???");
5743 }
5744
5745 if (L->isLoopLatch(BB)) {
5746 ContinueBB = BB;
5747 } else {
 5748 // From SPIR-V spec section 2.11, the Continue Target must dominate
 5749 // the back-edge block.
5750 BasicBlock *Header = L->getHeader();
5751 BasicBlock *Latch = L->getLoopLatch();
5752 for (auto *loop_block : L->blocks()) {
5753 if (loop_block == Header) {
5754 continue;
5755 }
5756
 5757 // Check whether this block dominates the block with the back-edge.
 5758 // The loop latch is the single block with a back-edge. When it was
 5759 // possible, StructurizeCFG made the loop conform to this
 5760 // requirement; otherwise |Latch| is nullptr.
5761 if (DT.dominates(loop_block, Latch)) {
5762 ContinueBB = loop_block;
5763 }
5764 }
5765
5766 if (!ContinueBB) {
5767 llvm_unreachable("Wrong continue block from loop");
5768 }
5769 }
5770
5771 // Record the continue and merge blocks.
5772 MergeBlocks[BB] = MergeBB;
5773 ContinueBlocks[BB] = ContinueBB;
5774 LoopMergesAndContinues.insert(MergeBB);
5775 LoopMergesAndContinues.insert(ContinueBB);
5776 } else if (branch && branch->isConditional()) {
5777 auto L = LI.getLoopFor(BB);
5778 bool HasBackedge = false;
5779 while (L && !HasBackedge) {
5780 if (L->isLoopLatch(BB)) {
5781 HasBackedge = true;
5782 }
5783 L = L->getParentLoop();
5784 }
5785
5786 if (!HasBackedge) {
5787 // Only need a merge if the branch doesn't include a loop break or
5788 // continue.
5789 auto true_bb = branch->getSuccessor(0);
5790 auto false_bb = branch->getSuccessor(1);
5791 if (!LoopMergesAndContinues.count(true_bb) &&
5792 !LoopMergesAndContinues.count(false_bb)) {
 5793 // The StructurizeCFG pass has already restructured the CFG, so use
 5794 // the false successor of the branch instruction as the merge block.
5795 MergeBlocks[BB] = false_bb;
5796 }
5797 }
5798 }
5799 }
5800 }
5801}
alan-baker86ce19c2020-08-05 13:09:19 -04005802
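// For a simple structured while loop this records MergeBlocks[header] as
// the loop's single exit block and ContinueBlocks[header] as the latch;
// these later become the Merge Block and Continue Target operands of
// OpLoopMerge.
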
5803SPIRVID SPIRVProducerPass::getReflectionImport() {
5804 if (!ReflectionID.isValid()) {
5805 addSPIRVInst<kExtensions>(spv::OpExtension, "SPV_KHR_non_semantic_info");
5806 ReflectionID = addSPIRVInst<kImports>(spv::OpExtInstImport,
5807 "NonSemantic.ClspvReflection.1");
5808 }
5809 return ReflectionID;
5810}
5811
5812void SPIRVProducerPass::GenerateReflection() {
5813 GenerateKernelReflection();
5814 GeneratePushConstantReflection();
5815 GenerateSpecConstantReflection();
5816}
5817
5818void SPIRVProducerPass::GeneratePushConstantReflection() {
5819 if (auto GV = module->getGlobalVariable(clspv::PushConstantsVariableName())) {
5820 auto const &DL = module->getDataLayout();
5821 auto MD = GV->getMetadata(clspv::PushConstantsMetadataName());
5822 auto STy = cast<StructType>(GV->getValueType());
5823
5824 for (unsigned i = 0; i < STy->getNumElements(); i++) {
5825 auto pc = static_cast<clspv::PushConstant>(
5826 mdconst::extract<ConstantInt>(MD->getOperand(i))->getZExtValue());
5827 if (pc == PushConstant::KernelArgument)
5828 continue;
5829
5830 auto memberType = STy->getElementType(i);
5831 auto offset = GetExplicitLayoutStructMemberOffset(STy, i, DL);
Marco Antognini7e338402021-03-15 12:48:37 +00005832#ifndef NDEBUG
alan-baker86ce19c2020-08-05 13:09:19 -04005833 unsigned previousOffset = 0;
5834 if (i > 0) {
5835 previousOffset = GetExplicitLayoutStructMemberOffset(STy, i - 1, DL);
5836 }
alan-baker86ce19c2020-08-05 13:09:19 -04005837 assert(isValidExplicitLayout(*module, STy, i,
5838 spv::StorageClassPushConstant, offset,
5839 previousOffset));
Marco Antognini7e338402021-03-15 12:48:37 +00005840#endif
alan-baker86ce19c2020-08-05 13:09:19 -04005841
5842 reflection::ExtInst pc_inst = reflection::ExtInstMax;
5843 switch (pc) {
5844 case PushConstant::GlobalOffset:
5845 pc_inst = reflection::ExtInstPushConstantGlobalOffset;
5846 break;
5847 case PushConstant::EnqueuedLocalSize:
5848 pc_inst = reflection::ExtInstPushConstantEnqueuedLocalSize;
5849 break;
5850 case PushConstant::GlobalSize:
5851 pc_inst = reflection::ExtInstPushConstantGlobalSize;
5852 break;
5853 case PushConstant::RegionOffset:
5854 pc_inst = reflection::ExtInstPushConstantRegionOffset;
5855 break;
5856 case PushConstant::NumWorkgroups:
5857 pc_inst = reflection::ExtInstPushConstantNumWorkgroups;
5858 break;
5859 case PushConstant::RegionGroupOffset:
5860 pc_inst = reflection::ExtInstPushConstantRegionGroupOffset;
5861 break;
5862 default:
5863 llvm_unreachable("Unhandled push constant");
5864 break;
5865 }
5866
5867 auto import_id = getReflectionImport();
Marco Antognini7e338402021-03-15 12:48:37 +00005868 auto size = static_cast<uint32_t>(GetTypeSizeInBits(memberType, DL)) / 8;
alan-baker86ce19c2020-08-05 13:09:19 -04005869 SPIRVOperandVec Ops;
5870 Ops << getSPIRVType(Type::getVoidTy(module->getContext())) << import_id
5871 << pc_inst << getSPIRVInt32Constant(offset)
5872 << getSPIRVInt32Constant(size);
5873 addSPIRVInst(spv::OpExtInst, Ops);
5874 }
5875 }
5876}
5877
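// The reflection emitted above takes the form (ids illustrative, shown here
// for the global offset member):
//   %r = OpExtInst %void %refl PushConstantGlobalOffset %uint_offset %uint_size
// with one such instruction per push constant member that is not a
// clustered kernel argument.
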
5878void SPIRVProducerPass::GenerateSpecConstantReflection() {
5879 const uint32_t kMax = std::numeric_limits<uint32_t>::max();
5880 uint32_t wgsize_id[3] = {kMax, kMax, kMax};
5881 uint32_t global_offset_id[3] = {kMax, kMax, kMax};
5882 uint32_t work_dim_id = kMax;
5883 for (auto pair : clspv::GetSpecConstants(module)) {
5884 auto kind = pair.first;
5885 auto id = pair.second;
5886
5887 // Local memory size is only used for kernel arguments.
5888 if (kind == SpecConstant::kLocalMemorySize)
5889 continue;
5890
5891 switch (kind) {
5892 case SpecConstant::kWorkgroupSizeX:
5893 wgsize_id[0] = id;
5894 break;
5895 case SpecConstant::kWorkgroupSizeY:
5896 wgsize_id[1] = id;
5897 break;
5898 case SpecConstant::kWorkgroupSizeZ:
5899 wgsize_id[2] = id;
5900 break;
5901 case SpecConstant::kGlobalOffsetX:
5902 global_offset_id[0] = id;
5903 break;
5904 case SpecConstant::kGlobalOffsetY:
5905 global_offset_id[1] = id;
5906 break;
5907 case SpecConstant::kGlobalOffsetZ:
5908 global_offset_id[2] = id;
5909 break;
5910 case SpecConstant::kWorkDim:
5911 work_dim_id = id;
5912 break;
5913 default:
5914 llvm_unreachable("Unhandled spec constant");
5915 }
5916 }
5917
5918 auto import_id = getReflectionImport();
5919 auto void_id = getSPIRVType(Type::getVoidTy(module->getContext()));
5920 SPIRVOperandVec Ops;
5921 if (wgsize_id[0] != kMax) {
5922 assert(wgsize_id[1] != kMax);
5923 assert(wgsize_id[2] != kMax);
5924 Ops.clear();
5925 Ops << void_id << import_id << reflection::ExtInstSpecConstantWorkgroupSize
5926 << getSPIRVInt32Constant(wgsize_id[0])
5927 << getSPIRVInt32Constant(wgsize_id[1])
5928 << getSPIRVInt32Constant(wgsize_id[2]);
5929 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
5930 }
5931 if (global_offset_id[0] != kMax) {
5932 assert(global_offset_id[1] != kMax);
5933 assert(global_offset_id[2] != kMax);
5934 Ops.clear();
5935 Ops << void_id << import_id << reflection::ExtInstSpecConstantGlobalOffset
5936 << getSPIRVInt32Constant(global_offset_id[0])
5937 << getSPIRVInt32Constant(global_offset_id[1])
5938 << getSPIRVInt32Constant(global_offset_id[2]);
5939 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
5940 }
5941 if (work_dim_id != kMax) {
5942 Ops.clear();
5943 Ops << void_id << import_id << reflection::ExtInstSpecConstantWorkDim
5944 << getSPIRVInt32Constant(work_dim_id);
5945 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
5946 }
5947}
5948
5949void SPIRVProducerPass::GenerateKernelReflection() {
5950 const auto &DL = module->getDataLayout();
5951 auto import_id = getReflectionImport();
5952 auto void_id = getSPIRVType(Type::getVoidTy(module->getContext()));
5953
5954 for (auto &F : *module) {
5955 if (F.isDeclaration() || F.getCallingConv() != CallingConv::SPIR_KERNEL) {
5956 continue;
5957 }
5958
5959 // OpString for the kernel name.
5960 auto kernel_name =
5961 addSPIRVInst<kDebug>(spv::OpString, F.getName().str().c_str());
5962
5963 // Kernel declaration
5964 // Ops[0] = void type
5965 // Ops[1] = reflection ext import
5966 // Ops[2] = function id
5967 // Ops[3] = kernel name
5968 SPIRVOperandVec Ops;
5969 Ops << void_id << import_id << reflection::ExtInstKernel << ValueMap[&F]
5970 << kernel_name;
5971 auto kernel_decl = addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
5972
5973 // Generate the required workgroup size property if it was specified.
5974 if (const MDNode *MD = F.getMetadata("reqd_work_group_size")) {
5975 uint32_t CurXDimCst = static_cast<uint32_t>(
5976 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
5977 uint32_t CurYDimCst = static_cast<uint32_t>(
5978 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
5979 uint32_t CurZDimCst = static_cast<uint32_t>(
5980 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
5981
5982 Ops.clear();
5983 Ops << void_id << import_id
5984 << reflection::ExtInstPropertyRequiredWorkgroupSize << kernel_decl
5985 << getSPIRVInt32Constant(CurXDimCst)
5986 << getSPIRVInt32Constant(CurYDimCst)
5987 << getSPIRVInt32Constant(CurZDimCst);
5988 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
5989 }
5990
5991 auto &resource_var_at_index = FunctionToResourceVarsMap[&F];
5992 auto *func_ty = F.getFunctionType();
5993
5994 // If we've clustered POD arguments, then argument details are in metadata.
5995 // If an argument maps to a resource variable, then get descriptor set and
5996 // binding from the resource variable. Other info comes from the metadata.
5997 const auto *arg_map = F.getMetadata(clspv::KernelArgMapMetadataName());
5998 auto local_spec_id_md =
5999 module->getNamedMetadata(clspv::LocalSpecIdMetadataName());
6000 if (arg_map) {
6001 for (const auto &arg : arg_map->operands()) {
6002 const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
6003 assert(arg_node->getNumOperands() == 6);
6004 const auto name =
6005 dyn_cast<MDString>(arg_node->getOperand(0))->getString();
6006 const auto old_index =
6007 dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
6008 // Remapped argument index
6009 const int new_index = static_cast<int>(
6010 dyn_extract<ConstantInt>(arg_node->getOperand(2))->getSExtValue());
6011 const auto offset =
6012 dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
6013 const auto size =
6014 dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
6015 const auto argKind = clspv::GetArgKindFromName(
6016 dyn_cast<MDString>(arg_node->getOperand(5))->getString().str());
6017
6018 // If this is a local memory argument, find the right spec id for this
6019 // argument.
6020 int64_t spec_id = -1;
6021 if (argKind == clspv::ArgKind::Local) {
6022 for (auto spec_id_arg : local_spec_id_md->operands()) {
6023 if ((&F == dyn_cast<Function>(
6024 dyn_cast<ValueAsMetadata>(spec_id_arg->getOperand(0))
6025 ->getValue())) &&
6026 (static_cast<uint64_t>(new_index) ==
6027 mdconst::extract<ConstantInt>(spec_id_arg->getOperand(1))
6028 ->getZExtValue())) {
6029 spec_id =
6030 mdconst::extract<ConstantInt>(spec_id_arg->getOperand(2))
6031 ->getSExtValue();
6032 break;
6033 }
6034 }
6035 }
6036
6037 // Generate the specific argument instruction.
6038 const uint32_t ordinal = static_cast<uint32_t>(old_index);
6039 const uint32_t arg_offset = static_cast<uint32_t>(offset);
6040 const uint32_t arg_size = static_cast<uint32_t>(size);
6041 uint32_t elem_size = 0;
6042 uint32_t descriptor_set = 0;
6043 uint32_t binding = 0;
6044 if (spec_id > 0) {
6045 elem_size = static_cast<uint32_t>(
6046 GetTypeAllocSize(func_ty->getParamType(unsigned(new_index))
6047 ->getPointerElementType(),
6048 DL));
6049 } else if (new_index >= 0) {
6050 auto *info = resource_var_at_index[new_index];
6051 assert(info);
6052 descriptor_set = info->descriptor_set;
6053 binding = info->binding;
6054 }
6055 AddArgumentReflection(kernel_decl, name.str(), argKind, ordinal,
6056 descriptor_set, binding, arg_offset, arg_size,
6057 static_cast<uint32_t>(spec_id), elem_size);
6058 }
6059 } else {
6060 // There is no argument map.
6061 // Take descriptor info from the resource variable calls.
6062 // Take argument name and size from the arguments list.
6063
6064 SmallVector<Argument *, 4> arguments;
6065 for (auto &arg : F.args()) {
6066 arguments.push_back(&arg);
6067 }
6068
6069 unsigned arg_index = 0;
6070 for (auto *info : resource_var_at_index) {
6071 if (info) {
6072 auto arg = arguments[arg_index];
6073 unsigned arg_size = 0;
6074 if (info->arg_kind == clspv::ArgKind::Pod ||
6075 info->arg_kind == clspv::ArgKind::PodUBO ||
6076 info->arg_kind == clspv::ArgKind::PodPushConstant) {
6077 arg_size =
6078 static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
6079 }
6080
6081 // Local pointer arguments are unused in this case.
 6082 // offset, spec_id, and elem_size are always 0.
6083 AddArgumentReflection(kernel_decl, arg->getName().str(),
6084 info->arg_kind, arg_index, info->descriptor_set,
6085 info->binding, 0, arg_size, 0, 0);
6086 }
6087 arg_index++;
6088 }
6089 // Generate mappings for pointer-to-local arguments.
6090 for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
6091 Argument *arg = arguments[arg_index];
6092 auto where = LocalArgSpecIds.find(arg);
6093 if (where != LocalArgSpecIds.end()) {
6094 auto &local_arg_info = LocalSpecIdInfoMap[where->second];
6095
6096 // descriptor_set, binding, offset and size are always 0.
6097 AddArgumentReflection(kernel_decl, arg->getName().str(),
6098 ArgKind::Local, arg_index, 0, 0, 0, 0,
6099 static_cast<uint32_t>(local_arg_info.spec_id),
6100 static_cast<uint32_t>(GetTypeAllocSize(
6101 local_arg_info.elem_type, DL)));
6102 }
6103 }
6104 }
6105 }
6106}
6107
6108void SPIRVProducerPass::AddArgumentReflection(
6109 SPIRVID kernel_decl, const std::string &name, clspv::ArgKind arg_kind,
6110 uint32_t ordinal, uint32_t descriptor_set, uint32_t binding,
6111 uint32_t offset, uint32_t size, uint32_t spec_id, uint32_t elem_size) {
6112 // Generate ArgumentInfo for this argument.
6113 // TODO: generate remaining optional operands.
6114 auto import_id = getReflectionImport();
6115 auto arg_name = addSPIRVInst<kDebug>(spv::OpString, name.c_str());
6116 auto void_id = getSPIRVType(Type::getVoidTy(module->getContext()));
6117 SPIRVOperandVec Ops;
6118 Ops << void_id << import_id << reflection::ExtInstArgumentInfo << arg_name;
6119 auto arg_info = addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
6120
6121 Ops.clear();
6122 Ops << void_id << import_id;
6123 reflection::ExtInst ext_inst = reflection::ExtInstMax;
6124 // Determine the extended instruction.
6125 switch (arg_kind) {
6126 case clspv::ArgKind::Buffer:
6127 ext_inst = reflection::ExtInstArgumentStorageBuffer;
6128 break;
6129 case clspv::ArgKind::BufferUBO:
6130 ext_inst = reflection::ExtInstArgumentUniform;
6131 break;
6132 case clspv::ArgKind::Local:
6133 ext_inst = reflection::ExtInstArgumentWorkgroup;
6134 break;
6135 case clspv::ArgKind::Pod:
6136 ext_inst = reflection::ExtInstArgumentPodStorageBuffer;
6137 break;
6138 case clspv::ArgKind::PodUBO:
6139 ext_inst = reflection::ExtInstArgumentPodUniform;
6140 break;
6141 case clspv::ArgKind::PodPushConstant:
6142 ext_inst = reflection::ExtInstArgumentPodPushConstant;
6143 break;
alan-bakerf6bc8252020-09-23 14:58:55 -04006144 case clspv::ArgKind::SampledImage:
alan-baker86ce19c2020-08-05 13:09:19 -04006145 ext_inst = reflection::ExtInstArgumentSampledImage;
6146 break;
alan-bakerf6bc8252020-09-23 14:58:55 -04006147 case clspv::ArgKind::StorageImage:
alan-baker86ce19c2020-08-05 13:09:19 -04006148 ext_inst = reflection::ExtInstArgumentStorageImage;
6149 break;
6150 case clspv::ArgKind::Sampler:
6151 ext_inst = reflection::ExtInstArgumentSampler;
6152 break;
6153 default:
6154 llvm_unreachable("Unhandled argument reflection");
6155 break;
6156 }
6157 Ops << ext_inst << kernel_decl << getSPIRVInt32Constant(ordinal);
6158
6159 // Add descriptor set and binding for applicable arguments.
6160 switch (arg_kind) {
6161 case clspv::ArgKind::Buffer:
6162 case clspv::ArgKind::BufferUBO:
6163 case clspv::ArgKind::Pod:
6164 case clspv::ArgKind::PodUBO:
alan-bakerf6bc8252020-09-23 14:58:55 -04006165 case clspv::ArgKind::SampledImage:
6166 case clspv::ArgKind::StorageImage:
alan-baker86ce19c2020-08-05 13:09:19 -04006167 case clspv::ArgKind::Sampler:
6168 Ops << getSPIRVInt32Constant(descriptor_set)
6169 << getSPIRVInt32Constant(binding);
6170 break;
6171 default:
6172 break;
6173 }
6174
6175 // Add remaining operands for arguments.
6176 switch (arg_kind) {
6177 case clspv::ArgKind::Local:
6178 Ops << getSPIRVInt32Constant(spec_id) << getSPIRVInt32Constant(elem_size);
6179 break;
6180 case clspv::ArgKind::Pod:
6181 case clspv::ArgKind::PodUBO:
6182 case clspv::ArgKind::PodPushConstant:
6183 Ops << getSPIRVInt32Constant(offset) << getSPIRVInt32Constant(size);
6184 break;
6185 default:
6186 break;
6187 }
6188 Ops << arg_info;
6189 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
6190}
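// For a storage-buffer argument the reflection built above is, roughly
// (ids illustrative):
//   %info = OpExtInst %void %refl ArgumentInfo %name
//   %arg  = OpExtInst %void %refl ArgumentStorageBuffer %kernel %ordinal
//                     %descriptor_set %binding %info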