blob: 9c9d438c0c945ce8cc2d1355ba6d3c959a1348a7 [file] [log] [blame]
David Neto22f144c2017-06-12 14:26:21 -04001// Copyright 2017 The Clspv Authors. All rights reserved.
2//
3// Licensed under the Apache License, Version 2.0 (the "License");
4// you may not use this file except in compliance with the License.
5// You may obtain a copy of the License at
6//
7// http://www.apache.org/licenses/LICENSE-2.0
8//
9// Unless required by applicable law or agreed to in writing, software
10// distributed under the License is distributed on an "AS IS" BASIS,
11// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12// See the License for the specific language governing permissions and
13// limitations under the License.
14
15#ifdef _MSC_VER
16#pragma warning(push, 0)
17#endif
18
David Neto156783e2017-07-05 15:39:41 -040019#include <cassert>
David Neto257c3892018-04-11 13:19:45 -040020#include <cstring>
David Neto118188e2018-08-24 11:27:54 -040021#include <iomanip>
22#include <list>
David Neto862b7d82018-06-14 18:48:37 -040023#include <memory>
David Neto118188e2018-08-24 11:27:54 -040024#include <set>
25#include <sstream>
26#include <string>
27#include <tuple>
28#include <unordered_set>
29#include <utility>
David Neto862b7d82018-06-14 18:48:37 -040030
David Neto118188e2018-08-24 11:27:54 -040031#include "llvm/ADT/StringSwitch.h"
32#include "llvm/ADT/UniqueVector.h"
33#include "llvm/Analysis/LoopInfo.h"
34#include "llvm/IR/Constants.h"
35#include "llvm/IR/Dominators.h"
36#include "llvm/IR/Instructions.h"
37#include "llvm/IR/Metadata.h"
38#include "llvm/IR/Module.h"
alan-bakerf67468c2019-11-25 15:51:49 -050039#include "llvm/IR/ValueSymbolTable.h"
David Neto118188e2018-08-24 11:27:54 -040040#include "llvm/Pass.h"
41#include "llvm/Support/CommandLine.h"
Kévin Petitbbbda972020-03-03 19:16:31 +000042#include "llvm/Support/MathExtras.h"
David Neto118188e2018-08-24 11:27:54 -040043#include "llvm/Support/raw_ostream.h"
44#include "llvm/Transforms/Utils/Cloning.h"
David Neto22f144c2017-06-12 14:26:21 -040045
SJWf93f5f32020-05-05 07:27:56 -050046// enable spv::HasResultAndType
47#define SPV_ENABLE_UTILITY_CODE
alan-bakere0902602020-03-23 08:43:40 -040048#include "spirv/unified1/spirv.hpp"
David Neto118188e2018-08-24 11:27:54 -040049
David Neto85082642018-03-24 06:55:20 -070050#include "clspv/AddressSpace.h"
David Neto118188e2018-08-24 11:27:54 -040051#include "clspv/Option.h"
alan-baker86ce19c2020-08-05 13:09:19 -040052#include "clspv/PushConstant.h"
53#include "clspv/SpecConstant.h"
David Neto85082642018-03-24 06:55:20 -070054#include "clspv/spirv_c_strings.hpp"
55#include "clspv/spirv_glsl.hpp"
alan-baker86ce19c2020-08-05 13:09:19 -040056#include "clspv/spirv_reflection.hpp"
David Neto22f144c2017-06-12 14:26:21 -040057
David Neto4feb7a42017-10-06 17:29:42 -040058#include "ArgKind.h"
alan-bakerf67468c2019-11-25 15:51:49 -050059#include "Builtins.h"
alan-baker06cad652019-12-03 17:56:47 -050060#include "ComputeStructuredOrder.h"
David Neto85082642018-03-24 06:55:20 -070061#include "ConstantEmitter.h"
Alan Baker202c8c72018-08-13 13:47:44 -040062#include "Constants.h"
David Neto78383442018-06-15 20:31:56 -040063#include "DescriptorCounter.h"
alan-bakerc4579bb2020-04-29 14:15:50 -040064#include "Layout.h"
alan-baker56f7aff2019-05-22 08:06:42 -040065#include "NormalizeGlobalVariable.h"
Diego Novilloa4c44fa2019-04-11 10:56:15 -040066#include "Passes.h"
alan-bakera1be3322020-04-20 12:48:18 -040067#include "SpecConstant.h"
alan-bakerce179f12019-12-06 19:02:22 -050068#include "Types.h"
David Neto48f56a42017-10-06 16:44:25 -040069
David Neto22f144c2017-06-12 14:26:21 -040070#if defined(_MSC_VER)
71#pragma warning(pop)
72#endif
73
74using namespace llvm;
75using namespace clspv;
SJW173c7e92020-03-16 08:44:47 -050076using namespace clspv::Builtins;
SJW806a5d82020-07-15 12:51:38 -050077using namespace clspv::Option;
David Neto156783e2017-07-05 15:39:41 -040078using namespace mdconst;
David Neto22f144c2017-06-12 14:26:21 -040079
80namespace {
David Netocd8ca5f2017-10-02 23:34:11 -040081
// Command-line flag: log each resource variable as it is created.
cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

// Command-line flag: dump the LLVM IR as it arrives at this pass.
cl::opt<bool>
    ShowProducerIR("show-producer-ir", cl::init(false), cl::ReallyHidden,
                   cl::desc("Dump the IR at the start of SPIRVProducer"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
// Sentinel meaning "no GLSL extended instruction"; enum value 0 is unused
// by the real GLSL.std.450 instruction set.
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);
97
// SPIRV Module Sections (per 2.4 of the SPIR-V spec)
// These are used to collect SPIRVInstructions by type on-the-fly.
// The enumerators are in module-layout order so the sections can be
// emitted by simple iteration over an array indexed by this enum.
enum SPIRVSection {
  kCapabilities,
  kExtensions,
  kImports,
  kMemoryModel,
  kEntryPoints,
  kExecutionModes,

  kDebug,
  kAnnotations,

  kTypes,
  // Constants are interleaved with types in the same section, hence the
  // alias rather than a distinct value.
  kConstants = kTypes,
  kGlobalVariables,

  kFunctions,

  // This is not a section of the SPIR-V spec and should always immediately
  // precede kSectionCount. It is a convenient place for the embedded
  // reflection data.
  kReflection,
  kSectionCount
};
123
// Thin value wrapper around a raw SPIR-V result ID word. ID 0 is never a
// legal SPIR-V id, so it doubles here as the "not yet assigned" state.
class SPIRVID {
  uint32_t id_;

public:
  // Implicit conversion from a raw word is intentional; defaults to the
  // invalid id (0).
  SPIRVID(uint32_t raw = 0) : id_(raw) {}
  // Raw 32-bit id word.
  uint32_t get() const { return id_; }
  // True once a real (non-zero) id has been assigned.
  bool isValid() const { return id_ != 0; }
  bool operator==(const SPIRVID &that) const { return id_ == that.id_; }
  // Ordering on the raw word, so SPIRVID can key ordered containers.
  bool operator<(const SPIRVID &that) const { return id_ < that.id_; }
};
SJWf93f5f32020-05-05 07:27:56 -0500134
SJW88ed5fe2020-05-11 12:40:57 -0500135enum SPIRVOperandType { NUMBERID, LITERAL_WORD, LITERAL_DWORD, LITERAL_STRING };
David Neto22f144c2017-06-12 14:26:21 -0400136
137struct SPIRVOperand {
SJW88ed5fe2020-05-11 12:40:57 -0500138 explicit SPIRVOperand(SPIRVOperandType Ty, uint32_t Num) : Type(Ty) {
139 LiteralNum[0] = Num;
140 }
David Neto22f144c2017-06-12 14:26:21 -0400141 explicit SPIRVOperand(SPIRVOperandType Ty, const char *Str)
142 : Type(Ty), LiteralStr(Str) {}
143 explicit SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
144 : Type(Ty), LiteralStr(Str) {}
SJW88ed5fe2020-05-11 12:40:57 -0500145 explicit SPIRVOperand(ArrayRef<uint32_t> NumVec) {
146 auto sz = NumVec.size();
147 assert(sz >= 1 && sz <= 2);
148 Type = sz == 1 ? LITERAL_WORD : LITERAL_DWORD;
149 LiteralNum[0] = NumVec[0];
150 if (sz == 2) {
151 LiteralNum[1] = NumVec[1];
152 }
153 }
David Neto22f144c2017-06-12 14:26:21 -0400154
alan-baker7506abb2020-09-10 15:02:55 -0400155 SPIRVOperandType getType() const { return Type; }
156 uint32_t getNumID() const { return LiteralNum[0]; }
157 std::string getLiteralStr() const { return LiteralStr; }
158 const uint32_t *getLiteralNum() const { return LiteralNum; }
David Neto22f144c2017-06-12 14:26:21 -0400159
David Neto87846742018-04-11 17:36:22 -0400160 uint32_t GetNumWords() const {
161 switch (Type) {
162 case NUMBERID:
SJW88ed5fe2020-05-11 12:40:57 -0500163 case LITERAL_WORD:
David Neto87846742018-04-11 17:36:22 -0400164 return 1;
SJW88ed5fe2020-05-11 12:40:57 -0500165 case LITERAL_DWORD:
166 return 2;
David Neto87846742018-04-11 17:36:22 -0400167 case LITERAL_STRING:
168 // Account for the terminating null character.
David Netoee2660d2018-06-28 16:31:29 -0400169 return uint32_t((LiteralStr.size() + 4) / 4);
David Neto87846742018-04-11 17:36:22 -0400170 }
171 llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
172 }
173
David Neto22f144c2017-06-12 14:26:21 -0400174private:
175 SPIRVOperandType Type;
176 std::string LiteralStr;
SJW88ed5fe2020-05-11 12:40:57 -0500177 uint32_t LiteralNum[2];
David Neto22f144c2017-06-12 14:26:21 -0400178};
179
// Operand list; small-size optimized since most instructions are short.
typedef SmallVector<SPIRVOperand, 4> SPIRVOperandVec;

// An assembled SPIR-V instruction: opcode, optional result id, operands,
// and the running word count used when the binary is emitted.
struct SPIRVInstruction {
  // Primary constructor must have Opcode, initializes WordCount based on ResID.
  SPIRVInstruction(spv::Op Opc, SPIRVID ResID = 0)
      : Opcode(static_cast<uint16_t>(Opc)) {
    setResult(ResID);
  }

  // Creates an instruction with an opcode and no result ID, and with the given
  // operands. This calls primary constructor to initialize Opcode, WordCount.
  // Takes ownership of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandVec &Ops) : SPIRVInstruction(Opc) {
    setOperands(Ops);
  }
  // Creates an instruction with an opcode and a result ID, and with the given
  // operands. This calls primary constructor to initialize Opcode, WordCount.
  // Takes ownership of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVID ResID, SPIRVOperandVec &Ops)
      : SPIRVInstruction(Opc, ResID) {
    setOperands(Ops);
  }

  uint32_t getWordCount() const { return WordCount; }
  uint16_t getOpcode() const { return Opcode; }
  SPIRVID getResultID() const { return ResultID; }
  const SPIRVOperandVec &getOperands() const { return Operands; }

private:
  // Records the result id and resets the base word count: one word for the
  // opcode/length pair, plus one if a result id is present.
  void setResult(SPIRVID ResID = 0) {
    WordCount = 1 + (ResID.isValid() ? 1 : 0);
    ResultID = ResID;
  }

  // Moves |Ops| into this instruction (leaving |Ops| empty) and adds each
  // operand's encoded size to WordCount.
  void setOperands(SPIRVOperandVec &Ops) {
    assert(Operands.empty());
    Operands = std::move(Ops);
    for (auto &opd : Operands) {
      WordCount += uint16_t(opd.GetNumWords());
    }
  }

private:
  uint32_t WordCount; // Check the 16-bit bound at code generation time.
  uint16_t Opcode;
  SPIRVID ResultID;
  SPIRVOperandVec Operands;
};
228
229struct SPIRVProducerPass final : public ModulePass {
SJW01901d92020-05-21 08:58:31 -0500230 typedef DenseMap<Type *, SPIRVID> TypeMapType;
David Neto22f144c2017-06-12 14:26:21 -0400231 typedef UniqueVector<Type *> TypeList;
SJW88ed5fe2020-05-11 12:40:57 -0500232 typedef DenseMap<Value *, SPIRVID> ValueMapType;
SJW806a5d82020-07-15 12:51:38 -0500233 typedef std::list<SPIRVID> SPIRVIDListType;
SJW01901d92020-05-21 08:58:31 -0500234 typedef std::vector<std::pair<Value *, SPIRVID>> EntryPointVecType;
235 typedef std::set<uint32_t> CapabilitySetType;
SJW88ed5fe2020-05-11 12:40:57 -0500236 typedef std::list<SPIRVInstruction> SPIRVInstructionList;
SJW806a5d82020-07-15 12:51:38 -0500237 typedef std::map<spv::BuiltIn, SPIRVID> BuiltinConstantMapType;
SJW88ed5fe2020-05-11 12:40:57 -0500238 // A vector of pairs, each of which is:
David Neto87846742018-04-11 17:36:22 -0400239 // - the LLVM instruction that we will later generate SPIR-V code for
SJW88ed5fe2020-05-11 12:40:57 -0500240 // - the SPIR-V instruction placeholder that will be replaced
241 typedef std::vector<std::pair<Value *, SPIRVInstruction *>>
David Neto22f144c2017-06-12 14:26:21 -0400242 DeferredInstVecType;
243 typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
244 GlobalConstFuncMapType;
245
David Neto44795152017-07-13 15:45:28 -0400246 explicit SPIRVProducerPass(
alan-bakerf5e5f692018-11-27 08:33:24 -0500247 raw_pwrite_stream &out,
alan-baker00e7a582019-06-07 12:54:21 -0400248 ArrayRef<std::pair<unsigned, std::string>> samplerMap,
David Neto44795152017-07-13 15:45:28 -0400249 bool outputCInitList)
SJW01901d92020-05-21 08:58:31 -0500250 : ModulePass(ID), module(nullptr), samplerMap(samplerMap), out(out),
David Neto0676e6f2017-07-11 18:47:44 -0400251 binaryTempOut(binaryTempUnderlyingVector), binaryOut(&out),
David Neto0676e6f2017-07-11 18:47:44 -0400252 outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
alan-baker5b86ed72019-02-15 08:26:50 -0500253 OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
254 HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
SJW01901d92020-05-21 08:58:31 -0500255 WorkgroupSizeVarID(0) {
256 addCapability(spv::CapabilityShader);
257 Ptr = this;
258 }
David Neto22f144c2017-06-12 14:26:21 -0400259
James Price11010dc2019-12-19 13:53:09 -0500260 virtual ~SPIRVProducerPass() {
James Price11010dc2019-12-19 13:53:09 -0500261 }
262
David Neto22f144c2017-06-12 14:26:21 -0400263 void getAnalysisUsage(AnalysisUsage &AU) const override {
264 AU.addRequired<DominatorTreeWrapperPass>();
265 AU.addRequired<LoopInfoWrapperPass>();
266 }
267
268 virtual bool runOnModule(Module &module) override;
269
270 // output the SPIR-V header block
271 void outputHeader();
272
273 // patch the SPIR-V header block
274 void patchHeader();
275
SJW01901d92020-05-21 08:58:31 -0500276 CapabilitySetType &getCapabilitySet() { return CapabilitySet; }
David Neto22f144c2017-06-12 14:26:21 -0400277 TypeMapType &getImageTypeMap() { return ImageTypeMap; }
alan-baker7506abb2020-09-10 15:02:55 -0400278 TypeList &getTypeList() { return Types; }
David Neto22f144c2017-06-12 14:26:21 -0400279 ValueMapType &getValueMap() { return ValueMap; }
SJW69939d52020-04-16 07:29:07 -0500280 SPIRVInstructionList &getSPIRVInstList(SPIRVSection Section) {
281 return SPIRVSections[Section];
282 };
alan-baker7506abb2020-09-10 15:02:55 -0400283 EntryPointVecType &getEntryPointVec() { return EntryPointVec; }
284 DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; }
SJW806a5d82020-07-15 12:51:38 -0500285 SPIRVIDListType &getEntryPointInterfacesList() {
286 return EntryPointInterfacesList;
alan-baker7506abb2020-09-10 15:02:55 -0400287 }
SJW01901d92020-05-21 08:58:31 -0500288 SPIRVID getOpExtInstImportID();
alan-baker7506abb2020-09-10 15:02:55 -0400289 std::vector<SPIRVID> &getBuiltinDimVec() { return BuiltinDimensionVec; }
SJW2c317da2020-03-23 07:39:13 -0500290
alan-baker5b86ed72019-02-15 08:26:50 -0500291 bool hasVariablePointersStorageBuffer() {
292 return HasVariablePointersStorageBuffer;
293 }
SJW01901d92020-05-21 08:58:31 -0500294 void setVariablePointersStorageBuffer() {
295 if (!HasVariablePointersStorageBuffer) {
296 addCapability(spv::CapabilityVariablePointersStorageBuffer);
297 HasVariablePointersStorageBuffer = true;
298 }
alan-baker5b86ed72019-02-15 08:26:50 -0500299 }
alan-baker7506abb2020-09-10 15:02:55 -0400300 bool hasVariablePointers() { return HasVariablePointers; }
SJW01901d92020-05-21 08:58:31 -0500301 void setVariablePointers() {
302 if (!HasVariablePointers) {
303 addCapability(spv::CapabilityVariablePointers);
304 HasVariablePointers = true;
305 }
alan-baker7506abb2020-09-10 15:02:55 -0400306 }
alan-bakerb6b09dc2018-11-08 16:59:28 -0500307 ArrayRef<std::pair<unsigned, std::string>> &getSamplerMap() {
308 return samplerMap;
309 }
David Neto22f144c2017-06-12 14:26:21 -0400310 GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
311 return GlobalConstFuncTypeMap;
312 }
313 SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
314 return GlobalConstArgumentSet;
315 }
alan-bakerb6b09dc2018-11-08 16:59:28 -0500316 TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }
David Neto22f144c2017-06-12 14:26:21 -0400317
SJW77b87ad2020-04-21 14:37:52 -0500318 void GenerateLLVMIRInfo();
alan-bakerb6b09dc2018-11-08 16:59:28 -0500319 // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
320 // *not* be converted to a storage buffer, replace each such global variable
321 // with one in the storage class expecgted by SPIR-V.
SJW77b87ad2020-04-21 14:37:52 -0500322 void FindGlobalConstVars();
David Neto862b7d82018-06-14 18:48:37 -0400323 // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
324 // ModuleOrderedResourceVars.
SJW77b87ad2020-04-21 14:37:52 -0500325 void FindResourceVars();
David Neto22f144c2017-06-12 14:26:21 -0400326 void FindTypePerGlobalVar(GlobalVariable &GV);
327 void FindTypePerFunc(Function &F);
SJW77b87ad2020-04-21 14:37:52 -0500328 void FindTypesForSamplerMap();
329 void FindTypesForResourceVars();
alan-bakerb6b09dc2018-11-08 16:59:28 -0500330 // Inserts |Ty| and relevant sub-types into the |Types| member, indicating
331 // that |Ty| and its subtypes will need a corresponding SPIR-V type.
David Neto22f144c2017-06-12 14:26:21 -0400332 void FindType(Type *Ty);
SJWf93f5f32020-05-05 07:27:56 -0500333
334 // Lookup or create Types, Constants.
335 // Returns SPIRVID once it has been created.
336 SPIRVID getSPIRVType(Type *Ty);
337 SPIRVID getSPIRVConstant(Constant *Cst);
SJW806a5d82020-07-15 12:51:38 -0500338 SPIRVID getSPIRVInt32Constant(uint32_t CstVal);
SJWf93f5f32020-05-05 07:27:56 -0500339 // Lookup SPIRVID of llvm::Value, may create Constant.
340 SPIRVID getSPIRVValue(Value *V);
341
SJW806a5d82020-07-15 12:51:38 -0500342 SPIRVID getSPIRVBuiltin(spv::BuiltIn BID, spv::Capability Cap);
343
David Neto19a1bad2017-08-25 15:01:41 -0400344 // Generates instructions for SPIR-V types corresponding to the LLVM types
345 // saved in the |Types| member. A type follows its subtypes. IDs are
346 // allocated sequentially starting with the current value of nextID, and
347 // with a type following its subtypes. Also updates nextID to just beyond
348 // the last generated ID.
SJW77b87ad2020-04-21 14:37:52 -0500349 void GenerateSPIRVTypes();
SJW77b87ad2020-04-21 14:37:52 -0500350 void GenerateModuleInfo();
David Neto22f144c2017-06-12 14:26:21 -0400351 void GenerateGlobalVar(GlobalVariable &GV);
SJW77b87ad2020-04-21 14:37:52 -0500352 void GenerateWorkgroupVars();
alan-baker86ce19c2020-08-05 13:09:19 -0400353 // Generate reflection instructions for resource variables associated with
David Neto862b7d82018-06-14 18:48:37 -0400354 // arguments to F.
SJW77b87ad2020-04-21 14:37:52 -0500355 void GenerateSamplers();
David Neto862b7d82018-06-14 18:48:37 -0400356 // Generate OpVariables for %clspv.resource.var.* calls.
SJW77b87ad2020-04-21 14:37:52 -0500357 void GenerateResourceVars();
David Neto22f144c2017-06-12 14:26:21 -0400358 void GenerateFuncPrologue(Function &F);
359 void GenerateFuncBody(Function &F);
David Netob6e2e062018-04-25 10:32:06 -0400360 void GenerateEntryPointInitialStores();
David Neto22f144c2017-06-12 14:26:21 -0400361 spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
362 spv::Op GetSPIRVCastOpcode(Instruction &I);
363 spv::Op GetSPIRVBinaryOpcode(Instruction &I);
SJW806a5d82020-07-15 12:51:38 -0500364 SPIRVID GenerateClspvInstruction(CallInst *Call,
365 const FunctionInfo &FuncInfo);
366 SPIRVID GenerateImageInstruction(CallInst *Call,
367 const FunctionInfo &FuncInfo);
368 SPIRVID GenerateSubgroupInstruction(CallInst *Call,
369 const FunctionInfo &FuncInfo);
370 SPIRVID GenerateInstructionFromCall(CallInst *Call);
David Neto22f144c2017-06-12 14:26:21 -0400371 void GenerateInstruction(Instruction &I);
372 void GenerateFuncEpilogue();
373 void HandleDeferredInstruction();
SJW77b87ad2020-04-21 14:37:52 -0500374 void HandleDeferredDecorations();
David Neto22f144c2017-06-12 14:26:21 -0400375 bool is4xi8vec(Type *Ty) const;
376 spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
David Neto862b7d82018-06-14 18:48:37 -0400377 spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
David Neto22f144c2017-06-12 14:26:21 -0400378 spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
David Neto3fbb4072017-10-16 11:28:14 -0400379 // Returns the GLSL extended instruction enum that the given function
380 // call maps to. If none, then returns the 0 value, i.e. GLSLstd4580Bad.
SJW61531372020-06-09 07:31:08 -0500381 glsl::ExtInst getExtInstEnum(const Builtins::FunctionInfo &func_info);
David Neto3fbb4072017-10-16 11:28:14 -0400382 // Returns the GLSL extended instruction enum indirectly used by the given
383 // function. That is, to implement the given function, we use an extended
384 // instruction plus one more instruction. If none, then returns the 0 value,
385 // i.e. GLSLstd4580Bad.
SJW61531372020-06-09 07:31:08 -0500386 glsl::ExtInst getIndirectExtInstEnum(const Builtins::FunctionInfo &func_info);
David Neto3fbb4072017-10-16 11:28:14 -0400387 // Returns the single GLSL extended instruction used directly or
388 // indirectly by the given function call.
SJW61531372020-06-09 07:31:08 -0500389 glsl::ExtInst
390 getDirectOrIndirectExtInstEnum(const Builtins::FunctionInfo &func_info);
David Neto22f144c2017-06-12 14:26:21 -0400391 void WriteOneWord(uint32_t Word);
SJW88ed5fe2020-05-11 12:40:57 -0500392 void WriteResultID(const SPIRVInstruction &Inst);
393 void WriteWordCountAndOpcode(const SPIRVInstruction &Inst);
394 void WriteOperand(const SPIRVOperand &Op);
David Neto22f144c2017-06-12 14:26:21 -0400395 void WriteSPIRVBinary();
SJW69939d52020-04-16 07:29:07 -0500396 void WriteSPIRVBinary(SPIRVInstructionList &SPIRVInstList);
David Neto22f144c2017-06-12 14:26:21 -0400397
Alan Baker9bf93fb2018-08-28 16:59:26 -0400398 // Returns true if |type| is compatible with OpConstantNull.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500399 bool IsTypeNullable(const Type *type) const;
Alan Baker9bf93fb2018-08-28 16:59:26 -0400400
Alan Bakerfcda9482018-10-02 17:09:59 -0400401 // Populate UBO remapped type maps.
SJW77b87ad2020-04-21 14:37:52 -0500402 void PopulateUBOTypeMaps();
Alan Bakerfcda9482018-10-02 17:09:59 -0400403
alan-baker06cad652019-12-03 17:56:47 -0500404 // Populate the merge and continue block maps.
SJW77b87ad2020-04-21 14:37:52 -0500405 void PopulateStructuredCFGMaps();
alan-baker06cad652019-12-03 17:56:47 -0500406
Alan Bakerfcda9482018-10-02 17:09:59 -0400407 // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
408 // uses the internal map, otherwise it falls back on the data layout.
409 uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
410 uint64_t GetTypeStoreSize(Type *type, const DataLayout &DL);
411 uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);
Kévin Petitbbbda972020-03-03 19:16:31 +0000412 uint32_t GetExplicitLayoutStructMemberOffset(StructType *type,
413 unsigned member,
414 const DataLayout &DL);
Alan Bakerfcda9482018-10-02 17:09:59 -0400415
alan-baker5b86ed72019-02-15 08:26:50 -0500416 // Returns the base pointer of |v|.
417 Value *GetBasePointer(Value *v);
418
SJW01901d92020-05-21 08:58:31 -0500419 // Add Capability if not already (e.g. CapabilityGroupNonUniformBroadcast)
420 void addCapability(uint32_t c) { CapabilitySet.emplace(c); }
421
alan-baker5b86ed72019-02-15 08:26:50 -0500422 // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| base on
423 // |address_space|.
424 void setVariablePointersCapabilities(unsigned address_space);
425
426 // Returns true if |lhs| and |rhs| represent the same resource or workgroup
427 // variable.
428 bool sameResource(Value *lhs, Value *rhs) const;
429
430 // Returns true if |inst| is phi or select that selects from the same
431 // structure (or null).
432 bool selectFromSameObject(Instruction *inst);
433
alan-bakere9308012019-03-15 10:25:13 -0400434 // Returns true if |Arg| is called with a coherent resource.
435 bool CalledWithCoherentResource(Argument &Arg);
436
SJWf93f5f32020-05-05 07:27:56 -0500437 //
438 // Primary interface for adding SPIRVInstructions to a SPIRVSection.
439 template <enum SPIRVSection TSection = kFunctions>
440 SPIRVID addSPIRVInst(spv::Op Opcode, SPIRVOperandVec &Operands) {
441 bool has_result, has_result_type;
442 spv::HasResultAndType(Opcode, &has_result, &has_result_type);
443 SPIRVID RID = has_result ? incrNextID() : 0;
SJW88ed5fe2020-05-11 12:40:57 -0500444 SPIRVSections[TSection].emplace_back(Opcode, RID, Operands);
SJWf93f5f32020-05-05 07:27:56 -0500445 return RID;
446 }
447 template <enum SPIRVSection TSection = kFunctions>
448 SPIRVID addSPIRVInst(spv::Op Op) {
449 SPIRVOperandVec Ops;
450 return addSPIRVInst<TSection>(Op, Ops);
451 }
452 template <enum SPIRVSection TSection = kFunctions>
453 SPIRVID addSPIRVInst(spv::Op Op, uint32_t V) {
454 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -0500455 Ops.emplace_back(LITERAL_WORD, V);
SJWf93f5f32020-05-05 07:27:56 -0500456 return addSPIRVInst<TSection>(Op, Ops);
457 }
458 template <enum SPIRVSection TSection = kFunctions>
459 SPIRVID addSPIRVInst(spv::Op Op, const char *V) {
460 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -0500461 Ops.emplace_back(LITERAL_STRING, V);
SJWf93f5f32020-05-05 07:27:56 -0500462 return addSPIRVInst<TSection>(Op, Ops);
463 }
464
SJW88ed5fe2020-05-11 12:40:57 -0500465 //
466 // Add placeholder for llvm::Value that references future values.
467 // Must have result ID just in case final SPIRVInstruction requires.
468 SPIRVID addSPIRVPlaceholder(Value *I) {
469 SPIRVID RID = incrNextID();
470 SPIRVOperandVec Ops;
471 SPIRVSections[kFunctions].emplace_back(spv::OpExtInst, RID, Ops);
472 DeferredInstVec.push_back({I, &SPIRVSections[kFunctions].back()});
473 return RID;
474 }
475 // Replace placeholder with actual SPIRVInstruction on the final pass
476 // (HandleDeferredInstruction).
477 SPIRVID replaceSPIRVInst(SPIRVInstruction *I, spv::Op Opcode,
478 SPIRVOperandVec &Operands) {
479 bool has_result, has_result_type;
480 spv::HasResultAndType(Opcode, &has_result, &has_result_type);
481 SPIRVID RID = has_result ? I->getResultID() : 0;
482 *I = SPIRVInstruction(Opcode, RID, Operands);
483 return RID;
484 }
485
SJW806a5d82020-07-15 12:51:38 -0500486 //
487 // Add global variable and capture entry point interface
488 SPIRVID addSPIRVGlobalVariable(const SPIRVID &TypeID, spv::StorageClass SC,
489 const SPIRVID &InitID = SPIRVID());
490
alan-baker86ce19c2020-08-05 13:09:19 -0400491 SPIRVID getReflectionImport();
492 void GenerateReflection();
493 void GenerateKernelReflection();
494 void GeneratePushConstantReflection();
495 void GenerateSpecConstantReflection();
496 void AddArgumentReflection(SPIRVID kernel_decl, const std::string &name,
497 clspv::ArgKind arg_kind, uint32_t ordinal,
498 uint32_t descriptor_set, uint32_t binding,
499 uint32_t offset, uint32_t size, uint32_t spec_id,
500 uint32_t elem_size);
501
David Neto22f144c2017-06-12 14:26:21 -0400502private:
503 static char ID;
SJW77b87ad2020-04-21 14:37:52 -0500504
505 Module *module;
506
SJW01901d92020-05-21 08:58:31 -0500507 // Set of Capabilities required
508 CapabilitySetType CapabilitySet;
509
SJW806a5d82020-07-15 12:51:38 -0500510 // Map from clspv::BuiltinType to SPIRV Global Variable
511 BuiltinConstantMapType BuiltinConstantMap;
512
David Neto44795152017-07-13 15:45:28 -0400513 ArrayRef<std::pair<unsigned, std::string>> samplerMap;
David Neto22f144c2017-06-12 14:26:21 -0400514 raw_pwrite_stream &out;
David Neto0676e6f2017-07-11 18:47:44 -0400515
516 // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
517 // convert to other formats on demand?
518
519 // When emitting a C initialization list, the WriteSPIRVBinary method
520 // will actually write its words to this vector via binaryTempOut.
521 SmallVector<char, 100> binaryTempUnderlyingVector;
522 raw_svector_ostream binaryTempOut;
523
524 // Binary output writes to this stream, which might be |out| or
525 // |binaryTempOut|. It's the latter when we really want to write a C
526 // initializer list.
Diego Novillo3cc8d7a2019-04-10 13:30:34 -0400527 raw_pwrite_stream *binaryOut;
David Neto0676e6f2017-07-11 18:47:44 -0400528 const bool outputCInitList; // If true, output look like {0x7023, ... , 5}
David Neto22f144c2017-06-12 14:26:21 -0400529 uint64_t patchBoundOffset;
530 uint32_t nextID;
531
SJWf93f5f32020-05-05 07:27:56 -0500532 SPIRVID incrNextID() { return nextID++; }
533
alan-bakerf67468c2019-11-25 15:51:49 -0500534 // ID for OpTypeInt 32 1.
SJW01901d92020-05-21 08:58:31 -0500535 SPIRVID int32ID;
alan-bakerf67468c2019-11-25 15:51:49 -0500536 // ID for OpTypeVector %int 4.
SJW01901d92020-05-21 08:58:31 -0500537 SPIRVID v4int32ID;
alan-bakerf67468c2019-11-25 15:51:49 -0500538
David Neto19a1bad2017-08-25 15:01:41 -0400539 // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
David Neto22f144c2017-06-12 14:26:21 -0400540 TypeMapType TypeMap;
David Neto19a1bad2017-08-25 15:01:41 -0400541 // Maps an LLVM image type to its SPIR-V ID.
David Neto22f144c2017-06-12 14:26:21 -0400542 TypeMapType ImageTypeMap;
David Neto19a1bad2017-08-25 15:01:41 -0400543 // A unique-vector of LLVM types that map to a SPIR-V type.
David Neto22f144c2017-06-12 14:26:21 -0400544 TypeList Types;
David Neto19a1bad2017-08-25 15:01:41 -0400545 // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
David Neto22f144c2017-06-12 14:26:21 -0400546 ValueMapType ValueMap;
SJW69939d52020-04-16 07:29:07 -0500547 SPIRVInstructionList SPIRVSections[kSectionCount];
David Neto862b7d82018-06-14 18:48:37 -0400548
David Neto22f144c2017-06-12 14:26:21 -0400549 EntryPointVecType EntryPointVec;
550 DeferredInstVecType DeferredInstVec;
SJW806a5d82020-07-15 12:51:38 -0500551 SPIRVIDListType EntryPointInterfacesList;
SJW01901d92020-05-21 08:58:31 -0500552 SPIRVID OpExtInstImportID;
553 std::vector<SPIRVID> BuiltinDimensionVec;
alan-baker5b86ed72019-02-15 08:26:50 -0500554 bool HasVariablePointersStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -0400555 bool HasVariablePointers;
556 Type *SamplerTy;
SJW01901d92020-05-21 08:58:31 -0500557 DenseMap<unsigned, SPIRVID> SamplerLiteralToIDMap;
David Netoc77d9e22018-03-24 06:30:28 -0700558
559 // If a function F has a pointer-to-__constant parameter, then this variable
David Neto9ed8e2f2018-03-24 06:47:24 -0700560 // will map F's type to (G, index of the parameter), where in a first phase
561 // G is F's type. During FindTypePerFunc, G will be changed to F's type
562 // but replacing the pointer-to-constant parameter with
563 // pointer-to-ModuleScopePrivate.
David Netoc77d9e22018-03-24 06:30:28 -0700564 // TODO(dneto): This doesn't seem general enough? A function might have
565 // more than one such parameter.
David Neto22f144c2017-06-12 14:26:21 -0400566 GlobalConstFuncMapType GlobalConstFuncTypeMap;
567 SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
David Neto1a1a0582017-07-07 12:01:44 -0400568 // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
David Neto85082642018-03-24 06:55:20 -0700569 // or array types, and which point into transparent memory (StorageBuffer
570 // storage class). These will require an ArrayStride decoration.
David Neto1a1a0582017-07-07 12:01:44 -0400571 // See SPV_KHR_variable_pointers rev 13.
David Neto85082642018-03-24 06:55:20 -0700572 TypeList TypesNeedingArrayStride;
David Netoa60b00b2017-09-15 16:34:09 -0400573
574 // This is truly ugly, but works around what look like driver bugs.
575 // For get_local_size, an earlier part of the flow has created a module-scope
576 // variable in Private address space to hold the value for the workgroup
577 // size. Its intializer is a uint3 value marked as builtin WorkgroupSize.
578 // When this is present, save the IDs of the initializer value and variable
579 // in these two variables. We only ever do a vector load from it, and
580 // when we see one of those, substitute just the value of the intializer.
581 // This mimics what Glslang does, and that's what drivers are used to.
David Neto66cfe642018-03-24 06:13:56 -0700582 // TODO(dneto): Remove this once drivers are fixed.
SJW01901d92020-05-21 08:58:31 -0500583 SPIRVID WorkgroupSizeValueID;
584 SPIRVID WorkgroupSizeVarID;
David Neto26aaf622017-10-23 18:11:53 -0400585
David Neto862b7d82018-06-14 18:48:37 -0400586 // Bookkeeping for mapping kernel arguments to resource variables.
587 struct ResourceVarInfo {
588 ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
alan-bakere9308012019-03-15 10:25:13 -0400589 Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
David Neto862b7d82018-06-14 18:48:37 -0400590 : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
alan-bakere9308012019-03-15 10:25:13 -0400591 var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
David Neto862b7d82018-06-14 18:48:37 -0400592 addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
593 const int index; // Index into ResourceVarInfoList
594 const unsigned descriptor_set;
595 const unsigned binding;
596 Function *const var_fn; // The @clspv.resource.var.* function.
597 const clspv::ArgKind arg_kind;
alan-bakere9308012019-03-15 10:25:13 -0400598 const int coherent;
David Neto862b7d82018-06-14 18:48:37 -0400599 const unsigned addr_space; // The LLVM address space
600 // The SPIR-V ID of the OpVariable. Not populated at construction time.
SJW01901d92020-05-21 08:58:31 -0500601 SPIRVID var_id;
David Neto862b7d82018-06-14 18:48:37 -0400602 };
  // A list of resource var info. Each one corresponds to a module-scope
604 // resource variable we will have to create. Resource var indices are
605 // indices into this vector.
606 SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
607 // This is a vector of pointers of all the resource vars, but ordered by
608 // kernel function, and then by argument.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500609 UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
David Neto862b7d82018-06-14 18:48:37 -0400610 // Map a function to the ordered list of resource variables it uses, one for
611 // each argument. If an argument does not use a resource variable, it
612 // will have a null pointer entry.
613 using FunctionToResourceVarsMapType =
614 DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
615 FunctionToResourceVarsMapType FunctionToResourceVarsMap;
616
617 // What LLVM types map to SPIR-V types needing layout? These are the
618 // arrays and structures supporting storage buffers and uniform buffers.
619 TypeList TypesNeedingLayout;
620 // What LLVM struct types map to a SPIR-V struct type with Block decoration?
621 UniqueVector<StructType *> StructTypesNeedingBlock;
622 // For a call that represents a load from an opaque type (samplers, images),
623 // map it to the variable id it should load from.
SJW01901d92020-05-21 08:58:31 -0500624 DenseMap<CallInst *, SPIRVID> ResourceVarDeferredLoadCalls;
David Neto85082642018-03-24 06:55:20 -0700625
David Netoc6f3ab22018-04-06 18:02:31 -0400626 // An ordered list of the kernel arguments of type pointer-to-local.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500627 using LocalArgList = SmallVector<Argument *, 8>;
David Netoc6f3ab22018-04-06 18:02:31 -0400628 LocalArgList LocalArgs;
629 // Information about a pointer-to-local argument.
  struct LocalArgInfo {
    // The SPIR-V ID of the array variable backing this argument.
    SPIRVID variable_id;
    // The element type of the array.
    Type *elem_type;
    // The ID of the array size constant (its SpecId is spec_id below).
    SPIRVID array_size_id;
    // The ID of the array type.
    SPIRVID array_type_id;
    // The ID of the pointer to the array type.
    SPIRVID ptr_array_type_id;
    // The specialization constant ID of the array size.
    int spec_id;
  };
Alan Baker202c8c72018-08-13 13:47:44 -0400644 // A mapping from Argument to its assigned SpecId.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500645 DenseMap<const Argument *, int> LocalArgSpecIds;
Alan Baker202c8c72018-08-13 13:47:44 -0400646 // A mapping from SpecId to its LocalArgInfo.
647 DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
Alan Bakerfcda9482018-10-02 17:09:59 -0400648 // A mapping from a remapped type to its real offsets.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500649 DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
Alan Bakerfcda9482018-10-02 17:09:59 -0400650 // A mapping from a remapped type to its real sizes.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500651 DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
652 RemappedUBOTypeSizes;
alan-baker06cad652019-12-03 17:56:47 -0500653
654 // Maps basic block to its merge block.
655 DenseMap<BasicBlock *, BasicBlock *> MergeBlocks;
656 // Maps basic block to its continue block.
657 DenseMap<BasicBlock *, BasicBlock *> ContinueBlocks;
SJW01901d92020-05-21 08:58:31 -0500658
alan-baker86ce19c2020-08-05 13:09:19 -0400659 SPIRVID ReflectionID;
660 DenseMap<Function *, SPIRVID> KernelDeclarations;
661
SJW01901d92020-05-21 08:58:31 -0500662public:
663 static SPIRVProducerPass *Ptr;
David Neto22f144c2017-06-12 14:26:21 -0400664};
665
// Static identifier used by LLVM's pass registration machinery.
char SPIRVProducerPass::ID;
// Singleton pointer to the active producer; the free operator<< helpers use
// it to resolve LLVM types/values to their SPIR-V IDs.
SPIRVProducerPass *SPIRVProducerPass::Ptr = nullptr;
David Netoc6f3ab22018-04-06 18:02:31 -0400668
alan-bakerb6b09dc2018-11-08 16:59:28 -0500669} // namespace
David Neto22f144c2017-06-12 14:26:21 -0400670
namespace clspv {
// Factory for the SPIR-V producer pass.
// |out| receives the SPIR-V binary, re-encoded as a C initializer list when
// |outputCInitList| is true. |samplerMap| carries (literal, description)
// pairs — presumably from the sampler map file; confirm with callers.
// The caller (LLVM's pass manager) takes ownership of the returned pass.
ModulePass *
createSPIRVProducerPass(raw_pwrite_stream &out,
                        ArrayRef<std::pair<unsigned, std::string>> samplerMap,
                        bool outputCInitList) {
  return new SPIRVProducerPass(out, samplerMap, outputCInitList);
}
} // namespace clspv
David Neto22f144c2017-06-12 14:26:21 -0400679
SJW01901d92020-05-21 08:58:31 -0500680namespace {
681SPIRVOperandVec &operator<<(SPIRVOperandVec &list, uint32_t num) {
682 list.emplace_back(LITERAL_WORD, num);
683 return list;
684}
685
686SPIRVOperandVec &operator<<(SPIRVOperandVec &list, int32_t num) {
687 list.emplace_back(LITERAL_WORD, static_cast<uint32_t>(num));
688 return list;
689}
690
691SPIRVOperandVec &operator<<(SPIRVOperandVec &list, ArrayRef<uint32_t> num_vec) {
692 list.emplace_back(num_vec);
693 return list;
694}
695
696SPIRVOperandVec &operator<<(SPIRVOperandVec &list, StringRef str) {
697 list.emplace_back(LITERAL_STRING, str);
698 return list;
699}
700
701SPIRVOperandVec &operator<<(SPIRVOperandVec &list, Type *t) {
702 list.emplace_back(NUMBERID, SPIRVProducerPass::Ptr->getSPIRVType(t).get());
703 return list;
704}
705
706SPIRVOperandVec &operator<<(SPIRVOperandVec &list, Value *v) {
707 list.emplace_back(NUMBERID, SPIRVProducerPass::Ptr->getSPIRVValue(v).get());
708 return list;
709}
710
SJW806a5d82020-07-15 12:51:38 -0500711SPIRVOperandVec &operator<<(SPIRVOperandVec &list, const SPIRVID &v) {
SJW01901d92020-05-21 08:58:31 -0500712 list.emplace_back(NUMBERID, v.get());
713 return list;
714}
715} // namespace
716
SJW77b87ad2020-04-21 14:37:52 -0500717bool SPIRVProducerPass::runOnModule(Module &M) {
SJW01901d92020-05-21 08:58:31 -0500718 // TODO(sjw): Need to reset all data members for each Module, or better
719 // yet create a new SPIRVProducer for every module.. For now only
720 // allow 1 call.
721 assert(module == nullptr);
SJW77b87ad2020-04-21 14:37:52 -0500722 module = &M;
alan-baker5ed87542020-03-23 11:05:22 -0400723 if (ShowProducerIR) {
SJW77b87ad2020-04-21 14:37:52 -0500724 llvm::outs() << *module << "\n";
alan-baker5ed87542020-03-23 11:05:22 -0400725 }
David Neto0676e6f2017-07-11 18:47:44 -0400726 binaryOut = outputCInitList ? &binaryTempOut : &out;
727
SJW77b87ad2020-04-21 14:37:52 -0500728 PopulateUBOTypeMaps();
729 PopulateStructuredCFGMaps();
Alan Bakerfcda9482018-10-02 17:09:59 -0400730
David Neto22f144c2017-06-12 14:26:21 -0400731 // SPIR-V always begins with its header information
732 outputHeader();
733
734 // Gather information from the LLVM IR that we require.
SJW77b87ad2020-04-21 14:37:52 -0500735 GenerateLLVMIRInfo();
David Neto22f144c2017-06-12 14:26:21 -0400736
David Neto22f144c2017-06-12 14:26:21 -0400737 // Collect information on global variables too.
SJW77b87ad2020-04-21 14:37:52 -0500738 for (GlobalVariable &GV : module->globals()) {
David Neto22f144c2017-06-12 14:26:21 -0400739 // If the GV is one of our special __spirv_* variables, remove the
740 // initializer as it was only placed there to force LLVM to not throw the
741 // value away.
Kévin Petitbbbda972020-03-03 19:16:31 +0000742 if (GV.getName().startswith("__spirv_") ||
743 GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
David Neto22f144c2017-06-12 14:26:21 -0400744 GV.setInitializer(nullptr);
745 }
746
747 // Collect types' information from global variable.
748 FindTypePerGlobalVar(GV);
David Neto22f144c2017-06-12 14:26:21 -0400749 }
750
David Neto22f144c2017-06-12 14:26:21 -0400751 // Generate SPIRV instructions for types.
SJW77b87ad2020-04-21 14:37:52 -0500752 GenerateSPIRVTypes();
David Neto22f144c2017-06-12 14:26:21 -0400753
alan-baker09cb9802019-12-10 13:16:27 -0500754 // Generate literal samplers if necessary.
SJW77b87ad2020-04-21 14:37:52 -0500755 GenerateSamplers();
David Neto22f144c2017-06-12 14:26:21 -0400756
757 // Generate SPIRV variables.
SJW77b87ad2020-04-21 14:37:52 -0500758 for (GlobalVariable &GV : module->globals()) {
David Neto22f144c2017-06-12 14:26:21 -0400759 GenerateGlobalVar(GV);
760 }
SJW77b87ad2020-04-21 14:37:52 -0500761 GenerateResourceVars();
762 GenerateWorkgroupVars();
David Neto22f144c2017-06-12 14:26:21 -0400763
764 // Generate SPIRV instructions for each function.
SJW77b87ad2020-04-21 14:37:52 -0500765 for (Function &F : *module) {
David Neto22f144c2017-06-12 14:26:21 -0400766 if (F.isDeclaration()) {
767 continue;
768 }
769
770 // Generate Function Prologue.
771 GenerateFuncPrologue(F);
772
773 // Generate SPIRV instructions for function body.
774 GenerateFuncBody(F);
775
776 // Generate Function Epilogue.
777 GenerateFuncEpilogue();
778 }
779
780 HandleDeferredInstruction();
SJW77b87ad2020-04-21 14:37:52 -0500781 HandleDeferredDecorations();
alan-bakera1be3322020-04-20 12:48:18 -0400782
David Neto22f144c2017-06-12 14:26:21 -0400783 // Generate SPIRV module information.
SJW77b87ad2020-04-21 14:37:52 -0500784 GenerateModuleInfo();
David Neto22f144c2017-06-12 14:26:21 -0400785
alan-baker86ce19c2020-08-05 13:09:19 -0400786 // Generate embedded reflection information.
787 GenerateReflection();
788
alan-baker00e7a582019-06-07 12:54:21 -0400789 WriteSPIRVBinary();
David Neto22f144c2017-06-12 14:26:21 -0400790
791 // We need to patch the SPIR-V header to set bound correctly.
792 patchHeader();
David Neto0676e6f2017-07-11 18:47:44 -0400793
794 if (outputCInitList) {
795 bool first = true;
David Neto0676e6f2017-07-11 18:47:44 -0400796 std::ostringstream os;
797
David Neto57fb0b92017-08-04 15:35:09 -0400798 auto emit_word = [&os, &first](uint32_t word) {
David Neto0676e6f2017-07-11 18:47:44 -0400799 if (!first)
David Neto57fb0b92017-08-04 15:35:09 -0400800 os << ",\n";
801 os << word;
David Neto0676e6f2017-07-11 18:47:44 -0400802 first = false;
803 };
804
805 os << "{";
David Neto57fb0b92017-08-04 15:35:09 -0400806 const std::string str(binaryTempOut.str());
807 for (unsigned i = 0; i < str.size(); i += 4) {
808 const uint32_t a = static_cast<unsigned char>(str[i]);
809 const uint32_t b = static_cast<unsigned char>(str[i + 1]);
810 const uint32_t c = static_cast<unsigned char>(str[i + 2]);
811 const uint32_t d = static_cast<unsigned char>(str[i + 3]);
812 emit_word(a | (b << 8) | (c << 16) | (d << 24));
David Neto0676e6f2017-07-11 18:47:44 -0400813 }
814 os << "}\n";
815 out << os.str();
816 }
817
David Neto22f144c2017-06-12 14:26:21 -0400818 return false;
819}
820
821void SPIRVProducerPass::outputHeader() {
alan-baker00e7a582019-06-07 12:54:21 -0400822 binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
823 sizeof(spv::MagicNumber));
SJW806a5d82020-07-15 12:51:38 -0500824 uint32_t minor = 0;
825 if (SpvVersion() == SPIRVVersion::SPIRV_1_3) {
826 minor = 3;
827 }
828 uint32_t version = (1 << 16) | (minor << 8);
829 binaryOut->write(reinterpret_cast<const char *>(&version), sizeof(version));
David Neto22f144c2017-06-12 14:26:21 -0400830
alan-baker0c18ab02019-06-12 10:23:21 -0400831 // use Google's vendor ID
832 const uint32_t vendor = 21 << 16;
alan-baker00e7a582019-06-07 12:54:21 -0400833 binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));
David Neto22f144c2017-06-12 14:26:21 -0400834
alan-baker00e7a582019-06-07 12:54:21 -0400835 // we record where we need to come back to and patch in the bound value
836 patchBoundOffset = binaryOut->tell();
David Neto22f144c2017-06-12 14:26:21 -0400837
alan-baker00e7a582019-06-07 12:54:21 -0400838 // output a bad bound for now
839 binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));
David Neto22f144c2017-06-12 14:26:21 -0400840
alan-baker00e7a582019-06-07 12:54:21 -0400841 // output the schema (reserved for use and must be 0)
842 const uint32_t schema = 0;
843 binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
David Neto22f144c2017-06-12 14:26:21 -0400844}
845
void SPIRVProducerPass::patchHeader() {
  // for a binary we just write the value of nextID over bound
  // (patchBoundOffset was recorded in outputHeader when the placeholder
  // bound word was emitted).
  binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
                    patchBoundOffset);
}
851
SJW77b87ad2020-04-21 14:37:52 -0500852void SPIRVProducerPass::GenerateLLVMIRInfo() {
David Neto22f144c2017-06-12 14:26:21 -0400853 // This function generates LLVM IR for function such as global variable for
854 // argument, constant and pointer type for argument access. These information
855 // is artificial one because we need Vulkan SPIR-V output. This function is
856 // executed ahead of FindType and FindConstant.
David Neto22f144c2017-06-12 14:26:21 -0400857
SJW77b87ad2020-04-21 14:37:52 -0500858 FindGlobalConstVars();
David Neto5c22a252018-03-15 16:07:41 -0400859
SJW77b87ad2020-04-21 14:37:52 -0500860 FindResourceVars();
David Neto22f144c2017-06-12 14:26:21 -0400861
862 bool HasWorkGroupBuiltin = false;
SJW77b87ad2020-04-21 14:37:52 -0500863 for (GlobalVariable &GV : module->globals()) {
David Neto22f144c2017-06-12 14:26:21 -0400864 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
865 if (spv::BuiltInWorkgroupSize == BuiltinType) {
866 HasWorkGroupBuiltin = true;
867 }
868 }
869
SJW77b87ad2020-04-21 14:37:52 -0500870 FindTypesForSamplerMap();
871 FindTypesForResourceVars();
David Neto22f144c2017-06-12 14:26:21 -0400872}
873
// Collects module-scope __constant variables and rewrites them for Vulkan:
// dead ones are deleted; live ones are either size-checked for the single
// clustered storage buffer, or relocated into the ModuleScopePrivate
// address space with their uses redirected.
void SPIRVProducerPass::FindGlobalConstVars() {
  clspv::NormalizeGlobalVariables(*module);
  const DataLayout &DL = module->getDataLayout();

  // Partition __constant globals into live and dead lists.
  SmallVector<GlobalVariable *, 8> GVList;
  SmallVector<GlobalVariable *, 8> DeadGVList;
  for (GlobalVariable &GV : module->globals()) {
    if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
      if (GV.use_empty()) {
        DeadGVList.push_back(&GV);
      } else {
        GVList.push_back(&GV);
      }
    }
  }

  // Remove dead global __constant variables.
  for (auto GV : DeadGVList) {
    GV->eraseFromParent();
  }
  DeadGVList.clear();

  if (clspv::Option::ModuleConstantsInStorageBuffer()) {
    // For now, we only support a single storage buffer: an earlier phase is
    // assumed to have clustered all module constants into one variable.
    // Enforce the supported capacity.
    if (!GVList.empty()) {
      assert(GVList.size() == 1);
      const auto *GV = GVList[0];
      const auto constants_byte_size =
          (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
      const size_t kConstantMaxSize = 65536;
      if (constants_byte_size > kConstantMaxSize) {
        outs() << "Max __constant capacity of " << kConstantMaxSize
               << " bytes exceeded: " << constants_byte_size << " bytes used\n";
        llvm_unreachable("Max __constant capacity exceeded");
      }
    }
  } else {
    // Change global constant variable's address space to ModuleScopePrivate.
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    for (auto GV : GVList) {
      // Create new gv with ModuleScopePrivate address space.
      Type *NewGVTy = GV->getType()->getPointerElementType();
      GlobalVariable *NewGV = new GlobalVariable(
          *module, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
          nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
      NewGV->takeName(GV);

      // Snapshot the user list before mutating uses below.
      const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
      SmallVector<User *, 8> CandidateUsers;

      // Record, per called function type, which parameter receives this
      // global constant so FindTypePerFunc can later rewrite that
      // parameter's address space to ModuleScopePrivate.
      auto record_called_function_type_as_user =
          [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
            // Find argument index.
            unsigned index = 0;
            for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
              if (gv == call->getOperand(i)) {
                // TODO(dneto): Should we break here?
                index = i;
              }
            }

            // Record function type with global constant.
            GlobalConstFuncTyMap[call->getFunctionType()] =
                std::make_pair(call->getFunctionType(), index);
          };

      for (User *GVU : GVUsers) {
        if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
          record_called_function_type_as_user(GV, Call);
        } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
          // Check GEP users: the constant may be passed via a GEP result.
          for (User *GEPU : GEP->users()) {
            if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
              record_called_function_type_as_user(GEP, GEPCall);
            }
          }
        }

        CandidateUsers.push_back(GVU);
      }

      for (User *U : CandidateUsers) {
        // Update users of gv with new gv.
        if (!isa<Constant>(U)) {
          // #254: Can't change operands of a constant, but this shouldn't be
          // something that sticks around in the module.
          U->replaceUsesOfWith(GV, NewGV);
        }
      }

      // Delete original gv.
      GV->eraseFromParent();
    }
  }
}
969
// Rebuilds ResourceVarInfoList, FunctionToResourceVarsMap and
// ModuleOrderedResourceVars from the @clspv.resource.var.* builtin calls
// in the module.
void SPIRVProducerPass::FindResourceVars() {
  ResourceVarInfoList.clear();
  FunctionToResourceVarsMap.clear();
  ModuleOrderedResourceVars.reset();
  // Normally, there is one resource variable per clspv.resource.var.*
  // function, since that is unique'd by arg type and index. By design,
  // we can share these resource variables across kernels because all
  // kernels use the same descriptor set.
  //
  // But if the user requested distinct descriptor sets per kernel, then
  // the descriptor allocator has made different (set,binding) pairs for
  // the same (type,arg_index) pair. Since we can decorate a resource
  // variable with only exactly one DescriptorSet and Binding, we are
  // forced in this case to make distinct resource variables whenever
  // the same clspv.resource.var.X function is seen with distinct
  // (set,binding) values.
  const bool always_distinct_sets =
      clspv::Option::DistinctKernelDescriptorSets();
  for (Function &F : *module) {
    // Rely on the fact the resource var functions have a stable ordering
    // in the module.
    if (Builtins::Lookup(&F) == Builtins::kClspvResource) {
      // Find all calls to this function with distinct set and binding pairs.
      // Save them in ResourceVarInfoList.

      // Determine uniqueness of the (set,binding) pairs only within this
      // one resource-var builtin function.
      using SetAndBinding = std::pair<unsigned, unsigned>;
      // Maps set and binding to the resource var info.
      DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
      bool first_use = true;
      for (auto &U : F.uses()) {
        if (auto *call = dyn_cast<CallInst>(U.getUser())) {
          // Decode the call's constant operands: (set, binding, arg kind,
          // arg index, ..., coherent). Operand 4 is intentionally skipped.
          const auto set = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
          const auto binding = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
          const auto arg_kind = clspv::ArgKind(
              dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
          const auto arg_index = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
          const auto coherent = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());

          // Find or make the resource var info for this combination.
          ResourceVarInfo *rv = nullptr;
          if (always_distinct_sets) {
            // Make a new resource var any time we see a different
            // (set,binding) pair.
            SetAndBinding key{set, binding};
            auto where = set_and_binding_map.find(key);
            if (where == set_and_binding_map.end()) {
              rv = new ResourceVarInfo(
                  static_cast<int>(ResourceVarInfoList.size()), set, binding,
                  &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
              set_and_binding_map[key] = rv;
            } else {
              rv = where->second;
            }
          } else {
            // The default is to make exactly one resource for each
            // clspv.resource.var.* function.
            if (first_use) {
              first_use = false;
              rv = new ResourceVarInfo(
                  static_cast<int>(ResourceVarInfoList.size()), set, binding,
                  &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
            } else {
              rv = ResourceVarInfoList.back().get();
            }
          }

          // Now populate FunctionToResourceVarsMap, padding with nulls so
          // the vector is indexable by argument position.
          auto &mapping =
              FunctionToResourceVarsMap[call->getParent()->getParent()];
          while (mapping.size() <= arg_index) {
            mapping.push_back(nullptr);
          }
          mapping[arg_index] = rv;
        }
      }
    }
  }

  // Populate ModuleOrderedResourceVars: ordered by kernel function, then
  // by argument position.
  for (Function &F : *module) {
    auto where = FunctionToResourceVarsMap.find(&F);
    if (where != FunctionToResourceVarsMap.end()) {
      for (auto &rv : where->second) {
        if (rv != nullptr) {
          ModuleOrderedResourceVars.insert(rv);
        }
      }
    }
  }
  if (ShowResourceVars) {
    for (auto *info : ModuleOrderedResourceVars) {
      outs() << "MORV index " << info->index << " (" << info->descriptor_set
             << "," << info->binding << ") " << *(info->var_fn->getReturnType())
             << "\n";
    }
  }
}
1075
// Registers a module-scope variable's type so a SPIR-V type will be
// generated for it.
void SPIRVProducerPass::FindTypePerGlobalVar(GlobalVariable &GV) {
  // Investigate global variable's type.
  FindType(GV.getType());
}
1080
// Registers (via FindType) every type this function needs a SPIR-V type
// for: its signature and the types used in its body. Several instruction
// kinds are special-cased because their SPIR-V translation takes literal
// operands or because the instruction will be replaced entirely.
void SPIRVProducerPass::FindTypePerFunc(Function &F) {
  // Investigate function's type.
  FunctionType *FTy = F.getFunctionType();

  if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    // Handle a regular function with global constant parameters: rewrite
    // the recorded pointer-to-__constant parameter to point into
    // ModuleScopePrivate instead (see FindGlobalConstVars).
    if (GlobalConstFuncTyMap.count(FTy)) {
      // (GlobalConstFuncTypeMap is the member backing the local reference
      // bound above; this reads the same map.)
      uint32_t GVCstArgIdx = GlobalConstFuncTypeMap[FTy].second;
      SmallVector<Type *, 4> NewFuncParamTys;
      for (unsigned i = 0; i < FTy->getNumParams(); i++) {
        Type *ParamTy = FTy->getParamType(i);
        if (i == GVCstArgIdx) {
          Type *EleTy = ParamTy->getPointerElementType();
          ParamTy = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
        }

        NewFuncParamTys.push_back(ParamTy);
      }

      FunctionType *NewFTy =
          FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
      GlobalConstFuncTyMap[FTy] = std::make_pair(NewFTy, GVCstArgIdx);
      FTy = NewFTy;
    }

    FindType(FTy);
  } else {
    // As kernel functions do not have parameters, create new function type and
    // add it to type map.
    SmallVector<Type *, 4> NewFuncParamTys;
    FunctionType *NewFTy =
        FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
    FindType(NewFTy);
  }

  // Investigate instructions' type in function body.
  for (BasicBlock &BB : F) {
    for (Instruction &I : BB) {
      if (isa<ShuffleVectorInst>(I)) {
        for (unsigned i = 0; i < I.getNumOperands(); i++) {
          // Ignore type for mask of shuffle vector instruction.
          if (i == 2) {
            continue;
          }

          Value *Op = I.getOperand(i);
          if (!isa<MetadataAsValue>(Op)) {
            FindType(Op->getType());
          }
        }

        FindType(I.getType());
        continue;
      }

      CallInst *Call = dyn_cast<CallInst>(&I);

      if (Call) {
        auto &func_info = Builtins::Lookup(Call->getCalledFunction());
        if (func_info.getType() == Builtins::kClspvResource ||
            func_info.getType() == Builtins::kClspvLocal) {
          // This is a fake call representing access to a resource/workgroup
          // variable. We handle that elsewhere.
          continue;
        }
      }

      // #497: InsertValue and ExtractValue map to OpCompositeInsert and
      // OpCompositeExtract which take literal values for indices. As a result
      // don't map the type of indices.
      if (I.getOpcode() == Instruction::ExtractValue) {
        FindType(I.getOperand(0)->getType());
        continue;
      }
      if (I.getOpcode() == Instruction::InsertValue) {
        FindType(I.getOperand(0)->getType());
        FindType(I.getOperand(1)->getType());
        continue;
      }

      // #497: InsertElement and ExtractElement map to OpCompositeExtract if
      // the index is a constant. In such a case don't map the index type.
      if (I.getOpcode() == Instruction::ExtractElement) {
        FindType(I.getOperand(0)->getType());
        Value *op1 = I.getOperand(1);
        if (!isa<Constant>(op1) || isa<GlobalValue>(op1)) {
          FindType(op1->getType());
        }
        continue;
      }
      if (I.getOpcode() == Instruction::InsertElement) {
        FindType(I.getOperand(0)->getType());
        FindType(I.getOperand(1)->getType());
        Value *op2 = I.getOperand(2);
        if (!isa<Constant>(op2) || isa<GlobalValue>(op2)) {
          FindType(op2->getType());
        }
        continue;
      }

      // Work through the operands of the instruction.
      for (unsigned i = 0; i < I.getNumOperands(); i++) {
        Value *const Op = I.getOperand(i);
        // If any of the operands is a constant, find the type!
        if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
          FindType(Op->getType());
        }
      }

      for (Use &Op : I.operands()) {
        if (isa<CallInst>(&I)) {
          // Avoid to check call instruction's type.
          break;
        }
        if (CallInst *OpCall = dyn_cast<CallInst>(Op)) {
          if (Builtins::Lookup(OpCall->getCalledFunction()) ==
              Builtins::kClspvLocal) {
            // This is a fake call representing access to a workgroup variable.
            // We handle that elsewhere.
            continue;
          }
        }
        if (!isa<MetadataAsValue>(&Op)) {
          FindType(Op->getType());
          continue;
        }
      }

      // We don't want to track the type of this call as we are going to replace
      // it.
      if (Call && Builtins::Lookup(Call->getCalledFunction()) ==
                      Builtins::kClspvSamplerVarLiteral) {
        continue;
      }

      if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(&I)) {
        // If gep's base operand has ModuleScopePrivate address space, make gep
        // return ModuleScopePrivate address space.
        if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate) {
          // Add pointer type with private address space for global constant to
          // type list.
          Type *EleTy = I.getType()->getPointerElementType();
          Type *NewPTy =
              PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);

          FindType(NewPTy);
          continue;
        }
      }

      FindType(I.getType());
    }
  }
}
1236
SJW77b87ad2020-04-21 14:37:52 -05001237void SPIRVProducerPass::FindTypesForSamplerMap() {
David Neto862b7d82018-06-14 18:48:37 -04001238 // If we are using a sampler map, find the type of the sampler.
SJW77b87ad2020-04-21 14:37:52 -05001239 if (module->getFunction(clspv::LiteralSamplerFunction()) ||
alan-baker7506abb2020-09-10 15:02:55 -04001240 !getSamplerMap().empty()) {
SJW77b87ad2020-04-21 14:37:52 -05001241 auto SamplerStructTy = module->getTypeByName("opencl.sampler_t");
David Neto862b7d82018-06-14 18:48:37 -04001242 if (!SamplerStructTy) {
SJW77b87ad2020-04-21 14:37:52 -05001243 SamplerStructTy =
1244 StructType::create(module->getContext(), "opencl.sampler_t");
David Neto862b7d82018-06-14 18:48:37 -04001245 }
1246
1247 SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);
1248
1249 FindType(SamplerTy);
1250 }
1251}
1252
SJW77b87ad2020-04-21 14:37:52 -05001253void SPIRVProducerPass::FindTypesForResourceVars() {
David Neto862b7d82018-06-14 18:48:37 -04001254 // Record types so they are generated.
1255 TypesNeedingLayout.reset();
1256 StructTypesNeedingBlock.reset();
1257
1258 // To match older clspv codegen, generate the float type first if required
1259 // for images.
1260 for (const auto *info : ModuleOrderedResourceVars) {
alan-bakerf6bc8252020-09-23 14:58:55 -04001261 if (info->arg_kind == clspv::ArgKind::SampledImage ||
1262 info->arg_kind == clspv::ArgKind::StorageImage) {
alan-bakerf67468c2019-11-25 15:51:49 -05001263 if (IsIntImageType(info->var_fn->getReturnType())) {
1264 // Nothing for now...
1265 } else if (IsUintImageType(info->var_fn->getReturnType())) {
SJW77b87ad2020-04-21 14:37:52 -05001266 FindType(Type::getInt32Ty(module->getContext()));
alan-bakerf67468c2019-11-25 15:51:49 -05001267 }
1268
1269 // We need "float" either for the sampled type or for the Lod operand.
SJW77b87ad2020-04-21 14:37:52 -05001270 FindType(Type::getFloatTy(module->getContext()));
David Neto862b7d82018-06-14 18:48:37 -04001271 }
1272 }
1273
1274 for (const auto *info : ModuleOrderedResourceVars) {
1275 Type *type = info->var_fn->getReturnType();
1276
1277 switch (info->arg_kind) {
1278 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04001279 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04001280 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1281 StructTypesNeedingBlock.insert(sty);
1282 } else {
1283 errs() << *type << "\n";
1284 llvm_unreachable("Buffer arguments must map to structures!");
1285 }
1286 break;
1287 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001288 case clspv::ArgKind::PodUBO:
1289 case clspv::ArgKind::PodPushConstant:
David Neto862b7d82018-06-14 18:48:37 -04001290 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1291 StructTypesNeedingBlock.insert(sty);
1292 } else {
1293 errs() << *type << "\n";
1294 llvm_unreachable("POD arguments must map to structures!");
1295 }
1296 break;
alan-bakerf6bc8252020-09-23 14:58:55 -04001297 case clspv::ArgKind::SampledImage:
1298 case clspv::ArgKind::StorageImage:
David Neto862b7d82018-06-14 18:48:37 -04001299 case clspv::ArgKind::Sampler:
1300 // Sampler and image types map to the pointee type but
1301 // in the uniform constant address space.
1302 type = PointerType::get(type->getPointerElementType(),
1303 clspv::AddressSpace::UniformConstant);
1304 break;
1305 default:
1306 break;
1307 }
1308
1309 // The converted type is the type of the OpVariable we will generate.
1310 // If the pointee type is an array of size zero, FindType will convert it
1311 // to a runtime array.
1312 FindType(type);
1313 }
1314
alan-bakerdcd97412019-09-16 15:32:30 -04001315 // If module constants are clustered in a storage buffer then that struct
1316 // needs layout decorations.
1317 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
SJW77b87ad2020-04-21 14:37:52 -05001318 for (GlobalVariable &GV : module->globals()) {
alan-bakerdcd97412019-09-16 15:32:30 -04001319 PointerType *PTy = cast<PointerType>(GV.getType());
1320 const auto AS = PTy->getAddressSpace();
1321 const bool module_scope_constant_external_init =
1322 (AS == AddressSpace::Constant) && GV.hasInitializer();
1323 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
1324 if (module_scope_constant_external_init &&
1325 spv::BuiltInMax == BuiltinType) {
1326 StructTypesNeedingBlock.insert(
1327 cast<StructType>(PTy->getPointerElementType()));
1328 }
1329 }
1330 }
1331
SJW77b87ad2020-04-21 14:37:52 -05001332 for (const GlobalVariable &GV : module->globals()) {
Kévin Petitbbbda972020-03-03 19:16:31 +00001333 if (GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
1334 auto Ty = cast<PointerType>(GV.getType())->getPointerElementType();
1335 assert(Ty->isStructTy() && "Push constants have to be structures.");
1336 auto STy = cast<StructType>(Ty);
1337 StructTypesNeedingBlock.insert(STy);
1338 }
1339 }
1340
David Neto862b7d82018-06-14 18:48:37 -04001341 // Traverse the arrays and structures underneath each Block, and
1342 // mark them as needing layout.
1343 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1344 StructTypesNeedingBlock.end());
1345 while (!work_list.empty()) {
1346 Type *type = work_list.back();
1347 work_list.pop_back();
1348 TypesNeedingLayout.insert(type);
1349 switch (type->getTypeID()) {
1350 case Type::ArrayTyID:
1351 work_list.push_back(type->getArrayElementType());
1352 if (!Hack_generate_runtime_array_stride_early) {
1353 // Remember this array type for deferred decoration.
1354 TypesNeedingArrayStride.insert(type);
1355 }
1356 break;
1357 case Type::StructTyID:
1358 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1359 work_list.push_back(elem_ty);
1360 }
1361 default:
1362 // This type and its contained types don't get layout.
1363 break;
1364 }
1365 }
1366}
1367
void SPIRVProducerPass::GenerateWorkgroupVars() {
  // The SpecId assignment for pointer-to-local arguments is recorded in
  // module-level metadata. Translate that information into local argument
  // information.
  //
  // For each (function, arg-index, spec-id) tuple in the metadata this emits,
  // once per distinct spec id:
  //   OpSpecConstant %int (array size, default 1)
  //   OpTypeArray of the argument's pointee type, sized by that constant
  //   OpTypePointer Workgroup to the array
  //   OpVariable in Workgroup storage
  //   OpDecorate SpecId on the size constant
  // and records the result ids in LocalSpecIdInfoMap / LocalArgSpecIds.
  LLVMContext &Context = module->getContext();
  NamedMDNode *nmd = module->getNamedMetadata(clspv::LocalSpecIdMetadataName());
  if (!nmd)
    return;
  for (auto operand : nmd->operands()) {
    // Each metadata tuple is (function, argument index, spec id).
    MDTuple *tuple = cast<MDTuple>(operand);
    ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
    Function *func = cast<Function>(fn_md->getValue());
    ConstantAsMetadata *arg_index_md =
        cast<ConstantAsMetadata>(tuple->getOperand(1));
    int arg_index = static_cast<int>(
        cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
    Argument *arg = &*(func->arg_begin() + arg_index);

    ConstantAsMetadata *spec_id_md =
        cast<ConstantAsMetadata>(tuple->getOperand(2));
    int spec_id = static_cast<int>(
        cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());

    LocalArgSpecIds[arg] = spec_id;
    // Several arguments may share a spec id; emit the SPIR-V objects only
    // once per spec id.
    if (LocalSpecIdInfoMap.count(spec_id))
      continue;

    // Generate the spec constant.
    SPIRVOperandVec Ops;
    Ops << Type::getInt32Ty(Context) << 1;
    SPIRVID ArraySizeID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);

    // Generate the array type.
    Type *ElemTy = arg->getType()->getPointerElementType();
    Ops.clear();
    // The element type must have been created.
    Ops << ElemTy << ArraySizeID;

    SPIRVID ArrayTypeID = addSPIRVInst<kTypes>(spv::OpTypeArray, Ops);

    Ops.clear();
    Ops << spv::StorageClassWorkgroup << ArrayTypeID;
    SPIRVID PtrArrayTypeID = addSPIRVInst<kTypes>(spv::OpTypePointer, Ops);

    // Generate OpVariable.
    //
    // Ops[0] : Result Type ID
    // Ops[1] : Storage Class
    SPIRVID VariableID =
        addSPIRVGlobalVariable(PtrArrayTypeID, spv::StorageClassWorkgroup);

    // Tie the array-size spec constant to its SpecId decoration so the host
    // can override the workgroup array length at pipeline creation.
    Ops.clear();
    Ops << ArraySizeID << spv::DecorationSpecId << spec_id;
    addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);

    LocalArgInfo info{VariableID, ElemTy, ArraySizeID,
                      ArrayTypeID, PtrArrayTypeID, spec_id};
    LocalSpecIdInfoMap[spec_id] = info;
  }
}
1428
David Neto22f144c2017-06-12 14:26:21 -04001429void SPIRVProducerPass::FindType(Type *Ty) {
1430 TypeList &TyList = getTypeList();
1431
1432 if (0 != TyList.idFor(Ty)) {
1433 return;
1434 }
1435
1436 if (Ty->isPointerTy()) {
1437 auto AddrSpace = Ty->getPointerAddressSpace();
1438 if ((AddressSpace::Constant == AddrSpace) ||
1439 (AddressSpace::Global == AddrSpace)) {
1440 auto PointeeTy = Ty->getPointerElementType();
1441
1442 if (PointeeTy->isStructTy() &&
1443 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1444 FindType(PointeeTy);
1445 auto ActualPointerTy =
1446 PointeeTy->getPointerTo(AddressSpace::UniformConstant);
1447 FindType(ActualPointerTy);
1448 return;
1449 }
1450 }
1451 }
1452
David Neto862b7d82018-06-14 18:48:37 -04001453 // By convention, LLVM array type with 0 elements will map to
1454 // OpTypeRuntimeArray. Otherwise, it will map to OpTypeArray, which
1455 // has a constant number of elements. We need to support type of the
1456 // constant.
1457 if (auto *arrayTy = dyn_cast<ArrayType>(Ty)) {
1458 if (arrayTy->getNumElements() > 0) {
1459 LLVMContext &Context = Ty->getContext();
1460 FindType(Type::getInt32Ty(Context));
1461 }
David Neto22f144c2017-06-12 14:26:21 -04001462 }
1463
1464 for (Type *SubTy : Ty->subtypes()) {
1465 FindType(SubTy);
1466 }
1467
1468 TyList.insert(Ty);
1469}
1470
David Neto22f144c2017-06-12 14:26:21 -04001471spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
1472 switch (AddrSpace) {
1473 default:
1474 llvm_unreachable("Unsupported OpenCL address space");
1475 case AddressSpace::Private:
1476 return spv::StorageClassFunction;
1477 case AddressSpace::Global:
David Neto22f144c2017-06-12 14:26:21 -04001478 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001479 case AddressSpace::Constant:
1480 return clspv::Option::ConstantArgsInUniformBuffer()
1481 ? spv::StorageClassUniform
1482 : spv::StorageClassStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -04001483 case AddressSpace::Input:
1484 return spv::StorageClassInput;
1485 case AddressSpace::Local:
1486 return spv::StorageClassWorkgroup;
1487 case AddressSpace::UniformConstant:
1488 return spv::StorageClassUniformConstant;
David Neto9ed8e2f2018-03-24 06:47:24 -07001489 case AddressSpace::Uniform:
David Netoe439d702018-03-23 13:14:08 -07001490 return spv::StorageClassUniform;
David Neto22f144c2017-06-12 14:26:21 -04001491 case AddressSpace::ModuleScopePrivate:
1492 return spv::StorageClassPrivate;
Kévin Petitbbbda972020-03-03 19:16:31 +00001493 case AddressSpace::PushConstant:
1494 return spv::StorageClassPushConstant;
David Neto22f144c2017-06-12 14:26:21 -04001495 }
1496}
1497
David Neto862b7d82018-06-14 18:48:37 -04001498spv::StorageClass
1499SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
1500 switch (arg_kind) {
1501 case clspv::ArgKind::Buffer:
1502 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001503 case clspv::ArgKind::BufferUBO:
1504 return spv::StorageClassUniform;
David Neto862b7d82018-06-14 18:48:37 -04001505 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001506 return spv::StorageClassStorageBuffer;
1507 case clspv::ArgKind::PodUBO:
1508 return spv::StorageClassUniform;
1509 case clspv::ArgKind::PodPushConstant:
1510 return spv::StorageClassPushConstant;
David Neto862b7d82018-06-14 18:48:37 -04001511 case clspv::ArgKind::Local:
1512 return spv::StorageClassWorkgroup;
alan-bakerf6bc8252020-09-23 14:58:55 -04001513 case clspv::ArgKind::SampledImage:
1514 case clspv::ArgKind::StorageImage:
David Neto862b7d82018-06-14 18:48:37 -04001515 case clspv::ArgKind::Sampler:
1516 return spv::StorageClassUniformConstant;
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001517 default:
1518 llvm_unreachable("Unsupported storage class for argument kind");
David Neto862b7d82018-06-14 18:48:37 -04001519 }
1520}
1521
David Neto22f144c2017-06-12 14:26:21 -04001522spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1523 return StringSwitch<spv::BuiltIn>(Name)
1524 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1525 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1526 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1527 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1528 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
alan-bakerbed3a882020-04-21 14:42:41 -04001529 .Case("__spirv_WorkDim", spv::BuiltInWorkDim)
alan-bakere1996972020-05-04 08:38:12 -04001530 .Case("__spirv_GlobalOffset", spv::BuiltInGlobalOffset)
David Neto22f144c2017-06-12 14:26:21 -04001531 .Default(spv::BuiltInMax);
1532}
1533
SJW01901d92020-05-21 08:58:31 -05001534SPIRVID SPIRVProducerPass::getOpExtInstImportID() {
1535 if (OpExtInstImportID == 0) {
1536 //
1537 // Generate OpExtInstImport.
1538 //
1539 // Ops[0] ... Ops[n] = Name (Literal String)
David Neto22f144c2017-06-12 14:26:21 -04001540
SJW01901d92020-05-21 08:58:31 -05001541 OpExtInstImportID =
1542 addSPIRVInst<kImports>(spv::OpExtInstImport, "GLSL.std.450");
1543 }
1544 return OpExtInstImportID;
SJWf93f5f32020-05-05 07:27:56 -05001545}
1546
SJW806a5d82020-07-15 12:51:38 -05001547SPIRVID SPIRVProducerPass::addSPIRVGlobalVariable(const SPIRVID &TypeID,
1548 spv::StorageClass SC,
1549 const SPIRVID &InitID) {
1550 // Generate OpVariable.
1551 //
1552 // Ops[0] : Result Type ID
1553 // Ops[1] : Storage Class
1554 // Ops[2] : Initialization Value ID (optional)
1555
1556 SPIRVOperandVec Ops;
1557 Ops << TypeID << SC;
1558 if (InitID.isValid()) {
1559 Ops << InitID;
1560 }
1561
1562 SPIRVID VID = addSPIRVInst<kGlobalVariables>(spv::OpVariable, Ops);
1563
1564 if (SC == spv::StorageClassInput) {
1565 getEntryPointInterfacesList().push_back(VID);
1566 }
1567
1568 return VID;
1569}
1570
// Returns the SPIR-V result id for the type declaration corresponding to Ty,
// emitting the OpType* instruction (plus any capability, layout, and
// Block/ArrayStride decorations) on first use and caching it in TypeMap.
// Note several deliberate remappings: opaque-struct pointers collapse to the
// pointee's image/sampler type; Constant and Global pointers are conflated
// when constant args are not in UBOs; i8 maps to i32 unless Int8Support;
// zero-length arrays become OpTypeRuntimeArray.
SPIRVID SPIRVProducerPass::getSPIRVType(Type *Ty) {
  // Fast path: type already emitted.
  auto TI = TypeMap.find(Ty);
  if (TI != TypeMap.end()) {
    assert(TI->second.isValid());
    return TI->second;
  }

  const auto &DL = module->getDataLayout();

  // Stays invalid for ignored types (metadata/label), in which case nothing
  // is cached and an invalid id is returned.
  SPIRVID RID;

  switch (Ty->getTypeID()) {
  default: {
    Ty->print(errs());
    llvm_unreachable("Unsupported type???");
    break;
  }
  case Type::MetadataTyID:
  case Type::LabelTyID: {
    // Ignore these types.
    break;
  }
  case Type::PointerTyID: {
    PointerType *PTy = cast<PointerType>(Ty);
    unsigned AddrSpace = PTy->getAddressSpace();

    // Pointers to opaque structs (images/samplers) outside UniformConstant
    // are represented by the pointee's type itself.
    if (AddrSpace != AddressSpace::UniformConstant) {
      auto PointeeTy = PTy->getElementType();
      if (PointeeTy->isStructTy() &&
          dyn_cast<StructType>(PointeeTy)->isOpaque()) {
        // TODO(sjw): assert always an image?
        RID = getSPIRVType(PointeeTy);
        break;
      }
    }

    // For the purposes of our Vulkan SPIR-V type system, constant and global
    // are conflated.
    if (AddressSpace::Constant == AddrSpace) {
      if (!clspv::Option::ConstantArgsInUniformBuffer()) {
        AddrSpace = AddressSpace::Global;
        // Check to see if we already created this type (for instance, if we
        // had a constant <type>* and a global <type>*, the type would be
        // created by one of these types, and shared by both).
        auto GlobalTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
        if (0 < TypeMap.count(GlobalTy)) {
          RID = TypeMap[GlobalTy];
          break;
        }
      }
    } else if (AddressSpace::Global == AddrSpace) {
      if (!clspv::Option::ConstantArgsInUniformBuffer()) {
        AddrSpace = AddressSpace::Constant;

        // Check to see if we already created this type (for instance, if we
        // had a constant <type>* and a global <type>*, the type would be
        // created by one of these types, and shared by both).
        auto ConstantTy = PTy->getPointerElementType()->getPointerTo(AddrSpace);
        if (0 < TypeMap.count(ConstantTy)) {
          RID = TypeMap[ConstantTy];
          break;
        }
      }
    }

    //
    // Generate OpTypePointer.
    //

    // OpTypePointer
    // Ops[0] = Storage Class
    // Ops[1] = Element Type ID
    SPIRVOperandVec Ops;

    Ops << GetStorageClass(AddrSpace) << PTy->getElementType();

    RID = addSPIRVInst<kTypes>(spv::OpTypePointer, Ops);
    break;
  }
  case Type::StructTyID: {
    StructType *STy = cast<StructType>(Ty);

    // Handle sampler type.
    if (STy->isOpaque()) {
      if (STy->getName().equals("opencl.sampler_t")) {
        //
        // Generate OpTypeSampler
        //
        // Empty Ops.

        RID = addSPIRVInst<kTypes>(spv::OpTypeSampler);
        break;
      } else if (STy->getName().startswith("opencl.image1d_ro_t") ||
                 STy->getName().startswith("opencl.image1d_rw_t") ||
                 STy->getName().startswith("opencl.image1d_wo_t") ||
                 STy->getName().startswith("opencl.image1d_array_ro_t") ||
                 STy->getName().startswith("opencl.image1d_array_rw_t") ||
                 STy->getName().startswith("opencl.image1d_array_wo_t") ||
                 STy->getName().startswith("opencl.image2d_ro_t") ||
                 STy->getName().startswith("opencl.image2d_rw_t") ||
                 STy->getName().startswith("opencl.image2d_wo_t") ||
                 STy->getName().startswith("opencl.image2d_array_ro_t") ||
                 STy->getName().startswith("opencl.image2d_array_rw_t") ||
                 STy->getName().startswith("opencl.image2d_array_wo_t") ||
                 STy->getName().startswith("opencl.image3d_ro_t") ||
                 STy->getName().startswith("opencl.image3d_rw_t") ||
                 STy->getName().startswith("opencl.image3d_wo_t")) {
        // 1D images need an extra capability: Sampled1D when used with a
        // sampler, Image1D otherwise.
        if (STy->getName().startswith("opencl.image1d_")) {
          if (STy->getName().contains(".sampled"))
            addCapability(spv::CapabilitySampled1D);
          else
            addCapability(spv::CapabilityImage1D);
        }

        //
        // Generate OpTypeImage
        //
        // Ops[0] = Sampled Type ID
        // Ops[1] = Dim ID
        // Ops[2] = Depth (Literal Number)
        // Ops[3] = Arrayed (Literal Number)
        // Ops[4] = MS (Literal Number)
        // Ops[5] = Sampled (Literal Number)
        // Ops[6] = Image Format ID
        //
        SPIRVOperandVec Ops;

        // The sampled/channel type is encoded in the mangled struct name
        // (".float", ".uint", ".int").
        SPIRVID SampledTyID;
        if (STy->getName().contains(".float")) {
          SampledTyID = getSPIRVType(Type::getFloatTy(Ty->getContext()));
        } else if (STy->getName().contains(".uint")) {
          SampledTyID = getSPIRVType(Type::getInt32Ty(Ty->getContext()));
        } else if (STy->getName().contains(".int")) {
          // Generate a signed 32-bit integer if necessary.
          // (Signed int is not in the regular TypeMap: LLVM ints are
          // signless, so the cached int32ID/v4int32ID members are used.)
          if (int32ID == 0) {
            SPIRVOperandVec intOps;
            intOps << 32 << 1;
            int32ID = addSPIRVInst<kTypes>(spv::OpTypeInt, intOps);
          }
          SampledTyID = int32ID;

          // Generate a vec4 of the signed int if necessary.
          if (v4int32ID == 0) {
            SPIRVOperandVec vecOps;
            vecOps << int32ID << 4;
            v4int32ID = addSPIRVInst<kTypes>(spv::OpTypeVector, vecOps);
          }
        } else {
          // This was likely an UndefValue.
          SampledTyID = getSPIRVType(Type::getFloatTy(Ty->getContext()));
        }
        Ops << SampledTyID;

        // Dimensionality also comes from the struct name; default is 2D.
        spv::Dim DimID = spv::Dim2D;
        if (STy->getName().startswith("opencl.image1d_ro_t") ||
            STy->getName().startswith("opencl.image1d_rw_t") ||
            STy->getName().startswith("opencl.image1d_wo_t") ||
            STy->getName().startswith("opencl.image1d_array_ro_t") ||
            STy->getName().startswith("opencl.image1d_array_rw_t") ||
            STy->getName().startswith("opencl.image1d_array_wo_t")) {
          DimID = spv::Dim1D;
        } else if (STy->getName().startswith("opencl.image3d_ro_t") ||
                   STy->getName().startswith("opencl.image3d_rw_t") ||
                   STy->getName().startswith("opencl.image3d_wo_t")) {
          DimID = spv::Dim3D;
        }
        Ops << DimID;

        // TODO: Set up Depth.
        Ops << 0;

        uint32_t arrayed = STy->getName().contains("_array_") ? 1 : 0;
        Ops << arrayed;

        // TODO: Set up MS.
        Ops << 0;

        // Set up Sampled.
        //
        // From Spec
        //
        // 0 indicates this is only known at run time, not at compile time
        // 1 indicates will be used with sampler
        // 2 indicates will be used without a sampler (a storage image)
        uint32_t Sampled = 1;
        if (!STy->getName().contains(".sampled")) {
          Sampled = 2;
        }
        Ops << Sampled;

        // TODO: Set up Image Format.
        Ops << spv::ImageFormatUnknown;
        RID = addSPIRVInst<kTypes>(spv::OpTypeImage, Ops);

        // Only need a sampled version of the type if it is used with a sampler.
        if (Sampled == 1) {
          Ops.clear();
          Ops << RID;
          getImageTypeMap()[Ty] =
              addSPIRVInst<kTypes>(spv::OpTypeSampledImage, Ops);
        }
        break;
      }
    }

    // Non-opaque (or unrecognized opaque) struct: emit a plain OpTypeStruct.
    //
    // Generate OpTypeStruct
    //
    // Ops[0] ... Ops[n] = Member IDs
    SPIRVOperandVec Ops;

    for (auto *EleTy : STy->elements()) {
      Ops << EleTy;
    }

    RID = addSPIRVInst<kTypes>(spv::OpTypeStruct, Ops);

    // Generate OpMemberDecorate.
    // Explicit byte offsets are required for types used inside Blocks
    // (as computed earlier by FindTypesForResourceVars).
    if (TypesNeedingLayout.idFor(STy)) {
      for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
           MemberIdx++) {
        // Ops[0] = Structure Type ID
        // Ops[1] = Member Index(Literal Number)
        // Ops[2] = Decoration (Offset)
        // Ops[3] = Byte Offset (Literal Number)
        const auto ByteOffset =
            GetExplicitLayoutStructMemberOffset(STy, MemberIdx, DL);

        Ops.clear();
        Ops << RID << MemberIdx << spv::DecorationOffset << ByteOffset;

        addSPIRVInst<kAnnotations>(spv::OpMemberDecorate, Ops);
      }
    }

    // Generate OpDecorate.
    if (StructTypesNeedingBlock.idFor(STy)) {
      Ops.clear();
      // Use Block decorations with StorageBuffer storage class.
      Ops << RID << spv::DecorationBlock;

      addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
    }
    break;
  }
  case Type::IntegerTyID: {
    uint32_t bit_width = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());

    // Non-32-bit integer widths require extra capabilities.
    if (clspv::Option::Int8Support() && bit_width == 8) {
      addCapability(spv::CapabilityInt8);
    } else if (bit_width == 16) {
      addCapability(spv::CapabilityInt16);
    } else if (bit_width == 64) {
      addCapability(spv::CapabilityInt64);
    }

    if (bit_width == 1) {
      RID = addSPIRVInst<kTypes>(spv::OpTypeBool);
    } else {
      if (!clspv::Option::Int8Support() && bit_width == 8) {
        // i8 is added to TypeMap as i32.
        RID = getSPIRVType(Type::getIntNTy(Ty->getContext(), 32));
      } else {
        SPIRVOperandVec Ops;
        Ops << bit_width << 0 /* not signed */;
        RID = addSPIRVInst<kTypes>(spv::OpTypeInt, Ops);
      }
    }
    break;
  }
  case Type::HalfTyID:
  case Type::FloatTyID:
  case Type::DoubleTyID: {
    uint32_t bit_width = static_cast<uint32_t>(Ty->getPrimitiveSizeInBits());
    // Half and double floats require extra capabilities.
    if (bit_width == 16) {
      addCapability(spv::CapabilityFloat16);
    } else if (bit_width == 64) {
      addCapability(spv::CapabilityFloat64);
    }

    SPIRVOperandVec Ops;
    Ops << bit_width;

    RID = addSPIRVInst<kTypes>(spv::OpTypeFloat, Ops);
    break;
  }
  case Type::ArrayTyID: {
    ArrayType *ArrTy = cast<ArrayType>(Ty);
    const uint64_t Length = ArrTy->getArrayNumElements();
    if (Length == 0) {
      // By convention, map it to a RuntimeArray.

      Type *EleTy = ArrTy->getArrayElementType();

      //
      // Generate OpTypeRuntimeArray.
      //
      // OpTypeRuntimeArray
      // Ops[0] = Element Type ID
      SPIRVOperandVec Ops;
      Ops << EleTy;

      RID = addSPIRVInst<kTypes>(spv::OpTypeRuntimeArray, Ops);

      if (Hack_generate_runtime_array_stride_early) {
        // Generate OpDecorate.

        // Ops[0] = Target ID
        // Ops[1] = Decoration (ArrayStride)
        // Ops[2] = Stride Number(Literal Number)
        Ops.clear();

        Ops << RID << spv::DecorationArrayStride
            << static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL));

        addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
      }

    } else {

      //
      // Generate OpConstant and OpTypeArray.
      //

      //
      // Generate OpConstant for array length.
      //
      // Add constant for length to constant list.
      Constant *CstLength =
          ConstantInt::get(Type::getInt32Ty(module->getContext()), Length);

      // Remember to generate ArrayStride later
      getTypesNeedingArrayStride().insert(Ty);

      //
      // Generate OpTypeArray.
      //
      // Ops[0] = Element Type ID
      // Ops[1] = Array Length Constant ID
      SPIRVOperandVec Ops;

      Ops << ArrTy->getElementType() << CstLength;

      RID = addSPIRVInst<kTypes>(spv::OpTypeArray, Ops);
    }
    break;
  }
  case Type::FixedVectorTyID: {
    auto VecTy = cast<VectorType>(Ty);
    // <4 x i8> is changed to i32 if i8 is not generally supported.
    if (!clspv::Option::Int8Support() &&
        VecTy->getElementType() == Type::getInt8Ty(module->getContext())) {
      if (VecTy->getElementCount().getKnownMinValue() == 4) {
        RID = getSPIRVType(VecTy->getElementType());
        break;
      } else {
        // Only the 4-element i8 vector has an i32 packing; other widths are
        // unsupported without Int8Support.
        Ty->print(errs());
        llvm_unreachable("Support above i8 vector type");
      }
    }

    // Ops[0] = Component Type ID
    // Ops[1] = Component Count (Literal Number)
    SPIRVOperandVec Ops;
    Ops << VecTy->getElementType()
        << VecTy->getElementCount().getKnownMinValue();

    RID = addSPIRVInst<kTypes>(spv::OpTypeVector, Ops);
    break;
  }
  case Type::VoidTyID: {
    RID = addSPIRVInst<kTypes>(spv::OpTypeVoid);
    break;
  }
  case Type::FunctionTyID: {
    // Generate SPIRV instruction for function type.
    FunctionType *FTy = cast<FunctionType>(Ty);

    // Ops[0] = Return Type ID
    // Ops[1] ... Ops[n] = Parameter Type IDs
    SPIRVOperandVec Ops;

    // Find SPIRV instruction for return type
    Ops << FTy->getReturnType();

    // Find SPIRV instructions for parameter types
    for (unsigned k = 0; k < FTy->getNumParams(); k++) {
      // Find SPIRV instruction for parameter type.
      auto ParamTy = FTy->getParamType(k);
      if (ParamTy->isPointerTy()) {
        // Opaque-struct pointer parameters (images/samplers) are represented
        // by the pointee type, matching the PointerTyID case above.
        auto PointeeTy = ParamTy->getPointerElementType();
        if (PointeeTy->isStructTy() &&
            dyn_cast<StructType>(PointeeTy)->isOpaque()) {
          ParamTy = PointeeTy;
        }
      }

      Ops << ParamTy;
    }

    RID = addSPIRVInst<kTypes>(spv::OpTypeFunction, Ops);
    break;
  }
  }

  // Cache only successfully-emitted types.
  if (RID.isValid()) {
    TypeMap[Ty] = RID;
  }
  return RID;
}
1981
SJW77b87ad2020-04-21 14:37:52 -05001982void SPIRVProducerPass::GenerateSPIRVTypes() {
David Neto22f144c2017-06-12 14:26:21 -04001983 for (Type *Ty : getTypeList()) {
SJWf93f5f32020-05-05 07:27:56 -05001984 getSPIRVType(Ty);
David Netoc6f3ab22018-04-06 18:02:31 -04001985 }
David Neto22f144c2017-06-12 14:26:21 -04001986}
1987
SJW806a5d82020-07-15 12:51:38 -05001988SPIRVID SPIRVProducerPass::getSPIRVInt32Constant(uint32_t CstVal) {
1989 Type *i32 = Type::getInt32Ty(module->getContext());
1990 Constant *Cst = ConstantInt::get(i32, CstVal);
1991 return getSPIRVValue(Cst);
1992}
1993
SJWf93f5f32020-05-05 07:27:56 -05001994SPIRVID SPIRVProducerPass::getSPIRVConstant(Constant *Cst) {
David Neto22f144c2017-06-12 14:26:21 -04001995 ValueMapType &VMap = getValueMap();
David Neto482550a2018-03-24 05:21:07 -07001996 const bool hack_undef = clspv::Option::HackUndef();
David Neto22f144c2017-06-12 14:26:21 -04001997
SJW01901d92020-05-21 08:58:31 -05001998 SPIRVID RID;
David Neto22f144c2017-06-12 14:26:21 -04001999
SJWf93f5f32020-05-05 07:27:56 -05002000 //
2001 // Generate OpConstant.
2002 //
2003 // Ops[0] = Result Type ID
2004 // Ops[1] .. Ops[n] = Values LiteralNumber
2005 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002006
SJW01901d92020-05-21 08:58:31 -05002007 Ops << Cst->getType();
David Neto22f144c2017-06-12 14:26:21 -04002008
SJWf93f5f32020-05-05 07:27:56 -05002009 std::vector<uint32_t> LiteralNum;
2010 spv::Op Opcode = spv::OpNop;
David Neto22f144c2017-06-12 14:26:21 -04002011
SJWf93f5f32020-05-05 07:27:56 -05002012 if (isa<UndefValue>(Cst)) {
David Neto22f144c2017-06-12 14:26:21 -04002013 // Ops[0] = Result Type ID
SJWf93f5f32020-05-05 07:27:56 -05002014 Opcode = spv::OpUndef;
2015 if (hack_undef && IsTypeNullable(Cst->getType())) {
2016 Opcode = spv::OpConstantNull;
2017 }
2018 } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
alan-bakere2a62752020-07-09 22:53:23 -04002019 unsigned bit_width = CI->getBitWidth();
2020 if (bit_width == 1) {
SJWf93f5f32020-05-05 07:27:56 -05002021 // If the bitwidth of constant is 1, generate OpConstantTrue or
2022 // OpConstantFalse.
2023 if (CI->getZExtValue()) {
2024 // Ops[0] = Result Type ID
2025 Opcode = spv::OpConstantTrue;
David Neto22f144c2017-06-12 14:26:21 -04002026 } else {
SJWf93f5f32020-05-05 07:27:56 -05002027 // Ops[0] = Result Type ID
2028 Opcode = spv::OpConstantFalse;
David Neto22f144c2017-06-12 14:26:21 -04002029 }
SJWf93f5f32020-05-05 07:27:56 -05002030 } else {
2031 auto V = CI->getZExtValue();
2032 LiteralNum.push_back(V & 0xFFFFFFFF);
2033
alan-bakere2a62752020-07-09 22:53:23 -04002034 if (bit_width > 32) {
SJWf93f5f32020-05-05 07:27:56 -05002035 LiteralNum.push_back(V >> 32);
David Neto22f144c2017-06-12 14:26:21 -04002036 }
2037
2038 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002039
SJW01901d92020-05-21 08:58:31 -05002040 Ops << LiteralNum;
SJWf93f5f32020-05-05 07:27:56 -05002041 }
2042 } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
2043 uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
2044 Type *CFPTy = CFP->getType();
2045 if (CFPTy->isFloatTy()) {
2046 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2047 } else if (CFPTy->isDoubleTy()) {
2048 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2049 LiteralNum.push_back(FPVal >> 32);
2050 } else if (CFPTy->isHalfTy()) {
2051 LiteralNum.push_back(FPVal & 0xFFFF);
2052 } else {
2053 CFPTy->print(errs());
2054 llvm_unreachable("Implement this ConstantFP Type");
2055 }
David Neto22f144c2017-06-12 14:26:21 -04002056
SJWf93f5f32020-05-05 07:27:56 -05002057 Opcode = spv::OpConstant;
David Neto49351ac2017-08-26 17:32:20 -04002058
SJW01901d92020-05-21 08:58:31 -05002059 Ops << LiteralNum;
SJWf93f5f32020-05-05 07:27:56 -05002060 } else if (isa<ConstantDataSequential>(Cst) &&
2061 cast<ConstantDataSequential>(Cst)->isString()) {
2062 Cst->print(errs());
2063 llvm_unreachable("Implement this Constant");
David Neto49351ac2017-08-26 17:32:20 -04002064
SJWf93f5f32020-05-05 07:27:56 -05002065 } else if (const ConstantDataSequential *CDS =
2066 dyn_cast<ConstantDataSequential>(Cst)) {
2067 // Let's convert <4 x i8> constant to int constant specially.
2068 // This case occurs when all the values are specified as constant
2069 // ints.
2070 Type *CstTy = Cst->getType();
2071 if (is4xi8vec(CstTy)) {
SJWf93f5f32020-05-05 07:27:56 -05002072 //
2073 // Generate OpConstant with OpTypeInt 32 0.
2074 //
2075 uint32_t IntValue = 0;
2076 for (unsigned k = 0; k < 4; k++) {
2077 const uint64_t Val = CDS->getElementAsInteger(k);
2078 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto49351ac2017-08-26 17:32:20 -04002079 }
2080
SJW806a5d82020-07-15 12:51:38 -05002081 RID = getSPIRVInt32Constant(IntValue);
SJWf93f5f32020-05-05 07:27:56 -05002082 } else {
2083
David Neto49351ac2017-08-26 17:32:20 -04002084 // A normal constant-data-sequential case.
David Neto22f144c2017-06-12 14:26:21 -04002085 for (unsigned k = 0; k < CDS->getNumElements(); k++) {
SJW01901d92020-05-21 08:58:31 -05002086 Ops << CDS->getElementAsConstant(k);
David Neto22f144c2017-06-12 14:26:21 -04002087 }
2088
2089 Opcode = spv::OpConstantComposite;
SJWf93f5f32020-05-05 07:27:56 -05002090 }
2091 } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
2092 // Let's convert <4 x i8> constant to int constant specially.
2093 // This case occurs when at least one of the values is an undef.
2094 Type *CstTy = Cst->getType();
2095 if (is4xi8vec(CstTy)) {
SJWf93f5f32020-05-05 07:27:56 -05002096 //
2097 // Generate OpConstant with OpTypeInt 32 0.
2098 //
2099 uint32_t IntValue = 0;
2100 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
2101 I != E; ++I) {
2102 uint64_t Val = 0;
2103 const Value *CV = *I;
2104 if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
2105 Val = CI2->getZExtValue();
David Neto22f144c2017-06-12 14:26:21 -04002106 }
SJWf93f5f32020-05-05 07:27:56 -05002107 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto22f144c2017-06-12 14:26:21 -04002108 }
2109
SJW806a5d82020-07-15 12:51:38 -05002110 RID = getSPIRVInt32Constant(IntValue);
SJWf93f5f32020-05-05 07:27:56 -05002111 } else {
2112
David Neto22f144c2017-06-12 14:26:21 -04002113 // We use a constant composite in SPIR-V for our constant aggregate in
2114 // LLVM.
2115 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002116
2117 for (unsigned k = 0; k < CA->getNumOperands(); k++) {
David Neto22f144c2017-06-12 14:26:21 -04002118 // And add an operand to the composite we are constructing
SJW01901d92020-05-21 08:58:31 -05002119 Ops << CA->getAggregateElement(k);
David Neto22f144c2017-06-12 14:26:21 -04002120 }
David Neto22f144c2017-06-12 14:26:21 -04002121 }
SJWf93f5f32020-05-05 07:27:56 -05002122 } else if (Cst->isNullValue()) {
2123 Opcode = spv::OpConstantNull;
2124 } else {
2125 Cst->print(errs());
2126 llvm_unreachable("Unsupported Constant???");
2127 }
David Neto22f144c2017-06-12 14:26:21 -04002128
SJWf93f5f32020-05-05 07:27:56 -05002129 if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
2130 // Null pointer requires variable pointers.
2131 setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
2132 }
alan-baker5b86ed72019-02-15 08:26:50 -05002133
SJWf93f5f32020-05-05 07:27:56 -05002134 if (RID == 0) {
2135 RID = addSPIRVInst<kConstants>(Opcode, Ops);
2136 }
2137
2138 VMap[Cst] = RID;
2139
2140 return RID;
2141}
2142
2143SPIRVID SPIRVProducerPass::getSPIRVValue(Value *V) {
2144 auto II = ValueMap.find(V);
2145 if (II != ValueMap.end()) {
SJW01901d92020-05-21 08:58:31 -05002146 assert(II->second.isValid());
SJWf93f5f32020-05-05 07:27:56 -05002147 return II->second;
2148 }
2149 if (Constant *Cst = dyn_cast<Constant>(V)) {
2150 return getSPIRVConstant(Cst);
2151 } else {
2152 llvm_unreachable("Variable not found");
2153 }
2154}
2155
// Emits one OpVariable (UniformConstant storage) per distinct literal sampler
// referenced through clspv's literal-sampler builtin function, decorates each
// with DescriptorSet/Binding, and records a reflection instruction for the
// samplers that are actually used.  Populates SamplerLiteralToIDMap so later
// passes can resolve sampler loads.  No-op if the module has no literal
// samplers.
void SPIRVProducerPass::GenerateSamplers() {
  auto &sampler_map = getSamplerMap();
  SamplerLiteralToIDMap.clear();
  // Per-literal bookkeeping gathered from the builtin's call sites.
  DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
  DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;

  // We might have samplers in the sampler map that are not used
  // in the translation unit.  We need to allocate variables
  // for them and bindings too.
  DenseSet<unsigned> used_bindings;

  auto *var_fn = module->getFunction(clspv::LiteralSamplerFunction());
  // Return if there are no literal samplers.
  if (!var_fn)
    return;

  // First pass: harvest (descriptor set, binding) for every sampler literal.
  for (auto user : var_fn->users()) {
    // Populate SamplerLiteralToDescriptorSetMap and
    // SamplerLiteralToBindingMap.
    //
    // Look for calls like
    //   call %opencl.sampler_t addrspace(2)*
    //       @clspv.sampler.var.literal(
    //          i32 descriptor,
    //          i32 binding,
    //          i32 (index-into-sampler-map|sampler_mask))
    if (auto *call = dyn_cast<CallInst>(user)) {
      const auto third_param = static_cast<unsigned>(
          dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
      auto sampler_value = third_param;
      if (clspv::Option::UseSamplerMap()) {
        // With a sampler map, the third argument indexes the map; without
        // one it is already the sampler mask value itself.
        if (third_param >= sampler_map.size()) {
          errs() << "Out of bounds index to sampler map: " << third_param;
          llvm_unreachable("bad sampler init: out of bounds");
        }
        sampler_value = sampler_map[third_param].first;
      }

      const auto descriptor_set = static_cast<unsigned>(
          dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
      const auto binding = static_cast<unsigned>(
          dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());

      SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
      SamplerLiteralToBindingMap[sampler_value] = binding;
      used_bindings.insert(binding);
    }
  }

  // Second pass: create exactly one variable per distinct literal and
  // decorate it.  |seen| de-duplicates call sites with the same literal.
  DenseSet<size_t> seen;
  for (auto user : var_fn->users()) {
    if (!isa<CallInst>(user))
      continue;

    auto call = cast<CallInst>(user);
    const unsigned third_param = static_cast<unsigned>(
        dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());

    // Already allocated a variable for this value.
    if (!seen.insert(third_param).second)
      continue;

    auto sampler_value = third_param;
    if (clspv::Option::UseSamplerMap()) {
      // Bounds were validated in the first pass; every call reaching here
      // has an in-range index.
      sampler_value = sampler_map[third_param].first;
    }

    auto sampler_var_id = addSPIRVGlobalVariable(
        getSPIRVType(SamplerTy), spv::StorageClassUniformConstant);

    SamplerLiteralToIDMap[sampler_value] = sampler_var_id;

    unsigned descriptor_set;
    unsigned binding;
    if (SamplerLiteralToBindingMap.find(sampler_value) ==
        SamplerLiteralToBindingMap.end()) {
      // This sampler is not actually used. Find the next one.
      // Linear scan for the smallest unused binding number.
      for (binding = 0; used_bindings.count(binding); binding++) {
      }
      descriptor_set = 0; // Literal samplers always use descriptor set 0.
      used_bindings.insert(binding);
    } else {
      descriptor_set = SamplerLiteralToDescriptorSetMap[sampler_value];
      binding = SamplerLiteralToBindingMap[sampler_value];

      // Only samplers that are really used get a reflection record
      // describing (descriptor set, binding, sampler literal).
      auto import_id = getReflectionImport();
      SPIRVOperandVec Ops;
      Ops << getSPIRVType(Type::getVoidTy(module->getContext())) << import_id
          << reflection::ExtInstLiteralSampler
          << getSPIRVInt32Constant(descriptor_set)
          << getSPIRVInt32Constant(binding)
          << getSPIRVInt32Constant(sampler_value);
      addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
    }

    // Ops[0] = Target ID
    // Ops[1] = Decoration (DescriptorSet)
    // Ops[2] = LiteralNumber according to Decoration
    SPIRVOperandVec Ops;
    Ops << sampler_var_id << spv::DecorationDescriptorSet << descriptor_set;

    addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);

    // Ops[0] = Target ID
    // Ops[1] = Decoration (Binding)
    // Ops[2] = LiteralNumber according to Decoration
    Ops.clear();
    Ops << sampler_var_id << spv::DecorationBinding << binding;

    addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
  }
}
David Neto22f144c2017-06-12 14:26:21 -04002268
// Creates one SPIR-V global variable per entry in ModuleOrderedResourceVars
// (kernel argument resources: buffers, PODs, samplers, images), wires the
// calls of each resource's variable-builtin-function to that variable, and
// emits DescriptorSet/Binding/Coherent/NonWritable/NonReadable decorations.
void SPIRVProducerPass::GenerateResourceVars() {
  ValueMapType &VMap = getValueMap();

  // Generate variables. Make one for each of resource var info object.
  for (auto *info : ModuleOrderedResourceVars) {
    Type *type = info->var_fn->getReturnType();
    // Remap the address space for opaque types.
    // Samplers and images must live in UniformConstant storage regardless
    // of the address space on the builtin's return type.
    switch (info->arg_kind) {
    case clspv::ArgKind::Sampler:
    case clspv::ArgKind::SampledImage:
    case clspv::ArgKind::StorageImage:
      type = PointerType::get(type->getPointerElementType(),
                              clspv::AddressSpace::UniformConstant);
      break;
    default:
      break;
    }

    const auto sc = GetStorageClassForArgKind(info->arg_kind);

    info->var_id = addSPIRVGlobalVariable(getSPIRVType(type), sc);

    // Map calls to the variable-builtin-function.
    for (auto &U : info->var_fn->uses()) {
      if (auto *call = dyn_cast<CallInst>(U.getUser())) {
        // Operands 0 and 1 of the builtin call carry the descriptor set
        // and binding; only bind calls that match this resource.
        const auto set = unsigned(
            dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
        const auto binding = unsigned(
            dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
        if (set == info->descriptor_set && binding == info->binding) {
          switch (info->arg_kind) {
          case clspv::ArgKind::Buffer:
          case clspv::ArgKind::BufferUBO:
          case clspv::ArgKind::Pod:
          case clspv::ArgKind::PodUBO:
          case clspv::ArgKind::PodPushConstant:
            // The call maps to the variable directly.
            VMap[call] = info->var_id;
            break;
          case clspv::ArgKind::Sampler:
          case clspv::ArgKind::SampledImage:
          case clspv::ArgKind::StorageImage:
            // The call maps to a load we generate later.
            ResourceVarDeferredLoadCalls[call] = info->var_id;
            break;
          default:
            llvm_unreachable("Unhandled arg kind");
          }
        }
      }
    }
  }

  // Generate associated decorations.
  SPIRVOperandVec Ops;
  for (auto *info : ModuleOrderedResourceVars) {
    // Push constants don't need descriptor set or binding decorations.
    if (info->arg_kind == clspv::ArgKind::PodPushConstant)
      continue;

    // Decorate with DescriptorSet and Binding.
    Ops.clear();
    Ops << info->var_id << spv::DecorationDescriptorSet << info->descriptor_set;
    addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);

    Ops.clear();
    Ops << info->var_id << spv::DecorationBinding << info->binding;
    addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);

    if (info->coherent) {
      // Decorate with Coherent if required for the variable.
      Ops.clear();
      Ops << info->var_id << spv::DecorationCoherent;
      addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
    }

    // Generate NonWritable and NonReadable
    switch (info->arg_kind) {
    case clspv::ArgKind::Buffer:
    case clspv::ArgKind::BufferUBO:
      // OpenCL constant-address-space buffers are read-only.
      if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
          clspv::AddressSpace::Constant) {
        Ops.clear();
        Ops << info->var_id << spv::DecorationNonWritable;
        addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
      }
      break;
    case clspv::ArgKind::StorageImage: {
      auto *type = info->var_fn->getReturnType();
      auto *struct_ty = cast<StructType>(type->getPointerElementType());
      // TODO(alan-baker): This is conservative. If compiling for OpenCL 2.0 or
      // above, the compiler treats all write_only images as read_write images.
      // Write-only image types are identified by the "_wo_t" infix in the
      // opaque struct name.
      if (struct_ty->getName().contains("_wo_t")) {
        Ops.clear();
        Ops << info->var_id << spv::DecorationNonReadable;
        addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
      }
      break;
    }
    default:
      break;
    }
  }
}
2373
// Translates one LLVM global variable into a SPIR-V OpVariable, with special
// handling for builtins:
//  - WorkgroupSize: initializer becomes an OpConstantComposite (when all
//    kernels agree on reqd_work_group_size metadata) or spec constants.
//  - WorkDim / GlobalOffset (OpenCL builtins): initializer becomes
//    SpecId-decorated spec constants.
//  - Module-scope constants in storage buffers: data is emitted through a
//    reflection instruction and the variable gets DescriptorSet/Binding.
void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
  ValueMapType &VMap = getValueMap();
  std::vector<SPIRVID> &BuiltinDimVec = getBuiltinDimVec();
  const DataLayout &DL = GV.getParent()->getDataLayout();

  const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
  Type *Ty = GV.getType();
  PointerType *PTy = cast<PointerType>(Ty);

  SPIRVID InitializerID;

  // Workgroup size is handled differently (it goes into a constant)
  if (spv::BuiltInWorkgroupSize == BuiltinType) {
    // 0xFFFFFFFF sentinels mean "no reqd_work_group_size seen yet".
    uint32_t PrevXDimCst = 0xFFFFFFFF;
    uint32_t PrevYDimCst = 0xFFFFFFFF;
    uint32_t PrevZDimCst = 0xFFFFFFFF;
    // True while every kernel seen so far carries identical
    // reqd_work_group_size metadata.
    bool HasMD = true;
    for (Function &Func : *GV.getParent()) {
      if (Func.isDeclaration()) {
        continue;
      }

      // We only need to check kernels.
      if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
        continue;
      }

      if (const MDNode *MD =
              dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
        uint32_t CurXDimCst = static_cast<uint32_t>(
            mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
        uint32_t CurYDimCst = static_cast<uint32_t>(
            mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
        uint32_t CurZDimCst = static_cast<uint32_t>(
            mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());

        if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
            PrevZDimCst == 0xFFFFFFFF) {
          // First kernel with metadata: remember its sizes.
          PrevXDimCst = CurXDimCst;
          PrevYDimCst = CurYDimCst;
          PrevZDimCst = CurZDimCst;
        } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
                   CurZDimCst != PrevZDimCst) {
          // Kernels disagree: fall back to spec constants below.
          HasMD = false;
          continue;
        } else {
          // Same sizes as before; constant already emitted.
          continue;
        }

        //
        // Generate OpConstantComposite.
        //
        // Ops[0] : Result Type ID
        // Ops[1] : Constant size for x dimension.
        // Ops[2] : Constant size for y dimension.
        // Ops[3] : Constant size for z dimension.
        SPIRVOperandVec Ops;

        SPIRVID XDimCstID =
            getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(0)));
        SPIRVID YDimCstID =
            getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(1)));
        SPIRVID ZDimCstID =
            getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(2)));

        Ops << Ty->getPointerElementType() << XDimCstID << YDimCstID
            << ZDimCstID;

        InitializerID =
            addSPIRVInst<kGlobalVariables>(spv::OpConstantComposite, Ops);
      } else {
        HasMD = false;
      }
    }

    // If all kernels do not have metadata for reqd_work_group_size, generate
    // OpSpecConstants for x/y/z dimension.
    if (!HasMD || clspv::Option::NonUniformNDRangeSupported()) {
      //
      // Generate OpSpecConstants for x/y/z dimension.
      //
      // Ops[0] : Result Type ID
      // Ops[1] : Constant size for x/y/z dimension (Literal Number).

      // Allocate spec constants for workgroup size.
      clspv::AddWorkgroupSpecConstants(module);

      SPIRVOperandVec Ops;
      SPIRVID result_type_id = getSPIRVType(
          dyn_cast<VectorType>(Ty->getPointerElementType())->getElementType());

      // X Dimension (default literal value 1)
      Ops << result_type_id << 1;
      SPIRVID XDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);

      // Y Dimension
      Ops.clear();
      Ops << result_type_id << 1;
      SPIRVID YDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);

      // Z Dimension
      Ops.clear();
      Ops << result_type_id << 1;
      SPIRVID ZDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);

      // Record the ids so the SpecId decorations can be emitted later.
      BuiltinDimVec.push_back(XDimCstID);
      BuiltinDimVec.push_back(YDimCstID);
      BuiltinDimVec.push_back(ZDimCstID);

      //
      // Generate OpSpecConstantComposite.
      //
      // Ops[0] : Result Type ID
      // Ops[1] : Constant size for x dimension.
      // Ops[2] : Constant size for y dimension.
      // Ops[3] : Constant size for z dimension.
      Ops.clear();
      Ops << Ty->getPointerElementType() << XDimCstID << YDimCstID << ZDimCstID;

      InitializerID =
          addSPIRVInst<kConstants>(spv::OpSpecConstantComposite, Ops);
    }
  } else if (BuiltinType == spv::BuiltInWorkDim) {
    // 1. Generate a specialization constant with a default of 3.
    // 2. Allocate and annotate a SpecId for the constant.
    // 3. Use the spec constant as the initializer for the variable.
    SPIRVOperandVec Ops;

    //
    // Generate OpSpecConstant.
    //
    // Ops[0] : Result Type ID
    // Ops[1] : Default literal value

    Ops << IntegerType::get(GV.getContext(), 32) << 3;

    InitializerID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);

    //
    // Generate SpecId decoration.
    //
    // Ops[0] : target
    // Ops[1] : decoration
    // Ops[2] : SpecId
    auto spec_id = AllocateSpecConstant(module, SpecConstant::kWorkDim);
    Ops.clear();
    Ops << InitializerID << spv::DecorationSpecId << spec_id;

    addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
  } else if (BuiltinType == spv::BuiltInGlobalOffset) {
    // 1. Generate a spec constant with a default of {0, 0, 0}.
    // 2. Allocate and annotate SpecIds for the constants.
    // 3. Use the spec constant as the initializer for the variable.
    SPIRVOperandVec Ops;

    //
    // Generate OpSpecConstant for each dimension.
    //
    // Ops[0] : Result Type ID
    // Ops[1] : Default literal value
    //
    Ops << IntegerType::get(GV.getContext(), 32) << 0;
    SPIRVID x_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);

    Ops.clear();
    Ops << IntegerType::get(GV.getContext(), 32) << 0;
    SPIRVID y_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);

    Ops.clear();
    Ops << IntegerType::get(GV.getContext(), 32) << 0;
    SPIRVID z_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);

    //
    // Generate SpecId decoration for each dimension.
    //
    // Ops[0] : target
    // Ops[1] : decoration
    // Ops[2] : SpecId
    //
    auto spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetX);
    Ops.clear();
    Ops << x_id << spv::DecorationSpecId << spec_id;
    addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);

    spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetY);
    Ops.clear();
    Ops << y_id << spv::DecorationSpecId << spec_id;
    addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);

    spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetZ);
    Ops.clear();
    Ops << z_id << spv::DecorationSpecId << spec_id;
    addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);

    //
    // Generate OpSpecConstantComposite.
    //
    // Ops[0] : type id
    // Ops[1..n-1] : elements
    //
    Ops.clear();
    Ops << GV.getType()->getPointerElementType() << x_id << y_id << z_id;
    InitializerID = addSPIRVInst<kConstants>(spv::OpSpecConstantComposite, Ops);
  }

  const auto AS = PTy->getAddressSpace();
  const auto spvSC = GetStorageClass(AS);

  // Constant-address-space globals whose data the host supplies through a
  // storage buffer (option-controlled) must not embed their initializer.
  const bool module_scope_constant_external_init =
      (AS == AddressSpace::Constant) && GV.hasInitializer() &&
      clspv::Option::ModuleConstantsInStorageBuffer();

  if (GV.hasInitializer()) {
    auto GVInit = GV.getInitializer();
    if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
      InitializerID = getSPIRVValue(GVInit);
    }
  }

  SPIRVID var_id =
      addSPIRVGlobalVariable(getSPIRVType(Ty), spvSC, InitializerID);

  VMap[&GV] = var_id;

  auto IsOpenCLBuiltin = [](spv::BuiltIn builtin) {
    return builtin == spv::BuiltInWorkDim ||
           builtin == spv::BuiltInGlobalOffset;
  };

  // If we have a builtin (not an OpenCL builtin).
  if (spv::BuiltInMax != BuiltinType && !IsOpenCLBuiltin(BuiltinType)) {
    //
    // Generate OpDecorate.
    //
    // DOps[0] = Target ID
    // DOps[1] = Decoration (Builtin)
    // DOps[2] = BuiltIn ID
    SPIRVID ResultID;

    // WorkgroupSize is different, we decorate the constant composite that has
    // its value, rather than the variable that we use to access the value.
    if (spv::BuiltInWorkgroupSize == BuiltinType) {
      ResultID = InitializerID;
      // Save both the value and variable IDs for later.
      WorkgroupSizeValueID = InitializerID;
      WorkgroupSizeVarID = getSPIRVValue(&GV);
    } else {
      ResultID = getSPIRVValue(&GV);
    }

    SPIRVOperandVec Ops;
    Ops << ResultID << spv::DecorationBuiltIn << BuiltinType;

    addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
  } else if (module_scope_constant_external_init) {
    // This module scope constant is initialized from a storage buffer with data
    // provided by the host at binding 0 of the next descriptor set.
    const uint32_t descriptor_set = TakeDescriptorIndex(module);

    // Emit the initializer as a reflection instruction.
    // Use "kind,buffer" to indicate storage buffer. We might want to expand
    // that later to other types, like uniform buffer.
    std::string hexbytes;
    llvm::raw_string_ostream str(hexbytes);
    clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());

    // Reflection instruction for constant data.
    SPIRVOperandVec Ops;
    auto data_id = addSPIRVInst<kDebug>(spv::OpString, str.str().c_str());
    Ops << getSPIRVType(Type::getVoidTy(module->getContext()))
        << getReflectionImport() << reflection::ExtInstConstantDataStorageBuffer
        << getSPIRVInt32Constant(descriptor_set) << getSPIRVInt32Constant(0)
        << data_id;
    addSPIRVInst<kReflection>(spv::OpExtInst, Ops);

    // OpDecorate %var DescriptorSet <descriptor_set>
    Ops.clear();
    Ops << var_id << spv::DecorationDescriptorSet << descriptor_set;
    addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);

    // OpDecorate %var Binding <binding>
    Ops.clear();
    Ops << var_id << spv::DecorationBinding << 0;
    addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
  }
}
2660
// Emits the SPIR-V prologue for function |F|: the OpFunction instruction and,
// for non-kernel functions, one OpFunctionParameter per argument. Records the
// resulting ids in the value map, and registers kernels (SPIR_KERNEL calling
// convention) as entry points for later OpEntryPoint generation.
void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
  ValueMapType &VMap = getValueMap();
  EntryPointVecType &EntryPoints = getEntryPointVec();
  auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
  auto &GlobalConstArgSet = getGlobalConstArgSet();

  FunctionType *FTy = F.getFunctionType();

  //
  // Generate OPFunction.
  //

  // FOps[0] : Result Type ID
  // FOps[1] : Function Control
  // FOps[2] : Function Type ID
  SPIRVOperandVec FOps;

  // Find SPIRV instruction for return type.
  FOps << FTy->getReturnType();

  // Check function attributes for SPIRV Function Control.
  // Map LLVM inlining/memory attributes onto the SPIR-V function control mask.
  uint32_t FuncControl = spv::FunctionControlMaskNone;
  if (F.hasFnAttribute(Attribute::AlwaysInline)) {
    FuncControl |= spv::FunctionControlInlineMask;
  }
  if (F.hasFnAttribute(Attribute::NoInline)) {
    FuncControl |= spv::FunctionControlDontInlineMask;
  }
  // TODO: Check llvm attribute for Function Control Pure.
  if (F.hasFnAttribute(Attribute::ReadOnly)) {
    FuncControl |= spv::FunctionControlPureMask;
  }
  // TODO: Check llvm attribute for Function Control Const.
  if (F.hasFnAttribute(Attribute::ReadNone)) {
    FuncControl |= spv::FunctionControlConstMask;
  }

  FOps << FuncControl;

  SPIRVID FTyID;
  if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
    // Kernels are given a void()-style signature here: the parameter list is
    // deliberately empty (kernel arguments are communicated via resource
    // variables, not OpFunctionParameter).
    SmallVector<Type *, 4> NewFuncParamTys;
    FunctionType *NewFTy =
        FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
    FTyID = getSPIRVType(NewFTy);
  } else {
    // Handle regular function with global constant parameters.
    // If this function type was rewritten to take a module-scope-private
    // pointer for a global constant argument, use the rewritten type.
    if (GlobalConstFuncTyMap.count(FTy)) {
      FTyID = getSPIRVType(GlobalConstFuncTyMap[FTy].first);
    } else {
      FTyID = getSPIRVType(FTy);
    }
  }

  FOps << FTyID;

  // Generate SPIRV instruction for function.
  SPIRVID FID = addSPIRVInst(spv::OpFunction, FOps);
  VMap[&F] = FID;

  // Kernels become entry points; remembered for GenerateModuleInfo().
  if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
    EntryPoints.push_back(std::make_pair(&F, FID));
  }

  if (clspv::Option::ShowIDs()) {
    errs() << "Function " << F.getName() << " is " << FID.get() << "\n";
  }

  //
  // Generate OpFunctionParameter for Normal function.
  //
  if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {

    // Iterate Argument for name instead of param type from function type.
    unsigned ArgIdx = 0;
    for (Argument &Arg : F.args()) {
      // ParamOps[0] : Result Type ID
      SPIRVOperandVec Ops;

      // Find SPIRV instruction for parameter type.
      SPIRVID ParamTyID = getSPIRVType(Arg.getType());
      if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
        if (GlobalConstFuncTyMap.count(FTy)) {
          if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
            // This is the argument that was remapped to a module-scope
            // private pointer; switch its type and remember the Argument so
            // later passes treat it as a global-constant parameter.
            Type *EleTy = PTy->getPointerElementType();
            Type *ArgTy =
                PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
            ParamTyID = getSPIRVType(ArgTy);
            GlobalConstArgSet.insert(&Arg);
          }
        }
      }
      Ops << ParamTyID;

      // Generate SPIRV instruction for parameter.
      SPIRVID param_id = addSPIRVInst(spv::OpFunctionParameter, Ops);
      VMap[&Arg] = param_id;

      if (CalledWithCoherentResource(Arg)) {
        // If the arg is passed a coherent resource ever, then decorate this
        // parameter with Coherent too.
        Ops.clear();
        Ops << param_id << spv::DecorationCoherent;
        addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
      }

      ArgIdx++;
    }
  }
}
2771
// Emits the module-level SPIR-V sections in their required order:
// OpCapability, OpExtension, OpMemoryModel, OpEntryPoint, OpExecutionMode,
// OpSource, and the SpecId decorations for a specialization-constant
// workgroup size.
void SPIRVProducerPass::GenerateModuleInfo() {
  EntryPointVecType &EntryPoints = getEntryPointVec();
  auto &EntryPointInterfaces = getEntryPointInterfacesList();
  std::vector<SPIRVID> &BuiltinDimVec = getBuiltinDimVec();

  SPIRVOperandVec Ops;

  // Emit one OpCapability per capability accumulated during codegen.
  for (auto Capability : CapabilitySet) {
    //
    // Generate OpCapability
    //
    // Ops[0] = Capability
    addSPIRVInst<kCapabilities>(spv::OpCapability, Capability);
  }

  // Always add the storage buffer extension
  {
    //
    // Generate OpExtension.
    //
    // Ops[0] = Name (Literal String)
    //
    addSPIRVInst<kExtensions>(spv::OpExtension,
                              "SPV_KHR_storage_buffer_storage_class");
  }

  if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
    //
    // Generate OpExtension.
    //
    // Ops[0] = Name (Literal String)
    //
    addSPIRVInst<kExtensions>(spv::OpExtension, "SPV_KHR_variable_pointers");
  }

  //
  // Generate OpMemoryModel
  //
  // Memory model for Vulkan will always be GLSL450.

  // Ops[0] = Addressing Model
  // Ops[1] = Memory Model
  Ops.clear();
  Ops << spv::AddressingModelLogical << spv::MemoryModelGLSL450;

  addSPIRVInst<kMemoryModel>(spv::OpMemoryModel, Ops);

  //
  // Generate OpEntryPoint
  //
  for (auto EntryPoint : EntryPoints) {
    // Ops[0] = Execution Model
    // Ops[1] = EntryPoint ID
    // Ops[2] = Name (Literal String)
    // ...
    //
    // TODO: Do we need to consider Interface ID for forward references???
    Ops.clear();
    const StringRef &name = EntryPoint.first->getName();
    Ops << spv::ExecutionModelGLCompute << EntryPoint.second << name;

    // NOTE(review): the same interface list is appended to every entry
    // point — presumably the interface variables are shared module-wide.
    for (auto &Interface : EntryPointInterfaces) {
      Ops << Interface;
    }

    addSPIRVInst<kEntryPoints>(spv::OpEntryPoint, Ops);
  }

  // A LocalSize execution mode is only emitted when the workgroup size is
  // not driven by specialization constants (BuiltinDimVec empty below).
  if (BuiltinDimVec.empty()) {
    for (auto EntryPoint : EntryPoints) {
      const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
                             ->getMetadata("reqd_work_group_size");
      if ((MD != nullptr) && !clspv::Option::NonUniformNDRangeSupported()) {
        //
        // Generate OpExecutionMode
        //

        // Ops[0] = Entry Point ID
        // Ops[1] = Execution Mode
        // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
        Ops.clear();
        Ops << EntryPoint.second << spv::ExecutionModeLocalSize;

        // The reqd_work_group_size metadata holds the three dimensions as
        // ConstantInt operands.
        uint32_t XDim = static_cast<uint32_t>(
            mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
        uint32_t YDim = static_cast<uint32_t>(
            mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
        uint32_t ZDim = static_cast<uint32_t>(
            mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());

        Ops << XDim << YDim << ZDim;

        addSPIRVInst<kExecutionModes>(spv::OpExecutionMode, Ops);
      }
    }
  }

  //
  // Generate OpSource.
  //
  // Ops[0] = SourceLanguage ID
  // Ops[1] = Version (LiteralNum)
  //
  uint32_t LangID = spv::SourceLanguageUnknown;
  uint32_t LangVer = 0;
  switch (clspv::Option::Language()) {
  case clspv::Option::SourceLanguage::OpenCL_C_10:
    LangID = spv::SourceLanguageOpenCL_C;
    LangVer = 100;
    break;
  case clspv::Option::SourceLanguage::OpenCL_C_11:
    LangID = spv::SourceLanguageOpenCL_C;
    LangVer = 110;
    break;
  case clspv::Option::SourceLanguage::OpenCL_C_12:
    LangID = spv::SourceLanguageOpenCL_C;
    LangVer = 120;
    break;
  case clspv::Option::SourceLanguage::OpenCL_C_20:
    LangID = spv::SourceLanguageOpenCL_C;
    LangVer = 200;
    break;
  case clspv::Option::SourceLanguage::OpenCL_C_30:
    LangID = spv::SourceLanguageOpenCL_C;
    LangVer = 300;
    break;
  case clspv::Option::SourceLanguage::OpenCL_CPP:
    LangID = spv::SourceLanguageOpenCL_CPP;
    LangVer = 100;
    break;
  default:
    // Unknown source language: keep SourceLanguageUnknown / version 0.
    break;
  }

  Ops.clear();
  Ops << LangID << LangVer;
  addSPIRVInst<kDebug>(spv::OpSource, Ops);

  if (!BuiltinDimVec.empty()) {
    //
    // Generate OpDecorates for x/y/z dimension.
    //
    // Ops[0] = Target ID
    // Ops[1] = Decoration (SpecId)
    // Ops[2] = Specialization Constant ID (Literal Number)
    // SpecIds 0/1/2 correspond to the X/Y/Z workgroup-size dimensions.

    // X Dimension
    Ops.clear();
    Ops << BuiltinDimVec[0] << spv::DecorationSpecId << 0;
    addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);

    // Y Dimension
    Ops.clear();
    Ops << BuiltinDimVec[1] << spv::DecorationSpecId << 1;
    addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);

    // Z Dimension
    Ops.clear();
    Ops << BuiltinDimVec[2] << spv::DecorationSpecId << 2;
    addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
  }
}
2934
David Netob6e2e062018-04-25 10:32:06 -04002935void SPIRVProducerPass::GenerateEntryPointInitialStores() {
2936 // Work around a driver bug. Initializers on Private variables might not
2937 // work. So the start of the kernel should store the initializer value to the
2938 // variables. Yes, *every* entry point pays this cost if *any* entry point
2939 // uses this builtin. At this point I judge this to be an acceptable tradeoff
2940 // of complexity vs. runtime, for a broken driver.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002941 // TODO(dneto): Remove this at some point once fixed drivers are widely
2942 // available.
SJW01901d92020-05-21 08:58:31 -05002943 if (WorkgroupSizeVarID.isValid()) {
2944 assert(WorkgroupSizeValueID.isValid());
David Netob6e2e062018-04-25 10:32:06 -04002945
SJWf93f5f32020-05-05 07:27:56 -05002946 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05002947 Ops << WorkgroupSizeVarID << WorkgroupSizeValueID;
David Netob6e2e062018-04-25 10:32:06 -04002948
SJWf93f5f32020-05-05 07:27:56 -05002949 addSPIRVInst(spv::OpStore, Ops);
David Netob6e2e062018-04-25 10:32:06 -04002950 }
2951}
2952
David Neto22f144c2017-06-12 14:26:21 -04002953void SPIRVProducerPass::GenerateFuncBody(Function &F) {
David Neto22f144c2017-06-12 14:26:21 -04002954 ValueMapType &VMap = getValueMap();
2955
David Netob6e2e062018-04-25 10:32:06 -04002956 const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;
David Neto22f144c2017-06-12 14:26:21 -04002957
2958 for (BasicBlock &BB : F) {
2959 // Register BasicBlock to ValueMap.
David Neto22f144c2017-06-12 14:26:21 -04002960
2961 //
2962 // Generate OpLabel for Basic Block.
2963 //
SJWf93f5f32020-05-05 07:27:56 -05002964 VMap[&BB] = addSPIRVInst(spv::OpLabel);
David Neto22f144c2017-06-12 14:26:21 -04002965
David Neto6dcd4712017-06-23 11:06:47 -04002966 // OpVariable instructions must come first.
2967 for (Instruction &I : BB) {
alan-baker5b86ed72019-02-15 08:26:50 -05002968 if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
2969 // Allocating a pointer requires variable pointers.
2970 if (alloca->getAllocatedType()->isPointerTy()) {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04002971 setVariablePointersCapabilities(
2972 alloca->getAllocatedType()->getPointerAddressSpace());
alan-baker5b86ed72019-02-15 08:26:50 -05002973 }
David Neto6dcd4712017-06-23 11:06:47 -04002974 GenerateInstruction(I);
2975 }
2976 }
2977
David Neto22f144c2017-06-12 14:26:21 -04002978 if (&BB == &F.getEntryBlock() && IsKernel) {
David Netob6e2e062018-04-25 10:32:06 -04002979 if (clspv::Option::HackInitializers()) {
2980 GenerateEntryPointInitialStores();
2981 }
David Neto22f144c2017-06-12 14:26:21 -04002982 }
2983
2984 for (Instruction &I : BB) {
David Neto6dcd4712017-06-23 11:06:47 -04002985 if (!isa<AllocaInst>(I)) {
2986 GenerateInstruction(I);
2987 }
David Neto22f144c2017-06-12 14:26:21 -04002988 }
2989 }
2990}
2991
2992spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
2993 const std::map<CmpInst::Predicate, spv::Op> Map = {
2994 {CmpInst::ICMP_EQ, spv::OpIEqual},
2995 {CmpInst::ICMP_NE, spv::OpINotEqual},
2996 {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
2997 {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
2998 {CmpInst::ICMP_ULT, spv::OpULessThan},
2999 {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
3000 {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
3001 {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
3002 {CmpInst::ICMP_SLT, spv::OpSLessThan},
3003 {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
3004 {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
3005 {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
3006 {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
3007 {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
3008 {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
3009 {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
3010 {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
3011 {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
3012 {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
3013 {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
3014 {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
3015 {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};
3016
3017 assert(0 != Map.count(I->getPredicate()));
3018
3019 return Map.at(I->getPredicate());
3020}
3021
3022spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
3023 const std::map<unsigned, spv::Op> Map{
3024 {Instruction::Trunc, spv::OpUConvert},
3025 {Instruction::ZExt, spv::OpUConvert},
3026 {Instruction::SExt, spv::OpSConvert},
3027 {Instruction::FPToUI, spv::OpConvertFToU},
3028 {Instruction::FPToSI, spv::OpConvertFToS},
3029 {Instruction::UIToFP, spv::OpConvertUToF},
3030 {Instruction::SIToFP, spv::OpConvertSToF},
3031 {Instruction::FPTrunc, spv::OpFConvert},
3032 {Instruction::FPExt, spv::OpFConvert},
3033 {Instruction::BitCast, spv::OpBitcast}};
3034
3035 assert(0 != Map.count(I.getOpcode()));
3036
3037 return Map.at(I.getOpcode());
3038}
3039
3040spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
Kévin Petit24272b62018-10-18 19:16:12 +00003041 if (I.getType()->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003042 switch (I.getOpcode()) {
3043 default:
3044 break;
3045 case Instruction::Or:
3046 return spv::OpLogicalOr;
3047 case Instruction::And:
3048 return spv::OpLogicalAnd;
3049 case Instruction::Xor:
3050 return spv::OpLogicalNotEqual;
3051 }
3052 }
3053
alan-bakerb6b09dc2018-11-08 16:59:28 -05003054 const std::map<unsigned, spv::Op> Map{
David Neto22f144c2017-06-12 14:26:21 -04003055 {Instruction::Add, spv::OpIAdd},
3056 {Instruction::FAdd, spv::OpFAdd},
3057 {Instruction::Sub, spv::OpISub},
3058 {Instruction::FSub, spv::OpFSub},
3059 {Instruction::Mul, spv::OpIMul},
3060 {Instruction::FMul, spv::OpFMul},
3061 {Instruction::UDiv, spv::OpUDiv},
3062 {Instruction::SDiv, spv::OpSDiv},
3063 {Instruction::FDiv, spv::OpFDiv},
3064 {Instruction::URem, spv::OpUMod},
3065 {Instruction::SRem, spv::OpSRem},
3066 {Instruction::FRem, spv::OpFRem},
3067 {Instruction::Or, spv::OpBitwiseOr},
3068 {Instruction::Xor, spv::OpBitwiseXor},
3069 {Instruction::And, spv::OpBitwiseAnd},
3070 {Instruction::Shl, spv::OpShiftLeftLogical},
3071 {Instruction::LShr, spv::OpShiftRightLogical},
3072 {Instruction::AShr, spv::OpShiftRightArithmetic}};
3073
3074 assert(0 != Map.count(I.getOpcode()));
3075
3076 return Map.at(I.getOpcode());
3077}
3078
SJW806a5d82020-07-15 12:51:38 -05003079SPIRVID SPIRVProducerPass::getSPIRVBuiltin(spv::BuiltIn BID,
3080 spv::Capability Cap) {
3081 SPIRVID RID;
3082
3083 auto ii = BuiltinConstantMap.find(BID);
3084
3085 if (ii != BuiltinConstantMap.end()) {
3086 return ii->second;
3087 } else {
SJW806a5d82020-07-15 12:51:38 -05003088 addCapability(Cap);
3089
3090 Type *type = PointerType::get(IntegerType::get(module->getContext(), 32),
3091 AddressSpace::Input);
3092
3093 RID = addSPIRVGlobalVariable(getSPIRVType(type), spv::StorageClassInput);
3094
3095 BuiltinConstantMap[BID] = RID;
3096
3097 //
3098 // Generate OpDecorate.
3099 //
3100 // Ops[0] : target
3101 // Ops[1] : decoration
3102 // Ops[2] : SpecId
3103 SPIRVOperandVec Ops;
3104 Ops << RID << spv::DecorationBuiltIn << static_cast<int>(BID);
3105
3106 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
3107 }
3108
3109 return RID;
3110}
3111
// Lowers a call to one of the clspv/SPIR-V intrinsic builtins into SPIR-V.
// Returns the id of the generated instruction, or an invalid/default id for
// cases that map onto previously generated variables and so emit nothing.
SPIRVID
SPIRVProducerPass::GenerateClspvInstruction(CallInst *Call,
                                            const FunctionInfo &FuncInfo) {
  SPIRVID RID;

  switch (FuncInfo.getType()) {
  case Builtins::kClspvCompositeConstruct:
    // Deferred: a placeholder is patched later with the real instruction.
    RID = addSPIRVPlaceholder(Call);
    break;
  case Builtins::kClspvResource: {
    if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
      // Generate an OpLoad
      SPIRVOperandVec Ops;

      Ops << Call->getType()->getPointerElementType()
          << ResourceVarDeferredLoadCalls[Call];

      RID = addSPIRVInst(spv::OpLoad, Ops);

    } else {
      // This maps to an OpVariable we've already generated.
      // No code is generated for the call.
    }
    break;
  }
  case Builtins::kClspvLocal: {
    // Don't codegen an instruction here, but instead map this call directly
    // to the workgroup variable id.
    // Operand 0 holds the spec id selecting the workgroup variable.
    int spec_id = static_cast<int>(
        cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
    const auto &info = LocalSpecIdInfoMap[spec_id];
    RID = info.variable_id;
    break;
  }
  case Builtins::kClspvSamplerVarLiteral: {
    // Sampler initializers become a load of the corresponding sampler.
    // Map this to a load from the variable.
    const auto third_param = static_cast<unsigned>(
        dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue());
    auto sampler_value = third_param;
    if (clspv::Option::UseSamplerMap()) {
      // With a sampler map, the literal is an index into the map rather
      // than the sampler value itself.
      sampler_value = getSamplerMap()[third_param].first;
    }

    // Generate an OpLoad
    SPIRVOperandVec Ops;

    Ops << SamplerTy->getPointerElementType()
        << SamplerLiteralToIDMap[sampler_value];

    RID = addSPIRVInst(spv::OpLoad, Ops);
    break;
  }
  case Builtins::kSpirvAtomicXor: {
    // Handle SPIR-V intrinsics
    SPIRVOperandVec Ops;

    // Only non-void calls produce a result type operand.
    if (!Call->getType()->isVoidTy()) {
      Ops << Call->getType();
    }

    // Forward all call arguments as instruction operands.
    for (unsigned i = 0; i < Call->getNumArgOperands(); i++) {
      Ops << Call->getArgOperand(i);
    }

    RID = addSPIRVInst(spv::OpAtomicXor, Ops);
    break;
  }
  case Builtins::kSpirvOp: {
    // Handle SPIR-V intrinsics
    // Argument 0 carries the SPIR-V opcode; remaining arguments become the
    // instruction's operands. OpNop requests emit nothing.
    auto *arg0 = dyn_cast<ConstantInt>(Call->getArgOperand(0));
    spv::Op opcode = static_cast<spv::Op>(arg0->getZExtValue());
    if (opcode != spv::OpNop) {
      SPIRVOperandVec Ops;

      if (!Call->getType()->isVoidTy()) {
        Ops << Call->getType();
      }

      for (unsigned i = 1; i < Call->getNumArgOperands(); i++) {
        Ops << Call->getArgOperand(i);
      }

      RID = addSPIRVInst(opcode, Ops);
    }
    break;
  }
  case Builtins::kSpirvCopyMemory: {
    //
    // Generate OpCopyMemory.
    //

    // Ops[0] = Dst ID
    // Ops[1] = Src ID
    // Ops[2] = Memory Access
    // Ops[3] = Alignment

    // Call argument 3 is the volatility flag; argument 2 the alignment.
    auto IsVolatile =
        dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue() != 0;

    auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
                                             : spv::MemoryAccessMaskNone;

    auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;

    auto Alignment =
        dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();

    SPIRVOperandVec Ops;
    Ops << Call->getArgOperand(0) << Call->getArgOperand(1) << MemoryAccess
        << static_cast<uint32_t>(Alignment);

    RID = addSPIRVInst(spv::OpCopyMemory, Ops);
    break;
  }
  default:
    llvm_unreachable("Unknown CLSPV Instruction");
    break;
  }
  return RID;
}
3233
// Lowers an OpenCL image builtin call (read_image*, write_image*,
// get_image_*) into the corresponding SPIR-V image instruction(s). Returns
// the id of the final generated instruction.
//
// Integer image reads/writes are produced through a canonical v4int32 type
// with OpBitcast glue around the image instruction.
SPIRVID
SPIRVProducerPass::GenerateImageInstruction(CallInst *Call,
                                            const FunctionInfo &FuncInfo) {
  SPIRVID RID;

  LLVMContext &Context = module->getContext();
  switch (FuncInfo.getType()) {
  case Builtins::kReadImagef:
  case Builtins::kReadImageh:
  case Builtins::kReadImagei:
  case Builtins::kReadImageui: {
    // read_image is converted to OpSampledImage and OpImageSampleExplicitLod.
    // Additionally, OpTypeSampledImage is generated.
    // Three lowering forms, selected by the second parameter and image kind:
    //   - with a sampler:       OpSampledImage + OpImageSampleExplicitLod
    //   - storage image:        OpImageRead
    //   - sampled, no sampler:  OpImageFetch
    const auto image_ty = Call->getArgOperand(0)->getType();
    const auto &pi = FuncInfo.getParameter(1);
    if (pi.isSampler()) {
      //
      // Generate OpSampledImage.
      //
      // Ops[0] = Result Type ID
      // Ops[1] = Image ID
      // Ops[2] = Sampler ID
      //
      SPIRVOperandVec Ops;

      Value *Image = Call->getArgOperand(0);
      Value *Sampler = Call->getArgOperand(1);
      Value *Coordinate = Call->getArgOperand(2);

      TypeMapType &OpImageTypeMap = getImageTypeMap();
      Type *ImageTy = Image->getType()->getPointerElementType();
      SPIRVID ImageTyID = OpImageTypeMap[ImageTy];

      Ops << ImageTyID << Image << Sampler;

      SPIRVID SampledImageID = addSPIRVInst(spv::OpSampledImage, Ops);

      //
      // Generate OpImageSampleExplicitLod.
      //
      // Ops[0] = Result Type ID
      // Ops[1] = Sampled Image ID
      // Ops[2] = Coordinate ID
      // Ops[3] = Image Operands Type ID
      // Ops[4] ... Ops[n] = Operands ID
      //
      Ops.clear();

      // Integer images sample as v4int32 and bitcast to the call's type.
      const bool is_int_image = IsIntImageType(Image->getType());
      SPIRVID result_type;
      if (is_int_image) {
        result_type = v4int32ID;
      } else {
        result_type = getSPIRVType(Call->getType());
      }

      // Sample explicitly at LOD 0.
      Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
      Ops << result_type << SampledImageID << Coordinate
          << spv::ImageOperandsLodMask << CstFP0;

      RID = addSPIRVInst(spv::OpImageSampleExplicitLod, Ops);

      if (is_int_image) {
        // Generate the bitcast.
        Ops.clear();
        Ops << Call->getType() << RID;
        RID = addSPIRVInst(spv::OpBitcast, Ops);
      }
    } else if (IsStorageImageType(image_ty)) {
      // read_image on a storage image is mapped to OpImageRead.
      Value *Image = Call->getArgOperand(0);
      Value *Coordinate = Call->getArgOperand(1);

      //
      // Generate OpImageRead
      //
      // Ops[0] = Result Type ID
      // Ops[1] = Image ID
      // Ops[2] = Coordinate
      // No optional image operands.
      //
      SPIRVOperandVec Ops;

      const bool is_int_image = IsIntImageType(Image->getType());
      SPIRVID result_type;
      if (is_int_image) {
        result_type = v4int32ID;
      } else {
        result_type = getSPIRVType(Call->getType());
      }

      Ops << result_type << Image << Coordinate;
      RID = addSPIRVInst(spv::OpImageRead, Ops);

      if (is_int_image) {
        // Generate the bitcast.
        Ops.clear();
        Ops << Call->getType() << RID;
        RID = addSPIRVInst(spv::OpBitcast, Ops);
      }

      // OpImageRead requires StorageImageReadWithoutFormat.
      addCapability(spv::CapabilityStorageImageReadWithoutFormat);
    } else {
      // read_image on a sampled image (without a sampler) is mapped to
      // OpImageFetch.
      Value *Image = Call->getArgOperand(0);
      Value *Coordinate = Call->getArgOperand(1);

      //
      // Generate OpImageFetch
      //
      // Ops[0] = Result Type ID
      // Ops[1] = Image ID
      // Ops[2] = Coordinate ID
      // Ops[3] = Lod
      // Ops[4] = 0
      //
      SPIRVOperandVec Ops;

      const bool is_int_image = IsIntImageType(Image->getType());
      SPIRVID result_type;
      if (is_int_image) {
        result_type = v4int32ID;
      } else {
        result_type = getSPIRVType(Call->getType());
      }

      // Fetch explicitly at LOD 0 (integer constant here, unlike the
      // float LOD used for sampling above).
      Ops << result_type << Image << Coordinate << spv::ImageOperandsLodMask
          << getSPIRVInt32Constant(0);

      RID = addSPIRVInst(spv::OpImageFetch, Ops);

      if (is_int_image) {
        // Generate the bitcast.
        Ops.clear();
        Ops << Call->getType() << RID;
        RID = addSPIRVInst(spv::OpBitcast, Ops);
      }
    }
    break;
  }

  case Builtins::kWriteImagef:
  case Builtins::kWriteImageh:
  case Builtins::kWriteImagei:
  case Builtins::kWriteImageui: {
    // write_image is mapped to OpImageWrite.
    //
    // Generate OpImageWrite.
    //
    // Ops[0] = Image ID
    // Ops[1] = Coordinate ID
    // Ops[2] = Texel ID
    // Ops[3] = (Optional) Image Operands Type (Literal Number)
    // Ops[4] ... Ops[n] = (Optional) Operands ID
    //
    SPIRVOperandVec Ops;

    Value *Image = Call->getArgOperand(0);
    Value *Coordinate = Call->getArgOperand(1);
    Value *Texel = Call->getArgOperand(2);

    SPIRVID TexelID = getSPIRVValue(Texel);

    const bool is_int_image = IsIntImageType(Image->getType());
    if (is_int_image) {
      // Generate a bitcast to v4int and use it as the texel value.
      Ops << v4int32ID << TexelID;
      TexelID = addSPIRVInst(spv::OpBitcast, Ops);
      Ops.clear();
    }
    Ops << Image << Coordinate << TexelID;
    RID = addSPIRVInst(spv::OpImageWrite, Ops);

    // Image writes require StorageImageWriteWithoutFormat.
    addCapability(spv::CapabilityStorageImageWriteWithoutFormat);
    break;
  }

  case Builtins::kGetImageHeight:
  case Builtins::kGetImageWidth:
  case Builtins::kGetImageDepth:
  case Builtins::kGetImageDim: {
    // get_image_* is mapped to OpImageQuerySize or OpImageQuerySizeLod
    addCapability(spv::CapabilityImageQuery);

    //
    // Generate OpImageQuerySize[Lod]
    //
    // Ops[0] = Image ID
    //
    // Result type has components equal to the dimensionality of the image,
    // plus 1 if the image is arrayed.
    //
    // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
    SPIRVOperandVec Ops;

    // Implement:
    //     %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
    SPIRVID SizesTypeID;

    Value *Image = Call->getArgOperand(0);
    const uint32_t dim = ImageDimensionality(Image->getType());
    const uint32_t components =
        dim + (IsArrayImageType(Image->getType()) ? 1 : 0);
    if (components == 1) {
      SizesTypeID = getSPIRVType(Type::getInt32Ty(Context));
    } else {
      SizesTypeID = getSPIRVType(
          FixedVectorType::get(Type::getInt32Ty(Context), components));
    }
    Ops << SizesTypeID << Image;
    spv::Op query_opcode = spv::OpImageQuerySize;
    if (IsSampledImageType(Image->getType())) {
      query_opcode = spv::OpImageQuerySizeLod;
      // Need explicit 0 for Lod operand.
      Ops << getSPIRVInt32Constant(0);
    }

    RID = addSPIRVInst(query_opcode, Ops);

    // May require an extra instruction to create the appropriate result of
    // the builtin function.
    if (FuncInfo.getType() == Builtins::kGetImageDim) {
      if (dim == 3) {
        // get_image_dim returns an int4 for 3D images.
        //

        // Implement:
        //   %result = OpCompositeConstruct %uint4 %sizes %uint_0
        Ops.clear();
        Ops << FixedVectorType::get(Type::getInt32Ty(Context), 4) << RID
            << getSPIRVInt32Constant(0);

        RID = addSPIRVInst(spv::OpCompositeConstruct, Ops);
      } else if (dim != components) {
        // get_image_dim return an int2 regardless of the arrayedness of the
        // image. If the image is arrayed an element must be dropped from the
        // query result.
        //

        // Implement:
        //   %result = OpVectorShuffle %uint2 %sizes %sizes 0 1
        Ops.clear();
        Ops << FixedVectorType::get(Type::getInt32Ty(Context), 2) << RID << RID
            << 0 << 1;

        RID = addSPIRVInst(spv::OpVectorShuffle, Ops);
      }
    } else if (components > 1) {
      // get_image_{width,height,depth}: extract the single component the
      // builtin asks for from the multi-component query result.
      // Implement:
      //     %result = OpCompositeExtract %uint %sizes <component number>
      Ops.clear();
      Ops << Call->getType() << RID;

      uint32_t component = 0;
      if (FuncInfo.getType() == Builtins::kGetImageHeight)
        component = 1;
      else if (FuncInfo.getType() == Builtins::kGetImageDepth)
        component = 2;
      Ops << component;

      RID = addSPIRVInst(spv::OpCompositeExtract, Ops);
    }
    break;
  }
  default:
    llvm_unreachable("Unsupported Image builtin");
  }

  return RID;
}
3507
3508SPIRVID
3509SPIRVProducerPass::GenerateSubgroupInstruction(CallInst *Call,
3510 const FunctionInfo &FuncInfo) {
3511 SPIRVID RID;
3512
3513 // requires SPIRV version 1.3 or greater
3514 if (SpvVersion() != SPIRVVersion::SPIRV_1_3) {
3515 // llvm_unreachable("SubGroups extension requires SPIRV 1.3 or greater");
3516 // TODO(sjw): error out gracefully
3517 }
3518
3519 auto loadBuiltin = [this, Call](spv::BuiltIn spvBI,
3520 spv::Capability spvCap =
3521 spv::CapabilityGroupNonUniform) {
3522 SPIRVOperandVec Ops;
3523 Ops << Call->getType() << this->getSPIRVBuiltin(spvBI, spvCap);
3524
3525 return addSPIRVInst(spv::OpLoad, Ops);
3526 };
3527
3528 spv::Op op = spv::OpNop;
3529 switch (FuncInfo.getType()) {
3530 case Builtins::kGetSubGroupSize:
3531 return loadBuiltin(spv::BuiltInSubgroupSize);
3532 case Builtins::kGetNumSubGroups:
3533 return loadBuiltin(spv::BuiltInNumSubgroups);
3534 case Builtins::kGetSubGroupId:
3535 return loadBuiltin(spv::BuiltInSubgroupId);
3536 case Builtins::kGetSubGroupLocalId:
3537 return loadBuiltin(spv::BuiltInSubgroupLocalInvocationId);
3538
3539 case Builtins::kSubGroupBroadcast:
3540 if (SpvVersion() < SPIRVVersion::SPIRV_1_5 &&
3541 !dyn_cast<ConstantInt>(Call->getOperand(1))) {
3542 llvm_unreachable("sub_group_broadcast requires constant lane Id for "
3543 "SPIRV version < 1.5");
3544 }
3545 addCapability(spv::CapabilityGroupNonUniformBallot);
3546 op = spv::OpGroupNonUniformBroadcast;
3547 break;
3548
3549 case Builtins::kSubGroupAll:
3550 addCapability(spv::CapabilityGroupNonUniformVote);
3551 op = spv::OpGroupNonUniformAll;
3552 break;
3553 case Builtins::kSubGroupAny:
3554 addCapability(spv::CapabilityGroupNonUniformVote);
3555 op = spv::OpGroupNonUniformAny;
3556 break;
3557 case Builtins::kSubGroupReduceAdd:
3558 case Builtins::kSubGroupScanExclusiveAdd:
3559 case Builtins::kSubGroupScanInclusiveAdd: {
3560 addCapability(spv::CapabilityGroupNonUniformArithmetic);
3561 if (FuncInfo.getParameter(0).type_id == Type::IntegerTyID) {
3562 op = spv::OpGroupNonUniformIAdd;
3563 } else {
3564 op = spv::OpGroupNonUniformFAdd;
3565 }
3566 break;
3567 }
3568 case Builtins::kSubGroupReduceMin:
3569 case Builtins::kSubGroupScanExclusiveMin:
3570 case Builtins::kSubGroupScanInclusiveMin: {
3571 addCapability(spv::CapabilityGroupNonUniformArithmetic);
3572 auto &param = FuncInfo.getParameter(0);
3573 if (param.type_id == Type::IntegerTyID) {
3574 op = param.is_signed ? spv::OpGroupNonUniformSMin
3575 : spv::OpGroupNonUniformUMin;
3576 } else {
3577 op = spv::OpGroupNonUniformFMin;
3578 }
3579 break;
3580 }
3581 case Builtins::kSubGroupReduceMax:
3582 case Builtins::kSubGroupScanExclusiveMax:
3583 case Builtins::kSubGroupScanInclusiveMax: {
3584 addCapability(spv::CapabilityGroupNonUniformArithmetic);
3585 auto &param = FuncInfo.getParameter(0);
3586 if (param.type_id == Type::IntegerTyID) {
3587 op = param.is_signed ? spv::OpGroupNonUniformSMax
3588 : spv::OpGroupNonUniformUMax;
3589 } else {
3590 op = spv::OpGroupNonUniformFMax;
3591 }
3592 break;
3593 }
3594
3595 case Builtins::kGetEnqueuedNumSubGroups:
3596 // TODO(sjw): requires CapabilityKernel (incompatible with Shader)
3597 case Builtins::kGetMaxSubGroupSize:
3598 // TODO(sjw): use SpecConstant, capability Kernel (incompatible with Shader)
3599 case Builtins::kSubGroupBarrier:
3600 case Builtins::kSubGroupReserveReadPipe:
3601 case Builtins::kSubGroupReserveWritePipe:
3602 case Builtins::kSubGroupCommitReadPipe:
3603 case Builtins::kSubGroupCommitWritePipe:
3604 case Builtins::kGetKernelSubGroupCountForNdrange:
3605 case Builtins::kGetKernelMaxSubGroupSizeForNdrange:
3606 default:
3607 Call->print(errs());
3608 llvm_unreachable("Unsupported sub_group operation");
3609 break;
3610 }
3611
3612 assert(op != spv::OpNop);
3613
3614 SPIRVOperandVec Operands;
3615
3616 //
3617 // Generate OpGroupNonUniform*
3618 //
3619 // Ops[0] = Result Type ID
3620 // Ops[1] = ScopeSubgroup
3621 // Ops[2] = Value ID
3622 // Ops[3] = Local ID
3623
3624 // The result type.
3625 Operands << Call->getType();
3626
3627 // Subgroup Scope
3628 Operands << getSPIRVInt32Constant(spv::ScopeSubgroup);
3629
3630 switch (FuncInfo.getType()) {
3631 case Builtins::kSubGroupReduceAdd:
3632 case Builtins::kSubGroupReduceMin:
3633 case Builtins::kSubGroupReduceMax:
3634 Operands << spv::GroupOperationReduce;
3635 break;
3636 case Builtins::kSubGroupScanExclusiveAdd:
3637 case Builtins::kSubGroupScanExclusiveMin:
3638 case Builtins::kSubGroupScanExclusiveMax:
3639 Operands << spv::GroupOperationExclusiveScan;
3640 break;
3641 case Builtins::kSubGroupScanInclusiveAdd:
3642 case Builtins::kSubGroupScanInclusiveMin:
3643 case Builtins::kSubGroupScanInclusiveMax:
3644 Operands << spv::GroupOperationInclusiveScan;
3645 break;
3646 default:
3647 break;
3648 }
3649
3650 for (Use &use : Call->arg_operands()) {
3651 Operands << use.get();
3652 }
3653
3654 return addSPIRVInst(op, Operands);
3655}
3656
// Translates a call instruction into SPIR-V and returns the result id.
// Builtins in the Clspv, Image, and SubgroupsKHR groups are delegated to
// their dedicated generators. popcount maps to OpBitCount. Everything else
// is either a GLSL.std.450 extended instruction (possibly followed by a
// fix-up instruction for "indirect" builtins like clz/acospi) or a real
// user function call, which is deferred via a placeholder until the
// callee's id is known.
SPIRVID SPIRVProducerPass::GenerateInstructionFromCall(CallInst *Call) {
  LLVMContext &Context = module->getContext();

  auto &func_info = Builtins::Lookup(Call->getCalledFunction());
  auto func_type = func_info.getType();

  // Dispatch builtin groups to their specialized generators.
  if (BUILTIN_IN_GROUP(func_type, Clspv)) {
    return GenerateClspvInstruction(Call, func_info);
  } else if (BUILTIN_IN_GROUP(func_type, Image)) {
    return GenerateImageInstruction(Call, func_info);
  } else if (BUILTIN_IN_GROUP(func_type, SubgroupsKHR)) {
    return GenerateSubgroupInstruction(Call, func_info);
  }

  SPIRVID RID;

  switch (func_type) {
  case Builtins::kPopcount: {
    //
    // Generate OpBitCount
    //
    // Ops[0] = Result Type ID
    // Ops[1] = Base ID
    SPIRVOperandVec Ops;
    Ops << Call->getType() << Call->getOperand(0);

    RID = addSPIRVInst(spv::OpBitCount, Ops);
    break;
  }
  default: {
    // Zero (kGlslExtInstBad) means this is not an extended instruction.
    glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(func_info);

    if (EInst) {
      SPIRVID ExtInstImportID = getOpExtInstImportID();

      //
      // Generate OpExtInst.
      //

      // Ops[0] = Result Type ID
      // Ops[1] = Set ID (OpExtInstImport ID)
      // Ops[2] = Instruction Number (Literal Number)
      // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
      SPIRVOperandVec Ops;

      Ops << Call->getType() << ExtInstImportID << EInst;

      // The builtin's arguments map directly to ext-inst operands.
      for (auto &use : Call->arg_operands()) {
        Ops << use.get();
      }

      RID = addSPIRVInst(spv::OpExtInst, Ops);

      // "Indirect" builtins (clz, acospi, asinpi, atanpi, atan2pi) are
      // implemented as an ext-inst plus a fix-up arithmetic instruction.
      const auto IndirectExtInst = getIndirectExtInstEnum(func_info);
      if (IndirectExtInst != kGlslExtInstBad) {
        // Generate one more instruction that uses the result of the extended
        // instruction. Its result id is one more than the id of the
        // extended instruction.
        //
        // Note: this lambda consumes and then overwrites RID (captured by
        // reference), chaining the new instruction onto the ext-inst result.
        auto generate_extra_inst = [this, &Context, &Call,
                                    &RID](spv::Op opcode, Constant *constant) {
          //
          // Generate instruction like:
          // result = opcode constant <extinst-result>
          //
          // Ops[0] = Result Type ID
          // Ops[1] = Operand 0 ;; the constant, suitably splatted
          // Ops[2] = Operand 1 ;; the result of the extended instruction
          SPIRVOperandVec Ops;

          Type *resultTy = Call->getType();

          // For vector results, splat the scalar constant to match.
          if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
            constant =
                ConstantVector::getSplat(vectorTy->getElementCount(), constant);
          }
          Ops << resultTy << constant << RID;

          RID = addSPIRVInst(opcode, Ops);
        };

        auto IntTy = Type::getInt32Ty(Context);
        switch (IndirectExtInst) {
        case glsl::ExtInstFindUMsb: // Implementing clz
          // clz(x) == 31 - FindUMsb(x) for 32-bit values.
          generate_extra_inst(spv::OpISub, ConstantInt::get(IntTy, 31));
          break;
        case glsl::ExtInstAcos:  // Implementing acospi
        case glsl::ExtInstAsin:  // Implementing asinpi
        case glsl::ExtInstAtan:  // Implementing atanpi
        case glsl::ExtInstAtan2: // Implementing atan2pi
          // The *pi variants divide the radian result by pi.
          generate_extra_inst(
              spv::OpFMul,
              ConstantFP::get(Type::getFloatTy(Context), kOneOverPi));
          break;

        default:
          assert(false && "internally inconsistent");
        }
      }
    } else {
      // A real function call (not builtin)
      // Call instruction is deferred because it needs function's ID.
      RID = addSPIRVPlaceholder(Call);
    }

    break;
  }
  }

  return RID;
}
3767
David Neto22f144c2017-06-12 14:26:21 -04003768void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
David Neto22f144c2017-06-12 14:26:21 -04003769 ValueMapType &VMap = getValueMap();
SJW806a5d82020-07-15 12:51:38 -05003770 LLVMContext &Context = module->getContext();
David Neto22f144c2017-06-12 14:26:21 -04003771
SJW806a5d82020-07-15 12:51:38 -05003772 SPIRVID RID;
David Neto22f144c2017-06-12 14:26:21 -04003773
3774 switch (I.getOpcode()) {
3775 default: {
3776 if (Instruction::isCast(I.getOpcode())) {
3777 //
3778 // Generate SPIRV instructions for cast operators.
3779 //
3780
David Netod2de94a2017-08-28 17:27:47 -04003781 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003782 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04003783 auto toI8 = Ty == Type::getInt8Ty(Context);
3784 auto fromI32 = OpTy == Type::getInt32Ty(Context);
David Neto22f144c2017-06-12 14:26:21 -04003785 // Handle zext, sext and uitofp with i1 type specially.
3786 if ((I.getOpcode() == Instruction::ZExt ||
3787 I.getOpcode() == Instruction::SExt ||
3788 I.getOpcode() == Instruction::UIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05003789 OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003790 //
3791 // Generate OpSelect.
3792 //
3793
3794 // Ops[0] = Result Type ID
3795 // Ops[1] = Condition ID
3796 // Ops[2] = True Constant ID
3797 // Ops[3] = False Constant ID
SJWf93f5f32020-05-05 07:27:56 -05003798 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003799
SJW01901d92020-05-21 08:58:31 -05003800 Ops << I.getType() << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04003801
David Neto22f144c2017-06-12 14:26:21 -04003802 if (I.getOpcode() == Instruction::ZExt) {
SJW01901d92020-05-21 08:58:31 -05003803 Ops << ConstantInt::get(I.getType(), 1);
David Neto22f144c2017-06-12 14:26:21 -04003804 } else if (I.getOpcode() == Instruction::SExt) {
SJW01901d92020-05-21 08:58:31 -05003805 Ops << ConstantInt::getSigned(I.getType(), -1);
David Neto22f144c2017-06-12 14:26:21 -04003806 } else {
SJW01901d92020-05-21 08:58:31 -05003807 Ops << ConstantFP::get(Context, APFloat(1.0f));
David Neto22f144c2017-06-12 14:26:21 -04003808 }
David Neto22f144c2017-06-12 14:26:21 -04003809
David Neto22f144c2017-06-12 14:26:21 -04003810 if (I.getOpcode() == Instruction::ZExt) {
SJW01901d92020-05-21 08:58:31 -05003811 Ops << Constant::getNullValue(I.getType());
David Neto22f144c2017-06-12 14:26:21 -04003812 } else if (I.getOpcode() == Instruction::SExt) {
SJW01901d92020-05-21 08:58:31 -05003813 Ops << Constant::getNullValue(I.getType());
David Neto22f144c2017-06-12 14:26:21 -04003814 } else {
SJW01901d92020-05-21 08:58:31 -05003815 Ops << ConstantFP::get(Context, APFloat(0.0f));
David Neto22f144c2017-06-12 14:26:21 -04003816 }
David Neto22f144c2017-06-12 14:26:21 -04003817
SJWf93f5f32020-05-05 07:27:56 -05003818 RID = addSPIRVInst(spv::OpSelect, Ops);
alan-bakerb39c8262019-03-08 14:03:37 -05003819 } else if (!clspv::Option::Int8Support() &&
3820 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04003821 // The SPIR-V target type is a 32-bit int. Keep only the bottom
3822 // 8 bits.
3823 // Before:
3824 // %result = trunc i32 %a to i8
3825 // After
3826 // %result = OpBitwiseAnd %uint %a %uint_255
3827
SJWf93f5f32020-05-05 07:27:56 -05003828 SPIRVOperandVec Ops;
David Netod2de94a2017-08-28 17:27:47 -04003829
SJW806a5d82020-07-15 12:51:38 -05003830 Ops << OpTy << I.getOperand(0) << getSPIRVInt32Constant(255);
David Netod2de94a2017-08-28 17:27:47 -04003831
SJWf93f5f32020-05-05 07:27:56 -05003832 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003833 } else {
3834 // Ops[0] = Result Type ID
3835 // Ops[1] = Source Value ID
SJWf93f5f32020-05-05 07:27:56 -05003836 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003837
SJW01901d92020-05-21 08:58:31 -05003838 Ops << I.getType() << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04003839
SJWf93f5f32020-05-05 07:27:56 -05003840 RID = addSPIRVInst(GetSPIRVCastOpcode(I), Ops);
David Neto22f144c2017-06-12 14:26:21 -04003841 }
3842 } else if (isa<BinaryOperator>(I)) {
3843 //
3844 // Generate SPIRV instructions for binary operators.
3845 //
3846
3847 // Handle xor with i1 type specially.
3848 if (I.getOpcode() == Instruction::Xor &&
3849 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00003850 ((isa<ConstantInt>(I.getOperand(0)) &&
3851 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
3852 (isa<ConstantInt>(I.getOperand(1)) &&
3853 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04003854 //
3855 // Generate OpLogicalNot.
3856 //
3857 // Ops[0] = Result Type ID
3858 // Ops[1] = Operand
SJWf93f5f32020-05-05 07:27:56 -05003859 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003860
SJW01901d92020-05-21 08:58:31 -05003861 Ops << I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003862
3863 Value *CondV = I.getOperand(0);
3864 if (isa<Constant>(I.getOperand(0))) {
3865 CondV = I.getOperand(1);
3866 }
SJW01901d92020-05-21 08:58:31 -05003867 Ops << CondV;
David Neto22f144c2017-06-12 14:26:21 -04003868
SJWf93f5f32020-05-05 07:27:56 -05003869 RID = addSPIRVInst(spv::OpLogicalNot, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003870 } else {
3871 // Ops[0] = Result Type ID
3872 // Ops[1] = Operand 0
3873 // Ops[2] = Operand 1
SJWf93f5f32020-05-05 07:27:56 -05003874 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003875
SJW01901d92020-05-21 08:58:31 -05003876 Ops << I.getType() << I.getOperand(0) << I.getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04003877
SJWf93f5f32020-05-05 07:27:56 -05003878 RID = addSPIRVInst(GetSPIRVBinaryOpcode(I), Ops);
David Neto22f144c2017-06-12 14:26:21 -04003879 }
alan-bakerc9c55ae2019-12-02 16:01:27 -05003880 } else if (I.getOpcode() == Instruction::FNeg) {
3881 // The only unary operator.
3882 //
3883 // Ops[0] = Result Type ID
3884 // Ops[1] = Operand 0
SJW01901d92020-05-21 08:58:31 -05003885 SPIRVOperandVec Ops;
alan-bakerc9c55ae2019-12-02 16:01:27 -05003886
SJW01901d92020-05-21 08:58:31 -05003887 Ops << I.getType() << I.getOperand(0);
3888 RID = addSPIRVInst(spv::OpFNegate, Ops);
Marco Antognini68e5c512020-09-09 16:08:57 +01003889 } else if (I.getOpcode() == Instruction::Unreachable) {
3890 RID = addSPIRVInst(spv::OpUnreachable);
David Neto22f144c2017-06-12 14:26:21 -04003891 } else {
3892 I.print(errs());
3893 llvm_unreachable("Unsupported instruction???");
3894 }
3895 break;
3896 }
3897 case Instruction::GetElementPtr: {
3898 auto &GlobalConstArgSet = getGlobalConstArgSet();
3899
3900 //
3901 // Generate OpAccessChain.
3902 //
3903 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
3904
3905 //
3906 // Generate OpAccessChain.
3907 //
3908
3909 // Ops[0] = Result Type ID
3910 // Ops[1] = Base ID
3911 // Ops[2] ... Ops[n] = Indexes ID
SJWf93f5f32020-05-05 07:27:56 -05003912 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003913
alan-bakerb6b09dc2018-11-08 16:59:28 -05003914 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04003915 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
3916 GlobalConstArgSet.count(GEP->getPointerOperand())) {
3917 // Use pointer type with private address space for global constant.
3918 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04003919 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04003920 }
David Neto257c3892018-04-11 13:19:45 -04003921
SJW01901d92020-05-21 08:58:31 -05003922 Ops << ResultType;
David Neto22f144c2017-06-12 14:26:21 -04003923
David Neto862b7d82018-06-14 18:48:37 -04003924 // Generate the base pointer.
SJW01901d92020-05-21 08:58:31 -05003925 Ops << GEP->getPointerOperand();
David Neto22f144c2017-06-12 14:26:21 -04003926
David Neto862b7d82018-06-14 18:48:37 -04003927 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04003928
3929 //
3930 // Follows below rules for gep.
3931 //
David Neto862b7d82018-06-14 18:48:37 -04003932 // 1. If gep's first index is 0 generate OpAccessChain and ignore gep's
3933 // first index.
David Neto22f144c2017-06-12 14:26:21 -04003934 // 2. If gep's first index is not 0, generate OpPtrAccessChain and use gep's
3935 // first index.
3936 // 3. If gep's first index is not constant, generate OpPtrAccessChain and
3937 // use gep's first index.
3938 // 4. If it is not above case 1, 2 and 3, generate OpAccessChain and use
3939 // gep's first index.
3940 //
3941 spv::Op Opcode = spv::OpAccessChain;
3942 unsigned offset = 0;
3943 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04003944 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04003945 offset = 1;
David Neto862b7d82018-06-14 18:48:37 -04003946 } else if (CstInt->getZExtValue() != 0) {
David Neto22f144c2017-06-12 14:26:21 -04003947 Opcode = spv::OpPtrAccessChain;
David Neto22f144c2017-06-12 14:26:21 -04003948 }
David Neto862b7d82018-06-14 18:48:37 -04003949 } else {
David Neto22f144c2017-06-12 14:26:21 -04003950 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04003951 }
3952
3953 if (Opcode == spv::OpPtrAccessChain) {
alan-baker7506abb2020-09-10 15:02:55 -04003954 // Shader validation in the SPIR-V spec requires that the base pointer to
3955 // OpPtrAccessChain (in StorageBuffer storage class) be decorated with
3956 // ArrayStride.
alan-baker5b86ed72019-02-15 08:26:50 -05003957 auto address_space = ResultType->getAddressSpace();
3958 setVariablePointersCapabilities(address_space);
3959 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04003960 case spv::StorageClassStorageBuffer:
David Neto1a1a0582017-07-07 12:01:44 -04003961 // Save the need to generate an ArrayStride decoration. But defer
3962 // generation until later, so we only make one decoration.
alan-baker7506abb2020-09-10 15:02:55 -04003963 getTypesNeedingArrayStride().insert(GEP->getPointerOperandType());
3964 break;
3965 case spv::StorageClassWorkgroup:
Alan Bakerfcda9482018-10-02 17:09:59 -04003966 break;
3967 default:
alan-baker7506abb2020-09-10 15:02:55 -04003968 llvm_unreachable(
3969 "OpPtrAccessChain is not supported for this storage class");
Alan Bakerfcda9482018-10-02 17:09:59 -04003970 break;
David Neto1a1a0582017-07-07 12:01:44 -04003971 }
David Neto22f144c2017-06-12 14:26:21 -04003972 }
3973
3974 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
SJW01901d92020-05-21 08:58:31 -05003975 Ops << *II;
David Neto22f144c2017-06-12 14:26:21 -04003976 }
3977
SJWf93f5f32020-05-05 07:27:56 -05003978 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003979 break;
3980 }
3981 case Instruction::ExtractValue: {
3982 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
3983 // Ops[0] = Result Type ID
3984 // Ops[1] = Composite ID
3985 // Ops[2] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05003986 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003987
SJW01901d92020-05-21 08:58:31 -05003988 Ops << I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003989
SJW01901d92020-05-21 08:58:31 -05003990 Ops << EVI->getAggregateOperand();
David Neto22f144c2017-06-12 14:26:21 -04003991
3992 for (auto &Index : EVI->indices()) {
SJW01901d92020-05-21 08:58:31 -05003993 Ops << Index;
David Neto22f144c2017-06-12 14:26:21 -04003994 }
3995
SJWf93f5f32020-05-05 07:27:56 -05003996 RID = addSPIRVInst(spv::OpCompositeExtract, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003997 break;
3998 }
3999 case Instruction::InsertValue: {
4000 InsertValueInst *IVI = cast<InsertValueInst>(&I);
4001 // Ops[0] = Result Type ID
4002 // Ops[1] = Object ID
4003 // Ops[2] = Composite ID
4004 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004005 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004006
SJW01901d92020-05-21 08:58:31 -05004007 Ops << I.getType() << IVI->getInsertedValueOperand()
4008 << IVI->getAggregateOperand();
David Neto22f144c2017-06-12 14:26:21 -04004009
4010 for (auto &Index : IVI->indices()) {
SJW01901d92020-05-21 08:58:31 -05004011 Ops << Index;
David Neto22f144c2017-06-12 14:26:21 -04004012 }
4013
SJWf93f5f32020-05-05 07:27:56 -05004014 RID = addSPIRVInst(spv::OpCompositeInsert, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004015 break;
4016 }
4017 case Instruction::Select: {
4018 //
4019 // Generate OpSelect.
4020 //
4021
4022 // Ops[0] = Result Type ID
4023 // Ops[1] = Condition ID
4024 // Ops[2] = True Constant ID
4025 // Ops[3] = False Constant ID
SJWf93f5f32020-05-05 07:27:56 -05004026 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004027
4028 // Find SPIRV instruction for parameter type.
4029 auto Ty = I.getType();
4030 if (Ty->isPointerTy()) {
4031 auto PointeeTy = Ty->getPointerElementType();
4032 if (PointeeTy->isStructTy() &&
4033 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
4034 Ty = PointeeTy;
alan-baker5b86ed72019-02-15 08:26:50 -05004035 } else {
4036 // Selecting between pointers requires variable pointers.
4037 setVariablePointersCapabilities(Ty->getPointerAddressSpace());
4038 if (!hasVariablePointers() && !selectFromSameObject(&I)) {
SJW01901d92020-05-21 08:58:31 -05004039 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004040 }
David Neto22f144c2017-06-12 14:26:21 -04004041 }
4042 }
4043
SJW01901d92020-05-21 08:58:31 -05004044 Ops << Ty << I.getOperand(0) << I.getOperand(1) << I.getOperand(2);
David Neto22f144c2017-06-12 14:26:21 -04004045
SJWf93f5f32020-05-05 07:27:56 -05004046 RID = addSPIRVInst(spv::OpSelect, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004047 break;
4048 }
4049 case Instruction::ExtractElement: {
4050 // Handle <4 x i8> type manually.
4051 Type *CompositeTy = I.getOperand(0)->getType();
4052 if (is4xi8vec(CompositeTy)) {
4053 //
4054 // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
4055 // <4 x i8>.
4056 //
4057
4058 //
4059 // Generate OpShiftRightLogical
4060 //
4061 // Ops[0] = Result Type ID
4062 // Ops[1] = Operand 0
4063 // Ops[2] = Operand 1
4064 //
SJWf93f5f32020-05-05 07:27:56 -05004065 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004066
SJW01901d92020-05-21 08:58:31 -05004067 Ops << CompositeTy << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04004068
SJW01901d92020-05-21 08:58:31 -05004069 SPIRVID Op1ID = 0;
David Neto22f144c2017-06-12 14:26:21 -04004070 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
4071 // Handle constant index.
SJW806a5d82020-07-15 12:51:38 -05004072 uint32_t Idx = static_cast<uint32_t>(CI->getZExtValue());
4073 Op1ID = getSPIRVInt32Constant(Idx * 8);
David Neto22f144c2017-06-12 14:26:21 -04004074 } else {
4075 // Handle variable index.
SJWf93f5f32020-05-05 07:27:56 -05004076 SPIRVOperandVec TmpOps;
David Neto22f144c2017-06-12 14:26:21 -04004077
SJW806a5d82020-07-15 12:51:38 -05004078 TmpOps << Type::getInt32Ty(Context) << I.getOperand(1)
4079 << getSPIRVInt32Constant(8);
David Neto22f144c2017-06-12 14:26:21 -04004080
SJWf93f5f32020-05-05 07:27:56 -05004081 Op1ID = addSPIRVInst(spv::OpIMul, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004082 }
SJW01901d92020-05-21 08:58:31 -05004083 Ops << Op1ID;
David Neto22f144c2017-06-12 14:26:21 -04004084
SJW01901d92020-05-21 08:58:31 -05004085 SPIRVID ShiftID = addSPIRVInst(spv::OpShiftRightLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004086
4087 //
4088 // Generate OpBitwiseAnd
4089 //
4090 // Ops[0] = Result Type ID
4091 // Ops[1] = Operand 0
4092 // Ops[2] = Operand 1
4093 //
4094 Ops.clear();
4095
SJW806a5d82020-07-15 12:51:38 -05004096 Ops << CompositeTy << ShiftID << getSPIRVInt32Constant(0xFF);
David Neto22f144c2017-06-12 14:26:21 -04004097
SJWf93f5f32020-05-05 07:27:56 -05004098 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004099 break;
4100 }
4101
4102 // Ops[0] = Result Type ID
4103 // Ops[1] = Composite ID
4104 // Ops[2] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004105 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004106
SJW01901d92020-05-21 08:58:31 -05004107 Ops << I.getType() << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04004108
4109 spv::Op Opcode = spv::OpCompositeExtract;
4110 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
SJW01901d92020-05-21 08:58:31 -05004111 Ops << static_cast<uint32_t>(CI->getZExtValue());
David Neto22f144c2017-06-12 14:26:21 -04004112 } else {
SJW01901d92020-05-21 08:58:31 -05004113 Ops << I.getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04004114 Opcode = spv::OpVectorExtractDynamic;
4115 }
4116
SJWf93f5f32020-05-05 07:27:56 -05004117 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004118 break;
4119 }
4120 case Instruction::InsertElement: {
4121 // Handle <4 x i8> type manually.
4122 Type *CompositeTy = I.getOperand(0)->getType();
4123 if (is4xi8vec(CompositeTy)) {
SJW806a5d82020-07-15 12:51:38 -05004124 SPIRVID CstFFID = getSPIRVInt32Constant(0xFF);
David Neto22f144c2017-06-12 14:26:21 -04004125
SJW01901d92020-05-21 08:58:31 -05004126 SPIRVID ShiftAmountID = 0;
David Neto22f144c2017-06-12 14:26:21 -04004127 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
4128 // Handle constant index.
SJW806a5d82020-07-15 12:51:38 -05004129 uint32_t Idx = static_cast<uint32_t>(CI->getZExtValue());
4130 ShiftAmountID = getSPIRVInt32Constant(Idx * 8);
David Neto22f144c2017-06-12 14:26:21 -04004131 } else {
4132 // Handle variable index.
SJWf93f5f32020-05-05 07:27:56 -05004133 SPIRVOperandVec TmpOps;
David Neto22f144c2017-06-12 14:26:21 -04004134
SJW806a5d82020-07-15 12:51:38 -05004135 TmpOps << Type::getInt32Ty(Context) << I.getOperand(2)
4136 << getSPIRVInt32Constant(8);
David Neto22f144c2017-06-12 14:26:21 -04004137
SJWf93f5f32020-05-05 07:27:56 -05004138 ShiftAmountID = addSPIRVInst(spv::OpIMul, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004139 }
4140
4141 //
4142 // Generate mask operations.
4143 //
4144
4145 // ShiftLeft mask according to index of insertelement.
SJWf93f5f32020-05-05 07:27:56 -05004146 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004147
SJW01901d92020-05-21 08:58:31 -05004148 Ops << CompositeTy << CstFFID << ShiftAmountID;
David Neto22f144c2017-06-12 14:26:21 -04004149
SJW01901d92020-05-21 08:58:31 -05004150 SPIRVID MaskID = addSPIRVInst(spv::OpShiftLeftLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004151
4152 // Inverse mask.
4153 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004154 Ops << CompositeTy << MaskID;
David Neto22f144c2017-06-12 14:26:21 -04004155
SJW01901d92020-05-21 08:58:31 -05004156 SPIRVID InvMaskID = addSPIRVInst(spv::OpNot, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004157
4158 // Apply mask.
4159 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004160 Ops << CompositeTy << I.getOperand(0) << InvMaskID;
David Neto22f144c2017-06-12 14:26:21 -04004161
SJW01901d92020-05-21 08:58:31 -05004162 SPIRVID OrgValID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004163
4164 // Create correct value according to index of insertelement.
4165 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004166 Ops << CompositeTy << I.getOperand(1) << ShiftAmountID;
David Neto22f144c2017-06-12 14:26:21 -04004167
SJW01901d92020-05-21 08:58:31 -05004168 SPIRVID InsertValID = addSPIRVInst(spv::OpShiftLeftLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004169
4170 // Insert value to original value.
4171 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004172 Ops << CompositeTy << OrgValID << InsertValID;
David Neto22f144c2017-06-12 14:26:21 -04004173
SJWf93f5f32020-05-05 07:27:56 -05004174 RID = addSPIRVInst(spv::OpBitwiseOr, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004175 break;
4176 }
4177
SJWf93f5f32020-05-05 07:27:56 -05004178 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004179
James Priced26efea2018-06-09 23:28:32 +01004180 // Ops[0] = Result Type ID
SJW01901d92020-05-21 08:58:31 -05004181 Ops << I.getType();
David Neto22f144c2017-06-12 14:26:21 -04004182
4183 spv::Op Opcode = spv::OpCompositeInsert;
4184 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
David Neto257c3892018-04-11 13:19:45 -04004185 const auto value = CI->getZExtValue();
4186 assert(value <= UINT32_MAX);
James Priced26efea2018-06-09 23:28:32 +01004187 // Ops[1] = Object ID
4188 // Ops[2] = Composite ID
4189 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJW01901d92020-05-21 08:58:31 -05004190 Ops << I.getOperand(1) << I.getOperand(0) << static_cast<uint32_t>(value);
David Neto22f144c2017-06-12 14:26:21 -04004191 } else {
James Priced26efea2018-06-09 23:28:32 +01004192 // Ops[1] = Composite ID
4193 // Ops[2] = Object ID
4194 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJW01901d92020-05-21 08:58:31 -05004195 Ops << I.getOperand(0) << I.getOperand(1) << I.getOperand(2);
David Neto22f144c2017-06-12 14:26:21 -04004196 Opcode = spv::OpVectorInsertDynamic;
4197 }
4198
SJWf93f5f32020-05-05 07:27:56 -05004199 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004200 break;
4201 }
4202 case Instruction::ShuffleVector: {
4203 // Ops[0] = Result Type ID
4204 // Ops[1] = Vector 1 ID
4205 // Ops[2] = Vector 2 ID
4206 // Ops[3] ... Ops[n] = Components (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004207 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004208
SJW01901d92020-05-21 08:58:31 -05004209 Ops << I.getType() << I.getOperand(0) << I.getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04004210
alan-bakerc9666712020-04-01 16:31:21 -04004211 auto shuffle = cast<ShuffleVectorInst>(&I);
4212 SmallVector<int, 4> mask;
4213 shuffle->getShuffleMask(mask);
4214 for (auto i : mask) {
4215 if (i == UndefMaskElem) {
4216 if (clspv::Option::HackUndef())
4217 // Use 0 instead of undef.
SJW01901d92020-05-21 08:58:31 -05004218 Ops << 0;
alan-bakerc9666712020-04-01 16:31:21 -04004219 else
4220 // Undef for shuffle in SPIR-V.
SJW01901d92020-05-21 08:58:31 -05004221 Ops << 0xffffffff;
David Neto22f144c2017-06-12 14:26:21 -04004222 } else {
SJW01901d92020-05-21 08:58:31 -05004223 Ops << i;
David Neto22f144c2017-06-12 14:26:21 -04004224 }
4225 }
4226
SJWf93f5f32020-05-05 07:27:56 -05004227 RID = addSPIRVInst(spv::OpVectorShuffle, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004228 break;
4229 }
4230 case Instruction::ICmp:
4231 case Instruction::FCmp: {
4232 CmpInst *CmpI = cast<CmpInst>(&I);
4233
David Netod4ca2e62017-07-06 18:47:35 -04004234 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004235 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04004236 if (isa<PointerType>(ArgTy)) {
4237 CmpI->print(errs());
alan-baker21574d32020-01-29 16:00:31 -05004238 std::string name = I.getParent()->getParent()->getName().str();
David Netod4ca2e62017-07-06 18:47:35 -04004239 errs()
4240 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
4241 << "in function " << name << "\n";
4242 llvm_unreachable("Pointer equality check is invalid");
4243 break;
4244 }
4245
David Neto257c3892018-04-11 13:19:45 -04004246 // Ops[0] = Result Type ID
4247 // Ops[1] = Operand 1 ID
4248 // Ops[2] = Operand 2 ID
SJWf93f5f32020-05-05 07:27:56 -05004249 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004250
SJW01901d92020-05-21 08:58:31 -05004251 Ops << CmpI->getType() << CmpI->getOperand(0) << CmpI->getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04004252
4253 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
SJWf93f5f32020-05-05 07:27:56 -05004254 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004255 break;
4256 }
4257 case Instruction::Br: {
SJW88ed5fe2020-05-11 12:40:57 -05004258 // Branch instruction is deferred because it needs label's ID.
4259 BasicBlock *BrBB = I.getParent();
4260 if (ContinueBlocks.count(BrBB) || MergeBlocks.count(BrBB)) {
4261 // Placeholder for Merge operation
4262 RID = addSPIRVPlaceholder(&I);
4263 }
4264 RID = addSPIRVPlaceholder(&I);
David Neto22f144c2017-06-12 14:26:21 -04004265 break;
4266 }
4267 case Instruction::Switch: {
4268 I.print(errs());
4269 llvm_unreachable("Unsupported instruction???");
4270 break;
4271 }
4272 case Instruction::IndirectBr: {
4273 I.print(errs());
4274 llvm_unreachable("Unsupported instruction???");
4275 break;
4276 }
4277 case Instruction::PHI: {
SJW88ed5fe2020-05-11 12:40:57 -05004278 // PHI instruction is deferred because it needs label's ID.
4279 RID = addSPIRVPlaceholder(&I);
David Neto22f144c2017-06-12 14:26:21 -04004280 break;
4281 }
4282 case Instruction::Alloca: {
4283 //
4284 // Generate OpVariable.
4285 //
4286 // Ops[0] : Result Type ID
4287 // Ops[1] : Storage Class
SJWf93f5f32020-05-05 07:27:56 -05004288 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004289
SJW01901d92020-05-21 08:58:31 -05004290 Ops << I.getType() << spv::StorageClassFunction;
David Neto22f144c2017-06-12 14:26:21 -04004291
SJWf93f5f32020-05-05 07:27:56 -05004292 RID = addSPIRVInst(spv::OpVariable, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004293 break;
4294 }
4295 case Instruction::Load: {
4296 LoadInst *LD = cast<LoadInst>(&I);
4297 //
4298 // Generate OpLoad.
4299 //
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004300
alan-baker5b86ed72019-02-15 08:26:50 -05004301 if (LD->getType()->isPointerTy()) {
4302 // Loading a pointer requires variable pointers.
4303 setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
4304 }
David Neto22f144c2017-06-12 14:26:21 -04004305
SJW01901d92020-05-21 08:58:31 -05004306 SPIRVID PointerID = getSPIRVValue(LD->getPointerOperand());
David Netoa60b00b2017-09-15 16:34:09 -04004307 // This is a hack to work around what looks like a driver bug.
4308 // When we're loading from the special variable holding the WorkgroupSize
David Neto0a2f98d2017-09-15 19:38:40 -04004309 // builtin value, use an OpBitWiseAnd of the value's ID rather than
4310 // generating a load.
David Neto66cfe642018-03-24 06:13:56 -07004311 // TODO(dneto): Remove this awful hack once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -04004312 if (PointerID == WorkgroupSizeVarID) {
David Neto0a2f98d2017-09-15 19:38:40 -04004313 // Generate a bitwise-and of the original value with itself.
4314 // We should have been able to get away with just an OpCopyObject,
4315 // but we need something more complex to get past certain driver bugs.
4316 // This is ridiculous, but necessary.
4317 // TODO(dneto): Revisit this once drivers fix their bugs.
4318
SJWf93f5f32020-05-05 07:27:56 -05004319 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05004320 Ops << LD->getType() << WorkgroupSizeValueID << WorkgroupSizeValueID;
David Neto0a2f98d2017-09-15 19:38:40 -04004321
SJWf93f5f32020-05-05 07:27:56 -05004322 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Netoa60b00b2017-09-15 16:34:09 -04004323 break;
4324 }
4325
4326 // This is the normal path. Generate a load.
4327
David Neto22f144c2017-06-12 14:26:21 -04004328 // Ops[0] = Result Type ID
4329 // Ops[1] = Pointer ID
4330 // Ops[2] ... Ops[n] = Optional Memory Access
4331 //
4332 // TODO: Do we need to implement Optional Memory Access???
David Neto0a2f98d2017-09-15 19:38:40 -04004333
SJWf93f5f32020-05-05 07:27:56 -05004334 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05004335 Ops << LD->getType() << LD->getPointerOperand();
David Neto22f144c2017-06-12 14:26:21 -04004336
SJWf93f5f32020-05-05 07:27:56 -05004337 RID = addSPIRVInst(spv::OpLoad, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004338 break;
4339 }
4340 case Instruction::Store: {
4341 StoreInst *ST = cast<StoreInst>(&I);
4342 //
4343 // Generate OpStore.
4344 //
4345
alan-baker5b86ed72019-02-15 08:26:50 -05004346 if (ST->getValueOperand()->getType()->isPointerTy()) {
4347 // Storing a pointer requires variable pointers.
4348 setVariablePointersCapabilities(
4349 ST->getValueOperand()->getType()->getPointerAddressSpace());
4350 }
4351
David Neto22f144c2017-06-12 14:26:21 -04004352 // Ops[0] = Pointer ID
4353 // Ops[1] = Object ID
4354 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
4355 //
4356 // TODO: Do we need to implement Optional Memory Access???
SJWf93f5f32020-05-05 07:27:56 -05004357 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05004358 Ops << ST->getPointerOperand() << ST->getValueOperand();
David Neto22f144c2017-06-12 14:26:21 -04004359
SJWf93f5f32020-05-05 07:27:56 -05004360 RID = addSPIRVInst(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004361 break;
4362 }
4363 case Instruction::AtomicCmpXchg: {
4364 I.print(errs());
4365 llvm_unreachable("Unsupported instruction???");
4366 break;
4367 }
4368 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01004369 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
4370
4371 spv::Op opcode;
4372
4373 switch (AtomicRMW->getOperation()) {
4374 default:
4375 I.print(errs());
4376 llvm_unreachable("Unsupported instruction???");
4377 case llvm::AtomicRMWInst::Add:
4378 opcode = spv::OpAtomicIAdd;
4379 break;
4380 case llvm::AtomicRMWInst::Sub:
4381 opcode = spv::OpAtomicISub;
4382 break;
4383 case llvm::AtomicRMWInst::Xchg:
4384 opcode = spv::OpAtomicExchange;
4385 break;
4386 case llvm::AtomicRMWInst::Min:
4387 opcode = spv::OpAtomicSMin;
4388 break;
4389 case llvm::AtomicRMWInst::Max:
4390 opcode = spv::OpAtomicSMax;
4391 break;
4392 case llvm::AtomicRMWInst::UMin:
4393 opcode = spv::OpAtomicUMin;
4394 break;
4395 case llvm::AtomicRMWInst::UMax:
4396 opcode = spv::OpAtomicUMax;
4397 break;
4398 case llvm::AtomicRMWInst::And:
4399 opcode = spv::OpAtomicAnd;
4400 break;
4401 case llvm::AtomicRMWInst::Or:
4402 opcode = spv::OpAtomicOr;
4403 break;
4404 case llvm::AtomicRMWInst::Xor:
4405 opcode = spv::OpAtomicXor;
4406 break;
4407 }
4408
4409 //
4410 // Generate OpAtomic*.
4411 //
SJWf93f5f32020-05-05 07:27:56 -05004412 SPIRVOperandVec Ops;
Neil Henning39672102017-09-29 14:33:13 +01004413
SJW01901d92020-05-21 08:58:31 -05004414 Ops << I.getType() << AtomicRMW->getPointerOperand();
Neil Henning39672102017-09-29 14:33:13 +01004415
SJW806a5d82020-07-15 12:51:38 -05004416 const auto ConstantScopeDevice = getSPIRVInt32Constant(spv::ScopeDevice);
SJW01901d92020-05-21 08:58:31 -05004417 Ops << ConstantScopeDevice;
Neil Henning39672102017-09-29 14:33:13 +01004418
SJW806a5d82020-07-15 12:51:38 -05004419 const auto ConstantMemorySemantics =
4420 getSPIRVInt32Constant(spv::MemorySemanticsUniformMemoryMask |
4421 spv::MemorySemanticsSequentiallyConsistentMask);
SJW01901d92020-05-21 08:58:31 -05004422 Ops << ConstantMemorySemantics << AtomicRMW->getValOperand();
Neil Henning39672102017-09-29 14:33:13 +01004423
SJWf93f5f32020-05-05 07:27:56 -05004424 RID = addSPIRVInst(opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004425 break;
4426 }
4427 case Instruction::Fence: {
4428 I.print(errs());
4429 llvm_unreachable("Unsupported instruction???");
4430 break;
4431 }
4432 case Instruction::Call: {
4433 CallInst *Call = dyn_cast<CallInst>(&I);
SJW806a5d82020-07-15 12:51:38 -05004434 RID = GenerateInstructionFromCall(Call);
David Neto22f144c2017-06-12 14:26:21 -04004435 break;
4436 }
4437 case Instruction::Ret: {
4438 unsigned NumOps = I.getNumOperands();
4439 if (NumOps == 0) {
4440 //
4441 // Generate OpReturn.
4442 //
SJWf93f5f32020-05-05 07:27:56 -05004443 RID = addSPIRVInst(spv::OpReturn);
David Neto22f144c2017-06-12 14:26:21 -04004444 } else {
4445 //
4446 // Generate OpReturnValue.
4447 //
4448
4449 // Ops[0] = Return Value ID
SJWf93f5f32020-05-05 07:27:56 -05004450 SPIRVOperandVec Ops;
David Neto257c3892018-04-11 13:19:45 -04004451
SJW01901d92020-05-21 08:58:31 -05004452 Ops << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04004453
SJWf93f5f32020-05-05 07:27:56 -05004454 RID = addSPIRVInst(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004455 break;
4456 }
4457 break;
4458 }
4459 }
SJWf93f5f32020-05-05 07:27:56 -05004460
4461 // Register Instruction to ValueMap.
SJW01901d92020-05-21 08:58:31 -05004462 if (RID.isValid()) {
SJWf93f5f32020-05-05 07:27:56 -05004463 VMap[&I] = RID;
4464 }
David Neto22f144c2017-06-12 14:26:21 -04004465}
4466
4467void SPIRVProducerPass::GenerateFuncEpilogue() {
David Neto22f144c2017-06-12 14:26:21 -04004468 //
4469 // Generate OpFunctionEnd
4470 //
SJWf93f5f32020-05-05 07:27:56 -05004471 addSPIRVInst(spv::OpFunctionEnd);
David Neto22f144c2017-06-12 14:26:21 -04004472}
4473
4474bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05004475 // Don't specialize <4 x i8> if i8 is generally supported.
4476 if (clspv::Option::Int8Support())
4477 return false;
4478
David Neto22f144c2017-06-12 14:26:21 -04004479 LLVMContext &Context = Ty->getContext();
James Pricecf53df42020-04-20 14:41:24 -04004480 if (auto VecTy = dyn_cast<VectorType>(Ty)) {
4481 if (VecTy->getElementType() == Type::getInt8Ty(Context) &&
alan-baker5a8c3be2020-09-09 13:44:26 -04004482 VecTy->getElementCount().getKnownMinValue() == 4) {
David Neto22f144c2017-06-12 14:26:21 -04004483 return true;
4484 }
4485 }
4486
4487 return false;
4488}
4489
4490void SPIRVProducerPass::HandleDeferredInstruction() {
David Neto22f144c2017-06-12 14:26:21 -04004491 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
4492
SJW88ed5fe2020-05-11 12:40:57 -05004493 for (size_t i = 0; i < DeferredInsts.size(); ++i) {
4494 Value *Inst = DeferredInsts[i].first;
4495 SPIRVInstruction *Placeholder = DeferredInsts[i].second;
4496 SPIRVOperandVec Operands;
4497
4498 auto nextDeferred = [&i, &Inst, &DeferredInsts, &Placeholder]() {
4499 ++i;
4500 assert(DeferredInsts.size() > i);
4501 assert(Inst == DeferredInsts[i].first);
4502 Placeholder = DeferredInsts[i].second;
4503 };
David Neto22f144c2017-06-12 14:26:21 -04004504
4505 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
alan-baker06cad652019-12-03 17:56:47 -05004506 // Check whether this branch needs to be preceeded by merge instruction.
David Neto22f144c2017-06-12 14:26:21 -04004507 BasicBlock *BrBB = Br->getParent();
alan-baker06cad652019-12-03 17:56:47 -05004508 if (ContinueBlocks.count(BrBB)) {
David Neto22f144c2017-06-12 14:26:21 -04004509 //
4510 // Generate OpLoopMerge.
4511 //
4512 // Ops[0] = Merge Block ID
4513 // Ops[1] = Continue Target ID
4514 // Ops[2] = Selection Control
SJWf93f5f32020-05-05 07:27:56 -05004515 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004516
SJW01901d92020-05-21 08:58:31 -05004517 Ops << MergeBlocks[BrBB] << ContinueBlocks[BrBB]
4518 << spv::LoopControlMaskNone;
David Neto22f144c2017-06-12 14:26:21 -04004519
SJW88ed5fe2020-05-11 12:40:57 -05004520 replaceSPIRVInst(Placeholder, spv::OpLoopMerge, Ops);
4521
4522 nextDeferred();
4523
alan-baker06cad652019-12-03 17:56:47 -05004524 } else if (MergeBlocks.count(BrBB)) {
4525 //
4526 // Generate OpSelectionMerge.
4527 //
4528 // Ops[0] = Merge Block ID
4529 // Ops[1] = Selection Control
SJWf93f5f32020-05-05 07:27:56 -05004530 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004531
alan-baker06cad652019-12-03 17:56:47 -05004532 auto MergeBB = MergeBlocks[BrBB];
SJW01901d92020-05-21 08:58:31 -05004533 Ops << MergeBB << spv::SelectionControlMaskNone;
David Neto22f144c2017-06-12 14:26:21 -04004534
SJW88ed5fe2020-05-11 12:40:57 -05004535 replaceSPIRVInst(Placeholder, spv::OpSelectionMerge, Ops);
4536
4537 nextDeferred();
David Neto22f144c2017-06-12 14:26:21 -04004538 }
4539
4540 if (Br->isConditional()) {
4541 //
4542 // Generate OpBranchConditional.
4543 //
4544 // Ops[0] = Condition ID
4545 // Ops[1] = True Label ID
4546 // Ops[2] = False Label ID
4547 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004548 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004549
SJW01901d92020-05-21 08:58:31 -05004550 Ops << Br->getCondition() << Br->getSuccessor(0) << Br->getSuccessor(1);
David Neto22f144c2017-06-12 14:26:21 -04004551
SJW88ed5fe2020-05-11 12:40:57 -05004552 replaceSPIRVInst(Placeholder, spv::OpBranchConditional, Ops);
4553
David Neto22f144c2017-06-12 14:26:21 -04004554 } else {
4555 //
4556 // Generate OpBranch.
4557 //
4558 // Ops[0] = Target Label ID
SJWf93f5f32020-05-05 07:27:56 -05004559 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004560
SJW01901d92020-05-21 08:58:31 -05004561 Ops << Br->getSuccessor(0);
David Neto22f144c2017-06-12 14:26:21 -04004562
SJW88ed5fe2020-05-11 12:40:57 -05004563 replaceSPIRVInst(Placeholder, spv::OpBranch, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004564 }
4565 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5ed87542020-03-23 11:05:22 -04004566 if (PHI->getType()->isPointerTy() && !IsSamplerType(PHI->getType()) &&
4567 !IsImageType(PHI->getType())) {
alan-baker5b86ed72019-02-15 08:26:50 -05004568 // OpPhi on pointers requires variable pointers.
4569 setVariablePointersCapabilities(
4570 PHI->getType()->getPointerAddressSpace());
4571 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
SJW01901d92020-05-21 08:58:31 -05004572 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004573 }
4574 }
4575
David Neto22f144c2017-06-12 14:26:21 -04004576 //
4577 // Generate OpPhi.
4578 //
4579 // Ops[0] = Result Type ID
4580 // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
SJWf93f5f32020-05-05 07:27:56 -05004581 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004582
SJW01901d92020-05-21 08:58:31 -05004583 Ops << PHI->getType();
David Neto22f144c2017-06-12 14:26:21 -04004584
SJW88ed5fe2020-05-11 12:40:57 -05004585 for (unsigned j = 0; j < PHI->getNumIncomingValues(); j++) {
SJW01901d92020-05-21 08:58:31 -05004586 Ops << PHI->getIncomingValue(j) << PHI->getIncomingBlock(j);
David Neto22f144c2017-06-12 14:26:21 -04004587 }
4588
SJW88ed5fe2020-05-11 12:40:57 -05004589 replaceSPIRVInst(Placeholder, spv::OpPhi, Ops);
4590
David Neto22f144c2017-06-12 14:26:21 -04004591 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
4592 Function *Callee = Call->getCalledFunction();
David Neto3fbb4072017-10-16 11:28:14 -04004593 auto callee_name = Callee->getName();
David Neto22f144c2017-06-12 14:26:21 -04004594
SJW61531372020-06-09 07:31:08 -05004595 if (Builtins::Lookup(Callee) == Builtins::kClspvCompositeConstruct) {
David Netoab03f432017-11-03 17:00:44 -04004596 // Generate an OpCompositeConstruct
SJWf93f5f32020-05-05 07:27:56 -05004597 SPIRVOperandVec Ops;
David Netoab03f432017-11-03 17:00:44 -04004598
4599 // The result type.
SJW01901d92020-05-21 08:58:31 -05004600 Ops << Call->getType();
David Netoab03f432017-11-03 17:00:44 -04004601
4602 for (Use &use : Call->arg_operands()) {
SJW01901d92020-05-21 08:58:31 -05004603 Ops << use.get();
David Netoab03f432017-11-03 17:00:44 -04004604 }
4605
SJW88ed5fe2020-05-11 12:40:57 -05004606 replaceSPIRVInst(Placeholder, spv::OpCompositeConstruct, Ops);
David Netoab03f432017-11-03 17:00:44 -04004607
David Neto22f144c2017-06-12 14:26:21 -04004608 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05004609 if (Call->getType()->isPointerTy()) {
4610 // Functions returning pointers require variable pointers.
4611 setVariablePointersCapabilities(
4612 Call->getType()->getPointerAddressSpace());
4613 }
4614
David Neto22f144c2017-06-12 14:26:21 -04004615 //
4616 // Generate OpFunctionCall.
4617 //
4618
4619 // Ops[0] = Result Type ID
4620 // Ops[1] = Callee Function ID
4621 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
SJWf93f5f32020-05-05 07:27:56 -05004622 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004623
SJW01901d92020-05-21 08:58:31 -05004624 Ops << Call->getType();
David Neto22f144c2017-06-12 14:26:21 -04004625
SJW01901d92020-05-21 08:58:31 -05004626 SPIRVID CalleeID = getSPIRVValue(Callee);
SJW806a5d82020-07-15 12:51:38 -05004627 if (!CalleeID.isValid()) {
David Neto43568eb2017-10-13 18:25:25 -04004628 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04004629 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04004630 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
4631 // causes an infinite loop. Instead, go ahead and generate
4632 // the bad function call. A validator will catch the 0-Id.
4633 // llvm_unreachable("Can't translate function call");
4634 }
David Neto22f144c2017-06-12 14:26:21 -04004635
SJW01901d92020-05-21 08:58:31 -05004636 Ops << CalleeID;
David Neto22f144c2017-06-12 14:26:21 -04004637
David Neto22f144c2017-06-12 14:26:21 -04004638 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
SJW88ed5fe2020-05-11 12:40:57 -05004639 for (unsigned j = 0; j < CalleeFTy->getNumParams(); j++) {
4640 auto *operand = Call->getOperand(j);
alan-bakerd4d50652019-12-03 17:17:15 -05004641 auto *operand_type = operand->getType();
4642 // Images and samplers can be passed as function parameters without
4643 // variable pointers.
4644 if (operand_type->isPointerTy() && !IsImageType(operand_type) &&
4645 !IsSamplerType(operand_type)) {
alan-baker5b86ed72019-02-15 08:26:50 -05004646 auto sc =
4647 GetStorageClass(operand->getType()->getPointerAddressSpace());
4648 if (sc == spv::StorageClassStorageBuffer) {
4649 // Passing SSBO by reference requires variable pointers storage
4650 // buffer.
SJW01901d92020-05-21 08:58:31 -05004651 setVariablePointersStorageBuffer();
alan-baker5b86ed72019-02-15 08:26:50 -05004652 } else if (sc == spv::StorageClassWorkgroup) {
4653 // Workgroup references require variable pointers if they are not
4654 // memory object declarations.
4655 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
4656 // Workgroup accessor represents a variable reference.
SJW61531372020-06-09 07:31:08 -05004657 if (Builtins::Lookup(operand_call->getCalledFunction()) !=
4658 Builtins::kClspvLocal)
SJW01901d92020-05-21 08:58:31 -05004659 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004660 } else {
4661 // Arguments are function parameters.
4662 if (!isa<Argument>(operand))
SJW01901d92020-05-21 08:58:31 -05004663 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004664 }
4665 }
4666 }
SJW01901d92020-05-21 08:58:31 -05004667 Ops << operand;
David Neto22f144c2017-06-12 14:26:21 -04004668 }
4669
SJW88ed5fe2020-05-11 12:40:57 -05004670 replaceSPIRVInst(Placeholder, spv::OpFunctionCall, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004671 }
4672 }
4673 }
4674}
4675
SJW77b87ad2020-04-21 14:37:52 -05004676void SPIRVProducerPass::HandleDeferredDecorations() {
4677 const auto &DL = module->getDataLayout();
alan-baker5a8c3be2020-09-09 13:44:26 -04004678 if (getTypesNeedingArrayStride().empty()) {
David Neto1a1a0582017-07-07 12:01:44 -04004679 return;
David Netoc6f3ab22018-04-06 18:02:31 -04004680 }
David Neto1a1a0582017-07-07 12:01:44 -04004681
David Netoc6f3ab22018-04-06 18:02:31 -04004682 // Insert ArrayStride decorations on pointer types, due to OpPtrAccessChain
4683 // instructions we generated earlier.
David Neto85082642018-03-24 06:55:20 -07004684 for (auto *type : getTypesNeedingArrayStride()) {
4685 Type *elemTy = nullptr;
4686 if (auto *ptrTy = dyn_cast<PointerType>(type)) {
4687 elemTy = ptrTy->getElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05004688 } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
alan-baker8eb435a2020-04-08 00:42:06 -04004689 elemTy = arrayTy->getElementType();
4690 } else if (auto *vecTy = dyn_cast<VectorType>(type)) {
4691 elemTy = vecTy->getElementType();
David Neto85082642018-03-24 06:55:20 -07004692 } else {
4693 errs() << "Unhandled strided type " << *type << "\n";
4694 llvm_unreachable("Unhandled strided type");
4695 }
David Neto1a1a0582017-07-07 12:01:44 -04004696
4697 // Ops[0] = Target ID
4698 // Ops[1] = Decoration (ArrayStride)
4699 // Ops[2] = Stride number (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004700 SPIRVOperandVec Ops;
David Neto1a1a0582017-07-07 12:01:44 -04004701
David Neto85082642018-03-24 06:55:20 -07004702 // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
Alan Bakerfcda9482018-10-02 17:09:59 -04004703 const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));
David Neto257c3892018-04-11 13:19:45 -04004704
SJW01901d92020-05-21 08:58:31 -05004705 Ops << type << spv::DecorationArrayStride << stride;
David Neto1a1a0582017-07-07 12:01:44 -04004706
SJWf93f5f32020-05-05 07:27:56 -05004707 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04004708 }
David Neto1a1a0582017-07-07 12:01:44 -04004709}
4710
SJW61531372020-06-09 07:31:08 -05004711glsl::ExtInst
4712SPIRVProducerPass::getExtInstEnum(const Builtins::FunctionInfo &func_info) {
SJW61531372020-06-09 07:31:08 -05004713 switch (func_info.getType()) {
SJW2c317da2020-03-23 07:39:13 -05004714 case Builtins::kClamp: {
SJW61531372020-06-09 07:31:08 -05004715 auto param_type = func_info.getParameter(0);
SJW2c317da2020-03-23 07:39:13 -05004716 if (param_type.type_id == Type::FloatTyID) {
4717 return glsl::ExtInst::ExtInstFClamp;
4718 }
4719 return param_type.is_signed ? glsl::ExtInst::ExtInstSClamp
4720 : glsl::ExtInst::ExtInstUClamp;
4721 }
4722 case Builtins::kMax: {
SJW61531372020-06-09 07:31:08 -05004723 auto param_type = func_info.getParameter(0);
SJW2c317da2020-03-23 07:39:13 -05004724 if (param_type.type_id == Type::FloatTyID) {
4725 return glsl::ExtInst::ExtInstFMax;
4726 }
4727 return param_type.is_signed ? glsl::ExtInst::ExtInstSMax
4728 : glsl::ExtInst::ExtInstUMax;
4729 }
4730 case Builtins::kMin: {
SJW61531372020-06-09 07:31:08 -05004731 auto param_type = func_info.getParameter(0);
SJW2c317da2020-03-23 07:39:13 -05004732 if (param_type.type_id == Type::FloatTyID) {
4733 return glsl::ExtInst::ExtInstFMin;
4734 }
4735 return param_type.is_signed ? glsl::ExtInst::ExtInstSMin
4736 : glsl::ExtInst::ExtInstUMin;
4737 }
4738 case Builtins::kAbs:
4739 return glsl::ExtInst::ExtInstSAbs;
4740 case Builtins::kFmax:
Marco Antognini55d51862020-07-21 17:50:07 +01004741 return glsl::ExtInst::ExtInstNMax;
SJW2c317da2020-03-23 07:39:13 -05004742 case Builtins::kFmin:
Marco Antognini55d51862020-07-21 17:50:07 +01004743 return glsl::ExtInst::ExtInstNMin;
SJW2c317da2020-03-23 07:39:13 -05004744 case Builtins::kDegrees:
4745 return glsl::ExtInst::ExtInstDegrees;
4746 case Builtins::kRadians:
4747 return glsl::ExtInst::ExtInstRadians;
4748 case Builtins::kMix:
4749 return glsl::ExtInst::ExtInstFMix;
4750 case Builtins::kAcos:
4751 case Builtins::kAcospi:
4752 return glsl::ExtInst::ExtInstAcos;
4753 case Builtins::kAcosh:
4754 return glsl::ExtInst::ExtInstAcosh;
4755 case Builtins::kAsin:
4756 case Builtins::kAsinpi:
4757 return glsl::ExtInst::ExtInstAsin;
4758 case Builtins::kAsinh:
4759 return glsl::ExtInst::ExtInstAsinh;
4760 case Builtins::kAtan:
4761 case Builtins::kAtanpi:
4762 return glsl::ExtInst::ExtInstAtan;
4763 case Builtins::kAtanh:
4764 return glsl::ExtInst::ExtInstAtanh;
4765 case Builtins::kAtan2:
4766 case Builtins::kAtan2pi:
4767 return glsl::ExtInst::ExtInstAtan2;
4768 case Builtins::kCeil:
4769 return glsl::ExtInst::ExtInstCeil;
4770 case Builtins::kSin:
4771 case Builtins::kHalfSin:
4772 case Builtins::kNativeSin:
4773 return glsl::ExtInst::ExtInstSin;
4774 case Builtins::kSinh:
4775 return glsl::ExtInst::ExtInstSinh;
4776 case Builtins::kCos:
4777 case Builtins::kHalfCos:
4778 case Builtins::kNativeCos:
4779 return glsl::ExtInst::ExtInstCos;
4780 case Builtins::kCosh:
4781 return glsl::ExtInst::ExtInstCosh;
4782 case Builtins::kTan:
4783 case Builtins::kHalfTan:
4784 case Builtins::kNativeTan:
4785 return glsl::ExtInst::ExtInstTan;
4786 case Builtins::kTanh:
4787 return glsl::ExtInst::ExtInstTanh;
4788 case Builtins::kExp:
4789 case Builtins::kHalfExp:
4790 case Builtins::kNativeExp:
4791 return glsl::ExtInst::ExtInstExp;
4792 case Builtins::kExp2:
4793 case Builtins::kHalfExp2:
4794 case Builtins::kNativeExp2:
4795 return glsl::ExtInst::ExtInstExp2;
4796 case Builtins::kLog:
4797 case Builtins::kHalfLog:
4798 case Builtins::kNativeLog:
4799 return glsl::ExtInst::ExtInstLog;
4800 case Builtins::kLog2:
4801 case Builtins::kHalfLog2:
4802 case Builtins::kNativeLog2:
4803 return glsl::ExtInst::ExtInstLog2;
4804 case Builtins::kFabs:
4805 return glsl::ExtInst::ExtInstFAbs;
4806 case Builtins::kFma:
4807 return glsl::ExtInst::ExtInstFma;
4808 case Builtins::kFloor:
4809 return glsl::ExtInst::ExtInstFloor;
4810 case Builtins::kLdexp:
4811 return glsl::ExtInst::ExtInstLdexp;
4812 case Builtins::kPow:
4813 case Builtins::kPowr:
4814 case Builtins::kHalfPowr:
4815 case Builtins::kNativePowr:
4816 return glsl::ExtInst::ExtInstPow;
James Price38553362020-09-03 18:30:40 -04004817 case Builtins::kRint:
4818 return glsl::ExtInst::ExtInstRoundEven;
SJW2c317da2020-03-23 07:39:13 -05004819 case Builtins::kRound:
4820 return glsl::ExtInst::ExtInstRound;
4821 case Builtins::kSqrt:
4822 case Builtins::kHalfSqrt:
4823 case Builtins::kNativeSqrt:
4824 return glsl::ExtInst::ExtInstSqrt;
4825 case Builtins::kRsqrt:
4826 case Builtins::kHalfRsqrt:
4827 case Builtins::kNativeRsqrt:
4828 return glsl::ExtInst::ExtInstInverseSqrt;
4829 case Builtins::kTrunc:
4830 return glsl::ExtInst::ExtInstTrunc;
4831 case Builtins::kFrexp:
4832 return glsl::ExtInst::ExtInstFrexp;
SJW61531372020-06-09 07:31:08 -05004833 case Builtins::kClspvFract:
SJW2c317da2020-03-23 07:39:13 -05004834 case Builtins::kFract:
4835 return glsl::ExtInst::ExtInstFract;
4836 case Builtins::kSign:
4837 return glsl::ExtInst::ExtInstFSign;
4838 case Builtins::kLength:
4839 case Builtins::kFastLength:
4840 return glsl::ExtInst::ExtInstLength;
4841 case Builtins::kDistance:
4842 case Builtins::kFastDistance:
4843 return glsl::ExtInst::ExtInstDistance;
4844 case Builtins::kStep:
4845 return glsl::ExtInst::ExtInstStep;
4846 case Builtins::kSmoothstep:
4847 return glsl::ExtInst::ExtInstSmoothStep;
4848 case Builtins::kCross:
4849 return glsl::ExtInst::ExtInstCross;
4850 case Builtins::kNormalize:
4851 case Builtins::kFastNormalize:
4852 return glsl::ExtInst::ExtInstNormalize;
SJW61531372020-06-09 07:31:08 -05004853 case Builtins::kSpirvPack:
4854 return glsl::ExtInst::ExtInstPackHalf2x16;
4855 case Builtins::kSpirvUnpack:
4856 return glsl::ExtInst::ExtInstUnpackHalf2x16;
SJW2c317da2020-03-23 07:39:13 -05004857 default:
4858 break;
4859 }
4860
SJW61531372020-06-09 07:31:08 -05004861 if (func_info.getName().find("llvm.fmuladd.") == 0) {
4862 return glsl::ExtInst::ExtInstFma;
4863 }
4864 return kGlslExtInstBad;
David Neto3fbb4072017-10-16 11:28:14 -04004865}
4866
SJW61531372020-06-09 07:31:08 -05004867glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(
4868 const Builtins::FunctionInfo &func_info) {
4869 switch (func_info.getType()) {
SJW2c317da2020-03-23 07:39:13 -05004870 case Builtins::kClz:
4871 return glsl::ExtInst::ExtInstFindUMsb;
4872 case Builtins::kAcospi:
4873 return glsl::ExtInst::ExtInstAcos;
4874 case Builtins::kAsinpi:
4875 return glsl::ExtInst::ExtInstAsin;
4876 case Builtins::kAtanpi:
4877 return glsl::ExtInst::ExtInstAtan;
4878 case Builtins::kAtan2pi:
4879 return glsl::ExtInst::ExtInstAtan2;
4880 default:
4881 break;
4882 }
4883 return kGlslExtInstBad;
David Neto3fbb4072017-10-16 11:28:14 -04004884}
4885
SJW61531372020-06-09 07:31:08 -05004886glsl::ExtInst SPIRVProducerPass::getDirectOrIndirectExtInstEnum(
4887 const Builtins::FunctionInfo &func_info) {
4888 auto direct = getExtInstEnum(func_info);
David Neto3fbb4072017-10-16 11:28:14 -04004889 if (direct != kGlslExtInstBad)
4890 return direct;
SJW61531372020-06-09 07:31:08 -05004891 return getIndirectExtInstEnum(func_info);
David Neto22f144c2017-06-12 14:26:21 -04004892}
4893
David Neto22f144c2017-06-12 14:26:21 -04004894void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
David Neto0676e6f2017-07-11 18:47:44 -04004895 binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
David Neto22f144c2017-06-12 14:26:21 -04004896}
4897
SJW88ed5fe2020-05-11 12:40:57 -05004898void SPIRVProducerPass::WriteResultID(const SPIRVInstruction &Inst) {
SJW01901d92020-05-21 08:58:31 -05004899 WriteOneWord(Inst.getResultID().get());
David Neto22f144c2017-06-12 14:26:21 -04004900}
4901
SJW88ed5fe2020-05-11 12:40:57 -05004902void SPIRVProducerPass::WriteWordCountAndOpcode(const SPIRVInstruction &Inst) {
David Neto22f144c2017-06-12 14:26:21 -04004903 // High 16 bit : Word Count
4904 // Low 16 bit : Opcode
SJW88ed5fe2020-05-11 12:40:57 -05004905 uint32_t Word = Inst.getOpcode();
4906 const uint32_t count = Inst.getWordCount();
David Netoee2660d2018-06-28 16:31:29 -04004907 if (count > 65535) {
4908 errs() << "Word count limit of 65535 exceeded: " << count << "\n";
4909 llvm_unreachable("Word count too high");
4910 }
SJW88ed5fe2020-05-11 12:40:57 -05004911 Word |= Inst.getWordCount() << 16;
David Neto22f144c2017-06-12 14:26:21 -04004912 WriteOneWord(Word);
4913}
4914
SJW88ed5fe2020-05-11 12:40:57 -05004915void SPIRVProducerPass::WriteOperand(const SPIRVOperand &Op) {
4916 SPIRVOperandType OpTy = Op.getType();
David Neto22f144c2017-06-12 14:26:21 -04004917 switch (OpTy) {
4918 default: {
4919 llvm_unreachable("Unsupported SPIRV Operand Type???");
4920 break;
4921 }
4922 case SPIRVOperandType::NUMBERID: {
SJW88ed5fe2020-05-11 12:40:57 -05004923 WriteOneWord(Op.getNumID());
David Neto22f144c2017-06-12 14:26:21 -04004924 break;
4925 }
4926 case SPIRVOperandType::LITERAL_STRING: {
SJW88ed5fe2020-05-11 12:40:57 -05004927 std::string Str = Op.getLiteralStr();
David Neto22f144c2017-06-12 14:26:21 -04004928 const char *Data = Str.c_str();
4929 size_t WordSize = Str.size() / 4;
4930 for (unsigned Idx = 0; Idx < WordSize; Idx++) {
4931 WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
4932 }
4933
4934 uint32_t Remainder = Str.size() % 4;
4935 uint32_t LastWord = 0;
4936 if (Remainder) {
4937 for (unsigned Idx = 0; Idx < Remainder; Idx++) {
4938 LastWord |= Data[4 * WordSize + Idx] << 8 * Idx;
4939 }
4940 }
4941
4942 WriteOneWord(LastWord);
4943 break;
4944 }
SJW88ed5fe2020-05-11 12:40:57 -05004945 case SPIRVOperandType::LITERAL_WORD: {
4946 WriteOneWord(Op.getLiteralNum()[0]);
4947 break;
4948 }
4949 case SPIRVOperandType::LITERAL_DWORD: {
4950 WriteOneWord(Op.getLiteralNum()[0]);
4951 WriteOneWord(Op.getLiteralNum()[1]);
David Neto22f144c2017-06-12 14:26:21 -04004952 break;
4953 }
4954 }
4955}
4956
4957void SPIRVProducerPass::WriteSPIRVBinary() {
SJW69939d52020-04-16 07:29:07 -05004958 for (int i = 0; i < kSectionCount; ++i) {
4959 WriteSPIRVBinary(SPIRVSections[i]);
4960 }
4961}
4962
4963void SPIRVProducerPass::WriteSPIRVBinary(SPIRVInstructionList &SPIRVInstList) {
SJW88ed5fe2020-05-11 12:40:57 -05004964 for (const auto &Inst : SPIRVInstList) {
4965 const auto &Ops = Inst.getOperands();
4966 spv::Op Opcode = static_cast<spv::Op>(Inst.getOpcode());
David Neto22f144c2017-06-12 14:26:21 -04004967
4968 switch (Opcode) {
4969 default: {
David Neto5c22a252018-03-15 16:07:41 -04004970 errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
David Neto22f144c2017-06-12 14:26:21 -04004971 llvm_unreachable("Unsupported SPIRV instruction");
4972 break;
4973 }
Marco Antognini68e5c512020-09-09 16:08:57 +01004974 case spv::OpUnreachable:
David Neto22f144c2017-06-12 14:26:21 -04004975 case spv::OpCapability:
4976 case spv::OpExtension:
4977 case spv::OpMemoryModel:
4978 case spv::OpEntryPoint:
4979 case spv::OpExecutionMode:
4980 case spv::OpSource:
4981 case spv::OpDecorate:
4982 case spv::OpMemberDecorate:
4983 case spv::OpBranch:
4984 case spv::OpBranchConditional:
4985 case spv::OpSelectionMerge:
4986 case spv::OpLoopMerge:
4987 case spv::OpStore:
4988 case spv::OpImageWrite:
4989 case spv::OpReturnValue:
4990 case spv::OpControlBarrier:
4991 case spv::OpMemoryBarrier:
4992 case spv::OpReturn:
4993 case spv::OpFunctionEnd:
4994 case spv::OpCopyMemory: {
4995 WriteWordCountAndOpcode(Inst);
4996 for (uint32_t i = 0; i < Ops.size(); i++) {
4997 WriteOperand(Ops[i]);
4998 }
4999 break;
5000 }
5001 case spv::OpTypeBool:
5002 case spv::OpTypeVoid:
5003 case spv::OpTypeSampler:
5004 case spv::OpLabel:
5005 case spv::OpExtInstImport:
5006 case spv::OpTypePointer:
5007 case spv::OpTypeRuntimeArray:
5008 case spv::OpTypeStruct:
5009 case spv::OpTypeImage:
5010 case spv::OpTypeSampledImage:
5011 case spv::OpTypeInt:
5012 case spv::OpTypeFloat:
5013 case spv::OpTypeArray:
5014 case spv::OpTypeVector:
alan-baker86ce19c2020-08-05 13:09:19 -04005015 case spv::OpTypeFunction:
5016 case spv::OpString: {
David Neto22f144c2017-06-12 14:26:21 -04005017 WriteWordCountAndOpcode(Inst);
5018 WriteResultID(Inst);
5019 for (uint32_t i = 0; i < Ops.size(); i++) {
5020 WriteOperand(Ops[i]);
5021 }
5022 break;
5023 }
5024 case spv::OpFunction:
5025 case spv::OpFunctionParameter:
5026 case spv::OpAccessChain:
5027 case spv::OpPtrAccessChain:
5028 case spv::OpInBoundsAccessChain:
5029 case spv::OpUConvert:
5030 case spv::OpSConvert:
5031 case spv::OpConvertFToU:
5032 case spv::OpConvertFToS:
5033 case spv::OpConvertUToF:
5034 case spv::OpConvertSToF:
5035 case spv::OpFConvert:
5036 case spv::OpConvertPtrToU:
5037 case spv::OpConvertUToPtr:
5038 case spv::OpBitcast:
alan-bakerc9c55ae2019-12-02 16:01:27 -05005039 case spv::OpFNegate:
David Neto22f144c2017-06-12 14:26:21 -04005040 case spv::OpIAdd:
5041 case spv::OpFAdd:
5042 case spv::OpISub:
5043 case spv::OpFSub:
5044 case spv::OpIMul:
5045 case spv::OpFMul:
5046 case spv::OpUDiv:
5047 case spv::OpSDiv:
5048 case spv::OpFDiv:
5049 case spv::OpUMod:
5050 case spv::OpSRem:
5051 case spv::OpFRem:
Kévin Petit8a560882019-03-21 15:24:34 +00005052 case spv::OpUMulExtended:
5053 case spv::OpSMulExtended:
David Neto22f144c2017-06-12 14:26:21 -04005054 case spv::OpBitwiseOr:
5055 case spv::OpBitwiseXor:
5056 case spv::OpBitwiseAnd:
David Netoa394f392017-08-26 20:45:29 -04005057 case spv::OpNot:
David Neto22f144c2017-06-12 14:26:21 -04005058 case spv::OpShiftLeftLogical:
5059 case spv::OpShiftRightLogical:
5060 case spv::OpShiftRightArithmetic:
5061 case spv::OpBitCount:
David Netoab03f432017-11-03 17:00:44 -04005062 case spv::OpCompositeConstruct:
David Neto22f144c2017-06-12 14:26:21 -04005063 case spv::OpCompositeExtract:
5064 case spv::OpVectorExtractDynamic:
5065 case spv::OpCompositeInsert:
David Neto0a2f98d2017-09-15 19:38:40 -04005066 case spv::OpCopyObject:
David Neto22f144c2017-06-12 14:26:21 -04005067 case spv::OpVectorInsertDynamic:
5068 case spv::OpVectorShuffle:
5069 case spv::OpIEqual:
5070 case spv::OpINotEqual:
5071 case spv::OpUGreaterThan:
5072 case spv::OpUGreaterThanEqual:
5073 case spv::OpULessThan:
5074 case spv::OpULessThanEqual:
5075 case spv::OpSGreaterThan:
5076 case spv::OpSGreaterThanEqual:
5077 case spv::OpSLessThan:
5078 case spv::OpSLessThanEqual:
5079 case spv::OpFOrdEqual:
5080 case spv::OpFOrdGreaterThan:
5081 case spv::OpFOrdGreaterThanEqual:
5082 case spv::OpFOrdLessThan:
5083 case spv::OpFOrdLessThanEqual:
5084 case spv::OpFOrdNotEqual:
5085 case spv::OpFUnordEqual:
5086 case spv::OpFUnordGreaterThan:
5087 case spv::OpFUnordGreaterThanEqual:
5088 case spv::OpFUnordLessThan:
5089 case spv::OpFUnordLessThanEqual:
5090 case spv::OpFUnordNotEqual:
5091 case spv::OpExtInst:
5092 case spv::OpIsInf:
5093 case spv::OpIsNan:
5094 case spv::OpAny:
5095 case spv::OpAll:
5096 case spv::OpUndef:
5097 case spv::OpConstantNull:
5098 case spv::OpLogicalOr:
5099 case spv::OpLogicalAnd:
5100 case spv::OpLogicalNot:
5101 case spv::OpLogicalNotEqual:
5102 case spv::OpConstantComposite:
5103 case spv::OpSpecConstantComposite:
5104 case spv::OpConstantTrue:
5105 case spv::OpConstantFalse:
5106 case spv::OpConstant:
5107 case spv::OpSpecConstant:
5108 case spv::OpVariable:
5109 case spv::OpFunctionCall:
5110 case spv::OpSampledImage:
alan-baker75090e42020-02-20 11:21:04 -05005111 case spv::OpImageFetch:
alan-bakerf6bc8252020-09-23 14:58:55 -04005112 case spv::OpImageRead:
David Neto22f144c2017-06-12 14:26:21 -04005113 case spv::OpImageSampleExplicitLod:
David Neto5c22a252018-03-15 16:07:41 -04005114 case spv::OpImageQuerySize:
alan-bakerce179f12019-12-06 19:02:22 -05005115 case spv::OpImageQuerySizeLod:
David Neto22f144c2017-06-12 14:26:21 -04005116 case spv::OpSelect:
5117 case spv::OpPhi:
5118 case spv::OpLoad:
5119 case spv::OpAtomicIAdd:
5120 case spv::OpAtomicISub:
5121 case spv::OpAtomicExchange:
5122 case spv::OpAtomicIIncrement:
5123 case spv::OpAtomicIDecrement:
5124 case spv::OpAtomicCompareExchange:
5125 case spv::OpAtomicUMin:
5126 case spv::OpAtomicSMin:
5127 case spv::OpAtomicUMax:
5128 case spv::OpAtomicSMax:
5129 case spv::OpAtomicAnd:
5130 case spv::OpAtomicOr:
5131 case spv::OpAtomicXor:
SJW806a5d82020-07-15 12:51:38 -05005132 case spv::OpDot:
5133 case spv::OpGroupNonUniformAll:
5134 case spv::OpGroupNonUniformAny:
5135 case spv::OpGroupNonUniformBroadcast:
5136 case spv::OpGroupNonUniformIAdd:
5137 case spv::OpGroupNonUniformFAdd:
5138 case spv::OpGroupNonUniformSMin:
5139 case spv::OpGroupNonUniformUMin:
5140 case spv::OpGroupNonUniformFMin:
5141 case spv::OpGroupNonUniformSMax:
5142 case spv::OpGroupNonUniformUMax:
5143 case spv::OpGroupNonUniformFMax: {
David Neto22f144c2017-06-12 14:26:21 -04005144 WriteWordCountAndOpcode(Inst);
5145 WriteOperand(Ops[0]);
5146 WriteResultID(Inst);
5147 for (uint32_t i = 1; i < Ops.size(); i++) {
5148 WriteOperand(Ops[i]);
5149 }
5150 break;
5151 }
5152 }
5153 }
5154}
Alan Baker9bf93fb2018-08-28 16:59:26 -04005155
alan-bakerb6b09dc2018-11-08 16:59:28 -05005156bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
Alan Baker9bf93fb2018-08-28 16:59:26 -04005157 switch (type->getTypeID()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005158 case Type::HalfTyID:
5159 case Type::FloatTyID:
5160 case Type::DoubleTyID:
5161 case Type::IntegerTyID:
James Price59a1c752020-04-23 23:06:16 -04005162 case Type::FixedVectorTyID:
alan-bakerb6b09dc2018-11-08 16:59:28 -05005163 return true;
5164 case Type::PointerTyID: {
5165 const PointerType *pointer_type = cast<PointerType>(type);
5166 if (pointer_type->getPointerAddressSpace() !=
5167 AddressSpace::UniformConstant) {
5168 auto pointee_type = pointer_type->getPointerElementType();
5169 if (pointee_type->isStructTy() &&
5170 cast<StructType>(pointee_type)->isOpaque()) {
5171 // Images and samplers are not nullable.
5172 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005173 }
Alan Baker9bf93fb2018-08-28 16:59:26 -04005174 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05005175 return true;
5176 }
5177 case Type::ArrayTyID:
alan-baker8eb435a2020-04-08 00:42:06 -04005178 return IsTypeNullable(type->getArrayElementType());
alan-bakerb6b09dc2018-11-08 16:59:28 -05005179 case Type::StructTyID: {
5180 const StructType *struct_type = cast<StructType>(type);
5181 // Images and samplers are not nullable.
5182 if (struct_type->isOpaque())
Alan Baker9bf93fb2018-08-28 16:59:26 -04005183 return false;
alan-bakerb6b09dc2018-11-08 16:59:28 -05005184 for (const auto element : struct_type->elements()) {
5185 if (!IsTypeNullable(element))
5186 return false;
5187 }
5188 return true;
5189 }
5190 default:
5191 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005192 }
5193}
Alan Bakerfcda9482018-10-02 17:09:59 -04005194
SJW77b87ad2020-04-21 14:37:52 -05005195void SPIRVProducerPass::PopulateUBOTypeMaps() {
Alan Bakerfcda9482018-10-02 17:09:59 -04005196 if (auto *offsets_md =
SJW77b87ad2020-04-21 14:37:52 -05005197 module->getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
Alan Bakerfcda9482018-10-02 17:09:59 -04005198 // Metdata is stored as key-value pair operands. The first element of each
5199 // operand is the type and the second is a vector of offsets.
5200 for (const auto *operand : offsets_md->operands()) {
5201 const auto *pair = cast<MDTuple>(operand);
5202 auto *type =
5203 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5204 const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
5205 std::vector<uint32_t> offsets;
5206 for (const Metadata *offset_md : offset_vector->operands()) {
5207 const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
alan-bakerb6b09dc2018-11-08 16:59:28 -05005208 offsets.push_back(static_cast<uint32_t>(
5209 cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
Alan Bakerfcda9482018-10-02 17:09:59 -04005210 }
5211 RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
5212 }
5213 }
5214
5215 if (auto *sizes_md =
SJW77b87ad2020-04-21 14:37:52 -05005216 module->getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
Alan Bakerfcda9482018-10-02 17:09:59 -04005217 // Metadata is stored as key-value pair operands. The first element of each
5218 // operand is the type and the second is a triple of sizes: type size in
5219 // bits, store size and alloc size.
5220 for (const auto *operand : sizes_md->operands()) {
5221 const auto *pair = cast<MDTuple>(operand);
5222 auto *type =
5223 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5224 const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
5225 uint64_t type_size_in_bits =
5226 cast<ConstantInt>(
5227 cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
5228 ->getZExtValue();
5229 uint64_t type_store_size =
5230 cast<ConstantInt>(
5231 cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
5232 ->getZExtValue();
5233 uint64_t type_alloc_size =
5234 cast<ConstantInt>(
5235 cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
5236 ->getZExtValue();
5237 RemappedUBOTypeSizes.insert(std::make_pair(
5238 type, std::make_tuple(type_size_in_bits, type_store_size,
5239 type_alloc_size)));
5240 }
5241 }
5242}
5243
5244uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
5245 const DataLayout &DL) {
5246 auto iter = RemappedUBOTypeSizes.find(type);
5247 if (iter != RemappedUBOTypeSizes.end()) {
5248 return std::get<0>(iter->second);
5249 }
5250
5251 return DL.getTypeSizeInBits(type);
5252}
5253
5254uint64_t SPIRVProducerPass::GetTypeStoreSize(Type *type, const DataLayout &DL) {
5255 auto iter = RemappedUBOTypeSizes.find(type);
5256 if (iter != RemappedUBOTypeSizes.end()) {
5257 return std::get<1>(iter->second);
5258 }
5259
5260 return DL.getTypeStoreSize(type);
5261}
5262
5263uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
5264 auto iter = RemappedUBOTypeSizes.find(type);
5265 if (iter != RemappedUBOTypeSizes.end()) {
5266 return std::get<2>(iter->second);
5267 }
5268
5269 return DL.getTypeAllocSize(type);
5270}
alan-baker5b86ed72019-02-15 08:26:50 -05005271
Kévin Petitbbbda972020-03-03 19:16:31 +00005272uint32_t SPIRVProducerPass::GetExplicitLayoutStructMemberOffset(
5273 StructType *type, unsigned member, const DataLayout &DL) {
5274 const auto StructLayout = DL.getStructLayout(type);
5275 // Search for the correct offsets if this type was remapped.
5276 std::vector<uint32_t> *offsets = nullptr;
5277 auto iter = RemappedUBOTypeOffsets.find(type);
5278 if (iter != RemappedUBOTypeOffsets.end()) {
5279 offsets = &iter->second;
5280 }
5281 auto ByteOffset =
5282 static_cast<uint32_t>(StructLayout->getElementOffset(member));
5283 if (offsets) {
5284 ByteOffset = (*offsets)[member];
5285 }
5286
5287 return ByteOffset;
5288}
5289
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005290void SPIRVProducerPass::setVariablePointersCapabilities(
5291 unsigned address_space) {
alan-baker5b86ed72019-02-15 08:26:50 -05005292 if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
SJW01901d92020-05-21 08:58:31 -05005293 setVariablePointersStorageBuffer();
alan-baker5b86ed72019-02-15 08:26:50 -05005294 } else {
SJW01901d92020-05-21 08:58:31 -05005295 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05005296 }
5297}
5298
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005299Value *SPIRVProducerPass::GetBasePointer(Value *v) {
alan-baker5b86ed72019-02-15 08:26:50 -05005300 if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
5301 return GetBasePointer(gep->getPointerOperand());
5302 }
5303
5304 // Conservatively return |v|.
5305 return v;
5306}
5307
5308bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
5309 if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
5310 if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
alan-baker7506abb2020-09-10 15:02:55 -04005311 const auto &lhs_func_info =
5312 Builtins::Lookup(lhs_call->getCalledFunction());
5313 const auto &rhs_func_info =
5314 Builtins::Lookup(rhs_call->getCalledFunction());
SJW61531372020-06-09 07:31:08 -05005315 if (lhs_func_info.getType() == Builtins::kClspvResource &&
5316 rhs_func_info.getType() == Builtins::kClspvResource) {
alan-baker5b86ed72019-02-15 08:26:50 -05005317 // For resource accessors, match descriptor set and binding.
5318 if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
5319 lhs_call->getOperand(1) == rhs_call->getOperand(1))
5320 return true;
SJW61531372020-06-09 07:31:08 -05005321 } else if (lhs_func_info.getType() == Builtins::kClspvLocal &&
5322 rhs_func_info.getType() == Builtins::kClspvLocal) {
alan-baker5b86ed72019-02-15 08:26:50 -05005323 // For workgroup resources, match spec id.
5324 if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
5325 return true;
5326 }
5327 }
5328 }
5329
5330 return false;
5331}
5332
5333bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
5334 assert(inst->getType()->isPointerTy());
5335 assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
5336 spv::StorageClassStorageBuffer);
5337 const bool hack_undef = clspv::Option::HackUndef();
5338 if (auto *select = dyn_cast<SelectInst>(inst)) {
5339 auto *true_base = GetBasePointer(select->getTrueValue());
5340 auto *false_base = GetBasePointer(select->getFalseValue());
5341
5342 if (true_base == false_base)
5343 return true;
5344
5345 // If either the true or false operand is a null, then we satisfy the same
5346 // object constraint.
5347 if (auto *true_cst = dyn_cast<Constant>(true_base)) {
5348 if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
5349 return true;
5350 }
5351
5352 if (auto *false_cst = dyn_cast<Constant>(false_base)) {
5353 if (false_cst->isNullValue() ||
5354 (hack_undef && isa<UndefValue>(false_base)))
5355 return true;
5356 }
5357
5358 if (sameResource(true_base, false_base))
5359 return true;
5360 } else if (auto *phi = dyn_cast<PHINode>(inst)) {
5361 Value *value = nullptr;
5362 bool ok = true;
5363 for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
5364 auto *base = GetBasePointer(phi->getIncomingValue(i));
5365 // Null values satisfy the constraint of selecting of selecting from the
5366 // same object.
5367 if (!value) {
5368 if (auto *cst = dyn_cast<Constant>(base)) {
5369 if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
5370 value = base;
5371 } else {
5372 value = base;
5373 }
5374 } else if (base != value) {
5375 if (auto *base_cst = dyn_cast<Constant>(base)) {
5376 if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
5377 continue;
5378 }
5379
5380 if (sameResource(value, base))
5381 continue;
5382
5383 // Values don't represent the same base.
5384 ok = false;
5385 }
5386 }
5387
5388 return ok;
5389 }
5390
5391 // Conservatively return false.
5392 return false;
5393}
alan-bakere9308012019-03-15 10:25:13 -04005394
5395bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
5396 if (!Arg.getType()->isPointerTy() ||
5397 Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
5398 // Only SSBOs need to be annotated as coherent.
5399 return false;
5400 }
5401
5402 DenseSet<Value *> visited;
5403 std::vector<Value *> stack;
5404 for (auto *U : Arg.getParent()->users()) {
5405 if (auto *call = dyn_cast<CallInst>(U)) {
5406 stack.push_back(call->getOperand(Arg.getArgNo()));
5407 }
5408 }
5409
5410 while (!stack.empty()) {
5411 Value *v = stack.back();
5412 stack.pop_back();
5413
5414 if (!visited.insert(v).second)
5415 continue;
5416
5417 auto *resource_call = dyn_cast<CallInst>(v);
5418 if (resource_call &&
SJW61531372020-06-09 07:31:08 -05005419 Builtins::Lookup(resource_call->getCalledFunction()).getType() ==
5420 Builtins::kClspvResource) {
alan-bakere9308012019-03-15 10:25:13 -04005421 // If this is a resource accessor function, check if the coherent operand
5422 // is set.
5423 const auto coherent =
5424 unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
5425 ->getZExtValue());
5426 if (coherent == 1)
5427 return true;
5428 } else if (auto *arg = dyn_cast<Argument>(v)) {
5429 // If this is a function argument, trace through its callers.
alan-bakere98f3f92019-04-08 15:06:36 -04005430 for (auto U : arg->getParent()->users()) {
alan-bakere9308012019-03-15 10:25:13 -04005431 if (auto *call = dyn_cast<CallInst>(U)) {
5432 stack.push_back(call->getOperand(arg->getArgNo()));
5433 }
5434 }
5435 } else if (auto *user = dyn_cast<User>(v)) {
5436 // If this is a user, traverse all operands that could lead to resource
5437 // variables.
5438 for (unsigned i = 0; i != user->getNumOperands(); ++i) {
5439 Value *operand = user->getOperand(i);
5440 if (operand->getType()->isPointerTy() &&
5441 operand->getType()->getPointerAddressSpace() ==
5442 clspv::AddressSpace::Global) {
5443 stack.push_back(operand);
5444 }
5445 }
5446 }
5447 }
5448
5449 // No coherent resource variables encountered.
5450 return false;
5451}
alan-baker06cad652019-12-03 17:56:47 -05005452
SJW77b87ad2020-04-21 14:37:52 -05005453void SPIRVProducerPass::PopulateStructuredCFGMaps() {
alan-baker06cad652019-12-03 17:56:47 -05005454 // First, track loop merges and continues.
5455 DenseSet<BasicBlock *> LoopMergesAndContinues;
SJW77b87ad2020-04-21 14:37:52 -05005456 for (auto &F : *module) {
alan-baker06cad652019-12-03 17:56:47 -05005457 if (F.isDeclaration())
5458 continue;
5459
5460 DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
5461 const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>(F).getLoopInfo();
5462 std::deque<BasicBlock *> order;
5463 DenseSet<BasicBlock *> visited;
5464 clspv::ComputeStructuredOrder(&*F.begin(), &DT, LI, &order, &visited);
5465
5466 for (auto BB : order) {
5467 auto terminator = BB->getTerminator();
5468 auto branch = dyn_cast<BranchInst>(terminator);
5469 if (LI.isLoopHeader(BB)) {
5470 auto L = LI.getLoopFor(BB);
5471 BasicBlock *ContinueBB = nullptr;
5472 BasicBlock *MergeBB = nullptr;
5473
5474 MergeBB = L->getExitBlock();
5475 if (!MergeBB) {
5476 // StructurizeCFG pass converts CFG into triangle shape and the cfg
5477 // has regions with single entry/exit. As a result, loop should not
5478 // have multiple exits.
5479 llvm_unreachable("Loop has multiple exits???");
5480 }
5481
5482 if (L->isLoopLatch(BB)) {
5483 ContinueBB = BB;
5484 } else {
5485 // From SPIR-V spec 2.11, Continue Target must dominate that back-edge
5486 // block.
5487 BasicBlock *Header = L->getHeader();
5488 BasicBlock *Latch = L->getLoopLatch();
5489 for (auto *loop_block : L->blocks()) {
5490 if (loop_block == Header) {
5491 continue;
5492 }
5493
5494 // Check whether block dominates block with back-edge.
5495 // The loop latch is the single block with a back-edge. If it was
5496 // possible, StructurizeCFG made the loop conform to this
5497 // requirement, otherwise |Latch| is a nullptr.
5498 if (DT.dominates(loop_block, Latch)) {
5499 ContinueBB = loop_block;
5500 }
5501 }
5502
5503 if (!ContinueBB) {
5504 llvm_unreachable("Wrong continue block from loop");
5505 }
5506 }
5507
5508 // Record the continue and merge blocks.
5509 MergeBlocks[BB] = MergeBB;
5510 ContinueBlocks[BB] = ContinueBB;
5511 LoopMergesAndContinues.insert(MergeBB);
5512 LoopMergesAndContinues.insert(ContinueBB);
5513 } else if (branch && branch->isConditional()) {
5514 auto L = LI.getLoopFor(BB);
5515 bool HasBackedge = false;
5516 while (L && !HasBackedge) {
5517 if (L->isLoopLatch(BB)) {
5518 HasBackedge = true;
5519 }
5520 L = L->getParentLoop();
5521 }
5522
5523 if (!HasBackedge) {
5524 // Only need a merge if the branch doesn't include a loop break or
5525 // continue.
5526 auto true_bb = branch->getSuccessor(0);
5527 auto false_bb = branch->getSuccessor(1);
5528 if (!LoopMergesAndContinues.count(true_bb) &&
5529 !LoopMergesAndContinues.count(false_bb)) {
5530 // StructurizeCFG pass already manipulated CFG. Just use false block
5531 // of branch instruction as merge block.
5532 MergeBlocks[BB] = false_bb;
5533 }
5534 }
5535 }
5536 }
5537 }
5538}
alan-baker86ce19c2020-08-05 13:09:19 -04005539
5540SPIRVID SPIRVProducerPass::getReflectionImport() {
5541 if (!ReflectionID.isValid()) {
5542 addSPIRVInst<kExtensions>(spv::OpExtension, "SPV_KHR_non_semantic_info");
5543 ReflectionID = addSPIRVInst<kImports>(spv::OpExtInstImport,
5544 "NonSemantic.ClspvReflection.1");
5545 }
5546 return ReflectionID;
5547}
5548
5549void SPIRVProducerPass::GenerateReflection() {
5550 GenerateKernelReflection();
5551 GeneratePushConstantReflection();
5552 GenerateSpecConstantReflection();
5553}
5554
5555void SPIRVProducerPass::GeneratePushConstantReflection() {
5556 if (auto GV = module->getGlobalVariable(clspv::PushConstantsVariableName())) {
5557 auto const &DL = module->getDataLayout();
5558 auto MD = GV->getMetadata(clspv::PushConstantsMetadataName());
5559 auto STy = cast<StructType>(GV->getValueType());
5560
5561 for (unsigned i = 0; i < STy->getNumElements(); i++) {
5562 auto pc = static_cast<clspv::PushConstant>(
5563 mdconst::extract<ConstantInt>(MD->getOperand(i))->getZExtValue());
5564 if (pc == PushConstant::KernelArgument)
5565 continue;
5566
5567 auto memberType = STy->getElementType(i);
5568 auto offset = GetExplicitLayoutStructMemberOffset(STy, i, DL);
5569 unsigned previousOffset = 0;
5570 if (i > 0) {
5571 previousOffset = GetExplicitLayoutStructMemberOffset(STy, i - 1, DL);
5572 }
5573 auto size = static_cast<uint32_t>(GetTypeSizeInBits(memberType, DL)) / 8;
5574 assert(isValidExplicitLayout(*module, STy, i,
5575 spv::StorageClassPushConstant, offset,
5576 previousOffset));
5577
5578 reflection::ExtInst pc_inst = reflection::ExtInstMax;
5579 switch (pc) {
5580 case PushConstant::GlobalOffset:
5581 pc_inst = reflection::ExtInstPushConstantGlobalOffset;
5582 break;
5583 case PushConstant::EnqueuedLocalSize:
5584 pc_inst = reflection::ExtInstPushConstantEnqueuedLocalSize;
5585 break;
5586 case PushConstant::GlobalSize:
5587 pc_inst = reflection::ExtInstPushConstantGlobalSize;
5588 break;
5589 case PushConstant::RegionOffset:
5590 pc_inst = reflection::ExtInstPushConstantRegionOffset;
5591 break;
5592 case PushConstant::NumWorkgroups:
5593 pc_inst = reflection::ExtInstPushConstantNumWorkgroups;
5594 break;
5595 case PushConstant::RegionGroupOffset:
5596 pc_inst = reflection::ExtInstPushConstantRegionGroupOffset;
5597 break;
5598 default:
5599 llvm_unreachable("Unhandled push constant");
5600 break;
5601 }
5602
5603 auto import_id = getReflectionImport();
5604 SPIRVOperandVec Ops;
5605 Ops << getSPIRVType(Type::getVoidTy(module->getContext())) << import_id
5606 << pc_inst << getSPIRVInt32Constant(offset)
5607 << getSPIRVInt32Constant(size);
5608 addSPIRVInst(spv::OpExtInst, Ops);
5609 }
5610 }
5611}
5612
5613void SPIRVProducerPass::GenerateSpecConstantReflection() {
5614 const uint32_t kMax = std::numeric_limits<uint32_t>::max();
5615 uint32_t wgsize_id[3] = {kMax, kMax, kMax};
5616 uint32_t global_offset_id[3] = {kMax, kMax, kMax};
5617 uint32_t work_dim_id = kMax;
5618 for (auto pair : clspv::GetSpecConstants(module)) {
5619 auto kind = pair.first;
5620 auto id = pair.second;
5621
5622 // Local memory size is only used for kernel arguments.
5623 if (kind == SpecConstant::kLocalMemorySize)
5624 continue;
5625
5626 switch (kind) {
5627 case SpecConstant::kWorkgroupSizeX:
5628 wgsize_id[0] = id;
5629 break;
5630 case SpecConstant::kWorkgroupSizeY:
5631 wgsize_id[1] = id;
5632 break;
5633 case SpecConstant::kWorkgroupSizeZ:
5634 wgsize_id[2] = id;
5635 break;
5636 case SpecConstant::kGlobalOffsetX:
5637 global_offset_id[0] = id;
5638 break;
5639 case SpecConstant::kGlobalOffsetY:
5640 global_offset_id[1] = id;
5641 break;
5642 case SpecConstant::kGlobalOffsetZ:
5643 global_offset_id[2] = id;
5644 break;
5645 case SpecConstant::kWorkDim:
5646 work_dim_id = id;
5647 break;
5648 default:
5649 llvm_unreachable("Unhandled spec constant");
5650 }
5651 }
5652
5653 auto import_id = getReflectionImport();
5654 auto void_id = getSPIRVType(Type::getVoidTy(module->getContext()));
5655 SPIRVOperandVec Ops;
5656 if (wgsize_id[0] != kMax) {
5657 assert(wgsize_id[1] != kMax);
5658 assert(wgsize_id[2] != kMax);
5659 Ops.clear();
5660 Ops << void_id << import_id << reflection::ExtInstSpecConstantWorkgroupSize
5661 << getSPIRVInt32Constant(wgsize_id[0])
5662 << getSPIRVInt32Constant(wgsize_id[1])
5663 << getSPIRVInt32Constant(wgsize_id[2]);
5664 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
5665 }
5666 if (global_offset_id[0] != kMax) {
5667 assert(global_offset_id[1] != kMax);
5668 assert(global_offset_id[2] != kMax);
5669 Ops.clear();
5670 Ops << void_id << import_id << reflection::ExtInstSpecConstantGlobalOffset
5671 << getSPIRVInt32Constant(global_offset_id[0])
5672 << getSPIRVInt32Constant(global_offset_id[1])
5673 << getSPIRVInt32Constant(global_offset_id[2]);
5674 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
5675 }
5676 if (work_dim_id != kMax) {
5677 Ops.clear();
5678 Ops << void_id << import_id << reflection::ExtInstSpecConstantWorkDim
5679 << getSPIRVInt32Constant(work_dim_id);
5680 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
5681 }
5682}
5683
5684void SPIRVProducerPass::GenerateKernelReflection() {
5685 const auto &DL = module->getDataLayout();
5686 auto import_id = getReflectionImport();
5687 auto void_id = getSPIRVType(Type::getVoidTy(module->getContext()));
5688
5689 for (auto &F : *module) {
5690 if (F.isDeclaration() || F.getCallingConv() != CallingConv::SPIR_KERNEL) {
5691 continue;
5692 }
5693
5694 // OpString for the kernel name.
5695 auto kernel_name =
5696 addSPIRVInst<kDebug>(spv::OpString, F.getName().str().c_str());
5697
5698 // Kernel declaration
5699 // Ops[0] = void type
5700 // Ops[1] = reflection ext import
5701 // Ops[2] = function id
5702 // Ops[3] = kernel name
5703 SPIRVOperandVec Ops;
5704 Ops << void_id << import_id << reflection::ExtInstKernel << ValueMap[&F]
5705 << kernel_name;
5706 auto kernel_decl = addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
5707
5708 // Generate the required workgroup size property if it was specified.
5709 if (const MDNode *MD = F.getMetadata("reqd_work_group_size")) {
5710 uint32_t CurXDimCst = static_cast<uint32_t>(
5711 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
5712 uint32_t CurYDimCst = static_cast<uint32_t>(
5713 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
5714 uint32_t CurZDimCst = static_cast<uint32_t>(
5715 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
5716
5717 Ops.clear();
5718 Ops << void_id << import_id
5719 << reflection::ExtInstPropertyRequiredWorkgroupSize << kernel_decl
5720 << getSPIRVInt32Constant(CurXDimCst)
5721 << getSPIRVInt32Constant(CurYDimCst)
5722 << getSPIRVInt32Constant(CurZDimCst);
5723 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
5724 }
5725
5726 auto &resource_var_at_index = FunctionToResourceVarsMap[&F];
5727 auto *func_ty = F.getFunctionType();
5728
5729 // If we've clustered POD arguments, then argument details are in metadata.
5730 // If an argument maps to a resource variable, then get descriptor set and
5731 // binding from the resource variable. Other info comes from the metadata.
5732 const auto *arg_map = F.getMetadata(clspv::KernelArgMapMetadataName());
5733 auto local_spec_id_md =
5734 module->getNamedMetadata(clspv::LocalSpecIdMetadataName());
5735 if (arg_map) {
5736 for (const auto &arg : arg_map->operands()) {
5737 const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
5738 assert(arg_node->getNumOperands() == 6);
5739 const auto name =
5740 dyn_cast<MDString>(arg_node->getOperand(0))->getString();
5741 const auto old_index =
5742 dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
5743 // Remapped argument index
5744 const int new_index = static_cast<int>(
5745 dyn_extract<ConstantInt>(arg_node->getOperand(2))->getSExtValue());
5746 const auto offset =
5747 dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
5748 const auto size =
5749 dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
5750 const auto argKind = clspv::GetArgKindFromName(
5751 dyn_cast<MDString>(arg_node->getOperand(5))->getString().str());
5752
5753 // If this is a local memory argument, find the right spec id for this
5754 // argument.
5755 int64_t spec_id = -1;
5756 if (argKind == clspv::ArgKind::Local) {
5757 for (auto spec_id_arg : local_spec_id_md->operands()) {
5758 if ((&F == dyn_cast<Function>(
5759 dyn_cast<ValueAsMetadata>(spec_id_arg->getOperand(0))
5760 ->getValue())) &&
5761 (static_cast<uint64_t>(new_index) ==
5762 mdconst::extract<ConstantInt>(spec_id_arg->getOperand(1))
5763 ->getZExtValue())) {
5764 spec_id =
5765 mdconst::extract<ConstantInt>(spec_id_arg->getOperand(2))
5766 ->getSExtValue();
5767 break;
5768 }
5769 }
5770 }
5771
5772 // Generate the specific argument instruction.
5773 const uint32_t ordinal = static_cast<uint32_t>(old_index);
5774 const uint32_t arg_offset = static_cast<uint32_t>(offset);
5775 const uint32_t arg_size = static_cast<uint32_t>(size);
5776 uint32_t elem_size = 0;
5777 uint32_t descriptor_set = 0;
5778 uint32_t binding = 0;
5779 if (spec_id > 0) {
5780 elem_size = static_cast<uint32_t>(
5781 GetTypeAllocSize(func_ty->getParamType(unsigned(new_index))
5782 ->getPointerElementType(),
5783 DL));
5784 } else if (new_index >= 0) {
5785 auto *info = resource_var_at_index[new_index];
5786 assert(info);
5787 descriptor_set = info->descriptor_set;
5788 binding = info->binding;
5789 }
5790 AddArgumentReflection(kernel_decl, name.str(), argKind, ordinal,
5791 descriptor_set, binding, arg_offset, arg_size,
5792 static_cast<uint32_t>(spec_id), elem_size);
5793 }
5794 } else {
5795 // There is no argument map.
5796 // Take descriptor info from the resource variable calls.
5797 // Take argument name and size from the arguments list.
5798
5799 SmallVector<Argument *, 4> arguments;
5800 for (auto &arg : F.args()) {
5801 arguments.push_back(&arg);
5802 }
5803
5804 unsigned arg_index = 0;
5805 for (auto *info : resource_var_at_index) {
5806 if (info) {
5807 auto arg = arguments[arg_index];
5808 unsigned arg_size = 0;
5809 if (info->arg_kind == clspv::ArgKind::Pod ||
5810 info->arg_kind == clspv::ArgKind::PodUBO ||
5811 info->arg_kind == clspv::ArgKind::PodPushConstant) {
5812 arg_size =
5813 static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
5814 }
5815
5816 // Local pointer arguments are unused in this case.
5817 // offset, spec_id and elem_size always 0.
5818 AddArgumentReflection(kernel_decl, arg->getName().str(),
5819 info->arg_kind, arg_index, info->descriptor_set,
5820 info->binding, 0, arg_size, 0, 0);
5821 }
5822 arg_index++;
5823 }
5824 // Generate mappings for pointer-to-local arguments.
5825 for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
5826 Argument *arg = arguments[arg_index];
5827 auto where = LocalArgSpecIds.find(arg);
5828 if (where != LocalArgSpecIds.end()) {
5829 auto &local_arg_info = LocalSpecIdInfoMap[where->second];
5830
5831 // descriptor_set, binding, offset and size are always 0.
5832 AddArgumentReflection(kernel_decl, arg->getName().str(),
5833 ArgKind::Local, arg_index, 0, 0, 0, 0,
5834 static_cast<uint32_t>(local_arg_info.spec_id),
5835 static_cast<uint32_t>(GetTypeAllocSize(
5836 local_arg_info.elem_type, DL)));
5837 }
5838 }
5839 }
5840 }
5841}
5842
5843void SPIRVProducerPass::AddArgumentReflection(
5844 SPIRVID kernel_decl, const std::string &name, clspv::ArgKind arg_kind,
5845 uint32_t ordinal, uint32_t descriptor_set, uint32_t binding,
5846 uint32_t offset, uint32_t size, uint32_t spec_id, uint32_t elem_size) {
5847 // Generate ArgumentInfo for this argument.
5848 // TODO: generate remaining optional operands.
5849 auto import_id = getReflectionImport();
5850 auto arg_name = addSPIRVInst<kDebug>(spv::OpString, name.c_str());
5851 auto void_id = getSPIRVType(Type::getVoidTy(module->getContext()));
5852 SPIRVOperandVec Ops;
5853 Ops << void_id << import_id << reflection::ExtInstArgumentInfo << arg_name;
5854 auto arg_info = addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
5855
5856 Ops.clear();
5857 Ops << void_id << import_id;
5858 reflection::ExtInst ext_inst = reflection::ExtInstMax;
5859 // Determine the extended instruction.
5860 switch (arg_kind) {
5861 case clspv::ArgKind::Buffer:
5862 ext_inst = reflection::ExtInstArgumentStorageBuffer;
5863 break;
5864 case clspv::ArgKind::BufferUBO:
5865 ext_inst = reflection::ExtInstArgumentUniform;
5866 break;
5867 case clspv::ArgKind::Local:
5868 ext_inst = reflection::ExtInstArgumentWorkgroup;
5869 break;
5870 case clspv::ArgKind::Pod:
5871 ext_inst = reflection::ExtInstArgumentPodStorageBuffer;
5872 break;
5873 case clspv::ArgKind::PodUBO:
5874 ext_inst = reflection::ExtInstArgumentPodUniform;
5875 break;
5876 case clspv::ArgKind::PodPushConstant:
5877 ext_inst = reflection::ExtInstArgumentPodPushConstant;
5878 break;
alan-bakerf6bc8252020-09-23 14:58:55 -04005879 case clspv::ArgKind::SampledImage:
alan-baker86ce19c2020-08-05 13:09:19 -04005880 ext_inst = reflection::ExtInstArgumentSampledImage;
5881 break;
alan-bakerf6bc8252020-09-23 14:58:55 -04005882 case clspv::ArgKind::StorageImage:
alan-baker86ce19c2020-08-05 13:09:19 -04005883 ext_inst = reflection::ExtInstArgumentStorageImage;
5884 break;
5885 case clspv::ArgKind::Sampler:
5886 ext_inst = reflection::ExtInstArgumentSampler;
5887 break;
5888 default:
5889 llvm_unreachable("Unhandled argument reflection");
5890 break;
5891 }
5892 Ops << ext_inst << kernel_decl << getSPIRVInt32Constant(ordinal);
5893
5894 // Add descriptor set and binding for applicable arguments.
5895 switch (arg_kind) {
5896 case clspv::ArgKind::Buffer:
5897 case clspv::ArgKind::BufferUBO:
5898 case clspv::ArgKind::Pod:
5899 case clspv::ArgKind::PodUBO:
alan-bakerf6bc8252020-09-23 14:58:55 -04005900 case clspv::ArgKind::SampledImage:
5901 case clspv::ArgKind::StorageImage:
alan-baker86ce19c2020-08-05 13:09:19 -04005902 case clspv::ArgKind::Sampler:
5903 Ops << getSPIRVInt32Constant(descriptor_set)
5904 << getSPIRVInt32Constant(binding);
5905 break;
5906 default:
5907 break;
5908 }
5909
5910 // Add remaining operands for arguments.
5911 switch (arg_kind) {
5912 case clspv::ArgKind::Local:
5913 Ops << getSPIRVInt32Constant(spec_id) << getSPIRVInt32Constant(elem_size);
5914 break;
5915 case clspv::ArgKind::Pod:
5916 case clspv::ArgKind::PodUBO:
5917 case clspv::ArgKind::PodPushConstant:
5918 Ops << getSPIRVInt32Constant(offset) << getSPIRVInt32Constant(size);
5919 break;
5920 default:
5921 break;
5922 }
5923 Ops << arg_info;
5924 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
5925}