blob: b7cf3b7231970db294cc62167b5ac582245a0fe9 [file] [log] [blame]
// Copyright 2017 The Clspv Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

15#ifdef _MSC_VER
16#pragma warning(push, 0)
17#endif
18
David Neto156783e2017-07-05 15:39:41 -040019#include <cassert>
David Neto257c3892018-04-11 13:19:45 -040020#include <cstring>
David Neto118188e2018-08-24 11:27:54 -040021#include <iomanip>
22#include <list>
David Neto862b7d82018-06-14 18:48:37 -040023#include <memory>
David Neto118188e2018-08-24 11:27:54 -040024#include <set>
25#include <sstream>
26#include <string>
27#include <tuple>
28#include <unordered_set>
29#include <utility>
David Neto862b7d82018-06-14 18:48:37 -040030
David Neto118188e2018-08-24 11:27:54 -040031#include "llvm/ADT/StringSwitch.h"
32#include "llvm/ADT/UniqueVector.h"
33#include "llvm/Analysis/LoopInfo.h"
34#include "llvm/IR/Constants.h"
35#include "llvm/IR/Dominators.h"
36#include "llvm/IR/Instructions.h"
alan-baker5f2e88e2020-12-07 15:24:04 -050037#include "llvm/IR/Intrinsics.h"
David Neto118188e2018-08-24 11:27:54 -040038#include "llvm/IR/Metadata.h"
39#include "llvm/IR/Module.h"
alan-bakerf67468c2019-11-25 15:51:49 -050040#include "llvm/IR/ValueSymbolTable.h"
David Neto118188e2018-08-24 11:27:54 -040041#include "llvm/Pass.h"
42#include "llvm/Support/CommandLine.h"
Kévin Petitbbbda972020-03-03 19:16:31 +000043#include "llvm/Support/MathExtras.h"
David Neto118188e2018-08-24 11:27:54 -040044#include "llvm/Support/raw_ostream.h"
45#include "llvm/Transforms/Utils/Cloning.h"
David Neto22f144c2017-06-12 14:26:21 -040046
SJWf93f5f32020-05-05 07:27:56 -050047// enable spv::HasResultAndType
48#define SPV_ENABLE_UTILITY_CODE
alan-bakere0902602020-03-23 08:43:40 -040049#include "spirv/unified1/spirv.hpp"
David Neto118188e2018-08-24 11:27:54 -040050
David Neto85082642018-03-24 06:55:20 -070051#include "clspv/AddressSpace.h"
David Neto118188e2018-08-24 11:27:54 -040052#include "clspv/Option.h"
alan-baker86ce19c2020-08-05 13:09:19 -040053#include "clspv/PushConstant.h"
54#include "clspv/SpecConstant.h"
David Neto85082642018-03-24 06:55:20 -070055#include "clspv/spirv_c_strings.hpp"
56#include "clspv/spirv_glsl.hpp"
alan-baker86ce19c2020-08-05 13:09:19 -040057#include "clspv/spirv_reflection.hpp"
David Neto22f144c2017-06-12 14:26:21 -040058
David Neto4feb7a42017-10-06 17:29:42 -040059#include "ArgKind.h"
alan-bakerf67468c2019-11-25 15:51:49 -050060#include "Builtins.h"
alan-baker06cad652019-12-03 17:56:47 -050061#include "ComputeStructuredOrder.h"
David Neto85082642018-03-24 06:55:20 -070062#include "ConstantEmitter.h"
Alan Baker202c8c72018-08-13 13:47:44 -040063#include "Constants.h"
David Neto78383442018-06-15 20:31:56 -040064#include "DescriptorCounter.h"
alan-bakerc4579bb2020-04-29 14:15:50 -040065#include "Layout.h"
alan-baker56f7aff2019-05-22 08:06:42 -040066#include "NormalizeGlobalVariable.h"
Diego Novilloa4c44fa2019-04-11 10:56:15 -040067#include "Passes.h"
alan-bakera1be3322020-04-20 12:48:18 -040068#include "SpecConstant.h"
alan-bakerce179f12019-12-06 19:02:22 -050069#include "Types.h"
David Neto48f56a42017-10-06 16:44:25 -040070
David Neto22f144c2017-06-12 14:26:21 -040071#if defined(_MSC_VER)
72#pragma warning(pop)
73#endif
74
75using namespace llvm;
76using namespace clspv;
SJW173c7e92020-03-16 08:44:47 -050077using namespace clspv::Builtins;
SJW806a5d82020-07-15 12:51:38 -050078using namespace clspv::Option;
David Neto156783e2017-07-05 15:39:41 -040079using namespace mdconst;
David Neto22f144c2017-06-12 14:26:21 -040080
81namespace {
David Netocd8ca5f2017-10-02 23:34:11 -040082
// When set, log each resource variable as it is created.
cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

// When set, dump the input LLVM IR at the start of this pass.
cl::opt<bool>
    ShowProducerIR("show-producer-ir", cl::init(false), cl::ReallyHidden,
                   cl::desc("Dump the IR at the start of SPIRVProducer"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
// Sentinel meaning "no GLSL extended instruction".
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);
98
// SPIRV Module Sections (per 2.4 of the SPIR-V spec)
// These are used to collect SPIRVInstructions by type on-the-fly.
// Sections are emitted in this order, so enumerator order matters.
enum SPIRVSection {
  kCapabilities,
  kExtensions,
  kImports,
  kMemoryModel,
  kEntryPoints,
  kExecutionModes,

  kDebug,
  kAnnotations,

  kTypes,
  // Constants are emitted into the same section list as types, since they
  // must be interleaved with the types they depend on.
  kConstants = kTypes,
  kGlobalVariables,

  kFunctions,

  // This is not a section of the SPIR-V spec and should always immediately
  // precede kSectionCount. It is a convenient place for the embedded
  // reflection data.
  kReflection,
  kSectionCount
};
124
// Strongly-typed wrapper around a 32-bit SPIR-V result ID.
// A value of 0 means "no ID assigned"; implicit construction from uint32_t
// is deliberate so call sites can pass raw IDs and use 0 as a default.
class SPIRVID {
public:
  SPIRVID(uint32_t value = 0) : id_(value) {}
  // Raw 32-bit ID value.
  uint32_t get() const { return id_; }
  // True when this refers to an actual result (IDs are 1-based; 0 is "none").
  bool isValid() const { return id_ != 0; }
  bool operator==(const SPIRVID &other) const { return id_ == other.id_; }
  // Ordering so SPIRVID can be used as a key in ordered containers.
  bool operator<(const SPIRVID &other) const { return id_ < other.id_; }

private:
  uint32_t id_;
};
SJWf93f5f32020-05-05 07:27:56 -0500135
SJW88ed5fe2020-05-11 12:40:57 -0500136enum SPIRVOperandType { NUMBERID, LITERAL_WORD, LITERAL_DWORD, LITERAL_STRING };
David Neto22f144c2017-06-12 14:26:21 -0400137
138struct SPIRVOperand {
SJW88ed5fe2020-05-11 12:40:57 -0500139 explicit SPIRVOperand(SPIRVOperandType Ty, uint32_t Num) : Type(Ty) {
140 LiteralNum[0] = Num;
141 }
David Neto22f144c2017-06-12 14:26:21 -0400142 explicit SPIRVOperand(SPIRVOperandType Ty, const char *Str)
143 : Type(Ty), LiteralStr(Str) {}
144 explicit SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
145 : Type(Ty), LiteralStr(Str) {}
SJW88ed5fe2020-05-11 12:40:57 -0500146 explicit SPIRVOperand(ArrayRef<uint32_t> NumVec) {
147 auto sz = NumVec.size();
148 assert(sz >= 1 && sz <= 2);
149 Type = sz == 1 ? LITERAL_WORD : LITERAL_DWORD;
150 LiteralNum[0] = NumVec[0];
151 if (sz == 2) {
152 LiteralNum[1] = NumVec[1];
153 }
154 }
David Neto22f144c2017-06-12 14:26:21 -0400155
alan-baker7506abb2020-09-10 15:02:55 -0400156 SPIRVOperandType getType() const { return Type; }
157 uint32_t getNumID() const { return LiteralNum[0]; }
158 std::string getLiteralStr() const { return LiteralStr; }
159 const uint32_t *getLiteralNum() const { return LiteralNum; }
David Neto22f144c2017-06-12 14:26:21 -0400160
David Neto87846742018-04-11 17:36:22 -0400161 uint32_t GetNumWords() const {
162 switch (Type) {
163 case NUMBERID:
SJW88ed5fe2020-05-11 12:40:57 -0500164 case LITERAL_WORD:
David Neto87846742018-04-11 17:36:22 -0400165 return 1;
SJW88ed5fe2020-05-11 12:40:57 -0500166 case LITERAL_DWORD:
167 return 2;
David Neto87846742018-04-11 17:36:22 -0400168 case LITERAL_STRING:
169 // Account for the terminating null character.
David Netoee2660d2018-06-28 16:31:29 -0400170 return uint32_t((LiteralStr.size() + 4) / 4);
David Neto87846742018-04-11 17:36:22 -0400171 }
172 llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
173 }
174
David Neto22f144c2017-06-12 14:26:21 -0400175private:
176 SPIRVOperandType Type;
177 std::string LiteralStr;
SJW88ed5fe2020-05-11 12:40:57 -0500178 uint32_t LiteralNum[2];
David Neto22f144c2017-06-12 14:26:21 -0400179};
180
SJW88ed5fe2020-05-11 12:40:57 -0500181typedef SmallVector<SPIRVOperand, 4> SPIRVOperandVec;
David Netoc6f3ab22018-04-06 18:02:31 -0400182
// A single SPIR-V instruction: opcode, optional result ID, and operands.
// WordCount tracks the encoded size (1 word for the opcode/word-count pair,
// plus 1 if there is a result ID, plus the words of each operand).
struct SPIRVInstruction {
  // Primary constructor must have Opcode, initializes WordCount based on ResID.
  SPIRVInstruction(spv::Op Opc, SPIRVID ResID = 0)
      : Opcode(static_cast<uint16_t>(Opc)) {
    setResult(ResID);
  }

  // Creates an instruction with an opcode and no result ID, and with the given
  // operands. This calls primary constructor to initialize Opcode, WordCount.
  // Takes ownership of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandVec &Ops) : SPIRVInstruction(Opc) {
    setOperands(Ops);
  }
  // Creates an instruction with an opcode and a result ID, and with the given
  // operands. This calls primary constructor to initialize Opcode, WordCount.
  // Takes ownership of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVID ResID, SPIRVOperandVec &Ops)
      : SPIRVInstruction(Opc, ResID) {
    setOperands(Ops);
  }

  uint32_t getWordCount() const { return WordCount; }
  uint16_t getOpcode() const { return Opcode; }
  SPIRVID getResultID() const { return ResultID; }
  const SPIRVOperandVec &getOperands() const { return Operands; }

private:
  // Records the result ID and resets WordCount to cover opcode (+ result).
  void setResult(SPIRVID ResID = 0) {
    WordCount = 1 + (ResID.isValid() ? 1 : 0);
    ResultID = ResID;
  }

  // Moves |Ops| into this instruction (clearing the caller's vector) and
  // accumulates each operand's encoded size into WordCount.
  void setOperands(SPIRVOperandVec &Ops) {
    assert(Operands.empty());
    Operands = std::move(Ops);
    for (auto &opd : Operands) {
      WordCount += uint16_t(opd.GetNumWords());
    }
  }

private:
  uint32_t WordCount; // Check the 16-bit bound at code generation time.
  uint16_t Opcode;
  SPIRVID ResultID;
  SPIRVOperandVec Operands;
};
229
230struct SPIRVProducerPass final : public ModulePass {
SJW01901d92020-05-21 08:58:31 -0500231 typedef DenseMap<Type *, SPIRVID> TypeMapType;
David Neto22f144c2017-06-12 14:26:21 -0400232 typedef UniqueVector<Type *> TypeList;
SJW88ed5fe2020-05-11 12:40:57 -0500233 typedef DenseMap<Value *, SPIRVID> ValueMapType;
SJW806a5d82020-07-15 12:51:38 -0500234 typedef std::list<SPIRVID> SPIRVIDListType;
SJW01901d92020-05-21 08:58:31 -0500235 typedef std::vector<std::pair<Value *, SPIRVID>> EntryPointVecType;
236 typedef std::set<uint32_t> CapabilitySetType;
SJW88ed5fe2020-05-11 12:40:57 -0500237 typedef std::list<SPIRVInstruction> SPIRVInstructionList;
SJW806a5d82020-07-15 12:51:38 -0500238 typedef std::map<spv::BuiltIn, SPIRVID> BuiltinConstantMapType;
SJW88ed5fe2020-05-11 12:40:57 -0500239 // A vector of pairs, each of which is:
David Neto87846742018-04-11 17:36:22 -0400240 // - the LLVM instruction that we will later generate SPIR-V code for
SJW88ed5fe2020-05-11 12:40:57 -0500241 // - the SPIR-V instruction placeholder that will be replaced
242 typedef std::vector<std::pair<Value *, SPIRVInstruction *>>
David Neto22f144c2017-06-12 14:26:21 -0400243 DeferredInstVecType;
244 typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
245 GlobalConstFuncMapType;
246
  // Builds the pass. |out| receives the final SPIR-V output; |samplerMap|
  // pairs sampler literal values with their descriptions (assumed; confirm
  // against the caller that constructs it); when |outputCInitList| is set,
  // the binary is written as a C initializer list instead of raw words.
  explicit SPIRVProducerPass(
      raw_pwrite_stream &out,
      ArrayRef<std::pair<unsigned, std::string>> samplerMap,
      bool outputCInitList)
      : ModulePass(ID), module(nullptr), samplerMap(samplerMap), out(out),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(&out),
        outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0) {
    // Every module produced by this pass is a shader module.
    addCapability(spv::CapabilityShader);
    // NOTE(review): Ptr is declared outside this view — presumably a static
    // handle to the current pass instance; verify against the declaration.
    Ptr = this;
  }
David Neto22f144c2017-06-12 14:26:21 -0400260
James Price11010dc2019-12-19 13:53:09 -0500261 virtual ~SPIRVProducerPass() {
James Price11010dc2019-12-19 13:53:09 -0500262 }
263
David Neto22f144c2017-06-12 14:26:21 -0400264 void getAnalysisUsage(AnalysisUsage &AU) const override {
265 AU.addRequired<DominatorTreeWrapperPass>();
266 AU.addRequired<LoopInfoWrapperPass>();
267 }
268
269 virtual bool runOnModule(Module &module) override;
270
271 // output the SPIR-V header block
272 void outputHeader();
273
274 // patch the SPIR-V header block
275 void patchHeader();
276
SJW01901d92020-05-21 08:58:31 -0500277 CapabilitySetType &getCapabilitySet() { return CapabilitySet; }
David Neto22f144c2017-06-12 14:26:21 -0400278 TypeMapType &getImageTypeMap() { return ImageTypeMap; }
alan-baker7506abb2020-09-10 15:02:55 -0400279 TypeList &getTypeList() { return Types; }
David Neto22f144c2017-06-12 14:26:21 -0400280 ValueMapType &getValueMap() { return ValueMap; }
SJW69939d52020-04-16 07:29:07 -0500281 SPIRVInstructionList &getSPIRVInstList(SPIRVSection Section) {
282 return SPIRVSections[Section];
283 };
alan-baker7506abb2020-09-10 15:02:55 -0400284 EntryPointVecType &getEntryPointVec() { return EntryPointVec; }
285 DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; }
SJW806a5d82020-07-15 12:51:38 -0500286 SPIRVIDListType &getEntryPointInterfacesList() {
287 return EntryPointInterfacesList;
alan-baker7506abb2020-09-10 15:02:55 -0400288 }
SJW01901d92020-05-21 08:58:31 -0500289 SPIRVID getOpExtInstImportID();
alan-baker7506abb2020-09-10 15:02:55 -0400290 std::vector<SPIRVID> &getBuiltinDimVec() { return BuiltinDimensionVec; }
SJW2c317da2020-03-23 07:39:13 -0500291
  // True if the StorageBuffer flavor of variable pointers has been required.
  bool hasVariablePointersStorageBuffer() {
    return HasVariablePointersStorageBuffer;
  }
  // Records that VariablePointersStorageBuffer is needed; adds the
  // corresponding capability the first time it is called.
  void setVariablePointersStorageBuffer() {
    if (!HasVariablePointersStorageBuffer) {
      addCapability(spv::CapabilityVariablePointersStorageBuffer);
      HasVariablePointersStorageBuffer = true;
    }
  }
  // True if full variable-pointers support has been required.
  bool hasVariablePointers() { return HasVariablePointers; }
  // Records that VariablePointers is needed; adds the corresponding
  // capability the first time it is called.
  void setVariablePointers() {
    if (!HasVariablePointers) {
      addCapability(spv::CapabilityVariablePointers);
      HasVariablePointers = true;
    }
  }
alan-bakerb6b09dc2018-11-08 16:59:28 -0500308 ArrayRef<std::pair<unsigned, std::string>> &getSamplerMap() {
309 return samplerMap;
310 }
David Neto22f144c2017-06-12 14:26:21 -0400311 GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
312 return GlobalConstFuncTypeMap;
313 }
314 SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
315 return GlobalConstArgumentSet;
316 }
alan-bakerb6b09dc2018-11-08 16:59:28 -0500317 TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }
David Neto22f144c2017-06-12 14:26:21 -0400318
SJW77b87ad2020-04-21 14:37:52 -0500319 void GenerateLLVMIRInfo();
alan-bakerb6b09dc2018-11-08 16:59:28 -0500320 // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
321 // *not* be converted to a storage buffer, replace each such global variable
322 // with one in the storage class expecgted by SPIR-V.
SJW77b87ad2020-04-21 14:37:52 -0500323 void FindGlobalConstVars();
David Neto862b7d82018-06-14 18:48:37 -0400324 // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
325 // ModuleOrderedResourceVars.
SJW77b87ad2020-04-21 14:37:52 -0500326 void FindResourceVars();
David Neto22f144c2017-06-12 14:26:21 -0400327 void FindTypePerGlobalVar(GlobalVariable &GV);
SJW77b87ad2020-04-21 14:37:52 -0500328 void FindTypesForSamplerMap();
329 void FindTypesForResourceVars();
alan-bakerb6b09dc2018-11-08 16:59:28 -0500330 // Inserts |Ty| and relevant sub-types into the |Types| member, indicating
331 // that |Ty| and its subtypes will need a corresponding SPIR-V type.
David Neto22f144c2017-06-12 14:26:21 -0400332 void FindType(Type *Ty);
SJWf93f5f32020-05-05 07:27:56 -0500333
alan-bakerc3fd07f2020-10-22 09:48:49 -0400334 // Returns the canonical type of |type|.
335 //
336 // By default, clspv maps both __constant and __global address space pointers
337 // to StorageBuffer storage class. In order to prevent duplicate types from
338 // being generated, clspv uses the canonical type as a representative.
339 Type *CanonicalType(Type *type);
340
SJWf93f5f32020-05-05 07:27:56 -0500341 // Lookup or create Types, Constants.
342 // Returns SPIRVID once it has been created.
343 SPIRVID getSPIRVType(Type *Ty);
344 SPIRVID getSPIRVConstant(Constant *Cst);
SJW806a5d82020-07-15 12:51:38 -0500345 SPIRVID getSPIRVInt32Constant(uint32_t CstVal);
SJWf93f5f32020-05-05 07:27:56 -0500346 // Lookup SPIRVID of llvm::Value, may create Constant.
347 SPIRVID getSPIRVValue(Value *V);
348
SJW806a5d82020-07-15 12:51:38 -0500349 SPIRVID getSPIRVBuiltin(spv::BuiltIn BID, spv::Capability Cap);
350
David Neto19a1bad2017-08-25 15:01:41 -0400351 // Generates instructions for SPIR-V types corresponding to the LLVM types
352 // saved in the |Types| member. A type follows its subtypes. IDs are
353 // allocated sequentially starting with the current value of nextID, and
354 // with a type following its subtypes. Also updates nextID to just beyond
355 // the last generated ID.
SJW77b87ad2020-04-21 14:37:52 -0500356 void GenerateSPIRVTypes();
SJW77b87ad2020-04-21 14:37:52 -0500357 void GenerateModuleInfo();
David Neto22f144c2017-06-12 14:26:21 -0400358 void GenerateGlobalVar(GlobalVariable &GV);
SJW77b87ad2020-04-21 14:37:52 -0500359 void GenerateWorkgroupVars();
alan-baker86ce19c2020-08-05 13:09:19 -0400360 // Generate reflection instructions for resource variables associated with
David Neto862b7d82018-06-14 18:48:37 -0400361 // arguments to F.
SJW77b87ad2020-04-21 14:37:52 -0500362 void GenerateSamplers();
David Neto862b7d82018-06-14 18:48:37 -0400363 // Generate OpVariables for %clspv.resource.var.* calls.
SJW77b87ad2020-04-21 14:37:52 -0500364 void GenerateResourceVars();
David Neto22f144c2017-06-12 14:26:21 -0400365 void GenerateFuncPrologue(Function &F);
366 void GenerateFuncBody(Function &F);
David Netob6e2e062018-04-25 10:32:06 -0400367 void GenerateEntryPointInitialStores();
David Neto22f144c2017-06-12 14:26:21 -0400368 spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
369 spv::Op GetSPIRVCastOpcode(Instruction &I);
370 spv::Op GetSPIRVBinaryOpcode(Instruction &I);
SJW806a5d82020-07-15 12:51:38 -0500371 SPIRVID GenerateClspvInstruction(CallInst *Call,
372 const FunctionInfo &FuncInfo);
373 SPIRVID GenerateImageInstruction(CallInst *Call,
374 const FunctionInfo &FuncInfo);
375 SPIRVID GenerateSubgroupInstruction(CallInst *Call,
376 const FunctionInfo &FuncInfo);
377 SPIRVID GenerateInstructionFromCall(CallInst *Call);
David Neto22f144c2017-06-12 14:26:21 -0400378 void GenerateInstruction(Instruction &I);
379 void GenerateFuncEpilogue();
380 void HandleDeferredInstruction();
SJW77b87ad2020-04-21 14:37:52 -0500381 void HandleDeferredDecorations();
David Neto22f144c2017-06-12 14:26:21 -0400382 bool is4xi8vec(Type *Ty) const;
383 spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
David Neto862b7d82018-06-14 18:48:37 -0400384 spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
David Neto22f144c2017-06-12 14:26:21 -0400385 spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
David Neto3fbb4072017-10-16 11:28:14 -0400386 // Returns the GLSL extended instruction enum that the given function
387 // call maps to. If none, then returns the 0 value, i.e. GLSLstd4580Bad.
SJW61531372020-06-09 07:31:08 -0500388 glsl::ExtInst getExtInstEnum(const Builtins::FunctionInfo &func_info);
David Neto3fbb4072017-10-16 11:28:14 -0400389 // Returns the GLSL extended instruction enum indirectly used by the given
390 // function. That is, to implement the given function, we use an extended
391 // instruction plus one more instruction. If none, then returns the 0 value,
392 // i.e. GLSLstd4580Bad.
SJW61531372020-06-09 07:31:08 -0500393 glsl::ExtInst getIndirectExtInstEnum(const Builtins::FunctionInfo &func_info);
David Neto3fbb4072017-10-16 11:28:14 -0400394 // Returns the single GLSL extended instruction used directly or
395 // indirectly by the given function call.
SJW61531372020-06-09 07:31:08 -0500396 glsl::ExtInst
397 getDirectOrIndirectExtInstEnum(const Builtins::FunctionInfo &func_info);
David Neto22f144c2017-06-12 14:26:21 -0400398 void WriteOneWord(uint32_t Word);
SJW88ed5fe2020-05-11 12:40:57 -0500399 void WriteResultID(const SPIRVInstruction &Inst);
400 void WriteWordCountAndOpcode(const SPIRVInstruction &Inst);
401 void WriteOperand(const SPIRVOperand &Op);
David Neto22f144c2017-06-12 14:26:21 -0400402 void WriteSPIRVBinary();
SJW69939d52020-04-16 07:29:07 -0500403 void WriteSPIRVBinary(SPIRVInstructionList &SPIRVInstList);
David Neto22f144c2017-06-12 14:26:21 -0400404
Alan Baker9bf93fb2018-08-28 16:59:26 -0400405 // Returns true if |type| is compatible with OpConstantNull.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500406 bool IsTypeNullable(const Type *type) const;
Alan Baker9bf93fb2018-08-28 16:59:26 -0400407
Alan Bakerfcda9482018-10-02 17:09:59 -0400408 // Populate UBO remapped type maps.
SJW77b87ad2020-04-21 14:37:52 -0500409 void PopulateUBOTypeMaps();
Alan Bakerfcda9482018-10-02 17:09:59 -0400410
alan-baker06cad652019-12-03 17:56:47 -0500411 // Populate the merge and continue block maps.
SJW77b87ad2020-04-21 14:37:52 -0500412 void PopulateStructuredCFGMaps();
alan-baker06cad652019-12-03 17:56:47 -0500413
Alan Bakerfcda9482018-10-02 17:09:59 -0400414 // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
415 // uses the internal map, otherwise it falls back on the data layout.
416 uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
Alan Bakerfcda9482018-10-02 17:09:59 -0400417 uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);
Kévin Petitbbbda972020-03-03 19:16:31 +0000418 uint32_t GetExplicitLayoutStructMemberOffset(StructType *type,
419 unsigned member,
420 const DataLayout &DL);
Alan Bakerfcda9482018-10-02 17:09:59 -0400421
alan-baker5b86ed72019-02-15 08:26:50 -0500422 // Returns the base pointer of |v|.
423 Value *GetBasePointer(Value *v);
424
SJW01901d92020-05-21 08:58:31 -0500425 // Add Capability if not already (e.g. CapabilityGroupNonUniformBroadcast)
426 void addCapability(uint32_t c) { CapabilitySet.emplace(c); }
427
alan-baker5b86ed72019-02-15 08:26:50 -0500428 // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| base on
429 // |address_space|.
430 void setVariablePointersCapabilities(unsigned address_space);
431
432 // Returns true if |lhs| and |rhs| represent the same resource or workgroup
433 // variable.
434 bool sameResource(Value *lhs, Value *rhs) const;
435
436 // Returns true if |inst| is phi or select that selects from the same
437 // structure (or null).
438 bool selectFromSameObject(Instruction *inst);
439
alan-bakere9308012019-03-15 10:25:13 -0400440 // Returns true if |Arg| is called with a coherent resource.
441 bool CalledWithCoherentResource(Argument &Arg);
442
  //
  // Primary interface for adding SPIRVInstructions to a SPIRVSection.
  // Allocates a fresh result ID only when the opcode produces a result
  // (per spv::HasResultAndType), appends the instruction to |TSection|
  // (kFunctions by default), and returns the result ID (0 if none).
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Opcode, SPIRVOperandVec &Operands) {
    bool has_result, has_result_type;
    spv::HasResultAndType(Opcode, &has_result, &has_result_type);
    SPIRVID RID = has_result ? incrNextID() : 0;
    SPIRVSections[TSection].emplace_back(Opcode, RID, Operands);
    return RID;
  }
  // Convenience overload: instruction with no operands.
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Op) {
    SPIRVOperandVec Ops;
    return addSPIRVInst<TSection>(Op, Ops);
  }
  // Convenience overload: single 32-bit literal operand.
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Op, uint32_t V) {
    SPIRVOperandVec Ops;
    Ops.emplace_back(LITERAL_WORD, V);
    return addSPIRVInst<TSection>(Op, Ops);
  }
  // Convenience overload: single literal string operand.
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Op, const char *V) {
    SPIRVOperandVec Ops;
    Ops.emplace_back(LITERAL_STRING, V);
    return addSPIRVInst<TSection>(Op, Ops);
  }
470
  //
  // Add placeholder for llvm::Value that references future values.
  // Must have result ID just in case final SPIRVInstruction requires.
  // The placeholder is appended to the functions section and recorded in
  // DeferredInstVec for later rewriting via replaceSPIRVInst.
  SPIRVID addSPIRVPlaceholder(Value *I) {
    SPIRVID RID = incrNextID();
    SPIRVOperandVec Ops;
    // OpExtInst here is only a dummy opcode for the placeholder slot.
    SPIRVSections[kFunctions].emplace_back(spv::OpExtInst, RID, Ops);
    DeferredInstVec.push_back({I, &SPIRVSections[kFunctions].back()});
    return RID;
  }
  // Replace placeholder with actual SPIRVInstruction on the final pass
  // (HandleDeferredInstruction). Reuses the placeholder's result ID when
  // the final opcode produces a result; otherwise the ID becomes 0.
  SPIRVID replaceSPIRVInst(SPIRVInstruction *I, spv::Op Opcode,
                           SPIRVOperandVec &Operands) {
    bool has_result, has_result_type;
    spv::HasResultAndType(Opcode, &has_result, &has_result_type);
    SPIRVID RID = has_result ? I->getResultID() : 0;
    *I = SPIRVInstruction(Opcode, RID, Operands);
    return RID;
  }
491
SJW806a5d82020-07-15 12:51:38 -0500492 //
493 // Add global variable and capture entry point interface
494 SPIRVID addSPIRVGlobalVariable(const SPIRVID &TypeID, spv::StorageClass SC,
495 const SPIRVID &InitID = SPIRVID());
496
alan-baker86ce19c2020-08-05 13:09:19 -0400497 SPIRVID getReflectionImport();
498 void GenerateReflection();
499 void GenerateKernelReflection();
500 void GeneratePushConstantReflection();
501 void GenerateSpecConstantReflection();
502 void AddArgumentReflection(SPIRVID kernel_decl, const std::string &name,
503 clspv::ArgKind arg_kind, uint32_t ordinal,
504 uint32_t descriptor_set, uint32_t binding,
505 uint32_t offset, uint32_t size, uint32_t spec_id,
506 uint32_t elem_size);
507
David Neto22f144c2017-06-12 14:26:21 -0400508private:
509 static char ID;
SJW77b87ad2020-04-21 14:37:52 -0500510
511 Module *module;
512
SJW01901d92020-05-21 08:58:31 -0500513 // Set of Capabilities required
514 CapabilitySetType CapabilitySet;
515
SJW806a5d82020-07-15 12:51:38 -0500516 // Map from clspv::BuiltinType to SPIRV Global Variable
517 BuiltinConstantMapType BuiltinConstantMap;
518
David Neto44795152017-07-13 15:45:28 -0400519 ArrayRef<std::pair<unsigned, std::string>> samplerMap;
David Neto22f144c2017-06-12 14:26:21 -0400520 raw_pwrite_stream &out;
David Neto0676e6f2017-07-11 18:47:44 -0400521
522 // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
523 // convert to other formats on demand?
524
525 // When emitting a C initialization list, the WriteSPIRVBinary method
526 // will actually write its words to this vector via binaryTempOut.
527 SmallVector<char, 100> binaryTempUnderlyingVector;
528 raw_svector_ostream binaryTempOut;
529
530 // Binary output writes to this stream, which might be |out| or
531 // |binaryTempOut|. It's the latter when we really want to write a C
532 // initializer list.
Diego Novillo3cc8d7a2019-04-10 13:30:34 -0400533 raw_pwrite_stream *binaryOut;
David Neto0676e6f2017-07-11 18:47:44 -0400534 const bool outputCInitList; // If true, output look like {0x7023, ... , 5}
David Neto22f144c2017-06-12 14:26:21 -0400535 uint64_t patchBoundOffset;
536 uint32_t nextID;
537
SJWf93f5f32020-05-05 07:27:56 -0500538 SPIRVID incrNextID() { return nextID++; }
539
alan-bakerf67468c2019-11-25 15:51:49 -0500540 // ID for OpTypeInt 32 1.
SJW01901d92020-05-21 08:58:31 -0500541 SPIRVID int32ID;
alan-bakerf67468c2019-11-25 15:51:49 -0500542 // ID for OpTypeVector %int 4.
SJW01901d92020-05-21 08:58:31 -0500543 SPIRVID v4int32ID;
alan-bakerf67468c2019-11-25 15:51:49 -0500544
David Neto19a1bad2017-08-25 15:01:41 -0400545 // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
David Neto22f144c2017-06-12 14:26:21 -0400546 TypeMapType TypeMap;
David Neto19a1bad2017-08-25 15:01:41 -0400547 // Maps an LLVM image type to its SPIR-V ID.
David Neto22f144c2017-06-12 14:26:21 -0400548 TypeMapType ImageTypeMap;
David Neto19a1bad2017-08-25 15:01:41 -0400549 // A unique-vector of LLVM types that map to a SPIR-V type.
David Neto22f144c2017-06-12 14:26:21 -0400550 TypeList Types;
David Neto19a1bad2017-08-25 15:01:41 -0400551 // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
David Neto22f144c2017-06-12 14:26:21 -0400552 ValueMapType ValueMap;
SJW69939d52020-04-16 07:29:07 -0500553 SPIRVInstructionList SPIRVSections[kSectionCount];
David Neto862b7d82018-06-14 18:48:37 -0400554
David Neto22f144c2017-06-12 14:26:21 -0400555 EntryPointVecType EntryPointVec;
556 DeferredInstVecType DeferredInstVec;
SJW806a5d82020-07-15 12:51:38 -0500557 SPIRVIDListType EntryPointInterfacesList;
SJW01901d92020-05-21 08:58:31 -0500558 SPIRVID OpExtInstImportID;
559 std::vector<SPIRVID> BuiltinDimensionVec;
alan-baker5b86ed72019-02-15 08:26:50 -0500560 bool HasVariablePointersStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -0400561 bool HasVariablePointers;
562 Type *SamplerTy;
SJW01901d92020-05-21 08:58:31 -0500563 DenseMap<unsigned, SPIRVID> SamplerLiteralToIDMap;
David Netoc77d9e22018-03-24 06:30:28 -0700564
565 // If a function F has a pointer-to-__constant parameter, then this variable
David Neto9ed8e2f2018-03-24 06:47:24 -0700566 // will map F's type to (G, index of the parameter), where in a first phase
Marco Antognini7e338402021-03-15 12:48:37 +0000567 // G is F's type.
David Netoc77d9e22018-03-24 06:30:28 -0700568 // TODO(dneto): This doesn't seem general enough? A function might have
569 // more than one such parameter.
David Neto22f144c2017-06-12 14:26:21 -0400570 GlobalConstFuncMapType GlobalConstFuncTypeMap;
571 SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
David Neto1a1a0582017-07-07 12:01:44 -0400572 // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
David Neto85082642018-03-24 06:55:20 -0700573 // or array types, and which point into transparent memory (StorageBuffer
574 // storage class). These will require an ArrayStride decoration.
David Neto1a1a0582017-07-07 12:01:44 -0400575 // See SPV_KHR_variable_pointers rev 13.
David Neto85082642018-03-24 06:55:20 -0700576 TypeList TypesNeedingArrayStride;
David Netoa60b00b2017-09-15 16:34:09 -0400577
578 // This is truly ugly, but works around what look like driver bugs.
579 // For get_local_size, an earlier part of the flow has created a module-scope
580 // variable in Private address space to hold the value for the workgroup
  // size. Its initializer is a uint3 value marked as builtin WorkgroupSize.
  // When this is present, save the IDs of the initializer value and variable
  // in these two variables. We only ever do a vector load from it, and
  // when we see one of those, substitute just the value of the initializer.
585 // This mimics what Glslang does, and that's what drivers are used to.
David Neto66cfe642018-03-24 06:13:56 -0700586 // TODO(dneto): Remove this once drivers are fixed.
SJW01901d92020-05-21 08:58:31 -0500587 SPIRVID WorkgroupSizeValueID;
588 SPIRVID WorkgroupSizeVarID;
David Neto26aaf622017-10-23 18:11:53 -0400589
David Neto862b7d82018-06-14 18:48:37 -0400590 // Bookkeeping for mapping kernel arguments to resource variables.
  // Describes one module-scope resource variable (an OpVariable backing a
  // kernel argument such as a buffer, image, sampler, or POD struct).
  // Instances are created in FindResourceVars() from calls to the
  // @clspv.resource.var.* builtin functions.
  struct ResourceVarInfo {
    ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
                    Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
        : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
          var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
          addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
    const int index; // Index into ResourceVarInfoList
    const unsigned descriptor_set; // SPIR-V DescriptorSet decoration value.
    const unsigned binding;        // SPIR-V Binding decoration value.
    Function *const var_fn; // The @clspv.resource.var.* function.
    const clspv::ArgKind arg_kind; // Kind of kernel argument this backs.
    const int coherent; // Non-zero if the argument requires Coherent.
    const unsigned addr_space; // The LLVM address space
    // The SPIR-V ID of the OpVariable. Not populated at construction time.
    SPIRVID var_id;
  };
  // A list of resource var info. Each one corresponds to a module-scope
608 // resource variable we will have to create. Resource var indices are
609 // indices into this vector.
610 SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
611 // This is a vector of pointers of all the resource vars, but ordered by
612 // kernel function, and then by argument.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500613 UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
David Neto862b7d82018-06-14 18:48:37 -0400614 // Map a function to the ordered list of resource variables it uses, one for
615 // each argument. If an argument does not use a resource variable, it
616 // will have a null pointer entry.
617 using FunctionToResourceVarsMapType =
618 DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
619 FunctionToResourceVarsMapType FunctionToResourceVarsMap;
620
621 // What LLVM types map to SPIR-V types needing layout? These are the
622 // arrays and structures supporting storage buffers and uniform buffers.
623 TypeList TypesNeedingLayout;
624 // What LLVM struct types map to a SPIR-V struct type with Block decoration?
625 UniqueVector<StructType *> StructTypesNeedingBlock;
626 // For a call that represents a load from an opaque type (samplers, images),
627 // map it to the variable id it should load from.
SJW01901d92020-05-21 08:58:31 -0500628 DenseMap<CallInst *, SPIRVID> ResourceVarDeferredLoadCalls;
David Neto85082642018-03-24 06:55:20 -0700629
David Netoc6f3ab22018-04-06 18:02:31 -0400630 // An ordered list of the kernel arguments of type pointer-to-local.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500631 using LocalArgList = SmallVector<Argument *, 8>;
David Netoc6f3ab22018-04-06 18:02:31 -0400632 LocalArgList LocalArgs;
633 // Information about a pointer-to-local argument.
  struct LocalArgInfo {
    // The SPIR-V ID of the array variable.
    SPIRVID variable_id;
    // The element type of the Workgroup-storage array (the pointee element
    // type of the pointer-to-local kernel argument).
    Type *elem_type;
    // The ID of the array size spec constant (OpSpecConstant).
    SPIRVID array_size_id;
    // The ID of the array type.
    SPIRVID array_type_id;
    // The ID of the pointer to the array type.
    SPIRVID ptr_array_type_id;
    // The specialization constant ID of the array size.
    int spec_id;
  };
Alan Baker202c8c72018-08-13 13:47:44 -0400648 // A mapping from Argument to its assigned SpecId.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500649 DenseMap<const Argument *, int> LocalArgSpecIds;
Alan Baker202c8c72018-08-13 13:47:44 -0400650 // A mapping from SpecId to its LocalArgInfo.
651 DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
Alan Bakerfcda9482018-10-02 17:09:59 -0400652 // A mapping from a remapped type to its real offsets.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500653 DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
Alan Bakerfcda9482018-10-02 17:09:59 -0400654 // A mapping from a remapped type to its real sizes.
alan-bakerb6b09dc2018-11-08 16:59:28 -0500655 DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
656 RemappedUBOTypeSizes;
alan-baker06cad652019-12-03 17:56:47 -0500657
658 // Maps basic block to its merge block.
659 DenseMap<BasicBlock *, BasicBlock *> MergeBlocks;
660 // Maps basic block to its continue block.
661 DenseMap<BasicBlock *, BasicBlock *> ContinueBlocks;
SJW01901d92020-05-21 08:58:31 -0500662
alan-baker86ce19c2020-08-05 13:09:19 -0400663 SPIRVID ReflectionID;
664 DenseMap<Function *, SPIRVID> KernelDeclarations;
665
SJW01901d92020-05-21 08:58:31 -0500666public:
667 static SPIRVProducerPass *Ptr;
David Neto22f144c2017-06-12 14:26:21 -0400668};
669
// LLVM pass identification token: its address, not its value, identifies
// the pass to the pass manager.
char SPIRVProducerPass::ID;
// Singleton pointer to the running producer. Used by the free operator<<
// helpers below to resolve llvm::Type*/Value* operands to SPIR-V IDs.
// (Presumably set when the pass is constructed or run — not visible here.)
SPIRVProducerPass *SPIRVProducerPass::Ptr = nullptr;
David Netoc6f3ab22018-04-06 18:02:31 -0400672
alan-bakerb6b09dc2018-11-08 16:59:28 -0500673} // namespace
David Neto22f144c2017-06-12 14:26:21 -0400674
namespace clspv {
// Factory for the SPIR-V producer module pass.
//
// out:            stream that receives the SPIR-V binary (or C initializer
//                 list text when outputCInitList is true).
// samplerMap:     literal sampler map entries (sampler mask -> expression).
// outputCInitList: when true, emit the module as a C array initializer
//                 instead of raw binary.
//
// Returns a heap-allocated pass; per LLVM convention the caller (typically
// the PassManager) takes ownership of the raw pointer.
ModulePass *
createSPIRVProducerPass(raw_pwrite_stream &out,
                        ArrayRef<std::pair<unsigned, std::string>> samplerMap,
                        bool outputCInitList) {
  return new SPIRVProducerPass(out, samplerMap, outputCInitList);
}
} // namespace clspv
David Neto22f144c2017-06-12 14:26:21 -0400683
SJW01901d92020-05-21 08:58:31 -0500684namespace {
685SPIRVOperandVec &operator<<(SPIRVOperandVec &list, uint32_t num) {
686 list.emplace_back(LITERAL_WORD, num);
687 return list;
688}
689
690SPIRVOperandVec &operator<<(SPIRVOperandVec &list, int32_t num) {
691 list.emplace_back(LITERAL_WORD, static_cast<uint32_t>(num));
692 return list;
693}
694
695SPIRVOperandVec &operator<<(SPIRVOperandVec &list, ArrayRef<uint32_t> num_vec) {
696 list.emplace_back(num_vec);
697 return list;
698}
699
700SPIRVOperandVec &operator<<(SPIRVOperandVec &list, StringRef str) {
701 list.emplace_back(LITERAL_STRING, str);
702 return list;
703}
704
705SPIRVOperandVec &operator<<(SPIRVOperandVec &list, Type *t) {
706 list.emplace_back(NUMBERID, SPIRVProducerPass::Ptr->getSPIRVType(t).get());
707 return list;
708}
709
710SPIRVOperandVec &operator<<(SPIRVOperandVec &list, Value *v) {
711 list.emplace_back(NUMBERID, SPIRVProducerPass::Ptr->getSPIRVValue(v).get());
712 return list;
713}
714
SJW806a5d82020-07-15 12:51:38 -0500715SPIRVOperandVec &operator<<(SPIRVOperandVec &list, const SPIRVID &v) {
SJW01901d92020-05-21 08:58:31 -0500716 list.emplace_back(NUMBERID, v.get());
717 return list;
718}
719} // namespace
720
// Top-level driver: lowers module M to a SPIR-V binary on the configured
// output stream. The phases below are order-sensitive: types must be
// emitted before variables, variables before function bodies, and the
// header bound is only known (and patched) after all IDs are allocated.
//
// NOTE(review): the module IS mutated here (global initializers dropped,
// __constant globals rewritten in FindGlobalConstVars) even though we
// return false — confirm no later pass relies on the returned change flag.
bool SPIRVProducerPass::runOnModule(Module &M) {
  // TODO(sjw): Need to reset all data members for each Module, or better
  // yet create a new SPIRVProducer for every module.. For now only
  // allow 1 call.
  assert(module == nullptr);
  module = &M;
  if (ShowProducerIR) {
    llvm::outs() << *module << "\n";
  }
  // When emitting a C initializer list, build the binary in a temporary
  // buffer first; the textual form is produced from it at the end.
  binaryOut = outputCInitList ? &binaryTempOut : &out;

  PopulateUBOTypeMaps();
  PopulateStructuredCFGMaps();

  // SPIR-V always begins with its header information
  outputHeader();

  // Gather information from the LLVM IR that we require.
  GenerateLLVMIRInfo();

  // Collect information on global variables too.
  for (GlobalVariable &GV : module->globals()) {
    // If the GV is one of our special __spirv_* variables, remove the
    // initializer as it was only placed there to force LLVM to not throw the
    // value away.
    if (GV.getName().startswith("__spirv_") ||
        GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
      GV.setInitializer(nullptr);
    }

    // Collect types' information from global variable.
    FindTypePerGlobalVar(GV);
  }

  // Generate SPIRV instructions for types.
  GenerateSPIRVTypes();

  // Generate literal samplers if necessary.
  GenerateSamplers();

  // Generate SPIRV variables.
  for (GlobalVariable &GV : module->globals()) {
    GenerateGlobalVar(GV);
  }
  GenerateResourceVars();
  GenerateWorkgroupVars();

  // Generate SPIRV instructions for each function.
  for (Function &F : *module) {
    if (F.isDeclaration()) {
      continue;
    }

    // Generate Function Prologue.
    GenerateFuncPrologue(F);

    // Generate SPIRV instructions for function body.
    GenerateFuncBody(F);

    // Generate Function Epilogue.
    GenerateFuncEpilogue();
  }

  HandleDeferredInstruction();
  HandleDeferredDecorations();

  // Generate SPIRV module information.
  GenerateModuleInfo();

  // Generate embedded reflection information.
  GenerateReflection();

  WriteSPIRVBinary();

  // We need to patch the SPIR-V header to set bound correctly.
  patchHeader();

  if (outputCInitList) {
    // Re-emit the binary as a brace-enclosed, comma-separated list of
    // 32-bit words, assembled little-endian from the byte stream.
    bool first = true;
    std::ostringstream os;

    auto emit_word = [&os, &first](uint32_t word) {
      if (!first)
        os << ",\n";
      os << word;
      first = false;
    };

    os << "{";
    const std::string str(binaryTempOut.str());
    // NOTE(review): reads str[i+1..i+3] unconditionally — relies on the
    // buffer length being a multiple of 4, which holds for a well-formed
    // SPIR-V word stream; confirm nothing else is ever written to it.
    for (unsigned i = 0; i < str.size(); i += 4) {
      const uint32_t a = static_cast<unsigned char>(str[i]);
      const uint32_t b = static_cast<unsigned char>(str[i + 1]);
      const uint32_t c = static_cast<unsigned char>(str[i + 2]);
      const uint32_t d = static_cast<unsigned char>(str[i + 3]);
      emit_word(a | (b << 8) | (c << 16) | (d << 24));
    }
    os << "}\n";
    out << os.str();
  }

  return false;
}
824
825void SPIRVProducerPass::outputHeader() {
alan-baker00e7a582019-06-07 12:54:21 -0400826 binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
827 sizeof(spv::MagicNumber));
SJW806a5d82020-07-15 12:51:38 -0500828 uint32_t minor = 0;
829 if (SpvVersion() == SPIRVVersion::SPIRV_1_3) {
830 minor = 3;
831 }
832 uint32_t version = (1 << 16) | (minor << 8);
833 binaryOut->write(reinterpret_cast<const char *>(&version), sizeof(version));
David Neto22f144c2017-06-12 14:26:21 -0400834
alan-baker0c18ab02019-06-12 10:23:21 -0400835 // use Google's vendor ID
836 const uint32_t vendor = 21 << 16;
alan-baker00e7a582019-06-07 12:54:21 -0400837 binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));
David Neto22f144c2017-06-12 14:26:21 -0400838
alan-baker00e7a582019-06-07 12:54:21 -0400839 // we record where we need to come back to and patch in the bound value
840 patchBoundOffset = binaryOut->tell();
David Neto22f144c2017-06-12 14:26:21 -0400841
alan-baker00e7a582019-06-07 12:54:21 -0400842 // output a bad bound for now
843 binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));
David Neto22f144c2017-06-12 14:26:21 -0400844
alan-baker00e7a582019-06-07 12:54:21 -0400845 // output the schema (reserved for use and must be 0)
846 const uint32_t schema = 0;
847 binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
David Neto22f144c2017-06-12 14:26:21 -0400848}
849
// Overwrites the placeholder bound word written by outputHeader() with the
// final value of nextID, at the offset recorded in patchBoundOffset.
void SPIRVProducerPass::patchHeader() {
  // for a binary we just write the value of nextID over bound
  binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
                    patchBoundOffset);
}
855
// Gathers (and in FindGlobalConstVars, generates) the artificial LLVM IR
// needed for Vulkan SPIR-V output: rewritten __constant globals, resource
// variables for kernel arguments, and the types they require. Runs ahead
// of FindType and FindConstant.
void SPIRVProducerPass::GenerateLLVMIRInfo() {
  FindGlobalConstVars();

  FindResourceVars();

  FindTypesForSamplerMap();
  FindTypesForResourceVars();
}
869
// Normalizes __constant (AddressSpace::Constant) globals for SPIR-V:
// dead ones are deleted; live ones are either validated against the
// single-storage-buffer size limit (when clustered in a storage buffer)
// or rewritten into ModuleScopePrivate globals, recording which function
// types receive a pointer-to-constant argument along the way.
void SPIRVProducerPass::FindGlobalConstVars() {
  clspv::NormalizeGlobalVariables(*module);
  const DataLayout &DL = module->getDataLayout();

  SmallVector<GlobalVariable *, 8> GVList;
  SmallVector<GlobalVariable *, 8> DeadGVList;
  for (GlobalVariable &GV : module->globals()) {
    if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
      if (GV.use_empty()) {
        DeadGVList.push_back(&GV);
      } else {
        GVList.push_back(&GV);
      }
    }
  }

  // Remove dead global __constant variables.
  for (auto GV : DeadGVList) {
    GV->eraseFromParent();
  }
  DeadGVList.clear();

  if (clspv::Option::ModuleConstantsInStorageBuffer()) {
    // For now, we only support a single storage buffer.
    if (!GVList.empty()) {
      assert(GVList.size() == 1);
      const auto *GV = GVList[0];
      const auto constants_byte_size =
          (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
      // OpenCL minimum for CL_DEVICE_MAX_CONSTANT_BUFFER_SIZE is 64KB.
      const size_t kConstantMaxSize = 65536;
      if (constants_byte_size > kConstantMaxSize) {
        outs() << "Max __constant capacity of " << kConstantMaxSize
               << " bytes exceeded: " << constants_byte_size << " bytes used\n";
        llvm_unreachable("Max __constant capacity exceeded");
      }
    }
  } else {
    // Change global constant variable's address space to ModuleScopePrivate.
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    for (auto GV : GVList) {
      // Create new gv with ModuleScopePrivate address space.
      Type *NewGVTy = GV->getType()->getPointerElementType();
      GlobalVariable *NewGV = new GlobalVariable(
          *module, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
          nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
      NewGV->takeName(GV);

      // Snapshot the user list: we will be rewriting uses below.
      const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
      SmallVector<User *, 8> CandidateUsers;

      // Records, for a call that receives the constant global (directly or
      // through a GEP), the called function type and the argument index at
      // which it is passed.
      auto record_called_function_type_as_user =
          [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
            // Find argument index.
            unsigned index = 0;
            for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
              if (gv == call->getOperand(i)) {
                // TODO(dneto): Should we break here?
                // (As written, the LAST matching operand index wins.)
                index = i;
              }
            }

            // Record function type with global constant.
            GlobalConstFuncTyMap[call->getFunctionType()] =
                std::make_pair(call->getFunctionType(), index);
          };

      for (User *GVU : GVUsers) {
        if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
          record_called_function_type_as_user(GV, Call);
        } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
          // Check GEP users.
          for (User *GEPU : GEP->users()) {
            if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
              record_called_function_type_as_user(GEP, GEPCall);
            }
          }
        }

        CandidateUsers.push_back(GVU);
      }

      for (User *U : CandidateUsers) {
        // Update users of gv with new gv.
        if (!isa<Constant>(U)) {
          // #254: Can't change operands of a constant, but this shouldn't be
          // something that sticks around in the module.
          U->replaceUsesOfWith(GV, NewGV);
        }
      }

      // Delete original gv.
      GV->eraseFromParent();
    }
  }
}
965
// Scans all calls to the @clspv.resource.var.* builtin functions and
// builds ResourceVarInfoList, FunctionToResourceVarsMap, and
// ModuleOrderedResourceVars: the bookkeeping that maps kernel arguments
// to the module-scope resource OpVariables we must generate.
void SPIRVProducerPass::FindResourceVars() {
  ResourceVarInfoList.clear();
  FunctionToResourceVarsMap.clear();
  ModuleOrderedResourceVars.reset();
  // Normally, there is one resource variable per clspv.resource.var.*
  // function, since that is unique'd by arg type and index. By design,
  // we can share these resource variables across kernels because all
  // kernels use the same descriptor set.
  //
  // But if the user requested distinct descriptor sets per kernel, then
  // the descriptor allocator has made different (set,binding) pairs for
  // the same (type,arg_index) pair. Since we can decorate a resource
  // variable with only exactly one DescriptorSet and Binding, we are
  // forced in this case to make distinct resource variables whenever
  // the same clspv.resource.var.X function is seen with distinct
  // (set,binding) values.
  const bool always_distinct_sets =
      clspv::Option::DistinctKernelDescriptorSets();
  for (Function &F : *module) {
    // Rely on the fact the resource var functions have a stable ordering
    // in the module.
    if (Builtins::Lookup(&F) == Builtins::kClspvResource) {
      // Find all calls to this function with distinct set and binding pairs.
      // Save them in ResourceVarInfoList.

      // Determine uniqueness of the (set,binding) pairs only within this
      // one resource-var builtin function.
      using SetAndBinding = std::pair<unsigned, unsigned>;
      // Maps set and binding to the resource var info.
      DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
      bool first_use = true;
      for (auto &U : F.uses()) {
        if (auto *call = dyn_cast<CallInst>(U.getUser())) {
          // Decode the constant operands of the builtin call:
          // arg 0 = descriptor set, arg 1 = binding, arg 2 = arg kind,
          // arg 3 = kernel argument index, arg 5 = coherent flag.
          const auto set = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
          const auto binding = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
          const auto arg_kind = clspv::ArgKind(
              dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
          const auto arg_index = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
          const auto coherent = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());

          // Find or make the resource var info for this combination.
          // Note: each raw `new` below is immediately owned by
          // ResourceVarInfoList (a vector of unique_ptr) via emplace_back.
          ResourceVarInfo *rv = nullptr;
          if (always_distinct_sets) {
            // Make a new resource var any time we see a different
            // (set,binding) pair.
            SetAndBinding key{set, binding};
            auto where = set_and_binding_map.find(key);
            if (where == set_and_binding_map.end()) {
              rv = new ResourceVarInfo(
                  static_cast<int>(ResourceVarInfoList.size()), set, binding,
                  &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
              set_and_binding_map[key] = rv;
            } else {
              rv = where->second;
            }
          } else {
            // The default is to make exactly one resource for each
            // clspv.resource.var.* function.
            if (first_use) {
              first_use = false;
              rv = new ResourceVarInfo(
                  static_cast<int>(ResourceVarInfoList.size()), set, binding,
                  &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
            } else {
              rv = ResourceVarInfoList.back().get();
            }
          }

          // Now populate FunctionToResourceVarsMap.
          // The key is the kernel containing the call; pad the per-argument
          // vector with nulls up to arg_index.
          auto &mapping =
              FunctionToResourceVarsMap[call->getParent()->getParent()];
          while (mapping.size() <= arg_index) {
            mapping.push_back(nullptr);
          }
          mapping[arg_index] = rv;
        }
      }
    }
  }

  // Populate ModuleOrderedResourceVars: all resource vars, ordered by
  // kernel function (module order), then by argument index.
  for (Function &F : *module) {
    auto where = FunctionToResourceVarsMap.find(&F);
    if (where != FunctionToResourceVarsMap.end()) {
      for (auto &rv : where->second) {
        if (rv != nullptr) {
          ModuleOrderedResourceVars.insert(rv);
        }
      }
    }
  }
  if (ShowResourceVars) {
    for (auto *info : ModuleOrderedResourceVars) {
      outs() << "MORV index " << info->index << " (" << info->descriptor_set
             << "," << info->binding << ") " << *(info->var_fn->getReturnType())
             << "\n";
    }
  }
}
1071
// Registers the global variable's (pointer) type with FindType so the
// corresponding SPIR-V type is generated.
void SPIRVProducerPass::FindTypePerGlobalVar(GlobalVariable &GV) {
  // Investigate global variable's type.
  FindType(GV.getType());
}
1076
SJW77b87ad2020-04-21 14:37:52 -05001077void SPIRVProducerPass::FindTypesForSamplerMap() {
David Neto862b7d82018-06-14 18:48:37 -04001078 // If we are using a sampler map, find the type of the sampler.
SJW77b87ad2020-04-21 14:37:52 -05001079 if (module->getFunction(clspv::LiteralSamplerFunction()) ||
alan-baker7506abb2020-09-10 15:02:55 -04001080 !getSamplerMap().empty()) {
James Pricecbe834f2020-12-01 13:42:25 -05001081 auto SamplerStructTy =
1082 StructType::getTypeByName(module->getContext(), "opencl.sampler_t");
David Neto862b7d82018-06-14 18:48:37 -04001083 if (!SamplerStructTy) {
SJW77b87ad2020-04-21 14:37:52 -05001084 SamplerStructTy =
1085 StructType::create(module->getContext(), "opencl.sampler_t");
David Neto862b7d82018-06-14 18:48:37 -04001086 }
1087
1088 SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);
1089
1090 FindType(SamplerTy);
1091 }
1092}
1093
SJW77b87ad2020-04-21 14:37:52 -05001094void SPIRVProducerPass::FindTypesForResourceVars() {
David Neto862b7d82018-06-14 18:48:37 -04001095 // Record types so they are generated.
1096 TypesNeedingLayout.reset();
1097 StructTypesNeedingBlock.reset();
1098
1099 // To match older clspv codegen, generate the float type first if required
1100 // for images.
1101 for (const auto *info : ModuleOrderedResourceVars) {
alan-bakerf6bc8252020-09-23 14:58:55 -04001102 if (info->arg_kind == clspv::ArgKind::SampledImage ||
1103 info->arg_kind == clspv::ArgKind::StorageImage) {
alan-bakerf67468c2019-11-25 15:51:49 -05001104 if (IsIntImageType(info->var_fn->getReturnType())) {
1105 // Nothing for now...
1106 } else if (IsUintImageType(info->var_fn->getReturnType())) {
SJW77b87ad2020-04-21 14:37:52 -05001107 FindType(Type::getInt32Ty(module->getContext()));
alan-bakerf67468c2019-11-25 15:51:49 -05001108 }
1109
1110 // We need "float" either for the sampled type or for the Lod operand.
SJW77b87ad2020-04-21 14:37:52 -05001111 FindType(Type::getFloatTy(module->getContext()));
David Neto862b7d82018-06-14 18:48:37 -04001112 }
1113 }
1114
1115 for (const auto *info : ModuleOrderedResourceVars) {
1116 Type *type = info->var_fn->getReturnType();
1117
1118 switch (info->arg_kind) {
1119 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04001120 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04001121 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1122 StructTypesNeedingBlock.insert(sty);
1123 } else {
1124 errs() << *type << "\n";
1125 llvm_unreachable("Buffer arguments must map to structures!");
1126 }
1127 break;
1128 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001129 case clspv::ArgKind::PodUBO:
1130 case clspv::ArgKind::PodPushConstant:
David Neto862b7d82018-06-14 18:48:37 -04001131 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1132 StructTypesNeedingBlock.insert(sty);
1133 } else {
1134 errs() << *type << "\n";
1135 llvm_unreachable("POD arguments must map to structures!");
1136 }
1137 break;
alan-bakerf6bc8252020-09-23 14:58:55 -04001138 case clspv::ArgKind::SampledImage:
1139 case clspv::ArgKind::StorageImage:
David Neto862b7d82018-06-14 18:48:37 -04001140 case clspv::ArgKind::Sampler:
1141 // Sampler and image types map to the pointee type but
1142 // in the uniform constant address space.
1143 type = PointerType::get(type->getPointerElementType(),
1144 clspv::AddressSpace::UniformConstant);
1145 break;
1146 default:
1147 break;
1148 }
1149
1150 // The converted type is the type of the OpVariable we will generate.
1151 // If the pointee type is an array of size zero, FindType will convert it
1152 // to a runtime array.
1153 FindType(type);
1154 }
1155
alan-bakerdcd97412019-09-16 15:32:30 -04001156 // If module constants are clustered in a storage buffer then that struct
1157 // needs layout decorations.
1158 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
SJW77b87ad2020-04-21 14:37:52 -05001159 for (GlobalVariable &GV : module->globals()) {
alan-bakerdcd97412019-09-16 15:32:30 -04001160 PointerType *PTy = cast<PointerType>(GV.getType());
1161 const auto AS = PTy->getAddressSpace();
1162 const bool module_scope_constant_external_init =
1163 (AS == AddressSpace::Constant) && GV.hasInitializer();
1164 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
1165 if (module_scope_constant_external_init &&
1166 spv::BuiltInMax == BuiltinType) {
1167 StructTypesNeedingBlock.insert(
1168 cast<StructType>(PTy->getPointerElementType()));
1169 }
1170 }
1171 }
1172
SJW77b87ad2020-04-21 14:37:52 -05001173 for (const GlobalVariable &GV : module->globals()) {
Kévin Petitbbbda972020-03-03 19:16:31 +00001174 if (GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
1175 auto Ty = cast<PointerType>(GV.getType())->getPointerElementType();
1176 assert(Ty->isStructTy() && "Push constants have to be structures.");
1177 auto STy = cast<StructType>(Ty);
1178 StructTypesNeedingBlock.insert(STy);
1179 }
1180 }
1181
David Neto862b7d82018-06-14 18:48:37 -04001182 // Traverse the arrays and structures underneath each Block, and
1183 // mark them as needing layout.
1184 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1185 StructTypesNeedingBlock.end());
1186 while (!work_list.empty()) {
1187 Type *type = work_list.back();
1188 work_list.pop_back();
1189 TypesNeedingLayout.insert(type);
1190 switch (type->getTypeID()) {
1191 case Type::ArrayTyID:
1192 work_list.push_back(type->getArrayElementType());
1193 if (!Hack_generate_runtime_array_stride_early) {
1194 // Remember this array type for deferred decoration.
1195 TypesNeedingArrayStride.insert(type);
1196 }
1197 break;
1198 case Type::StructTyID:
1199 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1200 work_list.push_back(elem_ty);
1201 }
1202 default:
1203 // This type and its contained types don't get layout.
1204 break;
1205 }
1206 }
1207}
1208
// For each pointer-to-local kernel argument recorded in module metadata,
// emits a Workgroup-storage OpVariable whose array length is an
// OpSpecConstant (default 1) decorated with the argument's SpecId, and
// caches the generated IDs in LocalSpecIdInfoMap. Also fills
// LocalArgSpecIds (Argument -> SpecId).
void SPIRVProducerPass::GenerateWorkgroupVars() {
  // The SpecId assignment for pointer-to-local arguments is recorded in
  // module-level metadata. Translate that information into local argument
  // information.
  LLVMContext &Context = module->getContext();
  NamedMDNode *nmd = module->getNamedMetadata(clspv::LocalSpecIdMetadataName());
  if (!nmd)
    return;
  // Each metadata tuple is (function, argument index, spec id).
  for (auto operand : nmd->operands()) {
    MDTuple *tuple = cast<MDTuple>(operand);
    ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
    Function *func = cast<Function>(fn_md->getValue());
    ConstantAsMetadata *arg_index_md =
        cast<ConstantAsMetadata>(tuple->getOperand(1));
    int arg_index = static_cast<int>(
        cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
    Argument *arg = &*(func->arg_begin() + arg_index);

    ConstantAsMetadata *spec_id_md =
        cast<ConstantAsMetadata>(tuple->getOperand(2));
    int spec_id = static_cast<int>(
        cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());

    LocalArgSpecIds[arg] = spec_id;
    // Several arguments may share a spec id; only generate the variable and
    // types once per spec id.
    if (LocalSpecIdInfoMap.count(spec_id))
      continue;

    // Generate the spec constant (i32, default value 1; the real array
    // length is supplied at specialization time via the SpecId decoration
    // added below).
    SPIRVOperandVec Ops;
    Ops << Type::getInt32Ty(Context) << 1;
    SPIRVID ArraySizeID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);

    // Generate the array type.
    Type *ElemTy = arg->getType()->getPointerElementType();
    Ops.clear();
    // The element type must have been created.
    Ops << ElemTy << ArraySizeID;

    SPIRVID ArrayTypeID = addSPIRVInst<kTypes>(spv::OpTypeArray, Ops);

    // Pointer-to-array type in Workgroup storage class.
    Ops.clear();
    Ops << spv::StorageClassWorkgroup << ArrayTypeID;
    SPIRVID PtrArrayTypeID = addSPIRVInst<kTypes>(spv::OpTypePointer, Ops);

    // Generate OpVariable.
    //
    // Ops[0] : Result Type ID
    // Ops[1] : Storage Class
    SPIRVID VariableID =
        addSPIRVGlobalVariable(PtrArrayTypeID, spv::StorageClassWorkgroup);

    // Decorate the array-size spec constant with its SpecId.
    Ops.clear();
    Ops << ArraySizeID << spv::DecorationSpecId << spec_id;
    addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);

    LocalArgInfo info{VariableID, ElemTy, ArraySizeID,
                      ArrayTypeID, PtrArrayTypeID, spec_id};
    LocalSpecIdInfoMap[spec_id] = info;
  }
}
1269
David Neto22f144c2017-06-12 14:26:21 -04001270void SPIRVProducerPass::FindType(Type *Ty) {
1271 TypeList &TyList = getTypeList();
1272
1273 if (0 != TyList.idFor(Ty)) {
1274 return;
1275 }
1276
1277 if (Ty->isPointerTy()) {
1278 auto AddrSpace = Ty->getPointerAddressSpace();
1279 if ((AddressSpace::Constant == AddrSpace) ||
1280 (AddressSpace::Global == AddrSpace)) {
1281 auto PointeeTy = Ty->getPointerElementType();
1282
1283 if (PointeeTy->isStructTy() &&
1284 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1285 FindType(PointeeTy);
1286 auto ActualPointerTy =
1287 PointeeTy->getPointerTo(AddressSpace::UniformConstant);
1288 FindType(ActualPointerTy);
1289 return;
1290 }
1291 }
1292 }
1293
David Neto862b7d82018-06-14 18:48:37 -04001294 // By convention, LLVM array type with 0 elements will map to
1295 // OpTypeRuntimeArray. Otherwise, it will map to OpTypeArray, which
1296 // has a constant number of elements. We need to support type of the
1297 // constant.
1298 if (auto *arrayTy = dyn_cast<ArrayType>(Ty)) {
1299 if (arrayTy->getNumElements() > 0) {
1300 LLVMContext &Context = Ty->getContext();
1301 FindType(Type::getInt32Ty(Context));
1302 }
David Neto22f144c2017-06-12 14:26:21 -04001303 }
1304
1305 for (Type *SubTy : Ty->subtypes()) {
1306 FindType(SubTy);
1307 }
1308
1309 TyList.insert(Ty);
1310}
1311
David Neto22f144c2017-06-12 14:26:21 -04001312spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
1313 switch (AddrSpace) {
1314 default:
1315 llvm_unreachable("Unsupported OpenCL address space");
1316 case AddressSpace::Private:
1317 return spv::StorageClassFunction;
1318 case AddressSpace::Global:
David Neto22f144c2017-06-12 14:26:21 -04001319 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001320 case AddressSpace::Constant:
1321 return clspv::Option::ConstantArgsInUniformBuffer()
1322 ? spv::StorageClassUniform
1323 : spv::StorageClassStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -04001324 case AddressSpace::Input:
1325 return spv::StorageClassInput;
1326 case AddressSpace::Local:
1327 return spv::StorageClassWorkgroup;
1328 case AddressSpace::UniformConstant:
1329 return spv::StorageClassUniformConstant;
David Neto9ed8e2f2018-03-24 06:47:24 -07001330 case AddressSpace::Uniform:
David Netoe439d702018-03-23 13:14:08 -07001331 return spv::StorageClassUniform;
David Neto22f144c2017-06-12 14:26:21 -04001332 case AddressSpace::ModuleScopePrivate:
1333 return spv::StorageClassPrivate;
Kévin Petitbbbda972020-03-03 19:16:31 +00001334 case AddressSpace::PushConstant:
1335 return spv::StorageClassPushConstant;
David Neto22f144c2017-06-12 14:26:21 -04001336 }
1337}
1338
David Neto862b7d82018-06-14 18:48:37 -04001339spv::StorageClass
1340SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
1341 switch (arg_kind) {
1342 case clspv::ArgKind::Buffer:
1343 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001344 case clspv::ArgKind::BufferUBO:
1345 return spv::StorageClassUniform;
David Neto862b7d82018-06-14 18:48:37 -04001346 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001347 return spv::StorageClassStorageBuffer;
1348 case clspv::ArgKind::PodUBO:
1349 return spv::StorageClassUniform;
1350 case clspv::ArgKind::PodPushConstant:
1351 return spv::StorageClassPushConstant;
David Neto862b7d82018-06-14 18:48:37 -04001352 case clspv::ArgKind::Local:
1353 return spv::StorageClassWorkgroup;
alan-bakerf6bc8252020-09-23 14:58:55 -04001354 case clspv::ArgKind::SampledImage:
1355 case clspv::ArgKind::StorageImage:
David Neto862b7d82018-06-14 18:48:37 -04001356 case clspv::ArgKind::Sampler:
1357 return spv::StorageClassUniformConstant;
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001358 default:
1359 llvm_unreachable("Unsupported storage class for argument kind");
David Neto862b7d82018-06-14 18:48:37 -04001360 }
1361}
1362
David Neto22f144c2017-06-12 14:26:21 -04001363spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1364 return StringSwitch<spv::BuiltIn>(Name)
1365 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1366 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1367 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1368 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1369 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
alan-bakerbed3a882020-04-21 14:42:41 -04001370 .Case("__spirv_WorkDim", spv::BuiltInWorkDim)
alan-bakere1996972020-05-04 08:38:12 -04001371 .Case("__spirv_GlobalOffset", spv::BuiltInGlobalOffset)
David Neto22f144c2017-06-12 14:26:21 -04001372 .Default(spv::BuiltInMax);
1373}
1374
SJW01901d92020-05-21 08:58:31 -05001375SPIRVID SPIRVProducerPass::getOpExtInstImportID() {
1376 if (OpExtInstImportID == 0) {
1377 //
1378 // Generate OpExtInstImport.
1379 //
1380 // Ops[0] ... Ops[n] = Name (Literal String)
David Neto22f144c2017-06-12 14:26:21 -04001381
SJW01901d92020-05-21 08:58:31 -05001382 OpExtInstImportID =
1383 addSPIRVInst<kImports>(spv::OpExtInstImport, "GLSL.std.450");
1384 }
1385 return OpExtInstImportID;
SJWf93f5f32020-05-05 07:27:56 -05001386}
1387
SJW806a5d82020-07-15 12:51:38 -05001388SPIRVID SPIRVProducerPass::addSPIRVGlobalVariable(const SPIRVID &TypeID,
1389 spv::StorageClass SC,
1390 const SPIRVID &InitID) {
1391 // Generate OpVariable.
1392 //
1393 // Ops[0] : Result Type ID
1394 // Ops[1] : Storage Class
1395 // Ops[2] : Initialization Value ID (optional)
1396
1397 SPIRVOperandVec Ops;
1398 Ops << TypeID << SC;
1399 if (InitID.isValid()) {
1400 Ops << InitID;
1401 }
1402
1403 SPIRVID VID = addSPIRVInst<kGlobalVariables>(spv::OpVariable, Ops);
1404
1405 if (SC == spv::StorageClassInput) {
1406 getEntryPointInterfacesList().push_back(VID);
1407 }
1408
1409 return VID;
1410}
1411
alan-bakerc3fd07f2020-10-22 09:48:49 -04001412Type *SPIRVProducerPass::CanonicalType(Type *type) {
1413 if (type->getNumContainedTypes() != 0) {
1414 switch (type->getTypeID()) {
1415 case Type::PointerTyID: {
1416 // For the purposes of our Vulkan SPIR-V type system, constant and global
1417 // are conflated.
1418 auto *ptr_ty = cast<PointerType>(type);
1419 unsigned AddrSpace = ptr_ty->getAddressSpace();
1420 if (AddressSpace::Constant == AddrSpace) {
1421 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1422 AddrSpace = AddressSpace::Global;
1423 // The canonical type of __constant is __global unless constants are
1424 // passed in uniform buffers.
1425 auto *GlobalTy =
1426 ptr_ty->getPointerElementType()->getPointerTo(AddrSpace);
1427 return GlobalTy;
1428 }
1429 }
1430 break;
1431 }
1432 case Type::StructTyID: {
1433 SmallVector<Type *, 8> subtypes;
1434 bool changed = false;
1435 for (auto *subtype : type->subtypes()) {
1436 auto canonical = CanonicalType(subtype);
1437 subtypes.push_back(canonical);
1438 if (canonical != subtype) {
1439 changed = true;
1440 }
1441 }
1442 if (changed) {
1443 return StructType::get(type->getContext(), subtypes,
1444 cast<StructType>(type)->isPacked());
1445 }
1446 break;
1447 }
1448 case Type::ArrayTyID: {
1449 auto *elem_ty = type->getArrayElementType();
1450 auto *equiv_elem_ty = CanonicalType(elem_ty);
1451 if (equiv_elem_ty != elem_ty) {
1452 return ArrayType::get(equiv_elem_ty,
1453 cast<ArrayType>(type)->getNumElements());
1454 }
1455 break;
1456 }
1457 case Type::FunctionTyID: {
1458 auto *func_ty = cast<FunctionType>(type);
1459 auto *return_ty = CanonicalType(func_ty->getReturnType());
1460 SmallVector<Type *, 8> params;
1461 for (unsigned i = 0; i < func_ty->getNumParams(); ++i) {
1462 params.push_back(CanonicalType(func_ty->getParamType(i)));
1463 }
1464 return FunctionType::get(return_ty, params, func_ty->isVarArg());
1465 }
1466 default:
1467 break;
1468 }
1469 }
1470
1471 return type;
1472}
1473
// Returns the SPIR-V id of the OpType* instruction for |Ty|, emitting the
// instruction (plus any capabilities and layout decorations it requires) on
// first request. The mapping is performed on the canonical form of |Ty|;
// resulting ids are cached in TypeMap under the canonical type, and also
// under |Ty| itself when the two differ.
SPIRVID SPIRVProducerPass::getSPIRVType(Type *Ty) {
  // Fast path: this exact type was already emitted.
  auto TI = TypeMap.find(Ty);
  if (TI != TypeMap.end()) {
    assert(TI->second.isValid());
    return TI->second;
  }

  // A non-canonical type shares the id of its canonical form, if that form
  // has already been emitted.
  auto Canonical = CanonicalType(Ty);
  if (Canonical != Ty) {
    auto CanonicalTI = TypeMap.find(Canonical);
    if (CanonicalTI != TypeMap.end()) {
      assert(CanonicalTI->second.isValid());
      return CanonicalTI->second;
    }
  }

  // Perform the mapping with the canonical type.

  const auto &DL = module->getDataLayout();

  SPIRVID RID;

  switch (Canonical->getTypeID()) {
  default: {
    Canonical->print(errs());
    llvm_unreachable("Unsupported type???");
    break;
  }
  case Type::MetadataTyID:
  case Type::LabelTyID: {
    // Ignore these types.
    break;
  }
  case Type::PointerTyID: {
    PointerType *PTy = cast<PointerType>(Canonical);
    unsigned AddrSpace = PTy->getAddressSpace();

    if (AddrSpace != AddressSpace::UniformConstant) {
      auto PointeeTy = PTy->getElementType();
      if (PointeeTy->isStructTy() &&
          dyn_cast<StructType>(PointeeTy)->isOpaque()) {
        // A pointer to an opaque struct maps to the pointee type itself.
        // TODO(sjw): assert always an image?
        RID = getSPIRVType(PointeeTy);
        break;
      }
    }

    //
    // Generate OpTypePointer.
    //

    // OpTypePointer
    // Ops[0] = Storage Class
    // Ops[1] = Element Type ID
    SPIRVOperandVec Ops;

    Ops << GetStorageClass(AddrSpace) << PTy->getElementType();

    RID = addSPIRVInst<kTypes>(spv::OpTypePointer, Ops);
    break;
  }
  case Type::StructTyID: {
    StructType *STy = cast<StructType>(Canonical);

    // Handle sampler type.
    if (STy->isOpaque()) {
      if (STy->getName().equals("opencl.sampler_t")) {
        //
        // Generate OpTypeSampler
        //
        // Empty Ops.

        RID = addSPIRVInst<kTypes>(spv::OpTypeSampler);
        break;
      } else if (STy->getName().startswith("opencl.image1d_ro_t") ||
                 STy->getName().startswith("opencl.image1d_rw_t") ||
                 STy->getName().startswith("opencl.image1d_wo_t") ||
                 STy->getName().startswith("opencl.image1d_array_ro_t") ||
                 STy->getName().startswith("opencl.image1d_array_rw_t") ||
                 STy->getName().startswith("opencl.image1d_array_wo_t") ||
                 STy->getName().startswith("opencl.image2d_ro_t") ||
                 STy->getName().startswith("opencl.image2d_rw_t") ||
                 STy->getName().startswith("opencl.image2d_wo_t") ||
                 STy->getName().startswith("opencl.image2d_array_ro_t") ||
                 STy->getName().startswith("opencl.image2d_array_rw_t") ||
                 STy->getName().startswith("opencl.image2d_array_wo_t") ||
                 STy->getName().startswith("opencl.image3d_ro_t") ||
                 STy->getName().startswith("opencl.image3d_rw_t") ||
                 STy->getName().startswith("opencl.image3d_wo_t")) {
        // 1D images need an extra capability: Sampled1D for sampled use,
        // Image1D otherwise.
        if (STy->getName().startswith("opencl.image1d_")) {
          if (STy->getName().contains(".sampled"))
            addCapability(spv::CapabilitySampled1D);
          else
            addCapability(spv::CapabilityImage1D);
        }

        //
        // Generate OpTypeImage
        //
        // Ops[0] = Sampled Type ID
        // Ops[1] = Dim ID
        // Ops[2] = Depth (Literal Number)
        // Ops[3] = Arrayed (Literal Number)
        // Ops[4] = MS (Literal Number)
        // Ops[5] = Sampled (Literal Number)
        // Ops[6] = Image Format ID
        //
        SPIRVOperandVec Ops;

        // The channel type is encoded in the mangled struct name
        // (".float", ".uint", ".int").
        SPIRVID SampledTyID;
        if (STy->getName().contains(".float")) {
          SampledTyID = getSPIRVType(Type::getFloatTy(Canonical->getContext()));
        } else if (STy->getName().contains(".uint")) {
          SampledTyID = getSPIRVType(Type::getInt32Ty(Canonical->getContext()));
        } else if (STy->getName().contains(".int")) {
          // Generate a signed 32-bit integer if necessary.
          if (int32ID == 0) {
            SPIRVOperandVec intOps;
            intOps << 32 << 1;
            int32ID = addSPIRVInst<kTypes>(spv::OpTypeInt, intOps);
          }
          SampledTyID = int32ID;

          // Generate a vec4 of the signed int if necessary.
          if (v4int32ID == 0) {
            SPIRVOperandVec vecOps;
            vecOps << int32ID << 4;
            v4int32ID = addSPIRVInst<kTypes>(spv::OpTypeVector, vecOps);
          }
        } else {
          // This was likely an UndefValue.
          SampledTyID = getSPIRVType(Type::getFloatTy(Canonical->getContext()));
        }
        Ops << SampledTyID;

        // Dimensionality also comes from the struct name; default is 2D.
        spv::Dim DimID = spv::Dim2D;
        if (STy->getName().startswith("opencl.image1d_ro_t") ||
            STy->getName().startswith("opencl.image1d_rw_t") ||
            STy->getName().startswith("opencl.image1d_wo_t") ||
            STy->getName().startswith("opencl.image1d_array_ro_t") ||
            STy->getName().startswith("opencl.image1d_array_rw_t") ||
            STy->getName().startswith("opencl.image1d_array_wo_t")) {
          DimID = spv::Dim1D;
        } else if (STy->getName().startswith("opencl.image3d_ro_t") ||
                   STy->getName().startswith("opencl.image3d_rw_t") ||
                   STy->getName().startswith("opencl.image3d_wo_t")) {
          DimID = spv::Dim3D;
        }
        Ops << DimID;

        // TODO: Set up Depth.
        Ops << 0;

        uint32_t arrayed = STy->getName().contains("_array_") ? 1 : 0;
        Ops << arrayed;

        // TODO: Set up MS.
        Ops << 0;

        // Set up Sampled.
        //
        // From Spec
        //
        // 0 indicates this is only known at run time, not at compile time
        // 1 indicates will be used with sampler
        // 2 indicates will be used without a sampler (a storage image)
        uint32_t Sampled = 1;
        if (!STy->getName().contains(".sampled")) {
          Sampled = 2;
        }
        Ops << Sampled;

        // TODO: Set up Image Format.
        Ops << spv::ImageFormatUnknown;
        RID = addSPIRVInst<kTypes>(spv::OpTypeImage, Ops);

        // Only need a sampled version of the type if it is used with a sampler.
        if (Sampled == 1) {
          Ops.clear();
          Ops << RID;
          getImageTypeMap()[Canonical] =
              addSPIRVInst<kTypes>(spv::OpTypeSampledImage, Ops);
        }
        break;
      }
    }

    //
    // Generate OpTypeStruct
    //
    // Ops[0] ... Ops[n] = Member IDs
    SPIRVOperandVec Ops;

    for (auto *EleTy : STy->elements()) {
      Ops << EleTy;
    }

    RID = addSPIRVInst<kTypes>(spv::OpTypeStruct, Ops);

    // Generate OpMemberDecorate unless we are generating it for the canonical
    // type.
    StructType *canonical = cast<StructType>(CanonicalType(STy));
    if (TypesNeedingLayout.idFor(STy) &&
        (canonical == STy || !TypesNeedingLayout.idFor(canonical))) {
      for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
           MemberIdx++) {
        // Ops[0] = Structure Type ID
        // Ops[1] = Member Index(Literal Number)
        // Ops[2] = Decoration (Offset)
        // Ops[3] = Byte Offset (Literal Number)
        const auto ByteOffset =
            GetExplicitLayoutStructMemberOffset(STy, MemberIdx, DL);

        Ops.clear();
        Ops << RID << MemberIdx << spv::DecorationOffset << ByteOffset;

        addSPIRVInst<kAnnotations>(spv::OpMemberDecorate, Ops);
      }
    }

    // Generate OpDecorate unless we are generating it for the canonical type.
    if (StructTypesNeedingBlock.idFor(STy) &&
        (canonical == STy || !StructTypesNeedingBlock.idFor(canonical))) {
      Ops.clear();
      // Use Block decorations with StorageBuffer storage class.
      Ops << RID << spv::DecorationBlock;

      addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
    }
    break;
  }
  case Type::IntegerTyID: {
    uint32_t bit_width =
        static_cast<uint32_t>(Canonical->getPrimitiveSizeInBits());

    // Declare the capability matching the integer width.
    if (clspv::Option::Int8Support() && bit_width == 8) {
      addCapability(spv::CapabilityInt8);
    } else if (bit_width == 16) {
      addCapability(spv::CapabilityInt16);
    } else if (bit_width == 64) {
      addCapability(spv::CapabilityInt64);
    }

    if (bit_width == 1) {
      // i1 maps to OpTypeBool.
      RID = addSPIRVInst<kTypes>(spv::OpTypeBool);
    } else {
      if (!clspv::Option::Int8Support() && bit_width == 8) {
        // i8 is added to TypeMap as i32.
        RID = getSPIRVType(Type::getIntNTy(Canonical->getContext(), 32));
      } else {
        SPIRVOperandVec Ops;
        Ops << bit_width << 0 /* not signed */;
        RID = addSPIRVInst<kTypes>(spv::OpTypeInt, Ops);
      }
    }
    break;
  }
  case Type::HalfTyID:
  case Type::FloatTyID:
  case Type::DoubleTyID: {
    uint32_t bit_width =
        static_cast<uint32_t>(Canonical->getPrimitiveSizeInBits());
    // Declare the capability matching the float width.
    if (bit_width == 16) {
      addCapability(spv::CapabilityFloat16);
    } else if (bit_width == 64) {
      addCapability(spv::CapabilityFloat64);
    }

    SPIRVOperandVec Ops;
    Ops << bit_width;

    RID = addSPIRVInst<kTypes>(spv::OpTypeFloat, Ops);
    break;
  }
  case Type::ArrayTyID: {
    ArrayType *ArrTy = cast<ArrayType>(Canonical);
    const uint64_t Length = ArrTy->getArrayNumElements();
    if (Length == 0) {
      // By convention, map it to a RuntimeArray.

      Type *EleTy = ArrTy->getArrayElementType();

      //
      // Generate OpTypeRuntimeArray.
      //
      // OpTypeRuntimeArray
      // Ops[0] = Element Type ID
      SPIRVOperandVec Ops;
      Ops << EleTy;

      RID = addSPIRVInst<kTypes>(spv::OpTypeRuntimeArray, Ops);

      if (Hack_generate_runtime_array_stride_early) {
        // Generate OpDecorate.

        // Ops[0] = Target ID
        // Ops[1] = Decoration (ArrayStride)
        // Ops[2] = Stride Number(Literal Number)
        Ops.clear();

        Ops << RID << spv::DecorationArrayStride
            << static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL));

        addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
      }

    } else {

      //
      // Generate OpConstant and OpTypeArray.
      //

      //
      // Generate OpConstant for array length.
      //
      // Add constant for length to constant list.
      Constant *CstLength =
          ConstantInt::get(Type::getInt32Ty(module->getContext()), Length);

      // Remember to generate ArrayStride later
      getTypesNeedingArrayStride().insert(Canonical);

      //
      // Generate OpTypeArray.
      //
      // Ops[0] = Element Type ID
      // Ops[1] = Array Length Constant ID
      SPIRVOperandVec Ops;

      Ops << ArrTy->getElementType() << CstLength;

      RID = addSPIRVInst<kTypes>(spv::OpTypeArray, Ops);
    }
    break;
  }
  case Type::FixedVectorTyID: {
    auto VecTy = cast<VectorType>(Canonical);
    // <4 x i8> is changed to i32 if i8 is not generally supported.
    if (!clspv::Option::Int8Support() &&
        VecTy->getElementType() == Type::getInt8Ty(module->getContext())) {
      if (VecTy->getElementCount().getKnownMinValue() == 4) {
        RID = getSPIRVType(VecTy->getElementType());
        break;
      } else {
        // Only the 4-element i8 vector has a lowering; anything else is
        // unsupported without Int8Support.
        Canonical->print(errs());
        llvm_unreachable("Support above i8 vector type");
      }
    }

    // Ops[0] = Component Type ID
    // Ops[1] = Component Count (Literal Number)
    SPIRVOperandVec Ops;
    Ops << VecTy->getElementType()
        << VecTy->getElementCount().getKnownMinValue();

    RID = addSPIRVInst<kTypes>(spv::OpTypeVector, Ops);
    break;
  }
  case Type::VoidTyID: {
    RID = addSPIRVInst<kTypes>(spv::OpTypeVoid);
    break;
  }
  case Type::FunctionTyID: {
    // Generate SPIRV instruction for function type.
    FunctionType *FTy = cast<FunctionType>(Canonical);

    // Ops[0] = Return Type ID
    // Ops[1] ... Ops[n] = Parameter Type IDs
    SPIRVOperandVec Ops;

    // Find SPIRV instruction for return type
    Ops << FTy->getReturnType();

    // Find SPIRV instructions for parameter types
    for (unsigned k = 0; k < FTy->getNumParams(); k++) {
      // Find SPIRV instruction for parameter type.
      auto ParamTy = FTy->getParamType(k);
      if (ParamTy->isPointerTy()) {
        // Pointer-to-opaque-struct params are represented by the pointee
        // type, matching the PointerTyID case above.
        auto PointeeTy = ParamTy->getPointerElementType();
        if (PointeeTy->isStructTy() &&
            dyn_cast<StructType>(PointeeTy)->isOpaque()) {
          ParamTy = PointeeTy;
        }
      }

      Ops << ParamTy;
    }

    RID = addSPIRVInst<kTypes>(spv::OpTypeFunction, Ops);
    break;
  }
  }

  if (RID.isValid()) {
    TypeMap[Canonical] = RID;
    if (Ty != Canonical) {
      // Speed up future lookups of this type by also caching the non-canonical
      // type.
      TypeMap[Ty] = RID;
    }
  }
  return RID;
}
1877
SJW77b87ad2020-04-21 14:37:52 -05001878void SPIRVProducerPass::GenerateSPIRVTypes() {
David Neto22f144c2017-06-12 14:26:21 -04001879 for (Type *Ty : getTypeList()) {
SJWf93f5f32020-05-05 07:27:56 -05001880 getSPIRVType(Ty);
David Netoc6f3ab22018-04-06 18:02:31 -04001881 }
David Neto22f144c2017-06-12 14:26:21 -04001882}
1883
SJW806a5d82020-07-15 12:51:38 -05001884SPIRVID SPIRVProducerPass::getSPIRVInt32Constant(uint32_t CstVal) {
1885 Type *i32 = Type::getInt32Ty(module->getContext());
1886 Constant *Cst = ConstantInt::get(i32, CstVal);
1887 return getSPIRVValue(Cst);
1888}
1889
// Returns the SPIR-V id for constant |C|, emitting its defining instruction
// into the constants section on first use and caching the result in the
// value map. Poison values are lowered as undef. <4 x i8> vector constants
// are packed into a single i32 constant.
SPIRVID SPIRVProducerPass::getSPIRVConstant(Constant *C) {
  ValueMapType &VMap = getValueMap();
  const bool hack_undef = clspv::Option::HackUndef();

  // Treat poison as an undef.
  auto *Cst = C;
  if (isa<PoisonValue>(Cst)) {
    Cst = UndefValue::get(Cst->getType());
  }

  // Reuse a previously emitted id for this constant.
  auto VI = VMap.find(Cst);
  if (VI != VMap.end()) {
    assert(VI->second.isValid());
    return VI->second;
  }

  SPIRVID RID;

  //
  // Generate OpConstant.
  //
  // Ops[0] = Result Type ID
  // Ops[1] .. Ops[n] = Values LiteralNumber
  SPIRVOperandVec Ops;

  Ops << Cst->getType();

  std::vector<uint32_t> LiteralNum;
  // OpNop means "no opcode chosen yet"; special cases below that assign RID
  // directly leave it as OpNop.
  spv::Op Opcode = spv::OpNop;

  if (isa<UndefValue>(Cst)) {
    // Ops[0] = Result Type ID
    Opcode = spv::OpUndef;
    // With -hack-undef, nullable undefs become OpConstantNull instead.
    if (hack_undef && IsTypeNullable(Cst->getType())) {
      Opcode = spv::OpConstantNull;
    }
  } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
    unsigned bit_width = CI->getBitWidth();
    if (bit_width == 1) {
      // If the bitwidth of constant is 1, generate OpConstantTrue or
      // OpConstantFalse.
      if (CI->getZExtValue()) {
        // Ops[0] = Result Type ID
        Opcode = spv::OpConstantTrue;
      } else {
        // Ops[0] = Result Type ID
        Opcode = spv::OpConstantFalse;
      }
    } else {
      // Wider integers: emit the value as one or two 32-bit words,
      // low word first.
      auto V = CI->getZExtValue();
      LiteralNum.push_back(V & 0xFFFFFFFF);

      if (bit_width > 32) {
        LiteralNum.push_back(V >> 32);
      }

      Opcode = spv::OpConstant;

      Ops << LiteralNum;
    }
  } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
    // Floats are emitted as their raw bit pattern, low word first.
    uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
    Type *CFPTy = CFP->getType();
    if (CFPTy->isFloatTy()) {
      LiteralNum.push_back(FPVal & 0xFFFFFFFF);
    } else if (CFPTy->isDoubleTy()) {
      LiteralNum.push_back(FPVal & 0xFFFFFFFF);
      LiteralNum.push_back(FPVal >> 32);
    } else if (CFPTy->isHalfTy()) {
      LiteralNum.push_back(FPVal & 0xFFFF);
    } else {
      CFPTy->print(errs());
      llvm_unreachable("Implement this ConstantFP Type");
    }

    Opcode = spv::OpConstant;

    Ops << LiteralNum;
  } else if (isa<ConstantDataSequential>(Cst) &&
             cast<ConstantDataSequential>(Cst)->isString()) {
    // String constants are not supported.
    Cst->print(errs());
    llvm_unreachable("Implement this Constant");

  } else if (const ConstantDataSequential *CDS =
                 dyn_cast<ConstantDataSequential>(Cst)) {
    // Let's convert <4 x i8> constant to int constant specially.
    // This case occurs when all the values are specified as constant
    // ints.
    Type *CstTy = Cst->getType();
    if (is4xi8vec(CstTy)) {
      //
      // Generate OpConstant with OpTypeInt 32 0.
      //
      // Pack the four bytes into one i32, element 0 in the high byte.
      uint32_t IntValue = 0;
      for (unsigned k = 0; k < 4; k++) {
        const uint64_t Val = CDS->getElementAsInteger(k);
        IntValue = (IntValue << 8) | (Val & 0xffu);
      }

      RID = getSPIRVInt32Constant(IntValue);
    } else {

      // A normal constant-data-sequential case.
      for (unsigned k = 0; k < CDS->getNumElements(); k++) {
        Ops << CDS->getElementAsConstant(k);
      }

      Opcode = spv::OpConstantComposite;
    }
  } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
    // Let's convert <4 x i8> constant to int constant specially.
    // This case occurs when at least one of the values is an undef.
    Type *CstTy = Cst->getType();
    if (is4xi8vec(CstTy)) {
      //
      // Generate OpConstant with OpTypeInt 32 0.
      //
      // Pack the four bytes into one i32; undef elements contribute 0.
      uint32_t IntValue = 0;
      for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
           I != E; ++I) {
        uint64_t Val = 0;
        const Value *CV = *I;
        if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
          Val = CI2->getZExtValue();
        }
        IntValue = (IntValue << 8) | (Val & 0xffu);
      }

      RID = getSPIRVInt32Constant(IntValue);
    } else {

      // We use a constant composite in SPIR-V for our constant aggregate in
      // LLVM.
      Opcode = spv::OpConstantComposite;

      for (unsigned k = 0; k < CA->getNumOperands(); k++) {
        // And add an operand to the composite we are constructing
        Ops << CA->getAggregateElement(k);
      }
    }
  } else if (Cst->isNullValue()) {
    Opcode = spv::OpConstantNull;
  } else {
    Cst->print(errs());
    llvm_unreachable("Unsupported Constant???");
  }

  if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
    // Null pointer requires variable pointers.
    setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
  }

  // If no special case assigned an id above, emit the chosen opcode now.
  if (RID == 0) {
    RID = addSPIRVInst<kConstants>(Opcode, Ops);
  }

  VMap[Cst] = RID;

  return RID;
}
2050
2051SPIRVID SPIRVProducerPass::getSPIRVValue(Value *V) {
2052 auto II = ValueMap.find(V);
2053 if (II != ValueMap.end()) {
SJW01901d92020-05-21 08:58:31 -05002054 assert(II->second.isValid());
SJWf93f5f32020-05-05 07:27:56 -05002055 return II->second;
2056 }
2057 if (Constant *Cst = dyn_cast<Constant>(V)) {
2058 return getSPIRVConstant(Cst);
2059 } else {
2060 llvm_unreachable("Variable not found");
2061 }
2062}
2063
// Emits one module-scope OpVariable (UniformConstant storage class) per
// distinct literal sampler referenced via calls to the clspv literal-sampler
// builtin function, decorates each variable with DescriptorSet and Binding,
// and records reflection info for samplers that came with explicit
// descriptor-set/binding arguments.  Populates SamplerLiteralToIDMap so later
// codegen can resolve literal-sampler loads.
void SPIRVProducerPass::GenerateSamplers() {
  auto &sampler_map = getSamplerMap();
  SamplerLiteralToIDMap.clear();
  DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
  DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;

  // We might have samplers in the sampler map that are not used
  // in the translation unit. We need to allocate variables
  // for them and bindings too.
  DenseSet<unsigned> used_bindings;

  auto *var_fn = module->getFunction(clspv::LiteralSamplerFunction());
  // Return if there are no literal samplers.
  if (!var_fn)
    return;

  // First pass: record the (descriptor set, binding) assignment for each
  // sampler value, and remember which bindings are taken.
  for (auto user : var_fn->users()) {
    // Populate SamplerLiteralToDescriptorSetMap and
    // SamplerLiteralToBindingMap.
    //
    // Look for calls like
    //  call %opencl.sampler_t addrspace(2)*
    //       @clspv.sampler.var.literal(
    //          i32 descriptor,
    //          i32 binding,
    //          i32 (index-into-sampler-map|sampler_mask))
    if (auto *call = dyn_cast<CallInst>(user)) {
      const auto third_param = static_cast<unsigned>(
          dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
      auto sampler_value = third_param;
      // With a sampler map, the third argument indexes the map; otherwise it
      // is the sampler bitmask value itself.
      if (clspv::Option::UseSamplerMap()) {
        if (third_param >= sampler_map.size()) {
          errs() << "Out of bounds index to sampler map: " << third_param;
          llvm_unreachable("bad sampler init: out of bounds");
        }
        sampler_value = sampler_map[third_param].first;
      }

      const auto descriptor_set = static_cast<unsigned>(
          dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
      const auto binding = static_cast<unsigned>(
          dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());

      SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
      SamplerLiteralToBindingMap[sampler_value] = binding;
      used_bindings.insert(binding);
    }
  }

  // Second pass: create exactly one variable per distinct third argument
  // (deduplicated via |seen|) and attach decorations.
  DenseSet<size_t> seen;
  for (auto user : var_fn->users()) {
    if (!isa<CallInst>(user))
      continue;

    auto call = cast<CallInst>(user);
    const unsigned third_param = static_cast<unsigned>(
        dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());

    // Already allocated a variable for this value.
    if (!seen.insert(third_param).second)
      continue;

    auto sampler_value = third_param;
    // No bounds check needed here: the first pass already rejected
    // out-of-bounds indices with llvm_unreachable.
    if (clspv::Option::UseSamplerMap()) {
      sampler_value = sampler_map[third_param].first;
    }

    auto sampler_var_id = addSPIRVGlobalVariable(
        getSPIRVType(SamplerTy), spv::StorageClassUniformConstant);

    SamplerLiteralToIDMap[sampler_value] = sampler_var_id;

    unsigned descriptor_set;
    unsigned binding;
    if (SamplerLiteralToBindingMap.find(sampler_value) ==
        SamplerLiteralToBindingMap.end()) {
      // This sampler is not actually used. Find the next one.
      // Scan for the lowest unused binding number.
      for (binding = 0; used_bindings.count(binding); binding++) {
      }
      descriptor_set = 0; // Literal samplers always use descriptor set 0.
      used_bindings.insert(binding);
    } else {
      descriptor_set = SamplerLiteralToDescriptorSetMap[sampler_value];
      binding = SamplerLiteralToBindingMap[sampler_value];

      // Record a reflection instruction describing this literal sampler so
      // the host side can locate and initialize it.
      auto import_id = getReflectionImport();
      SPIRVOperandVec Ops;
      Ops << getSPIRVType(Type::getVoidTy(module->getContext())) << import_id
          << reflection::ExtInstLiteralSampler
          << getSPIRVInt32Constant(descriptor_set)
          << getSPIRVInt32Constant(binding)
          << getSPIRVInt32Constant(sampler_value);
      addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
    }

    // Ops[0] = Target ID
    // Ops[1] = Decoration (DescriptorSet)
    // Ops[2] = LiteralNumber according to Decoration
    SPIRVOperandVec Ops;
    Ops << sampler_var_id << spv::DecorationDescriptorSet << descriptor_set;

    addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);

    // Ops[0] = Target ID
    // Ops[1] = Decoration (Binding)
    // Ops[2] = LiteralNumber according to Decoration
    Ops.clear();
    Ops << sampler_var_id << spv::DecorationBinding << binding;

    addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
  }
}
David Neto22f144c2017-06-12 14:26:21 -04002176
// Creates one SPIR-V global variable per resource-variable info object in
// ModuleOrderedResourceVars, maps the corresponding builtin calls to either
// the variable id directly (buffers/POD args) or to a deferred load
// (samplers/images), then emits DescriptorSet/Binding/Coherent/NonWritable/
// NonReadable decorations as appropriate.
void SPIRVProducerPass::GenerateResourceVars() {
  ValueMapType &VMap = getValueMap();

  // Generate variables. Make one for each of resource var info object.
  for (auto *info : ModuleOrderedResourceVars) {
    Type *type = info->var_fn->getReturnType();
    // Remap the address space for opaque types.
    // Samplers and images live in UniformConstant storage regardless of the
    // address space on the builtin's return type.
    switch (info->arg_kind) {
    case clspv::ArgKind::Sampler:
    case clspv::ArgKind::SampledImage:
    case clspv::ArgKind::StorageImage:
      type = PointerType::get(type->getPointerElementType(),
                              clspv::AddressSpace::UniformConstant);
      break;
    default:
      break;
    }

    const auto sc = GetStorageClassForArgKind(info->arg_kind);

    info->var_id = addSPIRVGlobalVariable(getSPIRVType(type), sc);

    // Map calls to the variable-builtin-function.
    // Only calls whose (set, binding) operands match this info object are
    // mapped; the same builtin function can serve several resources.
    for (auto &U : info->var_fn->uses()) {
      if (auto *call = dyn_cast<CallInst>(U.getUser())) {
        const auto set = unsigned(
            dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
        const auto binding = unsigned(
            dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
        if (set == info->descriptor_set && binding == info->binding) {
          switch (info->arg_kind) {
          case clspv::ArgKind::Buffer:
          case clspv::ArgKind::BufferUBO:
          case clspv::ArgKind::Pod:
          case clspv::ArgKind::PodUBO:
          case clspv::ArgKind::PodPushConstant:
            // The call maps to the variable directly.
            VMap[call] = info->var_id;
            break;
          case clspv::ArgKind::Sampler:
          case clspv::ArgKind::SampledImage:
          case clspv::ArgKind::StorageImage:
            // The call maps to a load we generate later.
            ResourceVarDeferredLoadCalls[call] = info->var_id;
            break;
          default:
            llvm_unreachable("Unhandled arg kind");
          }
        }
      }
    }
  }

  // Generate associated decorations.
  SPIRVOperandVec Ops;
  for (auto *info : ModuleOrderedResourceVars) {
    // Push constants don't need descriptor set or binding decorations.
    if (info->arg_kind == clspv::ArgKind::PodPushConstant)
      continue;

    // Decorate with DescriptorSet and Binding.
    Ops.clear();
    Ops << info->var_id << spv::DecorationDescriptorSet << info->descriptor_set;
    addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);

    Ops.clear();
    Ops << info->var_id << spv::DecorationBinding << info->binding;
    addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);

    if (info->coherent) {
      // Decorate with Coherent if required for the variable.
      Ops.clear();
      Ops << info->var_id << spv::DecorationCoherent;
      addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
    }

    // Generate NonWritable and NonReadable
    switch (info->arg_kind) {
    case clspv::ArgKind::Buffer:
    case clspv::ArgKind::BufferUBO:
      // OpenCL constant address space buffers are never written.
      if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
          clspv::AddressSpace::Constant) {
        Ops.clear();
        Ops << info->var_id << spv::DecorationNonWritable;
        addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
      }
      break;
    case clspv::ArgKind::StorageImage: {
      auto *type = info->var_fn->getReturnType();
      auto *struct_ty = cast<StructType>(type->getPointerElementType());
      // TODO(alan-baker): This is conservative. If compiling for OpenCL 2.0 or
      // above, the compiler treats all write_only images as read_write images.
      // The "_wo_t" suffix marks the opaque struct of a write_only image type.
      if (struct_ty->getName().contains("_wo_t")) {
        Ops.clear();
        Ops << info->var_id << spv::DecorationNonReadable;
        addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
      }
      break;
    }
    default:
      break;
    }
  }
}
2281
// Emits the SPIR-V global variable for |GV|, including any special handling
// for builtin variables:
//  - WorkgroupSize: initialized from an OpConstantComposite when all kernels
//    agree on reqd_work_group_size metadata, otherwise from OpSpecConstants.
//  - WorkDim / GlobalOffset (OpenCL builtins): initialized from spec
//    constants decorated with SpecId.
//  - Module-scope constants placed in a storage buffer: emitted with a
//    reflection instruction carrying the initializer bytes plus
//    DescriptorSet/Binding decorations.
void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
  ValueMapType &VMap = getValueMap();
  std::vector<SPIRVID> &BuiltinDimVec = getBuiltinDimVec();
  const DataLayout &DL = GV.getParent()->getDataLayout();

  const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
  Type *Ty = GV.getType();
  PointerType *PTy = cast<PointerType>(Ty);

  SPIRVID InitializerID;

  // Workgroup size is handled differently (it goes into a constant)
  if (spv::BuiltInWorkgroupSize == BuiltinType) {
    // 0xFFFFFFFF sentinels mean "no dimension seen yet".
    uint32_t PrevXDimCst = 0xFFFFFFFF;
    uint32_t PrevYDimCst = 0xFFFFFFFF;
    uint32_t PrevZDimCst = 0xFFFFFFFF;
    // HasMD stays true only if every kernel has reqd_work_group_size
    // metadata and all kernels agree on the values.
    bool HasMD = true;
    for (Function &Func : *GV.getParent()) {
      if (Func.isDeclaration()) {
        continue;
      }

      // We only need to check kernels.
      if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
        continue;
      }

      if (const MDNode *MD =
              dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
        uint32_t CurXDimCst = static_cast<uint32_t>(
            mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
        uint32_t CurYDimCst = static_cast<uint32_t>(
            mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
        uint32_t CurZDimCst = static_cast<uint32_t>(
            mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());

        if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
            PrevZDimCst == 0xFFFFFFFF) {
          // First kernel seen: remember its dimensions and emit the
          // constant composite below.
          PrevXDimCst = CurXDimCst;
          PrevYDimCst = CurYDimCst;
          PrevZDimCst = CurZDimCst;
        } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
                   CurZDimCst != PrevZDimCst) {
          // Kernels disagree on the workgroup size: fall back to spec
          // constants.
          HasMD = false;
          continue;
        } else {
          // Same size as previously seen; nothing more to emit.
          continue;
        }

        //
        // Generate OpConstantComposite.
        //
        // Ops[0] : Result Type ID
        // Ops[1] : Constant size for x dimension.
        // Ops[2] : Constant size for y dimension.
        // Ops[3] : Constant size for z dimension.
        SPIRVOperandVec Ops;

        SPIRVID XDimCstID =
            getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(0)));
        SPIRVID YDimCstID =
            getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(1)));
        SPIRVID ZDimCstID =
            getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(2)));

        Ops << Ty->getPointerElementType() << XDimCstID << YDimCstID
            << ZDimCstID;

        InitializerID =
            addSPIRVInst<kGlobalVariables>(spv::OpConstantComposite, Ops);
      } else {
        HasMD = false;
      }
    }

    // If all kernels do not have metadata for reqd_work_group_size, generate
    // OpSpecConstants for x/y/z dimension.
    if (!HasMD || clspv::Option::NonUniformNDRangeSupported()) {
      //
      // Generate OpSpecConstants for x/y/z dimension.
      //
      // Ops[0] : Result Type ID
      // Ops[1] : Constant size for x/y/z dimension (Literal Number).

      // Allocate spec constants for workgroup size.
      clspv::AddWorkgroupSpecConstants(module);

      SPIRVOperandVec Ops;
      SPIRVID result_type_id = getSPIRVType(
          dyn_cast<VectorType>(Ty->getPointerElementType())->getElementType());

      // X Dimension (default value 1).
      Ops << result_type_id << 1;
      SPIRVID XDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);

      // Y Dimension
      Ops.clear();
      Ops << result_type_id << 1;
      SPIRVID YDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);

      // Z Dimension
      Ops.clear();
      Ops << result_type_id << 1;
      SPIRVID ZDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);

      // Record the dimension ids so SpecId decorations can be attached
      // elsewhere.
      BuiltinDimVec.push_back(XDimCstID);
      BuiltinDimVec.push_back(YDimCstID);
      BuiltinDimVec.push_back(ZDimCstID);

      //
      // Generate OpSpecConstantComposite.
      //
      // Ops[0] : Result Type ID
      // Ops[1] : Constant size for x dimension.
      // Ops[2] : Constant size for y dimension.
      // Ops[3] : Constant size for z dimension.
      Ops.clear();
      Ops << Ty->getPointerElementType() << XDimCstID << YDimCstID << ZDimCstID;

      InitializerID =
          addSPIRVInst<kConstants>(spv::OpSpecConstantComposite, Ops);
    }
  } else if (BuiltinType == spv::BuiltInWorkDim) {
    // 1. Generate a specialization constant with a default of 3.
    // 2. Allocate and annotate a SpecId for the constant.
    // 3. Use the spec constant as the initializer for the variable.
    SPIRVOperandVec Ops;

    //
    // Generate OpSpecConstant.
    //
    // Ops[0] : Result Type ID
    // Ops[1] : Default literal value

    Ops << IntegerType::get(GV.getContext(), 32) << 3;

    InitializerID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);

    //
    // Generate SpecId decoration.
    //
    // Ops[0] : target
    // Ops[1] : decoration
    // Ops[2] : SpecId
    auto spec_id = AllocateSpecConstant(module, SpecConstant::kWorkDim);
    Ops.clear();
    Ops << InitializerID << spv::DecorationSpecId << spec_id;

    addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
  } else if (BuiltinType == spv::BuiltInGlobalOffset) {
    // 1. Generate a spec constant with a default of {0, 0, 0}.
    // 2. Allocate and annotate SpecIds for the constants.
    // 3. Use the spec constant as the initializer for the variable.
    SPIRVOperandVec Ops;

    //
    // Generate OpSpecConstant for each dimension.
    //
    // Ops[0] : Result Type ID
    // Ops[1] : Default literal value
    //
    Ops << IntegerType::get(GV.getContext(), 32) << 0;
    SPIRVID x_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);

    Ops.clear();
    Ops << IntegerType::get(GV.getContext(), 32) << 0;
    SPIRVID y_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);

    Ops.clear();
    Ops << IntegerType::get(GV.getContext(), 32) << 0;
    SPIRVID z_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);

    //
    // Generate SpecId decoration for each dimension.
    //
    // Ops[0] : target
    // Ops[1] : decoration
    // Ops[2] : SpecId
    //
    auto spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetX);
    Ops.clear();
    Ops << x_id << spv::DecorationSpecId << spec_id;
    addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);

    spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetY);
    Ops.clear();
    Ops << y_id << spv::DecorationSpecId << spec_id;
    addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);

    spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetZ);
    Ops.clear();
    Ops << z_id << spv::DecorationSpecId << spec_id;
    addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);

    //
    // Generate OpSpecConstantComposite.
    //
    // Ops[0] : type id
    // Ops[1..n-1] : elements
    //
    Ops.clear();
    Ops << GV.getType()->getPointerElementType() << x_id << y_id << z_id;
    InitializerID = addSPIRVInst<kConstants>(spv::OpSpecConstantComposite, Ops);
  }

  const auto AS = PTy->getAddressSpace();
  const auto spvSC = GetStorageClass(AS);

  const bool module_scope_constant_external_init =
      (AS == AddressSpace::Constant) && GV.hasInitializer() &&
      clspv::Option::ModuleConstantsInStorageBuffer();

  if (GV.hasInitializer()) {
    auto GVInit = GV.getInitializer();
    // Host-initialized constants get their data via reflection (below), so
    // the variable itself carries no initializer in that case.
    if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
      InitializerID = getSPIRVValue(GVInit);
    }
  }

  SPIRVID var_id =
      addSPIRVGlobalVariable(getSPIRVType(Ty), spvSC, InitializerID);

  VMap[&GV] = var_id;

  auto IsOpenCLBuiltin = [](spv::BuiltIn builtin) {
    return builtin == spv::BuiltInWorkDim ||
           builtin == spv::BuiltInGlobalOffset;
  };

  // If we have a builtin (not an OpenCL builtin).
  if (spv::BuiltInMax != BuiltinType && !IsOpenCLBuiltin(BuiltinType)) {
    //
    // Generate OpDecorate.
    //
    // DOps[0] = Target ID
    // DOps[1] = Decoration (Builtin)
    // DOps[2] = BuiltIn ID
    SPIRVID ResultID;

    // WorkgroupSize is different, we decorate the constant composite that has
    // its value, rather than the variable that we use to access the value.
    if (spv::BuiltInWorkgroupSize == BuiltinType) {
      ResultID = InitializerID;
      // Save both the value and variable IDs for later.
      WorkgroupSizeValueID = InitializerID;
      WorkgroupSizeVarID = getSPIRVValue(&GV);
    } else {
      ResultID = getSPIRVValue(&GV);
    }

    SPIRVOperandVec Ops;
    Ops << ResultID << spv::DecorationBuiltIn << BuiltinType;

    addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
  } else if (module_scope_constant_external_init) {
    // This module scope constant is initialized from a storage buffer with data
    // provided by the host at binding 0 of the next descriptor set.
    const uint32_t descriptor_set = TakeDescriptorIndex(module);

    // Emit the initializer as a reflection instruction.
    // Use "kind,buffer" to indicate storage buffer. We might want to expand
    // that later to other types, like uniform buffer.
    std::string hexbytes;
    llvm::raw_string_ostream str(hexbytes);
    clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());

    // Reflection instruction for constant data.
    SPIRVOperandVec Ops;
    auto data_id = addSPIRVInst<kDebug>(spv::OpString, str.str().c_str());
    Ops << getSPIRVType(Type::getVoidTy(module->getContext()))
        << getReflectionImport() << reflection::ExtInstConstantDataStorageBuffer
        << getSPIRVInt32Constant(descriptor_set) << getSPIRVInt32Constant(0)
        << data_id;
    addSPIRVInst<kReflection>(spv::OpExtInst, Ops);

    // OpDecorate %var DescriptorSet <descriptor_set>
    Ops.clear();
    Ops << var_id << spv::DecorationDescriptorSet << descriptor_set;
    addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);

    // OpDecorate %var Binding <binding>
    Ops.clear();
    Ops << var_id << spv::DecorationBinding << 0;
    addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
  }
}
2568
David Neto22f144c2017-06-12 14:26:21 -04002569void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
David Neto22f144c2017-06-12 14:26:21 -04002570 ValueMapType &VMap = getValueMap();
2571 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04002572 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
2573 auto &GlobalConstArgSet = getGlobalConstArgSet();
2574
2575 FunctionType *FTy = F.getFunctionType();
2576
2577 //
David Neto22f144c2017-06-12 14:26:21 -04002578 // Generate OPFunction.
2579 //
2580
2581 // FOps[0] : Result Type ID
2582 // FOps[1] : Function Control
2583 // FOps[2] : Function Type ID
SJWf93f5f32020-05-05 07:27:56 -05002584 SPIRVOperandVec FOps;
David Neto22f144c2017-06-12 14:26:21 -04002585
2586 // Find SPIRV instruction for return type.
SJW01901d92020-05-21 08:58:31 -05002587 FOps << FTy->getReturnType();
David Neto22f144c2017-06-12 14:26:21 -04002588
2589 // Check function attributes for SPIRV Function Control.
2590 uint32_t FuncControl = spv::FunctionControlMaskNone;
2591 if (F.hasFnAttribute(Attribute::AlwaysInline)) {
2592 FuncControl |= spv::FunctionControlInlineMask;
2593 }
2594 if (F.hasFnAttribute(Attribute::NoInline)) {
2595 FuncControl |= spv::FunctionControlDontInlineMask;
2596 }
2597 // TODO: Check llvm attribute for Function Control Pure.
2598 if (F.hasFnAttribute(Attribute::ReadOnly)) {
2599 FuncControl |= spv::FunctionControlPureMask;
2600 }
2601 // TODO: Check llvm attribute for Function Control Const.
2602 if (F.hasFnAttribute(Attribute::ReadNone)) {
2603 FuncControl |= spv::FunctionControlConstMask;
2604 }
2605
SJW01901d92020-05-21 08:58:31 -05002606 FOps << FuncControl;
David Neto22f144c2017-06-12 14:26:21 -04002607
SJW01901d92020-05-21 08:58:31 -05002608 SPIRVID FTyID;
David Neto22f144c2017-06-12 14:26:21 -04002609 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
2610 SmallVector<Type *, 4> NewFuncParamTys;
2611 FunctionType *NewFTy =
2612 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
SJWf93f5f32020-05-05 07:27:56 -05002613 FTyID = getSPIRVType(NewFTy);
David Neto22f144c2017-06-12 14:26:21 -04002614 } else {
David Neto9ed8e2f2018-03-24 06:47:24 -07002615 // Handle regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04002616 if (GlobalConstFuncTyMap.count(FTy)) {
SJWf93f5f32020-05-05 07:27:56 -05002617 FTyID = getSPIRVType(GlobalConstFuncTyMap[FTy].first);
David Neto22f144c2017-06-12 14:26:21 -04002618 } else {
SJWf93f5f32020-05-05 07:27:56 -05002619 FTyID = getSPIRVType(FTy);
David Neto22f144c2017-06-12 14:26:21 -04002620 }
2621 }
2622
SJW01901d92020-05-21 08:58:31 -05002623 FOps << FTyID;
David Neto22f144c2017-06-12 14:26:21 -04002624
SJWf93f5f32020-05-05 07:27:56 -05002625 // Generate SPIRV instruction for function.
2626 SPIRVID FID = addSPIRVInst(spv::OpFunction, FOps);
2627 VMap[&F] = FID;
David Neto22f144c2017-06-12 14:26:21 -04002628
SJWf93f5f32020-05-05 07:27:56 -05002629 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
2630 EntryPoints.push_back(std::make_pair(&F, FID));
2631 }
David Neto22f144c2017-06-12 14:26:21 -04002632
David Neto482550a2018-03-24 05:21:07 -07002633 if (clspv::Option::ShowIDs()) {
SJW01901d92020-05-21 08:58:31 -05002634 errs() << "Function " << F.getName() << " is " << FID.get() << "\n";
David Netob05675d2018-02-16 12:37:49 -05002635 }
David Neto22f144c2017-06-12 14:26:21 -04002636
2637 //
2638 // Generate OpFunctionParameter for Normal function.
2639 //
David Neto22f144c2017-06-12 14:26:21 -04002640 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
alan-bakere9308012019-03-15 10:25:13 -04002641
David Neto22f144c2017-06-12 14:26:21 -04002642 // Iterate Argument for name instead of param type from function type.
2643 unsigned ArgIdx = 0;
2644 for (Argument &Arg : F.args()) {
David Neto22f144c2017-06-12 14:26:21 -04002645 // ParamOps[0] : Result Type ID
SJW01901d92020-05-21 08:58:31 -05002646 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002647
2648 // Find SPIRV instruction for parameter type.
SJW01901d92020-05-21 08:58:31 -05002649 SPIRVID ParamTyID = getSPIRVType(Arg.getType());
David Neto22f144c2017-06-12 14:26:21 -04002650 if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
2651 if (GlobalConstFuncTyMap.count(FTy)) {
2652 if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
2653 Type *EleTy = PTy->getPointerElementType();
2654 Type *ArgTy =
2655 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
SJWf93f5f32020-05-05 07:27:56 -05002656 ParamTyID = getSPIRVType(ArgTy);
David Neto22f144c2017-06-12 14:26:21 -04002657 GlobalConstArgSet.insert(&Arg);
2658 }
2659 }
2660 }
SJW01901d92020-05-21 08:58:31 -05002661 Ops << ParamTyID;
David Neto22f144c2017-06-12 14:26:21 -04002662
2663 // Generate SPIRV instruction for parameter.
SJW01901d92020-05-21 08:58:31 -05002664 SPIRVID param_id = addSPIRVInst(spv::OpFunctionParameter, Ops);
SJWf93f5f32020-05-05 07:27:56 -05002665 VMap[&Arg] = param_id;
2666
2667 if (CalledWithCoherentResource(Arg)) {
2668 // If the arg is passed a coherent resource ever, then decorate this
2669 // parameter with Coherent too.
SJW01901d92020-05-21 08:58:31 -05002670 Ops.clear();
2671 Ops << param_id << spv::DecorationCoherent;
2672 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
SJWf93f5f32020-05-05 07:27:56 -05002673 }
David Neto22f144c2017-06-12 14:26:21 -04002674
2675 ArgIdx++;
2676 }
2677 }
2678}
2679
// Emits the module-level sections of the SPIR-V binary, in the order the
// SPIR-V spec requires for a module: capabilities, extensions, the memory
// model, entry points, execution modes, debug (OpSource) info, and finally
// the decorations for the workgroup-size spec constants (if any).
// Relies on state accumulated earlier in the pass (CapabilitySet,
// EntryPoints, BuiltinDimVec, variable-pointer flags).
void SPIRVProducerPass::GenerateModuleInfo() {
  EntryPointVecType &EntryPoints = getEntryPointVec();
  auto &EntryPointInterfaces = getEntryPointInterfacesList();
  std::vector<SPIRVID> &BuiltinDimVec = getBuiltinDimVec();

  SPIRVOperandVec Ops;

  for (auto Capability : CapabilitySet) {
    //
    // Generate OpCapability
    //
    // Ops[0] = Capability
    addSPIRVInst<kCapabilities>(spv::OpCapability, Capability);
  }

  // Always add the storage buffer extension
  {
    //
    // Generate OpExtension.
    //
    // Ops[0] = Name (Literal String)
    //
    addSPIRVInst<kExtensions>(spv::OpExtension,
                              "SPV_KHR_storage_buffer_storage_class");
  }

  if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
    //
    // Generate OpExtension.
    //
    // Ops[0] = Name (Literal String)
    //
    addSPIRVInst<kExtensions>(spv::OpExtension, "SPV_KHR_variable_pointers");
  }

  //
  // Generate OpMemoryModel
  //
  // Memory model for Vulkan will always be GLSL450.

  // Ops[0] = Addressing Model
  // Ops[1] = Memory Model
  Ops.clear();
  Ops << spv::AddressingModelLogical << spv::MemoryModelGLSL450;

  addSPIRVInst<kMemoryModel>(spv::OpMemoryModel, Ops);

  //
  // Generate OpEntryPoint
  //
  // One OpEntryPoint per kernel; every entry point lists the same
  // interface variables collected in EntryPointInterfaces.
  for (auto EntryPoint : EntryPoints) {
    // Ops[0] = Execution Model
    // Ops[1] = EntryPoint ID
    // Ops[2] = Name (Literal String)
    // ...
    //
    // TODO: Do we need to consider Interface ID for forward references???
    Ops.clear();
    const StringRef &name = EntryPoint.first->getName();
    Ops << spv::ExecutionModelGLCompute << EntryPoint.second << name;

    for (auto &Interface : EntryPointInterfaces) {
      Ops << Interface;
    }

    addSPIRVInst<kEntryPoints>(spv::OpEntryPoint, Ops);
  }

  // Only emit a literal LocalSize execution mode when the workgroup size is
  // not expressed via spec constants (BuiltinDimVec empty) and the kernel
  // carries a reqd_work_group_size attribute.
  if (BuiltinDimVec.empty()) {
    for (auto EntryPoint : EntryPoints) {
      const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
                             ->getMetadata("reqd_work_group_size");
      if ((MD != nullptr) && !clspv::Option::NonUniformNDRangeSupported()) {
        //
        // Generate OpExecutionMode
        //

        // Ops[0] = Entry Point ID
        // Ops[1] = Execution Mode
        // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
        Ops.clear();
        Ops << EntryPoint.second << spv::ExecutionModeLocalSize;

        uint32_t XDim = static_cast<uint32_t>(
            mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
        uint32_t YDim = static_cast<uint32_t>(
            mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
        uint32_t ZDim = static_cast<uint32_t>(
            mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());

        Ops << XDim << YDim << ZDim;

        addSPIRVInst<kExecutionModes>(spv::OpExecutionMode, Ops);
      }
    }
  }

  //
  // Generate OpSource.
  //
  // Ops[0] = SourceLanguage ID
  // Ops[1] = Version (LiteralNum)
  //
  // Map the clspv source-language option onto the SPIR-V SourceLanguage
  // enum plus an encoded version number (e.g. OpenCL C 1.2 -> 120).
  uint32_t LangID = spv::SourceLanguageUnknown;
  uint32_t LangVer = 0;
  switch (clspv::Option::Language()) {
  case clspv::Option::SourceLanguage::OpenCL_C_10:
    LangID = spv::SourceLanguageOpenCL_C;
    LangVer = 100;
    break;
  case clspv::Option::SourceLanguage::OpenCL_C_11:
    LangID = spv::SourceLanguageOpenCL_C;
    LangVer = 110;
    break;
  case clspv::Option::SourceLanguage::OpenCL_C_12:
    LangID = spv::SourceLanguageOpenCL_C;
    LangVer = 120;
    break;
  case clspv::Option::SourceLanguage::OpenCL_C_20:
    LangID = spv::SourceLanguageOpenCL_C;
    LangVer = 200;
    break;
  case clspv::Option::SourceLanguage::OpenCL_C_30:
    LangID = spv::SourceLanguageOpenCL_C;
    LangVer = 300;
    break;
  case clspv::Option::SourceLanguage::OpenCL_CPP:
    LangID = spv::SourceLanguageOpenCL_CPP;
    LangVer = 100;
    break;
  default:
    break;
  }

  Ops.clear();
  Ops << LangID << LangVer;
  addSPIRVInst<kDebug>(spv::OpSource, Ops);

  if (!BuiltinDimVec.empty()) {
    //
    // Generate OpDecorates for x/y/z dimension.
    //
    // Ops[0] = Target ID
    // Ops[1] = Decoration (SpecId)
    // Ops[2] = Specialization Constant ID (Literal Number)

    // X Dimension
    Ops.clear();
    Ops << BuiltinDimVec[0] << spv::DecorationSpecId << 0;
    addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);

    // Y Dimension
    Ops.clear();
    Ops << BuiltinDimVec[1] << spv::DecorationSpecId << 1;
    addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);

    // Z Dimension
    Ops.clear();
    Ops << BuiltinDimVec[2] << spv::DecorationSpecId << 2;
    addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
  }
}
2842
David Netob6e2e062018-04-25 10:32:06 -04002843void SPIRVProducerPass::GenerateEntryPointInitialStores() {
2844 // Work around a driver bug. Initializers on Private variables might not
2845 // work. So the start of the kernel should store the initializer value to the
2846 // variables. Yes, *every* entry point pays this cost if *any* entry point
2847 // uses this builtin. At this point I judge this to be an acceptable tradeoff
2848 // of complexity vs. runtime, for a broken driver.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002849 // TODO(dneto): Remove this at some point once fixed drivers are widely
2850 // available.
SJW01901d92020-05-21 08:58:31 -05002851 if (WorkgroupSizeVarID.isValid()) {
2852 assert(WorkgroupSizeValueID.isValid());
David Netob6e2e062018-04-25 10:32:06 -04002853
SJWf93f5f32020-05-05 07:27:56 -05002854 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05002855 Ops << WorkgroupSizeVarID << WorkgroupSizeValueID;
David Netob6e2e062018-04-25 10:32:06 -04002856
SJWf93f5f32020-05-05 07:27:56 -05002857 addSPIRVInst(spv::OpStore, Ops);
David Netob6e2e062018-04-25 10:32:06 -04002858 }
2859}
2860
// Emits the SPIR-V body of function F, one OpLabel per basic block followed
// by that block's instructions. Ordering constraints handled here:
//  * all OpVariable (alloca) instructions are emitted first in each block,
//    as SPIR-V requires them at the start of the entry block;
//  * the workaround initial stores (see GenerateEntryPointInitialStores)
//    are emitted right after the allocas of a kernel's entry block.
void SPIRVProducerPass::GenerateFuncBody(Function &F) {
  ValueMapType &VMap = getValueMap();

  const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;

  for (BasicBlock &BB : F) {
    // Register BasicBlock to ValueMap.

    //
    // Generate OpLabel for Basic Block.
    //
    VMap[&BB] = addSPIRVInst(spv::OpLabel);

    // OpVariable instructions must come first.
    for (Instruction &I : BB) {
      if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
        // Allocating a pointer requires variable pointers.
        if (alloca->getAllocatedType()->isPointerTy()) {
          setVariablePointersCapabilities(
              alloca->getAllocatedType()->getPointerAddressSpace());
        }
        GenerateInstruction(I);
      }
    }

    // For a kernel's entry block, optionally emit the driver-bug workaround
    // stores before any other real instructions.
    if (&BB == &F.getEntryBlock() && IsKernel) {
      if (clspv::Option::HackInitializers()) {
        GenerateEntryPointInitialStores();
      }
    }

    // Now emit everything that is not an alloca (already handled above).
    for (Instruction &I : BB) {
      if (!isa<AllocaInst>(I)) {
        GenerateInstruction(I);
      }
    }
  }
}
2899
2900spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
2901 const std::map<CmpInst::Predicate, spv::Op> Map = {
2902 {CmpInst::ICMP_EQ, spv::OpIEqual},
2903 {CmpInst::ICMP_NE, spv::OpINotEqual},
2904 {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
2905 {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
2906 {CmpInst::ICMP_ULT, spv::OpULessThan},
2907 {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
2908 {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
2909 {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
2910 {CmpInst::ICMP_SLT, spv::OpSLessThan},
2911 {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
2912 {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
2913 {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
2914 {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
2915 {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
2916 {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
2917 {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
2918 {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
2919 {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
2920 {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
2921 {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
2922 {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
2923 {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};
2924
2925 assert(0 != Map.count(I->getPredicate()));
2926
2927 return Map.at(I->getPredicate());
2928}
2929
2930spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
2931 const std::map<unsigned, spv::Op> Map{
2932 {Instruction::Trunc, spv::OpUConvert},
2933 {Instruction::ZExt, spv::OpUConvert},
2934 {Instruction::SExt, spv::OpSConvert},
2935 {Instruction::FPToUI, spv::OpConvertFToU},
2936 {Instruction::FPToSI, spv::OpConvertFToS},
2937 {Instruction::UIToFP, spv::OpConvertUToF},
2938 {Instruction::SIToFP, spv::OpConvertSToF},
2939 {Instruction::FPTrunc, spv::OpFConvert},
2940 {Instruction::FPExt, spv::OpFConvert},
2941 {Instruction::BitCast, spv::OpBitcast}};
2942
2943 assert(0 != Map.count(I.getOpcode()));
2944
2945 return Map.at(I.getOpcode());
2946}
2947
2948spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
Kévin Petit24272b62018-10-18 19:16:12 +00002949 if (I.getType()->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04002950 switch (I.getOpcode()) {
2951 default:
2952 break;
2953 case Instruction::Or:
2954 return spv::OpLogicalOr;
2955 case Instruction::And:
2956 return spv::OpLogicalAnd;
2957 case Instruction::Xor:
2958 return spv::OpLogicalNotEqual;
2959 }
2960 }
2961
alan-bakerb6b09dc2018-11-08 16:59:28 -05002962 const std::map<unsigned, spv::Op> Map{
David Neto22f144c2017-06-12 14:26:21 -04002963 {Instruction::Add, spv::OpIAdd},
2964 {Instruction::FAdd, spv::OpFAdd},
2965 {Instruction::Sub, spv::OpISub},
2966 {Instruction::FSub, spv::OpFSub},
2967 {Instruction::Mul, spv::OpIMul},
2968 {Instruction::FMul, spv::OpFMul},
2969 {Instruction::UDiv, spv::OpUDiv},
2970 {Instruction::SDiv, spv::OpSDiv},
2971 {Instruction::FDiv, spv::OpFDiv},
2972 {Instruction::URem, spv::OpUMod},
2973 {Instruction::SRem, spv::OpSRem},
2974 {Instruction::FRem, spv::OpFRem},
2975 {Instruction::Or, spv::OpBitwiseOr},
2976 {Instruction::Xor, spv::OpBitwiseXor},
2977 {Instruction::And, spv::OpBitwiseAnd},
2978 {Instruction::Shl, spv::OpShiftLeftLogical},
2979 {Instruction::LShr, spv::OpShiftRightLogical},
2980 {Instruction::AShr, spv::OpShiftRightArithmetic}};
2981
2982 assert(0 != Map.count(I.getOpcode()));
2983
2984 return Map.at(I.getOpcode());
2985}
2986
// Returns the id of the Input-storage-class variable decorated with the
// given SPIR-V BuiltIn, creating it (and recording the required capability)
// on first use. Results are cached in BuiltinConstantMap so each builtin
// variable is created at most once.
SPIRVID SPIRVProducerPass::getSPIRVBuiltin(spv::BuiltIn BID,
                                           spv::Capability Cap) {
  SPIRVID RID;

  auto ii = BuiltinConstantMap.find(BID);

  if (ii != BuiltinConstantMap.end()) {
    // Already generated: reuse the cached variable id.
    return ii->second;
  } else {
    addCapability(Cap);

    // The builtin is exposed as a pointer-to-uint in the Input storage class.
    Type *type = PointerType::get(IntegerType::get(module->getContext(), 32),
                                  AddressSpace::Input);

    RID = addSPIRVGlobalVariable(getSPIRVType(type), spv::StorageClassInput);

    BuiltinConstantMap[BID] = RID;

    //
    // Generate OpDecorate.
    //
    // Ops[0] : target
    // Ops[1] : decoration (BuiltIn)
    // Ops[2] : the BuiltIn enumerant being attached
    SPIRVOperandVec Ops;
    Ops << RID << spv::DecorationBuiltIn << static_cast<int>(BID);

    addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
  }

  return RID;
}
3019
// Lowers a call to one of the clspv-internal builtins (clspv.* / spirv.*
// intrinsics inserted by earlier passes) into SPIR-V. Returns the result id,
// or an invalid SPIRVID when the call produces no instruction (e.g. a
// resource call that maps straight onto an existing OpVariable).
SPIRVID
SPIRVProducerPass::GenerateClspvInstruction(CallInst *Call,
                                            const FunctionInfo &FuncInfo) {
  SPIRVID RID;

  switch (FuncInfo.getType()) {
  case Builtins::kClspvCompositeConstruct:
    // Deferred: a placeholder is patched later with the real composite.
    RID = addSPIRVPlaceholder(Call);
    break;
  case Builtins::kClspvResource: {
    if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
      // Generate an OpLoad
      SPIRVOperandVec Ops;

      Ops << Call->getType()->getPointerElementType()
          << ResourceVarDeferredLoadCalls[Call];

      RID = addSPIRVInst(spv::OpLoad, Ops);

    } else {
      // This maps to an OpVariable we've already generated.
      // No code is generated for the call.
    }
    break;
  }
  case Builtins::kClspvLocal: {
    // Don't codegen an instruction here, but instead map this call directly
    // to the workgroup variable id.
    int spec_id = static_cast<int>(
        cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
    const auto &info = LocalSpecIdInfoMap[spec_id];
    RID = info.variable_id;
    break;
  }
  case Builtins::kClspvSamplerVarLiteral: {
    // Sampler initializers become a load of the corresponding sampler.
    // Map this to a load from the variable.
    const auto third_param = static_cast<unsigned>(
        dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue());
    auto sampler_value = third_param;
    if (clspv::Option::UseSamplerMap()) {
      // Translate the literal through the user-provided sampler map.
      sampler_value = getSamplerMap()[third_param].first;
    }

    // Generate an OpLoad
    SPIRVOperandVec Ops;

    Ops << SamplerTy->getPointerElementType()
        << SamplerLiteralToIDMap[sampler_value];

    RID = addSPIRVInst(spv::OpLoad, Ops);
    break;
  }
  case Builtins::kSpirvAtomicXor: {
    // Handle SPIR-V intrinsics
    SPIRVOperandVec Ops;

    if (!Call->getType()->isVoidTy()) {
      Ops << Call->getType();
    }

    // All call arguments are forwarded directly as operands.
    for (unsigned i = 0; i < Call->getNumArgOperands(); i++) {
      Ops << Call->getArgOperand(i);
    }

    RID = addSPIRVInst(spv::OpAtomicXor, Ops);
    break;
  }
  case Builtins::kSpirvOp: {
    // Handle SPIR-V intrinsics
    // Arg 0 encodes the raw SPIR-V opcode; the rest become its operands.
    auto *arg0 = dyn_cast<ConstantInt>(Call->getArgOperand(0));
    spv::Op opcode = static_cast<spv::Op>(arg0->getZExtValue());
    if (opcode != spv::OpNop) {
      SPIRVOperandVec Ops;

      if (!Call->getType()->isVoidTy()) {
        Ops << Call->getType();
      }

      for (unsigned i = 1; i < Call->getNumArgOperands(); i++) {
        Ops << Call->getArgOperand(i);
      }

      RID = addSPIRVInst(opcode, Ops);
    }
    break;
  }
  case Builtins::kSpirvCopyMemory: {
    //
    // Generate OpCopyMemory.
    //

    // Ops[0] = Dst ID
    // Ops[1] = Src ID
    // Ops[2] = Memory Access
    // Ops[3] = Alignment

    // Arg 3 is the volatile flag; it folds into the memory-access mask.
    auto IsVolatile =
        dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue() != 0;

    auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
                                             : spv::MemoryAccessMaskNone;

    auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;

    auto Alignment =
        dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();

    SPIRVOperandVec Ops;
    Ops << Call->getArgOperand(0) << Call->getArgOperand(1) << MemoryAccess
        << static_cast<uint32_t>(Alignment);

    RID = addSPIRVInst(spv::OpCopyMemory, Ops);
    break;
  }
  default:
    llvm_unreachable("Unknown CLSPV Instruction");
    break;
  }
  return RID;
}
3141
// Lowers an OpenCL image builtin call (read_image*, write_image*,
// get_image_*) to SPIR-V image instructions. read_image lowers three ways
// depending on the image argument: via sampler -> OpSampledImage +
// OpImageSampleExplicitLod, storage image -> OpImageRead, sampled image
// without sampler -> OpImageFetch. Integer image reads/writes go through a
// v4int bitcast because the image type is declared with a signed component
// type. Returns the id of the final generated instruction.
SPIRVID
SPIRVProducerPass::GenerateImageInstruction(CallInst *Call,
                                            const FunctionInfo &FuncInfo) {
  SPIRVID RID;

  LLVMContext &Context = module->getContext();
  switch (FuncInfo.getType()) {
  case Builtins::kReadImagef:
  case Builtins::kReadImageh:
  case Builtins::kReadImagei:
  case Builtins::kReadImageui: {
    // read_image is converted to OpSampledImage and OpImageSampleExplicitLod.
    // Additionally, OpTypeSampledImage is generated.
    const auto image_ty = Call->getArgOperand(0)->getType();
    const auto &pi = FuncInfo.getParameter(1);
    if (pi.isSampler()) {
      //
      // Generate OpSampledImage.
      //
      // Ops[0] = Result Type ID
      // Ops[1] = Image ID
      // Ops[2] = Sampler ID
      //
      SPIRVOperandVec Ops;

      Value *Image = Call->getArgOperand(0);
      Value *Sampler = Call->getArgOperand(1);
      Value *Coordinate = Call->getArgOperand(2);

      TypeMapType &OpImageTypeMap = getImageTypeMap();
      Type *ImageTy = Image->getType()->getPointerElementType();
      SPIRVID ImageTyID = OpImageTypeMap[ImageTy];

      Ops << ImageTyID << Image << Sampler;

      SPIRVID SampledImageID = addSPIRVInst(spv::OpSampledImage, Ops);

      //
      // Generate OpImageSampleExplicitLod.
      //
      // Ops[0] = Result Type ID
      // Ops[1] = Sampled Image ID
      // Ops[2] = Coordinate ID
      // Ops[3] = Image Operands Type ID
      // Ops[4] ... Ops[n] = Operands ID
      //
      Ops.clear();

      // Integer images sample as v4int32 and are bitcast to the call's
      // (possibly unsigned) result type afterwards.
      const bool is_int_image = IsIntImageType(Image->getType());
      SPIRVID result_type;
      if (is_int_image) {
        result_type = v4int32ID;
      } else {
        result_type = getSPIRVType(Call->getType());
      }

      // Always sample at LOD 0.
      Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
      Ops << result_type << SampledImageID << Coordinate
          << spv::ImageOperandsLodMask << CstFP0;

      RID = addSPIRVInst(spv::OpImageSampleExplicitLod, Ops);

      if (is_int_image) {
        // Generate the bitcast.
        Ops.clear();
        Ops << Call->getType() << RID;
        RID = addSPIRVInst(spv::OpBitcast, Ops);
      }
    } else if (IsStorageImageType(image_ty)) {
      // read_image on a storage image is mapped to OpImageRead.
      Value *Image = Call->getArgOperand(0);
      Value *Coordinate = Call->getArgOperand(1);

      //
      // Generate OpImageRead
      //
      // Ops[0] = Result Type ID
      // Ops[1] = Image ID
      // Ops[2] = Coordinate
      // No optional image operands.
      //
      SPIRVOperandVec Ops;

      const bool is_int_image = IsIntImageType(Image->getType());
      SPIRVID result_type;
      if (is_int_image) {
        result_type = v4int32ID;
      } else {
        result_type = getSPIRVType(Call->getType());
      }

      Ops << result_type << Image << Coordinate;
      RID = addSPIRVInst(spv::OpImageRead, Ops);

      if (is_int_image) {
        // Generate the bitcast.
        Ops.clear();
        Ops << Call->getType() << RID;
        RID = addSPIRVInst(spv::OpBitcast, Ops);
      }

      // OpImageRead requires StorageImageReadWithoutFormat.
      addCapability(spv::CapabilityStorageImageReadWithoutFormat);
    } else {
      // read_image on a sampled image (without a sampler) is mapped to
      // OpImageFetch.
      Value *Image = Call->getArgOperand(0);
      Value *Coordinate = Call->getArgOperand(1);

      //
      // Generate OpImageFetch
      //
      // Ops[0] = Result Type ID
      // Ops[1] = Image ID
      // Ops[2] = Coordinate ID
      // Ops[3] = Lod
      // Ops[4] = 0
      //
      SPIRVOperandVec Ops;

      const bool is_int_image = IsIntImageType(Image->getType());
      SPIRVID result_type;
      if (is_int_image) {
        result_type = v4int32ID;
      } else {
        result_type = getSPIRVType(Call->getType());
      }

      // Fetch from LOD 0.
      Ops << result_type << Image << Coordinate << spv::ImageOperandsLodMask
          << getSPIRVInt32Constant(0);

      RID = addSPIRVInst(spv::OpImageFetch, Ops);

      if (is_int_image) {
        // Generate the bitcast.
        Ops.clear();
        Ops << Call->getType() << RID;
        RID = addSPIRVInst(spv::OpBitcast, Ops);
      }
    }
    break;
  }

  case Builtins::kWriteImagef:
  case Builtins::kWriteImageh:
  case Builtins::kWriteImagei:
  case Builtins::kWriteImageui: {
    // write_image is mapped to OpImageWrite.
    //
    // Generate OpImageWrite.
    //
    // Ops[0] = Image ID
    // Ops[1] = Coordinate ID
    // Ops[2] = Texel ID
    // Ops[3] = (Optional) Image Operands Type (Literal Number)
    // Ops[4] ... Ops[n] = (Optional) Operands ID
    //
    SPIRVOperandVec Ops;

    Value *Image = Call->getArgOperand(0);
    Value *Coordinate = Call->getArgOperand(1);
    Value *Texel = Call->getArgOperand(2);

    SPIRVID TexelID = getSPIRVValue(Texel);

    const bool is_int_image = IsIntImageType(Image->getType());
    if (is_int_image) {
      // Generate a bitcast to v4int and use it as the texel value.
      Ops << v4int32ID << TexelID;
      TexelID = addSPIRVInst(spv::OpBitcast, Ops);
      Ops.clear();
    }
    Ops << Image << Coordinate << TexelID;
    RID = addSPIRVInst(spv::OpImageWrite, Ops);

    // Image writes require StorageImageWriteWithoutFormat.
    addCapability(spv::CapabilityStorageImageWriteWithoutFormat);
    break;
  }

  case Builtins::kGetImageHeight:
  case Builtins::kGetImageWidth:
  case Builtins::kGetImageDepth:
  case Builtins::kGetImageDim: {
    // get_image_* is mapped to OpImageQuerySize or OpImageQuerySizeLod
    addCapability(spv::CapabilityImageQuery);

    //
    // Generate OpImageQuerySize[Lod]
    //
    // Ops[0] = Image ID
    //
    // Result type has components equal to the dimensionality of the image,
    // plus 1 if the image is arrayed.
    //
    // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
    SPIRVOperandVec Ops;

    // Implement:
    //     %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
    SPIRVID SizesTypeID;

    Value *Image = Call->getArgOperand(0);
    const uint32_t dim = ImageDimensionality(Image->getType());
    const uint32_t components =
        dim + (IsArrayImageType(Image->getType()) ? 1 : 0);
    if (components == 1) {
      SizesTypeID = getSPIRVType(Type::getInt32Ty(Context));
    } else {
      SizesTypeID = getSPIRVType(
          FixedVectorType::get(Type::getInt32Ty(Context), components));
    }
    Ops << SizesTypeID << Image;
    spv::Op query_opcode = spv::OpImageQuerySize;
    if (IsSampledImageType(Image->getType())) {
      query_opcode = spv::OpImageQuerySizeLod;
      // Need explicit 0 for Lod operand.
      Ops << getSPIRVInt32Constant(0);
    }

    RID = addSPIRVInst(query_opcode, Ops);

    // May require an extra instruction to create the appropriate result of
    // the builtin function.
    if (FuncInfo.getType() == Builtins::kGetImageDim) {
      if (dim == 3) {
        // get_image_dim returns an int4 for 3D images.
        //

        // Implement:
        //   %result = OpCompositeConstruct %uint4 %sizes %uint_0
        Ops.clear();
        Ops << FixedVectorType::get(Type::getInt32Ty(Context), 4) << RID
            << getSPIRVInt32Constant(0);

        RID = addSPIRVInst(spv::OpCompositeConstruct, Ops);
      } else if (dim != components) {
        // get_image_dim return an int2 regardless of the arrayedness of the
        // image. If the image is arrayed an element must be dropped from the
        // query result.
        //

        // Implement:
        //   %result = OpVectorShuffle %uint2 %sizes %sizes 0 1
        Ops.clear();
        Ops << FixedVectorType::get(Type::getInt32Ty(Context), 2) << RID << RID
            << 0 << 1;

        RID = addSPIRVInst(spv::OpVectorShuffle, Ops);
      }
    } else if (components > 1) {
      // get_image_width/height/depth extract a single component of the
      // queried size vector.
      // Implement:
      //     %result = OpCompositeExtract %uint %sizes <component number>
      Ops.clear();
      Ops << Call->getType() << RID;

      uint32_t component = 0;
      if (FuncInfo.getType() == Builtins::kGetImageHeight)
        component = 1;
      else if (FuncInfo.getType() == Builtins::kGetImageDepth)
        component = 2;
      Ops << component;

      RID = addSPIRVInst(spv::OpCompositeExtract, Ops);
    }
    break;
  }
  default:
    llvm_unreachable("Unsupported Image builtin");
  }

  return RID;
}
3415
// Lowers an OpenCL sub_group builtin call to the SPIR-V GroupNonUniform
// instruction set (requires SPIR-V 1.3+). Query builtins return early with
// a load of the corresponding builtin variable; the remaining cases select
// an opcode (and required capability), then assemble the operands below.
// Unsupported sub_group builtins abort via llvm_unreachable.
SPIRVID
SPIRVProducerPass::GenerateSubgroupInstruction(CallInst *Call,
                                               const FunctionInfo &FuncInfo) {
  SPIRVID RID;

  // requires SPIRV version 1.3 or greater
  if (SpvVersion() != SPIRVVersion::SPIRV_1_3) {
    // llvm_unreachable("SubGroups extension requires SPIRV 1.3 or greater");
    // TODO(sjw): error out gracefully
  }

  // Helper: load the Input-class builtin variable for a query builtin,
  // recording the capability it requires.
  auto loadBuiltin = [this, Call](spv::BuiltIn spvBI,
                                  spv::Capability spvCap =
                                      spv::CapabilityGroupNonUniform) {
    SPIRVOperandVec Ops;
    Ops << Call->getType() << this->getSPIRVBuiltin(spvBI, spvCap);

    return addSPIRVInst(spv::OpLoad, Ops);
  };

  spv::Op op = spv::OpNop;
  switch (FuncInfo.getType()) {
  case Builtins::kGetSubGroupSize:
    return loadBuiltin(spv::BuiltInSubgroupSize);
  case Builtins::kGetNumSubGroups:
    return loadBuiltin(spv::BuiltInNumSubgroups);
  case Builtins::kGetSubGroupId:
    return loadBuiltin(spv::BuiltInSubgroupId);
  case Builtins::kGetSubGroupLocalId:
    return loadBuiltin(spv::BuiltInSubgroupLocalInvocationId);

  case Builtins::kSubGroupBroadcast:
    // Before SPIR-V 1.5 the lane id operand must be constant.
    if (SpvVersion() < SPIRVVersion::SPIRV_1_5 &&
        !dyn_cast<ConstantInt>(Call->getOperand(1))) {
      llvm_unreachable("sub_group_broadcast requires constant lane Id for "
                       "SPIRV version < 1.5");
    }
    addCapability(spv::CapabilityGroupNonUniformBallot);
    op = spv::OpGroupNonUniformBroadcast;
    break;

  case Builtins::kSubGroupAll:
    addCapability(spv::CapabilityGroupNonUniformVote);
    op = spv::OpGroupNonUniformAll;
    break;
  case Builtins::kSubGroupAny:
    addCapability(spv::CapabilityGroupNonUniformVote);
    op = spv::OpGroupNonUniformAny;
    break;
  case Builtins::kSubGroupReduceAdd:
  case Builtins::kSubGroupScanExclusiveAdd:
  case Builtins::kSubGroupScanInclusiveAdd: {
    // Integer vs floating add chosen from the first parameter's type.
    addCapability(spv::CapabilityGroupNonUniformArithmetic);
    if (FuncInfo.getParameter(0).type_id == Type::IntegerTyID) {
      op = spv::OpGroupNonUniformIAdd;
    } else {
      op = spv::OpGroupNonUniformFAdd;
    }
    break;
  }
  case Builtins::kSubGroupReduceMin:
  case Builtins::kSubGroupScanExclusiveMin:
  case Builtins::kSubGroupScanInclusiveMin: {
    // Min chooses signed/unsigned/float variant from the parameter type.
    addCapability(spv::CapabilityGroupNonUniformArithmetic);
    auto &param = FuncInfo.getParameter(0);
    if (param.type_id == Type::IntegerTyID) {
      op = param.is_signed ? spv::OpGroupNonUniformSMin
                           : spv::OpGroupNonUniformUMin;
    } else {
      op = spv::OpGroupNonUniformFMin;
    }
    break;
  }
  case Builtins::kSubGroupReduceMax:
  case Builtins::kSubGroupScanExclusiveMax:
  case Builtins::kSubGroupScanInclusiveMax: {
    // Max chooses signed/unsigned/float variant from the parameter type.
    addCapability(spv::CapabilityGroupNonUniformArithmetic);
    auto &param = FuncInfo.getParameter(0);
    if (param.type_id == Type::IntegerTyID) {
      op = param.is_signed ? spv::OpGroupNonUniformSMax
                           : spv::OpGroupNonUniformUMax;
    } else {
      op = spv::OpGroupNonUniformFMax;
    }
    break;
  }

  case Builtins::kGetEnqueuedNumSubGroups:
    // TODO(sjw): requires CapabilityKernel (incompatible with Shader)
  case Builtins::kGetMaxSubGroupSize:
    // TODO(sjw): use SpecConstant, capability Kernel (incompatible with Shader)
  case Builtins::kSubGroupBarrier:
  case Builtins::kSubGroupReserveReadPipe:
  case Builtins::kSubGroupReserveWritePipe:
  case Builtins::kSubGroupCommitReadPipe:
  case Builtins::kSubGroupCommitWritePipe:
  case Builtins::kGetKernelSubGroupCountForNdrange:
  case Builtins::kGetKernelMaxSubGroupSizeForNdrange:
  default:
    Call->print(errs());
    llvm_unreachable("Unsupported sub_group operation");
    break;
  }

  assert(op != spv::OpNop);

  SPIRVOperandVec Operands;

  //
  // Generate OpGroupNonUniform*
  //
  // Ops[0] = Result Type ID
  // Ops[1] = ScopeSubgroup
  // Ops[2] = Value ID
  // Ops[3] = Local ID

  // The result type.
  Operands << Call->getType();

  // Subgroup Scope
  Operands << getSPIRVInt32Constant(spv::ScopeSubgroup);

  // Reductions and scans carry an extra GroupOperation operand.
  switch (FuncInfo.getType()) {
  case Builtins::kSubGroupReduceAdd:
  case Builtins::kSubGroupReduceMin:
  case Builtins::kSubGroupReduceMax:
    Operands << spv::GroupOperationReduce;
    break;
  case Builtins::kSubGroupScanExclusiveAdd:
  case Builtins::kSubGroupScanExclusiveMin:
  case Builtins::kSubGroupScanExclusiveMax:
    Operands << spv::GroupOperationExclusiveScan;
    break;
  case Builtins::kSubGroupScanInclusiveAdd:
  case Builtins::kSubGroupScanInclusiveMin:
  case Builtins::kSubGroupScanInclusiveMax:
    Operands << spv::GroupOperationInclusiveScan;
    break;
  default:
    break;
  }

  // Forward the call's arguments as the value (and optional lane) operands.
  for (Use &use : Call->arg_operands()) {
    Operands << use.get();
  }

  return addSPIRVInst(op, Operands);
}
3564
// Translates a call instruction into SPIR-V and returns the result id of the
// last instruction generated (an invalid SPIRVID if nothing applies).
//
// Dispatch order:
//   1. Clspv-internal, image, and subgroup builtins are handed off to their
//      dedicated generators.
//   2. Recognized LLVM intrinsics (llvm.ctlz / llvm.cttz) are lowered to
//      GLSL.std.450 extended-instruction sequences.
//   3. Builtins with a direct or indirect GLSL extended-instruction mapping
//      become OpExtInst (plus a fix-up multiply for the *pi variants).
//   4. Anything else becomes a deferred placeholder for a real
//      OpFunctionCall, resolved once the callee's function ID is known.
SPIRVID SPIRVProducerPass::GenerateInstructionFromCall(CallInst *Call) {
  LLVMContext &Context = module->getContext();

  // Classify the callee by its demangled builtin name.
  auto &func_info = Builtins::Lookup(Call->getCalledFunction());
  auto func_type = func_info.getType();

  // Builtin groups with dedicated generators are dispatched first.
  if (BUILTIN_IN_GROUP(func_type, Clspv)) {
    return GenerateClspvInstruction(Call, func_info);
  } else if (BUILTIN_IN_GROUP(func_type, Image)) {
    return GenerateImageInstruction(Call, func_info);
  } else if (BUILTIN_IN_GROUP(func_type, SubgroupsKHR)) {
    return GenerateSubgroupInstruction(Call, func_info);
  }

  SPIRVID RID;

  // Handle LLVM intrinsics that have no direct SPIR-V opcode and must be
  // open-coded. These paths return directly.
  switch (Call->getCalledFunction()->getIntrinsicID()) {
  case Intrinsic::ctlz: {
    // Implement as 31 - FindUMsb. Ignore the second operand of llvm.ctlz
    // (the "is zero poison" flag).
    SPIRVOperandVec Ops;
    Ops << Call->getType() << getOpExtInstImportID()
        << glsl::ExtInst::ExtInstFindUMsb << Call->getArgOperand(0);
    auto find_msb = addSPIRVInst(spv::OpExtInst, Ops);

    // "thirty_one" is really bit-width - 1, so this also works for
    // non-32-bit integer types (splatted across vector elements).
    Constant *thirty_one = ConstantInt::get(
        Call->getType(), Call->getType()->getScalarSizeInBits() - 1);
    Ops.clear();
    Ops << Call->getType() << thirty_one << find_msb;
    return addSPIRVInst(spv::OpISub, Ops);
  }
  case Intrinsic::cttz: {
    // Implement as:
    // lsb = FindILsb x
    // res = lsb == -1 ? width : lsb
    //
    // FindILsb returns -1 when the input is zero, but llvm.cttz must return
    // the bit width in that case; the OpSelect below patches that up.
    // Ignore the second operand of llvm.cttz.
    SPIRVOperandVec Ops;
    Ops << Call->getType() << getOpExtInstImportID()
        << glsl::ExtInst::ExtInstFindILsb << Call->getArgOperand(0);
    auto find_lsb = addSPIRVInst(spv::OpExtInst, Ops);

    auto neg_one = Constant::getAllOnesValue(Call->getType());
    // i1 (or vector-of-i1) type for the comparison result.
    auto i1_ty = Call->getType()->getWithNewBitWidth(1);
    auto width = ConstantInt::get(Call->getType(),
                                  Call->getType()->getScalarSizeInBits());

    Ops.clear();
    Ops << i1_ty << find_lsb << neg_one;
    auto cmp = addSPIRVInst(spv::OpIEqual, Ops);

    Ops.clear();
    Ops << Call->getType() << cmp << width << find_lsb;
    return addSPIRVInst(spv::OpSelect, Ops);
  }

  default:
    break;
  }

  // Remaining builtins, keyed on the demangled builtin kind.
  switch (func_type) {
  case Builtins::kPopcount: {
    //
    // Generate OpBitCount
    //
    // Ops[0] = Result Type ID
    // Ops[1] = Base ID
    SPIRVOperandVec Ops;
    Ops << Call->getType() << Call->getOperand(0);

    RID = addSPIRVInst(spv::OpBitCount, Ops);
    break;
  }
  default: {
    // Try to map the call onto a GLSL.std.450 extended instruction.
    glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(func_info);

    // Do not replace functions with implementations.
    if (EInst && Call->getCalledFunction()->isDeclaration()) {
      SPIRVID ExtInstImportID = getOpExtInstImportID();

      //
      // Generate OpExtInst.
      //

      // Ops[0] = Result Type ID
      // Ops[1] = Set ID (OpExtInstImport ID)
      // Ops[2] = Instruction Number (Literal Number)
      // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
      SPIRVOperandVec Ops;

      Ops << Call->getType() << ExtInstImportID << EInst;

      for (auto &use : Call->arg_operands()) {
        Ops << use.get();
      }

      RID = addSPIRVInst(spv::OpExtInst, Ops);

      // An "indirect" mapping means the extended instruction alone is not
      // the full answer: the builtin is implemented as ext-inst followed by
      // one corrective instruction.
      const auto IndirectExtInst = getIndirectExtInstEnum(func_info);
      if (IndirectExtInst != kGlslExtInstBad) {
        // Generate one more instruction that uses the result of the extended
        // instruction. Its result id is one more than the id of the
        // extended instruction.
        auto generate_extra_inst = [this, &Context, &Call,
                                    &RID](spv::Op opcode, Constant *constant) {
          //
          // Generate instruction like:
          // result = opcode constant <extinst-result>
          //
          // Ops[0] = Result Type ID
          // Ops[1] = Operand 0 ;; the constant, suitably splatted
          // Ops[2] = Operand 1 ;; the result of the extended instruction
          SPIRVOperandVec Ops;

          Type *resultTy = Call->getType();

          // For vector results the scalar constant must be splatted to the
          // full vector width.
          if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
            constant =
                ConstantVector::getSplat(vectorTy->getElementCount(), constant);
          }
          Ops << resultTy << constant << RID;

          // NOTE: updates RID so the caller sees the corrected result.
          RID = addSPIRVInst(opcode, Ops);
        };

        switch (IndirectExtInst) {
        case glsl::ExtInstAcos:  // Implementing acospi
        case glsl::ExtInstAsin:  // Implementing asinpi
        case glsl::ExtInstAtan:  // Implementing atanpi
        case glsl::ExtInstAtan2: // Implementing atan2pi
          // The *pi variants are the plain function result scaled by 1/pi.
          generate_extra_inst(
              spv::OpFMul,
              ConstantFP::get(Call->getType()->getScalarType(), kOneOverPi));
          break;

        default:
          assert(false && "internally inconsistent");
        }
      }
    } else {
      // A real function call (not builtin)
      // Call instruction is deferred because it needs function's ID.
      RID = addSPIRVPlaceholder(Call);
    }

    break;
  }
  }

  return RID;
}
3715
David Neto22f144c2017-06-12 14:26:21 -04003716void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
David Neto22f144c2017-06-12 14:26:21 -04003717 ValueMapType &VMap = getValueMap();
SJW806a5d82020-07-15 12:51:38 -05003718 LLVMContext &Context = module->getContext();
David Neto22f144c2017-06-12 14:26:21 -04003719
SJW806a5d82020-07-15 12:51:38 -05003720 SPIRVID RID;
David Neto22f144c2017-06-12 14:26:21 -04003721
3722 switch (I.getOpcode()) {
3723 default: {
3724 if (Instruction::isCast(I.getOpcode())) {
3725 //
3726 // Generate SPIRV instructions for cast operators.
3727 //
3728
David Netod2de94a2017-08-28 17:27:47 -04003729 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003730 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04003731 auto toI8 = Ty == Type::getInt8Ty(Context);
3732 auto fromI32 = OpTy == Type::getInt32Ty(Context);
James Price757dea82021-01-11 13:42:39 -05003733 // Handle zext, sext, uitofp, and sitofp with i1 type specially.
David Neto22f144c2017-06-12 14:26:21 -04003734 if ((I.getOpcode() == Instruction::ZExt ||
3735 I.getOpcode() == Instruction::SExt ||
James Price757dea82021-01-11 13:42:39 -05003736 I.getOpcode() == Instruction::UIToFP ||
3737 I.getOpcode() == Instruction::SIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05003738 OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003739 //
3740 // Generate OpSelect.
3741 //
3742
3743 // Ops[0] = Result Type ID
3744 // Ops[1] = Condition ID
3745 // Ops[2] = True Constant ID
3746 // Ops[3] = False Constant ID
SJWf93f5f32020-05-05 07:27:56 -05003747 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003748
SJW01901d92020-05-21 08:58:31 -05003749 Ops << I.getType() << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04003750
David Neto22f144c2017-06-12 14:26:21 -04003751 if (I.getOpcode() == Instruction::ZExt) {
SJW01901d92020-05-21 08:58:31 -05003752 Ops << ConstantInt::get(I.getType(), 1);
David Neto22f144c2017-06-12 14:26:21 -04003753 } else if (I.getOpcode() == Instruction::SExt) {
SJW01901d92020-05-21 08:58:31 -05003754 Ops << ConstantInt::getSigned(I.getType(), -1);
James Price757dea82021-01-11 13:42:39 -05003755 } else if (I.getOpcode() == Instruction::UIToFP) {
James Price96bd3d92020-11-23 09:01:57 -05003756 Ops << ConstantFP::get(I.getType(), 1.0);
James Price757dea82021-01-11 13:42:39 -05003757 } else if (I.getOpcode() == Instruction::SIToFP) {
3758 Ops << ConstantFP::get(I.getType(), -1.0);
David Neto22f144c2017-06-12 14:26:21 -04003759 }
David Neto22f144c2017-06-12 14:26:21 -04003760
David Neto22f144c2017-06-12 14:26:21 -04003761 if (I.getOpcode() == Instruction::ZExt) {
SJW01901d92020-05-21 08:58:31 -05003762 Ops << Constant::getNullValue(I.getType());
David Neto22f144c2017-06-12 14:26:21 -04003763 } else if (I.getOpcode() == Instruction::SExt) {
SJW01901d92020-05-21 08:58:31 -05003764 Ops << Constant::getNullValue(I.getType());
David Neto22f144c2017-06-12 14:26:21 -04003765 } else {
James Price96bd3d92020-11-23 09:01:57 -05003766 Ops << ConstantFP::get(I.getType(), 0.0);
David Neto22f144c2017-06-12 14:26:21 -04003767 }
David Neto22f144c2017-06-12 14:26:21 -04003768
SJWf93f5f32020-05-05 07:27:56 -05003769 RID = addSPIRVInst(spv::OpSelect, Ops);
alan-bakerb39c8262019-03-08 14:03:37 -05003770 } else if (!clspv::Option::Int8Support() &&
3771 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04003772 // The SPIR-V target type is a 32-bit int. Keep only the bottom
3773 // 8 bits.
3774 // Before:
3775 // %result = trunc i32 %a to i8
3776 // After
3777 // %result = OpBitwiseAnd %uint %a %uint_255
3778
SJWf93f5f32020-05-05 07:27:56 -05003779 SPIRVOperandVec Ops;
David Netod2de94a2017-08-28 17:27:47 -04003780
SJW806a5d82020-07-15 12:51:38 -05003781 Ops << OpTy << I.getOperand(0) << getSPIRVInt32Constant(255);
David Netod2de94a2017-08-28 17:27:47 -04003782
SJWf93f5f32020-05-05 07:27:56 -05003783 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003784 } else {
3785 // Ops[0] = Result Type ID
3786 // Ops[1] = Source Value ID
SJWf93f5f32020-05-05 07:27:56 -05003787 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003788
SJW01901d92020-05-21 08:58:31 -05003789 Ops << I.getType() << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04003790
SJWf93f5f32020-05-05 07:27:56 -05003791 RID = addSPIRVInst(GetSPIRVCastOpcode(I), Ops);
David Neto22f144c2017-06-12 14:26:21 -04003792 }
3793 } else if (isa<BinaryOperator>(I)) {
3794 //
3795 // Generate SPIRV instructions for binary operators.
3796 //
3797
3798 // Handle xor with i1 type specially.
3799 if (I.getOpcode() == Instruction::Xor &&
3800 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00003801 ((isa<ConstantInt>(I.getOperand(0)) &&
3802 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
3803 (isa<ConstantInt>(I.getOperand(1)) &&
3804 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04003805 //
3806 // Generate OpLogicalNot.
3807 //
3808 // Ops[0] = Result Type ID
3809 // Ops[1] = Operand
SJWf93f5f32020-05-05 07:27:56 -05003810 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003811
SJW01901d92020-05-21 08:58:31 -05003812 Ops << I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003813
3814 Value *CondV = I.getOperand(0);
3815 if (isa<Constant>(I.getOperand(0))) {
3816 CondV = I.getOperand(1);
3817 }
SJW01901d92020-05-21 08:58:31 -05003818 Ops << CondV;
David Neto22f144c2017-06-12 14:26:21 -04003819
SJWf93f5f32020-05-05 07:27:56 -05003820 RID = addSPIRVInst(spv::OpLogicalNot, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003821 } else {
3822 // Ops[0] = Result Type ID
3823 // Ops[1] = Operand 0
3824 // Ops[2] = Operand 1
SJWf93f5f32020-05-05 07:27:56 -05003825 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003826
SJW01901d92020-05-21 08:58:31 -05003827 Ops << I.getType() << I.getOperand(0) << I.getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04003828
SJWf93f5f32020-05-05 07:27:56 -05003829 RID = addSPIRVInst(GetSPIRVBinaryOpcode(I), Ops);
David Neto22f144c2017-06-12 14:26:21 -04003830 }
alan-bakerc9c55ae2019-12-02 16:01:27 -05003831 } else if (I.getOpcode() == Instruction::FNeg) {
3832 // The only unary operator.
3833 //
3834 // Ops[0] = Result Type ID
3835 // Ops[1] = Operand 0
SJW01901d92020-05-21 08:58:31 -05003836 SPIRVOperandVec Ops;
alan-bakerc9c55ae2019-12-02 16:01:27 -05003837
SJW01901d92020-05-21 08:58:31 -05003838 Ops << I.getType() << I.getOperand(0);
3839 RID = addSPIRVInst(spv::OpFNegate, Ops);
Marco Antognini68e5c512020-09-09 16:08:57 +01003840 } else if (I.getOpcode() == Instruction::Unreachable) {
3841 RID = addSPIRVInst(spv::OpUnreachable);
David Neto22f144c2017-06-12 14:26:21 -04003842 } else {
3843 I.print(errs());
3844 llvm_unreachable("Unsupported instruction???");
3845 }
3846 break;
3847 }
3848 case Instruction::GetElementPtr: {
3849 auto &GlobalConstArgSet = getGlobalConstArgSet();
3850
3851 //
3852 // Generate OpAccessChain.
3853 //
3854 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
3855
3856 //
3857 // Generate OpAccessChain.
3858 //
3859
3860 // Ops[0] = Result Type ID
3861 // Ops[1] = Base ID
3862 // Ops[2] ... Ops[n] = Indexes ID
SJWf93f5f32020-05-05 07:27:56 -05003863 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003864
alan-bakerb6b09dc2018-11-08 16:59:28 -05003865 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04003866 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
3867 GlobalConstArgSet.count(GEP->getPointerOperand())) {
3868 // Use pointer type with private address space for global constant.
3869 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04003870 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04003871 }
David Neto257c3892018-04-11 13:19:45 -04003872
SJW01901d92020-05-21 08:58:31 -05003873 Ops << ResultType;
David Neto22f144c2017-06-12 14:26:21 -04003874
David Neto862b7d82018-06-14 18:48:37 -04003875 // Generate the base pointer.
SJW01901d92020-05-21 08:58:31 -05003876 Ops << GEP->getPointerOperand();
David Neto22f144c2017-06-12 14:26:21 -04003877
David Neto862b7d82018-06-14 18:48:37 -04003878 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04003879
3880 //
3881 // Follows below rules for gep.
3882 //
David Neto862b7d82018-06-14 18:48:37 -04003883 // 1. If gep's first index is 0 generate OpAccessChain and ignore gep's
3884 // first index.
David Neto22f144c2017-06-12 14:26:21 -04003885 // 2. If gep's first index is not 0, generate OpPtrAccessChain and use gep's
3886 // first index.
3887 // 3. If gep's first index is not constant, generate OpPtrAccessChain and
3888 // use gep's first index.
3889 // 4. If it is not above case 1, 2 and 3, generate OpAccessChain and use
3890 // gep's first index.
3891 //
3892 spv::Op Opcode = spv::OpAccessChain;
3893 unsigned offset = 0;
3894 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04003895 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04003896 offset = 1;
David Neto862b7d82018-06-14 18:48:37 -04003897 } else if (CstInt->getZExtValue() != 0) {
David Neto22f144c2017-06-12 14:26:21 -04003898 Opcode = spv::OpPtrAccessChain;
David Neto22f144c2017-06-12 14:26:21 -04003899 }
David Neto862b7d82018-06-14 18:48:37 -04003900 } else {
David Neto22f144c2017-06-12 14:26:21 -04003901 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04003902 }
3903
3904 if (Opcode == spv::OpPtrAccessChain) {
alan-baker7506abb2020-09-10 15:02:55 -04003905 // Shader validation in the SPIR-V spec requires that the base pointer to
3906 // OpPtrAccessChain (in StorageBuffer storage class) be decorated with
3907 // ArrayStride.
alan-baker5b86ed72019-02-15 08:26:50 -05003908 auto address_space = ResultType->getAddressSpace();
3909 setVariablePointersCapabilities(address_space);
3910 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04003911 case spv::StorageClassStorageBuffer:
David Neto1a1a0582017-07-07 12:01:44 -04003912 // Save the need to generate an ArrayStride decoration. But defer
3913 // generation until later, so we only make one decoration.
alan-baker7506abb2020-09-10 15:02:55 -04003914 getTypesNeedingArrayStride().insert(GEP->getPointerOperandType());
3915 break;
3916 case spv::StorageClassWorkgroup:
Alan Bakerfcda9482018-10-02 17:09:59 -04003917 break;
3918 default:
alan-baker7506abb2020-09-10 15:02:55 -04003919 llvm_unreachable(
3920 "OpPtrAccessChain is not supported for this storage class");
Alan Bakerfcda9482018-10-02 17:09:59 -04003921 break;
David Neto1a1a0582017-07-07 12:01:44 -04003922 }
David Neto22f144c2017-06-12 14:26:21 -04003923 }
3924
3925 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
SJW01901d92020-05-21 08:58:31 -05003926 Ops << *II;
David Neto22f144c2017-06-12 14:26:21 -04003927 }
3928
SJWf93f5f32020-05-05 07:27:56 -05003929 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003930 break;
3931 }
3932 case Instruction::ExtractValue: {
3933 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
3934 // Ops[0] = Result Type ID
3935 // Ops[1] = Composite ID
3936 // Ops[2] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05003937 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003938
SJW01901d92020-05-21 08:58:31 -05003939 Ops << I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003940
SJW01901d92020-05-21 08:58:31 -05003941 Ops << EVI->getAggregateOperand();
David Neto22f144c2017-06-12 14:26:21 -04003942
3943 for (auto &Index : EVI->indices()) {
SJW01901d92020-05-21 08:58:31 -05003944 Ops << Index;
David Neto22f144c2017-06-12 14:26:21 -04003945 }
3946
SJWf93f5f32020-05-05 07:27:56 -05003947 RID = addSPIRVInst(spv::OpCompositeExtract, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003948 break;
3949 }
3950 case Instruction::InsertValue: {
3951 InsertValueInst *IVI = cast<InsertValueInst>(&I);
3952 // Ops[0] = Result Type ID
3953 // Ops[1] = Object ID
3954 // Ops[2] = Composite ID
3955 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05003956 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003957
SJW01901d92020-05-21 08:58:31 -05003958 Ops << I.getType() << IVI->getInsertedValueOperand()
3959 << IVI->getAggregateOperand();
David Neto22f144c2017-06-12 14:26:21 -04003960
3961 for (auto &Index : IVI->indices()) {
SJW01901d92020-05-21 08:58:31 -05003962 Ops << Index;
David Neto22f144c2017-06-12 14:26:21 -04003963 }
3964
SJWf93f5f32020-05-05 07:27:56 -05003965 RID = addSPIRVInst(spv::OpCompositeInsert, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003966 break;
3967 }
3968 case Instruction::Select: {
3969 //
3970 // Generate OpSelect.
3971 //
3972
3973 // Ops[0] = Result Type ID
3974 // Ops[1] = Condition ID
3975 // Ops[2] = True Constant ID
3976 // Ops[3] = False Constant ID
SJWf93f5f32020-05-05 07:27:56 -05003977 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003978
3979 // Find SPIRV instruction for parameter type.
3980 auto Ty = I.getType();
3981 if (Ty->isPointerTy()) {
3982 auto PointeeTy = Ty->getPointerElementType();
3983 if (PointeeTy->isStructTy() &&
3984 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
3985 Ty = PointeeTy;
alan-baker5b86ed72019-02-15 08:26:50 -05003986 } else {
3987 // Selecting between pointers requires variable pointers.
3988 setVariablePointersCapabilities(Ty->getPointerAddressSpace());
3989 if (!hasVariablePointers() && !selectFromSameObject(&I)) {
SJW01901d92020-05-21 08:58:31 -05003990 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05003991 }
David Neto22f144c2017-06-12 14:26:21 -04003992 }
3993 }
3994
SJW01901d92020-05-21 08:58:31 -05003995 Ops << Ty << I.getOperand(0) << I.getOperand(1) << I.getOperand(2);
David Neto22f144c2017-06-12 14:26:21 -04003996
SJWf93f5f32020-05-05 07:27:56 -05003997 RID = addSPIRVInst(spv::OpSelect, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003998 break;
3999 }
4000 case Instruction::ExtractElement: {
4001 // Handle <4 x i8> type manually.
4002 Type *CompositeTy = I.getOperand(0)->getType();
4003 if (is4xi8vec(CompositeTy)) {
4004 //
4005 // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
4006 // <4 x i8>.
4007 //
4008
4009 //
4010 // Generate OpShiftRightLogical
4011 //
4012 // Ops[0] = Result Type ID
4013 // Ops[1] = Operand 0
4014 // Ops[2] = Operand 1
4015 //
SJWf93f5f32020-05-05 07:27:56 -05004016 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004017
SJW01901d92020-05-21 08:58:31 -05004018 Ops << CompositeTy << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04004019
SJW01901d92020-05-21 08:58:31 -05004020 SPIRVID Op1ID = 0;
David Neto22f144c2017-06-12 14:26:21 -04004021 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
4022 // Handle constant index.
SJW806a5d82020-07-15 12:51:38 -05004023 uint32_t Idx = static_cast<uint32_t>(CI->getZExtValue());
4024 Op1ID = getSPIRVInt32Constant(Idx * 8);
David Neto22f144c2017-06-12 14:26:21 -04004025 } else {
4026 // Handle variable index.
SJWf93f5f32020-05-05 07:27:56 -05004027 SPIRVOperandVec TmpOps;
David Neto22f144c2017-06-12 14:26:21 -04004028
SJW806a5d82020-07-15 12:51:38 -05004029 TmpOps << Type::getInt32Ty(Context) << I.getOperand(1)
4030 << getSPIRVInt32Constant(8);
David Neto22f144c2017-06-12 14:26:21 -04004031
SJWf93f5f32020-05-05 07:27:56 -05004032 Op1ID = addSPIRVInst(spv::OpIMul, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004033 }
SJW01901d92020-05-21 08:58:31 -05004034 Ops << Op1ID;
David Neto22f144c2017-06-12 14:26:21 -04004035
SJW01901d92020-05-21 08:58:31 -05004036 SPIRVID ShiftID = addSPIRVInst(spv::OpShiftRightLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004037
4038 //
4039 // Generate OpBitwiseAnd
4040 //
4041 // Ops[0] = Result Type ID
4042 // Ops[1] = Operand 0
4043 // Ops[2] = Operand 1
4044 //
4045 Ops.clear();
4046
SJW806a5d82020-07-15 12:51:38 -05004047 Ops << CompositeTy << ShiftID << getSPIRVInt32Constant(0xFF);
David Neto22f144c2017-06-12 14:26:21 -04004048
SJWf93f5f32020-05-05 07:27:56 -05004049 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004050 break;
4051 }
4052
4053 // Ops[0] = Result Type ID
4054 // Ops[1] = Composite ID
4055 // Ops[2] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004056 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004057
SJW01901d92020-05-21 08:58:31 -05004058 Ops << I.getType() << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04004059
4060 spv::Op Opcode = spv::OpCompositeExtract;
4061 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
SJW01901d92020-05-21 08:58:31 -05004062 Ops << static_cast<uint32_t>(CI->getZExtValue());
David Neto22f144c2017-06-12 14:26:21 -04004063 } else {
SJW01901d92020-05-21 08:58:31 -05004064 Ops << I.getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04004065 Opcode = spv::OpVectorExtractDynamic;
4066 }
4067
SJWf93f5f32020-05-05 07:27:56 -05004068 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004069 break;
4070 }
4071 case Instruction::InsertElement: {
4072 // Handle <4 x i8> type manually.
4073 Type *CompositeTy = I.getOperand(0)->getType();
4074 if (is4xi8vec(CompositeTy)) {
SJW806a5d82020-07-15 12:51:38 -05004075 SPIRVID CstFFID = getSPIRVInt32Constant(0xFF);
David Neto22f144c2017-06-12 14:26:21 -04004076
SJW01901d92020-05-21 08:58:31 -05004077 SPIRVID ShiftAmountID = 0;
David Neto22f144c2017-06-12 14:26:21 -04004078 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
4079 // Handle constant index.
SJW806a5d82020-07-15 12:51:38 -05004080 uint32_t Idx = static_cast<uint32_t>(CI->getZExtValue());
4081 ShiftAmountID = getSPIRVInt32Constant(Idx * 8);
David Neto22f144c2017-06-12 14:26:21 -04004082 } else {
4083 // Handle variable index.
SJWf93f5f32020-05-05 07:27:56 -05004084 SPIRVOperandVec TmpOps;
David Neto22f144c2017-06-12 14:26:21 -04004085
SJW806a5d82020-07-15 12:51:38 -05004086 TmpOps << Type::getInt32Ty(Context) << I.getOperand(2)
4087 << getSPIRVInt32Constant(8);
David Neto22f144c2017-06-12 14:26:21 -04004088
SJWf93f5f32020-05-05 07:27:56 -05004089 ShiftAmountID = addSPIRVInst(spv::OpIMul, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004090 }
4091
4092 //
4093 // Generate mask operations.
4094 //
4095
4096 // ShiftLeft mask according to index of insertelement.
SJWf93f5f32020-05-05 07:27:56 -05004097 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004098
SJW01901d92020-05-21 08:58:31 -05004099 Ops << CompositeTy << CstFFID << ShiftAmountID;
David Neto22f144c2017-06-12 14:26:21 -04004100
SJW01901d92020-05-21 08:58:31 -05004101 SPIRVID MaskID = addSPIRVInst(spv::OpShiftLeftLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004102
4103 // Inverse mask.
4104 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004105 Ops << CompositeTy << MaskID;
David Neto22f144c2017-06-12 14:26:21 -04004106
SJW01901d92020-05-21 08:58:31 -05004107 SPIRVID InvMaskID = addSPIRVInst(spv::OpNot, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004108
4109 // Apply mask.
4110 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004111 Ops << CompositeTy << I.getOperand(0) << InvMaskID;
David Neto22f144c2017-06-12 14:26:21 -04004112
SJW01901d92020-05-21 08:58:31 -05004113 SPIRVID OrgValID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004114
4115 // Create correct value according to index of insertelement.
4116 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004117 Ops << CompositeTy << I.getOperand(1) << ShiftAmountID;
David Neto22f144c2017-06-12 14:26:21 -04004118
SJW01901d92020-05-21 08:58:31 -05004119 SPIRVID InsertValID = addSPIRVInst(spv::OpShiftLeftLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004120
4121 // Insert value to original value.
4122 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004123 Ops << CompositeTy << OrgValID << InsertValID;
David Neto22f144c2017-06-12 14:26:21 -04004124
SJWf93f5f32020-05-05 07:27:56 -05004125 RID = addSPIRVInst(spv::OpBitwiseOr, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004126 break;
4127 }
4128
SJWf93f5f32020-05-05 07:27:56 -05004129 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004130
James Priced26efea2018-06-09 23:28:32 +01004131 // Ops[0] = Result Type ID
SJW01901d92020-05-21 08:58:31 -05004132 Ops << I.getType();
David Neto22f144c2017-06-12 14:26:21 -04004133
4134 spv::Op Opcode = spv::OpCompositeInsert;
4135 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
David Neto257c3892018-04-11 13:19:45 -04004136 const auto value = CI->getZExtValue();
4137 assert(value <= UINT32_MAX);
James Priced26efea2018-06-09 23:28:32 +01004138 // Ops[1] = Object ID
4139 // Ops[2] = Composite ID
4140 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJW01901d92020-05-21 08:58:31 -05004141 Ops << I.getOperand(1) << I.getOperand(0) << static_cast<uint32_t>(value);
David Neto22f144c2017-06-12 14:26:21 -04004142 } else {
James Priced26efea2018-06-09 23:28:32 +01004143 // Ops[1] = Composite ID
4144 // Ops[2] = Object ID
4145 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJW01901d92020-05-21 08:58:31 -05004146 Ops << I.getOperand(0) << I.getOperand(1) << I.getOperand(2);
David Neto22f144c2017-06-12 14:26:21 -04004147 Opcode = spv::OpVectorInsertDynamic;
4148 }
4149
SJWf93f5f32020-05-05 07:27:56 -05004150 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004151 break;
4152 }
4153 case Instruction::ShuffleVector: {
4154 // Ops[0] = Result Type ID
4155 // Ops[1] = Vector 1 ID
4156 // Ops[2] = Vector 2 ID
4157 // Ops[3] ... Ops[n] = Components (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004158 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004159
SJW01901d92020-05-21 08:58:31 -05004160 Ops << I.getType() << I.getOperand(0) << I.getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04004161
alan-bakerc9666712020-04-01 16:31:21 -04004162 auto shuffle = cast<ShuffleVectorInst>(&I);
4163 SmallVector<int, 4> mask;
4164 shuffle->getShuffleMask(mask);
4165 for (auto i : mask) {
4166 if (i == UndefMaskElem) {
4167 if (clspv::Option::HackUndef())
4168 // Use 0 instead of undef.
SJW01901d92020-05-21 08:58:31 -05004169 Ops << 0;
alan-bakerc9666712020-04-01 16:31:21 -04004170 else
4171 // Undef for shuffle in SPIR-V.
SJW01901d92020-05-21 08:58:31 -05004172 Ops << 0xffffffff;
David Neto22f144c2017-06-12 14:26:21 -04004173 } else {
SJW01901d92020-05-21 08:58:31 -05004174 Ops << i;
David Neto22f144c2017-06-12 14:26:21 -04004175 }
4176 }
4177
SJWf93f5f32020-05-05 07:27:56 -05004178 RID = addSPIRVInst(spv::OpVectorShuffle, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004179 break;
4180 }
4181 case Instruction::ICmp:
4182 case Instruction::FCmp: {
4183 CmpInst *CmpI = cast<CmpInst>(&I);
4184
David Netod4ca2e62017-07-06 18:47:35 -04004185 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004186 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04004187 if (isa<PointerType>(ArgTy)) {
4188 CmpI->print(errs());
alan-baker21574d32020-01-29 16:00:31 -05004189 std::string name = I.getParent()->getParent()->getName().str();
David Netod4ca2e62017-07-06 18:47:35 -04004190 errs()
4191 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
4192 << "in function " << name << "\n";
4193 llvm_unreachable("Pointer equality check is invalid");
4194 break;
4195 }
4196
SJWf93f5f32020-05-05 07:27:56 -05004197 SPIRVOperandVec Ops;
alan-baker15106572020-11-06 15:08:10 -05004198 if (CmpI->getPredicate() == CmpInst::FCMP_ORD ||
4199 CmpI->getPredicate() == CmpInst::FCMP_UNO) {
4200 // Implement ordered and unordered comparisons are OpIsNan instructions.
4201 // Optimize the constants to simplify the resulting code.
4202 auto lhs = CmpI->getOperand(0);
4203 auto rhs = CmpI->getOperand(1);
4204 auto const_lhs = dyn_cast_or_null<Constant>(lhs);
4205 auto const_rhs = dyn_cast_or_null<Constant>(rhs);
4206 if ((const_lhs && const_lhs->isNaN()) ||
4207 (const_rhs && const_rhs->isNaN())) {
4208 // Result is a constant, false of ordered, true for unordered.
4209 if (CmpI->getPredicate() == CmpInst::FCMP_ORD) {
4210 RID = getSPIRVConstant(ConstantInt::getFalse(CmpI->getType()));
4211 } else {
4212 RID = getSPIRVConstant(ConstantInt::getTrue(CmpI->getType()));
4213 }
4214 break;
4215 }
4216 SPIRVID lhs_id;
4217 SPIRVID rhs_id;
4218 if (!const_lhs) {
4219 // Generate OpIsNan for the lhs.
4220 Ops.clear();
4221 Ops << CmpI->getType() << lhs;
4222 lhs_id = addSPIRVInst(spv::OpIsNan, Ops);
4223 }
4224 if (!const_rhs) {
4225 // Generate OpIsNan for the rhs.
4226 Ops.clear();
4227 Ops << CmpI->getType() << rhs;
4228 rhs_id = addSPIRVInst(spv::OpIsNan, Ops);
4229 }
4230 if (lhs_id.isValid() && rhs_id.isValid()) {
4231 // Or the results for the lhs and rhs.
4232 Ops.clear();
4233 Ops << CmpI->getType() << lhs_id << rhs_id;
4234 RID = addSPIRVInst(spv::OpLogicalOr, Ops);
4235 } else {
4236 RID = lhs_id.isValid() ? lhs_id : rhs_id;
4237 }
4238 if (CmpI->getPredicate() == CmpInst::FCMP_ORD) {
4239 // For ordered comparisons, invert the intermediate result.
4240 Ops.clear();
4241 Ops << CmpI->getType() << RID;
4242 RID = addSPIRVInst(spv::OpLogicalNot, Ops);
4243 }
4244 break;
4245 } else {
4246 // Remaining comparisons map directly to SPIR-V opcodes.
4247 // Ops[0] = Result Type ID
4248 // Ops[1] = Operand 1 ID
4249 // Ops[2] = Operand 2 ID
4250 Ops << CmpI->getType() << CmpI->getOperand(0) << CmpI->getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04004251
alan-baker15106572020-11-06 15:08:10 -05004252 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
4253 RID = addSPIRVInst(Opcode, Ops);
4254 }
David Neto22f144c2017-06-12 14:26:21 -04004255 break;
4256 }
4257 case Instruction::Br: {
SJW88ed5fe2020-05-11 12:40:57 -05004258 // Branch instruction is deferred because it needs label's ID.
4259 BasicBlock *BrBB = I.getParent();
4260 if (ContinueBlocks.count(BrBB) || MergeBlocks.count(BrBB)) {
4261 // Placeholder for Merge operation
4262 RID = addSPIRVPlaceholder(&I);
4263 }
4264 RID = addSPIRVPlaceholder(&I);
David Neto22f144c2017-06-12 14:26:21 -04004265 break;
4266 }
4267 case Instruction::Switch: {
4268 I.print(errs());
4269 llvm_unreachable("Unsupported instruction???");
4270 break;
4271 }
4272 case Instruction::IndirectBr: {
4273 I.print(errs());
4274 llvm_unreachable("Unsupported instruction???");
4275 break;
4276 }
4277 case Instruction::PHI: {
SJW88ed5fe2020-05-11 12:40:57 -05004278 // PHI instruction is deferred because it needs label's ID.
4279 RID = addSPIRVPlaceholder(&I);
David Neto22f144c2017-06-12 14:26:21 -04004280 break;
4281 }
4282 case Instruction::Alloca: {
4283 //
4284 // Generate OpVariable.
4285 //
4286 // Ops[0] : Result Type ID
4287 // Ops[1] : Storage Class
SJWf93f5f32020-05-05 07:27:56 -05004288 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004289
SJW01901d92020-05-21 08:58:31 -05004290 Ops << I.getType() << spv::StorageClassFunction;
David Neto22f144c2017-06-12 14:26:21 -04004291
SJWf93f5f32020-05-05 07:27:56 -05004292 RID = addSPIRVInst(spv::OpVariable, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004293 break;
4294 }
4295 case Instruction::Load: {
4296 LoadInst *LD = cast<LoadInst>(&I);
4297 //
4298 // Generate OpLoad.
4299 //
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004300
alan-baker5b86ed72019-02-15 08:26:50 -05004301 if (LD->getType()->isPointerTy()) {
4302 // Loading a pointer requires variable pointers.
4303 setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
4304 }
David Neto22f144c2017-06-12 14:26:21 -04004305
SJW01901d92020-05-21 08:58:31 -05004306 SPIRVID PointerID = getSPIRVValue(LD->getPointerOperand());
David Netoa60b00b2017-09-15 16:34:09 -04004307 // This is a hack to work around what looks like a driver bug.
4308 // When we're loading from the special variable holding the WorkgroupSize
David Neto0a2f98d2017-09-15 19:38:40 -04004309 // builtin value, use an OpBitWiseAnd of the value's ID rather than
4310 // generating a load.
David Neto66cfe642018-03-24 06:13:56 -07004311 // TODO(dneto): Remove this awful hack once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -04004312 if (PointerID == WorkgroupSizeVarID) {
David Neto0a2f98d2017-09-15 19:38:40 -04004313 // Generate a bitwise-and of the original value with itself.
4314 // We should have been able to get away with just an OpCopyObject,
4315 // but we need something more complex to get past certain driver bugs.
4316 // This is ridiculous, but necessary.
4317 // TODO(dneto): Revisit this once drivers fix their bugs.
4318
SJWf93f5f32020-05-05 07:27:56 -05004319 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05004320 Ops << LD->getType() << WorkgroupSizeValueID << WorkgroupSizeValueID;
David Neto0a2f98d2017-09-15 19:38:40 -04004321
SJWf93f5f32020-05-05 07:27:56 -05004322 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Netoa60b00b2017-09-15 16:34:09 -04004323 break;
4324 }
4325
4326 // This is the normal path. Generate a load.
4327
David Neto22f144c2017-06-12 14:26:21 -04004328 // Ops[0] = Result Type ID
4329 // Ops[1] = Pointer ID
4330 // Ops[2] ... Ops[n] = Optional Memory Access
4331 //
4332 // TODO: Do we need to implement Optional Memory Access???
David Neto0a2f98d2017-09-15 19:38:40 -04004333
SJWf93f5f32020-05-05 07:27:56 -05004334 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05004335 Ops << LD->getType() << LD->getPointerOperand();
David Neto22f144c2017-06-12 14:26:21 -04004336
SJWf93f5f32020-05-05 07:27:56 -05004337 RID = addSPIRVInst(spv::OpLoad, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004338 break;
4339 }
4340 case Instruction::Store: {
4341 StoreInst *ST = cast<StoreInst>(&I);
4342 //
4343 // Generate OpStore.
4344 //
4345
alan-baker5b86ed72019-02-15 08:26:50 -05004346 if (ST->getValueOperand()->getType()->isPointerTy()) {
4347 // Storing a pointer requires variable pointers.
4348 setVariablePointersCapabilities(
4349 ST->getValueOperand()->getType()->getPointerAddressSpace());
4350 }
4351
David Neto22f144c2017-06-12 14:26:21 -04004352 // Ops[0] = Pointer ID
4353 // Ops[1] = Object ID
4354 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
4355 //
4356 // TODO: Do we need to implement Optional Memory Access???
SJWf93f5f32020-05-05 07:27:56 -05004357 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05004358 Ops << ST->getPointerOperand() << ST->getValueOperand();
David Neto22f144c2017-06-12 14:26:21 -04004359
SJWf93f5f32020-05-05 07:27:56 -05004360 RID = addSPIRVInst(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004361 break;
4362 }
4363 case Instruction::AtomicCmpXchg: {
4364 I.print(errs());
4365 llvm_unreachable("Unsupported instruction???");
4366 break;
4367 }
4368 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01004369 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
4370
4371 spv::Op opcode;
4372
4373 switch (AtomicRMW->getOperation()) {
4374 default:
4375 I.print(errs());
4376 llvm_unreachable("Unsupported instruction???");
4377 case llvm::AtomicRMWInst::Add:
4378 opcode = spv::OpAtomicIAdd;
4379 break;
4380 case llvm::AtomicRMWInst::Sub:
4381 opcode = spv::OpAtomicISub;
4382 break;
4383 case llvm::AtomicRMWInst::Xchg:
4384 opcode = spv::OpAtomicExchange;
4385 break;
4386 case llvm::AtomicRMWInst::Min:
4387 opcode = spv::OpAtomicSMin;
4388 break;
4389 case llvm::AtomicRMWInst::Max:
4390 opcode = spv::OpAtomicSMax;
4391 break;
4392 case llvm::AtomicRMWInst::UMin:
4393 opcode = spv::OpAtomicUMin;
4394 break;
4395 case llvm::AtomicRMWInst::UMax:
4396 opcode = spv::OpAtomicUMax;
4397 break;
4398 case llvm::AtomicRMWInst::And:
4399 opcode = spv::OpAtomicAnd;
4400 break;
4401 case llvm::AtomicRMWInst::Or:
4402 opcode = spv::OpAtomicOr;
4403 break;
4404 case llvm::AtomicRMWInst::Xor:
4405 opcode = spv::OpAtomicXor;
4406 break;
4407 }
4408
4409 //
4410 // Generate OpAtomic*.
4411 //
SJWf93f5f32020-05-05 07:27:56 -05004412 SPIRVOperandVec Ops;
Neil Henning39672102017-09-29 14:33:13 +01004413
SJW01901d92020-05-21 08:58:31 -05004414 Ops << I.getType() << AtomicRMW->getPointerOperand();
Neil Henning39672102017-09-29 14:33:13 +01004415
SJW806a5d82020-07-15 12:51:38 -05004416 const auto ConstantScopeDevice = getSPIRVInt32Constant(spv::ScopeDevice);
SJW01901d92020-05-21 08:58:31 -05004417 Ops << ConstantScopeDevice;
Neil Henning39672102017-09-29 14:33:13 +01004418
SJW806a5d82020-07-15 12:51:38 -05004419 const auto ConstantMemorySemantics =
4420 getSPIRVInt32Constant(spv::MemorySemanticsUniformMemoryMask |
4421 spv::MemorySemanticsSequentiallyConsistentMask);
SJW01901d92020-05-21 08:58:31 -05004422 Ops << ConstantMemorySemantics << AtomicRMW->getValOperand();
Neil Henning39672102017-09-29 14:33:13 +01004423
SJWf93f5f32020-05-05 07:27:56 -05004424 RID = addSPIRVInst(opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004425 break;
4426 }
4427 case Instruction::Fence: {
4428 I.print(errs());
4429 llvm_unreachable("Unsupported instruction???");
4430 break;
4431 }
4432 case Instruction::Call: {
4433 CallInst *Call = dyn_cast<CallInst>(&I);
SJW806a5d82020-07-15 12:51:38 -05004434 RID = GenerateInstructionFromCall(Call);
David Neto22f144c2017-06-12 14:26:21 -04004435 break;
4436 }
4437 case Instruction::Ret: {
4438 unsigned NumOps = I.getNumOperands();
4439 if (NumOps == 0) {
4440 //
4441 // Generate OpReturn.
4442 //
SJWf93f5f32020-05-05 07:27:56 -05004443 RID = addSPIRVInst(spv::OpReturn);
David Neto22f144c2017-06-12 14:26:21 -04004444 } else {
4445 //
4446 // Generate OpReturnValue.
4447 //
4448
4449 // Ops[0] = Return Value ID
SJWf93f5f32020-05-05 07:27:56 -05004450 SPIRVOperandVec Ops;
David Neto257c3892018-04-11 13:19:45 -04004451
SJW01901d92020-05-21 08:58:31 -05004452 Ops << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04004453
SJWf93f5f32020-05-05 07:27:56 -05004454 RID = addSPIRVInst(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004455 break;
4456 }
4457 break;
4458 }
4459 }
SJWf93f5f32020-05-05 07:27:56 -05004460
4461 // Register Instruction to ValueMap.
SJW01901d92020-05-21 08:58:31 -05004462 if (RID.isValid()) {
SJWf93f5f32020-05-05 07:27:56 -05004463 VMap[&I] = RID;
4464 }
David Neto22f144c2017-06-12 14:26:21 -04004465}
4466
4467void SPIRVProducerPass::GenerateFuncEpilogue() {
David Neto22f144c2017-06-12 14:26:21 -04004468 //
4469 // Generate OpFunctionEnd
4470 //
SJWf93f5f32020-05-05 07:27:56 -05004471 addSPIRVInst(spv::OpFunctionEnd);
David Neto22f144c2017-06-12 14:26:21 -04004472}
4473
4474bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05004475 // Don't specialize <4 x i8> if i8 is generally supported.
4476 if (clspv::Option::Int8Support())
4477 return false;
4478
David Neto22f144c2017-06-12 14:26:21 -04004479 LLVMContext &Context = Ty->getContext();
James Pricecf53df42020-04-20 14:41:24 -04004480 if (auto VecTy = dyn_cast<VectorType>(Ty)) {
4481 if (VecTy->getElementType() == Type::getInt8Ty(Context) &&
alan-baker5a8c3be2020-09-09 13:44:26 -04004482 VecTy->getElementCount().getKnownMinValue() == 4) {
David Neto22f144c2017-06-12 14:26:21 -04004483 return true;
4484 }
4485 }
4486
4487 return false;
4488}
4489
4490void SPIRVProducerPass::HandleDeferredInstruction() {
David Neto22f144c2017-06-12 14:26:21 -04004491 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
4492
SJW88ed5fe2020-05-11 12:40:57 -05004493 for (size_t i = 0; i < DeferredInsts.size(); ++i) {
4494 Value *Inst = DeferredInsts[i].first;
4495 SPIRVInstruction *Placeholder = DeferredInsts[i].second;
4496 SPIRVOperandVec Operands;
4497
4498 auto nextDeferred = [&i, &Inst, &DeferredInsts, &Placeholder]() {
4499 ++i;
4500 assert(DeferredInsts.size() > i);
4501 assert(Inst == DeferredInsts[i].first);
4502 Placeholder = DeferredInsts[i].second;
4503 };
David Neto22f144c2017-06-12 14:26:21 -04004504
4505 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
alan-baker06cad652019-12-03 17:56:47 -05004506 // Check whether this branch needs to be preceeded by merge instruction.
David Neto22f144c2017-06-12 14:26:21 -04004507 BasicBlock *BrBB = Br->getParent();
alan-baker06cad652019-12-03 17:56:47 -05004508 if (ContinueBlocks.count(BrBB)) {
David Neto22f144c2017-06-12 14:26:21 -04004509 //
4510 // Generate OpLoopMerge.
4511 //
4512 // Ops[0] = Merge Block ID
4513 // Ops[1] = Continue Target ID
4514 // Ops[2] = Selection Control
SJWf93f5f32020-05-05 07:27:56 -05004515 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004516
SJW01901d92020-05-21 08:58:31 -05004517 Ops << MergeBlocks[BrBB] << ContinueBlocks[BrBB]
4518 << spv::LoopControlMaskNone;
David Neto22f144c2017-06-12 14:26:21 -04004519
SJW88ed5fe2020-05-11 12:40:57 -05004520 replaceSPIRVInst(Placeholder, spv::OpLoopMerge, Ops);
4521
4522 nextDeferred();
4523
alan-baker06cad652019-12-03 17:56:47 -05004524 } else if (MergeBlocks.count(BrBB)) {
4525 //
4526 // Generate OpSelectionMerge.
4527 //
4528 // Ops[0] = Merge Block ID
4529 // Ops[1] = Selection Control
SJWf93f5f32020-05-05 07:27:56 -05004530 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004531
alan-baker06cad652019-12-03 17:56:47 -05004532 auto MergeBB = MergeBlocks[BrBB];
SJW01901d92020-05-21 08:58:31 -05004533 Ops << MergeBB << spv::SelectionControlMaskNone;
David Neto22f144c2017-06-12 14:26:21 -04004534
SJW88ed5fe2020-05-11 12:40:57 -05004535 replaceSPIRVInst(Placeholder, spv::OpSelectionMerge, Ops);
4536
4537 nextDeferred();
David Neto22f144c2017-06-12 14:26:21 -04004538 }
4539
4540 if (Br->isConditional()) {
4541 //
4542 // Generate OpBranchConditional.
4543 //
4544 // Ops[0] = Condition ID
4545 // Ops[1] = True Label ID
4546 // Ops[2] = False Label ID
4547 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004548 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004549
SJW01901d92020-05-21 08:58:31 -05004550 Ops << Br->getCondition() << Br->getSuccessor(0) << Br->getSuccessor(1);
David Neto22f144c2017-06-12 14:26:21 -04004551
SJW88ed5fe2020-05-11 12:40:57 -05004552 replaceSPIRVInst(Placeholder, spv::OpBranchConditional, Ops);
4553
David Neto22f144c2017-06-12 14:26:21 -04004554 } else {
4555 //
4556 // Generate OpBranch.
4557 //
4558 // Ops[0] = Target Label ID
SJWf93f5f32020-05-05 07:27:56 -05004559 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004560
SJW01901d92020-05-21 08:58:31 -05004561 Ops << Br->getSuccessor(0);
David Neto22f144c2017-06-12 14:26:21 -04004562
SJW88ed5fe2020-05-11 12:40:57 -05004563 replaceSPIRVInst(Placeholder, spv::OpBranch, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004564 }
4565 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5ed87542020-03-23 11:05:22 -04004566 if (PHI->getType()->isPointerTy() && !IsSamplerType(PHI->getType()) &&
4567 !IsImageType(PHI->getType())) {
alan-baker5b86ed72019-02-15 08:26:50 -05004568 // OpPhi on pointers requires variable pointers.
4569 setVariablePointersCapabilities(
4570 PHI->getType()->getPointerAddressSpace());
4571 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
SJW01901d92020-05-21 08:58:31 -05004572 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004573 }
4574 }
4575
David Neto22f144c2017-06-12 14:26:21 -04004576 //
4577 // Generate OpPhi.
4578 //
4579 // Ops[0] = Result Type ID
4580 // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
SJWf93f5f32020-05-05 07:27:56 -05004581 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004582
SJW01901d92020-05-21 08:58:31 -05004583 Ops << PHI->getType();
David Neto22f144c2017-06-12 14:26:21 -04004584
SJW88ed5fe2020-05-11 12:40:57 -05004585 for (unsigned j = 0; j < PHI->getNumIncomingValues(); j++) {
SJW01901d92020-05-21 08:58:31 -05004586 Ops << PHI->getIncomingValue(j) << PHI->getIncomingBlock(j);
David Neto22f144c2017-06-12 14:26:21 -04004587 }
4588
SJW88ed5fe2020-05-11 12:40:57 -05004589 replaceSPIRVInst(Placeholder, spv::OpPhi, Ops);
4590
David Neto22f144c2017-06-12 14:26:21 -04004591 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
4592 Function *Callee = Call->getCalledFunction();
David Neto3fbb4072017-10-16 11:28:14 -04004593 auto callee_name = Callee->getName();
David Neto22f144c2017-06-12 14:26:21 -04004594
SJW61531372020-06-09 07:31:08 -05004595 if (Builtins::Lookup(Callee) == Builtins::kClspvCompositeConstruct) {
David Netoab03f432017-11-03 17:00:44 -04004596 // Generate an OpCompositeConstruct
SJWf93f5f32020-05-05 07:27:56 -05004597 SPIRVOperandVec Ops;
David Netoab03f432017-11-03 17:00:44 -04004598
4599 // The result type.
SJW01901d92020-05-21 08:58:31 -05004600 Ops << Call->getType();
David Netoab03f432017-11-03 17:00:44 -04004601
4602 for (Use &use : Call->arg_operands()) {
SJW01901d92020-05-21 08:58:31 -05004603 Ops << use.get();
David Netoab03f432017-11-03 17:00:44 -04004604 }
4605
SJW88ed5fe2020-05-11 12:40:57 -05004606 replaceSPIRVInst(Placeholder, spv::OpCompositeConstruct, Ops);
David Netoab03f432017-11-03 17:00:44 -04004607
David Neto22f144c2017-06-12 14:26:21 -04004608 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05004609 if (Call->getType()->isPointerTy()) {
4610 // Functions returning pointers require variable pointers.
4611 setVariablePointersCapabilities(
4612 Call->getType()->getPointerAddressSpace());
4613 }
4614
David Neto22f144c2017-06-12 14:26:21 -04004615 //
4616 // Generate OpFunctionCall.
4617 //
4618
4619 // Ops[0] = Result Type ID
4620 // Ops[1] = Callee Function ID
4621 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
SJWf93f5f32020-05-05 07:27:56 -05004622 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004623
SJW01901d92020-05-21 08:58:31 -05004624 Ops << Call->getType();
David Neto22f144c2017-06-12 14:26:21 -04004625
SJW01901d92020-05-21 08:58:31 -05004626 SPIRVID CalleeID = getSPIRVValue(Callee);
SJW806a5d82020-07-15 12:51:38 -05004627 if (!CalleeID.isValid()) {
David Neto43568eb2017-10-13 18:25:25 -04004628 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04004629 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04004630 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
4631 // causes an infinite loop. Instead, go ahead and generate
4632 // the bad function call. A validator will catch the 0-Id.
4633 // llvm_unreachable("Can't translate function call");
4634 }
David Neto22f144c2017-06-12 14:26:21 -04004635
SJW01901d92020-05-21 08:58:31 -05004636 Ops << CalleeID;
David Neto22f144c2017-06-12 14:26:21 -04004637
David Neto22f144c2017-06-12 14:26:21 -04004638 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
SJW88ed5fe2020-05-11 12:40:57 -05004639 for (unsigned j = 0; j < CalleeFTy->getNumParams(); j++) {
4640 auto *operand = Call->getOperand(j);
alan-bakerd4d50652019-12-03 17:17:15 -05004641 auto *operand_type = operand->getType();
4642 // Images and samplers can be passed as function parameters without
4643 // variable pointers.
4644 if (operand_type->isPointerTy() && !IsImageType(operand_type) &&
4645 !IsSamplerType(operand_type)) {
alan-baker5b86ed72019-02-15 08:26:50 -05004646 auto sc =
4647 GetStorageClass(operand->getType()->getPointerAddressSpace());
4648 if (sc == spv::StorageClassStorageBuffer) {
4649 // Passing SSBO by reference requires variable pointers storage
4650 // buffer.
SJW01901d92020-05-21 08:58:31 -05004651 setVariablePointersStorageBuffer();
alan-baker5b86ed72019-02-15 08:26:50 -05004652 } else if (sc == spv::StorageClassWorkgroup) {
4653 // Workgroup references require variable pointers if they are not
4654 // memory object declarations.
4655 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
4656 // Workgroup accessor represents a variable reference.
SJW61531372020-06-09 07:31:08 -05004657 if (Builtins::Lookup(operand_call->getCalledFunction()) !=
4658 Builtins::kClspvLocal)
SJW01901d92020-05-21 08:58:31 -05004659 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004660 } else {
4661 // Arguments are function parameters.
4662 if (!isa<Argument>(operand))
SJW01901d92020-05-21 08:58:31 -05004663 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004664 }
4665 }
4666 }
SJW01901d92020-05-21 08:58:31 -05004667 Ops << operand;
David Neto22f144c2017-06-12 14:26:21 -04004668 }
4669
SJW88ed5fe2020-05-11 12:40:57 -05004670 replaceSPIRVInst(Placeholder, spv::OpFunctionCall, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004671 }
4672 }
4673 }
4674}
4675
SJW77b87ad2020-04-21 14:37:52 -05004676void SPIRVProducerPass::HandleDeferredDecorations() {
4677 const auto &DL = module->getDataLayout();
alan-baker5a8c3be2020-09-09 13:44:26 -04004678 if (getTypesNeedingArrayStride().empty()) {
David Neto1a1a0582017-07-07 12:01:44 -04004679 return;
David Netoc6f3ab22018-04-06 18:02:31 -04004680 }
David Neto1a1a0582017-07-07 12:01:44 -04004681
David Netoc6f3ab22018-04-06 18:02:31 -04004682 // Insert ArrayStride decorations on pointer types, due to OpPtrAccessChain
4683 // instructions we generated earlier.
alan-bakerc3fd07f2020-10-22 09:48:49 -04004684 DenseSet<uint32_t> seen;
David Neto85082642018-03-24 06:55:20 -07004685 for (auto *type : getTypesNeedingArrayStride()) {
alan-bakerc3fd07f2020-10-22 09:48:49 -04004686 auto id = getSPIRVType(type);
4687 if (!seen.insert(id.get()).second)
4688 continue;
4689
David Neto85082642018-03-24 06:55:20 -07004690 Type *elemTy = nullptr;
4691 if (auto *ptrTy = dyn_cast<PointerType>(type)) {
4692 elemTy = ptrTy->getElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05004693 } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
alan-baker8eb435a2020-04-08 00:42:06 -04004694 elemTy = arrayTy->getElementType();
4695 } else if (auto *vecTy = dyn_cast<VectorType>(type)) {
4696 elemTy = vecTy->getElementType();
David Neto85082642018-03-24 06:55:20 -07004697 } else {
4698 errs() << "Unhandled strided type " << *type << "\n";
4699 llvm_unreachable("Unhandled strided type");
4700 }
David Neto1a1a0582017-07-07 12:01:44 -04004701
4702 // Ops[0] = Target ID
4703 // Ops[1] = Decoration (ArrayStride)
4704 // Ops[2] = Stride number (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004705 SPIRVOperandVec Ops;
David Neto1a1a0582017-07-07 12:01:44 -04004706
David Neto85082642018-03-24 06:55:20 -07004707 // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
Alan Bakerfcda9482018-10-02 17:09:59 -04004708 const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));
David Neto257c3892018-04-11 13:19:45 -04004709
alan-bakerc3fd07f2020-10-22 09:48:49 -04004710 Ops << id << spv::DecorationArrayStride << stride;
David Neto1a1a0582017-07-07 12:01:44 -04004711
SJWf93f5f32020-05-05 07:27:56 -05004712 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04004713 }
David Neto1a1a0582017-07-07 12:01:44 -04004714}
4715
SJW61531372020-06-09 07:31:08 -05004716glsl::ExtInst
4717SPIRVProducerPass::getExtInstEnum(const Builtins::FunctionInfo &func_info) {
SJW61531372020-06-09 07:31:08 -05004718 switch (func_info.getType()) {
SJW2c317da2020-03-23 07:39:13 -05004719 case Builtins::kClamp: {
SJW61531372020-06-09 07:31:08 -05004720 auto param_type = func_info.getParameter(0);
SJW2c317da2020-03-23 07:39:13 -05004721 if (param_type.type_id == Type::FloatTyID) {
alan-bakerecc9c942020-12-07 13:13:32 -05004722 return glsl::ExtInst::ExtInstNClamp;
SJW2c317da2020-03-23 07:39:13 -05004723 }
4724 return param_type.is_signed ? glsl::ExtInst::ExtInstSClamp
4725 : glsl::ExtInst::ExtInstUClamp;
4726 }
4727 case Builtins::kMax: {
SJW61531372020-06-09 07:31:08 -05004728 auto param_type = func_info.getParameter(0);
SJW2c317da2020-03-23 07:39:13 -05004729 if (param_type.type_id == Type::FloatTyID) {
4730 return glsl::ExtInst::ExtInstFMax;
4731 }
4732 return param_type.is_signed ? glsl::ExtInst::ExtInstSMax
4733 : glsl::ExtInst::ExtInstUMax;
4734 }
4735 case Builtins::kMin: {
SJW61531372020-06-09 07:31:08 -05004736 auto param_type = func_info.getParameter(0);
SJW2c317da2020-03-23 07:39:13 -05004737 if (param_type.type_id == Type::FloatTyID) {
4738 return glsl::ExtInst::ExtInstFMin;
4739 }
4740 return param_type.is_signed ? glsl::ExtInst::ExtInstSMin
4741 : glsl::ExtInst::ExtInstUMin;
4742 }
4743 case Builtins::kAbs:
4744 return glsl::ExtInst::ExtInstSAbs;
4745 case Builtins::kFmax:
Marco Antognini55d51862020-07-21 17:50:07 +01004746 return glsl::ExtInst::ExtInstNMax;
SJW2c317da2020-03-23 07:39:13 -05004747 case Builtins::kFmin:
Marco Antognini55d51862020-07-21 17:50:07 +01004748 return glsl::ExtInst::ExtInstNMin;
SJW2c317da2020-03-23 07:39:13 -05004749 case Builtins::kDegrees:
4750 return glsl::ExtInst::ExtInstDegrees;
4751 case Builtins::kRadians:
4752 return glsl::ExtInst::ExtInstRadians;
4753 case Builtins::kMix:
4754 return glsl::ExtInst::ExtInstFMix;
4755 case Builtins::kAcos:
4756 case Builtins::kAcospi:
4757 return glsl::ExtInst::ExtInstAcos;
4758 case Builtins::kAcosh:
4759 return glsl::ExtInst::ExtInstAcosh;
4760 case Builtins::kAsin:
4761 case Builtins::kAsinpi:
4762 return glsl::ExtInst::ExtInstAsin;
4763 case Builtins::kAsinh:
4764 return glsl::ExtInst::ExtInstAsinh;
4765 case Builtins::kAtan:
4766 case Builtins::kAtanpi:
4767 return glsl::ExtInst::ExtInstAtan;
4768 case Builtins::kAtanh:
4769 return glsl::ExtInst::ExtInstAtanh;
4770 case Builtins::kAtan2:
4771 case Builtins::kAtan2pi:
4772 return glsl::ExtInst::ExtInstAtan2;
4773 case Builtins::kCeil:
4774 return glsl::ExtInst::ExtInstCeil;
4775 case Builtins::kSin:
4776 case Builtins::kHalfSin:
4777 case Builtins::kNativeSin:
4778 return glsl::ExtInst::ExtInstSin;
4779 case Builtins::kSinh:
4780 return glsl::ExtInst::ExtInstSinh;
4781 case Builtins::kCos:
4782 case Builtins::kHalfCos:
4783 case Builtins::kNativeCos:
4784 return glsl::ExtInst::ExtInstCos;
4785 case Builtins::kCosh:
4786 return glsl::ExtInst::ExtInstCosh;
4787 case Builtins::kTan:
4788 case Builtins::kHalfTan:
4789 case Builtins::kNativeTan:
4790 return glsl::ExtInst::ExtInstTan;
4791 case Builtins::kTanh:
4792 return glsl::ExtInst::ExtInstTanh;
4793 case Builtins::kExp:
4794 case Builtins::kHalfExp:
4795 case Builtins::kNativeExp:
4796 return glsl::ExtInst::ExtInstExp;
4797 case Builtins::kExp2:
4798 case Builtins::kHalfExp2:
4799 case Builtins::kNativeExp2:
4800 return glsl::ExtInst::ExtInstExp2;
4801 case Builtins::kLog:
4802 case Builtins::kHalfLog:
4803 case Builtins::kNativeLog:
4804 return glsl::ExtInst::ExtInstLog;
4805 case Builtins::kLog2:
4806 case Builtins::kHalfLog2:
4807 case Builtins::kNativeLog2:
4808 return glsl::ExtInst::ExtInstLog2;
4809 case Builtins::kFabs:
4810 return glsl::ExtInst::ExtInstFAbs;
4811 case Builtins::kFma:
4812 return glsl::ExtInst::ExtInstFma;
4813 case Builtins::kFloor:
4814 return glsl::ExtInst::ExtInstFloor;
4815 case Builtins::kLdexp:
4816 return glsl::ExtInst::ExtInstLdexp;
4817 case Builtins::kPow:
4818 case Builtins::kPowr:
4819 case Builtins::kHalfPowr:
4820 case Builtins::kNativePowr:
4821 return glsl::ExtInst::ExtInstPow;
James Price38553362020-09-03 18:30:40 -04004822 case Builtins::kRint:
4823 return glsl::ExtInst::ExtInstRoundEven;
SJW2c317da2020-03-23 07:39:13 -05004824 case Builtins::kRound:
4825 return glsl::ExtInst::ExtInstRound;
4826 case Builtins::kSqrt:
4827 case Builtins::kHalfSqrt:
4828 case Builtins::kNativeSqrt:
4829 return glsl::ExtInst::ExtInstSqrt;
4830 case Builtins::kRsqrt:
4831 case Builtins::kHalfRsqrt:
4832 case Builtins::kNativeRsqrt:
4833 return glsl::ExtInst::ExtInstInverseSqrt;
4834 case Builtins::kTrunc:
4835 return glsl::ExtInst::ExtInstTrunc;
4836 case Builtins::kFrexp:
4837 return glsl::ExtInst::ExtInstFrexp;
SJW61531372020-06-09 07:31:08 -05004838 case Builtins::kClspvFract:
SJW2c317da2020-03-23 07:39:13 -05004839 case Builtins::kFract:
4840 return glsl::ExtInst::ExtInstFract;
4841 case Builtins::kSign:
4842 return glsl::ExtInst::ExtInstFSign;
4843 case Builtins::kLength:
4844 case Builtins::kFastLength:
4845 return glsl::ExtInst::ExtInstLength;
4846 case Builtins::kDistance:
4847 case Builtins::kFastDistance:
4848 return glsl::ExtInst::ExtInstDistance;
4849 case Builtins::kStep:
4850 return glsl::ExtInst::ExtInstStep;
4851 case Builtins::kSmoothstep:
4852 return glsl::ExtInst::ExtInstSmoothStep;
4853 case Builtins::kCross:
4854 return glsl::ExtInst::ExtInstCross;
4855 case Builtins::kNormalize:
4856 case Builtins::kFastNormalize:
4857 return glsl::ExtInst::ExtInstNormalize;
SJW61531372020-06-09 07:31:08 -05004858 case Builtins::kSpirvPack:
4859 return glsl::ExtInst::ExtInstPackHalf2x16;
4860 case Builtins::kSpirvUnpack:
4861 return glsl::ExtInst::ExtInstUnpackHalf2x16;
SJW2c317da2020-03-23 07:39:13 -05004862 default:
4863 break;
4864 }
4865
alan-baker5f2e88e2020-12-07 15:24:04 -05004866 // TODO: improve this by checking the intrinsic id.
SJW61531372020-06-09 07:31:08 -05004867 if (func_info.getName().find("llvm.fmuladd.") == 0) {
4868 return glsl::ExtInst::ExtInstFma;
4869 }
alan-baker5f2e88e2020-12-07 15:24:04 -05004870 if (func_info.getName().find("llvm.sqrt.") == 0) {
4871 return glsl::ExtInst::ExtInstSqrt;
4872 }
4873 if (func_info.getName().find("llvm.trunc.") == 0) {
4874 return glsl::ExtInst::ExtInstTrunc;
4875 }
4876 if (func_info.getName().find("llvm.ctlz.") == 0) {
4877 return glsl::ExtInst::ExtInstFindUMsb;
4878 }
4879 if (func_info.getName().find("llvm.cttz.") == 0) {
4880 return glsl::ExtInst::ExtInstFindILsb;
4881 }
alan-baker3e0de472020-12-08 15:57:17 -05004882 if (func_info.getName().find("llvm.ceil.") == 0) {
4883 return glsl::ExtInst::ExtInstCeil;
4884 }
4885 if (func_info.getName().find("llvm.rint.") == 0) {
4886 return glsl::ExtInst::ExtInstRoundEven;
4887 }
4888 if (func_info.getName().find("llvm.fabs.") == 0) {
4889 return glsl::ExtInst::ExtInstFAbs;
4890 }
4891 if (func_info.getName().find("llvm.floor.") == 0) {
4892 return glsl::ExtInst::ExtInstFloor;
4893 }
4894 if (func_info.getName().find("llvm.sin.") == 0) {
4895 return glsl::ExtInst::ExtInstSin;
4896 }
4897 if (func_info.getName().find("llvm.cos.") == 0) {
4898 return glsl::ExtInst::ExtInstCos;
4899 }
alan-baker8b968112020-12-15 15:53:29 -05004900 if (func_info.getName().find("llvm.exp.") == 0) {
4901 return glsl::ExtInst::ExtInstExp;
4902 }
4903 if (func_info.getName().find("llvm.log.") == 0) {
4904 return glsl::ExtInst::ExtInstLog;
4905 }
4906 if (func_info.getName().find("llvm.pow.") == 0) {
4907 return glsl::ExtInst::ExtInstPow;
4908 }
James Price8cc3bb12021-05-05 10:20:58 -04004909 if (func_info.getName().find("llvm.smax.") == 0) {
4910 return glsl::ExtInst::ExtInstSMax;
4911 }
4912 if (func_info.getName().find("llvm.smin.") == 0) {
4913 return glsl::ExtInst::ExtInstSMin;
4914 }
SJW61531372020-06-09 07:31:08 -05004915 return kGlslExtInstBad;
David Neto3fbb4072017-10-16 11:28:14 -04004916}
4917
SJW61531372020-06-09 07:31:08 -05004918glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(
4919 const Builtins::FunctionInfo &func_info) {
4920 switch (func_info.getType()) {
SJW2c317da2020-03-23 07:39:13 -05004921 case Builtins::kAcospi:
4922 return glsl::ExtInst::ExtInstAcos;
4923 case Builtins::kAsinpi:
4924 return glsl::ExtInst::ExtInstAsin;
4925 case Builtins::kAtanpi:
4926 return glsl::ExtInst::ExtInstAtan;
4927 case Builtins::kAtan2pi:
4928 return glsl::ExtInst::ExtInstAtan2;
4929 default:
4930 break;
4931 }
4932 return kGlslExtInstBad;
David Neto3fbb4072017-10-16 11:28:14 -04004933}
4934
SJW61531372020-06-09 07:31:08 -05004935glsl::ExtInst SPIRVProducerPass::getDirectOrIndirectExtInstEnum(
4936 const Builtins::FunctionInfo &func_info) {
4937 auto direct = getExtInstEnum(func_info);
David Neto3fbb4072017-10-16 11:28:14 -04004938 if (direct != kGlslExtInstBad)
4939 return direct;
SJW61531372020-06-09 07:31:08 -05004940 return getIndirectExtInstEnum(func_info);
David Neto22f144c2017-06-12 14:26:21 -04004941}
4942
David Neto22f144c2017-06-12 14:26:21 -04004943void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
David Neto0676e6f2017-07-11 18:47:44 -04004944 binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
David Neto22f144c2017-06-12 14:26:21 -04004945}
4946
SJW88ed5fe2020-05-11 12:40:57 -05004947void SPIRVProducerPass::WriteResultID(const SPIRVInstruction &Inst) {
SJW01901d92020-05-21 08:58:31 -05004948 WriteOneWord(Inst.getResultID().get());
David Neto22f144c2017-06-12 14:26:21 -04004949}
4950
SJW88ed5fe2020-05-11 12:40:57 -05004951void SPIRVProducerPass::WriteWordCountAndOpcode(const SPIRVInstruction &Inst) {
David Neto22f144c2017-06-12 14:26:21 -04004952 // High 16 bit : Word Count
4953 // Low 16 bit : Opcode
SJW88ed5fe2020-05-11 12:40:57 -05004954 uint32_t Word = Inst.getOpcode();
4955 const uint32_t count = Inst.getWordCount();
David Netoee2660d2018-06-28 16:31:29 -04004956 if (count > 65535) {
4957 errs() << "Word count limit of 65535 exceeded: " << count << "\n";
4958 llvm_unreachable("Word count too high");
4959 }
SJW88ed5fe2020-05-11 12:40:57 -05004960 Word |= Inst.getWordCount() << 16;
David Neto22f144c2017-06-12 14:26:21 -04004961 WriteOneWord(Word);
4962}
4963
SJW88ed5fe2020-05-11 12:40:57 -05004964void SPIRVProducerPass::WriteOperand(const SPIRVOperand &Op) {
4965 SPIRVOperandType OpTy = Op.getType();
David Neto22f144c2017-06-12 14:26:21 -04004966 switch (OpTy) {
4967 default: {
4968 llvm_unreachable("Unsupported SPIRV Operand Type???");
4969 break;
4970 }
4971 case SPIRVOperandType::NUMBERID: {
SJW88ed5fe2020-05-11 12:40:57 -05004972 WriteOneWord(Op.getNumID());
David Neto22f144c2017-06-12 14:26:21 -04004973 break;
4974 }
4975 case SPIRVOperandType::LITERAL_STRING: {
SJW88ed5fe2020-05-11 12:40:57 -05004976 std::string Str = Op.getLiteralStr();
David Neto22f144c2017-06-12 14:26:21 -04004977 const char *Data = Str.c_str();
4978 size_t WordSize = Str.size() / 4;
4979 for (unsigned Idx = 0; Idx < WordSize; Idx++) {
4980 WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
4981 }
4982
4983 uint32_t Remainder = Str.size() % 4;
4984 uint32_t LastWord = 0;
4985 if (Remainder) {
4986 for (unsigned Idx = 0; Idx < Remainder; Idx++) {
4987 LastWord |= Data[4 * WordSize + Idx] << 8 * Idx;
4988 }
4989 }
4990
4991 WriteOneWord(LastWord);
4992 break;
4993 }
SJW88ed5fe2020-05-11 12:40:57 -05004994 case SPIRVOperandType::LITERAL_WORD: {
4995 WriteOneWord(Op.getLiteralNum()[0]);
4996 break;
4997 }
4998 case SPIRVOperandType::LITERAL_DWORD: {
4999 WriteOneWord(Op.getLiteralNum()[0]);
5000 WriteOneWord(Op.getLiteralNum()[1]);
David Neto22f144c2017-06-12 14:26:21 -04005001 break;
5002 }
5003 }
5004}
5005
5006void SPIRVProducerPass::WriteSPIRVBinary() {
SJW69939d52020-04-16 07:29:07 -05005007 for (int i = 0; i < kSectionCount; ++i) {
5008 WriteSPIRVBinary(SPIRVSections[i]);
5009 }
5010}
5011
// Serializes a list of SPIR-V instructions to the binary output.
//
// The opcode cases are grouped by how the instruction's words are laid out:
//  1. Instructions with no result id: all operands follow the opcode word.
//  2. Instructions whose result id immediately follows the opcode word
//     (types, labels, imports, strings).
//  3. Value-producing instructions: result type (Ops[0]), then the result
//     id, then the remaining operands.
// Any opcode not listed here is a producer bug and aborts.
void SPIRVProducerPass::WriteSPIRVBinary(SPIRVInstructionList &SPIRVInstList) {
  for (const auto &Inst : SPIRVInstList) {
    const auto &Ops = Inst.getOperands();
    spv::Op Opcode = static_cast<spv::Op>(Inst.getOpcode());

    switch (Opcode) {
    default: {
      errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
      llvm_unreachable("Unsupported SPIRV instruction");
      break;
    }
    // Group 1: no result id; emit word count/opcode then every operand.
    case spv::OpUnreachable:
    case spv::OpCapability:
    case spv::OpExtension:
    case spv::OpMemoryModel:
    case spv::OpEntryPoint:
    case spv::OpExecutionMode:
    case spv::OpSource:
    case spv::OpDecorate:
    case spv::OpMemberDecorate:
    case spv::OpBranch:
    case spv::OpBranchConditional:
    case spv::OpSelectionMerge:
    case spv::OpLoopMerge:
    case spv::OpStore:
    case spv::OpImageWrite:
    case spv::OpReturnValue:
    case spv::OpControlBarrier:
    case spv::OpMemoryBarrier:
    case spv::OpReturn:
    case spv::OpFunctionEnd:
    case spv::OpCopyMemory:
    case spv::OpAtomicStore: {
      WriteWordCountAndOpcode(Inst);
      for (uint32_t i = 0; i < Ops.size(); i++) {
        WriteOperand(Ops[i]);
      }
      break;
    }
    // Group 2: result id directly after the opcode word (no result type).
    case spv::OpTypeBool:
    case spv::OpTypeVoid:
    case spv::OpTypeSampler:
    case spv::OpLabel:
    case spv::OpExtInstImport:
    case spv::OpTypePointer:
    case spv::OpTypeRuntimeArray:
    case spv::OpTypeStruct:
    case spv::OpTypeImage:
    case spv::OpTypeSampledImage:
    case spv::OpTypeInt:
    case spv::OpTypeFloat:
    case spv::OpTypeArray:
    case spv::OpTypeVector:
    case spv::OpTypeFunction:
    case spv::OpString: {
      WriteWordCountAndOpcode(Inst);
      WriteResultID(Inst);
      for (uint32_t i = 0; i < Ops.size(); i++) {
        WriteOperand(Ops[i]);
      }
      break;
    }
    // Group 3: result type (Ops[0]), then result id, then the rest.
    case spv::OpFunction:
    case spv::OpFunctionParameter:
    case spv::OpAccessChain:
    case spv::OpPtrAccessChain:
    case spv::OpInBoundsAccessChain:
    case spv::OpUConvert:
    case spv::OpSConvert:
    case spv::OpConvertFToU:
    case spv::OpConvertFToS:
    case spv::OpConvertUToF:
    case spv::OpConvertSToF:
    case spv::OpFConvert:
    case spv::OpConvertPtrToU:
    case spv::OpConvertUToPtr:
    case spv::OpBitcast:
    case spv::OpFNegate:
    case spv::OpIAdd:
    case spv::OpIAddCarry:
    case spv::OpFAdd:
    case spv::OpISub:
    case spv::OpISubBorrow:
    case spv::OpFSub:
    case spv::OpIMul:
    case spv::OpFMul:
    case spv::OpUDiv:
    case spv::OpSDiv:
    case spv::OpFDiv:
    case spv::OpUMod:
    case spv::OpSRem:
    case spv::OpFRem:
    case spv::OpUMulExtended:
    case spv::OpSMulExtended:
    case spv::OpBitwiseOr:
    case spv::OpBitwiseXor:
    case spv::OpBitwiseAnd:
    case spv::OpNot:
    case spv::OpShiftLeftLogical:
    case spv::OpShiftRightLogical:
    case spv::OpShiftRightArithmetic:
    case spv::OpBitCount:
    case spv::OpCompositeConstruct:
    case spv::OpCompositeExtract:
    case spv::OpVectorExtractDynamic:
    case spv::OpCompositeInsert:
    case spv::OpCopyObject:
    case spv::OpVectorInsertDynamic:
    case spv::OpVectorShuffle:
    case spv::OpIEqual:
    case spv::OpINotEqual:
    case spv::OpUGreaterThan:
    case spv::OpUGreaterThanEqual:
    case spv::OpULessThan:
    case spv::OpULessThanEqual:
    case spv::OpSGreaterThan:
    case spv::OpSGreaterThanEqual:
    case spv::OpSLessThan:
    case spv::OpSLessThanEqual:
    case spv::OpFOrdEqual:
    case spv::OpFOrdGreaterThan:
    case spv::OpFOrdGreaterThanEqual:
    case spv::OpFOrdLessThan:
    case spv::OpFOrdLessThanEqual:
    case spv::OpFOrdNotEqual:
    case spv::OpFUnordEqual:
    case spv::OpFUnordGreaterThan:
    case spv::OpFUnordGreaterThanEqual:
    case spv::OpFUnordLessThan:
    case spv::OpFUnordLessThanEqual:
    case spv::OpFUnordNotEqual:
    case spv::OpExtInst:
    case spv::OpIsInf:
    case spv::OpIsNan:
    case spv::OpAny:
    case spv::OpAll:
    case spv::OpUndef:
    case spv::OpConstantNull:
    case spv::OpLogicalOr:
    case spv::OpLogicalAnd:
    case spv::OpLogicalNot:
    case spv::OpLogicalNotEqual:
    case spv::OpConstantComposite:
    case spv::OpSpecConstantComposite:
    case spv::OpConstantTrue:
    case spv::OpConstantFalse:
    case spv::OpConstant:
    case spv::OpSpecConstant:
    case spv::OpVariable:
    case spv::OpFunctionCall:
    case spv::OpSampledImage:
    case spv::OpImageFetch:
    case spv::OpImageRead:
    case spv::OpImageSampleExplicitLod:
    case spv::OpImageQuerySize:
    case spv::OpImageQuerySizeLod:
    case spv::OpSelect:
    case spv::OpPhi:
    case spv::OpLoad:
    case spv::OpAtomicLoad:
    case spv::OpAtomicIAdd:
    case spv::OpAtomicISub:
    case spv::OpAtomicExchange:
    case spv::OpAtomicIIncrement:
    case spv::OpAtomicIDecrement:
    case spv::OpAtomicCompareExchange:
    case spv::OpAtomicUMin:
    case spv::OpAtomicSMin:
    case spv::OpAtomicUMax:
    case spv::OpAtomicSMax:
    case spv::OpAtomicAnd:
    case spv::OpAtomicOr:
    case spv::OpAtomicXor:
    case spv::OpDot:
    case spv::OpGroupNonUniformAll:
    case spv::OpGroupNonUniformAny:
    case spv::OpGroupNonUniformBroadcast:
    case spv::OpGroupNonUniformIAdd:
    case spv::OpGroupNonUniformFAdd:
    case spv::OpGroupNonUniformSMin:
    case spv::OpGroupNonUniformUMin:
    case spv::OpGroupNonUniformFMin:
    case spv::OpGroupNonUniformSMax:
    case spv::OpGroupNonUniformUMax:
    case spv::OpGroupNonUniformFMax: {
      WriteWordCountAndOpcode(Inst);
      WriteOperand(Ops[0]); // Result type id.
      WriteResultID(Inst);
      for (uint32_t i = 1; i < Ops.size(); i++) {
        WriteOperand(Ops[i]);
      }
      break;
    }
    }
  }
}
Alan Baker9bf93fb2018-08-28 16:59:26 -04005208
alan-bakerb6b09dc2018-11-08 16:59:28 -05005209bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
Alan Baker9bf93fb2018-08-28 16:59:26 -04005210 switch (type->getTypeID()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005211 case Type::HalfTyID:
5212 case Type::FloatTyID:
5213 case Type::DoubleTyID:
5214 case Type::IntegerTyID:
James Price59a1c752020-04-23 23:06:16 -04005215 case Type::FixedVectorTyID:
alan-bakerb6b09dc2018-11-08 16:59:28 -05005216 return true;
5217 case Type::PointerTyID: {
5218 const PointerType *pointer_type = cast<PointerType>(type);
5219 if (pointer_type->getPointerAddressSpace() !=
5220 AddressSpace::UniformConstant) {
5221 auto pointee_type = pointer_type->getPointerElementType();
5222 if (pointee_type->isStructTy() &&
5223 cast<StructType>(pointee_type)->isOpaque()) {
5224 // Images and samplers are not nullable.
5225 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005226 }
Alan Baker9bf93fb2018-08-28 16:59:26 -04005227 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05005228 return true;
5229 }
5230 case Type::ArrayTyID:
alan-baker8eb435a2020-04-08 00:42:06 -04005231 return IsTypeNullable(type->getArrayElementType());
alan-bakerb6b09dc2018-11-08 16:59:28 -05005232 case Type::StructTyID: {
5233 const StructType *struct_type = cast<StructType>(type);
5234 // Images and samplers are not nullable.
5235 if (struct_type->isOpaque())
Alan Baker9bf93fb2018-08-28 16:59:26 -04005236 return false;
alan-bakerb6b09dc2018-11-08 16:59:28 -05005237 for (const auto element : struct_type->elements()) {
5238 if (!IsTypeNullable(element))
5239 return false;
5240 }
5241 return true;
5242 }
5243 default:
5244 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005245 }
5246}
Alan Bakerfcda9482018-10-02 17:09:59 -04005247
// Reads the module-level metadata produced by the UBO type-remapping pass and
// fills RemappedUBOTypeOffsets / RemappedUBOTypeSizes so later queries
// (GetTypeSizeInBits, GetTypeAllocSize, GetExplicitLayoutStructMemberOffset)
// can report the remapped layout instead of the default data layout.
void SPIRVProducerPass::PopulateUBOTypeMaps() {
  if (auto *offsets_md =
          module->getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
    // Metadata is stored as key-value pair operands. The first element of each
    // operand is the type and the second is a vector of offsets.
    for (const auto *operand : offsets_md->operands()) {
      const auto *pair = cast<MDTuple>(operand);
      auto *type =
          cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
      const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
      std::vector<uint32_t> offsets;
      for (const Metadata *offset_md : offset_vector->operands()) {
        const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
        offsets.push_back(static_cast<uint32_t>(
            cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
      }
      RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
    }
  }

  if (auto *sizes_md =
          module->getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
    // Metadata is stored as key-value pair operands. The first element of each
    // operand is the type and the second is a triple of sizes: type size in
    // bits, store size and alloc size.
    for (const auto *operand : sizes_md->operands()) {
      const auto *pair = cast<MDTuple>(operand);
      auto *type =
          cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
      const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
      uint64_t type_size_in_bits =
          cast<ConstantInt>(
              cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
              ->getZExtValue();
      uint64_t type_store_size =
          cast<ConstantInt>(
              cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
              ->getZExtValue();
      uint64_t type_alloc_size =
          cast<ConstantInt>(
              cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
              ->getZExtValue();
      RemappedUBOTypeSizes.insert(std::make_pair(
          type, std::make_tuple(type_size_in_bits, type_store_size,
                                type_alloc_size)));
    }
  }
}
5296
5297uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
5298 const DataLayout &DL) {
5299 auto iter = RemappedUBOTypeSizes.find(type);
5300 if (iter != RemappedUBOTypeSizes.end()) {
5301 return std::get<0>(iter->second);
5302 }
5303
5304 return DL.getTypeSizeInBits(type);
5305}
5306
Alan Bakerfcda9482018-10-02 17:09:59 -04005307uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
5308 auto iter = RemappedUBOTypeSizes.find(type);
5309 if (iter != RemappedUBOTypeSizes.end()) {
5310 return std::get<2>(iter->second);
5311 }
5312
5313 return DL.getTypeAllocSize(type);
5314}
alan-baker5b86ed72019-02-15 08:26:50 -05005315
Kévin Petitbbbda972020-03-03 19:16:31 +00005316uint32_t SPIRVProducerPass::GetExplicitLayoutStructMemberOffset(
5317 StructType *type, unsigned member, const DataLayout &DL) {
5318 const auto StructLayout = DL.getStructLayout(type);
5319 // Search for the correct offsets if this type was remapped.
5320 std::vector<uint32_t> *offsets = nullptr;
5321 auto iter = RemappedUBOTypeOffsets.find(type);
5322 if (iter != RemappedUBOTypeOffsets.end()) {
5323 offsets = &iter->second;
5324 }
5325 auto ByteOffset =
5326 static_cast<uint32_t>(StructLayout->getElementOffset(member));
5327 if (offsets) {
5328 ByteOffset = (*offsets)[member];
5329 }
5330
5331 return ByteOffset;
5332}
5333
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005334void SPIRVProducerPass::setVariablePointersCapabilities(
5335 unsigned address_space) {
alan-baker5b86ed72019-02-15 08:26:50 -05005336 if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
SJW01901d92020-05-21 08:58:31 -05005337 setVariablePointersStorageBuffer();
alan-baker5b86ed72019-02-15 08:26:50 -05005338 } else {
SJW01901d92020-05-21 08:58:31 -05005339 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05005340 }
5341}
5342
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005343Value *SPIRVProducerPass::GetBasePointer(Value *v) {
alan-baker5b86ed72019-02-15 08:26:50 -05005344 if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
5345 return GetBasePointer(gep->getPointerOperand());
5346 }
5347
5348 // Conservatively return |v|.
5349 return v;
5350}
5351
5352bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
5353 if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
5354 if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
alan-baker7506abb2020-09-10 15:02:55 -04005355 const auto &lhs_func_info =
5356 Builtins::Lookup(lhs_call->getCalledFunction());
5357 const auto &rhs_func_info =
5358 Builtins::Lookup(rhs_call->getCalledFunction());
SJW61531372020-06-09 07:31:08 -05005359 if (lhs_func_info.getType() == Builtins::kClspvResource &&
5360 rhs_func_info.getType() == Builtins::kClspvResource) {
alan-baker5b86ed72019-02-15 08:26:50 -05005361 // For resource accessors, match descriptor set and binding.
5362 if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
5363 lhs_call->getOperand(1) == rhs_call->getOperand(1))
5364 return true;
SJW61531372020-06-09 07:31:08 -05005365 } else if (lhs_func_info.getType() == Builtins::kClspvLocal &&
5366 rhs_func_info.getType() == Builtins::kClspvLocal) {
alan-baker5b86ed72019-02-15 08:26:50 -05005367 // For workgroup resources, match spec id.
5368 if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
5369 return true;
5370 }
5371 }
5372 }
5373
5374 return false;
5375}
5376
// Returns true when a storage-buffer pointer produced by |inst| (a select or
// phi) is guaranteed to pick between pointers into the same object — the
// condition Vulkan requires without full variable-pointers support. Null
// constants (and, under -hack-undef, undef) are treated as compatible with
// any base; otherwise the bases must be identical or name the same resource.
bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
  assert(inst->getType()->isPointerTy());
  assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
         spv::StorageClassStorageBuffer);
  const bool hack_undef = clspv::Option::HackUndef();
  if (auto *select = dyn_cast<SelectInst>(inst)) {
    auto *true_base = GetBasePointer(select->getTrueValue());
    auto *false_base = GetBasePointer(select->getFalseValue());

    if (true_base == false_base)
      return true;

    // If either the true or false operand is a null, then we satisfy the same
    // object constraint.
    if (auto *true_cst = dyn_cast<Constant>(true_base)) {
      if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
        return true;
    }

    if (auto *false_cst = dyn_cast<Constant>(false_base)) {
      if (false_cst->isNullValue() ||
          (hack_undef && isa<UndefValue>(false_base)))
        return true;
    }

    if (sameResource(true_base, false_base))
      return true;
  } else if (auto *phi = dyn_cast<PHINode>(inst)) {
    // |value| is the first non-null base seen; every later incoming base must
    // match it (or be null/undef, or name the same resource).
    Value *value = nullptr;
    bool ok = true;
    for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
      auto *base = GetBasePointer(phi->getIncomingValue(i));
      // Null values satisfy the constraint of selecting from the
      // same object.
      if (!value) {
        if (auto *cst = dyn_cast<Constant>(base)) {
          if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
            value = base;
        } else {
          value = base;
        }
      } else if (base != value) {
        if (auto *base_cst = dyn_cast<Constant>(base)) {
          if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
            continue;
        }

        if (sameResource(value, base))
          continue;

        // Values don't represent the same base.
        ok = false;
      }
    }

    return ok;
  }

  // Conservatively return false.
  return false;
}
alan-bakere9308012019-03-15 10:25:13 -04005438
// Returns true when global-pointer argument |Arg| can be reached, through any
// chain of calls and pointer-typed operands, from a resource accessor whose
// coherent operand (arg 5) is set. Used to decide whether the argument needs
// a Coherent decoration. Performs a worklist traversal with a visited set to
// stay linear and terminate on cycles.
bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
  if (!Arg.getType()->isPointerTy() ||
      Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
    // Only SSBOs need to be annotated as coherent.
    return false;
  }

  DenseSet<Value *> visited;
  std::vector<Value *> stack;
  // Seed the worklist with the value passed for |Arg| at every call site of
  // its enclosing function.
  for (auto *U : Arg.getParent()->users()) {
    if (auto *call = dyn_cast<CallInst>(U)) {
      stack.push_back(call->getOperand(Arg.getArgNo()));
    }
  }

  while (!stack.empty()) {
    Value *v = stack.back();
    stack.pop_back();

    if (!visited.insert(v).second)
      continue;

    auto *resource_call = dyn_cast<CallInst>(v);
    if (resource_call &&
        Builtins::Lookup(resource_call->getCalledFunction()).getType() ==
            Builtins::kClspvResource) {
      // If this is a resource accessor function, check if the coherent operand
      // is set.
      const auto coherent =
          unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
                       ->getZExtValue());
      if (coherent == 1)
        return true;
    } else if (auto *arg = dyn_cast<Argument>(v)) {
      // If this is a function argument, trace through its callers.
      for (auto U : arg->getParent()->users()) {
        if (auto *call = dyn_cast<CallInst>(U)) {
          stack.push_back(call->getOperand(arg->getArgNo()));
        }
      }
    } else if (auto *user = dyn_cast<User>(v)) {
      // If this is a user, traverse all operands that could lead to resource
      // variables.
      for (unsigned i = 0; i != user->getNumOperands(); ++i) {
        Value *operand = user->getOperand(i);
        if (operand->getType()->isPointerTy() &&
            operand->getType()->getPointerAddressSpace() ==
                clspv::AddressSpace::Global) {
          stack.push_back(operand);
        }
      }
    }
  }

  // No coherent resource variables encountered.
  return false;
}
alan-baker06cad652019-12-03 17:56:47 -05005496
// Computes the merge/continue block maps (MergeBlocks, ContinueBlocks) that
// later drive OpLoopMerge / OpSelectionMerge emission. Relies on the
// StructurizeCFG pass having already shaped every function's CFG into
// single-entry/single-exit regions.
void SPIRVProducerPass::PopulateStructuredCFGMaps() {
  // First, track loop merges and continues.
  DenseSet<BasicBlock *> LoopMergesAndContinues;
  for (auto &F : *module) {
    if (F.isDeclaration())
      continue;

    DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
    const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>(F).getLoopInfo();
    std::deque<BasicBlock *> order;
    DenseSet<BasicBlock *> visited;
    // Walk blocks in structured order so loop headers are seen before their
    // bodies.
    clspv::ComputeStructuredOrder(&*F.begin(), &DT, LI, &order, &visited);

    for (auto BB : order) {
      auto terminator = BB->getTerminator();
      auto branch = dyn_cast<BranchInst>(terminator);
      if (LI.isLoopHeader(BB)) {
        auto L = LI.getLoopFor(BB);
        BasicBlock *ContinueBB = nullptr;
        BasicBlock *MergeBB = nullptr;

        // The loop's merge block is its unique exit.
        MergeBB = L->getExitBlock();
        if (!MergeBB) {
          // StructurizeCFG pass converts CFG into triangle shape and the cfg
          // has regions with single entry/exit. As a result, loop should not
          // have multiple exits.
          llvm_unreachable("Loop has multiple exits???");
        }

        if (L->isLoopLatch(BB)) {
          // Single-block loop: the header is its own continue target.
          ContinueBB = BB;
        } else {
          // From SPIR-V spec 2.11, Continue Target must dominate that back-edge
          // block.
          BasicBlock *Header = L->getHeader();
          BasicBlock *Latch = L->getLoopLatch();
          for (auto *loop_block : L->blocks()) {
            if (loop_block == Header) {
              continue;
            }

            // Check whether block dominates block with back-edge.
            // The loop latch is the single block with a back-edge. If it was
            // possible, StructurizeCFG made the loop conform to this
            // requirement, otherwise |Latch| is a nullptr.
            if (DT.dominates(loop_block, Latch)) {
              ContinueBB = loop_block;
            }
          }

          if (!ContinueBB) {
            llvm_unreachable("Wrong continue block from loop");
          }
        }

        // Record the continue and merge blocks.
        MergeBlocks[BB] = MergeBB;
        ContinueBlocks[BB] = ContinueBB;
        LoopMergesAndContinues.insert(MergeBB);
        LoopMergesAndContinues.insert(ContinueBB);
      } else if (branch && branch->isConditional()) {
        // Determine whether this conditional branch is a loop back-edge in
        // any enclosing loop; back-edges never get a selection merge.
        auto L = LI.getLoopFor(BB);
        bool HasBackedge = false;
        while (L && !HasBackedge) {
          if (L->isLoopLatch(BB)) {
            HasBackedge = true;
          }
          L = L->getParentLoop();
        }

        if (!HasBackedge) {
          // Only need a merge if the branch doesn't include a loop break or
          // continue.
          auto true_bb = branch->getSuccessor(0);
          auto false_bb = branch->getSuccessor(1);
          if (!LoopMergesAndContinues.count(true_bb) &&
              !LoopMergesAndContinues.count(false_bb)) {
            // StructurizeCFG pass already manipulated CFG. Just use false block
            // of branch instruction as merge block.
            MergeBlocks[BB] = false_bb;
          }
        }
      }
    }
  }
}
alan-baker86ce19c2020-08-05 13:09:19 -04005583
5584SPIRVID SPIRVProducerPass::getReflectionImport() {
5585 if (!ReflectionID.isValid()) {
5586 addSPIRVInst<kExtensions>(spv::OpExtension, "SPV_KHR_non_semantic_info");
5587 ReflectionID = addSPIRVInst<kImports>(spv::OpExtInstImport,
5588 "NonSemantic.ClspvReflection.1");
5589 }
5590 return ReflectionID;
5591}
5592
// Emits all non-semantic reflection instructions for the module: per-kernel
// info first, then push-constant and spec-constant descriptions.
void SPIRVProducerPass::GenerateReflection() {
  GenerateKernelReflection();
  GeneratePushConstantReflection();
  GenerateSpecConstantReflection();
}
5598
// Emits one reflection OpExtInst (offset, size) per module-scope push
// constant, identified by the metadata attached to the push-constant global
// variable. Kernel-argument push constants are described elsewhere and are
// skipped here.
void SPIRVProducerPass::GeneratePushConstantReflection() {
  if (auto GV = module->getGlobalVariable(clspv::PushConstantsVariableName())) {
    auto const &DL = module->getDataLayout();
    auto MD = GV->getMetadata(clspv::PushConstantsMetadataName());
    auto STy = cast<StructType>(GV->getValueType());

    for (unsigned i = 0; i < STy->getNumElements(); i++) {
      // Each metadata operand identifies which push constant member i is.
      auto pc = static_cast<clspv::PushConstant>(
          mdconst::extract<ConstantInt>(MD->getOperand(i))->getZExtValue());
      if (pc == PushConstant::KernelArgument)
        continue;

      auto memberType = STy->getElementType(i);
      auto offset = GetExplicitLayoutStructMemberOffset(STy, i, DL);
#ifndef NDEBUG
      // Debug-only: check the member obeys push-constant layout rules.
      unsigned previousOffset = 0;
      if (i > 0) {
        previousOffset = GetExplicitLayoutStructMemberOffset(STy, i - 1, DL);
      }
      assert(isValidExplicitLayout(*module, STy, i,
                                   spv::StorageClassPushConstant, offset,
                                   previousOffset));
#endif

      // Map the push-constant kind to its reflection instruction.
      reflection::ExtInst pc_inst = reflection::ExtInstMax;
      switch (pc) {
      case PushConstant::GlobalOffset:
        pc_inst = reflection::ExtInstPushConstantGlobalOffset;
        break;
      case PushConstant::EnqueuedLocalSize:
        pc_inst = reflection::ExtInstPushConstantEnqueuedLocalSize;
        break;
      case PushConstant::GlobalSize:
        pc_inst = reflection::ExtInstPushConstantGlobalSize;
        break;
      case PushConstant::RegionOffset:
        pc_inst = reflection::ExtInstPushConstantRegionOffset;
        break;
      case PushConstant::NumWorkgroups:
        pc_inst = reflection::ExtInstPushConstantNumWorkgroups;
        break;
      case PushConstant::RegionGroupOffset:
        pc_inst = reflection::ExtInstPushConstantRegionGroupOffset;
        break;
      default:
        llvm_unreachable("Unhandled push constant");
        break;
      }

      auto import_id = getReflectionImport();
      // Size is reported in bytes.
      auto size = static_cast<uint32_t>(GetTypeSizeInBits(memberType, DL)) / 8;
      SPIRVOperandVec Ops;
      Ops << getSPIRVType(Type::getVoidTy(module->getContext())) << import_id
          << pc_inst << getSPIRVInt32Constant(offset)
          << getSPIRVInt32Constant(size);
      addSPIRVInst(spv::OpExtInst, Ops);
    }
  }
}
5658
// Emits reflection instructions describing which spec constant ids control
// the workgroup size, the global offset, and the work dimension. Collects
// the ids from the module's spec-constant list first, then emits one
// reflection OpExtInst per fully-populated group. kMax marks "not present".
void SPIRVProducerPass::GenerateSpecConstantReflection() {
  const uint32_t kMax = std::numeric_limits<uint32_t>::max();
  uint32_t wgsize_id[3] = {kMax, kMax, kMax};
  uint32_t global_offset_id[3] = {kMax, kMax, kMax};
  uint32_t work_dim_id = kMax;
  for (auto pair : clspv::GetSpecConstants(module)) {
    auto kind = pair.first;
    auto id = pair.second;

    // Local memory size is only used for kernel arguments.
    if (kind == SpecConstant::kLocalMemorySize)
      continue;

    switch (kind) {
    case SpecConstant::kWorkgroupSizeX:
      wgsize_id[0] = id;
      break;
    case SpecConstant::kWorkgroupSizeY:
      wgsize_id[1] = id;
      break;
    case SpecConstant::kWorkgroupSizeZ:
      wgsize_id[2] = id;
      break;
    case SpecConstant::kGlobalOffsetX:
      global_offset_id[0] = id;
      break;
    case SpecConstant::kGlobalOffsetY:
      global_offset_id[1] = id;
      break;
    case SpecConstant::kGlobalOffsetZ:
      global_offset_id[2] = id;
      break;
    case SpecConstant::kWorkDim:
      work_dim_id = id;
      break;
    default:
      llvm_unreachable("Unhandled spec constant");
    }
  }

  auto import_id = getReflectionImport();
  auto void_id = getSPIRVType(Type::getVoidTy(module->getContext()));
  SPIRVOperandVec Ops;
  if (wgsize_id[0] != kMax) {
    // Workgroup size spec ids come as a complete x/y/z triple or not at all.
    assert(wgsize_id[1] != kMax);
    assert(wgsize_id[2] != kMax);
    Ops.clear();
    Ops << void_id << import_id << reflection::ExtInstSpecConstantWorkgroupSize
        << getSPIRVInt32Constant(wgsize_id[0])
        << getSPIRVInt32Constant(wgsize_id[1])
        << getSPIRVInt32Constant(wgsize_id[2]);
    addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
  }
  if (global_offset_id[0] != kMax) {
    // Global offset spec ids likewise come as a complete triple.
    assert(global_offset_id[1] != kMax);
    assert(global_offset_id[2] != kMax);
    Ops.clear();
    Ops << void_id << import_id << reflection::ExtInstSpecConstantGlobalOffset
        << getSPIRVInt32Constant(global_offset_id[0])
        << getSPIRVInt32Constant(global_offset_id[1])
        << getSPIRVInt32Constant(global_offset_id[2]);
    addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
  }
  if (work_dim_id != kMax) {
    Ops.clear();
    Ops << void_id << import_id << reflection::ExtInstSpecConstantWorkDim
        << getSPIRVInt32Constant(work_dim_id);
    addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
  }
}
5729
5730void SPIRVProducerPass::GenerateKernelReflection() {
5731 const auto &DL = module->getDataLayout();
5732 auto import_id = getReflectionImport();
5733 auto void_id = getSPIRVType(Type::getVoidTy(module->getContext()));
5734
5735 for (auto &F : *module) {
5736 if (F.isDeclaration() || F.getCallingConv() != CallingConv::SPIR_KERNEL) {
5737 continue;
5738 }
5739
5740 // OpString for the kernel name.
5741 auto kernel_name =
5742 addSPIRVInst<kDebug>(spv::OpString, F.getName().str().c_str());
5743
5744 // Kernel declaration
5745 // Ops[0] = void type
5746 // Ops[1] = reflection ext import
5747 // Ops[2] = function id
5748 // Ops[3] = kernel name
5749 SPIRVOperandVec Ops;
5750 Ops << void_id << import_id << reflection::ExtInstKernel << ValueMap[&F]
5751 << kernel_name;
5752 auto kernel_decl = addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
5753
5754 // Generate the required workgroup size property if it was specified.
5755 if (const MDNode *MD = F.getMetadata("reqd_work_group_size")) {
5756 uint32_t CurXDimCst = static_cast<uint32_t>(
5757 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
5758 uint32_t CurYDimCst = static_cast<uint32_t>(
5759 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
5760 uint32_t CurZDimCst = static_cast<uint32_t>(
5761 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
5762
5763 Ops.clear();
5764 Ops << void_id << import_id
5765 << reflection::ExtInstPropertyRequiredWorkgroupSize << kernel_decl
5766 << getSPIRVInt32Constant(CurXDimCst)
5767 << getSPIRVInt32Constant(CurYDimCst)
5768 << getSPIRVInt32Constant(CurZDimCst);
5769 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
5770 }
5771
5772 auto &resource_var_at_index = FunctionToResourceVarsMap[&F];
5773 auto *func_ty = F.getFunctionType();
5774
5775 // If we've clustered POD arguments, then argument details are in metadata.
5776 // If an argument maps to a resource variable, then get descriptor set and
5777 // binding from the resource variable. Other info comes from the metadata.
5778 const auto *arg_map = F.getMetadata(clspv::KernelArgMapMetadataName());
5779 auto local_spec_id_md =
5780 module->getNamedMetadata(clspv::LocalSpecIdMetadataName());
5781 if (arg_map) {
5782 for (const auto &arg : arg_map->operands()) {
5783 const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
5784 assert(arg_node->getNumOperands() == 6);
5785 const auto name =
5786 dyn_cast<MDString>(arg_node->getOperand(0))->getString();
5787 const auto old_index =
5788 dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
5789 // Remapped argument index
5790 const int new_index = static_cast<int>(
5791 dyn_extract<ConstantInt>(arg_node->getOperand(2))->getSExtValue());
5792 const auto offset =
5793 dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
5794 const auto size =
5795 dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
5796 const auto argKind = clspv::GetArgKindFromName(
5797 dyn_cast<MDString>(arg_node->getOperand(5))->getString().str());
5798
5799 // If this is a local memory argument, find the right spec id for this
5800 // argument.
5801 int64_t spec_id = -1;
5802 if (argKind == clspv::ArgKind::Local) {
5803 for (auto spec_id_arg : local_spec_id_md->operands()) {
5804 if ((&F == dyn_cast<Function>(
5805 dyn_cast<ValueAsMetadata>(spec_id_arg->getOperand(0))
5806 ->getValue())) &&
5807 (static_cast<uint64_t>(new_index) ==
5808 mdconst::extract<ConstantInt>(spec_id_arg->getOperand(1))
5809 ->getZExtValue())) {
5810 spec_id =
5811 mdconst::extract<ConstantInt>(spec_id_arg->getOperand(2))
5812 ->getSExtValue();
5813 break;
5814 }
5815 }
5816 }
5817
5818 // Generate the specific argument instruction.
5819 const uint32_t ordinal = static_cast<uint32_t>(old_index);
5820 const uint32_t arg_offset = static_cast<uint32_t>(offset);
5821 const uint32_t arg_size = static_cast<uint32_t>(size);
5822 uint32_t elem_size = 0;
5823 uint32_t descriptor_set = 0;
5824 uint32_t binding = 0;
5825 if (spec_id > 0) {
5826 elem_size = static_cast<uint32_t>(
5827 GetTypeAllocSize(func_ty->getParamType(unsigned(new_index))
5828 ->getPointerElementType(),
5829 DL));
5830 } else if (new_index >= 0) {
5831 auto *info = resource_var_at_index[new_index];
5832 assert(info);
5833 descriptor_set = info->descriptor_set;
5834 binding = info->binding;
5835 }
5836 AddArgumentReflection(kernel_decl, name.str(), argKind, ordinal,
5837 descriptor_set, binding, arg_offset, arg_size,
5838 static_cast<uint32_t>(spec_id), elem_size);
5839 }
5840 } else {
5841 // There is no argument map.
5842 // Take descriptor info from the resource variable calls.
5843 // Take argument name and size from the arguments list.
5844
5845 SmallVector<Argument *, 4> arguments;
5846 for (auto &arg : F.args()) {
5847 arguments.push_back(&arg);
5848 }
5849
5850 unsigned arg_index = 0;
5851 for (auto *info : resource_var_at_index) {
5852 if (info) {
5853 auto arg = arguments[arg_index];
5854 unsigned arg_size = 0;
5855 if (info->arg_kind == clspv::ArgKind::Pod ||
5856 info->arg_kind == clspv::ArgKind::PodUBO ||
5857 info->arg_kind == clspv::ArgKind::PodPushConstant) {
5858 arg_size =
5859 static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
5860 }
5861
5862 // Local pointer arguments are unused in this case.
5863 // offset, spec_id and elem_size always 0.
5864 AddArgumentReflection(kernel_decl, arg->getName().str(),
5865 info->arg_kind, arg_index, info->descriptor_set,
5866 info->binding, 0, arg_size, 0, 0);
5867 }
5868 arg_index++;
5869 }
5870 // Generate mappings for pointer-to-local arguments.
5871 for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
5872 Argument *arg = arguments[arg_index];
5873 auto where = LocalArgSpecIds.find(arg);
5874 if (where != LocalArgSpecIds.end()) {
5875 auto &local_arg_info = LocalSpecIdInfoMap[where->second];
5876
5877 // descriptor_set, binding, offset and size are always 0.
5878 AddArgumentReflection(kernel_decl, arg->getName().str(),
5879 ArgKind::Local, arg_index, 0, 0, 0, 0,
5880 static_cast<uint32_t>(local_arg_info.spec_id),
5881 static_cast<uint32_t>(GetTypeAllocSize(
5882 local_arg_info.elem_type, DL)));
5883 }
5884 }
5885 }
5886 }
5887}
5888
5889void SPIRVProducerPass::AddArgumentReflection(
5890 SPIRVID kernel_decl, const std::string &name, clspv::ArgKind arg_kind,
5891 uint32_t ordinal, uint32_t descriptor_set, uint32_t binding,
5892 uint32_t offset, uint32_t size, uint32_t spec_id, uint32_t elem_size) {
5893 // Generate ArgumentInfo for this argument.
5894 // TODO: generate remaining optional operands.
5895 auto import_id = getReflectionImport();
5896 auto arg_name = addSPIRVInst<kDebug>(spv::OpString, name.c_str());
5897 auto void_id = getSPIRVType(Type::getVoidTy(module->getContext()));
5898 SPIRVOperandVec Ops;
5899 Ops << void_id << import_id << reflection::ExtInstArgumentInfo << arg_name;
5900 auto arg_info = addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
5901
5902 Ops.clear();
5903 Ops << void_id << import_id;
5904 reflection::ExtInst ext_inst = reflection::ExtInstMax;
5905 // Determine the extended instruction.
5906 switch (arg_kind) {
5907 case clspv::ArgKind::Buffer:
5908 ext_inst = reflection::ExtInstArgumentStorageBuffer;
5909 break;
5910 case clspv::ArgKind::BufferUBO:
5911 ext_inst = reflection::ExtInstArgumentUniform;
5912 break;
5913 case clspv::ArgKind::Local:
5914 ext_inst = reflection::ExtInstArgumentWorkgroup;
5915 break;
5916 case clspv::ArgKind::Pod:
5917 ext_inst = reflection::ExtInstArgumentPodStorageBuffer;
5918 break;
5919 case clspv::ArgKind::PodUBO:
5920 ext_inst = reflection::ExtInstArgumentPodUniform;
5921 break;
5922 case clspv::ArgKind::PodPushConstant:
5923 ext_inst = reflection::ExtInstArgumentPodPushConstant;
5924 break;
alan-bakerf6bc8252020-09-23 14:58:55 -04005925 case clspv::ArgKind::SampledImage:
alan-baker86ce19c2020-08-05 13:09:19 -04005926 ext_inst = reflection::ExtInstArgumentSampledImage;
5927 break;
alan-bakerf6bc8252020-09-23 14:58:55 -04005928 case clspv::ArgKind::StorageImage:
alan-baker86ce19c2020-08-05 13:09:19 -04005929 ext_inst = reflection::ExtInstArgumentStorageImage;
5930 break;
5931 case clspv::ArgKind::Sampler:
5932 ext_inst = reflection::ExtInstArgumentSampler;
5933 break;
5934 default:
5935 llvm_unreachable("Unhandled argument reflection");
5936 break;
5937 }
5938 Ops << ext_inst << kernel_decl << getSPIRVInt32Constant(ordinal);
5939
5940 // Add descriptor set and binding for applicable arguments.
5941 switch (arg_kind) {
5942 case clspv::ArgKind::Buffer:
5943 case clspv::ArgKind::BufferUBO:
5944 case clspv::ArgKind::Pod:
5945 case clspv::ArgKind::PodUBO:
alan-bakerf6bc8252020-09-23 14:58:55 -04005946 case clspv::ArgKind::SampledImage:
5947 case clspv::ArgKind::StorageImage:
alan-baker86ce19c2020-08-05 13:09:19 -04005948 case clspv::ArgKind::Sampler:
5949 Ops << getSPIRVInt32Constant(descriptor_set)
5950 << getSPIRVInt32Constant(binding);
5951 break;
5952 default:
5953 break;
5954 }
5955
5956 // Add remaining operands for arguments.
5957 switch (arg_kind) {
5958 case clspv::ArgKind::Local:
5959 Ops << getSPIRVInt32Constant(spec_id) << getSPIRVInt32Constant(elem_size);
5960 break;
5961 case clspv::ArgKind::Pod:
5962 case clspv::ArgKind::PodUBO:
5963 case clspv::ArgKind::PodPushConstant:
5964 Ops << getSPIRVInt32Constant(offset) << getSPIRVInt32Constant(size);
5965 break;
5966 default:
5967 break;
5968 }
5969 Ops << arg_info;
5970 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
5971}