// Copyright 2017 The Clspv Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifdef _MSC_VER
#pragma warning(push, 0)
#endif

#include <cassert>
#include <cstring>
#include <iomanip>
#include <list>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <tuple>
#include <unordered_set>
#include <utility>

#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/UniqueVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Cloning.h"

// enable spv::HasResultAndType
#define SPV_ENABLE_UTILITY_CODE
#include "spirv/unified1/spirv.hpp"

#include "clspv/AddressSpace.h"
#include "clspv/Option.h"
#include "clspv/PushConstant.h"
#include "clspv/SpecConstant.h"
#include "clspv/spirv_c_strings.hpp"
#include "clspv/spirv_glsl.hpp"
#include "clspv/spirv_reflection.hpp"

#include "ArgKind.h"
#include "Builtins.h"
#include "ComputeStructuredOrder.h"
#include "ConstantEmitter.h"
#include "Constants.h"
#include "DescriptorCounter.h"
#include "Layout.h"
#include "NormalizeGlobalVariable.h"
#include "Passes.h"
#include "SpecConstant.h"
#include "Types.h"

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

using namespace llvm;
using namespace clspv;
using namespace clspv::Builtins;
using namespace clspv::Option;
using namespace mdconst;

namespace {

cl::opt<bool> ShowResourceVars("show-rv", cl::init(false), cl::Hidden,
                               cl::desc("Show resource variable creation"));

cl::opt<bool>
    ShowProducerIR("show-producer-ir", cl::init(false), cl::ReallyHidden,
                   cl::desc("Dump the IR at the start of SPIRVProducer"));

// These hacks exist to help transition code generation algorithms
// without making huge noise in detailed test output.
const bool Hack_generate_runtime_array_stride_early = true;

// The value of 1/pi. This value is from MSDN
// https://msdn.microsoft.com/en-us/library/4hwaceh6.aspx
const double kOneOverPi = 0.318309886183790671538;
const glsl::ExtInst kGlslExtInstBad = static_cast<glsl::ExtInst>(0);

// SPIRV Module Sections (per 2.4 of the SPIR-V spec)
// These are used to collect SPIRVInstructions by type on-the-fly.
enum SPIRVSection {
  kCapabilities,
  kExtensions,
  kImports,
  kMemoryModel,
  kEntryPoints,
  kExecutionModes,

  kDebug,
  kAnnotations,

  kTypes,
  kConstants = kTypes,
  kGlobalVariables,

  kFunctions,

  // This is not a section of the SPIR-V spec and should always immediately
  // precede kSectionCount. It is a convenient place for the embedded
  // reflection data.
  kReflection,
  kSectionCount
};

class SPIRVID {
  uint32_t id;

public:
  SPIRVID(uint32_t _id = 0) : id(_id) {}
  uint32_t get() const { return id; }
  bool isValid() const { return id != 0; }
  bool operator==(const SPIRVID &that) const { return id == that.id; }
  bool operator<(const SPIRVID &that) const { return id < that.id; }
};

enum SPIRVOperandType { NUMBERID, LITERAL_WORD, LITERAL_DWORD, LITERAL_STRING };

struct SPIRVOperand {
  explicit SPIRVOperand(SPIRVOperandType Ty, uint32_t Num) : Type(Ty) {
    LiteralNum[0] = Num;
  }
  explicit SPIRVOperand(SPIRVOperandType Ty, const char *Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(SPIRVOperandType Ty, StringRef Str)
      : Type(Ty), LiteralStr(Str) {}
  explicit SPIRVOperand(ArrayRef<uint32_t> NumVec) {
    auto sz = NumVec.size();
    assert(sz >= 1 && sz <= 2);
    Type = sz == 1 ? LITERAL_WORD : LITERAL_DWORD;
    LiteralNum[0] = NumVec[0];
    if (sz == 2) {
      LiteralNum[1] = NumVec[1];
    }
  }

  SPIRVOperandType getType() const { return Type; }
  uint32_t getNumID() const { return LiteralNum[0]; }
  std::string getLiteralStr() const { return LiteralStr; }
  const uint32_t *getLiteralNum() const { return LiteralNum; }

  uint32_t GetNumWords() const {
    switch (Type) {
    case NUMBERID:
    case LITERAL_WORD:
      return 1;
    case LITERAL_DWORD:
      return 2;
    case LITERAL_STRING:
      // Account for the terminating null character.
      return uint32_t((LiteralStr.size() + 4) / 4);
    }
    llvm_unreachable("Unhandled case in SPIRVOperand::GetNumWords()");
  }

private:
  SPIRVOperandType Type;
  std::string LiteralStr;
  uint32_t LiteralNum[2];
};
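
// Worked example for GetNumWords() above (illustrative, not from the original
// source): the extended instruction set name "GLSL.std.450" has 12 characters,
// so as a LITERAL_STRING operand it occupies (12 + 4) / 4 = 4 words, which
// covers the terminating null plus padding out to a whole 32-bit word.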

typedef SmallVector<SPIRVOperand, 4> SPIRVOperandVec;

struct SPIRVInstruction {
  // Primary constructor must have Opcode, initializes WordCount based on ResID.
  SPIRVInstruction(spv::Op Opc, SPIRVID ResID = 0)
      : Opcode(static_cast<uint16_t>(Opc)) {
    setResult(ResID);
  }

  // Creates an instruction with an opcode and no result ID, and with the given
  // operands. This calls the primary constructor to initialize Opcode and
  // WordCount. Takes ownership of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVOperandVec &Ops) : SPIRVInstruction(Opc) {
    setOperands(Ops);
  }
  // Creates an instruction with an opcode and a result ID, and with the given
  // operands. This calls the primary constructor to initialize Opcode and
  // WordCount. Takes ownership of the operands and clears |Ops|.
  SPIRVInstruction(spv::Op Opc, SPIRVID ResID, SPIRVOperandVec &Ops)
      : SPIRVInstruction(Opc, ResID) {
    setOperands(Ops);
  }

  uint32_t getWordCount() const { return WordCount; }
  uint16_t getOpcode() const { return Opcode; }
  SPIRVID getResultID() const { return ResultID; }
  const SPIRVOperandVec &getOperands() const { return Operands; }

private:
  void setResult(SPIRVID ResID = 0) {
    WordCount = 1 + (ResID.isValid() ? 1 : 0);
    ResultID = ResID;
  }

  void setOperands(SPIRVOperandVec &Ops) {
    assert(Operands.empty());
    Operands = std::move(Ops);
    for (auto &opd : Operands) {
      WordCount += uint16_t(opd.GetNumWords());
    }
  }

private:
  uint32_t WordCount; // Check the 16-bit bound at code generation time.
  uint16_t Opcode;
  SPIRVID ResultID;
  SPIRVOperandVec Operands;
};

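// Illustrative word count (a reading aid, not part of the original source):
// an OpTypeVector instruction has a result ID plus two one-word operands (the
// component type ID and the component count), so setResult() and setOperands()
// leave it with WordCount = 1 (opcode/size word) + 1 + 1 + 1 = 4.
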
struct SPIRVProducerPass final : public ModulePass {
  typedef DenseMap<Type *, SPIRVID> TypeMapType;
  typedef UniqueVector<Type *> TypeList;
  typedef DenseMap<Value *, SPIRVID> ValueMapType;
  typedef std::list<SPIRVID> SPIRVIDListType;
  typedef std::vector<std::pair<Value *, SPIRVID>> EntryPointVecType;
  typedef std::set<uint32_t> CapabilitySetType;
  typedef std::list<SPIRVInstruction> SPIRVInstructionList;
  typedef std::map<spv::BuiltIn, SPIRVID> BuiltinConstantMapType;
  // A vector of pairs, each of which is:
  // - the LLVM instruction that we will later generate SPIR-V code for
  // - the SPIR-V instruction placeholder that will be replaced
  typedef std::vector<std::pair<Value *, SPIRVInstruction *>>
      DeferredInstVecType;
  typedef DenseMap<FunctionType *, std::pair<FunctionType *, uint32_t>>
      GlobalConstFuncMapType;

  explicit SPIRVProducerPass(
      raw_pwrite_stream &out,
      ArrayRef<std::pair<unsigned, std::string>> samplerMap,
      bool outputCInitList)
      : ModulePass(ID), module(nullptr), samplerMap(samplerMap), out(out),
        binaryTempOut(binaryTempUnderlyingVector), binaryOut(&out),
        outputCInitList(outputCInitList), patchBoundOffset(0), nextID(1),
        OpExtInstImportID(0), HasVariablePointersStorageBuffer(false),
        HasVariablePointers(false), SamplerTy(nullptr), WorkgroupSizeValueID(0),
        WorkgroupSizeVarID(0) {
    addCapability(spv::CapabilityShader);
    Ptr = this;
  }

  virtual ~SPIRVProducerPass() {
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<LoopInfoWrapperPass>();
  }

  virtual bool runOnModule(Module &module) override;

  // output the SPIR-V header block
  void outputHeader();

  // patch the SPIR-V header block
  void patchHeader();

  CapabilitySetType &getCapabilitySet() { return CapabilitySet; }
  TypeMapType &getImageTypeMap() { return ImageTypeMap; }
  TypeList &getTypeList() { return Types; }
  ValueMapType &getValueMap() { return ValueMap; }
  SPIRVInstructionList &getSPIRVInstList(SPIRVSection Section) {
    return SPIRVSections[Section];
  };
  EntryPointVecType &getEntryPointVec() { return EntryPointVec; }
  DeferredInstVecType &getDeferredInstVec() { return DeferredInstVec; }
  SPIRVIDListType &getEntryPointInterfacesList() {
    return EntryPointInterfacesList;
  }
  SPIRVID getOpExtInstImportID();
  std::vector<SPIRVID> &getBuiltinDimVec() { return BuiltinDimensionVec; }

  bool hasVariablePointersStorageBuffer() {
    return HasVariablePointersStorageBuffer;
  }
  void setVariablePointersStorageBuffer() {
    if (!HasVariablePointersStorageBuffer) {
      addCapability(spv::CapabilityVariablePointersStorageBuffer);
      HasVariablePointersStorageBuffer = true;
    }
  }
  bool hasVariablePointers() { return HasVariablePointers; }
  void setVariablePointers() {
    if (!HasVariablePointers) {
      addCapability(spv::CapabilityVariablePointers);
      HasVariablePointers = true;
    }
  }
  ArrayRef<std::pair<unsigned, std::string>> &getSamplerMap() {
    return samplerMap;
  }
  GlobalConstFuncMapType &getGlobalConstFuncTypeMap() {
    return GlobalConstFuncTypeMap;
  }
  SmallPtrSet<Value *, 16> &getGlobalConstArgSet() {
    return GlobalConstArgumentSet;
  }
  TypeList &getTypesNeedingArrayStride() { return TypesNeedingArrayStride; }

  void GenerateLLVMIRInfo();
  // Populate GlobalConstFuncTypeMap. Also, if module-scope __constant will
  // *not* be converted to a storage buffer, replace each such global variable
  // with one in the storage class expected by SPIR-V.
  void FindGlobalConstVars();
  // Populate ResourceVarInfoList, FunctionToResourceVarsMap, and
  // ModuleOrderedResourceVars.
  void FindResourceVars();
  void FindTypePerGlobalVar(GlobalVariable &GV);
  void FindTypePerFunc(Function &F);
  void FindTypesForSamplerMap();
  void FindTypesForResourceVars();
  // Inserts |Ty| and relevant sub-types into the |Types| member, indicating
  // that |Ty| and its subtypes will need a corresponding SPIR-V type.
  void FindType(Type *Ty);

  // Returns the canonical type of |type|.
  //
  // By default, clspv maps both __constant and __global address space pointers
  // to StorageBuffer storage class. In order to prevent duplicate types from
  // being generated, clspv uses the canonical type as a representative.
  Type *CanonicalType(Type *type);

  // Lookup or create Types, Constants.
  // Returns SPIRVID once it has been created.
  SPIRVID getSPIRVType(Type *Ty);
  SPIRVID getSPIRVConstant(Constant *Cst);
  SPIRVID getSPIRVInt32Constant(uint32_t CstVal);
  // Lookup SPIRVID of llvm::Value, may create Constant.
  SPIRVID getSPIRVValue(Value *V);

  SPIRVID getSPIRVBuiltin(spv::BuiltIn BID, spv::Capability Cap);

  // Generates instructions for SPIR-V types corresponding to the LLVM types
  // saved in the |Types| member. A type follows its subtypes. IDs are
  // allocated sequentially starting with the current value of nextID, and
  // with a type following its subtypes. Also updates nextID to just beyond
  // the last generated ID.
  void GenerateSPIRVTypes();
  void GenerateModuleInfo();
  void GenerateGlobalVar(GlobalVariable &GV);
  void GenerateWorkgroupVars();
  // Generate reflection instructions for resource variables associated with
  // arguments to F.
  void GenerateSamplers();
  // Generate OpVariables for %clspv.resource.var.* calls.
  void GenerateResourceVars();
  void GenerateFuncPrologue(Function &F);
  void GenerateFuncBody(Function &F);
  void GenerateEntryPointInitialStores();
  spv::Op GetSPIRVCmpOpcode(CmpInst *CmpI);
  spv::Op GetSPIRVCastOpcode(Instruction &I);
  spv::Op GetSPIRVBinaryOpcode(Instruction &I);
  SPIRVID GenerateClspvInstruction(CallInst *Call,
                                   const FunctionInfo &FuncInfo);
  SPIRVID GenerateImageInstruction(CallInst *Call,
                                   const FunctionInfo &FuncInfo);
  SPIRVID GenerateSubgroupInstruction(CallInst *Call,
                                      const FunctionInfo &FuncInfo);
  SPIRVID GenerateInstructionFromCall(CallInst *Call);
  void GenerateInstruction(Instruction &I);
  void GenerateFuncEpilogue();
  void HandleDeferredInstruction();
  void HandleDeferredDecorations();
  bool is4xi8vec(Type *Ty) const;
  spv::StorageClass GetStorageClass(unsigned AddrSpace) const;
  spv::StorageClass GetStorageClassForArgKind(clspv::ArgKind arg_kind) const;
  spv::BuiltIn GetBuiltin(StringRef globalVarName) const;
  // Returns the GLSL extended instruction enum that the given function
  // call maps to. If none, then returns the 0 value, i.e. GLSLstd450Bad.
  glsl::ExtInst getExtInstEnum(const Builtins::FunctionInfo &func_info);
  // Returns the GLSL extended instruction enum indirectly used by the given
  // function. That is, to implement the given function, we use an extended
  // instruction plus one more instruction. If none, then returns the 0 value,
  // i.e. GLSLstd450Bad.
  glsl::ExtInst getIndirectExtInstEnum(const Builtins::FunctionInfo &func_info);
  // Returns the single GLSL extended instruction used directly or
  // indirectly by the given function call.
  glsl::ExtInst
  getDirectOrIndirectExtInstEnum(const Builtins::FunctionInfo &func_info);
  void WriteOneWord(uint32_t Word);
  void WriteResultID(const SPIRVInstruction &Inst);
  void WriteWordCountAndOpcode(const SPIRVInstruction &Inst);
  void WriteOperand(const SPIRVOperand &Op);
  void WriteSPIRVBinary();
  void WriteSPIRVBinary(SPIRVInstructionList &SPIRVInstList);

  // Returns true if |type| is compatible with OpConstantNull.
  bool IsTypeNullable(const Type *type) const;

  // Populate UBO remapped type maps.
  void PopulateUBOTypeMaps();

  // Populate the merge and continue block maps.
  void PopulateStructuredCFGMaps();

  // Wrapped methods of DataLayout accessors. If |type| was remapped for UBOs,
  // uses the internal map, otherwise it falls back on the data layout.
  uint64_t GetTypeSizeInBits(Type *type, const DataLayout &DL);
  uint64_t GetTypeStoreSize(Type *type, const DataLayout &DL);
  uint64_t GetTypeAllocSize(Type *type, const DataLayout &DL);
  uint32_t GetExplicitLayoutStructMemberOffset(StructType *type,
                                               unsigned member,
                                               const DataLayout &DL);

  // Returns the base pointer of |v|.
  Value *GetBasePointer(Value *v);

  // Add Capability if not already (e.g. CapabilityGroupNonUniformBroadcast)
  void addCapability(uint32_t c) { CapabilitySet.emplace(c); }

  // Sets |HasVariablePointersStorageBuffer| or |HasVariablePointers| based on
  // |address_space|.
  void setVariablePointersCapabilities(unsigned address_space);

  // Returns true if |lhs| and |rhs| represent the same resource or workgroup
  // variable.
  bool sameResource(Value *lhs, Value *rhs) const;

  // Returns true if |inst| is phi or select that selects from the same
  // structure (or null).
  bool selectFromSameObject(Instruction *inst);

  // Returns true if |Arg| is called with a coherent resource.
  bool CalledWithCoherentResource(Argument &Arg);

  //
  // Primary interface for adding SPIRVInstructions to a SPIRVSection.
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Opcode, SPIRVOperandVec &Operands) {
    bool has_result, has_result_type;
    spv::HasResultAndType(Opcode, &has_result, &has_result_type);
    SPIRVID RID = has_result ? incrNextID() : 0;
    SPIRVSections[TSection].emplace_back(Opcode, RID, Operands);
    return RID;
  }
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Op) {
    SPIRVOperandVec Ops;
    return addSPIRVInst<TSection>(Op, Ops);
  }
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Op, uint32_t V) {
    SPIRVOperandVec Ops;
    Ops.emplace_back(LITERAL_WORD, V);
    return addSPIRVInst<TSection>(Op, Ops);
  }
  template <enum SPIRVSection TSection = kFunctions>
  SPIRVID addSPIRVInst(spv::Op Op, const char *V) {
    SPIRVOperandVec Ops;
    Ops.emplace_back(LITERAL_STRING, V);
    return addSPIRVInst<TSection>(Op, Ops);
  }

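  // Usage sketch (illustrative only; actual call sites may differ): emitting
  // "OpMemoryModel Logical GLSL450" into its section could look like
  //   SPIRVOperandVec Ops;
  //   Ops << uint32_t(spv::AddressingModelLogical)
  //       << uint32_t(spv::MemoryModelGLSL450);
  //   addSPIRVInst<kMemoryModel>(spv::OpMemoryModel, Ops);
  // using the operator<< helpers defined after this class.
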
  //
  // Add a placeholder for an llvm::Value that references future values.
  // It must have a result ID in case the final SPIRVInstruction requires one.
  SPIRVID addSPIRVPlaceholder(Value *I) {
    SPIRVID RID = incrNextID();
    SPIRVOperandVec Ops;
    SPIRVSections[kFunctions].emplace_back(spv::OpExtInst, RID, Ops);
    DeferredInstVec.push_back({I, &SPIRVSections[kFunctions].back()});
    return RID;
  }
  // Replace placeholder with actual SPIRVInstruction on the final pass
  // (HandleDeferredInstruction).
  SPIRVID replaceSPIRVInst(SPIRVInstruction *I, spv::Op Opcode,
                           SPIRVOperandVec &Operands) {
    bool has_result, has_result_type;
    spv::HasResultAndType(Opcode, &has_result, &has_result_type);
    SPIRVID RID = has_result ? I->getResultID() : 0;
    *I = SPIRVInstruction(Opcode, RID, Operands);
    return RID;
  }

  //
  // Add a global variable and capture the entry point interface.
  SPIRVID addSPIRVGlobalVariable(const SPIRVID &TypeID, spv::StorageClass SC,
                                 const SPIRVID &InitID = SPIRVID());

  SPIRVID getReflectionImport();
  void GenerateReflection();
  void GenerateKernelReflection();
  void GeneratePushConstantReflection();
  void GenerateSpecConstantReflection();
  void AddArgumentReflection(SPIRVID kernel_decl, const std::string &name,
                             clspv::ArgKind arg_kind, uint32_t ordinal,
                             uint32_t descriptor_set, uint32_t binding,
                             uint32_t offset, uint32_t size, uint32_t spec_id,
                             uint32_t elem_size);

private:
  static char ID;

  Module *module;

  // Set of Capabilities required
  CapabilitySetType CapabilitySet;

  // Map from clspv::BuiltinType to SPIRV Global Variable
  BuiltinConstantMapType BuiltinConstantMap;

  ArrayRef<std::pair<unsigned, std::string>> samplerMap;
  raw_pwrite_stream &out;

  // TODO(dneto): Wouldn't it be better to always just emit a binary, and then
  // convert to other formats on demand?

  // When emitting a C initialization list, the WriteSPIRVBinary method
  // will actually write its words to this vector via binaryTempOut.
  SmallVector<char, 100> binaryTempUnderlyingVector;
  raw_svector_ostream binaryTempOut;

  // Binary output writes to this stream, which might be |out| or
  // |binaryTempOut|. It's the latter when we really want to write a C
  // initializer list.
  raw_pwrite_stream *binaryOut;
  const bool outputCInitList; // If true, output looks like {0x7023, ... , 5}
  uint64_t patchBoundOffset;
  uint32_t nextID;

  SPIRVID incrNextID() { return nextID++; }

  // ID for OpTypeInt 32 1.
  SPIRVID int32ID;
  // ID for OpTypeVector %int 4.
  SPIRVID v4int32ID;

  // Maps an LLVM Type pointer to the corresponding SPIR-V Id.
  TypeMapType TypeMap;
  // Maps an LLVM image type to its SPIR-V ID.
  TypeMapType ImageTypeMap;
  // A unique-vector of LLVM types that map to a SPIR-V type.
  TypeList Types;
  // Maps an LLVM Value pointer to the corresponding SPIR-V Id.
  ValueMapType ValueMap;
  SPIRVInstructionList SPIRVSections[kSectionCount];

  EntryPointVecType EntryPointVec;
  DeferredInstVecType DeferredInstVec;
  SPIRVIDListType EntryPointInterfacesList;
  SPIRVID OpExtInstImportID;
  std::vector<SPIRVID> BuiltinDimensionVec;
  bool HasVariablePointersStorageBuffer;
  bool HasVariablePointers;
  Type *SamplerTy;
  DenseMap<unsigned, SPIRVID> SamplerLiteralToIDMap;

  // If a function F has a pointer-to-__constant parameter, then this variable
  // will map F's type to (G, index of the parameter), where in a first phase
  // G is F's type. During FindTypePerFunc, G will be changed to F's type
  // but replacing the pointer-to-constant parameter with
  // pointer-to-ModuleScopePrivate.
  // TODO(dneto): This doesn't seem general enough? A function might have
  // more than one such parameter.
  GlobalConstFuncMapType GlobalConstFuncTypeMap;
  SmallPtrSet<Value *, 16> GlobalConstArgumentSet;
  // An ordered set of pointer types of Base arguments to OpPtrAccessChain,
  // or array types, and which point into transparent memory (StorageBuffer
  // storage class). These will require an ArrayStride decoration.
  // See SPV_KHR_variable_pointers rev 13.
  TypeList TypesNeedingArrayStride;

  // This is truly ugly, but works around what look like driver bugs.
  // For get_local_size, an earlier part of the flow has created a module-scope
  // variable in Private address space to hold the value for the workgroup
  // size. Its initializer is a uint3 value marked as builtin WorkgroupSize.
  // When this is present, save the IDs of the initializer value and variable
  // in these two variables. We only ever do a vector load from it, and
  // when we see one of those, substitute just the value of the initializer.
  // This mimics what Glslang does, and that's what drivers are used to.
  // TODO(dneto): Remove this once drivers are fixed.
  SPIRVID WorkgroupSizeValueID;
  SPIRVID WorkgroupSizeVarID;

  // Bookkeeping for mapping kernel arguments to resource variables.
  struct ResourceVarInfo {
    ResourceVarInfo(int index_arg, unsigned set_arg, unsigned binding_arg,
                    Function *fn, clspv::ArgKind arg_kind_arg, int coherent_arg)
        : index(index_arg), descriptor_set(set_arg), binding(binding_arg),
          var_fn(fn), arg_kind(arg_kind_arg), coherent(coherent_arg),
          addr_space(fn->getReturnType()->getPointerAddressSpace()) {}
    const int index; // Index into ResourceVarInfoList
    const unsigned descriptor_set;
    const unsigned binding;
    Function *const var_fn; // The @clspv.resource.var.* function.
    const clspv::ArgKind arg_kind;
    const int coherent;
    const unsigned addr_space; // The LLVM address space
    // The SPIR-V ID of the OpVariable. Not populated at construction time.
    SPIRVID var_id;
  };
  // A list of resource var info. Each one corresponds to a module-scope
  // resource variable we will have to create. Resource var indices are
  // indices into this vector.
  SmallVector<std::unique_ptr<ResourceVarInfo>, 8> ResourceVarInfoList;
  // This is a vector of pointers of all the resource vars, but ordered by
  // kernel function, and then by argument.
  UniqueVector<ResourceVarInfo *> ModuleOrderedResourceVars;
  // Map a function to the ordered list of resource variables it uses, one for
  // each argument. If an argument does not use a resource variable, it
  // will have a null pointer entry.
  using FunctionToResourceVarsMapType =
      DenseMap<Function *, SmallVector<ResourceVarInfo *, 8>>;
  FunctionToResourceVarsMapType FunctionToResourceVarsMap;

  // What LLVM types map to SPIR-V types needing layout? These are the
  // arrays and structures supporting storage buffers and uniform buffers.
  TypeList TypesNeedingLayout;
  // What LLVM struct types map to a SPIR-V struct type with Block decoration?
  UniqueVector<StructType *> StructTypesNeedingBlock;
  // For a call that represents a load from an opaque type (samplers, images),
  // map it to the variable id it should load from.
  DenseMap<CallInst *, SPIRVID> ResourceVarDeferredLoadCalls;

  // An ordered list of the kernel arguments of type pointer-to-local.
  using LocalArgList = SmallVector<Argument *, 8>;
  LocalArgList LocalArgs;
  // Information about a pointer-to-local argument.
  struct LocalArgInfo {
    // The SPIR-V ID of the array variable.
    SPIRVID variable_id;
    // The element type of the array.
    Type *elem_type;
    // The ID of the array size.
    SPIRVID array_size_id;
    // The ID of the array type.
    SPIRVID array_type_id;
    // The ID of the pointer to the array type.
    SPIRVID ptr_array_type_id;
    // The specialization constant ID of the array size.
    int spec_id;
  };
  // A mapping from Argument to its assigned SpecId.
  DenseMap<const Argument *, int> LocalArgSpecIds;
  // A mapping from SpecId to its LocalArgInfo.
  DenseMap<int, LocalArgInfo> LocalSpecIdInfoMap;
  // A mapping from a remapped type to its real offsets.
  DenseMap<Type *, std::vector<uint32_t>> RemappedUBOTypeOffsets;
  // A mapping from a remapped type to its real sizes.
  DenseMap<Type *, std::tuple<uint64_t, uint64_t, uint64_t>>
      RemappedUBOTypeSizes;

  // Maps basic block to its merge block.
  DenseMap<BasicBlock *, BasicBlock *> MergeBlocks;
  // Maps basic block to its continue block.
  DenseMap<BasicBlock *, BasicBlock *> ContinueBlocks;

  SPIRVID ReflectionID;
  DenseMap<Function *, SPIRVID> KernelDeclarations;

public:
  static SPIRVProducerPass *Ptr;
};

char SPIRVProducerPass::ID;
SPIRVProducerPass *SPIRVProducerPass::Ptr = nullptr;

} // namespace

namespace clspv {
ModulePass *
createSPIRVProducerPass(raw_pwrite_stream &out,
                        ArrayRef<std::pair<unsigned, std::string>> samplerMap,
                        bool outputCInitList) {
  return new SPIRVProducerPass(out, samplerMap, outputCInitList);
}
} // namespace clspv

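// Usage sketch (illustrative; the variable names are made up and the real
// driver code lives elsewhere in clspv): the pass is normally scheduled on a
// legacy pass manager, e.g.
//   legacy::PassManager pm;
//   pm.add(clspv::createSPIRVProducerPass(binary_out, sampler_map,
//                                         /* outputCInitList= */ false));
//   pm.run(module);
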
namespace {
SPIRVOperandVec &operator<<(SPIRVOperandVec &list, uint32_t num) {
  list.emplace_back(LITERAL_WORD, num);
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, int32_t num) {
  list.emplace_back(LITERAL_WORD, static_cast<uint32_t>(num));
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, ArrayRef<uint32_t> num_vec) {
  list.emplace_back(num_vec);
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, StringRef str) {
  list.emplace_back(LITERAL_STRING, str);
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, Type *t) {
  list.emplace_back(NUMBERID, SPIRVProducerPass::Ptr->getSPIRVType(t).get());
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, Value *v) {
  list.emplace_back(NUMBERID, SPIRVProducerPass::Ptr->getSPIRVValue(v).get());
  return list;
}

SPIRVOperandVec &operator<<(SPIRVOperandVec &list, const SPIRVID &v) {
  list.emplace_back(NUMBERID, v.get());
  return list;
}
} // namespace

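// Illustrative example of the streaming helpers above (hypothetical values,
// not a specific call site): building the operands of an OpStore from LLVM
// values resolves each one through getSPIRVValue():
//   SPIRVOperandVec Ops;
//   Ops << pointer_value << object_value; // both are llvm::Value*
//   SPIRVProducerPass::Ptr->addSPIRVInst(spv::OpStore, Ops);
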
bool SPIRVProducerPass::runOnModule(Module &M) {
  // TODO(sjw): Need to reset all data members for each Module, or better
  // yet create a new SPIRVProducer for every module. For now only
  // allow 1 call.
  assert(module == nullptr);
  module = &M;
  if (ShowProducerIR) {
    llvm::outs() << *module << "\n";
  }
  binaryOut = outputCInitList ? &binaryTempOut : &out;

  PopulateUBOTypeMaps();
  PopulateStructuredCFGMaps();

  // SPIR-V always begins with its header information
  outputHeader();

  // Gather information from the LLVM IR that we require.
  GenerateLLVMIRInfo();

  // Collect information on global variables too.
  for (GlobalVariable &GV : module->globals()) {
    // If the GV is one of our special __spirv_* variables, remove the
    // initializer as it was only placed there to force LLVM to not throw the
    // value away.
    if (GV.getName().startswith("__spirv_") ||
        GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
      GV.setInitializer(nullptr);
    }

    // Collect types' information from global variable.
    FindTypePerGlobalVar(GV);
  }

  // Generate SPIRV instructions for types.
  GenerateSPIRVTypes();

  // Generate literal samplers if necessary.
  GenerateSamplers();

  // Generate SPIRV variables.
  for (GlobalVariable &GV : module->globals()) {
    GenerateGlobalVar(GV);
  }
  GenerateResourceVars();
  GenerateWorkgroupVars();

  // Generate SPIRV instructions for each function.
  for (Function &F : *module) {
    if (F.isDeclaration()) {
      continue;
    }

    // Generate Function Prologue.
    GenerateFuncPrologue(F);

    // Generate SPIRV instructions for function body.
    GenerateFuncBody(F);

    // Generate Function Epilogue.
    GenerateFuncEpilogue();
  }

  HandleDeferredInstruction();
  HandleDeferredDecorations();

  // Generate SPIRV module information.
  GenerateModuleInfo();

  // Generate embedded reflection information.
  GenerateReflection();

  WriteSPIRVBinary();

  // We need to patch the SPIR-V header to set bound correctly.
  patchHeader();

  if (outputCInitList) {
    bool first = true;
    std::ostringstream os;

    auto emit_word = [&os, &first](uint32_t word) {
      if (!first)
        os << ",\n";
      os << word;
      first = false;
    };

    os << "{";
    const std::string str(binaryTempOut.str());
    for (unsigned i = 0; i < str.size(); i += 4) {
      const uint32_t a = static_cast<unsigned char>(str[i]);
      const uint32_t b = static_cast<unsigned char>(str[i + 1]);
      const uint32_t c = static_cast<unsigned char>(str[i + 2]);
      const uint32_t d = static_cast<unsigned char>(str[i + 3]);
      emit_word(a | (b << 8) | (c << 16) | (d << 24));
    }
    os << "}\n";
    out << os.str();
  }

  return false;
}

void SPIRVProducerPass::outputHeader() {
  binaryOut->write(reinterpret_cast<const char *>(&spv::MagicNumber),
                   sizeof(spv::MagicNumber));
  uint32_t minor = 0;
  if (SpvVersion() == SPIRVVersion::SPIRV_1_3) {
    minor = 3;
  }
  uint32_t version = (1 << 16) | (minor << 8);
  binaryOut->write(reinterpret_cast<const char *>(&version), sizeof(version));

  // use Google's vendor ID
  const uint32_t vendor = 21 << 16;
  binaryOut->write(reinterpret_cast<const char *>(&vendor), sizeof(vendor));

  // we record where we need to come back to and patch in the bound value
  patchBoundOffset = binaryOut->tell();

  // output a bad bound for now
  binaryOut->write(reinterpret_cast<const char *>(&nextID), sizeof(nextID));

  // output the schema (reserved for use and must be 0)
  const uint32_t schema = 0;
  binaryOut->write(reinterpret_cast<const char *>(&schema), sizeof(schema));
}
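
// Worked example of the version word above (illustrative): for SPIR-V 1.3 the
// value is (1 << 16) | (3 << 8) = 0x00010300, i.e. major version 1 in bits
// 16-23 and minor version 3 in bits 8-15; for SPIR-V 1.0 it is 0x00010000.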

void SPIRVProducerPass::patchHeader() {
  // for a binary we just write the value of nextID over bound
  binaryOut->pwrite(reinterpret_cast<char *>(&nextID), sizeof(nextID),
                    patchBoundOffset);
}

void SPIRVProducerPass::GenerateLLVMIRInfo() {
  // This function generates artificial LLVM IR, such as global variables for
  // arguments, constants, and pointer types for argument access. This
  // information is artificial because we need Vulkan SPIR-V output. It is
  // executed ahead of FindType and FindConstant.

  FindGlobalConstVars();

  FindResourceVars();

  bool HasWorkGroupBuiltin = false;
  for (GlobalVariable &GV : module->globals()) {
    const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
    if (spv::BuiltInWorkgroupSize == BuiltinType) {
      HasWorkGroupBuiltin = true;
    }
  }

  FindTypesForSamplerMap();
  FindTypesForResourceVars();
}

void SPIRVProducerPass::FindGlobalConstVars() {
  clspv::NormalizeGlobalVariables(*module);
  const DataLayout &DL = module->getDataLayout();

  SmallVector<GlobalVariable *, 8> GVList;
  SmallVector<GlobalVariable *, 8> DeadGVList;
  for (GlobalVariable &GV : module->globals()) {
    if (GV.getType()->getAddressSpace() == AddressSpace::Constant) {
      if (GV.use_empty()) {
        DeadGVList.push_back(&GV);
      } else {
        GVList.push_back(&GV);
      }
    }
  }

  // Remove dead global __constant variables.
  for (auto GV : DeadGVList) {
    GV->eraseFromParent();
  }
  DeadGVList.clear();

  if (clspv::Option::ModuleConstantsInStorageBuffer()) {
    // For now, we only support a single storage buffer.
    if (!GVList.empty()) {
      assert(GVList.size() == 1);
      const auto *GV = GVList[0];
      const auto constants_byte_size =
          (GetTypeSizeInBits(GV->getInitializer()->getType(), DL)) / 8;
      const size_t kConstantMaxSize = 65536;
      if (constants_byte_size > kConstantMaxSize) {
        outs() << "Max __constant capacity of " << kConstantMaxSize
               << " bytes exceeded: " << constants_byte_size << " bytes used\n";
        llvm_unreachable("Max __constant capacity exceeded");
      }
    }
  } else {
    // Change global constant variable's address space to ModuleScopePrivate.
    auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
    for (auto GV : GVList) {
      // Create new gv with ModuleScopePrivate address space.
      Type *NewGVTy = GV->getType()->getPointerElementType();
      GlobalVariable *NewGV = new GlobalVariable(
          *module, NewGVTy, false, GV->getLinkage(), GV->getInitializer(), "",
          nullptr, GV->getThreadLocalMode(), AddressSpace::ModuleScopePrivate);
      NewGV->takeName(GV);

      const SmallVector<User *, 8> GVUsers(GV->user_begin(), GV->user_end());
      SmallVector<User *, 8> CandidateUsers;

      auto record_called_function_type_as_user =
          [&GlobalConstFuncTyMap](Value *gv, CallInst *call) {
            // Find argument index.
            unsigned index = 0;
            for (unsigned i = 0; i < call->getNumArgOperands(); i++) {
              if (gv == call->getOperand(i)) {
                // TODO(dneto): Should we break here?
                index = i;
              }
            }

            // Record function type with global constant.
            GlobalConstFuncTyMap[call->getFunctionType()] =
                std::make_pair(call->getFunctionType(), index);
          };

      for (User *GVU : GVUsers) {
        if (CallInst *Call = dyn_cast<CallInst>(GVU)) {
          record_called_function_type_as_user(GV, Call);
        } else if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(GVU)) {
          // Check GEP users.
          for (User *GEPU : GEP->users()) {
            if (CallInst *GEPCall = dyn_cast<CallInst>(GEPU)) {
              record_called_function_type_as_user(GEP, GEPCall);
            }
          }
        }

        CandidateUsers.push_back(GVU);
      }

      for (User *U : CandidateUsers) {
        // Update users of gv with new gv.
        if (!isa<Constant>(U)) {
          // #254: Can't change operands of a constant, but this shouldn't be
          // something that sticks around in the module.
          U->replaceUsesOfWith(GV, NewGV);
        }
      }

      // Delete original gv.
      GV->eraseFromParent();
    }
  }
}

void SPIRVProducerPass::FindResourceVars() {
  ResourceVarInfoList.clear();
  FunctionToResourceVarsMap.clear();
  ModuleOrderedResourceVars.reset();
  // Normally, there is one resource variable per clspv.resource.var.*
  // function, since that is unique'd by arg type and index. By design,
  // we can share these resource variables across kernels because all
  // kernels use the same descriptor set.
  //
  // But if the user requested distinct descriptor sets per kernel, then
  // the descriptor allocator has made different (set,binding) pairs for
  // the same (type,arg_index) pair. Since we can decorate a resource
  // variable with only exactly one DescriptorSet and Binding, we are
  // forced in this case to make distinct resource variables whenever
  // the same clspv.resource.var.X function is seen with distinct
  // (set,binding) values.
  const bool always_distinct_sets =
      clspv::Option::DistinctKernelDescriptorSets();
  for (Function &F : *module) {
    // Rely on the fact that the resource var functions have a stable ordering
    // in the module.
    if (Builtins::Lookup(&F) == Builtins::kClspvResource) {
      // Find all calls to this function with distinct set and binding pairs.
      // Save them in ResourceVarInfoList.

      // Determine uniqueness of the (set,binding) pairs only within this
      // one resource-var builtin function.
      using SetAndBinding = std::pair<unsigned, unsigned>;
      // Maps set and binding to the resource var info.
      DenseMap<SetAndBinding, ResourceVarInfo *> set_and_binding_map;
      bool first_use = true;
      for (auto &U : F.uses()) {
        if (auto *call = dyn_cast<CallInst>(U.getUser())) {
          const auto set = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
          const auto binding = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
          const auto arg_kind = clspv::ArgKind(
              dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
          const auto arg_index = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(3))->getZExtValue());
          const auto coherent = unsigned(
              dyn_cast<ConstantInt>(call->getArgOperand(5))->getZExtValue());

          // Find or make the resource var info for this combination.
          ResourceVarInfo *rv = nullptr;
          if (always_distinct_sets) {
            // Make a new resource var any time we see a different
            // (set,binding) pair.
            SetAndBinding key{set, binding};
            auto where = set_and_binding_map.find(key);
            if (where == set_and_binding_map.end()) {
              rv = new ResourceVarInfo(
                  static_cast<int>(ResourceVarInfoList.size()), set, binding,
                  &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
              set_and_binding_map[key] = rv;
            } else {
              rv = where->second;
            }
          } else {
            // The default is to make exactly one resource for each
            // clspv.resource.var.* function.
            if (first_use) {
              first_use = false;
              rv = new ResourceVarInfo(
                  static_cast<int>(ResourceVarInfoList.size()), set, binding,
                  &F, arg_kind, coherent);
              ResourceVarInfoList.emplace_back(rv);
            } else {
              rv = ResourceVarInfoList.back().get();
            }
          }

          // Now populate FunctionToResourceVarsMap.
          auto &mapping =
              FunctionToResourceVarsMap[call->getParent()->getParent()];
          while (mapping.size() <= arg_index) {
            mapping.push_back(nullptr);
          }
          mapping[arg_index] = rv;
        }
      }
    }
  }

  // Populate ModuleOrderedResourceVars.
  for (Function &F : *module) {
    auto where = FunctionToResourceVarsMap.find(&F);
    if (where != FunctionToResourceVarsMap.end()) {
      for (auto &rv : where->second) {
        if (rv != nullptr) {
          ModuleOrderedResourceVars.insert(rv);
        }
      }
    }
  }
  if (ShowResourceVars) {
    for (auto *info : ModuleOrderedResourceVars) {
      outs() << "MORV index " << info->index << " (" << info->descriptor_set
             << "," << info->binding << ") " << *(info->var_fn->getReturnType())
             << "\n";
    }
  }
}

void SPIRVProducerPass::FindTypePerGlobalVar(GlobalVariable &GV) {
  // Investigate global variable's type.
  FindType(GV.getType());
}

1088void SPIRVProducerPass::FindTypePerFunc(Function &F) {
1089 // Investigate function's type.
1090 FunctionType *FTy = F.getFunctionType();
1091
1092 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
1093 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
David Neto9ed8e2f2018-03-24 06:47:24 -07001094 // Handle a regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04001095 if (GlobalConstFuncTyMap.count(FTy)) {
1096 uint32_t GVCstArgIdx = GlobalConstFuncTypeMap[FTy].second;
1097 SmallVector<Type *, 4> NewFuncParamTys;
1098 for (unsigned i = 0; i < FTy->getNumParams(); i++) {
1099 Type *ParamTy = FTy->getParamType(i);
1100 if (i == GVCstArgIdx) {
1101 Type *EleTy = ParamTy->getPointerElementType();
1102 ParamTy = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1103 }
1104
1105 NewFuncParamTys.push_back(ParamTy);
1106 }
1107
1108 FunctionType *NewFTy =
1109 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1110 GlobalConstFuncTyMap[FTy] = std::make_pair(NewFTy, GVCstArgIdx);
1111 FTy = NewFTy;
1112 }
1113
1114 FindType(FTy);
1115 } else {
1116 // Kernel arguments are passed via resource variables rather than SPIR-V
1117 // function parameters, so create a parameterless function type and add it to the type map.
1118 SmallVector<Type *, 4> NewFuncParamTys;
1119 FunctionType *NewFTy =
1120 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
1121 FindType(NewFTy);
1122 }
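// For example (a sketch, not tied to a particular test case): a kernel such as
//   kernel void foo(global float *a, int n)
// is recorded here with the parameterless type "void ()"; its arguments are
// reached through resource variables (and push constants for POD data) rather
// than SPIR-V function parameters.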
1123
1124 // Investigate instructions' type in function body.
1125 for (BasicBlock &BB : F) {
1126 for (Instruction &I : BB) {
1127 if (isa<ShuffleVectorInst>(I)) {
1128 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1129 // Ignore type for mask of shuffle vector instruction.
1130 if (i == 2) {
1131 continue;
1132 }
1133
1134 Value *Op = I.getOperand(i);
1135 if (!isa<MetadataAsValue>(Op)) {
1136 FindType(Op->getType());
1137 }
1138 }
1139
1140 FindType(I.getType());
1141 continue;
1142 }
1143
David Neto862b7d82018-06-14 18:48:37 -04001144 CallInst *Call = dyn_cast<CallInst>(&I);
1145
SJW61531372020-06-09 07:31:08 -05001146 if (Call) {
1147 auto &func_info = Builtins::Lookup(Call->getCalledFunction());
1148 if (func_info.getType() == Builtins::kClspvResource ||
1149 func_info.getType() == Builtins::kClspvLocal) {
1150 // This is a fake call representing access to a resource/workgroup
1151 // variable. We handle that elsewhere.
1152 continue;
1153 }
Alan Baker202c8c72018-08-13 13:47:44 -04001154 }
1155
alan-bakerf083bed2020-01-29 08:15:42 -05001156 // #497: InsertValue and ExtractValue map to OpCompositeInsert and
1157 // OpCompositeExtract, which take literal values for indices. As a result,
1158 // don't map the type of indices.
1159 if (I.getOpcode() == Instruction::ExtractValue) {
1160 FindType(I.getOperand(0)->getType());
1161 continue;
1162 }
1163 if (I.getOpcode() == Instruction::InsertValue) {
1164 FindType(I.getOperand(0)->getType());
1165 FindType(I.getOperand(1)->getType());
1166 continue;
1167 }
1168
1169 // #497: InsertElement and ExtractElement map to OpCompositeInsert and
1170 // OpCompositeExtract when the index is a constant. In such a case don't map the index type.
1171 if (I.getOpcode() == Instruction::ExtractElement) {
1172 FindType(I.getOperand(0)->getType());
1173 Value *op1 = I.getOperand(1);
1174 if (!isa<Constant>(op1) || isa<GlobalValue>(op1)) {
1175 FindType(op1->getType());
1176 }
1177 continue;
1178 }
1179 if (I.getOpcode() == Instruction::InsertElement) {
1180 FindType(I.getOperand(0)->getType());
1181 FindType(I.getOperand(1)->getType());
1182 Value *op2 = I.getOperand(2);
1183 if (!isa<Constant>(op2) || isa<GlobalValue>(op2)) {
1184 FindType(op2->getType());
1185 }
1186 continue;
1187 }
1188
David Neto22f144c2017-06-12 14:26:21 -04001189 // Work through the operands of the instruction.
1190 for (unsigned i = 0; i < I.getNumOperands(); i++) {
1191 Value *const Op = I.getOperand(i);
1192 // If any of the operands is a constant, find the type!
1193 if (isa<Constant>(Op) && !isa<GlobalValue>(Op)) {
1194 FindType(Op->getType());
1195 }
1196 }
1197
1198 for (Use &Op : I.operands()) {
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001199 if (isa<CallInst>(&I)) {
David Neto22f144c2017-06-12 14:26:21 -04001200 // Avoid checking the call instruction's type.
1201 break;
1202 }
Alan Baker202c8c72018-08-13 13:47:44 -04001203 if (CallInst *OpCall = dyn_cast<CallInst>(Op)) {
SJW61531372020-06-09 07:31:08 -05001204 if (Builtins::Lookup(OpCall->getCalledFunction()) ==
1205 Builtins::kClspvLocal) {
Alan Baker202c8c72018-08-13 13:47:44 -04001206 // This is a fake call representing access to a workgroup variable.
1207 // We handle that elsewhere.
1208 continue;
1209 }
1210 }
David Neto22f144c2017-06-12 14:26:21 -04001211 if (!isa<MetadataAsValue>(&Op)) {
1212 FindType(Op->getType());
1213 continue;
1214 }
1215 }
1216
David Neto22f144c2017-06-12 14:26:21 -04001217 // We don't want to track the type of this call as we are going to replace
1218 // it.
SJW61531372020-06-09 07:31:08 -05001219 if (Call && Builtins::Lookup(Call->getCalledFunction()) ==
1220 Builtins::kClspvSamplerVarLiteral) {
David Neto22f144c2017-06-12 14:26:21 -04001221 continue;
1222 }
1223
1224 if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(&I)) {
1225 // If gep's base operand has ModuleScopePrivate address space, make gep
1226 // return ModuleScopePrivate address space.
1227 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate) {
1228 // Add pointer type with private address space for global constant to
1229 // type list.
1230 Type *EleTy = I.getType()->getPointerElementType();
1231 Type *NewPTy =
1232 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
1233
1234 FindType(NewPTy);
1235 continue;
1236 }
1237 }
1238
1239 FindType(I.getType());
1240 }
1241 }
1242}
1243
SJW77b87ad2020-04-21 14:37:52 -05001244void SPIRVProducerPass::FindTypesForSamplerMap() {
David Neto862b7d82018-06-14 18:48:37 -04001245 // If we are using a sampler map, find the type of the sampler.
SJW77b87ad2020-04-21 14:37:52 -05001246 if (module->getFunction(clspv::LiteralSamplerFunction()) ||
alan-baker7506abb2020-09-10 15:02:55 -04001247 !getSamplerMap().empty()) {
James Pricecbe834f2020-12-01 13:42:25 -05001248 auto SamplerStructTy =
1249 StructType::getTypeByName(module->getContext(), "opencl.sampler_t");
David Neto862b7d82018-06-14 18:48:37 -04001250 if (!SamplerStructTy) {
SJW77b87ad2020-04-21 14:37:52 -05001251 SamplerStructTy =
1252 StructType::create(module->getContext(), "opencl.sampler_t");
David Neto862b7d82018-06-14 18:48:37 -04001253 }
1254
1255 SamplerTy = SamplerStructTy->getPointerTo(AddressSpace::UniformConstant);
1256
1257 FindType(SamplerTy);
1258 }
1259}
1260
SJW77b87ad2020-04-21 14:37:52 -05001261void SPIRVProducerPass::FindTypesForResourceVars() {
David Neto862b7d82018-06-14 18:48:37 -04001262 // Record types so they are generated.
1263 TypesNeedingLayout.reset();
1264 StructTypesNeedingBlock.reset();
1265
1266 // To match older clspv codegen, generate the float type first if required
1267 // for images.
1268 for (const auto *info : ModuleOrderedResourceVars) {
alan-bakerf6bc8252020-09-23 14:58:55 -04001269 if (info->arg_kind == clspv::ArgKind::SampledImage ||
1270 info->arg_kind == clspv::ArgKind::StorageImage) {
alan-bakerf67468c2019-11-25 15:51:49 -05001271 if (IsIntImageType(info->var_fn->getReturnType())) {
1272 // Nothing for now...
1273 } else if (IsUintImageType(info->var_fn->getReturnType())) {
SJW77b87ad2020-04-21 14:37:52 -05001274 FindType(Type::getInt32Ty(module->getContext()));
alan-bakerf67468c2019-11-25 15:51:49 -05001275 }
1276
1277 // We need "float" either for the sampled type or for the Lod operand.
SJW77b87ad2020-04-21 14:37:52 -05001278 FindType(Type::getFloatTy(module->getContext()));
David Neto862b7d82018-06-14 18:48:37 -04001279 }
1280 }
1281
1282 for (const auto *info : ModuleOrderedResourceVars) {
1283 Type *type = info->var_fn->getReturnType();
1284
1285 switch (info->arg_kind) {
1286 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04001287 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04001288 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1289 StructTypesNeedingBlock.insert(sty);
1290 } else {
1291 errs() << *type << "\n";
1292 llvm_unreachable("Buffer arguments must map to structures!");
1293 }
1294 break;
1295 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001296 case clspv::ArgKind::PodUBO:
1297 case clspv::ArgKind::PodPushConstant:
David Neto862b7d82018-06-14 18:48:37 -04001298 if (auto *sty = dyn_cast<StructType>(type->getPointerElementType())) {
1299 StructTypesNeedingBlock.insert(sty);
1300 } else {
1301 errs() << *type << "\n";
1302 llvm_unreachable("POD arguments must map to structures!");
1303 }
1304 break;
alan-bakerf6bc8252020-09-23 14:58:55 -04001305 case clspv::ArgKind::SampledImage:
1306 case clspv::ArgKind::StorageImage:
David Neto862b7d82018-06-14 18:48:37 -04001307 case clspv::ArgKind::Sampler:
1308 // Sampler and image types map to the pointee type but
1309 // in the uniform constant address space.
1310 type = PointerType::get(type->getPointerElementType(),
1311 clspv::AddressSpace::UniformConstant);
1312 break;
1313 default:
1314 break;
1315 }
1316
1317 // The converted type is the type of the OpVariable we will generate.
1318 // If the pointee type is an array of size zero, FindType will convert it
1319 // to a runtime array.
1320 FindType(type);
1321 }
1322
alan-bakerdcd97412019-09-16 15:32:30 -04001323 // If module constants are clustered in a storage buffer then that struct
1324 // needs layout decorations.
1325 if (clspv::Option::ModuleConstantsInStorageBuffer()) {
SJW77b87ad2020-04-21 14:37:52 -05001326 for (GlobalVariable &GV : module->globals()) {
alan-bakerdcd97412019-09-16 15:32:30 -04001327 PointerType *PTy = cast<PointerType>(GV.getType());
1328 const auto AS = PTy->getAddressSpace();
1329 const bool module_scope_constant_external_init =
1330 (AS == AddressSpace::Constant) && GV.hasInitializer();
1331 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
1332 if (module_scope_constant_external_init &&
1333 spv::BuiltInMax == BuiltinType) {
1334 StructTypesNeedingBlock.insert(
1335 cast<StructType>(PTy->getPointerElementType()));
1336 }
1337 }
1338 }
1339
SJW77b87ad2020-04-21 14:37:52 -05001340 for (const GlobalVariable &GV : module->globals()) {
Kévin Petitbbbda972020-03-03 19:16:31 +00001341 if (GV.getAddressSpace() == clspv::AddressSpace::PushConstant) {
1342 auto Ty = cast<PointerType>(GV.getType())->getPointerElementType();
1343 assert(Ty->isStructTy() && "Push constants have to be structures.");
1344 auto STy = cast<StructType>(Ty);
1345 StructTypesNeedingBlock.insert(STy);
1346 }
1347 }
1348
David Neto862b7d82018-06-14 18:48:37 -04001349 // Traverse the arrays and structures underneath each Block, and
1350 // mark them as needing layout.
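// Worked example (illustrative): for a storage-buffer block struct of the
// shape { i32, [0 x float] }, the loop below inserts the struct, the i32 and
// the zero-length array into TypesNeedingLayout, and also records the array
// in TypesNeedingArrayStride when the early runtime-array-stride hack is not
// in effect, so Offset and ArrayStride decorations are emitted later.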
1351 std::vector<Type *> work_list(StructTypesNeedingBlock.begin(),
1352 StructTypesNeedingBlock.end());
1353 while (!work_list.empty()) {
1354 Type *type = work_list.back();
1355 work_list.pop_back();
1356 TypesNeedingLayout.insert(type);
1357 switch (type->getTypeID()) {
1358 case Type::ArrayTyID:
1359 work_list.push_back(type->getArrayElementType());
1360 if (!Hack_generate_runtime_array_stride_early) {
1361 // Remember this array type for deferred decoration.
1362 TypesNeedingArrayStride.insert(type);
1363 }
1364 break;
1365 case Type::StructTyID:
1366 for (auto *elem_ty : cast<StructType>(type)->elements()) {
1367 work_list.push_back(elem_ty);
1368 }
1369 default:
1370 // This type and its contained types don't get layout.
1371 break;
1372 }
1373 }
1374}
1375
SJWf93f5f32020-05-05 07:27:56 -05001376void SPIRVProducerPass::GenerateWorkgroupVars() {
Alan Baker202c8c72018-08-13 13:47:44 -04001377 // The SpecId assignment for pointer-to-local arguments is recorded in
1378 // module-level metadata. Translate that information into local argument
1379 // information.
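// Sketch of one metadata operand consumed below (shape inferred from the
// reads of operands 0-2; not copied from a real module):
//   !{void (i32 addrspace(3)*)* @foo, i32 0 /* arg index */, i32 3 /* SpecId */}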
SJWf93f5f32020-05-05 07:27:56 -05001380 LLVMContext &Context = module->getContext();
SJW77b87ad2020-04-21 14:37:52 -05001381 NamedMDNode *nmd = module->getNamedMetadata(clspv::LocalSpecIdMetadataName());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001382 if (!nmd)
1383 return;
Alan Baker202c8c72018-08-13 13:47:44 -04001384 for (auto operand : nmd->operands()) {
1385 MDTuple *tuple = cast<MDTuple>(operand);
1386 ValueAsMetadata *fn_md = cast<ValueAsMetadata>(tuple->getOperand(0));
1387 Function *func = cast<Function>(fn_md->getValue());
alan-bakerb6b09dc2018-11-08 16:59:28 -05001388 ConstantAsMetadata *arg_index_md =
1389 cast<ConstantAsMetadata>(tuple->getOperand(1));
1390 int arg_index = static_cast<int>(
1391 cast<ConstantInt>(arg_index_md->getValue())->getSExtValue());
1392 Argument *arg = &*(func->arg_begin() + arg_index);
Alan Baker202c8c72018-08-13 13:47:44 -04001393
1394 ConstantAsMetadata *spec_id_md =
1395 cast<ConstantAsMetadata>(tuple->getOperand(2));
alan-bakerb6b09dc2018-11-08 16:59:28 -05001396 int spec_id = static_cast<int>(
1397 cast<ConstantInt>(spec_id_md->getValue())->getSExtValue());
Alan Baker202c8c72018-08-13 13:47:44 -04001398
Alan Baker202c8c72018-08-13 13:47:44 -04001399 LocalArgSpecIds[arg] = spec_id;
alan-bakerb6b09dc2018-11-08 16:59:28 -05001400 if (LocalSpecIdInfoMap.count(spec_id))
1401 continue;
Alan Baker202c8c72018-08-13 13:47:44 -04001402
SJWf93f5f32020-05-05 07:27:56 -05001403 // Generate the spec constant.
1404 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05001405 Ops << Type::getInt32Ty(Context) << 1;
SJWf93f5f32020-05-05 07:27:56 -05001406 SPIRVID ArraySizeID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
Alan Baker202c8c72018-08-13 13:47:44 -04001407
SJWf93f5f32020-05-05 07:27:56 -05001408 // Generate the array type.
1409 Type *ElemTy = arg->getType()->getPointerElementType();
1410 Ops.clear();
1411 // The element type must have been created.
SJW01901d92020-05-21 08:58:31 -05001412 Ops << ElemTy << ArraySizeID;
SJWf93f5f32020-05-05 07:27:56 -05001413
1414 SPIRVID ArrayTypeID = addSPIRVInst<kTypes>(spv::OpTypeArray, Ops);
1415
1416 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05001417 Ops << spv::StorageClassWorkgroup << ArrayTypeID;
SJWf93f5f32020-05-05 07:27:56 -05001418 SPIRVID PtrArrayTypeID = addSPIRVInst<kTypes>(spv::OpTypePointer, Ops);
1419
1420 // Generate OpVariable.
1421 //
1422 // Ops[0] : Result Type ID
1423 // Ops[1] : Storage Class
SJW806a5d82020-07-15 12:51:38 -05001424 SPIRVID VariableID =
1425 addSPIRVGlobalVariable(PtrArrayTypeID, spv::StorageClassWorkgroup);
SJWf93f5f32020-05-05 07:27:56 -05001426
1427 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05001428 Ops << ArraySizeID << spv::DecorationSpecId << spec_id;
SJWf93f5f32020-05-05 07:27:56 -05001429 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
1430
1431 LocalArgInfo info{VariableID, ElemTy, ArraySizeID,
1432 ArrayTypeID, PtrArrayTypeID, spec_id};
1433 LocalSpecIdInfoMap[spec_id] = info;
Alan Baker202c8c72018-08-13 13:47:44 -04001434 }
1435}
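// For a tuple like the one sketched above, the loop emits roughly the
// following (IDs invented for illustration):
//   %size = OpSpecConstant %uint 1        ; later decorated with SpecId 3
//   %arr  = OpTypeArray %elem %size
//   %ptr  = OpTypePointer Workgroup %arr
//   %var  = OpVariable %ptr Workgroup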
1436
David Neto22f144c2017-06-12 14:26:21 -04001437void SPIRVProducerPass::FindType(Type *Ty) {
1438 TypeList &TyList = getTypeList();
1439
1440 if (0 != TyList.idFor(Ty)) {
1441 return;
1442 }
1443
1444 if (Ty->isPointerTy()) {
1445 auto AddrSpace = Ty->getPointerAddressSpace();
1446 if ((AddressSpace::Constant == AddrSpace) ||
1447 (AddressSpace::Global == AddrSpace)) {
1448 auto PointeeTy = Ty->getPointerElementType();
1449
1450 if (PointeeTy->isStructTy() &&
1451 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1452 FindType(PointeeTy);
1453 auto ActualPointerTy =
1454 PointeeTy->getPointerTo(AddressSpace::UniformConstant);
1455 FindType(ActualPointerTy);
1456 return;
1457 }
1458 }
1459 }
1460
David Neto862b7d82018-06-14 18:48:37 -04001461 // By convention, LLVM array type with 0 elements will map to
1462 // OpTypeRuntimeArray. Otherwise, it will map to OpTypeArray, which
1463 // has a constant number of elements. We need to support the type of that
1464 // length constant as well.
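// For example (illustrative): [0 x float] maps to OpTypeRuntimeArray %float,
// while [4 x float] maps to OpTypeArray %float %uint_4, so the i32 used for
// the length constant must be in the type list as well.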
1465 if (auto *arrayTy = dyn_cast<ArrayType>(Ty)) {
1466 if (arrayTy->getNumElements() > 0) {
1467 LLVMContext &Context = Ty->getContext();
1468 FindType(Type::getInt32Ty(Context));
1469 }
David Neto22f144c2017-06-12 14:26:21 -04001470 }
1471
1472 for (Type *SubTy : Ty->subtypes()) {
1473 FindType(SubTy);
1474 }
1475
1476 TyList.insert(Ty);
1477}
1478
David Neto22f144c2017-06-12 14:26:21 -04001479spv::StorageClass SPIRVProducerPass::GetStorageClass(unsigned AddrSpace) const {
1480 switch (AddrSpace) {
1481 default:
1482 llvm_unreachable("Unsupported OpenCL address space");
1483 case AddressSpace::Private:
1484 return spv::StorageClassFunction;
1485 case AddressSpace::Global:
David Neto22f144c2017-06-12 14:26:21 -04001486 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001487 case AddressSpace::Constant:
1488 return clspv::Option::ConstantArgsInUniformBuffer()
1489 ? spv::StorageClassUniform
1490 : spv::StorageClassStorageBuffer;
David Neto22f144c2017-06-12 14:26:21 -04001491 case AddressSpace::Input:
1492 return spv::StorageClassInput;
1493 case AddressSpace::Local:
1494 return spv::StorageClassWorkgroup;
1495 case AddressSpace::UniformConstant:
1496 return spv::StorageClassUniformConstant;
David Neto9ed8e2f2018-03-24 06:47:24 -07001497 case AddressSpace::Uniform:
David Netoe439d702018-03-23 13:14:08 -07001498 return spv::StorageClassUniform;
David Neto22f144c2017-06-12 14:26:21 -04001499 case AddressSpace::ModuleScopePrivate:
1500 return spv::StorageClassPrivate;
Kévin Petitbbbda972020-03-03 19:16:31 +00001501 case AddressSpace::PushConstant:
1502 return spv::StorageClassPushConstant;
David Neto22f144c2017-06-12 14:26:21 -04001503 }
1504}
1505
David Neto862b7d82018-06-14 18:48:37 -04001506spv::StorageClass
1507SPIRVProducerPass::GetStorageClassForArgKind(clspv::ArgKind arg_kind) const {
1508 switch (arg_kind) {
1509 case clspv::ArgKind::Buffer:
1510 return spv::StorageClassStorageBuffer;
Alan Bakerfcda9482018-10-02 17:09:59 -04001511 case clspv::ArgKind::BufferUBO:
1512 return spv::StorageClassUniform;
David Neto862b7d82018-06-14 18:48:37 -04001513 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04001514 return spv::StorageClassStorageBuffer;
1515 case clspv::ArgKind::PodUBO:
1516 return spv::StorageClassUniform;
1517 case clspv::ArgKind::PodPushConstant:
1518 return spv::StorageClassPushConstant;
David Neto862b7d82018-06-14 18:48:37 -04001519 case clspv::ArgKind::Local:
1520 return spv::StorageClassWorkgroup;
alan-bakerf6bc8252020-09-23 14:58:55 -04001521 case clspv::ArgKind::SampledImage:
1522 case clspv::ArgKind::StorageImage:
David Neto862b7d82018-06-14 18:48:37 -04001523 case clspv::ArgKind::Sampler:
1524 return spv::StorageClassUniformConstant;
Radek Szymanskibe4b0c42018-10-04 22:20:53 +01001525 default:
1526 llvm_unreachable("Unsupported storage class for argument kind");
David Neto862b7d82018-06-14 18:48:37 -04001527 }
1528}
1529
David Neto22f144c2017-06-12 14:26:21 -04001530spv::BuiltIn SPIRVProducerPass::GetBuiltin(StringRef Name) const {
1531 return StringSwitch<spv::BuiltIn>(Name)
1532 .Case("__spirv_GlobalInvocationId", spv::BuiltInGlobalInvocationId)
1533 .Case("__spirv_LocalInvocationId", spv::BuiltInLocalInvocationId)
1534 .Case("__spirv_WorkgroupSize", spv::BuiltInWorkgroupSize)
1535 .Case("__spirv_NumWorkgroups", spv::BuiltInNumWorkgroups)
1536 .Case("__spirv_WorkgroupId", spv::BuiltInWorkgroupId)
alan-bakerbed3a882020-04-21 14:42:41 -04001537 .Case("__spirv_WorkDim", spv::BuiltInWorkDim)
alan-bakere1996972020-05-04 08:38:12 -04001538 .Case("__spirv_GlobalOffset", spv::BuiltInGlobalOffset)
David Neto22f144c2017-06-12 14:26:21 -04001539 .Default(spv::BuiltInMax);
1540}
1541
SJW01901d92020-05-21 08:58:31 -05001542SPIRVID SPIRVProducerPass::getOpExtInstImportID() {
1543 if (OpExtInstImportID == 0) {
1544 //
1545 // Generate OpExtInstImport.
1546 //
1547 // Ops[0] ... Ops[n] = Name (Literal String)
David Neto22f144c2017-06-12 14:26:21 -04001548
SJW01901d92020-05-21 08:58:31 -05001549 OpExtInstImportID =
1550 addSPIRVInst<kImports>(spv::OpExtInstImport, "GLSL.std.450");
1551 }
1552 return OpExtInstImportID;
SJWf93f5f32020-05-05 07:27:56 -05001553}
1554
SJW806a5d82020-07-15 12:51:38 -05001555SPIRVID SPIRVProducerPass::addSPIRVGlobalVariable(const SPIRVID &TypeID,
1556 spv::StorageClass SC,
1557 const SPIRVID &InitID) {
1558 // Generate OpVariable.
1559 //
1560 // Ops[0] : Result Type ID
1561 // Ops[1] : Storage Class
1562 // Ops[2] : Initialization Value ID (optional)
1563
1564 SPIRVOperandVec Ops;
1565 Ops << TypeID << SC;
1566 if (InitID.isValid()) {
1567 Ops << InitID;
1568 }
1569
1570 SPIRVID VID = addSPIRVInst<kGlobalVariables>(spv::OpVariable, Ops);
1571
1572 if (SC == spv::StorageClassInput) {
1573 getEntryPointInterfacesList().push_back(VID);
1574 }
1575
1576 return VID;
1577}
1578
alan-bakerc3fd07f2020-10-22 09:48:49 -04001579Type *SPIRVProducerPass::CanonicalType(Type *type) {
1580 if (type->getNumContainedTypes() != 0) {
1581 switch (type->getTypeID()) {
1582 case Type::PointerTyID: {
1583 // For the purposes of our Vulkan SPIR-V type system, constant and global
1584 // are conflated.
1585 auto *ptr_ty = cast<PointerType>(type);
1586 unsigned AddrSpace = ptr_ty->getAddressSpace();
1587 if (AddressSpace::Constant == AddrSpace) {
1588 if (!clspv::Option::ConstantArgsInUniformBuffer()) {
1589 AddrSpace = AddressSpace::Global;
1590 // The canonical type of __constant is __global unless constants are
1591 // passed in uniform buffers.
1592 auto *GlobalTy =
1593 ptr_ty->getPointerElementType()->getPointerTo(AddrSpace);
1594 return GlobalTy;
1595 }
1596 }
1597 break;
1598 }
1599 case Type::StructTyID: {
1600 SmallVector<Type *, 8> subtypes;
1601 bool changed = false;
1602 for (auto *subtype : type->subtypes()) {
1603 auto canonical = CanonicalType(subtype);
1604 subtypes.push_back(canonical);
1605 if (canonical != subtype) {
1606 changed = true;
1607 }
1608 }
1609 if (changed) {
1610 return StructType::get(type->getContext(), subtypes,
1611 cast<StructType>(type)->isPacked());
1612 }
1613 break;
1614 }
1615 case Type::ArrayTyID: {
1616 auto *elem_ty = type->getArrayElementType();
1617 auto *equiv_elem_ty = CanonicalType(elem_ty);
1618 if (equiv_elem_ty != elem_ty) {
1619 return ArrayType::get(equiv_elem_ty,
1620 cast<ArrayType>(type)->getNumElements());
1621 }
1622 break;
1623 }
1624 case Type::FunctionTyID: {
1625 auto *func_ty = cast<FunctionType>(type);
1626 auto *return_ty = CanonicalType(func_ty->getReturnType());
1627 SmallVector<Type *, 8> params;
1628 for (unsigned i = 0; i < func_ty->getNumParams(); ++i) {
1629 params.push_back(CanonicalType(func_ty->getParamType(i)));
1630 }
1631 return FunctionType::get(return_ty, params, func_ty->isVarArg());
1632 }
1633 default:
1634 break;
1635 }
1636 }
1637
1638 return type;
1639}
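// Example of the conflation above: when
// clspv::Option::ConstantArgsInUniformBuffer() is false, a pointer-to-float in
// the __constant address space canonicalizes to the corresponding __global
// pointer type, so both share a single OpTypePointer StorageBuffer in the
// emitted module.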
1640
SJW01901d92020-05-21 08:58:31 -05001641SPIRVID SPIRVProducerPass::getSPIRVType(Type *Ty) {
SJWf93f5f32020-05-05 07:27:56 -05001642 auto TI = TypeMap.find(Ty);
1643 if (TI != TypeMap.end()) {
SJW01901d92020-05-21 08:58:31 -05001644 assert(TI->second.isValid());
SJWf93f5f32020-05-05 07:27:56 -05001645 return TI->second;
1646 }
1647
alan-bakerc3fd07f2020-10-22 09:48:49 -04001648 auto Canonical = CanonicalType(Ty);
1649 if (Canonical != Ty) {
1650 auto CanonicalTI = TypeMap.find(Canonical);
1651 if (CanonicalTI != TypeMap.end()) {
1652 assert(CanonicalTI->second.isValid());
1653 return CanonicalTI->second;
1654 }
1655 }
1656
1657 // Perform the mapping with the canonical type.
1658
SJWf93f5f32020-05-05 07:27:56 -05001659 const auto &DL = module->getDataLayout();
1660
SJW01901d92020-05-21 08:58:31 -05001661 SPIRVID RID;
SJWf93f5f32020-05-05 07:27:56 -05001662
alan-bakerc3fd07f2020-10-22 09:48:49 -04001663 switch (Canonical->getTypeID()) {
SJWf93f5f32020-05-05 07:27:56 -05001664 default: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001665 Canonical->print(errs());
SJWf93f5f32020-05-05 07:27:56 -05001666 llvm_unreachable("Unsupported type???");
1667 break;
1668 }
1669 case Type::MetadataTyID:
1670 case Type::LabelTyID: {
1671 // Ignore these types.
1672 break;
1673 }
1674 case Type::PointerTyID: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001675 PointerType *PTy = cast<PointerType>(Canonical);
SJWf93f5f32020-05-05 07:27:56 -05001676 unsigned AddrSpace = PTy->getAddressSpace();
1677
1678 if (AddrSpace != AddressSpace::UniformConstant) {
1679 auto PointeeTy = PTy->getElementType();
1680 if (PointeeTy->isStructTy() &&
1681 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
1682 // TODO(sjw): assert always an image?
1683 RID = getSPIRVType(PointeeTy);
1684 break;
1685 }
1686 }
1687
SJWf93f5f32020-05-05 07:27:56 -05001688 //
1689 // Generate OpTypePointer.
1690 //
1691
1692 // OpTypePointer
1693 // Ops[0] = Storage Class
1694 // Ops[1] = Element Type ID
1695 SPIRVOperandVec Ops;
1696
SJW01901d92020-05-21 08:58:31 -05001697 Ops << GetStorageClass(AddrSpace) << PTy->getElementType();
SJWf93f5f32020-05-05 07:27:56 -05001698
1699 RID = addSPIRVInst<kTypes>(spv::OpTypePointer, Ops);
1700 break;
1701 }
1702 case Type::StructTyID: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001703 StructType *STy = cast<StructType>(Canonical);
SJWf93f5f32020-05-05 07:27:56 -05001704
1705 // Handle sampler type.
1706 if (STy->isOpaque()) {
1707 if (STy->getName().equals("opencl.sampler_t")) {
1708 //
1709 // Generate OpTypeSampler
1710 //
1711 // Empty Ops.
1712
1713 RID = addSPIRVInst<kTypes>(spv::OpTypeSampler);
1714 break;
1715 } else if (STy->getName().startswith("opencl.image1d_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001716 STy->getName().startswith("opencl.image1d_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001717 STy->getName().startswith("opencl.image1d_wo_t") ||
1718 STy->getName().startswith("opencl.image1d_array_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001719 STy->getName().startswith("opencl.image1d_array_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001720 STy->getName().startswith("opencl.image1d_array_wo_t") ||
1721 STy->getName().startswith("opencl.image2d_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001722 STy->getName().startswith("opencl.image2d_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001723 STy->getName().startswith("opencl.image2d_wo_t") ||
1724 STy->getName().startswith("opencl.image2d_array_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001725 STy->getName().startswith("opencl.image2d_array_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001726 STy->getName().startswith("opencl.image2d_array_wo_t") ||
1727 STy->getName().startswith("opencl.image3d_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001728 STy->getName().startswith("opencl.image3d_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001729 STy->getName().startswith("opencl.image3d_wo_t")) {
SJW01901d92020-05-21 08:58:31 -05001730 if (STy->getName().startswith("opencl.image1d_")) {
1731 if (STy->getName().contains(".sampled"))
1732 addCapability(spv::CapabilitySampled1D);
1733 else
1734 addCapability(spv::CapabilityImage1D);
1735 }
1736
SJWf93f5f32020-05-05 07:27:56 -05001737 //
1738 // Generate OpTypeImage
1739 //
1740 // Ops[0] = Sampled Type ID
1741 // Ops[1] = Dim ID
1742 // Ops[2] = Depth (Literal Number)
1743 // Ops[3] = Arrayed (Literal Number)
1744 // Ops[4] = MS (Literal Number)
1745 // Ops[5] = Sampled (Literal Number)
1746 // Ops[6] = Image Format ID
1747 //
1748 SPIRVOperandVec Ops;
1749
SJW01901d92020-05-21 08:58:31 -05001750 SPIRVID SampledTyID;
SJWf93f5f32020-05-05 07:27:56 -05001751 if (STy->getName().contains(".float")) {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001752 SampledTyID = getSPIRVType(Type::getFloatTy(Canonical->getContext()));
SJWf93f5f32020-05-05 07:27:56 -05001753 } else if (STy->getName().contains(".uint")) {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001754 SampledTyID = getSPIRVType(Type::getInt32Ty(Canonical->getContext()));
SJWf93f5f32020-05-05 07:27:56 -05001755 } else if (STy->getName().contains(".int")) {
1756 // Generate a signed 32-bit integer if necessary.
1757 if (int32ID == 0) {
1758 SPIRVOperandVec intOps;
SJW01901d92020-05-21 08:58:31 -05001759 intOps << 32 << 1;
SJWf93f5f32020-05-05 07:27:56 -05001760 int32ID = addSPIRVInst<kTypes>(spv::OpTypeInt, intOps);
1761 }
1762 SampledTyID = int32ID;
1763
1764 // Generate a vec4 of the signed int if necessary.
1765 if (v4int32ID == 0) {
1766 SPIRVOperandVec vecOps;
SJW01901d92020-05-21 08:58:31 -05001767 vecOps << int32ID << 4;
SJWf93f5f32020-05-05 07:27:56 -05001768 v4int32ID = addSPIRVInst<kTypes>(spv::OpTypeVector, vecOps);
1769 }
1770 } else {
1771 // This was likely an UndefValue.
alan-bakerc3fd07f2020-10-22 09:48:49 -04001772 SampledTyID = getSPIRVType(Type::getFloatTy(Canonical->getContext()));
SJWf93f5f32020-05-05 07:27:56 -05001773 }
SJW01901d92020-05-21 08:58:31 -05001774 Ops << SampledTyID;
SJWf93f5f32020-05-05 07:27:56 -05001775
1776 spv::Dim DimID = spv::Dim2D;
1777 if (STy->getName().startswith("opencl.image1d_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001778 STy->getName().startswith("opencl.image1d_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001779 STy->getName().startswith("opencl.image1d_wo_t") ||
1780 STy->getName().startswith("opencl.image1d_array_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001781 STy->getName().startswith("opencl.image1d_array_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001782 STy->getName().startswith("opencl.image1d_array_wo_t")) {
1783 DimID = spv::Dim1D;
1784 } else if (STy->getName().startswith("opencl.image3d_ro_t") ||
alan-bakerf6bc8252020-09-23 14:58:55 -04001785 STy->getName().startswith("opencl.image3d_rw_t") ||
SJWf93f5f32020-05-05 07:27:56 -05001786 STy->getName().startswith("opencl.image3d_wo_t")) {
1787 DimID = spv::Dim3D;
1788 }
SJW01901d92020-05-21 08:58:31 -05001789 Ops << DimID;
SJWf93f5f32020-05-05 07:27:56 -05001790
1791 // TODO: Set up Depth.
SJW01901d92020-05-21 08:58:31 -05001792 Ops << 0;
SJWf93f5f32020-05-05 07:27:56 -05001793
1794 uint32_t arrayed = STy->getName().contains("_array_") ? 1 : 0;
SJW01901d92020-05-21 08:58:31 -05001795 Ops << arrayed;
SJWf93f5f32020-05-05 07:27:56 -05001796
1797 // TODO: Set up MS.
SJW01901d92020-05-21 08:58:31 -05001798 Ops << 0;
SJWf93f5f32020-05-05 07:27:56 -05001799
1800 // Set up Sampled.
1801 //
1802 // From Spec
1803 //
1804 // 0 indicates this is only known at run time, not at compile time
1805 // 1 indicates will be used with sampler
1806 // 2 indicates will be used without a sampler (a storage image)
1807 uint32_t Sampled = 1;
1808 if (!STy->getName().contains(".sampled")) {
1809 Sampled = 2;
1810 }
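// For instance (assuming the ".sampled"/".float" suffix convention tested
// above): a type named opencl.image2d_ro_t.float.sampled gets Sampled = 1 and
// is later wrapped in OpTypeSampledImage, while opencl.image2d_wo_t.float
// gets Sampled = 2 and is treated as a storage image.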
SJW01901d92020-05-21 08:58:31 -05001811 Ops << Sampled;
SJWf93f5f32020-05-05 07:27:56 -05001812
1813 // TODO: Set up Image Format.
SJW01901d92020-05-21 08:58:31 -05001814 Ops << spv::ImageFormatUnknown;
SJWf93f5f32020-05-05 07:27:56 -05001815 RID = addSPIRVInst<kTypes>(spv::OpTypeImage, Ops);
1816
alan-bakerf6bc8252020-09-23 14:58:55 -04001817 // Only need a sampled version of the type if it is used with a sampler.
1818 if (Sampled == 1) {
1819 Ops.clear();
1820 Ops << RID;
alan-bakerc3fd07f2020-10-22 09:48:49 -04001821 getImageTypeMap()[Canonical] =
alan-bakerf6bc8252020-09-23 14:58:55 -04001822 addSPIRVInst<kTypes>(spv::OpTypeSampledImage, Ops);
1823 }
SJWf93f5f32020-05-05 07:27:56 -05001824 break;
1825 }
1826 }
1827
1828 //
1829 // Generate OpTypeStruct
1830 //
1831 // Ops[0] ... Ops[n] = Member IDs
1832 SPIRVOperandVec Ops;
1833
1834 for (auto *EleTy : STy->elements()) {
SJW01901d92020-05-21 08:58:31 -05001835 Ops << EleTy;
SJWf93f5f32020-05-05 07:27:56 -05001836 }
1837
1838 RID = addSPIRVInst<kTypes>(spv::OpTypeStruct, Ops);
1839
alan-bakerc3fd07f2020-10-22 09:48:49 -04001840 // Generate OpMemberDecorate unless we are generating it for the canonical
1841 // type.
1842 StructType *canonical = cast<StructType>(CanonicalType(STy));
1843 if (TypesNeedingLayout.idFor(STy) &&
1844 (canonical == STy || !TypesNeedingLayout.idFor(canonical))) {
SJWf93f5f32020-05-05 07:27:56 -05001845 for (unsigned MemberIdx = 0; MemberIdx < STy->getNumElements();
1846 MemberIdx++) {
1847 // Ops[0] = Structure Type ID
1848 // Ops[1] = Member Index(Literal Number)
1849 // Ops[2] = Decoration (Offset)
1850 // Ops[3] = Byte Offset (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05001851 const auto ByteOffset =
1852 GetExplicitLayoutStructMemberOffset(STy, MemberIdx, DL);
1853
SJW01901d92020-05-21 08:58:31 -05001854 Ops.clear();
1855 Ops << RID << MemberIdx << spv::DecorationOffset << ByteOffset;
SJWf93f5f32020-05-05 07:27:56 -05001856
1857 addSPIRVInst<kAnnotations>(spv::OpMemberDecorate, Ops);
1858 }
1859 }
1860
alan-bakerc3fd07f2020-10-22 09:48:49 -04001861 // Generate OpDecorate unless we are generating it for the canonical type.
1862 if (StructTypesNeedingBlock.idFor(STy) &&
1863 (canonical == STy || !StructTypesNeedingBlock.idFor(canonical))) {
SJWf93f5f32020-05-05 07:27:56 -05001864 Ops.clear();
1865 // Use Block decorations with StorageBuffer storage class.
SJW01901d92020-05-21 08:58:31 -05001866 Ops << RID << spv::DecorationBlock;
SJWf93f5f32020-05-05 07:27:56 -05001867
1868 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
1869 }
1870 break;
1871 }
1872 case Type::IntegerTyID: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001873 uint32_t bit_width =
1874 static_cast<uint32_t>(Canonical->getPrimitiveSizeInBits());
SJWf93f5f32020-05-05 07:27:56 -05001875
alan-bakere2a62752020-07-09 22:53:23 -04001876 if (clspv::Option::Int8Support() && bit_width == 8) {
SJW01901d92020-05-21 08:58:31 -05001877 addCapability(spv::CapabilityInt8);
alan-bakere2a62752020-07-09 22:53:23 -04001878 } else if (bit_width == 16) {
SJW01901d92020-05-21 08:58:31 -05001879 addCapability(spv::CapabilityInt16);
alan-bakere2a62752020-07-09 22:53:23 -04001880 } else if (bit_width == 64) {
SJW01901d92020-05-21 08:58:31 -05001881 addCapability(spv::CapabilityInt64);
1882 }
1883
alan-bakere2a62752020-07-09 22:53:23 -04001884 if (bit_width == 1) {
SJWf93f5f32020-05-05 07:27:56 -05001885 RID = addSPIRVInst<kTypes>(spv::OpTypeBool);
1886 } else {
alan-bakere2a62752020-07-09 22:53:23 -04001887 if (!clspv::Option::Int8Support() && bit_width == 8) {
SJWf93f5f32020-05-05 07:27:56 -05001888 // i8 is added to TypeMap as i32.
alan-bakerc3fd07f2020-10-22 09:48:49 -04001889 RID = getSPIRVType(Type::getIntNTy(Canonical->getContext(), 32));
SJWf93f5f32020-05-05 07:27:56 -05001890 } else {
1891 SPIRVOperandVec Ops;
alan-bakere2a62752020-07-09 22:53:23 -04001892 Ops << bit_width << 0 /* not signed */;
SJWf93f5f32020-05-05 07:27:56 -05001893 RID = addSPIRVInst<kTypes>(spv::OpTypeInt, Ops);
1894 }
1895 }
1896 break;
1897 }
1898 case Type::HalfTyID:
1899 case Type::FloatTyID:
1900 case Type::DoubleTyID: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001901 uint32_t bit_width =
1902 static_cast<uint32_t>(Canonical->getPrimitiveSizeInBits());
alan-bakere2a62752020-07-09 22:53:23 -04001903 if (bit_width == 16) {
SJW01901d92020-05-21 08:58:31 -05001904 addCapability(spv::CapabilityFloat16);
alan-bakere2a62752020-07-09 22:53:23 -04001905 } else if (bit_width == 64) {
SJW01901d92020-05-21 08:58:31 -05001906 addCapability(spv::CapabilityFloat64);
1907 }
1908
SJWf93f5f32020-05-05 07:27:56 -05001909 SPIRVOperandVec Ops;
alan-bakere2a62752020-07-09 22:53:23 -04001910 Ops << bit_width;
SJWf93f5f32020-05-05 07:27:56 -05001911
1912 RID = addSPIRVInst<kTypes>(spv::OpTypeFloat, Ops);
1913 break;
1914 }
1915 case Type::ArrayTyID: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001916 ArrayType *ArrTy = cast<ArrayType>(Canonical);
SJWf93f5f32020-05-05 07:27:56 -05001917 const uint64_t Length = ArrTy->getArrayNumElements();
1918 if (Length == 0) {
1919 // By convention, map it to a RuntimeArray.
1920
1921 Type *EleTy = ArrTy->getArrayElementType();
1922
1923 //
1924 // Generate OpTypeRuntimeArray.
1925 //
1926 // OpTypeRuntimeArray
1927 // Ops[0] = Element Type ID
1928 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05001929 Ops << EleTy;
SJWf93f5f32020-05-05 07:27:56 -05001930
1931 RID = addSPIRVInst<kTypes>(spv::OpTypeRuntimeArray, Ops);
1932
1933 if (Hack_generate_runtime_array_stride_early) {
1934 // Generate OpDecorate.
1935
1936 // Ops[0] = Target ID
1937 // Ops[1] = Decoration (ArrayStride)
1938 // Ops[2] = Stride Number(Literal Number)
1939 Ops.clear();
1940
SJW01901d92020-05-21 08:58:31 -05001941 Ops << RID << spv::DecorationArrayStride
1942 << static_cast<uint32_t>(GetTypeAllocSize(EleTy, DL));
SJWf93f5f32020-05-05 07:27:56 -05001943
1944 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
1945 }
1946
1947 } else {
1948
1949 //
1950 // Generate OpConstant and OpTypeArray.
1951 //
1952
1953 //
1954 // Generate OpConstant for array length.
1955 //
1956 // Add constant for length to constant list.
1957 Constant *CstLength =
1958 ConstantInt::get(Type::getInt32Ty(module->getContext()), Length);
SJWf93f5f32020-05-05 07:27:56 -05001959
1960 // Remember to generate ArrayStride later
alan-bakerc3fd07f2020-10-22 09:48:49 -04001961 getTypesNeedingArrayStride().insert(Canonical);
SJWf93f5f32020-05-05 07:27:56 -05001962
1963 //
1964 // Generate OpTypeArray.
1965 //
1966 // Ops[0] = Element Type ID
1967 // Ops[1] = Array Length Constant ID
1968 SPIRVOperandVec Ops;
1969
SJW01901d92020-05-21 08:58:31 -05001970 Ops << ArrTy->getElementType() << CstLength;
SJWf93f5f32020-05-05 07:27:56 -05001971
1972 RID = addSPIRVInst<kTypes>(spv::OpTypeArray, Ops);
1973 }
1974 break;
1975 }
1976 case Type::FixedVectorTyID: {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001977 auto VecTy = cast<VectorType>(Canonical);
SJWf93f5f32020-05-05 07:27:56 -05001978 // <4 x i8> is changed to i32 if i8 is not generally supported.
1979 if (!clspv::Option::Int8Support() &&
1980 VecTy->getElementType() == Type::getInt8Ty(module->getContext())) {
alan-baker5a8c3be2020-09-09 13:44:26 -04001981 if (VecTy->getElementCount().getKnownMinValue() == 4) {
SJWf93f5f32020-05-05 07:27:56 -05001982 RID = getSPIRVType(VecTy->getElementType());
1983 break;
1984 } else {
alan-bakerc3fd07f2020-10-22 09:48:49 -04001985 Canonical->print(errs());
SJWf93f5f32020-05-05 07:27:56 -05001986 llvm_unreachable("Support above i8 vector type");
1987 }
1988 }
1989
1990 // Ops[0] = Component Type ID
1991 // Ops[1] = Component Count (Literal Number)
1992 SPIRVOperandVec Ops;
alan-baker5a8c3be2020-09-09 13:44:26 -04001993 Ops << VecTy->getElementType()
1994 << VecTy->getElementCount().getKnownMinValue();
SJWf93f5f32020-05-05 07:27:56 -05001995
1996 RID = addSPIRVInst<kTypes>(spv::OpTypeVector, Ops);
1997 break;
1998 }
1999 case Type::VoidTyID: {
2000 RID = addSPIRVInst<kTypes>(spv::OpTypeVoid);
2001 break;
2002 }
2003 case Type::FunctionTyID: {
2004 // Generate SPIRV instruction for function type.
alan-bakerc3fd07f2020-10-22 09:48:49 -04002005 FunctionType *FTy = cast<FunctionType>(Canonical);
SJWf93f5f32020-05-05 07:27:56 -05002006
2007 // Ops[0] = Return Type ID
2008 // Ops[1] ... Ops[n] = Parameter Type IDs
2009 SPIRVOperandVec Ops;
2010
2011 // Find SPIRV instruction for return type
SJW01901d92020-05-21 08:58:31 -05002012 Ops << FTy->getReturnType();
SJWf93f5f32020-05-05 07:27:56 -05002013
2014 // Find SPIRV instructions for parameter types
2015 for (unsigned k = 0; k < FTy->getNumParams(); k++) {
2016 // Find SPIRV instruction for parameter type.
2017 auto ParamTy = FTy->getParamType(k);
2018 if (ParamTy->isPointerTy()) {
2019 auto PointeeTy = ParamTy->getPointerElementType();
2020 if (PointeeTy->isStructTy() &&
2021 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
2022 ParamTy = PointeeTy;
2023 }
2024 }
2025
SJW01901d92020-05-21 08:58:31 -05002026 Ops << ParamTy;
SJWf93f5f32020-05-05 07:27:56 -05002027 }
2028
2029 RID = addSPIRVInst<kTypes>(spv::OpTypeFunction, Ops);
2030 break;
2031 }
2032 }
2033
SJW01901d92020-05-21 08:58:31 -05002034 if (RID.isValid()) {
alan-bakerc3fd07f2020-10-22 09:48:49 -04002035 TypeMap[Canonical] = RID;
2036 if (Ty != Canonical) {
2037 // Speed up future lookups of this type by also caching the non-canonical
2038 // type.
2039 TypeMap[Ty] = RID;
2040 }
SJWf93f5f32020-05-05 07:27:56 -05002041 }
2042 return RID;
David Neto22f144c2017-06-12 14:26:21 -04002043}
2044
SJW77b87ad2020-04-21 14:37:52 -05002045void SPIRVProducerPass::GenerateSPIRVTypes() {
David Neto22f144c2017-06-12 14:26:21 -04002046 for (Type *Ty : getTypeList()) {
SJWf93f5f32020-05-05 07:27:56 -05002047 getSPIRVType(Ty);
David Netoc6f3ab22018-04-06 18:02:31 -04002048 }
David Neto22f144c2017-06-12 14:26:21 -04002049}
2050
SJW806a5d82020-07-15 12:51:38 -05002051SPIRVID SPIRVProducerPass::getSPIRVInt32Constant(uint32_t CstVal) {
2052 Type *i32 = Type::getInt32Ty(module->getContext());
2053 Constant *Cst = ConstantInt::get(i32, CstVal);
2054 return getSPIRVValue(Cst);
2055}
2056
SJWf93f5f32020-05-05 07:27:56 -05002057SPIRVID SPIRVProducerPass::getSPIRVConstant(Constant *Cst) {
David Neto22f144c2017-06-12 14:26:21 -04002058 ValueMapType &VMap = getValueMap();
David Neto482550a2018-03-24 05:21:07 -07002059 const bool hack_undef = clspv::Option::HackUndef();
David Neto22f144c2017-06-12 14:26:21 -04002060
SJW01901d92020-05-21 08:58:31 -05002061 SPIRVID RID;
David Neto22f144c2017-06-12 14:26:21 -04002062
SJWf93f5f32020-05-05 07:27:56 -05002063 //
2064 // Generate OpConstant.
2065 //
2066 // Ops[0] = Result Type ID
2067 // Ops[1] .. Ops[n] = Values LiteralNumber
2068 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002069
SJW01901d92020-05-21 08:58:31 -05002070 Ops << Cst->getType();
David Neto22f144c2017-06-12 14:26:21 -04002071
SJWf93f5f32020-05-05 07:27:56 -05002072 std::vector<uint32_t> LiteralNum;
2073 spv::Op Opcode = spv::OpNop;
David Neto22f144c2017-06-12 14:26:21 -04002074
SJWf93f5f32020-05-05 07:27:56 -05002075 if (isa<UndefValue>(Cst)) {
David Neto22f144c2017-06-12 14:26:21 -04002076 // Ops[0] = Result Type ID
SJWf93f5f32020-05-05 07:27:56 -05002077 Opcode = spv::OpUndef;
2078 if (hack_undef && IsTypeNullable(Cst->getType())) {
2079 Opcode = spv::OpConstantNull;
2080 }
2081 } else if (const ConstantInt *CI = dyn_cast<ConstantInt>(Cst)) {
alan-bakere2a62752020-07-09 22:53:23 -04002082 unsigned bit_width = CI->getBitWidth();
2083 if (bit_width == 1) {
SJWf93f5f32020-05-05 07:27:56 -05002084 // If the bitwidth of constant is 1, generate OpConstantTrue or
2085 // OpConstantFalse.
2086 if (CI->getZExtValue()) {
2087 // Ops[0] = Result Type ID
2088 Opcode = spv::OpConstantTrue;
David Neto22f144c2017-06-12 14:26:21 -04002089 } else {
SJWf93f5f32020-05-05 07:27:56 -05002090 // Ops[0] = Result Type ID
2091 Opcode = spv::OpConstantFalse;
David Neto22f144c2017-06-12 14:26:21 -04002092 }
SJWf93f5f32020-05-05 07:27:56 -05002093 } else {
2094 auto V = CI->getZExtValue();
2095 LiteralNum.push_back(V & 0xFFFFFFFF);
2096
alan-bakere2a62752020-07-09 22:53:23 -04002097 if (bit_width > 32) {
SJWf93f5f32020-05-05 07:27:56 -05002098 LiteralNum.push_back(V >> 32);
David Neto22f144c2017-06-12 14:26:21 -04002099 }
2100
2101 Opcode = spv::OpConstant;
David Neto22f144c2017-06-12 14:26:21 -04002102
SJW01901d92020-05-21 08:58:31 -05002103 Ops << LiteralNum;
SJWf93f5f32020-05-05 07:27:56 -05002104 }
2105 } else if (const ConstantFP *CFP = dyn_cast<ConstantFP>(Cst)) {
2106 uint64_t FPVal = CFP->getValueAPF().bitcastToAPInt().getZExtValue();
2107 Type *CFPTy = CFP->getType();
2108 if (CFPTy->isFloatTy()) {
2109 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2110 } else if (CFPTy->isDoubleTy()) {
2111 LiteralNum.push_back(FPVal & 0xFFFFFFFF);
2112 LiteralNum.push_back(FPVal >> 32);
2113 } else if (CFPTy->isHalfTy()) {
2114 LiteralNum.push_back(FPVal & 0xFFFF);
2115 } else {
2116 CFPTy->print(errs());
2117 llvm_unreachable("Implement this ConstantFP Type");
2118 }
David Neto22f144c2017-06-12 14:26:21 -04002119
SJWf93f5f32020-05-05 07:27:56 -05002120 Opcode = spv::OpConstant;
David Neto49351ac2017-08-26 17:32:20 -04002121
SJW01901d92020-05-21 08:58:31 -05002122 Ops << LiteralNum;
SJWf93f5f32020-05-05 07:27:56 -05002123 } else if (isa<ConstantDataSequential>(Cst) &&
2124 cast<ConstantDataSequential>(Cst)->isString()) {
2125 Cst->print(errs());
2126 llvm_unreachable("Implement this Constant");
David Neto49351ac2017-08-26 17:32:20 -04002127
SJWf93f5f32020-05-05 07:27:56 -05002128 } else if (const ConstantDataSequential *CDS =
2129 dyn_cast<ConstantDataSequential>(Cst)) {
2130 // Let's convert <4 x i8> constant to int constant specially.
2131 // This case occurs when all the values are specified as constant
2132 // ints.
2133 Type *CstTy = Cst->getType();
2134 if (is4xi8vec(CstTy)) {
SJWf93f5f32020-05-05 07:27:56 -05002135 //
2136 // Generate OpConstant with OpTypeInt 32 0.
2137 //
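// For example: <4 x i8> <i8 1, i8 2, i8 3, i8 4> is packed by the loop below
// into 0x01020304 (element 0 lands in the most significant byte) and emitted
// as a single 32-bit OpConstant.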
2138 uint32_t IntValue = 0;
2139 for (unsigned k = 0; k < 4; k++) {
2140 const uint64_t Val = CDS->getElementAsInteger(k);
2141 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto49351ac2017-08-26 17:32:20 -04002142 }
2143
SJW806a5d82020-07-15 12:51:38 -05002144 RID = getSPIRVInt32Constant(IntValue);
SJWf93f5f32020-05-05 07:27:56 -05002145 } else {
2146
David Neto49351ac2017-08-26 17:32:20 -04002147 // A normal constant-data-sequential case.
David Neto22f144c2017-06-12 14:26:21 -04002148 for (unsigned k = 0; k < CDS->getNumElements(); k++) {
SJW01901d92020-05-21 08:58:31 -05002149 Ops << CDS->getElementAsConstant(k);
David Neto22f144c2017-06-12 14:26:21 -04002150 }
2151
2152 Opcode = spv::OpConstantComposite;
SJWf93f5f32020-05-05 07:27:56 -05002153 }
2154 } else if (const ConstantAggregate *CA = dyn_cast<ConstantAggregate>(Cst)) {
2155 // Let's convert <4 x i8> constant to int constant specially.
2156 // This case occurs when at least one of the values is an undef.
2157 Type *CstTy = Cst->getType();
2158 if (is4xi8vec(CstTy)) {
SJWf93f5f32020-05-05 07:27:56 -05002159 //
2160 // Generate OpConstant with OpTypeInt 32 0.
2161 //
2162 uint32_t IntValue = 0;
2163 for (User::const_op_iterator I = Cst->op_begin(), E = Cst->op_end();
2164 I != E; ++I) {
2165 uint64_t Val = 0;
2166 const Value *CV = *I;
2167 if (auto *CI2 = dyn_cast<ConstantInt>(CV)) {
2168 Val = CI2->getZExtValue();
David Neto22f144c2017-06-12 14:26:21 -04002169 }
SJWf93f5f32020-05-05 07:27:56 -05002170 IntValue = (IntValue << 8) | (Val & 0xffu);
David Neto22f144c2017-06-12 14:26:21 -04002171 }
2172
SJW806a5d82020-07-15 12:51:38 -05002173 RID = getSPIRVInt32Constant(IntValue);
SJWf93f5f32020-05-05 07:27:56 -05002174 } else {
2175
David Neto22f144c2017-06-12 14:26:21 -04002176 // We use a constant composite in SPIR-V for our constant aggregate in
2177 // LLVM.
2178 Opcode = spv::OpConstantComposite;
David Neto22f144c2017-06-12 14:26:21 -04002179
2180 for (unsigned k = 0; k < CA->getNumOperands(); k++) {
David Neto22f144c2017-06-12 14:26:21 -04002181 // And add an operand to the composite we are constructing
SJW01901d92020-05-21 08:58:31 -05002182 Ops << CA->getAggregateElement(k);
David Neto22f144c2017-06-12 14:26:21 -04002183 }
David Neto22f144c2017-06-12 14:26:21 -04002184 }
SJWf93f5f32020-05-05 07:27:56 -05002185 } else if (Cst->isNullValue()) {
2186 Opcode = spv::OpConstantNull;
2187 } else {
2188 Cst->print(errs());
2189 llvm_unreachable("Unsupported Constant???");
2190 }
David Neto22f144c2017-06-12 14:26:21 -04002191
SJWf93f5f32020-05-05 07:27:56 -05002192 if (Opcode == spv::OpConstantNull && Cst->getType()->isPointerTy()) {
2193 // Null pointer requires variable pointers.
2194 setVariablePointersCapabilities(Cst->getType()->getPointerAddressSpace());
2195 }
alan-baker5b86ed72019-02-15 08:26:50 -05002196
SJWf93f5f32020-05-05 07:27:56 -05002197 if (RID == 0) {
2198 RID = addSPIRVInst<kConstants>(Opcode, Ops);
2199 }
2200
2201 VMap[Cst] = RID;
2202
2203 return RID;
2204}
2205
2206SPIRVID SPIRVProducerPass::getSPIRVValue(Value *V) {
2207 auto II = ValueMap.find(V);
2208 if (II != ValueMap.end()) {
SJW01901d92020-05-21 08:58:31 -05002209 assert(II->second.isValid());
SJWf93f5f32020-05-05 07:27:56 -05002210 return II->second;
2211 }
2212 if (Constant *Cst = dyn_cast<Constant>(V)) {
2213 return getSPIRVConstant(Cst);
2214 } else {
2215 llvm_unreachable("Variable not found");
2216 }
2217}
2218
SJW77b87ad2020-04-21 14:37:52 -05002219void SPIRVProducerPass::GenerateSamplers() {
alan-bakerb6b09dc2018-11-08 16:59:28 -05002220 auto &sampler_map = getSamplerMap();
alan-baker09cb9802019-12-10 13:16:27 -05002221 SamplerLiteralToIDMap.clear();
David Neto862b7d82018-06-14 18:48:37 -04002222 DenseMap<unsigned, unsigned> SamplerLiteralToDescriptorSetMap;
2223 DenseMap<unsigned, unsigned> SamplerLiteralToBindingMap;
David Neto22f144c2017-06-12 14:26:21 -04002224
David Neto862b7d82018-06-14 18:48:37 -04002225 // We might have samplers in the sampler map that are not used
2226 // in the translation unit. We still need to allocate variables and
2227 // bindings for them.
2228 DenseSet<unsigned> used_bindings;
David Neto22f144c2017-06-12 14:26:21 -04002229
SJW77b87ad2020-04-21 14:37:52 -05002230 auto *var_fn = module->getFunction(clspv::LiteralSamplerFunction());
alan-baker09cb9802019-12-10 13:16:27 -05002231 // Return if there are no literal samplers.
alan-bakerb6b09dc2018-11-08 16:59:28 -05002232 if (!var_fn)
2233 return;
alan-baker09cb9802019-12-10 13:16:27 -05002234
David Neto862b7d82018-06-14 18:48:37 -04002235 for (auto user : var_fn->users()) {
2236 // Populate SamplerLiteralToDescriptorSetMap and
2237 // SamplerLiteralToBindingMap.
2238 //
2239 // Look for calls like
2240 // call %opencl.sampler_t addrspace(2)*
2241 // @clspv.sampler.var.literal(
2242 // i32 descriptor,
2243 // i32 binding,
alan-baker09cb9802019-12-10 13:16:27 -05002244 // i32 (index-into-sampler-map|sampler_mask))
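// For example (illustrative): when the sampler map is in use, a third operand
// of i32 2 selects entry 2 of the map; otherwise the operand is taken
// directly as the sampler literal value (a mask built from the CLK_* sampler
// flags in the kernel source).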
alan-bakerb6b09dc2018-11-08 16:59:28 -05002245 if (auto *call = dyn_cast<CallInst>(user)) {
alan-baker09cb9802019-12-10 13:16:27 -05002246 const auto third_param = static_cast<unsigned>(
alan-bakerb6b09dc2018-11-08 16:59:28 -05002247 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
alan-baker09cb9802019-12-10 13:16:27 -05002248 auto sampler_value = third_param;
2249 if (clspv::Option::UseSamplerMap()) {
2250 if (third_param >= sampler_map.size()) {
2251 errs() << "Out of bounds index to sampler map: " << third_param;
2252 llvm_unreachable("bad sampler init: out of bounds");
2253 }
2254 sampler_value = sampler_map[third_param].first;
David Neto862b7d82018-06-14 18:48:37 -04002255 }
2256
David Neto862b7d82018-06-14 18:48:37 -04002257 const auto descriptor_set = static_cast<unsigned>(
2258 dyn_cast<ConstantInt>(call->getArgOperand(0))->getZExtValue());
2259 const auto binding = static_cast<unsigned>(
2260 dyn_cast<ConstantInt>(call->getArgOperand(1))->getZExtValue());
2261
2262 SamplerLiteralToDescriptorSetMap[sampler_value] = descriptor_set;
2263 SamplerLiteralToBindingMap[sampler_value] = binding;
2264 used_bindings.insert(binding);
2265 }
2266 }
2267
alan-baker09cb9802019-12-10 13:16:27 -05002268 DenseSet<size_t> seen;
2269 for (auto user : var_fn->users()) {
2270 if (!isa<CallInst>(user))
2271 continue;
2272
2273 auto call = cast<CallInst>(user);
2274 const unsigned third_param = static_cast<unsigned>(
2275 dyn_cast<ConstantInt>(call->getArgOperand(2))->getZExtValue());
2276
2277 // Already allocated a variable for this value.
2278 if (!seen.insert(third_param).second)
2279 continue;
2280
2281 auto sampler_value = third_param;
2282 if (clspv::Option::UseSamplerMap()) {
2283 sampler_value = sampler_map[third_param].first;
2284 }
2285
SJW806a5d82020-07-15 12:51:38 -05002286 auto sampler_var_id = addSPIRVGlobalVariable(
2287 getSPIRVType(SamplerTy), spv::StorageClassUniformConstant);
David Neto22f144c2017-06-12 14:26:21 -04002288
alan-baker09cb9802019-12-10 13:16:27 -05002289 SamplerLiteralToIDMap[sampler_value] = sampler_var_id;
David Neto22f144c2017-06-12 14:26:21 -04002290
David Neto862b7d82018-06-14 18:48:37 -04002291 unsigned descriptor_set;
2292 unsigned binding;
alan-baker09cb9802019-12-10 13:16:27 -05002293 if (SamplerLiteralToBindingMap.find(sampler_value) ==
alan-bakerb6b09dc2018-11-08 16:59:28 -05002294 SamplerLiteralToBindingMap.end()) {
David Neto862b7d82018-06-14 18:48:37 -04002295 // This sampler is not actually used. Assign it the next unused binding.
alan-baker7506abb2020-09-10 15:02:55 -04002296 for (binding = 0; used_bindings.count(binding); binding++) {
2297 }
David Neto862b7d82018-06-14 18:48:37 -04002298 descriptor_set = 0; // Literal samplers always use descriptor set 0.
2299 used_bindings.insert(binding);
2300 } else {
alan-baker09cb9802019-12-10 13:16:27 -05002301 descriptor_set = SamplerLiteralToDescriptorSetMap[sampler_value];
2302 binding = SamplerLiteralToBindingMap[sampler_value];
alan-bakercff80152019-06-15 00:38:00 -04002303
alan-baker86ce19c2020-08-05 13:09:19 -04002304 auto import_id = getReflectionImport();
2305 SPIRVOperandVec Ops;
2306 Ops << getSPIRVType(Type::getVoidTy(module->getContext())) << import_id
2307 << reflection::ExtInstLiteralSampler
2308 << getSPIRVInt32Constant(descriptor_set)
2309 << getSPIRVInt32Constant(binding)
2310 << getSPIRVInt32Constant(sampler_value);
2311 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
David Neto862b7d82018-06-14 18:48:37 -04002312 }
2313
SJW69939d52020-04-16 07:29:07 -05002314 // Ops[0] = Target ID
2315 // Ops[1] = Decoration (DescriptorSet)
2316 // Ops[2] = LiteralNumber according to Decoration
SJW806a5d82020-07-15 12:51:38 -05002317 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05002318 Ops << sampler_var_id << spv::DecorationDescriptorSet << descriptor_set;
David Neto22f144c2017-06-12 14:26:21 -04002319
SJWf93f5f32020-05-05 07:27:56 -05002320 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002321
2322 // Ops[0] = Target ID
2323 // Ops[1] = Decoration (Binding)
2324 // Ops[2] = LiteralNumber according to Decoration
2325 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002326 Ops << sampler_var_id << spv::DecorationBinding << binding;
David Neto22f144c2017-06-12 14:26:21 -04002327
SJWf93f5f32020-05-05 07:27:56 -05002328 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002329 }
David Neto862b7d82018-06-14 18:48:37 -04002330}
David Neto22f144c2017-06-12 14:26:21 -04002331
SJW77b87ad2020-04-21 14:37:52 -05002332void SPIRVProducerPass::GenerateResourceVars() {
David Neto862b7d82018-06-14 18:48:37 -04002333 ValueMapType &VMap = getValueMap();
David Neto22f144c2017-06-12 14:26:21 -04002334
David Neto862b7d82018-06-14 18:48:37 -04002335 // Generate variables. Make one for each of resource var info object.
2336 for (auto *info : ModuleOrderedResourceVars) {
2337 Type *type = info->var_fn->getReturnType();
2338 // Remap the address space for opaque types.
2339 switch (info->arg_kind) {
2340 case clspv::ArgKind::Sampler:
alan-bakerf6bc8252020-09-23 14:58:55 -04002341 case clspv::ArgKind::SampledImage:
2342 case clspv::ArgKind::StorageImage:
David Neto862b7d82018-06-14 18:48:37 -04002343 type = PointerType::get(type->getPointerElementType(),
2344 clspv::AddressSpace::UniformConstant);
2345 break;
2346 default:
2347 break;
2348 }
David Neto22f144c2017-06-12 14:26:21 -04002349
David Neto862b7d82018-06-14 18:48:37 -04002350 const auto sc = GetStorageClassForArgKind(info->arg_kind);
David Neto22f144c2017-06-12 14:26:21 -04002351
SJW806a5d82020-07-15 12:51:38 -05002352 info->var_id = addSPIRVGlobalVariable(getSPIRVType(type), sc);
David Neto862b7d82018-06-14 18:48:37 -04002353
2354 // Map calls to the variable-builtin-function.
2355 for (auto &U : info->var_fn->uses()) {
2356 if (auto *call = dyn_cast<CallInst>(U.getUser())) {
2357 const auto set = unsigned(
2358 dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
2359 const auto binding = unsigned(
2360 dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
2361 if (set == info->descriptor_set && binding == info->binding) {
2362 switch (info->arg_kind) {
2363 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002364 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002365 case clspv::ArgKind::Pod:
alan-baker9b0ec3c2020-04-06 14:45:34 -04002366 case clspv::ArgKind::PodUBO:
2367 case clspv::ArgKind::PodPushConstant:
David Neto862b7d82018-06-14 18:48:37 -04002368 // The call maps to the variable directly.
2369 VMap[call] = info->var_id;
2370 break;
2371 case clspv::ArgKind::Sampler:
alan-bakerf6bc8252020-09-23 14:58:55 -04002372 case clspv::ArgKind::SampledImage:
2373 case clspv::ArgKind::StorageImage:
David Neto862b7d82018-06-14 18:48:37 -04002374 // The call maps to a load we generate later.
2375 ResourceVarDeferredLoadCalls[call] = info->var_id;
2376 break;
2377 default:
2378 llvm_unreachable("Unhandled arg kind");
2379 }
2380 }
David Neto22f144c2017-06-12 14:26:21 -04002381 }
David Neto862b7d82018-06-14 18:48:37 -04002382 }
2383 }
David Neto22f144c2017-06-12 14:26:21 -04002384
David Neto862b7d82018-06-14 18:48:37 -04002385 // Generate associated decorations.
SJWf93f5f32020-05-05 07:27:56 -05002386 SPIRVOperandVec Ops;
David Neto862b7d82018-06-14 18:48:37 -04002387 for (auto *info : ModuleOrderedResourceVars) {
alan-baker9b0ec3c2020-04-06 14:45:34 -04002388 // Push constants don't need descriptor set or binding decorations.
2389 if (info->arg_kind == clspv::ArgKind::PodPushConstant)
2390 continue;
2391
David Neto862b7d82018-06-14 18:48:37 -04002392 // Decorate with DescriptorSet and Binding.
2393 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002394 Ops << info->var_id << spv::DecorationDescriptorSet << info->descriptor_set;
SJWf93f5f32020-05-05 07:27:56 -05002395 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto862b7d82018-06-14 18:48:37 -04002396
2397 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002398 Ops << info->var_id << spv::DecorationBinding << info->binding;
SJWf93f5f32020-05-05 07:27:56 -05002399 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto862b7d82018-06-14 18:48:37 -04002400
alan-bakere9308012019-03-15 10:25:13 -04002401 if (info->coherent) {
2402 // Decorate with Coherent if required for the variable.
2403 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002404 Ops << info->var_id << spv::DecorationCoherent;
SJWf93f5f32020-05-05 07:27:56 -05002405 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere9308012019-03-15 10:25:13 -04002406 }
2407
David Neto862b7d82018-06-14 18:48:37 -04002408 // Generate NonWritable and NonReadable
2409 switch (info->arg_kind) {
2410 case clspv::ArgKind::Buffer:
Alan Bakerfcda9482018-10-02 17:09:59 -04002411 case clspv::ArgKind::BufferUBO:
David Neto862b7d82018-06-14 18:48:37 -04002412 if (info->var_fn->getReturnType()->getPointerAddressSpace() ==
2413 clspv::AddressSpace::Constant) {
2414 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002415 Ops << info->var_id << spv::DecorationNonWritable;
SJWf93f5f32020-05-05 07:27:56 -05002416 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002417 }
David Neto862b7d82018-06-14 18:48:37 -04002418 break;
alan-bakerf6bc8252020-09-23 14:58:55 -04002419 case clspv::ArgKind::StorageImage: {
2420 auto *type = info->var_fn->getReturnType();
2421 auto *struct_ty = cast<StructType>(type->getPointerElementType());
2422 // TODO(alan-baker): This is conservative. If compiling for OpenCL 2.0 or
2423 // above, the compiler treats all write_only images as read_write images.
2424 if (struct_ty->getName().contains("_wo_t")) {
2425 Ops.clear();
2426 Ops << info->var_id << spv::DecorationNonReadable;
2427 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
2428 }
David Neto862b7d82018-06-14 18:48:37 -04002429 break;
alan-bakerf6bc8252020-09-23 14:58:55 -04002430 }
David Neto862b7d82018-06-14 18:48:37 -04002431 default:
2432 break;
David Neto22f144c2017-06-12 14:26:21 -04002433 }
2434 }
2435}
2436
2437void SPIRVProducerPass::GenerateGlobalVar(GlobalVariable &GV) {
David Neto22f144c2017-06-12 14:26:21 -04002438 ValueMapType &VMap = getValueMap();
SJW01901d92020-05-21 08:58:31 -05002439 std::vector<SPIRVID> &BuiltinDimVec = getBuiltinDimVec();
David Neto85082642018-03-24 06:55:20 -07002440 const DataLayout &DL = GV.getParent()->getDataLayout();
David Neto22f144c2017-06-12 14:26:21 -04002441
2442 const spv::BuiltIn BuiltinType = GetBuiltin(GV.getName());
2443 Type *Ty = GV.getType();
2444 PointerType *PTy = cast<PointerType>(Ty);
2445
SJW01901d92020-05-21 08:58:31 -05002446 SPIRVID InitializerID;
David Neto22f144c2017-06-12 14:26:21 -04002447
2448 // Workgroup size is handled differently (it goes into a constant)
2449 if (spv::BuiltInWorkgroupSize == BuiltinType) {
David Neto22f144c2017-06-12 14:26:21 -04002450 uint32_t PrevXDimCst = 0xFFFFFFFF;
2451 uint32_t PrevYDimCst = 0xFFFFFFFF;
2452 uint32_t PrevZDimCst = 0xFFFFFFFF;
alan-baker3b609772020-09-03 19:10:17 -04002453 bool HasMD = true;
David Neto22f144c2017-06-12 14:26:21 -04002454 for (Function &Func : *GV.getParent()) {
2455 if (Func.isDeclaration()) {
2456 continue;
2457 }
2458
2459 // We only need to check kernels.
2460 if (Func.getCallingConv() != CallingConv::SPIR_KERNEL) {
2461 continue;
2462 }
2463
2464 if (const MDNode *MD =
2465 dyn_cast<Function>(&Func)->getMetadata("reqd_work_group_size")) {
2466 uint32_t CurXDimCst = static_cast<uint32_t>(
2467 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
2468 uint32_t CurYDimCst = static_cast<uint32_t>(
2469 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
2470 uint32_t CurZDimCst = static_cast<uint32_t>(
2471 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
2472
2473 if (PrevXDimCst == 0xFFFFFFFF && PrevYDimCst == 0xFFFFFFFF &&
2474 PrevZDimCst == 0xFFFFFFFF) {
2475 PrevXDimCst = CurXDimCst;
2476 PrevYDimCst = CurYDimCst;
2477 PrevZDimCst = CurZDimCst;
2478 } else if (CurXDimCst != PrevXDimCst || CurYDimCst != PrevYDimCst ||
2479 CurZDimCst != PrevZDimCst) {
alan-baker3b609772020-09-03 19:10:17 -04002480 HasMD = false;
2481 continue;
David Neto22f144c2017-06-12 14:26:21 -04002482 } else {
2483 continue;
2484 }
2485
2486 //
2487 // Generate OpConstantComposite.
2488 //
2489 // Ops[0] : Result Type ID
2490 // Ops[1] : Constant size for x dimension.
2491 // Ops[2] : Constant size for y dimension.
2492 // Ops[3] : Constant size for z dimension.
SJWf93f5f32020-05-05 07:27:56 -05002493 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002494
SJW01901d92020-05-21 08:58:31 -05002495 SPIRVID XDimCstID =
SJWf93f5f32020-05-05 07:27:56 -05002496 getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(0)));
SJW01901d92020-05-21 08:58:31 -05002497 SPIRVID YDimCstID =
SJWf93f5f32020-05-05 07:27:56 -05002498 getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(1)));
SJW01901d92020-05-21 08:58:31 -05002499 SPIRVID ZDimCstID =
SJWf93f5f32020-05-05 07:27:56 -05002500 getSPIRVValue(mdconst::extract<ConstantInt>(MD->getOperand(2)));
David Neto22f144c2017-06-12 14:26:21 -04002501
SJW01901d92020-05-21 08:58:31 -05002502 Ops << Ty->getPointerElementType() << XDimCstID << YDimCstID
2503 << ZDimCstID;
David Neto22f144c2017-06-12 14:26:21 -04002504
SJWf93f5f32020-05-05 07:27:56 -05002505 InitializerID =
2506 addSPIRVInst<kGlobalVariables>(spv::OpConstantComposite, Ops);
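        // Sketch of the expected output, assuming a kernel declared with
        // __attribute__((reqd_work_group_size(8, 4, 1))):
        //   %init = OpConstantComposite %v3uint %uint_8 %uint_4 %uint_1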
David Neto22f144c2017-06-12 14:26:21 -04002507 } else {
alan-baker3b609772020-09-03 19:10:17 -04002508 HasMD = false;
David Neto22f144c2017-06-12 14:26:21 -04002509 }
2510 }
2511
2512 // If the kernels lack a consistent reqd_work_group_size (or non-uniform
2513 // NDRanges are supported), generate OpSpecConstants for the x/y/z dimensions.
Kévin Petit21c23c62020-04-29 01:38:28 +01002514 if (!HasMD || clspv::Option::NonUniformNDRangeSupported()) {
David Neto22f144c2017-06-12 14:26:21 -04002515 //
2516 // Generate OpSpecConstants for x/y/z dimension.
2517 //
2518 // Ops[0] : Result Type ID
2519 // Ops[1] : Constant size for x/y/z dimension (Literal Number).
David Neto22f144c2017-06-12 14:26:21 -04002520
alan-bakera1be3322020-04-20 12:48:18 -04002521 // Allocate spec constants for workgroup size.
SJW77b87ad2020-04-21 14:37:52 -05002522 clspv::AddWorkgroupSpecConstants(module);
alan-bakera1be3322020-04-20 12:48:18 -04002523
SJWf93f5f32020-05-05 07:27:56 -05002524 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05002525 SPIRVID result_type_id = getSPIRVType(
SJWf93f5f32020-05-05 07:27:56 -05002526 dyn_cast<VectorType>(Ty->getPointerElementType())->getElementType());
David Neto22f144c2017-06-12 14:26:21 -04002527
David Neto257c3892018-04-11 13:19:45 -04002528 // X Dimension
SJW01901d92020-05-21 08:58:31 -05002529 Ops << result_type_id << 1;
2530 SPIRVID XDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002531
2532 // Y Dimension
2533 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002534 Ops << result_type_id << 1;
2535 SPIRVID YDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002536
2537 // Z Dimension
2538 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002539 Ops << result_type_id << 1;
2540 SPIRVID ZDimCstID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002541
David Neto257c3892018-04-11 13:19:45 -04002542 BuiltinDimVec.push_back(XDimCstID);
2543 BuiltinDimVec.push_back(YDimCstID);
David Neto22f144c2017-06-12 14:26:21 -04002544 BuiltinDimVec.push_back(ZDimCstID);
2545
David Neto22f144c2017-06-12 14:26:21 -04002546 //
2547 // Generate OpSpecConstantComposite.
2548 //
2549 // Ops[0] : Result Type ID
2550 // Ops[1] : Constant size for x dimension.
2551 // Ops[2] : Constant size for y dimension.
2552 // Ops[3] : Constant size for z dimension.
David Neto22f144c2017-06-12 14:26:21 -04002553 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002554 Ops << Ty->getPointerElementType() << XDimCstID << YDimCstID << ZDimCstID;
David Neto22f144c2017-06-12 14:26:21 -04002555
SJWf93f5f32020-05-05 07:27:56 -05002556 InitializerID =
2557 addSPIRVInst<kConstants>(spv::OpSpecConstantComposite, Ops);
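      // Sketch of the expected output (the SpecId decorations are added later
      // in GenerateModuleInfo):
      //   %x = OpSpecConstant %uint 1
      //   %y = OpSpecConstant %uint 1
      //   %z = OpSpecConstant %uint 1
      //   %init = OpSpecConstantComposite %v3uint %x %y %z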
David Neto22f144c2017-06-12 14:26:21 -04002558 }
alan-bakerbed3a882020-04-21 14:42:41 -04002559 } else if (BuiltinType == spv::BuiltInWorkDim) {
2560 // 1. Generate a specialization constant with a default of 3.
2561 // 2. Allocate and annotate a SpecId for the constant.
2562 // 3. Use the spec constant as the initializer for the variable.
SJWf93f5f32020-05-05 07:27:56 -05002563 SPIRVOperandVec Ops;
alan-bakerbed3a882020-04-21 14:42:41 -04002564
2565 //
2566 // Generate OpSpecConstant.
2567 //
2568 // Ops[0] : Result Type ID
2569 // Ops[1] : Default literal value
alan-bakerbed3a882020-04-21 14:42:41 -04002570
SJW01901d92020-05-21 08:58:31 -05002571 Ops << IntegerType::get(GV.getContext(), 32) << 3;
alan-bakerbed3a882020-04-21 14:42:41 -04002572
SJWf93f5f32020-05-05 07:27:56 -05002573 InitializerID = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakerbed3a882020-04-21 14:42:41 -04002574
2575 //
2576 // Generate SpecId decoration.
2577 //
2578 // Ops[0] : target
2579 // Ops[1] : decoration
2580 // Ops[2] : SpecId
Alan Baker75ccc252020-04-21 17:11:52 -04002581 auto spec_id = AllocateSpecConstant(module, SpecConstant::kWorkDim);
alan-bakerbed3a882020-04-21 14:42:41 -04002582 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002583 Ops << InitializerID << spv::DecorationSpecId << spec_id;
alan-bakerbed3a882020-04-21 14:42:41 -04002584
SJWf93f5f32020-05-05 07:27:56 -05002585 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
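    // Sketch of the expected output (the SpecId literal depends on the
    // allocation order):
    //   %workdim = OpSpecConstant %uint 3
    //   OpDecorate %workdim SpecId <id>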
alan-bakere1996972020-05-04 08:38:12 -04002586 } else if (BuiltinType == spv::BuiltInGlobalOffset) {
2587 // 1. Generate a spec constant with a default of {0, 0, 0}.
2588 // 2. Allocate and annotate SpecIds for the constants.
2589 // 3. Use the spec constant as the initializer for the variable.
SJWf93f5f32020-05-05 07:27:56 -05002590 SPIRVOperandVec Ops;
alan-bakere1996972020-05-04 08:38:12 -04002591
2592 //
2593 // Generate OpSpecConstant for each dimension.
2594 //
2595 // Ops[0] : Result Type ID
2596 // Ops[1] : Default literal value
2597 //
SJW01901d92020-05-21 08:58:31 -05002598 Ops << IntegerType::get(GV.getContext(), 32) << 0;
2599 SPIRVID x_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002600
alan-bakere1996972020-05-04 08:38:12 -04002601 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002602 Ops << IntegerType::get(GV.getContext(), 32) << 0;
2603 SPIRVID y_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002604
alan-bakere1996972020-05-04 08:38:12 -04002605 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002606 Ops << IntegerType::get(GV.getContext(), 32) << 0;
2607 SPIRVID z_id = addSPIRVInst<kConstants>(spv::OpSpecConstant, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002608
2609 //
2610 // Generate SpecId decoration for each dimension.
2611 //
2612 // Ops[0] : target
2613 // Ops[1] : decoration
2614 // Ops[2] : SpecId
2615 //
2616 auto spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetX);
2617 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002618 Ops << x_id << spv::DecorationSpecId << spec_id;
SJWf93f5f32020-05-05 07:27:56 -05002619 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002620
2621 spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetY);
2622 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002623 Ops << y_id << spv::DecorationSpecId << spec_id;
SJWf93f5f32020-05-05 07:27:56 -05002624 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002625
2626 spec_id = AllocateSpecConstant(module, SpecConstant::kGlobalOffsetZ);
2627 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002628 Ops << z_id << spv::DecorationSpecId << spec_id;
SJWf93f5f32020-05-05 07:27:56 -05002629 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
alan-bakere1996972020-05-04 08:38:12 -04002630
2631 //
2632 // Generate OpSpecConstantComposite.
2633 //
2634 // Ops[0] : type id
2635 // Ops[1..n-1] : elements
2636 //
alan-bakere1996972020-05-04 08:38:12 -04002637 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002638 Ops << GV.getType()->getPointerElementType() << x_id << y_id << z_id;
SJWf93f5f32020-05-05 07:27:56 -05002639 InitializerID = addSPIRVInst<kConstants>(spv::OpSpecConstantComposite, Ops);
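    // Sketch of the expected output (SpecId literals depend on allocation
    // order):
    //   %gox = OpSpecConstant %uint 0          ; likewise %goy and %goz
    //   OpDecorate %gox SpecId <x>             ; likewise %goy and %goz
    //   %init = OpSpecConstantComposite %v3uint %gox %goy %goz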
David Neto22f144c2017-06-12 14:26:21 -04002640 }
2641
David Neto85082642018-03-24 06:55:20 -07002642 const auto AS = PTy->getAddressSpace();
SJW806a5d82020-07-15 12:51:38 -05002643 const auto spvSC = GetStorageClass(AS);
David Neto22f144c2017-06-12 14:26:21 -04002644
David Neto85082642018-03-24 06:55:20 -07002645 const bool module_scope_constant_external_init =
David Neto862b7d82018-06-14 18:48:37 -04002646 (AS == AddressSpace::Constant) && GV.hasInitializer() &&
David Neto85082642018-03-24 06:55:20 -07002647 clspv::Option::ModuleConstantsInStorageBuffer();
2648
Kévin Petit23d5f182019-08-13 16:21:29 +01002649 if (GV.hasInitializer()) {
2650 auto GVInit = GV.getInitializer();
2651 if (!isa<UndefValue>(GVInit) && !module_scope_constant_external_init) {
SJWf93f5f32020-05-05 07:27:56 -05002652 InitializerID = getSPIRVValue(GVInit);
David Neto85082642018-03-24 06:55:20 -07002653 }
2654 }
Kévin Petit23d5f182019-08-13 16:21:29 +01002655
SJW806a5d82020-07-15 12:51:38 -05002656 SPIRVID var_id =
2657 addSPIRVGlobalVariable(getSPIRVType(Ty), spvSC, InitializerID);
David Neto85082642018-03-24 06:55:20 -07002658
SJWf93f5f32020-05-05 07:27:56 -05002659 VMap[&GV] = var_id;
David Neto22f144c2017-06-12 14:26:21 -04002660
alan-bakere1996972020-05-04 08:38:12 -04002661 auto IsOpenCLBuiltin = [](spv::BuiltIn builtin) {
2662 return builtin == spv::BuiltInWorkDim ||
2663 builtin == spv::BuiltInGlobalOffset;
2664 };
2665
alan-bakere1996972020-05-04 08:38:12 -04002666 // If we have a SPIR-V builtin (not one of the OpenCL builtins handled above).
2667 if (spv::BuiltInMax != BuiltinType && !IsOpenCLBuiltin(BuiltinType)) {
David Neto22f144c2017-06-12 14:26:21 -04002668 //
2669 // Generate OpDecorate.
2670 //
2671 // DOps[0] = Target ID
2672 // DOps[1] = Decoration (Builtin)
2673 // DOps[2] = BuiltIn ID
SJW01901d92020-05-21 08:58:31 -05002674 SPIRVID ResultID;
David Neto22f144c2017-06-12 14:26:21 -04002675
2676 // WorkgroupSize is different, we decorate the constant composite that has
2677 // its value, rather than the variable that we use to access the value.
2678 if (spv::BuiltInWorkgroupSize == BuiltinType) {
2679 ResultID = InitializerID;
David Netoa60b00b2017-09-15 16:34:09 -04002680 // Save both the value and variable IDs for later.
2681 WorkgroupSizeValueID = InitializerID;
SJWf93f5f32020-05-05 07:27:56 -05002682 WorkgroupSizeVarID = getSPIRVValue(&GV);
David Neto22f144c2017-06-12 14:26:21 -04002683 } else {
SJWf93f5f32020-05-05 07:27:56 -05002684 ResultID = getSPIRVValue(&GV);
David Neto22f144c2017-06-12 14:26:21 -04002685 }
2686
SJW806a5d82020-07-15 12:51:38 -05002687 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05002688 Ops << ResultID << spv::DecorationBuiltIn << BuiltinType;
David Neto22f144c2017-06-12 14:26:21 -04002689
SJW01901d92020-05-21 08:58:31 -05002690 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto85082642018-03-24 06:55:20 -07002691 } else if (module_scope_constant_external_init) {
2692 // This module scope constant is initialized from a storage buffer with data
2693 // provided by the host at binding 0 of the next descriptor set.
SJW77b87ad2020-04-21 14:37:52 -05002694 const uint32_t descriptor_set = TakeDescriptorIndex(module);
David Neto85082642018-03-24 06:55:20 -07002695
alan-baker86ce19c2020-08-05 13:09:19 -04002696 // Emit the initializer as a reflection instruction.
David Neto85082642018-03-24 06:55:20 -07002697 // Use "kind,buffer" to indicate storage buffer. We might want to expand
2698 // that later to other types, like uniform buffer.
alan-bakerf5e5f692018-11-27 08:33:24 -05002699 std::string hexbytes;
2700 llvm::raw_string_ostream str(hexbytes);
2701 clspv::ConstantEmitter(DL, str).Emit(GV.getInitializer());
alan-baker86ce19c2020-08-05 13:09:19 -04002702
2703 // Reflection instruction for constant data.
2704 SPIRVOperandVec Ops;
2705 auto data_id = addSPIRVInst<kDebug>(spv::OpString, str.str().c_str());
2706 Ops << getSPIRVType(Type::getVoidTy(module->getContext()))
2707 << getReflectionImport() << reflection::ExtInstConstantDataStorageBuffer
2708 << getSPIRVInt32Constant(descriptor_set) << getSPIRVInt32Constant(0)
2709 << data_id;
2710 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
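    // Sketch of the emitted reflection (ids and names are illustrative):
    //   %data = OpString "<hex-encoded initializer bytes>"
    //   %r = OpExtInst %void %reflection_import ConstantDataStorageBuffer
    //                  %uint_<set> %uint_0 %data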
David Neto85082642018-03-24 06:55:20 -07002711
David Neto85082642018-03-24 06:55:20 -07002712 // OpDecorate %var DescriptorSet <descriptor_set>
alan-baker86ce19c2020-08-05 13:09:19 -04002713 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002714 Ops << var_id << spv::DecorationDescriptorSet << descriptor_set;
2715 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
SJW69939d52020-04-16 07:29:07 -05002716
2717 // OpDecorate %var Binding <binding>
SJW01901d92020-05-21 08:58:31 -05002718 Ops.clear();
2719 Ops << var_id << spv::DecorationBinding << 0;
2720 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04002721 }
2722}
2723
David Neto22f144c2017-06-12 14:26:21 -04002724void SPIRVProducerPass::GenerateFuncPrologue(Function &F) {
David Neto22f144c2017-06-12 14:26:21 -04002725 ValueMapType &VMap = getValueMap();
2726 EntryPointVecType &EntryPoints = getEntryPointVec();
David Neto22f144c2017-06-12 14:26:21 -04002727 auto &GlobalConstFuncTyMap = getGlobalConstFuncTypeMap();
2728 auto &GlobalConstArgSet = getGlobalConstArgSet();
2729
2730 FunctionType *FTy = F.getFunctionType();
2731
2732 //
David Neto22f144c2017-06-12 14:26:21 -04002733 // Generate OPFunction.
2734 //
2735
2736 // FOps[0] : Result Type ID
2737 // FOps[1] : Function Control
2738 // FOps[2] : Function Type ID
SJWf93f5f32020-05-05 07:27:56 -05002739 SPIRVOperandVec FOps;
David Neto22f144c2017-06-12 14:26:21 -04002740
2741 // Find SPIRV instruction for return type.
SJW01901d92020-05-21 08:58:31 -05002742 FOps << FTy->getReturnType();
David Neto22f144c2017-06-12 14:26:21 -04002743
2744 // Check function attributes for SPIRV Function Control.
2745 uint32_t FuncControl = spv::FunctionControlMaskNone;
2746 if (F.hasFnAttribute(Attribute::AlwaysInline)) {
2747 FuncControl |= spv::FunctionControlInlineMask;
2748 }
2749 if (F.hasFnAttribute(Attribute::NoInline)) {
2750 FuncControl |= spv::FunctionControlDontInlineMask;
2751 }
2752 // TODO: Check llvm attribute for Function Control Pure.
2753 if (F.hasFnAttribute(Attribute::ReadOnly)) {
2754 FuncControl |= spv::FunctionControlPureMask;
2755 }
2756 // TODO: Check llvm attribute for Function Control Const.
2757 if (F.hasFnAttribute(Attribute::ReadNone)) {
2758 FuncControl |= spv::FunctionControlConstMask;
2759 }
2760
SJW01901d92020-05-21 08:58:31 -05002761 FOps << FuncControl;
David Neto22f144c2017-06-12 14:26:21 -04002762
SJW01901d92020-05-21 08:58:31 -05002763 SPIRVID FTyID;
David Neto22f144c2017-06-12 14:26:21 -04002764 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
2765 SmallVector<Type *, 4> NewFuncParamTys;
2766 FunctionType *NewFTy =
2767 FunctionType::get(FTy->getReturnType(), NewFuncParamTys, false);
SJWf93f5f32020-05-05 07:27:56 -05002768 FTyID = getSPIRVType(NewFTy);
David Neto22f144c2017-06-12 14:26:21 -04002769 } else {
David Neto9ed8e2f2018-03-24 06:47:24 -07002770 // Handle regular function with global constant parameters.
David Neto22f144c2017-06-12 14:26:21 -04002771 if (GlobalConstFuncTyMap.count(FTy)) {
SJWf93f5f32020-05-05 07:27:56 -05002772 FTyID = getSPIRVType(GlobalConstFuncTyMap[FTy].first);
David Neto22f144c2017-06-12 14:26:21 -04002773 } else {
SJWf93f5f32020-05-05 07:27:56 -05002774 FTyID = getSPIRVType(FTy);
David Neto22f144c2017-06-12 14:26:21 -04002775 }
2776 }
2777
SJW01901d92020-05-21 08:58:31 -05002778 FOps << FTyID;
David Neto22f144c2017-06-12 14:26:21 -04002779
SJWf93f5f32020-05-05 07:27:56 -05002780 // Generate SPIRV instruction for function.
2781 SPIRVID FID = addSPIRVInst(spv::OpFunction, FOps);
2782 VMap[&F] = FID;
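  // Sketch for a kernel, with the function control shown as None for
  // simplicity (kernels are emitted with an empty parameter list, so no
  // OpFunctionParameter instructions follow):
  //   %fn = OpFunction %void None %void_fn_type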
David Neto22f144c2017-06-12 14:26:21 -04002783
SJWf93f5f32020-05-05 07:27:56 -05002784 if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
2785 EntryPoints.push_back(std::make_pair(&F, FID));
2786 }
David Neto22f144c2017-06-12 14:26:21 -04002787
David Neto482550a2018-03-24 05:21:07 -07002788 if (clspv::Option::ShowIDs()) {
SJW01901d92020-05-21 08:58:31 -05002789 errs() << "Function " << F.getName() << " is " << FID.get() << "\n";
David Netob05675d2018-02-16 12:37:49 -05002790 }
David Neto22f144c2017-06-12 14:26:21 -04002791
2792 //
2793 // Generate OpFunctionParameter for Normal function.
2794 //
David Neto22f144c2017-06-12 14:26:21 -04002795 if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
alan-bakere9308012019-03-15 10:25:13 -04002796
David Neto22f144c2017-06-12 14:26:21 -04002797 // Iterate over the Arguments rather than the function type's parameter list
 2798 // so each argument's name is available.
2798 unsigned ArgIdx = 0;
2799 for (Argument &Arg : F.args()) {
David Neto22f144c2017-06-12 14:26:21 -04002800 // ParamOps[0] : Result Type ID
SJW01901d92020-05-21 08:58:31 -05002801 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002802
2803 // Find SPIRV instruction for parameter type.
SJW01901d92020-05-21 08:58:31 -05002804 SPIRVID ParamTyID = getSPIRVType(Arg.getType());
David Neto22f144c2017-06-12 14:26:21 -04002805 if (PointerType *PTy = dyn_cast<PointerType>(Arg.getType())) {
2806 if (GlobalConstFuncTyMap.count(FTy)) {
2807 if (ArgIdx == GlobalConstFuncTyMap[FTy].second) {
2808 Type *EleTy = PTy->getPointerElementType();
2809 Type *ArgTy =
2810 PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
SJWf93f5f32020-05-05 07:27:56 -05002811 ParamTyID = getSPIRVType(ArgTy);
David Neto22f144c2017-06-12 14:26:21 -04002812 GlobalConstArgSet.insert(&Arg);
2813 }
2814 }
2815 }
SJW01901d92020-05-21 08:58:31 -05002816 Ops << ParamTyID;
David Neto22f144c2017-06-12 14:26:21 -04002817
2818 // Generate SPIRV instruction for parameter.
SJW01901d92020-05-21 08:58:31 -05002819 SPIRVID param_id = addSPIRVInst(spv::OpFunctionParameter, Ops);
SJWf93f5f32020-05-05 07:27:56 -05002820 VMap[&Arg] = param_id;
2821
2822 if (CalledWithCoherentResource(Arg)) {
2823 // If the arg is passed a coherent resource ever, then decorate this
2824 // parameter with Coherent too.
SJW01901d92020-05-21 08:58:31 -05002825 Ops.clear();
2826 Ops << param_id << spv::DecorationCoherent;
2827 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
SJWf93f5f32020-05-05 07:27:56 -05002828 }
David Neto22f144c2017-06-12 14:26:21 -04002829
2830 ArgIdx++;
2831 }
2832 }
2833}
2834
SJW77b87ad2020-04-21 14:37:52 -05002835void SPIRVProducerPass::GenerateModuleInfo() {
David Neto22f144c2017-06-12 14:26:21 -04002836 EntryPointVecType &EntryPoints = getEntryPointVec();
SJW806a5d82020-07-15 12:51:38 -05002837 auto &EntryPointInterfaces = getEntryPointInterfacesList();
SJW01901d92020-05-21 08:58:31 -05002838 std::vector<SPIRVID> &BuiltinDimVec = getBuiltinDimVec();
David Neto22f144c2017-06-12 14:26:21 -04002839
SJWf93f5f32020-05-05 07:27:56 -05002840 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04002841
SJW01901d92020-05-21 08:58:31 -05002842 for (auto Capability : CapabilitySet) {
David Neto22f144c2017-06-12 14:26:21 -04002843 //
SJW01901d92020-05-21 08:58:31 -05002844 // Generate OpCapability
David Neto22f144c2017-06-12 14:26:21 -04002845 //
2846 // Ops[0] = Capability
SJW01901d92020-05-21 08:58:31 -05002847 addSPIRVInst<kCapabilities>(spv::OpCapability, Capability);
alan-baker5b86ed72019-02-15 08:26:50 -05002848 }
2849
2850 // Always add the storage buffer extension
2851 {
David Neto22f144c2017-06-12 14:26:21 -04002852 //
2853 // Generate OpExtension.
2854 //
2855 // Ops[0] = Name (Literal String)
2856 //
SJWf93f5f32020-05-05 07:27:56 -05002857 addSPIRVInst<kExtensions>(spv::OpExtension,
2858 "SPV_KHR_storage_buffer_storage_class");
alan-baker5b86ed72019-02-15 08:26:50 -05002859 }
David Neto22f144c2017-06-12 14:26:21 -04002860
alan-baker5b86ed72019-02-15 08:26:50 -05002861 if (hasVariablePointers() || hasVariablePointersStorageBuffer()) {
2862 //
2863 // Generate OpExtension.
2864 //
2865 // Ops[0] = Name (Literal String)
2866 //
SJWf93f5f32020-05-05 07:27:56 -05002867 addSPIRVInst<kExtensions>(spv::OpExtension, "SPV_KHR_variable_pointers");
David Neto22f144c2017-06-12 14:26:21 -04002868 }
2869
2870 //
2871 // Generate OpMemoryModel
2872 //
2873 // Memory model for Vulkan will always be GLSL450.
2874
2875 // Ops[0] = Addressing Model
2876 // Ops[1] = Memory Model
2877 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002878 Ops << spv::AddressingModelLogical << spv::MemoryModelGLSL450;
David Neto22f144c2017-06-12 14:26:21 -04002879
SJWf93f5f32020-05-05 07:27:56 -05002880 addSPIRVInst<kMemoryModel>(spv::OpMemoryModel, Ops);
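  // Expected output:
  //   OpMemoryModel Logical GLSL450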
David Neto22f144c2017-06-12 14:26:21 -04002881
2882 //
2883 // Generate OpEntryPoint
2884 //
2885 for (auto EntryPoint : EntryPoints) {
2886 // Ops[0] = Execution Model
2887 // Ops[1] = EntryPoint ID
2888 // Ops[2] = Name (Literal String)
2889 // ...
2890 //
2891 // TODO: Do we need to consider Interface ID for forward references???
2892 Ops.clear();
alan-bakerb6b09dc2018-11-08 16:59:28 -05002893 const StringRef &name = EntryPoint.first->getName();
SJW01901d92020-05-21 08:58:31 -05002894 Ops << spv::ExecutionModelGLCompute << EntryPoint.second << name;
David Neto22f144c2017-06-12 14:26:21 -04002895
SJW806a5d82020-07-15 12:51:38 -05002896 for (auto &Interface : EntryPointInterfaces) {
SJW01901d92020-05-21 08:58:31 -05002897 Ops << Interface;
David Neto22f144c2017-06-12 14:26:21 -04002898 }
2899
SJWf93f5f32020-05-05 07:27:56 -05002900 addSPIRVInst<kEntryPoints>(spv::OpEntryPoint, Ops);
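    // Sketch of the expected output (interface ids are illustrative):
    //   OpEntryPoint GLCompute %kernel "kernel_name" %interface_var_0 ...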
David Neto22f144c2017-06-12 14:26:21 -04002901 }
2902
alan-baker3b609772020-09-03 19:10:17 -04002903 if (BuiltinDimVec.empty()) {
2904 for (auto EntryPoint : EntryPoints) {
2905 const MDNode *MD = dyn_cast<Function>(EntryPoint.first)
2906 ->getMetadata("reqd_work_group_size");
2907 if ((MD != nullptr) && !clspv::Option::NonUniformNDRangeSupported()) {
2908 //
2909 // Generate OpExecutionMode
2910 //
David Neto22f144c2017-06-12 14:26:21 -04002911
alan-baker3b609772020-09-03 19:10:17 -04002912 // Ops[0] = Entry Point ID
2913 // Ops[1] = Execution Mode
2914 // Ops[2] ... Ops[n] = Optional literals according to Execution Mode
2915 Ops.clear();
2916 Ops << EntryPoint.second << spv::ExecutionModeLocalSize;
2917
2918 uint32_t XDim = static_cast<uint32_t>(
2919 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
2920 uint32_t YDim = static_cast<uint32_t>(
2921 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
2922 uint32_t ZDim = static_cast<uint32_t>(
2923 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
2924
2925 Ops << XDim << YDim << ZDim;
2926
2927 addSPIRVInst<kExecutionModes>(spv::OpExecutionMode, Ops);
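        // Sketch, assuming reqd_work_group_size(8, 4, 1):
        //   OpExecutionMode %kernel LocalSize 8 4 1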
David Neto22f144c2017-06-12 14:26:21 -04002928 }
David Neto22f144c2017-06-12 14:26:21 -04002929 }
2930 }
2931
2932 //
2933 // Generate OpSource.
2934 //
2935 // Ops[0] = SourceLanguage ID
2936 // Ops[1] = Version (LiteralNum)
2937 //
SJW01901d92020-05-21 08:58:31 -05002938 uint32_t LangID = spv::SourceLanguageUnknown;
2939 uint32_t LangVer = 0;
Kévin Petitf0515712020-01-07 18:29:20 +00002940 switch (clspv::Option::Language()) {
2941 case clspv::Option::SourceLanguage::OpenCL_C_10:
SJW01901d92020-05-21 08:58:31 -05002942 LangID = spv::SourceLanguageOpenCL_C;
2943 LangVer = 100;
Kévin Petitf0515712020-01-07 18:29:20 +00002944 break;
2945 case clspv::Option::SourceLanguage::OpenCL_C_11:
SJW01901d92020-05-21 08:58:31 -05002946 LangID = spv::SourceLanguageOpenCL_C;
2947 LangVer = 110;
Kévin Petitf0515712020-01-07 18:29:20 +00002948 break;
2949 case clspv::Option::SourceLanguage::OpenCL_C_12:
SJW01901d92020-05-21 08:58:31 -05002950 LangID = spv::SourceLanguageOpenCL_C;
2951 LangVer = 120;
Kévin Petitf0515712020-01-07 18:29:20 +00002952 break;
2953 case clspv::Option::SourceLanguage::OpenCL_C_20:
SJW01901d92020-05-21 08:58:31 -05002954 LangID = spv::SourceLanguageOpenCL_C;
2955 LangVer = 200;
Kévin Petitf0515712020-01-07 18:29:20 +00002956 break;
Kévin Petit77838ff2020-10-19 18:54:51 +01002957 case clspv::Option::SourceLanguage::OpenCL_C_30:
2958 LangID = spv::SourceLanguageOpenCL_C;
2959 LangVer = 300;
2960 break;
Kévin Petitf0515712020-01-07 18:29:20 +00002961 case clspv::Option::SourceLanguage::OpenCL_CPP:
SJW01901d92020-05-21 08:58:31 -05002962 LangID = spv::SourceLanguageOpenCL_CPP;
2963 LangVer = 100;
Kévin Petitf0515712020-01-07 18:29:20 +00002964 break;
2965 default:
Kévin Petitf0515712020-01-07 18:29:20 +00002966 break;
Kévin Petit0fc88042019-04-09 23:25:02 +01002967 }
David Neto22f144c2017-06-12 14:26:21 -04002968
SJW01901d92020-05-21 08:58:31 -05002969 Ops.clear();
2970 Ops << LangID << LangVer;
SJWf93f5f32020-05-05 07:27:56 -05002971 addSPIRVInst<kDebug>(spv::OpSource, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002972
2973 if (!BuiltinDimVec.empty()) {
2974 //
2975 // Generate OpDecorates for x/y/z dimension.
2976 //
2977 // Ops[0] = Target ID
2978 // Ops[1] = Decoration (SpecId)
David Neto257c3892018-04-11 13:19:45 -04002979 // Ops[2] = Specialization Constant ID (Literal Number)
David Neto22f144c2017-06-12 14:26:21 -04002980
2981 // X Dimension
2982 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002983 Ops << BuiltinDimVec[0] << spv::DecorationSpecId << 0;
SJWf93f5f32020-05-05 07:27:56 -05002984 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002985
2986 // Y Dimension
2987 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002988 Ops << BuiltinDimVec[1] << spv::DecorationSpecId << 1;
SJWf93f5f32020-05-05 07:27:56 -05002989 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Neto22f144c2017-06-12 14:26:21 -04002990
2991 // Z Dimension
2992 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05002993 Ops << BuiltinDimVec[2] << spv::DecorationSpecId << 2;
SJWf93f5f32020-05-05 07:27:56 -05002994 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
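    // Expected output:
    //   OpDecorate %x_dim SpecId 0
    //   OpDecorate %y_dim SpecId 1
    //   OpDecorate %z_dim SpecId 2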
David Neto22f144c2017-06-12 14:26:21 -04002995 }
2996}
2997
David Netob6e2e062018-04-25 10:32:06 -04002998void SPIRVProducerPass::GenerateEntryPointInitialStores() {
2999 // Work around a driver bug. Initializers on Private variables might not
3000 // work. So the start of the kernel should store the initializer value to the
3001 // variables. Yes, *every* entry point pays this cost if *any* entry point
3002 // uses this builtin. At this point I judge this to be an acceptable tradeoff
3003 // of complexity vs. runtime, for a broken driver.
alan-bakerb6b09dc2018-11-08 16:59:28 -05003004 // TODO(dneto): Remove this at some point once fixed drivers are widely
3005 // available.
SJW01901d92020-05-21 08:58:31 -05003006 if (WorkgroupSizeVarID.isValid()) {
3007 assert(WorkgroupSizeValueID.isValid());
David Netob6e2e062018-04-25 10:32:06 -04003008
SJWf93f5f32020-05-05 07:27:56 -05003009 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05003010 Ops << WorkgroupSizeVarID << WorkgroupSizeValueID;
David Netob6e2e062018-04-25 10:32:06 -04003011
SJWf93f5f32020-05-05 07:27:56 -05003012 addSPIRVInst(spv::OpStore, Ops);
David Netob6e2e062018-04-25 10:32:06 -04003013 }
3014}
3015
David Neto22f144c2017-06-12 14:26:21 -04003016void SPIRVProducerPass::GenerateFuncBody(Function &F) {
David Neto22f144c2017-06-12 14:26:21 -04003017 ValueMapType &VMap = getValueMap();
3018
David Netob6e2e062018-04-25 10:32:06 -04003019 const bool IsKernel = F.getCallingConv() == CallingConv::SPIR_KERNEL;
David Neto22f144c2017-06-12 14:26:21 -04003020
3021 for (BasicBlock &BB : F) {
3022 // Register BasicBlock to ValueMap.
David Neto22f144c2017-06-12 14:26:21 -04003023
3024 //
3025 // Generate OpLabel for Basic Block.
3026 //
SJWf93f5f32020-05-05 07:27:56 -05003027 VMap[&BB] = addSPIRVInst(spv::OpLabel);
David Neto22f144c2017-06-12 14:26:21 -04003028
David Neto6dcd4712017-06-23 11:06:47 -04003029 // OpVariable instructions must come first.
3030 for (Instruction &I : BB) {
alan-baker5b86ed72019-02-15 08:26:50 -05003031 if (auto *alloca = dyn_cast<AllocaInst>(&I)) {
3032 // Allocating a pointer requires variable pointers.
3033 if (alloca->getAllocatedType()->isPointerTy()) {
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04003034 setVariablePointersCapabilities(
3035 alloca->getAllocatedType()->getPointerAddressSpace());
alan-baker5b86ed72019-02-15 08:26:50 -05003036 }
David Neto6dcd4712017-06-23 11:06:47 -04003037 GenerateInstruction(I);
3038 }
3039 }
3040
David Neto22f144c2017-06-12 14:26:21 -04003041 if (&BB == &F.getEntryBlock() && IsKernel) {
David Netob6e2e062018-04-25 10:32:06 -04003042 if (clspv::Option::HackInitializers()) {
3043 GenerateEntryPointInitialStores();
3044 }
David Neto22f144c2017-06-12 14:26:21 -04003045 }
3046
3047 for (Instruction &I : BB) {
David Neto6dcd4712017-06-23 11:06:47 -04003048 if (!isa<AllocaInst>(I)) {
3049 GenerateInstruction(I);
3050 }
David Neto22f144c2017-06-12 14:26:21 -04003051 }
3052 }
3053}
3054
3055spv::Op SPIRVProducerPass::GetSPIRVCmpOpcode(CmpInst *I) {
3056 const std::map<CmpInst::Predicate, spv::Op> Map = {
3057 {CmpInst::ICMP_EQ, spv::OpIEqual},
3058 {CmpInst::ICMP_NE, spv::OpINotEqual},
3059 {CmpInst::ICMP_UGT, spv::OpUGreaterThan},
3060 {CmpInst::ICMP_UGE, spv::OpUGreaterThanEqual},
3061 {CmpInst::ICMP_ULT, spv::OpULessThan},
3062 {CmpInst::ICMP_ULE, spv::OpULessThanEqual},
3063 {CmpInst::ICMP_SGT, spv::OpSGreaterThan},
3064 {CmpInst::ICMP_SGE, spv::OpSGreaterThanEqual},
3065 {CmpInst::ICMP_SLT, spv::OpSLessThan},
3066 {CmpInst::ICMP_SLE, spv::OpSLessThanEqual},
3067 {CmpInst::FCMP_OEQ, spv::OpFOrdEqual},
3068 {CmpInst::FCMP_OGT, spv::OpFOrdGreaterThan},
3069 {CmpInst::FCMP_OGE, spv::OpFOrdGreaterThanEqual},
3070 {CmpInst::FCMP_OLT, spv::OpFOrdLessThan},
3071 {CmpInst::FCMP_OLE, spv::OpFOrdLessThanEqual},
3072 {CmpInst::FCMP_ONE, spv::OpFOrdNotEqual},
3073 {CmpInst::FCMP_UEQ, spv::OpFUnordEqual},
3074 {CmpInst::FCMP_UGT, spv::OpFUnordGreaterThan},
3075 {CmpInst::FCMP_UGE, spv::OpFUnordGreaterThanEqual},
3076 {CmpInst::FCMP_ULT, spv::OpFUnordLessThan},
3077 {CmpInst::FCMP_ULE, spv::OpFUnordLessThanEqual},
3078 {CmpInst::FCMP_UNE, spv::OpFUnordNotEqual}};
3079
3080 assert(0 != Map.count(I->getPredicate()));
3081
3082 return Map.at(I->getPredicate());
3083}
3084
3085spv::Op SPIRVProducerPass::GetSPIRVCastOpcode(Instruction &I) {
3086 const std::map<unsigned, spv::Op> Map{
3087 {Instruction::Trunc, spv::OpUConvert},
3088 {Instruction::ZExt, spv::OpUConvert},
3089 {Instruction::SExt, spv::OpSConvert},
3090 {Instruction::FPToUI, spv::OpConvertFToU},
3091 {Instruction::FPToSI, spv::OpConvertFToS},
3092 {Instruction::UIToFP, spv::OpConvertUToF},
3093 {Instruction::SIToFP, spv::OpConvertSToF},
3094 {Instruction::FPTrunc, spv::OpFConvert},
3095 {Instruction::FPExt, spv::OpFConvert},
3096 {Instruction::BitCast, spv::OpBitcast}};
3097
3098 assert(0 != Map.count(I.getOpcode()));
3099
3100 return Map.at(I.getOpcode());
3101}
3102
3103spv::Op SPIRVProducerPass::GetSPIRVBinaryOpcode(Instruction &I) {
Kévin Petit24272b62018-10-18 19:16:12 +00003104 if (I.getType()->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003105 switch (I.getOpcode()) {
3106 default:
3107 break;
3108 case Instruction::Or:
3109 return spv::OpLogicalOr;
3110 case Instruction::And:
3111 return spv::OpLogicalAnd;
3112 case Instruction::Xor:
3113 return spv::OpLogicalNotEqual;
3114 }
3115 }
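  // Note: SPIR-V has no OpLogicalXor; xor on booleans is equivalent to
  // inequality, hence OpLogicalNotEqual above.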
3116
alan-bakerb6b09dc2018-11-08 16:59:28 -05003117 const std::map<unsigned, spv::Op> Map{
David Neto22f144c2017-06-12 14:26:21 -04003118 {Instruction::Add, spv::OpIAdd},
3119 {Instruction::FAdd, spv::OpFAdd},
3120 {Instruction::Sub, spv::OpISub},
3121 {Instruction::FSub, spv::OpFSub},
3122 {Instruction::Mul, spv::OpIMul},
3123 {Instruction::FMul, spv::OpFMul},
3124 {Instruction::UDiv, spv::OpUDiv},
3125 {Instruction::SDiv, spv::OpSDiv},
3126 {Instruction::FDiv, spv::OpFDiv},
3127 {Instruction::URem, spv::OpUMod},
3128 {Instruction::SRem, spv::OpSRem},
3129 {Instruction::FRem, spv::OpFRem},
3130 {Instruction::Or, spv::OpBitwiseOr},
3131 {Instruction::Xor, spv::OpBitwiseXor},
3132 {Instruction::And, spv::OpBitwiseAnd},
3133 {Instruction::Shl, spv::OpShiftLeftLogical},
3134 {Instruction::LShr, spv::OpShiftRightLogical},
3135 {Instruction::AShr, spv::OpShiftRightArithmetic}};
3136
3137 assert(0 != Map.count(I.getOpcode()));
3138
3139 return Map.at(I.getOpcode());
3140}
3141
SJW806a5d82020-07-15 12:51:38 -05003142SPIRVID SPIRVProducerPass::getSPIRVBuiltin(spv::BuiltIn BID,
3143 spv::Capability Cap) {
3144 SPIRVID RID;
3145
3146 auto ii = BuiltinConstantMap.find(BID);
3147
3148 if (ii != BuiltinConstantMap.end()) {
3149 return ii->second;
3150 } else {
SJW806a5d82020-07-15 12:51:38 -05003151 addCapability(Cap);
3152
3153 Type *type = PointerType::get(IntegerType::get(module->getContext(), 32),
3154 AddressSpace::Input);
3155
3156 RID = addSPIRVGlobalVariable(getSPIRVType(type), spv::StorageClassInput);
3157
3158 BuiltinConstantMap[BID] = RID;
3159
3160 //
3161 // Generate OpDecorate.
3162 //
3163 // Ops[0] : target
3164 // Ops[1] : decoration
3165 // Ops[2] : SpecId
3166 SPIRVOperandVec Ops;
3167 Ops << RID << spv::DecorationBuiltIn << static_cast<int>(BID);
3168
3169 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
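    // Sketch, e.g. for spv::BuiltInSubgroupSize (ids are illustrative):
    //   %var = OpVariable %_ptr_Input_uint Input
    //   OpDecorate %var BuiltIn SubgroupSize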
3170 }
3171
3172 return RID;
3173}
3174
3175SPIRVID
3176SPIRVProducerPass::GenerateClspvInstruction(CallInst *Call,
3177 const FunctionInfo &FuncInfo) {
3178 SPIRVID RID;
3179
3180 switch (FuncInfo.getType()) {
3181 case Builtins::kClspvCompositeConstruct:
3182 RID = addSPIRVPlaceholder(Call);
3183 break;
3184 case Builtins::kClspvResource: {
3185 if (ResourceVarDeferredLoadCalls.count(Call) && Call->hasNUsesOrMore(1)) {
3186 // Generate an OpLoad
3187 SPIRVOperandVec Ops;
3188
3189 Ops << Call->getType()->getPointerElementType()
3190 << ResourceVarDeferredLoadCalls[Call];
3191
3192 RID = addSPIRVInst(spv::OpLoad, Ops);
3193
3194 } else {
3195 // This maps to an OpVariable we've already generated.
3196 // No code is generated for the call.
3197 }
3198 break;
3199 }
3200 case Builtins::kClspvLocal: {
3201 // Don't codegen an instruction here, but instead map this call directly
3202 // to the workgroup variable id.
3203 int spec_id = static_cast<int>(
3204 cast<ConstantInt>(Call->getOperand(0))->getSExtValue());
3205 const auto &info = LocalSpecIdInfoMap[spec_id];
3206 RID = info.variable_id;
3207 break;
3208 }
3209 case Builtins::kClspvSamplerVarLiteral: {
3210 // Sampler initializers become a load of the corresponding sampler.
3211 // Map this to a load from the variable.
3212 const auto third_param = static_cast<unsigned>(
3213 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue());
3214 auto sampler_value = third_param;
3215 if (clspv::Option::UseSamplerMap()) {
3216 sampler_value = getSamplerMap()[third_param].first;
3217 }
3218
3219 // Generate an OpLoad
3220 SPIRVOperandVec Ops;
3221
3222 Ops << SamplerTy->getPointerElementType()
3223 << SamplerLiteralToIDMap[sampler_value];
3224
3225 RID = addSPIRVInst(spv::OpLoad, Ops);
3226 break;
3227 }
3228 case Builtins::kSpirvAtomicXor: {
3229 // Handle SPIR-V intrinsics
3230 SPIRVOperandVec Ops;
3231
3232 if (!Call->getType()->isVoidTy()) {
3233 Ops << Call->getType();
3234 }
3235
3236 for (unsigned i = 0; i < Call->getNumArgOperands(); i++) {
3237 Ops << Call->getArgOperand(i);
3238 }
3239
3240 RID = addSPIRVInst(spv::OpAtomicXor, Ops);
3241 break;
3242 }
3243 case Builtins::kSpirvOp: {
3244 // Handle SPIR-V intrinsics
3245 auto *arg0 = dyn_cast<ConstantInt>(Call->getArgOperand(0));
3246 spv::Op opcode = static_cast<spv::Op>(arg0->getZExtValue());
3247 if (opcode != spv::OpNop) {
3248 SPIRVOperandVec Ops;
3249
3250 if (!Call->getType()->isVoidTy()) {
3251 Ops << Call->getType();
3252 }
3253
3254 for (unsigned i = 1; i < Call->getNumArgOperands(); i++) {
3255 Ops << Call->getArgOperand(i);
3256 }
3257
3258 RID = addSPIRVInst(opcode, Ops);
3259 }
3260 break;
3261 }
3262 case Builtins::kSpirvCopyMemory: {
3263 //
3264 // Generate OpCopyMemory.
3265 //
3266
3267 // Ops[0] = Dst ID
3268 // Ops[1] = Src ID
3269 // Ops[2] = Memory Access
3270 // Ops[3] = Alignment
3271
3272 auto IsVolatile =
3273 dyn_cast<ConstantInt>(Call->getArgOperand(3))->getZExtValue() != 0;
3274
3275 auto VolatileMemoryAccess = (IsVolatile) ? spv::MemoryAccessVolatileMask
3276 : spv::MemoryAccessMaskNone;
3277
3278 auto MemoryAccess = VolatileMemoryAccess | spv::MemoryAccessAlignedMask;
3279
3280 auto Alignment =
3281 dyn_cast<ConstantInt>(Call->getArgOperand(2))->getZExtValue();
3282
3283 SPIRVOperandVec Ops;
3284 Ops << Call->getArgOperand(0) << Call->getArgOperand(1) << MemoryAccess
3285 << static_cast<uint32_t>(Alignment);
3286
3287 RID = addSPIRVInst(spv::OpCopyMemory, Ops);
3288 break;
3289 }
3290 default:
3291 llvm_unreachable("Unknown CLSPV Instruction");
3292 break;
3293 }
3294 return RID;
3295}
3296
3297SPIRVID
3298SPIRVProducerPass::GenerateImageInstruction(CallInst *Call,
3299 const FunctionInfo &FuncInfo) {
3300 SPIRVID RID;
3301
3302 LLVMContext &Context = module->getContext();
3303 switch (FuncInfo.getType()) {
3304 case Builtins::kReadImagef:
3305 case Builtins::kReadImageh:
3306 case Builtins::kReadImagei:
3307 case Builtins::kReadImageui: {
3308 // read_image is converted to OpSampledImage and OpImageSampleExplicitLod.
3309 // Additionally, OpTypeSampledImage is generated.
alan-bakerf6bc8252020-09-23 14:58:55 -04003310 const auto image_ty = Call->getArgOperand(0)->getType();
SJW806a5d82020-07-15 12:51:38 -05003311 const auto &pi = FuncInfo.getParameter(1);
3312 if (pi.isSampler()) {
3313 //
3314 // Generate OpSampledImage.
3315 //
3316 // Ops[0] = Result Type ID
3317 // Ops[1] = Image ID
3318 // Ops[2] = Sampler ID
3319 //
3320 SPIRVOperandVec Ops;
3321
3322 Value *Image = Call->getArgOperand(0);
3323 Value *Sampler = Call->getArgOperand(1);
3324 Value *Coordinate = Call->getArgOperand(2);
3325
3326 TypeMapType &OpImageTypeMap = getImageTypeMap();
3327 Type *ImageTy = Image->getType()->getPointerElementType();
3328 SPIRVID ImageTyID = OpImageTypeMap[ImageTy];
3329
3330 Ops << ImageTyID << Image << Sampler;
3331
3332 SPIRVID SampledImageID = addSPIRVInst(spv::OpSampledImage, Ops);
3333
3334 //
3335 // Generate OpImageSampleExplicitLod.
3336 //
3337 // Ops[0] = Result Type ID
3338 // Ops[1] = Sampled Image ID
3339 // Ops[2] = Coordinate ID
3340 // Ops[3] = Image Operands Type ID
3341 // Ops[4] ... Ops[n] = Operands ID
3342 //
3343 Ops.clear();
3344
3345 const bool is_int_image = IsIntImageType(Image->getType());
3346 SPIRVID result_type;
3347 if (is_int_image) {
3348 result_type = v4int32ID;
3349 } else {
3350 result_type = getSPIRVType(Call->getType());
3351 }
3352
3353 Constant *CstFP0 = ConstantFP::get(Context, APFloat(0.0f));
3354 Ops << result_type << SampledImageID << Coordinate
3355 << spv::ImageOperandsLodMask << CstFP0;
3356
3357 RID = addSPIRVInst(spv::OpImageSampleExplicitLod, Ops);
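      // Sketch of the emitted sequence for read_imagef with a sampler (ids and
      // types are illustrative):
      //   %sampled = OpSampledImage %sampled_image_ty %image %sampler
      //   %texel = OpImageSampleExplicitLod %v4float %sampled %coord Lod %float_0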
3358
3359 if (is_int_image) {
3360 // Generate the bitcast.
3361 Ops.clear();
3362 Ops << Call->getType() << RID;
3363 RID = addSPIRVInst(spv::OpBitcast, Ops);
3364 }
alan-bakerf6bc8252020-09-23 14:58:55 -04003365 } else if (IsStorageImageType(image_ty)) {
3366 // read_image on a storage image is mapped to OpImageRead.
3367 Value *Image = Call->getArgOperand(0);
3368 Value *Coordinate = Call->getArgOperand(1);
3369
3370 //
3371 // Generate OpImageRead
3372 //
3373 // Ops[0] = Result Type ID
3374 // Ops[1] = Image ID
3375 // Ops[2] = Coordinate
3376 // No optional image operands.
3377 //
3378 SPIRVOperandVec Ops;
3379
3380 const bool is_int_image = IsIntImageType(Image->getType());
3381 SPIRVID result_type;
3382 if (is_int_image) {
3383 result_type = v4int32ID;
3384 } else {
3385 result_type = getSPIRVType(Call->getType());
3386 }
3387
3388 Ops << result_type << Image << Coordinate;
3389 RID = addSPIRVInst(spv::OpImageRead, Ops);
3390
3391 if (is_int_image) {
3392 // Generate the bitcast.
3393 Ops.clear();
3394 Ops << Call->getType() << RID;
3395 RID = addSPIRVInst(spv::OpBitcast, Ops);
3396 }
3397
3398 // OpImageRead requires StorageImageReadWithoutFormat.
3399 addCapability(spv::CapabilityStorageImageReadWithoutFormat);
SJW806a5d82020-07-15 12:51:38 -05003400 } else {
alan-bakerf6bc8252020-09-23 14:58:55 -04003401 // read_image on a sampled image (without a sampler) is mapped to
3402 // OpImageFetch.
SJW806a5d82020-07-15 12:51:38 -05003403 Value *Image = Call->getArgOperand(0);
3404 Value *Coordinate = Call->getArgOperand(1);
3405
3406 //
3407 // Generate OpImageFetch
3408 //
3409 // Ops[0] = Result Type ID
3410 // Ops[1] = Image ID
3411 // Ops[2] = Coordinate ID
3412 // Ops[3] = Lod
3413 // Ops[4] = 0
3414 //
3415 SPIRVOperandVec Ops;
3416
3417 const bool is_int_image = IsIntImageType(Image->getType());
3418 SPIRVID result_type;
3419 if (is_int_image) {
3420 result_type = v4int32ID;
3421 } else {
3422 result_type = getSPIRVType(Call->getType());
3423 }
3424
3425 Ops << result_type << Image << Coordinate << spv::ImageOperandsLodMask
3426 << getSPIRVInt32Constant(0);
3427
3428 RID = addSPIRVInst(spv::OpImageFetch, Ops);
3429
3430 if (is_int_image) {
3431 // Generate the bitcast.
3432 Ops.clear();
3433 Ops << Call->getType() << RID;
3434 RID = addSPIRVInst(spv::OpBitcast, Ops);
3435 }
3436 }
3437 break;
3438 }
3439
3440 case Builtins::kWriteImagef:
3441 case Builtins::kWriteImageh:
3442 case Builtins::kWriteImagei:
3443 case Builtins::kWriteImageui: {
3444 // write_image is mapped to OpImageWrite.
3445 //
3446 // Generate OpImageWrite.
3447 //
3448 // Ops[0] = Image ID
3449 // Ops[1] = Coordinate ID
3450 // Ops[2] = Texel ID
3451 // Ops[3] = (Optional) Image Operands Type (Literal Number)
3452 // Ops[4] ... Ops[n] = (Optional) Operands ID
3453 //
3454 SPIRVOperandVec Ops;
3455
3456 Value *Image = Call->getArgOperand(0);
3457 Value *Coordinate = Call->getArgOperand(1);
3458 Value *Texel = Call->getArgOperand(2);
3459
3460 SPIRVID TexelID = getSPIRVValue(Texel);
3461
3462 const bool is_int_image = IsIntImageType(Image->getType());
3463 if (is_int_image) {
3464 // Generate a bitcast to v4int and use it as the texel value.
3465 Ops << v4int32ID << TexelID;
3466 TexelID = addSPIRVInst(spv::OpBitcast, Ops);
3467 Ops.clear();
3468 }
3469 Ops << Image << Coordinate << TexelID;
SJW806a5d82020-07-15 12:51:38 -05003470 RID = addSPIRVInst(spv::OpImageWrite, Ops);
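    // Sketch of the expected output (OpImageWrite produces no result id):
    //   OpImageWrite %image %coord %texel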
alan-bakerf6bc8252020-09-23 14:58:55 -04003471
3472 // Image writes require StorageImageWriteWithoutFormat.
3473 addCapability(spv::CapabilityStorageImageWriteWithoutFormat);
SJW806a5d82020-07-15 12:51:38 -05003474 break;
3475 }
3476
3477 case Builtins::kGetImageHeight:
3478 case Builtins::kGetImageWidth:
3479 case Builtins::kGetImageDepth:
3480 case Builtins::kGetImageDim: {
3481 // get_image_* is mapped to OpImageQuerySize or OpImageQuerySizeLod
3482 addCapability(spv::CapabilityImageQuery);
3483
3484 //
3485 // Generate OpImageQuerySize[Lod]
3486 //
3487 // Ops[0] = Image ID
3488 //
3489 // Result type has components equal to the dimensionality of the image,
3490 // plus 1 if the image is arrayed.
3491 //
3492 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
3493 SPIRVOperandVec Ops;
3494
3495 // Implement:
3496 // %sizes = OpImageQuerySize[Lod] %uint[2|3|4] %im [%uint_0]
3497 SPIRVID SizesTypeID;
3498
3499 Value *Image = Call->getArgOperand(0);
3500 const uint32_t dim = ImageDimensionality(Image->getType());
3501 const uint32_t components =
3502 dim + (IsArrayImageType(Image->getType()) ? 1 : 0);
3503 if (components == 1) {
3504 SizesTypeID = getSPIRVType(Type::getInt32Ty(Context));
3505 } else {
3506 SizesTypeID = getSPIRVType(
3507 FixedVectorType::get(Type::getInt32Ty(Context), components));
3508 }
3509 Ops << SizesTypeID << Image;
3510 spv::Op query_opcode = spv::OpImageQuerySize;
3511 if (IsSampledImageType(Image->getType())) {
3512 query_opcode = spv::OpImageQuerySizeLod;
3513 // Need explicit 0 for Lod operand.
3514 Ops << getSPIRVInt32Constant(0);
3515 }
3516
3517 RID = addSPIRVInst(query_opcode, Ops);
3518
3519 // May require an extra instruction to create the appropriate result of
3520 // the builtin function.
3521 if (FuncInfo.getType() == Builtins::kGetImageDim) {
3522 if (dim == 3) {
3523 // get_image_dim returns an int4 for 3D images.
3524 //
3525
3526 // Implement:
3527 // %result = OpCompositeConstruct %uint4 %sizes %uint_0
3528 Ops.clear();
3529 Ops << FixedVectorType::get(Type::getInt32Ty(Context), 4) << RID
3530 << getSPIRVInt32Constant(0);
3531
3532 RID = addSPIRVInst(spv::OpCompositeConstruct, Ops);
3533 } else if (dim != components) {
3534 // get_image_dim returns an int2 regardless of the arrayedness of the
 3535 // image. If the image is arrayed, an element must be dropped from the
3536 // query result.
3537 //
3538
3539 // Implement:
3540 // %result = OpVectorShuffle %uint2 %sizes %sizes 0 1
3541 Ops.clear();
3542 Ops << FixedVectorType::get(Type::getInt32Ty(Context), 2) << RID << RID
3543 << 0 << 1;
3544
3545 RID = addSPIRVInst(spv::OpVectorShuffle, Ops);
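        // Sketch, e.g. for a 2D arrayed image: the size query yields
        // (width, height, layers) and the shuffle keeps only (width, height):
        //   %dim = OpVectorShuffle %v2uint %sizes %sizes 0 1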
3546 }
3547 } else if (components > 1) {
3548 // Implement:
3549 // %result = OpCompositeExtract %uint %sizes <component number>
3550 Ops.clear();
3551 Ops << Call->getType() << RID;
3552
3553 uint32_t component = 0;
3554 if (FuncInfo.getType() == Builtins::kGetImageHeight)
3555 component = 1;
3556 else if (FuncInfo.getType() == Builtins::kGetImageDepth)
3557 component = 2;
3558 Ops << component;
3559
3560 RID = addSPIRVInst(spv::OpCompositeExtract, Ops);
3561 }
3562 break;
3563 }
3564 default:
3565 llvm_unreachable("Unsupported Image builtin");
3566 }
3567
3568 return RID;
3569}
3570
3571SPIRVID
3572SPIRVProducerPass::GenerateSubgroupInstruction(CallInst *Call,
3573 const FunctionInfo &FuncInfo) {
3574 SPIRVID RID;
3575
3576 // requires SPIRV version 1.3 or greater
3577 if (SpvVersion() < SPIRVVersion::SPIRV_1_3) {
3578 // llvm_unreachable("SubGroups extension requires SPIRV 1.3 or greater");
3579 // TODO(sjw): error out gracefully
3580 }
3581
3582 auto loadBuiltin = [this, Call](spv::BuiltIn spvBI,
3583 spv::Capability spvCap =
3584 spv::CapabilityGroupNonUniform) {
3585 SPIRVOperandVec Ops;
3586 Ops << Call->getType() << this->getSPIRVBuiltin(spvBI, spvCap);
3587
3588 return addSPIRVInst(spv::OpLoad, Ops);
3589 };
3590
3591 spv::Op op = spv::OpNop;
3592 switch (FuncInfo.getType()) {
3593 case Builtins::kGetSubGroupSize:
3594 return loadBuiltin(spv::BuiltInSubgroupSize);
3595 case Builtins::kGetNumSubGroups:
3596 return loadBuiltin(spv::BuiltInNumSubgroups);
3597 case Builtins::kGetSubGroupId:
3598 return loadBuiltin(spv::BuiltInSubgroupId);
3599 case Builtins::kGetSubGroupLocalId:
3600 return loadBuiltin(spv::BuiltInSubgroupLocalInvocationId);
3601
3602 case Builtins::kSubGroupBroadcast:
3603 if (SpvVersion() < SPIRVVersion::SPIRV_1_5 &&
3604 !dyn_cast<ConstantInt>(Call->getOperand(1))) {
3605 llvm_unreachable("sub_group_broadcast requires constant lane Id for "
3606 "SPIRV version < 1.5");
3607 }
3608 addCapability(spv::CapabilityGroupNonUniformBallot);
3609 op = spv::OpGroupNonUniformBroadcast;
3610 break;
3611
3612 case Builtins::kSubGroupAll:
3613 addCapability(spv::CapabilityGroupNonUniformVote);
3614 op = spv::OpGroupNonUniformAll;
3615 break;
3616 case Builtins::kSubGroupAny:
3617 addCapability(spv::CapabilityGroupNonUniformVote);
3618 op = spv::OpGroupNonUniformAny;
3619 break;
3620 case Builtins::kSubGroupReduceAdd:
3621 case Builtins::kSubGroupScanExclusiveAdd:
3622 case Builtins::kSubGroupScanInclusiveAdd: {
3623 addCapability(spv::CapabilityGroupNonUniformArithmetic);
3624 if (FuncInfo.getParameter(0).type_id == Type::IntegerTyID) {
3625 op = spv::OpGroupNonUniformIAdd;
3626 } else {
3627 op = spv::OpGroupNonUniformFAdd;
3628 }
3629 break;
3630 }
3631 case Builtins::kSubGroupReduceMin:
3632 case Builtins::kSubGroupScanExclusiveMin:
3633 case Builtins::kSubGroupScanInclusiveMin: {
3634 addCapability(spv::CapabilityGroupNonUniformArithmetic);
3635 auto &param = FuncInfo.getParameter(0);
3636 if (param.type_id == Type::IntegerTyID) {
3637 op = param.is_signed ? spv::OpGroupNonUniformSMin
3638 : spv::OpGroupNonUniformUMin;
3639 } else {
3640 op = spv::OpGroupNonUniformFMin;
3641 }
3642 break;
3643 }
3644 case Builtins::kSubGroupReduceMax:
3645 case Builtins::kSubGroupScanExclusiveMax:
3646 case Builtins::kSubGroupScanInclusiveMax: {
3647 addCapability(spv::CapabilityGroupNonUniformArithmetic);
3648 auto &param = FuncInfo.getParameter(0);
3649 if (param.type_id == Type::IntegerTyID) {
3650 op = param.is_signed ? spv::OpGroupNonUniformSMax
3651 : spv::OpGroupNonUniformUMax;
3652 } else {
3653 op = spv::OpGroupNonUniformFMax;
3654 }
3655 break;
3656 }
3657
3658 case Builtins::kGetEnqueuedNumSubGroups:
3659 // TODO(sjw): requires CapabilityKernel (incompatible with Shader)
3660 case Builtins::kGetMaxSubGroupSize:
3661 // TODO(sjw): use SpecConstant, capability Kernel (incompatible with Shader)
3662 case Builtins::kSubGroupBarrier:
3663 case Builtins::kSubGroupReserveReadPipe:
3664 case Builtins::kSubGroupReserveWritePipe:
3665 case Builtins::kSubGroupCommitReadPipe:
3666 case Builtins::kSubGroupCommitWritePipe:
3667 case Builtins::kGetKernelSubGroupCountForNdrange:
3668 case Builtins::kGetKernelMaxSubGroupSizeForNdrange:
3669 default:
3670 Call->print(errs());
3671 llvm_unreachable("Unsupported sub_group operation");
3672 break;
3673 }
3674
3675 assert(op != spv::OpNop);
3676
3677 SPIRVOperandVec Operands;
3678
3679 //
3680 // Generate OpGroupNonUniform*
3681 //
3682 // Ops[0] = Result Type ID
3683 // Ops[1] = ScopeSubgroup
3684 // Ops[2] = Value ID
3685 // Ops[3] = Local ID
3686
3687 // The result type.
3688 Operands << Call->getType();
3689
3690 // Subgroup Scope
3691 Operands << getSPIRVInt32Constant(spv::ScopeSubgroup);
3692
3693 switch (FuncInfo.getType()) {
3694 case Builtins::kSubGroupReduceAdd:
3695 case Builtins::kSubGroupReduceMin:
3696 case Builtins::kSubGroupReduceMax:
3697 Operands << spv::GroupOperationReduce;
3698 break;
3699 case Builtins::kSubGroupScanExclusiveAdd:
3700 case Builtins::kSubGroupScanExclusiveMin:
3701 case Builtins::kSubGroupScanExclusiveMax:
3702 Operands << spv::GroupOperationExclusiveScan;
3703 break;
3704 case Builtins::kSubGroupScanInclusiveAdd:
3705 case Builtins::kSubGroupScanInclusiveMin:
3706 case Builtins::kSubGroupScanInclusiveMax:
3707 Operands << spv::GroupOperationInclusiveScan;
3708 break;
3709 default:
3710 break;
3711 }
3712
3713 for (Use &use : Call->arg_operands()) {
3714 Operands << use.get();
3715 }
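  // Sketch, e.g. for sub_group_reduce_add on a float value (the scope operand
  // is a constant id holding spv::ScopeSubgroup, i.e. 3):
  //   %result = OpGroupNonUniformFAdd %float %uint_3 Reduce %x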
3716
3717 return addSPIRVInst(op, Operands);
3718}
3719
3720SPIRVID SPIRVProducerPass::GenerateInstructionFromCall(CallInst *Call) {
3721 LLVMContext &Context = module->getContext();
3722
3723 auto &func_info = Builtins::Lookup(Call->getCalledFunction());
3724 auto func_type = func_info.getType();
3725
3726 if (BUILTIN_IN_GROUP(func_type, Clspv)) {
3727 return GenerateClspvInstruction(Call, func_info);
3728 } else if (BUILTIN_IN_GROUP(func_type, Image)) {
3729 return GenerateImageInstruction(Call, func_info);
3730 } else if (BUILTIN_IN_GROUP(func_type, SubgroupsKHR)) {
3731 return GenerateSubgroupInstruction(Call, func_info);
3732 }
3733
3734 SPIRVID RID;
3735
3736 switch (func_type) {
3737 case Builtins::kPopcount: {
3738 //
3739 // Generate OpBitCount
3740 //
3741 // Ops[0] = Result Type ID
3742 // Ops[1] = Base ID
3743 SPIRVOperandVec Ops;
3744 Ops << Call->getType() << Call->getOperand(0);
3745
3746 RID = addSPIRVInst(spv::OpBitCount, Ops);
3747 break;
3748 }
3749 default: {
3750 glsl::ExtInst EInst = getDirectOrIndirectExtInstEnum(func_info);
3751
3752 if (EInst) {
3753 SPIRVID ExtInstImportID = getOpExtInstImportID();
3754
3755 //
3756 // Generate OpExtInst.
3757 //
3758
3759 // Ops[0] = Result Type ID
3760 // Ops[1] = Set ID (OpExtInstImport ID)
3761 // Ops[2] = Instruction Number (Literal Number)
3762 // Ops[3] ... Ops[n] = Operand 1, ... , Operand n
3763 SPIRVOperandVec Ops;
3764
3765 Ops << Call->getType() << ExtInstImportID << EInst;
3766
3767 for (auto &use : Call->arg_operands()) {
3768 Ops << use.get();
3769 }
3770
3771 RID = addSPIRVInst(spv::OpExtInst, Ops);
3772
3773 const auto IndirectExtInst = getIndirectExtInstEnum(func_info);
3774 if (IndirectExtInst != kGlslExtInstBad) {
SJW806a5d82020-07-15 12:51:38 -05003775 // Generate one more instruction that uses the result of the extended
3776 // instruction. Its result id is one more than the id of the
3777 // extended instruction.
3778 auto generate_extra_inst = [this, &Context, &Call,
3779 &RID](spv::Op opcode, Constant *constant) {
3780 //
3781 // Generate instruction like:
3782 // result = opcode constant <extinst-result>
3783 //
3784 // Ops[0] = Result Type ID
3785 // Ops[1] = Operand 0 ;; the constant, suitably splatted
3786 // Ops[2] = Operand 1 ;; the result of the extended instruction
3787 SPIRVOperandVec Ops;
3788
3789 Type *resultTy = Call->getType();
3790
3791 if (auto *vectorTy = dyn_cast<VectorType>(resultTy)) {
alan-baker931253b2020-08-20 17:15:38 -04003792 constant =
3793 ConstantVector::getSplat(vectorTy->getElementCount(), constant);
SJW806a5d82020-07-15 12:51:38 -05003794 }
3795 Ops << resultTy << constant << RID;
3796
3797 RID = addSPIRVInst(opcode, Ops);
3798 };
3799
alan-bakercc2bafb2020-11-02 08:30:18 -05003800 auto bitwidth = Call->getType()->getScalarSizeInBits();
SJW806a5d82020-07-15 12:51:38 -05003801 switch (IndirectExtInst) {
3802 case glsl::ExtInstFindUMsb: // Implementing clz
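// clz is computed as (bitwidth - 1) - FindUMsb(x). For a 32-bit input
// this emits, roughly (illustrative names, not the exact IDs produced):
//   %msb = OpExtInst %uint %glsl_std_450 FindUMsb %x
//   %clz = OpISub %uint %uint_31 %msb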
alan-bakercc2bafb2020-11-02 08:30:18 -05003803 generate_extra_inst(
3804 spv::OpISub,
3805 ConstantInt::get(Call->getType()->getScalarType(), bitwidth - 1));
SJW806a5d82020-07-15 12:51:38 -05003806 break;
alan-baker2cecaa72020-11-05 14:05:20 -05003807 case glsl::ExtInstFindILsb: { // Implementing ctz
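// FindILsb reports -1 for a zero input, while OpenCL's ctz(0) is the
// operand's bit width, so the code below compares the FindILsb result
// against -1 and selects the constant 32 in that case. Roughly
// (illustrative names):
//   %lsb = OpExtInst %uint %glsl_std_450 FindILsb %x
//   %is_zero = OpIEqual %bool %lsb %int_minus_1
//   %ctz = OpSelect %uint %is_zero %uint_32 %lsb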
3808 auto neg_one = Constant::getAllOnesValue(Call->getType());
3809 Constant *int_32 =
3810 ConstantInt::get(Call->getType()->getScalarType(), 32);
3811 Type *i1_ty = Type::getInt1Ty(Call->getContext());
3812 if (auto vec_ty = dyn_cast<VectorType>(Call->getType())) {
3813 i1_ty = VectorType::get(i1_ty, vec_ty->getElementCount());
3814 int_32 =
3815 ConstantVector::getSplat(vec_ty->getElementCount(), int_32);
3816 }
3817
3818 SPIRVOperandVec local_ops;
3819 local_ops << i1_ty << RID << neg_one;
3820 auto cmp = addSPIRVInst(spv::OpIEqual, local_ops);
3821 local_ops.clear();
3822 local_ops << Call->getType() << cmp << int_32 << RID;
3823 RID = addSPIRVInst(spv::OpSelect, local_ops);
3824 break;
3825 }
SJW806a5d82020-07-15 12:51:38 -05003826 case glsl::ExtInstAcos: // Implementing acospi
3827 case glsl::ExtInstAsin: // Implementing asinpi
3828 case glsl::ExtInstAtan: // Implementing atanpi
3829 case glsl::ExtInstAtan2: // Implementing atan2pi
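// The *pi variants are derived from the plain trig result by multiplying
// it by 1/pi, e.g. acospi(x) = acos(x) * (1/pi); kOneOverPi supplies the
// constant and generate_extra_inst splats it for vector types.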
3830 generate_extra_inst(
3831 spv::OpFMul,
alan-bakercc2bafb2020-11-02 08:30:18 -05003832 ConstantFP::get(Call->getType()->getScalarType(), kOneOverPi));
SJW806a5d82020-07-15 12:51:38 -05003833 break;
3834
3835 default:
3836 assert(false && "internally inconsistent");
3837 }
3838 }
3839 } else {
SJW806a5d82020-07-15 12:51:38 -05003840 // A real function call (not builtin)
3841 // Call instruction is deferred because it needs function's ID.
3842 RID = addSPIRVPlaceholder(Call);
3843 }
3844
3845 break;
3846 }
3847 }
3848
3849 return RID;
3850}
3851
David Neto22f144c2017-06-12 14:26:21 -04003852void SPIRVProducerPass::GenerateInstruction(Instruction &I) {
David Neto22f144c2017-06-12 14:26:21 -04003853 ValueMapType &VMap = getValueMap();
SJW806a5d82020-07-15 12:51:38 -05003854 LLVMContext &Context = module->getContext();
David Neto22f144c2017-06-12 14:26:21 -04003855
SJW806a5d82020-07-15 12:51:38 -05003856 SPIRVID RID;
David Neto22f144c2017-06-12 14:26:21 -04003857
3858 switch (I.getOpcode()) {
3859 default: {
3860 if (Instruction::isCast(I.getOpcode())) {
3861 //
3862 // Generate SPIRV instructions for cast operators.
3863 //
3864
David Netod2de94a2017-08-28 17:27:47 -04003865 auto Ty = I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003866 auto OpTy = I.getOperand(0)->getType();
David Netod2de94a2017-08-28 17:27:47 -04003867 auto toI8 = Ty == Type::getInt8Ty(Context);
3868 auto fromI32 = OpTy == Type::getInt32Ty(Context);
David Neto22f144c2017-06-12 14:26:21 -04003869 // Handle zext, sext and uitofp with i1 type specially.
3870 if ((I.getOpcode() == Instruction::ZExt ||
3871 I.getOpcode() == Instruction::SExt ||
3872 I.getOpcode() == Instruction::UIToFP) &&
alan-bakerb6b09dc2018-11-08 16:59:28 -05003873 OpTy->isIntOrIntVectorTy(1)) {
David Neto22f144c2017-06-12 14:26:21 -04003874 //
3875 // Generate OpSelect.
3876 //
3877
3878 // Ops[0] = Result Type ID
3879 // Ops[1] = Condition ID
3880 // Ops[2] = True Constant ID
3881 // Ops[3] = False Constant ID
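//
// For example, "%r = zext i1 %c to i32" becomes roughly
//   %r = OpSelect %uint %c %uint_1 %uint_0
// while sext selects -1/0 and uitofp selects 1.0/0.0, as built below.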
SJWf93f5f32020-05-05 07:27:56 -05003882 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003883
SJW01901d92020-05-21 08:58:31 -05003884 Ops << I.getType() << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04003885
David Neto22f144c2017-06-12 14:26:21 -04003886 if (I.getOpcode() == Instruction::ZExt) {
SJW01901d92020-05-21 08:58:31 -05003887 Ops << ConstantInt::get(I.getType(), 1);
David Neto22f144c2017-06-12 14:26:21 -04003888 } else if (I.getOpcode() == Instruction::SExt) {
SJW01901d92020-05-21 08:58:31 -05003889 Ops << ConstantInt::getSigned(I.getType(), -1);
David Neto22f144c2017-06-12 14:26:21 -04003890 } else {
James Price96bd3d92020-11-23 09:01:57 -05003891 Ops << ConstantFP::get(I.getType(), 1.0);
David Neto22f144c2017-06-12 14:26:21 -04003892 }
David Neto22f144c2017-06-12 14:26:21 -04003893
David Neto22f144c2017-06-12 14:26:21 -04003894 if (I.getOpcode() == Instruction::ZExt) {
SJW01901d92020-05-21 08:58:31 -05003895 Ops << Constant::getNullValue(I.getType());
David Neto22f144c2017-06-12 14:26:21 -04003896 } else if (I.getOpcode() == Instruction::SExt) {
SJW01901d92020-05-21 08:58:31 -05003897 Ops << Constant::getNullValue(I.getType());
David Neto22f144c2017-06-12 14:26:21 -04003898 } else {
James Price96bd3d92020-11-23 09:01:57 -05003899 Ops << ConstantFP::get(I.getType(), 0.0);
David Neto22f144c2017-06-12 14:26:21 -04003900 }
David Neto22f144c2017-06-12 14:26:21 -04003901
SJWf93f5f32020-05-05 07:27:56 -05003902 RID = addSPIRVInst(spv::OpSelect, Ops);
alan-bakerb39c8262019-03-08 14:03:37 -05003903 } else if (!clspv::Option::Int8Support() &&
3904 I.getOpcode() == Instruction::Trunc && fromI32 && toI8) {
David Netod2de94a2017-08-28 17:27:47 -04003905 // The SPIR-V target type is a 32-bit int. Keep only the bottom
3906 // 8 bits.
3907 // Before:
3908 // %result = trunc i32 %a to i8
3909 // After
3910 // %result = OpBitwiseAnd %uint %a %uint_255
3911
SJWf93f5f32020-05-05 07:27:56 -05003912 SPIRVOperandVec Ops;
David Netod2de94a2017-08-28 17:27:47 -04003913
SJW806a5d82020-07-15 12:51:38 -05003914 Ops << OpTy << I.getOperand(0) << getSPIRVInt32Constant(255);
David Netod2de94a2017-08-28 17:27:47 -04003915
SJWf93f5f32020-05-05 07:27:56 -05003916 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003917 } else {
3918 // Ops[0] = Result Type ID
3919 // Ops[1] = Source Value ID
SJWf93f5f32020-05-05 07:27:56 -05003920 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003921
SJW01901d92020-05-21 08:58:31 -05003922 Ops << I.getType() << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04003923
SJWf93f5f32020-05-05 07:27:56 -05003924 RID = addSPIRVInst(GetSPIRVCastOpcode(I), Ops);
David Neto22f144c2017-06-12 14:26:21 -04003925 }
3926 } else if (isa<BinaryOperator>(I)) {
3927 //
3928 // Generate SPIRV instructions for binary operators.
3929 //
3930
3931 // Handle xor with i1 type specially.
3932 if (I.getOpcode() == Instruction::Xor &&
3933 I.getType() == Type::getInt1Ty(Context) &&
Kévin Petit24272b62018-10-18 19:16:12 +00003934 ((isa<ConstantInt>(I.getOperand(0)) &&
3935 !cast<ConstantInt>(I.getOperand(0))->isZero()) ||
3936 (isa<ConstantInt>(I.getOperand(1)) &&
3937 !cast<ConstantInt>(I.getOperand(1))->isZero()))) {
David Neto22f144c2017-06-12 14:26:21 -04003938 //
3939 // Generate OpLogicalNot.
3940 //
3941 // Ops[0] = Result Type ID
3942 // Ops[1] = Operand
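//
// For example, "%r = xor i1 %a, true" becomes roughly
//   %r = OpLogicalNot %bool %a
// using whichever operand is not the constant.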
SJWf93f5f32020-05-05 07:27:56 -05003943 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003944
SJW01901d92020-05-21 08:58:31 -05003945 Ops << I.getType();
David Neto22f144c2017-06-12 14:26:21 -04003946
3947 Value *CondV = I.getOperand(0);
3948 if (isa<Constant>(I.getOperand(0))) {
3949 CondV = I.getOperand(1);
3950 }
SJW01901d92020-05-21 08:58:31 -05003951 Ops << CondV;
David Neto22f144c2017-06-12 14:26:21 -04003952
SJWf93f5f32020-05-05 07:27:56 -05003953 RID = addSPIRVInst(spv::OpLogicalNot, Ops);
David Neto22f144c2017-06-12 14:26:21 -04003954 } else {
3955 // Ops[0] = Result Type ID
3956 // Ops[1] = Operand 0
3957 // Ops[2] = Operand 1
SJWf93f5f32020-05-05 07:27:56 -05003958 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003959
SJW01901d92020-05-21 08:58:31 -05003960 Ops << I.getType() << I.getOperand(0) << I.getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04003961
SJWf93f5f32020-05-05 07:27:56 -05003962 RID = addSPIRVInst(GetSPIRVBinaryOpcode(I), Ops);
David Neto22f144c2017-06-12 14:26:21 -04003963 }
alan-bakerc9c55ae2019-12-02 16:01:27 -05003964 } else if (I.getOpcode() == Instruction::FNeg) {
3965 // The only unary operator.
3966 //
3967 // Ops[0] = Result Type ID
3968 // Ops[1] = Operand 0
SJW01901d92020-05-21 08:58:31 -05003969 SPIRVOperandVec Ops;
alan-bakerc9c55ae2019-12-02 16:01:27 -05003970
SJW01901d92020-05-21 08:58:31 -05003971 Ops << I.getType() << I.getOperand(0);
3972 RID = addSPIRVInst(spv::OpFNegate, Ops);
Marco Antognini68e5c512020-09-09 16:08:57 +01003973 } else if (I.getOpcode() == Instruction::Unreachable) {
3974 RID = addSPIRVInst(spv::OpUnreachable);
David Neto22f144c2017-06-12 14:26:21 -04003975 } else {
3976 I.print(errs());
3977 llvm_unreachable("Unsupported instruction???");
3978 }
3979 break;
3980 }
3981 case Instruction::GetElementPtr: {
3982 auto &GlobalConstArgSet = getGlobalConstArgSet();
3983
3984 //
3985 // Generate OpAccessChain.
3986 //
3987 GetElementPtrInst *GEP = cast<GetElementPtrInst>(&I);
3988
3993 // Ops[0] = Result Type ID
3994 // Ops[1] = Base ID
3995 // Ops[2] ... Ops[n] = Indexes ID
SJWf93f5f32020-05-05 07:27:56 -05003996 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04003997
alan-bakerb6b09dc2018-11-08 16:59:28 -05003998 PointerType *ResultType = cast<PointerType>(GEP->getType());
David Neto22f144c2017-06-12 14:26:21 -04003999 if (GEP->getPointerAddressSpace() == AddressSpace::ModuleScopePrivate ||
4000 GlobalConstArgSet.count(GEP->getPointerOperand())) {
4001 // Use pointer type with private address space for global constant.
4002 Type *EleTy = I.getType()->getPointerElementType();
David Neto1a1a0582017-07-07 12:01:44 -04004003 ResultType = PointerType::get(EleTy, AddressSpace::ModuleScopePrivate);
David Neto22f144c2017-06-12 14:26:21 -04004004 }
David Neto257c3892018-04-11 13:19:45 -04004005
SJW01901d92020-05-21 08:58:31 -05004006 Ops << ResultType;
David Neto22f144c2017-06-12 14:26:21 -04004007
David Neto862b7d82018-06-14 18:48:37 -04004008 // Generate the base pointer.
SJW01901d92020-05-21 08:58:31 -05004009 Ops << GEP->getPointerOperand();
David Neto22f144c2017-06-12 14:26:21 -04004010
David Neto862b7d82018-06-14 18:48:37 -04004011 // TODO(dneto): Simplify the following?
David Neto22f144c2017-06-12 14:26:21 -04004012
4013 //
4014 // Follows below rules for gep.
4015 //
David Neto862b7d82018-06-14 18:48:37 -04004016 // 1. If gep's first index is 0, generate OpAccessChain and ignore gep's
4017 // first index.
David Neto22f144c2017-06-12 14:26:21 -04004018 // 2. If gep's first index is not 0, generate OpPtrAccessChain and use gep's
4019 // first index.
4020 // 3. If gep's first index is not constant, generate OpPtrAccessChain and
4021 // use gep's first index.
4022 // 4. If it is not above case 1, 2 and 3, generate OpAccessChain and use
4023 // gep's first index.
4024 //
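// For example, "getelementptr %struct.S, %struct.S* %p, i32 0, i32 2"
// becomes roughly "OpAccessChain %ptr %p %uint_2" (the leading zero index
// is dropped), while a non-zero or non-constant first index is kept and
// becomes the Element operand of OpPtrAccessChain.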
4025 spv::Op Opcode = spv::OpAccessChain;
4026 unsigned offset = 0;
4027 if (ConstantInt *CstInt = dyn_cast<ConstantInt>(GEP->getOperand(1))) {
David Neto862b7d82018-06-14 18:48:37 -04004028 if (CstInt->getZExtValue() == 0) {
David Neto22f144c2017-06-12 14:26:21 -04004029 offset = 1;
David Neto862b7d82018-06-14 18:48:37 -04004030 } else if (CstInt->getZExtValue() != 0) {
David Neto22f144c2017-06-12 14:26:21 -04004031 Opcode = spv::OpPtrAccessChain;
David Neto22f144c2017-06-12 14:26:21 -04004032 }
David Neto862b7d82018-06-14 18:48:37 -04004033 } else {
David Neto22f144c2017-06-12 14:26:21 -04004034 Opcode = spv::OpPtrAccessChain;
David Neto1a1a0582017-07-07 12:01:44 -04004035 }
4036
4037 if (Opcode == spv::OpPtrAccessChain) {
alan-baker7506abb2020-09-10 15:02:55 -04004038 // Shader validation in the SPIR-V spec requires that the base pointer to
4039 // OpPtrAccessChain (in StorageBuffer storage class) be decorated with
4040 // ArrayStride.
alan-baker5b86ed72019-02-15 08:26:50 -05004041 auto address_space = ResultType->getAddressSpace();
4042 setVariablePointersCapabilities(address_space);
4043 switch (GetStorageClass(address_space)) {
Alan Bakerfcda9482018-10-02 17:09:59 -04004044 case spv::StorageClassStorageBuffer:
David Neto1a1a0582017-07-07 12:01:44 -04004045 // Record that an ArrayStride decoration is needed, but defer its
4046 // generation until later so we only emit one decoration per type.
alan-baker7506abb2020-09-10 15:02:55 -04004047 getTypesNeedingArrayStride().insert(GEP->getPointerOperandType());
4048 break;
4049 case spv::StorageClassWorkgroup:
Alan Bakerfcda9482018-10-02 17:09:59 -04004050 break;
4051 default:
alan-baker7506abb2020-09-10 15:02:55 -04004052 llvm_unreachable(
4053 "OpPtrAccessChain is not supported for this storage class");
Alan Bakerfcda9482018-10-02 17:09:59 -04004054 break;
David Neto1a1a0582017-07-07 12:01:44 -04004055 }
David Neto22f144c2017-06-12 14:26:21 -04004056 }
4057
4058 for (auto II = GEP->idx_begin() + offset; II != GEP->idx_end(); II++) {
SJW01901d92020-05-21 08:58:31 -05004059 Ops << *II;
David Neto22f144c2017-06-12 14:26:21 -04004060 }
4061
SJWf93f5f32020-05-05 07:27:56 -05004062 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004063 break;
4064 }
4065 case Instruction::ExtractValue: {
4066 ExtractValueInst *EVI = cast<ExtractValueInst>(&I);
4067 // Ops[0] = Result Type ID
4068 // Ops[1] = Composite ID
4069 // Ops[2] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004070 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004071
SJW01901d92020-05-21 08:58:31 -05004072 Ops << I.getType();
David Neto22f144c2017-06-12 14:26:21 -04004073
SJW01901d92020-05-21 08:58:31 -05004074 Ops << EVI->getAggregateOperand();
David Neto22f144c2017-06-12 14:26:21 -04004075
4076 for (auto &Index : EVI->indices()) {
SJW01901d92020-05-21 08:58:31 -05004077 Ops << Index;
David Neto22f144c2017-06-12 14:26:21 -04004078 }
4079
SJWf93f5f32020-05-05 07:27:56 -05004080 RID = addSPIRVInst(spv::OpCompositeExtract, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004081 break;
4082 }
4083 case Instruction::InsertValue: {
4084 InsertValueInst *IVI = cast<InsertValueInst>(&I);
4085 // Ops[0] = Result Type ID
4086 // Ops[1] = Object ID
4087 // Ops[2] = Composite ID
4088 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004089 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004090
SJW01901d92020-05-21 08:58:31 -05004091 Ops << I.getType() << IVI->getInsertedValueOperand()
4092 << IVI->getAggregateOperand();
David Neto22f144c2017-06-12 14:26:21 -04004093
4094 for (auto &Index : IVI->indices()) {
SJW01901d92020-05-21 08:58:31 -05004095 Ops << Index;
David Neto22f144c2017-06-12 14:26:21 -04004096 }
4097
SJWf93f5f32020-05-05 07:27:56 -05004098 RID = addSPIRVInst(spv::OpCompositeInsert, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004099 break;
4100 }
4101 case Instruction::Select: {
4102 //
4103 // Generate OpSelect.
4104 //
4105
4106 // Ops[0] = Result Type ID
4107 // Ops[1] = Condition ID
4108 // Ops[2] = True Constant ID
4109 // Ops[3] = False Constant ID
SJWf93f5f32020-05-05 07:27:56 -05004110 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004111
4112 // Find SPIRV instruction for parameter type.
4113 auto Ty = I.getType();
4114 if (Ty->isPointerTy()) {
4115 auto PointeeTy = Ty->getPointerElementType();
4116 if (PointeeTy->isStructTy() &&
4117 dyn_cast<StructType>(PointeeTy)->isOpaque()) {
4118 Ty = PointeeTy;
alan-baker5b86ed72019-02-15 08:26:50 -05004119 } else {
4120 // Selecting between pointers requires variable pointers.
4121 setVariablePointersCapabilities(Ty->getPointerAddressSpace());
4122 if (!hasVariablePointers() && !selectFromSameObject(&I)) {
SJW01901d92020-05-21 08:58:31 -05004123 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004124 }
David Neto22f144c2017-06-12 14:26:21 -04004125 }
4126 }
4127
SJW01901d92020-05-21 08:58:31 -05004128 Ops << Ty << I.getOperand(0) << I.getOperand(1) << I.getOperand(2);
David Neto22f144c2017-06-12 14:26:21 -04004129
SJWf93f5f32020-05-05 07:27:56 -05004130 RID = addSPIRVInst(spv::OpSelect, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004131 break;
4132 }
4133 case Instruction::ExtractElement: {
4134 // Handle <4 x i8> type manually.
4135 Type *CompositeTy = I.getOperand(0)->getType();
4136 if (is4xi8vec(CompositeTy)) {
4137 //
4138 // Generate OpShiftRightLogical and OpBitwiseAnd for extractelement with
4139 // <4 x i8>.
4140 //
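//
// The <4 x i8> value is packed into a 32-bit word, so extracting element
// i amounts to (word >> (i * 8)) & 0xFF. For element 2 this is roughly:
//   %shifted = OpShiftRightLogical %uint %vec %uint_16
//   %result = OpBitwiseAnd %uint %shifted %uint_255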
4141
4142 //
4143 // Generate OpShiftRightLogical
4144 //
4145 // Ops[0] = Result Type ID
4146 // Ops[1] = Operand 0
4147 // Ops[2] = Operand 1
4148 //
SJWf93f5f32020-05-05 07:27:56 -05004149 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004150
SJW01901d92020-05-21 08:58:31 -05004151 Ops << CompositeTy << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04004152
SJW01901d92020-05-21 08:58:31 -05004153 SPIRVID Op1ID = 0;
David Neto22f144c2017-06-12 14:26:21 -04004154 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
4155 // Handle constant index.
SJW806a5d82020-07-15 12:51:38 -05004156 uint32_t Idx = static_cast<uint32_t>(CI->getZExtValue());
4157 Op1ID = getSPIRVInt32Constant(Idx * 8);
David Neto22f144c2017-06-12 14:26:21 -04004158 } else {
4159 // Handle variable index.
SJWf93f5f32020-05-05 07:27:56 -05004160 SPIRVOperandVec TmpOps;
David Neto22f144c2017-06-12 14:26:21 -04004161
SJW806a5d82020-07-15 12:51:38 -05004162 TmpOps << Type::getInt32Ty(Context) << I.getOperand(1)
4163 << getSPIRVInt32Constant(8);
David Neto22f144c2017-06-12 14:26:21 -04004164
SJWf93f5f32020-05-05 07:27:56 -05004165 Op1ID = addSPIRVInst(spv::OpIMul, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004166 }
SJW01901d92020-05-21 08:58:31 -05004167 Ops << Op1ID;
David Neto22f144c2017-06-12 14:26:21 -04004168
SJW01901d92020-05-21 08:58:31 -05004169 SPIRVID ShiftID = addSPIRVInst(spv::OpShiftRightLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004170
4171 //
4172 // Generate OpBitwiseAnd
4173 //
4174 // Ops[0] = Result Type ID
4175 // Ops[1] = Operand 0
4176 // Ops[2] = Operand 1
4177 //
4178 Ops.clear();
4179
SJW806a5d82020-07-15 12:51:38 -05004180 Ops << CompositeTy << ShiftID << getSPIRVInt32Constant(0xFF);
David Neto22f144c2017-06-12 14:26:21 -04004181
SJWf93f5f32020-05-05 07:27:56 -05004182 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004183 break;
4184 }
4185
4186 // Ops[0] = Result Type ID
4187 // Ops[1] = Composite ID
4188 // Ops[2] ... Ops[n] = Indexes (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004189 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004190
SJW01901d92020-05-21 08:58:31 -05004191 Ops << I.getType() << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04004192
4193 spv::Op Opcode = spv::OpCompositeExtract;
4194 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(1))) {
SJW01901d92020-05-21 08:58:31 -05004195 Ops << static_cast<uint32_t>(CI->getZExtValue());
David Neto22f144c2017-06-12 14:26:21 -04004196 } else {
SJW01901d92020-05-21 08:58:31 -05004197 Ops << I.getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04004198 Opcode = spv::OpVectorExtractDynamic;
4199 }
4200
SJWf93f5f32020-05-05 07:27:56 -05004201 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004202 break;
4203 }
4204 case Instruction::InsertElement: {
4205 // Handle <4 x i8> type manually.
4206 Type *CompositeTy = I.getOperand(0)->getType();
4207 if (is4xi8vec(CompositeTy)) {
SJW806a5d82020-07-15 12:51:38 -05004208 SPIRVID CstFFID = getSPIRVInt32Constant(0xFF);
David Neto22f144c2017-06-12 14:26:21 -04004209
SJW01901d92020-05-21 08:58:31 -05004210 SPIRVID ShiftAmountID = 0;
David Neto22f144c2017-06-12 14:26:21 -04004211 if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
4212 // Handle constant index.
SJW806a5d82020-07-15 12:51:38 -05004213 uint32_t Idx = static_cast<uint32_t>(CI->getZExtValue());
4214 ShiftAmountID = getSPIRVInt32Constant(Idx * 8);
David Neto22f144c2017-06-12 14:26:21 -04004215 } else {
4216 // Handle variable index.
SJWf93f5f32020-05-05 07:27:56 -05004217 SPIRVOperandVec TmpOps;
David Neto22f144c2017-06-12 14:26:21 -04004218
SJW806a5d82020-07-15 12:51:38 -05004219 TmpOps << Type::getInt32Ty(Context) << I.getOperand(2)
4220 << getSPIRVInt32Constant(8);
David Neto22f144c2017-06-12 14:26:21 -04004221
SJWf93f5f32020-05-05 07:27:56 -05004222 ShiftAmountID = addSPIRVInst(spv::OpIMul, TmpOps);
David Neto22f144c2017-06-12 14:26:21 -04004223 }
4224
4225 //
4226 // Generate mask operations.
4227 //
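// Overall effect, with the vector packed into a 32-bit word and assuming
// the inserted byte's upper bits are already zero:
//   result = (orig & ~(0xFF << (i * 8))) | (value << (i * 8))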
4228
4229 // ShiftLeft mask according to index of insertelement.
SJWf93f5f32020-05-05 07:27:56 -05004230 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004231
SJW01901d92020-05-21 08:58:31 -05004232 Ops << CompositeTy << CstFFID << ShiftAmountID;
David Neto22f144c2017-06-12 14:26:21 -04004233
SJW01901d92020-05-21 08:58:31 -05004234 SPIRVID MaskID = addSPIRVInst(spv::OpShiftLeftLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004235
4236 // Inverse mask.
4237 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004238 Ops << CompositeTy << MaskID;
David Neto22f144c2017-06-12 14:26:21 -04004239
SJW01901d92020-05-21 08:58:31 -05004240 SPIRVID InvMaskID = addSPIRVInst(spv::OpNot, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004241
4242 // Apply mask.
4243 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004244 Ops << CompositeTy << I.getOperand(0) << InvMaskID;
David Neto22f144c2017-06-12 14:26:21 -04004245
SJW01901d92020-05-21 08:58:31 -05004246 SPIRVID OrgValID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004247
4248 // Create correct value according to index of insertelement.
4249 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004250 Ops << CompositeTy << I.getOperand(1) << ShiftAmountID;
David Neto22f144c2017-06-12 14:26:21 -04004251
SJW01901d92020-05-21 08:58:31 -05004252 SPIRVID InsertValID = addSPIRVInst(spv::OpShiftLeftLogical, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004253
4254 // Insert value to original value.
4255 Ops.clear();
SJW01901d92020-05-21 08:58:31 -05004256 Ops << CompositeTy << OrgValID << InsertValID;
David Neto22f144c2017-06-12 14:26:21 -04004257
SJWf93f5f32020-05-05 07:27:56 -05004258 RID = addSPIRVInst(spv::OpBitwiseOr, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004259 break;
4260 }
4261
SJWf93f5f32020-05-05 07:27:56 -05004262 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004263
James Priced26efea2018-06-09 23:28:32 +01004264 // Ops[0] = Result Type ID
SJW01901d92020-05-21 08:58:31 -05004265 Ops << I.getType();
David Neto22f144c2017-06-12 14:26:21 -04004266
4267 spv::Op Opcode = spv::OpCompositeInsert;
4268 if (const ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand(2))) {
David Neto257c3892018-04-11 13:19:45 -04004269 const auto value = CI->getZExtValue();
4270 assert(value <= UINT32_MAX);
James Priced26efea2018-06-09 23:28:32 +01004271 // Ops[1] = Object ID
4272 // Ops[2] = Composite ID
4273 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJW01901d92020-05-21 08:58:31 -05004274 Ops << I.getOperand(1) << I.getOperand(0) << static_cast<uint32_t>(value);
David Neto22f144c2017-06-12 14:26:21 -04004275 } else {
James Priced26efea2018-06-09 23:28:32 +01004276 // Ops[1] = Composite ID
4277 // Ops[2] = Object ID
4278 // Ops[3] ... Ops[n] = Indexes (Literal Number)
SJW01901d92020-05-21 08:58:31 -05004279 Ops << I.getOperand(0) << I.getOperand(1) << I.getOperand(2);
David Neto22f144c2017-06-12 14:26:21 -04004280 Opcode = spv::OpVectorInsertDynamic;
4281 }
4282
SJWf93f5f32020-05-05 07:27:56 -05004283 RID = addSPIRVInst(Opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004284 break;
4285 }
4286 case Instruction::ShuffleVector: {
4287 // Ops[0] = Result Type ID
4288 // Ops[1] = Vector 1 ID
4289 // Ops[2] = Vector 2 ID
4290 // Ops[3] ... Ops[n] = Components (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004291 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004292
SJW01901d92020-05-21 08:58:31 -05004293 Ops << I.getType() << I.getOperand(0) << I.getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04004294
alan-bakerc9666712020-04-01 16:31:21 -04004295 auto shuffle = cast<ShuffleVectorInst>(&I);
4296 SmallVector<int, 4> mask;
4297 shuffle->getShuffleMask(mask);
4298 for (auto i : mask) {
4299 if (i == UndefMaskElem) {
4300 if (clspv::Option::HackUndef())
4301 // Use 0 instead of undef.
SJW01901d92020-05-21 08:58:31 -05004302 Ops << 0;
alan-bakerc9666712020-04-01 16:31:21 -04004303 else
4304 // 0xFFFFFFFF marks an undefined component in OpVectorShuffle.
SJW01901d92020-05-21 08:58:31 -05004305 Ops << 0xffffffff;
David Neto22f144c2017-06-12 14:26:21 -04004306 } else {
SJW01901d92020-05-21 08:58:31 -05004307 Ops << i;
David Neto22f144c2017-06-12 14:26:21 -04004308 }
4309 }
4310
SJWf93f5f32020-05-05 07:27:56 -05004311 RID = addSPIRVInst(spv::OpVectorShuffle, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004312 break;
4313 }
4314 case Instruction::ICmp:
4315 case Instruction::FCmp: {
4316 CmpInst *CmpI = cast<CmpInst>(&I);
4317
David Netod4ca2e62017-07-06 18:47:35 -04004318 // Pointer equality is invalid.
alan-bakerb6b09dc2018-11-08 16:59:28 -05004319 Type *ArgTy = CmpI->getOperand(0)->getType();
David Netod4ca2e62017-07-06 18:47:35 -04004320 if (isa<PointerType>(ArgTy)) {
4321 CmpI->print(errs());
alan-baker21574d32020-01-29 16:00:31 -05004322 std::string name = I.getParent()->getParent()->getName().str();
David Netod4ca2e62017-07-06 18:47:35 -04004323 errs()
4324 << "\nPointer equality test is not supported by SPIR-V for Vulkan, "
4325 << "in function " << name << "\n";
4326 llvm_unreachable("Pointer equality check is invalid");
4327 break;
4328 }
4329
SJWf93f5f32020-05-05 07:27:56 -05004330 SPIRVOperandVec Ops;
alan-baker15106572020-11-06 15:08:10 -05004331 if (CmpI->getPredicate() == CmpInst::FCMP_ORD ||
4332 CmpI->getPredicate() == CmpInst::FCMP_UNO) {
4333 // Implement ordered and unordered comparisons using OpIsNan instructions.
4334 // Optimize the constants to simplify the resulting code.
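// In the general case "fcmp ord float %a, %b" becomes roughly:
//   %na = OpIsNan %bool %a
//   %nb = OpIsNan %bool %b
//   %uno = OpLogicalOr %bool %na %nb
//   %ord = OpLogicalNot %bool %uno
// The unordered form stops before the final negation.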
4335 auto lhs = CmpI->getOperand(0);
4336 auto rhs = CmpI->getOperand(1);
4337 auto const_lhs = dyn_cast_or_null<Constant>(lhs);
4338 auto const_rhs = dyn_cast_or_null<Constant>(rhs);
4339 if ((const_lhs && const_lhs->isNaN()) ||
4340 (const_rhs && const_rhs->isNaN())) {
4341 // The result is a constant: false for ordered, true for unordered.
4342 if (CmpI->getPredicate() == CmpInst::FCMP_ORD) {
4343 RID = getSPIRVConstant(ConstantInt::getFalse(CmpI->getType()));
4344 } else {
4345 RID = getSPIRVConstant(ConstantInt::getTrue(CmpI->getType()));
4346 }
4347 break;
4348 }
4349 SPIRVID lhs_id;
4350 SPIRVID rhs_id;
4351 if (!const_lhs) {
4352 // Generate OpIsNan for the lhs.
4353 Ops.clear();
4354 Ops << CmpI->getType() << lhs;
4355 lhs_id = addSPIRVInst(spv::OpIsNan, Ops);
4356 }
4357 if (!const_rhs) {
4358 // Generate OpIsNan for the rhs.
4359 Ops.clear();
4360 Ops << CmpI->getType() << rhs;
4361 rhs_id = addSPIRVInst(spv::OpIsNan, Ops);
4362 }
4363 if (lhs_id.isValid() && rhs_id.isValid()) {
4364 // Or the results for the lhs and rhs.
4365 Ops.clear();
4366 Ops << CmpI->getType() << lhs_id << rhs_id;
4367 RID = addSPIRVInst(spv::OpLogicalOr, Ops);
4368 } else {
4369 RID = lhs_id.isValid() ? lhs_id : rhs_id;
4370 }
4371 if (CmpI->getPredicate() == CmpInst::FCMP_ORD) {
4372 // For ordered comparisons, invert the intermediate result.
4373 Ops.clear();
4374 Ops << CmpI->getType() << RID;
4375 RID = addSPIRVInst(spv::OpLogicalNot, Ops);
4376 }
4377 break;
4378 } else {
4379 // Remaining comparisons map directly to SPIR-V opcodes.
4380 // Ops[0] = Result Type ID
4381 // Ops[1] = Operand 1 ID
4382 // Ops[2] = Operand 2 ID
4383 Ops << CmpI->getType() << CmpI->getOperand(0) << CmpI->getOperand(1);
David Neto22f144c2017-06-12 14:26:21 -04004384
alan-baker15106572020-11-06 15:08:10 -05004385 spv::Op Opcode = GetSPIRVCmpOpcode(CmpI);
4386 RID = addSPIRVInst(Opcode, Ops);
4387 }
David Neto22f144c2017-06-12 14:26:21 -04004388 break;
4389 }
4390 case Instruction::Br: {
SJW88ed5fe2020-05-11 12:40:57 -05004391 // Branch instruction is deferred because it needs label's ID.
4392 BasicBlock *BrBB = I.getParent();
4393 if (ContinueBlocks.count(BrBB) || MergeBlocks.count(BrBB)) {
4394 // Placeholder for Merge operation
4395 RID = addSPIRVPlaceholder(&I);
4396 }
4397 RID = addSPIRVPlaceholder(&I);
David Neto22f144c2017-06-12 14:26:21 -04004398 break;
4399 }
4400 case Instruction::Switch: {
4401 I.print(errs());
4402 llvm_unreachable("Unsupported instruction???");
4403 break;
4404 }
4405 case Instruction::IndirectBr: {
4406 I.print(errs());
4407 llvm_unreachable("Unsupported instruction???");
4408 break;
4409 }
4410 case Instruction::PHI: {
SJW88ed5fe2020-05-11 12:40:57 -05004411 // PHI instruction is deferred because it needs label's ID.
4412 RID = addSPIRVPlaceholder(&I);
David Neto22f144c2017-06-12 14:26:21 -04004413 break;
4414 }
4415 case Instruction::Alloca: {
4416 //
4417 // Generate OpVariable.
4418 //
4419 // Ops[0] : Result Type ID
4420 // Ops[1] : Storage Class
SJWf93f5f32020-05-05 07:27:56 -05004421 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004422
SJW01901d92020-05-21 08:58:31 -05004423 Ops << I.getType() << spv::StorageClassFunction;
David Neto22f144c2017-06-12 14:26:21 -04004424
SJWf93f5f32020-05-05 07:27:56 -05004425 RID = addSPIRVInst(spv::OpVariable, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004426 break;
4427 }
4428 case Instruction::Load: {
4429 LoadInst *LD = cast<LoadInst>(&I);
4430 //
4431 // Generate OpLoad.
4432 //
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04004433
alan-baker5b86ed72019-02-15 08:26:50 -05004434 if (LD->getType()->isPointerTy()) {
4435 // Loading a pointer requires variable pointers.
4436 setVariablePointersCapabilities(LD->getType()->getPointerAddressSpace());
4437 }
David Neto22f144c2017-06-12 14:26:21 -04004438
SJW01901d92020-05-21 08:58:31 -05004439 SPIRVID PointerID = getSPIRVValue(LD->getPointerOperand());
David Netoa60b00b2017-09-15 16:34:09 -04004440 // This is a hack to work around what looks like a driver bug.
4441 // When we're loading from the special variable holding the WorkgroupSize
David Neto0a2f98d2017-09-15 19:38:40 -04004442 // builtin value, use an OpBitwiseAnd of the value's ID rather than
4443 // generating a load.
David Neto66cfe642018-03-24 06:13:56 -07004444 // TODO(dneto): Remove this awful hack once drivers are fixed.
David Netoa60b00b2017-09-15 16:34:09 -04004445 if (PointerID == WorkgroupSizeVarID) {
David Neto0a2f98d2017-09-15 19:38:40 -04004446 // Generate a bitwise-and of the original value with itself.
4447 // We should have been able to get away with just an OpCopyObject,
4448 // but we need something more complex to get past certain driver bugs.
4449 // This is ridiculous, but necessary.
4450 // TODO(dneto): Revisit this once drivers fix their bugs.
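// That is, instead of "%size = OpLoad %v3uint %wgsize_var", emit roughly
// "%size = OpBitwiseAnd %v3uint %wgsize_value %wgsize_value" (illustrative
// names; the value ID is WorkgroupSizeValueID below).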
4451
SJWf93f5f32020-05-05 07:27:56 -05004452 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05004453 Ops << LD->getType() << WorkgroupSizeValueID << WorkgroupSizeValueID;
David Neto0a2f98d2017-09-15 19:38:40 -04004454
SJWf93f5f32020-05-05 07:27:56 -05004455 RID = addSPIRVInst(spv::OpBitwiseAnd, Ops);
David Netoa60b00b2017-09-15 16:34:09 -04004456 break;
4457 }
4458
4459 // This is the normal path. Generate a load.
4460
David Neto22f144c2017-06-12 14:26:21 -04004461 // Ops[0] = Result Type ID
4462 // Ops[1] = Pointer ID
4463 // Ops[2] ... Ops[n] = Optional Memory Access
4464 //
4465 // TODO: Do we need to implement Optional Memory Access???
David Neto0a2f98d2017-09-15 19:38:40 -04004466
SJWf93f5f32020-05-05 07:27:56 -05004467 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05004468 Ops << LD->getType() << LD->getPointerOperand();
David Neto22f144c2017-06-12 14:26:21 -04004469
SJWf93f5f32020-05-05 07:27:56 -05004470 RID = addSPIRVInst(spv::OpLoad, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004471 break;
4472 }
4473 case Instruction::Store: {
4474 StoreInst *ST = cast<StoreInst>(&I);
4475 //
4476 // Generate OpStore.
4477 //
4478
alan-baker5b86ed72019-02-15 08:26:50 -05004479 if (ST->getValueOperand()->getType()->isPointerTy()) {
4480 // Storing a pointer requires variable pointers.
4481 setVariablePointersCapabilities(
4482 ST->getValueOperand()->getType()->getPointerAddressSpace());
4483 }
4484
David Neto22f144c2017-06-12 14:26:21 -04004485 // Ops[0] = Pointer ID
4486 // Ops[1] = Object ID
4487 // Ops[2] ... Ops[n] = Optional Memory Access (later???)
4488 //
4489 // TODO: Do we need to implement Optional Memory Access???
SJWf93f5f32020-05-05 07:27:56 -05004490 SPIRVOperandVec Ops;
SJW01901d92020-05-21 08:58:31 -05004491 Ops << ST->getPointerOperand() << ST->getValueOperand();
David Neto22f144c2017-06-12 14:26:21 -04004492
SJWf93f5f32020-05-05 07:27:56 -05004493 RID = addSPIRVInst(spv::OpStore, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004494 break;
4495 }
4496 case Instruction::AtomicCmpXchg: {
4497 I.print(errs());
4498 llvm_unreachable("Unsupported instruction???");
4499 break;
4500 }
4501 case Instruction::AtomicRMW: {
Neil Henning39672102017-09-29 14:33:13 +01004502 AtomicRMWInst *AtomicRMW = dyn_cast<AtomicRMWInst>(&I);
4503
4504 spv::Op opcode;
4505
4506 switch (AtomicRMW->getOperation()) {
4507 default:
4508 I.print(errs());
4509 llvm_unreachable("Unsupported instruction???");
4510 case llvm::AtomicRMWInst::Add:
4511 opcode = spv::OpAtomicIAdd;
4512 break;
4513 case llvm::AtomicRMWInst::Sub:
4514 opcode = spv::OpAtomicISub;
4515 break;
4516 case llvm::AtomicRMWInst::Xchg:
4517 opcode = spv::OpAtomicExchange;
4518 break;
4519 case llvm::AtomicRMWInst::Min:
4520 opcode = spv::OpAtomicSMin;
4521 break;
4522 case llvm::AtomicRMWInst::Max:
4523 opcode = spv::OpAtomicSMax;
4524 break;
4525 case llvm::AtomicRMWInst::UMin:
4526 opcode = spv::OpAtomicUMin;
4527 break;
4528 case llvm::AtomicRMWInst::UMax:
4529 opcode = spv::OpAtomicUMax;
4530 break;
4531 case llvm::AtomicRMWInst::And:
4532 opcode = spv::OpAtomicAnd;
4533 break;
4534 case llvm::AtomicRMWInst::Or:
4535 opcode = spv::OpAtomicOr;
4536 break;
4537 case llvm::AtomicRMWInst::Xor:
4538 opcode = spv::OpAtomicXor;
4539 break;
4540 }
4541
4542 //
4543 // Generate OpAtomic*.
4544 //
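// For example, "atomicrmw add i32* %p, i32 %v" becomes roughly
//   %r = OpAtomicIAdd %uint %p %scope_device %semantics %v
// with Device scope and SequentiallyConsistent | UniformMemory semantics,
// as built below.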
SJWf93f5f32020-05-05 07:27:56 -05004545 SPIRVOperandVec Ops;
Neil Henning39672102017-09-29 14:33:13 +01004546
SJW01901d92020-05-21 08:58:31 -05004547 Ops << I.getType() << AtomicRMW->getPointerOperand();
Neil Henning39672102017-09-29 14:33:13 +01004548
SJW806a5d82020-07-15 12:51:38 -05004549 const auto ConstantScopeDevice = getSPIRVInt32Constant(spv::ScopeDevice);
SJW01901d92020-05-21 08:58:31 -05004550 Ops << ConstantScopeDevice;
Neil Henning39672102017-09-29 14:33:13 +01004551
SJW806a5d82020-07-15 12:51:38 -05004552 const auto ConstantMemorySemantics =
4553 getSPIRVInt32Constant(spv::MemorySemanticsUniformMemoryMask |
4554 spv::MemorySemanticsSequentiallyConsistentMask);
SJW01901d92020-05-21 08:58:31 -05004555 Ops << ConstantMemorySemantics << AtomicRMW->getValOperand();
Neil Henning39672102017-09-29 14:33:13 +01004556
SJWf93f5f32020-05-05 07:27:56 -05004557 RID = addSPIRVInst(opcode, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004558 break;
4559 }
4560 case Instruction::Fence: {
4561 I.print(errs());
4562 llvm_unreachable("Unsupported instruction???");
4563 break;
4564 }
4565 case Instruction::Call: {
4566 CallInst *Call = dyn_cast<CallInst>(&I);
SJW806a5d82020-07-15 12:51:38 -05004567 RID = GenerateInstructionFromCall(Call);
David Neto22f144c2017-06-12 14:26:21 -04004568 break;
4569 }
4570 case Instruction::Ret: {
4571 unsigned NumOps = I.getNumOperands();
4572 if (NumOps == 0) {
4573 //
4574 // Generate OpReturn.
4575 //
SJWf93f5f32020-05-05 07:27:56 -05004576 RID = addSPIRVInst(spv::OpReturn);
David Neto22f144c2017-06-12 14:26:21 -04004577 } else {
4578 //
4579 // Generate OpReturnValue.
4580 //
4581
4582 // Ops[0] = Return Value ID
SJWf93f5f32020-05-05 07:27:56 -05004583 SPIRVOperandVec Ops;
David Neto257c3892018-04-11 13:19:45 -04004584
SJW01901d92020-05-21 08:58:31 -05004585 Ops << I.getOperand(0);
David Neto22f144c2017-06-12 14:26:21 -04004586
SJWf93f5f32020-05-05 07:27:56 -05004587 RID = addSPIRVInst(spv::OpReturnValue, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004588 break;
4589 }
4590 break;
4591 }
4592 }
SJWf93f5f32020-05-05 07:27:56 -05004593
4594 // Register Instruction to ValueMap.
SJW01901d92020-05-21 08:58:31 -05004595 if (RID.isValid()) {
SJWf93f5f32020-05-05 07:27:56 -05004596 VMap[&I] = RID;
4597 }
David Neto22f144c2017-06-12 14:26:21 -04004598}
4599
4600void SPIRVProducerPass::GenerateFuncEpilogue() {
David Neto22f144c2017-06-12 14:26:21 -04004601 //
4602 // Generate OpFunctionEnd
4603 //
SJWf93f5f32020-05-05 07:27:56 -05004604 addSPIRVInst(spv::OpFunctionEnd);
David Neto22f144c2017-06-12 14:26:21 -04004605}
4606
4607bool SPIRVProducerPass::is4xi8vec(Type *Ty) const {
alan-bakerb39c8262019-03-08 14:03:37 -05004608 // Don't specialize <4 x i8> if i8 is generally supported.
4609 if (clspv::Option::Int8Support())
4610 return false;
4611
David Neto22f144c2017-06-12 14:26:21 -04004612 LLVMContext &Context = Ty->getContext();
James Pricecf53df42020-04-20 14:41:24 -04004613 if (auto VecTy = dyn_cast<VectorType>(Ty)) {
4614 if (VecTy->getElementType() == Type::getInt8Ty(Context) &&
alan-baker5a8c3be2020-09-09 13:44:26 -04004615 VecTy->getElementCount().getKnownMinValue() == 4) {
David Neto22f144c2017-06-12 14:26:21 -04004616 return true;
4617 }
4618 }
4619
4620 return false;
4621}
4622
4623void SPIRVProducerPass::HandleDeferredInstruction() {
David Neto22f144c2017-06-12 14:26:21 -04004624 DeferredInstVecType &DeferredInsts = getDeferredInstVec();
4625
SJW88ed5fe2020-05-11 12:40:57 -05004626 for (size_t i = 0; i < DeferredInsts.size(); ++i) {
4627 Value *Inst = DeferredInsts[i].first;
4628 SPIRVInstruction *Placeholder = DeferredInsts[i].second;
4629 SPIRVOperandVec Operands;
4630
4631 auto nextDeferred = [&i, &Inst, &DeferredInsts, &Placeholder]() {
4632 ++i;
4633 assert(DeferredInsts.size() > i);
4634 assert(Inst == DeferredInsts[i].first);
4635 Placeholder = DeferredInsts[i].second;
4636 };
David Neto22f144c2017-06-12 14:26:21 -04004637
4638 if (BranchInst *Br = dyn_cast<BranchInst>(Inst)) {
alan-baker06cad652019-12-03 17:56:47 -05004639 // Check whether this branch needs to be preceded by a merge instruction.
David Neto22f144c2017-06-12 14:26:21 -04004640 BasicBlock *BrBB = Br->getParent();
alan-baker06cad652019-12-03 17:56:47 -05004641 if (ContinueBlocks.count(BrBB)) {
David Neto22f144c2017-06-12 14:26:21 -04004642 //
4643 // Generate OpLoopMerge.
4644 //
4645 // Ops[0] = Merge Block ID
4646 // Ops[1] = Continue Target ID
4647 // Ops[2] = Loop Control
SJWf93f5f32020-05-05 07:27:56 -05004648 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004649
SJW01901d92020-05-21 08:58:31 -05004650 Ops << MergeBlocks[BrBB] << ContinueBlocks[BrBB]
4651 << spv::LoopControlMaskNone;
David Neto22f144c2017-06-12 14:26:21 -04004652
SJW88ed5fe2020-05-11 12:40:57 -05004653 replaceSPIRVInst(Placeholder, spv::OpLoopMerge, Ops);
4654
4655 nextDeferred();
4656
alan-baker06cad652019-12-03 17:56:47 -05004657 } else if (MergeBlocks.count(BrBB)) {
4658 //
4659 // Generate OpSelectionMerge.
4660 //
4661 // Ops[0] = Merge Block ID
4662 // Ops[1] = Selection Control
SJWf93f5f32020-05-05 07:27:56 -05004663 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004664
alan-baker06cad652019-12-03 17:56:47 -05004665 auto MergeBB = MergeBlocks[BrBB];
SJW01901d92020-05-21 08:58:31 -05004666 Ops << MergeBB << spv::SelectionControlMaskNone;
David Neto22f144c2017-06-12 14:26:21 -04004667
SJW88ed5fe2020-05-11 12:40:57 -05004668 replaceSPIRVInst(Placeholder, spv::OpSelectionMerge, Ops);
4669
4670 nextDeferred();
David Neto22f144c2017-06-12 14:26:21 -04004671 }
4672
4673 if (Br->isConditional()) {
4674 //
4675 // Generate OpBranchConditional.
4676 //
4677 // Ops[0] = Condition ID
4678 // Ops[1] = True Label ID
4679 // Ops[2] = False Label ID
4680 // Ops[3] ... Ops[n] = Branch weights (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004681 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004682
SJW01901d92020-05-21 08:58:31 -05004683 Ops << Br->getCondition() << Br->getSuccessor(0) << Br->getSuccessor(1);
David Neto22f144c2017-06-12 14:26:21 -04004684
SJW88ed5fe2020-05-11 12:40:57 -05004685 replaceSPIRVInst(Placeholder, spv::OpBranchConditional, Ops);
4686
David Neto22f144c2017-06-12 14:26:21 -04004687 } else {
4688 //
4689 // Generate OpBranch.
4690 //
4691 // Ops[0] = Target Label ID
SJWf93f5f32020-05-05 07:27:56 -05004692 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004693
SJW01901d92020-05-21 08:58:31 -05004694 Ops << Br->getSuccessor(0);
David Neto22f144c2017-06-12 14:26:21 -04004695
SJW88ed5fe2020-05-11 12:40:57 -05004696 replaceSPIRVInst(Placeholder, spv::OpBranch, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004697 }
4698 } else if (PHINode *PHI = dyn_cast<PHINode>(Inst)) {
alan-baker5ed87542020-03-23 11:05:22 -04004699 if (PHI->getType()->isPointerTy() && !IsSamplerType(PHI->getType()) &&
4700 !IsImageType(PHI->getType())) {
alan-baker5b86ed72019-02-15 08:26:50 -05004701 // OpPhi on pointers requires variable pointers.
4702 setVariablePointersCapabilities(
4703 PHI->getType()->getPointerAddressSpace());
4704 if (!hasVariablePointers() && !selectFromSameObject(PHI)) {
SJW01901d92020-05-21 08:58:31 -05004705 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004706 }
4707 }
4708
David Neto22f144c2017-06-12 14:26:21 -04004709 //
4710 // Generate OpPhi.
4711 //
4712 // Ops[0] = Result Type ID
4713 // Ops[1] ... Ops[n] = (Variable ID, Parent ID) pairs
SJWf93f5f32020-05-05 07:27:56 -05004714 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004715
SJW01901d92020-05-21 08:58:31 -05004716 Ops << PHI->getType();
David Neto22f144c2017-06-12 14:26:21 -04004717
SJW88ed5fe2020-05-11 12:40:57 -05004718 for (unsigned j = 0; j < PHI->getNumIncomingValues(); j++) {
SJW01901d92020-05-21 08:58:31 -05004719 Ops << PHI->getIncomingValue(j) << PHI->getIncomingBlock(j);
David Neto22f144c2017-06-12 14:26:21 -04004720 }
4721
SJW88ed5fe2020-05-11 12:40:57 -05004722 replaceSPIRVInst(Placeholder, spv::OpPhi, Ops);
4723
David Neto22f144c2017-06-12 14:26:21 -04004724 } else if (CallInst *Call = dyn_cast<CallInst>(Inst)) {
4725 Function *Callee = Call->getCalledFunction();
David Neto3fbb4072017-10-16 11:28:14 -04004726 auto callee_name = Callee->getName();
David Neto22f144c2017-06-12 14:26:21 -04004727
SJW61531372020-06-09 07:31:08 -05004728 if (Builtins::Lookup(Callee) == Builtins::kClspvCompositeConstruct) {
David Netoab03f432017-11-03 17:00:44 -04004729 // Generate an OpCompositeConstruct
SJWf93f5f32020-05-05 07:27:56 -05004730 SPIRVOperandVec Ops;
David Netoab03f432017-11-03 17:00:44 -04004731
4732 // The result type.
SJW01901d92020-05-21 08:58:31 -05004733 Ops << Call->getType();
David Netoab03f432017-11-03 17:00:44 -04004734
4735 for (Use &use : Call->arg_operands()) {
SJW01901d92020-05-21 08:58:31 -05004736 Ops << use.get();
David Netoab03f432017-11-03 17:00:44 -04004737 }
4738
SJW88ed5fe2020-05-11 12:40:57 -05004739 replaceSPIRVInst(Placeholder, spv::OpCompositeConstruct, Ops);
David Netoab03f432017-11-03 17:00:44 -04004740
David Neto22f144c2017-06-12 14:26:21 -04004741 } else {
alan-baker5b86ed72019-02-15 08:26:50 -05004742 if (Call->getType()->isPointerTy()) {
4743 // Functions returning pointers require variable pointers.
4744 setVariablePointersCapabilities(
4745 Call->getType()->getPointerAddressSpace());
4746 }
4747
David Neto22f144c2017-06-12 14:26:21 -04004748 //
4749 // Generate OpFunctionCall.
4750 //
4751
4752 // Ops[0] = Result Type ID
4753 // Ops[1] = Callee Function ID
4754 // Ops[2] ... Ops[n] = Argument 0, ... , Argument n
SJWf93f5f32020-05-05 07:27:56 -05004755 SPIRVOperandVec Ops;
David Neto22f144c2017-06-12 14:26:21 -04004756
SJW01901d92020-05-21 08:58:31 -05004757 Ops << Call->getType();
David Neto22f144c2017-06-12 14:26:21 -04004758
SJW01901d92020-05-21 08:58:31 -05004759 SPIRVID CalleeID = getSPIRVValue(Callee);
SJW806a5d82020-07-15 12:51:38 -05004760 if (!CalleeID.isValid()) {
David Neto43568eb2017-10-13 18:25:25 -04004761 errs() << "Can't translate function call. Missing builtin? "
David Neto862b7d82018-06-14 18:48:37 -04004762 << callee_name << " in: " << *Call << "\n";
David Neto43568eb2017-10-13 18:25:25 -04004763 // TODO(dneto): Can we error out? Enabling this llvm_unreachable
4764 // causes an infinite loop. Instead, go ahead and generate
4765 // the bad function call. A validator will catch the 0-Id.
4766 // llvm_unreachable("Can't translate function call");
4767 }
David Neto22f144c2017-06-12 14:26:21 -04004768
SJW01901d92020-05-21 08:58:31 -05004769 Ops << CalleeID;
David Neto22f144c2017-06-12 14:26:21 -04004770
David Neto22f144c2017-06-12 14:26:21 -04004771 FunctionType *CalleeFTy = cast<FunctionType>(Call->getFunctionType());
SJW88ed5fe2020-05-11 12:40:57 -05004772 for (unsigned j = 0; j < CalleeFTy->getNumParams(); j++) {
4773 auto *operand = Call->getOperand(j);
alan-bakerd4d50652019-12-03 17:17:15 -05004774 auto *operand_type = operand->getType();
4775 // Images and samplers can be passed as function parameters without
4776 // variable pointers.
4777 if (operand_type->isPointerTy() && !IsImageType(operand_type) &&
4778 !IsSamplerType(operand_type)) {
alan-baker5b86ed72019-02-15 08:26:50 -05004779 auto sc =
4780 GetStorageClass(operand->getType()->getPointerAddressSpace());
4781 if (sc == spv::StorageClassStorageBuffer) {
4782 // Passing SSBO by reference requires variable pointers storage
4783 // buffer.
SJW01901d92020-05-21 08:58:31 -05004784 setVariablePointersStorageBuffer();
alan-baker5b86ed72019-02-15 08:26:50 -05004785 } else if (sc == spv::StorageClassWorkgroup) {
4786 // Workgroup references require variable pointers if they are not
4787 // memory object declarations.
4788 if (auto *operand_call = dyn_cast<CallInst>(operand)) {
4789 // Workgroup accessor represents a variable reference.
SJW61531372020-06-09 07:31:08 -05004790 if (Builtins::Lookup(operand_call->getCalledFunction()) !=
4791 Builtins::kClspvLocal)
SJW01901d92020-05-21 08:58:31 -05004792 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004793 } else {
4794 // Arguments are function parameters.
4795 if (!isa<Argument>(operand))
SJW01901d92020-05-21 08:58:31 -05004796 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05004797 }
4798 }
4799 }
SJW01901d92020-05-21 08:58:31 -05004800 Ops << operand;
David Neto22f144c2017-06-12 14:26:21 -04004801 }
4802
SJW88ed5fe2020-05-11 12:40:57 -05004803 replaceSPIRVInst(Placeholder, spv::OpFunctionCall, Ops);
David Neto22f144c2017-06-12 14:26:21 -04004804 }
4805 }
4806 }
4807}
4808
SJW77b87ad2020-04-21 14:37:52 -05004809void SPIRVProducerPass::HandleDeferredDecorations() {
4810 const auto &DL = module->getDataLayout();
alan-baker5a8c3be2020-09-09 13:44:26 -04004811 if (getTypesNeedingArrayStride().empty()) {
David Neto1a1a0582017-07-07 12:01:44 -04004812 return;
David Netoc6f3ab22018-04-06 18:02:31 -04004813 }
David Neto1a1a0582017-07-07 12:01:44 -04004814
David Netoc6f3ab22018-04-06 18:02:31 -04004815 // Insert ArrayStride decorations on pointer types, due to OpPtrAccessChain
4816 // instructions we generated earlier.
alan-bakerc3fd07f2020-10-22 09:48:49 -04004817 DenseSet<uint32_t> seen;
David Neto85082642018-03-24 06:55:20 -07004818 for (auto *type : getTypesNeedingArrayStride()) {
alan-bakerc3fd07f2020-10-22 09:48:49 -04004819 auto id = getSPIRVType(type);
4820 if (!seen.insert(id.get()).second)
4821 continue;
4822
David Neto85082642018-03-24 06:55:20 -07004823 Type *elemTy = nullptr;
4824 if (auto *ptrTy = dyn_cast<PointerType>(type)) {
4825 elemTy = ptrTy->getElementType();
alan-bakerb6b09dc2018-11-08 16:59:28 -05004826 } else if (auto *arrayTy = dyn_cast<ArrayType>(type)) {
alan-baker8eb435a2020-04-08 00:42:06 -04004827 elemTy = arrayTy->getElementType();
4828 } else if (auto *vecTy = dyn_cast<VectorType>(type)) {
4829 elemTy = vecTy->getElementType();
David Neto85082642018-03-24 06:55:20 -07004830 } else {
4831 errs() << "Unhandled strided type " << *type << "\n";
4832 llvm_unreachable("Unhandled strided type");
4833 }
David Neto1a1a0582017-07-07 12:01:44 -04004834
4835 // Ops[0] = Target ID
4836 // Ops[1] = Decoration (ArrayStride)
4837 // Ops[2] = Stride number (Literal Number)
SJWf93f5f32020-05-05 07:27:56 -05004838 SPIRVOperandVec Ops;
David Neto1a1a0582017-07-07 12:01:44 -04004839
David Neto85082642018-03-24 06:55:20 -07004840 // Same as DL.getIndexedOffsetInType( elemTy, { 1 } );
Alan Bakerfcda9482018-10-02 17:09:59 -04004841 const uint32_t stride = static_cast<uint32_t>(GetTypeAllocSize(elemTy, DL));
David Neto257c3892018-04-11 13:19:45 -04004842
alan-bakerc3fd07f2020-10-22 09:48:49 -04004843 Ops << id << spv::DecorationArrayStride << stride;
David Neto1a1a0582017-07-07 12:01:44 -04004844
SJWf93f5f32020-05-05 07:27:56 -05004845 addSPIRVInst<kAnnotations>(spv::OpDecorate, Ops);
David Netoc6f3ab22018-04-06 18:02:31 -04004846 }
David Neto1a1a0582017-07-07 12:01:44 -04004847}
4848
SJW61531372020-06-09 07:31:08 -05004849glsl::ExtInst
4850SPIRVProducerPass::getExtInstEnum(const Builtins::FunctionInfo &func_info) {
SJW61531372020-06-09 07:31:08 -05004851 switch (func_info.getType()) {
SJW2c317da2020-03-23 07:39:13 -05004852 case Builtins::kClamp: {
SJW61531372020-06-09 07:31:08 -05004853 auto param_type = func_info.getParameter(0);
SJW2c317da2020-03-23 07:39:13 -05004854 if (param_type.type_id == Type::FloatTyID) {
4855 return glsl::ExtInst::ExtInstFClamp;
4856 }
4857 return param_type.is_signed ? glsl::ExtInst::ExtInstSClamp
4858 : glsl::ExtInst::ExtInstUClamp;
4859 }
4860 case Builtins::kMax: {
SJW61531372020-06-09 07:31:08 -05004861 auto param_type = func_info.getParameter(0);
SJW2c317da2020-03-23 07:39:13 -05004862 if (param_type.type_id == Type::FloatTyID) {
4863 return glsl::ExtInst::ExtInstFMax;
4864 }
4865 return param_type.is_signed ? glsl::ExtInst::ExtInstSMax
4866 : glsl::ExtInst::ExtInstUMax;
4867 }
4868 case Builtins::kMin: {
SJW61531372020-06-09 07:31:08 -05004869 auto param_type = func_info.getParameter(0);
SJW2c317da2020-03-23 07:39:13 -05004870 if (param_type.type_id == Type::FloatTyID) {
4871 return glsl::ExtInst::ExtInstFMin;
4872 }
4873 return param_type.is_signed ? glsl::ExtInst::ExtInstSMin
4874 : glsl::ExtInst::ExtInstUMin;
4875 }
4876 case Builtins::kAbs:
4877 return glsl::ExtInst::ExtInstSAbs;
4878 case Builtins::kFmax:
Marco Antognini55d51862020-07-21 17:50:07 +01004879 return glsl::ExtInst::ExtInstNMax;
SJW2c317da2020-03-23 07:39:13 -05004880 case Builtins::kFmin:
Marco Antognini55d51862020-07-21 17:50:07 +01004881 return glsl::ExtInst::ExtInstNMin;
SJW2c317da2020-03-23 07:39:13 -05004882 case Builtins::kDegrees:
4883 return glsl::ExtInst::ExtInstDegrees;
4884 case Builtins::kRadians:
4885 return glsl::ExtInst::ExtInstRadians;
4886 case Builtins::kMix:
4887 return glsl::ExtInst::ExtInstFMix;
4888 case Builtins::kAcos:
4889 case Builtins::kAcospi:
4890 return glsl::ExtInst::ExtInstAcos;
4891 case Builtins::kAcosh:
4892 return glsl::ExtInst::ExtInstAcosh;
4893 case Builtins::kAsin:
4894 case Builtins::kAsinpi:
4895 return glsl::ExtInst::ExtInstAsin;
4896 case Builtins::kAsinh:
4897 return glsl::ExtInst::ExtInstAsinh;
4898 case Builtins::kAtan:
4899 case Builtins::kAtanpi:
4900 return glsl::ExtInst::ExtInstAtan;
4901 case Builtins::kAtanh:
4902 return glsl::ExtInst::ExtInstAtanh;
4903 case Builtins::kAtan2:
4904 case Builtins::kAtan2pi:
4905 return glsl::ExtInst::ExtInstAtan2;
4906 case Builtins::kCeil:
4907 return glsl::ExtInst::ExtInstCeil;
4908 case Builtins::kSin:
4909 case Builtins::kHalfSin:
4910 case Builtins::kNativeSin:
4911 return glsl::ExtInst::ExtInstSin;
4912 case Builtins::kSinh:
4913 return glsl::ExtInst::ExtInstSinh;
4914 case Builtins::kCos:
4915 case Builtins::kHalfCos:
4916 case Builtins::kNativeCos:
4917 return glsl::ExtInst::ExtInstCos;
4918 case Builtins::kCosh:
4919 return glsl::ExtInst::ExtInstCosh;
4920 case Builtins::kTan:
4921 case Builtins::kHalfTan:
4922 case Builtins::kNativeTan:
4923 return glsl::ExtInst::ExtInstTan;
4924 case Builtins::kTanh:
4925 return glsl::ExtInst::ExtInstTanh;
4926 case Builtins::kExp:
4927 case Builtins::kHalfExp:
4928 case Builtins::kNativeExp:
4929 return glsl::ExtInst::ExtInstExp;
4930 case Builtins::kExp2:
4931 case Builtins::kHalfExp2:
4932 case Builtins::kNativeExp2:
4933 return glsl::ExtInst::ExtInstExp2;
4934 case Builtins::kLog:
4935 case Builtins::kHalfLog:
4936 case Builtins::kNativeLog:
4937 return glsl::ExtInst::ExtInstLog;
4938 case Builtins::kLog2:
4939 case Builtins::kHalfLog2:
4940 case Builtins::kNativeLog2:
4941 return glsl::ExtInst::ExtInstLog2;
4942 case Builtins::kFabs:
4943 return glsl::ExtInst::ExtInstFAbs;
4944 case Builtins::kFma:
4945 return glsl::ExtInst::ExtInstFma;
4946 case Builtins::kFloor:
4947 return glsl::ExtInst::ExtInstFloor;
4948 case Builtins::kLdexp:
4949 return glsl::ExtInst::ExtInstLdexp;
4950 case Builtins::kPow:
4951 case Builtins::kPowr:
4952 case Builtins::kHalfPowr:
4953 case Builtins::kNativePowr:
4954 return glsl::ExtInst::ExtInstPow;
James Price38553362020-09-03 18:30:40 -04004955 case Builtins::kRint:
4956 return glsl::ExtInst::ExtInstRoundEven;
SJW2c317da2020-03-23 07:39:13 -05004957 case Builtins::kRound:
4958 return glsl::ExtInst::ExtInstRound;
4959 case Builtins::kSqrt:
4960 case Builtins::kHalfSqrt:
4961 case Builtins::kNativeSqrt:
4962 return glsl::ExtInst::ExtInstSqrt;
4963 case Builtins::kRsqrt:
4964 case Builtins::kHalfRsqrt:
4965 case Builtins::kNativeRsqrt:
4966 return glsl::ExtInst::ExtInstInverseSqrt;
4967 case Builtins::kTrunc:
4968 return glsl::ExtInst::ExtInstTrunc;
4969 case Builtins::kFrexp:
4970 return glsl::ExtInst::ExtInstFrexp;
SJW61531372020-06-09 07:31:08 -05004971 case Builtins::kClspvFract:
SJW2c317da2020-03-23 07:39:13 -05004972 case Builtins::kFract:
4973 return glsl::ExtInst::ExtInstFract;
4974 case Builtins::kSign:
4975 return glsl::ExtInst::ExtInstFSign;
4976 case Builtins::kLength:
4977 case Builtins::kFastLength:
4978 return glsl::ExtInst::ExtInstLength;
4979 case Builtins::kDistance:
4980 case Builtins::kFastDistance:
4981 return glsl::ExtInst::ExtInstDistance;
4982 case Builtins::kStep:
4983 return glsl::ExtInst::ExtInstStep;
4984 case Builtins::kSmoothstep:
4985 return glsl::ExtInst::ExtInstSmoothStep;
4986 case Builtins::kCross:
4987 return glsl::ExtInst::ExtInstCross;
4988 case Builtins::kNormalize:
4989 case Builtins::kFastNormalize:
4990 return glsl::ExtInst::ExtInstNormalize;
SJW61531372020-06-09 07:31:08 -05004991 case Builtins::kSpirvPack:
4992 return glsl::ExtInst::ExtInstPackHalf2x16;
4993 case Builtins::kSpirvUnpack:
4994 return glsl::ExtInst::ExtInstUnpackHalf2x16;
SJW2c317da2020-03-23 07:39:13 -05004995 default:
4996 break;
4997 }
4998
SJW61531372020-06-09 07:31:08 -05004999 if (func_info.getName().find("llvm.fmuladd.") == 0) {
5000 return glsl::ExtInst::ExtInstFma;
5001 }
5002 return kGlslExtInstBad;
David Neto3fbb4072017-10-16 11:28:14 -04005003}
5004
SJW61531372020-06-09 07:31:08 -05005005glsl::ExtInst SPIRVProducerPass::getIndirectExtInstEnum(
5006 const Builtins::FunctionInfo &func_info) {
5007 switch (func_info.getType()) {
SJW2c317da2020-03-23 07:39:13 -05005008 case Builtins::kClz:
5009 return glsl::ExtInst::ExtInstFindUMsb;
alan-baker2cecaa72020-11-05 14:05:20 -05005010 case Builtins::kCtz:
5011 return glsl::ExtInst::ExtInstFindILsb;
SJW2c317da2020-03-23 07:39:13 -05005012 case Builtins::kAcospi:
5013 return glsl::ExtInst::ExtInstAcos;
5014 case Builtins::kAsinpi:
5015 return glsl::ExtInst::ExtInstAsin;
5016 case Builtins::kAtanpi:
5017 return glsl::ExtInst::ExtInstAtan;
5018 case Builtins::kAtan2pi:
5019 return glsl::ExtInst::ExtInstAtan2;
5020 default:
5021 break;
5022 }
5023 return kGlslExtInstBad;
David Neto3fbb4072017-10-16 11:28:14 -04005024}
5025
SJW61531372020-06-09 07:31:08 -05005026glsl::ExtInst SPIRVProducerPass::getDirectOrIndirectExtInstEnum(
5027 const Builtins::FunctionInfo &func_info) {
5028 auto direct = getExtInstEnum(func_info);
David Neto3fbb4072017-10-16 11:28:14 -04005029 if (direct != kGlslExtInstBad)
5030 return direct;
SJW61531372020-06-09 07:31:08 -05005031 return getIndirectExtInstEnum(func_info);
David Neto22f144c2017-06-12 14:26:21 -04005032}
5033
David Neto22f144c2017-06-12 14:26:21 -04005034void SPIRVProducerPass::WriteOneWord(uint32_t Word) {
David Neto0676e6f2017-07-11 18:47:44 -04005035 binaryOut->write(reinterpret_cast<const char *>(&Word), sizeof(uint32_t));
David Neto22f144c2017-06-12 14:26:21 -04005036}
5037
SJW88ed5fe2020-05-11 12:40:57 -05005038void SPIRVProducerPass::WriteResultID(const SPIRVInstruction &Inst) {
SJW01901d92020-05-21 08:58:31 -05005039 WriteOneWord(Inst.getResultID().get());
David Neto22f144c2017-06-12 14:26:21 -04005040}
5041
SJW88ed5fe2020-05-11 12:40:57 -05005042void SPIRVProducerPass::WriteWordCountAndOpcode(const SPIRVInstruction &Inst) {
David Neto22f144c2017-06-12 14:26:21 -04005043 // High 16 bit : Word Count
5044 // Low 16 bit : Opcode
SJW88ed5fe2020-05-11 12:40:57 -05005045 uint32_t Word = Inst.getOpcode();
5046 const uint32_t count = Inst.getWordCount();
David Netoee2660d2018-06-28 16:31:29 -04005047 if (count > 65535) {
5048 errs() << "Word count limit of 65535 exceeded: " << count << "\n";
5049 llvm_unreachable("Word count too high");
5050 }
  Word |= count << 16;
David Neto22f144c2017-06-12 14:26:21 -04005052 WriteOneWord(Word);
5053}
5054
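// Writes a single operand: an id, a literal string packed four characters per
// word, or a 32-bit or 64-bit literal number.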
SJW88ed5fe2020-05-11 12:40:57 -05005055void SPIRVProducerPass::WriteOperand(const SPIRVOperand &Op) {
5056 SPIRVOperandType OpTy = Op.getType();
David Neto22f144c2017-06-12 14:26:21 -04005057 switch (OpTy) {
5058 default: {
5059 llvm_unreachable("Unsupported SPIRV Operand Type???");
5060 break;
5061 }
5062 case SPIRVOperandType::NUMBERID: {
SJW88ed5fe2020-05-11 12:40:57 -05005063 WriteOneWord(Op.getNumID());
David Neto22f144c2017-06-12 14:26:21 -04005064 break;
5065 }
5066 case SPIRVOperandType::LITERAL_STRING: {
SJW88ed5fe2020-05-11 12:40:57 -05005067 std::string Str = Op.getLiteralStr();
David Neto22f144c2017-06-12 14:26:21 -04005068 const char *Data = Str.c_str();
5069 size_t WordSize = Str.size() / 4;
5070 for (unsigned Idx = 0; Idx < WordSize; Idx++) {
5071 WriteOneWord(*reinterpret_cast<const uint32_t *>(&Data[4 * Idx]));
5072 }
5073
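    // Pack any trailing characters and pad with zeros. The final word is
    // written unconditionally so the string is always nul-terminated.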
5074 uint32_t Remainder = Str.size() % 4;
5075 uint32_t LastWord = 0;
5076 if (Remainder) {
5077 for (unsigned Idx = 0; Idx < Remainder; Idx++) {
5078 LastWord |= Data[4 * WordSize + Idx] << 8 * Idx;
5079 }
5080 }
5081
5082 WriteOneWord(LastWord);
5083 break;
5084 }
SJW88ed5fe2020-05-11 12:40:57 -05005085 case SPIRVOperandType::LITERAL_WORD: {
5086 WriteOneWord(Op.getLiteralNum()[0]);
5087 break;
5088 }
5089 case SPIRVOperandType::LITERAL_DWORD: {
5090 WriteOneWord(Op.getLiteralNum()[0]);
5091 WriteOneWord(Op.getLiteralNum()[1]);
David Neto22f144c2017-06-12 14:26:21 -04005092 break;
5093 }
5094 }
5095}
5096
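// Writes every instruction section of the module, in section order.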
5097void SPIRVProducerPass::WriteSPIRVBinary() {
SJW69939d52020-04-16 07:29:07 -05005098 for (int i = 0; i < kSectionCount; ++i) {
5099 WriteSPIRVBinary(SPIRVSections[i]);
5100 }
5101}
5102
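// Serializes each instruction. The switch groups opcodes by operand layout:
// instructions without a result id, instructions whose result id precedes the
// operands (types, strings, labels, ...), and instructions whose result type
// operand precedes the result id.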
5103void SPIRVProducerPass::WriteSPIRVBinary(SPIRVInstructionList &SPIRVInstList) {
SJW88ed5fe2020-05-11 12:40:57 -05005104 for (const auto &Inst : SPIRVInstList) {
5105 const auto &Ops = Inst.getOperands();
5106 spv::Op Opcode = static_cast<spv::Op>(Inst.getOpcode());
David Neto22f144c2017-06-12 14:26:21 -04005107
5108 switch (Opcode) {
5109 default: {
David Neto5c22a252018-03-15 16:07:41 -04005110 errs() << "Unsupported SPIR-V instruction opcode " << int(Opcode) << "\n";
David Neto22f144c2017-06-12 14:26:21 -04005111 llvm_unreachable("Unsupported SPIRV instruction");
5112 break;
5113 }
Marco Antognini68e5c512020-09-09 16:08:57 +01005114 case spv::OpUnreachable:
David Neto22f144c2017-06-12 14:26:21 -04005115 case spv::OpCapability:
5116 case spv::OpExtension:
5117 case spv::OpMemoryModel:
5118 case spv::OpEntryPoint:
5119 case spv::OpExecutionMode:
5120 case spv::OpSource:
5121 case spv::OpDecorate:
5122 case spv::OpMemberDecorate:
5123 case spv::OpBranch:
5124 case spv::OpBranchConditional:
5125 case spv::OpSelectionMerge:
5126 case spv::OpLoopMerge:
5127 case spv::OpStore:
5128 case spv::OpImageWrite:
5129 case spv::OpReturnValue:
5130 case spv::OpControlBarrier:
5131 case spv::OpMemoryBarrier:
5132 case spv::OpReturn:
5133 case spv::OpFunctionEnd:
alan-baker4986eff2020-10-29 13:38:00 -04005134 case spv::OpCopyMemory:
5135 case spv::OpAtomicStore: {
David Neto22f144c2017-06-12 14:26:21 -04005136 WriteWordCountAndOpcode(Inst);
5137 for (uint32_t i = 0; i < Ops.size(); i++) {
5138 WriteOperand(Ops[i]);
5139 }
5140 break;
5141 }
5142 case spv::OpTypeBool:
5143 case spv::OpTypeVoid:
5144 case spv::OpTypeSampler:
5145 case spv::OpLabel:
5146 case spv::OpExtInstImport:
5147 case spv::OpTypePointer:
5148 case spv::OpTypeRuntimeArray:
5149 case spv::OpTypeStruct:
5150 case spv::OpTypeImage:
5151 case spv::OpTypeSampledImage:
5152 case spv::OpTypeInt:
5153 case spv::OpTypeFloat:
5154 case spv::OpTypeArray:
5155 case spv::OpTypeVector:
alan-baker86ce19c2020-08-05 13:09:19 -04005156 case spv::OpTypeFunction:
5157 case spv::OpString: {
David Neto22f144c2017-06-12 14:26:21 -04005158 WriteWordCountAndOpcode(Inst);
5159 WriteResultID(Inst);
5160 for (uint32_t i = 0; i < Ops.size(); i++) {
5161 WriteOperand(Ops[i]);
5162 }
5163 break;
5164 }
5165 case spv::OpFunction:
5166 case spv::OpFunctionParameter:
5167 case spv::OpAccessChain:
5168 case spv::OpPtrAccessChain:
5169 case spv::OpInBoundsAccessChain:
5170 case spv::OpUConvert:
5171 case spv::OpSConvert:
5172 case spv::OpConvertFToU:
5173 case spv::OpConvertFToS:
5174 case spv::OpConvertUToF:
5175 case spv::OpConvertSToF:
5176 case spv::OpFConvert:
5177 case spv::OpConvertPtrToU:
5178 case spv::OpConvertUToPtr:
5179 case spv::OpBitcast:
alan-bakerc9c55ae2019-12-02 16:01:27 -05005180 case spv::OpFNegate:
David Neto22f144c2017-06-12 14:26:21 -04005181 case spv::OpIAdd:
alan-bakera52b7312020-10-26 08:58:51 -04005182 case spv::OpIAddCarry:
David Neto22f144c2017-06-12 14:26:21 -04005183 case spv::OpFAdd:
5184 case spv::OpISub:
alan-baker3f1bf492020-11-05 09:07:36 -05005185 case spv::OpISubBorrow:
David Neto22f144c2017-06-12 14:26:21 -04005186 case spv::OpFSub:
5187 case spv::OpIMul:
5188 case spv::OpFMul:
5189 case spv::OpUDiv:
5190 case spv::OpSDiv:
5191 case spv::OpFDiv:
5192 case spv::OpUMod:
5193 case spv::OpSRem:
5194 case spv::OpFRem:
Kévin Petit8a560882019-03-21 15:24:34 +00005195 case spv::OpUMulExtended:
5196 case spv::OpSMulExtended:
David Neto22f144c2017-06-12 14:26:21 -04005197 case spv::OpBitwiseOr:
5198 case spv::OpBitwiseXor:
5199 case spv::OpBitwiseAnd:
David Netoa394f392017-08-26 20:45:29 -04005200 case spv::OpNot:
David Neto22f144c2017-06-12 14:26:21 -04005201 case spv::OpShiftLeftLogical:
5202 case spv::OpShiftRightLogical:
5203 case spv::OpShiftRightArithmetic:
5204 case spv::OpBitCount:
David Netoab03f432017-11-03 17:00:44 -04005205 case spv::OpCompositeConstruct:
David Neto22f144c2017-06-12 14:26:21 -04005206 case spv::OpCompositeExtract:
5207 case spv::OpVectorExtractDynamic:
5208 case spv::OpCompositeInsert:
David Neto0a2f98d2017-09-15 19:38:40 -04005209 case spv::OpCopyObject:
David Neto22f144c2017-06-12 14:26:21 -04005210 case spv::OpVectorInsertDynamic:
5211 case spv::OpVectorShuffle:
5212 case spv::OpIEqual:
5213 case spv::OpINotEqual:
5214 case spv::OpUGreaterThan:
5215 case spv::OpUGreaterThanEqual:
5216 case spv::OpULessThan:
5217 case spv::OpULessThanEqual:
5218 case spv::OpSGreaterThan:
5219 case spv::OpSGreaterThanEqual:
5220 case spv::OpSLessThan:
5221 case spv::OpSLessThanEqual:
5222 case spv::OpFOrdEqual:
5223 case spv::OpFOrdGreaterThan:
5224 case spv::OpFOrdGreaterThanEqual:
5225 case spv::OpFOrdLessThan:
5226 case spv::OpFOrdLessThanEqual:
5227 case spv::OpFOrdNotEqual:
5228 case spv::OpFUnordEqual:
5229 case spv::OpFUnordGreaterThan:
5230 case spv::OpFUnordGreaterThanEqual:
5231 case spv::OpFUnordLessThan:
5232 case spv::OpFUnordLessThanEqual:
5233 case spv::OpFUnordNotEqual:
5234 case spv::OpExtInst:
5235 case spv::OpIsInf:
5236 case spv::OpIsNan:
5237 case spv::OpAny:
5238 case spv::OpAll:
5239 case spv::OpUndef:
5240 case spv::OpConstantNull:
5241 case spv::OpLogicalOr:
5242 case spv::OpLogicalAnd:
5243 case spv::OpLogicalNot:
5244 case spv::OpLogicalNotEqual:
5245 case spv::OpConstantComposite:
5246 case spv::OpSpecConstantComposite:
5247 case spv::OpConstantTrue:
5248 case spv::OpConstantFalse:
5249 case spv::OpConstant:
5250 case spv::OpSpecConstant:
5251 case spv::OpVariable:
5252 case spv::OpFunctionCall:
5253 case spv::OpSampledImage:
alan-baker75090e42020-02-20 11:21:04 -05005254 case spv::OpImageFetch:
alan-bakerf6bc8252020-09-23 14:58:55 -04005255 case spv::OpImageRead:
David Neto22f144c2017-06-12 14:26:21 -04005256 case spv::OpImageSampleExplicitLod:
David Neto5c22a252018-03-15 16:07:41 -04005257 case spv::OpImageQuerySize:
alan-bakerce179f12019-12-06 19:02:22 -05005258 case spv::OpImageQuerySizeLod:
David Neto22f144c2017-06-12 14:26:21 -04005259 case spv::OpSelect:
5260 case spv::OpPhi:
5261 case spv::OpLoad:
alan-baker4986eff2020-10-29 13:38:00 -04005262 case spv::OpAtomicLoad:
David Neto22f144c2017-06-12 14:26:21 -04005263 case spv::OpAtomicIAdd:
5264 case spv::OpAtomicISub:
5265 case spv::OpAtomicExchange:
5266 case spv::OpAtomicIIncrement:
5267 case spv::OpAtomicIDecrement:
5268 case spv::OpAtomicCompareExchange:
5269 case spv::OpAtomicUMin:
5270 case spv::OpAtomicSMin:
5271 case spv::OpAtomicUMax:
5272 case spv::OpAtomicSMax:
5273 case spv::OpAtomicAnd:
5274 case spv::OpAtomicOr:
5275 case spv::OpAtomicXor:
SJW806a5d82020-07-15 12:51:38 -05005276 case spv::OpDot:
5277 case spv::OpGroupNonUniformAll:
5278 case spv::OpGroupNonUniformAny:
5279 case spv::OpGroupNonUniformBroadcast:
5280 case spv::OpGroupNonUniformIAdd:
5281 case spv::OpGroupNonUniformFAdd:
5282 case spv::OpGroupNonUniformSMin:
5283 case spv::OpGroupNonUniformUMin:
5284 case spv::OpGroupNonUniformFMin:
5285 case spv::OpGroupNonUniformSMax:
5286 case spv::OpGroupNonUniformUMax:
5287 case spv::OpGroupNonUniformFMax: {
David Neto22f144c2017-06-12 14:26:21 -04005288 WriteWordCountAndOpcode(Inst);
5289 WriteOperand(Ops[0]);
5290 WriteResultID(Inst);
5291 for (uint32_t i = 1; i < Ops.size(); i++) {
5292 WriteOperand(Ops[i]);
5293 }
5294 break;
5295 }
5296 }
5297 }
5298}
Alan Baker9bf93fb2018-08-28 16:59:26 -04005299
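// Returns true if a variable of |type| may be zero-initialized, e.g. with
// OpConstantNull. Opaque image and sampler types are not nullable.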
alan-bakerb6b09dc2018-11-08 16:59:28 -05005300bool SPIRVProducerPass::IsTypeNullable(const Type *type) const {
Alan Baker9bf93fb2018-08-28 16:59:26 -04005301 switch (type->getTypeID()) {
alan-bakerb6b09dc2018-11-08 16:59:28 -05005302 case Type::HalfTyID:
5303 case Type::FloatTyID:
5304 case Type::DoubleTyID:
5305 case Type::IntegerTyID:
James Price59a1c752020-04-23 23:06:16 -04005306 case Type::FixedVectorTyID:
alan-bakerb6b09dc2018-11-08 16:59:28 -05005307 return true;
5308 case Type::PointerTyID: {
5309 const PointerType *pointer_type = cast<PointerType>(type);
5310 if (pointer_type->getPointerAddressSpace() !=
5311 AddressSpace::UniformConstant) {
5312 auto pointee_type = pointer_type->getPointerElementType();
5313 if (pointee_type->isStructTy() &&
5314 cast<StructType>(pointee_type)->isOpaque()) {
5315 // Images and samplers are not nullable.
5316 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005317 }
Alan Baker9bf93fb2018-08-28 16:59:26 -04005318 }
alan-bakerb6b09dc2018-11-08 16:59:28 -05005319 return true;
5320 }
5321 case Type::ArrayTyID:
alan-baker8eb435a2020-04-08 00:42:06 -04005322 return IsTypeNullable(type->getArrayElementType());
alan-bakerb6b09dc2018-11-08 16:59:28 -05005323 case Type::StructTyID: {
5324 const StructType *struct_type = cast<StructType>(type);
5325 // Images and samplers are not nullable.
5326 if (struct_type->isOpaque())
Alan Baker9bf93fb2018-08-28 16:59:26 -04005327 return false;
alan-bakerb6b09dc2018-11-08 16:59:28 -05005328 for (const auto element : struct_type->elements()) {
5329 if (!IsTypeNullable(element))
5330 return false;
5331 }
5332 return true;
5333 }
5334 default:
5335 return false;
Alan Baker9bf93fb2018-08-28 16:59:26 -04005336 }
5337}
Alan Bakerfcda9482018-10-02 17:09:59 -04005338
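// Reads the remapped-type metadata produced by the UBO layout transformations
// and caches per-member offsets and size triples (size in bits, store size,
// alloc size) for later queries.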
SJW77b87ad2020-04-21 14:37:52 -05005339void SPIRVProducerPass::PopulateUBOTypeMaps() {
Alan Bakerfcda9482018-10-02 17:09:59 -04005340 if (auto *offsets_md =
SJW77b87ad2020-04-21 14:37:52 -05005341 module->getNamedMetadata(clspv::RemappedTypeOffsetMetadataName())) {
    // Metadata is stored as key-value pair operands. The first element of
    // each operand is the type and the second is a vector of offsets.
5344 for (const auto *operand : offsets_md->operands()) {
5345 const auto *pair = cast<MDTuple>(operand);
5346 auto *type =
5347 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5348 const auto *offset_vector = cast<MDTuple>(pair->getOperand(1));
5349 std::vector<uint32_t> offsets;
5350 for (const Metadata *offset_md : offset_vector->operands()) {
5351 const auto *constant_md = cast<ConstantAsMetadata>(offset_md);
alan-bakerb6b09dc2018-11-08 16:59:28 -05005352 offsets.push_back(static_cast<uint32_t>(
5353 cast<ConstantInt>(constant_md->getValue())->getZExtValue()));
Alan Bakerfcda9482018-10-02 17:09:59 -04005354 }
5355 RemappedUBOTypeOffsets.insert(std::make_pair(type, offsets));
5356 }
5357 }
5358
5359 if (auto *sizes_md =
SJW77b87ad2020-04-21 14:37:52 -05005360 module->getNamedMetadata(clspv::RemappedTypeSizesMetadataName())) {
Alan Bakerfcda9482018-10-02 17:09:59 -04005361 // Metadata is stored as key-value pair operands. The first element of each
5362 // operand is the type and the second is a triple of sizes: type size in
5363 // bits, store size and alloc size.
5364 for (const auto *operand : sizes_md->operands()) {
5365 const auto *pair = cast<MDTuple>(operand);
5366 auto *type =
5367 cast<ConstantAsMetadata>(pair->getOperand(0))->getValue()->getType();
5368 const auto *size_triple = cast<MDTuple>(pair->getOperand(1));
5369 uint64_t type_size_in_bits =
5370 cast<ConstantInt>(
5371 cast<ConstantAsMetadata>(size_triple->getOperand(0))->getValue())
5372 ->getZExtValue();
5373 uint64_t type_store_size =
5374 cast<ConstantInt>(
5375 cast<ConstantAsMetadata>(size_triple->getOperand(1))->getValue())
5376 ->getZExtValue();
5377 uint64_t type_alloc_size =
5378 cast<ConstantInt>(
5379 cast<ConstantAsMetadata>(size_triple->getOperand(2))->getValue())
5380 ->getZExtValue();
5381 RemappedUBOTypeSizes.insert(std::make_pair(
5382 type, std::make_tuple(type_size_in_bits, type_store_size,
5383 type_alloc_size)));
5384 }
5385 }
5386}
5387
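// The three size queries below prefer the remapped UBO sizes when the type
// was given an explicit layout, and otherwise fall back to the DataLayout.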
5388uint64_t SPIRVProducerPass::GetTypeSizeInBits(Type *type,
5389 const DataLayout &DL) {
5390 auto iter = RemappedUBOTypeSizes.find(type);
5391 if (iter != RemappedUBOTypeSizes.end()) {
5392 return std::get<0>(iter->second);
5393 }
5394
5395 return DL.getTypeSizeInBits(type);
5396}
5397
5398uint64_t SPIRVProducerPass::GetTypeStoreSize(Type *type, const DataLayout &DL) {
5399 auto iter = RemappedUBOTypeSizes.find(type);
5400 if (iter != RemappedUBOTypeSizes.end()) {
5401 return std::get<1>(iter->second);
5402 }
5403
5404 return DL.getTypeStoreSize(type);
5405}
5406
5407uint64_t SPIRVProducerPass::GetTypeAllocSize(Type *type, const DataLayout &DL) {
5408 auto iter = RemappedUBOTypeSizes.find(type);
5409 if (iter != RemappedUBOTypeSizes.end()) {
5410 return std::get<2>(iter->second);
5411 }
5412
5413 return DL.getTypeAllocSize(type);
5414}
alan-baker5b86ed72019-02-15 08:26:50 -05005415
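// Returns the byte offset of |member| within |type|, preferring the remapped
// offsets when the struct was re-laid-out for explicit layout rules.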
Kévin Petitbbbda972020-03-03 19:16:31 +00005416uint32_t SPIRVProducerPass::GetExplicitLayoutStructMemberOffset(
5417 StructType *type, unsigned member, const DataLayout &DL) {
5418 const auto StructLayout = DL.getStructLayout(type);
5419 // Search for the correct offsets if this type was remapped.
5420 std::vector<uint32_t> *offsets = nullptr;
5421 auto iter = RemappedUBOTypeOffsets.find(type);
5422 if (iter != RemappedUBOTypeOffsets.end()) {
5423 offsets = &iter->second;
5424 }
5425 auto ByteOffset =
5426 static_cast<uint32_t>(StructLayout->getElementOffset(member));
5427 if (offsets) {
5428 ByteOffset = (*offsets)[member];
5429 }
5430
5431 return ByteOffset;
5432}
5433
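// Records the variable-pointers capability implied by a pointer access in
// |address_space|: storage buffer accesses only need
// VariablePointersStorageBuffer, anything else needs full VariablePointers.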
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005434void SPIRVProducerPass::setVariablePointersCapabilities(
5435 unsigned address_space) {
alan-baker5b86ed72019-02-15 08:26:50 -05005436 if (GetStorageClass(address_space) == spv::StorageClassStorageBuffer) {
SJW01901d92020-05-21 08:58:31 -05005437 setVariablePointersStorageBuffer();
alan-baker5b86ed72019-02-15 08:26:50 -05005438 } else {
SJW01901d92020-05-21 08:58:31 -05005439 setVariablePointers();
alan-baker5b86ed72019-02-15 08:26:50 -05005440 }
5441}
5442
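// Strips GEPs to find the underlying base pointer of |v|.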
Diego Novillo3cc8d7a2019-04-10 13:30:34 -04005443Value *SPIRVProducerPass::GetBasePointer(Value *v) {
alan-baker5b86ed72019-02-15 08:26:50 -05005444 if (auto *gep = dyn_cast<GetElementPtrInst>(v)) {
5445 return GetBasePointer(gep->getPointerOperand());
5446 }
5447
5448 // Conservatively return |v|.
5449 return v;
5450}
5451
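// Returns true if |lhs| and |rhs| are calls naming the same resource: the
// same descriptor set and binding for resource accessors, or the same spec id
// for workgroup variables.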
5452bool SPIRVProducerPass::sameResource(Value *lhs, Value *rhs) const {
5453 if (auto *lhs_call = dyn_cast<CallInst>(lhs)) {
5454 if (auto *rhs_call = dyn_cast<CallInst>(rhs)) {
alan-baker7506abb2020-09-10 15:02:55 -04005455 const auto &lhs_func_info =
5456 Builtins::Lookup(lhs_call->getCalledFunction());
5457 const auto &rhs_func_info =
5458 Builtins::Lookup(rhs_call->getCalledFunction());
SJW61531372020-06-09 07:31:08 -05005459 if (lhs_func_info.getType() == Builtins::kClspvResource &&
5460 rhs_func_info.getType() == Builtins::kClspvResource) {
alan-baker5b86ed72019-02-15 08:26:50 -05005461 // For resource accessors, match descriptor set and binding.
5462 if (lhs_call->getOperand(0) == rhs_call->getOperand(0) &&
5463 lhs_call->getOperand(1) == rhs_call->getOperand(1))
5464 return true;
SJW61531372020-06-09 07:31:08 -05005465 } else if (lhs_func_info.getType() == Builtins::kClspvLocal &&
5466 rhs_func_info.getType() == Builtins::kClspvLocal) {
alan-baker5b86ed72019-02-15 08:26:50 -05005467 // For workgroup resources, match spec id.
5468 if (lhs_call->getOperand(0) == rhs_call->getOperand(0))
5469 return true;
5470 }
5471 }
5472 }
5473
5474 return false;
5475}
5476
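// Returns true if the pointer-typed select or phi |inst| can only produce
// pointers into one object: every non-null (and, when the undef hack is
// enabled, non-undef) incoming base must be the same value or the same
// resource.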
5477bool SPIRVProducerPass::selectFromSameObject(Instruction *inst) {
5478 assert(inst->getType()->isPointerTy());
5479 assert(GetStorageClass(inst->getType()->getPointerAddressSpace()) ==
5480 spv::StorageClassStorageBuffer);
5481 const bool hack_undef = clspv::Option::HackUndef();
5482 if (auto *select = dyn_cast<SelectInst>(inst)) {
5483 auto *true_base = GetBasePointer(select->getTrueValue());
5484 auto *false_base = GetBasePointer(select->getFalseValue());
5485
5486 if (true_base == false_base)
5487 return true;
5488
    // If either the true or false operand is null, then we satisfy the
    // same-object constraint.
5491 if (auto *true_cst = dyn_cast<Constant>(true_base)) {
5492 if (true_cst->isNullValue() || (hack_undef && isa<UndefValue>(true_base)))
5493 return true;
5494 }
5495
5496 if (auto *false_cst = dyn_cast<Constant>(false_base)) {
5497 if (false_cst->isNullValue() ||
5498 (hack_undef && isa<UndefValue>(false_base)))
5499 return true;
5500 }
5501
5502 if (sameResource(true_base, false_base))
5503 return true;
5504 } else if (auto *phi = dyn_cast<PHINode>(inst)) {
5505 Value *value = nullptr;
5506 bool ok = true;
5507 for (unsigned i = 0; ok && i != phi->getNumIncomingValues(); ++i) {
5508 auto *base = GetBasePointer(phi->getIncomingValue(i));
      // Null values satisfy the constraint of selecting from the same object.
5511 if (!value) {
5512 if (auto *cst = dyn_cast<Constant>(base)) {
5513 if (!cst->isNullValue() && !(hack_undef && isa<UndefValue>(base)))
5514 value = base;
5515 } else {
5516 value = base;
5517 }
5518 } else if (base != value) {
5519 if (auto *base_cst = dyn_cast<Constant>(base)) {
5520 if (base_cst->isNullValue() || (hack_undef && isa<UndefValue>(base)))
5521 continue;
5522 }
5523
5524 if (sameResource(value, base))
5525 continue;
5526
5527 // Values don't represent the same base.
5528 ok = false;
5529 }
5530 }
5531
5532 return ok;
5533 }
5534
5535 // Conservatively return false.
5536 return false;
5537}
alan-bakere9308012019-03-15 10:25:13 -04005538
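// Returns true if the global-address-space pointer argument |Arg| can be
// traced back, through its callers, to a resource accessor call whose
// coherent operand is set.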
5539bool SPIRVProducerPass::CalledWithCoherentResource(Argument &Arg) {
5540 if (!Arg.getType()->isPointerTy() ||
5541 Arg.getType()->getPointerAddressSpace() != clspv::AddressSpace::Global) {
5542 // Only SSBOs need to be annotated as coherent.
5543 return false;
5544 }
5545
5546 DenseSet<Value *> visited;
5547 std::vector<Value *> stack;
5548 for (auto *U : Arg.getParent()->users()) {
5549 if (auto *call = dyn_cast<CallInst>(U)) {
5550 stack.push_back(call->getOperand(Arg.getArgNo()));
5551 }
5552 }
5553
5554 while (!stack.empty()) {
5555 Value *v = stack.back();
5556 stack.pop_back();
5557
5558 if (!visited.insert(v).second)
5559 continue;
5560
5561 auto *resource_call = dyn_cast<CallInst>(v);
5562 if (resource_call &&
SJW61531372020-06-09 07:31:08 -05005563 Builtins::Lookup(resource_call->getCalledFunction()).getType() ==
5564 Builtins::kClspvResource) {
alan-bakere9308012019-03-15 10:25:13 -04005565 // If this is a resource accessor function, check if the coherent operand
5566 // is set.
5567 const auto coherent =
5568 unsigned(dyn_cast<ConstantInt>(resource_call->getArgOperand(5))
5569 ->getZExtValue());
5570 if (coherent == 1)
5571 return true;
5572 } else if (auto *arg = dyn_cast<Argument>(v)) {
5573 // If this is a function argument, trace through its callers.
alan-bakere98f3f92019-04-08 15:06:36 -04005574 for (auto U : arg->getParent()->users()) {
alan-bakere9308012019-03-15 10:25:13 -04005575 if (auto *call = dyn_cast<CallInst>(U)) {
5576 stack.push_back(call->getOperand(arg->getArgNo()));
5577 }
5578 }
5579 } else if (auto *user = dyn_cast<User>(v)) {
5580 // If this is a user, traverse all operands that could lead to resource
5581 // variables.
5582 for (unsigned i = 0; i != user->getNumOperands(); ++i) {
5583 Value *operand = user->getOperand(i);
5584 if (operand->getType()->isPointerTy() &&
5585 operand->getType()->getPointerAddressSpace() ==
5586 clspv::AddressSpace::Global) {
5587 stack.push_back(operand);
5588 }
5589 }
5590 }
5591 }
5592
5593 // No coherent resource variables encountered.
5594 return false;
5595}
alan-baker06cad652019-12-03 17:56:47 -05005596
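// Precomputes the structured control-flow information needed for SPIR-V: the
// merge and continue block for every loop header, and a merge block for
// conditional branches that are not loop back-edges and do not target a loop
// merge or continue.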
SJW77b87ad2020-04-21 14:37:52 -05005597void SPIRVProducerPass::PopulateStructuredCFGMaps() {
alan-baker06cad652019-12-03 17:56:47 -05005598 // First, track loop merges and continues.
5599 DenseSet<BasicBlock *> LoopMergesAndContinues;
SJW77b87ad2020-04-21 14:37:52 -05005600 for (auto &F : *module) {
alan-baker06cad652019-12-03 17:56:47 -05005601 if (F.isDeclaration())
5602 continue;
5603
5604 DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();
5605 const LoopInfo &LI = getAnalysis<LoopInfoWrapperPass>(F).getLoopInfo();
5606 std::deque<BasicBlock *> order;
5607 DenseSet<BasicBlock *> visited;
5608 clspv::ComputeStructuredOrder(&*F.begin(), &DT, LI, &order, &visited);
5609
5610 for (auto BB : order) {
5611 auto terminator = BB->getTerminator();
5612 auto branch = dyn_cast<BranchInst>(terminator);
5613 if (LI.isLoopHeader(BB)) {
5614 auto L = LI.getLoopFor(BB);
5615 BasicBlock *ContinueBB = nullptr;
5616 BasicBlock *MergeBB = nullptr;
5617
5618 MergeBB = L->getExitBlock();
5619 if (!MergeBB) {
          // The StructurizeCFG pass converts the CFG into triangle-shaped
          // regions with a single entry and exit, so a loop should not have
          // multiple exit blocks.
5623 llvm_unreachable("Loop has multiple exits???");
5624 }
5625
5626 if (L->isLoopLatch(BB)) {
5627 ContinueBB = BB;
5628 } else {
          // From SPIR-V spec section 2.11, the Continue Target must dominate
          // the back-edge block.
5631 BasicBlock *Header = L->getHeader();
5632 BasicBlock *Latch = L->getLoopLatch();
5633 for (auto *loop_block : L->blocks()) {
5634 if (loop_block == Header) {
5635 continue;
5636 }
5637
            // Check whether this block dominates the block with the
            // back-edge. The loop latch is the single block with a back-edge;
            // when possible, StructurizeCFG made the loop conform to this
            // requirement, otherwise |Latch| is nullptr.
5642 if (DT.dominates(loop_block, Latch)) {
5643 ContinueBB = loop_block;
5644 }
5645 }
5646
5647 if (!ContinueBB) {
5648 llvm_unreachable("Wrong continue block from loop");
5649 }
5650 }
5651
5652 // Record the continue and merge blocks.
5653 MergeBlocks[BB] = MergeBB;
5654 ContinueBlocks[BB] = ContinueBB;
5655 LoopMergesAndContinues.insert(MergeBB);
5656 LoopMergesAndContinues.insert(ContinueBB);
5657 } else if (branch && branch->isConditional()) {
5658 auto L = LI.getLoopFor(BB);
5659 bool HasBackedge = false;
5660 while (L && !HasBackedge) {
5661 if (L->isLoopLatch(BB)) {
5662 HasBackedge = true;
5663 }
5664 L = L->getParentLoop();
5665 }
5666
5667 if (!HasBackedge) {
5668 // Only need a merge if the branch doesn't include a loop break or
5669 // continue.
5670 auto true_bb = branch->getSuccessor(0);
5671 auto false_bb = branch->getSuccessor(1);
5672 if (!LoopMergesAndContinues.count(true_bb) &&
5673 !LoopMergesAndContinues.count(false_bb)) {
5674 // StructurizeCFG pass already manipulated CFG. Just use false block
5675 // of branch instruction as merge block.
5676 MergeBlocks[BB] = false_bb;
5677 }
5678 }
5679 }
5680 }
5681 }
5682}
alan-baker86ce19c2020-08-05 13:09:19 -04005683
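// Lazily declares the SPV_KHR_non_semantic_info extension and imports the
// NonSemantic.ClspvReflection.1 instruction set, returning the import id.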
5684SPIRVID SPIRVProducerPass::getReflectionImport() {
5685 if (!ReflectionID.isValid()) {
5686 addSPIRVInst<kExtensions>(spv::OpExtension, "SPV_KHR_non_semantic_info");
5687 ReflectionID = addSPIRVInst<kImports>(spv::OpExtInstImport,
5688 "NonSemantic.ClspvReflection.1");
5689 }
5690 return ReflectionID;
5691}
5692
5693void SPIRVProducerPass::GenerateReflection() {
5694 GenerateKernelReflection();
5695 GeneratePushConstantReflection();
5696 GenerateSpecConstantReflection();
5697}
5698
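// Emits one reflection OpExtInst per module-scope push constant, reporting
// its offset and size. The kernel-argument push constant member is skipped
// here.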
5699void SPIRVProducerPass::GeneratePushConstantReflection() {
5700 if (auto GV = module->getGlobalVariable(clspv::PushConstantsVariableName())) {
5701 auto const &DL = module->getDataLayout();
5702 auto MD = GV->getMetadata(clspv::PushConstantsMetadataName());
5703 auto STy = cast<StructType>(GV->getValueType());
5704
5705 for (unsigned i = 0; i < STy->getNumElements(); i++) {
5706 auto pc = static_cast<clspv::PushConstant>(
5707 mdconst::extract<ConstantInt>(MD->getOperand(i))->getZExtValue());
5708 if (pc == PushConstant::KernelArgument)
5709 continue;
5710
5711 auto memberType = STy->getElementType(i);
5712 auto offset = GetExplicitLayoutStructMemberOffset(STy, i, DL);
5713 unsigned previousOffset = 0;
5714 if (i > 0) {
5715 previousOffset = GetExplicitLayoutStructMemberOffset(STy, i - 1, DL);
5716 }
5717 auto size = static_cast<uint32_t>(GetTypeSizeInBits(memberType, DL)) / 8;
5718 assert(isValidExplicitLayout(*module, STy, i,
5719 spv::StorageClassPushConstant, offset,
5720 previousOffset));
5721
5722 reflection::ExtInst pc_inst = reflection::ExtInstMax;
5723 switch (pc) {
5724 case PushConstant::GlobalOffset:
5725 pc_inst = reflection::ExtInstPushConstantGlobalOffset;
5726 break;
5727 case PushConstant::EnqueuedLocalSize:
5728 pc_inst = reflection::ExtInstPushConstantEnqueuedLocalSize;
5729 break;
5730 case PushConstant::GlobalSize:
5731 pc_inst = reflection::ExtInstPushConstantGlobalSize;
5732 break;
5733 case PushConstant::RegionOffset:
5734 pc_inst = reflection::ExtInstPushConstantRegionOffset;
5735 break;
5736 case PushConstant::NumWorkgroups:
5737 pc_inst = reflection::ExtInstPushConstantNumWorkgroups;
5738 break;
5739 case PushConstant::RegionGroupOffset:
5740 pc_inst = reflection::ExtInstPushConstantRegionGroupOffset;
5741 break;
5742 default:
5743 llvm_unreachable("Unhandled push constant");
5744 break;
5745 }
5746
5747 auto import_id = getReflectionImport();
5748 SPIRVOperandVec Ops;
5749 Ops << getSPIRVType(Type::getVoidTy(module->getContext())) << import_id
5750 << pc_inst << getSPIRVInt32Constant(offset)
5751 << getSPIRVInt32Constant(size);
5752 addSPIRVInst(spv::OpExtInst, Ops);
5753 }
5754 }
5755}
5756
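// Emits reflection for the workgroup size, global offset and work dim spec
// constants, reporting the spec ids assigned to them. Local memory size spec
// constants are skipped; they are reported with their kernel arguments.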
5757void SPIRVProducerPass::GenerateSpecConstantReflection() {
5758 const uint32_t kMax = std::numeric_limits<uint32_t>::max();
5759 uint32_t wgsize_id[3] = {kMax, kMax, kMax};
5760 uint32_t global_offset_id[3] = {kMax, kMax, kMax};
5761 uint32_t work_dim_id = kMax;
5762 for (auto pair : clspv::GetSpecConstants(module)) {
5763 auto kind = pair.first;
5764 auto id = pair.second;
5765
5766 // Local memory size is only used for kernel arguments.
5767 if (kind == SpecConstant::kLocalMemorySize)
5768 continue;
5769
5770 switch (kind) {
5771 case SpecConstant::kWorkgroupSizeX:
5772 wgsize_id[0] = id;
5773 break;
5774 case SpecConstant::kWorkgroupSizeY:
5775 wgsize_id[1] = id;
5776 break;
5777 case SpecConstant::kWorkgroupSizeZ:
5778 wgsize_id[2] = id;
5779 break;
5780 case SpecConstant::kGlobalOffsetX:
5781 global_offset_id[0] = id;
5782 break;
5783 case SpecConstant::kGlobalOffsetY:
5784 global_offset_id[1] = id;
5785 break;
5786 case SpecConstant::kGlobalOffsetZ:
5787 global_offset_id[2] = id;
5788 break;
5789 case SpecConstant::kWorkDim:
5790 work_dim_id = id;
5791 break;
5792 default:
5793 llvm_unreachable("Unhandled spec constant");
5794 }
5795 }
5796
5797 auto import_id = getReflectionImport();
5798 auto void_id = getSPIRVType(Type::getVoidTy(module->getContext()));
5799 SPIRVOperandVec Ops;
5800 if (wgsize_id[0] != kMax) {
5801 assert(wgsize_id[1] != kMax);
5802 assert(wgsize_id[2] != kMax);
5803 Ops.clear();
5804 Ops << void_id << import_id << reflection::ExtInstSpecConstantWorkgroupSize
5805 << getSPIRVInt32Constant(wgsize_id[0])
5806 << getSPIRVInt32Constant(wgsize_id[1])
5807 << getSPIRVInt32Constant(wgsize_id[2]);
5808 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
5809 }
5810 if (global_offset_id[0] != kMax) {
5811 assert(global_offset_id[1] != kMax);
5812 assert(global_offset_id[2] != kMax);
5813 Ops.clear();
5814 Ops << void_id << import_id << reflection::ExtInstSpecConstantGlobalOffset
5815 << getSPIRVInt32Constant(global_offset_id[0])
5816 << getSPIRVInt32Constant(global_offset_id[1])
5817 << getSPIRVInt32Constant(global_offset_id[2]);
5818 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
5819 }
5820 if (work_dim_id != kMax) {
5821 Ops.clear();
5822 Ops << void_id << import_id << reflection::ExtInstSpecConstantWorkDim
5823 << getSPIRVInt32Constant(work_dim_id);
5824 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
5825 }
5826}
5827
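// Emits the per-kernel reflection stream: the kernel declaration, the
// required workgroup size when specified, and one argument instruction per
// kernel argument, using the argument-map metadata when present and the
// resource variable info otherwise.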
5828void SPIRVProducerPass::GenerateKernelReflection() {
5829 const auto &DL = module->getDataLayout();
5830 auto import_id = getReflectionImport();
5831 auto void_id = getSPIRVType(Type::getVoidTy(module->getContext()));
5832
5833 for (auto &F : *module) {
5834 if (F.isDeclaration() || F.getCallingConv() != CallingConv::SPIR_KERNEL) {
5835 continue;
5836 }
5837
5838 // OpString for the kernel name.
5839 auto kernel_name =
5840 addSPIRVInst<kDebug>(spv::OpString, F.getName().str().c_str());
5841
5842 // Kernel declaration
5843 // Ops[0] = void type
5844 // Ops[1] = reflection ext import
5845 // Ops[2] = function id
5846 // Ops[3] = kernel name
5847 SPIRVOperandVec Ops;
5848 Ops << void_id << import_id << reflection::ExtInstKernel << ValueMap[&F]
5849 << kernel_name;
5850 auto kernel_decl = addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
5851
5852 // Generate the required workgroup size property if it was specified.
5853 if (const MDNode *MD = F.getMetadata("reqd_work_group_size")) {
5854 uint32_t CurXDimCst = static_cast<uint32_t>(
5855 mdconst::extract<ConstantInt>(MD->getOperand(0))->getZExtValue());
5856 uint32_t CurYDimCst = static_cast<uint32_t>(
5857 mdconst::extract<ConstantInt>(MD->getOperand(1))->getZExtValue());
5858 uint32_t CurZDimCst = static_cast<uint32_t>(
5859 mdconst::extract<ConstantInt>(MD->getOperand(2))->getZExtValue());
5860
5861 Ops.clear();
5862 Ops << void_id << import_id
5863 << reflection::ExtInstPropertyRequiredWorkgroupSize << kernel_decl
5864 << getSPIRVInt32Constant(CurXDimCst)
5865 << getSPIRVInt32Constant(CurYDimCst)
5866 << getSPIRVInt32Constant(CurZDimCst);
5867 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
5868 }
5869
5870 auto &resource_var_at_index = FunctionToResourceVarsMap[&F];
5871 auto *func_ty = F.getFunctionType();
5872
5873 // If we've clustered POD arguments, then argument details are in metadata.
5874 // If an argument maps to a resource variable, then get descriptor set and
5875 // binding from the resource variable. Other info comes from the metadata.
5876 const auto *arg_map = F.getMetadata(clspv::KernelArgMapMetadataName());
5877 auto local_spec_id_md =
5878 module->getNamedMetadata(clspv::LocalSpecIdMetadataName());
5879 if (arg_map) {
5880 for (const auto &arg : arg_map->operands()) {
5881 const MDNode *arg_node = dyn_cast<MDNode>(arg.get());
5882 assert(arg_node->getNumOperands() == 6);
5883 const auto name =
5884 dyn_cast<MDString>(arg_node->getOperand(0))->getString();
5885 const auto old_index =
5886 dyn_extract<ConstantInt>(arg_node->getOperand(1))->getZExtValue();
5887 // Remapped argument index
5888 const int new_index = static_cast<int>(
5889 dyn_extract<ConstantInt>(arg_node->getOperand(2))->getSExtValue());
5890 const auto offset =
5891 dyn_extract<ConstantInt>(arg_node->getOperand(3))->getZExtValue();
5892 const auto size =
5893 dyn_extract<ConstantInt>(arg_node->getOperand(4))->getZExtValue();
5894 const auto argKind = clspv::GetArgKindFromName(
5895 dyn_cast<MDString>(arg_node->getOperand(5))->getString().str());
5896
5897 // If this is a local memory argument, find the right spec id for this
5898 // argument.
5899 int64_t spec_id = -1;
5900 if (argKind == clspv::ArgKind::Local) {
5901 for (auto spec_id_arg : local_spec_id_md->operands()) {
5902 if ((&F == dyn_cast<Function>(
5903 dyn_cast<ValueAsMetadata>(spec_id_arg->getOperand(0))
5904 ->getValue())) &&
5905 (static_cast<uint64_t>(new_index) ==
5906 mdconst::extract<ConstantInt>(spec_id_arg->getOperand(1))
5907 ->getZExtValue())) {
5908 spec_id =
5909 mdconst::extract<ConstantInt>(spec_id_arg->getOperand(2))
5910 ->getSExtValue();
5911 break;
5912 }
5913 }
5914 }
5915
5916 // Generate the specific argument instruction.
5917 const uint32_t ordinal = static_cast<uint32_t>(old_index);
5918 const uint32_t arg_offset = static_cast<uint32_t>(offset);
5919 const uint32_t arg_size = static_cast<uint32_t>(size);
5920 uint32_t elem_size = 0;
5921 uint32_t descriptor_set = 0;
5922 uint32_t binding = 0;
5923 if (spec_id > 0) {
5924 elem_size = static_cast<uint32_t>(
5925 GetTypeAllocSize(func_ty->getParamType(unsigned(new_index))
5926 ->getPointerElementType(),
5927 DL));
5928 } else if (new_index >= 0) {
5929 auto *info = resource_var_at_index[new_index];
5930 assert(info);
5931 descriptor_set = info->descriptor_set;
5932 binding = info->binding;
5933 }
5934 AddArgumentReflection(kernel_decl, name.str(), argKind, ordinal,
5935 descriptor_set, binding, arg_offset, arg_size,
5936 static_cast<uint32_t>(spec_id), elem_size);
5937 }
5938 } else {
5939 // There is no argument map.
5940 // Take descriptor info from the resource variable calls.
5941 // Take argument name and size from the arguments list.
5942
5943 SmallVector<Argument *, 4> arguments;
5944 for (auto &arg : F.args()) {
5945 arguments.push_back(&arg);
5946 }
5947
5948 unsigned arg_index = 0;
5949 for (auto *info : resource_var_at_index) {
5950 if (info) {
5951 auto arg = arguments[arg_index];
5952 unsigned arg_size = 0;
5953 if (info->arg_kind == clspv::ArgKind::Pod ||
5954 info->arg_kind == clspv::ArgKind::PodUBO ||
5955 info->arg_kind == clspv::ArgKind::PodPushConstant) {
5956 arg_size =
5957 static_cast<uint32_t>(DL.getTypeStoreSize(arg->getType()));
5958 }
5959
          // Local pointer arguments are unused in this case; offset, spec_id
          // and elem_size are always 0.
5962 AddArgumentReflection(kernel_decl, arg->getName().str(),
5963 info->arg_kind, arg_index, info->descriptor_set,
5964 info->binding, 0, arg_size, 0, 0);
5965 }
5966 arg_index++;
5967 }
5968 // Generate mappings for pointer-to-local arguments.
5969 for (arg_index = 0; arg_index < arguments.size(); ++arg_index) {
5970 Argument *arg = arguments[arg_index];
5971 auto where = LocalArgSpecIds.find(arg);
5972 if (where != LocalArgSpecIds.end()) {
5973 auto &local_arg_info = LocalSpecIdInfoMap[where->second];
5974
5975 // descriptor_set, binding, offset and size are always 0.
5976 AddArgumentReflection(kernel_decl, arg->getName().str(),
5977 ArgKind::Local, arg_index, 0, 0, 0, 0,
5978 static_cast<uint32_t>(local_arg_info.spec_id),
5979 static_cast<uint32_t>(GetTypeAllocSize(
5980 local_arg_info.elem_type, DL)));
5981 }
5982 }
5983 }
5984 }
5985}
5986
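// Emits the ArgumentInfo and the kind-specific argument instruction for one
// kernel argument. Descriptor set/binding, offset/size, or spec id/element
// size operands are appended only for the argument kinds that use them.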
5987void SPIRVProducerPass::AddArgumentReflection(
5988 SPIRVID kernel_decl, const std::string &name, clspv::ArgKind arg_kind,
5989 uint32_t ordinal, uint32_t descriptor_set, uint32_t binding,
5990 uint32_t offset, uint32_t size, uint32_t spec_id, uint32_t elem_size) {
5991 // Generate ArgumentInfo for this argument.
5992 // TODO: generate remaining optional operands.
5993 auto import_id = getReflectionImport();
5994 auto arg_name = addSPIRVInst<kDebug>(spv::OpString, name.c_str());
5995 auto void_id = getSPIRVType(Type::getVoidTy(module->getContext()));
5996 SPIRVOperandVec Ops;
5997 Ops << void_id << import_id << reflection::ExtInstArgumentInfo << arg_name;
5998 auto arg_info = addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
5999
6000 Ops.clear();
6001 Ops << void_id << import_id;
6002 reflection::ExtInst ext_inst = reflection::ExtInstMax;
6003 // Determine the extended instruction.
6004 switch (arg_kind) {
6005 case clspv::ArgKind::Buffer:
6006 ext_inst = reflection::ExtInstArgumentStorageBuffer;
6007 break;
6008 case clspv::ArgKind::BufferUBO:
6009 ext_inst = reflection::ExtInstArgumentUniform;
6010 break;
6011 case clspv::ArgKind::Local:
6012 ext_inst = reflection::ExtInstArgumentWorkgroup;
6013 break;
6014 case clspv::ArgKind::Pod:
6015 ext_inst = reflection::ExtInstArgumentPodStorageBuffer;
6016 break;
6017 case clspv::ArgKind::PodUBO:
6018 ext_inst = reflection::ExtInstArgumentPodUniform;
6019 break;
6020 case clspv::ArgKind::PodPushConstant:
6021 ext_inst = reflection::ExtInstArgumentPodPushConstant;
6022 break;
alan-bakerf6bc8252020-09-23 14:58:55 -04006023 case clspv::ArgKind::SampledImage:
alan-baker86ce19c2020-08-05 13:09:19 -04006024 ext_inst = reflection::ExtInstArgumentSampledImage;
6025 break;
alan-bakerf6bc8252020-09-23 14:58:55 -04006026 case clspv::ArgKind::StorageImage:
alan-baker86ce19c2020-08-05 13:09:19 -04006027 ext_inst = reflection::ExtInstArgumentStorageImage;
6028 break;
6029 case clspv::ArgKind::Sampler:
6030 ext_inst = reflection::ExtInstArgumentSampler;
6031 break;
6032 default:
6033 llvm_unreachable("Unhandled argument reflection");
6034 break;
6035 }
6036 Ops << ext_inst << kernel_decl << getSPIRVInt32Constant(ordinal);
6037
6038 // Add descriptor set and binding for applicable arguments.
6039 switch (arg_kind) {
6040 case clspv::ArgKind::Buffer:
6041 case clspv::ArgKind::BufferUBO:
6042 case clspv::ArgKind::Pod:
6043 case clspv::ArgKind::PodUBO:
alan-bakerf6bc8252020-09-23 14:58:55 -04006044 case clspv::ArgKind::SampledImage:
6045 case clspv::ArgKind::StorageImage:
alan-baker86ce19c2020-08-05 13:09:19 -04006046 case clspv::ArgKind::Sampler:
6047 Ops << getSPIRVInt32Constant(descriptor_set)
6048 << getSPIRVInt32Constant(binding);
6049 break;
6050 default:
6051 break;
6052 }
6053
6054 // Add remaining operands for arguments.
6055 switch (arg_kind) {
6056 case clspv::ArgKind::Local:
6057 Ops << getSPIRVInt32Constant(spec_id) << getSPIRVInt32Constant(elem_size);
6058 break;
6059 case clspv::ArgKind::Pod:
6060 case clspv::ArgKind::PodUBO:
6061 case clspv::ArgKind::PodPushConstant:
6062 Ops << getSPIRVInt32Constant(offset) << getSPIRVInt32Constant(size);
6063 break;
6064 default:
6065 break;
6066 }
6067 Ops << arg_info;
6068 addSPIRVInst<kReflection>(spv::OpExtInst, Ops);
6069}