// Copyright 2018 The Clspv Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include <climits>
#include <map>
#include <set>
#include <utility>
#include <vector>

#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/UniqueVector.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Module.h"
#include "llvm/Pass.h"
#include "llvm/Support/raw_ostream.h"

#include "clspv/Option.h"
#include "clspv/Passes.h"

#include "ArgKind.h"
#include "Constants.h"

using namespace llvm;

#define DEBUG_TYPE "directresourceaccess"

namespace {

cl::opt<bool> ShowDRA("show-dra", cl::init(false), cl::Hidden,
                      cl::desc("Show direct resource access details"));

using SamplerMapType = llvm::ArrayRef<std::pair<unsigned, std::string>>;

class DirectResourceAccessPass final : public ModulePass {
public:
  static char ID;
  DirectResourceAccessPass() : ModulePass(ID) {}
  bool runOnModule(Module &M) override;

private:
  // Return the functions reachable from entry point functions, where
  // callers appear before callees.  OpenCL C does not permit recursion
  // or function pointers, so this is always well defined.  The ordering
  // should be reproducible from one run to the next.
  UniqueVector<Function *> CallGraphOrderedFunctions(Module &);

  // For each argument that will map to a resource variable (descriptor),
  // try to rewrite the uses of the argument as a direct access of the
  // resource.  We can only do this if all the callers of the function pass
  // the same resource access value for that argument.  Returns true if the
  // module changed.
  bool RewriteResourceAccesses(Function *fn);

  // Rewrite uses of this resource-based arg if all the callers pass in the
  // same resource access.  Returns true if the module changed.
  bool RewriteAccessesForArg(Function *fn, int arg_index, Argument &arg);
};
} // namespace

char DirectResourceAccessPass::ID = 0;
static RegisterPass<DirectResourceAccessPass> X("DirectResourceAccessPass",
                                                "Direct resource access");

namespace clspv {
ModulePass *createDirectResourceAccessPass() {
  return new DirectResourceAccessPass();
}
} // namespace clspv

namespace {
bool DirectResourceAccessPass::runOnModule(Module &M) {
  bool Changed = false;

  if (clspv::Option::DirectResourceAccess()) {
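    // Process callers before callees: once a caller's own resource argument
    // has been rewritten to a direct access, its call sites pass that direct
    // access along, which is what lets the callee be rewritten in turn.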
    auto ordered_functions = CallGraphOrderedFunctions(M);
    for (auto *fn : ordered_functions) {
      Changed |= RewriteResourceAccesses(fn);
    }
  }

  return Changed;
}

UniqueVector<Function *>
DirectResourceAccessPass::CallGraphOrderedFunctions(Module &M) {
  // Use a topological sort.

  // Make an ordered list of all functions having bodies, with kernel entry
  // points listed first.
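  // UniqueVector assigns ids in insertion order starting at 1, so this also
  // fixes a deterministic numbering: entry points first, then the remaining
  // functions in module order.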
  UniqueVector<Function *> functions;
  SmallVector<Function *, 10> entry_points;
  for (Function &F : M) {
    if (F.isDeclaration()) {
      continue;
    }
    if (F.getCallingConv() == CallingConv::SPIR_KERNEL) {
      functions.insert(&F);
      entry_points.push_back(&F);
    }
  }
  // Add the remaining functions.
  for (Function &F : M) {
    if (F.isDeclaration()) {
      continue;
    }
    if (F.getCallingConv() != CallingConv::SPIR_KERNEL) {
      functions.insert(&F);
    }
  }

  // This will be a complete set of reversed edges, i.e. with all pairs
  // of (callee, caller).
  using Edge = std::pair<unsigned, unsigned>;
  auto make_edge = [&functions](Function *callee, Function *caller) {
    return std::pair<unsigned, unsigned>{functions.idFor(callee),
                                         functions.idFor(caller)};
  };
  std::set<Edge> reverse_edges;
  // Map each function to the functions it calls, and populate |reverse_edges|.
  std::map<Function *, SmallVector<Function *, 3>> calls_functions;
  for (Function *callee : functions) {
    for (auto &use : callee->uses()) {
      if (auto *call = dyn_cast<CallInst>(use.getUser())) {
        Function *caller = call->getParent()->getParent();
        calls_functions[caller].push_back(callee);
        reverse_edges.insert(make_edge(callee, caller));
      }
    }
  }
  // Sort the callees in module-order.  This helps us produce a deterministic
  // result.
  for (auto &pair : calls_functions) {
    auto &callees = pair.second;
    std::sort(callees.begin(), callees.end(),
              [&functions](Function *lhs, Function *rhs) {
                return functions.idFor(lhs) < functions.idFor(rhs);
              });
  }

  // Use Kahn's algorithm for topological sort.
  UniqueVector<Function *> result;
  SmallVector<Function *, 10> work_list(entry_points.begin(),
                                        entry_points.end());
  while (!work_list.empty()) {
    Function *caller = work_list.back();
    work_list.pop_back();
    result.insert(caller);
    auto &callees = calls_functions[caller];
    for (auto *callee : callees) {
      reverse_edges.erase(make_edge(callee, caller));
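      // Edges sort by callee id first, so any remaining reverse edges for
      // |callee| are contiguous.  idFor(nullptr) is 0, below every valid id,
      // so lower_bound finds the first remaining edge for |callee|, if any.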
      auto lower_bound = reverse_edges.lower_bound(make_edge(callee, nullptr));
      if (lower_bound == reverse_edges.end() ||
          lower_bound->first != functions.idFor(callee)) {
        // Callee has no other unvisited callers.
        work_list.push_back(callee);
      }
    }
  }
  // If reverse_edges is not empty then there was a cycle.  But we don't care
  // about that erroneous case.

  if (ShowDRA) {
    outs() << "DRA: Ordered functions:\n";
    for (Function *fun : result) {
      outs() << "DRA: " << fun->getName() << "\n";
    }
  }
  return result;
}

bool DirectResourceAccessPass::RewriteResourceAccesses(Function *fn) {
  bool Changed = false;
  int arg_index = 0;
  for (Argument &arg : fn->args()) {
    switch (clspv::GetArgKindForType(arg.getType())) {
    case clspv::ArgKind::Buffer:
    case clspv::ArgKind::ReadOnlyImage:
    case clspv::ArgKind::WriteOnlyImage:
    case clspv::ArgKind::Sampler:
    case clspv::ArgKind::Local:
      Changed |= RewriteAccessesForArg(fn, arg_index, arg);
      break;
    default:
      // Other kinds (e.g. plain POD arguments) do not map to resource
      // variables, so there is nothing to rewrite.
      break;
    }
    arg_index++;
  }
  return Changed;
}

bool DirectResourceAccessPass::RewriteAccessesForArg(Function *fn,
                                                     int arg_index,
                                                     Argument &arg) {
  bool Changed = false;
  if (fn->use_empty()) {
    return false;
  }
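  // Note: kernel entry points normally have no callers, so their arguments
  // are left alone here; only the arguments of called functions get rewritten.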

  // We can convert a parameter to a direct resource access if it is
  // either a direct call to a clspv.resource.var.* or if it is a GEP of
  // such a thing (where the GEP can only have zero indices).
  struct ParamInfo {
    // The base value.  It is either a global variable or a resource-access
    // builtin function (@clspv.resource.var.* or @clspv.local.var.*).
    Value *base;
    // The descriptor set.  (For workgroup variables this holds the spec id.)
    uint32_t set;
    // The binding.  (Unused for workgroup variables; always 0 there.)
    uint32_t binding;
    // If the parameter is a GEP, then this is the number of zero-indices
    // the GEP used.
    unsigned num_gep_zeroes;
    // An example call to the accessor builtin that fits this pattern.  It is
    // used as a template when rewriting, and is null when the base is a
    // global variable.
    CallInst *sample_call;
  };
  // The common valid parameter info across all the callers seen so far.
  bool seen_one = false;
  ParamInfo common;
  // Tries to merge the given parameter info into |common|.  If it is the first
  // time we've tried, then save it.  Returns true if there is no conflict.
  auto merge_param_info = [&seen_one, &common](const ParamInfo &pi) {
    if (!seen_one) {
      common = pi;
      seen_one = true;
      return true;
    }
    return pi.base == common.base && pi.set == common.set &&
           pi.binding == common.binding &&
           pi.num_gep_zeroes == common.num_gep_zeroes;
  };

  for (auto &use : fn->uses()) {
    if (auto *caller = dyn_cast<CallInst>(use.getUser())) {
      Value *value = caller->getArgOperand(arg_index);
      // We care about two cases:
      //  - a direct call to clspv.resource.var.* (or clspv.local.var.*)
      //  - a GEP with only zero indices, where the base pointer is such a
      //    call or a global variable

      // Unpack GEPs with zeros, if we can.  Rewrite |value| as we go along.
      unsigned num_gep_zeroes = 0;
      bool first_gep = true;
      for (auto *gep = dyn_cast<GetElementPtrInst>(value); gep;
           gep = dyn_cast<GetElementPtrInst>(value)) {
        if (!gep->hasAllZeroIndices()) {
          return false;
        }
        // For GEPs after the first one, ignore the leading "element" index,
        // since it folds into the last index of the previous (outer) GEP.
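        // For example, gep(gep(%x, 0, 0), 0, 0) is equivalent to
        // gep(%x, 0, 0, 0): three zero indices in total, not four.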
        num_gep_zeroes += gep->getNumIndices() + (first_gep ? 0 : -1);
        value = gep->getPointerOperand();
        first_gep = false;
      }
      if (auto *call = dyn_cast<CallInst>(value)) {
        // If the call is a call to a @clspv.resource.var.* function, then try
        // to merge it, assuming the given number of GEP zero-indices so far.
        if (call->getCalledFunction()->getName().startswith(
                clspv::ResourceAccessorFunction())) {
          const auto set = uint32_t(
              dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
          const auto binding = uint32_t(
              dyn_cast<ConstantInt>(call->getOperand(1))->getZExtValue());
          if (!merge_param_info({call->getCalledFunction(), set, binding,
                                 num_gep_zeroes, call}))
            return false;
        } else if (call->getCalledFunction()->getName().startswith(
                       clspv::WorkgroupAccessorFunction())) {
          const uint32_t spec_id = uint32_t(
              dyn_cast<ConstantInt>(call->getOperand(0))->getZExtValue());
          if (!merge_param_info({call->getCalledFunction(), spec_id, 0,
                                 num_gep_zeroes, call}))
            return false;
        } else {
          // A call, but not to a resource access builtin function.
          return false;
        }
      } else if (isa<GlobalValue>(value)) {
        if (!merge_param_info({value, 0, 0, num_gep_zeroes, nullptr}))
          return false;
      } else {
        // Neither an accessor builtin call nor a global variable.
        return false;
      }
    } else {
      // The function is used by something other than a call instruction, so
      // bail out without changing anything.
      return false;
    }
  }
  if (ShowDRA) {
    if (seen_one) {
      outs() << "DRA: Rewrite " << fn->getName() << " arg " << arg_index << " "
             << arg.getName() << ": " << common.base->getName() << " ("
             << common.set << "," << common.binding
             << ") zeroes: " << common.num_gep_zeroes << " sample-call ";
      if (common.sample_call)
        outs() << *common.sample_call << "\n";
      else
        outs() << "nullptr\n";
    }
  }

  // Now rewrite the argument, using the info in |common|.

  Changed = true;
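  // Materialize the replacement at the top of the entry block so the new
  // instructions dominate every use of |arg| in this function.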
  IRBuilder<> Builder(fn->getParent()->getContext());
  auto *zero = Builder.getInt32(0);
  Builder.SetInsertPoint(fn->getEntryBlock().getFirstNonPHI());

  Value *replacement = common.base;
  if (Function *function = dyn_cast<Function>(replacement)) {
    // Create the call.
    SmallVector<Value *, 8> args(common.sample_call->arg_begin(),
                                 common.sample_call->arg_end());
    replacement = Builder.CreateCall(function, args);
    if (ShowDRA) {
      outs() << "DRA: Replace: call " << *replacement << "\n";
    }
  }
  if (common.num_gep_zeroes) {
    SmallVector<Value *, 3> zeroes;
    for (unsigned i = 0; i < common.num_gep_zeroes; i++) {
      zeroes.push_back(zero);
    }
    // Builder.CreateGEP is not used to avoid creating a GEPConstantExpr in the
    // case of global variables.
    replacement = GetElementPtrInst::Create(nullptr, replacement, zeroes);
    Builder.Insert(cast<Instruction>(replacement));
    if (ShowDRA) {
      outs() << "DRA: Replace: gep " << *replacement << "\n";
    }
  }
  arg.replaceAllUsesWith(replacement);

  return Changed;
}

} // namespace