// Copyright 2020 The SwiftShader Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "LLVMReactor.hpp"

#include "Debug.hpp"
#include "ExecutableMemory.hpp"
#include "Routine.hpp"

// TODO(b/143539525): Eliminate when warning has been fixed.
#ifdef _MSC_VER
__pragma(warning(push))
    __pragma(warning(disable : 4146))  // unary minus operator applied to unsigned type, result still unsigned
#endif

#include "llvm/ExecutionEngine/Orc/CompileUtils.h"
#include "llvm/ExecutionEngine/Orc/IRCompileLayer.h"
#include "llvm/ExecutionEngine/Orc/RTDyldObjectLinkingLayer.h"
#include "llvm/ExecutionEngine/SectionMemoryManager.h"
#include "llvm/IR/LegacyPassManager.h"
#include "llvm/Support/TargetSelect.h"
#include "llvm/Transforms/InstCombine/InstCombine.h"
#include "llvm/Transforms/Scalar.h"
#include "llvm/Transforms/Scalar/GVN.h"

#ifdef _MSC_VER
    __pragma(warning(pop))
#endif

#if defined(_WIN64)
extern "C" void __chkstk();
#elif defined(_WIN32)
extern "C" void _chkstk();
#endif

#if __has_feature(memory_sanitizer)
#    include <sanitizer/msan_interface.h>
#endif

#ifdef __ARM_EABI__
extern "C" signed __aeabi_idivmod();
#endif

namespace {

// JITGlobals is a singleton that holds all the immutable machine specific
// information for the host device.
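// It is constructed lazily on first use; the function-local static in get()
// makes that one-time initialization thread-safe.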
class JITGlobals
{
public:
    static JITGlobals *get();

    llvm::orc::JITTargetMachineBuilder getTargetMachineBuilder(rr::Optimization::Level optLevel) const;
    const llvm::DataLayout &getDataLayout() const;
    const llvm::Triple getTargetTriple() const;

private:
    JITGlobals(const llvm::orc::JITTargetMachineBuilder &jtmb, const llvm::DataLayout &dataLayout);

    static llvm::CodeGenOpt::Level toLLVM(rr::Optimization::Level level);
    const llvm::orc::JITTargetMachineBuilder jtmb;
    const llvm::DataLayout dataLayout;
};

JITGlobals *JITGlobals::get()
{
    static JITGlobals instance = [] {
        llvm::InitializeNativeTarget();
        llvm::InitializeNativeTargetAsmPrinter();
        llvm::InitializeNativeTargetAsmParser();

        auto jtmb = llvm::orc::JITTargetMachineBuilder::detectHost();
        ASSERT_MSG(jtmb, "JITTargetMachineBuilder::detectHost() failed");
        auto dataLayout = jtmb->getDefaultDataLayoutForTarget();
        ASSERT_MSG(dataLayout, "JITTargetMachineBuilder::getDefaultDataLayoutForTarget() failed");
        return JITGlobals(jtmb.get(), dataLayout.get());
    }();
    return &instance;
}

llvm::orc::JITTargetMachineBuilder JITGlobals::getTargetMachineBuilder(rr::Optimization::Level optLevel) const
{
    llvm::orc::JITTargetMachineBuilder out = jtmb;
    out.setCodeGenOptLevel(toLLVM(optLevel));
    return out;
}

const llvm::DataLayout &JITGlobals::getDataLayout() const
{
    return dataLayout;
}

const llvm::Triple JITGlobals::getTargetTriple() const
{
    return jtmb.getTargetTriple();
}

JITGlobals::JITGlobals(const llvm::orc::JITTargetMachineBuilder &jtmb, const llvm::DataLayout &dataLayout)
    : jtmb(jtmb)
    , dataLayout(dataLayout)
{
}

llvm::CodeGenOpt::Level JITGlobals::toLLVM(rr::Optimization::Level level)
{
    switch(level)
    {
        case rr::Optimization::Level::None: return ::llvm::CodeGenOpt::None;
        case rr::Optimization::Level::Less: return ::llvm::CodeGenOpt::Less;
        case rr::Optimization::Level::Default: return ::llvm::CodeGenOpt::Default;
        case rr::Optimization::Level::Aggressive: return ::llvm::CodeGenOpt::Aggressive;
        default: UNREACHABLE("Unknown Optimization Level %d", int(level));
    }
    return ::llvm::CodeGenOpt::Default;
}

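// MemoryMapper routes LLVM's SectionMemoryManager allocations through Reactor's
// own page allocation functions (rr::allocateMemoryPages and friends), so that
// JIT code and data pages receive the appropriate read/write/execute permissions.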
class MemoryMapper final : public llvm::SectionMemoryManager::MemoryMapper
{
public:
    MemoryMapper() {}
    ~MemoryMapper() final {}

    llvm::sys::MemoryBlock allocateMappedMemory(
        llvm::SectionMemoryManager::AllocationPurpose purpose,
        size_t numBytes, const llvm::sys::MemoryBlock *const nearBlock,
        unsigned flags, std::error_code &errorCode) final
    {
        errorCode = std::error_code();

        // Round up numBytes to page size.
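        // Note: the bit-mask rounding below assumes the page size is a power of two.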
        size_t pageSize = rr::memoryPageSize();
        numBytes = (numBytes + pageSize - 1) & ~(pageSize - 1);

        bool need_exec =
            purpose == llvm::SectionMemoryManager::AllocationPurpose::Code;
        void *addr = rr::allocateMemoryPages(
            numBytes, flagsToPermissions(flags), need_exec);
        if(!addr)
            return llvm::sys::MemoryBlock();
        return llvm::sys::MemoryBlock(addr, numBytes);
    }

    std::error_code protectMappedMemory(const llvm::sys::MemoryBlock &block,
                                        unsigned flags)
    {
        // Round down base address to align with a page boundary. This matches
        // DefaultMMapper behavior.
        void *addr = block.base();
        size_t size = block.allocatedSize();
        size_t pageSize = rr::memoryPageSize();
        addr = reinterpret_cast<void *>(
            reinterpret_cast<uintptr_t>(addr) & ~(pageSize - 1));
        size += reinterpret_cast<uintptr_t>(block.base()) -
                reinterpret_cast<uintptr_t>(addr);

        rr::protectMemoryPages(addr, size, flagsToPermissions(flags));
        return std::error_code();
    }

    std::error_code releaseMappedMemory(llvm::sys::MemoryBlock &block)
    {
        size_t size = block.allocatedSize();

        rr::deallocateMemoryPages(block.base(), size);
        return std::error_code();
    }

private:
    int flagsToPermissions(unsigned flags)
    {
        int result = 0;
        if(flags & llvm::sys::Memory::MF_READ)
        {
            result |= rr::PERMISSION_READ;
        }
        if(flags & llvm::sys::Memory::MF_WRITE)
        {
            result |= rr::PERMISSION_WRITE;
        }
        if(flags & llvm::sys::Memory::MF_EXEC)
        {
            result |= rr::PERMISSION_EXECUTE;
        }
        return result;
    }
};

template<typename T>
T alignUp(T val, T alignment)
{
    return alignment * ((val + alignment - 1) / alignment);
}

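// alignedAlloc() returns a pointer aligned to 'alignment' (which must be < 256) by
// over-allocating and storing the offset back to the raw allocation in the byte
// immediately preceding the returned pointer; alignedFree() reads that byte to
// recover the original allocation.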
void *alignedAlloc(size_t size, size_t alignment)
{
    ASSERT(alignment < 256);
    auto allocation = new uint8_t[size + sizeof(uint8_t) + alignment];
    auto aligned = allocation;
    aligned += sizeof(uint8_t);  // Make space for the base-address offset.
    aligned = reinterpret_cast<uint8_t *>(alignUp(reinterpret_cast<uintptr_t>(aligned), alignment));  // align
    auto offset = static_cast<uint8_t>(aligned - allocation);
    aligned[-1] = offset;
    return aligned;
}

void alignedFree(void *ptr)
{
    auto aligned = reinterpret_cast<uint8_t *>(ptr);
    auto offset = aligned[-1];
    auto allocation = aligned - offset;
    delete[] allocation;
}

template<typename T>
static void atomicLoad(void *ptr, void *ret, llvm::AtomicOrdering ordering)
{
    *reinterpret_cast<T *>(ret) = std::atomic_load_explicit<T>(reinterpret_cast<std::atomic<T> *>(ptr), rr::atomicOrdering(ordering));
}

template<typename T>
static void atomicStore(void *ptr, void *val, llvm::AtomicOrdering ordering)
{
    std::atomic_store_explicit<T>(reinterpret_cast<std::atomic<T> *>(ptr), *reinterpret_cast<T *>(val), rr::atomicOrdering(ordering));
}

#ifdef __ANDROID__
template<typename F>
static uint32_t sync_fetch_and_op(uint32_t volatile *ptr, uint32_t val, F f)
{
    // Build an arbitrary op out of looped CAS
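    // Keep retrying until the compare-and-swap succeeds; like the __sync_fetch_and_*
    // builtins, this returns the value the location held before the update.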
    for(;;)
    {
        uint32_t expected = *ptr;
        uint32_t desired = f(expected, val);

        if(expected == __sync_val_compare_and_swap_4(ptr, expected, desired))
        {
            return expected;
        }
    }
}
#endif

class ExternalSymbolGenerator : public llvm::orc::JITDylib::DefinitionGenerator
{
    struct Atomic
    {
        static void load(size_t size, void *ptr, void *ret, llvm::AtomicOrdering ordering)
        {
            switch(size)
            {
                case 1: atomicLoad<uint8_t>(ptr, ret, ordering); break;
                case 2: atomicLoad<uint16_t>(ptr, ret, ordering); break;
                case 4: atomicLoad<uint32_t>(ptr, ret, ordering); break;
                case 8: atomicLoad<uint64_t>(ptr, ret, ordering); break;
                default:
                    UNIMPLEMENTED_NO_BUG("Atomic::load(size: %d)", int(size));
            }
        }
        static void store(size_t size, void *ptr, void *ret, llvm::AtomicOrdering ordering)
        {
            switch(size)
            {
                case 1: atomicStore<uint8_t>(ptr, ret, ordering); break;
                case 2: atomicStore<uint16_t>(ptr, ret, ordering); break;
                case 4: atomicStore<uint32_t>(ptr, ret, ordering); break;
                case 8: atomicStore<uint64_t>(ptr, ret, ordering); break;
                default:
                    UNIMPLEMENTED_NO_BUG("Atomic::store(size: %d)", int(size));
            }
        }
    };

    static void nop() {}
    static void neverCalled() { UNREACHABLE("Should never be called"); }

    static void *coroutine_alloc_frame(size_t size) { return alignedAlloc(size, 16); }
    static void coroutine_free_frame(void *ptr) { alignedFree(ptr); }

#ifdef __ANDROID__
    // forwarders since we can't take address of builtins
    static void sync_synchronize() { __sync_synchronize(); }
    static uint32_t sync_fetch_and_add_4(uint32_t *ptr, uint32_t val) { return __sync_fetch_and_add_4(ptr, val); }
    static uint32_t sync_fetch_and_and_4(uint32_t *ptr, uint32_t val) { return __sync_fetch_and_and_4(ptr, val); }
    static uint32_t sync_fetch_and_or_4(uint32_t *ptr, uint32_t val) { return __sync_fetch_and_or_4(ptr, val); }
    static uint32_t sync_fetch_and_xor_4(uint32_t *ptr, uint32_t val) { return __sync_fetch_and_xor_4(ptr, val); }
    static uint32_t sync_fetch_and_sub_4(uint32_t *ptr, uint32_t val) { return __sync_fetch_and_sub_4(ptr, val); }
    static uint32_t sync_lock_test_and_set_4(uint32_t *ptr, uint32_t val) { return __sync_lock_test_and_set_4(ptr, val); }
    static uint32_t sync_val_compare_and_swap_4(uint32_t *ptr, uint32_t expected, uint32_t desired) { return __sync_val_compare_and_swap_4(ptr, expected, desired); }

    static uint32_t sync_fetch_and_max_4(uint32_t *ptr, uint32_t val)
    {
        return sync_fetch_and_op(ptr, val, [](int32_t a, int32_t b) { return std::max(a, b); });
    }
    static uint32_t sync_fetch_and_min_4(uint32_t *ptr, uint32_t val)
    {
        return sync_fetch_and_op(ptr, val, [](int32_t a, int32_t b) { return std::min(a, b); });
    }
    static uint32_t sync_fetch_and_umax_4(uint32_t *ptr, uint32_t val)
    {
        return sync_fetch_and_op(ptr, val, [](uint32_t a, uint32_t b) { return std::max(a, b); });
    }
    static uint32_t sync_fetch_and_umin_4(uint32_t *ptr, uint32_t val)
    {
        return sync_fetch_and_op(ptr, val, [](uint32_t a, uint32_t b) { return std::min(a, b); });
    }
#endif
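    // Resolver holds a table mapping the names of external functions which
    // JIT-compiled code may reference to their addresses in the host process.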
    class Resolver
    {
    public:
        using FunctionMap = llvm::StringMap<void *>;

        FunctionMap functions;

        Resolver()
        {
#ifdef ENABLE_RR_PRINT
            functions.try_emplace("rr::DebugPrintf", reinterpret_cast<void *>(rr::DebugPrintf));
#endif
            functions.try_emplace("nop", reinterpret_cast<void *>(nop));
            functions.try_emplace("floorf", reinterpret_cast<void *>(floorf));
            functions.try_emplace("nearbyintf", reinterpret_cast<void *>(nearbyintf));
            functions.try_emplace("truncf", reinterpret_cast<void *>(truncf));
            functions.try_emplace("printf", reinterpret_cast<void *>(printf));
            functions.try_emplace("puts", reinterpret_cast<void *>(puts));
            functions.try_emplace("fmodf", reinterpret_cast<void *>(fmodf));

            functions.try_emplace("sinf", reinterpret_cast<void *>(sinf));
            functions.try_emplace("cosf", reinterpret_cast<void *>(cosf));
            functions.try_emplace("asinf", reinterpret_cast<void *>(asinf));
            functions.try_emplace("acosf", reinterpret_cast<void *>(acosf));
            functions.try_emplace("atanf", reinterpret_cast<void *>(atanf));
            functions.try_emplace("sinhf", reinterpret_cast<void *>(sinhf));
            functions.try_emplace("coshf", reinterpret_cast<void *>(coshf));
            functions.try_emplace("tanhf", reinterpret_cast<void *>(tanhf));
            functions.try_emplace("asinhf", reinterpret_cast<void *>(asinhf));
            functions.try_emplace("acoshf", reinterpret_cast<void *>(acoshf));
            functions.try_emplace("atanhf", reinterpret_cast<void *>(atanhf));
            functions.try_emplace("atan2f", reinterpret_cast<void *>(atan2f));
            functions.try_emplace("powf", reinterpret_cast<void *>(powf));
            functions.try_emplace("expf", reinterpret_cast<void *>(expf));
            functions.try_emplace("logf", reinterpret_cast<void *>(logf));
            functions.try_emplace("exp2f", reinterpret_cast<void *>(exp2f));
            functions.try_emplace("log2f", reinterpret_cast<void *>(log2f));

            functions.try_emplace("sin", reinterpret_cast<void *>(static_cast<double (*)(double)>(sin)));
            functions.try_emplace("cos", reinterpret_cast<void *>(static_cast<double (*)(double)>(cos)));
            functions.try_emplace("asin", reinterpret_cast<void *>(static_cast<double (*)(double)>(asin)));
            functions.try_emplace("acos", reinterpret_cast<void *>(static_cast<double (*)(double)>(acos)));
            functions.try_emplace("atan", reinterpret_cast<void *>(static_cast<double (*)(double)>(atan)));
            functions.try_emplace("sinh", reinterpret_cast<void *>(static_cast<double (*)(double)>(sinh)));
            functions.try_emplace("cosh", reinterpret_cast<void *>(static_cast<double (*)(double)>(cosh)));
            functions.try_emplace("tanh", reinterpret_cast<void *>(static_cast<double (*)(double)>(tanh)));
            functions.try_emplace("asinh", reinterpret_cast<void *>(static_cast<double (*)(double)>(asinh)));
            functions.try_emplace("acosh", reinterpret_cast<void *>(static_cast<double (*)(double)>(acosh)));
            functions.try_emplace("atanh", reinterpret_cast<void *>(static_cast<double (*)(double)>(atanh)));
            functions.try_emplace("atan2", reinterpret_cast<void *>(static_cast<double (*)(double, double)>(atan2)));
            functions.try_emplace("pow", reinterpret_cast<void *>(static_cast<double (*)(double, double)>(pow)));
            functions.try_emplace("exp", reinterpret_cast<void *>(static_cast<double (*)(double)>(exp)));
            functions.try_emplace("log", reinterpret_cast<void *>(static_cast<double (*)(double)>(log)));
            functions.try_emplace("exp2", reinterpret_cast<void *>(static_cast<double (*)(double)>(exp2)));
            functions.try_emplace("log2", reinterpret_cast<void *>(static_cast<double (*)(double)>(log2)));

            functions.try_emplace("atomic_load", reinterpret_cast<void *>(Atomic::load));
            functions.try_emplace("atomic_store", reinterpret_cast<void *>(Atomic::store));

            // FIXME(b/119409619): use an allocator here so we can control all memory allocations
            functions.try_emplace("coroutine_alloc_frame", reinterpret_cast<void *>(coroutine_alloc_frame));
            functions.try_emplace("coroutine_free_frame", reinterpret_cast<void *>(coroutine_free_frame));

#ifdef __APPLE__
            functions.try_emplace("sincosf_stret", reinterpret_cast<void *>(__sincosf_stret));
#elif defined(__linux__)
            functions.try_emplace("sincosf", reinterpret_cast<void *>(sincosf));
#elif defined(_WIN64)
            functions.try_emplace("chkstk", reinterpret_cast<void *>(__chkstk));
#elif defined(_WIN32)
            functions.try_emplace("chkstk", reinterpret_cast<void *>(_chkstk));
#endif

#ifdef __ARM_EABI__
            functions.try_emplace("aeabi_idivmod", reinterpret_cast<void *>(__aeabi_idivmod));
#endif
#ifdef __ANDROID__
            functions.try_emplace("aeabi_unwind_cpp_pr0", reinterpret_cast<void *>(neverCalled));
            functions.try_emplace("sync_synchronize", reinterpret_cast<void *>(sync_synchronize));
            functions.try_emplace("sync_fetch_and_add_4", reinterpret_cast<void *>(sync_fetch_and_add_4));
            functions.try_emplace("sync_fetch_and_and_4", reinterpret_cast<void *>(sync_fetch_and_and_4));
            functions.try_emplace("sync_fetch_and_or_4", reinterpret_cast<void *>(sync_fetch_and_or_4));
            functions.try_emplace("sync_fetch_and_xor_4", reinterpret_cast<void *>(sync_fetch_and_xor_4));
            functions.try_emplace("sync_fetch_and_sub_4", reinterpret_cast<void *>(sync_fetch_and_sub_4));
            functions.try_emplace("sync_lock_test_and_set_4", reinterpret_cast<void *>(sync_lock_test_and_set_4));
            functions.try_emplace("sync_val_compare_and_swap_4", reinterpret_cast<void *>(sync_val_compare_and_swap_4));
            functions.try_emplace("sync_fetch_and_max_4", reinterpret_cast<void *>(sync_fetch_and_max_4));
            functions.try_emplace("sync_fetch_and_min_4", reinterpret_cast<void *>(sync_fetch_and_min_4));
            functions.try_emplace("sync_fetch_and_umax_4", reinterpret_cast<void *>(sync_fetch_and_umax_4));
            functions.try_emplace("sync_fetch_and_umin_4", reinterpret_cast<void *>(sync_fetch_and_umin_4));
#endif
#if __has_feature(memory_sanitizer)
            functions.try_emplace("msan_unpoison", reinterpret_cast<void *>(__msan_unpoison));
#endif
        }
    };

    llvm::Error tryToGenerate(llvm::orc::LookupKind kind,
                              llvm::orc::JITDylib &dylib,
                              llvm::orc::JITDylibLookupFlags flags,
                              const llvm::orc::SymbolLookupSet &set) override
    {
        static Resolver resolver;

        llvm::orc::SymbolMap symbols;

#if !defined(NDEBUG) || defined(DCHECK_ALWAYS_ON)
        std::string missing;
#endif  // !defined(NDEBUG) || defined(DCHECK_ALWAYS_ON)

        for(auto symbol : set)
        {
            auto name = symbol.first;

            // Trim off any underscores from the start of the symbol. LLVM likes
            // to prepend these on macOS.
            auto trimmed = (*name).drop_while([](char c) { return c == '_'; });
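            // For example, a Mach-O lookup for "_sinf" becomes "sinf", which matches
            // the entry registered in Resolver.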

            auto it = resolver.functions.find(trimmed.str());
            if(it != resolver.functions.end())
            {
                symbols[name] = llvm::JITEvaluatedSymbol(
                    static_cast<llvm::JITTargetAddress>(reinterpret_cast<uintptr_t>(it->second)),
                    llvm::JITSymbolFlags::Exported);
            }
#if !defined(NDEBUG) || defined(DCHECK_ALWAYS_ON)
            else
            {
                missing += (missing.empty() ? "'" : ", '") + (*name).str() + "'";
            }
#endif  // !defined(NDEBUG) || defined(DCHECK_ALWAYS_ON)
        }

#if !defined(NDEBUG) || defined(DCHECK_ALWAYS_ON)
        // Missing functions will likely make the module fail in exciting non-obvious ways.
        if(!missing.empty())
        {
            WARN("Missing external functions: %s", missing.c_str());
        }
#endif  // !defined(NDEBUG) || defined(DCHECK_ALWAYS_ON)

        if(symbols.empty())
        {
            return llvm::Error::success();
        }

        return dylib.define(llvm::orc::absoluteSymbols(std::move(symbols)));
    }
};

// JITRoutine is an rr::Routine that holds an LLVM JIT session, compiler and
// object layer, as each routine may require different target machine
// settings and no Reactor routine directly links against another.
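// Compilation flows through two ORC layers: the IRCompileLayer lowers the LLVM
// module to an object file, which the RTDyldObjectLinkingLayer then links into
// executable memory obtained from SectionMemoryManager via MemoryMapper.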
class JITRoutine : public rr::Routine
{
    using ObjLayer = llvm::orc::RTDyldObjectLinkingLayer;
    using CompileLayer = llvm::orc::IRCompileLayer;

    llvm::orc::RTDyldObjectLinkingLayer objectLayer;
    llvm::orc::IRCompileLayer compileLayer;
    llvm::orc::MangleAndInterner mangle;
    llvm::orc::ThreadSafeContext ctx;
    llvm::orc::ExecutionSession session;
    llvm::orc::JITDylib &dylib;
    std::vector<const void *> addresses;

public:
    JITRoutine(
        std::unique_ptr<llvm::Module> module,
        llvm::Function **funcs,
        size_t count,
        const rr::Config &config)
        : objectLayer(session, []() { return std::make_unique<llvm::SectionMemoryManager>(new MemoryMapper()); })
        , compileLayer(session, objectLayer, std::make_unique<llvm::orc::ConcurrentIRCompiler>(JITGlobals::get()->getTargetMachineBuilder(config.getOptimization().getLevel())))
        , mangle(session, JITGlobals::get()->getDataLayout())
        , ctx(std::make_unique<llvm::LLVMContext>())
        , dylib(session.createJITDylib("<routine>"))
        , addresses(count)
    {
#ifdef ENABLE_RR_DEBUG_INFO
        // TODO(b/165000222): Update this on next LLVM roll.
        // https://github.com/llvm/llvm-project/commit/98f2bb4461072347dcca7d2b1b9571b3a6525801
        // introduces RTDyldObjectLinkingLayer::registerJITEventListener().
        // The current API does not appear to have any way to bind the
        // rr::DebugInfo::NotifyFreeingObject event.
        objectLayer.setNotifyLoaded([](llvm::orc::VModuleKey,
                                       const llvm::object::ObjectFile &obj,
                                       const llvm::RuntimeDyld::LoadedObjectInfo &l) {
            static std::atomic<uint64_t> unique_key{ 0 };
            rr::DebugInfo::NotifyObjectEmitted(unique_key++, obj, l);
        });
#endif  // ENABLE_RR_DEBUG_INFO

        if(JITGlobals::get()->getTargetTriple().isOSBinFormatCOFF())
        {
            // Hack to support symbol visibility in COFF.
            // Matches hack in llvm::orc::LLJIT::createObjectLinkingLayer().
            // See documentation on these functions for more detail.
            objectLayer.setOverrideObjectFlagsWithResponsibilityFlags(true);
            objectLayer.setAutoClaimResponsibilityForObjectSymbols(true);
        }

        dylib.addGenerator(std::make_unique<ExternalSymbolGenerator>());
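        // The generator is consulted whenever a lookup encounters a symbol with no
        // definition in the dylib, letting ExternalSymbolGenerator supply the
        // addresses of host functions on demand.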

        llvm::SmallVector<llvm::orc::SymbolStringPtr, 8> names(count);
        for(size_t i = 0; i < count; i++)
        {
            auto func = funcs[i];
            func->setLinkage(llvm::GlobalValue::ExternalLinkage);
            func->setDoesNotThrow();
            if(!func->hasName())
            {
                func->setName("f" + llvm::Twine(i).str());
            }
            names[i] = mangle(func->getName());
        }

        // Once the module is passed to the compileLayer, the
        // llvm::Functions are freed. Make sure funcs are not referenced
        // after this point.
        funcs = nullptr;

        llvm::cantFail(compileLayer.add(dylib, llvm::orc::ThreadSafeModule(std::move(module), ctx)));
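        // Note: adding the module does not compile it immediately; the session.lookup()
        // calls below trigger materialization (compilation and linking).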

        // Resolve the function addresses.
        for(size_t i = 0; i < count; i++)
        {
            auto symbol = session.lookup({ &dylib }, names[i]);
            ASSERT_MSG(symbol, "Failed to lookup address of routine function %d: %s",
                       (int)i, llvm::toString(symbol.takeError()).c_str());
            addresses[i] = reinterpret_cast<void *>(static_cast<intptr_t>(symbol->getAddress()));
        }
    }

    const void *getEntry(int index) const override
    {
        return addresses[index];
    }
};

}  // anonymous namespace

namespace rr {

JITBuilder::JITBuilder(const rr::Config &config)
    : config(config)
    , module(new llvm::Module("", context))
    , builder(new llvm::IRBuilder<>(context))
{
    module->setDataLayout(JITGlobals::get()->getDataLayout());
}

void JITBuilder::optimize(const rr::Config &cfg)
{
#ifdef ENABLE_RR_DEBUG_INFO
    if(debugInfo != nullptr)
    {
        return;  // Don't optimize if we're generating debug info.
    }
#endif  // ENABLE_RR_DEBUG_INFO

    llvm::legacy::PassManager passManager;

    for(auto pass : cfg.getOptimization().getPasses())
    {
        switch(pass)
        {
            case rr::Optimization::Pass::Disabled: break;
            case rr::Optimization::Pass::CFGSimplification: passManager.add(llvm::createCFGSimplificationPass()); break;
            case rr::Optimization::Pass::LICM: passManager.add(llvm::createLICMPass()); break;
            case rr::Optimization::Pass::AggressiveDCE: passManager.add(llvm::createAggressiveDCEPass()); break;
            case rr::Optimization::Pass::GVN: passManager.add(llvm::createGVNPass()); break;
            case rr::Optimization::Pass::InstructionCombining: passManager.add(llvm::createInstructionCombiningPass()); break;
            case rr::Optimization::Pass::Reassociate: passManager.add(llvm::createReassociatePass()); break;
            case rr::Optimization::Pass::DeadStoreElimination: passManager.add(llvm::createDeadStoreEliminationPass()); break;
            case rr::Optimization::Pass::SCCP: passManager.add(llvm::createSCCPPass()); break;
            case rr::Optimization::Pass::ScalarReplAggregates: passManager.add(llvm::createSROAPass()); break;
            case rr::Optimization::Pass::EarlyCSEPass: passManager.add(llvm::createEarlyCSEPass()); break;
            default:
                UNREACHABLE("pass: %d", int(pass));
        }
    }

    passManager.run(*module);
}

std::shared_ptr<rr::Routine> JITBuilder::acquireRoutine(llvm::Function **funcs, size_t count, const rr::Config &cfg)
{
    ASSERT(module);
    return std::make_shared<JITRoutine>(std::move(module), funcs, count, cfg);
}

}  // namespace rr