1 /*
2 * Copyright (c) 2021-2025 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "optimizer_run.h"
17 #include "compile_method.h"
18 #include "mem/pool_manager.h"
19 #include "mem/code_allocator.h"
20 #include "include/class.h"
21 #include "include/method.h"
22 #include "optimizer/ir/ir_constructor.h"
23 #include "optimizer/ir/runtime_interface.h"
24 #include "optimizer/analysis/loop_analyzer.h"
25 #include "optimizer/pass.h"
26 #include "optimizer/ir_builder/ir_builder.h"
27 #include "utils/logger.h"
28 #include "code_info/code_info.h"
29 #include "events/events.h"
30 #include "trace/trace.h"
31 #include "optimizer/optimizations/regalloc/reg_alloc_linear_scan.h"
32 #include "optimizer/code_generator/codegen.h"
33
34 namespace ark::compiler {
35
36 #ifdef PANDA_COMPILER_DEBUG_INFO
37 static Span<uint8_t> EmitElf(Graph *graph, CodeAllocator *codeAllocator, ArenaAllocator *gdbDebugInfoAllocator,
38 const std::string &methodName);
39 #endif
40
// Records the compilation start timestamp (nanoseconds) for the measurement
// that EndCompilationWithStats() will finish.
void JITStats::SetCompilationStart()
{
    // A second start without an intervening End/Reset is a programming error.
    ASSERT(startTime_ == 0);
    startTime_ = time::GetCurrentTimeInNanos();
}
EndCompilationWithStats(const std::string & methodName,bool isOsr,size_t bcSize,size_t codeSize)46 void JITStats::EndCompilationWithStats(const std::string &methodName, bool isOsr, size_t bcSize, size_t codeSize)
47 {
48 ASSERT(startTime_ != 0);
49 auto time = time::GetCurrentTimeInNanos() - startTime_;
50 statsList_.push_back(Entry {PandaString(methodName, internalAllocator_->Adapter()), isOsr, bcSize, codeSize, time});
51 startTime_ = 0;
52 }
53
// Discards a pending measurement (e.g. when a compilation is aborted early)
// so the next SetCompilationStart() does not trip the nested-start assertion.
void JITStats::ResetCompilationStart()
{
    startTime_ = 0;
}
58
DumpCsv(char sep)59 void JITStats::DumpCsv(char sep)
60 {
61 ASSERT(g_options.WasSetCompilerDumpJitStatsCsv());
62 std::ofstream csv(g_options.GetCompilerDumpJitStatsCsv(), std::ofstream::trunc);
63 for (const auto &i : statsList_) {
64 csv << "\"" << i.methodName << "\"" << sep;
65 csv << i.isOsr << sep;
66 csv << i.bcSize << sep;
67 csv << i.codeSize << sep;
68 csv << i.time;
69 csv << '\n';
70 }
71 }
72
// Aggregated arguments for the EVENT_COMPILATION trace event and JIT stats.
// NOTE: member order is part of the interface - the struct is
// aggregate-initialized and decomposed with structured bindings elsewhere
// in this file, so do not reorder fields.
struct EventCompilationArgs {
    const std::string methodName_;     // full method name used for events/stats
    bool isOsr;                        // on-stack-replacement compilation
    size_t bcSize;                     // method bytecode size in bytes
    uintptr_t address;                 // installed entry point (0 when none)
    size_t codeSize;                   // generated machine code size (0 on failure)
    size_t infoSize;                   // CodeInfo size (0 on failure)
    events::CompilationStatus status;  // final outcome of the compilation
};
82
EndCompilation(const EventCompilationArgs & args,JITStats * jitStats)83 static void EndCompilation(const EventCompilationArgs &args, JITStats *jitStats)
84 {
85 [[maybe_unused]] auto [methodName, isOsr, bcSize, address, codeSize, infoSize, status] = args;
86 EVENT_COMPILATION(methodName, isOsr, bcSize, address, codeSize, infoSize, status);
87 if (jitStats != nullptr) {
88 ASSERT((codeSize != 0) == (status == events::CompilationStatus::COMPILED));
89 jitStats->EndCompilationWithStats(methodName, isOsr, bcSize, codeSize);
90 }
91 }
92
ChooseArch(Arch arch)93 Arch ChooseArch(Arch arch)
94 {
95 if (arch != Arch::NONE) {
96 return arch;
97 }
98
99 arch = RUNTIME_ARCH;
100 if (RUNTIME_ARCH == Arch::X86_64 && g_options.WasSetCompilerCrossArch()) {
101 arch = GetArchFromString(g_options.GetCompilerCrossArch());
102 }
103
104 return arch;
105 }
106
CheckSingleImplementation(Graph * graph)107 static bool CheckSingleImplementation(Graph *graph)
108 {
109 // Check that all methods that were inlined due to its single implementation property, still have this property,
110 // otherwise we must drop compiled code.
111 // NOTE(compiler): we need to reset hotness counter hereby avoid yet another warmup phase.
112 auto cha = graph->GetRuntime()->GetCha();
113 ASSERT(cha != nullptr);
114 for (auto siMethod : graph->GetSingleImplementationList()) {
115 if (!cha->IsSingleImplementation(siMethod)) {
116 LOG(WARNING, COMPILER)
117 << "Method lost single-implementation property after compilation, so we need to drop "
118 "whole compiled code: "
119 << graph->GetRuntime()->GetMethodFullName(siMethod);
120 return false;
121 }
122 }
123 return true;
124 }
125
// Copies generated machine code and its CodeInfo into executable memory.
// Memory layout: [CodePrefix][padding][code][padding][code info]; all
// offsets are recorded in the prefix. Returns an empty span when the code
// allocation fails.
static Span<uint8_t> EmitCode(const Graph *graph, CodeAllocator *allocator)
{
    // Code starts right after the prefix, aligned for the target arch.
    size_t codeOffset = RoundUp(CodePrefix::STRUCT_SIZE, GetCodeAlignment(graph->GetArch()));
    CodePrefix prefix;
    prefix.codeSize = graph->GetCode().size();
    // CodeInfo follows the code, padded to a uint32_t boundary.
    prefix.codeInfoOffset = codeOffset + RoundUp(graph->GetCode().size(), sizeof(uint32_t));
    prefix.codeInfoSize = graph->GetCodeInfoData().size();
    size_t codeSize = prefix.codeInfoOffset + prefix.codeInfoSize;
    auto memRange = allocator->AllocateCodeUnprotected(codeSize);
    if (memRange.GetSize() == 0) {
        // Allocation failure - the caller treats an empty span as an error.
        return Span<uint8_t> {};
    }

    auto data = reinterpret_cast<uint8_t *>(memRange.GetData());
    memcpy_s(data, sizeof(CodePrefix), &prefix, sizeof(CodePrefix));
    // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    memcpy_s(&data[codeOffset], graph->GetCode().size(), graph->GetCode().data(), graph->GetCode().size());
    // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    memcpy_s(&data[prefix.codeInfoOffset], graph->GetCodeInfoData().size(), graph->GetCodeInfoData().data(),
             graph->GetCodeInfoData().size());

    // Flip the region to executable (and read-only) after filling it.
    allocator->ProtectCode(memRange);

    return Span<uint8_t>(reinterpret_cast<uint8_t *>(memRange.GetData()), codeSize);
}
151
// Aggregated arguments for GetEntryPoint()/EmitElf(). NOTE: member order is
// part of the interface - the struct is aggregate-initialized and unpacked
// with structured bindings, so do not reorder fields.
struct EmitCodeArgs {
    Graph *graph;                          // compiled graph holding code and code info
    CodeAllocator *codeAllocator;          // allocator for executable memory
    ArenaAllocator *gdbDebugInfoAllocator; // arena for gdb jit_code_entry nodes
    const std::string methodName_;         // full method name (logging/events)
};
158
// Emits the compiled code into executable memory (optionally wrapped into an
// in-memory ELF with debug info for gdb) and returns the installed entry
// point, or nullptr if emission failed. On success reports the COMPILED
// event (and JIT stats if enabled).
static uint8_t *GetEntryPoint(EmitCodeArgs &&args, [[maybe_unused]] Method *method, [[maybe_unused]] bool isOsr,
                              JITStats *jitStats)
{
    [[maybe_unused]] auto [graph, codeAllocator, gdbDebugInfoAllocator, methodName] = args;
#ifdef PANDA_COMPILER_DEBUG_INFO
    // With --compiler-emit-debug-info the code is registered with gdb via a
    // small in-memory ELF; otherwise plain code emission is used.
    auto generatedData = g_options.IsCompilerEmitDebugInfo()
                             ? EmitElf(graph, codeAllocator, gdbDebugInfoAllocator, methodName)
                             : EmitCode(graph, codeAllocator);
#else
    auto generatedData = EmitCode(graph, codeAllocator);
#endif
    if (generatedData.Empty()) {
        LOG(INFO, COMPILER) << "Compilation failed due to memory allocation fail: " << methodName;
        return nullptr;
    }
    CodeInfo codeInfo(generatedData);
    LOG(INFO, COMPILER) << "Compiled code for '" << methodName << "' has been installed to "
                        << bit_cast<void *>(codeInfo.GetCode()) << ", code size " << codeInfo.GetCodeSize();

    // CodeInfo hands out const memory; the runtime needs a mutable entry point.
    auto entryPoint = const_cast<uint8_t *>(codeInfo.GetCode());
    EndCompilation(EventCompilationArgs {methodName, isOsr, method->GetCodeSize(),
                                         reinterpret_cast<uintptr_t>(entryPoint), codeInfo.GetCodeSize(),
                                         codeInfo.GetInfoSize(), events::CompilationStatus::COMPILED},
                   jitStats);
    return entryPoint;
}
185
// Prepares the graph (source language) and registers logging/reporting
// callbacks on the task runner, then delegates to the pass-level
// RunOptimizations() overload. A pipeline failure is fatal unless
// --compiler-ignore-failures is set.
template <TaskRunnerMode RUNNER_MODE>
static void RunOptimizations(CompilerTaskRunner<RUNNER_MODE> taskRunner, JITStats *jitStats)
{
    auto &taskCtx = taskRunner.GetContext();
    taskCtx.GetGraph()->SetLanguage(taskCtx.GetMethod()->GetClass()->GetSourceLang());

    taskRunner.AddCallbackOnSuccess([]([[maybe_unused]] CompilerContext<RUNNER_MODE> &compilerCtx) {
        LOG(DEBUG, COMPILER) << "The method " << compilerCtx.GetMethodName() << " is compiled";
    });
    taskRunner.AddCallbackOnFail([jitStats](CompilerContext<RUNNER_MODE> &compilerCtx) {
        if (!compiler::g_options.IsCompilerIgnoreFailures()) {
            // LOG(FATAL) aborts; the WARNING below is reached only in
            // ignore-failures mode.
            LOG(FATAL, COMPILER) << "RunOptimizations failed!";
        }
        LOG(WARNING, COMPILER) << "RunOptimizations failed!";
        EndCompilation(EventCompilationArgs {compilerCtx.GetMethodName(), compilerCtx.IsOsr(),
                                             compilerCtx.GetMethod()->GetCodeSize(), 0, 0, 0,
                                             events::CompilationStatus::FAILED},
                       jitStats);
    });

    // Run compiler optimizations over created graph
    RunOptimizations<RUNNER_MODE>(std::move(taskRunner));
}
209
// Aggregated arguments for CheckCompilation(). NOTE: member order is part of
// the interface - the struct is aggregate-initialized and unpacked with
// structured bindings, so do not reorder fields.
struct CheckCompilationArgs {
    RuntimeInterface *runtime;             // runtime used to publish entry points
    CodeAllocator *codeAllocator;          // allocator for executable memory
    ArenaAllocator *gdbDebugInfoAllocator; // arena for gdb jit_code_entry nodes
    JITStats *jitStats;                    // optional JIT statistics sink (may be null)
    bool isDynamic;                        // dynamic-language method (skips CHA check)
    Arch arch;                             // target architecture of the compiled code
};
218
// Post-compilation step: validates the compiled graph (CHA
// single-implementation property, native target arch), installs the code
// into executable memory and publishes the entry point (OSR table or
// regular compiled entry point). Returns true iff the code was installed.
template <TaskRunnerMode RUNNER_MODE>
static bool CheckCompilation(CheckCompilationArgs &&args, CompilerContext<RUNNER_MODE> &compilerCtx)
{
    auto [runtime, codeAllocator, gdbDebugInfoAllocator, jitStats, isDynamic, arch] = args;
    auto *graph = compilerCtx.GetGraph();

    ASSERT(graph != nullptr && graph->GetCode().data() != nullptr);

    auto &name = compilerCtx.GetMethodName();
    auto isOsr = compilerCtx.IsOsr();
    auto *method = compilerCtx.GetMethod();

    // CHA-based single-implementation inlining is not used for dynamic
    // languages, so the check only applies to static ones.
    if (!isDynamic && !CheckSingleImplementation(graph)) {
        EndCompilation(EventCompilationArgs {name, isOsr, method->GetCodeSize(), 0, 0, 0,
                                             events::CompilationStatus::FAILED_SINGLE_IMPL},
                       jitStats);
        return false;
    }

    // Drop non-native code in any case
    if (arch != RUNTIME_ARCH) {
        EndCompilation(
            EventCompilationArgs {name, isOsr, method->GetCodeSize(), 0, 0, 0, events::CompilationStatus::DROPPED},
            jitStats);
        return false;
    }

    auto entryPoint =
        GetEntryPoint(EmitCodeArgs {graph, codeAllocator, gdbDebugInfoAllocator, name}, method, isOsr, jitStats);
    if (entryPoint == nullptr) {
        return false;
    }
    if (isOsr) {
        if (!runtime->TrySetOsrCode(method, entryPoint)) {
            // Compiled code has been deoptimized, so we shouldn't install osr code.
            // NOTE(compiler): release compiled code memory, when CodeAllocator supports freeing the memory.
            return false;
        }
    } else {
        runtime->SetCompiledEntryPoint(method, entryPoint);
    }
    return true;
}
262
// Entry point of JIT compilation for a single method: applies the
// --compiler-regex* filters, starts JIT statistics, registers the
// success/fail/finalize callbacks, and kicks off IR construction via
// CompileInGraph(). The final compilation status is stored in the task
// context and delivered through the task runner.
template <TaskRunnerMode RUNNER_MODE>
void JITCompileMethod(RuntimeInterface *runtime, CodeAllocator *codeAllocator, ArenaAllocator *gdbDebugInfoAllocator,
                      JITStats *jitStats, CompilerTaskRunner<RUNNER_MODE> taskRunner)
{
    auto &taskCtx = taskRunner.GetContext();
    auto *taskMethod = taskCtx.GetMethod();
    taskCtx.SetMethodName(runtime->GetMethodFullName(taskMethod, false));
    auto &methodName = taskCtx.GetMethodName();

    SCOPED_TRACE_STREAM << "JIT compiling " << methodName;

    // The two regex options are mutually exclusive; they differ in whether
    // the method signature participates in the match.
    bool regex = g_options.WasSetCompilerRegex();
    bool regexWithSign = g_options.WasSetCompilerRegexWithSignature();
    ASSERT_PRINT(!(regex && regexWithSign),
                 "'--compiler-regex' and '--compiler-regex-with-signature' cannot be used together.");
    if ((regex || regexWithSign) && !g_options.MatchesRegex(runtime->GetMethodFullName(taskMethod, regexWithSign))) {
        LOG(DEBUG, COMPILER) << "Skip the method due to regexp mismatch: "
                             << runtime->GetMethodFullName(taskMethod, true);
        taskCtx.SetCompilationStatus(false);
        CompilerTaskRunner<RUNNER_MODE>::EndTask(std::move(taskRunner), false);
        return;
    }

    if (jitStats != nullptr) {
        jitStats->SetCompilationStart();
    }

    // Runs regardless of the outcome: drops the stats timer and destroys the
    // graph (if one was created) when the task ends.
    taskRunner.AddFinalize([jitStats](CompilerContext<RUNNER_MODE> &compilerCtx) {
        if (jitStats != nullptr) {
            // Reset compilation start time in all cases for consistency
            jitStats->ResetCompilationStart();
        }
        auto *graph = compilerCtx.GetGraph();
        if (graph != nullptr) {
            graph->~Graph();
        }
    });

    auto arch = ChooseArch(Arch::NONE);
    bool isDynamic = ark::panda_file::IsDynamicLanguage(taskMethod->GetClass()->GetSourceLang());

    // On successful optimization, validate and install the compiled code.
    taskRunner.AddCallbackOnSuccess([runtime, codeAllocator, gdbDebugInfoAllocator, jitStats, isDynamic,
                                     arch](CompilerContext<RUNNER_MODE> &compilerCtx) {
        bool compilationStatus = CheckCompilation<RUNNER_MODE>(
            CheckCompilationArgs {runtime, codeAllocator, gdbDebugInfoAllocator, jitStats, isDynamic, arch},
            compilerCtx);
        compilerCtx.SetCompilationStatus(compilationStatus);
    });
    taskRunner.AddCallbackOnFail(
        [](CompilerContext<RUNNER_MODE> &compilerCtx) { compilerCtx.SetCompilationStatus(false); });

    CompileInGraph<RUNNER_MODE>(runtime, isDynamic, arch, std::move(taskRunner), jitStats);
}
316
317 template <TaskRunnerMode RUNNER_MODE>
CompileInGraph(RuntimeInterface * runtime,bool isDynamic,Arch arch,CompilerTaskRunner<RUNNER_MODE> taskRunner,JITStats * jitStats)318 void CompileInGraph(RuntimeInterface *runtime, bool isDynamic, Arch arch, CompilerTaskRunner<RUNNER_MODE> taskRunner,
319 JITStats *jitStats)
320 {
321 auto &taskCtx = taskRunner.GetContext();
322 auto isOsr = taskCtx.IsOsr();
323 auto *method = taskCtx.GetMethod();
324 auto &methodName = taskCtx.GetMethodName();
325
326 LOG(INFO, COMPILER) << "Compile method" << (isOsr ? "(OSR)" : "") << ": " << methodName << " ("
327 << runtime->GetFileName(method) << ')';
328
329 #ifdef __APPLE__
330 LOG(DEBUG, COMPILER) << "Compilation unsupported for this platform!";
331 CompilerTaskRunner<RUNNER_MODE>::EndTask(std::move(taskRunner), false);
332 return;
333 #else
334 if (arch == Arch::NONE || !BackendSupport(arch)) {
335 LOG(DEBUG, COMPILER) << "Compilation unsupported for this platform!";
336 CompilerTaskRunner<RUNNER_MODE>::EndTask(std::move(taskRunner), false);
337 return;
338 }
339 #endif
340
341 auto *allocator = taskCtx.GetAllocator();
342 auto *localAllocator = taskCtx.GetLocalAllocator();
343 auto *graph = allocator->template New<Graph>(Graph::GraphArgs {allocator, localAllocator, arch, method, runtime},
344 nullptr, isOsr, isDynamic);
345 taskCtx.SetGraph(graph);
346 if (graph == nullptr) {
347 LOG(ERROR, COMPILER) << "Creating graph failed!";
348 EndCompilation(
349 EventCompilationArgs {methodName, isOsr, method->GetCodeSize(), 0, 0, 0, events::CompilationStatus::FAILED},
350 jitStats);
351 CompilerTaskRunner<RUNNER_MODE>::EndTask(std::move(taskRunner), false);
352 return;
353 }
354
355 taskRunner.SetTaskOnSuccess([jitStats](CompilerTaskRunner<RUNNER_MODE> nextRunner) {
356 RunOptimizations<RUNNER_MODE>(std::move(nextRunner), jitStats);
357 });
358
359 bool success = graph->template RunPass<IrBuilder>();
360 if (!success) {
361 if (!compiler::g_options.IsCompilerIgnoreFailures()) {
362 LOG(FATAL, COMPILER) << "IrBuilder failed!";
363 }
364 LOG(WARNING, COMPILER) << "IrBuilder failed!";
365 EndCompilation(
366 EventCompilationArgs {methodName, isOsr, method->GetCodeSize(), 0, 0, 0, events::CompilationStatus::FAILED},
367 jitStats);
368 };
369 CompilerTaskRunner<RUNNER_MODE>::EndTask(std::move(taskRunner), success);
370 }
371
// Explicit template instantiations for both task-runner modes, so that the
// template definitions above can stay in this translation unit.
template void JITCompileMethod<BACKGROUND_MODE>(RuntimeInterface *, CodeAllocator *, ArenaAllocator *, JITStats *,
                                                CompilerTaskRunner<BACKGROUND_MODE>);
template void JITCompileMethod<INPLACE_MODE>(RuntimeInterface *, CodeAllocator *, ArenaAllocator *, JITStats *,
                                             CompilerTaskRunner<INPLACE_MODE>);
template void CompileInGraph<BACKGROUND_MODE>(RuntimeInterface *, bool, Arch, CompilerTaskRunner<BACKGROUND_MODE>,
                                              JITStats *);
template void CompileInGraph<INPLACE_MODE>(RuntimeInterface *, bool, Arch, CompilerTaskRunner<INPLACE_MODE>,
                                           JITStats *);
template void RunOptimizations<BACKGROUND_MODE>(CompilerTaskRunner<BACKGROUND_MODE>, JITStats *);
template void RunOptimizations<INPLACE_MODE>(CompilerTaskRunner<INPLACE_MODE>, JITStats *);
template bool CheckCompilation<BACKGROUND_MODE>(CheckCompilationArgs &&args, CompilerContext<BACKGROUND_MODE> &);
template bool CheckCompilation<INPLACE_MODE>(CheckCompilationArgs &&args, CompilerContext<INPLACE_MODE> &);
384
385 } // namespace ark::compiler
386
387 #ifdef PANDA_COMPILER_DEBUG_INFO
388
389 #include "optimizer/ir/aot_data.h"
390 #include "tools/debug/jit_writer.h"
391
392 // Next "C"-code need for enable interaction with gdb
393 // Please read "JIT Compilation Interface" from gdb-documentation for more information
extern "C" {
// Gdb will replace implementation of this function
// NOLINTNEXTLINE(readability-identifier-naming)
void NO_INLINE __jit_debug_register_code(void)
{
    // The empty asm statement keeps the function from being optimized away:
    // gdb places a breakpoint here to learn about newly registered jit code.
    // NOLINTNEXTLINE(hicpp-no-assembler)
    asm("");
}

// Default version for descriptor (may be checked before register code)
// Fields: {version = 1, action_flag = JIT_NOACTION, relevant_entry, first_entry}.
// NOLINTNEXTLINE(modernize-use-nullptr, readability-identifier-naming)
jit_descriptor __jit_debug_descriptor = {1, JIT_NOACTION, NULL, NULL};  // CC-OFF(G.EXP.01-CPP) public API
}  // extern "C"
407
408 namespace ark::compiler {
409
410 // NOLINTNEXTLINE(fuchsia-statically-constructed-objects)
411 static os::memory::Mutex g_jitDebugLock;
412
413 // Will register jit-elf description in linked list
// Will register jit-elf description in linked list
// Prepends `entry` to the doubly-linked list in __jit_debug_descriptor and
// notifies the debugger via __jit_debug_register_code(), following the gdb
// "JIT Compilation Interface" protocol. The global lock serializes list
// updates across compiler threads.
static void RegisterJitCode(jit_code_entry *entry)
{
    ASSERT(g_options.IsCompilerEmitDebugInfo());

    os::memory::LockHolder lock(g_jitDebugLock);
    // Re-link list: the new entry becomes the head.
    entry->nextEntry = __jit_debug_descriptor.firstEntry;
    if (__jit_debug_descriptor.firstEntry != nullptr) {
        __jit_debug_descriptor.firstEntry->prevEntry = entry;
    }
    __jit_debug_descriptor.firstEntry = entry;

    // Fill last entry: point the debugger at the entry being registered.
    __jit_debug_descriptor.relevantEntry = entry;
    __jit_debug_descriptor.actionFlag = JIT_REGISTER_FN;

    // Call gdb-callback, then restore the descriptor to its idle state.
    __jit_debug_register_code();
    __jit_debug_descriptor.actionFlag = JIT_NOACTION;
    __jit_debug_descriptor.relevantEntry = nullptr;
}
435
436 // When code allocator cleaned - also will clean entry
// When code allocator cleaned - also will clean entry
// Unregisters every jit_code_entry from gdb, one at a time: each list head
// is marked relevant with JIT_UNREGISTER_FN and the debugger callback is
// invoked before moving to the next entry.
void CleanJitDebugCode()
{
    ASSERT(g_options.IsCompilerEmitDebugInfo());

    os::memory::LockHolder lock(g_jitDebugLock);
    __jit_debug_descriptor.actionFlag = JIT_UNREGISTER_FN;

    while (__jit_debug_descriptor.firstEntry != nullptr) {
        __jit_debug_descriptor.firstEntry->prevEntry = nullptr;
        __jit_debug_descriptor.relevantEntry = __jit_debug_descriptor.firstEntry;
        // Call gdb-callback
        __jit_debug_register_code();

        __jit_debug_descriptor.firstEntry = __jit_debug_descriptor.firstEntry->nextEntry;
    }

    // Restore the descriptor to its idle state.
    __jit_debug_descriptor.actionFlag = JIT_NOACTION;
    __jit_debug_descriptor.relevantEntry = nullptr;
}
456
457 // For each jit code - will generate small elf description and put them in gdb-special linked list.
// For each jit code - will generate small elf description and put them in gdb-special linked list.
// Writes the compiled method into an in-memory ELF (via JitDebugWriter),
// allocates a jit_code_entry pointing at that ELF and registers it with gdb.
// Returns the span of the installed code, or an empty span on any failure.
static Span<uint8_t> EmitElf(Graph *graph, CodeAllocator *codeAllocator, ArenaAllocator *gdbDebugInfoAllocator,
                             const std::string &methodName)
{
    ASSERT(g_options.IsCompilerEmitDebugInfo());

    if (graph->GetCode().Empty()) {
        return {};
    }

    JitDebugWriter jitWriter(graph->GetArch(), graph->GetRuntime(), codeAllocator, methodName);

    // Writer protocol: Start -> StartClass -> AddMethod -> EndClass -> End -> Write.
    jitWriter.Start();

    auto method = reinterpret_cast<Method *>(graph->GetMethod());
    auto klass = reinterpret_cast<Class *>(graph->GetRuntime()->GetClass(method));
    jitWriter.StartClass(*klass);

    CompiledMethod compiledMethod(graph->GetArch(), method, 0);
    compiledMethod.SetCode(graph->GetCode().ToConst());
    compiledMethod.SetCodeInfo(graph->GetCodeInfoData());
    compiledMethod.SetCfiInfo(graph->GetCallingConvention()->GetCfiInfo());

    jitWriter.AddMethod(compiledMethod);
    jitWriter.EndClass();
    jitWriter.End();
    if (!jitWriter.Write()) {
        return {};
    }

    // The gdb entry must outlive the code, hence the dedicated arena.
    auto gdbEntry {gdbDebugInfoAllocator->New<jit_code_entry>()};
    if (gdbEntry == nullptr) {
        return {};
    }

    auto elfFile {jitWriter.GetElf()};
    // Pointer to Elf-file entry
    gdbEntry->symfileAddr = reinterpret_cast<const char *>(elfFile.Data());
    // Elf-in-memory file size
    gdbEntry->symfileSize = elfFile.Size();
    gdbEntry->prevEntry = nullptr;

    // Linking into the list (and setting nextEntry) happens inside.
    RegisterJitCode(gdbEntry);
    return jitWriter.GetCode();
}
502
503 } // namespace ark::compiler
504 #endif
505