1 /*
2 * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "optimizer_run.h"
17 #include "compile_method.h"
18 #include "mem/pool_manager.h"
19 #include "mem/code_allocator.h"
20 #include "include/class.h"
21 #include "include/method.h"
22 #include "optimizer/ir/ir_constructor.h"
23 #include "optimizer/ir/runtime_interface.h"
24 #include "optimizer/analysis/loop_analyzer.h"
25 #include "optimizer/pass.h"
26 #include "optimizer/ir_builder/ir_builder.h"
27 #include "utils/logger.h"
28 #include "code_info/code_info.h"
29 #include "events/events.h"
30 #include "trace/trace.h"
31 #include "optimizer/optimizations/regalloc/reg_alloc_linear_scan.h"
32 #include "optimizer/code_generator/codegen.h"
33
34 namespace ark::compiler {
35
36 #ifdef PANDA_COMPILER_DEBUG_INFO
37 static Span<uint8_t> EmitElf(Graph *graph, CodeAllocator *codeAllocator, ArenaAllocator *gdbDebugInfoAllocator,
38 const std::string &methodName);
39 #endif
40
/// Records the wall-clock start of a compilation. Must be paired with either
/// EndCompilationWithStats() or ResetCompilationStart() before the next call.
void JITStats::SetCompilationStart()
{
    // A non-zero startTime_ means a previous measurement was never closed.
    ASSERT(startTime_ == 0);
    startTime_ = time::GetCurrentTimeInNanos();
}
EndCompilationWithStats(const std::string & methodName,bool isOsr,size_t bcSize,size_t codeSize)46 void JITStats::EndCompilationWithStats(const std::string &methodName, bool isOsr, size_t bcSize, size_t codeSize)
47 {
48 ASSERT(startTime_ != 0);
49 auto time = time::GetCurrentTimeInNanos() - startTime_;
50 statsList_.push_back(Entry {PandaString(methodName, internalAllocator_->Adapter()), isOsr, bcSize, codeSize, time});
51 startTime_ = 0;
52 }
53
/// Discards a previously recorded compilation start timestamp without
/// producing a statistics entry (used when compilation is abandoned).
void JITStats::ResetCompilationStart()
{
    startTime_ = 0;
}
58
DumpCsv(char sep)59 void JITStats::DumpCsv(char sep)
60 {
61 ASSERT(g_options.WasSetCompilerDumpJitStatsCsv());
62 std::ofstream csv(g_options.GetCompilerDumpJitStatsCsv(), std::ofstream::trunc);
63 for (const auto &i : statsList_) {
64 csv << "\"" << i.methodName << "\"" << sep;
65 csv << i.isOsr << sep;
66 csv << i.bcSize << sep;
67 csv << i.codeSize << sep;
68 csv << i.time;
69 csv << '\n';
70 }
71 }
72
/// Aggregated arguments for EndCompilation(). Field order is part of the
/// interface: callers aggregate-initialize it positionally and EndCompilation
/// unpacks it with structured bindings.
struct EventCompilationArgs {
    const std::string methodName_;     // full method name used in events/statistics
    bool isOsr;                        // whether this was an OSR compilation
    size_t bcSize;                     // bytecode size of the compiled method
    uintptr_t address;                 // entry point of the generated code (0 on failure)
    size_t codeSize;                   // generated machine-code size (0 on failure)
    size_t infoSize;                   // size of the accompanying code info
    events::CompilationStatus status;  // final status reported to the events subsystem
};
82
EndCompilation(const EventCompilationArgs & args,JITStats * jitStats)83 static void EndCompilation(const EventCompilationArgs &args, JITStats *jitStats)
84 {
85 [[maybe_unused]] auto [methodName, isOsr, bcSize, address, codeSize, infoSize, status] = args;
86 EVENT_COMPILATION(methodName, isOsr, bcSize, address, codeSize, infoSize, status);
87 if (jitStats != nullptr) {
88 ASSERT((codeSize != 0) == (status == events::CompilationStatus::COMPILED));
89 jitStats->EndCompilationWithStats(methodName, isOsr, bcSize, codeSize);
90 }
91 }
92
ChooseArch(Arch arch)93 Arch ChooseArch(Arch arch)
94 {
95 if (arch != Arch::NONE) {
96 return arch;
97 }
98
99 arch = RUNTIME_ARCH;
100 if (RUNTIME_ARCH == Arch::X86_64 && g_options.WasSetCompilerCrossArch()) {
101 arch = GetArchFromString(g_options.GetCompilerCrossArch());
102 }
103
104 return arch;
105 }
106
CheckSingleImplementation(Graph * graph)107 static bool CheckSingleImplementation(Graph *graph)
108 {
109 // Check that all methods that were inlined due to its single implementation property, still have this property,
110 // otherwise we must drop compiled code.
111 // NOTE(compiler): we need to reset hotness counter hereby avoid yet another warmup phase.
112 auto cha = graph->GetRuntime()->GetCha();
113 for (auto siMethod : graph->GetSingleImplementationList()) {
114 if (!cha->IsSingleImplementation(siMethod)) {
115 LOG(WARNING, COMPILER)
116 << "Method lost single-implementation property after compilation, so we need to drop "
117 "whole compiled code: "
118 << graph->GetRuntime()->GetMethodFullName(siMethod);
119 return false;
120 }
121 }
122 return true;
123 }
124
/// Lays out the compiled method in executable memory as
/// [CodePrefix | machine code | code info] and write-protects the range.
/// @return span over the whole installed blob, or an empty span if the
///         code allocator could not provide memory.
static Span<uint8_t> EmitCode(const Graph *graph, CodeAllocator *allocator)
{
    // Machine code starts after the prefix, aligned for the target architecture.
    size_t codeOffset = RoundUp(CodePrefix::STRUCT_SIZE, GetCodeAlignment(graph->GetArch()));
    CodePrefix prefix;
    prefix.codeSize = graph->GetCode().size();
    // Code info follows the machine code, padded to a uint32_t boundary.
    prefix.codeInfoOffset = codeOffset + RoundUp(graph->GetCode().size(), sizeof(uint32_t));
    prefix.codeInfoSize = graph->GetCodeInfoData().size();
    size_t codeSize = prefix.codeInfoOffset + prefix.codeInfoSize;
    auto memRange = allocator->AllocateCodeUnprotected(codeSize);
    if (memRange.GetSize() == 0) {
        // Allocation failed; callers treat an empty span as a compilation failure.
        return Span<uint8_t> {};
    }

    auto data = reinterpret_cast<uint8_t *>(memRange.GetData());
    memcpy_s(data, sizeof(CodePrefix), &prefix, sizeof(CodePrefix));
    // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    memcpy_s(&data[codeOffset], graph->GetCode().size(), graph->GetCode().data(), graph->GetCode().size());
    // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    memcpy_s(&data[prefix.codeInfoOffset], graph->GetCodeInfoData().size(), graph->GetCodeInfoData().data(),
             graph->GetCodeInfoData().size());

    // Flip the range from writable to executable before publishing it.
    allocator->ProtectCode(memRange);

    return Span<uint8_t>(reinterpret_cast<uint8_t *>(memRange.GetData()), codeSize);
}
150
/// Aggregated arguments for GetEntryPoint(). Field order is part of the
/// interface: it is aggregate-initialized and unpacked via structured bindings.
struct EmitCodeArgs {
    Graph *graph;                           // compiled graph holding code and code info
    CodeAllocator *codeAllocator;           // allocator for the executable code range
    ArenaAllocator *gdbDebugInfoAllocator;  // arena for gdb jit_code_entry records
    const std::string methodName_;          // full method name, used for logging/events
};
157
GetEntryPoint(EmitCodeArgs && args,Method * method,bool isOsr,JITStats * jitStats)158 static uint8_t *GetEntryPoint(EmitCodeArgs &&args, [[maybe_unused]] Method *method, [[maybe_unused]] bool isOsr,
159 JITStats *jitStats)
160 {
161 [[maybe_unused]] auto [graph, codeAllocator, gdbDebugInfoAllocator, methodName] = args;
162 #ifdef PANDA_COMPILER_DEBUG_INFO
163 auto generatedData = g_options.IsCompilerEmitDebugInfo()
164 ? EmitElf(graph, codeAllocator, gdbDebugInfoAllocator, methodName)
165 : EmitCode(graph, codeAllocator);
166 #else
167 auto generatedData = EmitCode(graph, codeAllocator);
168 #endif
169 if (generatedData.Empty()) {
170 LOG(INFO, COMPILER) << "Compilation failed due to memory allocation fail: " << methodName;
171 return nullptr;
172 }
173 CodeInfo codeInfo(generatedData);
174 LOG(INFO, COMPILER) << "Compiled code for '" << methodName << "' has been installed to "
175 << bit_cast<void *>(codeInfo.GetCode()) << ", code size " << codeInfo.GetCodeSize();
176
177 auto entryPoint = const_cast<uint8_t *>(codeInfo.GetCode());
178 EndCompilation(EventCompilationArgs {methodName, isOsr, method->GetCodeSize(),
179 reinterpret_cast<uintptr_t>(entryPoint), codeInfo.GetCodeSize(),
180 codeInfo.GetInfoSize(), events::CompilationStatus::COMPILED},
181 jitStats);
182 return entryPoint;
183 }
184
/// Wires success/failure callbacks onto the runner, then delegates to the main
/// optimization pipeline (the single-argument RunOptimizations overload).
/// @param jitStats may be nullptr when JIT statistics are disabled.
template <TaskRunnerMode RUNNER_MODE>
static void RunOptimizations(CompilerTaskRunner<RUNNER_MODE> taskRunner, JITStats *jitStats)
{
    auto &taskCtx = taskRunner.GetContext();
    taskCtx.GetGraph()->SetLanguage(taskCtx.GetMethod()->GetClass()->GetSourceLang());

    taskRunner.AddCallbackOnSuccess([]([[maybe_unused]] CompilerContext<RUNNER_MODE> &compilerCtx) {
        LOG(DEBUG, COMPILER) << "The method " << compilerCtx.GetMethodName() << " is compiled";
    });
    taskRunner.AddCallbackOnFail([jitStats](CompilerContext<RUNNER_MODE> &compilerCtx) {
        // LOG(FATAL) is expected to abort, so the WARNING below is only
        // reached when compiler failures are configured to be ignored.
        if (!compiler::g_options.IsCompilerIgnoreFailures()) {
            LOG(FATAL, COMPILER) << "RunOptimizations failed!";
        }
        LOG(WARNING, COMPILER) << "RunOptimizations failed!";
        EndCompilation(EventCompilationArgs {compilerCtx.GetMethodName(), compilerCtx.IsOsr(),
                                             compilerCtx.GetMethod()->GetCodeSize(), 0, 0, 0,
                                             events::CompilationStatus::FAILED},
                       jitStats);
    });

    // Run compiler optimizations over created graph
    RunOptimizations<RUNNER_MODE>(std::move(taskRunner));
}
208
/// Aggregated arguments for CheckCompilation(). Field order is part of the
/// interface: it is aggregate-initialized and unpacked via structured bindings.
struct CheckCompilationArgs {
    RuntimeInterface *runtime;              // used to install the compiled entry point
    CodeAllocator *codeAllocator;           // allocator for the executable code range
    ArenaAllocator *gdbDebugInfoAllocator;  // arena for gdb jit_code_entry records
    JITStats *jitStats;                     // may be nullptr when statistics are disabled
    bool isDynamic;                         // method comes from a dynamic language
    Arch arch;                              // target architecture of the generated code
};
217
/// Validates the finished compilation and, on success, installs the generated
/// code as the method's entry point (OSR or regular).
/// @return true if the code was installed, false if it had to be dropped.
template <TaskRunnerMode RUNNER_MODE>
static bool CheckCompilation(CheckCompilationArgs &&args, CompilerContext<RUNNER_MODE> &compilerCtx)
{
    auto [runtime, codeAllocator, gdbDebugInfoAllocator, jitStats, isDynamic, arch] = args;
    auto *graph = compilerCtx.GetGraph();

    ASSERT(graph != nullptr && graph->GetCode().data() != nullptr);

    auto &name = compilerCtx.GetMethodName();
    auto isOsr = compilerCtx.IsOsr();
    auto *method = compilerCtx.GetMethod();

    // Inlining relied on single-implementation info; if that was invalidated
    // concurrently, the generated code is unsound and must be dropped.
    if (!isDynamic && !CheckSingleImplementation(graph)) {
        EndCompilation(EventCompilationArgs {name, isOsr, method->GetCodeSize(), 0, 0, 0,
                                             events::CompilationStatus::FAILED_SINGLE_IMPL},
                       jitStats);
        return false;
    }

    // Drop non-native code in any case
    if (arch != RUNTIME_ARCH) {
        EndCompilation(
            EventCompilationArgs {name, isOsr, method->GetCodeSize(), 0, 0, 0, events::CompilationStatus::DROPPED},
            jitStats);
        return false;
    }

    // Copy the code into executable memory; reports COMPILED on success.
    auto entryPoint =
        GetEntryPoint(EmitCodeArgs {graph, codeAllocator, gdbDebugInfoAllocator, name}, method, isOsr, jitStats);
    if (entryPoint == nullptr) {
        return false;
    }
    if (isOsr) {
        if (!runtime->TrySetOsrCode(method, entryPoint)) {
            // Compiled code has been deoptimized, so we shouldn't install osr code.
            // NOTE(compiler): release compiled code memory, when CodeAllocator supports freeing the memory.
            return false;
        }
    } else {
        runtime->SetCompiledEntryPoint(method, entryPoint);
    }
    return true;
}
261
/// JIT compilation entry point: applies the compilation regex filter, wires up
/// statistics, finalization and result-installation callbacks, then starts
/// graph construction via CompileInGraph().
/// @param runtime               runtime interface for name lookup and code installation
/// @param codeAllocator         allocator for executable code
/// @param gdbDebugInfoAllocator arena for gdb debug-info entries
/// @param jitStats              optional statistics collector (may be nullptr)
/// @param taskRunner            owns the compilation context and callback chain
template <TaskRunnerMode RUNNER_MODE>
void JITCompileMethod(RuntimeInterface *runtime, CodeAllocator *codeAllocator, ArenaAllocator *gdbDebugInfoAllocator,
                      JITStats *jitStats, CompilerTaskRunner<RUNNER_MODE> taskRunner)
{
    auto &taskCtx = taskRunner.GetContext();
    auto *taskMethod = taskCtx.GetMethod();
    taskCtx.SetMethodName(runtime->GetMethodFullName(taskMethod, false));
    auto &methodName = taskCtx.GetMethodName();

    SCOPED_TRACE_STREAM << "JIT compiling " << methodName;

    // The two regex options filter which methods get compiled; they are mutually exclusive.
    bool regex = g_options.WasSetCompilerRegex();
    bool regexWithSign = g_options.WasSetCompilerRegexWithSignature();
    ASSERT_PRINT(!(regex && regexWithSign),
                 "'--compiler-regex' and '--compiler-regex-with-signature' cannot be used together.");
    if ((regex || regexWithSign) && !g_options.MatchesRegex(runtime->GetMethodFullName(taskMethod, regexWithSign))) {
        LOG(DEBUG, COMPILER) << "Skip the method due to regexp mismatch: "
                             << runtime->GetMethodFullName(taskMethod, true);
        taskCtx.SetCompilationStatus(false);
        CompilerTaskRunner<RUNNER_MODE>::EndTask(std::move(taskRunner), false);
        return;
    }

    if (jitStats != nullptr) {
        jitStats->SetCompilationStart();
    }

    // Runs regardless of outcome: resets timing state and destroys the graph.
    taskRunner.AddFinalize([jitStats](CompilerContext<RUNNER_MODE> &compilerCtx) {
        if (jitStats != nullptr) {
            // Reset compilation start time in all cases for consistency
            jitStats->ResetCompilationStart();
        }
        auto *graph = compilerCtx.GetGraph();
        if (graph != nullptr) {
            graph->~Graph();
        }
    });

    auto arch = ChooseArch(Arch::NONE);
    bool isDynamic = ark::panda_file::IsDynamicLanguage(taskMethod->GetClass()->GetSourceLang());

    // On success, validate the compiled code and install the entry point.
    taskRunner.AddCallbackOnSuccess([runtime, codeAllocator, gdbDebugInfoAllocator, jitStats, isDynamic,
                                     arch](CompilerContext<RUNNER_MODE> &compilerCtx) {
        bool compilationStatus = CheckCompilation<RUNNER_MODE>(
            CheckCompilationArgs {runtime, codeAllocator, gdbDebugInfoAllocator, jitStats, isDynamic, arch},
            compilerCtx);
        compilerCtx.SetCompilationStatus(compilationStatus);
    });
    taskRunner.AddCallbackOnFail(
        [](CompilerContext<RUNNER_MODE> &compilerCtx) { compilerCtx.SetCompilationStatus(false); });

    CompileInGraph<RUNNER_MODE>(runtime, isDynamic, arch, std::move(taskRunner), jitStats);
}
315
316 template <TaskRunnerMode RUNNER_MODE>
CompileInGraph(RuntimeInterface * runtime,bool isDynamic,Arch arch,CompilerTaskRunner<RUNNER_MODE> taskRunner,JITStats * jitStats)317 void CompileInGraph(RuntimeInterface *runtime, bool isDynamic, Arch arch, CompilerTaskRunner<RUNNER_MODE> taskRunner,
318 JITStats *jitStats)
319 {
320 auto &taskCtx = taskRunner.GetContext();
321 auto isOsr = taskCtx.IsOsr();
322 auto *method = taskCtx.GetMethod();
323 auto &methodName = taskCtx.GetMethodName();
324
325 LOG(INFO, COMPILER) << "Compile method" << (isOsr ? "(OSR)" : "") << ": " << methodName << " ("
326 << runtime->GetFileName(method) << ')';
327
328 if (arch == Arch::NONE || !BackendSupport(arch)) {
329 LOG(DEBUG, COMPILER) << "Compilation unsupported for this platform!";
330 CompilerTaskRunner<RUNNER_MODE>::EndTask(std::move(taskRunner), false);
331 return;
332 }
333
334 auto *allocator = taskCtx.GetAllocator();
335 auto *localAllocator = taskCtx.GetLocalAllocator();
336 auto *graph = allocator->template New<Graph>(Graph::GraphArgs {allocator, localAllocator, arch, method, runtime},
337 nullptr, isOsr, isDynamic);
338 taskCtx.SetGraph(graph);
339 if (graph == nullptr) {
340 LOG(ERROR, COMPILER) << "Creating graph failed!";
341 EndCompilation(
342 EventCompilationArgs {methodName, isOsr, method->GetCodeSize(), 0, 0, 0, events::CompilationStatus::FAILED},
343 jitStats);
344 CompilerTaskRunner<RUNNER_MODE>::EndTask(std::move(taskRunner), false);
345 return;
346 }
347
348 taskRunner.SetTaskOnSuccess([jitStats](CompilerTaskRunner<RUNNER_MODE> nextRunner) {
349 RunOptimizations<RUNNER_MODE>(std::move(nextRunner), jitStats);
350 });
351
352 bool success = graph->template RunPass<IrBuilder>();
353 if (!success) {
354 if (!compiler::g_options.IsCompilerIgnoreFailures()) {
355 LOG(FATAL, COMPILER) << "IrBuilder failed!";
356 }
357 LOG(WARNING, COMPILER) << "IrBuilder failed!";
358 EndCompilation(
359 EventCompilationArgs {methodName, isOsr, method->GetCodeSize(), 0, 0, 0, events::CompilationStatus::FAILED},
360 jitStats);
361 };
362 CompilerTaskRunner<RUNNER_MODE>::EndTask(std::move(taskRunner), success);
363 }
364
// Explicit instantiations of the compilation entry points for both task-runner
// modes; the template definitions above live only in this translation unit.
template void JITCompileMethod<BACKGROUND_MODE>(RuntimeInterface *, CodeAllocator *, ArenaAllocator *, JITStats *,
                                                CompilerTaskRunner<BACKGROUND_MODE>);
template void JITCompileMethod<INPLACE_MODE>(RuntimeInterface *, CodeAllocator *, ArenaAllocator *, JITStats *,
                                             CompilerTaskRunner<INPLACE_MODE>);
template void CompileInGraph<BACKGROUND_MODE>(RuntimeInterface *, bool, Arch, CompilerTaskRunner<BACKGROUND_MODE>,
                                              JITStats *);
template void CompileInGraph<INPLACE_MODE>(RuntimeInterface *, bool, Arch, CompilerTaskRunner<INPLACE_MODE>,
                                           JITStats *);
template void RunOptimizations<BACKGROUND_MODE>(CompilerTaskRunner<BACKGROUND_MODE>, JITStats *);
template void RunOptimizations<INPLACE_MODE>(CompilerTaskRunner<INPLACE_MODE>, JITStats *);
template bool CheckCompilation<BACKGROUND_MODE>(CheckCompilationArgs &&args, CompilerContext<BACKGROUND_MODE> &);
template bool CheckCompilation<INPLACE_MODE>(CheckCompilationArgs &&args, CompilerContext<INPLACE_MODE> &);
377
378 } // namespace ark::compiler
379
380 #ifdef PANDA_COMPILER_DEBUG_INFO
381
382 #include "optimizer/ir/aot_data.h"
383 #include "tools/debug/jit_writer.h"
384
385 // Next "C"-code need for enable interaction with gdb
386 // Please read "JIT Compilation Interface" from gdb-documentation for more information
extern "C" {
// Gdb will replace implementation of this function.
// Gdb plants a breakpoint here; the symbol name and signature are fixed by
// gdb's JIT Compilation Interface and must not change.
// NOLINTNEXTLINE(readability-identifier-naming)
void NO_INLINE __jit_debug_register_code(void)
{
    // Empty inline-asm keeps the (otherwise empty) body from being optimized away.
    // NOLINTNEXTLINE(hicpp-no-assembler)
    asm("");
}

// Default version for descriptor (may be checked before register code)
// NOLINTNEXTLINE(modernize-use-nullptr, readability-identifier-naming)
jit_descriptor __jit_debug_descriptor = {1, JIT_NOACTION, NULL, NULL};  // CC-OFF(G.EXP.01-CPP) public API
}  // extern "C"
400
401 namespace ark::compiler {
402
403 // NOLINTNEXTLINE(fuchsia-statically-constructed-objects)
404 static os::memory::Mutex g_jitDebugLock;
405
406 // Will register jit-elf description in linked list
// Will register jit-elf description in linked list
static void RegisterJitCode(jit_code_entry *entry)
{
    ASSERT(g_options.IsCompilerEmitDebugInfo());

    os::memory::LockHolder lock(g_jitDebugLock);
    // Re-link list: new entries are prepended at the head of gdb's list.
    entry->nextEntry = __jit_debug_descriptor.firstEntry;
    if (__jit_debug_descriptor.firstEntry != nullptr) {
        __jit_debug_descriptor.firstEntry->prevEntry = entry;
    }
    __jit_debug_descriptor.firstEntry = entry;

    // Fill last entry
    __jit_debug_descriptor.relevantEntry = entry;
    __jit_debug_descriptor.actionFlag = JIT_REGISTER_FN;

    // Call gdb-callback: gdb breaks on this function and reads the descriptor
    // (see "JIT Compilation Interface" in the gdb documentation).
    __jit_debug_register_code();
    __jit_debug_descriptor.actionFlag = JIT_NOACTION;
    __jit_debug_descriptor.relevantEntry = nullptr;
}
428
429 // When code allocator cleaned - also will clean entry
// When code allocator cleaned - also will clean entry
void CleanJitDebugCode()
{
    ASSERT(g_options.IsCompilerEmitDebugInfo());

    os::memory::LockHolder lock(g_jitDebugLock);
    __jit_debug_descriptor.actionFlag = JIT_UNREGISTER_FN;

    // Unregister entries one at a time, notifying gdb for each head element.
    while (__jit_debug_descriptor.firstEntry != nullptr) {
        __jit_debug_descriptor.firstEntry->prevEntry = nullptr;
        __jit_debug_descriptor.relevantEntry = __jit_debug_descriptor.firstEntry;
        // Call gdb-callback
        __jit_debug_register_code();

        __jit_debug_descriptor.firstEntry = __jit_debug_descriptor.firstEntry->nextEntry;
    }

    // Leave the descriptor in its quiescent state.
    __jit_debug_descriptor.actionFlag = JIT_NOACTION;
    __jit_debug_descriptor.relevantEntry = nullptr;
}
449
450 // For each jit code - will generate small elf description and put them in gdb-special linked list.
// For each jit code - will generate small elf description and put them in gdb-special linked list.
/// @return span over the installed code, or an empty span on any failure
///         (empty code, writer failure, or entry allocation failure).
static Span<uint8_t> EmitElf(Graph *graph, CodeAllocator *codeAllocator, ArenaAllocator *gdbDebugInfoAllocator,
                             const std::string &methodName)
{
    ASSERT(g_options.IsCompilerEmitDebugInfo());

    if (graph->GetCode().Empty()) {
        return {};
    }

    JitDebugWriter jitWriter(graph->GetArch(), graph->GetRuntime(), codeAllocator, methodName);

    jitWriter.Start();

    auto method = reinterpret_cast<Method *>(graph->GetMethod());
    auto klass = reinterpret_cast<Class *>(graph->GetRuntime()->GetClass(method));
    jitWriter.StartClass(*klass);

    // Package the code, code info and CFI data of the single method being compiled.
    CompiledMethod compiledMethod(graph->GetArch(), method, 0);
    compiledMethod.SetCode(graph->GetCode().ToConst());
    compiledMethod.SetCodeInfo(graph->GetCodeInfoData());
    compiledMethod.SetCfiInfo(graph->GetCallingConvention()->GetCfiInfo());

    jitWriter.AddMethod(compiledMethod);
    jitWriter.EndClass();
    jitWriter.End();
    if (!jitWriter.Write()) {
        return {};
    }

    // The gdb entry must outlive the code, hence the dedicated arena allocator.
    auto gdbEntry {gdbDebugInfoAllocator->New<jit_code_entry>()};
    if (gdbEntry == nullptr) {
        return {};
    }

    auto elfFile {jitWriter.GetElf()};
    // Pointer to Elf-file entry
    gdbEntry->symfileAddr = reinterpret_cast<const char *>(elfFile.Data());
    // Elf-in-memory file size
    gdbEntry->symfileSize = elfFile.Size();
    gdbEntry->prevEntry = nullptr;

    RegisterJitCode(gdbEntry);
    return jitWriter.GetCode();
}
495
496 } // namespace ark::compiler
497 #endif
498