1 /*
2 * Copyright (c) 2023 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "cg_phasemanager.h"
17 #include <vector>
18 #include <string>
19 #include "cg_option.h"
20 #include "args.h"
21 #include "label_creation.h"
22 #include "isel.h"
23 #include "offset_adjust.h"
24 #include "alignment.h"
25 #include "yieldpoint.h"
26 #include "emit.h"
27 #include "reg_alloc.h"
28 #if TARGAARCH64
29 #include "aarch64_emitter.h"
30 #include "aarch64_obj_emitter.h"
31 #include "aarch64_cg.h"
32 #elif TARGRISCV64
33 #include "riscv64_emitter.h"
34 #elif TARGX86_64
35 #include "x64_cg.h"
36 #include "x64_emitter.h"
37 #include "string_utils.h"
38 #endif
39
40 namespace maplebe {
/* Source-language predicates on the enclosing `module` variable in scope at the expansion site. */
#define JAVALANG (module.IsJavaModule())
#define CLANG (module.GetSrcLang() == kSrcLangC)

/* Delete a heap-allocated object and null the pointer so a later
 * RELEASE/use of the same pointer is harmless. do/while(0) makes the
 * macro statement-safe inside if/else without braces. */
#define RELEASE(pointer) \
    do { \
        if ((pointer) != nullptr) { \
            delete (pointer); \
            (pointer) = nullptr; \
        } \
    } while (0)
51
52 namespace {
53
DumpMIRFunc(MIRFunction & func,const char * msg,bool printAlways=false,const char * extraMsg=nullptr)54 void DumpMIRFunc(MIRFunction &func, const char *msg, bool printAlways = false, const char *extraMsg = nullptr)
55 {
56 bool dumpAll = (CGOptions::GetDumpPhases().find("*") != CGOptions::GetDumpPhases().end());
57 bool dumpFunc = CGOptions::FuncFilter(func.GetName());
58
59 if (printAlways || (dumpAll && dumpFunc)) {
60 LogInfo::MapleLogger() << msg << '\n';
61 func.Dump();
62
63 if (extraMsg) {
64 LogInfo::MapleLogger() << extraMsg << '\n';
65 }
66 }
67 }
68
69 } /* anonymous namespace */
70
/*
 * Emit the per-module prologue through the target emitter: source-file info
 * (unless suppressed) and, when DWARF was requested, the debug-info header.
 * Finishes by loading profile data via InitProfile.
 */
void CgFuncPM::GenerateOutPutFile(MIRModule &m)
{
    CHECK_FATAL(cg != nullptr, "cg is null");
    CHECK_FATAL(cg->GetEmitter(), "emitter is null");
#if TARGX86_64
    /* x86-64 emits through the standalone assembler owned by X64Emitter. */
    assembler::Assembler &assm = static_cast<X64Emitter &>(*cg->GetEmitter()).GetAssembler();
    if (!cgOptions->SuppressFileInfo()) {
        assm.InitialFileInfo(m.GetInputFileName());
    }
    if (cgOptions->WithDwarf()) {
        assm.EmitDIHeader();
    }
#else
    /* Other targets emit the textual headers only when producing assembly output. */
    if (CGOptions::GetEmitFileType() == CGOptions::kAsm) {
        if (!cgOptions->SuppressFileInfo()) {
            cg->GetEmitter()->EmitFileInfo(m.GetInputFileName());
        }
        if (cgOptions->WithDwarf()) {
            cg->GetEmitter()->EmitDIHeader();
        }
    }
#endif
    InitProfile(m);
}
95
FuncLevelRun(CGFunc & cgFunc,AnalysisDataManager & serialADM)96 bool CgFuncPM::FuncLevelRun(CGFunc &cgFunc, AnalysisDataManager &serialADM)
97 {
98 bool changed = false;
99 for (size_t i = 0; i < phasesSequence.size(); ++i) {
100 SolveSkipFrom(CGOptions::GetSkipFromPhase(), i);
101 const MaplePhaseInfo *curPhase = MaplePhaseRegister::GetMaplePhaseRegister()->GetPhaseByID(phasesSequence[i]);
102 if (!IsQuiet()) {
103 LogInfo::MapleLogger() << "---Run MplCG " << (curPhase->IsAnalysis() ? "analysis" : "transform")
104 << " Phase [ " << curPhase->PhaseName() << " ]---\n";
105 }
106 if (curPhase->IsAnalysis()) {
107 changed |= RunAnalysisPhase<MapleFunctionPhase<CGFunc>, CGFunc>(*curPhase, serialADM, cgFunc);
108 } else {
109 changed |= RunTransformPhase<MapleFunctionPhase<CGFunc>, CGFunc>(*curPhase, serialADM, cgFunc);
110 DumpFuncCGIR(cgFunc, curPhase->PhaseName());
111 }
112 SolveSkipAfter(CGOptions::GetSkipAfterPhase(), i);
113 }
114 return changed;
115 }
116
/*
 * Emit the per-module epilogue after all functions are compiled: DWARF
 * footer (if enabled), global variables and debug info, then finalize and
 * close the output stream.
 */
void CgFuncPM::PostOutPut(MIRModule &m)
{
#if TARGX86_64
    /* x86-64 path funnels everything through the standalone assembler. */
    X64Emitter *x64Emitter = static_cast<X64Emitter *>(cg->GetEmitter());
    assembler::Assembler &assm = x64Emitter->GetAssembler();
    if (cgOptions->WithDwarf()) {
        assm.EmitDIFooter();
    }
    x64Emitter->EmitGlobalVariable(*cg);
    x64Emitter->EmitDebugInfo(*cg);
    assm.FinalizeFileInfo();
    assm.CloseOutput();
#else
    if (CGOptions::GetEmitFileType() == CGOptions::kAsm) {
        /* flush any pending huge-so routines before the footer */
        cg->GetEmitter()->EmitHugeSoRoutines(true);
        if (cgOptions->WithDwarf()) {
            cg->GetEmitter()->EmitDIFooter();
        }
        /* Emit global info */
        EmitGlobalInfo(m);
    } else {
        /* object-file output path */
        cg->GetEmitter()->Finish();
        cg->GetEmitter()->CloseOutput();
    }
#endif
}
143
void MarkUsedStaticSymbol(const StIdx &symbolIdx);
/* Memo table for the static-symbol sweep: symbols already processed by
 * MarkUsedStaticSymbol. Cleared per module in SweepUnusedStaticSymbol. */
std::map<StIdx, bool> visitedSym;
146
CollectStaticSymbolInVar(MIRConst * mirConst)147 void CollectStaticSymbolInVar(MIRConst *mirConst)
148 {
149 if (mirConst->GetKind() == kConstAddrof) {
150 auto *addrSymbol = static_cast<MIRAddrofConst *>(mirConst);
151 MIRSymbol *sym = GlobalTables::GetGsymTable().GetSymbolFromStidx(addrSymbol->GetSymbolIndex().Idx(), true);
152 if (sym != nullptr) {
153 MarkUsedStaticSymbol(sym->GetStIdx());
154 }
155 } else if (mirConst->GetKind() == kConstAggConst) {
156 auto &constVec = static_cast<MIRAggConst *>(mirConst)->GetConstVec();
157 for (auto &cst : constVec) {
158 CollectStaticSymbolInVar(cst);
159 }
160 }
161 }
162
MarkUsedStaticSymbol(const StIdx & symbolIdx)163 void MarkUsedStaticSymbol(const StIdx &symbolIdx)
164 {
165 if (!symbolIdx.IsGlobal()) {
166 return;
167 }
168 MIRSymbol *symbol = GlobalTables::GetGsymTable().GetSymbolFromStidx(symbolIdx.Idx(), true);
169 if (symbol == nullptr) {
170 return;
171 }
172 if (visitedSym[symbolIdx]) {
173 return;
174 } else {
175 visitedSym[symbolIdx] = true;
176 }
177 symbol->ResetIsDeleted();
178 if (symbol->IsConst()) {
179 auto *konst = symbol->GetKonst();
180 CollectStaticSymbolInVar(konst);
181 }
182 }
183
RecursiveMarkUsedStaticSymbol(const BaseNode * baseNode)184 void RecursiveMarkUsedStaticSymbol(const BaseNode *baseNode)
185 {
186 if (baseNode == nullptr) {
187 return;
188 }
189 Opcode op = baseNode->GetOpCode();
190 switch (op) {
191 case OP_block: {
192 const BlockNode *blk = static_cast<const BlockNode *>(baseNode);
193 for (auto &stmt : blk->GetStmtNodes()) {
194 RecursiveMarkUsedStaticSymbol(&stmt);
195 }
196 break;
197 }
198 case OP_dassign: {
199 const DassignNode *dassignNode = static_cast<const DassignNode *>(baseNode);
200 MarkUsedStaticSymbol(dassignNode->GetStIdx());
201 break;
202 }
203 case OP_addrof:
204 case OP_addrofoff:
205 case OP_dread: {
206 const AddrofNode *dreadNode = static_cast<const AddrofNode *>(baseNode);
207 MarkUsedStaticSymbol(dreadNode->GetStIdx());
208 break;
209 }
210 default: {
211 break;
212 }
213 }
214 for (size_t i = 0; i < baseNode->NumOpnds(); ++i) {
215 RecursiveMarkUsedStaticSymbol(baseNode->Opnd(i));
216 }
217 }
218
/* Mark as used every static symbol referenced from the body of `func`. */
void CollectStaticSymbolInFunction(MIRFunction &func)
{
    RecursiveMarkUsedStaticSymbol(func.GetBody());
}
223
SweepUnusedStaticSymbol(MIRModule & m)224 void CgFuncPM::SweepUnusedStaticSymbol(MIRModule &m)
225 {
226 if (!m.IsCModule()) {
227 return;
228 }
229 size_t size = GlobalTables::GetGsymTable().GetSymbolTableSize();
230 for (size_t i = 0; i < size; ++i) {
231 MIRSymbol *mirSymbol = GlobalTables::GetGsymTable().GetSymbolFromStidx(static_cast<uint32>(i));
232 if (mirSymbol != nullptr && (mirSymbol->GetSKind() == kStVar || mirSymbol->GetSKind() == kStConst) &&
233 (mirSymbol->GetStorageClass() == kScFstatic || mirSymbol->GetStorageClass() == kScPstatic)) {
234 mirSymbol->SetIsDeleted();
235 }
236 }
237
238 visitedSym.clear();
239 /* scan all funtions */
240 std::vector<MIRFunction *> &funcTable = GlobalTables::GetFunctionTable().GetFuncTable();
241 /* don't optimize this loop to iterator or range-base loop
242 * because AddCallGraphNode(mirFunc) will change GlobalTables::GetFunctionTable().GetFuncTable()
243 */
244 for (size_t index = 0; index < funcTable.size(); ++index) {
245 MIRFunction *mirFunc = funcTable.at(index);
246 if (mirFunc == nullptr || mirFunc->GetBody() == nullptr) {
247 continue;
248 }
249 m.SetCurFunction(mirFunc);
250 CollectStaticSymbolInFunction(*mirFunc);
251 /* scan function symbol declaration
252 * find addrof static const */
253 MIRSymbolTable *funcSymTab = mirFunc->GetSymTab();
254 if (funcSymTab) {
255 size_t localSymSize = funcSymTab->GetSymbolTableSize();
256 for (uint32 i = 0; i < localSymSize; ++i) {
257 MIRSymbol *st = funcSymTab->GetSymbolFromStIdx(i);
258 if (st && st->IsConst()) {
259 MIRConst *mirConst = st->GetKonst();
260 CollectStaticSymbolInVar(mirConst);
261 }
262 }
263 }
264 }
265 /* scan global symbol declaration
266 * find addrof static const */
267 auto &symbolSet = m.GetSymbolSet();
268 for (auto sit = symbolSet.begin(); sit != symbolSet.end(); ++sit) {
269 MIRSymbol *s = GlobalTables::GetGsymTable().GetSymbolFromStidx(sit->Idx(), true);
270 if (s->IsConst()) {
271 MIRConst *mirConst = s->GetKonst();
272 CollectStaticSymbolInVar(mirConst);
273 }
274 }
275 }
276
277 /* =================== new phase manager =================== */
#ifdef RA_PERF_ANALYSIS
/* Timing reports defined in the register allocator; only available in
 * RA_PERF_ANALYSIS builds and printed at the end of PhaseRun. */
extern void printLSRATime();
extern void printRATime();
#endif
282
/*
 * Module-level entry point of the CG phase manager. Creates the target CG
 * and BECommon singletons, optionally sweeps unused statics, then for every
 * function with a body: lowers the MIR, creates a CGFunc, and runs the
 * enrolled per-function phase pipeline. Emits the module epilogue and
 * releases the singletons before returning whether anything changed.
 */
bool CgFuncPM::PhaseRun(MIRModule &m)
{
    CreateCGAndBeCommon(m);
    bool changed = false;
    /* reserve static symbol for debugging */
    if (!cgOptions->WithDwarf()) {
        SweepUnusedStaticSymbol(m);
    }
    if (cgOptions->IsRunCG()) {
        GenerateOutPutFile(m);

        /* Run the cg optimizations phases */
        PrepareLower(m);

        uint32 countFuncId = 0;
        unsigned long rangeNum = 0;

        auto userDefinedOptLevel = cgOptions->GetOptimizeLevel();
        cg->EnrollTargetPhases(this);

        auto admMempool = AllocateMemPoolInPhaseManager("cg phase manager's analysis data manager mempool");
        auto *serialADM = GetManagerMemPool()->New<AnalysisDataManager>(*(admMempool.get()));
        for (auto it = m.GetFunctionList().begin(); it != m.GetFunctionList().end(); ++it) {
            DEBUG_ASSERT(serialADM->CheckAnalysisInfoEmpty(), "clean adm before function run");
            MIRFunction *mirFunc = *it;
            if (mirFunc->GetBody() == nullptr) {
                continue;
            }
            /* Under -O2 with a partial-O2 list, switch the opt level per function
             * (listed functions get O2, the rest O0) and re-enroll the target
             * phases so the pipeline matches the new level. */
            if (userDefinedOptLevel == CGOptions::kLevel2 && m.HasPartO2List()) {
                if (m.IsInPartO2List(mirFunc->GetNameStrIdx())) {
                    cgOptions->EnableO2();
                } else {
                    cgOptions->EnableO0();
                }
                ClearAllPhases();
                cg->EnrollTargetPhases(this);
                cg->UpdateCGOptions(*cgOptions);
                Globals::GetInstance()->SetOptimLevel(cgOptions->GetOptimizeLevel());
            }
            if (!IsQuiet()) {
                LogInfo::MapleLogger() << ">>>>>>>>>>>>>>>>>>>>>>>>>>>>> Optimizing Function < " << mirFunc->GetName()
                                       << " id=" << mirFunc->GetPuidxOrigin() << " >---\n";
            }
            /* LowerIR. */
            m.SetCurFunction(mirFunc);
            if (cg->DoConstFold()) {
                DumpMIRFunc(*mirFunc, "************* before ConstantFold **************");
                ConstantFold cf(m);
                (void)cf.Simplify(mirFunc->GetBody());
            }

            /* lmbc flavor is already lowered; skip CG lowering for it */
            if (m.GetFlavor() != MIRFlavor::kFlavorLmbc) {
                DoFuncCGLower(m, *mirFunc);
            }
            /* create CGFunc */
            MIRSymbol *funcSt = GlobalTables::GetGsymTable().GetSymbolFromStidx(mirFunc->GetStIdx().Idx());
            auto funcMp = std::make_unique<ThreadLocalMemPool>(memPoolCtrler, funcSt->GetName());
            auto stackMp = std::make_unique<StackMemPool>(funcMp->GetCtrler(), "");
            MapleAllocator funcScopeAllocator(funcMp.get());
            mirFunc->SetPuidxOrigin(++countFuncId);
            CGFunc *cgFunc =
                cg->CreateCGFunc(m, *mirFunc, *beCommon, *funcMp, *stackMp, funcScopeAllocator, countFuncId);
            CHECK_FATAL(cgFunc != nullptr, "Create CG Function failed in cg_phase_manager");
            CG::SetCurCGFunc(*cgFunc);

            if (cgOptions->WithDwarf()) {
                cgFunc->SetDebugInfo(m.GetDbgInfo());
            }
            /* Run the cg optimizations phases. */
            if (CGOptions::UseRange() && rangeNum >= CGOptions::GetRangeBegin() &&
                rangeNum <= CGOptions::GetRangeEnd()) {
                CGOptions::EnableInRange();
            }
            changed = FuncLevelRun(*cgFunc, *serialADM);
            /* Delete mempool. */
            mirFunc->ReleaseCodeMemory();
            ++rangeNum;
            CGOptions::DisableInRange();
        }
        PostOutPut(m);
#ifdef RA_PERF_ANALYSIS
        if (cgOptions->IsEnableTimePhases()) {
            printLSRATime();
            printRATime();
        }
#endif
    } else {
        LogInfo::MapleLogger(kLlErr) << "Skipped generating .s because -no-cg is given" << '\n';
    }
    /* cg/beCommon were allocated with new in CreateCGAndBeCommon */
    RELEASE(cg);
    RELEASE(beCommon);
    return changed;
}
376
DumpFuncCGIR(const CGFunc & f,const std::string & phaseName) const377 void CgFuncPM::DumpFuncCGIR(const CGFunc &f, const std::string &phaseName) const
378 {
379 if (CGOptions::DumpPhase(phaseName) && CGOptions::FuncFilter(f.GetName())) {
380 LogInfo::MapleLogger() << "\n******** CG IR After " << phaseName << ": *********\n";
381 f.DumpCGIR();
382 }
383 }
384
/*
 * Emit module-level data on the textual (.s) output path: duplicated asm
 * functions, fast funcs, optional object maps, global variables and debug
 * info; finally closes the emitter output stream.
 */
void CgFuncPM::EmitGlobalInfo(MIRModule &m) const
{
    EmitDuplicatedAsmFunc(m);
    EmitFastFuncs(m);
    if (cgOptions->IsGenerateObjectMap()) {
        cg->GenerateObjectMaps(*beCommon);
    }
    cg->GetEmitter()->EmitGlobalVariable();
    EmitDebugInfo(m);
    cg->GetEmitter()->CloseOutput();
}
396
InitProfile(MIRModule & m) const397 void CgFuncPM::InitProfile(MIRModule &m) const
398 {
399 if (!CGOptions::IsProfileDataEmpty()) {
400 uint32 dexNameIdx = m.GetFileinfo(GlobalTables::GetStrTable().GetOrCreateStrIdxFromName("INFO_filename"));
401 const std::string &dexName = GlobalTables::GetStrTable().GetStringFromStrIdx(GStrIdx(dexNameIdx));
402 bool deCompressSucc = m.GetProfile().DeCompress(CGOptions::GetProfileData(), dexName);
403 if (!deCompressSucc) {
404 LogInfo::MapleLogger() << "WARN: DeCompress() " << CGOptions::GetProfileData() << "failed in mplcg()\n";
405 }
406 }
407 }
408
/*
 * Allocate the target-specific CG instance and its emitter (selected by the
 * TARG* build macros and the requested emit file type), then create BECommon
 * and publish both through Globals. The cg/beCommon objects are released at
 * the end of PhaseRun via the RELEASE macro.
 */
void CgFuncPM::CreateCGAndBeCommon(MIRModule &m)
{
    DEBUG_ASSERT(cgOptions != nullptr, "New cg phase manager running FAILED :: cgOptions unset");
    auto outputFileName = m.GetOutputFileName();
#if TARGAARCH64 || TARGRISCV64
    cg = new AArch64CG(m, *cgOptions, cgOptions->GetEHExclusiveFunctionNameVec(), CGOptions::GetCyclePatternMap());
    if (CGOptions::GetEmitFileType() == CGOptions::kAsm) {
        cg->SetEmitter(*m.GetMemPool()->New<AArch64AsmEmitter>(*cg, outputFileName));
    } else {
        /* object output: rewrite the trailing character of the output name to 'o' */
        outputFileName = outputFileName.replace(outputFileName.length() - 1, 1, 1, 'o');
        cg->SetEmitter(*m.GetMemPool()->New<AArch64ObjEmitter>(*cg, outputFileName));
    }
#elif TARGARM32
    cg = new Arm32CG(m, *cgOptions, cgOptions->GetEHExclusiveFunctionNameVec(), CGOptions::GetCyclePatternMap());
    cg->SetEmitter(*m.GetMemPool()->New<Arm32AsmEmitter>(*cg, outputFileName));
#elif TARGX86_64
    cg = new X64CG(m, *cgOptions);
    if (CGOptions::GetEmitFileType() == CGOptions::kAsm) {
        assembler::Assembler *assembler = new assembler::AsmAssembler(outputFileName);
        cg->SetEmitter(*m.GetMemPool()->New<X64Emitter>(*cg, *assembler));
    } else {
        /* object output: rewrite the trailing character of the output name to 'o' */
        outputFileName = outputFileName.replace(outputFileName.length() - 1, 1, 1, 'o');
        assembler::Assembler *assembler = new assembler::ElfAssembler(outputFileName);
        cg->SetEmitter(*m.GetMemPool()->New<X64Emitter>(*cg, *assembler));
    }
#else
#error "unknown platform"
#endif

    /*
     * Must be done before creating any BECommon instances.
     *
     * BECommon, when constructed, will calculate the type, size and align of all types. As a side effect, it will also
     * lower ptr and ref types into a64. That will drop the information of what a ptr or ref points to.
     *
     * All metadata generation passes which depend on the pointed-to type must be done here.
     */
    cg->GenPrimordialObjectList(m.GetBaseName());
    /* We initialize a couple of BECommon's tables using the size information of GlobalTables.type_table_.
     * So, BECommon must be allocated after all the parsing is done and user-defined types are all acounted.
     */
    beCommon = new BECommon(m);
    Globals::GetInstance()->SetBECommon(*beCommon);
    Globals::GetInstance()->SetTarget(*cg);

    /* If a metadata generation pass depends on object layout it must be done after creating BECommon. */
    cg->GenExtraTypeMetadata(cgOptions->GetClassListFile(), m.GetBaseName());

    if (cg->NeedInsertInstrumentationFunction()) {
        CHECK_FATAL(cgOptions->IsInsertCall(), "handling of --insert-call is not correct");
        cg->SetInstrumentationFunction(cgOptions->GetInstrumentationFunction());
    }
#if TARGAARCH64
    /* non-C modules (e.g. Java) always keep the frame pointer on aarch64 */
    if (!m.IsCModule()) {
        CGOptions::EnableFramePointer();
    }
#endif
}
467
PrepareLower(MIRModule & m)468 void CgFuncPM::PrepareLower(MIRModule &m)
469 {
470 mirLower = GetManagerMemPool()->New<MIRLower>(m, nullptr);
471 mirLower->Init();
472 cgLower =
473 GetManagerMemPool()->New<CGLowerer>(m, *beCommon, cg->GenerateExceptionHandlingCode(), cg->GenerateVerboseCG());
474 cgLower->RegisterBuiltIns();
475 if (m.IsJavaModule()) {
476 cgLower->InitArrayClassCacheTableIndex();
477 }
478 cgLower->RegisterExternalLibraryFunctions();
479 cgLower->SetCheckLoadStore(CGOptions::IsCheckArrayStore());
480 if (cg->IsStackProtectorStrong() || cg->IsStackProtectorAll() || m.HasPartO2List()) {
481 cg->AddStackGuardvar();
482 }
483 }
484
DoFuncCGLower(const MIRModule & m,MIRFunction & mirFunc)485 void CgFuncPM::DoFuncCGLower(const MIRModule &m, MIRFunction &mirFunc)
486 {
487 if (m.GetFlavor() <= kFeProduced) {
488 mirLower->SetLowerCG();
489 mirLower->SetMirFunc(&mirFunc);
490
491 DumpMIRFunc(mirFunc, "************* before MIRLowerer **************");
492 mirLower->LowerFunc(mirFunc);
493 }
494
495 bool isNotQuiet = !CGOptions::IsQuiet();
496 DumpMIRFunc(mirFunc, "************* before CGLowerer **************", isNotQuiet);
497
498 cgLower->LowerFunc(mirFunc);
499
500 DumpMIRFunc(mirFunc, "************* after CGLowerer **************", isNotQuiet,
501 "************* end CGLowerer **************");
502 }
503
EmitDuplicatedAsmFunc(MIRModule & m) const504 void CgFuncPM::EmitDuplicatedAsmFunc(MIRModule &m) const
505 {
506 if (CGOptions::IsDuplicateAsmFileEmpty()) {
507 return;
508 }
509
510 std::ifstream duplicateAsmFileFD(CGOptions::GetDuplicateAsmFile());
511
512 if (!duplicateAsmFileFD.is_open()) {
513 duplicateAsmFileFD.close();
514 ERR(kLncErr, " %s open failed!", CGOptions::GetDuplicateAsmFile().c_str());
515 return;
516 }
517 std::string contend;
518 bool onlyForFramework = false;
519 bool isFramework = IsFramework(m);
520
521 while (getline(duplicateAsmFileFD, contend)) {
522 if (!contend.compare("#Libframework_start")) {
523 onlyForFramework = true;
524 }
525
526 if (!contend.compare("#Libframework_end")) {
527 onlyForFramework = false;
528 }
529
530 if (onlyForFramework && !isFramework) {
531 continue;
532 }
533
534 (void)cg->GetEmitter()->Emit(contend + "\n");
535 }
536 duplicateAsmFileFD.close();
537 }
538
EmitFastFuncs(const MIRModule & m) const539 void CgFuncPM::EmitFastFuncs(const MIRModule &m) const
540 {
541 if (CGOptions::IsFastFuncsAsmFileEmpty() || !(m.IsJavaModule())) {
542 return;
543 }
544
545 struct stat buffer;
546 if (stat(CGOptions::GetFastFuncsAsmFile().c_str(), &buffer) != 0) {
547 return;
548 }
549
550 std::ifstream fastFuncsAsmFileFD(CGOptions::GetFastFuncsAsmFile());
551 if (fastFuncsAsmFileFD.is_open()) {
552 std::string contend;
553 (void)cg->GetEmitter()->Emit("#define ENABLE_LOCAL_FAST_FUNCS 1\n");
554
555 while (getline(fastFuncsAsmFileFD, contend)) {
556 (void)cg->GetEmitter()->Emit(contend + "\n");
557 }
558 }
559 fastFuncsAsmFileFD.close();
560 }
561
EmitDebugInfo(const MIRModule & m) const562 void CgFuncPM::EmitDebugInfo(const MIRModule &m) const
563 {
564 if (!cgOptions->WithDwarf()) {
565 return;
566 }
567 cg->GetEmitter()->SetupDBGInfo(m.GetDbgInfo());
568 cg->GetEmitter()->EmitDIHeaderFileInfo();
569 cg->GetEmitter()->EmitDIDebugInfoSection(m.GetDbgInfo());
570 cg->GetEmitter()->EmitDIDebugAbbrevSection(m.GetDbgInfo());
571 cg->GetEmitter()->EmitDIDebugARangesSection();
572 cg->GetEmitter()->EmitDIDebugRangesSection();
573 cg->GetEmitter()->EmitDIDebugLineSection();
574 cg->GetEmitter()->EmitDIDebugStrSection();
575 }
576
/* Framework detection is disabled in this build: always reports false, so
 * #Libframework sections of the duplicate-asm file are never emitted. */
bool CgFuncPM::IsFramework([[maybe_unused]] MIRModule &m) const
{
    return false;
}
/* Register the function-level phase manager itself. */
MAPLE_TRANSFORM_PHASE_REGISTER(CgFuncPM, cgFuncPhaseManager)
/* register codegen common phases */
MAPLE_TRANSFORM_PHASE_REGISTER(CgLayoutFrame, layoutstackframe)
MAPLE_TRANSFORM_PHASE_REGISTER(CgCreateLabel, createstartendlabel)
MAPLE_TRANSFORM_PHASE_REGISTER(InstructionSelector, instructionselector)
MAPLE_TRANSFORM_PHASE_REGISTER(CgMoveRegArgs, moveargs)
MAPLE_TRANSFORM_PHASE_REGISTER(CgRegAlloc, regalloc)
MAPLE_TRANSFORM_PHASE_REGISTER(CgAlignAnalysis, alignanalysis)
MAPLE_TRANSFORM_PHASE_REGISTER(CgFrameFinalize, framefinalize)
MAPLE_TRANSFORM_PHASE_REGISTER(CgYieldPointInsertion, yieldpoint)
MAPLE_TRANSFORM_PHASE_REGISTER(CgGenProEpiLog, generateproepilog)
592 } /* namespace maplebe */
593