/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MAPLEBE_INCLUDE_CG_AARCH64_AARCH64_PROEPILOG_H
#define MAPLEBE_INCLUDE_CG_AARCH64_AARCH64_PROEPILOG_H

#include "proepilog.h"
#include "cg.h"
#include "operand.h"
#include "aarch64_cgfunc.h"
#include "aarch64_operand.h"
#include "aarch64_insn.h"

namespace maplebe {
using namespace maple;

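/*
 * Generates the function prologue and epilogue for the AArch64 backend:
 * call-frame allocation/deallocation, callee-saved register save/restore,
 * stack-guard insertion, and tail-call optimization.
 */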
class AArch64GenProEpilog : public GenProEpilog {
public:
    AArch64GenProEpilog(CGFunc &func, MemPool &memPool)
        : GenProEpilog(func), tmpAlloc(&memPool), exitBB2CallSitesMap(tmpAlloc.Adapter())
    {
        useFP = func.UseFP();
        if (func.GetMirModule().GetFlavor() == MIRFlavor::kFlavorLmbc) {
            stackBaseReg = RFP;
        } else {
            stackBaseReg = useFP ? R29 : RSP;
        }
        exitBB2CallSitesMap.clear();
    }
    ~AArch64GenProEpilog() override = default;

    bool TailCallOpt() override;
    bool NeedProEpilog() override;
    static MemOperand *SplitStpLdpOffsetForCalleeSavedWithAddInstruction(CGFunc &cgFunc, const MemOperand &mo,
                                                                         uint32 bitLen,
                                                                         AArch64reg baseReg = AArch64reg::kRinvalid);
    static void AppendInstructionPushPair(CGFunc &cgFunc, AArch64reg reg0, AArch64reg reg1, RegType rty, int offset);
    static void AppendInstructionPushSingle(CGFunc &cgFunc, AArch64reg reg, RegType rty, int offset);
    static void AppendInstructionPopSingle(CGFunc &cgFunc, AArch64reg reg, RegType rty, int offset);
    static void AppendInstructionPopPair(CGFunc &cgFunc, AArch64reg reg0, AArch64reg reg1, RegType rty, int offset);
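    /* Pass entry point: emits the prologue and epilogue for the current function. */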
    void Run() override;

private:
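    /* Stack protector: store the stack guard (canary) in the prologue and check it before returning. */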
    void GenStackGuard(BB &);
    BB &GenStackGuardCheckInsn(BB &);
    bool HasLoop();
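    /* Helpers for tail-call conversion and for isolating a fast return path. */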
    bool OptimizeTailBB(BB &bb, MapleSet<Insn *> &callInsns, const BB &exitBB) const;
    void TailCallBBOpt(BB &bb, MapleSet<Insn *> &callInsns, BB &exitBB);
    bool InsertOpndRegs(Operand &opnd, std::set<regno_t> &vecRegs) const;
    bool InsertInsnRegs(Insn &insn, bool insetSource, std::set<regno_t> &vecSourceRegs, bool insertTarget,
                        std::set<regno_t> &vecTargetRegs);
    bool FindRegs(Operand &insn, std::set<regno_t> &vecRegs) const;
    bool BackwardFindDependency(BB &ifbb, std::set<regno_t> &vecReturnSourceReg, std::list<Insn *> &existingInsns,
                                std::list<Insn *> &moveInsns);
    BB *IsolateFastPath(BB &);
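    /* Prologue generation: allocate the call frame and push callee-saved registers. */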
    void AppendInstructionAllocateCallFrame(AArch64reg reg0, AArch64reg reg1, RegType rty);
    void AppendInstructionAllocateCallFrameDebug(AArch64reg reg0, AArch64reg reg1, RegType rty);
    void GeneratePushRegs();
    void GeneratePushUnnamedVarargRegs();
    void AppendInstructionStackCheck(AArch64reg reg, RegType rty, int offset);
    void GenerateProlog(BB &);

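    /* Epilogue generation: pop callee-saved registers, deallocate the call frame, and emit the return. */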
    void GenerateRet(BB &bb);
    bool TestPredsOfRetBB(const BB &exitBB);
    void AppendInstructionDeallocateCallFrame(AArch64reg reg0, AArch64reg reg1, RegType rty);
    void AppendInstructionDeallocateCallFrameDebug(AArch64reg reg0, AArch64reg reg1, RegType rty);
    void GeneratePopRegs();
    void AppendJump(const MIRSymbol &func);
    void GenerateEpilog(BB &);
    void GenerateEpilogForCleanup(BB &);
    void ConvertToTailCalls(MapleSet<Insn *> &callInsnsMap);
    Insn &CreateAndAppendInstructionForAllocateCallFrame(int64 argsToStkPassSize, AArch64reg reg0, AArch64reg reg1,
                                                         RegType rty);
    Insn &AppendInstructionForAllocateOrDeallocateCallFrame(int64 argsToStkPassSize, AArch64reg reg0, AArch64reg reg1,
                                                            RegType rty, bool isAllocate);
    MapleMap<BB *, MapleSet<Insn *>> &GetExitBB2CallSitesMap()
    {
        return exitBB2CallSitesMap;
    }
    void SetCurTailcallExitBB(BB *bb)
    {
        curTailcallExitBB = bb;
    }
    BB *GetCurTailcallExitBB()
    {
        return curTailcallExitBB;
    }
    void SetFastPathReturnBB(BB *bb)
    {
        fastPathReturnBB = bb;
    }
    BB *GetFastPathReturnBB()
    {
        return fastPathReturnBB;
    }
    MapleAllocator tmpAlloc;
    static constexpr const int32 kOffset8MemPos = 8;
    static constexpr const int32 kOffset16MemPos = 16;
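    /* call instructions recorded per exit BB as candidates for tail-call conversion */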
    MapleMap<BB *, MapleSet<Insn *>> exitBB2CallSitesMap;
    BB *curTailcallExitBB = nullptr;
    BB *fastPathReturnBB = nullptr;
    bool useFP = true;
    /* the frame pointer (x29) is available as a general-purpose register if useFP is false */
    AArch64reg stackBaseReg = RFP;
};
} /* namespace maplebe */

#endif /* MAPLEBE_INCLUDE_CG_AARCH64_AARCH64_PROEPILOG_H */