/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "x64_proepilog.h"
#include "x64_cg.h"

namespace maplebe {
using namespace maple;
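/* The x64 backend always generates a prologue and an epilogue for a function. */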
bool X64GenProEpilog::NeedProEpilog()
{
    return true;
}
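/*
 * Save (isPush) or restore (!isPush) every callee-saved register used by the
 * function, each at its RBP-relative slot in the stack frame.
 */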
void X64GenProEpilog::GenerateCalleeSavedRegs(bool isPush)
{
    X64CGFunc &x64cgFunc = static_cast<X64CGFunc &>(cgFunc);
    const auto &calleeSavedRegs = x64cgFunc.GetCalleeSavedRegs();
    if (calleeSavedRegs.empty()) {
        return;
    }
    /* CalleeSave(0) = -(FrameSize + CalleeReg - ArgsStk) */
    X64MemLayout *memLayout = static_cast<X64MemLayout *>(cgFunc.GetMemlayout());
    int64 offset = -static_cast<int64>((memLayout->StackFrameSize() +
        static_cast<X64CGFunc &>(cgFunc).SizeOfCalleeSaved() - memLayout->SizeOfArgsToStackPass()));
    RegOperand &baseReg = cgFunc.GetOpndBuilder()->CreatePReg(x64::RBP, k64BitSize, kRegTyInt);
    std::vector<std::pair<uint16, int32>> calleeRegAndOffsetVec;
    for (const auto &reg : calleeSavedRegs) {
        RegType regType = IsGPRegister(reg) ? kRegTyInt : kRegTyFloat;
        uint32 regByteSize = IsGPRegister(reg) ? kX64IntregBytelen : kX64FpregBytelen;
        uint32 regSize = regByteSize * kBitsPerByte;
        DEBUG_ASSERT((regSize == k32BitSize || regSize == k64BitSize), "only 32/64-bit registers are supported");
        RegOperand &calleeReg = cgFunc.GetOpndBuilder()->CreatePReg(reg, regSize, regType);
        calleeRegAndOffsetVec.push_back(
            std::pair<uint16, int32>(static_cast<uint16>(reg) - 1, static_cast<int32>(offset)));
        MemOperand &memOpnd = cgFunc.GetOpndBuilder()->CreateMem(baseReg, offset, regSize);
        if (isPush) {
            GeneratePushCalleeSavedRegs(calleeReg, memOpnd, regSize);
        } else {
            GeneratePopCalleeSavedRegs(calleeReg, memOpnd, regSize);
        }
        offset += static_cast<int64>(regByteSize);
    }
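    /* When an emit code space is attached, record each callee-save slot offset and the FP-to-SP delta. */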
    const auto &emitMemoryManager = CGOptions::GetInstance().GetEmitMemoryManager();
    if (emitMemoryManager.codeSpace != nullptr) {
        emitMemoryManager.funcCalleeOffsetSaver(emitMemoryManager.codeSpace, cgFunc.GetName(), calleeRegAndOffsetVec);
        const int32 fp2SPDelta = 16;  // saved FP + return address
        emitMemoryManager.funcFpSPDeltaSaver(emitMemoryManager.codeSpace, cgFunc.GetName(), fp2SPDelta);
    }
}

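/* Save one callee-saved register to its stack slot with a 32- or 64-bit register-to-memory mov. */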
void X64GenProEpilog::GeneratePushCalleeSavedRegs(RegOperand &regOpnd, MemOperand &memOpnd, uint32 regSize)
{
    MOperator mMovrmOp = (regSize == k32BitSize) ? x64::MOP_movl_r_m : x64::MOP_movq_r_m;
    Insn &copyInsn = cgFunc.GetInsnBuilder()->BuildInsn(mMovrmOp, X64CG::kMd[mMovrmOp]);
    copyInsn.AddOpndChain(regOpnd).AddOpndChain(memOpnd);
    cgFunc.GetCurBB()->AppendInsn(copyInsn);
}

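/* Reload one callee-saved register from its stack slot with a 32- or 64-bit memory-to-register mov. */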
void X64GenProEpilog::GeneratePopCalleeSavedRegs(RegOperand &regOpnd, MemOperand &memOpnd, uint32 regSize)
{
    MOperator mMovrmOp = (regSize == k32BitSize) ? x64::MOP_movl_m_r : x64::MOP_movq_m_r;
    Insn &copyInsn = cgFunc.GetInsnBuilder()->BuildInsn(mMovrmOp, X64CG::kMd[mMovrmOp]);
    copyInsn.AddOpndChain(memOpnd).AddOpndChain(regOpnd);
    cgFunc.GetCurBB()->AppendInsn(copyInsn);
}

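/*
 * Prologue shape, built in the dummy BB and inserted at the beginning of `bb`:
 *     pushq %rbp
 *     movq  %rsp, %rbp
 *     subq  $framesize, %rsp    (only when the function makes calls or uses VLA/alloca)
 *     <stores of callee-saved registers>
 */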
void X64GenProEpilog::GenerateProlog(BB &bb)
{
    auto &x64CGFunc = static_cast<X64CGFunc &>(cgFunc);
    BB *formerCurBB = cgFunc.GetCurBB();
    x64CGFunc.GetDummyBB()->ClearInsns();
    x64CGFunc.GetDummyBB()->SetIsProEpilog(true);
    cgFunc.SetCurBB(*x64CGFunc.GetDummyBB());

    /* push %rbp */
    MOperator mPushrOp = x64::MOP_pushq_r;
    Insn &pushInsn = cgFunc.GetInsnBuilder()->BuildInsn(mPushrOp, X64CG::kMd[mPushrOp]);
    RegOperand &opndFpReg = cgFunc.GetOpndBuilder()->CreatePReg(x64::RBP, k64BitSize, kRegTyInt);
    pushInsn.AddOpndChain(opndFpReg);
    cgFunc.GetCurBB()->AppendInsn(pushInsn);

    /* mov %rsp, %rbp */
    MOperator mMovrrOp = x64::MOP_movq_r_r;
    Insn &copyInsn = cgFunc.GetInsnBuilder()->BuildInsn(mMovrrOp, X64CG::kMd[mMovrrOp]);
    RegOperand &opndSpReg = cgFunc.GetOpndBuilder()->CreatePReg(x64::RSP, k64BitSize, kRegTyInt);
    copyInsn.AddOpndChain(opndSpReg).AddOpndChain(opndFpReg);
    cgFunc.GetCurBB()->AppendInsn(copyInsn);

    /* sub $framesize, %rsp */
    if (cgFunc.GetFunction().HasCall() || cgFunc.HasVLAOrAlloca()) {
        MOperator mSubirOp = x64::MOP_subq_i_r;
        Insn &subInsn = cgFunc.GetInsnBuilder()->BuildInsn(mSubirOp, X64CG::kMd[mSubirOp]);
        auto *memLayout = static_cast<X64MemLayout *>(cgFunc.GetMemlayout());
        int64 trueFrameSize =
            static_cast<int64>(memLayout->StackFrameSize() + static_cast<X64CGFunc &>(cgFunc).SizeOfCalleeSaved());
        ImmOperand &opndImm = cgFunc.GetOpndBuilder()->CreateImm(k32BitSize, trueFrameSize);
        subInsn.AddOpndChain(opndImm).AddOpndChain(opndSpReg);
        cgFunc.GetCurBB()->AppendInsn(subInsn);
    }

    GenerateCalleeSavedRegs(true);

    bb.InsertAtBeginning(*x64CGFunc.GetDummyBB());
    x64CGFunc.GetDummyBB()->SetIsProEpilog(false);
    cgFunc.SetCurBB(*formerCurBB);
}

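/*
 * Epilogue shape, built in the dummy BB and appended to `bb`:
 *     <reloads of callee-saved registers>
 *     leaveq    (when a frame was allocated; otherwise popq %rbp)
 *     retq
 */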
void X64GenProEpilog::GenerateEpilog(BB &bb)
{
    auto &x64CGFunc = static_cast<X64CGFunc &>(cgFunc);
    BB *formerCurBB = cgFunc.GetCurBB();
    x64CGFunc.GetDummyBB()->ClearInsns();
    x64CGFunc.GetDummyBB()->SetIsProEpilog(true);
    cgFunc.SetCurBB(*x64CGFunc.GetDummyBB());

    GenerateCalleeSavedRegs(false);

    if (cgFunc.GetFunction().HasCall() || cgFunc.HasVLAOrAlloca()) {
        /*
         * leave is equivalent to:
         *   mov %rbp, %rsp
         *   pop %rbp
         */
        MOperator mLeaveOp = x64::MOP_leaveq;
        Insn &leaveInsn = cgFunc.GetInsnBuilder()->BuildInsn(mLeaveOp, X64CG::kMd[mLeaveOp]);
        cgFunc.GetCurBB()->AppendInsn(leaveInsn);
    } else {
        /* pop %rbp */
        MOperator mPopOp = x64::MOP_popq_r;
        Insn &popInsn = cgFunc.GetInsnBuilder()->BuildInsn(mPopOp, X64CG::kMd[mPopOp]);
        RegOperand &opndFpReg = cgFunc.GetOpndBuilder()->CreatePReg(x64::RBP, k64BitSize, kRegTyInt);
        popInsn.AddOpndChain(opndFpReg);
        cgFunc.GetCurBB()->AppendInsn(popInsn);
    }
    /* ret */
    MOperator mRetOp = x64::MOP_retq;
    Insn &retInsn = cgFunc.GetInsnBuilder()->BuildInsn(mRetOp, X64CG::kMd[mRetOp]);
    cgFunc.GetCurBB()->AppendInsn(retInsn);

    bb.AppendBBInsns(*x64CGFunc.GetDummyBB());
    x64CGFunc.GetDummyBB()->SetIsProEpilog(false);
    cgFunc.SetCurBB(*formerCurBB);
}

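/* Entry point: emit the prologue into the first BB and the epilogue into the last BB; deopt functions are skipped. */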
void X64GenProEpilog::Run()
{
    if (cgFunc.GetFunction().IsDeoptFunc()) {  // a deopt function does not need a prologue/epilogue
        return;
    }
    GenerateProlog(*(cgFunc.GetFirstBB()));
    GenerateEpilog(*(cgFunc.GetLastBB()));
}
} /* namespace maplebe */