/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "aarch64_phi_elimination.h"
#include "aarch64_cg.h"

namespace maplebe {
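/*
 * Phi elimination takes the CG IR out of SSA form. As an illustrative sketch
 * only (the driving pass is not in this file), a phi such as
 *   R3 = PHI(R1 <- BB1, R2 <- BB2)
 * is resolved by placing "mov R3, R1" in BB1 and "mov R3, R2" in BB2, after
 * which SSA version operands are rewritten back to ordinary virtual
 * registers; the exact insertion point is chosen by AppendMovAfterLastVregDef
 * below.
 */

/*
 * Create a fresh temporary virtual register with the same size and register
 * class as oriOpnd, used while converting out of CSSA.
 */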
RegOperand &AArch64PhiEliminate::CreateTempRegForCSSA(RegOperand &oriOpnd)
{
    return *phiEliAlloc.New<RegOperand>(GetAndIncreaseTempRegNO(), oriOpnd.GetSize(), oriOpnd.GetRegisterType());
}

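/*
 * Build a register-to-register copy for a resolved phi. 128-bit operands must
 * be vector registers and are copied with MOP_vmovvv; narrower operands use
 * the scalar mov matching their size and register class.
 */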
Insn &AArch64PhiEliminate::CreateMov(RegOperand &destOpnd, RegOperand &fromOpnd)
{
    DEBUG_ASSERT(destOpnd.GetRegisterType() == fromOpnd.GetRegisterType(), "this move is not supported in aarch64");
    bool is64bit = destOpnd.GetSize() == k64BitSize;
    bool isFloat = destOpnd.IsOfFloatOrSIMDClass();
    Insn *insn = nullptr;
    if (destOpnd.GetSize() == k128BitSize) {
        DEBUG_ASSERT(isFloat, "unexpected 128-bit int operand in aarch64");
        insn = &cgFunc->GetInsnBuilder()->BuildVectorInsn(MOP_vmovvv, AArch64CG::kMd[MOP_vmovvv]);
        insn->AddOpndChain(destOpnd).AddOpndChain(fromOpnd);
        auto *vecSpecSrc = cgFunc->GetMemoryPool()->New<VectorRegSpec>(k128BitSize >> k3ByteSize, k8BitSize);
        auto *vecSpecDest = cgFunc->GetMemoryPool()->New<VectorRegSpec>(k128BitSize >> k3ByteSize, k8BitSize);
        static_cast<VectorInsn *>(insn)->PushRegSpecEntry(vecSpecDest).PushRegSpecEntry(vecSpecSrc);
    } else {
        insn = &cgFunc->GetInsnBuilder()->BuildInsn(
            is64bit ? isFloat ? MOP_xvmovd : MOP_xmovrr : isFloat ? MOP_xvmovs : MOP_wmovrr, destOpnd, fromOpnd);
    }
    /* restore validBitsNum */
    if (destOpnd.GetValidBitsNum() != k64BitSize && destOpnd.GetValidBitsNum() != k32BitSize) {
        destOpnd.SetValidBitsNum(destOpnd.GetSize());
    }
    if (fromOpnd.GetValidBitsNum() != k64BitSize && fromOpnd.GetValidBitsNum() != k32BitSize) {
        fromOpnd.SetValidBitsNum(fromOpnd.GetSize());
    }
    /* copy remat info */
    MaintainRematInfo(destOpnd, fromOpnd, true);
    DEBUG_ASSERT(insn != nullptr, "create move insn failed");
    insn->SetIsPhiMovInsn(true);
    return *insn;
}

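/*
 * Map an SSA version operand back to a concrete virtual register. Three
 * special cases keep the register number consistent with the definition:
 *   case 1: the defining insn both defines and uses the operand, so the new
 *           register must reuse the tied operand's register number;
 *   case 2: the definition is inline asm (no aggressive optimization), where
 *           a "0"-style tied input or an explicit register binding fixes the
 *           register number;
 *   case 3: the operand is the condition flag (cc) register.
 */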
RegOperand &AArch64PhiEliminate::GetCGVirtualOpearnd(RegOperand &ssaOpnd, const Insn &curInsn)
{
    VRegVersion *ssaVersion = GetSSAInfo()->FindSSAVersion(ssaOpnd.GetRegisterNumber());
    DEBUG_ASSERT(ssaVersion != nullptr, "find ssaVersion failed");
    DEBUG_ASSERT(!ssaVersion->IsDeleted(), "ssaVersion has been deleted");
    RegOperand *regForRecreate = &ssaOpnd;
    if (curInsn.GetMachineOpcode() != MOP_asm && !curInsn.IsVectorOp() && !curInsn.IsSpecialIntrinsic() &&
        ssaVersion->GetAllUseInsns().empty() && !curInsn.IsAtomic()) {
        CHECK_FATAL(false, "dead version should have been deleted");
    }
    if (GetSSAInfo()->IsNoDefVReg(ssaOpnd.GetRegisterNumber())) {
        regForRecreate = MakeRoomForNoDefVreg(ssaOpnd);
    } else {
        DEBUG_ASSERT(regForRecreate->IsSSAForm(), "opnd is not in ssa form");
    }
    RegOperand &newReg = cgFunc->GetOrCreateVirtualRegisterOperand(*regForRecreate);

    DUInsnInfo *defInfo = ssaVersion->GetDefInsnInfo();
    Insn *defInsn = defInfo != nullptr ? defInfo->GetInsn() : nullptr;
    /*
     * case 1: the defining insn both defines and uses the operand
     * case 2: inline asm (no aggressive optimization), "0"-style tied input
     * case 3: cc flag operand
     */
    if (defInsn != nullptr) {
        /* case 1 */
        uint32 defUseIdx = defInsn->GetBothDefUseOpnd();
        if (defUseIdx != kInsnMaxOpnd) {
            if (defInfo->GetOperands().count(defUseIdx)) {
                CHECK_FATAL(defInfo->GetOperands()[defUseIdx] == 1, "multiple definition");
                Operand &preRegOpnd = defInsn->GetOperand(defUseIdx);
                DEBUG_ASSERT(preRegOpnd.IsRegister(), "unexpected operand type");
                newReg.SetRegisterNumber(static_cast<RegOperand &>(preRegOpnd).GetRegisterNumber());
            }
        }
        /* case 2 */
        if (defInsn->GetMachineOpcode() == MOP_asm) {
            auto &inputList = static_cast<ListOperand &>(defInsn->GetOperand(kAsmInputListOpnd));
            VRegVersion *lastVersion = nullptr;
            for (auto inputReg : inputList.GetOperands()) {
                lastVersion = GetSSAInfo()->FindSSAVersion(inputReg->GetRegisterNumber());
                if (lastVersion != nullptr && lastVersion->GetOriginalRegNO() == ssaVersion->GetOriginalRegNO()) {
                    break;
                }
                lastVersion = nullptr;
            }
            if (lastVersion != nullptr) {
                newReg.SetRegisterNumber(lastVersion->GetSSAvRegOpnd()->GetRegisterNumber());
            } else {
                const MapleMap<uint32, uint32> &bindingMap = defInsn->GetRegBinding();
                auto pairIt = bindingMap.find(ssaVersion->GetOriginalRegNO());
                if (pairIt != bindingMap.end()) {
                    newReg.SetRegisterNumber(pairIt->second);
                }
            }
        }
        /* case 3 */
        if (ssaVersion->GetOriginalRegNO() == kRFLAG) {
            newReg.SetRegisterNumber(kRFLAG);
        }
    } else {
        newReg.SetRegisterNumber(ssaVersion->GetOriginalRegNO());
    }
    MaintainRematInfo(newReg, ssaOpnd, true);
    newReg.SetOpndOutOfSSAForm();
    return newReg;
}

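/*
 * Insert a phi-resolving mov at the end of a predecessor block: after the
 * last phi insn if the reverse scan hits one first, otherwise before the
 * earliest insn of the trailing branch sequence. Non-machine insns are
 * skipped; the scan stops at the first insn that is neither a phi nor a
 * branch.
 */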
void AArch64PhiEliminate::AppendMovAfterLastVregDef(BB &bb, Insn &movInsn) const
{
    Insn *posInsn = nullptr;
    bool isPosPhi = false;
    FOR_BB_INSNS_REV(insn, &bb) {
        if (insn->IsPhi()) {
            posInsn = insn;
            isPosPhi = true;
            break;
        }
        if (!insn->IsMachineInstruction()) {
            continue;
        }
        if (insn->IsBranch()) {
            posInsn = insn;
            continue;
        }
        break;
    }
    CHECK_FATAL(posInsn != nullptr, "insert mov for phi failed");
    if (isPosPhi) {
        bb.InsertInsnAfter(*posInsn, movInsn);
    } else {
        bb.InsertInsnBefore(*posInsn, movInsn);
    }
}

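/*
 * Copy rematerialization info from fromOpnd to destOpnd (only when remat is
 * enabled and this is a copy). If the source is still in SSA form, look up
 * the pseudo-register of its original virtual register; otherwise propagate
 * any remat record already kept for the source register number.
 */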
void AArch64PhiEliminate::MaintainRematInfo(RegOperand &destOpnd, RegOperand &fromOpnd, bool isCopy)
{
    if (CGOptions::GetRematLevel() > 0 && isCopy) {
        if (fromOpnd.IsSSAForm()) {
            VRegVersion *fromSSAVersion = GetSSAInfo()->FindSSAVersion(fromOpnd.GetRegisterNumber());
            DEBUG_ASSERT(fromSSAVersion != nullptr, "nullptr check");
            regno_t rematRegNO = fromSSAVersion->GetOriginalRegNO();
            MIRPreg *fPreg = static_cast<AArch64CGFunc *>(cgFunc)->GetPseudoRegFromVirtualRegNO(rematRegNO);
            if (fPreg != nullptr) {
                PregIdx fPregIdx =
                    cgFunc->GetFunction().GetPregTab()->GetPregIdxFromPregno(static_cast<uint32>(fPreg->GetPregNo()));
                RecordRematInfo(destOpnd.GetRegisterNumber(), fPregIdx);
            }
        } else {
            regno_t rematRegNO = fromOpnd.GetRegisterNumber();
            PregIdx fPreg = FindRematInfo(rematRegNO);
            if (fPreg > 0) {
                RecordRematInfo(destOpnd.GetRegisterNumber(), fPreg);
            }
        }
    }
}

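/*
 * Rewrite every operand of an insn out of SSA form by dispatching each one to
 * the A64OperandPhiElmVisitor; operands are visited from last to first.
 */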
void AArch64PhiEliminate::ReCreateRegOperand(Insn &insn)
{
    auto opndNum = static_cast<int32>(insn.GetOperandSize());
    for (int i = opndNum - 1; i >= 0; --i) {
        Operand &opnd = insn.GetOperand(static_cast<uint32>(i));
        A64OperandPhiElmVisitor a64OpndPhiElmVisitor(this, insn, i);
        opnd.Accept(a64OpndPhiElmVisitor);
    }
}

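/* Replace a register operand that is still in SSA form with its concrete virtual register. */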
void A64OperandPhiElmVisitor::Visit(RegOperand *v)
{
    if (v->IsSSAForm()) {
        DEBUG_ASSERT(v->GetRegisterNumber() != kRFLAG, "cc flag should not appear as a normal register operand");
        insn->SetOperand(idx, a64PhiEliminator->GetCGVirtualOpearnd(*v, *insn));
    }
}

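/*
 * Rebuild a list operand (e.g. an asm input/output list): each SSA-form entry
 * is replaced by its concrete virtual register, non-SSA entries are kept
 * as-is, and the original list order is preserved.
 */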
void A64OperandPhiElmVisitor::Visit(ListOperand *v)
{
    std::list<RegOperand *> tempRegStore;
    auto &opndList = v->GetOperands();

    while (!opndList.empty()) {
        auto *regOpnd = opndList.front();
        opndList.pop_front();

        if (regOpnd->IsSSAForm()) {
            tempRegStore.push_back(&a64PhiEliminator->GetCGVirtualOpearnd(*regOpnd, *insn));
        } else {
            tempRegStore.push_back(regOpnd);
        }
    }

    DEBUG_ASSERT(v->GetOperands().empty(), "need to clean list");
    v->GetOperands().assign(tempRegStore.begin(), tempRegStore.end());
}

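/* Rewrite the base and/or index register of a memory operand if either is still in SSA form. */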
void A64OperandPhiElmVisitor::Visit(MemOperand *a64MemOpnd)
{
    RegOperand *baseRegOpnd = a64MemOpnd->GetBaseRegister();
    RegOperand *indexRegOpnd = a64MemOpnd->GetIndexRegister();
    if (baseRegOpnd != nullptr && baseRegOpnd->IsSSAForm()) {
        a64MemOpnd->SetBaseRegister(a64PhiEliminator->GetCGVirtualOpearnd(*baseRegOpnd, *insn));
    }
    if (indexRegOpnd != nullptr && indexRegOpnd->IsSSAForm()) {
        a64MemOpnd->SetIndexRegister(a64PhiEliminator->GetCGVirtualOpearnd(*indexRegOpnd, *insn));
    }
}
} // namespace maplebe