1 /*
2 * Copyright (c) 2023 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "aarch64_prop.h"
17 #include "aarch64_isa.h"
18 #include "aarch64_cg.h"
19 #include "aarch64_reg_coalesce.h"
20 #include <climits>
21
22 namespace maplebe {
23
24 #define PROP_DUMP CG_DEBUG_FUNC(cgFunc)
25
/*
 * Conservatively decide whether (value1 op value2) may overflow, where op is
 * add (isAdd) or sub, evaluated at the given width (is64Bit) and signedness.
 * Returns false (i.e. "safe") when either immediate is still "vary" (its final
 * value is not known yet), since no meaningful check can be made then.
 */
bool MayOverflow(const ImmOperand &value1, const ImmOperand &value2, bool is64Bit, bool isAdd, bool isSigned)
{
    if (value1.GetVary() || value2.GetVary()) {
        return false;
    }
    int64 cstA = value1.GetValue();
    int64 cstB = value2.GetValue();
    if (isAdd) {
        /* compute the sum in unsigned arithmetic to avoid signed-overflow UB */
        int64 res = static_cast<int64>(static_cast<uint64>(cstA) + static_cast<uint64>(cstB));
        if (!isSigned) {
            /* unsigned add overflows iff the result wrapped below an operand */
            return static_cast<uint64>(res) < static_cast<uint64>(cstA);
        }
        /* index of the sign bit for the operation width (63 or 31) */
        auto rightShiftNumToGetSignFlag = (is64Bit ? 64 : 32) - 1;
        /* signed add overflows iff the result's sign differs from both operands' signs.
         * NOTE(review): for the 32-bit case the shift is applied to the full 64-bit
         * representation, so this relies on the stored values being proper
         * sign-/zero-extended 32-bit quantities — TODO confirm against callers. */
        return (static_cast<uint64>(res) >> rightShiftNumToGetSignFlag !=
                static_cast<uint64>(cstA) >> rightShiftNumToGetSignFlag) &&
               (static_cast<uint64>(res) >> rightShiftNumToGetSignFlag !=
                static_cast<uint64>(cstB) >> rightShiftNumToGetSignFlag);
    } else {
        /* sub */
        if (!isSigned) {
            /* unsigned sub underflows iff the minuend is smaller */
            return cstA < cstB;
        }
        int64 res = static_cast<int64>(static_cast<uint64>(cstA) - static_cast<uint64>(cstB));
        auto rightShiftNumToGetSignFlag = (is64Bit ? 64 : 32) - 1;
        /* signed sub overflows iff operands have different signs and the
         * result's sign differs from the minuend's sign */
        return (static_cast<uint64>(cstA) >> rightShiftNumToGetSignFlag !=
                static_cast<uint64>(cstB) >> rightShiftNumToGetSignFlag) &&
               (static_cast<uint64>(res) >> rightShiftNumToGetSignFlag !=
                static_cast<uint64>(cstA) >> rightShiftNumToGetSignFlag);
    }
}
56
IsInLimitCopyRange(VRegVersion * toBeReplaced)57 bool AArch64Prop::IsInLimitCopyRange(VRegVersion *toBeReplaced)
58 {
59 uint32 baseID = toBeReplaced->GetDefInsnInfo()->GetInsn()->GetId();
60 MapleUnorderedMap<uint32, DUInsnInfo *> &useList = toBeReplaced->GetAllUseInsns();
61 for (auto it : useList) {
62 if (it.second->GetInsn()->GetId() - baseID > k16BitSize) {
63 return false;
64 }
65 }
66 return true;
67 }
68
CopyProp()69 void AArch64Prop::CopyProp()
70 {
71 PropOptimizeManager optManager;
72 optManager.Optimize<CopyRegProp>(*cgFunc, GetSSAInfo(), GetRegll());
73 optManager.Optimize<ValidBitNumberProp>(*cgFunc, GetSSAInfo());
74 optManager.Optimize<RedundantPhiProp>(*cgFunc, GetSSAInfo());
75 }
76
TargetProp(Insn & insn)77 void AArch64Prop::TargetProp(Insn &insn)
78 {
79 A64ConstProp a64ConstProp(*memPool, *cgFunc, *GetSSAInfo(), insn);
80 a64ConstProp.DoOpt();
81 A64StrLdrProp a64StrLdrProp(*memPool, *cgFunc, *GetSSAInfo(), insn, *GetDce());
82 a64StrLdrProp.DoOpt();
83 }
84
/*
 * Constant propagation driver: when curInsn is "mov reg, #imm", walk every use
 * of the defined SSA version and try to substitute the constant into the use.
 * For a zero immediate, additionally try to rewrite the use with wzr/xzr.
 */
void A64ConstProp::DoOpt()
{
    if (curInsn->GetMachineOpcode() == MOP_wmovri32 || curInsn->GetMachineOpcode() == MOP_xmovri64) {
        Operand &destOpnd = curInsn->GetOperand(kInsnFirstOpnd);
        CHECK_FATAL(destOpnd.IsRegister(), "must be reg operand");
        auto &destReg = static_cast<RegOperand &>(destOpnd);
        if (destReg.IsSSAForm()) {
            VRegVersion *destVersion = ssaInfo->FindSSAVersion(destReg.GetRegisterNumber());
            DEBUG_ASSERT(destVersion != nullptr, "find Version failed");
            Operand &constOpnd = curInsn->GetOperand(kInsnSecondOpnd);
            CHECK_FATAL(constOpnd.IsImmediate(), "must be imm operand");
            auto &immOperand = static_cast<ImmOperand &>(constOpnd);
            bool isZero = immOperand.IsZero();
            DEBUG_ASSERT(destVersion != nullptr, "destVersion should not be nullptr");
            /* NOTE(review): ZeroRegProp/ConstProp may rewrite or remove use insns
             * while this loop walks GetAllUseInsns() — presumably the map copy /
             * iteration order makes this safe; verify against the SSA info impl. */
            for (auto useDUInfoIt : destVersion->GetAllUseInsns()) {
                if (isZero) {
                    /* replace the use with the zero register where legal */
                    ZeroRegProp(*useDUInfoIt.second, *destVersion->GetSSAvRegOpnd());
                    destVersion->CheckDeadUse(*useDUInfoIt.second->GetInsn());
                }
                (void)ConstProp(*useDUInfoIt.second, immOperand);
            }
        }
    }
}
109
/*
 * Replace a use of toReplaceReg (known to hold zero) with wzr/xzr.
 * Only performed for stores, conditional-define insns, and the special
 * bfi-from-source case; other uses are handled by regular const prop.
 */
void A64ConstProp::ZeroRegProp(DUInsnInfo &useDUInfo, RegOperand &toReplaceReg)
{
    auto *useInsn = useDUInfo.GetInsn();
    const InsnDesc *md = &AArch64CG::kMd[(useInsn->GetMachineOpcode())];
    /* special case: bfi whose only use of the register is the source operand */
    bool isSpecficCase = useInsn->GetMachineOpcode() == MOP_wbfirri5i5 || useInsn->GetMachineOpcode() == MOP_xbfirri6i6;
    isSpecficCase &=
        (useDUInfo.GetOperands().size() == 1) && (useDUInfo.GetOperands().begin()->first == kInsnSecondOpnd);
    if (useInsn->IsStore() || md->IsCondDef() || isSpecficCase) {
        RegOperand &zeroOpnd = cgFunc->GetZeroOpnd(toReplaceReg.GetSize());
        for (auto &opndIt : useDUInfo.GetOperands()) {
            /* for stores, only the stored value (operand 0) may become the zero reg */
            if (useInsn->IsStore() && opndIt.first != 0) {
                return;
            }
            Operand &opnd = useInsn->GetOperand(opndIt.first);
            A64ReplaceRegOpndVisitor replaceRegOpndVisitor(*cgFunc, *useInsn, opndIt.first, toReplaceReg, zeroOpnd);
            opnd.Accept(replaceRegOpndVisitor);
            /* NOTE(review): ClearDU mutates the operand map being iterated —
             * presumably safe for the Maple map type; confirm no invalidation. */
            useDUInfo.ClearDU(opndIt.first);
        }
    }
}
131
GetReversalMOP(MOperator arithMop)132 MOperator A64ConstProp::GetReversalMOP(MOperator arithMop)
133 {
134 switch (arithMop) {
135 case MOP_waddrri12:
136 return MOP_wsubrri12;
137 case MOP_xaddrri12:
138 return MOP_xsubrri12;
139 case MOP_xsubrri12:
140 return MOP_xaddrri12;
141 case MOP_wsubrri12:
142 return MOP_waddrri12;
143 default:
144 CHECK_FATAL(false, "NYI");
145 break;
146 }
147 return MOP_undef;
148 }
149
/*
 * Map a register-register opcode to the corresponding register-immediate
 * opcode. withLeftShift selects the shifted-immediate (rri24) form and is
 * only meaningful for add/sub; logical ops and mov ignore it.
 */
MOperator A64ConstProp::GetRegImmMOP(MOperator regregMop, bool withLeftShift)
{
    switch (regregMop) {
        case MOP_xaddrrrs:
        case MOP_xaddrrr: {
            return withLeftShift ? MOP_xaddrri24 : MOP_xaddrri12;
        }
        case MOP_waddrrrs:
        case MOP_waddrrr: {
            return withLeftShift ? MOP_waddrri24 : MOP_waddrri12;
        }
        case MOP_xsubrrrs:
        case MOP_xsubrrr: {
            return withLeftShift ? MOP_xsubrri24 : MOP_xsubrri12;
        }
        case MOP_wsubrrrs:
        case MOP_wsubrrr: {
            return withLeftShift ? MOP_wsubrri24 : MOP_wsubrri12;
        }
        /* logical ops use the bitmask-immediate encodings */
        case MOP_xandrrrs:
            return MOP_xandrri13;
        case MOP_wandrrrs:
            return MOP_wandrri12;
        case MOP_xeorrrrs:
            return MOP_xeorrri13;
        case MOP_weorrrrs:
            return MOP_weorrri12;
        /* bfi folds to orr-immediate when the inserted field is all ones */
        case MOP_xiorrrrs:
        case MOP_xbfirri6i6:
            return MOP_xiorrri13;
        case MOP_wiorrrrs:
        case MOP_wbfirri5i5:
            return MOP_wiorrri12;
        case MOP_xmovrr: {
            return MOP_xmovri64;
        }
        case MOP_wmovrr: {
            return MOP_wmovri32;
        }
        default:
            CHECK_FATAL(false, "NYI");
            break;
    }
    return MOP_undef;
}
195
/*
 * Fold an arithmetic insn whose BOTH register sources are the same constant
 * (constVal) into a single mov. Writes the folded value into newVal and
 * returns the mov opcode of the matching width, or asserts for unsupported
 * opcodes (newVal then stays undefined and MOP_undef is returned).
 */
MOperator A64ConstProp::GetFoldMopAndVal(int64 &newVal, int64 constVal, const Insn &arithInsn)
{
    MOperator arithMop = arithInsn.GetMachineOpcode();
    MOperator newMop = MOP_undef;
    switch (arithMop) {
        case MOP_waddrrr:
        case MOP_xaddrrr: {
            /* add r, c, c  =>  mov r, 2c */
            newVal = constVal + constVal;
            newMop = (arithMop == MOP_waddrrr) ? MOP_wmovri32 : MOP_xmovri64;
            break;
        }
        case MOP_waddrrrs:
        case MOP_xaddrrrs: {
            /* add r, c, c <shift> #amount  =>  mov r, c + (c <shift> amount) */
            auto &shiftOpnd = static_cast<BitShiftOperand &>(arithInsn.GetOperand(kInsnFourthOpnd));
            uint32 amount = shiftOpnd.GetShiftAmount();
            BitShiftOperand::ShiftOp sOp = shiftOpnd.GetShiftOp();
            switch (sOp) {
                case BitShiftOperand::kLSL: {
                    /* NOTE(review): the cast to 32-bit `unsigned` truncates for the
                     * 64-bit (x-form) opcode — TODO confirm intended width */
                    newVal = constVal + static_cast<int64>((static_cast<unsigned>(constVal) << amount));
                    break;
                }
                case BitShiftOperand::kLSR: {
                    newVal = constVal + (static_cast<unsigned>(constVal) >> amount);
                    break;
                }
                case BitShiftOperand::kASR: {
                    newVal = constVal + (constVal >> amount);
                    break;
                }
                default:
                    CHECK_FATAL(false, "NYI");
                    break;
            }
            newMop = (arithMop == MOP_waddrrrs) ? MOP_wmovri32 : MOP_xmovri64;
            break;
        }
        case MOP_wsubrrr:
        case MOP_xsubrrr: {
            /* sub r, c, c  =>  mov r, 0 */
            newVal = 0;
            newMop = (arithMop == MOP_wsubrrr) ? MOP_wmovri32 : MOP_xmovri64;
            break;
        }
        case MOP_wsubrrrs:
        case MOP_xsubrrrs: {
            /* sub r, c, c <shift> #amount  =>  mov r, c - (c <shift> amount) */
            auto &shiftOpnd = static_cast<BitShiftOperand &>(arithInsn.GetOperand(kInsnFourthOpnd));
            uint32 amount = shiftOpnd.GetShiftAmount();
            BitShiftOperand::ShiftOp sOp = shiftOpnd.GetShiftOp();
            switch (sOp) {
                case BitShiftOperand::kLSL: {
                    /* NOTE(review): same 32-bit truncation concern as the add case */
                    newVal = constVal - static_cast<int64>((static_cast<unsigned>(constVal) << amount));
                    break;
                }
                case BitShiftOperand::kLSR: {
                    newVal = constVal - (static_cast<unsigned>(constVal) >> amount);
                    break;
                }
                case BitShiftOperand::kASR: {
                    newVal = constVal - (constVal >> amount);
                    break;
                }
                default:
                    CHECK_FATAL(false, "NYI");
                    break;
            }
            newMop = (arithMop == MOP_wsubrrrs) ? MOP_wmovri32 : MOP_xmovri64;
            break;
        }
        default:
            DEBUG_ASSERT(false, "this case is not supported currently");
            break;
    }
    return newMop;
}
269
ReplaceInsnAndUpdateSSA(Insn & oriInsn,Insn & newInsn) const270 void A64ConstProp::ReplaceInsnAndUpdateSSA(Insn &oriInsn, Insn &newInsn) const
271 {
272 ssaInfo->ReplaceInsn(oriInsn, newInsn);
273 oriInsn.GetBB()->ReplaceInsn(oriInsn, newInsn);
274 /* dump insn replacement here */
275 }
276
MovConstReplace(DUInsnInfo & useDUInfo,ImmOperand & constOpnd)277 bool A64ConstProp::MovConstReplace(DUInsnInfo &useDUInfo, ImmOperand &constOpnd)
278 {
279 Insn *useInsn = useDUInfo.GetInsn();
280 MOperator curMop = useInsn->GetMachineOpcode();
281 if (useDUInfo.GetOperands().size() == 1) {
282 MOperator newMop = GetRegImmMOP(curMop, false);
283 Operand &destOpnd = useInsn->GetOperand(kInsnFirstOpnd);
284 if (constOpnd.IsSingleInstructionMovable(destOpnd.GetSize())) {
285 auto useOpndInfoIt = useDUInfo.GetOperands().begin();
286 uint32 useOpndIdx = useOpndInfoIt->first;
287 DEBUG_ASSERT(useOpndIdx == kInsnSecondOpnd, "invalid instruction in ssa form");
288 if (useOpndIdx == kInsnSecondOpnd) {
289 Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(newMop, destOpnd, constOpnd);
290 ReplaceInsnAndUpdateSSA(*useInsn, newInsn);
291 return true;
292 }
293 }
294 } else {
295 DEBUG_ASSERT(false, "invalid instruction in ssa form");
296 }
297 return false;
298 }
299
/* Fold a constant source into an add/sub(/logical) use insn; supports the
 * add/sub family plus the "both operands are the same constant" fold. */
bool A64ConstProp::ArithmeticConstReplace(DUInsnInfo &useDUInfo, ImmOperand &constOpnd, ArithmeticType aT)
{
    Insn *useInsn = useDUInfo.GetInsn();
    MOperator curMop = useInsn->GetMachineOpcode();
    if (useDUInfo.GetOperands().size() == 1) {
        /* the constant feeds exactly one source operand of the use insn */
        MOperator newMop = GetRegImmMOP(curMop, false);
        auto useOpndInfoIt = useDUInfo.GetOperands().begin();
        uint32 useOpndIdx = useOpndInfoIt->first;
        CHECK_FATAL(useOpndIdx == kInsnSecondOpnd || useOpndIdx == kInsnThirdOpnd, "check this insn");
        Insn *newInsn = nullptr;
        /* NOTE(review): tempImm is truncated to 32 bits for w-form uses but is
         * never passed to IsOperandImmValid/BuildInsn below (constOpnd is used
         * instead), and it is shadowed by another tempImm later — looks like the
         * truncated value was meant to be used; TODO confirm intent. */
        auto &tempImm = static_cast<ImmOperand &>(*constOpnd.Clone(*constPropMp));
        if (tempImm.GetSize() == k64BitSize && useInsn->GetOperandSize(useOpndIdx) == k32BitSize) {
            uint64 newVal = static_cast<uint64>(tempImm.GetValue()) & UINT32_MAX;
            tempImm.SetValue(static_cast<int64>(newVal));
        }
        if (static_cast<AArch64CGFunc *>(cgFunc)->IsOperandImmValid(newMop, &constOpnd, kInsnThirdOpnd)) {
            if (useOpndIdx == kInsnThirdOpnd) {
                newInsn = &cgFunc->GetInsnBuilder()->BuildInsn(newMop, useInsn->GetOperand(kInsnFirstOpnd),
                                                               useInsn->GetOperand(kInsnSecondOpnd), constOpnd);
            } else if (useOpndIdx == kInsnSecondOpnd && aT == kAArch64Add) { /* swap operand due to legality in aarch */
                newInsn = &cgFunc->GetInsnBuilder()->BuildInsn(newMop, useInsn->GetOperand(kInsnFirstOpnd),
                                                               useInsn->GetOperand(kInsnThirdOpnd), constOpnd);
            }
        }
        /* try aggressive opt in aarch64 add and sub */
        if (newInsn == nullptr && (aT == kAArch64Add || aT == kAArch64Sub)) {
            auto *tempImm = static_cast<ImmOperand *>(constOpnd.Clone(*constPropMp));
            /* try aarch64 imm shift mode, aarch64 data processing instructions have 12 bits of space for values */
            tempImm->SetValue(tempImm->GetValue() >> 12);
            /* NOTE(review): `< CGOptions::kLevel0` paired with an assert looks like
             * a placeholder for an unimplemented shifted-imm path — TODO confirm */
            if (static_cast<AArch64CGFunc *>(cgFunc)->IsOperandImmValid(newMop, tempImm, kInsnThirdOpnd) &&
                CGOptions::GetInstance().GetOptimizeLevel() < CGOptions::kLevel0) {
                DEBUG_ASSERT(false, "NIY");
            }
            auto *zeroImm = &(static_cast<AArch64CGFunc *>(cgFunc)->CreateImmOperand(0, constOpnd.GetSize(), true));
            /* value in immOpnd is signed */
            if (MayOverflow(*zeroImm, constOpnd, constOpnd.GetSize() == k64BitSize, false, true)) {
                return false;
            }
            /* (constA - var) can not reversal to (var + (-constA)) */
            if (useOpndIdx == kInsnSecondOpnd && aT == kAArch64Sub) {
                return false;
            }
            /* Addition and subtraction reversal */
            tempImm->SetValue(-constOpnd.GetValue());
            newMop = GetReversalMOP(newMop);
            if (static_cast<AArch64CGFunc *>(cgFunc)->IsOperandImmValid(newMop, tempImm, kInsnThirdOpnd)) {
                auto *cgImm = static_cast<ImmOperand *>(tempImm->Clone(*cgFunc->GetMemoryPool()));
                newInsn = &cgFunc->GetInsnBuilder()->BuildInsn(newMop, useInsn->GetOperand(kInsnFirstOpnd),
                                                               useInsn->GetOperand(kInsnSecondOpnd), *cgImm);
                if (useOpndIdx == kInsnSecondOpnd) { /* swap operand due to legality in aarch */
                    newInsn->SetOperand(kInsnSecondOpnd, useInsn->GetOperand(kInsnThirdOpnd));
                }
            }
        }
        if (newInsn != nullptr) {
            ReplaceInsnAndUpdateSSA(*useInsn, *newInsn);
            return true;
        }
    } else if (useDUInfo.GetOperands().size() == kOpndNum2) {
        /* only support add & sub now: both sources are the same constant */
        int64 newValue = 0;
        MOperator newMop = GetFoldMopAndVal(newValue, constOpnd.GetValue(), *useInsn);
        bool isSigned = (newValue < 0);
        auto *tempImm = static_cast<ImmOperand *>(constOpnd.Clone(*constPropMp));
        tempImm->SetValue(newValue);
        tempImm->SetSigned(isSigned);
        if (tempImm->IsSingleInstructionMovable()) {
            auto *newImmOpnd = static_cast<ImmOperand *>(tempImm->Clone(*cgFunc->GetMemoryPool()));
            auto &newInsn =
                cgFunc->GetInsnBuilder()->BuildInsn(newMop, useInsn->GetOperand(kInsnFirstOpnd), *newImmOpnd);
            ReplaceInsnAndUpdateSSA(*useInsn, newInsn);
            return true;
        } else {
            CHECK_FATAL(false, "invalid immediate");
        }
    } else {
        DEBUG_ASSERT(false, "invalid instruction in ssa form");
    }
    return false;
}
381
ArithmeticConstFold(DUInsnInfo & useDUInfo,const ImmOperand & constOpnd,ArithmeticType aT)382 bool A64ConstProp::ArithmeticConstFold(DUInsnInfo &useDUInfo, const ImmOperand &constOpnd, ArithmeticType aT)
383 {
384 Insn *useInsn = useDUInfo.GetInsn();
385 if (useDUInfo.GetOperands().size() == 1) {
386 Operand &existedImm = useInsn->GetOperand(kInsnThirdOpnd);
387 DEBUG_ASSERT(existedImm.IsImmediate(), "must be");
388 Operand &destOpnd = useInsn->GetOperand(kInsnFirstOpnd);
389 bool is64Bit = destOpnd.GetSize() == k64BitSize;
390 ImmOperand *foldConst = CanDoConstFold(constOpnd, static_cast<ImmOperand &>(existedImm), aT, is64Bit);
391 if (foldConst != nullptr) {
392 MOperator newMop = is64Bit ? MOP_xmovri64 : MOP_wmovri32;
393 Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(newMop, destOpnd, *foldConst);
394 ReplaceInsnAndUpdateSSA(*useInsn, newInsn);
395 return true;
396 }
397 }
398 return false;
399 }
400
ShiftConstReplace(DUInsnInfo & useDUInfo,const ImmOperand & constOpnd)401 bool A64ConstProp::ShiftConstReplace(DUInsnInfo &useDUInfo, const ImmOperand &constOpnd)
402 {
403 Insn *useInsn = useDUInfo.GetInsn();
404 MOperator curMop = useInsn->GetMachineOpcode();
405 if (useDUInfo.GetOperands().size() == 1) {
406 auto useOpndInfoIt = useDUInfo.GetOperands().begin();
407 uint32 useOpndIdx = useOpndInfoIt->first;
408 if (useOpndIdx == kInsnThirdOpnd) {
409 auto &shiftBit = static_cast<BitShiftOperand &>(useInsn->GetOperand(kInsnFourthOpnd));
410 int64 val = constOpnd.GetValue();
411 if (shiftBit.GetShiftOp() == BitShiftOperand::kLSL) {
412 val = val << shiftBit.GetShiftAmount();
413 } else if (shiftBit.GetShiftOp() == BitShiftOperand::kLSR) {
414 val = val >> shiftBit.GetShiftAmount();
415 } else if (shiftBit.GetShiftOp() == BitShiftOperand::kASR) {
416 val = static_cast<int64>((static_cast<uint64>(val)) >> shiftBit.GetShiftAmount());
417 } else {
418 CHECK_FATAL(false, "shift type is not defined");
419 }
420 auto *newImm = static_cast<ImmOperand *>(constOpnd.Clone(*constPropMp));
421 newImm->SetValue(val);
422 MOperator newMop = GetRegImmMOP(curMop, false);
423 if (static_cast<AArch64CGFunc *>(cgFunc)->IsOperandImmValid(newMop, newImm, kInsnThirdOpnd)) {
424 auto *cgNewImm = static_cast<ImmOperand *>(constOpnd.Clone(*cgFunc->GetMemoryPool()));
425 Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(newMop, useInsn->GetOperand(kInsnFirstOpnd),
426 useInsn->GetOperand(kInsnSecondOpnd), *cgNewImm);
427 ReplaceInsnAndUpdateSSA(*useInsn, newInsn);
428 return true;
429 }
430 }
431 }
432 return false;
433 }
434
ConstProp(DUInsnInfo & useDUInfo,ImmOperand & constOpnd)435 bool A64ConstProp::ConstProp(DUInsnInfo &useDUInfo, ImmOperand &constOpnd)
436 {
437 MOperator curMop = useDUInfo.GetInsn()->GetMachineOpcode();
438 switch (curMop) {
439 case MOP_xmovrr:
440 case MOP_wmovrr: {
441 return MovConstReplace(useDUInfo, constOpnd);
442 }
443 case MOP_xsubrrr:
444 case MOP_wsubrrr: {
445 return ArithmeticConstReplace(useDUInfo, constOpnd, kAArch64Sub);
446 }
447 case MOP_xaddrrr:
448 case MOP_waddrrr: {
449 return ArithmeticConstReplace(useDUInfo, constOpnd, kAArch64Add);
450 }
451 case MOP_waddrri12:
452 case MOP_xaddrri12: {
453 return ArithmeticConstFold(useDUInfo, constOpnd, kAArch64Add);
454 }
455 case MOP_xsubrri12:
456 case MOP_wsubrri12: {
457 return ArithmeticConstFold(useDUInfo, constOpnd, kAArch64Sub);
458 }
459 case MOP_xiorrrrs:
460 case MOP_wiorrrrs:
461 case MOP_xeorrrrs:
462 case MOP_weorrrrs:
463 case MOP_xandrrrs:
464 case MOP_wandrrrs:
465 case MOP_xaddrrrs:
466 case MOP_waddrrrs:
467 case MOP_wsubrrrs:
468 case MOP_xsubrrrs: {
469 return ShiftConstReplace(useDUInfo, constOpnd);
470 }
471 case MOP_wbfirri5i5:
472 case MOP_xbfirri6i6: {
473 return BitInsertReplace(useDUInfo, constOpnd);
474 }
475 default:
476 break;
477 }
478 return false;
479 }
480
BitInsertReplace(DUInsnInfo & useDUInfo,const ImmOperand & constOpnd)481 bool A64ConstProp::BitInsertReplace(DUInsnInfo &useDUInfo, const ImmOperand &constOpnd)
482 {
483 Insn *useInsn = useDUInfo.GetInsn();
484 MOperator curMop = useInsn->GetMachineOpcode();
485 if (useDUInfo.GetOperands().size() == 1) {
486 auto useOpndInfoIt = useDUInfo.GetOperands().begin();
487 uint32 useOpndIdx = useOpndInfoIt->first;
488 if (useOpndIdx == kInsnSecondOpnd) {
489 auto &lsbOpnd = static_cast<ImmOperand &>(useInsn->GetOperand(kInsnThirdOpnd));
490 auto &widthOpnd = static_cast<ImmOperand &>(useInsn->GetOperand(kInsnFourthOpnd));
491 auto val = static_cast<uint64>(constOpnd.GetValue());
492 /* bfi width in the range [1 -64] */
493 auto width = static_cast<uint64>(widthOpnd.GetValue());
494 /* bit number of the lsb of the destination bitfield */
495 auto lsb = static_cast<uint64>(lsbOpnd.GetValue());
496 val = val & ((1U << width) - 1U);
497 if (static_cast<uint64>(__builtin_popcountl(val)) == width) {
498 val = val << lsb;
499 MOperator newMop = GetRegImmMOP(curMop, false);
500 Operand &newOpnd = cgFunc->CreateImmOperand(PTY_i64, static_cast<int64>(val));
501 if (static_cast<AArch64CGFunc *>(cgFunc)->IsOperandImmValid(newMop, &newOpnd, kInsnThirdOpnd)) {
502 Insn &newInsn = cgFunc->GetInsnBuilder()->BuildInsn(newMop, useInsn->GetOperand(kInsnFirstOpnd),
503 useInsn->GetOperand(kInsnFirstOpnd), newOpnd);
504 ReplaceInsnAndUpdateSSA(*useInsn, newInsn);
505 return true;
506 }
507 }
508 }
509 }
510 return false;
511 }
512
/*
 * Compute value1 ± value2 at compile time and return it as a new ImmOperand
 * allocated from the CGFunc memory pool, or nullptr when folding is unsafe:
 * possible overflow, out-of-range for the 32-bit width, unsupported op, or a
 * result not encodable by a single mov.
 */
ImmOperand *A64ConstProp::CanDoConstFold(const ImmOperand &value1, const ImmOperand &value2, ArithmeticType aT,
                                         bool is64Bit)
{
    auto *tempImm = static_cast<ImmOperand *>(value1.Clone(*constPropMp));
    int64 newVal = 0;
    bool isSigned = value1.IsSignedValue();
    /* mixed signedness is treated as unsigned for the range checks below */
    if (value1.IsSignedValue() != value2.IsSignedValue()) {
        isSigned = false;
    }
    if (MayOverflow(value1, value2, is64Bit, aT == kAArch64Add, isSigned)) {
        return nullptr;
    }
    switch (aT) {
        case kAArch64Add: {
            newVal = value1.GetValue() + value2.GetValue();
            break;
        }
        case kAArch64Sub: {
            newVal = value1.GetValue() - value2.GetValue();
            break;
        }
        default:
            return nullptr;
    }
    /* 32-bit results must also fit their (un)signed 32-bit range */
    if (!is64Bit && isSigned && (newVal > INT_MAX || newVal < INT_MIN)) {
        return nullptr;
    }
    if (!is64Bit && !isSigned && (newVal > UINT_MAX || newVal < 0)) {
        return nullptr;
    }
    if (newVal < 0) {
        tempImm->SetSigned();
    }
    tempImm->SetValue(newVal);
    /* propagate the "not yet adjusted" stack-offset marker if present */
    if (value2.GetVary() == kUnAdjustVary) {
        tempImm->SetVary(kUnAdjustVary);
    }
    bool canBeMove = tempImm->IsSingleInstructionMovable(k64BitSize);
    return canBeMove ? static_cast<ImmOperand *>(tempImm->Clone(*cgFunc->GetMemoryPool())) : nullptr;
}
553
DoOpt()554 void A64StrLdrProp::DoOpt()
555 {
556 DEBUG_ASSERT(curInsn != nullptr, "not input insn");
557 bool tryOptAgain = false;
558 do {
559 tryOptAgain = false;
560 MemOperand *currMemOpnd = StrLdrPropPreCheck(*curInsn);
561 if (currMemOpnd != nullptr && memPropMode != kUndef) {
562 /* can be changed to recursive propagation */
563 if (ReplaceMemOpnd(*currMemOpnd, nullptr)) {
564 tryOptAgain = true;
565 }
566 replaceVersions.clear();
567 }
568 } while (tryOptAgain);
569 }
570
/*
 * Try to replace the register being propagated (base or offset of
 * currMemOpnd, per memPropMode) in ALL of its uses at once. Partial
 * replacement is deliberately avoided (register-pressure concern): the
 * rewrite is committed only if every use can be replaced.
 */
bool A64StrLdrProp::ReplaceMemOpnd(const MemOperand &currMemOpnd, const Insn *defInsn)
{
    /* captures defInsn by reference and MAY ASSIGN it: when called with a null
     * defInsn, the lambda looks up the SSA def of regOpnd and also collects
     * every use insn of that version */
    auto GetDefInsn = [&defInsn, this](const RegOperand &regOpnd, std::vector<Insn *> &allUseInsns) -> void {
        if (regOpnd.IsSSAForm() && defInsn == nullptr) {
            VRegVersion *replacedV = ssaInfo->FindSSAVersion(regOpnd.GetRegisterNumber());
            if (replacedV->GetDefInsnInfo() != nullptr) {
                for (auto it : replacedV->GetAllUseInsns()) {
                    allUseInsns.emplace_back(it.second->GetInsn());
                }
                defInsn = replacedV->GetDefInsnInfo()->GetInsn();
            }
        }
    };
    RegOperand *replacedReg = nullptr;
    std::vector<Insn *> allUseInsns;
    std::vector<MemOperand *> newMemOpnds;
    bool doFullReplaceProp = true; /* due to register pressure, do not do partial prop */
    if (memPropMode == kPropBase) {
        replacedReg = currMemOpnd.GetBaseRegister();
    } else {
        Operand *offset = currMemOpnd.GetOffset();
        DEBUG_ASSERT(offset->IsRegister(), "must be");
        replacedReg = static_cast<RegOperand *>(offset);
    }
    CHECK_FATAL(replacedReg != nullptr, "check this insn");
    GetDefInsn(*replacedReg, allUseInsns);
    if (defInsn != nullptr) {
        for (auto useInsn : allUseInsns) {
            MemOperand *oldMemOpnd = StrLdrPropPreCheck(*useInsn, memPropMode);
            if (CheckSameReplace(*replacedReg, oldMemOpnd)) {
                MemOperand *newMemOpnd = SelectReplaceMem(*defInsn, *oldMemOpnd);
                if (newMemOpnd != nullptr) {
                    uint32 opndIdx = GetMemOpndIdx(oldMemOpnd, *useInsn);
                    if (CheckNewMemOffset(*useInsn, newMemOpnd, opndIdx)) {
                        newMemOpnds.emplace_back(newMemOpnd);
                        continue;
                    }
                }
            }
            /* any failing use aborts the whole replacement */
            doFullReplaceProp = false;
            break;
        }
    } else {
        doFullReplaceProp = false;
    }
    if (doFullReplaceProp) {
        /* newMemOpnds[i] pairs with allUseInsns[i] — sizes match only on full success */
        for (size_t i = 0; i < newMemOpnds.size(); ++i) {
            DoMemReplace(*replacedReg, *newMemOpnds[i], *allUseInsns[i]);
        }
        return true;
    }
    return false;
}
624
CheckSameReplace(const RegOperand & replacedReg,const MemOperand * memOpnd) const625 bool A64StrLdrProp::CheckSameReplace(const RegOperand &replacedReg, const MemOperand *memOpnd) const
626 {
627 if (memOpnd != nullptr && memPropMode != kUndef) {
628 if (memPropMode == kPropBase) {
629 return replacedReg.GetRegisterNumber() == memOpnd->GetBaseRegister()->GetRegisterNumber();
630 } else {
631 Operand *offset = memOpnd->GetOffset();
632 DEBUG_ASSERT(offset->IsRegister(), "must be");
633 return replacedReg.GetRegisterNumber() == static_cast<RegOperand *>(offset)->GetRegisterNumber();
634 }
635 }
636 return false;
637 }
638
GetMemOpndIdx(MemOperand * newMemOpnd,const Insn & insn) const639 uint32 A64StrLdrProp::GetMemOpndIdx(MemOperand *newMemOpnd, const Insn &insn) const
640 {
641 uint32 opndIdx = kInsnMaxOpnd;
642 if (insn.IsLoadPair() || insn.IsStorePair()) {
643 DEBUG_ASSERT(newMemOpnd->GetOffsetImmediate() != nullptr, "unexpect insn");
644 opndIdx = kInsnThirdOpnd;
645 } else {
646 opndIdx = kInsnSecondOpnd;
647 }
648 return opndIdx;
649 }
650
/*
 * Commit one memory-operand replacement in useInsn: detach the use of
 * replacedReg (deleting its now-dead def if this was the last use), register
 * useInsn as a user of every version referenced by the new operand, then
 * install newMem.
 */
void A64StrLdrProp::DoMemReplace(const RegOperand &replacedReg, MemOperand &newMem, Insn &useInsn)
{
    VRegVersion *replacedV = ssaInfo->FindSSAVersion(replacedReg.GetRegisterNumber());
    DEBUG_ASSERT(replacedV != nullptr, "must in ssa form");
    uint32 opndIdx = GetMemOpndIdx(&newMem, useInsn);
    replacedV->RemoveUseInsn(useInsn, opndIdx);
    /* last use gone => the defining insn is dead; let DCE remove it */
    if (replacedV->GetAllUseInsns().empty()) {
        (void)cgDce->RemoveUnuseDef(*replacedV);
    }
    for (auto &replaceit : replaceVersions) {
        replaceit.second->AddUseInsn(*ssaInfo, useInsn, opndIdx);
    }
    useInsn.SetOperand(opndIdx, newMem);
}
665
StrLdrPropPreCheck(const Insn & insn,MemPropMode prevMod)666 MemOperand *A64StrLdrProp::StrLdrPropPreCheck(const Insn &insn, MemPropMode prevMod)
667 {
668 memPropMode = kUndef;
669 if (insn.IsLoad() || insn.IsStore()) {
670 if (insn.IsAtomic()) {
671 return nullptr;
672 }
673 auto *currMemOpnd = static_cast<MemOperand *>(insn.GetMemOpnd());
674 if (currMemOpnd != nullptr) {
675 memPropMode = SelectStrLdrPropMode(*currMemOpnd);
676 if (prevMod != kUndef) {
677 if (prevMod != memPropMode) {
678 memPropMode = prevMod;
679 return nullptr;
680 }
681 }
682 return currMemOpnd;
683 }
684 }
685 return nullptr;
686 }
687
/*
 * Classify a memory operand's addressing mode for propagation:
 *  - BOi without pre/post-index  => propagate through the base register;
 *  - BOrX                        => propagate through the register offset,
 *    refined to shift / signed-extend / unsigned-extend variants;
 *  - anything else               => kUndef (no propagation).
 */
MemPropMode A64StrLdrProp::SelectStrLdrPropMode(const MemOperand &currMemOpnd)
{
    MemOperand::AArch64AddressingMode currAddrMode = currMemOpnd.GetAddrMode();
    MemPropMode innerMemPropMode = kUndef;
    switch (currAddrMode) {
        case MemOperand::kAddrModeBOi: {
            /* pre/post-indexed forms update the base register — cannot propagate */
            if (!currMemOpnd.IsPreIndexed() && !currMemOpnd.IsPostIndexed()) {
                innerMemPropMode = kPropBase;
            }
            break;
        }
        case MemOperand::kAddrModeBOrX: {
            innerMemPropMode = kPropOffset;
            auto amount = currMemOpnd.ShiftAmount();
            if (currMemOpnd.GetExtendAsString() == "LSL") {
                if (amount != 0) {
                    innerMemPropMode = kPropShift;
                }
                /* LSL handled fully here; skip the extend checks below */
                break;
            } else if (currMemOpnd.SignedExtend()) {
                innerMemPropMode = kPropSignedExtend;
            } else if (currMemOpnd.UnsignedExtend()) {
                innerMemPropMode = kPropUnsignedExtend;
            }
            break;
        }
        default:
            innerMemPropMode = kUndef;
    }
    return innerMemPropMode;
}
719
SelectReplaceMem(const Insn & defInsn,const MemOperand & currMemOpnd)720 MemOperand *A64StrLdrProp::SelectReplaceMem(const Insn &defInsn, const MemOperand &currMemOpnd)
721 {
722 MemOperand *newMemOpnd = nullptr;
723 Operand *offset = currMemOpnd.GetOffset();
724 RegOperand *base = currMemOpnd.GetBaseRegister();
725 MOperator opCode = defInsn.GetMachineOpcode();
726 constexpr uint32 kValidShiftAmount = 12;
727 switch (opCode) {
728 case MOP_xsubrri12:
729 case MOP_wsubrri12: {
730 RegOperand *replace = GetReplaceReg(static_cast<RegOperand &>(defInsn.GetOperand(kInsnSecondOpnd)));
731 if (replace != nullptr) {
732 auto &immOpnd = static_cast<ImmOperand &>(defInsn.GetOperand(kInsnThirdOpnd));
733 int64 defVal = -(immOpnd.GetValue());
734 newMemOpnd = HandleArithImmDef(*replace, offset, defVal, currMemOpnd.GetSize());
735 }
736 break;
737 }
738 case MOP_xaddrri12:
739 case MOP_waddrri12: {
740 RegOperand *replace = GetReplaceReg(static_cast<RegOperand &>(defInsn.GetOperand(kInsnSecondOpnd)));
741 if (replace != nullptr) {
742 auto &immOpnd = static_cast<ImmOperand &>(defInsn.GetOperand(kInsnThirdOpnd));
743 int64 defVal = immOpnd.GetValue();
744 newMemOpnd = HandleArithImmDef(*replace, offset, defVal, currMemOpnd.GetSize());
745 }
746 break;
747 }
748 case MOP_xaddrri24:
749 case MOP_waddrri24: {
750 RegOperand *replace = GetReplaceReg(static_cast<RegOperand &>(defInsn.GetOperand(kInsnSecondOpnd)));
751 if (replace != nullptr) {
752 auto &immOpnd = static_cast<ImmOperand &>(defInsn.GetOperand(kInsnThirdOpnd));
753 auto &shiftOpnd = static_cast<BitShiftOperand &>(defInsn.GetOperand(kInsnFourthOpnd));
754 CHECK_FATAL(shiftOpnd.GetShiftAmount() == kValidShiftAmount, "invalid shiftAmount");
755 int64 defVal = (immOpnd.GetValue() << shiftOpnd.GetShiftAmount());
756 newMemOpnd = HandleArithImmDef(*replace, offset, defVal, currMemOpnd.GetSize());
757 }
758 break;
759 }
760 case MOP_xsubrri24:
761 case MOP_wsubrri24: {
762 RegOperand *replace = GetReplaceReg(static_cast<RegOperand &>(defInsn.GetOperand(kInsnSecondOpnd)));
763 if (replace != nullptr) {
764 auto &immOpnd = static_cast<ImmOperand &>(defInsn.GetOperand(kInsnThirdOpnd));
765 auto &shiftOpnd = static_cast<BitShiftOperand &>(defInsn.GetOperand(kInsnFourthOpnd));
766 CHECK_FATAL(shiftOpnd.GetShiftAmount() == kValidShiftAmount, "invalid shiftAmount");
767 int64 defVal = -(immOpnd.GetValue() << shiftOpnd.GetShiftAmount());
768 newMemOpnd = HandleArithImmDef(*replace, offset, defVal, currMemOpnd.GetSize());
769 }
770 break;
771 }
772 case MOP_xaddrrr:
773 case MOP_waddrrr:
774 case MOP_dadd:
775 case MOP_sadd: {
776 if (memPropMode == kPropBase) {
777 auto *ofstOpnd = static_cast<ImmOperand *>(offset);
778 if (!ofstOpnd->IsZero()) {
779 break;
780 }
781
782 RegOperand *replace = GetReplaceReg(static_cast<RegOperand &>(defInsn.GetOperand(kInsnSecondOpnd)));
783 RegOperand *newOfst = GetReplaceReg(static_cast<RegOperand &>(defInsn.GetOperand(kInsnThirdOpnd)));
784
785 if (replace != nullptr && newOfst != nullptr) {
786 newMemOpnd = static_cast<AArch64CGFunc *>(cgFunc)->CreateMemOperand(
787 MemOperand::kAddrModeBOrX, currMemOpnd.GetSize(), *replace, newOfst, nullptr, nullptr);
788 }
789 }
790 break;
791 }
792 case MOP_xaddrrrs:
793 case MOP_waddrrrs: {
794 if (memPropMode == kPropBase) {
795 auto *ofstOpnd = static_cast<ImmOperand *>(offset);
796 if (!ofstOpnd->IsZero()) {
797 break;
798 }
799 RegOperand *newBaseOpnd = GetReplaceReg(static_cast<RegOperand &>(defInsn.GetOperand(kInsnSecondOpnd)));
800 RegOperand *newIndexOpnd = GetReplaceReg(static_cast<RegOperand &>(defInsn.GetOperand(kInsnThirdOpnd)));
801 auto &shift = static_cast<BitShiftOperand &>(defInsn.GetOperand(kInsnFourthOpnd));
802 if (shift.GetShiftOp() != BitShiftOperand::kLSL) {
803 break;
804 }
805 if (newBaseOpnd != nullptr && newIndexOpnd != nullptr) {
806 newMemOpnd = static_cast<AArch64CGFunc *>(cgFunc)->CreateMemOperand(
807 MemOperand::kAddrModeBOrX, currMemOpnd.GetSize(), *newBaseOpnd, *newIndexOpnd,
808 shift.GetShiftAmount(), false);
809 }
810 }
811 break;
812 }
813 case MOP_xadrpl12: {
814 if (memPropMode == kPropBase) {
815 if (currMemOpnd.GetSize() >= k128BitSize) {
816 // We can not be sure that the page offset is 16-byte aligned
817 break;
818 }
819 auto *ofstOpnd = static_cast<ImmOperand *>(offset);
820 CHECK_FATAL(ofstOpnd != nullptr, "oldOffset is null!");
821 int64 val = ofstOpnd->GetValue();
822 auto *offset1 = static_cast<StImmOperand *>(&defInsn.GetOperand(kInsnThirdOpnd));
823 CHECK_FATAL(offset1 != nullptr, "offset1 is null!");
824 val += offset1->GetOffset();
825 OfstOperand *newOfsetOpnd =
826 &static_cast<AArch64CGFunc *>(cgFunc)->CreateOfstOpnd(static_cast<uint64>(val), k32BitSize);
827 CHECK_FATAL(newOfsetOpnd != nullptr, "newOfsetOpnd is null!");
828 const MIRSymbol *addr = offset1->GetSymbol();
829 /* do not guarantee rodata alignment at Os */
830 if (CGOptions::OptimizeForSize() && addr->IsReadOnly()) {
831 break;
832 }
833 RegOperand *replace = GetReplaceReg(static_cast<RegOperand &>(defInsn.GetOperand(kInsnSecondOpnd)));
834 if (replace != nullptr) {
835 newMemOpnd = static_cast<AArch64CGFunc *>(cgFunc)->CreateMemOperand(
836 MemOperand::kAddrModeLo12Li, currMemOpnd.GetSize(), *replace, nullptr, newOfsetOpnd, addr);
837 }
838 }
839 break;
840 }
841 /* do this in const prop ? */
842 case MOP_wmovri32:
843 case MOP_xmovri64: {
844 if (memPropMode == kPropOffset) {
845 auto *imm = static_cast<ImmOperand *>(&defInsn.GetOperand(kInsnSecondOpnd));
846 OfstOperand *newOffset = &static_cast<AArch64CGFunc *>(cgFunc)->CreateOfstOpnd(
847 static_cast<uint64>(imm->GetValue()), k32BitSize);
848 CHECK_FATAL(newOffset != nullptr, "newOffset is null!");
849 newMemOpnd = static_cast<AArch64CGFunc *>(cgFunc)->CreateMemOperand(
850 MemOperand::kAddrModeBOi, currMemOpnd.GetSize(), *base, nullptr, newOffset, nullptr);
851 }
852 break;
853 }
854 case MOP_xlslrri6:
855 case MOP_wlslrri5: {
856 auto *imm = static_cast<ImmOperand *>(&defInsn.GetOperand(kInsnThirdOpnd));
857 RegOperand *newOfst = GetReplaceReg(static_cast<RegOperand &>(defInsn.GetOperand(kInsnSecondOpnd)));
858 if (newOfst != nullptr) {
859 uint32 shift = static_cast<uint32>(static_cast<int32>(imm->GetValue()));
860 if (memPropMode == kPropOffset) {
861 if (shift < k4ByteSize) {
862 newMemOpnd = static_cast<AArch64CGFunc *>(cgFunc)->CreateMemOperand(
863 MemOperand::kAddrModeBOrX, currMemOpnd.GetSize(), *base, *newOfst, shift);
864 }
865 } else if (memPropMode == kPropShift) {
866 shift += currMemOpnd.ShiftAmount();
867 if (shift < k4ByteSize) {
868 newMemOpnd = static_cast<AArch64CGFunc *>(cgFunc)->CreateMemOperand(
869 MemOperand::kAddrModeBOrX, currMemOpnd.GetSize(), *base, *newOfst, shift);
870 }
871 }
872 }
873 break;
874 }
875 case MOP_xsxtw64: {
876 newMemOpnd = SelectReplaceExt(defInsn, *base, static_cast<uint32>(currMemOpnd.ShiftAmount()), true,
877 currMemOpnd.GetSize());
878 break;
879 }
880 case MOP_xuxtw64: {
881 newMemOpnd = SelectReplaceExt(defInsn, *base, static_cast<uint32>(currMemOpnd.ShiftAmount()), false,
882 currMemOpnd.GetSize());
883 break;
884 }
885 default:
886 break;
887 }
888 return newMemOpnd;
889 }
890
GetReplaceReg(RegOperand & a64Reg)891 RegOperand *A64StrLdrProp::GetReplaceReg(RegOperand &a64Reg)
892 {
893 if (a64Reg.IsSSAForm()) {
894 regno_t ssaIndex = a64Reg.GetRegisterNumber();
895 replaceVersions[ssaIndex] = ssaInfo->FindSSAVersion(ssaIndex);
896 DEBUG_ASSERT(replaceVersions.size() <= 2, "CHECK THIS CASE IN A64PROP"); // size <= 2 in A64PROP
897 return &a64Reg;
898 }
899 return nullptr;
900 }
901
HandleArithImmDef(RegOperand & replace,Operand * oldOffset,int64 defVal,uint32 memSize) const902 MemOperand *A64StrLdrProp::HandleArithImmDef(RegOperand &replace, Operand *oldOffset, int64 defVal,
903 uint32 memSize) const
904 {
905 if (memPropMode != kPropBase) {
906 return nullptr;
907 }
908 OfstOperand *newOfstImm = nullptr;
909 if (oldOffset == nullptr) {
910 newOfstImm = &static_cast<AArch64CGFunc *>(cgFunc)->CreateOfstOpnd(static_cast<uint64>(defVal), k32BitSize);
911 } else {
912 auto *ofstOpnd = static_cast<OfstOperand *>(oldOffset);
913 CHECK_FATAL(ofstOpnd != nullptr, "oldOffsetOpnd is null");
914 newOfstImm = &static_cast<AArch64CGFunc *>(cgFunc)->CreateOfstOpnd(
915 static_cast<uint64>(defVal + ofstOpnd->GetValue()), k32BitSize);
916 }
917 CHECK_FATAL(newOfstImm != nullptr, "newOffset is null!");
918 return static_cast<AArch64CGFunc *>(cgFunc)->CreateMemOperand(MemOperand::kAddrModeBOi, memSize, replace, nullptr,
919 newOfstImm, nullptr);
920 }
921
SelectReplaceExt(const Insn & defInsn,RegOperand & base,uint32 amount,bool isSigned,uint32 memSize)922 MemOperand *A64StrLdrProp::SelectReplaceExt(const Insn &defInsn, RegOperand &base, uint32 amount, bool isSigned,
923 uint32 memSize)
924 {
925 MemOperand *newMemOpnd = nullptr;
926 RegOperand *newOfst = GetReplaceReg(static_cast<RegOperand &>(defInsn.GetOperand(kInsnSecondOpnd)));
927 if (newOfst == nullptr) {
928 return nullptr;
929 }
930 /* defInsn is extend, currMemOpnd is same extend or shift */
931 bool propExtend = (memPropMode == kPropShift) || ((memPropMode == kPropSignedExtend) && isSigned) ||
932 ((memPropMode == kPropUnsignedExtend) && !isSigned);
933 if (memPropMode == kPropOffset) {
934 newMemOpnd = static_cast<AArch64CGFunc *>(cgFunc)->CreateMemOperand(MemOperand::kAddrModeBOrX, memSize, base,
935 *newOfst, 0, isSigned);
936 } else if (propExtend) {
937 newMemOpnd = static_cast<AArch64CGFunc *>(cgFunc)->CreateMemOperand(MemOperand::kAddrModeBOrX, memSize, base,
938 *newOfst, amount, isSigned);
939 } else {
940 return nullptr;
941 }
942 return newMemOpnd;
943 }
944
CheckNewMemOffset(const Insn & insn,MemOperand * newMemOpnd,uint32 opndIdx) const945 bool A64StrLdrProp::CheckNewMemOffset(const Insn &insn, MemOperand *newMemOpnd, uint32 opndIdx) const
946 {
947 auto *a64CgFunc = static_cast<AArch64CGFunc *>(cgFunc);
948 if ((newMemOpnd->GetOffsetImmediate() != nullptr) &&
949 !a64CgFunc->IsOperandImmValid(insn.GetMachineOpcode(), newMemOpnd, opndIdx)) {
950 return false;
951 }
952 auto newAmount = static_cast<uint32>(newMemOpnd->ShiftAmount());
953 if (!AArch64StoreLoadOpt::CheckNewAmount(insn, newAmount)) {
954 return false;
955 }
956 /* is ldp or stp, addrMode must be BOI */
957 if ((opndIdx == kInsnThirdOpnd) && (newMemOpnd->GetAddrMode() != MemOperand::kAddrModeBOi)) {
958 return false;
959 }
960 return true;
961 }
962
PropPatternOpt()963 void AArch64Prop::PropPatternOpt()
964 {
965 PropOptimizeManager optManager;
966 optManager.Optimize<ExtendMovPattern>(*cgFunc, GetSSAInfo());
967 optManager.Optimize<ExtendShiftPattern>(*cgFunc, GetSSAInfo());
968 optManager.Optimize<FpSpConstProp>(*cgFunc, GetSSAInfo());
969 optManager.Optimize<A64PregCopyPattern>(*cgFunc, GetSSAInfo());
970 }
971
IsSwapInsn(const Insn & insn) const972 bool ExtendShiftPattern::IsSwapInsn(const Insn &insn) const
973 {
974 MOperator op = insn.GetMachineOpcode();
975 switch (op) {
976 case MOP_xaddrrr:
977 case MOP_waddrrr:
978 case MOP_xiorrrr:
979 case MOP_wiorrrr:
980 return true;
981 default:
982 return false;
983 }
984 }
985
/* Classify the use insn's opcode into an extended-register arithmetic
 * category (stored in exMOpType). Each category groups the plain (rrr),
 * extended (rrre) and shifted (rrrs) forms of the same operation;
 * kExUndef means the opcode has no extended-register variant. */
void ExtendShiftPattern::SetExMOpType(const Insn &use)
{
    MOperator op = use.GetMachineOpcode();
    switch (op) {
        case MOP_xaddrrr:
        case MOP_xxwaddrrre:
        case MOP_xaddrrrs: {
            exMOpType = kExAdd;
            break;
        }
        case MOP_waddrrr:
        case MOP_wwwaddrrre:
        case MOP_waddrrrs: {
            exMOpType = kEwAdd;
            break;
        }
        case MOP_xsubrrr:
        case MOP_xxwsubrrre:
        case MOP_xsubrrrs: {
            exMOpType = kExSub;
            break;
        }
        case MOP_wsubrrr:
        case MOP_wwwsubrrre:
        case MOP_wsubrrrs: {
            exMOpType = kEwSub;
            break;
        }
        case MOP_xcmnrr:
        case MOP_xwcmnrre:
        case MOP_xcmnrrs: {
            exMOpType = kExCmn;
            break;
        }
        case MOP_wcmnrr:
        case MOP_wwcmnrre:
        case MOP_wcmnrrs: {
            exMOpType = kEwCmn;
            break;
        }
        case MOP_xcmprr:
        case MOP_xwcmprre:
        case MOP_xcmprrs: {
            exMOpType = kExCmp;
            break;
        }
        case MOP_wcmprr:
        case MOP_wwcmprre:
        case MOP_wcmprrs: {
            exMOpType = kEwCmp;
            break;
        }
        default: {
            /* no extended-register form exists for this opcode */
            exMOpType = kExUndef;
        }
    }
}
1043
/* Classify the use insn's opcode into a shifted-register category (stored in
 * lsMOpType). Each category groups the plain (rrr/rr) and shifted (rrs/rrrs)
 * forms of the same operation; kLsUndef means the opcode has no
 * shifted-register variant. For neg insns the pattern operand to replace is
 * the second operand, so replaceIdx is adjusted accordingly. */
void ExtendShiftPattern::SetLsMOpType(const Insn &use)
{
    MOperator op = use.GetMachineOpcode();
    switch (op) {
        case MOP_xaddrrr:
        case MOP_xaddrrrs: {
            lsMOpType = kLxAdd;
            break;
        }
        case MOP_waddrrr:
        case MOP_waddrrrs: {
            lsMOpType = kLwAdd;
            break;
        }
        case MOP_xsubrrr:
        case MOP_xsubrrrs: {
            lsMOpType = kLxSub;
            break;
        }
        case MOP_wsubrrr:
        case MOP_wsubrrrs: {
            lsMOpType = kLwSub;
            break;
        }
        case MOP_xcmnrr:
        case MOP_xcmnrrs: {
            lsMOpType = kLxCmn;
            break;
        }
        case MOP_wcmnrr:
        case MOP_wcmnrrs: {
            lsMOpType = kLwCmn;
            break;
        }
        case MOP_xcmprr:
        case MOP_xcmprrs: {
            lsMOpType = kLxCmp;
            break;
        }
        case MOP_wcmprr:
        case MOP_wcmprrs: {
            lsMOpType = kLwCmp;
            break;
        }
        case MOP_xeorrrr:
        case MOP_xeorrrrs: {
            lsMOpType = kLxEor;
            break;
        }
        case MOP_weorrrr:
        case MOP_weorrrrs: {
            lsMOpType = kLwEor;
            break;
        }
        case MOP_xinegrr:
        case MOP_xinegrrs: {
            lsMOpType = kLxNeg;
            /* neg has a single source, which sits at the second operand */
            replaceIdx = kInsnSecondOpnd;
            break;
        }
        case MOP_winegrr:
        case MOP_winegrrs: {
            lsMOpType = kLwNeg;
            /* neg has a single source, which sits at the second operand */
            replaceIdx = kInsnSecondOpnd;
            break;
        }
        case MOP_xiorrrr:
        case MOP_xiorrrrs: {
            lsMOpType = kLxIor;
            break;
        }
        case MOP_wiorrrr:
        case MOP_wiorrrrs: {
            lsMOpType = kLwIor;
            break;
        }
        default: {
            /* no shifted-register form exists for this opcode */
            lsMOpType = kLsUndef;
        }
    }
}
1125
/* Inspect the defining insn and record which extension (extendOp) or shift
 * (shiftOp) it performs. Exactly one of the two members is set for a match;
 * both are reset to kUndef when the def is neither an extend nor a
 * shift-by-immediate. */
void ExtendShiftPattern::SelectExtendOrShift(const Insn &def)
{
    MOperator op = def.GetMachineOpcode();
    switch (op) {
        case MOP_xsxtb32:
        case MOP_xsxtb64:
            extendOp = ExtendShiftOperand::kSXTB;
            break;
        case MOP_xsxth32:
        case MOP_xsxth64:
            extendOp = ExtendShiftOperand::kSXTH;
            break;
        case MOP_xsxtw64:
            extendOp = ExtendShiftOperand::kSXTW;
            break;
        case MOP_xuxtb32:
            extendOp = ExtendShiftOperand::kUXTB;
            break;
        case MOP_xuxth32:
            extendOp = ExtendShiftOperand::kUXTH;
            break;
        case MOP_xuxtw64:
            extendOp = ExtendShiftOperand::kUXTW;
            break;
        case MOP_wlslrri5:
        case MOP_xlslrri6:
            shiftOp = BitShiftOperand::kLSL;
            break;
        case MOP_xlsrrri6:
        case MOP_wlsrrri5:
            shiftOp = BitShiftOperand::kLSR;
            break;
        case MOP_xasrrri6:
        case MOP_wasrrri5:
            shiftOp = BitShiftOperand::kASR;
            break;
        default: {
            /* def is neither extend nor shift: clear both markers */
            extendOp = ExtendShiftOperand::kUndef;
            shiftOp = BitShiftOperand::kUndef;
        }
    }
}
1168
1169 /* Optimize ExtendShiftPattern:
1170 * ==========================================================
1171 * nosuffix LSL LSR ASR extrn (def)
1172 * nosuffix | F | LSL | LSR | ASR | extrn |
1173 * LSL | F | LSL | F | F | extrn |
1174 * LSR | F | F | LSR | F | F |
1175 * ASR | F | F | F | ASR | F |
1176 * exten | F | F | F | F |exten(self)|
1177 * (use)
1178 * ===========================================================
1179 */
constexpr uint32 kExtenAddShiftNum = 5;
/* optTable[useSuffix][defSuffix]: the resulting suffix when folding the def
 * insn's extend/shift into the use insn. Rows are the use insn's existing
 * suffix, columns the def insn's; kNoSuffix means the fold is not allowed
 * (see the table in the comment above). */
ExtendShiftPattern::SuffixType optTable[kExtenAddShiftNum][kExtenAddShiftNum] = {
    {ExtendShiftPattern::kNoSuffix, ExtendShiftPattern::kLSL, ExtendShiftPattern::kLSR, ExtendShiftPattern::kASR,
     ExtendShiftPattern::kExten},
    {ExtendShiftPattern::kNoSuffix, ExtendShiftPattern::kLSL, ExtendShiftPattern::kNoSuffix,
     ExtendShiftPattern::kNoSuffix, ExtendShiftPattern::kExten},
    {ExtendShiftPattern::kNoSuffix, ExtendShiftPattern::kNoSuffix, ExtendShiftPattern::kLSR,
     ExtendShiftPattern::kNoSuffix, ExtendShiftPattern::kNoSuffix},
    {ExtendShiftPattern::kNoSuffix, ExtendShiftPattern::kNoSuffix, ExtendShiftPattern::kNoSuffix,
     ExtendShiftPattern::kASR, ExtendShiftPattern::kNoSuffix},
    {ExtendShiftPattern::kNoSuffix, ExtendShiftPattern::kNoSuffix, ExtendShiftPattern::kNoSuffix,
     ExtendShiftPattern::kNoSuffix, ExtendShiftPattern::kExten}};
1192
1193 /* Check whether ExtendShiftPattern optimization can be performed. */
CheckOpType(const Operand & lastOpnd) const1194 ExtendShiftPattern::SuffixType ExtendShiftPattern::CheckOpType(const Operand &lastOpnd) const
1195 {
1196 /* Assign values to useType and defType. */
1197 uint32 useType = ExtendShiftPattern::kNoSuffix;
1198 uint32 defType = shiftOp;
1199 if (extendOp != ExtendShiftOperand::kUndef) {
1200 defType = ExtendShiftPattern::kExten;
1201 }
1202 if (lastOpnd.IsOpdShift()) {
1203 BitShiftOperand lastShiftOpnd = static_cast<const BitShiftOperand &>(lastOpnd);
1204 useType = lastShiftOpnd.GetShiftOp();
1205 } else if (lastOpnd.IsOpdExtend()) {
1206 ExtendShiftOperand lastExtendOpnd = static_cast<const ExtendShiftOperand &>(lastOpnd);
1207 useType = ExtendShiftPattern::kExten;
1208 /* two insn is exten and exten ,value is exten(oneself) */
1209 if (useType == defType && extendOp != lastExtendOpnd.GetExtendOp()) {
1210 return ExtendShiftPattern::kNoSuffix;
1211 }
1212 }
1213 return optTable[useType][defType];
1214 }
1215
constexpr uint32 kExMopTypeSize = 9;
constexpr uint32 kLsMopTypeSize = 15;

/* indexed by ExMOpType: replacement extended-register (rrre) opcodes; slot 0 is MOP_undef */
MOperator exMopTable[kExMopTypeSize] = {MOP_undef, MOP_xxwaddrrre, MOP_wwwaddrrre, MOP_xxwsubrrre, MOP_wwwsubrrre,
                                        MOP_xwcmnrre, MOP_wwcmnrre, MOP_xwcmprre, MOP_wwcmprre};
/* indexed by LsMOpType: replacement shifted-register (rrs) opcodes; slot 0 is MOP_undef */
MOperator lsMopTable[kLsMopTypeSize] = {MOP_undef, MOP_xaddrrrs, MOP_waddrrrs, MOP_xsubrrrs, MOP_wsubrrrs,
                                        MOP_xcmnrrs, MOP_wcmnrrs, MOP_xcmprrs, MOP_wcmprrs, MOP_xeorrrrs,
                                        MOP_weorrrrs, MOP_xinegrrs, MOP_winegrrs, MOP_xiorrrrs, MOP_wiorrrrs};
1224 /* new Insn extenType:
1225 * =====================
1226 * (useMop) (defMop) (newmop)
1227 * | nosuffix | all | all|
1228 * | exten | ex | ex |
1229 * | ls | ex | ls |
1230 * | asr | !asr | F |
1231 * | !asr | asr | F |
1232 * (useMop) (defMop)
1233 * =====================
1234 */
/* Rebuild the use insn with the def's extend/shift folded in as an
 * extended-register or shifted-register suffix operand, replace it in the BB,
 * and update SSA info. Sets optSuccess/newInsn on success; silently returns
 * when the combination is not foldable or the amount is out of range. */
void ExtendShiftPattern::ReplaceUseInsn(Insn &use, const Insn &def, uint32 amount)
{
    AArch64CGFunc &a64CGFunc = static_cast<AArch64CGFunc &>(cgFunc);
    uint32 lastIdx = use.GetOperandSize() - k1BitSize;
    Operand &lastOpnd = use.GetOperand(lastIdx);
    ExtendShiftPattern::SuffixType optType = CheckOpType(lastOpnd);
    Operand *shiftOpnd = nullptr;
    if (optType == ExtendShiftPattern::kNoSuffix) {
        return;
    } else if (optType == ExtendShiftPattern::kExten) {
        replaceOp = exMopTable[exMOpType];
        /* extended-register forms only encode shift amounts up to 4 */
        if (amount > k4BitSize) {
            return;
        }
        shiftOpnd = &a64CGFunc.CreateExtendShiftOperand(extendOp, amount, static_cast<int32>(k64BitSize));
    } else {
        replaceOp = lsMopTable[lsMOpType];
        /* shifted-register forms require the amount to fit in the register width */
        if (amount >= k32BitSize) {
            return;
        }
        shiftOpnd = &a64CGFunc.CreateBitShiftOperand(shiftOp, amount, static_cast<int32>(k64BitSize));
    }
    if (replaceOp == MOP_undef) {
        return;
    }

    Insn *replaceUseInsn = nullptr;
    Operand &firstOpnd = use.GetOperand(kInsnFirstOpnd);
    Operand *secondOpnd = &use.GetOperand(kInsnSecondOpnd);
    if (replaceIdx == kInsnSecondOpnd) { /* replace neg insn: single-source form */
        secondOpnd = &def.GetOperand(kInsnSecondOpnd);
        replaceUseInsn = &cgFunc.GetInsnBuilder()->BuildInsn(replaceOp, firstOpnd, *secondOpnd, *shiftOpnd);
    } else {
        /* three-operand form: the def's source becomes the suffixed third operand */
        Operand &thirdOpnd = def.GetOperand(kInsnSecondOpnd);
        replaceUseInsn = &cgFunc.GetInsnBuilder()->BuildInsn(replaceOp, firstOpnd, *secondOpnd, thirdOpnd, *shiftOpnd);
    }
    use.GetBB()->ReplaceInsn(use, *replaceUseInsn);
    if (PROP_DUMP) {
        LogInfo::MapleLogger() << ">>>>>>> In ExtendShiftPattern : <<<<<<<\n";
        LogInfo::MapleLogger() << "=======ReplaceInsn :\n";
        use.Dump();
        LogInfo::MapleLogger() << "=======NewInsn :\n";
        replaceUseInsn->Dump();
    }
    /* update ssa info */
    optSsaInfo->ReplaceInsn(use, *replaceUseInsn);
    newInsn = replaceUseInsn;
    optSuccess = true;
}
1284
1285 /*
1286 * pattern1:
1287 * UXTB/UXTW X0, W1 <---- def x0
1288 * .... <---- (X0 not used)
1289 * AND/SUB/EOR X0, X1, X0 <---- use x0
1290 * ======>
1291 * AND/SUB/EOR X0, X1, W1 UXTB/UXTW
1292 *
1293 * pattern2:
1294 * LSL/LSR X0, X1, #8
1295 * ....(X0 not used)
1296 * AND/SUB/EOR X0, X1, X0
1297 * ======>
1298 * AND/SUB/EOR X0, X1, X1 LSL/LSR #8
1299 */
/* Compute the combined shift amount (the use insn's existing suffix amount
 * plus the def insn's shift immediate, when the def is a shift) and hand the
 * rewrite off to ReplaceUseInsn. */
void ExtendShiftPattern::Optimize(Insn &insn)
{
    uint32 amount = 0;
    uint32 offset = 0;
    uint32 lastIdx = insn.GetOperandSize() - k1BitSize;
    Operand &lastOpnd = insn.GetOperand(lastIdx);
    if (lastOpnd.IsOpdShift()) {
        auto &lastShiftOpnd = static_cast<BitShiftOperand &>(lastOpnd);
        amount = lastShiftOpnd.GetShiftAmount();
    } else if (lastOpnd.IsOpdExtend()) {
        auto &lastExtendOpnd = static_cast<ExtendShiftOperand &>(lastOpnd);
        amount = lastExtendOpnd.GetShiftAmount();
    }
    if (shiftOp != BitShiftOperand::kUndef) {
        /* def is a shift-by-immediate; its amount sits at the third operand */
        auto &immOpnd = static_cast<ImmOperand &>(defInsn->GetOperand(kInsnThirdOpnd));
        offset = static_cast<uint32>(immOpnd.GetValue());
    }
    amount += offset;

    ReplaceUseInsn(insn, *defInsn, amount);
}
1321
DoExtendShiftOpt(Insn & insn)1322 void ExtendShiftPattern::DoExtendShiftOpt(Insn &insn)
1323 {
1324 if (!CheckAllOpndCondition(insn)) {
1325 return;
1326 }
1327 Optimize(*curInsn);
1328 if (optSuccess) {
1329 DoExtendShiftOpt(*newInsn);
1330 }
1331 }
1332
SwapOpnd(Insn & insn)1333 void ExtendShiftPattern::SwapOpnd(Insn &insn)
1334 {
1335 Insn *swapInsn =
1336 &cgFunc.GetInsnBuilder()->BuildInsn(insn.GetMachineOpcode(), insn.GetOperand(kInsnFirstOpnd),
1337 insn.GetOperand(kInsnThirdOpnd), insn.GetOperand(kInsnSecondOpnd));
1338 insn.GetBB()->ReplaceInsn(insn, *swapInsn);
1339 optSsaInfo->ReplaceInsn(insn, *swapInsn);
1340 curInsn = swapInsn;
1341 replaceIdx = kInsnThirdOpnd;
1342 }
1343
CheckAllOpndCondition(Insn & insn)1344 bool ExtendShiftPattern::CheckAllOpndCondition(Insn &insn)
1345 {
1346 Init();
1347 SetLsMOpType(insn);
1348 SetExMOpType(insn);
1349 curInsn = &insn;
1350 if (IsSwapInsn(insn)) {
1351 if (CheckCondition(insn)) {
1352 return true;
1353 }
1354 Init();
1355 SetLsMOpType(insn);
1356 SetExMOpType(insn);
1357 replaceIdx = kInsnSecondOpnd;
1358 if (CheckCondition(insn)) {
1359 SwapOpnd(insn);
1360 return true;
1361 }
1362 } else {
1363 return CheckCondition(insn);
1364 }
1365 return false;
1366 }
1367
1368 /* check and set:
1369 * exMOpType, lsMOpType, extendOp, shiftOp, defInsn
1370 */
/* Validate the candidate: the use insn must have an ex/ls replacement form,
 * the replaced operand's single def must be a shift or extend of a virtual
 * register that itself has only this use, and right shifts must not cross an
 * implicit truncation. Fills extendOp/shiftOp/defInsn as a side effect. */
bool ExtendShiftPattern::CheckCondition(Insn &insn)
{
    if ((exMOpType == kExUndef) && (lsMOpType == kLsUndef)) {
        return false;
    }
    auto &regOperand = static_cast<RegOperand &>(insn.GetOperand(replaceIdx));
    regno_t regNo = regOperand.GetRegisterNumber();
    VRegVersion *useVersion = optSsaInfo->FindSSAVersion(regNo);
    defInsn = FindDefInsn(useVersion);
    /* the folded value must have exactly one use (this insn) */
    if (!defInsn || (useVersion->GetAllUseInsns().size() > 1)) {
        return false;
    }
    SelectExtendOrShift(*defInsn);
    /* defInsn must be shift or extend */
    if ((extendOp == ExtendShiftOperand::kUndef) && (shiftOp == BitShiftOperand::kUndef)) {
        return false;
    }
    Operand &defSrcOpnd = defInsn->GetOperand(kInsnSecondOpnd);
    CHECK_FATAL(defSrcOpnd.IsRegister(), "defSrcOpnd must be register!");
    auto &regDefSrc = static_cast<RegOperand &>(defSrcOpnd);
    if (regDefSrc.IsPhysicalRegister()) {
        return false;
    }
    /*
     * has Implict cvt
     *
     * avoid cases as following:
     *   lsr  x2, x2, #8
     *   ubfx w2, x2, #0, #32                lsr  x2, x2, #8
     *   eor  w0, w0, w2           ===>      eor  w0, w0, x2     ==\=>  eor w0, w0, w2, LSR #8
     *
     * the truncation causes the wrong value by shift right
     * shift left does not matter
     */
    if (useVersion->HasImplicitCvt() && shiftOp != BitShiftOperand::kUndef) {
        return false;
    }
    if ((shiftOp == BitShiftOperand::kLSR || shiftOp == BitShiftOperand::kASR) &&
        (defSrcOpnd.GetSize() > regOperand.GetSize())) {
        return false;
    }
    regno_t defSrcRegNo = regDefSrc.GetRegisterNumber();
    /* check regDefSrc */
    VRegVersion *replaceUseV = optSsaInfo->FindSSAVersion(defSrcRegNo);
    CHECK_FATAL(replaceUseV != nullptr, "useVRegVersion must not be null based on ssa");
    if (replaceUseV->GetAllUseInsns().size() > 1) {
        return false;
    }
    return true;
}
1421
Init()1422 void ExtendShiftPattern::Init()
1423 {
1424 replaceOp = MOP_undef;
1425 extendOp = ExtendShiftOperand::kUndef;
1426 shiftOp = BitShiftOperand::kUndef;
1427 defInsn = nullptr;
1428 newInsn = nullptr;
1429 replaceIdx = kInsnThirdOpnd;
1430 optSuccess = false;
1431 exMOpType = kExUndef;
1432 lsMOpType = kLsUndef;
1433 }
1434
Run()1435 void ExtendShiftPattern::Run()
1436 {
1437 if (!cgFunc.GetMirModule().IsCModule()) {
1438 return;
1439 }
1440 FOR_ALL_BB_REV(bb, &cgFunc)
1441 {
1442 FOR_BB_INSNS_REV(insn, bb)
1443 {
1444 if (!insn->IsMachineInstruction()) {
1445 continue;
1446 }
1447 DoExtendShiftOpt(*insn);
1448 }
1449 }
1450 }
1451
Run()1452 void ExtendMovPattern::Run()
1453 {
1454 if (!cgFunc.GetMirModule().IsCModule()) {
1455 return;
1456 }
1457 FOR_ALL_BB(bb, &cgFunc)
1458 {
1459 FOR_BB_INSNS(insn, bb)
1460 {
1461 if (!insn->IsMachineInstruction()) {
1462 continue;
1463 }
1464 if (!CheckCondition(*insn)) {
1465 continue;
1466 }
1467 Optimize(*insn);
1468 }
1469 }
1470 }
1471
/* Recursively check that every definition reaching srcRegNo only produces
 * values whose bits above validNum are zero, so a later zero-extension of the
 * low validNum bits is redundant. Conservatively returns false for any def
 * opcode not explicitly handled. */
bool ExtendMovPattern::CheckSrcReg(regno_t srcRegNo, uint32 validNum)
{
    InsnSet srcDefSet;
    VRegVersion *useVersion = optSsaInfo->FindSSAVersion(srcRegNo);
    CHECK_FATAL(useVersion != nullptr, "useVRegVersion must not be null based on ssa");
    DUInsnInfo *defInfo = useVersion->GetDefInsnInfo();
    if (defInfo == nullptr) {
        return false;
    }
    Insn *insn = defInfo->GetInsn();
    srcDefSet.insert(insn);
    /* reserve insn set for non ssa version. */
    for (auto defInsn : srcDefSet) {
        CHECK_FATAL((defInsn != nullptr), "defInsn is null!");
        MOperator mOp = defInsn->GetMachineOpcode();
        switch (mOp) {
            case MOP_wiorrri12:
            case MOP_weorrri12: {
                /* check immVal if mop is OR: or/eor can set bits, so the
                 * immediate must not touch bits at or above validNum */
                ImmOperand &imm = static_cast<ImmOperand &>(defInsn->GetOperand(kInsnThirdOpnd));
                uint32 bitNum = static_cast<uint32>(imm.GetValue());
                if ((bitNum >> validNum) != 0) {
                    return false;
                }
                break;
            }
            case MOP_wandrri12: {
                /* check defSrcReg: and can only clear bits, so it suffices
                 * that the register source is already clean */
                RegOperand &defSrcRegOpnd = static_cast<RegOperand &>(defInsn->GetOperand(kInsnSecondOpnd));
                regno_t defSrcRegNo = defSrcRegOpnd.GetRegisterNumber();
                if (!CheckSrcReg(defSrcRegNo, validNum)) {
                    return false;
                }
                break;
            }
            case MOP_wandrrr: {
                /* check defSrcReg: for and, ONE clean source is enough
                 * (&& here), since and cannot set bits the clean side lacks */
                RegOperand &defSrcRegOpnd1 = static_cast<RegOperand &>(defInsn->GetOperand(kInsnSecondOpnd));
                RegOperand &defSrcRegOpnd2 = static_cast<RegOperand &>(defInsn->GetOperand(kInsnThirdOpnd));
                regno_t defSrcRegNo1 = defSrcRegOpnd1.GetRegisterNumber();
                regno_t defSrcRegNo2 = defSrcRegOpnd2.GetRegisterNumber();
                if (!CheckSrcReg(defSrcRegNo1, validNum) && !CheckSrcReg(defSrcRegNo2, validNum)) {
                    return false;
                }
                break;
            }
            case MOP_wiorrrr:
            case MOP_weorrrr: {
                /* check defSrcReg: or/eor can propagate set bits from either
                 * side, so BOTH sources must be clean (|| here) */
                RegOperand &defSrcRegOpnd1 = static_cast<RegOperand &>(defInsn->GetOperand(kInsnSecondOpnd));
                RegOperand &defSrcRegOpnd2 = static_cast<RegOperand &>(defInsn->GetOperand(kInsnThirdOpnd));
                regno_t defSrcRegNo1 = defSrcRegOpnd1.GetRegisterNumber();
                regno_t defSrcRegNo2 = defSrcRegOpnd2.GetRegisterNumber();
                if (!CheckSrcReg(defSrcRegNo1, validNum) || !CheckSrcReg(defSrcRegNo2, validNum)) {
                    return false;
                }
                break;
            }
            case MOP_wldrb: {
                /* byte load zero-fills above bit 8 */
                if (validNum != k8BitSize) {
                    return false;
                }
                break;
            }
            case MOP_wldrh: {
                /* halfword load zero-fills above bit 16 */
                if (validNum != k16BitSize) {
                    return false;
                }
                break;
            }
            default:
                return false;
        }
    }
    return true;
}
1548
BitNotAffected(const Insn & insn,uint32 validNum)1549 bool ExtendMovPattern::BitNotAffected(const Insn &insn, uint32 validNum)
1550 {
1551 RegOperand &firstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
1552 RegOperand &secondOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
1553 regno_t desRegNo = firstOpnd.GetRegisterNumber();
1554 regno_t srcRegNo = secondOpnd.GetRegisterNumber();
1555 VRegVersion *useVersion = optSsaInfo->FindSSAVersion(desRegNo);
1556 CHECK_FATAL(useVersion != nullptr, "useVRegVersion must not be null based on ssa");
1557 DUInsnInfo *defInfo = useVersion->GetDefInsnInfo();
1558 if (defInfo == nullptr) {
1559 return false;
1560 }
1561 if (!CheckSrcReg(srcRegNo, validNum)) {
1562 return false;
1563 }
1564 replaceMop = MOP_wmovrr;
1565 return true;
1566 }
1567
CheckCondition(Insn & insn)1568 bool ExtendMovPattern::CheckCondition(Insn &insn)
1569 {
1570 MOperator mOp = insn.GetMachineOpcode();
1571 switch (mOp) {
1572 case MOP_xuxtb32:
1573 return BitNotAffected(insn, k8BitSize);
1574 case MOP_xuxth32:
1575 return BitNotAffected(insn, k16BitSize);
1576 default:
1577 return false;
1578 }
1579 }
1580
1581 /* No initialization required */
/* No initialization required */
void ExtendMovPattern::Init()
{
    /* reset the replacement opcode; BitNotAffected sets it on a match */
    replaceMop = MOP_undef;
}
1586
/* Demote the redundant extension to the plain move recorded in replaceMop. */
void ExtendMovPattern::Optimize(Insn &insn)
{
    insn.SetMOP(AArch64CG::kMd[replaceMop]);
}
1591
Run()1592 void CopyRegProp::Run()
1593 {
1594 FOR_ALL_BB(bb, &cgFunc)
1595 {
1596 FOR_BB_INSNS(insn, bb)
1597 {
1598 if (!insn->IsMachineInstruction()) {
1599 continue;
1600 }
1601 Init();
1602 if (!CheckCondition(*insn)) {
1603 continue;
1604 }
1605 Optimize(*insn);
1606 }
1607 }
1608 }
1609
/* Check via live-interval interference whether replacing all uses of dstReg
 * with srcReg is safe, and coalesce the two intervals when it is. Same
 * original register is always safe. NOTE(review): dstll/srcll are
 * (re)assigned inside the use loop, so CoalesceLiveIntervals only runs when
 * destVersion had at least one usable use insn. */
bool CopyRegProp::IsValidCopyProp(const RegOperand &dstReg, const RegOperand &srcReg) const
{
    DEBUG_ASSERT(destVersion != nullptr, "find destVersion failed");
    DEBUG_ASSERT(srcVersion != nullptr, "find srcVersion failed");
    LiveInterval *dstll = nullptr;
    LiveInterval *srcll = nullptr;
    if (destVersion->GetOriginalRegNO() == srcVersion->GetOriginalRegNO()) {
        return true;
    }
    regno_t dstRegNO = dstReg.GetRegisterNumber();
    regno_t srcRegNO = srcReg.GetRegisterNumber();
    for (auto useDUInfoIt : destVersion->GetAllUseInsns()) {
        if (useDUInfoIt.second == nullptr) {
            continue;
        }
        Insn *useInsn = (useDUInfoIt.second)->GetInsn();
        if (useInsn == nullptr) {
            continue;
        }

        dstll = regll->GetLiveInterval(dstRegNO);
        srcll = regll->GetLiveInterval(srcRegNO);
        static_cast<AArch64LiveIntervalAnalysis *>(regll)->CheckInterference(*dstll, *srcll);
        BB *useBB = useInsn->GetBB();
        DEBUG_ASSERT(useBB != nullptr, "useBB is null");
        /* conflicting intervals are only tolerated when the use is not
         * entangled with phi defs/lists on a critical edge */
        if (dstll->IsConflictWith(srcRegNO) &&
            /* support override value when the version is not transphi */
            (((useBB->IsInPhiDef(srcRegNO) || useBB->IsInPhiList(srcRegNO)) && useBB->HasCriticalEdge()) ||
             useBB->IsInPhiList(dstRegNO))) {
            return false;
        }
    }
    if (dstll && srcll) {
        regll->CoalesceLiveIntervals(*dstll, *srcll);
    }
    return true;
}
1647
/* Decide whether insn is a register copy whose source can replace the
 * destination everywhere. Also normalizes "mov rX, zr" into an immediate-0
 * move in place. Sets destVersion/srcVersion on success. */
bool CopyRegProp::CheckCondition(Insn &insn)
{
    if (Globals::GetInstance()->GetTarget()->IsEffectiveCopy(insn)) {
        MOperator mOp = insn.GetMachineOpcode();
        if (mOp == MOP_xmovrr || mOp == MOP_wmovrr || mOp == MOP_xvmovs || mOp == MOP_xvmovd) {
            Operand &destOpnd = insn.GetOperand(kInsnFirstOpnd);
            Operand &srcOpnd = insn.GetOperand(kInsnSecondOpnd);
            DEBUG_ASSERT(destOpnd.IsRegister() && srcOpnd.IsRegister(), "must be");
            auto &destReg = static_cast<RegOperand &>(destOpnd);
            auto &srcReg = static_cast<RegOperand &>(srcOpnd);
            if (srcReg.GetRegisterNumber() == RZR) {
                /* rewrite zero-register copy as an immediate-0 move; srcReg
                 * is now physical (non-SSA), so the SSA branch below will not
                 * be taken and this insn is not further propagated here */
                insn.SetMOP(AArch64CG::kMd[mOp == MOP_xmovrr ? MOP_xmovri64 : MOP_wmovri32]);
                insn.SetOperand(kInsnSecondOpnd, cgFunc.CreateImmOperand(PTY_u64, 0));
            }
            if (destReg.IsSSAForm() && srcReg.IsSSAForm()) {
                /* case for ExplicitExtendProp */
                if (destReg.GetSize() != srcReg.GetSize()) {
                    VaildateImplicitCvt(destReg, srcReg, insn);
                    return false;
                }
                if (destReg.GetValidBitsNum() >= srcReg.GetValidBitsNum()) {
                    destReg.SetValidBitsNum(srcReg.GetValidBitsNum());
                } else {
                    /* narrowing valid bits through a mov is only allowed for
                     * insns whitelisted as safe propagation points */
                    MapleVector<uint32> &propInsns = optSsaInfo->GetSafePropInsns();
                    if (std::find(propInsns.begin(), propInsns.end(), insn.GetId()) == propInsns.end()) {
                        CHECK_FATAL(false, "do not support explicit extract bit in mov");
                        return false;
                    }
                }
                destVersion = optSsaInfo->FindSSAVersion(destReg.GetRegisterNumber());
                DEBUG_ASSERT(destVersion != nullptr, "find Version failed");
                srcVersion = optSsaInfo->FindSSAVersion(srcReg.GetRegisterNumber());
                DEBUG_ASSERT(srcVersion != nullptr, "find Version failed");
                if (!IsValidCopyProp(destReg, srcReg)) {
                    return false;
                }
                return true;
            } else {
                /* should be eliminated by ssa peep */
            }
        }
    }
    return false;
}
1692
Optimize(Insn & insn)1693 void CopyRegProp::Optimize(Insn &insn)
1694 {
1695 optSsaInfo->ReplaceAllUse(destVersion, srcVersion);
1696 if (cgFunc.IsExtendReg(destVersion->GetSSAvRegOpnd()->GetRegisterNumber())) {
1697 cgFunc.InsertExtendSet(srcVersion->GetSSAvRegOpnd()->GetRegisterNumber());
1698 }
1699 }
1700
/* Turn a size-mismatched register mov into an explicit conversion:
 * 32->64 becomes uxtw, 64->32 becomes ubfx of the low 32 bits. The
 * destination's valid-bit count is pinned to 32 in both directions.
 * (Name "Vaildate" is a historical typo kept for interface stability.) */
void CopyRegProp::VaildateImplicitCvt(RegOperand &destReg, const RegOperand &srcReg, Insn &movInsn)
{
    DEBUG_ASSERT(movInsn.GetMachineOpcode() == MOP_xmovrr || movInsn.GetMachineOpcode() == MOP_wmovrr,
                 "NIY explicit CVT");
    if (destReg.GetSize() == k64BitSize && srcReg.GetSize() == k32BitSize) {
        movInsn.SetMOP(AArch64CG::kMd[MOP_xuxtw64]);
    } else if (destReg.GetSize() == k32BitSize && srcReg.GetSize() == k64BitSize) {
        /* ubfx dest, src, #0, #32: extract the low word */
        movInsn.SetMOP(AArch64CG::kMd[MOP_xubfxrri6i6]);
        movInsn.AddOperand(cgFunc.CreateImmOperand(PTY_i64, 0));
        movInsn.AddOperand(cgFunc.CreateImmOperand(PTY_i64, k32BitSize));
    } else {
        CHECK_FATAL(false, " unknown explicit integer cvt, need implement in ssa prop ");
    }
    destReg.SetValidBitsNum(k32BitSize);
}
1716
Run()1717 void RedundantPhiProp::Run()
1718 {
1719 FOR_ALL_BB(bb, &cgFunc)
1720 {
1721 for (auto phiIt : bb->GetPhiInsns()) {
1722 Init();
1723 if (!CheckCondition(*phiIt.second)) {
1724 continue;
1725 }
1726 Optimize(*phiIt.second);
1727 }
1728 }
1729 }
1730
/* Fold the redundant phi: all uses of its destination take the single
 * incoming source version instead. */
void RedundantPhiProp::Optimize(Insn &insn)
{
    optSsaInfo->ReplaceAllUse(destVersion, srcVersion);
}
1735
CheckCondition(Insn & insn)1736 bool RedundantPhiProp::CheckCondition(Insn &insn)
1737 {
1738 DEBUG_ASSERT(insn.IsPhi(), "must be phi insn here");
1739 auto &phiOpnd = static_cast<PhiOperand &>(insn.GetOperand(kInsnSecondOpnd));
1740 if (phiOpnd.IsRedundancy()) {
1741 auto &phiDestReg = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
1742 destVersion = optSsaInfo->FindSSAVersion(phiDestReg.GetRegisterNumber());
1743 DEBUG_ASSERT(destVersion != nullptr, "find Version failed");
1744 uint32 srcRegNO = phiOpnd.GetOperands().begin()->second->GetRegisterNumber();
1745 srcVersion = optSsaInfo->FindSSAVersion(srcRegNO);
1746 DEBUG_ASSERT(srcVersion != nullptr, "find Version failed");
1747 return true;
1748 }
1749 return false;
1750 }
1751
/* A uxtw (or an equivalent ubfx #0,#32) is removable when no use of its
 * destination needs the zero-extended upper bits: the dest must not feed
 * another uxtw, must not already carry an implicit cvt, and no 64-bit-sized
 * use operand may consume it when both registers are 64-bit. Marks the
 * source version as implicitly converted on success. */
bool ValidBitNumberProp::CheckCondition(Insn &insn)
{
    /* extend to all shift pattern in future */
    RegOperand *destOpnd = nullptr;
    RegOperand *srcOpnd = nullptr;
    if (insn.GetMachineOpcode() == MOP_xuxtw64) {
        destOpnd = &static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
        srcOpnd = &static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
    }
    if (insn.GetMachineOpcode() == MOP_xubfxrri6i6) {
        destOpnd = &static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
        srcOpnd = &static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd));
        auto &lsb = static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
        auto &width = static_cast<ImmOperand &>(insn.GetOperand(kInsnFourthOpnd));
        /* only ubfx #0, #32 behaves like uxtw */
        if ((lsb.GetValue() != 0) || (width.GetValue() != k32BitSize)) {
            return false;
        }
    }
    if (destOpnd != nullptr && destOpnd->IsSSAForm() && srcOpnd != nullptr && srcOpnd->IsSSAForm()) {
        destVersion = optSsaInfo->FindSSAVersion(destOpnd->GetRegisterNumber());
        DEBUG_ASSERT(destVersion != nullptr, "find Version failed");
        srcVersion = optSsaInfo->FindSSAVersion(srcOpnd->GetRegisterNumber());
        DEBUG_ASSERT(srcVersion != nullptr, "find Version failed");
        if (destVersion->HasImplicitCvt()) {
            return false;
        }
        for (auto destUseIt : destVersion->GetAllUseInsns()) {
            Insn *useInsn = destUseIt.second->GetInsn();
            if (useInsn->GetMachineOpcode() == MOP_xuxtw64) {
                return false;
            }
            /* if srcOpnd upper 32 bits are valid, it can not prop to mop_x */
            if (srcOpnd->GetSize() == k64BitSize && destOpnd->GetSize() == k64BitSize) {
                const auto *useMD = useInsn->GetDesc();
                for (auto opndUseIt : destUseIt.second->GetOperands()) {
                    const OpndDesc *useProp = useMD->opndMD[opndUseIt.first];
                    if (useProp->GetSize() == k64BitSize) {
                        return false;
                    }
                }
            }
        }
        srcVersion->SetImplicitCvt();
        return true;
    }
    return false;
}
1799
Optimize(Insn & insn)1800 void ValidBitNumberProp::Optimize(Insn &insn)
1801 {
1802 optSsaInfo->ReplaceAllUse(destVersion, srcVersion);
1803 DEBUG_ASSERT(srcVersion != nullptr, "srcVersion should not be nullptr");
1804 cgFunc.InsertExtendSet(srcVersion->GetSSAvRegOpnd()->GetRegisterNumber());
1805 }
1806
Run()1807 void ValidBitNumberProp::Run()
1808 {
1809 FOR_ALL_BB(bb, &cgFunc)
1810 {
1811 FOR_BB_INSNS(insn, bb)
1812 {
1813 if (!insn->IsMachineInstruction()) {
1814 continue;
1815 }
1816 Init();
1817 if (!CheckCondition(*insn)) {
1818 continue;
1819 }
1820 Optimize(*insn);
1821 }
1822 }
1823 }
1824
Run()1825 void FpSpConstProp::Run()
1826 {
1827 FOR_ALL_BB(bb, &cgFunc)
1828 {
1829 FOR_BB_INSNS(insn, bb)
1830 {
1831 if (!insn->IsMachineInstruction()) {
1832 continue;
1833 }
1834 Init();
1835 if (!CheckCondition(*insn)) {
1836 continue;
1837 }
1838 Optimize(*insn);
1839 }
1840 }
1841 }
1842
CheckCondition(Insn & insn)1843 bool FpSpConstProp::CheckCondition(Insn &insn)
1844 {
1845 std::set<uint32> defRegs = insn.GetDefRegs();
1846 auto &a64CGFunc = static_cast<AArch64CGFunc &>(cgFunc);
1847 if (defRegs.size() <= 1) {
1848 if (insn.ScanReg(RSP)) {
1849 fpSpBase = &a64CGFunc.GetOrCreatePhysicalRegisterOperand(RSP, k64BitSize, kRegTyInt);
1850 /* not safe due to varied sp in alloca */
1851 if (cgFunc.HasVLAOrAlloca()) {
1852 return false;
1853 }
1854 }
1855 if (insn.ScanReg(RFP)) {
1856 DEBUG_ASSERT(fpSpBase == nullptr, " unexpect for both sp fp using ");
1857 fpSpBase = &a64CGFunc.GetOrCreatePhysicalRegisterOperand(RFP, k64BitSize, kRegTyInt);
1858 }
1859 if (fpSpBase == nullptr) {
1860 return false;
1861 }
1862 if (insn.GetMachineOpcode() == MOP_xaddrri12) {
1863 aT = kAArch64Add;
1864 if (GetValidSSAInfo(insn.GetOperand(kInsnFirstOpnd))) {
1865 shiftOpnd = &static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
1866 return true;
1867 }
1868 } else if (insn.GetMachineOpcode() == MOP_xsubrri12) {
1869 aT = kAArch64Sub;
1870 if (GetValidSSAInfo(insn.GetOperand(kInsnFirstOpnd))) {
1871 shiftOpnd = &static_cast<ImmOperand &>(insn.GetOperand(kInsnThirdOpnd));
1872 return true;
1873 }
1874 }
1875 }
1876 return false;
1877 }
1878
GetValidSSAInfo(Operand & opnd)1879 bool FpSpConstProp::GetValidSSAInfo(Operand &opnd)
1880 {
1881 if (opnd.IsRegister()) {
1882 auto ®Opnd = static_cast<RegOperand &>(opnd);
1883 if (regOpnd.IsSSAForm()) {
1884 replaced = optSsaInfo->FindSSAVersion(regOpnd.GetRegisterNumber());
1885 DEBUG_ASSERT(replaced != nullptr, "find ssa version failed in FpSpConstProp");
1886 return true;
1887 }
1888 }
1889 return false;
1890 }
1891
ArithmeticFold(int64 valInUse,ArithmeticType useAT) const1892 int64 FpSpConstProp::ArithmeticFold(int64 valInUse, ArithmeticType useAT) const
1893 {
1894 int64 valInDef = shiftOpnd->GetValue();
1895 int64 returnVal = 0;
1896 CHECK_FATAL(aT == kAArch64Add || aT == kAArch64Sub, "unsupport sp/fp arthimetic in aarch64");
1897 if (useAT == aT) {
1898 returnVal = valInUse + valInDef;
1899 } else {
1900 returnVal = valInUse - valInDef;
1901 }
1902 return returnVal;
1903 }
1904
/*
 * Folds "vreg = sp/fp +/- #imm" into a load/store that uses vreg as its base:
 * the memory operand is rebuilt as [sp/fp, #oldOfst +/- imm].  Applies only to
 * intact base+offset-immediate addressing, and only when the folded offset is
 * encodable for the use's opcode.
 */
void FpSpConstProp::PropInMem(DUInsnInfo &useDUInfo, Insn &useInsn)
{
    MOperator useMop = useInsn.GetMachineOpcode();
    /* atomics must keep their address register as-is */
    if (useInsn.IsAtomic()) {
        return;
    }
    if (useInsn.IsStore() || useInsn.IsLoad()) {
        if (useDUInfo.GetOperands().size() == 1) {
            auto useOpndIt = useDUInfo.GetOperands().begin();
            /* the single use must be the memory operand slot */
            if (useOpndIt->first == kInsnSecondOpnd || useOpndIt->first == kInsnThirdOpnd) {
                DEBUG_ASSERT(useOpndIt->second == 1, "multiple use in memory opnd");
                auto *a64memOpnd = static_cast<MemOperand *>(useInsn.GetMemOpnd());
                if (a64memOpnd->IsIntactIndexed() && a64memOpnd->GetAddrMode() == MemOperand::kAddrModeBOi) {
                    auto *ofstOpnd = static_cast<OfstOperand *>(a64memOpnd->GetOffsetImmediate());
                    CHECK_FATAL(ofstOpnd != nullptr, "oldOffsetOpnd is null");
                    /* base+ofst is an addition, so the def's imm folds with kAArch64Add */
                    int64 newVal = ArithmeticFold(ofstOpnd->GetValue(), kAArch64Add);
                    auto *newOfstImm =
                        &static_cast<AArch64CGFunc &>(cgFunc).CreateOfstOpnd(static_cast<uint64>(newVal), k64BitSize);
                    /* preserve the "adjust by final frame size" mark from either side */
                    if (ofstOpnd->GetVary() == kUnAdjustVary || shiftOpnd->GetVary() == kUnAdjustVary) {
                        newOfstImm->SetVary(kUnAdjustVary);
                    }
                    auto *newMem = static_cast<AArch64CGFunc &>(cgFunc).CreateMemOperand(
                        MemOperand::kAddrModeBOi, a64memOpnd->GetSize(), *fpSpBase, nullptr, newOfstImm, nullptr);
                    /* only commit when the folded offset is encodable for this opcode */
                    if (static_cast<AArch64CGFunc &>(cgFunc).IsOperandImmValid(useMop, newMem, useOpndIt->first)) {
                        useInsn.SetMemOpnd(newMem);
                        useDUInfo.DecreaseDU(useOpndIt->first);
                        replaced->CheckDeadUse(useInsn);
                    }
                }
            }
        } else {
            /*
             * case : store stack location on stack
             * add x1, sp, #8
             * ...
             * store x1 [x1, #16]
             * not prop , not benefit to live range yet
             */
            return;
        }
    }
}
1947
/*
 * Folds "vreg = sp/fp +/- #imm" into a using add/sub immediate: the use becomes
 * (useMop dest, sp/fp, #folded), flipping add <-> sub when the folded immediate
 * is negative.  Only rewrites when the result is encodable.
 */
void FpSpConstProp::PropInArith(DUInsnInfo &useDUInfo, Insn &useInsn, ArithmeticType curAT)
{
    if (useDUInfo.GetOperands().size() == 1) {
        auto &a64cgFunc = static_cast<AArch64CGFunc &>(cgFunc);
        MOperator useMop = useInsn.GetMachineOpcode();
        DEBUG_ASSERT(useDUInfo.GetOperands().begin()->first == kInsnSecondOpnd, "NIY");
        DEBUG_ASSERT(useDUInfo.GetOperands().begin()->second == 1, "multiple use in add/sub");
        auto &curVal = static_cast<ImmOperand &>(useInsn.GetOperand(kInsnThirdOpnd));
        ImmOperand &newVal =
            a64cgFunc.CreateImmOperand(ArithmeticFold(curVal.GetValue(), curAT), curVal.GetSize(), false);
        /* a negative immediate is not encodable: negate and swap add <-> sub */
        if (newVal.GetValue() < 0) {
            newVal.Negate();
            useMop = A64ConstProp::GetReversalMOP(useMop);
        }
        /* preserve the "adjust by final frame size" mark from either side */
        if (curVal.GetVary() == kUnAdjustVary || shiftOpnd->GetVary() == kUnAdjustVary) {
            newVal.SetVary(kUnAdjustVary);
        }
        if (static_cast<AArch64CGFunc &>(cgFunc).IsOperandImmValid(useMop, &newVal, kInsnThirdOpnd)) {
            Insn &newInsn =
                cgFunc.GetInsnBuilder()->BuildInsn(useMop, useInsn.GetOperand(kInsnFirstOpnd), *fpSpBase, newVal);
            useInsn.GetBB()->ReplaceInsn(useInsn, newInsn);
            /* keep the SSA def-use chains in sync with the replacement */
            optSsaInfo->ReplaceInsn(useInsn, newInsn);
        }
    } else {
        CHECK_FATAL(false, "NYI");
    }
}
1975
PropInCopy(DUInsnInfo & useDUInfo,Insn & useInsn,MOperator oriMop)1976 void FpSpConstProp::PropInCopy(DUInsnInfo &useDUInfo, Insn &useInsn, MOperator oriMop)
1977 {
1978 if (useDUInfo.GetOperands().size() == 1) {
1979 DEBUG_ASSERT(useDUInfo.GetOperands().begin()->first == kInsnSecondOpnd, "NIY");
1980 DEBUG_ASSERT(useDUInfo.GetOperands().begin()->second == 1, "multiple use in add/sub");
1981 auto &newVal = *static_cast<ImmOperand *>(shiftOpnd->Clone(*cgFunc.GetMemoryPool()));
1982 Insn &newInsn =
1983 cgFunc.GetInsnBuilder()->BuildInsn(oriMop, useInsn.GetOperand(kInsnFirstOpnd), *fpSpBase, newVal);
1984 useInsn.GetBB()->ReplaceInsn(useInsn, newInsn);
1985 optSsaInfo->ReplaceInsn(useInsn, newInsn);
1986 } else {
1987 CHECK_FATAL(false, "NYI");
1988 }
1989 }
1990
Optimize(Insn & insn)1991 void FpSpConstProp::Optimize(Insn &insn)
1992 {
1993 CHECK_FATAL(replaced != nullptr, "nullptr check");
1994 for (auto &useInsnInfo : replaced->GetAllUseInsns()) {
1995 Insn *useInsn = useInsnInfo.second->GetInsn();
1996 MOperator useMop = useInsn->GetMachineOpcode();
1997 PropInMem(*useInsnInfo.second, *useInsn);
1998 switch (useMop) {
1999 case MOP_xmovrr:
2000 case MOP_wmovrr:
2001 PropInCopy(*useInsnInfo.second, *useInsn, insn.GetMachineOpcode());
2002 break;
2003 case MOP_xaddrri12:
2004 PropInArith(*useInsnInfo.second, *useInsn, kAArch64Add);
2005 break;
2006 case MOP_xsubrri12:
2007 PropInArith(*useInsnInfo.second, *useInsn, kAArch64Sub);
2008 break;
2009 default:
2010 break;
2011 }
2012 }
2013 }
2014
/*
 * DFS through phi chains collecting every concrete (non-phi) def insn that can
 * reach the copied register into validDefInsns.  Fails on a missing def or on a
 * phi cycle where two versions define each other.
 */
bool A64PregCopyPattern::DFSFindValidDefInsns(Insn *curDefInsn, RegOperand *lastPhiDef,
                                              std::unordered_map<uint32, bool> &visited)
{
    if (curDefInsn == nullptr) {
        return false;
    }
    /*
     * avoid the case as following:
     * R113 and R117 define each other.
     *                               [BB5]     ----------------------------
     *                     phi: R113, (R111<4>, R117<9>)                  |
     *                        /        \                                  |
     *                       /          \                                 |
     *                    [BB6]   ----  [BB7]                             |
     *         add R116, R113, #4   phi: R117, (R113<5>, R116<6>)         |
     *                    /              \                                |
     *                   /                \                               |
     *                [BB8]             [BB28]                            |
     *                 /                                                  |
     *                /                                                   |
     *              [BB9]    ------    [BB5]                              |
     *        mov R1, R117          --------------------------
     *
     * but the cases as following is right:
     * (1)
     *                 [BB124]
     *     add R339, R336, #345   -------- is found twice
     *          /      \
     *         /        \
     *        /        [BB125]
     *        \         /
     *         \       /
     *          [BB56]
     *     phi: R370, (R339<124>, R339<125>)
     *            |
     *            |
     *          [BB61]
     *     mov R0, R370
     * (2)
     *                     [BB17]
     *     phi: R242, (R241<14>, R218<53>)  ------- is found twice
     *          /            \
     *         /              \
     *        /         [BB26]   [BB32]
     *        \            \       /
     *         \            [BB27]
     *          \     phi: R273, (R242<26>, R320<32>)
     *        [BB25]         /
     *           \        [BB42]
     *            \        /
     *             [BB43]
     *     phi: R321, (R242<25>, R273<42>)
     *               |
     *             [BB47]
     *          mov R0, R321
     */
    /* a revisited phi that directly consumes the previous phi's def is a def cycle: reject */
    if (visited[curDefInsn->GetId()] && curDefInsn->IsPhi() && lastPhiDef != nullptr) {
        auto &curPhiOpnd = static_cast<PhiOperand &>(curDefInsn->GetOperand(kInsnSecondOpnd));
        for (auto &curPhiListIt : curPhiOpnd.GetOperands()) {
            auto &curUseOpnd = static_cast<RegOperand &>(*curPhiListIt.second);
            if (&curUseOpnd == lastPhiDef) {
                return false;
            }
        }
    }
    /* already fully explored from another path (cases (1)/(2) above) */
    if (visited[curDefInsn->GetId()]) {
        return true;
    }
    visited[curDefInsn->GetId()] = true;
    /* leaf: a concrete machine def — collect it */
    if (!curDefInsn->IsPhi()) {
        CHECK_FATAL(curDefInsn->IsMachineInstruction(), "expect valid insn");
        (void)validDefInsns.emplace_back(curDefInsn);
        return true;
    }
    /* phi: recurse into the def of each incoming version */
    auto &phiOpnd = static_cast<PhiOperand &>(curDefInsn->GetOperand(kInsnSecondOpnd));
    for (auto &phiListIt : phiOpnd.GetOperands()) {
        auto &useOpnd = static_cast<RegOperand &>(*phiListIt.second);
        VRegVersion *useVersion = optSsaInfo->FindSSAVersion(useOpnd.GetRegisterNumber());
        Insn *defInsn = FindDefInsn(useVersion);
        if (defInsn == nullptr) {
            return false;
        }
        lastPhiDef = &static_cast<RegOperand &>(curDefInsn->GetOperand(kInsnFirstOpnd));
        if (!DFSFindValidDefInsns(defInsn, lastPhiDef, visited)) {
            return false;
        }
    }
    return true;
}
2104
CheckMultiUsePoints(const Insn * defInsn) const2105 bool A64PregCopyPattern::CheckMultiUsePoints(const Insn *defInsn) const
2106 {
2107 Operand &dstOpnd = defInsn->GetOperand(kInsnFirstOpnd);
2108 CHECK_FATAL(dstOpnd.IsRegister(), "dstOpnd must be register");
2109 VRegVersion *defVersion = optSsaInfo->FindSSAVersion(static_cast<RegOperand &>(dstOpnd).GetRegisterNumber());
2110 /* use: (phi) or (mov preg) */
2111 CHECK_FATAL(defVersion != nullptr, "nullptr check");
2112 for (auto &useInfoIt : defVersion->GetAllUseInsns()) {
2113 DUInsnInfo *useInfo = useInfoIt.second;
2114 CHECK_FATAL(useInfo, "get useDUInfo failed");
2115 Insn *useInsn = useInfo->GetInsn();
2116 CHECK_FATAL(useInsn, "get useInsn failed");
2117 if (!useInsn->IsPhi() && useInsn->GetMachineOpcode() != MOP_wmovrr &&
2118 useInsn->GetMachineOpcode() != MOP_xmovrr) {
2119 return false;
2120 }
2121 if ((useInsn->GetMachineOpcode() == MOP_wmovrr || useInsn->GetMachineOpcode() == MOP_xmovrr) &&
2122 !static_cast<RegOperand &>(useInsn->GetOperand(kInsnFirstOpnd)).IsPhysicalRegister()) {
2123 return false;
2124 }
2125 }
2126 return true;
2127 }
2128
/*
 * For a copy fed by a phi chain: collect all concrete def insns (DFS), then
 * require that they all share one opcode and differ in exactly one source
 * operand position (differIdx), and that the differing operands are virtual
 * int registers whose SSA versions share one original vreg (differOrigNO).
 */
bool A64PregCopyPattern::CheckPhiCaseCondition(Insn &curInsn, Insn &defInsn)
{
    std::unordered_map<uint32, bool> visited;
    RegOperand *lastPhiDef =
        (defInsn.IsPhi() ? &static_cast<RegOperand &>(defInsn.GetOperand(kInsnFirstOpnd)) : nullptr);
    if (!DFSFindValidDefInsns(&defInsn, lastPhiDef, visited)) {
        return false;
    }
    /* all collected defs share one mop, so checking insn 0 suffices here */
    if (!CheckValidDefInsn(validDefInsns[0])) {
        return false;
    }
    MOperator defMop = validDefInsns[0]->GetMachineOpcode();
    uint32 defOpndNum = validDefInsns[0]->GetOperandSize();
    for (size_t i = 1; i < validDefInsns.size(); ++i) {
        if (defMop != validDefInsns[i]->GetMachineOpcode()) {
            return false;
        }
        if (!CheckMultiUsePoints(validDefInsns[i])) {
            return false;
        }
        for (uint32 idx = 0; idx < defOpndNum; ++idx) {
            if (validDefInsns[0]->OpndIsDef(idx) && validDefInsns[i]->OpndIsDef(idx)) {
                continue;
            }
            Operand &opnd1 = validDefInsns[0]->GetOperand(idx);
            Operand &opnd2 = validDefInsns[i]->GetOperand(idx);
            /* first mismatch ever seen fixes differIdx.  Note: the loop tail below
             * rejects any iteration that ends with differIdx unset, so this branch
             * can only execute while i == 1 — validDefInsns[1] equals validDefInsns[i]. */
            if (!opnd1.Equals(opnd2) && differIdx == -1) {
                differIdx = static_cast<int>(idx);
                if (!validDefInsns[0]->GetOperand(static_cast<uint32>(differIdx)).IsRegister() ||
                    !validDefInsns[i]->GetOperand(static_cast<uint32>(differIdx)).IsRegister()) {
                    return false;
                }
                auto &differOpnd1 =
                    static_cast<RegOperand &>(validDefInsns[0]->GetOperand(static_cast<uint32>(differIdx)));
                auto &differOpnd2 =
                    static_cast<RegOperand &>(validDefInsns[1]->GetOperand(static_cast<uint32>(differIdx)));
                /* avoid cc reg */
                if (!differOpnd1.IsOfIntClass() || !differOpnd2.IsOfIntClass() || differOpnd1.IsPhysicalRegister() ||
                    differOpnd2.IsPhysicalRegister()) {
                    return false;
                }
                VRegVersion *differVersion1 = optSsaInfo->FindSSAVersion(differOpnd1.GetRegisterNumber());
                VRegVersion *differVersion2 = optSsaInfo->FindSSAVersion(differOpnd2.GetRegisterNumber());
                if (!differVersion1 || !differVersion2) {
                    return false;
                }
                /* both versions must descend from the same original vreg so one phi can merge them */
                if (differVersion1->GetOriginalRegNO() != differVersion2->GetOriginalRegNO()) {
                    return false;
                }
                differOrigNO = differVersion1->GetOriginalRegNO();
            } else if (!opnd1.Equals(opnd2) && idx != static_cast<uint32>(differIdx)) {
                /* a second differing position disqualifies the pattern */
                return false;
            }
        }
        /* identical def insns (differIdx still -1) or a differing dest slot: reject */
        if (differIdx <= 0) {
            return false;
        }
    }
    return true;
}
2189
CheckUselessDefInsn(const Insn * defInsn) const2190 bool A64PregCopyPattern::CheckUselessDefInsn(const Insn *defInsn) const
2191 {
2192 Operand &dstOpnd = defInsn->GetOperand(kInsnFirstOpnd);
2193 CHECK_FATAL(dstOpnd.IsRegister(), "dstOpnd must be register");
2194 VRegVersion *defVersion = optSsaInfo->FindSSAVersion(static_cast<RegOperand &>(dstOpnd).GetRegisterNumber());
2195 DEBUG_ASSERT(defVersion != nullptr, "nullptr check");
2196 if (defVersion->GetAllUseInsns().size() == 1) {
2197 return true;
2198 }
2199 /*
2200 * avoid the case as following
2201 * In a loop:
2202 * [BB43]
2203 * phi: R356, (R345<42>, R377<63>)
2204 * / \
2205 * / \
2206 * [BB44] \
2207 * add R377, R356, #1 /
2208 * mov R1, R377 /
2209 * bl /
2210 * \ /
2211 * \ /
2212 * [BB63]
2213 */
2214 for (auto &useInfoIt : defVersion->GetAllUseInsns()) {
2215 DUInsnInfo *useInfo = useInfoIt.second;
2216 CHECK_FATAL(useInfo, "get useDUInfo failed");
2217 Insn *useInsn = useInfo->GetInsn();
2218 CHECK_FATAL(useInsn, "get useInsn failed");
2219 if (useInsn->IsPhi()) {
2220 auto &phiDefOpnd = static_cast<RegOperand &>(useInsn->GetOperand(kInsnFirstOpnd));
2221 uint32 opndNum = defInsn->GetOperandSize();
2222 for (uint32 i = 0; i < opndNum; ++i) {
2223 if (defInsn->OpndIsDef(i)) {
2224 continue;
2225 }
2226 Operand &opnd = defInsn->GetOperand(i);
2227 if (opnd.IsRegister() &&
2228 static_cast<RegOperand &>(opnd).GetRegisterNumber() == phiDefOpnd.GetRegisterNumber()) {
2229 return false;
2230 }
2231 }
2232 }
2233 }
2234 return true;
2235 }
2236
CheckValidDefInsn(const Insn * defInsn)2237 bool A64PregCopyPattern::CheckValidDefInsn(const Insn *defInsn)
2238 {
2239 const auto *md = defInsn->GetDesc();
2240 CHECK_FATAL(md != nullptr, "expect valid AArch64MD");
2241 /* this pattern applies to all basicOps */
2242 if (md->IsMove() || md->IsStore() || md->IsLoad() || md->IsLoadStorePair() || md->IsCall() || md->IsDMB() ||
2243 md->IsVectorOp() || md->IsCondDef() || md->IsCondBranch() || md->IsUnCondBranch()) {
2244 return false;
2245 }
2246 uint32 opndNum = defInsn->GetOperandSize();
2247 for (uint32 i = 0; i < opndNum; ++i) {
2248 Operand &opnd = defInsn->GetOperand(i);
2249 if (!opnd.IsRegister() && !opnd.IsImmediate() && !opnd.IsOpdShift() && !opnd.IsOpdExtend()) {
2250 return false;
2251 }
2252 if (opnd.IsRegister()) {
2253 auto ®Opnd = static_cast<RegOperand &>(opnd);
2254 if (cgFunc.IsSPOrFP(regOpnd) || regOpnd.IsPhysicalRegister() ||
2255 (!regOpnd.IsOfIntClass() && !regOpnd.IsOfFloatOrSIMDClass())) {
2256 return false;
2257 }
2258 }
2259 }
2260 return true;
2261 }
2262
/*
 * Entry check: insn must be "mov preg, vreg".  The vreg's def must be a valid
 * re-materializable insn (or a phi chain of such insns — handled by the phi
 * case), all of whose uses are phis or preg copies.
 */
bool A64PregCopyPattern::CheckCondition(Insn &insn)
{
    MOperator curMop = insn.GetMachineOpcode();
    if (curMop != MOP_xmovrr && curMop != MOP_wmovrr) {
        return false;
    }
    auto &dstOpnd = static_cast<RegOperand &>(insn.GetOperand(kInsnFirstOpnd));
    /* the pattern targets copies INTO a physical register only */
    if (!dstOpnd.IsPhysicalRegister()) {
        return false;
    }
    regno_t useRegNO = static_cast<RegOperand &>(insn.GetOperand(kInsnSecondOpnd)).GetRegisterNumber();
    VRegVersion *useVersion = optSsaInfo->FindSSAVersion(useRegNO);
    Insn *defInsn = FindDefInsn(useVersion);
    if (defInsn == nullptr) {
        return false;
    }
    Operand &defDstOpnd = defInsn->GetOperand(kInsnFirstOpnd);
    /* avoid inline-asm */
    if (!defDstOpnd.IsRegister()) {
        return false;
    }
    if (!CheckMultiUsePoints(defInsn)) {
        return false;
    }
    if (defInsn->IsPhi()) {
        /* def reached through a phi: validate the whole phi chain */
        isCrossPhi = true;
        firstPhiInsn = defInsn;
        return CheckPhiCaseCondition(insn, *defInsn);
    } else {
        if (!CheckValidDefInsn(defInsn)) {
            return false;
        }
        if (!CheckUselessDefInsn(defInsn)) {
            return false;
        }
        (void)validDefInsns.emplace_back(defInsn);
    }
    return true;
}
2302
CreateNewPhiInsn(std::unordered_map<uint32,RegOperand * > & newPhiList,Insn * curInsn)2303 Insn &A64PregCopyPattern::CreateNewPhiInsn(std::unordered_map<uint32, RegOperand *> &newPhiList, Insn *curInsn)
2304 {
2305 CHECK_FATAL(!newPhiList.empty(), "empty newPhiList");
2306 RegOperand *differOrigOpnd = cgFunc.GetVirtualRegisterOperand(differOrigNO);
2307 CHECK_FATAL(differOrigOpnd != nullptr, "get original opnd default");
2308 PhiOperand &phiList = optSsaInfo->CreatePhiOperand();
2309 for (auto &it : newPhiList) {
2310 phiList.InsertOpnd(it.first, *it.second);
2311 }
2312 Insn &phiInsn = cgFunc.GetCG()->BuildPhiInsn(*differOrigOpnd, phiList);
2313 optSsaInfo->CreateNewInsnSSAInfo(phiInsn);
2314 BB *bb = curInsn->GetBB();
2315 (void)bb->InsertInsnBefore(*curInsn, phiInsn);
2316 /* <phiDef-ssaRegNO, phiInsn> */
2317 bb->AddPhiInsn(static_cast<RegOperand &>(phiInsn.GetOperand(kInsnFirstOpnd)).GetRegisterNumber(), phiInsn);
2318 return phiInsn;
2319 }
2320
2321 /*
2322 * Check whether the required phi is available, do not insert phi repeatedly.
2323 */
CheckAndGetExistPhiDef(Insn & phiInsn,std::vector<regno_t> & validDifferRegNOs) const2324 RegOperand *A64PregCopyPattern::CheckAndGetExistPhiDef(Insn &phiInsn, std::vector<regno_t> &validDifferRegNOs) const
2325 {
2326 MapleMap<regno_t, Insn *> &phiInsns = phiInsn.GetBB()->GetPhiInsns();
2327 for (auto &phiIt : phiInsns) {
2328 auto &def = static_cast<RegOperand &>(phiIt.second->GetOperand(kInsnFirstOpnd));
2329 VRegVersion *defVersion = optSsaInfo->FindSSAVersion(def.GetRegisterNumber());
2330 /*
2331 * if the phi of the change point has been created (according to original regNO), return the phiDefOpnd.
2332 * But, there is a problem: the phiDefOpnd of the same original regNO is not the required phi.
2333 * For example: (in parentheses is the original regNO)
2334 * add R110(R80), R106(R80), #1 add R122(R80), R118(R80), #1
2335 * \ /
2336 * \ /
2337 * (1) phi: R123(R80), [R110, R122]
2338 * mov R0, R123
2339 * It will return R123 of phi(1) because the differOrigNO is 80, but that's not what we want,
2340 * we need to create a new phi(2): R140(R80), [R106, R118].
2341 * so we need to check whether all phiOpnds have correct ssaRegNO.
2342 */
2343 DEBUG_ASSERT(defVersion != nullptr, "defVersion should not be nullptr");
2344 if (defVersion->GetOriginalRegNO() == differOrigNO) {
2345 auto &phiOpnd = static_cast<PhiOperand &>(phiIt.second->GetOperand(kInsnSecondOpnd));
2346 if (phiOpnd.GetOperands().size() == validDifferRegNOs.size()) {
2347 bool exist = true;
2348 for (auto &phiListIt : phiOpnd.GetOperands()) {
2349 if (std::find(validDifferRegNOs.begin(), validDifferRegNOs.end(),
2350 static_cast<RegOperand *>(phiListIt.second)->GetRegisterNumber()) ==
2351 validDifferRegNOs.end()) {
2352 exist = false;
2353 break;
2354 }
2355 }
2356 if (exist) {
2357 return &static_cast<RegOperand &>(phiIt.second->GetOperand(kInsnFirstOpnd));
2358 }
2359 }
2360 }
2361 }
2362 return nullptr;
2363 }
2364
/*
 * Recursively mirrors the phi chain rooted at curInsn for the differing operand
 * position: each phi gets (or reuses) a companion phi merging the differIdx
 * operands of the underlying def insns; returns the register that stands for
 * the differing value at curInsn.  `visited` maps insn id -> already-built phi
 * def, so shared sub-chains are built once.
 */
RegOperand &A64PregCopyPattern::DFSBuildPhiInsn(Insn *curInsn, std::unordered_map<uint32, RegOperand *> &visited)
{
    CHECK_FATAL(curInsn, "curInsn must not be null");
    if (visited[curInsn->GetId()] != nullptr) {
        return *visited[curInsn->GetId()];
    }
    /* leaf def insn: the differing operand itself is the merged value */
    if (!curInsn->IsPhi()) {
        return static_cast<RegOperand &>(curInsn->GetOperand(static_cast<uint32>(differIdx)));
    }
    std::unordered_map<uint32, RegOperand *> differPhiList;
    std::vector<regno_t> validDifferRegNOs;
    auto &phiOpnd = static_cast<PhiOperand &>(curInsn->GetOperand(kInsnSecondOpnd));
    for (auto &phiListIt : phiOpnd.GetOperands()) {
        auto &useOpnd = static_cast<RegOperand &>(*phiListIt.second);
        VRegVersion *useVersion = optSsaInfo->FindSSAVersion(useOpnd.GetRegisterNumber());
        Insn *defInsn = FindDefInsn(useVersion);
        CHECK_FATAL(defInsn != nullptr, "get defInsn failed");
        RegOperand &phiDefOpnd = DFSBuildPhiInsn(defInsn, visited);
        (void)differPhiList.emplace(phiListIt.first, &phiDefOpnd);
        (void)validDifferRegNOs.emplace_back(phiDefOpnd.GetRegisterNumber());
    }
    /*
     * The phi in control flow may already exists.
     * For example:
     *     [BB26]                      [BB45]
     *     add R191, R103, R187        add R166, R103, R164
     *            \                     /
     *             \                   /
     *                  [BB27]
     *     phi: R192, (R191<26>, R166<45>)  ------ curInsn
     *     phi: R194, (R187<26>, R164<45>)  ------ the phi witch we need already exists
     *               /                             validDifferRegNOs : [187, 164]
     *              /
     *          [BB28]                 [BB46]
     *     add R215, R103, R211         /
     *             \                   /
     *              \                 /
     *                  [BB29]
     *     phi: R216, (R215<28>, R192<46>)
     *     phi: R218, (R211<28>, R194<46>)  ------ the phi witch we need already exists
     *     mov R0, R216                            validDifferRegNOs : [211, 194]
     */
    RegOperand *existPhiDef = CheckAndGetExistPhiDef(*curInsn, validDifferRegNOs);
    if (existPhiDef == nullptr) {
        Insn &phiInsn = CreateNewPhiInsn(differPhiList, curInsn);
        visited[curInsn->GetId()] = &static_cast<RegOperand &>(phiInsn.GetOperand(kInsnFirstOpnd));
        existPhiDef = &static_cast<RegOperand &>(phiInsn.GetOperand(kInsnFirstOpnd));
    }
    return *existPhiDef;
}
2415
/*
 * Replaces "mov preg, vreg" with a re-materialized copy of the (single) valid
 * def insn writing straight into preg.  In the cross-phi case the differing
 * source operand is first merged by a phi chain built (or reused) over the
 * differIdx operands of all collected defs.
 */
void A64PregCopyPattern::Optimize(Insn &insn)
{
    Insn *defInsn = *validDefInsns.begin();
    MOperator newMop = defInsn->GetMachineOpcode();
    Operand &dstOpnd = insn.GetOperand(kInsnFirstOpnd);
    Insn &newInsn = cgFunc.GetInsnBuilder()->BuildInsn(newMop, AArch64CG::kMd[newMop]);
    uint32 opndNum = defInsn->GetOperandSize();
    newInsn.ResizeOpnds(opndNum);
    if (!isCrossPhi) {
        /* plain case: clone the def insn, retargeting its def operand(s) to preg */
        for (uint32 i = 0; i < opndNum; ++i) {
            if (defInsn->OpndIsDef(i)) {
                newInsn.SetOperand(i, dstOpnd);
            } else {
                newInsn.SetOperand(i, defInsn->GetOperand(i));
            }
        }
    } else {
        /* cross-phi case: merge the differing operands of all defs via a phi */
        std::vector<regno_t> validDifferRegNOs;
        for (Insn *vdInsn : validDefInsns) {
            auto &vdOpnd = static_cast<RegOperand &>(vdInsn->GetOperand(static_cast<uint32>(differIdx)));
            (void)validDifferRegNOs.emplace_back(vdOpnd.GetRegisterNumber());
        }
        RegOperand *differPhiDefOpnd = CheckAndGetExistPhiDef(*firstPhiInsn, validDifferRegNOs);
        if (differPhiDefOpnd == nullptr) {
            std::unordered_map<uint32, RegOperand *> visited;
            differPhiDefOpnd = &DFSBuildPhiInsn(firstPhiInsn, visited);
        }
        CHECK_FATAL(differPhiDefOpnd, "get differPhiDefOpnd failed");
        for (uint32 i = 0; i < opndNum; ++i) {
            if (defInsn->OpndIsDef(i)) {
                newInsn.SetOperand(i, dstOpnd);
            } else if (i == static_cast<uint32>(differIdx)) {
                newInsn.SetOperand(i, *differPhiDefOpnd);
            } else {
                newInsn.SetOperand(i, defInsn->GetOperand(i));
            }
        }
    }
    insn.GetBB()->ReplaceInsn(insn, newInsn);
    /* update ssa info */
    optSsaInfo->ReplaceInsn(insn, newInsn);

    if (PROP_DUMP) {
        LogInfo::MapleLogger() << ">>>>>>> In A64PregCopyPattern : <<<<<<<\n";
        LogInfo::MapleLogger() << "======= ReplaceInsn :\n";
        /* NOTE(review): insn is dumped after it was replaced in the BB — presumably
         * Dump() is still safe on an unlinked insn; confirm */
        insn.Dump();
        LogInfo::MapleLogger() << "======= NewInsn :\n";
        newInsn.Dump();
    }
}
2466
Run()2467 void A64PregCopyPattern::Run()
2468 {
2469 FOR_ALL_BB(bb, &cgFunc)
2470 {
2471 FOR_BB_INSNS(insn, bb)
2472 {
2473 if (!insn->IsMachineInstruction()) {
2474 continue;
2475 }
2476 Init();
2477 if (!CheckCondition(*insn)) {
2478 continue;
2479 }
2480 Optimize(*insn);
2481 }
2482 }
2483 validDefInsns.clear();
2484 validDefInsns.shrink_to_fit();
2485 }
2486
Visit(RegOperand * v)2487 void A64ReplaceRegOpndVisitor::Visit(RegOperand *v)
2488 {
2489 (void)v;
2490 insn->SetOperand(idx, *newReg);
2491 }
/*
 * Replaces oldReg inside a memory operand (base or index register).  The
 * operand is cloned first so the original — which may be shared by other
 * insns — is never mutated in place.
 */
void A64ReplaceRegOpndVisitor::Visit(MemOperand *a64memOpnd)
{
    bool changed = false;
    CHECK_FATAL(a64memOpnd->IsIntactIndexed(), "NYI post/pre index model");
    /* scratch pool for the clone; freed when this function returns */
    StackMemPool tempMemPool(memPoolCtrler, "temp mempool for A64ReplaceRegOpndVisitor");
    auto *cpyMem = a64memOpnd->Clone(tempMemPool);
    if (cpyMem->GetBaseRegister() != nullptr &&
        cpyMem->GetBaseRegister()->GetRegisterNumber() == oldReg->GetRegisterNumber()) {
        cpyMem->SetBaseRegister(*static_cast<RegOperand *>(newReg));
        changed = true;
    }
    if (cpyMem->GetIndexRegister() != nullptr &&
        cpyMem->GetIndexRegister()->GetRegisterNumber() == oldReg->GetRegisterNumber()) {
        /* a register cannot serve as both base and index of the same operand */
        CHECK_FATAL(!changed, "base reg is equal to index reg");
        cpyMem->SetIndexRegister(*newReg);
        changed = true;
    }
    if (changed) {
        /* NOTE(review): GetOrCreateMemOpnd presumably copies cpyMem into a
         * function-lifetime pool — confirm it does not retain a pointer into
         * tempMemPool, which dies at the end of this scope */
        insn->SetMemOpnd(&static_cast<AArch64CGFunc *>(cgFunc)->GetOrCreateMemOpnd(*cpyMem));
    }
}
Visit(ListOperand * v)2513 void A64ReplaceRegOpndVisitor::Visit(ListOperand *v)
2514 {
2515 for (auto &it : v->GetOperands()) {
2516 if (it->GetRegisterNumber() == oldReg->GetRegisterNumber()) {
2517 it = newReg;
2518 }
2519 }
2520 }
Visit(PhiOperand * v)2521 void A64ReplaceRegOpndVisitor::Visit(PhiOperand *v)
2522 {
2523 for (auto &it : v->GetOperands()) {
2524 if (it.second->GetRegisterNumber() == oldReg->GetRegisterNumber()) {
2525 it.second = newReg;
2526 }
2527 }
2528 auto &phiDest = static_cast<RegOperand &>(insn->GetOperand(kInsnFirstOpnd));
2529 if (phiDest.GetValidBitsNum() > v->GetLeastCommonValidBit()) {
2530 phiDest.SetValidBitsNum(v->GetLeastCommonValidBit());
2531 }
2532 }
2533 } // namespace maplebe
2534