1 /*
2  * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  * http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #include <array>
17 #include "optimizer/ir/analysis.h"
18 #include "optimizer/ir/basicblock.h"
19 #include "optimizer/analysis/alias_analysis.h"
20 #include "optimizer/analysis/bounds_analysis.h"
21 #include "lowering.h"
22 #include "optimizer/code_generator/encode.h"
23 
24 namespace ark::compiler {
25 
26 void Lowering::VisitAdd([[maybe_unused]] GraphVisitor *v, Inst *inst)
27 {
28     auto newInst = LowerBinaryOperationWithShiftedOperand<Opcode::Add>(inst);
29     if (newInst == nullptr && LowerAddSub(inst) != nullptr) {
30         return;
31     }
32     LowerMultiplyAddSub(newInst == nullptr ? inst : newInst);
33 }
34 
35 void Lowering::VisitSub([[maybe_unused]] GraphVisitor *v, Inst *inst)
36 {
37     auto newInst = LowerBinaryOperationWithShiftedOperand<Opcode::Sub, false>(inst);
38     if (newInst == nullptr && LowerAddSub(inst) != nullptr) {
39         return;
40     }
41     LowerMultiplyAddSub(inst);
42 }
43 
44 void Lowering::VisitCastValueToAnyType([[maybe_unused]] GraphVisitor *v, Inst *inst)
45 {
46     auto graph = inst->GetBasicBlock()->GetGraph();
47     if (graph->IsBytecodeOptimizer() || graph->IsOsrMode()) {
48         // Find a way to enable it in OSR mode.
49         return;
50     }
51 
52     // from
53     // 1.u64 Const N -> (v2)
54     // 2.any CastValueToAnyType INT_TYPE v1 -> (...)
55     //
56     // to
57     // 1.any Const Pack(N) -> (...)
58     if (LowerCastValueToAnyTypeWithConst(inst)) {
59         graph->GetEventWriter().EventLowering(GetOpcodeString(inst->GetOpcode()), inst->GetId(), inst->GetPc());
60         COMPILER_LOG(DEBUG, LOWERING) << "Lowering is applied for " << GetOpcodeString(inst->GetOpcode());
61         return;
62     }
63     auto anyType = inst->CastToCastValueToAnyType()->GetAnyType();
64     auto baseType = AnyBaseTypeToDataType(anyType);
65     // We can't propagate the object, because the GC can move it
66     if (baseType == DataType::REFERENCE) {
67         return;
68     }
69     // from
70     // 2.any CastValueToAnyType INT_TYPE v1 -> (v3)
71     // 3     SaveState                   v2(acc)
72     //
73     // to
74     // 3     SaveState                   v1(acc)
75     auto input = inst->GetInput(0).GetInst();
76     if (input->IsConst() && baseType == DataType::VOID) {
77         input = graph->FindOrCreateConstant(DataType::Any(input->CastToConstant()->GetIntValue()));
78     }
79     for (auto it = inst->GetUsers().begin(); it != inst->GetUsers().end();) {
80         auto userInst = it->GetInst();
81         if (userInst->IsSaveState()) {
82             userInst->SetInput(it->GetIndex(), input);
83             it = inst->GetUsers().begin();
84         } else {
85             ++it;
86         }
87     }
88 }
89 
90 void Lowering::VisitCast([[maybe_unused]] GraphVisitor *v, Inst *inst)
91 {
92     // An unsigned Load on AARCH64 zeroes all the high bits
93     // from
94     //  1.u8(u16, u32) Load ->(v2)
95     //  2.u64(u32) Cast u8(u16, u32) -> (v3 ..)
96     // to
97     //  1.u8(u16, u32) Load ->(v3, ..)
98     auto graph = inst->GetBasicBlock()->GetGraph();
99     if (graph->GetArch() != Arch::AARCH64) {
100         return;
101     }
102     auto type = inst->GetType();
103     if (DataType::IsTypeSigned(type)) {
104         return;
105     }
106     auto inputType = inst->CastToCast()->GetOperandsType();
107     if (DataType::IsTypeSigned(inputType) || DataType::Is64Bits(inputType, graph->GetArch())) {
108         return;
109     }
110     auto inputInst = inst->GetInput(0).GetInst();
111     if (!inputInst->IsLoad() || inputInst->GetType() != inputType) {
112         return;
113     }
114     inst->ReplaceUsers(inputInst);
115     inputInst->GetBasicBlock()->GetGraph()->GetEventWriter().EventLowering(GetOpcodeString(inst->GetOpcode()),
116                                                                            inst->GetId(), inst->GetPc());
117     COMPILER_LOG(DEBUG, LOWERING) << "Lowering is applied for " << GetOpcodeString(inst->GetOpcode());
118 }
119 
120 template <Opcode OPC>
121 void Lowering::VisitBitwiseBinaryOperation([[maybe_unused]] GraphVisitor *v, Inst *inst)
122 {
123     auto newInst = LowerBinaryOperationWithShiftedOperand<OPC>(inst);  // NOLINT(readability-magic-numbers)
124     if (newInst == nullptr && LowerLogic(inst) != nullptr) {
125         return;
126     }
127     LowerLogicWithInvertedOperand(newInst == nullptr ? inst : newInst);
128 }
129 
130 void Lowering::VisitOr(GraphVisitor *v, Inst *inst)
131 {
132     VisitBitwiseBinaryOperation<Opcode::Or>(v, inst);
133 }
134 
135 void Lowering::VisitAnd(GraphVisitor *v, Inst *inst)
136 {
137     VisitBitwiseBinaryOperation<Opcode::And>(v, inst);
138 }
139 
140 void Lowering::VisitXor(GraphVisitor *v, Inst *inst)
141 {
142     VisitBitwiseBinaryOperation<Opcode::Xor>(v, inst);
143 }
144 
145 void Lowering::VisitAndNot([[maybe_unused]] GraphVisitor *v, Inst *inst)
146 {
147     LowerBinaryOperationWithShiftedOperand<Opcode::AndNot, false>(inst);
148 }
149 
150 void Lowering::VisitXorNot([[maybe_unused]] GraphVisitor *v, Inst *inst)
151 {
152     LowerBinaryOperationWithShiftedOperand<Opcode::XorNot, false>(inst);
153 }
154 
155 void Lowering::VisitOrNot([[maybe_unused]] GraphVisitor *v, Inst *inst)
156 {
157     LowerBinaryOperationWithShiftedOperand<Opcode::OrNot, false>(inst);
158 }
159 
160 void Lowering::VisitSaveState([[maybe_unused]] GraphVisitor *v, Inst *inst)
161 {
162     ASSERT(inst->GetOpcode() == Opcode::SaveState);
163     LowerStateInst(inst->CastToSaveState());
164 }
165 
166 void Lowering::VisitSafePoint([[maybe_unused]] GraphVisitor *v, Inst *inst)
167 {
168     ASSERT(inst->GetOpcode() == Opcode::SafePoint);
169     LowerStateInst(inst->CastToSafePoint());
170 }
171 
172 void Lowering::VisitSaveStateOsr([[maybe_unused]] GraphVisitor *v, Inst *inst)
173 {
174     ASSERT(inst->GetOpcode() == Opcode::SaveStateOsr);
175     LowerStateInst(inst->CastToSaveStateOsr());
176 }
177 
178 void Lowering::VisitSaveStateDeoptimize([[maybe_unused]] GraphVisitor *v, Inst *inst)
179 {
180     ASSERT(inst->GetOpcode() == Opcode::SaveStateDeoptimize);
181     LowerStateInst(inst->CastToSaveStateDeoptimize());
182 }
183 
184 void Lowering::VisitBoundsCheck([[maybe_unused]] GraphVisitor *v, Inst *inst)
185 {
186     ASSERT(inst->GetOpcode() == Opcode::BoundsCheck);
187     LowerConstArrayIndex<BoundsCheckInstI>(inst, Opcode::BoundsCheckI);
188 }
189 
190 void Lowering::VisitLoadArray([[maybe_unused]] GraphVisitor *v, Inst *inst)
191 {
192     ASSERT(inst->GetOpcode() == Opcode::LoadArray);
193     LowerConstArrayIndex<LoadInstI>(inst, Opcode::LoadArrayI);
194 }
195 
196 void Lowering::VisitLoadCompressedStringChar([[maybe_unused]] GraphVisitor *v, Inst *inst)
197 {
198     ASSERT(inst->GetOpcode() == Opcode::LoadCompressedStringChar);
199     LowerConstArrayIndex<LoadCompressedStringCharInstI>(inst, Opcode::LoadCompressedStringCharI);
200 }
201 
202 void Lowering::VisitStoreArray([[maybe_unused]] GraphVisitor *v, Inst *inst)
203 {
204     ASSERT(inst->GetOpcode() == Opcode::StoreArray);
205     LowerConstArrayIndex<StoreInstI>(inst, Opcode::StoreArrayI);
206 }
207 
208 void Lowering::VisitLoad([[maybe_unused]] GraphVisitor *v, Inst *inst)
209 {
210     ASSERT(inst->GetOpcode() == Opcode::Load);
211     LowerMemInstScale(inst);
212 }
213 
214 void Lowering::VisitStore([[maybe_unused]] GraphVisitor *v, Inst *inst)
215 {
216     ASSERT(inst->GetOpcode() == Opcode::Store);
217     LowerMemInstScale(inst);
218 }
219 
220 void Lowering::VisitReturn([[maybe_unused]] GraphVisitor *v, Inst *inst)
221 {
222     ASSERT(inst->GetOpcode() == Opcode::Return);
223     LowerReturnInst(inst->CastToReturn());
224 }
225 
226 void Lowering::VisitShr([[maybe_unused]] GraphVisitor *v, Inst *inst)
227 {
228     LowerShift(inst);
229 }
230 
231 void Lowering::VisitAShr([[maybe_unused]] GraphVisitor *v, Inst *inst)
232 {
233     LowerShift(inst);
234 }
235 
236 void Lowering::VisitShl([[maybe_unused]] GraphVisitor *v, Inst *inst)
237 {
238     LowerShift(inst);
239 }
240 
241 void Lowering::VisitIfImm([[maybe_unused]] GraphVisitor *v, Inst *inst)
242 {
243     ASSERT(inst->GetOpcode() == Opcode::IfImm);
244     static_cast<Lowering *>(v)->LowerIf(inst->CastToIfImm());
245 }
246 
247 void Lowering::VisitMul([[maybe_unused]] GraphVisitor *v, Inst *inst)
248 {
249     if (inst->GetInput(1).GetInst()->GetOpcode() != Opcode::Constant) {
250         LowerNegateMultiply(inst);
251     } else {
252         LowerMulDivMod<Opcode::Mul>(inst);
253     }
254 }
255 
256 void Lowering::VisitDiv([[maybe_unused]] GraphVisitor *v, Inst *inst)
257 {
258     if (TryReplaceDivPowerOfTwo(v, inst)) {
259         return;
260     }
261     if (TryReplaceDivModNonPowerOfTwo(v, inst)) {
262         return;
263     }
264     LowerMulDivMod<Opcode::Div>(inst);
265 }
266 
267 void Lowering::ReplaceSignedDivPowerOfTwo([[maybe_unused]] GraphVisitor *v, Inst *inst, int64_t sValue)
268 {
269     // 0.signed Parameter
270     // 1.i64 Const 2^n -> {v2}
271     // 2.signed DIV v0, v1 -> {v3}
272     // 3.signed INST v2
273     // ===>
274     // 0.signed Parameter
275     // 1.i64 Const n -> {v2}
276     // 2.signed ASHR v0, type_size - 1 -> {v4} // 0 or -1
277     // 4.signed SHR v2, type_size - n -> {v5} //  0 or 2^n - 1
278     // 5.signed ADD v4, v0 -> {v6}
279     // 6.signed ASHR v5, n -> {v3 or v7}
280     // if the divisor is negative: 7.signed NEG v6 ->{v3}
281     // 3.signed INST v6 or v7
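    //
    // Illustrative example (added note, assuming 32-bit operands): for v0 = -7 and divisor 4 (n = 2)
    // the emitted sequence computes ASHR(-7, 31) = -1, SHR(-1, 30) = 3, ADD(3, -7) = -4, ASHR(-4, 2) = -1,
    // which matches the truncating division -7 / 4 == -1.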
282 
283     auto graph = inst->GetBasicBlock()->GetGraph();
284     auto input0 = inst->GetInput(0).GetInst();
285     int64_t n = GetPowerOfTwo(bit_cast<uint64_t>(helpers::math::AbsOrMin(sValue)));
286     ASSERT(n != -1);
287 
288     auto typeSize = DataType::GetTypeSize(inst->GetType(), graph->GetArch());
289     auto ashr =
290         graph->CreateInstAShr(inst->GetType(), inst->GetPc(), input0, graph->FindOrCreateConstant(typeSize - 1));
291     inst->InsertBefore(ashr);
292     auto shr = graph->CreateInstShr(inst->GetType(), inst->GetPc(), ashr, graph->FindOrCreateConstant(typeSize - n));
293     inst->InsertBefore(shr);
294     auto add = graph->CreateInstAdd(inst->GetType(), inst->GetPc(), shr, input0);
295     inst->InsertBefore(add);
296     Inst *ashr2 = graph->CreateInstAShr(inst->GetType(), inst->GetPc(), add, graph->FindOrCreateConstant(n));
297 
298     auto result = ashr2;
299     if (sValue < 0) {
300         inst->InsertBefore(ashr2);
301         result = graph->CreateInstNeg(inst->GetType(), inst->GetPc(), ashr2);
302     }
303 
304     InsertInstruction(inst, result);
305 
306     LowerShift(ashr);
307     LowerShift(shr);
308     LowerShift(ashr2);
309 }
310 
311 void Lowering::ReplaceUnsignedDivPowerOfTwo([[maybe_unused]] GraphVisitor *v, Inst *inst, uint64_t uValue)
312 {
313     // 0.unsigned Parameter
314     // 1.i64 Const 2^n -> {v2}
315     // 2.unsigned DIV v0, v1 -> {v3}
316     // 3.unsigned INST v2
317     // ===>
318     // 0.unsigned Parameter
319     // 1.i64 Const n -> {v2}
320     // 2.unsigned SHR v0, v1 -> {v3}
321     // 3.unsigned INST v2
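    //
    // Illustrative example (added note): an unsigned division by 8 (n = 3) becomes SHR v0, 3.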
322 
323     auto graph = inst->GetBasicBlock()->GetGraph();
324     auto input0 = inst->GetInput(0).GetInst();
325 
326     int64_t n = GetPowerOfTwo(uValue);
327     ASSERT(n != -1);
328     auto power = graph->FindOrCreateConstant(n);
329     auto shrInst = graph->CreateInstShr(inst->GetType(), inst->GetPc(), input0, power);
330     InsertInstruction(inst, shrInst);
331 
332     LowerShift(shrInst);
333 }
334 
335 bool Lowering::TryReplaceDivPowerOfTwo([[maybe_unused]] GraphVisitor *v, Inst *inst)
336 {
337     if (inst->GetBasicBlock()->GetGraph()->IsBytecodeOptimizer()) {
338         return false;
339     }
340     if (DataType::IsFloatType(inst->GetType())) {
341         return false;
342     }
343     auto c = inst->GetInput(1).GetInst();
344     if (!c->IsConst()) {
345         return false;
346     }
347 
348     uint64_t uValue = c->CastToConstant()->GetInt64Value();
349     auto sValue = bit_cast<int64_t>(uValue);
350 
351     auto input0 = inst->GetInput(0).GetInst();
352     bool isSigned = DataType::IsTypeSigned(input0->GetType());
353     if ((isSigned && !helpers::math::IsPowerOfTwo(sValue)) || (!isSigned && !helpers::math::IsPowerOfTwo(uValue))) {
354         return false;
355     }
356 
357     if (isSigned) {
358         ReplaceSignedDivPowerOfTwo(v, inst, sValue);
359     } else {
360         ReplaceUnsignedDivPowerOfTwo(v, inst, uValue);
361     }
362     return true;
363 }
364 
365 bool Lowering::TryReplaceDivModNonPowerOfTwo([[maybe_unused]] GraphVisitor *v, Inst *inst)
366 {
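    // Added note: for constant divisors that are not powers of two, Div/Mod is replaced with DivI/ModI
    // carrying the divisor as an immediate whenever the encoder reports it can strength-reduce that
    // immediate (typically a multiply-by-magic-constant sequence; the exact scheme is up to the encoder).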
367     auto graph = inst->GetBasicBlock()->GetGraph();
368     if (graph->IsBytecodeOptimizer()) {
369         return false;
370     }
371     if (DataType::IsFloatType(inst->GetType())) {
372         return false;
373     }
374     auto c = inst->GetInput(1).GetInst();
375     if (!c->IsConst()) {
376         return false;
377     }
378 
379     uint64_t uValue = c->CastToConstant()->GetInt64Value();
380 
381     auto input0 = inst->GetInput(0).GetInst();
382     bool isSigned = DataType::IsTypeSigned(input0->GetType());
383     if (!graph->GetEncoder()->CanOptimizeImmDivMod(uValue, isSigned)) {
384         return false;
385     }
386 
387     if (inst->GetOpcode() == Opcode::Div) {
388         auto divImmInst = graph->CreateInstDivI(inst->GetType(), inst->GetPc(), input0, uValue);
389         InsertInstruction(inst, divImmInst);
390     } else {
391         ASSERT(inst->GetOpcode() == Opcode::Mod);
392         auto modImmInst = graph->CreateInstModI(inst->GetType(), inst->GetPc(), input0, uValue);
393         InsertInstruction(inst, modImmInst);
394     }
395     return true;
396 }
397 
398 bool Lowering::TryReplaceModPowerOfTwo([[maybe_unused]] GraphVisitor *v, Inst *inst)
399 {
400     if (inst->GetBasicBlock()->GetGraph()->IsBytecodeOptimizer()) {
401         return false;
402     }
403     if (DataType::IsFloatType(inst->GetType())) {
404         return false;
405     }
406     auto c = inst->GetInput(1).GetInst();
407     if (!c->IsConst()) {
408         return false;
409     }
410 
411     uint64_t uValue = c->CastToConstant()->GetInt64Value();
412     auto sValue = bit_cast<int64_t>(uValue);
413 
414     auto input0 = inst->GetInput(0).GetInst();
415     bool isSigned = DataType::IsTypeSigned(input0->GetType());
416     if ((isSigned && !helpers::math::IsPowerOfTwo(sValue)) || (!isSigned && !helpers::math::IsPowerOfTwo(uValue))) {
417         return false;
418     }
419 
420     if (isSigned) {
421         int64_t absValue = helpers::math::AbsOrMin(sValue);
422         ReplaceSignedModPowerOfTwo(v, inst, bit_cast<uint64_t>(absValue));
423     } else {
424         ReplaceUnsignedModPowerOfTwo(v, inst, uValue);
425     }
426     return true;
427 }
428 
429 void Lowering::ReplaceSignedModPowerOfTwo([[maybe_unused]] GraphVisitor *v, Inst *inst, uint64_t absValue)
430 {
431     // It is optimal for AARCH64, not for AMD64, but even on AMD64 it is significantly better than the original Mod.
432     // 1. ...
433     // 2. Const 0x4
434     // 3. Mod v1, v2
435     // ====>
436     // 1. ...
437     // 4. Const 0x3
438     // 7. Const 0xFFFFFFFFFFFFFFFC
439     // 5. Add v1, v4
440     // 6. SelectImm v5, v1, v1, 0, CC_LT
441     // 8. And v6, v7
442     // 9. Sub v1, v8
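    //
    // Illustrative example (added note): for v1 = -7 and divisor 4 the sequence yields Add(-7, 3) = -4,
    // Select keeps -4 (since -7 < 0), And(-4, 0xFFFFFFFFFFFFFFFC) = -4 and Sub(-7, -4) = -3,
    // matching the truncating remainder -7 % 4 == -3; for v1 = 7 it yields 7 - 4 = 3.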
443     auto graph = inst->GetBasicBlock()->GetGraph();
444     auto input0 = inst->GetInput(0).GetInst();
445     auto valueMinus1 = absValue - 1;
446     uint32_t size = (inst->GetType() == DataType::UINT64 || inst->GetType() == DataType::INT64) ? WORD_SIZE : HALF_SIZE;
447     Inst *addInst;
448     if (graph->GetEncoder()->CanEncodeImmAddSubCmp(valueMinus1, size, false)) {
449         addInst = graph->CreateInstAddI(inst, input0, valueMinus1);
450     } else {
451         auto valueMinus1Cnst = graph->FindOrCreateConstant(valueMinus1);
452         addInst = graph->CreateInstAdd(inst, input0, valueMinus1Cnst);
453     }
454     Inst *selectInst;
455     if (graph->GetEncoder()->CanEncodeImmAddSubCmp(0, size, true)) {
456         selectInst = graph->CreateInstSelectImm(inst, std::array<Inst *, 3U> {addInst, input0, input0}, 0,
457                                                 inst->GetType(), ConditionCode::CC_LT);
458     } else {
459         auto zeroCnst = graph->FindOrCreateConstant(0);
460         selectInst = graph->CreateInstSelect(inst, std::array<Inst *, 4U> {addInst, input0, input0, zeroCnst},
461                                              inst->GetType(), ConditionCode::CC_LT);
462     }
463     auto maskValue = ~static_cast<uint64_t>(valueMinus1);
464     Inst *andInst;
465     if (graph->GetEncoder()->CanEncodeImmLogical(maskValue, size)) {
466         andInst = graph->CreateInstAndI(inst, selectInst, maskValue);
467     } else {
468         auto mask = graph->FindOrCreateConstant(maskValue);
469         andInst = graph->CreateInstAnd(inst, selectInst, mask);
470     }
471     auto subInst = graph->CreateInstSub(inst, input0, andInst);
472 
473     inst->InsertBefore(addInst);
474     inst->InsertBefore(selectInst);
475     inst->InsertBefore(andInst);
476     InsertInstruction(inst, subInst);
477 }
478 
479 void Lowering::ReplaceUnsignedModPowerOfTwo([[maybe_unused]] GraphVisitor *v, Inst *inst, uint64_t absValue)
480 {
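    // Added note (illustrative): an unsigned x % 8 becomes AndI x, 0x7, or And with a constant mask
    // when the immediate cannot be encoded directly.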
481     auto graph = inst->GetBasicBlock()->GetGraph();
482     auto valueMinus1 = absValue - 1;
483     uint32_t size = (inst->GetType() == DataType::UINT64 || inst->GetType() == DataType::INT64) ? WORD_SIZE : HALF_SIZE;
484     Inst *andInst;
485     if (graph->GetEncoder()->CanEncodeImmLogical(valueMinus1, size)) {
486         andInst = graph->CreateInstAndI(inst, inst->GetInput(0).GetInst(), valueMinus1);
487     } else {
488         auto valueMinus1Cnst = graph->FindOrCreateConstant(valueMinus1);
489         andInst = graph->CreateInstAnd(inst, inst->GetInput(0).GetInst(), valueMinus1Cnst);
490     }
491     InsertInstruction(inst, andInst);
492 }
493 
494 void Lowering::VisitMod([[maybe_unused]] GraphVisitor *v, Inst *inst)
495 {
496     if (TryReplaceModPowerOfTwo(v, inst)) {
497         return;
498     }
499     if (TryReplaceDivModNonPowerOfTwo(v, inst)) {
500         return;
501     }
502     LowerMulDivMod<Opcode::Mod>(inst);
503 }
504 
505 void Lowering::VisitNeg([[maybe_unused]] GraphVisitor *v, Inst *inst)
506 {
507     auto newInst = LowerNegateMultiply(inst);
508     LowerUnaryOperationWithShiftedOperand<Opcode::Neg>(newInst == nullptr ? inst : newInst);
509 }
510 
511 void Lowering::VisitDeoptimizeIf([[maybe_unused]] GraphVisitor *v, Inst *inst)
512 {
513     LowerToDeoptimizeCompare(inst);
514 }
515 
516 void Lowering::VisitLoadFromConstantPool([[maybe_unused]] GraphVisitor *v, Inst *inst)
517 {
518     auto graph = inst->GetBasicBlock()->GetGraph();
519     auto newInst = graph->CreateInstLoadArrayI(DataType::ANY, inst->GetPc(), inst->GetInput(0).GetInst(),
520                                                inst->CastToLoadFromConstantPool()->GetTypeId());
521 #ifdef PANDA_COMPILER_DEBUG_INFO
522     newInst->SetCurrentMethod(inst->GetCurrentMethod());
523 #endif
524     inst->ReplaceUsers(newInst);
525     inst->RemoveInputs();
526     inst->GetBasicBlock()->ReplaceInst(inst, newInst);
527     graph->GetEventWriter().EventLowering(GetOpcodeString(inst->GetOpcode()), inst->GetId(), inst->GetPc());
528     COMPILER_LOG(DEBUG, LOWERING) << "Lowering is applied for " << GetOpcodeString(inst->GetOpcode());
529 }
530 
531 // Replacing Compare EQ with Xor
532 // 1.i64 Const 0
533 // 2.b   ...
534 // 3.b   Compare EQ b v2, v1
535 // ===>
536 // 1.i64 Const 1
537 // 2.b   ...
538 // 3.i32 Xor v1, v2
539 void Lowering::VisitCompare(GraphVisitor *v, Inst *inst)
540 {
541     auto input0 = inst->GetInput(0).GetInst();
542     auto input1 = inst->GetInput(1).GetInst();
543 
544     if (inst->CastToCompare()->GetCc() != ConditionCode::CC_EQ) {
545         return;
546     }
547 
548     // Compare EQ b 0x0, v2
549     if (input1->GetType() == DataType::BOOL && input0->IsConst() && input0->CastToConstant()->GetIntValue() == 0U) {
550         std::swap(input0, input1);
551     }
552 
553     // Compare EQ b v2, 0x0
554     bool isApplicable =
555         input0->GetType() == DataType::BOOL && input1->IsConst() && input1->CastToConstant()->GetIntValue() == 0U;
556     if (!isApplicable) {
557         return;
558     }
559     // There is always more than one user of Compare, because the previous pass is Cleanup
560     bool onlyIfimm = true;
561     for (auto &user : inst->GetUsers()) {
562         if (user.GetInst()->GetOpcode() != Opcode::IfImm) {
563             onlyIfimm = false;
564             break;
565         }
566     }
567     // Skip the optimization if all users are IfImm: the Compare+IfImm optimization will be better
568     if (onlyIfimm) {
569         return;
570     }
571     auto graph = inst->GetBasicBlock()->GetGraph();
572     auto cnst = graph->FindOrCreateConstant(1);
573     auto xorInst = graph->CreateInstXor(DataType::BOOL, inst->GetPc(), input0, cnst);
574 #ifdef PANDA_COMPILER_DEBUG_INFO
575     xorInst->SetCurrentMethod(inst->GetCurrentMethod());
576 #endif
577     InsertInstruction(inst, xorInst);
578     static_cast<Lowering *>(v)->VisitXor(v, xorInst);
579 }
580 
581 template <size_t MAX_OPERANDS>
582 void Lowering::SetInputsAndInsertInstruction(OperandsCapture<MAX_OPERANDS> &operands, Inst *inst, Inst *newInst)
583 {
584     for (size_t idx = 0; idx < MAX_OPERANDS; idx++) {
585         newInst->SetInput(idx, operands.Get(idx));
586     }
587     InsertInstruction(inst, newInst);
588 }
589 
590 void Lowering::LowerShift(Inst *inst)
591 {
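    // Added note (illustrative): a shift by a constant smaller than the operand width is replaced with
    // the immediate form, e.g. Shl v0, Const(3) -> ShlI v0, 0x3.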
592     Opcode opc = inst->GetOpcode();
593     ASSERT(opc == Opcode::Shr || opc == Opcode::AShr || opc == Opcode::Shl);
594     auto pred = GetCheckInstAndGetConstInput(inst);
595     if (pred == nullptr) {
596         return;
597     }
598     ASSERT(pred->GetOpcode() == Opcode::Constant);
599     uint64_t val = (static_cast<const ConstantInst *>(pred))->GetIntValue();
600     DataType::Type type = inst->GetType();
601     uint32_t size = (type == DataType::UINT64 || type == DataType::INT64) ? WORD_SIZE : HALF_SIZE;
602     if (val >= size) {
603         return;
604     }
605 
606     auto graph = inst->GetBasicBlock()->GetGraph();
607     if (!graph->GetEncoder()->CanEncodeShift(size)) {
608         return;
609     }
610 
611     Inst *newInst;
612     if (opc == Opcode::Shr) {
613         newInst = graph->CreateInstShrI(inst, inst->GetInput(0).GetInst(), val);
614     } else if (opc == Opcode::AShr) {
615         newInst = graph->CreateInstAShrI(inst, inst->GetInput(0).GetInst(), val);
616     } else {
617         newInst = graph->CreateInstShlI(inst, inst->GetInput(0).GetInst(), val);
618     }
619     InsertInstruction(inst, newInst);
620 }
621 
622 constexpr Opcode Lowering::GetInstructionWithShiftedOperand(Opcode opcode)
623 {
624     switch (opcode) {
625         case Opcode::Add:
626             return Opcode::AddSR;
627         case Opcode::Sub:
628             return Opcode::SubSR;
629         case Opcode::And:
630             return Opcode::AndSR;
631         case Opcode::Or:
632             return Opcode::OrSR;
633         case Opcode::Xor:
634             return Opcode::XorSR;
635         case Opcode::AndNot:
636             return Opcode::AndNotSR;
637         case Opcode::OrNot:
638             return Opcode::OrNotSR;
639         case Opcode::XorNot:
640             return Opcode::XorNotSR;
641         case Opcode::Neg:
642             return Opcode::NegSR;
643         default:
644             UNREACHABLE();
645     }
646 }
647 
648 constexpr Opcode Lowering::GetInstructionWithInvertedOperand(Opcode opcode)
649 {
650     switch (opcode) {
651         case Opcode::And:
652             return Opcode::AndNot;
653         case Opcode::Or:
654             return Opcode::OrNot;
655         case Opcode::Xor:
656             return Opcode::XorNot;
657         default:
658             return Opcode::INVALID;
659     }
660 }
661 
662 ShiftType Lowering::GetShiftTypeByOpcode(Opcode opcode)
663 {
664     switch (opcode) {
665         case Opcode::Shl:
666         case Opcode::ShlI:
667             return ShiftType::LSL;
668         case Opcode::Shr:
669         case Opcode::ShrI:
670             return ShiftType::LSR;
671         case Opcode::AShr:
672         case Opcode::AShrI:
673             return ShiftType::ASR;
674         default:
675             UNREACHABLE();
676     }
677 }
678 
679 Inst *Lowering::GetCheckInstAndGetConstInput(Inst *inst)
680 {
681     DataType::Type type = inst->GetType();
682     if (type != DataType::INT64 && type != DataType::UINT64 && type != DataType::INT32 && type != DataType::UINT32 &&
683         type != DataType::POINTER && type != DataType::BOOL) {
684         return nullptr;
685     }
686 
687     auto cnst = inst->GetInput(1).GetInst();
688     if (!cnst->IsConst()) {
689         if (!inst->IsCommutative() || !inst->GetInput(0).GetInst()->IsConst()) {
690             return nullptr;
691         }
692         ASSERT(!DataType::IsFloatType(inst->GetType()));
693         auto input = cnst;
694         cnst = inst->GetInput(0).GetInst();
695         inst->SetInput(0, input);
696         inst->SetInput(1, cnst);
697     }
698     ASSERT(cnst->GetOpcode() == Opcode::Constant);
699     return cnst;
700 }
701 
702 ShiftOpcode Lowering::ConvertOpcode(Opcode newOpcode)
703 {
704     switch (newOpcode) {
705         case Opcode::NegSR:
706             return ShiftOpcode::NEG_SR;
707         case Opcode::AddSR:
708             return ShiftOpcode::ADD_SR;
709         case Opcode::SubSR:
710             return ShiftOpcode::SUB_SR;
711         case Opcode::AndSR:
712             return ShiftOpcode::AND_SR;
713         case Opcode::OrSR:
714             return ShiftOpcode::OR_SR;
715         case Opcode::XorSR:
716             return ShiftOpcode::XOR_SR;
717         case Opcode::AndNotSR:
718             return ShiftOpcode::AND_NOT_SR;
719         case Opcode::OrNotSR:
720             return ShiftOpcode::OR_NOT_SR;
721         case Opcode::XorNotSR:
722             return ShiftOpcode::XOR_NOT_SR;
723         default:
724             return ShiftOpcode::INVALID_SR;
725     }
726 }
727 
728 // Ask the encoder whether the Constant can be used as an immediate for Compare
729 bool Lowering::ConstantFitsCompareImm(Inst *cst, uint32_t size, ConditionCode cc)
730 {
731     ASSERT(cst->GetOpcode() == Opcode::Constant);
732     if (DataType::IsFloatType(cst->GetType())) {
733         return false;
734     }
735     auto *graph = cst->GetBasicBlock()->GetGraph();
736     auto *encoder = graph->GetEncoder();
737     int64_t val = cst->CastToConstant()->GetRawValue();
738     if (graph->IsBytecodeOptimizer()) {
739         return (size == HALF_SIZE) && (val == 0);
740     }
741     if (cc == ConditionCode::CC_TST_EQ || cc == ConditionCode::CC_TST_NE) {
742         return encoder->CanEncodeImmLogical(val, size);
743     }
744     return encoder->CanEncodeImmAddSubCmp(val, size, IsSignedConditionCode(cc));
745 }
746 
747 Inst *Lowering::LowerAddSub(Inst *inst)
748 {
749     ASSERT(inst->GetOpcode() == Opcode::Add || inst->GetOpcode() == Opcode::Sub);
750     auto pred = GetCheckInstAndGetConstInput(inst);
751     if (pred == nullptr) {
752         return nullptr;
753     }
754 
755     ASSERT(pred->GetOpcode() == Opcode::Constant);
756 
757     auto graph = pred->GetBasicBlock()->GetGraph();
758     auto val = static_cast<int64_t>(pred->CastToConstant()->GetIntValue());
759     DataType::Type type = inst->GetType();
760     uint32_t size = (type == DataType::UINT64 || type == DataType::INT64) ? WORD_SIZE : HALF_SIZE;
761     if (!graph->GetEncoder()->CanEncodeImmAddSubCmp(val, size, false)) {
762         return nullptr;
763     }
764 
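    // If the immediate is negative but its negation is also encodable, flip the operation so the
    // immediate stays positive, e.g. (illustrative) Add v0, Const(-5) -> SubI v0, 0x5.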
765     bool isAdd = (inst->GetOpcode() == Opcode::Add);
766     if (val < 0 && graph->GetEncoder()->CanEncodeImmAddSubCmp(-val, size, false)) {
767         val = -val;
768         isAdd = !isAdd;
769     }
770 
771     Inst *newInst;
772     if (isAdd) {
773         newInst = graph->CreateInstAddI(inst, inst->GetInput(0).GetInst(), static_cast<uint64_t>(val));
774     } else {
775         newInst = graph->CreateInstSubI(inst, inst->GetInput(0).GetInst(), static_cast<uint64_t>(val));
776     }
777     InsertInstruction(inst, newInst);
778     return newInst;
779 }
780 
781 template <Opcode OPCODE>
782 void Lowering::LowerMulDivMod(Inst *inst)
783 {
784     ASSERT(inst->GetOpcode() == OPCODE);
785     auto graph = inst->GetBasicBlock()->GetGraph();
786     if (graph->IsInstThrowable(inst)) {
787         return;
788     }
789 
790     auto pred = GetCheckInstAndGetConstInput(inst);
791     if (pred == nullptr) {
792         return;
793     }
794 
795     auto val = static_cast<int64_t>(pred->CastToConstant()->GetIntValue());
796     DataType::Type type = inst->GetType();
797     uint32_t size = (type == DataType::UINT64 || type == DataType::INT64) ? WORD_SIZE : HALF_SIZE;
798     if (!graph->GetEncoder()->CanEncodeImmMulDivMod(val, size)) {
799         return;
800     }
801 
802     Inst *newInst;
803     // NOLINTNEXTLINE(readability-magic-numbers,readability-braces-around-statements, bugprone-branch-clone)
804     if constexpr (OPCODE == Opcode::Mul) {
805         newInst = graph->CreateInstMulI(inst, inst->GetInput(0).GetInst(), static_cast<uint64_t>(val));
806         // NOLINTNEXTLINE(readability-misleading-indentation,readability-braces-around-statements)
807     } else if constexpr (OPCODE == Opcode::Div) {
808         newInst = graph->CreateInstDivI(inst, inst->GetInput(0).GetInst(), static_cast<uint64_t>(val));
809         if (graph->IsBytecodeOptimizer()) {
810             inst->ClearFlag(compiler::inst_flags::NO_DCE);  // In the Bytecode Optimizer, Div may have the NO_DCE flag
811             if (val == 0) {
812                 newInst->SetFlag(compiler::inst_flags::NO_DCE);
813             }
814         }
815         // NOLINTNEXTLINE(readability-misleading-indentation)
816     } else {
817         newInst = graph->CreateInstModI(inst, inst->GetInput(0).GetInst(), static_cast<uint64_t>(val));
818         if (graph->IsBytecodeOptimizer()) {
819             inst->ClearFlag(compiler::inst_flags::NO_DCE);  // In the Bytecode Optimizer, Mod may have the NO_DCE flag
820             if (val == 0) {
821                 newInst->SetFlag(compiler::inst_flags::NO_DCE);
822             }
823         }
824     }
825     InsertInstruction(inst, newInst);
826 }
827 
828 Inst *Lowering::LowerMultiplyAddSub(Inst *inst)
829 {
830     // Don't use MAdd/MSub for floating point inputs to avoid different results for interpreted and
831     // compiled code due to better precision of target instructions implementing MAdd/MSub.
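    //
    // Added note (illustrative): Add(Mul(a, b), c) is fused into MAdd(a, b, c), while Sub(c, Mul(a, b))
    // and the equivalent negated-multiply forms are fused into MSub(a, b, c), provided the encoder
    // supports the fused instruction.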
832     if (DataType::GetCommonType(inst->GetType()) != DataType::INT64) {
833         return nullptr;
834     }
835 
836     OperandsCapture<3U> operands {};
837     InstructionsCapture<2U> insts {};
838     InstructionsCapture<3U> instsSub3 {};
839     bool isSub = true;
840 
841     // clang-format off
842     using MAddMatcher = ADD<MUL<SRC0, SRC1, Flags::S>, SRC2>;
843     using MSubMatcher2Ops =
844         AnyOf<SUB<SRC2, MUL<SRC0, SRC1, Flags::S>>,
845               ADD<BinaryOp<Opcode::MNeg, SRC0, SRC1, Flags::S>, SRC2>>;
846     using MSubMatcher3Ops =
847         AnyOf<ADD<MUL<NEG<SRC0, Flags::S>, SRC1, Flags::S>, SRC2>,
848               ADD<NEG<MUL<SRC0, SRC1, Flags::S>, Flags::S>, SRC2>>;
849     // clang-format on
850 
851     if (MSubMatcher2Ops::Capture(inst, operands, insts)) {
852         // Operands may have different types (but the same common type); however, instructions
853         // having different types cannot be fused, because that would change the semantics.
854         if (!insts.HaveSameType()) {
855             return nullptr;
856         }
857     } else if (MSubMatcher3Ops::Capture(inst, operands, instsSub3)) {
858         if (!instsSub3.HaveSameType()) {
859             return nullptr;
860         }
861     } else if (MAddMatcher::Capture(inst, operands, insts.ResetIndex())) {
862         isSub = false;
863         if (!insts.HaveSameType()) {
864             return nullptr;
865         }
866     } else {
867         return nullptr;
868     }
869 
870     auto graph = inst->GetBasicBlock()->GetGraph();
871     auto encoder = graph->GetEncoder();
872     if ((isSub && !encoder->CanEncodeMSub()) || (!isSub && !encoder->CanEncodeMAdd())) {
873         return nullptr;
874     }
875 
876     Inst *newInst = isSub ? graph->CreateInstMSub(inst) : graph->CreateInstMAdd(inst);
877     SetInputsAndInsertInstruction(operands, inst, newInst);
878     return newInst;
879 }
880 
881 Inst *Lowering::LowerNegateMultiply(Inst *inst)
882 {
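    // Added note (illustrative): Neg(Mul(a, b)) and Mul(Neg(a), b) are replaced with MNeg(a, b)
    // when the encoder can emit a multiply-negate instruction.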
883     OperandsCapture<2U> operands {};
884     InstructionsCapture<2U> insts {};
885     using MNegMatcher = AnyOf<NEG<MUL<SRC0, SRC1, Flags::S>>, MUL<NEG<SRC0, Flags::S>, SRC1>>;
886     if (!MNegMatcher::Capture(inst, operands, insts) || !operands.HaveCommonType() || !insts.HaveSameType()) {
887         return nullptr;
888     }
889 
890     auto graph = inst->GetBasicBlock()->GetGraph();
891     if (!graph->GetEncoder()->CanEncodeMNeg()) {
892         return nullptr;
893     }
894 
895     Inst *newInst = graph->CreateInstMNeg(inst);
896     SetInputsAndInsertInstruction(operands, inst, newInst);
897     return newInst;
898 }
899 
900 bool Lowering::LowerCastValueToAnyTypeWithConst(Inst *inst)
901 {
902     auto graph = inst->GetBasicBlock()->GetGraph();
903     auto anyType = inst->CastToCastValueToAnyType()->GetAnyType();
904     auto baseType = AnyBaseTypeToDataType(anyType);
905     if (!IsTypeNumeric(baseType) || baseType == DataType::POINTER) {
906         return false;
907     }
908     auto inputInst = inst->GetInput(0).GetInst();
909     if (!inputInst->IsConst()) {
910         return false;
911     }
912     auto imm = inputInst->CastToConstant()->GetRawValue();
913     auto packImm = graph->GetRuntime()->GetPackConstantByPrimitiveType(anyType, imm);
914     auto anyConst = inst->GetBasicBlock()->GetGraph()->FindOrCreateConstant(DataType::Any(packImm));
915     inst->ReplaceUsers(anyConst);
916     return true;
917 }
918 
919 void Lowering::LowerLogicWithInvertedOperand(Inst *inst)
920 {
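    // Added note (illustrative): Or(a, Not(b)), And(a, Not(b)) and Xor(a, Not(b)) are replaced with
    // OrNot(a, b), AndNot(a, b) and XorNot(a, b) when the encoder supports the inverted-operand form.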
921     OperandsCapture<2U> operands {};
922     InstructionsCapture<2U> insts {};
923     using Matcher = AnyOf<BinaryOp<Opcode::Or, SRC0, UnaryOp<Opcode::Not, SRC1, Flags::S>, Flags::C>,
924                           BinaryOp<Opcode::And, SRC0, UnaryOp<Opcode::Not, SRC1, Flags::S>, Flags::C>,
925                           BinaryOp<Opcode::Xor, SRC0, UnaryOp<Opcode::Not, SRC1, Flags::S>, Flags::C>>;
926     if (!Matcher::Capture(inst, operands, insts) || !insts.HaveSameType()) {
927         return;
928     }
929 
930     auto graph = inst->GetBasicBlock()->GetGraph();
931     auto encoder = graph->GetEncoder();
932     auto opcode = inst->GetOpcode();
933     Inst *newInst;
934     if (opcode == Opcode::Or) {
935         if (!encoder->CanEncodeOrNot()) {
936             return;
937         }
938         newInst = graph->CreateInstOrNot(inst);
939     } else if (opcode == Opcode::And) {
940         if (!encoder->CanEncodeAndNot()) {
941             return;
942         }
943         newInst = graph->CreateInstAndNot(inst);
944     } else {
945         if (!encoder->CanEncodeXorNot()) {
946             return;
947         }
948         newInst = graph->CreateInstXorNot(inst);
949     }
950 
951     SetInputsAndInsertInstruction(operands, inst, newInst);
952 }
953 
954 template <typename T, size_t MAX_OPERANDS>
955 Inst *Lowering::LowerOperationWithShiftedOperand(Inst *inst, OperandsCapture<MAX_OPERANDS> &operands, Inst *shiftInst,
956                                                  Opcode newOpcode)
957 {
958     auto graph = inst->GetBasicBlock()->GetGraph();
959     auto encoder = graph->GetEncoder();
960 
961     ShiftType shiftType = GetShiftTypeByOpcode(shiftInst->GetOpcode());
962     if (!encoder->CanEncodeShiftedOperand(ConvertOpcode(newOpcode), shiftType)) {
963         return nullptr;
964     }
965     uint64_t imm = static_cast<BinaryImmOperation *>(shiftInst)->GetImm();
966     auto newInst = static_cast<T *>(graph->CreateInst(newOpcode));
967     newInst->SetType(inst->GetType());
968     newInst->SetPc(inst->GetPc());
969     newInst->SetImm(imm);
970     newInst->SetShiftType(shiftType);
971 #ifdef PANDA_COMPILER_DEBUG_INFO
972     newInst->SetCurrentMethod(inst->GetCurrentMethod());
973 #endif
974     SetInputsAndInsertInstruction(operands, inst, newInst);
975     return newInst;
976 }
977 
978 template <Opcode OPCODE, bool IS_COMMUTATIVE>
979 Inst *Lowering::LowerBinaryOperationWithShiftedOperand(Inst *inst)
980 {
981     OperandsCapture<2U> operands {};
982     InstructionsCapture<2U> insts {};
983     InstructionsCapture<3U> invInsts {};
984     constexpr auto FLAGS = IS_COMMUTATIVE ? Flags::COMMUTATIVE : Flags::NONE;
985 
986     // We're expecting that at this point all "shift by immediate" patterns were replaced with ShlI/ShrI/AShrI
987     // clang-format off
988     using Matcher = AnyOf<BinaryOp<OPCODE, SRC0, SHLI<SRC1>, FLAGS>,
989                           BinaryOp<OPCODE, SRC0, SHRI<SRC1>, FLAGS>,
990                           BinaryOp<OPCODE, SRC0, ASHRI<SRC1>, FLAGS>>;
991     // Instead of first replacing an instruction that has an inverted operand with a single inverted-operand
992     // instruction and then applying the rules defined above, we apply explicitly defined rules for such patterns,
993     // because after the inverted-operand instruction is inserted the shift operation would have several users.
994     // BinaryOp would then not match the IR tree against the pattern, and either more complicated checks would
995     // have to be introduced there, or a DCE pass followed by an additional Lowering pass would have to be performed.
996     // To keep things simple and avoid extra Lowering passes, explicit rules were added.
997     using InvertedOperandMatcher = MatchIf<GetInstructionWithInvertedOperand(OPCODE) != Opcode::INVALID,
998                                          AnyOf<BinaryOp<OPCODE, SRC0, NOT<SHLI<SRC1>>>,
999                                          BinaryOp<OPCODE, SRC0, NOT<SHRI<SRC1>>>,
1000                                          BinaryOp<OPCODE, SRC0, NOT<ASHRI<SRC1>>>>>;
1001     // clang-format on
1002 
1003     if (GetCommonType(inst->GetType()) != DataType::INT64) {
1004         return nullptr;
1005     }
1006 
1007     Inst *shiftInst;
1008     Opcode newOpc;
1009 
1010     if (InvertedOperandMatcher::Capture(inst, operands, invInsts) && invInsts.HaveSameType()) {
1011         auto rightOperand =
1012             operands.Get(0) == inst->GetInput(0).GetInst() ? inst->GetInput(1).GetInst() : inst->GetInput(0).GetInst();
1013         shiftInst = rightOperand->GetInput(0).GetInst();
1014         newOpc = GetInstructionWithShiftedOperand(GetInstructionWithInvertedOperand(OPCODE));
1015     } else if (Matcher::Capture(inst, operands, insts) && insts.HaveSameType()) {
1016         shiftInst =
1017             operands.Get(0) == inst->GetInput(0).GetInst() ? inst->GetInput(1).GetInst() : inst->GetInput(0).GetInst();
1018         newOpc = GetInstructionWithShiftedOperand(OPCODE);
1019     } else {
1020         return nullptr;
1021     }
1022 
1023     return LowerOperationWithShiftedOperand<BinaryShiftedRegisterOperation>(inst, operands, shiftInst, newOpc);
1024 }
1025 
1026 template <Opcode OPCODE>
1027 void Lowering::LowerUnaryOperationWithShiftedOperand(Inst *inst)
1028 {
1029     OperandsCapture<1> operands {};
1030     InstructionsCapture<2U> insts {};
1031     // We're expecting that at this point all "shift by immediate" patterns were replaced with ShlI/ShrI/AShrI
1032     // clang-format off
1033     using Matcher = AnyOf<UnaryOp<OPCODE, SHLI<SRC0>>,
1034                           UnaryOp<OPCODE, SHRI<SRC0>>,
1035                           UnaryOp<OPCODE, ASHRI<SRC0>>>;
1036     // clang-format on
1037     if (!Matcher::Capture(inst, operands, insts) || GetCommonType(inst->GetType()) != DataType::INT64 ||
1038         !insts.HaveSameType()) {
1039         return;
1040     }
1041     LowerOperationWithShiftedOperand<UnaryShiftedRegisterOperation>(inst, operands, inst->GetInput(0).GetInst(),
1042                                                                     GetInstructionWithShiftedOperand(OPCODE));
1043 }
1044 
1045 Inst *Lowering::LowerLogic(Inst *inst)
1046 {
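    // Added note (illustrative): Or/And/Xor v0, Const(imm) becomes OrI/AndI/XorI v0, imm when the
    // encoder can encode imm as a logical immediate.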
1047     Opcode opc = inst->GetOpcode();
1048     ASSERT(opc == Opcode::Or || opc == Opcode::And || opc == Opcode::Xor);
1049     auto pred = GetCheckInstAndGetConstInput(inst);
1050     if (pred == nullptr) {
1051         return nullptr;
1052     }
1053     ASSERT(pred->GetOpcode() == Opcode::Constant);
1054     uint64_t val = pred->CastToConstant()->GetIntValue();
1055     DataType::Type type = inst->GetType();
1056     uint32_t size = (type == DataType::UINT64 || type == DataType::INT64) ? WORD_SIZE : HALF_SIZE;
1057     auto graph = inst->GetBasicBlock()->GetGraph();
1058     if (!graph->GetEncoder()->CanEncodeImmLogical(val, size)) {
1059         return nullptr;
1060     }
1061     Inst *newInst;
1062     if (opc == Opcode::Or) {
1063         newInst = graph->CreateInstOrI(inst, inst->GetInput(0).GetInst(), val);
1064     } else if (opc == Opcode::And) {
1065         newInst = graph->CreateInstAndI(inst, inst->GetInput(0).GetInst(), val);
1066     } else {
1067         newInst = graph->CreateInstXorI(inst, inst->GetInput(0).GetInst(), val);
1068     }
1069     InsertInstruction(inst, newInst);
1070     return newInst;
1071 }
1072 
1073 // From
1074 //  2.u64 ShlI v1, 0x3 -> (v3)
1075 //  3.u64 Load v0, v2  -> (...)
1076 // To
1077 //  3.u64 Load v0, v2, Scale 0x3 -> (...)
1078 void Lowering::LowerMemInstScale(Inst *inst)
1079 {
1080     auto opcode = inst->GetOpcode();
1081     ASSERT(opcode == Opcode::Load || opcode == Opcode::Store);
1082     auto inputInst = inst->GetInput(1).GetInst();
1083     if (inputInst->GetOpcode() != Opcode::ShlI) {
1084         return;
1085     }
1086     auto graph = inst->GetBasicBlock()->GetGraph();
1087     auto inputType = inputInst->GetType();
1088     if (Is64BitsArch(graph->GetArch())) {
1089         if (inputType != DataType::UINT64 && inputType != DataType::INT64) {
1090             return;
1091         }
1092     } else {
1093         if (inputType != DataType::UINT32 && inputType != DataType::INT32) {
1094             return;
1095         }
1096     }
1097     auto type = inst->GetType();
1098     uint64_t val = inputInst->CastToShlI()->GetImm();
1099     uint32_t size = DataType::GetTypeSize(type, graph->GetArch());
1100     if (!graph->GetEncoder()->CanEncodeScale(val, size)) {
1101         return;
1102     }
1103     if (opcode == Opcode::Load) {
1104         ASSERT(inst->CastToLoad()->GetScale() == 0);
1105         inst->CastToLoad()->SetScale(val);
1106     } else {
1107         ASSERT(inst->CastToStore()->GetScale() == 0);
1108         inst->CastToStore()->SetScale(val);
1109     }
1110     inst->SetInput(1, inputInst->GetInput(0).GetInst());
1111     graph->GetEventWriter().EventLowering(GetOpcodeString(inst->GetOpcode()), inst->GetId(), inst->GetPc());
1112     COMPILER_LOG(DEBUG, LOWERING) << "Lowering is applied for " << GetOpcodeString(inst->GetOpcode());
1113 }
1114 
1115 template <typename LowLevelType>
1116 void Lowering::LowerConstArrayIndex(Inst *inst, Opcode lowLevelOpcode)
1117 {
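    // Added note (illustrative): an array access with a constant index, e.g. LoadArray v0, Const(5),
    // is replaced with its immediate form, e.g. LoadArrayI v0, imm = 0x5 (similarly for StoreArray,
    // BoundsCheck and LoadCompressedStringChar).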
1118     if (inst->GetBasicBlock()->GetGraph()->IsBytecodeOptimizer()) {
1119         return;
1120     }
1121     static constexpr size_t ARRAY_INDEX_INPUT = 1;
1122     auto inputInst = inst->GetInput(ARRAY_INDEX_INPUT).GetInst();
1123     ASSERT(inputInst->GetOpcode() != Opcode::BoundsCheckI);
1124     if (inputInst->IsConst()) {
1125         uint64_t value = inputInst->CastToConstant()->GetIntValue();
1126 
1127         auto graph = inst->GetBasicBlock()->GetGraph();
1128         auto newInst = graph->CreateInst(lowLevelOpcode);
1129         newInst->SetType(inst->GetType());
1130         newInst->SetPc(inst->GetPc());
1131 #ifdef PANDA_COMPILER_DEBUG_INFO
1132         newInst->SetCurrentMethod(inst->GetCurrentMethod());
1133 #endif
1134         static_cast<LowLevelType *>(newInst)->SetImm(value);
1135 
1136         // StoreInst and BoundsCheckInst have 3 inputs, LoadInst has 2 inputs
1137         newInst->SetInput(0, inst->GetInput(0).GetInst());
1138         if (inst->GetInputsCount() == 3U) {
1139             newInst->SetInput(1, inst->GetInput(2U).GetInst());
1140         } else {
1141             ASSERT(inst->GetInputsCount() == 2U);
1142         }
1143         if (inst->GetOpcode() == Opcode::StoreArray) {
1144             newInst->CastToStoreArrayI()->SetNeedBarrier(inst->CastToStoreArray()->GetNeedBarrier());
1145         }
1146 
1147         if (inst->GetOpcode() == Opcode::LoadArray) {
1148             newInst->CastToLoadArrayI()->SetNeedBarrier(inst->CastToLoadArray()->GetNeedBarrier());
1149             newInst->CastToLoadArrayI()->SetIsArray(inst->CastToLoadArray()->IsArray());
1150         }
1151         if (inst->GetOpcode() == Opcode::BoundsCheck) {
1152             newInst->CastToBoundsCheckI()->SetIsArray(inst->CastToBoundsCheck()->IsArray());
1153             if (inst->CanDeoptimize()) {
1154                 newInst->SetFlag(inst_flags::CAN_DEOPTIMIZE);
1155             }
1156         }
1157 
1158         // Replace instruction immediately because it's not removable by DCE
1159         if (inst->GetOpcode() != Opcode::BoundsCheck) {
1160             inst->ReplaceUsers(newInst);
1161         } else {
1162             auto cnst = graph->FindOrCreateConstant(value);
1163             inst->ReplaceUsers(cnst);
1164         }
1165         inst->RemoveInputs();
1166         inst->GetBasicBlock()->ReplaceInst(inst, newInst);
1167         graph->GetEventWriter().EventLowering(GetOpcodeString(inst->GetOpcode()), inst->GetId(), inst->GetPc());
1168         COMPILER_LOG(DEBUG, LOWERING) << "Lowering is applied for " << GetOpcodeString(inst->GetOpcode());
1169     }
1170 }
1171 
1172 void Lowering::LowerStateInst(SaveStateInst *saveState)
1173 {
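    // Added note (illustrative): a SaveState-like input that is a constant (or NullPtr) is removed from
    // the inputs, and its raw value is recorded as an immediate for the corresponding virtual register.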
1174     size_t idx = 0;
1175     size_t inputsCount = saveState->GetInputsCount();
1176     auto graph = saveState->GetBasicBlock()->GetGraph();
1177     if (graph->IsBytecodeOptimizer()) {
1178         return;
1179     }
1180     bool skipFloats = (graph->GetArch() == Arch::AARCH32);
1181     while (idx < inputsCount) {
1182         auto inputInst = saveState->GetInput(idx).GetInst();
1183         // On AARCH32, float values are stored in a different format than integers
1184         if (inputInst->GetOpcode() == Opcode::NullPtr ||
1185             (inputInst->IsConst() && (!skipFloats || inputInst->GetType() == DataType::INT64))) {
1186             uint64_t rawValue =
1187                 inputInst->GetOpcode() == Opcode::NullPtr ? 0 : inputInst->CastToConstant()->GetRawValue();
1188             auto vreg = saveState->GetVirtualRegister(idx);
1189             auto type = inputInst->GetType();
1190             // There is no INT64 in dynamic methods
1191             if (type == DataType::INT64 && graph->IsDynamicMethod()) {
1192                 type = DataType::INT32;
1193             }
1194             saveState->AppendImmediate(rawValue, vreg.Value(), type, vreg.GetVRegType());
1195             saveState->RemoveInput(idx);
1196             inputsCount--;
1197             graph->GetEventWriter().EventLowering(GetOpcodeString(saveState->GetOpcode()), saveState->GetId(),
1198                                                   saveState->GetPc());
1199             COMPILER_LOG(DEBUG, LOWERING) << "Lowering is applied for " << GetOpcodeString(saveState->GetOpcode());
1200         } else {
1201             idx++;
1202         }
1203     }
1204 }
1205 
1206 void Lowering::LowerReturnInst(FixedInputsInst1 *ret)
1207 {
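    // Added note (illustrative): Return Const(N) becomes ReturnI with immediate N.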
1208     auto graph = ret->GetBasicBlock()->GetGraph();
1209     if (graph->IsBytecodeOptimizer()) {
1210         return;
1211     }
1212     ASSERT(ret->GetOpcode() == Opcode::Return);
1213     auto inputInst = ret->GetInput(0).GetInst();
1214     if (inputInst->IsConst()) {
1215         uint64_t rawValue = inputInst->CastToConstant()->GetRawValue();
1216         auto retImm = graph->CreateInstReturnI(ret->GetType(), ret->GetPc(), rawValue);
1217 #ifdef PANDA_COMPILER_DEBUG_INFO
1218         retImm->SetCurrentMethod(ret->GetCurrentMethod());
1219 #endif
1220 
1221         // Replace instruction immediately because it's not removable by DCE
1222         ret->RemoveInputs();
1223         ret->GetBasicBlock()->ReplaceInst(ret, retImm);
1224         graph->GetEventWriter().EventLowering(GetOpcodeString(ret->GetOpcode()), ret->GetId(), ret->GetPc());
1225         COMPILER_LOG(DEBUG, LOWERING) << "Lowering is applied for " << GetOpcodeString(ret->GetOpcode());
1226     }
1227 }
1228 
1229 // We only want to swap if it makes the second operand an immediate
1230 bool Lowering::BetterToSwapCompareInputs(Inst *cmp)
1231 {
1232     ASSERT(cmp->GetOpcode() == Opcode::Compare);
1233     auto in0 = cmp->GetInput(0).GetInst();
1234     auto in1 = cmp->GetInput(1).GetInst();
1235     if (DataType::IsFloatType(in0->GetType())) {
1236         return false;
1237     }
1238     if (in0->GetOpcode() == compiler::Opcode::NullPtr) {
1239         return true;
1240     }
1241 
1242     if (in0->IsConst()) {
1243         if (in1->IsConst()) {
1244             DataType::Type type = cmp->CastToCompare()->GetOperandsType();
1245             uint32_t size = (type == DataType::UINT64 || type == DataType::INT64) ? WORD_SIZE : HALF_SIZE;
1246             auto cc = cmp->CastToCompare()->GetCc();
1247             return ConstantFitsCompareImm(in0, size, cc) && !ConstantFitsCompareImm(in1, size, cc);
1248         }
1249         return true;
1250     }
1251     return false;
1252 }
1253 
1254 // Optimize the order of input arguments to reduce accumulator usage (Bytecode Optimizer only).
1255 void Lowering::OptimizeIfInput(compiler::Inst *ifInst)
1256 {
1257     ASSERT(ifInst->GetOpcode() == compiler::Opcode::If);
1258     compiler::Inst *input0 = ifInst->GetInput(0).GetInst();
1259     compiler::Inst *input1 = ifInst->GetInput(1).GetInst();
1260 
1261     if (input0->IsDominate(input1)) {
1262         ifInst->SetInput(0, input1);
1263         ifInst->SetInput(1, input0);
1264         // And change CC
1265         auto cc = ifInst->CastToIf()->GetCc();
1266         cc = SwapOperandsConditionCode(cc);
1267         ifInst->CastToIf()->SetCc(cc);
1268     }
1269 }
1270 
1271 void Lowering::JoinFcmpInst(IfImmInst *inst, CmpInst *input)
1272 {
1273     auto cc = inst->GetCc();
1274     ASSERT(cc == ConditionCode::CC_EQ || cc == ConditionCode::CC_NE || IsSignedConditionCode(cc));
1275     if (input->IsFcmpg()) {
1276         /* Please look at the table of vector condition flags:
1277          * LT => Less than, or unordered
1278          * LE => Less than or equal, or unordered
1279          * GT => Greater than
1280          * GE => Greater than or equal
1281          *
1282          * LO => Less than
1283          * LS => Less than or equal
1284          * HI => Greater than, or unordered
1285          * HS => Greater than or equal, or unordered
1286          *
1287          * So we change condition to "unsigned" for Fcmpg (which should return "greater than" for unordered
1288          * comparisons).
1289          */
1290         cc = InverseSignednessConditionCode(cc);
1291     }
1292 
1293     // New instruction
1294     auto graph = input->GetBasicBlock()->GetGraph();
1295     auto replace = graph->CreateInstIf(DataType::NO_TYPE, inst->GetPc(), input->GetInput(0).GetInst(),
1296                                        input->GetInput(1).GetInst(), input->GetOperandsType(), cc, inst->GetMethod());
1297 #ifdef PANDA_COMPILER_DEBUG_INFO
1298     replace->SetCurrentMethod(inst->GetCurrentMethod());
1299 #endif
1300 
1301     // Replace IfImm instruction immediately because it's not removable by DCE
1302     inst->RemoveInputs();
1303     inst->GetBasicBlock()->ReplaceInst(inst, replace);
1304     graph->GetEventWriter().EventLowering(GetOpcodeString(inst->GetOpcode()), inst->GetId(), inst->GetPc());
1305     if (graph->IsBytecodeOptimizer()) {
1306         OptimizeIfInput(replace);
1307     }
1308     COMPILER_LOG(DEBUG, LOWERING) << "Lowering is applied for " << GetOpcodeString(inst->GetOpcode());
1309 }
1310 
1311 void Lowering::LowerIf(IfImmInst *inst)
1312 {
1313     ASSERT(inst->GetCc() == ConditionCode::CC_NE || inst->GetCc() == ConditionCode::CC_EQ);
1314     ASSERT(inst->GetImm() == 0);
1315     if (inst->GetOperandsType() != DataType::BOOL) {
1316         ASSERT(GetGraph()->IsDynamicMethod());
1317         return;
1318     }
1319     auto input = inst->GetInput(0).GetInst();
1320     if (input->GetOpcode() != Opcode::Compare && input->GetOpcode() != Opcode::And) {
1321         return;
1322     }
1323     // Check that the input has only IfImm users
1324     for (auto &user : input->GetUsers()) {
1325         if (user.GetInst()->GetOpcode() != Opcode::IfImm) {
1326             return;
1327         }
1328     }
1329     // Try to put the constant into the second input
1330     if (input->GetOpcode() == Opcode::Compare && BetterToSwapCompareInputs(input)) {
1331         // Swap inputs
1332         auto in0 = input->GetInput(0).GetInst();
1333         auto in1 = input->GetInput(1).GetInst();
1334         input->SetInput(0, in1);
1335         input->SetInput(1, in0);
1336         // And change CC
1337         auto cc = input->CastToCompare()->GetCc();
1338         cc = SwapOperandsConditionCode(cc);
1339         input->CastToCompare()->SetCc(cc);
1340     }
1341     if (!GetGraph()->IsBytecodeOptimizer()) {
1342         for (auto &newInput : input->GetInputs()) {
1343             auto realNewInput = input->GetDataFlowInput(newInput.GetInst());
1344             if (realNewInput->IsMovableObject()) {
1345                 ssb_.SearchAndCreateMissingObjInSaveState(GetGraph(), realNewInput, inst);
1346             }
1347         }
1348     }
1349     auto cst = input->GetInput(1).GetInst();
1350     DataType::Type type =
1351         (input->GetOpcode() == Opcode::Compare) ? input->CastToCompare()->GetOperandsType() : input->GetType();
1352     uint32_t size = (type == DataType::UINT64 || type == DataType::INT64) ? WORD_SIZE : HALF_SIZE;
1353     auto cc = input->GetOpcode() == Opcode::Compare ? input->CastToCompare()->GetCc() : ConditionCode::CC_TST_NE;
1354     // IfImm can be inverted
1355     if (inst->GetCc() == ConditionCode::CC_EQ && inst->GetImm() == 0) {
1356         cc = GetInverseConditionCode(cc);
1357     }
1358     if (cst->GetOpcode() == compiler::Opcode::NullPtr || (cst->IsConst() && ConstantFitsCompareImm(cst, size, cc))) {
1359         // In-place change for IfImm
1360         InPlaceLowerIfImm(inst, input, cst, cc, type);
1361     } else {
1362         LowerIfImmToIf(inst, input, cc, type);
1363     }
1364 }
1365 
1366 void Lowering::InPlaceLowerIfImm(IfImmInst *inst, Inst *input, Inst *cst, ConditionCode cc, DataType::Type inputType)
1367 {
1368     auto graph = inst->GetBasicBlock()->GetGraph();
1369     inst->SetOperandsType(inputType);
1370     auto newInput = input->GetInput(0).GetInst();
1371     // For Compare(nullptr, 0), set `nullptr` as the new input
1372     if (cst->GetOpcode() == Opcode::NullPtr && IsZeroConstant(newInput) &&
1373         DataType::IsReference(inst->GetOperandsType())) {
1374         newInput = cst;
1375     }
1376     inst->SetInput(0, newInput);
1377 
1378     uint64_t val = cst->GetOpcode() == Opcode::NullPtr ? 0 : cst->CastToConstant()->GetRawValue();
1379     inst->SetImm(val);
1380     inst->SetCc(cc);
1381     inst->GetBasicBlock()->GetGraph()->GetEventWriter().EventLowering(GetOpcodeString(inst->GetOpcode()), inst->GetId(),
1382                                                                       inst->GetPc());
1383     COMPILER_LOG(DEBUG, LOWERING) << "Lowering is applied for " << GetOpcodeString(inst->GetOpcode());
1384 
1385     if (inst->GetImm() == 0 && newInput->GetOpcode() == Opcode::Cmp &&
1386         DataType::IsFloatType(newInput->CastToCmp()->GetOperandsType()) && !graph->IsBytecodeOptimizer()) {
1387         // Check that inst and input are the only users of newInput
1388         bool join {true};
1389         for (auto &user : newInput->GetUsers()) {
1390             if (auto userInst = user.GetInst(); userInst != inst && userInst != input) {
1391                 join = false;
1392                 break;
1393             }
1394         }
1395         if (join) {
1396             JoinFcmpInst(inst, newInput->CastToCmp());
1397         }
1398     }
1399 }
1400 
1401 void Lowering::LowerIfImmToIf(IfImmInst *inst, Inst *input, ConditionCode cc, DataType::Type inputType)
1402 {
1403     auto graph = inst->GetBasicBlock()->GetGraph();
1404     // New instruction
1405     auto replace = graph->CreateInstIf(DataType::NO_TYPE, inst->GetPc(), input->GetInput(0).GetInst(),
1406                                        input->GetInput(1).GetInst(), inputType, cc, inst->GetMethod());
1407 #ifdef PANDA_COMPILER_DEBUG_INFO
1408     replace->SetCurrentMethod(inst->GetCurrentMethod());
1409 #endif
1410     // Replace IfImm instruction immediately because it's not removable by DCE
1411     inst->RemoveInputs();
1412     inst->GetBasicBlock()->ReplaceInst(inst, replace);
1413     graph->GetEventWriter().EventLowering(GetOpcodeString(inst->GetOpcode()), inst->GetId(), inst->GetPc());
1414     if (graph->IsBytecodeOptimizer()) {
1415         OptimizeIfInput(replace);
1416     }
1417     COMPILER_LOG(DEBUG, LOWERING) << "Lowering is applied for " << GetOpcodeString(inst->GetOpcode());
1418 }
1419 
1420 void Lowering::LowerToDeoptimizeCompare(Inst *inst)
1421 {
1422     ASSERT(inst->GetOpcode() == Opcode::DeoptimizeIf);
1423     auto graph = inst->GetBasicBlock()->GetGraph();
1424     ASSERT(!graph->IsBytecodeOptimizer());
1425 
1426     auto deoptIf = inst->CastToDeoptimizeIf();
1427     if (deoptIf->GetInput(0).GetInst()->GetOpcode() != Opcode::Compare) {
1428         return;
1429     }
1430     auto compare = deoptIf->GetInput(0).GetInst()->CastToCompare();
1431     if (!compare->HasSingleUser()) {
1432         return;
1433     }
1434     COMPILER_LOG(DEBUG, LOWERING) << __func__ << "\n" << *compare << "\n" << *deoptIf;
1435     auto cmpInp1 = compare->GetInput(1).GetInst();
1436     DataType::Type type = compare->GetOperandsType();
1437     uint32_t size =
1438         (type == DataType::UINT64 || type == DataType::INT64 || type == DataType::ANY) ? WORD_SIZE : HALF_SIZE;
1439     Inst *deoptCmp = nullptr;
1440     if ((cmpInp1->IsConst() && ConstantFitsCompareImm(cmpInp1, size, compare->GetCc())) || cmpInp1->IsNullPtr()) {
1441         uint64_t imm = cmpInp1->IsNullPtr() ? 0 : cmpInp1->CastToConstant()->GetRawValue();
1442         deoptCmp = graph->CreateInstDeoptimizeCompareImm(deoptIf, compare, imm);
1443     } else {
1444         deoptCmp = graph->CreateInstDeoptimizeCompare(deoptIf, compare);
1445         deoptCmp->SetInput(1, compare->GetInput(1).GetInst());
1446     }
1447     deoptCmp->SetInput(0, compare->GetInput(0).GetInst());
1448     deoptCmp->SetSaveState(deoptIf->GetSaveState());
1449 #ifdef PANDA_COMPILER_DEBUG_INFO
1450     deoptCmp->SetCurrentMethod(inst->GetCurrentMethod());
1451 #endif
1452     deoptIf->ReplaceUsers(deoptCmp);
1453     deoptIf->GetBasicBlock()->InsertAfter(deoptCmp, deoptIf);
1454     deoptIf->ClearFlag(compiler::inst_flags::NO_DCE);
1455     graph->GetEventWriter().EventLowering(GetOpcodeString(deoptIf->GetOpcode()), deoptIf->GetId(), deoptIf->GetPc());
1456     COMPILER_LOG(DEBUG, LOWERING) << "===>\n" << *deoptCmp;
1457 }
1458 
1459 void Lowering::InvalidateAnalyses()
1460 {
1461     GetGraph()->InvalidateAnalysis<BoundsAnalysis>();
1462     GetGraph()->InvalidateAnalysis<AliasAnalysis>();
1463 }
1464 
1465 bool Lowering::RunImpl()
1466 {
1467     VisitGraph();
1468     return true;
1469 }
1470 }  // namespace ark::compiler
1471