1 /*
2  * Copyright (c) 2021-2025 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  * http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #include <array>
17 #include "optimizer/ir/analysis.h"
18 #include "optimizer/ir/basicblock.h"
19 #include "optimizer/analysis/alias_analysis.h"
20 #include "optimizer/analysis/bounds_analysis.h"
21 #include "lowering.h"
22 #include "optimizer/code_generator/encode.h"
23 
24 namespace ark::compiler {
25 
26 void Lowering::VisitAdd([[maybe_unused]] GraphVisitor *v, Inst *inst)
27 {
28     auto newInst = LowerBinaryOperationWithShiftedOperand<Opcode::Add>(inst);
29     if (newInst == nullptr && LowerAddSub(inst) != nullptr) {
30         return;
31     }
32     LowerMultiplyAddSub(newInst == nullptr ? inst : newInst);
33 }
34 
35 void Lowering::VisitSub([[maybe_unused]] GraphVisitor *v, Inst *inst)
36 {
37     auto newInst = LowerBinaryOperationWithShiftedOperand<Opcode::Sub, false>(inst);
38     if (newInst == nullptr && LowerAddSub(inst) != nullptr) {
39         return;
40     }
41     LowerMultiplyAddSub(inst);
42 }
43 
44 void Lowering::VisitCastValueToAnyType([[maybe_unused]] GraphVisitor *v, Inst *inst)
45 {
46     auto graph = inst->GetBasicBlock()->GetGraph();
47     if (graph->IsBytecodeOptimizer() || graph->IsOsrMode()) {
48         // Find a way to enable it in OSR mode.
49         return;
50     }
51 
52     // from
53     // 1.u64 Const N -> (v2)
54     // 2.any CastValueToAnyType INT_TYPE v1 -> (...)
55     //
56     // to
57     // 1.any Const Pack(N) -> (...)
58     if (LowerCastValueToAnyTypeWithConst(inst)) {
59         graph->GetEventWriter().EventLowering(GetOpcodeString(inst->GetOpcode()), inst->GetId(), inst->GetPc());
60         COMPILER_LOG(DEBUG, LOWERING) << "Lowering is applied for " << GetOpcodeString(inst->GetOpcode());
61         return;
62     }
63     auto anyType = inst->CastToCastValueToAnyType()->GetAnyType();
64     auto baseType = AnyBaseTypeToDataType(anyType);
65     // We can't propagate the object, because GC can move it
66     if (baseType == DataType::REFERENCE) {
67         return;
68     }
69     // from
70     // 2.any CastValueToAnyType INT_TYPE v1 -> (v3)
71     // 3     SaveState                   v2(acc)
72     //
73     // to
74     // 3     SaveState                   v1(acc)
75     auto input = inst->GetInput(0).GetInst();
76     if (input->IsConst() && baseType == DataType::VOID) {
77         input = graph->FindOrCreateConstant(DataType::Any(input->CastToConstant()->GetIntValue()));
78     }
79     for (auto it = inst->GetUsers().begin(); it != inst->GetUsers().end();) {
80         auto userInst = it->GetInst();
81         if (userInst->IsSaveState()) {
82             userInst->SetInput(it->GetIndex(), input);
83             it = inst->GetUsers().begin();
84         } else {
85             ++it;
86         }
87     }
88 }
89 
90 void Lowering::VisitCast([[maybe_unused]] GraphVisitor *v, Inst *inst)
91 {
92     // unsigned Load on AARCH64 zeroes all high bits
93     // from
94     //  1.u8(u16, u32) Load ->(v2)
95     //  2.u64(u32) Cast u8(u16, u32) -> (v3 ..)
96     // to
97     //  1.u8(u16) Load ->(v3, ..)
98     auto graph = inst->GetBasicBlock()->GetGraph();
99     if (graph->GetArch() != Arch::AARCH64) {
100         return;
101     }
102     auto type = inst->GetType();
103     if (DataType::IsTypeSigned(type)) {
104         return;
105     }
106     auto inputType = inst->CastToCast()->GetOperandsType();
107     if (DataType::IsTypeSigned(inputType) || DataType::Is64Bits(inputType, graph->GetArch())) {
108         return;
109     }
110     auto inputInst = inst->GetInput(0).GetInst();
111     if (!inputInst->IsLoad() || inputInst->GetType() != inputType) {
112         return;
113     }
114     inst->ReplaceUsers(inputInst);
115     inputInst->GetBasicBlock()->GetGraph()->GetEventWriter().EventLowering(GetOpcodeString(inst->GetOpcode()),
116                                                                            inst->GetId(), inst->GetPc());
117     COMPILER_LOG(DEBUG, LOWERING) << "Lowering is applied for " << GetOpcodeString(inst->GetOpcode());
118 }
119 
120 template <Opcode OPC>
121 void Lowering::VisitBitwiseBinaryOperation([[maybe_unused]] GraphVisitor *v, Inst *inst)
122 {
123     auto newInst = LowerBinaryOperationWithShiftedOperand<OPC>(inst);  // NOLINT(readability-magic-numbers)
124     if (newInst == nullptr && LowerLogic(inst) != nullptr) {
125         return;
126     }
127     LowerLogicWithInvertedOperand(newInst == nullptr ? inst : newInst);
128 }
129 
130 void Lowering::VisitOr(GraphVisitor *v, Inst *inst)
131 {
132     VisitBitwiseBinaryOperation<Opcode::Or>(v, inst);
133 }
134 
135 void Lowering::VisitAnd(GraphVisitor *v, Inst *inst)
136 {
137     VisitBitwiseBinaryOperation<Opcode::And>(v, inst);
138 }
139 
140 void Lowering::VisitXor(GraphVisitor *v, Inst *inst)
141 {
142     VisitBitwiseBinaryOperation<Opcode::Xor>(v, inst);
143 }
144 
145 void Lowering::VisitAndNot([[maybe_unused]] GraphVisitor *v, Inst *inst)
146 {
147     LowerBinaryOperationWithShiftedOperand<Opcode::AndNot, false>(inst);
148 }
149 
150 void Lowering::VisitXorNot([[maybe_unused]] GraphVisitor *v, Inst *inst)
151 {
152     LowerBinaryOperationWithShiftedOperand<Opcode::XorNot, false>(inst);
153 }
154 
155 void Lowering::VisitOrNot([[maybe_unused]] GraphVisitor *v, Inst *inst)
156 {
157     LowerBinaryOperationWithShiftedOperand<Opcode::OrNot, false>(inst);
158 }
159 
160 void Lowering::VisitSaveState([[maybe_unused]] GraphVisitor *v, Inst *inst)
161 {
162     ASSERT(inst->GetOpcode() == Opcode::SaveState);
163     LowerStateInst(inst->CastToSaveState());
164 }
165 
166 void Lowering::VisitSafePoint([[maybe_unused]] GraphVisitor *v, Inst *inst)
167 {
168     ASSERT(inst->GetOpcode() == Opcode::SafePoint);
169     LowerStateInst(inst->CastToSafePoint());
170 }
171 
172 void Lowering::VisitSaveStateOsr([[maybe_unused]] GraphVisitor *v, Inst *inst)
173 {
174     ASSERT(inst->GetOpcode() == Opcode::SaveStateOsr);
175     LowerStateInst(inst->CastToSaveStateOsr());
176 }
177 
178 void Lowering::VisitSaveStateDeoptimize([[maybe_unused]] GraphVisitor *v, Inst *inst)
179 {
180     ASSERT(inst->GetOpcode() == Opcode::SaveStateDeoptimize);
181     LowerStateInst(inst->CastToSaveStateDeoptimize());
182 }
183 
184 void Lowering::VisitBoundsCheck([[maybe_unused]] GraphVisitor *v, Inst *inst)
185 {
186     ASSERT(inst->GetOpcode() == Opcode::BoundsCheck);
187     LowerConstArrayIndex<BoundsCheckInstI>(inst, Opcode::BoundsCheckI);
188 }
189 
190 void Lowering::VisitLoadArray([[maybe_unused]] GraphVisitor *v, Inst *inst)
191 {
192     ASSERT(inst->GetOpcode() == Opcode::LoadArray);
193     LowerConstArrayIndex<LoadInstI>(inst, Opcode::LoadArrayI);
194 }
195 
196 void Lowering::VisitLoadCompressedStringChar([[maybe_unused]] GraphVisitor *v, Inst *inst)
197 {
198     ASSERT(inst->GetOpcode() == Opcode::LoadCompressedStringChar);
199     LowerConstArrayIndex<LoadCompressedStringCharInstI>(inst, Opcode::LoadCompressedStringCharI);
200 }
201 
202 void Lowering::VisitStoreArray([[maybe_unused]] GraphVisitor *v, Inst *inst)
203 {
204     ASSERT(inst->GetOpcode() == Opcode::StoreArray);
205     LowerConstArrayIndex<StoreInstI>(inst, Opcode::StoreArrayI);
206 }
207 
208 void Lowering::VisitLoad([[maybe_unused]] GraphVisitor *v, Inst *inst)
209 {
210     ASSERT(inst->GetOpcode() == Opcode::Load);
211     LowerMemInstScale(inst);
212 }
213 
214 void Lowering::VisitLoadNative([[maybe_unused]] GraphVisitor *v, Inst *inst)
215 {
216     ASSERT(inst->GetOpcode() == Opcode::LoadNative);
217     inst->SetOpcode(Opcode::Load);
218     VisitLoad(v, inst);
219 }
220 
221 void Lowering::VisitStore([[maybe_unused]] GraphVisitor *v, Inst *inst)
222 {
223     ASSERT(inst->GetOpcode() == Opcode::Store);
224     LowerMemInstScale(inst);
225 }
226 
227 void Lowering::VisitStoreNative([[maybe_unused]] GraphVisitor *v, Inst *inst)
228 {
229     ASSERT(inst->GetOpcode() == Opcode::StoreNative);
230     inst->SetOpcode(Opcode::Store);
231     VisitStore(v, inst);
232 }
233 
234 void Lowering::VisitReturn([[maybe_unused]] GraphVisitor *v, Inst *inst)
235 {
236     ASSERT(inst->GetOpcode() == Opcode::Return);
237     LowerReturnInst(inst->CastToReturn());
238 }
239 
240 void Lowering::VisitShr([[maybe_unused]] GraphVisitor *v, Inst *inst)
241 {
242     LowerShift(inst);
243 }
244 
245 void Lowering::VisitAShr([[maybe_unused]] GraphVisitor *v, Inst *inst)
246 {
247     LowerShift(inst);
248 }
249 
250 void Lowering::VisitShl([[maybe_unused]] GraphVisitor *v, Inst *inst)
251 {
252     LowerShift(inst);
253 }
254 
255 void Lowering::VisitIfImm([[maybe_unused]] GraphVisitor *v, Inst *inst)
256 {
257     ASSERT(inst->GetOpcode() == Opcode::IfImm);
258     static_cast<Lowering *>(v)->LowerIf(inst->CastToIfImm());
259 }
260 
261 void Lowering::VisitMul([[maybe_unused]] GraphVisitor *v, Inst *inst)
262 {
263     if (inst->GetInput(1).GetInst()->GetOpcode() != Opcode::Constant) {
264         LowerNegateMultiply(inst);
265     } else {
266         LowerMulDivMod<Opcode::Mul>(inst);
267     }
268 }
269 
270 void Lowering::VisitDiv([[maybe_unused]] GraphVisitor *v, Inst *inst)
271 {
272     if (TryReplaceDivPowerOfTwo(v, inst)) {
273         return;
274     }
275     if (TryReplaceDivModNonPowerOfTwo(v, inst)) {
276         return;
277     }
278     LowerMulDivMod<Opcode::Div>(inst);
279 }
280 
281 void Lowering::ReplaceSignedDivPowerOfTwo([[maybe_unused]] GraphVisitor *v, Inst *inst, int64_t sValue)
282 {
283     // 0.signed Parameter
284     // 1.i64 Const 2^n -> {v2}
285     // 2.signed DIV v0, v1 -> {v3}
286     // 3.signed INST v2
287     // ===>
288     // 0.signed Parameter
289     // 1.i64 Const n -> {v2}
290     // 2.signed ASHR v0, type_size - 1 -> {v4} // 0 or -1
291     // 4.signed SHR v2, type_size - n -> {v5} //  0 or 2^n - 1
292     // 5.signed ADD v4, v0 -> {v6}
293     // 6.signed ASHR v5, n -> {v3 or v7}
294     // if sValue < 0: 7.signed NEG v6 ->{v3}
295     // 3.signed INST v6 or v7
296 
297     auto graph = inst->GetBasicBlock()->GetGraph();
298     auto input0 = inst->GetInput(0).GetInst();
299     int64_t n = GetPowerOfTwo(bit_cast<uint64_t>(helpers::math::AbsOrMin(sValue)));
300     ASSERT(n != -1);
301 
302     auto typeSize = DataType::GetTypeSize(inst->GetType(), graph->GetArch());
303     auto ashr =
304         graph->CreateInstAShr(inst->GetType(), inst->GetPc(), input0, graph->FindOrCreateConstant(typeSize - 1));
305     inst->InsertBefore(ashr);
306     auto shr = graph->CreateInstShr(inst->GetType(), inst->GetPc(), ashr, graph->FindOrCreateConstant(typeSize - n));
307     inst->InsertBefore(shr);
308     auto add = graph->CreateInstAdd(inst->GetType(), inst->GetPc(), shr, input0);
309     inst->InsertBefore(add);
310     Inst *ashr2 = graph->CreateInstAShr(inst->GetType(), inst->GetPc(), add, graph->FindOrCreateConstant(n));
311 
312     auto result = ashr2;
313     if (sValue < 0) {
314         inst->InsertBefore(ashr2);
315         result = graph->CreateInstNeg(inst->GetType(), inst->GetPc(), ashr2);
316     }
317 
318     InsertInstruction(inst, result);
319 
320     LowerShift(ashr);
321     LowerShift(shr);
322     LowerShift(ashr2);
323 }
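// Illustrative walk-through of the sequence above, assuming 32-bit operands and sValue == 4 (n == 2):
//   x = -7: ashr = -7 >> 31 = -1; shr = uint32_t(-1) >> 30 = 3; add = 3 + (-7) = -4; ashr2 = -4 >> 2 = -1
//   x =  7: ashr = 0;             shr = 0;                      add = 7;             ashr2 = 7 >> 2 = 1
// Both results match C-style truncating division (-7 / 4 == -1, 7 / 4 == 1); for a negative divisor such as
// sValue == -4, the extra Neg flips the sign of the quotient.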
324 
325 void Lowering::ReplaceUnsignedDivPowerOfTwo([[maybe_unused]] GraphVisitor *v, Inst *inst, uint64_t uValue)
326 {
327     // 0.unsigned Parameter
328     // 1.i64 Const 2^n -> {v2}
329     // 2.unsigned DIV v0, v1 -> {v3}
330     // 3.unsigned INST v2
331     // ===>
332     // 0.unsigned Parameter
333     // 1.i64 Const n -> {v2}
334     // 2.unsigned SHR v0, v1 -> {v3}
335     // 3.unsigned INST v2
336 
337     auto graph = inst->GetBasicBlock()->GetGraph();
338     auto input0 = inst->GetInput(0).GetInst();
339 
340     int64_t n = GetPowerOfTwo(uValue);
341     ASSERT(n != -1);
342     auto power = graph->FindOrCreateConstant(n);
343     auto shrInst = graph->CreateInstShr(inst->GetType(), inst->GetPc(), input0, power);
344     InsertInstruction(inst, shrInst);
345 
346     LowerShift(shrInst);
347 }
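// Example for the unsigned case, assuming uValue == 8 (n == 3): "Div v0, 8" becomes "Shr v0, 3",
// e.g. 29 / 8 == 29 >> 3 == 3.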
348 
349 bool Lowering::SatisfyReplaceDivMovConditions(Inst *inst)
350 {
351     if (inst->GetBasicBlock()->GetGraph()->IsBytecodeOptimizer()) {
352         return false;
353     }
354     if (DataType::IsFloatType(inst->GetType())) {
355         return false;
356     }
357     auto c = inst->GetInput(1).GetInst();
358     return c->IsConst();
359 }
360 
361 bool Lowering::TryReplaceDivPowerOfTwo([[maybe_unused]] GraphVisitor *v, Inst *inst)
362 {
363     if (!SatisfyReplaceDivMovConditions(inst)) {
364         return false;
365     }
366 
367     uint64_t uValue = inst->GetInput(1).GetInst()->CastToConstant()->GetInt64Value();
368     auto sValue = bit_cast<int64_t>(uValue);
369 
370     auto input0 = inst->GetInput(0).GetInst();
371     bool isSigned = DataType::IsTypeSigned(input0->GetType());
372     if ((isSigned && !helpers::math::IsPowerOfTwo(sValue)) || (!isSigned && !helpers::math::IsPowerOfTwo(uValue))) {
373         return false;
374     }
375 
376     if (isSigned) {
377         ReplaceSignedDivPowerOfTwo(v, inst, sValue);
378     } else {
379         ReplaceUnsignedDivPowerOfTwo(v, inst, uValue);
380     }
381     return true;
382 }
383 
384 bool Lowering::TryReplaceDivModNonPowerOfTwo([[maybe_unused]] GraphVisitor *v, Inst *inst)
385 {
386     if (!SatisfyReplaceDivMovConditions(inst)) {
387         return false;
388     }
389 
390     auto *graph = inst->GetBasicBlock()->GetGraph();
391     uint64_t uValue = inst->GetInput(1).GetInst()->CastToConstant()->GetInt64Value();
392 
393     auto input0 = inst->GetInput(0).GetInst();
394     bool isSigned = DataType::IsTypeSigned(input0->GetType());
395     auto encoder = graph->GetEncoder();
396     ASSERT(encoder != nullptr);
397     if (!encoder->CanOptimizeImmDivMod(uValue, isSigned)) {
398         return false;
399     }
400 
401     if (inst->GetOpcode() == Opcode::Div) {
402         auto divImmInst = graph->CreateInstDivI(inst->GetType(), inst->GetPc(), input0, uValue);
403         InsertInstruction(inst, divImmInst);
404     } else {
405         ASSERT(inst->GetOpcode() == Opcode::Mod);
406         auto modImmInst = graph->CreateInstModI(inst->GetType(), inst->GetPc(), input0, uValue);
407         InsertInstruction(inst, modImmInst);
408     }
409     return true;
410 }
411 
412 bool Lowering::TryReplaceModPowerOfTwo([[maybe_unused]] GraphVisitor *v, Inst *inst)
413 {
414     if (!SatisfyReplaceDivMovConditions(inst)) {
415         return false;
416     }
417 
418     uint64_t uValue = inst->GetInput(1).GetInst()->CastToConstant()->GetInt64Value();
419     auto sValue = bit_cast<int64_t>(uValue);
420 
421     auto input0 = inst->GetInput(0).GetInst();
422     bool isSigned = DataType::IsTypeSigned(input0->GetType());
423     if ((isSigned && !helpers::math::IsPowerOfTwo(sValue)) || (!isSigned && !helpers::math::IsPowerOfTwo(uValue))) {
424         return false;
425     }
426 
427     if (isSigned) {
428         int64_t absValue = helpers::math::AbsOrMin(sValue);
429         ReplaceSignedModPowerOfTwo(v, inst, bit_cast<uint64_t>(absValue));
430     } else {
431         ReplaceUnsignedModPowerOfTwo(v, inst, uValue);
432     }
433     return true;
434 }
435 
436 void Lowering::ReplaceSignedModPowerOfTwo([[maybe_unused]] GraphVisitor *v, Inst *inst, uint64_t absValue)
437 {
438     // It is optimal for AARCH64, not for AMD64. But even for AMD64 it is significantly better than the original Mod.
439     // 1. ...
440     // 2. Const 0x4
441     // 3. Mod v1, v2
442     // ====>
443     // 1. ...
444     // 4. Const 0x3
445     // 7. Const 0xFFFFFFFFFFFFFFFC
446     // 5. Add v1, v4
447     // 6. SelectImm v5, v1, v1, 0, CC_LT
448     // 8. And v6, v7
449     // 9. Sub v1, v8
450     auto graph = inst->GetBasicBlock()->GetGraph();
451     auto input0 = inst->GetInput(0).GetInst();
452     auto valueMinus1 = absValue - 1;
453     uint32_t size = (inst->GetType() == DataType::UINT64 || inst->GetType() == DataType::INT64) ? WORD_SIZE : HALF_SIZE;
454     Inst *addInst;
455     if (graph->GetEncoder()->CanEncodeImmAddSubCmp(valueMinus1, size, false)) {
456         addInst = graph->CreateInstAddI(inst, input0, valueMinus1);
457     } else {
458         auto valueMinus1Cnst = graph->FindOrCreateConstant(valueMinus1);
459         addInst = graph->CreateInstAdd(inst, input0, valueMinus1Cnst);
460     }
461     Inst *selectInst;
462     auto encoder = graph->GetEncoder();
463     ASSERT(encoder != nullptr);
464     if (encoder->CanEncodeImmAddSubCmp(0, size, true)) {
465         selectInst = graph->CreateInstSelectImm(inst, std::array<Inst *, 3U> {addInst, input0, input0}, 0,
466                                                 inst->GetType(), ConditionCode::CC_LT);
467     } else {
468         auto zeroCnst = graph->FindOrCreateConstant(0);
469         selectInst = graph->CreateInstSelect(inst, std::array<Inst *, 4U> {addInst, input0, input0, zeroCnst},
470                                              inst->GetType(), ConditionCode::CC_LT);
471     }
472     auto maskValue = ~static_cast<uint64_t>(valueMinus1);
473     Inst *andInst;
474     ASSERT(encoder != nullptr);
475     if (encoder->CanEncodeImmLogical(maskValue, size)) {
476         andInst = graph->CreateInstAndI(inst, selectInst, maskValue);
477     } else {
478         auto mask = graph->FindOrCreateConstant(maskValue);
479         andInst = graph->CreateInstAnd(inst, selectInst, mask);
480     }
481     auto subInst = graph->CreateInstSub(inst, input0, andInst);
482 
483     inst->InsertBefore(addInst);
484     inst->InsertBefore(selectInst);
485     inst->InsertBefore(andInst);
486     InsertInstruction(inst, subInst);
487 }
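// Illustrative walk-through of the sequence above, assuming absValue == 4 (valueMinus1 == 3, mask == ~3):
//   x = -7: add = -7 + 3 = -4; select (x < 0) = -4; and = -4 & ~3 = -4; sub = -7 - (-4) = -3
//   x =  7: select = 7 (x >= 0);                    and =  7 & ~3 =  4; sub =  7 - 4    =  3
// Both results match C-style remainder semantics (-7 % 4 == -3, 7 % 4 == 3).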
488 
489 void Lowering::ReplaceUnsignedModPowerOfTwo([[maybe_unused]] GraphVisitor *v, Inst *inst, uint64_t absValue)
490 {
491     auto graph = inst->GetBasicBlock()->GetGraph();
492     auto valueMinus1 = absValue - 1;
493     uint32_t size = (inst->GetType() == DataType::UINT64 || inst->GetType() == DataType::INT64) ? WORD_SIZE : HALF_SIZE;
494     Inst *andInst;
495     auto encoder = graph->GetEncoder();
496     ASSERT(encoder != nullptr);
497     if (encoder->CanEncodeImmLogical(valueMinus1, size)) {
498         andInst = graph->CreateInstAndI(inst, inst->GetInput(0).GetInst(), valueMinus1);
499     } else {
500         auto valueMinus1Cnst = graph->FindOrCreateConstant(valueMinus1);
501         andInst = graph->CreateInstAnd(inst, inst->GetInput(0).GetInst(), valueMinus1Cnst);
502     }
503     InsertInstruction(inst, andInst);
504 }
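// Example for the unsigned case, assuming absValue == 8: "Mod v0, 8" becomes "And v0, 7" (or "AndI v0, 0x7"
// when the mask is encodable), e.g. 29 % 8 == 29 & 7 == 5.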
505 
506 void Lowering::VisitMod([[maybe_unused]] GraphVisitor *v, Inst *inst)
507 {
508     if (TryReplaceModPowerOfTwo(v, inst)) {
509         return;
510     }
511     if (TryReplaceDivModNonPowerOfTwo(v, inst)) {
512         return;
513     }
514     LowerMulDivMod<Opcode::Mod>(inst);
515 }
516 
517 void Lowering::VisitNeg([[maybe_unused]] GraphVisitor *v, Inst *inst)
518 {
519     auto newInst = LowerNegateMultiply(inst);
520     LowerUnaryOperationWithShiftedOperand<Opcode::Neg>(newInst == nullptr ? inst : newInst);
521 }
522 
523 void Lowering::VisitDeoptimizeIf([[maybe_unused]] GraphVisitor *v, Inst *inst)
524 {
525     LowerToDeoptimizeCompare(inst);
526 }
527 
528 void Lowering::VisitLoadFromConstantPool([[maybe_unused]] GraphVisitor *v, Inst *inst)
529 {
530     auto graph = inst->GetBasicBlock()->GetGraph();
531     auto newInst = graph->CreateInstLoadArrayI(DataType::ANY, inst->GetPc(), inst->GetInput(0).GetInst(),
532                                                inst->CastToLoadFromConstantPool()->GetTypeId());
533 #ifdef PANDA_COMPILER_DEBUG_INFO
534     newInst->SetCurrentMethod(inst->GetCurrentMethod());
535 #endif
536     inst->ReplaceUsers(newInst);
537     inst->RemoveInputs();
538     inst->GetBasicBlock()->ReplaceInst(inst, newInst);
539     graph->GetEventWriter().EventLowering(GetOpcodeString(inst->GetOpcode()), inst->GetId(), inst->GetPc());
540     COMPILER_LOG(DEBUG, LOWERING) << "Lowering is applied for " << GetOpcodeString(inst->GetOpcode());
541 }
542 
543 // Replacing Compare EQ with Xor
544 // 1.i64 Const 0
545 // 2.b   ...
546 // 3.b   Compare EQ b v2, v1
547 // ===>
548 // 1.i64 Const 1
549 // 2.b   ...
550 // 3.i32 Xor v1, v2
551 void Lowering::VisitCompare(GraphVisitor *v, Inst *inst)
552 {
553     auto input0 = inst->GetInput(0).GetInst();
554     auto input1 = inst->GetInput(1).GetInst();
555 
556     if (inst->CastToCompare()->GetCc() != ConditionCode::CC_EQ) {
557         return;
558     }
559 
560     // Compare EQ b 0x0, v2
561     if (input1->GetType() == DataType::BOOL && input0->IsConst() && input0->CastToConstant()->GetIntValue() == 0U) {
562         std::swap(input0, input1);
563     }
564 
565     // Compare EQ b v2, 0x0
566     bool isApplicable =
567         input0->GetType() == DataType::BOOL && input1->IsConst() && input1->CastToConstant()->GetIntValue() == 0U;
568     if (!isApplicable) {
569         return;
570     }
571     // There is always more than one user of Compare, because the previous pass is Cleanup
572     bool onlyIfimm = true;
573     for (auto &user : inst->GetUsers()) {
574         if (user.GetInst()->GetOpcode() != Opcode::IfImm) {
575             onlyIfimm = false;
576             break;
577         }
578     }
579     // Skip the optimization if all users are IfImm: the Compare+IfImm optimization will be better
580     if (onlyIfimm) {
581         return;
582     }
583     auto graph = inst->GetBasicBlock()->GetGraph();
584     auto cnst = graph->FindOrCreateConstant(1);
585     auto xorInst = graph->CreateInstXor(DataType::BOOL, inst->GetPc(), input0, cnst);
586 #ifdef PANDA_COMPILER_DEBUG_INFO
587     xorInst->SetCurrentMethod(inst->GetCurrentMethod());
588 #endif
589     InsertInstruction(inst, xorInst);
590     static_cast<Lowering *>(v)->VisitXor(v, xorInst);
591 }
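// The equivalence used above: for a boolean v holding 0 or 1, "Compare EQ v, 0" yields the same value as
// "Xor v, 1" (0 ^ 1 == 1, 1 ^ 1 == 0), so the Compare can be replaced when not all of its users are IfImm.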
592 
593 template <size_t MAX_OPERANDS>
594 void Lowering::SetInputsAndInsertInstruction(OperandsCapture<MAX_OPERANDS> &operands, Inst *inst, Inst *newInst)
595 {
596     for (size_t idx = 0; idx < MAX_OPERANDS; idx++) {
597         newInst->SetInput(idx, operands.Get(idx));
598     }
599     InsertInstruction(inst, newInst);
600 }
601 
602 void Lowering::LowerShift(Inst *inst)
603 {
604     Opcode opc = inst->GetOpcode();
605     ASSERT(opc == Opcode::Shr || opc == Opcode::AShr || opc == Opcode::Shl);
606     auto pred = GetCheckInstAndGetConstInput(inst);
607     if (pred == nullptr) {
608         return;
609     }
610     ASSERT(pred->GetOpcode() == Opcode::Constant);
611     uint64_t val = (static_cast<const ConstantInst *>(pred))->GetIntValue();
612     DataType::Type type = inst->GetType();
613     uint32_t size = (type == DataType::UINT64 || type == DataType::INT64) ? WORD_SIZE : HALF_SIZE;
614     if (val >= size) {
615         return;
616     }
617 
618     auto graph = inst->GetBasicBlock()->GetGraph();
619     auto encoder = graph->GetEncoder();
620     ASSERT(encoder != nullptr);
621     if (!encoder->CanEncodeShift(size)) {
622         return;
623     }
624 
625     Inst *newInst;
626     if (opc == Opcode::Shr) {
627         newInst = graph->CreateInstShrI(inst, inst->GetInput(0).GetInst(), val);
628     } else if (opc == Opcode::AShr) {
629         newInst = graph->CreateInstAShrI(inst, inst->GetInput(0).GetInst(), val);
630     } else {
631         newInst = graph->CreateInstShlI(inst, inst->GetInput(0).GetInst(), val);
632     }
633     InsertInstruction(inst, newInst);
634 }
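// Example: "Shr v0, v1" where v1 is "Const 0x3" becomes "ShrI v0, 0x3", provided the shift amount is
// smaller than the operand size and the encoder can encode the shift.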
635 
636 constexpr Opcode Lowering::GetInstructionWithShiftedOperand(Opcode opcode)
637 {
638     switch (opcode) {
639         case Opcode::Add:
640             return Opcode::AddSR;
641         case Opcode::Sub:
642             return Opcode::SubSR;
643         case Opcode::And:
644             return Opcode::AndSR;
645         case Opcode::Or:
646             return Opcode::OrSR;
647         case Opcode::Xor:
648             return Opcode::XorSR;
649         case Opcode::AndNot:
650             return Opcode::AndNotSR;
651         case Opcode::OrNot:
652             return Opcode::OrNotSR;
653         case Opcode::XorNot:
654             return Opcode::XorNotSR;
655         case Opcode::Neg:
656             return Opcode::NegSR;
657         default:
658             UNREACHABLE();
659     }
660 }
661 
662 constexpr Opcode Lowering::GetInstructionWithInvertedOperand(Opcode opcode)
663 {
664     switch (opcode) {
665         case Opcode::And:
666             return Opcode::AndNot;
667         case Opcode::Or:
668             return Opcode::OrNot;
669         case Opcode::Xor:
670             return Opcode::XorNot;
671         default:
672             return Opcode::INVALID;
673     }
674 }
675 
676 ShiftType Lowering::GetShiftTypeByOpcode(Opcode opcode)
677 {
678     switch (opcode) {
679         case Opcode::Shl:
680         case Opcode::ShlI:
681             return ShiftType::LSL;
682         case Opcode::Shr:
683         case Opcode::ShrI:
684             return ShiftType::LSR;
685         case Opcode::AShr:
686         case Opcode::AShrI:
687             return ShiftType::ASR;
688         default:
689             UNREACHABLE();
690     }
691 }
692 
693 Inst *Lowering::GetCheckInstAndGetConstInput(Inst *inst)
694 {
695     DataType::Type type = inst->GetType();
696     if (type != DataType::INT64 && type != DataType::UINT64 && type != DataType::INT32 && type != DataType::UINT32 &&
697         type != DataType::POINTER && type != DataType::BOOL) {
698         return nullptr;
699     }
700 
701     auto cnst = inst->GetInput(1).GetInst();
702     if (!cnst->IsConst()) {
703         if (!inst->IsCommutative() || !inst->GetInput(0).GetInst()->IsConst()) {
704             return nullptr;
705         }
706         ASSERT(!DataType::IsFloatType(inst->GetType()));
707         auto input = cnst;
708         cnst = inst->GetInput(0).GetInst();
709         inst->SetInput(0, input);
710         inst->SetInput(1, cnst);
711     }
712     ASSERT(cnst->GetOpcode() == Opcode::Constant);
713     return cnst;
714 }
715 
716 ShiftOpcode Lowering::ConvertOpcode(Opcode newOpcode)
717 {
718     switch (newOpcode) {
719         case Opcode::NegSR:
720             return ShiftOpcode::NEG_SR;
721         case Opcode::AddSR:
722             return ShiftOpcode::ADD_SR;
723         case Opcode::SubSR:
724             return ShiftOpcode::SUB_SR;
725         case Opcode::AndSR:
726             return ShiftOpcode::AND_SR;
727         case Opcode::OrSR:
728             return ShiftOpcode::OR_SR;
729         case Opcode::XorSR:
730             return ShiftOpcode::XOR_SR;
731         case Opcode::AndNotSR:
732             return ShiftOpcode::AND_NOT_SR;
733         case Opcode::OrNotSR:
734             return ShiftOpcode::OR_NOT_SR;
735         case Opcode::XorNotSR:
736             return ShiftOpcode::XOR_NOT_SR;
737         default:
738             return ShiftOpcode::INVALID_SR;
739     }
740 }
741 
742 // Ask encoder whether Constant can be an immediate for Compare
743 bool Lowering::ConstantFitsCompareImm(Inst *cst, uint32_t size, ConditionCode cc)
744 {
745     ASSERT(cst->GetOpcode() == Opcode::Constant);
746     if (DataType::IsFloatType(cst->GetType())) {
747         return false;
748     }
749     auto *graph = cst->GetBasicBlock()->GetGraph();
750     auto *encoder = graph->GetEncoder();
751     int64_t val = cst->CastToConstant()->GetRawValue();
752     if (graph->IsBytecodeOptimizer()) {
753         return (size == HALF_SIZE) && (val == 0);
754     }
755     if (cc == ConditionCode::CC_TST_EQ || cc == ConditionCode::CC_TST_NE) {
756         ASSERT(encoder != nullptr);
757         return encoder->CanEncodeImmLogical(val, size);
758     }
759     return encoder->CanEncodeImmAddSubCmp(val, size, IsSignedConditionCode(cc));
760 }
761 
762 Inst *Lowering::LowerAddSub(Inst *inst)
763 {
764     ASSERT(inst->GetOpcode() == Opcode::Add || inst->GetOpcode() == Opcode::Sub);
765     auto pred = GetCheckInstAndGetConstInput(inst);
766     if (pred == nullptr) {
767         return nullptr;
768     }
769 
770     ASSERT(pred->GetOpcode() == Opcode::Constant);
771 
772     auto graph = pred->GetBasicBlock()->GetGraph();
773     auto val = static_cast<int64_t>(pred->CastToConstant()->GetIntValue());
774     DataType::Type type = inst->GetType();
775     uint32_t size = (type == DataType::UINT64 || type == DataType::INT64) ? WORD_SIZE : HALF_SIZE;
776     auto encoder = graph->GetEncoder();
777     ASSERT(encoder != nullptr);
778     if (!encoder->CanEncodeImmAddSubCmp(val, size, false)) {
779         return nullptr;
780     }
781 
782     bool isAdd = (inst->GetOpcode() == Opcode::Add);
783     if (val < 0 && graph->GetEncoder()->CanEncodeImmAddSubCmp(-val, size, false)) {
784         val = -val;
785         isAdd = !isAdd;
786     }
787 
788     Inst *newInst;
789     if (isAdd) {
790         newInst = graph->CreateInstAddI(inst, inst->GetInput(0).GetInst(), static_cast<uint64_t>(val));
791     } else {
792         newInst = graph->CreateInstSubI(inst, inst->GetInput(0).GetInst(), static_cast<uint64_t>(val));
793     }
794     InsertInstruction(inst, newInst);
795     return newInst;
796 }
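// Examples: "Add v0, Const 0x5" becomes "AddI v0, 0x5"; "Add v0, Const -5" becomes "SubI v0, 0x5"
// when the negated immediate is also encodable (and symmetrically for Sub).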
797 
798 template <Opcode OPCODE>
799 void Lowering::LowerMulDivMod(Inst *inst)
800 {
801     ASSERT(inst->GetOpcode() == OPCODE);
802     auto graph = inst->GetBasicBlock()->GetGraph();
803     if (graph->IsInstThrowable(inst)) {
804         return;
805     }
806 
807     auto pred = GetCheckInstAndGetConstInput(inst);
808     if (pred == nullptr) {
809         return;
810     }
811 
812     auto val = static_cast<int64_t>(pred->CastToConstant()->GetIntValue());
813     DataType::Type type = inst->GetType();
814     uint32_t size = (type == DataType::UINT64 || type == DataType::INT64) ? WORD_SIZE : HALF_SIZE;
815     auto encoder = graph->GetEncoder();
816     ASSERT(encoder != nullptr);
817     if (!encoder->CanEncodeImmMulDivMod(val, size)) {
818         return;
819     }
820 
821     Inst *newInst;
822     // NOLINTNEXTLINE(readability-magic-numbers,readability-braces-around-statements, bugprone-branch-clone)
823     if constexpr (OPCODE == Opcode::Mul) {
824         newInst = graph->CreateInstMulI(inst, inst->GetInput(0).GetInst(), static_cast<uint64_t>(val));
825         // NOLINTNEXTLINE(readability-misleading-indentation,readability-braces-around-statements)
826     } else if constexpr (OPCODE == Opcode::Div) {
827         newInst = graph->CreateInstDivI(inst, inst->GetInput(0).GetInst(), static_cast<uint64_t>(val));
828         if (graph->IsBytecodeOptimizer()) {
829             inst->ClearFlag(compiler::inst_flags::NO_DCE);  // In Bytecode Optimizer Div may have NO_DCE flag
830             if (val == 0) {
831                 newInst->SetFlag(compiler::inst_flags::NO_DCE);
832             }
833         }
834         // NOLINTNEXTLINE(readability-misleading-indentation)
835     } else {
836         newInst = graph->CreateInstModI(inst, inst->GetInput(0).GetInst(), static_cast<uint64_t>(val));
837         if (graph->IsBytecodeOptimizer()) {
838             inst->ClearFlag(compiler::inst_flags::NO_DCE);  // In Bytecode Optimizer Div may have NO_DCE flag
839             if (val == 0) {
840                 newInst->SetFlag(compiler::inst_flags::NO_DCE);
841             }
842         }
843     }
844     InsertInstruction(inst, newInst);
845 }
846 
847 Inst *Lowering::LowerMultiplyAddSub(Inst *inst)
848 {
849     // Don't use MAdd/MSub for floating point inputs to avoid different results for interpreted and
850     // compiled code due to better precision of target instructions implementing MAdd/MSub.
851     if (DataType::GetCommonType(inst->GetType()) != DataType::INT64) {
852         return nullptr;
853     }
854 
855     OperandsCapture<3U> operands {};
856     InstructionsCapture<2U> insts {};
857     InstructionsCapture<3U> instsSub3 {};
858     bool isSub = true;
859 
860     // clang-format off
861     using MAddMatcher = ADD<MUL<SRC0, SRC1, Flags::S>, SRC2>;
862     using MSubMatcher2Ops =
863         AnyOf<SUB<SRC2, MUL<SRC0, SRC1, Flags::S>>,
864               ADD<BinaryOp<Opcode::MNeg, SRC0, SRC1, Flags::S>, SRC2>>;
865     using MSubMatcher3Ops =
866         AnyOf<ADD<MUL<NEG<SRC0, Flags::S>, SRC1, Flags::S>, SRC2>,
867               ADD<NEG<MUL<SRC0, SRC1, Flags::S>, Flags::S>, SRC2>>;
868     // clang-format on
869 
870     if (MSubMatcher2Ops::Capture(inst, operands, insts)) {
871         // Operands may have different types (but the same common type), but instructions
872         // having different types can not be fused, because it will change semantics.
873         if (!insts.HaveSameType()) {
874             return nullptr;
875         }
876     } else if (MSubMatcher3Ops::Capture(inst, operands, instsSub3)) {
877         if (!instsSub3.HaveSameType()) {
878             return nullptr;
879         }
880     } else if (MAddMatcher::Capture(inst, operands, insts.ResetIndex())) {
881         isSub = false;
882         if (!insts.HaveSameType()) {
883             return nullptr;
884         }
885     } else {
886         return nullptr;
887     }
888 
889     auto graph = inst->GetBasicBlock()->GetGraph();
890     auto encoder = graph->GetEncoder();
891     ASSERT(encoder != nullptr);
892     if ((isSub && !encoder->CanEncodeMSub()) || (!isSub && !encoder->CanEncodeMAdd())) {
893         return nullptr;
894     }
895 
896     Inst *newInst = isSub ? graph->CreateInstMSub(inst) : graph->CreateInstMAdd(inst);
897     SetInputsAndInsertInstruction(operands, inst, newInst);
898     return newInst;
899 }
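// Examples (integer types only): "Add (Mul v0, v1), v2" becomes "MAdd v0, v1, v2", and
// "Sub v2, (Mul v0, v1)" becomes "MSub v0, v1, v2", assuming the encoder supports the fused forms.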
900 
901 Inst *Lowering::LowerNegateMultiply(Inst *inst)
902 {
903     OperandsCapture<2U> operands {};
904     InstructionsCapture<2U> insts {};
905     using MNegMatcher = AnyOf<NEG<MUL<SRC0, SRC1, Flags::S>>, MUL<NEG<SRC0, Flags::S>, SRC1>>;
906     if (!MNegMatcher::Capture(inst, operands, insts) || !operands.HaveCommonType() || !insts.HaveSameType()) {
907         return nullptr;
908     }
909 
910     auto graph = inst->GetBasicBlock()->GetGraph();
911     auto encoder = graph->GetEncoder();
912     ASSERT(encoder != nullptr);
913     if (!encoder->CanEncodeMNeg()) {
914         return nullptr;
915     }
916 
917     Inst *newInst = graph->CreateInstMNeg(inst);
918     SetInputsAndInsertInstruction(operands, inst, newInst);
919     return newInst;
920 }
921 
922 bool Lowering::LowerCastValueToAnyTypeWithConst(Inst *inst)
923 {
924     auto graph = inst->GetBasicBlock()->GetGraph();
925     auto anyType = inst->CastToCastValueToAnyType()->GetAnyType();
926     auto baseType = AnyBaseTypeToDataType(anyType);
927     if (!IsTypeNumeric(baseType) || baseType == DataType::POINTER) {
928         return false;
929     }
930     auto inputInst = inst->GetInput(0).GetInst();
931     if (!inputInst->IsConst()) {
932         return false;
933     }
934     auto imm = inputInst->CastToConstant()->GetRawValue();
935     auto packImm = graph->GetRuntime()->GetPackConstantByPrimitiveType(anyType, imm);
936     auto anyConst = inst->GetBasicBlock()->GetGraph()->FindOrCreateConstant(DataType::Any(packImm));
937     inst->ReplaceUsers(anyConst);
938     return true;
939 }
940 
941 void Lowering::LowerLogicWithInvertedOperand(Inst *inst)
942 {
943     OperandsCapture<2U> operands {};
944     InstructionsCapture<2U> insts {};
945     using Matcher = AnyOf<BinaryOp<Opcode::Or, SRC0, UnaryOp<Opcode::Not, SRC1, Flags::S>, Flags::C>,
946                           BinaryOp<Opcode::And, SRC0, UnaryOp<Opcode::Not, SRC1, Flags::S>, Flags::C>,
947                           BinaryOp<Opcode::Xor, SRC0, UnaryOp<Opcode::Not, SRC1, Flags::S>, Flags::C>>;
948     if (!Matcher::Capture(inst, operands, insts) || !insts.HaveSameType()) {
949         return;
950     }
951 
952     auto graph = inst->GetBasicBlock()->GetGraph();
953     auto encoder = graph->GetEncoder();
954     auto opcode = inst->GetOpcode();
955     Inst *newInst;
956     if (opcode == Opcode::Or) {
957         ASSERT(encoder != nullptr);
958         if (!encoder->CanEncodeOrNot()) {
959             return;
960         }
961         newInst = graph->CreateInstOrNot(inst);
962     } else if (opcode == Opcode::And) {
963         if (!encoder->CanEncodeAndNot()) {
964             return;
965         }
966         newInst = graph->CreateInstAndNot(inst);
967     } else {
968         if (!encoder->CanEncodeXorNot()) {
969             return;
970         }
971         newInst = graph->CreateInstXorNot(inst);
972     }
973 
974     SetInputsAndInsertInstruction(operands, inst, newInst);
975 }
976 
977 template <typename T, size_t MAX_OPERANDS>
978 Inst *Lowering::LowerOperationWithShiftedOperand(Inst *inst, OperandsCapture<MAX_OPERANDS> &operands, Inst *shiftInst,
979                                                  Opcode newOpcode)
980 {
981     auto graph = inst->GetBasicBlock()->GetGraph();
982     auto encoder = graph->GetEncoder();
983 
984     ShiftType shiftType = GetShiftTypeByOpcode(shiftInst->GetOpcode());
985     if (!encoder->CanEncodeShiftedOperand(ConvertOpcode(newOpcode), shiftType)) {
986         return nullptr;
987     }
988     uint64_t imm = static_cast<BinaryImmOperation *>(shiftInst)->GetImm();
989     auto newInst = static_cast<T *>(graph->CreateInst(newOpcode));
990     newInst->SetType(inst->GetType());
991     newInst->SetPc(inst->GetPc());
992     newInst->SetImm(imm);
993     newInst->SetShiftType(shiftType);
994 #ifdef PANDA_COMPILER_DEBUG_INFO
995     newInst->SetCurrentMethod(inst->GetCurrentMethod());
996 #endif
997     SetInputsAndInsertInstruction(operands, inst, newInst);
998     return newInst;
999 }
1000 
1001 template <Opcode OPCODE, bool IS_COMMUTATIVE>
1002 Inst *Lowering::LowerBinaryOperationWithShiftedOperand(Inst *inst)
1003 {
1004     OperandsCapture<2U> operands {};
1005     InstructionsCapture<2U> insts {};
1006     InstructionsCapture<3U> invInsts {};
1007     constexpr auto FLAGS = IS_COMMUTATIVE ? Flags::COMMUTATIVE : Flags::NONE;
1008 
1009     // We're expecting that at this point all "shift by immediate" patterns were replaced with ShlI/ShrI/AShrI
1010     // clang-format off
1011     using Matcher = AnyOf<BinaryOp<OPCODE, SRC0, SHLI<SRC1>, FLAGS>,
1012                           BinaryOp<OPCODE, SRC0, SHRI<SRC1>, FLAGS>,
1013                           BinaryOp<OPCODE, SRC0, ASHRI<SRC1>, FLAGS>>;
1014     // Instead of replacing instruction having inverted operand with single inverted-operand instruction
1015     // and then applying the rules defined above we're applying explicitly defined rules for such patterns,
1016     // because after inverted-operand instruction insertion there will be several users for shift operation.
1017     // BinaryOp won't match the IR-tree with a pattern and either more complicated checks should be introduced there
1018     // or DCE pass followed by additional Lowering pass should be performed.
1019     // To keep things simple and avoid extra Lowering passes explicit rules were added.
1020     using InvertedOperandMatcher = MatchIf<GetInstructionWithInvertedOperand(OPCODE) != Opcode::INVALID,
1021                                          AnyOf<BinaryOp<OPCODE, SRC0, NOT<SHLI<SRC1>>>,
1022                                          BinaryOp<OPCODE, SRC0, NOT<SHRI<SRC1>>>,
1023                                          BinaryOp<OPCODE, SRC0, NOT<ASHRI<SRC1>>>>>;
1024     // clang-format on
1025 
1026     if (GetCommonType(inst->GetType()) != DataType::INT64) {
1027         return nullptr;
1028     }
1029 
1030     Inst *shiftInst;
1031     Opcode newOpc;
1032 
1033     if (InvertedOperandMatcher::Capture(inst, operands, invInsts) && invInsts.HaveSameType()) {
1034         auto rightOperand =
1035             operands.Get(0) == inst->GetInput(0).GetInst() ? inst->GetInput(1).GetInst() : inst->GetInput(0).GetInst();
1036         shiftInst = rightOperand->GetInput(0).GetInst();
1037         newOpc = GetInstructionWithShiftedOperand(GetInstructionWithInvertedOperand(OPCODE));
1038     } else if (Matcher::Capture(inst, operands, insts) && insts.HaveSameType()) {
1039         shiftInst =
1040             operands.Get(0) == inst->GetInput(0).GetInst() ? inst->GetInput(1).GetInst() : inst->GetInput(0).GetInst();
1041         newOpc = GetInstructionWithShiftedOperand(OPCODE);
1042     } else {
1043         return nullptr;
1044     }
1045 
1046     return LowerOperationWithShiftedOperand<BinaryShiftedRegisterOperation>(inst, operands, shiftInst, newOpc);
1047 }
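// Example: "Add v0, (ShlI v1, 0x2)" becomes the single shifted-register instruction
// "AddSR v0, v1, LSL #2" when the encoder can fold the shift into the operation.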
1048 
1049 template <Opcode OPCODE>
1050 void Lowering::LowerUnaryOperationWithShiftedOperand(Inst *inst)
1051 {
1052     OperandsCapture<1> operands {};
1053     InstructionsCapture<2U> insts {};
1054     // We're expecting that at this point all "shift by immediate" patterns were replaced with ShlI/ShrI/AShrI
1055     // clang-format off
1056     using Matcher = AnyOf<UnaryOp<OPCODE, SHLI<SRC0>>,
1057                           UnaryOp<OPCODE, SHRI<SRC0>>,
1058                           UnaryOp<OPCODE, ASHRI<SRC0>>>;
1059     // clang-format on
1060     if (!Matcher::Capture(inst, operands, insts) || GetCommonType(inst->GetType()) != DataType::INT64 ||
1061         !insts.HaveSameType()) {
1062         return;
1063     }
1064     LowerOperationWithShiftedOperand<UnaryShiftedRegisterOperation>(inst, operands, inst->GetInput(0).GetInst(),
1065                                                                     GetInstructionWithShiftedOperand(OPCODE));
1066 }
1067 
1068 Inst *Lowering::LowerLogic(Inst *inst)
1069 {
1070     Opcode opc = inst->GetOpcode();
1071     ASSERT(opc == Opcode::Or || opc == Opcode::And || opc == Opcode::Xor);
1072     auto pred = GetCheckInstAndGetConstInput(inst);
1073     if (pred == nullptr) {
1074         return nullptr;
1075     }
1076     ASSERT(pred->GetOpcode() == Opcode::Constant);
1077     uint64_t val = pred->CastToConstant()->GetIntValue();
1078     DataType::Type type = inst->GetType();
1079     uint32_t size = (type == DataType::UINT64 || type == DataType::INT64) ? WORD_SIZE : HALF_SIZE;
1080     auto graph = inst->GetBasicBlock()->GetGraph();
1081     auto encoder = graph->GetEncoder();
1082     ASSERT(encoder != nullptr);
1083     if (!encoder->CanEncodeImmLogical(val, size)) {
1084         return nullptr;
1085     }
1086     Inst *newInst;
1087     if (opc == Opcode::Or) {
1088         newInst = graph->CreateInstOrI(inst, inst->GetInput(0).GetInst(), val);
1089     } else if (opc == Opcode::And) {
1090         newInst = graph->CreateInstAndI(inst, inst->GetInput(0).GetInst(), val);
1091     } else {
1092         newInst = graph->CreateInstXorI(inst, inst->GetInput(0).GetInst(), val);
1093     }
1094     InsertInstruction(inst, newInst);
1095     return newInst;
1096 }
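// Example: "And v0, Const 0xFF" becomes "AndI v0, 0xFF" when the immediate passes CanEncodeImmLogical
// for the operand size.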
1097 
1098 // From
1099 //  2.u64 ShlI v1, 0x3 -> (v3)
1100 //  3.u64 Load v0, v2  -> (...)
1101 // To
1102 //  3.u64 Load v0, v2, Scale 0x3 -> (...)
1103 void Lowering::LowerMemInstScale(Inst *inst)
1104 {
1105     auto opcode = inst->GetOpcode();
1106     ASSERT(opcode == Opcode::Load || opcode == Opcode::Store);
1107     auto inputInst = inst->GetInput(1).GetInst();
1108     if (inputInst->GetOpcode() != Opcode::ShlI) {
1109         return;
1110     }
1111     auto graph = inst->GetBasicBlock()->GetGraph();
1112     auto inputType = inputInst->GetType();
1113     if (Is64BitsArch(graph->GetArch())) {
1114         if (inputType != DataType::UINT64 && inputType != DataType::INT64) {
1115             return;
1116         }
1117     } else {
1118         if (inputType != DataType::UINT32 && inputType != DataType::INT32) {
1119             return;
1120         }
1121     }
1122     auto type = inst->GetType();
1123     uint64_t val = inputInst->CastToShlI()->GetImm();
1124     uint32_t size = DataType::GetTypeSize(type, graph->GetArch());
1125     if (!graph->GetEncoder()->CanEncodeScale(val, size)) {
1126         return;
1127     }
1128     if (opcode == Opcode::Load) {
1129         ASSERT(inst->CastToLoad()->GetScale() == 0);
1130         inst->CastToLoad()->SetScale(val);
1131     } else {
1132         ASSERT(inst->CastToStore()->GetScale() == 0);
1133         inst->CastToStore()->SetScale(val);
1134     }
1135     inst->SetInput(1, inputInst->GetInput(0).GetInst());
1136     graph->GetEventWriter().EventLowering(GetOpcodeString(inst->GetOpcode()), inst->GetId(), inst->GetPc());
1137     COMPILER_LOG(DEBUG, LOWERING) << "Lowering is applied for " << GetOpcodeString(inst->GetOpcode());
1138 }
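// The folded scale typically maps to a scaled register-offset addressing mode on the target, e.g. on AArch64
// a u64 "Load v0, v2, Scale 0x3" can be encoded as "ldr x, [base, index, lsl #3]"; CanEncodeScale checks that
// the shift is one the target supports for the given access size.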
1139 
1140 template <typename LowLevelType>
1141 void Lowering::LowerConstArrayIndex(Inst *inst, Opcode lowLevelOpcode)
1142 {
1143     if (inst->GetBasicBlock()->GetGraph()->IsBytecodeOptimizer()) {
1144         return;
1145     }
1146     static constexpr size_t ARRAY_INDEX_INPUT = 1;
1147     auto inputInst = inst->GetInput(ARRAY_INDEX_INPUT).GetInst();
1148     ASSERT(inputInst->GetOpcode() != Opcode::BoundsCheckI);
1149     if (inputInst->IsConst()) {
1150         uint64_t value = inputInst->CastToConstant()->GetIntValue();
1151 
1152         auto graph = inst->GetBasicBlock()->GetGraph();
1153         auto newInst = graph->CreateInst(lowLevelOpcode);
1154         newInst->SetType(inst->GetType());
1155         newInst->SetPc(inst->GetPc());
1156 #ifdef PANDA_COMPILER_DEBUG_INFO
1157         newInst->SetCurrentMethod(inst->GetCurrentMethod());
1158 #endif
1159         static_cast<LowLevelType *>(newInst)->SetImm(value);
1160 
1161         // StoreInst and BoundsCheckInst have 3 inputs, LoadInst has 2 inputs
1162         newInst->SetInput(0, inst->GetInput(0).GetInst());
1163         if (inst->GetInputsCount() == 3U) {
1164             newInst->SetInput(1, inst->GetInput(2U).GetInst());
1165         } else {
1166             ASSERT(inst->GetInputsCount() == 2U);
1167         }
1168         if (inst->GetOpcode() == Opcode::StoreArray) {
1169             newInst->CastToStoreArrayI()->SetNeedBarrier(inst->CastToStoreArray()->GetNeedBarrier());
1170         }
1171 
1172         if (inst->GetOpcode() == Opcode::LoadArray) {
1173             newInst->CastToLoadArrayI()->SetNeedBarrier(inst->CastToLoadArray()->GetNeedBarrier());
1174             newInst->CastToLoadArrayI()->SetIsArray(inst->CastToLoadArray()->IsArray());
1175         }
1176         if (inst->GetOpcode() == Opcode::BoundsCheck) {
1177             newInst->CastToBoundsCheckI()->SetIsArray(inst->CastToBoundsCheck()->IsArray());
1178             if (inst->CanDeoptimize()) {
1179                 newInst->SetFlag(inst_flags::CAN_DEOPTIMIZE);
1180             }
1181         }
1182 
1183         // Replace instruction immediately because it's not removable by DCE
1184         if (inst->GetOpcode() != Opcode::BoundsCheck) {
1185             inst->ReplaceUsers(newInst);
1186         } else {
1187             auto cnst = graph->FindOrCreateConstant(value);
1188             inst->ReplaceUsers(cnst);
1189         }
1190         inst->RemoveInputs();
1191         inst->GetBasicBlock()->ReplaceInst(inst, newInst);
1192         graph->GetEventWriter().EventLowering(GetOpcodeString(inst->GetOpcode()), inst->GetId(), inst->GetPc());
1193         COMPILER_LOG(DEBUG, LOWERING) << "Lowering is applied for " << GetOpcodeString(inst->GetOpcode());
1194     }
1195 }
1196 
1197 void Lowering::LowerStateInst(SaveStateInst *saveState)
1198 {
1199     size_t idx = 0;
1200     size_t inputsCount = saveState->GetInputsCount();
1201     auto graph = saveState->GetBasicBlock()->GetGraph();
1202     if (graph->IsBytecodeOptimizer()) {
1203         return;
1204     }
1205     bool skipFloats = (graph->GetArch() == Arch::AARCH32);
1206     while (idx < inputsCount) {
1207         auto inputInst = saveState->GetInput(idx).GetInst();
1208         // On AArch32, float values are stored in a different format than integers
1209         if (inputInst->GetOpcode() == Opcode::NullPtr ||
1210             (inputInst->IsConst() && (!skipFloats || inputInst->GetType() == DataType::INT64))) {
1211             uint64_t rawValue =
1212                 inputInst->GetOpcode() == Opcode::NullPtr ? 0 : inputInst->CastToConstant()->GetRawValue();
1213             auto vreg = saveState->GetVirtualRegister(idx);
1214             auto type = inputInst->GetType();
1215             // There is no INT64 in dynamic methods
1216             if (type == DataType::INT64 && graph->IsDynamicMethod()) {
1217                 type = DataType::INT32;
1218             }
1219             saveState->AppendImmediate(rawValue, vreg.Value(), type, vreg.GetVRegType());
1220             saveState->RemoveInput(idx);
1221             inputsCount--;
1222             graph->GetEventWriter().EventLowering(GetOpcodeString(saveState->GetOpcode()), saveState->GetId(),
1223                                                   saveState->GetPc());
1224             COMPILER_LOG(DEBUG, LOWERING) << "Lowering is applied for " << GetOpcodeString(saveState->GetOpcode());
1225         } else {
1226             idx++;
1227         }
1228     }
1229 }
1230 
1231 void Lowering::LowerReturnInst(FixedInputsInst1 *ret)
1232 {
1233     auto graph = ret->GetBasicBlock()->GetGraph();
1234     if (graph->IsBytecodeOptimizer()) {
1235         return;
1236     }
1237     ASSERT(ret->GetOpcode() == Opcode::Return);
1238     auto inputInst = ret->GetInput(0).GetInst();
1239     if (inputInst->IsConst()) {
1240         uint64_t rawValue = inputInst->CastToConstant()->GetRawValue();
1241         auto retImm = graph->CreateInstReturnI(ret->GetType(), ret->GetPc(), rawValue);
1242 #ifdef PANDA_COMPILER_DEBUG_INFO
1243         retImm->SetCurrentMethod(ret->GetCurrentMethod());
1244 #endif
1245 
1246         // Replace instruction immediately because it's not removable by DCE
1247         ret->RemoveInputs();
1248         ret->GetBasicBlock()->ReplaceInst(ret, retImm);
1249         graph->GetEventWriter().EventLowering(GetOpcodeString(ret->GetOpcode()), ret->GetId(), ret->GetPc());
1250         COMPILER_LOG(DEBUG, LOWERING) << "Lowering is applied for " << GetOpcodeString(ret->GetOpcode());
1251     }
1252 }
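// Example: "Return (Const 0x2a)" becomes "ReturnI 0x2a", removing the constant input entirely.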
1253 
1254 // We'd like to swap only to make the second operand an immediate
1255 bool Lowering::BetterToSwapCompareInputs(Inst *cmp)
1256 {
1257     ASSERT(cmp->GetOpcode() == Opcode::Compare);
1258     auto in0 = cmp->GetInput(0).GetInst();
1259     auto in1 = cmp->GetInput(1).GetInst();
1260     if (DataType::IsFloatType(in0->GetType())) {
1261         return false;
1262     }
1263     if (in0->GetOpcode() == compiler::Opcode::NullPtr) {
1264         return true;
1265     }
1266 
1267     if (in0->IsConst()) {
1268         if (in1->IsConst()) {
1269             DataType::Type type = cmp->CastToCompare()->GetOperandsType();
1270             uint32_t size = (type == DataType::UINT64 || type == DataType::INT64) ? WORD_SIZE : HALF_SIZE;
1271             auto cc = cmp->CastToCompare()->GetCc();
1272             return ConstantFitsCompareImm(in0, size, cc) && !ConstantFitsCompareImm(in1, size, cc);
1273         }
1274         return true;
1275     }
1276     return false;
1277 }
1278 
1279 // Optimize the order of input arguments to decrease accumulator usage (Bytecode optimizer only).
1280 void Lowering::OptimizeIfInput(compiler::Inst *ifInst)
1281 {
1282     ASSERT(ifInst->GetOpcode() == compiler::Opcode::If);
1283     compiler::Inst *input0 = ifInst->GetInput(0).GetInst();
1284     compiler::Inst *input1 = ifInst->GetInput(1).GetInst();
1285 
1286     if (input0->IsDominate(input1)) {
1287         ifInst->SetInput(0, input1);
1288         ifInst->SetInput(1, input0);
1289         // And change CC
1290         auto cc = ifInst->CastToIf()->GetCc();
1291         cc = SwapOperandsConditionCode(cc);
1292         ifInst->CastToIf()->SetCc(cc);
1293     }
1294 }
1295 
1296 void Lowering::JoinFcmpInst(IfImmInst *inst, CmpInst *input)
1297 {
1298     auto cc = inst->GetCc();
1299     ASSERT(cc == ConditionCode::CC_EQ || cc == ConditionCode::CC_NE || IsSignedConditionCode(cc));
1300     if (input->IsFcmpg()) {
1301         /* Please look at the table of vector condition flags:
1302          * LT => Less than, or unordered
1303          * LE => Less than or equal, or unordered
1304          * GT => Greater than
1305          * GE => Greater than or equal
1306          *
1307          * LO => Less than
1308          * LS => Less than or equal
1309          * HI => Greater than, or unordered
1310          * HS => Greater than or equal, or unordered
1311          *
1312          * So we change condition to "unsigned" for Fcmpg (which should return "greater than" for unordered
1313          * comparisons).
1314          */
1315         cc = InverseSignednessConditionCode(cc);
1316     }
1317 
1318     LowerIfImmToIf(inst, input, cc, input->GetOperandsType());
1319 }
1320 
1321 void Lowering::LowerIf(IfImmInst *inst)
1322 {
1323     ASSERT(inst->GetCc() == ConditionCode::CC_NE || inst->GetCc() == ConditionCode::CC_EQ);
1324     ASSERT(inst->GetImm() == 0);
1325     if (inst->GetOperandsType() != DataType::BOOL) {
1326         ASSERT(GetGraph()->IsDynamicMethod());
1327         return;
1328     }
1329     auto input = inst->GetInput(0).GetInst();
1330     if (input->GetOpcode() != Opcode::Compare && input->GetOpcode() != Opcode::And) {
1331         return;
1332     }
1333     // Check that the input has only IfImm users
1334     for (auto &user : input->GetUsers()) {
1335         if (user.GetInst()->GetOpcode() != Opcode::IfImm) {
1336             return;
1337         }
1338     }
1339     // Try to put the constant into the second input
1340     if (input->GetOpcode() == Opcode::Compare && BetterToSwapCompareInputs(input)) {
1341         // Swap inputs
1342         auto in0 = input->GetInput(0).GetInst();
1343         auto in1 = input->GetInput(1).GetInst();
1344         input->SetInput(0, in1);
1345         input->SetInput(1, in0);
1346         // And change CC
1347         auto cc = input->CastToCompare()->GetCc();
1348         cc = SwapOperandsConditionCode(cc);
1349         input->CastToCompare()->SetCc(cc);
1350     }
1351     if (!GetGraph()->IsBytecodeOptimizer()) {
1352         for (auto &newInput : input->GetInputs()) {
1353             auto realNewInput = input->GetDataFlowInput(newInput.GetInst());
1354             if (realNewInput->IsMovableObject()) {
1355                 ssb_.SearchAndCreateMissingObjInSaveState(GetGraph(), realNewInput, inst);
1356             }
1357         }
1358     }
1359     auto cst = input->GetInput(1).GetInst();
1360     DataType::Type type =
1361         (input->GetOpcode() == Opcode::Compare) ? input->CastToCompare()->GetOperandsType() : input->GetType();
1362     uint32_t size = (type == DataType::UINT64 || type == DataType::INT64) ? WORD_SIZE : HALF_SIZE;
1363     auto cc = input->GetOpcode() == Opcode::Compare ? input->CastToCompare()->GetCc() : ConditionCode::CC_TST_NE;
1364     // IfImm EQ against 0 negates the input's condition, so invert the condition code
1365     if (inst->GetCc() == ConditionCode::CC_EQ && inst->GetImm() == 0) {
1366         cc = GetInverseConditionCode(cc);
1367     }
1368     if (cst->GetOpcode() == compiler::Opcode::NullPtr || (cst->IsConst() && ConstantFitsCompareImm(cst, size, cc))) {
1369         // In-place change for IfImm
1370         InPlaceLowerIfImm(inst, input, cst, cc, type);
1371     } else {
1372         LowerIfImmToIf(inst, input, cc, type);
1373     }
1374 }
1375 
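// In-place variant, e.g. for a reference null check (illustrative sketch):
//   3.b   Compare EQ ref v1, NullPtr
//   4.    IfImm NE b v3, 0x0
// becomes
//   4.    IfImm EQ ref v1, 0x0
// The now-unused Compare is expected to be cleaned up later by DCE.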
1376 void Lowering::InPlaceLowerIfImm(IfImmInst *inst, Inst *input, Inst *cst, ConditionCode cc, DataType::Type inputType)
1377 {
1378     auto graph = inst->GetBasicBlock()->GetGraph();
1379     inst->SetOperandsType(inputType);
1380     auto newInput = input->GetInput(0).GetInst();
1381     // For compare(nullptr, 0) set `nullptr` as new input
1382     if (cst->GetOpcode() == Opcode::NullPtr && IsZeroConstant(newInput) &&
1383         DataType::IsReference(inst->GetOperandsType())) {
1384         newInput = cst;
1385     }
1386     inst->SetInput(0, newInput);
1387 
1388     uint64_t val = cst->GetOpcode() == Opcode::NullPtr ? 0 : cst->CastToConstant()->GetRawValue();
1389     inst->SetImm(val);
1390     inst->SetCc(cc);
1391     inst->GetBasicBlock()->GetGraph()->GetEventWriter().EventLowering(GetOpcodeString(inst->GetOpcode()), inst->GetId(),
1392                                                                       inst->GetPc());
1393     COMPILER_LOG(DEBUG, LOWERING) << "Lowering is applied for " << GetOpcodeString(inst->GetOpcode());
1394 
1395     if (inst->GetImm() == 0 && newInput->GetOpcode() == Opcode::Cmp &&
1396         DataType::IsFloatType(newInput->CastToCmp()->GetOperandsType()) && !graph->IsBytecodeOptimizer()) {
1397         // Check that inst and input are the only users of newInput
1398         bool join {true};
1399         for (auto &user : newInput->GetUsers()) {
1400             if (auto userInst = user.GetInst(); userInst != inst && userInst != input) {
1401                 join = false;
1402                 break;
1403             }
1404         }
1405         if (join) {
1406             JoinFcmpInst(inst, newInput->CastToCmp());
1407         }
1408     }
1409 }
1410 
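// Non-immediate variant, e.g. (illustrative sketch):
//   3.b   Compare LT i32 v1, v2
//   4.    IfImm NE b v3, 0x0
// becomes
//   4.    If LT i32 v1, v2
// Unlike a plain Compare, the IfImm is control flow and is not removed by DCE, so it is
// replaced here explicitly with the new If instruction.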
1411 void Lowering::LowerIfImmToIf(IfImmInst *inst, Inst *input, ConditionCode cc, DataType::Type inputType)
1412 {
1413     auto graph = inst->GetBasicBlock()->GetGraph();
1414     // Create the replacement If instruction
1415     auto replace = graph->CreateInstIf(DataType::NO_TYPE, inst->GetPc(), input->GetInput(0).GetInst(),
1416                                        input->GetInput(1).GetInst(), inputType, cc, inst->GetMethod());
1417 #ifdef PANDA_COMPILER_DEBUG_INFO
1418     replace->SetCurrentMethod(inst->GetCurrentMethod());
1419 #endif
1420     // Replace IfImm instruction immediately because it's not removable by DCE
1421     inst->RemoveInputs();
1422     inst->GetBasicBlock()->ReplaceInst(inst, replace);
1423     graph->GetEventWriter().EventLowering(GetOpcodeString(inst->GetOpcode()), inst->GetId(), inst->GetPc());
1424     if (graph->IsBytecodeOptimizer()) {
1425         OptimizeIfInput(replace);
1426     }
1427     COMPILER_LOG(DEBUG, LOWERING) << "Lowering is applied for " << GetOpcodeString(inst->GetOpcode());
1428 }
1429 
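// Fuses a single-user Compare into its DeoptimizeIf, e.g. (illustrative sketch, `ss` being the
// SaveState taken from the DeoptimizeIf):
//   3.b   Compare EQ i32 v1, Const 0x0
//   4.    DeoptimizeIf b v3, ss
// becomes
//   5.    DeoptimizeCompareImm EQ i32 v1, 0x0, ss
// or DeoptimizeCompare with two register inputs when the constant does not fit the
// compare-immediate encoding.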
1430 void Lowering::LowerToDeoptimizeCompare(Inst *inst)
1431 {
1432     ASSERT(inst->GetOpcode() == Opcode::DeoptimizeIf);
1433     auto graph = inst->GetBasicBlock()->GetGraph();
1434     ASSERT(!graph->IsBytecodeOptimizer());
1435 
1436     auto deoptIf = inst->CastToDeoptimizeIf();
1437     if (deoptIf->GetInput(0).GetInst()->GetOpcode() != Opcode::Compare) {
1438         return;
1439     }
1440     auto compare = deoptIf->GetInput(0).GetInst()->CastToCompare();
1441     if (!compare->HasSingleUser()) {
1442         return;
1443     }
1444     COMPILER_LOG(DEBUG, LOWERING) << __func__ << "\n" << *compare << "\n" << *deoptIf;
1445     auto cmpInp1 = compare->GetInput(1).GetInst();
1446     DataType::Type type = compare->GetOperandsType();
1447     uint32_t size =
1448         (type == DataType::UINT64 || type == DataType::INT64 || type == DataType::ANY) ? WORD_SIZE : HALF_SIZE;
1449     Inst *deoptCmp = nullptr;
1450     if ((cmpInp1->IsConst() && ConstantFitsCompareImm(cmpInp1, size, compare->GetCc())) || cmpInp1->IsNullPtr()) {
1451         uint64_t imm = cmpInp1->IsNullPtr() ? 0 : cmpInp1->CastToConstant()->GetRawValue();
1452         deoptCmp = graph->CreateInstDeoptimizeCompareImm(deoptIf, compare, imm);
1453     } else {
1454         deoptCmp = graph->CreateInstDeoptimizeCompare(deoptIf, compare);
1455         deoptCmp->SetInput(1, compare->GetInput(1).GetInst());
1456     }
1457     deoptCmp->SetInput(0, compare->GetInput(0).GetInst());
1458     deoptCmp->SetSaveState(deoptIf->GetSaveState());
1459 #ifdef PANDA_COMPILER_DEBUG_INFO
1460     deoptCmp->SetCurrentMethod(inst->GetCurrentMethod());
1461 #endif
1462     deoptIf->ReplaceUsers(deoptCmp);
1463     deoptIf->GetBasicBlock()->InsertAfter(deoptCmp, deoptIf);
1464     deoptIf->ClearFlag(compiler::inst_flags::NO_DCE);
1465     graph->GetEventWriter().EventLowering(GetOpcodeString(deoptIf->GetOpcode()), deoptIf->GetId(), deoptIf->GetPc());
1466     COMPILER_LOG(DEBUG, LOWERING) << "===>\n" << *deoptCmp;
1467 }
1468 
1469 void Lowering::InvalidateAnalyses()
1470 {
1471     GetGraph()->InvalidateAnalysis<BoundsAnalysis>();
1472     GetGraph()->InvalidateAnalysis<AliasAnalysis>();
1473 }
1474 
1475 bool Lowering::RunImpl()
1476 {
1477     VisitGraph();
1478     return true;
1479 }
1480 }  // namespace ark::compiler
1481