/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "mir_lower.h"

#define DO_LT_0_CHECK 1

namespace maple {
// Remove intrinsicop __builtin_expect and record the likely info on brStmt.
// Target condExpr example:
//   ne u1 i64 (
//     intrinsicop i64 C___builtin_expect (
//       cvt i64 i32 (dread i32 %levVar_9354), cvt i64 i32 (constval i32 0)),
//     constval i64 0)
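// Illustrative result (assumed MIR rendering, not from the original dump): with
// the expected value 0 above, the condition is considered likely false, so a
// brfalse testing it is marked kProbLikely (a brtrue would get kProbUnlikely)
// and the intrinsicop is replaced by its first operand, leaving roughly
//   ne u1 i64 (cvt i64 i32 (dread i32 %levVar_9354), constval i64 0)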
void LowerCondGotoStmtWithBuiltinExpect(CondGotoNode &brStmt)
{
    BaseNode *condExpr = brStmt.Opnd(0);
    // Peek through the ne wrapping a dread of a short-circuit temp.
    // Example:
    //   dassign %shortCircuit 0 (ne u1 i64 (
    //     intrinsicop i64 C___builtin_expect (
    //       cvt i64 i32 (dread i32 %levVar_32349),
    //       cvt i64 i32 (constval i32 0)),
    //     constval i64 0))
    //   dassign %shortCircuit 0 (ne u1 u32 (dread u32 %shortCircuit, constval u1 0))
    if (condExpr->GetOpCode() == OP_ne && condExpr->Opnd(0)->GetOpCode() == OP_dread &&
        condExpr->Opnd(1)->GetOpCode() == OP_constval) {
        auto *constVal = static_cast<ConstvalNode *>(condExpr->Opnd(1))->GetConstVal();
        if (constVal->GetKind() == kConstInt && static_cast<MIRIntConst *>(constVal)->GetValue() == 0) {
            condExpr = condExpr->Opnd(0);
        }
    }
    if (condExpr->GetOpCode() == OP_dread) {
        // Example:
        //   dassign %shortCircuit 0 (ne u1 i64 (
        //     intrinsicop i64 C___builtin_expect (
        //       cvt i64 i32 (dread i32 %levVar_9488),
        //       cvt i64 i32 (constval i32 1)),
        //     constval i64 0))
        //   brfalse @shortCircuit_label_13351 (dread u32 %shortCircuit)
        StIdx stIdx = static_cast<DreadNode *>(condExpr)->GetStIdx();
        FieldID fieldId = static_cast<DreadNode *>(condExpr)->GetFieldID();
        if (fieldId != 0) {
            return;
        }
        if (brStmt.GetPrev() == nullptr || brStmt.GetPrev()->GetOpCode() != OP_dassign) {
            return;  // the previous stmt may be a label; skip that case as well
        }
        auto *dassign = static_cast<DassignNode *>(brStmt.GetPrev());
        if (stIdx != dassign->GetStIdx() || dassign->GetFieldID() != 0) {
            return;
        }
        condExpr = dassign->GetRHS();
    }
    if (condExpr->GetOpCode() == OP_ne) {
        // opnd1 must be int const 0
        BaseNode *opnd1 = condExpr->Opnd(1);
        if (opnd1->GetOpCode() != OP_constval) {
            return;
        }
        auto *constVal = static_cast<ConstvalNode *>(opnd1)->GetConstVal();
        if (constVal->GetKind() != kConstInt || static_cast<MIRIntConst *>(constVal)->GetValue() != 0) {
            return;
        }
        // opnd0 must be intrinsicop C___builtin_expect
        BaseNode *opnd0 = condExpr->Opnd(0);
        if (opnd0->GetOpCode() != OP_intrinsicop) {
            return;
        }
        // We trust constant folding to have simplified the expected value.
        auto *expectedConstExpr = opnd0->Opnd(1);
        if (expectedConstExpr->GetOpCode() == OP_cvt) {
            expectedConstExpr = expectedConstExpr->Opnd(0);
        }
        if (expectedConstExpr->GetOpCode() != OP_constval) {
            return;
        }
        auto *expectedConstNode = static_cast<ConstvalNode *>(expectedConstExpr)->GetConstVal();
        CHECK_FATAL(expectedConstNode->GetKind() == kConstInt, "must be");
        auto expectedVal = static_cast<MIRIntConst *>(expectedConstNode)->GetValue();
        if (expectedVal != 0 && expectedVal != 1) {
            return;
        }
        bool likelyTrue = (expectedVal == 1);  // the condition is likely to be true
        bool likelyBranch = (brStmt.GetOpCode() == OP_brtrue ? likelyTrue : !likelyTrue);  // the jump is likely taken
        if (likelyBranch) {
            brStmt.SetBranchProb(kProbLikely);
        } else {
            brStmt.SetBranchProb(kProbUnlikely);
        }
        // Remove __builtin_expect by replacing it with its first operand.
        condExpr->SetOpnd(opnd0->Opnd(0), 0);
    }
}
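// Walk the statements of the block (all but the last one) and, for each
// conditional branch (brtrue/brfalse), strip any __builtin_expect from its
// condition and record the branch probability on the branch statement.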
void MIRLower::LowerBuiltinExpect(BlockNode &block)
{
    auto *stmt = block.GetFirst();
    auto *last = block.GetLast();
    while (stmt != last) {
        if (stmt->GetOpCode() == OP_brtrue || stmt->GetOpCode() == OP_brfalse) {
            LowerCondGotoStmtWithBuiltinExpect(*static_cast<CondGotoNode *>(stmt));
        }
        stmt = stmt->GetNext();
    }
}
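// Emit a conditional branch (op is OP_brtrue or OP_brfalse) on the if-condition
// to a newly created label, then append the then-part (or the else-part when
// the then-part is empty). Returns the new label; the caller is responsible
// for placing that label after the appended statements.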
LabelIdx MIRLower::CreateCondGotoStmt(Opcode op, BlockNode &blk, const IfStmtNode &ifStmt)
{
    auto *brStmt = mirModule.CurFuncCodeMemPool()->New<CondGotoNode>(op);
    brStmt->SetOpnd(ifStmt.Opnd(), 0);
    brStmt->SetSrcPos(ifStmt.GetSrcPos());
    DEBUG_ASSERT(mirModule.CurFunction() != nullptr, "mirModule.CurFunction() should not be nullptr");
    LabelIdx labelIdx = mirModule.CurFunction()->GetLabelTab()->CreateLabel();
    mirModule.CurFunction()->GetLabelTab()->AddToStringLabelMap(labelIdx);
    brStmt->SetOffset(labelIdx);
    blk.AddStatement(brStmt);
    if (GetFuncProfData()) {
        GetFuncProfData()->CopyStmtFreq(brStmt->GetStmtID(), ifStmt.GetStmtID());
    }
    bool thenEmpty = (ifStmt.GetThenPart() == nullptr) || (ifStmt.GetThenPart()->GetFirst() == nullptr);
    if (thenEmpty) {
        blk.AppendStatementsFromBlock(*ifStmt.GetElsePart());
    } else {
        blk.AppendStatementsFromBlock(*ifStmt.GetThenPart());
    }
    return labelIdx;
}
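// Lower "if (cond) <thenPart>" with an empty else-part into:
//   brfalse <cond> @endlabel
//   <thenPart>
//   label @endlabel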
void MIRLower::CreateBrFalseStmt(BlockNode &blk, const IfStmtNode &ifStmt)
{
    LabelIdx labelIdx = CreateCondGotoStmt(OP_brfalse, blk, ifStmt);
    auto *labelStmt = mirModule.CurFuncCodeMemPool()->New<LabelNode>();
    labelStmt->SetLabelIdx(labelIdx);
    blk.AddStatement(labelStmt);
    // set stmtfreqs
    if (GetFuncProfData()) {
        DEBUG_ASSERT(GetFuncProfData()->GetStmtFreq(ifStmt.GetThenPart()->GetStmtID()) >= 0, "sanity check");
        int64_t freq = GetFuncProfData()->GetStmtFreq(ifStmt.GetStmtID()) -
                       GetFuncProfData()->GetStmtFreq(ifStmt.GetThenPart()->GetStmtID());
        GetFuncProfData()->SetStmtFreq(labelStmt->GetStmtID(), freq);
    }
}
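// Lower "if (cond) {} else <elsePart>" with an empty then-part into:
//   brtrue <cond> @endlabel
//   <elsePart>
//   label @endlabel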
void MIRLower::CreateBrTrueStmt(BlockNode &blk, const IfStmtNode &ifStmt)
{
    LabelIdx labelIdx = CreateCondGotoStmt(OP_brtrue, blk, ifStmt);
    auto *labelStmt = mirModule.CurFuncCodeMemPool()->New<LabelNode>();
    labelStmt->SetLabelIdx(labelIdx);
    blk.AddStatement(labelStmt);
    // set stmtfreqs
    if (GetFuncProfData()) {
        DEBUG_ASSERT(GetFuncProfData()->GetStmtFreq(ifStmt.GetElsePart()->GetStmtID()) >= 0, "sanity check");
        int64_t freq = GetFuncProfData()->GetStmtFreq(ifStmt.GetStmtID()) -
                       GetFuncProfData()->GetStmtFreq(ifStmt.GetElsePart()->GetStmtID());
        GetFuncProfData()->SetStmtFreq(labelStmt->GetStmtID(), freq);
    }
}
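// Lower "if (cond) <thenPart> else <elsePart>" into:
//   brfalse <cond> @elselabel
//   <thenPart>
//   goto @endlabel        (omitted when the then-part cannot fall through)
//   label @elselabel
//   <elsePart>
//   label @endlabel       (omitted together with the goto)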
void MIRLower::CreateBrFalseAndGotoStmt(BlockNode &blk, const IfStmtNode &ifStmt)
{
    LabelIdx labelIdx = CreateCondGotoStmt(OP_brfalse, blk, ifStmt);
    bool fallThroughFromThen = !IfStmtNoFallThrough(ifStmt);
    LabelIdx gotoLabelIdx = 0;
    if (fallThroughFromThen) {
        auto *gotoStmt = mirModule.CurFuncCodeMemPool()->New<GotoNode>(OP_goto);
        DEBUG_ASSERT(mirModule.CurFunction() != nullptr, "mirModule.CurFunction() should not be nullptr");
        gotoLabelIdx = mirModule.CurFunction()->GetLabelTab()->CreateLabel();
        mirModule.CurFunction()->GetLabelTab()->AddToStringLabelMap(gotoLabelIdx);
        gotoStmt->SetOffset(gotoLabelIdx);
        blk.AddStatement(gotoStmt);
        // set stmtfreqs
        if (GetFuncProfData()) {
            GetFuncProfData()->CopyStmtFreq(gotoStmt->GetStmtID(), ifStmt.GetThenPart()->GetStmtID());
        }
    }
    auto *labelStmt = mirModule.CurFuncCodeMemPool()->New<LabelNode>();
    labelStmt->SetLabelIdx(labelIdx);
    blk.AddStatement(labelStmt);
    blk.AppendStatementsFromBlock(*ifStmt.GetElsePart());
    // set stmtfreqs
    if (GetFuncProfData()) {
        GetFuncProfData()->CopyStmtFreq(labelStmt->GetStmtID(), ifStmt.GetElsePart()->GetStmtID());
    }
    if (fallThroughFromThen) {
        labelStmt = mirModule.CurFuncCodeMemPool()->New<LabelNode>();
        labelStmt->SetLabelIdx(gotoLabelIdx);
        blk.AddStatement(labelStmt);
        // set endlabel stmtfreqs
        if (GetFuncProfData()) {
            GetFuncProfData()->CopyStmtFreq(labelStmt->GetStmtID(), ifStmt.GetStmtID());
        }
    }
}
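// Lower an if-statement into a new block of branches and labels. When recursive
// is true, the then/else blocks are lowered first. If both parts are empty, the
// statement degenerates to "eval <cond>" so side effects of the condition are
// preserved.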
BlockNode *MIRLower::LowerIfStmt(IfStmtNode &ifStmt, bool recursive)
{
    bool thenEmpty = (ifStmt.GetThenPart() == nullptr) || (ifStmt.GetThenPart()->GetFirst() == nullptr);
    bool elseEmpty = (ifStmt.GetElsePart() == nullptr) || (ifStmt.GetElsePart()->GetFirst() == nullptr);
    if (recursive) {
        if (!thenEmpty) {
            ifStmt.SetThenPart(LowerBlock(*ifStmt.GetThenPart()));
        }
        if (!elseEmpty) {
            ifStmt.SetElsePart(LowerBlock(*ifStmt.GetElsePart()));
        }
    }
    auto *blk = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
    if (thenEmpty && elseEmpty) {
        // generate EVAL <cond> statement
        auto *evalStmt = mirModule.CurFuncCodeMemPool()->New<UnaryStmtNode>(OP_eval);
        evalStmt->SetOpnd(ifStmt.Opnd(), 0);
        evalStmt->SetSrcPos(ifStmt.GetSrcPos());
        blk->AddStatement(evalStmt);
        if (GetFuncProfData()) {
            GetFuncProfData()->CopyStmtFreq(evalStmt->GetStmtID(), ifStmt.GetStmtID());
        }
    } else if (elseEmpty) {
        // brfalse <cond> <endlabel>
        // <thenPart>
        // label <endlabel>
        CreateBrFalseStmt(*blk, ifStmt);
    } else if (thenEmpty) {
        // brtrue <cond> <endlabel>
        // <elsePart>
        // label <endlabel>
        CreateBrTrueStmt(*blk, ifStmt);
    } else {
        // brfalse <cond> <elselabel>
        // <thenPart>
        // goto <endlabel>
        // label <elselabel>
        // <elsePart>
        // label <endlabel>
        CreateBrFalseAndGotoStmt(*blk, ifStmt);
    }
    return blk;
}
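// Returns true if the case values in the switch table are consecutive integers
// in ascending order and every case branches to the same label.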
static bool ConsecutiveCaseValsAndSameTarget(const CaseVector *switchTable)
{
    size_t caseNum = switchTable->size();
    int lastVal = static_cast<int>((*switchTable)[0].first);
    LabelIdx lblIdx = (*switchTable)[0].second;
    for (size_t id = 1; id < caseNum; id++) {
        lastVal++;
        if (lastVal != (*switchTable)[id].first) {
            return false;
        }
        if (lblIdx != (*switchTable)[id].second) {
            return false;
        }
    }
    return true;
}
// If every case value is consecutive and branches to the same target (i.e. the
// switch effectively has a single case range with one destination), replace the
// switch with conditional branch(es) and return the lowered statements;
// otherwise, return nullptr.
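// Illustrative examples (assumed label names, not from the original source):
//   a switch whose cases 1, 2, 3 all go to @L1, default @Ldef, becomes
//     brtrue (x < 1) @Ldef
//     brtrue (x > 3) @Ldef
//     goto @L1
//   a switch with the single case 5 -> @L1, default @Ldef, becomes
//     brtrue (x == 5) @L1
//     goto @Ldef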
BlockNode *MIRLower::LowerSwitchStmt(SwitchNode *switchNode)
{
    CaseVector *switchTable = &switchNode->GetSwitchTable();
    if (switchTable->empty()) {  // goto @defaultLabel
        BlockNode *blk = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
        LabelIdx defaultLabel = switchNode->GetDefaultLabel();
        MIRBuilder *builder = mirModule.GetMIRBuilder();
        GotoNode *gotoStmt = builder->CreateStmtGoto(OP_goto, defaultLabel);
        blk->AddStatement(gotoStmt);
        return blk;
    }
    if (!ConsecutiveCaseValsAndSameTarget(switchTable)) {
        return nullptr;
    }
    BlockNode *blk = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
    LabelIdx caseGotoLabel = switchTable->front().second;
    LabelIdx defaultLabel = switchNode->GetDefaultLabel();
    int64 minCaseVal = switchTable->front().first;
    int64 maxCaseVal = switchTable->back().first;
    BaseNode *switchOpnd = switchNode->Opnd(0);
    MIRBuilder *builder = mirModule.GetMIRBuilder();
    ConstvalNode *minCaseNode = builder->CreateIntConst(minCaseVal, switchOpnd->GetPrimType());
    ConstvalNode *maxCaseNode = builder->CreateIntConst(maxCaseVal, switchOpnd->GetPrimType());
    if (minCaseVal == maxCaseVal) {
        // brtrue (x == minCaseVal) @case_goto_label
        // goto @default_label
        CompareNode *eqNode = builder->CreateExprCompare(
            OP_eq, *GlobalTables::GetTypeTable().GetInt32(),
            *GlobalTables::GetTypeTable().GetTypeFromTyIdx(TyIdx(switchOpnd->GetPrimType())), switchOpnd, minCaseNode);
        CondGotoNode *condGoto = builder->CreateStmtCondGoto(eqNode, OP_brtrue, caseGotoLabel);
        blk->AddStatement(condGoto);
        GotoNode *gotoStmt = builder->CreateStmtGoto(OP_goto, defaultLabel);
        blk->AddStatement(gotoStmt);
    } else {
        // brtrue (x < minCaseVal) @default_label
        // brtrue (x > maxCaseVal) @default_label
        // goto @case_goto_label
        CompareNode *ltNode = builder->CreateExprCompare(
            OP_lt, *GlobalTables::GetTypeTable().GetInt32(),
            *GlobalTables::GetTypeTable().GetTypeFromTyIdx(TyIdx(switchOpnd->GetPrimType())), switchOpnd, minCaseNode);
        CondGotoNode *condGoto = builder->CreateStmtCondGoto(ltNode, OP_brtrue, defaultLabel);
        blk->AddStatement(condGoto);
        CompareNode *gtNode = builder->CreateExprCompare(
            OP_gt, *GlobalTables::GetTypeTable().GetInt32(),
            *GlobalTables::GetTypeTable().GetTypeFromTyIdx(TyIdx(switchOpnd->GetPrimType())), switchOpnd, maxCaseNode);
        condGoto = builder->CreateStmtCondGoto(gtNode, OP_brtrue, defaultLabel);
        blk->AddStatement(condGoto);
        GotoNode *gotoStmt = builder->CreateStmtGoto(OP_goto, caseGotoLabel);
        blk->AddStatement(gotoStmt);
    }
    return blk;
}
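// Build a new block in which nested if/switch/block statements have been
// lowered; all other statements, including icall/icallassigned, are carried
// over unchanged. The original block's stmt id is preserved.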
BlockNode *MIRLower::LowerBlock(BlockNode &block)
{
    auto *newBlock = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
    BlockNode *tmp = nullptr;
    if (block.GetFirst() == nullptr) {
        newBlock->SetStmtID(block.GetStmtID());  // keep original block stmtid
        return newBlock;
    }
    StmtNode *nextStmt = block.GetFirst();
    DEBUG_ASSERT(nextStmt != nullptr, "nullptr check");
    do {
        StmtNode *stmt = nextStmt;
        nextStmt = stmt->GetNext();
        switch (stmt->GetOpCode()) {
            case OP_if:
                tmp = LowerIfStmt(static_cast<IfStmtNode &>(*stmt), true);
                newBlock->AppendStatementsFromBlock(*tmp);
                break;
            case OP_switch:
                tmp = LowerSwitchStmt(static_cast<SwitchNode *>(stmt));
                if (tmp != nullptr) {
                    newBlock->AppendStatementsFromBlock(*tmp);
                } else {
                    newBlock->AddStatement(stmt);
                }
                break;
            case OP_icallassigned:
            case OP_icall: {
                newBlock->AddStatement(stmt);
                break;
            }
            case OP_block:
                tmp = LowerBlock(static_cast<BlockNode &>(*stmt));
                newBlock->AppendStatementsFromBlock(*tmp);
                break;
            default:
                newBlock->AddStatement(stmt);
                break;
        }
    } while (nextStmt != nullptr);
    newBlock->SetStmtID(block.GetStmtID());  // keep original block stmtid
    return newBlock;
}
// Lower OP_cand and OP_cior operators embedded in the expression x, which
// belongs to curstmt.
BaseNode *MIRLower::LowerEmbeddedCandCior(BaseNode *x, StmtNode *curstmt, BlockNode *blk)
{
    DEBUG_ASSERT(x != nullptr, "nullptr check");
    for (size_t i = 0; i < x->GetNumOpnds(); i++) {
        x->SetOpnd(LowerEmbeddedCandCior(x->Opnd(i), curstmt, blk), i);
    }
    return x;
}

// Lower all appearances of OP_cand and OP_cior associated with conditional
// branches in the block.
void MIRLower::LowerCandCior(BlockNode &block)
{
    if (block.GetFirst() == nullptr) {
        return;
    }
    StmtNode *nextStmt = block.GetFirst();
    do {
        StmtNode *stmt = nextStmt;
        nextStmt = stmt->GetNext();
        // call LowerEmbeddedCandCior() for all the expression operands
        for (size_t i = 0; i < stmt->GetNumOpnds(); i++) {
            stmt->SetOpnd(LowerEmbeddedCandCior(stmt->Opnd(i), stmt, &block), i);
        }
    } while (nextStmt != nullptr);
}
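// Driver for lowering one function: lower the body (if/switch/nested blocks),
// strip __builtin_expect from conditional branches, and lower cand/cior
// operands unless running inside LFO.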
void MIRLower::LowerFunc(MIRFunction &func)
{
    mirModule.SetCurFunction(&func);
    BlockNode *origBody = func.GetBody();
    DEBUG_ASSERT(origBody != nullptr, "nullptr check");
    BlockNode *newBody = LowerBlock(*origBody);
    DEBUG_ASSERT(newBody != nullptr, "nullptr check");
    LowerBuiltinExpect(*newBody);
    if (!InLFO()) {
        LowerCandCior(*newBody);
    }
    func.SetBody(newBody);
}
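// Recover the MIRFuncType referred to by a function-pointer expression by
// inspecting its shape (regread of a promoted symbol or formal, dread, iread,
// or retype). Returns nullptr when the type cannot be determined; other
// opcodes are treated as not yet implemented.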
MIRFuncType *MIRLower::FuncTypeFromFuncPtrExpr(BaseNode *x)
{
    DEBUG_ASSERT(x != nullptr, "nullptr check");
    MIRFuncType *res = nullptr;
    MIRFunction *func = mirModule.CurFunction();
    switch (x->GetOpCode()) {
        case OP_regread: {
            RegreadNode *regread = static_cast<RegreadNode *>(x);
            MIRPreg *preg = func->GetPregTab()->PregFromPregIdx(regread->GetRegIdx());
            // see if it is promoted from a symbol
            if (preg->GetOp() == OP_dread) {
                const MIRSymbol *symbol = preg->rematInfo.sym;
                MIRType *mirType = symbol->GetType();
                if (mirType->GetKind() == kTypePointer) {
                    res = static_cast<MIRPtrType *>(mirType)->GetPointedFuncType();
                }
                if (res != nullptr) {
                    break;
                }
            }
            // check if it is a formal promoted to a preg
            for (FormalDef &formalDef : func->GetFormalDefVec()) {
                if (!formalDef.formalSym->IsPreg()) {
                    continue;
                }
                if (formalDef.formalSym->GetPreg() == preg) {
                    MIRType *mirType = formalDef.formalSym->GetType();
                    if (mirType->GetKind() == kTypePointer) {
                        res = static_cast<MIRPtrType *>(mirType)->GetPointedFuncType();
                    }
                    break;
                }
            }
            break;
        }
        case OP_dread: {
            DreadNode *dread = static_cast<DreadNode *>(x);
            MIRSymbol *symbol = func->GetLocalOrGlobalSymbol(dread->GetStIdx());
            MIRType *mirType = symbol->GetType();
            if (mirType->GetKind() == kTypePointer) {
                res = static_cast<MIRPtrType *>(mirType)->GetPointedFuncType();
            }
            break;
        }
        case OP_iread: {
            IreadNode *iread = static_cast<IreadNode *>(x);
            MIRPtrType *ptrType = static_cast<MIRPtrType *>(iread->GetType());
            MIRType *mirType = ptrType->GetPointedType();
            if (mirType->GetKind() == kTypeFunction) {
                res = static_cast<MIRFuncType *>(mirType);
            } else if (mirType->GetKind() == kTypePointer) {
                res = static_cast<MIRPtrType *>(mirType)->GetPointedFuncType();
            }
            break;
        }
        case OP_retype: {
            MIRType *mirType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(static_cast<RetypeNode *>(x)->GetTyIdx());
            if (mirType->GetKind() == kTypePointer) {
                res = static_cast<MIRPtrType *>(mirType)->GetPointedFuncType();
            }
            if (res == nullptr) {
                res = FuncTypeFromFuncPtrExpr(x->Opnd(kNodeFirstOpnd));
            }
            break;
        }
        default:
            CHECK_FATAL(false, "MIRLower::FuncTypeFromFuncPtrExpr: NYI");
    }
    return res;
}
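// kSetArrayHotFunc is empty by default, so ShouldOptArrayMrt() returns true
// only for functions whose names have been added to the set.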
const std::set<std::string> MIRLower::kSetArrayHotFunc = {};

bool MIRLower::ShouldOptArrayMrt(const MIRFunction &func)
{
    return (MIRLower::kSetArrayHotFunc.find(func.GetName()) != MIRLower::kSetArrayHotFunc.end());
}
}  // namespace maple