1 /*
2 * Copyright (c) 2023 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "lower.h"
17 #include <string>
18 #include <cinttypes>
19 #include <vector>
20 #include "mir_symbol.h"
21 #include "mir_function.h"
22 #include "cg_option.h"
23 #include "switch_lowerer.h"
24 #include "try_catch.h"
25 #include "intrinsic_op.h"
26 #include "mir_builder.h"
27 #include "opcode_info.h"
28 #include "rt.h"
29 #include "securec.h"
30 #include "string_utils.h"
31 #include "cast_opt.h"
32 #include "simplify.h"
33 #include "me_safety_warning.h"
34
35 namespace maplebe {
36 namespace arrayNameForLower {
37 const std::set<std::string> kArrayKlassName {
38 #include "array_klass_name.def"
39 };
40
41 const std::set<std::string> kArrayBaseName {
42 #include "array_base_name.def"
43 };
44 } // namespace arrayNameForLower
45
46 using namespace maple;
47
48 #define JAVALANG (mirModule.IsJavaModule())
49 #define TARGARM32 0
50
51 enum ExtFuncT : uint8 { kFmodDouble, kFmodFloat };
52
53 struct ExtFuncDescrT {
54 ExtFuncT fid;
55 const char *name;
56 PrimType retType;
57 PrimType argTypes[kMaxModFuncArgSize];
58 };
59
60 namespace {
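// Table mapping MIR intrinsic IDs to the names of the MCC runtime entry points they are lowered to.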
61 std::pair<MIRIntrinsicID, const std::string> cgBuiltins[] = {
62 {INTRN_JAVA_ARRAY_LENGTH, "MCC_DexArrayLength"},
63 {INTRN_JAVA_ARRAY_FILL, "MCC_DexArrayFill"},
64 {INTRN_JAVA_CHECK_CAST, "MCC_DexCheckCast"},
65 {INTRN_JAVA_INSTANCE_OF, "MCC_DexInstanceOf"},
66 {INTRN_JAVA_INTERFACE_CALL, "MCC_DexInterfaceCall"},
67 {INTRN_JAVA_POLYMORPHIC_CALL, "MCC_DexPolymorphicCall"},
68 {INTRN_MCC_DeferredFillNewArray, "MCC_DeferredFillNewArray"},
69 {INTRN_MCC_DeferredInvoke, "MCC_DeferredInvoke"},
70 {INTRN_JAVA_CONST_CLASS, "MCC_GetReferenceToClass"},
71 {INTRN_JAVA_GET_CLASS, "MCC_GetClass"},
72 {INTRN_MPL_SET_CLASS, "MCC_SetJavaClass"},
73 {INTRN_MPL_MEMSET_LOCALVAR, "memset_s"},
74 };
75
76 ExtFuncDescrT extFnDescrs[] = {
77 {kFmodDouble, "fmod", PTY_f64, {PTY_f64, PTY_f64, kPtyInvalid}},
78 {kFmodFloat, "fmodf", PTY_f32, {PTY_f32, PTY_f32, kPtyInvalid}},
79 };
80
81 std::vector<std::pair<ExtFuncT, PUIdx>> extFuncs;
82 const std::string kOpAssertge = "OP_assertge";
83 const std::string kOpAssertlt = "OP_assertlt";
84 const std::string kOpCallAssertle = "OP_callassertle";
85 const std::string kOpReturnAssertle = "OP_returnassertle";
86 const std::string kOpAssignAssertle = "OP_assignassertle";
87 const std::string kFileSymbolNamePrefix = "symname";
88 } // namespace
89
90 const std::string CGLowerer::kIntrnRetValPrefix = "__iret";
91 const std::string CGLowerer::kUserRetValPrefix = "__uret";
92
std::string CGLowerer::GetFileNameSymbolName(const std::string &fileName) const
94 {
95 return kFileSymbolNamePrefix + std::regex_replace(fileName, std::regex("-"), "_");
96 }
97
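// Creates a fresh local variable of the given type, named <prefix> followed by a unique counter,
// used to hold values (such as call return values) introduced during lowering.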
MIRSymbol *CGLowerer::CreateNewRetVar(const MIRType &ty, const std::string &prefix)
99 {
100 const uint32 bufSize = 257;
101 char buf[bufSize] = {'\0'};
102 MIRFunction *func = GetCurrentFunc();
103 MIRSymbol *var = func->GetSymTab()->CreateSymbol(kScopeLocal);
104 int eNum = sprintf_s(buf, bufSize - 1, "%s%" PRId64, prefix.c_str(), ++seed);
105 if (eNum == -1) {
106 FATAL(kLncFatal, "sprintf_s failed");
107 }
108 std::string strBuf(buf);
109 var->SetNameStrIdx(mirModule.GetMIRBuilder()->GetOrCreateStringIndex(strBuf));
110 var->SetTyIdx(ty.GetTypeIndex());
111 var->SetStorageClass(kScAuto);
112 var->SetSKind(kStVar);
113 func->GetSymTab()->AddToStringSymbolMap(*var);
114 return var;
115 }
116
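// Declares the external library functions listed in extFnDescrs (fmod/fmodf) as extern MIRFunctions
// and caches their PUIdx in extFuncs, so floating-point rem can later be lowered to calls.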
void CGLowerer::RegisterExternalLibraryFunctions()
118 {
119 for (uint32 i = 0; i < sizeof(extFnDescrs) / sizeof(extFnDescrs[0]); ++i) {
120 ExtFuncT id = extFnDescrs[i].fid;
121 CHECK_FATAL(id == i, "make sure id equal i");
122
123 MIRFunction *func =
124 mirModule.GetMIRBuilder()->GetOrCreateFunction(extFnDescrs[i].name, TyIdx(extFnDescrs[i].retType));
125 beCommon.UpdateTypeTable(*func->GetMIRFuncType());
126 func->AllocSymTab();
127 MIRSymbol *funcSym = func->GetFuncSymbol();
128 funcSym->SetStorageClass(kScExtern);
129 funcSym->SetAppearsInCode(true);
130 /* return type */
131 MIRType *retTy = GlobalTables::GetTypeTable().GetPrimType(extFnDescrs[i].retType);
132
133 /* use void* for PTY_dynany */
134 if (retTy->GetPrimType() == PTY_dynany) {
135 retTy = GlobalTables::GetTypeTable().GetPtr();
136 }
137
138 std::vector<MIRSymbol *> formals;
139 for (uint32 j = 0; extFnDescrs[i].argTypes[j] != kPtyInvalid; ++j) {
140 PrimType primTy = extFnDescrs[i].argTypes[j];
141 MIRType *argTy = GlobalTables::GetTypeTable().GetPrimType(primTy);
142 /* use void* for PTY_dynany */
143 if (argTy->GetPrimType() == PTY_dynany) {
144 argTy = GlobalTables::GetTypeTable().GetPtr();
145 }
146 MIRSymbol *argSt = func->GetSymTab()->CreateSymbol(kScopeLocal);
147 const uint32 bufSize = 18;
148 char buf[bufSize] = {'\0'};
149 int eNum = sprintf_s(buf, bufSize - 1, "p%u", j);
150 if (eNum == -1) {
151 FATAL(kLncFatal, "sprintf_s failed");
152 }
153 std::string strBuf(buf);
154 argSt->SetNameStrIdx(mirModule.GetMIRBuilder()->GetOrCreateStringIndex(strBuf));
155 argSt->SetTyIdx(argTy->GetTypeIndex());
156 argSt->SetStorageClass(kScFormal);
157 argSt->SetSKind(kStVar);
158 func->GetSymTab()->AddToStringSymbolMap(*argSt);
159 formals.emplace_back(argSt);
160 }
161 func->UpdateFuncTypeAndFormalsAndReturnType(formals, retTy->GetTypeIndex(), false);
162 auto *funcType = func->GetMIRFuncType();
163 DEBUG_ASSERT(funcType != nullptr, "null ptr check");
164 beCommon.AddTypeSizeAndAlign(funcType->GetTypeIndex(), GetPrimTypeSize(funcType->GetPrimType()));
165 extFuncs.emplace_back(std::pair<ExtFuncT, PUIdx>(id, func->GetPuidx()));
166 }
167 }
168
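// Inserts a cvt to mType when the operand's primitive type differs in size from mType;
// otherwise returns the expression unchanged.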
BaseNode *CGLowerer::NodeConvert(PrimType mType, BaseNode &expr)
170 {
171 PrimType srcType = expr.GetPrimType();
172 if (GetPrimTypeSize(mType) == GetPrimTypeSize(srcType)) {
173 return &expr;
174 }
175 TypeCvtNode *cvtNode = mirModule.CurFuncCodeMemPool()->New<TypeCvtNode>(OP_cvt);
176 cvtNode->SetFromType(srcType);
177 cvtNode->SetPrimType(mType);
178 cvtNode->SetOpnd(&expr, 0);
179 return cvtNode;
180 }
181
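// Lowers an iaddrof with a non-zero field ID into base-pointer arithmetic, roughly (illustrative):
//   iaddrof <*S> fld (base)  ==>  add ptr (base, constval <field offset of fld in S>)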
BaseNode *CGLowerer::LowerIaddrof(const IreadNode &iaddrof)
183 {
184 if (iaddrof.GetFieldID() == 0) {
185 return iaddrof.Opnd(0);
186 }
187 MIRType *type = GlobalTables::GetTypeTable().GetTypeFromTyIdx(iaddrof.GetTyIdx());
188 MIRPtrType *pointerTy = static_cast<MIRPtrType *>(type);
189 CHECK_FATAL(pointerTy != nullptr, "LowerIaddrof: expect a pointer type at iaddrof node");
190 MIRStructType *structTy =
191 static_cast<MIRStructType *>(GlobalTables::GetTypeTable().GetTypeFromTyIdx(pointerTy->GetPointedTyIdx()));
192 CHECK_FATAL(structTy != nullptr, "LowerIaddrof: non-zero fieldID for non-structure");
193 int32 offset = beCommon.GetFieldOffset(*structTy, iaddrof.GetFieldID()).first;
194 if (offset == 0) {
195 return iaddrof.Opnd(0);
196 }
197 uint32 loweredPtrType = static_cast<uint32>(GetLoweredPtrType());
198 MIRIntConst *offsetConst = GlobalTables::GetIntConstTable().GetOrCreateIntConst(
199 offset, *GlobalTables::GetTypeTable().GetTypeTable().at(loweredPtrType));
200 BaseNode *offsetNode = mirModule.CurFuncCodeMemPool()->New<ConstvalNode>(offsetConst);
201 offsetNode->SetPrimType(GetLoweredPtrType());
202
203 BinaryNode *addNode = mirModule.CurFuncCodeMemPool()->New<BinaryNode>(OP_add);
204 addNode->SetPrimType(GetLoweredPtrType());
205 addNode->SetBOpnd(iaddrof.Opnd(0), 0);
206 addNode->SetBOpnd(offsetNode, 1);
207 return addNode;
208 }
209
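// At -O0, moves the second operand of a binary node into a named temporary and reads it back,
// keeping expression trees shallow; at -O1 and above the node is returned untouched.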
BaseNode *CGLowerer::SplitBinaryNodeOpnd1(BinaryNode &bNode, BlockNode &blkNode)
211 {
212 if (Globals::GetInstance()->GetOptimLevel() >= CGOptions::kLevel1) {
213 return &bNode;
214 }
215 MIRBuilder *mirbuilder = mirModule.GetMIRBuilder();
216 static uint32 val = 0;
217 std::string name("bnaryTmp");
218 name += std::to_string(val++);
219
220 BaseNode *opnd1 = bNode.Opnd(1);
221 MIRType *ty = GlobalTables::GetTypeTable().GetTypeFromTyIdx(static_cast<TyIdx>(opnd1->GetPrimType()));
222 MIRSymbol *dnodeSt = mirbuilder->GetOrCreateLocalDecl(const_cast<const std::string &>(name), *ty);
223 DassignNode *dnode = mirbuilder->CreateStmtDassign(const_cast<MIRSymbol &>(*dnodeSt), 0, opnd1);
224 blkNode.InsertAfter(blkNode.GetLast(), dnode);
225
226 BaseNode *dreadNode = mirbuilder->CreateExprDread(*dnodeSt);
227 bNode.SetOpnd(dreadNode, 1);
228
229 return &bNode;
230 }
231
BaseNode *CGLowerer::SplitTernaryNodeResult(TernaryNode &tNode, BaseNode &parent, BlockNode &blkNode)
233 {
234 if (Globals::GetInstance()->GetOptimLevel() >= CGOptions::kLevel1) {
235 return &tNode;
236 }
237 MIRBuilder *mirbuilder = mirModule.GetMIRBuilder();
238 static uint32 val = 0;
239 std::string name("tnaryTmp");
240 name += std::to_string(val++);
241
242 MIRType *ty = GlobalTables::GetTypeTable().GetTypeFromTyIdx(static_cast<TyIdx>(tNode.GetPrimType()));
243 MIRSymbol *dassignNodeSym = mirbuilder->GetOrCreateLocalDecl(const_cast<const std::string &>(name), *ty);
244 DassignNode *dassignNode = mirbuilder->CreateStmtDassign(const_cast<MIRSymbol &>(*dassignNodeSym), 0, &tNode);
245 blkNode.InsertAfter(blkNode.GetLast(), dassignNode);
246
247 BaseNode *dreadNode = mirbuilder->CreateExprDread(*dassignNodeSym);
248 for (size_t i = 0; i < parent.NumOpnds(); i++) {
249 if (parent.Opnd(i) == &tNode) {
250 parent.SetOpnd(dreadNode, i);
251 break;
252 }
253 }
254
255 return dreadNode;
256 }
257
/* Check whether an operand of the select node is complex enough, for either correctness or
 * performance reasons, that the select needs to be lowered to if-then-else.
 */
bool CGLowerer::IsComplexSelect(const TernaryNode &tNode) const
262 {
263 if (tNode.GetPrimType() == PTY_agg) {
264 return true;
265 }
266 /* Iread may have side effect which may cause correctness issue. */
267 if (HasIreadExpr(tNode.Opnd(kFirstReg)) || HasIreadExpr(tNode.Opnd(kSecondReg))) {
268 return true;
269 }
    // A complex expression would expand into many instructions, which can cost more than the
    // penalty of a branch misprediction.
272 constexpr size_t maxDepth = 3;
273 if (MaxDepth(tNode.Opnd(kFirstReg)) > maxDepth || MaxDepth(tNode.Opnd(kSecondReg)) > maxDepth) {
274 return true;
275 }
276 return false;
277 }
278
int32 CGLowerer::FindTheCurrentStmtFreq(const StmtNode *stmt) const
280 {
281 while (stmt != nullptr) {
282 int32 freq = mirModule.CurFunction()->GetFreqFromLastStmt(stmt->GetStmtID());
283 if (freq != -1) {
284 return freq;
285 }
286 stmt = stmt->GetPrev();
287 }
288 return -1;
289 }
290
291 /* Lower agg select node back to if-then-else stmt. */
292 /*
293 0(brfalse)
294 | \
295 1 2
296 \ |
297 \ |
298 3
299 */
BaseNode *CGLowerer::LowerComplexSelect(const TernaryNode &tNode, BaseNode &parent, BlockNode &blkNode)
301 {
302 MIRBuilder *mirbuilder = mirModule.GetMIRBuilder();
303
    MIRType *resultTy = nullptr;
305 MIRFunction *func = mirModule.CurFunction();
306 if (tNode.GetPrimType() == PTY_agg) {
307 if (tNode.Opnd(1)->op == OP_dread) {
308 DreadNode *trueNode = static_cast<DreadNode *>(tNode.Opnd(1));
309 resultTy = mirModule.CurFunction()->GetLocalOrGlobalSymbol(trueNode->GetStIdx())->GetType();
310 } else if (tNode.Opnd(1)->op == OP_iread) {
311 IreadNode *trueNode = static_cast<IreadNode *>(tNode.Opnd(1));
312 MIRPtrType *ptrty =
313 static_cast<MIRPtrType *>(GlobalTables::GetTypeTable().GetTypeFromTyIdx(trueNode->GetTyIdx()));
314 resultTy =
315 static_cast<MIRStructType *>(GlobalTables::GetTypeTable().GetTypeFromTyIdx(ptrty->GetPointedTyIdx()));
316 if (trueNode->GetFieldID() != 0) {
317 MIRStructType *structty = static_cast<MIRStructType *>(resultTy);
318 resultTy =
319 GlobalTables::GetTypeTable().GetTypeFromTyIdx(structty->GetFieldTyIdx(trueNode->GetFieldID()));
320 }
321 } else {
322 CHECK_FATAL(false, "NYI: LowerComplexSelect");
323 }
324 } else {
325 resultTy = GlobalTables::GetTypeTable().GetTypeFromTyIdx(static_cast<TyIdx>(tNode.GetPrimType()));
326 }
327
328 CondGotoNode *brTargetStmt = mirModule.CurFuncCodeMemPool()->New<CondGotoNode>(OP_brfalse);
329 brTargetStmt->SetOpnd(tNode.Opnd(0), 0);
330 LabelIdx targetIdx = mirModule.CurFunction()->GetLabelTab()->CreateLabel();
331 mirModule.CurFunction()->GetLabelTab()->AddToStringLabelMap(targetIdx);
332 brTargetStmt->SetOffset(targetIdx);
    // Update the current stmt frequency
334 int32 currentStmtFreq = 0;
335 if (kOpcodeInfo.IsStmt(parent.GetOpCode())) {
336 currentStmtFreq = FindTheCurrentStmtFreq(static_cast<StmtNode *>(&parent));
337 }
338 currentStmtFreq = currentStmtFreq == -1 ? 0 : currentStmtFreq;
339 func->SetLastFreqMap(brTargetStmt->GetStmtID(), static_cast<uint32>(currentStmtFreq));
340 blkNode.InsertAfter(blkNode.GetLast(), brTargetStmt);
341 union {
342 MIRSymbol *resSym;
343 PregIdx resPreg;
344 } cplxSelRes; // complex select result
345 uint32 fallthruStmtFreq = static_cast<uint32>((currentStmtFreq + 1) / 2);
346 if (tNode.GetPrimType() == PTY_agg) {
347 static uint32 val = 0;
348 std::string name("ComplexSelectTmp");
349 name += std::to_string(val++);
350 cplxSelRes.resSym = mirbuilder->GetOrCreateLocalDecl(const_cast<std::string &>(name), *resultTy);
351 DassignNode *dassignTrue = mirbuilder->CreateStmtDassign(*cplxSelRes.resSym, 0, tNode.Opnd(1));
        // Fall-through path: record the frequency for the true-branch assignment
353 func->SetFirstFreqMap(dassignTrue->GetStmtID(), fallthruStmtFreq);
354 blkNode.InsertAfter(blkNode.GetLast(), dassignTrue);
355 } else {
356 cplxSelRes.resPreg = mirbuilder->GetCurrentFunction()->GetPregTab()->CreatePreg(tNode.GetPrimType());
357 RegassignNode *regassignTrue =
358 mirbuilder->CreateStmtRegassign(tNode.GetPrimType(), cplxSelRes.resPreg, tNode.Opnd(1));
        // Record the frequency for the true-branch register assignment
360 func->SetFirstFreqMap(regassignTrue->GetStmtID(), fallthruStmtFreq);
361 blkNode.InsertAfter(blkNode.GetLast(), regassignTrue);
362 }
363
364 GotoNode *gotoStmt = mirModule.CurFuncCodeMemPool()->New<GotoNode>(OP_goto);
365 LabelIdx EndIdx = mirModule.CurFunction()->GetLabelTab()->CreateLabel();
366 mirModule.CurFunction()->GetLabelTab()->AddToStringLabelMap(EndIdx);
367 gotoStmt->SetOffset(EndIdx);
    // Record the frequency for the goto that ends the fall-through path
369 func->SetLastFreqMap(gotoStmt->GetStmtID(), fallthruStmtFreq);
370 blkNode.InsertAfter(blkNode.GetLast(), gotoStmt);
371
372 uint32 targetStmtFreq = static_cast<uint32>(currentStmtFreq / 2);
373 LabelNode *lableStmt = mirModule.CurFuncCodeMemPool()->New<LabelNode>();
374 lableStmt->SetLabelIdx(targetIdx);
375 func->SetFirstFreqMap(lableStmt->GetStmtID(), targetStmtFreq);
376 blkNode.InsertAfter(blkNode.GetLast(), lableStmt);
377
378 if (tNode.GetPrimType() == PTY_agg) {
379 DassignNode *dassignFalse = mirbuilder->CreateStmtDassign(*cplxSelRes.resSym, 0, tNode.Opnd(2));
        // Record the frequency for the false-branch assignment
381 func->SetLastFreqMap(dassignFalse->GetStmtID(), targetStmtFreq);
382 blkNode.InsertAfter(blkNode.GetLast(), dassignFalse);
383 } else {
384 RegassignNode *regassignFalse =
385 mirbuilder->CreateStmtRegassign(tNode.GetPrimType(), cplxSelRes.resPreg, tNode.Opnd(2));
        // Record the frequency for the false-branch register assignment
387 func->SetLastFreqMap(regassignFalse->GetStmtID(), targetStmtFreq);
388 blkNode.InsertAfter(blkNode.GetLast(), regassignFalse);
389 }
390
391 lableStmt = mirModule.CurFuncCodeMemPool()->New<LabelNode>();
392 lableStmt->SetLabelIdx(EndIdx);
    // Record the frequency for the join label
394 func->SetFirstFreqMap(lableStmt->GetStmtID(), static_cast<uint32>(currentStmtFreq));
395 blkNode.InsertAfter(blkNode.GetLast(), lableStmt);
396
397 BaseNode *exprNode =
398 (tNode.GetPrimType() == PTY_agg)
399 ? static_cast<BaseNode *>(mirbuilder->CreateExprDread(*cplxSelRes.resSym))
400 : static_cast<BaseNode *>(mirbuilder->CreateExprRegread(tNode.GetPrimType(), cplxSelRes.resPreg));
401 for (size_t i = 0; i < parent.NumOpnds(); i++) {
402 if (parent.Opnd(i) == &tNode) {
403 parent.SetOpnd(exprNode, i);
404 break;
405 }
406 }
407
408 return exprNode;
409 }
410
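// Lowers a farray/jarray element address into explicit pointer arithmetic, roughly (illustrative):
//   array (base, idx)  ==>  add(base, idx * elemSize)                        flat array
//   array (base, idx)  ==>  add(base + arrayContentOffset, idx * elemSize)   Java array with object header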
BaseNode *CGLowerer::LowerFarray(ArrayNode &array)
412 {
413 auto *farrayType = static_cast<MIRFarrayType *>(array.GetArrayType(GlobalTables::GetTypeTable()));
414 size_t eSize = GlobalTables::GetTypeTable().GetTypeFromTyIdx(farrayType->GetElemTyIdx())->GetSize();
415 if (farrayType->GetKind() == kTypeJArray) {
416 if (farrayType->GetElemType()->GetKind() != kTypeScalar) {
417 /* not the last dimension of primitive array */
418 eSize = RTSupport::GetRTSupportInstance().GetObjectAlignment();
419 }
420 }
421
422 MIRType &arrayType = *GlobalTables::GetTypeTable().GetTypeFromTyIdx(TyIdx(array.GetPrimType()));
423 /* how about multi-dimension array? */
424 if (array.GetIndex(0)->GetOpCode() == OP_constval) {
425 const ConstvalNode *constvalNode = static_cast<const ConstvalNode *>(array.GetIndex(0));
426 if (constvalNode->GetConstVal()->GetKind() == kConstInt) {
427 const MIRIntConst *pIntConst = static_cast<const MIRIntConst *>(constvalNode->GetConstVal());
428 CHECK_FATAL(JAVALANG || !pIntConst->IsNegative(), "Array index should >= 0.");
429 uint64 eleOffset = pIntConst->GetExtValue() * eSize;
430
431 if (farrayType->GetKind() == kTypeJArray) {
432 eleOffset += RTSupport::GetRTSupportInstance().GetArrayContentOffset();
433 }
434
435 BaseNode *baseNode = NodeConvert(array.GetPrimType(), *array.GetBase());
436 if (eleOffset == 0) {
437 return baseNode;
438 }
439
440 MIRIntConst *eleConst = GlobalTables::GetIntConstTable().GetOrCreateIntConst(eleOffset, arrayType);
441 BaseNode *offsetNode = mirModule.CurFuncCodeMemPool()->New<ConstvalNode>(eleConst);
442 offsetNode->SetPrimType(array.GetPrimType());
443
444 BaseNode *rAdd = mirModule.CurFuncCodeMemPool()->New<BinaryNode>(OP_add);
445 rAdd->SetPrimType(array.GetPrimType());
446 rAdd->SetOpnd(baseNode, 0);
447 rAdd->SetOpnd(offsetNode, 1);
448 return rAdd;
449 }
450 }
451
452 BaseNode *resNode = NodeConvert(array.GetPrimType(), *array.GetIndex(0));
453 BaseNode *rMul = nullptr;
454
455 if ((farrayType->GetKind() == kTypeJArray) && (resNode->GetOpCode() == OP_constval)) {
456 ConstvalNode *idxNode = static_cast<ConstvalNode *>(resNode);
457 uint64 idx = safe_cast<MIRIntConst>(idxNode->GetConstVal())->GetExtValue();
458 MIRIntConst *eConst = GlobalTables::GetIntConstTable().GetOrCreateIntConst(idx * eSize, arrayType);
459 rMul = mirModule.CurFuncCodeMemPool()->New<ConstvalNode>(eConst);
460 rMul->SetPrimType(array.GetPrimType());
461 } else {
462 MIRIntConst *eConst =
463 GlobalTables::GetIntConstTable().GetOrCreateIntConst(static_cast<int64>(eSize), arrayType);
464 BaseNode *eSizeNode = mirModule.CurFuncCodeMemPool()->New<ConstvalNode>(eConst);
465 eSizeNode->SetPrimType(array.GetPrimType());
466 rMul = mirModule.CurFuncCodeMemPool()->New<BinaryNode>(OP_mul);
467 rMul->SetPrimType(array.GetPrimType());
468 rMul->SetOpnd(resNode, 0);
469 rMul->SetOpnd(eSizeNode, 1);
470 }
471
472 BaseNode *baseNode = NodeConvert(array.GetPrimType(), *array.GetBase());
473
474 if (farrayType->GetKind() == kTypeJArray) {
475 BaseNode *jarrayBaseNode = mirModule.CurFuncCodeMemPool()->New<BinaryNode>(OP_add);
476 MIRIntConst *arrayHeaderNode = GlobalTables::GetIntConstTable().GetOrCreateIntConst(
477 RTSupport::GetRTSupportInstance().GetArrayContentOffset(), arrayType);
478 BaseNode *arrayHeaderCstNode = mirModule.CurFuncCodeMemPool()->New<ConstvalNode>(arrayHeaderNode);
479 arrayHeaderCstNode->SetPrimType(array.GetPrimType());
480 jarrayBaseNode->SetPrimType(array.GetPrimType());
481 jarrayBaseNode->SetOpnd(baseNode, 0);
482 jarrayBaseNode->SetOpnd(arrayHeaderCstNode, 1);
483 baseNode = jarrayBaseNode;
484 }
485
486 BaseNode *rAdd = mirModule.CurFuncCodeMemPool()->New<BinaryNode>(OP_add);
487 rAdd->SetPrimType(array.GetPrimType());
488 rAdd->SetOpnd(baseNode, 0);
489 rAdd->SetOpnd(rMul, 1);
490 return rAdd;
491 }
492
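// Builds the linearized index expression for a multi-dimensional array access by accumulating
// index[i] multiplied by the product of the sizes of the lower dimensions.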
BaseNode *CGLowerer::LowerArrayDim(ArrayNode &array, int32 dim)
494 {
495 BaseNode *resNode = NodeConvert(array.GetPrimType(), *array.GetIndex(dim - 1));
    /* Process the remaining dimension indices; resNode already expresses the last dimension, so start from dim - 2. */
497 CHECK_FATAL(dim > (std::numeric_limits<int>::min)() + 1, "out of range");
498 int leftDim = dim - 2;
499 MIRType *aType = array.GetArrayType(GlobalTables::GetTypeTable());
500 MIRArrayType *arrayType = static_cast<MIRArrayType *>(aType);
501 for (int i = leftDim; i >= 0; --i) {
502 BaseNode *mpyNode = mirModule.CurFuncCodeMemPool()->New<BinaryNode>(OP_mul);
503 BaseNode *item =
504 NodeConvert(array.GetPrimType(), *array.GetDim(mirModule, GlobalTables::GetTypeTable(), dim - 1));
505 if (mirModule.IsCModule()) {
506 item = NodeConvert(array.GetPrimType(), *array.GetIndex(static_cast<size_t>(static_cast<unsigned int>(i))));
507 int64 offsetSize = 1;
508 for (int32 j = i + 1; j < dim; ++j) {
509 offsetSize *= arrayType->GetSizeArrayItem(static_cast<uint32>(j));
510 }
511 MIRIntConst *offsetCst = mirModule.CurFuncCodeMemPool()->New<MIRIntConst>(
512 offsetSize, *GlobalTables::GetTypeTable().GetTypeFromTyIdx(array.GetPrimType()));
513 BaseNode *eleOffset = mirModule.CurFuncCodeMemPool()->New<ConstvalNode>(offsetCst);
514 eleOffset->SetPrimType(array.GetPrimType());
515 mpyNode->SetPrimType(array.GetPrimType());
516 mpyNode->SetOpnd(eleOffset, 0);
517 mpyNode->SetOpnd(item, 1);
518 } else {
519 for (int j = leftDim; j > i; --j) {
520 BaseNode *mpyNodes = mirModule.CurFuncCodeMemPool()->New<BinaryNode>(OP_mul);
521 mpyNodes->SetPrimType(array.GetPrimType());
522 mpyNodes->SetOpnd(item, 0);
523 mpyNodes->SetOpnd(
524 NodeConvert(array.GetPrimType(), *array.GetDim(mirModule, GlobalTables::GetTypeTable(), j)), 1);
525 item = mpyNodes;
526 }
527 mpyNode->SetPrimType(array.GetPrimType());
528 mpyNode->SetOpnd(NodeConvert(array.GetPrimType(), *array.GetIndex(i)), 0);
529 mpyNode->SetOpnd(item, 1);
530 }
531
532 BaseNode *newResNode = mirModule.CurFuncCodeMemPool()->New<BinaryNode>(OP_add);
533 newResNode->SetPrimType(array.GetPrimType());
534 newResNode->SetOpnd(resNode, 0);
535 newResNode->SetOpnd(mpyNode, 1);
536 resNode = newResNode;
537 }
538 return resNode;
539 }
540
BaseNode *CGLowerer::LowerArrayForLazyBiding(BaseNode &baseNode, BaseNode &offsetNode, const BaseNode &parent)
542 {
543 if (parent.GetOpCode() == OP_iread && (baseNode.GetOpCode() == maple::OP_addrof)) {
544 const MIRSymbol *st =
545 mirModule.CurFunction()->GetLocalOrGlobalSymbol(static_cast<AddrofNode &>(baseNode).GetStIdx());
546 if (StringUtils::StartsWith(st->GetName(), namemangler::kDecoupleStaticValueStr) ||
547 ((StringUtils::StartsWith(st->GetName(), namemangler::kMuidFuncUndefTabPrefixStr) ||
548 StringUtils::StartsWith(st->GetName(), namemangler::kMuidFuncDefTabPrefixStr) ||
549 StringUtils::StartsWith(st->GetName(), namemangler::kMuidDataDefTabPrefixStr) ||
550 StringUtils::StartsWith(st->GetName(), namemangler::kMuidDataUndefTabPrefixStr)) &&
551 CGOptions::IsLazyBinding())) {
            /* For decoupled-static or lazy-binding def/undef tables, replace the access with an intrinsic. */
553 MapleVector<BaseNode *> args(mirBuilder->GetCurrentFuncCodeMpAllocator()->Adapter());
554 args.emplace_back(&baseNode);
555 args.emplace_back(&offsetNode);
556 return mirBuilder->CreateExprIntrinsicop(INTRN_MPL_READ_STATIC_OFFSET_TAB, OP_intrinsicop,
557 *GlobalTables::GetTypeTable().GetPrimType(parent.GetPrimType()),
558 args);
559 }
560 }
561 return nullptr;
562 }
563
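// Lowers a general array address into base + index * elemSize, folding the offset for constant
// indices and delegating lazy-binding table accesses to LowerArrayForLazyBiding.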
BaseNode *CGLowerer::LowerArray(ArrayNode &array, const BaseNode &parent)
565 {
566 MIRType *aType = array.GetArrayType(GlobalTables::GetTypeTable());
567 if (aType->GetKind() == kTypeFArray || aType->GetKind() == kTypeJArray) {
568 return LowerFarray(array);
569 }
570 MIRArrayType *arrayType = static_cast<MIRArrayType *>(aType);
571 int32 dim = arrayType->GetDim();
572 BaseNode *resNode = LowerArrayDim(array, dim);
573 BaseNode *rMul = nullptr;
574 size_t eSize = beCommon.GetTypeSize(arrayType->GetElemTyIdx().GetIdx());
575 Opcode opAdd = OP_add;
576 MIRType &arrayTypes = *GlobalTables::GetTypeTable().GetTypeFromTyIdx(TyIdx(array.GetPrimType()));
577 if (resNode->GetOpCode() == OP_constval) {
578 /* index is a constant, we can calculate the offset now */
579 ConstvalNode *idxNode = static_cast<ConstvalNode *>(resNode);
580 uint64 idx = safe_cast<MIRIntConst>(idxNode->GetConstVal())->GetExtValue();
581 MIRIntConst *eConst = GlobalTables::GetIntConstTable().GetOrCreateIntConst(idx * eSize, arrayTypes);
582 rMul = mirModule.CurFuncCodeMemPool()->New<ConstvalNode>(eConst);
583 rMul->SetPrimType(array.GetPrimType());
584 if (dim == 1) {
585 opAdd = OP_CG_array_elem_add;
586 }
587 } else {
588 MIRIntConst *eConst =
589 GlobalTables::GetIntConstTable().GetOrCreateIntConst(static_cast<int64>(eSize), arrayTypes);
590 BaseNode *tmpNode = mirModule.CurFuncCodeMemPool()->New<ConstvalNode>(eConst);
591 tmpNode->SetPrimType(array.GetPrimType());
592 rMul = mirModule.CurFuncCodeMemPool()->New<BinaryNode>(OP_mul);
593 rMul->SetPrimType(array.GetPrimType());
594 rMul->SetOpnd(resNode, 0);
595 rMul->SetOpnd(tmpNode, 1);
596 }
597 BaseNode *baseNode = NodeConvert(array.GetPrimType(), *array.GetBase());
598 if (rMul->GetOpCode() == OP_constval) {
599 BaseNode *intrnNode = LowerArrayForLazyBiding(*baseNode, *rMul, parent);
600 if (intrnNode != nullptr) {
601 return intrnNode;
602 }
603 }
604 BaseNode *rAdd = mirModule.CurFuncCodeMemPool()->New<BinaryNode>(opAdd);
605 rAdd->SetPrimType(array.GetPrimType());
606 rAdd->SetOpnd(baseNode, 0);
607 rAdd->SetOpnd(rMul, 1);
608 return rAdd;
609 }
610
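// Lowers a C array access, including arrays nested through the element type, into explicit
// address arithmetic; see the comment below on how dimensions and indices are handled.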
BaseNode *CGLowerer::LowerCArray(ArrayNode &array)
612 {
613 MIRType *aType = array.GetArrayType(GlobalTables::GetTypeTable());
614 if (aType->GetKind() == kTypeFArray || aType->GetKind() == kTypeJArray) {
615 return LowerFarray(array);
616 }
617
618 MIRArrayType *arrayType = static_cast<MIRArrayType *>(aType);
    /* There are two cases where dimension > 1.
     * 1) arrayType->dim > 1: process the current arrayType directly (nestedArray = false).
     * 2) arrayType->dim == 1, but the element type is itself an array (nestedArray = true).
     * Assume for now that 1) and 2) cannot be mixed.
     * Besides the array dimension there is the array indexing: fewer indices than dimensions
     * may be supplied, and the number of indices dictates how many dimensions are folded.
     */
627 bool nestedArray = false;
628 int dim = arrayType->GetDim();
629 MIRType *innerType = nullptr;
630 MIRArrayType *innerArrayType = nullptr;
631 uint64 elemSize = 0;
632 if (dim == 1) {
633 innerType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(arrayType->GetElemTyIdx());
634 if (innerType->GetKind() == kTypeArray) {
635 nestedArray = true;
636 do {
637 innerArrayType = static_cast<MIRArrayType *>(innerType);
638 elemSize = RoundUp(beCommon.GetTypeSize(innerArrayType->GetElemTyIdx().GetIdx()),
639 beCommon.GetTypeAlign(arrayType->GetElemTyIdx().GetIdx()));
640 dim++;
641 innerType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(innerArrayType->GetElemTyIdx());
642 } while (innerType->GetKind() == kTypeArray);
643 }
644 }
645
646 int32 numIndex = static_cast<int>(array.NumOpnds()) - 1;
647 MIRArrayType *curArrayType = arrayType;
648 BaseNode *resNode = NodeConvert(array.GetPrimType(), *array.GetIndex(0));
649 if (dim > 1) {
650 BaseNode *prevNode = nullptr;
651 for (int i = 0; (i < dim) && (i < numIndex); i++) {
652 uint32 mpyDim = 1;
653 if (nestedArray) {
654 CHECK_FATAL(arrayType->GetSizeArrayItem(0) > 0, "Zero size array dimension");
655 innerType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(curArrayType->GetElemTyIdx());
656 curArrayType = static_cast<MIRArrayType *>(innerType);
657 while (innerType->GetKind() == kTypeArray) {
658 innerArrayType = static_cast<MIRArrayType *>(innerType);
659 mpyDim *= innerArrayType->GetSizeArrayItem(0);
660 innerType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(innerArrayType->GetElemTyIdx());
661 }
662 } else {
663 CHECK_FATAL(arrayType->GetSizeArrayItem(static_cast<uint32>(i)) > 0, "Zero size array dimension");
664 for (int j = i + 1; j < dim; j++) {
665 mpyDim *= arrayType->GetSizeArrayItem(static_cast<uint32>(j));
666 }
667 }
668
            BaseNode *index = array.GetIndex(static_cast<size_t>(i));
670 bool isConst = false;
671 uint64 indexVal = 0;
672 if (index->op == OP_constval) {
673 ConstvalNode *constNode = static_cast<ConstvalNode *>(index);
674 indexVal = (static_cast<MIRIntConst *>(constNode->GetConstVal()))->GetExtValue();
675 isConst = true;
676 MIRIntConst *newConstNode = mirModule.GetMemPool()->New<MIRIntConst>(
677 indexVal * mpyDim, *GlobalTables::GetTypeTable().GetTypeFromTyIdx(TyIdx(array.GetPrimType())));
678 BaseNode *newValNode = mirModule.CurFuncCodeMemPool()->New<ConstvalNode>(newConstNode);
679 newValNode->SetPrimType(array.GetPrimType());
680 if (i == 0) {
681 prevNode = newValNode;
682 continue;
683 } else {
684 resNode = newValNode;
685 }
686 }
687 if (i > 0 && !isConst) {
688 resNode = NodeConvert(array.GetPrimType(), *array.GetIndex(static_cast<size_t>(i)));
689 }
690
691 BaseNode *mpyNode;
692 if (isConst) {
693 MIRIntConst *mulConst = mirModule.GetMemPool()->New<MIRIntConst>(
694 mpyDim * indexVal, *GlobalTables::GetTypeTable().GetTypeFromTyIdx(TyIdx(array.GetPrimType())));
695 BaseNode *mulSize = mirModule.CurFuncCodeMemPool()->New<ConstvalNode>(mulConst);
696 mulSize->SetPrimType(array.GetPrimType());
697 mpyNode = mulSize;
698 } else if (mpyDim == 1 && prevNode) {
699 mpyNode = prevNode;
700 prevNode = resNode;
701 } else {
702 mpyNode = mirModule.CurFuncCodeMemPool()->New<BinaryNode>(OP_mul);
703 mpyNode->SetPrimType(array.GetPrimType());
704 MIRIntConst *mulConst = mirModule.GetMemPool()->New<MIRIntConst>(
705 mpyDim, *GlobalTables::GetTypeTable().GetTypeFromTyIdx(TyIdx(array.GetPrimType())));
706 BaseNode *mulSize = mirModule.CurFuncCodeMemPool()->New<ConstvalNode>(mulConst);
707 mulSize->SetPrimType(array.GetPrimType());
708 mpyNode->SetOpnd(NodeConvert(array.GetPrimType(), *mulSize), 0);
709 mpyNode->SetOpnd(resNode, 1);
710 }
711 if (i == 0) {
712 prevNode = mpyNode;
713 continue;
714 }
715 BaseNode *newResNode = mirModule.CurFuncCodeMemPool()->New<BinaryNode>(OP_add);
716 newResNode->SetPrimType(array.GetPrimType());
717 newResNode->SetOpnd(mpyNode, 0);
718 newResNode->SetOpnd(prevNode, 1);
719 prevNode = newResNode;
720 }
721 resNode = prevNode;
722 }
723
724 BaseNode *rMul = nullptr;
    // esize is the size of the array element in bytes (e.g. int = 4, long = 8)
726 uint64 esize;
727 if (nestedArray) {
728 esize = elemSize;
729 } else {
730 esize = beCommon.GetTypeSize(arrayType->GetElemTyIdx().GetIdx());
731 }
732 Opcode opadd = OP_add;
733 if (resNode->op == OP_constval) {
734 // index is a constant, we can calculate the offset now
735 ConstvalNode *idxNode = static_cast<ConstvalNode *>(resNode);
736 uint64 idx = static_cast<MIRIntConst *>(idxNode->GetConstVal())->GetExtValue();
737 MIRIntConst *econst = mirModule.GetMemPool()->New<MIRIntConst>(
738 idx * esize, *GlobalTables::GetTypeTable().GetTypeFromTyIdx(TyIdx(array.GetPrimType())));
739 rMul = mirModule.CurFuncCodeMemPool()->New<ConstvalNode>(econst);
740 rMul->SetPrimType(array.GetPrimType());
741 if (dim == 1 && array.GetBase()->op == OP_addrof &&
742 static_cast<AddrofNode *>(array.GetBase())->GetFieldID() == 0) {
743 opadd = OP_CG_array_elem_add;
744 }
745 } else {
746 MIRIntConst *econst = mirModule.GetMemPool()->New<MIRIntConst>(
747 esize, *GlobalTables::GetTypeTable().GetTypeFromTyIdx(TyIdx(array.GetPrimType())));
748 BaseNode *eSize = mirModule.CurFuncCodeMemPool()->New<ConstvalNode>(econst);
749 eSize->SetPrimType(array.GetPrimType());
750 rMul = mirModule.CurFuncCodeMemPool()->New<BinaryNode>(OP_mul);
751 rMul->SetPrimType(array.GetPrimType());
752 rMul->SetOpnd(resNode, 0);
753 rMul->SetOpnd(eSize, 1);
754 }
755 BaseNode *baseNode = NodeConvert(array.GetPrimType(), *array.GetBase());
756 BaseNode *rAdd = mirModule.CurFuncCodeMemPool()->New<BinaryNode>(opadd);
757 rAdd->SetPrimType(array.GetPrimType());
758 rAdd->SetOpnd(baseNode, 0);
759 rAdd->SetOpnd(rMul, 1);
760 return rAdd;
761 }
762
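// Stores rhs into the bit field located at byteBitOffsets from baseAddr. A field that crosses the
// boundary of one primType-sized unit is written with two iassignoff statements.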
StmtNode *CGLowerer::WriteBitField(const std::pair<int32, int32> &byteBitOffsets, const MIRBitFieldType *fieldType,
                                   BaseNode *baseAddr, BaseNode *rhs, BlockNode *block)
765 {
766 auto bitSize = fieldType->GetFieldSize();
767 auto primType = fieldType->GetPrimType();
768 auto byteOffset = byteBitOffsets.first;
769 auto bitOffset = byteBitOffsets.second;
770 auto *builder = mirModule.GetMIRBuilder();
771 auto *bitField = builder->CreateExprIreadoff(primType, byteOffset, baseAddr);
772 auto primTypeBitSize = GetPrimTypeBitSize(primType);
773 if ((static_cast<uint32>(bitOffset) + bitSize) <= primTypeBitSize) {
774 if (CGOptions::IsBigEndian()) {
775 bitOffset =
776 (static_cast<int64>(beCommon.GetTypeSize(fieldType->GetTypeIndex()) * kBitsPerByte) - bitOffset) -
777 bitSize;
778 }
779 auto depositBits = builder->CreateExprDepositbits(OP_depositbits, primType, static_cast<uint32>(bitOffset),
780 bitSize, bitField, rhs);
781 return builder->CreateStmtIassignoff(primType, byteOffset, baseAddr, depositBits);
782 }
    // If the bit field does not fit within one primType-sized unit, emit an extra assignment that starts at the next unit boundary.
784 auto bitsRemained = (bitOffset + bitSize) - primTypeBitSize;
785 auto bitsExtracted = primTypeBitSize - bitOffset;
786 if (CGOptions::IsBigEndian()) {
787 bitOffset = 0;
788 }
789 auto *depositedLowerBits = builder->CreateExprDepositbits(OP_depositbits, primType, static_cast<uint32>(bitOffset),
790 bitsExtracted, bitField, rhs);
791 auto *assignedLowerBits = builder->CreateStmtIassignoff(primType, byteOffset, baseAddr, depositedLowerBits);
792 block->AddStatement(assignedLowerBits);
793 auto *extractedHigherBits =
794 builder->CreateExprExtractbits(OP_extractbits, primType, bitsExtracted, bitsRemained, rhs);
795 auto *bitFieldRemained =
796 builder->CreateExprIreadoff(primType, byteOffset + static_cast<int32>(GetPrimTypeSize(primType)), baseAddr);
797 auto *depositedHigherBits = builder->CreateExprDepositbits(OP_depositbits, primType, 0, bitsRemained,
798 bitFieldRemained, extractedHigherBits);
799 auto *assignedHigherBits = builder->CreateStmtIassignoff(
800 primType, byteOffset + static_cast<int32>(GetPrimTypeSize(primType)), baseAddr, depositedHigherBits);
801 return assignedHigherBits;
802 }
803
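// Loads the bit field located at byteBitOffsets from baseAddr. A field that crosses a unit
// boundary is assembled from two loads via extractbits/depositbits.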
BaseNode *CGLowerer::ReadBitField(const std::pair<int32, int32> &byteBitOffsets, const MIRBitFieldType *fieldType,
                                  BaseNode *baseAddr)
806 {
807 auto bitSize = fieldType->GetFieldSize();
808 auto primType = fieldType->GetPrimType();
809 auto byteOffset = byteBitOffsets.first;
810 auto bitOffset = byteBitOffsets.second;
811 auto *builder = mirModule.GetMIRBuilder();
812 auto *bitField = builder->CreateExprIreadoff(primType, byteOffset, baseAddr);
813 auto primTypeBitSize = GetPrimTypeBitSize(primType);
814 if ((static_cast<uint32>(bitOffset) + bitSize) <= primTypeBitSize) {
815 if (CGOptions::IsBigEndian()) {
816 bitOffset =
817 (static_cast<int64>(beCommon.GetTypeSize(fieldType->GetTypeIndex()) * kBitsPerByte) - bitOffset) -
818 bitSize;
819 }
820 return builder->CreateExprExtractbits(OP_extractbits, primType, static_cast<uint32>(bitOffset), bitSize,
821 bitField);
822 }
    // If the bit field does not fit within one primType-sized unit, the result is combined from two load expressions.
824 auto bitsRemained = (bitOffset + bitSize) - primTypeBitSize;
825 if (CGOptions::IsBigEndian()) {
826 bitOffset = 0;
827 }
828 auto *extractedLowerBits = builder->CreateExprExtractbits(OP_extractbits, primType, static_cast<uint32>(bitOffset),
829 bitSize - bitsRemained, bitField);
830 auto *bitFieldRemained =
831 builder->CreateExprIreadoff(primType, byteOffset + static_cast<int32>(GetPrimTypeSize(primType)), baseAddr);
832 auto *result = builder->CreateExprDepositbits(OP_depositbits, primType, bitSize - bitsRemained, bitsRemained,
833 extractedLowerBits, bitFieldRemained);
834 return result;
835 }
836
BaseNode *CGLowerer::LowerDreadBitfield(DreadNode &dread)
838 {
839 auto *symbol = mirModule.CurFunction()->GetLocalOrGlobalSymbol(dread.GetStIdx());
840 auto *structTy = static_cast<MIRStructType *>(symbol->GetType());
841 auto fTyIdx = structTy->GetFieldTyIdx(dread.GetFieldID());
842 auto *fType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(TyIdx(fTyIdx));
843 if (fType->GetKind() != kTypeBitField) {
844 return &dread;
845 }
846 auto *builder = mirModule.GetMIRBuilder();
847 auto *baseAddr = builder->CreateExprAddrof(0, dread.GetStIdx());
848 auto byteBitOffsets = beCommon.GetFieldOffset(*structTy, dread.GetFieldID());
849 return ReadBitField(byteBitOffsets, static_cast<MIRBitFieldType *>(fType), baseAddr);
850 }
851
BaseNode *CGLowerer::LowerIreadBitfield(IreadNode &iread)
853 {
854 uint32 index = iread.GetTyIdx();
855 MIRPtrType *pointerTy = static_cast<MIRPtrType *>(GlobalTables::GetTypeTable().GetTypeFromTyIdx(index));
856 MIRType *pointedTy = GlobalTables::GetTypeTable().GetTypeFromTyIdx(pointerTy->GetPointedTyIdx());
857 /* Here pointed type can be Struct or JArray */
858 MIRStructType *structTy = nullptr;
859 if (pointedTy->GetKind() != kTypeJArray) {
860 structTy = static_cast<MIRStructType *>(pointedTy);
861 } else {
        /* It is a JArray type; use its parent's (java.lang.Object) field info. */
863 structTy = static_cast<MIRJarrayType *>(pointedTy)->GetParentType();
864 }
865 TyIdx fTyIdx = structTy->GetFieldTyIdx(iread.GetFieldID());
866 MIRType *fType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(TyIdx(fTyIdx));
867 if (fType->GetKind() != kTypeBitField) {
868 return &iread;
869 }
870 auto byteBitOffsets = beCommon.GetFieldOffset(*structTy, iread.GetFieldID());
871 return ReadBitField(byteBitOffsets, static_cast<MIRBitFieldType *>(fType), iread.Opnd(0));
872 }
873
874 // input node must be cvt, retype, zext or sext
BaseNode *CGLowerer::LowerCastExpr(BaseNode &expr)
876 {
877 if (CGOptions::GetInstance().GetOptimizeLevel() >= CGOptions::kLevel2) {
878 BaseNode *simplified = MapleCastOpt::SimplifyCast(*mirBuilder, &expr);
879 return simplified != nullptr ? simplified : &expr;
880 }
881 return &expr;
882 }
883
884 #if TARGARM32 || TARGAARCH64 || TARGRISCV64 || TARGX86_64
BlockNode *CGLowerer::LowerReturnStructUsingFakeParm(NaryStmtNode &retNode)
886 {
887 BlockNode *blk = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
888 for (size_t i = 0; i < retNode.GetNopndSize(); ++i) {
889 retNode.SetOpnd(LowerExpr(retNode, *retNode.GetNopndAt(i), *blk), i);
890 }
891 BaseNode *opnd0 = retNode.Opnd(0);
892 if (!(opnd0 && opnd0->GetPrimType() == PTY_agg)) {
        /* It is possible that the function never returns and has a dummy constant return value instead of a struct. */
894 maple::LogInfo::MapleLogger(kLlWarn) << "return struct should have a kid" << std::endl;
895 }
896
897 MIRFunction *curFunc = GetCurrentFunc();
898 MIRSymbol *retSt = curFunc->GetFormal(0);
899 MIRPtrType *retTy = static_cast<MIRPtrType *>(retSt->GetType());
900 IassignNode *iassign = mirModule.CurFuncCodeMemPool()->New<IassignNode>();
901 iassign->SetTyIdx(retTy->GetTypeIndex());
902 DEBUG_ASSERT(opnd0 != nullptr, "opnd0 should not be nullptr");
903 if ((beCommon.GetTypeSize(retTy->GetPointedTyIdx().GetIdx()) <= k16ByteSize) && (opnd0->GetPrimType() == PTY_agg)) {
904 /* struct goes into register. */
905 curFunc->SetStructReturnedInRegs();
906 }
907 iassign->SetFieldID(0);
908 iassign->SetRHS(opnd0);
909 if (retSt->IsPreg()) {
910 RegreadNode *regNode = mirModule.GetMIRBuilder()->CreateExprRegread(
911 GetLoweredPtrType(), curFunc->GetPregTab()->GetPregIdxFromPregno(retSt->GetPreg()->GetPregNo()));
912 iassign->SetOpnd(regNode, 0);
913 } else {
914 AddrofNode *dreadNode = mirModule.CurFuncCodeMemPool()->New<AddrofNode>(OP_dread);
915 dreadNode->SetPrimType(GetLoweredPtrType());
916 dreadNode->SetStIdx(retSt->GetStIdx());
917 iassign->SetOpnd(dreadNode, 0);
918 }
919 blk->AddStatement(iassign);
920 retNode.GetNopnd().clear();
921 retNode.SetNumOpnds(0);
922 blk->AddStatement(&retNode);
923 return blk;
924 }
925
#endif /* TARGARM32 || TARGAARCH64 || TARGRISCV64 || TARGX86_64 */
927
BlockNode *CGLowerer::LowerReturn(NaryStmtNode &retNode)
929 {
930 BlockNode *blk = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
931 if (retNode.NumOpnds() != 0) {
932 BaseNode *expr = retNode.Opnd(0);
933 Opcode opr = expr->GetOpCode();
934 if (opr == OP_dread) {
935 AddrofNode *retExpr = static_cast<AddrofNode *>(expr);
936 MIRFunction *mirFunc = mirModule.CurFunction();
937 MIRSymbol *sym = mirFunc->GetLocalOrGlobalSymbol(retExpr->GetStIdx());
938 if (sym->GetAttr(ATTR_localrefvar)) {
939 mirFunc->InsertMIRSymbol(sym);
940 }
941 }
942 }
943 for (size_t i = 0; i < retNode.GetNopndSize(); ++i) {
944 retNode.SetOpnd(LowerExpr(retNode, *retNode.GetNopndAt(i), *blk), i);
945 }
946 blk->AddStatement(&retNode);
947 return blk;
948 }
949
StmtNode *CGLowerer::LowerDassignBitfield(DassignNode &dassign, BlockNode &newBlk)
951 {
952 dassign.SetRHS(LowerExpr(dassign, *dassign.GetRHS(), newBlk));
953 MIRSymbol *symbol = mirModule.CurFunction()->GetLocalOrGlobalSymbol(dassign.GetStIdx());
954 MIRStructType *structTy = static_cast<MIRStructType *>(symbol->GetType());
955 CHECK_FATAL(structTy != nullptr, "LowerDassignBitfield: non-zero fieldID for non-structure");
956 TyIdx fTyIdx = structTy->GetFieldTyIdx(dassign.GetFieldID());
957 CHECK_FATAL(fTyIdx != 0u, "LowerDassignBitField: field id out of range for the structure");
958 MIRType *fType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(TyIdx(fTyIdx));
959 if (fType->GetKind() != kTypeBitField) {
960 return &dassign;
961 }
962 auto *builder = mirModule.GetMIRBuilder();
963 auto *baseAddr = builder->CreateExprAddrof(0, dassign.GetStIdx());
964 auto byteBitOffsets = beCommon.GetFieldOffset(*structTy, dassign.GetFieldID());
965 return WriteBitField(byteBitOffsets, static_cast<MIRBitFieldType *>(fType), baseAddr, dassign.GetRHS(), &newBlk);
966 }
967
StmtNode *CGLowerer::LowerIassignBitfield(IassignNode &iassign, BlockNode &newBlk)
969 {
970 DEBUG_ASSERT(iassign.Opnd(0) != nullptr, "iassign.Opnd(0) should not be nullptr");
971 iassign.SetOpnd(LowerExpr(iassign, *iassign.Opnd(0), newBlk), 0);
972 iassign.SetRHS(LowerExpr(iassign, *iassign.GetRHS(), newBlk));
973
974 CHECK_FATAL(iassign.GetTyIdx() < GlobalTables::GetTypeTable().GetTypeTable().size(),
975 "LowerIassignBitField: subscript out of range");
976 uint32 index = iassign.GetTyIdx();
977 MIRPtrType *pointerTy = static_cast<MIRPtrType *>(GlobalTables::GetTypeTable().GetTypeFromTyIdx(index));
978 CHECK_FATAL(pointerTy != nullptr, "LowerIassignBitField: type in iassign should be pointer type");
979 MIRType *pointedTy = GlobalTables::GetTypeTable().GetTypeFromTyIdx(pointerTy->GetPointedTyIdx());
    /*
     * Here the pointed-to type can be Struct or JArray.
     * We should seriously consider making JArray a Struct type as well.
     */
984 MIRStructType *structTy = nullptr;
985 if (pointedTy->GetKind() != kTypeJArray) {
986 structTy = static_cast<MIRStructType *>(pointedTy);
987 } else {
        /* It is a JArray type; use its parent's (java.lang.Object) field info. */
989 structTy = static_cast<MIRJarrayType *>(pointedTy)->GetParentType();
990 }
991
992 TyIdx fTyIdx = structTy->GetFieldTyIdx(iassign.GetFieldID());
993 MIRType *fType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(TyIdx(fTyIdx));
994 if (fType->GetKind() != kTypeBitField) {
995 return &iassign;
996 }
997 auto byteBitOffsets = beCommon.GetFieldOffset(*structTy, iassign.GetFieldID());
998 auto *bitFieldType = static_cast<MIRBitFieldType *>(fType);
999 return WriteBitField(byteBitOffsets, bitFieldType, iassign.Opnd(0), iassign.GetRHS(), &newBlk);
1000 }
1001
void CGLowerer::LowerIassign(IassignNode &iassign, BlockNode &newBlk)
1003 {
1004 StmtNode *newStmt = nullptr;
1005 if (iassign.GetFieldID() != 0) {
1006 newStmt = LowerIassignBitfield(iassign, newBlk);
1007 } else {
1008 LowerStmt(iassign, newBlk);
1009 newStmt = &iassign;
1010 }
1011 newBlk.AddStatement(newStmt);
1012 }
1013
static GStrIdx NewAsmTempStrIdx()
1015 {
1016 static uint32 strIdxCount = 0; // to create unique temporary symbol names
1017 std::string asmTempStr("asm_tempvar");
1018 asmTempStr += std::to_string(++strIdxCount);
1019 return GlobalTables::GetStrTable().GetOrCreateStrIdxFromName(asmTempStr);
1020 }
1021
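// Lowers inline-asm operands: any operand that is still an expression tree is first assigned to a
// preg (at -O2 for non-aggregates) or a fresh asm_tempvar symbol, and the asm node reads the temporary.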
void CGLowerer::LowerAsmStmt(AsmNode *asmNode, BlockNode *newBlk)
1023 {
1024 for (size_t i = 0; i < asmNode->NumOpnds(); i++) {
1025 BaseNode *opnd = LowerExpr(*asmNode, *asmNode->Opnd(i), *newBlk);
1026 if (opnd->NumOpnds() == 0) {
1027 asmNode->SetOpnd(opnd, i);
1028 continue;
1029 }
1030 // introduce a temporary to store the expression tree operand
1031 TyIdx tyIdxUsed = static_cast<TyIdx>(opnd->GetPrimType());
1032 if (opnd->op == OP_iread) {
1033 IreadNode *ireadNode = static_cast<IreadNode *>(opnd);
1034 tyIdxUsed = ireadNode->GetType()->GetTypeIndex();
1035 }
1036 StmtNode *assignNode = nullptr;
1037 BaseNode *readOpnd = nullptr;
1038 PrimType type = GlobalTables::GetTypeTable().GetTypeFromTyIdx(tyIdxUsed)->GetPrimType();
1039 if ((type != PTY_agg) && CGOptions::GetInstance().GetOptimizeLevel() >= CGOptions::kLevel2) {
1040 PregIdx pregIdx = mirModule.CurFunction()->GetPregTab()->CreatePreg(type);
1041 assignNode = mirBuilder->CreateStmtRegassign(type, pregIdx, opnd);
1042 readOpnd = mirBuilder->CreateExprRegread(type, pregIdx);
1043 } else {
1044 MIRSymbol *st = mirModule.GetMIRBuilder()->CreateSymbol(tyIdxUsed, NewAsmTempStrIdx(), kStVar, kScAuto,
1045 mirModule.CurFunction(), kScopeLocal);
1046 assignNode = mirModule.GetMIRBuilder()->CreateStmtDassign(*st, 0, opnd);
1047 readOpnd = mirBuilder->CreateExprDread(*st);
1048 }
1049 newBlk->AddStatement(assignNode);
1050 asmNode->SetOpnd(readOpnd, i);
1051 }
1052 newBlk->AddStatement(asmNode);
1053 }
1054
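// Builds a dassign that copies the first return register (%%retval0) into the variable identified
// by stIdx/fieldID.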
DassignNode *CGLowerer::SaveReturnValueInLocal(StIdx stIdx, uint16 fieldID)
1056 {
1057 MIRSymbol *var;
1058 if (stIdx.IsGlobal()) {
1059 var = GlobalTables::GetGsymTable().GetSymbolFromStidx(stIdx.Idx());
1060 } else {
1061 var = GetCurrentFunc()->GetSymbolTabItem(stIdx.Idx());
1062 }
1063 CHECK_FATAL(var != nullptr, "var should not be nullptr");
1064 PrimType pType;
1065 if (var->GetAttr(ATTR_oneelem_simd)) {
1066 pType = PTY_f64;
1067 } else {
1068 pType = GlobalTables::GetTypeTable().GetTypeTable().at(var->GetTyIdx())->GetPrimType();
1069 }
1070 RegreadNode *regRead = mirModule.GetMIRBuilder()->CreateExprRegread(pType, -kSregRetval0);
1071 return mirModule.GetMIRBuilder()->CreateStmtDassign(*var, fieldID, regRead);
1072 }
1073
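// Floating-point rem is lowered to a library call, roughly (illustrative):
//   rem f32/f64 (x, y)  ==>  callassigned &fmodf/&fmod (x, y) { dassign %__iret<n> }; dread %__iret<n>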
BaseNode *CGLowerer::LowerRem(BaseNode &expr, BlockNode &blk)
1075 {
1076 auto &remExpr = static_cast<BinaryNode &>(expr);
1077 if (!IsPrimitiveFloat(remExpr.GetPrimType())) {
1078 return &expr;
1079 }
1080 ExtFuncT fmodFunc = remExpr.GetPrimType() == PTY_f32 ? kFmodFloat : kFmodDouble;
1081 uint32 i = 0;
1082 for (; i < extFuncs.size(); ++i) {
1083 if (extFuncs[i].first == fmodFunc) {
1084 break;
1085 }
1086 }
1087 CHECK_FATAL(i < extFuncs.size(), "rem expression primtype is not PTY_f32 nor PTY_f64.");
1088 MIRSymbol *ret =
1089 CreateNewRetVar(*GlobalTables::GetTypeTable().GetPrimType(remExpr.GetPrimType()), kIntrnRetValPrefix);
1090 MapleVector<BaseNode *> args(mirModule.GetMIRBuilder()->GetCurrentFuncCodeMpAllocator()->Adapter());
1091 args.emplace_back(remExpr.Opnd(0));
1092 args.emplace_back(remExpr.Opnd(1));
1093 CallNode *callStmt = mirModule.GetMIRBuilder()->CreateStmtCallAssigned(extFuncs[i].second, args, ret);
1094 blk.AppendStatementsFromBlock(*LowerCallAssignedStmt(*callStmt));
1095 MIRType *type = GlobalTables::GetTypeTable().GetPrimType(extFnDescrs[fmodFunc].retType);
1096 return mirModule.GetMIRBuilder()->CreateExprDread(*type, 0, *ret);
1097 }
1098
/* Lower call (including icall) and intrinsiccall statements. */
void CGLowerer::LowerCallStmt(StmtNode &stmt, StmtNode *&nextStmt, BlockNode &newBlk, MIRType *retty, bool uselvar,
                              bool isIntrinAssign)
1102 {
1103 StmtNode *newStmt = nullptr;
1104 if (stmt.GetOpCode() == OP_intrinsiccall) {
1105 auto &intrnNode = static_cast<IntrinsiccallNode &>(stmt);
1106 newStmt = LowerIntrinsiccall(intrnNode, newBlk);
1107 } else {
1108 /* We note the function has a user-defined (i.e., not an intrinsic) call. */
1109 GetCurrentFunc()->SetHasCall();
1110 newStmt = &stmt;
1111 }
1112
1113 if (newStmt == nullptr) {
1114 return;
1115 }
1116
1117 if (newStmt->GetOpCode() == OP_call || newStmt->GetOpCode() == OP_icall || newStmt->GetOpCode() == OP_icallproto) {
1118 newStmt = LowerCall(static_cast<CallNode &>(*newStmt), nextStmt, newBlk, retty, uselvar);
1119 }
1120 newStmt->SetSrcPos(stmt.GetSrcPos());
1121 newBlk.AddStatement(newStmt);
1122 if (CGOptions::GetInstance().GetOptimizeLevel() >= CGOptions::kLevel2 && stmt.GetOpCode() == OP_intrinsiccall) {
1123 /* Try to expand memset and memcpy call lowered from intrinsiccall */
1124 /* Skip expansion if call returns a value that is used later. */
1125 BlockNode *blkLowered = isIntrinAssign ? nullptr : LowerMemop(*newStmt);
1126 if (blkLowered != nullptr) {
1127 newBlk.RemoveStmt(newStmt);
1128 newBlk.AppendStatementsFromBlock(*blkLowered);
1129 }
1130 }
1131 }
1132
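// Rewrites a xxxcallassigned statement as the corresponding plain call node, preserving deopt
// bundle info, source position and statement attributes; funcCalled receives the callee's PUIdx.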
StmtNode *CGLowerer::GenCallNode(const StmtNode &stmt, PUIdx &funcCalled, CallNode &origCall)
1134 {
1135 CallNode *newCall = nullptr;
1136 if (stmt.GetOpCode() == OP_callassigned) {
1137 newCall = mirModule.GetMIRBuilder()->CreateStmtCall(origCall.GetPUIdx(), origCall.GetNopnd());
1138 } else if (stmt.GetOpCode() == OP_virtualcallassigned) {
1139 newCall = mirModule.GetMIRBuilder()->CreateStmtVirtualCall(origCall.GetPUIdx(), origCall.GetNopnd());
1140 } else if (stmt.GetOpCode() == OP_superclasscallassigned) {
1141 newCall = mirModule.GetMIRBuilder()->CreateStmtSuperclassCall(origCall.GetPUIdx(), origCall.GetNopnd());
1142 } else if (stmt.GetOpCode() == OP_interfacecallassigned) {
1143 newCall = mirModule.GetMIRBuilder()->CreateStmtInterfaceCall(origCall.GetPUIdx(), origCall.GetNopnd());
1144 }
    CHECK_FATAL(newCall != nullptr, "nullptr is not expected");
    newCall->SetDeoptBundleInfo(origCall.GetDeoptBundleInfo());
    newCall->SetSrcPos(stmt.GetSrcPos());
    funcCalled = origCall.GetPUIdx();
1149 CHECK_FATAL((newCall->GetOpCode() == OP_call || newCall->GetOpCode() == OP_interfacecall),
1150 "virtual call or super class call are not expected");
1151 if (newCall->GetOpCode() == OP_interfacecall) {
1152 std::cerr << "interfacecall found\n";
1153 }
1154 newCall->SetStmtAttrs(stmt.GetStmtAttrs());
1155 return newCall;
1156 }
1157
StmtNode *CGLowerer::GenIntrinsiccallNode(const StmtNode &stmt, PUIdx &funcCalled, bool &handledAtLowerLevel,
                                          IntrinsiccallNode &origCall)
1160 {
1161 StmtNode *newCall = nullptr;
1162 handledAtLowerLevel = IsIntrinsicCallHandledAtLowerLevel(origCall.GetIntrinsic());
1163 if (handledAtLowerLevel) {
1164 /* If the lower level can handle the intrinsic, just let it pass through. */
1165 newCall = &origCall;
1166 } else {
1167 PUIdx bFunc = GetBuiltinToUse(origCall.GetIntrinsic());
1168 if (bFunc != kFuncNotFound) {
1169 newCall = mirModule.GetMIRBuilder()->CreateStmtCall(bFunc, origCall.GetNopnd());
1170 } else {
1171 if (stmt.GetOpCode() == OP_intrinsiccallassigned) {
1172 newCall =
1173 mirModule.GetMIRBuilder()->CreateStmtIntrinsicCall(origCall.GetIntrinsic(), origCall.GetNopnd());
1174 } else if (stmt.GetOpCode() == OP_xintrinsiccallassigned) {
1175 newCall =
1176 mirModule.GetMIRBuilder()->CreateStmtXintrinsicCall(origCall.GetIntrinsic(), origCall.GetNopnd());
1177 } else {
1178 newCall = mirModule.GetMIRBuilder()->CreateStmtIntrinsicCall(origCall.GetIntrinsic(),
1179 origCall.GetNopnd(), origCall.GetTyIdx());
1180 }
1181 }
1182 newCall->SetSrcPos(stmt.GetSrcPos());
1183 funcCalled = bFunc;
1184 CHECK_FATAL((newCall->GetOpCode() == OP_call || newCall->GetOpCode() == OP_intrinsiccall),
1185 "xintrinsic and intrinsiccallwithtype call is not expected");
1186 }
1187 return newCall;
1188 }
1189
StmtNode *CGLowerer::GenIcallNode(PUIdx &funcCalled, IcallNode &origCall)
1191 {
1192 IcallNode *newCall = nullptr;
1193 if (origCall.GetOpCode() == OP_icallassigned) {
1194 newCall = mirModule.GetMIRBuilder()->CreateStmtIcall(origCall.GetNopnd());
1195 } else {
1196 newCall = mirModule.GetMIRBuilder()->CreateStmtIcallproto(origCall.GetNopnd());
1197 newCall->SetRetTyIdx(static_cast<IcallNode &>(origCall).GetRetTyIdx());
1198 }
    CHECK_FATAL(newCall != nullptr, "nullptr is not expected");
    newCall->SetDeoptBundleInfo(origCall.GetDeoptBundleInfo());
    newCall->SetStmtAttrs(origCall.GetStmtAttrs());
    newCall->SetSrcPos(origCall.GetSrcPos());
    funcCalled = kFuncNotFound;
1204 return newCall;
1205 }
1206
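// Wraps the lowered call in a new block and, unless the intrinsic is handled at a lower level,
// appends a dassign/regassign that saves %%retval0 into the destination recorded in p2nRets.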
BlockNode *CGLowerer::GenBlockNode(StmtNode &newCall, const CallReturnVector &p2nRets, const Opcode &opcode,
                                   const PUIdx &funcCalled, bool handledAtLowerLevel, bool uselvar)
1209 {
1210 BlockNode *blk = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
1211 blk->AddStatement(&newCall);
1212 if (!handledAtLowerLevel) {
1213 CHECK_FATAL(p2nRets.size() <= 1, "make sure p2nRets size <= 1");
1214 /* Create DassignStmt to save kSregRetval0. */
1215 StmtNode *dStmt = nullptr;
1216 MIRType *retType = nullptr;
1217 if (p2nRets.size() == 1) {
1218 MIRSymbol *sym = nullptr;
1219 StIdx stIdx = p2nRets[0].first;
1220 if (stIdx.IsGlobal()) {
1221 sym = GlobalTables::GetGsymTable().GetSymbolFromStidx(stIdx.Idx());
1222 } else {
1223 sym = GetCurrentFunc()->GetSymbolTabItem(stIdx.Idx());
1224 }
1225 bool sizeIs0 = false;
1226 if (sym != nullptr) {
1227 retType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(sym->GetTyIdx());
1228 if (beCommon.GetTypeSize(retType->GetTypeIndex().GetIdx()) == 0) {
1229 sizeIs0 = true;
1230 }
1231 }
1232 if (!sizeIs0) {
1233 RegFieldPair regFieldPair = p2nRets[0].second;
1234 if (!regFieldPair.IsReg()) {
1235 uint16 fieldID = static_cast<uint16>(regFieldPair.GetFieldID());
1236 DassignNode *dn = SaveReturnValueInLocal(stIdx, fieldID);
1237 CHECK_FATAL(dn->GetFieldID() == 0, "make sure dn's fieldID return 0");
1238 LowerDassign(*dn, *blk);
1239 CHECK_FATAL(&newCall == blk->GetLast() || newCall.GetNext() == blk->GetLast(), "");
1240 dStmt = (&newCall == blk->GetLast()) ? nullptr : blk->GetLast();
1241 CHECK_FATAL(newCall.GetNext() == dStmt, "make sure newCall's next equal dStmt");
1242 } else {
1243 PregIdx pregIdx = static_cast<PregIdx>(regFieldPair.GetPregIdx());
1244 MIRPreg *mirPreg = GetCurrentFunc()->GetPregTab()->PregFromPregIdx(pregIdx);
1245 bool is64x1vec = beCommon.CallIsOfAttr(FUNCATTR_oneelem_simd, &newCall);
1246 PrimType pType = is64x1vec ? PTY_f64 : mirPreg->GetPrimType();
1247 RegreadNode *regNode = mirModule.GetMIRBuilder()->CreateExprRegread(pType, -kSregRetval0);
1248 RegassignNode *regAssign;
1249 if (is64x1vec && IsPrimitiveInteger(mirPreg->GetPrimType())) { // not f64
1250 MIRType *to;
1251 if (IsUnsignedInteger(mirPreg->GetPrimType())) {
1252 to = GlobalTables::GetTypeTable().GetUInt64();
1253 } else {
1254 to = GlobalTables::GetTypeTable().GetInt64();
1255 }
1256 MIRType *from = GlobalTables::GetTypeTable().GetDouble();
1257 BaseNode *rNode = mirModule.GetMIRBuilder()->CreateExprRetype(*to, *from, regNode);
1258 regAssign = mirModule.GetMIRBuilder()->CreateStmtRegassign(mirPreg->GetPrimType(),
1259 regFieldPair.GetPregIdx(), rNode);
1260 } else {
1261 regAssign = mirModule.GetMIRBuilder()->CreateStmtRegassign(mirPreg->GetPrimType(),
1262 regFieldPair.GetPregIdx(), regNode);
1263 }
1264 blk->AddStatement(regAssign);
1265 dStmt = regAssign;
1266 }
1267 }
1268 }
1269 blk->ResetBlock();
1270 /* if VerboseCG, insert a comment */
1271 if (ShouldAddAdditionalComment()) {
1272 CommentNode *cmnt = mirModule.CurFuncCodeMemPool()->New<CommentNode>(mirModule);
1273 cmnt->SetComment(kOpcodeInfo.GetName(opcode).c_str());
1274 if (funcCalled == kFuncNotFound) {
1275 cmnt->Append(" : unknown");
1276 } else {
1277 cmnt->Append(" : ");
1278 cmnt->Append(GlobalTables::GetFunctionTable().GetFunctionFromPuidx(funcCalled)->GetName());
1279 }
1280 blk->AddStatement(cmnt);
1281 }
1282 CHECK_FATAL(dStmt == nullptr || dStmt->GetNext() == nullptr, "make sure dStmt or dStmt's next is nullptr");
1283 LowerCallStmt(newCall, dStmt, *blk, retType, uselvar, opcode == OP_intrinsiccallassigned);
1284 if (!uselvar && dStmt != nullptr) {
1285 dStmt->SetSrcPos(newCall.GetSrcPos());
1286 blk->AddStatement(dStmt);
1287 }
1288 }
1289 return blk;
1290 }
1291
1292 // Try to expand memset/memcpy-like calls into inline code; returns nullptr if the call cannot be simplified.
1293 BlockNode *CGLowerer::LowerMemop(StmtNode &stmt)
1294 {
1295 auto memOpKind = SimplifyMemOp::ComputeMemOpKind(stmt);
1296 if (memOpKind == MEM_OP_unknown) {
1297 return nullptr;
1298 }
1299 auto *prev = stmt.GetPrev();
1300 auto *next = stmt.GetNext();
1301 auto *blk = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
1302 blk->AddStatement(&stmt);
1303 uint32 oldTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
1304 bool success = simplifyMemOp.AutoSimplify(stmt, *blk, true);
1305 uint32 newTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
1306 if (newTypeTableSize != oldTypeTableSize) {
1307 beCommon.AddNewTypeAfterBecommon(oldTypeTableSize, newTypeTableSize);
1308 }
1309 stmt.SetPrev(prev);
1310 stmt.SetNext(next); // recover callStmt's position
1311 if (!success) {
1312 return nullptr;
1313 }
1314 // lower new generated stmts
1315 auto *currStmt = blk->GetFirst();
1316 while (currStmt != nullptr) {
1317 auto *nextStmt = currStmt->GetNext();
1318 for (uint32 i = 0; i < currStmt->NumOpnds(); ++i) {
1319 currStmt->SetOpnd(LowerExpr(*currStmt, *currStmt->Opnd(i), *blk), i);
1320 }
1321 currStmt = nextStmt;
1322 }
1323 return blk;
1324 }
1325
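// Rewrites an intrinsiccallassigned as an ordinary assignment whose RHS is the equivalent
// intrinsicop expression: a regassign when the single return target is a preg, a dassign
// otherwise. The resulting block is then lowered again via LowerBlock. Used for atomic
// intrinsics (see LowerCallAssignedStmt).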
1326 BlockNode *CGLowerer::LowerIntrinsiccallAassignedToAssignStmt(IntrinsiccallNode &intrinsicCall)
1327 {
1328 auto *builder = mirModule.GetMIRBuilder();
1329 auto *block = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
1330 auto intrinsicID = intrinsicCall.GetIntrinsic();
1331 auto &opndVector = intrinsicCall.GetNopnd();
1332 auto returnPair = intrinsicCall.GetReturnVec().begin();
1333 auto regFieldPair = returnPair->second;
1334 if (regFieldPair.IsReg()) {
1335 auto regIdx = regFieldPair.GetPregIdx();
1336 auto primType = mirModule.CurFunction()->GetPregItem(static_cast<PregIdx>(regIdx))->GetPrimType();
1337 auto intrinsicOp = builder->CreateExprIntrinsicop(intrinsicID, OP_intrinsicop, primType, TyIdx(0), opndVector);
1338 auto regAssign = builder->CreateStmtRegassign(primType, regIdx, intrinsicOp);
1339 block->AddStatement(regAssign);
1340 } else {
1341 auto fieldID = regFieldPair.GetFieldID();
1342 auto stIdx = returnPair->first;
1343 auto *type = mirModule.CurFunction()->GetLocalOrGlobalSymbol(stIdx)->GetType();
1344 auto intrinsicOp = builder->CreateExprIntrinsicop(intrinsicID, OP_intrinsicop, *type, opndVector);
1345 auto dAssign = builder->CreateStmtDassign(stIdx, fieldID, intrinsicOp);
1346 block->AddStatement(dAssign);
1347 }
1348 return LowerBlock(*block);
1349 }
1350
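// Lowers the *assigned call forms: the original statement is rebuilt as the corresponding
// non-assigned call/icall/intrinsiccall (GenCallNode/GenIcallNode/GenIntrinsiccallNode), the
// original return vector is preserved, and GenBlockNode appends the statements that move
// %%retval into the requested targets. At -O2, calls recognized as memset/memcpy are expanded
// by LowerMemop instead.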
1351 BlockNode *CGLowerer::LowerCallAssignedStmt(StmtNode &stmt, bool uselvar)
1352 {
1353 StmtNode *newCall = nullptr;
1354 CallReturnVector *p2nRets = nullptr;
1355 PUIdx funcCalled = kFuncNotFound;
1356 bool handledAtLowerLevel = false;
1357 switch (stmt.GetOpCode()) {
1358 case OP_callassigned:
1359 case OP_virtualcallassigned:
1360 case OP_superclasscallassigned:
1361 case OP_interfacecallassigned: {
1362 if (CGOptions::GetInstance().GetOptimizeLevel() >= CGOptions::kLevel2) {
1363 BlockNode *blkLowered = LowerMemop(stmt);
1364 if (blkLowered != nullptr) {
1365 return blkLowered;
1366 }
1367 }
1368 auto &origCall = static_cast<CallNode &>(stmt);
1369 newCall = GenCallNode(stmt, funcCalled, origCall);
1370 p2nRets = &origCall.GetReturnVec();
1371 static_cast<CallNode *>(newCall)->SetReturnVec(*p2nRets);
1372 MIRFunction *curFunc = mirModule.CurFunction();
1373 curFunc->SetLastFreqMap(newCall->GetStmtID(),
1374 static_cast<uint32>(curFunc->GetFreqFromLastStmt(stmt.GetStmtID())));
1375 break;
1376 }
1377 case OP_intrinsiccallassigned:
1378 case OP_xintrinsiccallassigned: {
1379 IntrinsiccallNode &intrincall = static_cast<IntrinsiccallNode &>(stmt);
1380 auto intrinsicID = intrincall.GetIntrinsic();
1381 if (IntrinDesc::intrinTable[intrinsicID].IsAtomic()) {
1382 return LowerIntrinsiccallAassignedToAssignStmt(intrincall);
1383 }
1384 if (intrinsicID == INTRN_JAVA_POLYMORPHIC_CALL) {
1385 BaseNode *contextClassArg = GetBaseNodeFromCurFunc(*mirModule.CurFunction(), false);
1386 constexpr int kContextIdx = 4; /* stable index in MCC_DexPolymorphicCall, never out of range */
1387 intrincall.InsertOpnd(contextClassArg, kContextIdx);
1388
1389 BaseNode *firstArg = intrincall.GetNopndAt(0);
1390 BaseNode *baseVal = mirBuilder->CreateExprBinary(OP_add, *GlobalTables::GetTypeTable().GetPtr(),
1391 firstArg, mirBuilder->CreateIntConst(1, PTY_ref));
1392 intrincall.SetNOpndAt(0, baseVal);
1393 }
1394 newCall = GenIntrinsiccallNode(stmt, funcCalled, handledAtLowerLevel, intrincall);
1395 p2nRets = &intrincall.GetReturnVec();
1396 static_cast<IntrinsiccallNode *>(newCall)->SetReturnVec(*p2nRets);
1397 break;
1398 }
1399 case OP_intrinsiccallwithtypeassigned: {
1400 auto &origCall = static_cast<IntrinsiccallNode &>(stmt);
1401 newCall = GenIntrinsiccallNode(stmt, funcCalled, handledAtLowerLevel, origCall);
1402 p2nRets = &origCall.GetReturnVec();
1403 static_cast<IntrinsiccallNode *>(newCall)->SetReturnVec(*p2nRets);
1404 break;
1405 }
1406 case OP_icallprotoassigned:
1407 case OP_icallassigned: {
1408 auto &origCall = static_cast<IcallNode &>(stmt);
1409 newCall = GenIcallNode(funcCalled, origCall);
1410 p2nRets = &origCall.GetReturnVec();
1411 static_cast<IcallNode *>(newCall)->SetReturnVec(*p2nRets);
1412 break;
1413 }
1414 default:
1415 CHECK_FATAL(false, "NIY");
1416 return nullptr;
1417 }
1418
1419 /* transfer srcPosition location info */
1420 newCall->SetSrcPos(stmt.GetSrcPos());
1421 return GenBlockNode(*newCall, *p2nRets, stmt.GetOpCode(), funcCalled, handledAtLowerLevel, uselvar);
1422 }
1423
1424 #if TARGAARCH64
1425 static PrimType IsStructElementSame(MIRType *ty)
1426 {
1427 if (ty->GetKind() == kTypeArray) {
1428 MIRArrayType *arrtype = static_cast<MIRArrayType *>(ty);
1429 MIRType *pty = GlobalTables::GetTypeTable().GetTypeFromTyIdx(arrtype->GetElemTyIdx());
1430 if (pty->GetKind() == kTypeArray || pty->GetKind() == kTypeStruct) {
1431 return IsStructElementSame(pty);
1432 }
1433 return pty->GetPrimType();
1434 } else if (ty->GetKind() == kTypeStruct) {
1435 MIRStructType *sttype = static_cast<MIRStructType *>(ty);
1436 FieldVector fields = sttype->GetFields();
1437 PrimType oldtype = PTY_void;
1438 for (uint32 fcnt = 0; fcnt < fields.size(); ++fcnt) {
1439 TyIdx fieldtyidx = fields[fcnt].second.first;
1440 MIRType *fieldty = GlobalTables::GetTypeTable().GetTypeFromTyIdx(fieldtyidx);
1441 PrimType ptype = IsStructElementSame(fieldty);
1442 if (oldtype != PTY_void && oldtype != ptype) {
1443 return PTY_void;
1444 } else {
1445 oldtype = ptype;
1446 }
1447 }
1448 return oldtype;
1449 } else {
1450 return ty->GetPrimType();
1451 }
1452 }
1453 #endif
1454
1455 // return true if successfully lowered; nextStmt is in/out, and is made to point
1456 // to its following statement if lowering of the struct return is successful
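// Rough sketch of the two cases handled below (assuming an AArch64-style struct-return ABI,
// cf. the TARGAARCH64 guards):
//  - aggregate > 16 bytes: the dassign target becomes the hidden result location, the callee
//    is marked firstarg_return, and the caller passes its address (uselvar/lvar = true);
//  - aggregate <= 16 bytes: the call is re-emitted in non-assigned form and %%retval0/%%retval1
//    are written back into the destination with 8/4/2/1-byte iassigns.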
1457 bool CGLowerer::LowerStructReturn(BlockNode &newBlk, StmtNode *stmt, StmtNode *&nextStmt, bool &lvar, BlockNode *oldBlk)
1458 {
1459 if (!nextStmt) {
1460 return false;
1461 }
1462 CallReturnVector *p2nrets = stmt->GetCallReturnVector();
1463 if (p2nrets->size() == 0) {
1464 return false;
1465 }
1466 CallReturnPair retPair = (*p2nrets)[0];
1467 if (retPair.second.IsReg()) {
1468 return false;
1469 }
1470 MIRSymbol *retSym = mirModule.CurFunction()->GetLocalOrGlobalSymbol(retPair.first);
1471 if (retSym->GetType()->GetPrimType() != PTY_agg) {
1472 return false;
1473 }
1474 if (nextStmt->op != OP_dassign) {
1475 // introduce a temporary and insert a dassign whose rhs is this temporary
1476 // and whose lhs is retSym
1477 MIRSymbol *temp = CreateNewRetVar(*retSym->GetType(), kUserRetValPrefix);
1478 BaseNode *rhs = mirModule.GetMIRBuilder()->CreateExprDread(*temp->GetType(), 0, *temp);
1479 DassignNode *dass =
1480 mirModule.GetMIRBuilder()->CreateStmtDassign(retPair.first, retPair.second.GetFieldID(), rhs);
1481 oldBlk->InsertBefore(nextStmt, dass);
1482 nextStmt = dass;
1483 // update CallReturnVector to the new temporary
1484 (*p2nrets)[0].first = temp->GetStIdx();
1485 (*p2nrets)[0].second.SetFieldID(0);
1486 }
1487 // now, it is certain that nextStmt is a dassign
1488 BaseNode *bnode = static_cast<DassignNode *>(nextStmt)->GetRHS();
1489 if (bnode->GetOpCode() != OP_dread) {
1490 return false;
1491 }
1492 DreadNode *dnode = static_cast<DreadNode *>(bnode);
1493 MIRType *dtype = mirModule.CurFunction()->GetLocalOrGlobalSymbol(dnode->GetStIdx())->GetType();
1494 #if TARGAARCH64
1495 PrimType ty = IsStructElementSame(dtype);
1496 if (ty == PTY_f32 || ty == PTY_f64 || IsPrimitiveVector(ty)) {
1497 return false;
1498 }
1499 #endif
1500 if (dnode->GetPrimType() != PTY_agg) {
1501 return false;
1502 }
1503 CallReturnPair pair = (*p2nrets)[0];
1504 if (pair.first != dnode->GetStIdx() || pair.second.GetFieldID() != dnode->GetFieldID()) {
1505 return false;
1506 }
1507 auto *dnodeStmt = static_cast<DassignNode *>(nextStmt);
1508 if (dnodeStmt->GetFieldID() != 0) {
1509 return false;
1510 }
1511 if (dtype->GetSize() > k16ByteSize) {
1512 (*p2nrets)[0].first = dnodeStmt->GetStIdx();
1513 (*p2nrets)[0].second.SetFieldID(dnodeStmt->GetFieldID());
1514 lvar = true;
1515 // set ATTR_firstarg_return for callee
1516 if (stmt->GetOpCode() == OP_callassigned) {
1517 CallNode *callNode = static_cast<CallNode *>(stmt);
1518 MIRFunction *f = GlobalTables::GetFunctionTable().GetFunctionFromPuidx(callNode->GetPUIdx());
1519 f->SetFirstArgReturn();
1520 f->GetMIRFuncType()->SetFirstArgReturn();
1521 } else {
1522 // for icall, front-end already set ATTR_firstarg_return
1523 }
1524 } else { /* struct <= 16 passed in regs lowered into
1525 call &foo
1526 regassign u64 %1 (regread u64 %%retval0)
1527 regassign ptr %2 (addrof ptr $s)
1528 iassign <* u64> 0 (regread ptr %2, regread u64 %1) */
1529 MIRSymbol *symbol = mirModule.CurFunction()->GetLocalOrGlobalSymbol(dnodeStmt->GetStIdx());
1530 auto *structType = static_cast<MIRStructType *>(symbol->GetType());
1531 auto size = static_cast<uint32>(structType->GetSize());
1532 if (stmt->GetOpCode() == OP_callassigned) {
1533 auto *callNode = static_cast<CallNode *>(stmt);
1534 for (size_t i = 0; i < callNode->GetNopndSize(); ++i) {
1535 BaseNode *newOpnd = LowerExpr(*callNode, *callNode->GetNopndAt(i), newBlk);
1536 callNode->SetOpnd(newOpnd, i);
1537 }
1538 CallNode *callStmt = mirModule.GetMIRBuilder()->CreateStmtCall(callNode->GetPUIdx(), callNode->GetNopnd());
1539 callStmt->SetSrcPos(callNode->GetSrcPos());
1540 newBlk.AddStatement(callStmt);
1541 } else if (stmt->GetOpCode() == OP_icallassigned || stmt->GetOpCode() == OP_icallprotoassigned) {
1542 auto *icallNode = static_cast<IcallNode *>(stmt);
1543 for (size_t i = 0; i < icallNode->GetNopndSize(); ++i) {
1544 BaseNode *newOpnd = LowerExpr(*icallNode, *icallNode->GetNopndAt(i), newBlk);
1545 icallNode->SetOpnd(newOpnd, i);
1546 }
1547 IcallNode *icallStmt = nullptr;
1548 if (stmt->GetOpCode() == OP_icallassigned) {
1549 icallStmt = mirModule.GetMIRBuilder()->CreateStmtIcall(icallNode->GetNopnd());
1550 } else {
1551 icallStmt = mirModule.GetMIRBuilder()->CreateStmtIcallproto(icallNode->GetNopnd());
1552 icallStmt->SetRetTyIdx(icallNode->GetRetTyIdx());
1553 }
1554 icallStmt->SetSrcPos(icallNode->GetSrcPos());
1555 newBlk.AddStatement(icallStmt);
1556 } else {
1557 return false;
1558 }
1559
1560 uint32 origSize = size;
1561 PregIdx pIdxR, pIdx1R, pIdx2R;
1562 StmtNode *aStmt = nullptr;
1563 RegreadNode *reg = nullptr;
1564
1565 /* save x0 */
1566 reg = mirBuilder->CreateExprRegread(PTY_u64, -kSregRetval0);
1567 pIdx1R = GetCurrentFunc()->GetPregTab()->CreatePreg(PTY_u64);
1568 aStmt = mirBuilder->CreateStmtRegassign(PTY_u64, pIdx1R, reg);
1569 newBlk.AddStatement(aStmt);
1570
1571 /* save x1 */
1572 if (origSize > k8ByteSize) {
1573 reg = mirBuilder->CreateExprRegread(PTY_u64, -kSregRetval1);
1574 pIdx2R = GetCurrentFunc()->GetPregTab()->CreatePreg(PTY_u64);
1575 aStmt = mirBuilder->CreateStmtRegassign(PTY_u64, pIdx2R, reg);
1576 newBlk.AddStatement(aStmt);
1577 }
1578
1579 /* save &s */
1580 BaseNode *regAddr = mirBuilder->CreateExprAddrof(0, *symbol);
1581 PregIdx pIdxL = GetCurrentFunc()->GetPregTab()->CreatePreg(GetLoweredPtrType());
1582 aStmt = mirBuilder->CreateStmtRegassign(PTY_a64, pIdxL, regAddr);
1583 newBlk.AddStatement(aStmt);
1584
1585 uint32 curSize = 0;
1586 PregIdx pIdxS;
1587 while (size) {
1588 pIdxR = pIdx1R;
1589 if (curSize >= k8ByteSize) {
1590 pIdxR = pIdx2R;
1591 }
1592 BaseNode *addr;
1593 BaseNode *shift;
1594 BaseNode *regreadExp;
1595 if (origSize != size) {
1596 MIRType *addrType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(GetLoweredPtrType());
1597 addr = mirBuilder->CreateExprBinary(OP_add, *addrType,
1598 mirBuilder->CreateExprRegread(GetLoweredPtrType(), pIdxL),
1599 mirBuilder->CreateIntConst(origSize - size, PTY_i32));
1600 } else {
1601 addr = mirBuilder->CreateExprRegread(GetLoweredPtrType(), pIdxL);
1602 }
1603 if (size >= k8ByteSize) {
1604 aStmt = mirBuilder->CreateStmtIassign(
1605 *beCommon.BeGetOrCreatePointerType(*GlobalTables::GetTypeTable().GetUInt64()), 0, addr,
1606 mirBuilder->CreateExprRegread(PTY_u64, pIdxR));
1607 size -= k8ByteSize;
1608 curSize += k8ByteSize;
1609 } else if (size >= k4ByteSize) {
1610 MIRType *type = GlobalTables::GetTypeTable().GetTypeFromTyIdx(PTY_u64);
1611
1612 if (CGOptions::IsBigEndian()) {
1613 regreadExp =
1614 mirBuilder->CreateExprBinary(OP_lshr, *type, mirBuilder->CreateExprRegread(PTY_u64, pIdxR),
1615 mirBuilder->CreateIntConst(k64BitSize - k32BitSize, PTY_i32));
1616 } else {
1617 regreadExp = mirBuilder->CreateExprRegread(PTY_u32, pIdxR);
1618 }
1619
1620 aStmt = mirBuilder->CreateStmtIassign(
1621 *beCommon.BeGetOrCreatePointerType(*GlobalTables::GetTypeTable().GetUInt32()), 0, addr, regreadExp);
1622
1623 if (CGOptions::IsBigEndian()) {
1624 shift = mirBuilder->CreateExprBinary(OP_shl, *type, mirBuilder->CreateExprRegread(PTY_u64, pIdxR),
1625 mirBuilder->CreateIntConst(k32BitSize, PTY_i32));
1626 } else {
1627 shift = mirBuilder->CreateExprBinary(OP_lshr, *type, mirBuilder->CreateExprRegread(PTY_u64, pIdxR),
1628 mirBuilder->CreateIntConst(k32BitSize, PTY_i32));
1629 }
1630
1631 pIdxS = GetCurrentFunc()->GetPregTab()->CreatePreg(PTY_u64);
1632 StmtNode *sStmp = mirBuilder->CreateStmtRegassign(PTY_u64, pIdxS, shift);
1633
1634 pIdx1R = pIdx2R = pIdxS;
1635 newBlk.AddStatement(sStmp);
1636 size -= k4ByteSize;
1637 curSize += k4ByteSize;
1638 } else if (size >= k2ByteSize) {
1639 MIRType *type = GlobalTables::GetTypeTable().GetTypeFromTyIdx(PTY_u64);
1640
1641 if (CGOptions::IsBigEndian()) {
1642 regreadExp =
1643 mirBuilder->CreateExprBinary(OP_lshr, *type, mirBuilder->CreateExprRegread(PTY_u64, pIdxR),
1644 mirBuilder->CreateIntConst(k64BitSize - k16BitSize, PTY_i32));
1645 } else {
1646 regreadExp = mirBuilder->CreateExprRegread(PTY_u16, pIdxR);
1647 }
1648
1649 aStmt = mirBuilder->CreateStmtIassign(
1650 *beCommon.BeGetOrCreatePointerType(*GlobalTables::GetTypeTable().GetUInt16()), 0, addr, regreadExp);
1651
1652 if (CGOptions::IsBigEndian()) {
1653 shift = mirBuilder->CreateExprBinary(OP_shl, *type, mirBuilder->CreateExprRegread(PTY_u64, pIdxR),
1654 mirBuilder->CreateIntConst(k64BitSize - k16BitSize, PTY_i32));
1655 } else {
1656 shift = mirBuilder->CreateExprBinary(OP_lshr, *type, mirBuilder->CreateExprRegread(PTY_u64, pIdxR),
1657 mirBuilder->CreateIntConst(k16BitSize, PTY_i32));
1658 }
1659
1660 pIdxS = GetCurrentFunc()->GetPregTab()->CreatePreg(PTY_u64);
1661 StmtNode *sStmp = mirBuilder->CreateStmtRegassign(PTY_u64, pIdxS, shift);
1662
1663 pIdx1R = pIdx2R = pIdxS;
1664 newBlk.AddStatement(sStmp);
1665 size -= k2ByteSize;
1666 curSize += k2ByteSize;
1667 } else {
1668 MIRType *type = GlobalTables::GetTypeTable().GetTypeFromTyIdx(PTY_u64);
1669
1670 if (CGOptions::IsBigEndian()) {
1671 regreadExp =
1672 mirBuilder->CreateExprBinary(OP_lshr, *type, mirBuilder->CreateExprRegread(PTY_u64, pIdxR),
1673 mirBuilder->CreateIntConst(k64BitSize - k8BitSize, PTY_i32));
1674 } else {
1675 regreadExp = mirBuilder->CreateExprRegread(PTY_u8, pIdxR);
1676 }
1677
1678 aStmt = mirBuilder->CreateStmtIassign(
1679 *beCommon.BeGetOrCreatePointerType(*GlobalTables::GetTypeTable().GetUInt8()), 0, addr, regreadExp);
1680
1681 if (CGOptions::IsBigEndian()) {
1682 shift = mirBuilder->CreateExprBinary(OP_shl, *type, mirBuilder->CreateExprRegread(PTY_u64, pIdxR),
1683 mirBuilder->CreateIntConst(k64BitSize - k8BitSize, PTY_i32));
1684 } else {
1685 shift = mirBuilder->CreateExprBinary(OP_lshr, *type, mirBuilder->CreateExprRegread(PTY_u64, pIdxR),
1686 mirBuilder->CreateIntConst(k8BitSize, PTY_i32));
1687 }
1688
1689 pIdxS = GetCurrentFunc()->GetPregTab()->CreatePreg(PTY_u64);
1690 StmtNode *sStmp = mirBuilder->CreateStmtRegassign(PTY_u64, pIdxS, shift);
1691
1692 pIdx1R = pIdx2R = pIdxS;
1693 newBlk.AddStatement(sStmp);
1694 size -= k1ByteSize;
1695 curSize += k1ByteSize;
1696 }
1697 newBlk.AddStatement(aStmt);
1698 }
1699 }
1700 nextStmt = nextStmt->GetNext(); // skip the dassign
1701 return true;
1702 }
1703
1704 void CGLowerer::LowerStmt(StmtNode &stmt, BlockNode &newBlk)
1705 {
1706 for (size_t i = 0; i < stmt.NumOpnds(); ++i) {
1707 stmt.SetOpnd(LowerExpr(stmt, *stmt.Opnd(i), newBlk), i);
1708 }
1709 }
1710
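// At -O2 the switch operand is evaluated once into a fresh preg and the switch reads that preg,
// so the operand expression is not re-evaluated by the tests the switch lowerer generates.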
1711 void CGLowerer::LowerSwitchOpnd(StmtNode &stmt, BlockNode &newBlk)
1712 {
1713 BaseNode *opnd = LowerExpr(stmt, *stmt.Opnd(0), newBlk);
1714 if (CGOptions::GetInstance().GetOptimizeLevel() >= CGOptions::kLevel2 && opnd->GetOpCode() != OP_regread) {
1715 PrimType ptyp = stmt.Opnd(0)->GetPrimType();
1716 PregIdx pIdx = GetCurrentFunc()->GetPregTab()->CreatePreg(ptyp);
1717 RegassignNode *regAss = mirBuilder->CreateStmtRegassign(ptyp, pIdx, opnd);
1718 newBlk.AddStatement(regAss);
1719 GetCurrentFunc()->SetLastFreqMap(regAss->GetStmtID(),
1720 static_cast<uint32>(GetCurrentFunc()->GetFreqFromLastStmt(stmt.GetStmtID())));
1721 stmt.SetOpnd(mirBuilder->CreateExprRegread(ptyp, pIdx), 0);
1722 } else {
1723 stmt.SetOpnd(LowerExpr(stmt, *stmt.Opnd(0), newBlk), 0);
1724 }
1725 }
1726
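// Appends 'num' trailing BaseNode* arguments to the printf argument vector assembled for the
// boundary-check diagnostics.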
1727 void CGLowerer::AddElemToPrintf(MapleVector<BaseNode *> &argsPrintf, int num, ...) const
1728 {
1729 va_list argPtr;
1730 va_start(argPtr, num);
1731 for (int i = 0; i < num; ++i) {
1732 argsPrintf.push_back(va_arg(argPtr, BaseNode *));
1733 }
1734 va_end(argPtr);
1735 }
1736
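// Chooses the diagnostic format string for a failing boundary check and pushes the printf
// arguments: the message, the file-name symbol and the line number, plus the callee name and
// parameter index for callassertle / returnassertle.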
1737 void CGLowerer::SwitchAssertBoundary(StmtNode &stmt, MapleVector<BaseNode *> &argsPrintf)
1738 {
1739 MIRSymbol *errMsg;
1740 MIRSymbol *fileNameSym;
1741 ConstvalNode *lineNum;
1742 fileNameSym = mirBuilder->CreateConstStringSymbol(GetFileNameSymbolName(AssertBoundaryGetFileName(stmt)),
1743 AssertBoundaryGetFileName(stmt));
1744 lineNum = mirBuilder->CreateIntConst(stmt.GetSrcPos().LineNum(), PTY_u32);
1745 if (kOpcodeInfo.IsAssertLowerBoundary(stmt.GetOpCode())) {
1746 errMsg = mirBuilder->CreateConstStringSymbol(
1747 kOpAssertge, "%s:%d error: the pointer < the lower bounds when accessing the memory!\n");
1748 AddElemToPrintf(argsPrintf, 3 /* 3 parameters follow */, mirBuilder->CreateAddrof(*errMsg, PTY_a64),
1749 mirBuilder->CreateAddrof(*fileNameSym, PTY_a64), lineNum);
1750 } else {
1751 if (kOpcodeInfo.IsAssertLeBoundary(stmt.GetOpCode())) {
1752 if (stmt.GetOpCode() == OP_callassertle) {
1753 auto &callStmt = static_cast<CallAssertBoundaryStmtNode &>(stmt);
1754 std::string param;
1755 MIRSymbol *funcName;
1756 MIRSymbol *paramNum;
1757 param = maple::GetNthStr(callStmt.GetParamIndex());
1758 errMsg = mirBuilder->CreateConstStringSymbol(kOpCallAssertle,
1759 "%s:%d error: the pointer's bounds does not match the "
1760 "function %s declaration for the %s argument!\n");
1761 funcName = mirBuilder->CreateConstStringSymbol(callStmt.GetFuncName() + kOpCallAssertle,
1762 callStmt.GetFuncName());
1763 paramNum = mirBuilder->CreateConstStringSymbol(kOpCallAssertle + param, param);
1764 AddElemToPrintf(argsPrintf, 5 /* 5 parameters follow */, mirBuilder->CreateAddrof(*errMsg, PTY_a64),
1765 mirBuilder->CreateAddrof(*fileNameSym, PTY_a64), lineNum,
1766 mirBuilder->CreateAddrof(*funcName, PTY_a64),
1767 mirBuilder->CreateAddrof(*paramNum, PTY_a64));
1768 } else if (stmt.GetOpCode() == OP_returnassertle) {
1769 auto &callStmt = static_cast<CallAssertBoundaryStmtNode &>(stmt);
1770 MIRSymbol *funcName;
1771 errMsg = mirBuilder->CreateConstStringSymbol(
1772 kOpReturnAssertle,
1773 "%s:%d error: return value's bounds does not match the function declaration for %s\n");
1774 funcName = mirBuilder->CreateConstStringSymbol(callStmt.GetFuncName() + kOpReturnAssertle,
1775 callStmt.GetFuncName());
1776 AddElemToPrintf(argsPrintf, 4 /* 4 parameters follow */, mirBuilder->CreateAddrof(*errMsg, PTY_a64),
1777 mirBuilder->CreateAddrof(*fileNameSym, PTY_a64), lineNum,
1778 mirBuilder->CreateAddrof(*funcName, PTY_a64));
1779 } else {
1780 errMsg = mirBuilder->CreateConstStringSymbol(
1781 kOpAssignAssertle, "%s:%d error: l-value boundary should not be larger than r-value boundary!\n");
1782 AddElemToPrintf(argsPrintf, 3 /* 3 parameters follow */, mirBuilder->CreateAddrof(*errMsg, PTY_a64),
1783 mirBuilder->CreateAddrof(*fileNameSym, PTY_a64), lineNum);
1784 }
1785 } else {
1786 errMsg = mirBuilder->CreateConstStringSymbol(
1787 kOpAssertlt, "%s:%d error: the pointer >= the upper bounds when accessing the memory!\n");
1788 AddElemToPrintf(argsPrintf, 3 /* 3 parameters follow */, mirBuilder->CreateAddrof(*errMsg, PTY_a64),
1789 mirBuilder->CreateAddrof(*fileNameSym, PTY_a64), lineNum);
1790 }
1791 }
1792 }
1793
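// Lowers an assert-boundary statement into a conditional branch over a diagnostic sequence:
// the check becomes "brfalse (cmp op0, op1) @__label_BC_n" on the fall-through path, while the
// label, the printf call built by SwitchAssertBoundary and the abort are collected in abortNode
// and appended at the end of the enclosing block by LowerBlock.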
1794 void CGLowerer::LowerAssertBoundary(StmtNode &stmt, BlockNode &block, BlockNode &newBlk,
1795 std::vector<StmtNode *> &abortNode)
1796 {
1797 MIRFunction *curFunc = mirModule.CurFunction();
1798 BaseNode *op0 = LowerExpr(stmt, *stmt.Opnd(0), block);
1799 BaseNode *op1 = LowerExpr(stmt, *stmt.Opnd(1), block);
1800 LabelIdx labIdx = GetLabelIdx(*curFunc);
1801 LabelNode *labelBC = mirBuilder->CreateStmtLabel(labIdx);
1802 Opcode op = OP_ge;
1803 if (kOpcodeInfo.IsAssertUpperBoundary(stmt.GetOpCode())) {
1804 op = (kOpcodeInfo.IsAssertLeBoundary(stmt.GetOpCode())) ? OP_le : OP_lt;
1805 }
1806 BaseNode *cond =
1807 mirBuilder->CreateExprCompare(op, *GlobalTables::GetTypeTable().GetUInt1(),
1808 *GlobalTables::GetTypeTable().GetPrimType(op0->GetPrimType()), op0, op1);
1809 CondGotoNode *brFalseNode = mirBuilder->CreateStmtCondGoto(cond, OP_brfalse, labIdx);
1810
1811 MIRFunction *printf = mirBuilder->GetOrCreateFunction("printf", TyIdx(PTY_i32));
1812 printf->GetFuncSymbol()->SetAppearsInCode(true);
1813 beCommon.UpdateTypeTable(*printf->GetMIRFuncType());
1814 MapleVector<BaseNode *> argsPrintf(mirBuilder->GetCurrentFuncCodeMpAllocator()->Adapter());
1815 uint32 oldTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
1816 SwitchAssertBoundary(stmt, argsPrintf);
1817 uint32 newTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
1818 if (newTypeTableSize != oldTypeTableSize) {
1819 beCommon.AddNewTypeAfterBecommon(oldTypeTableSize, newTypeTableSize);
1820 }
1821 StmtNode *callPrintf = mirBuilder->CreateStmtCall(printf->GetPuidx(), argsPrintf);
1822 UnaryStmtNode *abortModeNode = mirBuilder->CreateStmtUnary(OP_abort, nullptr);
1823
1824 brFalseNode->SetSrcPos(stmt.GetSrcPos());
1825 labelBC->SetSrcPos(stmt.GetSrcPos());
1826 callPrintf->SetSrcPos(stmt.GetSrcPos());
1827 abortModeNode->SetSrcPos(stmt.GetSrcPos());
1828
1829 newBlk.AddStatement(brFalseNode);
1830 abortNode.emplace_back(labelBC);
1831 abortNode.emplace_back(callPrintf);
1832 abortNode.emplace_back(abortModeNode);
1833 }
1834
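// Central statement walk: each statement of 'block' is lowered into a new BlockNode. Switches,
// assignments, calls, returns, EH statements and boundary asserts are routed to their dedicated
// lowerers, and the abort sequences collected for assert-boundary checks are appended after the
// normal statements.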
1835 BlockNode *CGLowerer::LowerBlock(BlockNode &block)
1836 {
1837 BlockNode *newBlk = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
1838 BlockNode *tmpBlockNode = nullptr;
1839 std::vector<StmtNode *> abortNode;
1840 if (block.GetFirst() == nullptr) {
1841 return newBlk;
1842 }
1843
1844 StmtNode *nextStmt = block.GetFirst();
1845 do {
1846 StmtNode *stmt = nextStmt;
1847 nextStmt = stmt->GetNext();
1848 stmt->SetNext(nullptr);
1849 currentBlock = newBlk;
1850
1851 switch (stmt->GetOpCode()) {
1852 case OP_switch: {
1853 LowerSwitchOpnd(*stmt, *newBlk);
1854 auto switchMp = std::make_unique<ThreadLocalMemPool>(memPoolCtrler, "switchlowere");
1855 MapleAllocator switchAllocator(switchMp.get());
1856 SwitchLowerer switchLowerer(mirModule, static_cast<SwitchNode &>(*stmt), switchAllocator);
1857 BlockNode *blk = switchLowerer.LowerSwitch();
1858 if (blk->GetFirst() != nullptr) {
1859 newBlk->AppendStatementsFromBlock(*blk);
1860 }
1861 needBranchCleanup = true;
1862 break;
1863 }
1864 case OP_block:
1865 tmpBlockNode = LowerBlock(static_cast<BlockNode &>(*stmt));
1866 CHECK_FATAL(tmpBlockNode != nullptr, "nullptr is not expected");
1867 newBlk->AppendStatementsFromBlock(*tmpBlockNode);
1868 break;
1869 case OP_dassign: {
1870 LowerDassign(static_cast<DassignNode &>(*stmt), *newBlk);
1871 break;
1872 }
1873 case OP_regassign: {
1874 LowerRegassign(static_cast<RegassignNode &>(*stmt), *newBlk);
1875 break;
1876 }
1877 CASE_OP_ASSERT_BOUNDARY
1878 {
1879 LowerAssertBoundary(*stmt, block, *newBlk, abortNode);
1880 break;
1881 }
1882 case OP_iassign: {
1883 LowerIassign(static_cast<IassignNode &>(*stmt), *newBlk);
1884 break;
1885 }
1886 case OP_callassigned:
1887 case OP_icallassigned:
1888 case OP_icallprotoassigned: {
1889 // pass the addr of lvar if this is a struct call assignment
1890 bool lvar = false;
1891 // nextStmt could be changed by the call to LowerStructReturn
1892 if (!LowerStructReturn(*newBlk, stmt, nextStmt, lvar, &block)) {
1893 newBlk->AppendStatementsFromBlock(*LowerCallAssignedStmt(*stmt, lvar));
1894 }
1895 break;
1896 }
1897 case OP_virtualcallassigned:
1898 case OP_superclasscallassigned:
1899 case OP_interfacecallassigned:
1900 case OP_intrinsiccallassigned:
1901 case OP_xintrinsiccallassigned:
1902 case OP_intrinsiccallwithtypeassigned:
1903 newBlk->AppendStatementsFromBlock(*LowerCallAssignedStmt(*stmt));
1904 break;
1905 case OP_intrinsiccall:
1906 case OP_call:
1907 case OP_icall:
1908 case OP_icallproto:
1909 #if TARGARM32 || TARGAARCH64 || TARGRISCV64 || TARGX86_64
1910 // nextStmt could be changed by the call to LowerStructReturn
1911 LowerCallStmt(*stmt, nextStmt, *newBlk);
1912 #else
1913 LowerStmt(*stmt, *newBlk);
1914 #endif
1915 break;
1916 case OP_return: {
1917 #if TARGARM32 || TARGAARCH64 || TARGRISCV64 || TARGX86_64
1918 if (GetCurrentFunc()->IsFirstArgReturn() && stmt->NumOpnds() > 0) {
1919 newBlk->AppendStatementsFromBlock(
1920 *LowerReturnStructUsingFakeParm(static_cast<NaryStmtNode &>(*stmt)));
1921 } else {
1922 #endif
1923 NaryStmtNode *retNode = static_cast<NaryStmtNode *>(stmt);
1924 if (retNode->GetNopndSize() == 0) {
1925 newBlk->AddStatement(stmt);
1926 } else {
1927 tmpBlockNode = LowerReturn(*retNode);
1928 CHECK_FATAL(tmpBlockNode != nullptr, "nullptr is not expected");
1929 newBlk->AppendStatementsFromBlock(*tmpBlockNode);
1930 }
1931 #if TARGARM32 || TARGAARCH64 || TARGRISCV64 || TARGX86_64
1932 }
1933 #endif
1934 break;
1935 }
1936 case OP_comment:
1937 newBlk->AddStatement(stmt);
1938 break;
1939 case OP_try:
1940 LowerStmt(*stmt, *newBlk);
1941 newBlk->AddStatement(stmt);
1942 hasTry = true;
1943 break;
1944 case OP_endtry:
1945 LowerStmt(*stmt, *newBlk);
1946 newBlk->AddStatement(stmt);
1947 break;
1948 case OP_catch:
1949 LowerStmt(*stmt, *newBlk);
1950 newBlk->AddStatement(stmt);
1951 break;
1952 case OP_throw:
1953 if (mirModule.IsJavaModule()) {
1954 if (GenerateExceptionHandlingCode()) {
1955 LowerStmt(*stmt, *newBlk);
1956 newBlk->AddStatement(stmt);
1957 }
1958 } else {
1959 LowerStmt(*stmt, *newBlk);
1960 newBlk->AddStatement(stmt);
1961 }
1962 break;
1963 case OP_syncenter:
1964 case OP_syncexit: {
1965 LowerStmt(*stmt, *newBlk);
1966 StmtNode *tmp = LowerSyncEnterSyncExit(*stmt);
1967 CHECK_FATAL(tmp != nullptr, "nullptr is not expected");
1968 newBlk->AddStatement(tmp);
1969 break;
1970 }
1971 case OP_decrefreset: {
1972 /*
1973 * only gconly can reach here
1974 * lower stmt (decrefreset (addrof ptr %RegX_RXXXX)) to (dassign %RegX_RXXXX 0 (constval ref 0))
1975 */
1976 CHECK_FATAL(CGOptions::IsGCOnly(), "OP_decrefreset is expected only in gconly.");
1977 LowerResetStmt(*stmt, *newBlk);
1978 break;
1979 }
1980 case OP_asm: {
1981 LowerAsmStmt(static_cast<AsmNode *>(stmt), newBlk);
1982 break;
1983 }
1984 default:
1985 LowerStmt(*stmt, *newBlk);
1986 newBlk->AddStatement(stmt);
1987 break;
1988 }
1989 CHECK_FATAL(beCommon.GetSizeOfTypeSizeTable() == GlobalTables::GetTypeTable().GetTypeTableSize(), "Error!");
1990 } while (nextStmt != nullptr);
1991 for (auto node : abortNode) {
1992 newBlk->AddStatement(node);
1993 }
1994 return newBlk;
1995 }
1996
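// When a duplicate-asm function mapping is in effect, redirects direct calls to a mapped
// function to the extern replacement recorded in asmMap.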
1997 void CGLowerer::SimplifyBlock(BlockNode &block) const
1998 {
1999 if (block.GetFirst() == nullptr) {
2000 return;
2001 }
2002 StmtNode *nextStmt = block.GetFirst();
2003 do {
2004 StmtNode *stmt = nextStmt;
2005 nextStmt = stmt->GetNext();
2006 Opcode op = stmt->GetOpCode();
2007 switch (op) {
2008 case OP_call: {
2009 auto *callStmt = static_cast<CallNode *>(stmt);
2010 if (CGOptions::IsDuplicateAsmFileEmpty()) {
2011 break;
2012 }
2013 auto *oldFunc = GlobalTables::GetFunctionTable().GetFunctionFromPuidx(callStmt->GetPUIdx());
2014 if (asmMap.find(oldFunc->GetName()) == asmMap.end()) {
2015 break;
2016 }
2017 auto *newFunc = theMIRModule->GetMIRBuilder()->GetOrCreateFunction(asmMap.at(oldFunc->GetName()),
2018 callStmt->GetTyIdx());
2019 MIRSymbol *funcSym = newFunc->GetFuncSymbol();
2020 funcSym->SetStorageClass(kScExtern);
2021 funcSym->SetAppearsInCode(true);
2022 callStmt->SetPUIdx(newFunc->GetPuidx());
2023 break;
2024 }
2025 default: {
2026 break;
2027 }
2028 }
2029 } while (nextStmt != nullptr);
2030 return;
2031 }
2032
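// Recovers the element type of a (possibly nested) Java array from a regread or dread base
// expression; returns nullptr when the base type cannot be determined.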
2033 MIRType *CGLowerer::GetArrayNodeType(BaseNode &baseNode)
2034 {
2035 MIRType *baseType = nullptr;
2036 auto curFunc = mirModule.CurFunction();
2037 if (baseNode.GetOpCode() == OP_regread) {
2038 RegreadNode *rrNode = static_cast<RegreadNode *>(&baseNode);
2039 MIRPreg *pReg = curFunc->GetPregTab()->PregFromPregIdx(rrNode->GetRegIdx());
2040 if (pReg->IsRef()) {
2041 baseType = pReg->GetMIRType();
2042 }
2043 }
2044 if (baseNode.GetOpCode() == OP_dread) {
2045 DreadNode *dreadNode = static_cast<DreadNode *>(&baseNode);
2046 MIRSymbol *symbol = curFunc->GetLocalOrGlobalSymbol(dreadNode->GetStIdx());
2047 baseType = symbol->GetType();
2048 }
2049 MIRType *arrayElemType = nullptr;
2050 if (baseType != nullptr) {
2051 MIRType *stType =
2052 GlobalTables::GetTypeTable().GetTypeFromTyIdx(static_cast<MIRPtrType *>(baseType)->GetPointedTyIdx());
2053 while (stType->GetKind() == kTypeJArray) {
2054 MIRJarrayType *baseType1 = static_cast<MIRJarrayType *>(stType);
2055 MIRType *elemType = baseType1->GetElemType();
2056 if (elemType->GetKind() == kTypePointer) {
2057 const TyIdx &index = static_cast<MIRPtrType *>(elemType)->GetPointedTyIdx();
2058 stType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(index);
2059 } else {
2060 stType = elemType;
2061 }
2062 }
2063
2064 arrayElemType = stType;
2065 }
2066 return arrayElemType;
2067 }
2068
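// Keeps call arguments simple: a lowered operand that is not already a leaf (regread, constval,
// dread, addrof, iaddrof, conststr, conststr16) is first stored to a temporary, a named local
// at -O0 or a preg otherwise, and the call reads that temporary instead.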
2069 void CGLowerer::SplitCallArg(CallNode &callNode, BaseNode *newOpnd, size_t i, BlockNode &newBlk)
2070 {
2071 if (newOpnd->GetOpCode() != OP_regread && newOpnd->GetOpCode() != OP_constval && newOpnd->GetOpCode() != OP_dread &&
2072 newOpnd->GetOpCode() != OP_addrof && newOpnd->GetOpCode() != OP_iaddrof &&
2073 newOpnd->GetOpCode() != OP_constval && newOpnd->GetOpCode() != OP_conststr &&
2074 newOpnd->GetOpCode() != OP_conststr16) {
2075 if (CGOptions::GetInstance().GetOptimizeLevel() == CGOptions::kLevel0) {
2076 MIRType *type = GlobalTables::GetTypeTable().GetPrimType(newOpnd->GetPrimType());
2077 MIRSymbol *ret = CreateNewRetVar(*type, kIntrnRetValPrefix);
2078 DassignNode *dassignNode = mirBuilder->CreateStmtDassign(*ret, 0, newOpnd);
2079 newBlk.AddStatement(dassignNode);
2080 callNode.SetOpnd(mirBuilder->CreateExprDread(*type, 0, *ret), i);
2081 } else {
2082 PregIdx pregIdx = mirModule.CurFunction()->GetPregTab()->CreatePreg(newOpnd->GetPrimType());
2083 RegassignNode *temp = mirBuilder->CreateStmtRegassign(newOpnd->GetPrimType(), pregIdx, newOpnd);
2084 newBlk.AddStatement(temp);
2085 callNode.SetOpnd(mirBuilder->CreateExprRegread(newOpnd->GetPrimType(), pregIdx), i);
2086 }
2087 } else {
2088 callNode.SetOpnd(newOpnd, i);
2089 }
2090 }
2091
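// Lowers a plain call/icall/icallproto. Besides lowering the arguments (and inserting the
// MCC_Reflect_Check_Arraystore helper for Java array stores that need a store check), it
// recognizes the pattern "call; dassign %x (regread %%retval0)" where the callee returns a
// struct too large for registers, rewrites the call to take &%x as the result pointer, and
// skips the following dassign.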
2092 StmtNode *CGLowerer::LowerCall(CallNode &callNode, StmtNode *&nextStmt, BlockNode &newBlk, MIRType *retTy, bool uselvar)
2093 {
2094 /*
2095 * nextStmt in-out
2096 * call $foo(constval u32 128)
2097 * dassign %jlt (dread agg %%retval)
2098 */
2099 bool isArrayStore = false;
2100
2101 if (callNode.GetOpCode() == OP_call) {
2102 MIRFunction *calleeFunc = GlobalTables::GetFunctionTable().GetFunctionFromPuidx(callNode.GetPUIdx());
2103 if ((calleeFunc->GetName() == "MCC_WriteRefField") && (callNode.Opnd(1)->GetOpCode() == OP_iaddrof)) {
2104 IreadNode *addrExpr = static_cast<IreadNode *>(callNode.Opnd(1));
2105 if (addrExpr->Opnd(0)->GetOpCode() == OP_array) {
2106 isArrayStore = true;
2107 }
2108 }
2109 }
2110
2111 for (size_t i = 0; i < callNode.GetNopndSize(); ++i) {
2112 BaseNode *newOpnd = LowerExpr(callNode, *callNode.GetNopndAt(i), newBlk);
2113 #if TARGAARCH64 || TARGRISCV64 || TARGX86_64
2114 callNode.SetOpnd(newOpnd, i);
2115 #else
2116 SplitCallArg(callNode, newOpnd, i, newBlk);
2117 #endif
2118 }
2119
2120 if (isArrayStore && checkLoadStore) {
2121 bool needCheckStore = true;
2122 MIRType *arrayElemType = GetArrayNodeType(*callNode.Opnd(0));
2123 MIRType *valueRealType = GetArrayNodeType(*callNode.Opnd(kNodeThirdOpnd));
2124 if ((arrayElemType != nullptr) && (valueRealType != nullptr) && (arrayElemType->GetKind() == kTypeClass) &&
2125 static_cast<MIRClassType *>(arrayElemType)->IsFinal() && (valueRealType->GetKind() == kTypeClass) &&
2126 static_cast<MIRClassType *>(valueRealType)->IsFinal() &&
2127 valueRealType->GetTypeIndex() == arrayElemType->GetTypeIndex()) {
2128 needCheckStore = false;
2129 }
2130
2131 if (needCheckStore) {
2132 MIRFunction *fn =
2133 mirModule.GetMIRBuilder()->GetOrCreateFunction("MCC_Reflect_Check_Arraystore", TyIdx(PTY_void));
2134 fn->GetFuncSymbol()->SetAppearsInCode(true);
2135 beCommon.UpdateTypeTable(*fn->GetMIRFuncType());
2136 fn->AllocSymTab();
2137 MapleVector<BaseNode *> args(mirModule.GetMIRBuilder()->GetCurrentFuncCodeMpAllocator()->Adapter());
2138 args.emplace_back(callNode.Opnd(0));
2139 args.emplace_back(callNode.Opnd(kNodeThirdOpnd));
2140 StmtNode *checkStoreStmt = mirModule.GetMIRBuilder()->CreateStmtCall(fn->GetPuidx(), args);
2141 newBlk.AddStatement(checkStoreStmt);
2142 }
2143 }
2144
2145 DassignNode *dassignNode = nullptr;
2146 if ((nextStmt != nullptr) && (nextStmt->GetOpCode() == OP_dassign)) {
2147 dassignNode = static_cast<DassignNode *>(nextStmt);
2148 }
2149
2150 /* if nextStmt is not a dassign stmt, return */
2151 if (dassignNode == nullptr) {
2152 return &callNode;
2153 }
2154
2155 if (!uselvar && retTy && beCommon.GetTypeSize(retTy->GetTypeIndex().GetIdx()) <= k16ByteSize) {
2156 /* return structure fitting in one or two regs. */
2157 return &callNode;
2158 }
2159
2160 MIRType *retType = nullptr;
2161 if (callNode.op == OP_icall || callNode.op == OP_icallproto) {
2162 if (retTy == nullptr) {
2163 return &callNode;
2164 } else {
2165 retType = retTy;
2166 }
2167 }
2168
2169 if (retType == nullptr) {
2170 MIRFunction *calleeFunc = GlobalTables::GetFunctionTable().GetFunctionFromPuidx(callNode.GetPUIdx());
2171 retType = calleeFunc->GetReturnType();
2172 if (calleeFunc->IsReturnStruct() && (retType->GetPrimType() == PTY_void)) {
2173 MIRPtrType *pretType = static_cast<MIRPtrType *>((calleeFunc->GetNthParamType(0)));
2174 CHECK_FATAL(pretType != nullptr, "nullptr is not expected");
2175 retType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(pretType->GetPointedTyIdx());
2176 CHECK_FATAL((retType->GetKind() == kTypeStruct) || (retType->GetKind() == kTypeUnion),
2177 "make sure retType is a struct type");
2178 }
2179 }
2180
2181 /* if return type is not of a struct, return */
2182 if ((retType->GetKind() != kTypeStruct) && (retType->GetKind() != kTypeUnion)) {
2183 return &callNode;
2184 }
2185
2186 MIRSymbol *dsgnSt = mirModule.CurFunction()->GetLocalOrGlobalSymbol(dassignNode->GetStIdx());
2187 CHECK_FATAL(dsgnSt->GetType()->IsStructType(), "expects a struct type");
2188 MIRStructType *structTy = static_cast<MIRStructType *>(dsgnSt->GetType());
2189 if (structTy == nullptr) {
2190 return &callNode;
2191 }
2192
2193 RegreadNode *regReadNode = nullptr;
2194 if (dassignNode->Opnd(0)->GetOpCode() == OP_regread) {
2195 regReadNode = static_cast<RegreadNode *>(dassignNode->Opnd(0));
2196 }
2197 if (regReadNode == nullptr || (regReadNode->GetRegIdx() != -kSregRetval0)) {
2198 return &callNode;
2199 }
2200
2201 MapleVector<BaseNode *> newNopnd(mirModule.CurFuncCodeMemPoolAllocator()->Adapter());
2202 AddrofNode *addrofNode = mirModule.CurFuncCodeMemPool()->New<AddrofNode>(OP_addrof);
2203 addrofNode->SetPrimType(GetLoweredPtrType());
2204 addrofNode->SetStIdx(dsgnSt->GetStIdx());
2205 addrofNode->SetFieldID(0);
2206
2207 if (callNode.op == OP_icall || callNode.op == OP_icallproto) {
2208 auto ond = callNode.GetNopnd().begin();
2209 newNopnd.emplace_back(*ond);
2210 newNopnd.emplace_back(addrofNode);
2211 for (++ond; ond != callNode.GetNopnd().end(); ++ond) {
2212 newNopnd.emplace_back(*ond);
2213 }
2214 } else {
2215 newNopnd.emplace_back(addrofNode);
2216 for (auto *opnd : callNode.GetNopnd()) {
2217 newNopnd.emplace_back(opnd);
2218 }
2219 }
2220
2221 callNode.SetNOpnd(newNopnd);
2222 callNode.SetNumOpnds(static_cast<uint8>(newNopnd.size()));
2223 CHECK_FATAL(nextStmt != nullptr, "nullptr is not expected");
2224 nextStmt = nextStmt->GetNext();
2225 return &callNode;
2226 }
2227
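// Decides how the current function returns an aggregate: a struct larger than 16 bytes gets a
// hidden ".return.<funcname>" pointer formal prepended and the function is marked
// firstarg_return, while smaller structs are marked as returned in registers.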
2228 void CGLowerer::LowerEntry(MIRFunction &func)
2229 {
2230 // determine if needed to insert fake parameter to return struct for current function
2231 if (func.IsReturnStruct()) {
2232 MIRType *retType = func.GetReturnType();
2233 #if TARGAARCH64
2234 PrimType pty = IsStructElementSame(retType);
2235 if (pty == PTY_f32 || pty == PTY_f64 || IsPrimitiveVector(pty)) {
2236 func.SetStructReturnedInRegs();
2237 return;
2238 }
2239 #endif
2240 if (retType->GetPrimType() != PTY_agg) {
2241 return;
2242 }
2243 if (retType->GetSize() > k16ByteSize) {
2244 func.SetFirstArgReturn();
2245 func.GetMIRFuncType()->SetFirstArgReturn();
2246 } else {
2247 func.SetStructReturnedInRegs();
2248 }
2249 }
2250 if (func.IsFirstArgReturn() && func.GetReturnType()->GetPrimType() != PTY_void) {
2251 MIRSymbol *retSt = func.GetSymTab()->CreateSymbol(kScopeLocal);
2252 retSt->SetStorageClass(kScFormal);
2253 retSt->SetSKind(kStVar);
2254 std::string retName(".return.");
2255 MIRSymbol *funcSt = GlobalTables::GetGsymTable().GetSymbolFromStidx(func.GetStIdx().Idx());
2256 retName += funcSt->GetName();
2257 retSt->SetNameStrIdx(retName);
2258 MIRType *pointType = beCommon.BeGetOrCreatePointerType(*func.GetReturnType());
2259
2260 retSt->SetTyIdx(pointType->GetTypeIndex());
2261 std::vector<MIRSymbol *> formals;
2262 formals.emplace_back(retSt);
2263 for (uint32 i = 0; i < func.GetFormalCount(); ++i) {
2264 auto formal = func.GetFormal(i);
2265 formals.emplace_back(formal);
2266 }
2267 func.SetFirstArgReturn();
2268
2269 beCommon.AddElementToFuncReturnType(func, func.GetReturnTyIdx());
2270
2271 func.UpdateFuncTypeAndFormalsAndReturnType(formals, TyIdx(PTY_void), true);
2272 auto *funcType = func.GetMIRFuncType();
2273 DEBUG_ASSERT(funcType != nullptr, "null ptr check");
2274 funcType->SetFirstArgReturn();
2275 beCommon.AddTypeSizeAndAlign(funcType->GetTypeIndex(), GetPrimTypeSize(funcType->GetPrimType()));
2276 }
2277 }
2278
2279 void CGLowerer::LowerPseudoRegs(const MIRFunction &func) const
2280 {
2281 for (uint32 i = 1; i < func.GetPregTab()->Size(); ++i) {
2282 MIRPreg *ipr = func.GetPregTab()->PregFromPregIdx(i);
2283 PrimType primType = ipr->GetPrimType();
2284 if (primType == PTY_u1) {
2285 ipr->SetPrimType(PTY_u32);
2286 }
2287 }
2288 }
2289
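// Removes a goto whose target label is reached without passing any real statement, keeping any
// comment statements that sat in between; sequences containing try/endtry/catch are left
// untouched so exception ranges stay intact.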
2290 void CGLowerer::CleanupBranches(MIRFunction &func) const
2291 {
2292 BlockNode *block = func.GetBody();
2293 StmtNode *prev = nullptr;
2294 StmtNode *next = nullptr;
2295 for (StmtNode *curr = block->GetFirst(); curr != nullptr; curr = next) {
2296 next = curr->GetNext();
2297 if (next != nullptr) {
2298 CHECK_FATAL(curr == next->GetPrev(), "unexpected node");
2299 }
2300 if ((next != nullptr) && (prev != nullptr) && (curr->GetOpCode() == OP_goto)) {
2301 /*
2302 * Skip until find a label.
2303 * Note that the current 'goto' statement may be the last statement
2304 * when discounting comment statements.
2305 * Make sure we don't lose any comments.
2306 */
2307 StmtNode *cmtB = nullptr;
2308 StmtNode *cmtE = nullptr;
2309 bool isCleanable = true;
2310 while ((next != nullptr) && (next->GetOpCode() != OP_label)) {
2311 if ((next->GetOpCode() == OP_try) || (next->GetOpCode() == OP_endtry) ||
2312 (next->GetOpCode() == OP_catch)) {
2313 isCleanable = false;
2314 break;
2315 }
2316 next = next->GetNext();
2317 }
2318 if ((next != nullptr) && (!isCleanable)) {
2319 prev = next->GetPrev();
2320 continue;
2321 }
2322
2323 next = curr->GetNext();
2324
2325 while ((next != nullptr) && (next->GetOpCode() != OP_label)) {
2326 if (next->GetOpCode() == OP_comment) {
2327 if (cmtB == nullptr) {
2328 cmtB = next;
2329 cmtE = next;
2330 } else {
2331 CHECK_FATAL(cmtE != nullptr, "cmt_e is null in CGLowerer::CleanupBranches");
2332 cmtE->SetNext(next);
2333 next->SetPrev(cmtE);
2334 cmtE = next;
2335 }
2336 }
2337 next = next->GetNext();
2338 }
2339
2340 curr->SetNext(next);
2341
2342 if (next != nullptr) {
2343 next->SetPrev(curr);
2344 }
2345
2346 StmtNode *insertAfter = nullptr;
2347
2348 if ((next != nullptr) &&
2349 ((static_cast<GotoNode *>(curr))->GetOffset() == (static_cast<LabelNode *>(next))->GetLabelIdx())) {
2350 insertAfter = prev;
2351 prev->SetNext(next); /* skip goto statement (which is pointed by curr) */
2352 next->SetPrev(prev);
2353 curr = next; /* make curr point to the label statement */
2354 next = next->GetNext(); /* advance next to the next statement of the label statement */
2355 } else {
2356 insertAfter = curr;
2357 }
2358
2359 /* insert comments before 'curr' */
2360 if (cmtB != nullptr) {
2361 CHECK_FATAL(cmtE != nullptr, "nullptr is not expected");
2362 StmtNode *iaNext = insertAfter->GetNext();
2363 if (iaNext != nullptr) {
2364 iaNext->SetPrev(cmtE);
2365 }
2366 cmtE->SetNext(iaNext);
2367
2368 insertAfter->SetNext(cmtB);
2369 cmtB->SetPrev(insertAfter);
2370
2371 if (insertAfter == curr) {
2372 curr = cmtE;
2373 }
2374 }
2375 if (next == nullptr) {
2376 func.GetBody()->SetLast(curr);
2377 }
2378 }
2379 prev = curr;
2380 }
2381 CHECK_FATAL(func.GetBody()->GetLast() == prev, "make sure the return value of GetLast equal prev");
2382 }
2383
2384 /*
2385 * We want to place catch blocks so that they don't come before any of the java trys that refer to them.
2386 * In order to do that, we take advantage of the fact that the mpl source we get is already flattened and
2387 * no java-try-end-try block is enclosed in any other java-try-end-try block; the blocks appear in the order they occur in the mpl file.
2388 * We process each bb in bbList from the front to the end, and while doing so, we maintain a list of catch blocks
2389 * we have seen. When we get to an end-try block, we examine each catch block label it has (offsets),
2390 * and if we find any catch block in the "seen" list, we move the block after the end-try block.
2391 * Note that we need to find a basic block which does not have 'fallthruBranch' control path.
2392 * (Appending the catch block to any basic block that has the 'fallthruBranch' control path
2393 * will alter the program semantics)
2394 */
2395 void CGLowerer::LowerTryCatchBlocks(BlockNode &body)
2396 {
2397 if (!hasTry) {
2398 return;
2399 }
2400
2401 #if DEBUG
2402 BBT::ValidateStmtList(nullptr, nullptr);
2403 #endif
2404 auto memPool = std::make_unique<ThreadLocalMemPool>(memPoolCtrler, "CreateNewBB mempool");
2405 TryCatchBlocksLower tryCatchLower(*memPool, body, mirModule);
2406 tryCatchLower.RecoverBasicBlock();
2407 bool generateEHCode = GenerateExceptionHandlingCode();
2408 tryCatchLower.SetGenerateEHCode(generateEHCode);
2409 tryCatchLower.TraverseBBList();
2410 #if DEBUG
2411 tryCatchLower.CheckTryCatchPattern();
2412 #endif
2413 }
2414
2415 inline bool IsAccessingTheSameMemoryLocation(const DassignNode &dassign, const RegreadNode &rRead,
2416 const CGLowerer &cgLowerer)
2417 {
2418 StIdx stIdx = cgLowerer.GetSymbolReferredToByPseudoRegister(rRead.GetRegIdx());
2419 return ((dassign.GetStIdx() == stIdx) && (dassign.GetFieldID() == 0));
2420 }
2421
2422 inline bool IsAccessingTheSameMemoryLocation(const DassignNode &dassign, const DreadNode &dread)
2423 {
2424 return ((dassign.GetStIdx() == dread.GetStIdx()) && (dassign.GetFieldID() == dread.GetFieldID()));
2425 }
2426
2427 inline bool IsDassignNOP(const DassignNode &dassign)
2428 {
2429 if (dassign.GetRHS()->GetOpCode() == OP_dread) {
2430 return IsAccessingTheSameMemoryLocation(dassign, static_cast<DreadNode &>(*dassign.GetRHS()));
2431 }
2432 return false;
2433 }
2434
2435 inline bool IsConstvalZero(const BaseNode &n)
2436 {
2437 return ((n.GetOpCode() == OP_constval) && static_cast<const ConstvalNode &>(n).GetConstVal()->IsZero());
2438 }
2439
2440 #define NEXT_ID(x) ((x) + 1)
2441 #define INTRN_FIRST_SYNC_ENTER NEXT_ID(INTRN_LAST)
2442 #define INTRN_SECOND_SYNC_ENTER NEXT_ID(INTRN_FIRST_SYNC_ENTER)
2443 #define INTRN_THIRD_SYNC_ENTER NEXT_ID(INTRN_SECOND_SYNC_ENTER)
2444 #define INTRN_FOURTH_SYNC_ENTER NEXT_ID(INTRN_THIRD_SYNC_ENTER)
2445 #define INTRN_YNC_EXIT NEXT_ID(INTRN_FOURTH_SYNC_ENTER)
2446
2447 std::vector<std::pair<CGLowerer::BuiltinFunctionID, PUIdx>> CGLowerer::builtinFuncIDs;
2448 std::unordered_map<IntrinDesc *, PUIdx> CGLowerer::intrinFuncIDs;
2449 std::unordered_map<std::string, size_t> CGLowerer::arrayClassCacheIndex;
2450
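// Declares an extern runtime helper taking a single pointer parameter and returning void, and
// records its PUIdx under the given builtin id; the MCC_SyncEnterFast*/MCC_SyncExitFast helpers
// additionally get a "monitor_slot" formal.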
2451 MIRFunction *CGLowerer::RegisterFunctionVoidStarToVoid(BuiltinFunctionID id, const std::string &name,
2452 const std::string ¶mName)
2453 {
2454 MIRFunction *func = mirBuilder->GetOrCreateFunction(name, GlobalTables::GetTypeTable().GetVoid()->GetTypeIndex());
2455 beCommon.UpdateTypeTable(*func->GetMIRFuncType());
2456 func->AllocSymTab();
2457 MIRSymbol *funcSym = func->GetFuncSymbol();
2458 funcSym->SetStorageClass(kScExtern);
2459 funcSym->SetAppearsInCode(true);
2460 MIRType *argTy = GlobalTables::GetTypeTable().GetPtr();
2461 MIRSymbol *argSt = func->GetSymTab()->CreateSymbol(kScopeLocal);
2462 argSt->SetNameStrIdx(mirBuilder->GetOrCreateStringIndex(paramName));
2463 argSt->SetTyIdx(argTy->GetTypeIndex());
2464 argSt->SetStorageClass(kScFormal);
2465 argSt->SetSKind(kStVar);
2466 func->GetSymTab()->AddToStringSymbolMap(*argSt);
2467 std::vector<MIRSymbol *> formals;
2468 formals.emplace_back(argSt);
2469 if ((name == "MCC_SyncEnterFast0") || (name == "MCC_SyncEnterFast1") || (name == "MCC_SyncEnterFast2") ||
2470 (name == "MCC_SyncEnterFast3") || (name == "MCC_SyncExitFast")) {
2471 MIRSymbol *argStMatch = func->GetSymTab()->CreateSymbol(kScopeLocal);
2472 argStMatch->SetNameStrIdx(mirBuilder->GetOrCreateStringIndex("monitor_slot"));
2473 argStMatch->SetTyIdx(argTy->GetTypeIndex());
2474 argStMatch->SetStorageClass(kScFormal);
2475 argStMatch->SetSKind(kStVar);
2476 func->GetSymTab()->AddToStringSymbolMap(*argStMatch);
2477 formals.emplace_back(argStMatch);
2478 }
2479 func->UpdateFuncTypeAndFormalsAndReturnType(formals, GlobalTables::GetTypeTable().GetVoid()->GetTypeIndex(), false);
2480 auto *funcType = func->GetMIRFuncType();
2481 DEBUG_ASSERT(funcType != nullptr, "null ptr check");
2482 beCommon.AddTypeSizeAndAlign(funcType->GetTypeIndex(), GetPrimTypeSize(funcType->GetPrimType()));
2483
2484 builtinFuncIDs.emplace_back(std::pair<BuiltinFunctionID, PUIdx>(id, func->GetPuidx()));
2485 return func;
2486 }
2487
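// Declares extern prototypes for the runtime helpers listed in cgBuiltins (PTY_dynany arguments
// and returns are modelled as void*) and for the monitor enter/exit fast paths, caching each
// PUIdx so later intrinsic lowering can call them directly.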
2488 void CGLowerer::RegisterBuiltIns()
2489 {
2490 for (uint32 i = 0; i < sizeof(cgBuiltins) / sizeof(cgBuiltins[0]); ++i) {
2491 BuiltinFunctionID id = cgBuiltins[i].first;
2492 IntrinDesc &desc = IntrinDesc::intrinTable[id];
2493
2494 MIRFunction *func = mirBuilder->GetOrCreateFunction(cgBuiltins[i].second,
2495 GlobalTables::GetTypeTable().GetVoid()->GetTypeIndex());
2496 beCommon.UpdateTypeTable(*func->GetMIRFuncType());
2497 func->AllocSymTab();
2498 MIRSymbol *funcSym = func->GetFuncSymbol();
2499 funcSym->SetStorageClass(kScExtern);
2500 funcSym->SetAppearsInCode(true);
2501 /* return type */
2502 MIRType *retTy = desc.GetReturnType();
2503 CHECK_FATAL(retTy != nullptr, "retTy should not be nullptr");
2504 /* use void* for PTY_dynany */
2505 if (retTy->GetPrimType() == PTY_dynany) {
2506 retTy = GlobalTables::GetTypeTable().GetPtr();
2507 }
2508
2509 std::vector<MIRSymbol *> formals;
2510 const std::string params[IntrinDesc::kMaxArgsNum] = {"p0", "p1", "p2", "p3", "p4", "p5"};
2511 for (uint32 j = 0; j < IntrinDesc::kMaxArgsNum; ++j) {
2512 MIRType *argTy = desc.GetArgType(j);
2513 if (argTy == nullptr) {
2514 break;
2515 }
2516 /* use void* for PTY_dynany */
2517 if (argTy->GetPrimType() == PTY_dynany) {
2518 argTy = GlobalTables::GetTypeTable().GetPtr();
2519 }
2520 MIRSymbol *argSt = func->GetSymTab()->CreateSymbol(kScopeLocal);
2521 argSt->SetNameStrIdx(mirBuilder->GetOrCreateStringIndex(params[j]));
2522 argSt->SetTyIdx(argTy->GetTypeIndex());
2523 argSt->SetStorageClass(kScFormal);
2524 argSt->SetSKind(kStVar);
2525 func->GetSymTab()->AddToStringSymbolMap(*argSt);
2526 formals.emplace_back(argSt);
2527 }
2528 func->UpdateFuncTypeAndFormalsAndReturnType(formals, retTy->GetTypeIndex(), false);
2529 auto *funcType = func->GetMIRFuncType();
2530 DEBUG_ASSERT(funcType != nullptr, "null ptr check");
2531 beCommon.AddTypeSizeAndAlign(funcType->GetTypeIndex(), GetPrimTypeSize(funcType->GetPrimType()));
2532
2533 builtinFuncIDs.emplace_back(std::pair<BuiltinFunctionID, PUIdx>(id, func->GetPuidx()));
2534 }
2535
2536 /* register __builtin_sync_enter */
2537 static_cast<void>(RegisterFunctionVoidStarToVoid(INTRN_FIRST_SYNC_ENTER, "MCC_SyncEnterFast0", "obj"));
2538 static_cast<void>(RegisterFunctionVoidStarToVoid(INTRN_SECOND_SYNC_ENTER, "MCC_SyncEnterFast1", "obj"));
2539 static_cast<void>(RegisterFunctionVoidStarToVoid(INTRN_THIRD_SYNC_ENTER, "MCC_SyncEnterFast2", "obj"));
2540 static_cast<void>(RegisterFunctionVoidStarToVoid(INTRN_FOURTH_SYNC_ENTER, "MCC_SyncEnterFast3", "obj"));
2541 /* register __builtin_sync_exit */
2542 static_cast<void>(RegisterFunctionVoidStarToVoid(INTRN_YNC_EXIT, "MCC_SyncExitFast", "obj"));
2543 }
2544
2545 /*
2546 * From Maple IR Document as of Apr 14, 2017
2547 * Type Conversion Expression Opcodes
2548 * Conversions between integer types of different sizes require the cvt opcode.
2549 * Conversion between signed and unsigned integers of the same size does not
2550 * require any operation, not even retype.
2551 * cvt :
2552 * Convert the operand's value from <from-type> to <to-type>.
2553 * If the sizes of the two types are the same, the conversion must involve
2554 * altering the bits.
2555 * retype:
2556 * <opnd0> is converted to <prim-type> which has derived type <type> without
2557 * changing any bits. The size of <opnd0> and <prim-type> must be the same.
2558 * <opnd0> may be of aggregate type.
2559 */
2560 BaseNode *CGLowerer::MergeToCvtType(PrimType dType, PrimType sType, BaseNode &src) const
2561 {
2562 CHECK_FATAL(IsPrimitiveInteger(dType) || IsPrimitiveFloat(dType),
2563 "dtype should be primitiveInteger or primitiveFloat");
2564 CHECK_FATAL(IsPrimitiveInteger(sType) || IsPrimitiveFloat(sType),
2565 "sType should be primitiveInteger or primitiveFloat");
2566 /* src i32, dest f32; src i64, dest f64 */
2567 CHECK_FATAL(
2568 (IsPrimitiveInteger(sType) && IsPrimitiveFloat(dType) &&
2569 (GetPrimTypeBitSize(sType) == GetPrimTypeBitSize(dType))) ||
2570 (IsPrimitiveInteger(sType) && IsPrimitiveInteger(dType)),
2571 "when sType is primitiveInteger and dType is primitiveFloat, sType's primTypeBitSize must equal dType's,"
2572 " or both sType and dType should primitiveInteger");
2573
2574 /* src & dest are both of float type */
2575 MIRType *toType = GlobalTables::GetTypeTable().GetPrimType(dType);
2576 MIRType *fromType = GlobalTables::GetTypeTable().GetPrimType(sType);
2577 if (IsPrimitiveInteger(sType) && IsPrimitiveFloat(dType) &&
2578 (GetPrimTypeBitSize(sType) == GetPrimTypeBitSize(dType))) {
2579 return mirBuilder->CreateExprRetype(*toType, *fromType, &src);
2580 } else if (IsPrimitiveInteger(sType) && IsPrimitiveInteger(dType)) {
2581 if (GetPrimTypeBitSize(sType) >= GetPrimTypeBitSize(dType)) {
2582 if (dType == PTY_u1) { /* e.g., type _Bool */
2583 toType = GlobalTables::GetTypeTable().GetPrimType(PTY_u8);
2584 return mirBuilder->CreateExprCompare(OP_ne, *toType, *fromType, &src,
2585 mirBuilder->CreateIntConst(0, sType));
2586 } else if (GetPrimTypeBitSize(sType) > GetPrimTypeBitSize(dType)) {
2587 return mirBuilder->CreateExprTypeCvt(OP_cvt, *toType, *fromType, &src);
2588 } else if (IsSignedInteger(sType) != IsSignedInteger(dType)) {
2589 return mirBuilder->CreateExprTypeCvt(OP_cvt, *toType, *fromType, &src);
2590 }
2591 src.SetPrimType(dType);
2592 return &src;
2593 /*
2594 * Force type cvt here because we currently do not run constant folding
2595 * or constant propagation before CG. We may revisit this decision later.
2596 */
2597 } else if (GetPrimTypeBitSize(sType) < GetPrimTypeBitSize(dType)) {
2598 return mirBuilder->CreateExprTypeCvt(OP_cvt, *toType, *fromType, &src);
2599 } else if (IsConstvalZero(src)) {
2600 return mirBuilder->CreateIntConst(0, dType);
2601 }
2602 CHECK_FATAL(false, "should not run here");
2603 }
2604 CHECK_FATAL(false, "should not run here");
2605 }
2606
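/*
 * GetLenNode builds an expression that loads the java array length field, roughly:
 *   iread i32 <* i32> 0 (add (<array-ref>, constval <array-length-offset from RTSupport>))
 */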
2607 IreadNode &CGLowerer::GetLenNode(BaseNode &opnd0)
2608 {
2609 MIRIntConst *arrayHeaderNode = GlobalTables::GetIntConstTable().GetOrCreateIntConst(
2610 RTSupport::GetRTSupportInstance().GetArrayLengthOffset(),
2611 *GlobalTables::GetTypeTable().GetTypeFromTyIdx(opnd0.GetPrimType()));
2612 BaseNode *arrayHeaderCstNode = mirModule.CurFuncCodeMemPool()->New<ConstvalNode>(arrayHeaderNode);
2613 arrayHeaderCstNode->SetPrimType(opnd0.GetPrimType());
2614 MIRType *addrType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(opnd0.GetPrimType());
2615 BaseNode *refLenAddr = mirBuilder->CreateExprBinary(OP_add, *addrType, &opnd0, arrayHeaderCstNode);
2616 MIRType *infoLenType = GlobalTables::GetTypeTable().GetInt32();
2617 MIRType *ptrType = beCommon.BeGetOrCreatePointerType(*infoLenType);
2618 IreadNode *lenNode = mirBuilder->CreateExprIread(*infoLenType, *ptrType, 0, refLenAddr);
2619 return (*lenNode);
2620 }
2621
2622 LabelIdx CGLowerer::GetLabelIdx(MIRFunction &curFunc) const
2623 {
2624 std::string suffix = std::to_string(curFunc.GetLabelTab()->GetLabelTableSize());
2625 GStrIdx labelStrIdx = GlobalTables::GetStrTable().GetOrCreateStrIdxFromName("__label_BC_" + suffix);
2626 LabelIdx labIdx = curFunc.GetLabelTab()->AddLabel(labelStrIdx);
2627 return labIdx;
2628 }
2629
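/*
 * For java modules with array optimization disabled, ProcessArrayExpr inserts an explicit
 * boundary check ahead of the statement that uses the array expression. The emitted sequence
 * is roughly (preg/label names are illustrative):
 *   regassign %len (iread of the array length field)
 *   brfalse @skip (ge u32 (<index>, %len))      # when index < len, skip the call
 *   call &MCC_Array_Boundary_Check (<array>, <index>)
 *  @skip:
 */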
2630 void CGLowerer::ProcessArrayExpr(BaseNode &expr, BlockNode &blkNode)
2631 {
2632 bool needProcessArrayExpr = !ShouldOptarray() && mirModule.IsJavaModule();
2633 if (!needProcessArrayExpr) {
2634 return;
2635 }
2636 /* Array boundary check */
2637 MIRFunction *curFunc = mirModule.CurFunction();
2638 auto &arrayNode = static_cast<ArrayNode &>(expr);
2639 StmtNode *boundaryCheckStmt = nullptr;
2640 if (arrayNode.GetBoundsCheck()) {
2641 CHECK_FATAL(arrayNode.GetNopndSize() == kOperandNumBinary, "unexpected nOpnd size");
2642 BaseNode *opnd0 = arrayNode.GetNopndAt(0);
2643 if (opnd0->GetOpCode() == OP_iread) {
2644 PregIdx pregIdx = curFunc->GetPregTab()->CreatePreg(opnd0->GetPrimType());
2645 RegassignNode *temp = mirBuilder->CreateStmtRegassign(opnd0->GetPrimType(), pregIdx, opnd0);
2646 blkNode.InsertAfter(blkNode.GetLast(), temp);
2647 arrayNode.SetNOpndAt(0, mirBuilder->CreateExprRegread(opnd0->GetPrimType(), pregIdx));
2648 }
2649 IreadNode &lenNode = GetLenNode(*opnd0);
2650 PregIdx lenPregIdx = curFunc->GetPregTab()->CreatePreg(lenNode.GetPrimType());
2651 RegassignNode *lenRegassignNode = mirBuilder->CreateStmtRegassign(lenNode.GetPrimType(), lenPregIdx, &lenNode);
2652 BaseNode *lenRegreadNode = mirBuilder->CreateExprRegread(PTY_u32, lenPregIdx);
2653
2654 LabelIdx labIdx = GetLabelIdx(*curFunc);
2655 LabelNode *labelBC = mirBuilder->CreateStmtLabel(labIdx);
2657 BaseNode *cond = mirBuilder->CreateExprCompare(OP_ge, *GlobalTables::GetTypeTable().GetUInt1(),
2658 *GlobalTables::GetTypeTable().GetUInt32(),
2659 arrayNode.GetNopndAt(1), lenRegreadNode);
2660 CondGotoNode *brFalseNode = mirBuilder->CreateStmtCondGoto(cond, OP_brfalse, labIdx);
2661 MIRFunction *fn = mirBuilder->GetOrCreateFunction("MCC_Array_Boundary_Check", TyIdx(PTY_void));
2662 fn->GetFuncSymbol()->SetAppearsInCode(true);
2663 beCommon.UpdateTypeTable(*fn->GetMIRFuncType());
2664 fn->AllocSymTab();
2665 MapleVector<BaseNode *> args(mirBuilder->GetCurrentFuncCodeMpAllocator()->Adapter());
2666 args.emplace_back(arrayNode.GetNopndAt(0));
2667 args.emplace_back(arrayNode.GetNopndAt(1));
2668 boundaryCheckStmt = mirBuilder->CreateStmtCall(fn->GetPuidx(), args);
2669 blkNode.InsertAfter(blkNode.GetLast(), lenRegassignNode);
2670 blkNode.InsertAfter(blkNode.GetLast(), brFalseNode);
2671 blkNode.InsertAfter(blkNode.GetLast(), boundaryCheckStmt);
2672 blkNode.InsertAfter(blkNode.GetLast(), labelBC);
2673 }
2674 }
2675
2676 BaseNode *CGLowerer::LowerExpr(BaseNode &parent, BaseNode &expr, BlockNode &blkNode)
2677 {
2678 bool isCvtU1Expr = (expr.GetOpCode() == OP_cvt && expr.GetPrimType() == PTY_u1 &&
2679 static_cast<TypeCvtNode &>(expr).FromType() != PTY_u1);
2680 if (expr.GetPrimType() == PTY_u1) {
2681 expr.SetPrimType(PTY_u8);
2682 }
2683 if (expr.GetOpCode() == OP_intrinsicopwithtype) {
2684 return LowerIntrinsicopwithtype(parent, static_cast<IntrinsicopNode &>(expr), blkNode);
2685 }
2686
2687 if (expr.GetOpCode() == OP_iread && expr.Opnd(0)->GetOpCode() == OP_array) {
2688 /* iread ptr <* <$MUIDDataDefTabEntry>> 1 (
2689 * array 0 ptr <* <[5] <$MUIDDataDefTabEntry>>> (addrof ...
2690 * ==>
2691 * intrinsicop a64 MPL_READ_STATIC_OFFSET_TAB (addrof ..
2692 */
2693 BaseNode *node = LowerExpr(expr, *expr.Opnd(0), blkNode);
2694 if (node->GetOpCode() == OP_intrinsicop) {
2695 auto *binNode = static_cast<IntrinsicopNode *>(node);
2696 CHECK_FATAL(binNode->GetIntrinsic() == INTRN_MPL_READ_STATIC_OFFSET_TAB, "Something wrong here");
2697 return binNode;
2698 } else {
2699 expr.SetOpnd(node, 0);
2700 }
2701 } else {
2702 for (size_t i = 0; i < expr.NumOpnds(); ++i) {
2703 expr.SetOpnd(LowerExpr(expr, *expr.Opnd(i), blkNode), i);
2704 }
2705 }
2706 // Convert `cvt u1 xx <expr>` to `ne u8 xx (<expr>, constval xx 0)`
2707 // No need to convert `cvt u1 u1 <expr>`
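// e.g. `cvt u1 i32 (dread i32 $x)`  ==>  `ne u8 i32 (dread i32 $x, constval i32 0)`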
2708 if (isCvtU1Expr) {
2709 auto &cvtExpr = static_cast<TypeCvtNode &>(expr);
2710 PrimType fromType = cvtExpr.FromType();
2711 auto *fromMIRType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(TyIdx(fromType));
2712 // We use u8 instead of u1 because codegen can't recognize u1
2713 auto *toMIRType = GlobalTables::GetTypeTable().GetUInt8();
2714 auto *zero = GlobalTables::GetIntConstTable().GetOrCreateIntConst(0, *fromMIRType);
2715 auto *converted = mirBuilder->CreateExprCompare(OP_ne, *toMIRType, *fromMIRType, cvtExpr.Opnd(0),
2716 mirBuilder->CreateConstval(zero));
2717 return converted;
2718 }
2719 switch (expr.GetOpCode()) {
2720 case OP_array: {
2721 ProcessArrayExpr(expr, blkNode);
2722 if (!mirModule.IsCModule()) {
2723 return LowerArray(static_cast<ArrayNode &>(expr), parent);
2724 } else {
2725 return LowerCArray(static_cast<ArrayNode &>(expr));
2726 }
2727 }
2728
2729 case OP_dread:
2730 return LowerDread(static_cast<DreadNode &>(expr), blkNode);
2731
2732 case OP_addrof:
2733 return LowerAddrof(static_cast<AddrofNode &>(expr));
2734
2735 case OP_iread:
2736 return LowerIread(static_cast<IreadNode &>(expr));
2737
2738 case OP_iaddrof:
2739 return LowerIaddrof(static_cast<IreadNode &>(expr));
2740
2741 case OP_select:
2742 if (IsComplexSelect(static_cast<TernaryNode &>(expr))) {
2743 return LowerComplexSelect(static_cast<TernaryNode &>(expr), parent, blkNode);
2744 } else if (mirModule.GetFlavor() != kFlavorLmbc) {
2745 return SplitTernaryNodeResult(static_cast<TernaryNode &>(expr), parent, blkNode);
2746 } else {
2747 return &expr;
2748 }
2749
2750 case OP_sizeoftype: {
2751 CHECK(static_cast<SizeoftypeNode &>(expr).GetTyIdx() < beCommon.GetSizeOfTypeSizeTable(),
2752 "index out of range in CGLowerer::LowerExpr");
2753 int64 typeSize = beCommon.GetTypeSize(static_cast<SizeoftypeNode &>(expr).GetTyIdx());
2754 return mirModule.GetMIRBuilder()->CreateIntConst(typeSize, PTY_u32);
2755 }
2756
2757 case OP_fieldsdist: {
2758 auto &fdNode = static_cast<FieldsDistNode &>(expr);
2759 CHECK(fdNode.GetTyIdx() < beCommon.GetSizeOfTypeSizeTable(), "index out of range in CGLowerer::LowerExpr");
2760 MIRType *ty = GlobalTables::GetTypeTable().GetTypeFromTyIdx(fdNode.GetTyIdx());
2761 CHECK(ty->GetKind() == kTypeClass, "wrong type for FieldsDistNode");
2762 MIRClassType *classType = static_cast<MIRClassType *>(ty);
2763 const JClassLayout &layout = beCommon.GetJClassLayout(*classType);
2764 DEBUG_ASSERT(!layout.empty(), "container should not be empty");
2765 int32 i1 = fdNode.GetFieldID1() > 0 ? fdNode.GetFieldID1() - 1 : 0;
2766 int32 i2 = fdNode.GetFieldID2() > 0 ? fdNode.GetFieldID2() - 1 : 0;
2767 int64 offset = layout[i2].GetOffset() - layout[i1].GetOffset();
2768 return mirModule.GetMIRBuilder()->CreateIntConst(offset, PTY_u32);
2769 }
2770
2771 case OP_intrinsicop:
2772 if (IsIntrinsicOpHandledAtLowerLevel(static_cast<IntrinsicopNode &>(expr).GetIntrinsic())) {
2773 return &expr;
2774 }
2775 return LowerIntrinsicop(parent, static_cast<IntrinsicopNode &>(expr), blkNode);
2776
2777 case OP_alloca: {
2778 GetCurrentFunc()->SetVlaOrAlloca(true);
2779 return &expr;
2780 }
2781 case OP_rem:
2782 return LowerRem(expr, blkNode);
2783
2784 case OP_cand:
2785 expr.SetOpCode(OP_land);
2786 return SplitBinaryNodeOpnd1(static_cast<BinaryNode &>(expr), blkNode);
2787 case OP_cior:
2788 expr.SetOpCode(OP_lior);
2789 return SplitBinaryNodeOpnd1(static_cast<BinaryNode &>(expr), blkNode);
2790 case OP_cvt:
2791 case OP_retype:
2792 case OP_zext:
2793 case OP_sext:
2794 return LowerCastExpr(expr);
2795 default:
2796 return &expr;
2797 }
2798 }
2799
2800 BaseNode *CGLowerer::LowerDread(DreadNode &dread, const BlockNode &block)
2801 {
2802 /* use PTY_u8 for boolean type in dread/iread */
2803 if (dread.GetPrimType() == PTY_u1) {
2804 dread.SetPrimType(PTY_u8);
2805 }
2806 return (dread.GetFieldID() == 0 ? LowerDreadToThreadLocal(dread, block) : LowerDreadBitfield(dread));
2807 }
2808
2809 void CGLowerer::LowerRegassign(RegassignNode &regNode, BlockNode &newBlk)
2810 {
2811 BaseNode *rhsOpnd = regNode.Opnd(0);
2812 Opcode op = rhsOpnd->GetOpCode();
2813 if ((op == OP_gcmalloc) || (op == OP_gcpermalloc)) {
2814 LowerGCMalloc(regNode, static_cast<GCMallocNode &>(*rhsOpnd), newBlk, op == OP_gcpermalloc);
2815 return;
2816 } else if ((op == OP_gcmallocjarray) || (op == OP_gcpermallocjarray)) {
2817 LowerJarrayMalloc(regNode, static_cast<JarrayMallocNode &>(*rhsOpnd), newBlk, op == OP_gcpermallocjarray);
2818 return;
2819 } else {
2820 regNode.SetOpnd(LowerExpr(regNode, *rhsOpnd, newBlk), 0);
2821 newBlk.AddStatement(&regNode);
2822 }
2823 }
2824
2825 BaseNode *CGLowerer::ExtractSymbolAddress(const StIdx &stIdx)
2826 {
2827 auto builder = mirModule.GetMIRBuilder();
2828 return builder->CreateExprAddrof(0, stIdx);
2829 }
2830
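/*
 * For a dread of a thread-local global, the direct read is rewritten as an indirect read
 * through the symbol's address, roughly:
 *   dread <T> $tlsVar <fieldID>  ==>  iread <T> <* T> <fieldID> (addrof ptr $tlsVar)
 * Any MIR types created along the way are registered with BECommon afterwards.
 */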
2831 BaseNode *CGLowerer::LowerDreadToThreadLocal(BaseNode &expr, const BlockNode &block)
2832 {
2833 auto *result = &expr;
2834 if (expr.GetOpCode() != maple::OP_dread) {
2835 return result;
2836 }
2837 uint32 oldTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
2838 auto dread = static_cast<DreadNode &>(expr);
2839 StIdx stIdx = dread.GetStIdx();
2840 if (!stIdx.IsGlobal()) {
2841 return result;
2842 }
2843 MIRSymbol *symbol = GlobalTables::GetGsymTable().GetSymbolFromStidx(stIdx.Idx());
2844
2845 if (symbol->IsThreadLocal()) {
2846 // iread <* u32> 0 (regread u64 %addr)
2847 auto addr = ExtractSymbolAddress(stIdx);
2848 auto ptrType = GlobalTables::GetTypeTable().GetOrCreatePointerType(*symbol->GetType());
2849 auto iread = mirModule.GetMIRBuilder()->CreateExprIread(*symbol->GetType(), *ptrType, dread.GetFieldID(), addr);
2850 result = iread;
2851 }
2852 uint32 newTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
2853 if (newTypeTableSize != oldTypeTableSize) {
2854 beCommon.AddNewTypeAfterBecommon(oldTypeTableSize, newTypeTableSize);
2855 }
2856 return result;
2857 }
2858
2859 StmtNode *CGLowerer::LowerDassignToThreadLocal(StmtNode &stmt, const BlockNode &block)
2860 {
2861 StmtNode *result = &stmt;
2862 if (stmt.GetOpCode() != maple::OP_dassign) {
2863 return result;
2864 }
2865 uint32 oldTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
2866 auto dAssign = static_cast<DassignNode &>(stmt);
2867 StIdx stIdx = dAssign.GetStIdx();
2868 if (!stIdx.IsGlobal()) {
2869 return result;
2870 }
2871 MIRSymbol *symbol = GlobalTables::GetGsymTable().GetSymbolFromStidx(stIdx.Idx());
2872 if (symbol->IsThreadLocal()) {
2873 // iassign <* u32> 0 (regread u64 %addr, dread u32 $x)
2874 auto addr = ExtractSymbolAddress(stIdx);
2875 auto ptrType = GlobalTables::GetTypeTable().GetOrCreatePointerType(*symbol->GetType());
2876 auto iassign =
2877 mirModule.GetMIRBuilder()->CreateStmtIassign(*ptrType, dAssign.GetFieldID(), addr, dAssign.GetRHS());
2878 result = iassign;
2879 }
2880 uint32 newTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
2881 if (newTypeTableSize != oldTypeTableSize) {
2882 beCommon.AddNewTypeAfterBecommon(oldTypeTableSize, newTypeTableSize);
2883 }
2884 return result;
2885 }
2886
2887 void CGLowerer::LowerDassign(DassignNode &dsNode, BlockNode &newBlk)
2888 {
2889 StmtNode *newStmt = nullptr;
2890 BaseNode *rhs = nullptr;
2891 Opcode op = dsNode.GetRHS()->GetOpCode();
2892 if (dsNode.GetFieldID() != 0) {
2893 newStmt = LowerDassignBitfield(dsNode, newBlk);
2894 } else if (op == OP_intrinsicop) {
2895 IntrinsicopNode *intrinNode = static_cast<IntrinsicopNode *>(dsNode.GetRHS());
2896 MIRType *retType = IntrinDesc::intrinTable[intrinNode->GetIntrinsic()].GetReturnType();
2897 CHECK_FATAL(retType != nullptr, "retType should not be nullptr");
2898 if (retType->GetKind() == kTypeStruct) {
2899 newStmt = LowerIntrinsicopDassign(dsNode, *intrinNode, newBlk);
2900 } else {
2901 rhs = LowerExpr(dsNode, *intrinNode, newBlk);
2902 dsNode.SetRHS(rhs);
2903 CHECK_FATAL(dsNode.GetRHS() != nullptr, "dsNode->rhs is null in CGLowerer::LowerDassign");
2904 if (!IsDassignNOP(dsNode)) {
2905 newStmt = &dsNode;
2906 }
2907 }
2908 } else if ((op == OP_gcmalloc) || (op == OP_gcpermalloc)) {
2909 LowerGCMalloc(dsNode, static_cast<GCMallocNode &>(*dsNode.GetRHS()), newBlk, op == OP_gcpermalloc);
2910 return;
2911 } else if ((op == OP_gcmallocjarray) || (op == OP_gcpermallocjarray)) {
2912 LowerJarrayMalloc(dsNode, static_cast<JarrayMallocNode &>(*dsNode.GetRHS()), newBlk,
2913 op == OP_gcpermallocjarray);
2914 return;
2915 } else {
2916 rhs = LowerExpr(dsNode, *dsNode.GetRHS(), newBlk);
2917 dsNode.SetRHS(rhs);
2918 newStmt = &dsNode;
2919 }
2920
2921 if (newStmt != nullptr) {
2922 newBlk.AddStatement(LowerDassignToThreadLocal(*newStmt, newBlk));
2923 }
2924 }
2925
2926 // Lower stmt Form
2927 // Initial form: decrefreset (addrof ptr %RegX_RXXXX)
2928 // Convert to form: dassign %RegX_RXXXX 0 (constval ref 0)
2929 // Final form: str xzr, [x29,#XX]
2930 void CGLowerer::LowerResetStmt(StmtNode &stmt, BlockNode &block)
2931 {
2932 UnaryStmtNode &unaryStmtNode = static_cast<UnaryStmtNode &>(stmt);
2933 AddrofNode *addrofNode = static_cast<AddrofNode *>(unaryStmtNode.GetRHS());
2934 MIRType &type = *GlobalTables::GetTypeTable().GetPrimType(PTY_ref);
2935 MIRConst *constVal = GlobalTables::GetIntConstTable().GetOrCreateIntConst(0, type);
2936 ConstvalNode *exprConst = mirModule.CurFuncCodeMemPool()->New<ConstvalNode>();
2937 exprConst->SetPrimType(type.GetPrimType());
2938 exprConst->SetConstVal(constVal);
2939 DassignNode *dassignNode = mirModule.CurFuncCodeMemPool()->New<DassignNode>();
2940 dassignNode->SetStIdx(addrofNode->GetStIdx());
2941 dassignNode->SetRHS(exprConst);
2942 dassignNode->SetFieldID(addrofNode->GetFieldID());
2943 block.AddStatement(dassignNode);
2944 }
2945
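/*
 * Lowers `dassign $dst (intrinsicop ...)` for a struct-returning JS intrinsic into a call
 * that takes the destination address as its first argument, roughly:
 *   call &<intrinsic-name> (addrof a32 $dst, <address of each original operand> ...)
 */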
2946 StmtNode *CGLowerer::LowerIntrinsicopDassign(const DassignNode &dsNode, IntrinsicopNode &intrinNode, BlockNode &newBlk)
2947 {
2948 for (size_t i = 0; i < intrinNode.GetNumOpnds(); ++i) {
2949 DEBUG_ASSERT(intrinNode.Opnd(i) != nullptr, "intrinNode.Opnd(i) should not be nullptr");
2950 intrinNode.SetOpnd(LowerExpr(intrinNode, *intrinNode.Opnd(i), newBlk), i);
2951 }
2952 MIRIntrinsicID intrnID = intrinNode.GetIntrinsic();
2953 IntrinDesc *intrinDesc = &IntrinDesc::intrinTable[intrnID];
2954 MIRSymbol *st = GlobalTables::GetGsymTable().CreateSymbol(kScopeGlobal);
2955 const std::string name = intrinDesc->name;
2956 CHECK_FATAL(intrinDesc->name != nullptr, "intrinDesc's name should not be nullptr");
2957 st->SetNameStrIdx(name);
2958 st->SetStorageClass(kScText);
2959 st->SetSKind(kStFunc);
2960 MIRFunction *fn = mirModule.GetMemPool()->New<MIRFunction>(&mirModule, st->GetStIdx());
2961 MapleVector<BaseNode *> &nOpnds = intrinNode.GetNopnd();
2962 st->SetFunction(fn);
2963 std::vector<TyIdx> fnTyVec;
2964 std::vector<TypeAttrs> fnTaVec;
2965 CHECK_FATAL(intrinDesc->IsJsOp(), "intrinDesc should be JsOp");
2966 /* setup parameters */
2967 for (uint32 i = 0; i < nOpnds.size(); ++i) {
2968 fnTyVec.emplace_back(GlobalTables::GetTypeTable().GetTypeFromTyIdx(PTY_a32)->GetTypeIndex());
2969 fnTaVec.emplace_back(TypeAttrs());
2970 BaseNode *addrNode = beCommon.GetAddressOfNode(*nOpnds[i]);
2971 CHECK_FATAL(addrNode != nullptr, "addrNode should not be nullptr");
2972 nOpnds[i] = addrNode;
2973 }
2974 MIRSymbol *dst = mirModule.CurFunction()->GetLocalOrGlobalSymbol(dsNode.GetStIdx());
2975 MIRType *ty = dst->GetType();
2976 MIRType *fnType = beCommon.BeGetOrCreateFunctionType(ty->GetTypeIndex(), fnTyVec, fnTaVec);
2977 st->SetTyIdx(fnType->GetTypeIndex());
2978 fn->SetMIRFuncType(static_cast<MIRFuncType *>(fnType));
2979 fn->SetReturnTyIdx(ty->GetTypeIndex());
2980 CHECK_FATAL(ty->GetKind() == kTypeStruct, "ty's kind should be struct type");
2981 CHECK_FATAL(dsNode.GetFieldID() == 0, "dsNode's fieldID should be 0");
2982 AddrofNode *addrofNode = mirBuilder->CreateAddrof(*dst, PTY_a32);
2983 MapleVector<BaseNode *> newOpnd(mirModule.CurFuncCodeMemPoolAllocator()->Adapter());
2984 newOpnd.emplace_back(addrofNode);
2985 (void)newOpnd.insert(newOpnd.end(), nOpnds.begin(), nOpnds.end());
2986 CallNode *callStmt = mirModule.CurFuncCodeMemPool()->New<CallNode>(mirModule, OP_call);
2987 callStmt->SetPUIdx(st->GetFunction()->GetPuidx());
2988 callStmt->SetNOpnd(newOpnd);
2989 return callStmt;
2990 }
2991
2992 /* From maple_ir/include/dex2mpl/dexintrinsic.def
2993 * JAVA_ARRAY_LENGTH
2994 * JAVA_ARRAY_FILL
2995 * JAVA_FILL_NEW_ARRAY
2996 * JAVA_CHECK_CAST
2997 * JAVA_CONST_CLASS
2998 * JAVA_INSTANCE_OF
2999 * JAVA_MERGE
3000 * JAVA_RANDOM
3001 * #if DEXHACK
3002 * JAVA_PRINTLN
3003 * #endif
3004 * INTRN_<<name>>
3005 * intrinsic
3006 */
3007 BaseNode *CGLowerer::LowerJavascriptIntrinsicop(IntrinsicopNode &intrinNode, const IntrinDesc &desc)
3008 {
3009 MIRSymbol *st = GlobalTables::GetGsymTable().CreateSymbol(kScopeGlobal);
3010 CHECK_FATAL(desc.name != nullptr, "desc's name should not be nullptr");
3011 const std::string name = desc.name;
3012 st->SetNameStrIdx(name);
3013 st->SetStorageClass(kScText);
3014 st->SetSKind(kStFunc);
3015 MIRFunction *fn = mirModule.GetMemPool()->New<MIRFunction>(&mirModule, st->GetStIdx());
3016 MapleVector<BaseNode *> &nOpnds = intrinNode.GetNopnd();
3017 st->SetFunction(fn);
3018 std::vector<TyIdx> fnTyVec;
3019 std::vector<TypeAttrs> fnTaVec;
3020 CHECK_FATAL(desc.IsJsOp(), "desc should be jsOp");
3021 /* setup parameters */
3022 for (uint32 i = 0; i < nOpnds.size(); ++i) {
3023 fnTyVec.emplace_back(GlobalTables::GetTypeTable().GetTypeFromTyIdx(PTY_a32)->GetTypeIndex());
3024 fnTaVec.emplace_back(TypeAttrs());
3025 BaseNode *addrNode = beCommon.GetAddressOfNode(*nOpnds[i]);
3026 CHECK_FATAL(addrNode != nullptr, "can not get address");
3027 nOpnds[i] = addrNode;
3028 }
3029
3030 MIRType *retType = desc.GetReturnType();
3031 CHECK_FATAL(retType != nullptr, "retType should not be nullptr");
3032 if (retType->GetKind() == kTypeStruct) {
3033 /* create a local symbol and dread it; */
3034 std::string tmpstr("__ret_struct_tmp_st");
3035 static uint32 tmpIdx = 0;
3036 tmpstr += std::to_string(tmpIdx++);
3037 MIRSymbol *tmpSt = mirBuilder->GetOrCreateDeclInFunc(tmpstr, *retType, *mirModule.CurFunction());
3038 MIRType *fnType = beCommon.BeGetOrCreateFunctionType(retType->GetTypeIndex(), fnTyVec, fnTaVec);
3039 st->SetTyIdx(fnType->GetTypeIndex());
3040 fn->SetMIRFuncType(static_cast<MIRFuncType *>(fnType));
3041 AddrofNode *addrofNode = mirBuilder->CreateAddrof(*tmpSt, PTY_a32);
3042 MapleVector<BaseNode *> newOpnd(mirModule.CurFuncCodeMemPoolAllocator()->Adapter());
3043 newOpnd.emplace_back(addrofNode);
3044 (void)newOpnd.insert(newOpnd.end(), nOpnds.begin(), nOpnds.end());
3045 CallNode *callStmt = mirModule.CurFuncCodeMemPool()->New<CallNode>(mirModule, OP_call);
3046 callStmt->SetPUIdx(st->GetFunction()->GetPuidx());
3047 callStmt->SetNOpnd(newOpnd);
3048 currentBlock->AddStatement(callStmt);
3049 /* return the dread */
3050 AddrofNode *drRetSt = mirBuilder->CreateDread(*tmpSt, PTY_agg);
3051 return drRetSt;
3052 }
3053 CHECK_FATAL(st->GetStIdx().FullIdx() != 0, "the fullIdx of st's stIdx should not equal 0");
3054 CallNode *callStmt = static_cast<CallNode *>(mirBuilder->CreateStmtCall(st->GetStIdx().FullIdx(), nOpnds));
3055 currentBlock->AddStatement(callStmt);
3056 PrimType promotedPrimType = intrinNode.GetPrimType() == PTY_u1 ? PTY_u32 : intrinNode.GetPrimType();
3057 BaseNode *drRetSt = mirBuilder->CreateExprRegread(promotedPrimType, -kSregRetval0);
3058 /*
3059 * for safety, assign the return value to a register and return a read of that register
3060 * to avoid code such as:
3061 * call $__js_int32 (addrof ptr %temp_var_8 0)
3062 * call $__jsop_getelem (addrof a32 %temp_var_9 0, addrof a32 $arr 0, dread i32 %%retval 0)
3063 * on many targets, the first actual parameter and the return value would both use R0, which would make the
3064 * above case fail
3065 */
3066 PregIdx tmpRegIdx = GetCurrentFunc()->GetPregTab()->CreatePreg(promotedPrimType);
3067 RegassignNode *dstoReg = mirBuilder->CreateStmtRegassign(promotedPrimType, tmpRegIdx, drRetSt);
3068 currentBlock->AddStatement(dstoReg);
3069 RegreadNode *outDsNode = mirBuilder->CreateExprRegread(promotedPrimType, tmpRegIdx);
3070 return outDsNode;
3071 }
3072
3073 StmtNode *CGLowerer::CreateStmtCallWithReturnValue(const IntrinsicopNode &intrinNode, const MIRSymbol &ret, PUIdx bFunc,
3074 BaseNode *extraInfo) const
3075 {
3076 MapleVector<BaseNode *> args(mirBuilder->GetCurrentFuncCodeMpAllocator()->Adapter());
3077 for (size_t i = 0; i < intrinNode.NumOpnds(); ++i) {
3078 args.emplace_back(intrinNode.Opnd(i));
3079 }
3080 if (extraInfo != nullptr) {
3081 args.emplace_back(extraInfo);
3082 }
3083 return mirBuilder->CreateStmtCallAssigned(bFunc, args, &ret, OP_callassigned);
3084 }
3085
3086 StmtNode *CGLowerer::CreateStmtCallWithReturnValue(const IntrinsicopNode &intrinNode, PregIdx retpIdx, PUIdx bFunc,
3087 BaseNode *extraInfo) const
3088 {
3089 MapleVector<BaseNode *> args(mirBuilder->GetCurrentFuncCodeMpAllocator()->Adapter());
3090 for (size_t i = 0; i < intrinNode.NumOpnds(); ++i) {
3091 args.emplace_back(intrinNode.Opnd(i));
3092 }
3093 if (extraInfo != nullptr) {
3094 args.emplace_back(extraInfo);
3095 }
3096 return mirBuilder->CreateStmtCallRegassigned(bFunc, args, retpIdx, OP_callassigned);
3097 }
3098
3099 BaseNode *CGLowerer::LowerIntrinJavaMerge(const BaseNode &parent, IntrinsicopNode &intrinNode)
3100 {
3101 BaseNode *resNode = &intrinNode;
3102 CHECK_FATAL(intrinNode.GetNumOpnds() > 0, "invalid JAVA_MERGE intrinsic node");
3103 BaseNode *candidate = intrinNode.Opnd(0);
3104 DEBUG_ASSERT(candidate != nullptr, "candidate should not be nullptr");
3105 resNode = candidate;
3106 if (parent.GetOpCode() == OP_regassign) {
3107 PrimType sTyp = resNode->GetPrimType();
3108 auto &regAssign = static_cast<const RegassignNode &>(parent);
3109 PrimType pType = GetCurrentFunc()->GetPregTab()->PregFromPregIdx(regAssign.GetRegIdx())->GetPrimType();
3110 if (sTyp != pType) {
3111 resNode = MergeToCvtType(pType, sTyp, *resNode);
3112 }
3113 return resNode;
3114 }
3115 if (parent.GetOpCode() == OP_dassign) {
3116 auto &dassign = static_cast<const DassignNode &>(parent);
3117 if (candidate->GetOpCode() == OP_constval) {
3118 MIRSymbol *dest = GetCurrentFunc()->GetLocalOrGlobalSymbol(dassign.GetStIdx());
3119 MIRType *toType = dest->GetType();
3120 PrimType dTyp = toType->GetPrimType();
3121 PrimType sTyp = resNode->GetPrimType();
3122 if (dTyp != sTyp) {
3123 resNode = MergeToCvtType(dTyp, sTyp, *resNode);
3124 }
3125 return resNode;
3126 }
3127 CHECK_FATAL((candidate->GetOpCode() == OP_dread) || (candidate->GetOpCode() == OP_regread),
3128 "candidate's opcode should be OP_dread or OP_regread");
3129 bool differentLocation =
3130 (candidate->GetOpCode() == OP_dread)
3131 ? !IsAccessingTheSameMemoryLocation(dassign, static_cast<DreadNode &>(*candidate))
3132 : !IsAccessingTheSameMemoryLocation(dassign, static_cast<RegreadNode &>(*candidate), *this);
3133 if (differentLocation) {
3134 bool simpleMove = false;
3135 /* res_node already contains the 0-th operand. */
3136 for (size_t i = 1; i < intrinNode.GetNumOpnds(); ++i) {
3137 candidate = intrinNode.Opnd(i);
3138 DEBUG_ASSERT(candidate != nullptr, "candidate should not be nullptr");
3139 bool sameLocation =
3140 (candidate->GetOpCode() == OP_dread)
3141 ? IsAccessingTheSameMemoryLocation(dassign, static_cast<DreadNode &>(*candidate))
3142 : IsAccessingTheSameMemoryLocation(dassign, static_cast<RegreadNode &>(*candidate), *this);
3143 if (sameLocation) {
3144 simpleMove = true;
3145 resNode = candidate;
3146 break;
3147 }
3148 }
3149 if (!simpleMove) {
3150 /* if source and destination types don't match, insert 'retype' */
3151 MIRSymbol *dest = GetCurrentFunc()->GetLocalOrGlobalSymbol(dassign.GetStIdx());
3152 MIRType *toType = dest->GetType();
3153 PrimType dTyp = toType->GetPrimType();
3154 CHECK_FATAL((dTyp != PTY_agg) && (dassign.GetFieldID() <= 0),
3155 "dType should not be PTY_agg and dassign's filedId <= 0");
3156 PrimType sType = resNode->GetPrimType();
3157 if (dTyp != sType) {
3158 resNode = MergeToCvtType(dTyp, sType, *resNode);
3159 }
3160 }
3161 }
3162 return resNode;
3163 }
3164 CHECK_FATAL(false, "should not run here");
3165 return resNode;
3166 }
3167
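/*
 * JAVA_ARRAY_LENGTH is lowered inline when the array operand is a64/ref and the parent is a
 * regassign/dassign/ge: a null check branches around a call to
 * MCC_ThrowNullArrayNullPointerException and the length field is then read directly.
 * Otherwise it falls back to a call to the builtin registered for this intrinsic.
 */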
3168 BaseNode *CGLowerer::LowerIntrinJavaArrayLength(const BaseNode &parent, IntrinsicopNode &intrinNode)
3169 {
3170 BaseNode *resNode = &intrinNode;
3171 PUIdx bFunc = GetBuiltinToUse(intrinNode.GetIntrinsic());
3172 CHECK_FATAL(bFunc != kFuncNotFound, "bFunc should not be kFuncNotFound");
3173 MIRFunction *biFunc = GlobalTables::GetFunctionTable().GetFunctionFromPuidx(bFunc);
3174
3175 BaseNode *arrAddr = intrinNode.Opnd(0);
3176 DEBUG_ASSERT(arrAddr != nullptr, "arrAddr should not be nullptr");
3177 if (((arrAddr->GetPrimType() == PTY_a64) || (arrAddr->GetPrimType() == PTY_ref)) &&
3178 ((parent.GetOpCode() == OP_regassign) || (parent.GetOpCode() == OP_dassign) || (parent.GetOpCode() == OP_ge))) {
3179 MIRType *addrType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(TyIdx(arrAddr->GetPrimType()));
3180 MIRIntConst *arrayHeaderNode = GlobalTables::GetIntConstTable().GetOrCreateIntConst(
3181 RTSupport::GetRTSupportInstance().GetArrayLengthOffset(), *addrType);
3182 BaseNode *arrayHeaderCstNode = mirModule.CurFuncCodeMemPool()->New<ConstvalNode>(arrayHeaderNode);
3183 arrayHeaderCstNode->SetPrimType(arrAddr->GetPrimType());
3184
3185 BaseNode *refLenAddr = mirBuilder->CreateExprBinary(OP_add, *addrType, arrAddr, arrayHeaderCstNode);
3186 MIRType *infoLenType = GlobalTables::GetTypeTable().GetInt32();
3187 MIRType *ptrType = beCommon.BeGetOrCreatePointerType(*infoLenType);
3188 resNode = mirBuilder->CreateExprIread(*infoLenType, *ptrType, 0, refLenAddr);
3189 auto curFunc = mirModule.CurFunction();
3190 std::string suffix = std::to_string(curFunc->GetLabelTab()->GetLabelTableSize());
3191 GStrIdx labelStrIdx = GlobalTables::GetStrTable().GetOrCreateStrIdxFromName("__label_nonnull_" + suffix);
3192 LabelIdx labIdx = curFunc->GetLabelTab()->AddLabel(labelStrIdx);
3193 LabelNode *labelNonNull = mirBuilder->CreateStmtLabel(labIdx);
3194
3195 BaseNode *cond = mirBuilder->CreateExprCompare(OP_ne, *GlobalTables::GetTypeTable().GetUInt1(),
3196 *GlobalTables::GetTypeTable().GetRef(), arrAddr,
3197 mirBuilder->CreateIntConst(0, PTY_ref));
3198 CondGotoNode *brtrueNode = mirBuilder->CreateStmtCondGoto(cond, OP_brtrue, labIdx);
3199
3200 MIRFunction *newFunc = mirBuilder->GetOrCreateFunction("MCC_ThrowNullArrayNullPointerException",
3201 GlobalTables::GetTypeTable().GetVoid()->GetTypeIndex());
3202 newFunc->GetFuncSymbol()->SetAppearsInCode(true);
3203 beCommon.UpdateTypeTable(*newFunc->GetMIRFuncType());
3204 newFunc->AllocSymTab();
3205 MapleVector<BaseNode *> args(mirBuilder->GetCurrentFuncCodeMpAllocator()->Adapter());
3206 StmtNode *call = mirBuilder->CreateStmtCallAssigned(newFunc->GetPuidx(), args, nullptr, OP_callassigned);
3207
3208 currentBlock->AddStatement(brtrueNode);
3209 currentBlock->AppendStatementsFromBlock(*LowerCallAssignedStmt(*call));
3210 currentBlock->AddStatement(labelNonNull);
3211 return resNode;
3212 }
3213
3214 if (parent.GetOpCode() == OP_regassign) {
3215 auto &regAssign = static_cast<const RegassignNode &>(parent);
3216 StmtNode *biCall = CreateStmtCallWithReturnValue(intrinNode, regAssign.GetRegIdx(), bFunc);
3217 currentBlock->AppendStatementsFromBlock(*LowerCallAssignedStmt(*biCall));
3218 PrimType pType = GetCurrentFunc()->GetPregTab()->PregFromPregIdx(regAssign.GetRegIdx())->GetPrimType();
3219 resNode = mirBuilder->CreateExprRegread(pType, regAssign.GetRegIdx());
3220 return resNode;
3221 }
3222
3223 if (parent.GetOpCode() == OP_dassign) {
3224 auto &dassign = static_cast<const DassignNode &>(parent);
3225 MIRSymbol *ret = GetCurrentFunc()->GetLocalOrGlobalSymbol(dassign.GetStIdx());
3226 StmtNode *biCall = CreateStmtCallWithReturnValue(intrinNode, *ret, bFunc);
3227 currentBlock->AppendStatementsFromBlock(*LowerCallAssignedStmt(*biCall));
3228 resNode = mirBuilder->CreateExprDread(*biFunc->GetReturnType(), 0, *ret);
3229 return resNode;
3230 }
3231 CHECK_FATAL(false, "should not run here");
3232 return resNode;
3233 }
3234
3235 BaseNode *CGLowerer::LowerIntrinsicop(const BaseNode &parent, IntrinsicopNode &intrinNode)
3236 {
3237 BaseNode *resNode = &intrinNode;
3238 if (intrinNode.GetIntrinsic() == INTRN_JAVA_MERGE) {
3239 resNode = LowerIntrinJavaMerge(parent, intrinNode);
3240 } else if (intrinNode.GetIntrinsic() == INTRN_JAVA_ARRAY_LENGTH) {
3241 resNode = LowerIntrinJavaArrayLength(parent, intrinNode);
3242 }
3243
3244 return resNode;
3245 }
3246
3247 void CGLowerer::ProcessClassInfo(MIRType &classType, bool &classInfoFromRt, std::string &classInfo) const
3248 {
3249 MIRPtrType &ptrType = static_cast<MIRPtrType &>(classType);
3250 MIRType *pType = ptrType.GetPointedType();
3251 CHECK_FATAL(pType != nullptr, "Class type not found for INTRN_JAVA_CONST_CLASS");
3252 MIRType *typeScalar = nullptr;
3253
3254 if (pType->GetKind() == kTypeScalar) {
3255 typeScalar = pType;
3256 } else if (classType.GetKind() == kTypeScalar) {
3257 typeScalar = &classType;
3258 }
3259 if (typeScalar != nullptr) {
3260 std::string eName(GetPrimTypeJavaName(typeScalar->GetPrimType()));
3261 classInfo = PRIMITIVECLASSINFO_PREFIX_STR + eName;
3262 }
3263 if ((pType->GetKind() == kTypeByName) || (pType->GetKind() == kTypeClass) || (pType->GetKind() == kTypeInterface)) {
3264 MIRStructType *classTypeSecond = static_cast<MIRStructType *>(pType);
3265 classInfo = CLASSINFO_PREFIX_STR + classTypeSecond->GetName();
3266 } else if ((pType->GetKind() == kTypeArray) || (pType->GetKind() == kTypeJArray)) {
3267 MIRJarrayType *jarrayType = static_cast<MIRJarrayType *>(pType);
3268 CHECK_FATAL(jarrayType != nullptr, "jarrayType is null in CGLowerer::LowerIntrinsicopWithType");
3269 std::string baseName = jarrayType->GetJavaName();
3270 if (jarrayType->IsPrimitiveArray() && (jarrayType->GetDim() <= kThreeDimArray)) {
3271 classInfo = PRIMITIVECLASSINFO_PREFIX_STR + baseName;
3272 } else if (arrayNameForLower::kArrayBaseName.find(baseName) != arrayNameForLower::kArrayBaseName.end()) {
3273 classInfo = CLASSINFO_PREFIX_STR + baseName;
3274 } else {
3275 classInfoFromRt = true;
3276 classInfo = baseName;
3277 }
3278 }
3279 }
3280
3281 BaseNode *CGLowerer::GetBaseNodeFromCurFunc(MIRFunction &curFunc, bool isFromJarray)
3282 {
3283 BaseNode *baseNode = nullptr;
3284 if (curFunc.IsStatic()) {
3285 /*
3286 * it's a static function.
3287 * pass the caller function's classinfo directly
3288 */
3289 std::string callerName = CLASSINFO_PREFIX_STR;
3290 callerName += mirModule.CurFunction()->GetBaseClassName();
3291 GStrIdx strIdx = GlobalTables::GetStrTable().GetOrCreateStrIdxFromName(callerName);
3292 MIRSymbol *callerClassInfoSym = GlobalTables::GetGsymTable().GetSymbolFromStrIdx(strIdx);
3293 if (callerClassInfoSym == nullptr) {
3294 if (isFromJarray) {
3295 MIRType *mType = GlobalTables::GetTypeTable().GetVoidPtr();
3296 CHECK_FATAL(mType != nullptr, "type is null in CGLowerer::LowerJarrayMalloc");
3297 callerClassInfoSym = mirBuilder->CreateGlobalDecl(callerName.c_str(), *mType);
3298 callerClassInfoSym->SetStorageClass(kScExtern);
3299 } else {
3300 callerClassInfoSym = GlobalTables::GetGsymTable().CreateSymbol(kScopeGlobal);
3301 callerClassInfoSym->SetNameStrIdx(strIdx);
3302 callerClassInfoSym->SetStorageClass(kScGlobal);
3303 callerClassInfoSym->SetSKind(kStVar);
3304 /* it must be a local symbol */
3305 GlobalTables::GetGsymTable().AddToStringSymbolMap(*callerClassInfoSym);
3306 callerClassInfoSym->SetTyIdx(static_cast<TyIdx>(PTY_ptr));
3307 }
3308 }
3309
3310 baseNode = mirBuilder->CreateExprAddrof(0, *callerClassInfoSym);
3311 } else {
3312 /*
3313 * it's an instance function.
3314 * pass caller function's this pointer
3315 */
3316 CHECK_FATAL(curFunc.GetFormalCount() != 0, "index out of range in CGLowerer::GetBaseNodeFromCurFunc");
3317 MIRSymbol *formalSt = curFunc.GetFormal(0);
3318 if (formalSt->IsPreg()) {
3319 if (isFromJarray) {
3320 baseNode = mirBuilder->CreateExprRegread(
3321 formalSt->GetType()->GetPrimType(),
3322 curFunc.GetPregTab()->GetPregIdxFromPregno(formalSt->GetPreg()->GetPregNo()));
3323 } else {
3324 CHECK_FATAL(curFunc.GetParamSize() != 0, "index out of range in CGLowerer::GetBaseNodeFromCurFunc");
3325 baseNode = mirBuilder->CreateExprRegread(
3326 (curFunc.GetNthParamType(0))->GetPrimType(),
3327 curFunc.GetPregTab()->GetPregIdxFromPregno(formalSt->GetPreg()->GetPregNo()));
3328 }
3329 } else {
3330 baseNode = mirBuilder->CreateExprDread(*formalSt);
3331 }
3332 }
3333 return baseNode;
3334 }
3335
3336 BaseNode *CGLowerer::GetClassInfoExprFromRuntime(const std::string &classInfo)
3337 {
3338 /*
3339 * generate runtime call to get class information
3340 * jclass __mrt_getclass(jobject caller, const char *name)
3341 * if the calling function is an instance function, it's the calling obj
3342 * if the calling function is a static function, it's the calling class
3343 */
3344 BaseNode *classInfoExpr = nullptr;
3345 PUIdx getClassFunc = GetBuiltinToUse(INTRN_JAVA_GET_CLASS);
3346 CHECK_FATAL(getClassFunc != kFuncNotFound, "classfunc is not found");
3347 /* return jclass */
3348 MIRType *voidPtrType = GlobalTables::GetTypeTable().GetPtr();
3349 MIRSymbol *ret0 = CreateNewRetVar(*voidPtrType, kIntrnRetValPrefix);
3350
3351 BaseNode *arg0 = GetBaseNodeFromCurFunc(*mirModule.CurFunction(), false);
3352 BaseNode *arg1 = nullptr;
3353 /* classname */
3354 std::string klassJavaDescriptor;
3355 namemangler::DecodeMapleNameToJavaDescriptor(classInfo, klassJavaDescriptor);
3356 UStrIdx classNameStrIdx = GlobalTables::GetUStrTable().GetOrCreateStrIdxFromName(klassJavaDescriptor);
3357 arg1 = mirModule.GetMemPool()->New<ConststrNode>(classNameStrIdx);
3358 arg1->SetPrimType(PTY_ptr);
3359
3360 MapleVector<BaseNode *> args(mirBuilder->GetCurrentFuncCodeMpAllocator()->Adapter());
3361 args.emplace_back(arg0);
3362 args.emplace_back(arg1);
3363 StmtNode *getClassCall = mirBuilder->CreateStmtCallAssigned(getClassFunc, args, ret0, OP_callassigned);
3364 currentBlock->AppendStatementsFromBlock(*LowerCallAssignedStmt(*getClassCall));
3365 classInfoExpr = mirBuilder->CreateExprDread(*voidPtrType, 0, *ret0);
3366 return classInfoExpr;
3367 }
3368
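/*
 * Looks the array class up in the per-file array-class cache table; returns nullptr if the
 * descriptor or the cache symbol is unknown. The entry is addressed as
 *   <addrof cache table> + index * <entry width>   (4 bytes with USE_32BIT_REF, 8 otherwise)
 * and is read through the MPL_READ_ARRAYCLASS_CACHE_ENTRY intrinsic.
 */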
3369 BaseNode *CGLowerer::GetClassInfoExprFromArrayClassCache(const std::string &classInfo)
3370 {
3371 std::string klassJavaDescriptor;
3372 namemangler::DecodeMapleNameToJavaDescriptor(classInfo, klassJavaDescriptor);
3373 if (arrayClassCacheIndex.find(klassJavaDescriptor) == arrayClassCacheIndex.end()) {
3374 return nullptr;
3375 }
3376 GStrIdx strIdx = GlobalTables::GetStrTable().GetStrIdxFromName(namemangler::kArrayClassCacheTable +
3377 mirModule.GetFileNameAsPostfix());
3378 MIRSymbol *arrayClassSt = GlobalTables::GetGsymTable().GetSymbolFromStrIdx(strIdx);
3379 if (arrayClassSt == nullptr) {
3380 return nullptr;
3381 }
3382 auto index = arrayClassCacheIndex[klassJavaDescriptor];
3383 #ifdef USE_32BIT_REF
3384 const int32 width = 4;
3385 #else
3386 const int32 width = 8;
3387 #endif /* USE_32BIT_REF */
3388 int64 offset = static_cast<int64>(index) * width;
3389 ConstvalNode *offsetExpr = mirBuilder->CreateIntConst(offset, PTY_u32);
3390 AddrofNode *baseExpr = mirBuilder->CreateExprAddrof(0, *arrayClassSt, mirModule.GetMemPool());
3391 MapleVector<BaseNode *> args(mirBuilder->GetCurrentFuncCodeMpAllocator()->Adapter());
3392 args.emplace_back(baseExpr);
3393 args.emplace_back(offsetExpr);
3394 return mirBuilder->CreateExprIntrinsicop(INTRN_MPL_READ_ARRAYCLASS_CACHE_ENTRY, OP_intrinsicop,
3395 *GlobalTables::GetTypeTable().GetPrimType(PTY_ref), args);
3396 }
3397
3398 BaseNode *CGLowerer::GetClassInfoExpr(const std::string &classInfo) const
3399 {
3400 BaseNode *classInfoExpr = nullptr;
3401 GStrIdx strIdx = GlobalTables::GetStrTable().GetOrCreateStrIdxFromName(classInfo);
3402 MIRSymbol *classInfoSym = GlobalTables::GetGsymTable().GetSymbolFromStrIdx(strIdx);
3403 if (classInfoSym != nullptr) {
3404 classInfoExpr = mirBuilder->CreateExprAddrof(0, *classInfoSym);
3405 } else {
3406 classInfoSym = GlobalTables::GetGsymTable().CreateSymbol(kScopeGlobal);
3407 classInfoSym->SetNameStrIdx(strIdx);
3408 classInfoSym->SetStorageClass(kScGlobal);
3409 classInfoSym->SetSKind(kStVar);
3410 if (CGOptions::IsPIC()) {
3411 classInfoSym->SetStorageClass(kScExtern);
3412 } else {
3413 classInfoSym->SetAttr(ATTR_weak);
3414 }
3415 GlobalTables::GetGsymTable().AddToStringSymbolMap(*classInfoSym);
3416 classInfoSym->SetTyIdx(static_cast<TyIdx>(PTY_ptr));
3417
3418 classInfoExpr = mirBuilder->CreateExprAddrof(0, *classInfoSym);
3419 }
3420 return classInfoExpr;
3421 }
3422
3423 BaseNode *CGLowerer::LowerIntrinsicopWithType(const BaseNode &parent, IntrinsicopNode &intrinNode)
3424 {
3425 BaseNode *resNode = &intrinNode;
3426 if ((intrinNode.GetIntrinsic() == INTRN_JAVA_CONST_CLASS) ||
3427 (intrinNode.GetIntrinsic() == INTRN_JAVA_INSTANCE_OF)) {
3428 PUIdx bFunc = GetBuiltinToUse(intrinNode.GetIntrinsic());
3429 CHECK_FATAL(bFunc != kFuncNotFound, "bFunc not found");
3430 MIRFunction *biFunc = GlobalTables::GetFunctionTable().GetFunctionFromPuidx(bFunc);
3431 MIRType *classType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(intrinNode.GetTyIdx());
3432 std::string classInfo;
3433 BaseNode *classInfoExpr = nullptr;
3434 bool classInfoFromRt = false; /* whether the classinfo is generated by RT */
3435 ProcessClassInfo(*classType, classInfoFromRt, classInfo);
3436 if (classInfoFromRt) {
3437 classInfoExpr = GetClassInfoExprFromArrayClassCache(classInfo);
3438 if (classInfoExpr == nullptr) {
3439 classInfoExpr = GetClassInfoExprFromRuntime(classInfo);
3440 }
3441 } else {
3442 classInfoExpr = GetClassInfoExpr(classInfo);
3443 }
3444
3445 if (intrinNode.GetIntrinsic() == INTRN_JAVA_CONST_CLASS) {
3446 CHECK_FATAL(classInfoExpr != nullptr, "classInfoExpr should not be nullptr");
3447 resNode = classInfoExpr;
3448 return resNode;
3449 }
3450
3451 if (parent.GetOpCode() == OP_regassign) {
3452 auto &regAssign = static_cast<const RegassignNode &>(parent);
3453 StmtNode *biCall = CreateStmtCallWithReturnValue(intrinNode, regAssign.GetRegIdx(), bFunc, classInfoExpr);
3454 currentBlock->AppendStatementsFromBlock(*LowerCallAssignedStmt(*biCall));
3455 PrimType pTyp = GetCurrentFunc()->GetPregTab()->PregFromPregIdx(regAssign.GetRegIdx())->GetPrimType();
3456 resNode = mirBuilder->CreateExprRegread(pTyp, regAssign.GetRegIdx());
3457 return resNode;
3458 }
3459
3460 if (parent.GetOpCode() == OP_dassign) {
3461 auto &dassign = static_cast<const DassignNode &>(parent);
3462 MIRSymbol *ret = GetCurrentFunc()->GetLocalOrGlobalSymbol(dassign.GetStIdx());
3463 StmtNode *biCall = CreateStmtCallWithReturnValue(intrinNode, *ret, bFunc, classInfoExpr);
3464 currentBlock->AppendStatementsFromBlock(*LowerCallAssignedStmt(*biCall));
3465 resNode = mirBuilder->CreateExprDread(*biFunc->GetReturnType(), 0, *ret);
3466 return resNode;
3467 }
3468 CHECK_FATAL(false, "should not run here");
3469 }
3470 CHECK_FATAL(false, "should not run here");
3471 return resNode;
3472 }
3473
3474 BaseNode *CGLowerer::LowerIntrinsicop(const BaseNode &parent, IntrinsicopNode &intrinNode, BlockNode &newBlk)
3475 {
3476 for (size_t i = 0; i < intrinNode.GetNumOpnds(); ++i) {
3477 intrinNode.SetOpnd(LowerExpr(intrinNode, *intrinNode.Opnd(i), newBlk), i);
3478 }
3479
3480 MIRIntrinsicID intrnID = intrinNode.GetIntrinsic();
3481 IntrinDesc &intrinDesc = IntrinDesc::intrinTable[intrnID];
3482 if (intrinDesc.IsJS()) {
3483 return LowerJavascriptIntrinsicop(intrinNode, intrinDesc);
3484 }
3485 if (intrinDesc.IsJava()) {
3486 return LowerIntrinsicop(parent, intrinNode);
3487 }
3488 if (intrinNode.GetIntrinsic() == INTRN_MPL_READ_OVTABLE_ENTRY_LAZY) {
3489 return &intrinNode;
3490 }
3491 if (intrinNode.GetIntrinsic() == INTRN_MPL_READ_ARRAYCLASS_CACHE_ENTRY) {
3492 return &intrinNode;
3493 }
3494 if (intrnID == INTRN_C_constant_p) {
3495 BaseNode *opnd = intrinNode.Opnd(0);
3496 int64 val = (opnd->op == OP_constval || opnd->op == OP_sizeoftype || opnd->op == OP_conststr ||
3497 opnd->op == OP_conststr16)
3498 ? 1
3499 : 0;
3500 return mirModule.GetMIRBuilder()->CreateIntConst(val, PTY_i32);
3501 }
3502 if (intrnID == INTRN_C___builtin_expect) {
3503 return intrinNode.Opnd(0);
3504 }
3505 if (intrinDesc.IsVectorOp() || intrinDesc.IsAtomic()) {
3506 return &intrinNode;
3507 }
3508 CHECK_FATAL(false, "unexpected intrinsic type in CGLowerer::LowerIntrinsicop");
3509 return &intrinNode;
3510 }
3511
3512 BaseNode *CGLowerer::LowerIntrinsicopwithtype(const BaseNode &parent, IntrinsicopNode &intrinNode, BlockNode &blk)
3513 {
3514 for (size_t i = 0; i < intrinNode.GetNumOpnds(); ++i) {
3515 intrinNode.SetOpnd(LowerExpr(intrinNode, *intrinNode.Opnd(i), blk), i);
3516 }
3517 MIRIntrinsicID intrnID = intrinNode.GetIntrinsic();
3518 IntrinDesc *intrinDesc = &IntrinDesc::intrinTable[intrnID];
3519 CHECK_FATAL(!intrinDesc->IsJS(), "intrinDesc should not be js");
3520 if (intrinDesc->IsJava()) {
3521 return LowerIntrinsicopWithType(parent, intrinNode);
3522 }
3523 CHECK_FATAL(false, "should not run here");
3524 return &intrinNode;
3525 }
3526
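/*
 * INTRN_MPL_CLEAR_STACK(addr, length) is expanded into a byte-clearing loop, roughly
 * (preg/label names are illustrative):
 *   iassign <* u8> 0 (addr, 0)
 *   regassign i64 %i (constval i64 1)
 *   goto @check
 *  @body:
 *   iassign <* u8> 0 (add (addr, %i), 0)
 *   regassign i64 %i (add (%i, constval i64 1))
 *  @check:
 *   brtrue @body (lt (%i, length))
 */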
3527 StmtNode *CGLowerer::LowerIntrinsicMplClearStack(const IntrinsiccallNode &intrincall, BlockNode &newBlk)
3528 {
3529 StmtNode *newStmt =
3530 mirBuilder->CreateStmtIassign(*beCommon.BeGetOrCreatePointerType(*GlobalTables::GetTypeTable().GetUInt8()), 0,
3531 intrincall.Opnd(0), mirBuilder->GetConstUInt8(0));
3532 newBlk.AddStatement(newStmt);
3533
3534 BaseNode *length = intrincall.Opnd(1);
3535 PrimType pType = PTY_i64;
3536 PregIdx pIdx = GetCurrentFunc()->GetPregTab()->CreatePreg(pType);
3537 newStmt = mirBuilder->CreateStmtRegassign(pType, pIdx, mirBuilder->CreateIntConst(1, pType));
3538 newBlk.AddStatement(newStmt);
3539 MIRFunction *func = GetCurrentFunc();
3540
3541 const std::string &name = func->GetName() + std::string("_Lalloca_");
3542 LabelIdx label1 = GetCurrentFunc()->GetOrCreateLableIdxFromName(name + std::to_string(labelIdx++));
3543 LabelIdx label2 = GetCurrentFunc()->GetOrCreateLableIdxFromName(name + std::to_string(labelIdx++));
3544
3545 newStmt = mirBuilder->CreateStmtGoto(OP_goto, label2);
3546 newBlk.AddStatement(newStmt);
3547 LabelNode *ln = mirBuilder->CreateStmtLabel(label1);
3548 newBlk.AddStatement(ln);
3549
3550 RegreadNode *regLen = mirBuilder->CreateExprRegread(pType, pIdx);
3551
3552 BinaryNode *addr =
3553 mirBuilder->CreateExprBinary(OP_add, *GlobalTables::GetTypeTable().GetAddr64(), intrincall.Opnd(0), regLen);
3554
3555 newStmt =
3556 mirBuilder->CreateStmtIassign(*beCommon.BeGetOrCreatePointerType(*GlobalTables::GetTypeTable().GetUInt8()), 0,
3557 addr, mirBuilder->GetConstUInt8(0));
3558 newBlk.AddStatement(newStmt);
3559
3560 BinaryNode *subLen = mirBuilder->CreateExprBinary(OP_add, *GlobalTables::GetTypeTable().GetPrimType(pType), regLen,
3561 mirBuilder->CreateIntConst(1, pType));
3562 newStmt = mirBuilder->CreateStmtRegassign(pType, pIdx, subLen);
3563 newBlk.AddStatement(newStmt);
3564
3565 ln = mirBuilder->CreateStmtLabel(label2);
3566 newBlk.AddStatement(ln);
3567
3568 CompareNode *cmpExp =
3569 mirBuilder->CreateExprCompare(OP_lt, *GlobalTables::GetTypeTable().GetUInt32(),
3570 *GlobalTables::GetTypeTable().GetPrimType(pType), regLen, length);
3571 newStmt = mirBuilder->CreateStmtCondGoto(cmpExp, OP_brtrue, label1);
3572
3573 return newStmt;
3574 }
3575
3576 StmtNode *CGLowerer::LowerIntrinsicRCCall(const IntrinsiccallNode &intrincall)
3577 {
3578 /* If GCONLY is enabled, RC intrinsics are lowered in another way. */
3579 MIRIntrinsicID intrnID = intrincall.GetIntrinsic();
3580 IntrinDesc *intrinDesc = &IntrinDesc::intrinTable[intrnID];
3581
3582 /* convert intrinsic call into function call. */
3583 if (intrinFuncIDs.find(intrinDesc) == intrinFuncIDs.end()) {
3584 /* add funcid into map */
3585 MIRFunction *fn = mirBuilder->GetOrCreateFunction(intrinDesc->name, TyIdx(PTY_void));
3586 fn->GetFuncSymbol()->SetAppearsInCode(true);
3587 beCommon.UpdateTypeTable(*fn->GetMIRFuncType());
3588 fn->AllocSymTab();
3589 intrinFuncIDs[intrinDesc] = fn->GetPuidx();
3590 }
3591 CallNode *callStmt = mirModule.CurFuncCodeMemPool()->New<CallNode>(mirModule, OP_call);
3592 callStmt->SetPUIdx(intrinFuncIDs.at(intrinDesc));
3593 for (size_t i = 0; i < intrincall.GetNopndSize(); ++i) {
3594 callStmt->GetNopnd().emplace_back(intrincall.GetNopndAt(i));
3595 callStmt->SetNumOpnds(callStmt->GetNumOpnds() + 1);
3596 }
3597 return callStmt;
3598 }
3599
3600 void CGLowerer::LowerArrayStore(const IntrinsiccallNode &intrincall, BlockNode &newBlk)
3601 {
3602 bool needCheckStore = true;
3603 BaseNode *arrayNode = intrincall.Opnd(0);
3604 MIRType *arrayElemType = GetArrayNodeType(*arrayNode);
3605 BaseNode *valueNode = intrincall.Opnd(kNodeThirdOpnd);
3606 MIRType *valueRealType = GetArrayNodeType(*valueNode);
3607 if ((arrayElemType != nullptr) && (valueRealType != nullptr) && (arrayElemType->GetKind() == kTypeClass) &&
3608 static_cast<MIRClassType *>(arrayElemType)->IsFinal() && (valueRealType->GetKind() == kTypeClass) &&
3609 static_cast<MIRClassType *>(valueRealType)->IsFinal() &&
3610 (valueRealType->GetTypeIndex() == arrayElemType->GetTypeIndex())) {
3611 needCheckStore = false;
3612 }
3613
3614 if (needCheckStore) {
3615 MIRFunction *fn = mirBuilder->GetOrCreateFunction("MCC_Reflect_Check_Arraystore", TyIdx(PTY_void));
3616 fn->GetFuncSymbol()->SetAppearsInCode(true);
3617 beCommon.UpdateTypeTable(*fn->GetMIRFuncType());
3618 fn->AllocSymTab();
3619 MapleVector<BaseNode *> args(mirBuilder->GetCurrentFuncCodeMpAllocator()->Adapter());
3620 args.emplace_back(intrincall.Opnd(0));
3621 args.emplace_back(intrincall.Opnd(kNodeThirdOpnd));
3622 StmtNode *checkStoreStmt = mirBuilder->CreateStmtCall(fn->GetPuidx(), args);
3623 newBlk.AddStatement(checkStoreStmt);
3624 }
3625 }
3626
3627 StmtNode *CGLowerer::LowerDefaultIntrinsicCall(IntrinsiccallNode &intrincall, MIRSymbol &st, MIRFunction &fn)
3628 {
3629 MIRIntrinsicID intrnID = intrincall.GetIntrinsic();
3630 IntrinDesc *intrinDesc = &IntrinDesc::intrinTable[intrnID];
3631 std::vector<TyIdx> funcTyVec;
3632 std::vector<TypeAttrs> fnTaVec;
3633 MapleVector<BaseNode *> &nOpnds = intrincall.GetNopnd();
3634 MIRType *retTy = intrinDesc->GetReturnType();
3635 CHECK_FATAL(retTy != nullptr, "retTy should not be nullptr");
3636 if (retTy->GetKind() == kTypeStruct) {
3637 funcTyVec.emplace_back(beCommon.BeGetOrCreatePointerType(*retTy)->GetTypeIndex());
3638 fnTaVec.emplace_back(TypeAttrs());
3639 fn.SetReturnStruct();
3640 }
3641 for (uint32 i = 0; i < nOpnds.size(); ++i) {
3642 MIRType *argTy = intrinDesc->GetArgType(i);
3643 CHECK_FATAL(argTy != nullptr, "argTy should not be nullptr");
3644 if (argTy->GetKind() == kTypeStruct) {
3645 funcTyVec.emplace_back(GlobalTables::GetTypeTable().GetTypeFromTyIdx(PTY_a32)->GetTypeIndex());
3646 fnTaVec.emplace_back(TypeAttrs());
3647 BaseNode *addrNode = beCommon.GetAddressOfNode(*nOpnds[i]);
3648 CHECK_FATAL(addrNode != nullptr, "can not get address");
3649 nOpnds[i] = addrNode;
3650 } else {
3651 funcTyVec.emplace_back(argTy->GetTypeIndex());
3652 fnTaVec.emplace_back(TypeAttrs());
3653 }
3654 }
3655 MIRType *funcType = beCommon.BeGetOrCreateFunctionType(retTy->GetTypeIndex(), funcTyVec, fnTaVec);
3656 st.SetTyIdx(funcType->GetTypeIndex());
3657 fn.SetMIRFuncType(static_cast<MIRFuncType *>(funcType));
3658 if (retTy->GetKind() == kTypeStruct) {
3659 fn.SetReturnTyIdx(static_cast<TyIdx>(PTY_void));
3660 } else {
3661 fn.SetReturnTyIdx(retTy->GetTypeIndex());
3662 }
3663 return static_cast<CallNode *>(mirBuilder->CreateStmtCall(fn.GetPuidx(), nOpnds));
3664 }
3665
3666 StmtNode *CGLowerer::LowerIntrinsicMplCleanupLocalRefVarsSkip(IntrinsiccallNode &intrincall)
3667 {
3668 MIRFunction *mirFunc = mirModule.CurFunction();
3669 BaseNode *skipExpr = intrincall.Opnd(intrincall.NumOpnds() - 1);
3670
3671 CHECK_FATAL(skipExpr != nullptr, "should be dread");
3672 CHECK_FATAL(skipExpr->GetOpCode() == OP_dread, "should be dread");
3673 DreadNode *refNode = static_cast<DreadNode *>(skipExpr);
3674 MIRSymbol *skipSym = mirFunc->GetLocalOrGlobalSymbol(refNode->GetStIdx());
3675 if (skipSym->GetAttr(ATTR_localrefvar)) {
3676 mirFunc->InsertMIRSymbol(skipSym);
3677 }
3678 return &intrincall;
3679 }
3680
3681 StmtNode *CGLowerer::LowerIntrinsiccall(IntrinsiccallNode &intrincall, BlockNode &newBlk)
3682 {
3683 MIRIntrinsicID intrnID = intrincall.GetIntrinsic();
3684 for (size_t i = 0; i < intrincall.GetNumOpnds(); ++i) {
3685 intrincall.SetOpnd(LowerExpr(intrincall, *intrincall.Opnd(i), newBlk), i);
3686 }
3687 if (intrnID == INTRN_MPL_CLEAR_STACK) {
3688 return LowerIntrinsicMplClearStack(intrincall, newBlk);
3689 }
3690 if (intrnID == INTRN_C_va_start) {
3691 return &intrincall;
3692 }
3693 IntrinDesc *intrinDesc = &IntrinDesc::intrinTable[intrnID];
3694 if (intrinDesc->IsSpecial() || intrinDesc->IsAtomic()) {
3695 /* For special intrinsics we leave them to CGFunc::SelectIntrinCall() */
3696 return &intrincall;
3697 }
3698 /* by default, lower the intrinsic call to a real function call. */
3699 MIRSymbol *st = GlobalTables::GetGsymTable().CreateSymbol(kScopeGlobal);
3700 CHECK_FATAL(intrinDesc->name != nullptr, "intrinsic's name should not be nullptr");
3701 const std::string name = intrinDesc->name;
3702 st->SetNameStrIdx(name);
3703 st->SetStorageClass(kScText);
3704 st->SetSKind(kStFunc);
3705 MIRFunction *fn = mirBuilder->GetOrCreateFunction(intrinDesc->name, TyIdx(0));
3706 beCommon.UpdateTypeTable(*fn->GetMIRFuncType());
3707 fn->AllocSymTab();
3708 st->SetFunction(fn);
3709 st->SetAppearsInCode(true);
3710 return LowerDefaultIntrinsicCall(intrincall, *st, *fn);
3711 }
3712
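/*
 * syncenter/syncexit are lowered to calls to the registered sync builtins. For syncenter the
 * second (constval) operand selects the variant: kMCCSyncEnterFast0..3 map to
 * INTRN_FIRST/SECOND/THIRD/FOURTH_SYNC_ENTER, and a missing second operand defaults to 2.
 * Only the object operand is passed to the generated call.
 */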
3713 StmtNode *CGLowerer::LowerSyncEnterSyncExit(StmtNode &stmt)
3714 {
3715 CHECK_FATAL(stmt.GetOpCode() == OP_syncenter || stmt.GetOpCode() == OP_syncexit,
3716 "stmt's opcode should be OP_syncenter or OP_syncexit");
3717
3718 auto &nStmt = static_cast<NaryStmtNode &>(stmt);
3719 BuiltinFunctionID id;
3720 if (nStmt.GetOpCode() == OP_syncenter) {
3721 if (nStmt.NumOpnds() == 1) {
3722 /* Just as ParseNaryStmt does for syncenter */
3723 MIRType &intType = *GlobalTables::GetTypeTable().GetTypeFromTyIdx(static_cast<TyIdx>(PTY_i32));
3724 /* default to 2 for __sync_enter_fast() */
3725 MIRIntConst *intConst = GlobalTables::GetIntConstTable().GetOrCreateIntConst(2, intType);
3726 ConstvalNode *exprConst = mirModule.GetMemPool()->New<ConstvalNode>();
3727 exprConst->SetPrimType(PTY_i32);
3728 exprConst->SetConstVal(intConst);
3729 nStmt.GetNopnd().emplace_back(exprConst);
3730 nStmt.SetNumOpnds(nStmt.GetNopndSize());
3731 }
3732 CHECK_FATAL(nStmt.NumOpnds() == kOperandNumBinary, "wrong args for syncenter");
3733 CHECK_FATAL(nStmt.Opnd(1)->GetOpCode() == OP_constval, "wrong 2nd arg type for syncenter");
3734 ConstvalNode *cst = static_cast<ConstvalNode *>(nStmt.GetNopndAt(1));
3735 MIRIntConst *intConst = safe_cast<MIRIntConst>(cst->GetConstVal());
3736 switch (intConst->GetExtValue()) {
3737 case kMCCSyncEnterFast0:
3738 id = INTRN_FIRST_SYNC_ENTER;
3739 break;
3740 case kMCCSyncEnterFast1:
3741 id = INTRN_SECOND_SYNC_ENTER;
3742 break;
3743 case kMCCSyncEnterFast2:
3744 id = INTRN_THIRD_SYNC_ENTER;
3745 break;
3746 case kMCCSyncEnterFast3:
3747 id = INTRN_FOURTH_SYNC_ENTER;
3748 break;
3749 default:
3750 CHECK_FATAL(false, "wrong kind for syncenter");
3751 break;
3752 }
3753 } else {
3754 CHECK_FATAL(nStmt.NumOpnds() == 1, "wrong args for syncexit");
3755 id = INTRN_YNC_EXIT;
3756 }
3757 PUIdx bFunc = GetBuiltinToUse(id);
3758 CHECK_FATAL(bFunc != kFuncNotFound, "bFunc should be found");
3759
3760 MapleVector<BaseNode *> args(mirBuilder->GetCurrentFuncCodeMpAllocator()->Adapter());
3761 args.emplace_back(nStmt.Opnd(0));
3762 return mirBuilder->CreateStmtCall(bFunc, args);
3763 }
3764
3765 PUIdx CGLowerer::GetBuiltinToUse(BuiltinFunctionID id) const
3766 {
3767 /*
3768 * use std::vector & linear search as the number of entries is small.
3769 * we may revisit it if the number of entries gets larger.
3770 */
3771 for (const auto &funcID : builtinFuncIDs) {
3772 if (funcID.first == id) {
3773 return funcID.second;
3774 }
3775 }
3776 return kFuncNotFound;
3777 }
3778
3779 void CGLowerer::LowerGCMalloc(const BaseNode &node, const GCMallocNode &gcmalloc, BlockNode &blkNode, bool perm)
3780 {
3781 MIRFunction *func =
3782 mirBuilder->GetOrCreateFunction((perm ? "MCC_NewPermanentObject" : "MCC_NewObj_fixed_class"), (TyIdx)(PTY_ref));
3783 func->GetFuncSymbol()->SetAppearsInCode(true);
3784 beCommon.UpdateTypeTable(*func->GetMIRFuncType());
3785 func->AllocSymTab();
3786 /* Get the classinfo */
3787 MIRStructType *classType =
3788 static_cast<MIRStructType *>(GlobalTables::GetTypeTable().GetTypeFromTyIdx(gcmalloc.GetTyIdx()));
3789 std::string classInfoName = CLASSINFO_PREFIX_STR + classType->GetName();
3790 MIRSymbol *classSym =
3791 GlobalTables::GetGsymTable().GetSymbolFromStrIdx(GlobalTables::GetStrTable().GetStrIdxFromName(classInfoName));
3792 if (classSym == nullptr) {
3793 MIRType *pointerType = beCommon.BeGetOrCreatePointerType(*GlobalTables::GetTypeTable().GetVoid());
3794 classSym = mirBuilder->CreateGlobalDecl(classInfoName, *pointerType);
3795 classSym->SetStorageClass(kScExtern);
3796 }
3797 CallNode *callAssign = nullptr;
3798 auto *curFunc = mirModule.CurFunction();
3799 if (classSym->GetAttr(ATTR_abstract) || classSym->GetAttr(ATTR_interface)) {
3800 MIRFunction *funcSecond =
3801 mirBuilder->GetOrCreateFunction("MCC_Reflect_ThrowInstantiationError", static_cast<TyIdx>(PTY_ref));
3802 funcSecond->GetFuncSymbol()->SetAppearsInCode(true);
3803 beCommon.UpdateTypeTable(*funcSecond->GetMIRFuncType());
3804 funcSecond->AllocSymTab();
3805 BaseNode *arg = mirBuilder->CreateExprAddrof(0, *classSym);
3806 if (node.GetOpCode() == OP_dassign) {
3807 auto &dsNode = static_cast<const DassignNode &>(node);
3808 MIRSymbol *ret = curFunc->GetLocalOrGlobalSymbol(dsNode.GetStIdx());
3809 MapleVector<BaseNode *> args(mirBuilder->GetCurrentFuncCodeMpAllocator()->Adapter());
3810 args.emplace_back(arg);
3811 callAssign = mirBuilder->CreateStmtCallAssigned(funcSecond->GetPuidx(), args, ret, OP_callassigned);
3812 } else {
3813 CHECK_FATAL(node.GetOpCode() == OP_regassign, "regassign expected");
3814 callAssign = mirBuilder->CreateStmtCallRegassigned(
3815 funcSecond->GetPuidx(), static_cast<const RegassignNode &>(node).GetRegIdx(), OP_callassigned, arg);
3816 }
3817 blkNode.AppendStatementsFromBlock(*LowerCallAssignedStmt(*callAssign));
3818 return;
3819 }
3820 BaseNode *arg = mirBuilder->CreateExprAddrof(0, *classSym);
3821
3822 if (node.GetOpCode() == OP_dassign) {
3823 MIRSymbol *ret = curFunc->GetLocalOrGlobalSymbol(static_cast<const DassignNode &>(node).GetStIdx());
3824 MapleVector<BaseNode *> args(mirBuilder->GetCurrentFuncCodeMpAllocator()->Adapter());
3825 args.emplace_back(arg);
3826 callAssign = mirBuilder->CreateStmtCallAssigned(func->GetPuidx(), args, ret, OP_callassigned);
3827 } else {
3828 CHECK_FATAL(node.GetOpCode() == OP_regassign, "regassign expected");
3829 callAssign = mirBuilder->CreateStmtCallRegassigned(
3830 func->GetPuidx(), static_cast<const RegassignNode &>(node).GetRegIdx(), OP_callassigned, arg);
3831 }
3832 blkNode.AppendStatementsFromBlock(*LowerCallAssignedStmt(*callAssign));
3833 }
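/*
 * Rough shape of the lowering above (MIR spelled informally; the runtime entry points are
 * the ones selected in the code, and the classinfo symbol name is abbreviated):
 *
 *   dassign %obj (gcmalloc ref <$ClassA>)
 *     ==>  callassigned &MCC_NewObj_fixed_class (addrof ptr $<classinfo-of-ClassA>) { dassign %obj }
 *
 * With perm == true the call targets MCC_NewPermanentObject instead, and abstract or
 * interface classes are diverted to MCC_Reflect_ThrowInstantiationError.
 */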
3834
3835 std::string CGLowerer::GetNewArrayFuncName(const uint32 elemSize, const bool perm) const
3836 {
3837 if (elemSize == k1ByteSize) {
3838 return perm ? "MCC_NewPermArray8" : "MCC_NewArray8";
3839 }
3840 if (elemSize == k2ByteSize) {
3841 return perm ? "MCC_NewPermArray16" : "MCC_NewArray16";
3842 }
3843 if (elemSize == k4ByteSize) {
3844 return perm ? "MCC_NewPermArray32" : "MCC_NewArray32";
3845 }
3846 CHECK_FATAL((elemSize == k8ByteSize), "Invalid elemSize.");
3847 return perm ? "MCC_NewPermArray64" : "MCC_NewArray64";
3848 }
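/*
 * Element size to runtime allocator name, as encoded above:
 *
 *   elemSize | perm == false  | perm == true
 *   ---------+----------------+--------------------
 *       1    | MCC_NewArray8  | MCC_NewPermArray8
 *       2    | MCC_NewArray16 | MCC_NewPermArray16
 *       4    | MCC_NewArray32 | MCC_NewPermArray32
 *       8    | MCC_NewArray64 | MCC_NewPermArray64
 *
 * Any other element size is rejected by the CHECK_FATAL.
 */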
3849
3850 void CGLowerer::LowerJarrayMalloc(const StmtNode &stmt, const JarrayMallocNode &node, BlockNode &blkNode, bool perm)
3851 {
3852 /* Extract jarray type */
3853 TyIdx tyIdx = node.GetTyIdx();
3854 MIRType *type = GlobalTables::GetTypeTable().GetTypeFromTyIdx(tyIdx);
3855 CHECK_FATAL(type->GetKind() == kTypeJArray, "Type param of gcmallocjarray is not a MIRJarrayType");
3856 auto jaryType = static_cast<MIRJarrayType *>(type);
3857 CHECK_FATAL(jaryType != nullptr, "Type param of gcmallocjarray is not a MIRJarrayType");
3858
3859 /* Inspect element type */
3860 MIRType *elemType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(jaryType->GetElemTyIdx());
3861 PrimType elemPrimType = elemType->GetPrimType();
3862 uint32 elemSize = GetPrimTypeSize(elemPrimType);
3863 if (elemType->GetKind() != kTypeScalar) { /* element is reference */
3864 elemSize = static_cast<uint32>(RTSupport::GetRTSupportInstance().GetFieldSize());
3865 }
3866
3867 std::string klassName = jaryType->GetJavaName();
3868 std::string arrayClassInfoName;
3869 bool isPredefinedArrayClass = false;
3870 BaseNode *arrayCacheNode = nullptr;
3871 if (jaryType->IsPrimitiveArray() && (jaryType->GetDim() <= kThreeDimArray)) {
3872 arrayClassInfoName = PRIMITIVECLASSINFO_PREFIX_STR + klassName;
3873 isPredefinedArrayClass = true;
3874 } else if (arrayNameForLower::kArrayKlassName.find(klassName) != arrayNameForLower::kArrayKlassName.end()) {
3875 arrayClassInfoName = CLASSINFO_PREFIX_STR + klassName;
3876 isPredefinedArrayClass = true;
3877 } else {
3878 arrayCacheNode = GetClassInfoExprFromArrayClassCache(klassName);
3879 }
3880
3881 std::string funcName;
3882 MapleVector<BaseNode *> args(mirModule.GetMPAllocator().Adapter());
3883 auto *curFunc = mirModule.CurFunction();
3884 if (isPredefinedArrayClass || (arrayCacheNode != nullptr)) {
3885 funcName = GetNewArrayFuncName(elemSize, perm);
3886 args.emplace_back(node.Opnd(0)); /* n_elems */
3887 if (isPredefinedArrayClass) {
3888 GStrIdx strIdx = GlobalTables::GetStrTable().GetOrCreateStrIdxFromName(arrayClassInfoName);
3889 MIRSymbol *arrayClassSym = GlobalTables::GetGsymTable().GetSymbolFromStrIdx(
3890 GlobalTables::GetStrTable().GetStrIdxFromName(arrayClassInfoName));
3891 if (arrayClassSym == nullptr) {
3892 arrayClassSym = GlobalTables::GetGsymTable().CreateSymbol(kScopeGlobal);
3893 arrayClassSym->SetNameStrIdx(strIdx);
3894 arrayClassSym->SetStorageClass(kScGlobal);
3895 arrayClassSym->SetSKind(kStVar);
3896 if (CGOptions::IsPIC()) {
3897 arrayClassSym->SetStorageClass(kScExtern);
3898 } else {
3899 arrayClassSym->SetAttr(ATTR_weak);
3900 }
3901 GlobalTables::GetGsymTable().AddToStringSymbolMap(*arrayClassSym);
3902 arrayClassSym->SetTyIdx(static_cast<TyIdx>(PTY_ptr));
3903 }
3904 args.emplace_back(mirBuilder->CreateExprAddrof(0, *arrayClassSym));
3905 } else {
3906 args.emplace_back(arrayCacheNode);
3907 }
3908 } else {
3909 funcName = perm ? "MCC_NewPermanentArray" : "MCC_NewObj_flexible_cname";
3910 args.emplace_back(mirBuilder->CreateIntConst(elemSize, PTY_u32)); /* elem_size */
3911 args.emplace_back(node.Opnd(0)); /* n_elems */
3912 std::string klassJavaDescriptor;
3913 namemangler::DecodeMapleNameToJavaDescriptor(klassName, klassJavaDescriptor);
3914 UStrIdx classNameStrIdx = GlobalTables::GetUStrTable().GetOrCreateStrIdxFromName(klassJavaDescriptor);
3915 ConststrNode *classNameExpr = mirModule.GetMemPool()->New<ConststrNode>(classNameStrIdx);
3916 classNameExpr->SetPrimType(PTY_ptr);
3917 args.emplace_back(classNameExpr); /* class_name */
3918 args.emplace_back(GetBaseNodeFromCurFunc(*curFunc, true));
3919 /* set class flag 0 */
3920 args.emplace_back(mirBuilder->CreateIntConst(0, PTY_u32));
3921 }
3922 MIRFunction *func = mirBuilder->GetOrCreateFunction(funcName, static_cast<TyIdx>(PTY_ref));
3923 func->GetFuncSymbol()->SetAppearsInCode(true);
3924 beCommon.UpdateTypeTable(*func->GetMIRFuncType());
3925 func->AllocSymTab();
3926 CallNode *callAssign = nullptr;
3927 if (stmt.GetOpCode() == OP_dassign) {
3928 auto &dsNode = static_cast<const DassignNode &>(stmt);
3929 MIRSymbol *ret = curFunc->GetLocalOrGlobalSymbol(dsNode.GetStIdx());
3930
3931 callAssign = mirBuilder->CreateStmtCallAssigned(func->GetPuidx(), args, ret, OP_callassigned);
3932 } else {
3933 auto &regNode = static_cast<const RegassignNode &>(stmt);
3934 callAssign =
3935 mirBuilder->CreateStmtCallRegassigned(func->GetPuidx(), args, regNode.GetRegIdx(), OP_callassigned);
3936 }
3937 blkNode.AppendStatementsFromBlock(*LowerCallAssignedStmt(*callAssign));
3938 }
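/*
 * Two call shapes come out of the lowering above (informal sketch; the argument order
 * follows the args vector built in the code):
 *
 *   predefined or cached array class:
 *     callassigned &<MCC_New[Perm]Array{8,16,32,64}> (n_elems, <array classinfo or cache slot>) { ... }
 *
 *   all other array classes:
 *     callassigned &MCC_NewObj_flexible_cname (elem_size, n_elems,
 *                                              conststr <java descriptor>, <caller>, 0) { ... }
 *     (or MCC_NewPermanentArray when perm == true)
 */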
3939
3940 bool CGLowerer::IsIntrinsicCallHandledAtLowerLevel(MIRIntrinsicID intrinsic) const
3941 {
3942 switch (intrinsic) {
3943 case INTRN_MPL_ATOMIC_EXCHANGE_PTR:
3944 // js
3945 case INTRN_ADD_WITH_OVERFLOW:
3946 case INTRN_SUB_WITH_OVERFLOW:
3947 case INTRN_MUL_WITH_OVERFLOW:
3948 return true;
3949 default: {
3950 return false;
3951 }
3952 }
3953 }
3954
3955 bool CGLowerer::IsIntrinsicOpHandledAtLowerLevel(MIRIntrinsicID intrinsic) const
3956 {
3957 switch (intrinsic) {
3958 #if TARGAARCH64 || TARGX86_64
3959 case INTRN_C_cos:
3960 case INTRN_C_cosf:
3961 case INTRN_C_cosh:
3962 case INTRN_C_coshf:
3963 case INTRN_C_acos:
3964 case INTRN_C_acosf:
3965 case INTRN_C_sin:
3966 case INTRN_C_sinf:
3967 case INTRN_C_sinh:
3968 case INTRN_C_sinhf:
3969 case INTRN_C_asin:
3970 case INTRN_C_asinf:
3971 case INTRN_C_atan:
3972 case INTRN_C_atanf:
3973 case INTRN_C_exp:
3974 case INTRN_C_expf:
3975 case INTRN_C_ffs:
3976 case INTRN_C_log:
3977 case INTRN_C_logf:
3978 case INTRN_C_log10:
3979 case INTRN_C_log10f:
3980 case INTRN_C_clz32:
3981 case INTRN_C_clz64:
3982 case INTRN_C_ctz32:
3983 case INTRN_C_ctz64:
3984 case INTRN_C_popcount32:
3985 case INTRN_C_popcount64:
3986 case INTRN_C_parity32:
3987 case INTRN_C_parity64:
3988 case INTRN_C_clrsb32:
3989 case INTRN_C_clrsb64:
3990 case INTRN_C_isaligned:
3991 case INTRN_C_alignup:
3992 case INTRN_C_aligndown:
3993 case INTRN_C___sync_add_and_fetch_1:
3994 case INTRN_C___sync_add_and_fetch_2:
3995 case INTRN_C___sync_add_and_fetch_4:
3996 case INTRN_C___sync_add_and_fetch_8:
3997 case INTRN_C___sync_sub_and_fetch_1:
3998 case INTRN_C___sync_sub_and_fetch_2:
3999 case INTRN_C___sync_sub_and_fetch_4:
4000 case INTRN_C___sync_sub_and_fetch_8:
4001 case INTRN_C___sync_fetch_and_add_1:
4002 case INTRN_C___sync_fetch_and_add_2:
4003 case INTRN_C___sync_fetch_and_add_4:
4004 case INTRN_C___sync_fetch_and_add_8:
4005 case INTRN_C___sync_fetch_and_sub_1:
4006 case INTRN_C___sync_fetch_and_sub_2:
4007 case INTRN_C___sync_fetch_and_sub_4:
4008 case INTRN_C___sync_fetch_and_sub_8:
4009 case INTRN_C___sync_bool_compare_and_swap_1:
4010 case INTRN_C___sync_bool_compare_and_swap_2:
4011 case INTRN_C___sync_bool_compare_and_swap_4:
4012 case INTRN_C___sync_bool_compare_and_swap_8:
4013 case INTRN_C___sync_val_compare_and_swap_1:
4014 case INTRN_C___sync_val_compare_and_swap_2:
4015 case INTRN_C___sync_val_compare_and_swap_4:
4016 case INTRN_C___sync_val_compare_and_swap_8:
4017 case INTRN_C___sync_lock_test_and_set_1:
4018 case INTRN_C___sync_lock_test_and_set_2:
4019 case INTRN_C___sync_lock_test_and_set_4:
4020 case INTRN_C___sync_lock_test_and_set_8:
4021 case INTRN_C___sync_lock_release_8:
4022 case INTRN_C___sync_lock_release_4:
4023 case INTRN_C___sync_lock_release_2:
4024 case INTRN_C___sync_lock_release_1:
4025 case INTRN_C___sync_fetch_and_and_1:
4026 case INTRN_C___sync_fetch_and_and_2:
4027 case INTRN_C___sync_fetch_and_and_4:
4028 case INTRN_C___sync_fetch_and_and_8:
4029 case INTRN_C___sync_fetch_and_or_1:
4030 case INTRN_C___sync_fetch_and_or_2:
4031 case INTRN_C___sync_fetch_and_or_4:
4032 case INTRN_C___sync_fetch_and_or_8:
4033 case INTRN_C___sync_fetch_and_xor_1:
4034 case INTRN_C___sync_fetch_and_xor_2:
4035 case INTRN_C___sync_fetch_and_xor_4:
4036 case INTRN_C___sync_fetch_and_xor_8:
4037 case INTRN_C___sync_fetch_and_nand_1:
4038 case INTRN_C___sync_fetch_and_nand_2:
4039 case INTRN_C___sync_fetch_and_nand_4:
4040 case INTRN_C___sync_fetch_and_nand_8:
4041 case INTRN_C___sync_and_and_fetch_1:
4042 case INTRN_C___sync_and_and_fetch_2:
4043 case INTRN_C___sync_and_and_fetch_4:
4044 case INTRN_C___sync_and_and_fetch_8:
4045 case INTRN_C___sync_or_and_fetch_1:
4046 case INTRN_C___sync_or_and_fetch_2:
4047 case INTRN_C___sync_or_and_fetch_4:
4048 case INTRN_C___sync_or_and_fetch_8:
4049 case INTRN_C___sync_xor_and_fetch_1:
4050 case INTRN_C___sync_xor_and_fetch_2:
4051 case INTRN_C___sync_xor_and_fetch_4:
4052 case INTRN_C___sync_xor_and_fetch_8:
4053 case INTRN_C___sync_nand_and_fetch_1:
4054 case INTRN_C___sync_nand_and_fetch_2:
4055 case INTRN_C___sync_nand_and_fetch_4:
4056 case INTRN_C___sync_nand_and_fetch_8:
4057 case INTRN_C___sync_synchronize:
4058 case INTRN_C__builtin_return_address:
4059 case INTRN_C__builtin_extract_return_addr:
4060 case INTRN_C_memcmp:
4061 case INTRN_C_strlen:
4062 case INTRN_C_strcmp:
4063 case INTRN_C_strncmp:
4064 case INTRN_C_strchr:
4065 case INTRN_C_strrchr:
4066 case INTRN_C_rev16_2:
4067 case INTRN_C_rev_4:
4068 case INTRN_C_rev_8:
4069 return true;
4070 #endif
4071 default:
4072 return false;
4073 }
4074 }
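/*
 * The #if guard above means these C-level intrinsic ops are only treated as handled at a
 * lower level on AArch64 and X86_64 builds; on other targets all of the cases are compiled
 * out and the function falls through to the default branch, reporting false.
 */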
4075
4076 void CGLowerer::InitArrayClassCacheTableIndex()
4077 {
4078 MIRSymbol *reflectStrtabSym =
4079 GlobalTables::GetGsymTable().GetSymbolFromStrIdx(GlobalTables::GetStrTable().GetOrCreateStrIdxFromName(
4080 namemangler::kReflectionStrtabPrefixStr + mirModule.GetFileNameAsPostfix()));
4081 MIRSymbol *reflectStartHotStrtabSym =
4082 GlobalTables::GetGsymTable().GetSymbolFromStrIdx(GlobalTables::GetStrTable().GetOrCreateStrIdxFromName(
4083 namemangler::kReflectionStartHotStrtabPrefixStr + mirModule.GetFileNameAsPostfix()));
4084 MIRSymbol *reflectBothHotStrtabSym =
4085 GlobalTables::GetGsymTable().GetSymbolFromStrIdx(GlobalTables::GetStrTable().GetOrCreateStrIdxFromName(
4086 namemangler::kReflectionBothHotStrTabPrefixStr + mirModule.GetFileNameAsPostfix()));
4087 MIRSymbol *reflectRunHotStrtabSym =
4088 GlobalTables::GetGsymTable().GetSymbolFromStrIdx(GlobalTables::GetStrTable().GetOrCreateStrIdxFromName(
4089 namemangler::kReflectionRunHotStrtabPrefixStr + mirModule.GetFileNameAsPostfix()));
4090 MIRSymbol *arrayCacheNameTableSym =
4091 GlobalTables::GetGsymTable().GetSymbolFromStrIdx(GlobalTables::GetStrTable().GetOrCreateStrIdxFromName(
4092 namemangler::kArrayClassCacheNameTable + mirModule.GetFileNameAsPostfix()));
4093 if (arrayCacheNameTableSym == nullptr) {
4094 return;
4095 }
4096 MIRAggConst &aggConst = static_cast<MIRAggConst &>(*(arrayCacheNameTableSym->GetKonst()));
4097 MIRSymbol *strTab = nullptr;
4098 for (size_t i = 0; i < aggConst.GetConstVec().size(); ++i) {
4099 MIRConst *elemConst = aggConst.GetConstVecItem(i);
4100 uint32 intValue = static_cast<uint32>(((safe_cast<MIRIntConst>(elemConst))->GetExtValue()) & 0xFFFFFFFF);
4101 bool isHotReflectStr = (intValue & 0x00000003) != 0; /* use the last two bits of intValue in this expression */
4102 if (isHotReflectStr) {
4103 uint32 tag =
4104 (intValue & 0x00000003) - kCStringShift; /* use the last two bits of intValue in this expression */
4105 if (tag == kLayoutBootHot) {
4106 strTab = reflectStartHotStrtabSym;
4107 } else if (tag == kLayoutBothHot) {
4108 strTab = reflectBothHotStrtabSym;
4109 } else {
4110 strTab = reflectRunHotStrtabSym;
4111 }
4112 } else {
4113 strTab = reflectStrtabSym;
4114 }
4115 DEBUG_ASSERT(strTab != nullptr, "strTab is nullptr");
4116 std::string arrayClassName;
4117 MIRAggConst *strAgg = static_cast<MIRAggConst *>(strTab->GetKonst());
4118 for (auto start = (intValue >> 2); start < strAgg->GetConstVec().size();
4119 ++start) { /* the last two bits are the flag */
4120 MIRIntConst *oneChar = static_cast<MIRIntConst *>(strAgg->GetConstVecItem(start));
4121 if ((oneChar != nullptr) && !oneChar->IsZero()) {
4122 arrayClassName += static_cast<char>(oneChar->GetExtValue());
4123 } else {
4124 break;
4125 }
4126 }
4127 arrayClassCacheIndex[arrayClassName] = i;
4128 }
4129 }
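/*
 * Decoding sketch for one entry of the array-class cache name table, following the code
 * above (the concrete values of kCStringShift and the kLayout* tags are defined elsewhere
 * and are not assumed here): the low two bits of the 32-bit entry select the string table,
 * zero meaning the cold reflection strtab and any non-zero value one of the hot tables via
 * (bits - kCStringShift); the remaining bits, entry >> 2, give the index of the first
 * character of the NUL-terminated class name in the selected table. For example, an entry
 * of 0x1A has low bits 2 (a hot entry) and its name starts at index 0x1A >> 2 = 6.
 */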
4130
4131 void CGLowerer::LowerFunc(MIRFunction &func)
4132 {
4133 labelIdx = 0;
4134 SetCurrentFunc(&func);
4135 hasTry = false;
4136 LowerEntry(func);
4137 LowerPseudoRegs(func);
4138 BlockNode *origBody = func.GetBody();
4139 CHECK_FATAL(origBody != nullptr, "origBody should not be nullptr");
4140
4141 BlockNode *newBody = LowerBlock(*origBody);
4142 func.SetBody(newBody);
4143 if (needBranchCleanup) {
4144 CleanupBranches(func);
4145 }
4146
4147 if (mirModule.IsJavaModule() && func.GetBody()->GetFirst() && GenerateExceptionHandlingCode()) {
4148 LowerTryCatchBlocks(*func.GetBody());
4149 }
4150 uint32 oldTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
4151 // We run the simplification here because, at this point, all intrinsic calls and any potential expansion of memcpy
4152 // and similar functions have already been handled, so we can concentrate on the replacement work.
4153 SimplifyBlock(*newBody);
4154 uint32 newTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
4155 if (newTypeTableSize != oldTypeTableSize) {
4156 beCommon.AddNewTypeAfterBecommon(oldTypeTableSize, newTypeTableSize);
4157 }
4158 }
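/*
 * Per-function lowering order, as implemented above: entry and pseudo-register lowering,
 * block-by-block statement lowering, optional branch cleanup, try/catch lowering for Java
 * modules that generate exception-handling code, and finally block simplification. Any MIR
 * types created during simplification are registered with beCommon afterwards so the
 * backend's type information stays in sync with the global type table.
 */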
4159 } /* namespace maplebe */
4160