1 /*
2 * Copyright (c) 2023 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "lower.h"
17 #include <string>
18 #include <cinttypes>
19 #include <vector>
20 #include "mir_symbol.h"
21 #include "mir_function.h"
22 #include "cg_option.h"
23 #include "switch_lowerer.h"
24 #include "try_catch.h"
25 #include "intrinsic_op.h"
26 #include "mir_builder.h"
27 #include "opcode_info.h"
28 #include "rt.h"
29 #include "securec.h"
30 #include "string_utils.h"
31 #include "cast_opt.h"
32 #include "simplify.h"
33 #include "me_safety_warning.h"
34
35 namespace maplebe {
36
37 using namespace maple;
38
39 #define TARGARM32 0
40
/* Identifiers for the external (libc) helper functions the lowerer may call. */
enum ExtFuncT : uint8 { kFmodDouble, kFmodFloat };

/* Signature descriptor for one external helper function: its id, the symbol
 * name to link against, the return type, and a kPtyInvalid-terminated list of
 * argument types. */
struct ExtFuncDescrT {
    ExtFuncT fid;                           // id; must equal its index in extFnDescrs
    const char *name;                       // linker-visible function name
    PrimType retType;                       // return prim type
    PrimType argTypes[kMaxModFuncArgSize];  // args, terminated by kPtyInvalid
};
49
namespace {
/* Intrinsics that lower to a known library call (intrinsic -> callee name). */
std::pair<MIRIntrinsicID, const std::string> cgBuiltins[] = {
    {INTRN_MPL_MEMSET_LOCALVAR, "memset_s"},
};

/* Descriptors consumed by RegisterExternalLibraryFunctions; the index of each
 * entry must equal its fid (checked there with CHECK_FATAL). */
ExtFuncDescrT extFnDescrs[] = {
    {kFmodDouble, "fmod", PTY_f64, {PTY_f64, PTY_f64, kPtyInvalid}},
    {kFmodFloat, "fmodf", PTY_f32, {PTY_f32, PTY_f32, kPtyInvalid}},
};

/* (ExtFuncT, PUIdx) pairs filled in by RegisterExternalLibraryFunctions. */
std::vector<std::pair<ExtFuncT, PUIdx>> extFuncs;
/* Opcode name strings — presumably used for safety-warning/diagnostic text
 * elsewhere in this file; not referenced in this chunk. */
const std::string kOpAssertge = "OP_assertge";
const std::string kOpAssertlt = "OP_assertlt";
const std::string kOpCallAssertle = "OP_callassertle";
const std::string kOpReturnAssertle = "OP_returnassertle";
const std::string kOpAssignAssertle = "OP_assignassertle";
/* Prefix for file-name-derived symbols; see GetFileNameSymbolName below. */
const std::string kFileSymbolNamePrefix = "symname";
}  // namespace
68
/* Name prefixes for lowerer-generated return-value temporaries — presumably
 * passed as the |prefix| of CreateNewRetVar; usage is outside this chunk. */
const std::string CGLowerer::kIntrnRetValPrefix = "__iret";
const std::string CGLowerer::kUserRetValPrefix = "__uret";
71
GetFileNameSymbolName(const std::string & fileName) const72 std::string CGLowerer::GetFileNameSymbolName(const std::string &fileName) const
73 {
74 return kFileSymbolNamePrefix + std::regex_replace(fileName, std::regex("-"), "_");
75 }
76
CreateNewRetVar(const MIRType & ty,const std::string & prefix)77 MIRSymbol *CGLowerer::CreateNewRetVar(const MIRType &ty, const std::string &prefix)
78 {
79 const uint32 bufSize = 257;
80 char buf[bufSize] = {'\0'};
81 MIRFunction *func = GetCurrentFunc();
82 DEBUG_ASSERT(func != nullptr, "func should not be nullptr");
83 MIRSymbol *var = func->GetSymTab()->CreateSymbol(kScopeLocal);
84 int eNum = sprintf_s(buf, bufSize - 1, "%s%" PRId64, prefix.c_str(), ++seed);
85 if (eNum == -1) {
86 FATAL(kLncFatal, "sprintf_s failed");
87 }
88 std::string strBuf(buf);
89 var->SetNameStrIdx(mirModule.GetMIRBuilder()->GetOrCreateStringIndex(strBuf));
90 var->SetTyIdx(ty.GetTypeIndex());
91 var->SetStorageClass(kScAuto);
92 var->SetSKind(kStVar);
93 func->GetSymTab()->AddToStringSymbolMap(*var);
94 return var;
95 }
96
RegisterExternalLibraryFunctions()97 void CGLowerer::RegisterExternalLibraryFunctions()
98 {
99 for (uint32 i = 0; i < sizeof(extFnDescrs) / sizeof(extFnDescrs[0]); ++i) {
100 ExtFuncT id = extFnDescrs[i].fid;
101 CHECK_FATAL(id == i, "make sure id equal i");
102
103 MIRFunction *func =
104 mirModule.GetMIRBuilder()->GetOrCreateFunction(extFnDescrs[i].name, TyIdx(extFnDescrs[i].retType));
105 beCommon.UpdateTypeTable(*func->GetMIRFuncType());
106 func->AllocSymTab();
107 MIRSymbol *funcSym = func->GetFuncSymbol();
108 DEBUG_ASSERT(funcSym != nullptr, "nullptr check");
109 funcSym->SetStorageClass(kScExtern);
110 funcSym->SetAppearsInCode(true);
111 /* return type */
112 MIRType *retTy = GlobalTables::GetTypeTable().GetPrimType(extFnDescrs[i].retType);
113
114 /* use void* for PTY_dynany */
115 if (retTy->GetPrimType() == PTY_dynany) {
116 retTy = GlobalTables::GetTypeTable().GetPtr();
117 }
118
119 std::vector<MIRSymbol *> formals;
120 for (uint32 j = 0; extFnDescrs[i].argTypes[j] != kPtyInvalid; ++j) {
121 PrimType primTy = extFnDescrs[i].argTypes[j];
122 MIRType *argTy = GlobalTables::GetTypeTable().GetPrimType(primTy);
123 /* use void* for PTY_dynany */
124 if (argTy->GetPrimType() == PTY_dynany) {
125 argTy = GlobalTables::GetTypeTable().GetPtr();
126 }
127 MIRSymbol *argSt = func->GetSymTab()->CreateSymbol(kScopeLocal);
128 const uint32 bufSize = 18;
129 char buf[bufSize] = {'\0'};
130 int eNum = sprintf_s(buf, bufSize - 1, "p%u", j);
131 if (eNum == -1) {
132 FATAL(kLncFatal, "sprintf_s failed");
133 }
134 std::string strBuf(buf);
135 argSt->SetNameStrIdx(mirModule.GetMIRBuilder()->GetOrCreateStringIndex(strBuf));
136 argSt->SetTyIdx(argTy->GetTypeIndex());
137 argSt->SetStorageClass(kScFormal);
138 argSt->SetSKind(kStVar);
139 func->GetSymTab()->AddToStringSymbolMap(*argSt);
140 formals.emplace_back(argSt);
141 }
142 func->UpdateFuncTypeAndFormalsAndReturnType(formals, retTy->GetTypeIndex(), false);
143 auto *funcType = func->GetMIRFuncType();
144 DEBUG_ASSERT(funcType != nullptr, "null ptr check");
145 beCommon.AddTypeSizeAndAlign(funcType->GetTypeIndex(), GetPrimTypeSize(funcType->GetPrimType()));
146 extFuncs.emplace_back(std::pair<ExtFuncT, PUIdx>(id, func->GetPuidx()));
147 }
148 }
149
NodeConvert(PrimType mType,BaseNode & expr)150 BaseNode *CGLowerer::NodeConvert(PrimType mType, BaseNode &expr)
151 {
152 PrimType srcType = expr.GetPrimType();
153 if (GetPrimTypeSize(mType) == GetPrimTypeSize(srcType)) {
154 return &expr;
155 }
156 TypeCvtNode *cvtNode = mirModule.CurFuncCodeMemPool()->New<TypeCvtNode>(OP_cvt);
157 cvtNode->SetFromType(srcType);
158 cvtNode->SetPrimType(mType);
159 cvtNode->SetOpnd(&expr, 0);
160 return cvtNode;
161 }
162
LowerIaddrof(const IreadNode & iaddrof)163 BaseNode *CGLowerer::LowerIaddrof(const IreadNode &iaddrof)
164 {
165 if (iaddrof.GetFieldID() == 0) {
166 return iaddrof.Opnd(0);
167 }
168 MIRType *type = GlobalTables::GetTypeTable().GetTypeFromTyIdx(iaddrof.GetTyIdx());
169 MIRPtrType *pointerTy = static_cast<MIRPtrType *>(type);
170 CHECK_FATAL(pointerTy != nullptr, "LowerIaddrof: expect a pointer type at iaddrof node");
171 MIRStructType *structTy =
172 static_cast<MIRStructType *>(GlobalTables::GetTypeTable().GetTypeFromTyIdx(pointerTy->GetPointedTyIdx()));
173 CHECK_FATAL(structTy != nullptr, "LowerIaddrof: non-zero fieldID for non-structure");
174 int32 offset = beCommon.GetFieldOffset(*structTy, iaddrof.GetFieldID()).first;
175 if (offset == 0) {
176 return iaddrof.Opnd(0);
177 }
178 uint32 loweredPtrType = static_cast<uint32>(GetLoweredPtrType());
179 MIRIntConst *offsetConst = GlobalTables::GetIntConstTable().GetOrCreateIntConst(
180 offset, *GlobalTables::GetTypeTable().GetTypeTable().at(loweredPtrType));
181 BaseNode *offsetNode = mirModule.CurFuncCodeMemPool()->New<ConstvalNode>(offsetConst);
182 offsetNode->SetPrimType(GetLoweredPtrType());
183
184 BinaryNode *addNode = mirModule.CurFuncCodeMemPool()->New<BinaryNode>(OP_add);
185 addNode->SetPrimType(GetLoweredPtrType());
186 addNode->SetBOpnd(iaddrof.Opnd(0), 0);
187 addNode->SetBOpnd(offsetNode, 1);
188 return addNode;
189 }
190
SplitBinaryNodeOpnd1(BinaryNode & bNode,BlockNode & blkNode)191 BaseNode *CGLowerer::SplitBinaryNodeOpnd1(BinaryNode &bNode, BlockNode &blkNode)
192 {
193 if (Globals::GetInstance()->GetOptimLevel() >= CGOptions::kLevel1) {
194 return &bNode;
195 }
196 MIRBuilder *mirbuilder = mirModule.GetMIRBuilder();
197 static uint32 val = 0;
198 std::string name("bnaryTmp");
199 name += std::to_string(val++);
200
201 BaseNode *opnd1 = bNode.Opnd(1);
202 MIRType *ty = GlobalTables::GetTypeTable().GetTypeFromTyIdx(static_cast<TyIdx>(opnd1->GetPrimType()));
203 MIRSymbol *dnodeSt = mirbuilder->GetOrCreateLocalDecl(const_cast<const std::string &>(name), *ty);
204 DassignNode *dnode = mirbuilder->CreateStmtDassign(const_cast<MIRSymbol &>(*dnodeSt), 0, opnd1);
205 blkNode.InsertAfter(blkNode.GetLast(), dnode);
206
207 BaseNode *dreadNode = mirbuilder->CreateExprDread(*dnodeSt);
208 bNode.SetOpnd(dreadNode, 1);
209
210 return &bNode;
211 }
212
SplitTernaryNodeResult(TernaryNode & tNode,BaseNode & parent,BlockNode & blkNode)213 BaseNode *CGLowerer::SplitTernaryNodeResult(TernaryNode &tNode, BaseNode &parent, BlockNode &blkNode)
214 {
215 if (Globals::GetInstance()->GetOptimLevel() >= CGOptions::kLevel1) {
216 return &tNode;
217 }
218 MIRBuilder *mirbuilder = mirModule.GetMIRBuilder();
219 static uint32 val = 0;
220 std::string name("tnaryTmp");
221 name += std::to_string(val++);
222
223 MIRType *ty = GlobalTables::GetTypeTable().GetTypeFromTyIdx(static_cast<TyIdx>(tNode.GetPrimType()));
224 MIRSymbol *dassignNodeSym = mirbuilder->GetOrCreateLocalDecl(const_cast<const std::string &>(name), *ty);
225 DassignNode *dassignNode = mirbuilder->CreateStmtDassign(const_cast<MIRSymbol &>(*dassignNodeSym), 0, &tNode);
226 blkNode.InsertAfter(blkNode.GetLast(), dassignNode);
227
228 BaseNode *dreadNode = mirbuilder->CreateExprDread(*dassignNodeSym);
229 for (size_t i = 0; i < parent.NumOpnds(); i++) {
230 if (parent.Opnd(i) == &tNode) {
231 parent.SetOpnd(dreadNode, i);
232 break;
233 }
234 }
235
236 return dreadNode;
237 }
238
239 /* Check if the operand of the select node is complex enough for either
240 * functionality or performance reason so we need to lower it to if-then-else.
241 */
IsComplexSelect(const TernaryNode & tNode) const242 bool CGLowerer::IsComplexSelect(const TernaryNode &tNode) const
243 {
244 if (tNode.GetPrimType() == PTY_agg) {
245 return true;
246 }
247 /* Iread may have side effect which may cause correctness issue. */
248 if (HasIreadExpr(tNode.Opnd(kFirstReg)) || HasIreadExpr(tNode.Opnd(kSecondReg))) {
249 return true;
250 }
251 // it will be generated many insn for complex expr, leading to
252 // worse performance than punishment of branch prediction error
253 constexpr size_t maxDepth = 3;
254 if (MaxDepth(tNode.Opnd(kFirstReg)) > maxDepth || MaxDepth(tNode.Opnd(kSecondReg)) > maxDepth) {
255 return true;
256 }
257 return false;
258 }
259
FindTheCurrentStmtFreq(const StmtNode * stmt) const260 int32 CGLowerer::FindTheCurrentStmtFreq(const StmtNode *stmt) const
261 {
262 while (stmt != nullptr) {
263 DEBUG_ASSERT(mirModule.CurFunction() != nullptr, "CurFunction should not be nullptr");
264 int32 freq = mirModule.CurFunction()->GetFreqFromLastStmt(stmt->GetStmtID());
265 if (freq != -1) {
266 return freq;
267 }
268 stmt = stmt->GetPrev();
269 }
270 return -1;
271 }
272
273 /* Lower agg select node back to if-then-else stmt. */
274 /*
275 0(brfalse)
276 | \
277 1 2
278 \ |
279 \ |
280 3
281 */
/* Lower a select considered complex (see IsComplexSelect) into an explicit
 * brfalse/goto diamond appended to |blkNode|. The chosen value is stored into
 * a fresh local symbol (PTY_agg) or a fresh preg (scalar); the use of the
 * select inside |parent| is replaced with a read of that location, which is
 * also returned. Branch-frequency maps are updated for every created stmt. */
BaseNode *CGLowerer::LowerComplexSelect(const TernaryNode &tNode, BaseNode &parent, BlockNode &blkNode)
{
    MIRBuilder *mirbuilder = mirModule.GetMIRBuilder();

    MIRType *resultTy = 0;
    MIRFunction *func = mirModule.CurFunction();
    /* Determine the result type: aggregates recover it from the true operand
     * (dread or iread); scalars map directly from the select's prim type. */
    if (tNode.GetPrimType() == PTY_agg) {
        if (tNode.Opnd(1)->op == OP_dread) {
            DreadNode *trueNode = static_cast<DreadNode *>(tNode.Opnd(1));
            DEBUG_ASSERT(mirModule.CurFunction() != nullptr, "curFunction should not be nullptr");
            resultTy = mirModule.CurFunction()->GetLocalOrGlobalSymbol(trueNode->GetStIdx())->GetType();
        } else if (tNode.Opnd(1)->op == OP_iread) {
            IreadNode *trueNode = static_cast<IreadNode *>(tNode.Opnd(1));
            MIRPtrType *ptrty =
                static_cast<MIRPtrType *>(GlobalTables::GetTypeTable().GetTypeFromTyIdx(trueNode->GetTyIdx()));
            resultTy =
                static_cast<MIRStructType *>(GlobalTables::GetTypeTable().GetTypeFromTyIdx(ptrty->GetPointedTyIdx()));
            /* a non-zero field id narrows the result to that field's type */
            if (trueNode->GetFieldID() != 0) {
                MIRStructType *structty = static_cast<MIRStructType *>(resultTy);
                resultTy =
                    GlobalTables::GetTypeTable().GetTypeFromTyIdx(structty->GetFieldTyIdx(trueNode->GetFieldID()));
            }
        } else {
            CHECK_FATAL(false, "NYI: LowerComplexSelect");
        }
    } else {
        resultTy = GlobalTables::GetTypeTable().GetTypeFromTyIdx(static_cast<TyIdx>(tNode.GetPrimType()));
    }

    /* brfalse <cond> -> targetIdx : skip the true-arm assignment when false */
    CondGotoNode *brTargetStmt = mirModule.CurFuncCodeMemPool()->New<CondGotoNode>(OP_brfalse);
    brTargetStmt->SetOpnd(tNode.Opnd(0), 0);
    DEBUG_ASSERT(mirModule.CurFunction() != nullptr, "curFunction should not be nullptr");
    LabelIdx targetIdx = mirModule.CurFunction()->GetLabelTab()->CreateLabel();
    mirModule.CurFunction()->GetLabelTab()->AddToStringLabelMap(targetIdx);
    brTargetStmt->SetOffset(targetIdx);
    // Update the current stmt frequence
    int32 currentStmtFreq = 0;
    if (kOpcodeInfo.IsStmt(parent.GetOpCode())) {
        currentStmtFreq = FindTheCurrentStmtFreq(static_cast<StmtNode *>(&parent));
    }
    currentStmtFreq = currentStmtFreq == -1 ? 0 : currentStmtFreq;
    DEBUG_ASSERT(func != nullptr, "func should not be nullptr");
    func->SetLastFreqMap(brTargetStmt->GetStmtID(), static_cast<uint32>(currentStmtFreq));
    blkNode.InsertAfter(blkNode.GetLast(), brTargetStmt);
    /* the result lives in a symbol (agg) or a preg (scalar) — never both */
    union {
        MIRSymbol *resSym;
        PregIdx resPreg;
    } cplxSelRes; // complex select result
    /* each arm is assumed to run half the time (fallthru gets the round-up) */
    uint32 fallthruStmtFreq = static_cast<uint32>((currentStmtFreq + 1) / 2);
    if (tNode.GetPrimType() == PTY_agg) {
        static uint32 val = 0;
        std::string name("ComplexSelectTmp");
        name += std::to_string(val++);
        cplxSelRes.resSym = mirbuilder->GetOrCreateLocalDecl(const_cast<std::string &>(name), *resultTy);
        DassignNode *dassignTrue = mirbuilder->CreateStmtDassign(*cplxSelRes.resSym, 0, tNode.Opnd(1));
        // Fallthru: update the frequence 1
        func->SetFirstFreqMap(dassignTrue->GetStmtID(), fallthruStmtFreq);
        blkNode.InsertAfter(blkNode.GetLast(), dassignTrue);
    } else {
        cplxSelRes.resPreg = mirbuilder->GetCurrentFunction()->GetPregTab()->CreatePreg(tNode.GetPrimType());
        RegassignNode *regassignTrue =
            mirbuilder->CreateStmtRegassign(tNode.GetPrimType(), cplxSelRes.resPreg, tNode.Opnd(1));
        // Update the frequence first opnd
        func->SetFirstFreqMap(regassignTrue->GetStmtID(), fallthruStmtFreq);
        blkNode.InsertAfter(blkNode.GetLast(), regassignTrue);
    }
    DEBUG_ASSERT(mirModule.CurFunction() != nullptr, "CurFunction should not be nullptr");
    /* goto EndIdx : jump over the false arm after executing the true arm */
    GotoNode *gotoStmt = mirModule.CurFuncCodeMemPool()->New<GotoNode>(OP_goto);
    LabelIdx EndIdx = mirModule.CurFunction()->GetLabelTab()->CreateLabel();
    mirModule.CurFunction()->GetLabelTab()->AddToStringLabelMap(EndIdx);
    gotoStmt->SetOffset(EndIdx);
    // Update the frequence first opnd
    func->SetLastFreqMap(gotoStmt->GetStmtID(), fallthruStmtFreq);
    blkNode.InsertAfter(blkNode.GetLast(), gotoStmt);

    uint32 targetStmtFreq = static_cast<uint32>(currentStmtFreq / 2);
    /* targetIdx label: start of the false arm */
    LabelNode *lableStmt = mirModule.CurFuncCodeMemPool()->New<LabelNode>();
    lableStmt->SetLabelIdx(targetIdx);
    func->SetFirstFreqMap(lableStmt->GetStmtID(), targetStmtFreq);
    blkNode.InsertAfter(blkNode.GetLast(), lableStmt);

    /* false arm: assign the second value into the same result location */
    if (tNode.GetPrimType() == PTY_agg) {
        DassignNode *dassignFalse = mirbuilder->CreateStmtDassign(*cplxSelRes.resSym, 0, tNode.Opnd(2));
        // Update the frequence second opnd
        func->SetLastFreqMap(dassignFalse->GetStmtID(), targetStmtFreq);
        blkNode.InsertAfter(blkNode.GetLast(), dassignFalse);
    } else {
        RegassignNode *regassignFalse =
            mirbuilder->CreateStmtRegassign(tNode.GetPrimType(), cplxSelRes.resPreg, tNode.Opnd(2));
        // Update the frequence 2
        func->SetLastFreqMap(regassignFalse->GetStmtID(), targetStmtFreq);
        blkNode.InsertAfter(blkNode.GetLast(), regassignFalse);
    }

    /* EndIdx label: both arms join here */
    lableStmt = mirModule.CurFuncCodeMemPool()->New<LabelNode>();
    lableStmt->SetLabelIdx(EndIdx);
    // Update the frequence third opnd
    func->SetFirstFreqMap(lableStmt->GetStmtID(), static_cast<uint32>(currentStmtFreq));
    blkNode.InsertAfter(blkNode.GetLast(), lableStmt);

    /* replace the select use in the parent with a read of the result */
    BaseNode *exprNode =
        (tNode.GetPrimType() == PTY_agg)
            ? static_cast<BaseNode *>(mirbuilder->CreateExprDread(*cplxSelRes.resSym))
            : static_cast<BaseNode *>(mirbuilder->CreateExprRegread(tNode.GetPrimType(), cplxSelRes.resPreg));
    for (size_t i = 0; i < parent.NumOpnds(); i++) {
        if (parent.Opnd(i) == &tNode) {
            parent.SetOpnd(exprNode, i);
            break;
        }
    }

    return exprNode;
}
395
/* Lower a flexible/java array access into explicit pointer arithmetic:
 * base (+ jarray content-header offset) + index * element-size.
 * A constant integer index is folded into a single offset constant. */
BaseNode *CGLowerer::LowerFarray(ArrayNode &array)
{
    auto *farrayType = static_cast<MIRFarrayType *>(array.GetArrayType(GlobalTables::GetTypeTable()));
    size_t eSize = GlobalTables::GetTypeTable().GetTypeFromTyIdx(farrayType->GetElemTyIdx())->GetSize();
    if (farrayType->GetKind() == kTypeJArray) {
        if (farrayType->GetElemType()->GetKind() != kTypeScalar) {
            /* not the last dimension of primitive array */
            eSize = RTSupport::GetRTSupportInstance().GetObjectAlignment();
        }
    }

    MIRType &arrayType = *GlobalTables::GetTypeTable().GetTypeFromTyIdx(TyIdx(array.GetPrimType()));
    /* how about multi-dimension array? */
    if (array.GetIndex(0)->GetOpCode() == OP_constval) {
        /* constant index: compute the byte offset at compile time */
        const ConstvalNode *constvalNode = static_cast<const ConstvalNode *>(array.GetIndex(0));
        if (constvalNode->GetConstVal()->GetKind() == kConstInt) {
            const MIRIntConst *pIntConst = static_cast<const MIRIntConst *>(constvalNode->GetConstVal());
            CHECK_FATAL(!pIntConst->IsNegative(), "Array index should >= 0.");
            uint64 eleOffset = static_cast<uint64>(pIntConst->GetExtValue() * eSize);

            /* java arrays store elements after a runtime-defined header */
            if (farrayType->GetKind() == kTypeJArray) {
                eleOffset += static_cast<uint64>(RTSupport::GetRTSupportInstance().GetArrayContentOffset());
            }

            BaseNode *baseNode = NodeConvert(array.GetPrimType(), *array.GetBase());
            if (eleOffset == 0) {
                return baseNode;
            }

            MIRIntConst *eleConst = GlobalTables::GetIntConstTable().GetOrCreateIntConst(eleOffset, arrayType);
            BaseNode *offsetNode = mirModule.CurFuncCodeMemPool()->New<ConstvalNode>(eleConst);
            offsetNode->SetPrimType(array.GetPrimType());

            BaseNode *rAdd = mirModule.CurFuncCodeMemPool()->New<BinaryNode>(OP_add);
            rAdd->SetPrimType(array.GetPrimType());
            rAdd->SetOpnd(baseNode, 0);
            rAdd->SetOpnd(offsetNode, 1);
            return rAdd;
        }
    }

    /* general case: offset = index * eSize, folded to a constant when the
     * (java) index is already constant */
    BaseNode *resNode = NodeConvert(array.GetPrimType(), *array.GetIndex(0));
    BaseNode *rMul = nullptr;

    if ((farrayType->GetKind() == kTypeJArray) && (resNode->GetOpCode() == OP_constval)) {
        ConstvalNode *idxNode = static_cast<ConstvalNode *>(resNode);
        uint64 idx = static_cast<uint64>(safe_cast<MIRIntConst>(idxNode->GetConstVal())->GetExtValue());
        MIRIntConst *eConst = GlobalTables::GetIntConstTable().GetOrCreateIntConst(idx * eSize, arrayType);
        rMul = mirModule.CurFuncCodeMemPool()->New<ConstvalNode>(eConst);
        rMul->SetPrimType(array.GetPrimType());
    } else {
        MIRIntConst *eConst =
            GlobalTables::GetIntConstTable().GetOrCreateIntConst(static_cast<int64>(eSize), arrayType);
        BaseNode *eSizeNode = mirModule.CurFuncCodeMemPool()->New<ConstvalNode>(eConst);
        eSizeNode->SetPrimType(array.GetPrimType());
        rMul = mirModule.CurFuncCodeMemPool()->New<BinaryNode>(OP_mul);
        rMul->SetPrimType(array.GetPrimType());
        rMul->SetOpnd(resNode, 0);
        rMul->SetOpnd(eSizeNode, 1);
    }

    BaseNode *baseNode = NodeConvert(array.GetPrimType(), *array.GetBase());

    /* java arrays: advance the base past the array header first */
    if (farrayType->GetKind() == kTypeJArray) {
        BaseNode *jarrayBaseNode = mirModule.CurFuncCodeMemPool()->New<BinaryNode>(OP_add);
        MIRIntConst *arrayHeaderNode = GlobalTables::GetIntConstTable().GetOrCreateIntConst(
            RTSupport::GetRTSupportInstance().GetArrayContentOffset(), arrayType);
        BaseNode *arrayHeaderCstNode = mirModule.CurFuncCodeMemPool()->New<ConstvalNode>(arrayHeaderNode);
        arrayHeaderCstNode->SetPrimType(array.GetPrimType());
        jarrayBaseNode->SetPrimType(array.GetPrimType());
        jarrayBaseNode->SetOpnd(baseNode, 0);
        jarrayBaseNode->SetOpnd(arrayHeaderCstNode, 1);
        baseNode = jarrayBaseNode;
    }

    BaseNode *rAdd = mirModule.CurFuncCodeMemPool()->New<BinaryNode>(OP_add);
    rAdd->SetPrimType(array.GetPrimType());
    rAdd->SetOpnd(baseNode, 0);
    rAdd->SetOpnd(rMul, 1);
    return rAdd;
}
477
/* Linearize a multi-dimensional array index:
 *   result = idx[dim-1] + sum_i( idx[i] * stride(i) )
 * where stride(i) is the number of elements covered by one step of dim i.
 * For C modules the stride is a compile-time product of the array type's size
 * array; otherwise it is built from the array node's dim expressions. */
BaseNode *CGLowerer::LowerArrayDim(ArrayNode &array, int32 dim)
{
    BaseNode *resNode = NodeConvert(array.GetPrimType(), *array.GetIndex(dim - 1));
    /* process left dimension index, resNode express the last dim, so dim need sub 2 */
    CHECK_FATAL(dim > (std::numeric_limits<int>::min)() + 1, "out of range");
    int leftDim = dim - 2;
    MIRType *aType = array.GetArrayType(GlobalTables::GetTypeTable());
    MIRArrayType *arrayType = static_cast<MIRArrayType *>(aType);
    for (int i = leftDim; i >= 0; --i) {
        BaseNode *mpyNode = mirModule.CurFuncCodeMemPool()->New<BinaryNode>(OP_mul);
        BaseNode *item =
            NodeConvert(array.GetPrimType(), *array.GetDim(mirModule, GlobalTables::GetTypeTable(), dim - 1));
        if (mirModule.IsCModule()) {
            /* C: stride = product of the statically known sizes of dims i+1..dim-1 */
            item = NodeConvert(array.GetPrimType(), *array.GetIndex(static_cast<size_t>(static_cast<unsigned int>(i))));
            int64 offsetSize = 1;
            for (int32 j = i + 1; j < dim; ++j) {
                offsetSize *= static_cast<int64>(arrayType->GetSizeArrayItem(static_cast<uint32>(j)));
            }
            MIRIntConst *offsetCst = mirModule.CurFuncCodeMemPool()->New<MIRIntConst>(
                offsetSize, *GlobalTables::GetTypeTable().GetTypeFromTyIdx(array.GetPrimType()));
            BaseNode *eleOffset = mirModule.CurFuncCodeMemPool()->New<ConstvalNode>(offsetCst);
            eleOffset->SetPrimType(array.GetPrimType());
            mpyNode->SetPrimType(array.GetPrimType());
            mpyNode->SetOpnd(eleOffset, 0);
            mpyNode->SetOpnd(item, 1);
        } else {
            /* non-C: stride = run-time product of the dim expressions right of i */
            for (int j = leftDim; j > i; --j) {
                BaseNode *mpyNodes = mirModule.CurFuncCodeMemPool()->New<BinaryNode>(OP_mul);
                mpyNodes->SetPrimType(array.GetPrimType());
                mpyNodes->SetOpnd(item, 0);
                mpyNodes->SetOpnd(
                    NodeConvert(array.GetPrimType(), *array.GetDim(mirModule, GlobalTables::GetTypeTable(), j)), 1);
                item = mpyNodes;
            }
            mpyNode->SetPrimType(array.GetPrimType());
            mpyNode->SetOpnd(NodeConvert(array.GetPrimType(), *array.GetIndex(i)), 0);
            mpyNode->SetOpnd(item, 1);
        }

        /* accumulate: resNode += idx[i] * stride(i) */
        BaseNode *newResNode = mirModule.CurFuncCodeMemPool()->New<BinaryNode>(OP_add);
        newResNode->SetPrimType(array.GetPrimType());
        newResNode->SetOpnd(resNode, 0);
        newResNode->SetOpnd(mpyNode, 1);
        resNode = newResNode;
    }
    return resNode;
}
525
/* Hook for lazy-binding-aware array lowering: given the array base address and
 * a constant element offset, return a replacement expression, or nullptr when
 * no lazy-binding lowering applies. This build always returns nullptr, so
 * LowerArray falls through to plain base + offset arithmetic. */
BaseNode *CGLowerer::LowerArrayForLazyBiding(BaseNode &baseNode, BaseNode &offsetNode, const BaseNode &parent)
{
    return nullptr;
}
530
/* Lower an array addressing node into base + linearIndex * element-size.
 * Farray/Jarray accesses are delegated to LowerFarray. For a constant offset
 * the lazy-binding hook may replace the whole access; a 1-D constant index
 * uses OP_CG_array_elem_add instead of plain OP_add. */
BaseNode *CGLowerer::LowerArray(ArrayNode &array, const BaseNode &parent)
{
    MIRType *aType = array.GetArrayType(GlobalTables::GetTypeTable());
    if (aType->GetKind() == kTypeFArray || aType->GetKind() == kTypeJArray) {
        return LowerFarray(array);
    }
    MIRArrayType *arrayType = static_cast<MIRArrayType *>(aType);
    int32 dim = arrayType->GetDim();
    /* collapse all dimension indices into one linear element index */
    BaseNode *resNode = LowerArrayDim(array, dim);
    BaseNode *rMul = nullptr;
    size_t eSize = beCommon.GetTypeSize(arrayType->GetElemTyIdx().GetIdx());
    Opcode opAdd = OP_add;
    MIRType &arrayTypes = *GlobalTables::GetTypeTable().GetTypeFromTyIdx(TyIdx(array.GetPrimType()));
    if (resNode->GetOpCode() == OP_constval) {
        /* index is a constant, we can calculate the offset now */
        ConstvalNode *idxNode = static_cast<ConstvalNode *>(resNode);
        uint64 idx = static_cast<uint64>(safe_cast<MIRIntConst>(idxNode->GetConstVal())->GetExtValue());
        MIRIntConst *eConst = GlobalTables::GetIntConstTable().GetOrCreateIntConst(idx * eSize, arrayTypes);
        rMul = mirModule.CurFuncCodeMemPool()->New<ConstvalNode>(eConst);
        rMul->SetPrimType(array.GetPrimType());
        if (dim == 1) {
            opAdd = OP_CG_array_elem_add;
        }
    } else {
        /* variable index: multiply by the element size at run time */
        MIRIntConst *eConst =
            GlobalTables::GetIntConstTable().GetOrCreateIntConst(static_cast<int64>(eSize), arrayTypes);
        BaseNode *tmpNode = mirModule.CurFuncCodeMemPool()->New<ConstvalNode>(eConst);
        tmpNode->SetPrimType(array.GetPrimType());
        rMul = mirModule.CurFuncCodeMemPool()->New<BinaryNode>(OP_mul);
        rMul->SetPrimType(array.GetPrimType());
        rMul->SetOpnd(resNode, 0);
        rMul->SetOpnd(tmpNode, 1);
    }
    BaseNode *baseNode = NodeConvert(array.GetPrimType(), *array.GetBase());
    if (rMul->GetOpCode() == OP_constval) {
        /* give the lazy-binding hook a chance to replace the whole access */
        BaseNode *intrnNode = LowerArrayForLazyBiding(*baseNode, *rMul, parent);
        if (intrnNode != nullptr) {
            return intrnNode;
        }
    }
    BaseNode *rAdd = mirModule.CurFuncCodeMemPool()->New<BinaryNode>(opAdd);
    rAdd->SetPrimType(array.GetPrimType());
    rAdd->SetOpnd(baseNode, 0);
    rAdd->SetOpnd(rMul, 1);
    return rAdd;
}
577
/* Store |rhs| into the bit-field located at (byteOffset, bitOffset) within the
 * object at |baseAddr|. When the field fits in one storage unit of the field's
 * prim type, a single depositbits + iassignoff is returned. When it straddles
 * two units, the low bits are stored into the current unit (that stmt is
 * appended to |block|) and the returned stmt stores the remaining high bits
 * into the following unit. On big-endian targets the bit offset is mirrored
 * within the storage unit. */
StmtNode *CGLowerer::WriteBitField(const std::pair<int32, int32> &byteBitOffsets, const MIRBitFieldType *fieldType,
                                   BaseNode *baseAddr, BaseNode *rhs, BlockNode *block)
{
    auto bitSize = fieldType->GetFieldSize();
    auto primType = fieldType->GetPrimType();
    auto byteOffset = byteBitOffsets.first;
    auto bitOffset = byteBitOffsets.second;
    auto *builder = mirModule.GetMIRBuilder();
    /* current contents of the storage unit holding (the start of) the field */
    auto *bitField = builder->CreateExprIreadoff(primType, byteOffset, baseAddr);
    auto primTypeBitSize = GetPrimTypeBitSize(primType);
    if ((static_cast<uint32>(bitOffset) + bitSize) <= primTypeBitSize) {
        /* field fits in a single storage unit */
        if (CGOptions::IsBigEndian()) {
            bitOffset =
                (static_cast<int64>(beCommon.GetTypeSize(fieldType->GetTypeIndex()) * kBitsPerByte) - bitOffset) -
                bitSize;
        }
        auto depositBits = builder->CreateExprDepositbits(OP_depositbits, primType, static_cast<uint32>(bitOffset),
                                                          bitSize, bitField, rhs);
        return builder->CreateStmtIassignoff(primType, byteOffset, baseAddr, depositBits);
    }
    // if space not enough in the unit with size of primType, we would make an extra assignment from next bound
    auto bitsRemained = (bitOffset + bitSize) - primTypeBitSize;
    auto bitsExtracted = primTypeBitSize - static_cast<uint32>(bitOffset);
    if (CGOptions::IsBigEndian()) {
        bitOffset = 0;
    }
    /* low part: deposit the first bitsExtracted bits of rhs into this unit */
    auto *depositedLowerBits = builder->CreateExprDepositbits(OP_depositbits, primType, static_cast<uint32>(bitOffset),
                                                              bitsExtracted, bitField, rhs);
    auto *assignedLowerBits = builder->CreateStmtIassignoff(primType, byteOffset, baseAddr, depositedLowerBits);
    block->AddStatement(assignedLowerBits);
    /* high part: remaining bits of rhs go to the start of the next unit */
    auto *extractedHigherBits =
        builder->CreateExprExtractbits(OP_extractbits, primType, bitsExtracted, bitsRemained, rhs);
    auto *bitFieldRemained =
        builder->CreateExprIreadoff(primType, byteOffset + static_cast<int32>(GetPrimTypeSize(primType)), baseAddr);
    auto *depositedHigherBits = builder->CreateExprDepositbits(OP_depositbits, primType, 0, bitsRemained,
                                                               bitFieldRemained, extractedHigherBits);
    auto *assignedHigherBits = builder->CreateStmtIassignoff(
        primType, byteOffset + static_cast<int32>(GetPrimTypeSize(primType)), baseAddr, depositedHigherBits);
    return assignedHigherBits;
}
618
/* Load the bit-field located at (byteOffset, bitOffset) within the object at
 * |baseAddr|. A field contained in one storage unit of its prim type becomes a
 * single extractbits over an ireadoff. A field straddling two units is built
 * by extracting the low bits from the first unit and depositing the second
 * unit's bits on top. On big-endian targets the bit offset is mirrored within
 * the storage unit. */
BaseNode *CGLowerer::ReadBitField(const std::pair<int32, int32> &byteBitOffsets, const MIRBitFieldType *fieldType,
                                  BaseNode *baseAddr)
{
    auto bitSize = fieldType->GetFieldSize();
    auto primType = fieldType->GetPrimType();
    auto byteOffset = byteBitOffsets.first;
    auto bitOffset = byteBitOffsets.second;
    auto *builder = mirModule.GetMIRBuilder();
    /* storage unit holding (the start of) the field */
    auto *bitField = builder->CreateExprIreadoff(primType, byteOffset, baseAddr);
    auto primTypeBitSize = GetPrimTypeBitSize(primType);
    if ((static_cast<uint32>(bitOffset) + bitSize) <= primTypeBitSize) {
        /* field fits in a single storage unit */
        if (CGOptions::IsBigEndian()) {
            bitOffset =
                (static_cast<int64>(beCommon.GetTypeSize(fieldType->GetTypeIndex()) * kBitsPerByte) - bitOffset) -
                bitSize;
        }
        return builder->CreateExprExtractbits(OP_extractbits, primType, static_cast<uint32>(bitOffset), bitSize,
                                              bitField);
    }
    // if space not enough in the unit with size of primType, the result would be binding of two exprs of load
    auto bitsRemained = (bitOffset + bitSize) - primTypeBitSize;
    if (CGOptions::IsBigEndian()) {
        bitOffset = 0;
    }
    /* low bits from this unit, high bits deposited from the next unit */
    auto *extractedLowerBits = builder->CreateExprExtractbits(OP_extractbits, primType, static_cast<uint32>(bitOffset),
                                                              bitSize - bitsRemained, bitField);
    auto *bitFieldRemained =
        builder->CreateExprIreadoff(primType, byteOffset + static_cast<int32>(GetPrimTypeSize(primType)), baseAddr);
    auto *result = builder->CreateExprDepositbits(OP_depositbits, primType, bitSize - bitsRemained, bitsRemained,
                                                  extractedLowerBits, bitFieldRemained);
    return result;
}
651
LowerDreadBitfield(DreadNode & dread)652 BaseNode *CGLowerer::LowerDreadBitfield(DreadNode &dread)
653 {
654 DEBUG_ASSERT(mirModule.CurFunction() != nullptr, "CurFunction should not be nullptr");
655 auto *symbol = mirModule.CurFunction()->GetLocalOrGlobalSymbol(dread.GetStIdx());
656 DEBUG_ASSERT(symbol != nullptr, "symbol should not be nullptr");
657 auto *structTy = static_cast<MIRStructType *>(symbol->GetType());
658 auto fTyIdx = structTy->GetFieldTyIdx(dread.GetFieldID());
659 auto *fType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(TyIdx(fTyIdx));
660 if (fType->GetKind() != kTypeBitField) {
661 return &dread;
662 }
663 auto *builder = mirModule.GetMIRBuilder();
664 auto *baseAddr = builder->CreateExprAddrof(0, dread.GetStIdx());
665 auto byteBitOffsets = beCommon.GetFieldOffset(*structTy, dread.GetFieldID());
666 return ReadBitField(byteBitOffsets, static_cast<MIRBitFieldType *>(fType), baseAddr);
667 }
668
LowerIreadBitfield(IreadNode & iread)669 BaseNode *CGLowerer::LowerIreadBitfield(IreadNode &iread)
670 {
671 uint32 index = iread.GetTyIdx();
672 MIRPtrType *pointerTy = static_cast<MIRPtrType *>(GlobalTables::GetTypeTable().GetTypeFromTyIdx(index));
673 MIRType *pointedTy = GlobalTables::GetTypeTable().GetTypeFromTyIdx(pointerTy->GetPointedTyIdx());
674 /* Here pointed type can be Struct or JArray */
675 MIRStructType *structTy = nullptr;
676 if (pointedTy->GetKind() != kTypeJArray) {
677 structTy = static_cast<MIRStructType *>(pointedTy);
678 } else {
679 structTy = static_cast<MIRJarrayType *>(pointedTy)->GetParentType();
680 }
681 TyIdx fTyIdx = structTy->GetFieldTyIdx(iread.GetFieldID());
682 MIRType *fType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(TyIdx(fTyIdx));
683 if (fType->GetKind() != kTypeBitField) {
684 return &iread;
685 }
686 auto byteBitOffsets = beCommon.GetFieldOffset(*structTy, iread.GetFieldID());
687 return ReadBitField(byteBitOffsets, static_cast<MIRBitFieldType *>(fType), iread.Opnd(0));
688 }
689
// input node must be cvt, retype, zext or sext
/* No cast folding is performed in this build: the conversion expression is
 * returned unchanged. */
BaseNode *CGLowerer::LowerCastExpr(BaseNode &expr)
{
    return &expr;
}
695
696 #if TARGARM32 || TARGAARCH64 || TARGRISCV64 || TARGX86_64
/* Lower "return <agg>" for targets where a struct result is returned through a
 * hidden pointer passed as the first formal: lower every operand, iassign the
 * aggregate through that fake parameter, then emit the now operand-less
 * return. Returns a new block holding the generated statements. */
BlockNode *CGLowerer::LowerReturnStructUsingFakeParm(NaryStmtNode &retNode)
{
    BlockNode *blk = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
    for (size_t i = 0; i < retNode.GetNopndSize(); ++i) {
        retNode.SetOpnd(LowerExpr(retNode, *retNode.GetNopndAt(i), *blk), i);
    }
    BaseNode *opnd0 = retNode.Opnd(0);
    if (!(opnd0 && opnd0->GetPrimType() == PTY_agg)) {
        /* It is possible function never returns and have a dummy return const instead of a struct. */
        maple::LogInfo::MapleLogger(kLlWarn) << "return struct should have a kid" << std::endl;
    }

    /* the first formal is the hidden pointer to the caller's result slot */
    MIRFunction *curFunc = GetCurrentFunc();
    MIRSymbol *retSt = curFunc->GetFormal(0);
    MIRPtrType *retTy = static_cast<MIRPtrType *>(retSt->GetType());
    IassignNode *iassign = mirModule.CurFuncCodeMemPool()->New<IassignNode>();
    iassign->SetTyIdx(retTy->GetTypeIndex());
    DEBUG_ASSERT(opnd0 != nullptr, "opnd0 should not be nullptr");
    iassign->SetFieldID(0);
    iassign->SetRHS(opnd0);
    /* address operand: read the fake formal as a preg or as a var */
    if (retSt->IsPreg()) {
        RegreadNode *regNode = mirModule.GetMIRBuilder()->CreateExprRegread(
            GetLoweredPtrType(), curFunc->GetPregTab()->GetPregIdxFromPregno(retSt->GetPreg()->GetPregNo()));
        iassign->SetOpnd(regNode, 0);
    } else {
        AddrofNode *dreadNode = mirModule.CurFuncCodeMemPool()->New<AddrofNode>(OP_dread);
        dreadNode->SetPrimType(GetLoweredPtrType());
        dreadNode->SetStIdx(retSt->GetStIdx());
        iassign->SetOpnd(dreadNode, 0);
    }
    blk->AddStatement(iassign);
    /* the value has been stored through the pointer; the return carries no operands now */
    retNode.GetNopnd().clear();
    retNode.SetNumOpnds(0);
    blk->AddStatement(&retNode);
    return blk;
}
733
734 #endif /* TARGARM32 || TARGAARCH64 || TARGX86_64 */
735
LowerReturn(NaryStmtNode & retNode)736 BlockNode *CGLowerer::LowerReturn(NaryStmtNode &retNode)
737 {
738 BlockNode *blk = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
739 if (retNode.NumOpnds() != 0) {
740 BaseNode *expr = retNode.Opnd(0);
741 Opcode opr = expr->GetOpCode();
742 if (opr == OP_dread) {
743 AddrofNode *retExpr = static_cast<AddrofNode *>(expr);
744 MIRFunction *mirFunc = mirModule.CurFunction();
745 MIRSymbol *sym = mirFunc->GetLocalOrGlobalSymbol(retExpr->GetStIdx());
746 if (sym->GetAttr(ATTR_localrefvar)) {
747 mirFunc->InsertMIRSymbol(sym);
748 }
749 }
750 }
751 for (size_t i = 0; i < retNode.GetNopndSize(); ++i) {
752 retNode.SetOpnd(LowerExpr(retNode, *retNode.GetNopndAt(i), *blk), i);
753 }
754 blk->AddStatement(&retNode);
755 return blk;
756 }
757
LowerDassignBitfield(DassignNode & dassign,BlockNode & newBlk)758 StmtNode *CGLowerer::LowerDassignBitfield(DassignNode &dassign, BlockNode &newBlk)
759 {
760 dassign.SetRHS(LowerExpr(dassign, *dassign.GetRHS(), newBlk));
761 DEBUG_ASSERT(mirModule.CurFunction() != nullptr, "CurFunction should not be nullptr");
762 MIRSymbol *symbol = mirModule.CurFunction()->GetLocalOrGlobalSymbol(dassign.GetStIdx());
763 MIRStructType *structTy = static_cast<MIRStructType *>(symbol->GetType());
764 CHECK_FATAL(structTy != nullptr, "LowerDassignBitfield: non-zero fieldID for non-structure");
765 TyIdx fTyIdx = structTy->GetFieldTyIdx(dassign.GetFieldID());
766 CHECK_FATAL(fTyIdx != 0u, "LowerDassignBitField: field id out of range for the structure");
767 MIRType *fType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(TyIdx(fTyIdx));
768 if (fType->GetKind() != kTypeBitField) {
769 return &dassign;
770 }
771 auto *builder = mirModule.GetMIRBuilder();
772 auto *baseAddr = builder->CreateExprAddrof(0, dassign.GetStIdx());
773 auto byteBitOffsets = beCommon.GetFieldOffset(*structTy, dassign.GetFieldID());
774 return WriteBitField(byteBitOffsets, static_cast<MIRBitFieldType *>(fType), baseAddr, dassign.GetRHS(), &newBlk);
775 }
776
LowerIassignBitfield(IassignNode & iassign,BlockNode & newBlk)777 StmtNode *CGLowerer::LowerIassignBitfield(IassignNode &iassign, BlockNode &newBlk)
778 {
779 DEBUG_ASSERT(iassign.Opnd(0) != nullptr, "iassign.Opnd(0) should not be nullptr");
780 iassign.SetOpnd(LowerExpr(iassign, *iassign.Opnd(0), newBlk), 0);
781 iassign.SetRHS(LowerExpr(iassign, *iassign.GetRHS(), newBlk));
782
783 CHECK_FATAL(iassign.GetTyIdx() < GlobalTables::GetTypeTable().GetTypeTable().size(),
784 "LowerIassignBitField: subscript out of range");
785 uint32 index = iassign.GetTyIdx();
786 MIRPtrType *pointerTy = static_cast<MIRPtrType *>(GlobalTables::GetTypeTable().GetTypeFromTyIdx(index));
787 CHECK_FATAL(pointerTy != nullptr, "LowerIassignBitField: type in iassign should be pointer type");
788 MIRType *pointedTy = GlobalTables::GetTypeTable().GetTypeFromTyIdx(pointerTy->GetPointedTyIdx());
789 /*
790 * Here pointed type can be Struct or JArray
791 * We should seriously consider make JArray also a Struct type
792 */
793 MIRStructType *structTy = nullptr;
794 if (pointedTy->GetKind() != kTypeJArray) {
795 structTy = static_cast<MIRStructType *>(pointedTy);
796 } else {
797 structTy = static_cast<MIRJarrayType *>(pointedTy)->GetParentType();
798 }
799
800 TyIdx fTyIdx = structTy->GetFieldTyIdx(iassign.GetFieldID());
801 MIRType *fType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(TyIdx(fTyIdx));
802 if (fType->GetKind() != kTypeBitField) {
803 return &iassign;
804 }
805 auto byteBitOffsets = beCommon.GetFieldOffset(*structTy, iassign.GetFieldID());
806 auto *bitFieldType = static_cast<MIRBitFieldType *>(fType);
807 return WriteBitField(byteBitOffsets, bitFieldType, iassign.Opnd(0), iassign.GetRHS(), &newBlk);
808 }
809
LowerIassign(IassignNode & iassign,BlockNode & newBlk)810 void CGLowerer::LowerIassign(IassignNode &iassign, BlockNode &newBlk)
811 {
812 StmtNode *newStmt = nullptr;
813 if (iassign.GetFieldID() != 0) {
814 newStmt = LowerIassignBitfield(iassign, newBlk);
815 } else {
816 LowerStmt(iassign, newBlk);
817 newStmt = &iassign;
818 }
819 newBlk.AddStatement(newStmt);
820 }
821
NewAsmTempStrIdx()822 static GStrIdx NewAsmTempStrIdx()
823 {
824 static uint32 strIdxCount = 0; // to create unique temporary symbol names
825 std::string asmTempStr("asm_tempvar");
826 asmTempStr += std::to_string(++strIdxCount);
827 return GlobalTables::GetStrTable().GetOrCreateStrIdxFromName(asmTempStr);
828 }
829
/* Lower an inline-asm statement: each operand expression is lowered and, if
 * it is a non-leaf tree, spilled into a temporary (a pseudo-register at -O2
 * for non-aggregates, otherwise a uniquely named local symbol) so the asm
 * node only references leaf operands. The asm node itself is appended last. */
void CGLowerer::LowerAsmStmt(AsmNode *asmNode, BlockNode *newBlk)
{
    for (size_t i = 0; i < asmNode->NumOpnds(); i++) {
        BaseNode *opnd = LowerExpr(*asmNode, *asmNode->Opnd(i), *newBlk);
        if (opnd->NumOpnds() == 0) {
            /* already a leaf; use it directly */
            asmNode->SetOpnd(opnd, i);
            continue;
        }
        // introduce a temporary to store the expression tree operand
        TyIdx tyIdxUsed = static_cast<TyIdx>(opnd->GetPrimType());
        if (opnd->op == OP_iread) {
            /* use the iread's precise pointed type, not just its prim type */
            IreadNode *ireadNode = static_cast<IreadNode *>(opnd);
            tyIdxUsed = ireadNode->GetType()->GetTypeIndex();
        }
        StmtNode *assignNode = nullptr;
        BaseNode *readOpnd = nullptr;
        PrimType type = GlobalTables::GetTypeTable().GetTypeFromTyIdx(tyIdxUsed)->GetPrimType();
        if ((type != PTY_agg) && CGOptions::GetInstance().GetOptimizeLevel() >= CGOptions::kLevel2) {
            /* scalar at -O2: spill to a pseudo-register */
            DEBUG_ASSERT(mirModule.CurFunction() != nullptr, "curFunction should not be nullptr");
            PregIdx pregIdx = mirModule.CurFunction()->GetPregTab()->CreatePreg(type);
            assignNode = mirBuilder->CreateStmtRegassign(type, pregIdx, opnd);
            readOpnd = mirBuilder->CreateExprRegread(type, pregIdx);
        } else {
            /* aggregate (or below -O2): spill to a named local symbol */
            MIRSymbol *st = mirModule.GetMIRBuilder()->CreateSymbol(tyIdxUsed, NewAsmTempStrIdx(), kStVar, kScAuto,
                                                                    mirModule.CurFunction(), kScopeLocal);
            assignNode = mirModule.GetMIRBuilder()->CreateStmtDassign(*st, 0, opnd);
            readOpnd = mirBuilder->CreateExprDread(*st);
        }
        newBlk->AddStatement(assignNode);
        asmNode->SetOpnd(readOpnd, i);
    }
    newBlk->AddStatement(asmNode);
}
863
NeedRetypeWhenLowerCallAssigned(PrimType pType)864 BaseNode *CGLowerer::NeedRetypeWhenLowerCallAssigned(PrimType pType)
865 {
866 BaseNode *retNode = mirModule.GetMIRBuilder()->CreateExprRegread(pType, -kSregRetval0);
867 if (IsPrimitiveInteger(pType) && GetPrimTypeBitSize(pType) <= k32BitSize) {
868 auto newPty = IsPrimitiveUnsigned(pType) ? PTY_u64 : PTY_i64;
869 retNode = mirModule.GetMIRBuilder()->CreateExprTypeCvt(OP_cvt, newPty, pType, *retNode);
870 }
871 return retNode;
872 }
873
SaveReturnValueInLocal(StIdx stIdx,uint16 fieldID)874 DassignNode *CGLowerer::SaveReturnValueInLocal(StIdx stIdx, uint16 fieldID)
875 {
876 MIRSymbol *var;
877 if (stIdx.IsGlobal()) {
878 var = GlobalTables::GetGsymTable().GetSymbolFromStidx(stIdx.Idx());
879 } else {
880 DEBUG_ASSERT(GetCurrentFunc() != nullptr, "GetCurrentFunc should not be nullptr");
881 var = GetCurrentFunc()->GetSymbolTabItem(stIdx.Idx());
882 }
883 CHECK_FATAL(var != nullptr, "var should not be nullptr");
884 PrimType pType;
885 if (var->GetAttr(ATTR_oneelem_simd)) {
886 pType = PTY_f64;
887 } else {
888 pType = GlobalTables::GetTypeTable().GetTypeTable().at(var->GetTyIdx())->GetPrimType();
889 }
890 auto *regRead = NeedRetypeWhenLowerCallAssigned(pType);
891 return mirModule.GetMIRBuilder()->CreateStmtDassign(*var, fieldID, regRead);
892 }
893
/* Lower a floating-point rem into a call to fmod/fmodf; integer rem passes
 * through untouched. The call's result is stored in a fresh temporary whose
 * dread is returned as the replacement expression. Relies on the fmod/fmodf
 * PUIdx having been registered in the module-level extFuncs table. */
BaseNode *CGLowerer::LowerRem(BaseNode &expr, BlockNode &blk)
{
    auto &remExpr = static_cast<BinaryNode &>(expr);
    if (!IsPrimitiveFloat(remExpr.GetPrimType())) {
        return &expr;
    }
    ExtFuncT fmodFunc = remExpr.GetPrimType() == PTY_f32 ? kFmodFloat : kFmodDouble;
    /* find the registered PUIdx for the chosen fmod variant */
    uint32 i = 0;
    for (; i < extFuncs.size(); ++i) {
        if (extFuncs[i].first == fmodFunc) {
            break;
        }
    }
    CHECK_FATAL(i < extFuncs.size(), "rem expression primtype is not PTY_f32 nor PTY_f64.");
    MIRSymbol *ret =
        CreateNewRetVar(*GlobalTables::GetTypeTable().GetPrimType(remExpr.GetPrimType()), kIntrnRetValPrefix);
    MapleVector<BaseNode *> args(mirModule.GetMIRBuilder()->GetCurrentFuncCodeMpAllocator()->Adapter());
    args.emplace_back(remExpr.Opnd(0));
    args.emplace_back(remExpr.Opnd(1));
    /* call fmod(f)(op0, op1) assigned to the temporary, then lower that call */
    CallNode *callStmt = mirModule.GetMIRBuilder()->CreateStmtCallAssigned(extFuncs[i].second, args, ret);
    blk.AppendStatementsFromBlock(*LowerCallAssignedStmt(*callStmt));
    MIRType *type = GlobalTables::GetTypeTable().GetPrimType(extFnDescrs[fmodFunc].retType);
    return mirModule.GetMIRBuilder()->CreateExprDread(*type, 0, *ret);
}
918
/* to lower call (including icall) and intrinsicall statements */
/* Lowers the statement's argument expressions, appends the lowered statement
 * to newBlk, and at -O2 tries to expand memset/memcpy-style intrinsiccalls
 * inline. NOTE(review): nextStmt, retty and uselvar are accepted but not used
 * in this body — presumably kept for interface compatibility; confirm with
 * callers before removing. */
void CGLowerer::LowerCallStmt(StmtNode &stmt, StmtNode *&nextStmt, BlockNode &newBlk, MIRType *retty, bool uselvar,
                              bool isIntrinAssign)
{
    StmtNode *newStmt = nullptr;
    if (stmt.GetOpCode() == OP_intrinsiccall) {
        auto &intrnNode = static_cast<IntrinsiccallNode &>(stmt);
        newStmt = LowerIntrinsiccall(intrnNode, newBlk);
    } else {
        /* We note the function has a user-defined (i.e., not an intrinsic) call. */
        DEBUG_ASSERT(GetCurrentFunc() != nullptr, "GetCurrentFunc should not be nullptr");
        GetCurrentFunc()->SetHasCall();
        newStmt = &stmt;
    }

    /* intrinsic lowering may consume the statement entirely */
    if (newStmt == nullptr) {
        return;
    }

    if (newStmt->GetOpCode() == OP_call || newStmt->GetOpCode() == OP_icall || newStmt->GetOpCode() == OP_icallproto) {
        /* lower each call argument; side-effect stmts go into newBlk */
        auto &callNode = static_cast<NaryStmtNode&>(*newStmt);
        for (size_t i = 0; i < callNode.GetNopndSize(); ++i) {
            BaseNode *newOpnd = LowerExpr(callNode, *callNode.GetNopndAt(i), newBlk);
            callNode.SetOpnd(newOpnd, i);
        }
        newStmt = &callNode;
    }
    newStmt->SetSrcPos(stmt.GetSrcPos());
    newBlk.AddStatement(newStmt);
    if (CGOptions::GetInstance().GetOptimizeLevel() >= CGOptions::kLevel2 && stmt.GetOpCode() == OP_intrinsiccall) {
        /* Try to expand memset and memcpy call lowered from intrinsiccall */
        /* Skip expansion if call returns a value that is used later. */
        BlockNode *blkLowered = isIntrinAssign ? nullptr : LowerMemop(*newStmt);
        if (blkLowered != nullptr) {
            newBlk.RemoveStmt(newStmt);
            newBlk.AppendStatementsFromBlock(*blkLowered);
        }
    }
}
958
/* Convert a *callassigned statement into the corresponding non-assigned call
 * node, preserving deopt bundle info, source position and statement
 * attributes. funcCalled is set to the callee's PUIdx. Aborts if the call is
 * still a virtual or superclass call at this stage. */
StmtNode *CGLowerer::GenCallNode(const StmtNode &stmt, PUIdx &funcCalled, CallNode &origCall)
{
    CallNode *newCall = nullptr;
    if (stmt.GetOpCode() == OP_callassigned) {
        newCall = mirModule.GetMIRBuilder()->CreateStmtCall(origCall.GetPUIdx(), origCall.GetNopnd());
    } else if (stmt.GetOpCode() == OP_virtualcallassigned) {
        newCall = mirModule.GetMIRBuilder()->CreateStmtVirtualCall(origCall.GetPUIdx(), origCall.GetNopnd());
    } else if (stmt.GetOpCode() == OP_superclasscallassigned) {
        newCall = mirModule.GetMIRBuilder()->CreateStmtSuperclassCall(origCall.GetPUIdx(), origCall.GetNopnd());
    } else if (stmt.GetOpCode() == OP_interfacecallassigned) {
        newCall = mirModule.GetMIRBuilder()->CreateStmtInterfaceCall(origCall.GetPUIdx(), origCall.GetNopnd());
    }
    CHECK_FATAL(newCall != nullptr, "nullptr is not expected");
    newCall->SetDeoptBundleInfo(origCall.GetDeoptBundleInfo());
    newCall->SetSrcPos(stmt.GetSrcPos());
    funcCalled = origCall.GetPUIdx();
    /* virtual/superclass calls must have been devirtualized earlier */
    CHECK_FATAL((newCall->GetOpCode() == OP_call || newCall->GetOpCode() == OP_interfacecall),
                "virtual call or super class call are not expected");
    if (newCall->GetOpCode() == OP_interfacecall) {
        /* diagnostic trace: interface calls are tolerated but flagged */
        std::cerr << "interfacecall found\n";
    }
    newCall->SetStmtAttrs(stmt.GetStmtAttrs());
    return newCall;
}
983
/* Convert an intrinsiccall-assigned statement into its non-assigned form:
 * either pass it through unchanged (when the lower level handles the
 * intrinsic, signalled via handledAtLowerLevel), replace it with a call to a
 * builtin implementation function, or rebuild it as a plain
 * intrinsiccall/intrinsiccallwithtype. funcCalled receives the builtin's
 * PUIdx (or kFuncNotFound). */
StmtNode *CGLowerer::GenIntrinsiccallNode(const StmtNode &stmt, PUIdx &funcCalled, bool &handledAtLowerLevel,
                                          IntrinsiccallNode &origCall)
{
    StmtNode *newCall = nullptr;
    handledAtLowerLevel = IsIntrinsicCallHandledAtLowerLevel(origCall.GetIntrinsic());
    if (handledAtLowerLevel) {
        /* If the lower level can handle the intrinsic, just let it pass through. */
        newCall = &origCall;
    } else {
        PUIdx bFunc = GetBuiltinToUse(origCall.GetIntrinsic());
        if (bFunc != kFuncNotFound) {
            /* the intrinsic has a library/builtin implementation: call it */
            newCall = mirModule.GetMIRBuilder()->CreateStmtCall(bFunc, origCall.GetNopnd());
            CHECK_FATAL(newCall->GetOpCode() == OP_call, "intrinsicnode except intrinsiccall is not expected");
        } else {
            if (stmt.GetOpCode() == OP_intrinsiccallassigned) {
                newCall =
                    mirModule.GetMIRBuilder()->CreateStmtIntrinsicCall(origCall.GetIntrinsic(), origCall.GetNopnd());
                CHECK_FATAL(newCall->GetOpCode() == OP_intrinsiccall,
                            "intrinsicnode except intrinsiccall is not expected");
            } else if (stmt.GetOpCode() == OP_xintrinsiccallassigned) {
                newCall =
                    mirModule.GetMIRBuilder()->CreateStmtXintrinsicCall(origCall.GetIntrinsic(), origCall.GetNopnd());
                CHECK_FATAL(newCall->GetOpCode() == OP_intrinsiccall,
                            "intrinsicnode except intrinsiccall is not expected");
            } else {
                /* remaining case: OP_intrinsiccallwithtypeassigned */
                newCall = mirModule.GetMIRBuilder()->CreateStmtIntrinsicCall(origCall.GetIntrinsic(),
                                                                             origCall.GetNopnd(), origCall.GetTyIdx());
                CHECK_FATAL(newCall->GetOpCode() == OP_intrinsiccallwithtype,
                            "intrinsicnode except OP_intrinsiccallwithtype is not expected");
            }
        }
        newCall->SetSrcPos(stmt.GetSrcPos());
        funcCalled = bFunc;
    }
    return newCall;
}
1020
GenIcallNode(PUIdx & funcCalled,IcallNode & origCall)1021 StmtNode *CGLowerer::GenIcallNode(PUIdx &funcCalled, IcallNode &origCall)
1022 {
1023 IcallNode *newCall = nullptr;
1024 if (origCall.GetOpCode() == OP_icallassigned) {
1025 newCall = mirModule.GetMIRBuilder()->CreateStmtIcall(origCall.GetNopnd());
1026 } else {
1027 newCall = mirModule.GetMIRBuilder()->CreateStmtIcallproto(origCall.GetNopnd(), origCall.GetRetTyIdx());
1028 newCall->SetRetTyIdx(static_cast<IcallNode &>(origCall).GetRetTyIdx());
1029 }
1030 newCall->SetDeoptBundleInfo(origCall.GetDeoptBundleInfo());
1031 newCall->SetStmtAttrs(origCall.GetStmtAttrs());
1032 newCall->SetSrcPos(origCall.GetSrcPos());
1033 CHECK_FATAL(newCall != nullptr, "nullptr is not expected");
1034 funcCalled = kFuncNotFound;
1035 return newCall;
1036 }
1037
/* Wrap the lowered call in a block and materialize saving of its return
 * value: a dassign to the result symbol or a regassign to the result preg.
 * Statements are first staged into blk, then the block is rebuilt (via
 * ResetBlock) as: optional verbose comment, lowered call, save statement. */
BlockNode *CGLowerer::GenBlockNode(StmtNode &newCall, const CallReturnVector &p2nRets, const Opcode &opcode,
                                   const PUIdx &funcCalled, bool handledAtLowerLevel, bool uselvar)
{
    BlockNode *blk = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
    blk->AddStatement(&newCall);
    if (!handledAtLowerLevel) {
        CHECK_FATAL(p2nRets.size() <= 1, "make sure p2nRets size <= 1");
        /* Create DassignStmt to save kSregRetval0. */
        StmtNode *dStmt = nullptr;
        MIRType *retType = nullptr;
        if (p2nRets.size() == 1) {
            MIRSymbol *sym = nullptr;
            StIdx stIdx = p2nRets[0].first;
            if (stIdx.IsGlobal()) {
                sym = GlobalTables::GetGsymTable().GetSymbolFromStidx(stIdx.Idx());
            } else {
                DEBUG_ASSERT(GetCurrentFunc() != nullptr, "GetCurrentFunc should not be nullptr");
                sym = GetCurrentFunc()->GetSymbolTabItem(stIdx.Idx());
            }
            bool sizeIs0 = false;
            if (sym != nullptr) {
                retType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(sym->GetTyIdx());
                if (beCommon.GetTypeSize(retType->GetTypeIndex().GetIdx()) == 0) {
                    sizeIs0 = true;
                }
            }
            /* zero-sized results need no save statement */
            if (!sizeIs0) {
                RegFieldPair regFieldPair = p2nRets[0].second;
                if (!regFieldPair.IsReg()) {
                    /* result goes to a symbol: dassign from %%retval0 */
                    uint16 fieldID = static_cast<uint16>(regFieldPair.GetFieldID());
                    DassignNode *dn = SaveReturnValueInLocal(stIdx, fieldID);
                    CHECK_FATAL(dn->GetFieldID() == 0, "make sure dn's fieldID return 0");
                    LowerDassign(*dn, *blk);
                    CHECK_FATAL(&newCall == blk->GetLast() || newCall.GetNext() == blk->GetLast(), "");
                    dStmt = (&newCall == blk->GetLast()) ? nullptr : blk->GetLast();
                    CHECK_FATAL(newCall.GetNext() == dStmt, "make sure newCall's next equal dStmt");
                } else {
                    /* result goes to a preg: regassign from %%retval0 */
                    PregIdx pregIdx = static_cast<PregIdx>(regFieldPair.GetPregIdx());
                    DEBUG_ASSERT(GetCurrentFunc() != nullptr, "GetCurrentFunc should not be nullptr");
                    MIRPreg *mirPreg = GetCurrentFunc()->GetPregTab()->PregFromPregIdx(pregIdx);
                    bool is64x1vec = beCommon.CallIsOfAttr(FUNCATTR_oneelem_simd, &newCall);
                    /* one-element SIMD results come back in an f64 register */
                    PrimType pType = is64x1vec ? PTY_f64 : mirPreg->GetPrimType();
                    RegreadNode *regNode = mirModule.GetMIRBuilder()->CreateExprRegread(pType, -kSregRetval0);
                    RegassignNode *regAssign;
                    if (is64x1vec && IsPrimitiveInteger(mirPreg->GetPrimType())) { // not f64
                        /* retype the f64 register content back to the preg's integer type */
                        MIRType *to;
                        if (IsUnsignedInteger(mirPreg->GetPrimType())) {
                            to = GlobalTables::GetTypeTable().GetUInt64();
                        } else {
                            to = GlobalTables::GetTypeTable().GetInt64();
                        }
                        MIRType *from = GlobalTables::GetTypeTable().GetDouble();
                        BaseNode *rNode = mirModule.GetMIRBuilder()->CreateExprRetype(*to, *from, regNode);
                        regAssign = mirModule.GetMIRBuilder()->CreateStmtRegassign(mirPreg->GetPrimType(),
                                                                                   regFieldPair.GetPregIdx(), rNode);
                    } else {
                        regAssign = mirModule.GetMIRBuilder()->CreateStmtRegassign(mirPreg->GetPrimType(),
                                                                                   regFieldPair.GetPregIdx(), regNode);
                    }
                    blk->AddStatement(regAssign);
                    dStmt = regAssign;
                }
            }
        }
        /* rebuild the block in its final order */
        blk->ResetBlock();
        /* if VerboseCG, insert a comment */
        if (ShouldAddAdditionalComment()) {
            CommentNode *cmnt = mirModule.CurFuncCodeMemPool()->New<CommentNode>(mirModule);
            cmnt->SetComment(kOpcodeInfo.GetName(opcode).c_str());
            if (funcCalled == kFuncNotFound) {
                cmnt->Append(" : unknown");
            } else {
                cmnt->Append(" : ");
                cmnt->Append(GlobalTables::GetFunctionTable().GetFunctionFromPuidx(funcCalled)->GetName());
            }
            blk->AddStatement(cmnt);
        }
        CHECK_FATAL(dStmt == nullptr || dStmt->GetNext() == nullptr, "make sure dStmt or dStmt's next is nullptr");
        LowerCallStmt(newCall, dStmt, *blk, retType, uselvar, opcode == OP_intrinsiccallassigned);
        if (!uselvar && dStmt != nullptr) {
            dStmt->SetSrcPos(newCall.GetSrcPos());
            blk->AddStatement(dStmt);
        }
    }
    return blk;
}
1124
// try to expand memset and memcpy
/* Attempt to expand a memset/memcpy-like statement inline. Returns a block
 * of replacement statements (already lowered) on success, or nullptr when the
 * statement is not a recognized mem op or simplification failed — in which
 * case the original statement is left in place, its list links restored. */
BlockNode *CGLowerer::LowerMemop(StmtNode &stmt)
{
    auto memOpKind = SimplifyMemOp::ComputeMemOpKind(stmt);
    if (memOpKind == MEM_OP_unknown) {
        return nullptr;
    }
    /* remember the statement's links: AddStatement below rewires them */
    auto *prev = stmt.GetPrev();
    auto *next = stmt.GetNext();
    auto *blk = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
    blk->AddStatement(&stmt);
    uint32 oldTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
    bool success = simplifyMemOp.AutoSimplify(stmt, *blk, true);
    uint32 newTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
    if (newTypeTableSize != oldTypeTableSize) {
        /* simplification may create new types; keep beCommon in sync */
        beCommon.AddNewTypeAfterBecommon(oldTypeTableSize, newTypeTableSize);
    }
    stmt.SetPrev(prev);
    stmt.SetNext(next); // recover callStmt's position
    if (!success) {
        return nullptr;
    }
    // lower new generated stmts
    auto *currStmt = blk->GetFirst();
    while (currStmt != nullptr) {
        auto *nextStmt = currStmt->GetNext();
        for (uint32 i = 0; i < currStmt->NumOpnds(); ++i) {
            currStmt->SetOpnd(LowerExpr(*currStmt, *currStmt->Opnd(i), *blk), i);
        }
        currStmt = nextStmt;
    }
    return blk;
}
1158
/* Rewrite an intrinsiccall-assigned statement as a direct assignment of an
 * intrinsicop expression: a regassign when the result goes to a preg, a
 * dassign when it goes to a symbol. The generated block is lowered via
 * LowerBlock before being returned. */
BlockNode *CGLowerer::LowerIntrinsiccallAassignedToAssignStmt(IntrinsiccallNode &intrinsicCall)
{
    auto *builder = mirModule.GetMIRBuilder();
    auto *block = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
    auto intrinsicID = intrinsicCall.GetIntrinsic();
    auto &opndVector = intrinsicCall.GetNopnd();
    auto returnPair = intrinsicCall.GetReturnVec().begin();
    auto regFieldPair = returnPair->second;
    DEBUG_ASSERT(mirModule.CurFunction() != nullptr, "CurFunction should not be nullptr");
    if (regFieldPair.IsReg()) {
        /* result preg: regassign (intrinsicop ...) */
        auto regIdx = regFieldPair.GetPregIdx();
        auto primType = mirModule.CurFunction()->GetPregItem(static_cast<PregIdx>(regIdx))->GetPrimType();
        auto intrinsicOp = builder->CreateExprIntrinsicop(intrinsicID, OP_intrinsicop, primType, TyIdx(0), opndVector);
        auto regAssign = builder->CreateStmtRegassign(primType, regIdx, intrinsicOp);
        block->AddStatement(regAssign);
    } else {
        /* result symbol: dassign (intrinsicop ...) */
        auto fieldID = regFieldPair.GetFieldID();
        auto stIdx = returnPair->first;
        DEBUG_ASSERT(mirModule.CurFunction()->GetLocalOrGlobalSymbol(stIdx) != nullptr, "nullptr check");
        auto *type = mirModule.CurFunction()->GetLocalOrGlobalSymbol(stIdx)->GetType();
        auto intrinsicOp = builder->CreateExprIntrinsicop(intrinsicID, OP_intrinsicop, *type, opndVector);
        auto dAssign = builder->CreateStmtDassign(stIdx, fieldID, intrinsicOp);
        block->AddStatement(dAssign);
    }
    return LowerBlock(*block);
}
1185
/* Lower any *assigned call statement (call/virtual/superclass/interface,
 * intrinsiccall, icall) into a non-assigned call plus explicit saving of the
 * return value; dispatches per opcode and delegates block construction to
 * GenBlockNode. Atomic intrinsics and expandable mem ops may short-circuit
 * into a fully lowered block. */
BlockNode *CGLowerer::LowerCallAssignedStmt(StmtNode &stmt, bool uselvar)
{
    StmtNode *newCall = nullptr;
    CallReturnVector *p2nRets = nullptr;
    PUIdx funcCalled = kFuncNotFound;
    bool handledAtLowerLevel = false;
    switch (stmt.GetOpCode()) {
        case OP_callassigned:
        case OP_virtualcallassigned:
        case OP_superclasscallassigned:
        case OP_interfacecallassigned: {
            /* at -O2, a call recognized as memset/memcpy may expand inline */
            if (CGOptions::GetInstance().GetOptimizeLevel() >= CGOptions::kLevel2) {
                BlockNode *blkLowered = LowerMemop(stmt);
                if (blkLowered != nullptr) {
                    return blkLowered;
                }
            }
            auto &origCall = static_cast<CallNode &>(stmt);
            newCall = GenCallNode(stmt, funcCalled, origCall);
            p2nRets = &origCall.GetReturnVec();
            static_cast<CallNode *>(newCall)->SetReturnVec(*p2nRets);
            /* carry the frequency annotation over to the new statement */
            MIRFunction *curFunc = mirModule.CurFunction();
            curFunc->SetLastFreqMap(newCall->GetStmtID(),
                                    static_cast<uint32>(curFunc->GetFreqFromLastStmt(stmt.GetStmtID())));
            break;
        }
        case OP_intrinsiccallassigned:
        case OP_xintrinsiccallassigned: {
            BlockNode *blockNode = LowerIntrinsiccallToIntrinsicop(stmt);
            if (blockNode) {
                return blockNode;
            }
            IntrinsiccallNode &intrincall = static_cast<IntrinsiccallNode &>(stmt);
            auto intrinsicID = intrincall.GetIntrinsic();
            if (IntrinDesc::intrinTable[intrinsicID].IsAtomic()) {
                return LowerIntrinsiccallAassignedToAssignStmt(intrincall);
            }
            newCall = GenIntrinsiccallNode(stmt, funcCalled, handledAtLowerLevel, intrincall);
            p2nRets = &intrincall.GetReturnVec();
            static_cast<IntrinsiccallNode *>(newCall)->SetReturnVec(*p2nRets);
            break;
        }
        case OP_intrinsiccallwithtypeassigned: {
            BlockNode *blockNode = LowerIntrinsiccallToIntrinsicop(stmt);
            if (blockNode) {
                return blockNode;
            }
            auto &origCall = static_cast<IntrinsiccallNode &>(stmt);
            newCall = GenIntrinsiccallNode(stmt, funcCalled, handledAtLowerLevel, origCall);
            p2nRets = &origCall.GetReturnVec();
            static_cast<IntrinsiccallNode *>(newCall)->SetReturnVec(*p2nRets);
            break;
        }
        case OP_icallprotoassigned:
        case OP_icallassigned: {
            auto &origCall = static_cast<IcallNode &>(stmt);
            newCall = GenIcallNode(funcCalled, origCall);
            p2nRets = &origCall.GetReturnVec();
            static_cast<IcallNode *>(newCall)->SetReturnVec(*p2nRets);
            break;
        }
        default:
            CHECK_FATAL(false, "NIY");
            return nullptr;
    }

    /* transfer srcPosition location info */
    newCall->SetSrcPos(stmt.GetSrcPos());
    return GenBlockNode(*newCall, *p2nRets, stmt.GetOpCode(), funcCalled, handledAtLowerLevel, uselvar);
}
1256
LowerIntrinsiccallToIntrinsicop(StmtNode & stmt)1257 BlockNode *CGLowerer::LowerIntrinsiccallToIntrinsicop(StmtNode &stmt)
1258 {
1259 IntrinsiccallNode &intrinCall = static_cast<IntrinsiccallNode &>(stmt);
1260 auto intrinsicID = intrinCall.GetIntrinsic();
1261 if (IntrinDesc::intrinTable[intrinsicID].IsAtomic()) {
1262 return LowerIntrinsiccallAassignedToAssignStmt(intrinCall);
1263 }
1264 return nullptr;
1265 }
1266
1267 #if TARGAARCH64
IsStructElementSame(MIRType * ty)1268 static PrimType IsStructElementSame(MIRType *ty)
1269 {
1270 if (ty->GetKind() == kTypeArray) {
1271 MIRArrayType *arrtype = static_cast<MIRArrayType *>(ty);
1272 MIRType *pty = GlobalTables::GetTypeTable().GetTypeFromTyIdx(arrtype->GetElemTyIdx());
1273 if (pty->GetKind() == kTypeArray || pty->GetKind() == kTypeStruct) {
1274 return IsStructElementSame(pty);
1275 }
1276 return pty->GetPrimType();
1277 } else if (ty->GetKind() == kTypeStruct) {
1278 MIRStructType *sttype = static_cast<MIRStructType *>(ty);
1279 FieldVector fields = sttype->GetFields();
1280 PrimType oldtype = PTY_void;
1281 for (uint32 fcnt = 0; fcnt < fields.size(); ++fcnt) {
1282 TyIdx fieldtyidx = fields[fcnt].second.first;
1283 MIRType *fieldty = GlobalTables::GetTypeTable().GetTypeFromTyIdx(fieldtyidx);
1284 PrimType ptype = IsStructElementSame(fieldty);
1285 if (oldtype != PTY_void && oldtype != ptype) {
1286 return PTY_void;
1287 } else {
1288 oldtype = ptype;
1289 }
1290 }
1291 return oldtype;
1292 } else {
1293 return ty->GetPrimType();
1294 }
1295 }
1296 #endif
1297
1298 // return true if successfully lowered
LowerStructReturn(BlockNode & newBlk,StmtNode & stmt,bool & lvar)1299 bool CGLowerer::LowerStructReturn(BlockNode &newBlk, StmtNode &stmt, bool &lvar)
1300 {
1301 CallReturnVector *p2nrets = stmt.GetCallReturnVector();
1302 if (p2nrets->size() == 0) {
1303 return false;
1304 }
1305 CallReturnPair retPair = (*p2nrets)[0];
1306 if (retPair.second.IsReg()) {
1307 return false;
1308 }
1309 DEBUG_ASSERT(mirModule.CurFunction() != nullptr, "CurFunction should not be nullptr");
1310 MIRSymbol *retSym = mirModule.CurFunction()->GetLocalOrGlobalSymbol(retPair.first);
1311 DEBUG_ASSERT(retSym != nullptr, "retSym should not be nullptr");
1312 if (retSym->GetType()->GetPrimType() != PTY_agg) {
1313 return false;
1314 }
1315
1316 if (IsReturnInMemory(*retSym->GetType())) {
1317 lvar = true;
1318 } else if (!LowerStructReturnInRegs(newBlk, stmt, *retSym)) {
1319 return false;
1320 }
1321 return true;
1322 }
1323
/* Lower a callassigned/icallassigned whose aggregate result comes back in
 * registers: re-emit the call without the assigned part, then store the
 * register contents into the result symbol (GP registers, or FP registers on
 * AArch64 for homogeneous float aggregates). Returns false if the opcode is
 * not one of the handled call forms. */
bool CGLowerer::LowerStructReturnInRegs(BlockNode &newBlk, StmtNode &stmt, const MIRSymbol &retSym)
{
    // lower callassigned -> call
    if (stmt.GetOpCode() == OP_callassigned) {
        auto &callNode = static_cast<CallNode &>(stmt);
        for (size_t i = 0; i < callNode.GetNopndSize(); ++i) {
            auto *newOpnd = LowerExpr(callNode, *callNode.GetNopndAt(i), newBlk);
            callNode.SetOpnd(newOpnd, i);
        }
        auto *callStmt = mirModule.GetMIRBuilder()->CreateStmtCall(callNode.GetPUIdx(), callNode.GetNopnd());
        callStmt->SetSrcPos(callNode.GetSrcPos());
        newBlk.AddStatement(callStmt);
    } else if (stmt.GetOpCode() == OP_icallassigned || stmt.GetOpCode() == OP_icallprotoassigned) {
        auto &icallNode = static_cast<IcallNode &>(stmt);
        for (size_t i = 0; i < icallNode.GetNopndSize(); ++i) {
            auto *newOpnd = LowerExpr(icallNode, *icallNode.GetNopndAt(i), newBlk);
            icallNode.SetOpnd(newOpnd, i);
        }
        IcallNode *icallStmt = nullptr;
        if (stmt.GetOpCode() == OP_icallassigned) {
            icallStmt = mirModule.GetMIRBuilder()->CreateStmtIcall(icallNode.GetNopnd());
        } else {
            icallStmt = mirModule.GetMIRBuilder()->CreateStmtIcallproto(icallNode.GetNopnd(), icallNode.GetRetTyIdx());
        }
        icallStmt->SetSrcPos(icallNode.GetSrcPos());
        newBlk.AddStatement(icallStmt);
    } else {
        return false;
    }

    if (Triple::GetTriple().IsAarch64BeOrLe()) {
#if TARGAARCH64
        /* AArch64: homogeneous float aggregates return in FP registers */
        PrimType primType = PTY_begin;
        size_t elemNum = 0;
        if (IsHomogeneousAggregates(*retSym.GetType(), primType, elemNum)) {
            LowerStructReturnInFpRegs(newBlk, stmt, retSym, primType, elemNum);
        } else {
            LowerStructReturnInGpRegs(newBlk, stmt, retSym);
        }
#endif
    } else {
        LowerStructReturnInGpRegs(newBlk, stmt, retSym);
    }
    return true;
}
1369
// struct passed in gpregs, lowered into
// call &foo
// regassign u64 %1 (regread u64 %%retval0)
// regassign ptr %2 (addrof ptr $s)
// iassign <* u64> 0 (regread ptr %2, regread u64 %1)
/* Save the struct return value held in the GP return registers into the
 * result symbol: the registers are first captured into pregs, then stored
 * byte-wise (widest store that still fits) into the symbol's memory.
 * NOTE(review): stmt is currently unused in this body. */
void CGLowerer::LowerStructReturnInGpRegs(BlockNode &newBlk, const StmtNode &stmt, const MIRSymbol &symbol)
{
    auto size = static_cast<uint32>(symbol.GetType()->GetSize());
    if (size == 0) {
        return;
    }
    // save retval0, retval1
    PregIdx pIdx1R = 0;
    PregIdx pIdx2R = 0;
    DEBUG_ASSERT(GetCurrentFunc() != nullptr, "GetCurrentFunc should not be nullptr");
    auto genRetvalSave = [this, &newBlk](PregIdx &pIdx, SpecialReg sreg) {
        auto *regreadNode = mirBuilder->CreateExprRegread(PTY_u64, -sreg);
        pIdx = GetCurrentFunc()->GetPregTab()->CreatePreg(PTY_u64);
        auto *aStmt = mirBuilder->CreateStmtRegassign(PTY_u64, pIdx, regreadNode);
        newBlk.AddStatement(aStmt);
    };
    genRetvalSave(pIdx1R, kSregRetval0);
    /* the second return register is only needed beyond 8 bytes */
    if (size > k8ByteSize) {
        genRetvalSave(pIdx2R, kSregRetval1);
    }
    // save &s
    BaseNode *regAddr = mirBuilder->CreateExprAddrof(0, symbol);
    LowerTypePtr(*regAddr);
    PregIdx pIdxL = GetCurrentFunc()->GetPregTab()->CreatePreg(GetLoweredPtrType());
    auto *aStmt = mirBuilder->CreateStmtRegassign(PTY_a64, pIdxL, regAddr);
    newBlk.AddStatement(aStmt);

    // str retval to &s
    for (uint32 curSize = 0; curSize < size;) {
        // calc addr
        BaseNode *addrNode = mirBuilder->CreateExprRegread(GetLoweredPtrType(), pIdxL);
        if (curSize != 0) {
            MIRType *addrType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(GetLoweredPtrType());
            addrNode =
                mirBuilder->CreateExprBinary(OP_add, *addrType, addrNode, mirBuilder->CreateIntConst(curSize, PTY_i32));
        }

        /* bytes 0-7 come from retval0's preg, bytes 8+ from retval1's */
        PregIdx pIdxR = (curSize < k8ByteSize) ? pIdx1R : pIdx2R;
        uint32 strSize = size - curSize;
        // gen str retval to &s + offset
        auto genStrRetval2Memory = [this, &newBlk, &addrNode, &curSize, &pIdxR](PrimType primType) {
            /* shift to select the slice of the saved 64-bit register */
            uint32 shiftSize = (curSize * kBitsPerByte) % k64BitSize;
            if (CGOptions::IsBigEndian()) {
                shiftSize = k64BitSize - GetPrimTypeBitSize(primType) + shiftSize;
            }
            BaseNode *regreadExp = mirBuilder->CreateExprRegread(PTY_u64, pIdxR);
            if (shiftSize != 0) {
                MIRType *type = GlobalTables::GetTypeTable().GetTypeFromTyIdx(PTY_u64);
                regreadExp = mirBuilder->CreateExprBinary(OP_lshr, *type, regreadExp,
                                                          mirBuilder->CreateIntConst(shiftSize, PTY_i32));
            }
            auto *pointedType = GlobalTables::GetTypeTable().GetPrimType(primType);
            auto *iassignStmt = mirBuilder->CreateStmtIassign(*beCommon.BeGetOrCreatePointerType(*pointedType), 0,
                                                              addrNode, regreadExp);
            newBlk.AddStatement(iassignStmt);
            curSize += GetPrimTypeSize(primType);
        };
        /* emit the widest store that fits the remaining bytes */
        if (strSize >= k8ByteSize) {
            genStrRetval2Memory(PTY_u64);
        } else if (strSize >= k4ByteSize) {
            genStrRetval2Memory(PTY_u32);
        } else if (strSize >= k2ByteSize) {
            genStrRetval2Memory(PTY_u16);
        } else {
            genStrRetval2Memory(PTY_u8);
        }
    }
}
1443
1444 // struct passed in fpregs, lowered into
1445 // call &foo
1446 // regassign f64 %1 (regread f64 %%retval0)
1447 // regassign ptr %2 (addrof ptr $s)
1448 // iassign <* f64> 0 (regread ptr %2, regread f64 %1)
LowerStructReturnInFpRegs(BlockNode & newBlk,const StmtNode & stmt,const MIRSymbol & symbol,PrimType primType,size_t elemNum)1449 void CGLowerer::LowerStructReturnInFpRegs(BlockNode &newBlk, const StmtNode &stmt, const MIRSymbol &symbol,
1450 PrimType primType, size_t elemNum)
1451 {
1452 // save retvals
1453 static constexpr std::array sregs = {kSregRetval0, kSregRetval1, kSregRetval2, kSregRetval3};
1454 std::vector<PregIdx> pIdxs(sregs.size(), 0);
1455 DEBUG_ASSERT(GetCurrentFunc() != nullptr, "GetCurrentFunc should not be nullptr");
1456 for (uint32 i = 0; i < elemNum; ++i) {
1457 auto *regreadNode = mirBuilder->CreateExprRegread(primType, -sregs[i]);
1458 pIdxs[i] = GetCurrentFunc()->GetPregTab()->CreatePreg(primType);
1459 auto *aStmt = mirBuilder->CreateStmtRegassign(primType, pIdxs[i], regreadNode);
1460 newBlk.AddStatement(aStmt);
1461 }
1462
1463 // save &s
1464 BaseNode *regAddr = mirBuilder->CreateExprAddrof(0, symbol);
1465 LowerTypePtr(*regAddr);
1466 PregIdx pIdxL = GetCurrentFunc()->GetPregTab()->CreatePreg(GetLoweredPtrType());
1467 auto *aStmt = mirBuilder->CreateStmtRegassign(PTY_a64, pIdxL, regAddr);
1468 newBlk.AddStatement(aStmt);
1469
1470 // str retvals to &s
1471 for (uint32 i = 0; i < elemNum; ++i) {
1472 uint32 offsetSize = i * GetPrimTypeSize(primType);
1473 BaseNode *addrNode = mirBuilder->CreateExprRegread(GetLoweredPtrType(), pIdxL);
1474 // addr add offset
1475 if (offsetSize != 0) {
1476 MIRType *addrType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(GetLoweredPtrType());
1477 addrNode = mirBuilder->CreateExprBinary(OP_add, *addrType, addrNode,
1478 mirBuilder->CreateIntConst(offsetSize, PTY_i32));
1479 }
1480 // gen iassigen to addr
1481 auto *pointedType = GlobalTables::GetTypeTable().GetPrimType(primType);
1482 auto *iassignStmt = mirBuilder->CreateStmtIassign(*beCommon.BeGetOrCreatePointerType(*pointedType), 0, addrNode,
1483 mirBuilder->CreateExprRegread(PTY_u64, pIdxs[i]));
1484 newBlk.AddStatement(iassignStmt);
1485 }
1486 }
1487
LowerStmt(StmtNode & stmt,BlockNode & newBlk)1488 void CGLowerer::LowerStmt(StmtNode &stmt, BlockNode &newBlk)
1489 {
1490 for (size_t i = 0; i < stmt.NumOpnds(); ++i) {
1491 DEBUG_ASSERT(stmt.Opnd(i) != nullptr, "null ptr check");
1492 stmt.SetOpnd(LowerExpr(stmt, *stmt.Opnd(i), newBlk), i);
1493 }
1494 }
1495
LowerSwitchOpnd(StmtNode & stmt,BlockNode & newBlk)1496 void CGLowerer::LowerSwitchOpnd(StmtNode &stmt, BlockNode &newBlk)
1497 {
1498 BaseNode *opnd = LowerExpr(stmt, *stmt.Opnd(0), newBlk);
1499 if (CGOptions::GetInstance().GetOptimizeLevel() >= CGOptions::kLevel2 && opnd->GetOpCode() != OP_regread) {
1500 PrimType ptyp = stmt.Opnd(0)->GetPrimType();
1501 DEBUG_ASSERT(GetCurrentFunc() != nullptr, "GetCurrentFunc should not be nullptr");
1502 PregIdx pIdx = GetCurrentFunc()->GetPregTab()->CreatePreg(ptyp);
1503 RegassignNode *regAss = mirBuilder->CreateStmtRegassign(ptyp, pIdx, opnd);
1504 newBlk.AddStatement(regAss);
1505 GetCurrentFunc()->SetLastFreqMap(regAss->GetStmtID(),
1506 static_cast<uint32>(GetCurrentFunc()->GetFreqFromLastStmt(stmt.GetStmtID())));
1507 stmt.SetOpnd(mirBuilder->CreateExprRegread(ptyp, pIdx), 0);
1508 } else {
1509 stmt.SetOpnd(LowerExpr(stmt, *stmt.Opnd(0), newBlk), 0);
1510 }
1511 }
1512
AddElemToPrintf(MapleVector<BaseNode * > & argsPrintf,int num,...) const1513 void CGLowerer::AddElemToPrintf(MapleVector<BaseNode *> &argsPrintf, int num, ...) const
1514 {
1515 va_list argPtr;
1516 va_start(argPtr, num);
1517 for (int i = 0; i < num; ++i) {
1518 argsPrintf.push_back(va_arg(argPtr, BaseNode *));
1519 }
1520 va_end(argPtr);
1521 }
1522
LowerAssertBoundary(StmtNode & stmt,BlockNode & block,BlockNode & newBlk,std::vector<StmtNode * > & abortNode)1523 void CGLowerer::LowerAssertBoundary(StmtNode &stmt, BlockNode &block, BlockNode &newBlk,
1524 std::vector<StmtNode *> &abortNode)
1525 {
1526 MIRFunction *curFunc = mirModule.CurFunction();
1527 BaseNode *op0 = LowerExpr(stmt, *stmt.Opnd(0), block);
1528 BaseNode *op1 = LowerExpr(stmt, *stmt.Opnd(1), block);
1529 LabelIdx labIdx = GetLabelIdx(*curFunc);
1530 LabelNode *labelBC = mirBuilder->CreateStmtLabel(labIdx);
1531 Opcode op = OP_ge;
1532 if (kOpcodeInfo.IsAssertUpperBoundary(stmt.GetOpCode())) {
1533 op = (kOpcodeInfo.IsAssertLeBoundary(stmt.GetOpCode())) ? OP_le : OP_lt;
1534 }
1535 BaseNode *cond =
1536 mirBuilder->CreateExprCompare(op, *GlobalTables::GetTypeTable().GetUInt1(),
1537 *GlobalTables::GetTypeTable().GetPrimType(op0->GetPrimType()), op0, op1);
1538 CondGotoNode *brFalseNode = mirBuilder->CreateStmtCondGoto(cond, OP_brfalse, labIdx);
1539
1540 MIRFunction *printf = mirBuilder->GetOrCreateFunction("printf", TyIdx(PTY_i32));
1541 printf->GetFuncSymbol()->SetAppearsInCode(true);
1542 beCommon.UpdateTypeTable(*printf->GetMIRFuncType());
1543 MapleVector<BaseNode *> argsPrintf(mirBuilder->GetCurrentFuncCodeMpAllocator()->Adapter());
1544 uint32 oldTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
1545 uint32 newTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
1546 if (newTypeTableSize != oldTypeTableSize) {
1547 beCommon.AddNewTypeAfterBecommon(oldTypeTableSize, newTypeTableSize);
1548 }
1549 StmtNode *callPrintf = mirBuilder->CreateStmtCall(printf->GetPuidx(), argsPrintf);
1550 UnaryStmtNode *abortModeNode = mirBuilder->CreateStmtUnary(OP_abort, nullptr);
1551
1552 brFalseNode->SetSrcPos(stmt.GetSrcPos());
1553 labelBC->SetSrcPos(stmt.GetSrcPos());
1554 callPrintf->SetSrcPos(stmt.GetSrcPos());
1555 abortModeNode->SetSrcPos(stmt.GetSrcPos());
1556
1557 newBlk.AddStatement(brFalseNode);
1558 abortNode.emplace_back(labelBC);
1559 abortNode.emplace_back(callPrintf);
1560 abortNode.emplace_back(abortModeNode);
1561 }
1562
// Lower every statement of 'block' into a freshly allocated block and return
// it. Dispatches per opcode: switches are expanded, struct-returning calls are
// rewritten, returns are lowered per target ABI, EH opcodes are tracked, and
// everything else falls through to generic operand lowering. Failure paths
// collected by LowerAssertBoundary are appended at the very end.
BlockNode *CGLowerer::LowerBlock(BlockNode &block)
{
    BlockNode *newBlk = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
    BlockNode *tmpBlockNode = nullptr;
    std::vector<StmtNode *> abortNode;
    if (block.GetFirst() == nullptr) {
        return newBlk;
    }

    StmtNode *nextStmt = block.GetFirst();
    do {
        StmtNode *stmt = nextStmt;
        nextStmt = stmt->GetNext();
        // Detach the statement; it will be re-linked into newBlk (or replaced).
        stmt->SetNext(nullptr);
        currentBlock = newBlk;

        switch (stmt->GetOpCode()) {
            case OP_switch: {
                // Cache the selector, then expand the switch into compare/branch
                // sequences (and possibly jump tables) via SwitchLowerer.
                LowerSwitchOpnd(*stmt, *newBlk);
                auto switchMp = std::make_unique<ThreadLocalMemPool>(memPoolCtrler, "switchlowere");
                MapleAllocator switchAllocator(switchMp.get());
                SwitchLowerer switchLowerer(mirModule, static_cast<SwitchNode &>(*stmt), switchAllocator);
                BlockNode *blk = switchLowerer.LowerSwitch();
                if (blk->GetFirst() != nullptr) {
                    newBlk->AppendStatementsFromBlock(*blk);
                }
                needBranchCleanup = true;
                break;
            }
            case OP_block:
                // Nested block: lower recursively and splice in the result.
                tmpBlockNode = LowerBlock(static_cast<BlockNode &>(*stmt));
                CHECK_FATAL(tmpBlockNode != nullptr, "nullptr is not expected");
                newBlk->AppendStatementsFromBlock(*tmpBlockNode);
                break;
            case OP_dassign: {
                LowerDassign(static_cast<DassignNode &>(*stmt), *newBlk);
                break;
            }
            case OP_regassign: {
                LowerRegassign(static_cast<RegassignNode &>(*stmt), *newBlk);
                break;
            }
            CASE_OP_ASSERT_BOUNDARY
            {
                // Failure path is collected in abortNode and emitted after the loop.
                LowerAssertBoundary(*stmt, block, *newBlk, abortNode);
                break;
            }
            case OP_iassign: {
                LowerIassign(static_cast<IassignNode &>(*stmt), *newBlk);
                break;
            }
            case OP_callassigned:
            case OP_icallassigned:
            case OP_icallprotoassigned: {
                // pass the addr of lvar if this is a struct call assignment
                bool lvar = false;
                // nextStmt could be changed by the call to LowerStructReturn
                if (!LowerStructReturn(*newBlk, *stmt, lvar)) {
                    newBlk->AppendStatementsFromBlock(*LowerCallAssignedStmt(*stmt, lvar));
                }
                break;
            }
            case OP_virtualcallassigned:
            case OP_superclasscallassigned:
            case OP_interfacecallassigned:
            case OP_intrinsiccallassigned:
            case OP_xintrinsiccallassigned:
            case OP_intrinsiccallwithtypeassigned:
                newBlk->AppendStatementsFromBlock(*LowerCallAssignedStmt(*stmt));
                break;
            case OP_intrinsiccall:
            case OP_call:
            case OP_icall:
            case OP_icallproto:
#if TARGARM32 || TARGAARCH64 || TARGRISCV64 || TARGX86_64
                // nextStmt could be changed by the call to LowerStructReturn
                LowerCallStmt(*stmt, nextStmt, *newBlk);
#else
                LowerStmt(*stmt, *newBlk);
#endif
                break;
            case OP_return: {
#if TARGARM32 || TARGAARCH64 || TARGRISCV64 || TARGX86_64
                DEBUG_ASSERT(GetCurrentFunc() != nullptr, "GetCurrentFunc should not be nullptr");
                // Large struct returns go through the hidden first-arg pointer.
                if (GetCurrentFunc()->IsFirstArgReturn() && stmt->NumOpnds() > 0) {
                    newBlk->AppendStatementsFromBlock(
                        *LowerReturnStructUsingFakeParm(static_cast<NaryStmtNode &>(*stmt)));
                } else {
#endif
                    NaryStmtNode *retNode = static_cast<NaryStmtNode *>(stmt);
                    if (retNode->GetNopndSize() == 0) {
                        // Bare 'return': keep as-is.
                        newBlk->AddStatement(stmt);
                    } else {
                        tmpBlockNode = LowerReturn(*retNode);
                        CHECK_FATAL(tmpBlockNode != nullptr, "nullptr is not expected");
                        newBlk->AppendStatementsFromBlock(*tmpBlockNode);
                    }
#if TARGARM32 || TARGAARCH64 || TARGRISCV64 || TARGX86_64
                }
#endif
                break;
            }
            case OP_comment:
                newBlk->AddStatement(stmt);
                break;
            case OP_try:
                LowerStmt(*stmt, *newBlk);
                newBlk->AddStatement(stmt);
                // Remember that EH lowering is needed (see LowerTryCatchBlocks).
                hasTry = true;
                break;
            case OP_endtry:
                LowerStmt(*stmt, *newBlk);
                newBlk->AddStatement(stmt);
                break;
            case OP_catch:
                LowerStmt(*stmt, *newBlk);
                newBlk->AddStatement(stmt);
                break;
            case OP_throw:
                LowerStmt(*stmt, *newBlk);
                newBlk->AddStatement(stmt);
                break;
            case OP_syncenter:
            case OP_syncexit: {
                LowerStmt(*stmt, *newBlk);
                StmtNode *tmp = LowerSyncEnterSyncExit(*stmt);
                CHECK_FATAL(tmp != nullptr, "nullptr is not expected");
                newBlk->AddStatement(tmp);
                break;
            }
            case OP_decrefreset: {
                /*
                 * only gconly can reach here
                 * lower stmt (decrefreset (addrof ptr %RegX_RXXXX)) to (dassign %RegX_RXXXX 0 (constval ref 0))
                 */
                CHECK_FATAL(CGOptions::IsGCOnly(), "OP_decrefreset is expected only in gconly.");
                LowerResetStmt(*stmt, *newBlk);
                break;
            }
            case OP_asm: {
                LowerAsmStmt(static_cast<AsmNode *>(stmt), newBlk);
                break;
            }
            default:
                LowerStmt(*stmt, *newBlk);
                newBlk->AddStatement(stmt);
                break;
        }
        // Every type created during lowering must be mirrored into beCommon.
        CHECK_FATAL(beCommon.GetSizeOfTypeSizeTable() == GlobalTables::GetTypeTable().GetTypeTableSize(), "Error!");
    } while (nextStmt != nullptr);
    // Emit the collected assert-failure paths (label / printf / abort) last.
    for (auto node : abortNode) {
        newBlk->AddStatement(node);
    }
    return newBlk;
}
1718
// Walk the block and, for direct calls whose callee name appears in asmMap,
// redirect the call to the mapped replacement function (declared extern).
// No-op when the duplicate-asm-file option is empty.
void CGLowerer::SimplifyBlock(BlockNode &block) const
{
    if (block.GetFirst() == nullptr) {
        return;
    }
    StmtNode *nextStmt = block.GetFirst();
    do {
        StmtNode *stmt = nextStmt;
        nextStmt = stmt->GetNext();
        Opcode op = stmt->GetOpCode();
        switch (op) {
            case OP_call: {
                auto *callStmt = static_cast<CallNode *>(stmt);
                if (CGOptions::IsDuplicateAsmFileEmpty()) {
                    break;
                }
                // NOTE(review): oldFunc is dereferenced without a null check —
                // presumably the PUIdx of a call is always valid; confirm.
                auto *oldFunc = GlobalTables::GetFunctionTable().GetFunctionFromPuidx(callStmt->GetPUIdx());
                if (asmMap.find(oldFunc->GetName()) == asmMap.end()) {
                    break;
                }
                // Create (or fetch) the replacement and retarget the call to it.
                auto *newFunc = theMIRModule->GetMIRBuilder()->GetOrCreateFunction(asmMap.at(oldFunc->GetName()),
                                                                                   callStmt->GetTyIdx());
                MIRSymbol *funcSym = newFunc->GetFuncSymbol();
                funcSym->SetStorageClass(kScExtern);
                funcSym->SetAppearsInCode(true);
                callStmt->SetPUIdx(newFunc->GetPuidx());
                break;
            }
            default: {
                break;
            }
        }
    } while (nextStmt != nullptr);
    return;
}
1754
GetArrayNodeType(BaseNode & baseNode)1755 MIRType *CGLowerer::GetArrayNodeType(BaseNode &baseNode)
1756 {
1757 MIRType *baseType = nullptr;
1758 auto curFunc = mirModule.CurFunction();
1759 DEBUG_ASSERT(curFunc != nullptr, "curFunc should not be nullptr");
1760 if (baseNode.GetOpCode() == OP_regread) {
1761 RegreadNode *rrNode = static_cast<RegreadNode *>(&baseNode);
1762 MIRPreg *pReg = curFunc->GetPregTab()->PregFromPregIdx(rrNode->GetRegIdx());
1763 if (pReg->IsRef()) {
1764 baseType = pReg->GetMIRType();
1765 }
1766 }
1767 if (baseNode.GetOpCode() == OP_dread) {
1768 DreadNode *dreadNode = static_cast<DreadNode *>(&baseNode);
1769 MIRSymbol *symbol = curFunc->GetLocalOrGlobalSymbol(dreadNode->GetStIdx());
1770 DEBUG_ASSERT(symbol != nullptr, "nullptr check");
1771 baseType = symbol->GetType();
1772 }
1773 MIRType *arrayElemType = nullptr;
1774 if (baseType != nullptr) {
1775 MIRType *stType =
1776 GlobalTables::GetTypeTable().GetTypeFromTyIdx(static_cast<MIRPtrType *>(baseType)->GetPointedTyIdx());
1777 while (stType->GetKind() == kTypeJArray) {
1778 MIRJarrayType *baseType1 = static_cast<MIRJarrayType *>(stType);
1779 MIRType *elemType = baseType1->GetElemType();
1780 if (elemType->GetKind() == kTypePointer) {
1781 const TyIdx &index = static_cast<MIRPtrType *>(elemType)->GetPointedTyIdx();
1782 stType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(index);
1783 } else {
1784 stType = elemType;
1785 }
1786 }
1787
1788 arrayElemType = stType;
1789 }
1790 return arrayElemType;
1791 }
1792
SplitCallArg(CallNode & callNode,BaseNode * newOpnd,size_t i,BlockNode & newBlk)1793 void CGLowerer::SplitCallArg(CallNode &callNode, BaseNode *newOpnd, size_t i, BlockNode &newBlk)
1794 {
1795 if (newOpnd->GetOpCode() != OP_regread && newOpnd->GetOpCode() != OP_constval && newOpnd->GetOpCode() != OP_dread &&
1796 newOpnd->GetOpCode() != OP_addrof && newOpnd->GetOpCode() != OP_iaddrof &&
1797 newOpnd->GetOpCode() != OP_constval && newOpnd->GetOpCode() != OP_conststr &&
1798 newOpnd->GetOpCode() != OP_conststr16) {
1799 if (CGOptions::GetInstance().GetOptimizeLevel() == CGOptions::kLevel0) {
1800 MIRType *type = GlobalTables::GetTypeTable().GetPrimType(newOpnd->GetPrimType());
1801 MIRSymbol *ret = CreateNewRetVar(*type, kIntrnRetValPrefix);
1802 DassignNode *dassignNode = mirBuilder->CreateStmtDassign(*ret, 0, newOpnd);
1803 newBlk.AddStatement(dassignNode);
1804 callNode.SetOpnd(mirBuilder->CreateExprDread(*type, 0, *ret), i);
1805 } else {
1806 DEBUG_ASSERT(mirModule.CurFunction() != nullptr, "curFunction should not be nullptr");
1807 PregIdx pregIdx = mirModule.CurFunction()->GetPregTab()->CreatePreg(newOpnd->GetPrimType());
1808 RegassignNode *temp = mirBuilder->CreateStmtRegassign(newOpnd->GetPrimType(), pregIdx, newOpnd);
1809 newBlk.AddStatement(temp);
1810 callNode.SetOpnd(mirBuilder->CreateExprRegread(newOpnd->GetPrimType(), pregIdx), i);
1811 }
1812 } else {
1813 callNode.SetOpnd(newOpnd, i);
1814 }
1815 }
1816
1817
LowerTypePtr(BaseNode & node) const1818 void CGLowerer::LowerTypePtr(BaseNode &node) const
1819 {
1820 if ((node.GetPrimType() == PTY_ptr) || (node.GetPrimType() == PTY_ref)) {
1821 node.SetPrimType(GetLoweredPtrType());
1822 }
1823
1824 if (kOpcodeInfo.IsTypeCvt(node.GetOpCode())) {
1825 auto &cvt = static_cast<TypeCvtNode &>(node);
1826 if ((cvt.FromType() == PTY_ptr) || (cvt.FromType() == PTY_ref)) {
1827 cvt.SetFromType(GetLoweredPtrType());
1828 }
1829 } else if (kOpcodeInfo.IsCompare(node.GetOpCode())) {
1830 auto &cmp = static_cast<CompareNode &>(node);
1831 if ((cmp.GetOpndType() == PTY_ptr) || (cmp.GetOpndType() == PTY_ref)) {
1832 cmp.SetOpndType(GetLoweredPtrType());
1833 }
1834 }
1835 }
1836
// Decide how 'func' returns a struct and rewrite its signature accordingly:
// small/homogeneous-FP aggregates are returned in registers; aggregates over
// 16 bytes get a hidden first parameter (".return.<name>") pointing at the
// caller-provided result buffer, and the return type becomes void.
void CGLowerer::LowerEntry(MIRFunction &func)
{
    // determine if needed to insert fake parameter to return struct for current function
    if (func.IsReturnStruct()) {
        MIRType *retType = func.GetReturnType();
#if TARGAARCH64
        if (Triple::GetTriple().GetArch() == Triple::ArchType::aarch64) {
            // Homogeneous FP / vector aggregates are returned in FP registers.
            PrimType pty = IsStructElementSame(retType);
            if (pty == PTY_f32 || pty == PTY_f64 || IsPrimitiveVector(pty)) {
                func.SetStructReturnedInRegs();
                return;
            }
        }
#endif
        if (retType->GetPrimType() != PTY_agg) {
            return;
        }
        if (retType->GetSize() > k16ByteSize) {
            // Too large for registers: return through a hidden pointer argument.
            func.SetFirstArgReturn();
            func.GetMIRFuncType()->SetFirstArgReturn();
        } else {
            func.SetStructReturnedInRegs();
        }
    }
    if (func.IsFirstArgReturn() && func.GetReturnType()->GetPrimType() != PTY_void) {
        // Materialize the hidden return-pointer formal ".return.<funcname>".
        MIRSymbol *retSt = func.GetSymTab()->CreateSymbol(kScopeLocal);
        retSt->SetStorageClass(kScFormal);
        retSt->SetSKind(kStVar);
        std::string retName(".return.");
        MIRSymbol *funcSt = GlobalTables::GetGsymTable().GetSymbolFromStidx(func.GetStIdx().Idx());
        DEBUG_ASSERT(funcSt != nullptr, "null ptr check");
        retName += funcSt->GetName();
        retSt->SetNameStrIdx(retName);
        MIRType *pointType = beCommon.BeGetOrCreatePointerType(*func.GetReturnType());

        retSt->SetTyIdx(pointType->GetTypeIndex());
        // New formal list: hidden return pointer first, then the original formals.
        std::vector<MIRSymbol *> formals;
        formals.emplace_back(retSt);
        for (uint32 i = 0; i < func.GetFormalCount(); ++i) {
            auto formal = func.GetFormal(i);
            formals.emplace_back(formal);
        }
        func.SetFirstArgReturn();

        // Remember the real return type before it is rewritten to void below.
        beCommon.AddElementToFuncReturnType(func, func.GetReturnTyIdx());

        func.UpdateFuncTypeAndFormalsAndReturnType(formals, TyIdx(PTY_void), true);
        auto *funcType = func.GetMIRFuncType();
        DEBUG_ASSERT(funcType != nullptr, "null ptr check");
        funcType->SetFirstArgReturn();
        beCommon.AddTypeSizeAndAlign(funcType->GetTypeIndex(), GetPrimTypeSize(funcType->GetPrimType()));
    }
}
1890
LowerPseudoRegs(const MIRFunction & func) const1891 void CGLowerer::LowerPseudoRegs(const MIRFunction &func) const
1892 {
1893 for (uint32 i = 1; i < func.GetPregTab()->Size(); ++i) {
1894 MIRPreg *ipr = func.GetPregTab()->PregFromPregIdx(i);
1895 PrimType primType = ipr->GetPrimType();
1896 if (primType == PTY_u1) {
1897 ipr->SetPrimType(PTY_u32);
1898 }
1899 }
1900 }
1901
// Remove "goto L; ...comments...; L:" patterns from the function body: a goto
// whose target label immediately follows (ignoring comment statements) is
// deleted, and any skipped comments are re-inserted so none are lost. Runs of
// EH opcodes (try/endtry/catch) between goto and label block the cleanup.
void CGLowerer::CleanupBranches(MIRFunction &func) const
{
    BlockNode *block = func.GetBody();
    StmtNode *prev = nullptr;
    StmtNode *next = nullptr;
    for (StmtNode *curr = block->GetFirst(); curr != nullptr; curr = next) {
        next = curr->GetNext();
        if (next != nullptr) {
            // Sanity: the doubly-linked statement list must be consistent.
            CHECK_FATAL(curr == next->GetPrev(), "unexpected node");
        }
        if ((next != nullptr) && (prev != nullptr) && (curr->GetOpCode() == OP_goto)) {
            /*
             * Skip until find a label.
             * Note that the CURRent 'goto' statement may be the last statement
             * when discounting comment statements.
             * Make sure we don't lose any comments.
             */
            StmtNode *cmtB = nullptr;
            StmtNode *cmtE = nullptr;
            bool isCleanable = true;
            // First scan: is there an EH opcode before the next label? If so,
            // the region cannot be cleaned up.
            while ((next != nullptr) && (next->GetOpCode() != OP_label)) {
                if ((next->GetOpCode() == OP_try) || (next->GetOpCode() == OP_endtry) ||
                    (next->GetOpCode() == OP_catch)) {
                    isCleanable = false;
                    break;
                }
                next = next->GetNext();
            }
            if ((next != nullptr) && (!isCleanable)) {
                // Resume the outer loop just before the EH statement we found.
                prev = next->GetPrev();
                continue;
            }

            // Second scan: collect the comment statements between the goto and
            // the label into a detached chain [cmtB..cmtE].
            next = curr->GetNext();

            while ((next != nullptr) && (next->GetOpCode() != OP_label)) {
                if (next->GetOpCode() == OP_comment) {
                    if (cmtB == nullptr) {
                        cmtB = next;
                        cmtE = next;
                    } else {
                        CHECK_FATAL(cmtE != nullptr, "cmt_e is null in CGLowerer::CleanupBranches");
                        cmtE->SetNext(next);
                        next->SetPrev(cmtE);
                        cmtE = next;
                    }
                }
                next = next->GetNext();
            }

            // Link the goto directly to the label (drops skipped non-comment stmts).
            curr->SetNext(next);

            if (next != nullptr) {
                next->SetPrev(curr);
            }

            StmtNode *insertAfter = nullptr;

            if ((next != nullptr) &&
                ((static_cast<GotoNode *>(curr))->GetOffset() == (static_cast<LabelNode *>(next))->GetLabelIdx())) {
                // goto targets the very next label: the goto itself is redundant.
                insertAfter = prev;
                prev->SetNext(next); /* skip goto statement (which is pointed by curr) */
                next->SetPrev(prev);
                curr = next;            /* make curr point to the label statement */
                next = next->GetNext(); /* advance next to the next statement of the label statement */
            } else {
                insertAfter = curr;
            }

            /* insert comments before 'curr' */
            if (cmtB != nullptr) {
                CHECK_FATAL(cmtE != nullptr, "nullptr is not expected");
                StmtNode *iaNext = insertAfter->GetNext();
                if (iaNext != nullptr) {
                    iaNext->SetPrev(cmtE);
                }
                cmtE->SetNext(iaNext);

                insertAfter->SetNext(cmtB);
                cmtB->SetPrev(insertAfter);

                if (insertAfter == curr) {
                    curr = cmtE;
                }
            }
            if (next == nullptr) {
                // We removed the old tail; update the block's last pointer.
                func.GetBody()->SetLast(curr);
            }
        }
        prev = curr;
    }
    CHECK_FATAL(func.GetBody()->GetLast() == prev, "make sure the return value of GetLast equal prev");
}
1995
// Prepare exception-handling lowering for 'body'; no-op unless a try statement
// was seen while lowering (hasTry set in LowerBlock).
// NOTE(review): a TryCatchBlocksLower is constructed and configured here, but
// no traversal/lowering entry point is invoked on it in this version — confirm
// whether the actual lowering call was intentionally removed.
void CGLowerer::LowerTryCatchBlocks(BlockNode &body)
{
    if (!hasTry) {
        return;
    }

#if DEBUG
    BBT::ValidateStmtList(nullptr, nullptr);
#endif
    auto memPool = std::make_unique<ThreadLocalMemPool>(memPoolCtrler, "CreateNewBB mempool");
    TryCatchBlocksLower tryCatchLower(*memPool, body, mirModule);
    bool generateEHCode = GenerateExceptionHandlingCode();
    tryCatchLower.SetGenerateEHCode(generateEHCode);
#if DEBUG
    tryCatchLower.CheckTryCatchPattern();
#endif
}
2013
IsAccessingTheSameMemoryLocation(const DassignNode & dassign,const RegreadNode & rRead,const CGLowerer & cgLowerer)2014 inline bool IsAccessingTheSameMemoryLocation(const DassignNode &dassign, const RegreadNode &rRead,
2015 const CGLowerer &cgLowerer)
2016 {
2017 StIdx stIdx = cgLowerer.GetSymbolReferredToByPseudoRegister(rRead.GetRegIdx());
2018 return ((dassign.GetStIdx() == stIdx) && (dassign.GetFieldID() == 0));
2019 }
2020
IsAccessingTheSameMemoryLocation(const DassignNode & dassign,const DreadNode & dread)2021 inline bool IsAccessingTheSameMemoryLocation(const DassignNode &dassign, const DreadNode &dread)
2022 {
2023 return ((dassign.GetStIdx() == dread.GetStIdx()) && (dassign.GetFieldID() == dread.GetFieldID()));
2024 }
2025
IsDassignNOP(const DassignNode & dassign)2026 inline bool IsDassignNOP(const DassignNode &dassign)
2027 {
2028 if (dassign.GetRHS()->GetOpCode() == OP_dread) {
2029 return IsAccessingTheSameMemoryLocation(dassign, static_cast<DreadNode &>(*dassign.GetRHS()));
2030 }
2031 return false;
2032 }
2033
IsConstvalZero(const BaseNode & n)2034 inline bool IsConstvalZero(const BaseNode &n)
2035 {
2036 return ((n.GetOpCode() == OP_constval) && static_cast<const ConstvalNode &>(n).GetConstVal()->IsZero());
2037 }
2038
// Synthetic builtin-function IDs for the MCC sync-enter/exit helpers,
// allocated consecutively after the last real intrinsic ID.
#define NEXT_ID(x) ((x) + 1)
#define INTRN_FIRST_SYNC_ENTER NEXT_ID(INTRN_LAST)
#define INTRN_SECOND_SYNC_ENTER NEXT_ID(INTRN_FIRST_SYNC_ENTER)
#define INTRN_THIRD_SYNC_ENTER NEXT_ID(INTRN_SECOND_SYNC_ENTER)
#define INTRN_FOURTH_SYNC_ENTER NEXT_ID(INTRN_THIRD_SYNC_ENTER)
// NOTE(review): "INTRN_YNC_EXIT" is presumably a typo for "INTRN_SYNC_EXIT";
// the name is only referenced in RegisterBuiltIns, so behavior is unaffected.
#define INTRN_YNC_EXIT NEXT_ID(INTRN_FOURTH_SYNC_ENTER)

// Process-wide caches shared by every CGLowerer instance: registered builtin
// PUIdx mappings, intrinsic-wrapper PUIdx mappings, and array-class cache slots.
std::vector<std::pair<CGLowerer::BuiltinFunctionID, PUIdx>> CGLowerer::builtinFuncIDs;
std::unordered_map<IntrinDesc *, PUIdx> CGLowerer::intrinFuncIDs;
std::unordered_map<std::string, size_t> CGLowerer::arrayClassCacheIndex;
2049
// Create (or fetch) an extern function "void name(void *paramName)" and record
// its PUIdx in builtinFuncIDs under 'id'. The MCC sync-enter/exit helpers get
// an extra hidden "monitor_slot" pointer formal. Returns the MIRFunction.
MIRFunction *CGLowerer::RegisterFunctionVoidStarToVoid(BuiltinFunctionID id, const std::string &name,
                                                       const std::string &paramName)
{
    MIRFunction *func = mirBuilder->GetOrCreateFunction(name, GlobalTables::GetTypeTable().GetVoid()->GetTypeIndex());
    beCommon.UpdateTypeTable(*func->GetMIRFuncType());
    func->AllocSymTab();
    MIRSymbol *funcSym = func->GetFuncSymbol();
    funcSym->SetStorageClass(kScExtern);
    funcSym->SetAppearsInCode(true);
    // Single void* formal named 'paramName'.
    MIRType *argTy = GlobalTables::GetTypeTable().GetPtr();
    MIRSymbol *argSt = func->GetSymTab()->CreateSymbol(kScopeLocal);
    argSt->SetNameStrIdx(mirBuilder->GetOrCreateStringIndex(paramName));
    argSt->SetTyIdx(argTy->GetTypeIndex());
    argSt->SetStorageClass(kScFormal);
    argSt->SetSKind(kStVar);
    func->GetSymTab()->AddToStringSymbolMap(*argSt);
    std::vector<MIRSymbol *> formals;
    formals.emplace_back(argSt);
    if ((name == "MCC_SyncEnterFast0") || (name == "MCC_SyncEnterFast1") || (name == "MCC_SyncEnterFast2") ||
        (name == "MCC_SyncEnterFast3") || (name == "MCC_SyncExitFast")) {
        // Sync helpers take a second pointer formal for the monitor slot.
        MIRSymbol *argStMatch = func->GetSymTab()->CreateSymbol(kScopeLocal);
        argStMatch->SetNameStrIdx(mirBuilder->GetOrCreateStringIndex("monitor_slot"));
        argStMatch->SetTyIdx(argTy->GetTypeIndex());
        argStMatch->SetStorageClass(kScFormal);
        argStMatch->SetSKind(kStVar);
        func->GetSymTab()->AddToStringSymbolMap(*argStMatch);
        formals.emplace_back(argStMatch);
    }
    func->UpdateFuncTypeAndFormalsAndReturnType(formals, GlobalTables::GetTypeTable().GetVoid()->GetTypeIndex(), false);
    auto *funcType = func->GetMIRFuncType();
    DEBUG_ASSERT(funcType != nullptr, "null ptr check");
    // Mirror the new function type into beCommon's size/align tables.
    beCommon.AddTypeSizeAndAlign(funcType->GetTypeIndex(), GetPrimTypeSize(funcType->GetPrimType()));

    builtinFuncIDs.emplace_back(std::pair<BuiltinFunctionID, PUIdx>(id, func->GetPuidx()));
    return func;
}
2086
RegisterBuiltIns()2087 void CGLowerer::RegisterBuiltIns()
2088 {
2089 for (uint32 i = 0; i < sizeof(cgBuiltins) / sizeof(cgBuiltins[0]); ++i) {
2090 BuiltinFunctionID id = cgBuiltins[i].first;
2091 IntrinDesc &desc = IntrinDesc::intrinTable[id];
2092
2093 MIRFunction *func = mirBuilder->GetOrCreateFunction(cgBuiltins[i].second,
2094 GlobalTables::GetTypeTable().GetVoid()->GetTypeIndex());
2095 beCommon.UpdateTypeTable(*func->GetMIRFuncType());
2096 func->AllocSymTab();
2097 MIRSymbol *funcSym = func->GetFuncSymbol();
2098 DEBUG_ASSERT(funcSym != nullptr, "funcSym should not be nullptr");
2099 funcSym->SetStorageClass(kScExtern);
2100 funcSym->SetAppearsInCode(true);
2101 /* return type */
2102 MIRType *retTy = desc.GetReturnType();
2103 CHECK_FATAL(retTy != nullptr, "retTy should not be nullptr");
2104 /* use void* for PTY_dynany */
2105 if (retTy->GetPrimType() == PTY_dynany) {
2106 retTy = GlobalTables::GetTypeTable().GetPtr();
2107 }
2108
2109 std::vector<MIRSymbol *> formals;
2110 const std::string params[IntrinDesc::kMaxArgsNum] = {"p0", "p1", "p2", "p3", "p4", "p5"};
2111 for (uint32 j = 0; j < IntrinDesc::kMaxArgsNum; ++j) {
2112 MIRType *argTy = desc.GetArgType(j);
2113 if (argTy == nullptr) {
2114 break;
2115 }
2116 /* use void* for PTY_dynany */
2117 if (argTy->GetPrimType() == PTY_dynany) {
2118 argTy = GlobalTables::GetTypeTable().GetPtr();
2119 }
2120 MIRSymbol *argSt = func->GetSymTab()->CreateSymbol(kScopeLocal);
2121 argSt->SetNameStrIdx(mirBuilder->GetOrCreateStringIndex(params[j]));
2122 argSt->SetTyIdx(argTy->GetTypeIndex());
2123 argSt->SetStorageClass(kScFormal);
2124 argSt->SetSKind(kStVar);
2125 func->GetSymTab()->AddToStringSymbolMap(*argSt);
2126 formals.emplace_back(argSt);
2127 }
2128 func->UpdateFuncTypeAndFormalsAndReturnType(formals, retTy->GetTypeIndex(), false);
2129 auto *funcType = func->GetMIRFuncType();
2130 DEBUG_ASSERT(funcType != nullptr, "null ptr check");
2131 beCommon.AddTypeSizeAndAlign(funcType->GetTypeIndex(), GetPrimTypeSize(funcType->GetPrimType()));
2132
2133 builtinFuncIDs.emplace_back(std::pair<BuiltinFunctionID, PUIdx>(id, func->GetPuidx()));
2134 }
2135
2136 /* register __builtin_sync_enter */
2137 static_cast<void>(RegisterFunctionVoidStarToVoid(INTRN_FIRST_SYNC_ENTER, "MCC_SyncEnterFast0", "obj"));
2138 static_cast<void>(RegisterFunctionVoidStarToVoid(INTRN_SECOND_SYNC_ENTER, "MCC_SyncEnterFast1", "obj"));
2139 static_cast<void>(RegisterFunctionVoidStarToVoid(INTRN_THIRD_SYNC_ENTER, "MCC_SyncEnterFast2", "obj"));
2140 static_cast<void>(RegisterFunctionVoidStarToVoid(INTRN_FOURTH_SYNC_ENTER, "MCC_SyncEnterFast3", "obj"));
2141 /* register __builtin_sync_exit */
2142 static_cast<void>(RegisterFunctionVoidStarToVoid(INTRN_YNC_EXIT, "MCC_SyncExitFast", "obj"));
2143 }
2144
2145 /*
2146 * From Maple IR Document as of Apr 14, 2017
2147 * Type Conversion Expression Opcodes
2148 * Conversions between integer types of different sizes require the cvt opcode.
2149 * Conversion between signed and unsigned integers of the same size does not
2150 * require any operation, not even retype.
2151 * cvt :
2152 * Convert the operand's value from <from-type> to <to-type>.
2153 * If the sizes of the two types are the same, the conversion must involve
2154 * altering the bits.
2155 * retype:
2156 * <opnd0> is converted to <prim-type> which has derived type <type> without
2157 * changing any bits. The size of <opnd0> and <prim-type> must be the same.
2158 * <opnd0> may be of aggregate type.
2159 */
// Build the minimal node that converts 'src' from sType to dType:
//   - int -> same-width float: retype (bit pattern unchanged)
//   - int -> u1: compare-ne against 0
//   - narrowing, widening, or signedness change: explicit cvt
//   - same width and signedness: just relabel the node's prim type
// Fails fatally for any int/float combination not listed above.
BaseNode *CGLowerer::MergeToCvtType(PrimType dType, PrimType sType, BaseNode &src) const
{
    CHECK_FATAL(IsPrimitiveInteger(dType) || IsPrimitiveFloat(dType),
                "dtype should be primitiveInteger or primitiveFloat");
    CHECK_FATAL(IsPrimitiveInteger(sType) || IsPrimitiveFloat(sType),
                "sType should be primitiveInteger or primitiveFloat");
    /* src i32, dest f32; src i64, dest f64 */
    CHECK_FATAL(
        (IsPrimitiveInteger(sType) && IsPrimitiveFloat(dType) &&
         (GetPrimTypeBitSize(sType) == GetPrimTypeBitSize(dType))) ||
            (IsPrimitiveInteger(sType) && IsPrimitiveInteger(dType)),
        "when sType is primitiveInteger and dType is primitiveFloat, sType's primTypeBitSize must equal dType's,"
        " or both sType and dType should primitiveInteger");

    /* src & dest are both of float type */
    MIRType *toType = GlobalTables::GetTypeTable().GetPrimType(dType);
    MIRType *fromType = GlobalTables::GetTypeTable().GetPrimType(sType);
    if (IsPrimitiveInteger(sType) && IsPrimitiveFloat(dType) &&
        (GetPrimTypeBitSize(sType) == GetPrimTypeBitSize(dType))) {
        // Same-width int -> float: reinterpret bits, no value conversion.
        return mirBuilder->CreateExprRetype(*toType, *fromType, &src);
    } else if (IsPrimitiveInteger(sType) && IsPrimitiveInteger(dType)) {
        if (GetPrimTypeBitSize(sType) >= GetPrimTypeBitSize(dType)) {
            if (dType == PTY_u1) { /* e.g., type _Bool */
                // Normalize to 0/1 via (src != 0), computed in u8.
                toType = GlobalTables::GetTypeTable().GetPrimType(PTY_u8);
                return mirBuilder->CreateExprCompare(OP_ne, *toType, *fromType, &src,
                                                     mirBuilder->CreateIntConst(0, sType));
            } else if (GetPrimTypeBitSize(sType) > GetPrimTypeBitSize(dType)) {
                // Narrowing conversion requires an explicit cvt.
                return mirBuilder->CreateExprTypeCvt(OP_cvt, *toType, *fromType, &src);
            } else if (IsSignedInteger(sType) != IsSignedInteger(dType)) {
                // Same width but signedness differs: keep an explicit cvt.
                return mirBuilder->CreateExprTypeCvt(OP_cvt, *toType, *fromType, &src);
            }
            // Same width, same signedness: relabel the node in place.
            src.SetPrimType(dType);
            return &src;
            /*
             * Force type cvt here because we currently do not run constant folding
             * or contanst propagation before CG. We may revisit this decision later.
             */
        } else if (GetPrimTypeBitSize(sType) < GetPrimTypeBitSize(dType)) {
            // Widening conversion.
            return mirBuilder->CreateExprTypeCvt(OP_cvt, *toType, *fromType, &src);
        } else if (IsConstvalZero(src)) {
            return mirBuilder->CreateIntConst(0, dType);
        }
        CHECK_FATAL(false, "should not run here");
    }
    CHECK_FATAL(false, "should not run here");
}
2206
GetLenNode(BaseNode & opnd0)2207 IreadNode &CGLowerer::GetLenNode(BaseNode &opnd0)
2208 {
2209 MIRIntConst *arrayHeaderNode = GlobalTables::GetIntConstTable().GetOrCreateIntConst(
2210 RTSupport::GetRTSupportInstance().GetArrayLengthOffset(),
2211 *GlobalTables::GetTypeTable().GetTypeFromTyIdx(opnd0.GetPrimType()));
2212 BaseNode *arrayHeaderCstNode = mirModule.CurFuncCodeMemPool()->New<ConstvalNode>(arrayHeaderNode);
2213 arrayHeaderCstNode->SetPrimType(opnd0.GetPrimType());
2214 MIRType *addrType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(opnd0.GetPrimType());
2215 BaseNode *refLenAddr = mirBuilder->CreateExprBinary(OP_add, *addrType, &opnd0, arrayHeaderCstNode);
2216 MIRType *infoLenType = GlobalTables::GetTypeTable().GetInt32();
2217 MIRType *ptrType = beCommon.BeGetOrCreatePointerType(*infoLenType);
2218 IreadNode *lenNode = mirBuilder->CreateExprIread(*infoLenType, *ptrType, 0, refLenAddr);
2219 return (*lenNode);
2220 }
2221
GetLabelIdx(MIRFunction & curFunc) const2222 LabelIdx CGLowerer::GetLabelIdx(MIRFunction &curFunc) const
2223 {
2224 std::string suffix = std::to_string(curFunc.GetLabelTab()->GetLabelTableSize());
2225 GStrIdx labelStrIdx = GlobalTables::GetStrTable().GetOrCreateStrIdxFromName("__label_BC_" + suffix);
2226 LabelIdx labIdx = curFunc.GetLabelTab()->AddLabel(labelStrIdx);
2227 return labIdx;
2228 }
2229
ProcessArrayExpr(BaseNode & expr,BlockNode & blkNode)2230 void CGLowerer::ProcessArrayExpr(BaseNode &expr, BlockNode &blkNode)
2231 {
2232 /* Array boundary check */
2233 MIRFunction *curFunc = mirModule.CurFunction();
2234 auto &arrayNode = static_cast<ArrayNode &>(expr);
2235 StmtNode *boundaryCheckStmt = nullptr;
2236 if (arrayNode.GetBoundsCheck()) {
2237 CHECK_FATAL(arrayNode.GetNopndSize() == kOperandNumBinary, "unexpected nOpnd size");
2238 BaseNode *opnd0 = arrayNode.GetNopndAt(0);
2239 if (opnd0->GetOpCode() == OP_iread) {
2240 DEBUG_ASSERT(curFunc != nullptr, "curFunc should not be nullptr");
2241 PregIdx pregIdx = curFunc->GetPregTab()->CreatePreg(opnd0->GetPrimType());
2242 RegassignNode *temp = mirBuilder->CreateStmtRegassign(opnd0->GetPrimType(), pregIdx, opnd0);
2243 blkNode.InsertAfter(blkNode.GetLast(), temp);
2244 arrayNode.SetNOpndAt(0, mirBuilder->CreateExprRegread(opnd0->GetPrimType(), pregIdx));
2245 }
2246 IreadNode &lenNode = GetLenNode(*opnd0);
2247 PregIdx lenPregIdx = curFunc->GetPregTab()->CreatePreg(lenNode.GetPrimType());
2248 RegassignNode *lenRegassignNode = mirBuilder->CreateStmtRegassign(lenNode.GetPrimType(), lenPregIdx, &lenNode);
2249 BaseNode *lenRegreadNode = mirBuilder->CreateExprRegread(PTY_u32, lenPregIdx);
2250
2251 LabelIdx labIdx = GetLabelIdx(*curFunc);
2252 LabelNode *labelBC = mirBuilder->CreateStmtLabel(labIdx);
2253 ;
2254 BaseNode *cond = mirBuilder->CreateExprCompare(OP_ge, *GlobalTables::GetTypeTable().GetUInt1(),
2255 *GlobalTables::GetTypeTable().GetUInt32(),
2256 arrayNode.GetNopndAt(1), lenRegreadNode);
2257 CondGotoNode *brFalseNode = mirBuilder->CreateStmtCondGoto(cond, OP_brfalse, labIdx);
2258 MIRFunction *fn = mirBuilder->GetOrCreateFunction("MCC_Array_Boundary_Check", TyIdx(PTY_void));
2259 fn->GetFuncSymbol()->SetAppearsInCode(true);
2260 beCommon.UpdateTypeTable(*fn->GetMIRFuncType());
2261 fn->AllocSymTab();
2262 MapleVector<BaseNode *> args(mirBuilder->GetCurrentFuncCodeMpAllocator()->Adapter());
2263 args.emplace_back(arrayNode.GetNopndAt(0));
2264 args.emplace_back(arrayNode.GetNopndAt(1));
2265 boundaryCheckStmt = mirBuilder->CreateStmtCall(fn->GetPuidx(), args);
2266 blkNode.InsertAfter(blkNode.GetLast(), lenRegassignNode);
2267 blkNode.InsertAfter(blkNode.GetLast(), brFalseNode);
2268 blkNode.InsertAfter(blkNode.GetLast(), boundaryCheckStmt);
2269 blkNode.InsertAfter(blkNode.GetLast(), labelBC);
2270 }
2271 }
2272
LowerExpr(BaseNode & parent,BaseNode & expr,BlockNode & blkNode)2273 BaseNode *CGLowerer::LowerExpr(BaseNode &parent, BaseNode &expr, BlockNode &blkNode)
2274 {
2275 bool isCvtU1Expr = (expr.GetOpCode() == OP_cvt && expr.GetPrimType() == PTY_u1 &&
2276 static_cast<TypeCvtNode &>(expr).FromType() != PTY_u1);
2277 if (expr.GetPrimType() == PTY_u1) {
2278 expr.SetPrimType(PTY_u8);
2279 }
2280
2281 if (expr.GetOpCode() == OP_iread && expr.Opnd(0)->GetOpCode() == OP_array) {
2282 BaseNode *node = LowerExpr(expr, *expr.Opnd(0), blkNode);
2283 if (node->GetOpCode() == OP_intrinsicop) {
2284 auto *binNode = static_cast<IntrinsicopNode *>(node);
2285 return binNode;
2286 } else {
2287 expr.SetOpnd(node, 0);
2288 }
2289 } else {
2290 for (size_t i = 0; i < expr.NumOpnds(); ++i) {
2291 expr.SetOpnd(LowerExpr(expr, *expr.Opnd(i), blkNode), i);
2292 }
2293 }
2294 // Convert `cvt u1 xx <expr>` to `ne u8 xx (<expr>, constval xx 0)`
2295 // No need to convert `cvt u1 u1 <expr>`
2296 if (isCvtU1Expr) {
2297 auto &cvtExpr = static_cast<TypeCvtNode &>(expr);
2298 PrimType fromType = cvtExpr.FromType();
2299 auto *fromMIRType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(TyIdx(fromType));
2300 // We use u8 instead of u1 because codegen can't recognize u1
2301 auto *toMIRType = GlobalTables::GetTypeTable().GetUInt8();
2302 auto *zero = GlobalTables::GetIntConstTable().GetOrCreateIntConst(0, *fromMIRType);
2303 auto *converted = mirBuilder->CreateExprCompare(OP_ne, *toMIRType, *fromMIRType, cvtExpr.Opnd(0),
2304 mirBuilder->CreateConstval(zero));
2305 return converted;
2306 }
2307 switch (expr.GetOpCode()) {
2308 case OP_array: {
2309 ProcessArrayExpr(expr, blkNode);
2310 return LowerArray(static_cast<ArrayNode &>(expr), parent);
2311 }
2312
2313 case OP_dread:
2314 return LowerDread(static_cast<DreadNode &>(expr), blkNode);
2315
2316 case OP_addrof:
2317 return LowerAddrof(static_cast<AddrofNode &>(expr));
2318
2319 case OP_iread:
2320 return LowerIread(static_cast<IreadNode &>(expr));
2321
2322 case OP_iaddrof:
2323 return LowerIaddrof(static_cast<IreadNode &>(expr));
2324
2325 case OP_select:
2326 if (IsComplexSelect(static_cast<TernaryNode &>(expr))) {
2327 return LowerComplexSelect(static_cast<TernaryNode &>(expr), parent, blkNode);
2328 } else if (mirModule.GetFlavor() != kFlavorLmbc) {
2329 return SplitTernaryNodeResult(static_cast<TernaryNode &>(expr), parent, blkNode);
2330 } else {
2331 return &expr;
2332 }
2333
2334 case OP_sizeoftype: {
2335 CHECK(static_cast<SizeoftypeNode &>(expr).GetTyIdx() < beCommon.GetSizeOfTypeSizeTable(),
2336 "index out of range in CGLowerer::LowerExpr");
2337 int64 typeSize = static_cast<int64>(beCommon.GetTypeSize(static_cast<SizeoftypeNode &>(expr).GetTyIdx()));
2338 return mirModule.GetMIRBuilder()->CreateIntConst(typeSize, PTY_u32);
2339 }
2340
2341 case OP_fieldsdist: {
2342 auto &fdNode = static_cast<FieldsDistNode &>(expr);
2343 CHECK(fdNode.GetTyIdx() < beCommon.GetSizeOfTypeSizeTable(), "index out of range in CGLowerer::LowerExpr");
2344 MIRType *ty = GlobalTables::GetTypeTable().GetTypeFromTyIdx(fdNode.GetTyIdx());
2345 CHECK(ty->GetKind() == kTypeClass, "wrong type for FieldsDistNode");
2346 MIRClassType *classType = static_cast<MIRClassType *>(ty);
2347 const JClassLayout &layout = beCommon.GetJClassLayout(*classType);
2348 DEBUG_ASSERT(!layout.empty(), "container should not be empty");
2349 int32 i1 = fdNode.GetFieldID1() > 0 ? fdNode.GetFieldID1() - 1 : 0;
2350 int32 i2 = fdNode.GetFieldID2() > 0 ? fdNode.GetFieldID2() - 1 : 0;
2351 int64 offset = layout[i2].GetOffset() - layout[i1].GetOffset();
2352 return mirModule.GetMIRBuilder()->CreateIntConst(offset, PTY_u32);
2353 }
2354
2355 case OP_intrinsicop:
2356 if (IsIntrinsicOpHandledAtLowerLevel(static_cast<IntrinsicopNode &>(expr).GetIntrinsic())) {
2357 return &expr;
2358 }
2359 return LowerIntrinsicop(parent, static_cast<IntrinsicopNode &>(expr), blkNode);
2360
2361 case OP_alloca: {
2362 DEBUG_ASSERT(GetCurrentFunc() != nullptr, "GetCurrentFunc should not be nulllptr");
2363 GetCurrentFunc()->SetVlaOrAlloca(true);
2364 return &expr;
2365 }
2366 case OP_rem:
2367 return LowerRem(expr, blkNode);
2368
2369 case OP_cand:
2370 expr.SetOpCode(OP_land);
2371 return SplitBinaryNodeOpnd1(static_cast<BinaryNode &>(expr), blkNode);
2372 case OP_cior:
2373 expr.SetOpCode(OP_lior);
2374 return SplitBinaryNodeOpnd1(static_cast<BinaryNode &>(expr), blkNode);
2375 case OP_cvt:
2376 case OP_retype:
2377 case OP_zext:
2378 case OP_sext:
2379 return LowerCastExpr(expr);
2380 default:
2381 return &expr;
2382 }
2383 }
2384
LowerDread(DreadNode & dread,const BlockNode & block)2385 BaseNode *CGLowerer::LowerDread(DreadNode &dread, const BlockNode &block)
2386 {
2387 /* use PTY_u8 for boolean type in dread/iread */
2388 if (dread.GetPrimType() == PTY_u1) {
2389 dread.SetPrimType(PTY_u8);
2390 }
2391 return (dread.GetFieldID() == 0 ? LowerDreadToThreadLocal(dread, block) : LowerDreadBitfield(dread));
2392 }
2393
LowerRegassign(RegassignNode & regNode,BlockNode & newBlk)2394 void CGLowerer::LowerRegassign(RegassignNode ®Node, BlockNode &newBlk)
2395 {
2396 BaseNode *rhsOpnd = regNode.Opnd(0);
2397 Opcode op = rhsOpnd->GetOpCode();
2398 if ((op == OP_gcmalloc) || (op == OP_gcpermalloc)) {
2399 LowerGCMalloc(regNode, static_cast<GCMallocNode &>(*rhsOpnd), newBlk, op == OP_gcpermalloc);
2400 return;
2401 } else {
2402 regNode.SetOpnd(LowerExpr(regNode, *rhsOpnd, newBlk), 0);
2403 newBlk.AddStatement(®Node);
2404 }
2405 }
2406
ExtractSymbolAddress(const StIdx & stIdx)2407 BaseNode *CGLowerer::ExtractSymbolAddress(const StIdx &stIdx)
2408 {
2409 auto builder = mirModule.GetMIRBuilder();
2410 return builder->CreateExprAddrof(0, stIdx);
2411 }
2412
LowerDreadToThreadLocal(BaseNode & expr,const BlockNode & block)2413 BaseNode *CGLowerer::LowerDreadToThreadLocal(BaseNode &expr, const BlockNode &block)
2414 {
2415 auto *result = &expr;
2416 if (expr.GetOpCode() != maple::OP_dread) {
2417 return result;
2418 }
2419 uint32 oldTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
2420 auto dread = static_cast<DreadNode &>(expr);
2421 StIdx stIdx = dread.GetStIdx();
2422 if (!stIdx.IsGlobal()) {
2423 return result;
2424 }
2425 MIRSymbol *symbol = GlobalTables::GetGsymTable().GetSymbolFromStidx(stIdx.Idx());
2426 CHECK_FATAL(symbol != nullptr, "symbol should not be nullptr");
2427
2428 if (symbol->IsThreadLocal()) {
2429 // iread <* u32> 0 (regread u64 %addr)
2430 auto addr = ExtractSymbolAddress(stIdx);
2431 auto ptrType = GlobalTables::GetTypeTable().GetOrCreatePointerType(*symbol->GetType());
2432 auto iread = mirModule.GetMIRBuilder()->CreateExprIread(*symbol->GetType(), *ptrType, dread.GetFieldID(), addr);
2433 result = iread;
2434 }
2435 uint32 newTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
2436 if (newTypeTableSize != oldTypeTableSize) {
2437 beCommon.AddNewTypeAfterBecommon(oldTypeTableSize, newTypeTableSize);
2438 }
2439 return result;
2440 }
2441
LowerDassignToThreadLocal(StmtNode & stmt,const BlockNode & block)2442 StmtNode *CGLowerer::LowerDassignToThreadLocal(StmtNode &stmt, const BlockNode &block)
2443 {
2444 StmtNode *result = &stmt;
2445 if (stmt.GetOpCode() != maple::OP_dassign) {
2446 return result;
2447 }
2448 uint32 oldTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
2449 auto dAssign = static_cast<DassignNode &>(stmt);
2450 StIdx stIdx = dAssign.GetStIdx();
2451 if (!stIdx.IsGlobal()) {
2452 return result;
2453 }
2454 MIRSymbol *symbol = GlobalTables::GetGsymTable().GetSymbolFromStidx(stIdx.Idx());
2455 DEBUG_ASSERT(symbol != nullptr, "symbol should not be nullptr");
2456 if (symbol->IsThreadLocal()) {
2457 // iassign <* u32> 0 (regread u64 %addr, dread u32 $x)
2458 auto addr = ExtractSymbolAddress(stIdx);
2459 auto ptrType = GlobalTables::GetTypeTable().GetOrCreatePointerType(*symbol->GetType());
2460 auto iassign =
2461 mirModule.GetMIRBuilder()->CreateStmtIassign(*ptrType, dAssign.GetFieldID(), addr, dAssign.GetRHS());
2462 result = iassign;
2463 }
2464 uint32 newTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
2465 if (newTypeTableSize != oldTypeTableSize) {
2466 beCommon.AddNewTypeAfterBecommon(oldTypeTableSize, newTypeTableSize);
2467 }
2468 return result;
2469 }
2470
LowerDassign(DassignNode & dsNode,BlockNode & newBlk)2471 void CGLowerer::LowerDassign(DassignNode &dsNode, BlockNode &newBlk)
2472 {
2473 StmtNode *newStmt = nullptr;
2474 BaseNode *rhs = nullptr;
2475 Opcode op = dsNode.GetRHS()->GetOpCode();
2476 if (dsNode.GetFieldID() != 0) {
2477 newStmt = LowerDassignBitfield(dsNode, newBlk);
2478 } else if (op == OP_intrinsicop) {
2479 IntrinsicopNode *intrinNode = static_cast<IntrinsicopNode *>(dsNode.GetRHS());
2480 MIRType *retType = IntrinDesc::intrinTable[intrinNode->GetIntrinsic()].GetReturnType();
2481 CHECK_FATAL(retType != nullptr, "retType should not be nullptr");
2482 if (retType->GetKind() == kTypeStruct) {
2483 newStmt = LowerIntrinsicopDassign(dsNode, *intrinNode, newBlk);
2484 } else {
2485 rhs = LowerExpr(dsNode, *intrinNode, newBlk);
2486 dsNode.SetRHS(rhs);
2487 CHECK_FATAL(dsNode.GetRHS() != nullptr, "dsNode->rhs is null in CGLowerer::LowerDassign");
2488 if (!IsDassignNOP(dsNode)) {
2489 newStmt = &dsNode;
2490 }
2491 }
2492 } else if ((op == OP_gcmalloc) || (op == OP_gcpermalloc)) {
2493 LowerGCMalloc(dsNode, static_cast<GCMallocNode &>(*dsNode.GetRHS()), newBlk, op == OP_gcpermalloc);
2494 return;
2495 } else {
2496 rhs = LowerExpr(dsNode, *dsNode.GetRHS(), newBlk);
2497 dsNode.SetRHS(rhs);
2498 newStmt = &dsNode;
2499 }
2500
2501 if (newStmt != nullptr) {
2502 newBlk.AddStatement(LowerDassignToThreadLocal(*newStmt, newBlk));
2503 }
2504 }
2505
2506 // Lower stmt Form
2507 // Initial form: decrefreset (addrof ptr %RegX_RXXXX)
2508 // Convert to form: dassign %RegX_RXXXX 0 (constval ref 0)
2509 // Final form: str xzr, [x29,#XX]
LowerResetStmt(StmtNode & stmt,BlockNode & block)2510 void CGLowerer::LowerResetStmt(StmtNode &stmt, BlockNode &block)
2511 {
2512 UnaryStmtNode &unaryStmtNode = static_cast<UnaryStmtNode &>(stmt);
2513 AddrofNode *addrofNode = static_cast<AddrofNode *>(unaryStmtNode.GetRHS());
2514 MIRType &type = *GlobalTables::GetTypeTable().GetPrimType(PTY_ref);
2515 MIRConst *constVal = GlobalTables::GetIntConstTable().GetOrCreateIntConst(0, type);
2516 ConstvalNode *exprConst = mirModule.CurFuncCodeMemPool()->New<ConstvalNode>();
2517 exprConst->SetPrimType(type.GetPrimType());
2518 exprConst->SetConstVal(constVal);
2519 DassignNode *dassignNode = mirModule.CurFuncCodeMemPool()->New<DassignNode>();
2520 dassignNode->SetStIdx(addrofNode->GetStIdx());
2521 dassignNode->SetRHS(exprConst);
2522 dassignNode->SetFieldID(addrofNode->GetFieldID());
2523 block.AddStatement(dassignNode);
2524 }
2525
LowerIntrinsicopDassign(const DassignNode & dsNode,IntrinsicopNode & intrinNode,BlockNode & newBlk)2526 StmtNode *CGLowerer::LowerIntrinsicopDassign(const DassignNode &dsNode, IntrinsicopNode &intrinNode, BlockNode &newBlk)
2527 {
2528 for (size_t i = 0; i < intrinNode.GetNumOpnds(); ++i) {
2529 DEBUG_ASSERT(intrinNode.Opnd(i) != nullptr, "intrinNode.Opnd(i) should not be nullptr");
2530 intrinNode.SetOpnd(LowerExpr(intrinNode, *intrinNode.Opnd(i), newBlk), i);
2531 }
2532 MIRIntrinsicID intrnID = intrinNode.GetIntrinsic();
2533 IntrinDesc *intrinDesc = &IntrinDesc::intrinTable[intrnID];
2534 MIRSymbol *st = GlobalTables::GetGsymTable().CreateSymbol(kScopeGlobal);
2535 const std::string name = intrinDesc->name;
2536 CHECK_FATAL(intrinDesc->name != nullptr, "intrinDesc's name should not be nullptr");
2537 st->SetNameStrIdx(name);
2538 st->SetStorageClass(kScText);
2539 st->SetSKind(kStFunc);
2540 MIRFunction *fn = mirModule.GetMemPool()->New<MIRFunction>(&mirModule, st->GetStIdx());
2541 MapleVector<BaseNode *> &nOpnds = intrinNode.GetNopnd();
2542 st->SetFunction(fn);
2543 std::vector<TyIdx> fnTyVec;
2544 std::vector<TypeAttrs> fnTaVec;
2545 CHECK_FATAL(intrinDesc->IsJsOp(), "intrinDesc should be JsOp");
2546 /* setup parameters */
2547 for (uint32 i = 0; i < nOpnds.size(); ++i) {
2548 fnTyVec.emplace_back(GlobalTables::GetTypeTable().GetTypeFromTyIdx(PTY_a32)->GetTypeIndex());
2549 fnTaVec.emplace_back(TypeAttrs());
2550 BaseNode *addrNode = beCommon.GetAddressOfNode(*nOpnds[i]);
2551 CHECK_FATAL(addrNode != nullptr, "addrNode should not be nullptr");
2552 nOpnds[i] = addrNode;
2553 }
2554 DEBUG_ASSERT(mirModule.CurFunction() != nullptr, "curFunction should not be nullptr");
2555 MIRSymbol *dst = mirModule.CurFunction()->GetLocalOrGlobalSymbol(dsNode.GetStIdx());
2556 MIRType *ty = dst->GetType();
2557 MIRType *fnType = beCommon.BeGetOrCreateFunctionType(ty->GetTypeIndex(), fnTyVec, fnTaVec);
2558 st->SetTyIdx(fnType->GetTypeIndex());
2559 fn->SetMIRFuncType(static_cast<MIRFuncType *>(fnType));
2560 fn->SetReturnTyIdx(ty->GetTypeIndex());
2561 CHECK_FATAL(ty->GetKind() == kTypeStruct, "ty's kind should be struct type");
2562 CHECK_FATAL(dsNode.GetFieldID() == 0, "dsNode's filedId should equal");
2563 AddrofNode *addrofNode = mirBuilder->CreateAddrof(*dst, PTY_a32);
2564 MapleVector<BaseNode *> newOpnd(mirModule.CurFuncCodeMemPoolAllocator()->Adapter());
2565 newOpnd.emplace_back(addrofNode);
2566 (void)newOpnd.insert(newOpnd.end(), nOpnds.begin(), nOpnds.end());
2567 CallNode *callStmt = mirModule.CurFuncCodeMemPool()->New<CallNode>(mirModule, OP_call);
2568 callStmt->SetPUIdx(st->GetFunction()->GetPuidx());
2569 callStmt->SetNOpnd(newOpnd);
2570 return callStmt;
2571 }
2572
CreateStmtCallWithReturnValue(const IntrinsicopNode & intrinNode,const MIRSymbol & ret,PUIdx bFunc,BaseNode * extraInfo) const2573 StmtNode *CGLowerer::CreateStmtCallWithReturnValue(const IntrinsicopNode &intrinNode, const MIRSymbol &ret, PUIdx bFunc,
2574 BaseNode *extraInfo) const
2575 {
2576 MapleVector<BaseNode *> args(mirBuilder->GetCurrentFuncCodeMpAllocator()->Adapter());
2577 for (size_t i = 0; i < intrinNode.NumOpnds(); ++i) {
2578 args.emplace_back(intrinNode.Opnd(i));
2579 }
2580 if (extraInfo != nullptr) {
2581 args.emplace_back(extraInfo);
2582 }
2583 return mirBuilder->CreateStmtCallAssigned(bFunc, args, &ret, OP_callassigned);
2584 }
2585
CreateStmtCallWithReturnValue(const IntrinsicopNode & intrinNode,PregIdx retpIdx,PUIdx bFunc,BaseNode * extraInfo) const2586 StmtNode *CGLowerer::CreateStmtCallWithReturnValue(const IntrinsicopNode &intrinNode, PregIdx retpIdx, PUIdx bFunc,
2587 BaseNode *extraInfo) const
2588 {
2589 MapleVector<BaseNode *> args(mirBuilder->GetCurrentFuncCodeMpAllocator()->Adapter());
2590 for (size_t i = 0; i < intrinNode.NumOpnds(); ++i) {
2591 args.emplace_back(intrinNode.Opnd(i));
2592 }
2593 if (extraInfo != nullptr) {
2594 args.emplace_back(extraInfo);
2595 }
2596 return mirBuilder->CreateStmtCallRegassigned(bFunc, args, retpIdx, OP_callassigned);
2597 }
2598
ProcessClassInfo(MIRType & classType,bool & classInfoFromRt,std::string & classInfo) const2599 void CGLowerer::ProcessClassInfo(MIRType &classType, bool &classInfoFromRt, std::string &classInfo) const
2600 {
2601 MIRPtrType &ptrType = static_cast<MIRPtrType &>(classType);
2602 MIRType *pType = ptrType.GetPointedType();
2603 CHECK_FATAL(pType != nullptr, "Class type not found for INTRN_J_CONST_CLASS");
2604 MIRType *typeScalar = nullptr;
2605
2606 if (pType->GetKind() == kTypeScalar) {
2607 typeScalar = pType;
2608 } else if (classType.GetKind() == kTypeScalar) {
2609 typeScalar = &classType;
2610 }
2611 if (typeScalar != nullptr) {
2612 classInfo = PRIMITIVECLASSINFO_PREFIX_STR;
2613 }
2614 if ((pType->GetKind() == kTypeByName) || (pType->GetKind() == kTypeClass) || (pType->GetKind() == kTypeInterface)) {
2615 MIRStructType *classTypeSecond = static_cast<MIRStructType *>(pType);
2616 classInfo = CLASSINFO_PREFIX_STR + classTypeSecond->GetName();
2617 } else if ((pType->GetKind() == kTypeArray) || (pType->GetKind() == kTypeJArray)) {
2618 MIRJarrayType *jarrayType = static_cast<MIRJarrayType *>(pType);
2619 CHECK_FATAL(jarrayType != nullptr, "jarrayType is null in CGLowerer::LowerIntrinsicopWithType");
2620 std::string baseName = "";
2621 if (jarrayType->IsPrimitiveArray() && (jarrayType->GetDim() <= kThreeDimArray)) {
2622 classInfo = PRIMITIVECLASSINFO_PREFIX_STR + baseName;
2623 } else {
2624 classInfoFromRt = true;
2625 classInfo = baseName;
2626 }
2627 }
2628 }
2629
GetBaseNodeFromCurFunc(MIRFunction & curFunc,bool isFromJarray)2630 BaseNode *CGLowerer::GetBaseNodeFromCurFunc(MIRFunction &curFunc, bool isFromJarray)
2631 {
2632 BaseNode *baseNode = nullptr;
2633 if (curFunc.IsStatic()) {
2634 /*
2635 * it's a static function.
2636 * pass caller functions's classinfo directly
2637 */
2638 std::string callerName = CLASSINFO_PREFIX_STR;
2639 DEBUG_ASSERT(mirModule.CurFunction() != nullptr, "CurFunction should not be nullptr");
2640 callerName += mirModule.CurFunction()->GetBaseClassName();
2641 GStrIdx strIdx = GlobalTables::GetStrTable().GetOrCreateStrIdxFromName(callerName);
2642 MIRSymbol *callerClassInfoSym = GlobalTables::GetGsymTable().GetSymbolFromStrIdx(strIdx);
2643 if (callerClassInfoSym == nullptr) {
2644 if (isFromJarray) {
2645 MIRType *mType = GlobalTables::GetTypeTable().GetVoidPtr();
2646 CHECK_FATAL(mType != nullptr, "type is null");
2647 callerClassInfoSym = mirBuilder->CreateGlobalDecl(callerName.c_str(), *mType);
2648 callerClassInfoSym->SetStorageClass(kScExtern);
2649 } else {
2650 callerClassInfoSym = GlobalTables::GetGsymTable().CreateSymbol(kScopeGlobal);
2651 callerClassInfoSym->SetNameStrIdx(strIdx);
2652 callerClassInfoSym->SetStorageClass(kScGlobal);
2653 callerClassInfoSym->SetSKind(kStVar);
2654 /* it must be a local symbol */
2655 GlobalTables::GetGsymTable().AddToStringSymbolMap(*callerClassInfoSym);
2656 callerClassInfoSym->SetTyIdx(static_cast<TyIdx>(PTY_ptr));
2657 }
2658 }
2659
2660 baseNode = mirBuilder->CreateExprAddrof(0, *callerClassInfoSym);
2661 } else {
2662 /*
2663 * it's an instance function.
2664 * pass caller function's this pointer
2665 */
2666 CHECK_FATAL(curFunc.GetFormalCount() != 0, "index out of range in CGLowerer::GetBaseNodeFromCurFunc");
2667 MIRSymbol *formalSt = curFunc.GetFormal(0);
2668 if (formalSt->IsPreg()) {
2669 if (isFromJarray) {
2670 baseNode = mirBuilder->CreateExprRegread(
2671 formalSt->GetType()->GetPrimType(),
2672 curFunc.GetPregTab()->GetPregIdxFromPregno(formalSt->GetPreg()->GetPregNo()));
2673 } else {
2674 CHECK_FATAL(curFunc.GetParamSize() != 0, "index out of range in CGLowerer::GetBaseNodeFromCurFunc");
2675 baseNode = mirBuilder->CreateExprRegread(
2676 (curFunc.GetNthParamType(0))->GetPrimType(),
2677 curFunc.GetPregTab()->GetPregIdxFromPregno(formalSt->GetPreg()->GetPregNo()));
2678 }
2679 } else {
2680 baseNode = mirBuilder->CreateExprDread(*formalSt);
2681 }
2682 }
2683 return baseNode;
2684 }
2685
GetClassInfoExpr(const std::string & classInfo) const2686 BaseNode *CGLowerer::GetClassInfoExpr(const std::string &classInfo) const
2687 {
2688 BaseNode *classInfoExpr = nullptr;
2689 GStrIdx strIdx = GlobalTables::GetStrTable().GetOrCreateStrIdxFromName(classInfo);
2690 MIRSymbol *classInfoSym = GlobalTables::GetGsymTable().GetSymbolFromStrIdx(strIdx);
2691 if (classInfoSym != nullptr) {
2692 classInfoExpr = mirBuilder->CreateExprAddrof(0, *classInfoSym);
2693 } else {
2694 classInfoSym = GlobalTables::GetGsymTable().CreateSymbol(kScopeGlobal);
2695 classInfoSym->SetNameStrIdx(strIdx);
2696 classInfoSym->SetStorageClass(kScGlobal);
2697 classInfoSym->SetSKind(kStVar);
2698 if (CGOptions::IsPIC()) {
2699 classInfoSym->SetStorageClass(kScExtern);
2700 } else {
2701 classInfoSym->SetAttr(ATTR_weak);
2702 }
2703 GlobalTables::GetGsymTable().AddToStringSymbolMap(*classInfoSym);
2704 classInfoSym->SetTyIdx(static_cast<TyIdx>(PTY_ptr));
2705
2706 classInfoExpr = mirBuilder->CreateExprAddrof(0, *classInfoSym);
2707 }
2708 return classInfoExpr;
2709 }
2710
/* Lower an intrinsicop expression: fold what can be folded here and pass the
 * rest through for later backend handling. */
BaseNode *CGLowerer::LowerIntrinsicop(const BaseNode &parent, IntrinsicopNode &intrinNode, BlockNode &newBlk)
{
    for (size_t opndId = 0; opndId < intrinNode.GetNumOpnds(); ++opndId) {
        intrinNode.SetOpnd(LowerExpr(intrinNode, *intrinNode.Opnd(opndId), newBlk), opndId);
    }

    MIRIntrinsicID intrnID = intrinNode.GetIntrinsic();
    IntrinDesc &intrinDesc = IntrinDesc::intrinTable[intrnID];
    /* lazy ovtable / arrayclass-cache reads are handled later by the backend */
    if (intrnID == INTRN_MPL_READ_OVTABLE_ENTRY_LAZY || intrnID == INTRN_MPL_READ_ARRAYCLASS_CACHE_ENTRY) {
        return &intrinNode;
    }
    if (intrnID == INTRN_C_constant_p) {
        /* fold __builtin_constant_p: 1 only for literal constants */
        Opcode opndOp = intrinNode.Opnd(0)->op;
        int64 isConst =
            (opndOp == OP_constval || opndOp == OP_sizeoftype || opndOp == OP_conststr || opndOp == OP_conststr16) ? 1
                                                                                                                   : 0;
        return mirModule.GetMIRBuilder()->CreateIntConst(isConst, PTY_i32);
    }
    if (intrnID == INTRN_C___builtin_expect) {
        /* __builtin_expect is only a hint: keep the value, drop the expectation */
        return intrinNode.Opnd(0);
    }
    if (intrinDesc.IsVectorOp() || intrinDesc.IsAtomic()) {
        return &intrinNode;
    }
    CHECK_FATAL(false, "unexpected intrinsic type in CGLowerer::LowerIntrinsicop");
    return &intrinNode;
}
2742
/* Lower the operands, then reject: no intrinsicopwithtype is expected to
 * reach this point in the current configuration. */
BaseNode *CGLowerer::LowerIntrinsicopwithtype(const BaseNode &parent, IntrinsicopNode &intrinNode, BlockNode &blk)
{
    for (size_t opndId = 0; opndId < intrinNode.GetNumOpnds(); ++opndId) {
        intrinNode.SetOpnd(LowerExpr(intrinNode, *intrinNode.Opnd(opndId), blk), opndId);
    }
    IntrinDesc *intrinDesc = &IntrinDesc::intrinTable[intrinNode.GetIntrinsic()];
    CHECK_FATAL(!intrinDesc->IsJS(), "intrinDesc should not be js");
    CHECK_FATAL(false, "should not run here");
    return &intrinNode;
}
2754
LowerIntrinsicMplClearStack(const IntrinsiccallNode & intrincall,BlockNode & newBlk)2755 StmtNode *CGLowerer::LowerIntrinsicMplClearStack(const IntrinsiccallNode &intrincall, BlockNode &newBlk)
2756 {
2757 StmtNode *newStmt =
2758 mirBuilder->CreateStmtIassign(*beCommon.BeGetOrCreatePointerType(*GlobalTables::GetTypeTable().GetUInt8()), 0,
2759 intrincall.Opnd(0), mirBuilder->GetConstUInt8(0));
2760 newBlk.AddStatement(newStmt);
2761
2762 BaseNode *length = intrincall.Opnd(1);
2763 PrimType pType = PTY_i64;
2764 DEBUG_ASSERT(GetCurrentFunc() != nullptr, "GetCurrentFunc should not be nullptr");
2765 PregIdx pIdx = GetCurrentFunc()->GetPregTab()->CreatePreg(pType);
2766 newStmt = mirBuilder->CreateStmtRegassign(pType, pIdx, mirBuilder->CreateIntConst(1, pType));
2767 newBlk.AddStatement(newStmt);
2768 MIRFunction *func = GetCurrentFunc();
2769 DEBUG_ASSERT(func != nullptr, "func should not be nullptr");
2770 const std::string &name = func->GetName() + std::string("_Lalloca_");
2771 LabelIdx label1 = GetCurrentFunc()->GetOrCreateLableIdxFromName(name + std::to_string(labelIdx++));
2772 LabelIdx label2 = GetCurrentFunc()->GetOrCreateLableIdxFromName(name + std::to_string(labelIdx++));
2773
2774 newStmt = mirBuilder->CreateStmtGoto(OP_goto, label2);
2775 newBlk.AddStatement(newStmt);
2776 LabelNode *ln = mirBuilder->CreateStmtLabel(label1);
2777 newBlk.AddStatement(ln);
2778
2779 RegreadNode *regLen = mirBuilder->CreateExprRegread(pType, pIdx);
2780
2781 BinaryNode *addr =
2782 mirBuilder->CreateExprBinary(OP_add, *GlobalTables::GetTypeTable().GetAddr64(), intrincall.Opnd(0), regLen);
2783
2784 newStmt =
2785 mirBuilder->CreateStmtIassign(*beCommon.BeGetOrCreatePointerType(*GlobalTables::GetTypeTable().GetUInt8()), 0,
2786 addr, mirBuilder->GetConstUInt8(0));
2787 newBlk.AddStatement(newStmt);
2788
2789 BinaryNode *subLen = mirBuilder->CreateExprBinary(OP_add, *GlobalTables::GetTypeTable().GetPrimType(pType), regLen,
2790 mirBuilder->CreateIntConst(1, pType));
2791 newStmt = mirBuilder->CreateStmtRegassign(pType, pIdx, subLen);
2792 newBlk.AddStatement(newStmt);
2793
2794 ln = mirBuilder->CreateStmtLabel(label2);
2795 newBlk.AddStatement(ln);
2796
2797 CompareNode *cmpExp =
2798 mirBuilder->CreateExprCompare(OP_lt, *GlobalTables::GetTypeTable().GetUInt32(),
2799 *GlobalTables::GetTypeTable().GetPrimType(pType), regLen, length);
2800 newStmt = mirBuilder->CreateStmtCondGoto(cmpExp, OP_brtrue, label1);
2801
2802 return newStmt;
2803 }
2804
LowerIntrinsicRCCall(const IntrinsiccallNode & intrincall)2805 StmtNode *CGLowerer::LowerIntrinsicRCCall(const IntrinsiccallNode &intrincall)
2806 {
2807 /* If GCONLY enabled, lowering RC intrinsics in another way. */
2808 MIRIntrinsicID intrnID = intrincall.GetIntrinsic();
2809 IntrinDesc *intrinDesc = &IntrinDesc::intrinTable[intrnID];
2810
2811 /* convert intrinsic call into function call. */
2812 if (intrinFuncIDs.find(intrinDesc) == intrinFuncIDs.end()) {
2813 /* add funcid into map */
2814 MIRFunction *fn = mirBuilder->GetOrCreateFunction(intrinDesc->name, TyIdx(PTY_void));
2815 fn->GetFuncSymbol()->SetAppearsInCode(true);
2816 DEBUG_ASSERT(fn != nullptr, "nullptr check");
2817 beCommon.UpdateTypeTable(*fn->GetMIRFuncType());
2818 fn->AllocSymTab();
2819 intrinFuncIDs[intrinDesc] = fn->GetPuidx();
2820 }
2821 CallNode *callStmt = mirModule.CurFuncCodeMemPool()->New<CallNode>(mirModule, OP_call);
2822 callStmt->SetPUIdx(intrinFuncIDs.at(intrinDesc));
2823 for (size_t i = 0; i < intrincall.GetNopndSize(); ++i) {
2824 callStmt->GetNopnd().emplace_back(intrincall.GetNopndAt(i));
2825 callStmt->SetNumOpnds(callStmt->GetNumOpnds() + 1);
2826 }
2827 return callStmt;
2828 }
2829
LowerArrayStore(const IntrinsiccallNode & intrincall,BlockNode & newBlk)2830 void CGLowerer::LowerArrayStore(const IntrinsiccallNode &intrincall, BlockNode &newBlk)
2831 {
2832 bool needCheckStore = true;
2833 BaseNode *arrayNode = intrincall.Opnd(0);
2834 MIRType *arrayElemType = GetArrayNodeType(*arrayNode);
2835 BaseNode *valueNode = intrincall.Opnd(kNodeThirdOpnd);
2836 MIRType *valueRealType = GetArrayNodeType(*valueNode);
2837 if ((arrayElemType != nullptr) && (valueRealType != nullptr) && (arrayElemType->GetKind() == kTypeClass) &&
2838 static_cast<MIRClassType *>(arrayElemType)->IsFinal() && (valueRealType->GetKind() == kTypeClass) &&
2839 static_cast<MIRClassType *>(valueRealType)->IsFinal() &&
2840 (valueRealType->GetTypeIndex() == arrayElemType->GetTypeIndex())) {
2841 needCheckStore = false;
2842 }
2843
2844 if (needCheckStore) {
2845 MIRFunction *fn = mirBuilder->GetOrCreateFunction("MCC_Reflect_Check_Arraystore", TyIdx(PTY_void));
2846 DEBUG_ASSERT(fn->GetFuncSymbol() != nullptr, "fn->GetFuncSymbol() should not be null ptr");
2847 fn->GetFuncSymbol()->SetAppearsInCode(true);
2848 beCommon.UpdateTypeTable(*fn->GetMIRFuncType());
2849 fn->AllocSymTab();
2850 MapleVector<BaseNode *> args(mirBuilder->GetCurrentFuncCodeMpAllocator()->Adapter());
2851 args.emplace_back(intrincall.Opnd(0));
2852 args.emplace_back(intrincall.Opnd(kNodeThirdOpnd));
2853 StmtNode *checkStoreStmt = mirBuilder->CreateStmtCall(fn->GetPuidx(), args);
2854 newBlk.AddStatement(checkStoreStmt);
2855 }
2856 }
2857
LowerDefaultIntrinsicCall(IntrinsiccallNode & intrincall,MIRSymbol & st,MIRFunction & fn)2858 StmtNode *CGLowerer::LowerDefaultIntrinsicCall(IntrinsiccallNode &intrincall, MIRSymbol &st, MIRFunction &fn)
2859 {
2860 MIRIntrinsicID intrnID = intrincall.GetIntrinsic();
2861 IntrinDesc *intrinDesc = &IntrinDesc::intrinTable[intrnID];
2862 std::vector<TyIdx> funcTyVec;
2863 std::vector<TypeAttrs> fnTaVec;
2864 MapleVector<BaseNode *> &nOpnds = intrincall.GetNopnd();
2865 MIRType *retTy = intrinDesc->GetReturnType();
2866 CHECK_FATAL(retTy != nullptr, "retTy should not be nullptr");
2867 if (retTy->GetKind() == kTypeStruct) {
2868 funcTyVec.emplace_back(beCommon.BeGetOrCreatePointerType(*retTy)->GetTypeIndex());
2869 fnTaVec.emplace_back(TypeAttrs());
2870 fn.SetReturnStruct();
2871 }
2872 for (uint32 i = 0; i < nOpnds.size(); ++i) {
2873 MIRType *argTy = intrinDesc->GetArgType(i);
2874 CHECK_FATAL(argTy != nullptr, "argTy should not be nullptr");
2875 if (argTy->GetKind() == kTypeStruct) {
2876 funcTyVec.emplace_back(GlobalTables::GetTypeTable().GetTypeFromTyIdx(PTY_a32)->GetTypeIndex());
2877 fnTaVec.emplace_back(TypeAttrs());
2878 BaseNode *addrNode = beCommon.GetAddressOfNode(*nOpnds[i]);
2879 CHECK_FATAL(addrNode != nullptr, "can not get address");
2880 nOpnds[i] = addrNode;
2881 } else {
2882 funcTyVec.emplace_back(argTy->GetTypeIndex());
2883 fnTaVec.emplace_back(TypeAttrs());
2884 }
2885 }
2886 MIRType *funcType = beCommon.BeGetOrCreateFunctionType(retTy->GetTypeIndex(), funcTyVec, fnTaVec);
2887 st.SetTyIdx(funcType->GetTypeIndex());
2888 fn.SetMIRFuncType(static_cast<MIRFuncType *>(funcType));
2889 if (retTy->GetKind() == kTypeStruct) {
2890 fn.SetReturnTyIdx(static_cast<TyIdx>(PTY_void));
2891 } else {
2892 fn.SetReturnTyIdx(retTy->GetTypeIndex());
2893 }
2894 return static_cast<CallNode *>(mirBuilder->CreateStmtCall(fn.GetPuidx(), nOpnds));
2895 }
2896
LowerIntrinsicMplCleanupLocalRefVarsSkip(IntrinsiccallNode & intrincall)2897 StmtNode *CGLowerer::LowerIntrinsicMplCleanupLocalRefVarsSkip(IntrinsiccallNode &intrincall)
2898 {
2899 MIRFunction *mirFunc = mirModule.CurFunction();
2900 CHECK_FATAL(intrincall.NumOpnds() > 0, "must not be zero");
2901 BaseNode *skipExpr = intrincall.Opnd(intrincall.NumOpnds() - 1);
2902
2903 CHECK_FATAL(skipExpr != nullptr, "should be dread");
2904 CHECK_FATAL(skipExpr->GetOpCode() == OP_dread, "should be dread");
2905 DreadNode *refNode = static_cast<DreadNode *>(skipExpr);
2906 DEBUG_ASSERT(mirFunc != nullptr, "mirFunc should not nullptr");
2907 MIRSymbol *skipSym = mirFunc->GetLocalOrGlobalSymbol(refNode->GetStIdx());
2908 DEBUG_ASSERT(skipSym != nullptr, "skipSym should not be nullptr");
2909 if (skipSym->GetAttr(ATTR_localrefvar)) {
2910 mirFunc->InsertMIRSymbol(skipSym);
2911 }
2912 return &intrincall;
2913 }
2914
LowerIntrinsiccall(IntrinsiccallNode & intrincall,BlockNode & newBlk)2915 StmtNode *CGLowerer::LowerIntrinsiccall(IntrinsiccallNode &intrincall, BlockNode &newBlk)
2916 {
2917 MIRIntrinsicID intrnID = intrincall.GetIntrinsic();
2918 for (size_t i = 0; i < intrincall.GetNumOpnds(); ++i) {
2919 intrincall.SetOpnd(LowerExpr(intrincall, *intrincall.Opnd(i), newBlk), i);
2920 }
2921 if (intrnID == INTRN_MPL_CLEAR_STACK) {
2922 return LowerIntrinsicMplClearStack(intrincall, newBlk);
2923 }
2924 if (intrnID == INTRN_C_va_start) {
2925 return &intrincall;
2926 }
2927 if (intrnID == maple::INTRN_C___builtin_division_exception) {
2928 return &intrincall;
2929 }
2930 IntrinDesc *intrinDesc = &IntrinDesc::intrinTable[intrnID];
2931 if (intrinDesc->IsSpecial() || intrinDesc->IsAtomic()) {
2932 /* For special intrinsics we leave them to CGFunc::SelectIntrinsicCall() */
2933 return &intrincall;
2934 }
2935 /* default lowers intrinsic call to real function call. */
2936 MIRSymbol *st = GlobalTables::GetGsymTable().CreateSymbol(kScopeGlobal);
2937 CHECK_FATAL(intrinDesc->name != nullptr, "intrinsic's name should not be nullptr");
2938 const std::string name = intrinDesc->name;
2939 st->SetNameStrIdx(name);
2940 st->SetStorageClass(kScText);
2941 st->SetSKind(kStFunc);
2942 MIRFunction *fn = mirBuilder->GetOrCreateFunction(intrinDesc->name, TyIdx(0));
2943 beCommon.UpdateTypeTable(*fn->GetMIRFuncType());
2944 fn->AllocSymTab();
2945 st->SetFunction(fn);
2946 st->SetAppearsInCode(true);
2947 return LowerDefaultIntrinsicCall(intrincall, *st, *fn);
2948 }
2949
/* Lowers OP_syncenter / OP_syncexit into a call to the matching runtime
 * builtin. A syncenter carries a second constant operand selecting one of the
 * fast-path entry points (appended here with the default value if the parser
 * omitted it); a syncexit takes only the monitor object. Only operand 0 (the
 * object) is passed to the resulting call. */
StmtNode *CGLowerer::LowerSyncEnterSyncExit(StmtNode &stmt)
{
    CHECK_FATAL(stmt.GetOpCode() == OP_syncenter || stmt.GetOpCode() == OP_syncexit,
                "stmt's opcode should be OP_syncenter or OP_syncexit");

    auto &nStmt = static_cast<NaryStmtNode &>(stmt);
    BuiltinFunctionID id;
    if (nStmt.GetOpCode() == OP_syncenter) {
        if (nStmt.NumOpnds() == 1) {
            /* Just as ParseNaryStmt do for syncenter */
            MIRType &intType = *GlobalTables::GetTypeTable().GetTypeFromTyIdx(static_cast<TyIdx>(PTY_i32));
            /* default 2 for __sync_enter_fast() */
            MIRIntConst *intConst = GlobalTables::GetIntConstTable().GetOrCreateIntConst(2, intType);
            ConstvalNode *exprConst = mirModule.GetMemPool()->New<ConstvalNode>();
            exprConst->SetPrimType(PTY_i32);
            exprConst->SetConstVal(intConst);
            nStmt.GetNopnd().emplace_back(exprConst);
            nStmt.SetNumOpnds(nStmt.GetNopndSize());
        }
        CHECK_FATAL(nStmt.NumOpnds() == kOperandNumBinary, "wrong args for syncenter");
        CHECK_FATAL(nStmt.Opnd(1)->GetOpCode() == OP_constval, "wrong 2nd arg type for syncenter");
        ConstvalNode *cst = static_cast<ConstvalNode *>(nStmt.GetNopndAt(1));
        MIRIntConst *intConst = safe_cast<MIRIntConst>(cst->GetConstVal());
        DEBUG_ASSERT(intConst != nullptr, "intConst should not be nullptr");
        /* map the fast-path selector constant onto the builtin-function id */
        switch (intConst->GetExtValue()) {
            case kMCCSyncEnterFast0:
                id = INTRN_FIRST_SYNC_ENTER;
                break;
            case kMCCSyncEnterFast1:
                id = INTRN_SECOND_SYNC_ENTER;
                break;
            case kMCCSyncEnterFast2:
                id = INTRN_THIRD_SYNC_ENTER;
                break;
            case kMCCSyncEnterFast3:
                id = INTRN_FOURTH_SYNC_ENTER;
                break;
            default:
                CHECK_FATAL(false, "wrong kind for syncenter");
                break;
        }
    } else {
        CHECK_FATAL(nStmt.NumOpnds() == 1, "wrong args for syncexit");
        /* NOTE(review): "INTRN_YNC_EXIT" looks like a truncated identifier
         * (cf. the *_SYNC_ENTER ids above) — confirm against the builtin id
         * table that this is the intended sync-exit builtin. */
        id = INTRN_YNC_EXIT;
    }
    PUIdx bFunc = GetBuiltinToUse(id);
    CHECK_FATAL(bFunc != kFuncNotFound, "bFunc should be found");

    /* only the monitor object (operand 0) is forwarded to the runtime call */
    MapleVector<BaseNode *> args(mirBuilder->GetCurrentFuncCodeMpAllocator()->Adapter());
    args.emplace_back(nStmt.Opnd(0));
    return mirBuilder->CreateStmtCall(bFunc, args);
}
3002
GetBuiltinToUse(BuiltinFunctionID id) const3003 PUIdx CGLowerer::GetBuiltinToUse(BuiltinFunctionID id) const
3004 {
3005 /*
3006 * use std::vector & linear search as the number of entries is small.
3007 * we may revisit it if the number of entries gets larger.
3008 */
3009 for (const auto &funcID : builtinFuncIDs) {
3010 if (funcID.first == id) {
3011 return funcID.second;
3012 }
3013 }
3014 return kFuncNotFound;
3015 }
3016
/* Lowers a gcmalloc/gcpermalloc whose result is consumed by `node` (either an
 * OP_dassign or an OP_regassign) into a runtime allocator call appended to
 * blkNode. `perm` selects the permanent-space allocator. If the class being
 * allocated is abstract or an interface, the call is replaced by a throw of
 * InstantiationError instead. In both cases the classinfo symbol's address is
 * the single call argument. */
void CGLowerer::LowerGCMalloc(const BaseNode &node, const GCMallocNode &gcmalloc, BlockNode &blkNode, bool perm)
{
    MIRFunction *func =
        mirBuilder->GetOrCreateFunction((perm ? "MCC_NewPermanentObject" : "MCC_NewObj_fixed_class"), (TyIdx)(PTY_ref));
    func->GetFuncSymbol()->SetAppearsInCode(true);
    beCommon.UpdateTypeTable(*func->GetMIRFuncType());
    func->AllocSymTab();
    /* Get the classinfo */
    MIRStructType *classType =
        static_cast<MIRStructType *>(GlobalTables::GetTypeTable().GetTypeFromTyIdx(gcmalloc.GetTyIdx()));
    std::string classInfoName = CLASSINFO_PREFIX_STR + classType->GetName();
    MIRSymbol *classSym =
        GlobalTables::GetGsymTable().GetSymbolFromStrIdx(GlobalTables::GetStrTable().GetStrIdxFromName(classInfoName));
    if (classSym == nullptr) {
        /* classinfo not defined in this module: declare it extern */
        MIRType *pointerType = beCommon.BeGetOrCreatePointerType(*GlobalTables::GetTypeTable().GetVoid());
        classSym = mirBuilder->CreateGlobalDecl(classInfoName, *pointerType);
        classSym->SetStorageClass(kScExtern);
    }
    CallNode *callAssign = nullptr;
    auto *curFunc = mirModule.CurFunction();
    if (classSym->GetAttr(ATTR_abstract) || classSym->GetAttr(ATTR_interface)) {
        /* abstract/interface types cannot be instantiated: emit a call that
         * throws InstantiationError instead of allocating */
        MIRFunction *funcSecond =
            mirBuilder->GetOrCreateFunction("MCC_Reflect_ThrowInstantiationError", static_cast<TyIdx>(PTY_ref));
        funcSecond->GetFuncSymbol()->SetAppearsInCode(true);
        beCommon.UpdateTypeTable(*funcSecond->GetMIRFuncType());
        funcSecond->AllocSymTab();
        BaseNode *arg = mirBuilder->CreateExprAddrof(0, *classSym);
        if (node.GetOpCode() == OP_dassign) {
            auto &dsNode = static_cast<const DassignNode &>(node);
            MIRSymbol *ret = curFunc->GetLocalOrGlobalSymbol(dsNode.GetStIdx());
            MapleVector<BaseNode *> args(mirBuilder->GetCurrentFuncCodeMpAllocator()->Adapter());
            args.emplace_back(arg);
            callAssign = mirBuilder->CreateStmtCallAssigned(funcSecond->GetPuidx(), args, ret, OP_callassigned);
        } else {
            CHECK_FATAL(node.GetOpCode() == OP_regassign, "regassign expected");
            callAssign = mirBuilder->CreateStmtCallRegassigned(
                funcSecond->GetPuidx(), static_cast<const RegassignNode &>(node).GetRegIdx(), OP_callassigned, arg);
        }
        blkNode.AppendStatementsFromBlock(*LowerCallAssignedStmt(*callAssign));
        return;
    }
    BaseNode *arg = mirBuilder->CreateExprAddrof(0, *classSym);

    /* normal path: call the allocator and assign its result per `node` */
    if (node.GetOpCode() == OP_dassign) {
        MIRSymbol *ret = curFunc->GetLocalOrGlobalSymbol(static_cast<const DassignNode &>(node).GetStIdx());
        MapleVector<BaseNode *> args(mirBuilder->GetCurrentFuncCodeMpAllocator()->Adapter());
        args.emplace_back(arg);
        callAssign = mirBuilder->CreateStmtCallAssigned(func->GetPuidx(), args, ret, OP_callassigned);
    } else {
        CHECK_FATAL(node.GetOpCode() == OP_regassign, "regassign expected");
        callAssign = mirBuilder->CreateStmtCallRegassigned(
            func->GetPuidx(), static_cast<const RegassignNode &>(node).GetRegIdx(), OP_callassigned, arg);
    }
    blkNode.AppendStatementsFromBlock(*LowerCallAssignedStmt(*callAssign));
}
3072
GetNewArrayFuncName(const uint32 elemSize,const bool perm) const3073 std::string CGLowerer::GetNewArrayFuncName(const uint32 elemSize, const bool perm) const
3074 {
3075 if (elemSize == k1ByteSize) {
3076 return perm ? "MCC_NewPermArray8" : "MCC_NewArray8";
3077 }
3078 if (elemSize == k2ByteSize) {
3079 return perm ? "MCC_NewPermArray16" : "MCC_NewArray16";
3080 }
3081 if (elemSize == k4ByteSize) {
3082 return perm ? "MCC_NewPermArray32" : "MCC_NewArray32";
3083 }
3084 CHECK_FATAL((elemSize == k8ByteSize), "Invalid elemSize.");
3085 return perm ? "MCC_NewPermArray64" : "MCC_NewArray64";
3086 }
3087
IsIntrinsicCallHandledAtLowerLevel(MIRIntrinsicID intrinsic) const3088 bool CGLowerer::IsIntrinsicCallHandledAtLowerLevel(MIRIntrinsicID intrinsic) const
3089 {
3090 switch (intrinsic) {
3091 case INTRN_MPL_ATOMIC_EXCHANGE_PTR:
3092 // js
3093 case INTRN_ADD_WITH_OVERFLOW:
3094 case INTRN_SUB_WITH_OVERFLOW:
3095 case INTRN_MUL_WITH_OVERFLOW:
3096 return true;
3097 default: {
3098 return false;
3099 }
3100 }
3101 }
3102
/* Intrinsic ops listed here survive lowering untouched and are expanded
 * directly during instruction selection; the list is only enabled for
 * AArch64 / X86_64 targets (otherwise everything returns false). */
bool CGLowerer::IsIntrinsicOpHandledAtLowerLevel(MIRIntrinsicID intrinsic) const
{
    switch (intrinsic) {
#if TARGAARCH64 || TARGX86_64
        /* libm-style math functions */
        case INTRN_C_cos:
        case INTRN_C_cosf:
        case INTRN_C_cosh:
        case INTRN_C_coshf:
        case INTRN_C_acos:
        case INTRN_C_acosf:
        case INTRN_C_sin:
        case INTRN_C_sinf:
        case INTRN_C_sinh:
        case INTRN_C_sinhf:
        case INTRN_C_asin:
        case INTRN_C_asinf:
        case INTRN_C_atan:
        case INTRN_C_atanf:
        case INTRN_C_exp:
        case INTRN_C_expf:
        case INTRN_C_ffs:
        case INTRN_C_log:
        case INTRN_C_logf:
        case INTRN_C_log10:
        case INTRN_C_log10f:
        /* bit-manipulation builtins */
        case INTRN_C_clz32:
        case INTRN_C_clz64:
        case INTRN_C_ctz32:
        case INTRN_C_ctz64:
        case INTRN_C_popcount32:
        case INTRN_C_popcount64:
        case INTRN_C_parity32:
        case INTRN_C_parity64:
        case INTRN_C_clrsb32:
        case INTRN_C_clrsb64:
        /* alignment helpers */
        case INTRN_C_isaligned:
        case INTRN_C_alignup:
        case INTRN_C_aligndown:
        /* GCC __sync_* atomic builtins (1/2/4/8-byte variants) */
        case INTRN_C___sync_add_and_fetch_1:
        case INTRN_C___sync_add_and_fetch_2:
        case INTRN_C___sync_add_and_fetch_4:
        case INTRN_C___sync_add_and_fetch_8:
        case INTRN_C___sync_sub_and_fetch_1:
        case INTRN_C___sync_sub_and_fetch_2:
        case INTRN_C___sync_sub_and_fetch_4:
        case INTRN_C___sync_sub_and_fetch_8:
        case INTRN_C___sync_fetch_and_add_1:
        case INTRN_C___sync_fetch_and_add_2:
        case INTRN_C___sync_fetch_and_add_4:
        case INTRN_C___sync_fetch_and_add_8:
        case INTRN_C___sync_fetch_and_sub_1:
        case INTRN_C___sync_fetch_and_sub_2:
        case INTRN_C___sync_fetch_and_sub_4:
        case INTRN_C___sync_fetch_and_sub_8:
        case INTRN_C___sync_bool_compare_and_swap_1:
        case INTRN_C___sync_bool_compare_and_swap_2:
        case INTRN_C___sync_bool_compare_and_swap_4:
        case INTRN_C___sync_bool_compare_and_swap_8:
        case INTRN_C___sync_val_compare_and_swap_1:
        case INTRN_C___sync_val_compare_and_swap_2:
        case INTRN_C___sync_val_compare_and_swap_4:
        case INTRN_C___sync_val_compare_and_swap_8:
        case INTRN_C___sync_lock_test_and_set_1:
        case INTRN_C___sync_lock_test_and_set_2:
        case INTRN_C___sync_lock_test_and_set_4:
        case INTRN_C___sync_lock_test_and_set_8:
        case INTRN_C___sync_lock_release_8:
        case INTRN_C___sync_lock_release_4:
        case INTRN_C___sync_lock_release_2:
        case INTRN_C___sync_lock_release_1:
        case INTRN_C___sync_fetch_and_and_1:
        case INTRN_C___sync_fetch_and_and_2:
        case INTRN_C___sync_fetch_and_and_4:
        case INTRN_C___sync_fetch_and_and_8:
        case INTRN_C___sync_fetch_and_or_1:
        case INTRN_C___sync_fetch_and_or_2:
        case INTRN_C___sync_fetch_and_or_4:
        case INTRN_C___sync_fetch_and_or_8:
        case INTRN_C___sync_fetch_and_xor_1:
        case INTRN_C___sync_fetch_and_xor_2:
        case INTRN_C___sync_fetch_and_xor_4:
        case INTRN_C___sync_fetch_and_xor_8:
        case INTRN_C___sync_fetch_and_nand_1:
        case INTRN_C___sync_fetch_and_nand_2:
        case INTRN_C___sync_fetch_and_nand_4:
        case INTRN_C___sync_fetch_and_nand_8:
        case INTRN_C___sync_and_and_fetch_1:
        case INTRN_C___sync_and_and_fetch_2:
        case INTRN_C___sync_and_and_fetch_4:
        case INTRN_C___sync_and_and_fetch_8:
        case INTRN_C___sync_or_and_fetch_1:
        case INTRN_C___sync_or_and_fetch_2:
        case INTRN_C___sync_or_and_fetch_4:
        case INTRN_C___sync_or_and_fetch_8:
        case INTRN_C___sync_xor_and_fetch_1:
        case INTRN_C___sync_xor_and_fetch_2:
        case INTRN_C___sync_xor_and_fetch_4:
        case INTRN_C___sync_xor_and_fetch_8:
        case INTRN_C___sync_nand_and_fetch_1:
        case INTRN_C___sync_nand_and_fetch_2:
        case INTRN_C___sync_nand_and_fetch_4:
        case INTRN_C___sync_nand_and_fetch_8:
        case INTRN_C___sync_synchronize:
        /* return-address builtins */
        case INTRN_C__builtin_return_address:
        case INTRN_C__builtin_extract_return_addr:
        /* string/memory functions with inline expansions */
        case INTRN_C_memcmp:
        case INTRN_C_strlen:
        case INTRN_C_strcmp:
        case INTRN_C_strncmp:
        case INTRN_C_strchr:
        case INTRN_C_strrchr:
        /* byte-reverse builtins */
        case INTRN_C_rev16_2:
        case INTRN_C_rev_4:
        case INTRN_C_rev_8:
            return true;
#endif
        default:
            return false;
    }
}
3223
/* Builds arrayClassCacheIndex: maps each array-class name referenced by the
 * array-class cache name table to its slot index in that table. Names are not
 * stored directly; each table entry is an integer whose low two bits select
 * which reflection string table holds the name and whose remaining bits
 * (value >> 2) give the start offset of the name inside that table. */
void CGLowerer::InitArrayClassCacheTableIndex()
{
    /* the four candidate string tables the encoded entries can point into */
    MIRSymbol *reflectStrtabSym =
        GlobalTables::GetGsymTable().GetSymbolFromStrIdx(GlobalTables::GetStrTable().GetOrCreateStrIdxFromName(
            namemangler::kReflectionStrtabPrefixStr + mirModule.GetFileNameAsPostfix()));
    MIRSymbol *reflectStartHotStrtabSym =
        GlobalTables::GetGsymTable().GetSymbolFromStrIdx(GlobalTables::GetStrTable().GetOrCreateStrIdxFromName(
            namemangler::kReflectionStartHotStrtabPrefixStr + mirModule.GetFileNameAsPostfix()));
    MIRSymbol *reflectBothHotStrtabSym =
        GlobalTables::GetGsymTable().GetSymbolFromStrIdx(GlobalTables::GetStrTable().GetOrCreateStrIdxFromName(
            namemangler::kReflectionBothHotStrTabPrefixStr + mirModule.GetFileNameAsPostfix()));
    MIRSymbol *reflectRunHotStrtabSym =
        GlobalTables::GetGsymTable().GetSymbolFromStrIdx(GlobalTables::GetStrTable().GetOrCreateStrIdxFromName(
            namemangler::kReflectionRunHotStrtabPrefixStr + mirModule.GetFileNameAsPostfix()));
    MIRSymbol *arrayCacheNameTableSym =
        GlobalTables::GetGsymTable().GetSymbolFromStrIdx(GlobalTables::GetStrTable().GetOrCreateStrIdxFromName(
            namemangler::kArrayClassCacheNameTable + mirModule.GetFileNameAsPostfix()));
    if (arrayCacheNameTableSym == nullptr) {
        /* no array-class cache emitted for this module: nothing to index */
        return;
    }
    MIRAggConst &aggConst = static_cast<MIRAggConst &>(*(arrayCacheNameTableSym->GetKonst()));
    MIRSymbol *strTab = nullptr;
    for (size_t i = 0; i < aggConst.GetConstVec().size(); ++i) {
        MIRConst *elemConst = aggConst.GetConstVecItem(i);
        uint32 intValue = static_cast<uint32>(((safe_cast<MIRIntConst>(elemConst))->GetExtValue()) & 0xFFFFFFFF);
        bool isHotReflectStr = (intValue & 0x00000003) != 0; /* use the last two bits of intValue in this expression */
        if (isHotReflectStr) {
            /* non-zero tag: name lives in one of the "hot" string tables */
            uint32 tag =
                (intValue & 0x00000003) - kCStringShift; /* use the last two bits of intValue in this expression */
            if (tag == kLayoutBootHot) {
                strTab = reflectStartHotStrtabSym;
            } else if (tag == kLayoutBothHot) {
                strTab = reflectBothHotStrtabSym;
            } else {
                strTab = reflectRunHotStrtabSym;
            }
        } else {
            strTab = reflectStrtabSym;
        }
        DEBUG_ASSERT(strTab != nullptr, "strTab is nullptr");
        std::string arrayClassName;
        MIRAggConst *strAgg = static_cast<MIRAggConst *>(strTab->GetKonst());
        /* decode the NUL-terminated name one character constant at a time,
         * starting at the offset encoded above the two tag bits */
        for (auto start = (intValue >> 2); start < strAgg->GetConstVec().size();
             ++start) { /* the last two bits is flag */
            MIRIntConst *oneChar = static_cast<MIRIntConst *>(strAgg->GetConstVecItem(start));
            if ((oneChar != nullptr) && !oneChar->IsZero()) {
                arrayClassName += static_cast<char>(oneChar->GetExtValue());
            } else {
                break;
            }
        }
        /* remember which cache slot holds this array class */
        arrayClassCacheIndex[arrayClassName] = i;
    }
}
3278
LowerFunc(MIRFunction & func)3279 void CGLowerer::LowerFunc(MIRFunction &func)
3280 {
3281 labelIdx = 0;
3282 SetCurrentFunc(&func);
3283 hasTry = false;
3284 LowerEntry(func);
3285 LowerPseudoRegs(func);
3286 BlockNode *origBody = func.GetBody();
3287 CHECK_FATAL(origBody != nullptr, "origBody should not be nullptr");
3288
3289 BlockNode *newBody = LowerBlock(*origBody);
3290 func.SetBody(newBody);
3291 if (needBranchCleanup) {
3292 CleanupBranches(func);
3293 }
3294
3295 uint32 oldTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
3296 // We do the simplify work here because now all the intrinsic calls and potential expansion work of memcpy or other
3297 // functions are handled well. So we can concentrate to do the replacement work.
3298 SimplifyBlock(*newBody);
3299 uint32 newTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
3300 if (newTypeTableSize != oldTypeTableSize) {
3301 beCommon.AddNewTypeAfterBecommon(oldTypeTableSize, newTypeTableSize);
3302 }
3303 }
3304 } /* namespace maplebe */
3305