/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "lower.h"
#include "switch_lowerer.h"

namespace maplebe {

using namespace maple;

#define TARGARM32 0

// input node must be cvt, retype, zext or sext
BaseNode *CGLowerer::LowerCastExpr(BaseNode &expr)
{
    return &expr;
}

#if TARGARM32 || TARGAARCH64 || TARGRISCV64 || TARGX86_64
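/*
 * Lower "return <struct>" for targets where the caller passes the result buffer's
 * address as a hidden first formal: the struct value is stored through that pointer
 * with an iassign, and the return statement itself is re-emitted with no operands.
 */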
BlockNode *CGLowerer::LowerReturnStructUsingFakeParm(NaryStmtNode &retNode)
{
    BlockNode *blk = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
    for (size_t i = 0; i < retNode.GetNopndSize(); ++i) {
        retNode.SetOpnd(LowerExpr(retNode, *retNode.GetNopndAt(i), *blk), i);
    }
    BaseNode *opnd0 = retNode.Opnd(0);
    if (opnd0 == nullptr || opnd0->GetPrimType() != PTY_agg) {
        /* It is possible the function never returns and has a dummy return const instead of a struct. */
        maple::LogInfo::MapleLogger(kLlWarn) << "return struct should have a kid" << std::endl;
    }

    MIRFunction *curFunc = GetCurrentFunc();
    MIRSymbol *retSt = curFunc->GetFormal(0);
    MIRPtrType *retTy = static_cast<MIRPtrType *>(retSt->GetType());
    IassignNode *iassign = mirModule.CurFuncCodeMemPool()->New<IassignNode>();
    iassign->SetTyIdx(retTy->GetTypeIndex());
    DEBUG_ASSERT(opnd0 != nullptr, "opnd0 should not be nullptr");
    iassign->SetFieldID(0);
    iassign->SetRHS(opnd0);
    if (retSt->IsPreg()) {
        RegreadNode *regNode = mirModule.GetMIRBuilder()->CreateExprRegread(
            GetLoweredPtrType(), curFunc->GetPregTab()->GetPregIdxFromPregno(retSt->GetPreg()->GetPregNo()));
        iassign->SetOpnd(regNode, 0);
    } else {
        AddrofNode *dreadNode = mirModule.CurFuncCodeMemPool()->New<AddrofNode>(OP_dread);
        dreadNode->SetPrimType(GetLoweredPtrType());
        dreadNode->SetStIdx(retSt->GetStIdx());
        iassign->SetOpnd(dreadNode, 0);
    }
    blk->AddStatement(iassign);
    retNode.GetNopnd().clear();
    retNode.SetNumOpnds(0);
    blk->AddStatement(&retNode);
    return blk;
}

#endif /* TARGARM32 || TARGAARCH64 || TARGRISCV64 || TARGX86_64 */

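/*
 * Lower a plain "return": if the returned value is a dread of a localrefvar symbol,
 * record that symbol in the current function; then lower every operand and append
 * the return statement to a fresh block.
 */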
BlockNode *CGLowerer::LowerReturn(NaryStmtNode &retNode)
{
    BlockNode *blk = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
    if (retNode.NumOpnds() != 0) {
        BaseNode *expr = retNode.Opnd(0);
        Opcode opr = expr->GetOpCode();
        if (opr == OP_dread) {
            AddrofNode *retExpr = static_cast<AddrofNode *>(expr);
            MIRFunction *mirFunc = mirModule.CurFunction();
            MIRSymbol *sym = mirFunc->GetLocalOrGlobalSymbol(retExpr->GetStIdx());
            if (sym->GetAttr(ATTR_localrefvar)) {
                mirFunc->InsertMIRSymbol(sym);
            }
        }
    }
    for (size_t i = 0; i < retNode.GetNopndSize(); ++i) {
        retNode.SetOpnd(LowerExpr(retNode, *retNode.GetNopndAt(i), *blk), i);
    }
    blk->AddStatement(&retNode);
    return blk;
}

void CGLowerer::LowerIassign(IassignNode &iassign, BlockNode &newBlk)
{
    StmtNode *newStmt = nullptr;
    CHECK_FATAL(iassign.GetFieldID() == 0, "fieldID must be 0");
    LowerStmt(iassign, newBlk);
    newStmt = &iassign;
    newBlk.AddStatement(newStmt);
}

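/*
 * Read the first return-value pseudo register; integer values of 32 bits or narrower
 * are widened to i64/u64 with a cvt so later phases see a full register width.
 */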
BaseNode *CGLowerer::NeedRetypeWhenLowerCallAssigned(PrimType pType)
{
    BaseNode *retNode = mirModule.GetMIRBuilder()->CreateExprRegread(pType, -kSregRetval0);
    if (IsPrimitiveInteger(pType) && GetPrimTypeBitSize(pType) <= k32BitSize) {
        auto newPty = IsPrimitiveUnsigned(pType) ? PTY_u64 : PTY_i64;
        retNode = mirModule.GetMIRBuilder()->CreateExprTypeCvt(OP_cvt, newPty, pType, *retNode);
    }
    return retNode;
}

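/*
 * Build a dassign that copies the call's return value (%%retval0) into the local or
 * global variable identified by stIdx/fieldID, using f64 for oneelem_simd symbols.
 */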
DassignNode *CGLowerer::SaveReturnValueInLocal(StIdx stIdx, uint16 fieldID)
{
    MIRSymbol *var;
    if (stIdx.IsGlobal()) {
        var = GlobalTables::GetGsymTable().GetSymbolFromStidx(stIdx.Idx());
    } else {
        DEBUG_ASSERT(GetCurrentFunc() != nullptr, "GetCurrentFunc should not be nullptr");
        var = GetCurrentFunc()->GetSymbolTabItem(stIdx.Idx());
    }
    CHECK_FATAL(var != nullptr, "var should not be nullptr");
    PrimType pType;
    if (var->GetAttr(ATTR_oneelem_simd)) {
        pType = PTY_f64;
    } else {
        pType = GlobalTables::GetTypeTable().GetTypeTable().at(var->GetTyIdx())->GetPrimType();
    }
    auto *regRead = NeedRetypeWhenLowerCallAssigned(pType);
    return mirModule.GetMIRBuilder()->CreateStmtDassign(*var, fieldID, regRead);
}

/* to lower call (including icall) and intrinsiccall statements */
void CGLowerer::LowerCallStmt(StmtNode &stmt, StmtNode *&nextStmt, BlockNode &newBlk, MIRType *retty, bool uselvar,
                              bool isIntrinAssign)
{
    StmtNode *newStmt = nullptr;
    if (stmt.GetOpCode() == OP_intrinsiccall) {
        auto &intrnNode = static_cast<IntrinsiccallNode &>(stmt);
        if (intrnNode.GetIntrinsic() == maple::INTRN_JS_PURE_CALL) {
            newStmt = &stmt;
        } else {
            newStmt = LowerIntrinsiccall(intrnNode, newBlk);
        }
    } else {
        /* We note the function has a user-defined (i.e., not an intrinsic) call. */
        DEBUG_ASSERT(GetCurrentFunc() != nullptr, "GetCurrentFunc should not be nullptr");
        GetCurrentFunc()->SetHasCall();
        newStmt = &stmt;
    }

    if (newStmt == nullptr) {
        return;
    }

    if (newStmt->GetOpCode() == OP_call || newStmt->GetOpCode() == OP_icall || newStmt->GetOpCode() == OP_icallproto) {
        auto &callNode = static_cast<NaryStmtNode &>(*newStmt);
        for (size_t i = 0; i < callNode.GetNopndSize(); ++i) {
            BaseNode *newOpnd = LowerExpr(callNode, *callNode.GetNopndAt(i), newBlk);
            callNode.SetOpnd(newOpnd, i);
        }
        newStmt = &callNode;
    }
    newStmt->SetSrcPos(stmt.GetSrcPos());
    newBlk.AddStatement(newStmt);
}

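/*
 * Clone an OP_callassigned node into a plain OP_call with the same operands, deopt
 * bundle, source position, and statement attributes; reports the callee's PUIdx.
 */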
StmtNode *CGLowerer::GenCallNode(const StmtNode &stmt, PUIdx &funcCalled, CallNode &origCall)
{
    CallNode *newCall = nullptr;
    if (stmt.GetOpCode() == OP_callassigned) {
        newCall = mirModule.GetMIRBuilder()->CreateStmtCall(origCall.GetPUIdx(), origCall.GetNopnd());
    }
    CHECK_FATAL(newCall != nullptr, "nullptr is not expected");
    newCall->SetDeoptBundleInfo(origCall.GetDeoptBundleInfo());
    newCall->SetSrcPos(stmt.GetSrcPos());
    funcCalled = origCall.GetPUIdx();
    CHECK_FATAL((newCall->GetOpCode() == OP_call), "virtual call or super class call are not expected");
    newCall->SetStmtAttrs(stmt.GetStmtAttrs());
    return newCall;
}

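/*
 * Rewrite an intrinsic call statement: intrinsics the lower level handles directly
 * pass through unchanged; otherwise the call becomes either a call to a registered
 * builtin function or a plain intrinsiccall/intrinsiccallwithtype.
 */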
StmtNode *CGLowerer::GenIntrinsiccallNode(const StmtNode &stmt, PUIdx &funcCalled, bool &handledAtLowerLevel,
                                          IntrinsiccallNode &origCall)
{
    StmtNode *newCall = nullptr;
    handledAtLowerLevel = IsIntrinsicCallHandledAtLowerLevel(origCall.GetIntrinsic());
    if (handledAtLowerLevel) {
        /* If the lower level can handle the intrinsic, just let it pass through. */
        newCall = &origCall;
    } else {
        PUIdx bFunc = GetBuiltinToUse(origCall.GetIntrinsic());
        if (bFunc != kFuncNotFound) {
            newCall = mirModule.GetMIRBuilder()->CreateStmtCall(bFunc, origCall.GetNopnd());
            CHECK_FATAL(newCall->GetOpCode() == OP_call, "intrinsicnode except intrinsiccall is not expected");
        } else {
            if (stmt.GetOpCode() == OP_intrinsiccallassigned) {
                newCall =
                    mirModule.GetMIRBuilder()->CreateStmtIntrinsicCall(origCall.GetIntrinsic(), origCall.GetNopnd());
                CHECK_FATAL(newCall->GetOpCode() == OP_intrinsiccall,
                            "intrinsicnode except intrinsiccall is not expected");
            } else {
                newCall = mirModule.GetMIRBuilder()->CreateStmtIntrinsicCall(origCall.GetIntrinsic(),
                                                                             origCall.GetNopnd(), origCall.GetTyIdx());
                CHECK_FATAL(newCall->GetOpCode() == OP_intrinsiccallwithtype,
                            "intrinsicnode except OP_intrinsiccallwithtype is not expected");
            }
        }
        newCall->SetSrcPos(stmt.GetSrcPos());
        funcCalled = bFunc;
    }
    return newCall;
}

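/*
 * Clone an icallassigned/icallprotoassigned into a plain icall/icallproto, carrying
 * over the return type index, deopt bundle, statement attributes, and source position.
 */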
StmtNode *CGLowerer::GenIcallNode(PUIdx &funcCalled, IcallNode &origCall)
{
    IcallNode *newCall = nullptr;
    if (origCall.GetOpCode() == OP_icallassigned) {
        newCall = mirModule.GetMIRBuilder()->CreateStmtIcall(origCall.GetNopnd());
    } else {
        newCall = mirModule.GetMIRBuilder()->CreateStmtIcallproto(origCall.GetNopnd(), origCall.GetRetTyIdx());
        newCall->SetRetTyIdx(origCall.GetRetTyIdx());
    }
    CHECK_FATAL(newCall != nullptr, "nullptr is not expected");
    newCall->SetDeoptBundleInfo(origCall.GetDeoptBundleInfo());
    newCall->SetStmtAttrs(origCall.GetStmtAttrs());
    newCall->SetSrcPos(origCall.GetSrcPos());
    funcCalled = kFuncNotFound;
    return newCall;
}

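/*
 * Wrap a lowered call-assigned statement in a new block: append the call, save the
 * single returned value into its destination (a dassign for a symbol, a regassign
 * for a preg), optionally emit a VerboseCG comment naming the callee, then lower
 * the call statement itself. Results whose type size is zero are dropped.
 */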
BlockNode *CGLowerer::GenBlockNode(StmtNode &newCall, const CallReturnVector &p2nRets, const Opcode &opcode,
                                   const PUIdx &funcCalled, bool handledAtLowerLevel, bool uselvar)
{
    BlockNode *blk = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
    blk->AddStatement(&newCall);
    if (!handledAtLowerLevel) {
        CHECK_FATAL(p2nRets.size() <= 1, "make sure p2nRets size <= 1");
        /* Create DassignStmt to save kSregRetval0. */
        StmtNode *dStmt = nullptr;
        MIRType *retType = nullptr;
        if (p2nRets.size() == 1) {
            MIRSymbol *sym = nullptr;
            StIdx stIdx = p2nRets[0].first;
            if (stIdx.IsGlobal()) {
                sym = GlobalTables::GetGsymTable().GetSymbolFromStidx(stIdx.Idx());
            } else {
                DEBUG_ASSERT(GetCurrentFunc() != nullptr, "GetCurrentFunc should not be nullptr");
                sym = GetCurrentFunc()->GetSymbolTabItem(stIdx.Idx());
            }
            bool sizeIs0 = false;
            if (sym != nullptr) {
                retType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(sym->GetTyIdx());
                if (beCommon.GetTypeSize(retType->GetTypeIndex().GetIdx()) == 0) {
                    sizeIs0 = true;
                }
            }
            if (!sizeIs0) {
                RegFieldPair regFieldPair = p2nRets[0].second;
                if (!regFieldPair.IsReg()) {
                    uint16 fieldID = static_cast<uint16>(regFieldPair.GetFieldID());
                    DassignNode *dn = SaveReturnValueInLocal(stIdx, fieldID);
                    CHECK_FATAL(dn->GetFieldID() == 0, "make sure dn's fieldID return 0");
                    LowerDassign(*dn, *blk);
                    CHECK_FATAL(&newCall == blk->GetLast() || newCall.GetNext() == blk->GetLast(),
                                "the dassign should immediately follow newCall");
                    dStmt = (&newCall == blk->GetLast()) ? nullptr : blk->GetLast();
                    CHECK_FATAL(newCall.GetNext() == dStmt, "make sure newCall's next equal dStmt");
                } else {
                    PregIdx pregIdx = static_cast<PregIdx>(regFieldPair.GetPregIdx());
                    DEBUG_ASSERT(GetCurrentFunc() != nullptr, "GetCurrentFunc should not be nullptr");
                    MIRPreg *mirPreg = GetCurrentFunc()->GetPregTab()->PregFromPregIdx(pregIdx);
                    PrimType pType = mirPreg->GetPrimType();
                    RegreadNode *regNode = mirModule.GetMIRBuilder()->CreateExprRegread(pType, -kSregRetval0);
                    RegassignNode *regAssign = mirModule.GetMIRBuilder()->CreateStmtRegassign(
                        mirPreg->GetPrimType(), regFieldPair.GetPregIdx(), regNode);
                    blk->AddStatement(regAssign);
                    dStmt = regAssign;
                }
            }
        }
        blk->ResetBlock();
        /* if VerboseCG, insert a comment */
        if (ShouldAddAdditionalComment()) {
            CommentNode *cmnt = mirModule.CurFuncCodeMemPool()->New<CommentNode>(mirModule);
            cmnt->SetComment(kOpcodeInfo.GetName(opcode).c_str());
            if (funcCalled == kFuncNotFound) {
                cmnt->Append(" : unknown");
            } else {
                cmnt->Append(" : ");
                cmnt->Append(GlobalTables::GetFunctionTable().GetFunctionFromPuidx(funcCalled)->GetName());
            }
            blk->AddStatement(cmnt);
        }
        CHECK_FATAL(dStmt == nullptr || dStmt->GetNext() == nullptr, "make sure dStmt or dStmt's next is nullptr");
        LowerCallStmt(newCall, dStmt, *blk, retType, uselvar, opcode == OP_intrinsiccallassigned);
        if (!uselvar && dStmt != nullptr) {
            dStmt->SetSrcPos(newCall.GetSrcPos());
            blk->AddStatement(dStmt);
        }
    }
    return blk;
}

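/*
 * Turn an intrinsiccallassigned whose result goes to a single preg or symbol into a
 * regassign/dassign of the corresponding intrinsicop expression, then lower the block.
 */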
BlockNode *CGLowerer::LowerIntrinsiccallAassignedToAssignStmt(IntrinsiccallNode &intrinsicCall)
{
    auto *builder = mirModule.GetMIRBuilder();
    auto *block = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
    auto intrinsicID = intrinsicCall.GetIntrinsic();
    auto &opndVector = intrinsicCall.GetNopnd();
    auto returnPair = intrinsicCall.GetReturnVec().begin();
    auto regFieldPair = returnPair->second;
    DEBUG_ASSERT(mirModule.CurFunction() != nullptr, "CurFunction should not be nullptr");
    if (regFieldPair.IsReg()) {
        auto regIdx = regFieldPair.GetPregIdx();
        auto primType = mirModule.CurFunction()->GetPregItem(static_cast<PregIdx>(regIdx))->GetPrimType();
        auto intrinsicOp = builder->CreateExprIntrinsicop(intrinsicID, OP_intrinsicop, primType, TyIdx(0), opndVector);
        auto regAssign = builder->CreateStmtRegassign(primType, regIdx, intrinsicOp);
        block->AddStatement(regAssign);
    } else {
        auto fieldID = regFieldPair.GetFieldID();
        auto stIdx = returnPair->first;
        DEBUG_ASSERT(mirModule.CurFunction()->GetLocalOrGlobalSymbol(stIdx) != nullptr, "nullptr check");
        auto *type = mirModule.CurFunction()->GetLocalOrGlobalSymbol(stIdx)->GetType();
        auto intrinsicOp = builder->CreateExprIntrinsicop(intrinsicID, OP_intrinsicop, *type, opndVector);
        auto dAssign = builder->CreateStmtDassign(stIdx, fieldID, intrinsicOp);
        block->AddStatement(dAssign);
    }
    return LowerBlock(*block);
}

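/*
 * Dispatch a *assigned call statement to the matching generator (call, intrinsic,
 * icall), copy its return vector onto the cloned node, and hand the result to
 * GenBlockNode for return-value materialization.
 */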
BlockNode *CGLowerer::LowerCallAssignedStmt(StmtNode &stmt, bool uselvar)
{
    StmtNode *newCall = nullptr;
    CallReturnVector *p2nRets = nullptr;
    PUIdx funcCalled = kFuncNotFound;
    bool handledAtLowerLevel = false;
    switch (stmt.GetOpCode()) {
        case OP_callassigned: {
            auto &origCall = static_cast<CallNode &>(stmt);
            newCall = GenCallNode(stmt, funcCalled, origCall);
            p2nRets = &origCall.GetReturnVec();
            static_cast<CallNode *>(newCall)->SetReturnVec(*p2nRets);
            MIRFunction *curFunc = mirModule.CurFunction();
            curFunc->SetLastFreqMap(newCall->GetStmtID(),
                                    static_cast<uint32>(curFunc->GetFreqFromLastStmt(stmt.GetStmtID())));
            break;
        }
        case OP_intrinsiccallassigned: {
            BlockNode *blockNode = LowerIntrinsiccallToIntrinsicop(stmt);
            if (blockNode != nullptr) {
                return blockNode;
            }
            IntrinsiccallNode &intrincall = static_cast<IntrinsiccallNode &>(stmt);
            newCall = GenIntrinsiccallNode(stmt, funcCalled, handledAtLowerLevel, intrincall);
            p2nRets = &intrincall.GetReturnVec();
            static_cast<IntrinsiccallNode *>(newCall)->SetReturnVec(*p2nRets);
            break;
        }
        case OP_icallprotoassigned:
        case OP_icallassigned: {
            auto &origCall = static_cast<IcallNode &>(stmt);
            newCall = GenIcallNode(funcCalled, origCall);
            p2nRets = &origCall.GetReturnVec();
            static_cast<IcallNode *>(newCall)->SetReturnVec(*p2nRets);
            break;
        }
        default:
            CHECK_FATAL(false, "NIY");
            return nullptr;
    }

    /* transfer srcPosition location info */
    newCall->SetSrcPos(stmt.GetSrcPos());
    return GenBlockNode(*newCall, *p2nRets, stmt.GetOpCode(), funcCalled, handledAtLowerLevel, uselvar);
}

BlockNode *CGLowerer::LowerIntrinsiccallToIntrinsicop(StmtNode &stmt)
{
    return nullptr;
}

void CGLowerer::LowerStmt(StmtNode &stmt, BlockNode &newBlk)
{
    for (size_t i = 0; i < stmt.NumOpnds(); ++i) {
        DEBUG_ASSERT(stmt.Opnd(i) != nullptr, "null ptr check");
        stmt.SetOpnd(LowerExpr(stmt, *stmt.Opnd(i), newBlk), i);
    }
}

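/*
 * Lower the operand of an OP_switch. At -O2 the (possibly complex) selector is
 * evaluated once into a new preg so the switch reads a regread; otherwise the
 * lowered operand is used directly.
 */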
void CGLowerer::LowerSwitchOpnd(StmtNode &stmt, BlockNode &newBlk)
{
    BaseNode *opnd = LowerExpr(stmt, *stmt.Opnd(0), newBlk);
    if (CGOptions::GetInstance().GetOptimizeLevel() >= CGOptions::kLevel2 && opnd->GetOpCode() != OP_regread) {
        PrimType ptyp = stmt.Opnd(0)->GetPrimType();
        DEBUG_ASSERT(GetCurrentFunc() != nullptr, "GetCurrentFunc should not be nullptr");
        PregIdx pIdx = GetCurrentFunc()->GetPregTab()->CreatePreg(ptyp);
        RegassignNode *regAss = mirBuilder->CreateStmtRegassign(ptyp, pIdx, opnd);
        newBlk.AddStatement(regAss);
        GetCurrentFunc()->SetLastFreqMap(regAss->GetStmtID(),
                                         static_cast<uint32>(GetCurrentFunc()->GetFreqFromLastStmt(stmt.GetStmtID())));
        stmt.SetOpnd(mirBuilder->CreateExprRegread(ptyp, pIdx), 0);
    } else {
        /* The operand was already lowered above; reuse it instead of lowering twice. */
        stmt.SetOpnd(opnd, 0);
    }
}

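/*
 * Lower every statement of a block into a freshly allocated block, dispatching on
 * opcode: switches are expanded by SwitchLowerer, nested blocks are lowered
 * recursively, assignments and calls get their dedicated lowerers, and anything
 * else has its operands lowered in place.
 */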
BlockNode *CGLowerer::LowerBlock(BlockNode &block)
{
    BlockNode *newBlk = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
    BlockNode *tmpBlockNode = nullptr;
    if (block.GetFirst() == nullptr) {
        return newBlk;
    }

    StmtNode *nextStmt = block.GetFirst();
    do {
        StmtNode *stmt = nextStmt;
        nextStmt = stmt->GetNext();
        stmt->SetNext(nullptr);
        currentBlock = newBlk;

        switch (stmt->GetOpCode()) {
            case OP_switch: {
                LowerSwitchOpnd(*stmt, *newBlk);
                auto switchMp = std::make_unique<ThreadLocalMemPool>(memPoolCtrler, "switchlowerer");
                MapleAllocator switchAllocator(switchMp.get());
                SwitchLowerer switchLowerer(mirModule, static_cast<SwitchNode &>(*stmt), switchAllocator);
                BlockNode *blk = switchLowerer.LowerSwitch();
                if (blk->GetFirst() != nullptr) {
                    newBlk->AppendStatementsFromBlock(*blk);
                }
                needBranchCleanup = true;
                break;
            }
            case OP_block:
                tmpBlockNode = LowerBlock(static_cast<BlockNode &>(*stmt));
                CHECK_FATAL(tmpBlockNode != nullptr, "nullptr is not expected");
                newBlk->AppendStatementsFromBlock(*tmpBlockNode);
                break;
            case OP_dassign: {
                LowerDassign(static_cast<DassignNode &>(*stmt), *newBlk);
                break;
            }
            case OP_regassign: {
                LowerRegassign(static_cast<RegassignNode &>(*stmt), *newBlk);
                break;
            }
            case OP_iassign: {
                LowerIassign(static_cast<IassignNode &>(*stmt), *newBlk);
                break;
            }
            case OP_callassigned:
            case OP_icallassigned:
            case OP_icallprotoassigned: {
                // pass the addr of lvar if this is a struct call assignment
                bool lvar = false;
                newBlk->AppendStatementsFromBlock(*LowerCallAssignedStmt(*stmt, lvar));
                break;
            }
            case OP_intrinsiccallassigned:
                newBlk->AppendStatementsFromBlock(*LowerCallAssignedStmt(*stmt));
                break;
            case OP_intrinsiccall:
            case OP_call:
            case OP_icall:
            case OP_icallproto:
#if TARGARM32 || TARGAARCH64 || TARGRISCV64 || TARGX86_64
                // nextStmt could be changed by the call to LowerStructReturn
                LowerCallStmt(*stmt, nextStmt, *newBlk);
#else
                LowerStmt(*stmt, *newBlk);
#endif
                break;
            case OP_return: {
#if TARGARM32 || TARGAARCH64 || TARGRISCV64 || TARGX86_64
                DEBUG_ASSERT(GetCurrentFunc() != nullptr, "GetCurrentFunc should not be nullptr");
                if (GetCurrentFunc()->IsFirstArgReturn() && stmt->NumOpnds() > 0) {
                    newBlk->AppendStatementsFromBlock(
                        *LowerReturnStructUsingFakeParm(static_cast<NaryStmtNode &>(*stmt)));
                } else {
#endif
                    NaryStmtNode *retNode = static_cast<NaryStmtNode *>(stmt);
                    if (retNode->GetNopndSize() == 0) {
                        newBlk->AddStatement(stmt);
                    } else {
                        tmpBlockNode = LowerReturn(*retNode);
                        CHECK_FATAL(tmpBlockNode != nullptr, "nullptr is not expected");
                        newBlk->AppendStatementsFromBlock(*tmpBlockNode);
                    }
#if TARGARM32 || TARGAARCH64 || TARGRISCV64 || TARGX86_64
                }
#endif
                break;
            }
            case OP_comment:
                newBlk->AddStatement(stmt);
                break;
            default:
                LowerStmt(*stmt, *newBlk);
                newBlk->AddStatement(stmt);
                break;
        }
        CHECK_FATAL(beCommon.GetSizeOfTypeSizeTable() == GlobalTables::GetTypeTable().GetTypeTableSize(), "Error!");
    } while (nextStmt != nullptr);
    return newBlk;
}

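/*
 * Normalize PTY_ptr/PTY_ref to the lowered pointer type, both on the node itself
 * and on the from-type of type conversions and the operand type of compares.
 */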
void CGLowerer::LowerTypePtr(BaseNode &node) const
{
    if ((node.GetPrimType() == PTY_ptr) || (node.GetPrimType() == PTY_ref)) {
        node.SetPrimType(GetLoweredPtrType());
    }

    if (kOpcodeInfo.IsTypeCvt(node.GetOpCode())) {
        auto &cvt = static_cast<TypeCvtNode &>(node);
        if ((cvt.FromType() == PTY_ptr) || (cvt.FromType() == PTY_ref)) {
            cvt.SetFromType(GetLoweredPtrType());
        }
    } else if (kOpcodeInfo.IsCompare(node.GetOpCode())) {
        auto &cmp = static_cast<CompareNode &>(node);
        if ((cmp.GetOpndType() == PTY_ptr) || (cmp.GetOpndType() == PTY_ref)) {
            cmp.SetOpndType(GetLoweredPtrType());
        }
    }
}

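/*
 * For functions that return their result through the first argument, synthesize the
 * hidden ".return.<func>" pointer formal, prepend it to the formal list, and rewrite
 * the function type to return void.
 */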
void CGLowerer::LowerEntry(MIRFunction &func)
{
    if (func.IsFirstArgReturn() && func.GetReturnType()->GetPrimType() != PTY_void) {
        MIRSymbol *retSt = func.GetSymTab()->CreateSymbol(kScopeLocal);
        retSt->SetStorageClass(kScFormal);
        retSt->SetSKind(kStVar);
        std::string retName(".return.");
        MIRSymbol *funcSt = GlobalTables::GetGsymTable().GetSymbolFromStidx(func.GetStIdx().Idx());
        DEBUG_ASSERT(funcSt != nullptr, "null ptr check");
        retName += funcSt->GetName();
        retSt->SetNameStrIdx(retName);
        MIRType *pointType = beCommon.BeGetOrCreatePointerType(*func.GetReturnType());

        retSt->SetTyIdx(pointType->GetTypeIndex());
        std::vector<MIRSymbol *> formals;
        formals.emplace_back(retSt);
        for (uint32 i = 0; i < func.GetFormalCount(); ++i) {
            auto formal = func.GetFormal(i);
            formals.emplace_back(formal);
        }
        func.SetFirstArgReturn();

        beCommon.AddElementToFuncReturnType(func, func.GetReturnTyIdx());

        func.UpdateFuncTypeAndFormalsAndReturnType(formals, TyIdx(PTY_void), true);
        auto *funcType = func.GetMIRFuncType();
        DEBUG_ASSERT(funcType != nullptr, "null ptr check");
        funcType->SetFirstArgReturn();
        beCommon.AddTypeSizeAndAlign(funcType->GetTypeIndex(), GetPrimTypeSize(funcType->GetPrimType()));
    }
}

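/*
 * Remove gotos that jump straight to the label that follows them (a byproduct of
 * switch lowering), while relocating any comment statements between the goto and
 * the label so no comments are lost.
 */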
void CGLowerer::CleanupBranches(MIRFunction &func) const
{
    BlockNode *block = func.GetBody();
    StmtNode *prev = nullptr;
    StmtNode *next = nullptr;
    for (StmtNode *curr = block->GetFirst(); curr != nullptr; curr = next) {
        next = curr->GetNext();
        if (next != nullptr) {
            CHECK_FATAL(curr == next->GetPrev(), "unexpected node");
        }
        if ((next != nullptr) && (prev != nullptr) && (curr->GetOpCode() == OP_goto)) {
            /*
             * Skip until we find a label.
             * Note that the current 'goto' statement may be the last statement
             * when discounting comment statements.
             * Make sure we don't lose any comments.
             */
            StmtNode *cmtB = nullptr;
            StmtNode *cmtE = nullptr;
            next = curr->GetNext();

            while ((next != nullptr) && (next->GetOpCode() != OP_label)) {
                if (next->GetOpCode() == OP_comment) {
                    if (cmtB == nullptr) {
                        cmtB = next;
                        cmtE = next;
                    } else {
                        CHECK_FATAL(cmtE != nullptr, "cmtE is null in CGLowerer::CleanupBranches");
                        cmtE->SetNext(next);
                        next->SetPrev(cmtE);
                        cmtE = next;
                    }
                }
                next = next->GetNext();
            }

            curr->SetNext(next);

            if (next != nullptr) {
                next->SetPrev(curr);
            }

            StmtNode *insertAfter = nullptr;

            if ((next != nullptr) &&
                ((static_cast<GotoNode *>(curr))->GetOffset() == (static_cast<LabelNode *>(next))->GetLabelIdx())) {
                insertAfter = prev;
                prev->SetNext(next); /* skip the goto statement (pointed to by curr) */
                next->SetPrev(prev);
                curr = next;            /* make curr point to the label statement */
                next = next->GetNext(); /* advance next to the statement after the label */
            } else {
                insertAfter = curr;
            }

            /* insert comments before 'curr' */
            if (cmtB != nullptr) {
                CHECK_FATAL(cmtE != nullptr, "nullptr is not expected");
                StmtNode *iaNext = insertAfter->GetNext();
                if (iaNext != nullptr) {
                    iaNext->SetPrev(cmtE);
                }
                cmtE->SetNext(iaNext);

                insertAfter->SetNext(cmtB);
                cmtB->SetPrev(insertAfter);

                if (insertAfter == curr) {
                    curr = cmtE;
                }
            }
            if (next == nullptr) {
                func.GetBody()->SetLast(curr);
            }
        }
        prev = curr;
    }
    CHECK_FATAL(func.GetBody()->GetLast() == prev, "make sure the return value of GetLast equal prev");
}

inline bool IsAccessingTheSameMemoryLocation(const DassignNode &dassign, const RegreadNode &rRead,
                                             const CGLowerer &cgLowerer)
{
    StIdx stIdx = cgLowerer.GetSymbolReferredToByPseudoRegister(rRead.GetRegIdx());
    return ((dassign.GetStIdx() == stIdx) && (dassign.GetFieldID() == 0));
}

inline bool IsAccessingTheSameMemoryLocation(const DassignNode &dassign, const DreadNode &dread)
{
    return ((dassign.GetStIdx() == dread.GetStIdx()) && (dassign.GetFieldID() == dread.GetFieldID()));
}

inline bool IsDassignNOP(const DassignNode &dassign)
{
    if (dassign.GetRHS()->GetOpCode() == OP_dread) {
        return IsAccessingTheSameMemoryLocation(dassign, static_cast<DreadNode &>(*dassign.GetRHS()));
    }
    return false;
}

inline bool IsConstvalZero(const BaseNode &n)
{
    return ((n.GetOpCode() == OP_constval) && static_cast<const ConstvalNode &>(n).GetConstVal()->IsZero());
}

#define NEXT_ID(x) ((x) + 1)
#define INTRN_FIRST_SYNC_ENTER NEXT_ID(INTRN_LAST)
#define INTRN_SECOND_SYNC_ENTER NEXT_ID(INTRN_FIRST_SYNC_ENTER)
#define INTRN_THIRD_SYNC_ENTER NEXT_ID(INTRN_SECOND_SYNC_ENTER)
#define INTRN_FOURTH_SYNC_ENTER NEXT_ID(INTRN_THIRD_SYNC_ENTER)
#define INTRN_YNC_EXIT NEXT_ID(INTRN_FOURTH_SYNC_ENTER)

std::vector<std::pair<CGLowerer::BuiltinFunctionID, PUIdx>> CGLowerer::builtinFuncIDs;

LabelIdx CGLowerer::GetLabelIdx(MIRFunction &curFunc) const
{
    std::string suffix = std::to_string(curFunc.GetLabelTab()->GetLabelTableSize());
    GStrIdx labelStrIdx = GlobalTables::GetStrTable().GetOrCreateStrIdxFromName("__label_BC_" + suffix);
    LabelIdx labIdx = curFunc.GetLabelTab()->AddLabel(labelStrIdx);
    return labIdx;
}

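/*
 * Recursively lower an expression tree: u1 values are widened to u8, a
 * "cvt u1 <from> x" is rewritten as "ne u8 <from> (x, 0)" since codegen has no u1,
 * and dread/iread get their dedicated lowerers.
 */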
BaseNode *CGLowerer::LowerExpr(BaseNode &parent, BaseNode &expr, BlockNode &blkNode)
{
    bool isCvtU1Expr = (expr.GetOpCode() == OP_cvt && expr.GetPrimType() == PTY_u1 &&
                        static_cast<TypeCvtNode &>(expr).FromType() != PTY_u1);
    if (expr.GetPrimType() == PTY_u1) {
        expr.SetPrimType(PTY_u8);
    }

    for (size_t i = 0; i < expr.NumOpnds(); ++i) {
        expr.SetOpnd(LowerExpr(expr, *expr.Opnd(i), blkNode), i);
    }

    // Convert `cvt u1 xx <expr>` to `ne u8 xx (<expr>, constval xx 0)`
    // No need to convert `cvt u1 u1 <expr>`
    if (isCvtU1Expr) {
        auto &cvtExpr = static_cast<TypeCvtNode &>(expr);
        PrimType fromType = cvtExpr.FromType();
        auto *fromMIRType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(TyIdx(fromType));
        // We use u8 instead of u1 because codegen can't recognize u1
        auto *toMIRType = GlobalTables::GetTypeTable().GetUInt8();
        auto *zero = GlobalTables::GetIntConstTable().GetOrCreateIntConst(0, *fromMIRType);
        auto *converted = mirBuilder->CreateExprCompare(OP_ne, *toMIRType, *fromMIRType, cvtExpr.Opnd(0),
                                                        mirBuilder->CreateConstval(zero));
        return converted;
    }
    switch (expr.GetOpCode()) {
        case OP_dread:
            return LowerDread(static_cast<DreadNode &>(expr), blkNode);

        case OP_iread:
            return LowerIread(static_cast<IreadNode &>(expr));

        default:
            return &expr;
    }
}

BaseNode *CGLowerer::LowerDread(DreadNode &dread, const BlockNode &block)
{
    /* use PTY_u8 for boolean type in dread/iread */
    if (dread.GetPrimType() == PTY_u1) {
        dread.SetPrimType(PTY_u8);
    }
    CHECK_FATAL(dread.GetFieldID() == 0, "fieldID must be 0");
    return LowerDreadToThreadLocal(dread, block);
}

void CGLowerer::LowerRegassign(RegassignNode &regNode, BlockNode &newBlk)
{
    BaseNode *rhsOpnd = regNode.Opnd(0);
    regNode.SetOpnd(LowerExpr(regNode, *rhsOpnd, newBlk), 0);
    newBlk.AddStatement(&regNode);
}

BaseNode *CGLowerer::ExtractSymbolAddress(const StIdx &stIdx)
{
    auto builder = mirModule.GetMIRBuilder();
    return builder->CreateExprAddrof(0, stIdx);
}

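/*
 * Rewrite a dread of a thread-local global as an iread through the symbol's
 * address; any types created along the way are registered with beCommon.
 */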
BaseNode *CGLowerer::LowerDreadToThreadLocal(BaseNode &expr, const BlockNode &block)
{
    auto *result = &expr;
    if (expr.GetOpCode() != maple::OP_dread) {
        return result;
    }
    uint32 oldTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
    auto &dread = static_cast<DreadNode &>(expr);
    StIdx stIdx = dread.GetStIdx();
    if (!stIdx.IsGlobal()) {
        return result;
    }
    MIRSymbol *symbol = GlobalTables::GetGsymTable().GetSymbolFromStidx(stIdx.Idx());
    CHECK_FATAL(symbol != nullptr, "symbol should not be nullptr");

    if (symbol->IsThreadLocal()) {
        // iread <* u32> 0 (regread u64 %addr)
        auto addr = ExtractSymbolAddress(stIdx);
        auto ptrType = GlobalTables::GetTypeTable().GetOrCreatePointerType(*symbol->GetType());
        auto iread = mirModule.GetMIRBuilder()->CreateExprIread(*symbol->GetType(), *ptrType, dread.GetFieldID(), addr);
        result = iread;
    }
    uint32 newTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
    if (newTypeTableSize != oldTypeTableSize) {
        beCommon.AddNewTypeAfterBecommon(oldTypeTableSize, newTypeTableSize);
    }
    return result;
}

StmtNode *CGLowerer::LowerDassignToThreadLocal(StmtNode &stmt, const BlockNode &block)
{
    StmtNode *result = &stmt;
    if (stmt.GetOpCode() != maple::OP_dassign) {
        return result;
    }
    uint32 oldTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
    auto &dAssign = static_cast<DassignNode &>(stmt);
    StIdx stIdx = dAssign.GetStIdx();
    if (!stIdx.IsGlobal()) {
        return result;
    }
    uint32 newTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
    if (newTypeTableSize != oldTypeTableSize) {
        beCommon.AddNewTypeAfterBecommon(oldTypeTableSize, newTypeTableSize);
    }
    return result;
}

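/*
 * Lower a dassign: intrinsicop right-hand sides are lowered first and the whole
 * assignment is dropped when it becomes a self-assignment NOP; everything else
 * just has its RHS lowered before the statement is appended.
 */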
void CGLowerer::LowerDassign(DassignNode &dsNode, BlockNode &newBlk)
{
    StmtNode *newStmt = nullptr;
    BaseNode *rhs = nullptr;
    Opcode op = dsNode.GetRHS()->GetOpCode();
    CHECK_FATAL(dsNode.GetFieldID() == 0, "fieldID must be 0");
    if (op == OP_intrinsicop) {
        IntrinsicopNode *intrinNode = static_cast<IntrinsicopNode *>(dsNode.GetRHS());
        MIRType *retType = IntrinDesc::intrinTable[intrinNode->GetIntrinsic()].GetReturnType();
        CHECK_FATAL(retType != nullptr, "retType should not be nullptr");
        rhs = LowerExpr(dsNode, *intrinNode, newBlk);
        dsNode.SetRHS(rhs);
        CHECK_FATAL(dsNode.GetRHS() != nullptr, "dsNode->rhs is null in CGLowerer::LowerDassign");
        if (!IsDassignNOP(dsNode)) {
            newStmt = &dsNode;
        }
    } else {
        rhs = LowerExpr(dsNode, *dsNode.GetRHS(), newBlk);
        dsNode.SetRHS(rhs);
        newStmt = &dsNode;
    }

    if (newStmt != nullptr) {
        newBlk.AddStatement(LowerDassignToThreadLocal(*newStmt, newBlk));
    }
}

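/*
 * Materialize an intrinsic as a real function call: build a function type from the
 * intrinsic descriptor's return and argument types, attach it to the stand-in
 * symbol and function, and emit an OP_call with the original operands.
 */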
StmtNode *CGLowerer::LowerDefaultIntrinsicCall(IntrinsiccallNode &intrincall, MIRSymbol &st, MIRFunction &fn)
{
    MIRIntrinsicID intrnID = intrincall.GetIntrinsic();
    IntrinDesc *intrinDesc = &IntrinDesc::intrinTable[intrnID];
    std::vector<TyIdx> funcTyVec;
    std::vector<TypeAttrs> fnTaVec;
    MapleVector<BaseNode *> &nOpnds = intrincall.GetNopnd();
    MIRType *retTy = intrinDesc->GetReturnType();
    CHECK_FATAL(retTy != nullptr, "retTy should not be nullptr");
    for (uint32 i = 0; i < nOpnds.size(); ++i) {
        MIRType *argTy = intrinDesc->GetArgType(i);
        CHECK_FATAL(argTy != nullptr, "argTy should not be nullptr");
        funcTyVec.emplace_back(argTy->GetTypeIndex());
        fnTaVec.emplace_back(TypeAttrs());
    }
    MIRType *funcType = beCommon.BeGetOrCreateFunctionType(retTy->GetTypeIndex(), funcTyVec, fnTaVec);
    st.SetTyIdx(funcType->GetTypeIndex());
    fn.SetMIRFuncType(static_cast<MIRFuncType *>(funcType));
    fn.SetReturnTyIdx(retTy->GetTypeIndex());
    return static_cast<CallNode *>(mirBuilder->CreateStmtCall(fn.GetPuidx(), nOpnds));
}

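/*
 * Lower an intrinsiccall: operands are lowered first, then the call is turned into
 * a call to a global function named after the intrinsic (the default strategy when
 * no target-specific lowering applies).
 */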
StmtNode *CGLowerer::LowerIntrinsiccall(IntrinsiccallNode &intrincall, BlockNode &newBlk)
{
    MIRIntrinsicID intrnID = intrincall.GetIntrinsic();
    for (size_t i = 0; i < intrincall.GetNumOpnds(); ++i) {
        intrincall.SetOpnd(LowerExpr(intrincall, *intrincall.Opnd(i), newBlk), i);
    }
    IntrinDesc *intrinDesc = &IntrinDesc::intrinTable[intrnID];
    /* default lowers intrinsic call to real function call. */
    MIRSymbol *st = GlobalTables::GetGsymTable().CreateSymbol(kScopeGlobal);
    CHECK_FATAL(intrinDesc->name != nullptr, "intrinsic's name should not be nullptr");
    const std::string name = intrinDesc->name;
    st->SetNameStrIdx(name);
    st->SetStorageClass(kScText);
    st->SetSKind(kStFunc);
    MIRFunction *fn = mirBuilder->GetOrCreateFunction(intrinDesc->name, TyIdx(0));
    beCommon.UpdateTypeTable(*fn->GetMIRFuncType());
    fn->AllocSymTab();
    st->SetFunction(fn);
    st->SetAppearsInCode(true);
    return LowerDefaultIntrinsicCall(intrincall, *st, *fn);
}

PUIdx CGLowerer::GetBuiltinToUse(BuiltinFunctionID id) const
{
    /*
     * use std::vector & linear search as the number of entries is small.
     * we may revisit it if the number of entries gets larger.
     */
    for (const auto &funcID : builtinFuncIDs) {
        if (funcID.first == id) {
            return funcID.second;
        }
    }
    return kFuncNotFound;
}

bool CGLowerer::IsIntrinsicCallHandledAtLowerLevel(MIRIntrinsicID intrinsic) const
{
    switch (intrinsic) {
        // js
        case INTRN_ADD_WITH_OVERFLOW:
        case INTRN_SUB_WITH_OVERFLOW:
        case INTRN_MUL_WITH_OVERFLOW:
        case INTRN_JS_PURE_CALL:
        case INTRN_HEAP_CONSTANT:
        case INTRN_GET_HEAP_CONSTANT_TABLE:
        case INTRN_TAGGED_IS_HEAPOBJECT:
        case INTRN_IS_STABLE_ELEMENTS:
        case INTRN_HAS_PENDING_EXCEPTION:
        case INTRN_TAGGED_OBJECT_IS_STRING:
        case INTRN_IS_COW_ARRAY:
            return true;
        default:
            return false;
    }
}

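/*
 * Entry point of the pass: lower the entry (hidden return formal), lower the body
 * block by block, clean up redundant branches left by switch lowering, and register
 * any types created during lowering with beCommon.
 */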
void CGLowerer::LowerFunc(MIRFunction &func)
{
    labelIdx = 0;
    SetCurrentFunc(&func);
    LowerEntry(func);
    BlockNode *origBody = func.GetBody();
    CHECK_FATAL(origBody != nullptr, "origBody should not be nullptr");

    /* Record the type table size before lowering so newly created types can be detected below. */
    uint32 oldTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
    BlockNode *newBody = LowerBlock(*origBody);
    func.SetBody(newBody);
    if (needBranchCleanup) {
        CleanupBranches(func);
    }

    uint32 newTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
    if (newTypeTableSize != oldTypeTableSize) {
        beCommon.AddNewTypeAfterBecommon(oldTypeTableSize, newTypeTableSize);
    }
}
} /* namespace maplebe */