/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "lower.h"
#include "switch_lowerer.h"

namespace maplebe {

using namespace maple;

#define TARGARM32 0

// The input node must be cvt, retype, zext or sext.
BaseNode *CGLowerer::LowerCastExpr(BaseNode &expr)
{
    return &expr;
}

#if TARGARM32 || TARGAARCH64 || TARGRISCV64 || TARGX86_64
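/*
 * Lower `return <agg>` for targets where a large struct is returned through a
 * hidden first parameter: the value is stored through that pointer and the
 * return statement itself is emitted with no operands. Roughly (a sketch of
 * the intended MIR shape; the formal name comes from LowerEntry below):
 *     return (dread agg %s)
 * becomes
 *     iassign <* agg> 0 (dread ptr %.return.<func>, dread agg %s)
 *     return ()
 */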
BlockNode *CGLowerer::LowerReturnStructUsingFakeParm(NaryStmtNode &retNode)
{
    BlockNode *blk = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
    for (size_t i = 0; i < retNode.GetNopndSize(); ++i) {
        retNode.SetOpnd(LowerExpr(retNode, *retNode.GetNopndAt(i), *blk), i);
    }
    BaseNode *opnd0 = retNode.Opnd(0);
    if (opnd0 == nullptr) {
        /* The function may never return and carry a dummy return const instead of a struct. */
        maple::LogInfo::MapleLogger(kLlWarn) << "return struct should have a kid" << std::endl;
    }

    MIRFunction *curFunc = GetCurrentFunc();
    MIRSymbol *retSt = curFunc->GetFormal(0);
    MIRPtrType *retTy = static_cast<MIRPtrType *>(retSt->GetType());
    IassignNode *iassign = mirModule.CurFuncCodeMemPool()->New<IassignNode>();
    iassign->SetTyIdx(retTy->GetTypeIndex());
    DEBUG_ASSERT(opnd0 != nullptr, "opnd0 should not be nullptr");
    iassign->SetFieldID(0);
    iassign->SetRHS(opnd0);
    if (retSt->IsPreg()) {
        RegreadNode *regNode = mirModule.GetMIRBuilder()->CreateExprRegread(
            GetLoweredPtrType(), curFunc->GetPregTab()->GetPregIdxFromPregno(retSt->GetPreg()->GetPregNo()));
        iassign->SetOpnd(regNode, 0);
    } else {
        AddrofNode *dreadNode = mirModule.CurFuncCodeMemPool()->New<AddrofNode>(OP_dread);
        dreadNode->SetPrimType(GetLoweredPtrType());
        dreadNode->SetStIdx(retSt->GetStIdx());
        iassign->SetOpnd(dreadNode, 0);
    }
    blk->AddStatement(iassign);
    retNode.GetNopnd().clear();
    retNode.SetNumOpnds(0);
    blk->AddStatement(&retNode);
    return blk;
}

#endif /* TARGARM32 || TARGAARCH64 || TARGRISCV64 || TARGX86_64 */

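/* Lower a return statement: if a local-ref variable is returned via dread,
 * record it on the function first; then lower each returned operand into the
 * new block and append the return node itself. */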
BlockNode *CGLowerer::LowerReturn(NaryStmtNode &retNode)
{
    BlockNode *blk = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
    if (retNode.NumOpnds() != 0) {
        BaseNode *expr = retNode.Opnd(0);
        Opcode opr = expr->GetOpCode();
        if (opr == OP_dread) {
            AddrofNode *retExpr = static_cast<AddrofNode *>(expr);
            MIRFunction *mirFunc = mirModule.CurFunction();
            MIRSymbol *sym = mirFunc->GetLocalOrGlobalSymbol(retExpr->GetStIdx());
            if (sym->GetAttr(ATTR_localrefvar)) {
                mirFunc->InsertMIRSymbol(sym);
            }
        }
    }
    for (size_t i = 0; i < retNode.GetNopndSize(); ++i) {
        retNode.SetOpnd(LowerExpr(retNode, *retNode.GetNopndAt(i), *blk), i);
    }
    blk->AddStatement(&retNode);
    return blk;
}

void CGLowerer::LowerIassign(IassignNode &iassign, BlockNode &newBlk)
{
    CHECK_FATAL(iassign.GetFieldID() == 0, "fieldID must be 0");
    LowerStmt(iassign, newBlk);
    newBlk.AddStatement(&iassign);
}

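/*
 * Build a read of the first return-value pseudo register. Integer values of
 * 32 bits or narrower are widened to 64 bits with a cvt so later phases see a
 * full register-width value, e.g. (a sketch):
 *     regread i32 %%retval0  ==>  cvt i64 i32 (regread i32 %%retval0)
 */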
BaseNode *CGLowerer::NeedRetypeWhenLowerCallAssigned(PrimType pType)
{
    BaseNode *retNode = mirModule.GetMIRBuilder()->CreateExprRegread(pType, -kSregRetval0);
    if (IsPrimitiveInteger(pType) && GetPrimTypeBitSize(pType) <= k32BitSize) {
        auto newPty = IsPrimitiveUnsigned(pType) ? PTY_u64 : PTY_i64;
        retNode = mirModule.GetMIRBuilder()->CreateExprTypeCvt(OP_cvt, newPty, pType, *retNode);
    }
    return retNode;
}

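/* Save the return value into the symbol denoted by stIdx, producing roughly
 * `dassign var fieldID (regread <ty> %%retval0)`; a oneelem_simd symbol is
 * read back as f64. */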
DassignNode *CGLowerer::SaveReturnValueInLocal(StIdx stIdx, uint16 fieldID)
{
    MIRSymbol *var;
    if (stIdx.IsGlobal()) {
        var = GlobalTables::GetGsymTable().GetSymbolFromStidx(stIdx.Idx());
    } else {
        DEBUG_ASSERT(GetCurrentFunc() != nullptr, "GetCurrentFunc should not be nullptr");
        var = GetCurrentFunc()->GetSymbolTabItem(stIdx.Idx());
    }
    CHECK_FATAL(var != nullptr, "var should not be nullptr");
    PrimType pType;
    if (var->GetAttr(ATTR_oneelem_simd)) {
        pType = PTY_f64;
    } else {
        pType = GlobalTables::GetTypeTable().GetTypeTable().at(var->GetTyIdx())->GetPrimType();
    }
    auto *regRead = NeedRetypeWhenLowerCallAssigned(pType);
    return mirModule.GetMIRBuilder()->CreateStmtDassign(*var, fieldID, regRead);
}

/* Lower call (including icall) and intrinsiccall statements. */
void CGLowerer::LowerCallStmt(StmtNode &stmt, StmtNode *&nextStmt, BlockNode &newBlk, MIRType *retty, bool uselvar,
                              bool isIntrinAssign)
{
    StmtNode *newStmt = nullptr;
    if (stmt.GetOpCode() == OP_intrinsiccall) {
        auto &intrnNode = static_cast<IntrinsiccallNode &>(stmt);
        if (intrnNode.GetIntrinsic() == maple::INTRN_JS_PURE_CALL) {
            newStmt = &stmt;
        } else {
            newStmt = LowerIntrinsiccall(intrnNode, newBlk);
        }
    } else {
        /* Note that the function has a user-defined (i.e., not intrinsic) call. */
        DEBUG_ASSERT(GetCurrentFunc() != nullptr, "GetCurrentFunc should not be nullptr");
        GetCurrentFunc()->SetHasCall();
        newStmt = &stmt;
    }

    if (newStmt == nullptr) {
        return;
    }

    if (newStmt->GetOpCode() == OP_call || newStmt->GetOpCode() == OP_icall || newStmt->GetOpCode() == OP_icallproto) {
        auto &callNode = static_cast<NaryStmtNode &>(*newStmt);
        for (size_t i = 0; i < callNode.GetNopndSize(); ++i) {
            BaseNode *newOpnd = LowerExpr(callNode, *callNode.GetNopndAt(i), newBlk);
            callNode.SetOpnd(newOpnd, i);
        }
        newStmt = &callNode;
    }
    newStmt->SetSrcPos(stmt.GetSrcPos());
    newBlk.AddStatement(newStmt);
}

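/* Rebuild a callassigned statement as a plain call (the return-value copy is
 * re-attached later in GenBlockNode), propagating deopt info, statement
 * attributes and source position. */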
StmtNode *CGLowerer::GenCallNode(const StmtNode &stmt, PUIdx &funcCalled, CallNode &origCall)
{
    CallNode *newCall = nullptr;
    if (stmt.GetOpCode() == OP_callassigned) {
        newCall = mirModule.GetMIRBuilder()->CreateStmtCall(origCall.GetPUIdx(), origCall.GetNopnd());
    }
    CHECK_FATAL(newCall != nullptr, "nullptr is not expected");
    newCall->SetDeoptBundleInfo(origCall.GetDeoptBundleInfo());
    newCall->SetSrcPos(stmt.GetSrcPos());
    funcCalled = origCall.GetPUIdx();
    CHECK_FATAL(newCall->GetOpCode() == OP_call, "virtual call or super class call are not expected");
    newCall->SetStmtAttrs(stmt.GetStmtAttrs());
    return newCall;
}

StmtNode *CGLowerer::GenIntrinsiccallNode(const StmtNode &stmt, PUIdx &funcCalled, bool &handledAtLowerLevel,
                                          IntrinsiccallNode &origCall)
{
    StmtNode *newCall = nullptr;
    handledAtLowerLevel = IsIntrinsicCallHandledAtLowerLevel(origCall.GetIntrinsic());
    if (handledAtLowerLevel) {
        /* If the lower level can handle the intrinsic, just let it pass through. */
        newCall = &origCall;
    } else {
        PUIdx bFunc = GetBuiltinToUse(origCall.GetIntrinsic());
        if (bFunc != kFuncNotFound) {
            newCall = mirModule.GetMIRBuilder()->CreateStmtCall(bFunc, origCall.GetNopnd());
            CHECK_FATAL(newCall->GetOpCode() == OP_call, "a node other than OP_call is not expected");
        } else {
            if (stmt.GetOpCode() == OP_intrinsiccallassigned) {
                newCall =
                    mirModule.GetMIRBuilder()->CreateStmtIntrinsicCall(origCall.GetIntrinsic(), origCall.GetNopnd());
                CHECK_FATAL(newCall->GetOpCode() == OP_intrinsiccall,
                            "an intrinsic node other than OP_intrinsiccall is not expected");
            } else {
                newCall = mirModule.GetMIRBuilder()->CreateStmtIntrinsicCall(origCall.GetIntrinsic(),
                                                                             origCall.GetNopnd(), origCall.GetTyIdx());
                CHECK_FATAL(newCall->GetOpCode() == OP_intrinsiccallwithtype,
                            "an intrinsic node other than OP_intrinsiccallwithtype is not expected");
            }
        }
        newCall->SetSrcPos(stmt.GetSrcPos());
        funcCalled = bFunc;
    }
    return newCall;
}

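/* Rebuild icallassigned/icallprotoassigned as a plain icall/icallproto,
 * preserving the return type index, deopt info, statement attributes and
 * source position. */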
StmtNode *CGLowerer::GenIcallNode(PUIdx &funcCalled, IcallNode &origCall)
{
    IcallNode *newCall = nullptr;
    if (origCall.GetOpCode() == OP_icallassigned) {
        newCall = mirModule.GetMIRBuilder()->CreateStmtIcall(origCall.GetNopnd());
    } else {
        newCall = mirModule.GetMIRBuilder()->CreateStmtIcallproto(origCall.GetNopnd(), origCall.GetRetTyIdx());
        newCall->SetRetTyIdx(origCall.GetRetTyIdx());
    }
    CHECK_FATAL(newCall != nullptr, "nullptr is not expected");
    newCall->SetDeoptBundleInfo(origCall.GetDeoptBundleInfo());
    newCall->SetStmtAttrs(origCall.GetStmtAttrs());
    newCall->SetSrcPos(origCall.GetSrcPos());
    funcCalled = kFuncNotFound;
    return newCall;
}

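/*
 * Wrap the lowered call in a new block and materialize the return-value copy
 * recorded in p2nRets. For a single symbol return this emits, roughly:
 *     call &f (...)
 *     dassign %ret 0 (regread <ty> %%retval0)
 * and for a preg return, a regassign from %%retval0 instead.
 */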
BlockNode *CGLowerer::GenBlockNode(StmtNode &newCall, const CallReturnVector &p2nRets, const Opcode &opcode,
                                   const PUIdx &funcCalled, bool handledAtLowerLevel, bool uselvar)
{
    BlockNode *blk = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
    blk->AddStatement(&newCall);
    if (!handledAtLowerLevel) {
        CHECK_FATAL(p2nRets.size() <= 1, "make sure p2nRets size <= 1");
        /* Create a dassign statement to save kSregRetval0. */
        StmtNode *dStmt = nullptr;
        MIRType *retType = nullptr;
        if (p2nRets.size() == 1) {
            MIRSymbol *sym = nullptr;
            StIdx stIdx = p2nRets[0].first;
            if (stIdx.IsGlobal()) {
                sym = GlobalTables::GetGsymTable().GetSymbolFromStidx(stIdx.Idx());
            } else {
                DEBUG_ASSERT(GetCurrentFunc() != nullptr, "GetCurrentFunc should not be nullptr");
                sym = GetCurrentFunc()->GetSymbolTabItem(stIdx.Idx());
            }
            bool sizeIs0 = false;
            if (sym != nullptr) {
                retType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(sym->GetTyIdx());
                if (beCommon.GetTypeSize(retType->GetTypeIndex().GetIdx()) == 0) {
                    sizeIs0 = true;
                }
            }
            if (!sizeIs0) {
                RegFieldPair regFieldPair = p2nRets[0].second;
                if (!regFieldPair.IsReg()) {
                    uint16 fieldID = static_cast<uint16>(regFieldPair.GetFieldID());
                    DassignNode *dn = SaveReturnValueInLocal(stIdx, fieldID);
                    CHECK_FATAL(dn->GetFieldID() == 0, "make sure dn's fieldID return 0");
                    LowerDassign(*dn, *blk);
                    CHECK_FATAL(&newCall == blk->GetLast() || newCall.GetNext() == blk->GetLast(),
                                "the saved dassign should immediately follow newCall");
                    dStmt = (&newCall == blk->GetLast()) ? nullptr : blk->GetLast();
                    CHECK_FATAL(newCall.GetNext() == dStmt, "make sure newCall's next equal dStmt");
                } else {
                    PregIdx pregIdx = static_cast<PregIdx>(regFieldPair.GetPregIdx());
                    DEBUG_ASSERT(GetCurrentFunc() != nullptr, "GetCurrentFunc should not be nullptr");
                    MIRPreg *mirPreg = GetCurrentFunc()->GetPregTab()->PregFromPregIdx(pregIdx);
                    PrimType pType = mirPreg->GetPrimType();
                    RegreadNode *regNode = mirModule.GetMIRBuilder()->CreateExprRegread(pType, -kSregRetval0);
                    RegassignNode *regAssign = mirModule.GetMIRBuilder()->CreateStmtRegassign(
                        mirPreg->GetPrimType(), regFieldPair.GetPregIdx(), regNode);
                    blk->AddStatement(regAssign);
                    dStmt = regAssign;
                }
            }
        }
        blk->ResetBlock();
        /* If VerboseCG is on, insert a comment. */
        if (ShouldAddAdditionalComment()) {
            CommentNode *cmnt = mirModule.CurFuncCodeMemPool()->New<CommentNode>(mirModule);
            cmnt->SetComment(kOpcodeInfo.GetName(opcode).c_str());
            if (funcCalled == kFuncNotFound) {
                cmnt->Append(" : unknown");
            } else {
                cmnt->Append(" : ");
                cmnt->Append(GlobalTables::GetFunctionTable().GetFunctionFromPuidx(funcCalled)->GetName());
            }
            blk->AddStatement(cmnt);
        }
        CHECK_FATAL(dStmt == nullptr || dStmt->GetNext() == nullptr, "make sure dStmt or dStmt's next is nullptr");
        LowerCallStmt(newCall, dStmt, *blk, retType, uselvar, opcode == OP_intrinsiccallassigned);
        if (!uselvar && dStmt != nullptr) {
            dStmt->SetSrcPos(newCall.GetSrcPos());
            blk->AddStatement(dStmt);
        }
    }
    return blk;
}

BlockNode *CGLowerer::LowerIntrinsiccallAassignedToAssignStmt(IntrinsiccallNode &intrinsicCall)
{
    auto *builder = mirModule.GetMIRBuilder();
    auto *block = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
    auto intrinsicID = intrinsicCall.GetIntrinsic();
    auto &opndVector = intrinsicCall.GetNopnd();
    auto returnPair = intrinsicCall.GetReturnVec().begin();
    auto regFieldPair = returnPair->second;
    DEBUG_ASSERT(mirModule.CurFunction() != nullptr, "CurFunction should not be nullptr");
    if (regFieldPair.IsReg()) {
        auto regIdx = regFieldPair.GetPregIdx();
        auto primType = mirModule.CurFunction()->GetPregItem(static_cast<PregIdx>(regIdx))->GetPrimType();
        auto intrinsicOp = builder->CreateExprIntrinsicop(intrinsicID, OP_intrinsicop, primType, TyIdx(0), opndVector);
        auto regAssign = builder->CreateStmtRegassign(primType, regIdx, intrinsicOp);
        block->AddStatement(regAssign);
    } else {
        auto fieldID = regFieldPair.GetFieldID();
        auto stIdx = returnPair->first;
        DEBUG_ASSERT(mirModule.CurFunction()->GetLocalOrGlobalSymbol(stIdx) != nullptr, "nullptr check");
        auto *type = mirModule.CurFunction()->GetLocalOrGlobalSymbol(stIdx)->GetType();
        auto intrinsicOp = builder->CreateExprIntrinsicop(intrinsicID, OP_intrinsicop, *type, opndVector);
        auto dAssign = builder->CreateStmtDassign(stIdx, fieldID, intrinsicOp);
        block->AddStatement(dAssign);
    }
    return LowerBlock(*block);
}

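/*
 * Dispatch over the xxx-assigned call opcodes: rebuild the call without its
 * assigned part and let GenBlockNode re-attach the return-value copy. E.g.
 * (a sketch)
 *     callassigned &foo () { dassign %x 0 }
 * lowers to
 *     call &foo ()
 *     dassign %x 0 (regread <ty> %%retval0)
 */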
BlockNode *CGLowerer::LowerCallAssignedStmt(StmtNode &stmt, bool uselvar)
{
    StmtNode *newCall = nullptr;
    CallReturnVector *p2nRets = nullptr;
    PUIdx funcCalled = kFuncNotFound;
    bool handledAtLowerLevel = false;
    switch (stmt.GetOpCode()) {
        case OP_callassigned: {
            auto &origCall = static_cast<CallNode &>(stmt);
            newCall = GenCallNode(stmt, funcCalled, origCall);
            p2nRets = &origCall.GetReturnVec();
            static_cast<CallNode *>(newCall)->SetReturnVec(*p2nRets);
            MIRFunction *curFunc = mirModule.CurFunction();
            curFunc->SetLastFreqMap(newCall->GetStmtID(),
                                    static_cast<uint32>(curFunc->GetFreqFromLastStmt(stmt.GetStmtID())));
            break;
        }
        case OP_intrinsiccallassigned: {
            BlockNode *blockNode = LowerIntrinsiccallToIntrinsicop(stmt);
            if (blockNode != nullptr) {
                return blockNode;
            }
            IntrinsiccallNode &intrincall = static_cast<IntrinsiccallNode &>(stmt);
            newCall = GenIntrinsiccallNode(stmt, funcCalled, handledAtLowerLevel, intrincall);
            p2nRets = &intrincall.GetReturnVec();
            static_cast<IntrinsiccallNode *>(newCall)->SetReturnVec(*p2nRets);
            break;
        }
        case OP_icallprotoassigned:
        case OP_icallassigned: {
            auto &origCall = static_cast<IcallNode &>(stmt);
            newCall = GenIcallNode(funcCalled, origCall);
            p2nRets = &origCall.GetReturnVec();
            static_cast<IcallNode *>(newCall)->SetReturnVec(*p2nRets);
            break;
        }
        default:
            CHECK_FATAL(false, "NIY");
            return nullptr;
    }

    /* Transfer the srcPosition location info. */
    newCall->SetSrcPos(stmt.GetSrcPos());
    return GenBlockNode(*newCall, *p2nRets, stmt.GetOpCode(), funcCalled, handledAtLowerLevel, uselvar);
}

BlockNode *CGLowerer::LowerIntrinsiccallToIntrinsicop(StmtNode &stmt)
{
    /* No intrinsiccall is rewritten into an intrinsicop here yet. */
    return nullptr;
}

void CGLowerer::LowerStmt(StmtNode &stmt, BlockNode &newBlk)
{
    for (size_t i = 0; i < stmt.NumOpnds(); ++i) {
        DEBUG_ASSERT(stmt.Opnd(i) != nullptr, "null ptr check");
        stmt.SetOpnd(LowerExpr(stmt, *stmt.Opnd(i), newBlk), i);
    }
}

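/* At -O2 and above, cache a non-regread switch selector in a fresh pseudo
 * register so the lowered switch evaluates it only once. */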
void CGLowerer::LowerSwitchOpnd(StmtNode &stmt, BlockNode &newBlk)
{
    BaseNode *opnd = LowerExpr(stmt, *stmt.Opnd(0), newBlk);
    if (CGOptions::GetInstance().GetOptimizeLevel() >= CGOptions::kLevel2 && opnd->GetOpCode() != OP_regread) {
        PrimType ptyp = stmt.Opnd(0)->GetPrimType();
        DEBUG_ASSERT(GetCurrentFunc() != nullptr, "GetCurrentFunc should not be nullptr");
        PregIdx pIdx = GetCurrentFunc()->GetPregTab()->CreatePreg(ptyp);
        RegassignNode *regAss = mirBuilder->CreateStmtRegassign(ptyp, pIdx, opnd);
        newBlk.AddStatement(regAss);
        GetCurrentFunc()->SetLastFreqMap(regAss->GetStmtID(),
                                         static_cast<uint32>(GetCurrentFunc()->GetFreqFromLastStmt(stmt.GetStmtID())));
        stmt.SetOpnd(mirBuilder->CreateExprRegread(ptyp, pIdx), 0);
    } else {
        stmt.SetOpnd(opnd, 0); /* reuse the operand lowered above instead of lowering it a second time */
    }
}

BlockNode *CGLowerer::LowerBlock(BlockNode &block)
{
    BlockNode *newBlk = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
    BlockNode *tmpBlockNode = nullptr;
    std::vector<StmtNode *> abortNode;
    if (block.GetFirst() == nullptr) {
        return newBlk;
    }

    StmtNode *nextStmt = block.GetFirst();
    do {
        StmtNode *stmt = nextStmt;
        nextStmt = stmt->GetNext();
        stmt->SetNext(nullptr);
        currentBlock = newBlk;

        switch (stmt->GetOpCode()) {
            case OP_switch: {
                LowerSwitchOpnd(*stmt, *newBlk);
                auto switchMp = std::make_unique<ThreadLocalMemPool>(memPoolCtrler, "switchlowerer");
                MapleAllocator switchAllocator(switchMp.get());
                SwitchLowerer switchLowerer(mirModule, static_cast<SwitchNode &>(*stmt), switchAllocator);
                BlockNode *blk = switchLowerer.LowerSwitch();
                if (blk->GetFirst() != nullptr) {
                    newBlk->AppendStatementsFromBlock(*blk);
                }
                needBranchCleanup = true;
                break;
            }
            case OP_block:
                tmpBlockNode = LowerBlock(static_cast<BlockNode &>(*stmt));
                CHECK_FATAL(tmpBlockNode != nullptr, "nullptr is not expected");
                newBlk->AppendStatementsFromBlock(*tmpBlockNode);
                break;
            case OP_dassign: {
                LowerDassign(static_cast<DassignNode &>(*stmt), *newBlk);
                break;
            }
            case OP_regassign: {
                LowerRegassign(static_cast<RegassignNode &>(*stmt), *newBlk);
                break;
            }
            case OP_iassign: {
                LowerIassign(static_cast<IassignNode &>(*stmt), *newBlk);
                break;
            }
            case OP_callassigned:
            case OP_icallassigned:
            case OP_icallprotoassigned: {
                // Pass the address of an lvar if this is a struct call assignment.
                bool lvar = false;
                newBlk->AppendStatementsFromBlock(*LowerCallAssignedStmt(*stmt, lvar));
                break;
            }
            case OP_intrinsiccallassigned:
                newBlk->AppendStatementsFromBlock(*LowerCallAssignedStmt(*stmt));
                break;
            case OP_intrinsiccall:
            case OP_call:
            case OP_icall:
            case OP_icallproto:
#if TARGARM32 || TARGAARCH64 || TARGRISCV64 || TARGX86_64
                // nextStmt could be changed by the call to LowerCallStmt.
                LowerCallStmt(*stmt, nextStmt, *newBlk);
#else
                LowerStmt(*stmt, *newBlk);
#endif
                break;
            case OP_return: {
#if TARGARM32 || TARGAARCH64 || TARGRISCV64 || TARGX86_64
                DEBUG_ASSERT(GetCurrentFunc() != nullptr, "GetCurrentFunc should not be nullptr");
                if (GetCurrentFunc()->IsFirstArgReturn() && stmt->NumOpnds() > 0) {
                    newBlk->AppendStatementsFromBlock(
                        *LowerReturnStructUsingFakeParm(static_cast<NaryStmtNode &>(*stmt)));
                } else {
#endif
                    NaryStmtNode *retNode = static_cast<NaryStmtNode *>(stmt);
                    if (retNode->GetNopndSize() == 0) {
                        newBlk->AddStatement(stmt);
                    } else {
                        tmpBlockNode = LowerReturn(*retNode);
                        CHECK_FATAL(tmpBlockNode != nullptr, "nullptr is not expected");
                        newBlk->AppendStatementsFromBlock(*tmpBlockNode);
                    }
#if TARGARM32 || TARGAARCH64 || TARGRISCV64 || TARGX86_64
                }
#endif
                break;
            }
            case OP_comment:
                newBlk->AddStatement(stmt);
                break;
            default:
                LowerStmt(*stmt, *newBlk);
                newBlk->AddStatement(stmt);
                break;
        }
        CHECK_FATAL(beCommon.GetSizeOfTypeSizeTable() == GlobalTables::GetTypeTable().GetTypeTableSize(), "Error!");
    } while (nextStmt != nullptr);
    for (auto node : abortNode) {
        newBlk->AddStatement(node);
    }
    return newBlk;
}

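/* Normalize PTY_ptr/PTY_ref to the target's lowered pointer type, including
 * the from-type of type conversions and the operand type of compares. */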
void CGLowerer::LowerTypePtr(BaseNode &node) const
{
    if ((node.GetPrimType() == PTY_ptr) || (node.GetPrimType() == PTY_ref)) {
        node.SetPrimType(GetLoweredPtrType());
    }

    if (kOpcodeInfo.IsTypeCvt(node.GetOpCode())) {
        auto &cvt = static_cast<TypeCvtNode &>(node);
        if ((cvt.FromType() == PTY_ptr) || (cvt.FromType() == PTY_ref)) {
            cvt.SetFromType(GetLoweredPtrType());
        }
    } else if (kOpcodeInfo.IsCompare(node.GetOpCode())) {
        auto &cmp = static_cast<CompareNode &>(node);
        if ((cmp.GetOpndType() == PTY_ptr) || (cmp.GetOpndType() == PTY_ref)) {
            cmp.SetOpndType(GetLoweredPtrType());
        }
    }
}

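/*
 * If the function returns its result through memory, prepend a hidden
 * `.return.<name>` formal pointing at the return slot and rewrite the
 * function type to return void. The prototype changes roughly like (a sketch):
 *     struct S foo(int a)  ==>  void foo(<* S> .return.foo, int a)
 */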
void CGLowerer::LowerEntry(MIRFunction &func)
{
    if (func.IsFirstArgReturn() && func.GetReturnType()->GetPrimType() != PTY_void) {
        MIRSymbol *retSt = func.GetSymTab()->CreateSymbol(kScopeLocal);
        retSt->SetStorageClass(kScFormal);
        retSt->SetSKind(kStVar);
        std::string retName(".return.");
        MIRSymbol *funcSt = GlobalTables::GetGsymTable().GetSymbolFromStidx(func.GetStIdx().Idx());
        DEBUG_ASSERT(funcSt != nullptr, "null ptr check");
        retName += funcSt->GetName();
        retSt->SetNameStrIdx(retName);
        MIRType *pointType = beCommon.BeGetOrCreatePointerType(*func.GetReturnType());

        retSt->SetTyIdx(pointType->GetTypeIndex());
        std::vector<MIRSymbol *> formals;
        formals.emplace_back(retSt);
        for (uint32 i = 0; i < func.GetFormalCount(); ++i) {
            auto formal = func.GetFormal(i);
            formals.emplace_back(formal);
        }
        func.SetFirstArgReturn();

        beCommon.AddElementToFuncReturnType(func, func.GetReturnTyIdx());

        func.UpdateFuncTypeAndFormalsAndReturnType(formals, TyIdx(PTY_void), true);
        auto *funcType = func.GetMIRFuncType();
        DEBUG_ASSERT(funcType != nullptr, "null ptr check");
        funcType->SetFirstArgReturn();
        beCommon.AddTypeSizeAndAlign(funcType->GetTypeIndex(), GetPrimTypeSize(funcType->GetPrimType()));
    }
}

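/*
 * Remove each `goto L` that (ignoring comment statements) immediately
 * precedes `label L`, splicing the skipped comments back into the statement
 * list so none are lost.
 */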
void CGLowerer::CleanupBranches(MIRFunction &func) const
{
    BlockNode *block = func.GetBody();
    StmtNode *prev = nullptr;
    StmtNode *next = nullptr;
    for (StmtNode *curr = block->GetFirst(); curr != nullptr; curr = next) {
        next = curr->GetNext();
        if (next != nullptr) {
            CHECK_FATAL(curr == next->GetPrev(), "unexpected node");
        }
        if ((next != nullptr) && (prev != nullptr) && (curr->GetOpCode() == OP_goto)) {
            /*
             * Skip until we find a label.
             * Note that the current 'goto' statement may be the last statement
             * when discounting comment statements.
             * Make sure we don't lose any comments.
             */
            StmtNode *cmtB = nullptr;
            StmtNode *cmtE = nullptr;
            next = curr->GetNext();

            while ((next != nullptr) && (next->GetOpCode() != OP_label)) {
                if (next->GetOpCode() == OP_comment) {
                    if (cmtB == nullptr) {
                        cmtB = next;
                        cmtE = next;
                    } else {
                        CHECK_FATAL(cmtE != nullptr, "cmtE is null in CGLowerer::CleanupBranches");
                        cmtE->SetNext(next);
                        next->SetPrev(cmtE);
                        cmtE = next;
                    }
                }
                next = next->GetNext();
            }

            curr->SetNext(next);

            if (next != nullptr) {
                next->SetPrev(curr);
            }

            StmtNode *insertAfter = nullptr;

            if ((next != nullptr) &&
                ((static_cast<GotoNode *>(curr))->GetOffset() == (static_cast<LabelNode *>(next))->GetLabelIdx())) {
                insertAfter = prev;
                prev->SetNext(next); /* skip the goto statement (pointed to by curr) */
                next->SetPrev(prev);
                curr = next;            /* make curr point to the label statement */
                next = next->GetNext(); /* advance next to the statement after the label */
            } else {
                insertAfter = curr;
            }

            /* insert comments before 'curr' */
            if (cmtB != nullptr) {
                CHECK_FATAL(cmtE != nullptr, "nullptr is not expected");
                StmtNode *iaNext = insertAfter->GetNext();
                if (iaNext != nullptr) {
                    iaNext->SetPrev(cmtE);
                }
                cmtE->SetNext(iaNext);

                insertAfter->SetNext(cmtB);
                cmtB->SetPrev(insertAfter);

                if (insertAfter == curr) {
                    curr = cmtE;
                }
            }
            if (next == nullptr) {
                func.GetBody()->SetLast(curr);
            }
        }
        prev = curr;
    }
    CHECK_FATAL(func.GetBody()->GetLast() == prev, "make sure the return value of GetLast equals prev");
}

inline bool IsAccessingTheSameMemoryLocation(const DassignNode &dassign, const RegreadNode &rRead,
                                             const CGLowerer &cgLowerer)
{
    StIdx stIdx = cgLowerer.GetSymbolReferredToByPseudoRegister(rRead.GetRegIdx());
    return ((dassign.GetStIdx() == stIdx) && (dassign.GetFieldID() == 0));
}

inline bool IsAccessingTheSameMemoryLocation(const DassignNode &dassign, const DreadNode &dread)
{
    return ((dassign.GetStIdx() == dread.GetStIdx()) && (dassign.GetFieldID() == dread.GetFieldID()));
}

inline bool IsDassignNOP(const DassignNode &dassign)
{
    if (dassign.GetRHS()->GetOpCode() == OP_dread) {
        return IsAccessingTheSameMemoryLocation(dassign, static_cast<DreadNode &>(*dassign.GetRHS()));
    }
    return false;
}

inline bool IsConstvalZero(const BaseNode &n)
{
    return ((n.GetOpCode() == OP_constval) && static_cast<const ConstvalNode &>(n).GetConstVal()->IsZero());
}

#define NEXT_ID(x) ((x) + 1)
#define INTRN_FIRST_SYNC_ENTER NEXT_ID(INTRN_LAST)
#define INTRN_SECOND_SYNC_ENTER NEXT_ID(INTRN_FIRST_SYNC_ENTER)
#define INTRN_THIRD_SYNC_ENTER NEXT_ID(INTRN_SECOND_SYNC_ENTER)
#define INTRN_FOURTH_SYNC_ENTER NEXT_ID(INTRN_THIRD_SYNC_ENTER)
#define INTRN_SYNC_EXIT NEXT_ID(INTRN_FOURTH_SYNC_ENTER)

std::vector<std::pair<CGLowerer::BuiltinFunctionID, PUIdx>> CGLowerer::builtinFuncIDs;

LabelIdx CGLowerer::GetLabelIdx(MIRFunction &curFunc) const
{
    std::string suffix = std::to_string(curFunc.GetLabelTab()->GetLabelTableSize());
    GStrIdx labelStrIdx = GlobalTables::GetStrTable().GetOrCreateStrIdxFromName("__label_BC_" + suffix);
    LabelIdx labIdx = curFunc.GetLabelTab()->AddLabel(labelStrIdx);
    return labIdx;
}

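/*
 * Recursively lower an expression tree. u1 values are widened to u8 (codegen
 * does not handle u1), and a non-trivial `cvt u1` becomes a compare:
 *     cvt u1 i32 (x)  ==>  ne u8 i32 (x, constval i32 0)
 */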
BaseNode *CGLowerer::LowerExpr(BaseNode &parent, BaseNode &expr, BlockNode &blkNode)
{
    bool isCvtU1Expr = (expr.GetOpCode() == OP_cvt && expr.GetPrimType() == PTY_u1 &&
                        static_cast<TypeCvtNode &>(expr).FromType() != PTY_u1);
    if (expr.GetPrimType() == PTY_u1) {
        expr.SetPrimType(PTY_u8);
    }

    for (size_t i = 0; i < expr.NumOpnds(); ++i) {
        expr.SetOpnd(LowerExpr(expr, *expr.Opnd(i), blkNode), i);
    }

    // Convert `cvt u1 xx <expr>` to `ne u8 xx (<expr>, constval xx 0)`.
    // No need to convert `cvt u1 u1 <expr>`.
    if (isCvtU1Expr) {
        auto &cvtExpr = static_cast<TypeCvtNode &>(expr);
        PrimType fromType = cvtExpr.FromType();
        auto *fromMIRType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(TyIdx(fromType));
        // We use u8 instead of u1 because codegen can't recognize u1.
        auto *toMIRType = GlobalTables::GetTypeTable().GetUInt8();
        auto *zero = GlobalTables::GetIntConstTable().GetOrCreateIntConst(0, *fromMIRType);
        auto *converted = mirBuilder->CreateExprCompare(OP_ne, *toMIRType, *fromMIRType, cvtExpr.Opnd(0),
                                                        mirBuilder->CreateConstval(zero));
        return converted;
    }
    switch (expr.GetOpCode()) {
        case OP_dread:
            return LowerDread(static_cast<DreadNode &>(expr), blkNode);

        case OP_addrof:
            return LowerAddrof(static_cast<AddrofNode &>(expr));

        case OP_iread:
            return LowerIread(static_cast<IreadNode &>(expr));

        case OP_cvt:
        case OP_retype:
        case OP_zext:
        case OP_sext:
            return LowerCastExpr(expr);
        default:
            return &expr;
    }
}

BaseNode *CGLowerer::LowerDread(DreadNode &dread, const BlockNode &block)
{
    /* Use PTY_u8 for the boolean type in dread/iread. */
    if (dread.GetPrimType() == PTY_u1) {
        dread.SetPrimType(PTY_u8);
    }
    CHECK_FATAL(dread.GetFieldID() == 0, "fieldID must be 0");
    return LowerDreadToThreadLocal(dread, block);
}

void CGLowerer::LowerRegassign(RegassignNode &regNode, BlockNode &newBlk)
{
    BaseNode *rhsOpnd = regNode.Opnd(0);
    regNode.SetOpnd(LowerExpr(regNode, *rhsOpnd, newBlk), 0);
    newBlk.AddStatement(&regNode);
}

BaseNode *CGLowerer::ExtractSymbolAddress(const StIdx &stIdx)
{
    auto builder = mirModule.GetMIRBuilder();
    return builder->CreateExprAddrof(0, stIdx);
}

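/*
 * A dread of a thread-local global is lowered into an indirect read through
 * the symbol's address, e.g. (a sketch):
 *     dread u32 $tls_x  ==>  iread <* u32> 0 (addrof ptr $tls_x)
 * Any types created along the way are registered with beCommon afterwards.
 */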
BaseNode *CGLowerer::LowerDreadToThreadLocal(BaseNode &expr, const BlockNode &block)
{
    auto *result = &expr;
    if (expr.GetOpCode() != maple::OP_dread) {
        return result;
    }
    uint32 oldTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
    auto &dread = static_cast<DreadNode &>(expr);
    StIdx stIdx = dread.GetStIdx();
    if (!stIdx.IsGlobal()) {
        return result;
    }
    MIRSymbol *symbol = GlobalTables::GetGsymTable().GetSymbolFromStidx(stIdx.Idx());
    CHECK_FATAL(symbol != nullptr, "symbol should not be nullptr");

    if (symbol->IsThreadLocal()) {
        // iread <* u32> 0 (regread u64 %addr)
        auto addr = ExtractSymbolAddress(stIdx);
        auto ptrType = GlobalTables::GetTypeTable().GetOrCreatePointerType(*symbol->GetType());
        auto iread = mirModule.GetMIRBuilder()->CreateExprIread(*symbol->GetType(), *ptrType, dread.GetFieldID(), addr);
        result = iread;
    }
    uint32 newTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
    if (newTypeTableSize != oldTypeTableSize) {
        beCommon.AddNewTypeAfterBecommon(oldTypeTableSize, newTypeTableSize);
    }
    return result;
}

StmtNode *CGLowerer::LowerDassignToThreadLocal(StmtNode &stmt, const BlockNode &block)
{
    StmtNode *result = &stmt;
    if (stmt.GetOpCode() != maple::OP_dassign) {
        return result;
    }
    uint32 oldTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
    auto &dAssign = static_cast<DassignNode &>(stmt);
    StIdx stIdx = dAssign.GetStIdx();
    if (!stIdx.IsGlobal()) {
        return result;
    }
    MIRSymbol *symbol = GlobalTables::GetGsymTable().GetSymbolFromStidx(stIdx.Idx());
    DEBUG_ASSERT(symbol != nullptr, "symbol should not be nullptr");
    if (symbol->IsThreadLocal()) {
        // iassign <* u32> 0 (regread u64 %addr, dread u32 $x)
        auto addr = ExtractSymbolAddress(stIdx);
        auto ptrType = GlobalTables::GetTypeTable().GetOrCreatePointerType(*symbol->GetType());
        auto iassign =
            mirModule.GetMIRBuilder()->CreateStmtIassign(*ptrType, dAssign.GetFieldID(), addr, dAssign.GetRHS());
        result = iassign;
    }
    uint32 newTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
    if (newTypeTableSize != oldTypeTableSize) {
        beCommon.AddNewTypeAfterBecommon(oldTypeTableSize, newTypeTableSize);
    }
    return result;
}

void CGLowerer::LowerDassign(DassignNode &dsNode, BlockNode &newBlk)
{
    StmtNode *newStmt = nullptr;
    BaseNode *rhs = nullptr;
    Opcode op = dsNode.GetRHS()->GetOpCode();
    CHECK_FATAL(dsNode.GetFieldID() == 0, "fieldID must be 0");
    if (op == OP_intrinsicop) {
        IntrinsicopNode *intrinNode = static_cast<IntrinsicopNode *>(dsNode.GetRHS());
        MIRType *retType = IntrinDesc::intrinTable[intrinNode->GetIntrinsic()].GetReturnType();
        CHECK_FATAL(retType != nullptr, "retType should not be nullptr");
        rhs = LowerExpr(dsNode, *intrinNode, newBlk);
        dsNode.SetRHS(rhs);
        CHECK_FATAL(dsNode.GetRHS() != nullptr, "dsNode->rhs is null in CGLowerer::LowerDassign");
        if (!IsDassignNOP(dsNode)) {
            newStmt = &dsNode;
        }
    } else {
        rhs = LowerExpr(dsNode, *dsNode.GetRHS(), newBlk);
        dsNode.SetRHS(rhs);
        newStmt = &dsNode;
    }

    if (newStmt != nullptr) {
        newBlk.AddStatement(LowerDassignToThreadLocal(*newStmt, newBlk));
    }
}

StmtNode *CGLowerer::LowerDefaultIntrinsicCall(IntrinsiccallNode &intrincall, MIRSymbol &st, MIRFunction &fn)
{
    MIRIntrinsicID intrnID = intrincall.GetIntrinsic();
    IntrinDesc *intrinDesc = &IntrinDesc::intrinTable[intrnID];
    std::vector<TyIdx> funcTyVec;
    std::vector<TypeAttrs> fnTaVec;
    MapleVector<BaseNode *> &nOpnds = intrincall.GetNopnd();
    MIRType *retTy = intrinDesc->GetReturnType();
    CHECK_FATAL(retTy != nullptr, "retTy should not be nullptr");
    for (uint32 i = 0; i < nOpnds.size(); ++i) {
        MIRType *argTy = intrinDesc->GetArgType(i);
        CHECK_FATAL(argTy != nullptr, "argTy should not be nullptr");
        funcTyVec.emplace_back(argTy->GetTypeIndex());
        fnTaVec.emplace_back(TypeAttrs());
    }
    MIRType *funcType = beCommon.BeGetOrCreateFunctionType(retTy->GetTypeIndex(), funcTyVec, fnTaVec);
    st.SetTyIdx(funcType->GetTypeIndex());
    fn.SetMIRFuncType(static_cast<MIRFuncType *>(funcType));
    fn.SetReturnTyIdx(retTy->GetTypeIndex());
    return static_cast<CallNode *>(mirBuilder->CreateStmtCall(fn.GetPuidx(), nOpnds));
}

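/*
 * Lower an intrinsiccall whose operands have been lowered into a call of an
 * extern function named after the intrinsic descriptor, e.g. (a sketch):
 *     intrinsiccall FOO (a, b)  ==>  call &FOO (a, b)
 */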
StmtNode *CGLowerer::LowerIntrinsiccall(IntrinsiccallNode &intrincall, BlockNode &newBlk)
{
    MIRIntrinsicID intrnID = intrincall.GetIntrinsic();
    for (size_t i = 0; i < intrincall.GetNumOpnds(); ++i) {
        intrincall.SetOpnd(LowerExpr(intrincall, *intrincall.Opnd(i), newBlk), i);
    }
    IntrinDesc *intrinDesc = &IntrinDesc::intrinTable[intrnID];
    /* By default, lower the intrinsic call to a real function call. */
    MIRSymbol *st = GlobalTables::GetGsymTable().CreateSymbol(kScopeGlobal);
    CHECK_FATAL(intrinDesc->name != nullptr, "intrinsic's name should not be nullptr");
    const std::string name = intrinDesc->name;
    st->SetNameStrIdx(name);
    st->SetStorageClass(kScText);
    st->SetSKind(kStFunc);
    MIRFunction *fn = mirBuilder->GetOrCreateFunction(intrinDesc->name, TyIdx(0));
    beCommon.UpdateTypeTable(*fn->GetMIRFuncType());
    fn->AllocSymTab();
    st->SetFunction(fn);
    st->SetAppearsInCode(true);
    return LowerDefaultIntrinsicCall(intrincall, *st, *fn);
}

PUIdx CGLowerer::GetBuiltinToUse(BuiltinFunctionID id) const
{
    /*
     * Use std::vector and linear search since the number of entries is small.
     * Revisit this if the number of entries gets larger.
     */
    for (const auto &funcID : builtinFuncIDs) {
        if (funcID.first == id) {
            return funcID.second;
        }
    }
    return kFuncNotFound;
}

bool CGLowerer::IsIntrinsicCallHandledAtLowerLevel(MIRIntrinsicID intrinsic) const
{
    switch (intrinsic) {
        // js
        case INTRN_ADD_WITH_OVERFLOW:
        case INTRN_SUB_WITH_OVERFLOW:
        case INTRN_MUL_WITH_OVERFLOW:
        case INTRN_JS_PURE_CALL:
            return true;
        default: {
            return false;
        }
    }
}

void CGLowerer::LowerFunc(MIRFunction &func)
{
    labelIdx = 0;
    SetCurrentFunc(&func);
    LowerEntry(func);
    BlockNode *origBody = func.GetBody();
    CHECK_FATAL(origBody != nullptr, "origBody should not be nullptr");

    /* Record the type-table size before lowering so that any types created
     * during lowering can be registered with beCommon afterwards. */
    uint32 oldTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
    BlockNode *newBody = LowerBlock(*origBody);
    func.SetBody(newBody);
    if (needBranchCleanup) {
        CleanupBranches(func);
    }

    uint32 newTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
    if (newTypeTableSize != oldTypeTableSize) {
        beCommon.AddNewTypeAfterBecommon(oldTypeTableSize, newTypeTableSize);
    }
}
} /* namespace maplebe */