/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "becommon.h"
#include "rt.h"
#include "cg_option.h"
#include "mir_builder.h"
#include "mpl_logging.h"
#include <cinttypes>
#include <list>

namespace maplebe {
using namespace maple;

BECommon::BECommon(MIRModule &mod)
    : mirModule(mod),
      typeSizeTable(GlobalTables::GetTypeTable().GetTypeTable().size(), 0, mirModule.GetMPAllocator().Adapter()),
      typeAlignTable(GlobalTables::GetTypeTable().GetTypeTable().size(), static_cast<uint8>(mirModule.IsCModule()),
                     mirModule.GetMPAllocator().Adapter()),
      typeHasFlexibleArray(GlobalTables::GetTypeTable().GetTypeTable().size(), 0, mirModule.GetMPAllocator().Adapter()),
      structFieldCountTable(GlobalTables::GetTypeTable().GetTypeTable().size(), 0,
                            mirModule.GetMPAllocator().Adapter()),
      jClassLayoutTable(mirModule.GetMPAllocator().Adapter()),
      funcReturnType(mirModule.GetMPAllocator().Adapter())
{
    for (uint32 i = 1; i < GlobalTables::GetTypeTable().GetTypeTable().size(); ++i) {
        MIRType *ty = GlobalTables::GetTypeTable().GetTypeTable()[i];
        ComputeTypeSizesAligns(*ty);
        LowerTypeAttribute(*ty);
    }

    if (mirModule.IsJavaModule()) {
        for (uint32 i = 0; i < GlobalTables::GetGsymTable().GetSymbolTableSize(); ++i) {
            MIRSymbol *sym = GlobalTables::GetGsymTable().GetSymbol(i);
            if (sym == nullptr) {
                continue;
            }
            LowerJavaVolatileForSymbol(*sym);
        }
    }
}

/*
 * Try to find an available padding slot and allocate the given field in it.
 * Returns the offset of the allocated memory, or 0 if no suitable slot is available.
 * Note: this updates the lists in paddingSlots.
 * Note: paddingSlots is an array of lists of un-occupied (small) slots
 *       available for allocating new fields; so far, just for 1-, 2- and 4-byte
 *       types (mapped to array indices 0, 1, 2).
 */
static uint32 TryAllocInPaddingSlots(std::list<uint32> paddingSlots[], uint32 fieldSize, uint32 fieldAlign,
                                     size_t paddingSlotsLength)
{
    CHECK_FATAL(paddingSlotsLength > 0, "expect paddingSlotsLength > 0");
    if (fieldSize > 4) { // padding slots only cover sizes of 1/2/4 bytes
        return 0;
    }

    uint32 fieldOffset = 0;
    /* here is a greedy search */
    for (size_t freeSlot = static_cast<size_t>(fieldSize >> 1); freeSlot < paddingSlotsLength; ++freeSlot) {
        if (!paddingSlots[freeSlot].empty()) {
            uint32 paddingOffset = paddingSlots[freeSlot].front();
            if (IsAlignedTo(paddingOffset, fieldAlign)) {
                /* reuse one padding slot */
                paddingSlots[freeSlot].pop_front();
                fieldOffset = paddingOffset;
                /* check whether there is still space left in this slot */
                uint32 leftSize = (1u << freeSlot) - fieldSize;
                if (leftSize != 0) {
                    uint32 leftOffset = paddingOffset + fieldSize;
                    if (leftSize & 0x1) { /* check whether the last bit is 1 */
                        paddingSlots[0].push_front(leftOffset);
                        leftOffset += 1;
                    }
                    if (leftSize & 0x2) { /* check whether the penultimate bit is 1 */
                        paddingSlots[1].push_front(leftOffset);
                    }
                }
                break;
            }
        }
    }
    return fieldOffset;
}

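/*
 * Illustrative trace (not part of the build): suppose paddingSlots[2] holds offset 12,
 * i.e. bytes [12, 16) are a free 4-byte hole, and a 1-byte field with alignment 1 is
 * requested. The greedy search starts at index 0, finds the non-empty 4-byte list,
 * places the field at offset 12, and re-queues the 3 leftover bytes as a 1-byte slot
 * at 13 and a 2-byte slot at 14. A return value of 0 means "no reuse"; in this scheme
 * offset 0 is never handed out as a padding hole, so 0 can safely act as the sentinel.
 */
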
static void AddPaddingSlot(std::list<uint32> paddingSlots[], uint32 offset, uint32 size, size_t paddingSlotsLength)
{
    CHECK_FATAL(paddingSlotsLength > 0, "expect paddingSlotsLength > 0");
    /*
     * decompose the padding into 1/2/4-byte slots
     * to satisfy alignment constraints.
     */
    for (size_t i = 0; i < paddingSlotsLength; ++i) {
        if (size & (1u << i)) {
            paddingSlots[i].push_front(offset);
            offset += (1u << i);
        }
    }
}

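/*
 * Illustrative trace (not part of the build): AddPaddingSlot(paddingSlots, 9, 7, 3)
 * decomposes the 7-byte hole at [9, 16) by the set bits of the size: a 1-byte slot at 9,
 * a 2-byte slot at 10, and a 4-byte slot at 12. Because the hole ends at the alignment
 * boundary of the next field, each fragment comes out aligned to its own size, which is
 * what TryAllocInPaddingSlots relies on when it reuses them.
 */
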
void BECommon::AddNewTypeAfterBecommon(uint32 oldTypeTableSize, uint32 newTypeTableSize)
{
    for (auto i = oldTypeTableSize; i < newTypeTableSize; ++i) {
        MIRType *ty = GlobalTables::GetTypeTable().GetTypeFromTyIdx(i);
        CHECK_NULL_FATAL(ty);
        typeSizeTable.emplace_back(0);
        typeAlignTable.emplace_back(static_cast<uint8>(mirModule.IsCModule()));
        typeHasFlexibleArray.emplace_back(0);
        structFieldCountTable.emplace_back(0);
        ComputeTypeSizesAligns(*ty);
        LowerTypeAttribute(*ty);
    }
}

void BECommon::ComputeStructTypeSizesAligns(MIRType &ty, const TyIdx &tyIdx)
{
    auto &structType = static_cast<MIRStructType &>(ty);
    const FieldVector &fields = structType.GetFields();
    uint64 allocedSize = 0;
    uint64 allocedSizeInBits = 0;
    SetStructFieldCount(structType.GetTypeIndex(), fields.size());
    if (fields.size() == 0) {
        if (structType.IsCPlusPlus()) {
            SetTypeSize(tyIdx.GetIdx(), 1); /* an empty struct in C++ has size 1 */
            SetTypeAlign(tyIdx.GetIdx(), 1);
        } else {
            SetTypeSize(tyIdx.GetIdx(), 0);
            SetTypeAlign(tyIdx.GetIdx(), k8ByteSize);
        }
        return;
    }
    auto structAttr = structType.GetTypeAttrs();
    auto structPack = static_cast<uint8>(structAttr.GetPack());
    for (uint32 j = 0; j < fields.size(); ++j) {
        TyIdx fieldTyIdx = fields[j].second.first;
        auto fieldAttr = fields[j].second.second;
        MIRType *fieldType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(fieldTyIdx);
        uint32 fieldTypeSize = GetTypeSize(fieldTyIdx);
        if (fieldTypeSize == 0) {
            ComputeTypeSizesAligns(*fieldType);
            fieldTypeSize = GetTypeSize(fieldTyIdx);
        }
        uint64 fieldSizeBits = fieldTypeSize * kBitsPerByte;
        auto attrAlign = static_cast<uint8>(fieldAttr.GetAlign());
        auto originAlign = std::max(attrAlign, GetTypeAlign(fieldTyIdx));
        uint8 fieldAlign = fieldAttr.IsPacked() ? 1 : std::min(originAlign, structPack);
        uint64 fieldAlignBits = fieldAlign * kBitsPerByte;
        CHECK_FATAL(fieldAlign != 0, "expect fieldAlign not equal 0");
        MIRStructType *subStructType = fieldType->EmbeddedStructType();
        if (subStructType != nullptr) {
            AppendStructFieldCount(structType.GetTypeIndex(), GetStructFieldCount(subStructType->GetTypeIndex()));
        }
        if (structType.GetKind() != kTypeUnion) {
            if (fieldType->GetKind() == kTypeBitField) {
                uint32 fieldSize = static_cast<MIRBitFieldType *>(fieldType)->GetFieldSize();
                /* is this field crossing the alignment boundary of its base type? */
                if ((!structAttr.IsPacked() &&
                     ((allocedSizeInBits / fieldSizeBits) != ((allocedSizeInBits + fieldSize - 1u) / fieldSizeBits))) ||
                    fieldSize == 0) {
                    allocedSizeInBits = RoundUp(allocedSizeInBits, fieldSizeBits);
                }
                /* allocate the bitfield */
                allocedSizeInBits += fieldSize;
                allocedSize = std::max(allocedSize, RoundUp(allocedSizeInBits, fieldAlignBits) / kBitsPerByte);
            } else {
                bool leftoverbits = false;

                if (allocedSizeInBits == allocedSize * kBitsPerByte) {
                    allocedSize = RoundUp(allocedSize, fieldAlign);
                } else {
                    /* still some leftover bits in the allocated words, so we calculate things based on bits */
                    if (allocedSizeInBits / fieldAlignBits !=
                        (allocedSizeInBits + fieldSizeBits - 1) / fieldAlignBits) {
                        /* the field is crossing the alignment boundary of its base type */
                        allocedSizeInBits = RoundUp(allocedSizeInBits, fieldAlignBits);
                    }
                    leftoverbits = true;
                }
                if (leftoverbits) {
                    allocedSizeInBits += fieldSizeBits;
                    allocedSize = std::max(allocedSize, RoundUp(allocedSizeInBits, fieldAlignBits) / kBitsPerByte);
                } else {
                    /* pad allocedSize according to the field alignment */
                    allocedSize = RoundUp(allocedSize, fieldAlign);
                    allocedSize += fieldTypeSize;
                    allocedSizeInBits = allocedSize * kBitsPerByte;
                }
            }
        } else { /* for unions, bitfields are treated as non-bitfields */
            allocedSize = std::max(allocedSize, static_cast<uint64>(fieldTypeSize));
        }
        SetTypeAlign(tyIdx, std::max(GetTypeAlign(tyIdx), fieldAlign));
        /* C99:
         * the last element of a struct with more than one member
         * is a flexible array if it is an array of size 0.
         */
        if ((j != 0) && ((j + 1) == fields.size()) && (fieldType->GetKind() == kTypeArray) &&
            (GetTypeSize(fieldTyIdx.GetIdx()) == 0)) {
            SetHasFlexibleArray(tyIdx.GetIdx(), true);
        }
    }
    SetTypeSize(tyIdx, RoundUp(allocedSize, GetTypeAlign(tyIdx.GetIdx())));
}

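/*
 * Worked example (illustrative only, assuming a typical LP64 C target where int is
 * 4 bytes / 4-aligned and the default pack leaves alignment unchanged):
 *   struct { int a : 3; int b : 30; char c; };
 *   - a:3  -> no container crossing, allocedSizeInBits = 3, allocedSize = 4
 *   - b:30 -> bits 3..32 would cross a 32-bit container, so round up to 32,
 *             then allocedSizeInBits = 62, allocedSize = 8
 *   - c    -> leftover bits, round 62 up to 64, allocedSizeInBits = 72, allocedSize = 9
 *   struct alignment is 4, so the final size is RoundUp(9, 4) = 12 bytes.
 * If the last member were a zero-sized array (e.g. "int tail[0];"), the flexible-array
 * flag would be recorded for the type instead of growing its size.
 */
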
void BECommon::ComputeClassTypeSizesAligns(MIRType &ty, const TyIdx &tyIdx, uint8 align)
{
    uint64 allocedSize = 0;
    const FieldVector &fields = static_cast<MIRStructType &>(ty).GetFields();

    auto &classType = static_cast<MIRClassType &>(ty);
    TyIdx prntTyIdx = classType.GetParentTyIdx();
    /* process the parent class */
    if (prntTyIdx != 0u) {
        MIRClassType *parentType =
            static_cast<MIRClassType *>(GlobalTables::GetTypeTable().GetTypeFromTyIdx(prntTyIdx));
        uint32 prntSize = GetTypeSize(prntTyIdx);
        if (prntSize == 0) {
            ComputeTypeSizesAligns(*parentType);
            prntSize = GetTypeSize(prntTyIdx);
        }
        uint8 prntAlign = GetTypeAlign(prntTyIdx);
        AppendStructFieldCount(tyIdx, GetStructFieldCount(prntTyIdx) + 1);
        /* pad allocedSize according to the parent alignment */
        allocedSize = RoundUp(allocedSize, prntAlign);

        JClassLayout *layout = mirModule.GetMemPool()->New<JClassLayout>(mirModule.GetMPAllocator().Adapter());
        /* add the parent's record to the front */
        layout->emplace_back(JClassFieldInfo(false, false, false, allocedSize));
        /* copy the parent's layout plan into this class's plan */
        if (HasJClassLayout(*parentType)) { /* the parent may have an incomplete type definition */
            const JClassLayout &parentLayout = GetJClassLayout(*parentType);
            layout->insert(layout->end(), parentLayout.begin(), parentLayout.end());
            allocedSize += prntSize;
            SetTypeAlign(tyIdx, std::max(GetTypeAlign(tyIdx), prntAlign));
        } else {
            LogInfo::MapleLogger() << "Warning: trying to lay out a class with an incomplete parent type: "
                                   << parentType->GetName() << "\n";
        }
        jClassLayoutTable[&classType] = layout;
    } else {
        /* This is the root class, say, The Object */
        jClassLayoutTable[&classType] = mirModule.GetMemPool()->New<JClassLayout>(mirModule.GetMPAllocator().Adapter());
    }

    /*
     * a list of un-occupied (small) slots available for insertion;
     * so far, just for 1-, 2- and 4-byte types (mapped to array indices 0, 1, 2)
     */
    std::list<uint32> paddingSlots[3]; // padding slots cover just the 3 sizes of 1/2/4 bytes
    /* process fields */
    AppendStructFieldCount(tyIdx, fields.size());
    if (fields.size() == 0 && mirModule.IsCModule()) {
        SetTypeAlign(tyIdx.GetIdx(), 1);
        SetTypeSize(tyIdx.GetIdx(), 1);
        return;
    }
    for (uint32 j = 0; j < fields.size(); ++j) {
        TyIdx fieldTyIdx = fields[j].second.first;
        MIRType *fieldType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(fieldTyIdx);
        FieldAttrs fieldAttr = fields[j].second.second;
        uint32 fieldSize = GetTypeSize(fieldTyIdx);
        if (fieldSize == 0) {
            ComputeTypeSizesAligns(*fieldType);
            fieldSize = GetTypeSize(fieldTyIdx);
        }
        uint8 fieldAlign = GetTypeAlign(fieldTyIdx);

        if ((fieldType->GetKind() == kTypePointer) && (fieldType->GetPrimType() == PTY_a64)) {
            /* handle class reference field */
            fieldSize = static_cast<uint32>(RTSupport::GetRTSupportInstance().GetFieldSize());
            fieldAlign = RTSupport::GetRTSupportInstance().GetFieldAlign();
        }

        /* try to allocate the field in one of the previously created padding slots */
        uint32 currentFieldOffset =
            TryAllocInPaddingSlots(paddingSlots, fieldSize, fieldAlign, sizeof(paddingSlots) / sizeof(paddingSlots[0]));
        /* cannot reuse a padding slot; lay out at the current end */
        if (currentFieldOffset == 0) {
            /* pad allocedSize according to the field alignment */
            currentFieldOffset = RoundUp(allocedSize, fieldAlign);
            if (currentFieldOffset != allocedSize) {
                /* rounded up, create one padding slot */
                uint32 paddingSize = currentFieldOffset - allocedSize;
                AddPaddingSlot(paddingSlots, allocedSize, paddingSize, sizeof(paddingSlots) / sizeof(paddingSlots[0]));
                allocedSize = currentFieldOffset;
            }
            /* need new memory for this field */
            allocedSize += fieldSize;
        }
        AddElementToJClassLayout(classType, JClassFieldInfo(fieldType->GetKind() == kTypePointer,
                                                            fieldAttr.GetAttr(FLDATTR_rcunowned),
                                                            fieldAttr.GetAttr(FLDATTR_rcweak), currentFieldOffset));
        SetTypeAlign(tyIdx, std::max(GetTypeAlign(tyIdx), fieldAlign));
    }
    SetTypeSize(tyIdx, RoundUp(allocedSize, align));
}

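/*
 * Illustrative sketch (assuming an 8-byte parent record at offset 0 and typical 1/8/4-byte
 * field sizes): a subclass declaring "byte flag; long id; int count" places flag at 8,
 * rounds up to 16 for id (turning bytes [9, 16) into 1/2/4-byte padding slots), and then
 * backfills count into the free 4-byte slot at offset 12 via TryAllocInPaddingSlots, so
 * no space is needed beyond the 24 bytes already allocated.
 */
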
void BECommon::ComputeArrayTypeSizesAligns(MIRType &ty, const TyIdx &tyIdx)
{
    MIRArrayType &arrayType = static_cast<MIRArrayType &>(ty);
    MIRType *elemType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(arrayType.GetElemTyIdx());
    uint32 elemSize = GetTypeSize(elemType->GetTypeIndex());
    if (elemSize == 0) {
        ComputeTypeSizesAligns(*elemType);
        elemSize = GetTypeSize(elemType->GetTypeIndex());
    }
    if (!mirModule.IsCModule()) {
        CHECK_FATAL(elemSize != 0, "elemSize should not equal 0");
        CHECK_FATAL(elemType->GetTypeIndex() != 0u, "elemType's idx should not equal 0");
    }
    uint32 arrayAlign = arrayType.GetTypeAttrs().GetAlign();
    elemSize = std::max(elemSize, static_cast<uint32>(GetTypeAlign(elemType->GetTypeIndex())));
    elemSize = std::max(elemSize, arrayAlign);
    /* compute the total number of elements from the multiple dimensions */
    uint64 numElems = 1;
    for (int d = 0; d < arrayType.GetDim(); ++d) {
        numElems *= arrayType.GetSizeArrayItem(d);
    }
    auto typeSize = elemSize * numElems;
    SetTypeSize(tyIdx, typeSize);
    if (typeSize == 0) {
        SetTypeAlign(tyIdx, static_cast<uint8>(arrayAlign));
    } else {
        auto maxAlign = std::max(static_cast<uint32>(GetTypeAlign(elemType->GetTypeIndex())), arrayAlign);
        SetTypeAlign(tyIdx, static_cast<uint8>(maxAlign));
    }
}

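/*
 * Worked example (illustrative, assuming int is 4 bytes / 4-aligned): for int[2][3]
 * the element stride stays max(4, 4) = 4, numElems = 2 * 3 = 6, so the array type gets
 * size 24 and alignment 4. A zero-length dimension yields size 0, in which case only
 * the declared alignment attribute is recorded for the type.
 */
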
void BECommon::ComputeFArrayOrJArrayTypeSizesAligns(MIRType &ty, const TyIdx &tyIdx)
{
    MIRFarrayType &arrayType = static_cast<MIRFarrayType &>(ty);
    MIRType *elemType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(arrayType.GetElemTyIdx());
    uint32 elemSize = GetTypeSize(elemType->GetTypeIndex());
    if (elemSize == 0) {
        ComputeTypeSizesAligns(*elemType);
        elemSize = GetTypeSize(elemType->GetTypeIndex());
    }
    CHECK_FATAL(elemSize != 0, "elemSize should not equal 0");
    CHECK_FATAL(GetTypeAlign(elemType->GetTypeIndex()) != 0u, "GetTypeAlign return 0 is not expected");
    elemSize = std::max(elemSize, static_cast<uint32>(GetTypeAlign(elemType->GetTypeIndex())));
    SetTypeSize(tyIdx, 0);
    SetTypeAlign(tyIdx, GetTypeAlign(elemType->GetTypeIndex()));
}

/* Note: this also does the Java class layout */
void BECommon::ComputeTypeSizesAligns(MIRType &ty, uint8 align)
{
    TyIdx tyIdx = ty.GetTypeIndex();
    if ((structFieldCountTable.size() > tyIdx) && (GetStructFieldCount(tyIdx) != 0)) {
        return; /* processed before */
    }

    if ((ty.GetPrimType() == PTY_ptr) || (ty.GetPrimType() == PTY_ref)) {
        ty.SetPrimType(GetLoweredPtrType());
    }

    switch (ty.GetKind()) {
        case kTypeScalar:
        case kTypePointer:
        case kTypeBitField:
        case kTypeFunction:
            SetTypeSize(tyIdx, GetPrimTypeSize(ty.GetPrimType()));
            SetTypeAlign(tyIdx, GetTypeSize(tyIdx));
            break;
        case kTypeArray: {
            ComputeArrayTypeSizesAligns(ty, tyIdx);
            break;
        }
        case kTypeFArray:
        case kTypeJArray: {
            ComputeFArrayOrJArrayTypeSizesAligns(ty, tyIdx);
            break;
        }
        case kTypeUnion:
        case kTypeStruct: {
            ComputeStructTypeSizesAligns(ty, tyIdx);
            break;
        }
        case kTypeInterface: { /* an interface should not have instance fields */
            SetTypeAlign(tyIdx, 0);
            SetTypeSize(tyIdx, 0);
            SetStructFieldCount(tyIdx, 0);
            break;
        }
        case kTypeClass: { /* cannot have unions or bitfields */
            ComputeClassTypeSizesAligns(ty, tyIdx, align);
            break;
        }
        case kTypeByName:
        case kTypeVoid:
        default:
            SetTypeSize(tyIdx, 0);
            break;
    }
    /* there may be a passed-in align attribute declared with the symbol */
    SetTypeAlign(tyIdx, std::max(GetTypeAlign(tyIdx), align));
}

void BECommon::LowerTypeAttribute(MIRType &ty)
{
    if (mirModule.IsJavaModule()) {
        LowerJavaTypeAttribute(ty);
    }
}

void BECommon::LowerJavaTypeAttribute(MIRType &ty)
{
    /* we process volatile only for now */
    switch (ty.GetKind()) {
        case kTypeClass: /* cannot have unions or bitfields */
            LowerJavaVolatileInClassType(static_cast<MIRClassType &>(ty));
            break;

        default:
            break;
    }
}

void BECommon::LowerJavaVolatileInClassType(MIRClassType &ty)
{
    for (auto &field : ty.GetFields()) {
        if (field.second.second.GetAttr(FLDATTR_volatile)) {
            field.second.second.SetAttr(FLDATTR_memory_order_acquire);
            field.second.second.SetAttr(FLDATTR_memory_order_release);
        } else {
            MIRType *fieldType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(field.second.first);
            if (fieldType->GetKind() == kTypeClass) {
                LowerJavaVolatileInClassType(static_cast<MIRClassType &>(*fieldType));
            }
        }
    }
}

bool BECommon::IsRefField(MIRStructType &structType, FieldID fieldID) const
{
    if (structType.GetKind() == kTypeClass) {
        CHECK_FATAL(HasJClassLayout(static_cast<MIRClassType &>(structType)),
                    "Cannot find java class layout information");
        const JClassLayout &layout = GetJClassLayout(static_cast<MIRClassType &>(structType));
        if (layout.empty()) {
            ERR(kLncErr, "layout is empty in BECommon::IsRefField");
            return false;
        }
        return layout[fieldID - 1].IsRef();
    }
    return false;
}

void BECommon::LowerJavaVolatileForSymbol(MIRSymbol &sym) const
{
    /* the type attribute is associated with the symbol */
    if (sym.GetAttr(ATTR_volatile)) {
        sym.SetAttr(ATTR_memory_order_acquire);
        sym.SetAttr(ATTR_memory_order_release);
    }
}

void BECommon::GenFieldOffsetMap(const std::string &className)
{
    MIRType *type = GlobalTables::GetTypeTable().GetOrCreateClassType(className, mirModule);
    CHECK_FATAL(type != nullptr, "unknown class, type should not be nullptr");
    MIRClassType *classType = static_cast<MIRClassType *>(type);
    for (FieldID i = 1; i <= GetStructFieldCount(classType->GetTypeIndex()); ++i) {
        FieldID fieldID = i;
        FieldPair fp = classType->TraverseToFieldRef(fieldID);
        GStrIdx strIdx = fp.first;
        if (strIdx == 0u) {
            continue;
        }

        const std::string &fieldName = GlobalTables::GetStrTable().GetStringFromStrIdx(strIdx);

        TyIdx fieldTyIdx = fp.second.first;
        uint64 fieldSize = GetTypeSize(fieldTyIdx);
        MIRType *fieldType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(fieldTyIdx);

        if ((fieldType->GetKind() == kTypePointer) && (fieldType->GetPrimType() == PTY_a64)) {
            /* handle class reference field */
            fieldSize = RTSupport::GetRTSupportInstance().GetFieldSize();
        }

        std::pair<int32, int32> p = GetFieldOffset(*classType, i);
        CHECK_FATAL(p.second == 0, "expect p.second equals 0");
        LogInfo::MapleLogger() << "CLASS_FIELD_OFFSET_MAP(" << className.c_str() << "," << fieldName.c_str() << ","
                               << p.first << "," << fieldSize << ")\n";
    }
}

void BECommon::GenFieldOffsetMap(MIRClassType &classType, FILE &outFile)
{
    const std::string &className = classType.GetName();

    /*
     * We only enumerate fields defined in the current class. There are cases
     * where a parent class may define private fields that have the same name as
     * a field in the current class. This table is generated for the convenience of
     * C programmers. If the C programmer wants to access parent class fields,
     * the programmer should access them as `Parent.field`.
     */
    FieldID myEnd = structFieldCountTable.at(classType.GetTypeIndex());
    FieldID myBegin = (myEnd - static_cast<FieldID>(classType.GetFieldsSize())) + 1;

    for (FieldID i = myBegin; i <= myEnd; ++i) {
        FieldID fieldID = i;
        FieldPair fp = classType.TraverseToFieldRef(fieldID);
        GStrIdx strIdx = fp.first;
        if (strIdx == 0u) {
            continue;
        }
        FieldAttrs attrs = fp.second.second;
        if (attrs.GetAttr(FLDATTR_static)) {
            continue;
        }

        const std::string &fieldName = GlobalTables::GetStrTable().GetStringFromStrIdx(strIdx);

        TyIdx fieldTyIdx = fp.second.first;
        uint64 fieldSize = GetTypeSize(fieldTyIdx);
        MIRType *fieldType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(fieldTyIdx);

        if ((fieldType->GetKind() == kTypePointer) && (fieldType->GetPrimType() == PTY_a64)) {
            /* handle class reference field */
            fieldSize = RTSupport::GetRTSupportInstance().GetFieldSize();
        }

        std::pair<int32, int32> p = GetFieldOffset(classType, i);
        CHECK_FATAL(p.second == 0, "expect p.second equals 0");
        (void)fprintf(&outFile, "__MRT_CLASS_FIELD(%s, %s, %d, %lu)\n", className.c_str(), fieldName.c_str(), p.first,
                      static_cast<unsigned long>(fieldSize));
    }
}

void BECommon::GenObjSize(const MIRClassType &classType, FILE &outFile)
{
    const std::string &className = classType.GetName();
    uint64_t objSize = GetTypeSize(classType.GetTypeIndex());
    if (objSize == 0) {
        return;
    }

    TyIdx parentTypeIdx = classType.GetParentTyIdx();
    MIRType *parentType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(parentTypeIdx);
    const char *parentName = nullptr;
    if (parentType != nullptr) {
        MIRClassType *parentClass = static_cast<MIRClassType *>(parentType);
        parentName = parentClass->GetName().c_str();
    } else {
        parentName = "THIS_IS_ROOT";
    }
    fprintf(&outFile, "__MRT_CLASS(%s, %" PRIu64 ", %s)\n", className.c_str(), objSize, parentName);
}

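/*
 * For illustration (hypothetical class and field names, not taken from any real module):
 * a 24-byte class Foo extending Object with an int field "count" at offset 16 would
 * produce output along the lines of
 *     __MRT_CLASS(Foo, 24, Object)
 *     __MRT_CLASS_FIELD(Foo, count, 16, 4)
 * while the logging variant above prints CLASS_FIELD_OFFSET_MAP(Foo,count,16,4).
 */
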
/*
 * Compute the offset of the field given by fieldID within the structure type
 * structType; the answer is returned as the pair (byteOffset, bitOffset) such that,
 * if the field is a bitfield, byteOffset gives the offset of the container used for
 * extracting the bitfield and bitOffset is relative to that container.
 */
std::pair<int32, int32> BECommon::GetFieldOffset(MIRStructType &structType, FieldID fieldID)
{
    CHECK_FATAL(fieldID <= GetStructFieldCount(structType.GetTypeIndex()), "GetFieldOffset: fieldID too large");
    uint64 allocedSize = 0;
    uint64 allocedSizeInBits = 0;
    FieldID curFieldID = 1;
    if (fieldID == 0) {
        return std::pair<int32, int32>(0, 0);
    }

    if (structType.GetKind() == kTypeClass) {
        CHECK_FATAL(HasJClassLayout(static_cast<MIRClassType &>(structType)),
                    "Cannot find java class layout information");
        const JClassLayout &layout = GetJClassLayout(static_cast<MIRClassType &>(structType));
        CHECK_FATAL(static_cast<uint32>(fieldID) - 1 < layout.size(), "subscript out of range");
        return std::pair<int32, int32>(static_cast<int32>(layout[fieldID - 1].GetOffset()), 0);
    }

    /* process the struct fields */
    FieldVector fields = structType.GetFields();
    auto structPack = static_cast<uint8>(structType.GetTypeAttrs().GetPack());
    for (uint32 j = 0; j < fields.size(); ++j) {
        TyIdx fieldTyIdx = fields[j].second.first;
        auto fieldAttr = fields[j].second.second;
        MIRType *fieldType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(fieldTyIdx);
        uint32 fieldTypeSize = GetTypeSize(fieldTyIdx);
        uint64 fieldSizeBits = fieldTypeSize * kBitsPerByte;
        auto originAlign = GetTypeAlign(fieldTyIdx);
        auto fieldAlign = fieldAttr.IsPacked() ? 1 : std::min(originAlign, structPack);
        uint64 fieldAlignBits = fieldAlign * kBitsPerByte;
        CHECK_FATAL(fieldAlign != 0, "fieldAlign should not equal 0");
        if (structType.GetKind() != kTypeUnion) {
            if (fieldType->GetKind() == kTypeBitField) {
                uint32 fieldSize = static_cast<MIRBitFieldType *>(fieldType)->GetFieldSize();
                /*
                 * Is this field crossing the alignment boundary of its base type? Or
                 * is the field a zero-width bit field?
                 * Refer to the C99 standard (§6.7.2.1):
                 * > As a special case, a bit-field structure member with a width of 0 indicates that no further
                 * > bit-field is to be packed into the unit in which the previous bit-field, if any, was placed.
                 *
                 * We know that a zero-width bit field can cause the next field to be aligned on the next container
                 * boundary, where the container is the same size as the underlying type of the bit field.
                 */
                if ((!structType.GetTypeAttrs().IsPacked() &&
                     ((allocedSizeInBits / fieldSizeBits) != ((allocedSizeInBits + fieldSize - 1u) / fieldSizeBits))) ||
                    fieldSize == 0) {
                    /*
                     * the field is crossing the alignment boundary of its base type;
                     * round allocedSizeInBits up to the container boundary (fieldSizeBits)
                     */
                    allocedSizeInBits = RoundUp(allocedSizeInBits, fieldSizeBits);
                }
                /* allocate the bitfield */
                if (curFieldID == fieldID) {
                    return std::pair<int32, int32>((allocedSizeInBits / fieldAlignBits) * fieldAlign,
                                                   allocedSizeInBits % fieldAlignBits);
                } else {
                    ++curFieldID;
                }
                allocedSizeInBits += fieldSize;
                allocedSize = std::max(allocedSize, RoundUp(allocedSizeInBits, fieldAlignBits) / kBitsPerByte);
            } else {
                bool leftOverBits = false;
                uint64 offset = 0;

                if (allocedSizeInBits == allocedSize * k8BitSize) {
                    allocedSize = RoundUp(allocedSize, fieldAlign);
                    offset = allocedSize;
                } else {
                    /* still some leftover bits in the allocated words, so we calculate things based on bits */
                    if (allocedSizeInBits / fieldAlignBits !=
                        (allocedSizeInBits + fieldSizeBits - k1BitSize) / fieldAlignBits) {
                        /* the field is crossing the alignment boundary of its base type */
                        allocedSizeInBits = RoundUp(allocedSizeInBits, fieldAlignBits);
                    }
                    allocedSize = RoundUp(allocedSize, fieldAlign);
                    offset = (allocedSizeInBits / fieldAlignBits) * fieldAlign;
                    leftOverBits = true;
                }

                if (curFieldID == fieldID) {
                    return std::pair<int32, int32>(offset, 0);
                } else {
                    MIRStructType *subStructType = fieldType->EmbeddedStructType();
                    if (subStructType == nullptr) {
                        ++curFieldID;
                    } else {
                        if ((curFieldID + GetStructFieldCount(subStructType->GetTypeIndex())) < fieldID) {
                            curFieldID += GetStructFieldCount(subStructType->GetTypeIndex()) + 1;
                        } else {
                            std::pair<int32, int32> result = GetFieldOffset(*subStructType, fieldID - curFieldID);
                            return std::pair<int32, int32>(result.first + allocedSize, result.second);
                        }
                    }
                }

                if (leftOverBits) {
                    allocedSizeInBits += fieldSizeBits;
                    allocedSize = std::max(allocedSize, RoundUp(allocedSizeInBits, fieldAlignBits) / kBitsPerByte);
                } else {
                    allocedSize += fieldTypeSize;
                    allocedSizeInBits = allocedSize * kBitsPerByte;
                }
            }
        } else { /* for unions, bitfields are treated as non-bitfields */
            if (curFieldID == fieldID) {
                return std::pair<int32, int32>(0, 0);
            } else {
                MIRStructType *subStructType = fieldType->EmbeddedStructType();
                if (subStructType == nullptr) {
                    curFieldID++;
                } else {
                    if ((curFieldID + GetStructFieldCount(subStructType->GetTypeIndex())) < fieldID) {
                        curFieldID += GetStructFieldCount(subStructType->GetTypeIndex()) + 1;
                    } else {
                        return GetFieldOffset(*subStructType, fieldID - curFieldID);
                    }
                }
            }
        }
    }
    CHECK_FATAL(false, "GetFieldOffset() fails to find field");
    return std::pair<int32, int32>(0, 0);
}

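/*
 * Worked example (illustrative only, the same struct as in the layout example above,
 * assuming int is 4 bytes / 4-aligned): for struct { int a : 3; int b : 30; char c; }
 *   - GetFieldOffset(s, 1) -> (0, 0): a lives in the 4-byte container at byte 0, bit 0
 *   - GetFieldOffset(s, 2) -> (4, 0): b would cross the first 32-bit container, so it
 *     starts a new container at byte 4
 *   - GetFieldOffset(s, 3) -> (8, 0): c is a plain field placed after the leftover bits
 * which matches the 12-byte size computed by ComputeStructTypeSizesAligns.
 */
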
bool BECommon::TyIsInSizeAlignTable(const MIRType &ty) const
{
    if (typeSizeTable.size() != typeAlignTable.size()) {
        return false;
    }
    return ty.GetTypeIndex() < typeSizeTable.size();
}

void BECommon::AddAndComputeSizeAlign(MIRType &ty)
{
    FinalizeTypeTable(ty);
    typeAlignTable.emplace_back(mirModule.IsCModule());
    typeSizeTable.emplace_back(0);
    ComputeTypeSizesAligns(ty);
}

void BECommon::AddElementToJClassLayout(MIRClassType &klass, JClassFieldInfo info)
{
    JClassLayout &layout = *(jClassLayoutTable.at(&klass));
    layout.emplace_back(info);
}

void BECommon::AddElementToFuncReturnType(MIRFunction &func, const TyIdx tyIdx)
{
    funcReturnType[&func] = tyIdx;
}

MIRType *BECommon::BeGetOrCreatePointerType(const MIRType &pointedType)
{
    MIRType *newType = GlobalTables::GetTypeTable().GetOrCreatePointerType(pointedType, GetLoweredPtrType());
    if (TyIsInSizeAlignTable(*newType)) {
        return newType;
    }
    AddAndComputeSizeAlign(*newType);
    return newType;
}

MIRType *BECommon::BeGetOrCreateFunctionType(TyIdx tyIdx, const std::vector<TyIdx> &vecTy,
                                             const std::vector<TypeAttrs> &vecAt)
{
    MIRType *newType = GlobalTables::GetTypeTable().GetOrCreateFunctionType(tyIdx, vecTy, vecAt);
    if (TyIsInSizeAlignTable(*newType)) {
        return newType;
    }
    AddAndComputeSizeAlign(*newType);
    return newType;
}

void BECommon::FinalizeTypeTable(const MIRType &ty)
{
    if (ty.GetTypeIndex() > GetSizeOfTypeSizeTable()) {
        if (mirModule.GetSrcLang() == kSrcLangC) {
            for (uint32 i = GetSizeOfTypeSizeTable(); i < ty.GetTypeIndex(); ++i) {
                MIRType *tyTmp = GlobalTables::GetTypeTable().GetTypeFromTyIdx(i);
                AddAndComputeSizeAlign(*tyTmp);
            }
        } else {
            CHECK_FATAL(ty.GetTypeIndex() == typeSizeTable.size(), "make sure the ty idx is exactly the table size");
        }
    }
}

BaseNode *BECommon::GetAddressOfNode(const BaseNode &node)
{
    switch (node.GetOpCode()) {
        case OP_dread: {
            const DreadNode &dNode = static_cast<const DreadNode &>(node);
            const StIdx &index = dNode.GetStIdx();
            return mirModule.GetMIRBuilder()->CreateAddrof(*mirModule.CurFunction()->GetLocalOrGlobalSymbol(index));
        }
        case OP_iread: {
            const IreadNode &iNode = static_cast<const IreadNode &>(node);
            if (iNode.GetFieldID() == 0) {
                return iNode.Opnd(0);
            }

            uint32 index = static_cast<MIRPtrType *>(GlobalTables::GetTypeTable().GetTypeTable().at(iNode.GetTyIdx()))
                           ->GetPointedTyIdx();
            MIRType *pointedType = GlobalTables::GetTypeTable().GetTypeTable().at(index);
            std::pair<int32, int32> byteBitOffset =
                GetFieldOffset(static_cast<MIRStructType &>(*pointedType), iNode.GetFieldID());
#if TARGAARCH64 || TARGRISCV64
            DEBUG_ASSERT(GetAddressPrimType() == GetLoweredPtrType(),
                         "incorrect address type, expect a GetLoweredPtrType()");
#endif
            return mirModule.GetMIRBuilder()->CreateExprBinary(
                OP_add, *GlobalTables::GetTypeTable().GetPrimType(GetAddressPrimType()),
                static_cast<BaseNode *>(iNode.Opnd(0)),
                mirModule.GetMIRBuilder()->CreateIntConst(byteBitOffset.first, PTY_u32));
        }
        default:
            return nullptr;
    }
}

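/*
 * Illustrative sketch: for an iread of a non-bitfield member at byte offset 8 through a
 * pointer base, the returned expression is roughly
 *     add addrType (base, constval u32 8)
 * i.e. the field address is rebuilt as base-plus-constant, while a dread is answered with
 * an addrof of the underlying symbol. Only the byte part of the offset pair is used here.
 */
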
bool BECommon::CallIsOfAttr(FuncAttrKind attr, const StmtNode *narynode) const
{
    (void)attr;
    (void)narynode;
    return false;

    /* For now, 64x1_t type objects are not propagated into pregs by mplme, so the following
       is not needed yet. We need to revisit this later when types are enhanced with attributes. */
#if TO_BE_RESURRECTED
    bool attrFunc = false;
    if (narynode->GetOpCode() == OP_call) {
        CallNode *callNode = static_cast<CallNode *>(narynode);
        MIRFunction *func = GlobalTables::GetFunctionTable().GetFunctionFromPuidx(callNode->GetPUIdx());
        attrFunc = (mirModule.GetSrcLang() == kSrcLangC && func->GetAttr(attr)) ? true : false;
    } else if (narynode->GetOpCode() == OP_icall) {
        IcallNode *icallNode = static_cast<IcallNode *>(narynode);
        BaseNode *fNode = icallNode->Opnd(0);
        MIRFuncType *fType = nullptr;
        MIRPtrType *pType = nullptr;
        if (fNode->GetOpCode() == OP_dread) {
            DreadNode *dNode = static_cast<DreadNode *>(fNode);
            MIRSymbol *symbol = mirModule.CurFunction()->GetLocalOrGlobalSymbol(dNode->GetStIdx());
            pType = static_cast<MIRPtrType *>(symbol->GetType());
            MIRType *ty = pType;
            if (dNode->GetFieldID() != 0) {
                DEBUG_ASSERT(ty->GetKind() == kTypeStruct || ty->GetKind() == kTypeClass, "");
                FieldPair thepair;
                if (ty->GetKind() == kTypeStruct) {
                    thepair = static_cast<MIRStructType *>(ty)->TraverseToField(dNode->GetFieldID());
                } else {
                    thepair = static_cast<MIRClassType *>(ty)->TraverseToField(dNode->GetFieldID());
                }
                pType = static_cast<MIRPtrType *>(GlobalTables::GetTypeTable().GetTypeFromTyIdx(thepair.second.first));
            }
            fType = static_cast<MIRFuncType *>(pType->GetPointedType());
        } else if (fNode->GetOpCode() == OP_iread) {
            IreadNode *iNode = static_cast<IreadNode *>(fNode);
            MIRPtrType *pointerty =
                static_cast<MIRPtrType *>(GlobalTables::GetTypeTable().GetTypeFromTyIdx(iNode->GetTyIdx()));
            MIRType *pointedType = pointerty->GetPointedType();
            if (iNode->GetFieldID() != 0) {
                pointedType = static_cast<MIRStructType *>(pointedType)->GetFieldType(iNode->GetFieldID());
            }
            if (pointedType->GetKind() == kTypeFunction) {
                fType = static_cast<MIRFuncType *>(pointedType);
            } else if (pointedType->GetKind() == kTypePointer) {
                return false; /* assert? */
            }
        } else if (fNode->GetOpCode() == OP_select) {
            TernaryNode *sNode = static_cast<TernaryNode *>(fNode);
            BaseNode *expr = sNode->Opnd(1);
            // both function pointers under the select should have the same signature, so check op1 only
            AddroffuncNode *afNode = static_cast<AddroffuncNode *>(expr);
            MIRFunction *func = GlobalTables::GetFunctionTable().GetFunctionFromPuidx(afNode->GetPUIdx());
            attrFunc = mirModule.GetSrcLang() == kSrcLangC && func->GetAttr(attr);
        } else if (fNode->GetOpCode() == OP_regread) {
            RegreadNode *rNode = static_cast<RegreadNode *>(fNode);
            PregIdx pregidx = rNode->GetRegIdx();
            MIRPreg *preg = mirModule.CurFunction()->GetPregTab()->PregFromPregIdx(pregidx);
            MIRType *type = preg->GetMIRType();
            if (type == nullptr) {
                return false;
            }
            MIRPtrType *pType = static_cast<MIRPtrType *>(type);
            type = pType->GetPointedType();
            if (type == nullptr) {
                return false;
            }
        } else if (fNode->GetOpCode() == OP_retype) {
            RetypeNode *rNode = static_cast<RetypeNode *>(fNode);
            pType = static_cast<MIRPtrType *>(GlobalTables::GetTypeTable().GetTypeFromTyIdx(rNode->GetTyIdx()));
            fType = static_cast<MIRFuncType *>(pType->GetPointedType());
        } else {
            return false; /* assert? */
        }
    }
    return attrFunc;
#endif
}
} /* namespace maplebe */