/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
15
16 #include "becommon.h"
17 #include "rt.h"
18 #include "cg_option.h"
19 #include "mir_builder.h"
20 #include "mpl_logging.h"
21 #include <cinttypes>
22 #include <list>
23
24 namespace maplebe {
25 using namespace maple;
26
BECommon(MIRModule & mod)27 BECommon::BECommon(MIRModule &mod)
28 : mirModule(mod),
29 typeSizeTable(GlobalTables::GetTypeTable().GetTypeTable().size(), 0, mirModule.GetMPAllocator().Adapter()),
30 typeAlignTable(GlobalTables::GetTypeTable().GetTypeTable().size(), static_cast<uint8>(mirModule.IsCModule()),
31 mirModule.GetMPAllocator().Adapter()),
32 typeHasFlexibleArray(GlobalTables::GetTypeTable().GetTypeTable().size(), 0, mirModule.GetMPAllocator().Adapter()),
33 structFieldCountTable(GlobalTables::GetTypeTable().GetTypeTable().size(), 0,
34 mirModule.GetMPAllocator().Adapter()),
35 jClassLayoutTable(mirModule.GetMPAllocator().Adapter()),
36 funcReturnType(mirModule.GetMPAllocator().Adapter())
37 {
38 for (uint32 i = 1; i < GlobalTables::GetTypeTable().GetTypeTable().size(); ++i) {
39 MIRType *ty = GlobalTables::GetTypeTable().GetTypeTable()[i];
40 ComputeTypeSizesAligns(*ty);
41 }
42 }
43
44 /*
45 * try to find an available padding slot, and allocate the given field in it.
46 * return the offset of the allocated memory. 0 if not available
47 * Note: this will update lists in paddingSlots
48 * Note: padding slots is a list of un-occupied (small size) slots
49 * available to allocate new fields. so far, just for 1, 2, 4 bytes
50 * types (map to array index 0, 1, 2)
51 */
TryAllocInPaddingSlots(std::list<uint32> paddingSlots[],uint32 fieldSize,uint32 fieldAlign,size_t paddingSlotsLength)52 static uint32 TryAllocInPaddingSlots(std::list<uint32> paddingSlots[], uint32 fieldSize, uint32 fieldAlign,
53 size_t paddingSlotsLength)
54 {
55 CHECK_FATAL(paddingSlotsLength > 0, "expect paddingSlotsLength > 0");
56 if (fieldSize > 4) { // padding slots are just for size 1/2/4 bytes
57 return 0;
58 }
59
60 uint32 fieldOffset = 0;
61 /* here is a greedy search */
62 for (size_t freeSlot = static_cast<size_t>(fieldSize >> 1); freeSlot < paddingSlotsLength; ++freeSlot) {
63 if (!paddingSlots[freeSlot].empty()) {
64 uint32 paddingOffset = paddingSlots[freeSlot].front();
65 if (IsAlignedTo(paddingOffset, fieldAlign)) {
66 /* reuse one padding slot */
67 paddingSlots[freeSlot].pop_front();
68 fieldOffset = paddingOffset;
69 /* check whether there're still space left in this slot */
70 uint32 leftSize = (1u << freeSlot) - fieldSize;
71 if (leftSize != 0) {
72 uint32 leftOffset = paddingOffset + fieldSize;
73 if (leftSize & 0x1) { /* check whether the last bit is 1 */
74 paddingSlots[0].push_front(leftOffset);
75 leftOffset += 1;
76 }
77 if (leftSize & 0x2) { /* check whether the penultimate bit is 1 */
78 paddingSlots[1].push_front(leftOffset);
79 }
80 }
81 break;
82 }
83 }
84 }
85 return fieldOffset;
86 }
87
AddPaddingSlot(std::list<uint32> paddingSlots[],uint32 offset,uint32 size,size_t paddingSlotsLength)88 static void AddPaddingSlot(std::list<uint32> paddingSlots[], uint32 offset, uint32 size, size_t paddingSlotsLength)
89 {
90 CHECK_FATAL(paddingSlotsLength > 0, "expect paddingSlotsLength > 0");
91 /*
92 * decompose the padding into 1/2/4 bytes slots.
93 * to satisfy alignment constraints.
94 */
95 for (size_t i = 0; i < paddingSlotsLength; ++i) {
96 if (size & (1u << i)) {
97 paddingSlots[i].push_front(offset);
98 offset += (1u << i);
99 }
100 }
101 }
102
AddNewTypeAfterBecommon(uint32 oldTypeTableSize,uint32 newTypeTableSize)103 void BECommon::AddNewTypeAfterBecommon(uint32 oldTypeTableSize, uint32 newTypeTableSize)
104 {
105 for (auto i = oldTypeTableSize; i < newTypeTableSize; ++i) {
106 MIRType *ty = GlobalTables::GetTypeTable().GetTypeFromTyIdx(i);
107 CHECK_NULL_FATAL(ty);
108 typeSizeTable.emplace_back(0);
109 typeAlignTable.emplace_back(static_cast<uint8>(mirModule.IsCModule()));
110 typeHasFlexibleArray.emplace_back(0);
111 structFieldCountTable.emplace_back(0);
112 ComputeTypeSizesAligns(*ty);
113 }
114 }
115
ComputeStructTypeSizesAligns(MIRType & ty,const TyIdx & tyIdx)116 void BECommon::ComputeStructTypeSizesAligns(MIRType &ty, const TyIdx &tyIdx)
117 {
118 auto &structType = static_cast<MIRStructType &>(ty);
119 const FieldVector &fields = structType.GetFields();
120 uint64 allocedSize = 0;
121 uint64 allocedSizeInBits = 0;
122 SetStructFieldCount(structType.GetTypeIndex(), fields.size());
123 if (fields.size() == 0) {
124 if (structType.IsCPlusPlus()) {
125 SetTypeSize(tyIdx.GetIdx(), 1); /* empty struct in C++ has size 1 */
126 SetTypeAlign(tyIdx.GetIdx(), 1);
127 } else {
128 SetTypeSize(tyIdx.GetIdx(), 0);
129 SetTypeAlign(tyIdx.GetIdx(), k8ByteSize);
130 }
131 return;
132 }
133 auto structAttr = structType.GetTypeAttrs();
134 auto structPack = static_cast<uint8>(structAttr.GetPack());
135 for (uint32 j = 0; j < fields.size(); ++j) {
136 TyIdx fieldTyIdx = fields[j].second.first;
137 auto fieldAttr = fields[j].second.second;
138 MIRType *fieldType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(fieldTyIdx);
139 uint32 fieldTypeSize = GetTypeSize(fieldTyIdx);
140 if (fieldTypeSize == 0) {
141 ComputeTypeSizesAligns(*fieldType);
142 fieldTypeSize = GetTypeSize(fieldTyIdx);
143 }
144 uint64 fieldSizeBits = fieldTypeSize * kBitsPerByte;
145 auto attrAlign = static_cast<uint8>(fieldAttr.GetAlign());
146 auto originAlign = std::max(attrAlign, GetTypeAlign(fieldTyIdx));
147 uint8 fieldAlign = fieldAttr.IsPacked() ? 1 : std::min(originAlign, structPack);
148 uint64 fieldAlignBits = fieldAlign * kBitsPerByte;
149 CHECK_FATAL(fieldAlign != 0, "expect fieldAlign not equal 0");
150 MIRStructType *subStructType = fieldType->EmbeddedStructType();
151 if (subStructType != nullptr) {
152 AppendStructFieldCount(structType.GetTypeIndex(), GetStructFieldCount(subStructType->GetTypeIndex()));
153 }
154 if (structType.GetKind() != kTypeUnion) {
155 if (fieldType->GetKind() == kTypeBitField) {
156 uint32 fieldSize = static_cast<MIRBitFieldType *>(fieldType)->GetFieldSize();
157 /* is this field is crossing the align boundary of its base type? */
158 if ((!structAttr.IsPacked() &&
159 ((allocedSizeInBits / fieldSizeBits) != ((allocedSizeInBits + fieldSize - 1u) / fieldSizeBits))) ||
160 fieldSize == 0) {
161 allocedSizeInBits = RoundUp(allocedSizeInBits, fieldSizeBits);
162 }
163 /* allocate the bitfield */
164 allocedSizeInBits += fieldSize;
165 allocedSize = std::max(allocedSize, RoundUp(allocedSizeInBits, fieldAlignBits) / kBitsPerByte);
166 } else {
167 bool leftoverbits = false;
168
169 if (allocedSizeInBits == allocedSize * kBitsPerByte) {
170 allocedSize = RoundUp(allocedSize, fieldAlign);
171 } else {
172 /* still some leftover bits on allocated words, we calculate things based on bits then. */
173 if (allocedSizeInBits / fieldAlignBits !=
174 (allocedSizeInBits + fieldSizeBits - 1) / fieldAlignBits) {
175 /* the field is crossing the align boundary of its base type */
176 allocedSizeInBits = RoundUp(allocedSizeInBits, fieldAlignBits);
177 }
178 leftoverbits = true;
179 }
180 if (leftoverbits) {
181 allocedSizeInBits += fieldSizeBits;
182 allocedSize = std::max(allocedSize, RoundUp(allocedSizeInBits, fieldAlignBits) / kBitsPerByte);
183 } else {
184 /* pad alloced_size according to the field alignment */
185 allocedSize = RoundUp(allocedSize, fieldAlign);
186 allocedSize += fieldTypeSize;
187 allocedSizeInBits = allocedSize * kBitsPerByte;
188 }
189 }
190 } else { /* for unions, bitfields are treated as non-bitfields */
191 allocedSize = std::max(allocedSize, static_cast<uint64>(fieldTypeSize));
192 }
193 SetTypeAlign(tyIdx, std::max(GetTypeAlign(tyIdx), fieldAlign));
194 /* C99
195 * Last struct element of a struct with more than one member
196 * is a flexible array if it is an array of size 0.
197 */
198 if ((j != 0) && ((j + 1) == fields.size()) && (fieldType->GetKind() == kTypeArray) &&
199 (GetTypeSize(fieldTyIdx.GetIdx()) == 0)) {
200 SetHasFlexibleArray(tyIdx.GetIdx(), true);
201 }
202 }
203 SetTypeSize(tyIdx, RoundUp(allocedSize, GetTypeAlign(tyIdx.GetIdx())));
204 }
205
ComputeClassTypeSizesAligns(MIRType & ty,const TyIdx & tyIdx,uint8 align)206 void BECommon::ComputeClassTypeSizesAligns(MIRType &ty, const TyIdx &tyIdx, uint8 align)
207 {
208 uint64 allocedSize = 0;
209 const FieldVector &fields = static_cast<MIRStructType &>(ty).GetFields();
210
211 auto &classType = static_cast<MIRClassType &>(ty);
212 TyIdx prntTyIdx = classType.GetParentTyIdx();
213 /* process parent class */
214 if (prntTyIdx != 0u) {
215 MIRClassType *parentType =
216 static_cast<MIRClassType *>(GlobalTables::GetTypeTable().GetTypeFromTyIdx(prntTyIdx));
217 uint32 prntSize = GetTypeSize(prntTyIdx);
218 if (prntSize == 0) {
219 ComputeTypeSizesAligns(*parentType);
220 prntSize = GetTypeSize(prntTyIdx);
221 }
222 uint8 prntAlign = GetTypeAlign(prntTyIdx);
223 AppendStructFieldCount(tyIdx, GetStructFieldCount(prntTyIdx) + 1);
224 /* pad alloced_size according to the field alignment */
225 allocedSize = RoundUp(allocedSize, prntAlign);
226
227 JClassLayout *layout = mirModule.GetMemPool()->New<JClassLayout>(mirModule.GetMPAllocator().Adapter());
228 /* add parent's record to the front */
229 layout->emplace_back(JClassFieldInfo(false, false, false, allocedSize));
230 /* copy parent's layout plan into my plan */
231 if (HasJClassLayout(*parentType)) { /* parent may have incomplete type definition. */
232 const JClassLayout &parentLayout = GetJClassLayout(*parentType);
233 layout->insert(layout->end(), parentLayout.begin(), parentLayout.end());
234 allocedSize += prntSize;
235 SetTypeAlign(tyIdx, std::max(GetTypeAlign(tyIdx), prntAlign));
236 } else {
237 LogInfo::MapleLogger() << "Warning:try to layout class with incomplete type:" << parentType->GetName()
238 << "\n";
239 }
240 jClassLayoutTable[&classType] = layout;
241 } else {
242 /* This is the root class, say, The Object */
243 jClassLayoutTable[&classType] = mirModule.GetMemPool()->New<JClassLayout>(mirModule.GetMPAllocator().Adapter());
244 }
245
246 /*
247 * a list of un-occupied (small size) slots available for insertion
248 * so far, just for 1, 2, 4 bytes types (map to array index 0, 1, 2)
249 */
250 std::list<uint32> paddingSlots[3]; // padding slots are just 3 types for size 1/2/4 bytes
251 /* process fields */
252 AppendStructFieldCount(tyIdx, fields.size());
253 if (fields.size() == 0 && mirModule.IsCModule()) {
254 SetTypeAlign(tyIdx.GetIdx(), 1);
255 SetTypeSize(tyIdx.GetIdx(), 1);
256 return;
257 }
258 for (uint32 j = 0; j < fields.size(); ++j) {
259 TyIdx fieldTyIdx = fields[j].second.first;
260 MIRType *fieldType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(fieldTyIdx);
261 FieldAttrs fieldAttr = fields[j].second.second;
262 uint32 fieldSize = GetTypeSize(fieldTyIdx);
263 if (fieldSize == 0) {
264 ComputeTypeSizesAligns(*fieldType);
265 fieldSize = GetTypeSize(fieldTyIdx);
266 }
267 uint8 fieldAlign = GetTypeAlign(fieldTyIdx);
268
269 if ((fieldType->GetKind() == kTypePointer) && (fieldType->GetPrimType() == PTY_a64)) {
270 /* handle class reference field */
271 fieldSize = static_cast<uint32>(RTSupport::GetRTSupportInstance().GetFieldSize());
272 fieldAlign = RTSupport::GetRTSupportInstance().GetFieldAlign();
273 }
274
275 /* try to alloc the field in one of previously created padding slots */
276 uint32 currentFieldOffset =
277 TryAllocInPaddingSlots(paddingSlots, fieldSize, fieldAlign, sizeof(paddingSlots) / sizeof(paddingSlots[0]));
278 /* cannot reuse one padding slot. layout to current end */
279 if (currentFieldOffset == 0) {
280 /* pad alloced_size according to the field alignment */
281 currentFieldOffset = RoundUp(allocedSize, fieldAlign);
282 if (currentFieldOffset != allocedSize) {
283 /* rounded up, create one padding-slot */
284 uint32 paddingSize = currentFieldOffset - allocedSize;
285 AddPaddingSlot(paddingSlots, allocedSize, paddingSize, sizeof(paddingSlots) / sizeof(paddingSlots[0]));
286 allocedSize = currentFieldOffset;
287 }
288 /* need new memory for this field */
289 allocedSize += fieldSize;
290 }
291 AddElementToJClassLayout(classType, JClassFieldInfo(fieldType->GetKind() == kTypePointer,
292 fieldAttr.GetAttr(FLDATTR_rcunowned),
293 fieldAttr.GetAttr(FLDATTR_rcweak), currentFieldOffset));
294 SetTypeAlign(tyIdx, std::max(GetTypeAlign(tyIdx), fieldAlign));
295 }
296 SetTypeSize(tyIdx, RoundUp(allocedSize, align));
297 }
298
ComputeArrayTypeSizesAligns(MIRType & ty,const TyIdx & tyIdx)299 void BECommon::ComputeArrayTypeSizesAligns(MIRType &ty, const TyIdx &tyIdx)
300 {
301 MIRArrayType &arrayType = static_cast<MIRArrayType &>(ty);
302 MIRType *elemType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(arrayType.GetElemTyIdx());
303 uint32 elemSize = GetTypeSize(elemType->GetTypeIndex());
304 if (elemSize == 0) {
305 ComputeTypeSizesAligns(*elemType);
306 elemSize = GetTypeSize(elemType->GetTypeIndex());
307 }
308 if (!mirModule.IsCModule()) {
309 CHECK_FATAL(elemSize != 0, "elemSize should not equal 0");
310 CHECK_FATAL(elemType->GetTypeIndex() != 0u, "elemType's idx should not equal 0");
311 }
312 uint32 arrayAlign = arrayType.GetTypeAttrs().GetAlign();
313 elemSize = std::max(elemSize, static_cast<uint32>(GetTypeAlign(elemType->GetTypeIndex())));
314 elemSize = std::max(elemSize, arrayAlign);
315 /* compute total number of elements from the multipel dimensions */
316 uint64 numElems = 1;
317 for (int d = 0; d < arrayType.GetDim(); ++d) {
318 numElems *= arrayType.GetSizeArrayItem(d);
319 }
320 auto typeSize = elemSize * numElems;
321 SetTypeSize(tyIdx, typeSize);
322 if (typeSize == 0) {
323 SetTypeAlign(tyIdx, static_cast<uint8>(arrayAlign));
324 } else {
325 auto maxAlign = std::max(static_cast<uint32>(GetTypeAlign(elemType->GetTypeIndex())), arrayAlign);
326 SetTypeAlign(tyIdx, static_cast<uint8>(maxAlign));
327 }
328 }
329
ComputeFArrayOrJArrayTypeSizesAligns(MIRType & ty,const TyIdx & tyIdx)330 void BECommon::ComputeFArrayOrJArrayTypeSizesAligns(MIRType &ty, const TyIdx &tyIdx)
331 {
332 MIRFarrayType &arrayType = static_cast<MIRFarrayType &>(ty);
333 MIRType *elemType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(arrayType.GetElemTyIdx());
334 uint32 elemSize = GetTypeSize(elemType->GetTypeIndex());
335 if (elemSize == 0) {
336 ComputeTypeSizesAligns(*elemType);
337 elemSize = GetTypeSize(elemType->GetTypeIndex());
338 }
339 CHECK_FATAL(elemSize != 0, "elemSize should not equal 0");
340 CHECK_FATAL(GetTypeAlign(elemType->GetTypeIndex()) != 0u, "GetTypeAlign return 0 is not expected");
341 elemSize = std::max(elemSize, static_cast<uint32>(GetTypeAlign(elemType->GetTypeIndex())));
342 SetTypeSize(tyIdx, 0);
343 SetTypeAlign(tyIdx, GetTypeAlign(elemType->GetTypeIndex()));
344 }
345
ComputeTypeSizesAligns(MIRType & ty,uint8 align)346 void BECommon::ComputeTypeSizesAligns(MIRType &ty, uint8 align)
347 {
348 TyIdx tyIdx = ty.GetTypeIndex();
349 if ((structFieldCountTable.size() > tyIdx) && (GetStructFieldCount(tyIdx) != 0)) {
350 return; /* processed before */
351 }
352
353 if ((ty.GetPrimType() == PTY_ptr) || (ty.GetPrimType() == PTY_ref)) {
354 ty.SetPrimType(GetLoweredPtrType());
355 }
356
357 switch (ty.GetKind()) {
358 case kTypeScalar:
359 case kTypePointer:
360 case kTypeBitField:
361 case kTypeFunction:
362 SetTypeSize(tyIdx, GetPrimTypeSize(ty.GetPrimType()));
363 SetTypeAlign(tyIdx, GetTypeSize(tyIdx));
364 break;
365 case kTypeArray: {
366 ComputeArrayTypeSizesAligns(ty, tyIdx);
367 break;
368 }
369 case kTypeFArray:
370 case kTypeJArray: {
371 ComputeFArrayOrJArrayTypeSizesAligns(ty, tyIdx);
372 break;
373 }
374 case kTypeUnion:
375 case kTypeStruct: {
376 ComputeStructTypeSizesAligns(ty, tyIdx);
377 break;
378 }
379 case kTypeInterface: { /* interface shouldn't have instance fields */
380 SetTypeAlign(tyIdx, 0);
381 SetTypeSize(tyIdx, 0);
382 SetStructFieldCount(tyIdx, 0);
383 break;
384 }
385 case kTypeClass: { /* cannot have union or bitfields */
386 ComputeClassTypeSizesAligns(ty, tyIdx, align);
387 break;
388 }
389 case kTypeByName:
390 case kTypeVoid:
391 default:
392 SetTypeSize(tyIdx, 0);
393 break;
394 }
395 /* there may be passed-in align attribute declared with the symbol */
396 SetTypeAlign(tyIdx, std::max(GetTypeAlign(tyIdx), align));
397 }
398
IsRefField(MIRStructType & structType,FieldID fieldID) const399 bool BECommon::IsRefField(MIRStructType &structType, FieldID fieldID) const
400 {
401 if (structType.GetKind() == kTypeClass) {
402 CHECK_FATAL(HasJClassLayout(static_cast<MIRClassType &>(structType)),
403 "Cannot found jclass layout information");
404 const JClassLayout &layout = GetJClassLayout(static_cast<MIRClassType &>(structType));
405 if (layout.empty()) {
406 ERR(kLncErr, "layout is null in BECommon::IsRefField");
407 return false;
408 }
409 return layout[fieldID - 1].IsRef();
410 }
411 return false;
412 }
413
GenFieldOffsetMap(const std::string & className)414 void BECommon::GenFieldOffsetMap(const std::string &className)
415 {
416 MIRType *type = GlobalTables::GetTypeTable().GetOrCreateClassType(className, mirModule);
417 CHECK_FATAL(type != nullptr, "unknown class, type should not be nullptr");
418 MIRClassType *classType = static_cast<MIRClassType *>(type);
419 for (FieldID i = 1; i <= GetStructFieldCount(classType->GetTypeIndex()); ++i) {
420 FieldID fieldID = i;
421 FieldPair fp = classType->TraverseToFieldRef(fieldID);
422 GStrIdx strIdx = fp.first;
423 if (strIdx == 0u) {
424 continue;
425 }
426
427 const std::string &fieldName = GlobalTables::GetStrTable().GetStringFromStrIdx(strIdx);
428
429 TyIdx fieldTyIdx = fp.second.first;
430 uint64 fieldSize = GetTypeSize(fieldTyIdx);
431 MIRType *fieldType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(fieldTyIdx);
432
433 if ((fieldType->GetKind() == kTypePointer) && (fieldType->GetPrimType() == PTY_a64)) {
434 /* handle class reference field */
435 fieldSize = RTSupport::GetRTSupportInstance().GetFieldSize();
436 }
437
438 std::pair<int32, int32> p = GetFieldOffset(*classType, i);
439 CHECK_FATAL(p.second == 0, "expect p.second equals 0");
440 LogInfo::MapleLogger() << "CLASS_FIELD_OFFSET_MAP(" << className.c_str() << "," << fieldName.c_str() << ","
441 << p.first << "," << fieldSize << ")\n";
442 }
443 }
444
GenFieldOffsetMap(MIRClassType & classType,FILE & outFile)445 void BECommon::GenFieldOffsetMap(MIRClassType &classType, FILE &outFile)
446 {
447 const std::string &className = classType.GetName();
448
449 /*
450 * We only enumerate fields defined in the current class. There are cases
451 * where a parent classes may define private fields that have the same name as
452 * a field in the current class.This table is generated for the convenience of
453 * C programmers. If the C programmer wants to access parent class fields,
454 * the programmer should access them as `Parent.field`.
455 */
456 FieldID myEnd = structFieldCountTable.at(classType.GetTypeIndex());
457 FieldID myBegin = (myEnd - static_cast<FieldID>(classType.GetFieldsSize())) + 1;
458
459 for (FieldID i = myBegin; i <= myEnd; ++i) {
460 FieldID fieldID = i;
461 FieldPair fp = classType.TraverseToFieldRef(fieldID);
462 GStrIdx strIdx = fp.first;
463 if (strIdx == 0u) {
464 continue;
465 }
466 FieldAttrs attrs = fp.second.second;
467 if (attrs.GetAttr(FLDATTR_static)) {
468 continue;
469 }
470
471 const std::string &fieldName = GlobalTables::GetStrTable().GetStringFromStrIdx(strIdx);
472
473 TyIdx fieldTyIdx = fp.second.first;
474 uint64 fieldSize = GetTypeSize(fieldTyIdx);
475 MIRType *fieldType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(fieldTyIdx);
476
477 if ((fieldType->GetKind() == kTypePointer) && (fieldType->GetPrimType() == PTY_a64)) {
478 /* handle class reference field */
479 fieldSize = RTSupport::GetRTSupportInstance().GetFieldSize();
480 ;
481 }
482
483 std::pair<int32, int32> p = GetFieldOffset(classType, i);
484 CHECK_FATAL(p.second == 0, "expect p.second equals 0");
485 (void)fprintf(&outFile, "__MRT_CLASS_FIELD(%s, %s, %d, %lu)\n", className.c_str(), fieldName.c_str(), p.first,
486 static_cast<unsigned long>(fieldSize));
487 }
488 }
489
GenObjSize(const MIRClassType & classType,FILE & outFile)490 void BECommon::GenObjSize(const MIRClassType &classType, FILE &outFile)
491 {
492 const std::string &className = classType.GetName();
493 uint64_t objSize = GetTypeSize(classType.GetTypeIndex());
494 if (objSize == 0) {
495 return;
496 }
497
498 TyIdx parentTypeIdx = classType.GetParentTyIdx();
499 MIRType *parentType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(parentTypeIdx);
500 const char *parentName = nullptr;
501 if (parentType != nullptr) {
502 MIRClassType *parentClass = static_cast<MIRClassType *>(parentType);
503 parentName = parentClass->GetName().c_str();
504 } else {
505 parentName = "THIS_IS_ROOT";
506 }
507 fprintf(&outFile, "__MRT_CLASS(%s, %" PRIu64 ", %s)\n", className.c_str(), objSize, parentName);
508 }
509
GetJClassFieldOffset(MIRStructType & classType,FieldID fieldID) const510 FieldInfo BECommon::GetJClassFieldOffset(MIRStructType &classType, FieldID fieldID) const
511 {
512 CHECK_FATAL(fieldID <= GetStructFieldCount(classType.GetTypeIndex()), "GetFieldOFfset: fieldID too large");
513 if (fieldID == 0) {
514 return {0, 0};
515 }
516 CHECK_FATAL(HasJClassLayout(static_cast<MIRClassType &>(classType)), "Cannot found jclass layout information");
517 const JClassLayout &layout = GetJClassLayout(static_cast<MIRClassType &>(classType));
518 CHECK_FATAL(static_cast<uint32>(fieldID) - 1 < layout.size(), "subscript out of range");
519 return {static_cast<uint32>(layout[static_cast<unsigned long>(fieldID) - 1].GetOffset()), 0};
520 }
521
522 /*
523 * compute the offset of the field given by fieldID within the structure type
524 * structy; it returns the answer in the pair (byteoffset, bitoffset) such that
525 * if it is a bitfield, byteoffset gives the offset of the container for
526 * extracting the bitfield and bitoffset is with respect to the container
527 */
GetFieldOffset(MIRStructType & structType,FieldID fieldID)528 std::pair<int32, int32> BECommon::GetFieldOffset(MIRStructType &structType, FieldID fieldID)
529 {
530 CHECK_FATAL(fieldID <= GetStructFieldCount(structType.GetTypeIndex()), "GetFieldOFfset: fieldID too large");
531 uint64 allocedSize = 0;
532 uint64 allocedSizeInBits = 0;
533 FieldID curFieldID = 1;
534 if (fieldID == 0) {
535 return std::pair<int32, int32>(0, 0);
536 }
537
538 if (structType.GetKind() == kTypeClass) {
539 CHECK_FATAL(HasJClassLayout(static_cast<MIRClassType &>(structType)),
540 "Cannot found jclass layout information");
541 const JClassLayout &layout = GetJClassLayout(static_cast<MIRClassType &>(structType));
542 CHECK_FATAL(static_cast<uint32>(fieldID) - 1 < layout.size(), "subscript out of range");
543 return std::pair<int32, int32>(static_cast<int32>(layout[fieldID - 1].GetOffset()), 0);
544 }
545
546 /* process the struct fields */
547 FieldVector fields = structType.GetFields();
548 auto structPack = static_cast<uint8>(structType.GetTypeAttrs().GetPack());
549 for (uint32 j = 0; j < fields.size(); ++j) {
550 TyIdx fieldTyIdx = fields[j].second.first;
551 auto fieldAttr = fields[j].second.second;
552 MIRType *fieldType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(fieldTyIdx);
553 uint32 fieldTypeSize = GetTypeSize(fieldTyIdx);
554 uint64 fieldSizeBits = fieldTypeSize * kBitsPerByte;
555 auto originAlign = GetTypeAlign(fieldTyIdx);
556 auto fieldAlign = fieldAttr.IsPacked() ? 1 : std::min(originAlign, structPack);
557 uint64 fieldAlignBits = static_cast<uint64>(fieldAlign * kBitsPerByte);
558 CHECK_FATAL(fieldAlign != 0, "fieldAlign should not equal 0");
559 if (structType.GetKind() != kTypeUnion) {
560 if (fieldType->GetKind() == kTypeBitField) {
561 uint32 fieldSize = static_cast<MIRBitFieldType *>(fieldType)->GetFieldSize();
562 /*
563 * Is this field is crossing the align boundary of its base type? Or,
564 * is field a zero-with bit field?
565 * Refer to C99 standard (§6.7.2.1) :
566 * > As a special case, a bit-field structure member with a width of 0 indicates that no further
567 * > bit-field is to be packed into the unit in which the previous bit-field, if any, was placed.
568 *
569 * We know that A zero-width bit field can cause the next field to be aligned on the next container
570 * boundary where the container is the same size as the underlying type of the bit field.
571 */
572 CHECK_FATAL(allocedSizeInBits <= UINT64_MAX - fieldSize, "must not be zero");
573 DEBUG_ASSERT(allocedSizeInBits + fieldSize >= 1, "allocedSizeInBits + fieldSize - 1u must be unsigned");
574 if ((!structType.GetTypeAttrs().IsPacked() &&
575 ((allocedSizeInBits / fieldSizeBits) != ((allocedSizeInBits + fieldSize - 1u) / fieldSizeBits))) ||
576 fieldSize == 0) {
577 /*
578 * the field is crossing the align boundary of its base type;
579 * align alloced_size_in_bits to fieldAlign
580 */
581 allocedSizeInBits = RoundUp(allocedSizeInBits, fieldSizeBits);
582 }
583 /* allocate the bitfield */
584 if (curFieldID == fieldID) {
585 return std::pair<int32, int32>((allocedSizeInBits / fieldAlignBits) * fieldAlign,
586 allocedSizeInBits % fieldAlignBits);
587 } else {
588 ++curFieldID;
589 }
590 allocedSizeInBits += fieldSize;
591 allocedSize = std::max(allocedSize, RoundUp(allocedSizeInBits, fieldAlignBits) / kBitsPerByte);
592 } else {
593 bool leftOverBits = false;
594 uint64 offset = 0;
595
596 if (allocedSizeInBits == allocedSize * k8BitSize) {
597 allocedSize = RoundUp(allocedSize, fieldAlign);
598 offset = allocedSize;
599 } else {
600 /* still some leftover bits on allocated words, we calculate things based on bits then. */
601 if (allocedSizeInBits / fieldAlignBits !=
602 (allocedSizeInBits + fieldSizeBits - k1BitSize) / fieldAlignBits) {
603 /* the field is crossing the align boundary of its base type */
604 allocedSizeInBits = RoundUp(allocedSizeInBits, fieldAlignBits);
605 }
606 allocedSize = RoundUp(allocedSize, fieldAlign);
607 offset = static_cast<uint64>((allocedSizeInBits / fieldAlignBits) * fieldAlign);
608 leftOverBits = true;
609 }
610
611 if (curFieldID == fieldID) {
612 return std::pair<int32, int32>(offset, 0);
613 } else {
614 MIRStructType *subStructType = fieldType->EmbeddedStructType();
615 if (subStructType == nullptr) {
616 ++curFieldID;
617 } else {
618 if ((curFieldID + GetStructFieldCount(subStructType->GetTypeIndex())) < fieldID) {
619 curFieldID += GetStructFieldCount(subStructType->GetTypeIndex()) + 1;
620 } else {
621 std::pair<int32, int32> result = GetFieldOffset(*subStructType, fieldID - curFieldID);
622 return std::pair<int32, int32>(result.first + allocedSize, result.second);
623 }
624 }
625 }
626
627 if (leftOverBits) {
628 allocedSizeInBits += fieldSizeBits;
629 allocedSize = std::max(allocedSize, RoundUp(allocedSizeInBits, fieldAlignBits) / kBitsPerByte);
630 } else {
631 allocedSize += fieldTypeSize;
632 allocedSizeInBits = allocedSize * kBitsPerByte;
633 }
634 }
635 } else { /* for unions, bitfields are treated as non-bitfields */
636 if (curFieldID == fieldID) {
637 return std::pair<int32, int32>(0, 0);
638 } else {
639 MIRStructType *subStructType = fieldType->EmbeddedStructType();
640 if (subStructType == nullptr) {
641 curFieldID++;
642 } else {
643 if ((curFieldID + GetStructFieldCount(subStructType->GetTypeIndex())) < fieldID) {
644 curFieldID += GetStructFieldCount(subStructType->GetTypeIndex()) + 1;
645 } else {
646 return GetFieldOffset(*subStructType, fieldID - curFieldID);
647 }
648 }
649 }
650 }
651 }
652 CHECK_FATAL(false, "GetFieldOffset() fails to find field");
653 return std::pair<int32, int32>(0, 0);
654 }
655
TyIsInSizeAlignTable(const MIRType & ty) const656 bool BECommon::TyIsInSizeAlignTable(const MIRType &ty) const
657 {
658 if (typeSizeTable.size() != typeAlignTable.size()) {
659 return false;
660 }
661 return ty.GetTypeIndex() < typeSizeTable.size();
662 }
663
AddAndComputeSizeAlign(MIRType & ty)664 void BECommon::AddAndComputeSizeAlign(MIRType &ty)
665 {
666 FinalizeTypeTable(ty);
667 typeAlignTable.emplace_back(mirModule.IsCModule());
668 typeSizeTable.emplace_back(0);
669 ComputeTypeSizesAligns(ty);
670 }
671
AddElementToJClassLayout(MIRClassType & klass,JClassFieldInfo info)672 void BECommon::AddElementToJClassLayout(MIRClassType &klass, JClassFieldInfo info)
673 {
674 JClassLayout &layout = *(jClassLayoutTable.at(&klass));
675 layout.emplace_back(info);
676 }
677
AddElementToFuncReturnType(MIRFunction & func,const TyIdx tyIdx)678 void BECommon::AddElementToFuncReturnType(MIRFunction &func, const TyIdx tyIdx)
679 {
680 funcReturnType[&func] = tyIdx;
681 }
682
/*
 * Get or create the pointer type to pointedType, ensuring the backend
 * size/align tables also cover the (possibly newly created) type.
 */
MIRType *BECommon::BeGetOrCreatePointerType(const MIRType &pointedType)
{
    MIRType *ptrType = GlobalTables::GetTypeTable().GetOrCreatePointerType(pointedType, GetLoweredPtrType());
    if (!TyIsInSizeAlignTable(*ptrType)) {
        AddAndComputeSizeAlign(*ptrType);
    }
    return ptrType;
}
692
BeGetOrCreateFunctionType(TyIdx tyIdx,const std::vector<TyIdx> & vecTy,const std::vector<TypeAttrs> & vecAt)693 MIRType *BECommon::BeGetOrCreateFunctionType(TyIdx tyIdx, const std::vector<TyIdx> &vecTy,
694 const std::vector<TypeAttrs> &vecAt)
695 {
696 MIRType *newType = GlobalTables::GetTypeTable().GetOrCreateFunctionType(tyIdx, vecTy, vecAt);
697 if (TyIsInSizeAlignTable(*newType)) {
698 return newType;
699 }
700 AddAndComputeSizeAlign(*newType);
701 return newType;
702 }
703
FinalizeTypeTable(const MIRType & ty)704 void BECommon::FinalizeTypeTable(const MIRType &ty)
705 {
706 if (ty.GetTypeIndex() > GetSizeOfTypeSizeTable()) {
707 if (mirModule.GetSrcLang() == kSrcLangC) {
708 for (uint32 i = GetSizeOfTypeSizeTable(); i < ty.GetTypeIndex(); ++i) {
709 MIRType *tyTmp = GlobalTables::GetTypeTable().GetTypeFromTyIdx(i);
710 AddAndComputeSizeAlign(*tyTmp);
711 }
712 } else {
713 CHECK_FATAL(ty.GetTypeIndex() == typeSizeTable.size(), "make sure the ty idx is exactly the table size");
714 }
715 }
716 }
717
/*
 * Build an expression for the address denoted by node: an addrof for a
 * dread, or base + field-offset arithmetic for an iread with a field ID.
 * Returns nullptr for opcodes whose address cannot be formed here.
 */
BaseNode *BECommon::GetAddressOfNode(const BaseNode &node)
{
    switch (node.GetOpCode()) {
        case OP_dread: {
            const DreadNode &dread = static_cast<const DreadNode &>(node);
            const StIdx &stIdx = dread.GetStIdx();
            DEBUG_ASSERT(mirModule.CurFunction() != nullptr, "curFunction should not be nullptr");
            DEBUG_ASSERT(mirModule.CurFunction()->GetLocalOrGlobalSymbol(stIdx) != nullptr, "nullptr check");
            return mirModule.GetMIRBuilder()->CreateAddrof(*mirModule.CurFunction()->GetLocalOrGlobalSymbol(stIdx));
        }
        case OP_iread: {
            const IreadNode &iread = static_cast<const IreadNode &>(node);
            if (iread.GetFieldID() == 0) {
                /* no field selection: the base operand already is the address */
                return iread.Opnd(0);
            }

            uint32 pointedTyIdx =
                static_cast<MIRPtrType *>(GlobalTables::GetTypeTable().GetTypeTable().at(iread.GetTyIdx()))
                    ->GetPointedTyIdx();
            MIRType *pointedType = GlobalTables::GetTypeTable().GetTypeTable().at(pointedTyIdx);
            std::pair<int32, int32> byteBitOffset =
                GetFieldOffset(static_cast<MIRStructType &>(*pointedType), iread.GetFieldID());
            /* address = base pointer + byte offset of the field */
            return mirModule.GetMIRBuilder()->CreateExprBinary(
                OP_add, *GlobalTables::GetTypeTable().GetPrimType(GetAddressPrimType()),
                static_cast<BaseNode *>(iread.Opnd(0)),
                mirModule.GetMIRBuilder()->CreateIntConst(byteBitOffset.first, PTY_u32));
        }
        default:
            return nullptr;
    }
}
748
CallIsOfAttr(FuncAttrKind attr,const StmtNode * narynode) const749 bool BECommon::CallIsOfAttr(FuncAttrKind attr, const StmtNode *narynode) const
750 {
751 (void)attr;
752 (void)narynode;
753 return false;
754
755 /* For now, all 64x1_t types object are not propagated to become pregs by mplme, so the following
756 is not needed for now. We need to revisit this later when types are enhanced with attributes */
757 #if TO_BE_RESURRECTED
758 bool attrFunc = false;
759 if (narynode->GetOpCode() == OP_call) {
760 CallNode *callNode = static_cast<CallNode *>(narynode);
761 MIRFunction *func = GlobalTables::GetFunctionTable().GetFunctionFromPuidx(callNode->GetPUIdx());
762 attrFunc = (mirModule.GetSrcLang() == kSrcLangC && func->GetAttr(attr)) ? true : false;
763 } else if (narynode->GetOpCode() == OP_icall) {
764 IcallNode *icallNode = static_cast<IcallNode *>(narynode);
765 BaseNode *fNode = icallNode->Opnd(0);
766 MIRFuncType *fType = nullptr;
767 MIRPtrType *pType = nullptr;
768 if (fNode->GetOpCode() == OP_dread) {
769 DreadNode *dNode = static_cast<DreadNode *>(fNode);
770 MIRSymbol *symbol = mirModule.CurFunction()->GetLocalOrGlobalSymbol(dNode->GetStIdx());
771 pType = static_cast<MIRPtrType *>(symbol->GetType());
772 MIRType *ty = pType;
773 if (dNode->GetFieldID() != 0) {
774 DEBUG_ASSERT(ty->GetKind() == kTypeStruct || ty->GetKind() == kTypeClass, "");
775 FieldPair thepair;
776 if (ty->GetKind() == kTypeStruct) {
777 thepair = static_cast<MIRStructType *>(ty)->TraverseToField(dNode->GetFieldID());
778 } else {
779 thepair = static_cast<MIRClassType *>(ty)->TraverseToField(dNode->GetFieldID());
780 }
781 pType = static_cast<MIRPtrType *>(GlobalTables::GetTypeTable().GetTypeFromTyIdx(thepair.second.first));
782 }
783 fType = static_cast<MIRFuncType *>(pType->GetPointedType());
784 } else if (fNode->GetOpCode() == OP_iread) {
785 IreadNode *iNode = static_cast<IreadNode *>(fNode);
786 MIRPtrType *pointerty =
787 static_cast<MIRPtrType *>(GlobalTables::GetTypeTable().GetTypeFromTyIdx(iNode->GetTyIdx()));
788 MIRType *pointedType = pointerty->GetPointedType();
789 if (iNode->GetFieldID() != 0) {
790 pointedType = static_cast<MIRStructType *>(pointedType)->GetFieldType(iNode->GetFieldID());
791 }
792 if (pointedType->GetKind() == kTypeFunction) {
793 fType = static_cast<MIRFuncType *>(pointedType);
794 } else if (pointedType->GetKind() == kTypePointer) {
795 return false; /* assert? */
796 }
797 } else if (fNode->GetOpCode() == OP_select) {
798 TernaryNode *sNode = static_cast<TernaryNode *>(fNode);
799 BaseNode *expr = sNode->Opnd(1);
800 // both function ptrs under select should have the same signature, chk op1 only
801 AddroffuncNode *afNode = static_cast<AddroffuncNode *>(expr);
802 MIRFunction *func = GlobalTables::GetFunctionTable().GetFunctionFromPuidx(afNode->GetPUIdx());
803 attrFunc = mirModule.GetSrcLang() == kSrcLangC && func->GetAttr(attr);
804 } else if (fNode->GetOpCode() == OP_regread) {
805 RegreadNode *rNode = static_cast<RegreadNode *>(fNode);
806 PregIdx pregidx = rNode->GetRegIdx();
807 MIRPreg *preg = mirModule.CurFunction()->GetPregTab()->PregFromPregIdx(pregidx);
808 MIRType *type = preg->GetMIRType();
809 if (type == nullptr) {
810 return false;
811 }
812 MIRPtrType *pType = static_cast<MIRPtrType *>(type);
813 type = pType->GetPointedType();
814 if (type == nullptr) {
815 return false;
816 }
817 } else if (fNode->GetOpCode() == OP_retype) {
818 RetypeNode *rNode = static_cast<RetypeNode *>(fNode);
819 pType = static_cast<MIRPtrType *>(GlobalTables::GetTypeTable().GetTypeFromTyIdx(rNode->GetTyIdx()));
820 fType = static_cast<MIRFuncType *>(pType->GetPointedType());
821 } else {
822 return false; /* assert? */
823 }
824 }
825 return attrFunc;
826 #endif
827 }
828 } /* namespace maplebe */
829