1 //=== RecordLayoutBuilder.cpp - Helper class for building record layouts ---==//
2 //
3 // The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9
10 #include "clang/AST/RecordLayout.h"
11 #include "clang/AST/ASTContext.h"
12 #include "clang/AST/Attr.h"
13 #include "clang/AST/CXXInheritance.h"
14 #include "clang/AST/Decl.h"
15 #include "clang/AST/DeclCXX.h"
16 #include "clang/AST/DeclObjC.h"
17 #include "clang/AST/Expr.h"
18 #include "clang/Basic/TargetInfo.h"
19 #include "clang/Sema/SemaDiagnostic.h"
20 #include "llvm/ADT/SmallSet.h"
21 #include "llvm/Support/CrashRecoveryContext.h"
22 #include "llvm/Support/Format.h"
23 #include "llvm/Support/MathExtras.h"
24
25 using namespace clang;
26
27 namespace {
28
29 /// BaseSubobjectInfo - Represents a single base subobject in a complete class.
30 /// For a class hierarchy like
31 ///
32 /// class A { };
33 /// class B : A { };
34 /// class C : A, B { };
35 ///
36 /// The BaseSubobjectInfo graph for C will have three BaseSubobjectInfo
37 /// instances, one for B and two for A.
38 ///
39 /// If a base is virtual, it will only have one BaseSubobjectInfo allocated.
40 struct BaseSubobjectInfo {
41 /// Class - The class for this base info.
42 const CXXRecordDecl *Class;
43
44 /// IsVirtual - Whether the BaseInfo represents a virtual base or not.
45 bool IsVirtual;
46
47 /// Bases - Information about the base subobjects.
48 SmallVector<BaseSubobjectInfo*, 4> Bases;
49
50 /// PrimaryVirtualBaseInfo - Holds the base info for the primary virtual base
51 /// of this base info (if one exists).
52 BaseSubobjectInfo *PrimaryVirtualBaseInfo;
53
54   /// Derived - For a virtual base, the BaseSubobjectInfo of the class that
  /// claimed this base as its primary virtual base; null if unclaimed.
55   const BaseSubobjectInfo *Derived;
56 };
57
58 /// EmptySubobjectMap - Keeps track of which empty subobjects exist at different
59 /// offsets while laying out a C++ class.
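///
/// Illustrative example (hypothetical types): given
///
///   struct Empty { };
///   struct A : Empty { Empty e; };
///
/// the Empty base of A sits at offset 0, and the member 'e' (another
/// subobject of the same empty type) must not be given the same offset.
/// This map records which empty classes occupy which offsets so such
/// conflicts can be detected while placing bases and fields.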
60 class EmptySubobjectMap {
61 const ASTContext &Context;
62 uint64_t CharWidth;
63
64 /// Class - The class whose empty entries we're keeping track of.
65 const CXXRecordDecl *Class;
66
67 /// EmptyClassOffsets - A map from offsets to empty record decls.
68 typedef SmallVector<const CXXRecordDecl *, 1> ClassVectorTy;
69 typedef llvm::DenseMap<CharUnits, ClassVectorTy> EmptyClassOffsetsMapTy;
70 EmptyClassOffsetsMapTy EmptyClassOffsets;
71
72   /// MaxEmptyClassOffset - The highest offset known to contain an empty
73   /// class subobject (base or field).
74 CharUnits MaxEmptyClassOffset;
75
76 /// ComputeEmptySubobjectSizes - Compute the size of the largest base or
77 /// member subobject that is empty.
78 void ComputeEmptySubobjectSizes();
79
80 void AddSubobjectAtOffset(const CXXRecordDecl *RD, CharUnits Offset);
81
82 void UpdateEmptyBaseSubobjects(const BaseSubobjectInfo *Info,
83 CharUnits Offset, bool PlacingEmptyBase);
84
85 void UpdateEmptyFieldSubobjects(const CXXRecordDecl *RD,
86 const CXXRecordDecl *Class,
87 CharUnits Offset);
88 void UpdateEmptyFieldSubobjects(const FieldDecl *FD, CharUnits Offset);
89
90   /// AnyEmptySubobjectsBeyondOffset - Returns whether there are any empty
91   /// subobjects at or beyond the given offset.
92   bool AnyEmptySubobjectsBeyondOffset(CharUnits Offset) const {
93 return Offset <= MaxEmptyClassOffset;
94 }
95
96 CharUnits
97   getFieldOffset(const ASTRecordLayout &Layout, unsigned FieldNo) const {
98 uint64_t FieldOffset = Layout.getFieldOffset(FieldNo);
99 assert(FieldOffset % CharWidth == 0 &&
100 "Field offset not at char boundary!");
101
102 return Context.toCharUnitsFromBits(FieldOffset);
103 }
104
105 protected:
106 bool CanPlaceSubobjectAtOffset(const CXXRecordDecl *RD,
107 CharUnits Offset) const;
108
109 bool CanPlaceBaseSubobjectAtOffset(const BaseSubobjectInfo *Info,
110 CharUnits Offset);
111
112 bool CanPlaceFieldSubobjectAtOffset(const CXXRecordDecl *RD,
113 const CXXRecordDecl *Class,
114 CharUnits Offset) const;
115 bool CanPlaceFieldSubobjectAtOffset(const FieldDecl *FD,
116 CharUnits Offset) const;
117
118 public:
119 /// This holds the size of the largest empty subobject (either a base
120 /// or a member). Will be zero if the record being built doesn't contain
121 /// any empty classes.
122 CharUnits SizeOfLargestEmptySubobject;
123
124   EmptySubobjectMap(const ASTContext &Context, const CXXRecordDecl *Class)
125 : Context(Context), CharWidth(Context.getCharWidth()), Class(Class) {
126 ComputeEmptySubobjectSizes();
127 }
128
129 /// CanPlaceBaseAtOffset - Return whether the given base class can be placed
130 /// at the given offset.
131 /// Returns false if placing the record will result in two components
132 /// (direct or indirect) of the same type having the same offset.
133 bool CanPlaceBaseAtOffset(const BaseSubobjectInfo *Info,
134 CharUnits Offset);
135
136 /// CanPlaceFieldAtOffset - Return whether a field can be placed at the given
137 /// offset.
138 bool CanPlaceFieldAtOffset(const FieldDecl *FD, CharUnits Offset);
139 };
140
141 void EmptySubobjectMap::ComputeEmptySubobjectSizes() {
142 // Check the bases.
143 for (CXXRecordDecl::base_class_const_iterator I = Class->bases_begin(),
144 E = Class->bases_end(); I != E; ++I) {
145 const CXXRecordDecl *BaseDecl =
146 cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
147
148 CharUnits EmptySize;
149 const ASTRecordLayout &Layout = Context.getASTRecordLayout(BaseDecl);
150 if (BaseDecl->isEmpty()) {
151 // If the class decl is empty, get its size.
152 EmptySize = Layout.getSize();
153 } else {
154 // Otherwise, we get the largest empty subobject for the decl.
155 EmptySize = Layout.getSizeOfLargestEmptySubobject();
156 }
157
158 if (EmptySize > SizeOfLargestEmptySubobject)
159 SizeOfLargestEmptySubobject = EmptySize;
160 }
161
162 // Check the fields.
163 for (CXXRecordDecl::field_iterator I = Class->field_begin(),
164 E = Class->field_end(); I != E; ++I) {
165
166 const RecordType *RT =
167 Context.getBaseElementType(I->getType())->getAs<RecordType>();
168
169 // We only care about record types.
170 if (!RT)
171 continue;
172
173 CharUnits EmptySize;
174 const CXXRecordDecl *MemberDecl = cast<CXXRecordDecl>(RT->getDecl());
175 const ASTRecordLayout &Layout = Context.getASTRecordLayout(MemberDecl);
176 if (MemberDecl->isEmpty()) {
177 // If the class decl is empty, get its size.
178 EmptySize = Layout.getSize();
179 } else {
180 // Otherwise, we get the largest empty subobject for the decl.
181 EmptySize = Layout.getSizeOfLargestEmptySubobject();
182 }
183
184 if (EmptySize > SizeOfLargestEmptySubobject)
185 SizeOfLargestEmptySubobject = EmptySize;
186 }
187 }
188
189 bool
190 EmptySubobjectMap::CanPlaceSubobjectAtOffset(const CXXRecordDecl *RD,
191 CharUnits Offset) const {
192 // We only need to check empty bases.
193 if (!RD->isEmpty())
194 return true;
195
196 EmptyClassOffsetsMapTy::const_iterator I = EmptyClassOffsets.find(Offset);
197 if (I == EmptyClassOffsets.end())
198 return true;
199
200 const ClassVectorTy& Classes = I->second;
201 if (std::find(Classes.begin(), Classes.end(), RD) == Classes.end())
202 return true;
203
204 // There is already an empty class of the same type at this offset.
205 return false;
206 }
207
208 void EmptySubobjectMap::AddSubobjectAtOffset(const CXXRecordDecl *RD,
209 CharUnits Offset) {
210 // We only care about empty bases.
211 if (!RD->isEmpty())
212 return;
213
214   // If we have empty structures inside a union, we can assign both
215   // the same offset. Just avoid pushing them into the list twice.
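  // (Illustrative, hypothetical types: in 'union U { Empty a; Empty b; };'
  // both members land at offset 0, so the same empty class is offered at the
  // same offset twice.)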
216 ClassVectorTy& Classes = EmptyClassOffsets[Offset];
217 if (std::find(Classes.begin(), Classes.end(), RD) != Classes.end())
218 return;
219
220 Classes.push_back(RD);
221
222 // Update the empty class offset.
223 if (Offset > MaxEmptyClassOffset)
224 MaxEmptyClassOffset = Offset;
225 }
226
227 bool
228 EmptySubobjectMap::CanPlaceBaseSubobjectAtOffset(const BaseSubobjectInfo *Info,
229 CharUnits Offset) {
230 // We don't have to keep looking past the maximum offset that's known to
231 // contain an empty class.
232 if (!AnyEmptySubobjectsBeyondOffset(Offset))
233 return true;
234
235 if (!CanPlaceSubobjectAtOffset(Info->Class, Offset))
236 return false;
237
238 // Traverse all non-virtual bases.
239 const ASTRecordLayout &Layout = Context.getASTRecordLayout(Info->Class);
240 for (unsigned I = 0, E = Info->Bases.size(); I != E; ++I) {
241 BaseSubobjectInfo* Base = Info->Bases[I];
242 if (Base->IsVirtual)
243 continue;
244
245 CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base->Class);
246
247 if (!CanPlaceBaseSubobjectAtOffset(Base, BaseOffset))
248 return false;
249 }
250
251 if (Info->PrimaryVirtualBaseInfo) {
252 BaseSubobjectInfo *PrimaryVirtualBaseInfo = Info->PrimaryVirtualBaseInfo;
253
254 if (Info == PrimaryVirtualBaseInfo->Derived) {
255 if (!CanPlaceBaseSubobjectAtOffset(PrimaryVirtualBaseInfo, Offset))
256 return false;
257 }
258 }
259
260 // Traverse all member variables.
261 unsigned FieldNo = 0;
262 for (CXXRecordDecl::field_iterator I = Info->Class->field_begin(),
263 E = Info->Class->field_end(); I != E; ++I, ++FieldNo) {
264 if (I->isBitField())
265 continue;
266
267 CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);
268 if (!CanPlaceFieldSubobjectAtOffset(*I, FieldOffset))
269 return false;
270 }
271
272 return true;
273 }
274
275 void EmptySubobjectMap::UpdateEmptyBaseSubobjects(const BaseSubobjectInfo *Info,
276 CharUnits Offset,
277 bool PlacingEmptyBase) {
278 if (!PlacingEmptyBase && Offset >= SizeOfLargestEmptySubobject) {
279     // We know that the only empty subobjects that can conflict with the empty
280     // subobjects of non-empty bases are empty bases that can be placed at
281     // offset zero. Because of this, we only need to keep track of empty base
282     // subobjects with offsets less than the size of the largest empty
283     // subobject for our class.
284 return;
285 }
286
287 AddSubobjectAtOffset(Info->Class, Offset);
288
289 // Traverse all non-virtual bases.
290 const ASTRecordLayout &Layout = Context.getASTRecordLayout(Info->Class);
291 for (unsigned I = 0, E = Info->Bases.size(); I != E; ++I) {
292 BaseSubobjectInfo* Base = Info->Bases[I];
293 if (Base->IsVirtual)
294 continue;
295
296 CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base->Class);
297 UpdateEmptyBaseSubobjects(Base, BaseOffset, PlacingEmptyBase);
298 }
299
300 if (Info->PrimaryVirtualBaseInfo) {
301 BaseSubobjectInfo *PrimaryVirtualBaseInfo = Info->PrimaryVirtualBaseInfo;
302
303 if (Info == PrimaryVirtualBaseInfo->Derived)
304 UpdateEmptyBaseSubobjects(PrimaryVirtualBaseInfo, Offset,
305 PlacingEmptyBase);
306 }
307
308 // Traverse all member variables.
309 unsigned FieldNo = 0;
310 for (CXXRecordDecl::field_iterator I = Info->Class->field_begin(),
311 E = Info->Class->field_end(); I != E; ++I, ++FieldNo) {
312 if (I->isBitField())
313 continue;
314
315 CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);
316 UpdateEmptyFieldSubobjects(*I, FieldOffset);
317 }
318 }
319
320 bool EmptySubobjectMap::CanPlaceBaseAtOffset(const BaseSubobjectInfo *Info,
321 CharUnits Offset) {
322 // If we know this class doesn't have any empty subobjects we don't need to
323 // bother checking.
324 if (SizeOfLargestEmptySubobject.isZero())
325 return true;
326
327 if (!CanPlaceBaseSubobjectAtOffset(Info, Offset))
328 return false;
329
330 // We are able to place the base at this offset. Make sure to update the
331 // empty base subobject map.
332 UpdateEmptyBaseSubobjects(Info, Offset, Info->Class->isEmpty());
333 return true;
334 }
335
336 bool
337 EmptySubobjectMap::CanPlaceFieldSubobjectAtOffset(const CXXRecordDecl *RD,
338 const CXXRecordDecl *Class,
339 CharUnits Offset) const {
340 // We don't have to keep looking past the maximum offset that's known to
341 // contain an empty class.
342 if (!AnyEmptySubobjectsBeyondOffset(Offset))
343 return true;
344
345 if (!CanPlaceSubobjectAtOffset(RD, Offset))
346 return false;
347
348 const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
349
350 // Traverse all non-virtual bases.
351 for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
352 E = RD->bases_end(); I != E; ++I) {
353 if (I->isVirtual())
354 continue;
355
356 const CXXRecordDecl *BaseDecl =
357 cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
358
359 CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(BaseDecl);
360 if (!CanPlaceFieldSubobjectAtOffset(BaseDecl, Class, BaseOffset))
361 return false;
362 }
363
364 if (RD == Class) {
365 // This is the most derived class, traverse virtual bases as well.
366 for (CXXRecordDecl::base_class_const_iterator I = RD->vbases_begin(),
367 E = RD->vbases_end(); I != E; ++I) {
368 const CXXRecordDecl *VBaseDecl =
369 cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
370
371 CharUnits VBaseOffset = Offset + Layout.getVBaseClassOffset(VBaseDecl);
372 if (!CanPlaceFieldSubobjectAtOffset(VBaseDecl, Class, VBaseOffset))
373 return false;
374 }
375 }
376
377 // Traverse all member variables.
378 unsigned FieldNo = 0;
379 for (CXXRecordDecl::field_iterator I = RD->field_begin(), E = RD->field_end();
380 I != E; ++I, ++FieldNo) {
381 if (I->isBitField())
382 continue;
383
384 CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);
385
386 if (!CanPlaceFieldSubobjectAtOffset(*I, FieldOffset))
387 return false;
388 }
389
390 return true;
391 }
392
393 bool
394 EmptySubobjectMap::CanPlaceFieldSubobjectAtOffset(const FieldDecl *FD,
395 CharUnits Offset) const {
396 // We don't have to keep looking past the maximum offset that's known to
397 // contain an empty class.
398 if (!AnyEmptySubobjectsBeyondOffset(Offset))
399 return true;
400
401 QualType T = FD->getType();
402 if (const RecordType *RT = T->getAs<RecordType>()) {
403 const CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
404 return CanPlaceFieldSubobjectAtOffset(RD, RD, Offset);
405 }
406
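  // Illustrative note (hypothetical field): an array member such as
  // 'Empty A[2]' contributes one distinct subobject per element, each at its
  // own offset, so every element is checked in turn.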
407 // If we have an array type we need to look at every element.
408 if (const ConstantArrayType *AT = Context.getAsConstantArrayType(T)) {
409 QualType ElemTy = Context.getBaseElementType(AT);
410 const RecordType *RT = ElemTy->getAs<RecordType>();
411 if (!RT)
412 return true;
413
414 const CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
415 const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
416
417 uint64_t NumElements = Context.getConstantArrayElementCount(AT);
418 CharUnits ElementOffset = Offset;
419 for (uint64_t I = 0; I != NumElements; ++I) {
420 // We don't have to keep looking past the maximum offset that's known to
421 // contain an empty class.
422 if (!AnyEmptySubobjectsBeyondOffset(ElementOffset))
423 return true;
424
425 if (!CanPlaceFieldSubobjectAtOffset(RD, RD, ElementOffset))
426 return false;
427
428 ElementOffset += Layout.getSize();
429 }
430 }
431
432 return true;
433 }
434
435 bool
436 EmptySubobjectMap::CanPlaceFieldAtOffset(const FieldDecl *FD,
437 CharUnits Offset) {
438 if (!CanPlaceFieldSubobjectAtOffset(FD, Offset))
439 return false;
440
441 // We are able to place the member variable at this offset.
442 // Make sure to update the empty base subobject map.
443 UpdateEmptyFieldSubobjects(FD, Offset);
444 return true;
445 }
446
447 void EmptySubobjectMap::UpdateEmptyFieldSubobjects(const CXXRecordDecl *RD,
448 const CXXRecordDecl *Class,
449 CharUnits Offset) {
450 // We know that the only empty subobjects that can conflict with empty
451 // field subobjects are subobjects of empty bases that can be placed at offset
452 // zero. Because of this, we only need to keep track of empty field
453 // subobjects with offsets less than the size of the largest empty
454 // subobject for our class.
455 if (Offset >= SizeOfLargestEmptySubobject)
456 return;
457
458 AddSubobjectAtOffset(RD, Offset);
459
460 const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
461
462 // Traverse all non-virtual bases.
463 for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
464 E = RD->bases_end(); I != E; ++I) {
465 if (I->isVirtual())
466 continue;
467
468 const CXXRecordDecl *BaseDecl =
469 cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
470
471 CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(BaseDecl);
472 UpdateEmptyFieldSubobjects(BaseDecl, Class, BaseOffset);
473 }
474
475 if (RD == Class) {
476 // This is the most derived class, traverse virtual bases as well.
477 for (CXXRecordDecl::base_class_const_iterator I = RD->vbases_begin(),
478 E = RD->vbases_end(); I != E; ++I) {
479 const CXXRecordDecl *VBaseDecl =
480 cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
481
482 CharUnits VBaseOffset = Offset + Layout.getVBaseClassOffset(VBaseDecl);
483 UpdateEmptyFieldSubobjects(VBaseDecl, Class, VBaseOffset);
484 }
485 }
486
487 // Traverse all member variables.
488 unsigned FieldNo = 0;
489 for (CXXRecordDecl::field_iterator I = RD->field_begin(), E = RD->field_end();
490 I != E; ++I, ++FieldNo) {
491 if (I->isBitField())
492 continue;
493
494 CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);
495
496 UpdateEmptyFieldSubobjects(*I, FieldOffset);
497 }
498 }
499
500 void EmptySubobjectMap::UpdateEmptyFieldSubobjects(const FieldDecl *FD,
501 CharUnits Offset) {
502 QualType T = FD->getType();
503 if (const RecordType *RT = T->getAs<RecordType>()) {
504 const CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
505 UpdateEmptyFieldSubobjects(RD, RD, Offset);
506 return;
507 }
508
509 // If we have an array type we need to update every element.
510 if (const ConstantArrayType *AT = Context.getAsConstantArrayType(T)) {
511 QualType ElemTy = Context.getBaseElementType(AT);
512 const RecordType *RT = ElemTy->getAs<RecordType>();
513 if (!RT)
514 return;
515
516 const CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
517 const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
518
519 uint64_t NumElements = Context.getConstantArrayElementCount(AT);
520 CharUnits ElementOffset = Offset;
521
522 for (uint64_t I = 0; I != NumElements; ++I) {
523 // We know that the only empty subobjects that can conflict with empty
524 // field subobjects are subobjects of empty bases that can be placed at
525 // offset zero. Because of this, we only need to keep track of empty field
526 // subobjects with offsets less than the size of the largest empty
527 // subobject for our class.
528 if (ElementOffset >= SizeOfLargestEmptySubobject)
529 return;
530
531 UpdateEmptyFieldSubobjects(RD, RD, ElementOffset);
532 ElementOffset += Layout.getSize();
533 }
534 }
535 }
536
537 typedef llvm::SmallPtrSet<const CXXRecordDecl*, 4> ClassSetTy;
538
539 class RecordLayoutBuilder {
540 protected:
541 // FIXME: Remove this and make the appropriate fields public.
542 friend class clang::ASTContext;
543
544 const ASTContext &Context;
545
546 EmptySubobjectMap *EmptySubobjects;
547
548   /// Size - The current size of the record layout, in bits.
549 uint64_t Size;
550
551 /// Alignment - The current alignment of the record layout.
552 CharUnits Alignment;
553
554 /// \brief The alignment if attribute packed is not used.
555 CharUnits UnpackedAlignment;
556
557 SmallVector<uint64_t, 16> FieldOffsets;
558
559 /// \brief Whether the external AST source has provided a layout for this
560 /// record.
561 unsigned ExternalLayout : 1;
562
563 /// \brief Whether we need to infer alignment, even when we have an
564 /// externally-provided layout.
565 unsigned InferAlignment : 1;
566
567 /// Packed - Whether the record is packed or not.
568 unsigned Packed : 1;
569
570 unsigned IsUnion : 1;
571
572 unsigned IsMac68kAlign : 1;
573
574 unsigned IsMsStruct : 1;
575
576 /// UnfilledBitsInLastByte - If the last field laid out was a bitfield,
577 /// this contains the number of bits in the last byte that can be used for
578 /// an adjacent bitfield if necessary.
579 unsigned char UnfilledBitsInLastByte;
580
581 /// MaxFieldAlignment - The maximum allowed field alignment. This is set by
582 /// #pragma pack.
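  /// A value of zero means no maximum is in effect. For example (illustrative),
  /// '#pragma pack(2)' caps field alignment at two characters.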
583 CharUnits MaxFieldAlignment;
584
585   /// DataSize - The data size of the record being laid out, in bits.
586 uint64_t DataSize;
587
588 CharUnits NonVirtualSize;
589 CharUnits NonVirtualAlignment;
590
591 FieldDecl *ZeroLengthBitfield;
592
593 /// PrimaryBase - the primary base class (if one exists) of the class
594 /// we're laying out.
595 const CXXRecordDecl *PrimaryBase;
596
597 /// PrimaryBaseIsVirtual - Whether the primary base of the class we're laying
598 /// out is virtual.
599 bool PrimaryBaseIsVirtual;
600
601 /// HasOwnVFPtr - Whether the class provides its own vtable/vftbl
602 /// pointer, as opposed to inheriting one from a primary base class.
603 bool HasOwnVFPtr;
604
605 /// VBPtrOffset - Virtual base table offset. Only for MS layout.
606 CharUnits VBPtrOffset;
607
608 typedef llvm::DenseMap<const CXXRecordDecl *, CharUnits> BaseOffsetsMapTy;
609
610 /// Bases - base classes and their offsets in the record.
611 BaseOffsetsMapTy Bases;
612
613 // VBases - virtual base classes and their offsets in the record.
614 ASTRecordLayout::VBaseOffsetsMapTy VBases;
615
616 /// IndirectPrimaryBases - Virtual base classes, direct or indirect, that are
617 /// primary base classes for some other direct or indirect base class.
618 CXXIndirectPrimaryBaseSet IndirectPrimaryBases;
619
620 /// FirstNearlyEmptyVBase - The first nearly empty virtual base class in
621 /// inheritance graph order. Used for determining the primary base class.
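  /// (Roughly, a class is "nearly empty" if it contains nothing but a virtual
  /// table pointer; see the Context.isNearlyEmpty() checks below.)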
622 const CXXRecordDecl *FirstNearlyEmptyVBase;
623
624 /// VisitedVirtualBases - A set of all the visited virtual bases, used to
625 /// avoid visiting virtual bases more than once.
626 llvm::SmallPtrSet<const CXXRecordDecl *, 4> VisitedVirtualBases;
627
628 /// \brief Externally-provided size.
629 uint64_t ExternalSize;
630
631 /// \brief Externally-provided alignment.
632 uint64_t ExternalAlign;
633
634 /// \brief Externally-provided field offsets.
635 llvm::DenseMap<const FieldDecl *, uint64_t> ExternalFieldOffsets;
636
637 /// \brief Externally-provided direct, non-virtual base offsets.
638 llvm::DenseMap<const CXXRecordDecl *, CharUnits> ExternalBaseOffsets;
639
640 /// \brief Externally-provided virtual base offsets.
641 llvm::DenseMap<const CXXRecordDecl *, CharUnits> ExternalVirtualBaseOffsets;
642
643   RecordLayoutBuilder(const ASTContext &Context,
644 EmptySubobjectMap *EmptySubobjects)
645 : Context(Context), EmptySubobjects(EmptySubobjects), Size(0),
646 Alignment(CharUnits::One()), UnpackedAlignment(CharUnits::One()),
647 ExternalLayout(false), InferAlignment(false),
648 Packed(false), IsUnion(false), IsMac68kAlign(false), IsMsStruct(false),
649 UnfilledBitsInLastByte(0), MaxFieldAlignment(CharUnits::Zero()),
650 DataSize(0), NonVirtualSize(CharUnits::Zero()),
651 NonVirtualAlignment(CharUnits::One()),
652 ZeroLengthBitfield(0), PrimaryBase(0),
653 PrimaryBaseIsVirtual(false),
654 HasOwnVFPtr(false),
655 VBPtrOffset(CharUnits::fromQuantity(-1)),
656 FirstNearlyEmptyVBase(0) { }
657
658 /// Reset this RecordLayoutBuilder to a fresh state, using the given
659 /// alignment as the initial alignment. This is used for the
660 /// correct layout of vb-table pointers in MSVC.
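  /// (See the note in LayoutNonVirtualBases: when the vb-table pointer ends up
  /// under-aligned, the layout is redone from scratch with the right starting
  /// alignment.)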
661   void resetWithTargetAlignment(CharUnits TargetAlignment) {
662 const ASTContext &Context = this->Context;
663 EmptySubobjectMap *EmptySubobjects = this->EmptySubobjects;
664 this->~RecordLayoutBuilder();
665 new (this) RecordLayoutBuilder(Context, EmptySubobjects);
666 Alignment = UnpackedAlignment = TargetAlignment;
667 }
668
669 void Layout(const RecordDecl *D);
670 void Layout(const CXXRecordDecl *D);
671 void Layout(const ObjCInterfaceDecl *D);
672
673 void LayoutFields(const RecordDecl *D);
674 void LayoutField(const FieldDecl *D);
675 void LayoutWideBitField(uint64_t FieldSize, uint64_t TypeSize,
676 bool FieldPacked, const FieldDecl *D);
677 void LayoutBitField(const FieldDecl *D);
678
679   TargetCXXABI getCXXABI() const {
680 return Context.getTargetInfo().getCXXABI();
681 }
682
683   bool isMicrosoftCXXABI() const {
684 return getCXXABI().isMicrosoft();
685 }
686
687 void MSLayoutVirtualBases(const CXXRecordDecl *RD);
688
689 /// BaseSubobjectInfoAllocator - Allocator for BaseSubobjectInfo objects.
690 llvm::SpecificBumpPtrAllocator<BaseSubobjectInfo> BaseSubobjectInfoAllocator;
691
692 typedef llvm::DenseMap<const CXXRecordDecl *, BaseSubobjectInfo *>
693 BaseSubobjectInfoMapTy;
694
695 /// VirtualBaseInfo - Map from all the (direct or indirect) virtual bases
696 /// of the class we're laying out to their base subobject info.
697 BaseSubobjectInfoMapTy VirtualBaseInfo;
698
699 /// NonVirtualBaseInfo - Map from all the direct non-virtual bases of the
700 /// class we're laying out to their base subobject info.
701 BaseSubobjectInfoMapTy NonVirtualBaseInfo;
702
703 /// ComputeBaseSubobjectInfo - Compute the base subobject information for the
704 /// bases of the given class.
705 void ComputeBaseSubobjectInfo(const CXXRecordDecl *RD);
706
707 /// ComputeBaseSubobjectInfo - Compute the base subobject information for a
708 /// single class and all of its base classes.
709 BaseSubobjectInfo *ComputeBaseSubobjectInfo(const CXXRecordDecl *RD,
710 bool IsVirtual,
711 BaseSubobjectInfo *Derived);
712
713 /// DeterminePrimaryBase - Determine the primary base of the given class.
714 void DeterminePrimaryBase(const CXXRecordDecl *RD);
715
716 void SelectPrimaryVBase(const CXXRecordDecl *RD);
717
718 void EnsureVTablePointerAlignment(CharUnits UnpackedBaseAlign);
719
720 /// LayoutNonVirtualBases - Determines the primary base class (if any) and
721   /// lays it out. Will then proceed to lay out all non-virtual base classes.
722 void LayoutNonVirtualBases(const CXXRecordDecl *RD);
723
724 /// LayoutNonVirtualBase - Lays out a single non-virtual base.
725 void LayoutNonVirtualBase(const BaseSubobjectInfo *Base);
726
727 void AddPrimaryVirtualBaseOffsets(const BaseSubobjectInfo *Info,
728 CharUnits Offset);
729
730 bool needsVFTable(const CXXRecordDecl *RD) const;
731 bool hasNewVirtualFunction(const CXXRecordDecl *RD,
732 bool IgnoreDestructor = false) const;
733 bool isPossiblePrimaryBase(const CXXRecordDecl *Base) const;
734
735 void computeVtordisps(const CXXRecordDecl *RD,
736 ClassSetTy &VtordispVBases);
737
738 /// LayoutVirtualBases - Lays out all the virtual bases.
739 void LayoutVirtualBases(const CXXRecordDecl *RD,
740 const CXXRecordDecl *MostDerivedClass);
741
742 /// LayoutVirtualBase - Lays out a single virtual base.
743 void LayoutVirtualBase(const BaseSubobjectInfo *Base,
744 bool IsVtordispNeed = false);
745
746 /// LayoutBase - Will lay out a base and return the offset where it was
747 /// placed, in chars.
748 CharUnits LayoutBase(const BaseSubobjectInfo *Base);
749
750 /// InitializeLayout - Initialize record layout for the given record decl.
751 void InitializeLayout(const Decl *D);
752
753 /// FinishLayout - Finalize record layout. Adjust record size based on the
754 /// alignment.
755 void FinishLayout(const NamedDecl *D);
756
757 void UpdateAlignment(CharUnits NewAlignment, CharUnits UnpackedNewAlignment);
758   void UpdateAlignment(CharUnits NewAlignment) {
759 UpdateAlignment(NewAlignment, NewAlignment);
760 }
761
762 /// \brief Retrieve the externally-supplied field offset for the given
763 /// field.
764 ///
765 /// \param Field The field whose offset is being queried.
766 /// \param ComputedOffset The offset that we've computed for this field.
767 uint64_t updateExternalFieldOffset(const FieldDecl *Field,
768 uint64_t ComputedOffset);
769
770 void CheckFieldPadding(uint64_t Offset, uint64_t UnpaddedOffset,
771 uint64_t UnpackedOffset, unsigned UnpackedAlign,
772 bool isPacked, const FieldDecl *D);
773
774 DiagnosticBuilder Diag(SourceLocation Loc, unsigned DiagID);
775
776   CharUnits getSize() const {
777 assert(Size % Context.getCharWidth() == 0);
778 return Context.toCharUnitsFromBits(Size);
779 }
780   uint64_t getSizeInBits() const { return Size; }
781
782   void setSize(CharUnits NewSize) { Size = Context.toBits(NewSize); }
783   void setSize(uint64_t NewSize) { Size = NewSize; }
784
785   CharUnits getAligment() const { return Alignment; }
786
787   CharUnits getDataSize() const {
788 assert(DataSize % Context.getCharWidth() == 0);
789 return Context.toCharUnitsFromBits(DataSize);
790 }
791   uint64_t getDataSizeInBits() const { return DataSize; }
792
793   void setDataSize(CharUnits NewSize) { DataSize = Context.toBits(NewSize); }
794   void setDataSize(uint64_t NewSize) { DataSize = NewSize; }
795
796 RecordLayoutBuilder(const RecordLayoutBuilder &) LLVM_DELETED_FUNCTION;
797 void operator=(const RecordLayoutBuilder &) LLVM_DELETED_FUNCTION;
798 };
799 } // end anonymous namespace
800
801 void
802 RecordLayoutBuilder::SelectPrimaryVBase(const CXXRecordDecl *RD) {
803 for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
804 E = RD->bases_end(); I != E; ++I) {
805 assert(!I->getType()->isDependentType() &&
806 "Cannot layout class with dependent bases.");
807
808 const CXXRecordDecl *Base =
809 cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
810
811 // Check if this is a nearly empty virtual base.
812 if (I->isVirtual() && Context.isNearlyEmpty(Base)) {
813 // If it's not an indirect primary base, then we've found our primary
814 // base.
815 if (!IndirectPrimaryBases.count(Base)) {
816 PrimaryBase = Base;
817 PrimaryBaseIsVirtual = true;
818 return;
819 }
820
821 // Is this the first nearly empty virtual base?
822 if (!FirstNearlyEmptyVBase)
823 FirstNearlyEmptyVBase = Base;
824 }
825
826 SelectPrimaryVBase(Base);
827 if (PrimaryBase)
828 return;
829 }
830 }
831
832 /// DeterminePrimaryBase - Determine the primary base of the given class.
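/// Illustrative examples (hypothetical types, Itanium rules):
///   struct A { virtual void f(); };
///   struct B : A { };          // A is B's non-virtual primary base
///   struct C : virtual A { };  // A is nearly empty, so it becomes C's
///                              // primary (virtual) base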
833 void RecordLayoutBuilder::DeterminePrimaryBase(const CXXRecordDecl *RD) {
834 // If the class isn't dynamic, it won't have a primary base.
835 if (!RD->isDynamicClass())
836 return;
837
838 // Compute all the primary virtual bases for all of our direct and
839 // indirect bases, and record all their primary virtual base classes.
840 RD->getIndirectPrimaryBases(IndirectPrimaryBases);
841
842 // If the record has a dynamic base class, attempt to choose a primary base
843 // class. It is the first (in direct base class order) non-virtual dynamic
844 // base class, if one exists.
845 for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
846 e = RD->bases_end(); i != e; ++i) {
847 // Ignore virtual bases.
848 if (i->isVirtual())
849 continue;
850
851 const CXXRecordDecl *Base =
852 cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
853
854 if (isPossiblePrimaryBase(Base)) {
855 // We found it.
856 PrimaryBase = Base;
857 PrimaryBaseIsVirtual = false;
858 return;
859 }
860 }
861
862 // The Microsoft ABI doesn't have primary virtual bases.
863 if (isMicrosoftCXXABI()) {
864 assert(!PrimaryBase && "Should not get here with a primary base!");
865 return;
866 }
867
868 // Under the Itanium ABI, if there is no non-virtual primary base class,
869 // try to compute the primary virtual base. The primary virtual base is
870 // the first nearly empty virtual base that is not an indirect primary
871 // virtual base class, if one exists.
872 if (RD->getNumVBases() != 0) {
873 SelectPrimaryVBase(RD);
874 if (PrimaryBase)
875 return;
876 }
877
878 // Otherwise, it is the first indirect primary base class, if one exists.
879 if (FirstNearlyEmptyVBase) {
880 PrimaryBase = FirstNearlyEmptyVBase;
881 PrimaryBaseIsVirtual = true;
882 return;
883 }
884
885 assert(!PrimaryBase && "Should not get here with a primary base!");
886 }
887
888 BaseSubobjectInfo *
889 RecordLayoutBuilder::ComputeBaseSubobjectInfo(const CXXRecordDecl *RD,
890 bool IsVirtual,
891 BaseSubobjectInfo *Derived) {
892 BaseSubobjectInfo *Info;
893
894 if (IsVirtual) {
895 // Check if we already have info about this virtual base.
896 BaseSubobjectInfo *&InfoSlot = VirtualBaseInfo[RD];
897 if (InfoSlot) {
898 assert(InfoSlot->Class == RD && "Wrong class for virtual base info!");
899 return InfoSlot;
900 }
901
902     // We don't, so create it.
903 InfoSlot = new (BaseSubobjectInfoAllocator.Allocate()) BaseSubobjectInfo;
904 Info = InfoSlot;
905 } else {
906 Info = new (BaseSubobjectInfoAllocator.Allocate()) BaseSubobjectInfo;
907 }
908
909 Info->Class = RD;
910 Info->IsVirtual = IsVirtual;
911 Info->Derived = 0;
912 Info->PrimaryVirtualBaseInfo = 0;
913
914 const CXXRecordDecl *PrimaryVirtualBase = 0;
915 BaseSubobjectInfo *PrimaryVirtualBaseInfo = 0;
916
917 // Check if this base has a primary virtual base.
918 if (RD->getNumVBases()) {
919 const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
920 if (Layout.isPrimaryBaseVirtual()) {
921 // This base does have a primary virtual base.
922 PrimaryVirtualBase = Layout.getPrimaryBase();
923 assert(PrimaryVirtualBase && "Didn't have a primary virtual base!");
924
925 // Now check if we have base subobject info about this primary base.
926 PrimaryVirtualBaseInfo = VirtualBaseInfo.lookup(PrimaryVirtualBase);
927
928 if (PrimaryVirtualBaseInfo) {
929 if (PrimaryVirtualBaseInfo->Derived) {
930 // We did have info about this primary base, and it turns out that it
931 // has already been claimed as a primary virtual base for another
932 // base.
933 PrimaryVirtualBase = 0;
934 } else {
935 // We can claim this base as our primary base.
936 Info->PrimaryVirtualBaseInfo = PrimaryVirtualBaseInfo;
937 PrimaryVirtualBaseInfo->Derived = Info;
938 }
939 }
940 }
941 }
942
943 // Now go through all direct bases.
944 for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
945 E = RD->bases_end(); I != E; ++I) {
946 bool IsVirtual = I->isVirtual();
947
948 const CXXRecordDecl *BaseDecl =
949 cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
950
951 Info->Bases.push_back(ComputeBaseSubobjectInfo(BaseDecl, IsVirtual, Info));
952 }
953
954 if (PrimaryVirtualBase && !PrimaryVirtualBaseInfo) {
955 // Traversing the bases must have created the base info for our primary
956 // virtual base.
957 PrimaryVirtualBaseInfo = VirtualBaseInfo.lookup(PrimaryVirtualBase);
958 assert(PrimaryVirtualBaseInfo &&
959 "Did not create a primary virtual base!");
960
961 // Claim the primary virtual base as our primary virtual base.
962 Info->PrimaryVirtualBaseInfo = PrimaryVirtualBaseInfo;
963 PrimaryVirtualBaseInfo->Derived = Info;
964 }
965
966 return Info;
967 }
968
969 void RecordLayoutBuilder::ComputeBaseSubobjectInfo(const CXXRecordDecl *RD) {
970 for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
971 E = RD->bases_end(); I != E; ++I) {
972 bool IsVirtual = I->isVirtual();
973
974 const CXXRecordDecl *BaseDecl =
975 cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
976
977 // Compute the base subobject info for this base.
978 BaseSubobjectInfo *Info = ComputeBaseSubobjectInfo(BaseDecl, IsVirtual, 0);
979
980 if (IsVirtual) {
981       // ComputeBaseSubobjectInfo has already added this base for us.
982 assert(VirtualBaseInfo.count(BaseDecl) &&
983 "Did not add virtual base!");
984 } else {
985 // Add the base info to the map of non-virtual bases.
986 assert(!NonVirtualBaseInfo.count(BaseDecl) &&
987 "Non-virtual base already exists!");
988 NonVirtualBaseInfo.insert(std::make_pair(BaseDecl, Info));
989 }
990 }
991 }
992
993 void
994 RecordLayoutBuilder::EnsureVTablePointerAlignment(CharUnits UnpackedBaseAlign) {
995 CharUnits BaseAlign = (Packed) ? CharUnits::One() : UnpackedBaseAlign;
996
997 // The maximum field alignment overrides base align.
998 if (!MaxFieldAlignment.isZero()) {
999 BaseAlign = std::min(BaseAlign, MaxFieldAlignment);
1000 UnpackedBaseAlign = std::min(UnpackedBaseAlign, MaxFieldAlignment);
1001 }
1002
1003 // Round up the current record size to pointer alignment.
1004 setSize(getSize().RoundUpToAlignment(BaseAlign));
1005 setDataSize(getSize());
1006
1007 // Update the alignment.
1008 UpdateAlignment(BaseAlign, UnpackedBaseAlign);
1009 }
1010
1011 void
1012 RecordLayoutBuilder::LayoutNonVirtualBases(const CXXRecordDecl *RD) {
1013 // Then, determine the primary base class.
1014 DeterminePrimaryBase(RD);
1015
1016 // Compute base subobject info.
1017 ComputeBaseSubobjectInfo(RD);
1018
1019 // If we have a primary base class, lay it out.
1020 if (PrimaryBase) {
1021 if (PrimaryBaseIsVirtual) {
1022 // If the primary virtual base was a primary virtual base of some other
1023 // base class we'll have to steal it.
1024 BaseSubobjectInfo *PrimaryBaseInfo = VirtualBaseInfo.lookup(PrimaryBase);
1025 PrimaryBaseInfo->Derived = 0;
1026
1027 // We have a virtual primary base, insert it as an indirect primary base.
1028 IndirectPrimaryBases.insert(PrimaryBase);
1029
1030 assert(!VisitedVirtualBases.count(PrimaryBase) &&
1031 "vbase already visited!");
1032 VisitedVirtualBases.insert(PrimaryBase);
1033
1034 LayoutVirtualBase(PrimaryBaseInfo);
1035 } else {
1036 BaseSubobjectInfo *PrimaryBaseInfo =
1037 NonVirtualBaseInfo.lookup(PrimaryBase);
1038 assert(PrimaryBaseInfo &&
1039 "Did not find base info for non-virtual primary base!");
1040
1041 LayoutNonVirtualBase(PrimaryBaseInfo);
1042 }
1043
1044 // If this class needs a vtable/vf-table and didn't get one from a
1045 // primary base, add it in now.
1046 } else if (needsVFTable(RD)) {
1047 assert(DataSize == 0 && "Vtable pointer must be at offset zero!");
1048 CharUnits PtrWidth =
1049 Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerWidth(0));
1050 CharUnits PtrAlign =
1051 Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerAlign(0));
1052 EnsureVTablePointerAlignment(PtrAlign);
1053 HasOwnVFPtr = true;
1054 setSize(getSize() + PtrWidth);
1055 setDataSize(getSize());
1056 }
1057
1058 bool HasDirectVirtualBases = false;
1059 bool HasNonVirtualBaseWithVBTable = false;
1060
1061 // Now lay out the non-virtual bases.
1062 for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
1063 E = RD->bases_end(); I != E; ++I) {
1064
1065 // Ignore virtual bases, but remember that we saw one.
1066 if (I->isVirtual()) {
1067 HasDirectVirtualBases = true;
1068 continue;
1069 }
1070
1071 const CXXRecordDecl *BaseDecl =
1072 cast<CXXRecordDecl>(I->getType()->castAs<RecordType>()->getDecl());
1073
1074 // Remember if this base has virtual bases itself.
1075 if (BaseDecl->getNumVBases())
1076 HasNonVirtualBaseWithVBTable = true;
1077
1078 // Skip the primary base, because we've already laid it out. The
1079 // !PrimaryBaseIsVirtual check is required because we might have a
1080 // non-virtual base of the same type as a primary virtual base.
1081 if (BaseDecl == PrimaryBase && !PrimaryBaseIsVirtual)
1082 continue;
1083
1084 // Lay out the base.
1085 BaseSubobjectInfo *BaseInfo = NonVirtualBaseInfo.lookup(BaseDecl);
1086 assert(BaseInfo && "Did not find base info for non-virtual base!");
1087
1088 LayoutNonVirtualBase(BaseInfo);
1089 }
1090
1091 // In the MS ABI, add the vb-table pointer if we need one, which is
1092 // whenever we have a virtual base and we can't re-use a vb-table
1093 // pointer from a non-virtual base.
1094 if (isMicrosoftCXXABI() &&
1095 HasDirectVirtualBases && !HasNonVirtualBaseWithVBTable) {
1096 CharUnits PtrWidth =
1097 Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerWidth(0));
1098 CharUnits PtrAlign =
1099 Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerAlign(0));
1100
1101 // MSVC potentially over-aligns the vb-table pointer by giving it
1102 // the max alignment of all the non-virtual objects in the class.
1103 // This is completely unnecessary, but we're not here to pass
1104 // judgment.
1105 //
1106 // Note that we've only laid out the non-virtual bases, so on the
1107 // first pass Alignment won't be set correctly here, but if the
1108 // vb-table doesn't end up aligned correctly we'll come through
1109 // and redo the layout from scratch with the right alignment.
1110 //
1111 // TODO: Instead of doing this, just lay out the fields as if the
1112 // vb-table were at offset zero, then retroactively bump the field
1113 // offsets up.
1114 PtrAlign = std::max(PtrAlign, Alignment);
1115
1116 EnsureVTablePointerAlignment(PtrAlign);
1117 VBPtrOffset = getSize();
1118 setSize(getSize() + PtrWidth);
1119 setDataSize(getSize());
1120 }
1121 }
1122
1123 void RecordLayoutBuilder::LayoutNonVirtualBase(const BaseSubobjectInfo *Base) {
1124   // Lay out the base.
1125 CharUnits Offset = LayoutBase(Base);
1126
1127 // Add its base class offset.
1128 assert(!Bases.count(Base->Class) && "base offset already exists!");
1129 Bases.insert(std::make_pair(Base->Class, Offset));
1130
1131 AddPrimaryVirtualBaseOffsets(Base, Offset);
1132 }
1133
1134 void
1135 RecordLayoutBuilder::AddPrimaryVirtualBaseOffsets(const BaseSubobjectInfo *Info,
1136 CharUnits Offset) {
1137 // This base isn't interesting, it has no virtual bases.
1138 if (!Info->Class->getNumVBases())
1139 return;
1140
1141 // First, check if we have a virtual primary base to add offsets for.
1142 if (Info->PrimaryVirtualBaseInfo) {
1143 assert(Info->PrimaryVirtualBaseInfo->IsVirtual &&
1144 "Primary virtual base is not virtual!");
1145 if (Info->PrimaryVirtualBaseInfo->Derived == Info) {
1146 // Add the offset.
1147 assert(!VBases.count(Info->PrimaryVirtualBaseInfo->Class) &&
1148 "primary vbase offset already exists!");
1149 VBases.insert(std::make_pair(Info->PrimaryVirtualBaseInfo->Class,
1150 ASTRecordLayout::VBaseInfo(Offset, false)));
1151
1152 // Traverse the primary virtual base.
1153 AddPrimaryVirtualBaseOffsets(Info->PrimaryVirtualBaseInfo, Offset);
1154 }
1155 }
1156
1157 // Now go through all direct non-virtual bases.
1158 const ASTRecordLayout &Layout = Context.getASTRecordLayout(Info->Class);
1159 for (unsigned I = 0, E = Info->Bases.size(); I != E; ++I) {
1160 const BaseSubobjectInfo *Base = Info->Bases[I];
1161 if (Base->IsVirtual)
1162 continue;
1163
1164 CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base->Class);
1165 AddPrimaryVirtualBaseOffsets(Base, BaseOffset);
1166 }
1167 }
1168
1169 /// needsVFTable - Return true if this class needs a vtable or vf-table
1170 /// when laid out as a base class. These are treated the same because
1171 /// they're both always laid out at offset zero.
1172 ///
1173 /// This function assumes that the class has no primary base.
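/// Illustrative MS-ABI examples (hypothetical types):
///   struct A { virtual void f(); };
///   struct B : virtual A { };                    // no new virtuals: no vfptr
///   struct C : virtual A { virtual void g(); };  // new virtual: needs vfptr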
1174 bool RecordLayoutBuilder::needsVFTable(const CXXRecordDecl *RD) const {
1175 assert(!PrimaryBase);
1176
1177 // In the Itanium ABI, every dynamic class needs a vtable: even if
1178 // this class has no virtual functions as a base class (i.e. it's
1179 // non-polymorphic or only has virtual functions from virtual
1180   // bases), it still needs a vtable to locate its virtual bases.
1181 if (!isMicrosoftCXXABI())
1182 return RD->isDynamicClass();
1183
1184 // In the MS ABI, we need a vfptr if the class has virtual functions
1185 // other than those declared by its virtual bases. The AST doesn't
1186 // tell us that directly, and checking manually for virtual
1187 // functions that aren't overrides is expensive, but there are
1188 // some important shortcuts:
1189
1190 // - Non-polymorphic classes have no virtual functions at all.
1191 if (!RD->isPolymorphic()) return false;
1192
1193 // - Polymorphic classes with no virtual bases must either declare
1194 // virtual functions directly or inherit them, but in the latter
1195 // case we would have a primary base.
1196 if (RD->getNumVBases() == 0) return true;
1197
1198 return hasNewVirtualFunction(RD);
1199 }
1200
1201 /// Does the given class inherit non-virtually from any of the classes
1202 /// in the given set?
1203 static bool hasNonVirtualBaseInSet(const CXXRecordDecl *RD,
1204 const ClassSetTy &set) {
1205 for (CXXRecordDecl::base_class_const_iterator
1206 I = RD->bases_begin(), E = RD->bases_end(); I != E; ++I) {
1207 // Ignore virtual links.
1208 if (I->isVirtual()) continue;
1209
1210 // Check whether the set contains the base.
1211 const CXXRecordDecl *base = I->getType()->getAsCXXRecordDecl();
1212 if (set.count(base))
1213 return true;
1214
1215 // Otherwise, recurse and propagate.
1216 if (hasNonVirtualBaseInSet(base, set))
1217 return true;
1218 }
1219
1220 return false;
1221 }
1222
1223 /// Does the given method (B::foo()) already override a method (A::foo())
1224 /// such that A requires a vtordisp in B? If so, we don't need to add a
1225 /// new vtordisp for B in a yet-more-derived class C providing C::foo().
1226 static bool overridesMethodRequiringVtorDisp(const ASTContext &Context,
1227 const CXXMethodDecl *M) {
1228 CXXMethodDecl::method_iterator
1229 I = M->begin_overridden_methods(), E = M->end_overridden_methods();
1230 if (I == E) return false;
1231
1232 const ASTRecordLayout::VBaseOffsetsMapTy &offsets =
1233 Context.getASTRecordLayout(M->getParent()).getVBaseOffsetsMap();
1234 do {
1235 const CXXMethodDecl *overridden = *I;
1236
1237 // If the overridden method's class isn't recognized as a virtual
1238 // base in the derived class, ignore it.
1239 ASTRecordLayout::VBaseOffsetsMapTy::const_iterator
1240 it = offsets.find(overridden->getParent());
1241 if (it == offsets.end()) continue;
1242
1243 // Otherwise, check if the overridden method's class needs a vtordisp.
1244 if (it->second.hasVtorDisp()) return true;
1245
1246 } while (++I != E);
1247 return false;
1248 }
1249
1250 /// In the Microsoft ABI, decide which of the virtual bases require a
1251 /// vtordisp field.
1252 void RecordLayoutBuilder::computeVtordisps(const CXXRecordDecl *RD,
1253 ClassSetTy &vtordispVBases) {
1254 // Bail out if we have no virtual bases.
1255 assert(RD->getNumVBases());
1256
1257 // Build up the set of virtual bases that we haven't decided yet.
1258 ClassSetTy undecidedVBases;
1259 for (CXXRecordDecl::base_class_const_iterator
1260 I = RD->vbases_begin(), E = RD->vbases_end(); I != E; ++I) {
1261 const CXXRecordDecl *vbase = I->getType()->getAsCXXRecordDecl();
1262 undecidedVBases.insert(vbase);
1263 }
1264 assert(!undecidedVBases.empty());
1265
1266 // A virtual base requires a vtordisp field in a derived class if it
1267 // requires a vtordisp field in a base class. Walk all the direct
1268 // bases and collect this information.
1269 for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
1270 E = RD->bases_end(); I != E; ++I) {
1271 const CXXRecordDecl *base = I->getType()->getAsCXXRecordDecl();
1272 const ASTRecordLayout &baseLayout = Context.getASTRecordLayout(base);
1273
1274 // Iterate over the set of virtual bases provided by this class.
1275 for (ASTRecordLayout::VBaseOffsetsMapTy::const_iterator
1276 VI = baseLayout.getVBaseOffsetsMap().begin(),
1277 VE = baseLayout.getVBaseOffsetsMap().end(); VI != VE; ++VI) {
1278 // If it doesn't need a vtordisp in this base, ignore it.
1279 if (!VI->second.hasVtorDisp()) continue;
1280
1281 // If we've already seen it and decided it needs a vtordisp, ignore it.
1282 if (!undecidedVBases.erase(VI->first))
1283 continue;
1284
1285 // Add it.
1286 vtordispVBases.insert(VI->first);
1287
1288 // Quit as soon as we've decided everything.
1289 if (undecidedVBases.empty())
1290 return;
1291 }
1292 }
1293
1294 // Okay, we have virtual bases that we haven't yet decided about. A
1295   // virtual base requires a vtordisp if any of the non-destructor
1296 // virtual methods declared in this class directly override a method
1297 // provided by that virtual base. (If so, we need to emit a thunk
1298 // for that method, to be used in the construction vftable, which
1299 // applies an additional 'vtordisp' this-adjustment.)
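  // (Roughly speaking, the vtordisp is a hidden int stored just before the
  // virtual base subobject; the thunk reads it to compute the extra
  // this-adjustment. MSLayoutVirtualBases below reserves that int-sized slot.)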
1300
1301 // Collect the set of bases directly overridden by any method in this class.
1302 // It's possible that some of these classes won't be virtual bases, or won't be
1303 // provided by virtual bases, or won't be virtual bases in the overridden
1304 // instance but are virtual bases elsewhere. Only the last matters for what
1305 // we're doing, and we can ignore those: if we don't directly override
1306 // a method provided by a virtual copy of a base class, but we do directly
1307 // override a method provided by a non-virtual copy of that base class,
1308 // then we must indirectly override the method provided by the virtual base,
1309 // and so we should already have collected it in the loop above.
1310 ClassSetTy overriddenBases;
1311 for (CXXRecordDecl::method_iterator
1312 M = RD->method_begin(), E = RD->method_end(); M != E; ++M) {
1313 // Ignore non-virtual methods and destructors.
1314 if (isa<CXXDestructorDecl>(*M) || !M->isVirtual())
1315 continue;
1316
1317 for (CXXMethodDecl::method_iterator I = M->begin_overridden_methods(),
1318 E = M->end_overridden_methods(); I != E; ++I) {
1319 const CXXMethodDecl *overriddenMethod = (*I);
1320
1321 // Ignore methods that override methods from vbases that require
1322       // vtordisps.
1323 if (overridesMethodRequiringVtorDisp(Context, overriddenMethod))
1324 continue;
1325
1326 // As an optimization, check immediately whether we're overriding
1327 // something from the undecided set.
1328 const CXXRecordDecl *overriddenBase = overriddenMethod->getParent();
1329 if (undecidedVBases.erase(overriddenBase)) {
1330 vtordispVBases.insert(overriddenBase);
1331 if (undecidedVBases.empty()) return;
1332
1333 // We can't 'continue;' here because one of our undecided
1334 // vbases might non-virtually inherit from this base.
1335 // Consider:
1336 // struct A { virtual void foo(); };
1337 // struct B : A {};
1338 // struct C : virtual A, virtual B { virtual void foo(); };
1339 // We need a vtordisp for B here.
1340 }
1341
1342 // Otherwise, just collect it.
1343 overriddenBases.insert(overriddenBase);
1344 }
1345 }
1346
1347 // Walk the undecided v-bases and check whether they (non-virtually)
1348 // provide any of the overridden bases. We don't need to consider
1349 // virtual links because the vtordisp inheres to the layout
1350 // subobject containing the base.
1351 for (ClassSetTy::const_iterator
1352 I = undecidedVBases.begin(), E = undecidedVBases.end(); I != E; ++I) {
1353 if (hasNonVirtualBaseInSet(*I, overriddenBases))
1354 vtordispVBases.insert(*I);
1355 }
1356 }
1357
1358 /// hasNewVirtualFunction - Does the given polymorphic class declare a
1359 /// virtual function that does not override a method from any of its
1360 /// base classes?
1361 bool
1362 RecordLayoutBuilder::hasNewVirtualFunction(const CXXRecordDecl *RD,
1363 bool IgnoreDestructor) const {
1364 if (!RD->getNumBases())
1365 return true;
1366
1367 for (CXXRecordDecl::method_iterator method = RD->method_begin();
1368 method != RD->method_end();
1369 ++method) {
1370 if (method->isVirtual() && !method->size_overridden_methods() &&
1371 !(IgnoreDestructor && method->getKind() == Decl::CXXDestructor)) {
1372 return true;
1373 }
1374 }
1375 return false;
1376 }
1377
1378 /// isPossiblePrimaryBase - Is the given base class an acceptable
1379 /// primary base class?
1380 bool
1381 RecordLayoutBuilder::isPossiblePrimaryBase(const CXXRecordDecl *base) const {
1382 // In the Itanium ABI, a class can be a primary base class if it has
1383 // a vtable for any reason.
1384 if (!isMicrosoftCXXABI())
1385 return base->isDynamicClass();
1386
1387 // In the MS ABI, a class can only be a primary base class if it
1388 // provides a vf-table at a static offset. That means it has to be
1389   // a non-virtual base. The existence of a separate vb-table means
1390 // that it's possible to get virtual functions only from a virtual
1391 // base, which we have to guard against.
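  // For illustration (hypothetical types): given 'struct A { virtual void
  // f(); }; struct B : virtual A { };', B's only vfptr lives inside its
  // virtual A subobject, whose offset is not statically fixed, so B cannot
  // serve as a primary base.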
1392
1393 // First off, it has to have virtual functions.
1394 if (!base->isPolymorphic()) return false;
1395
1396 // If it has no virtual bases, then the vfptr must be at a static offset.
1397 if (!base->getNumVBases()) return true;
1398
1399 // Otherwise, the necessary information is cached in the layout.
1400 const ASTRecordLayout &layout = Context.getASTRecordLayout(base);
1401
1402 // If the base has its own vfptr, it can be a primary base.
1403 if (layout.hasOwnVFPtr()) return true;
1404
1405 // If the base has a primary base class, then it can be a primary base.
1406 if (layout.getPrimaryBase()) return true;
1407
1408 // Otherwise it can't.
1409 return false;
1410 }
1411
1412 void
1413 RecordLayoutBuilder::LayoutVirtualBases(const CXXRecordDecl *RD,
1414 const CXXRecordDecl *MostDerivedClass) {
1415 const CXXRecordDecl *PrimaryBase;
1416 bool PrimaryBaseIsVirtual;
1417
1418 if (MostDerivedClass == RD) {
1419 PrimaryBase = this->PrimaryBase;
1420 PrimaryBaseIsVirtual = this->PrimaryBaseIsVirtual;
1421 } else {
1422 const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
1423 PrimaryBase = Layout.getPrimaryBase();
1424 PrimaryBaseIsVirtual = Layout.isPrimaryBaseVirtual();
1425 }
1426
1427 for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
1428 E = RD->bases_end(); I != E; ++I) {
1429 assert(!I->getType()->isDependentType() &&
1430 "Cannot layout class with dependent bases.");
1431
1432 const CXXRecordDecl *BaseDecl =
1433 cast<CXXRecordDecl>(I->getType()->castAs<RecordType>()->getDecl());
1434
1435 if (I->isVirtual()) {
1436 if (PrimaryBase != BaseDecl || !PrimaryBaseIsVirtual) {
1437 bool IndirectPrimaryBase = IndirectPrimaryBases.count(BaseDecl);
1438
1439 // Only lay out the virtual base if it's not an indirect primary base.
1440 if (!IndirectPrimaryBase) {
1441 // Only visit virtual bases once.
1442 if (!VisitedVirtualBases.insert(BaseDecl))
1443 continue;
1444
1445 const BaseSubobjectInfo *BaseInfo = VirtualBaseInfo.lookup(BaseDecl);
1446 assert(BaseInfo && "Did not find virtual base info!");
1447 LayoutVirtualBase(BaseInfo);
1448 }
1449 }
1450 }
1451
1452 if (!BaseDecl->getNumVBases()) {
1453 // This base isn't interesting since it doesn't have any virtual bases.
1454 continue;
1455 }
1456
1457 LayoutVirtualBases(BaseDecl, MostDerivedClass);
1458 }
1459 }
1460
1461 void RecordLayoutBuilder::MSLayoutVirtualBases(const CXXRecordDecl *RD) {
1462 if (!RD->getNumVBases())
1463 return;
1464
1465 ClassSetTy VtordispVBases;
1466 computeVtordisps(RD, VtordispVBases);
1467
1468 // This is substantially simplified because there are no virtual
1469 // primary bases.
1470 for (CXXRecordDecl::base_class_const_iterator I = RD->vbases_begin(),
1471 E = RD->vbases_end(); I != E; ++I) {
1472 const CXXRecordDecl *BaseDecl = I->getType()->getAsCXXRecordDecl();
1473 const BaseSubobjectInfo *BaseInfo = VirtualBaseInfo.lookup(BaseDecl);
1474 assert(BaseInfo && "Did not find virtual base info!");
1475
1476 // If this base requires a vtordisp, add enough space for an int field.
1477 // This is apparently always 32-bits, even on x64.
1478 bool vtordispNeeded = false;
1479 if (VtordispVBases.count(BaseDecl)) {
1480 CharUnits IntSize =
1481 CharUnits::fromQuantity(Context.getTargetInfo().getIntWidth() / 8);
1482
1483 setSize(getSize() + IntSize);
1484 setDataSize(getSize());
1485 vtordispNeeded = true;
1486 }
1487
1488 LayoutVirtualBase(BaseInfo, vtordispNeeded);
1489 }
1490 }
1491
1492 void RecordLayoutBuilder::LayoutVirtualBase(const BaseSubobjectInfo *Base,
1493 bool IsVtordispNeed) {
1494 assert(!Base->Derived && "Trying to lay out a primary virtual base!");
1495
1496 // Layout the base.
1497 CharUnits Offset = LayoutBase(Base);
1498
1499 // Add its base class offset.
1500 assert(!VBases.count(Base->Class) && "vbase offset already exists!");
1501 VBases.insert(std::make_pair(Base->Class,
1502 ASTRecordLayout::VBaseInfo(Offset, IsVtordispNeed)));
1503
1504 if (!isMicrosoftCXXABI())
1505 AddPrimaryVirtualBaseOffsets(Base, Offset);
1506 }
1507
1508 CharUnits RecordLayoutBuilder::LayoutBase(const BaseSubobjectInfo *Base) {
1509 const ASTRecordLayout &Layout = Context.getASTRecordLayout(Base->Class);
1510
1511
1512 CharUnits Offset;
1513
1514 // Query the external layout to see if it provides an offset.
1515 bool HasExternalLayout = false;
1516 if (ExternalLayout) {
1517 llvm::DenseMap<const CXXRecordDecl *, CharUnits>::iterator Known;
1518 if (Base->IsVirtual) {
1519 Known = ExternalVirtualBaseOffsets.find(Base->Class);
1520 if (Known != ExternalVirtualBaseOffsets.end()) {
1521 Offset = Known->second;
1522 HasExternalLayout = true;
1523 }
1524 } else {
1525 Known = ExternalBaseOffsets.find(Base->Class);
1526 if (Known != ExternalBaseOffsets.end()) {
1527 Offset = Known->second;
1528 HasExternalLayout = true;
1529 }
1530 }
1531 }
1532
1533 // If we have an empty base class, try to place it at offset 0.
1534 if (Base->Class->isEmpty() &&
1535 (!HasExternalLayout || Offset == CharUnits::Zero()) &&
1536 EmptySubobjects->CanPlaceBaseAtOffset(Base, CharUnits::Zero())) {
1537 setSize(std::max(getSize(), Layout.getSize()));
1538
1539 return CharUnits::Zero();
1540 }
1541
1542 CharUnits UnpackedBaseAlign = Layout.getNonVirtualAlign();
1543 CharUnits BaseAlign = (Packed) ? CharUnits::One() : UnpackedBaseAlign;
1544
1545 // The maximum field alignment overrides base align.
1546 if (!MaxFieldAlignment.isZero()) {
1547 BaseAlign = std::min(BaseAlign, MaxFieldAlignment);
1548 UnpackedBaseAlign = std::min(UnpackedBaseAlign, MaxFieldAlignment);
1549 }
1550
1551 if (!HasExternalLayout) {
1552 // Round up the current record size to the base's alignment boundary.
1553 Offset = getDataSize().RoundUpToAlignment(BaseAlign);
1554
1555 // Try to place the base.
1556 while (!EmptySubobjects->CanPlaceBaseAtOffset(Base, Offset))
1557 Offset += BaseAlign;
1558 } else {
1559 bool Allowed = EmptySubobjects->CanPlaceBaseAtOffset(Base, Offset);
1560 (void)Allowed;
1561 assert(Allowed && "Base subobject externally placed at overlapping offset");
1562
1563 if (InferAlignment && Offset < getDataSize().RoundUpToAlignment(BaseAlign)){
1564 // The externally-supplied base offset is before the base offset we
1565 // computed. Assume that the structure is packed.
1566 Alignment = CharUnits::One();
1567 InferAlignment = false;
1568 }
1569 }
1570
1571 if (!Base->Class->isEmpty()) {
1572 // Update the data size.
1573 setDataSize(Offset + Layout.getNonVirtualSize());
1574
1575 setSize(std::max(getSize(), getDataSize()));
1576 } else
1577 setSize(std::max(getSize(), Offset + Layout.getSize()));
1578
1579 // Remember max struct/class alignment.
1580 UpdateAlignment(BaseAlign, UnpackedBaseAlign);
1581
1582 return Offset;
1583 }
1584
1585 void RecordLayoutBuilder::InitializeLayout(const Decl *D) {
1586 if (const RecordDecl *RD = dyn_cast<RecordDecl>(D)) {
1587 IsUnion = RD->isUnion();
1588 IsMsStruct = RD->isMsStruct(Context);
1589 }
1590
1591 Packed = D->hasAttr<PackedAttr>();
1592
1593 // Honor the default struct packing maximum alignment flag.
1594 if (unsigned DefaultMaxFieldAlignment = Context.getLangOpts().PackStruct) {
1595 MaxFieldAlignment = CharUnits::fromQuantity(DefaultMaxFieldAlignment);
1596 }
1597
1598 // mac68k alignment supersedes maximum field alignment and attribute aligned,
1599 // and forces all structures to have 2-byte alignment. The IBM docs on it
1600 // allude to additional (more complicated) semantics, especially with regard
1601 // to bit-fields, but gcc appears not to follow that.
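//
// For example (illustrative, assuming the pragma spelling Clang accepts):
//
//   #pragma options align=mac68k
//   struct S { double d; };   // laid out with 2-byte alignment
//   #pragma options align=reset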
1602 if (D->hasAttr<AlignMac68kAttr>()) {
1603 IsMac68kAlign = true;
1604 MaxFieldAlignment = CharUnits::fromQuantity(2);
1605 Alignment = CharUnits::fromQuantity(2);
1606 } else {
1607 if (const MaxFieldAlignmentAttr *MFAA = D->getAttr<MaxFieldAlignmentAttr>())
1608 MaxFieldAlignment = Context.toCharUnitsFromBits(MFAA->getAlignment());
1609
1610 if (unsigned MaxAlign = D->getMaxAlignment())
1611 UpdateAlignment(Context.toCharUnitsFromBits(MaxAlign));
1612 }
1613
1614 // If there is an external AST source, ask it for the various offsets.
1615 if (const RecordDecl *RD = dyn_cast<RecordDecl>(D))
1616 if (ExternalASTSource *External = Context.getExternalSource()) {
1617 ExternalLayout = External->layoutRecordType(RD,
1618 ExternalSize,
1619 ExternalAlign,
1620 ExternalFieldOffsets,
1621 ExternalBaseOffsets,
1622 ExternalVirtualBaseOffsets);
1623
1624 // Update based on external alignment.
1625 if (ExternalLayout) {
1626 if (ExternalAlign > 0) {
1627 Alignment = Context.toCharUnitsFromBits(ExternalAlign);
1628 } else {
1629 // The external source didn't have alignment information; infer it.
1630 InferAlignment = true;
1631 }
1632 }
1633 }
1634 }
1635
1636 void RecordLayoutBuilder::Layout(const RecordDecl *D) {
1637 InitializeLayout(D);
1638 LayoutFields(D);
1639
1640 // Finally, round the size of the total struct up to the alignment of the
1641 // struct itself.
1642 FinishLayout(D);
1643 }
1644
1645 void RecordLayoutBuilder::Layout(const CXXRecordDecl *RD) {
1646 InitializeLayout(RD);
1647
1648 // Lay out the vtable and the non-virtual bases.
1649 LayoutNonVirtualBases(RD);
1650
1651 LayoutFields(RD);
1652
1653 NonVirtualSize = Context.toCharUnitsFromBits(
1654 llvm::RoundUpToAlignment(getSizeInBits(),
1655 Context.getTargetInfo().getCharAlign()));
1656 NonVirtualAlignment = Alignment;
1657
1658 if (isMicrosoftCXXABI()) {
1659 if (NonVirtualSize != NonVirtualSize.RoundUpToAlignment(Alignment)) {
1660 CharUnits AlignMember =
1661 NonVirtualSize.RoundUpToAlignment(Alignment) - NonVirtualSize;
1662
1663 setSize(getSize() + AlignMember);
1664 setDataSize(getSize());
1665
1666 NonVirtualSize = Context.toCharUnitsFromBits(
1667 llvm::RoundUpToAlignment(getSizeInBits(),
1668 Context.getTargetInfo().getCharAlign()));
1669 }
1670
1671 MSLayoutVirtualBases(RD);
1672 } else {
1673 // Lay out the virtual bases and add the primary virtual base offsets.
1674 LayoutVirtualBases(RD, RD);
1675 }
1676
1677 // Finally, round the size of the total struct up to the alignment
1678 // of the struct itself.
1679 FinishLayout(RD);
1680
1681 #ifndef NDEBUG
1682 // Check that we have base offsets for all bases.
1683 for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
1684 E = RD->bases_end(); I != E; ++I) {
1685 if (I->isVirtual())
1686 continue;
1687
1688 const CXXRecordDecl *BaseDecl =
1689 cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
1690
1691 assert(Bases.count(BaseDecl) && "Did not find base offset!");
1692 }
1693
1694 // And all virtual bases.
1695 for (CXXRecordDecl::base_class_const_iterator I = RD->vbases_begin(),
1696 E = RD->vbases_end(); I != E; ++I) {
1697 const CXXRecordDecl *BaseDecl =
1698 cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
1699
1700 assert(VBases.count(BaseDecl) && "Did not find base offset!");
1701 }
1702 #endif
1703 }
1704
1705 void RecordLayoutBuilder::Layout(const ObjCInterfaceDecl *D) {
1706 if (ObjCInterfaceDecl *SD = D->getSuperClass()) {
1707 const ASTRecordLayout &SL = Context.getASTObjCInterfaceLayout(SD);
1708
1709 UpdateAlignment(SL.getAlignment());
1710
1711 // We start laying out ivars not at the end of the superclass
1712 // structure, but at the next byte following the last field.
1713 setSize(SL.getDataSize());
1714 setDataSize(getSize());
1715 }
1716
1717 InitializeLayout(D);
1718 // Layout each ivar sequentially.
1719 for (const ObjCIvarDecl *IVD = D->all_declared_ivar_begin(); IVD;
1720 IVD = IVD->getNextIvar())
1721 LayoutField(IVD);
1722
1723 // Finally, round the size of the total struct up to the alignment of the
1724 // struct itself.
1725 FinishLayout(D);
1726 }
1727
1728 void RecordLayoutBuilder::LayoutFields(const RecordDecl *D) {
1729 // Layout each field, for now, just sequentially, respecting alignment. In
1730 // the future, this will need to be tweakable by targets.
1731 const FieldDecl *LastFD = 0;
1732 ZeroLengthBitfield = 0;
1733 unsigned RemainingInAlignment = 0;
1734 for (RecordDecl::field_iterator Field = D->field_begin(),
1735 FieldEnd = D->field_end(); Field != FieldEnd; ++Field) {
1736 if (IsMsStruct) {
1737 FieldDecl *FD = *Field;
1738 if (Context.ZeroBitfieldFollowsBitfield(FD, LastFD))
1739 ZeroLengthBitfield = FD;
1740 // Zero-length bitfields following non-bitfield members are
1741 // ignored:
1742 else if (Context.ZeroBitfieldFollowsNonBitfield(FD, LastFD))
1743 continue;
1744 // FIXME. streamline these conditions into a simple one.
1745 else if (Context.BitfieldFollowsBitfield(FD, LastFD) ||
1746 Context.BitfieldFollowsNonBitfield(FD, LastFD) ||
1747 Context.NonBitfieldFollowsBitfield(FD, LastFD)) {
1748 // 1) Adjacent bit fields are packed into the same 1-, 2-, or
1749 // 4-byte allocation unit if the integral types are the same
1750 // size and if the next bit field fits into the current
1751 // allocation unit without crossing the boundary imposed by the
1752 // common alignment requirements of the bit fields.
1753 // 2) Establish a new alignment for a bitfield following
1754 // a non-bitfield if size of their types differ.
1755 // 3) Establish a new alignment for a non-bitfield following
1756 // a bitfield if size of their types differ.
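//
// For example (illustrative), under ms_struct rules:
//
//   struct S { char a : 4; short b : 4; };
//
// 'b' cannot share 'a''s allocation unit because their type sizes differ,
// so 'b' starts a new short-aligned unit and sizeof(S) is typically 4
// rather than 1.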
1757 std::pair<uint64_t, unsigned> FieldInfo =
1758 Context.getTypeInfo(FD->getType());
1759 uint64_t TypeSize = FieldInfo.first;
1760 unsigned FieldAlign = FieldInfo.second;
1761 // This check is needed for 'long long' in -m32 mode.
1762 if (TypeSize > FieldAlign &&
1763 (Context.hasSameType(FD->getType(),
1764 Context.UnsignedLongLongTy)
1765 ||Context.hasSameType(FD->getType(),
1766 Context.LongLongTy)))
1767 FieldAlign = TypeSize;
1768 FieldInfo = Context.getTypeInfo(LastFD->getType());
1769 uint64_t TypeSizeLastFD = FieldInfo.first;
1770 unsigned FieldAlignLastFD = FieldInfo.second;
1771 // This check is needed for 'long long' in -m32 mode.
1772 if (TypeSizeLastFD > FieldAlignLastFD &&
1773 (Context.hasSameType(LastFD->getType(),
1774 Context.UnsignedLongLongTy)
1775 || Context.hasSameType(LastFD->getType(),
1776 Context.LongLongTy)))
1777 FieldAlignLastFD = TypeSizeLastFD;
1778
1779 if (TypeSizeLastFD != TypeSize) {
1780 if (RemainingInAlignment &&
1781 LastFD && LastFD->isBitField() &&
1782 LastFD->getBitWidthValue(Context)) {
1783 // If the previous field was a bitfield with some remaining unfilled
1784 // bits, pad the layout so the current field starts on its type boundary.
1785 uint64_t FieldOffset =
1786 getDataSizeInBits() - UnfilledBitsInLastByte;
1787 uint64_t NewSizeInBits = RemainingInAlignment + FieldOffset;
1788 setDataSize(llvm::RoundUpToAlignment(NewSizeInBits,
1789 Context.getTargetInfo().getCharAlign()));
1790 setSize(std::max(getSizeInBits(), getDataSizeInBits()));
1791 RemainingInAlignment = 0;
1792 }
1793
1794 uint64_t UnpaddedFieldOffset =
1795 getDataSizeInBits() - UnfilledBitsInLastByte;
1796 FieldAlign = std::max(FieldAlign, FieldAlignLastFD);
1797
1798 // The maximum field alignment overrides the aligned attribute.
1799 if (!MaxFieldAlignment.isZero()) {
1800 unsigned MaxFieldAlignmentInBits =
1801 Context.toBits(MaxFieldAlignment);
1802 FieldAlign = std::min(FieldAlign, MaxFieldAlignmentInBits);
1803 }
1804
1805 uint64_t NewSizeInBits =
1806 llvm::RoundUpToAlignment(UnpaddedFieldOffset, FieldAlign);
1807 setDataSize(llvm::RoundUpToAlignment(NewSizeInBits,
1808 Context.getTargetInfo().getCharAlign()));
1809 UnfilledBitsInLastByte = getDataSizeInBits() - NewSizeInBits;
1810 setSize(std::max(getSizeInBits(), getDataSizeInBits()));
1811 }
1812 if (FD->isBitField()) {
1813 uint64_t FieldSize = FD->getBitWidthValue(Context);
1814 assert (FieldSize > 0 && "LayoutFields - ms_struct layout");
1815 if (RemainingInAlignment < FieldSize)
1816 RemainingInAlignment = TypeSize - FieldSize;
1817 else
1818 RemainingInAlignment -= FieldSize;
1819 }
1820 }
1821 else if (FD->isBitField()) {
1822 uint64_t FieldSize = FD->getBitWidthValue(Context);
1823 std::pair<uint64_t, unsigned> FieldInfo =
1824 Context.getTypeInfo(FD->getType());
1825 uint64_t TypeSize = FieldInfo.first;
1826 RemainingInAlignment = TypeSize - FieldSize;
1827 }
1828 LastFD = FD;
1829 }
1830 else if (!Context.getTargetInfo().useBitFieldTypeAlignment() &&
1831 Context.getTargetInfo().useZeroLengthBitfieldAlignment()) {
1832 if (Field->isBitField() && Field->getBitWidthValue(Context) == 0)
1833 ZeroLengthBitfield = *Field;
1834 }
1835 LayoutField(*Field);
1836 }
1837 if (IsMsStruct && RemainingInAlignment &&
1838 LastFD && LastFD->isBitField() && LastFD->getBitWidthValue(Context)) {
1839 // If we ended a bitfield before the full length of the type then
1840 // pad the struct out to the full length of the last type.
1841 uint64_t FieldOffset =
1842 getDataSizeInBits() - UnfilledBitsInLastByte;
1843 uint64_t NewSizeInBits = RemainingInAlignment + FieldOffset;
1844 setDataSize(llvm::RoundUpToAlignment(NewSizeInBits,
1845 Context.getTargetInfo().getCharAlign()));
1846 setSize(std::max(getSizeInBits(), getDataSizeInBits()));
1847 }
1848 }
1849
1850 void RecordLayoutBuilder::LayoutWideBitField(uint64_t FieldSize,
1851 uint64_t TypeSize,
1852 bool FieldPacked,
1853 const FieldDecl *D) {
1854 assert(Context.getLangOpts().CPlusPlus &&
1855 "Can only have wide bit-fields in C++!");
1856
1857 // Itanium C++ ABI 2.4:
1858 // If sizeof(T)*8 < n, let T' be the largest integral POD type with
1859 // sizeof(T')*8 <= n.
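//
// For example (illustrative): for 'char c : 20', the declared type is only
// 8 bits wide, so T' is the largest unsigned type of at most 20 bits,
// i.e. unsigned short, and the field is placed at a T'-aligned offset.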
1860
1861 QualType IntegralPODTypes[] = {
1862 Context.UnsignedCharTy, Context.UnsignedShortTy, Context.UnsignedIntTy,
1863 Context.UnsignedLongTy, Context.UnsignedLongLongTy
1864 };
1865
1866 QualType Type;
1867 for (unsigned I = 0, E = llvm::array_lengthof(IntegralPODTypes);
1868 I != E; ++I) {
1869 uint64_t Size = Context.getTypeSize(IntegralPODTypes[I]);
1870
1871 if (Size > FieldSize)
1872 break;
1873
1874 Type = IntegralPODTypes[I];
1875 }
1876 assert(!Type.isNull() && "Did not find a type!");
1877
1878 CharUnits TypeAlign = Context.getTypeAlignInChars(Type);
1879
1880 // We're not going to use any of the unfilled bits in the last byte.
1881 UnfilledBitsInLastByte = 0;
1882
1883 uint64_t FieldOffset;
1884 uint64_t UnpaddedFieldOffset = getDataSizeInBits() - UnfilledBitsInLastByte;
1885
1886 if (IsUnion) {
1887 setDataSize(std::max(getDataSizeInBits(), FieldSize));
1888 FieldOffset = 0;
1889 } else {
1890 // The bitfield is allocated starting at the next offset aligned
1891 // appropriately for T', with length n bits.
1892 FieldOffset = llvm::RoundUpToAlignment(getDataSizeInBits(),
1893 Context.toBits(TypeAlign));
1894
1895 uint64_t NewSizeInBits = FieldOffset + FieldSize;
1896
1897 setDataSize(llvm::RoundUpToAlignment(NewSizeInBits,
1898 Context.getTargetInfo().getCharAlign()));
1899 UnfilledBitsInLastByte = getDataSizeInBits() - NewSizeInBits;
1900 }
1901
1902 // Place this field at the current location.
1903 FieldOffsets.push_back(FieldOffset);
1904
1905 CheckFieldPadding(FieldOffset, UnpaddedFieldOffset, FieldOffset,
1906 Context.toBits(TypeAlign), FieldPacked, D);
1907
1908 // Update the size.
1909 setSize(std::max(getSizeInBits(), getDataSizeInBits()));
1910
1911 // Remember max struct/class alignment.
1912 UpdateAlignment(TypeAlign);
1913 }
1914
1915 void RecordLayoutBuilder::LayoutBitField(const FieldDecl *D) {
1916 bool FieldPacked = Packed || D->hasAttr<PackedAttr>();
1917 uint64_t UnpaddedFieldOffset = getDataSizeInBits() - UnfilledBitsInLastByte;
1918 uint64_t FieldOffset = IsUnion ? 0 : UnpaddedFieldOffset;
1919 uint64_t FieldSize = D->getBitWidthValue(Context);
1920
1921 std::pair<uint64_t, unsigned> FieldInfo = Context.getTypeInfo(D->getType());
1922 uint64_t TypeSize = FieldInfo.first;
1923 unsigned FieldAlign = FieldInfo.second;
1924
1925 // This check is needed for 'long long' in -m32 mode.
1926 if (IsMsStruct && (TypeSize > FieldAlign) &&
1927 (Context.hasSameType(D->getType(),
1928 Context.UnsignedLongLongTy)
1929 || Context.hasSameType(D->getType(), Context.LongLongTy)))
1930 FieldAlign = TypeSize;
1931
1932 if (ZeroLengthBitfield) {
1933 std::pair<uint64_t, unsigned> FieldInfo;
1934 unsigned ZeroLengthBitfieldAlignment;
1935 if (IsMsStruct) {
1936 // If a zero-length bitfield is inserted after a bitfield, and the
1937 // alignment of the zero-length bitfield is greater than that of the
1938 // member that follows it, the following member will be aligned as if
1939 // it had the type of the zero-length bitfield.
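//
// For example (illustrative), under ms_struct rules:
//
//   struct S { char a : 2; int : 0; char b : 3; };
//
// the zero-length 'int : 0' gives the following bitfield 'b' int alignment.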
1940 if (ZeroLengthBitfield != D) {
1941 FieldInfo = Context.getTypeInfo(ZeroLengthBitfield->getType());
1942 ZeroLengthBitfieldAlignment = FieldInfo.second;
1943 // Ignore alignment of subsequent zero-length bitfields.
1944 if ((ZeroLengthBitfieldAlignment > FieldAlign) || (FieldSize == 0))
1945 FieldAlign = ZeroLengthBitfieldAlignment;
1946 if (FieldSize)
1947 ZeroLengthBitfield = 0;
1948 }
1949 } else {
1950 // The alignment of a zero-length bitfield affects the alignment
1951 // of the next member. The alignment is the max of the zero
1952 // length bitfield's alignment and a target specific fixed value.
1953 unsigned ZeroLengthBitfieldBoundary =
1954 Context.getTargetInfo().getZeroLengthBitfieldBoundary();
1955 if (ZeroLengthBitfieldBoundary > FieldAlign)
1956 FieldAlign = ZeroLengthBitfieldBoundary;
1957 }
1958 }
1959
1960 if (FieldSize > TypeSize) {
1961 LayoutWideBitField(FieldSize, TypeSize, FieldPacked, D);
1962 return;
1963 }
1964
1965 // The alignment the field would have if it were not packed. This is used
1966 // to check whether the packed attribute was unnecessary (-Wpacked).
1967 unsigned UnpackedFieldAlign = FieldAlign;
1968 uint64_t UnpackedFieldOffset = FieldOffset;
1969 if (!Context.getTargetInfo().useBitFieldTypeAlignment() && !ZeroLengthBitfield)
1970 UnpackedFieldAlign = 1;
1971
1972 if (FieldPacked ||
1973 (!Context.getTargetInfo().useBitFieldTypeAlignment() && !ZeroLengthBitfield))
1974 FieldAlign = 1;
1975 FieldAlign = std::max(FieldAlign, D->getMaxAlignment());
1976 UnpackedFieldAlign = std::max(UnpackedFieldAlign, D->getMaxAlignment());
1977
1978 // The maximum field alignment overrides the aligned attribute.
1979 if (!MaxFieldAlignment.isZero() && FieldSize != 0) {
1980 unsigned MaxFieldAlignmentInBits = Context.toBits(MaxFieldAlignment);
1981 FieldAlign = std::min(FieldAlign, MaxFieldAlignmentInBits);
1982 UnpackedFieldAlign = std::min(UnpackedFieldAlign, MaxFieldAlignmentInBits);
1983 }
1984
1985 // Check if we need to add padding to give the field the correct alignment.
1986 if (FieldSize == 0 ||
1987 (MaxFieldAlignment.isZero() &&
1988 (FieldOffset & (FieldAlign-1)) + FieldSize > TypeSize))
1989 FieldOffset = llvm::RoundUpToAlignment(FieldOffset, FieldAlign);
1990
1991 if (FieldSize == 0 ||
1992 (MaxFieldAlignment.isZero() &&
1993 (UnpackedFieldOffset & (UnpackedFieldAlign-1)) + FieldSize > TypeSize))
1994 UnpackedFieldOffset = llvm::RoundUpToAlignment(UnpackedFieldOffset,
1995 UnpackedFieldAlign);
1996
1997 // Padding members don't affect overall alignment, unless zero length bitfield
1998 // alignment is enabled.
1999 if (!D->getIdentifier() && !Context.getTargetInfo().useZeroLengthBitfieldAlignment())
2000 FieldAlign = UnpackedFieldAlign = 1;
2001
2002 if (!IsMsStruct)
2003 ZeroLengthBitfield = 0;
2004
2005 if (ExternalLayout)
2006 FieldOffset = updateExternalFieldOffset(D, FieldOffset);
2007
2008 // Place this field at the current location.
2009 FieldOffsets.push_back(FieldOffset);
2010
2011 if (!ExternalLayout)
2012 CheckFieldPadding(FieldOffset, UnpaddedFieldOffset, UnpackedFieldOffset,
2013 UnpackedFieldAlign, FieldPacked, D);
2014
2015 // Update DataSize to include the last byte containing (part of) the bitfield.
2016 if (IsUnion) {
2017 // FIXME: I think FieldSize should be TypeSize here.
2018 setDataSize(std::max(getDataSizeInBits(), FieldSize));
2019 } else {
2020 uint64_t NewSizeInBits = FieldOffset + FieldSize;
2021
2022 setDataSize(llvm::RoundUpToAlignment(NewSizeInBits,
2023 Context.getTargetInfo().getCharAlign()));
2024 UnfilledBitsInLastByte = getDataSizeInBits() - NewSizeInBits;
2025 }
2026
2027 // Update the size.
2028 setSize(std::max(getSizeInBits(), getDataSizeInBits()));
2029
2030 // Remember max struct/class alignment.
2031 UpdateAlignment(Context.toCharUnitsFromBits(FieldAlign),
2032 Context.toCharUnitsFromBits(UnpackedFieldAlign));
2033 }
2034
2035 void RecordLayoutBuilder::LayoutField(const FieldDecl *D) {
2036 if (D->isBitField()) {
2037 LayoutBitField(D);
2038 return;
2039 }
2040
2041 uint64_t UnpaddedFieldOffset = getDataSizeInBits() - UnfilledBitsInLastByte;
2042
2043 // Reset the unfilled bits.
2044 UnfilledBitsInLastByte = 0;
2045
2046 bool FieldPacked = Packed || D->hasAttr<PackedAttr>();
2047 CharUnits FieldOffset =
2048 IsUnion ? CharUnits::Zero() : getDataSize();
2049 CharUnits FieldSize;
2050 CharUnits FieldAlign;
2051
2052 if (D->getType()->isIncompleteArrayType()) {
2053 // This is a flexible array member; we can't directly
2054 // query getTypeInfo about these, so we figure it out here.
2055 // Flexible array members don't have any size, but they
2056 // have to be aligned appropriately for their element type.
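//
// For example (illustrative): in 'struct S { int n; double tail[]; };',
// 'tail' contributes no size of its own but must be aligned like a
// double, which can raise the alignment (and hence padding) of S.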
2057 FieldSize = CharUnits::Zero();
2058 const ArrayType* ATy = Context.getAsArrayType(D->getType());
2059 FieldAlign = Context.getTypeAlignInChars(ATy->getElementType());
2060 } else if (const ReferenceType *RT = D->getType()->getAs<ReferenceType>()) {
2061 unsigned AS = RT->getPointeeType().getAddressSpace();
2062 FieldSize =
2063 Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerWidth(AS));
2064 FieldAlign =
2065 Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerAlign(AS));
2066 } else {
2067 std::pair<CharUnits, CharUnits> FieldInfo =
2068 Context.getTypeInfoInChars(D->getType());
2069 FieldSize = FieldInfo.first;
2070 FieldAlign = FieldInfo.second;
2071
2072 if (ZeroLengthBitfield) {
2073 CharUnits ZeroLengthBitfieldBoundary =
2074 Context.toCharUnitsFromBits(
2075 Context.getTargetInfo().getZeroLengthBitfieldBoundary());
2076 if (ZeroLengthBitfieldBoundary == CharUnits::Zero()) {
2077 // If a zero-length bitfield is inserted after a bitfield, and the
2078 // alignment of the zero-length bitfield is greater than that of the
2079 // member that follows it, the following member will be aligned as if
2080 // it had the type of the zero-length bitfield.
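//
// For example (illustrative):
//
//   struct S { char foo : 2; int : 0; char bar; };
//
// 'bar' is aligned as if it had type 'int' because of the preceding
// zero-length bitfield.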
2081 std::pair<CharUnits, CharUnits> FieldInfo =
2082 Context.getTypeInfoInChars(ZeroLengthBitfield->getType());
2083 CharUnits ZeroLengthBitfieldAlignment = FieldInfo.second;
2084 if (ZeroLengthBitfieldAlignment > FieldAlign)
2085 FieldAlign = ZeroLengthBitfieldAlignment;
2086 } else if (ZeroLengthBitfieldBoundary > FieldAlign) {
2087 // Align 'bar' based on a fixed alignment specified by the target.
2088 assert(Context.getTargetInfo().useZeroLengthBitfieldAlignment() &&
2089 "ZeroLengthBitfieldBoundary should only be used in conjunction"
2090 " with useZeroLengthBitfieldAlignment.");
2091 FieldAlign = ZeroLengthBitfieldBoundary;
2092 }
2093 ZeroLengthBitfield = 0;
2094 }
2095
2096 if (IsMsStruct) {
2097 // If MS bitfield layout is required, figure out what type is being
2098 // laid out and align the field to the width of that type.
2099
2100 // Resolve all typedefs down to their base type and round up the field
2101 // alignment if necessary.
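//
// For example (illustrative): with ms_struct layout on i386, a member
// declared as 'typedef long long T; T x;' is aligned to 8 bytes (the
// size of long long) even though the target's natural alignment for
// long long is only 4.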
2102 QualType T = Context.getBaseElementType(D->getType());
2103 if (const BuiltinType *BTy = T->getAs<BuiltinType>()) {
2104 CharUnits TypeSize = Context.getTypeSizeInChars(BTy);
2105 if (TypeSize > FieldAlign)
2106 FieldAlign = TypeSize;
2107 }
2108 }
2109 }
2110
2111 // The alignment the field would have if it were not packed. This is used
2112 // to check whether the packed attribute was unnecessary (-Wpacked).
2113 CharUnits UnpackedFieldAlign = FieldAlign;
2114 CharUnits UnpackedFieldOffset = FieldOffset;
2115
2116 if (FieldPacked)
2117 FieldAlign = CharUnits::One();
2118 CharUnits MaxAlignmentInChars =
2119 Context.toCharUnitsFromBits(D->getMaxAlignment());
2120 FieldAlign = std::max(FieldAlign, MaxAlignmentInChars);
2121 UnpackedFieldAlign = std::max(UnpackedFieldAlign, MaxAlignmentInChars);
2122
2123 // The maximum field alignment overrides the aligned attribute.
2124 if (!MaxFieldAlignment.isZero()) {
2125 FieldAlign = std::min(FieldAlign, MaxFieldAlignment);
2126 UnpackedFieldAlign = std::min(UnpackedFieldAlign, MaxFieldAlignment);
2127 }
2128
2129 // Round up the current record size to the field's alignment boundary.
2130 FieldOffset = FieldOffset.RoundUpToAlignment(FieldAlign);
2131 UnpackedFieldOffset =
2132 UnpackedFieldOffset.RoundUpToAlignment(UnpackedFieldAlign);
2133
2134 if (ExternalLayout) {
2135 FieldOffset = Context.toCharUnitsFromBits(
2136 updateExternalFieldOffset(D, Context.toBits(FieldOffset)));
2137
2138 if (!IsUnion && EmptySubobjects) {
2139 // Record the fact that we're placing a field at this offset.
2140 bool Allowed = EmptySubobjects->CanPlaceFieldAtOffset(D, FieldOffset);
2141 (void)Allowed;
2142 assert(Allowed && "Externally-placed field cannot be placed here");
2143 }
2144 } else {
2145 if (!IsUnion && EmptySubobjects) {
2146 // Check if we can place the field at this offset.
2147 while (!EmptySubobjects->CanPlaceFieldAtOffset(D, FieldOffset)) {
2148 // We couldn't place the field at the offset. Try again at a new offset.
2149 FieldOffset += FieldAlign;
2150 }
2151 }
2152 }
2153
2154 // Place this field at the current location.
2155 FieldOffsets.push_back(Context.toBits(FieldOffset));
2156
2157 if (!ExternalLayout)
2158 CheckFieldPadding(Context.toBits(FieldOffset), UnpaddedFieldOffset,
2159 Context.toBits(UnpackedFieldOffset),
2160 Context.toBits(UnpackedFieldAlign), FieldPacked, D);
2161
2162 // Reserve space for this field.
2163 uint64_t FieldSizeInBits = Context.toBits(FieldSize);
2164 if (IsUnion)
2165 setDataSize(std::max(getDataSizeInBits(), FieldSizeInBits));
2166 else
2167 setDataSize(FieldOffset + FieldSize);
2168
2169 // Update the size.
2170 setSize(std::max(getSizeInBits(), getDataSizeInBits()));
2171
2172 // Remember max struct/class alignment.
2173 UpdateAlignment(FieldAlign, UnpackedFieldAlign);
2174 }
2175
2176 void RecordLayoutBuilder::FinishLayout(const NamedDecl *D) {
2177 // In C++, records cannot be of size 0.
2178 if (Context.getLangOpts().CPlusPlus && getSizeInBits() == 0) {
2179 if (const CXXRecordDecl *RD = dyn_cast<CXXRecordDecl>(D)) {
2180 // For compatibility with gcc, a class (POD or non-POD) that is not
2181 // empty but still has size 0 (for example, because its only fields
2182 // are zero-length arrays) keeps a size of 0.
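//
// For example (illustrative): 'struct S { int a[0]; };' is not empty,
// but sizeof(S) stays 0, matching gcc.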
2183 if (RD->isEmpty())
2184 setSize(CharUnits::One());
2185 }
2186 else
2187 setSize(CharUnits::One());
2188 }
2189
2190 // Finally, round the size of the record up to the alignment of the
2191 // record itself.
2192 uint64_t UnpaddedSize = getSizeInBits() - UnfilledBitsInLastByte;
2193 uint64_t UnpackedSizeInBits =
2194 llvm::RoundUpToAlignment(getSizeInBits(),
2195 Context.toBits(UnpackedAlignment));
2196 CharUnits UnpackedSize = Context.toCharUnitsFromBits(UnpackedSizeInBits);
2197 uint64_t RoundedSize
2198 = llvm::RoundUpToAlignment(getSizeInBits(), Context.toBits(Alignment));
2199
2200 if (ExternalLayout) {
2201 // If we're inferring alignment, and the external size is smaller than
2202 // our size after we've rounded up to alignment, conservatively set the
2203 // alignment to 1.
2204 if (InferAlignment && ExternalSize < RoundedSize) {
2205 Alignment = CharUnits::One();
2206 InferAlignment = false;
2207 }
2208 setSize(ExternalSize);
2209 return;
2210 }
2211
2212
2213 // MSVC doesn't round up to the alignment of the record with virtual bases.
2214 if (const CXXRecordDecl *RD = dyn_cast<CXXRecordDecl>(D)) {
2215 if (isMicrosoftCXXABI() && RD->getNumVBases())
2216 return;
2217 }
2218
2219 // Set the size to the final size.
2220 setSize(RoundedSize);
2221
2222 unsigned CharBitNum = Context.getTargetInfo().getCharWidth();
2223 if (const RecordDecl *RD = dyn_cast<RecordDecl>(D)) {
2224 // Warn if padding was introduced to the struct/class/union.
2225 if (getSizeInBits() > UnpaddedSize) {
2226 unsigned PadSize = getSizeInBits() - UnpaddedSize;
2227 bool InBits = true;
2228 if (PadSize % CharBitNum == 0) {
2229 PadSize = PadSize / CharBitNum;
2230 InBits = false;
2231 }
2232 Diag(RD->getLocation(), diag::warn_padded_struct_size)
2233 << Context.getTypeDeclType(RD)
2234 << PadSize
2235 << (InBits ? 1 : 0) /*(byte|bit)*/ << (PadSize > 1); // plural or not
2236 }
2237
2238 // Warn if we packed it unnecessarily. If the alignment is 1 byte don't
2239 // bother since there won't be alignment issues.
2240 if (Packed && UnpackedAlignment > CharUnits::One() &&
2241 getSize() == UnpackedSize)
2242 Diag(D->getLocation(), diag::warn_unnecessary_packed)
2243 << Context.getTypeDeclType(RD);
2244 }
2245 }
2246
2247 void RecordLayoutBuilder::UpdateAlignment(CharUnits NewAlignment,
2248 CharUnits UnpackedNewAlignment) {
2249 // The alignment is not modified when using 'mac68k' alignment or when
2250 // we have an externally-supplied layout that also provides overall alignment.
2251 if (IsMac68kAlign || (ExternalLayout && !InferAlignment))
2252 return;
2253
2254 if (NewAlignment > Alignment) {
2255 assert(llvm::isPowerOf2_32(NewAlignment.getQuantity()) &&
2256 "Alignment not a power of 2");
2257 Alignment = NewAlignment;
2258 }
2259
2260 if (UnpackedNewAlignment > UnpackedAlignment) {
2261 assert(llvm::isPowerOf2_32(UnpackedNewAlignment.getQuantity()) &&
2262 "Alignment not a power of 2");
2263 UnpackedAlignment = UnpackedNewAlignment;
2264 }
2265 }
2266
2267 uint64_t
2268 RecordLayoutBuilder::updateExternalFieldOffset(const FieldDecl *Field,
2269 uint64_t ComputedOffset) {
2270 assert(ExternalFieldOffsets.find(Field) != ExternalFieldOffsets.end() &&
2271 "Field does not have an external offset");
2272
2273 uint64_t ExternalFieldOffset = ExternalFieldOffsets[Field];
2274
2275 if (InferAlignment && ExternalFieldOffset < ComputedOffset) {
2276 // The externally-supplied field offset is before the field offset we
2277 // computed. Assume that the structure is packed.
2278 Alignment = CharUnits::One();
2279 InferAlignment = false;
2280 }
2281
2282 // Use the externally-supplied field offset.
2283 return ExternalFieldOffset;
2284 }
2285
2286 /// \brief Get diagnostic %select index for tag kind for
2287 /// field padding diagnostic message.
2288 /// WARNING: Indexes apply to particular diagnostics only!
2289 ///
2290 /// \returns diagnostic %select index.
2291 static unsigned getPaddingDiagFromTagKind(TagTypeKind Tag) {
2292 switch (Tag) {
2293 case TTK_Struct: return 0;
2294 case TTK_Interface: return 1;
2295 case TTK_Class: return 2;
2296 default: llvm_unreachable("Invalid tag kind for field padding diagnostic!");
2297 }
2298 }
2299
2300 void RecordLayoutBuilder::CheckFieldPadding(uint64_t Offset,
2301 uint64_t UnpaddedOffset,
2302 uint64_t UnpackedOffset,
2303 unsigned UnpackedAlign,
2304 bool isPacked,
2305 const FieldDecl *D) {
2306 // We let ObjC ivars through without warning; ObjC interfaces are
2307 // generally not used for padding tricks.
2308 if (isa<ObjCIvarDecl>(D))
2309 return;
2310
2311 // Don't warn about structs created without a SourceLocation. This can
2312 // be done by clients of the AST, such as codegen.
2313 if (D->getLocation().isInvalid())
2314 return;
2315
2316 unsigned CharBitNum = Context.getTargetInfo().getCharWidth();
2317
2318 // Warn if padding was introduced to the struct/class.
2319 if (!IsUnion && Offset > UnpaddedOffset) {
2320 unsigned PadSize = Offset - UnpaddedOffset;
2321 bool InBits = true;
2322 if (PadSize % CharBitNum == 0) {
2323 PadSize = PadSize / CharBitNum;
2324 InBits = false;
2325 }
2326 if (D->getIdentifier())
2327 Diag(D->getLocation(), diag::warn_padded_struct_field)
2328 << getPaddingDiagFromTagKind(D->getParent()->getTagKind())
2329 << Context.getTypeDeclType(D->getParent())
2330 << PadSize
2331 << (InBits ? 1 : 0) /*(byte|bit)*/ << (PadSize > 1) // plural or not
2332 << D->getIdentifier();
2333 else
2334 Diag(D->getLocation(), diag::warn_padded_struct_anon_field)
2335 << getPaddingDiagFromTagKind(D->getParent()->getTagKind())
2336 << Context.getTypeDeclType(D->getParent())
2337 << PadSize
2338 << (InBits ? 1 : 0) /*(byte|bit)*/ << (PadSize > 1); // plural or not
2339 }
2340
2341 // Warn if we packed it unnecessarily. If the alignment is 1 byte don't
2342 // bother since there won't be alignment issues.
2343 if (isPacked && UnpackedAlign > CharBitNum && Offset == UnpackedOffset)
2344 Diag(D->getLocation(), diag::warn_unnecessary_packed)
2345 << D->getIdentifier();
2346 }
2347
2348 static const CXXMethodDecl *computeKeyFunction(ASTContext &Context,
2349 const CXXRecordDecl *RD) {
2350 // If a class isn't polymorphic it doesn't have a key function.
2351 if (!RD->isPolymorphic())
2352 return 0;
2353
2354 // A class that is not externally visible doesn't have a key function. (Or
2355 // at least, there's no point to assigning a key function to such a class;
2356 // this doesn't affect the ABI.)
2357 if (RD->getLinkage() != ExternalLinkage)
2358 return 0;
2359
2360 // Template instantiations don't have key functions; see Itanium C++ ABI 5.2.6.
2361 // Same behavior as GCC.
2362 TemplateSpecializationKind TSK = RD->getTemplateSpecializationKind();
2363 if (TSK == TSK_ImplicitInstantiation ||
2364 TSK == TSK_ExplicitInstantiationDefinition)
2365 return 0;
2366
2367 bool allowInlineFunctions =
2368 Context.getTargetInfo().getCXXABI().canKeyFunctionBeInline();
2369
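// For example (an illustrative sketch): in
//
//   struct S { virtual void f(); virtual void g() { } };
//
// 'f' is chosen as the key function: it is the first virtual member
// function that is non-pure, user-provided, and not defined inline.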
2370 for (CXXRecordDecl::method_iterator I = RD->method_begin(),
2371 E = RD->method_end(); I != E; ++I) {
2372 const CXXMethodDecl *MD = *I;
2373
2374 if (!MD->isVirtual())
2375 continue;
2376
2377 if (MD->isPure())
2378 continue;
2379
2380 // Ignore implicit member functions, they are always marked as inline, but
2381 // they don't have a body until they're defined.
2382 if (MD->isImplicit())
2383 continue;
2384
2385 if (MD->isInlineSpecified())
2386 continue;
2387
2388 if (MD->hasInlineBody())
2389 continue;
2390
2391 // Ignore inline deleted or defaulted functions.
2392 if (!MD->isUserProvided())
2393 continue;
2394
2395 // In certain ABIs, ignore functions with out-of-line inline definitions.
2396 if (!allowInlineFunctions) {
2397 const FunctionDecl *Def;
2398 if (MD->hasBody(Def) && Def->isInlineSpecified())
2399 continue;
2400 }
2401
2402 // We found it.
2403 return MD;
2404 }
2405
2406 return 0;
2407 }
2408
2409 DiagnosticBuilder
2410 RecordLayoutBuilder::Diag(SourceLocation Loc, unsigned DiagID) {
2411 return Context.getDiagnostics().Report(Loc, DiagID);
2412 }
2413
2414 /// Does the target C++ ABI require us to skip over the tail-padding
2415 /// of the given class (considering it as a base class) when allocating
2416 /// objects?
2417 static bool mustSkipTailPadding(TargetCXXABI ABI, const CXXRecordDecl *RD) {
2418 switch (ABI.getTailPaddingUseRules()) {
2419 case TargetCXXABI::AlwaysUseTailPadding:
2420 return false;
2421
2422 case TargetCXXABI::UseTailPaddingUnlessPOD03:
2423 // FIXME: To the extent that this is meant to cover the Itanium ABI
2424 // rules, we should implement the restrictions about over-sized
2425 // bitfields:
2426 //
2427 // http://mentorembedded.github.com/cxx-abi/abi.html#POD :
2428 // In general, a type is considered a POD for the purposes of
2429 // layout if it is a POD type (in the sense of ISO C++
2430 // [basic.types]). However, a POD-struct or POD-union (in the
2431 // sense of ISO C++ [class]) with a bitfield member whose
2432 // declared width is wider than the declared type of the
2433 // bitfield is not a POD for the purpose of layout. Similarly,
2434 // an array type is not a POD for the purpose of layout if the
2435 // element type of the array is not a POD for the purpose of
2436 // layout.
2437 //
2438 // Where references to the ISO C++ are made in this paragraph,
2439 // the Technical Corrigendum 1 version of the standard is
2440 // intended.
2441 return RD->isPOD();
2442
2443 case TargetCXXABI::UseTailPaddingUnlessPOD11:
2444 // This is equivalent to RD->getTypeForDecl().isCXX11PODType(),
2445 // but with a lot of abstraction penalty stripped off. This does
2446 // assume that these properties are set correctly even in C++98
2447 // mode; fortunately, that is true because we want to assign
2448 // consistently semantics to the type-traits intrinsics (or at
2449 // least as many of them as possible).
2450 return RD->isTrivial() && RD->isStandardLayout();
2451 }
2452
2453 llvm_unreachable("bad tail-padding use kind");
2454 }
2455
2456 /// getASTRecordLayout - Get or compute information about the layout of the
2457 /// specified record (struct/union/class), which indicates its size and field
2458 /// position information.
2459 const ASTRecordLayout &
2460 ASTContext::getASTRecordLayout(const RecordDecl *D) const {
2461 // These asserts test different things. A record has a definition
2462 // as soon as we begin to parse the definition. That definition is
2463 // not a complete definition (which is what isDefinition() tests)
2464 // until we *finish* parsing the definition.
2465
2466 if (D->hasExternalLexicalStorage() && !D->getDefinition())
2467 getExternalSource()->CompleteType(const_cast<RecordDecl*>(D));
2468
2469 D = D->getDefinition();
2470 assert(D && "Cannot get layout of forward declarations!");
2471 assert(D->isCompleteDefinition() && "Cannot layout type before complete!");
2472
2473 // Look up this layout, if already laid out, return what we have.
2474 // Note that we can't save a reference to the entry because this function
2475 // is recursive.
2476 const ASTRecordLayout *Entry = ASTRecordLayouts[D];
2477 if (Entry) return *Entry;
2478
2479 const ASTRecordLayout *NewEntry;
2480
2481 if (const CXXRecordDecl *RD = dyn_cast<CXXRecordDecl>(D)) {
2482 EmptySubobjectMap EmptySubobjects(*this, RD);
2483 RecordLayoutBuilder Builder(*this, &EmptySubobjects);
2484 Builder.Layout(RD);
2485
2486 // MSVC gives the vb-table pointer an alignment equal to that of
2487 // the non-virtual part of the structure. That's an inherently
2488 // multi-pass operation. If our first pass doesn't give us
2489 // adequate alignment, try again with the specified minimum
2490 // alignment. This is *much* more maintainable than computing the
2491 // alignment in advance in a separately-coded pass; it's also
2492 // significantly more efficient in the common case where the
2493 // vb-table doesn't need extra padding.
2494 if (Builder.VBPtrOffset != CharUnits::fromQuantity(-1) &&
2495 (Builder.VBPtrOffset % Builder.NonVirtualAlignment) != 0) {
2496 Builder.resetWithTargetAlignment(Builder.NonVirtualAlignment);
2497 Builder.Layout(RD);
2498 }
2499
2500 // In certain situations, we are allowed to lay out objects in the
2501 // tail-padding of base classes. This is ABI-dependent.
2502 // FIXME: this should be stored in the record layout.
2503 bool skipTailPadding =
2504 mustSkipTailPadding(getTargetInfo().getCXXABI(), cast<CXXRecordDecl>(D));
2505
2506 // FIXME: This should be done in FinalizeLayout.
2507 CharUnits DataSize =
2508 skipTailPadding ? Builder.getSize() : Builder.getDataSize();
2509 CharUnits NonVirtualSize =
2510 skipTailPadding ? DataSize : Builder.NonVirtualSize;
2511
2512 NewEntry =
2513 new (*this) ASTRecordLayout(*this, Builder.getSize(),
2514 Builder.Alignment,
2515 Builder.HasOwnVFPtr,
2516 Builder.VBPtrOffset,
2517 DataSize,
2518 Builder.FieldOffsets.data(),
2519 Builder.FieldOffsets.size(),
2520 NonVirtualSize,
2521 Builder.NonVirtualAlignment,
2522 EmptySubobjects.SizeOfLargestEmptySubobject,
2523 Builder.PrimaryBase,
2524 Builder.PrimaryBaseIsVirtual,
2525 Builder.Bases, Builder.VBases);
2526 } else {
2527 RecordLayoutBuilder Builder(*this, /*EmptySubobjects=*/0);
2528 Builder.Layout(D);
2529
2530 NewEntry =
2531 new (*this) ASTRecordLayout(*this, Builder.getSize(),
2532 Builder.Alignment,
2533 Builder.getSize(),
2534 Builder.FieldOffsets.data(),
2535 Builder.FieldOffsets.size());
2536 }
2537
2538 ASTRecordLayouts[D] = NewEntry;
2539
2540 if (getLangOpts().DumpRecordLayouts) {
2541 llvm::errs() << "\n*** Dumping AST Record Layout\n";
2542 DumpRecordLayout(D, llvm::errs(), getLangOpts().DumpRecordLayoutsSimple);
2543 }
2544
2545 return *NewEntry;
2546 }
2547
2548 const CXXMethodDecl *ASTContext::getCurrentKeyFunction(const CXXRecordDecl *RD) {
2549 assert(RD->getDefinition() && "Cannot get key function for forward decl!");
2550 RD = cast<CXXRecordDecl>(RD->getDefinition());
2551
2552 const CXXMethodDecl *&entry = KeyFunctions[RD];
2553 if (!entry) {
2554 entry = computeKeyFunction(*this, RD);
2555 }
2556
2557 return entry;
2558 }
2559
2560 void ASTContext::setNonKeyFunction(const CXXMethodDecl *method) {
2561 assert(method == method->getFirstDeclaration() &&
2562 "not working with method declaration from class definition");
2563
2564 // Look up the cache entry. Since we're working with the first
2565 // declaration, its parent must be the class definition, which is
2566 // the correct key for the KeyFunctions hash.
2567 llvm::DenseMap<const CXXRecordDecl*, const CXXMethodDecl*>::iterator
2568 i = KeyFunctions.find(method->getParent());
2569
2570 // If it's not cached, there's nothing to do.
2571 if (i == KeyFunctions.end()) return;
2572
2573 // If it is cached, check whether it's the target method, and if so,
2574 // remove it from the cache.
2575 if (i->second == method) {
2576 // FIXME: remember that we did this for module / chained PCH state?
2577 KeyFunctions.erase(i);
2578 }
2579 }
2580
2581 static uint64_t getFieldOffset(const ASTContext &C, const FieldDecl *FD) {
2582 const ASTRecordLayout &Layout = C.getASTRecordLayout(FD->getParent());
2583 return Layout.getFieldOffset(FD->getFieldIndex());
2584 }
2585
2586 uint64_t ASTContext::getFieldOffset(const ValueDecl *VD) const {
2587 uint64_t OffsetInBits;
2588 if (const FieldDecl *FD = dyn_cast<FieldDecl>(VD)) {
2589 OffsetInBits = ::getFieldOffset(*this, FD);
2590 } else {
2591 const IndirectFieldDecl *IFD = cast<IndirectFieldDecl>(VD);
2592
2593 OffsetInBits = 0;
2594 for (IndirectFieldDecl::chain_iterator CI = IFD->chain_begin(),
2595 CE = IFD->chain_end();
2596 CI != CE; ++CI)
2597 OffsetInBits += ::getFieldOffset(*this, cast<FieldDecl>(*CI));
2598 }
2599
2600 return OffsetInBits;
2601 }
2602
2603 /// getObjCLayout - Get or compute information about the layout of the
2604 /// given interface.
2605 ///
2606 /// \param Impl - If given, also include the layout of the interface's
2607 /// implementation. This may differ by including synthesized ivars.
2608 const ASTRecordLayout &
2609 ASTContext::getObjCLayout(const ObjCInterfaceDecl *D,
2610 const ObjCImplementationDecl *Impl) const {
2611 // Retrieve the definition
2612 if (D->hasExternalLexicalStorage() && !D->getDefinition())
2613 getExternalSource()->CompleteType(const_cast<ObjCInterfaceDecl*>(D));
2614 D = D->getDefinition();
2615 assert(D && D->isThisDeclarationADefinition() && "Invalid interface decl!");
2616
2617 // Look up this layout, if already laid out, return what we have.
2618 const ObjCContainerDecl *Key =
2619 Impl ? (const ObjCContainerDecl*) Impl : (const ObjCContainerDecl*) D;
2620 if (const ASTRecordLayout *Entry = ObjCLayouts[Key])
2621 return *Entry;
2622
2623 // Add in synthesized ivar count if laying out an implementation.
2624 if (Impl) {
2625 unsigned SynthCount = CountNonClassIvars(D);
2626 // If there aren't any synthesized ivars then reuse the interface
2627 // entry. Note we can't cache this because we simply free all
2628 // entries later; however we shouldn't look up implementations
2629 // frequently.
2630 if (SynthCount == 0)
2631 return getObjCLayout(D, 0);
2632 }
2633
2634 RecordLayoutBuilder Builder(*this, /*EmptySubobjects=*/0);
2635 Builder.Layout(D);
2636
2637 const ASTRecordLayout *NewEntry =
2638 new (*this) ASTRecordLayout(*this, Builder.getSize(),
2639 Builder.Alignment,
2640 Builder.getDataSize(),
2641 Builder.FieldOffsets.data(),
2642 Builder.FieldOffsets.size());
2643
2644 ObjCLayouts[Key] = NewEntry;
2645
2646 return *NewEntry;
2647 }
2648
2649 static void PrintOffset(raw_ostream &OS,
2650 CharUnits Offset, unsigned IndentLevel) {
2651 OS << llvm::format("%4" PRId64 " | ", (int64_t)Offset.getQuantity());
2652 OS.indent(IndentLevel * 2);
2653 }
2654
2655 static void PrintIndentNoOffset(raw_ostream &OS, unsigned IndentLevel) {
2656 OS << " | ";
2657 OS.indent(IndentLevel * 2);
2658 }
2659
2660 static void DumpCXXRecordLayout(raw_ostream &OS,
2661 const CXXRecordDecl *RD, const ASTContext &C,
2662 CharUnits Offset,
2663 unsigned IndentLevel,
2664 const char* Description,
2665 bool IncludeVirtualBases) {
2666 const ASTRecordLayout &Layout = C.getASTRecordLayout(RD);
2667
2668 PrintOffset(OS, Offset, IndentLevel);
2669 OS << C.getTypeDeclType(const_cast<CXXRecordDecl *>(RD)).getAsString();
2670 if (Description)
2671 OS << ' ' << Description;
2672 if (RD->isEmpty())
2673 OS << " (empty)";
2674 OS << '\n';
2675
2676 IndentLevel++;
2677
2678 const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();
2679 bool HasVfptr = Layout.hasOwnVFPtr();
2680 bool HasVbptr = Layout.getVBPtrOffset() != CharUnits::fromQuantity(-1);
2681
2682 // Vtable pointer.
2683 if (RD->isDynamicClass() && !PrimaryBase &&
2684 !C.getTargetInfo().getCXXABI().isMicrosoft()) {
2685 PrintOffset(OS, Offset, IndentLevel);
2686 OS << '(' << *RD << " vtable pointer)\n";
2687 }
2688
2689 // Dump (non-virtual) bases
2690 for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
2691 E = RD->bases_end(); I != E; ++I) {
2692 assert(!I->getType()->isDependentType() &&
2693 "Cannot layout class with dependent bases.");
2694 if (I->isVirtual())
2695 continue;
2696
2697 const CXXRecordDecl *Base =
2698 cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
2699
2700 CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base);
2701
2702 DumpCXXRecordLayout(OS, Base, C, BaseOffset, IndentLevel,
2703 Base == PrimaryBase ? "(primary base)" : "(base)",
2704 /*IncludeVirtualBases=*/false);
2705 }
2706
2707 // vfptr and vbptr (for Microsoft C++ ABI)
2708 if (HasVfptr) {
2709 PrintOffset(OS, Offset, IndentLevel);
2710 OS << '(' << *RD << " vftable pointer)\n";
2711 }
2712 if (HasVbptr) {
2713 PrintOffset(OS, Offset + Layout.getVBPtrOffset(), IndentLevel);
2714 OS << '(' << *RD << " vbtable pointer)\n";
2715 }
2716
2717 // Dump fields.
2718 uint64_t FieldNo = 0;
2719 for (CXXRecordDecl::field_iterator I = RD->field_begin(),
2720 E = RD->field_end(); I != E; ++I, ++FieldNo) {
2721 const FieldDecl &Field = **I;
2722 CharUnits FieldOffset = Offset +
2723 C.toCharUnitsFromBits(Layout.getFieldOffset(FieldNo));
2724
2725 if (const RecordType *RT = Field.getType()->getAs<RecordType>()) {
2726 if (const CXXRecordDecl *D = dyn_cast<CXXRecordDecl>(RT->getDecl())) {
2727 DumpCXXRecordLayout(OS, D, C, FieldOffset, IndentLevel,
2728 Field.getName().data(),
2729 /*IncludeVirtualBases=*/true);
2730 continue;
2731 }
2732 }
2733
2734 PrintOffset(OS, FieldOffset, IndentLevel);
2735 OS << Field.getType().getAsString() << ' ' << Field << '\n';
2736 }
2737
2738 if (!IncludeVirtualBases)
2739 return;
2740
2741 // Dump virtual bases.
2742 const ASTRecordLayout::VBaseOffsetsMapTy &vtordisps =
2743 Layout.getVBaseOffsetsMap();
2744 for (CXXRecordDecl::base_class_const_iterator I = RD->vbases_begin(),
2745 E = RD->vbases_end(); I != E; ++I) {
2746 assert(I->isVirtual() && "Found non-virtual class!");
2747 const CXXRecordDecl *VBase =
2748 cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
2749
2750 CharUnits VBaseOffset = Offset + Layout.getVBaseClassOffset(VBase);
2751
2752 if (vtordisps.find(VBase)->second.hasVtorDisp()) {
2753 PrintOffset(OS, VBaseOffset - CharUnits::fromQuantity(4), IndentLevel);
2754 OS << "(vtordisp for vbase " << *VBase << ")\n";
2755 }
2756
2757 DumpCXXRecordLayout(OS, VBase, C, VBaseOffset, IndentLevel,
2758 VBase == PrimaryBase ?
2759 "(primary virtual base)" : "(virtual base)",
2760 /*IncludeVirtualBases=*/false);
2761 }
2762
2763 PrintIndentNoOffset(OS, IndentLevel - 1);
2764 OS << "[sizeof=" << Layout.getSize().getQuantity();
2765 OS << ", dsize=" << Layout.getDataSize().getQuantity();
2766 OS << ", align=" << Layout.getAlignment().getQuantity() << '\n';
2767
2768 PrintIndentNoOffset(OS, IndentLevel - 1);
2769 OS << " nvsize=" << Layout.getNonVirtualSize().getQuantity();
2770 OS << ", nvalign=" << Layout.getNonVirtualAlign().getQuantity() << "]\n";
2771 OS << '\n';
2772 }
2773
2774 void ASTContext::DumpRecordLayout(const RecordDecl *RD,
2775 raw_ostream &OS,
2776 bool Simple) const {
2777 const ASTRecordLayout &Info = getASTRecordLayout(RD);
2778
2779 if (const CXXRecordDecl *CXXRD = dyn_cast<CXXRecordDecl>(RD))
2780 if (!Simple)
2781 return DumpCXXRecordLayout(OS, CXXRD, *this, CharUnits(), 0, 0,
2782 /*IncludeVirtualBases=*/true);
2783
2784 OS << "Type: " << getTypeDeclType(RD).getAsString() << "\n";
2785 if (!Simple) {
2786 OS << "Record: ";
2787 RD->dump();
2788 }
2789 OS << "\nLayout: ";
2790 OS << "<ASTRecordLayout\n";
2791 OS << " Size:" << toBits(Info.getSize()) << "\n";
2792 OS << " DataSize:" << toBits(Info.getDataSize()) << "\n";
2793 OS << " Alignment:" << toBits(Info.getAlignment()) << "\n";
2794 OS << " FieldOffsets: [";
2795 for (unsigned i = 0, e = Info.getFieldCount(); i != e; ++i) {
2796 if (i) OS << ", ";
2797 OS << Info.getFieldOffset(i);
2798 }
2799 OS << "]>\n";
2800 }
2801