//===--- CGClass.cpp - Emit LLVM Code for C++ classes ---------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of classes
//
//===----------------------------------------------------------------------===//

#include "CGBlocks.h"
#include "CGDebugInfo.h"
#include "CodeGenFunction.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/EvaluatedExprVisitor.h"
#include "clang/AST/RecordLayout.h"
#include "clang/AST/StmtCXX.h"
#include "clang/Frontend/CodeGenOptions.h"

using namespace clang;
using namespace CodeGen;

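// Walks a CastExpr base-class path, summing the statically known offsets of
// the non-virtual steps.  For example (an illustrative hierarchy, not from
// this file), given
//
//   struct A { int a; };
//   struct B : A { int b; };
//   struct C : B { int c; };
//
// a C -> B -> A path accumulates offset(B within C) + offset(A within B),
// each taken from the corresponding ASTRecordLayout.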
static CharUnits
ComputeNonVirtualBaseClassOffset(ASTContext &Context,
                                 const CXXRecordDecl *DerivedClass,
                                 CastExpr::path_const_iterator Start,
                                 CastExpr::path_const_iterator End) {
  CharUnits Offset = CharUnits::Zero();

  const CXXRecordDecl *RD = DerivedClass;

  for (CastExpr::path_const_iterator I = Start; I != End; ++I) {
    const CXXBaseSpecifier *Base = *I;
    assert(!Base->isVirtual() && "Should not see virtual bases here!");

    // Get the layout.
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);

    const CXXRecordDecl *BaseDecl =
      cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl());

    // Add the offset.
    Offset += Layout.getBaseClassOffset(BaseDecl);

    RD = BaseDecl;
  }

  return Offset;
}

llvm::Constant *
CodeGenModule::GetNonVirtualBaseClassOffset(const CXXRecordDecl *ClassDecl,
                                   CastExpr::path_const_iterator PathBegin,
                                   CastExpr::path_const_iterator PathEnd) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  CharUnits Offset =
    ComputeNonVirtualBaseClassOffset(getContext(), ClassDecl,
                                     PathBegin, PathEnd);
  if (Offset.isZero())
    return 0;

  llvm::Type *PtrDiffTy =
    Types.ConvertType(getContext().getPointerDiffType());

  return llvm::ConstantInt::get(PtrDiffTy, Offset.getQuantity());
}

/// Gets the address of a direct base class within a complete object.
/// This should only be used for (1) non-virtual bases or (2) virtual bases
/// when the type is known to be complete (e.g. in complete destructors).
///
/// The object pointed to by 'This' is assumed to be non-null.
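///
/// For example (an illustrative hierarchy, not from this file): given
/// "struct A { virtual ~A(); }; struct B : virtual A { };", the complete
/// destructor of B can use this with BaseIsVirtual=true, because the offset
/// of the A subobject within a complete B is a constant in B's record
/// layout.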
llvm::Value *
CodeGenFunction::GetAddressOfDirectBaseInCompleteClass(llvm::Value *This,
                                                   const CXXRecordDecl *Derived,
                                                   const CXXRecordDecl *Base,
                                                   bool BaseIsVirtual) {
  // 'this' must be a pointer (in some address space) to Derived.
  assert(This->getType()->isPointerTy() &&
         cast<llvm::PointerType>(This->getType())->getElementType()
           == ConvertType(Derived));

  // Compute the offset of the virtual base.
  CharUnits Offset;
  const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Derived);
  if (BaseIsVirtual)
    Offset = Layout.getVBaseClassOffset(Base);
  else
    Offset = Layout.getBaseClassOffset(Base);

  // Shift and cast down to the base type.
  // TODO: for complete types, this should be possible with a GEP.
  llvm::Value *V = This;
  if (Offset.isPositive()) {
    V = Builder.CreateBitCast(V, Int8PtrTy);
    V = Builder.CreateConstInBoundsGEP1_64(V, Offset.getQuantity());
  }
  V = Builder.CreateBitCast(V, ConvertType(Base)->getPointerTo());

  return V;
}

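// Adds a byte offset to 'ptr': a compile-time-constant non-virtual component
// plus an optional dynamic virtual-base component (e.g. a value loaded from
// a vtable by GetVirtualBaseClassOffset).  The two parts are summed and
// applied as a single i8 GEP.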
static llvm::Value *
ApplyNonVirtualAndVirtualOffset(CodeGenFunction &CGF, llvm::Value *ptr,
                                CharUnits nonVirtualOffset,
                                llvm::Value *virtualOffset) {
  // Assert that we have something to do.
  assert(!nonVirtualOffset.isZero() || virtualOffset != 0);

  // Compute the offset from the static and dynamic components.
  llvm::Value *baseOffset;
  if (!nonVirtualOffset.isZero()) {
    baseOffset = llvm::ConstantInt::get(CGF.PtrDiffTy,
                                        nonVirtualOffset.getQuantity());
    if (virtualOffset) {
      baseOffset = CGF.Builder.CreateAdd(virtualOffset, baseOffset);
    }
  } else {
    baseOffset = virtualOffset;
  }

  // Apply the base offset.
  ptr = CGF.Builder.CreateBitCast(ptr, CGF.Int8PtrTy);
  ptr = CGF.Builder.CreateInBoundsGEP(ptr, baseOffset, "add.ptr");
  return ptr;
}


llvm::Value *
CodeGenFunction::GetAddressOfBaseClass(llvm::Value *Value,
                                       const CXXRecordDecl *Derived,
                                       CastExpr::path_const_iterator PathBegin,
                                       CastExpr::path_const_iterator PathEnd,
                                       bool NullCheckValue) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  CastExpr::path_const_iterator Start = PathBegin;
  const CXXRecordDecl *VBase = 0;

  // Sema has done some convenient canonicalization here: if the
  // access path involved any virtual steps, the conversion path will
  // *start* with a step down to the correct virtual base subobject,
  // and hence will not require any further steps.
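  //
  // For example (illustrative only): given "struct A {}; struct B : A {};
  // struct C : virtual B {};", a C* -> A* conversion path begins with the
  // virtual step down to B, and everything after it (B -> A) is non-virtual.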
  if ((*Start)->isVirtual()) {
    VBase =
      cast<CXXRecordDecl>((*Start)->getType()->getAs<RecordType>()->getDecl());
    ++Start;
  }

  // Compute the static offset of the ultimate destination within its
  // allocating subobject (the virtual base, if there is one, or else
  // the "complete" object that we see).
  CharUnits NonVirtualOffset =
    ComputeNonVirtualBaseClassOffset(getContext(), VBase ? VBase : Derived,
                                     Start, PathEnd);

  // If there's a virtual step, we can sometimes "devirtualize" it.
  // For now, that's limited to when the derived type is final.
  // TODO: "devirtualize" this for accesses to known-complete objects.
  if (VBase && Derived->hasAttr<FinalAttr>()) {
    const ASTRecordLayout &layout = getContext().getASTRecordLayout(Derived);
    CharUnits vBaseOffset = layout.getVBaseClassOffset(VBase);
    NonVirtualOffset += vBaseOffset;
    VBase = 0; // we no longer have a virtual step
  }

  // Get the base pointer type.
  llvm::Type *BasePtrTy =
    ConvertType((PathEnd[-1])->getType())->getPointerTo();

  // If the static offset is zero and we don't have a virtual step,
  // just do a bitcast; null checks are unnecessary.
  if (NonVirtualOffset.isZero() && !VBase) {
    return Builder.CreateBitCast(Value, BasePtrTy);
  }

  llvm::BasicBlock *origBB = 0;
  llvm::BasicBlock *endBB = 0;

  // Skip over the offset (and the vtable load) if we're supposed to
  // null-check the pointer.
  if (NullCheckValue) {
    origBB = Builder.GetInsertBlock();
    llvm::BasicBlock *notNullBB = createBasicBlock("cast.notnull");
    endBB = createBasicBlock("cast.end");

    llvm::Value *isNull = Builder.CreateIsNull(Value);
    Builder.CreateCondBr(isNull, endBB, notNullBB);
    EmitBlock(notNullBB);
  }

  // Compute the virtual offset.
  llvm::Value *VirtualOffset = 0;
  if (VBase) {
    VirtualOffset = GetVirtualBaseClassOffset(Value, Derived, VBase);
  }

  // Apply both offsets.
  Value = ApplyNonVirtualAndVirtualOffset(*this, Value,
                                          NonVirtualOffset,
                                          VirtualOffset);

  // Cast to the destination type.
  Value = Builder.CreateBitCast(Value, BasePtrTy);

  // Build a phi if we needed a null check.
  if (NullCheckValue) {
    llvm::BasicBlock *notNullBB = Builder.GetInsertBlock();
    Builder.CreateBr(endBB);
    EmitBlock(endBB);

    llvm::PHINode *PHI = Builder.CreatePHI(BasePtrTy, 2, "cast.result");
    PHI->addIncoming(Value, notNullBB);
    PHI->addIncoming(llvm::Constant::getNullValue(BasePtrTy), origBB);
    Value = PHI;
  }

  return Value;
}

llvm::Value *
CodeGenFunction::GetAddressOfDerivedClass(llvm::Value *Value,
                                          const CXXRecordDecl *Derived,
                                        CastExpr::path_const_iterator PathBegin,
                                          CastExpr::path_const_iterator PathEnd,
                                          bool NullCheckValue) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  QualType DerivedTy =
    getContext().getCanonicalType(getContext().getTagDeclType(Derived));
  llvm::Type *DerivedPtrTy = ConvertType(DerivedTy)->getPointerTo();

  llvm::Value *NonVirtualOffset =
    CGM.GetNonVirtualBaseClassOffset(Derived, PathBegin, PathEnd);

  if (!NonVirtualOffset) {
    // No offset, we can just cast back.
    return Builder.CreateBitCast(Value, DerivedPtrTy);
  }

  llvm::BasicBlock *CastNull = 0;
  llvm::BasicBlock *CastNotNull = 0;
  llvm::BasicBlock *CastEnd = 0;

  if (NullCheckValue) {
    CastNull = createBasicBlock("cast.null");
    CastNotNull = createBasicBlock("cast.notnull");
    CastEnd = createBasicBlock("cast.end");

    llvm::Value *IsNull = Builder.CreateIsNull(Value);
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  // Apply the offset.
  Value = Builder.CreateBitCast(Value, Int8PtrTy);
  Value = Builder.CreateGEP(Value, Builder.CreateNeg(NonVirtualOffset),
                            "sub.ptr");

  // Just cast.
  Value = Builder.CreateBitCast(Value, DerivedPtrTy);

  if (NullCheckValue) {
    Builder.CreateBr(CastEnd);
    EmitBlock(CastNull);
    Builder.CreateBr(CastEnd);
    EmitBlock(CastEnd);

    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType(), 2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()),
                     CastNull);
    Value = PHI;
  }

  return Value;
}

/// GetVTTParameter - Return the VTT parameter that should be passed to a
/// base constructor/destructor with virtual bases.
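///
/// (Background, summarized from the Itanium C++ ABI: the VTT is an array of
/// vtable address points.  Base-variant constructors and destructors of
/// classes with virtual bases take it as a hidden parameter so that the
/// vtable pointers of subobjects can be set correctly while the
/// most-derived object is still under construction.)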
static llvm::Value *GetVTTParameter(CodeGenFunction &CGF, GlobalDecl GD,
                                    bool ForVirtualBase) {
  if (!CodeGenVTables::needsVTTParameter(GD)) {
    // This constructor/destructor does not need a VTT parameter.
    return 0;
  }

  const CXXRecordDecl *RD = cast<CXXMethodDecl>(CGF.CurFuncDecl)->getParent();
  const CXXRecordDecl *Base = cast<CXXMethodDecl>(GD.getDecl())->getParent();

  llvm::Value *VTT;

  uint64_t SubVTTIndex;

  // If the record matches the base, this is the complete ctor/dtor
  // variant calling the base variant in a class with virtual bases.
  if (RD == Base) {
    assert(!CodeGenVTables::needsVTTParameter(CGF.CurGD) &&
           "doing no-op VTT offset in base dtor/ctor?");
    assert(!ForVirtualBase && "Can't have same class as virtual base!");
    SubVTTIndex = 0;
  } else {
    const ASTRecordLayout &Layout =
      CGF.getContext().getASTRecordLayout(RD);
    CharUnits BaseOffset = ForVirtualBase ?
      Layout.getVBaseClassOffset(Base) :
      Layout.getBaseClassOffset(Base);

    SubVTTIndex =
      CGF.CGM.getVTables().getSubVTTIndex(RD, BaseSubobject(Base, BaseOffset));
    assert(SubVTTIndex != 0 && "Sub-VTT index must be greater than zero!");
  }

  if (CodeGenVTables::needsVTTParameter(CGF.CurGD)) {
    // A VTT parameter was passed to the constructor, use it.
    VTT = CGF.LoadCXXVTT();
    VTT = CGF.Builder.CreateConstInBoundsGEP1_64(VTT, SubVTTIndex);
  } else {
    // We're the complete constructor, so get the VTT by name.
    VTT = CGF.CGM.getVTables().GetAddrOfVTT(RD);
    VTT = CGF.Builder.CreateConstInBoundsGEP2_64(VTT, 0, SubVTTIndex);
  }

  return VTT;
}

namespace {
  /// Call the destructor for a direct base class.
  struct CallBaseDtor : EHScopeStack::Cleanup {
    const CXXRecordDecl *BaseClass;
    bool BaseIsVirtual;
    CallBaseDtor(const CXXRecordDecl *Base, bool BaseIsVirtual)
      : BaseClass(Base), BaseIsVirtual(BaseIsVirtual) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      const CXXRecordDecl *DerivedClass =
        cast<CXXMethodDecl>(CGF.CurCodeDecl)->getParent();

      const CXXDestructorDecl *D = BaseClass->getDestructor();
      llvm::Value *Addr =
        CGF.GetAddressOfDirectBaseInCompleteClass(CGF.LoadCXXThis(),
                                                  DerivedClass, BaseClass,
                                                  BaseIsVirtual);
      CGF.EmitCXXDestructorCall(D, Dtor_Base, BaseIsVirtual, Addr);
    }
  };

  /// A visitor which checks whether an initializer uses 'this' in a
  /// way which requires the vtable to be properly set.
  struct DynamicThisUseChecker : EvaluatedExprVisitor<DynamicThisUseChecker> {
    typedef EvaluatedExprVisitor<DynamicThisUseChecker> super;

    bool UsesThis;

    DynamicThisUseChecker(ASTContext &C) : super(C), UsesThis(false) {}

    // Black-list all explicit and implicit references to 'this'.
    //
    // Do we need to worry about external references to 'this' derived
    // from arbitrary code?  If so, then anything which runs arbitrary
    // external code might potentially access the vtable.
    void VisitCXXThisExpr(CXXThisExpr *E) { UsesThis = true; }
  };
}

static bool BaseInitializerUsesThis(ASTContext &C, const Expr *Init) {
  DynamicThisUseChecker Checker(C);
  Checker.Visit(const_cast<Expr*>(Init));
  return Checker.UsesThis;
}

static void EmitBaseInitializer(CodeGenFunction &CGF,
                                const CXXRecordDecl *ClassDecl,
                                CXXCtorInitializer *BaseInit,
                                CXXCtorType CtorType) {
  assert(BaseInit->isBaseInitializer() &&
         "Must have base initializer!");

  llvm::Value *ThisPtr = CGF.LoadCXXThis();

  const Type *BaseType = BaseInit->getBaseClass();
  CXXRecordDecl *BaseClassDecl =
    cast<CXXRecordDecl>(BaseType->getAs<RecordType>()->getDecl());

  bool isBaseVirtual = BaseInit->isBaseVirtual();

  // The base constructor doesn't construct virtual bases.
  if (CtorType == Ctor_Base && isBaseVirtual)
    return;

  // If the initializer for the base (other than the constructor
  // itself) accesses 'this' in any way, we need to initialize the
  // vtables.
  if (BaseInitializerUsesThis(CGF.getContext(), BaseInit->getInit()))
    CGF.InitializeVTablePointers(ClassDecl);

  // We can pretend to be a complete class because it only matters for
  // virtual bases, and we only do virtual bases for complete ctors.
  llvm::Value *V =
    CGF.GetAddressOfDirectBaseInCompleteClass(ThisPtr, ClassDecl,
                                              BaseClassDecl,
                                              isBaseVirtual);
  CharUnits Alignment = CGF.getContext().getTypeAlignInChars(BaseType);
  AggValueSlot AggSlot =
    AggValueSlot::forAddr(V, Alignment, Qualifiers(),
                          AggValueSlot::IsDestructed,
                          AggValueSlot::DoesNotNeedGCBarriers,
                          AggValueSlot::IsNotAliased);

  CGF.EmitAggExpr(BaseInit->getInit(), AggSlot);

  if (CGF.CGM.getLangOpts().Exceptions &&
      !BaseClassDecl->hasTrivialDestructor())
    CGF.EHStack.pushCleanup<CallBaseDtor>(EHCleanup, BaseClassDecl,
                                          isBaseVirtual);
}

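// Emits the initialization of a single aggregate member, recursing once per
// array dimension.  For a member declared as "T x[2][3]" (an illustrative
// example), this builds two nested index loops; at Index ==
// ArrayIndexes.size() it emits the element initializer itself, offset by the
// shared flat array index when one is provided.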
static void EmitAggMemberInitializer(CodeGenFunction &CGF,
                                     LValue LHS,
                                     Expr *Init,
                                     llvm::Value *ArrayIndexVar,
                                     QualType T,
                                     ArrayRef<VarDecl *> ArrayIndexes,
                                     unsigned Index) {
  if (Index == ArrayIndexes.size()) {
    LValue LV = LHS;
    { // Scope for Cleanups.
      CodeGenFunction::RunCleanupsScope Cleanups(CGF);

      if (ArrayIndexVar) {
        // If we have an array index variable, load it and use it as an offset.
        // Then, increment the value.
        llvm::Value *Dest = LHS.getAddress();
        llvm::Value *ArrayIndex = CGF.Builder.CreateLoad(ArrayIndexVar);
        Dest = CGF.Builder.CreateInBoundsGEP(Dest, ArrayIndex, "destaddress");
        llvm::Value *Next = llvm::ConstantInt::get(ArrayIndex->getType(), 1);
        Next = CGF.Builder.CreateAdd(ArrayIndex, Next, "inc");
        CGF.Builder.CreateStore(Next, ArrayIndexVar);

        // Update the LValue.
        LV.setAddress(Dest);
        CharUnits Align = CGF.getContext().getTypeAlignInChars(T);
        LV.setAlignment(std::min(Align, LV.getAlignment()));
      }

      if (!CGF.hasAggregateLLVMType(T)) {
        CGF.EmitScalarInit(Init, /*decl*/ 0, LV, false);
      } else if (T->isAnyComplexType()) {
        CGF.EmitComplexExprIntoAddr(Init, LV.getAddress(),
                                    LV.isVolatileQualified());
      } else {
        AggValueSlot Slot =
          AggValueSlot::forLValue(LV,
                                  AggValueSlot::IsDestructed,
                                  AggValueSlot::DoesNotNeedGCBarriers,
                                  AggValueSlot::IsNotAliased);

        CGF.EmitAggExpr(Init, Slot);
      }
    }

    // Now, outside of the initializer cleanup scope, destroy the backing array
    // for a std::initializer_list member.
    CGF.MaybeEmitStdInitializerListCleanup(LV.getAddress(), Init);

    return;
  }

  const ConstantArrayType *Array = CGF.getContext().getAsConstantArrayType(T);
  assert(Array && "Array initialization without the array type?");
  llvm::Value *IndexVar
    = CGF.GetAddrOfLocalVar(ArrayIndexes[Index]);
  assert(IndexVar && "Array index variable not loaded");

  // Initialize this index variable to zero.
  llvm::Value* Zero
    = llvm::Constant::getNullValue(
        CGF.ConvertType(CGF.getContext().getSizeType()));
  CGF.Builder.CreateStore(Zero, IndexVar);

  // Start the loop with a block that tests the condition.
  llvm::BasicBlock *CondBlock = CGF.createBasicBlock("for.cond");
  llvm::BasicBlock *AfterFor = CGF.createBasicBlock("for.end");

  CGF.EmitBlock(CondBlock);

  llvm::BasicBlock *ForBody = CGF.createBasicBlock("for.body");
  // Generate: if (loop-index < number-of-elements) fall to the loop body,
  // otherwise, go to the block after the for-loop.
  uint64_t NumElements = Array->getSize().getZExtValue();
  llvm::Value *Counter = CGF.Builder.CreateLoad(IndexVar);
  llvm::Value *NumElementsPtr =
    llvm::ConstantInt::get(Counter->getType(), NumElements);
  llvm::Value *IsLess = CGF.Builder.CreateICmpULT(Counter, NumElementsPtr,
                                                  "isless");

  // If the condition is true, execute the body.
  CGF.Builder.CreateCondBr(IsLess, ForBody, AfterFor);

  CGF.EmitBlock(ForBody);
  llvm::BasicBlock *ContinueBlock = CGF.createBasicBlock("for.inc");

  {
    CodeGenFunction::RunCleanupsScope Cleanups(CGF);

    // Inside the loop body recurse to emit the inner loop or, eventually, the
    // constructor call.
    EmitAggMemberInitializer(CGF, LHS, Init, ArrayIndexVar,
                             Array->getElementType(), ArrayIndexes, Index + 1);
  }

  CGF.EmitBlock(ContinueBlock);

  // Emit the increment of the loop counter.
  llvm::Value *NextVal = llvm::ConstantInt::get(Counter->getType(), 1);
  Counter = CGF.Builder.CreateLoad(IndexVar);
  NextVal = CGF.Builder.CreateAdd(Counter, NextVal, "inc");
  CGF.Builder.CreateStore(NextVal, IndexVar);

  // Finally, branch back up to the condition for the next iteration.
  CGF.EmitBranch(CondBlock);

  // Emit the fall-through block.
  CGF.EmitBlock(AfterFor, true);
}

namespace {
  struct CallMemberDtor : EHScopeStack::Cleanup {
    llvm::Value *V;
    CXXDestructorDecl *Dtor;

    CallMemberDtor(llvm::Value *V, CXXDestructorDecl *Dtor)
      : V(V), Dtor(Dtor) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
                                V);
    }
  };
}

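/// Returns whether \p Record has a trivial move constructor (when \p Moving
/// is true) or a trivial copy constructor (otherwise).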
static bool hasTrivialCopyOrMoveConstructor(const CXXRecordDecl *Record,
                                            bool Moving) {
  return Moving ? Record->hasTrivialMoveConstructor() :
                  Record->hasTrivialCopyConstructor();
}

static void EmitMemberInitializer(CodeGenFunction &CGF,
                                  const CXXRecordDecl *ClassDecl,
                                  CXXCtorInitializer *MemberInit,
                                  const CXXConstructorDecl *Constructor,
                                  FunctionArgList &Args) {
  assert(MemberInit->isAnyMemberInitializer() &&
         "Must have member initializer!");
  assert(MemberInit->getInit() && "Must have initializer!");

  // non-static data member initializers.
  FieldDecl *Field = MemberInit->getAnyMember();
  QualType FieldType = Field->getType();

  llvm::Value *ThisPtr = CGF.LoadCXXThis();
  QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl);
  LValue LHS = CGF.MakeNaturalAlignAddrLValue(ThisPtr, RecordTy);

  if (MemberInit->isIndirectMemberInitializer()) {
    // If we are initializing an anonymous union field, drill down to
    // the field.
    IndirectFieldDecl *IndirectField = MemberInit->getIndirectMember();
    IndirectFieldDecl::chain_iterator I = IndirectField->chain_begin(),
      IEnd = IndirectField->chain_end();
    for ( ; I != IEnd; ++I)
      LHS = CGF.EmitLValueForFieldInitialization(LHS, cast<FieldDecl>(*I));
    FieldType = MemberInit->getIndirectMember()->getAnonField()->getType();
  } else {
    LHS = CGF.EmitLValueForFieldInitialization(LHS, Field);
  }

  // Special case: if we are in a copy or move constructor, and we are copying
  // an array of PODs or classes with trivial copy constructors, ignore the
  // AST and perform the copy we know is equivalent.
  // FIXME: This is hacky at best... if we had a bit more explicit information
  // in the AST, we could generalize it more easily.
  const ConstantArrayType *Array
    = CGF.getContext().getAsConstantArrayType(FieldType);
  if (Array && Constructor->isImplicitlyDefined() &&
      Constructor->isCopyOrMoveConstructor()) {
    QualType BaseElementTy = CGF.getContext().getBaseElementType(Array);
    const CXXRecordDecl *Record = BaseElementTy->getAsCXXRecordDecl();
    if (BaseElementTy.isPODType(CGF.getContext()) ||
        (Record && hasTrivialCopyOrMoveConstructor(Record,
                       Constructor->isMoveConstructor()))) {
      // Find the source pointer. We know it's the last argument because
      // we know we're in a copy constructor.
      unsigned SrcArgIndex = Args.size() - 1;
      llvm::Value *SrcPtr
        = CGF.Builder.CreateLoad(CGF.GetAddrOfLocalVar(Args[SrcArgIndex]));
      LValue ThisRHSLV = CGF.MakeNaturalAlignAddrLValue(SrcPtr, RecordTy);
      LValue Src = CGF.EmitLValueForFieldInitialization(ThisRHSLV, Field);

      // Copy the aggregate.
      CGF.EmitAggregateCopy(LHS.getAddress(), Src.getAddress(), FieldType,
                            LHS.isVolatileQualified());
      return;
    }
  }

  ArrayRef<VarDecl *> ArrayIndexes;
  if (MemberInit->getNumArrayIndices())
    ArrayIndexes = MemberInit->getArrayIndexes();
  CGF.EmitInitializerForField(Field, LHS, MemberInit->getInit(), ArrayIndexes);
}

void CodeGenFunction::EmitInitializerForField(FieldDecl *Field,
                                              LValue LHS, Expr *Init,
                                             ArrayRef<VarDecl *> ArrayIndexes) {
  QualType FieldType = Field->getType();
  if (!hasAggregateLLVMType(FieldType)) {
    if (LHS.isSimple()) {
      EmitExprAsInit(Init, Field, LHS, false);
    } else {
      RValue RHS = RValue::get(EmitScalarExpr(Init));
      EmitStoreThroughLValue(RHS, LHS);
    }
  } else if (FieldType->isAnyComplexType()) {
    EmitComplexExprIntoAddr(Init, LHS.getAddress(), LHS.isVolatileQualified());
  } else {
    llvm::Value *ArrayIndexVar = 0;
    if (ArrayIndexes.size()) {
      llvm::Type *SizeTy = ConvertType(getContext().getSizeType());

      // The LHS is a pointer to the first object we'll be constructing, as
      // a flat array.
      QualType BaseElementTy = getContext().getBaseElementType(FieldType);
      llvm::Type *BasePtr = ConvertType(BaseElementTy);
      BasePtr = llvm::PointerType::getUnqual(BasePtr);
      llvm::Value *BaseAddrPtr = Builder.CreateBitCast(LHS.getAddress(),
                                                       BasePtr);
      LHS = MakeAddrLValue(BaseAddrPtr, BaseElementTy);

      // Create an array index that will be used to walk over all of the
      // objects we're constructing.
      ArrayIndexVar = CreateTempAlloca(SizeTy, "object.index");
      llvm::Value *Zero = llvm::Constant::getNullValue(SizeTy);
      Builder.CreateStore(Zero, ArrayIndexVar);

      // Emit the block variables for the array indices, if any.
      for (unsigned I = 0, N = ArrayIndexes.size(); I != N; ++I)
        EmitAutoVarDecl(*ArrayIndexes[I]);
    }

    EmitAggMemberInitializer(*this, LHS, Init, ArrayIndexVar, FieldType,
                             ArrayIndexes, 0);

    if (!CGM.getLangOpts().Exceptions)
      return;

    // FIXME: If we have an array of classes w/ non-trivial destructors,
    // we need to destroy in reverse order of construction along the exception
    // path.
    const RecordType *RT = FieldType->getAs<RecordType>();
    if (!RT)
      return;

    CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
    if (!RD->hasTrivialDestructor())
      EHStack.pushCleanup<CallMemberDtor>(EHCleanup, LHS.getAddress(),
                                          RD->getDestructor());
  }
}

/// Checks whether the given constructor is a valid subject for the
/// complete-to-base constructor delegation optimization, i.e.
/// emitting the complete constructor as a simple call to the base
/// constructor.
static bool IsConstructorDelegationValid(const CXXConstructorDecl *Ctor) {

  // Currently we disable the optimization for classes with virtual
  // bases because (1) the addresses of parameter variables need to be
  // consistent across all initializers but (2) the delegate function
  // call necessarily creates a second copy of the parameter variable.
  //
  // The limiting example (purely theoretical AFAIK):
  //   struct A { A(int &c) { c++; } };
  //   struct B : virtual A {
  //     B(int count) : A(count) { printf("%d\n", count); }
  //   };
  // ...although even this example could in principle be emitted as a
  // delegation since the address of the parameter doesn't escape.
  if (Ctor->getParent()->getNumVBases()) {
    // TODO: white-list trivial vbase initializers.  This case wouldn't
    // be subject to the restrictions below.

    // TODO: white-list cases where:
    //  - there are no non-reference parameters to the constructor
    //  - the initializers don't access any non-reference parameters
    //  - the initializers don't take the address of non-reference
    //    parameters
    //  - etc.
    // If we ever add any of the above cases, remember that:
    //  - function-try-blocks will always blacklist this optimization
    //  - we need to perform the constructor prologue and cleanup in
    //    EmitConstructorBody.

    return false;
  }

  // We also disable the optimization for variadic functions because
  // it's impossible to "re-pass" varargs.
  if (Ctor->getType()->getAs<FunctionProtoType>()->isVariadic())
    return false;

  // FIXME: Decide if we can do a delegation of a delegating constructor.
  if (Ctor->isDelegatingConstructor())
    return false;

  return true;
}

/// EmitConstructorBody - Emits the body of the current constructor.
void CodeGenFunction::EmitConstructorBody(FunctionArgList &Args) {
  const CXXConstructorDecl *Ctor = cast<CXXConstructorDecl>(CurGD.getDecl());
  CXXCtorType CtorType = CurGD.getCtorType();

  // Before we go any further, try the complete->base constructor
  // delegation optimization.
  if (CtorType == Ctor_Complete && IsConstructorDelegationValid(Ctor) &&
      CGM.getContext().getTargetInfo().getCXXABI() != CXXABI_Microsoft) {
    if (CGDebugInfo *DI = getDebugInfo())
      DI->EmitLocation(Builder, Ctor->getLocEnd());
    EmitDelegateCXXConstructorCall(Ctor, Ctor_Base, Args);
    return;
  }

  Stmt *Body = Ctor->getBody();

  // Enter the function-try-block before the constructor prologue if
  // applicable.
  bool IsTryBody = (Body && isa<CXXTryStmt>(Body));
  if (IsTryBody)
    EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);

  EHScopeStack::stable_iterator CleanupDepth = EHStack.stable_begin();

  // TODO: in restricted cases, we can emit the vbase initializers of
  // a complete ctor and then delegate to the base ctor.

  // Emit the constructor prologue, i.e. the base and member
  // initializers.
  EmitCtorPrologue(Ctor, CtorType, Args);

  // Emit the body of the statement.
  if (IsTryBody)
    EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
  else if (Body)
    EmitStmt(Body);

  // Emit any cleanup blocks associated with the member or base
  // initializers, which includes (along the exceptional path) the
  // destructors for those members and bases that were fully
  // constructed.
  PopCleanupBlocks(CleanupDepth);

  if (IsTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
}

/// EmitCtorPrologue - This routine generates necessary code to initialize
/// base classes and non-static data members belonging to this constructor.
void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD,
                                       CXXCtorType CtorType,
                                       FunctionArgList &Args) {
  if (CD->isDelegatingConstructor())
    return EmitDelegatingCXXConstructorCall(CD, Args);

  const CXXRecordDecl *ClassDecl = CD->getParent();

  SmallVector<CXXCtorInitializer *, 8> MemberInitializers;

  for (CXXConstructorDecl::init_const_iterator B = CD->init_begin(),
       E = CD->init_end();
       B != E; ++B) {
    CXXCtorInitializer *Member = (*B);

    if (Member->isBaseInitializer()) {
      EmitBaseInitializer(*this, ClassDecl, Member, CtorType);
    } else {
      assert(Member->isAnyMemberInitializer() &&
             "Delegating initializer on non-delegating constructor");
      MemberInitializers.push_back(Member);
    }
  }

  InitializeVTablePointers(ClassDecl);

  for (unsigned I = 0, E = MemberInitializers.size(); I != E; ++I)
    EmitMemberInitializer(*this, ClassDecl, MemberInitializers[I], CD, Args);
}

static bool
FieldHasTrivialDestructorBody(ASTContext &Context, const FieldDecl *Field);

static bool
HasTrivialDestructorBody(ASTContext &Context,
                         const CXXRecordDecl *BaseClassDecl,
                         const CXXRecordDecl *MostDerivedClassDecl)
{
  // If the destructor is trivial we don't have to check anything else.
  if (BaseClassDecl->hasTrivialDestructor())
    return true;

  if (!BaseClassDecl->getDestructor()->hasTrivialBody())
    return false;

  // Check fields.
  for (CXXRecordDecl::field_iterator I = BaseClassDecl->field_begin(),
       E = BaseClassDecl->field_end(); I != E; ++I) {
    const FieldDecl *Field = *I;

    if (!FieldHasTrivialDestructorBody(Context, Field))
      return false;
  }

  // Check non-virtual bases.
  for (CXXRecordDecl::base_class_const_iterator I =
       BaseClassDecl->bases_begin(), E = BaseClassDecl->bases_end();
       I != E; ++I) {
    if (I->isVirtual())
      continue;

    const CXXRecordDecl *NonVirtualBase =
      cast<CXXRecordDecl>(I->getType()->castAs<RecordType>()->getDecl());
    if (!HasTrivialDestructorBody(Context, NonVirtualBase,
                                  MostDerivedClassDecl))
      return false;
  }

  if (BaseClassDecl == MostDerivedClassDecl) {
    // Check virtual bases.
    for (CXXRecordDecl::base_class_const_iterator I =
         BaseClassDecl->vbases_begin(), E = BaseClassDecl->vbases_end();
         I != E; ++I) {
      const CXXRecordDecl *VirtualBase =
        cast<CXXRecordDecl>(I->getType()->castAs<RecordType>()->getDecl());
      if (!HasTrivialDestructorBody(Context, VirtualBase,
                                    MostDerivedClassDecl))
        return false;
    }
  }

  return true;
}

static bool
FieldHasTrivialDestructorBody(ASTContext &Context,
                              const FieldDecl *Field)
{
  QualType FieldBaseElementType = Context.getBaseElementType(Field->getType());

  const RecordType *RT = FieldBaseElementType->getAs<RecordType>();
  if (!RT)
    return true;

  CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());
  return HasTrivialDestructorBody(Context, FieldClassDecl, FieldClassDecl);
}

/// CanSkipVTablePointerInitialization - Check whether we need to initialize
/// any vtable pointers before calling this destructor.
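///
/// For example (illustrative only): for "struct S { virtual void f();
/// ~S() {} int x; };" the destructor body is trivial and no field requires
/// destruction, so the vtable pointer stores can be skipped.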
static bool CanSkipVTablePointerInitialization(ASTContext &Context,
                                               const CXXDestructorDecl *Dtor) {
  if (!Dtor->hasTrivialBody())
    return false;

  // Check the fields.
  const CXXRecordDecl *ClassDecl = Dtor->getParent();
  for (CXXRecordDecl::field_iterator I = ClassDecl->field_begin(),
       E = ClassDecl->field_end(); I != E; ++I) {
    const FieldDecl *Field = *I;

    if (!FieldHasTrivialDestructorBody(Context, Field))
      return false;
  }

  return true;
}

/// EmitDestructorBody - Emits the body of the current destructor.
void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) {
  const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CurGD.getDecl());
  CXXDtorType DtorType = CurGD.getDtorType();

  // The call to operator delete in a deleting destructor happens
  // outside of the function-try-block, which means it's always
  // possible to delegate the destructor body to the complete
  // destructor.  Do so.
  if (DtorType == Dtor_Deleting) {
    EnterDtorCleanups(Dtor, Dtor_Deleting);
    EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
                          LoadCXXThis());
    PopCleanupBlock();
    return;
  }

  Stmt *Body = Dtor->getBody();

  // If the body is a function-try-block, enter the try before
  // anything else.
  bool isTryBody = (Body && isa<CXXTryStmt>(Body));
  if (isTryBody)
    EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);

  // Enter the epilogue cleanups.
  RunCleanupsScope DtorEpilogue(*this);

  // If this is the complete variant, just invoke the base variant;
  // the epilogue will destruct the virtual bases.  But we can't do
  // this optimization if the body is a function-try-block, because
  // we'd introduce *two* handler blocks.
  switch (DtorType) {
  case Dtor_Deleting: llvm_unreachable("already handled deleting case");

  case Dtor_Complete:
    // Enter the cleanup scopes for virtual bases.
    EnterDtorCleanups(Dtor, Dtor_Complete);

    if (!isTryBody &&
        CGM.getContext().getTargetInfo().getCXXABI() != CXXABI_Microsoft) {
      EmitCXXDestructorCall(Dtor, Dtor_Base, /*ForVirtualBase=*/false,
                            LoadCXXThis());
      break;
    }
    // Fallthrough: act like we're in the base variant.

  case Dtor_Base:
    // Enter the cleanup scopes for fields and non-virtual bases.
    EnterDtorCleanups(Dtor, Dtor_Base);

    // Initialize the vtable pointers before entering the body.
    if (!CanSkipVTablePointerInitialization(getContext(), Dtor))
      InitializeVTablePointers(Dtor->getParent());

    if (isTryBody)
      EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
    else if (Body)
      EmitStmt(Body);
    else {
      assert(Dtor->isImplicit() && "bodyless dtor not implicit");
      // nothing to do besides what's in the epilogue
    }
    // -fapple-kext must inline any call to this dtor into
    // the caller's body.
    if (getContext().getLangOpts().AppleKext)
      CurFn->addFnAttr(llvm::Attribute::AlwaysInline);
    break;
  }

  // Jump out through the epilogue cleanups.
  DtorEpilogue.ForceCleanup();

  // Exit the try if applicable.
  if (isTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
}

namespace {
  /// Call the operator delete associated with the current destructor.
  struct CallDtorDelete : EHScopeStack::Cleanup {
    CallDtorDelete() {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl);
      const CXXRecordDecl *ClassDecl = Dtor->getParent();
      CGF.EmitDeleteCall(Dtor->getOperatorDelete(), CGF.LoadCXXThis(),
                         CGF.getContext().getTagDeclType(ClassDecl));
    }
  };

  class DestroyField : public EHScopeStack::Cleanup {
    const FieldDecl *field;
    CodeGenFunction::Destroyer *destroyer;
    bool useEHCleanupForArray;

  public:
    DestroyField(const FieldDecl *field, CodeGenFunction::Destroyer *destroyer,
                 bool useEHCleanupForArray)
      : field(field), destroyer(destroyer),
        useEHCleanupForArray(useEHCleanupForArray) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      // Find the address of the field.
      llvm::Value *thisValue = CGF.LoadCXXThis();
      QualType RecordTy = CGF.getContext().getTagDeclType(field->getParent());
      LValue ThisLV = CGF.MakeAddrLValue(thisValue, RecordTy);
      LValue LV = CGF.EmitLValueForField(ThisLV, field);
      assert(LV.isSimple());

      CGF.emitDestroy(LV.getAddress(), field->getType(), destroyer,
                      flags.isForNormalCleanup() && useEHCleanupForArray);
    }
  };
}

/// EnterDtorCleanups - Push the cleanups that run at the end of this
/// destructor's body: destructor calls on members and base classes, in
/// reverse order of their construction.
void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
                                        CXXDtorType DtorType) {
  assert(!DD->isTrivial() &&
         "Should not emit dtor epilogue for trivial dtor!");

  // The deleting-destructor phase just needs to call the appropriate
  // operator delete that Sema picked up.
  if (DtorType == Dtor_Deleting) {
    assert(DD->getOperatorDelete() &&
           "operator delete missing - EmitDtorEpilogue");
    EHStack.pushCleanup<CallDtorDelete>(NormalAndEHCleanup);
    return;
  }

  const CXXRecordDecl *ClassDecl = DD->getParent();

  // Unions have no bases and do not call field destructors.
  if (ClassDecl->isUnion())
    return;

  // The complete-destructor phase just destructs all the virtual bases.
  if (DtorType == Dtor_Complete) {

    // We push them in the forward order so that they'll be popped in
    // the reverse order.
    for (CXXRecordDecl::base_class_const_iterator I =
         ClassDecl->vbases_begin(), E = ClassDecl->vbases_end();
         I != E; ++I) {
      const CXXBaseSpecifier &Base = *I;
      CXXRecordDecl *BaseClassDecl
        = cast<CXXRecordDecl>(Base.getType()->getAs<RecordType>()->getDecl());

      // Ignore trivial destructors.
      if (BaseClassDecl->hasTrivialDestructor())
        continue;

      EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
                                        BaseClassDecl,
                                        /*BaseIsVirtual*/ true);
    }

    return;
  }

  assert(DtorType == Dtor_Base);

  // Destroy non-virtual bases.
  for (CXXRecordDecl::base_class_const_iterator I =
       ClassDecl->bases_begin(), E = ClassDecl->bases_end(); I != E; ++I) {
    const CXXBaseSpecifier &Base = *I;

    // Ignore virtual bases.
    if (Base.isVirtual())
      continue;

    CXXRecordDecl *BaseClassDecl = Base.getType()->getAsCXXRecordDecl();

    // Ignore trivial destructors.
    if (BaseClassDecl->hasTrivialDestructor())
      continue;

    EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
                                      BaseClassDecl,
                                      /*BaseIsVirtual*/ false);
  }

  // Destroy direct fields.
  SmallVector<const FieldDecl *, 16> FieldDecls;
  for (CXXRecordDecl::field_iterator I = ClassDecl->field_begin(),
       E = ClassDecl->field_end(); I != E; ++I) {
    const FieldDecl *field = *I;
    QualType type = field->getType();
    QualType::DestructionKind dtorKind = type.isDestructedType();
    if (!dtorKind) continue;

    // Anonymous union members do not have their destructors called.
    const RecordType *RT = type->getAsUnionType();
    if (RT && RT->getDecl()->isAnonymousStructOrUnion()) continue;

    CleanupKind cleanupKind = getCleanupKind(dtorKind);
    EHStack.pushCleanup<DestroyField>(cleanupKind, field,
                                      getDestroyer(dtorKind),
                                      cleanupKind & EHCleanup);
  }
}

/// EmitCXXAggrConstructorCall - Emit a loop to call a particular
/// constructor for each of several members of an array.
///
/// \param ctor the constructor to call for each element
/// \param argBegin,argEnd the arguments to evaluate and pass to the
///   constructor
/// \param arrayType the type of the array to initialize
/// \param arrayBegin an arrayType*
/// \param zeroInitialize true if each element should be
///   zero-initialized before it is constructed
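///
/// This overload handles arrays whose shape is statically known (e.g. an
/// illustrative "A a[10]"); it decomposes arrayType into an element count
/// and delegates to the overload below.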
void
CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor,
                                            const ConstantArrayType *arrayType,
                                            llvm::Value *arrayBegin,
                                          CallExpr::const_arg_iterator argBegin,
                                            CallExpr::const_arg_iterator argEnd,
                                            bool zeroInitialize) {
  QualType elementType;
  llvm::Value *numElements =
    emitArrayLength(arrayType, elementType, arrayBegin);

  EmitCXXAggrConstructorCall(ctor, numElements, arrayBegin,
                             argBegin, argEnd, zeroInitialize);
}

/// EmitCXXAggrConstructorCall - Emit a loop to call a particular
/// constructor for each of several members of an array.
///
/// \param ctor the constructor to call for each element
/// \param numElements the number of elements in the array;
///   may be zero
/// \param argBegin,argEnd the arguments to evaluate and pass to the
///   constructor
/// \param arrayBegin a T*, where T is the type constructed by ctor
/// \param zeroInitialize true if each element should be
///   zero-initialized before it is constructed
void
CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor,
                                            llvm::Value *numElements,
                                            llvm::Value *arrayBegin,
                                          CallExpr::const_arg_iterator argBegin,
                                            CallExpr::const_arg_iterator argEnd,
                                            bool zeroInitialize) {

  // It's legal for numElements to be zero.  This can happen both
  // dynamically, because x can be zero in 'new A[x]', and statically,
  // because of GCC extensions that permit zero-length arrays.  There
  // are probably legitimate places where we could assume that this
  // doesn't happen, but it's not clear that it's worth it.
  llvm::BranchInst *zeroCheckBranch = 0;

  // Optimize for a constant count.
  llvm::ConstantInt *constantCount
    = dyn_cast<llvm::ConstantInt>(numElements);
  if (constantCount) {
    // Just skip out if the constant count is zero.
    if (constantCount->isZero()) return;

  // Otherwise, emit the check.
  } else {
    llvm::BasicBlock *loopBB = createBasicBlock("new.ctorloop");
    llvm::Value *iszero = Builder.CreateIsNull(numElements, "isempty");
    // Both successors start out as loopBB; the zero successor is patched
    // to the continuation block once it has been created, below.
    zeroCheckBranch = Builder.CreateCondBr(iszero, loopBB, loopBB);
    EmitBlock(loopBB);
  }

  // Find the end of the array.
  llvm::Value *arrayEnd = Builder.CreateInBoundsGEP(arrayBegin, numElements,
                                                    "arrayctor.end");

  // Enter the loop, setting up a phi for the current location to initialize.
  llvm::BasicBlock *entryBB = Builder.GetInsertBlock();
  llvm::BasicBlock *loopBB = createBasicBlock("arrayctor.loop");
  EmitBlock(loopBB);
  llvm::PHINode *cur = Builder.CreatePHI(arrayBegin->getType(), 2,
                                         "arrayctor.cur");
  cur->addIncoming(arrayBegin, entryBB);

  // Inside the loop body, emit the constructor call on the array element.

  QualType type = getContext().getTypeDeclType(ctor->getParent());

  // Zero initialize the storage, if requested.
  if (zeroInitialize)
    EmitNullInitialization(cur, type);

  // C++ [class.temporary]p4:
  // There are two contexts in which temporaries are destroyed at a different
  // point than the end of the full-expression. The first context is when a
  // default constructor is called to initialize an element of an array.
  // If the constructor has one or more default arguments, the destruction of
  // every temporary created in a default argument expression is sequenced
  // before the construction of the next array element, if any.

  {
    RunCleanupsScope Scope(*this);

    // Evaluate the constructor and its arguments in a regular
    // partial-destroy cleanup.
    if (getLangOpts().Exceptions &&
        !ctor->getParent()->hasTrivialDestructor()) {
      Destroyer *destroyer = destroyCXXObject;
      pushRegularPartialArrayCleanup(arrayBegin, cur, type, *destroyer);
    }

    EmitCXXConstructorCall(ctor, Ctor_Complete, /*ForVirtualBase=*/ false,
                           cur, argBegin, argEnd);
  }

  // Go to the next element.
  llvm::Value *next =
    Builder.CreateInBoundsGEP(cur, llvm::ConstantInt::get(SizeTy, 1),
                              "arrayctor.next");
  cur->addIncoming(next, Builder.GetInsertBlock());

  // Check whether that's the end of the loop.
  llvm::Value *done = Builder.CreateICmpEQ(next, arrayEnd, "arrayctor.done");
  llvm::BasicBlock *contBB = createBasicBlock("arrayctor.cont");
  Builder.CreateCondBr(done, contBB, loopBB);

  // Patch the earlier check to skip over the loop.
  if (zeroCheckBranch) zeroCheckBranch->setSuccessor(0, contBB);

  EmitBlock(contBB);
}

void CodeGenFunction::destroyCXXObject(CodeGenFunction &CGF,
                                       llvm::Value *addr,
                                       QualType type) {
  const RecordType *rtype = type->castAs<RecordType>();
  const CXXRecordDecl *record = cast<CXXRecordDecl>(rtype->getDecl());
  const CXXDestructorDecl *dtor = record->getDestructor();
  assert(!dtor->isTrivial());
  CGF.EmitCXXDestructorCall(dtor, Dtor_Complete, /*for vbase*/ false,
                            addr);
}

void
CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
                                        CXXCtorType Type, bool ForVirtualBase,
                                        llvm::Value *This,
                                        CallExpr::const_arg_iterator ArgBeg,
                                        CallExpr::const_arg_iterator ArgEnd) {

  CGDebugInfo *DI = getDebugInfo();
  if (DI &&
      CGM.getCodeGenOpts().DebugInfo == CodeGenOptions::LimitedDebugInfo) {
    // If debug info for this class has not been emitted then this is the
    // right time to do so.
    const CXXRecordDecl *Parent = D->getParent();
    DI->getOrCreateRecordType(CGM.getContext().getTypeDeclType(Parent),
                              Parent->getLocation());
  }

  if (D->isTrivial()) {
    if (ArgBeg == ArgEnd) {
      // Trivial default constructor, no codegen required.
      assert(D->isDefaultConstructor() &&
             "trivial 0-arg ctor not a default ctor");
      return;
    }

    assert(ArgBeg + 1 == ArgEnd && "unexpected argcount for trivial ctor");
    assert(D->isCopyOrMoveConstructor() &&
           "trivial 1-arg ctor not a copy/move ctor");

    const Expr *E = (*ArgBeg);
    QualType Ty = E->getType();
    llvm::Value *Src = EmitLValue(E).getAddress();
    EmitAggregateCopy(This, Src, Ty);
    return;
  }

  llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(D, Type),
                                     ForVirtualBase);
  llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(D, Type);

  EmitCXXMemberCall(D, Callee, ReturnValueSlot(), This, VTT, ArgBeg, ArgEnd);
}

void
CodeGenFunction::EmitSynthesizedCXXCopyCtorCall(const CXXConstructorDecl *D,
                                        llvm::Value *This, llvm::Value *Src,
                                        CallExpr::const_arg_iterator ArgBeg,
                                        CallExpr::const_arg_iterator ArgEnd) {
  if (D->isTrivial()) {
    assert(ArgBeg + 1 == ArgEnd && "unexpected argcount for trivial ctor");
    assert(D->isCopyOrMoveConstructor() &&
           "trivial 1-arg ctor not a copy/move ctor");
    EmitAggregateCopy(This, Src, (*ArgBeg)->getType());
    return;
  }
  llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(D,
                                                    clang::Ctor_Complete);
  assert(D->isInstance() &&
         "Trying to emit a member call expr on a static method!");

  const FunctionProtoType *FPT = D->getType()->getAs<FunctionProtoType>();

  CallArgList Args;

  // Push the this ptr.
  Args.add(RValue::get(This), D->getThisType(getContext()));

  // Push the src ptr.
  QualType QT = *(FPT->arg_type_begin());
  llvm::Type *t = CGM.getTypes().ConvertType(QT);
  Src = Builder.CreateBitCast(Src, t);
  Args.add(RValue::get(Src), QT);

  // Skip over first argument (Src).
  ++ArgBeg;
  CallExpr::const_arg_iterator Arg = ArgBeg;
  for (FunctionProtoType::arg_type_iterator I = FPT->arg_type_begin()+1,
       E = FPT->arg_type_end(); I != E; ++I, ++Arg) {
    assert(Arg != ArgEnd && "Running over edge of argument list!");
    EmitCallArg(Args, *Arg, *I);
  }
  // Either we've emitted all the call args, or we have a call to a
  // variadic function.
  assert((Arg == ArgEnd || FPT->isVariadic()) &&
         "Extra arguments in non-variadic function!");
  // If we still have any arguments, emit them using the type of the argument.
  for (; Arg != ArgEnd; ++Arg) {
    QualType ArgType = Arg->getType();
    EmitCallArg(Args, *Arg, ArgType);
  }

  EmitCall(CGM.getTypes().arrangeCXXMethodCall(Args, FPT, RequiredArgs::All),
           Callee, ReturnValueSlot(), Args, D);
}

void
CodeGenFunction::EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                CXXCtorType CtorType,
                                                const FunctionArgList &Args) {
  CallArgList DelegateArgs;

  FunctionArgList::const_iterator I = Args.begin(), E = Args.end();
  assert(I != E && "no parameters to constructor");

  // this
  DelegateArgs.add(RValue::get(LoadCXXThis()), (*I)->getType());
  ++I;

  // vtt
  if (llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(Ctor, CtorType),
                                         /*ForVirtualBase=*/false)) {
    QualType VoidPP = getContext().getPointerType(getContext().VoidPtrTy);
    DelegateArgs.add(RValue::get(VTT), VoidPP);

    if (CodeGenVTables::needsVTTParameter(CurGD)) {
      assert(I != E && "cannot skip vtt parameter, already done with args");
      assert((*I)->getType() == VoidPP && "skipping parameter not of vtt type");
      ++I;
    }
  }

  // Explicit arguments.
  for (; I != E; ++I) {
    const VarDecl *param = *I;
    EmitDelegateCallArg(DelegateArgs, param);
  }

  EmitCall(CGM.getTypes().arrangeCXXConstructorDeclaration(Ctor, CtorType),
           CGM.GetAddrOfCXXConstructor(Ctor, CtorType),
           ReturnValueSlot(), DelegateArgs, Ctor);
}

namespace {
  struct CallDelegatingCtorDtor : EHScopeStack::Cleanup {
    const CXXDestructorDecl *Dtor;
    llvm::Value *Addr;
    CXXDtorType Type;

    CallDelegatingCtorDtor(const CXXDestructorDecl *D, llvm::Value *Addr,
                           CXXDtorType Type)
      : Dtor(D), Addr(Addr), Type(Type) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      CGF.EmitCXXDestructorCall(Dtor, Type, /*ForVirtualBase=*/false,
                                Addr);
    }
  };
}

void
CodeGenFunction::EmitDelegatingCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                  const FunctionArgList &Args) {
  assert(Ctor->isDelegatingConstructor());

  llvm::Value *ThisPtr = LoadCXXThis();

  QualType Ty = getContext().getTagDeclType(Ctor->getParent());
  CharUnits Alignment = getContext().getTypeAlignInChars(Ty);
  AggValueSlot AggSlot =
    AggValueSlot::forAddr(ThisPtr, Alignment, Qualifiers(),
                          AggValueSlot::IsDestructed,
                          AggValueSlot::DoesNotNeedGCBarriers,
                          AggValueSlot::IsNotAliased);

  EmitAggExpr(Ctor->init_begin()[0]->getInit(), AggSlot);

  const CXXRecordDecl *ClassDecl = Ctor->getParent();
  if (CGM.getLangOpts().Exceptions && !ClassDecl->hasTrivialDestructor()) {
    CXXDtorType Type =
      CurGD.getCtorType() == Ctor_Complete ? Dtor_Complete : Dtor_Base;

    EHStack.pushCleanup<CallDelegatingCtorDtor>(EHCleanup,
                                                ClassDecl->getDestructor(),
                                                ThisPtr, Type);
  }
}

void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *DD,
                                            CXXDtorType Type,
                                            bool ForVirtualBase,
                                            llvm::Value *This) {
  llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(DD, Type),
                                     ForVirtualBase);
  llvm::Value *Callee = 0;
  if (getContext().getLangOpts().AppleKext)
    Callee = BuildAppleKextVirtualDestructorCall(DD, Type,
                                                 DD->getParent());

  if (!Callee)
    Callee = CGM.GetAddrOfCXXDestructor(DD, Type);

  EmitCXXMemberCall(DD, Callee, ReturnValueSlot(), This, VTT, 0, 0);
}

namespace {
  struct CallLocalDtor : EHScopeStack::Cleanup {
    const CXXDestructorDecl *Dtor;
    llvm::Value *Addr;

    CallLocalDtor(const CXXDestructorDecl *D, llvm::Value *Addr)
      : Dtor(D), Addr(Addr) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete,
                                /*ForVirtualBase=*/false, Addr);
    }
  };
}

void CodeGenFunction::PushDestructorCleanup(const CXXDestructorDecl *D,
                                            llvm::Value *Addr) {
  EHStack.pushCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr);
}

void CodeGenFunction::PushDestructorCleanup(QualType T, llvm::Value *Addr) {
  CXXRecordDecl *ClassDecl = T->getAsCXXRecordDecl();
  if (!ClassDecl) return;
  if (ClassDecl->hasTrivialDestructor()) return;

  const CXXDestructorDecl *D = ClassDecl->getDestructor();
  assert(D && D->isUsed() && "destructor not marked as used!");
  PushDestructorCleanup(D, Addr);
}

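/// Returns the dynamic offset of the given virtual base within the object
/// *This.  Under the Itanium C++ ABI this is read out of the object's
/// vtable: vbase offsets are stored at negative offsets from the address
/// point, which is why VBaseOffsetOffset is typically negative.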
llvm::Value *
CodeGenFunction::GetVirtualBaseClassOffset(llvm::Value *This,
                                           const CXXRecordDecl *ClassDecl,
                                           const CXXRecordDecl *BaseClassDecl) {
  llvm::Value *VTablePtr = GetVTablePtr(This, Int8PtrTy);
  CharUnits VBaseOffsetOffset =
    CGM.getVTableContext().getVirtualBaseOffsetOffset(ClassDecl, BaseClassDecl);

  llvm::Value *VBaseOffsetPtr =
    Builder.CreateConstGEP1_64(VTablePtr, VBaseOffsetOffset.getQuantity(),
                               "vbase.offset.ptr");
  llvm::Type *PtrDiffTy =
    ConvertType(getContext().getPointerDiffType());

  VBaseOffsetPtr = Builder.CreateBitCast(VBaseOffsetPtr,
                                         PtrDiffTy->getPointerTo());

  llvm::Value *VBaseOffset = Builder.CreateLoad(VBaseOffsetPtr, "vbase.offset");

  return VBaseOffset;
}

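// While a base-class subobject of a class with virtual bases is being
// constructed, its vptrs must point at construction vtables, which are
// reached through the VTT parameter. E.g. given
//
//   struct B : virtual A { B(); };
//   struct C : B { C(); };
//
// B's base-object constructor running inside C() installs the B-in-C
// construction vtable from C's VTT rather than B's own vtable.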
void
CodeGenFunction::InitializeVTablePointer(BaseSubobject Base,
                                         const CXXRecordDecl *NearestVBase,
                                         CharUnits OffsetFromNearestVBase,
                                         llvm::Constant *VTable,
                                         const CXXRecordDecl *VTableClass) {
  const CXXRecordDecl *RD = Base.getBase();

  // Compute the address point.
  llvm::Value *VTableAddressPoint;

  // Check if we need to use a vtable from the VTT.
  if (CodeGenVTables::needsVTTParameter(CurGD) &&
      (RD->getNumVBases() || NearestVBase)) {
    // Get the secondary vpointer index.
    uint64_t VirtualPointerIndex =
      CGM.getVTables().getSecondaryVirtualPointerIndex(VTableClass, Base);

    // Load the VTT.
    llvm::Value *VTT = LoadCXXVTT();
    if (VirtualPointerIndex)
      VTT = Builder.CreateConstInBoundsGEP1_64(VTT, VirtualPointerIndex);

    // And load the address point from the VTT.
    VTableAddressPoint = Builder.CreateLoad(VTT);
  } else {
    uint64_t AddressPoint =
      CGM.getVTableContext().getVTableLayout(VTableClass).getAddressPoint(Base);
    VTableAddressPoint =
      Builder.CreateConstInBoundsGEP2_64(VTable, 0, AddressPoint);
  }

  // Compute where to store the address point.
  llvm::Value *VirtualOffset = 0;
  CharUnits NonVirtualOffset = CharUnits::Zero();

  if (CodeGenVTables::needsVTTParameter(CurGD) && NearestVBase) {
    // We need to use the virtual base offset offset (the offset of this
    // virtual base's offset entry within the vtable) because the virtual base
    // might be at a different offset in the most derived class.
    VirtualOffset = GetVirtualBaseClassOffset(LoadCXXThis(), VTableClass,
                                              NearestVBase);
    NonVirtualOffset = OffsetFromNearestVBase;
  } else {
    // We can just use the base offset in the complete class.
    NonVirtualOffset = Base.getBaseOffset();
  }

  // Apply the offsets.
  llvm::Value *VTableField = LoadCXXThis();

  if (!NonVirtualOffset.isZero() || VirtualOffset)
    VTableField = ApplyNonVirtualAndVirtualOffset(*this, VTableField,
                                                  NonVirtualOffset,
                                                  VirtualOffset);

  // Finally, store the address point.
  llvm::Type *AddressPointPtrTy =
    VTableAddressPoint->getType()->getPointerTo();
  VTableField = Builder.CreateBitCast(VTableField, AddressPointPtrTy);
  llvm::StoreInst *Store = Builder.CreateStore(VTableAddressPoint, VTableField);
  CGM.DecorateInstruction(Store, CGM.getTBAAInfoForVTablePtr());
}

void
CodeGenFunction::InitializeVTablePointers(BaseSubobject Base,
                                          const CXXRecordDecl *NearestVBase,
                                          CharUnits OffsetFromNearestVBase,
                                          bool BaseIsNonVirtualPrimaryBase,
                                          llvm::Constant *VTable,
                                          const CXXRecordDecl *VTableClass,
                                          VisitedVirtualBasesSetTy& VBases) {
  // If this base is a non-virtual primary base, the address point has already
  // been set.
  if (!BaseIsNonVirtualPrimaryBase) {
    // Initialize the vtable pointer for this base.
    InitializeVTablePointer(Base, NearestVBase, OffsetFromNearestVBase,
                            VTable, VTableClass);
  }

  const CXXRecordDecl *RD = Base.getBase();

  // Traverse bases.
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I) {
    CXXRecordDecl *BaseDecl
      = cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

    // Ignore classes without a vtable.
    if (!BaseDecl->isDynamicClass())
      continue;

    CharUnits BaseOffset;
    CharUnits BaseOffsetFromNearestVBase;
    bool BaseDeclIsNonVirtualPrimaryBase;

    if (I->isVirtual()) {
      // Check if we've visited this virtual base before.
      if (!VBases.insert(BaseDecl))
        continue;

      const ASTRecordLayout &Layout =
        getContext().getASTRecordLayout(VTableClass);

      BaseOffset = Layout.getVBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase = CharUnits::Zero();
      BaseDeclIsNonVirtualPrimaryBase = false;
    } else {
      const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);

      BaseOffset = Base.getBaseOffset() + Layout.getBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase =
        OffsetFromNearestVBase + Layout.getBaseClassOffset(BaseDecl);
      BaseDeclIsNonVirtualPrimaryBase = Layout.getPrimaryBase() == BaseDecl;
    }

    InitializeVTablePointers(BaseSubobject(BaseDecl, BaseOffset),
                             I->isVirtual() ? BaseDecl : NearestVBase,
                             BaseOffsetFromNearestVBase,
                             BaseDeclIsNonVirtualPrimaryBase,
                             VTable, VTableClass, VBases);
  }
}

void CodeGenFunction::InitializeVTablePointers(const CXXRecordDecl *RD) {
  // Ignore classes without a vtable.
  if (!RD->isDynamicClass())
    return;

  // Get the VTable.
  llvm::Constant *VTable = CGM.getVTables().GetAddrOfVTable(RD);

  // Initialize the vtable pointers for this class and all of its bases.
  VisitedVirtualBasesSetTy VBases;
  InitializeVTablePointers(BaseSubobject(RD, CharUnits::Zero()),
                           /*NearestVBase=*/0,
                           /*OffsetFromNearestVBase=*/CharUnits::Zero(),
                           /*BaseIsNonVirtualPrimaryBase=*/false,
                           VTable, RD, VBases);
}

llvm::Value *CodeGenFunction::GetVTablePtr(llvm::Value *This,
                                           llvm::Type *Ty) {
  llvm::Value *VTablePtrSrc = Builder.CreateBitCast(This, Ty->getPointerTo());
  llvm::Instruction *VTable = Builder.CreateLoad(VTablePtrSrc, "vtable");
  CGM.DecorateInstruction(VTable, CGM.getTBAAInfoForVTablePtr());
  return VTable;
}

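// Strip derived-to-base conversions to find the most derived class an
// expression refers to. E.g. in
//
//   struct B : A { };
//   void g(B *b) { static_cast<A*>(b)->f(); }
//
// the most derived class of the call's base expression is B, not A, which
// lets the 'final'-based checks below apply to B.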
static const CXXRecordDecl *getMostDerivedClassDecl(const Expr *Base) {
  const Expr *E = Base;

  while (true) {
    E = E->IgnoreParens();
    if (const CastExpr *CE = dyn_cast<CastExpr>(E)) {
      if (CE->getCastKind() == CK_DerivedToBase ||
          CE->getCastKind() == CK_UncheckedDerivedToBase ||
          CE->getCastKind() == CK_NoOp) {
        E = CE->getSubExpr();
        continue;
      }
    }

    break;
  }

  QualType DerivedType = E->getType();
  if (const PointerType *PTy = DerivedType->getAs<PointerType>())
    DerivedType = PTy->getPointeeType();

  return cast<CXXRecordDecl>(DerivedType->castAs<RecordType>()->getDecl());
}

// FIXME: Ideally Expr::IgnoreParenNoopCasts should do this, but it doesn't do
// quite what we want.
static const Expr *skipNoOpCastsAndParens(const Expr *E) {
  while (true) {
    if (const ParenExpr *PE = dyn_cast<ParenExpr>(E)) {
      E = PE->getSubExpr();
      continue;
    }

    if (const CastExpr *CE = dyn_cast<CastExpr>(E)) {
      if (CE->getCastKind() == CK_NoOp) {
        E = CE->getSubExpr();
        continue;
      }
    }
    if (const UnaryOperator *UO = dyn_cast<UnaryOperator>(E)) {
      if (UO->getOpcode() == UO_Extension) {
        E = UO->getSubExpr();
        continue;
      }
    }
    return E;
  }
}

/// canDevirtualizeMemberFunctionCall - Checks whether the given virtual member
/// function call on the given expr can be devirtualized.
static bool canDevirtualizeMemberFunctionCall(const Expr *Base,
                                              const CXXMethodDecl *MD) {
  // If the most derived class is marked final, we know that no subclass can
  // override this member function and so we can devirtualize it. For example:
  //
  //   struct A { virtual void f(); };
  //   struct B final : A { };
  //
  //   void f(B *b) {
  //     b->f();
  //   }
  //
  const CXXRecordDecl *MostDerivedClassDecl = getMostDerivedClassDecl(Base);
  if (MostDerivedClassDecl->hasAttr<FinalAttr>())
    return true;

  // If the member function itself is marked 'final', we know that it can't be
  // overridden, so we can devirtualize calls to it.
  if (MD->hasAttr<FinalAttr>())
    return true;

  // Similarly, if the method's class is marked 'final', none of its virtual
  // functions can be overridden in a subclass, so we can devirtualize the
  // member function call.
  if (MD->getParent()->hasAttr<FinalAttr>())
    return true;

  Base = skipNoOpCastsAndParens(Base);
  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Base)) {
    if (const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl())) {
      // If the variable itself has record type (not a pointer or reference),
      // its dynamic type is known exactly and we can devirtualize the call.
      return VD->getType()->isRecordType();
    }

    return false;
  }

  // We can always devirtualize calls on temporary object expressions.
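  // E.g. the dynamic type of 'A().f()' is known to be exactly A.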
  if (isa<CXXConstructExpr>(Base))
    return true;

  // And calls on bound temporaries.
  if (isa<CXXBindTemporaryExpr>(Base))
    return true;

  // Check if this is a call expr that returns a record type.
  if (const CallExpr *CE = dyn_cast<CallExpr>(Base))
    return CE->getCallReturnType()->isRecordType();

  // We can't devirtualize the call.
  return false;
}

static bool UseVirtualCall(ASTContext &Context,
                           const CXXOperatorCallExpr *CE,
                           const CXXMethodDecl *MD) {
  if (!MD->isVirtual())
    return false;

  // When building with -fapple-kext, all calls must go through the vtable,
  // since the kernel linker can do runtime patching of vtables.
  if (Context.getLangOpts().AppleKext)
    return true;

  return !canDevirtualizeMemberFunctionCall(CE->getArg(0), MD);
}

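// Compute the callee for an overloaded-operator call whose operator is a
// member function, e.g. 'a < b' where operator< is a (possibly virtual)
// member of a's class.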
llvm::Value *
CodeGenFunction::EmitCXXOperatorMemberCallee(const CXXOperatorCallExpr *E,
                                             const CXXMethodDecl *MD,
                                             llvm::Value *This) {
  llvm::FunctionType *fnType =
    CGM.getTypes().GetFunctionType(
      CGM.getTypes().arrangeCXXMethodDeclaration(MD));

  if (UseVirtualCall(getContext(), E, MD))
    return BuildVirtualCall(MD, This, fnType);

  return CGM.GetAddrOfFunction(MD, fnType);
}

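// Forward an already-built argument list to a lambda's operator(). This is
// shared by the block invoke function and the lambda's static invoker (see
// EmitLambdaBlockInvokeBody and EmitLambdaDelegatingInvokeBody below).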
void CodeGenFunction::EmitForwardingCallToLambda(const CXXRecordDecl *lambda,
                                                 CallArgList &callArgs) {
  // Look up the call operator.
  DeclarationName operatorName
    = getContext().DeclarationNames.getCXXOperatorName(OO_Call);
  CXXMethodDecl *callOperator =
    cast<CXXMethodDecl>(*lambda->lookup(operatorName).first);

  // Get the address of the call operator.
  const CGFunctionInfo &calleeFnInfo =
    CGM.getTypes().arrangeCXXMethodDeclaration(callOperator);
  llvm::Value *callee =
    CGM.GetAddrOfFunction(GlobalDecl(callOperator),
                          CGM.getTypes().GetFunctionType(calleeFnInfo));

  // Prepare the return slot.
  const FunctionProtoType *FPT =
    callOperator->getType()->castAs<FunctionProtoType>();
  QualType resultType = FPT->getResultType();
  ReturnValueSlot returnSlot;
  if (!resultType->isVoidType() &&
      calleeFnInfo.getReturnInfo().getKind() == ABIArgInfo::Indirect &&
      hasAggregateLLVMType(calleeFnInfo.getReturnType()))
    returnSlot = ReturnValueSlot(ReturnValue, resultType.isVolatileQualified());

  // We don't need to separately arrange the call arguments because
  // the call can't be variadic anyway --- it's impossible to forward
  // variadic arguments.

  // Now emit our call.
  RValue RV = EmitCall(calleeFnInfo, callee, returnSlot,
                       callArgs, callOperator);

  // If necessary, copy the returned value into the slot.
  if (!resultType->isVoidType() && returnSlot.isNull())
    EmitReturnOfRValue(RV, resultType);
}

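// Emit the body of the invoke function created when a lambda is converted to
// an Objective-C block, e.g. (illustrative):
//
//   void (^blk)(int) = [=](int x) { /* ... */ };
//
// The block captures the lambda object itself, and the invoke function
// forwards its parameters to the lambda's operator().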
void CodeGenFunction::EmitLambdaBlockInvokeBody() {
  const BlockDecl *BD = BlockInfo->getBlockDecl();
  const VarDecl *variable = BD->capture_begin()->getVariable();
  const CXXRecordDecl *Lambda = variable->getType()->getAsCXXRecordDecl();

  // Start building arguments for the forwarding call.
  CallArgList CallArgs;

  QualType ThisType =
    getContext().getPointerType(getContext().getRecordType(Lambda));
  llvm::Value *ThisPtr = GetAddrOfBlockDecl(variable, false);
  CallArgs.add(RValue::get(ThisPtr), ThisType);

  // Add the rest of the parameters.
  for (BlockDecl::param_const_iterator I = BD->param_begin(),
       E = BD->param_end(); I != E; ++I) {
    ParmVarDecl *param = *I;
    EmitDelegateCallArg(CallArgs, param);
  }

  EmitForwardingCallToLambda(Lambda, CallArgs);
}

void CodeGenFunction::EmitLambdaToBlockPointerBody(FunctionArgList &Args) {
  if (cast<CXXMethodDecl>(CurFuncDecl)->isVariadic()) {
    // FIXME: Making this work correctly is nasty because it requires either
    // cloning the body of the call operator or making the call operator
    // forward.
    CGM.ErrorUnsupported(CurFuncDecl, "lambda conversion to variadic function");
    return;
  }

  EmitFunctionBody(Args);
}

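// Emit the body of a lambda's static invoker by delegating to its call
// operator. The invoker exists only for captureless lambdas, whose operator()
// never uses its 'this' pointer, which is why an undef 'this' can be passed
// below.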
void CodeGenFunction::EmitLambdaDelegatingInvokeBody(const CXXMethodDecl *MD) {
  const CXXRecordDecl *Lambda = MD->getParent();

  // Start building arguments for the forwarding call.
  CallArgList CallArgs;

  QualType ThisType =
    getContext().getPointerType(getContext().getRecordType(Lambda));
  llvm::Value *ThisPtr = llvm::UndefValue::get(getTypes().ConvertType(ThisType));
  CallArgs.add(RValue::get(ThisPtr), ThisType);

  // Add the rest of the parameters.
  for (FunctionDecl::param_const_iterator I = MD->param_begin(),
       E = MD->param_end(); I != E; ++I) {
    ParmVarDecl *param = *I;
    EmitDelegateCallArg(CallArgs, param);
  }

  EmitForwardingCallToLambda(Lambda, CallArgs);
}

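// Emit the static invoke function itself, i.e. the function whose address is
// returned when a captureless lambda is converted to a function pointer, e.g.:
//
//   int (*fp)(int) = [](int x) { return x + 1; };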
void CodeGenFunction::EmitLambdaStaticInvokeFunction(const CXXMethodDecl *MD) {
  if (MD->isVariadic()) {
    // FIXME: Making this work correctly is nasty because it requires either
    // cloning the body of the call operator or making the call operator
    // forward.
    CGM.ErrorUnsupported(MD, "lambda conversion to variadic function");
    return;
  }

  EmitLambdaDelegatingInvokeBody(MD);
}