//===--- CGExprConstant.cpp - Emit LLVM Code from Constant Expressions ----===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code to emit Constant Expr nodes as LLVM code.
//
//===----------------------------------------------------------------------===//

#include "CodeGenFunction.h"
#include "CGCXXABI.h"
#include "CGObjCRuntime.h"
#include "CGRecordLayout.h"
#include "CodeGenModule.h"
#include "clang/AST/APValue.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/RecordLayout.h"
#include "clang/AST/StmtVisitor.h"
#include "clang/Basic/Builtins.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalVariable.h"
using namespace clang;
using namespace CodeGen;

//===----------------------------------------------------------------------===//
// ConstStructBuilder
//===----------------------------------------------------------------------===//

namespace {
class ConstStructBuilder {
  CodeGenModule &CGM;
  CodeGenFunction *CGF;

  bool Packed;
  CharUnits NextFieldOffsetInChars;
  CharUnits LLVMStructAlignment;
  SmallVector<llvm::Constant *, 32> Elements;
public:
  static llvm::Constant *BuildStruct(CodeGenModule &CGM, CodeGenFunction *CGF,
                                     InitListExpr *ILE);
  static llvm::Constant *BuildStruct(CodeGenModule &CGM, CodeGenFunction *CGF,
                                     const APValue &Value, QualType ValTy);

private:
  ConstStructBuilder(CodeGenModule &CGM, CodeGenFunction *CGF)
    : CGM(CGM), CGF(CGF), Packed(false),
      NextFieldOffsetInChars(CharUnits::Zero()),
      LLVMStructAlignment(CharUnits::One()) { }

  void AppendField(const FieldDecl *Field, uint64_t FieldOffset,
                   llvm::Constant *InitExpr);

  void AppendBytes(CharUnits FieldOffsetInChars, llvm::Constant *InitCst);

  void AppendBitField(const FieldDecl *Field, uint64_t FieldOffset,
                      llvm::ConstantInt *InitExpr);

  void AppendPadding(CharUnits PadSize);

  void AppendTailPadding(CharUnits RecordSize);

  void ConvertStructToPacked();

  bool Build(InitListExpr *ILE);
  void Build(const APValue &Val, const RecordDecl *RD, bool IsPrimaryBase,
             const CXXRecordDecl *VTableClass, CharUnits BaseOffset);
  llvm::Constant *Finalize(QualType Ty);

  CharUnits getAlignment(const llvm::Constant *C) const {
    if (Packed) return CharUnits::One();
    return CharUnits::fromQuantity(
        CGM.getDataLayout().getABITypeAlignment(C->getType()));
  }

  CharUnits getSizeInChars(const llvm::Constant *C) const {
    return CharUnits::fromQuantity(
        CGM.getDataLayout().getTypeAllocSize(C->getType()));
  }
};

void ConstStructBuilder::
AppendField(const FieldDecl *Field, uint64_t FieldOffset,
            llvm::Constant *InitCst) {
  const ASTContext &Context = CGM.getContext();

  CharUnits FieldOffsetInChars = Context.toCharUnitsFromBits(FieldOffset);

  AppendBytes(FieldOffsetInChars, InitCst);
}

void ConstStructBuilder::
AppendBytes(CharUnits FieldOffsetInChars, llvm::Constant *InitCst) {

  assert(NextFieldOffsetInChars <= FieldOffsetInChars
         && "Field offset mismatch!");

  CharUnits FieldAlignment = getAlignment(InitCst);

  // Round up the field offset to the alignment of the field type.
  CharUnits AlignedNextFieldOffsetInChars =
      NextFieldOffsetInChars.RoundUpToAlignment(FieldAlignment);

  if (AlignedNextFieldOffsetInChars < FieldOffsetInChars) {
    // We need to append padding.
    AppendPadding(FieldOffsetInChars - NextFieldOffsetInChars);

    assert(NextFieldOffsetInChars == FieldOffsetInChars &&
           "Did not add enough padding!");

    AlignedNextFieldOffsetInChars =
        NextFieldOffsetInChars.RoundUpToAlignment(FieldAlignment);
  }

  if (AlignedNextFieldOffsetInChars > FieldOffsetInChars) {
    assert(!Packed && "Alignment is wrong even with a packed struct!");

    // Convert the struct to a packed struct.
    ConvertStructToPacked();

    // After we pack the struct, we may need to insert padding.
    if (NextFieldOffsetInChars < FieldOffsetInChars) {
      // We need to append padding.
      AppendPadding(FieldOffsetInChars - NextFieldOffsetInChars);

      assert(NextFieldOffsetInChars == FieldOffsetInChars &&
             "Did not add enough padding!");
    }
    AlignedNextFieldOffsetInChars = NextFieldOffsetInChars;
  }

  // Add the field.
  Elements.push_back(InitCst);
  NextFieldOffsetInChars = AlignedNextFieldOffsetInChars +
                           getSizeInChars(InitCst);

  if (Packed)
    assert(LLVMStructAlignment == CharUnits::One() &&
           "Packed struct not byte-aligned!");
  else
    LLVMStructAlignment = std::max(LLVMStructAlignment, FieldAlignment);
}

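// Append a constant bitfield value. Bits that start mid-byte are merged into
// the last emitted byte; any remaining bits are then emitted one char at a
// time. For example, given
//   struct S { unsigned a : 3; unsigned b : 13; };
// the first byte holds 'a' together with the low (or, on big-endian targets,
// high) bits of 'b'.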
void ConstStructBuilder::AppendBitField(const FieldDecl *Field,
                                        uint64_t FieldOffset,
                                        llvm::ConstantInt *CI) {
  const ASTContext &Context = CGM.getContext();
  const uint64_t CharWidth = Context.getCharWidth();
  uint64_t NextFieldOffsetInBits = Context.toBits(NextFieldOffsetInChars);
  if (FieldOffset > NextFieldOffsetInBits) {
    // We need to add padding.
    CharUnits PadSize = Context.toCharUnitsFromBits(
        llvm::RoundUpToAlignment(FieldOffset - NextFieldOffsetInBits,
                                 Context.getTargetInfo().getCharAlign()));

    AppendPadding(PadSize);
  }

  uint64_t FieldSize = Field->getBitWidthValue(Context);

  llvm::APInt FieldValue = CI->getValue();

  // Promote the size of FieldValue if necessary
  // FIXME: This should never occur, but currently it can because initializer
  // constants are cast to bool, and because clang is not enforcing bitfield
  // width limits.
  if (FieldSize > FieldValue.getBitWidth())
    FieldValue = FieldValue.zext(FieldSize);

  // Truncate the size of FieldValue to the bit field size.
  if (FieldSize < FieldValue.getBitWidth())
    FieldValue = FieldValue.trunc(FieldSize);

  NextFieldOffsetInBits = Context.toBits(NextFieldOffsetInChars);
  if (FieldOffset < NextFieldOffsetInBits) {
    // Either part of the field or the entire field can go into the previous
    // byte.
    assert(!Elements.empty() && "Elements can't be empty!");

    unsigned BitsInPreviousByte = NextFieldOffsetInBits - FieldOffset;

    bool FitsCompletelyInPreviousByte =
        BitsInPreviousByte >= FieldValue.getBitWidth();

    llvm::APInt Tmp = FieldValue;

    if (!FitsCompletelyInPreviousByte) {
      unsigned NewFieldWidth = FieldSize - BitsInPreviousByte;

      if (CGM.getDataLayout().isBigEndian()) {
        Tmp = Tmp.lshr(NewFieldWidth);
        Tmp = Tmp.trunc(BitsInPreviousByte);

        // We want the remaining high bits.
        FieldValue = FieldValue.trunc(NewFieldWidth);
      } else {
        Tmp = Tmp.trunc(BitsInPreviousByte);

        // We want the remaining low bits.
        FieldValue = FieldValue.lshr(BitsInPreviousByte);
        FieldValue = FieldValue.trunc(NewFieldWidth);
      }
    }

    Tmp = Tmp.zext(CharWidth);
    if (CGM.getDataLayout().isBigEndian()) {
      if (FitsCompletelyInPreviousByte)
        Tmp = Tmp.shl(BitsInPreviousByte - FieldValue.getBitWidth());
    } else {
      Tmp = Tmp.shl(CharWidth - BitsInPreviousByte);
    }

    // 'or' in the bits that go into the previous byte.
    llvm::Value *LastElt = Elements.back();
    if (llvm::ConstantInt *Val = dyn_cast<llvm::ConstantInt>(LastElt))
      Tmp |= Val->getValue();
    else {
      assert(isa<llvm::UndefValue>(LastElt));
      // If there is an undef field that we're adding to, it can either be a
      // scalar undef (in which case, we just replace it with our field) or it
      // is an array. If it is an array, we have to pull one byte off the
      // array so that the other undef bytes stay around.
      if (!isa<llvm::IntegerType>(LastElt->getType())) {
        // The undef padding will be a multibyte array; create a new smaller
        // padding and then a hole for our i8 to get plopped into.
        assert(isa<llvm::ArrayType>(LastElt->getType()) &&
               "Expected array padding of undefs");
        llvm::ArrayType *AT = cast<llvm::ArrayType>(LastElt->getType());
        assert(AT->getElementType()->isIntegerTy(CharWidth) &&
               AT->getNumElements() != 0 &&
               "Expected non-empty array padding of undefs");

        // Remove the padding array.
        NextFieldOffsetInChars -= CharUnits::fromQuantity(AT->getNumElements());
        Elements.pop_back();

        // Add the padding back in two chunks.
        AppendPadding(CharUnits::fromQuantity(AT->getNumElements()-1));
        AppendPadding(CharUnits::One());
        assert(isa<llvm::UndefValue>(Elements.back()) &&
               Elements.back()->getType()->isIntegerTy(CharWidth) &&
               "Padding addition didn't work right");
      }
    }

    Elements.back() = llvm::ConstantInt::get(CGM.getLLVMContext(), Tmp);

    if (FitsCompletelyInPreviousByte)
      return;
  }

  while (FieldValue.getBitWidth() > CharWidth) {
    llvm::APInt Tmp;

    if (CGM.getDataLayout().isBigEndian()) {
      // We want the high bits.
      Tmp =
        FieldValue.lshr(FieldValue.getBitWidth() - CharWidth).trunc(CharWidth);
    } else {
      // We want the low bits.
      Tmp = FieldValue.trunc(CharWidth);

      FieldValue = FieldValue.lshr(CharWidth);
    }

    Elements.push_back(llvm::ConstantInt::get(CGM.getLLVMContext(), Tmp));
    ++NextFieldOffsetInChars;

    FieldValue = FieldValue.trunc(FieldValue.getBitWidth() - CharWidth);
  }

  assert(FieldValue.getBitWidth() > 0 &&
         "Should have at least one bit left!");
  assert(FieldValue.getBitWidth() <= CharWidth &&
         "Should not have more than a byte left!");

  if (FieldValue.getBitWidth() < CharWidth) {
    if (CGM.getDataLayout().isBigEndian()) {
      unsigned BitWidth = FieldValue.getBitWidth();

      FieldValue = FieldValue.zext(CharWidth) << (CharWidth - BitWidth);
    } else
      FieldValue = FieldValue.zext(CharWidth);
  }

  // Append the last element.
  Elements.push_back(llvm::ConstantInt::get(CGM.getLLVMContext(),
                                            FieldValue));
  ++NextFieldOffsetInChars;
}

void ConstStructBuilder::AppendPadding(CharUnits PadSize) {
  if (PadSize.isZero())
    return;

  llvm::Type *Ty = CGM.Int8Ty;
  if (PadSize > CharUnits::One())
    Ty = llvm::ArrayType::get(Ty, PadSize.getQuantity());

  llvm::Constant *C = llvm::UndefValue::get(Ty);
  Elements.push_back(C);
  assert(getAlignment(C) == CharUnits::One() &&
         "Padding must have 1 byte alignment!");

  NextFieldOffsetInChars += getSizeInChars(C);
}

void ConstStructBuilder::AppendTailPadding(CharUnits RecordSize) {
  assert(NextFieldOffsetInChars <= RecordSize &&
         "Size mismatch!");

  AppendPadding(RecordSize - NextFieldOffsetInChars);
}

void ConstStructBuilder::ConvertStructToPacked() {
  SmallVector<llvm::Constant *, 16> PackedElements;
  CharUnits ElementOffsetInChars = CharUnits::Zero();

  for (unsigned i = 0, e = Elements.size(); i != e; ++i) {
    llvm::Constant *C = Elements[i];

    CharUnits ElementAlign = CharUnits::fromQuantity(
        CGM.getDataLayout().getABITypeAlignment(C->getType()));
    CharUnits AlignedElementOffsetInChars =
        ElementOffsetInChars.RoundUpToAlignment(ElementAlign);

    if (AlignedElementOffsetInChars > ElementOffsetInChars) {
      // We need some padding.
      CharUnits NumChars =
          AlignedElementOffsetInChars - ElementOffsetInChars;

      llvm::Type *Ty = CGM.Int8Ty;
      if (NumChars > CharUnits::One())
        Ty = llvm::ArrayType::get(Ty, NumChars.getQuantity());

      llvm::Constant *Padding = llvm::UndefValue::get(Ty);
      PackedElements.push_back(Padding);
      ElementOffsetInChars += getSizeInChars(Padding);
    }

    PackedElements.push_back(C);
    ElementOffsetInChars += getSizeInChars(C);
  }

  assert(ElementOffsetInChars == NextFieldOffsetInChars &&
         "Packing the struct changed its size!");

  Elements.swap(PackedElements);
  LLVMStructAlignment = CharUnits::One();
  Packed = true;
}

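// Build a constant struct from an initializer list, e.g.
//   struct Point { int x, y; } p = {1};
// Fields with no explicit initializer ('y' here) get an explicit null value;
// anonymous bitfields are skipped since they only affect layout.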
bool ConstStructBuilder::Build(InitListExpr *ILE) {
  RecordDecl *RD = ILE->getType()->getAs<RecordType>()->getDecl();
  const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);

  unsigned FieldNo = 0;
  unsigned ElementNo = 0;

  for (RecordDecl::field_iterator Field = RD->field_begin(),
       FieldEnd = RD->field_end(); Field != FieldEnd; ++Field, ++FieldNo) {
    // If this is a union, skip all the fields that aren't being initialized.
    if (RD->isUnion() && ILE->getInitializedFieldInUnion() != *Field)
      continue;

    // Don't emit anonymous bitfields, they just affect layout.
    if (Field->isUnnamedBitfield())
      continue;

    // Get the initializer. A struct can include fields without initializers,
    // we just use explicit null values for them.
    llvm::Constant *EltInit;
    if (ElementNo < ILE->getNumInits())
      EltInit = CGM.EmitConstantExpr(ILE->getInit(ElementNo++),
                                     Field->getType(), CGF);
    else
      EltInit = CGM.EmitNullConstant(Field->getType());

    if (!EltInit)
      return false;

    if (!Field->isBitField()) {
      // Handle non-bitfield members.
      AppendField(*Field, Layout.getFieldOffset(FieldNo), EltInit);
    } else {
      // Otherwise we have a bitfield.
      if (auto *CI = dyn_cast<llvm::ConstantInt>(EltInit)) {
        AppendBitField(*Field, Layout.getFieldOffset(FieldNo), CI);
      } else {
        // We are trying to initialize a bitfield with a non-trivial constant,
        // this must require run-time code.
        return false;
      }
    }
  }

  return true;
}

namespace {
struct BaseInfo {
  BaseInfo(const CXXRecordDecl *Decl, CharUnits Offset, unsigned Index)
    : Decl(Decl), Offset(Offset), Index(Index) {
  }

  const CXXRecordDecl *Decl;
  CharUnits Offset;
  unsigned Index;

  bool operator<(const BaseInfo &O) const { return Offset < O.Offset; }
};
}

void ConstStructBuilder::Build(const APValue &Val, const RecordDecl *RD,
                               bool IsPrimaryBase,
                               const CXXRecordDecl *VTableClass,
                               CharUnits Offset) {
  const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);

  if (const CXXRecordDecl *CD = dyn_cast<CXXRecordDecl>(RD)) {
    // Add a vtable pointer, if we need one and it hasn't already been added.
    if (CD->isDynamicClass() && !IsPrimaryBase) {
      llvm::Constant *VTableAddressPoint =
          CGM.getCXXABI().getVTableAddressPointForConstExpr(
              BaseSubobject(CD, Offset), VTableClass);
      AppendBytes(Offset, VTableAddressPoint);
    }

    // Accumulate and sort bases, in order to visit them in address order,
    // which may not be the same as declaration order.
    SmallVector<BaseInfo, 8> Bases;
    Bases.reserve(CD->getNumBases());
    unsigned BaseNo = 0;
    for (CXXRecordDecl::base_class_const_iterator Base = CD->bases_begin(),
         BaseEnd = CD->bases_end(); Base != BaseEnd; ++Base, ++BaseNo) {
      assert(!Base->isVirtual() && "should not have virtual bases here");
      const CXXRecordDecl *BD = Base->getType()->getAsCXXRecordDecl();
      CharUnits BaseOffset = Layout.getBaseClassOffset(BD);
      Bases.push_back(BaseInfo(BD, BaseOffset, BaseNo));
    }
    std::stable_sort(Bases.begin(), Bases.end());

    for (unsigned I = 0, N = Bases.size(); I != N; ++I) {
      BaseInfo &Base = Bases[I];

      bool IsPrimaryBase = Layout.getPrimaryBase() == Base.Decl;
      Build(Val.getStructBase(Base.Index), Base.Decl, IsPrimaryBase,
            VTableClass, Offset + Base.Offset);
    }
  }

  unsigned FieldNo = 0;
  uint64_t OffsetBits = CGM.getContext().toBits(Offset);

  for (RecordDecl::field_iterator Field = RD->field_begin(),
       FieldEnd = RD->field_end(); Field != FieldEnd; ++Field, ++FieldNo) {
    // If this is a union, skip all the fields that aren't being initialized.
    if (RD->isUnion() && Val.getUnionField() != *Field)
      continue;

    // Don't emit anonymous bitfields, they just affect layout.
    if (Field->isUnnamedBitfield())
      continue;

    // Emit the value of the initializer.
    const APValue &FieldValue =
        RD->isUnion() ? Val.getUnionValue() : Val.getStructField(FieldNo);
    llvm::Constant *EltInit =
        CGM.EmitConstantValueForMemory(FieldValue, Field->getType(), CGF);
    assert(EltInit && "EmitConstantValue can't fail");

    if (!Field->isBitField()) {
      // Handle non-bitfield members.
      AppendField(*Field, Layout.getFieldOffset(FieldNo) + OffsetBits, EltInit);
    } else {
      // Otherwise we have a bitfield.
      AppendBitField(*Field, Layout.getFieldOffset(FieldNo) + OffsetBits,
                     cast<llvm::ConstantInt>(EltInit));
    }
  }
}

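// Complete the constant: append any tail padding required to reach the record
// size and, if the naturally aligned LLVM layout would end up larger than the
// AST layout, fall back to a packed struct.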
llvm::Constant *ConstStructBuilder::Finalize(QualType Ty) {
  RecordDecl *RD = Ty->getAs<RecordType>()->getDecl();
  const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);

  CharUnits LayoutSizeInChars = Layout.getSize();

  if (NextFieldOffsetInChars > LayoutSizeInChars) {
    // If the struct is bigger than the size of the record type,
    // we must have a flexible array member at the end.
    assert(RD->hasFlexibleArrayMember() &&
           "Must have flexible array member if struct is bigger than type!");

    // No tail padding is necessary.
  } else {
    // Append tail padding if necessary.
    CharUnits LLVMSizeInChars =
        NextFieldOffsetInChars.RoundUpToAlignment(LLVMStructAlignment);

    if (LLVMSizeInChars != LayoutSizeInChars)
      AppendTailPadding(LayoutSizeInChars);

    LLVMSizeInChars =
        NextFieldOffsetInChars.RoundUpToAlignment(LLVMStructAlignment);

    // Check if we need to convert the struct to a packed struct.
    if (NextFieldOffsetInChars <= LayoutSizeInChars &&
        LLVMSizeInChars > LayoutSizeInChars) {
      assert(!Packed && "Size mismatch!");

      ConvertStructToPacked();
      assert(NextFieldOffsetInChars <= LayoutSizeInChars &&
             "Converting to packed did not help!");
    }

    LLVMSizeInChars =
        NextFieldOffsetInChars.RoundUpToAlignment(LLVMStructAlignment);

    assert(LayoutSizeInChars == LLVMSizeInChars &&
           "Tail padding mismatch!");
  }

  // Pick the type to use. If the type is layout identical to the ConvertType
  // type then use it, otherwise use whatever the builder produced for us.
  llvm::StructType *STy =
      llvm::ConstantStruct::getTypeForElements(CGM.getLLVMContext(),
                                               Elements, Packed);
  llvm::Type *ValTy = CGM.getTypes().ConvertType(Ty);
  if (llvm::StructType *ValSTy = dyn_cast<llvm::StructType>(ValTy)) {
    if (ValSTy->isLayoutIdentical(STy))
      STy = ValSTy;
  }

  llvm::Constant *Result = llvm::ConstantStruct::get(STy, Elements);

  assert(NextFieldOffsetInChars.RoundUpToAlignment(getAlignment(Result)) ==
         getSizeInChars(Result) && "Size mismatch!");

  return Result;
}

llvm::Constant *ConstStructBuilder::BuildStruct(CodeGenModule &CGM,
                                                CodeGenFunction *CGF,
                                                InitListExpr *ILE) {
  ConstStructBuilder Builder(CGM, CGF);

  if (!Builder.Build(ILE))
    return nullptr;

  return Builder.Finalize(ILE->getType());
}

llvm::Constant *ConstStructBuilder::BuildStruct(CodeGenModule &CGM,
                                                CodeGenFunction *CGF,
                                                const APValue &Val,
                                                QualType ValTy) {
  ConstStructBuilder Builder(CGM, CGF);

  const RecordDecl *RD = ValTy->castAs<RecordType>()->getDecl();
  const CXXRecordDecl *CD = dyn_cast<CXXRecordDecl>(RD);
  Builder.Build(Val, RD, false, CD, CharUnits::Zero());

  return Builder.Finalize(ValTy);
}


//===----------------------------------------------------------------------===//
// ConstExprEmitter
//===----------------------------------------------------------------------===//

/// This class only needs to handle two cases:
/// 1) Literals (this is used by APValue emission to emit literals).
/// 2) Arrays, structs and unions (outside C++11 mode, we don't currently
///    constant fold these types).
class ConstExprEmitter :
  public StmtVisitor<ConstExprEmitter, llvm::Constant*> {
  CodeGenModule &CGM;
  CodeGenFunction *CGF;
  llvm::LLVMContext &VMContext;
public:
  ConstExprEmitter(CodeGenModule &cgm, CodeGenFunction *cgf)
    : CGM(cgm), CGF(cgf), VMContext(cgm.getLLVMContext()) {
  }

  //===--------------------------------------------------------------------===//
  // Visitor Methods
  //===--------------------------------------------------------------------===//

  llvm::Constant *VisitStmt(Stmt *S) {
    return nullptr;
  }

  llvm::Constant *VisitParenExpr(ParenExpr *PE) {
    return Visit(PE->getSubExpr());
  }

  llvm::Constant *
  VisitSubstNonTypeTemplateParmExpr(SubstNonTypeTemplateParmExpr *PE) {
    return Visit(PE->getReplacement());
  }

  llvm::Constant *VisitGenericSelectionExpr(GenericSelectionExpr *GE) {
    return Visit(GE->getResultExpr());
  }

  llvm::Constant *VisitChooseExpr(ChooseExpr *CE) {
    return Visit(CE->getChosenSubExpr());
  }

  llvm::Constant *VisitCompoundLiteralExpr(CompoundLiteralExpr *E) {
    return Visit(E->getInitializer());
  }

  llvm::Constant *VisitCastExpr(CastExpr* E) {
    Expr *subExpr = E->getSubExpr();
    llvm::Constant *C = CGM.EmitConstantExpr(subExpr, subExpr->getType(), CGF);
    if (!C) return nullptr;

    llvm::Type *destType = ConvertType(E->getType());

    switch (E->getCastKind()) {
    case CK_ToUnion: {
      // GCC cast to union extension
      assert(E->getType()->isUnionType() &&
             "Destination type is not union type!");

      // Build a struct with the union sub-element as the first member,
      // and padded to the appropriate size
      SmallVector<llvm::Constant*, 2> Elts;
      SmallVector<llvm::Type*, 2> Types;
      Elts.push_back(C);
      Types.push_back(C->getType());
      unsigned CurSize = CGM.getDataLayout().getTypeAllocSize(C->getType());
      unsigned TotalSize = CGM.getDataLayout().getTypeAllocSize(destType);

      assert(CurSize <= TotalSize && "Union size mismatch!");
      if (unsigned NumPadBytes = TotalSize - CurSize) {
        llvm::Type *Ty = CGM.Int8Ty;
        if (NumPadBytes > 1)
          Ty = llvm::ArrayType::get(Ty, NumPadBytes);

        Elts.push_back(llvm::UndefValue::get(Ty));
        Types.push_back(Ty);
      }

      llvm::StructType* STy =
          llvm::StructType::get(C->getType()->getContext(), Types, false);
      return llvm::ConstantStruct::get(STy, Elts);
    }

    case CK_AddressSpaceConversion:
      return llvm::ConstantExpr::getAddrSpaceCast(C, destType);

    case CK_LValueToRValue:
    case CK_AtomicToNonAtomic:
    case CK_NonAtomicToAtomic:
    case CK_NoOp:
    case CK_ConstructorConversion:
      return C;

    case CK_Dependent: llvm_unreachable("saw dependent cast!");

    case CK_BuiltinFnToFnPtr:
      llvm_unreachable("builtin functions are handled elsewhere");

    case CK_ReinterpretMemberPointer:
    case CK_DerivedToBaseMemberPointer:
    case CK_BaseToDerivedMemberPointer:
      return CGM.getCXXABI().EmitMemberPointerConversion(E, C);

    // These will never be supported.
    case CK_ObjCObjectLValueCast:
    case CK_ARCProduceObject:
    case CK_ARCConsumeObject:
    case CK_ARCReclaimReturnedObject:
    case CK_ARCExtendBlockObject:
    case CK_CopyAndAutoreleaseBlockObject:
      return nullptr;

    // These don't need to be handled here because Evaluate knows how to
    // evaluate them in the cases where they can be folded.
    case CK_BitCast:
    case CK_ToVoid:
    case CK_Dynamic:
    case CK_LValueBitCast:
    case CK_NullToMemberPointer:
    case CK_UserDefinedConversion:
    case CK_CPointerToObjCPointerCast:
    case CK_BlockPointerToObjCPointerCast:
    case CK_AnyPointerToBlockPointerCast:
    case CK_ArrayToPointerDecay:
    case CK_FunctionToPointerDecay:
    case CK_BaseToDerived:
    case CK_DerivedToBase:
    case CK_UncheckedDerivedToBase:
    case CK_MemberPointerToBoolean:
    case CK_VectorSplat:
    case CK_FloatingRealToComplex:
    case CK_FloatingComplexToReal:
    case CK_FloatingComplexToBoolean:
    case CK_FloatingComplexCast:
    case CK_FloatingComplexToIntegralComplex:
    case CK_IntegralRealToComplex:
    case CK_IntegralComplexToReal:
    case CK_IntegralComplexToBoolean:
    case CK_IntegralComplexCast:
    case CK_IntegralComplexToFloatingComplex:
    case CK_PointerToIntegral:
    case CK_PointerToBoolean:
    case CK_NullToPointer:
    case CK_IntegralCast:
    case CK_IntegralToPointer:
    case CK_IntegralToBoolean:
    case CK_IntegralToFloating:
    case CK_FloatingToIntegral:
    case CK_FloatingToBoolean:
    case CK_FloatingCast:
    case CK_ZeroToOCLEvent:
      return nullptr;
    }
    llvm_unreachable("Invalid CastKind");
  }

  llvm::Constant *VisitCXXDefaultArgExpr(CXXDefaultArgExpr *DAE) {
    return Visit(DAE->getExpr());
  }

  llvm::Constant *VisitCXXDefaultInitExpr(CXXDefaultInitExpr *DIE) {
    // No need for a DefaultInitExprScope: we don't handle 'this' in a
    // constant expression.
    return Visit(DIE->getExpr());
  }

  llvm::Constant *VisitMaterializeTemporaryExpr(MaterializeTemporaryExpr *E) {
    return Visit(E->GetTemporaryExpr());
  }

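  // Emit a constant array from an initializer list, e.g.
  //   int a[4] = {1, 2};
  // Elements with no explicit initializer are filled with the array filler
  // (a null constant here); if everything is zero, the whole array collapses
  // to a ConstantAggregateZero.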
  llvm::Constant *EmitArrayInitialization(InitListExpr *ILE) {
    if (ILE->isStringLiteralInit())
      return Visit(ILE->getInit(0));

    llvm::ArrayType *AType =
        cast<llvm::ArrayType>(ConvertType(ILE->getType()));
    llvm::Type *ElemTy = AType->getElementType();
    unsigned NumInitElements = ILE->getNumInits();
    unsigned NumElements = AType->getNumElements();

    // Initialising an array requires us to automatically
    // initialise any elements that have not been initialised explicitly.
    unsigned NumInitableElts = std::min(NumInitElements, NumElements);

    // Initialize remaining array elements.
    // FIXME: This doesn't handle member pointers correctly!
    llvm::Constant *fillC;
    if (Expr *filler = ILE->getArrayFiller())
      fillC = CGM.EmitConstantExpr(filler, filler->getType(), CGF);
    else
      fillC = llvm::Constant::getNullValue(ElemTy);
    if (!fillC)
      return nullptr;

    // Try to use a ConstantAggregateZero if we can.
    if (fillC->isNullValue() && !NumInitableElts)
      return llvm::ConstantAggregateZero::get(AType);

    // Copy initializer elements.
    std::vector<llvm::Constant*> Elts;
    Elts.reserve(NumInitableElts + NumElements);

    bool RewriteType = false;
    for (unsigned i = 0; i < NumInitableElts; ++i) {
      Expr *Init = ILE->getInit(i);
      llvm::Constant *C = CGM.EmitConstantExpr(Init, Init->getType(), CGF);
      if (!C)
        return nullptr;
      RewriteType |= (C->getType() != ElemTy);
      Elts.push_back(C);
    }

    RewriteType |= (fillC->getType() != ElemTy);
    Elts.resize(NumElements, fillC);

    if (RewriteType) {
      // FIXME: Try to avoid packing the array
      std::vector<llvm::Type*> Types;
      Types.reserve(NumInitableElts + NumElements);
      for (unsigned i = 0, e = Elts.size(); i < e; ++i)
        Types.push_back(Elts[i]->getType());
      llvm::StructType *SType = llvm::StructType::get(AType->getContext(),
                                                      Types, true);
      return llvm::ConstantStruct::get(SType, Elts);
    }

    return llvm::ConstantArray::get(AType, Elts);
  }

  llvm::Constant *EmitRecordInitialization(InitListExpr *ILE) {
    return ConstStructBuilder::BuildStruct(CGM, CGF, ILE);
  }

  llvm::Constant *VisitImplicitValueInitExpr(ImplicitValueInitExpr* E) {
    return CGM.EmitNullConstant(E->getType());
  }

  llvm::Constant *VisitInitListExpr(InitListExpr *ILE) {
    if (ILE->getType()->isArrayType())
      return EmitArrayInitialization(ILE);

    if (ILE->getType()->isRecordType())
      return EmitRecordInitialization(ILE);

    return nullptr;
  }

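  // Handle trivial construction as a constant, e.g.
  //   S s(other);  // trivial copy ctor: emit the constant value of 'other'
  //   S s;         // trivial default ctor: emit a null constant
  // Anything with a non-trivial constructor or destructor is left to
  // run-time code.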
  llvm::Constant *VisitCXXConstructExpr(CXXConstructExpr *E) {
    if (!E->getConstructor()->isTrivial())
      return nullptr;

    QualType Ty = E->getType();

    // FIXME: We should not have to call getBaseElementType here.
    const RecordType *RT =
        CGM.getContext().getBaseElementType(Ty)->getAs<RecordType>();
    const CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());

    // If the class doesn't have a trivial destructor, we can't emit it as a
    // constant expr.
    if (!RD->hasTrivialDestructor())
      return nullptr;

    // Only copy and default constructors can be trivial.

    if (E->getNumArgs()) {
      assert(E->getNumArgs() == 1 && "trivial ctor with > 1 argument");
      assert(E->getConstructor()->isCopyOrMoveConstructor() &&
             "trivial ctor has argument but isn't a copy/move ctor");

      Expr *Arg = E->getArg(0);
      assert(CGM.getContext().hasSameUnqualifiedType(Ty, Arg->getType()) &&
             "argument to copy ctor is of wrong type");

      return Visit(Arg);
    }

    return CGM.EmitNullConstant(Ty);
  }

  llvm::Constant *VisitStringLiteral(StringLiteral *E) {
    return CGM.GetConstantArrayFromStringLiteral(E);
  }

  llvm::Constant *VisitObjCEncodeExpr(ObjCEncodeExpr *E) {
    // This must be an @encode initializing an array in a static initializer.
    // Don't emit it as the address of the string, emit the string data itself
    // as an inline array.
    std::string Str;
    CGM.getContext().getObjCEncodingForType(E->getEncodedType(), Str);
    QualType T = E->getType();
    if (T->getTypeClass() == Type::TypeOfExpr)
      T = cast<TypeOfExprType>(T)->getUnderlyingExpr()->getType();
    const ConstantArrayType *CAT = cast<ConstantArrayType>(T);

    // Resize the string to the right size, adding zeros at the end, or
    // truncating as needed.
    Str.resize(CAT->getSize().getZExtValue(), '\0');
    return llvm::ConstantDataArray::getString(VMContext, Str, false);
  }

  llvm::Constant *VisitUnaryExtension(const UnaryOperator *E) {
    return Visit(E->getSubExpr());
  }

  // Utility methods
  llvm::Type *ConvertType(QualType T) {
    return CGM.getTypes().ConvertType(T);
  }

public:
  llvm::Constant *EmitLValue(APValue::LValueBase LVBase) {
    if (const ValueDecl *Decl = LVBase.dyn_cast<const ValueDecl*>()) {
      if (Decl->hasAttr<WeakRefAttr>())
        return CGM.GetWeakRefReference(Decl);
      if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(Decl))
        return CGM.GetAddrOfFunction(FD);
      if (const VarDecl* VD = dyn_cast<VarDecl>(Decl)) {
        // We can never refer to a variable with local storage.
        if (!VD->hasLocalStorage()) {
          if (VD->isFileVarDecl() || VD->hasExternalStorage())
            return CGM.GetAddrOfGlobalVar(VD);
          else if (VD->isLocalVarDecl())
            return CGM.getOrCreateStaticVarDecl(
                *VD, CGM.getLLVMLinkageVarDefinition(VD, /*isConstant=*/false));
        }
      }
      return nullptr;
    }

    Expr *E = const_cast<Expr*>(LVBase.get<const Expr*>());
    switch (E->getStmtClass()) {
    default: break;
    case Expr::CompoundLiteralExprClass: {
      // Note that due to the nature of compound literals, this is guaranteed
      // to be the only use of the variable, so we just generate it here.
      CompoundLiteralExpr *CLE = cast<CompoundLiteralExpr>(E);
      llvm::Constant* C = CGM.EmitConstantExpr(CLE->getInitializer(),
                                               CLE->getType(), CGF);
      // FIXME: "Leaked" on failure.
      if (C)
        C = new llvm::GlobalVariable(CGM.getModule(), C->getType(),
                                     E->getType().isConstant(CGM.getContext()),
                                     llvm::GlobalValue::InternalLinkage,
                                     C, ".compoundliteral", nullptr,
                                     llvm::GlobalVariable::NotThreadLocal,
                          CGM.getContext().getTargetAddressSpace(E->getType()));
      return C;
    }
    case Expr::StringLiteralClass:
      return CGM.GetAddrOfConstantStringFromLiteral(cast<StringLiteral>(E));
    case Expr::ObjCEncodeExprClass:
      return CGM.GetAddrOfConstantStringFromObjCEncode(cast<ObjCEncodeExpr>(E));
    case Expr::ObjCStringLiteralClass: {
      ObjCStringLiteral* SL = cast<ObjCStringLiteral>(E);
      llvm::Constant *C =
          CGM.getObjCRuntime().GenerateConstantString(SL->getString());
      return llvm::ConstantExpr::getBitCast(C, ConvertType(E->getType()));
    }
    case Expr::PredefinedExprClass: {
      unsigned Type = cast<PredefinedExpr>(E)->getIdentType();
      if (CGF) {
        LValue Res = CGF->EmitPredefinedLValue(cast<PredefinedExpr>(E));
        return cast<llvm::Constant>(Res.getAddress());
      } else if (Type == PredefinedExpr::PrettyFunction) {
        return CGM.GetAddrOfConstantCString("top level", ".tmp");
      }

      return CGM.GetAddrOfConstantCString("", ".tmp");
    }
    case Expr::AddrLabelExprClass: {
      assert(CGF && "Invalid address of label expression outside function.");
      llvm::Constant *Ptr =
          CGF->GetAddrOfLabel(cast<AddrLabelExpr>(E)->getLabel());
      return llvm::ConstantExpr::getBitCast(Ptr, ConvertType(E->getType()));
    }
    case Expr::CallExprClass: {
      CallExpr* CE = cast<CallExpr>(E);
      unsigned builtin = CE->getBuiltinCallee();
      if (builtin != Builtin::BI__builtin___CFStringMakeConstantString &&
          builtin != Builtin::BI__builtin___NSStringMakeConstantString)
        break;
      const Expr *Arg = CE->getArg(0)->IgnoreParenCasts();
      const StringLiteral *Literal = cast<StringLiteral>(Arg);
      if (builtin == Builtin::BI__builtin___NSStringMakeConstantString) {
        return CGM.getObjCRuntime().GenerateConstantString(Literal);
      }
      // FIXME: need to deal with UCN conversion issues.
      return CGM.GetAddrOfConstantCFString(Literal);
    }
    case Expr::BlockExprClass: {
      std::string FunctionName;
      if (CGF)
        FunctionName = CGF->CurFn->getName();
      else
        FunctionName = "global";

      return CGM.GetAddrOfGlobalBlock(cast<BlockExpr>(E), FunctionName.c_str());
    }
    case Expr::CXXTypeidExprClass: {
      CXXTypeidExpr *Typeid = cast<CXXTypeidExpr>(E);
      QualType T;
      if (Typeid->isTypeOperand())
        T = Typeid->getTypeOperand(CGM.getContext());
      else
        T = Typeid->getExprOperand()->getType();
      return CGM.GetAddrOfRTTIDescriptor(T);
    }
    case Expr::CXXUuidofExprClass: {
      return CGM.GetAddrOfUuidDescriptor(cast<CXXUuidofExpr>(E));
    }
    case Expr::MaterializeTemporaryExprClass: {
      MaterializeTemporaryExpr *MTE = cast<MaterializeTemporaryExpr>(E);
      assert(MTE->getStorageDuration() == SD_Static);
      SmallVector<const Expr *, 2> CommaLHSs;
      SmallVector<SubobjectAdjustment, 2> Adjustments;
      const Expr *Inner = MTE->GetTemporaryExpr()
          ->skipRValueSubobjectAdjustments(CommaLHSs, Adjustments);
      return CGM.GetAddrOfGlobalTemporary(MTE, Inner);
    }
    }

    return nullptr;
  }
};

} // end anonymous namespace.

llvm::Constant *CodeGenModule::EmitConstantInit(const VarDecl &D,
                                                CodeGenFunction *CGF) {
  // Make a quick check if variable can be default NULL initialized
  // and avoid going through rest of code which may do, for c++11,
  // initialization of memory to all NULLs.
  if (!D.hasLocalStorage()) {
    QualType Ty = D.getType();
    if (Ty->isArrayType())
      Ty = Context.getBaseElementType(Ty);
    if (Ty->isRecordType())
      if (const CXXConstructExpr *E =
              dyn_cast_or_null<CXXConstructExpr>(D.getInit())) {
        const CXXConstructorDecl *CD = E->getConstructor();
        if (CD->isTrivial() && CD->isDefaultConstructor())
          return EmitNullConstant(D.getType());
      }
  }

  if (const APValue *Value = D.evaluateValue())
    return EmitConstantValueForMemory(*Value, D.getType(), CGF);

  // FIXME: Implement C++11 [basic.start.init]p2: if the initializer of a
  // reference is a constant expression, and the reference binds to a temporary,
  // then constant initialization is performed. ConstExprEmitter will
  // incorrectly emit a prvalue constant in this case, and the calling code
  // interprets that as the (pointer) value of the reference, rather than the
  // desired value of the referee.
  if (D.getType()->isReferenceType())
    return nullptr;

  const Expr *E = D.getInit();
  assert(E && "No initializer to emit");

  llvm::Constant* C = ConstExprEmitter(*this, CGF).Visit(const_cast<Expr*>(E));
  if (C && C->getType()->isIntegerTy(1)) {
    llvm::Type *BoolTy = getTypes().ConvertTypeForMem(E->getType());
    C = llvm::ConstantExpr::getZExt(C, BoolTy);
  }
  return C;
}

llvm::Constant *CodeGenModule::EmitConstantExpr(const Expr *E,
                                                QualType DestType,
                                                CodeGenFunction *CGF) {
  Expr::EvalResult Result;

  bool Success = false;

  if (DestType->isReferenceType())
    Success = E->EvaluateAsLValue(Result, Context);
  else
    Success = E->EvaluateAsRValue(Result, Context);

  llvm::Constant *C = nullptr;
  if (Success && !Result.HasSideEffects)
    C = EmitConstantValue(Result.Val, DestType, CGF);
  else
    C = ConstExprEmitter(*this, CGF).Visit(const_cast<Expr*>(E));

  if (C && C->getType()->isIntegerTy(1)) {
    llvm::Type *BoolTy = getTypes().ConvertTypeForMem(E->getType());
    C = llvm::ConstantExpr::getZExt(C, BoolTy);
  }
  return C;
}

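// Emit an llvm::Constant for an already-evaluated APValue of type DestType.
// _Atomic types may need extra tail padding beyond the underlying value type.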
llvm::Constant *CodeGenModule::EmitConstantValue(const APValue &Value,
                                                 QualType DestType,
                                                 CodeGenFunction *CGF) {
  // For an _Atomic-qualified constant, we may need to add tail padding.
  if (auto *AT = DestType->getAs<AtomicType>()) {
    QualType InnerType = AT->getValueType();
    auto *Inner = EmitConstantValue(Value, InnerType, CGF);

    uint64_t InnerSize = Context.getTypeSize(InnerType);
    uint64_t OuterSize = Context.getTypeSize(DestType);
    if (InnerSize == OuterSize)
      return Inner;

    assert(InnerSize < OuterSize && "emitted over-large constant for atomic");
    llvm::Constant *Elts[] = {
      Inner,
      llvm::ConstantAggregateZero::get(
          llvm::ArrayType::get(Int8Ty, (OuterSize - InnerSize) / 8))
    };
    return llvm::ConstantStruct::getAnon(Elts);
  }

  switch (Value.getKind()) {
  case APValue::Uninitialized:
    llvm_unreachable("Constant expressions should be initialized.");
  case APValue::LValue: {
    llvm::Type *DestTy = getTypes().ConvertTypeForMem(DestType);
    llvm::Constant *Offset =
        llvm::ConstantInt::get(Int64Ty, Value.getLValueOffset().getQuantity());

    llvm::Constant *C;
    if (APValue::LValueBase LVBase = Value.getLValueBase()) {
      // An array can be represented as an lvalue referring to the base.
      if (isa<llvm::ArrayType>(DestTy)) {
        assert(Offset->isNullValue() && "offset on array initializer");
        return ConstExprEmitter(*this, CGF).Visit(
            const_cast<Expr*>(LVBase.get<const Expr*>()));
      }

      C = ConstExprEmitter(*this, CGF).EmitLValue(LVBase);

      // Apply offset if necessary.
      if (!Offset->isNullValue()) {
        unsigned AS = C->getType()->getPointerAddressSpace();
        llvm::Type *CharPtrTy = Int8Ty->getPointerTo(AS);
        llvm::Constant *Casted = llvm::ConstantExpr::getBitCast(C, CharPtrTy);
        Casted = llvm::ConstantExpr::getGetElementPtr(Int8Ty, Casted, Offset);
        C = llvm::ConstantExpr::getPointerCast(Casted, C->getType());
      }

      // Convert to the appropriate type; this could be an lvalue for
      // an integer.
      if (isa<llvm::PointerType>(DestTy))
        return llvm::ConstantExpr::getPointerCast(C, DestTy);

      return llvm::ConstantExpr::getPtrToInt(C, DestTy);
    } else {
      C = Offset;

      // Convert to the appropriate type; this could be an lvalue for
      // an integer.
      if (isa<llvm::PointerType>(DestTy))
        return llvm::ConstantExpr::getIntToPtr(C, DestTy);

      // If the types don't match this should only be a truncate.
      if (C->getType() != DestTy)
        return llvm::ConstantExpr::getTrunc(C, DestTy);

      return C;
    }
  }
  case APValue::Int:
    return llvm::ConstantInt::get(VMContext, Value.getInt());
  case APValue::ComplexInt: {
    llvm::Constant *Complex[2];

    Complex[0] = llvm::ConstantInt::get(VMContext,
                                        Value.getComplexIntReal());
    Complex[1] = llvm::ConstantInt::get(VMContext,
                                        Value.getComplexIntImag());

    // FIXME: the target may want to specify that this is packed.
    llvm::StructType *STy = llvm::StructType::get(Complex[0]->getType(),
                                                  Complex[1]->getType(),
                                                  nullptr);
    return llvm::ConstantStruct::get(STy, Complex);
  }
  case APValue::Float: {
    const llvm::APFloat &Init = Value.getFloat();
    if (&Init.getSemantics() == &llvm::APFloat::IEEEhalf &&
        !Context.getLangOpts().NativeHalfType &&
        !Context.getLangOpts().HalfArgsAndReturns)
      return llvm::ConstantInt::get(VMContext, Init.bitcastToAPInt());
    else
      return llvm::ConstantFP::get(VMContext, Init);
  }
  case APValue::ComplexFloat: {
    llvm::Constant *Complex[2];

    Complex[0] = llvm::ConstantFP::get(VMContext,
                                       Value.getComplexFloatReal());
    Complex[1] = llvm::ConstantFP::get(VMContext,
                                       Value.getComplexFloatImag());

    // FIXME: the target may want to specify that this is packed.
    llvm::StructType *STy = llvm::StructType::get(Complex[0]->getType(),
                                                  Complex[1]->getType(),
                                                  nullptr);
    return llvm::ConstantStruct::get(STy, Complex);
  }
  case APValue::Vector: {
    SmallVector<llvm::Constant *, 4> Inits;
    unsigned NumElts = Value.getVectorLength();

    for (unsigned i = 0; i != NumElts; ++i) {
      const APValue &Elt = Value.getVectorElt(i);
      if (Elt.isInt())
        Inits.push_back(llvm::ConstantInt::get(VMContext, Elt.getInt()));
      else
        Inits.push_back(llvm::ConstantFP::get(VMContext, Elt.getFloat()));
    }
    return llvm::ConstantVector::get(Inits);
  }
  case APValue::AddrLabelDiff: {
    const AddrLabelExpr *LHSExpr = Value.getAddrLabelDiffLHS();
    const AddrLabelExpr *RHSExpr = Value.getAddrLabelDiffRHS();
    llvm::Constant *LHS = EmitConstantExpr(LHSExpr, LHSExpr->getType(), CGF);
    llvm::Constant *RHS = EmitConstantExpr(RHSExpr, RHSExpr->getType(), CGF);

    // Compute difference
    llvm::Type *ResultType = getTypes().ConvertType(DestType);
    LHS = llvm::ConstantExpr::getPtrToInt(LHS, IntPtrTy);
    RHS = llvm::ConstantExpr::getPtrToInt(RHS, IntPtrTy);
    llvm::Constant *AddrLabelDiff = llvm::ConstantExpr::getSub(LHS, RHS);

    // LLVM is a bit sensitive about the exact format of the
    // address-of-label difference; make sure to truncate after
    // the subtraction.
    return llvm::ConstantExpr::getTruncOrBitCast(AddrLabelDiff, ResultType);
  }
  case APValue::Struct:
  case APValue::Union:
    return ConstStructBuilder::BuildStruct(*this, CGF, Value, DestType);
  case APValue::Array: {
    const ArrayType *CAT = Context.getAsArrayType(DestType);
    unsigned NumElements = Value.getArraySize();
    unsigned NumInitElts = Value.getArrayInitializedElts();

    // Emit array filler, if there is one.
    llvm::Constant *Filler = nullptr;
    if (Value.hasArrayFiller())
      Filler = EmitConstantValueForMemory(Value.getArrayFiller(),
                                          CAT->getElementType(), CGF);

    // Emit initializer elements.
    llvm::Type *CommonElementType =
        getTypes().ConvertType(CAT->getElementType());

    // Try to use a ConstantAggregateZero if we can.
    if (Filler && Filler->isNullValue() && !NumInitElts) {
      llvm::ArrayType *AType =
          llvm::ArrayType::get(CommonElementType, NumElements);
      return llvm::ConstantAggregateZero::get(AType);
    }

    std::vector<llvm::Constant*> Elts;
    Elts.reserve(NumElements);
    for (unsigned I = 0; I < NumElements; ++I) {
      llvm::Constant *C = Filler;
      if (I < NumInitElts)
        C = EmitConstantValueForMemory(Value.getArrayInitializedElt(I),
                                       CAT->getElementType(), CGF);
      else
        assert(Filler && "Missing filler for implicit elements of initializer");
      if (I == 0)
        CommonElementType = C->getType();
      else if (C->getType() != CommonElementType)
        CommonElementType = nullptr;
      Elts.push_back(C);
    }

    if (!CommonElementType) {
      // FIXME: Try to avoid packing the array
      std::vector<llvm::Type*> Types;
      Types.reserve(NumElements);
      for (unsigned i = 0, e = Elts.size(); i < e; ++i)
        Types.push_back(Elts[i]->getType());
      llvm::StructType *SType = llvm::StructType::get(VMContext, Types, true);
      return llvm::ConstantStruct::get(SType, Elts);
    }

    llvm::ArrayType *AType =
        llvm::ArrayType::get(CommonElementType, NumElements);
    return llvm::ConstantArray::get(AType, Elts);
  }
  case APValue::MemberPointer:
    return getCXXABI().EmitMemberPointer(Value, DestType);
  }
  llvm_unreachable("Unknown APValue kind");
}

llvm::Constant *
CodeGenModule::EmitConstantValueForMemory(const APValue &Value,
                                          QualType DestType,
                                          CodeGenFunction *CGF) {
  llvm::Constant *C = EmitConstantValue(Value, DestType, CGF);
  if (C->getType()->isIntegerTy(1)) {
    llvm::Type *BoolTy = getTypes().ConvertTypeForMem(DestType);
    C = llvm::ConstantExpr::getZExt(C, BoolTy);
  }
  return C;
}

llvm::Constant *
CodeGenModule::GetAddrOfConstantCompoundLiteral(const CompoundLiteralExpr *E) {
  assert(E->isFileScope() && "not a file-scope compound literal expr");
  return ConstExprEmitter(*this, nullptr).EmitLValue(E);
}

llvm::Constant *
CodeGenModule::getMemberPointerConstant(const UnaryOperator *uo) {
  // Member pointer constants always have a very particular form.
  const MemberPointerType *type = cast<MemberPointerType>(uo->getType());
  const ValueDecl *decl = cast<DeclRefExpr>(uo->getSubExpr())->getDecl();

  // A member function pointer.
  if (const CXXMethodDecl *method = dyn_cast<CXXMethodDecl>(decl))
    return getCXXABI().EmitMemberPointer(method);

  // Otherwise, a member data pointer.
  uint64_t fieldOffset = getContext().getFieldOffset(decl);
  CharUnits chars = getContext().toCharUnitsFromBits((int64_t) fieldOffset);
  return getCXXABI().EmitMemberDataPointer(type, chars);
}

static llvm::Constant *EmitNullConstantForBase(CodeGenModule &CGM,
                                               llvm::Type *baseType,
                                               const CXXRecordDecl *base);

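// Emit the "null" constant for a C++ record that is not trivially
// zero-initializable, e.g. a class containing a pointer to data member, whose
// null value is -1 rather than 0 under the Itanium C++ ABI.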
static llvm::Constant *EmitNullConstant(CodeGenModule &CGM,
                                        const CXXRecordDecl *record,
                                        bool asCompleteObject) {
  const CGRecordLayout &layout = CGM.getTypes().getCGRecordLayout(record);
  llvm::StructType *structure =
      (asCompleteObject ? layout.getLLVMType()
                        : layout.getBaseSubobjectLLVMType());

  unsigned numElements = structure->getNumElements();
  std::vector<llvm::Constant *> elements(numElements);

  // Fill in all the bases.
  for (const auto &I : record->bases()) {
    if (I.isVirtual()) {
      // Ignore virtual bases; if we're laying out for a complete
      // object, we'll lay these out later.
      continue;
    }

    const CXXRecordDecl *base =
        cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());

    // Ignore empty bases.
    if (base->isEmpty())
      continue;

    unsigned fieldIndex = layout.getNonVirtualBaseLLVMFieldNo(base);
    llvm::Type *baseType = structure->getElementType(fieldIndex);
    elements[fieldIndex] = EmitNullConstantForBase(CGM, baseType, base);
  }

  // Fill in all the fields.
  for (const auto *Field : record->fields()) {
    // Fill in non-bitfields. (Bitfields always use a zero pattern, which we
    // will fill in later.)
    if (!Field->isBitField()) {
      unsigned fieldIndex = layout.getLLVMFieldNo(Field);
      elements[fieldIndex] = CGM.EmitNullConstant(Field->getType());
    }

    // For unions, stop after the first named field.
    if (record->isUnion() && Field->getDeclName())
      break;
  }

  // Fill in the virtual bases, if we're working with the complete object.
  if (asCompleteObject) {
    for (const auto &I : record->vbases()) {
      const CXXRecordDecl *base =
          cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());

      // Ignore empty bases.
      if (base->isEmpty())
        continue;

      unsigned fieldIndex = layout.getVirtualBaseIndex(base);

      // We might have already laid this field out.
      if (elements[fieldIndex]) continue;

      llvm::Type *baseType = structure->getElementType(fieldIndex);
      elements[fieldIndex] = EmitNullConstantForBase(CGM, baseType, base);
    }
  }

  // Now go through all other fields and zero them out.
  for (unsigned i = 0; i != numElements; ++i) {
    if (!elements[i])
      elements[i] = llvm::Constant::getNullValue(structure->getElementType(i));
  }

  return llvm::ConstantStruct::get(structure, elements);
}

/// Emit the null constant for a base subobject.
static llvm::Constant *EmitNullConstantForBase(CodeGenModule &CGM,
                                               llvm::Type *baseType,
                                               const CXXRecordDecl *base) {
  const CGRecordLayout &baseLayout = CGM.getTypes().getCGRecordLayout(base);

  // Just zero out bases that don't have any pointer to data members.
  if (baseLayout.isZeroInitializableAsBase())
    return llvm::Constant::getNullValue(baseType);

  // Otherwise, we can just use its null constant.
  return EmitNullConstant(CGM, base, /*asCompleteObject=*/false);
}

llvm::Constant *CodeGenModule::EmitNullConstant(QualType T) {
  if (getTypes().isZeroInitializable(T))
    return llvm::Constant::getNullValue(getTypes().ConvertTypeForMem(T));

  if (const ConstantArrayType *CAT = Context.getAsConstantArrayType(T)) {
    llvm::ArrayType *ATy =
        cast<llvm::ArrayType>(getTypes().ConvertTypeForMem(T));

    QualType ElementTy = CAT->getElementType();

    llvm::Constant *Element = EmitNullConstant(ElementTy);
    unsigned NumElements = CAT->getSize().getZExtValue();

    if (Element->isNullValue())
      return llvm::ConstantAggregateZero::get(ATy);

    SmallVector<llvm::Constant *, 8> Array(NumElements, Element);
    return llvm::ConstantArray::get(ATy, Array);
  }

  if (const RecordType *RT = T->getAs<RecordType>()) {
    const CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
    return ::EmitNullConstant(*this, RD, /*complete object*/ true);
  }

  assert(T->isMemberPointerType() && "Should only see member pointers here!");
  assert(!T->getAs<MemberPointerType>()->getPointeeType()->isFunctionType() &&
         "Should only see pointers to data members here!");

  return getCXXABI().EmitNullMemberPointer(T->castAs<MemberPointerType>());
}

llvm::Constant *
CodeGenModule::EmitNullConstantForBase(const CXXRecordDecl *Record) {
  return ::EmitNullConstant(*this, Record, false);
}