1 //===--- CGExprConstant.cpp - Emit LLVM Code from Constant Expressions ----===//
2 //
3 // The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 // This contains code to emit Constant Expr nodes as LLVM code.
11 //
12 //===----------------------------------------------------------------------===//
13
14 #include "CodeGenFunction.h"
15 #include "CGCXXABI.h"
16 #include "CGObjCRuntime.h"
17 #include "CGRecordLayout.h"
18 #include "CodeGenModule.h"
19 #include "clang/AST/APValue.h"
20 #include "clang/AST/ASTContext.h"
21 #include "clang/AST/RecordLayout.h"
22 #include "clang/AST/StmtVisitor.h"
23 #include "clang/Basic/Builtins.h"
24 #include "llvm/IR/Constants.h"
25 #include "llvm/IR/DataLayout.h"
26 #include "llvm/IR/Function.h"
27 #include "llvm/IR/GlobalVariable.h"
28 using namespace clang;
29 using namespace CodeGen;
30
31 //===----------------------------------------------------------------------===//
32 // ConstStructBuilder
33 //===----------------------------------------------------------------------===//
34
35 namespace {
36 class ConstExprEmitter;
37 class ConstStructBuilder {
38 CodeGenModule &CGM;
39 CodeGenFunction *CGF;
40
41 bool Packed;
42 CharUnits NextFieldOffsetInChars;
43 CharUnits LLVMStructAlignment;
44 SmallVector<llvm::Constant *, 32> Elements;
45 public:
46 static llvm::Constant *BuildStruct(CodeGenModule &CGM, CodeGenFunction *CGF,
47 ConstExprEmitter *Emitter,
48 llvm::ConstantStruct *Base,
49 InitListExpr *Updater);
50 static llvm::Constant *BuildStruct(CodeGenModule &CGM, CodeGenFunction *CGF,
51 InitListExpr *ILE);
52 static llvm::Constant *BuildStruct(CodeGenModule &CGM, CodeGenFunction *CGF,
53 const APValue &Value, QualType ValTy);
54
55 private:
56 ConstStructBuilder(CodeGenModule &CGM, CodeGenFunction *CGF)
57 : CGM(CGM), CGF(CGF), Packed(false),
58 NextFieldOffsetInChars(CharUnits::Zero()),
59 LLVMStructAlignment(CharUnits::One()) { }
60
61 void AppendField(const FieldDecl *Field, uint64_t FieldOffset,
62 llvm::Constant *InitExpr);
63
64 void AppendBytes(CharUnits FieldOffsetInChars, llvm::Constant *InitCst);
65
66 void AppendBitField(const FieldDecl *Field, uint64_t FieldOffset,
67 llvm::ConstantInt *InitExpr);
68
69 void AppendPadding(CharUnits PadSize);
70
71 void AppendTailPadding(CharUnits RecordSize);
72
73 void ConvertStructToPacked();
74
75 bool Build(InitListExpr *ILE);
76 bool Build(ConstExprEmitter *Emitter, llvm::ConstantStruct *Base,
77 InitListExpr *Updater);
78 void Build(const APValue &Val, const RecordDecl *RD, bool IsPrimaryBase,
79 const CXXRecordDecl *VTableClass, CharUnits BaseOffset);
80 llvm::Constant *Finalize(QualType Ty);
81
82 CharUnits getAlignment(const llvm::Constant *C) const {
83 if (Packed) return CharUnits::One();
84 return CharUnits::fromQuantity(
85 CGM.getDataLayout().getABITypeAlignment(C->getType()));
86 }
87
88 CharUnits getSizeInChars(const llvm::Constant *C) const {
89 return CharUnits::fromQuantity(
90 CGM.getDataLayout().getTypeAllocSize(C->getType()));
91 }
92 };
93
94 void ConstStructBuilder::
95 AppendField(const FieldDecl *Field, uint64_t FieldOffset,
96 llvm::Constant *InitCst) {
97 const ASTContext &Context = CGM.getContext();
98
99 CharUnits FieldOffsetInChars = Context.toCharUnitsFromBits(FieldOffset);
100
101 AppendBytes(FieldOffsetInChars, InitCst);
102 }
103
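// For illustration: when building { char c; int i; } from the initializer
// {1, 2}, the i8 for 'c' leaves NextFieldOffsetInChars at 1, so appending the
// i32 for 'i' at offset 4 first inserts three bytes of undef padding. If the
// AST instead places 'i' below the i32's natural alignment (e.g. under
// #pragma pack(1)), the aligned offset would overshoot the requested one and
// AppendBytes converts everything emitted so far to a packed LLVM struct.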
104 void ConstStructBuilder::
105 AppendBytes(CharUnits FieldOffsetInChars, llvm::Constant *InitCst) {
106
107 assert(NextFieldOffsetInChars <= FieldOffsetInChars
108 && "Field offset mismatch!");
109
110 CharUnits FieldAlignment = getAlignment(InitCst);
111
112 // Round up the field offset to the alignment of the field type.
113 CharUnits AlignedNextFieldOffsetInChars =
114 NextFieldOffsetInChars.alignTo(FieldAlignment);
115
116 if (AlignedNextFieldOffsetInChars < FieldOffsetInChars) {
117 // We need to append padding.
118 AppendPadding(FieldOffsetInChars - NextFieldOffsetInChars);
119
120 assert(NextFieldOffsetInChars == FieldOffsetInChars &&
121 "Did not add enough padding!");
122
123 AlignedNextFieldOffsetInChars =
124 NextFieldOffsetInChars.alignTo(FieldAlignment);
125 }
126
127 if (AlignedNextFieldOffsetInChars > FieldOffsetInChars) {
128 assert(!Packed && "Alignment is wrong even with a packed struct!");
129
130 // Convert the struct to a packed struct.
131 ConvertStructToPacked();
132
133 // After we pack the struct, we may need to insert padding.
134 if (NextFieldOffsetInChars < FieldOffsetInChars) {
135 // We need to append padding.
136 AppendPadding(FieldOffsetInChars - NextFieldOffsetInChars);
137
138 assert(NextFieldOffsetInChars == FieldOffsetInChars &&
139 "Did not add enough padding!");
140 }
141 AlignedNextFieldOffsetInChars = NextFieldOffsetInChars;
142 }
143
144 // Add the field.
145 Elements.push_back(InitCst);
146 NextFieldOffsetInChars = AlignedNextFieldOffsetInChars +
147 getSizeInChars(InitCst);
148
149 if (Packed)
150 assert(LLVMStructAlignment == CharUnits::One() &&
151 "Packed struct not byte-aligned!");
152 else
153 LLVMStructAlignment = std::max(LLVMStructAlignment, FieldAlignment);
154 }
155
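// For illustration (little-endian): given struct { int a : 3; int b : 11; }
// initialized with {5, 100}, 'a' fills the low three bits of the first i8 and
// the low five bits of 'b' are OR'd into its upper bits; the remaining six
// bits of 'b' become the low bits of a second i8. Big-endian targets fill
// each byte from its high end instead.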
156 void ConstStructBuilder::AppendBitField(const FieldDecl *Field,
157 uint64_t FieldOffset,
158 llvm::ConstantInt *CI) {
159 const ASTContext &Context = CGM.getContext();
160 const uint64_t CharWidth = Context.getCharWidth();
161 uint64_t NextFieldOffsetInBits = Context.toBits(NextFieldOffsetInChars);
162 if (FieldOffset > NextFieldOffsetInBits) {
163 // We need to add padding.
164 CharUnits PadSize = Context.toCharUnitsFromBits(
165 llvm::alignTo(FieldOffset - NextFieldOffsetInBits,
166 Context.getTargetInfo().getCharAlign()));
167
168 AppendPadding(PadSize);
169 }
170
171 uint64_t FieldSize = Field->getBitWidthValue(Context);
172
173 llvm::APInt FieldValue = CI->getValue();
174
175 // Promote the size of FieldValue if necessary
176 // FIXME: This should never occur, but currently it can because initializer
177 // constants are cast to bool, and because clang is not enforcing bitfield
178 // width limits.
179 if (FieldSize > FieldValue.getBitWidth())
180 FieldValue = FieldValue.zext(FieldSize);
181
182 // Truncate the size of FieldValue to the bit field size.
183 if (FieldSize < FieldValue.getBitWidth())
184 FieldValue = FieldValue.trunc(FieldSize);
185
186 NextFieldOffsetInBits = Context.toBits(NextFieldOffsetInChars);
187 if (FieldOffset < NextFieldOffsetInBits) {
188 // Either part of the field or the entire field can go into the previous
189 // byte.
190 assert(!Elements.empty() && "Elements can't be empty!");
191
192 unsigned BitsInPreviousByte = NextFieldOffsetInBits - FieldOffset;
193
194 bool FitsCompletelyInPreviousByte =
195 BitsInPreviousByte >= FieldValue.getBitWidth();
196
197 llvm::APInt Tmp = FieldValue;
198
199 if (!FitsCompletelyInPreviousByte) {
200 unsigned NewFieldWidth = FieldSize - BitsInPreviousByte;
201
202 if (CGM.getDataLayout().isBigEndian()) {
203 Tmp = Tmp.lshr(NewFieldWidth);
204 Tmp = Tmp.trunc(BitsInPreviousByte);
205
206 // We want the remaining high bits.
207 FieldValue = FieldValue.trunc(NewFieldWidth);
208 } else {
209 Tmp = Tmp.trunc(BitsInPreviousByte);
210
211 // We want the remaining low bits.
212 FieldValue = FieldValue.lshr(BitsInPreviousByte);
213 FieldValue = FieldValue.trunc(NewFieldWidth);
214 }
215 }
216
217 Tmp = Tmp.zext(CharWidth);
218 if (CGM.getDataLayout().isBigEndian()) {
219 if (FitsCompletelyInPreviousByte)
220 Tmp = Tmp.shl(BitsInPreviousByte - FieldValue.getBitWidth());
221 } else {
222 Tmp = Tmp.shl(CharWidth - BitsInPreviousByte);
223 }
224
225 // 'or' in the bits that go into the previous byte.
226 llvm::Value *LastElt = Elements.back();
227 if (llvm::ConstantInt *Val = dyn_cast<llvm::ConstantInt>(LastElt))
228 Tmp |= Val->getValue();
229 else {
230 assert(isa<llvm::UndefValue>(LastElt));
231 // If there is an undef field that we're adding to, it can either be a
232 // scalar undef (in which case, we just replace it with our field) or it
233 // is an array. If it is an array, we have to pull one byte off the
234 // array so that the other undef bytes stay around.
235 if (!isa<llvm::IntegerType>(LastElt->getType())) {
236 // The undef padding will be a multibyte array; create a new smaller
237 // padding and then a hole for our i8 to get plopped into.
238 assert(isa<llvm::ArrayType>(LastElt->getType()) &&
239 "Expected array padding of undefs");
240 llvm::ArrayType *AT = cast<llvm::ArrayType>(LastElt->getType());
241 assert(AT->getElementType()->isIntegerTy(CharWidth) &&
242 AT->getNumElements() != 0 &&
243 "Expected non-empty array padding of undefs");
244
245 // Remove the padding array.
246 NextFieldOffsetInChars -= CharUnits::fromQuantity(AT->getNumElements());
247 Elements.pop_back();
248
249 // Add the padding back in two chunks.
250 AppendPadding(CharUnits::fromQuantity(AT->getNumElements()-1));
251 AppendPadding(CharUnits::One());
252 assert(isa<llvm::UndefValue>(Elements.back()) &&
253 Elements.back()->getType()->isIntegerTy(CharWidth) &&
254 "Padding addition didn't work right");
255 }
256 }
257
258 Elements.back() = llvm::ConstantInt::get(CGM.getLLVMContext(), Tmp);
259
260 if (FitsCompletelyInPreviousByte)
261 return;
262 }
263
264 while (FieldValue.getBitWidth() > CharWidth) {
265 llvm::APInt Tmp;
266
267 if (CGM.getDataLayout().isBigEndian()) {
268 // We want the high bits.
269 Tmp =
270 FieldValue.lshr(FieldValue.getBitWidth() - CharWidth).trunc(CharWidth);
271 } else {
272 // We want the low bits.
273 Tmp = FieldValue.trunc(CharWidth);
274
275 FieldValue = FieldValue.lshr(CharWidth);
276 }
277
278 Elements.push_back(llvm::ConstantInt::get(CGM.getLLVMContext(), Tmp));
279 ++NextFieldOffsetInChars;
280
281 FieldValue = FieldValue.trunc(FieldValue.getBitWidth() - CharWidth);
282 }
283
284 assert(FieldValue.getBitWidth() > 0 &&
285 "Should have at least one bit left!");
286 assert(FieldValue.getBitWidth() <= CharWidth &&
287 "Should not have more than a byte left!");
288
289 if (FieldValue.getBitWidth() < CharWidth) {
290 if (CGM.getDataLayout().isBigEndian()) {
291 unsigned BitWidth = FieldValue.getBitWidth();
292
293 FieldValue = FieldValue.zext(CharWidth) << (CharWidth - BitWidth);
294 } else
295 FieldValue = FieldValue.zext(CharWidth);
296 }
297
298 // Append the last element.
299 Elements.push_back(llvm::ConstantInt::get(CGM.getLLVMContext(),
300 FieldValue));
301 ++NextFieldOffsetInChars;
302 }
303
304 void ConstStructBuilder::AppendPadding(CharUnits PadSize) {
305 if (PadSize.isZero())
306 return;
307
308 llvm::Type *Ty = CGM.Int8Ty;
309 if (PadSize > CharUnits::One())
310 Ty = llvm::ArrayType::get(Ty, PadSize.getQuantity());
311
312 llvm::Constant *C = llvm::UndefValue::get(Ty);
313 Elements.push_back(C);
314 assert(getAlignment(C) == CharUnits::One() &&
315 "Padding must have 1 byte alignment!");
316
317 NextFieldOffsetInChars += getSizeInChars(C);
318 }
319
320 void ConstStructBuilder::AppendTailPadding(CharUnits RecordSize) {
321 assert(NextFieldOffsetInChars <= RecordSize &&
322 "Size mismatch!");
323
324 AppendPadding(RecordSize - NextFieldOffsetInChars);
325 }
326
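// For illustration: a builder currently holding {i8, i32} under natural
// alignment relies on LLVM inserting three implicit padding bytes; converting
// to packed rewrites the element list as {i8, [3 x i8] undef, i32} so every
// element keeps its byte offset while the struct alignment drops to one.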
327 void ConstStructBuilder::ConvertStructToPacked() {
328 SmallVector<llvm::Constant *, 16> PackedElements;
329 CharUnits ElementOffsetInChars = CharUnits::Zero();
330
331 for (unsigned i = 0, e = Elements.size(); i != e; ++i) {
332 llvm::Constant *C = Elements[i];
333
334 CharUnits ElementAlign = CharUnits::fromQuantity(
335 CGM.getDataLayout().getABITypeAlignment(C->getType()));
336 CharUnits AlignedElementOffsetInChars =
337 ElementOffsetInChars.alignTo(ElementAlign);
338
339 if (AlignedElementOffsetInChars > ElementOffsetInChars) {
340 // We need some padding.
341 CharUnits NumChars =
342 AlignedElementOffsetInChars - ElementOffsetInChars;
343
344 llvm::Type *Ty = CGM.Int8Ty;
345 if (NumChars > CharUnits::One())
346 Ty = llvm::ArrayType::get(Ty, NumChars.getQuantity());
347
348 llvm::Constant *Padding = llvm::UndefValue::get(Ty);
349 PackedElements.push_back(Padding);
350 ElementOffsetInChars += getSizeInChars(Padding);
351 }
352
353 PackedElements.push_back(C);
354 ElementOffsetInChars += getSizeInChars(C);
355 }
356
357 assert(ElementOffsetInChars == NextFieldOffsetInChars &&
358 "Packing the struct changed its size!");
359
360 Elements.swap(PackedElements);
361 LLVMStructAlignment = CharUnits::One();
362 Packed = true;
363 }
364
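// For illustration: for struct S { int x; unsigned y : 4; } and the
// initializer { 1, 7 }, the loop below appends an i32 1 for 'x' and routes
// the ConstantInt 7 through AppendBitField; a trailing field with no
// initializer would get CGM.EmitNullConstant for its type instead.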
365 bool ConstStructBuilder::Build(InitListExpr *ILE) {
366 RecordDecl *RD = ILE->getType()->getAs<RecordType>()->getDecl();
367 const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);
368
369 unsigned FieldNo = 0;
370 unsigned ElementNo = 0;
371
372 // Bail out if we have base classes. We could support these, but they only
373 // arise in C++1z where we will have already constant folded most interesting
374 // cases. FIXME: There are still a few more cases we can handle this way.
375 if (auto *CXXRD = dyn_cast<CXXRecordDecl>(RD))
376 if (CXXRD->getNumBases())
377 return false;
378
379 for (RecordDecl::field_iterator Field = RD->field_begin(),
380 FieldEnd = RD->field_end(); Field != FieldEnd; ++Field, ++FieldNo) {
381 // If this is a union, skip all the fields that aren't being initialized.
382 if (RD->isUnion() && ILE->getInitializedFieldInUnion() != *Field)
383 continue;
384
385 // Don't emit anonymous bitfields, they just affect layout.
386 if (Field->isUnnamedBitfield())
387 continue;
388
389 // Get the initializer. A struct can include fields without initializers,
390 // we just use explicit null values for them.
391 llvm::Constant *EltInit;
392 if (ElementNo < ILE->getNumInits())
393 EltInit = CGM.EmitConstantExpr(ILE->getInit(ElementNo++),
394 Field->getType(), CGF);
395 else
396 EltInit = CGM.EmitNullConstant(Field->getType());
397
398 if (!EltInit)
399 return false;
400
401 if (!Field->isBitField()) {
402 // Handle non-bitfield members.
403 AppendField(*Field, Layout.getFieldOffset(FieldNo), EltInit);
404 } else {
405 // Otherwise we have a bitfield.
406 if (auto *CI = dyn_cast<llvm::ConstantInt>(EltInit)) {
407 AppendBitField(*Field, Layout.getFieldOffset(FieldNo), CI);
408 } else {
409 // We are trying to initialize a bitfield with a non-trivial constant,
410 // this must require run-time code.
411 return false;
412 }
413 }
414 }
415
416 return true;
417 }
418
419 namespace {
420 struct BaseInfo {
421 BaseInfo(const CXXRecordDecl *Decl, CharUnits Offset, unsigned Index)
422 : Decl(Decl), Offset(Offset), Index(Index) {
423 }
424
425 const CXXRecordDecl *Decl;
426 CharUnits Offset;
427 unsigned Index;
428
429 bool operator<(const BaseInfo &O) const { return Offset < O.Offset; }
430 };
431 }
432
433 void ConstStructBuilder::Build(const APValue &Val, const RecordDecl *RD,
434 bool IsPrimaryBase,
435 const CXXRecordDecl *VTableClass,
436 CharUnits Offset) {
437 const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);
438
439 if (const CXXRecordDecl *CD = dyn_cast<CXXRecordDecl>(RD)) {
440 // Add a vtable pointer, if we need one and it hasn't already been added.
441 if (CD->isDynamicClass() && !IsPrimaryBase) {
442 llvm::Constant *VTableAddressPoint =
443 CGM.getCXXABI().getVTableAddressPointForConstExpr(
444 BaseSubobject(CD, Offset), VTableClass);
445 AppendBytes(Offset, VTableAddressPoint);
446 }
447
448 // Accumulate and sort bases, in order to visit them in address order, which
449 // may not be the same as declaration order.
450 SmallVector<BaseInfo, 8> Bases;
451 Bases.reserve(CD->getNumBases());
452 unsigned BaseNo = 0;
453 for (CXXRecordDecl::base_class_const_iterator Base = CD->bases_begin(),
454 BaseEnd = CD->bases_end(); Base != BaseEnd; ++Base, ++BaseNo) {
455 assert(!Base->isVirtual() && "should not have virtual bases here");
456 const CXXRecordDecl *BD = Base->getType()->getAsCXXRecordDecl();
457 CharUnits BaseOffset = Layout.getBaseClassOffset(BD);
458 Bases.push_back(BaseInfo(BD, BaseOffset, BaseNo));
459 }
460 std::stable_sort(Bases.begin(), Bases.end());
461
462 for (unsigned I = 0, N = Bases.size(); I != N; ++I) {
463 BaseInfo &Base = Bases[I];
464
465 bool IsPrimaryBase = Layout.getPrimaryBase() == Base.Decl;
466 Build(Val.getStructBase(Base.Index), Base.Decl, IsPrimaryBase,
467 VTableClass, Offset + Base.Offset);
468 }
469 }
470
471 unsigned FieldNo = 0;
472 uint64_t OffsetBits = CGM.getContext().toBits(Offset);
473
474 for (RecordDecl::field_iterator Field = RD->field_begin(),
475 FieldEnd = RD->field_end(); Field != FieldEnd; ++Field, ++FieldNo) {
476 // If this is a union, skip all the fields that aren't being initialized.
477 if (RD->isUnion() && Val.getUnionField() != *Field)
478 continue;
479
480 // Don't emit anonymous bitfields, they just affect layout.
481 if (Field->isUnnamedBitfield())
482 continue;
483
484 // Emit the value of the initializer.
485 const APValue &FieldValue =
486 RD->isUnion() ? Val.getUnionValue() : Val.getStructField(FieldNo);
487 llvm::Constant *EltInit =
488 CGM.EmitConstantValueForMemory(FieldValue, Field->getType(), CGF);
489 assert(EltInit && "EmitConstantValue can't fail");
490
491 if (!Field->isBitField()) {
492 // Handle non-bitfield members.
493 AppendField(*Field, Layout.getFieldOffset(FieldNo) + OffsetBits, EltInit);
494 } else {
495 // Otherwise we have a bitfield.
496 AppendBitField(*Field, Layout.getFieldOffset(FieldNo) + OffsetBits,
497 cast<llvm::ConstantInt>(EltInit));
498 }
499 }
500 }
501
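// For illustration: for struct __attribute__((aligned(4))) { char c; } the
// builder holds a single i8, so NextFieldOffsetInChars is 1 while the record
// size is 4, and three bytes of undef tail padding are appended before the
// llvm::ConstantStruct is formed. A record with a flexible array member may
// legitimately be larger than its declared size and gets no tail padding.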
502 llvm::Constant *ConstStructBuilder::Finalize(QualType Ty) {
503 RecordDecl *RD = Ty->getAs<RecordType>()->getDecl();
504 const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);
505
506 CharUnits LayoutSizeInChars = Layout.getSize();
507
508 if (NextFieldOffsetInChars > LayoutSizeInChars) {
509 // If the struct is bigger than the size of the record type,
510 // we must have a flexible array member at the end.
511 assert(RD->hasFlexibleArrayMember() &&
512 "Must have flexible array member if struct is bigger than type!");
513
514 // No tail padding is necessary.
515 } else {
516 // Append tail padding if necessary.
517 CharUnits LLVMSizeInChars =
518 NextFieldOffsetInChars.alignTo(LLVMStructAlignment);
519
520 if (LLVMSizeInChars != LayoutSizeInChars)
521 AppendTailPadding(LayoutSizeInChars);
522
523 LLVMSizeInChars = NextFieldOffsetInChars.alignTo(LLVMStructAlignment);
524
525 // Check if we need to convert the struct to a packed struct.
526 if (NextFieldOffsetInChars <= LayoutSizeInChars &&
527 LLVMSizeInChars > LayoutSizeInChars) {
528 assert(!Packed && "Size mismatch!");
529
530 ConvertStructToPacked();
531 assert(NextFieldOffsetInChars <= LayoutSizeInChars &&
532 "Converting to packed did not help!");
533 }
534
535 LLVMSizeInChars = NextFieldOffsetInChars.alignTo(LLVMStructAlignment);
536
537 assert(LayoutSizeInChars == LLVMSizeInChars &&
538 "Tail padding mismatch!");
539 }
540
541 // Pick the type to use. If the type is layout identical to the ConvertType
542 // type then use it, otherwise use whatever the builder produced for us.
543 llvm::StructType *STy =
544 llvm::ConstantStruct::getTypeForElements(CGM.getLLVMContext(),
545 Elements, Packed);
546 llvm::Type *ValTy = CGM.getTypes().ConvertType(Ty);
547 if (llvm::StructType *ValSTy = dyn_cast<llvm::StructType>(ValTy)) {
548 if (ValSTy->isLayoutIdentical(STy))
549 STy = ValSTy;
550 }
551
552 llvm::Constant *Result = llvm::ConstantStruct::get(STy, Elements);
553
554 assert(NextFieldOffsetInChars.alignTo(getAlignment(Result)) ==
555 getSizeInChars(Result) &&
556 "Size mismatch!");
557
558 return Result;
559 }
560
561 llvm::Constant *ConstStructBuilder::BuildStruct(CodeGenModule &CGM,
562 CodeGenFunction *CGF,
563 ConstExprEmitter *Emitter,
564 llvm::ConstantStruct *Base,
565 InitListExpr *Updater) {
566 ConstStructBuilder Builder(CGM, CGF);
567 if (!Builder.Build(Emitter, Base, Updater))
568 return nullptr;
569 return Builder.Finalize(Updater->getType());
570 }
571
572 llvm::Constant *ConstStructBuilder::BuildStruct(CodeGenModule &CGM,
573 CodeGenFunction *CGF,
574 InitListExpr *ILE) {
575 ConstStructBuilder Builder(CGM, CGF);
576
577 if (!Builder.Build(ILE))
578 return nullptr;
579
580 return Builder.Finalize(ILE->getType());
581 }
582
583 llvm::Constant *ConstStructBuilder::BuildStruct(CodeGenModule &CGM,
584 CodeGenFunction *CGF,
585 const APValue &Val,
586 QualType ValTy) {
587 ConstStructBuilder Builder(CGM, CGF);
588
589 const RecordDecl *RD = ValTy->castAs<RecordType>()->getDecl();
590 const CXXRecordDecl *CD = dyn_cast<CXXRecordDecl>(RD);
591 Builder.Build(Val, RD, false, CD, CharUnits::Zero());
592
593 return Builder.Finalize(ValTy);
594 }
595
596
597 //===----------------------------------------------------------------------===//
598 // ConstExprEmitter
599 //===----------------------------------------------------------------------===//
600
601 /// This class only needs to handle two cases:
602 /// 1) Literals (this is used by APValue emission to emit literals).
603 /// 2) Arrays, structs and unions (outside C++11 mode, we don't currently
604 /// constant fold these types).
605 class ConstExprEmitter :
606 public StmtVisitor<ConstExprEmitter, llvm::Constant*> {
607 CodeGenModule &CGM;
608 CodeGenFunction *CGF;
609 llvm::LLVMContext &VMContext;
610 public:
611 ConstExprEmitter(CodeGenModule &cgm, CodeGenFunction *cgf)
612 : CGM(cgm), CGF(cgf), VMContext(cgm.getLLVMContext()) {
613 }
614
615 //===--------------------------------------------------------------------===//
616 // Visitor Methods
617 //===--------------------------------------------------------------------===//
618
619 llvm::Constant *VisitStmt(Stmt *S) {
620 return nullptr;
621 }
622
623 llvm::Constant *VisitParenExpr(ParenExpr *PE) {
624 return Visit(PE->getSubExpr());
625 }
626
627 llvm::Constant *
628 VisitSubstNonTypeTemplateParmExpr(SubstNonTypeTemplateParmExpr *PE) {
629 return Visit(PE->getReplacement());
630 }
631
632 llvm::Constant *VisitGenericSelectionExpr(GenericSelectionExpr *GE) {
633 return Visit(GE->getResultExpr());
634 }
635
636 llvm::Constant *VisitChooseExpr(ChooseExpr *CE) {
637 return Visit(CE->getChosenSubExpr());
638 }
639
640 llvm::Constant *VisitCompoundLiteralExpr(CompoundLiteralExpr *E) {
641 return Visit(E->getInitializer());
642 }
643
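// For illustration: a GCC cast-to-union such as (union U)1, with
// union U { int i; double d; }, is emitted below as the anonymous struct
// { i32 1, [4 x i8] undef } padded out to sizeof(U); most other cast kinds
// either pass the operand through unchanged or are left for Expr::Evaluate
// to fold.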
644 llvm::Constant *VisitCastExpr(CastExpr* E) {
645 if (const auto *ECE = dyn_cast<ExplicitCastExpr>(E))
646 CGM.EmitExplicitCastExprType(ECE, CGF);
647 Expr *subExpr = E->getSubExpr();
648 llvm::Constant *C = CGM.EmitConstantExpr(subExpr, subExpr->getType(), CGF);
649 if (!C) return nullptr;
650
651 llvm::Type *destType = ConvertType(E->getType());
652
653 switch (E->getCastKind()) {
654 case CK_ToUnion: {
655 // GCC cast to union extension
656 assert(E->getType()->isUnionType() &&
657 "Destination type is not union type!");
658
659 // Build a struct with the union sub-element as the first member,
660 // and padded to the appropriate size
661 SmallVector<llvm::Constant*, 2> Elts;
662 SmallVector<llvm::Type*, 2> Types;
663 Elts.push_back(C);
664 Types.push_back(C->getType());
665 unsigned CurSize = CGM.getDataLayout().getTypeAllocSize(C->getType());
666 unsigned TotalSize = CGM.getDataLayout().getTypeAllocSize(destType);
667
668 assert(CurSize <= TotalSize && "Union size mismatch!");
669 if (unsigned NumPadBytes = TotalSize - CurSize) {
670 llvm::Type *Ty = CGM.Int8Ty;
671 if (NumPadBytes > 1)
672 Ty = llvm::ArrayType::get(Ty, NumPadBytes);
673
674 Elts.push_back(llvm::UndefValue::get(Ty));
675 Types.push_back(Ty);
676 }
677
678 llvm::StructType* STy =
679 llvm::StructType::get(C->getType()->getContext(), Types, false);
680 return llvm::ConstantStruct::get(STy, Elts);
681 }
682
683 case CK_AddressSpaceConversion:
684 return llvm::ConstantExpr::getAddrSpaceCast(C, destType);
685
686 case CK_LValueToRValue:
687 case CK_AtomicToNonAtomic:
688 case CK_NonAtomicToAtomic:
689 case CK_NoOp:
690 case CK_ConstructorConversion:
691 return C;
692
693 case CK_Dependent: llvm_unreachable("saw dependent cast!");
694
695 case CK_BuiltinFnToFnPtr:
696 llvm_unreachable("builtin functions are handled elsewhere");
697
698 case CK_ReinterpretMemberPointer:
699 case CK_DerivedToBaseMemberPointer:
700 case CK_BaseToDerivedMemberPointer:
701 return CGM.getCXXABI().EmitMemberPointerConversion(E, C);
702
703 // These will never be supported.
704 case CK_ObjCObjectLValueCast:
705 case CK_ARCProduceObject:
706 case CK_ARCConsumeObject:
707 case CK_ARCReclaimReturnedObject:
708 case CK_ARCExtendBlockObject:
709 case CK_CopyAndAutoreleaseBlockObject:
710 return nullptr;
711
712 // These don't need to be handled here because Evaluate knows how to
713 // evaluate them in the cases where they can be folded.
714 case CK_BitCast:
715 case CK_ToVoid:
716 case CK_Dynamic:
717 case CK_LValueBitCast:
718 case CK_NullToMemberPointer:
719 case CK_UserDefinedConversion:
720 case CK_CPointerToObjCPointerCast:
721 case CK_BlockPointerToObjCPointerCast:
722 case CK_AnyPointerToBlockPointerCast:
723 case CK_ArrayToPointerDecay:
724 case CK_FunctionToPointerDecay:
725 case CK_BaseToDerived:
726 case CK_DerivedToBase:
727 case CK_UncheckedDerivedToBase:
728 case CK_MemberPointerToBoolean:
729 case CK_VectorSplat:
730 case CK_FloatingRealToComplex:
731 case CK_FloatingComplexToReal:
732 case CK_FloatingComplexToBoolean:
733 case CK_FloatingComplexCast:
734 case CK_FloatingComplexToIntegralComplex:
735 case CK_IntegralRealToComplex:
736 case CK_IntegralComplexToReal:
737 case CK_IntegralComplexToBoolean:
738 case CK_IntegralComplexCast:
739 case CK_IntegralComplexToFloatingComplex:
740 case CK_PointerToIntegral:
741 case CK_PointerToBoolean:
742 case CK_NullToPointer:
743 case CK_IntegralCast:
744 case CK_BooleanToSignedIntegral:
745 case CK_IntegralToPointer:
746 case CK_IntegralToBoolean:
747 case CK_IntegralToFloating:
748 case CK_FloatingToIntegral:
749 case CK_FloatingToBoolean:
750 case CK_FloatingCast:
751 case CK_ZeroToOCLEvent:
752 return nullptr;
753 }
754 llvm_unreachable("Invalid CastKind");
755 }
756
757 llvm::Constant *VisitCXXDefaultArgExpr(CXXDefaultArgExpr *DAE) {
758 return Visit(DAE->getExpr());
759 }
760
761 llvm::Constant *VisitCXXDefaultInitExpr(CXXDefaultInitExpr *DIE) {
762 // No need for a DefaultInitExprScope: we don't handle 'this' in a
763 // constant expression.
764 return Visit(DIE->getExpr());
765 }
766
767 llvm::Constant *VisitExprWithCleanups(ExprWithCleanups *E) {
768 if (!E->cleanupsHaveSideEffects())
769 return Visit(E->getSubExpr());
770 return nullptr;
771 }
772
773 llvm::Constant *VisitMaterializeTemporaryExpr(MaterializeTemporaryExpr *E) {
774 return Visit(E->GetTemporaryExpr());
775 }
776
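// For illustration: int a[100] = { 1, 2 } emits the two explicit elements and
// then reuses a single null i32 filler for the remaining 98 slots; when there
// are no explicit elements and the filler is null, the whole initializer
// collapses to a ConstantAggregateZero. Elements whose IR type differs from
// the array element type force the result into a packed struct instead.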
777 llvm::Constant *EmitArrayInitialization(InitListExpr *ILE) {
778 if (ILE->isStringLiteralInit())
779 return Visit(ILE->getInit(0));
780
781 llvm::ArrayType *AType =
782 cast<llvm::ArrayType>(ConvertType(ILE->getType()));
783 llvm::Type *ElemTy = AType->getElementType();
784 unsigned NumInitElements = ILE->getNumInits();
785 unsigned NumElements = AType->getNumElements();
786
787 // Initialising an array requires us to automatically
788 // initialise any elements that have not been initialised explicitly.
789 unsigned NumInitableElts = std::min(NumInitElements, NumElements);
790
791 // Initialize remaining array elements.
792 // FIXME: This doesn't handle member pointers correctly!
793 llvm::Constant *fillC;
794 if (Expr *filler = ILE->getArrayFiller())
795 fillC = CGM.EmitConstantExpr(filler, filler->getType(), CGF);
796 else
797 fillC = llvm::Constant::getNullValue(ElemTy);
798 if (!fillC)
799 return nullptr;
800
801 // Try to use a ConstantAggregateZero if we can.
802 if (fillC->isNullValue() && !NumInitableElts)
803 return llvm::ConstantAggregateZero::get(AType);
804
805 // Copy initializer elements.
806 std::vector<llvm::Constant*> Elts;
807 Elts.reserve(NumInitableElts + NumElements);
808
809 bool RewriteType = false;
810 for (unsigned i = 0; i < NumInitableElts; ++i) {
811 Expr *Init = ILE->getInit(i);
812 llvm::Constant *C = CGM.EmitConstantExpr(Init, Init->getType(), CGF);
813 if (!C)
814 return nullptr;
815 RewriteType |= (C->getType() != ElemTy);
816 Elts.push_back(C);
817 }
818
819 RewriteType |= (fillC->getType() != ElemTy);
820 Elts.resize(NumElements, fillC);
821
822 if (RewriteType) {
823 // FIXME: Try to avoid packing the array
824 std::vector<llvm::Type*> Types;
825 Types.reserve(NumInitableElts + NumElements);
826 for (unsigned i = 0, e = Elts.size(); i < e; ++i)
827 Types.push_back(Elts[i]->getType());
828 llvm::StructType *SType = llvm::StructType::get(AType->getContext(),
829 Types, true);
830 return llvm::ConstantStruct::get(SType, Elts);
831 }
832
833 return llvm::ConstantArray::get(AType, Elts);
834 }
835
836 llvm::Constant *EmitRecordInitialization(InitListExpr *ILE) {
837 return ConstStructBuilder::BuildStruct(CGM, CGF, ILE);
838 }
839
840 llvm::Constant *VisitImplicitValueInitExpr(ImplicitValueInitExpr* E) {
841 return CGM.EmitNullConstant(E->getType());
842 }
843
844 llvm::Constant *VisitInitListExpr(InitListExpr *ILE) {
845 if (ILE->getType()->isArrayType())
846 return EmitArrayInitialization(ILE);
847
848 if (ILE->getType()->isRecordType())
849 return EmitRecordInitialization(ILE);
850
851 return nullptr;
852 }
853
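// For illustration: if the base constant is [3 x i32] {1, 2, 3} and the
// updater only rewrites element 1, the untouched elements are copied out of
// the base and just that slot is re-emitted; a NoInitExpr entry means "keep
// the corresponding base value".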
854 llvm::Constant *EmitDesignatedInitUpdater(llvm::Constant *Base,
855 InitListExpr *Updater) {
856 QualType ExprType = Updater->getType();
857
858 if (ExprType->isArrayType()) {
859 llvm::ArrayType *AType = cast<llvm::ArrayType>(ConvertType(ExprType));
860 llvm::Type *ElemType = AType->getElementType();
861
862 unsigned NumInitElements = Updater->getNumInits();
863 unsigned NumElements = AType->getNumElements();
864
865 std::vector<llvm::Constant *> Elts;
866 Elts.reserve(NumElements);
867
868 if (llvm::ConstantDataArray *DataArray =
869 dyn_cast<llvm::ConstantDataArray>(Base))
870 for (unsigned i = 0; i != NumElements; ++i)
871 Elts.push_back(DataArray->getElementAsConstant(i));
872 else if (llvm::ConstantArray *Array =
873 dyn_cast<llvm::ConstantArray>(Base))
874 for (unsigned i = 0; i != NumElements; ++i)
875 Elts.push_back(Array->getOperand(i));
876 else
877 return nullptr; // FIXME: other array types not implemented
878
879 llvm::Constant *fillC = nullptr;
880 if (Expr *filler = Updater->getArrayFiller())
881 if (!isa<NoInitExpr>(filler))
882 fillC = CGM.EmitConstantExpr(filler, filler->getType(), CGF);
883 bool RewriteType = (fillC && fillC->getType() != ElemType);
884
885 for (unsigned i = 0; i != NumElements; ++i) {
886 Expr *Init = nullptr;
887 if (i < NumInitElements)
888 Init = Updater->getInit(i);
889
890 if (!Init && fillC)
891 Elts[i] = fillC;
892 else if (!Init || isa<NoInitExpr>(Init))
893 ; // Do nothing.
894 else if (InitListExpr *ChildILE = dyn_cast<InitListExpr>(Init))
895 Elts[i] = EmitDesignatedInitUpdater(Elts[i], ChildILE);
896 else
897 Elts[i] = CGM.EmitConstantExpr(Init, Init->getType(), CGF);
898
899 if (!Elts[i])
900 return nullptr;
901 RewriteType |= (Elts[i]->getType() != ElemType);
902 }
903
904 if (RewriteType) {
905 std::vector<llvm::Type *> Types;
906 Types.reserve(NumElements);
907 for (unsigned i = 0; i != NumElements; ++i)
908 Types.push_back(Elts[i]->getType());
909 llvm::StructType *SType = llvm::StructType::get(AType->getContext(),
910 Types, true);
911 return llvm::ConstantStruct::get(SType, Elts);
912 }
913
914 return llvm::ConstantArray::get(AType, Elts);
915 }
916
917 if (ExprType->isRecordType())
918 return ConstStructBuilder::BuildStruct(CGM, CGF, this,
919 dyn_cast<llvm::ConstantStruct>(Base), Updater);
920
921 return nullptr;
922 }
923
924 llvm::Constant *VisitDesignatedInitUpdateExpr(DesignatedInitUpdateExpr *E) {
925 return EmitDesignatedInitUpdater(
926 CGM.EmitConstantExpr(E->getBase(), E->getType(), CGF),
927 E->getUpdater());
928 }
929
930 llvm::Constant *VisitCXXConstructExpr(CXXConstructExpr *E) {
931 if (!E->getConstructor()->isTrivial())
932 return nullptr;
933
934 QualType Ty = E->getType();
935
936 // FIXME: We should not have to call getBaseElementType here.
937 const RecordType *RT =
938 CGM.getContext().getBaseElementType(Ty)->getAs<RecordType>();
939 const CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
940
941 // If the class doesn't have a trivial destructor, we can't emit it as a
942 // constant expr.
943 if (!RD->hasTrivialDestructor())
944 return nullptr;
945
946 // Only copy and default constructors can be trivial.
947
948
949 if (E->getNumArgs()) {
950 assert(E->getNumArgs() == 1 && "trivial ctor with > 1 argument");
951 assert(E->getConstructor()->isCopyOrMoveConstructor() &&
952 "trivial ctor has argument but isn't a copy/move ctor");
953
954 Expr *Arg = E->getArg(0);
955 assert(CGM.getContext().hasSameUnqualifiedType(Ty, Arg->getType()) &&
956 "argument to copy ctor is of wrong type");
957
958 return Visit(Arg);
959 }
960
961 return CGM.EmitNullConstant(Ty);
962 }
963
964 llvm::Constant *VisitStringLiteral(StringLiteral *E) {
965 return CGM.GetConstantArrayFromStringLiteral(E);
966 }
967
968 llvm::Constant *VisitObjCEncodeExpr(ObjCEncodeExpr *E) {
969 // This must be an @encode initializing an array in a static initializer.
970 // Don't emit it as the address of the string, emit the string data itself
971 // as an inline array.
972 std::string Str;
973 CGM.getContext().getObjCEncodingForType(E->getEncodedType(), Str);
974 QualType T = E->getType();
975 if (T->getTypeClass() == Type::TypeOfExpr)
976 T = cast<TypeOfExprType>(T)->getUnderlyingExpr()->getType();
977 const ConstantArrayType *CAT = cast<ConstantArrayType>(T);
978
979 // Resize the string to the right size, adding zeros at the end, or
980 // truncating as needed.
981 Str.resize(CAT->getSize().getZExtValue(), '\0');
982 return llvm::ConstantDataArray::getString(VMContext, Str, false);
983 }
984
985 llvm::Constant *VisitUnaryExtension(const UnaryOperator *E) {
986 return Visit(E->getSubExpr());
987 }
988
989 // Utility methods
990 llvm::Type *ConvertType(QualType T) {
991 return CGM.getTypes().ConvertType(T);
992 }
993
994 public:
995 ConstantAddress EmitLValue(APValue::LValueBase LVBase) {
996 if (const ValueDecl *Decl = LVBase.dyn_cast<const ValueDecl*>()) {
997 if (Decl->hasAttr<WeakRefAttr>())
998 return CGM.GetWeakRefReference(Decl);
999 if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(Decl))
1000 return ConstantAddress(CGM.GetAddrOfFunction(FD), CharUnits::One());
1001 if (const VarDecl* VD = dyn_cast<VarDecl>(Decl)) {
1002 // We can never refer to a variable with local storage.
1003 if (!VD->hasLocalStorage()) {
1004 CharUnits Align = CGM.getContext().getDeclAlign(VD);
1005 if (VD->isFileVarDecl() || VD->hasExternalStorage())
1006 return ConstantAddress(CGM.GetAddrOfGlobalVar(VD), Align);
1007 else if (VD->isLocalVarDecl()) {
1008 auto Ptr = CGM.getOrCreateStaticVarDecl(
1009 *VD, CGM.getLLVMLinkageVarDefinition(VD, /*isConstant=*/false));
1010 return ConstantAddress(Ptr, Align);
1011 }
1012 }
1013 }
1014 return ConstantAddress::invalid();
1015 }
1016
1017 Expr *E = const_cast<Expr*>(LVBase.get<const Expr*>());
1018 switch (E->getStmtClass()) {
1019 default: break;
1020 case Expr::CompoundLiteralExprClass: {
1021 // Note that due to the nature of compound literals, this is guaranteed
1022 // to be the only use of the variable, so we just generate it here.
1023 CompoundLiteralExpr *CLE = cast<CompoundLiteralExpr>(E);
1024 llvm::Constant* C = CGM.EmitConstantExpr(CLE->getInitializer(),
1025 CLE->getType(), CGF);
1026 // FIXME: "Leaked" on failure.
1027 if (!C) return ConstantAddress::invalid();
1028
1029 CharUnits Align = CGM.getContext().getTypeAlignInChars(E->getType());
1030
1031 auto GV = new llvm::GlobalVariable(CGM.getModule(), C->getType(),
1032 E->getType().isConstant(CGM.getContext()),
1033 llvm::GlobalValue::InternalLinkage,
1034 C, ".compoundliteral", nullptr,
1035 llvm::GlobalVariable::NotThreadLocal,
1036 CGM.getContext().getTargetAddressSpace(E->getType()));
1037 GV->setAlignment(Align.getQuantity());
1038 return ConstantAddress(GV, Align);
1039 }
1040 case Expr::StringLiteralClass:
1041 return CGM.GetAddrOfConstantStringFromLiteral(cast<StringLiteral>(E));
1042 case Expr::ObjCEncodeExprClass:
1043 return CGM.GetAddrOfConstantStringFromObjCEncode(cast<ObjCEncodeExpr>(E));
1044 case Expr::ObjCStringLiteralClass: {
1045 ObjCStringLiteral* SL = cast<ObjCStringLiteral>(E);
1046 ConstantAddress C =
1047 CGM.getObjCRuntime().GenerateConstantString(SL->getString());
1048 return C.getElementBitCast(ConvertType(E->getType()));
1049 }
1050 case Expr::PredefinedExprClass: {
1051 unsigned Type = cast<PredefinedExpr>(E)->getIdentType();
1052 if (CGF) {
1053 LValue Res = CGF->EmitPredefinedLValue(cast<PredefinedExpr>(E));
1054 return cast<ConstantAddress>(Res.getAddress());
1055 } else if (Type == PredefinedExpr::PrettyFunction) {
1056 return CGM.GetAddrOfConstantCString("top level", ".tmp");
1057 }
1058
1059 return CGM.GetAddrOfConstantCString("", ".tmp");
1060 }
1061 case Expr::AddrLabelExprClass: {
1062 assert(CGF && "Invalid address of label expression outside function.");
1063 llvm::Constant *Ptr =
1064 CGF->GetAddrOfLabel(cast<AddrLabelExpr>(E)->getLabel());
1065 Ptr = llvm::ConstantExpr::getBitCast(Ptr, ConvertType(E->getType()));
1066 return ConstantAddress(Ptr, CharUnits::One());
1067 }
1068 case Expr::CallExprClass: {
1069 CallExpr* CE = cast<CallExpr>(E);
1070 unsigned builtin = CE->getBuiltinCallee();
1071 if (builtin !=
1072 Builtin::BI__builtin___CFStringMakeConstantString &&
1073 builtin !=
1074 Builtin::BI__builtin___NSStringMakeConstantString)
1075 break;
1076 const Expr *Arg = CE->getArg(0)->IgnoreParenCasts();
1077 const StringLiteral *Literal = cast<StringLiteral>(Arg);
1078 if (builtin ==
1079 Builtin::BI__builtin___NSStringMakeConstantString) {
1080 return CGM.getObjCRuntime().GenerateConstantString(Literal);
1081 }
1082 // FIXME: need to deal with UCN conversion issues.
1083 return CGM.GetAddrOfConstantCFString(Literal);
1084 }
1085 case Expr::BlockExprClass: {
1086 std::string FunctionName;
1087 if (CGF)
1088 FunctionName = CGF->CurFn->getName();
1089 else
1090 FunctionName = "global";
1091
1092 // This is not really an l-value.
1093 llvm::Constant *Ptr =
1094 CGM.GetAddrOfGlobalBlock(cast<BlockExpr>(E), FunctionName.c_str());
1095 return ConstantAddress(Ptr, CGM.getPointerAlign());
1096 }
1097 case Expr::CXXTypeidExprClass: {
1098 CXXTypeidExpr *Typeid = cast<CXXTypeidExpr>(E);
1099 QualType T;
1100 if (Typeid->isTypeOperand())
1101 T = Typeid->getTypeOperand(CGM.getContext());
1102 else
1103 T = Typeid->getExprOperand()->getType();
1104 return ConstantAddress(CGM.GetAddrOfRTTIDescriptor(T),
1105 CGM.getPointerAlign());
1106 }
1107 case Expr::CXXUuidofExprClass: {
1108 return CGM.GetAddrOfUuidDescriptor(cast<CXXUuidofExpr>(E));
1109 }
1110 case Expr::MaterializeTemporaryExprClass: {
1111 MaterializeTemporaryExpr *MTE = cast<MaterializeTemporaryExpr>(E);
1112 assert(MTE->getStorageDuration() == SD_Static);
1113 SmallVector<const Expr *, 2> CommaLHSs;
1114 SmallVector<SubobjectAdjustment, 2> Adjustments;
1115 const Expr *Inner = MTE->GetTemporaryExpr()
1116 ->skipRValueSubobjectAdjustments(CommaLHSs, Adjustments);
1117 return CGM.GetAddrOfGlobalTemporary(MTE, Inner);
1118 }
1119 }
1120
1121 return ConstantAddress::invalid();
1122 }
1123 };
1124
1125 } // end anonymous namespace.
1126
1127 bool ConstStructBuilder::Build(ConstExprEmitter *Emitter,
1128 llvm::ConstantStruct *Base,
1129 InitListExpr *Updater) {
1130 assert(Base && "base expression should not be empty");
1131
1132 QualType ExprType = Updater->getType();
1133 RecordDecl *RD = ExprType->getAs<RecordType>()->getDecl();
1134 const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);
1135 const llvm::StructLayout *BaseLayout = CGM.getDataLayout().getStructLayout(
1136 Base->getType());
1137 unsigned FieldNo = -1;
1138 unsigned ElementNo = 0;
1139
1140 // Bail out if we have base classes. We could support these, but they only
1141 // arise in C++1z where we will have already constant folded most interesting
1142 // cases. FIXME: There are still a few more cases we can handle this way.
1143 if (auto *CXXRD = dyn_cast<CXXRecordDecl>(RD))
1144 if (CXXRD->getNumBases())
1145 return false;
1146
1147 for (FieldDecl *Field : RD->fields()) {
1148 ++FieldNo;
1149
1150 if (RD->isUnion() && Updater->getInitializedFieldInUnion() != Field)
1151 continue;
1152
1153 // Skip anonymous bitfields.
1154 if (Field->isUnnamedBitfield())
1155 continue;
1156
1157 llvm::Constant *EltInit = Base->getOperand(ElementNo);
1158
1159 // Bail out if the type of the ConstantStruct does not have the same layout
1160 // as the type of the InitListExpr.
1161 if (CGM.getTypes().ConvertType(Field->getType()) != EltInit->getType() ||
1162 Layout.getFieldOffset(ElementNo) !=
1163 BaseLayout->getElementOffsetInBits(ElementNo))
1164 return false;
1165
1166 // Get the initializer. If we encounter an empty field or a NoInitExpr,
1167 // we use values from the base expression.
1168 Expr *Init = nullptr;
1169 if (ElementNo < Updater->getNumInits())
1170 Init = Updater->getInit(ElementNo);
1171
1172 if (!Init || isa<NoInitExpr>(Init))
1173 ; // Do nothing.
1174 else if (InitListExpr *ChildILE = dyn_cast<InitListExpr>(Init))
1175 EltInit = Emitter->EmitDesignatedInitUpdater(EltInit, ChildILE);
1176 else
1177 EltInit = CGM.EmitConstantExpr(Init, Field->getType(), CGF);
1178
1179 ++ElementNo;
1180
1181 if (!EltInit)
1182 return false;
1183
1184 if (!Field->isBitField())
1185 AppendField(Field, Layout.getFieldOffset(FieldNo), EltInit);
1186 else if (llvm::ConstantInt *CI = dyn_cast<llvm::ConstantInt>(EltInit))
1187 AppendBitField(Field, Layout.getFieldOffset(FieldNo), CI);
1188 else
1189 // Initializing a bitfield with a non-trivial constant?
1190 return false;
1191 }
1192
1193 return true;
1194 }
1195
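// For illustration: a file-scope variable of class type whose initializer is
// a trivial default construction (say, a hypothetical "static Widget w;")
// takes the early EmitNullConstant path and is never evaluated; a constant
// bool initializer comes back as an i1 and is widened below to the in-memory
// bool type.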
1196 llvm::Constant *CodeGenModule::EmitConstantInit(const VarDecl &D,
1197 CodeGenFunction *CGF) {
1198 // Make a quick check if variable can be default NULL initialized
1199 // and avoid going through rest of code which may do, for c++11,
1200 // initialization of memory to all NULLs.
1201 if (!D.hasLocalStorage()) {
1202 QualType Ty = D.getType();
1203 if (Ty->isArrayType())
1204 Ty = Context.getBaseElementType(Ty);
1205 if (Ty->isRecordType())
1206 if (const CXXConstructExpr *E =
1207 dyn_cast_or_null<CXXConstructExpr>(D.getInit())) {
1208 const CXXConstructorDecl *CD = E->getConstructor();
1209 if (CD->isTrivial() && CD->isDefaultConstructor())
1210 return EmitNullConstant(D.getType());
1211 }
1212 }
1213
1214 if (const APValue *Value = D.evaluateValue())
1215 return EmitConstantValueForMemory(*Value, D.getType(), CGF);
1216
1217 // FIXME: Implement C++11 [basic.start.init]p2: if the initializer of a
1218 // reference is a constant expression, and the reference binds to a temporary,
1219 // then constant initialization is performed. ConstExprEmitter will
1220 // incorrectly emit a prvalue constant in this case, and the calling code
1221 // interprets that as the (pointer) value of the reference, rather than the
1222 // desired value of the referee.
1223 if (D.getType()->isReferenceType())
1224 return nullptr;
1225
1226 const Expr *E = D.getInit();
1227 assert(E && "No initializer to emit");
1228
1229 llvm::Constant* C = ConstExprEmitter(*this, CGF).Visit(const_cast<Expr*>(E));
1230 if (C && C->getType()->isIntegerTy(1)) {
1231 llvm::Type *BoolTy = getTypes().ConvertTypeForMem(E->getType());
1232 C = llvm::ConstantExpr::getZExt(C, BoolTy);
1233 }
1234 return C;
1235 }
1236
1237 llvm::Constant *CodeGenModule::EmitConstantExpr(const Expr *E,
1238 QualType DestType,
1239 CodeGenFunction *CGF) {
1240 Expr::EvalResult Result;
1241
1242 bool Success = false;
1243
1244 if (DestType->isReferenceType())
1245 Success = E->EvaluateAsLValue(Result, Context);
1246 else
1247 Success = E->EvaluateAsRValue(Result, Context);
1248
1249 llvm::Constant *C = nullptr;
1250 if (Success && !Result.HasSideEffects)
1251 C = EmitConstantValue(Result.Val, DestType, CGF);
1252 else
1253 C = ConstExprEmitter(*this, CGF).Visit(const_cast<Expr*>(E));
1254
1255 if (C && C->getType()->isIntegerTy(1)) {
1256 llvm::Type *BoolTy = getTypes().ConvertTypeForMem(E->getType());
1257 C = llvm::ConstantExpr::getZExt(C, BoolTy);
1258 }
1259 return C;
1260 }
1261
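// For illustration: an lvalue APValue such as &arr[2] for a global
// "int arr[10]" is emitted as the address of 'arr' bitcast to i8*, offset by
// an 8-byte GEP, and then pointer-cast to the destination type; a plain
// integer APValue of 42 for an int destination is simply "i32 42".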
1262 llvm::Constant *CodeGenModule::EmitConstantValue(const APValue &Value,
1263 QualType DestType,
1264 CodeGenFunction *CGF) {
1265 // For an _Atomic-qualified constant, we may need to add tail padding.
1266 if (auto *AT = DestType->getAs<AtomicType>()) {
1267 QualType InnerType = AT->getValueType();
1268 auto *Inner = EmitConstantValue(Value, InnerType, CGF);
1269
1270 uint64_t InnerSize = Context.getTypeSize(InnerType);
1271 uint64_t OuterSize = Context.getTypeSize(DestType);
1272 if (InnerSize == OuterSize)
1273 return Inner;
1274
1275 assert(InnerSize < OuterSize && "emitted over-large constant for atomic");
1276 llvm::Constant *Elts[] = {
1277 Inner,
1278 llvm::ConstantAggregateZero::get(
1279 llvm::ArrayType::get(Int8Ty, (OuterSize - InnerSize) / 8))
1280 };
1281 return llvm::ConstantStruct::getAnon(Elts);
1282 }
1283
1284 switch (Value.getKind()) {
1285 case APValue::Uninitialized:
1286 llvm_unreachable("Constant expressions should be initialized.");
1287 case APValue::LValue: {
1288 llvm::Type *DestTy = getTypes().ConvertTypeForMem(DestType);
1289 llvm::Constant *Offset =
1290 llvm::ConstantInt::get(Int64Ty, Value.getLValueOffset().getQuantity());
1291
1292 llvm::Constant *C = nullptr;
1293 if (APValue::LValueBase LVBase = Value.getLValueBase()) {
1294 // An array can be represented as an lvalue referring to the base.
1295 if (isa<llvm::ArrayType>(DestTy)) {
1296 assert(Offset->isNullValue() && "offset on array initializer");
1297 return ConstExprEmitter(*this, CGF).Visit(
1298 const_cast<Expr*>(LVBase.get<const Expr*>()));
1299 }
1300
1301 C = ConstExprEmitter(*this, CGF).EmitLValue(LVBase).getPointer();
1302
1303 // Apply offset if necessary.
1304 if (!Offset->isNullValue()) {
1305 unsigned AS = C->getType()->getPointerAddressSpace();
1306 llvm::Type *CharPtrTy = Int8Ty->getPointerTo(AS);
1307 llvm::Constant *Casted = llvm::ConstantExpr::getBitCast(C, CharPtrTy);
1308 Casted = llvm::ConstantExpr::getGetElementPtr(Int8Ty, Casted, Offset);
1309 C = llvm::ConstantExpr::getPointerCast(Casted, C->getType());
1310 }
1311
1312 // Convert to the appropriate type; this could be an lvalue for
1313 // an integer.
1314 if (isa<llvm::PointerType>(DestTy))
1315 return llvm::ConstantExpr::getPointerCast(C, DestTy);
1316
1317 return llvm::ConstantExpr::getPtrToInt(C, DestTy);
1318 } else {
1319 C = Offset;
1320
1321 // Convert to the appropriate type; this could be an lvalue for
1322 // an integer.
1323 if (isa<llvm::PointerType>(DestTy)) {
1324 // Convert the integer to a pointer-sized integer before converting it
1325 // to a pointer.
1326 C = llvm::ConstantExpr::getIntegerCast(
1327 C, getDataLayout().getIntPtrType(DestTy),
1328 /*isSigned=*/false);
1329 return llvm::ConstantExpr::getIntToPtr(C, DestTy);
1330 }
1331
1332 // If the types don't match this should only be a truncate.
1333 if (C->getType() != DestTy)
1334 return llvm::ConstantExpr::getTrunc(C, DestTy);
1335
1336 return C;
1337 }
1338 }
1339 case APValue::Int:
1340 return llvm::ConstantInt::get(VMContext, Value.getInt());
1341 case APValue::ComplexInt: {
1342 llvm::Constant *Complex[2];
1343
1344 Complex[0] = llvm::ConstantInt::get(VMContext,
1345 Value.getComplexIntReal());
1346 Complex[1] = llvm::ConstantInt::get(VMContext,
1347 Value.getComplexIntImag());
1348
1349 // FIXME: the target may want to specify that this is packed.
1350 llvm::StructType *STy = llvm::StructType::get(Complex[0]->getType(),
1351 Complex[1]->getType(),
1352 nullptr);
1353 return llvm::ConstantStruct::get(STy, Complex);
1354 }
1355 case APValue::Float: {
1356 const llvm::APFloat &Init = Value.getFloat();
1357 if (&Init.getSemantics() == &llvm::APFloat::IEEEhalf &&
1358 !Context.getLangOpts().NativeHalfType &&
1359 !Context.getLangOpts().HalfArgsAndReturns)
1360 return llvm::ConstantInt::get(VMContext, Init.bitcastToAPInt());
1361 else
1362 return llvm::ConstantFP::get(VMContext, Init);
1363 }
1364 case APValue::ComplexFloat: {
1365 llvm::Constant *Complex[2];
1366
1367 Complex[0] = llvm::ConstantFP::get(VMContext,
1368 Value.getComplexFloatReal());
1369 Complex[1] = llvm::ConstantFP::get(VMContext,
1370 Value.getComplexFloatImag());
1371
1372 // FIXME: the target may want to specify that this is packed.
1373 llvm::StructType *STy = llvm::StructType::get(Complex[0]->getType(),
1374 Complex[1]->getType(),
1375 nullptr);
1376 return llvm::ConstantStruct::get(STy, Complex);
1377 }
1378 case APValue::Vector: {
1379 unsigned NumElts = Value.getVectorLength();
1380 SmallVector<llvm::Constant *, 4> Inits(NumElts);
1381
1382 for (unsigned I = 0; I != NumElts; ++I) {
1383 const APValue &Elt = Value.getVectorElt(I);
1384 if (Elt.isInt())
1385 Inits[I] = llvm::ConstantInt::get(VMContext, Elt.getInt());
1386 else if (Elt.isFloat())
1387 Inits[I] = llvm::ConstantFP::get(VMContext, Elt.getFloat());
1388 else
1389 llvm_unreachable("unsupported vector element type");
1390 }
1391 return llvm::ConstantVector::get(Inits);
1392 }
1393 case APValue::AddrLabelDiff: {
1394 const AddrLabelExpr *LHSExpr = Value.getAddrLabelDiffLHS();
1395 const AddrLabelExpr *RHSExpr = Value.getAddrLabelDiffRHS();
1396 llvm::Constant *LHS = EmitConstantExpr(LHSExpr, LHSExpr->getType(), CGF);
1397 llvm::Constant *RHS = EmitConstantExpr(RHSExpr, RHSExpr->getType(), CGF);
1398
1399 // Compute difference
1400 llvm::Type *ResultType = getTypes().ConvertType(DestType);
1401 LHS = llvm::ConstantExpr::getPtrToInt(LHS, IntPtrTy);
1402 RHS = llvm::ConstantExpr::getPtrToInt(RHS, IntPtrTy);
1403 llvm::Constant *AddrLabelDiff = llvm::ConstantExpr::getSub(LHS, RHS);
1404
1405 // LLVM is a bit sensitive about the exact format of the
1406 // address-of-label difference; make sure to truncate after
1407 // the subtraction.
1408 return llvm::ConstantExpr::getTruncOrBitCast(AddrLabelDiff, ResultType);
1409 }
1410 case APValue::Struct:
1411 case APValue::Union:
1412 return ConstStructBuilder::BuildStruct(*this, CGF, Value, DestType);
1413 case APValue::Array: {
1414 const ArrayType *CAT = Context.getAsArrayType(DestType);
1415 unsigned NumElements = Value.getArraySize();
1416 unsigned NumInitElts = Value.getArrayInitializedElts();
1417
1418 // Emit array filler, if there is one.
1419 llvm::Constant *Filler = nullptr;
1420 if (Value.hasArrayFiller())
1421 Filler = EmitConstantValueForMemory(Value.getArrayFiller(),
1422 CAT->getElementType(), CGF);
1423
1424 // Emit initializer elements.
1425 llvm::Type *CommonElementType =
1426 getTypes().ConvertType(CAT->getElementType());
1427
1428 // Try to use a ConstantAggregateZero if we can.
1429 if (Filler && Filler->isNullValue() && !NumInitElts) {
1430 llvm::ArrayType *AType =
1431 llvm::ArrayType::get(CommonElementType, NumElements);
1432 return llvm::ConstantAggregateZero::get(AType);
1433 }
1434
1435 std::vector<llvm::Constant*> Elts;
1436 Elts.reserve(NumElements);
1437 for (unsigned I = 0; I < NumElements; ++I) {
1438 llvm::Constant *C = Filler;
1439 if (I < NumInitElts)
1440 C = EmitConstantValueForMemory(Value.getArrayInitializedElt(I),
1441 CAT->getElementType(), CGF);
1442 else
1443 assert(Filler && "Missing filler for implicit elements of initializer");
1444 if (I == 0)
1445 CommonElementType = C->getType();
1446 else if (C->getType() != CommonElementType)
1447 CommonElementType = nullptr;
1448 Elts.push_back(C);
1449 }
1450
1451 if (!CommonElementType) {
1452 // FIXME: Try to avoid packing the array
1453 std::vector<llvm::Type*> Types;
1454 Types.reserve(NumElements);
1455 for (unsigned i = 0, e = Elts.size(); i < e; ++i)
1456 Types.push_back(Elts[i]->getType());
1457 llvm::StructType *SType = llvm::StructType::get(VMContext, Types, true);
1458 return llvm::ConstantStruct::get(SType, Elts);
1459 }
1460
1461 llvm::ArrayType *AType =
1462 llvm::ArrayType::get(CommonElementType, NumElements);
1463 return llvm::ConstantArray::get(AType, Elts);
1464 }
1465 case APValue::MemberPointer:
1466 return getCXXABI().EmitMemberPointer(Value, DestType);
1467 }
1468 llvm_unreachable("Unknown APValue kind");
1469 }
1470
1471 llvm::Constant *
1472 CodeGenModule::EmitConstantValueForMemory(const APValue &Value,
1473 QualType DestType,
1474 CodeGenFunction *CGF) {
1475 llvm::Constant *C = EmitConstantValue(Value, DestType, CGF);
1476 if (C->getType()->isIntegerTy(1)) {
1477 llvm::Type *BoolTy = getTypes().ConvertTypeForMem(DestType);
1478 C = llvm::ConstantExpr::getZExt(C, BoolTy);
1479 }
1480 return C;
1481 }
1482
1483 ConstantAddress
1484 CodeGenModule::GetAddrOfConstantCompoundLiteral(const CompoundLiteralExpr *E) {
1485 assert(E->isFileScope() && "not a file-scope compound literal expr");
1486 return ConstExprEmitter(*this, nullptr).EmitLValue(E);
1487 }
1488
1489 llvm::Constant *
1490 CodeGenModule::getMemberPointerConstant(const UnaryOperator *uo) {
1491 // Member pointer constants always have a very particular form.
1492 const MemberPointerType *type = cast<MemberPointerType>(uo->getType());
1493 const ValueDecl *decl = cast<DeclRefExpr>(uo->getSubExpr())->getDecl();
1494
1495 // A member function pointer.
1496 if (const CXXMethodDecl *method = dyn_cast<CXXMethodDecl>(decl))
1497 return getCXXABI().EmitMemberFunctionPointer(method);
1498
1499 // Otherwise, a member data pointer.
1500 uint64_t fieldOffset = getContext().getFieldOffset(decl);
1501 CharUnits chars = getContext().toCharUnitsFromBits((int64_t) fieldOffset);
1502 return getCXXABI().EmitMemberDataPointer(type, chars);
1503 }
1504
1505 static llvm::Constant *EmitNullConstantForBase(CodeGenModule &CGM,
1506 llvm::Type *baseType,
1507 const CXXRecordDecl *base);
1508
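// For illustration: "null" for a class containing a pointer to data member is
// not all-zero-bits (the Itanium C++ ABI represents a null data member
// pointer as -1), so bases and fields are filled in explicitly here and only
// the remaining slots fall back to zero initializers.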
1509 static llvm::Constant *EmitNullConstant(CodeGenModule &CGM,
1510 const CXXRecordDecl *record,
1511 bool asCompleteObject) {
1512 const CGRecordLayout &layout = CGM.getTypes().getCGRecordLayout(record);
1513 llvm::StructType *structure =
1514 (asCompleteObject ? layout.getLLVMType()
1515 : layout.getBaseSubobjectLLVMType());
1516
1517 unsigned numElements = structure->getNumElements();
1518 std::vector<llvm::Constant *> elements(numElements);
1519
1520 // Fill in all the bases.
1521 for (const auto &I : record->bases()) {
1522 if (I.isVirtual()) {
1523 // Ignore virtual bases; if we're laying out for a complete
1524 // object, we'll lay these out later.
1525 continue;
1526 }
1527
1528 const CXXRecordDecl *base =
1529 cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());
1530
1531 // Ignore empty bases.
1532 if (base->isEmpty())
1533 continue;
1534
1535 unsigned fieldIndex = layout.getNonVirtualBaseLLVMFieldNo(base);
1536 llvm::Type *baseType = structure->getElementType(fieldIndex);
1537 elements[fieldIndex] = EmitNullConstantForBase(CGM, baseType, base);
1538 }
1539
1540 // Fill in all the fields.
1541 for (const auto *Field : record->fields()) {
1542 // Fill in non-bitfields. (Bitfields always use a zero pattern, which we
1543 // will fill in later.)
1544 if (!Field->isBitField()) {
1545 unsigned fieldIndex = layout.getLLVMFieldNo(Field);
1546 elements[fieldIndex] = CGM.EmitNullConstant(Field->getType());
1547 }
1548
1549 // For unions, stop after the first named field.
1550 if (record->isUnion()) {
1551 if (Field->getIdentifier())
1552 break;
1553 if (const auto *FieldRD =
1554 dyn_cast_or_null<RecordDecl>(Field->getType()->getAsTagDecl()))
1555 if (FieldRD->findFirstNamedDataMember())
1556 break;
1557 }
1558 }
1559
1560 // Fill in the virtual bases, if we're working with the complete object.
1561 if (asCompleteObject) {
1562 for (const auto &I : record->vbases()) {
1563 const CXXRecordDecl *base =
1564 cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());
1565
1566 // Ignore empty bases.
1567 if (base->isEmpty())
1568 continue;
1569
1570 unsigned fieldIndex = layout.getVirtualBaseIndex(base);
1571
1572 // We might have already laid this field out.
1573 if (elements[fieldIndex]) continue;
1574
1575 llvm::Type *baseType = structure->getElementType(fieldIndex);
1576 elements[fieldIndex] = EmitNullConstantForBase(CGM, baseType, base);
1577 }
1578 }
1579
1580 // Now go through all other fields and zero them out.
1581 for (unsigned i = 0; i != numElements; ++i) {
1582 if (!elements[i])
1583 elements[i] = llvm::Constant::getNullValue(structure->getElementType(i));
1584 }
1585
1586 return llvm::ConstantStruct::get(structure, elements);
1587 }
1588
1589 /// Emit the null constant for a base subobject.
1590 static llvm::Constant *EmitNullConstantForBase(CodeGenModule &CGM,
1591 llvm::Type *baseType,
1592 const CXXRecordDecl *base) {
1593 const CGRecordLayout &baseLayout = CGM.getTypes().getCGRecordLayout(base);
1594
1595 // Just zero out bases that don't have any pointer to data members.
1596 if (baseLayout.isZeroInitializableAsBase())
1597 return llvm::Constant::getNullValue(baseType);
1598
1599 // Otherwise, we can just use its null constant.
1600 return EmitNullConstant(CGM, base, /*asCompleteObject=*/false);
1601 }
1602
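// For illustration: most types take the getNullValue fast path below; only
// types that are not zero initializable, such as arrays or records containing
// pointers to data members, need the element-by-element construction above.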
1603 llvm::Constant *CodeGenModule::EmitNullConstant(QualType T) {
1604 if (getTypes().isZeroInitializable(T))
1605 return llvm::Constant::getNullValue(getTypes().ConvertTypeForMem(T));
1606
1607 if (const ConstantArrayType *CAT = Context.getAsConstantArrayType(T)) {
1608 llvm::ArrayType *ATy =
1609 cast<llvm::ArrayType>(getTypes().ConvertTypeForMem(T));
1610
1611 QualType ElementTy = CAT->getElementType();
1612
1613 llvm::Constant *Element = EmitNullConstant(ElementTy);
1614 unsigned NumElements = CAT->getSize().getZExtValue();
1615 SmallVector<llvm::Constant *, 8> Array(NumElements, Element);
1616 return llvm::ConstantArray::get(ATy, Array);
1617 }
1618
1619 if (const RecordType *RT = T->getAs<RecordType>()) {
1620 const CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
1621 return ::EmitNullConstant(*this, RD, /*complete object*/ true);
1622 }
1623
1624 assert(T->isMemberDataPointerType() &&
1625 "Should only see pointers to data members here!");
1626
1627 return getCXXABI().EmitNullMemberPointer(T->castAs<MemberPointerType>());
1628 }
1629
1630 llvm::Constant *
1631 CodeGenModule::EmitNullConstantForBase(const CXXRecordDecl *Record) {
1632 return ::EmitNullConstant(*this, Record, false);
1633 }
1634