xref: /freebsd/contrib/llvm-project/clang/lib/CodeGen/CGClass.cpp (revision 770cf0a5f02dc8983a89c6568d741fbc25baa999)
1 //===--- CGClass.cpp - Emit LLVM Code for C++ classes -----------*- C++ -*-===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This contains code dealing with C++ code generation of classes
10 //
11 //===----------------------------------------------------------------------===//
12 
13 #include "ABIInfoImpl.h"
14 #include "CGBlocks.h"
15 #include "CGCXXABI.h"
16 #include "CGDebugInfo.h"
17 #include "CGRecordLayout.h"
18 #include "CodeGenFunction.h"
19 #include "TargetInfo.h"
20 #include "clang/AST/Attr.h"
21 #include "clang/AST/CXXInheritance.h"
22 #include "clang/AST/CharUnits.h"
23 #include "clang/AST/DeclTemplate.h"
24 #include "clang/AST/EvaluatedExprVisitor.h"
25 #include "clang/AST/RecordLayout.h"
26 #include "clang/AST/StmtCXX.h"
27 #include "clang/Basic/CodeGenOptions.h"
28 #include "clang/CodeGen/CGFunctionInfo.h"
29 #include "llvm/IR/Intrinsics.h"
30 #include "llvm/IR/Metadata.h"
31 #include "llvm/Support/SaveAndRestore.h"
32 #include "llvm/Transforms/Utils/SanitizerStats.h"
33 #include <optional>
34 
35 using namespace clang;
36 using namespace CodeGen;
37 
38 /// Return the best known alignment for an unknown pointer to a
39 /// particular class.
40 CharUnits CodeGenModule::getClassPointerAlignment(const CXXRecordDecl *RD) {
41   if (!RD->hasDefinition())
42     return CharUnits::One(); // Hopefully won't be used anywhere.
43 
44   auto &layout = getContext().getASTRecordLayout(RD);
45 
46   // If the class is final, then we know that the pointer points to an
47   // object of that type and can use the full alignment.
48   if (RD->isEffectivelyFinal())
49     return layout.getAlignment();
50 
51   // Otherwise, we have to assume it could be a subclass.
52   return layout.getNonVirtualAlignment();
53 }
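// Editor's illustration (hypothetical types, not from this file): consider
//
//   struct Overaligned { alignas(16) char buf[16]; };
//   struct B : virtual Overaligned { int x; };
//   struct C final : virtual Overaligned { int x; };
//
// A 'C *' must address a complete object, so the full 16-byte alignment of
// the complete-object layout can be assumed. A 'B *' may address a B base
// subobject inside some further-derived class, where only B's non-virtual
// alignment (essentially the vptr's pointer alignment) is guaranteed.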
54 
55 /// Return the smallest possible amount of storage that might be allocated
56 /// starting from the beginning of an object of a particular class.
57 ///
58 /// This may be smaller than sizeof(RD) if RD has virtual base classes.
59 CharUnits CodeGenModule::getMinimumClassObjectSize(const CXXRecordDecl *RD) {
60   if (!RD->hasDefinition())
61     return CharUnits::One();
62 
63   auto &layout = getContext().getASTRecordLayout(RD);
64 
65   // If the class is final, then we know that the pointer points to an
66   // object of that type and can use its full size.
67   if (RD->isEffectivelyFinal())
68     return layout.getSize();
69 
70   // Otherwise, we have to assume it could be a subclass.
71   return std::max(layout.getNonVirtualSize(), CharUnits::One());
72 }
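// Editor's illustration (hypothetical types): for
//
//   struct V { int v; };
//   struct B : virtual V { };
//
// sizeof(B) includes storage for the virtual base V, but when B is itself a
// base subobject of a further-derived class only B's non-virtual part is
// guaranteed to be allocated at that address; hence the non-virtual size
// (clamped to at least one byte) is the safe lower bound returned above.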
73 
74 /// Return the best known alignment for a pointer to a virtual base,
75 /// given the alignment of a pointer to the derived class.
76 CharUnits CodeGenModule::getVBaseAlignment(CharUnits actualDerivedAlign,
77                                            const CXXRecordDecl *derivedClass,
78                                            const CXXRecordDecl *vbaseClass) {
79   // The basic idea here is that an underaligned derived pointer might
80   // indicate an underaligned base pointer.
81 
82   assert(vbaseClass->isCompleteDefinition());
83   auto &baseLayout = getContext().getASTRecordLayout(vbaseClass);
84   CharUnits expectedVBaseAlign = baseLayout.getNonVirtualAlignment();
85 
86   return getDynamicOffsetAlignment(actualDerivedAlign, derivedClass,
87                                    expectedVBaseAlign);
88 }
89 
90 CharUnits
91 CodeGenModule::getDynamicOffsetAlignment(CharUnits actualBaseAlign,
92                                          const CXXRecordDecl *baseDecl,
93                                          CharUnits expectedTargetAlign) {
94   // If the base is an incomplete type (which is, alas, possible with
95   // member pointers), be pessimistic.
96   if (!baseDecl->isCompleteDefinition())
97     return std::min(actualBaseAlign, expectedTargetAlign);
98 
99   auto &baseLayout = getContext().getASTRecordLayout(baseDecl);
100   CharUnits expectedBaseAlign = baseLayout.getNonVirtualAlignment();
101 
102   // If the class is properly aligned, assume the target offset is, too.
103   //
104   // This actually isn't necessarily the right thing to do --- if the
105   // class is a complete object, but it's only properly aligned for a
106   // base subobject, then the alignments of things relative to it are
107   // probably off as well.  (Note that this requires the alignment of
108   // the target to be greater than the NV alignment of the derived
109   // class.)
110   //
111   // However, our approach to this kind of under-alignment can only
112   // ever be best effort; after all, we're never going to propagate
113   // alignments through variables or parameters.  Note, in particular,
114   // that constructing a polymorphic type in an address that's less
115   // than pointer-aligned will generally trap in the constructor,
116   // unless we someday add some sort of attribute to change the
117   // assumed alignment of 'this'.  So our goal here is pretty much
118   // just to allow the user to explicitly say that a pointer is
119   // under-aligned and then safely access its fields and vtables.
120   if (actualBaseAlign >= expectedBaseAlign) {
121     return expectedTargetAlign;
122   }
123 
124   // Otherwise, we might be offset by an arbitrary multiple of the
125   // actual alignment.  The correct adjustment is to take the min of
126   // the two alignments.
127   return std::min(actualBaseAlign, expectedTargetAlign);
128 }
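// Editor's worked example of the rule above (made-up numbers): if the caller
// only knows the base pointer is 2-aligned (actualBaseAlign = 2) while the
// class would normally be 8-aligned and the target lives at a 4-aligned
// offset (expectedTargetAlign = 4), the address may be off by any multiple
// of 2, so only min(2, 4) = 2 bytes can be assumed. Once the base pointer is
// at least 8-aligned, the target's expected 4-byte alignment is safe to use.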
129 
130 Address CodeGenFunction::LoadCXXThisAddress() {
131   assert(CurFuncDecl && "loading 'this' without a func declaration?");
132   auto *MD = cast<CXXMethodDecl>(CurFuncDecl);
133 
134   // Lazily compute CXXThisAlignment.
135   if (CXXThisAlignment.isZero()) {
136     // Just use the best known alignment for the parent.
137     // TODO: if we're currently emitting a complete-object ctor/dtor,
138     // we can always use the complete-object alignment.
139     CXXThisAlignment = CGM.getClassPointerAlignment(MD->getParent());
140   }
141 
142   return makeNaturalAddressForPointer(
143       LoadCXXThis(), MD->getFunctionObjectParameterType(), CXXThisAlignment,
144       false, nullptr, nullptr, KnownNonNull);
145 }
146 
147 /// Emit the address of a field using a member data pointer.
148 ///
149 /// \param E Only used for emergency diagnostics
150 Address CodeGenFunction::EmitCXXMemberDataPointerAddress(
151     const Expr *E, Address base, llvm::Value *memberPtr,
152     const MemberPointerType *memberPtrType, bool IsInBounds,
153     LValueBaseInfo *BaseInfo, TBAAAccessInfo *TBAAInfo) {
154   // Ask the ABI to compute the actual address.
155   llvm::Value *ptr = CGM.getCXXABI().EmitMemberDataPointerAddress(
156       *this, E, base, memberPtr, memberPtrType, IsInBounds);
157 
158   QualType memberType = memberPtrType->getPointeeType();
159   CharUnits memberAlign =
160       CGM.getNaturalTypeAlignment(memberType, BaseInfo, TBAAInfo);
161   memberAlign = CGM.getDynamicOffsetAlignment(
162       base.getAlignment(), memberPtrType->getMostRecentCXXRecordDecl(),
163       memberAlign);
164   return Address(ptr, ConvertTypeForMem(memberPtrType->getPointeeType()),
165                  memberAlign);
166 }
167 
168 CharUnits CodeGenModule::computeNonVirtualBaseClassOffset(
169     const CXXRecordDecl *DerivedClass, CastExpr::path_const_iterator Start,
170     CastExpr::path_const_iterator End) {
171   CharUnits Offset = CharUnits::Zero();
172 
173   const ASTContext &Context = getContext();
174   const CXXRecordDecl *RD = DerivedClass;
175 
176   for (CastExpr::path_const_iterator I = Start; I != End; ++I) {
177     const CXXBaseSpecifier *Base = *I;
178     assert(!Base->isVirtual() && "Should not see virtual bases here!");
179 
180     // Get the layout.
181     const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
182 
183     const auto *BaseDecl =
184         cast<CXXRecordDecl>(Base->getType()->castAs<RecordType>()->getDecl());
185 
186     // Add the offset.
187     Offset += Layout.getBaseClassOffset(BaseDecl);
188 
189     RD = BaseDecl;
190   }
191 
192   return Offset;
193 }
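// Editor's illustration (hypothetical hierarchy): for
//
//   struct A { int a; };
//   struct B { int b; };
//   struct C : A, B { int c; };
//
// the one-step path C -> B contributes B's offset within C (typically
// sizeof(int) under an Itanium-style layout), and a longer path such as
// D -> C -> B simply sums the per-step non-virtual offsets, which is exactly
// what the loop above accumulates.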
194 
195 llvm::Constant *
196 CodeGenModule::GetNonVirtualBaseClassOffset(const CXXRecordDecl *ClassDecl,
197                                    CastExpr::path_const_iterator PathBegin,
198                                    CastExpr::path_const_iterator PathEnd) {
199   assert(PathBegin != PathEnd && "Base path should not be empty!");
200 
201   CharUnits Offset =
202       computeNonVirtualBaseClassOffset(ClassDecl, PathBegin, PathEnd);
203   if (Offset.isZero())
204     return nullptr;
205 
206   llvm::Type *PtrDiffTy =
207       getTypes().ConvertType(getContext().getPointerDiffType());
208 
209   return llvm::ConstantInt::get(PtrDiffTy, Offset.getQuantity());
210 }
211 
212 /// Gets the address of a direct base class within a complete object.
213 /// This should only be used for (1) non-virtual bases or (2) virtual bases
214 /// when the type is known to be complete (e.g. in complete destructors).
215 ///
216 /// The object pointed to by 'This' is assumed to be non-null.
217 Address
218 CodeGenFunction::GetAddressOfDirectBaseInCompleteClass(Address This,
219                                                    const CXXRecordDecl *Derived,
220                                                    const CXXRecordDecl *Base,
221                                                    bool BaseIsVirtual) {
222   // 'this' must be a pointer (in some address space) to Derived.
223   assert(This.getElementType() == ConvertType(Derived));
224 
225   // Compute the offset of the virtual base.
226   CharUnits Offset;
227   const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Derived);
228   if (BaseIsVirtual)
229     Offset = Layout.getVBaseClassOffset(Base);
230   else
231     Offset = Layout.getBaseClassOffset(Base);
232 
233   // Shift and cast down to the base type.
234   // TODO: for complete types, this should be possible with a GEP.
235   Address V = This;
236   if (!Offset.isZero()) {
237     V = V.withElementType(Int8Ty);
238     V = Builder.CreateConstInBoundsByteGEP(V, Offset);
239   }
240   return V.withElementType(ConvertType(Base));
241 }
242 
243 static Address
244 ApplyNonVirtualAndVirtualOffset(CodeGenFunction &CGF, Address addr,
245                                 CharUnits nonVirtualOffset,
246                                 llvm::Value *virtualOffset,
247                                 const CXXRecordDecl *derivedClass,
248                                 const CXXRecordDecl *nearestVBase) {
249   // Assert that we have something to do.
250   assert(!nonVirtualOffset.isZero() || virtualOffset != nullptr);
251 
252   // Compute the offset from the static and dynamic components.
253   llvm::Value *baseOffset;
254   if (!nonVirtualOffset.isZero()) {
255     llvm::Type *OffsetType =
256         (CGF.CGM.getTarget().getCXXABI().isItaniumFamily() &&
257          CGF.CGM.getItaniumVTableContext().isRelativeLayout())
258             ? CGF.Int32Ty
259             : CGF.PtrDiffTy;
260     baseOffset =
261         llvm::ConstantInt::get(OffsetType, nonVirtualOffset.getQuantity());
262     if (virtualOffset) {
263       baseOffset = CGF.Builder.CreateAdd(virtualOffset, baseOffset);
264     }
265   } else {
266     baseOffset = virtualOffset;
267   }
268 
269   // Apply the base offset.
270   llvm::Value *ptr = addr.emitRawPointer(CGF);
271   ptr = CGF.Builder.CreateInBoundsGEP(CGF.Int8Ty, ptr, baseOffset, "add.ptr");
272 
273   // If we have a virtual component, the alignment of the result will
274   // be relative only to the known alignment of that vbase.
275   CharUnits alignment;
276   if (virtualOffset) {
277     assert(nearestVBase && "virtual offset without vbase?");
278     alignment = CGF.CGM.getVBaseAlignment(addr.getAlignment(),
279                                           derivedClass, nearestVBase);
280   } else {
281     alignment = addr.getAlignment();
282   }
283   alignment = alignment.alignmentAtOffset(nonVirtualOffset);
284 
285   return Address(ptr, CGF.Int8Ty, alignment);
286 }
287 
288 Address CodeGenFunction::GetAddressOfBaseClass(
289     Address Value, const CXXRecordDecl *Derived,
290     CastExpr::path_const_iterator PathBegin,
291     CastExpr::path_const_iterator PathEnd, bool NullCheckValue,
292     SourceLocation Loc) {
293   assert(PathBegin != PathEnd && "Base path should not be empty!");
294 
295   CastExpr::path_const_iterator Start = PathBegin;
296   const CXXRecordDecl *VBase = nullptr;
297 
298   // Sema has done some convenient canonicalization here: if the
299   // access path involved any virtual steps, the conversion path will
300   // *start* with a step down to the correct virtual base subobject,
301   // and hence will not require any further steps.
302   if ((*Start)->isVirtual()) {
303     VBase = cast<CXXRecordDecl>(
304         (*Start)->getType()->castAs<RecordType>()->getDecl());
305     ++Start;
306   }
307 
308   // Compute the static offset of the ultimate destination within its
309   // allocating subobject (the virtual base, if there is one, or else
310   // the "complete" object that we see).
311   CharUnits NonVirtualOffset = CGM.computeNonVirtualBaseClassOffset(
312       VBase ? VBase : Derived, Start, PathEnd);
313 
314   // If there's a virtual step, we can sometimes "devirtualize" it.
315   // For now, that's limited to when the derived type is final.
316   // TODO: "devirtualize" this for accesses to known-complete objects.
317   if (VBase && Derived->hasAttr<FinalAttr>()) {
318     const ASTRecordLayout &layout = getContext().getASTRecordLayout(Derived);
319     CharUnits vBaseOffset = layout.getVBaseClassOffset(VBase);
320     NonVirtualOffset += vBaseOffset;
321     VBase = nullptr; // we no longer have a virtual step
322   }
323 
324   // Get the base pointer type.
325   llvm::Type *BaseValueTy = ConvertType((PathEnd[-1])->getType());
326   llvm::Type *PtrTy = llvm::PointerType::get(
327       CGM.getLLVMContext(), Value.getType()->getPointerAddressSpace());
328 
329   QualType DerivedTy = getContext().getRecordType(Derived);
330   CharUnits DerivedAlign = CGM.getClassPointerAlignment(Derived);
331 
332   // If the static offset is zero and we don't have a virtual step,
333   // just do a bitcast; null checks are unnecessary.
334   if (NonVirtualOffset.isZero() && !VBase) {
335     if (sanitizePerformTypeCheck()) {
336       SanitizerSet SkippedChecks;
337       SkippedChecks.set(SanitizerKind::Null, !NullCheckValue);
338       EmitTypeCheck(TCK_Upcast, Loc, Value.emitRawPointer(*this), DerivedTy,
339                     DerivedAlign, SkippedChecks);
340     }
341     return Value.withElementType(BaseValueTy);
342   }
343 
344   llvm::BasicBlock *origBB = nullptr;
345   llvm::BasicBlock *endBB = nullptr;
346 
347   // Skip over the offset (and the vtable load) if we're supposed to
348   // null-check the pointer.
349   if (NullCheckValue) {
350     origBB = Builder.GetInsertBlock();
351     llvm::BasicBlock *notNullBB = createBasicBlock("cast.notnull");
352     endBB = createBasicBlock("cast.end");
353 
354     llvm::Value *isNull = Builder.CreateIsNull(Value);
355     Builder.CreateCondBr(isNull, endBB, notNullBB);
356     EmitBlock(notNullBB);
357   }
358 
359   if (sanitizePerformTypeCheck()) {
360     SanitizerSet SkippedChecks;
361     SkippedChecks.set(SanitizerKind::Null, true);
362     EmitTypeCheck(VBase ? TCK_UpcastToVirtualBase : TCK_Upcast, Loc,
363                   Value.emitRawPointer(*this), DerivedTy, DerivedAlign,
364                   SkippedChecks);
365   }
366 
367   // Compute the virtual offset.
368   llvm::Value *VirtualOffset = nullptr;
369   if (VBase) {
370     VirtualOffset =
371         CGM.getCXXABI().GetVirtualBaseClassOffset(*this, Value, Derived, VBase);
372   }
373 
374   // Apply both offsets.
375   Value = ApplyNonVirtualAndVirtualOffset(*this, Value, NonVirtualOffset,
376                                           VirtualOffset, Derived, VBase);
377 
378   // Cast to the destination type.
379   Value = Value.withElementType(BaseValueTy);
380 
381   // Build a phi if we needed a null check.
382   if (NullCheckValue) {
383     llvm::BasicBlock *notNullBB = Builder.GetInsertBlock();
384     Builder.CreateBr(endBB);
385     EmitBlock(endBB);
386 
387     llvm::PHINode *PHI = Builder.CreatePHI(PtrTy, 2, "cast.result");
388     PHI->addIncoming(Value.emitRawPointer(*this), notNullBB);
389     PHI->addIncoming(llvm::Constant::getNullValue(PtrTy), origBB);
390     Value = Value.withPointer(PHI, NotKnownNonNull);
391   }
392 
393   return Value;
394 }
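// Editor's note on the "devirtualize" step above (hypothetical types): given
//
//   struct V { int v; };
//   struct D final : virtual V { int d; };
//
// an upcast from 'D *' to 'V *' would normally need a vtable load to find
// the virtual-base offset, but because D is final the object must be a
// complete D, so the offset recorded in D's own record layout is folded into
// NonVirtualOffset and no virtual step remains.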
395 
396 Address
397 CodeGenFunction::GetAddressOfDerivedClass(Address BaseAddr,
398                                           const CXXRecordDecl *Derived,
399                                         CastExpr::path_const_iterator PathBegin,
400                                           CastExpr::path_const_iterator PathEnd,
401                                           bool NullCheckValue) {
402   assert(PathBegin != PathEnd && "Base path should not be empty!");
403 
404   QualType DerivedTy =
405       getContext().getCanonicalType(getContext().getTagDeclType(Derived));
406   llvm::Type *DerivedValueTy = ConvertType(DerivedTy);
407 
408   llvm::Value *NonVirtualOffset =
409     CGM.GetNonVirtualBaseClassOffset(Derived, PathBegin, PathEnd);
410 
411   if (!NonVirtualOffset) {
412     // No offset, we can just cast back.
413     return BaseAddr.withElementType(DerivedValueTy);
414   }
415 
416   llvm::BasicBlock *CastNull = nullptr;
417   llvm::BasicBlock *CastNotNull = nullptr;
418   llvm::BasicBlock *CastEnd = nullptr;
419 
420   if (NullCheckValue) {
421     CastNull = createBasicBlock("cast.null");
422     CastNotNull = createBasicBlock("cast.notnull");
423     CastEnd = createBasicBlock("cast.end");
424 
425     llvm::Value *IsNull = Builder.CreateIsNull(BaseAddr);
426     Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
427     EmitBlock(CastNotNull);
428   }
429 
430   // Apply the offset.
431   Address Addr = BaseAddr.withElementType(Int8Ty);
432   Addr = Builder.CreateInBoundsGEP(
433       Addr, Builder.CreateNeg(NonVirtualOffset), Int8Ty,
434       CGM.getClassPointerAlignment(Derived), "sub.ptr");
435 
436   // Just cast.
437   Addr = Addr.withElementType(DerivedValueTy);
438 
439   // Produce a PHI if we had a null-check.
440   if (NullCheckValue) {
441     Builder.CreateBr(CastEnd);
442     EmitBlock(CastNull);
443     Builder.CreateBr(CastEnd);
444     EmitBlock(CastEnd);
445 
446     llvm::Value *Value = Addr.emitRawPointer(*this);
447     llvm::PHINode *PHI = Builder.CreatePHI(Value->getType(), 2);
448     PHI->addIncoming(Value, CastNotNull);
449     PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()), CastNull);
450     return Address(PHI, Addr.getElementType(),
451                    CGM.getClassPointerAlignment(Derived));
452   }
453 
454   return Addr;
455 }
456 
457 llvm::Value *CodeGenFunction::GetVTTParameter(GlobalDecl GD,
458                                               bool ForVirtualBase,
459                                               bool Delegating) {
460   if (!CGM.getCXXABI().NeedsVTTParameter(GD)) {
461     // This constructor/destructor does not need a VTT parameter.
462     return nullptr;
463   }
464 
465   const CXXRecordDecl *RD = cast<CXXMethodDecl>(CurCodeDecl)->getParent();
466   const CXXRecordDecl *Base = cast<CXXMethodDecl>(GD.getDecl())->getParent();
467 
468   uint64_t SubVTTIndex;
469 
470   if (Delegating) {
471     // If this is a delegating constructor call, just load the VTT.
472     return LoadCXXVTT();
473   } else if (RD == Base) {
474     // If the record matches the base, this is the complete ctor/dtor
475     // variant calling the base variant in a class with virtual bases.
476     assert(!CGM.getCXXABI().NeedsVTTParameter(CurGD) &&
477            "doing no-op VTT offset in base dtor/ctor?");
478     assert(!ForVirtualBase && "Can't have same class as virtual base!");
479     SubVTTIndex = 0;
480   } else {
481     const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);
482     CharUnits BaseOffset = ForVirtualBase ?
483       Layout.getVBaseClassOffset(Base) :
484       Layout.getBaseClassOffset(Base);
485 
486     SubVTTIndex =
487       CGM.getVTables().getSubVTTIndex(RD, BaseSubobject(Base, BaseOffset));
488     assert(SubVTTIndex != 0 && "Sub-VTT index must be greater than zero!");
489   }
490 
491   if (CGM.getCXXABI().NeedsVTTParameter(CurGD)) {
492     // A VTT parameter was passed to the constructor, use it.
493     llvm::Value *VTT = LoadCXXVTT();
494     return Builder.CreateConstInBoundsGEP1_64(VoidPtrTy, VTT, SubVTTIndex);
495   } else {
496     // We're the complete constructor, so get the VTT by name.
497     llvm::GlobalValue *VTT = CGM.getVTables().GetAddrOfVTT(RD);
498     return Builder.CreateConstInBoundsGEP2_64(
499         VTT->getValueType(), VTT, 0, SubVTTIndex);
500   }
501 }
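// Editor's sketch of the VTT mechanism above (Itanium C++ ABI, hypothetical
// types):
//
//   struct V { int v; };
//   struct B : virtual V { int b; };
//   struct D : B { int d; };
//
// D's complete-object constructor constructs its B base via B's base-object
// constructor and passes it a pointer into D's VTT, the sub-VTT describing
// the B-in-D subobject, so that B's constructor installs vtable pointers
// that locate V where it actually lives inside D rather than inside a
// standalone B.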
502 
503 namespace {
504   /// Call the destructor for a direct base class.
505   struct CallBaseDtor final : EHScopeStack::Cleanup {
506     const CXXRecordDecl *BaseClass;
507     bool BaseIsVirtual;
508     CallBaseDtor(const CXXRecordDecl *Base, bool BaseIsVirtual)
509       : BaseClass(Base), BaseIsVirtual(BaseIsVirtual) {}
510 
511     void Emit(CodeGenFunction &CGF, Flags flags) override {
512       const CXXRecordDecl *DerivedClass =
513         cast<CXXMethodDecl>(CGF.CurCodeDecl)->getParent();
514 
515       const CXXDestructorDecl *D = BaseClass->getDestructor();
516       // We are already inside a destructor, so presumably the object being
517       // destroyed should have the expected type.
518       QualType ThisTy = D->getFunctionObjectParameterType();
519       Address Addr =
520         CGF.GetAddressOfDirectBaseInCompleteClass(CGF.LoadCXXThisAddress(),
521                                                   DerivedClass, BaseClass,
522                                                   BaseIsVirtual);
523       CGF.EmitCXXDestructorCall(D, Dtor_Base, BaseIsVirtual,
524                                 /*Delegating=*/false, Addr, ThisTy);
525     }
526   };
527 
528   /// A visitor which checks whether an initializer uses 'this' in a
529   /// way which requires the vtable to be properly set.
530   struct DynamicThisUseChecker : ConstEvaluatedExprVisitor<DynamicThisUseChecker> {
531     typedef ConstEvaluatedExprVisitor<DynamicThisUseChecker> super;
532 
533     bool UsesThis;
534 
535     DynamicThisUseChecker(const ASTContext &C) : super(C), UsesThis(false) {}
536 
537     // Black-list all explicit and implicit references to 'this'.
538     //
539     // Do we need to worry about external references to 'this' derived
540     // from arbitrary code?  If so, then anything which runs arbitrary
541     // external code might potentially access the vtable.
542     void VisitCXXThisExpr(const CXXThisExpr *E) { UsesThis = true; }
543   };
544 } // end anonymous namespace
545 
546 static bool BaseInitializerUsesThis(ASTContext &C, const Expr *Init) {
547   DynamicThisUseChecker Checker(C);
548   Checker.Visit(Init);
549   return Checker.UsesThis;
550 }
551 
552 static void EmitBaseInitializer(CodeGenFunction &CGF,
553                                 const CXXRecordDecl *ClassDecl,
554                                 CXXCtorInitializer *BaseInit) {
555   assert(BaseInit->isBaseInitializer() &&
556          "Must have base initializer!");
557 
558   Address ThisPtr = CGF.LoadCXXThisAddress();
559 
560   const Type *BaseType = BaseInit->getBaseClass();
561   const auto *BaseClassDecl =
562       cast<CXXRecordDecl>(BaseType->castAs<RecordType>()->getDecl());
563 
564   bool isBaseVirtual = BaseInit->isBaseVirtual();
565 
566   // If the initializer for the base (other than the constructor
567   // itself) accesses 'this' in any way, we need to initialize the
568   // vtables.
569   if (BaseInitializerUsesThis(CGF.getContext(), BaseInit->getInit()))
570     CGF.InitializeVTablePointers(ClassDecl);
571 
572   // We can pretend to be a complete class because it only matters for
573   // virtual bases, and we only do virtual bases for complete ctors.
574   Address V =
575     CGF.GetAddressOfDirectBaseInCompleteClass(ThisPtr, ClassDecl,
576                                               BaseClassDecl,
577                                               isBaseVirtual);
578   AggValueSlot AggSlot =
579       AggValueSlot::forAddr(
580           V, Qualifiers(),
581           AggValueSlot::IsDestructed,
582           AggValueSlot::DoesNotNeedGCBarriers,
583           AggValueSlot::IsNotAliased,
584           CGF.getOverlapForBaseInit(ClassDecl, BaseClassDecl, isBaseVirtual));
585 
586   CGF.EmitAggExpr(BaseInit->getInit(), AggSlot);
587 
588   if (CGF.CGM.getLangOpts().Exceptions &&
589       !BaseClassDecl->hasTrivialDestructor())
590     CGF.EHStack.pushCleanup<CallBaseDtor>(EHCleanup, BaseClassDecl,
591                                           isBaseVirtual);
592 }
593 
594 static bool isMemcpyEquivalentSpecialMember(const CXXMethodDecl *D) {
595   auto *CD = dyn_cast<CXXConstructorDecl>(D);
596   if (!(CD && CD->isCopyOrMoveConstructor()) &&
597       !D->isCopyAssignmentOperator() && !D->isMoveAssignmentOperator())
598     return false;
599 
600   // We can emit a memcpy for a trivial copy or move constructor/assignment.
601   if (D->isTrivial() && !D->getParent()->mayInsertExtraPadding())
602     return true;
603 
604   // We *must* emit a memcpy for a defaulted union copy or move op.
605   if (D->getParent()->isUnion() && D->isDefaulted())
606     return true;
607 
608   return false;
609 }
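// Editor's illustration (hypothetical types): the implicitly-defaulted copy
// and move operations of
//
//   struct Trivial { int i; double d; };
//
// are trivial, so copying such a member or object can be lowered to a plain
// memcpy. A user-provided copy constructor, a virtual function, or a base or
// member with a non-trivial copy makes the operation non-trivial and forces
// the normal constructor-call path instead.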
610 
611 static void EmitLValueForAnyFieldInitialization(CodeGenFunction &CGF,
612                                                 CXXCtorInitializer *MemberInit,
613                                                 LValue &LHS) {
614   FieldDecl *Field = MemberInit->getAnyMember();
615   if (MemberInit->isIndirectMemberInitializer()) {
616     // If we are initializing an anonymous union field, drill down to the field.
617     IndirectFieldDecl *IndirectField = MemberInit->getIndirectMember();
618     for (const auto *I : IndirectField->chain())
619       LHS = CGF.EmitLValueForFieldInitialization(LHS, cast<FieldDecl>(I));
620   } else {
621     LHS = CGF.EmitLValueForFieldInitialization(LHS, Field);
622   }
623 }
624 
625 static void EmitMemberInitializer(CodeGenFunction &CGF,
626                                   const CXXRecordDecl *ClassDecl,
627                                   CXXCtorInitializer *MemberInit,
628                                   const CXXConstructorDecl *Constructor,
629                                   FunctionArgList &Args) {
630   ApplyAtomGroup Grp(CGF.getDebugInfo());
631   ApplyDebugLocation Loc(CGF, MemberInit->getSourceLocation());
632   assert(MemberInit->isAnyMemberInitializer() &&
633          "Must have member initializer!");
634   assert(MemberInit->getInit() && "Must have initializer!");
635 
636   // non-static data member initializers.
637   FieldDecl *Field = MemberInit->getAnyMember();
638   QualType FieldType = Field->getType();
639 
640   llvm::Value *ThisPtr = CGF.LoadCXXThis();
641   QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl);
642   LValue LHS;
643 
644   // If a base constructor is being emitted, create an LValue that has the
645   // non-virtual alignment.
646   if (CGF.CurGD.getCtorType() == Ctor_Base)
647     LHS = CGF.MakeNaturalAlignPointeeAddrLValue(ThisPtr, RecordTy);
648   else
649     LHS = CGF.MakeNaturalAlignAddrLValue(ThisPtr, RecordTy);
650 
651   EmitLValueForAnyFieldInitialization(CGF, MemberInit, LHS);
652 
653   // Special case: if we are in a copy or move constructor, and we are copying
654   // an array of PODs or classes with trivial copy constructors, ignore the
655   // AST and perform the copy we know is equivalent.
656   // FIXME: This is hacky at best... if we had a bit more explicit information
657   // in the AST, we could generalize it more easily.
658   const ConstantArrayType *Array
659     = CGF.getContext().getAsConstantArrayType(FieldType);
660   if (Array && Constructor->isDefaulted() &&
661       Constructor->isCopyOrMoveConstructor()) {
662     QualType BaseElementTy = CGF.getContext().getBaseElementType(Array);
663     CXXConstructExpr *CE = dyn_cast<CXXConstructExpr>(MemberInit->getInit());
664     if (BaseElementTy.isPODType(CGF.getContext()) ||
665         (CE && isMemcpyEquivalentSpecialMember(CE->getConstructor()))) {
666       unsigned SrcArgIndex =
667           CGF.CGM.getCXXABI().getSrcArgforCopyCtor(Constructor, Args);
668       llvm::Value *SrcPtr
669         = CGF.Builder.CreateLoad(CGF.GetAddrOfLocalVar(Args[SrcArgIndex]));
670       LValue ThisRHSLV = CGF.MakeNaturalAlignAddrLValue(SrcPtr, RecordTy);
671       LValue Src = CGF.EmitLValueForFieldInitialization(ThisRHSLV, Field);
672 
673       // Copy the aggregate.
674       CGF.EmitAggregateCopy(LHS, Src, FieldType, CGF.getOverlapForFieldInit(Field),
675                             LHS.isVolatileQualified());
676       // Ensure that we destroy the objects if an exception is thrown later in
677       // the constructor.
678       QualType::DestructionKind dtorKind = FieldType.isDestructedType();
679       if (CGF.needsEHCleanup(dtorKind))
680         CGF.pushEHDestroy(dtorKind, LHS.getAddress(), FieldType);
681       return;
682     }
683   }
684 
685   CGF.EmitInitializerForField(Field, LHS, MemberInit->getInit());
686 }
687 
688 void CodeGenFunction::EmitInitializerForField(FieldDecl *Field, LValue LHS,
689                                               Expr *Init) {
690   QualType FieldType = Field->getType();
691   switch (getEvaluationKind(FieldType)) {
692   case TEK_Scalar:
693     if (LHS.isSimple()) {
694       EmitExprAsInit(Init, Field, LHS, false);
695     } else {
696       RValue RHS = RValue::get(EmitScalarExpr(Init));
697       EmitStoreThroughLValue(RHS, LHS);
698     }
699     break;
700   case TEK_Complex:
701     EmitComplexExprIntoLValue(Init, LHS, /*isInit*/ true);
702     break;
703   case TEK_Aggregate: {
704     AggValueSlot Slot = AggValueSlot::forLValue(
705         LHS, AggValueSlot::IsDestructed, AggValueSlot::DoesNotNeedGCBarriers,
706         AggValueSlot::IsNotAliased, getOverlapForFieldInit(Field),
707         AggValueSlot::IsNotZeroed,
708         // Checks are made by the code that calls constructor.
709         AggValueSlot::IsSanitizerChecked);
710     EmitAggExpr(Init, Slot);
711     break;
712   }
713   }
714 
715   // Ensure that we destroy this object if an exception is thrown
716   // later in the constructor.
717   QualType::DestructionKind dtorKind = FieldType.isDestructedType();
718   if (needsEHCleanup(dtorKind))
719     pushEHDestroy(dtorKind, LHS.getAddress(), FieldType);
720 }
721 
722 /// Checks whether the given constructor is a valid subject for the
723 /// complete-to-base constructor delegation optimization, i.e.
724 /// emitting the complete constructor as a simple call to the base
725 /// constructor.
726 bool CodeGenFunction::IsConstructorDelegationValid(
727     const CXXConstructorDecl *Ctor) {
728 
729   // Currently we disable the optimization for classes with virtual
730   // bases because (1) the addresses of parameter variables need to be
731   // consistent across all initializers but (2) the delegate function
732   // call necessarily creates a second copy of the parameter variable.
733   //
734   // The limiting example (purely theoretical AFAIK):
735   //   struct A { A(int &c) { c++; } };
736   //   struct B : virtual A {
737   //     B(int count) : A(count) { printf("%d\n", count); }
738   //   };
739   // ...although even this example could in principle be emitted as a
740   // delegation since the address of the parameter doesn't escape.
741   if (Ctor->getParent()->getNumVBases()) {
742     // TODO: white-list trivial vbase initializers.  This case wouldn't
743     // be subject to the restrictions below.
744 
745     // TODO: white-list cases where:
746     //  - there are no non-reference parameters to the constructor
747     //  - the initializers don't access any non-reference parameters
748     //  - the initializers don't take the address of non-reference
749     //    parameters
750     //  - etc.
751     // If we ever add any of the above cases, remember that:
752     //  - function-try-blocks will always exclude this optimization
753     //  - we need to perform the constructor prologue and cleanup in
754     //    EmitConstructorBody.
755 
756     return false;
757   }
758 
759   // We also disable the optimization for variadic functions because
760   // it's impossible to "re-pass" varargs.
761   if (Ctor->getType()->castAs<FunctionProtoType>()->isVariadic())
762     return false;
763 
764   // FIXME: Decide if we can do a delegation of a delegating constructor.
765   if (Ctor->isDelegatingConstructor())
766     return false;
767 
768   return true;
769 }
770 
771 // Emit code in ctor (Prologue==true) or dtor (Prologue==false)
772 // to poison the extra field paddings inserted under
773 // -fsanitize-address-field-padding=1|2.
774 void CodeGenFunction::EmitAsanPrologueOrEpilogue(bool Prologue) {
775   ASTContext &Context = getContext();
776   const CXXRecordDecl *ClassDecl =
777       Prologue ? cast<CXXConstructorDecl>(CurGD.getDecl())->getParent()
778                : cast<CXXDestructorDecl>(CurGD.getDecl())->getParent();
779   if (!ClassDecl->mayInsertExtraPadding()) return;
780 
781   struct SizeAndOffset {
782     uint64_t Size;
783     uint64_t Offset;
784   };
785 
786   unsigned PtrSize = CGM.getDataLayout().getPointerSizeInBits();
787   const ASTRecordLayout &Info = Context.getASTRecordLayout(ClassDecl);
788 
789   // Populate sizes and offsets of fields.
790   SmallVector<SizeAndOffset, 16> SSV(Info.getFieldCount());
791   for (unsigned i = 0, e = Info.getFieldCount(); i != e; ++i)
792     SSV[i].Offset =
793         Context.toCharUnitsFromBits(Info.getFieldOffset(i)).getQuantity();
794 
795   size_t NumFields = 0;
796   for (const auto *Field : ClassDecl->fields()) {
797     const FieldDecl *D = Field;
798     auto FieldInfo = Context.getTypeInfoInChars(D->getType());
799     CharUnits FieldSize = FieldInfo.Width;
800     assert(NumFields < SSV.size());
801     SSV[NumFields].Size = D->isBitField() ? 0 : FieldSize.getQuantity();
802     NumFields++;
803   }
804   assert(NumFields == SSV.size());
805   if (SSV.size() <= 1) return;
806 
807   // We will insert calls to __asan_* run-time functions.
808   // LLVM AddressSanitizer pass may decide to inline them later.
809   llvm::Type *Args[2] = {IntPtrTy, IntPtrTy};
810   llvm::FunctionType *FTy =
811       llvm::FunctionType::get(CGM.VoidTy, Args, false);
812   llvm::FunctionCallee F = CGM.CreateRuntimeFunction(
813       FTy, Prologue ? "__asan_poison_intra_object_redzone"
814                     : "__asan_unpoison_intra_object_redzone");
815 
816   llvm::Value *ThisPtr = LoadCXXThis();
817   ThisPtr = Builder.CreatePtrToInt(ThisPtr, IntPtrTy);
818   uint64_t TypeSize = Info.getNonVirtualSize().getQuantity();
819   // For each field check if it has sufficient padding,
820   // if so (un)poison it with a call.
821   for (size_t i = 0; i < SSV.size(); i++) {
822     uint64_t AsanAlignment = 8;
823     uint64_t NextField = i == SSV.size() - 1 ? TypeSize : SSV[i + 1].Offset;
824     uint64_t PoisonSize = NextField - SSV[i].Offset - SSV[i].Size;
825     uint64_t EndOffset = SSV[i].Offset + SSV[i].Size;
826     if (PoisonSize < AsanAlignment || !SSV[i].Size ||
827         (NextField % AsanAlignment) != 0)
828       continue;
829     Builder.CreateCall(
830         F, {Builder.CreateAdd(ThisPtr, Builder.getIntN(PtrSize, EndOffset)),
831             Builder.getIntN(PtrSize, PoisonSize)});
832   }
833 }
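// Editor's sketch of the effect of the loop above (conceptual, not emitted
// verbatim): for each sufficiently large padding gap between consecutive
// fields of a class built with -fsanitize-address-field-padding, the
// constructor prologue emits roughly
//
//   __asan_poison_intra_object_redzone((uintptr_t)this + end_of_field,
//                                      padding_size);
//
// and the destructor epilogue emits the matching
// __asan_unpoison_intra_object_redzone call, so stray accesses into the
// inserted padding are diagnosed only while the object is alive.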
834 
835 /// EmitConstructorBody - Emits the body of the current constructor.
836 void CodeGenFunction::EmitConstructorBody(FunctionArgList &Args) {
837   EmitAsanPrologueOrEpilogue(true);
838   const CXXConstructorDecl *Ctor = cast<CXXConstructorDecl>(CurGD.getDecl());
839   CXXCtorType CtorType = CurGD.getCtorType();
840 
841   assert((CGM.getTarget().getCXXABI().hasConstructorVariants() ||
842           CtorType == Ctor_Complete) &&
843          "can only generate complete ctor for this ABI");
844 
845   // Before we go any further, try the complete->base constructor
846   // delegation optimization.
847   if (CtorType == Ctor_Complete && IsConstructorDelegationValid(Ctor) &&
848       CGM.getTarget().getCXXABI().hasConstructorVariants()) {
849     EmitDelegateCXXConstructorCall(Ctor, Ctor_Base, Args, Ctor->getEndLoc());
850     return;
851   }
852 
853   const FunctionDecl *Definition = nullptr;
854   Stmt *Body = Ctor->getBody(Definition);
855   assert(Definition == Ctor && "emitting wrong constructor body");
856 
857   // Enter the function-try-block before the constructor prologue if
858   // applicable.
859   bool IsTryBody = isa_and_nonnull<CXXTryStmt>(Body);
860   if (IsTryBody)
861     EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);
862 
863   incrementProfileCounter(Body);
864   maybeCreateMCDCCondBitmap();
865 
866   RunCleanupsScope RunCleanups(*this);
867 
868   // TODO: in restricted cases, we can emit the vbase initializers of
869   // a complete ctor and then delegate to the base ctor.
870 
871   // Emit the constructor prologue, i.e. the base and member
872   // initializers.
873   EmitCtorPrologue(Ctor, CtorType, Args);
874 
875   // Emit the body of the statement.
876   if (IsTryBody)
877     EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
878   else if (Body)
879     EmitStmt(Body);
880 
881   // Emit any cleanup blocks associated with the member or base
882   // initializers, which includes (along the exceptional path) the
883   // destructors for those members and bases that were fully
884   // constructed.
885   RunCleanups.ForceCleanup();
886 
887   if (IsTryBody)
888     ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
889 }
890 
891 namespace {
892   /// RAII object to indicate that codegen is copying the value representation
893   /// instead of the object representation. Useful when copying a struct or
894   /// class which has uninitialized members and we're only performing
895   /// lvalue-to-rvalue conversion on the object but not its members.
896   class CopyingValueRepresentation {
897   public:
898     explicit CopyingValueRepresentation(CodeGenFunction &CGF)
899         : CGF(CGF), OldSanOpts(CGF.SanOpts) {
900       CGF.SanOpts.set(SanitizerKind::Bool, false);
901       CGF.SanOpts.set(SanitizerKind::Enum, false);
902     }
903     ~CopyingValueRepresentation() {
904       CGF.SanOpts = OldSanOpts;
905     }
906   private:
907     CodeGenFunction &CGF;
908     SanitizerSet OldSanOpts;
909   };
910 } // end anonymous namespace
911 
912 namespace {
913   class FieldMemcpyizer {
914   public:
915     FieldMemcpyizer(CodeGenFunction &CGF, const CXXRecordDecl *ClassDecl,
916                     const VarDecl *SrcRec)
917       : CGF(CGF), ClassDecl(ClassDecl), SrcRec(SrcRec),
918         RecLayout(CGF.getContext().getASTRecordLayout(ClassDecl)),
919         FirstField(nullptr), LastField(nullptr), FirstFieldOffset(0),
920         LastFieldOffset(0), LastAddedFieldIndex(0) {}
921 
922     bool isMemcpyableField(FieldDecl *F) const {
923       // Never memcpy fields when we are adding poisoned paddings.
924       if (CGF.getContext().getLangOpts().SanitizeAddressFieldPadding)
925         return false;
926       Qualifiers Qual = F->getType().getQualifiers();
927       if (Qual.hasVolatile() || Qual.hasObjCLifetime())
928         return false;
929       if (PointerAuthQualifier Q = F->getType().getPointerAuth();
930           Q && Q.isAddressDiscriminated())
931         return false;
932       return true;
933     }
934 
935     void addMemcpyableField(FieldDecl *F) {
936       if (isEmptyFieldForLayout(CGF.getContext(), F))
937         return;
938       if (!FirstField)
939         addInitialField(F);
940       else
941         addNextField(F);
942     }
943 
944     CharUnits getMemcpySize(uint64_t FirstByteOffset) const {
945       ASTContext &Ctx = CGF.getContext();
946       unsigned LastFieldSize =
947           LastField->isBitField()
948               ? LastField->getBitWidthValue()
949               : Ctx.toBits(
950                     Ctx.getTypeInfoDataSizeInChars(LastField->getType()).Width);
951       uint64_t MemcpySizeBits = LastFieldOffset + LastFieldSize -
952                                 FirstByteOffset + Ctx.getCharWidth() - 1;
953       CharUnits MemcpySize = Ctx.toCharUnitsFromBits(MemcpySizeBits);
954       return MemcpySize;
955     }
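    // Editor's worked example of the computation above (made-up layout): with
    // FirstByteOffset = 32 bits, LastFieldOffset = 96 bits and a 32-bit last
    // field, MemcpySizeBits = 96 + 32 - 32 + 8 - 1 = 103, and the char-unit
    // conversion truncates that to 12 bytes: exactly the span from the first
    // byte of the first field through the last byte of the last field. The
    // "+ CharWidth - 1" only matters when the last field is a bit-field that
    // does not end on a byte boundary, in which case it rounds up.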
956 
957     void emitMemcpy() {
958       // Give the subclass a chance to bail out if it feels the memcpy isn't
959       // worth it (e.g. it hasn't aggregated enough data).
960       if (!FirstField) {
961         return;
962       }
963 
964       uint64_t FirstByteOffset;
965       if (FirstField->isBitField()) {
966         const CGRecordLayout &RL =
967           CGF.getTypes().getCGRecordLayout(FirstField->getParent());
968         const CGBitFieldInfo &BFInfo = RL.getBitFieldInfo(FirstField);
969         // FirstFieldOffset is not appropriate for bitfields,
970         // we need to use the storage offset instead.
971         FirstByteOffset = CGF.getContext().toBits(BFInfo.StorageOffset);
972       } else {
973         FirstByteOffset = FirstFieldOffset;
974       }
975 
976       CharUnits MemcpySize = getMemcpySize(FirstByteOffset);
977       QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl);
978       Address ThisPtr = CGF.LoadCXXThisAddress();
979       LValue DestLV = CGF.MakeAddrLValue(ThisPtr, RecordTy);
980       LValue Dest = CGF.EmitLValueForFieldInitialization(DestLV, FirstField);
981       llvm::Value *SrcPtr = CGF.Builder.CreateLoad(CGF.GetAddrOfLocalVar(SrcRec));
982       LValue SrcLV = CGF.MakeNaturalAlignAddrLValue(SrcPtr, RecordTy);
983       LValue Src = CGF.EmitLValueForFieldInitialization(SrcLV, FirstField);
984 
985       emitMemcpyIR(
986           Dest.isBitField() ? Dest.getBitFieldAddress() : Dest.getAddress(),
987           Src.isBitField() ? Src.getBitFieldAddress() : Src.getAddress(),
988           MemcpySize);
989       reset();
990     }
991 
992     void reset() {
993       FirstField = nullptr;
994     }
995 
996   protected:
997     CodeGenFunction &CGF;
998     const CXXRecordDecl *ClassDecl;
999 
1000   private:
1001     void emitMemcpyIR(Address DestPtr, Address SrcPtr, CharUnits Size) {
1002       DestPtr = DestPtr.withElementType(CGF.Int8Ty);
1003       SrcPtr = SrcPtr.withElementType(CGF.Int8Ty);
1004       auto *I = CGF.Builder.CreateMemCpy(DestPtr, SrcPtr, Size.getQuantity());
1005       CGF.addInstToCurrentSourceAtom(I, nullptr);
1006     }
1007 
1008     void addInitialField(FieldDecl *F) {
1009       FirstField = F;
1010       LastField = F;
1011       FirstFieldOffset = RecLayout.getFieldOffset(F->getFieldIndex());
1012       LastFieldOffset = FirstFieldOffset;
1013       LastAddedFieldIndex = F->getFieldIndex();
1014     }
1015 
1016     void addNextField(FieldDecl *F) {
1017       // For the most part, the following invariant will hold:
1018       //   F->getFieldIndex() == LastAddedFieldIndex + 1
1019       // The one exception is that Sema won't add a copy-initializer for an
1020       // unnamed bitfield, which will show up here as a gap in the sequence.
1021       assert(F->getFieldIndex() >= LastAddedFieldIndex + 1 &&
1022              "Cannot aggregate fields out of order.");
1023       LastAddedFieldIndex = F->getFieldIndex();
1024 
1025       // The 'first' and 'last' fields are chosen by offset, rather than field
1026       // index. This allows the code to support bitfields, as well as regular
1027       // fields.
1028       uint64_t FOffset = RecLayout.getFieldOffset(F->getFieldIndex());
1029       if (FOffset < FirstFieldOffset) {
1030         FirstField = F;
1031         FirstFieldOffset = FOffset;
1032       } else if (FOffset >= LastFieldOffset) {
1033         LastField = F;
1034         LastFieldOffset = FOffset;
1035       }
1036     }
1037 
1038     const VarDecl *SrcRec;
1039     const ASTRecordLayout &RecLayout;
1040     FieldDecl *FirstField;
1041     FieldDecl *LastField;
1042     uint64_t FirstFieldOffset, LastFieldOffset;
1043     unsigned LastAddedFieldIndex;
1044   };
1045 
1046   class ConstructorMemcpyizer : public FieldMemcpyizer {
1047   private:
1048     /// Get the source argument for a defaulted copy or move constructor.
1049     /// Returns null otherwise.
1050     static const VarDecl *getTrivialCopySource(CodeGenFunction &CGF,
1051                                                const CXXConstructorDecl *CD,
1052                                                FunctionArgList &Args) {
1053       if (CD->isCopyOrMoveConstructor() && CD->isDefaulted())
1054         return Args[CGF.CGM.getCXXABI().getSrcArgforCopyCtor(CD, Args)];
1055       return nullptr;
1056     }
1057 
1058     // Returns true if a CXXCtorInitializer represents a member initialization
1059     // that can be rolled into a memcpy.
1060     bool isMemberInitMemcpyable(CXXCtorInitializer *MemberInit) const {
1061       if (!MemcpyableCtor)
1062         return false;
1063       FieldDecl *Field = MemberInit->getMember();
1064       assert(Field && "No field for member init.");
1065       QualType FieldType = Field->getType();
1066       CXXConstructExpr *CE = dyn_cast<CXXConstructExpr>(MemberInit->getInit());
1067 
1068       // Bail out on non-memcpyable, not-trivially-copyable members.
1069       if (!(CE && isMemcpyEquivalentSpecialMember(CE->getConstructor())) &&
1070           !(FieldType.isTriviallyCopyableType(CGF.getContext()) ||
1071             FieldType->isReferenceType()))
1072         return false;
1073 
1074       // Bail out on fields isMemcpyableField rejects (volatile, ObjC lifetime, etc.).
1075       if (!isMemcpyableField(Field))
1076         return false;
1077 
1078       // Otherwise we're good.
1079       return true;
1080     }
1081 
1082   public:
1083     ConstructorMemcpyizer(CodeGenFunction &CGF, const CXXConstructorDecl *CD,
1084                           FunctionArgList &Args)
1085       : FieldMemcpyizer(CGF, CD->getParent(), getTrivialCopySource(CGF, CD, Args)),
1086         ConstructorDecl(CD),
1087         MemcpyableCtor(CD->isDefaulted() &&
1088                        CD->isCopyOrMoveConstructor() &&
1089                        CGF.getLangOpts().getGC() == LangOptions::NonGC),
1090         Args(Args) { }
1091 
1092     void addMemberInitializer(CXXCtorInitializer *MemberInit) {
1093       if (isMemberInitMemcpyable(MemberInit)) {
1094         AggregatedInits.push_back(MemberInit);
1095         addMemcpyableField(MemberInit->getMember());
1096       } else {
1097         emitAggregatedInits();
1098         EmitMemberInitializer(CGF, ConstructorDecl->getParent(), MemberInit,
1099                               ConstructorDecl, Args);
1100       }
1101     }
1102 
1103     void emitAggregatedInits() {
1104       if (AggregatedInits.size() <= 1) {
1105         // This memcpy is too small to be worthwhile. Fall back on default
1106         // codegen.
1107         if (!AggregatedInits.empty()) {
1108           CopyingValueRepresentation CVR(CGF);
1109           EmitMemberInitializer(CGF, ConstructorDecl->getParent(),
1110                                 AggregatedInits[0], ConstructorDecl, Args);
1111           AggregatedInits.clear();
1112         }
1113         reset();
1114         return;
1115       }
1116 
1117       pushEHDestructors();
1118       ApplyAtomGroup Grp(CGF.getDebugInfo());
1119       emitMemcpy();
1120       AggregatedInits.clear();
1121     }
1122 
1123     void pushEHDestructors() {
1124       Address ThisPtr = CGF.LoadCXXThisAddress();
1125       QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl);
1126       LValue LHS = CGF.MakeAddrLValue(ThisPtr, RecordTy);
1127 
1128       for (unsigned i = 0; i < AggregatedInits.size(); ++i) {
1129         CXXCtorInitializer *MemberInit = AggregatedInits[i];
1130         QualType FieldType = MemberInit->getAnyMember()->getType();
1131         QualType::DestructionKind dtorKind = FieldType.isDestructedType();
1132         if (!CGF.needsEHCleanup(dtorKind))
1133           continue;
1134         LValue FieldLHS = LHS;
1135         EmitLValueForAnyFieldInitialization(CGF, MemberInit, FieldLHS);
1136         CGF.pushEHDestroy(dtorKind, FieldLHS.getAddress(), FieldType);
1137       }
1138     }
1139 
1140     void finish() {
1141       emitAggregatedInits();
1142     }
1143 
1144   private:
1145     const CXXConstructorDecl *ConstructorDecl;
1146     bool MemcpyableCtor;
1147     FunctionArgList &Args;
1148     SmallVector<CXXCtorInitializer*, 16> AggregatedInits;
1149   };
1150 
1151   class AssignmentMemcpyizer : public FieldMemcpyizer {
1152   private:
1153     // Returns the memcpyable field copied by the given statement, if one
1154     // exists. Otherwise returns null.
1155     FieldDecl *getMemcpyableField(Stmt *S) {
1156       if (!AssignmentsMemcpyable)
1157         return nullptr;
1158       if (BinaryOperator *BO = dyn_cast<BinaryOperator>(S)) {
1159         // Recognise trivial assignments.
1160         if (BO->getOpcode() != BO_Assign)
1161           return nullptr;
1162         MemberExpr *ME = dyn_cast<MemberExpr>(BO->getLHS());
1163         if (!ME)
1164           return nullptr;
1165         FieldDecl *Field = dyn_cast<FieldDecl>(ME->getMemberDecl());
1166         if (!Field || !isMemcpyableField(Field))
1167           return nullptr;
1168         Stmt *RHS = BO->getRHS();
1169         if (ImplicitCastExpr *EC = dyn_cast<ImplicitCastExpr>(RHS))
1170           RHS = EC->getSubExpr();
1171         if (!RHS)
1172           return nullptr;
1173         if (MemberExpr *ME2 = dyn_cast<MemberExpr>(RHS)) {
1174           if (ME2->getMemberDecl() == Field)
1175             return Field;
1176         }
1177         return nullptr;
1178       } else if (CXXMemberCallExpr *MCE = dyn_cast<CXXMemberCallExpr>(S)) {
1179         CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(MCE->getCalleeDecl());
1180         if (!(MD && isMemcpyEquivalentSpecialMember(MD)))
1181           return nullptr;
1182         MemberExpr *IOA = dyn_cast<MemberExpr>(MCE->getImplicitObjectArgument());
1183         if (!IOA)
1184           return nullptr;
1185         FieldDecl *Field = dyn_cast<FieldDecl>(IOA->getMemberDecl());
1186         if (!Field || !isMemcpyableField(Field))
1187           return nullptr;
1188         MemberExpr *Arg0 = dyn_cast<MemberExpr>(MCE->getArg(0));
1189         if (!Arg0 || Field != dyn_cast<FieldDecl>(Arg0->getMemberDecl()))
1190           return nullptr;
1191         return Field;
1192       } else if (CallExpr *CE = dyn_cast<CallExpr>(S)) {
1193         FunctionDecl *FD = dyn_cast<FunctionDecl>(CE->getCalleeDecl());
1194         if (!FD || FD->getBuiltinID() != Builtin::BI__builtin_memcpy)
1195           return nullptr;
1196         Expr *DstPtr = CE->getArg(0);
1197         if (ImplicitCastExpr *DC = dyn_cast<ImplicitCastExpr>(DstPtr))
1198           DstPtr = DC->getSubExpr();
1199         UnaryOperator *DUO = dyn_cast<UnaryOperator>(DstPtr);
1200         if (!DUO || DUO->getOpcode() != UO_AddrOf)
1201           return nullptr;
1202         MemberExpr *ME = dyn_cast<MemberExpr>(DUO->getSubExpr());
1203         if (!ME)
1204           return nullptr;
1205         FieldDecl *Field = dyn_cast<FieldDecl>(ME->getMemberDecl());
1206         if (!Field || !isMemcpyableField(Field))
1207           return nullptr;
1208         Expr *SrcPtr = CE->getArg(1);
1209         if (ImplicitCastExpr *SC = dyn_cast<ImplicitCastExpr>(SrcPtr))
1210           SrcPtr = SC->getSubExpr();
1211         UnaryOperator *SUO = dyn_cast<UnaryOperator>(SrcPtr);
1212         if (!SUO || SUO->getOpcode() != UO_AddrOf)
1213           return nullptr;
1214         MemberExpr *ME2 = dyn_cast<MemberExpr>(SUO->getSubExpr());
1215         if (!ME2 || Field != dyn_cast<FieldDecl>(ME2->getMemberDecl()))
1216           return nullptr;
1217         return Field;
1218       }
1219 
1220       return nullptr;
1221     }
1222 
1223     bool AssignmentsMemcpyable;
1224     SmallVector<Stmt*, 16> AggregatedStmts;
1225 
1226   public:
1227     AssignmentMemcpyizer(CodeGenFunction &CGF, const CXXMethodDecl *AD,
1228                          FunctionArgList &Args)
1229       : FieldMemcpyizer(CGF, AD->getParent(), Args[Args.size() - 1]),
1230         AssignmentsMemcpyable(CGF.getLangOpts().getGC() == LangOptions::NonGC) {
1231       assert(Args.size() == 2);
1232     }
1233 
1234     void emitAssignment(Stmt *S) {
1235       FieldDecl *F = getMemcpyableField(S);
1236       if (F) {
1237         addMemcpyableField(F);
1238         AggregatedStmts.push_back(S);
1239       } else {
1240         emitAggregatedStmts();
1241         CGF.EmitStmt(S);
1242       }
1243     }
1244 
1245     void emitAggregatedStmts() {
1246       if (AggregatedStmts.size() <= 1) {
1247         if (!AggregatedStmts.empty()) {
1248           CopyingValueRepresentation CVR(CGF);
1249           CGF.EmitStmt(AggregatedStmts[0]);
1250         }
1251         reset();
1252       }
1253 
1254       ApplyAtomGroup Grp(CGF.getDebugInfo());
1255       emitMemcpy();
1256       AggregatedStmts.clear();
1257     }
1258 
1259     void finish() {
1260       emitAggregatedStmts();
1261     }
1262   };
1263 } // end anonymous namespace
1264 
1265 static bool isInitializerOfDynamicClass(const CXXCtorInitializer *BaseInit) {
1266   const Type *BaseType = BaseInit->getBaseClass();
1267   const auto *BaseClassDecl =
1268       cast<CXXRecordDecl>(BaseType->castAs<RecordType>()->getDecl());
1269   return BaseClassDecl->isDynamicClass();
1270 }
1271 
1272 /// EmitCtorPrologue - This routine generates necessary code to initialize
1273 /// base classes and non-static data members belonging to this constructor.
1274 void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD,
1275                                        CXXCtorType CtorType,
1276                                        FunctionArgList &Args) {
1277   if (CD->isDelegatingConstructor())
1278     return EmitDelegatingCXXConstructorCall(CD, Args);
1279 
1280   const CXXRecordDecl *ClassDecl = CD->getParent();
1281 
1282   CXXConstructorDecl::init_const_iterator B = CD->init_begin(),
1283                                           E = CD->init_end();
1284 
1285   // Virtual base initializers first, if any. They aren't needed if:
1286   // - This is a base ctor variant
1287   // - There are no vbases
1288   // - The class is abstract, so a complete object of it cannot be constructed
1289   //
1290   // The check for an abstract class is necessary because sema may not have
1291   // marked virtual base destructors referenced.
1292   bool ConstructVBases = CtorType != Ctor_Base &&
1293                          ClassDecl->getNumVBases() != 0 &&
1294                          !ClassDecl->isAbstract();
1295 
1296   // In the Microsoft C++ ABI, there are no constructor variants. Instead, the
1297   // constructor of a class with virtual bases takes an additional parameter to
1298   // conditionally construct the virtual bases. Emit that check here.
1299   llvm::BasicBlock *BaseCtorContinueBB = nullptr;
1300   if (ConstructVBases &&
1301       !CGM.getTarget().getCXXABI().hasConstructorVariants()) {
1302     BaseCtorContinueBB =
1303         CGM.getCXXABI().EmitCtorCompleteObjectHandler(*this, ClassDecl);
1304     assert(BaseCtorContinueBB);
1305   }
1306 
1307   for (; B != E && (*B)->isBaseInitializer() && (*B)->isBaseVirtual(); B++) {
1308     if (!ConstructVBases)
1309       continue;
1310     SaveAndRestore ThisRAII(CXXThisValue);
1311     if (CGM.getCodeGenOpts().StrictVTablePointers &&
1312         CGM.getCodeGenOpts().OptimizationLevel > 0 &&
1313         isInitializerOfDynamicClass(*B))
1314       CXXThisValue = Builder.CreateLaunderInvariantGroup(LoadCXXThis());
1315     EmitBaseInitializer(*this, ClassDecl, *B);
1316   }
1317 
1318   if (BaseCtorContinueBB) {
1319     // Complete object handler should continue to the remaining initializers.
1320     Builder.CreateBr(BaseCtorContinueBB);
1321     EmitBlock(BaseCtorContinueBB);
1322   }
1323 
1324   // Then, non-virtual base initializers.
1325   for (; B != E && (*B)->isBaseInitializer(); B++) {
1326     assert(!(*B)->isBaseVirtual());
1327     SaveAndRestore ThisRAII(CXXThisValue);
1328     if (CGM.getCodeGenOpts().StrictVTablePointers &&
1329         CGM.getCodeGenOpts().OptimizationLevel > 0 &&
1330         isInitializerOfDynamicClass(*B))
1331       CXXThisValue = Builder.CreateLaunderInvariantGroup(LoadCXXThis());
1332     EmitBaseInitializer(*this, ClassDecl, *B);
1333   }
1334 
1335   InitializeVTablePointers(ClassDecl);
1336 
1337   // And finally, initialize class members.
1338   FieldConstructionScope FCS(*this, LoadCXXThisAddress());
1339   ConstructorMemcpyizer CM(*this, CD, Args);
1340   for (; B != E; B++) {
1341     CXXCtorInitializer *Member = (*B);
1342     assert(!Member->isBaseInitializer());
1343     assert(Member->isAnyMemberInitializer() &&
1344            "Delegating initializer on non-delegating constructor");
1345     CM.addMemberInitializer(Member);
1346   }
1347 
1348   CM.finish();
1349 }
1350 
1351 static bool
1352 FieldHasTrivialDestructorBody(ASTContext &Context, const FieldDecl *Field);
1353 
1354 static bool
1355 HasTrivialDestructorBody(ASTContext &Context,
1356                          const CXXRecordDecl *BaseClassDecl,
1357                          const CXXRecordDecl *MostDerivedClassDecl)
1358 {
1359   // If the destructor is trivial we don't have to check anything else.
1360   if (BaseClassDecl->hasTrivialDestructor())
1361     return true;
1362 
1363   if (!BaseClassDecl->getDestructor()->hasTrivialBody())
1364     return false;
1365 
1366   // Check fields.
1367   for (const auto *Field : BaseClassDecl->fields())
1368     if (!FieldHasTrivialDestructorBody(Context, Field))
1369       return false;
1370 
1371   // Check non-virtual bases.
1372   for (const auto &I : BaseClassDecl->bases()) {
1373     if (I.isVirtual())
1374       continue;
1375 
1376     const CXXRecordDecl *NonVirtualBase =
1377       cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());
1378     if (!HasTrivialDestructorBody(Context, NonVirtualBase,
1379                                   MostDerivedClassDecl))
1380       return false;
1381   }
1382 
1383   if (BaseClassDecl == MostDerivedClassDecl) {
1384     // Check virtual bases.
1385     for (const auto &I : BaseClassDecl->vbases()) {
1386       const CXXRecordDecl *VirtualBase =
1387         cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());
1388       if (!HasTrivialDestructorBody(Context, VirtualBase,
1389                                     MostDerivedClassDecl))
1390         return false;
1391     }
1392   }
1393 
1394   return true;
1395 }
1396 
1397 static bool
1398 FieldHasTrivialDestructorBody(ASTContext &Context,
1399                               const FieldDecl *Field)
1400 {
1401   QualType FieldBaseElementType = Context.getBaseElementType(Field->getType());
1402 
1403   const RecordType *RT = FieldBaseElementType->getAs<RecordType>();
1404   if (!RT)
1405     return true;
1406 
1407   CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());
1408 
1409   // The destructor for an implicit anonymous union member is never invoked.
1410   if (FieldClassDecl->isUnion() && FieldClassDecl->isAnonymousStructOrUnion())
1411     return true;
1412 
1413   return HasTrivialDestructorBody(Context, FieldClassDecl, FieldClassDecl);
1414 }
1415 
1416 /// CanSkipVTablePointerInitialization - Check whether we need to initialize
1417 /// any vtable pointers before calling this destructor.
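///
/// For example (illustrative only), the vptr stores can be skipped for a
/// final class, whose vptr must already point at its own vtable:
/// \code
///   struct Leaf final : Base { ~Leaf(); };
/// \endcode
/// They can also be skipped when the destructor body is trivial and no
/// field's destructor could observe the vptr.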
1418 static bool CanSkipVTablePointerInitialization(CodeGenFunction &CGF,
1419                                                const CXXDestructorDecl *Dtor) {
1420   const CXXRecordDecl *ClassDecl = Dtor->getParent();
1421   if (!ClassDecl->isDynamicClass())
1422     return true;
1423 
1424   // For a final class, the vtable pointer is known to already point to the
1425   // class's vtable.
1426   if (ClassDecl->isEffectivelyFinal())
1427     return true;
1428 
1429   if (!Dtor->hasTrivialBody())
1430     return false;
1431 
1432   // Check the fields.
1433   for (const auto *Field : ClassDecl->fields())
1434     if (!FieldHasTrivialDestructorBody(CGF.getContext(), Field))
1435       return false;
1436 
1437   return true;
1438 }
1439 
1440 /// EmitDestructorBody - Emits the body of the current destructor.
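///
/// In the Itanium C++ ABI a destructor has up to three variants: the
/// base-object destructor (D2) destroys non-virtual bases and members, the
/// complete-object destructor (D1) additionally destroys virtual bases, and
/// the deleting destructor (D0) additionally calls operator delete. The code
/// below delegates between these variants where possible.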
1441 void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) {
1442   const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CurGD.getDecl());
1443   CXXDtorType DtorType = CurGD.getDtorType();
1444 
1445   // For an abstract class, non-base destructors are never used (and can't
1446   // be emitted in general, because vbase dtors may not have been validated
1447   // by Sema), but the Itanium ABI doesn't make them optional and Clang may
1448   // in fact emit references to them from other compilations, so emit them
1449   // as functions containing a trap instruction.
1450   if (DtorType != Dtor_Base && Dtor->getParent()->isAbstract()) {
1451     llvm::CallInst *TrapCall = EmitTrapCall(llvm::Intrinsic::trap);
1452     TrapCall->setDoesNotReturn();
1453     TrapCall->setDoesNotThrow();
1454     Builder.CreateUnreachable();
1455     Builder.ClearInsertionPoint();
1456     return;
1457   }
1458 
1459   Stmt *Body = Dtor->getBody();
1460   if (Body) {
1461     incrementProfileCounter(Body);
1462     maybeCreateMCDCCondBitmap();
1463   }
1464 
1465   // The call to operator delete in a deleting destructor happens
1466   // outside of the function-try-block, which means it's always
1467   // possible to delegate the destructor body to the complete
1468   // destructor.  Do so.
1469   if (DtorType == Dtor_Deleting) {
1470     RunCleanupsScope DtorEpilogue(*this);
1471     EnterDtorCleanups(Dtor, Dtor_Deleting);
1472     if (HaveInsertPoint()) {
1473       QualType ThisTy = Dtor->getFunctionObjectParameterType();
1474       EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
1475                             /*Delegating=*/false, LoadCXXThisAddress(), ThisTy);
1476     }
1477     return;
1478   }
1479 
1480   // If the body is a function-try-block, enter the try before
1481   // anything else.
1482   bool isTryBody = isa_and_nonnull<CXXTryStmt>(Body);
1483   if (isTryBody)
1484     EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);
1485   EmitAsanPrologueOrEpilogue(false);
1486 
1487   // Enter the epilogue cleanups.
1488   RunCleanupsScope DtorEpilogue(*this);
1489 
1490   // If this is the complete variant, just invoke the base variant;
1491   // the epilogue will destruct the virtual bases.  But we can't do
1492   // this optimization if the body is a function-try-block, because
1493   // we'd introduce *two* handler blocks.  In the Microsoft ABI, we
1494   // always delegate because we might not have a definition in this TU.
1495   switch (DtorType) {
1496   case Dtor_Comdat: llvm_unreachable("not expecting a COMDAT");
1497   case Dtor_Deleting: llvm_unreachable("already handled deleting case");
1498 
1499   case Dtor_Complete:
1500     assert((Body || getTarget().getCXXABI().isMicrosoft()) &&
1501            "can't emit a dtor without a body for non-Microsoft ABIs");
1502 
1503     // Enter the cleanup scopes for virtual bases.
1504     EnterDtorCleanups(Dtor, Dtor_Complete);
1505 
1506     if (!isTryBody) {
1507       QualType ThisTy = Dtor->getFunctionObjectParameterType();
1508       EmitCXXDestructorCall(Dtor, Dtor_Base, /*ForVirtualBase=*/false,
1509                             /*Delegating=*/false, LoadCXXThisAddress(), ThisTy);
1510       break;
1511     }
1512 
1513     // Fallthrough: act like we're in the base variant.
1514     [[fallthrough]];
1515 
1516   case Dtor_Base:
1517     assert(Body);
1518 
1519     // Enter the cleanup scopes for fields and non-virtual bases.
1520     EnterDtorCleanups(Dtor, Dtor_Base);
1521 
1522     // Initialize the vtable pointers before entering the body.
1523     if (!CanSkipVTablePointerInitialization(*this, Dtor)) {
1524       // Insert the llvm.launder.invariant.group intrinsic before initializing
1525       // the vptrs to cancel any previous assumptions we might have made.
1526       if (CGM.getCodeGenOpts().StrictVTablePointers &&
1527           CGM.getCodeGenOpts().OptimizationLevel > 0)
1528         CXXThisValue = Builder.CreateLaunderInvariantGroup(LoadCXXThis());
1529       InitializeVTablePointers(Dtor->getParent());
1530     }
1531 
1532     if (isTryBody)
1533       EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
1534     else if (Body)
1535       EmitStmt(Body);
1536     else {
1537       assert(Dtor->isImplicit() && "bodyless dtor not implicit");
1538       // nothing to do besides what's in the epilogue
1539     }
1540     // -fapple-kext must inline any call to this dtor into
1541     // the caller's body.
1542     if (getLangOpts().AppleKext)
1543       CurFn->addFnAttr(llvm::Attribute::AlwaysInline);
1544 
1545     break;
1546   }
1547 
1548   // Jump out through the epilogue cleanups.
1549   DtorEpilogue.ForceCleanup();
1550 
1551   // Exit the try if applicable.
1552   if (isTryBody)
1553     ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
1554 }
1555 
1556 void CodeGenFunction::emitImplicitAssignmentOperatorBody(FunctionArgList &Args) {
1557   const CXXMethodDecl *AssignOp = cast<CXXMethodDecl>(CurGD.getDecl());
1558   const Stmt *RootS = AssignOp->getBody();
1559   assert(isa<CompoundStmt>(RootS) &&
1560          "Body of an implicit assignment operator should be compound stmt.");
1561   const CompoundStmt *RootCS = cast<CompoundStmt>(RootS);
1562 
1563   LexicalScope Scope(*this, RootCS->getSourceRange());
1564 
1565   incrementProfileCounter(RootCS);
1566   maybeCreateMCDCCondBitmap();
1567   AssignmentMemcpyizer AM(*this, AssignOp, Args);
1568   for (auto *I : RootCS->body())
1569     AM.emitAssignment(I);
1570 
1571   AM.finish();
1572 }
1573 
1574 namespace {
1575   llvm::Value *LoadThisForDtorDelete(CodeGenFunction &CGF,
1576                                      const CXXDestructorDecl *DD) {
1577     if (Expr *ThisArg = DD->getOperatorDeleteThisArg())
1578       return CGF.EmitScalarExpr(ThisArg);
1579     return CGF.LoadCXXThis();
1580   }
1581 
1582   /// Call the operator delete associated with the current destructor.
1583   struct CallDtorDelete final : EHScopeStack::Cleanup {
1584     CallDtorDelete() {}
1585 
1586     void Emit(CodeGenFunction &CGF, Flags flags) override {
1587       const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl);
1588       const CXXRecordDecl *ClassDecl = Dtor->getParent();
1589       CGF.EmitDeleteCall(Dtor->getOperatorDelete(),
1590                          LoadThisForDtorDelete(CGF, Dtor),
1591                          CGF.getContext().getTagDeclType(ClassDecl));
1592     }
1593   };
1594 
1595   void EmitConditionalDtorDeleteCall(CodeGenFunction &CGF,
1596                                      llvm::Value *ShouldDeleteCondition,
1597                                      bool ReturnAfterDelete) {
1598     llvm::BasicBlock *callDeleteBB = CGF.createBasicBlock("dtor.call_delete");
1599     llvm::BasicBlock *continueBB = CGF.createBasicBlock("dtor.continue");
1600     llvm::Value *ShouldCallDelete
1601       = CGF.Builder.CreateIsNull(ShouldDeleteCondition);
1602     CGF.Builder.CreateCondBr(ShouldCallDelete, continueBB, callDeleteBB);
1603 
1604     CGF.EmitBlock(callDeleteBB);
1605     const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl);
1606     const CXXRecordDecl *ClassDecl = Dtor->getParent();
1607     CGF.EmitDeleteCall(Dtor->getOperatorDelete(),
1608                        LoadThisForDtorDelete(CGF, Dtor),
1609                        CGF.getContext().getTagDeclType(ClassDecl));
1610     assert(Dtor->getOperatorDelete()->isDestroyingOperatorDelete() ==
1611                ReturnAfterDelete &&
1612            "unexpected value for ReturnAfterDelete");
1613     if (ReturnAfterDelete)
1614       CGF.EmitBranchThroughCleanup(CGF.ReturnBlock);
1615     else
1616       CGF.Builder.CreateBr(continueBB);
1617 
1618     CGF.EmitBlock(continueBB);
1619   }
1620 
1621   struct CallDtorDeleteConditional final : EHScopeStack::Cleanup {
1622     llvm::Value *ShouldDeleteCondition;
1623 
1624   public:
1625     CallDtorDeleteConditional(llvm::Value *ShouldDeleteCondition)
1626         : ShouldDeleteCondition(ShouldDeleteCondition) {
1627       assert(ShouldDeleteCondition != nullptr);
1628     }
1629 
1630     void Emit(CodeGenFunction &CGF, Flags flags) override {
1631       EmitConditionalDtorDeleteCall(CGF, ShouldDeleteCondition,
1632                                     /*ReturnAfterDelete*/false);
1633     }
1634   };
1635 
1636   class DestroyField  final : public EHScopeStack::Cleanup {
1637     const FieldDecl *field;
1638     CodeGenFunction::Destroyer *destroyer;
1639     bool useEHCleanupForArray;
1640 
1641   public:
1642     DestroyField(const FieldDecl *field, CodeGenFunction::Destroyer *destroyer,
1643                  bool useEHCleanupForArray)
1644         : field(field), destroyer(destroyer),
1645           useEHCleanupForArray(useEHCleanupForArray) {}
1646 
1647     void Emit(CodeGenFunction &CGF, Flags flags) override {
1648       // Find the address of the field.
1649       Address thisValue = CGF.LoadCXXThisAddress();
1650       QualType RecordTy = CGF.getContext().getTagDeclType(field->getParent());
1651       LValue ThisLV = CGF.MakeAddrLValue(thisValue, RecordTy);
1652       LValue LV = CGF.EmitLValueForField(ThisLV, field);
1653       assert(LV.isSimple());
1654 
1655       CGF.emitDestroy(LV.getAddress(), field->getType(), destroyer,
1656                       flags.isForNormalCleanup() && useEHCleanupForArray);
1657     }
1658   };
1659 
1660   class DeclAsInlineDebugLocation {
1661     CGDebugInfo *DI;
1662     llvm::MDNode *InlinedAt;
1663     std::optional<ApplyDebugLocation> Location;
1664 
1665   public:
1666     DeclAsInlineDebugLocation(CodeGenFunction &CGF, const NamedDecl &Decl)
1667         : DI(CGF.getDebugInfo()) {
1668       if (!DI)
1669         return;
1670       InlinedAt = DI->getInlinedAt();
1671       DI->setInlinedAt(CGF.Builder.getCurrentDebugLocation());
1672       Location.emplace(CGF, Decl.getLocation());
1673     }
1674 
1675     ~DeclAsInlineDebugLocation() {
1676       if (!DI)
1677         return;
1678       Location.reset();
1679       DI->setInlinedAt(InlinedAt);
1680     }
1681   };
1682 
1683   static void EmitSanitizerDtorCallback(
1684       CodeGenFunction &CGF, StringRef Name, llvm::Value *Ptr,
1685       std::optional<CharUnits::QuantityType> PoisonSize = {}) {
1686     CodeGenFunction::SanitizerScope SanScope(&CGF);
1687     // Pass in void pointer and size of region as arguments to runtime
1688     // function
1689     SmallVector<llvm::Value *, 2> Args = {Ptr};
1690     SmallVector<llvm::Type *, 2> ArgTypes = {CGF.VoidPtrTy};
1691 
1692     if (PoisonSize.has_value()) {
1693       Args.emplace_back(llvm::ConstantInt::get(CGF.SizeTy, *PoisonSize));
1694       ArgTypes.emplace_back(CGF.SizeTy);
1695     }
1696 
1697     llvm::FunctionType *FnType =
1698         llvm::FunctionType::get(CGF.VoidTy, ArgTypes, false);
1699     llvm::FunctionCallee Fn = CGF.CGM.CreateRuntimeFunction(FnType, Name);
1700 
1701     CGF.EmitNounwindRuntimeCall(Fn, Args);
1702   }
1703 
1704   static void
1705   EmitSanitizerDtorFieldsCallback(CodeGenFunction &CGF, llvm::Value *Ptr,
1706                                   CharUnits::QuantityType PoisonSize) {
1707     EmitSanitizerDtorCallback(CGF, "__sanitizer_dtor_callback_fields", Ptr,
1708                               PoisonSize);
1709   }
1710 
1711   /// Poison base class with a trivial destructor.
1712   struct SanitizeDtorTrivialBase final : EHScopeStack::Cleanup {
1713     const CXXRecordDecl *BaseClass;
1714     bool BaseIsVirtual;
1715     SanitizeDtorTrivialBase(const CXXRecordDecl *Base, bool BaseIsVirtual)
1716         : BaseClass(Base), BaseIsVirtual(BaseIsVirtual) {}
1717 
1718     void Emit(CodeGenFunction &CGF, Flags flags) override {
1719       const CXXRecordDecl *DerivedClass =
1720           cast<CXXMethodDecl>(CGF.CurCodeDecl)->getParent();
1721 
1722       Address Addr = CGF.GetAddressOfDirectBaseInCompleteClass(
1723           CGF.LoadCXXThisAddress(), DerivedClass, BaseClass, BaseIsVirtual);
1724 
1725       const ASTRecordLayout &BaseLayout =
1726           CGF.getContext().getASTRecordLayout(BaseClass);
1727       CharUnits BaseSize = BaseLayout.getSize();
1728 
1729       if (!BaseSize.isPositive())
1730         return;
1731 
1732       // Use the base class declaration location as inline DebugLocation. All
1733       // fields of the class are destroyed.
1734       DeclAsInlineDebugLocation InlineHere(CGF, *BaseClass);
1735       EmitSanitizerDtorFieldsCallback(CGF, Addr.emitRawPointer(CGF),
1736                                       BaseSize.getQuantity());
1737 
1738       // Prevent the current stack frame from disappearing from the stack trace.
1739       CGF.CurFn->addFnAttr("disable-tail-calls", "true");
1740     }
1741   };
1742 
1743   class SanitizeDtorFieldRange final : public EHScopeStack::Cleanup {
1744     const CXXDestructorDecl *Dtor;
1745     unsigned StartIndex;
1746     unsigned EndIndex;
1747 
1748   public:
1749     SanitizeDtorFieldRange(const CXXDestructorDecl *Dtor, unsigned StartIndex,
1750                            unsigned EndIndex)
1751         : Dtor(Dtor), StartIndex(StartIndex), EndIndex(EndIndex) {}
1752 
1753     // Generate function call for handling object poisoning.
1754     // Disables tail call elimination, to prevent the current stack frame
1755     // from disappearing from the stack trace.
1756     void Emit(CodeGenFunction &CGF, Flags flags) override {
1757       const ASTContext &Context = CGF.getContext();
1758       const ASTRecordLayout &Layout =
1759           Context.getASTRecordLayout(Dtor->getParent());
1760 
1761       // This is the first trivial field, so it should start at the beginning of
1762       // a char; still, round up the start offset just in case.
1763       CharUnits PoisonStart = Context.toCharUnitsFromBits(
1764           Layout.getFieldOffset(StartIndex) + Context.getCharWidth() - 1);
1765       llvm::ConstantInt *OffsetSizePtr =
1766           llvm::ConstantInt::get(CGF.SizeTy, PoisonStart.getQuantity());
1767 
1768       llvm::Value *OffsetPtr =
1769           CGF.Builder.CreateGEP(CGF.Int8Ty, CGF.LoadCXXThis(), OffsetSizePtr);
1770 
1771       CharUnits PoisonEnd;
1772       if (EndIndex >= Layout.getFieldCount()) {
1773         PoisonEnd = Layout.getNonVirtualSize();
1774       } else {
1775         PoisonEnd =
1776             Context.toCharUnitsFromBits(Layout.getFieldOffset(EndIndex));
1777       }
1778       CharUnits PoisonSize = PoisonEnd - PoisonStart;
1779       if (!PoisonSize.isPositive())
1780         return;
1781 
1782       // Use the top field declaration location as inline DebugLocation.
1783       DeclAsInlineDebugLocation InlineHere(
1784           CGF, **std::next(Dtor->getParent()->field_begin(), StartIndex));
1785       EmitSanitizerDtorFieldsCallback(CGF, OffsetPtr, PoisonSize.getQuantity());
1786 
1787       // Prevent the current stack frame from disappearing from the stack trace.
1788       CGF.CurFn->addFnAttr("disable-tail-calls", "true");
1789     }
1790   };
1791 
1792  class SanitizeDtorVTable final : public EHScopeStack::Cleanup {
1793     const CXXDestructorDecl *Dtor;
1794 
1795   public:
1796     SanitizeDtorVTable(const CXXDestructorDecl *Dtor) : Dtor(Dtor) {}
1797 
1798     // Generate function call for handling vtable pointer poisoning.
1799     void Emit(CodeGenFunction &CGF, Flags flags) override {
1800       assert(Dtor->getParent()->isDynamicClass());
1801       (void)Dtor;
1802       // Poison vtable and vtable ptr if they exist for this class.
1803       llvm::Value *VTablePtr = CGF.LoadCXXThis();
1804 
1805       // Pass in void pointer and size of region as arguments to runtime
1806       // function
1807       EmitSanitizerDtorCallback(CGF, "__sanitizer_dtor_callback_vptr",
1808                                 VTablePtr);
1809     }
1810  };
1811 
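 // Coalesces runs of consecutive fields with trivial destructor bodies into
 // single SanitizeDtorFieldRange cleanups, so one poisoning callback covers
 // the whole run instead of one callback per field.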
1812  class SanitizeDtorCleanupBuilder {
1813    ASTContext &Context;
1814    EHScopeStack &EHStack;
1815    const CXXDestructorDecl *DD;
1816    std::optional<unsigned> StartIndex;
1817 
1818  public:
1819    SanitizeDtorCleanupBuilder(ASTContext &Context, EHScopeStack &EHStack,
1820                               const CXXDestructorDecl *DD)
1821        : Context(Context), EHStack(EHStack), DD(DD), StartIndex(std::nullopt) {}
1822    void PushCleanupForField(const FieldDecl *Field) {
1823      if (isEmptyFieldForLayout(Context, Field))
1824        return;
1825      unsigned FieldIndex = Field->getFieldIndex();
1826      if (FieldHasTrivialDestructorBody(Context, Field)) {
1827        if (!StartIndex)
1828          StartIndex = FieldIndex;
1829      } else if (StartIndex) {
1830        EHStack.pushCleanup<SanitizeDtorFieldRange>(NormalAndEHCleanup, DD,
1831                                                    *StartIndex, FieldIndex);
1832        StartIndex = std::nullopt;
1833      }
1834    }
1835    void End() {
1836      if (StartIndex)
1837        EHStack.pushCleanup<SanitizeDtorFieldRange>(NormalAndEHCleanup, DD,
1838                                                    *StartIndex, -1);
1839    }
1840  };
1841 } // end anonymous namespace
1842 
1843 /// Emit all code that comes at the end of a class's
1844 /// destructor. This is to call destructors on members and base classes
1845 /// in reverse order of their construction.
1846 ///
1847 /// For a deleting destructor, this also handles the case where a destroying
1848 /// operator delete completely overrides the definition.
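///
/// For example, with a destroying operator delete (illustrative only):
/// \code
///   struct D {
///     ~D();
///     void operator delete(D *, std::destroying_delete_t);
///   };
/// \endcode
/// the deleting destructor calls the operator directly and returns; the
/// operator itself is then responsible for running ~D().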
1849 void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
1850                                         CXXDtorType DtorType) {
1851   assert((!DD->isTrivial() || DD->hasAttr<DLLExportAttr>()) &&
1852          "Should not emit dtor epilogue for non-exported trivial dtor!");
1853 
1854   // The deleting-destructor phase just needs to call the appropriate
1855   // operator delete that Sema picked up.
1856   if (DtorType == Dtor_Deleting) {
1857     assert(DD->getOperatorDelete() &&
1858            "operator delete missing - EnterDtorCleanups");
1859     if (CXXStructorImplicitParamValue) {
1860       // If there is an implicit param to the deleting dtor, it's a boolean
1861       // telling whether this is a deleting destructor.
1862       if (DD->getOperatorDelete()->isDestroyingOperatorDelete())
1863         EmitConditionalDtorDeleteCall(*this, CXXStructorImplicitParamValue,
1864                                       /*ReturnAfterDelete*/true);
1865       else
1866         EHStack.pushCleanup<CallDtorDeleteConditional>(
1867             NormalAndEHCleanup, CXXStructorImplicitParamValue);
1868     } else {
1869       if (DD->getOperatorDelete()->isDestroyingOperatorDelete()) {
1870         const CXXRecordDecl *ClassDecl = DD->getParent();
1871         EmitDeleteCall(DD->getOperatorDelete(),
1872                        LoadThisForDtorDelete(*this, DD),
1873                        getContext().getTagDeclType(ClassDecl));
1874         EmitBranchThroughCleanup(ReturnBlock);
1875       } else {
1876         EHStack.pushCleanup<CallDtorDelete>(NormalAndEHCleanup);
1877       }
1878     }
1879     return;
1880   }
1881 
1882   const CXXRecordDecl *ClassDecl = DD->getParent();
1883 
1884   // Unions have no bases and do not call field destructors.
1885   if (ClassDecl->isUnion())
1886     return;
1887 
1888   // The complete-destructor phase just destructs all the virtual bases.
1889   if (DtorType == Dtor_Complete) {
1890     // Poison the vtable pointer so that any access after the base and member
1891     // destructors have run is invalid.
1892     if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor &&
1893         SanOpts.has(SanitizerKind::Memory) && ClassDecl->getNumVBases() &&
1894         ClassDecl->isPolymorphic())
1895       EHStack.pushCleanup<SanitizeDtorVTable>(NormalAndEHCleanup, DD);
1896 
1897     // We push them in the forward order so that they'll be popped in
1898     // the reverse order.
1899     for (const auto &Base : ClassDecl->vbases()) {
1900       auto *BaseClassDecl =
1901           cast<CXXRecordDecl>(Base.getType()->castAs<RecordType>()->getDecl());
1902 
1903       if (BaseClassDecl->hasTrivialDestructor()) {
1904         // Under SanitizeMemoryUseAfterDtor, poison the trivial base class
1905         // memory. For non-trivial base classes the same is done in the class
1906         // destructor.
1907         if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor &&
1908             SanOpts.has(SanitizerKind::Memory) && !BaseClassDecl->isEmpty())
1909           EHStack.pushCleanup<SanitizeDtorTrivialBase>(NormalAndEHCleanup,
1910                                                        BaseClassDecl,
1911                                                        /*BaseIsVirtual*/ true);
1912       } else {
1913         EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup, BaseClassDecl,
1914                                           /*BaseIsVirtual*/ true);
1915       }
1916     }
1917 
1918     return;
1919   }
1920 
1921   assert(DtorType == Dtor_Base);
1922   // Poison the vtable pointer if the class has no virtual bases but declares
1923   // or inherits virtual functions.
1924   if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor &&
1925       SanOpts.has(SanitizerKind::Memory) && !ClassDecl->getNumVBases() &&
1926       ClassDecl->isPolymorphic())
1927     EHStack.pushCleanup<SanitizeDtorVTable>(NormalAndEHCleanup, DD);
1928 
1929   // Destroy non-virtual bases.
1930   for (const auto &Base : ClassDecl->bases()) {
1931     // Ignore virtual bases.
1932     if (Base.isVirtual())
1933       continue;
1934 
1935     CXXRecordDecl *BaseClassDecl = Base.getType()->getAsCXXRecordDecl();
1936 
1937     if (BaseClassDecl->hasTrivialDestructor()) {
1938       if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor &&
1939           SanOpts.has(SanitizerKind::Memory) && !BaseClassDecl->isEmpty())
1940         EHStack.pushCleanup<SanitizeDtorTrivialBase>(NormalAndEHCleanup,
1941                                                      BaseClassDecl,
1942                                                      /*BaseIsVirtual*/ false);
1943     } else {
1944       EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup, BaseClassDecl,
1945                                         /*BaseIsVirtual*/ false);
1946     }
1947   }
1948 
1949   // Poison fields so that any access after their destructors have run, but
1950   // before the base class destructor runs, is invalid.
1951   bool SanitizeFields = CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor &&
1952                         SanOpts.has(SanitizerKind::Memory);
1953   SanitizeDtorCleanupBuilder SanitizeBuilder(getContext(), EHStack, DD);
1954 
1955   // Destroy direct fields.
1956   for (const auto *Field : ClassDecl->fields()) {
1957     if (SanitizeFields)
1958       SanitizeBuilder.PushCleanupForField(Field);
1959 
1960     QualType type = Field->getType();
1961     QualType::DestructionKind dtorKind = type.isDestructedType();
1962     if (!dtorKind)
1963       continue;
1964 
1965     // Anonymous union members do not have their destructors called.
1966     const RecordType *RT = type->getAsUnionType();
1967     if (RT && RT->getDecl()->isAnonymousStructOrUnion())
1968       continue;
1969 
1970     CleanupKind cleanupKind = getCleanupKind(dtorKind);
1971     EHStack.pushCleanup<DestroyField>(
1972         cleanupKind, Field, getDestroyer(dtorKind), cleanupKind & EHCleanup);
1973   }
1974 
1975   if (SanitizeFields)
1976     SanitizeBuilder.End();
1977 }
1978 
1979 /// EmitCXXAggrConstructorCall - Emit a loop to call a particular
1980 /// constructor for each of several members of an array.
1981 ///
1982 /// \param ctor the constructor to call for each element
1983 /// \param arrayType the type of the array to initialize
1984 /// \param arrayBegin an arrayType*
1985 /// \param zeroInitialize true if each element should be
1986 ///   zero-initialized before it is constructed
1987 void CodeGenFunction::EmitCXXAggrConstructorCall(
1988     const CXXConstructorDecl *ctor, const ArrayType *arrayType,
1989     Address arrayBegin, const CXXConstructExpr *E, bool NewPointerIsChecked,
1990     bool zeroInitialize) {
1991   QualType elementType;
1992   llvm::Value *numElements =
1993     emitArrayLength(arrayType, elementType, arrayBegin);
1994 
1995   EmitCXXAggrConstructorCall(ctor, numElements, arrayBegin, E,
1996                              NewPointerIsChecked, zeroInitialize);
1997 }
1998 
1999 /// EmitCXXAggrConstructorCall - Emit a loop to call a particular
2000 /// constructor for each of several members of an array.
2001 ///
2002 /// \param ctor the constructor to call for each element
2003 /// \param numElements the number of elements in the array;
2004 ///   may be zero
2005 /// \param arrayBase a T*, where T is the type constructed by ctor
2006 /// \param zeroInitialize true if each element should be
2007 ///   zero-initialized before it is constructed
2008 void CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor,
2009                                                  llvm::Value *numElements,
2010                                                  Address arrayBase,
2011                                                  const CXXConstructExpr *E,
2012                                                  bool NewPointerIsChecked,
2013                                                  bool zeroInitialize) {
2014   // It's legal for numElements to be zero.  This can happen both
2015   // dynamically, because x can be zero in 'new A[x]', and statically,
2016   // because of GCC extensions that permit zero-length arrays.  There
2017   // are probably legitimate places where we could assume that this
2018   // doesn't happen, but it's not clear that it's worth it.
2019   llvm::BranchInst *zeroCheckBranch = nullptr;
2020 
2021   // Optimize for a constant count.
2022   llvm::ConstantInt *constantCount
2023     = dyn_cast<llvm::ConstantInt>(numElements);
2024   if (constantCount) {
2025     // Just skip out if the constant count is zero.
2026     if (constantCount->isZero()) return;
2027 
2028   // Otherwise, emit the check.
2029   } else {
2030     llvm::BasicBlock *loopBB = createBasicBlock("new.ctorloop");
2031     llvm::Value *iszero = Builder.CreateIsNull(numElements, "isempty");
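    // Both successors initially point at loopBB; the "count is zero" successor
    // is patched to the continuation block once it has been created (see the
    // setSuccessor call after the loop).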
2032     zeroCheckBranch = Builder.CreateCondBr(iszero, loopBB, loopBB);
2033     EmitBlock(loopBB);
2034   }
2035 
2036   // Find the end of the array.
2037   llvm::Type *elementType = arrayBase.getElementType();
2038   llvm::Value *arrayBegin = arrayBase.emitRawPointer(*this);
2039   llvm::Value *arrayEnd = Builder.CreateInBoundsGEP(
2040       elementType, arrayBegin, numElements, "arrayctor.end");
2041 
2042   // Enter the loop, setting up a phi for the current location to initialize.
2043   llvm::BasicBlock *entryBB = Builder.GetInsertBlock();
2044   llvm::BasicBlock *loopBB = createBasicBlock("arrayctor.loop");
2045   EmitBlock(loopBB);
2046   llvm::PHINode *cur = Builder.CreatePHI(arrayBegin->getType(), 2,
2047                                          "arrayctor.cur");
2048   cur->addIncoming(arrayBegin, entryBB);
2049 
2050   // Inside the loop body, emit the constructor call on the array element.
2051   if (CGM.shouldEmitConvergenceTokens())
2052     ConvergenceTokenStack.push_back(emitConvergenceLoopToken(loopBB));
2053 
2054   // The alignment of the base, adjusted by the size of a single element,
2055   // provides a conservative estimate of the alignment of every element.
2056   // (This assumes we never start tracking offsetted alignments.)
2057   //
2058   // Note that these are complete objects and so we don't need to
2059   // use the non-virtual size or alignment.
2060   QualType type = getContext().getTypeDeclType(ctor->getParent());
2061   CharUnits eltAlignment =
2062     arrayBase.getAlignment()
2063              .alignmentOfArrayElement(getContext().getTypeSizeInChars(type));
2064   Address curAddr = Address(cur, elementType, eltAlignment);
2065 
2066   // Zero initialize the storage, if requested.
2067   if (zeroInitialize)
2068     EmitNullInitialization(curAddr, type);
2069 
2070   // C++ [class.temporary]p4:
2071   // There are two contexts in which temporaries are destroyed at a different
2072   // point than the end of the full-expression. The first context is when a
2073   // default constructor is called to initialize an element of an array.
2074   // If the constructor has one or more default arguments, the destruction of
2075   // every temporary created in a default argument expression is sequenced
2076   // before the construction of the next array element, if any.
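  // For example (illustrative only):
  //   struct S { S(); ~S(); };
  //   struct T { T(const S &s = S()); };
  //   T *p = new T[3];
  // Each temporary S is destroyed before the next element's constructor runs,
  // which is why each iteration gets its own RunCleanupsScope below.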
2077 
2078   {
2079     RunCleanupsScope Scope(*this);
2080 
2081     // Evaluate the constructor and its arguments in a regular
2082     // partial-destroy cleanup.
2083     if (getLangOpts().Exceptions &&
2084         !ctor->getParent()->hasTrivialDestructor()) {
2085       Destroyer *destroyer = destroyCXXObject;
2086       pushRegularPartialArrayCleanup(arrayBegin, cur, type, eltAlignment,
2087                                      *destroyer);
2088     }
2089     auto currAVS = AggValueSlot::forAddr(
2090         curAddr, type.getQualifiers(), AggValueSlot::IsDestructed,
2091         AggValueSlot::DoesNotNeedGCBarriers, AggValueSlot::IsNotAliased,
2092         AggValueSlot::DoesNotOverlap, AggValueSlot::IsNotZeroed,
2093         NewPointerIsChecked ? AggValueSlot::IsSanitizerChecked
2094                             : AggValueSlot::IsNotSanitizerChecked);
2095     EmitCXXConstructorCall(ctor, Ctor_Complete, /*ForVirtualBase=*/false,
2096                            /*Delegating=*/false, currAVS, E);
2097   }
2098 
2099   // Go to the next element.
2100   llvm::Value *next = Builder.CreateInBoundsGEP(
2101       elementType, cur, llvm::ConstantInt::get(SizeTy, 1), "arrayctor.next");
2102   cur->addIncoming(next, Builder.GetInsertBlock());
2103 
2104   // Check whether that's the end of the loop.
2105   llvm::Value *done = Builder.CreateICmpEQ(next, arrayEnd, "arrayctor.done");
2106   llvm::BasicBlock *contBB = createBasicBlock("arrayctor.cont");
2107   Builder.CreateCondBr(done, contBB, loopBB);
2108 
2109   // Patch the earlier check to skip over the loop.
2110   if (zeroCheckBranch) zeroCheckBranch->setSuccessor(0, contBB);
2111 
2112   if (CGM.shouldEmitConvergenceTokens())
2113     ConvergenceTokenStack.pop_back();
2114 
2115   EmitBlock(contBB);
2116 }
2117 
2118 void CodeGenFunction::destroyCXXObject(CodeGenFunction &CGF,
2119                                        Address addr,
2120                                        QualType type) {
2121   const RecordType *rtype = type->castAs<RecordType>();
2122   const CXXRecordDecl *record = cast<CXXRecordDecl>(rtype->getDecl());
2123   const CXXDestructorDecl *dtor = record->getDestructor();
2124   assert(!dtor->isTrivial());
2125   CGF.EmitCXXDestructorCall(dtor, Dtor_Complete, /*for vbase*/ false,
2126                             /*Delegating=*/false, addr, type);
2127 }
2128 
2129 void CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
2130                                              CXXCtorType Type,
2131                                              bool ForVirtualBase,
2132                                              bool Delegating,
2133                                              AggValueSlot ThisAVS,
2134                                              const CXXConstructExpr *E) {
2135   CallArgList Args;
2136   Address This = ThisAVS.getAddress();
2137   LangAS SlotAS = ThisAVS.getQualifiers().getAddressSpace();
2138   LangAS ThisAS = D->getFunctionObjectParameterType().getAddressSpace();
2139   llvm::Value *ThisPtr =
2140       getAsNaturalPointerTo(This, D->getThisType()->getPointeeType());
2141 
2142   if (SlotAS != ThisAS) {
2143     unsigned TargetThisAS = getContext().getTargetAddressSpace(ThisAS);
2144     llvm::Type *NewType =
2145         llvm::PointerType::get(getLLVMContext(), TargetThisAS);
2146     ThisPtr =
2147         getTargetHooks().performAddrSpaceCast(*this, ThisPtr, ThisAS, NewType);
2148   }
2149 
2150   // Push the this ptr.
2151   Args.add(RValue::get(ThisPtr), D->getThisType());
2152 
2153   // If this is a trivial constructor, emit a memcpy now before we lose
2154   // the alignment information on the argument.
2155   // FIXME: It would be better to preserve alignment information into CallArg.
2156   if (isMemcpyEquivalentSpecialMember(D)) {
2157     assert(E->getNumArgs() == 1 && "unexpected argcount for trivial ctor");
2158 
2159     const Expr *Arg = E->getArg(0);
2160     LValue Src = EmitLValue(Arg);
2161     QualType DestTy = getContext().getTypeDeclType(D->getParent());
2162     LValue Dest = MakeAddrLValue(This, DestTy);
2163     EmitAggregateCopyCtor(Dest, Src, ThisAVS.mayOverlap());
2164     return;
2165   }
2166 
2167   // Add the rest of the user-supplied arguments.
2168   const FunctionProtoType *FPT = D->getType()->castAs<FunctionProtoType>();
2169   EvaluationOrder Order = E->isListInitialization()
2170                               ? EvaluationOrder::ForceLeftToRight
2171                               : EvaluationOrder::Default;
2172   EmitCallArgs(Args, FPT, E->arguments(), E->getConstructor(),
2173                /*ParamsToSkip*/ 0, Order);
2174 
2175   EmitCXXConstructorCall(D, Type, ForVirtualBase, Delegating, This, Args,
2176                          ThisAVS.mayOverlap(), E->getExprLoc(),
2177                          ThisAVS.isSanitizerChecked());
2178 }
2179 
2180 static bool canEmitDelegateCallArgs(CodeGenFunction &CGF,
2181                                     const CXXConstructorDecl *Ctor,
2182                                     CXXCtorType Type, CallArgList &Args) {
2183   // We can't forward a variadic call.
2184   if (Ctor->isVariadic())
2185     return false;
2186 
2187   if (CGF.getTarget().getCXXABI().areArgsDestroyedLeftToRightInCallee()) {
2188     // If the parameters are callee-cleanup, it's not safe to forward.
2189     for (auto *P : Ctor->parameters())
2190       if (P->needsDestruction(CGF.getContext()))
2191         return false;
2192 
2193     // Likewise if they're inalloca.
2194     const CGFunctionInfo &Info =
2195         CGF.CGM.getTypes().arrangeCXXConstructorCall(Args, Ctor, Type, 0, 0);
2196     if (Info.usesInAlloca())
2197       return false;
2198   }
2199 
2200   // Anything else should be OK.
2201   return true;
2202 }
2203 
2204 void CodeGenFunction::EmitCXXConstructorCall(
2205     const CXXConstructorDecl *D, CXXCtorType Type, bool ForVirtualBase,
2206     bool Delegating, Address This, CallArgList &Args,
2207     AggValueSlot::Overlap_t Overlap, SourceLocation Loc,
2208     bool NewPointerIsChecked, llvm::CallBase **CallOrInvoke) {
2209   const CXXRecordDecl *ClassDecl = D->getParent();
2210 
2211   if (!NewPointerIsChecked)
2212     EmitTypeCheck(CodeGenFunction::TCK_ConstructorCall, Loc, This,
2213                   getContext().getRecordType(ClassDecl), CharUnits::Zero());
2214 
2215   if (D->isTrivial() && D->isDefaultConstructor()) {
2216     assert(Args.size() == 1 && "trivial default ctor with args");
2217     return;
2218   }
2219 
2220   // If this is a trivial constructor, just emit what's needed. If this is a
2221   // union copy constructor, we must emit a memcpy, because the AST does not
2222   // model that copy.
2223   if (isMemcpyEquivalentSpecialMember(D)) {
2224     assert(Args.size() == 2 && "unexpected argcount for trivial ctor");
2225     QualType SrcTy = D->getParamDecl(0)->getType().getNonReferenceType();
2226     Address Src = makeNaturalAddressForPointer(
2227         Args[1].getRValue(*this).getScalarVal(), SrcTy);
2228     LValue SrcLVal = MakeAddrLValue(Src, SrcTy);
2229     QualType DestTy = getContext().getTypeDeclType(ClassDecl);
2230     LValue DestLVal = MakeAddrLValue(This, DestTy);
2231     EmitAggregateCopyCtor(DestLVal, SrcLVal, Overlap);
2232     return;
2233   }
2234 
2235   bool PassPrototypeArgs = true;
2236   // Check whether we can actually emit the constructor before trying to do so.
2237   if (auto Inherited = D->getInheritedConstructor()) {
2238     PassPrototypeArgs = getTypes().inheritingCtorHasParams(Inherited, Type);
2239     if (PassPrototypeArgs && !canEmitDelegateCallArgs(*this, D, Type, Args)) {
2240       EmitInlinedInheritingCXXConstructorCall(D, Type, ForVirtualBase,
2241                                               Delegating, Args);
2242       return;
2243     }
2244   }
2245 
2246   // Insert any ABI-specific implicit constructor arguments.
2247   CGCXXABI::AddedStructorArgCounts ExtraArgs =
2248       CGM.getCXXABI().addImplicitConstructorArgs(*this, D, Type, ForVirtualBase,
2249                                                  Delegating, Args);
2250 
2251   // Emit the call.
2252   llvm::Constant *CalleePtr = CGM.getAddrOfCXXStructor(GlobalDecl(D, Type));
2253   const CGFunctionInfo &Info = CGM.getTypes().arrangeCXXConstructorCall(
2254       Args, D, Type, ExtraArgs.Prefix, ExtraArgs.Suffix, PassPrototypeArgs);
2255   CGCallee Callee = CGCallee::forDirect(CalleePtr, GlobalDecl(D, Type));
2256   EmitCall(Info, Callee, ReturnValueSlot(), Args, CallOrInvoke, false, Loc);
2257 
2258   // Generate vtable assumptions if we're constructing a complete object
2259   // with a vtable.  We don't do this for base subobjects for two reasons:
2260   // first, it's incorrect for classes with virtual bases, and second, we're
2261   // about to overwrite the vptrs anyway.
2262   // We also have to make sure we can refer to the vtable:
2263   // - We may only refer to the vtable if it is safe to speculatively emit it.
2264   // FIXME: If the vtable is used by a ctor/dtor, or if it is external and we
2265   // are sure its definition is not hidden, then we are always safe to refer
2266   // to it.
2267   // FIXME: It looks like InstCombine is very inefficient on dealing with
2268   // assumes. Make assumption loads require -fstrict-vtable-pointers temporarily.
2269   if (CGM.getCodeGenOpts().OptimizationLevel > 0 &&
2270       ClassDecl->isDynamicClass() && Type != Ctor_Base &&
2271       CGM.getCXXABI().canSpeculativelyEmitVTable(ClassDecl) &&
2272       CGM.getCodeGenOpts().StrictVTablePointers)
2273     EmitVTableAssumptionLoads(ClassDecl, This);
2274 }
2275 
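/// Emit a call to a constructor inherited via a using-declaration, e.g.
/// (illustrative only):
/// \code
///   struct Base { Base(int); };
///   struct Derived : Base { using Base::Base; };
/// \endcode
/// Constructing a Derived from an int reaches Base(int) through this path,
/// forwarding either the enclosing constructor's parameters or the arguments
/// captured from an inlined call.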
2276 void CodeGenFunction::EmitInheritedCXXConstructorCall(
2277     const CXXConstructorDecl *D, bool ForVirtualBase, Address This,
2278     bool InheritedFromVBase, const CXXInheritedCtorInitExpr *E) {
2279   CallArgList Args;
2280   CallArg ThisArg(RValue::get(getAsNaturalPointerTo(
2281                       This, D->getThisType()->getPointeeType())),
2282                   D->getThisType());
2283 
2284   // Forward the parameters.
2285   if (InheritedFromVBase &&
2286       CGM.getTarget().getCXXABI().hasConstructorVariants()) {
2287     // Nothing to do; this construction is not responsible for constructing
2288     // the base class containing the inherited constructor.
2289     // FIXME: Can we just pass undef's for the remaining arguments if we don't
2290     // have constructor variants?
2291     Args.push_back(ThisArg);
2292   } else if (!CXXInheritedCtorInitExprArgs.empty()) {
2293     // The inheriting constructor was inlined; just inject its arguments.
2294     assert(CXXInheritedCtorInitExprArgs.size() >= D->getNumParams() &&
2295            "wrong number of parameters for inherited constructor call");
2296     Args = CXXInheritedCtorInitExprArgs;
2297     Args[0] = ThisArg;
2298   } else {
2299     // The inheriting constructor was not inlined. Emit delegating arguments.
2300     Args.push_back(ThisArg);
2301     const auto *OuterCtor = cast<CXXConstructorDecl>(CurCodeDecl);
2302     assert(OuterCtor->getNumParams() == D->getNumParams());
2303     assert(!OuterCtor->isVariadic() && "should have been inlined");
2304 
2305     for (const auto *Param : OuterCtor->parameters()) {
2306       assert(getContext().hasSameUnqualifiedType(
2307           OuterCtor->getParamDecl(Param->getFunctionScopeIndex())->getType(),
2308           Param->getType()));
2309       EmitDelegateCallArg(Args, Param, E->getLocation());
2310 
2311       // Forward __attribute__((pass_object_size)).
2312       if (Param->hasAttr<PassObjectSizeAttr>()) {
2313         auto *POSParam = SizeArguments[Param];
2314         assert(POSParam && "missing pass_object_size value for forwarding");
2315         EmitDelegateCallArg(Args, POSParam, E->getLocation());
2316       }
2317     }
2318   }
2319 
2320   EmitCXXConstructorCall(D, Ctor_Base, ForVirtualBase, /*Delegating*/false,
2321                          This, Args, AggValueSlot::MayOverlap,
2322                          E->getLocation(), /*NewPointerIsChecked*/true);
2323 }
2324 
2325 void CodeGenFunction::EmitInlinedInheritingCXXConstructorCall(
2326     const CXXConstructorDecl *Ctor, CXXCtorType CtorType, bool ForVirtualBase,
2327     bool Delegating, CallArgList &Args) {
2328   GlobalDecl GD(Ctor, CtorType);
2329   InlinedInheritingConstructorScope Scope(*this, GD);
2330   ApplyInlineDebugLocation DebugScope(*this, GD);
2331   RunCleanupsScope RunCleanups(*this);
2332 
2333   // Save the arguments to be passed to the inherited constructor.
2334   CXXInheritedCtorInitExprArgs = Args;
2335 
2336   FunctionArgList Params;
2337   QualType RetType = BuildFunctionArgList(CurGD, Params);
2338   FnRetTy = RetType;
2339 
2340   // Insert any ABI-specific implicit constructor arguments.
2341   CGM.getCXXABI().addImplicitConstructorArgs(*this, Ctor, CtorType,
2342                                              ForVirtualBase, Delegating, Args);
2343 
2344   // Emit a simplified prolog. We only need to emit the implicit params.
2345   assert(Args.size() >= Params.size() && "too few arguments for call");
2346   for (unsigned I = 0, N = Args.size(); I != N; ++I) {
2347     if (I < Params.size() && isa<ImplicitParamDecl>(Params[I])) {
2348       const RValue &RV = Args[I].getRValue(*this);
2349       assert(!RV.isComplex() && "complex indirect params not supported");
2350       ParamValue Val = RV.isScalar()
2351                            ? ParamValue::forDirect(RV.getScalarVal())
2352                            : ParamValue::forIndirect(RV.getAggregateAddress());
2353       EmitParmDecl(*Params[I], Val, I + 1);
2354     }
2355   }
2356 
2357   // Create a return value slot if the ABI implementation wants one.
2358   // FIXME: This is dumb, we should ask the ABI not to try to set the return
2359   // value instead.
2360   if (!RetType->isVoidType())
2361     ReturnValue = CreateIRTemp(RetType, "retval.inhctor");
2362 
2363   CGM.getCXXABI().EmitInstanceFunctionProlog(*this);
2364   CXXThisValue = CXXABIThisValue;
2365 
2366   // Directly emit the constructor initializers.
2367   EmitCtorPrologue(Ctor, CtorType, Params);
2368 }
2369 
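/// Emit an llvm.assume that the vtable pointer of the just-constructed
/// subobject equals its known vtable address point; under
/// -fstrict-vtable-pointers this lets the optimizer devirtualize later calls.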
2370 void CodeGenFunction::EmitVTableAssumptionLoad(const VPtr &Vptr, Address This) {
2371   llvm::Value *VTableGlobal =
2372       CGM.getCXXABI().getVTableAddressPoint(Vptr.Base, Vptr.VTableClass);
2373   if (!VTableGlobal)
2374     return;
2375 
2376   // We can just use the base offset in the complete class.
2377   CharUnits NonVirtualOffset = Vptr.Base.getBaseOffset();
2378 
2379   if (!NonVirtualOffset.isZero())
2380     This =
2381         ApplyNonVirtualAndVirtualOffset(*this, This, NonVirtualOffset, nullptr,
2382                                         Vptr.VTableClass, Vptr.NearestVBase);
2383 
2384   llvm::Value *VPtrValue =
2385       GetVTablePtr(This, VTableGlobal->getType(), Vptr.VTableClass);
2386   llvm::Value *Cmp =
2387       Builder.CreateICmpEQ(VPtrValue, VTableGlobal, "cmp.vtables");
2388   Builder.CreateAssumption(Cmp);
2389 }
2390 
2391 void CodeGenFunction::EmitVTableAssumptionLoads(const CXXRecordDecl *ClassDecl,
2392                                                 Address This) {
2393   if (CGM.getCXXABI().doStructorsInitializeVPtrs(ClassDecl))
2394     for (const VPtr &Vptr : getVTablePointers(ClassDecl))
2395       EmitVTableAssumptionLoad(Vptr, This);
2396 }
2397 
2398 void
2399 CodeGenFunction::EmitSynthesizedCXXCopyCtorCall(const CXXConstructorDecl *D,
2400                                                 Address This, Address Src,
2401                                                 const CXXConstructExpr *E) {
2402   const FunctionProtoType *FPT = D->getType()->castAs<FunctionProtoType>();
2403 
2404   CallArgList Args;
2405 
2406   // Push the this ptr.
2407   Args.add(RValue::get(getAsNaturalPointerTo(This, D->getThisType())),
2408            D->getThisType());
2409 
2410   // Push the src ptr.
2411   QualType QT = *(FPT->param_type_begin());
2412   llvm::Type *t = CGM.getTypes().ConvertType(QT);
2413   llvm::Value *Val = getAsNaturalPointerTo(Src, D->getThisType());
2414   llvm::Value *SrcVal = Builder.CreateBitCast(Val, t);
2415   Args.add(RValue::get(SrcVal), QT);
2416 
2417   // Skip over first argument (Src).
2418   EmitCallArgs(Args, FPT, drop_begin(E->arguments(), 1), E->getConstructor(),
2419                /*ParamsToSkip*/ 1);
2420 
2421   EmitCXXConstructorCall(D, Ctor_Complete, /*ForVirtualBase*/false,
2422                          /*Delegating*/false, This, Args,
2423                          AggValueSlot::MayOverlap, E->getExprLoc(),
2424                          /*NewPointerIsChecked*/false);
2425 }
2426 
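/// Emit a call from the current constructor variant to another variant of the
/// same constructor (e.g. the complete-object variant delegating to the
/// base-object variant), forwarding the incoming arguments unchanged.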
2427 void
2428 CodeGenFunction::EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
2429                                                 CXXCtorType CtorType,
2430                                                 const FunctionArgList &Args,
2431                                                 SourceLocation Loc) {
2432   CallArgList DelegateArgs;
2433 
2434   FunctionArgList::const_iterator I = Args.begin(), E = Args.end();
2435   assert(I != E && "no parameters to constructor");
2436 
2437   // this
2438   Address This = LoadCXXThisAddress();
2439   DelegateArgs.add(RValue::get(getAsNaturalPointerTo(
2440                        This, (*I)->getType()->getPointeeType())),
2441                    (*I)->getType());
2442   ++I;
2443 
2444   // FIXME: The location of the VTT parameter in the parameter list is
2445   // specific to the Itanium ABI and shouldn't be hardcoded here.
2446   if (CGM.getCXXABI().NeedsVTTParameter(CurGD)) {
2447     assert(I != E && "cannot skip vtt parameter, already done with args");
2448     assert((*I)->getType()->isPointerType() &&
2449            "skipping parameter not of vtt type");
2450     ++I;
2451   }
2452 
2453   // Explicit arguments.
2454   for (; I != E; ++I) {
2455     const VarDecl *param = *I;
2456     // FIXME: per-argument source location
2457     EmitDelegateCallArg(DelegateArgs, param, Loc);
2458   }
2459 
2460   EmitCXXConstructorCall(Ctor, CtorType, /*ForVirtualBase=*/false,
2461                          /*Delegating=*/true, This, DelegateArgs,
2462                          AggValueSlot::MayOverlap, Loc,
2463                          /*NewPointerIsChecked=*/true);
2464 }
2465 
2466 namespace {
2467   struct CallDelegatingCtorDtor final : EHScopeStack::Cleanup {
2468     const CXXDestructorDecl *Dtor;
2469     Address Addr;
2470     CXXDtorType Type;
2471 
2472     CallDelegatingCtorDtor(const CXXDestructorDecl *D, Address Addr,
2473                            CXXDtorType Type)
2474       : Dtor(D), Addr(Addr), Type(Type) {}
2475 
2476     void Emit(CodeGenFunction &CGF, Flags flags) override {
2477       // We are calling the destructor from within the constructor.
2478       // Therefore, "this" should have the expected type.
2479       QualType ThisTy = Dtor->getFunctionObjectParameterType();
2480       CGF.EmitCXXDestructorCall(Dtor, Type, /*ForVirtualBase=*/false,
2481                                 /*Delegating=*/true, Addr, ThisTy);
2482     }
2483   };
2484 } // end anonymous namespace
2485 
2486 void
2487 CodeGenFunction::EmitDelegatingCXXConstructorCall(const CXXConstructorDecl *Ctor,
2488                                                   const FunctionArgList &Args) {
2489   assert(Ctor->isDelegatingConstructor());
2490 
2491   Address ThisPtr = LoadCXXThisAddress();
2492 
2493   AggValueSlot AggSlot =
2494     AggValueSlot::forAddr(ThisPtr, Qualifiers(),
2495                           AggValueSlot::IsDestructed,
2496                           AggValueSlot::DoesNotNeedGCBarriers,
2497                           AggValueSlot::IsNotAliased,
2498                           AggValueSlot::MayOverlap,
2499                           AggValueSlot::IsNotZeroed,
2500                           // Checks are made by the code that calls constructor.
2501                           // Checks are made by the code that calls the constructor.
2502 
2503   EmitAggExpr(Ctor->init_begin()[0]->getInit(), AggSlot);
2504 
2505   const CXXRecordDecl *ClassDecl = Ctor->getParent();
2506   if (CGM.getLangOpts().Exceptions && !ClassDecl->hasTrivialDestructor()) {
2507     CXXDtorType Type =
2508       CurGD.getCtorType() == Ctor_Complete ? Dtor_Complete : Dtor_Base;
2509 
2510     EHStack.pushCleanup<CallDelegatingCtorDtor>(EHCleanup,
2511                                                 ClassDecl->getDestructor(),
2512                                                 ThisPtr, Type);
2513   }
2514 }
2515 
2516 void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *DD,
2517                                             CXXDtorType Type,
2518                                             bool ForVirtualBase,
2519                                             bool Delegating, Address This,
2520                                             QualType ThisTy) {
2521   CGM.getCXXABI().EmitDestructorCall(*this, DD, Type, ForVirtualBase,
2522                                      Delegating, This, ThisTy);
2523 }
2524 
2525 namespace {
2526   struct CallLocalDtor final : EHScopeStack::Cleanup {
2527     const CXXDestructorDecl *Dtor;
2528     Address Addr;
2529     QualType Ty;
2530 
2531     CallLocalDtor(const CXXDestructorDecl *D, Address Addr, QualType Ty)
2532         : Dtor(D), Addr(Addr), Ty(Ty) {}
2533 
2534     void Emit(CodeGenFunction &CGF, Flags flags) override {
2535       CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete,
2536                                 /*ForVirtualBase=*/false,
2537                                 /*Delegating=*/false, Addr, Ty);
2538     }
2539   };
2540 } // end anonymous namespace
2541 
2542 void CodeGenFunction::PushDestructorCleanup(const CXXDestructorDecl *D,
2543                                             QualType T, Address Addr) {
2544   EHStack.pushCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr, T);
2545 }
2546 
2547 void CodeGenFunction::PushDestructorCleanup(QualType T, Address Addr) {
2548   CXXRecordDecl *ClassDecl = T->getAsCXXRecordDecl();
2549   if (!ClassDecl) return;
2550   if (ClassDecl->hasTrivialDestructor()) return;
2551 
2552   const CXXDestructorDecl *D = ClassDecl->getDestructor();
2553   assert(D && D->isUsed() && "destructor not marked as used!");
2554   PushDestructorCleanup(D, T, Addr);
2555 }
2556 
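/// Store the address point of the appropriate vtable into the vtable-pointer
/// slot of the subobject described by \p Vptr, applying any non-virtual and
/// virtual offsets needed to reach that subobject from `this`.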
2557 void CodeGenFunction::InitializeVTablePointer(const VPtr &Vptr) {
2558   // Compute the address point.
2559   llvm::Value *VTableAddressPoint =
2560       CGM.getCXXABI().getVTableAddressPointInStructor(
2561           *this, Vptr.VTableClass, Vptr.Base, Vptr.NearestVBase);
2562 
2563   if (!VTableAddressPoint)
2564     return;
2565 
2566   // Compute where to store the address point.
2567   llvm::Value *VirtualOffset = nullptr;
2568   CharUnits NonVirtualOffset = CharUnits::Zero();
2569 
2570   if (CGM.getCXXABI().isVirtualOffsetNeededForVTableField(*this, Vptr)) {
2571     // We need to use the virtual base offset offset because the virtual base
2572     // might have a different offset in the most derived class.
2573 
2574     VirtualOffset = CGM.getCXXABI().GetVirtualBaseClassOffset(
2575         *this, LoadCXXThisAddress(), Vptr.VTableClass, Vptr.NearestVBase);
2576     NonVirtualOffset = Vptr.OffsetFromNearestVBase;
2577   } else {
2578     // We can just use the base offset in the complete class.
2579     NonVirtualOffset = Vptr.Base.getBaseOffset();
2580   }
2581 
2582   // Apply the offsets.
2583   Address VTableField = LoadCXXThisAddress();
2584   if (!NonVirtualOffset.isZero() || VirtualOffset)
2585     VTableField = ApplyNonVirtualAndVirtualOffset(
2586         *this, VTableField, NonVirtualOffset, VirtualOffset, Vptr.VTableClass,
2587         Vptr.NearestVBase);
2588 
2589   // Finally, store the address point. Use the same LLVM types as the field to
2590   // support optimization.
2591   unsigned GlobalsAS = CGM.getDataLayout().getDefaultGlobalsAddressSpace();
2592   llvm::Type *PtrTy = llvm::PointerType::get(CGM.getLLVMContext(), GlobalsAS);
2593   // The vtable field is derived from the `this` pointer, so they should be in
2594   // the same address space. Note that this might not be LLVM address space 0.
2595   VTableField = VTableField.withElementType(PtrTy);
2596 
2597   if (auto AuthenticationInfo = CGM.getVTablePointerAuthInfo(
2598           this, Vptr.Base.getBase(), VTableField.emitRawPointer(*this)))
2599     VTableAddressPoint =
2600         EmitPointerAuthSign(*AuthenticationInfo, VTableAddressPoint);
2601 
2602   llvm::StoreInst *Store = Builder.CreateStore(VTableAddressPoint, VTableField);
2603   TBAAAccessInfo TBAAInfo = CGM.getTBAAVTablePtrAccessInfo(PtrTy);
2604   CGM.DecorateInstructionWithTBAA(Store, TBAAInfo);
2605   if (CGM.getCodeGenOpts().OptimizationLevel > 0 &&
2606       CGM.getCodeGenOpts().StrictVTablePointers)
2607     CGM.DecorateInstructionWithInvariantGroup(Store, Vptr.VTableClass);
2608 }
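
// Roughly, for each vptr the function above emits IR of the following shape
// (a sketch; offsets, address spaces and pointer-auth signing depend on the
// target and the class layout):
//   %vptr.addr = getelementptr inbounds i8, ptr %this, i64 <offset>
//   store ptr <vtable address point>, ptr %vptr.addr
// with vtable-pointer TBAA attached and, when optimizing with
// -fstrict-vtable-pointers, an !invariant.group annotation on the store.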
2609 
2610 CodeGenFunction::VPtrsVector
2611 CodeGenFunction::getVTablePointers(const CXXRecordDecl *VTableClass) {
2612   CodeGenFunction::VPtrsVector VPtrsResult;
2613   VisitedVirtualBasesSetTy VBases;
2614   getVTablePointers(BaseSubobject(VTableClass, CharUnits::Zero()),
2615                     /*NearestVBase=*/nullptr,
2616                     /*OffsetFromNearestVBase=*/CharUnits::Zero(),
2617                     /*BaseIsNonVirtualPrimaryBase=*/false, VTableClass, VBases,
2618                     VPtrsResult);
2619   return VPtrsResult;
2620 }
2621 
2622 void CodeGenFunction::getVTablePointers(BaseSubobject Base,
2623                                         const CXXRecordDecl *NearestVBase,
2624                                         CharUnits OffsetFromNearestVBase,
2625                                         bool BaseIsNonVirtualPrimaryBase,
2626                                         const CXXRecordDecl *VTableClass,
2627                                         VisitedVirtualBasesSetTy &VBases,
2628                                         VPtrsVector &Vptrs) {
2629   // If this base is a non-virtual primary base the address point has already
2630   // been set.
2631   if (!BaseIsNonVirtualPrimaryBase) {
2632     // Initialize the vtable pointer for this base.
2633     VPtr Vptr = {Base, NearestVBase, OffsetFromNearestVBase, VTableClass};
2634     Vptrs.push_back(Vptr);
2635   }
2636 
2637   const CXXRecordDecl *RD = Base.getBase();
2638 
2639   // Traverse bases.
2640   for (const auto &I : RD->bases()) {
2641     auto *BaseDecl =
2642         cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());
2643 
2644     // Ignore classes without a vtable.
2645     if (!BaseDecl->isDynamicClass())
2646       continue;
2647 
2648     CharUnits BaseOffset;
2649     CharUnits BaseOffsetFromNearestVBase;
2650     bool BaseDeclIsNonVirtualPrimaryBase;
2651 
2652     if (I.isVirtual()) {
2653       // Check if we've visited this virtual base before.
2654       if (!VBases.insert(BaseDecl).second)
2655         continue;
2656 
2657       const ASTRecordLayout &Layout =
2658         getContext().getASTRecordLayout(VTableClass);
2659 
2660       BaseOffset = Layout.getVBaseClassOffset(BaseDecl);
2661       BaseOffsetFromNearestVBase = CharUnits::Zero();
2662       BaseDeclIsNonVirtualPrimaryBase = false;
2663     } else {
2664       const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);
2665 
2666       BaseOffset = Base.getBaseOffset() + Layout.getBaseClassOffset(BaseDecl);
2667       BaseOffsetFromNearestVBase =
2668         OffsetFromNearestVBase + Layout.getBaseClassOffset(BaseDecl);
2669       BaseDeclIsNonVirtualPrimaryBase = Layout.getPrimaryBase() == BaseDecl;
2670     }
2671 
2672     getVTablePointers(
2673         BaseSubobject(BaseDecl, BaseOffset),
2674         I.isVirtual() ? BaseDecl : NearestVBase, BaseOffsetFromNearestVBase,
2675         BaseDeclIsNonVirtualPrimaryBase, VTableClass, VBases, Vptrs);
2676   }
2677 }
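
// For example (hypothetical types, Itanium C++ ABI), given the diamond
//   struct A { virtual ~A(); int x; };
//   struct B : virtual A { };
//   struct C : virtual A { };
//   struct D : B, C { };
// the traversal for D collects vptr entries for D itself (B, being D's
// non-virtual primary base, shares that vptr and gets no entry of its own),
// for C, and for the virtual base A exactly once; the VBases set prevents A
// from being revisited along the second inheritance path.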
2678 
2679 void CodeGenFunction::InitializeVTablePointers(const CXXRecordDecl *RD) {
2680   // Ignore classes without a vtable.
2681   if (!RD->isDynamicClass())
2682     return;
2683 
2684   // Initialize the vtable pointers for this class and all of its bases.
2685   if (CGM.getCXXABI().doStructorsInitializeVPtrs(RD))
2686     for (const VPtr &Vptr : getVTablePointers(RD))
2687       InitializeVTablePointer(Vptr);
2688 
2689   if (RD->getNumVBases())
2690     CGM.getCXXABI().initializeHiddenVirtualInheritanceMembers(*this, RD);
2691 }
2692 
2693 llvm::Value *CodeGenFunction::GetVTablePtr(Address This,
2694                                            llvm::Type *VTableTy,
2695                                            const CXXRecordDecl *RD,
2696                                            VTableAuthMode AuthMode) {
2697   Address VTablePtrSrc = This.withElementType(VTableTy);
2698   llvm::Instruction *VTable = Builder.CreateLoad(VTablePtrSrc, "vtable");
2699   TBAAAccessInfo TBAAInfo = CGM.getTBAAVTablePtrAccessInfo(VTableTy);
2700   CGM.DecorateInstructionWithTBAA(VTable, TBAAInfo);
2701 
2702   if (auto AuthenticationInfo =
2703           CGM.getVTablePointerAuthInfo(this, RD, This.emitRawPointer(*this))) {
2704     if (AuthMode != VTableAuthMode::UnsafeUbsanStrip) {
2705       VTable = cast<llvm::Instruction>(
2706           EmitPointerAuthAuth(*AuthenticationInfo, VTable));
2707       if (AuthMode == VTableAuthMode::MustTrap) {
2708         // This is clearly suboptimal, but until we can rely on the
2709         // authentication intrinsic trapping, and can force an authentication
2710         // to occur, we don't really have a choice.
2711         VTable =
2712             cast<llvm::Instruction>(Builder.CreateBitCast(VTable, Int8PtrTy));
2713         Builder.CreateLoad(RawAddress(VTable, Int8Ty, CGM.getPointerAlign()),
2714                            /* IsVolatile */ true);
2715       }
2716     } else {
2717       VTable = cast<llvm::Instruction>(EmitPointerAuthAuth(
2718           CGPointerAuthInfo(0, PointerAuthenticationMode::Strip, false, false,
2719                             nullptr),
2720           VTable));
2721     }
2722   }
2723 
2724   if (CGM.getCodeGenOpts().OptimizationLevel > 0 &&
2725       CGM.getCodeGenOpts().StrictVTablePointers)
2726     CGM.DecorateInstructionWithInvariantGroup(VTable, RD);
2727 
2728   return VTable;
2729 }
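
// A note on the VTableAuthMode handling above: by default the loaded vtable
// pointer is authenticated; VTableAuthMode::UnsafeUbsanStrip only strips the
// signature (for checks that must not themselves trap), and
// VTableAuthMode::MustTrap additionally issues a volatile load through the
// authenticated pointer so that a failed authentication reliably faults.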
2730 
2731 // If a class has a single non-virtual base and does not introduce or override
2732 // virtual member functions or fields, it will have the same layout as its base.
2733 // This function returns the least derived such class.
2734 //
2735 // Casting an instance of a base class to such a derived class is technically
2736 // undefined behavior, but it is a relatively common hack for introducing member
2737 // functions on class instances with specific properties (e.g. llvm::Operator)
2738 // that works under most compilers and should not have security implications, so
2739 // we allow it by default. It can be disabled with -fsanitize=cfi-cast-strict.
2740 static const CXXRecordDecl *
2741 LeastDerivedClassWithSameLayout(const CXXRecordDecl *RD) {
2742   if (!RD->field_empty())
2743     return RD;
2744 
2745   if (RD->getNumVBases() != 0)
2746     return RD;
2747 
2748   if (RD->getNumBases() != 1)
2749     return RD;
2750 
2751   for (const CXXMethodDecl *MD : RD->methods()) {
2752     if (MD->isVirtual()) {
2753       // Virtual member functions are only ok if they are implicit destructors
2754       // because the implicit destructor will have the same semantics as the
2755       // base class's destructor if no fields are added.
2756       if (isa<CXXDestructorDecl>(MD) && MD->isImplicit())
2757         continue;
2758       return RD;
2759     }
2760   }
2761 
2762   return LeastDerivedClassWithSameLayout(
2763       RD->bases_begin()->getType()->getAsCXXRecordDecl());
2764 }
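
// For example (hypothetical types), given
//   struct Base    { virtual ~Base(); int n; };
//   struct Wrapper : Base { int get() const { return n; } };
// Wrapper adds no fields and no virtual functions beyond the implicit
// destructor, so LeastDerivedClassWithSameLayout(Wrapper) returns Base; the
// CFI checks that call this helper therefore treat a Wrapper* like a Base*
// unless -fsanitize=cfi-cast-strict is in effect.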
2765 
2766 void CodeGenFunction::EmitTypeMetadataCodeForVCall(const CXXRecordDecl *RD,
2767                                                    llvm::Value *VTable,
2768                                                    SourceLocation Loc) {
2769   if (SanOpts.has(SanitizerKind::CFIVCall))
2770     EmitVTablePtrCheckForCall(RD, VTable, CodeGenFunction::CFITCK_VCall, Loc);
2771   else if (CGM.getCodeGenOpts().WholeProgramVTables &&
2772            // Don't insert type test assumes if we are forcing public
2773            // visibility.
2774            !CGM.AlwaysHasLTOVisibilityPublic(RD)) {
2775     QualType Ty = QualType(RD->getTypeForDecl(), 0);
2776     llvm::Metadata *MD = CGM.CreateMetadataIdentifierForType(Ty);
2777     llvm::Value *TypeId =
2778         llvm::MetadataAsValue::get(CGM.getLLVMContext(), MD);
2779 
2780     // If we already know that the call has hidden LTO visibility, emit
2781     // @llvm.type.test(). Otherwise emit @llvm.public.type.test(), which WPD
2782     // will convert to @llvm.type.test() if we assert at link time that we have
2783     // whole program visibility.
2784     llvm::Intrinsic::ID IID = CGM.HasHiddenLTOVisibility(RD)
2785                                   ? llvm::Intrinsic::type_test
2786                                   : llvm::Intrinsic::public_type_test;
2787     llvm::Value *TypeTest =
2788         Builder.CreateCall(CGM.getIntrinsic(IID), {VTable, TypeId});
2789     Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::assume), TypeTest);
2790   }
2791 }
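
// In the WholeProgramVTables branch above, the emitted IR is roughly
//   %ok = call i1 @llvm.public.type.test(ptr %vtable, metadata !"<type id>")
//   call void @llvm.assume(i1 %ok)
// with @llvm.type.test used instead when RD is already known to have hidden
// LTO visibility, so whole-program devirtualization can key on the type id.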
2792 
2793 /// Converts the CFITypeCheckKind into SanitizerKind::SanitizerOrdinal and
2794 /// llvm::SanitizerStatKind.
2795 static std::pair<SanitizerKind::SanitizerOrdinal, llvm::SanitizerStatKind>
2796 SanitizerInfoFromCFICheckKind(CodeGenFunction::CFITypeCheckKind TCK) {
2797   switch (TCK) {
2798   case CodeGenFunction::CFITCK_VCall:
2799     return std::make_pair(SanitizerKind::SO_CFIVCall, llvm::SanStat_CFI_VCall);
2800   case CodeGenFunction::CFITCK_NVCall:
2801     return std::make_pair(SanitizerKind::SO_CFINVCall,
2802                           llvm::SanStat_CFI_NVCall);
2803   case CodeGenFunction::CFITCK_DerivedCast:
2804     return std::make_pair(SanitizerKind::SO_CFIDerivedCast,
2805                           llvm::SanStat_CFI_DerivedCast);
2806   case CodeGenFunction::CFITCK_UnrelatedCast:
2807     return std::make_pair(SanitizerKind::SO_CFIUnrelatedCast,
2808                           llvm::SanStat_CFI_UnrelatedCast);
2809   case CodeGenFunction::CFITCK_ICall:
2810   case CodeGenFunction::CFITCK_NVMFCall:
2811   case CodeGenFunction::CFITCK_VMFCall:
2812     llvm_unreachable("unexpected sanitizer kind");
2813   }
2814   llvm_unreachable("Unknown CFITypeCheckKind enum");
2815 }
2816 
2817 void CodeGenFunction::EmitVTablePtrCheckForCall(const CXXRecordDecl *RD,
2818                                                 llvm::Value *VTable,
2819                                                 CFITypeCheckKind TCK,
2820                                                 SourceLocation Loc) {
2821   if (!SanOpts.has(SanitizerKind::CFICastStrict))
2822     RD = LeastDerivedClassWithSameLayout(RD);
2823 
2824   auto [Ordinal, _] = SanitizerInfoFromCFICheckKind(TCK);
2825   SanitizerDebugLocation SanScope(this, {Ordinal},
2826                                   SanitizerHandler::CFICheckFail);
2827 
2828   EmitVTablePtrCheck(RD, VTable, TCK, Loc);
2829 }
2830 
2831 void CodeGenFunction::EmitVTablePtrCheckForCast(QualType T, Address Derived,
2832                                                 bool MayBeNull,
2833                                                 CFITypeCheckKind TCK,
2834                                                 SourceLocation Loc) {
2835   if (!getLangOpts().CPlusPlus)
2836     return;
2837 
2838   auto *ClassTy = T->getAs<RecordType>();
2839   if (!ClassTy)
2840     return;
2841 
2842   const CXXRecordDecl *ClassDecl = cast<CXXRecordDecl>(ClassTy->getDecl());
2843 
2844   if (!ClassDecl->isCompleteDefinition() || !ClassDecl->isDynamicClass())
2845     return;
2846 
2847   if (!SanOpts.has(SanitizerKind::CFICastStrict))
2848     ClassDecl = LeastDerivedClassWithSameLayout(ClassDecl);
2849 
2850   auto [Ordinal, _] = SanitizerInfoFromCFICheckKind(TCK);
2851   SanitizerDebugLocation SanScope(this, {Ordinal},
2852                                   SanitizerHandler::CFICheckFail);
2853 
2854   llvm::BasicBlock *ContBlock = nullptr;
2855 
2856   if (MayBeNull) {
2857     llvm::Value *DerivedNotNull =
2858         Builder.CreateIsNotNull(Derived.emitRawPointer(*this), "cast.nonnull");
2859 
2860     llvm::BasicBlock *CheckBlock = createBasicBlock("cast.check");
2861     ContBlock = createBasicBlock("cast.cont");
2862 
2863     Builder.CreateCondBr(DerivedNotNull, CheckBlock, ContBlock);
2864 
2865     EmitBlock(CheckBlock);
2866   }
2867 
2868   llvm::Value *VTable;
2869   std::tie(VTable, ClassDecl) =
2870       CGM.getCXXABI().LoadVTablePtr(*this, Derived, ClassDecl);
2871 
2872   EmitVTablePtrCheck(ClassDecl, VTable, TCK, Loc);
2873 
2874   if (MayBeNull) {
2875     Builder.CreateBr(ContBlock);
2876     EmitBlock(ContBlock);
2877   }
2878 }
2879 
2880 void CodeGenFunction::EmitVTablePtrCheck(const CXXRecordDecl *RD,
2881                                          llvm::Value *VTable,
2882                                          CFITypeCheckKind TCK,
2883                                          SourceLocation Loc) {
2884   assert(IsSanitizerScope);
2885 
2886   if (!CGM.getCodeGenOpts().SanitizeCfiCrossDso &&
2887       !CGM.HasHiddenLTOVisibility(RD))
2888     return;
2889 
2890   auto [M, SSK] = SanitizerInfoFromCFICheckKind(TCK);
2891 
2892   std::string TypeName = RD->getQualifiedNameAsString();
2893   if (getContext().getNoSanitizeList().containsType(
2894           SanitizerMask::bitPosToMask(M), TypeName))
2895     return;
2896 
2897   EmitSanitizerStatReport(SSK);
2898 
2899   llvm::Metadata *MD =
2900       CGM.CreateMetadataIdentifierForType(QualType(RD->getTypeForDecl(), 0));
2901   llvm::Value *TypeId = llvm::MetadataAsValue::get(getLLVMContext(), MD);
2902 
2903   llvm::Value *TypeTest = Builder.CreateCall(
2904       CGM.getIntrinsic(llvm::Intrinsic::type_test), {VTable, TypeId});
2905 
2906   llvm::Constant *StaticData[] = {
2907       llvm::ConstantInt::get(Int8Ty, TCK),
2908       EmitCheckSourceLocation(Loc),
2909       EmitCheckTypeDescriptor(QualType(RD->getTypeForDecl(), 0)),
2910   };
2911 
2912   auto CrossDsoTypeId = CGM.CreateCrossDsoCfiTypeId(MD);
2913   if (CGM.getCodeGenOpts().SanitizeCfiCrossDso && CrossDsoTypeId) {
2914     EmitCfiSlowPathCheck(M, TypeTest, CrossDsoTypeId, VTable, StaticData);
2915     return;
2916   }
2917 
2918   if (CGM.getCodeGenOpts().SanitizeTrap.has(M)) {
2919     bool NoMerge = !CGM.getCodeGenOpts().SanitizeMergeHandlers.has(M);
2920     EmitTrapCheck(TypeTest, SanitizerHandler::CFICheckFail, NoMerge);
2921     return;
2922   }
2923 
2924   llvm::Value *AllVtables = llvm::MetadataAsValue::get(
2925       CGM.getLLVMContext(),
2926       llvm::MDString::get(CGM.getLLVMContext(), "all-vtables"));
2927   llvm::Value *ValidVtable = Builder.CreateCall(
2928       CGM.getIntrinsic(llvm::Intrinsic::type_test), {VTable, AllVtables});
2929   EmitCheck(std::make_pair(TypeTest, M), SanitizerHandler::CFICheckFail,
2930             StaticData, {VTable, ValidVtable});
2931 }
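
// A sketch of what the non-cross-DSO, non-trapping path above produces:
//   %ok  = call i1 @llvm.type.test(ptr %vtable, metadata !"<type id>")
//   %all = call i1 @llvm.type.test(ptr %vtable, metadata !"all-vtables")
// followed by an EmitCheck branch to the CFI check-fail handler when %ok is
// false; the handler receives the vtable pointer and %all so the runtime
// diagnostic can distinguish a completely invalid vtable from a vtable of
// the wrong type.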
2932 
2933 bool CodeGenFunction::ShouldEmitVTableTypeCheckedLoad(const CXXRecordDecl *RD) {
2934   if (!CGM.getCodeGenOpts().WholeProgramVTables ||
2935       !CGM.HasHiddenLTOVisibility(RD))
2936     return false;
2937 
2938   if (CGM.getCodeGenOpts().VirtualFunctionElimination)
2939     return true;
2940 
2941   if (!SanOpts.has(SanitizerKind::CFIVCall) ||
2942       !CGM.getCodeGenOpts().SanitizeTrap.has(SanitizerKind::CFIVCall))
2943     return false;
2944 
2945   std::string TypeName = RD->getQualifiedNameAsString();
2946   return !getContext().getNoSanitizeList().containsType(SanitizerKind::CFIVCall,
2947                                                         TypeName);
2948 }
2949 
2950 llvm::Value *CodeGenFunction::EmitVTableTypeCheckedLoad(
2951     const CXXRecordDecl *RD, llvm::Value *VTable, llvm::Type *VTableTy,
2952     uint64_t VTableByteOffset) {
2953   auto CheckOrdinal = SanitizerKind::SO_CFIVCall;
2954   auto CheckHandler = SanitizerHandler::CFICheckFail;
2955   SanitizerDebugLocation SanScope(this, {CheckOrdinal}, CheckHandler);
2956 
2957   EmitSanitizerStatReport(llvm::SanStat_CFI_VCall);
2958 
2959   llvm::Metadata *MD =
2960       CGM.CreateMetadataIdentifierForType(QualType(RD->getTypeForDecl(), 0));
2961   llvm::Value *TypeId = llvm::MetadataAsValue::get(CGM.getLLVMContext(), MD);
2962 
2963   auto CheckedLoadIntrinsic = CGM.getVTables().useRelativeLayout()
2964                                   ? llvm::Intrinsic::type_checked_load_relative
2965                                   : llvm::Intrinsic::type_checked_load;
2966   llvm::Value *CheckedLoad = Builder.CreateCall(
2967       CGM.getIntrinsic(CheckedLoadIntrinsic),
2968       {VTable, llvm::ConstantInt::get(Int32Ty, VTableByteOffset), TypeId});
2969 
2970   llvm::Value *CheckResult = Builder.CreateExtractValue(CheckedLoad, 1);
2971 
2972   std::string TypeName = RD->getQualifiedNameAsString();
2973   if (SanOpts.has(SanitizerKind::CFIVCall) &&
2974       !getContext().getNoSanitizeList().containsType(SanitizerKind::CFIVCall,
2975                                                      TypeName)) {
2976     EmitCheck(std::make_pair(CheckResult, CheckOrdinal), CheckHandler, {}, {});
2977   }
2978 
2979   return Builder.CreateBitCast(Builder.CreateExtractValue(CheckedLoad, 0),
2980                                VTableTy);
2981 }
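
// The type.checked.load intrinsics used above return a { pointer, i1 } pair:
// element 0 is the virtual function pointer loaded from the vtable (which
// whole-program devirtualization or virtual function elimination may later
// rewrite), and element 1 is the type-test result that feeds the CFI check
// emitted via EmitCheck.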
2982 
2983 void CodeGenFunction::EmitForwardingCallToLambda(
2984     const CXXMethodDecl *callOperator, CallArgList &callArgs,
2985     const CGFunctionInfo *calleeFnInfo, llvm::Constant *calleePtr) {
2986   // Get the address of the call operator.
2987   if (!calleeFnInfo)
2988     calleeFnInfo = &CGM.getTypes().arrangeCXXMethodDeclaration(callOperator);
2989 
2990   if (!calleePtr)
2991     calleePtr =
2992         CGM.GetAddrOfFunction(GlobalDecl(callOperator),
2993                               CGM.getTypes().GetFunctionType(*calleeFnInfo));
2994 
2995   // Prepare the return slot.
2996   const FunctionProtoType *FPT =
2997     callOperator->getType()->castAs<FunctionProtoType>();
2998   QualType resultType = FPT->getReturnType();
2999   ReturnValueSlot returnSlot;
3000   if (!resultType->isVoidType() &&
3001       calleeFnInfo->getReturnInfo().getKind() == ABIArgInfo::Indirect &&
3002       !hasScalarEvaluationKind(calleeFnInfo->getReturnType()))
3003     returnSlot =
3004         ReturnValueSlot(ReturnValue, resultType.isVolatileQualified(),
3005                         /*IsUnused=*/false, /*IsExternallyDestructed=*/true);
3006 
3007   // We don't need to separately arrange the call arguments because
3008   // the call can't be variadic anyway --- it's impossible to forward
3009   // variadic arguments.
3010 
3011   // Now emit our call.
3012   auto callee = CGCallee::forDirect(calleePtr, GlobalDecl(callOperator));
3013   RValue RV = EmitCall(*calleeFnInfo, callee, returnSlot, callArgs);
3014 
3015   // If necessary, copy the returned value into the slot.
3016   if (!resultType->isVoidType() && returnSlot.isNull()) {
3017     if (getLangOpts().ObjCAutoRefCount && resultType->isObjCRetainableType()) {
3018       RV = RValue::get(EmitARCRetainAutoreleasedReturnValue(RV.getScalarVal()));
3019     }
3020     EmitReturnOfRValue(RV, resultType);
3021   } else
3022     EmitBranchThroughCleanup(ReturnBlock);
3023 }
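
// A note on the return-slot logic above: when the callee returns indirectly
// (ABIArgInfo::Indirect) and the result is not of scalar evaluation kind,
// the forwarder hands its own ReturnValue slot to the inner call so the
// result is constructed in place and control simply branches to the return
// block; otherwise the returned RValue is copied out via EmitReturnOfRValue
// (after an ARC retain for retainable results under -fobjc-arc).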
3024 
3025 void CodeGenFunction::EmitLambdaBlockInvokeBody() {
3026   const BlockDecl *BD = BlockInfo->getBlockDecl();
3027   const VarDecl *variable = BD->capture_begin()->getVariable();
3028   const CXXRecordDecl *Lambda = variable->getType()->getAsCXXRecordDecl();
3029   const CXXMethodDecl *CallOp = Lambda->getLambdaCallOperator();
3030 
3031   if (CallOp->isVariadic()) {
3032     // FIXME: Making this work correctly is nasty because it requires either
3033     // cloning the body of the call operator or making the call operator
3034     // forward.
3035     CGM.ErrorUnsupported(CurCodeDecl, "lambda conversion to variadic function");
3036     return;
3037   }
3038 
3039   // Start building the arguments for the forwarding call.
3040   CallArgList CallArgs;
3041 
3042   QualType ThisType =
      getContext().getPointerType(getContext().getRecordType(Lambda));
3043   Address ThisPtr = GetAddrOfBlockDecl(variable);
3044   CallArgs.add(RValue::get(getAsNaturalPointerTo(ThisPtr, ThisType)), ThisType);
3045 
3046   // Add the rest of the parameters.
3047   for (auto *param : BD->parameters())
3048     EmitDelegateCallArg(CallArgs, param, param->getBeginLoc());
3049 
3050   assert(!Lambda->isGenericLambda() &&
3051             "generic lambda interconversion to block not implemented");
3052   EmitForwardingCallToLambda(CallOp, CallArgs);
3053 }
3054 
3055 void CodeGenFunction::EmitLambdaStaticInvokeBody(const CXXMethodDecl *MD) {
3056   if (MD->isVariadic()) {
3057     // FIXME: Making this work correctly is nasty because it requires either
3058     // cloning the body of the call operator or making the call operator
3059     // forward.
3060     CGM.ErrorUnsupported(MD, "lambda conversion to variadic function");
3061     return;
3062   }
3063 
3064   const CXXRecordDecl *Lambda = MD->getParent();
3065 
3066   // Start building the arguments for the forwarding call.
3067   CallArgList CallArgs;
3068 
3069   QualType LambdaType = getContext().getRecordType(Lambda);
3070   QualType ThisType = getContext().getPointerType(LambdaType);
3071   Address ThisPtr = CreateMemTemp(LambdaType, "unused.capture");
3072   CallArgs.add(RValue::get(ThisPtr.emitRawPointer(*this)), ThisType);
3073 
3074   EmitLambdaDelegatingInvokeBody(MD, CallArgs);
3075 }
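
// For example (hypothetical code), for a captureless lambda converted to a
// function pointer:
//   int (*fp)(int) = [](int x) { return x + 1; };
// the conversion function returns the lambda's static invoker, whose body is
// emitted here: a dummy closure object is materialized for the `this` slot
// (it is never inspected, since only captureless lambdas have this
// conversion) and the remaining arguments are forwarded to operator().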
3076 
3077 void CodeGenFunction::EmitLambdaDelegatingInvokeBody(const CXXMethodDecl *MD,
3078                                                      CallArgList &CallArgs) {
3079   // Add the rest of the forwarded parameters.
3080   for (auto *Param : MD->parameters())
3081     EmitDelegateCallArg(CallArgs, Param, Param->getBeginLoc());
3082 
3083   const CXXRecordDecl *Lambda = MD->getParent();
3084   const CXXMethodDecl *CallOp = Lambda->getLambdaCallOperator();
3085   // For a generic lambda, find the corresponding call operator specialization
3086   // to which the call to the static-invoker shall be forwarded.
3087   if (Lambda->isGenericLambda()) {
3088     assert(MD->isFunctionTemplateSpecialization());
3089     const TemplateArgumentList *TAL = MD->getTemplateSpecializationArgs();
3090     FunctionTemplateDecl *CallOpTemplate =
        CallOp->getDescribedFunctionTemplate();
3091     void *InsertPos = nullptr;
3092     FunctionDecl *CorrespondingCallOpSpecialization =
3093         CallOpTemplate->findSpecialization(TAL->asArray(), InsertPos);
3094     assert(CorrespondingCallOpSpecialization);
3095     CallOp = cast<CXXMethodDecl>(CorrespondingCallOpSpecialization);
3096   }
3097 
3098   // Special lambda forwarding when there are inalloca parameters.
3099   if (hasInAllocaArg(MD)) {
3100     const CGFunctionInfo *ImplFnInfo = nullptr;
3101     llvm::Function *ImplFn = nullptr;
3102     EmitLambdaInAllocaImplFn(CallOp, &ImplFnInfo, &ImplFn);
3103 
3104     EmitForwardingCallToLambda(CallOp, CallArgs, ImplFnInfo, ImplFn);
3105     return;
3106   }
3107 
3108   EmitForwardingCallToLambda(CallOp, CallArgs);
3109 }
3110 
3111 void CodeGenFunction::EmitLambdaInAllocaCallOpBody(const CXXMethodDecl *MD) {
3112   if (MD->isVariadic()) {
3113     // FIXME: Making this work correctly is nasty because it requires either
3114     // cloning the body of the call operator or making the call operator forward.
3115     CGM.ErrorUnsupported(MD, "lambda conversion to variadic function");
3116     return;
3117   }
3118 
3119   // Forward %this argument.
3120   CallArgList CallArgs;
3121   QualType LambdaType = getContext().getRecordType(MD->getParent());
3122   QualType ThisType = getContext().getPointerType(LambdaType);
3123   llvm::Value *ThisArg = CurFn->getArg(0);
3124   CallArgs.add(RValue::get(ThisArg), ThisType);
3125 
3126   EmitLambdaDelegatingInvokeBody(MD, CallArgs);
3127 }
3128 
3129 void CodeGenFunction::EmitLambdaInAllocaImplFn(
3130     const CXXMethodDecl *CallOp, const CGFunctionInfo **ImplFnInfo,
3131     llvm::Function **ImplFn) {
3132   const CGFunctionInfo &FnInfo =
3133       CGM.getTypes().arrangeCXXMethodDeclaration(CallOp);
3134   llvm::Function *CallOpFn =
3135       cast<llvm::Function>(CGM.GetAddrOfFunction(GlobalDecl(CallOp)));
3136 
3137   // Emit function containing the original call op body. __invoke will delegate
3138   // to this function.
3139   SmallVector<CanQualType, 4> ArgTypes;
3140   for (auto I = FnInfo.arg_begin(); I != FnInfo.arg_end(); ++I)
3141     ArgTypes.push_back(I->type);
3142   *ImplFnInfo = &CGM.getTypes().arrangeLLVMFunctionInfo(
3143       FnInfo.getReturnType(), FnInfoOpts::IsDelegateCall, ArgTypes,
3144       FnInfo.getExtInfo(), {}, FnInfo.getRequiredArgs());
3145 
3146   // Create a mangled name as if this were a method named __impl. If for some
3147   // reason the name doesn't look as expected, just tack __impl onto the
3148   // front.
3149   // TODO: Use the name mangler to produce the right name instead of using
3150   // string replacement.
3151   StringRef CallOpName = CallOpFn->getName();
3152   std::string ImplName;
3153   if (size_t Pos = CallOpName.find_first_of("<lambda"))
3154     ImplName = ("?__impl@" + CallOpName.drop_front(Pos)).str();
3155   else
3156     ImplName = ("__impl" + CallOpName).str();
3157 
3158   llvm::Function *Fn = CallOpFn->getParent()->getFunction(ImplName);
3159   if (!Fn) {
3160     Fn = llvm::Function::Create(CGM.getTypes().GetFunctionType(**ImplFnInfo),
3161                                 llvm::GlobalValue::InternalLinkage, ImplName,
3162                                 CGM.getModule());
3163     CGM.SetInternalFunctionAttributes(CallOp, Fn, **ImplFnInfo);
3164 
3165     const GlobalDecl &GD = GlobalDecl(CallOp);
3166     const auto *D = cast<FunctionDecl>(GD.getDecl());
3167     CodeGenFunction(CGM).GenerateCode(GD, Fn, **ImplFnInfo);
3168     CGM.SetLLVMFunctionAttributesForDefinition(D, Fn);
3169   }
3170   *ImplFn = Fn;
3171 }
3172 }
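
// Context for the inalloca path (a sketch): with the Microsoft C++ ABI on
// 32-bit x86, some by-value arguments are passed in an inalloca argument
// pack that cannot be forwarded like ordinary arguments.  The helper above
// therefore emits an "__impl" clone that carries the original call
// operator's body, and both the call operator and the static invoker are
// emitted as forwarders that delegate to it.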