1 //===--- CGVTables.cpp - Emit LLVM Code for C++ vtables -------------------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This contains code dealing with C++ code generation of virtual tables.
10 //
11 //===----------------------------------------------------------------------===//
12 
13 #include "CGCXXABI.h"
14 #include "CodeGenFunction.h"
15 #include "CodeGenModule.h"
16 #include "clang/AST/CXXInheritance.h"
17 #include "clang/AST/RecordLayout.h"
18 #include "clang/Basic/CodeGenOptions.h"
19 #include "clang/CodeGen/CGFunctionInfo.h"
20 #include "clang/CodeGen/ConstantInitBuilder.h"
21 #include "llvm/IR/IntrinsicInst.h"
22 #include "llvm/Support/Format.h"
23 #include "llvm/Transforms/Utils/Cloning.h"
24 #include <algorithm>
25 #include <cstdio>
26 
27 using namespace clang;
28 using namespace CodeGen;
29 
30 CodeGenVTables::CodeGenVTables(CodeGenModule &CGM)
31     : CGM(CGM), VTContext(CGM.getContext().getVTableContext()) {}
32 
33 llvm::Constant *CodeGenModule::GetAddrOfThunk(StringRef Name, llvm::Type *FnTy,
34                                               GlobalDecl GD) {
35   return GetOrCreateLLVMFunction(Name, FnTy, GD, /*ForVTable=*/true,
36                                  /*DontDefer=*/true, /*IsThunk=*/true);
37 }
38 
39 static void setThunkProperties(CodeGenModule &CGM, const ThunkInfo &Thunk,
40                                llvm::Function *ThunkFn, bool ForVTable,
41                                GlobalDecl GD) {
42   CGM.setFunctionLinkage(GD, ThunkFn);
43   CGM.getCXXABI().setThunkLinkage(ThunkFn, ForVTable, GD,
44                                   !Thunk.Return.isEmpty());
45 
46   // Set the right visibility.
47   CGM.setGVProperties(ThunkFn, GD);
48 
49   if (!CGM.getCXXABI().exportThunk()) {
50     ThunkFn->setDLLStorageClass(llvm::GlobalValue::DefaultStorageClass);
51     ThunkFn->setDSOLocal(true);
52   }
53 
54   if (CGM.supportsCOMDAT() && ThunkFn->isWeakForLinker())
55     ThunkFn->setComdat(CGM.getModule().getOrInsertComdat(ThunkFn->getName()));
56 }
57 
58 #ifndef NDEBUG
59 static bool similar(const ABIArgInfo &infoL, CanQualType typeL,
60                     const ABIArgInfo &infoR, CanQualType typeR) {
61   return (infoL.getKind() == infoR.getKind() &&
62           (typeL == typeR ||
63            (isa<PointerType>(typeL) && isa<PointerType>(typeR)) ||
64            (isa<ReferenceType>(typeL) && isa<ReferenceType>(typeR))));
65 }
66 #endif
67 
68 static RValue PerformReturnAdjustment(CodeGenFunction &CGF,
69                                       QualType ResultType, RValue RV,
70                                       const ThunkInfo &Thunk) {
71   // Emit the return adjustment.
72   bool NullCheckValue = !ResultType->isReferenceType();
73 
74   llvm::BasicBlock *AdjustNull = nullptr;
75   llvm::BasicBlock *AdjustNotNull = nullptr;
76   llvm::BasicBlock *AdjustEnd = nullptr;
77 
78   llvm::Value *ReturnValue = RV.getScalarVal();
79 
80   if (NullCheckValue) {
81     AdjustNull = CGF.createBasicBlock("adjust.null");
82     AdjustNotNull = CGF.createBasicBlock("adjust.notnull");
83     AdjustEnd = CGF.createBasicBlock("adjust.end");
84 
85     llvm::Value *IsNull = CGF.Builder.CreateIsNull(ReturnValue);
86     CGF.Builder.CreateCondBr(IsNull, AdjustNull, AdjustNotNull);
87     CGF.EmitBlock(AdjustNotNull);
88   }
89 
90   auto ClassDecl = ResultType->getPointeeType()->getAsCXXRecordDecl();
91   auto ClassAlign = CGF.CGM.getClassPointerAlignment(ClassDecl);
92   ReturnValue = CGF.CGM.getCXXABI().performReturnAdjustment(CGF,
93                                             Address(ReturnValue, ClassAlign),
94                                             Thunk.Return);
95 
96   if (NullCheckValue) {
97     CGF.Builder.CreateBr(AdjustEnd);
98     CGF.EmitBlock(AdjustNull);
99     CGF.Builder.CreateBr(AdjustEnd);
100     CGF.EmitBlock(AdjustEnd);
101 
102     llvm::PHINode *PHI = CGF.Builder.CreatePHI(ReturnValue->getType(), 2);
103     PHI->addIncoming(ReturnValue, AdjustNotNull);
104     PHI->addIncoming(llvm::Constant::getNullValue(ReturnValue->getType()),
105                      AdjustNull);
106     ReturnValue = PHI;
107   }
108 
109   return RValue::get(ReturnValue);
110 }
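
// Illustrative example (hypothetical types, not part of this file) of the
// source pattern that reaches PerformReturnAdjustment: a covariant return
// type under multiple inheritance.
//
//   struct Base1 { virtual void f(); int x; };
//   struct Base2 { virtual Base2 *get(); };
//   struct Derived : Base1, Base2 { Derived *get() override; };
//
// The Base2-in-Derived vtable slot for get() holds a thunk that adjusts
// "this" from Base2* to Derived*, calls Derived::get, and then converts the
// returned Derived* back to Base2*.  Because the result is a pointer rather
// than a reference, that conversion is guarded by the null check above.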
111 
112 /// This function clones a function's DISubprogram node and enters it into
113 /// a value map with the intent that the map can be utilized by the cloner
114 /// to short-circuit Metadata node mapping.
115 /// Furthermore, the function resolves any DILocalVariable nodes referenced
116 /// by dbg.value intrinsics so they can be properly mapped during cloning.
117 static void resolveTopLevelMetadata(llvm::Function *Fn,
118                                     llvm::ValueToValueMapTy &VMap) {
119   // Clone the DISubprogram node and put it into the Value map.
120   auto *DIS = Fn->getSubprogram();
121   if (!DIS)
122     return;
123   auto *NewDIS = DIS->replaceWithDistinct(DIS->clone());
124   VMap.MD()[DIS].reset(NewDIS);
125 
126   // Find all llvm.dbg.declare intrinsics and resolve the DILocalVariable nodes
127   // they are referencing.
128   for (auto &BB : Fn->getBasicBlockList()) {
129     for (auto &I : BB) {
130       if (auto *DII = dyn_cast<llvm::DbgVariableIntrinsic>(&I)) {
131         auto *DILocal = DII->getVariable();
132         if (!DILocal->isResolved())
133           DILocal->resolve();
134       }
135     }
136   }
137 }
138 
139 // This function does roughly the same thing as GenerateThunk, but in a
140 // very different way, so that va_start and va_end work correctly.
141 // FIXME: This function assumes "this" is the first non-sret LLVM argument of
142 //        a function, and that there is an alloca built in the entry block
143 //        for all accesses to "this".
144 // FIXME: This function assumes there is only one "ret" statement per function.
145 // FIXME: Cloning isn't correct in the presence of indirect goto!
146 // FIXME: This implementation of thunks bloats codesize by duplicating the
147 //        function definition.  There are alternatives:
148 //        1. Add some sort of stub support to LLVM for cases where we can
149 //           do a this adjustment, then a sibcall.
150 //        2. We could transform the definition to take a va_list instead of an
151 //           actual variable argument list, then have the thunks (including a
152 //           no-op thunk for the regular definition) call va_start/va_end.
153 //           There's a bit of per-call overhead for this solution, but it's
154 //           better for codesize if the definition is long.
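//
// Illustrative example (hypothetical types, not part of this file) of a case
// handled here:
//
//   struct A { virtual void log(const char *fmt, ...); };
//   struct B { virtual void other(); };
//   struct C : B, A { void log(const char *fmt, ...) override; };
//
// The A-in-C vtable needs a "this"-adjusting entry for log(), but its
// variadic arguments cannot be re-packaged for an ordinary forwarding call,
// so this function clones the body of C::log and rewrites the stored "this"
// value.  (maybeEmitThunk only takes this path when a musttail forward is not
// possible on the target.)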
155 llvm::Function *
156 CodeGenFunction::GenerateVarArgsThunk(llvm::Function *Fn,
157                                       const CGFunctionInfo &FnInfo,
158                                       GlobalDecl GD, const ThunkInfo &Thunk) {
159   const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl());
160   const FunctionProtoType *FPT = MD->getType()->castAs<FunctionProtoType>();
161   QualType ResultType = FPT->getReturnType();
162 
163   // Get the original function
164   assert(FnInfo.isVariadic());
165   llvm::Type *Ty = CGM.getTypes().GetFunctionType(FnInfo);
166   llvm::Value *Callee = CGM.GetAddrOfFunction(GD, Ty, /*ForVTable=*/true);
167   llvm::Function *BaseFn = cast<llvm::Function>(Callee);
168 
169   // Cloning can't work if we don't have a definition. The Microsoft ABI may
170   // require thunks when a definition is not available. Emit an error in these
171   // cases.
172   if (!MD->isDefined()) {
173     CGM.ErrorUnsupported(MD, "return-adjusting thunk with variadic arguments");
174     return Fn;
175   }
176   assert(!BaseFn->isDeclaration() && "cannot clone undefined variadic method");
177 
178   // Clone to thunk.
179   llvm::ValueToValueMapTy VMap;
180 
181   // We are cloning a function while some Metadata nodes are still unresolved.
182   // Ensure that the value mapper does not encounter any of them.
183   resolveTopLevelMetadata(BaseFn, VMap);
184   llvm::Function *NewFn = llvm::CloneFunction(BaseFn, VMap);
185   Fn->replaceAllUsesWith(NewFn);
186   NewFn->takeName(Fn);
187   Fn->eraseFromParent();
188   Fn = NewFn;
189 
190   // "Initialize" CGF (minimally).
191   CurFn = Fn;
192 
193   // Get the "this" value
194   llvm::Function::arg_iterator AI = Fn->arg_begin();
195   if (CGM.ReturnTypeUsesSRet(FnInfo))
196     ++AI;
197 
198   // Find the first store of "this", which will be to the alloca associated
199   // with "this".
200   Address ThisPtr(&*AI, CGM.getClassPointerAlignment(MD->getParent()));
201   llvm::BasicBlock *EntryBB = &Fn->front();
202   llvm::BasicBlock::iterator ThisStore =
203       std::find_if(EntryBB->begin(), EntryBB->end(), [&](llvm::Instruction &I) {
204         return isa<llvm::StoreInst>(I) &&
205                I.getOperand(0) == ThisPtr.getPointer();
206       });
207   assert(ThisStore != EntryBB->end() &&
208          "Store of this should be in entry block?");
209   // Adjust "this", if necessary.
210   Builder.SetInsertPoint(&*ThisStore);
211   llvm::Value *AdjustedThisPtr =
212       CGM.getCXXABI().performThisAdjustment(*this, ThisPtr, Thunk.This);
213   AdjustedThisPtr = Builder.CreateBitCast(AdjustedThisPtr,
214                                           ThisStore->getOperand(0)->getType());
215   ThisStore->setOperand(0, AdjustedThisPtr);
216 
217   if (!Thunk.Return.isEmpty()) {
218     // Fix up the returned value, if necessary.
219     for (llvm::BasicBlock &BB : *Fn) {
220       llvm::Instruction *T = BB.getTerminator();
221       if (isa<llvm::ReturnInst>(T)) {
222         RValue RV = RValue::get(T->getOperand(0));
223         T->eraseFromParent();
224         Builder.SetInsertPoint(&BB);
225         RV = PerformReturnAdjustment(*this, ResultType, RV, Thunk);
226         Builder.CreateRet(RV.getScalarVal());
227         break;
228       }
229     }
230   }
231 
232   return Fn;
233 }
234 
235 void CodeGenFunction::StartThunk(llvm::Function *Fn, GlobalDecl GD,
236                                  const CGFunctionInfo &FnInfo,
237                                  bool IsUnprototyped) {
238   assert(!CurGD.getDecl() && "CurGD was already set!");
239   CurGD = GD;
240   CurFuncIsThunk = true;
241 
242   // Build FunctionArgs.
243   const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl());
244   QualType ThisType = MD->getThisType();
245   QualType ResultType;
246   if (IsUnprototyped)
247     ResultType = CGM.getContext().VoidTy;
248   else if (CGM.getCXXABI().HasThisReturn(GD))
249     ResultType = ThisType;
250   else if (CGM.getCXXABI().hasMostDerivedReturn(GD))
251     ResultType = CGM.getContext().VoidPtrTy;
252   else
253     ResultType = MD->getType()->castAs<FunctionProtoType>()->getReturnType();
254   FunctionArgList FunctionArgs;
255 
256   // Create the implicit 'this' parameter declaration.
257   CGM.getCXXABI().buildThisParam(*this, FunctionArgs);
258 
259   // Add the rest of the parameters, if we have a prototype to work with.
260   if (!IsUnprototyped) {
261     FunctionArgs.append(MD->param_begin(), MD->param_end());
262 
263     if (isa<CXXDestructorDecl>(MD))
264       CGM.getCXXABI().addImplicitStructorParams(*this, ResultType,
265                                                 FunctionArgs);
266   }
267 
268   // Start defining the function.
269   auto NL = ApplyDebugLocation::CreateEmpty(*this);
270   StartFunction(GlobalDecl(), ResultType, Fn, FnInfo, FunctionArgs,
271                 MD->getLocation());
272   // Create a scope with an artificial location for the body of this function.
273   auto AL = ApplyDebugLocation::CreateArtificial(*this);
274 
275   // Since we didn't pass a GlobalDecl to StartFunction, do this ourselves.
276   CGM.getCXXABI().EmitInstanceFunctionProlog(*this);
277   CXXThisValue = CXXABIThisValue;
278   CurCodeDecl = MD;
279   CurFuncDecl = MD;
280 }
281 
282 void CodeGenFunction::FinishThunk() {
283   // Clear these to restore the invariants expected by
284   // StartFunction/FinishFunction.
285   CurCodeDecl = nullptr;
286   CurFuncDecl = nullptr;
287 
288   FinishFunction();
289 }
290 
291 void CodeGenFunction::EmitCallAndReturnForThunk(llvm::FunctionCallee Callee,
292                                                 const ThunkInfo *Thunk,
293                                                 bool IsUnprototyped) {
294   assert(isa<CXXMethodDecl>(CurGD.getDecl()) &&
295          "Please use a new CGF for this thunk");
296   const CXXMethodDecl *MD = cast<CXXMethodDecl>(CurGD.getDecl());
297 
298   // Adjust the 'this' pointer if necessary
299   llvm::Value *AdjustedThisPtr =
300     Thunk ? CGM.getCXXABI().performThisAdjustment(
301                           *this, LoadCXXThisAddress(), Thunk->This)
302           : LoadCXXThis();
303 
304   // If perfect forwarding is required for a variadic method, a method using
305   // inalloca, or an unprototyped thunk, use musttail. Emit an error if this
306   // thunk requires a return adjustment, since that is impossible with musttail.
307   if (CurFnInfo->usesInAlloca() || CurFnInfo->isVariadic() || IsUnprototyped) {
308     if (Thunk && !Thunk->Return.isEmpty()) {
309       if (IsUnprototyped)
310         CGM.ErrorUnsupported(
311             MD, "return-adjusting thunk with incomplete parameter type");
312       else if (CurFnInfo->isVariadic())
313         llvm_unreachable("shouldn't try to emit musttail return-adjusting "
314                          "thunks for variadic functions");
315       else
316         CGM.ErrorUnsupported(
317             MD, "non-trivial argument copy for return-adjusting thunk");
318     }
319     EmitMustTailThunk(CurGD, AdjustedThisPtr, Callee);
320     return;
321   }
322 
323   // Start building CallArgs.
324   CallArgList CallArgs;
325   QualType ThisType = MD->getThisType();
326   CallArgs.add(RValue::get(AdjustedThisPtr), ThisType);
327 
328   if (isa<CXXDestructorDecl>(MD))
329     CGM.getCXXABI().adjustCallArgsForDestructorThunk(*this, CurGD, CallArgs);
330 
331 #ifndef NDEBUG
332   unsigned PrefixArgs = CallArgs.size() - 1;
333 #endif
334   // Add the rest of the arguments.
335   for (const ParmVarDecl *PD : MD->parameters())
336     EmitDelegateCallArg(CallArgs, PD, SourceLocation());
337 
338   const FunctionProtoType *FPT = MD->getType()->getAs<FunctionProtoType>();
339 
340 #ifndef NDEBUG
341   const CGFunctionInfo &CallFnInfo = CGM.getTypes().arrangeCXXMethodCall(
342       CallArgs, FPT, RequiredArgs::forPrototypePlus(FPT, 1), PrefixArgs);
343   assert(CallFnInfo.getRegParm() == CurFnInfo->getRegParm() &&
344          CallFnInfo.isNoReturn() == CurFnInfo->isNoReturn() &&
345          CallFnInfo.getCallingConvention() == CurFnInfo->getCallingConvention());
346   assert(isa<CXXDestructorDecl>(MD) || // ignore dtor return types
347          similar(CallFnInfo.getReturnInfo(), CallFnInfo.getReturnType(),
348                  CurFnInfo->getReturnInfo(), CurFnInfo->getReturnType()));
349   assert(CallFnInfo.arg_size() == CurFnInfo->arg_size());
350   for (unsigned i = 0, e = CurFnInfo->arg_size(); i != e; ++i)
351     assert(similar(CallFnInfo.arg_begin()[i].info,
352                    CallFnInfo.arg_begin()[i].type,
353                    CurFnInfo->arg_begin()[i].info,
354                    CurFnInfo->arg_begin()[i].type));
355 #endif
356 
357   // Determine whether we have a return value slot to use.
358   QualType ResultType = CGM.getCXXABI().HasThisReturn(CurGD)
359                             ? ThisType
360                             : CGM.getCXXABI().hasMostDerivedReturn(CurGD)
361                                   ? CGM.getContext().VoidPtrTy
362                                   : FPT->getReturnType();
363   ReturnValueSlot Slot;
364   if (!ResultType->isVoidType() &&
365       CurFnInfo->getReturnInfo().getKind() == ABIArgInfo::Indirect)
366     Slot = ReturnValueSlot(ReturnValue, ResultType.isVolatileQualified());
367 
368   // Now emit our call.
369   llvm::CallBase *CallOrInvoke;
370   RValue RV = EmitCall(*CurFnInfo, CGCallee::forDirect(Callee, CurGD), Slot,
371                        CallArgs, &CallOrInvoke);
372 
373   // Consider return adjustment if we have ThunkInfo.
374   if (Thunk && !Thunk->Return.isEmpty())
375     RV = PerformReturnAdjustment(*this, ResultType, RV, *Thunk);
376   else if (llvm::CallInst* Call = dyn_cast<llvm::CallInst>(CallOrInvoke))
377     Call->setTailCallKind(llvm::CallInst::TCK_Tail);
378 
379   // Emit return.
380   if (!ResultType->isVoidType() && Slot.isNull())
381     CGM.getCXXABI().EmitReturnFromThunk(*this, RV, ResultType);
382 
383   // Disable the final ARC autorelease.
384   AutoreleaseResult = false;
385 
386   FinishThunk();
387 }
388 
389 void CodeGenFunction::EmitMustTailThunk(GlobalDecl GD,
390                                         llvm::Value *AdjustedThisPtr,
391                                         llvm::FunctionCallee Callee) {
392   // Emitting a musttail call thunk doesn't use any of the CGCall.cpp machinery
393   // to translate AST arguments into LLVM IR arguments.  For thunks, we know
394   // that the caller prototype more or less matches the callee prototype with
395   // the exception of 'this'.
396   SmallVector<llvm::Value *, 8> Args;
397   for (llvm::Argument &A : CurFn->args())
398     Args.push_back(&A);
399 
400   // Set the adjusted 'this' pointer.
401   const ABIArgInfo &ThisAI = CurFnInfo->arg_begin()->info;
402   if (ThisAI.isDirect()) {
403     const ABIArgInfo &RetAI = CurFnInfo->getReturnInfo();
404     int ThisArgNo = RetAI.isIndirect() && !RetAI.isSRetAfterThis() ? 1 : 0;
405     llvm::Type *ThisType = Args[ThisArgNo]->getType();
406     if (ThisType != AdjustedThisPtr->getType())
407       AdjustedThisPtr = Builder.CreateBitCast(AdjustedThisPtr, ThisType);
408     Args[ThisArgNo] = AdjustedThisPtr;
409   } else {
410     assert(ThisAI.isInAlloca() && "this is passed directly or inalloca");
411     Address ThisAddr = GetAddrOfLocalVar(CXXABIThisDecl);
412     llvm::Type *ThisType = ThisAddr.getElementType();
413     if (ThisType != AdjustedThisPtr->getType())
414       AdjustedThisPtr = Builder.CreateBitCast(AdjustedThisPtr, ThisType);
415     Builder.CreateStore(AdjustedThisPtr, ThisAddr);
416   }
417 
418   // Emit the musttail call manually.  Even if the prologue pushed cleanups, we
419   // don't actually want to run them.
420   llvm::CallInst *Call = Builder.CreateCall(Callee, Args);
421   Call->setTailCallKind(llvm::CallInst::TCK_MustTail);
422 
423   // Apply the standard set of call attributes.
424   unsigned CallingConv;
425   llvm::AttributeList Attrs;
426   CGM.ConstructAttributeList(Callee.getCallee()->getName(), *CurFnInfo, GD,
427                              Attrs, CallingConv, /*AttrOnCallSite=*/true);
428   Call->setAttributes(Attrs);
429   Call->setCallingConv(static_cast<llvm::CallingConv::ID>(CallingConv));
430 
431   if (Call->getType()->isVoidTy())
432     Builder.CreateRetVoid();
433   else
434     Builder.CreateRet(Call);
435 
436   // Finish the function to maintain CodeGenFunction invariants.
437   // FIXME: Don't emit unreachable code.
438   EmitBlock(createBasicBlock());
439   FinishFunction();
440 }
441 
442 void CodeGenFunction::generateThunk(llvm::Function *Fn,
443                                     const CGFunctionInfo &FnInfo, GlobalDecl GD,
444                                     const ThunkInfo &Thunk,
445                                     bool IsUnprototyped) {
446   StartThunk(Fn, GD, FnInfo, IsUnprototyped);
447   // Create a scope with an artificial location for the body of this function.
448   auto AL = ApplyDebugLocation::CreateArtificial(*this);
449 
450   // Get our callee. Use a placeholder type if this method is unprototyped so
451   // that CodeGenModule doesn't try to set attributes.
452   llvm::Type *Ty;
453   if (IsUnprototyped)
454     Ty = llvm::StructType::get(getLLVMContext());
455   else
456     Ty = CGM.getTypes().GetFunctionType(FnInfo);
457 
458   llvm::Constant *Callee = CGM.GetAddrOfFunction(GD, Ty, /*ForVTable=*/true);
459 
460   // Fix up the function type for an unprototyped musttail call.
461   if (IsUnprototyped)
462     Callee = llvm::ConstantExpr::getBitCast(Callee, Fn->getType());
463 
464   // Make the call and return the result.
465   EmitCallAndReturnForThunk(llvm::FunctionCallee(Fn->getFunctionType(), Callee),
466                             &Thunk, IsUnprototyped);
467 }
468 
469 static bool shouldEmitVTableThunk(CodeGenModule &CGM, const CXXMethodDecl *MD,
470                                   bool IsUnprototyped, bool ForVTable) {
471   // Always emit thunks in the MS C++ ABI. We cannot rely on other TUs to
472   // provide thunks for us.
473   if (CGM.getTarget().getCXXABI().isMicrosoft())
474     return true;
475 
476   // In the Itanium C++ ABI, vtable thunks are provided by TUs that provide
477   // definitions of the main method. Therefore, emitting thunks with the vtable
478   // is purely an optimization. Emit the thunk if optimizations are enabled and
479   // all of the parameter types are complete.
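  //
  // For example (illustrative types), given
  //
  //   struct A { virtual void f(); };
  //   struct B { virtual void g(); };
  //   struct C : A, B { void g() override; };  // B-in-C slot needs a thunk
  //
  // the TU that defines C::g is guaranteed to emit the this-adjusting thunk
  // for the B-in-C vtable entry, so a TU that merely references C's vtable
  // can safely leave that thunk as an external declaration.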
480   if (ForVTable)
481     return CGM.getCodeGenOpts().OptimizationLevel && !IsUnprototyped;
482 
483   // Always emit thunks along with the method definition.
484   return true;
485 }
486 
487 llvm::Constant *CodeGenVTables::maybeEmitThunk(GlobalDecl GD,
488                                                const ThunkInfo &TI,
489                                                bool ForVTable) {
490   const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl());
491 
492   // First, get a declaration. Compute the mangled name. Don't worry about
493   // getting the function prototype right, since we may only need this
494   // declaration to fill in a vtable slot.
495   SmallString<256> Name;
496   MangleContext &MCtx = CGM.getCXXABI().getMangleContext();
497   llvm::raw_svector_ostream Out(Name);
498   if (const CXXDestructorDecl *DD = dyn_cast<CXXDestructorDecl>(MD))
499     MCtx.mangleCXXDtorThunk(DD, GD.getDtorType(), TI.This, Out);
500   else
501     MCtx.mangleThunk(MD, TI, Out);
502   llvm::Type *ThunkVTableTy = CGM.getTypes().GetFunctionTypeForVTable(GD);
503   llvm::Constant *Thunk = CGM.GetAddrOfThunk(Name, ThunkVTableTy, GD);
504 
505   // If we don't need to emit a definition, return this declaration as is.
506   bool IsUnprototyped = !CGM.getTypes().isFuncTypeConvertible(
507       MD->getType()->castAs<FunctionType>());
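  // "Unprototyped" means we cannot form the method's IR function type yet,
  // e.g. (hypothetical sketch) because a by-value parameter type is still
  // incomplete in this TU:
  //
  //   struct Incomplete;
  //   struct A { virtual void f(Incomplete); };
  //
  // Such thunks are emitted as musttail forwarders with a placeholder type.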
508   if (!shouldEmitVTableThunk(CGM, MD, IsUnprototyped, ForVTable))
509     return Thunk;
510 
511   // Arrange a function prototype appropriate for a function definition. In some
512   // cases in the MS ABI, we may need to build an unprototyped musttail thunk.
513   const CGFunctionInfo &FnInfo =
514       IsUnprototyped ? CGM.getTypes().arrangeUnprototypedMustTailThunk(MD)
515                      : CGM.getTypes().arrangeGlobalDeclaration(GD);
516   llvm::FunctionType *ThunkFnTy = CGM.getTypes().GetFunctionType(FnInfo);
517 
518   // If the type of the underlying GlobalValue is wrong, we'll have to replace
519   // it. It should be a declaration.
520   llvm::Function *ThunkFn = cast<llvm::Function>(Thunk->stripPointerCasts());
521   if (ThunkFn->getFunctionType() != ThunkFnTy) {
522     llvm::GlobalValue *OldThunkFn = ThunkFn;
523 
524     assert(OldThunkFn->isDeclaration() && "Shouldn't replace non-declaration");
525 
526     // Remove the name from the old thunk function and get a new thunk.
527     OldThunkFn->setName(StringRef());
528     ThunkFn = llvm::Function::Create(ThunkFnTy, llvm::Function::ExternalLinkage,
529                                      Name.str(), &CGM.getModule());
530     CGM.SetLLVMFunctionAttributes(MD, FnInfo, ThunkFn);
531 
532     // If needed, replace the old thunk with a bitcast.
533     if (!OldThunkFn->use_empty()) {
534       llvm::Constant *NewPtrForOldDecl =
535           llvm::ConstantExpr::getBitCast(ThunkFn, OldThunkFn->getType());
536       OldThunkFn->replaceAllUsesWith(NewPtrForOldDecl);
537     }
538 
539     // Remove the old thunk.
540     OldThunkFn->eraseFromParent();
541   }
542 
543   bool ABIHasKeyFunctions = CGM.getTarget().getCXXABI().hasKeyFunctions();
544   bool UseAvailableExternallyLinkage = ForVTable && ABIHasKeyFunctions;
545 
546   if (!ThunkFn->isDeclaration()) {
547     if (!ABIHasKeyFunctions || UseAvailableExternallyLinkage) {
548       // There is already a thunk emitted for this function, do nothing.
549       return ThunkFn;
550     }
551 
552     setThunkProperties(CGM, TI, ThunkFn, ForVTable, GD);
553     return ThunkFn;
554   }
555 
556   // If this will be unprototyped, add the "thunk" attribute so that LLVM knows
557   // that the return type is meaningless. These thunks can be used to call
558   // functions with differing return types, and the caller is required to cast
559   // the prototype appropriately to extract the correct value.
560   if (IsUnprototyped)
561     ThunkFn->addFnAttr("thunk");
562 
563   CGM.SetLLVMFunctionAttributesForDefinition(GD.getDecl(), ThunkFn);
564 
565   // Thunks for variadic methods are special because in general variadic
566   // arguments cannot be perfectly forwarded. In the general case, clang
567   // implements such thunks by cloning the original function body. However, for
568   // thunks with no return adjustment on targets that support musttail, we can
569   // use musttail to perfectly forward the variadic arguments.
570   bool ShouldCloneVarArgs = false;
571   if (!IsUnprototyped && ThunkFn->isVarArg()) {
572     ShouldCloneVarArgs = true;
573     if (TI.Return.isEmpty()) {
574       switch (CGM.getTriple().getArch()) {
575       case llvm::Triple::x86_64:
576       case llvm::Triple::x86:
577       case llvm::Triple::aarch64:
578         ShouldCloneVarArgs = false;
579         break;
580       default:
581         break;
582       }
583     }
584   }
585 
586   if (ShouldCloneVarArgs) {
587     if (UseAvailableExternallyLinkage)
588       return ThunkFn;
589     ThunkFn =
590         CodeGenFunction(CGM).GenerateVarArgsThunk(ThunkFn, FnInfo, GD, TI);
591   } else {
592     // Normal thunk body generation.
593     CodeGenFunction(CGM).generateThunk(ThunkFn, FnInfo, GD, TI, IsUnprototyped);
594   }
595 
596   setThunkProperties(CGM, TI, ThunkFn, ForVTable, GD);
597   return ThunkFn;
598 }
599 
600 void CodeGenVTables::EmitThunks(GlobalDecl GD) {
601   const CXXMethodDecl *MD =
602     cast<CXXMethodDecl>(GD.getDecl())->getCanonicalDecl();
603 
604   // We don't need to generate thunks for the base destructor.
605   if (isa<CXXDestructorDecl>(MD) && GD.getDtorType() == Dtor_Base)
606     return;
607 
608   const VTableContextBase::ThunkInfoVectorTy *ThunkInfoVector =
609       VTContext->getThunkInfo(GD);
610 
611   if (!ThunkInfoVector)
612     return;
613 
614   for (const ThunkInfo& Thunk : *ThunkInfoVector)
615     maybeEmitThunk(GD, Thunk, /*ForVTable=*/false);
616 }
617 
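// Illustrative sketch (Itanium ABI, hypothetical class) of the component
// sequence consumed below.  For
//
//   struct A { virtual void f(); virtual ~A(); };
//
// the primary vtable array is roughly:
//
//   [ offset-to-top = 0 ]    CK_OffsetToTop
//   [ &typeinfo for A   ]    CK_RTTI
//   [ &A::f             ]    CK_FunctionPointer
//   [ A::~A (complete)  ]    CK_CompleteDtorPointer
//   [ A::~A (deleting)  ]    CK_DeletingDtorPointer
//
// with the address point (what an A object's vptr points at) being the slot
// immediately after the RTTI entry.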
618 void CodeGenVTables::addVTableComponent(
619     ConstantArrayBuilder &builder, const VTableLayout &layout,
620     unsigned idx, llvm::Constant *rtti, unsigned &nextVTableThunkIndex) {
621   auto &component = layout.vtable_components()[idx];
622 
623   auto addOffsetConstant = [&](CharUnits offset) {
624     builder.add(llvm::ConstantExpr::getIntToPtr(
625         llvm::ConstantInt::get(CGM.PtrDiffTy, offset.getQuantity()),
626         CGM.Int8PtrTy));
627   };
628 
629   switch (component.getKind()) {
630   case VTableComponent::CK_VCallOffset:
631     return addOffsetConstant(component.getVCallOffset());
632 
633   case VTableComponent::CK_VBaseOffset:
634     return addOffsetConstant(component.getVBaseOffset());
635 
636   case VTableComponent::CK_OffsetToTop:
637     return addOffsetConstant(component.getOffsetToTop());
638 
639   case VTableComponent::CK_RTTI:
640     return builder.add(llvm::ConstantExpr::getBitCast(rtti, CGM.Int8PtrTy));
641 
642   case VTableComponent::CK_FunctionPointer:
643   case VTableComponent::CK_CompleteDtorPointer:
644   case VTableComponent::CK_DeletingDtorPointer: {
645     GlobalDecl GD;
646 
647     // Get the right global decl.
648     switch (component.getKind()) {
649     default:
650       llvm_unreachable("Unexpected vtable component kind");
651     case VTableComponent::CK_FunctionPointer:
652       GD = component.getFunctionDecl();
653       break;
654     case VTableComponent::CK_CompleteDtorPointer:
655       GD = GlobalDecl(component.getDestructorDecl(), Dtor_Complete);
656       break;
657     case VTableComponent::CK_DeletingDtorPointer:
658       GD = GlobalDecl(component.getDestructorDecl(), Dtor_Deleting);
659       break;
660     }
661 
662     if (CGM.getLangOpts().CUDA) {
663       // Emit NULL for methods we can't codegen on this
664       // side. Otherwise we'd end up with a vtable with unresolved
665       // references.
666       const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl());
667       // OK on device side: functions w/ __device__ attribute
668       // OK on host side: anything except __device__-only functions.
669       bool CanEmitMethod =
670           CGM.getLangOpts().CUDAIsDevice
671               ? MD->hasAttr<CUDADeviceAttr>()
672               : (MD->hasAttr<CUDAHostAttr>() || !MD->hasAttr<CUDADeviceAttr>());
673       if (!CanEmitMethod)
674         return builder.addNullPointer(CGM.Int8PtrTy);
675       // Method is acceptable, continue processing as usual.
676     }
677 
678     auto getSpecialVirtualFn = [&](StringRef name) {
679       llvm::FunctionType *fnTy =
680           llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
681       llvm::Constant *fn = cast<llvm::Constant>(
682           CGM.CreateRuntimeFunction(fnTy, name).getCallee());
683       if (auto f = dyn_cast<llvm::Function>(fn))
684         f->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);
685       return llvm::ConstantExpr::getBitCast(fn, CGM.Int8PtrTy);
686     };
687 
688     llvm::Constant *fnPtr;
689 
690     // Pure virtual member functions.
691     if (cast<CXXMethodDecl>(GD.getDecl())->isPure()) {
692       if (!PureVirtualFn)
693         PureVirtualFn =
694           getSpecialVirtualFn(CGM.getCXXABI().GetPureVirtualCallName());
695       fnPtr = PureVirtualFn;
696 
697     // Deleted virtual member functions.
698     } else if (cast<CXXMethodDecl>(GD.getDecl())->isDeleted()) {
699       if (!DeletedVirtualFn)
700         DeletedVirtualFn =
701           getSpecialVirtualFn(CGM.getCXXABI().GetDeletedVirtualCallName());
702       fnPtr = DeletedVirtualFn;
703 
704     // Thunks.
705     } else if (nextVTableThunkIndex < layout.vtable_thunks().size() &&
706                layout.vtable_thunks()[nextVTableThunkIndex].first == idx) {
707       auto &thunkInfo = layout.vtable_thunks()[nextVTableThunkIndex].second;
708 
709       nextVTableThunkIndex++;
710       fnPtr = maybeEmitThunk(GD, thunkInfo, /*ForVTable=*/true);
711 
712     // Otherwise we can use the method definition directly.
713     } else {
714       llvm::Type *fnTy = CGM.getTypes().GetFunctionTypeForVTable(GD);
715       fnPtr = CGM.GetAddrOfFunction(GD, fnTy, /*ForVTable=*/true);
716     }
717 
718     fnPtr = llvm::ConstantExpr::getBitCast(fnPtr, CGM.Int8PtrTy);
719     builder.add(fnPtr);
720     return;
721   }
722 
723   case VTableComponent::CK_UnusedFunctionPointer:
724     return builder.addNullPointer(CGM.Int8PtrTy);
725   }
726 
727   llvm_unreachable("Unexpected vtable component kind");
728 }
729 
730 llvm::Type *CodeGenVTables::getVTableType(const VTableLayout &layout) {
731   SmallVector<llvm::Type *, 4> tys;
732   for (unsigned i = 0, e = layout.getNumVTables(); i != e; ++i) {
733     tys.push_back(llvm::ArrayType::get(CGM.Int8PtrTy, layout.getVTableSize(i)));
734   }
735 
736   return llvm::StructType::get(CGM.getLLVMContext(), tys);
737 }
738 
739 void CodeGenVTables::createVTableInitializer(ConstantStructBuilder &builder,
740                                              const VTableLayout &layout,
741                                              llvm::Constant *rtti) {
742   unsigned nextVTableThunkIndex = 0;
743   for (unsigned i = 0, e = layout.getNumVTables(); i != e; ++i) {
744     auto vtableElem = builder.beginArray(CGM.Int8PtrTy);
745     size_t thisIndex = layout.getVTableOffset(i);
746     size_t nextIndex = thisIndex + layout.getVTableSize(i);
747     for (unsigned i = thisIndex; i != nextIndex; ++i) {
748       addVTableComponent(vtableElem, layout, i, rtti, nextVTableThunkIndex);
749     }
750     vtableElem.finishAndAddTo(builder);
751   }
752 }
753 
754 llvm::GlobalVariable *
755 CodeGenVTables::GenerateConstructionVTable(const CXXRecordDecl *RD,
756                                       const BaseSubobject &Base,
757                                       bool BaseIsVirtual,
758                                    llvm::GlobalVariable::LinkageTypes Linkage,
759                                       VTableAddressPointsMapTy& AddressPoints) {
760   if (CGDebugInfo *DI = CGM.getModuleDebugInfo())
761     DI->completeClassData(Base.getBase());
762 
763   std::unique_ptr<VTableLayout> VTLayout(
764       getItaniumVTableContext().createConstructionVTableLayout(
765           Base.getBase(), Base.getBaseOffset(), BaseIsVirtual, RD));
766 
767   // Add the address points.
768   AddressPoints = VTLayout->getAddressPoints();
769 
770   // Get the mangled construction vtable name.
771   SmallString<256> OutName;
772   llvm::raw_svector_ostream Out(OutName);
773   cast<ItaniumMangleContext>(CGM.getCXXABI().getMangleContext())
774       .mangleCXXCtorVTable(RD, Base.getBaseOffset().getQuantity(),
775                            Base.getBase(), Out);
776   StringRef Name = OutName.str();
777 
778   llvm::Type *VTType = getVTableType(*VTLayout);
779 
780   // Construction vtable symbols are not part of the Itanium ABI, so we cannot
781   // guarantee that they actually will be available externally. Instead, when
782   // emitting an available_externally VTT, we provide references to an internal
783   // linkage construction vtable. The ABI only requires complete-object vtables
784   // to be the same for all instances of a type, not construction vtables.
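  //
  // Construction vtables only arise with virtual bases, e.g. (illustrative):
  //
  //   struct A { virtual void f(); };
  //   struct B : virtual A { };
  //   struct C : virtual A { };
  //   struct D : B, C { };
  //
  // While a D object's B and C subobjects are being constructed, their vptrs
  // point at the B-in-D and C-in-D construction vtables referenced from D's
  // VTT; those symbols may legitimately differ between object files, hence
  // the internal linkage chosen below.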
785   if (Linkage == llvm::GlobalVariable::AvailableExternallyLinkage)
786     Linkage = llvm::GlobalVariable::InternalLinkage;
787 
788   unsigned Align = CGM.getDataLayout().getABITypeAlignment(VTType);
789 
790   // Create the variable that will hold the construction vtable.
791   llvm::GlobalVariable *VTable =
792       CGM.CreateOrReplaceCXXRuntimeVariable(Name, VTType, Linkage, Align);
793 
794   // V-tables are always unnamed_addr.
795   VTable->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);
796 
797   llvm::Constant *RTTI = CGM.GetAddrOfRTTIDescriptor(
798       CGM.getContext().getTagDeclType(Base.getBase()));
799 
800   // Create and set the initializer.
801   ConstantInitBuilder builder(CGM);
802   auto components = builder.beginStruct();
803   createVTableInitializer(components, *VTLayout, RTTI);
804   components.finishAndSetAsInitializer(VTable);
805 
806   // Set properties only after the initializer has been set to ensure that the
807   // GV is treated as definition and not declaration.
808   assert(!VTable->isDeclaration() && "Shouldn't set properties on declaration");
809   CGM.setGVProperties(VTable, RD);
810 
811   CGM.EmitVTableTypeMetadata(RD, VTable, *VTLayout.get());
812 
813   return VTable;
814 }
815 
816 static bool shouldEmitAvailableExternallyVTable(const CodeGenModule &CGM,
817                                                 const CXXRecordDecl *RD) {
818   return CGM.getCodeGenOpts().OptimizationLevel > 0 &&
819          CGM.getCXXABI().canSpeculativelyEmitVTable(RD);
820 }
821 
822 /// Compute the required linkage of the vtable for the given class.
823 ///
824 /// Note that we only call this at the end of the translation unit.
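///
/// Illustrative example (hypothetical class): for
///
///   struct S { virtual void key(); };  // S::key is the key function
///
/// the TU that defines S::key emits the vtable with external linkage; other
/// TUs normally emit no vtable at all (or only an available_externally copy
/// when optimizing), while inline or templated key functions fall back to the
/// link-once / weak ODR linkages chosen below.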
825 llvm::GlobalVariable::LinkageTypes
826 CodeGenModule::getVTableLinkage(const CXXRecordDecl *RD) {
827   if (!RD->isExternallyVisible())
828     return llvm::GlobalVariable::InternalLinkage;
829 
830   // We're at the end of the translation unit, so the current key
831   // function is fully correct.
832   const CXXMethodDecl *keyFunction = Context.getCurrentKeyFunction(RD);
833   if (keyFunction && !RD->hasAttr<DLLImportAttr>()) {
834     // If this class has a key function, use that to determine the
835     // linkage of the vtable.
836     const FunctionDecl *def = nullptr;
837     if (keyFunction->hasBody(def))
838       keyFunction = cast<CXXMethodDecl>(def);
839 
840     switch (keyFunction->getTemplateSpecializationKind()) {
841       case TSK_Undeclared:
842       case TSK_ExplicitSpecialization:
843         assert((def || CodeGenOpts.OptimizationLevel > 0 ||
844                 CodeGenOpts.getDebugInfo() != codegenoptions::NoDebugInfo) &&
845                "Shouldn't query vtable linkage without key function, "
846                "optimizations, or debug info");
847         if (!def && CodeGenOpts.OptimizationLevel > 0)
848           return llvm::GlobalVariable::AvailableExternallyLinkage;
849 
850         if (keyFunction->isInlined())
851           return !Context.getLangOpts().AppleKext ?
852                    llvm::GlobalVariable::LinkOnceODRLinkage :
853                    llvm::Function::InternalLinkage;
854 
855         return llvm::GlobalVariable::ExternalLinkage;
856 
857       case TSK_ImplicitInstantiation:
858         return !Context.getLangOpts().AppleKext ?
859                  llvm::GlobalVariable::LinkOnceODRLinkage :
860                  llvm::Function::InternalLinkage;
861 
862       case TSK_ExplicitInstantiationDefinition:
863         return !Context.getLangOpts().AppleKext ?
864                  llvm::GlobalVariable::WeakODRLinkage :
865                  llvm::Function::InternalLinkage;
866 
867       case TSK_ExplicitInstantiationDeclaration:
868         llvm_unreachable("Should not have been asked to emit this");
869     }
870   }
871 
872   // -fapple-kext mode does not support weak linkage, so we must use
873   // internal linkage.
874   if (Context.getLangOpts().AppleKext)
875     return llvm::Function::InternalLinkage;
876 
877   llvm::GlobalVariable::LinkageTypes DiscardableODRLinkage =
878       llvm::GlobalValue::LinkOnceODRLinkage;
879   llvm::GlobalVariable::LinkageTypes NonDiscardableODRLinkage =
880       llvm::GlobalValue::WeakODRLinkage;
881   if (RD->hasAttr<DLLExportAttr>()) {
882     // Cannot discard exported vtables.
883     DiscardableODRLinkage = NonDiscardableODRLinkage;
884   } else if (RD->hasAttr<DLLImportAttr>()) {
885     // Imported vtables are available externally.
886     DiscardableODRLinkage = llvm::GlobalVariable::AvailableExternallyLinkage;
887     NonDiscardableODRLinkage = llvm::GlobalVariable::AvailableExternallyLinkage;
888   }
889 
890   switch (RD->getTemplateSpecializationKind()) {
891     case TSK_Undeclared:
892     case TSK_ExplicitSpecialization:
893     case TSK_ImplicitInstantiation:
894       return DiscardableODRLinkage;
895 
896     case TSK_ExplicitInstantiationDeclaration:
897       // Explicit instantiations in MSVC do not provide vtables, so we must emit
898       // our own.
899       if (getTarget().getCXXABI().isMicrosoft())
900         return DiscardableODRLinkage;
901       return shouldEmitAvailableExternallyVTable(*this, RD)
902                  ? llvm::GlobalVariable::AvailableExternallyLinkage
903                  : llvm::GlobalVariable::ExternalLinkage;
904 
905     case TSK_ExplicitInstantiationDefinition:
906       return NonDiscardableODRLinkage;
907   }
908 
909   llvm_unreachable("Invalid TemplateSpecializationKind!");
910 }
911 
912 /// This is a callback from Sema to tell us that a particular vtable is
913 /// required to be emitted in this translation unit.
914 ///
915 /// This is only called for vtables that _must_ be emitted (mainly due to key
916 /// functions).  For weak vtables, CodeGen tracks when they are needed and
917 /// emits them as-needed.
918 void CodeGenModule::EmitVTable(CXXRecordDecl *theClass) {
919   VTables.GenerateClassData(theClass);
920 }
921 
922 void
923 CodeGenVTables::GenerateClassData(const CXXRecordDecl *RD) {
924   if (CGDebugInfo *DI = CGM.getModuleDebugInfo())
925     DI->completeClassData(RD);
926 
927   if (RD->getNumVBases())
928     CGM.getCXXABI().emitVirtualInheritanceTables(RD);
929 
930   CGM.getCXXABI().emitVTableDefinitions(*this, RD);
931 }
932 
933 /// At this point in the translation unit, does it appear that we can
934 /// rely on the vtable being defined elsewhere in the program?
935 ///
936 /// The response is really only definitive when called at the end of
937 /// the translation unit.
938 ///
939 /// The only semantic restriction here is that the object file should
940 /// not contain a vtable definition when that vtable is defined
941 /// strongly elsewhere.  Otherwise, we'd just like to avoid emitting
942 /// vtables when unnecessary.
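///
/// For instance (illustrative), with an explicit instantiation declaration
///
///   template <class T> struct Box { virtual void f() {} };
///   extern template struct Box<int>;
///
/// the vtable for Box<int> is external to this TU: the TU containing the
/// matching explicit instantiation definition is expected to emit it (except
/// in the MS ABI, which is handled separately below).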
943 bool CodeGenVTables::isVTableExternal(const CXXRecordDecl *RD) {
944   assert(RD->isDynamicClass() && "Non-dynamic classes have no VTable.");
945 
946   // We always synthesize vtables if they are needed in the MS ABI. MSVC doesn't
947   // emit them even if there is an explicit template instantiation.
948   if (CGM.getTarget().getCXXABI().isMicrosoft())
949     return false;
950 
951   // If we have an explicit instantiation declaration (and not a
952   // definition), the vtable is defined elsewhere.
953   TemplateSpecializationKind TSK = RD->getTemplateSpecializationKind();
954   if (TSK == TSK_ExplicitInstantiationDeclaration)
955     return true;
956 
957   // Otherwise, if the class is an instantiated template, the
958   // vtable must be defined here.
959   if (TSK == TSK_ImplicitInstantiation ||
960       TSK == TSK_ExplicitInstantiationDefinition)
961     return false;
962 
963   // Otherwise, if the class doesn't have a key function (possibly
964   // anymore), the vtable must be defined here.
965   const CXXMethodDecl *keyFunction = CGM.getContext().getCurrentKeyFunction(RD);
966   if (!keyFunction)
967     return false;
968 
969   // Otherwise, if we don't have a definition of the key function, the
970   // vtable must be defined somewhere else.
971   return !keyFunction->hasBody();
972 }
973 
974 /// Given that we're currently at the end of the translation unit, and
975 /// we've emitted a reference to the vtable for this class, should
976 /// we define that vtable?
977 static bool shouldEmitVTableAtEndOfTranslationUnit(CodeGenModule &CGM,
978                                                    const CXXRecordDecl *RD) {
979   // If the vtable is internal then it has to be emitted here.
980   if (!CGM.getVTables().isVTableExternal(RD))
981     return true;
982 
983   // If it's external then maybe we will need it as available_externally.
984   return shouldEmitAvailableExternallyVTable(CGM, RD);
985 }
986 
987 /// Given that at some point we emitted a reference to one or more
988 /// vtables, and that we are now at the end of the translation unit,
989 /// decide whether we should emit them.
990 void CodeGenModule::EmitDeferredVTables() {
991 #ifndef NDEBUG
992   // Remember the size of DeferredVTables, because we're going to assume
993   // that this entire operation doesn't modify it.
994   size_t savedSize = DeferredVTables.size();
995 #endif
996 
997   for (const CXXRecordDecl *RD : DeferredVTables)
998     if (shouldEmitVTableAtEndOfTranslationUnit(*this, RD))
999       VTables.GenerateClassData(RD);
1000     else if (shouldOpportunisticallyEmitVTables())
1001       OpportunisticVTables.push_back(RD);
1002 
1003   assert(savedSize == DeferredVTables.size() &&
1004          "deferred extra vtables during vtable emission?");
1005   DeferredVTables.clear();
1006 }
1007 
1008 bool CodeGenModule::HasHiddenLTOVisibility(const CXXRecordDecl *RD) {
1009   LinkageInfo LV = RD->getLinkageAndVisibility();
1010   if (!isExternallyVisible(LV.getLinkage()))
1011     return true;
1012 
1013   if (RD->hasAttr<LTOVisibilityPublicAttr>() || RD->hasAttr<UuidAttr>())
1014     return false;
1015 
1016   if (getTriple().isOSBinFormatCOFF()) {
1017     if (RD->hasAttr<DLLExportAttr>() || RD->hasAttr<DLLImportAttr>())
1018       return false;
1019   } else {
1020     if (LV.getVisibility() != HiddenVisibility)
1021       return false;
1022   }
1023 
1024   if (getCodeGenOpts().LTOVisibilityPublicStd) {
1025     const DeclContext *DC = RD;
1026     while (1) {
1027       auto *D = cast<Decl>(DC);
1028       DC = DC->getParent();
1029       if (isa<TranslationUnitDecl>(DC->getRedeclContext())) {
1030         if (auto *ND = dyn_cast<NamespaceDecl>(D))
1031           if (const IdentifierInfo *II = ND->getIdentifier())
1032             if (II->isStr("std") || II->isStr("stdext"))
1033               return false;
1034         break;
1035       }
1036     }
1037   }
1038 
1039   return true;
1040 }
1041 
1042 llvm::GlobalObject::VCallVisibility
1043 CodeGenModule::GetVCallVisibilityLevel(const CXXRecordDecl *RD) {
1044   LinkageInfo LV = RD->getLinkageAndVisibility();
1045   llvm::GlobalObject::VCallVisibility TypeVis;
1046   if (!isExternallyVisible(LV.getLinkage()))
1047     TypeVis = llvm::GlobalObject::VCallVisibilityTranslationUnit;
1048   else if (HasHiddenLTOVisibility(RD))
1049     TypeVis = llvm::GlobalObject::VCallVisibilityLinkageUnit;
1050   else
1051     TypeVis = llvm::GlobalObject::VCallVisibilityPublic;
1052 
1053   for (auto B : RD->bases())
1054     if (B.getType()->getAsCXXRecordDecl()->isDynamicClass())
1055       TypeVis = std::min(TypeVis,
1056                     GetVCallVisibilityLevel(B.getType()->getAsCXXRecordDecl()));
1057 
1058   for (auto B : RD->vbases())
1059     if (B.getType()->getAsCXXRecordDecl()->isDynamicClass())
1060       TypeVis = std::min(TypeVis,
1061                     GetVCallVisibilityLevel(B.getType()->getAsCXXRecordDecl()));
1062 
1063   return TypeVis;
1064 }
1065 
1066 void CodeGenModule::EmitVTableTypeMetadata(const CXXRecordDecl *RD,
1067                                            llvm::GlobalVariable *VTable,
1068                                            const VTableLayout &VTLayout) {
1069   if (!getCodeGenOpts().LTOUnit)
1070     return;
1071 
1072   CharUnits PointerWidth =
1073       Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerWidth(0));
1074 
1075   typedef std::pair<const CXXRecordDecl *, unsigned> AddressPoint;
1076   std::vector<AddressPoint> AddressPoints;
1077   for (auto &&AP : VTLayout.getAddressPoints())
1078     AddressPoints.push_back(std::make_pair(
1079         AP.first.getBase(), VTLayout.getVTableOffset(AP.second.VTableIndex) +
1080                                 AP.second.AddressPointIndex));
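  // Each AddressPoint records a flat slot index into the vtable global.  For
  // example (illustrative numbers), if a sub-vtable starts at slot 5 and its
  // address point is 2 slots further in, the stored index is 7; with 8-byte
  // pointers the byte offset used for the type metadata below is 7 * 8 = 56.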
1081 
1082   // Sort the address points for determinism.
1083   llvm::sort(AddressPoints, [this](const AddressPoint &AP1,
1084                                    const AddressPoint &AP2) {
1085     if (&AP1 == &AP2)
1086       return false;
1087 
1088     std::string S1;
1089     llvm::raw_string_ostream O1(S1);
1090     getCXXABI().getMangleContext().mangleTypeName(
1091         QualType(AP1.first->getTypeForDecl(), 0), O1);
1092     O1.flush();
1093 
1094     std::string S2;
1095     llvm::raw_string_ostream O2(S2);
1096     getCXXABI().getMangleContext().mangleTypeName(
1097         QualType(AP2.first->getTypeForDecl(), 0), O2);
1098     O2.flush();
1099 
1100     if (S1 < S2)
1101       return true;
1102     if (S1 != S2)
1103       return false;
1104 
1105     return AP1.second < AP2.second;
1106   });
1107 
1108   ArrayRef<VTableComponent> Comps = VTLayout.vtable_components();
1109   for (auto AP : AddressPoints) {
1110     // Create type metadata for the address point.
1111     AddVTableTypeMetadata(VTable, PointerWidth * AP.second, AP.first);
1112 
1113     // The class associated with each address point could also potentially be
1114     // used for indirect calls via a member function pointer, so we need to
1115     // annotate the address of each function pointer with the appropriate member
1116     // function pointer type.
1117     for (unsigned I = 0; I != Comps.size(); ++I) {
1118       if (Comps[I].getKind() != VTableComponent::CK_FunctionPointer)
1119         continue;
1120       llvm::Metadata *MD = CreateMetadataIdentifierForVirtualMemPtrType(
1121           Context.getMemberPointerType(
1122               Comps[I].getFunctionDecl()->getType(),
1123               Context.getRecordType(AP.first).getTypePtr()));
1124       VTable->addTypeMetadata((PointerWidth * I).getQuantity(), MD);
1125     }
1126   }
1127 
1128   if (getCodeGenOpts().VirtualFunctionElimination) {
1129     llvm::GlobalObject::VCallVisibility TypeVis = GetVCallVisibilityLevel(RD);
1130     if (TypeVis != llvm::GlobalObject::VCallVisibilityPublic)
1131       VTable->addVCallVisibilityMetadata(TypeVis);
1132   }
1133 }
1134