//===--- CGVTables.cpp - Emit LLVM Code for C++ vtables -------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of virtual tables.
//
//===----------------------------------------------------------------------===//

#include "CGCXXABI.h"
#include "CodeGenFunction.h"
#include "CodeGenModule.h"
#include "clang/AST/Attr.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/RecordLayout.h"
#include "clang/Basic/CodeGenOptions.h"
#include "clang/CodeGen/CGFunctionInfo.h"
#include "clang/CodeGen/ConstantInitBuilder.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/Support/Format.h"
#include "llvm/Transforms/Utils/Cloning.h"
#include <algorithm>
#include <cstdio>
#include <utility>

using namespace clang;
using namespace CodeGen;

CodeGenVTables::CodeGenVTables(CodeGenModule &CGM)
    : CGM(CGM), VTContext(CGM.getContext().getVTableContext()) {}

llvm::Constant *CodeGenModule::GetAddrOfThunk(StringRef Name, llvm::Type *FnTy,
                                              GlobalDecl GD) {
  return GetOrCreateLLVMFunction(Name, FnTy, GD, /*ForVTable=*/true,
                                 /*DontDefer=*/true, /*IsThunk=*/true);
}
40
static void setThunkProperties(CodeGenModule &CGM, const ThunkInfo &Thunk,
                               llvm::Function *ThunkFn, bool ForVTable,
                               GlobalDecl GD) {
  CGM.setFunctionLinkage(GD, ThunkFn);
  CGM.getCXXABI().setThunkLinkage(ThunkFn, ForVTable, GD,
                                  !Thunk.Return.isEmpty());

  // Set the right visibility.
  CGM.setGVProperties(ThunkFn, GD);

  if (!CGM.getCXXABI().exportThunk()) {
    ThunkFn->setDLLStorageClass(llvm::GlobalValue::DefaultStorageClass);
    ThunkFn->setDSOLocal(true);
  }

  if (CGM.supportsCOMDAT() && ThunkFn->isWeakForLinker())
    ThunkFn->setComdat(CGM.getModule().getOrInsertComdat(ThunkFn->getName()));
}
59
#ifndef NDEBUG
static bool similar(const ABIArgInfo &infoL, CanQualType typeL,
                    const ABIArgInfo &infoR, CanQualType typeR) {
  return (infoL.getKind() == infoR.getKind() &&
          (typeL == typeR ||
           (isa<PointerType>(typeL) && isa<PointerType>(typeR)) ||
           (isa<ReferenceType>(typeL) && isa<ReferenceType>(typeR))));
}
#endif
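
// A return adjustment is needed for covariant return thunks, where the
// overrider returns a pointer to a class that sits at a non-zero offset
// inside the class returned by the overridden method. Illustrative example
// (hypothetical types, not from this file):
//
//   struct A { virtual A *clone(); };
//   struct Pad { virtual ~Pad(); };
//   struct B : Pad, A { B *clone() override; };
//
// The thunk for B::clone in the A-in-B vtable must convert the returned B*
// to A* by adding a non-zero offset; the null check below exists so that a
// returned null pointer stays null rather than being offset.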
69
static RValue PerformReturnAdjustment(CodeGenFunction &CGF,
                                      QualType ResultType, RValue RV,
                                      const ThunkInfo &Thunk) {
  // Emit the return adjustment.
  bool NullCheckValue = !ResultType->isReferenceType();

  llvm::BasicBlock *AdjustNull = nullptr;
  llvm::BasicBlock *AdjustNotNull = nullptr;
  llvm::BasicBlock *AdjustEnd = nullptr;

  llvm::Value *ReturnValue = RV.getScalarVal();

  if (NullCheckValue) {
    AdjustNull = CGF.createBasicBlock("adjust.null");
    AdjustNotNull = CGF.createBasicBlock("adjust.notnull");
    AdjustEnd = CGF.createBasicBlock("adjust.end");

    llvm::Value *IsNull = CGF.Builder.CreateIsNull(ReturnValue);
    CGF.Builder.CreateCondBr(IsNull, AdjustNull, AdjustNotNull);
    CGF.EmitBlock(AdjustNotNull);
  }

  auto ClassDecl = ResultType->getPointeeType()->getAsCXXRecordDecl();
  auto ClassAlign = CGF.CGM.getClassPointerAlignment(ClassDecl);
  ReturnValue = CGF.CGM.getCXXABI().performReturnAdjustment(
      CGF,
      Address(ReturnValue, CGF.ConvertTypeForMem(ResultType->getPointeeType()),
              ClassAlign),
      Thunk.Return);

  if (NullCheckValue) {
    CGF.Builder.CreateBr(AdjustEnd);
    CGF.EmitBlock(AdjustNull);
    CGF.Builder.CreateBr(AdjustEnd);
    CGF.EmitBlock(AdjustEnd);

    llvm::PHINode *PHI = CGF.Builder.CreatePHI(ReturnValue->getType(), 2);
    PHI->addIncoming(ReturnValue, AdjustNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(ReturnValue->getType()),
                     AdjustNull);
    ReturnValue = PHI;
  }

  return RValue::get(ReturnValue);
}
115
/// This function clones a function's DISubprogram node and enters it into
/// a value map with the intent that the map can be utilized by the cloner
/// to short-circuit Metadata node mapping.
/// Furthermore, the function resolves any DILocalVariable nodes referenced
/// by dbg.value intrinsics so they can be properly mapped during cloning.
static void resolveTopLevelMetadata(llvm::Function *Fn,
                                    llvm::ValueToValueMapTy &VMap) {
  // Clone the DISubprogram node and put it into the Value map.
  auto *DIS = Fn->getSubprogram();
  if (!DIS)
    return;
  auto *NewDIS = DIS->replaceWithDistinct(DIS->clone());
  VMap.MD()[DIS].reset(NewDIS);

  // Find all llvm.dbg.declare intrinsics and resolve the DILocalVariable nodes
  // they are referencing.
  for (auto &BB : *Fn) {
    for (auto &I : BB) {
      if (auto *DII = dyn_cast<llvm::DbgVariableIntrinsic>(&I)) {
        auto *DILocal = DII->getVariable();
        if (!DILocal->isResolved())
          DILocal->resolve();
      }
    }
  }
}
142
// This function does roughly the same thing as GenerateThunk, but in a
// very different way, so that va_start and va_end work correctly.
// FIXME: This function assumes "this" is the first non-sret LLVM argument of
//        a function, and that there is an alloca built in the entry block
//        for all accesses to "this".
// FIXME: This function assumes there is only one "ret" statement per function.
// FIXME: Cloning isn't correct in the presence of indirect goto!
// FIXME: This implementation of thunks bloats codesize by duplicating the
//        function definition. There are alternatives:
//        1. Add some sort of stub support to LLVM for cases where we can
//           do a this adjustment, then a sibcall.
//        2. We could transform the definition to take a va_list instead of an
//           actual variable argument list, then have the thunks (including a
//           no-op thunk for the regular definition) call va_start/va_end.
//           There's a bit of per-call overhead for this solution, but it's
//           better for codesize if the definition is long.
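//
// Illustrative example of the situation handled here (hypothetical types):
//
//   struct A { virtual void log(const char *fmt, ...); };
//   struct B { virtual ~B(); };
//   struct C : B, A { void log(const char *fmt, ...) override; };
//
// The entry for A::log in the A-in-C vtable needs a 'this'-adjusting thunk,
// and the variadic arguments cannot be re-pushed by an ordinary forwarding
// call, so (when musttail cannot be used) the body of C::log is cloned and
// the clone adjusts 'this' in place instead.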
159llvm::Function *
160CodeGenFunction::GenerateVarArgsThunk(llvm::Function *Fn,
161 const CGFunctionInfo &FnInfo,
162 GlobalDecl GD, const ThunkInfo &Thunk) {
163 const CXXMethodDecl *MD = cast<CXXMethodDecl>(Val: GD.getDecl());
164 const FunctionProtoType *FPT = MD->getType()->castAs<FunctionProtoType>();
165 QualType ResultType = FPT->getReturnType();
166
167 // Get the original function
168 assert(FnInfo.isVariadic());
169 llvm::Type *Ty = CGM.getTypes().GetFunctionType(Info: FnInfo);
170 llvm::Value *Callee = CGM.GetAddrOfFunction(GD, Ty, /*ForVTable=*/true);
171 llvm::Function *BaseFn = cast<llvm::Function>(Val: Callee);
172
173 // Cloning can't work if we don't have a definition. The Microsoft ABI may
174 // require thunks when a definition is not available. Emit an error in these
175 // cases.
176 if (!MD->isDefined()) {
177 CGM.ErrorUnsupported(MD, "return-adjusting thunk with variadic arguments");
178 return Fn;
179 }
180 assert(!BaseFn->isDeclaration() && "cannot clone undefined variadic method");
181
182 // Clone to thunk.
183 llvm::ValueToValueMapTy VMap;
184
185 // We are cloning a function while some Metadata nodes are still unresolved.
186 // Ensure that the value mapper does not encounter any of them.
187 resolveTopLevelMetadata(Fn: BaseFn, VMap);
188 llvm::Function *NewFn = llvm::CloneFunction(F: BaseFn, VMap);
189 Fn->replaceAllUsesWith(V: NewFn);
190 NewFn->takeName(V: Fn);
191 Fn->eraseFromParent();
192 Fn = NewFn;
193
194 // "Initialize" CGF (minimally).
195 CurFn = Fn;
196
197 // Get the "this" value
198 llvm::Function::arg_iterator AI = Fn->arg_begin();
199 if (CGM.ReturnTypeUsesSRet(FI: FnInfo))
200 ++AI;
201
202 // Find the first store of "this", which will be to the alloca associated
203 // with "this".
204 Address ThisPtr = makeNaturalAddressForPointer(
205 Ptr: &*AI, T: MD->getFunctionObjectParameterType(),
206 Alignment: CGM.getClassPointerAlignment(CD: MD->getParent()));
207 llvm::BasicBlock *EntryBB = &Fn->front();
208 llvm::BasicBlock::iterator ThisStore =
209 llvm::find_if(Range&: *EntryBB, P: [&](llvm::Instruction &I) {
210 return isa<llvm::StoreInst>(Val: I) && I.getOperand(i: 0) == &*AI;
211 });
212 assert(ThisStore != EntryBB->end() &&
213 "Store of this should be in entry block?");
214 // Adjust "this", if necessary.
215 Builder.SetInsertPoint(&*ThisStore);
216 llvm::Value *AdjustedThisPtr =
217 CGM.getCXXABI().performThisAdjustment(CGF&: *this, This: ThisPtr, TA: Thunk.This);
218 AdjustedThisPtr = Builder.CreateBitCast(V: AdjustedThisPtr,
219 DestTy: ThisStore->getOperand(i: 0)->getType());
220 ThisStore->setOperand(i: 0, Val: AdjustedThisPtr);
221
222 if (!Thunk.Return.isEmpty()) {
223 // Fix up the returned value, if necessary.
224 for (llvm::BasicBlock &BB : *Fn) {
225 llvm::Instruction *T = BB.getTerminator();
226 if (isa<llvm::ReturnInst>(Val: T)) {
227 RValue RV = RValue::get(V: T->getOperand(i: 0));
228 T->eraseFromParent();
229 Builder.SetInsertPoint(&BB);
230 RV = PerformReturnAdjustment(CGF&: *this, ResultType, RV, Thunk);
231 Builder.CreateRet(V: RV.getScalarVal());
232 break;
233 }
234 }
235 }
236
237 return Fn;
238}
239
240void CodeGenFunction::StartThunk(llvm::Function *Fn, GlobalDecl GD,
241 const CGFunctionInfo &FnInfo,
242 bool IsUnprototyped) {
243 assert(!CurGD.getDecl() && "CurGD was already set!");
244 CurGD = GD;
245 CurFuncIsThunk = true;
246
247 // Build FunctionArgs.
248 const CXXMethodDecl *MD = cast<CXXMethodDecl>(Val: GD.getDecl());
249 QualType ThisType = MD->getThisType();
250 QualType ResultType;
251 if (IsUnprototyped)
252 ResultType = CGM.getContext().VoidTy;
253 else if (CGM.getCXXABI().HasThisReturn(GD))
254 ResultType = ThisType;
255 else if (CGM.getCXXABI().hasMostDerivedReturn(GD))
256 ResultType = CGM.getContext().VoidPtrTy;
257 else
258 ResultType = MD->getType()->castAs<FunctionProtoType>()->getReturnType();
259 FunctionArgList FunctionArgs;
260
261 // Create the implicit 'this' parameter declaration.
262 CGM.getCXXABI().buildThisParam(CGF&: *this, Params&: FunctionArgs);
263
264 // Add the rest of the parameters, if we have a prototype to work with.
265 if (!IsUnprototyped) {
266 FunctionArgs.append(MD->param_begin(), MD->param_end());
267
268 if (isa<CXXDestructorDecl>(Val: MD))
269 CGM.getCXXABI().addImplicitStructorParams(CGF&: *this, ResTy&: ResultType,
270 Params&: FunctionArgs);
271 }
272
273 // Start defining the function.
274 auto NL = ApplyDebugLocation::CreateEmpty(CGF&: *this);
275 StartFunction(GD: GlobalDecl(), RetTy: ResultType, Fn, FnInfo, Args: FunctionArgs,
276 Loc: MD->getLocation());
277 // Create a scope with an artificial location for the body of this function.
278 auto AL = ApplyDebugLocation::CreateArtificial(CGF&: *this);
279
280 // Since we didn't pass a GlobalDecl to StartFunction, do this ourselves.
281 CGM.getCXXABI().EmitInstanceFunctionProlog(CGF&: *this);
282 CXXThisValue = CXXABIThisValue;
283 CurCodeDecl = MD;
284 CurFuncDecl = MD;
285}
286
287void CodeGenFunction::FinishThunk() {
288 // Clear these to restore the invariants expected by
289 // StartFunction/FinishFunction.
290 CurCodeDecl = nullptr;
291 CurFuncDecl = nullptr;
292
293 FinishFunction();
294}
295
296void CodeGenFunction::EmitCallAndReturnForThunk(llvm::FunctionCallee Callee,
297 const ThunkInfo *Thunk,
298 bool IsUnprototyped) {
299 assert(isa<CXXMethodDecl>(CurGD.getDecl()) &&
300 "Please use a new CGF for this thunk");
301 const CXXMethodDecl *MD = cast<CXXMethodDecl>(Val: CurGD.getDecl());
302
303 // Adjust the 'this' pointer if necessary
304 llvm::Value *AdjustedThisPtr =
305 Thunk ? CGM.getCXXABI().performThisAdjustment(
306 CGF&: *this, This: LoadCXXThisAddress(), TA: Thunk->This)
307 : LoadCXXThis();
308
  // If perfect forwarding is required for a variadic method, a method using
  // inalloca, or an unprototyped thunk, use musttail. Emit an error if this
  // thunk requires a return adjustment, since that is impossible with musttail.
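  // (A musttail call must be immediately followed by a return of its result,
  // so there is no point at which a returned pointer could be adjusted; hence
  // the diagnostics below.)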
312 if (CurFnInfo->usesInAlloca() || CurFnInfo->isVariadic() || IsUnprototyped) {
313 if (Thunk && !Thunk->Return.isEmpty()) {
314 if (IsUnprototyped)
315 CGM.ErrorUnsupported(
316 MD, "return-adjusting thunk with incomplete parameter type");
317 else if (CurFnInfo->isVariadic())
318 llvm_unreachable("shouldn't try to emit musttail return-adjusting "
319 "thunks for variadic functions");
320 else
321 CGM.ErrorUnsupported(
322 MD, "non-trivial argument copy for return-adjusting thunk");
323 }
324 EmitMustTailThunk(GD: CurGD, AdjustedThisPtr, Callee);
325 return;
326 }
327
328 // Start building CallArgs.
329 CallArgList CallArgs;
330 QualType ThisType = MD->getThisType();
331 CallArgs.add(rvalue: RValue::get(V: AdjustedThisPtr), type: ThisType);
332
333 if (isa<CXXDestructorDecl>(Val: MD))
334 CGM.getCXXABI().adjustCallArgsForDestructorThunk(CGF&: *this, GD: CurGD, CallArgs);
335
336#ifndef NDEBUG
337 unsigned PrefixArgs = CallArgs.size() - 1;
338#endif
339 // Add the rest of the arguments.
340 for (const ParmVarDecl *PD : MD->parameters())
341 EmitDelegateCallArg(CallArgs, PD, SourceLocation());
342
343 const FunctionProtoType *FPT = MD->getType()->castAs<FunctionProtoType>();
344
345#ifndef NDEBUG
346 const CGFunctionInfo &CallFnInfo = CGM.getTypes().arrangeCXXMethodCall(
347 args: CallArgs, type: FPT, required: RequiredArgs::forPrototypePlus(prototype: FPT, additional: 1), numPrefixArgs: PrefixArgs);
348 assert(CallFnInfo.getRegParm() == CurFnInfo->getRegParm() &&
349 CallFnInfo.isNoReturn() == CurFnInfo->isNoReturn() &&
350 CallFnInfo.getCallingConvention() == CurFnInfo->getCallingConvention());
351 assert(isa<CXXDestructorDecl>(MD) || // ignore dtor return types
352 similar(CallFnInfo.getReturnInfo(), CallFnInfo.getReturnType(),
353 CurFnInfo->getReturnInfo(), CurFnInfo->getReturnType()));
354 assert(CallFnInfo.arg_size() == CurFnInfo->arg_size());
355 for (unsigned i = 0, e = CurFnInfo->arg_size(); i != e; ++i)
356 assert(similar(CallFnInfo.arg_begin()[i].info,
357 CallFnInfo.arg_begin()[i].type,
358 CurFnInfo->arg_begin()[i].info,
359 CurFnInfo->arg_begin()[i].type));
360#endif
361
362 // Determine whether we have a return value slot to use.
363 QualType ResultType = CGM.getCXXABI().HasThisReturn(GD: CurGD)
364 ? ThisType
365 : CGM.getCXXABI().hasMostDerivedReturn(GD: CurGD)
366 ? CGM.getContext().VoidPtrTy
367 : FPT->getReturnType();
368 ReturnValueSlot Slot;
369 if (!ResultType->isVoidType() &&
370 (CurFnInfo->getReturnInfo().getKind() == ABIArgInfo::Indirect ||
371 hasAggregateEvaluationKind(T: ResultType)))
372 Slot = ReturnValueSlot(ReturnValue, ResultType.isVolatileQualified(),
373 /*IsUnused=*/false, /*IsExternallyDestructed=*/true);
374
375 // Now emit our call.
376 llvm::CallBase *CallOrInvoke;
377 RValue RV = EmitCall(CallInfo: *CurFnInfo, Callee: CGCallee::forDirect(functionPtr: Callee, abstractInfo: CurGD), ReturnValue: Slot,
378 Args: CallArgs, callOrInvoke: &CallOrInvoke);
379
380 // Consider return adjustment if we have ThunkInfo.
381 if (Thunk && !Thunk->Return.isEmpty())
382 RV = PerformReturnAdjustment(CGF&: *this, ResultType, RV, Thunk: *Thunk);
383 else if (llvm::CallInst* Call = dyn_cast<llvm::CallInst>(Val: CallOrInvoke))
384 Call->setTailCallKind(llvm::CallInst::TCK_Tail);
385
386 // Emit return.
387 if (!ResultType->isVoidType() && Slot.isNull())
388 CGM.getCXXABI().EmitReturnFromThunk(CGF&: *this, RV, ResultType);
389
390 // Disable the final ARC autorelease.
391 AutoreleaseResult = false;
392
393 FinishThunk();
394}
395
396void CodeGenFunction::EmitMustTailThunk(GlobalDecl GD,
397 llvm::Value *AdjustedThisPtr,
398 llvm::FunctionCallee Callee) {
399 // Emitting a musttail call thunk doesn't use any of the CGCall.cpp machinery
400 // to translate AST arguments into LLVM IR arguments. For thunks, we know
401 // that the caller prototype more or less matches the callee prototype with
402 // the exception of 'this'.
403 SmallVector<llvm::Value *, 8> Args(llvm::make_pointer_range(Range: CurFn->args()));
404
405 // Set the adjusted 'this' pointer.
406 const ABIArgInfo &ThisAI = CurFnInfo->arg_begin()->info;
407 if (ThisAI.isDirect()) {
408 const ABIArgInfo &RetAI = CurFnInfo->getReturnInfo();
409 int ThisArgNo = RetAI.isIndirect() && !RetAI.isSRetAfterThis() ? 1 : 0;
410 llvm::Type *ThisType = Args[ThisArgNo]->getType();
411 if (ThisType != AdjustedThisPtr->getType())
412 AdjustedThisPtr = Builder.CreateBitCast(V: AdjustedThisPtr, DestTy: ThisType);
413 Args[ThisArgNo] = AdjustedThisPtr;
414 } else {
415 assert(ThisAI.isInAlloca() && "this is passed directly or inalloca");
416 Address ThisAddr = GetAddrOfLocalVar(CXXABIThisDecl);
417 llvm::Type *ThisType = ThisAddr.getElementType();
418 if (ThisType != AdjustedThisPtr->getType())
419 AdjustedThisPtr = Builder.CreateBitCast(V: AdjustedThisPtr, DestTy: ThisType);
420 Builder.CreateStore(Val: AdjustedThisPtr, Addr: ThisAddr);
421 }
422
423 // Emit the musttail call manually. Even if the prologue pushed cleanups, we
424 // don't actually want to run them.
425 llvm::CallInst *Call = Builder.CreateCall(Callee, Args);
426 Call->setTailCallKind(llvm::CallInst::TCK_MustTail);
427
428 // Apply the standard set of call attributes.
429 unsigned CallingConv;
430 llvm::AttributeList Attrs;
431 CGM.ConstructAttributeList(Name: Callee.getCallee()->getName(), Info: *CurFnInfo, CalleeInfo: GD,
432 Attrs, CallingConv, /*AttrOnCallSite=*/true,
433 /*IsThunk=*/false);
434 Call->setAttributes(Attrs);
435 Call->setCallingConv(static_cast<llvm::CallingConv::ID>(CallingConv));
436
437 if (Call->getType()->isVoidTy())
438 Builder.CreateRetVoid();
439 else
440 Builder.CreateRet(V: Call);
441
442 // Finish the function to maintain CodeGenFunction invariants.
443 // FIXME: Don't emit unreachable code.
444 EmitBlock(BB: createBasicBlock());
445
446 FinishThunk();
447}
448
449void CodeGenFunction::generateThunk(llvm::Function *Fn,
450 const CGFunctionInfo &FnInfo, GlobalDecl GD,
451 const ThunkInfo &Thunk,
452 bool IsUnprototyped) {
453 StartThunk(Fn, GD, FnInfo, IsUnprototyped);
454 // Create a scope with an artificial location for the body of this function.
455 auto AL = ApplyDebugLocation::CreateArtificial(CGF&: *this);
456
457 // Get our callee. Use a placeholder type if this method is unprototyped so
458 // that CodeGenModule doesn't try to set attributes.
459 llvm::Type *Ty;
460 if (IsUnprototyped)
461 Ty = llvm::StructType::get(Context&: getLLVMContext());
462 else
463 Ty = CGM.getTypes().GetFunctionType(Info: FnInfo);
464
465 llvm::Constant *Callee = CGM.GetAddrOfFunction(GD, Ty, /*ForVTable=*/true);
466
467 // Make the call and return the result.
468 EmitCallAndReturnForThunk(Callee: llvm::FunctionCallee(Fn->getFunctionType(), Callee),
469 Thunk: &Thunk, IsUnprototyped);
470}
471
static bool shouldEmitVTableThunk(CodeGenModule &CGM, const CXXMethodDecl *MD,
                                  bool IsUnprototyped, bool ForVTable) {
  // Always emit thunks in the MS C++ ABI. We cannot rely on other TUs to
  // provide thunks for us.
  if (CGM.getTarget().getCXXABI().isMicrosoft())
    return true;

  // In the Itanium C++ ABI, vtable thunks are provided by TUs that provide
  // definitions of the main method. Therefore, emitting thunks with the vtable
  // is purely an optimization. Emit the thunk if optimizations are enabled and
  // all of the parameter types are complete.
  if (ForVTable)
    return CGM.getCodeGenOpts().OptimizationLevel && !IsUnprototyped;

  // Always emit thunks along with the method definition.
  return true;
}
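
// For reference: in the Itanium ABI a thunk is a separate symbol whose
// mangled name encodes the adjustment, e.g. a thunk for C::f() that
// subtracts 8 bytes from 'this' gets a name along the lines of
// "_ZThn8_N1C1fEv" (the exact spelling comes from the MangleContext used in
// maybeEmitThunk below). Because the name is deterministic, every TU that
// defines C::f emits an identical thunk, which is why emitting it again
// alongside the vtable is only an optimization.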
489
490llvm::Constant *CodeGenVTables::maybeEmitThunk(GlobalDecl GD,
491 const ThunkInfo &TI,
492 bool ForVTable) {
493 const CXXMethodDecl *MD = cast<CXXMethodDecl>(Val: GD.getDecl());
494
495 // First, get a declaration. Compute the mangled name. Don't worry about
496 // getting the function prototype right, since we may only need this
497 // declaration to fill in a vtable slot.
498 SmallString<256> Name;
499 MangleContext &MCtx = CGM.getCXXABI().getMangleContext();
500 llvm::raw_svector_ostream Out(Name);
501 if (const CXXDestructorDecl *DD = dyn_cast<CXXDestructorDecl>(Val: MD))
502 MCtx.mangleCXXDtorThunk(DD, Type: GD.getDtorType(), ThisAdjustment: TI.This, Out);
503 else
504 MCtx.mangleThunk(MD, Thunk: TI, Out);
505 llvm::Type *ThunkVTableTy = CGM.getTypes().GetFunctionTypeForVTable(GD);
506 llvm::Constant *Thunk = CGM.GetAddrOfThunk(Name, FnTy: ThunkVTableTy, GD);
507
508 // If we don't need to emit a definition, return this declaration as is.
509 bool IsUnprototyped = !CGM.getTypes().isFuncTypeConvertible(
510 FT: MD->getType()->castAs<FunctionType>());
511 if (!shouldEmitVTableThunk(CGM, MD, IsUnprototyped, ForVTable))
512 return Thunk;
513
514 // Arrange a function prototype appropriate for a function definition. In some
515 // cases in the MS ABI, we may need to build an unprototyped musttail thunk.
516 const CGFunctionInfo &FnInfo =
517 IsUnprototyped ? CGM.getTypes().arrangeUnprototypedMustTailThunk(MD)
518 : CGM.getTypes().arrangeGlobalDeclaration(GD);
519 llvm::FunctionType *ThunkFnTy = CGM.getTypes().GetFunctionType(Info: FnInfo);
520
521 // If the type of the underlying GlobalValue is wrong, we'll have to replace
522 // it. It should be a declaration.
523 llvm::Function *ThunkFn = cast<llvm::Function>(Val: Thunk->stripPointerCasts());
524 if (ThunkFn->getFunctionType() != ThunkFnTy) {
525 llvm::GlobalValue *OldThunkFn = ThunkFn;
526
527 assert(OldThunkFn->isDeclaration() && "Shouldn't replace non-declaration");
528
529 // Remove the name from the old thunk function and get a new thunk.
530 OldThunkFn->setName(StringRef());
531 ThunkFn = llvm::Function::Create(Ty: ThunkFnTy, Linkage: llvm::Function::ExternalLinkage,
532 N: Name.str(), M: &CGM.getModule());
533 CGM.SetLLVMFunctionAttributes(MD, FnInfo, ThunkFn, /*IsThunk=*/false);
534
535 if (!OldThunkFn->use_empty()) {
536 OldThunkFn->replaceAllUsesWith(V: ThunkFn);
537 }
538
539 // Remove the old thunk.
540 OldThunkFn->eraseFromParent();
541 }
542
543 bool ABIHasKeyFunctions = CGM.getTarget().getCXXABI().hasKeyFunctions();
544 bool UseAvailableExternallyLinkage = ForVTable && ABIHasKeyFunctions;
545
546 if (!ThunkFn->isDeclaration()) {
547 if (!ABIHasKeyFunctions || UseAvailableExternallyLinkage) {
548 // There is already a thunk emitted for this function, do nothing.
549 return ThunkFn;
550 }
551
552 setThunkProperties(CGM, Thunk: TI, ThunkFn, ForVTable, GD);
553 return ThunkFn;
554 }
555
556 // If this will be unprototyped, add the "thunk" attribute so that LLVM knows
557 // that the return type is meaningless. These thunks can be used to call
558 // functions with differing return types, and the caller is required to cast
559 // the prototype appropriately to extract the correct value.
560 if (IsUnprototyped)
561 ThunkFn->addFnAttr(Kind: "thunk");
562
563 CGM.SetLLVMFunctionAttributesForDefinition(D: GD.getDecl(), F: ThunkFn);
564
565 // Thunks for variadic methods are special because in general variadic
566 // arguments cannot be perfectly forwarded. In the general case, clang
567 // implements such thunks by cloning the original function body. However, for
568 // thunks with no return adjustment on targets that support musttail, we can
569 // use musttail to perfectly forward the variadic arguments.
570 bool ShouldCloneVarArgs = false;
571 if (!IsUnprototyped && ThunkFn->isVarArg()) {
572 ShouldCloneVarArgs = true;
573 if (TI.Return.isEmpty()) {
574 switch (CGM.getTriple().getArch()) {
575 case llvm::Triple::x86_64:
576 case llvm::Triple::x86:
577 case llvm::Triple::aarch64:
578 ShouldCloneVarArgs = false;
579 break;
580 default:
581 break;
582 }
583 }
584 }
585
586 if (ShouldCloneVarArgs) {
587 if (UseAvailableExternallyLinkage)
588 return ThunkFn;
589 ThunkFn =
590 CodeGenFunction(CGM).GenerateVarArgsThunk(Fn: ThunkFn, FnInfo, GD, Thunk: TI);
591 } else {
592 // Normal thunk body generation.
593 CodeGenFunction(CGM).generateThunk(Fn: ThunkFn, FnInfo, GD, Thunk: TI, IsUnprototyped);
594 }
595
596 setThunkProperties(CGM, Thunk: TI, ThunkFn, ForVTable, GD);
597 return ThunkFn;
598}
599
void CodeGenVTables::EmitThunks(GlobalDecl GD) {
  const CXXMethodDecl *MD =
      cast<CXXMethodDecl>(GD.getDecl())->getCanonicalDecl();

  // We don't need to generate thunks for the base destructor.
  if (isa<CXXDestructorDecl>(MD) && GD.getDtorType() == Dtor_Base)
    return;

  const VTableContextBase::ThunkInfoVectorTy *ThunkInfoVector =
      VTContext->getThunkInfo(GD);

  if (!ThunkInfoVector)
    return;

  for (const ThunkInfo& Thunk : *ThunkInfoVector)
    maybeEmitThunk(GD, Thunk, /*ForVTable=*/false);
}
617
618void CodeGenVTables::addRelativeComponent(ConstantArrayBuilder &builder,
619 llvm::Constant *component,
620 unsigned vtableAddressPoint,
621 bool vtableHasLocalLinkage,
622 bool isCompleteDtor) const {
623 // No need to get the offset of a nullptr.
624 if (component->isNullValue())
625 return builder.add(value: llvm::ConstantInt::get(Ty: CGM.Int32Ty, V: 0));
626
627 auto *globalVal =
628 cast<llvm::GlobalValue>(Val: component->stripPointerCastsAndAliases());
629 llvm::Module &module = CGM.getModule();
630
631 // We don't want to copy the linkage of the vtable exactly because we still
632 // want the stub/proxy to be emitted for properly calculating the offset.
633 // Examples where there would be no symbol emitted are available_externally
634 // and private linkages.
635 //
636 // `internal` linkage results in STB_LOCAL Elf binding while still manifesting a
637 // local symbol.
638 //
639 // `linkonce_odr` linkage results in a STB_DEFAULT Elf binding but also allows for
640 // the rtti_proxy to be transparently replaced with a GOTPCREL reloc by a
641 // target that supports this replacement.
642 auto stubLinkage = vtableHasLocalLinkage
643 ? llvm::GlobalValue::InternalLinkage
644 : llvm::GlobalValue::LinkOnceODRLinkage;
645
646 llvm::Constant *target;
647 if (auto *func = dyn_cast<llvm::Function>(Val: globalVal)) {
648 target = llvm::DSOLocalEquivalent::get(GV: func);
649 } else {
650 llvm::SmallString<16> rttiProxyName(globalVal->getName());
651 rttiProxyName.append(RHS: ".rtti_proxy");
652
653 // The RTTI component may not always be emitted in the same linkage unit as
654 // the vtable. As a general case, we can make a dso_local proxy to the RTTI
655 // that points to the actual RTTI struct somewhere. This will result in a
656 // GOTPCREL relocation when taking the relative offset to the proxy.
657 llvm::GlobalVariable *proxy = module.getNamedGlobal(Name: rttiProxyName);
658 if (!proxy) {
659 proxy = new llvm::GlobalVariable(module, globalVal->getType(),
660 /*isConstant=*/true, stubLinkage,
661 globalVal, rttiProxyName);
662 proxy->setDSOLocal(true);
663 proxy->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);
664 if (!proxy->hasLocalLinkage()) {
665 proxy->setVisibility(llvm::GlobalValue::HiddenVisibility);
666 proxy->setComdat(module.getOrInsertComdat(Name: rttiProxyName));
667 }
      // Do not instrument the rtti proxies with hwasan to avoid a duplicate
      // symbol error. Aliases generated by hwasan will retain the same name but
      // the addresses they are set to may have different tags from different
      // compilation units. We don't run into this without hwasan because the
      // proxies are in comdat groups, but those aren't propagated to the alias.
      RemoveHwasanMetadata(proxy);
    }
675 target = proxy;
676 }
677
678 builder.addRelativeOffsetToPosition(type: CGM.Int32Ty, target,
679 /*position=*/vtableAddressPoint);
680}
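
// In the relative vtable layout referenced above, each slot is an i32 holding
// the signed distance from the vtable's address point to the target (or to
// the dso_local proxy/stub standing in for it). A virtual call then becomes,
// roughly, "fn = vptr + *(int32_t *)(vptr + 4 * slot)" instead of a plain
// pointer load, which is what lets the table live in read-only memory
// without dynamic relocations.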
681
682static bool UseRelativeLayout(const CodeGenModule &CGM) {
683 return CGM.getTarget().getCXXABI().isItaniumFamily() &&
684 CGM.getItaniumVTableContext().isRelativeLayout();
685}
686
687bool CodeGenVTables::useRelativeLayout() const {
688 return UseRelativeLayout(CGM);
689}
690
691llvm::Type *CodeGenModule::getVTableComponentType() const {
692 if (UseRelativeLayout(CGM: *this))
693 return Int32Ty;
694 return GlobalsInt8PtrTy;
695}
696
697llvm::Type *CodeGenVTables::getVTableComponentType() const {
698 return CGM.getVTableComponentType();
699}
700
701static void AddPointerLayoutOffset(const CodeGenModule &CGM,
702 ConstantArrayBuilder &builder,
703 CharUnits offset) {
704 builder.add(value: llvm::ConstantExpr::getIntToPtr(
705 C: llvm::ConstantInt::get(Ty: CGM.PtrDiffTy, V: offset.getQuantity()),
706 Ty: CGM.GlobalsInt8PtrTy));
707}
708
709static void AddRelativeLayoutOffset(const CodeGenModule &CGM,
710 ConstantArrayBuilder &builder,
711 CharUnits offset) {
712 builder.add(value: llvm::ConstantInt::get(Ty: CGM.Int32Ty, V: offset.getQuantity()));
713}
714
715void CodeGenVTables::addVTableComponent(ConstantArrayBuilder &builder,
716 const VTableLayout &layout,
717 unsigned componentIndex,
718 llvm::Constant *rtti,
719 unsigned &nextVTableThunkIndex,
720 unsigned vtableAddressPoint,
721 bool vtableHasLocalLinkage) {
722 auto &component = layout.vtable_components()[componentIndex];
723
724 auto addOffsetConstant =
725 useRelativeLayout() ? AddRelativeLayoutOffset : AddPointerLayoutOffset;
726
727 switch (component.getKind()) {
728 case VTableComponent::CK_VCallOffset:
729 return addOffsetConstant(CGM, builder, component.getVCallOffset());
730
731 case VTableComponent::CK_VBaseOffset:
732 return addOffsetConstant(CGM, builder, component.getVBaseOffset());
733
734 case VTableComponent::CK_OffsetToTop:
735 return addOffsetConstant(CGM, builder, component.getOffsetToTop());
736
737 case VTableComponent::CK_RTTI:
738 if (useRelativeLayout())
739 return addRelativeComponent(builder, component: rtti, vtableAddressPoint,
740 vtableHasLocalLinkage,
741 /*isCompleteDtor=*/false);
742 else
743 return builder.add(value: rtti);
744
745 case VTableComponent::CK_FunctionPointer:
746 case VTableComponent::CK_CompleteDtorPointer:
747 case VTableComponent::CK_DeletingDtorPointer: {
748 GlobalDecl GD = component.getGlobalDecl();
749
750 if (CGM.getLangOpts().CUDA) {
      // Emit NULL for methods we can't codegen on this
      // side. Otherwise we'd end up with a vtable with unresolved
      // references.
754 const CXXMethodDecl *MD = cast<CXXMethodDecl>(Val: GD.getDecl());
755 // OK on device side: functions w/ __device__ attribute
756 // OK on host side: anything except __device__-only functions.
757 bool CanEmitMethod =
758 CGM.getLangOpts().CUDAIsDevice
759 ? MD->hasAttr<CUDADeviceAttr>()
760 : (MD->hasAttr<CUDAHostAttr>() || !MD->hasAttr<CUDADeviceAttr>());
761 if (!CanEmitMethod)
762 return builder.add(
763 value: llvm::ConstantExpr::getNullValue(Ty: CGM.GlobalsInt8PtrTy));
764 // Method is acceptable, continue processing as usual.
765 }
766
767 auto getSpecialVirtualFn = [&](StringRef name) -> llvm::Constant * {
768 // FIXME(PR43094): When merging comdat groups, lld can select a local
769 // symbol as the signature symbol even though it cannot be accessed
770 // outside that symbol's TU. The relative vtables ABI would make
771 // __cxa_pure_virtual and __cxa_deleted_virtual local symbols, and
772 // depending on link order, the comdat groups could resolve to the one
773 // with the local symbol. As a temporary solution, fill these components
774 // with zero. We shouldn't be calling these in the first place anyway.
775 if (useRelativeLayout())
776 return llvm::ConstantPointerNull::get(T: CGM.GlobalsInt8PtrTy);

      // For NVPTX devices in OpenMP emit special functions as null pointers,
      // otherwise linking ends up with unresolved references.
      if (CGM.getLangOpts().OpenMP && CGM.getLangOpts().OpenMPIsTargetDevice &&
          CGM.getTriple().isNVPTX())
        return llvm::ConstantPointerNull::get(CGM.GlobalsInt8PtrTy);
783 llvm::FunctionType *fnTy =
784 llvm::FunctionType::get(Result: CGM.VoidTy, /*isVarArg=*/false);
785 llvm::Constant *fn = cast<llvm::Constant>(
786 Val: CGM.CreateRuntimeFunction(Ty: fnTy, Name: name).getCallee());
787 if (auto f = dyn_cast<llvm::Function>(Val: fn))
788 f->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);
789 return fn;
790 };
791
792 llvm::Constant *fnPtr;
793
794 // Pure virtual member functions.
795 if (cast<CXXMethodDecl>(Val: GD.getDecl())->isPureVirtual()) {
796 if (!PureVirtualFn)
797 PureVirtualFn =
798 getSpecialVirtualFn(CGM.getCXXABI().GetPureVirtualCallName());
799 fnPtr = PureVirtualFn;
800
801 // Deleted virtual member functions.
802 } else if (cast<CXXMethodDecl>(Val: GD.getDecl())->isDeleted()) {
803 if (!DeletedVirtualFn)
804 DeletedVirtualFn =
805 getSpecialVirtualFn(CGM.getCXXABI().GetDeletedVirtualCallName());
806 fnPtr = DeletedVirtualFn;
807
808 // Thunks.
809 } else if (nextVTableThunkIndex < layout.vtable_thunks().size() &&
810 layout.vtable_thunks()[nextVTableThunkIndex].first ==
811 componentIndex) {
812 auto &thunkInfo = layout.vtable_thunks()[nextVTableThunkIndex].second;
813
814 nextVTableThunkIndex++;
815 fnPtr = maybeEmitThunk(GD, TI: thunkInfo, /*ForVTable=*/true);
816
817 // Otherwise we can use the method definition directly.
818 } else {
819 llvm::Type *fnTy = CGM.getTypes().GetFunctionTypeForVTable(GD);
820 fnPtr = CGM.GetAddrOfFunction(GD, Ty: fnTy, /*ForVTable=*/true);
821 }
822
823 if (useRelativeLayout()) {
824 return addRelativeComponent(
825 builder, component: fnPtr, vtableAddressPoint, vtableHasLocalLinkage,
826 isCompleteDtor: component.getKind() == VTableComponent::CK_CompleteDtorPointer);
827 } else {
      // TODO: this is icky and only exists due to functions being in the
      // generic address space, rather than the global one, even though they
      // are globals; fixing said issue might be intrusive, and will be done
      // later.
832 unsigned FnAS = fnPtr->getType()->getPointerAddressSpace();
833 unsigned GVAS = CGM.GlobalsInt8PtrTy->getPointerAddressSpace();
834
835 if (FnAS != GVAS)
836 fnPtr =
837 llvm::ConstantExpr::getAddrSpaceCast(C: fnPtr, Ty: CGM.GlobalsInt8PtrTy);
838 return builder.add(value: fnPtr);
839 }
840 }
841
842 case VTableComponent::CK_UnusedFunctionPointer:
843 if (useRelativeLayout())
844 return builder.add(value: llvm::ConstantExpr::getNullValue(Ty: CGM.Int32Ty));
845 else
846 return builder.addNullPointer(ptrTy: CGM.GlobalsInt8PtrTy);
847 }
848
849 llvm_unreachable("Unexpected vtable component kind");
850}
851
llvm::Type *CodeGenVTables::getVTableType(const VTableLayout &layout) {
  SmallVector<llvm::Type *, 4> tys;
  llvm::Type *componentType = getVTableComponentType();
  for (unsigned i = 0, e = layout.getNumVTables(); i != e; ++i)
    tys.push_back(llvm::ArrayType::get(componentType, layout.getVTableSize(i)));

  return llvm::StructType::get(CGM.getLLVMContext(), tys);
}
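
// Illustrative example (hypothetical types): for "struct D : A, B { ... }"
// where both A and B are dynamic classes, the Itanium layout produces a
// vtable group with a primary vtable (shared with the A subobject) and a
// secondary vtable for the B subobject, so the type built above is a struct
// of two component arrays rather than a single flat array.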
860
void CodeGenVTables::createVTableInitializer(ConstantStructBuilder &builder,
                                             const VTableLayout &layout,
                                             llvm::Constant *rtti,
                                             bool vtableHasLocalLinkage) {
  llvm::Type *componentType = getVTableComponentType();

  const auto &addressPoints = layout.getAddressPointIndices();
  unsigned nextVTableThunkIndex = 0;
  for (unsigned vtableIndex = 0, endIndex = layout.getNumVTables();
       vtableIndex != endIndex; ++vtableIndex) {
    auto vtableElem = builder.beginArray(componentType);

    size_t vtableStart = layout.getVTableOffset(vtableIndex);
    size_t vtableEnd = vtableStart + layout.getVTableSize(vtableIndex);
    for (size_t componentIndex = vtableStart; componentIndex < vtableEnd;
         ++componentIndex) {
      addVTableComponent(vtableElem, layout, componentIndex, rtti,
                         nextVTableThunkIndex, addressPoints[vtableIndex],
                         vtableHasLocalLinkage);
    }
    vtableElem.finishAndAddTo(builder);
  }
}
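
// Construction vtables are an Itanium ABI detail: while a base subobject of a
// class with virtual bases is being constructed or destroyed, its vptr must
// point at a table whose virtual-base offsets match the most-derived object,
// not a standalone instance of the base. Illustrative example (hypothetical
// types):
//
//   struct V { int v; };
//   struct A : virtual V { A(); };
//   struct B : A { double d; B(); };
//
// While B() runs A's constructor, the object's vptr points at the "A-in-B"
// construction vtable (reached through B's VTT), whose offset to V reflects
// B's layout rather than A's.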
884
885llvm::GlobalVariable *CodeGenVTables::GenerateConstructionVTable(
886 const CXXRecordDecl *RD, const BaseSubobject &Base, bool BaseIsVirtual,
887 llvm::GlobalVariable::LinkageTypes Linkage,
888 VTableAddressPointsMapTy &AddressPoints) {
889 if (CGDebugInfo *DI = CGM.getModuleDebugInfo())
890 DI->completeClassData(Base.getBase());
891
892 std::unique_ptr<VTableLayout> VTLayout(
893 getItaniumVTableContext().createConstructionVTableLayout(
894 MostDerivedClass: Base.getBase(), MostDerivedClassOffset: Base.getBaseOffset(), MostDerivedClassIsVirtual: BaseIsVirtual, LayoutClass: RD));
895
896 // Add the address points.
897 AddressPoints = VTLayout->getAddressPoints();
898
899 // Get the mangled construction vtable name.
900 SmallString<256> OutName;
901 llvm::raw_svector_ostream Out(OutName);
902 cast<ItaniumMangleContext>(Val&: CGM.getCXXABI().getMangleContext())
903 .mangleCXXCtorVTable(RD, Offset: Base.getBaseOffset().getQuantity(),
904 Type: Base.getBase(), Out);
905 SmallString<256> Name(OutName);
906
907 bool UsingRelativeLayout = getItaniumVTableContext().isRelativeLayout();
908 bool VTableAliasExists =
909 UsingRelativeLayout && CGM.getModule().getNamedAlias(Name);
910 if (VTableAliasExists) {
911 // We previously made the vtable hidden and changed its name.
912 Name.append(RHS: ".local");
913 }
914
915 llvm::Type *VTType = getVTableType(layout: *VTLayout);
916
917 // Construction vtable symbols are not part of the Itanium ABI, so we cannot
918 // guarantee that they actually will be available externally. Instead, when
919 // emitting an available_externally VTT, we provide references to an internal
920 // linkage construction vtable. The ABI only requires complete-object vtables
921 // to be the same for all instances of a type, not construction vtables.
922 if (Linkage == llvm::GlobalVariable::AvailableExternallyLinkage)
923 Linkage = llvm::GlobalVariable::InternalLinkage;
924
925 llvm::Align Align = CGM.getDataLayout().getABITypeAlign(Ty: VTType);
926
927 // Create the variable that will hold the construction vtable.
928 llvm::GlobalVariable *VTable =
929 CGM.CreateOrReplaceCXXRuntimeVariable(Name, Ty: VTType, Linkage, Alignment: Align);
930
931 // V-tables are always unnamed_addr.
932 VTable->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);
933
934 llvm::Constant *RTTI = CGM.GetAddrOfRTTIDescriptor(
935 Ty: CGM.getContext().getTagDeclType(Base.getBase()));
936
937 // Create and set the initializer.
938 ConstantInitBuilder builder(CGM);
939 auto components = builder.beginStruct();
940 createVTableInitializer(builder&: components, layout: *VTLayout, rtti: RTTI,
941 vtableHasLocalLinkage: VTable->hasLocalLinkage());
942 components.finishAndSetAsInitializer(global: VTable);
943
944 // Set properties only after the initializer has been set to ensure that the
945 // GV is treated as definition and not declaration.
946 assert(!VTable->isDeclaration() && "Shouldn't set properties on declaration");
947 CGM.setGVProperties(VTable, RD);
948
949 CGM.EmitVTableTypeMetadata(RD, VTable, VTLayout: *VTLayout.get());
950
951 if (UsingRelativeLayout) {
952 RemoveHwasanMetadata(GV: VTable);
953 if (!VTable->isDSOLocal())
954 GenerateRelativeVTableAlias(VTable, AliasNameRef: OutName);
955 }
956
957 return VTable;
958}
959
960// Ensure this vtable is not instrumented by hwasan. That is, a global alias is
961// not generated for it. This is mainly used by the relative-vtables ABI where
962// vtables instead contain 32-bit offsets between the vtable and function
963// pointers. Hwasan is disabled for these vtables for now because the tag in a
964// vtable pointer may fail the overflow check when resolving 32-bit PLT
965// relocations. A future alternative for this would be finding which usages of
966// the vtable can continue to use the untagged hwasan value without any loss of
967// value in hwasan.
968void CodeGenVTables::RemoveHwasanMetadata(llvm::GlobalValue *GV) const {
969 if (CGM.getLangOpts().Sanitize.has(K: SanitizerKind::HWAddress)) {
970 llvm::GlobalValue::SanitizerMetadata Meta;
971 if (GV->hasSanitizerMetadata())
972 Meta = GV->getSanitizerMetadata();
973 Meta.NoHWAddress = true;
974 GV->setSanitizerMetadata(Meta);
975 }
976}
977
// If the VTable is not dso_local, then we will not be able to indicate that
// the VTable does not need a relocation and move it into rodata. This
// commonly occurs for classes that should be made public from a DSO (like in
// libc++). For cases like these, we can make the vtable hidden or private and
// create a public alias with the same visibility and linkage as the original
// vtable type.
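//
// Concretely (symbol names are illustrative): an exported vtable that would
// have been emitted as a preemptible "_ZTV1S" is renamed to a hidden or
// private "_ZTV1S.local" holding the actual relative-offset data, and a
// "_ZTV1S" alias with the original linkage and visibility is pointed at it,
// so external references keep working while the data itself stays dso_local.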
984void CodeGenVTables::GenerateRelativeVTableAlias(llvm::GlobalVariable *VTable,
985 llvm::StringRef AliasNameRef) {
986 assert(getItaniumVTableContext().isRelativeLayout() &&
987 "Can only use this if the relative vtable ABI is used");
988 assert(!VTable->isDSOLocal() && "This should be called only if the vtable is "
989 "not guaranteed to be dso_local");
990
  // If the vtable is available_externally, we shouldn't (and don't need to)
  // generate an alias for it in the first place since the vtable won't
  // actually be emitted in this compilation unit.
  if (VTable->hasAvailableExternallyLinkage())
    return;

  // Create a new string in the event the alias is already the name of the
  // vtable. Using the reference directly could lead to use of an uninitialized
  // value in the module's StringMap.
1000 llvm::SmallString<256> AliasName(AliasNameRef);
1001 VTable->setName(AliasName + ".local");
1002
1003 auto Linkage = VTable->getLinkage();
1004 assert(llvm::GlobalAlias::isValidLinkage(Linkage) &&
1005 "Invalid vtable alias linkage");
1006
1007 llvm::GlobalAlias *VTableAlias = CGM.getModule().getNamedAlias(Name: AliasName);
1008 if (!VTableAlias) {
1009 VTableAlias = llvm::GlobalAlias::create(Ty: VTable->getValueType(),
1010 AddressSpace: VTable->getAddressSpace(), Linkage,
1011 Name: AliasName, Parent: &CGM.getModule());
1012 } else {
1013 assert(VTableAlias->getValueType() == VTable->getValueType());
1014 assert(VTableAlias->getLinkage() == Linkage);
1015 }
1016 VTableAlias->setVisibility(VTable->getVisibility());
1017 VTableAlias->setUnnamedAddr(VTable->getUnnamedAddr());
1018
1019 // Both of these imply dso_local for the vtable.
1020 if (!VTable->hasComdat()) {
    // If this is in a comdat, then we shouldn't make the linkage private due to
    // an issue in lld where private symbols can be used as the key symbol when
    // choosing the prevalent group. This leads to "relocation refers to a
    // symbol in a discarded section".
1025 VTable->setLinkage(llvm::GlobalValue::PrivateLinkage);
1026 } else {
1027 // We should at least make this hidden since we don't want to expose it.
1028 VTable->setVisibility(llvm::GlobalValue::HiddenVisibility);
1029 }
1030
1031 VTableAlias->setAliasee(VTable);
1032}
1033
1034static bool shouldEmitAvailableExternallyVTable(const CodeGenModule &CGM,
1035 const CXXRecordDecl *RD) {
1036 return CGM.getCodeGenOpts().OptimizationLevel > 0 &&
1037 CGM.getCXXABI().canSpeculativelyEmitVTable(RD);
1038}
1039
/// Compute the required linkage of the vtable for the given class.
///
/// Note that we only call this at the end of the translation unit.
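///
/// Illustrative example: for "struct S { virtual void f(); };", S::f is the
/// key function, so the TU that defines S::f emits the vtable with external
/// linkage and other TUs only reference it (or, under optimization, emit an
/// available_externally copy). Classes whose key function is inline, or that
/// have no key function at all, instead get linkonce_odr vtables in every TU
/// that needs them.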
1043llvm::GlobalVariable::LinkageTypes
1044CodeGenModule::getVTableLinkage(const CXXRecordDecl *RD) {
1045 if (!RD->isExternallyVisible())
1046 return llvm::GlobalVariable::InternalLinkage;
1047
1048 // We're at the end of the translation unit, so the current key
1049 // function is fully correct.
1050 const CXXMethodDecl *keyFunction = Context.getCurrentKeyFunction(RD);
1051 if (keyFunction && !RD->hasAttr<DLLImportAttr>()) {
1052 // If this class has a key function, use that to determine the
1053 // linkage of the vtable.
1054 const FunctionDecl *def = nullptr;
1055 if (keyFunction->hasBody(def))
1056 keyFunction = cast<CXXMethodDecl>(Val: def);
1057
1058 switch (keyFunction->getTemplateSpecializationKind()) {
1059 case TSK_Undeclared:
1060 case TSK_ExplicitSpecialization:
1061 assert(
1062 (def || CodeGenOpts.OptimizationLevel > 0 ||
1063 CodeGenOpts.getDebugInfo() != llvm::codegenoptions::NoDebugInfo) &&
1064 "Shouldn't query vtable linkage without key function, "
1065 "optimizations, or debug info");
1066 if (!def && CodeGenOpts.OptimizationLevel > 0)
1067 return llvm::GlobalVariable::AvailableExternallyLinkage;
1068
1069 if (keyFunction->isInlined())
1070 return !Context.getLangOpts().AppleKext
1071 ? llvm::GlobalVariable::LinkOnceODRLinkage
1072 : llvm::Function::InternalLinkage;
1073
1074 return llvm::GlobalVariable::ExternalLinkage;
1075
1076 case TSK_ImplicitInstantiation:
1077 return !Context.getLangOpts().AppleKext ?
1078 llvm::GlobalVariable::LinkOnceODRLinkage :
1079 llvm::Function::InternalLinkage;
1080
1081 case TSK_ExplicitInstantiationDefinition:
1082 return !Context.getLangOpts().AppleKext ?
1083 llvm::GlobalVariable::WeakODRLinkage :
1084 llvm::Function::InternalLinkage;
1085
1086 case TSK_ExplicitInstantiationDeclaration:
1087 llvm_unreachable("Should not have been asked to emit this");
1088 }
1089 }
1090
1091 // -fapple-kext mode does not support weak linkage, so we must use
1092 // internal linkage.
1093 if (Context.getLangOpts().AppleKext)
1094 return llvm::Function::InternalLinkage;
1095
1096 llvm::GlobalVariable::LinkageTypes DiscardableODRLinkage =
1097 llvm::GlobalValue::LinkOnceODRLinkage;
1098 llvm::GlobalVariable::LinkageTypes NonDiscardableODRLinkage =
1099 llvm::GlobalValue::WeakODRLinkage;
1100 if (RD->hasAttr<DLLExportAttr>()) {
1101 // Cannot discard exported vtables.
1102 DiscardableODRLinkage = NonDiscardableODRLinkage;
1103 } else if (RD->hasAttr<DLLImportAttr>()) {
1104 // Imported vtables are available externally.
1105 DiscardableODRLinkage = llvm::GlobalVariable::AvailableExternallyLinkage;
1106 NonDiscardableODRLinkage = llvm::GlobalVariable::AvailableExternallyLinkage;
1107 }
1108
1109 switch (RD->getTemplateSpecializationKind()) {
1110 case TSK_Undeclared:
1111 case TSK_ExplicitSpecialization:
1112 case TSK_ImplicitInstantiation:
1113 return DiscardableODRLinkage;
1114
1115 case TSK_ExplicitInstantiationDeclaration:
1116 // Explicit instantiations in MSVC do not provide vtables, so we must emit
1117 // our own.
1118 if (getTarget().getCXXABI().isMicrosoft())
1119 return DiscardableODRLinkage;
1120 return shouldEmitAvailableExternallyVTable(CGM: *this, RD)
1121 ? llvm::GlobalVariable::AvailableExternallyLinkage
1122 : llvm::GlobalVariable::ExternalLinkage;
1123
1124 case TSK_ExplicitInstantiationDefinition:
1125 return NonDiscardableODRLinkage;
1126 }
1127
1128 llvm_unreachable("Invalid TemplateSpecializationKind!");
1129}
1130
1131/// This is a callback from Sema to tell us that a particular vtable is
1132/// required to be emitted in this translation unit.
1133///
1134/// This is only called for vtables that _must_ be emitted (mainly due to key
1135/// functions). For weak vtables, CodeGen tracks when they are needed and
1136/// emits them as-needed.
1137void CodeGenModule::EmitVTable(CXXRecordDecl *theClass) {
1138 VTables.GenerateClassData(RD: theClass);
1139}
1140
1141void
1142CodeGenVTables::GenerateClassData(const CXXRecordDecl *RD) {
1143 if (CGDebugInfo *DI = CGM.getModuleDebugInfo())
1144 DI->completeClassData(RD);
1145
1146 if (RD->getNumVBases())
1147 CGM.getCXXABI().emitVirtualInheritanceTables(RD);
1148
1149 CGM.getCXXABI().emitVTableDefinitions(CGVT&: *this, RD);
1150}
1151
/// At this point in the translation unit, does it appear that we can
/// rely on the vtable being defined elsewhere in the program?
///
/// The response is really only definitive when called at the end of
/// the translation unit.
///
/// The only semantic restriction here is that the object file should
/// not contain a vtable definition when that vtable is defined
/// strongly elsewhere. Otherwise, we'd just like to avoid emitting
/// vtables when unnecessary.
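///
/// For instance, an explicit instantiation declaration such as
/// "extern template class std::vector<int>;" promises that some other TU
/// contains the corresponding explicit instantiation definition, so the
/// vtable is assumed to be defined elsewhere.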
1162bool CodeGenVTables::isVTableExternal(const CXXRecordDecl *RD) {
1163 assert(RD->isDynamicClass() && "Non-dynamic classes have no VTable.");
1164
1165 // We always synthesize vtables if they are needed in the MS ABI. MSVC doesn't
1166 // emit them even if there is an explicit template instantiation.
1167 if (CGM.getTarget().getCXXABI().isMicrosoft())
1168 return false;
1169
1170 // If we have an explicit instantiation declaration (and not a
1171 // definition), the vtable is defined elsewhere.
1172 TemplateSpecializationKind TSK = RD->getTemplateSpecializationKind();
1173 if (TSK == TSK_ExplicitInstantiationDeclaration)
1174 return true;
1175
1176 // Otherwise, if the class is an instantiated template, the
1177 // vtable must be defined here.
1178 if (TSK == TSK_ImplicitInstantiation ||
1179 TSK == TSK_ExplicitInstantiationDefinition)
1180 return false;
1181
1182 // Otherwise, if the class doesn't have a key function (possibly
1183 // anymore), the vtable must be defined here.
1184 const CXXMethodDecl *keyFunction = CGM.getContext().getCurrentKeyFunction(RD);
1185 if (!keyFunction)
1186 return false;
1187
1188 const FunctionDecl *Def;
1189 // Otherwise, if we don't have a definition of the key function, the
1190 // vtable must be defined somewhere else.
1191 if (!keyFunction->hasBody(Def))
1192 return true;
1193
1194 assert(Def && "The body of the key function is not assigned to Def?");
1195 // If the non-inline key function comes from another module unit, the vtable
1196 // must be defined there.
1197 return Def->isInAnotherModuleUnit() && !Def->isInlineSpecified();
1198}
1199
1200/// Given that we're currently at the end of the translation unit, and
1201/// we've emitted a reference to the vtable for this class, should
1202/// we define that vtable?
1203static bool shouldEmitVTableAtEndOfTranslationUnit(CodeGenModule &CGM,
1204 const CXXRecordDecl *RD) {
  // If the vtable is internal then it has to be emitted in this TU.
1206 if (!CGM.getVTables().isVTableExternal(RD))
1207 return true;
1208
1209 // If it's external then maybe we will need it as available_externally.
1210 return shouldEmitAvailableExternallyVTable(CGM, RD);
1211}
1212
1213/// Given that at some point we emitted a reference to one or more
1214/// vtables, and that we are now at the end of the translation unit,
1215/// decide whether we should emit them.
1216void CodeGenModule::EmitDeferredVTables() {
1217#ifndef NDEBUG
1218 // Remember the size of DeferredVTables, because we're going to assume
1219 // that this entire operation doesn't modify it.
1220 size_t savedSize = DeferredVTables.size();
1221#endif
1222
1223 for (const CXXRecordDecl *RD : DeferredVTables)
1224 if (shouldEmitVTableAtEndOfTranslationUnit(CGM&: *this, RD))
1225 VTables.GenerateClassData(RD);
1226 else if (shouldOpportunisticallyEmitVTables())
1227 OpportunisticVTables.push_back(x: RD);
1228
1229 assert(savedSize == DeferredVTables.size() &&
1230 "deferred extra vtables during vtable emission?");
1231 DeferredVTables.clear();
1232}
1233
1234bool CodeGenModule::AlwaysHasLTOVisibilityPublic(const CXXRecordDecl *RD) {
1235 if (RD->hasAttr<LTOVisibilityPublicAttr>() || RD->hasAttr<UuidAttr>() ||
1236 RD->hasAttr<DLLExportAttr>() || RD->hasAttr<DLLImportAttr>())
1237 return true;
1238
1239 if (!getCodeGenOpts().LTOVisibilityPublicStd)
1240 return false;
1241
1242 const DeclContext *DC = RD;
1243 while (true) {
1244 auto *D = cast<Decl>(Val: DC);
1245 DC = DC->getParent();
1246 if (isa<TranslationUnitDecl>(Val: DC->getRedeclContext())) {
1247 if (auto *ND = dyn_cast<NamespaceDecl>(D))
1248 if (const IdentifierInfo *II = ND->getIdentifier())
1249 if (II->isStr(Str: "std") || II->isStr(Str: "stdext"))
1250 return true;
1251 break;
1252 }
1253 }
1254
1255 return false;
1256}
1257
1258bool CodeGenModule::HasHiddenLTOVisibility(const CXXRecordDecl *RD) {
1259 LinkageInfo LV = RD->getLinkageAndVisibility();
1260 if (!isExternallyVisible(L: LV.getLinkage()))
1261 return true;
1262
1263 if (!getTriple().isOSBinFormatCOFF() &&
1264 LV.getVisibility() != HiddenVisibility)
1265 return false;
1266
1267 return !AlwaysHasLTOVisibilityPublic(RD);
1268}
1269
1270llvm::GlobalObject::VCallVisibility CodeGenModule::GetVCallVisibilityLevel(
1271 const CXXRecordDecl *RD, llvm::DenseSet<const CXXRecordDecl *> &Visited) {
1272 // If we have already visited this RD (which means this is a recursive call
1273 // since the initial call should have an empty Visited set), return the max
1274 // visibility. The recursive calls below compute the min between the result
1275 // of the recursive call and the current TypeVis, so returning the max here
1276 // ensures that it will have no effect on the current TypeVis.
1277 if (!Visited.insert(V: RD).second)
1278 return llvm::GlobalObject::VCallVisibilityTranslationUnit;
1279
1280 LinkageInfo LV = RD->getLinkageAndVisibility();
1281 llvm::GlobalObject::VCallVisibility TypeVis;
1282 if (!isExternallyVisible(L: LV.getLinkage()))
1283 TypeVis = llvm::GlobalObject::VCallVisibilityTranslationUnit;
1284 else if (HasHiddenLTOVisibility(RD))
1285 TypeVis = llvm::GlobalObject::VCallVisibilityLinkageUnit;
1286 else
1287 TypeVis = llvm::GlobalObject::VCallVisibilityPublic;
1288
1289 for (const auto &B : RD->bases())
1290 if (B.getType()->getAsCXXRecordDecl()->isDynamicClass())
1291 TypeVis = std::min(
1292 a: TypeVis,
1293 b: GetVCallVisibilityLevel(RD: B.getType()->getAsCXXRecordDecl(), Visited));
1294
1295 for (const auto &B : RD->vbases())
1296 if (B.getType()->getAsCXXRecordDecl()->isDynamicClass())
1297 TypeVis = std::min(
1298 a: TypeVis,
1299 b: GetVCallVisibilityLevel(RD: B.getType()->getAsCXXRecordDecl(), Visited));
1300
1301 return TypeVis;
1302}
1303
1304void CodeGenModule::EmitVTableTypeMetadata(const CXXRecordDecl *RD,
1305 llvm::GlobalVariable *VTable,
1306 const VTableLayout &VTLayout) {
  // Emit type metadata on vtables with LTO or IR instrumentation.
  // With IR instrumentation, the type metadata is used to identify vtable
  // definitions (for type profiling) among all global variables.
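  // (The !type annotations added below are what llvm.type.test and
  // llvm.type.checked.load match against for CFI, whole-program
  // devirtualization, and virtual function elimination.)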
1310 if (!getCodeGenOpts().LTOUnit && !getCodeGenOpts().hasProfileIRInstr())
1311 return;
1312
1313 CharUnits ComponentWidth = GetTargetTypeStoreSize(Ty: getVTableComponentType());
1314
1315 struct AddressPoint {
1316 const CXXRecordDecl *Base;
1317 size_t Offset;
1318 std::string TypeName;
1319 bool operator<(const AddressPoint &RHS) const {
1320 int D = TypeName.compare(str: RHS.TypeName);
1321 return D < 0 || (D == 0 && Offset < RHS.Offset);
1322 }
1323 };
1324 std::vector<AddressPoint> AddressPoints;
1325 for (auto &&AP : VTLayout.getAddressPoints()) {
1326 AddressPoint N{.Base: AP.first.getBase(),
1327 .Offset: VTLayout.getVTableOffset(i: AP.second.VTableIndex) +
1328 AP.second.AddressPointIndex,
1329 .TypeName: {}};
1330 llvm::raw_string_ostream Stream(N.TypeName);
1331 getCXXABI().getMangleContext().mangleCanonicalTypeName(
1332 T: QualType(N.Base->getTypeForDecl(), 0), Stream);
1333 AddressPoints.push_back(x: std::move(N));
1334 }
1335
1336 // Sort the address points for determinism.
1337 llvm::sort(C&: AddressPoints);
1338
1339 ArrayRef<VTableComponent> Comps = VTLayout.vtable_components();
1340 for (auto AP : AddressPoints) {
1341 // Create type metadata for the address point.
1342 AddVTableTypeMetadata(VTable, Offset: ComponentWidth * AP.Offset, RD: AP.Base);
1343
1344 // The class associated with each address point could also potentially be
1345 // used for indirect calls via a member function pointer, so we need to
1346 // annotate the address of each function pointer with the appropriate member
1347 // function pointer type.
1348 for (unsigned I = 0; I != Comps.size(); ++I) {
1349 if (Comps[I].getKind() != VTableComponent::CK_FunctionPointer)
1350 continue;
1351 llvm::Metadata *MD = CreateMetadataIdentifierForVirtualMemPtrType(
1352 T: Context.getMemberPointerType(
1353 T: Comps[I].getFunctionDecl()->getType(),
1354 Cls: Context.getRecordType(AP.Base).getTypePtr()));
1355 VTable->addTypeMetadata(Offset: (ComponentWidth * I).getQuantity(), TypeID: MD);
1356 }
1357 }
1358
1359 if (getCodeGenOpts().VirtualFunctionElimination ||
1360 getCodeGenOpts().WholeProgramVTables) {
1361 llvm::DenseSet<const CXXRecordDecl *> Visited;
1362 llvm::GlobalObject::VCallVisibility TypeVis =
1363 GetVCallVisibilityLevel(RD, Visited);
1364 if (TypeVis != llvm::GlobalObject::VCallVisibilityPublic)
1365 VTable->setVCallVisibilityMetadata(TypeVis);
1366 }
1367}
1368
