Diffstat (limited to 'lib/CodeGen/CGClass.cpp')
-rw-r--r--  lib/CodeGen/CGClass.cpp | 177
1 file changed, 105 insertions(+), 72 deletions(-)
diff --git a/lib/CodeGen/CGClass.cpp b/lib/CodeGen/CGClass.cpp
index 50d702c62268..a6915071ec17 100644
--- a/lib/CodeGen/CGClass.cpp
+++ b/lib/CodeGen/CGClass.cpp
@@ -129,14 +129,16 @@ Address
CodeGenFunction::EmitCXXMemberDataPointerAddress(const Expr *E, Address base,
llvm::Value *memberPtr,
const MemberPointerType *memberPtrType,
- LValueBaseInfo *BaseInfo) {
+ LValueBaseInfo *BaseInfo,
+ TBAAAccessInfo *TBAAInfo) {
// Ask the ABI to compute the actual address.
llvm::Value *ptr =
CGM.getCXXABI().EmitMemberDataPointerAddress(*this, E, base,
memberPtr, memberPtrType);
QualType memberType = memberPtrType->getPointeeType();
- CharUnits memberAlign = getNaturalTypeAlignment(memberType, BaseInfo);
+ CharUnits memberAlign = getNaturalTypeAlignment(memberType, BaseInfo,
+ TBAAInfo);
memberAlign =
CGM.getDynamicOffsetAlignment(base.getAlignment(),
memberPtrType->getClass()->getAsCXXRecordDecl(),
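For context, this helper computes the address of a dereferenced pointer to data member, now also propagating TBAA access info. A minimal sketch of the source pattern it handles (names are illustrative, not from this patch):

  struct S { int x; };

  int S::*pm = &S::x;        // pointer to data member

  int load(S &s) {
    return s.*pm;            // the address of s.*pm is what this helper produces
  }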
@@ -1413,10 +1415,11 @@ void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) {
// possible to delegate the destructor body to the complete
// destructor. Do so.
if (DtorType == Dtor_Deleting) {
+ RunCleanupsScope DtorEpilogue(*this);
EnterDtorCleanups(Dtor, Dtor_Deleting);
- EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
- /*Delegating=*/false, LoadCXXThisAddress());
- PopCleanupBlock();
+ if (HaveInsertPoint())
+ EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
+ /*Delegating=*/false, LoadCXXThisAddress());
return;
}
@@ -1512,6 +1515,13 @@ void CodeGenFunction::emitImplicitAssignmentOperatorBody(FunctionArgList &Args)
}
namespace {
+ llvm::Value *LoadThisForDtorDelete(CodeGenFunction &CGF,
+ const CXXDestructorDecl *DD) {
+ if (Expr *ThisArg = DD->getOperatorDeleteThisArg())
+ return CGF.EmitScalarExpr(ThisArg);
+ return CGF.LoadCXXThis();
+ }
+
/// Call the operator delete associated with the current destructor.
struct CallDtorDelete final : EHScopeStack::Cleanup {
CallDtorDelete() {}
@@ -1519,11 +1529,38 @@ namespace {
void Emit(CodeGenFunction &CGF, Flags flags) override {
const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl);
const CXXRecordDecl *ClassDecl = Dtor->getParent();
- CGF.EmitDeleteCall(Dtor->getOperatorDelete(), CGF.LoadCXXThis(),
+ CGF.EmitDeleteCall(Dtor->getOperatorDelete(),
+ LoadThisForDtorDelete(CGF, Dtor),
CGF.getContext().getTagDeclType(ClassDecl));
}
};
+ void EmitConditionalDtorDeleteCall(CodeGenFunction &CGF,
+ llvm::Value *ShouldDeleteCondition,
+ bool ReturnAfterDelete) {
+ llvm::BasicBlock *callDeleteBB = CGF.createBasicBlock("dtor.call_delete");
+ llvm::BasicBlock *continueBB = CGF.createBasicBlock("dtor.continue");
+ llvm::Value *ShouldCallDelete
+ = CGF.Builder.CreateIsNull(ShouldDeleteCondition);
+ CGF.Builder.CreateCondBr(ShouldCallDelete, continueBB, callDeleteBB);
+
+ CGF.EmitBlock(callDeleteBB);
+ const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl);
+ const CXXRecordDecl *ClassDecl = Dtor->getParent();
+ CGF.EmitDeleteCall(Dtor->getOperatorDelete(),
+ LoadThisForDtorDelete(CGF, Dtor),
+ CGF.getContext().getTagDeclType(ClassDecl));
+ assert(Dtor->getOperatorDelete()->isDestroyingOperatorDelete() ==
+ ReturnAfterDelete &&
+ "unexpected value for ReturnAfterDelete");
+ if (ReturnAfterDelete)
+ CGF.EmitBranchThroughCleanup(CGF.ReturnBlock);
+ else
+ CGF.Builder.CreateBr(continueBB);
+
+ CGF.EmitBlock(continueBB);
+ }
+
struct CallDtorDeleteConditional final : EHScopeStack::Cleanup {
llvm::Value *ShouldDeleteCondition;
@@ -1534,20 +1571,8 @@ namespace {
}
void Emit(CodeGenFunction &CGF, Flags flags) override {
- llvm::BasicBlock *callDeleteBB = CGF.createBasicBlock("dtor.call_delete");
- llvm::BasicBlock *continueBB = CGF.createBasicBlock("dtor.continue");
- llvm::Value *ShouldCallDelete
- = CGF.Builder.CreateIsNull(ShouldDeleteCondition);
- CGF.Builder.CreateCondBr(ShouldCallDelete, continueBB, callDeleteBB);
-
- CGF.EmitBlock(callDeleteBB);
- const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl);
- const CXXRecordDecl *ClassDecl = Dtor->getParent();
- CGF.EmitDeleteCall(Dtor->getOperatorDelete(), CGF.LoadCXXThis(),
- CGF.getContext().getTagDeclType(ClassDecl));
- CGF.Builder.CreateBr(continueBB);
-
- CGF.EmitBlock(continueBB);
+ EmitConditionalDtorDeleteCall(CGF, ShouldDeleteCondition,
+ /*ReturnAfterDelete*/false);
}
};
@@ -1577,6 +1602,7 @@ namespace {
static void EmitSanitizerDtorCallback(CodeGenFunction &CGF, llvm::Value *Ptr,
CharUnits::QuantityType PoisonSize) {
+ CodeGenFunction::SanitizerScope SanScope(&CGF);
// Pass in void pointer and size of region as arguments to runtime
// function
llvm::Value *Args[] = {CGF.Builder.CreateBitCast(Ptr, CGF.VoidPtrTy),
@@ -1705,6 +1731,9 @@ namespace {
/// \brief Emit all code that comes at the end of class's
/// destructor. This is to call destructors on members and base classes
/// in reverse order of their construction.
+///
+/// For a deleting destructor, this also handles the case where a destroying
+/// operator delete completely overrides the definition.
void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
CXXDtorType DtorType) {
assert((!DD->isTrivial() || DD->hasAttr<DLLExportAttr>()) &&
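The destroying operator delete case mentioned in the new comment (C++2a, P0722) is the one where operator delete itself destroys the object, so the deleting destructor must not run the regular destructor first. A minimal sketch, assuming a hypothetical class Widget compiled as C++2a:

  #include <new>   // std::destroying_delete_t

  struct Widget {
    ~Widget();
    // The destroying form receives a still-live object; it is responsible
    // for running the destructor before deallocating the storage.
    void operator delete(Widget *p, std::destroying_delete_t) {
      p->~Widget();
      ::operator delete(p);
    }
  };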
@@ -1717,11 +1746,23 @@ void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
"operator delete missing - EnterDtorCleanups");
if (CXXStructorImplicitParamValue) {
// If there is an implicit param to the deleting dtor, it's a boolean
- // telling whether we should call delete at the end of the dtor.
- EHStack.pushCleanup<CallDtorDeleteConditional>(
- NormalAndEHCleanup, CXXStructorImplicitParamValue);
+ // telling whether this is a deleting destructor.
+ if (DD->getOperatorDelete()->isDestroyingOperatorDelete())
+ EmitConditionalDtorDeleteCall(*this, CXXStructorImplicitParamValue,
+ /*ReturnAfterDelete*/true);
+ else
+ EHStack.pushCleanup<CallDtorDeleteConditional>(
+ NormalAndEHCleanup, CXXStructorImplicitParamValue);
} else {
- EHStack.pushCleanup<CallDtorDelete>(NormalAndEHCleanup);
+ if (DD->getOperatorDelete()->isDestroyingOperatorDelete()) {
+ const CXXRecordDecl *ClassDecl = DD->getParent();
+ EmitDeleteCall(DD->getOperatorDelete(),
+ LoadThisForDtorDelete(*this, DD),
+ getContext().getTagDeclType(ClassDecl));
+ EmitBranchThroughCleanup(ReturnBlock);
+ } else {
+ EHStack.pushCleanup<CallDtorDelete>(NormalAndEHCleanup);
+ }
}
return;
}
@@ -2382,7 +2423,8 @@ void CodeGenFunction::InitializeVTablePointer(const VPtr &Vptr) {
VTableAddressPoint = Builder.CreateBitCast(VTableAddressPoint, VTablePtrTy);
llvm::StoreInst *Store = Builder.CreateStore(VTableAddressPoint, VTableField);
- CGM.DecorateInstructionWithTBAA(Store, CGM.getTBAAInfoForVTablePtr());
+ TBAAAccessInfo TBAAInfo = CGM.getTBAAVTablePtrAccessInfo(VTablePtrTy);
+ CGM.DecorateInstructionWithTBAA(Store, TBAAInfo);
if (CGM.getCodeGenOpts().OptimizationLevel > 0 &&
CGM.getCodeGenOpts().StrictVTablePointers)
CGM.DecorateInstructionWithInvariantGroup(Store, Vptr.VTableClass);
@@ -2476,7 +2518,8 @@ llvm::Value *CodeGenFunction::GetVTablePtr(Address This,
const CXXRecordDecl *RD) {
Address VTablePtrSrc = Builder.CreateElementBitCast(This, VTableTy);
llvm::Instruction *VTable = Builder.CreateLoad(VTablePtrSrc, "vtable");
- CGM.DecorateInstructionWithTBAA(VTable, CGM.getTBAAInfoForVTablePtr());
+ TBAAAccessInfo TBAAInfo = CGM.getTBAAVTablePtrAccessInfo(VTableTy);
+ CGM.DecorateInstructionWithTBAA(VTable, TBAAInfo);
if (CGM.getCodeGenOpts().OptimizationLevel > 0 &&
CGM.getCodeGenOpts().StrictVTablePointers)
@@ -2523,8 +2566,10 @@ LeastDerivedClassWithSameLayout(const CXXRecordDecl *RD) {
void CodeGenFunction::EmitTypeMetadataCodeForVCall(const CXXRecordDecl *RD,
llvm::Value *VTable,
SourceLocation Loc) {
- if (CGM.getCodeGenOpts().WholeProgramVTables &&
- CGM.HasHiddenLTOVisibility(RD)) {
+ if (SanOpts.has(SanitizerKind::CFIVCall))
+ EmitVTablePtrCheckForCall(RD, VTable, CodeGenFunction::CFITCK_VCall, Loc);
+ else if (CGM.getCodeGenOpts().WholeProgramVTables &&
+ CGM.HasHiddenLTOVisibility(RD)) {
llvm::Metadata *MD =
CGM.CreateMetadataIdentifierForType(QualType(RD->getTypeForDecl(), 0));
llvm::Value *TypeId =
@@ -2536,9 +2581,6 @@ void CodeGenFunction::EmitTypeMetadataCodeForVCall(const CXXRecordDecl *RD,
{CastedVTable, TypeId});
Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::assume), TypeTest);
}
-
- if (SanOpts.has(SanitizerKind::CFIVCall))
- EmitVTablePtrCheckForCall(RD, VTable, CodeGenFunction::CFITCK_VCall, Loc);
}
void CodeGenFunction::EmitVTablePtrCheckForCall(const CXXRecordDecl *RD,
@@ -2585,8 +2627,9 @@ void CodeGenFunction::EmitVTablePtrCheckForCast(QualType T,
EmitBlock(CheckBlock);
}
- llvm::Value *VTable =
- GetVTablePtr(Address(Derived, getPointerAlign()), Int8PtrTy, ClassDecl);
+ llvm::Value *VTable;
+ std::tie(VTable, ClassDecl) = CGM.getCXXABI().LoadVTablePtr(
+ *this, Address(Derived, getPointerAlign()), ClassDecl);
EmitVTablePtrCheck(ClassDecl, VTable, TCK, Loc);
@@ -2604,28 +2647,34 @@ void CodeGenFunction::EmitVTablePtrCheck(const CXXRecordDecl *RD,
!CGM.HasHiddenLTOVisibility(RD))
return;
- std::string TypeName = RD->getQualifiedNameAsString();
- if (getContext().getSanitizerBlacklist().isBlacklistedType(TypeName))
- return;
-
- SanitizerScope SanScope(this);
+ SanitizerMask M;
llvm::SanitizerStatKind SSK;
switch (TCK) {
case CFITCK_VCall:
+ M = SanitizerKind::CFIVCall;
SSK = llvm::SanStat_CFI_VCall;
break;
case CFITCK_NVCall:
+ M = SanitizerKind::CFINVCall;
SSK = llvm::SanStat_CFI_NVCall;
break;
case CFITCK_DerivedCast:
+ M = SanitizerKind::CFIDerivedCast;
SSK = llvm::SanStat_CFI_DerivedCast;
break;
case CFITCK_UnrelatedCast:
+ M = SanitizerKind::CFIUnrelatedCast;
SSK = llvm::SanStat_CFI_UnrelatedCast;
break;
case CFITCK_ICall:
llvm_unreachable("not expecting CFITCK_ICall");
}
+
+ std::string TypeName = RD->getQualifiedNameAsString();
+ if (getContext().getSanitizerBlacklist().isBlacklistedType(M, TypeName))
+ return;
+
+ SanitizerScope SanScope(this);
EmitSanitizerStatReport(SSK);
llvm::Metadata *MD =
@@ -2636,24 +2685,6 @@ void CodeGenFunction::EmitVTablePtrCheck(const CXXRecordDecl *RD,
llvm::Value *TypeTest = Builder.CreateCall(
CGM.getIntrinsic(llvm::Intrinsic::type_test), {CastedVTable, TypeId});
- SanitizerMask M;
- switch (TCK) {
- case CFITCK_VCall:
- M = SanitizerKind::CFIVCall;
- break;
- case CFITCK_NVCall:
- M = SanitizerKind::CFINVCall;
- break;
- case CFITCK_DerivedCast:
- M = SanitizerKind::CFIDerivedCast;
- break;
- case CFITCK_UnrelatedCast:
- M = SanitizerKind::CFIUnrelatedCast;
- break;
- case CFITCK_ICall:
- llvm_unreachable("not expecting CFITCK_ICall");
- }
-
llvm::Constant *StaticData[] = {
llvm::ConstantInt::get(Int8Ty, TCK),
EmitCheckSourceLocation(Loc),
@@ -2688,7 +2719,8 @@ bool CodeGenFunction::ShouldEmitVTableTypeCheckedLoad(const CXXRecordDecl *RD) {
return false;
std::string TypeName = RD->getQualifiedNameAsString();
- return !getContext().getSanitizerBlacklist().isBlacklistedType(TypeName);
+ return !getContext().getSanitizerBlacklist().isBlacklistedType(
+ SanitizerKind::CFIVCall, TypeName);
}
llvm::Value *CodeGenFunction::EmitVTableTypeCheckedLoad(
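Passing the specific SanitizerMask into isBlacklistedType lets a blacklist entry suppress a single CFI check kind rather than all of them. A hedged sketch of such a blacklist file (the type name is made up):

  # cfi-blacklist.txt (illustrative); entries under a section apply only
  # to that check kind
  [cfi-vcall]
  type:MyNamespace::Plugin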
@@ -2745,9 +2777,12 @@ void CodeGenFunction::EmitForwardingCallToLambda(
RValue RV = EmitCall(calleeFnInfo, callee, returnSlot, callArgs);
// If necessary, copy the returned value into the slot.
- if (!resultType->isVoidType() && returnSlot.isNull())
+ if (!resultType->isVoidType() && returnSlot.isNull()) {
+ if (getLangOpts().ObjCAutoRefCount && resultType->isObjCRetainableType()) {
+ RV = RValue::get(EmitARCRetainAutoreleasedReturnValue(RV.getScalarVal()));
+ }
EmitReturnOfRValue(RV, resultType);
- else
+ } else
EmitBranchThroughCleanup(ReturnBlock);
}
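Under ARC, the forwarding thunk now retains the autoreleased return value before returning it. A rough sketch of the kind of code affected, assuming Objective-C++ built with -fobjc-arc; the lambda-to-block conversion below is what emits the forwarding call:

  // The generated block invoke forwards to the lambda's call operator and
  // must retain the autoreleased NSString* result before returning it.
  NSString *(^block)(void) = []() -> NSString * { return @"lambda result"; };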
@@ -2755,6 +2790,15 @@ void CodeGenFunction::EmitLambdaBlockInvokeBody() {
const BlockDecl *BD = BlockInfo->getBlockDecl();
const VarDecl *variable = BD->capture_begin()->getVariable();
const CXXRecordDecl *Lambda = variable->getType()->getAsCXXRecordDecl();
+ const CXXMethodDecl *CallOp = Lambda->getLambdaCallOperator();
+
+ if (CallOp->isVariadic()) {
+ // FIXME: Making this work correctly is nasty because it requires either
+ // cloning the body of the call operator or making the call operator
+ // forward.
+ CGM.ErrorUnsupported(CurCodeDecl, "lambda conversion to variadic function");
+ return;
+ }
// Start building arguments for forwarding call
CallArgList CallArgs;
@@ -2769,18 +2813,7 @@ void CodeGenFunction::EmitLambdaBlockInvokeBody() {
assert(!Lambda->isGenericLambda() &&
"generic lambda interconversion to block not implemented");
- EmitForwardingCallToLambda(Lambda->getLambdaCallOperator(), CallArgs);
-}
-
-void CodeGenFunction::EmitLambdaToBlockPointerBody(FunctionArgList &Args) {
- if (cast<CXXMethodDecl>(CurCodeDecl)->isVariadic()) {
- // FIXME: Making this work correctly is nasty because it requires either
- // cloning the body of the call operator or making the call operator forward.
- CGM.ErrorUnsupported(CurCodeDecl, "lambda conversion to variadic function");
- return;
- }
-
- EmitFunctionBody(Args, cast<FunctionDecl>(CurGD.getDecl())->getBody());
+ EmitForwardingCallToLambda(CallOp, CallArgs);
}
void CodeGenFunction::EmitLambdaDelegatingInvokeBody(const CXXMethodDecl *MD) {
@@ -2813,7 +2846,7 @@ void CodeGenFunction::EmitLambdaDelegatingInvokeBody(const CXXMethodDecl *MD) {
EmitForwardingCallToLambda(CallOp, CallArgs);
}
-void CodeGenFunction::EmitLambdaStaticInvokeFunction(const CXXMethodDecl *MD) {
+void CodeGenFunction::EmitLambdaStaticInvokeBody(const CXXMethodDecl *MD) {
if (MD->isVariadic()) {
// FIXME: Making this work correctly is nasty because it requires either
// cloning the body of the call operator or making the call operator forward.