| author | Dimitry Andric <dim@FreeBSD.org> | 2019-10-23 17:52:09 +0000 |
|---|---|---|
| committer | Dimitry Andric <dim@FreeBSD.org> | 2019-10-23 17:52:09 +0000 |
| commit | 519fc96c475680de2cc49e7811dbbfadb912cbcc (patch) | |
| tree | 310ca684459b7e9ae13c9a3b9abf308b3a634afe /lib/Analysis | |
| parent | 2298981669bf3bd63335a4be179bc0f96823a8f4 (diff) | |
Diffstat (limited to 'lib/Analysis')
| mode | path | lines changed |
|---|---|---|
| -rw-r--r-- | lib/Analysis/AnalysisDeclContext.cpp | 23 |
| -rw-r--r-- | lib/Analysis/BodyFarm.cpp | 4 |
| -rw-r--r-- | lib/Analysis/CFG.cpp | 432 |
| -rw-r--r-- | lib/Analysis/CallGraph.cpp | 42 |
| -rw-r--r-- | lib/Analysis/CloneDetection.cpp | 3 |
| -rw-r--r-- | lib/Analysis/CocoaConventions.cpp | 4 |
| -rw-r--r-- | lib/Analysis/Consumed.cpp | 12 |
| -rw-r--r-- | lib/Analysis/PathDiagnostic.cpp | 1219 |
| -rw-r--r-- | lib/Analysis/ProgramPoint.cpp | 6 |
| -rw-r--r-- | lib/Analysis/ReachableCode.cpp | 2 |
| -rw-r--r-- | lib/Analysis/RetainSummaryManager.cpp | 2 |
| -rw-r--r-- | lib/Analysis/ThreadSafety.cpp | 30 |
| -rw-r--r-- | lib/Analysis/plugins/SampleAnalyzer/MainCallChecker.cpp | 4 |
13 files changed, 1630 insertions, 153 deletions
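For context on one of the behavioral changes in the CFG.cpp hunk below: it adds a checkIncorrectBitwiseOrOperator helper that treats a bitwise OR against a non-zero constant as always evaluating to true and reports it through the new compareBitwiseOr observer callback. A minimal, hypothetical C++ snippet of the kind of condition this is intended to flag (the function and flag value here are illustrative only, not taken from the commit):

    // Hypothetical example of the pattern the new check targets: a bitwise OR
    // with a non-zero constant in a boolean context is always true, which
    // usually means '&' (or '||') was intended.
    bool hasWriteFlag(unsigned flags) {
      if (flags | 0x2)   // always true; the author most likely meant (flags & 0x2)
        return true;
      return false;
    }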
diff --git a/lib/Analysis/AnalysisDeclContext.cpp b/lib/Analysis/AnalysisDeclContext.cpp index b6a429ff49ebc..9f58b5079c760 100644 --- a/lib/Analysis/AnalysisDeclContext.cpp +++ b/lib/Analysis/AnalysisDeclContext.cpp @@ -302,7 +302,7 @@ AnalysisDeclContext *AnalysisDeclContextManager::getContext(const Decl *D) { std::unique_ptr<AnalysisDeclContext> &AC = Contexts[D]; if (!AC) - AC = llvm::make_unique<AnalysisDeclContext>(this, D, cfgBuildOptions); + AC = std::make_unique<AnalysisDeclContext>(this, D, cfgBuildOptions); return AC.get(); } @@ -310,8 +310,10 @@ BodyFarm &AnalysisDeclContextManager::getBodyFarm() { return FunctionBodyFarm; } const StackFrameContext * AnalysisDeclContext::getStackFrame(LocationContext const *Parent, const Stmt *S, - const CFGBlock *Blk, unsigned Idx) { - return getLocationContextManager().getStackFrame(this, Parent, S, Blk, Idx); + const CFGBlock *Blk, unsigned BlockCount, + unsigned Idx) { + return getLocationContextManager().getStackFrame(this, Parent, S, Blk, + BlockCount, Idx); } const BlockInvocationContext * @@ -359,7 +361,8 @@ void LocationContext::ProfileCommon(llvm::FoldingSetNodeID &ID, } void StackFrameContext::Profile(llvm::FoldingSetNodeID &ID) { - Profile(ID, getAnalysisDeclContext(), getParent(), CallSite, Block, Index); + Profile(ID, getAnalysisDeclContext(), getParent(), CallSite, Block, + BlockCount, Index); } void ScopeContext::Profile(llvm::FoldingSetNodeID &ID) { @@ -392,18 +395,16 @@ LocationContextManager::getLocationContext(AnalysisDeclContext *ctx, return L; } -const StackFrameContext* -LocationContextManager::getStackFrame(AnalysisDeclContext *ctx, - const LocationContext *parent, - const Stmt *s, - const CFGBlock *blk, unsigned idx) { +const StackFrameContext *LocationContextManager::getStackFrame( + AnalysisDeclContext *ctx, const LocationContext *parent, const Stmt *s, + const CFGBlock *blk, unsigned blockCount, unsigned idx) { llvm::FoldingSetNodeID ID; - StackFrameContext::Profile(ID, ctx, parent, s, blk, idx); + StackFrameContext::Profile(ID, ctx, parent, s, blk, blockCount, idx); void *InsertPos; auto *L = cast_or_null<StackFrameContext>(Contexts.FindNodeOrInsertPos(ID, InsertPos)); if (!L) { - L = new StackFrameContext(ctx, parent, s, blk, idx, ++NewID); + L = new StackFrameContext(ctx, parent, s, blk, blockCount, idx, ++NewID); Contexts.InsertNode(L, InsertPos); } return L; diff --git a/lib/Analysis/BodyFarm.cpp b/lib/Analysis/BodyFarm.cpp index 576f86516017b..43f9e715b3de2 100644 --- a/lib/Analysis/BodyFarm.cpp +++ b/lib/Analysis/BodyFarm.cpp @@ -408,8 +408,8 @@ static Stmt *create_call_once(ASTContext &C, const FunctionDecl *D) { // reference. for (unsigned int ParamIdx = 2; ParamIdx < D->getNumParams(); ParamIdx++) { const ParmVarDecl *PDecl = D->getParamDecl(ParamIdx); - if (PDecl && - CallbackFunctionType->getParamType(ParamIdx - 2) + assert(PDecl); + if (CallbackFunctionType->getParamType(ParamIdx - 2) .getNonReferenceType() .getCanonicalType() != PDecl->getType().getNonReferenceType().getCanonicalType()) { diff --git a/lib/Analysis/CFG.cpp b/lib/Analysis/CFG.cpp index 0ed1e988a196a..a533a8d97b848 100644 --- a/lib/Analysis/CFG.cpp +++ b/lib/Analysis/CFG.cpp @@ -70,23 +70,47 @@ static SourceLocation GetEndLoc(Decl *D) { return D->getLocation(); } +/// Returns true on constant values based around a single IntegerLiteral. +/// Allow for use of parentheses, integer casts, and negative signs. 
+static bool IsIntegerLiteralConstantExpr(const Expr *E) { + // Allow parentheses + E = E->IgnoreParens(); + + // Allow conversions to different integer kind. + if (const auto *CE = dyn_cast<CastExpr>(E)) { + if (CE->getCastKind() != CK_IntegralCast) + return false; + E = CE->getSubExpr(); + } + + // Allow negative numbers. + if (const auto *UO = dyn_cast<UnaryOperator>(E)) { + if (UO->getOpcode() != UO_Minus) + return false; + E = UO->getSubExpr(); + } + + return isa<IntegerLiteral>(E); +} + /// Helper for tryNormalizeBinaryOperator. Attempts to extract an IntegerLiteral -/// or EnumConstantDecl from the given Expr. If it fails, returns nullptr. +/// constant expression or EnumConstantDecl from the given Expr. If it fails, +/// returns nullptr. static const Expr *tryTransformToIntOrEnumConstant(const Expr *E) { E = E->IgnoreParens(); - if (isa<IntegerLiteral>(E)) + if (IsIntegerLiteralConstantExpr(E)) return E; if (auto *DR = dyn_cast<DeclRefExpr>(E->IgnoreParenImpCasts())) return isa<EnumConstantDecl>(DR->getDecl()) ? DR : nullptr; return nullptr; } -/// Tries to interpret a binary operator into `Decl Op Expr` form, if Expr is -/// an integer literal or an enum constant. +/// Tries to interpret a binary operator into `Expr Op NumExpr` form, if +/// NumExpr is an integer literal or an enum constant. /// /// If this fails, at least one of the returned DeclRefExpr or Expr will be /// null. -static std::tuple<const DeclRefExpr *, BinaryOperatorKind, const Expr *> +static std::tuple<const Expr *, BinaryOperatorKind, const Expr *> tryNormalizeBinaryOperator(const BinaryOperator *B) { BinaryOperatorKind Op = B->getOpcode(); @@ -108,8 +132,7 @@ tryNormalizeBinaryOperator(const BinaryOperator *B) { Constant = tryTransformToIntOrEnumConstant(B->getLHS()); } - auto *D = dyn_cast<DeclRefExpr>(MaybeDecl->IgnoreParenImpCasts()); - return std::make_tuple(D, Op, Constant); + return std::make_tuple(MaybeDecl, Op, Constant); } /// For an expression `x == Foo && x == Bar`, this determines whether the @@ -121,11 +144,11 @@ tryNormalizeBinaryOperator(const BinaryOperator *B) { static bool areExprTypesCompatible(const Expr *E1, const Expr *E2) { // User intent isn't clear if they're mixing int literals with enum // constants. - if (isa<IntegerLiteral>(E1) != isa<IntegerLiteral>(E2)) + if (isa<DeclRefExpr>(E1) != isa<DeclRefExpr>(E2)) return false; // Integer literal comparisons, regardless of literal type, are acceptable. 
- if (isa<IntegerLiteral>(E1)) + if (!isa<DeclRefExpr>(E1)) return true; // IntegerLiterals are handled above and only EnumConstantDecls are expected @@ -525,7 +548,7 @@ private: CFGBlock *VisitCallExpr(CallExpr *C, AddStmtChoice asc); CFGBlock *VisitCaseStmt(CaseStmt *C); CFGBlock *VisitChooseExpr(ChooseExpr *C, AddStmtChoice asc); - CFGBlock *VisitCompoundStmt(CompoundStmt *C); + CFGBlock *VisitCompoundStmt(CompoundStmt *C, bool ExternallyDestructed); CFGBlock *VisitConditionalOperator(AbstractConditionalOperator *C, AddStmtChoice asc); CFGBlock *VisitContinueStmt(ContinueStmt *C); @@ -546,7 +569,8 @@ private: CFGBlock *VisitDeclSubExpr(DeclStmt *DS); CFGBlock *VisitDefaultStmt(DefaultStmt *D); CFGBlock *VisitDoStmt(DoStmt *D); - CFGBlock *VisitExprWithCleanups(ExprWithCleanups *E, AddStmtChoice asc); + CFGBlock *VisitExprWithCleanups(ExprWithCleanups *E, + AddStmtChoice asc, bool ExternallyDestructed); CFGBlock *VisitForStmt(ForStmt *F); CFGBlock *VisitGotoStmt(GotoStmt *G); CFGBlock *VisitGCCAsmStmt(GCCAsmStmt *G, AddStmtChoice asc); @@ -585,7 +609,8 @@ private: CFGBlock *VisitUnaryOperator(UnaryOperator *U, AddStmtChoice asc); CFGBlock *VisitWhileStmt(WhileStmt *W); - CFGBlock *Visit(Stmt *S, AddStmtChoice asc = AddStmtChoice::NotAlwaysAdd); + CFGBlock *Visit(Stmt *S, AddStmtChoice asc = AddStmtChoice::NotAlwaysAdd, + bool ExternallyDestructed = false); CFGBlock *VisitStmt(Stmt *S, AddStmtChoice asc); CFGBlock *VisitChildren(Stmt *S); CFGBlock *VisitNoRecurse(Expr *E, AddStmtChoice asc); @@ -656,15 +681,17 @@ private: // Visitors to walk an AST and generate destructors of temporaries in // full expression. - CFGBlock *VisitForTemporaryDtors(Stmt *E, bool BindToTemporary, + CFGBlock *VisitForTemporaryDtors(Stmt *E, bool ExternallyDestructed, TempDtorContext &Context); - CFGBlock *VisitChildrenForTemporaryDtors(Stmt *E, TempDtorContext &Context); + CFGBlock *VisitChildrenForTemporaryDtors(Stmt *E, bool ExternallyDestructed, + TempDtorContext &Context); CFGBlock *VisitBinaryOperatorForTemporaryDtors(BinaryOperator *E, + bool ExternallyDestructed, TempDtorContext &Context); CFGBlock *VisitCXXBindTemporaryExprForTemporaryDtors( - CXXBindTemporaryExpr *E, bool BindToTemporary, TempDtorContext &Context); + CXXBindTemporaryExpr *E, bool ExternallyDestructed, TempDtorContext &Context); CFGBlock *VisitConditionalOperatorForTemporaryDtors( - AbstractConditionalOperator *E, bool BindToTemporary, + AbstractConditionalOperator *E, bool ExternallyDestructed, TempDtorContext &Context); void InsertTempDtorDecisionBlock(const TempDtorContext &Context, CFGBlock *FalseSucc = nullptr); @@ -1017,34 +1044,34 @@ private: if (!LHS->isComparisonOp() || !RHS->isComparisonOp()) return {}; - const DeclRefExpr *Decl1; - const Expr *Expr1; + const Expr *DeclExpr1; + const Expr *NumExpr1; BinaryOperatorKind BO1; - std::tie(Decl1, BO1, Expr1) = tryNormalizeBinaryOperator(LHS); + std::tie(DeclExpr1, BO1, NumExpr1) = tryNormalizeBinaryOperator(LHS); - if (!Decl1 || !Expr1) + if (!DeclExpr1 || !NumExpr1) return {}; - const DeclRefExpr *Decl2; - const Expr *Expr2; + const Expr *DeclExpr2; + const Expr *NumExpr2; BinaryOperatorKind BO2; - std::tie(Decl2, BO2, Expr2) = tryNormalizeBinaryOperator(RHS); + std::tie(DeclExpr2, BO2, NumExpr2) = tryNormalizeBinaryOperator(RHS); - if (!Decl2 || !Expr2) + if (!DeclExpr2 || !NumExpr2) return {}; // Check that it is the same variable on both sides. 
- if (Decl1->getDecl() != Decl2->getDecl()) + if (!Expr::isSameComparisonOperand(DeclExpr1, DeclExpr2)) return {}; // Make sure the user's intent is clear (e.g. they're comparing against two // int literals, or two things from the same enum) - if (!areExprTypesCompatible(Expr1, Expr2)) + if (!areExprTypesCompatible(NumExpr1, NumExpr2)) return {}; Expr::EvalResult L1Result, L2Result; - if (!Expr1->EvaluateAsInt(L1Result, *Context) || - !Expr2->EvaluateAsInt(L2Result, *Context)) + if (!NumExpr1->EvaluateAsInt(L1Result, *Context) || + !NumExpr2->EvaluateAsInt(L2Result, *Context)) return {}; llvm::APSInt L1 = L1Result.Val.getInt(); @@ -1077,6 +1104,10 @@ private: // * Variable x is equal to the largest literal. // * Variable x is greater than largest literal. bool AlwaysTrue = true, AlwaysFalse = true; + // Track value of both subexpressions. If either side is always + // true/false, another warning should have already been emitted. + bool LHSAlwaysTrue = true, LHSAlwaysFalse = true; + bool RHSAlwaysTrue = true, RHSAlwaysFalse = true; for (const llvm::APSInt &Value : Values) { TryResult Res1, Res2; Res1 = analyzeLogicOperatorCondition(BO1, Value, L1); @@ -1092,16 +1123,47 @@ private: AlwaysTrue &= (Res1.isTrue() || Res2.isTrue()); AlwaysFalse &= !(Res1.isTrue() || Res2.isTrue()); } + + LHSAlwaysTrue &= Res1.isTrue(); + LHSAlwaysFalse &= Res1.isFalse(); + RHSAlwaysTrue &= Res2.isTrue(); + RHSAlwaysFalse &= Res2.isFalse(); } if (AlwaysTrue || AlwaysFalse) { - if (BuildOpts.Observer) + if (!LHSAlwaysTrue && !LHSAlwaysFalse && !RHSAlwaysTrue && + !RHSAlwaysFalse && BuildOpts.Observer) BuildOpts.Observer->compareAlwaysTrue(B, AlwaysTrue); return TryResult(AlwaysTrue); } return {}; } + /// A bitwise-or with a non-zero constant always evaluates to true. + TryResult checkIncorrectBitwiseOrOperator(const BinaryOperator *B) { + const Expr *LHSConstant = + tryTransformToIntOrEnumConstant(B->getLHS()->IgnoreParenImpCasts()); + const Expr *RHSConstant = + tryTransformToIntOrEnumConstant(B->getRHS()->IgnoreParenImpCasts()); + + if ((LHSConstant && RHSConstant) || (!LHSConstant && !RHSConstant)) + return {}; + + const Expr *Constant = LHSConstant ? LHSConstant : RHSConstant; + + Expr::EvalResult Result; + if (!Constant->EvaluateAsInt(Result, *Context)) + return {}; + + if (Result.Val.getInt() == 0) + return {}; + + if (BuildOpts.Observer) + BuildOpts.Observer->compareBitwiseOr(B); + + return TryResult(true); + } + /// Try and evaluate an expression to an integer constant. bool tryEvaluate(Expr *S, Expr::EvalResult &outResult) { if (!BuildOpts.PruneTriviallyFalseEdges) @@ -1119,7 +1181,7 @@ private: return {}; if (BinaryOperator *Bop = dyn_cast<BinaryOperator>(S)) { - if (Bop->isLogicalOp()) { + if (Bop->isLogicalOp() || Bop->isEqualityOp()) { // Check the cache first. CachedBoolEvalsTy::iterator I = CachedBoolEvals.find(S); if (I != CachedBoolEvals.end()) @@ -1203,6 +1265,10 @@ private: TryResult BopRes = checkIncorrectRelationalOperator(Bop); if (BopRes.isKnown()) return BopRes.isTrue(); + } else if (Bop->getOpcode() == BO_Or) { + TryResult BopRes = checkIncorrectBitwiseOrOperator(Bop); + if (BopRes.isKnown()) + return BopRes.isTrue(); } } @@ -1575,7 +1641,7 @@ CFGBlock *CFGBuilder::addInitializer(CXXCtorInitializer *I) { // Generate destructors for temporaries in initialization expression. 
TempDtorContext Context; VisitForTemporaryDtors(cast<ExprWithCleanups>(Init)->getSubExpr(), - /*BindToTemporary=*/false, Context); + /*ExternallyDestructed=*/false, Context); } } @@ -2051,7 +2117,8 @@ CFGBuilder::prependAutomaticObjScopeEndWithTerminator( /// Visit - Walk the subtree of a statement and add extra /// blocks for ternary operators, &&, and ||. We also process "," and /// DeclStmts (which may contain nested control-flow). -CFGBlock *CFGBuilder::Visit(Stmt * S, AddStmtChoice asc) { +CFGBlock *CFGBuilder::Visit(Stmt * S, AddStmtChoice asc, + bool ExternallyDestructed) { if (!S) { badCFG = true; return nullptr; @@ -2096,7 +2163,7 @@ CFGBlock *CFGBuilder::Visit(Stmt * S, AddStmtChoice asc) { return VisitChooseExpr(cast<ChooseExpr>(S), asc); case Stmt::CompoundStmtClass: - return VisitCompoundStmt(cast<CompoundStmt>(S)); + return VisitCompoundStmt(cast<CompoundStmt>(S), ExternallyDestructed); case Stmt::ConditionalOperatorClass: return VisitConditionalOperator(cast<ConditionalOperator>(S), asc); @@ -2108,7 +2175,8 @@ CFGBlock *CFGBuilder::Visit(Stmt * S, AddStmtChoice asc) { return VisitCXXCatchStmt(cast<CXXCatchStmt>(S)); case Stmt::ExprWithCleanupsClass: - return VisitExprWithCleanups(cast<ExprWithCleanups>(S), asc); + return VisitExprWithCleanups(cast<ExprWithCleanups>(S), + asc, ExternallyDestructed); case Stmt::CXXDefaultArgExprClass: case Stmt::CXXDefaultInitExprClass: @@ -2301,6 +2369,9 @@ CFGBlock *CFGBuilder::VisitUnaryOperator(UnaryOperator *U, appendStmt(Block, U); } + if (U->getOpcode() == UO_LNot) + tryEvaluateBool(U->getSubExpr()->IgnoreParens()); + return Visit(U->getSubExpr(), AddStmtChoice()); } @@ -2435,6 +2506,9 @@ CFGBlock *CFGBuilder::VisitBinaryOperator(BinaryOperator *B, appendStmt(Block, B); } + if (B->isEqualityOp() || B->isRelationalOp()) + tryEvaluateBool(B); + CFGBlock *RBlock = Visit(B->getRHS()); CFGBlock *LBlock = Visit(B->getLHS()); // If visiting RHS causes us to finish 'Block', e.g. the RHS is a StmtExpr @@ -2474,10 +2548,8 @@ CFGBlock *CFGBuilder::VisitBreakStmt(BreakStmt *B) { static bool CanThrow(Expr *E, ASTContext &Ctx) { QualType Ty = E->getType(); - if (Ty->isFunctionPointerType()) - Ty = Ty->getAs<PointerType>()->getPointeeType(); - else if (Ty->isBlockPointerType()) - Ty = Ty->getAs<BlockPointerType>()->getPointeeType(); + if (Ty->isFunctionPointerType() || Ty->isBlockPointerType()) + Ty = Ty->getPointeeType(); const FunctionType *FT = Ty->getAs<FunctionType>(); if (FT) { @@ -2603,7 +2675,7 @@ CFGBlock *CFGBuilder::VisitChooseExpr(ChooseExpr *C, return addStmt(C->getCond()); } -CFGBlock *CFGBuilder::VisitCompoundStmt(CompoundStmt *C) { +CFGBlock *CFGBuilder::VisitCompoundStmt(CompoundStmt *C, bool ExternallyDestructed) { LocalScope::const_iterator scopeBeginPos = ScopePos; addLocalScopeForStmt(C); @@ -2619,11 +2691,16 @@ CFGBlock *CFGBuilder::VisitCompoundStmt(CompoundStmt *C) { I != E; ++I ) { // If we hit a segment of code just containing ';' (NullStmts), we can // get a null block back. In such cases, just use the LastBlock - if (CFGBlock *newBlock = addStmt(*I)) + CFGBlock *newBlock = Visit(*I, AddStmtChoice::AlwaysAdd, + ExternallyDestructed); + + if (newBlock) LastBlock = newBlock; if (badCFG) return nullptr; + + ExternallyDestructed = false; } return LastBlock; @@ -2766,7 +2843,7 @@ CFGBlock *CFGBuilder::VisitDeclSubExpr(DeclStmt *DS) { // Generate destructors for temporaries in initialization expression. 
TempDtorContext Context; VisitForTemporaryDtors(cast<ExprWithCleanups>(Init)->getSubExpr(), - /*BindToTemporary=*/false, Context); + /*ExternallyDestructed=*/true, Context); } } @@ -2980,9 +3057,17 @@ CFGBlock *CFGBuilder::VisitReturnStmt(Stmt *S) { if (!Block->hasNoReturnElement()) addSuccessor(Block, &cfg->getExit()); - // Add the return statement to the block. This may create new blocks if R - // contains control-flow (short-circuit operations). - return VisitStmt(S, AddStmtChoice::AlwaysAdd); + // Add the return statement to the block. + appendStmt(Block, S); + + // Visit children + if (ReturnStmt *RS = dyn_cast<ReturnStmt>(S)) { + if (Expr *O = RS->getRetValue()) + return Visit(O, AddStmtChoice::AlwaysAdd, /*ExternallyDestructed=*/true); + return Block; + } else { // co_return + return VisitChildren(S); + } } CFGBlock *CFGBuilder::VisitSEHExceptStmt(SEHExceptStmt *ES) { @@ -3014,7 +3099,7 @@ CFGBlock *CFGBuilder::VisitSEHExceptStmt(SEHExceptStmt *ES) { } CFGBlock *CFGBuilder::VisitSEHFinallyStmt(SEHFinallyStmt *FS) { - return VisitCompoundStmt(FS->getBlock()); + return VisitCompoundStmt(FS->getBlock(), /*ExternallyDestructed=*/false); } CFGBlock *CFGBuilder::VisitSEHLeaveStmt(SEHLeaveStmt *LS) { @@ -3898,7 +3983,7 @@ CFGBlock *CFGBuilder::VisitStmtExpr(StmtExpr *SE, AddStmtChoice asc) { autoCreateBlock(); appendStmt(Block, SE); } - return VisitCompoundStmt(SE->getSubStmt()); + return VisitCompoundStmt(SE->getSubStmt(), /*ExternallyDestructed=*/true); } CFGBlock *CFGBuilder::VisitSwitchStmt(SwitchStmt *Terminator) { @@ -4363,12 +4448,12 @@ CFGBlock *CFGBuilder::VisitCXXForRangeStmt(CXXForRangeStmt *S) { } CFGBlock *CFGBuilder::VisitExprWithCleanups(ExprWithCleanups *E, - AddStmtChoice asc) { + AddStmtChoice asc, bool ExternallyDestructed) { if (BuildOpts.AddTemporaryDtors) { // If adding implicit destructors visit the full expression for adding // destructors of temporaries. TempDtorContext Context; - VisitForTemporaryDtors(E->getSubExpr(), false, Context); + VisitForTemporaryDtors(E->getSubExpr(), ExternallyDestructed, Context); // Full expression has to be added as CFGStmt so it will be sequenced // before destructors of it's temporaries. 
@@ -4477,6 +4562,10 @@ CFGBlock *CFGBuilder::VisitImplicitCastExpr(ImplicitCastExpr *E, autoCreateBlock(); appendStmt(Block, E); } + + if (E->getCastKind() == CK_IntegralToBoolean) + tryEvaluateBool(E->getSubExpr()->IgnoreParens()); + return Visit(E->getSubExpr(), AddStmtChoice()); } @@ -4504,7 +4593,7 @@ CFGBlock *CFGBuilder::VisitIndirectGotoStmt(IndirectGotoStmt *I) { return addStmt(I->getTarget()); } -CFGBlock *CFGBuilder::VisitForTemporaryDtors(Stmt *E, bool BindToTemporary, +CFGBlock *CFGBuilder::VisitForTemporaryDtors(Stmt *E, bool ExternallyDestructed, TempDtorContext &Context) { assert(BuildOpts.AddImplicitDtors && BuildOpts.AddTemporaryDtors); @@ -4515,28 +4604,32 @@ tryAgain: } switch (E->getStmtClass()) { default: - return VisitChildrenForTemporaryDtors(E, Context); + return VisitChildrenForTemporaryDtors(E, false, Context); + + case Stmt::InitListExprClass: + return VisitChildrenForTemporaryDtors(E, ExternallyDestructed, Context); case Stmt::BinaryOperatorClass: return VisitBinaryOperatorForTemporaryDtors(cast<BinaryOperator>(E), + ExternallyDestructed, Context); case Stmt::CXXBindTemporaryExprClass: return VisitCXXBindTemporaryExprForTemporaryDtors( - cast<CXXBindTemporaryExpr>(E), BindToTemporary, Context); + cast<CXXBindTemporaryExpr>(E), ExternallyDestructed, Context); case Stmt::BinaryConditionalOperatorClass: case Stmt::ConditionalOperatorClass: return VisitConditionalOperatorForTemporaryDtors( - cast<AbstractConditionalOperator>(E), BindToTemporary, Context); + cast<AbstractConditionalOperator>(E), ExternallyDestructed, Context); case Stmt::ImplicitCastExprClass: - // For implicit cast we want BindToTemporary to be passed further. + // For implicit cast we want ExternallyDestructed to be passed further. E = cast<CastExpr>(E)->getSubExpr(); goto tryAgain; case Stmt::CXXFunctionalCastExprClass: - // For functional cast we want BindToTemporary to be passed further. + // For functional cast we want ExternallyDestructed to be passed further. E = cast<CXXFunctionalCastExpr>(E)->getSubExpr(); goto tryAgain; @@ -4550,7 +4643,7 @@ tryAgain: case Stmt::MaterializeTemporaryExprClass: { const MaterializeTemporaryExpr* MTE = cast<MaterializeTemporaryExpr>(E); - BindToTemporary = (MTE->getStorageDuration() != SD_FullExpression); + ExternallyDestructed = (MTE->getStorageDuration() != SD_FullExpression); SmallVector<const Expr *, 2> CommaLHSs; SmallVector<SubobjectAdjustment, 2> Adjustments; // Find the expression whose lifetime needs to be extended. @@ -4561,7 +4654,7 @@ tryAgain: // Visit the skipped comma operator left-hand sides for other temporaries. for (const Expr *CommaLHS : CommaLHSs) { VisitForTemporaryDtors(const_cast<Expr *>(CommaLHS), - /*BindToTemporary=*/false, Context); + /*ExternallyDestructed=*/false, Context); } goto tryAgain; } @@ -4579,13 +4672,18 @@ tryAgain: for (Expr *Init : LE->capture_inits()) { if (Init) { if (CFGBlock *R = VisitForTemporaryDtors( - Init, /*BindToTemporary=*/false, Context)) + Init, /*ExternallyDestructed=*/true, Context)) B = R; } } return B; } + case Stmt::StmtExprClass: + // Don't recurse into statement expressions; any cleanups inside them + // will be wrapped in their own ExprWithCleanups. 
+ return Block; + case Stmt::CXXDefaultArgExprClass: E = cast<CXXDefaultArgExpr>(E)->getExpr(); goto tryAgain; @@ -4597,6 +4695,7 @@ tryAgain: } CFGBlock *CFGBuilder::VisitChildrenForTemporaryDtors(Stmt *E, + bool ExternallyDestructed, TempDtorContext &Context) { if (isa<LambdaExpr>(E)) { // Do not visit the children of lambdas; they have their own CFGs. @@ -4610,14 +4709,22 @@ CFGBlock *CFGBuilder::VisitChildrenForTemporaryDtors(Stmt *E, CFGBlock *B = Block; for (Stmt *Child : E->children()) if (Child) - if (CFGBlock *R = VisitForTemporaryDtors(Child, false, Context)) + if (CFGBlock *R = VisitForTemporaryDtors(Child, ExternallyDestructed, Context)) B = R; return B; } CFGBlock *CFGBuilder::VisitBinaryOperatorForTemporaryDtors( - BinaryOperator *E, TempDtorContext &Context) { + BinaryOperator *E, bool ExternallyDestructed, TempDtorContext &Context) { + if (E->isCommaOp()) { + // For comma operator LHS expression is visited + // before RHS expression. For destructors visit them in reverse order. + CFGBlock *RHSBlock = VisitForTemporaryDtors(E->getRHS(), ExternallyDestructed, Context); + CFGBlock *LHSBlock = VisitForTemporaryDtors(E->getLHS(), false, Context); + return LHSBlock ? LHSBlock : RHSBlock; + } + if (E->isLogicalOp()) { VisitForTemporaryDtors(E->getLHS(), false, Context); TryResult RHSExecuted = tryEvaluateBool(E->getLHS()); @@ -4652,10 +4759,11 @@ CFGBlock *CFGBuilder::VisitBinaryOperatorForTemporaryDtors( } CFGBlock *CFGBuilder::VisitCXXBindTemporaryExprForTemporaryDtors( - CXXBindTemporaryExpr *E, bool BindToTemporary, TempDtorContext &Context) { + CXXBindTemporaryExpr *E, bool ExternallyDestructed, TempDtorContext &Context) { // First add destructors for temporaries in subexpression. - CFGBlock *B = VisitForTemporaryDtors(E->getSubExpr(), false, Context); - if (!BindToTemporary) { + // Because VisitCXXBindTemporaryExpr calls setDestructed: + CFGBlock *B = VisitForTemporaryDtors(E->getSubExpr(), true, Context); + if (!ExternallyDestructed) { // If lifetime of temporary is not prolonged (by assigning to constant // reference) add destructor for it. @@ -4703,7 +4811,7 @@ void CFGBuilder::InsertTempDtorDecisionBlock(const TempDtorContext &Context, } CFGBlock *CFGBuilder::VisitConditionalOperatorForTemporaryDtors( - AbstractConditionalOperator *E, bool BindToTemporary, + AbstractConditionalOperator *E, bool ExternallyDestructed, TempDtorContext &Context) { VisitForTemporaryDtors(E->getCond(), false, Context); CFGBlock *ConditionBlock = Block; @@ -4714,14 +4822,14 @@ CFGBlock *CFGBuilder::VisitConditionalOperatorForTemporaryDtors( TempDtorContext TrueContext( bothKnownTrue(Context.KnownExecuted, ConditionVal)); - VisitForTemporaryDtors(E->getTrueExpr(), BindToTemporary, TrueContext); + VisitForTemporaryDtors(E->getTrueExpr(), ExternallyDestructed, TrueContext); CFGBlock *TrueBlock = Block; Block = ConditionBlock; Succ = ConditionSucc; TempDtorContext FalseContext( bothKnownTrue(Context.KnownExecuted, NegatedVal)); - VisitForTemporaryDtors(E->getFalseExpr(), BindToTemporary, FalseContext); + VisitForTemporaryDtors(E->getFalseExpr(), ExternallyDestructed, FalseContext); if (TrueContext.TerminatorExpr && FalseContext.TerminatorExpr) { InsertTempDtorDecisionBlock(FalseContext, TrueBlock); @@ -4755,7 +4863,8 @@ CFGBlock *CFGBuilder::VisitOMPExecutableDirective(OMPExecutableDirective *D, } // Visit associated structured block if any. 
if (!D->isStandaloneDirective()) - if (Stmt *S = D->getStructuredBlock()) { + if (CapturedStmt *CS = D->getInnermostCapturedStmt()) { + Stmt *S = CS->getCapturedStmt(); if (!isa<CompoundStmt>(S)) addLocalScopeAndDtors(S); if (CFGBlock *R = addStmt(S)) @@ -4867,9 +4976,13 @@ CFGImplicitDtor::getDestructorDecl(ASTContext &astContext) const { while (const ArrayType *arrayType = astContext.getAsArrayType(ty)) { ty = arrayType->getElementType(); } - const RecordType *recordType = ty->getAs<RecordType>(); - const CXXRecordDecl *classDecl = - cast<CXXRecordDecl>(recordType->getDecl()); + + // The situation when the type of the lifetime-extending reference + // does not correspond to the type of the object is supposed + // to be handled by now. In particular, 'ty' is now the unwrapped + // record type. + const CXXRecordDecl *classDecl = ty->getAsCXXRecordDecl(); + assert(classDecl); return classDecl->getDestructor(); } case CFGElement::DeleteDtor: { @@ -4894,12 +5007,6 @@ CFGImplicitDtor::getDestructorDecl(ASTContext &astContext) const { llvm_unreachable("getKind() returned bogus value"); } -bool CFGImplicitDtor::isNoReturn(ASTContext &astContext) const { - if (const CXXDestructorDecl *DD = getDestructorDecl(astContext)) - return DD->isNoReturn(); - return false; -} - //===----------------------------------------------------------------------===// // CFGBlock operations. //===----------------------------------------------------------------------===// @@ -4965,6 +5072,8 @@ class StmtPrinterHelper : public PrinterHelper { public: StmtPrinterHelper(const CFG* cfg, const LangOptions &LO) : LangOpts(LO) { + if (!cfg) + return; for (CFG::const_iterator I = cfg->begin(), E = cfg->end(); I != E; ++I ) { unsigned j = 1; for (CFGBlock::const_iterator BI = (*I)->begin(), BEnd = (*I)->end() ; @@ -5287,9 +5396,21 @@ static void print_construction_context(raw_ostream &OS, } static void print_elem(raw_ostream &OS, StmtPrinterHelper &Helper, + const CFGElement &E); + +void CFGElement::dumpToStream(llvm::raw_ostream &OS) const { + StmtPrinterHelper Helper(nullptr, {}); + print_elem(OS, Helper, *this); +} + +static void print_elem(raw_ostream &OS, StmtPrinterHelper &Helper, const CFGElement &E) { - if (Optional<CFGStmt> CS = E.getAs<CFGStmt>()) { - const Stmt *S = CS->getStmt(); + switch (E.getKind()) { + case CFGElement::Kind::Statement: + case CFGElement::Kind::CXXRecordTypedCall: + case CFGElement::Kind::Constructor: { + CFGStmt CS = E.castAs<CFGStmt>(); + const Stmt *S = CS.getStmt(); assert(S != nullptr && "Expecting non-null Stmt"); // special printing for statement-expressions. @@ -5341,70 +5462,96 @@ static void print_elem(raw_ostream &OS, StmtPrinterHelper &Helper, // Expressions need a newline. 
if (isa<Expr>(S)) OS << '\n'; - } else if (Optional<CFGInitializer> IE = E.getAs<CFGInitializer>()) { - print_initializer(OS, Helper, IE->getInitializer()); + + break; + } + + case CFGElement::Kind::Initializer: + print_initializer(OS, Helper, E.castAs<CFGInitializer>().getInitializer()); OS << '\n'; - } else if (Optional<CFGAutomaticObjDtor> DE = - E.getAs<CFGAutomaticObjDtor>()) { - const VarDecl *VD = DE->getVarDecl(); + break; + + case CFGElement::Kind::AutomaticObjectDtor: { + CFGAutomaticObjDtor DE = E.castAs<CFGAutomaticObjDtor>(); + const VarDecl *VD = DE.getVarDecl(); Helper.handleDecl(VD, OS); - ASTContext &ACtx = VD->getASTContext(); QualType T = VD->getType(); if (T->isReferenceType()) T = getReferenceInitTemporaryType(VD->getInit(), nullptr); - if (const ArrayType *AT = ACtx.getAsArrayType(T)) - T = ACtx.getBaseElementType(AT); - OS << ".~" << T->getAsCXXRecordDecl()->getName().str() << "()"; - OS << " (Implicit destructor)\n"; - } else if (Optional<CFGLifetimeEnds> DE = E.getAs<CFGLifetimeEnds>()) { - const VarDecl *VD = DE->getVarDecl(); - Helper.handleDecl(VD, OS); + OS << ".~"; + T.getUnqualifiedType().print(OS, PrintingPolicy(Helper.getLangOpts())); + OS << "() (Implicit destructor)\n"; + break; + } + case CFGElement::Kind::LifetimeEnds: + Helper.handleDecl(E.castAs<CFGLifetimeEnds>().getVarDecl(), OS); OS << " (Lifetime ends)\n"; - } else if (Optional<CFGLoopExit> LE = E.getAs<CFGLoopExit>()) { - const Stmt *LoopStmt = LE->getLoopStmt(); - OS << LoopStmt->getStmtClassName() << " (LoopExit)\n"; - } else if (Optional<CFGScopeBegin> SB = E.getAs<CFGScopeBegin>()) { + break; + + case CFGElement::Kind::LoopExit: + OS << E.castAs<CFGLoopExit>().getLoopStmt()->getStmtClassName() << " (LoopExit)\n"; + break; + + case CFGElement::Kind::ScopeBegin: OS << "CFGScopeBegin("; - if (const VarDecl *VD = SB->getVarDecl()) + if (const VarDecl *VD = E.castAs<CFGScopeBegin>().getVarDecl()) OS << VD->getQualifiedNameAsString(); OS << ")\n"; - } else if (Optional<CFGScopeEnd> SE = E.getAs<CFGScopeEnd>()) { + break; + + case CFGElement::Kind::ScopeEnd: OS << "CFGScopeEnd("; - if (const VarDecl *VD = SE->getVarDecl()) + if (const VarDecl *VD = E.castAs<CFGScopeEnd>().getVarDecl()) OS << VD->getQualifiedNameAsString(); OS << ")\n"; - } else if (Optional<CFGNewAllocator> NE = E.getAs<CFGNewAllocator>()) { + break; + + case CFGElement::Kind::NewAllocator: OS << "CFGNewAllocator("; - if (const CXXNewExpr *AllocExpr = NE->getAllocatorExpr()) + if (const CXXNewExpr *AllocExpr = E.castAs<CFGNewAllocator>().getAllocatorExpr()) AllocExpr->getType().print(OS, PrintingPolicy(Helper.getLangOpts())); OS << ")\n"; - } else if (Optional<CFGDeleteDtor> DE = E.getAs<CFGDeleteDtor>()) { - const CXXRecordDecl *RD = DE->getCXXRecordDecl(); + break; + + case CFGElement::Kind::DeleteDtor: { + CFGDeleteDtor DE = E.castAs<CFGDeleteDtor>(); + const CXXRecordDecl *RD = DE.getCXXRecordDecl(); if (!RD) return; CXXDeleteExpr *DelExpr = - const_cast<CXXDeleteExpr*>(DE->getDeleteExpr()); + const_cast<CXXDeleteExpr*>(DE.getDeleteExpr()); Helper.handledStmt(cast<Stmt>(DelExpr->getArgument()), OS); OS << "->~" << RD->getName().str() << "()"; OS << " (Implicit destructor)\n"; - } else if (Optional<CFGBaseDtor> BE = E.getAs<CFGBaseDtor>()) { - const CXXBaseSpecifier *BS = BE->getBaseSpecifier(); + break; + } + + case CFGElement::Kind::BaseDtor: { + const CXXBaseSpecifier *BS = E.castAs<CFGBaseDtor>().getBaseSpecifier(); OS << "~" << BS->getType()->getAsCXXRecordDecl()->getName() << "()"; OS << " (Base object destructor)\n"; - } 
else if (Optional<CFGMemberDtor> ME = E.getAs<CFGMemberDtor>()) { - const FieldDecl *FD = ME->getFieldDecl(); + break; + } + + case CFGElement::Kind::MemberDtor: { + const FieldDecl *FD = E.castAs<CFGMemberDtor>().getFieldDecl(); const Type *T = FD->getType()->getBaseElementTypeUnsafe(); OS << "this->" << FD->getName(); OS << ".~" << T->getAsCXXRecordDecl()->getName() << "()"; OS << " (Member object destructor)\n"; - } else if (Optional<CFGTemporaryDtor> TE = E.getAs<CFGTemporaryDtor>()) { - const CXXBindTemporaryExpr *BT = TE->getBindTemporaryExpr(); + break; + } + + case CFGElement::Kind::TemporaryDtor: { + const CXXBindTemporaryExpr *BT = E.castAs<CFGTemporaryDtor>().getBindTemporaryExpr(); OS << "~"; BT->getType().print(OS, PrintingPolicy(Helper.getLangOpts())); OS << "() (Temporary object destructor)\n"; + break; + } } } @@ -5615,6 +5762,10 @@ void CFG::print(raw_ostream &OS, const LangOptions &LO, bool ShowColors) const { OS.flush(); } +size_t CFGBlock::getIndexInCFG() const { + return llvm::find(*getParent(), this) - getParent()->begin(); +} + /// dump - A simply pretty printer of a CFGBlock that outputs to stderr. void CFGBlock::dump(const CFG* cfg, const LangOptions &LO, bool ShowColors) const { @@ -5652,6 +5803,71 @@ void CFGBlock::printTerminatorJson(raw_ostream &Out, const LangOptions &LO, Out << JsonFormat(TempOut.str(), AddQuotes); } +// Returns true if by simply looking at the block, we can be sure that it +// results in a sink during analysis. This is useful to know when the analysis +// was interrupted, and we try to figure out if it would sink eventually. +// There may be many more reasons why a sink would appear during analysis +// (eg. checkers may generate sinks arbitrarily), but here we only consider +// sinks that would be obvious by looking at the CFG. +static bool isImmediateSinkBlock(const CFGBlock *Blk) { + if (Blk->hasNoReturnElement()) + return true; + + // FIXME: Throw-expressions are currently generating sinks during analysis: + // they're not supported yet, and also often used for actually terminating + // the program. So we should treat them as sinks in this analysis as well, + // at least for now, but once we have better support for exceptions, + // we'd need to carefully handle the case when the throw is being + // immediately caught. + if (std::any_of(Blk->begin(), Blk->end(), [](const CFGElement &Elm) { + if (Optional<CFGStmt> StmtElm = Elm.getAs<CFGStmt>()) + if (isa<CXXThrowExpr>(StmtElm->getStmt())) + return true; + return false; + })) + return true; + + return false; +} + +bool CFGBlock::isInevitablySinking() const { + const CFG &Cfg = *getParent(); + + const CFGBlock *StartBlk = this; + if (isImmediateSinkBlock(StartBlk)) + return true; + + llvm::SmallVector<const CFGBlock *, 32> DFSWorkList; + llvm::SmallPtrSet<const CFGBlock *, 32> Visited; + + DFSWorkList.push_back(StartBlk); + while (!DFSWorkList.empty()) { + const CFGBlock *Blk = DFSWorkList.back(); + DFSWorkList.pop_back(); + Visited.insert(Blk); + + // If at least one path reaches the CFG exit, it means that control is + // returned to the caller. For now, say that we are not sure what + // happens next. If necessary, this can be improved to analyze + // the parent StackFrameContext's call site in a similar manner. 
+ if (Blk == &Cfg.getExit()) + return false; + + for (const auto &Succ : Blk->succs()) { + if (const CFGBlock *SuccBlk = Succ.getReachableBlock()) { + if (!isImmediateSinkBlock(SuccBlk) && !Visited.count(SuccBlk)) { + // If the block has reachable child blocks that aren't no-return, + // add them to the worklist. + DFSWorkList.push_back(SuccBlk); + } + } + } + } + + // Nothing reached the exit. It can only mean one thing: there's no return. + return true; +} + const Expr *CFGBlock::getLastCondition() const { // If the terminator is a temporary dtor or a virtual base, etc, we can't // retrieve a meaningful condition, bail out. diff --git a/lib/Analysis/CallGraph.cpp b/lib/Analysis/CallGraph.cpp index 7eda80ea0505e..76be292dad8d2 100644 --- a/lib/Analysis/CallGraph.cpp +++ b/lib/Analysis/CallGraph.cpp @@ -79,6 +79,37 @@ public: VisitChildren(CE); } + void VisitLambdaExpr(LambdaExpr *LE) { + if (FunctionTemplateDecl *FTD = LE->getDependentCallOperator()) + for (FunctionDecl *FD : FTD->specializations()) + G->VisitFunctionDecl(FD); + else if (CXXMethodDecl *MD = LE->getCallOperator()) + G->VisitFunctionDecl(MD); + } + + void VisitCXXNewExpr(CXXNewExpr *E) { + if (FunctionDecl *FD = E->getOperatorNew()) + addCalledDecl(FD); + VisitChildren(E); + } + + void VisitCXXConstructExpr(CXXConstructExpr *E) { + CXXConstructorDecl *Ctor = E->getConstructor(); + if (FunctionDecl *Def = Ctor->getDefinition()) + addCalledDecl(Def); + VisitChildren(E); + } + + // Include the evaluation of the default argument. + void VisitCXXDefaultArgExpr(CXXDefaultArgExpr *E) { + Visit(E->getExpr()); + } + + // Include the evaluation of the default initializers in a class. + void VisitCXXDefaultInitExpr(CXXDefaultInitExpr *E) { + Visit(E->getExpr()); + } + // Adds may-call edges for the ObjC message sends. void VisitObjCMessageExpr(ObjCMessageExpr *ME) { if (ObjCInterfaceDecl *IDecl = ME->getReceiverInterface()) { @@ -143,13 +174,20 @@ bool CallGraph::includeInGraph(const Decl *D) { void CallGraph::addNodeForDecl(Decl* D, bool IsGlobal) { assert(D); - // Allocate a new node, mark it as root, and process it's calls. + // Allocate a new node, mark it as root, and process its calls. CallGraphNode *Node = getOrInsertNode(D); // Process all the calls by this function as well. CGBuilder builder(this, Node); if (Stmt *Body = D->getBody()) builder.Visit(Body); + + // Include C++ constructor member initializers. + if (auto constructor = dyn_cast<CXXConstructorDecl>(D)) { + for (CXXCtorInitializer *init : constructor->inits()) { + builder.Visit(init->getInit()); + } + } } CallGraphNode *CallGraph::getNode(const Decl *F) const { @@ -166,7 +204,7 @@ CallGraphNode *CallGraph::getOrInsertNode(Decl *F) { if (Node) return Node.get(); - Node = llvm::make_unique<CallGraphNode>(F); + Node = std::make_unique<CallGraphNode>(F); // Make Root node a parent of all functions to make sure all are reachable. 
if (F) Root->addCallee(Node.get()); diff --git a/lib/Analysis/CloneDetection.cpp b/lib/Analysis/CloneDetection.cpp index 74328e8ae67fd..30d104165cc54 100644 --- a/lib/Analysis/CloneDetection.cpp +++ b/lib/Analysis/CloneDetection.cpp @@ -153,9 +153,8 @@ void OnlyLargestCloneConstraint::constrain( bool FilenamePatternConstraint::isAutoGenerated( const CloneDetector::CloneGroup &Group) { - std::string Error; if (IgnoredFilesPattern.empty() || Group.empty() || - !IgnoredFilesRegex->isValid(Error)) + !IgnoredFilesRegex->isValid()) return false; for (const StmtSequence &S : Group) { diff --git a/lib/Analysis/CocoaConventions.cpp b/lib/Analysis/CocoaConventions.cpp index b2ef426dead28..571d72e1a8416 100644 --- a/lib/Analysis/CocoaConventions.cpp +++ b/lib/Analysis/CocoaConventions.cpp @@ -38,8 +38,8 @@ bool cocoa::isRefType(QualType RetTy, StringRef Prefix, return false; // Is the type void*? - const PointerType* PT = RetTy->getAs<PointerType>(); - if (!(PT->getPointeeType().getUnqualifiedType()->isVoidType())) + const PointerType* PT = RetTy->castAs<PointerType>(); + if (!PT || !PT->getPointeeType().getUnqualifiedType()->isVoidType()) return false; // Does the name start with the prefix? diff --git a/lib/Analysis/Consumed.cpp b/lib/Analysis/Consumed.cpp index eee36d9caf7f1..cde753e8ec576 100644 --- a/lib/Analysis/Consumed.cpp +++ b/lib/Analysis/Consumed.cpp @@ -644,10 +644,10 @@ bool ConsumedStmtVisitor::handleCall(const CallExpr *Call, const Expr *ObjArg, continue; // Adjust state on the caller side. - if (isRValueRef(ParamType)) - setStateForVarOrTmp(StateMap, PInfo, consumed::CS_Consumed); - else if (ReturnTypestateAttr *RT = Param->getAttr<ReturnTypestateAttr>()) + if (ReturnTypestateAttr *RT = Param->getAttr<ReturnTypestateAttr>()) setStateForVarOrTmp(StateMap, PInfo, mapReturnTypestateAttrState(RT)); + else if (isRValueRef(ParamType) || isConsumableType(ParamType)) + setStateForVarOrTmp(StateMap, PInfo, consumed::CS_Consumed); else if (isPointerOrRef(ParamType) && (!ParamType->getPointeeType().isConstQualified() || isSetOnReadPtrType(ParamType))) @@ -1026,7 +1026,7 @@ void ConsumedBlockInfo::addInfo( } else if (OwnedStateMap) Entry = std::move(OwnedStateMap); else - Entry = llvm::make_unique<ConsumedStateMap>(*StateMap); + Entry = std::make_unique<ConsumedStateMap>(*StateMap); } void ConsumedBlockInfo::addInfo(const CFGBlock *Block, @@ -1058,7 +1058,7 @@ ConsumedBlockInfo::getInfo(const CFGBlock *Block) { assert(Block && "Block pointer must not be NULL"); auto &Entry = StateMapsArray[Block->getBlockID()]; - return isBackEdgeTarget(Block) ? llvm::make_unique<ConsumedStateMap>(*Entry) + return isBackEdgeTarget(Block) ? std::make_unique<ConsumedStateMap>(*Entry) : std::move(Entry); } @@ -1317,7 +1317,7 @@ void ConsumedAnalyzer::run(AnalysisDeclContext &AC) { BlockInfo = ConsumedBlockInfo(CFGraph->getNumBlockIDs(), SortedGraph); - CurrStates = llvm::make_unique<ConsumedStateMap>(); + CurrStates = std::make_unique<ConsumedStateMap>(); ConsumedStmtVisitor Visitor(*this, CurrStates.get()); // Add all trackable parameters to the state map. diff --git a/lib/Analysis/PathDiagnostic.cpp b/lib/Analysis/PathDiagnostic.cpp new file mode 100644 index 0000000000000..53235ba076994 --- /dev/null +++ b/lib/Analysis/PathDiagnostic.cpp @@ -0,0 +1,1219 @@ +//===- PathDiagnostic.cpp - Path-Specific Diagnostic Handling -------------===// +// +// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. +// See https://llvm.org/LICENSE.txt for license information. 
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception +// +//===----------------------------------------------------------------------===// +// +// This file defines the PathDiagnostic-related interfaces. +// +//===----------------------------------------------------------------------===// + +#include "clang/Analysis/PathDiagnostic.h" +#include "clang/AST/Decl.h" +#include "clang/AST/DeclBase.h" +#include "clang/AST/DeclCXX.h" +#include "clang/AST/DeclObjC.h" +#include "clang/AST/DeclTemplate.h" +#include "clang/AST/Expr.h" +#include "clang/AST/ExprCXX.h" +#include "clang/AST/OperationKinds.h" +#include "clang/AST/ParentMap.h" +#include "clang/AST/Stmt.h" +#include "clang/AST/Type.h" +#include "clang/Analysis/AnalysisDeclContext.h" +#include "clang/Analysis/CFG.h" +#include "clang/Analysis/ProgramPoint.h" +#include "clang/Basic/FileManager.h" +#include "clang/Basic/LLVM.h" +#include "clang/Basic/SourceLocation.h" +#include "clang/Basic/SourceManager.h" +#include "llvm/ADT/ArrayRef.h" +#include "llvm/ADT/FoldingSet.h" +#include "llvm/ADT/None.h" +#include "llvm/ADT/Optional.h" +#include "llvm/ADT/STLExtras.h" +#include "llvm/ADT/SmallString.h" +#include "llvm/ADT/SmallVector.h" +#include "llvm/ADT/StringExtras.h" +#include "llvm/ADT/StringRef.h" +#include "llvm/Support/Casting.h" +#include "llvm/Support/ErrorHandling.h" +#include "llvm/Support/raw_ostream.h" +#include <cassert> +#include <cstring> +#include <memory> +#include <utility> +#include <vector> + +using namespace clang; +using namespace ento; + +static StringRef StripTrailingDots(StringRef s) { + for (StringRef::size_type i = s.size(); i != 0; --i) + if (s[i - 1] != '.') + return s.substr(0, i); + return {}; +} + +PathDiagnosticPiece::PathDiagnosticPiece(StringRef s, + Kind k, DisplayHint hint) + : str(StripTrailingDots(s)), kind(k), Hint(hint) {} + +PathDiagnosticPiece::PathDiagnosticPiece(Kind k, DisplayHint hint) + : kind(k), Hint(hint) {} + +PathDiagnosticPiece::~PathDiagnosticPiece() = default; + +PathDiagnosticEventPiece::~PathDiagnosticEventPiece() = default; + +PathDiagnosticCallPiece::~PathDiagnosticCallPiece() = default; + +PathDiagnosticControlFlowPiece::~PathDiagnosticControlFlowPiece() = default; + +PathDiagnosticMacroPiece::~PathDiagnosticMacroPiece() = default; + +PathDiagnosticNotePiece::~PathDiagnosticNotePiece() = default; + +PathDiagnosticPopUpPiece::~PathDiagnosticPopUpPiece() = default; + +void PathPieces::flattenTo(PathPieces &Primary, PathPieces &Current, + bool ShouldFlattenMacros) const { + for (auto &Piece : *this) { + switch (Piece->getKind()) { + case PathDiagnosticPiece::Call: { + auto &Call = cast<PathDiagnosticCallPiece>(*Piece); + if (auto CallEnter = Call.getCallEnterEvent()) + Current.push_back(std::move(CallEnter)); + Call.path.flattenTo(Primary, Primary, ShouldFlattenMacros); + if (auto callExit = Call.getCallExitEvent()) + Current.push_back(std::move(callExit)); + break; + } + case PathDiagnosticPiece::Macro: { + auto &Macro = cast<PathDiagnosticMacroPiece>(*Piece); + if (ShouldFlattenMacros) { + Macro.subPieces.flattenTo(Primary, Primary, ShouldFlattenMacros); + } else { + Current.push_back(Piece); + PathPieces NewPath; + Macro.subPieces.flattenTo(Primary, NewPath, ShouldFlattenMacros); + // FIXME: This probably shouldn't mutate the original path piece. 
+ Macro.subPieces = NewPath; + } + break; + } + case PathDiagnosticPiece::Event: + case PathDiagnosticPiece::ControlFlow: + case PathDiagnosticPiece::Note: + case PathDiagnosticPiece::PopUp: + Current.push_back(Piece); + break; + } + } +} + +PathDiagnostic::~PathDiagnostic() = default; + +PathDiagnostic::PathDiagnostic( + StringRef CheckerName, const Decl *declWithIssue, StringRef bugtype, + StringRef verboseDesc, StringRef shortDesc, StringRef category, + PathDiagnosticLocation LocationToUnique, const Decl *DeclToUnique, + std::unique_ptr<FilesToLineNumsMap> ExecutedLines) + : CheckerName(CheckerName), DeclWithIssue(declWithIssue), + BugType(StripTrailingDots(bugtype)), + VerboseDesc(StripTrailingDots(verboseDesc)), + ShortDesc(StripTrailingDots(shortDesc)), + Category(StripTrailingDots(category)), UniqueingLoc(LocationToUnique), + UniqueingDecl(DeclToUnique), ExecutedLines(std::move(ExecutedLines)), + path(pathImpl) {} + +void PathDiagnosticConsumer::anchor() {} + +PathDiagnosticConsumer::~PathDiagnosticConsumer() { + // Delete the contents of the FoldingSet if it isn't empty already. + for (auto &Diag : Diags) + delete &Diag; +} + +void PathDiagnosticConsumer::HandlePathDiagnostic( + std::unique_ptr<PathDiagnostic> D) { + if (!D || D->path.empty()) + return; + + // We need to flatten the locations (convert Stmt* to locations) because + // the referenced statements may be freed by the time the diagnostics + // are emitted. + D->flattenLocations(); + + // If the PathDiagnosticConsumer does not support diagnostics that + // cross file boundaries, prune out such diagnostics now. + if (!supportsCrossFileDiagnostics()) { + // Verify that the entire path is from the same FileID. + FileID FID; + const SourceManager &SMgr = D->path.front()->getLocation().getManager(); + SmallVector<const PathPieces *, 5> WorkList; + WorkList.push_back(&D->path); + SmallString<128> buf; + llvm::raw_svector_ostream warning(buf); + warning << "warning: Path diagnostic report is not generated. Current " + << "output format does not support diagnostics that cross file " + << "boundaries. Refer to --analyzer-output for valid output " + << "formats\n"; + + while (!WorkList.empty()) { + const PathPieces &path = *WorkList.pop_back_val(); + + for (const auto &I : path) { + const PathDiagnosticPiece *piece = I.get(); + FullSourceLoc L = piece->getLocation().asLocation().getExpansionLoc(); + + if (FID.isInvalid()) { + FID = SMgr.getFileID(L); + } else if (SMgr.getFileID(L) != FID) { + llvm::errs() << warning.str(); + return; + } + + // Check the source ranges. + ArrayRef<SourceRange> Ranges = piece->getRanges(); + for (const auto &I : Ranges) { + SourceLocation L = SMgr.getExpansionLoc(I.getBegin()); + if (!L.isFileID() || SMgr.getFileID(L) != FID) { + llvm::errs() << warning.str(); + return; + } + L = SMgr.getExpansionLoc(I.getEnd()); + if (!L.isFileID() || SMgr.getFileID(L) != FID) { + llvm::errs() << warning.str(); + return; + } + } + + if (const auto *call = dyn_cast<PathDiagnosticCallPiece>(piece)) + WorkList.push_back(&call->path); + else if (const auto *macro = dyn_cast<PathDiagnosticMacroPiece>(piece)) + WorkList.push_back(¯o->subPieces); + } + } + + if (FID.isInvalid()) + return; // FIXME: Emit a warning? + } + + // Profile the node to see if we already have something matching it + llvm::FoldingSetNodeID profile; + D->Profile(profile); + void *InsertPos = nullptr; + + if (PathDiagnostic *orig = Diags.FindNodeOrInsertPos(profile, InsertPos)) { + // Keep the PathDiagnostic with the shorter path. 
+ // Note, the enclosing routine is called in deterministic order, so the + // results will be consistent between runs (no reason to break ties if the + // size is the same). + const unsigned orig_size = orig->full_size(); + const unsigned new_size = D->full_size(); + if (orig_size <= new_size) + return; + + assert(orig != D.get()); + Diags.RemoveNode(orig); + delete orig; + } + + Diags.InsertNode(D.release()); +} + +static Optional<bool> comparePath(const PathPieces &X, const PathPieces &Y); + +static Optional<bool> +compareControlFlow(const PathDiagnosticControlFlowPiece &X, + const PathDiagnosticControlFlowPiece &Y) { + FullSourceLoc XSL = X.getStartLocation().asLocation(); + FullSourceLoc YSL = Y.getStartLocation().asLocation(); + if (XSL != YSL) + return XSL.isBeforeInTranslationUnitThan(YSL); + FullSourceLoc XEL = X.getEndLocation().asLocation(); + FullSourceLoc YEL = Y.getEndLocation().asLocation(); + if (XEL != YEL) + return XEL.isBeforeInTranslationUnitThan(YEL); + return None; +} + +static Optional<bool> compareMacro(const PathDiagnosticMacroPiece &X, + const PathDiagnosticMacroPiece &Y) { + return comparePath(X.subPieces, Y.subPieces); +} + +static Optional<bool> compareCall(const PathDiagnosticCallPiece &X, + const PathDiagnosticCallPiece &Y) { + FullSourceLoc X_CEL = X.callEnter.asLocation(); + FullSourceLoc Y_CEL = Y.callEnter.asLocation(); + if (X_CEL != Y_CEL) + return X_CEL.isBeforeInTranslationUnitThan(Y_CEL); + FullSourceLoc X_CEWL = X.callEnterWithin.asLocation(); + FullSourceLoc Y_CEWL = Y.callEnterWithin.asLocation(); + if (X_CEWL != Y_CEWL) + return X_CEWL.isBeforeInTranslationUnitThan(Y_CEWL); + FullSourceLoc X_CRL = X.callReturn.asLocation(); + FullSourceLoc Y_CRL = Y.callReturn.asLocation(); + if (X_CRL != Y_CRL) + return X_CRL.isBeforeInTranslationUnitThan(Y_CRL); + return comparePath(X.path, Y.path); +} + +static Optional<bool> comparePiece(const PathDiagnosticPiece &X, + const PathDiagnosticPiece &Y) { + if (X.getKind() != Y.getKind()) + return X.getKind() < Y.getKind(); + + FullSourceLoc XL = X.getLocation().asLocation(); + FullSourceLoc YL = Y.getLocation().asLocation(); + if (XL != YL) + return XL.isBeforeInTranslationUnitThan(YL); + + if (X.getString() != Y.getString()) + return X.getString() < Y.getString(); + + if (X.getRanges().size() != Y.getRanges().size()) + return X.getRanges().size() < Y.getRanges().size(); + + const SourceManager &SM = XL.getManager(); + + for (unsigned i = 0, n = X.getRanges().size(); i < n; ++i) { + SourceRange XR = X.getRanges()[i]; + SourceRange YR = Y.getRanges()[i]; + if (XR != YR) { + if (XR.getBegin() != YR.getBegin()) + return SM.isBeforeInTranslationUnit(XR.getBegin(), YR.getBegin()); + return SM.isBeforeInTranslationUnit(XR.getEnd(), YR.getEnd()); + } + } + + switch (X.getKind()) { + case PathDiagnosticPiece::ControlFlow: + return compareControlFlow(cast<PathDiagnosticControlFlowPiece>(X), + cast<PathDiagnosticControlFlowPiece>(Y)); + case PathDiagnosticPiece::Macro: + return compareMacro(cast<PathDiagnosticMacroPiece>(X), + cast<PathDiagnosticMacroPiece>(Y)); + case PathDiagnosticPiece::Call: + return compareCall(cast<PathDiagnosticCallPiece>(X), + cast<PathDiagnosticCallPiece>(Y)); + case PathDiagnosticPiece::Event: + case PathDiagnosticPiece::Note: + case PathDiagnosticPiece::PopUp: + return None; + } + llvm_unreachable("all cases handled"); +} + +static Optional<bool> comparePath(const PathPieces &X, const PathPieces &Y) { + if (X.size() != Y.size()) + return X.size() < Y.size(); + + PathPieces::const_iterator X_I = 
X.begin(), X_end = X.end(); + PathPieces::const_iterator Y_I = Y.begin(), Y_end = Y.end(); + + for ( ; X_I != X_end && Y_I != Y_end; ++X_I, ++Y_I) { + Optional<bool> b = comparePiece(**X_I, **Y_I); + if (b.hasValue()) + return b.getValue(); + } + + return None; +} + +static bool compareCrossTUSourceLocs(FullSourceLoc XL, FullSourceLoc YL) { + std::pair<FileID, unsigned> XOffs = XL.getDecomposedLoc(); + std::pair<FileID, unsigned> YOffs = YL.getDecomposedLoc(); + const SourceManager &SM = XL.getManager(); + std::pair<bool, bool> InSameTU = SM.isInTheSameTranslationUnit(XOffs, YOffs); + if (InSameTU.first) + return XL.isBeforeInTranslationUnitThan(YL); + const FileEntry *XFE = SM.getFileEntryForID(XL.getSpellingLoc().getFileID()); + const FileEntry *YFE = SM.getFileEntryForID(YL.getSpellingLoc().getFileID()); + if (!XFE || !YFE) + return XFE && !YFE; + int NameCmp = XFE->getName().compare(YFE->getName()); + if (NameCmp != 0) + return NameCmp == -1; + // Last resort: Compare raw file IDs that are possibly expansions. + return XL.getFileID() < YL.getFileID(); +} + +static bool compare(const PathDiagnostic &X, const PathDiagnostic &Y) { + FullSourceLoc XL = X.getLocation().asLocation(); + FullSourceLoc YL = Y.getLocation().asLocation(); + if (XL != YL) + return compareCrossTUSourceLocs(XL, YL); + if (X.getBugType() != Y.getBugType()) + return X.getBugType() < Y.getBugType(); + if (X.getCategory() != Y.getCategory()) + return X.getCategory() < Y.getCategory(); + if (X.getVerboseDescription() != Y.getVerboseDescription()) + return X.getVerboseDescription() < Y.getVerboseDescription(); + if (X.getShortDescription() != Y.getShortDescription()) + return X.getShortDescription() < Y.getShortDescription(); + if (X.getDeclWithIssue() != Y.getDeclWithIssue()) { + const Decl *XD = X.getDeclWithIssue(); + if (!XD) + return true; + const Decl *YD = Y.getDeclWithIssue(); + if (!YD) + return false; + SourceLocation XDL = XD->getLocation(); + SourceLocation YDL = YD->getLocation(); + if (XDL != YDL) { + const SourceManager &SM = XL.getManager(); + return compareCrossTUSourceLocs(FullSourceLoc(XDL, SM), + FullSourceLoc(YDL, SM)); + } + } + PathDiagnostic::meta_iterator XI = X.meta_begin(), XE = X.meta_end(); + PathDiagnostic::meta_iterator YI = Y.meta_begin(), YE = Y.meta_end(); + if (XE - XI != YE - YI) + return (XE - XI) < (YE - YI); + for ( ; XI != XE ; ++XI, ++YI) { + if (*XI != *YI) + return (*XI) < (*YI); + } + Optional<bool> b = comparePath(X.path, Y.path); + assert(b.hasValue()); + return b.getValue(); +} + +void PathDiagnosticConsumer::FlushDiagnostics( + PathDiagnosticConsumer::FilesMade *Files) { + if (flushed) + return; + + flushed = true; + + std::vector<const PathDiagnostic *> BatchDiags; + for (const auto &D : Diags) + BatchDiags.push_back(&D); + + // Sort the diagnostics so that they are always emitted in a deterministic + // order. + int (*Comp)(const PathDiagnostic *const *, const PathDiagnostic *const *) = + [](const PathDiagnostic *const *X, const PathDiagnostic *const *Y) { + assert(*X != *Y && "PathDiagnostics not uniqued!"); + if (compare(**X, **Y)) + return -1; + assert(compare(**Y, **X) && "Not a total order!"); + return 1; + }; + array_pod_sort(BatchDiags.begin(), BatchDiags.end(), Comp); + + FlushDiagnosticsImpl(BatchDiags, Files); + + // Delete the flushed diagnostics. + for (const auto D : BatchDiags) + delete D; + + // Clear out the FoldingSet. 
+ Diags.clear(); +} + +PathDiagnosticConsumer::FilesMade::~FilesMade() { + for (PDFileEntry &Entry : Set) + Entry.~PDFileEntry(); +} + +void PathDiagnosticConsumer::FilesMade::addDiagnostic(const PathDiagnostic &PD, + StringRef ConsumerName, + StringRef FileName) { + llvm::FoldingSetNodeID NodeID; + NodeID.Add(PD); + void *InsertPos; + PDFileEntry *Entry = Set.FindNodeOrInsertPos(NodeID, InsertPos); + if (!Entry) { + Entry = Alloc.Allocate<PDFileEntry>(); + Entry = new (Entry) PDFileEntry(NodeID); + Set.InsertNode(Entry, InsertPos); + } + + // Allocate persistent storage for the file name. + char *FileName_cstr = (char*) Alloc.Allocate(FileName.size(), 1); + memcpy(FileName_cstr, FileName.data(), FileName.size()); + + Entry->files.push_back(std::make_pair(ConsumerName, + StringRef(FileName_cstr, + FileName.size()))); +} + +PathDiagnosticConsumer::PDFileEntry::ConsumerFiles * +PathDiagnosticConsumer::FilesMade::getFiles(const PathDiagnostic &PD) { + llvm::FoldingSetNodeID NodeID; + NodeID.Add(PD); + void *InsertPos; + PDFileEntry *Entry = Set.FindNodeOrInsertPos(NodeID, InsertPos); + if (!Entry) + return nullptr; + return &Entry->files; +} + +//===----------------------------------------------------------------------===// +// PathDiagnosticLocation methods. +//===----------------------------------------------------------------------===// + +SourceLocation PathDiagnosticLocation::getValidSourceLocation( + const Stmt *S, LocationOrAnalysisDeclContext LAC, bool UseEndOfStatement) { + SourceLocation L = UseEndOfStatement ? S->getEndLoc() : S->getBeginLoc(); + assert(!LAC.isNull() && + "A valid LocationContext or AnalysisDeclContext should be passed to " + "PathDiagnosticLocation upon creation."); + + // S might be a temporary statement that does not have a location in the + // source code, so find an enclosing statement and use its location. + if (!L.isValid()) { + AnalysisDeclContext *ADC; + if (LAC.is<const LocationContext*>()) + ADC = LAC.get<const LocationContext*>()->getAnalysisDeclContext(); + else + ADC = LAC.get<AnalysisDeclContext*>(); + + ParentMap &PM = ADC->getParentMap(); + + const Stmt *Parent = S; + do { + Parent = PM.getParent(Parent); + + // In rare cases, we have implicit top-level expressions, + // such as arguments for implicit member initializers. + // In this case, fall back to the start of the body (even if we were + // asked for the statement end location). + if (!Parent) { + const Stmt *Body = ADC->getBody(); + if (Body) + L = Body->getBeginLoc(); + else + L = ADC->getDecl()->getEndLoc(); + break; + } + + L = UseEndOfStatement ? Parent->getEndLoc() : Parent->getBeginLoc(); + } while (!L.isValid()); + } + + // FIXME: Ironically, this assert actually fails in some cases. 
+ //assert(L.isValid()); + return L; +} + +static PathDiagnosticLocation +getLocationForCaller(const StackFrameContext *SFC, + const LocationContext *CallerCtx, + const SourceManager &SM) { + const CFGBlock &Block = *SFC->getCallSiteBlock(); + CFGElement Source = Block[SFC->getIndex()]; + + switch (Source.getKind()) { + case CFGElement::Statement: + case CFGElement::Constructor: + case CFGElement::CXXRecordTypedCall: + return PathDiagnosticLocation(Source.castAs<CFGStmt>().getStmt(), + SM, CallerCtx); + case CFGElement::Initializer: { + const CFGInitializer &Init = Source.castAs<CFGInitializer>(); + return PathDiagnosticLocation(Init.getInitializer()->getInit(), + SM, CallerCtx); + } + case CFGElement::AutomaticObjectDtor: { + const CFGAutomaticObjDtor &Dtor = Source.castAs<CFGAutomaticObjDtor>(); + return PathDiagnosticLocation::createEnd(Dtor.getTriggerStmt(), + SM, CallerCtx); + } + case CFGElement::DeleteDtor: { + const CFGDeleteDtor &Dtor = Source.castAs<CFGDeleteDtor>(); + return PathDiagnosticLocation(Dtor.getDeleteExpr(), SM, CallerCtx); + } + case CFGElement::BaseDtor: + case CFGElement::MemberDtor: { + const AnalysisDeclContext *CallerInfo = CallerCtx->getAnalysisDeclContext(); + if (const Stmt *CallerBody = CallerInfo->getBody()) + return PathDiagnosticLocation::createEnd(CallerBody, SM, CallerCtx); + return PathDiagnosticLocation::create(CallerInfo->getDecl(), SM); + } + case CFGElement::NewAllocator: { + const CFGNewAllocator &Alloc = Source.castAs<CFGNewAllocator>(); + return PathDiagnosticLocation(Alloc.getAllocatorExpr(), SM, CallerCtx); + } + case CFGElement::TemporaryDtor: { + // Temporary destructors are for temporaries. They die immediately at around + // the location of CXXBindTemporaryExpr. If they are lifetime-extended, + // they'd be dealt with via an AutomaticObjectDtor instead. 
+ const auto &Dtor = Source.castAs<CFGTemporaryDtor>(); + return PathDiagnosticLocation::createEnd(Dtor.getBindTemporaryExpr(), SM, + CallerCtx); + } + case CFGElement::ScopeBegin: + case CFGElement::ScopeEnd: + llvm_unreachable("not yet implemented!"); + case CFGElement::LifetimeEnds: + case CFGElement::LoopExit: + llvm_unreachable("CFGElement kind should not be on callsite!"); + } + + llvm_unreachable("Unknown CFGElement kind"); +} + +PathDiagnosticLocation +PathDiagnosticLocation::createBegin(const Decl *D, + const SourceManager &SM) { + return PathDiagnosticLocation(D->getBeginLoc(), SM, SingleLocK); +} + +PathDiagnosticLocation +PathDiagnosticLocation::createBegin(const Stmt *S, + const SourceManager &SM, + LocationOrAnalysisDeclContext LAC) { + return PathDiagnosticLocation(getValidSourceLocation(S, LAC), + SM, SingleLocK); +} + +PathDiagnosticLocation +PathDiagnosticLocation::createEnd(const Stmt *S, + const SourceManager &SM, + LocationOrAnalysisDeclContext LAC) { + if (const auto *CS = dyn_cast<CompoundStmt>(S)) + return createEndBrace(CS, SM); + return PathDiagnosticLocation(getValidSourceLocation(S, LAC, /*End=*/true), + SM, SingleLocK); +} + +PathDiagnosticLocation +PathDiagnosticLocation::createOperatorLoc(const BinaryOperator *BO, + const SourceManager &SM) { + return PathDiagnosticLocation(BO->getOperatorLoc(), SM, SingleLocK); +} + +PathDiagnosticLocation +PathDiagnosticLocation::createConditionalColonLoc( + const ConditionalOperator *CO, + const SourceManager &SM) { + return PathDiagnosticLocation(CO->getColonLoc(), SM, SingleLocK); +} + +PathDiagnosticLocation +PathDiagnosticLocation::createMemberLoc(const MemberExpr *ME, + const SourceManager &SM) { + + assert(ME->getMemberLoc().isValid() || ME->getBeginLoc().isValid()); + + // In some cases, getMemberLoc isn't valid -- in this case we'll return with + // some other related valid SourceLocation. + if (ME->getMemberLoc().isValid()) + return PathDiagnosticLocation(ME->getMemberLoc(), SM, SingleLocK); + + return PathDiagnosticLocation(ME->getBeginLoc(), SM, SingleLocK); +} + +PathDiagnosticLocation +PathDiagnosticLocation::createBeginBrace(const CompoundStmt *CS, + const SourceManager &SM) { + SourceLocation L = CS->getLBracLoc(); + return PathDiagnosticLocation(L, SM, SingleLocK); +} + +PathDiagnosticLocation +PathDiagnosticLocation::createEndBrace(const CompoundStmt *CS, + const SourceManager &SM) { + SourceLocation L = CS->getRBracLoc(); + return PathDiagnosticLocation(L, SM, SingleLocK); +} + +PathDiagnosticLocation +PathDiagnosticLocation::createDeclBegin(const LocationContext *LC, + const SourceManager &SM) { + // FIXME: Should handle CXXTryStmt if analyser starts supporting C++. + if (const auto *CS = dyn_cast_or_null<CompoundStmt>(LC->getDecl()->getBody())) + if (!CS->body_empty()) { + SourceLocation Loc = (*CS->body_begin())->getBeginLoc(); + return PathDiagnosticLocation(Loc, SM, SingleLocK); + } + + return PathDiagnosticLocation(); +} + +PathDiagnosticLocation +PathDiagnosticLocation::createDeclEnd(const LocationContext *LC, + const SourceManager &SM) { + SourceLocation L = LC->getDecl()->getBodyRBrace(); + return PathDiagnosticLocation(L, SM, SingleLocK); +} + +PathDiagnosticLocation +PathDiagnosticLocation::create(const ProgramPoint& P, + const SourceManager &SMng) { + const Stmt* S = nullptr; + if (Optional<BlockEdge> BE = P.getAs<BlockEdge>()) { + const CFGBlock *BSrc = BE->getSrc(); + if (BSrc->getTerminator().isVirtualBaseBranch()) { + // TODO: VirtualBaseBranches should also appear for destructors. 
+ // In this case we should put the diagnostic at the end of decl. + return PathDiagnosticLocation::createBegin( + P.getLocationContext()->getDecl(), SMng); + + } else { + S = BSrc->getTerminatorCondition(); + if (!S) { + // If the BlockEdge has no terminator condition statement but its + // source is the entry of the CFG (e.g. a checker created the branch at + // the beginning of a function), use the function's declaration instead. + assert(BSrc == &BSrc->getParent()->getEntry() && "CFGBlock has no " + "TerminatorCondition and is not the entry block of the CFG"); + return PathDiagnosticLocation::createBegin( + P.getLocationContext()->getDecl(), SMng); + } + } + } else if (Optional<StmtPoint> SP = P.getAs<StmtPoint>()) { + S = SP->getStmt(); + if (P.getAs<PostStmtPurgeDeadSymbols>()) + return PathDiagnosticLocation::createEnd(S, SMng, P.getLocationContext()); + } else if (Optional<PostInitializer> PIP = P.getAs<PostInitializer>()) { + return PathDiagnosticLocation(PIP->getInitializer()->getSourceLocation(), + SMng); + } else if (Optional<PreImplicitCall> PIC = P.getAs<PreImplicitCall>()) { + return PathDiagnosticLocation(PIC->getLocation(), SMng); + } else if (Optional<PostImplicitCall> PIE = P.getAs<PostImplicitCall>()) { + return PathDiagnosticLocation(PIE->getLocation(), SMng); + } else if (Optional<CallEnter> CE = P.getAs<CallEnter>()) { + return getLocationForCaller(CE->getCalleeContext(), + CE->getLocationContext(), + SMng); + } else if (Optional<CallExitEnd> CEE = P.getAs<CallExitEnd>()) { + return getLocationForCaller(CEE->getCalleeContext(), + CEE->getLocationContext(), + SMng); + } else if (auto CEB = P.getAs<CallExitBegin>()) { + if (const ReturnStmt *RS = CEB->getReturnStmt()) + return PathDiagnosticLocation::createBegin(RS, SMng, + CEB->getLocationContext()); + return PathDiagnosticLocation( + CEB->getLocationContext()->getDecl()->getSourceRange().getEnd(), SMng); + } else if (Optional<BlockEntrance> BE = P.getAs<BlockEntrance>()) { + if (Optional<CFGElement> BlockFront = BE->getFirstElement()) { + if (auto StmtElt = BlockFront->getAs<CFGStmt>()) { + return PathDiagnosticLocation(StmtElt->getStmt()->getBeginLoc(), SMng); + } else if (auto NewAllocElt = BlockFront->getAs<CFGNewAllocator>()) { + return PathDiagnosticLocation( + NewAllocElt->getAllocatorExpr()->getBeginLoc(), SMng); + } + llvm_unreachable("Unexpected CFG element at front of block"); + } + + return PathDiagnosticLocation( + BE->getBlock()->getTerminatorStmt()->getBeginLoc(), SMng); + } else if (Optional<FunctionExitPoint> FE = P.getAs<FunctionExitPoint>()) { + return PathDiagnosticLocation(FE->getStmt(), SMng, + FE->getLocationContext()); + } else { + llvm_unreachable("Unexpected ProgramPoint"); + } + + return PathDiagnosticLocation(S, SMng, P.getLocationContext()); +} + +PathDiagnosticLocation PathDiagnosticLocation::createSingleLocation( + const PathDiagnosticLocation &PDL) { + FullSourceLoc L = PDL.asLocation(); + return PathDiagnosticLocation(L, L.getManager(), SingleLocK); +} + +FullSourceLoc + PathDiagnosticLocation::genLocation(SourceLocation L, + LocationOrAnalysisDeclContext LAC) const { + assert(isValid()); + // Note that we want a 'switch' here so that the compiler can warn us in + // case we add more cases. + switch (K) { + case SingleLocK: + case RangeK: + break; + case StmtK: + // Defensive checking. + if (!S) + break; + return FullSourceLoc(getValidSourceLocation(S, LAC), + const_cast<SourceManager&>(*SM)); + case DeclK: + // Defensive checking. 
+ if (!D) + break; + return FullSourceLoc(D->getLocation(), const_cast<SourceManager&>(*SM)); + } + + return FullSourceLoc(L, const_cast<SourceManager&>(*SM)); +} + +PathDiagnosticRange + PathDiagnosticLocation::genRange(LocationOrAnalysisDeclContext LAC) const { + assert(isValid()); + // Note that we want a 'switch' here so that the compiler can warn us in + // case we add more cases. + switch (K) { + case SingleLocK: + return PathDiagnosticRange(SourceRange(Loc,Loc), true); + case RangeK: + break; + case StmtK: { + const Stmt *S = asStmt(); + switch (S->getStmtClass()) { + default: + break; + case Stmt::DeclStmtClass: { + const auto *DS = cast<DeclStmt>(S); + if (DS->isSingleDecl()) { + // Should always be the case, but we'll be defensive. + return SourceRange(DS->getBeginLoc(), + DS->getSingleDecl()->getLocation()); + } + break; + } + // FIXME: Provide better range information for different + // terminators. + case Stmt::IfStmtClass: + case Stmt::WhileStmtClass: + case Stmt::DoStmtClass: + case Stmt::ForStmtClass: + case Stmt::ChooseExprClass: + case Stmt::IndirectGotoStmtClass: + case Stmt::SwitchStmtClass: + case Stmt::BinaryConditionalOperatorClass: + case Stmt::ConditionalOperatorClass: + case Stmt::ObjCForCollectionStmtClass: { + SourceLocation L = getValidSourceLocation(S, LAC); + return SourceRange(L, L); + } + } + SourceRange R = S->getSourceRange(); + if (R.isValid()) + return R; + break; + } + case DeclK: + if (const auto *MD = dyn_cast<ObjCMethodDecl>(D)) + return MD->getSourceRange(); + if (const auto *FD = dyn_cast<FunctionDecl>(D)) { + if (Stmt *Body = FD->getBody()) + return Body->getSourceRange(); + } + else { + SourceLocation L = D->getLocation(); + return PathDiagnosticRange(SourceRange(L, L), true); + } + } + + return SourceRange(Loc, Loc); +} + +void PathDiagnosticLocation::flatten() { + if (K == StmtK) { + K = RangeK; + S = nullptr; + D = nullptr; + } + else if (K == DeclK) { + K = SingleLocK; + S = nullptr; + D = nullptr; + } +} + +//===----------------------------------------------------------------------===// +// Manipulation of PathDiagnosticCallPieces. +//===----------------------------------------------------------------------===// + +std::shared_ptr<PathDiagnosticCallPiece> +PathDiagnosticCallPiece::construct(const CallExitEnd &CE, + const SourceManager &SM) { + const Decl *caller = CE.getLocationContext()->getDecl(); + PathDiagnosticLocation pos = getLocationForCaller(CE.getCalleeContext(), + CE.getLocationContext(), + SM); + return std::shared_ptr<PathDiagnosticCallPiece>( + new PathDiagnosticCallPiece(caller, pos)); +} + +PathDiagnosticCallPiece * +PathDiagnosticCallPiece::construct(PathPieces &path, + const Decl *caller) { + std::shared_ptr<PathDiagnosticCallPiece> C( + new PathDiagnosticCallPiece(path, caller)); + path.clear(); + auto *R = C.get(); + path.push_front(std::move(C)); + return R; +} + +void PathDiagnosticCallPiece::setCallee(const CallEnter &CE, + const SourceManager &SM) { + const StackFrameContext *CalleeCtx = CE.getCalleeContext(); + Callee = CalleeCtx->getDecl(); + + callEnterWithin = PathDiagnosticLocation::createBegin(Callee, SM); + callEnter = getLocationForCaller(CalleeCtx, CE.getLocationContext(), SM); + + // Autosynthesized property accessors are special because we'd never + // pop back up to non-autosynthesized code until we leave them. + // This is not generally true for autosynthesized callees, which may call + // non-autosynthesized callbacks. 
+ // Unless set here, the IsCalleeAnAutosynthesizedPropertyAccessor flag + // defaults to false. + if (const auto *MD = dyn_cast<ObjCMethodDecl>(Callee)) + IsCalleeAnAutosynthesizedPropertyAccessor = ( + MD->isPropertyAccessor() && + CalleeCtx->getAnalysisDeclContext()->isBodyAutosynthesized()); +} + +static void describeTemplateParameters(raw_ostream &Out, + const ArrayRef<TemplateArgument> TAList, + const LangOptions &LO, + StringRef Prefix = StringRef(), + StringRef Postfix = StringRef()); + +static void describeTemplateParameter(raw_ostream &Out, + const TemplateArgument &TArg, + const LangOptions &LO) { + + if (TArg.getKind() == TemplateArgument::ArgKind::Pack) { + describeTemplateParameters(Out, TArg.getPackAsArray(), LO); + } else { + TArg.print(PrintingPolicy(LO), Out); + } +} + +static void describeTemplateParameters(raw_ostream &Out, + const ArrayRef<TemplateArgument> TAList, + const LangOptions &LO, + StringRef Prefix, StringRef Postfix) { + if (TAList.empty()) + return; + + Out << Prefix; + for (int I = 0, Last = TAList.size() - 1; I != Last; ++I) { + describeTemplateParameter(Out, TAList[I], LO); + Out << ", "; + } + describeTemplateParameter(Out, TAList[TAList.size() - 1], LO); + Out << Postfix; +} + +static void describeClass(raw_ostream &Out, const CXXRecordDecl *D, + StringRef Prefix = StringRef()) { + if (!D->getIdentifier()) + return; + Out << Prefix << '\'' << *D; + if (const auto T = dyn_cast<ClassTemplateSpecializationDecl>(D)) + describeTemplateParameters(Out, T->getTemplateArgs().asArray(), + D->getASTContext().getLangOpts(), "<", ">"); + + Out << '\''; +} + +static bool describeCodeDecl(raw_ostream &Out, const Decl *D, + bool ExtendedDescription, + StringRef Prefix = StringRef()) { + if (!D) + return false; + + if (isa<BlockDecl>(D)) { + if (ExtendedDescription) + Out << Prefix << "anonymous block"; + return ExtendedDescription; + } + + if (const auto *MD = dyn_cast<CXXMethodDecl>(D)) { + Out << Prefix; + if (ExtendedDescription && !MD->isUserProvided()) { + if (MD->isExplicitlyDefaulted()) + Out << "defaulted "; + else + Out << "implicit "; + } + + if (const auto *CD = dyn_cast<CXXConstructorDecl>(MD)) { + if (CD->isDefaultConstructor()) + Out << "default "; + else if (CD->isCopyConstructor()) + Out << "copy "; + else if (CD->isMoveConstructor()) + Out << "move "; + + Out << "constructor"; + describeClass(Out, MD->getParent(), " for "); + } else if (isa<CXXDestructorDecl>(MD)) { + if (!MD->isUserProvided()) { + Out << "destructor"; + describeClass(Out, MD->getParent(), " for "); + } else { + // Use ~Foo for explicitly-written destructors. + Out << "'" << *MD << "'"; + } + } else if (MD->isCopyAssignmentOperator()) { + Out << "copy assignment operator"; + describeClass(Out, MD->getParent(), " for "); + } else if (MD->isMoveAssignmentOperator()) { + Out << "move assignment operator"; + describeClass(Out, MD->getParent(), " for "); + } else { + if (MD->getParent()->getIdentifier()) + Out << "'" << *MD->getParent() << "::" << *MD << "'"; + else + Out << "'" << *MD << "'"; + } + + return true; + } + + Out << Prefix << '\'' << cast<NamedDecl>(*D); + + // Adding template parameters. 
+ if (const auto FD = dyn_cast<FunctionDecl>(D)) + if (const TemplateArgumentList *TAList = + FD->getTemplateSpecializationArgs()) + describeTemplateParameters(Out, TAList->asArray(), + FD->getASTContext().getLangOpts(), "<", ">"); + + Out << '\''; + return true; +} + +std::shared_ptr<PathDiagnosticEventPiece> +PathDiagnosticCallPiece::getCallEnterEvent() const { + // We do not produce call enters and call exits for autosynthesized property + // accessors. We do generally produce them for other functions coming from + // the body farm because they may call callbacks that bring us back into + // visible code. + if (!Callee || IsCalleeAnAutosynthesizedPropertyAccessor) + return nullptr; + + SmallString<256> buf; + llvm::raw_svector_ostream Out(buf); + + Out << "Calling "; + describeCodeDecl(Out, Callee, /*ExtendedDescription=*/true); + + assert(callEnter.asLocation().isValid()); + return std::make_shared<PathDiagnosticEventPiece>(callEnter, Out.str()); +} + +std::shared_ptr<PathDiagnosticEventPiece> +PathDiagnosticCallPiece::getCallEnterWithinCallerEvent() const { + if (!callEnterWithin.asLocation().isValid()) + return nullptr; + if (Callee->isImplicit() || !Callee->hasBody()) + return nullptr; + if (const auto *MD = dyn_cast<CXXMethodDecl>(Callee)) + if (MD->isDefaulted()) + return nullptr; + + SmallString<256> buf; + llvm::raw_svector_ostream Out(buf); + + Out << "Entered call"; + describeCodeDecl(Out, Caller, /*ExtendedDescription=*/false, " from "); + + return std::make_shared<PathDiagnosticEventPiece>(callEnterWithin, Out.str()); +} + +std::shared_ptr<PathDiagnosticEventPiece> +PathDiagnosticCallPiece::getCallExitEvent() const { + // We do not produce call enters and call exits for autosynthesized property + // accessors. We do generally produce them for other functions coming from + // the body farm because they may call callbacks that bring us back into + // visible code. + if (NoExit || IsCalleeAnAutosynthesizedPropertyAccessor) + return nullptr; + + SmallString<256> buf; + llvm::raw_svector_ostream Out(buf); + + if (!CallStackMessage.empty()) { + Out << CallStackMessage; + } else { + bool DidDescribe = describeCodeDecl(Out, Callee, + /*ExtendedDescription=*/false, + "Returning from "); + if (!DidDescribe) + Out << "Returning to caller"; + } + + assert(callReturn.asLocation().isValid()); + return std::make_shared<PathDiagnosticEventPiece>(callReturn, Out.str()); +} + +static void compute_path_size(const PathPieces &pieces, unsigned &size) { + for (const auto &I : pieces) { + const PathDiagnosticPiece *piece = I.get(); + if (const auto *cp = dyn_cast<PathDiagnosticCallPiece>(piece)) + compute_path_size(cp->path, size); + else + ++size; + } +} + +unsigned PathDiagnostic::full_size() { + unsigned size = 0; + compute_path_size(path, size); + return size; +} + +//===----------------------------------------------------------------------===// +// FoldingSet profiling methods. +//===----------------------------------------------------------------------===// + +void PathDiagnosticLocation::Profile(llvm::FoldingSetNodeID &ID) const { + ID.AddInteger(Range.getBegin().getRawEncoding()); + ID.AddInteger(Range.getEnd().getRawEncoding()); + ID.AddInteger(Loc.getRawEncoding()); +} + +void PathDiagnosticPiece::Profile(llvm::FoldingSetNodeID &ID) const { + ID.AddInteger((unsigned) getKind()); + ID.AddString(str); + // FIXME: Add profiling support for code hints. 
+ ID.AddInteger((unsigned) getDisplayHint()); + ArrayRef<SourceRange> Ranges = getRanges(); + for (const auto &I : Ranges) { + ID.AddInteger(I.getBegin().getRawEncoding()); + ID.AddInteger(I.getEnd().getRawEncoding()); + } +} + +void PathDiagnosticCallPiece::Profile(llvm::FoldingSetNodeID &ID) const { + PathDiagnosticPiece::Profile(ID); + for (const auto &I : path) + ID.Add(*I); +} + +void PathDiagnosticSpotPiece::Profile(llvm::FoldingSetNodeID &ID) const { + PathDiagnosticPiece::Profile(ID); + ID.Add(Pos); +} + +void PathDiagnosticControlFlowPiece::Profile(llvm::FoldingSetNodeID &ID) const { + PathDiagnosticPiece::Profile(ID); + for (const auto &I : *this) + ID.Add(I); +} + +void PathDiagnosticMacroPiece::Profile(llvm::FoldingSetNodeID &ID) const { + PathDiagnosticSpotPiece::Profile(ID); + for (const auto &I : subPieces) + ID.Add(*I); +} + +void PathDiagnosticNotePiece::Profile(llvm::FoldingSetNodeID &ID) const { + PathDiagnosticSpotPiece::Profile(ID); +} + +void PathDiagnosticPopUpPiece::Profile(llvm::FoldingSetNodeID &ID) const { + PathDiagnosticSpotPiece::Profile(ID); +} + +void PathDiagnostic::Profile(llvm::FoldingSetNodeID &ID) const { + ID.Add(getLocation()); + ID.AddString(BugType); + ID.AddString(VerboseDesc); + ID.AddString(Category); +} + +void PathDiagnostic::FullProfile(llvm::FoldingSetNodeID &ID) const { + Profile(ID); + for (const auto &I : path) + ID.Add(*I); + for (meta_iterator I = meta_begin(), E = meta_end(); I != E; ++I) + ID.AddString(*I); +} + +LLVM_DUMP_METHOD void PathPieces::dump() const { + unsigned index = 0; + for (PathPieces::const_iterator I = begin(), E = end(); I != E; ++I) { + llvm::errs() << "[" << index++ << "] "; + (*I)->dump(); + llvm::errs() << "\n"; + } +} + +LLVM_DUMP_METHOD void PathDiagnosticCallPiece::dump() const { + llvm::errs() << "CALL\n--------------\n"; + + if (const Stmt *SLoc = getLocation().getStmtOrNull()) + SLoc->dump(); + else if (const auto *ND = dyn_cast_or_null<NamedDecl>(getCallee())) + llvm::errs() << *ND << "\n"; + else + getLocation().dump(); +} + +LLVM_DUMP_METHOD void PathDiagnosticEventPiece::dump() const { + llvm::errs() << "EVENT\n--------------\n"; + llvm::errs() << getString() << "\n"; + llvm::errs() << " ---- at ----\n"; + getLocation().dump(); +} + +LLVM_DUMP_METHOD void PathDiagnosticControlFlowPiece::dump() const { + llvm::errs() << "CONTROL\n--------------\n"; + getStartLocation().dump(); + llvm::errs() << " ---- to ----\n"; + getEndLocation().dump(); +} + +LLVM_DUMP_METHOD void PathDiagnosticMacroPiece::dump() const { + llvm::errs() << "MACRO\n--------------\n"; + // FIXME: Print which macro is being invoked. +} + +LLVM_DUMP_METHOD void PathDiagnosticNotePiece::dump() const { + llvm::errs() << "NOTE\n--------------\n"; + llvm::errs() << getString() << "\n"; + llvm::errs() << " ---- at ----\n"; + getLocation().dump(); +} + +LLVM_DUMP_METHOD void PathDiagnosticPopUpPiece::dump() const { + llvm::errs() << "POP-UP\n--------------\n"; + llvm::errs() << getString() << "\n"; + llvm::errs() << " ---- at ----\n"; + getLocation().dump(); +} + +LLVM_DUMP_METHOD void PathDiagnosticLocation::dump() const { + if (!isValid()) { + llvm::errs() << "<INVALID>\n"; + return; + } + + switch (K) { + case RangeK: + // FIXME: actually print the range. 
+ llvm::errs() << "<range>\n"; + break; + case SingleLocK: + asLocation().dump(); + llvm::errs() << "\n"; + break; + case StmtK: + if (S) + S->dump(); + else + llvm::errs() << "<NULL STMT>\n"; + break; + case DeclK: + if (const auto *ND = dyn_cast_or_null<NamedDecl>(D)) + llvm::errs() << *ND << "\n"; + else if (isa<BlockDecl>(D)) + // FIXME: Make this nicer. + llvm::errs() << "<block>\n"; + else if (D) + llvm::errs() << "<unknown decl>\n"; + else + llvm::errs() << "<NULL DECL>\n"; + break; + } +} diff --git a/lib/Analysis/ProgramPoint.cpp b/lib/Analysis/ProgramPoint.cpp index 97e90965d007a..0783fbed53155 100644 --- a/lib/Analysis/ProgramPoint.cpp +++ b/lib/Analysis/ProgramPoint.cpp @@ -188,7 +188,11 @@ void ProgramPoint::printJson(llvm::raw_ostream &Out, const char *NL) const { Out << "Statement\", \"stmt_kind\": \"" << S->getStmtClassName() << "\", \"stmt_id\": " << S->getID(Context) - << ", \"pointer\": \"" << (const void *)S << "\", \"pretty\": "; + << ", \"pointer\": \"" << (const void *)S << "\", "; + if (const auto *CS = dyn_cast<CastExpr>(S)) + Out << "\"cast_kind\": \"" << CS->getCastKindName() << "\", "; + + Out << "\"pretty\": "; S->printJson(Out, nullptr, PP, AddQuotes); diff --git a/lib/Analysis/ReachableCode.cpp b/lib/Analysis/ReachableCode.cpp index 2fea88ea2eff4..1dab8e309f599 100644 --- a/lib/Analysis/ReachableCode.cpp +++ b/lib/Analysis/ReachableCode.cpp @@ -247,7 +247,7 @@ static bool isConfigurationValue(const Stmt *S, } case Stmt::UnaryOperatorClass: { const UnaryOperator *UO = cast<UnaryOperator>(S); - if (UO->getOpcode() != UO_LNot) + if (UO->getOpcode() != UO_LNot && UO->getOpcode() != UO_Minus) return false; bool SilenceableCondValNotSet = SilenceableCondVal && SilenceableCondVal->getBegin().isInvalid(); diff --git a/lib/Analysis/RetainSummaryManager.cpp b/lib/Analysis/RetainSummaryManager.cpp index 132053fd2c244..6f46917b2dfc6 100644 --- a/lib/Analysis/RetainSummaryManager.cpp +++ b/lib/Analysis/RetainSummaryManager.cpp @@ -504,7 +504,7 @@ RetainSummaryManager::generateSummary(const FunctionDecl *FD, FName = FName.substr(FName.find_first_not_of('_')); // Inspect the result type. Strip away any typedefs. - const auto *FT = FD->getType()->getAs<FunctionType>(); + const auto *FT = FD->getType()->castAs<FunctionType>(); QualType RetTy = FT->getReturnType(); if (TrackOSObjects) diff --git a/lib/Analysis/ThreadSafety.cpp b/lib/Analysis/ThreadSafety.cpp index c7b4c4455664a..c60954374ce32 100644 --- a/lib/Analysis/ThreadSafety.cpp +++ b/lib/Analysis/ThreadSafety.cpp @@ -882,7 +882,7 @@ public: StringRef DiagKind) const override { FSet.removeLock(FactMan, Cp); if (!Cp.negative()) { - FSet.addLock(FactMan, llvm::make_unique<LockableFactEntry>( + FSet.addLock(FactMan, std::make_unique<LockableFactEntry>( !Cp, LK_Exclusive, UnlockLoc)); } } @@ -987,7 +987,7 @@ private: } else { FSet.removeLock(FactMan, !Cp); FSet.addLock(FactMan, - llvm::make_unique<LockableFactEntry>(Cp, kind, loc)); + std::make_unique<LockableFactEntry>(Cp, kind, loc)); } } @@ -996,7 +996,7 @@ private: StringRef DiagKind) const { if (FSet.findLock(FactMan, Cp)) { FSet.removeLock(FactMan, Cp); - FSet.addLock(FactMan, llvm::make_unique<LockableFactEntry>( + FSet.addLock(FactMan, std::make_unique<LockableFactEntry>( !Cp, LK_Exclusive, loc)); } else if (Handler) { Handler->handleUnmatchedUnlock(DiagKind, Cp.toString(), loc); @@ -1551,11 +1551,11 @@ void ThreadSafetyAnalyzer::getEdgeLockset(FactSet& Result, // Add and remove locks. 
SourceLocation Loc = Exp->getExprLoc(); for (const auto &ExclusiveLockToAdd : ExclusiveLocksToAdd) - addLock(Result, llvm::make_unique<LockableFactEntry>(ExclusiveLockToAdd, + addLock(Result, std::make_unique<LockableFactEntry>(ExclusiveLockToAdd, LK_Exclusive, Loc), CapDiagKind); for (const auto &SharedLockToAdd : SharedLocksToAdd) - addLock(Result, llvm::make_unique<LockableFactEntry>(SharedLockToAdd, + addLock(Result, std::make_unique<LockableFactEntry>(SharedLockToAdd, LK_Shared, Loc), CapDiagKind); } @@ -1840,7 +1840,7 @@ void BuildLockset::handleCall(const Expr *Exp, const NamedDecl *D, Analyzer->getMutexIDs(AssertLocks, A, Exp, D, VD); for (const auto &AssertLock : AssertLocks) Analyzer->addLock(FSet, - llvm::make_unique<LockableFactEntry>( + std::make_unique<LockableFactEntry>( AssertLock, LK_Exclusive, Loc, false, true), ClassifyDiagnostic(A)); break; @@ -1852,7 +1852,7 @@ void BuildLockset::handleCall(const Expr *Exp, const NamedDecl *D, Analyzer->getMutexIDs(AssertLocks, A, Exp, D, VD); for (const auto &AssertLock : AssertLocks) Analyzer->addLock(FSet, - llvm::make_unique<LockableFactEntry>( + std::make_unique<LockableFactEntry>( AssertLock, LK_Shared, Loc, false, true), ClassifyDiagnostic(A)); break; @@ -1864,7 +1864,7 @@ void BuildLockset::handleCall(const Expr *Exp, const NamedDecl *D, Analyzer->getMutexIDs(AssertLocks, A, Exp, D, VD); for (const auto &AssertLock : AssertLocks) Analyzer->addLock(FSet, - llvm::make_unique<LockableFactEntry>( + std::make_unique<LockableFactEntry>( AssertLock, A->isShared() ? LK_Shared : LK_Exclusive, Loc, false, true), @@ -1928,11 +1928,11 @@ void BuildLockset::handleCall(const Expr *Exp, const NamedDecl *D, // Add locks. for (const auto &M : ExclusiveLocksToAdd) - Analyzer->addLock(FSet, llvm::make_unique<LockableFactEntry>( + Analyzer->addLock(FSet, std::make_unique<LockableFactEntry>( M, LK_Exclusive, Loc, isScopedVar), CapDiagKind); for (const auto &M : SharedLocksToAdd) - Analyzer->addLock(FSet, llvm::make_unique<LockableFactEntry>( + Analyzer->addLock(FSet, std::make_unique<LockableFactEntry>( M, LK_Shared, Loc, isScopedVar), CapDiagKind); @@ -1944,7 +1944,7 @@ void BuildLockset::handleCall(const Expr *Exp, const NamedDecl *D, // FIXME: does this store a pointer to DRE? CapabilityExpr Scp = Analyzer->SxBuilder.translateAttrExpr(&DRE, nullptr); - auto ScopedEntry = llvm::make_unique<ScopedLockableFactEntry>(Scp, MLoc); + auto ScopedEntry = std::make_unique<ScopedLockableFactEntry>(Scp, MLoc); for (const auto &M : ExclusiveLocksToAdd) ScopedEntry->addExclusiveLock(M); for (const auto &M : ScopedExclusiveReqs) @@ -2349,12 +2349,12 @@ void ThreadSafetyAnalyzer::runAnalysis(AnalysisDeclContext &AC) { // FIXME -- Loc can be wrong here. for (const auto &Mu : ExclusiveLocksToAdd) { - auto Entry = llvm::make_unique<LockableFactEntry>(Mu, LK_Exclusive, Loc); + auto Entry = std::make_unique<LockableFactEntry>(Mu, LK_Exclusive, Loc); Entry->setDeclared(true); addLock(InitialLockset, std::move(Entry), CapDiagKind, true); } for (const auto &Mu : SharedLocksToAdd) { - auto Entry = llvm::make_unique<LockableFactEntry>(Mu, LK_Shared, Loc); + auto Entry = std::make_unique<LockableFactEntry>(Mu, LK_Shared, Loc); Entry->setDeclared(true); addLock(InitialLockset, std::move(Entry), CapDiagKind, true); } @@ -2523,10 +2523,10 @@ void ThreadSafetyAnalyzer::runAnalysis(AnalysisDeclContext &AC) { // issue the appropriate warning. // FIXME: the location here is not quite right. 
for (const auto &Lock : ExclusiveLocksAcquired) - ExpectedExitSet.addLock(FactMan, llvm::make_unique<LockableFactEntry>( + ExpectedExitSet.addLock(FactMan, std::make_unique<LockableFactEntry>( Lock, LK_Exclusive, D->getLocation())); for (const auto &Lock : SharedLocksAcquired) - ExpectedExitSet.addLock(FactMan, llvm::make_unique<LockableFactEntry>( + ExpectedExitSet.addLock(FactMan, std::make_unique<LockableFactEntry>( Lock, LK_Shared, D->getLocation())); for (const auto &Lock : LocksReleased) ExpectedExitSet.removeLock(FactMan, Lock); diff --git a/lib/Analysis/plugins/SampleAnalyzer/MainCallChecker.cpp b/lib/Analysis/plugins/SampleAnalyzer/MainCallChecker.cpp index 8bd4085108e9a..fd210d733fd0a 100644 --- a/lib/Analysis/plugins/SampleAnalyzer/MainCallChecker.cpp +++ b/lib/Analysis/plugins/SampleAnalyzer/MainCallChecker.cpp @@ -36,8 +36,8 @@ void MainCallChecker::checkPreStmt(const CallExpr *CE, if (!BT) BT.reset(new BugType(this, "call to main", "example analyzer plugin")); - std::unique_ptr<BugReport> report = - llvm::make_unique<BugReport>(*BT, BT->getName(), N); + auto report = + std::make_unique<PathSensitiveBugReport>(*BT, BT->getDescription(), N); report->addRange(Callee->getSourceRange()); C.emitReport(std::move(report)); } |
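The change that recurs through the AnalysisDeclContext.cpp, ThreadSafety.cpp and MainCallChecker.cpp hunks above is the replacement of LLVM's pre-C++14 helper llvm::make_unique with std::make_unique from &lt;memory&gt;. The sketch below shows the shape of that swap, assuming a C++14 (or later) toolchain; it is not code from this commit, and the Fact struct and addFact helper are hypothetical stand-ins for the lock-fact machinery touched here.

#include &lt;memory&gt;
#include &lt;vector&gt;

struct Fact {
  int Kind;
  explicit Fact(int K) : Kind(K) {}
};

// Before the migration this call site would have read
//   Facts.push_back(llvm::make_unique&lt;Fact&gt;(Kind));
// std::make_unique has the same forwarding signature, so only the
// namespace qualifier changes.
void addFact(std::vector&lt;std::unique_ptr&lt;Fact&gt;&gt; &Facts, int Kind) {
  Facts.push_back(std::make_unique&lt;Fact&gt;(Kind));
}

int main() {
  std::vector&lt;std::unique_ptr&lt;Fact&gt;&gt; Facts;
  addFact(Facts, 1);
  return Facts.size() == 1 ? 0 : 1;
}

Both helpers perfect-forward their arguments to the constructor and return a std::unique_ptr, so the substitution is mechanical; the likely motivation is that the minimum language standard now provides std::make_unique, making the local polyfill redundant.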