From 1d5ae1026e831016fc29fd927877c86af904481f Mon Sep 17 00:00:00 2001
From: Dimitry Andric
Date: Wed, 23 Oct 2019 17:51:42 +0000
Subject: Vendor import of stripped llvm trunk r375505, the last commit before
 the upstream Subversion repository was made read-only, and the LLVM project
 migrated to GitHub:

  https://llvm.org/svn/llvm-project/llvm/trunk@375505

---
 lib/CodeGen/StackProtector.cpp | 67 +++++++++++++++++++++++++++++++++++++++---
 1 file changed, 63 insertions(+), 4 deletions(-)

(limited to 'lib/CodeGen/StackProtector.cpp')

diff --git a/lib/CodeGen/StackProtector.cpp b/lib/CodeGen/StackProtector.cpp
index 809960c7fdf9..5683d1db473c 100644
--- a/lib/CodeGen/StackProtector.cpp
+++ b/lib/CodeGen/StackProtector.cpp
@@ -17,7 +17,6 @@
 #include "llvm/ADT/SmallPtrSet.h"
 #include "llvm/ADT/Statistic.h"
 #include "llvm/Analysis/BranchProbabilityInfo.h"
-#include "llvm/Analysis/CaptureTracking.h"
 #include "llvm/Analysis/EHPersonalities.h"
 #include "llvm/Analysis/OptimizationRemarkEmitter.h"
 #include "llvm/CodeGen/Passes.h"
@@ -157,6 +156,68 @@ bool StackProtector::ContainsProtectableArray(Type *Ty, bool &IsLarge,
   return NeedsProtector;
 }
 
+bool StackProtector::HasAddressTaken(const Instruction *AI) {
+  for (const User *U : AI->users()) {
+    const auto *I = cast<Instruction>(U);
+    switch (I->getOpcode()) {
+    case Instruction::Store:
+      if (AI == cast<StoreInst>(I)->getValueOperand())
+        return true;
+      break;
+    case Instruction::AtomicCmpXchg:
+      // cmpxchg conceptually includes both a load and store from the same
+      // location. So, like store, the value being stored is what matters.
+      if (AI == cast<AtomicCmpXchgInst>(I)->getNewValOperand())
+        return true;
+      break;
+    case Instruction::PtrToInt:
+      if (AI == cast<PtrToIntInst>(I)->getOperand(0))
+        return true;
+      break;
+    case Instruction::Call: {
+      // Ignore intrinsics that do not become real instructions.
+      // TODO: Narrow this to intrinsics that have store-like effects.
+      const auto *CI = cast<CallInst>(I);
+      if (!isa<DbgInfoIntrinsic>(CI) && !CI->isLifetimeStartOrEnd())
+        return true;
+      break;
+    }
+    case Instruction::Invoke:
+      return true;
+    case Instruction::BitCast:
+    case Instruction::GetElementPtr:
+    case Instruction::Select:
+    case Instruction::AddrSpaceCast:
+      if (HasAddressTaken(I))
+        return true;
+      break;
+    case Instruction::PHI: {
+      // Keep track of what PHI nodes we have already visited to ensure
+      // they are only visited once.
+      const auto *PN = cast<PHINode>(I);
+      if (VisitedPHIs.insert(PN).second)
+        if (HasAddressTaken(PN))
+          return true;
+      break;
+    }
+    case Instruction::Load:
+    case Instruction::AtomicRMW:
+    case Instruction::Ret:
+      // These instructions take an address operand, but have load-like or
+      // other innocuous behavior that should not trigger a stack protector.
+      // atomicrmw conceptually has both load and store semantics, but the
+      // value being stored must be integer; so if a pointer is being stored,
+      // we'll catch it in the PtrToInt case above.
+      break;
+    default:
+      // Conservatively return true for any instruction that takes an address
+      // operand, but is not handled above.
+      return true;
+    }
+  }
+  return false;
+}
+
 /// Search for the first call to the llvm.stackprotector intrinsic and return it
 /// if present.
 static const CallInst *findStackProtectorIntrinsic(Function &F) {
@@ -264,9 +325,7 @@ bool StackProtector::RequiresStackProtector() {
           continue;
         }
 
-        if (Strong && PointerMayBeCaptured(AI,
-                                           /* ReturnCaptures */ false,
-                                           /* StoreCaptures */ true)) {
+        if (Strong && HasAddressTaken(AI)) {
           ++NumAddrTaken;
           Layout.insert(std::make_pair(AI, MachineFrameInfo::SSPLK_AddrOf));
           ORE.emit([&]() {
-- 
cgit v1.2.3
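
As a rough source-level illustration of what the restored HasAddressTaken() walk
reacts to under -fstack-protector-strong (g_slot below is a hypothetical global,
used only to make the address escape; it does not appear in the patch):

    extern int *g_slot;   // hypothetical global, only here to receive the address

    int taken() {
      int local = 0;
      g_slot = &local;    // the alloca for 'local' is the value operand of a
                          // store, i.e. the Instruction::Store case above, so
                          // the frame is classified SSPLK_AddrOf
      return local;       // a plain read of 'local' is the Instruction::Load
                          // case and does not, by itself, request a protector
    }

The switch deliberately errs on the safe side: the default case treats any
unrecognized user of the address as address-taken.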