author    Dimitry Andric <dim@FreeBSD.org>    2018-07-28 10:51:19 +0000
committer Dimitry Andric <dim@FreeBSD.org>    2018-07-28 10:51:19 +0000
commit    eb11fae6d08f479c0799db45860a98af528fa6e7 (patch)
tree      44d492a50c8c1a7eb8e2d17ea3360ec4d066f042 /lib/CodeGen/SafeStack.cpp
parent    b8a2042aa938069e862750553db0e4d82d25822c (diff)
Diffstat (limited to 'lib/CodeGen/SafeStack.cpp')
-rw-r--r--  lib/CodeGen/SafeStack.cpp | 118
1 file changed, 85 insertions(+), 33 deletions(-)
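
For context: with the new hidden -safestack-use-pointer-address flag added in this patch, the pass emits a call to the runtime hook __safestack_pointer_address instead of using the target's fast safe-stack-pointer location, and then tries to inline that call (TryInlinePointerAddress). A minimal runtime-side sketch of such a hook is shown below; only the function name and its "pointer to the stack pointer" return type come from this diff, while the thread-local variable name is hypothetical:

    // Hypothetical runtime-side sketch; not part of this patch.
    // The pass declares __safestack_pointer_address with no arguments,
    // returning a pointer to the unsafe stack pointer (i8** at the IR level).
    static thread_local void *unsafe_stack_ptr = nullptr;  // name assumed

    extern "C" void **__safestack_pointer_address() {
      // Hand instrumented code the address of the per-thread slot so it can
      // load and store the unsafe stack pointer through plain memory accesses.
      return &unsafe_stack_ptr;
    }

Since the option is a cl::opt, it would typically be exercised through the driver as -fsanitize=safe-stack -mllvm -safestack-use-pointer-address.
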
diff --git a/lib/CodeGen/SafeStack.cpp b/lib/CodeGen/SafeStack.cpp
index 51233be521be..cbbbf7c385aa 100644
--- a/lib/CodeGen/SafeStack.cpp
+++ b/lib/CodeGen/SafeStack.cpp
@@ -24,10 +24,12 @@
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/BranchProbabilityInfo.h"
+#include "llvm/Analysis/InlineCost.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/ScalarEvolution.h"
#include "llvm/Analysis/ScalarEvolutionExpressions.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
+#include "llvm/Transforms/Utils/Local.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/CodeGen/TargetPassConfig.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
@@ -61,7 +63,7 @@
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
-#include "llvm/Transforms/Utils/Local.h"
+#include "llvm/Transforms/Utils/Cloning.h"
#include <algorithm>
#include <cassert>
#include <cstdint>
@@ -88,6 +90,13 @@ STATISTIC(NumUnsafeStackRestorePoints, "Number of setjmps and landingpads");
} // namespace llvm
+/// Use __safestack_pointer_address even if the platform has a faster way of
+/// accessing the safe stack pointer.
+static cl::opt<bool>
+ SafeStackUsePointerAddress("safestack-use-pointer-address",
+ cl::init(false), cl::Hidden);
+
+
namespace {
/// Rewrite an SCEV expression for a memory access address to an expression that
@@ -134,14 +143,14 @@ class SafeStack {
/// might expect to appear on the stack on most common targets.
enum { StackAlignment = 16 };
- /// \brief Return the value of the stack canary.
+ /// Return the value of the stack canary.
Value *getStackGuard(IRBuilder<> &IRB, Function &F);
- /// \brief Load stack guard from the frame and check if it has changed.
+ /// Load stack guard from the frame and check if it has changed.
void checkStackGuard(IRBuilder<> &IRB, Function &F, ReturnInst &RI,
AllocaInst *StackGuardSlot, Value *StackGuard);
- /// \brief Find all static allocas, dynamic allocas, return instructions and
+ /// Find all static allocas, dynamic allocas, return instructions and
/// stack restore points (exception unwind blocks and setjmp calls) in the
/// given function and append them to the respective vectors.
void findInsts(Function &F, SmallVectorImpl<AllocaInst *> &StaticAllocas,
@@ -150,11 +159,11 @@ class SafeStack {
SmallVectorImpl<ReturnInst *> &Returns,
SmallVectorImpl<Instruction *> &StackRestorePoints);
- /// \brief Calculate the allocation size of a given alloca. Returns 0 if the
+ /// Calculate the allocation size of a given alloca. Returns 0 if the
/// size can not be statically determined.
uint64_t getStaticAllocaAllocationSize(const AllocaInst* AI);
- /// \brief Allocate space for all static allocas in \p StaticAllocas,
+ /// Allocate space for all static allocas in \p StaticAllocas,
/// replace allocas with pointers into the unsafe stack and generate code to
/// restore the stack pointer before all return instructions in \p Returns.
///
@@ -167,7 +176,7 @@ class SafeStack {
Instruction *BasePointer,
AllocaInst *StackGuardSlot);
- /// \brief Generate code to restore the stack after all stack restore points
+ /// Generate code to restore the stack after all stack restore points
/// in \p StackRestorePoints.
///
/// \returns A local variable in which to maintain the dynamic top of the
@@ -177,7 +186,7 @@ class SafeStack {
ArrayRef<Instruction *> StackRestorePoints,
Value *StaticTop, bool NeedDynamicTop);
- /// \brief Replace all allocas in \p DynamicAllocas with code to allocate
+ /// Replace all allocas in \p DynamicAllocas with code to allocate
/// space dynamically on the unsafe stack and store the dynamic unsafe stack
/// top to \p DynamicTop if non-null.
void moveDynamicAllocasToUnsafeStack(Function &F, Value *UnsafeStackPtr,
@@ -191,6 +200,9 @@ class SafeStack {
bool IsAccessSafe(Value *Addr, uint64_t Size, const Value *AllocaPtr,
uint64_t AllocaSize);
+ bool ShouldInlinePointerAddress(CallSite &CS);
+ void TryInlinePointerAddress();
+
public:
SafeStack(Function &F, const TargetLoweringBase &TL, const DataLayout &DL,
ScalarEvolution &SE)
@@ -230,16 +242,17 @@ bool SafeStack::IsAccessSafe(Value *Addr, uint64_t AccessSize,
ConstantRange(APInt(BitWidth, 0), APInt(BitWidth, AllocaSize));
bool Safe = AllocaRange.contains(AccessRange);
- DEBUG(dbgs() << "[SafeStack] "
- << (isa<AllocaInst>(AllocaPtr) ? "Alloca " : "ByValArgument ")
- << *AllocaPtr << "\n"
- << " Access " << *Addr << "\n"
- << " SCEV " << *Expr
- << " U: " << SE.getUnsignedRange(Expr)
- << ", S: " << SE.getSignedRange(Expr) << "\n"
- << " Range " << AccessRange << "\n"
- << " AllocaRange " << AllocaRange << "\n"
- << " " << (Safe ? "safe" : "unsafe") << "\n");
+ LLVM_DEBUG(
+ dbgs() << "[SafeStack] "
+ << (isa<AllocaInst>(AllocaPtr) ? "Alloca " : "ByValArgument ")
+ << *AllocaPtr << "\n"
+ << " Access " << *Addr << "\n"
+ << " SCEV " << *Expr
+ << " U: " << SE.getUnsignedRange(Expr)
+ << ", S: " << SE.getSignedRange(Expr) << "\n"
+ << " Range " << AccessRange << "\n"
+ << " AllocaRange " << AllocaRange << "\n"
+ << " " << (Safe ? "safe" : "unsafe") << "\n");
return Safe;
}
@@ -286,8 +299,9 @@ bool SafeStack::IsSafeStackAlloca(const Value *AllocaPtr, uint64_t AllocaSize) {
case Instruction::Store:
if (V == I->getOperand(0)) {
// Stored the pointer - conservatively assume it may be unsafe.
- DEBUG(dbgs() << "[SafeStack] Unsafe alloca: " << *AllocaPtr
- << "\n store of address: " << *I << "\n");
+ LLVM_DEBUG(dbgs()
+ << "[SafeStack] Unsafe alloca: " << *AllocaPtr
+ << "\n store of address: " << *I << "\n");
return false;
}
@@ -312,9 +326,9 @@ bool SafeStack::IsSafeStackAlloca(const Value *AllocaPtr, uint64_t AllocaSize) {
if (const MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I)) {
if (!IsMemIntrinsicSafe(MI, UI, AllocaPtr, AllocaSize)) {
- DEBUG(dbgs() << "[SafeStack] Unsafe alloca: " << *AllocaPtr
- << "\n unsafe memintrinsic: " << *I
- << "\n");
+ LLVM_DEBUG(dbgs()
+ << "[SafeStack] Unsafe alloca: " << *AllocaPtr
+ << "\n unsafe memintrinsic: " << *I << "\n");
return false;
}
continue;
@@ -332,8 +346,8 @@ bool SafeStack::IsSafeStackAlloca(const Value *AllocaPtr, uint64_t AllocaSize) {
if (A->get() == V)
if (!(CS.doesNotCapture(A - B) && (CS.doesNotAccessMemory(A - B) ||
CS.doesNotAccessMemory()))) {
- DEBUG(dbgs() << "[SafeStack] Unsafe alloca: " << *AllocaPtr
- << "\n unsafe call: " << *I << "\n");
+ LLVM_DEBUG(dbgs() << "[SafeStack] Unsafe alloca: " << *AllocaPtr
+ << "\n unsafe call: " << *I << "\n");
return false;
}
continue;
@@ -545,6 +559,7 @@ Value *SafeStack::moveStaticAllocasToUnsafeStack(
for (Argument *Arg : ByValArguments) {
unsigned Offset = SSL.getObjectOffset(Arg);
+ unsigned Align = SSL.getObjectAlignment(Arg);
Type *Ty = Arg->getType()->getPointerElementType();
uint64_t Size = DL.getTypeStoreSize(Ty);
@@ -561,7 +576,7 @@ Value *SafeStack::moveStaticAllocasToUnsafeStack(
DIExpression::NoDeref, -Offset, DIExpression::NoDeref);
Arg->replaceAllUsesWith(NewArg);
IRB.SetInsertPoint(cast<Instruction>(NewArg)->getNextNode());
- IRB.CreateMemCpy(Off, Arg, Size, Arg->getParamAlignment());
+ IRB.CreateMemCpy(Off, Align, Arg, Arg->getParamAlignment(), Size);
}
// Allocate space for every unsafe static AllocaInst on the unsafe stack.
@@ -695,6 +710,35 @@ void SafeStack::moveDynamicAllocasToUnsafeStack(
}
}
+bool SafeStack::ShouldInlinePointerAddress(CallSite &CS) {
+ Function *Callee = CS.getCalledFunction();
+ if (CS.hasFnAttr(Attribute::AlwaysInline) && isInlineViable(*Callee))
+ return true;
+ if (Callee->isInterposable() || Callee->hasFnAttribute(Attribute::NoInline) ||
+ CS.isNoInline())
+ return false;
+ return true;
+}
+
+void SafeStack::TryInlinePointerAddress() {
+ if (!isa<CallInst>(UnsafeStackPtr))
+ return;
+
+ if (F.hasFnAttribute(Attribute::OptimizeNone))
+ return;
+
+ CallSite CS(UnsafeStackPtr);
+ Function *Callee = CS.getCalledFunction();
+ if (!Callee || Callee->isDeclaration())
+ return;
+
+ if (!ShouldInlinePointerAddress(CS))
+ return;
+
+ InlineFunctionInfo IFI;
+ InlineFunction(CS, IFI);
+}
+
bool SafeStack::run() {
assert(F.hasFnAttribute(Attribute::SafeStack) &&
"Can't run SafeStack on a function without the attribute");
@@ -731,7 +775,13 @@ bool SafeStack::run() {
++NumUnsafeStackRestorePointsFunctions;
IRBuilder<> IRB(&F.front(), F.begin()->getFirstInsertionPt());
- UnsafeStackPtr = TL.getSafeStackPointerLocation(IRB);
+ if (SafeStackUsePointerAddress) {
+ Value *Fn = F.getParent()->getOrInsertFunction(
+ "__safestack_pointer_address", StackPtrTy->getPointerTo(0));
+ UnsafeStackPtr = IRB.CreateCall(Fn);
+ } else {
+ UnsafeStackPtr = TL.getSafeStackPointerLocation(IRB);
+ }
// Load the current stack pointer (we'll also use it as a base pointer).
// FIXME: use a dedicated register for it ?
@@ -779,7 +829,9 @@ bool SafeStack::run() {
IRB.CreateStore(BasePointer, UnsafeStackPtr);
}
- DEBUG(dbgs() << "[SafeStack] safestack applied\n");
+ TryInlinePointerAddress();
+
+ LLVM_DEBUG(dbgs() << "[SafeStack] safestack applied\n");
return true;
}
@@ -800,17 +852,17 @@ public:
}
bool runOnFunction(Function &F) override {
- DEBUG(dbgs() << "[SafeStack] Function: " << F.getName() << "\n");
+ LLVM_DEBUG(dbgs() << "[SafeStack] Function: " << F.getName() << "\n");
if (!F.hasFnAttribute(Attribute::SafeStack)) {
- DEBUG(dbgs() << "[SafeStack] safestack is not requested"
- " for this function\n");
+ LLVM_DEBUG(dbgs() << "[SafeStack] safestack is not requested"
+ " for this function\n");
return false;
}
if (F.isDeclaration()) {
- DEBUG(dbgs() << "[SafeStack] function definition"
- " is not available\n");
+ LLVM_DEBUG(dbgs() << "[SafeStack] function definition"
+ " is not available\n");
return false;
}