summaryrefslogtreecommitdiff
path: root/lib/Analysis/CaptureTracking.cpp
diff options
context:
space:
mode:
Diffstat (limited to 'lib/Analysis/CaptureTracking.cpp')
-rw-r--r--  lib/Analysis/CaptureTracking.cpp  52
1 files changed, 48 insertions, 4 deletions
diff --git a/lib/Analysis/CaptureTracking.cpp b/lib/Analysis/CaptureTracking.cpp
index 1add2fa775669..9862c3c9c270a 100644
--- a/lib/Analysis/CaptureTracking.cpp
+++ b/lib/Analysis/CaptureTracking.cpp
@@ -26,6 +26,7 @@
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
+#include "llvm/IR/IntrinsicInst.h"
using namespace llvm;
@@ -242,6 +243,13 @@ void llvm::PointerMayBeCaptured(const Value *V, CaptureTracker *Tracker) {
if (CS.onlyReadsMemory() && CS.doesNotThrow() && I->getType()->isVoidTy())
break;
+ // Volatile operations effectively capture the memory location that they
+ // load and store to.
+ if (auto *MI = dyn_cast<MemIntrinsic>(I))
+ if (MI->isVolatile())
+ if (Tracker->captured(U))
+ return;
+
// Not captured if only passed via 'nocapture' arguments. Note that
// calling a function pointer does not in itself cause the pointer to
// be captured. This is a subtle point considering that (for example)
@@ -259,18 +267,46 @@ void llvm::PointerMayBeCaptured(const Value *V, CaptureTracker *Tracker) {
break;
}
case Instruction::Load:
- // Loading from a pointer does not cause it to be captured.
+ // Volatile loads make the address observable.
+ if (cast<LoadInst>(I)->isVolatile())
+ if (Tracker->captured(U))
+ return;
break;
case Instruction::VAArg:
// "va-arg" from a pointer does not cause it to be captured.
break;
case Instruction::Store:
- if (V == I->getOperand(0))
// Stored the pointer - conservatively assume it may be captured.
+ // Volatile stores make the address observable.
+ if (V == I->getOperand(0) || cast<StoreInst>(I)->isVolatile())
+ if (Tracker->captured(U))
+ return;
+ break;
+ case Instruction::AtomicRMW: {
+ // atomicrmw conceptually includes both a load and store from
+ // the same location.
+ // As with a store, the location being accessed is not captured,
+ // but the value being stored is.
+ // Volatile stores make the address observable.
+ auto *ARMWI = cast<AtomicRMWInst>(I);
+ if (ARMWI->getValOperand() == V || ARMWI->isVolatile())
if (Tracker->captured(U))
return;
- // Storing to the pointee does not cause the pointer to be captured.
break;
+ }
+ case Instruction::AtomicCmpXchg: {
+ // cmpxchg conceptually includes both a load and store from
+ // the same location.
+ // As with a store, the location being accessed is not captured,
+ // but the value being stored is.
+ // Volatile stores make the address observable.
+ auto *ACXI = cast<AtomicCmpXchgInst>(I);
+ if (ACXI->getCompareOperand() == V || ACXI->getNewValOperand() == V ||
+ ACXI->isVolatile())
+ if (Tracker->captured(U))
+ return;
+ break;
+ }
case Instruction::BitCast:
case Instruction::GetElementPtr:
case Instruction::PHI:
@@ -289,7 +325,7 @@ void llvm::PointerMayBeCaptured(const Value *V, CaptureTracker *Tracker) {
Worklist.push_back(&UU);
}
break;
- case Instruction::ICmp:
+ case Instruction::ICmp: {
// Don't count comparisons of a no-alias return value against null as
// captures. This allows us to ignore comparisons of malloc results
// with null, for example.
@@ -298,11 +334,19 @@ void llvm::PointerMayBeCaptured(const Value *V, CaptureTracker *Tracker) {
if (CPN->getType()->getAddressSpace() == 0)
if (isNoAliasCall(V->stripPointerCasts()))
break;
+ // Comparison against value stored in global variable. Given the pointer
+ // does not escape, its value cannot be guessed and stored separately in a
+ // global variable.
+ unsigned OtherIndex = (I->getOperand(0) == V) ? 1 : 0;
+ auto *LI = dyn_cast<LoadInst>(I->getOperand(OtherIndex));
+ if (LI && isa<GlobalVariable>(LI->getPointerOperand()))
+ break;
// Otherwise, be conservative. There are crazy ways to capture pointers
// using comparisons.
if (Tracker->captured(U))
return;
break;
+ }
default:
// Something else - be conservative and say it is captured.
if (Tracker->captured(U))