aboutsummaryrefslogtreecommitdiff
path: root/llvm/lib/Transforms/Scalar/InferAddressSpaces.cpp
diff options
context:
space:
mode:
Diffstat (limited to 'llvm/lib/Transforms/Scalar/InferAddressSpaces.cpp')
-rw-r--r--llvm/lib/Transforms/Scalar/InferAddressSpaces.cpp206
1 files changed, 165 insertions, 41 deletions
diff --git a/llvm/lib/Transforms/Scalar/InferAddressSpaces.cpp b/llvm/lib/Transforms/Scalar/InferAddressSpaces.cpp
index dfb1b6bfb739..db9cc58bbfc4 100644
--- a/llvm/lib/Transforms/Scalar/InferAddressSpaces.cpp
+++ b/llvm/lib/Transforms/Scalar/InferAddressSpaces.cpp
@@ -96,7 +96,6 @@
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Analysis/TargetTransformInfo.h"
-#include "llvm/Transforms/Utils/Local.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Constants.h"
@@ -116,11 +115,13 @@
#include "llvm/IR/ValueHandle.h"
#include "llvm/Pass.h"
#include "llvm/Support/Casting.h"
+#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Scalar.h"
+#include "llvm/Transforms/Utils/Local.h"
#include "llvm/Transforms/Utils/ValueMapper.h"
#include <cassert>
#include <iterator>
@@ -132,16 +133,23 @@
using namespace llvm;
+static cl::opt<bool> AssumeDefaultIsFlatAddressSpace(
+ "assume-default-is-flat-addrspace", cl::init(false), cl::ReallyHidden,
+ cl::desc("The default address space is assumed as the flat address space. "
+ "This is mainly for test purpose."));
+
static const unsigned UninitializedAddressSpace =
std::numeric_limits<unsigned>::max();
namespace {
using ValueToAddrSpaceMapTy = DenseMap<const Value *, unsigned>;
+using PostorderStackTy = llvm::SmallVector<PointerIntPair<Value *, 1, bool>, 4>;
/// InferAddressSpaces
class InferAddressSpaces : public FunctionPass {
const TargetTransformInfo *TTI = nullptr;
+ const DataLayout *DL = nullptr;
/// Target specific address space which uses of should be replaced if
/// possible.
@@ -174,6 +182,11 @@ private:
bool isSafeToCastConstAddrSpace(Constant *C, unsigned NewAS) const;
+ Value *cloneInstructionWithNewAddressSpace(
+ Instruction *I, unsigned NewAddrSpace,
+ const ValueToValueMapTy &ValueWithNewAddrSpace,
+ SmallVectorImpl<const Use *> *UndefUsesToFix) const;
+
// Changes the flat address expressions in function F to point to specific
// address spaces if InferredAddrSpace says so. Postorder is the postorder of
// all flat expressions in the use-def graph of function F.
@@ -182,15 +195,14 @@ private:
const ValueToAddrSpaceMapTy &InferredAddrSpace, Function *F) const;
void appendsFlatAddressExpressionToPostorderStack(
- Value *V, std::vector<std::pair<Value *, bool>> &PostorderStack,
- DenseSet<Value *> &Visited) const;
+ Value *V, PostorderStackTy &PostorderStack,
+ DenseSet<Value *> &Visited) const;
bool rewriteIntrinsicOperands(IntrinsicInst *II,
Value *OldV, Value *NewV) const;
- void collectRewritableIntrinsicOperands(
- IntrinsicInst *II,
- std::vector<std::pair<Value *, bool>> &PostorderStack,
- DenseSet<Value *> &Visited) const;
+ void collectRewritableIntrinsicOperands(IntrinsicInst *II,
+ PostorderStackTy &PostorderStack,
+ DenseSet<Value *> &Visited) const;
std::vector<WeakTrackingVH> collectFlatAddressExpressions(Function &F) const;
@@ -214,24 +226,65 @@ void initializeInferAddressSpacesPass(PassRegistry &);
INITIALIZE_PASS(InferAddressSpaces, DEBUG_TYPE, "Infer address spaces",
false, false)
+// Check whether that's a no-op pointer bitcast using a pair of
+// `ptrtoint`/`inttoptr`, which is needed because there is no no-op pointer
+// bitcast over different address spaces.
+static bool isNoopPtrIntCastPair(const Operator *I2P, const DataLayout &DL,
+ const TargetTransformInfo *TTI) {
+ assert(I2P->getOpcode() == Instruction::IntToPtr);
+ auto *P2I = dyn_cast<Operator>(I2P->getOperand(0));
+ if (!P2I || P2I->getOpcode() != Instruction::PtrToInt)
+ return false;
+ // Check it's really safe to treat that pair of `ptrtoint`/`inttoptr` as a
+ // no-op cast. Besides checking both of them are no-op casts, as the
+ // reinterpreted pointer may be used in other pointer arithmetic, we also
+ // need to double-check that through the target-specific hook. That ensures
+ // the underlying target also agrees that's a no-op address space cast and
+ // pointer bits are preserved.
+ // The current IR spec doesn't have clear rules on address space casts,
+ // especially a clear definition for pointer bits in non-default address
+ // spaces. It would be undefined if that pointer is dereferenced after an
+ // invalid reinterpret cast. Also, since the current spec is unclear about
+ // the meaning of the bits in non-default address spaces, pointer arithmetic
+ // may also be undefined after an invalid pointer reinterpret cast.
+ // However, as we confirm through the target hooks that it's a no-op
+ // addrspacecast, it doesn't matter since the bits should be the same.
+ return CastInst::isNoopCast(Instruction::CastOps(I2P->getOpcode()),
+ I2P->getOperand(0)->getType(), I2P->getType(),
+ DL) &&
+ CastInst::isNoopCast(Instruction::CastOps(P2I->getOpcode()),
+ P2I->getOperand(0)->getType(), P2I->getType(),
+ DL) &&
+ TTI->isNoopAddrSpaceCast(
+ P2I->getOperand(0)->getType()->getPointerAddressSpace(),
+ I2P->getType()->getPointerAddressSpace());
+}
+
// Returns true if V is an address expression.
// TODO: Currently, we consider only phi, bitcast, addrspacecast, and
// getelementptr operators.
-static bool isAddressExpression(const Value &V) {
- if (!isa<Operator>(V))
+static bool isAddressExpression(const Value &V, const DataLayout &DL,
+ const TargetTransformInfo *TTI) {
+ const Operator *Op = dyn_cast<Operator>(&V);
+ if (!Op)
return false;
- const Operator &Op = cast<Operator>(V);
- switch (Op.getOpcode()) {
+ switch (Op->getOpcode()) {
case Instruction::PHI:
- assert(Op.getType()->isPointerTy());
+ assert(Op->getType()->isPointerTy());
return true;
case Instruction::BitCast:
case Instruction::AddrSpaceCast:
case Instruction::GetElementPtr:
return true;
case Instruction::Select:
- return Op.getType()->isPointerTy();
+ return Op->getType()->isPointerTy();
+ case Instruction::Call: {
+ const IntrinsicInst *II = dyn_cast<IntrinsicInst>(&V);
+ return II && II->getIntrinsicID() == Intrinsic::ptrmask;
+ }
+ case Instruction::IntToPtr:
+ return isNoopPtrIntCastPair(Op, DL, TTI);
default:
return false;
}
@@ -240,7 +293,9 @@ static bool isAddressExpression(const Value &V) {
// Returns the pointer operands of V.
//
// Precondition: V is an address expression.
-static SmallVector<Value *, 2> getPointerOperands(const Value &V) {
+static SmallVector<Value *, 2>
+getPointerOperands(const Value &V, const DataLayout &DL,
+ const TargetTransformInfo *TTI) {
const Operator &Op = cast<Operator>(V);
switch (Op.getOpcode()) {
case Instruction::PHI: {
@@ -254,12 +309,22 @@ static SmallVector<Value *, 2> getPointerOperands(const Value &V) {
return {Op.getOperand(0)};
case Instruction::Select:
return {Op.getOperand(1), Op.getOperand(2)};
+ case Instruction::Call: {
+ const IntrinsicInst &II = cast<IntrinsicInst>(Op);
+ assert(II.getIntrinsicID() == Intrinsic::ptrmask &&
+ "unexpected intrinsic call");
+ return {II.getArgOperand(0)};
+ }
+ case Instruction::IntToPtr: {
+ assert(isNoopPtrIntCastPair(&Op, DL, TTI));
+ auto *P2I = cast<Operator>(Op.getOperand(0));
+ return {P2I->getOperand(0)};
+ }
default:
llvm_unreachable("Unexpected instruction type.");
}
}
-// TODO: Move logic to TTI?
bool InferAddressSpaces::rewriteIntrinsicOperands(IntrinsicInst *II,
Value *OldV,
Value *NewV) const {
@@ -275,16 +340,26 @@ bool InferAddressSpaces::rewriteIntrinsicOperands(IntrinsicInst *II,
II->setCalledFunction(NewDecl);
return true;
}
- default:
- return TTI->rewriteIntrinsicWithAddressSpace(II, OldV, NewV);
+ case Intrinsic::ptrmask:
+ // This is handled as an address expression, not as a use in a memory
+ // operation.
+ return false;
+ default: {
+ Value *Rewrite = TTI->rewriteIntrinsicWithAddressSpace(II, OldV, NewV);
+ if (!Rewrite)
+ return false;
+ if (Rewrite != II)
+ II->replaceAllUsesWith(Rewrite);
+ return true;
+ }
}
}
void InferAddressSpaces::collectRewritableIntrinsicOperands(
- IntrinsicInst *II, std::vector<std::pair<Value *, bool>> &PostorderStack,
+ IntrinsicInst *II, PostorderStackTy &PostorderStack,
DenseSet<Value *> &Visited) const {
auto IID = II->getIntrinsicID();
switch (IID) {
+ case Intrinsic::ptrmask:
case Intrinsic::objectsize:
appendsFlatAddressExpressionToPostorderStack(II->getArgOperand(0),
PostorderStack, Visited);
@@ -305,7 +380,7 @@ void InferAddressSpaces::collectRewritableIntrinsicOperands(
// If V is an unvisited flat address expression, appends V to PostorderStack
// and marks it as visited.
void InferAddressSpaces::appendsFlatAddressExpressionToPostorderStack(
- Value *V, std::vector<std::pair<Value *, bool>> &PostorderStack,
+ Value *V, PostorderStackTy &PostorderStack,
DenseSet<Value *> &Visited) const {
assert(V->getType()->isPointerTy());
@@ -313,21 +388,21 @@ void InferAddressSpaces::appendsFlatAddressExpressionToPostorderStack(
// expressions.
if (ConstantExpr *CE = dyn_cast<ConstantExpr>(V)) {
// TODO: Look in non-address parts, like icmp operands.
- if (isAddressExpression(*CE) && Visited.insert(CE).second)
- PostorderStack.push_back(std::make_pair(CE, false));
+ if (isAddressExpression(*CE, *DL, TTI) && Visited.insert(CE).second)
+ PostorderStack.emplace_back(CE, false);
return;
}
- if (isAddressExpression(*V) &&
+ if (isAddressExpression(*V, *DL, TTI) &&
V->getType()->getPointerAddressSpace() == FlatAddrSpace) {
if (Visited.insert(V).second) {
- PostorderStack.push_back(std::make_pair(V, false));
+ PostorderStack.emplace_back(V, false);
Operator *Op = cast<Operator>(V);
for (unsigned I = 0, E = Op->getNumOperands(); I != E; ++I) {
if (ConstantExpr *CE = dyn_cast<ConstantExpr>(Op->getOperand(I))) {
- if (isAddressExpression(*CE) && Visited.insert(CE).second)
+ if (isAddressExpression(*CE, *DL, TTI) && Visited.insert(CE).second)
PostorderStack.emplace_back(CE, false);
}
}
@@ -341,7 +416,7 @@ std::vector<WeakTrackingVH>
InferAddressSpaces::collectFlatAddressExpressions(Function &F) const {
// This function implements a non-recursive postorder traversal of a partial
// use-def graph of function F.
- std::vector<std::pair<Value *, bool>> PostorderStack;
+ PostorderStackTy PostorderStack;
// The set of visited expressions.
DenseSet<Value *> Visited;
@@ -383,23 +458,27 @@ InferAddressSpaces::collectFlatAddressExpressions(Function &F) const {
} else if (auto *ASC = dyn_cast<AddrSpaceCastInst>(&I)) {
if (!ASC->getType()->isVectorTy())
PushPtrOperand(ASC->getPointerOperand());
+ } else if (auto *I2P = dyn_cast<IntToPtrInst>(&I)) {
+ if (isNoopPtrIntCastPair(cast<Operator>(I2P), *DL, TTI))
+ PushPtrOperand(
+ cast<PtrToIntInst>(I2P->getOperand(0))->getPointerOperand());
}
}
std::vector<WeakTrackingVH> Postorder; // The resultant postorder.
while (!PostorderStack.empty()) {
- Value *TopVal = PostorderStack.back().first;
+ Value *TopVal = PostorderStack.back().getPointer();
// If the operands of the expression on the top are already explored,
// adds that expression to the resultant postorder.
- if (PostorderStack.back().second) {
+ if (PostorderStack.back().getInt()) {
if (TopVal->getType()->getPointerAddressSpace() == FlatAddrSpace)
Postorder.push_back(TopVal);
PostorderStack.pop_back();
continue;
}
// Otherwise, adds its operands to the stack and explores them.
- PostorderStack.back().second = true;
- for (Value *PtrOperand : getPointerOperands(*TopVal)) {
+ PostorderStack.back().setInt(true);
+ for (Value *PtrOperand : getPointerOperands(*TopVal, *DL, TTI)) {
appendsFlatAddressExpressionToPostorderStack(PtrOperand, PostorderStack,
Visited);
}
@@ -438,10 +517,13 @@ static Value *operandWithNewAddressSpaceOrCreateUndef(
// Note that we do not necessarily clone `I`, e.g., if it is an addrspacecast
// from a pointer whose type already matches. Therefore, this function returns a
// Value* instead of an Instruction*.
-static Value *cloneInstructionWithNewAddressSpace(
+//
+// This may also return nullptr in the case where the instruction could not
+// be rewritten.
+Value *InferAddressSpaces::cloneInstructionWithNewAddressSpace(
Instruction *I, unsigned NewAddrSpace,
const ValueToValueMapTy &ValueWithNewAddrSpace,
- SmallVectorImpl<const Use *> *UndefUsesToFix) {
+ SmallVectorImpl<const Use *> *UndefUsesToFix) const {
Type *NewPtrType =
I->getType()->getPointerElementType()->getPointerTo(NewAddrSpace);
@@ -456,6 +538,23 @@ static Value *cloneInstructionWithNewAddressSpace(
return Src;
}
+ if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
+ // Technically the called intrinsic is itself a pointer-typed operand, so
+ // handle calls specially here rather than in the generic operand loop below.
+ assert(II->getIntrinsicID() == Intrinsic::ptrmask);
+ Value *NewPtr = operandWithNewAddressSpaceOrCreateUndef(
+ II->getArgOperandUse(0), NewAddrSpace, ValueWithNewAddrSpace,
+ UndefUsesToFix);
+ Value *Rewrite =
+ TTI->rewriteIntrinsicWithAddressSpace(II, II->getArgOperand(0), NewPtr);
+ if (Rewrite) {
+ assert(Rewrite != II && "cannot modify this pointer operation in place");
+ return Rewrite;
+ }
+
+ return nullptr;
+ }
+
// Computes the converted pointer operands.
SmallVector<Value *, 4> NewPointerOperands;
for (const Use &OperandUse : I->operands()) {
@@ -492,6 +591,14 @@ static Value *cloneInstructionWithNewAddressSpace(
assert(I->getType()->isPointerTy());
return SelectInst::Create(I->getOperand(0), NewPointerOperands[1],
NewPointerOperands[2], "", nullptr, I);
+ case Instruction::IntToPtr: {
+ assert(isNoopPtrIntCastPair(cast<Operator>(I), *DL, TTI));
+ Value *Src = cast<Operator>(I->getOperand(0))->getOperand(0);
+ assert(Src->getType()->getPointerAddressSpace() == NewAddrSpace);
+ if (Src->getType() != NewPtrType)
+ return new BitCastInst(Src, NewPtrType);
+ return Src;
+ }
default:
llvm_unreachable("Unexpected opcode");
}
@@ -501,8 +608,9 @@ static Value *cloneInstructionWithNewAddressSpace(
// constant expression `CE` with its operands replaced as specified in
// ValueWithNewAddrSpace.
static Value *cloneConstantExprWithNewAddressSpace(
- ConstantExpr *CE, unsigned NewAddrSpace,
- const ValueToValueMapTy &ValueWithNewAddrSpace) {
+ ConstantExpr *CE, unsigned NewAddrSpace,
+ const ValueToValueMapTy &ValueWithNewAddrSpace, const DataLayout *DL,
+ const TargetTransformInfo *TTI) {
Type *TargetType =
CE->getType()->getPointerElementType()->getPointerTo(NewAddrSpace);
@@ -533,6 +641,13 @@ static Value *cloneConstantExprWithNewAddressSpace(
}
}
+ if (CE->getOpcode() == Instruction::IntToPtr) {
+ assert(isNoopPtrIntCastPair(cast<Operator>(CE), *DL, TTI));
+ Constant *Src = cast<ConstantExpr>(CE->getOperand(0))->getOperand(0);
+ assert(Src->getType()->getPointerAddressSpace() == NewAddrSpace);
+ return ConstantExpr::getBitCast(Src, TargetType);
+ }
+
// Computes the operands of the new constant expression.
bool IsNew = false;
SmallVector<Constant *, 4> NewOperands;
@@ -550,7 +665,7 @@ static Value *cloneConstantExprWithNewAddressSpace(
}
if (auto CExpr = dyn_cast<ConstantExpr>(Operand))
if (Value *NewOperand = cloneConstantExprWithNewAddressSpace(
- CExpr, NewAddrSpace, ValueWithNewAddrSpace)) {
+ CExpr, NewAddrSpace, ValueWithNewAddrSpace, DL, TTI)) {
IsNew = true;
NewOperands.push_back(cast<Constant>(NewOperand));
continue;
@@ -585,13 +700,13 @@ Value *InferAddressSpaces::cloneValueWithNewAddressSpace(
const ValueToValueMapTy &ValueWithNewAddrSpace,
SmallVectorImpl<const Use *> *UndefUsesToFix) const {
// All values in Postorder are flat address expressions.
- assert(isAddressExpression(*V) &&
+ assert(isAddressExpression(*V, *DL, TTI) &&
V->getType()->getPointerAddressSpace() == FlatAddrSpace);
if (Instruction *I = dyn_cast<Instruction>(V)) {
Value *NewV = cloneInstructionWithNewAddressSpace(
I, NewAddrSpace, ValueWithNewAddrSpace, UndefUsesToFix);
- if (Instruction *NewI = dyn_cast<Instruction>(NewV)) {
+ if (Instruction *NewI = dyn_cast_or_null<Instruction>(NewV)) {
if (NewI->getParent() == nullptr) {
NewI->insertBefore(I);
NewI->takeName(I);
@@ -601,7 +716,7 @@ Value *InferAddressSpaces::cloneValueWithNewAddressSpace(
}
return cloneConstantExprWithNewAddressSpace(
- cast<ConstantExpr>(V), NewAddrSpace, ValueWithNewAddrSpace);
+ cast<ConstantExpr>(V), NewAddrSpace, ValueWithNewAddrSpace, DL, TTI);
}
// Defines the join operation on the address space lattice (see the file header
@@ -625,6 +740,10 @@ bool InferAddressSpaces::runOnFunction(Function &F) {
return false;
TTI = &getAnalysis<TargetTransformInfoWrapperPass>().getTTI(F);
+ DL = &F.getParent()->getDataLayout();
+
+ if (AssumeDefaultIsFlatAddressSpace)
+ FlatAddrSpace = 0;
if (FlatAddrSpace == UninitializedAddressSpace) {
FlatAddrSpace = TTI->getFlatAddressSpace();
@@ -729,7 +848,7 @@ Optional<unsigned> InferAddressSpaces::updateAddressSpace(
else
NewAS = joinAddressSpaces(Src0AS, Src1AS);
} else {
- for (Value *PtrOperand : getPointerOperands(V)) {
+ for (Value *PtrOperand : getPointerOperands(V, *DL, TTI)) {
auto I = InferredAddrSpace.find(PtrOperand);
unsigned OperandAS = I != InferredAddrSpace.end() ?
I->second : PtrOperand->getType()->getPointerAddressSpace();
@@ -879,8 +998,10 @@ bool InferAddressSpaces::rewriteWithNewAddressSpaces(
for (Value* V : Postorder) {
unsigned NewAddrSpace = InferredAddrSpace.lookup(V);
if (V->getType()->getPointerAddressSpace() != NewAddrSpace) {
- ValueWithNewAddrSpace[V] = cloneValueWithNewAddressSpace(
- V, NewAddrSpace, ValueWithNewAddrSpace, &UndefUsesToFix);
+ Value *New = cloneValueWithNewAddressSpace(
+ V, NewAddrSpace, ValueWithNewAddrSpace, &UndefUsesToFix);
+ if (New)
+ ValueWithNewAddrSpace[V] = New;
}
}
@@ -890,7 +1011,10 @@ bool InferAddressSpaces::rewriteWithNewAddressSpaces(
// Fixes all the undef uses generated by cloneInstructionWithNewAddressSpace.
for (const Use *UndefUse : UndefUsesToFix) {
User *V = UndefUse->getUser();
- User *NewV = cast<User>(ValueWithNewAddrSpace.lookup(V));
+ User *NewV = cast_or_null<User>(ValueWithNewAddrSpace.lookup(V));
+ if (!NewV)
+ continue;
+
unsigned OperandNo = UndefUse->getOperandNo();
assert(isa<UndefValue>(NewV->getOperand(OperandNo)));
NewV->setOperand(OperandNo, ValueWithNewAddrSpace.lookup(UndefUse->get()));