Diffstat (limited to 'lib/Transforms/InstCombine/InstCombineCalls.cpp')
 lib/Transforms/InstCombine/InstCombineCalls.cpp | 36 ++++----------------------------
 1 file changed, 8 insertions(+), 28 deletions(-)
diff --git a/lib/Transforms/InstCombine/InstCombineCalls.cpp b/lib/Transforms/InstCombine/InstCombineCalls.cpp
index a088d447337f..40e52ee755e5 100644
--- a/lib/Transforms/InstCombine/InstCombineCalls.cpp
+++ b/lib/Transforms/InstCombine/InstCombineCalls.cpp
@@ -1802,9 +1802,7 @@ Instruction *InstCombiner::visitVACopyInst(VACopyInst &I) {
/// instructions. For normal calls, it allows visitCallSite to do the heavy
/// lifting.
Instruction *InstCombiner::visitCallInst(CallInst &CI) {
- auto Args = CI.arg_operands();
- if (Value *V = SimplifyCall(&CI, CI.getCalledValue(), Args.begin(),
- Args.end(), SQ.getWithInstruction(&CI)))
+ if (Value *V = SimplifyCall(&CI, SQ.getWithInstruction(&CI)))
return replaceInstUsesWith(CI, V);
if (isFreeCall(&CI, &TLI))
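[Note: the folds deleted in the hunks below now fire through this SimplifyCall entry point instead. The InstructionSimplify.cpp side of the change is not part of this file's diff; the sketch below is only an illustration, in PatternMatch terms, of what an involution fold such as bswap(bswap(x)) -> x looks like, not the actual patch. The helper name is assumed.]

  #include "llvm/IR/PatternMatch.h"
  using namespace llvm;
  using namespace llvm::PatternMatch;

  // Illustrative sketch only: given the operand of an outer bswap call,
  // fold bswap(bswap(x)) -> x.
  static Value *simplifyDoubleBSwap(Value *OuterOperand) {
    Value *X;
    if (match(OuterOperand, m_BSwap(m_Value(X)))) // operand is itself a bswap
      return X;                                   // bswap(bswap(x)) -> x
    return nullptr;                               // no simplification
  }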
@@ -1903,16 +1901,10 @@ Instruction *InstCombiner::visitCallInst(CallInst &CI) {
lowerObjectSizeCall(II, DL, &TLI, /*MustSucceed=*/false))
return replaceInstUsesWith(CI, N);
return nullptr;
-
case Intrinsic::bswap: {
Value *IIOperand = II->getArgOperand(0);
Value *X = nullptr;
- // TODO should this be in InstSimplify?
- // bswap(bswap(x)) -> x
- if (match(IIOperand, m_BSwap(m_Value(X))))
- return replaceInstUsesWith(CI, X);
-
// bswap(trunc(bswap(x))) -> trunc(lshr(x, c))
if (match(IIOperand, m_Trunc(m_BSwap(m_Value(X))))) {
unsigned C = X->getType()->getPrimitiveSizeInBits() -
@@ -1923,18 +1915,6 @@ Instruction *InstCombiner::visitCallInst(CallInst &CI) {
}
break;
}
-
- case Intrinsic::bitreverse: {
- Value *IIOperand = II->getArgOperand(0);
- Value *X = nullptr;
-
- // TODO should this be in InstSimplify?
- // bitreverse(bitreverse(x)) -> x
- if (match(IIOperand, m_BitReverse(m_Value(X))))
- return replaceInstUsesWith(CI, X);
- break;
- }
-
case Intrinsic::masked_load:
if (Value *SimplifiedMaskedOp = simplifyMaskedLoad(*II, Builder))
return replaceInstUsesWith(CI, SimplifiedMaskedOp);
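[The bswap fold kept above relies on the identity bswap(trunc(bswap(x))) == trunc(lshr(x, c)), where c is the difference between the source and destination bit widths. A minimal standalone check of that identity for the i64 -> i32 case, using the GCC/Clang __builtin_bswap builtins purely as an illustration:]

  #include <cassert>
  #include <cstdint>

  // trunc(lshr(x, 32)): the folded form for a 64-bit to 32-bit truncation.
  static uint32_t foldedForm(uint64_t X) {
    return static_cast<uint32_t>(X >> 32);
  }

  // bswap(trunc(bswap(x))): the original form.
  static uint32_t originalForm(uint64_t X) {
    uint32_t Trunc = static_cast<uint32_t>(__builtin_bswap64(X));
    return __builtin_bswap32(Trunc);
  }

  int main() {
    for (uint64_t X : {0x0123456789ABCDEFULL, 0xFFFF0000FFFF0000ULL, 0ULL})
      assert(originalForm(X) == foldedForm(X));
    return 0;
  }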
@@ -1948,16 +1928,16 @@ Instruction *InstCombiner::visitCallInst(CallInst &CI) {
case Intrinsic::powi:
if (ConstantInt *Power = dyn_cast<ConstantInt>(II->getArgOperand(1))) {
- // powi(x, 0) -> 1.0
- if (Power->isZero())
- return replaceInstUsesWith(CI, ConstantFP::get(CI.getType(), 1.0));
- // powi(x, 1) -> x
- if (Power->isOne())
- return replaceInstUsesWith(CI, II->getArgOperand(0));
+ // 0 and 1 are handled in instsimplify
+
// powi(x, -1) -> 1/x
if (Power->isMinusOne())
return BinaryOperator::CreateFDiv(ConstantFP::get(CI.getType(), 1.0),
II->getArgOperand(0));
+ // powi(x, 2) -> x*x
+ if (Power->equalsInt(2))
+ return BinaryOperator::CreateFMul(II->getArgOperand(0),
+ II->getArgOperand(0));
}
break;
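[For the powi folds kept or added above, the intended scalar semantics are straightforward; refPowi below is only an illustrative stand-in for llvm.powi (repeated multiplication, reciprocal for negative exponents), not LLVM code:]

  #include <cassert>

  static double refPowi(double X, int N) {
    double R = 1.0;
    for (int I = 0, E = (N < 0 ? -N : N); I < E; ++I)
      R *= X;
    return N < 0 ? 1.0 / R : R;
  }

  int main() {
    double X = 3.5;
    assert(refPowi(X, 0) == 1.0);      // powi(x, 0) -> 1.0 (now in instsimplify)
    assert(refPowi(X, 1) == X);        // powi(x, 1) -> x   (now in instsimplify)
    assert(refPowi(X, -1) == 1.0 / X); // powi(x, -1) -> 1/x
    assert(refPowi(X, 2) == X * X);    // powi(x, 2)  -> x*x
    return 0;
  }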
@@ -2396,7 +2376,7 @@ Instruction *InstCombiner::visitCallInst(CallInst &CI) {
// The compare intrinsic uses the above assumptions and therefore
// doesn't require additional flags.
if ((match(Arg0, m_OneUse(m_FSub(m_Value(A), m_Value(B)))) &&
- match(Arg1, m_Zero()) &&
+ match(Arg1, m_Zero()) && isa<Instruction>(Arg0) &&
cast<Instruction>(Arg0)->getFastMathFlags().noInfs())) {
if (Arg0IsZero)
std::swap(A, B);
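[The isa<Instruction>(Arg0) guard added in the last hunk appears to protect the cast<Instruction> that follows: m_FSub can also match an fsub constant expression, which is not an Instruction and carries no fast-math flags. A sketch of that guarded-cast idiom, assuming nothing beyond the casting and PatternMatch utilities; fsubHasNoInfs is an illustrative helper, not part of the patch:]

  #include "llvm/IR/Instruction.h"
  #include "llvm/IR/PatternMatch.h"
  #include "llvm/Support/Casting.h"

  using namespace llvm;
  using namespace llvm::PatternMatch;

  static bool fsubHasNoInfs(Value *Arg0) {
    Value *A, *B;
    // Guard the cast: only query fast-math flags when the matched fsub is
    // actually an Instruction.
    if (match(Arg0, m_FSub(m_Value(A), m_Value(B))) && isa<Instruction>(Arg0))
      return cast<Instruction>(Arg0)->getFastMathFlags().noInfs();
    return false;
  }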