Diffstat (limited to 'lib/Analysis/ConstantFolding.cpp')
-rw-r--r--  lib/Analysis/ConstantFolding.cpp  42
1 file changed, 28 insertions, 14 deletions
diff --git a/lib/Analysis/ConstantFolding.cpp b/lib/Analysis/ConstantFolding.cpp
index a906770dbb34..0f5ec3f5626e 100644
--- a/lib/Analysis/ConstantFolding.cpp
+++ b/lib/Analysis/ConstantFolding.cpp
@@ -22,8 +22,8 @@
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/STLExtras.h"
-#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/SmallVector.h"
+#include "llvm/ADT/StringRef.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/Config/config.h"
@@ -1015,9 +1015,11 @@ Constant *ConstantFoldInstOperandsImpl(const Value *InstOrCE, unsigned Opcode,
   case Instruction::ICmp:
   case Instruction::FCmp: llvm_unreachable("Invalid for compares");
   case Instruction::Call:
-    if (auto *F = dyn_cast<Function>(Ops.back()))
-      if (canConstantFoldCallTo(F))
-        return ConstantFoldCall(F, Ops.slice(0, Ops.size() - 1), TLI);
+    if (auto *F = dyn_cast<Function>(Ops.back())) {
+      ImmutableCallSite CS(cast<CallInst>(InstOrCE));
+      if (canConstantFoldCallTo(CS, F))
+        return ConstantFoldCall(CS, F, Ops.slice(0, Ops.size() - 1), TLI);
+    }
     return nullptr;
   case Instruction::Select:
     return ConstantExpr::getSelect(Ops[0], Ops[1], Ops[2]);
@@ -1356,7 +1358,9 @@ llvm::ConstantFoldLoadThroughGEPIndices(Constant *C,
 // Constant Folding for Calls
 //
 
-bool llvm::canConstantFoldCallTo(const Function *F) {
+bool llvm::canConstantFoldCallTo(ImmutableCallSite CS, const Function *F) {
+  if (CS.isNoBuiltin())
+    return false;
   switch (F->getIntrinsicID()) {
   case Intrinsic::fabs:
   case Intrinsic::minnum:
@@ -1584,6 +1588,9 @@ Constant *ConstantFoldScalarCall(StringRef Name, unsigned IntrinsicID, Type *Ty,
       // cosine(arg) is between -1 and 1. cosine(invalid arg) is NaN
       if (IntrinsicID == Intrinsic::cos)
         return Constant::getNullValue(Ty);
+      if (IntrinsicID == Intrinsic::bswap ||
+          IntrinsicID == Intrinsic::bitreverse)
+        return Operands[0];
     }
     if (auto *Op = dyn_cast<ConstantFP>(Operands[0])) {
       if (IntrinsicID == Intrinsic::convert_to_fp16) {
@@ -1815,7 +1822,7 @@ Constant *ConstantFoldScalarCall(StringRef Name, unsigned IntrinsicID, Type *Ty,
                 dyn_cast_or_null<ConstantFP>(Op->getAggregateElement(0U)))
           return ConstantFoldSSEConvertToInt(FPOp->getValueAPF(),
                                              /*roundTowardZero=*/false, Ty);
-        LLVM_FALLTHROUGH;
+        break;
       case Intrinsic::x86_sse_cvttss2si:
       case Intrinsic::x86_sse_cvttss2si64:
       case Intrinsic::x86_sse2_cvttsd2si:
@@ -1824,16 +1831,10 @@ Constant *ConstantFoldScalarCall(StringRef Name, unsigned IntrinsicID, Type *Ty,
                 dyn_cast_or_null<ConstantFP>(Op->getAggregateElement(0U)))
           return ConstantFoldSSEConvertToInt(FPOp->getValueAPF(),
                                              /*roundTowardZero=*/true, Ty);
+        break;
       }
     }
 
-    if (isa<UndefValue>(Operands[0])) {
-      if (IntrinsicID == Intrinsic::bswap ||
-          IntrinsicID == Intrinsic::bitreverse)
-        return Operands[0];
-      return nullptr;
-    }
-
     return nullptr;
   }
@@ -2034,6 +2035,14 @@ Constant *ConstantFoldVectorCall(StringRef Name, unsigned IntrinsicID,
   for (unsigned I = 0, E = VTy->getNumElements(); I != E; ++I) {
     // Gather a column of constants.
     for (unsigned J = 0, JE = Operands.size(); J != JE; ++J) {
+      // These intrinsics use a scalar type for their second argument.
+      if (J == 1 &&
+          (IntrinsicID == Intrinsic::cttz || IntrinsicID == Intrinsic::ctlz ||
+           IntrinsicID == Intrinsic::powi)) {
+        Lane[J] = Operands[J];
+        continue;
+      }
+
       Constant *Agg = Operands[J]->getAggregateElement(I);
       if (!Agg)
         return nullptr;
@@ -2054,8 +2063,11 @@ Constant *ConstantFoldVectorCall(StringRef Name, unsigned IntrinsicID,
 } // end anonymous namespace
 
 Constant *
-llvm::ConstantFoldCall(Function *F, ArrayRef<Constant *> Operands,
+llvm::ConstantFoldCall(ImmutableCallSite CS, Function *F,
+                       ArrayRef<Constant *> Operands,
                        const TargetLibraryInfo *TLI) {
+  if (CS.isNoBuiltin())
+    return nullptr;
   if (!F->hasName())
     return nullptr;
   StringRef Name = F->getName();
@@ -2072,6 +2084,8 @@ llvm::ConstantFoldCall(Function *F, ArrayRef<Constant *> Operands,
 bool llvm::isMathLibCallNoop(CallSite CS, const TargetLibraryInfo *TLI) {
   // FIXME: Refactor this code; this duplicates logic in LibCallsShrinkWrap
   // (and to some extent ConstantFoldScalarCall).
+  if (CS.isNoBuiltin())
+    return false;
   Function *F = CS.getCalledFunction();
   if (!F)
     return false;
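
Caller-side sketch (illustration only, not part of the patch above): after this change both entry points take the call site so that calls marked nobuiltin are never folded. A minimal usage sketch, assuming a CallInst *CI whose constant operands have already been collected into ConstOps and a TargetLibraryInfo *TLI in scope; these names are assumptions for the sketch, not identifiers from this commit:

// Hypothetical caller of the post-patch API.
if (Function *Callee = CI->getCalledFunction()) {
  ImmutableCallSite CS(CI);
  // Both helpers now bail out early when CS.isNoBuiltin() is set, so a
  // nobuiltin call simply fails to fold instead of being treated as the
  // library/intrinsic function it names.
  if (canConstantFoldCallTo(CS, Callee))
    if (Constant *Folded = ConstantFoldCall(CS, Callee, ConstOps, TLI))
      return Folded;
}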