summary refs log tree commit diff
path: root/lib/Transforms/Scalar/NaryReassociate.cpp
diff options
context:
space:
mode:
Diffstat (limited to 'lib/Transforms/Scalar/NaryReassociate.cpp')
-rw-r--r-- lib/Transforms/Scalar/NaryReassociate.cpp | 49
1 files changed, 9 insertions, 40 deletions
diff --git a/lib/Transforms/Scalar/NaryReassociate.cpp b/lib/Transforms/Scalar/NaryReassociate.cpp
index c8f885e7eec53..ed754fa710253 100644
--- a/lib/Transforms/Scalar/NaryReassociate.cpp
+++ b/lib/Transforms/Scalar/NaryReassociate.cpp
@@ -208,7 +208,7 @@ FunctionPass *llvm::createNaryReassociatePass() {
}
bool NaryReassociate::runOnFunction(Function &F) {
- if (skipOptnoneFunction(F))
+ if (skipFunction(F))
return false;
AC = &getAnalysis<AssumptionCacheTracker>().getAssumptionCache(F);
@@ -299,49 +299,18 @@ Instruction *NaryReassociate::tryReassociate(Instruction *I) {
}
}
-// FIXME: extract this method into TTI->getGEPCost.
static bool isGEPFoldable(GetElementPtrInst *GEP,
- const TargetTransformInfo *TTI,
- const DataLayout *DL) {
- GlobalVariable *BaseGV = nullptr;
- int64_t BaseOffset = 0;
- bool HasBaseReg = false;
- int64_t Scale = 0;
-
- if (GlobalVariable *GV = dyn_cast<GlobalVariable>(GEP->getPointerOperand()))
- BaseGV = GV;
- else
- HasBaseReg = true;
-
- gep_type_iterator GTI = gep_type_begin(GEP);
- for (auto I = GEP->idx_begin(); I != GEP->idx_end(); ++I, ++GTI) {
- if (isa<SequentialType>(*GTI)) {
- int64_t ElementSize = DL->getTypeAllocSize(GTI.getIndexedType());
- if (ConstantInt *ConstIdx = dyn_cast<ConstantInt>(*I)) {
- BaseOffset += ConstIdx->getSExtValue() * ElementSize;
- } else {
- // Needs scale register.
- if (Scale != 0) {
- // No addressing mode takes two scale registers.
- return false;
- }
- Scale = ElementSize;
- }
- } else {
- StructType *STy = cast<StructType>(*GTI);
- uint64_t Field = cast<ConstantInt>(*I)->getZExtValue();
- BaseOffset += DL->getStructLayout(STy)->getElementOffset(Field);
- }
- }
-
- unsigned AddrSpace = GEP->getPointerAddressSpace();
- return TTI->isLegalAddressingMode(GEP->getType()->getElementType(), BaseGV,
- BaseOffset, HasBaseReg, Scale, AddrSpace);
+ const TargetTransformInfo *TTI) {
+ SmallVector<const Value*, 4> Indices;
+ for (auto I = GEP->idx_begin(); I != GEP->idx_end(); ++I)
+ Indices.push_back(*I);
+ return TTI->getGEPCost(GEP->getSourceElementType(), GEP->getPointerOperand(),
+ Indices) == TargetTransformInfo::TCC_Free;
}
Instruction *NaryReassociate::tryReassociateGEP(GetElementPtrInst *GEP) {
// Not worth reassociating GEP if it is foldable.
- if (isGEPFoldable(GEP, TTI, DL))
+ if (isGEPFoldable(GEP, TTI))
return nullptr;
gep_type_iterator GTI = gep_type_begin(*GEP);
@@ -434,7 +403,7 @@ GetElementPtrInst *NaryReassociate::tryReassociateGEPAtIndex(
// NewGEP = (char *)Candidate + RHS * sizeof(IndexedType)
uint64_t IndexedSize = DL->getTypeAllocSize(IndexedType);
- Type *ElementType = GEP->getType()->getElementType();
+ Type *ElementType = GEP->getResultElementType();
uint64_t ElementSize = DL->getTypeAllocSize(ElementType);
// Another less rare case: because I is not necessarily the last index of the
// GEP, the size of the type at the I-th index (IndexedSize) is not