Diffstat (limited to 'lib/CodeGen/CodeGenPrepare.cpp')
-rw-r--r--   lib/CodeGen/CodeGenPrepare.cpp   81
1 file changed, 23 insertions, 58 deletions
diff --git a/lib/CodeGen/CodeGenPrepare.cpp b/lib/CodeGen/CodeGenPrepare.cpp
index b7155ac2480a7..45dc13d58de70 100644
--- a/lib/CodeGen/CodeGenPrepare.cpp
+++ b/lib/CodeGen/CodeGenPrepare.cpp
@@ -4267,9 +4267,7 @@ bool CodeGenPrepare::optimizeMemoryInst(Instruction *MemoryInst, Value *Addr,
   // Use a worklist to iteratively look through PHI nodes, and ensure that
   // the addressing mode obtained from the non-PHI roots of the graph
   // are equivalent.
-  Value *Consensus = nullptr;
-  unsigned NumUsesConsensus = 0;
-  bool IsNumUsesConsensusValid = false;
+  bool AddrModeFound = false;
   bool PhiSeen = false;
   SmallVector<Instruction*, 16> AddrModeInsts;
   ExtAddrMode AddrMode;
@@ -4280,11 +4278,17 @@ bool CodeGenPrepare::optimizeMemoryInst(Instruction *MemoryInst, Value *Addr,
     Value *V = worklist.back();
     worklist.pop_back();
 
-    // Break use-def graph loops.
-    if (!Visited.insert(V).second) {
-      Consensus = nullptr;
-      break;
-    }
+    // We allow traversing cyclic Phi nodes.
+    // In case of success after this loop we ensure that traversing through
+    // Phi nodes ends up with all cases to compute address of the form
+    //    BaseGV + Base + Scale * Index + Offset
+    // where Scale and Offset are constans and BaseGV, Base and Index
+    // are exactly the same Values in all cases.
+    // It means that BaseGV, Scale and Offset dominate our memory instruction
+    // and have the same value as they had in address computation represented
+    // as Phi. So we can safely sink address computation to memory instruction.
+    if (!Visited.insert(V).second)
+      continue;
 
     // For a PHI node, push all of its incoming values.
     if (PHINode *P = dyn_cast<PHINode>(V)) {
@@ -4297,47 +4301,26 @@ bool CodeGenPrepare::optimizeMemoryInst(Instruction *MemoryInst, Value *Addr,
     // For non-PHIs, determine the addressing mode being computed.  Note that
     // the result may differ depending on what other uses our candidate
     // addressing instructions might have.
-    SmallVector<Instruction*, 16> NewAddrModeInsts;
+    AddrModeInsts.clear();
     ExtAddrMode NewAddrMode = AddressingModeMatcher::Match(
-      V, AccessTy, AddrSpace, MemoryInst, NewAddrModeInsts, *TLI, *TRI,
-      InsertedInsts, PromotedInsts, TPT);
-
-    // This check is broken into two cases with very similar code to avoid using
-    // getNumUses() as much as possible. Some values have a lot of uses, so
-    // calling getNumUses() unconditionally caused a significant compile-time
-    // regression.
-    if (!Consensus) {
-      Consensus = V;
-      AddrMode = NewAddrMode;
-      AddrModeInsts = NewAddrModeInsts;
-      continue;
-    } else if (NewAddrMode == AddrMode) {
-      if (!IsNumUsesConsensusValid) {
-        NumUsesConsensus = Consensus->getNumUses();
-        IsNumUsesConsensusValid = true;
-      }
+        V, AccessTy, AddrSpace, MemoryInst, AddrModeInsts, *TLI, *TRI,
+        InsertedInsts, PromotedInsts, TPT);
 
-      // Ensure that the obtained addressing mode is equivalent to that obtained
-      // for all other roots of the PHI traversal.  Also, when choosing one
-      // such root as representative, select the one with the most uses in order
-      // to keep the cost modeling heuristics in AddressingModeMatcher
-      // applicable.
-      unsigned NumUses = V->getNumUses();
-      if (NumUses > NumUsesConsensus) {
-        Consensus = V;
-        NumUsesConsensus = NumUses;
-        AddrModeInsts = NewAddrModeInsts;
-      }
+    if (!AddrModeFound) {
+      AddrModeFound = true;
+      AddrMode = NewAddrMode;
       continue;
     }
+    if (NewAddrMode == AddrMode)
+      continue;
 
-    Consensus = nullptr;
+    AddrModeFound = false;
     break;
   }
 
   // If the addressing mode couldn't be determined, or if multiple different
   // ones were determined, bail out now.
-  if (!Consensus) {
+  if (!AddrModeFound) {
     TPT.rollback(LastKnownGood);
     return false;
   }
@@ -4847,25 +4830,7 @@ bool CodeGenPrepare::canFormExtLd(
   if (!HasPromoted && LI->getParent() == Inst->getParent())
     return false;
 
-  EVT VT = TLI->getValueType(*DL, Inst->getType());
-  EVT LoadVT = TLI->getValueType(*DL, LI->getType());
-
-  // If the load has other users and the truncate is not free, this probably
-  // isn't worthwhile.
-  if (!LI->hasOneUse() && (TLI->isTypeLegal(LoadVT) || !TLI->isTypeLegal(VT)) &&
-      !TLI->isTruncateFree(Inst->getType(), LI->getType()))
-    return false;
-
-  // Check whether the target supports casts folded into loads.
-  unsigned LType;
-  if (isa<ZExtInst>(Inst))
-    LType = ISD::ZEXTLOAD;
-  else {
-    assert(isa<SExtInst>(Inst) && "Unexpected ext type!");
-    LType = ISD::SEXTLOAD;
-  }
-
-  return TLI->isLoadExtLegal(LType, VT, LoadVT);
+  return TLI->isExtLoad(LI, Inst, *DL);
 }
 
 /// Move a zext or sext fed by a load into the same basic block as the load,
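The first three hunks replace the old consensus bookkeeping (Consensus, NumUsesConsensus, IsNumUsesConsensusValid) with a single AddrModeFound flag: the worklist now skips values it has already visited instead of giving up on cycles, and only requires that every non-PHI root of the Phi graph yields the same ExtAddrMode. The sketch below is a minimal standalone C++ model of that control flow, not LLVM code; Node, findCommonAddrMode, and the int standing in for ExtAddrMode are all invented for the illustration.

// Simplified model of the post-patch traversal: walk through PHI nodes,
// tolerate cycles by skipping visited values, and require all non-PHI
// roots to agree on one addressing mode. All names here are stand-ins.
#include <cassert>
#include <optional>
#include <set>
#include <vector>

struct Node {                        // stand-in for a Value
  bool IsPhi = false;
  std::vector<Node *> Incoming;      // PHI incoming values
  int AddrMode = 0;                  // stand-in for the ExtAddrMode of a non-PHI root
};

// Returns the common addressing mode of all non-PHI roots reachable from
// Addr, or std::nullopt if the roots disagree.
std::optional<int> findCommonAddrMode(Node *Addr) {
  std::optional<int> Found;          // plays the role of AddrModeFound/AddrMode
  std::set<Node *> Visited;
  std::vector<Node *> Worklist{Addr};
  while (!Worklist.empty()) {
    Node *V = Worklist.back();
    Worklist.pop_back();
    if (!Visited.insert(V).second)   // revisiting a value just skips it;
      continue;                      // the pre-patch code gave up here
    if (V->IsPhi) {                  // look through PHIs
      Worklist.insert(Worklist.end(), V->Incoming.begin(), V->Incoming.end());
      continue;
    }
    if (!Found) {                    // first non-PHI root seen
      Found = V->AddrMode;
      continue;
    }
    if (*Found == V->AddrMode)       // same mode as before: keep going
      continue;
    return std::nullopt;             // roots disagree: bail out
  }
  return Found;
}

int main() {
  // Two PHIs that reference each other (a cycle) but share one non-PHI root.
  Node Root{false, {}, 42};
  Node P1{true}, P2{true};
  P1.Incoming = {&Root, &P2};
  P2.Incoming = {&P1};
  assert(findCommonAddrMode(&P1) == 42);
}

In this model the self-referential P1/P2 pair would have tripped the old visited check and aborted the sink; after the change the walk continues past the cycle and the single root's mode is used. The last hunk is independent cleanup in canFormExtLd, folding the inline legality checks into the TLI->isExtLoad helper shown in the diff.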
