Diffstat (limited to 'lib/CodeGen/PeepholeOptimizer.cpp')
-rw-r--r--  lib/CodeGen/PeepholeOptimizer.cpp | 24
1 file changed, 10 insertions(+), 14 deletions(-)
diff --git a/lib/CodeGen/PeepholeOptimizer.cpp b/lib/CodeGen/PeepholeOptimizer.cpp
index 1d058ccfb633..b918396aa8c5 100644
--- a/lib/CodeGen/PeepholeOptimizer.cpp
+++ b/lib/CodeGen/PeepholeOptimizer.cpp
@@ -1,9 +1,8 @@
//===- PeepholeOptimizer.cpp - Peephole Optimizations ---------------------===//
//
-// The LLVM Compiler Infrastructure
-//
-// This file is distributed under the University of Illinois Open Source
-// License. See LICENSE.TXT for details.
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
@@ -1307,7 +1306,7 @@ bool PeepholeOptimizer::optimizeUncoalescableCopy(
/// Check whether MI is a candidate for folding into a later instruction.
/// We only fold loads to virtual registers and the virtual register defined
-/// has a single use.
+/// has a single user.
bool PeepholeOptimizer::isLoadFoldable(
MachineInstr &MI, SmallSet<unsigned, 16> &FoldAsLoadDefCandidates) {
if (!MI.canFoldAsLoad() || !MI.mayLoad())
@@ -1317,12 +1316,12 @@ bool PeepholeOptimizer::isLoadFoldable(
return false;
unsigned Reg = MI.getOperand(0).getReg();
- // To reduce compilation time, we check MRI->hasOneNonDBGUse when inserting
+ // To reduce compilation time, we check MRI->hasOneNonDBGUser when inserting
// loads. It should be checked when processing uses of the load, since
// uses can be removed during peephole.
if (!MI.getOperand(0).getSubReg() &&
TargetRegisterInfo::isVirtualRegister(Reg) &&
- MRI->hasOneNonDBGUse(Reg)) {
+ MRI->hasOneNonDBGUser(Reg)) {
FoldAsLoadDefCandidates.insert(Reg);
return true;
}
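
The switch from hasOneNonDBGUse to hasOneNonDBGUser is a semantic change, not just a rename: a single instruction can read the same virtual register through more than one operand, so a register may have several uses but only one user. A standalone sketch of that distinction follows; ToyInstr and both counting helpers are invented toy code for illustration, not LLVM's MachineRegisterInfo API.

#include <cstdio>
#include <vector>

// Toy stand-in for an instruction's register use operands (not an LLVM type).
struct ToyInstr { std::vector<unsigned> UseOperands; };

// Analogue of counting "uses": every operand that reads Reg counts once.
static unsigned countUses(const std::vector<ToyInstr> &Instrs, unsigned Reg) {
  unsigned N = 0;
  for (const ToyInstr &I : Instrs)
    for (unsigned R : I.UseOperands)
      if (R == Reg)
        ++N;
  return N;
}

// Analogue of counting "users": every *instruction* reading Reg counts once.
static unsigned countUsers(const std::vector<ToyInstr> &Instrs, unsigned Reg) {
  unsigned N = 0;
  for (const ToyInstr &I : Instrs)
    for (unsigned R : I.UseOperands)
      if (R == Reg) {
        ++N;
        break; // count this instruction only once
      }
  return N;
}

int main() {
  // One instruction reading virtual register 5 twice, e.g. ADD %6, %5, %5.
  std::vector<ToyInstr> Instrs = {{{5, 5}}};
  std::printf("uses=%u users=%u\n", countUses(Instrs, 5), countUsers(Instrs, 5));
  // Prints "uses=2 users=1": a check modelled on hasOneNonDBGUse would reject
  // this register, while one modelled on hasOneNonDBGUser would accept it.
  return 0;
}
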
@@ -1778,6 +1777,8 @@ bool PeepholeOptimizer::runOnMachineFunction(MachineFunction &MF) {
LocalMIs.erase(MI);
LocalMIs.erase(DefMI);
LocalMIs.insert(FoldMI);
+ if (MI->isCall())
+ MI->getMF()->updateCallSiteInfo(MI, FoldMI);
MI->eraseFromParent();
DefMI->eraseFromParent();
MRI->markUsesInDebugValueAsUndef(FoldedReg);
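
The added guard transfers the function's call-site bookkeeping, which is keyed by MachineInstr pointers, from MI to FoldMI before MI is erased, so no entry is left pointing at a deleted call instruction. The standalone sketch below shows the same move-before-erase pattern; Instr, CallSiteInfo, and moveEntry are invented toy names, not LLVM API.

#include <cassert>
#include <map>
#include <memory>

// Toy stand-ins (not LLVM types): an "instruction" plus per-call-site data
// kept in a side table keyed by instruction pointer.
struct Instr { bool IsCall = false; };
struct CallSiteInfo { int NumArgRegs = 0; };
using CallSiteMap = std::map<const Instr *, CallSiteInfo>;

// Move the side-table entry from Old to New before Old is destroyed, so the
// table never holds a key for a dead instruction.
static void moveEntry(CallSiteMap &Map, const Instr *Old, const Instr *New) {
  auto It = Map.find(Old);
  if (It == Map.end())
    return;
  Map[New] = It->second;
  Map.erase(It);
}

int main() {
  auto OldCall = std::make_unique<Instr>();
  OldCall->IsCall = true;
  auto FoldedCall = std::make_unique<Instr>();
  FoldedCall->IsCall = true;

  CallSiteMap Map;
  Map[OldCall.get()] = {2}; // bookkeeping for the original call

  if (OldCall->IsCall) // mirrors the "MI->isCall()" guard in the hunk above
    moveEntry(Map, OldCall.get(), FoldedCall.get());
  OldCall.reset(); // safe: no entry keyed by the erased instruction remains

  assert(Map.size() == 1 && Map.count(FoldedCall.get()) == 1);
  return 0;
}
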
@@ -1826,7 +1827,7 @@ ValueTrackerResult ValueTracker::getNextSourceFromBitcast() {
assert(Def->isBitcast() && "Invalid definition");
// Bail if there are effects that a plain copy will not expose.
- if (Def->hasUnmodeledSideEffects())
+ if (Def->mayRaiseFPException() || Def->hasUnmodeledSideEffects())
return ValueTrackerResult();
// Bitcasts with more than one def are not supported.
@@ -1901,13 +1902,8 @@ ValueTrackerResult ValueTracker::getNextSourceFromRegSequence() {
// Def = REG_SEQUENCE v0, sub0, v1, sub1, ...
// Check if one of the operand defines the subreg we are interested in.
for (const RegSubRegPairAndIdx &RegSeqInput : RegSeqInputRegs) {
- if (RegSeqInput.SubIdx == DefSubReg) {
- if (RegSeqInput.SubReg)
- // Bail if we have to compose sub registers.
- return ValueTrackerResult();
-
+ if (RegSeqInput.SubIdx == DefSubReg)
return ValueTrackerResult(RegSeqInput.Reg, RegSeqInput.SubReg);
- }
}
// If the subreg we are tracking is super-defined by another subreg,