Diffstat (limited to 'lib/Transforms/InstCombine/InstCombineCasts.cpp')
-rw-r--r-- | lib/Transforms/InstCombine/InstCombineCasts.cpp | 90
1 files changed, 59 insertions, 31 deletions
diff --git a/lib/Transforms/InstCombine/InstCombineCasts.cpp b/lib/Transforms/InstCombine/InstCombineCasts.cpp
index 1201ac196ec0..2c9ba203fbf3 100644
--- a/lib/Transforms/InstCombine/InstCombineCasts.cpp
+++ b/lib/Transforms/InstCombine/InstCombineCasts.cpp
@@ -1,9 +1,8 @@
 //===- InstCombineCasts.cpp -----------------------------------------------===//
 //
-//                     The LLVM Compiler Infrastructure
-//
-// This file is distributed under the University of Illinois Open Source
-// License.  See LICENSE.TXT for details.
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
 //
 //===----------------------------------------------------------------------===//
 //
@@ -1373,10 +1372,8 @@ Instruction *InstCombiner::visitSExt(SExtInst &CI) {
   // If we know that the value being extended is positive, we can use a zext
   // instead.
   KnownBits Known = computeKnownBits(Src, 0, &CI);
-  if (Known.isNonNegative()) {
-    Value *ZExt = Builder.CreateZExt(Src, DestTy);
-    return replaceInstUsesWith(CI, ZExt);
-  }
+  if (Known.isNonNegative())
+    return CastInst::Create(Instruction::ZExt, Src, DestTy);
 
   // Try to extend the entire expression tree to the wide destination type.
   if (shouldChangeType(SrcTy, DestTy) && canEvaluateSExtd(Src, DestTy)) {
@@ -1618,12 +1615,20 @@ Instruction *InstCombiner::visitFPTrunc(FPTruncInst &FPT) {
         return CastInst::CreateFPCast(ExactResult, Ty);
       }
     }
+  }
 
-    // (fptrunc (fneg x)) -> (fneg (fptrunc x))
-    Value *X;
-    if (match(OpI, m_FNeg(m_Value(X)))) {
+  // (fptrunc (fneg x)) -> (fneg (fptrunc x))
+  Value *X;
+  Instruction *Op = dyn_cast<Instruction>(FPT.getOperand(0));
+  if (Op && Op->hasOneUse()) {
+    if (match(Op, m_FNeg(m_Value(X)))) {
       Value *InnerTrunc = Builder.CreateFPTrunc(X, Ty);
-      return BinaryOperator::CreateFNegFMF(InnerTrunc, OpI);
+
+      // FIXME: Once we're sure that unary FNeg optimizations are on par with
+      // binary FNeg, this should always return a unary operator.
+      if (isa<BinaryOperator>(Op))
+        return BinaryOperator::CreateFNegFMF(InnerTrunc, Op);
+      return UnaryOperator::CreateFNegFMF(InnerTrunc, Op);
     }
   }
 
@@ -1657,8 +1662,8 @@ Instruction *InstCombiner::visitFPTrunc(FPTruncInst &FPT) {
                                                      II->getIntrinsicID(), Ty);
       SmallVector<OperandBundleDef, 1> OpBundles;
       II->getOperandBundlesAsDefs(OpBundles);
-      CallInst *NewCI = CallInst::Create(Overload, { InnerTrunc }, OpBundles,
-                                         II->getName());
+      CallInst *NewCI =
+          CallInst::Create(Overload, {InnerTrunc}, OpBundles, II->getName());
       NewCI->copyFastMathFlags(II);
       return NewCI;
     }
@@ -2167,7 +2172,7 @@ Instruction *InstCombiner::optimizeBitCastFromPhi(CastInst &CI, PHINode *PN) {
   SmallSetVector<PHINode *, 4> OldPhiNodes;
 
   // Find all of the A->B casts and PHI nodes.
-  // We need to inpect all related PHI nodes, but PHIs can be cyclic, so
+  // We need to inspect all related PHI nodes, but PHIs can be cyclic, so
   // OldPhiNodes is used to track all known PHI nodes, before adding a new
   // PHI to PhiWorklist, it is checked against and added to OldPhiNodes first.
   PhiWorklist.push_back(PN);
@@ -2242,20 +2247,43 @@ Instruction *InstCombiner::optimizeBitCastFromPhi(CastInst &CI, PHINode *PN) {
     }
   }
 
+  // Traverse all accumulated PHI nodes and process its users,
+  // which are Stores and BitcCasts. Without this processing
+  // NewPHI nodes could be replicated and could lead to extra
+  // moves generated after DeSSA.
   // If there is a store with type B, change it to type A.
-  for (User *U : PN->users()) {
-    auto *SI = dyn_cast<StoreInst>(U);
-    if (SI && SI->isSimple() && SI->getOperand(0) == PN) {
-      Builder.SetInsertPoint(SI);
-      auto *NewBC =
-          cast<BitCastInst>(Builder.CreateBitCast(NewPNodes[PN], SrcTy));
-      SI->setOperand(0, NewBC);
-      Worklist.Add(SI);
-      assert(hasStoreUsersOnly(*NewBC));
+
+
+  // Replace users of BitCast B->A with NewPHI. These will help
+  // later to get rid off a closure formed by OldPHI nodes.
+  Instruction *RetVal = nullptr;
+  for (auto *OldPN : OldPhiNodes) {
+    PHINode *NewPN = NewPNodes[OldPN];
+    for (User *V : OldPN->users()) {
+      if (auto *SI = dyn_cast<StoreInst>(V)) {
+        if (SI->isSimple() && SI->getOperand(0) == OldPN) {
+          Builder.SetInsertPoint(SI);
+          auto *NewBC =
+              cast<BitCastInst>(Builder.CreateBitCast(NewPN, SrcTy));
+          SI->setOperand(0, NewBC);
+          Worklist.Add(SI);
+          assert(hasStoreUsersOnly(*NewBC));
+        }
+      }
+      else if (auto *BCI = dyn_cast<BitCastInst>(V)) {
+        // Verify it's a B->A cast.
+        Type *TyB = BCI->getOperand(0)->getType();
+        Type *TyA = BCI->getType();
+        if (TyA == DestTy && TyB == SrcTy) {
+          Instruction *I = replaceInstUsesWith(*BCI, NewPN);
+          if (BCI == &CI)
+            RetVal = I;
+        }
+      }
     }
   }
 
-  return replaceInstUsesWith(CI, NewPNodes[PN]);
+  return RetVal;
 }
 
 Instruction *InstCombiner::visitBitCast(BitCastInst &CI) {
@@ -2310,7 +2338,8 @@ Instruction *InstCombiner::visitBitCast(BitCastInst &CI) {
     // If we found a path from the src to dest, create the getelementptr now.
     if (SrcElTy == DstElTy) {
       SmallVector<Value *, 8> Idxs(NumZeros + 1, Builder.getInt32(0));
-      return GetElementPtrInst::CreateInBounds(Src, Idxs);
+      return GetElementPtrInst::CreateInBounds(SrcPTy->getElementType(), Src,
+                                               Idxs);
     }
   }
 
@@ -2355,11 +2384,10 @@ Instruction *InstCombiner::visitBitCast(BitCastInst &CI) {
     }
 
     // Otherwise, see if our source is an insert. If so, then use the scalar
-    // component directly.
-    if (InsertElementInst *IEI =
-          dyn_cast<InsertElementInst>(CI.getOperand(0)))
-      return CastInst::Create(Instruction::BitCast, IEI->getOperand(1),
-                              DestTy);
+    // component directly:
+    // bitcast (inselt <1 x elt> V, X, 0) to <n x m> --> bitcast X to <n x m>
+    if (auto *InsElt = dyn_cast<InsertElementInst>(Src))
+      return new BitCastInst(InsElt->getOperand(1), DestTy);
   }
 }
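Editor's note: the visitSExt hunk above rewrites a sign extension as a zero extension once computeKnownBits proves the operand non-negative. The standalone sketch below is not part of the patch and uses no LLVM API; all names in it are mine. It only spot-checks the bit-level fact the fold relies on: when the sign bit of the narrow value is known to be clear, sext and zext produce identical results.

// Illustrative check: for every 16-bit value with a clear sign bit,
// sign-extension and zero-extension to 32 bits give the same bit pattern.
#include <cassert>
#include <cstdint>
#include <cstdio>

int main() {
  for (uint32_t v = 0; v <= 0x7fff; ++v) {
    int32_t sext = static_cast<int32_t>(static_cast<int16_t>(v)); // sext i16 -> i32
    uint32_t zext = v;                                            // zext i16 -> i32
    assert(static_cast<uint32_t>(sext) == zext);
  }
  std::puts("sext == zext for every i16 value with a clear sign bit");
  return 0;
}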
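Editor's note: the visitFPTrunc hunk hoists an fneg out of an fptrunc (and now prefers a unary FNeg when the source was not a binary operator). The sketch below is illustrative only, not from the patch; it samples a few doubles to show the arithmetic fact behind the fold: negation just flips the sign bit, so it commutes exactly with the double-to-float truncation. NaN payloads are deliberately left out of the samples.

// Illustrative check that (float)(-x) and -((float)x) are bit-identical
// for ordinary doubles, i.e. that fneg commutes with fptrunc.
#include <cstdio>
#include <cstring>
#include <limits>

int main() {
  const double samples[] = {0.0, -0.0, 1.5, -2.25e10, 1e-320,
                            std::numeric_limits<double>::infinity()};
  for (double x : samples) {
    float a = static_cast<float>(-x);   // fptrunc (fneg x)
    float b = -static_cast<float>(x);   // fneg (fptrunc x)
    if (std::memcmp(&a, &b, sizeof(float)) != 0)
      std::printf("mismatch for %g\n", x);
  }
  std::puts("fneg commutes with fptrunc on the sampled values");
  return 0;
}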
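Editor's note: the visitBitCast hunk around line 2310 only changes the GetElementPtrInst::CreateInBounds call to pass the source element type explicitly; the surrounding fold itself rewrites a pointer bitcast as a GEP with a string of zero indices. The tiny example below is mine, not from the patch, and shows the layout fact that makes this legal: stepping through leading zero indices never moves the pointer.

// A struct's address equals the address of its recursively-first scalar
// member, which is why a pointer bitcast can become a getelementptr inbounds
// with all-zero indices (conceptually: gep %Outer, %Outer* p, 0, 0, 0).
#include <cstdio>

struct Inner { int first; int second; };
struct Outer { Inner inner; float tail; };

int main() {
  Outer o{};
  const void *asOuter = &o;                 // the aggregate itself
  const void *asFirstInt = &o.inner.first;  // first scalar reached via 0,0,0
  std::printf("same address: %s\n", asOuter == asFirstInt ? "yes" : "no");
  return 0;
}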