diff options
| author | Dimitry Andric <dim@FreeBSD.org> | 2021-07-29 20:15:26 +0000 |
|---|---|---|
| committer | Dimitry Andric <dim@FreeBSD.org> | 2021-07-29 20:15:26 +0000 |
| commit | 344a3780b2e33f6ca763666c380202b18aab72a3 (patch) | |
| tree | f0b203ee6eb71d7fdd792373e3c81eb18d6934dd /llvm/lib/Analysis/MemoryDependenceAnalysis.cpp | |
| parent | b60736ec1405bb0a8dd40989f67ef4c93da068ab (diff) | |
vendor/llvm-project/llvmorg-13-init-16847-g88e66fa60ae5
vendor/llvm-project/llvmorg-12.0.1-rc2-0-ge7dac564cd0e
vendor/llvm-project/llvmorg-12.0.1-0-gfed41342a82f
Diffstat (limited to 'llvm/lib/Analysis/MemoryDependenceAnalysis.cpp')
| -rw-r--r-- | llvm/lib/Analysis/MemoryDependenceAnalysis.cpp | 105 |
1 file changed, 50 insertions(+), 55 deletions(-)
diff --git a/llvm/lib/Analysis/MemoryDependenceAnalysis.cpp b/llvm/lib/Analysis/MemoryDependenceAnalysis.cpp index 895936d47175..b44d15e71556 100644 --- a/llvm/lib/Analysis/MemoryDependenceAnalysis.cpp +++ b/llvm/lib/Analysis/MemoryDependenceAnalysis.cpp @@ -243,19 +243,10 @@ MemDepResult MemoryDependenceResults::getCallDependencyFrom( return MemDepResult::getNonFuncLocal(); } -static bool isVolatile(Instruction *Inst) { - if (auto *LI = dyn_cast<LoadInst>(Inst)) - return LI->isVolatile(); - if (auto *SI = dyn_cast<StoreInst>(Inst)) - return SI->isVolatile(); - if (auto *AI = dyn_cast<AtomicCmpXchgInst>(Inst)) - return AI->isVolatile(); - return false; -} - MemDepResult MemoryDependenceResults::getPointerDependencyFrom( const MemoryLocation &MemLoc, bool isLoad, BasicBlock::iterator ScanIt, - BasicBlock *BB, Instruction *QueryInst, unsigned *Limit) { + BasicBlock *BB, Instruction *QueryInst, unsigned *Limit, + BatchAAResults &BatchAA) { MemDepResult InvariantGroupDependency = MemDepResult::getUnknown(); if (QueryInst != nullptr) { if (auto *LI = dyn_cast<LoadInst>(QueryInst)) { @@ -266,7 +257,7 @@ MemDepResult MemoryDependenceResults::getPointerDependencyFrom( } } MemDepResult SimpleDep = getSimplePointerDependencyFrom( - MemLoc, isLoad, ScanIt, BB, QueryInst, Limit); + MemLoc, isLoad, ScanIt, BB, QueryInst, Limit, BatchAA); if (SimpleDep.isDef()) return SimpleDep; // Non-local invariant group dependency indicates there is non local Def @@ -280,6 +271,14 @@ MemDepResult MemoryDependenceResults::getPointerDependencyFrom( return SimpleDep; } +MemDepResult MemoryDependenceResults::getPointerDependencyFrom( + const MemoryLocation &MemLoc, bool isLoad, BasicBlock::iterator ScanIt, + BasicBlock *BB, Instruction *QueryInst, unsigned *Limit) { + BatchAAResults BatchAA(AA); + return getPointerDependencyFrom(MemLoc, isLoad, ScanIt, BB, QueryInst, Limit, + BatchAA); +} + MemDepResult MemoryDependenceResults::getInvariantGroupPointerDependency(LoadInst *LI, BasicBlock *BB) { 
@@ -344,7 +343,9 @@ MemoryDependenceResults::getInvariantGroupPointerDependency(LoadInst *LI, // If we hit load/store with the same invariant.group metadata (and the // same pointer operand) we can assume that value pointed by pointer // operand didn't change. - if ((isa<LoadInst>(U) || isa<StoreInst>(U)) && + if ((isa<LoadInst>(U) || + (isa<StoreInst>(U) && + cast<StoreInst>(U)->getPointerOperand() == Ptr)) && U->hasMetadata(LLVMContext::MD_invariant_group)) ClosestDependency = GetClosestDependency(ClosestDependency, U); } @@ -367,9 +368,8 @@ MemoryDependenceResults::getInvariantGroupPointerDependency(LoadInst *LI, MemDepResult MemoryDependenceResults::getSimplePointerDependencyFrom( const MemoryLocation &MemLoc, bool isLoad, BasicBlock::iterator ScanIt, - BasicBlock *BB, Instruction *QueryInst, unsigned *Limit) { - // We can batch AA queries, because IR does not change during a MemDep query. - BatchAAResults BatchAA(AA); + BasicBlock *BB, Instruction *QueryInst, unsigned *Limit, + BatchAAResults &BatchAA) { bool isInvariantLoad = false; unsigned DefaultLimit = getDefaultBlockScanLimit(); @@ -465,9 +465,9 @@ MemDepResult MemoryDependenceResults::getSimplePointerDependencyFrom( MemoryLocation Loc; /*ModRefInfo MR =*/ GetLocation(II, Loc, TLI); AliasResult R = BatchAA.alias(Loc, MemLoc); - if (R == NoAlias) + if (R == AliasResult::NoAlias) continue; - if (R == MustAlias) + if (R == AliasResult::MustAlias) return MemDepResult::getDef(II); if (ID == Intrinsic::masked_load) continue; @@ -489,7 +489,7 @@ MemDepResult MemoryDependenceResults::getSimplePointerDependencyFrom( if (!QueryInst) // Original QueryInst *may* be volatile return MemDepResult::getClobber(LI); - if (isVolatile(QueryInst)) + if (QueryInst->isVolatile()) // Ordering required if QueryInst is itself volatile return MemDepResult::getClobber(LI); // Otherwise, volatile doesn't imply any special ordering @@ -513,23 +513,19 @@ MemDepResult MemoryDependenceResults::getSimplePointerDependencyFrom( AliasResult 
R = BatchAA.alias(LoadLoc, MemLoc); if (isLoad) { - if (R == NoAlias) + if (R == AliasResult::NoAlias) continue; // Must aliased loads are defs of each other. - if (R == MustAlias) + if (R == AliasResult::MustAlias) return MemDepResult::getDef(Inst); -#if 0 // FIXME: Temporarily disabled. GVN is cleverly rewriting loads - // in terms of clobbering loads, but since it does this by looking - // at the clobbering load directly, it doesn't know about any - // phi translation that may have happened along the way. - // If we have a partial alias, then return this as a clobber for the // client to handle. - if (R == PartialAlias) + if (R == AliasResult::PartialAlias && R.hasOffset()) { + ClobberOffsets[LI] = R.getOffset(); return MemDepResult::getClobber(Inst); -#endif + } // Random may-alias loads don't depend on each other without a // dependence. @@ -537,7 +533,7 @@ MemDepResult MemoryDependenceResults::getSimplePointerDependencyFrom( } // Stores don't depend on other no-aliased accesses. - if (R == NoAlias) + if (R == AliasResult::NoAlias) continue; // Stores don't alias loads from read-only memory. @@ -583,9 +579,9 @@ MemDepResult MemoryDependenceResults::getSimplePointerDependencyFrom( // If we found a pointer, check if it could be the same as our pointer. AliasResult R = BatchAA.alias(StoreLoc, MemLoc); - if (R == NoAlias) + if (R == AliasResult::NoAlias) continue; - if (R == MustAlias) + if (R == AliasResult::MustAlias) return MemDepResult::getDef(Inst); if (isInvariantLoad) continue; @@ -620,8 +616,7 @@ MemDepResult MemoryDependenceResults::getSimplePointerDependencyFrom( ModRefInfo MR = BatchAA.getModRefInfo(Inst, MemLoc); // If necessary, perform additional analysis. if (isModAndRefSet(MR)) - // TODO: Support callCapturesBefore() on BatchAAResults. 
- MR = AA.callCapturesBefore(Inst, MemLoc, &DT); + MR = BatchAA.callCapturesBefore(Inst, MemLoc, &DT); switch (clearMust(MR)) { case ModRefInfo::NoModRef: // If the call has no effect on the queried pointer, just ignore it. @@ -648,6 +643,7 @@ MemDepResult MemoryDependenceResults::getSimplePointerDependencyFrom( } MemDepResult MemoryDependenceResults::getDependency(Instruction *QueryInst) { + ClobberOffsets.clear(); Instruction *ScanPos = QueryInst; // Check for a cached result @@ -721,7 +717,7 @@ MemoryDependenceResults::getNonLocalCallDependency(CallBase *QueryCall) { assert(getDependency(QueryCall).isNonLocal() && "getNonLocalCallDependency should only be used on calls with " "non-local deps!"); - PerInstNLInfo &CacheP = NonLocalDeps[QueryCall]; + PerInstNLInfo &CacheP = NonLocalDepsMap[QueryCall]; NonLocalDepInfo &Cache = CacheP.first; // This is the set of blocks that need to be recomputed. In the cached case, @@ -880,7 +876,7 @@ void MemoryDependenceResults::getNonLocalPointerDependency( } return false; }; - if (isVolatile(QueryInst) || isOrdered(QueryInst)) { + if (QueryInst->isVolatile() || isOrdered(QueryInst)) { Result.push_back(NonLocalDepResult(FromBB, MemDepResult::getUnknown(), const_cast<Value *>(Loc.Ptr))); return; @@ -906,9 +902,10 @@ void MemoryDependenceResults::getNonLocalPointerDependency( /// info if available). /// /// If we do a lookup, add the result to the cache. -MemDepResult MemoryDependenceResults::GetNonLocalInfoForBlock( +MemDepResult MemoryDependenceResults::getNonLocalInfoForBlock( Instruction *QueryInst, const MemoryLocation &Loc, bool isLoad, - BasicBlock *BB, NonLocalDepInfo *Cache, unsigned NumSortedEntries) { + BasicBlock *BB, NonLocalDepInfo *Cache, unsigned NumSortedEntries, + BatchAAResults &BatchAA) { bool isInvariantLoad = false; @@ -958,8 +955,8 @@ MemDepResult MemoryDependenceResults::GetNonLocalInfoForBlock( } // Scan the block for the dependency. 
- MemDepResult Dep = - getPointerDependencyFrom(Loc, isLoad, ScanPos, BB, QueryInst); + MemDepResult Dep = getPointerDependencyFrom(Loc, isLoad, ScanPos, BB, + QueryInst, nullptr, BatchAA); // Don't cache results for invariant load. if (isInvariantLoad) @@ -1133,9 +1130,6 @@ bool MemoryDependenceResults::getNonLocalPointerDepFromBB( // investigating, just return it with no recomputation. // Don't use cached information for invariant loads since it is valid for // non-invariant loads only. - // - // Don't use cached information for invariant loads since it is valid for - // non-invariant loads only. if (!IsIncomplete && !isInvariantLoad && CacheInfo->Pair == BBSkipFirstBlockPair(StartBB, SkipFirstBlock)) { // We have a fully cached result for this query then we can just return the @@ -1203,6 +1197,7 @@ bool MemoryDependenceResults::getNonLocalPointerDepFromBB( bool GotWorklistLimit = false; LLVM_DEBUG(AssertSorted(*Cache)); + BatchAAResults BatchAA(AA); while (!Worklist.empty()) { BasicBlock *BB = Worklist.pop_back_val(); @@ -1233,8 +1228,8 @@ bool MemoryDependenceResults::getNonLocalPointerDepFromBB( // Get the dependency info for Pointer in BB. If we have cached // information, we will use it, otherwise we compute it. LLVM_DEBUG(AssertSorted(*Cache, NumSortedEntries)); - MemDepResult Dep = GetNonLocalInfoForBlock(QueryInst, Loc, isLoad, BB, - Cache, NumSortedEntries); + MemDepResult Dep = getNonLocalInfoForBlock( + QueryInst, Loc, isLoad, BB, Cache, NumSortedEntries, BatchAA); // If we got a Def or Clobber, add this to the list of results. if (!Dep.isNonLocal()) { @@ -1456,7 +1451,7 @@ bool MemoryDependenceResults::getNonLocalPointerDepFromBB( } /// If P exists in CachedNonLocalPointerInfo or NonLocalDefsCache, remove it. -void MemoryDependenceResults::RemoveCachedNonLocalPointerDependencies( +void MemoryDependenceResults::removeCachedNonLocalPointerDependencies( ValueIsLoadPair P) { // Most of the time this cache is empty. 
@@ -1505,9 +1500,9 @@ void MemoryDependenceResults::invalidateCachedPointerInfo(Value *Ptr) { if (!Ptr->getType()->isPointerTy()) return; // Flush store info for the pointer. - RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(Ptr, false)); + removeCachedNonLocalPointerDependencies(ValueIsLoadPair(Ptr, false)); // Flush load info for the pointer. - RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(Ptr, true)); + removeCachedNonLocalPointerDependencies(ValueIsLoadPair(Ptr, true)); // Invalidate phis that use the pointer. PV.invalidateValue(Ptr); } @@ -1519,13 +1514,13 @@ void MemoryDependenceResults::invalidateCachedPredecessors() { void MemoryDependenceResults::removeInstruction(Instruction *RemInst) { // Walk through the Non-local dependencies, removing this one as the value // for any cached queries. - NonLocalDepMapType::iterator NLDI = NonLocalDeps.find(RemInst); - if (NLDI != NonLocalDeps.end()) { + NonLocalDepMapType::iterator NLDI = NonLocalDepsMap.find(RemInst); + if (NLDI != NonLocalDepsMap.end()) { NonLocalDepInfo &BlockMap = NLDI->second.first; for (auto &Entry : BlockMap) if (Instruction *Inst = Entry.getResult().getInst()) RemoveFromReverseMap(ReverseNonLocalDeps, Inst, RemInst); - NonLocalDeps.erase(NLDI); + NonLocalDepsMap.erase(NLDI); } // If we have a cached local dependence query for this instruction, remove it. @@ -1545,8 +1540,8 @@ void MemoryDependenceResults::removeInstruction(Instruction *RemInst) { // If the instruction is a pointer, remove it from both the load info and the // store info. if (RemInst->getType()->isPointerTy()) { - RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(RemInst, false)); - RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(RemInst, true)); + removeCachedNonLocalPointerDependencies(ValueIsLoadPair(RemInst, false)); + removeCachedNonLocalPointerDependencies(ValueIsLoadPair(RemInst, true)); } else { // Otherwise, if the instructions is in the map directly, it must be a load. // Remove it. 
@@ -1609,7 +1604,7 @@ void MemoryDependenceResults::removeInstruction(Instruction *RemInst) { for (Instruction *I : ReverseDepIt->second) { assert(I != RemInst && "Already removed NonLocalDep info for RemInst"); - PerInstNLInfo &INLD = NonLocalDeps[I]; + PerInstNLInfo &INLD = NonLocalDepsMap[I]; // The information is now dirty! INLD.second = true; @@ -1681,7 +1676,7 @@ void MemoryDependenceResults::removeInstruction(Instruction *RemInst) { // Invalidate phis that use the removed instruction. PV.invalidateValue(RemInst); - assert(!NonLocalDeps.count(RemInst) && "RemInst got reinserted?"); + assert(!NonLocalDepsMap.count(RemInst) && "RemInst got reinserted?"); LLVM_DEBUG(verifyRemoved(RemInst)); } @@ -1702,7 +1697,7 @@ void MemoryDependenceResults::verifyRemoved(Instruction *D) const { assert(Entry.getResult().getInst() != D && "Inst occurs as NLPD value"); } - for (const auto &DepKV : NonLocalDeps) { + for (const auto &DepKV : NonLocalDepsMap) { assert(DepKV.first != D && "Inst occurs in data structures"); const PerInstNLInfo &INLD = DepKV.second; for (const auto &Entry : INLD.first) |
