#define DEBUG_TYPE "memdep"

STATISTIC(NumCacheNonLocal, "Number of fully cached non-local responses");
STATISTIC(NumCacheDirtyNonLocal, "Number of dirty cached non-local responses");
STATISTIC(NumUncacheNonLocal, "Number of uncached non-local responses");
STATISTIC(NumCacheNonLocalPtr,
          "Number of fully cached non-local ptr responses");
STATISTIC(NumCacheDirtyNonLocalPtr,
          "Number of cached, but dirty, non-local ptr responses");
STATISTIC(NumUncacheNonLocalPtr, "Number of uncached non-local ptr responses");
STATISTIC(NumCacheCompleteNonLocalPtr,
          "Number of block queries that were completely cached");

static cl::opt<unsigned> BlockScanLimit(
    "memdep-block-scan-limit", cl::Hidden, cl::init(100),
    cl::desc("The number of instructions to scan in a block in memory "
             "dependency analysis (default = 100)"));

static cl::opt<unsigned>
    BlockNumberLimit("memdep-block-number-limit", cl::Hidden, cl::init(1000),
                     cl::desc("The number of blocks to scan during memory "
                              "dependency analysis (default = 1000)"));
template <typename KeyTy>
static void
RemoveFromReverseMap(DenseMap<Instruction *, SmallPtrSet<KeyTy, 4>> &ReverseMap,
                     Instruction *Inst, KeyTy Val) {
  typename DenseMap<Instruction *, SmallPtrSet<KeyTy, 4>>::iterator InstIt =
      ReverseMap.find(Inst);
  assert(InstIt != ReverseMap.end() && "Reverse map out of sync?");
  bool Found = InstIt->second.erase(Val);
  assert(Found && "Invalid reverse map!");
  (void)Found;
  if (InstIt->second.empty())
    ReverseMap.erase(InstIt);
}
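/// If the given instruction references a specific memory location, fill in
/// Loc with the details, otherwise set Loc.Ptr to null.
///
/// Returns a ModRefInfo value describing the general behavior of the
/// instruction.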
static ModRefInfo GetLocation(const Instruction *Inst, MemoryLocation &Loc,
                              const TargetLibraryInfo &TLI) {
  if (const LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
    if (LI->isUnordered()) {
      Loc = MemoryLocation::get(LI);
      return MRI_Ref;
    }
    // ...
  }

  if (const StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
    if (SI->isUnordered()) {
      Loc = MemoryLocation::get(SI);
      return MRI_Mod;
    }
    // ...
  }

  if (const VAArgInst *V = dyn_cast<VAArgInst>(Inst)) {
    Loc = MemoryLocation::get(V);
    return MRI_ModRef;
  }

  if (const IntrinsicInst *II = dyn_cast<IntrinsicInst>(Inst)) {
    AAMDNodes AAInfo;

    switch (II->getIntrinsicID()) {
    case Intrinsic::lifetime_start:
    case Intrinsic::lifetime_end:
    case Intrinsic::invariant_start:
      II->getAAMetadata(AAInfo);
      Loc = MemoryLocation(
          II->getArgOperand(1),
          cast<ConstantInt>(II->getArgOperand(0))->getZExtValue(), AAInfo);
      // These intrinsics don't really modify the memory, but returning Mod
      // will allow them to be handled conservatively.
      return MRI_Mod;
    case Intrinsic::invariant_end:
      II->getAAMetadata(AAInfo);
      Loc = MemoryLocation(
          II->getArgOperand(2),
          cast<ConstantInt>(II->getArgOperand(1))->getZExtValue(), AAInfo);
      return MRI_Mod;
    // ...
    }
  }
  // ...
}
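/// Private helper for finding the local dependencies of a call site.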
MemDepResult MemoryDependenceResults::getCallSiteDependencyFrom(
    CallSite CS, bool isReadOnlyCall, BasicBlock::iterator ScanIt,
    BasicBlock *BB) {
  // Walk backwards through the block, looking for dependencies.
  while (ScanIt != BB->begin()) {
    Instruction *Inst = &*--ScanIt;
    // Debug intrinsics don't (and can't) cause dependencies.
    if (isa<DbgInfoIntrinsic>(Inst))
      continue;
    // ...
    // Identical read-only calls are Def dependencies of each other.
    if (isReadOnlyCall && !(MR & MRI_Mod) &&
        CS.getInstruction()->isIdenticalToWhenDefined(Inst))
      return MemDepResult::getDef(Inst);
    // ...
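/// Looks at a memory location for a load (specified by MemLocBase, Offs, and
/// Size) and compares it against a load.
///
/// If the specified load could be safely widened to a larger integer load
/// that is 1) still efficient, 2) safe for the target, and 3) would provide
/// the specified memory location value, then this function returns the size
/// in bytes of the load width to use.  If not, this returns zero.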
static unsigned getLoadLoadClobberFullWidthSize(const Value *MemLocBase,
                                                int64_t MemLocOffs,
                                                unsigned MemLocSize,
                                                const LoadInst *LI) {
  // ...
  const Value *LIBase =
      GetPointerBaseWithConstantOffset(LI->getPointerOperand(), LIOffs, DL);
  // If the two pointers are not based on the same pointer, we can't tell that
  // they are related.
  if (LIBase != MemLocBase)
    return 0;
  // If MemLoc is before LI, then no widening of LI will help us out.
  if (MemLocOffs < LIOffs)
    return 0;
  // ...
  int64_t MemLocEnd = MemLocOffs + MemLocSize;
  // If no amount of rounding up will let MemLoc fit into LI, then bail out.
  if (LIOffs + LoadAlign < MemLocEnd)
    return 0;
  // ...
  // Try successively wider (power-of-two) load sizes.
  if (NewLoadByteSize > LoadAlign ||
      !DL.fitsInLegalInteger(NewLoadByteSize * 8))
    return 0;

  // Widening must not read past the original access under AddressSanitizer.
  if (LIOffs + NewLoadByteSize > MemLocEnd &&
      LI->getParent()->getParent()->hasFnAttribute(
          Attribute::SanitizeAddress))
    return 0;

  // If a load of this width would include all of MemLoc, then we succeed.
  if (LIOffs + NewLoadByteSize >= MemLocEnd)
    return NewLoadByteSize;

  NewLoadByteSize <<= 1;
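/// Return whether the given instruction is a volatile memory access.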
static bool isVolatile(Instruction *Inst) {
  if (LoadInst *LI = dyn_cast<LoadInst>(Inst))
    return LI->isVolatile();
  else if (StoreInst *SI = dyn_cast<StoreInst>(Inst))
    return SI->isVolatile();
  else if (AtomicCmpXchgInst *AI = dyn_cast<AtomicCmpXchgInst>(Inst))
    return AI->isVolatile();
  return false;
}
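/// Returns the instruction on which a memory location depends.
///
/// If isLoad is true, this routine ignores may-aliases with read-only
/// operations.  If possible, pass the query instruction as well; this routine
/// can use metadata attached to the query (e.g. invariant.group) to refine
/// the result.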
MemDepResult MemoryDependenceResults::getPointerDependencyFrom(
    const MemoryLocation &MemLoc, bool isLoad, BasicBlock::iterator ScanIt,
    BasicBlock *BB, Instruction *QueryInst, unsigned *Limit) {
  MemDepResult InvariantGroupDependency = MemDepResult::getUnknown();
  if (QueryInst != nullptr) {
    if (auto *LI = dyn_cast<LoadInst>(QueryInst)) {
      InvariantGroupDependency = getInvariantGroupPointerDependency(LI, BB);
      if (InvariantGroupDependency.isDef())
        return InvariantGroupDependency;
    }
  }
  MemDepResult SimpleDep = getSimplePointerDependencyFrom(
      MemLoc, isLoad, ScanIt, BB, QueryInst, Limit);
  if (SimpleDep.isDef())
    return SimpleDep;
  // A non-local invariant.group dependency is still better than a local
  // clobber, so prefer it.
  if (InvariantGroupDependency.isNonLocal())
    return InvariantGroupDependency;

  assert(InvariantGroupDependency.isUnknown() &&
         "InvariantGroupDependency should be only unknown at this point");
  return SimpleDep;
}
MemDepResult
MemoryDependenceResults::getInvariantGroupPointerDependency(LoadInst *LI,
                                                            BasicBlock *BB) {
  auto *InvariantGroupMD = LI->getMetadata(LLVMContext::MD_invariant_group);
  if (!InvariantGroupMD)
    return MemDepResult::getUnknown();

  // Look through casts and all-zero GEPs to the underlying pointer operand.
  Value *LoadOperand = LI->getPointerOperand()->stripPointerCasts();
  // It is not safe to walk the use list of a global in a function pass.
  if (isa<GlobalValue>(LoadOperand))
    return MemDepResult::getUnknown();

  // Queue of pointers equivalent to the load operand.
  SmallVector<const Value *, 8> LoadOperandsQueue;
  LoadOperandsQueue.push_back(LoadOperand);

  Instruction *ClosestDependency = nullptr;
  // Of all candidate accesses, keep the one closest to LI in dominance order.
  auto GetClosestDependency = [this](Instruction *Best, Instruction *Other) {
    assert(Other && "Must call it with not null instruction");
    if (Best == nullptr || DT.dominates(Best, Other))
      return Other;
    return Best;
  };

  while (!LoadOperandsQueue.empty()) {
    const Value *Ptr = LoadOperandsQueue.pop_back_val();
    assert(Ptr && !isa<GlobalValue>(Ptr) &&
           "Null or GlobalValue should not be inserted");

    for (const Use &Us : Ptr->uses()) {
      auto *U = dyn_cast<Instruction>(Us.getUser());
      if (!U || U == LI || !DT.dominates(U, LI))
        continue;

      // Bitcasts and all-zero GEPs produce the same pointer; queue their
      // users as well.
      if (isa<BitCastInst>(U)) {
        LoadOperandsQueue.push_back(U);
        continue;
      }
      if (auto *GEP = dyn_cast<GetElementPtrInst>(U))
        if (GEP->hasAllZeroIndices()) {
          LoadOperandsQueue.push_back(U);
          continue;
        }

      // A load or store with the same invariant.group metadata pins the
      // pointed-to value.
      if ((isa<LoadInst>(U) || isa<StoreInst>(U)) &&
          U->getMetadata(LLVMContext::MD_invariant_group) == InvariantGroupMD)
        ClosestDependency = GetClosestDependency(ClosestDependency, U);
    }
  }

  if (!ClosestDependency)
    return MemDepResult::getUnknown();
  if (ClosestDependency->getParent() == BB)
    return MemDepResult::getDef(ClosestDependency);
  // The Def is in another block; cache it for getNonLocalPointerDependency.
  NonLocalDefsCache.try_emplace(
      LI, NonLocalDepResult(ClosestDependency->getParent(),
                            MemDepResult::getDef(ClosestDependency), nullptr));
  return MemDepResult::getNonLocal();
}
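/// Helper for getPointerDependencyFrom: scan the block backwards from ScanIt
/// for the instruction on which the location MemLoc depends, without
/// considering invariant.group metadata.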
MemDepResult MemoryDependenceResults::getSimplePointerDependencyFrom(
    const MemoryLocation &MemLoc, bool isLoad, BasicBlock::iterator ScanIt,
    BasicBlock *BB, Instruction *QueryInst, unsigned *Limit) {
  bool isInvariantLoad = false;
  // ...
  // Remember whether the query is an !invariant.load; such loads are not
  // clobbered by writes to the location.
  if (isLoad && QueryInst) {
    LoadInst *LI = dyn_cast<LoadInst>(QueryInst);
    if (LI && LI->getMetadata(LLVMContext::MD_invariant_load) != nullptr)
      isInvariantLoad = true;
  }
  // ...
  // Atomic or volatile accesses need more careful treatment than simple ones.
  auto isNonSimpleLoadOrStore = [](Instruction *I) -> bool {
    if (auto *LI = dyn_cast<LoadInst>(I))
      return !LI->isSimple();
    if (auto *SI = dyn_cast<StoreInst>(I))
      return !SI->isSimple();
    return false;
  };

  // True iff I is some other kind of memory-touching instruction.
  auto isOtherMemAccess = [](Instruction *I) -> bool {
    return !isa<LoadInst>(I) && !isa<StoreInst>(I) && I->mayReadOrWriteMemory();
  };
  // Walk backwards through the block, looking for dependencies.
  while (ScanIt != BB->begin()) {
    Instruction *Inst = &*--ScanIt;

    if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(Inst)) {
      // Debug intrinsics don't (and can't) cause dependencies.
      if (isa<DbgInfoIntrinsic>(II))
        continue;
      // A lifetime.start of the queried location behaves like a definition.
      if (II->getIntrinsicID() == Intrinsic::lifetime_start) {
        // ...
      }
    }

    // Values depend on loads if the pointers are must aliased.
    if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
      // Volatile accesses can't be eliminated, but they don't have to clobber
      // non-aliasing locations.
      if (LI->isVolatile()) {
        // ...
      }
      // Atomic loads are treated conservatively unless the query itself is a
      // simple load or store.
      if (isStrongerThanUnordered(LI->getOrdering())) {
        if (!QueryInst || isNonSimpleLoadOrStore(QueryInst) ||
            isOtherMemAccess(QueryInst))
          return MemDepResult::getClobber(LI);
        // ...
      }
      // ...
#if 0 // FIXME: Temporarily disabled. GVN is cleverly rewriting loads
      // ...
#endif
    }

    if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
      // Atomic stores are likewise treated conservatively.
      if (!SI->isUnordered() && SI->isAtomic()) {
        if (!QueryInst || isNonSimpleLoadOrStore(QueryInst) ||
            isOtherMemAccess(QueryInst))
          return MemDepResult::getClobber(SI);
        // ...
      }
      if (SI->isVolatile())
        if (!QueryInst || isNonSimpleLoadOrStore(QueryInst) ||
            isOtherMemAccess(QueryInst))
          return MemDepResult::getClobber(SI);
      // ...
    }

    // If this is an allocation and the accessed pointer is the allocation
    // itself, there is no dependence (return Def).
    if (isa<AllocaInst>(Inst) || isNoAliasFn(Inst, &TLI)) {
      const Value *AccessPtr = GetUnderlyingObject(MemLoc.Ptr, DL);
      if (AccessPtr == Inst || AA.isMustAlias(Inst, AccessPtr))
        return MemDepResult::getDef(Inst);
      // ...
    }

    // A release fence orders prior stores but not later loads, so loads may
    // look past it.
    if (FenceInst *FI = dyn_cast<FenceInst>(Inst))
      if (isLoad && FI->getOrdering() == AtomicOrdering::Release)
        continue;
    // ...
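/// Returns the instruction on which a memory operation depends, looking only
/// within the query instruction's own block.
///
/// A typical client query looks roughly like the following sketch ('MD' is a
/// MemoryDependenceResults instance and 'LI' a load being optimized; the
/// names are illustrative, not part of this file):
///
///   MemDepResult Dep = MD.getDependency(LI);
///   if (Dep.isDef()) {
///     // LI is determined by Dep.getInst(), e.g. a prior store or load of
///     // the same location.
///   } else if (Dep.isNonLocal()) {
///     // No answer in this block; fall back to
///     // getNonLocalPointerDependency().
///   }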
MemDepResult MemoryDependenceResults::getDependency(Instruction *QueryInst) {
  // Check for a cached result; a non-dirty entry can be returned directly.
  MemDepResult &LocalCache = LocalDeps[QueryInst];
  if (!LocalCache.isDirty())
    return LocalCache;
  // ...
    if (MemLoc.Ptr) {
      // If we can do a pointer scan, make it happen.
      bool isLoad = !(MR & MRI_Mod);
      if (auto *II = dyn_cast<IntrinsicInst>(QueryInst))
        isLoad |= II->getIntrinsicID() == Intrinsic::lifetime_start;

      LocalCache = getPointerDependencyFrom(
          MemLoc, isLoad, ScanPos->getIterator(), QueryParent, QueryInst);
    } else if (isa<CallInst>(QueryInst) || isa<InvokeInst>(QueryInst)) {
      CallSite QueryCS(QueryInst);
      bool isReadOnly = AA.onlyReadsMemory(QueryCS);
      LocalCache = getCallSiteDependencyFrom(
          QueryCS, isReadOnly, ScanPos->getIterator(), QueryParent);
    }
  // ...
  // Remember the result.
  if (Instruction *I = LocalCache.getInst())
    ReverseLocalDeps[I].insert(QueryInst);

  return LocalCache;
}
/// This method is used when -debug is specified to verify that cache arrays
/// are properly kept sorted.
static void AssertSorted(MemoryDependenceResults::NonLocalDepInfo &Cache,
                         int Count = -1) {
  if (Count == -1)
    Count = Cache.size();
  assert(std::is_sorted(Cache.begin(), Cache.begin() + Count) &&
         "Cache isn't sorted!");
}
779 "getNonLocalCallDependency should only be used on calls with "
789 if (!Cache.empty()) {
792 if (!CacheP.second) {
799 for (
auto &Entry : Cache)
800 if (Entry.getResult().isDirty())
804 std::sort(Cache.begin(), Cache.end());
806 ++NumCacheDirtyNonLocal;
814 ++NumUncacheNonLocal;
822 unsigned NumSortedEntries = Cache.
size();
826 while (!DirtyBlocks.
empty()) {
831 if (!Visited.
insert(DirtyBB).second)
837 NonLocalDepInfo::iterator Entry =
838 std::upper_bound(Cache.begin(), Cache.begin() + NumSortedEntries,
840 if (Entry != Cache.begin() && std::prev(Entry)->getBB() == DirtyBB)
844 if (Entry != Cache.begin() + NumSortedEntries &&
845 Entry->getBB() == DirtyBB) {
848 if (!Entry->getResult().isDirty())
852 ExistingResult = &*Entry;
858 if (ExistingResult) {
870 if (ScanPos != DirtyBB->
begin()) {
872 getCallSiteDependencyFrom(QueryCS, isReadonlyCall, ScanPos, DirtyBB);
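/// Perform a full dependency query for an access to the QueryInst's specified
/// memory location, returning one result per block in which a Def or Clobber
/// is found.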
void MemoryDependenceResults::getNonLocalPointerDependency(
    Instruction *QueryInst, SmallVectorImpl<NonLocalDepResult> &Result) {
  const MemoryLocation Loc = MemoryLocation::get(QueryInst);
  bool isLoad = isa<LoadInst>(QueryInst);
  BasicBlock *FromBB = QueryInst->getParent();
  assert(Loc.Ptr->getType()->isPointerTy() &&
         "Can't get pointer deps of a non-pointer!");

  // Check if there is a cached Def from invariant.group handling.
  auto NonLocalDefIt = NonLocalDefsCache.find(QueryInst);
  if (NonLocalDefIt != NonLocalDefsCache.end()) {
    Result.push_back(std::move(NonLocalDefIt->second));
    NonLocalDefsCache.erase(NonLocalDefIt);
    return;
  }

  // This routine does not deal with volatile or ordered accesses; give a
  // conservative Unknown answer for those.
  auto isOrdered = [](Instruction *Inst) {
    if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
      return !LI->isUnordered();
    } else if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
      return !SI->isUnordered();
    }
    // ...
  };
  if (isVolatile(QueryInst) || isOrdered(QueryInst)) {
    Result.push_back(NonLocalDepResult(FromBB, MemDepResult::getUnknown(),
                                       const_cast<Value *>(Loc.Ptr)));
    return;
  }
  const DataLayout &DL = FromBB->getModule()->getDataLayout();
  PHITransAddr Address(const_cast<Value *>(Loc.Ptr), DL, &AC);

  // The set of blocks we've inspected and the pointer considered in each.
  DenseMap<BasicBlock *, Value *> Visited;
  if (getNonLocalPointerDepFromBB(QueryInst, Address, Loc, isLoad, FromBB,
                                  Result, Visited, true))
    return;
  Result.clear();
  Result.push_back(NonLocalDepResult(FromBB, MemDepResult::getUnknown(),
                                     const_cast<Value *>(Loc.Ptr)));
}
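/// Compute the dependency of Loc within the single block BB, consulting (and
/// updating) the cached NonLocalDepInfo entries for this pointer.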
MemDepResult MemoryDependenceResults::GetNonLocalInfoForBlock(
    Instruction *QueryInst, const MemoryLocation &Loc, bool isLoad,
    BasicBlock *BB, NonLocalDepInfo *Cache, unsigned NumSortedEntries) {
  // Do a binary search to see if we already have an entry for this block in
  // the cache set.  If so, find it.
  NonLocalDepInfo::iterator Entry = std::upper_bound(
      Cache->begin(), Cache->begin() + NumSortedEntries, NonLocalDepEntry(BB));
  if (Entry != Cache->begin() && (Entry - 1)->getBB() == BB)
    --Entry;

  NonLocalDepEntry *ExistingResult = nullptr;
  if (Entry != Cache->begin() + NumSortedEntries && Entry->getBB() == BB)
    ExistingResult = &*Entry;

  // If we have a cached entry, and it is non-dirty, use it as the value for
  // this dependency.
  if (ExistingResult && !ExistingResult->getResult().isDirty()) {
    ++NumCacheNonLocalPtr;
    return ExistingResult->getResult();
  }

  // Otherwise we have to scan.  A dirty cache entry tells us where to start.
  BasicBlock::iterator ScanPos = BB->end();
  if (ExistingResult && ExistingResult->getResult().getInst()) {
    assert(ExistingResult->getResult().getInst()->getParent() == BB &&
           "Instruction invalidated?");
    ++NumCacheDirtyNonLocalPtr;
    ScanPos = ExistingResult->getResult().getInst()->getIterator();

    // The dirty entry is being dropped from 'Cache'; update the reverse map.
    ValueIsLoadPair CacheKey(Loc.Ptr, isLoad);
    RemoveFromReverseMap(ReverseNonLocalPtrDeps, &*ScanPos, CacheKey);
  } else {
    ++NumUncacheNonLocalPtr;
  }
  // ...
  // Remember the reverse association so removeInstruction can invalidate it.
  Instruction *Inst = Dep.getInst();
  assert(Inst && "Didn't depend on anything?");
  ValueIsLoadPair CacheKey(Loc.Ptr, isLoad);
  ReverseNonLocalPtrDeps[Inst].insert(CacheKey);
  return Dep;
}
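/// Sort the NonLocalDepInfo cache, given a certain number of elements in the
/// array that are already properly ordered.
///
/// This is optimized for the case when only a few entries have been added.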
static void
SortNonLocalDepInfoCache(MemoryDependenceResults::NonLocalDepInfo &Cache,
                         unsigned NumSortedEntries) {
  switch (Cache.size() - NumSortedEntries) {
  case 0:
    // Done, no new entries.
    break;
  case 2: {
    // Two new entries, insert the last one into place.
    NonLocalDepEntry Val = Cache.back();
    Cache.pop_back();
    MemoryDependenceResults::NonLocalDepInfo::iterator Entry =
        std::upper_bound(Cache.begin(), Cache.end() - 1, Val);
    Cache.insert(Entry, Val);
    LLVM_FALLTHROUGH;
  }
  case 1:
    // One new entry, just insert it at the appropriate position.
    if (Cache.size() != 1) {
      NonLocalDepEntry Val = Cache.back();
      Cache.pop_back();
      MemoryDependenceResults::NonLocalDepInfo::iterator Entry =
          std::upper_bound(Cache.begin(), Cache.end(), Val);
      Cache.insert(Entry, Val);
    }
    break;
  default:
    // Added many values, do a full scale sort.
    std::sort(Cache.begin(), Cache.end());
    break;
  }
}
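/// Perform a dependency query based on pointer/pointeesize starting at the
/// end of StartBB.
///
/// Add any clobber/def results to the results vector and keep track of which
/// blocks are visited in 'Visited'.  This returns true on success, or false
/// if the query could not be completed (e.g. on phi-translation failure or
/// when scanning limits are hit), in which case callers fall back to a
/// conservative result.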
bool MemoryDependenceResults::getNonLocalPointerDepFromBB(
    Instruction *QueryInst, const PHITransAddr &Pointer,
    const MemoryLocation &Loc, bool isLoad, BasicBlock *StartBB,
    SmallVectorImpl<NonLocalDepResult> &Result,
    DenseMap<BasicBlock *, Value *> &Visited, bool SkipFirstBlock) {
  // Look up the cached info for Pointer.
  ValueIsLoadPair CacheKey(Pointer.getAddr(), isLoad);

  // Set up a temporary NLPI value.  If the map doesn't yet have an entry for
  // CacheKey, this value will be inserted as the associated value.  Otherwise
  // it is ignored and we have to check that the cached size and AA tags are
  // consistent with the current query.
  NonLocalPointerInfo InitialNLPI;
  InitialNLPI.Size = Loc.Size;
  InitialNLPI.AATags = Loc.AATags;

  // Get the NLPI for CacheKey, inserting one into the map if it doesn't
  // already have one.
  std::pair<CachedNonLocalPointerInfo::iterator, bool> Pair =
      NonLocalPointerDeps.insert(std::make_pair(CacheKey, InitialNLPI));
  NonLocalPointerInfo *CacheInfo = &Pair.first->second;

  // If there is already a cache entry for this CacheKey, we may need to do
  // some work to reconcile it with the current query.
  if (!Pair.second) {
    if (CacheInfo->Size < Loc.Size) {
      // The query's Size is greater than the cached one.  Throw out the
      // cached data and proceed with the query at the greater size.
      CacheInfo->Pair = BBSkipFirstBlockPair();
      CacheInfo->Size = Loc.Size;
      for (auto &Entry : CacheInfo->NonLocalDeps)
        if (Instruction *Inst = Entry.getResult().getInst())
          RemoveFromReverseMap(ReverseNonLocalPtrDeps, Inst, CacheKey);
      CacheInfo->NonLocalDeps.clear();
    } else if (CacheInfo->Size > Loc.Size) {
      // The query's Size is less than the cached one.  Conservatively restart
      // the query using the greater size.
      return getNonLocalPointerDepFromBB(
          QueryInst, Pointer, Loc.getWithNewSize(CacheInfo->Size), isLoad,
          StartBB, Result, Visited, SkipFirstBlock);
    }

    // If the query's AATags are inconsistent with the cached ones, throw out
    // the cached data and restart the query with no tag if needed.
    if (CacheInfo->AATags != Loc.AATags) {
      if (CacheInfo->AATags) {
        CacheInfo->Pair = BBSkipFirstBlockPair();
        CacheInfo->AATags = AAMDNodes();
        for (auto &Entry : CacheInfo->NonLocalDeps)
          if (Instruction *Inst = Entry.getResult().getInst())
            RemoveFromReverseMap(ReverseNonLocalPtrDeps, Inst, CacheKey);
        CacheInfo->NonLocalDeps.clear();
      }
      if (Loc.AATags)
        return getNonLocalPointerDepFromBB(
            QueryInst, Pointer, Loc.getWithoutAATags(), isLoad, StartBB,
            Result, Visited, SkipFirstBlock);
    }
  }
  NonLocalDepInfo *Cache = &CacheInfo->NonLocalDeps;

  // If we have valid cached information for exactly the block we are
  // investigating, just return it with no recomputation.
  if (CacheInfo->Pair == BBSkipFirstBlockPair(StartBB, SkipFirstBlock)) {
    // Return the cached results and populate the visited set, but first check
    // that any block already in the visited set was visited for the same
    // pointer.
    if (!Visited.empty()) {
      for (auto &Entry : *Cache) {
        DenseMap<BasicBlock *, Value *>::iterator VI =
            Visited.find(Entry.getBB());
        if (VI == Visited.end() || VI->second == Pointer.getAddr())
          continue;

        // We have a pointer mismatch in a block.  Just return false, saying
        // that something was clobbered in this result.
        return false;
      }
    }

    Value *Addr = Pointer.getAddr();
    for (auto &Entry : *Cache) {
      Visited.insert(std::make_pair(Entry.getBB(), Addr));
      if (Entry.getResult().isNonLocal()) {
        continue;
      }
      // ...
    }
    ++NumCacheCompleteNonLocalPtr;
    return true;
  }

  // Otherwise, this is either a new block or one whose cached info we are
  // about to invalidate by adding more information to it.
  if (Cache->empty())
    CacheInfo->Pair = BBSkipFirstBlockPair(StartBB, SkipFirstBlock);
  else
    CacheInfo->Pair = BBSkipFirstBlockPair();

  SmallVector<BasicBlock *, 32> Worklist;
  Worklist.push_back(StartBB);

  // Keep track of the entries that we know are sorted; previously cached
  // entries will all be sorted.
  unsigned NumSortedEntries = Cache->size();
  bool GotWorklistLimit = false;
  unsigned WorklistEntries = BlockNumberLimit;
  DEBUG(AssertSorted(*Cache));

  while (!Worklist.empty()) {
    BasicBlock *BB = Worklist.pop_back_val();
    // If we process a large number of blocks the query becomes very
    // expensive; just bail out.
    if (Result.size() > NumResultsLimit) {
      Worklist.clear();
      // Sort it now (if needed) so that recursive invocations of
      // getNonLocalPointerDepFromBB that could reuse the cache value will
      // only see properly sorted cache arrays.
      if (Cache && NumSortedEntries != Cache->size()) {
        SortNonLocalDepInfoCache(*Cache, NumSortedEntries);
      }
      // Because we bailed out, the "Cache" set won't contain all of the
      // results for the query, so it can't be used for the fast path.
      CacheInfo->Pair = BBSkipFirstBlockPair();
      return false;
    }

    // Skip the first block if we have it.
    if (!SkipFirstBlock) {
      // Analyze the dependency of *Pointer in FromBB.  See if we already have
      // been here.
      assert(Visited.count(BB) && "Should check 'visited' before adding to WL");

      // Get the dependency info for Pointer in BB.  If we have cached
      // information, we will use it, otherwise we compute it.
      MemDepResult Dep = GetNonLocalInfoForBlock(QueryInst, Loc, isLoad, BB,
                                                 Cache, NumSortedEntries);

      // If we got a Def or Clobber, add this to the list of results.
      if (!Dep.isNonLocal()) {
        if (DT.isReachableFromEntry(BB)) {
          Result.push_back(NonLocalDepResult(BB, Dep, Pointer.getAddr()));
          continue;
        }
      }
    }

    // If 'Pointer' is an instruction defined in this block, then we need to
    // do phi translation to change it into a value live in the predecessor
    // block.  If not, we just add the predecessors to the worklist and scan
    // them with the same Pointer.
    if (!Pointer.NeedsPHITranslationFromBlock(BB)) {
      SkipFirstBlock = false;
      SmallVector<BasicBlock *, 16> NewBlocks;
      for (BasicBlock *Pred : PredCache.get(BB)) {
        // Verify that we haven't looked at this block yet.
        std::pair<DenseMap<BasicBlock *, Value *>::iterator, bool> InsertRes =
            Visited.insert(std::make_pair(Pred, Pointer.getAddr()));
        if (InsertRes.second) {
          // First time we've looked at this predecessor.
          NewBlocks.push_back(Pred);
          continue;
        }

        // If we have seen this block before, but it was with a different
        // pointer, then we have a phi translation failure and have to treat
        // this as a clobber.
        if (InsertRes.first->second != Pointer.getAddr()) {
          // Clean up the Visited map before continuing on to
          // PredTranslationFailure.
          for (unsigned i = 0; i < NewBlocks.size(); i++)
            Visited.erase(NewBlocks[i]);
          goto PredTranslationFailure;
        }
      }
      if (NewBlocks.size() > WorklistEntries) {
        // Clean up the Visited map before continuing on to
        // PredTranslationFailure.
        for (unsigned i = 0; i < NewBlocks.size(); i++)
          Visited.erase(NewBlocks[i]);
        GotWorklistLimit = true;
        goto PredTranslationFailure;
      }
      WorklistEntries -= NewBlocks.size();
      Worklist.append(NewBlocks.begin(), NewBlocks.end());
      continue;
    }

    // We do need to do phi translation; if we know ahead of time that we
    // can't phi translate this value, don't even try.
    if (!Pointer.IsPotentiallyPHITranslatable())
      goto PredTranslationFailure;
    // We may have added values to the cache list before this PHI translation.
    // If so, sort it now so that recursive invocations of
    // getNonLocalPointerDepFromBB only ever see sorted cache arrays.
    if (Cache && NumSortedEntries != Cache->size()) {
      SortNonLocalDepInfoCache(*Cache, NumSortedEntries);
      NumSortedEntries = Cache->size();
    }
    Cache = nullptr;

    PredList.clear();
    for (BasicBlock *Pred : PredCache.get(BB)) {
      PredList.push_back(std::make_pair(Pred, Pointer));

      // Get the PHI translated pointer in this predecessor.  This can fail if
      // not translatable, in which case getAddr() returns null.
      PHITransAddr &PredPointer = PredList.back().second;
      PredPointer.PHITranslateValue(BB, Pred, &DT, /*MustDominate=*/false);
      Value *PredPtrVal = PredPointer.getAddr();

      // Check to see if we have already visited this pred block with another
      // pointer.  If so, we can't do this lookup.
      std::pair<DenseMap<BasicBlock *, Value *>::iterator, bool> InsertRes =
          Visited.insert(std::make_pair(Pred, PredPtrVal));

      if (!InsertRes.second) {
        // We found the pred; take it off the list of preds to visit.
        PredList.pop_back();

        // If the predecessor was visited with PredPtrVal, then we already did
        // the analysis and can ignore it.
        if (InsertRes.first->second == PredPtrVal)
          continue;

        // Otherwise the block was previously analyzed with a different
        // pointer; treat this as a phi translation failure.  Clean up the
        // Visited map first.
        for (unsigned i = 0, n = PredList.size(); i < n; ++i)
          Visited.erase(PredList[i].first);

        goto PredTranslationFailure;
      }
    }

    // Actually process results here; this needs to be a separate loop to
    // avoid calling getNonLocalPointerDepFromBB for blocks we don't want to
    // return results from.
    for (unsigned i = 0, n = PredList.size(); i < n; ++i) {
      BasicBlock *Pred = PredList[i].first;
      PHITransAddr &PredPointer = PredList[i].second;
      Value *PredPtrVal = PredPointer.getAddr();

      bool CanTranslate = true;
      // If PHI translation was unable to find an available pointer in this
      // predecessor, we have to assume that the pointer is clobbered there.
      if (!PredPtrVal)
        CanTranslate = false;

      // If we have a problem phi translating, fall through to the code below
      // to handle the failure condition by inserting a conservative entry.
      if (!CanTranslate ||
          !getNonLocalPointerDepFromBB(QueryInst, PredPointer,
                                       Loc.getWithNewPtr(PredPtrVal), isLoad,
                                       Pred, Result, Visited)) {
        // Add the entry to the Result list.
        NonLocalDepResult Entry(Pred, MemDepResult::getUnknown(), PredPtrVal);
        Result.push_back(Entry);

        // Since we had a phi translation failure, the cache for CacheKey
        // won't include all of the entries that we need; mark it invalid for
        // the fast path.
        NonLocalPointerInfo &NLPI = NonLocalPointerDeps[CacheKey];
        NLPI.Pair = BBSkipFirstBlockPair();
        continue;
      }
    }

    // Refresh the CacheInfo/Cache pointer so that it isn't invalidated.
    CacheInfo = &NonLocalPointerDeps[CacheKey];
    Cache = &CacheInfo->NonLocalDeps;
    NumSortedEntries = Cache->size();

    // Since we did phi translation, the "Cache" set won't contain all of the
    // original predecessors of the block, so it can't be used for the fast
    // path next time.
    CacheInfo->Pair = BBSkipFirstBlockPair();
    SkipFirstBlock = false;
    continue;
  PredTranslationFailure:
    // We couldn't produce a usable translation for this block.

    if (!Cache) {
      // Refresh the CacheInfo/Cache pointer if it got invalidated.
      CacheInfo = &NonLocalPointerDeps[CacheKey];
      Cache = &CacheInfo->NonLocalDeps;
      NumSortedEntries = Cache->size();
    }

    // Since phi translation failed, the "Cache" set won't contain all of the
    // results for the query, so it can't be used for the fast path.
    CacheInfo->Pair = BBSkipFirstBlockPair();

    // If this is the magic first block, we can't phi translate into any of
    // the predecessors, so bail out.
    if (SkipFirstBlock)
      return false;

    bool foundBlock = false;
    for (NonLocalDepEntry &I : llvm::reverse(*Cache)) {
      if (I.getBB() != BB)
        continue;

      assert((GotWorklistLimit || I.getResult().isNonLocal() ||
              !DT.isReachableFromEntry(BB)) &&
             "Should only be here with transparent block");
      foundBlock = true;
      I.setResult(MemDepResult::getUnknown());
      Result.push_back(
          NonLocalDepResult(I.getBB(), I.getResult(), Pointer.getAddr()));
      break;
    }
    (void)foundBlock; (void)GotWorklistLimit;
    assert((foundBlock || GotWorklistLimit) && "Current block not in cache?");
  }

  // Okay, we're done now.  If we added new values to the cache, re-sort it.
  SortNonLocalDepInfoCache(*Cache, NumSortedEntries);
  DEBUG(AssertSorted(*Cache));
  return true;
}
void MemoryDependenceResults::RemoveCachedNonLocalPointerDependencies(
    ValueIsLoadPair P) {
  CachedNonLocalPointerInfo::iterator It = NonLocalPointerDeps.find(P);
  if (It == NonLocalPointerDeps.end())
    return;

  // Remove all of the entries in the BB->val map.  This involves removing
  // instructions from the reverse map.
  NonLocalDepInfo &PInfo = It->second.NonLocalDeps;

  for (unsigned i = 0, e = PInfo.size(); i != e; ++i) {
    Instruction *Target = PInfo[i].getResult().getInst();
    if (!Target)
      continue; // Ignore non-local dep results.
    assert(Target->getParent() == PInfo[i].getBB());

    // Eliminating the dirty entry from 'Cache', so update the reverse info.
    RemoveFromReverseMap(ReverseNonLocalPtrDeps, Target, P);
  }

  // Remove P from NonLocalPointerDeps (which deletes NonLocalDepInfo).
  NonLocalPointerDeps.erase(It);
}
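/// Removes an instruction from the dependence analysis, updating the
/// dependence of instructions that previously depended on it.
///
/// This method attempts to keep the cache coherent using the reverse map.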
void MemoryDependenceResults::removeInstruction(Instruction *RemInst) {
  // Walk through the Non-local dependencies, removing this one as the value
  // for any cached queries.
  NonLocalDepMapType::iterator NLDI = NonLocalDeps.find(RemInst);
  if (NLDI != NonLocalDeps.end()) {
    NonLocalDepInfo &BlockMap = NLDI->second.first;
    for (auto &Entry : BlockMap)
      if (Instruction *Inst = Entry.getResult().getInst())
        RemoveFromReverseMap(ReverseNonLocalDeps, Inst, RemInst);
    NonLocalDeps.erase(NLDI);
  }

  // If we have a cached local dependence query for this instruction, remove
  // it.
  LocalDepMapType::iterator LocalDepEntry = LocalDeps.find(RemInst);
  if (LocalDepEntry != LocalDeps.end()) {
    // Remove us from DepInst's reverse set now that the local dep info is
    // gone.
    if (Instruction *Inst = LocalDepEntry->second.getInst())
      RemoveFromReverseMap(ReverseLocalDeps, Inst, RemInst);

    // Remove this local dependency info.
    LocalDeps.erase(LocalDepEntry);
  }

  // If we have any cached pointer dependencies on this instruction, remove
  // them.  The instruction can only appear in these maps if it has pointer
  // type; remove it from both the load info and the store info.
  if (RemInst->getType()->isPointerTy()) {
    RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(RemInst, false));
    RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(RemInst, true));
  }
  // Loop over all of the things that depend on the instruction we're
  // removing.
  SmallVector<std::pair<Instruction *, Instruction *>, 8> ReverseDepsToAdd;

  // If we find RemInst as a clobber or Def in any of the maps for other
  // values, we need to replace its entry with a dirty version of the
  // instruction after it.
  MemDepResult NewDirtyVal;
  if (!RemInst->isTerminator())
    NewDirtyVal = MemDepResult::getDirty(&*++RemInst->getIterator());

  ReverseDepMapType::iterator ReverseDepIt = ReverseLocalDeps.find(RemInst);
  if (ReverseDepIt != ReverseLocalDeps.end()) {
    // RemInst can't be the terminator if it has local stuff depending on it.
    assert(!ReverseDepIt->second.empty() && !isa<TerminatorInst>(RemInst) &&
           "Nothing can locally depend on a terminator");

    for (Instruction *InstDependingOnRemInst : ReverseDepIt->second) {
      assert(InstDependingOnRemInst != RemInst &&
             "Already removed our local dep info");

      LocalDeps[InstDependingOnRemInst] = NewDirtyVal;

      // Make sure to remember that new things depend on NewDepInst.
      assert(NewDirtyVal.getInst() &&
             "There is no way something else can have "
             "a local dep on this if it is a terminator!");
      ReverseDepsToAdd.push_back(
          std::make_pair(NewDirtyVal.getInst(), InstDependingOnRemInst));
    }

    ReverseLocalDeps.erase(ReverseDepIt);

    // Add new reverse deps after scanning the set, to avoid invalidating the
    // 'ReverseDeps' reference.
    while (!ReverseDepsToAdd.empty()) {
      ReverseLocalDeps[ReverseDepsToAdd.back().first].insert(
          ReverseDepsToAdd.back().second);
      ReverseDepsToAdd.pop_back();
    }
  }
  ReverseDepIt = ReverseNonLocalDeps.find(RemInst);
  if (ReverseDepIt != ReverseNonLocalDeps.end()) {
    for (Instruction *I : ReverseDepIt->second) {
      assert(I != RemInst && "Already removed NonLocalDep info for RemInst");

      PerInstNLInfo &INLD = NonLocalDeps[I];
      // The information is now dirty!
      INLD.second = true;

      for (auto &Entry : INLD.first) {
        if (Entry.getResult().getInst() != RemInst)
          continue;

        // Convert to a dirty entry for the subsequent instruction.
        Entry.setResult(NewDirtyVal);

        if (Instruction *NextI = NewDirtyVal.getInst())
          ReverseDepsToAdd.push_back(std::make_pair(NextI, I));
      }
    }

    ReverseNonLocalDeps.erase(ReverseDepIt);

    // Add new reverse deps after scanning the set, to avoid invalidating it.
    while (!ReverseDepsToAdd.empty()) {
      ReverseNonLocalDeps[ReverseDepsToAdd.back().first].insert(
          ReverseDepsToAdd.back().second);
      ReverseDepsToAdd.pop_back();
    }
  }
  // If the instruction is in ReverseNonLocalPtrDeps then it appears as a
  // value in the NonLocalPointerDeps info.
  ReverseNonLocalPtrDepTy::iterator ReversePtrDepIt =
      ReverseNonLocalPtrDeps.find(RemInst);
  if (ReversePtrDepIt != ReverseNonLocalPtrDeps.end()) {
    SmallVector<std::pair<Instruction *, ValueIsLoadPair>, 8>
        ReversePtrDepsToAdd;

    for (ValueIsLoadPair P : ReversePtrDepIt->second) {
      assert(P.getPointer() != RemInst &&
             "Already removed NonLocalPointerDeps info for RemInst");

      NonLocalDepInfo &NLPDI = NonLocalPointerDeps[P].NonLocalDeps;

      // The cache is not valid for any specific block anymore.
      NonLocalPointerDeps[P].Pair = BBSkipFirstBlockPair();

      // Update any entries for RemInst to use the instruction after it.
      for (auto &Entry : NLPDI) {
        if (Entry.getResult().getInst() != RemInst)
          continue;

        // Convert to a dirty entry for the subsequent instruction.
        Entry.setResult(NewDirtyVal);

        if (Instruction *NewDirtyInst = NewDirtyVal.getInst())
          ReversePtrDepsToAdd.push_back(std::make_pair(NewDirtyInst, P));
      }

      // Re-sort the NonLocalDepInfo.  Changing a dirty entry to refer to the
      // subsequent instruction may invalidate the sortedness.
      std::sort(NLPDI.begin(), NLPDI.end());
    }

    ReverseNonLocalPtrDeps.erase(ReversePtrDepIt);

    while (!ReversePtrDepsToAdd.empty()) {
      ReverseNonLocalPtrDeps[ReversePtrDepsToAdd.back().first].insert(
          ReversePtrDepsToAdd.back().second);
      ReversePtrDepsToAdd.pop_back();
    }
  }

  assert(!NonLocalDeps.count(RemInst) && "RemInst got reinserted?");
  DEBUG(verifyRemoved(RemInst));
}
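/// Verify that the specified instruction does not occur in our internal data
/// structures.
///
/// This function verifies by asserting in debug builds.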
void MemoryDependenceResults::verifyRemoved(Instruction *D) const {
  for (const auto &DepKV : LocalDeps) {
    assert(DepKV.first != D && "Inst occurs in data structures");
    assert(DepKV.second.getInst() != D && "Inst occurs in data structures");
  }

  for (const auto &DepKV : NonLocalPointerDeps) {
    assert(DepKV.first.getPointer() != D && "Inst occurs in NLPD map key");
    for (const auto &Entry : DepKV.second.NonLocalDeps)
      assert(Entry.getResult().getInst() != D && "Inst occurs as NLPD value");
  }

  for (const auto &DepKV : NonLocalDeps) {
    assert(DepKV.first != D && "Inst occurs in data structures");
    const PerInstNLInfo &INLD = DepKV.second;
    for (const auto &Entry : INLD.first)
      assert(Entry.getResult().getInst() != D &&
             "Inst occurs in data structures");
  }

  for (const auto &DepKV : ReverseLocalDeps) {
    assert(DepKV.first != D && "Inst occurs in data structures");
    for (Instruction *Inst : DepKV.second)
      assert(Inst != D && "Inst occurs in data structures");
  }

  for (const auto &DepKV : ReverseNonLocalDeps) {
    assert(DepKV.first != D && "Inst occurs in data structures");
    for (Instruction *Inst : DepKV.second)
      assert(Inst != D && "Inst occurs in data structures");
  }

  for (const auto &DepKV : ReverseNonLocalPtrDeps) {
    assert(DepKV.first != D && "Inst occurs in rev NLPD map");

    for (ValueIsLoadPair P : DepKV.second)
      assert(P != ValueIsLoadPair(D, false) && P != ValueIsLoadPair(D, true) &&
             "Inst occurs in ReverseNonLocalPtrDeps map");
  }
}
1716 "Memory Dependence Analysis",
false,
true)
1765 auto &AA = getAnalysis<AAResultsWrapperPass>().getAAResults();
1766 auto &AC = getAnalysis<AssumptionCacheTracker>().getAssumptionCache(F);
1767 auto &TLI = getAnalysis<TargetLibraryInfoWrapperPass>().getTLI();
1768 auto &DT = getAnalysis<DominatorTreeWrapperPass>().getDomTree();
1769 MemDep.emplace(AA, AC, TLI, DT);