#define DEBUG_TYPE "memdep"
STATISTIC(NumCacheNonLocal, "Number of fully cached non-local responses");
STATISTIC(NumCacheDirtyNonLocal, "Number of dirty cached non-local responses");
STATISTIC(NumUncacheNonLocal, "Number of uncached non-local responses");
STATISTIC(NumCacheNonLocalPtr,
          "Number of fully cached non-local ptr responses");
STATISTIC(NumCacheDirtyNonLocalPtr,
          "Number of cached, but dirty, non-local ptr responses");
STATISTIC(NumUncacheNonLocalPtr, "Number of uncached non-local ptr responses");
STATISTIC(NumCacheCompleteNonLocalPtr,
          "Number of block queries that were completely cached");
// Limit for the number of instructions to scan in a block.
static cl::opt<unsigned> BlockScanLimit(
    "memdep-block-scan-limit", cl::Hidden, cl::init(100),
    cl::desc("The number of instructions to scan in a block in memory "
             "dependency analysis (default = 100)"));

static cl::opt<unsigned>
    BlockNumberLimit("memdep-block-number-limit", cl::Hidden, cl::init(200),
                     cl::desc("The number of blocks to scan during memory "
                              "dependency analysis (default = 200)"));
/// This is a helper function that removes Val from 'Inst's set in ReverseMap.
/// If the set becomes empty, remove Inst's entry.
template <typename KeyTy>
static void
RemoveFromReverseMap(DenseMap<Instruction *, SmallPtrSet<KeyTy, 4>> &ReverseMap,
                     Instruction *Inst, KeyTy Val) {
  typename DenseMap<Instruction *, SmallPtrSet<KeyTy, 4>>::iterator InstIt =
      ReverseMap.find(Inst);
  assert(InstIt != ReverseMap.end() && "Reverse map out of sync?");
  bool Found = InstIt->second.erase(Val);
  assert(Found && "Invalid reverse map!");
  (void)Found;
  if (InstIt->second.empty())
    ReverseMap.erase(InstIt);
}
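// Illustrative sketch (not part of the original file): the same reverse-map
// bookkeeping expressed with standard containers, so it can be compiled and
// tested in isolation. The names below are hypothetical; the point is the
// invariant that an empty per-key set is erased eagerly, so the map never
// accumulates dead entries.
//
// #include <cassert>
// #include <map>
// #include <set>
//
// template <typename KeyT, typename ValT>
// static void removeFromReverseMap(std::map<KeyT, std::set<ValT>> &ReverseMap,
//                                  const KeyT &Key, const ValT &Val) {
//   auto It = ReverseMap.find(Key);
//   assert(It != ReverseMap.end() && "Reverse map out of sync?");
//   bool Found = It->second.erase(Val) != 0;
//   assert(Found && "Invalid reverse map!");
//   (void)Found;
//   if (It->second.empty()) // Keep the invariant: no empty sets in the map.
//     ReverseMap.erase(It);
// }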
/// If the given instruction references a specific memory location, fill in
/// Loc with the details, otherwise set Loc.Ptr to null.
///
/// Returns a ModRefInfo value describing the general behavior of the
/// instruction.
static ModRefInfo GetLocation(const Instruction *Inst, MemoryLocation &Loc,
                              const TargetLibraryInfo &TLI) {
  if (const LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
    if (LI->isUnordered()) {
      Loc = MemoryLocation::get(LI);
      return ModRefInfo::Ref;
    }
    // ...
  }

  if (const StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
    if (SI->isUnordered()) {
      Loc = MemoryLocation::get(SI);
      return ModRefInfo::Mod;
    }
    // ...
  }

  // ...

  if (const auto *II = dyn_cast<IntrinsicInst>(Inst)) {
    switch (II->getIntrinsicID()) {
    case Intrinsic::lifetime_start:
    case Intrinsic::lifetime_end:
      Loc = MemoryLocation::getForArgument(II, 1, TLI);
      // These intrinsics don't really modify the memory, but returning Mod
      // will allow them to be handled conservatively.
      return ModRefInfo::Mod;
    case Intrinsic::invariant_start:
      Loc = MemoryLocation::getForArgument(II, 1, TLI);
      return ModRefInfo::Mod;
    case Intrinsic::invariant_end:
      Loc = MemoryLocation::getForArgument(II, 2, TLI);
      return ModRefInfo::Mod;
    case Intrinsic::masked_load:
      Loc = MemoryLocation::getForArgument(II, 0, TLI);
      return ModRefInfo::Ref;
    case Intrinsic::masked_store:
      Loc = MemoryLocation::getForArgument(II, 1, TLI);
      return ModRefInfo::Mod;
    default:
      break;
    }
  }

  // Otherwise, just do the coarse-grained thing that always works.
  if (Inst->mayWriteToMemory())
    return ModRefInfo::ModRef;
  if (Inst->mayReadFromMemory())
    return ModRefInfo::Ref;
  return ModRefInfo::NoModRef;
}
/// Private helper for finding the local dependencies of a call site.
MemDepResult MemoryDependenceResults::getCallDependencyFrom(
    CallBase *Call, bool isReadOnlyCall, BasicBlock::iterator ScanIt,
    BasicBlock *BB) {
  unsigned Limit = getDefaultBlockScanLimit();

  // Walk backwards through the block, looking for dependencies.
  while (ScanIt != BB->begin()) {
    Instruction *Inst = &*--ScanIt;
    // ...

    // If this inst is a memory op, get the pointer it accessed.
    MemoryLocation Loc;
    ModRefInfo MR = GetLocation(Inst, Loc, TLI);
    // ...

    if (auto *CallB = dyn_cast<CallBase>(Inst)) {
      // If these two calls do not interfere, look past it.
      if (isNoModRef(AA.getModRefInfo(Call, CallB))) {
        // If the two calls are the same, return Inst as a Def, so that
        // Call can be found redundant and eliminated.
        if (isReadOnlyCall && !isModSet(MR) &&
            Call->isIdenticalToWhenDefined(CallB))
          return MemDepResult::getDef(Inst);

        // Otherwise if the two calls don't interact (e.g. CallB is readnone),
        // keep scanning.
        continue;
      }
      return MemDepResult::getClobber(Inst);
    }
    // ...
  }

  // No dependence found. If this is the entry block of the function, it is
  // unknown, otherwise it is non-local.
  if (BB != &BB->getParent()->getEntryBlock())
    return MemDepResult::getNonLocal();
  return MemDepResult::getNonFuncLocal();
}
MemDepResult MemoryDependenceResults::getPointerDependencyFrom(
    const MemoryLocation &MemLoc, bool isLoad, BasicBlock::iterator ScanIt,
    BasicBlock *BB, Instruction *QueryInst, unsigned *Limit) {
  BatchAAResults BatchAA(AA, &EEA);
  return getPointerDependencyFrom(MemLoc, isLoad, ScanIt, BB, QueryInst, Limit,
                                  BatchAA);
}

MemDepResult MemoryDependenceResults::getPointerDependencyFrom(
    const MemoryLocation &MemLoc, bool isLoad, BasicBlock::iterator ScanIt,
    BasicBlock *BB, Instruction *QueryInst, unsigned *Limit,
    BatchAAResults &BatchAA) {
  MemDepResult InvariantGroupDependency = MemDepResult::getUnknown();
  if (QueryInst != nullptr) {
    if (auto *LI = dyn_cast<LoadInst>(QueryInst)) {
      InvariantGroupDependency = getInvariantGroupPointerDependency(LI, BB);

      if (InvariantGroupDependency.isDef())
        return InvariantGroupDependency;
    }
  }
  MemDepResult SimpleDep = getSimplePointerDependencyFrom(
      MemLoc, isLoad, ScanIt, BB, QueryInst, Limit, BatchAA);
  if (SimpleDep.isDef())
    return SimpleDep;
  // A non-local invariant.group dependency indicates there is a non-local Def
  // (it only returns non-local if it finds a non-local Def), which is better
  // than a local clobber and everything else.
  if (InvariantGroupDependency.isNonLocal())
    return InvariantGroupDependency;

  assert(InvariantGroupDependency.isUnknown() &&
         "InvariantGroupDependency should be only unknown at this point");
  return SimpleDep;
}
MemDepResult
MemoryDependenceResults::getInvariantGroupPointerDependency(LoadInst *LI,
                                                            BasicBlock *BB) {
  if (!LI->hasMetadata(LLVMContext::MD_invariant_group))
    return MemDepResult::getUnknown();

  // Take the pointer operand after all casts and GEPs of 0. This way we can
  // search the cast graph downward only.
  Value *LoadOperand = LI->getPointerOperand()->stripPointerCasts();
  // ...

  Instruction *ClosestDependency = nullptr;
  // The order of instructions in the use list is unpredictable. To always get
  // the same result, look for the closest dominating use.
  auto GetClosestDependency = [this](Instruction *Best, Instruction *Other) {
    assert(Other && "Must call it with not null instruction");
    if (Best == nullptr || DT.dominates(Best, Other))
      return Other;
    return Best;
  };

  for (const Use &Us : LoadOperand->uses()) {
    auto *U = dyn_cast<Instruction>(Us.getUser());
    if (!U || U == LI || !DT.dominates(U, LI))
      continue;

    // If we hit a load/store with the same invariant.group metadata (and the
    // same pointer operand), we can assume that the value pointed to by the
    // pointer operand didn't change.
    if ((isa<LoadInst>(U) ||
         (isa<StoreInst>(U) &&
          cast<StoreInst>(U)->getPointerOperand() == LoadOperand)) &&
        U->hasMetadata(LLVMContext::MD_invariant_group))
      ClosestDependency = GetClosestDependency(ClosestDependency, U);
  }

  if (!ClosestDependency)
    return MemDepResult::getUnknown();
  if (ClosestDependency->getParent() == BB)
    return MemDepResult::getDef(ClosestDependency);
  // Def(U) can't be returned here because it is non-local. If no local
  // dependency is found, return non-local, counting on the caller to invoke
  // getNonLocalPointerDependency, which will return the cached result.
  NonLocalDefsCache.try_emplace(
      LI, NonLocalDepResult(ClosestDependency->getParent(),
                            MemDepResult::getDef(ClosestDependency), nullptr));
  ReverseNonLocalDefsCache[ClosestDependency].insert(LI);
  return MemDepResult::getNonLocal();
}
static bool canSkipClobberingStore(const StoreInst *SI,
                                   const MemoryLocation &MemLoc,
                                   Align MemLocAlign, BatchAAResults &BatchAA,
                                   unsigned ScanLimit) {
  if (!MemLoc.Size.hasValue())
    return false;
  // ...
  if (std::min(MemLocAlign, SI->getAlign()).value() <
      MemLoc.Size.getValue().getKnownMinValue())
    return false;

  auto *LI = dyn_cast<LoadInst>(SI->getValueOperand());
  if (!LI || LI->getParent() != SI->getParent())
    return false;
  // ...
  unsigned NumVisitedInsts = 0;
  for (const Instruction *I = LI; I != SI; I = I->getNextNonDebugInstruction())
    if (++NumVisitedInsts > ScanLimit ||
        isModSet(BatchAA.getModRefInfo(I, MemLoc)))
      return false;

  return true;
}
MemDepResult MemoryDependenceResults::getSimplePointerDependencyFrom(
    const MemoryLocation &MemLoc, bool isLoad, BasicBlock::iterator ScanIt,
    BasicBlock *BB, Instruction *QueryInst, unsigned *Limit,
    BatchAAResults &BatchAA) {
  bool isInvariantLoad = false;
  Align MemLocAlign = MemLoc.Ptr->getPointerAlignment(BB->getDataLayout());

  unsigned DefaultLimit = getDefaultBlockScanLimit();
  if (!Limit)
    Limit = &DefaultLimit;

  // ...
  if (isLoad && QueryInst)
    if (LoadInst *LI = dyn_cast<LoadInst>(QueryInst)) {
      if (LI->hasMetadata(LLVMContext::MD_invariant_load))
        isInvariantLoad = true;
      MemLocAlign = LI->getAlign();
    }

  // Return "true" if I is not a load and not a store, but it does access
  // memory.
  auto isOtherMemAccess = [](Instruction *I) -> bool {
    return I->mayReadOrWriteMemory();
  };
  // Walk backwards through the basic block, looking for dependencies.
  while (ScanIt != BB->begin()) {
    Instruction *Inst = &*--ScanIt;
    // ...

    if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(Inst)) {
      Intrinsic::ID ID = II->getIntrinsicID();
      switch (ID) {
      case Intrinsic::lifetime_start: {
        // If we reach a lifetime begin marker, then the query ends here
        // because the value is undefined.
        MemoryLocation ArgLoc = MemoryLocation::getAfter(II->getArgOperand(1));
        if (BatchAA.isMustAlias(ArgLoc, MemLoc))
          return MemDepResult::getDef(II);
        continue;
      }
      case Intrinsic::masked_load:
      case Intrinsic::masked_store: {
        MemoryLocation Loc;
        /*ModRefInfo MR =*/ GetLocation(II, Loc, TLI);
        AliasResult R = BatchAA.alias(Loc, MemLoc);
        if (R == AliasResult::NoAlias)
          continue;
        if (R == AliasResult::MustAlias)
          return MemDepResult::getDef(II);
        if (ID == Intrinsic::masked_load)
          continue;
        return MemDepResult::getClobber(II);
      }
      }
    }

    // Values depend on loads if the pointers are must aliased. This means
    // that a load depends on another must aliased load from the same value.
    if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
      // While volatile accesses cannot be eliminated, they do not have to
      // clobber non-aliasing locations.
      if (LI->isVolatile()) {
        // ...
      }
      // ...
      MemoryLocation LoadLoc = MemoryLocation::get(LI);
      AliasResult R = BatchAA.alias(LoadLoc, MemLoc);
      if (isLoad) {
        // ...
        // If we have a partial alias, then return this as a clobber for the
        // client to handle.
        if (R == AliasResult::PartialAlias && R.hasOffset()) {
          ClobberOffsets[LI] = R.getOffset();
          return MemDepResult::getClobber(Inst);
        }
        // ...
      }
      // ...
    }

    if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
      // Atomic stores have complications involved.
      // A Monotonic store is OK if the query inst is itself not atomic.
      if (!SI->isUnordered() && SI->isAtomic()) {
        // ...
      }
      // While volatile accesses cannot be eliminated, they do not have to
      // clobber non-aliasing locations.
      if (SI->isVolatile()) {
        // ...
      }
      // ...
    }

    // If this is an allocation, and if we know that the accessed pointer is
    // to the allocation, return Def. This means that there is a dependence
    // and the access can be optimized based on that, e.g. a load could turn
    // into undef.
    if (isa<AllocaInst>(Inst) || isNoAliasCall(Inst)) {
      const Value *AccessPtr = getUnderlyingObject(MemLoc.Ptr);
      if (AccessPtr == Inst || BatchAA.isMustAlias(Inst, AccessPtr))
        return MemDepResult::getDef(Inst);
    }
    // ...
  }

  // No dependence found. If this is the entry block of the function, it is
  // unknown, otherwise it is non-local.
  if (BB != &BB->getParent()->getEntryBlock())
    return MemDepResult::getNonLocal();
  return MemDepResult::getNonFuncLocal();
}
MemDepResult MemoryDependenceResults::getDependency(Instruction *QueryInst) {
  ClobberOffsets.clear();
  Instruction *ScanPos = QueryInst;

  // Check for a cached result.
  MemDepResult &LocalCache = LocalDeps[QueryInst];

  // If the cached entry is non-dirty, just return it. Note that this depends
  // on MemDepResult's default constructing to 'dirty', which won't be
  // returned from getDependencyFrom.
  if (!LocalCache.isDirty())
    return LocalCache;

  // ...
  BasicBlock *QueryParent = QueryInst->getParent();

  // Do the scan.
  if (BasicBlock::iterator(QueryInst) == QueryParent->begin()) {
    // ...
  } else {
    MemoryLocation MemLoc;
    ModRefInfo MR = GetLocation(QueryInst, MemLoc, TLI);
    if (MemLoc.Ptr) {
      // If we can do a pointer scan, make it happen.
      bool isLoad = !isModSet(MR);
      if (auto *II = dyn_cast<IntrinsicInst>(QueryInst))
        isLoad |= II->getIntrinsicID() == Intrinsic::lifetime_start;

      LocalCache =
          getPointerDependencyFrom(MemLoc, isLoad, ScanPos->getIterator(),
                                   QueryParent, QueryInst, nullptr);
    } else if (auto *QueryCall = dyn_cast<CallBase>(QueryInst)) {
      bool isReadOnly = AA.onlyReadsMemory(QueryCall);
      LocalCache = getCallDependencyFrom(QueryCall, isReadOnly,
                                         ScanPos->getIterator(), QueryParent);
    } else {
      // Non-memory instruction.
      LocalCache = MemDepResult::getUnknown();
    }
  }

  // Remember the result!
  if (Instruction *I = LocalCache.getInst())
    ReverseLocalDeps[I].insert(QueryInst);

  return LocalCache;
}

#ifndef NDEBUG
/// This method is used when -debug is specified to verify that cache arrays
/// are properly kept sorted.
static void AssertSorted(MemoryDependenceResults::NonLocalDepInfo &Cache,
                         int Count = -1) {
  if (Count == -1)
    Count = Cache.size();
  assert(std::is_sorted(Cache.begin(), Cache.begin() + Count) &&
         "Cache isn't sorted!");
}
#endif
712 "getNonLocalCallDependency should only be used on calls with "
714 PerInstNLInfo &CacheP = NonLocalDepsMap[QueryCall];
722 if (!Cache.empty()) {
725 if (!CacheP.second) {
732 for (
auto &Entry : Cache)
733 if (Entry.getResult().isDirty())
739 ++NumCacheDirtyNonLocal;
744 ++NumUncacheNonLocal;
748 bool isReadonlyCall = AA.onlyReadsMemory(QueryCall);
752 unsigned NumSortedEntries = Cache.
size();
756 while (!DirtyBlocks.
empty()) {
760 if (!Visited.
insert(DirtyBB).second)
766 NonLocalDepInfo::iterator Entry =
767 std::upper_bound(Cache.begin(), Cache.begin() + NumSortedEntries,
769 if (Entry != Cache.begin() && std::prev(Entry)->getBB() == DirtyBB)
773 if (Entry != Cache.begin() + NumSortedEntries &&
774 Entry->getBB() == DirtyBB) {
777 if (!Entry->getResult().isDirty())
781 ExistingResult = &*Entry;
787 if (ExistingResult) {
799 if (ScanPos != DirtyBB->
begin()) {
800 Dep = getCallDependencyFrom(QueryCall, isReadonlyCall, ScanPos, DirtyBB);
822 ReverseNonLocalDeps[Inst].insert(QueryCall);
void MemoryDependenceResults::getNonLocalPointerDependency(
    Instruction *QueryInst, SmallVectorImpl<NonLocalDepResult> &Result) {
  const MemoryLocation Loc = MemoryLocation::get(QueryInst);
  bool isLoad = isa<LoadInst>(QueryInst);
  BasicBlock *FromBB = QueryInst->getParent();
  assert(Loc.Ptr->getType()->isPointerTy() &&
         "Can't get pointer deps of a non-pointer!");

  // Check if there is a cached Def with invariant.group.
  auto NonLocalDefIt = NonLocalDefsCache.find(QueryInst);
  if (NonLocalDefIt != NonLocalDefsCache.end()) {
    Result.push_back(NonLocalDefIt->second);
    ReverseNonLocalDefsCache[NonLocalDefIt->second.getResult().getInst()]
        .erase(QueryInst);
    NonLocalDefsCache.erase(NonLocalDefIt);
    return;
  }

  // This routine does not expect queries with ordered memory semantics; we
  // give up on ordered instructions but handle unordered atomics.
  auto isOrdered = [](Instruction *Inst) {
    if (auto *LI = dyn_cast<LoadInst>(Inst))
      return !LI->isUnordered();
    if (auto *SI = dyn_cast<StoreInst>(Inst))
      return !SI->isUnordered();
    return false;
  };
  assert((isLoad || isa<StoreInst>(QueryInst)) && !isOrdered(QueryInst) &&
         "Can't get pointer deps of an ordered instruction!");

  const DataLayout &DL = FromBB->getDataLayout();
  PHITransAddr Address(const_cast<Value *>(Loc.Ptr), DL, &AC);

  // This is the set of blocks we've inspected, and the pointer we consider
  // in each block.
  SmallDenseMap<BasicBlock *, Value *, 16> Visited;
  if (getNonLocalPointerDepFromBB(QueryInst, Address, Loc, isLoad, FromBB,
                                  Result, Visited, true))
    return;
  Result.clear();
  Result.push_back(NonLocalDepResult(FromBB, MemDepResult::getUnknown(),
                                     const_cast<Value *>(Loc.Ptr)));
}
MemDepResult MemoryDependenceResults::getNonLocalInfoForBlock(
    Instruction *QueryInst, const MemoryLocation &Loc, bool isLoad,
    BasicBlock *BB, NonLocalDepInfo *Cache, unsigned NumSortedEntries,
    BatchAAResults &BatchAA) {
  bool isInvariantLoad = false;
  if (LoadInst *LI = dyn_cast_or_null<LoadInst>(QueryInst))
    isInvariantLoad = LI->hasMetadata(LLVMContext::MD_invariant_load);

  // Do a binary search to see if we already have an entry for this block in
  // the cache set. If so, find it.
  NonLocalDepInfo::iterator Entry = std::upper_bound(
      Cache->begin(), Cache->begin() + NumSortedEntries, NonLocalDepEntry(BB));
  if (Entry != Cache->begin() && (Entry - 1)->getBB() == BB)
    --Entry;

  NonLocalDepEntry *ExistingResult = nullptr;
  if (Entry != Cache->begin() + NumSortedEntries && Entry->getBB() == BB)
    ExistingResult = &*Entry;

  // Use a cached result for an invariant load only if there is no dependency
  // for the corresponding non-invariant load; otherwise recompute.
  if (ExistingResult && isInvariantLoad &&
      !ExistingResult->getResult().isNonFuncLocal())
    ExistingResult = nullptr;

  // If we have a cached entry, and it is non-dirty, use it as the value for
  // this dependency.
  if (ExistingResult && !ExistingResult->getResult().isDirty()) {
    ++NumCacheNonLocalPtr;
    return ExistingResult->getResult();
  }

  // Otherwise, we have to scan for the value. If we have a dirty cache
  // entry, start scanning from its position, otherwise we scan from the end
  // of the block.
  BasicBlock::iterator ScanPos = BB->end();
  if (ExistingResult && ExistingResult->getResult().getInst()) {
    assert(ExistingResult->getResult().getInst()->getParent() == BB &&
           "Instruction invalidated?");
    ++NumCacheDirtyNonLocalPtr;
    ScanPos = ExistingResult->getResult().getInst()->getIterator();

    // Eliminating the dirty entry from 'Cache', so update the reverse info.
    ValueIsLoadPair CacheKey(Loc.Ptr, isLoad);
    RemoveFromReverseMap(ReverseNonLocalPtrDeps, &*ScanPos, CacheKey);
  } else {
    ++NumUncacheNonLocalPtr;
  }

  // Scan the block for the dependency.
  MemDepResult Dep = getPointerDependencyFrom(Loc, isLoad, ScanPos, BB,
                                              QueryInst, nullptr, BatchAA);

  // Don't cache results for invariant loads.
  if (isInvariantLoad)
    return Dep;

  // If we had a dirty entry for the block, update it. Otherwise, just add a
  // new entry.
  if (ExistingResult)
    ExistingResult->setResult(Dep);
  else
    Cache->push_back(NonLocalDepEntry(BB, Dep));

  // If the block has a dependency (i.e. it isn't completely transparent to
  // the value), remember the reverse association because we just added it to
  // Cache!
  if (!Dep.isLocal())
    return Dep;

  // Keep the ReverseNonLocalPtrDeps map up to date so we can efficiently
  // update MemDep when we remove instructions.
  Instruction *Inst = Dep.getInst();
  assert(Inst && "Didn't depend on anything?");
  ValueIsLoadPair CacheKey(Loc.Ptr, isLoad);
  ReverseNonLocalPtrDeps[Inst].insert(CacheKey);
  return Dep;
}
/// Sort the NonLocalDepInfo cache, given a certain number of elements in the
/// array that are already properly ordered.
///
/// This is optimized for the case when only a few entries are added.
static void
SortNonLocalDepInfoCache(MemoryDependenceResults::NonLocalDepInfo &Cache,
                         unsigned NumSortedEntries) {
  // If only one entry, don't sort.
  if (Cache.size() < 2)
    return;
  unsigned s = Cache.size() - NumSortedEntries;
  // If the cache is already sorted, don't sort it again.
  if (s == 0)
    return;
  // If no entry is sorted, sort the whole cache.
  if (NumSortedEntries == 0) {
    llvm::sort(Cache);
    return;
  }
  // If the number of unsorted entries is small and the cache size is big,
  // insertion sort is faster. Use Log2_32 to quickly choose the sort method.
  if (s < Log2_32(Cache.size())) {
    while (s > 0) {
      NonLocalDepEntry Val = Cache.back();
      Cache.pop_back();
      MemoryDependenceResults::NonLocalDepInfo::iterator Entry =
          std::upper_bound(Cache.begin(), Cache.end() - s + 1, Val);
      Cache.insert(Entry, Val);
      s--;
    }
  } else {
    llvm::sort(Cache);
  }
}
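// Illustrative sketch (not part of the original file): the same hybrid
// strategy on a plain sorted std::vector<int>. With only a few unsorted tail
// entries, repeated binary-search insertion beats a full re-sort; past a
// logarithmic threshold (std::log2 here approximates LLVM's Log2_32), a full
// sort wins. "sortMostlySorted" is a hypothetical name.
//
// #include <algorithm>
// #include <cmath>
// #include <vector>
//
// static void sortMostlySorted(std::vector<int> &Cache,
//                              unsigned NumSortedEntries) {
//   if (Cache.size() < 2)
//     return;
//   unsigned Unsorted = Cache.size() - NumSortedEntries;
//   if (Unsorted == 0)
//     return;
//   if (NumSortedEntries == 0 ||
//       Unsorted >= static_cast<unsigned>(std::log2(Cache.size()))) {
//     std::sort(Cache.begin(), Cache.end()); // Too many stragglers.
//     return;
//   }
//   while (Unsorted > 0) {
//     int Val = Cache.back(); // Re-insert each tail element by binary search.
//     Cache.pop_back();
//     auto It = std::upper_bound(Cache.begin(), Cache.end() - Unsorted + 1, Val);
//     Cache.insert(It, Val);
//     --Unsorted;
//   }
// }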
/// Perform a dependency query based on pointer/pointee size starting at the
/// end of StartBB.
///
/// Add any clobber/def results to the results vector and keep track of which
/// blocks are visited in 'Visited'.
///
/// This has special behavior for the first block queries (when SkipFirstBlock
/// is true). In this special case, it ignores the contents of the specified
/// block and starts returning dependence info for its predecessors.
///
/// This function returns true on success, or false to indicate that it could
/// not compute dependence information for some reason. This should be treated
/// as a clobber dependence on the first instruction of the predecessor block.
bool MemoryDependenceResults::getNonLocalPointerDepFromBB(
    Instruction *QueryInst, const PHITransAddr &Pointer,
    const MemoryLocation &Loc, bool isLoad, BasicBlock *StartBB,
    SmallVectorImpl<NonLocalDepResult> &Result,
    SmallDenseMap<BasicBlock *, Value *, 16> &Visited, bool SkipFirstBlock,
    bool IsIncomplete) {
  // Look up the cached info for Pointer.
  ValueIsLoadPair CacheKey(Pointer.getAddr(), isLoad);

  // Set up a temporary NLPI value. If the map doesn't yet have an entry for
  // CacheKey, this value will be inserted as the associated value. Otherwise,
  // we check whether the cached size and AA tags are consistent with the
  // current query.
  NonLocalPointerInfo InitialNLPI;
  InitialNLPI.Size = Loc.Size;
  InitialNLPI.AATags = Loc.AATags;

  // ...

  // Get the NLPI for CacheKey, inserting one into the map if it doesn't
  // already exist.
  std::pair<CachedNonLocalPointerInfo::iterator, bool> Pair =
      NonLocalPointerDeps.insert(std::make_pair(CacheKey, InitialNLPI));
  NonLocalPointerInfo *CacheInfo = &Pair.first->second;

  // If we already have a cache entry for this CacheKey, we may need to do
  // some work to reconcile the cache entry and the current query.
  if (!Pair.second) {
    if (CacheInfo->Size != Loc.Size) {
      // The query's Size is not equal to the cached one. Throw out the cached
      // data and proceed with the query with the new size.
      CacheInfo->Pair = BBSkipFirstBlockPair();
      CacheInfo->Size = Loc.Size;
      for (auto &Entry : CacheInfo->NonLocalDeps)
        if (Instruction *Inst = Entry.getResult().getInst())
          RemoveFromReverseMap(ReverseNonLocalPtrDeps, Inst, CacheKey);
      CacheInfo->NonLocalDeps.clear();
      // The cache is cleared, so we lost information about blocks we already
      // visited; the cache information must be treated as incomplete.
      IsIncomplete = true;
    }

    // If the query's AATags are inconsistent with the cached one, throw out
    // the cached data and restart the query with no tag if needed.
    if (CacheInfo->AATags != Loc.AATags) {
      if (CacheInfo->AATags) {
        CacheInfo->Pair = BBSkipFirstBlockPair();
        CacheInfo->AATags = AAMDNodes();
        for (auto &Entry : CacheInfo->NonLocalDeps)
          if (Instruction *Inst = Entry.getResult().getInst())
            RemoveFromReverseMap(ReverseNonLocalPtrDeps, Inst, CacheKey);
        CacheInfo->NonLocalDeps.clear();
        // As above, the cache information is now incomplete.
        IsIncomplete = true;
      }
      if (Loc.AATags)
        return getNonLocalPointerDepFromBB(
            QueryInst, Pointer, Loc.getWithoutAATags(), isLoad, StartBB,
            Result, Visited, SkipFirstBlock, IsIncomplete);
    }
  }

  NonLocalDepInfo *Cache = &CacheInfo->NonLocalDeps;

  // If we have valid cached information for exactly the block we are
  // investigating, just return it with no recomputation.
  if (!IsIncomplete &&
      CacheInfo->Pair == BBSkipFirstBlockPair(StartBB, SkipFirstBlock)) {
    // We have a fully cached result for this query: return the cached results
    // and populate the visited set. However, we have to verify that we don't
    // already have conflicting results for these blocks: check that if a
    // block in the results set is in the visited set, it was visited for the
    // same pointer.
    if (!Visited.empty()) {
      for (auto &Entry : *Cache) {
        DenseMap<BasicBlock *, Value *>::iterator VI =
            Visited.find(Entry.getBB());
        if (VI == Visited.end() || VI->second == Pointer.getAddr())
          continue;

        // We have a pointer mismatch in a block. Just return false, saying
        // that something was clobbered in this result.
        return false;
      }
    }

    Value *Addr = Pointer.getAddr();
    for (auto &Entry : *Cache) {
      Visited.insert(std::make_pair(Entry.getBB(), Addr));
      if (Entry.getResult().isNonLocal())
        continue;

      if (DT.isReachableFromEntry(Entry.getBB()))
        Result.push_back(
            NonLocalDepResult(Entry.getBB(), Entry.getResult(), Addr));
    }
    ++NumCacheCompleteNonLocalPtr;
    return true;
  }

  // Otherwise, either this is a new block, a block with an invalid cache
  // pointer or one that we're about to invalidate by putting more info into
  // it than its valid cache info. If empty and not explicitly indicated as
  // incomplete, the result will be valid cache info, otherwise it isn't.
  if (!IsIncomplete && Cache->empty())
    CacheInfo->Pair = BBSkipFirstBlockPair(StartBB, SkipFirstBlock);
  else
    CacheInfo->Pair = BBSkipFirstBlockPair();

  SmallVector<BasicBlock *, 32> Worklist;
  Worklist.push_back(StartBB);

  // PredList used inside loop.
  SmallVector<std::pair<BasicBlock *, PHITransAddr>, 16> PredList;
  // Keep track of the entries that we know are sorted. Previously cached
  // entries will all be sorted. The entries we add are only sorted on demand.
  unsigned NumSortedEntries = Cache->size();
  unsigned WorklistEntries = BlockNumberLimit;
  bool GotWorklistLimit = false;
  LLVM_DEBUG(AssertSorted(*Cache));

  BatchAAResults BatchAA(AA, &EEA);
  while (!Worklist.empty()) {
    BasicBlock *BB = Worklist.pop_back_val();

    // If we do process a large number of blocks it becomes very expensive
    // and likely it isn't worth worrying about.
    if (Result.size() > NumResultsLimit) {
      // Sort the cache now (if needed) so that recursive invocations of
      // getNonLocalPointerDepFromBB that could reuse it only see properly
      // sorted cache arrays.
      if (Cache && NumSortedEntries != Cache->size()) {
        SortNonLocalDepInfoCache(*Cache, NumSortedEntries);
      }
      // Since we bail out, the "Cache" set won't contain all of the results
      // for the query, so we can't use the fast path.
      CacheInfo->Pair = BBSkipFirstBlockPair();
      return false;
    }

    // Skip the first block if we have it.
    if (!SkipFirstBlock) {
      // Analyze the dependency of *Pointer in FromBB. See if we already have
      // been here.
      assert(Visited.count(BB) && "Should check 'visited' before adding to WL");

      // Get the dependency info for Pointer in BB. If we have cached
      // information, we will use it, otherwise we compute it.
      LLVM_DEBUG(AssertSorted(*Cache, NumSortedEntries));
      MemDepResult Dep = getNonLocalInfoForBlock(
          QueryInst, Loc, isLoad, BB, Cache, NumSortedEntries, BatchAA);

      // If we got a Def or Clobber, add this to the list of results.
      if (!Dep.isNonLocal()) {
        if (DT.isReachableFromEntry(BB)) {
          Result.push_back(NonLocalDepResult(BB, Dep, Pointer.getAddr()));
          continue;
        }
      }
    }

    // If 'Pointer' is an instruction defined in this block, then we need to
    // do phi translation to change it into a value live in the predecessor
    // block. If not, we just add the predecessors to the worklist and scan
    // them with the same Pointer.
    if (!Pointer.needsPHITranslationFromBlock(BB)) {
      SkipFirstBlock = false;
      SmallVector<BasicBlock *, 16> NewBlocks;
      for (BasicBlock *Pred : PredCache.get(BB)) {
        // Verify that we haven't looked at this block yet.
        std::pair<DenseMap<BasicBlock *, Value *>::iterator, bool> InsertRes =
            Visited.insert(std::make_pair(Pred, Pointer.getAddr()));
        if (InsertRes.second) {
          // First time we've looked at this predecessor.
          NewBlocks.push_back(Pred);
          continue;
        }

        // If we have seen this block before, but it was with a different
        // pointer, then we have a phi translation failure and have to treat
        // this as a clobber.
        if (InsertRes.first->second != Pointer.getAddr()) {
          // Clean up the Visited map before continuing on to
          // PredTranslationFailure.
          for (auto *NewBlock : NewBlocks)
            Visited.erase(NewBlock);
          goto PredTranslationFailure;
        }
      }
      if (NewBlocks.size() > WorklistEntries) {
        // Clean up the Visited map before continuing on to
        // PredTranslationFailure.
        for (auto *NewBlock : NewBlocks)
          Visited.erase(NewBlock);
        GotWorklistLimit = true;
        goto PredTranslationFailure;
      }
      WorklistEntries -= NewBlocks.size();
      Worklist.append(NewBlocks.begin(), NewBlocks.end());
      continue;
    }

    // We do need to do phi translation; if we know ahead of time we can't
    // phi translate this value, don't even try.
    if (!Pointer.isPotentiallyPHITranslatable())
      goto PredTranslationFailure;
    // We may have added values to the cache list before this PHI
    // translation. Sort the cache now (if needed) so that recursive
    // invocations only see properly sorted cache arrays.
    if (Cache && NumSortedEntries != Cache->size()) {
      SortNonLocalDepInfoCache(*Cache, NumSortedEntries);
      NumSortedEntries = Cache->size();
    }
    Cache = nullptr;

    PredList.clear();
    for (BasicBlock *Pred : PredCache.get(BB)) {
      PredList.push_back(std::make_pair(Pred, Pointer));

      // Get the PHI translated pointer in this predecessor. This can fail if
      // not translatable, in which case getAddr() returns null.
      PHITransAddr &PredPointer = PredList.back().second;
      Value *PredPtrVal =
          PredPointer.translateValue(BB, Pred, &DT, /*MustDominate=*/false);

      // Check to see if we have already visited this pred block with another
      // pointer. If so, we can't do this lookup. This failure can occur with
      // PHI translation when a critical edge exists and the PHI node in the
      // successor translates to a pointer value different than the pointer
      // the block was first analyzed with.
      std::pair<DenseMap<BasicBlock *, Value *>::iterator, bool> InsertRes =
          Visited.insert(std::make_pair(Pred, PredPtrVal));

      if (!InsertRes.second) {
        // We found the pred; take it off the list of preds to visit.
        PredList.pop_back();

        // If the predecessor was visited with PredPtr, then we already did
        // the analysis and can ignore it.
        if (InsertRes.first->second == PredPtrVal)
          continue;

        // Otherwise, the block was previously analyzed with a different
        // pointer. We can't represent this, so treat it as a phi translation
        // failure; clean up the Visited map first.
        for (const auto &Pred : PredList)
          Visited.erase(Pred.first);

        goto PredTranslationFailure;
      }
    }

    // Actually process results here; this needs to be a separate loop to
    // avoid calling getNonLocalPointerDepFromBB for blocks we don't want to
    // return any results for.
    for (auto &I : PredList) {
      BasicBlock *Pred = I.first;
      PHITransAddr &PredPointer = I.second;
      Value *PredPtrVal = PredPointer.getAddr();

      bool CanTranslate = true;
      // If PHI translation was unable to find an available pointer in this
      // predecessor, we have to assume that the pointer is clobbered there.
      if (!PredPtrVal)
        CanTranslate = false;

      if (!CanTranslate ||
          !getNonLocalPointerDepFromBB(QueryInst, PredPointer,
                                       Loc.getWithNewPtr(PredPtrVal), isLoad,
                                       Pred, Result, Visited)) {
        // Add the entry to the Result list.
        NonLocalDepResult Entry(Pred, MemDepResult::getUnknown(), PredPtrVal);
        Result.push_back(Entry);

        // Since we had a phi translation failure, the cache for CacheKey
        // won't include all of the entries that we need to immediately
        // satisfy future queries. Mark this by clearing the
        // BBSkipFirstBlockPair indicator.
        NonLocalPointerInfo &NLPI = NonLocalPointerDeps[CacheKey];
        NLPI.Pair = BBSkipFirstBlockPair();
        continue;
      }
    }

    // Refresh the CacheInfo/Cache pointer so that it isn't invalidated.
    CacheInfo = &NonLocalPointerDeps[CacheKey];
    Cache = &CacheInfo->NonLocalDeps;
    NumSortedEntries = Cache->size();

    // Since we did phi translation, the "Cache" set won't contain all of the
    // results for the query. This is ok (we can still use it to accelerate
    // specific block queries) but we can't do the fast path "return all
    // results from the set". Clear out the indicator for this.
    CacheInfo->Pair = BBSkipFirstBlockPair();
    SkipFirstBlock = false;
    continue;
  PredTranslationFailure:
    // The following code is "failure"; we can't produce a sane translation
    // for the given block. It assumes that we haven't modified any of our
    // data structures while processing the current block.

    if (!Cache) {
      // Refresh the CacheInfo/Cache pointer if it got invalidated.
      CacheInfo = &NonLocalPointerDeps[CacheKey];
      Cache = &CacheInfo->NonLocalDeps;
      NumSortedEntries = Cache->size();
    }

    // Since we failed phi translation, the "Cache" set won't contain all of
    // the results for the query; clear out the fast-path indicator.
    CacheInfo->Pair = BBSkipFirstBlockPair();

    // If this is the magic first block, return this as a clobber of the
    // whole incoming value. Since we can't phi translate to one of the
    // predecessors, we have to bail out.
    if (SkipFirstBlock)
      return false;

    // Otherwise, mark this block's cached entry as unknown.
    for (NonLocalDepEntry &I : llvm::reverse(*Cache)) {
      if (I.getBB() != BB)
        continue;

      assert((GotWorklistLimit || I.getResult().isNonLocal() ||
              !DT.isReachableFromEntry(BB)) &&
             "Should only be here with transparent block");

      I.setResult(MemDepResult::getUnknown());
      break;
    }
    (void)GotWorklistLimit;
    // Go ahead and report an unknown dependence for this block.
    Result.push_back(
        NonLocalDepResult(BB, MemDepResult::getUnknown(), Pointer.getAddr()));
  }

  // Okay, we're done now. If we added new values to the cache, re-sort it.
  SortNonLocalDepInfoCache(*Cache, NumSortedEntries);
  LLVM_DEBUG(AssertSorted(*Cache));
  return true;
}
/// If P exists in cached information, remove it.
void MemoryDependenceResults::removeCachedNonLocalPointerDependencies(
    ValueIsLoadPair P) {
  // Most of the time this cache is empty.
  if (!NonLocalDefsCache.empty()) {
    auto it = NonLocalDefsCache.find(P.getPointer());
    if (it != NonLocalDefsCache.end()) {
      RemoveFromReverseMap(ReverseNonLocalDefsCache,
                           it->second.getResult().getInst(), P.getPointer());
      NonLocalDefsCache.erase(it);
    }

    if (auto *I = dyn_cast<Instruction>(P.getPointer())) {
      auto toRemoveIt = ReverseNonLocalDefsCache.find(I);
      if (toRemoveIt != ReverseNonLocalDefsCache.end()) {
        for (const auto *entry : toRemoveIt->second)
          NonLocalDefsCache.erase(entry);
        ReverseNonLocalDefsCache.erase(toRemoveIt);
      }
    }
  }

  CachedNonLocalPointerInfo::iterator It = NonLocalPointerDeps.find(P);
  if (It == NonLocalPointerDeps.end())
    return;

  // Remove all of the entries in the BB->val map. This involves removing
  // instructions from the reverse map.
  NonLocalDepInfo &PInfo = It->second.NonLocalDeps;

  for (const NonLocalDepEntry &DE : PInfo) {
    Instruction *Target = DE.getResult().getInst();
    if (!Target)
      continue; // Ignore non-local dep results.
    // Eliminating the entry from 'Cache', so update the reverse info.
    RemoveFromReverseMap(ReverseNonLocalPtrDeps, Target, P);
  }

  // Remove P from NonLocalPointerDeps (which deletes NonLocalDepInfo).
  NonLocalPointerDeps.erase(It);
}
void MemoryDependenceResults::invalidateCachedPointerInfo(Value *Ptr) {
  // If Ptr isn't really a pointer, just ignore it.
  if (!Ptr->getType()->isPointerTy())
    return;
  // Flush store info for the pointer.
  removeCachedNonLocalPointerDependencies(ValueIsLoadPair(Ptr, false));
  // Flush load info for the pointer.
  removeCachedNonLocalPointerDependencies(ValueIsLoadPair(Ptr, true));
}
void MemoryDependenceResults::removeInstruction(Instruction *RemInst) {
  EEA.removeInstruction(RemInst);

  // Walk through the non-local dependencies, removing this one as the value
  // for any cached queries.
  NonLocalDepMapType::iterator NLDI = NonLocalDepsMap.find(RemInst);
  if (NLDI != NonLocalDepsMap.end()) {
    NonLocalDepInfo &BlockMap = NLDI->second.first;
    for (auto &Entry : BlockMap)
      if (Instruction *Inst = Entry.getResult().getInst())
        RemoveFromReverseMap(ReverseNonLocalDeps, Inst, RemInst);
    NonLocalDepsMap.erase(NLDI);
  }

  // If we have a cached local dependence query for this instruction, remove
  // it.
  LocalDepMapType::iterator LocalDepEntry = LocalDeps.find(RemInst);
  if (LocalDepEntry != LocalDeps.end()) {
    // Remove us from DepInst's reverse set now that the local dep info is
    // gone.
    if (Instruction *Inst = LocalDepEntry->second.getInst())
      RemoveFromReverseMap(ReverseLocalDeps, Inst, RemInst);

    // Remove this local dependency info.
    LocalDeps.erase(LocalDepEntry);
  }

  // If the instruction is a pointer, remove it from both the load info and
  // the store info.
  if (RemInst->getType()->isPointerTy()) {
    removeCachedNonLocalPointerDependencies(ValueIsLoadPair(RemInst, false));
    removeCachedNonLocalPointerDependencies(ValueIsLoadPair(RemInst, true));
  } else {
    // Otherwise, if the instruction is in the map directly, it must be a
    // load. Remove it.
    auto toRemoveIt = NonLocalDefsCache.find(RemInst);
    if (toRemoveIt != NonLocalDefsCache.end()) {
      assert(isa<LoadInst>(RemInst) &&
             "only load instructions should be added directly");
      const Instruction *DepV = toRemoveIt->second.getResult().getInst();
      ReverseNonLocalDefsCache.find(DepV)->second.erase(RemInst);
      NonLocalDefsCache.erase(toRemoveIt);
    }
  }

  // Loop over all of the things that depend on the instruction we're
  // removing.
  SmallVector<std::pair<Instruction *, Instruction *>, 8> ReverseDepsToAdd;

  // If we find RemInst as a clobber or Def in any of the maps for other
  // values, we need to replace its entry with a dirty version of the
  // instruction after it. If RemInst is a terminator, we use a null dirty
  // value.
  MemDepResult NewDirtyVal;
  if (!RemInst->isTerminator())
    NewDirtyVal = MemDepResult::getDirty(&*++RemInst->getIterator());
  ReverseDepMapType::iterator ReverseDepIt = ReverseLocalDeps.find(RemInst);
  if (ReverseDepIt != ReverseLocalDeps.end()) {
    // RemInst can't be the terminator if it has local stuff depending on it.
    assert(!ReverseDepIt->second.empty() && !RemInst->isTerminator() &&
           "Nothing can locally depend on a terminator");

    for (Instruction *InstDependingOnRemInst : ReverseDepIt->second) {
      assert(InstDependingOnRemInst != RemInst &&
             "Already removed our local dep info");

      LocalDeps[InstDependingOnRemInst] = NewDirtyVal;

      // Make sure to remember that new things depend on NewDepInst.
      assert(NewDirtyVal.getInst() &&
             "There is no way something else can have "
             "a local dep on this if it is a terminator!");
      ReverseDepsToAdd.push_back(
          std::make_pair(NewDirtyVal.getInst(), InstDependingOnRemInst));
    }

    ReverseLocalDeps.erase(ReverseDepIt);

    // Add new reverse deps after scanning the set, to avoid invalidating the
    // 'ReverseDeps' reference.
    while (!ReverseDepsToAdd.empty()) {
      ReverseLocalDeps[ReverseDepsToAdd.back().first].insert(
          ReverseDepsToAdd.back().second);
      ReverseDepsToAdd.pop_back();
    }
  }
  ReverseDepIt = ReverseNonLocalDeps.find(RemInst);
  if (ReverseDepIt != ReverseNonLocalDeps.end()) {
    for (Instruction *I : ReverseDepIt->second) {
      assert(I != RemInst && "Already removed NonLocalDep info for RemInst");

      PerInstNLInfo &INLD = NonLocalDepsMap[I];
      // The information is now dirty!
      INLD.second = true;

      for (auto &Entry : INLD.first) {
        if (Entry.getResult().getInst() != RemInst)
          continue;

        // Convert to a dirty entry for the subsequent instruction.
        Entry.setResult(NewDirtyVal);

        if (Instruction *NextI = NewDirtyVal.getInst())
          ReverseDepsToAdd.push_back(std::make_pair(NextI, I));
      }
    }

    ReverseNonLocalDeps.erase(ReverseDepIt);

    // Add new reverse deps after scanning the set, to avoid invalidating it.
    while (!ReverseDepsToAdd.empty()) {
      ReverseNonLocalDeps[ReverseDepsToAdd.back().first].insert(
          ReverseDepsToAdd.back().second);
      ReverseDepsToAdd.pop_back();
    }
  }
  // If the instruction is in ReverseNonLocalPtrDeps then it appears as a
  // value in the NonLocalPointerDeps info.
  ReverseNonLocalPtrDepTy::iterator ReversePtrDepIt =
      ReverseNonLocalPtrDeps.find(RemInst);
  if (ReversePtrDepIt != ReverseNonLocalPtrDeps.end()) {
    SmallVector<std::pair<Instruction *, ValueIsLoadPair>, 8>
        ReversePtrDepsToAdd;

    for (ValueIsLoadPair P : ReversePtrDepIt->second) {
      assert(P.getPointer() != RemInst &&
             "Already removed NonLocalPointerDeps info for RemInst");

      auto &NLPD = NonLocalPointerDeps[P];

      NonLocalDepInfo &NLPDI = NLPD.NonLocalDeps;

      // The cache is not valid for any specific block anymore.
      NLPD.Pair = BBSkipFirstBlockPair();

      // Update any entries for RemInst to use the instruction after it.
      for (auto &Entry : NLPDI) {
        if (Entry.getResult().getInst() != RemInst)
          continue;

        // Convert to a dirty entry for the subsequent instruction.
        Entry.setResult(NewDirtyVal);

        if (Instruction *NewDirtyInst = NewDirtyVal.getInst())
          ReversePtrDepsToAdd.push_back(std::make_pair(NewDirtyInst, P));
      }

      // Re-sort the NonLocalDepInfo. Changing a dirty entry to its
      // subsequent value may invalidate the sortedness.
      llvm::sort(NLPDI);
    }

    ReverseNonLocalPtrDeps.erase(ReversePtrDepIt);

    while (!ReversePtrDepsToAdd.empty()) {
      ReverseNonLocalPtrDeps[ReversePtrDepsToAdd.back().first].insert(
          ReversePtrDepsToAdd.back().second);
      ReversePtrDepsToAdd.pop_back();
    }
  }

  assert(!NonLocalDepsMap.count(RemInst) && "RemInst got reinserted?");
  LLVM_DEBUG(verifyRemoved(RemInst));
}
/// Verify that the specified instruction does not occur in our internal data
/// structures.
void MemoryDependenceResults::verifyRemoved(Instruction *D) const {
  for (const auto &DepKV : LocalDeps) {
    assert(DepKV.first != D && "Inst occurs in data structures");
    assert(DepKV.second.getInst() != D && "Inst occurs in data structures");
  }

  for (const auto &DepKV : NonLocalPointerDeps) {
    assert(DepKV.first.getPointer() != D && "Inst occurs in NLPD map key");
    for (const auto &Entry : DepKV.second.NonLocalDeps)
      assert(Entry.getResult().getInst() != D && "Inst occurs as NLPD value");
  }

  for (const auto &DepKV : NonLocalDepsMap) {
    assert(DepKV.first != D && "Inst occurs in data structures");
    const PerInstNLInfo &INLD = DepKV.second;
    for (const auto &Entry : INLD.first)
      assert(Entry.getResult().getInst() != D &&
             "Inst occurs in data structures");
  }

  for (const auto &DepKV : ReverseLocalDeps) {
    assert(DepKV.first != D && "Inst occurs in data structures");
    for (Instruction *Inst : DepKV.second)
      assert(Inst != D && "Inst occurs in data structures");
  }

  for (const auto &DepKV : ReverseNonLocalDeps) {
    assert(DepKV.first != D && "Inst occurs in data structures");
    for (Instruction *Inst : DepKV.second)
      assert(Inst != D && "Inst occurs in data structures");
  }

  for (const auto &DepKV : ReverseNonLocalPtrDeps) {
    assert(DepKV.first != D && "Inst occurs in rev NLPD map");

    for (ValueIsLoadPair P : DepKV.second)
      assert(P != ValueIsLoadPair(D, false) && P != ValueIsLoadPair(D, true) &&
             "Inst occurs in ReverseNonLocalPtrDeps map");
  }
}
1720 "Memory Dependence Analysis",
false,
true)
1745 FunctionAnalysisManager::Invalidator &Inv) {
1763 return DefaultBlockScanLimit;
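// Illustrative sketch (not part of the original file): following a local
// "non-local" answer with the full non-local pointer query.
// "dumpNonLocalDeps" is a hypothetical helper that assumes a simple
// (unordered) load; getDependency and getNonLocalPointerDependency are the
// real entry points defined above.
//
// #include "llvm/ADT/SmallVector.h"
// #include "llvm/Analysis/MemoryDependenceAnalysis.h"
// #include "llvm/IR/Instructions.h"
// #include "llvm/Support/raw_ostream.h"
//
// static void dumpNonLocalDeps(llvm::MemoryDependenceResults &MD,
//                              llvm::LoadInst *LI) {
//   // A local query that answers "non-local" means the dependency lives in
//   // some predecessor; getNonLocalPointerDependency walks the CFG for it.
//   if (!MD.getDependency(LI).isNonLocal())
//     return;
//   llvm::SmallVector<llvm::NonLocalDepResult, 8> Deps;
//   MD.getNonLocalPointerDependency(LI, Deps);
//   for (const llvm::NonLocalDepResult &D : Deps)
//     if (llvm::Instruction *I = D.getResult().getInst())
//       llvm::errs() << "dep in " << D.getBB()->getName() << ": " << *I
//                    << "\n";
// }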