55#include "llvm/IR/IntrinsicsAMDGPU.h"
56#include "llvm/IR/IntrinsicsNVPTX.h"
81#define DEBUG_TYPE "attributor"
85 cl::desc(
"Manifest Attributor internal string attributes."),
98 cl::desc(
"Maximum number of potential values to be "
99 "tracked for each position."),
104 "attributor-max-potential-values-iterations",
cl::Hidden,
106 "Maximum number of iterations we keep dismantling potential values."),
109STATISTIC(NumAAs,
"Number of abstract attributes created");
124#define BUILD_STAT_MSG_IR_ATTR(TYPE, NAME) \
125 ("Number of " #TYPE " marked '" #NAME "'")
126#define BUILD_STAT_NAME(NAME, TYPE) NumIR##TYPE##_##NAME
127#define STATS_DECL_(NAME, MSG) STATISTIC(NAME, MSG);
128#define STATS_DECL(NAME, TYPE, MSG) \
129 STATS_DECL_(BUILD_STAT_NAME(NAME, TYPE), MSG);
130#define STATS_TRACK(NAME, TYPE) ++(BUILD_STAT_NAME(NAME, TYPE));
131#define STATS_DECLTRACK(NAME, TYPE, MSG) \
133 STATS_DECL(NAME, TYPE, MSG) \
134 STATS_TRACK(NAME, TYPE) \
136#define STATS_DECLTRACK_ARG_ATTR(NAME) \
137 STATS_DECLTRACK(NAME, Arguments, BUILD_STAT_MSG_IR_ATTR(arguments, NAME))
138#define STATS_DECLTRACK_CSARG_ATTR(NAME) \
139 STATS_DECLTRACK(NAME, CSArguments, \
140 BUILD_STAT_MSG_IR_ATTR(call site arguments, NAME))
141#define STATS_DECLTRACK_FN_ATTR(NAME) \
142 STATS_DECLTRACK(NAME, Function, BUILD_STAT_MSG_IR_ATTR(functions, NAME))
143#define STATS_DECLTRACK_CS_ATTR(NAME) \
144 STATS_DECLTRACK(NAME, CS, BUILD_STAT_MSG_IR_ATTR(call site, NAME))
145#define STATS_DECLTRACK_FNRET_ATTR(NAME) \
146 STATS_DECLTRACK(NAME, FunctionReturn, \
147 BUILD_STAT_MSG_IR_ATTR(function returns, NAME))
148#define STATS_DECLTRACK_CSRET_ATTR(NAME) \
149 STATS_DECLTRACK(NAME, CSReturn, \
150 BUILD_STAT_MSG_IR_ATTR(call site returns, NAME))
151#define STATS_DECLTRACK_FLOATING_ATTR(NAME) \
152 STATS_DECLTRACK(NAME, Floating, \
153 ("Number of floating values known to be '" #NAME "'"))
158#define PIPE_OPERATOR(CLASS) \
159 raw_ostream &operator<<(raw_ostream &OS, const CLASS &AA) { \
160 return OS << static_cast<const AbstractAttribute &>(AA); \
215 bool HeaderOnly,
Cycle **CPtr =
nullptr) {
218 auto *BB =
I->getParent();
224 return !HeaderOnly || BB ==
C->getHeader();
235 if (
DL.getTypeSizeInBits(Ty) !=
DL.getTypeAllocSizeInBits(Ty))
240 if (
VectorType *SeqTy = dyn_cast<VectorType>(Ty))
244 if (
ArrayType *SeqTy = dyn_cast<ArrayType>(Ty))
247 if (!isa<StructType>(Ty))
260 StartPos +=
DL.getTypeAllocSizeInBits(ElTy);
270 bool AllowVolatile) {
271 if (!AllowVolatile &&
I->isVolatile())
274 if (
auto *LI = dyn_cast<LoadInst>(
I)) {
275 return LI->getPointerOperand();
278 if (
auto *SI = dyn_cast<StoreInst>(
I)) {
279 return SI->getPointerOperand();
282 if (
auto *CXI = dyn_cast<AtomicCmpXchgInst>(
I)) {
283 return CXI->getPointerOperand();
286 if (
auto *RMWI = dyn_cast<AtomicRMWInst>(
I)) {
287 return RMWI->getPointerOperand();
309 bool GetMinOffset,
bool AllowNonInbounds,
310 bool UseAssumed =
false) {
312 auto AttributorAnalysis = [&](
Value &V,
APInt &ROffset) ->
bool {
317 UseAssumed ? DepClassTy::OPTIONAL
319 if (!ValueConstantRangeAA)
342 const Value *
Ptr, int64_t &BytesOffset,
344 APInt OffsetAPInt(
DL.getIndexTypeSizeInBits(
Ptr->getType()), 0);
347 true, AllowNonInbounds);
355template <
typename AAType,
typename StateType =
typename AAType::StateType,
357 bool RecurseForSelectAndPHI =
true>
359 Attributor &
A,
const AAType &QueryingAA, StateType &S,
361 LLVM_DEBUG(
dbgs() <<
"[Attributor] Clamp return value states for "
362 << QueryingAA <<
" into " << S <<
"\n");
364 assert((QueryingAA.getIRPosition().getPositionKind() ==
366 QueryingAA.getIRPosition().getPositionKind() ==
368 "Can only clamp returned value states for a function returned or call "
369 "site returned position!");
373 std::optional<StateType>
T;
376 auto CheckReturnValue = [&](
Value &RV) ->
bool {
381 return AA::hasAssumedIRAttr<IRAttributeKind>(
382 A, &QueryingAA, RVPos, DepClassTy::REQUIRED, IsKnown);
386 A.getAAFor<AAType>(QueryingAA, RVPos, DepClassTy::REQUIRED);
390 <<
" AA: " << AA->getAsStr(&
A) <<
" @ " << RVPos <<
"\n");
391 const StateType &AAS = AA->getState();
393 T = StateType::getBestState(AAS);
395 LLVM_DEBUG(
dbgs() <<
"[Attributor] AA State: " << AAS <<
" RV State: " <<
T
397 return T->isValidState();
400 if (!
A.checkForAllReturnedValues(CheckReturnValue, QueryingAA,
401 AA::ValueScope::Intraprocedural,
402 RecurseForSelectAndPHI))
403 S.indicatePessimisticFixpoint();
410template <
typename AAType,
typename BaseType,
411 typename StateType =
typename BaseType::StateType,
412 bool PropagateCallBaseContext =
false,
414 bool RecurseForSelectAndPHI =
true>
415struct AAReturnedFromReturnedValues :
public BaseType {
421 StateType S(StateType::getBestState(this->getState()));
423 RecurseForSelectAndPHI>(
425 PropagateCallBaseContext ? this->getCallBaseContext() : nullptr);
428 return clampStateAndIndicateChange<StateType>(this->getState(), S);
434template <
typename AAType,
typename StateType =
typename AAType::StateType,
436static void clampCallSiteArgumentStates(
Attributor &
A,
const AAType &QueryingAA,
438 LLVM_DEBUG(
dbgs() <<
"[Attributor] Clamp call site argument states for "
439 << QueryingAA <<
" into " << S <<
"\n");
441 assert(QueryingAA.getIRPosition().getPositionKind() ==
443 "Can only clamp call site argument states for an argument position!");
447 std::optional<StateType>
T;
450 unsigned ArgNo = QueryingAA.getIRPosition().getCallSiteArgNo();
462 return AA::hasAssumedIRAttr<IRAttributeKind>(
463 A, &QueryingAA, ACSArgPos, DepClassTy::REQUIRED, IsKnown);
467 A.getAAFor<AAType>(QueryingAA, ACSArgPos, DepClassTy::REQUIRED);
470 LLVM_DEBUG(
dbgs() <<
"[Attributor] ACS: " << *ACS.getInstruction()
471 <<
" AA: " << AA->getAsStr(&
A) <<
" @" << ACSArgPos
473 const StateType &AAS = AA->getState();
475 T = StateType::getBestState(AAS);
477 LLVM_DEBUG(
dbgs() <<
"[Attributor] AA State: " << AAS <<
" CSA State: " <<
T
479 return T->isValidState();
482 bool UsedAssumedInformation =
false;
483 if (!
A.checkForAllCallSites(CallSiteCheck, QueryingAA,
true,
484 UsedAssumedInformation))
485 S.indicatePessimisticFixpoint();
492template <
typename AAType,
typename BaseType,
493 typename StateType =
typename AAType::StateType,
495bool getArgumentStateFromCallBaseContext(
Attributor &
A,
499 "Expected an 'argument' position !");
505 assert(ArgNo >= 0 &&
"Invalid Arg No!");
511 return AA::hasAssumedIRAttr<IRAttributeKind>(
512 A, &QueryingAttribute, CBArgPos, DepClassTy::REQUIRED, IsKnown);
516 A.getAAFor<AAType>(QueryingAttribute, CBArgPos, DepClassTy::REQUIRED);
519 const StateType &CBArgumentState =
520 static_cast<const StateType &
>(AA->getState());
522 LLVM_DEBUG(
dbgs() <<
"[Attributor] Briding Call site context to argument"
523 <<
"Position:" << Pos <<
"CB Arg state:" << CBArgumentState
527 State ^= CBArgumentState;
532template <
typename AAType,
typename BaseType,
533 typename StateType =
typename AAType::StateType,
534 bool BridgeCallBaseContext =
false,
536struct AAArgumentFromCallSiteArguments :
public BaseType {
542 StateType S = StateType::getBestState(this->getState());
544 if (BridgeCallBaseContext) {
546 getArgumentStateFromCallBaseContext<AAType,
BaseType, StateType,
548 A, *
this, this->getIRPosition(), S);
550 return clampStateAndIndicateChange<StateType>(this->getState(), S);
552 clampCallSiteArgumentStates<AAType, StateType, IRAttributeKind>(
A, *
this,
557 return clampStateAndIndicateChange<StateType>(this->getState(), S);
562template <
typename AAType,
typename BaseType,
563 typename StateType =
typename BaseType::StateType,
564 bool IntroduceCallBaseContext =
false,
566struct AACalleeToCallSite :
public BaseType {
571 auto IRPKind = this->getIRPosition().getPositionKind();
574 "Can only wrap function returned positions for call site "
575 "returned positions!");
576 auto &S = this->getState();
578 CallBase &CB = cast<CallBase>(this->getAnchorValue());
579 if (IntroduceCallBaseContext)
580 LLVM_DEBUG(
dbgs() <<
"[Attributor] Introducing call base context:" << CB
585 for (
const Function *Callee : Callees) {
589 IntroduceCallBaseContext ? &CB :
nullptr)
591 *
Callee, IntroduceCallBaseContext ? &CB : nullptr);
595 if (!AA::hasAssumedIRAttr<IRAttributeKind>(
596 A,
this, FnPos, DepClassTy::REQUIRED, IsKnown))
602 A.getAAFor<AAType>(*
this, FnPos, DepClassTy::REQUIRED);
606 if (S.isAtFixpoint())
607 return S.isValidState();
611 if (!
A.checkForAllCallees(CalleePred, *
this, CB))
612 return S.indicatePessimisticFixpoint();
618template <
class AAType,
typename StateType =
typename AAType::StateType>
619static void followUsesInContext(AAType &AA,
Attributor &
A,
624 auto EIt = Explorer.
begin(CtxI), EEnd = Explorer.
end(CtxI);
625 for (
unsigned u = 0;
u <
Uses.size(); ++
u) {
627 if (
const Instruction *UserI = dyn_cast<Instruction>(
U->getUser())) {
629 if (Found && AA.followUseInMBEC(
A, U, UserI, State))
630 for (
const Use &Us : UserI->
uses())
645template <
class AAType,
typename StateType =
typename AAType::StateType>
646static void followUsesInMBEC(AAType &AA,
Attributor &
A, StateType &S,
649 A.getInfoCache().getMustBeExecutedContextExplorer();
655 for (
const Use &U : AA.getIRPosition().getAssociatedValue().uses())
658 followUsesInContext<AAType>(AA,
A, *Explorer, &CtxI,
Uses, S);
660 if (S.isAtFixpoint())
665 if (
const BranchInst *Br = dyn_cast<BranchInst>(
I))
666 if (Br->isConditional())
705 StateType ParentState;
709 ParentState.indicateOptimisticFixpoint();
711 for (
const BasicBlock *BB : Br->successors()) {
712 StateType ChildState;
714 size_t BeforeSize =
Uses.size();
715 followUsesInContext(AA,
A, *Explorer, &BB->front(),
Uses, ChildState);
718 for (
auto It =
Uses.begin() + BeforeSize; It !=
Uses.end();)
721 ParentState &= ChildState;
734namespace PointerInfo {
795 R.indicatePessimisticFixpoint();
889 if (!
Range.mayOverlap(ItRange))
891 bool IsExact =
Range == ItRange && !
Range.offsetOrSizeAreUnknown();
892 for (
auto Index : It.getSecond()) {
894 if (!CB(Access, IsExact))
914 for (
unsigned Index : LocalList->getSecond()) {
917 if (
Range.offsetAndSizeAreUnknown())
933 RemoteI = RemoteI ? RemoteI : &
I;
937 bool AccExists =
false;
939 for (
auto Index : LocalList) {
941 if (
A.getLocalInst() == &
I) {
950 <<
"[AAPointerInfo] Inserting access in new offset bins\n";);
952 for (
auto Key : ToAdd) {
961 "New Access should have been at AccIndex");
962 LocalList.push_back(AccIndex);
976 auto &ExistingRanges =
Before.getRanges();
977 auto &NewRanges = Current.getRanges();
984 <<
"[AAPointerInfo] Removing access from old offset bins\n";);
991 "Expected bin to actually contain the Access.");
1010 using const_iterator = VecTy::const_iterator;
1013 const_iterator begin()
const {
return Offsets.begin(); }
1014 const_iterator end()
const {
return Offsets.end(); }
1017 return Offsets ==
RHS.Offsets;
1023 bool isUnassigned()
const {
return Offsets.size() == 0; }
1025 bool isUnknown()
const {
1038 void addToAll(int64_t Inc) {
1039 for (
auto &
Offset : Offsets) {
1048 void merge(
const OffsetInfo &R) {
Offsets.append(
R.Offsets); }
1063struct AAPointerInfoImpl
1064 :
public StateWrapper<AA::PointerInfo::State, AAPointerInfo> {
1069 const std::string getAsStr(
Attributor *
A)
const override {
1070 return std::string(
"PointerInfo ") +
1071 (isValidState() ? (std::string(
"#") +
1072 std::to_string(OffsetBins.
size()) +
" bins")
1078 return AAPointerInfo::manifest(
A);
1081 virtual const_bin_iterator
begin()
const override {
return State::begin(); }
1082 virtual const_bin_iterator
end()
const override {
return State::end(); }
1083 virtual int64_t numOffsetBins()
const override {
1084 return State::numOffsetBins();
1087 bool forallInterferingAccesses(
1091 return State::forallInterferingAccesses(
Range, CB);
1094 bool forallInterferingAccesses(
1096 bool FindInterferingWrites,
bool FindInterferingReads,
1097 function_ref<
bool(
const Access &,
bool)> UserCB,
bool &HasBeenWrittenTo,
1099 function_ref<
bool(
const Access &)> SkipCB)
const override {
1100 HasBeenWrittenTo =
false;
1107 bool IsAssumedNoSync = AA::hasAssumedIRAttr<Attribute::NoSync>(
1112 bool AllInSameNoSyncFn = IsAssumedNoSync;
1113 bool InstIsExecutedByInitialThreadOnly =
1114 ExecDomainAA && ExecDomainAA->isExecutedByInitialThreadOnly(
I);
1121 bool InstIsExecutedInAlignedRegion =
1122 FindInterferingReads && ExecDomainAA &&
1123 ExecDomainAA->isExecutedInAlignedRegion(
A,
I);
1125 if (InstIsExecutedInAlignedRegion || InstIsExecutedByInitialThreadOnly)
1126 A.recordDependence(*ExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1129 bool IsThreadLocalObj =
1138 auto CanIgnoreThreadingForInst = [&](
const Instruction &
I) ->
bool {
1139 if (IsThreadLocalObj || AllInSameNoSyncFn)
1141 const auto *FnExecDomainAA =
1142 I.getFunction() == &
Scope
1147 if (!FnExecDomainAA)
1149 if (InstIsExecutedInAlignedRegion ||
1150 (FindInterferingWrites &&
1151 FnExecDomainAA->isExecutedInAlignedRegion(
A,
I))) {
1152 A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1155 if (InstIsExecutedByInitialThreadOnly &&
1156 FnExecDomainAA->isExecutedByInitialThreadOnly(
I)) {
1157 A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1166 auto CanIgnoreThreading = [&](
const Access &Acc) ->
bool {
1167 return CanIgnoreThreadingForInst(*Acc.getRemoteInst()) ||
1168 (Acc.getRemoteInst() != Acc.getLocalInst() &&
1169 CanIgnoreThreadingForInst(*Acc.getLocalInst()));
1173 bool IsKnownNoRecurse;
1174 AA::hasAssumedIRAttr<Attribute::NoRecurse>(
1181 bool InstInKernel =
Scope.hasFnAttribute(
"kernel");
1182 bool ObjHasKernelLifetime =
false;
1183 const bool UseDominanceReasoning =
1184 FindInterferingWrites && IsKnownNoRecurse;
1195 case AA::GPUAddressSpace::Shared:
1196 case AA::GPUAddressSpace::Constant:
1197 case AA::GPUAddressSpace::Local:
1209 std::function<
bool(
const Function &)> IsLiveInCalleeCB;
1211 if (
auto *AI = dyn_cast<AllocaInst>(&getAssociatedValue())) {
1216 bool IsKnownNoRecurse;
1217 if (AA::hasAssumedIRAttr<Attribute::NoRecurse>(
1219 IsKnownNoRecurse)) {
1220 IsLiveInCalleeCB = [AIFn](
const Function &Fn) {
return AIFn != &Fn; };
1222 }
else if (
auto *GV = dyn_cast<GlobalValue>(&getAssociatedValue())) {
1225 ObjHasKernelLifetime = HasKernelLifetime(GV, *GV->getParent());
1226 if (ObjHasKernelLifetime)
1227 IsLiveInCalleeCB = [](
const Function &Fn) {
1228 return !Fn.hasFnAttribute(
"kernel");
1236 auto AccessCB = [&](
const Access &Acc,
bool Exact) {
1238 bool AccInSameScope = AccScope == &
Scope;
1242 if (InstInKernel && ObjHasKernelLifetime && !AccInSameScope &&
1246 if (Exact && Acc.isMustAccess() && Acc.getRemoteInst() != &
I) {
1247 if (Acc.isWrite() || (isa<LoadInst>(
I) && Acc.isWriteOrAssumption()))
1248 ExclusionSet.
insert(Acc.getRemoteInst());
1251 if ((!FindInterferingWrites || !Acc.isWriteOrAssumption()) &&
1252 (!FindInterferingReads || !Acc.isRead()))
1255 bool Dominates = FindInterferingWrites && DT && Exact &&
1256 Acc.isMustAccess() && AccInSameScope &&
1259 DominatingWrites.
insert(&Acc);
1263 AllInSameNoSyncFn &= Acc.getRemoteInst()->getFunction() == &
Scope;
1265 InterferingAccesses.
push_back({&Acc, Exact});
1268 if (!State::forallInterferingAccesses(
I, AccessCB,
Range))
1271 HasBeenWrittenTo = !DominatingWrites.
empty();
1275 for (
const Access *Acc : DominatingWrites) {
1276 if (!LeastDominatingWriteInst) {
1277 LeastDominatingWriteInst = Acc->getRemoteInst();
1278 }
else if (DT->
dominates(LeastDominatingWriteInst,
1279 Acc->getRemoteInst())) {
1280 LeastDominatingWriteInst = Acc->getRemoteInst();
1285 auto CanSkipAccess = [&](
const Access &Acc,
bool Exact) {
1286 if (SkipCB && SkipCB(Acc))
1288 if (!CanIgnoreThreading(Acc))
1294 bool ReadChecked = !FindInterferingReads;
1295 bool WriteChecked = !FindInterferingWrites;
1301 &ExclusionSet, IsLiveInCalleeCB))
1306 if (!WriteChecked) {
1308 &ExclusionSet, IsLiveInCalleeCB))
1309 WriteChecked =
true;
1323 if (!WriteChecked && HasBeenWrittenTo &&
1324 Acc.getRemoteInst()->getFunction() != &
Scope) {
1328 if (FnReachabilityAA) {
1334 if (!FnReachabilityAA->instructionCanReach(
1335 A, *LeastDominatingWriteInst,
1336 *Acc.getRemoteInst()->getFunction(), &ExclusionSet))
1337 WriteChecked =
true;
1344 if (ReadChecked && WriteChecked)
1347 if (!DT || !UseDominanceReasoning)
1349 if (!DominatingWrites.count(&Acc))
1351 return LeastDominatingWriteInst != Acc.getRemoteInst();
1356 for (
auto &It : InterferingAccesses) {
1357 if ((!AllInSameNoSyncFn && !IsThreadLocalObj && !ExecDomainAA) ||
1358 !CanSkipAccess(*It.first, It.second)) {
1359 if (!UserCB(*It.first, It.second))
1369 using namespace AA::PointerInfo;
1371 return indicatePessimisticFixpoint();
1373 const auto &OtherAAImpl =
static_cast<const AAPointerInfoImpl &
>(OtherAA);
1374 bool IsByval = OtherAAImpl.getAssociatedArgument()->hasByValAttr();
1378 const auto &State = OtherAAImpl.getState();
1379 for (
const auto &It : State) {
1380 for (
auto Index : It.getSecond()) {
1381 const auto &RAcc = State.getAccess(
Index);
1382 if (IsByval && !RAcc.isRead())
1384 bool UsedAssumedInformation =
false;
1386 auto Content =
A.translateArgumentToCallSiteContent(
1387 RAcc.getContent(), CB, *
this, UsedAssumedInformation);
1388 AK =
AccessKind(AK & (IsByval ? AccessKind::AK_R : AccessKind::AK_RW));
1389 AK =
AccessKind(AK | (RAcc.isMayAccess() ? AK_MAY : AK_MUST));
1391 Changed |= addAccess(
A, RAcc.getRanges(), CB,
Content, AK,
1392 RAcc.getType(), RAcc.getRemoteInst());
1399 const OffsetInfo &Offsets,
CallBase &CB) {
1400 using namespace AA::PointerInfo;
1402 return indicatePessimisticFixpoint();
1404 const auto &OtherAAImpl =
static_cast<const AAPointerInfoImpl &
>(OtherAA);
1408 const auto &State = OtherAAImpl.getState();
1409 for (
const auto &It : State) {
1410 for (
auto Index : It.getSecond()) {
1411 const auto &RAcc = State.getAccess(
Index);
1412 for (
auto Offset : Offsets) {
1416 if (!NewRanges.isUnknown()) {
1417 NewRanges.addToAllOffsets(
Offset);
1420 addAccess(
A, NewRanges, CB, RAcc.getContent(), RAcc.getKind(),
1421 RAcc.getType(), RAcc.getRemoteInst());
1430 void trackPointerInfoStatistics(
const IRPosition &IRP)
const {}
1434 for (
auto &It : OffsetBins) {
1435 O <<
"[" << It.first.Offset <<
"-" << It.first.Offset + It.first.Size
1436 <<
"] : " << It.getSecond().size() <<
"\n";
1437 for (
auto AccIndex : It.getSecond()) {
1438 auto &Acc = AccessList[AccIndex];
1439 O <<
" - " << Acc.getKind() <<
" - " << *Acc.getLocalInst() <<
"\n";
1440 if (Acc.getLocalInst() != Acc.getRemoteInst())
1441 O <<
" --> " << *Acc.getRemoteInst()
1443 if (!Acc.isWrittenValueYetUndetermined()) {
1444 if (isa_and_nonnull<Function>(Acc.getWrittenValue()))
1445 O <<
" - c: func " << Acc.getWrittenValue()->getName()
1447 else if (Acc.getWrittenValue())
1448 O <<
" - c: " << *Acc.getWrittenValue() <<
"\n";
1450 O <<
" - c: <unknown>\n";
1457struct AAPointerInfoFloating :
public AAPointerInfoImpl {
1460 : AAPointerInfoImpl(IRP,
A) {}
1467 using namespace AA::PointerInfo;
1470 TypeSize AccessSize =
DL.getTypeStoreSize(&Ty);
1480 if (!VT || VT->getElementCount().isScalable() ||
1482 (*Content)->getType() != VT ||
1483 DL.getTypeStoreSize(VT->getElementType()).isScalable()) {
1493 int64_t ElementSize =
DL.getTypeStoreSize(ElementType).getFixedValue();
1494 auto *ConstContent = cast<Constant>(*
Content);
1498 for (
int i = 0, e = VT->getElementCount().getFixedValue(); i != e; ++i) {
1500 ConstContent, ConstantInt::get(Int32Ty, i));
1503 Changed = Changed | addAccess(
A, {ElementOffsets, ElementSize},
I,
1507 for (
auto &ElementOffset : ElementOffsets)
1508 ElementOffset += ElementSize;
1522 OffsetInfo &UsrOI,
const OffsetInfo &PtrOI,
1526 void trackStatistics()
const override {
1527 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1531bool AAPointerInfoFloating::collectConstantsForGEP(
Attributor &
A,
1534 const OffsetInfo &PtrOI,
1536 unsigned BitWidth =
DL.getIndexTypeSizeInBits(
GEP->getType());
1540 assert(!UsrOI.isUnknown() && !PtrOI.isUnknown() &&
1541 "Don't look for constant values if the offset has already been "
1542 "determined to be unknown.");
1544 if (!
GEP->collectOffset(
DL,
BitWidth, VariableOffsets, ConstantOffset)) {
1550 << (VariableOffsets.
empty() ?
"" :
"not") <<
" constant "
1554 Union.addToAll(ConstantOffset.getSExtValue());
1559 for (
const auto &VI : VariableOffsets) {
1562 if (!PotentialConstantsAA || !PotentialConstantsAA->isValidState()) {
1568 if (PotentialConstantsAA->undefIsContained())
1576 if (AssumedSet.empty())
1580 for (
const auto &ConstOffset : AssumedSet) {
1581 auto CopyPerOffset =
Union;
1582 CopyPerOffset.addToAll(ConstOffset.getSExtValue() *
1583 VI.second.getZExtValue());
1584 Product.merge(CopyPerOffset);
1589 UsrOI = std::move(Union);
1594 using namespace AA::PointerInfo;
1597 Value &AssociatedValue = getAssociatedValue();
1600 OffsetInfoMap[&AssociatedValue].
insert(0);
1602 auto HandlePassthroughUser = [&](
Value *Usr,
Value *CurPtr,
bool &Follow) {
1613 "CurPtr does not exist in the map!");
1615 auto &UsrOI = OffsetInfoMap[Usr];
1616 auto &PtrOI = OffsetInfoMap[CurPtr];
1617 assert(!PtrOI.isUnassigned() &&
1618 "Cannot pass through if the input Ptr was not visited!");
1624 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
1626 User *Usr =
U.getUser();
1627 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Analyze " << *CurPtr <<
" in " << *Usr
1630 "The current pointer offset should have been seeded!");
1631 assert(!OffsetInfoMap[CurPtr].isUnassigned() &&
1632 "Current pointer should be assigned");
1636 return HandlePassthroughUser(Usr, CurPtr, Follow);
1637 if (!isa<GEPOperator>(CE)) {
1638 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Unhandled constant user " << *CE
1643 if (
auto *
GEP = dyn_cast<GEPOperator>(Usr)) {
1646 auto &UsrOI = OffsetInfoMap[Usr];
1647 auto &PtrOI = OffsetInfoMap[CurPtr];
1649 if (UsrOI.isUnknown())
1652 if (PtrOI.isUnknown()) {
1658 Follow = collectConstantsForGEP(
A,
DL, UsrOI, PtrOI,
GEP);
1661 if (isa<PtrToIntInst>(Usr))
1663 if (isa<CastInst>(Usr) || isa<SelectInst>(Usr) || isa<ReturnInst>(Usr))
1664 return HandlePassthroughUser(Usr, CurPtr, Follow);
1669 if (
auto *
PHI = dyn_cast<PHINode>(Usr)) {
1672 bool IsFirstPHIUser = !OffsetInfoMap.
count(
PHI);
1673 auto &UsrOI = OffsetInfoMap[
PHI];
1674 auto &PtrOI = OffsetInfoMap[CurPtr];
1678 if (PtrOI.isUnknown()) {
1679 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI operand offset unknown "
1680 << *CurPtr <<
" in " << *
PHI <<
"\n");
1681 Follow = !UsrOI.isUnknown();
1687 if (UsrOI == PtrOI) {
1688 assert(!PtrOI.isUnassigned() &&
1689 "Cannot assign if the current Ptr was not visited!");
1690 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI is invariant (so far)");
1700 auto It = OffsetInfoMap.
find(CurPtrBase);
1701 if (It == OffsetInfoMap.
end()) {
1702 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI operand is too complex "
1703 << *CurPtr <<
" in " << *
PHI
1704 <<
" (base: " << *CurPtrBase <<
")\n");
1719 *
PHI->getFunction());
1721 auto BaseOI = It->getSecond();
1722 BaseOI.addToAll(
Offset.getZExtValue());
1723 if (IsFirstPHIUser || BaseOI == UsrOI) {
1724 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI is invariant " << *CurPtr
1725 <<
" in " << *Usr <<
"\n");
1726 return HandlePassthroughUser(Usr, CurPtr, Follow);
1730 dbgs() <<
"[AAPointerInfo] PHI operand pointer offset mismatch "
1731 << *CurPtr <<
" in " << *
PHI <<
"\n");
1742 if (
auto *LoadI = dyn_cast<LoadInst>(Usr)) {
1750 if (!handleAccess(
A, *LoadI,
nullptr, AK,
1751 OffsetInfoMap[CurPtr].Offsets, Changed,
1756 if (
auto *
II = dyn_cast<IntrinsicInst>(&
I))
1757 return II->isAssumeLikeIntrinsic();
1768 }
while (FromI && FromI != ToI);
1774 if (IntrI.getIntrinsicID() != Intrinsic::assume)
1777 if (IntrI.getParent() == BB) {
1778 if (IsImpactedInRange(LoadI->getNextNonDebugInstruction(), &IntrI))
1784 if ((*PredIt) != BB)
1789 if (SuccBB == IntrBB)
1791 if (isa<UnreachableInst>(SuccBB->getTerminator()))
1795 if (IsImpactedInRange(LoadI->getNextNonDebugInstruction(),
1798 if (IsImpactedInRange(&IntrBB->
front(), &IntrI))
1804 std::pair<Value *, IntrinsicInst *> Assumption;
1805 for (
const Use &LoadU : LoadI->
uses()) {
1806 if (
auto *CmpI = dyn_cast<CmpInst>(LoadU.getUser())) {
1807 if (!CmpI->isEquality() || !CmpI->isTrueWhenEqual())
1809 for (
const Use &CmpU : CmpI->
uses()) {
1810 if (
auto *IntrI = dyn_cast<IntrinsicInst>(CmpU.getUser())) {
1811 if (!IsValidAssume(*IntrI))
1813 int Idx = CmpI->getOperandUse(0) == LoadU;
1814 Assumption = {CmpI->getOperand(
Idx), IntrI};
1819 if (Assumption.first)
1824 if (!Assumption.first || !Assumption.second)
1828 << *Assumption.second <<
": " << *LoadI
1829 <<
" == " << *Assumption.first <<
"\n");
1830 bool UsedAssumedInformation =
false;
1831 std::optional<Value *>
Content =
nullptr;
1832 if (Assumption.first)
1834 A.getAssumedSimplified(*Assumption.first, *
this,
1836 return handleAccess(
1837 A, *Assumption.second,
Content, AccessKind::AK_ASSUMPTION,
1838 OffsetInfoMap[CurPtr].Offsets, Changed, *LoadI->getType());
1843 for (
auto *OtherOp : OtherOps) {
1844 if (OtherOp == CurPtr) {
1847 <<
"[AAPointerInfo] Escaping use in store like instruction " <<
I
1859 bool UsedAssumedInformation =
false;
1860 std::optional<Value *>
Content =
nullptr;
1864 return handleAccess(
A,
I,
Content, AK, OffsetInfoMap[CurPtr].Offsets,
1868 if (
auto *StoreI = dyn_cast<StoreInst>(Usr))
1869 return HandleStoreLike(*StoreI, StoreI->getValueOperand(),
1870 *StoreI->getValueOperand()->getType(),
1871 {StoreI->getValueOperand()}, AccessKind::AK_W);
1872 if (
auto *RMWI = dyn_cast<AtomicRMWInst>(Usr))
1873 return HandleStoreLike(*RMWI,
nullptr, *RMWI->getValOperand()->getType(),
1874 {RMWI->getValOperand()}, AccessKind::AK_RW);
1875 if (
auto *CXI = dyn_cast<AtomicCmpXchgInst>(Usr))
1876 return HandleStoreLike(
1877 *CXI,
nullptr, *CXI->getNewValOperand()->getType(),
1878 {CXI->getCompareOperand(), CXI->getNewValOperand()},
1881 if (
auto *CB = dyn_cast<CallBase>(Usr)) {
1885 A.getInfoCache().getTargetLibraryInfoForFunction(*CB->
getFunction());
1896 translateAndAddState(
A, *CSArgPI, OffsetInfoMap[CurPtr], *CB) |
1898 return isValidState();
1900 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Call user not handled " << *CB
1906 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] User not handled " << *Usr <<
"\n");
1909 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
1910 assert(OffsetInfoMap.
count(OldU) &&
"Old use should be known already!");
1911 assert(!OffsetInfoMap[OldU].isUnassigned() &&
"Old use should be assinged");
1912 if (OffsetInfoMap.
count(NewU)) {
1914 if (!(OffsetInfoMap[NewU] == OffsetInfoMap[OldU])) {
1915 dbgs() <<
"[AAPointerInfo] Equivalent use callback failed: "
1916 << OffsetInfoMap[NewU] <<
" vs " << OffsetInfoMap[OldU]
1920 return OffsetInfoMap[NewU] == OffsetInfoMap[OldU];
1923 return HandlePassthroughUser(NewU.get(), OldU.
get(), Unused);
1925 if (!
A.checkForAllUses(UsePred, *
this, AssociatedValue,
1927 true, EquivalentUseCB)) {
1928 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Check for all uses failed, abort!\n");
1929 return indicatePessimisticFixpoint();
1933 dbgs() <<
"Accesses by bin after update:\n";
1940struct AAPointerInfoReturned final : AAPointerInfoImpl {
1942 : AAPointerInfoImpl(IRP,
A) {}
1946 return indicatePessimisticFixpoint();
1950 void trackStatistics()
const override {
1951 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1955struct AAPointerInfoArgument final : AAPointerInfoFloating {
1957 : AAPointerInfoFloating(IRP,
A) {}
1960 void trackStatistics()
const override {
1961 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1965struct AAPointerInfoCallSiteArgument final : AAPointerInfoFloating {
1967 : AAPointerInfoFloating(IRP,
A) {}
1971 using namespace AA::PointerInfo;
1975 if (
auto *
MI = dyn_cast_or_null<MemIntrinsic>(getCtxI())) {
1979 LengthVal =
Length->getSExtValue();
1980 unsigned ArgNo = getIRPosition().getCallSiteArgNo();
1983 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Unhandled memory intrinsic "
1985 return indicatePessimisticFixpoint();
1988 ArgNo == 0 ? AccessKind::AK_MUST_WRITE : AccessKind::AK_MUST_READ;
1990 Changed | addAccess(
A, {0, LengthVal}, *
MI,
nullptr,
Kind,
nullptr);
1993 dbgs() <<
"Accesses by bin after update:\n";
2004 Argument *Arg = getAssociatedArgument();
2009 if (ArgAA && ArgAA->getState().isValidState())
2010 return translateAndAddStateFromCallee(
A, *ArgAA,
2011 *cast<CallBase>(getCtxI()));
2013 return indicatePessimisticFixpoint();
2016 bool IsKnownNoCapture;
2017 if (!AA::hasAssumedIRAttr<Attribute::NoCapture>(
2018 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNoCapture))
2019 return indicatePessimisticFixpoint();
2021 bool IsKnown =
false;
2023 return ChangeStatus::UNCHANGED;
2026 ReadOnly ? AccessKind::AK_MAY_READ : AccessKind::AK_MAY_READ_WRITE;
2032 void trackStatistics()
const override {
2033 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
2037struct AAPointerInfoCallSiteReturned final : AAPointerInfoFloating {
2039 : AAPointerInfoFloating(IRP,
A) {}
2042 void trackStatistics()
const override {
2043 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
2057 assert(!AA::hasAssumedIRAttr<Attribute::NoUnwind>(
2058 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2062 const std::string getAsStr(
Attributor *
A)
const override {
2063 return getAssumed() ?
"nounwind" :
"may-unwind";
2069 (
unsigned)Instruction::Invoke, (
unsigned)Instruction::CallBr,
2070 (
unsigned)Instruction::Call, (
unsigned)Instruction::CleanupRet,
2071 (
unsigned)Instruction::CatchSwitch, (
unsigned)Instruction::Resume};
2074 if (!
I.mayThrow(
true))
2077 if (
const auto *CB = dyn_cast<CallBase>(&
I)) {
2078 bool IsKnownNoUnwind;
2079 return AA::hasAssumedIRAttr<Attribute::NoUnwind>(
2086 bool UsedAssumedInformation =
false;
2087 if (!
A.checkForAllInstructions(CheckForNoUnwind, *
this, Opcodes,
2088 UsedAssumedInformation))
2089 return indicatePessimisticFixpoint();
2091 return ChangeStatus::UNCHANGED;
2095struct AANoUnwindFunction final :
public AANoUnwindImpl {
2097 : AANoUnwindImpl(IRP,
A) {}
2104struct AANoUnwindCallSite final
2105 : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl> {
2107 : AACalleeToCallSite<
AANoUnwind, AANoUnwindImpl>(IRP,
A) {}
2118 case Intrinsic::nvvm_barrier0:
2119 case Intrinsic::nvvm_barrier0_and:
2120 case Intrinsic::nvvm_barrier0_or:
2121 case Intrinsic::nvvm_barrier0_popc:
2123 case Intrinsic::amdgcn_s_barrier:
2124 if (ExecutedAligned)
2137 if (
auto *FI = dyn_cast<FenceInst>(
I))
2140 if (
auto *AI = dyn_cast<AtomicCmpXchgInst>(
I)) {
2147 switch (
I->getOpcode()) {
2148 case Instruction::AtomicRMW:
2149 Ordering = cast<AtomicRMWInst>(
I)->getOrdering();
2151 case Instruction::Store:
2152 Ordering = cast<StoreInst>(
I)->getOrdering();
2154 case Instruction::Load:
2155 Ordering = cast<LoadInst>(
I)->getOrdering();
2159 "New atomic operations need to be known in the attributor.");
2170 if (
auto *
MI = dyn_cast<MemIntrinsic>(
I))
2171 return !
MI->isVolatile();
2182 assert(!AA::hasAssumedIRAttr<Attribute::NoSync>(
A,
nullptr, getIRPosition(),
2183 DepClassTy::NONE, IsKnown));
2187 const std::string getAsStr(
Attributor *
A)
const override {
2188 return getAssumed() ?
"nosync" :
"may-sync";
2204 if (
I.mayReadOrWriteMemory())
2209 if (AA::hasAssumedIRAttr<Attribute::NoSync>(
2218 bool UsedAssumedInformation =
false;
2219 if (!
A.checkForAllReadWriteInstructions(CheckRWInstForNoSync, *
this,
2220 UsedAssumedInformation) ||
2221 !
A.checkForAllCallLikeInstructions(CheckForNoSync, *
this,
2222 UsedAssumedInformation))
2223 return indicatePessimisticFixpoint();
2228struct AANoSyncFunction final :
public AANoSyncImpl {
2230 : AANoSyncImpl(IRP,
A) {}
2237struct AANoSyncCallSite final : AACalleeToCallSite<AANoSync, AANoSyncImpl> {
2239 : AACalleeToCallSite<
AANoSync, AANoSyncImpl>(IRP,
A) {}
2249struct AANoFreeImpl :
public AANoFree {
2255 assert(!AA::hasAssumedIRAttr<Attribute::NoFree>(
A,
nullptr, getIRPosition(),
2256 DepClassTy::NONE, IsKnown));
2264 return AA::hasAssumedIRAttr<Attribute::NoFree>(
2266 DepClassTy::REQUIRED, IsKnown);
2269 bool UsedAssumedInformation =
false;
2270 if (!
A.checkForAllCallLikeInstructions(CheckForNoFree, *
this,
2271 UsedAssumedInformation))
2272 return indicatePessimisticFixpoint();
2273 return ChangeStatus::UNCHANGED;
2277 const std::string getAsStr(
Attributor *
A)
const override {
2278 return getAssumed() ?
"nofree" :
"may-free";
2282struct AANoFreeFunction final :
public AANoFreeImpl {
2284 : AANoFreeImpl(IRP,
A) {}
2291struct AANoFreeCallSite final : AACalleeToCallSite<AANoFree, AANoFreeImpl> {
2293 : AACalleeToCallSite<
AANoFree, AANoFreeImpl>(IRP,
A) {}
2300struct AANoFreeFloating : AANoFreeImpl {
2302 : AANoFreeImpl(IRP,
A) {}
2312 if (AA::hasAssumedIRAttr<Attribute::NoFree>(
A,
this,
2314 DepClassTy::OPTIONAL, IsKnown))
2315 return ChangeStatus::UNCHANGED;
2317 Value &AssociatedValue = getIRPosition().getAssociatedValue();
2318 auto Pred = [&](
const Use &
U,
bool &Follow) ->
bool {
2320 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
2328 return AA::hasAssumedIRAttr<Attribute::NoFree>(
2330 DepClassTy::REQUIRED, IsKnown);
2333 if (isa<GetElementPtrInst>(UserI) || isa<PHINode>(UserI) ||
2334 isa<SelectInst>(UserI)) {
2338 if (isa<StoreInst>(UserI) || isa<LoadInst>(UserI) ||
2339 isa<ReturnInst>(UserI))
2345 if (!
A.checkForAllUses(Pred, *
this, AssociatedValue))
2346 return indicatePessimisticFixpoint();
2348 return ChangeStatus::UNCHANGED;
2353struct AANoFreeArgument final : AANoFreeFloating {
2355 : AANoFreeFloating(IRP,
A) {}
2362struct AANoFreeCallSiteArgument final : AANoFreeFloating {
2364 : AANoFreeFloating(IRP,
A) {}
2372 Argument *Arg = getAssociatedArgument();
2374 return indicatePessimisticFixpoint();
2377 if (AA::hasAssumedIRAttr<Attribute::NoFree>(
A,
this, ArgPos,
2378 DepClassTy::REQUIRED, IsKnown))
2379 return ChangeStatus::UNCHANGED;
2380 return indicatePessimisticFixpoint();
2388struct AANoFreeReturned final : AANoFreeFloating {
2390 : AANoFreeFloating(IRP,
A) {
2405 void trackStatistics()
const override {}
2409struct AANoFreeCallSiteReturned final : AANoFreeFloating {
2411 : AANoFreeFloating(IRP,
A) {}
2414 return ChangeStatus::UNCHANGED;
2425 bool IgnoreSubsumingPositions) {
2427 AttrKinds.
push_back(Attribute::NonNull);
2430 AttrKinds.
push_back(Attribute::Dereferenceable);
2431 if (
A.hasAttr(IRP, AttrKinds, IgnoreSubsumingPositions, Attribute::NonNull))
2438 if (!Fn->isDeclaration()) {
2448 bool UsedAssumedInformation =
false;
2449 if (!
A.checkForAllInstructions(
2451 Worklist.push_back({*cast<ReturnInst>(I).getReturnValue(), &I});
2455 UsedAssumedInformation,
false,
true))
2467 Attribute::NonNull)});
2472static int64_t getKnownNonNullAndDerefBytesForUse(
2474 const Use *U,
const Instruction *
I,
bool &IsNonNull,
bool &TrackUse) {
2477 const Value *UseV =
U->get();
2484 if (isa<CastInst>(
I)) {
2489 if (isa<GetElementPtrInst>(
I)) {
2499 if (
const auto *CB = dyn_cast<CallBase>(
I)) {
2502 U, {Attribute::NonNull, Attribute::Dereferenceable})) {
2519 bool IsKnownNonNull;
2520 AA::hasAssumedIRAttr<Attribute::NonNull>(
A, &QueryingAA, IRP,
2522 IsNonNull |= IsKnownNonNull;
2529 if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
2530 Loc->Size.isScalable() ||
I->isVolatile())
2536 if (
Base &&
Base == &AssociatedValue) {
2537 int64_t DerefBytes = Loc->Size.getValue() +
Offset;
2539 return std::max(int64_t(0), DerefBytes);
2546 int64_t DerefBytes = Loc->Size.getValue();
2548 return std::max(int64_t(0), DerefBytes);
2559 Value &
V = *getAssociatedValue().stripPointerCasts();
2560 if (isa<ConstantPointerNull>(V)) {
2561 indicatePessimisticFixpoint();
2566 followUsesInMBEC(*
this,
A, getState(), *CtxI);
2572 bool IsNonNull =
false;
2573 bool TrackUse =
false;
2574 getKnownNonNullAndDerefBytesForUse(
A, *
this, getAssociatedValue(), U,
I,
2575 IsNonNull, TrackUse);
2576 State.setKnown(IsNonNull);
2581 const std::string getAsStr(
Attributor *
A)
const override {
2582 return getAssumed() ?
"nonnull" :
"may-null";
2587struct AANonNullFloating :
public AANonNullImpl {
2589 : AANonNullImpl(IRP,
A) {}
2594 bool IsKnownNonNull;
2595 return AA::hasAssumedIRAttr<Attribute::NonNull>(
2596 A, *
this, IRP, DepClassTy::OPTIONAL, IsKnownNonNull);
2600 bool UsedAssumedInformation =
false;
2601 Value *AssociatedValue = &getAssociatedValue();
2603 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
2608 Values.
size() != 1 || Values.
front().getValue() != AssociatedValue;
2612 if (
auto *
PHI = dyn_cast<PHINode>(AssociatedValue))
2614 return AA::hasAssumedIRAttr<Attribute::NonNull>(
2615 A, this, IRPosition::value(*Op), DepClassTy::OPTIONAL,
2618 return ChangeStatus::UNCHANGED;
2619 if (
auto *
Select = dyn_cast<SelectInst>(AssociatedValue))
2620 if (AA::hasAssumedIRAttr<Attribute::NonNull>(
2622 DepClassTy::OPTIONAL, IsKnown) &&
2623 AA::hasAssumedIRAttr<Attribute::NonNull>(
2625 DepClassTy::OPTIONAL, IsKnown))
2626 return ChangeStatus::UNCHANGED;
2633 if (AVIRP == getIRPosition() || !CheckIRP(AVIRP))
2634 return indicatePessimisticFixpoint();
2635 return ChangeStatus::UNCHANGED;
2638 for (
const auto &VAC : Values)
2640 return indicatePessimisticFixpoint();
2642 return ChangeStatus::UNCHANGED;
2650struct AANonNullReturned final
2651 : AAReturnedFromReturnedValues<AANonNull, AANonNull, AANonNull::StateType,
2652 false, AANonNull::IRAttributeKind, false> {
2659 const std::string getAsStr(
Attributor *
A)
const override {
2660 return getAssumed() ?
"nonnull" :
"may-null";
2668struct AANonNullArgument final
2669 : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl> {
2671 : AAArgumentFromCallSiteArguments<
AANonNull, AANonNullImpl>(IRP,
A) {}
2677struct AANonNullCallSiteArgument final : AANonNullFloating {
2679 : AANonNullFloating(IRP,
A) {}
2686struct AANonNullCallSiteReturned final
2687 : AACalleeToCallSite<AANonNull, AANonNullImpl> {
2689 : AACalleeToCallSite<
AANonNull, AANonNullImpl>(IRP,
A) {}
2705 assert(!AA::hasAssumedIRAttr<Attribute::MustProgress>(
2706 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2711 const std::string getAsStr(
Attributor *
A)
const override {
2712 return getAssumed() ?
"mustprogress" :
"may-not-progress";
2716struct AAMustProgressFunction final : AAMustProgressImpl {
2718 : AAMustProgressImpl(IRP,
A) {}
2723 if (AA::hasAssumedIRAttr<Attribute::WillReturn>(
2724 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnown)) {
2726 return indicateOptimisticFixpoint();
2727 return ChangeStatus::UNCHANGED;
2732 bool IsKnownMustProgress;
2733 return AA::hasAssumedIRAttr<Attribute::MustProgress>(
2734 A,
this, IPos, DepClassTy::REQUIRED, IsKnownMustProgress,
2738 bool AllCallSitesKnown =
true;
2739 if (!
A.checkForAllCallSites(CheckForMustProgress, *
this,
2742 return indicatePessimisticFixpoint();
2744 return ChangeStatus::UNCHANGED;
2748 void trackStatistics()
const override {
2754struct AAMustProgressCallSite final : AAMustProgressImpl {
2756 : AAMustProgressImpl(IRP,
A) {}
2765 bool IsKnownMustProgress;
2766 if (!AA::hasAssumedIRAttr<Attribute::MustProgress>(
2767 A,
this, FnPos, DepClassTy::REQUIRED, IsKnownMustProgress))
2768 return indicatePessimisticFixpoint();
2769 return ChangeStatus::UNCHANGED;
2773 void trackStatistics()
const override {
2788 assert(!AA::hasAssumedIRAttr<Attribute::NoRecurse>(
2789 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2794 const std::string getAsStr(
Attributor *
A)
const override {
2795 return getAssumed() ?
"norecurse" :
"may-recurse";
2799struct AANoRecurseFunction final : AANoRecurseImpl {
2801 : AANoRecurseImpl(IRP,
A) {}
2808 bool IsKnownNoRecurse;
2809 if (!AA::hasAssumedIRAttr<Attribute::NoRecurse>(
2812 DepClassTy::NONE, IsKnownNoRecurse))
2814 return IsKnownNoRecurse;
2816 bool UsedAssumedInformation =
false;
2817 if (
A.checkForAllCallSites(CallSitePred, *
this,
true,
2818 UsedAssumedInformation)) {
2824 if (!UsedAssumedInformation)
2825 indicateOptimisticFixpoint();
2826 return ChangeStatus::UNCHANGED;
2831 DepClassTy::REQUIRED);
2832 if (EdgeReachability && EdgeReachability->
canReach(
A, *getAnchorScope()))
2833 return indicatePessimisticFixpoint();
2834 return ChangeStatus::UNCHANGED;
2841struct AANoRecurseCallSite final
2842 : AACalleeToCallSite<AANoRecurse, AANoRecurseImpl> {
2844 : AACalleeToCallSite<
AANoRecurse, AANoRecurseImpl>(IRP,
A) {}
2859 const std::string getAsStr(
Attributor *
A)
const override {
2860 return getAssumed() ?
"non-convergent" :
"may-be-convergent";
2864struct AANonConvergentFunction final : AANonConvergentImpl {
2866 : AANonConvergentImpl(IRP,
A) {}
2872 auto CalleeIsNotConvergent = [&](
Instruction &Inst) {
2873 CallBase &CB = cast<CallBase>(Inst);
2875 if (!Callee ||
Callee->isIntrinsic()) {
2878 if (
Callee->isDeclaration()) {
2879 return !
Callee->hasFnAttribute(Attribute::Convergent);
2886 bool UsedAssumedInformation =
false;
2887 if (!
A.checkForAllCallLikeInstructions(CalleeIsNotConvergent, *
this,
2888 UsedAssumedInformation)) {
2889 return indicatePessimisticFixpoint();
2891 return ChangeStatus::UNCHANGED;
2895 if (isKnownNotConvergent() &&
2896 A.hasAttr(getIRPosition(), Attribute::Convergent)) {
2897 A.removeAttrs(getIRPosition(), {Attribute::Convergent});
2898 return ChangeStatus::CHANGED;
2900 return ChangeStatus::UNCHANGED;
2917 const size_t UBPrevSize = KnownUBInsts.size();
2918 const size_t NoUBPrevSize = AssumedNoUBInsts.size();
2922 if (
I.isVolatile() &&
I.mayWriteToMemory())
2926 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
2935 "Expected pointer operand of memory accessing instruction");
2939 std::optional<Value *> SimplifiedPtrOp =
2940 stopOnUndefOrAssumed(
A, PtrOp, &
I);
2941 if (!SimplifiedPtrOp || !*SimplifiedPtrOp)
2943 const Value *PtrOpVal = *SimplifiedPtrOp;
2948 if (!isa<ConstantPointerNull>(PtrOpVal)) {
2949 AssumedNoUBInsts.insert(&
I);
2961 AssumedNoUBInsts.insert(&
I);
2963 KnownUBInsts.insert(&
I);
2972 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
2976 auto *BrInst = cast<BranchInst>(&
I);
2979 if (BrInst->isUnconditional())
2984 std::optional<Value *> SimplifiedCond =
2985 stopOnUndefOrAssumed(
A, BrInst->getCondition(), BrInst);
2986 if (!SimplifiedCond || !*SimplifiedCond)
2988 AssumedNoUBInsts.insert(&
I);
2996 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
3005 for (
unsigned idx = 0; idx < CB.
arg_size(); idx++) {
3011 if (idx >=
Callee->arg_size())
3023 bool IsKnownNoUndef;
3024 AA::hasAssumedIRAttr<Attribute::NoUndef>(
3025 A,
this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNoUndef);
3026 if (!IsKnownNoUndef)
3028 bool UsedAssumedInformation =
false;
3029 std::optional<Value *> SimplifiedVal =
3032 if (UsedAssumedInformation)
3034 if (SimplifiedVal && !*SimplifiedVal)
3036 if (!SimplifiedVal || isa<UndefValue>(**SimplifiedVal)) {
3037 KnownUBInsts.insert(&
I);
3041 !isa<ConstantPointerNull>(**SimplifiedVal))
3043 bool IsKnownNonNull;
3044 AA::hasAssumedIRAttr<Attribute::NonNull>(
3045 A,
this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNonNull);
3047 KnownUBInsts.insert(&
I);
3053 auto &RI = cast<ReturnInst>(
I);
3056 std::optional<Value *> SimplifiedRetValue =
3057 stopOnUndefOrAssumed(
A, RI.getReturnValue(), &
I);
3058 if (!SimplifiedRetValue || !*SimplifiedRetValue)
3075 if (isa<ConstantPointerNull>(*SimplifiedRetValue)) {
3076 bool IsKnownNonNull;
3077 AA::hasAssumedIRAttr<Attribute::NonNull>(
3081 KnownUBInsts.insert(&
I);
3087 bool UsedAssumedInformation =
false;
3088 A.checkForAllInstructions(InspectMemAccessInstForUB, *
this,
3089 {Instruction::Load, Instruction::Store,
3090 Instruction::AtomicCmpXchg,
3091 Instruction::AtomicRMW},
3092 UsedAssumedInformation,
3094 A.checkForAllInstructions(InspectBrInstForUB, *
this, {Instruction::Br},
3095 UsedAssumedInformation,
3097 A.checkForAllCallLikeInstructions(InspectCallSiteForUB, *
this,
3098 UsedAssumedInformation);
3102 if (!getAnchorScope()->getReturnType()->isVoidTy()) {
3104 if (!
A.isAssumedDead(ReturnIRP,
this,
nullptr, UsedAssumedInformation)) {
3105 bool IsKnownNoUndef;
3106 AA::hasAssumedIRAttr<Attribute::NoUndef>(
3107 A,
this, ReturnIRP, DepClassTy::NONE, IsKnownNoUndef);
3109 A.checkForAllInstructions(InspectReturnInstForUB, *
this,
3110 {Instruction::Ret}, UsedAssumedInformation,
3115 if (NoUBPrevSize != AssumedNoUBInsts.size() ||
3116 UBPrevSize != KnownUBInsts.size())
3117 return ChangeStatus::CHANGED;
3118 return ChangeStatus::UNCHANGED;
3122 return KnownUBInsts.count(
I);
3125 bool isAssumedToCauseUB(
Instruction *
I)
const override {
3132 switch (
I->getOpcode()) {
3133 case Instruction::Load:
3134 case Instruction::Store:
3135 case Instruction::AtomicCmpXchg:
3136 case Instruction::AtomicRMW:
3137 return !AssumedNoUBInsts.count(
I);
3138 case Instruction::Br: {
3139 auto *BrInst = cast<BranchInst>(
I);
3140 if (BrInst->isUnconditional())
3142 return !AssumedNoUBInsts.count(
I);
3151 if (KnownUBInsts.empty())
3152 return ChangeStatus::UNCHANGED;
3154 A.changeToUnreachableAfterManifest(
I);
3155 return ChangeStatus::CHANGED;
3159 const std::string getAsStr(
Attributor *
A)
const override {
3160 return getAssumed() ?
"undefined-behavior" :
"no-ub";
3205 bool UsedAssumedInformation =
false;
3206 std::optional<Value *> SimplifiedV =
3209 if (!UsedAssumedInformation) {
3214 KnownUBInsts.insert(
I);
3215 return std::nullopt;
3221 if (isa<UndefValue>(V)) {
3222 KnownUBInsts.insert(
I);
3223 return std::nullopt;
3229struct AAUndefinedBehaviorFunction final : AAUndefinedBehaviorImpl {
3231 : AAUndefinedBehaviorImpl(IRP,
A) {}
3234 void trackStatistics()
const override {
3236 "Number of instructions known to have UB");
3238 KnownUBInsts.size();
3259 if (SCCI.hasCycle())
3269 for (
auto *L : LI->getLoopsInPreorder()) {
3283 assert(!AA::hasAssumedIRAttr<Attribute::WillReturn>(
3284 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
3289 bool isImpliedByMustprogressAndReadonly(
Attributor &
A,
bool KnownOnly) {
3290 if (!
A.hasAttr(getIRPosition(), {Attribute::MustProgress}))
3295 return IsKnown || !KnownOnly;
3301 if (isImpliedByMustprogressAndReadonly(
A,
false))
3302 return ChangeStatus::UNCHANGED;
3307 if (AA::hasAssumedIRAttr<Attribute::WillReturn>(
3308 A,
this, IPos, DepClassTy::REQUIRED, IsKnown)) {
3314 bool IsKnownNoRecurse;
3315 return AA::hasAssumedIRAttr<Attribute::NoRecurse>(
3316 A,
this, IPos, DepClassTy::REQUIRED, IsKnownNoRecurse);
3319 bool UsedAssumedInformation =
false;
3320 if (!
A.checkForAllCallLikeInstructions(CheckForWillReturn, *
this,
3321 UsedAssumedInformation))
3322 return indicatePessimisticFixpoint();
3324 return ChangeStatus::UNCHANGED;
3328 const std::string getAsStr(
Attributor *
A)
const override {
3329 return getAssumed() ?
"willreturn" :
"may-noreturn";
3333struct AAWillReturnFunction final : AAWillReturnImpl {
3335 : AAWillReturnImpl(IRP,
A) {}
3339 AAWillReturnImpl::initialize(
A);
3342 assert(
F &&
"Did expect an anchor function");
3343 if (
F->isDeclaration() || mayContainUnboundedCycle(*
F,
A))
3344 indicatePessimisticFixpoint();
3352struct AAWillReturnCallSite final
3353 : AACalleeToCallSite<AAWillReturn, AAWillReturnImpl> {
3355 : AACalleeToCallSite<
AAWillReturn, AAWillReturnImpl>(IRP,
A) {}
3359 if (isImpliedByMustprogressAndReadonly(
A,
false))
3360 return ChangeStatus::UNCHANGED;
3362 return AACalleeToCallSite::updateImpl(
A);
3384 const ToTy *To =
nullptr;
3394 assert(Hash == 0 &&
"Computed hash twice!");
3398 detail::combineHashValue(PairDMI ::getHashValue({
From, To}),
3399 InstSetDMI::getHashValue(ExclusionSet));
3409 :
From(&
From), To(&To), ExclusionSet(ES) {
3411 if (!ES || ES->
empty()) {
3412 ExclusionSet =
nullptr;
3413 }
else if (MakeUnique) {
3414 ExclusionSet =
A.getInfoCache().getOrCreateUniqueBlockExecutionSet(ES);
3419 :
From(RQI.
From), To(RQI.To), ExclusionSet(RQI.ExclusionSet) {}
3432 return &TombstoneKey;
3439 if (!PairDMI::isEqual({
LHS->From,
LHS->To}, {
RHS->From,
RHS->To}))
3441 return InstSetDMI::isEqual(
LHS->ExclusionSet,
RHS->ExclusionSet);
3445#define DefineKeys(ToTy) \
3447 ReachabilityQueryInfo<ToTy> \
3448 DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::EmptyKey = \
3449 ReachabilityQueryInfo<ToTy>( \
3450 DenseMapInfo<const Instruction *>::getEmptyKey(), \
3451 DenseMapInfo<const ToTy *>::getEmptyKey()); \
3453 ReachabilityQueryInfo<ToTy> \
3454 DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::TombstoneKey = \
3455 ReachabilityQueryInfo<ToTy>( \
3456 DenseMapInfo<const Instruction *>::getTombstoneKey(), \
3457 DenseMapInfo<const ToTy *>::getTombstoneKey());
3466template <
typename BaseTy,
typename ToTy>
3467struct CachedReachabilityAA :
public BaseTy {
3473 bool isQueryAA()
const override {
return true; }
3478 for (
unsigned u = 0,
e = QueryVector.size();
u <
e; ++
u) {
3479 RQITy *RQI = QueryVector[
u];
3480 if (RQI->Result == RQITy::Reachable::No &&
3482 Changed = ChangeStatus::CHANGED;
3488 bool IsTemporaryRQI) = 0;
3491 RQITy &RQI,
bool UsedExclusionSet,
bool IsTemporaryRQI) {
3496 QueryCache.erase(&RQI);
3502 if (
Result == RQITy::Reachable::Yes || !UsedExclusionSet) {
3503 RQITy PlainRQI(RQI.From, RQI.To);
3504 if (!QueryCache.count(&PlainRQI)) {
3505 RQITy *RQIPtr =
new (
A.Allocator) RQITy(RQI.From, RQI.To);
3507 QueryVector.push_back(RQIPtr);
3508 QueryCache.insert(RQIPtr);
3513 if (IsTemporaryRQI &&
Result != RQITy::Reachable::Yes && UsedExclusionSet) {
3514 assert((!RQI.ExclusionSet || !RQI.ExclusionSet->empty()) &&
3515 "Did not expect empty set!");
3516 RQITy *RQIPtr =
new (
A.Allocator)
3517 RQITy(
A, *RQI.From, *RQI.To, RQI.ExclusionSet,
true);
3518 assert(RQIPtr->Result == RQITy::Reachable::No &&
"Already reachable?");
3520 assert(!QueryCache.count(RQIPtr));
3521 QueryVector.push_back(RQIPtr);
3522 QueryCache.insert(RQIPtr);
3525 if (
Result == RQITy::Reachable::No && IsTemporaryRQI)
3526 A.registerForUpdate(*
this);
3527 return Result == RQITy::Reachable::Yes;
3530 const std::string getAsStr(
Attributor *
A)
const override {
3532 return "#queries(" + std::to_string(QueryVector.size()) +
")";
3535 bool checkQueryCache(
Attributor &
A, RQITy &StackRQI,
3536 typename RQITy::Reachable &
Result) {
3537 if (!this->getState().isValidState()) {
3538 Result = RQITy::Reachable::Yes;
3544 if (StackRQI.ExclusionSet) {
3545 RQITy PlainRQI(StackRQI.From, StackRQI.To);
3546 auto It = QueryCache.find(&PlainRQI);
3547 if (It != QueryCache.end() && (*It)->Result == RQITy::Reachable::No) {
3548 Result = RQITy::Reachable::No;
3553 auto It = QueryCache.find(&StackRQI);
3554 if (It != QueryCache.end()) {
3561 QueryCache.insert(&StackRQI);
3570struct AAIntraFnReachabilityFunction final
3571 :
public CachedReachabilityAA<AAIntraFnReachability, Instruction> {
3572 using Base = CachedReachabilityAA<AAIntraFnReachability, Instruction>;
3579 bool isAssumedReachable(
3582 auto *NonConstThis =
const_cast<AAIntraFnReachabilityFunction *
>(
this);
3586 RQITy StackRQI(
A,
From, To, ExclusionSet,
false);
3587 typename RQITy::Reachable
Result;
3588 if (!NonConstThis->checkQueryCache(
A, StackRQI, Result))
3589 return NonConstThis->isReachableImpl(
A, StackRQI,
3591 return Result == RQITy::Reachable::Yes;
3598 A.getAAFor<
AAIsDead>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
3601 [&](
const auto &DeadEdge) {
3602 return LivenessAA->isEdgeDead(DeadEdge.first,
3606 return LivenessAA->isAssumedDead(BB);
3608 return ChangeStatus::UNCHANGED;
3612 return Base::updateImpl(
A);
3616 bool IsTemporaryRQI)
override {
3618 bool UsedExclusionSet =
false;
3623 while (IP && IP != &To) {
3624 if (ExclusionSet && IP != Origin && ExclusionSet->
count(IP)) {
3625 UsedExclusionSet =
true;
3636 "Not an intra-procedural query!");
3640 if (FromBB == ToBB &&
3641 WillReachInBlock(*RQI.From, *RQI.To, RQI.ExclusionSet))
3642 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3647 if (!WillReachInBlock(ToBB->
front(), *RQI.To, RQI.ExclusionSet))
3648 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3653 if (RQI.ExclusionSet)
3654 for (
auto *
I : *RQI.ExclusionSet)
3655 if (
I->getFunction() == Fn)
3656 ExclusionBlocks.
insert(
I->getParent());
3659 if (ExclusionBlocks.
count(FromBB) &&
3662 return rememberResult(
A, RQITy::Reachable::No, RQI,
true, IsTemporaryRQI);
3665 A.getAAFor<
AAIsDead>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
3666 if (LivenessAA && LivenessAA->isAssumedDead(ToBB)) {
3667 DeadBlocks.insert(ToBB);
3668 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3677 while (!Worklist.
empty()) {
3679 if (!Visited.
insert(BB).second)
3682 if (LivenessAA && LivenessAA->isEdgeDead(BB, SuccBB)) {
3683 LocalDeadEdges.
insert({BB, SuccBB});
3688 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3691 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3694 if (ExclusionBlocks.
count(SuccBB)) {
3695 UsedExclusionSet =
true;
3702 DeadEdges.insert(LocalDeadEdges.
begin(), LocalDeadEdges.
end());
3703 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3708 void trackStatistics()
const override {}
3728 bool IgnoreSubsumingPositions) {
3729 assert(ImpliedAttributeKind == Attribute::NoAlias &&
3730 "Unexpected attribute kind");
3733 if (isa<AllocaInst>(Val))
3736 IgnoreSubsumingPositions =
true;
3739 if (isa<UndefValue>(Val))
3742 if (isa<ConstantPointerNull>(Val) &&
3747 if (
A.hasAttr(IRP, {Attribute::ByVal, Attribute::NoAlias},
3748 IgnoreSubsumingPositions, Attribute::NoAlias))
3758 "Noalias is a pointer attribute");
3761 const std::string getAsStr(
Attributor *
A)
const override {
3762 return getAssumed() ?
"noalias" :
"may-alias";
3767struct AANoAliasFloating final : AANoAliasImpl {
3769 : AANoAliasImpl(IRP,
A) {}
3774 return indicatePessimisticFixpoint();
3778 void trackStatistics()
const override {
3784struct AANoAliasArgument final
3785 : AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl> {
3786 using Base = AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl>;
3798 if (AA::hasAssumedIRAttr<Attribute::NoSync>(
3800 DepClassTy::OPTIONAL, IsKnownNoSycn))
3801 return Base::updateImpl(
A);
3806 return Base::updateImpl(
A);
3810 bool UsedAssumedInformation =
false;
3811 if (
A.checkForAllCallSites(
3813 true, UsedAssumedInformation))
3814 return Base::updateImpl(
A);
3822 return indicatePessimisticFixpoint();
3829struct AANoAliasCallSiteArgument final : AANoAliasImpl {
3831 : AANoAliasImpl(IRP,
A) {}
3837 const CallBase &CB,
unsigned OtherArgNo) {
3839 if (this->getCalleeArgNo() == (
int)OtherArgNo)
3851 if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadNone()) {
3852 A.recordDependence(*CBArgMemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3859 if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadOnly() &&
3861 A.recordDependence(MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3862 A.recordDependence(*CBArgMemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3868 AAR =
A.getInfoCache().getAnalysisResultForFunction<
AAManager>(
3872 bool IsAliasing = !AAR || !AAR->
isNoAlias(&getAssociatedValue(), ArgOp);
3874 "callsite arguments: "
3875 << getAssociatedValue() <<
" " << *ArgOp <<
" => "
3876 << (IsAliasing ?
"" :
"no-") <<
"alias \n");
3881 bool isKnownNoAliasDueToNoAliasPreservation(
3901 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
3912 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
3917 bool IsKnownNoCapture;
3918 if (AA::hasAssumedIRAttr<Attribute::NoCapture>(
3920 DepClassTy::OPTIONAL, IsKnownNoCapture))
3926 A, *UserI, *getCtxI(), *
this,
nullptr,
3927 [ScopeFn](
const Function &Fn) {
return &Fn != ScopeFn; }))
3935 case UseCaptureKind::NO_CAPTURE:
3937 case UseCaptureKind::MAY_CAPTURE:
3941 case UseCaptureKind::PASSTHROUGH:
3948 bool IsKnownNoCapture;
3950 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
3951 A,
this, VIRP, DepClassTy::NONE, IsKnownNoCapture,
false, &NoCaptureAA);
3952 if (!IsAssumedNoCapture &&
3954 if (!
A.checkForAllUses(UsePred, *
this, getAssociatedValue())) {
3956 dbgs() <<
"[AANoAliasCSArg] " << getAssociatedValue()
3957 <<
" cannot be noalias as it is potentially captured\n");
3962 A.recordDependence(*NoCaptureAA, *
this, DepClassTy::OPTIONAL);
3967 const auto &CB = cast<CallBase>(getAnchorValue());
3968 for (
unsigned OtherArgNo = 0; OtherArgNo < CB.
arg_size(); OtherArgNo++)
3969 if (mayAliasWithArgument(
A, AAR, MemBehaviorAA, CB, OtherArgNo))
3979 auto *MemBehaviorAA =
3982 A.recordDependence(*MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3983 return ChangeStatus::UNCHANGED;
3986 bool IsKnownNoAlias;
3988 if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
3989 A,
this, VIRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
3991 <<
" is not no-alias at the definition\n");
3992 return indicatePessimisticFixpoint();
3996 if (MemBehaviorAA &&
3997 isKnownNoAliasDueToNoAliasPreservation(
A, AAR, *MemBehaviorAA)) {
3999 dbgs() <<
"[AANoAlias] No-Alias deduced via no-alias preservation\n");
4000 return ChangeStatus::UNCHANGED;
4003 return indicatePessimisticFixpoint();
struct AANoAliasReturned final : AANoAliasImpl {
  AANoAliasReturned(const IRPosition &IRP, Attributor &A)
      : AANoAliasImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    auto CheckReturnValue = [&](Value &RV) -> bool {
      if (Constant *C = dyn_cast<Constant>(&RV))
        if (C->isNullValue() || isa<UndefValue>(C))
          return true;

      // For now, we can only deduce noalias if we have call sites.
      if (!isa<CallBase>(&RV))
        return false;

      const IRPosition &RVPos = IRPosition::value(RV);
      bool IsKnownNoAlias;
      if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
              A, this, RVPos, DepClassTy::REQUIRED, IsKnownNoAlias))
        return false;

      bool IsKnownNoCapture;
      const AANoCapture *NoCaptureAA = nullptr;
      bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
          A, this, RVPos, DepClassTy::REQUIRED, IsKnownNoCapture, false,
          &NoCaptureAA);
      return IsAssumedNoCapture ||
             (NoCaptureAA && NoCaptureAA->isAssumedNoCaptureMaybeReturned());
    };

    if (!A.checkForAllReturnedValues(CheckReturnValue, *this))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_FNRET_ATTR(noalias) }
};
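/// NoAlias attribute deduction for a call site return value; it simply
/// forwards the information from the callee return position.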
struct AANoAliasCallSiteReturned final
    : AACalleeToCallSite<AANoAlias, AANoAliasImpl> {
  AANoAliasCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoAlias, AANoAliasImpl>(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CSRET_ATTR(noalias); }
};
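/// -------------------- AAIsDead Abstract Attributes -------------------------

/// Common base for liveness of values; the queries below inspect the
/// HAS_NO_EFFECT, IS_REMOVABLE, and IS_DEAD bits of the AAIsDead state.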
struct AAIsDeadValueImpl : public AAIsDead {
  AAIsDeadValueImpl(const IRPosition &IRP, Attributor &A) : AAIsDead(IRP, A) {}

  /// See AAIsDead::isAssumedDead().
  bool isAssumedDead() const override { return isAssumed(IS_DEAD); }

  /// See AAIsDead::isKnownDead().
  bool isKnownDead() const override { return isKnown(IS_DEAD); }

  /// See AAIsDead::isAssumedDead(BasicBlock *).
  bool isAssumedDead(const BasicBlock *BB) const override { return false; }

  /// See AAIsDead::isKnownDead(BasicBlock *).
  bool isKnownDead(const BasicBlock *BB) const override { return false; }

  /// See AAIsDead::isAssumedDead(Instruction *).
  bool isAssumedDead(const Instruction *I) const override {
    return I == getCtxI() && isAssumedDead();
  }

  /// See AAIsDead::isKnownDead(Instruction *).
  bool isKnownDead(const Instruction *I) const override {
    return isAssumedDead(I) && isKnownDead();
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return isAssumedDead() ? "assumed-dead" : "assumed-live";
  }
  /// Check if all uses of \p V are assumed dead.
  bool areAllUsesAssumedDead(Attributor &A, Value &V) {
    // Callers might not check the type, void has no uses.
    if (V.getType()->isVoidTy() || V.use_empty())
      return true;

    // If we replace a value with a constant there are no uses left afterwards.
    if (!isa<Constant>(V)) {
      if (auto *I = dyn_cast<Instruction>(&V))
        if (!A.isRunOn(*I->getFunction()))
          return false;
      bool UsedAssumedInformation = false;
      std::optional<Constant *> C =
          A.getAssumedConstant(V, *this, UsedAssumedInformation);
      if (!C || *C)
        return true;
    }

    auto UsePred = [&](const Use &U, bool &Follow) { return false; };
    // Explicitly set the dependence class to required because we want a long
    // chain of N dependent instructions to be considered live as soon as one
    // is without going through N update cycles. This is not required for
    // correctness.
    return A.checkForAllUses(UsePred, *this, V, /* CheckBBLivenessOnly */ false,
                             DepClassTy::REQUIRED,
                             /* IgnoreDroppableUses */ false);
  }
  /// Determine if \p I is assumed to be side-effect free.
  bool isAssumedSideEffectFree(Attributor &A, Instruction *I) {
    if (!I || wouldInstructionBeTriviallyDead(I))
      return true;

    auto *CB = dyn_cast<CallBase>(I);
    if (!CB || isa<IntrinsicInst>(CB))
      return false;

    const IRPosition &CallIRP = IRPosition::callsite_function(*CB);

    bool IsKnownNoUnwind;
    if (!AA::hasAssumedIRAttr<Attribute::NoUnwind>(
            A, this, CallIRP, DepClassTy::OPTIONAL, IsKnownNoUnwind))
      return false;

    bool IsKnown;
    return AA::isAssumedReadOnly(A, CallIRP, *this, IsKnown);
  }
};
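/// Liveness information for a floating value, i.e., an instruction result.
/// Dead stores and no-op fences are handled specially because they have
/// effects but can still be removed.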
struct AAIsDeadFloating : public AAIsDeadValueImpl {
  AAIsDeadFloating(const IRPosition &IRP, Attributor &A)
      : AAIsDeadValueImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAIsDeadValueImpl::initialize(A);

    if (isa<UndefValue>(getAssociatedValue())) {
      indicatePessimisticFixpoint();
      return;
    }

    Instruction *I = dyn_cast<Instruction>(&getAssociatedValue());
    if (!isAssumedSideEffectFree(A, I)) {
      if (!isa_and_nonnull<StoreInst>(I) && !isa_and_nonnull<FenceInst>(I))
        indicatePessimisticFixpoint();
      else
        removeAssumedBits(HAS_NO_EFFECT);
    }
  }

  /// Check if the fence \p FI is known to be a no-op in its execution domain.
  bool isDeadFence(Attributor &A, FenceInst &FI) {
    const auto *ExecDomainAA = A.lookupAAFor<AAExecutionDomain>(
        IRPosition::function(*FI.getFunction()), *this, DepClassTy::NONE);
    if (!ExecDomainAA || !ExecDomainAA->isNoOpFence(FI))
      return false;
    A.recordDependence(*ExecDomainAA, *this, DepClassTy::OPTIONAL);
    return true;
  }
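  /// Check if the store \p SI is dead, i.e., all potential copies of the
  /// stored value are themselves assumed dead or only feed llvm.assume uses.
  /// If \p AssumeOnlyInst is given (during manifest), the assume-only users
  /// are collected there so they can be deleted together with the store.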
  bool isDeadStore(Attributor &A, StoreInst &SI,
                   SmallSetVector<Instruction *, 8> *AssumeOnlyInst = nullptr) {
    // The language reference now states a volatile store is not dead, so skip
    // them.
    if (SI.isVolatile())
      return false;

    // If we are collecting assumes to be deleted we are in the manifest stage.
    // It is problematic to collect the potential copies again now, so we use
    // the cached ones.
    bool UsedAssumedInformation = false;
    if (!AssumeOnlyInst) {
      PotentialCopies.clear();
      if (!AA::getPotentialCopiesOfStoredValue(A, SI, PotentialCopies, *this,
                                               UsedAssumedInformation)) {
        LLVM_DEBUG(
            dbgs()
            << "[AAIsDead] Could not determine potential copies of store!\n");
        return false;
      }
    }
    LLVM_DEBUG(dbgs() << "[AAIsDead] Store has " << PotentialCopies.size()
                      << " potential copies.\n");

    InformationCache &InfoCache = A.getInfoCache();
    return llvm::all_of(PotentialCopies, [&](Value *V) {
      if (A.isAssumedDead(IRPosition::value(*V), this, nullptr,
                          UsedAssumedInformation))
        return true;
      if (auto *LI = dyn_cast<LoadInst>(V)) {
        if (llvm::all_of(LI->uses(), [&](const Use &U) {
              auto &UserI = cast<Instruction>(*U.getUser());
              if (InfoCache.isOnlyUsedByAssume(UserI)) {
                if (AssumeOnlyInst)
                  AssumeOnlyInst->insert(&UserI);
                return true;
              }
              return A.isAssumedDead(U, this, nullptr, UsedAssumedInformation);
            }))
          return true;
      }
      LLVM_DEBUG(dbgs() << "[AAIsDead] Potential copy " << *V
                        << " is assumed live!\n");
      return false;
    });
  }
  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    Instruction *I = dyn_cast<Instruction>(&getAssociatedValue());
    if (isa_and_nonnull<StoreInst>(I))
      if (isValidState())
        return "assumed-dead-store";
    if (isa_and_nonnull<FenceInst>(I))
      if (isValidState())
        return "assumed-dead-fence";
    return AAIsDeadValueImpl::getAsStr(A);
  }
  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    Instruction *I = dyn_cast<Instruction>(&getAssociatedValue());
    if (auto *SI = dyn_cast_or_null<StoreInst>(I)) {
      if (!isDeadStore(A, *SI))
        return indicatePessimisticFixpoint();
    } else if (auto *FI = dyn_cast_or_null<FenceInst>(I)) {
      if (!isDeadFence(A, *FI))
        return indicatePessimisticFixpoint();
    } else {
      if (!isAssumedSideEffectFree(A, I))
        return indicatePessimisticFixpoint();
      if (!areAllUsesAssumedDead(A, getAssociatedValue()))
        return indicatePessimisticFixpoint();
    }
    return ChangeStatus::UNCHANGED;
  }
  bool isRemovableStore() const override {
    return isAssumed(IS_REMOVABLE) && isa<StoreInst>(&getAssociatedValue());
  }
  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    Value &V = getAssociatedValue();
    if (auto *I = dyn_cast<Instruction>(&V)) {
      // If we get here we basically know the users are all dead. We check if
      // isAssumedSideEffectFree returns true here again because it might not
      // be the case and only the users are dead but the instruction (=call)
      // was not.
      if (auto *SI = dyn_cast<StoreInst>(I)) {
        SmallSetVector<Instruction *, 8> AssumeOnlyInst;
        bool IsDead = isDeadStore(A, *SI, &AssumeOnlyInst);
        (void)IsDead;
        assert(IsDead && "Store was assumed to be dead!");
        A.deleteAfterManifest(*I);
        for (size_t i = 0; i < AssumeOnlyInst.size(); ++i) {
          Instruction *AOI = AssumeOnlyInst[i];
          for (auto *Usr : AOI->users())
            AssumeOnlyInst.insert(cast<Instruction>(Usr));
          A.deleteAfterManifest(*AOI);
        }
        return ChangeStatus::CHANGED;
      }
      if (auto *FI = dyn_cast<FenceInst>(I)) {
        assert(isDeadFence(A, *FI));
        A.deleteAfterManifest(*FI);
        return ChangeStatus::CHANGED;
      }
      if (isAssumedSideEffectFree(A, I) && !isa<InvokeInst>(I)) {
        A.deleteAfterManifest(*I);
        return ChangeStatus::CHANGED;
      }
    }
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FLOATING_ATTR(IsDead)
  }

private:
  // The potential copies of a dead store, used for deletion during manifest.
  SmallSetVector<Value *, 4> PotentialCopies;
};
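/// Liveness information for an argument. A dead argument is manifested by
/// rewriting the function signature to drop it, if such a rewrite is valid.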
struct AAIsDeadArgument : public AAIsDeadFloating {
  AAIsDeadArgument(const IRPosition &IRP, Attributor &A)
      : AAIsDeadFloating(IRP, A) {}

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    Argument &Arg = *getAssociatedArgument();
    if (A.isValidFunctionSignatureRewrite(Arg, /* ReplacementTypes */ {}))
      if (A.registerFunctionSignatureRewrite(
              Arg, /* ReplacementTypes */ {},
              Attributor::ArgumentReplacementInfo::CalleeRepairCBTy{},
              Attributor::ArgumentReplacementInfo::ACSRepairCBTy{}))
        return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_ARG_ATTR(IsDead) }
};
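/// Liveness information for a call site argument. The request is redirected
/// to the callee argument; a dead call site argument is replaced by undef.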
struct AAIsDeadCallSiteArgument : public AAIsDeadValueImpl {
  AAIsDeadCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAIsDeadValueImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAIsDeadValueImpl::initialize(A);
    if (isa<UndefValue>(getAssociatedValue()))
      indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    // TODO: Once we have call site specific value information we can provide
    //       call site specific liveness information and then it makes sense to
    //       specialize attributes for call site arguments instead of
    //       redirecting requests to the callee argument.
    Argument *Arg = getAssociatedArgument();
    if (!Arg)
      return indicatePessimisticFixpoint();
    const IRPosition &ArgPos = IRPosition::argument(*Arg);
    auto *ArgAA = A.getAAFor<AAIsDead>(*this, ArgPos, DepClassTy::REQUIRED);
    if (!ArgAA)
      return indicatePessimisticFixpoint();
    return clampStateAndIndicateChange(getState(), ArgAA->getState());
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    CallBase &CB = cast<CallBase>(getAnchorValue());
    Use &U = CB.getArgOperandUse(getCallSiteArgNo());
    assert(!isa<UndefValue>(U.get()) &&
           "Expected undef values to be filtered out!");
    UndefValue &UV = *UndefValue::get(U->getType());
    if (A.changeUseAfterManifest(U, UV))
      return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CSARG_ATTR(IsDead) }
};
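/// Liveness information for a call site return value. In addition to the
/// floating-value logic, we track separately whether the call itself is
/// side-effect free, since the result can be unused while the call is not.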
struct AAIsDeadCallSiteReturned : public AAIsDeadFloating {
  AAIsDeadCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAIsDeadFloating(IRP, A) {}

  /// See AAIsDead::isAssumedDead().
  bool isAssumedDead() const override {
    return AAIsDeadFloating::isAssumedDead() && IsAssumedSideEffectFree;
  }

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAIsDeadFloating::initialize(A);
    if (isa<UndefValue>(getAssociatedValue())) {
      indicatePessimisticFixpoint();
      return;
    }

    // We track this separately as a secondary state.
    IsAssumedSideEffectFree = isAssumedSideEffectFree(A, getCtxI());
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    ChangeStatus Changed = ChangeStatus::UNCHANGED;
    if (IsAssumedSideEffectFree && !isAssumedSideEffectFree(A, getCtxI())) {
      IsAssumedSideEffectFree = false;
      Changed = ChangeStatus::CHANGED;
    }
    if (!areAllUsesAssumedDead(A, getAssociatedValue()))
      return indicatePessimisticFixpoint();
    return Changed;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    if (IsAssumedSideEffectFree)
      STATS_DECLTRACK_CSRET_ATTR(IsDead)
    else
      STATS_DECLTRACK_CSRET_ATTR(UnusedResult)
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return isAssumedDead()
               ? "assumed-dead"
               : (getAssumed() ? "assumed-dead-users" : "assumed-live");
  }

private:
  bool IsAssumedSideEffectFree = true;
};
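/// Liveness information for a function return value. The return value is
/// dead if all uses at all (non-callback) call sites are assumed dead; dead
/// return values are replaced by undef during manifest.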
struct AAIsDeadReturned : public AAIsDeadValueImpl {
  AAIsDeadReturned(const IRPosition &IRP, Attributor &A)
      : AAIsDeadValueImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    bool UsedAssumedInformation = false;
    A.checkForAllInstructions([](Instruction &) { return true; }, *this,
                              {Instruction::Ret}, UsedAssumedInformation);

    auto PredForCallSite = [&](AbstractCallSite ACS) {
      if (ACS.isCallbackCall() || !ACS.getInstruction())
        return false;
      return areAllUsesAssumedDead(A, *ACS.getInstruction());
    };

    if (!A.checkForAllCallSites(PredForCallSite, *this, true,
                                UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    // TODO: Rewrite the signature to return void?
    bool AnyChange = false;
    UndefValue &UV = *UndefValue::get(getAssociatedFunction()->getReturnType());
    auto RetInstPred = [&](Instruction &I) {
      ReturnInst &RI = cast<ReturnInst>(I);
      if (!isa<UndefValue>(RI.getReturnValue()))
        AnyChange |= A.changeUseAfterManifest(RI.getOperandUse(0), UV);
      return true;
    };
    bool UsedAssumedInformation = false;
    A.checkForAllInstructions(RetInstPred, *this, {Instruction::Ret},
                              UsedAssumedInformation);
    return AnyChange ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_FNRET_ATTR(IsDead) }
};
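/// Liveness information for a function as a whole: a fixpoint iteration over
/// the control flow graph that starts at the entry block and only follows
/// edges that are assumed to transfer control.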
struct AAIsDeadFunction : public AAIsDead {
  AAIsDeadFunction(const IRPosition &IRP, Attributor &A) : AAIsDead(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    Function *F = getAnchorScope();
    assert(F && "Expected an anchor function");
    if (!isAssumedDeadInternalFunction(A)) {
      ToBeExploredFrom.insert(&F->getEntryBlock().front());
      assumeLive(A, F->getEntryBlock());
    }
  }

  /// An internal function is assumed dead if it has no call sites at all.
  bool isAssumedDeadInternalFunction(Attributor &A) {
    if (!getAnchorScope()->hasLocalLinkage())
      return false;
    bool UsedAssumedInformation = false;
    return A.checkForAllCallSites([](AbstractCallSite) { return false; }, *this,
                                  true, UsedAssumedInformation);
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return "Live[#BB " + std::to_string(AssumedLiveBlocks.size()) + "/" +
           std::to_string(getAnchorScope()->size()) + "][#TBEP " +
           std::to_string(ToBeExploredFrom.size()) + "][#KDE " +
           std::to_string(KnownDeadEnds.size()) + "]";
  }
  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    assert(getState().isValidState() &&
           "Attempted to manifest an invalid state!");

    ChangeStatus HasChanged = ChangeStatus::UNCHANGED;
    Function &F = *getAnchorScope();

    if (AssumedLiveBlocks.empty()) {
      A.deleteAfterManifest(F);
      return ChangeStatus::CHANGED;
    }

    // Flag to determine if we can change an invoke to a call assuming the
    // callee is nounwind. This is not possible if the personality of the
    // function allows catching asynchronous exceptions.
    bool Invoke2CallAllowed = !mayCatchAsynchronousExceptions(F);

    KnownDeadEnds.set_union(ToBeExploredFrom);
    for (const Instruction *DeadEndI : KnownDeadEnds) {
      auto *CB = dyn_cast<CallBase>(DeadEndI);
      if (!CB)
        continue;
      bool IsKnownNoReturn;
      bool MayReturn = !AA::hasAssumedIRAttr<Attribute::NoReturn>(
          A, this, IRPosition::callsite_function(*CB), DepClassTy::OPTIONAL,
          IsKnownNoReturn);
      if (MayReturn && (!Invoke2CallAllowed || !isa<InvokeInst>(CB)))
        continue;

      if (auto *II = dyn_cast<InvokeInst>(DeadEndI))
        A.registerInvokeWithDeadSuccessor(const_cast<InvokeInst &>(*II));
      else
        A.changeToUnreachableAfterManifest(
            const_cast<Instruction *>(DeadEndI->getNextNode()));
      HasChanged = ChangeStatus::CHANGED;
    }

    for (BasicBlock &BB : F)
      if (!AssumedLiveBlocks.count(&BB)) {
        A.deleteAfterManifest(BB);
        HasChanged = ChangeStatus::CHANGED;
      }

    return HasChanged;
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override;
  /// See AAIsDead::isEdgeDead(...).
  bool isEdgeDead(const BasicBlock *From, const BasicBlock *To) const override {
    assert(From->getParent() == getAnchorScope() &&
           To->getParent() == getAnchorScope() &&
           "Used AAIsDead of the wrong function");
    return isValidState() && !AssumedLiveEdges.count(std::make_pair(From, To));
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {}
  /// Returns true if the function is assumed dead.
  bool isAssumedDead() const override { return false; }

  /// See AAIsDead::isKnownDead().
  bool isKnownDead() const override { return false; }

  /// See AAIsDead::isAssumedDead(BasicBlock *).
  bool isAssumedDead(const BasicBlock *BB) const override {
    assert(BB->getParent() == getAnchorScope() &&
           "BB must be in the same anchor scope function.");
    if (!getAssumed())
      return false;
    return !AssumedLiveBlocks.count(BB);
  }

  /// See AAIsDead::isKnownDead(BasicBlock *).
  bool isKnownDead(const BasicBlock *BB) const override {
    return getKnown() && isAssumedDead(BB);
  }

  /// See AAIsDead::isAssumedDead(Instruction *).
  bool isAssumedDead(const Instruction *I) const override {
    assert(I->getParent()->getParent() == getAnchorScope() &&
           "Instruction must be in the same anchor scope function.");
    if (!getAssumed())
      return false;

    // If it is not in AssumedLiveBlocks then it is for sure dead.
    // Otherwise, it can still be after a noreturn call in a live block.
    if (!AssumedLiveBlocks.count(I->getParent()))
      return true;

    // If it is not after a liveness barrier it is live.
    const Instruction *PrevI = I->getPrevNode();
    while (PrevI) {
      if (KnownDeadEnds.count(PrevI) || ToBeExploredFrom.count(PrevI))
        return true;
      PrevI = PrevI->getPrevNode();
    }
    return false;
  }

  /// See AAIsDead::isKnownDead(Instruction *).
  bool isKnownDead(const Instruction *I) const override {
    return getKnown() && isAssumedDead(I);
  }
  /// Assume \p BB is (partially) live now and indicate to the Attributor \p A
  /// that internal functions called from \p BB should now be looked at.
  bool assumeLive(Attributor &A, const BasicBlock &BB) {
    if (!AssumedLiveBlocks.insert(&BB).second)
      return false;

    // Assume that all of BB is (probably) live now and that calls to internal
    // functions from it keep those callees live as well. This is a
    // performance optimization for blocks with many calls to internal
    // functions; it can, however, cause dead functions to be treated as live.
    for (const Instruction &I : BB)
      if (const auto *CB = dyn_cast<CallBase>(&I))
        if (auto *F = dyn_cast<Function>(CB->getCalledOperand()))
          if (F->hasLocalLinkage())
            A.markLiveInternalFunction(*F);
    return true;
  }
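  /// Collection of instructions that need to be explored again, e.g., we
  /// did assume they do not transfer control to (one of their) successors.
  SmallSetVector<const Instruction *, 8> ToBeExploredFrom;

  /// Collection of instructions that are known to not transfer control.
  SmallSetVector<const Instruction *, 8> KnownDeadEnds;

  /// Collection of all assumed live edges.
  DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> AssumedLiveEdges;

  /// Collection of all assumed live basic blocks.
  DenseSet<const BasicBlock *> AssumedLiveBlocks;
};

/// Helpers to determine the alive successors of an instruction. Each overload
/// returns true if assumed (rather than known) information was used, in which
/// case the instruction has to be reexplored in the next update.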
static bool
identifyAliveSuccessors(Attributor &A, const CallBase &CB,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  const IRPosition &IPos = IRPosition::callsite_function(CB);

  bool IsKnownNoReturn;
  if (AA::hasAssumedIRAttr<Attribute::NoReturn>(
          A, &AA, IPos, DepClassTy::OPTIONAL, IsKnownNoReturn))
    return !IsKnownNoReturn;
  if (CB.isTerminator())
    AliveSuccessors.push_back(&CB.getSuccessor(0)->front());
  else
    AliveSuccessors.push_back(CB.getNextNode());
  return false;
}
static bool
identifyAliveSuccessors(Attributor &A, const InvokeInst &II,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  bool UsedAssumedInformation =
      identifyAliveSuccessors(A, cast<CallBase>(II), AA, AliveSuccessors);

  // First, check if we can change an invoke to a call assuming the callee is
  // nounwind. This is not possible if the personality of the function allows
  // catching asynchronous exceptions.
  if (AAIsDeadFunction::mayCatchAsynchronousExceptions(*II.getFunction())) {
    AliveSuccessors.push_back(&II.getUnwindDest()->front());
  } else {
    const IRPosition &IPos = IRPosition::callsite_function(II);

    bool IsKnownNoUnwind;
    if (AA::hasAssumedIRAttr<Attribute::NoUnwind>(
            A, &AA, IPos, DepClassTy::OPTIONAL, IsKnownNoUnwind)) {
      UsedAssumedInformation |= !IsKnownNoUnwind;
    } else {
      AliveSuccessors.push_back(&II.getUnwindDest()->front());
    }
  }
  return UsedAssumedInformation;
}
static bool
identifyAliveSuccessors(Attributor &A, const BranchInst &BI,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  bool UsedAssumedInformation = false;
  if (BI.getNumSuccessors() == 1) {
    AliveSuccessors.push_back(&BI.getSuccessor(0)->front());
  } else {
    std::optional<Constant *> C =
        A.getAssumedConstant(*BI.getCondition(), AA, UsedAssumedInformation);
    if (!C || isa_and_nonnull<UndefValue>(*C)) {
      // No value yet, assume both edges are dead.
    } else if (isa_and_nonnull<ConstantInt>(*C)) {
      const BasicBlock *SuccBB =
          BI.getSuccessor(1 - cast<ConstantInt>(*C)->getValue().getZExtValue());
      AliveSuccessors.push_back(&SuccBB->front());
    } else {
      AliveSuccessors.push_back(&BI.getSuccessor(0)->front());
      AliveSuccessors.push_back(&BI.getSuccessor(1)->front());
      UsedAssumedInformation = false;
    }
  }
  return UsedAssumedInformation;
}
static bool
identifyAliveSuccessors(Attributor &A, const SwitchInst &SI,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  bool UsedAssumedInformation = false;
  SmallVector<AA::ValueAndContext> Values;
  if (!A.getAssumedSimplifiedValues(IRPosition::value(*SI.getCondition()), &AA,
                                    Values, AA::AnyScope,
                                    UsedAssumedInformation)) {
    // Something went wrong, assume all successors are live.
    for (const BasicBlock *SuccBB : successors(SI.getParent()))
      AliveSuccessors.push_back(&SuccBB->front());
    return false;
  }

  if (Values.empty() ||
      (Values.size() == 1 &&
       isa_and_nonnull<UndefValue>(Values.front().getValue()))) {
    // No valid value yet, assume all edges are dead.
    return UsedAssumedInformation;
  }

  Type &Ty = *SI.getCondition()->getType();
  SmallPtrSet<ConstantInt *, 8> Constants;
  auto CheckForConstantInt = [&](Value *V) {
    if (auto *CI = dyn_cast_if_present<ConstantInt>(AA::getWithType(*V, Ty))) {
      Constants.insert(CI);
      return true;
    }
    return false;
  };

  if (!all_of(Values, [&](AA::ValueAndContext &VAC) {
        return CheckForConstantInt(VAC.getValue());
      })) {
    // Not all values are constant integers, assume all successors are live.
    for (const BasicBlock *SuccBB : successors(SI.getParent()))
      AliveSuccessors.push_back(&SuccBB->front());
    return UsedAssumedInformation;
  }

  unsigned MatchedCases = 0;
  for (const auto &CaseIt : SI.cases()) {
    if (Constants.count(CaseIt.getCaseValue())) {
      ++MatchedCases;
      AliveSuccessors.push_back(&CaseIt.getCaseSuccessor()->front());
    }
  }

  // If not all constants have been matched, the default destination can
  // still be reached.
  if (MatchedCases < Constants.size())
    AliveSuccessors.push_back(&SI.getDefaultDest()->front());
  return UsedAssumedInformation;
}
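/// Explore the function from the known-live entry points and collect all
/// blocks and edges that are assumed to be reachable.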
ChangeStatus AAIsDeadFunction::updateImpl(Attributor &A) {
  ChangeStatus Change = ChangeStatus::UNCHANGED;

  if (AssumedLiveBlocks.empty()) {
    if (isAssumedDeadInternalFunction(A))
      return ChangeStatus::UNCHANGED;

    Function *F = getAnchorScope();
    ToBeExploredFrom.insert(&F->getEntryBlock().front());
    assumeLive(A, F->getEntryBlock());
    Change = ChangeStatus::CHANGED;
  }

  LLVM_DEBUG(dbgs() << "[AAIsDead] Live [" << AssumedLiveBlocks.size() << "/"
                    << getAnchorScope()->size() << "] BBs and "
                    << ToBeExploredFrom.size() << " exploration points and "
                    << KnownDeadEnds.size() << " known dead ends\n");
  // Copy and clear the list of instructions we need to explore from. It is
  // refilled with instructions the next update has to look at.
  SmallVector<const Instruction *, 8> Worklist(ToBeExploredFrom.begin(),
                                               ToBeExploredFrom.end());
  decltype(ToBeExploredFrom) NewToBeExploredFrom;

  SmallVector<const Instruction *, 8> AliveSuccessors;
  while (!Worklist.empty()) {
    const Instruction *I = Worklist.pop_back_val();

    // Fast forward over uninteresting instructions.
    while (!I->isTerminator() && !isa<CallBase>(I))
      I = I->getNextNode();

    AliveSuccessors.clear();

    bool UsedAssumedInformation = false;
    switch (I->getOpcode()) {
    default:
      assert(I->isTerminator() &&
             "Expected non-terminators to be handled already!");
      for (const BasicBlock *SuccBB : successors(I->getParent()))
        AliveSuccessors.push_back(&SuccBB->front());
      break;
    case Instruction::Call:
      UsedAssumedInformation = identifyAliveSuccessors(A, cast<CallInst>(*I),
                                                       *this, AliveSuccessors);
      break;
    case Instruction::Invoke:
      UsedAssumedInformation = identifyAliveSuccessors(A, cast<InvokeInst>(*I),
                                                       *this, AliveSuccessors);
      break;
    case Instruction::Br:
      UsedAssumedInformation = identifyAliveSuccessors(A, cast<BranchInst>(*I),
                                                       *this, AliveSuccessors);
      break;
    case Instruction::Switch:
      UsedAssumedInformation = identifyAliveSuccessors(A, cast<SwitchInst>(*I),
                                                       *this, AliveSuccessors);
      break;
    }

    if (UsedAssumedInformation) {
      NewToBeExploredFrom.insert(I);
    } else if (AliveSuccessors.empty() ||
               (I->isTerminator() &&
                AliveSuccessors.size() < I->getNumSuccessors())) {
      if (KnownDeadEnds.insert(I))
        Change = ChangeStatus::CHANGED;
    }

    LLVM_DEBUG(dbgs() << "[AAIsDead] #AliveSuccessors: "
                      << AliveSuccessors.size() << " UsedAssumedInformation: "
                      << UsedAssumedInformation << "\n");
    for (const Instruction *AliveSuccessor : AliveSuccessors) {
      if (!I->isTerminator()) {
        assert(AliveSuccessors.size() == 1 &&
               "Non-terminator expected to have a single successor!");
        Worklist.push_back(AliveSuccessor);
      } else {
        // Record the used edge.
        auto Edge = std::make_pair(I->getParent(), AliveSuccessor->getParent());
        if (AssumedLiveEdges.insert(Edge).second)
          Change = ChangeStatus::CHANGED;
        if (assumeLive(A, *AliveSuccessor->getParent()))
          Worklist.push_back(AliveSuccessor);
      }
    }
  }
  // Check if the content of ToBeExploredFrom changed, ignoring the order.
  if (NewToBeExploredFrom.size() != ToBeExploredFrom.size() ||
      llvm::any_of(NewToBeExploredFrom, [&](const Instruction *I) {
        return !ToBeExploredFrom.count(I);
      })) {
    Change = ChangeStatus::CHANGED;
    ToBeExploredFrom = std::move(NewToBeExploredFrom);
  }

  // If we know everything is live there is no need to query for liveness.
  // Instead, indicating a pessimistic fixpoint will cause the state to be
  // "invalid" and all queries to be answered conservatively without lookups.
  // To be in this state we have to have explored the IR fully.
  if (ToBeExploredFrom.empty() &&
      getAnchorScope()->size() == AssumedLiveBlocks.size() &&
      llvm::all_of(KnownDeadEnds, [](const Instruction *DeadEndI) {
        return DeadEndI->isTerminator() && DeadEndI->getNumSuccessors() == 0;
      }))
    return indicatePessimisticFixpoint();
  return Change;
}
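/// Liveness information for a call site; liveness is not modeled for call
/// sites yet, so requests are answered pessimistically.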
struct AAIsDeadCallSite final : AAIsDeadFunction {
  AAIsDeadCallSite(const IRPosition &IRP, Attributor &A)
      : AAIsDeadFunction(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    llvm_unreachable("Abstract attributes for liveness are not "
                     "supported for call sites yet!");
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    return indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {}
};
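/// -------------------- Dereferenceable Argument Attribute -------------------

/// Common base for dereferenceability deduction; the DerefState tracks the
/// number of known and assumed dereferenceable bytes of the associated
/// pointer value.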
struct AADereferenceableImpl : AADereferenceable {
  AADereferenceableImpl(const IRPosition &IRP, Attributor &A)
      : AADereferenceable(IRP, A) {}
  using StateType = DerefState;

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    Value &V = *getAssociatedValue().stripPointerCasts();
    SmallVector<Attribute, 4> Attrs;
    A.getAttrs(getIRPosition(),
               {Attribute::Dereferenceable, Attribute::DereferenceableOrNull},
               Attrs);
    for (const Attribute &Attr : Attrs)
      takeKnownDerefBytesMaximum(Attr.getValueAsInt());

    // Ensure we initialize the non-null AA (if necessary).
    bool IsKnownNonNull;
    AA::hasAssumedIRAttr<Attribute::NonNull>(
        A, this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNonNull);

    bool CanBeNull, CanBeFreed;
    takeKnownDerefBytesMaximum(V.getPointerDereferenceableBytes(
        A.getDataLayout(), CanBeNull, CanBeFreed));

    if (Instruction *CtxI = getCtxI())
      followUsesInMBEC(*this, A, getState(), *CtxI);
  }
  /// See AbstractAttribute::getState().
  StateType &getState() override { return *this; }
  const StateType &getState() const override { return *this; }
  /// Helper function to collect the accessed bytes for a use in the
  /// must-be-executed context.
  void addAccessedBytesForUse(Attributor &A, const Use *U,
                              const Instruction *I, DerefState &State) {
    const Value *UseV = U->get();
    if (!UseV->getType()->isPointerTy())
      return;

    std::optional<MemoryLocation> Loc = MemoryLocation::getOrNone(I);
    if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() || I->isVolatile())
      return;

    int64_t Offset;
    const Value *Base = GetPointerBaseWithConstantOffset(
        Loc->Ptr, Offset, A.getDataLayout(), /* AllowNonInbounds */ true);
    if (Base && Base == &getAssociatedValue())
      State.addAccessedBytes(Offset, Loc->Size.getValue());
  }
  /// See followUsesInMBEC.
  bool followUseInMBEC(Attributor &A, const Use *U, const Instruction *I,
                       AADereferenceable::StateType &State) {
    bool IsNonNull = false;
    bool TrackUse = false;
    int64_t DerefBytes = getKnownNonNullAndDerefBytesForUse(
        A, *this, getAssociatedValue(), U, I, IsNonNull, TrackUse);
    LLVM_DEBUG(dbgs() << "[AADereferenceable] Deref bytes: " << DerefBytes
                      << " for instruction " << *I << "\n");

    addAccessedBytesForUse(A, U, I, State);
    State.takeKnownDerefBytesMaximum(DerefBytes);
    return TrackUse;
  }