54#include "llvm/IR/IntrinsicsAMDGPU.h"
55#include "llvm/IR/IntrinsicsNVPTX.h"
// Debug type tag for LLVM_DEBUG output emitted by this file.
80#define DEBUG_TYPE "attributor"
84 cl::desc(
"Manifest Attributor internal string attributes."),
97 cl::desc(
"Maximum number of potential values to be "
98 "tracked for each position."),
103 "attributor-max-potential-values-iterations",
cl::Hidden,
105 "Maximum number of iterations we keep dismantling potential values."),
// Pass-wide statistic counters: how many abstract attributes were created
// and how many indirect calls were promoted.
108STATISTIC(NumAAs,
"Number of abstract attributes created");
109STATISTIC(NumIndirectCallsPromoted,
"Number of indirect calls promoted");
// Builds the human-readable statistic message for an IR attribute, e.g.
// "Number of arguments marked 'NAME'".
124#define BUILD_STAT_MSG_IR_ATTR(TYPE, NAME) \
125 ("Number of " #TYPE " marked '" #NAME "'")
// Builds the statistic variable identifier by token pasting, e.g.
// BUILD_STAT_NAME(nonnull, Arguments) -> NumIRArguments_nonnull.
126#define BUILD_STAT_NAME(NAME, TYPE) NumIR##TYPE##_##NAME
// Declares a STATISTIC counter with the given identifier and message.
127#define STATS_DECL_(NAME, MSG) STATISTIC(NAME, MSG);
// Declares a statistic counter whose identifier follows the
// BUILD_STAT_NAME(NAME, TYPE) naming scheme.
128#define STATS_DECL(NAME, TYPE, MSG) \
129 STATS_DECL_(BUILD_STAT_NAME(NAME, TYPE), MSG);
// Increments the statistic counter named by BUILD_STAT_NAME(NAME, TYPE).
130#define STATS_TRACK(NAME, TYPE) ++(BUILD_STAT_NAME(NAME, TYPE));
131#define STATS_DECLTRACK(NAME, TYPE, MSG) \
133 STATS_DECL(NAME, TYPE, MSG) \
134 STATS_TRACK(NAME, TYPE) \
// Declare-and-increment helper for attributes manifested on arguments.
136#define STATS_DECLTRACK_ARG_ATTR(NAME) \
137 STATS_DECLTRACK(NAME, Arguments, BUILD_STAT_MSG_IR_ATTR(arguments, NAME))
// Declare-and-increment helper for attributes manifested on call site
// arguments.
138#define STATS_DECLTRACK_CSARG_ATTR(NAME) \
139 STATS_DECLTRACK(NAME, CSArguments, \
140 BUILD_STAT_MSG_IR_ATTR(call site arguments, NAME))
// Declare-and-increment helper for attributes manifested on functions.
141#define STATS_DECLTRACK_FN_ATTR(NAME) \
142 STATS_DECLTRACK(NAME, Function, BUILD_STAT_MSG_IR_ATTR(functions, NAME))
// Declare-and-increment helper for attributes manifested on call sites.
143#define STATS_DECLTRACK_CS_ATTR(NAME) \
144 STATS_DECLTRACK(NAME, CS, BUILD_STAT_MSG_IR_ATTR(call site, NAME))
// Declare-and-increment helper for attributes manifested on function
// return positions.
145#define STATS_DECLTRACK_FNRET_ATTR(NAME) \
146 STATS_DECLTRACK(NAME, FunctionReturn, \
147 BUILD_STAT_MSG_IR_ATTR(function returns, NAME))
// Declare-and-increment helper for attributes manifested on call site
// return positions.
148#define STATS_DECLTRACK_CSRET_ATTR(NAME) \
149 STATS_DECLTRACK(NAME, CSReturn, \
150 BUILD_STAT_MSG_IR_ATTR(call site returns, NAME))
// Declare-and-increment helper for attributes on floating value positions
// (values not tied to an argument, call site, or return).
151#define STATS_DECLTRACK_FLOATING_ATTR(NAME) \
152 STATS_DECLTRACK(NAME, Floating, \
153 ("Number of floating values known to be '" #NAME "'"))
158#define PIPE_OPERATOR(CLASS) \
159 raw_ostream &operator<<(raw_ostream &OS, const CLASS &AA) { \
160 return OS << static_cast<const AbstractAttribute &>(AA); \
215 bool HeaderOnly,
Cycle **CPtr =
nullptr) {
218 auto *BB =
I->getParent();
224 return !HeaderOnly || BB ==
C->getHeader();
235 if (
DL.getTypeSizeInBits(Ty) !=
DL.getTypeAllocSizeInBits(Ty))
240 if (
VectorType *SeqTy = dyn_cast<VectorType>(Ty))
244 if (
ArrayType *SeqTy = dyn_cast<ArrayType>(Ty))
247 if (!isa<StructType>(Ty))
260 StartPos +=
DL.getTypeAllocSizeInBits(ElTy);
270 bool AllowVolatile) {
271 if (!AllowVolatile &&
I->isVolatile())
274 if (
auto *LI = dyn_cast<LoadInst>(
I)) {
275 return LI->getPointerOperand();
278 if (
auto *SI = dyn_cast<StoreInst>(
I)) {
279 return SI->getPointerOperand();
282 if (
auto *CXI = dyn_cast<AtomicCmpXchgInst>(
I)) {
283 return CXI->getPointerOperand();
286 if (
auto *RMWI = dyn_cast<AtomicRMWInst>(
I)) {
287 return RMWI->getPointerOperand();
309 bool GetMinOffset,
bool AllowNonInbounds,
310 bool UseAssumed =
false) {
312 auto AttributorAnalysis = [&](
Value &V,
APInt &ROffset) ->
bool {
317 UseAssumed ? DepClassTy::OPTIONAL
319 if (!ValueConstantRangeAA)
342 const Value *
Ptr, int64_t &BytesOffset,
344 APInt OffsetAPInt(
DL.getIndexTypeSizeInBits(
Ptr->getType()), 0);
347 true, AllowNonInbounds);
355template <
typename AAType,
typename StateType =
typename AAType::StateType,
357 bool RecurseForSelectAndPHI =
true>
359 Attributor &
A,
const AAType &QueryingAA, StateType &S,
361 LLVM_DEBUG(
dbgs() <<
"[Attributor] Clamp return value states for "
362 << QueryingAA <<
" into " << S <<
"\n");
364 assert((QueryingAA.getIRPosition().getPositionKind() ==
366 QueryingAA.getIRPosition().getPositionKind() ==
368 "Can only clamp returned value states for a function returned or call "
369 "site returned position!");
373 std::optional<StateType>
T;
376 auto CheckReturnValue = [&](
Value &RV) ->
bool {
381 return AA::hasAssumedIRAttr<IRAttributeKind>(
382 A, &QueryingAA, RVPos, DepClassTy::REQUIRED, IsKnown);
386 A.getAAFor<AAType>(QueryingAA, RVPos, DepClassTy::REQUIRED);
390 <<
" AA: " << AA->getAsStr(&
A) <<
" @ " << RVPos <<
"\n");
391 const StateType &AAS = AA->getState();
393 T = StateType::getBestState(AAS);
395 LLVM_DEBUG(
dbgs() <<
"[Attributor] AA State: " << AAS <<
" RV State: " <<
T
397 return T->isValidState();
400 if (!
A.checkForAllReturnedValues(CheckReturnValue, QueryingAA,
401 AA::ValueScope::Intraprocedural,
402 RecurseForSelectAndPHI))
403 S.indicatePessimisticFixpoint();
410template <
typename AAType,
typename BaseType,
411 typename StateType =
typename BaseType::StateType,
412 bool PropagateCallBaseContext =
false,
414 bool RecurseForSelectAndPHI =
true>
415struct AAReturnedFromReturnedValues :
public BaseType {
421 StateType S(StateType::getBestState(this->getState()));
423 RecurseForSelectAndPHI>(
425 PropagateCallBaseContext ? this->getCallBaseContext() : nullptr);
428 return clampStateAndIndicateChange<StateType>(this->getState(), S);
434template <
typename AAType,
typename StateType =
typename AAType::StateType,
436static void clampCallSiteArgumentStates(
Attributor &
A,
const AAType &QueryingAA,
438 LLVM_DEBUG(
dbgs() <<
"[Attributor] Clamp call site argument states for "
439 << QueryingAA <<
" into " << S <<
"\n");
441 assert(QueryingAA.getIRPosition().getPositionKind() ==
443 "Can only clamp call site argument states for an argument position!");
447 std::optional<StateType>
T;
450 unsigned ArgNo = QueryingAA.getIRPosition().getCallSiteArgNo();
462 return AA::hasAssumedIRAttr<IRAttributeKind>(
463 A, &QueryingAA, ACSArgPos, DepClassTy::REQUIRED, IsKnown);
467 A.getAAFor<AAType>(QueryingAA, ACSArgPos, DepClassTy::REQUIRED);
470 LLVM_DEBUG(
dbgs() <<
"[Attributor] ACS: " << *ACS.getInstruction()
471 <<
" AA: " << AA->getAsStr(&
A) <<
" @" << ACSArgPos
473 const StateType &AAS = AA->getState();
475 T = StateType::getBestState(AAS);
477 LLVM_DEBUG(
dbgs() <<
"[Attributor] AA State: " << AAS <<
" CSA State: " <<
T
479 return T->isValidState();
482 bool UsedAssumedInformation =
false;
483 if (!
A.checkForAllCallSites(CallSiteCheck, QueryingAA,
true,
484 UsedAssumedInformation))
485 S.indicatePessimisticFixpoint();
492template <
typename AAType,
typename BaseType,
493 typename StateType =
typename AAType::StateType,
495bool getArgumentStateFromCallBaseContext(
Attributor &
A,
499 "Expected an 'argument' position !");
505 assert(ArgNo >= 0 &&
"Invalid Arg No!");
511 return AA::hasAssumedIRAttr<IRAttributeKind>(
512 A, &QueryingAttribute, CBArgPos, DepClassTy::REQUIRED, IsKnown);
516 A.getAAFor<AAType>(QueryingAttribute, CBArgPos, DepClassTy::REQUIRED);
519 const StateType &CBArgumentState =
520 static_cast<const StateType &
>(AA->getState());
522 LLVM_DEBUG(
dbgs() <<
"[Attributor] Briding Call site context to argument"
523 <<
"Position:" << Pos <<
"CB Arg state:" << CBArgumentState
527 State ^= CBArgumentState;
532template <
typename AAType,
typename BaseType,
533 typename StateType =
typename AAType::StateType,
534 bool BridgeCallBaseContext =
false,
536struct AAArgumentFromCallSiteArguments :
public BaseType {
542 StateType S = StateType::getBestState(this->getState());
544 if (BridgeCallBaseContext) {
546 getArgumentStateFromCallBaseContext<AAType,
BaseType, StateType,
548 A, *
this, this->getIRPosition(), S);
550 return clampStateAndIndicateChange<StateType>(this->getState(), S);
552 clampCallSiteArgumentStates<AAType, StateType, IRAttributeKind>(
A, *
this,
557 return clampStateAndIndicateChange<StateType>(this->getState(), S);
562template <
typename AAType,
typename BaseType,
563 typename StateType =
typename BaseType::StateType,
564 bool IntroduceCallBaseContext =
false,
566struct AACalleeToCallSite :
public BaseType {
571 auto IRPKind = this->getIRPosition().getPositionKind();
574 "Can only wrap function returned positions for call site "
575 "returned positions!");
576 auto &S = this->getState();
578 CallBase &CB = cast<CallBase>(this->getAnchorValue());
579 if (IntroduceCallBaseContext)
580 LLVM_DEBUG(
dbgs() <<
"[Attributor] Introducing call base context:" << CB
585 for (
const Function *Callee : Callees) {
589 IntroduceCallBaseContext ? &CB :
nullptr)
591 *
Callee, IntroduceCallBaseContext ? &CB : nullptr);
595 if (!AA::hasAssumedIRAttr<IRAttributeKind>(
596 A,
this, FnPos, DepClassTy::REQUIRED, IsKnown))
602 A.getAAFor<AAType>(*
this, FnPos, DepClassTy::REQUIRED);
606 if (S.isAtFixpoint())
607 return S.isValidState();
611 if (!
A.checkForAllCallees(CalleePred, *
this, CB))
612 return S.indicatePessimisticFixpoint();
618template <
class AAType,
typename StateType =
typename AAType::StateType>
619static void followUsesInContext(AAType &AA,
Attributor &
A,
624 auto EIt = Explorer.
begin(CtxI), EEnd = Explorer.
end(CtxI);
625 for (
unsigned u = 0;
u <
Uses.size(); ++
u) {
627 if (
const Instruction *UserI = dyn_cast<Instruction>(
U->getUser())) {
629 if (Found && AA.followUseInMBEC(
A, U, UserI, State))
630 for (
const Use &Us : UserI->
uses())
645template <
class AAType,
typename StateType =
typename AAType::StateType>
646static void followUsesInMBEC(AAType &AA,
Attributor &
A, StateType &S,
649 A.getInfoCache().getMustBeExecutedContextExplorer();
655 for (
const Use &U : AA.getIRPosition().getAssociatedValue().uses())
658 followUsesInContext<AAType>(AA,
A, *Explorer, &CtxI,
Uses, S);
660 if (S.isAtFixpoint())
665 if (
const BranchInst *Br = dyn_cast<BranchInst>(
I))
666 if (Br->isConditional())
705 StateType ParentState;
709 ParentState.indicateOptimisticFixpoint();
711 for (
const BasicBlock *BB : Br->successors()) {
712 StateType ChildState;
714 size_t BeforeSize =
Uses.size();
715 followUsesInContext(AA,
A, *Explorer, &BB->front(),
Uses, ChildState);
718 for (
auto It =
Uses.begin() + BeforeSize; It !=
Uses.end();)
721 ParentState &= ChildState;
734namespace PointerInfo {
795 R.indicatePessimisticFixpoint();
888 template <
typename F>
895 if (!
Range.mayOverlap(ItRange))
897 bool IsExact =
Range == ItRange && !
Range.offsetOrSizeAreUnknown();
898 for (
auto Index : It.getSecond()) {
908 template <
typename F>
919 for (
unsigned Index : LocalList->getSecond()) {
922 if (
Range.offsetAndSizeAreUnknown())
938 RemoteI = RemoteI ? RemoteI : &
I;
942 bool AccExists =
false;
944 for (
auto Index : LocalList) {
946 if (
A.getLocalInst() == &
I) {
955 <<
"[AAPointerInfo] Inserting access in new offset bins\n";);
957 for (
auto Key : ToAdd) {
966 "New Access should have been at AccIndex");
967 LocalList.push_back(AccIndex);
981 auto &ExistingRanges =
Before.getRanges();
982 auto &NewRanges = Current.getRanges();
989 <<
"[AAPointerInfo] Removing access from old offset bins\n";);
996 "Expected bin to actually contain the Access.");
1023struct AAPointerInfoImpl
1024 :
public StateWrapper<AA::PointerInfo::State, AAPointerInfo> {
1029 const std::string getAsStr(
Attributor *
A)
const override {
1030 return std::string(
"PointerInfo ") +
1031 (isValidState() ? (std::string(
"#") +
1032 std::to_string(OffsetBins.size()) +
" bins")
1037 [](int64_t O) {
return std::to_string(O); }),
1045 return AAPointerInfo::manifest(
A);
1048 virtual const_bin_iterator
begin()
const override {
return State::begin(); }
1049 virtual const_bin_iterator
end()
const override {
return State::end(); }
1050 virtual int64_t numOffsetBins()
const override {
1051 return State::numOffsetBins();
1053 virtual bool reachesReturn()
const override {
1054 return !ReturnedOffsets.isUnassigned();
1056 virtual void addReturnedOffsetsTo(OffsetInfo &OI)
const override {
1057 if (ReturnedOffsets.isUnknown()) {
1062 OffsetInfo MergedOI;
1063 for (
auto Offset : ReturnedOffsets) {
1064 OffsetInfo TmpOI = OI;
1066 MergedOI.merge(TmpOI);
1068 OI = std::move(MergedOI);
1071 ChangeStatus setReachesReturn(
const OffsetInfo &ReachedReturnedOffsets) {
1072 if (ReturnedOffsets.isUnknown())
1073 return ChangeStatus::UNCHANGED;
1074 if (ReachedReturnedOffsets.isUnknown()) {
1075 ReturnedOffsets.setUnknown();
1076 return ChangeStatus::CHANGED;
1078 if (ReturnedOffsets.merge(ReachedReturnedOffsets))
1079 return ChangeStatus::CHANGED;
1080 return ChangeStatus::UNCHANGED;
1083 bool forallInterferingAccesses(
1087 return State::forallInterferingAccesses(
Range, CB);
1090 bool forallInterferingAccesses(
1092 bool FindInterferingWrites,
bool FindInterferingReads,
1096 HasBeenWrittenTo =
false;
1103 bool IsAssumedNoSync = AA::hasAssumedIRAttr<Attribute::NoSync>(
1108 bool AllInSameNoSyncFn = IsAssumedNoSync;
1109 bool InstIsExecutedByInitialThreadOnly =
1110 ExecDomainAA && ExecDomainAA->isExecutedByInitialThreadOnly(
I);
1117 bool InstIsExecutedInAlignedRegion =
1118 FindInterferingReads && ExecDomainAA &&
1119 ExecDomainAA->isExecutedInAlignedRegion(
A,
I);
1121 if (InstIsExecutedInAlignedRegion || InstIsExecutedByInitialThreadOnly)
1122 A.recordDependence(*ExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1125 bool IsThreadLocalObj =
1134 auto CanIgnoreThreadingForInst = [&](
const Instruction &
I) ->
bool {
1135 if (IsThreadLocalObj || AllInSameNoSyncFn)
1137 const auto *FnExecDomainAA =
1138 I.getFunction() == &
Scope
1143 if (!FnExecDomainAA)
1145 if (InstIsExecutedInAlignedRegion ||
1146 (FindInterferingWrites &&
1147 FnExecDomainAA->isExecutedInAlignedRegion(
A,
I))) {
1148 A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1151 if (InstIsExecutedByInitialThreadOnly &&
1152 FnExecDomainAA->isExecutedByInitialThreadOnly(
I)) {
1153 A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1162 auto CanIgnoreThreading = [&](
const Access &Acc) ->
bool {
1163 return CanIgnoreThreadingForInst(*Acc.getRemoteInst()) ||
1164 (Acc.getRemoteInst() != Acc.getLocalInst() &&
1165 CanIgnoreThreadingForInst(*Acc.getLocalInst()));
1169 bool IsKnownNoRecurse;
1170 AA::hasAssumedIRAttr<Attribute::NoRecurse>(
1177 bool InstInKernel =
A.getInfoCache().isKernel(Scope);
1178 bool ObjHasKernelLifetime =
false;
1179 const bool UseDominanceReasoning =
1180 FindInterferingWrites && IsKnownNoRecurse;
1191 case AA::GPUAddressSpace::Shared:
1192 case AA::GPUAddressSpace::Constant:
1193 case AA::GPUAddressSpace::Local:
1205 std::function<
bool(
const Function &)> IsLiveInCalleeCB;
1207 if (
auto *AI = dyn_cast<AllocaInst>(&getAssociatedValue())) {
1211 ObjHasKernelLifetime =
A.getInfoCache().isKernel(*AIFn);
1212 bool IsKnownNoRecurse;
1213 if (AA::hasAssumedIRAttr<Attribute::NoRecurse>(
1215 IsKnownNoRecurse)) {
1216 IsLiveInCalleeCB = [AIFn](
const Function &Fn) {
return AIFn != &Fn; };
1218 }
else if (
auto *GV = dyn_cast<GlobalValue>(&getAssociatedValue())) {
1221 ObjHasKernelLifetime = HasKernelLifetime(GV, *GV->getParent());
1222 if (ObjHasKernelLifetime)
1223 IsLiveInCalleeCB = [&
A](
const Function &Fn) {
1224 return !
A.getInfoCache().isKernel(Fn);
1232 auto AccessCB = [&](
const Access &Acc,
bool Exact) {
1234 bool AccInSameScope = AccScope == &
Scope;
1238 if (InstInKernel && ObjHasKernelLifetime && !AccInSameScope &&
1239 A.getInfoCache().isKernel(*AccScope))
1242 if (Exact && Acc.isMustAccess() && Acc.getRemoteInst() != &
I) {
1243 if (Acc.isWrite() || (isa<LoadInst>(
I) && Acc.isWriteOrAssumption()))
1244 ExclusionSet.
insert(Acc.getRemoteInst());
1247 if ((!FindInterferingWrites || !Acc.isWriteOrAssumption()) &&
1248 (!FindInterferingReads || !Acc.isRead()))
1251 bool Dominates = FindInterferingWrites && DT && Exact &&
1252 Acc.isMustAccess() && AccInSameScope &&
1255 DominatingWrites.
insert(&Acc);
1259 AllInSameNoSyncFn &= Acc.getRemoteInst()->getFunction() == &
Scope;
1261 InterferingAccesses.
push_back({&Acc, Exact});
1264 if (!State::forallInterferingAccesses(
I, AccessCB,
Range))
1267 HasBeenWrittenTo = !DominatingWrites.
empty();
1271 for (
const Access *Acc : DominatingWrites) {
1272 if (!LeastDominatingWriteInst) {
1273 LeastDominatingWriteInst = Acc->getRemoteInst();
1274 }
else if (DT->
dominates(LeastDominatingWriteInst,
1275 Acc->getRemoteInst())) {
1276 LeastDominatingWriteInst = Acc->getRemoteInst();
1281 auto CanSkipAccess = [&](
const Access &Acc,
bool Exact) {
1282 if (SkipCB && SkipCB(Acc))
1284 if (!CanIgnoreThreading(Acc))
1290 bool ReadChecked = !FindInterferingReads;
1291 bool WriteChecked = !FindInterferingWrites;
1297 &ExclusionSet, IsLiveInCalleeCB))
1302 if (!WriteChecked) {
1304 &ExclusionSet, IsLiveInCalleeCB))
1305 WriteChecked =
true;
1319 if (!WriteChecked && HasBeenWrittenTo &&
1320 Acc.getRemoteInst()->getFunction() != &
Scope) {
1324 if (FnReachabilityAA) {
1330 if (!FnReachabilityAA->instructionCanReach(
1331 A, *LeastDominatingWriteInst,
1332 *Acc.getRemoteInst()->getFunction(), &ExclusionSet))
1333 WriteChecked =
true;
1340 if (ReadChecked && WriteChecked)
1343 if (!DT || !UseDominanceReasoning)
1345 if (!DominatingWrites.count(&Acc))
1347 return LeastDominatingWriteInst != Acc.getRemoteInst();
1352 for (
auto &It : InterferingAccesses) {
1353 if ((!AllInSameNoSyncFn && !IsThreadLocalObj && !ExecDomainAA) ||
1354 !CanSkipAccess(*It.first, It.second)) {
1355 if (!UserCB(*It.first, It.second))
1365 using namespace AA::PointerInfo;
1367 return indicatePessimisticFixpoint();
1370 const auto &OtherAAImpl =
static_cast<const AAPointerInfoImpl &
>(OtherAA);
1371 bool IsByval = OtherAAImpl.getAssociatedArgument()->hasByValAttr();
1372 Changed |= setReachesReturn(OtherAAImpl.ReturnedOffsets);
1375 const auto &State = OtherAAImpl.getState();
1376 for (
const auto &It : State) {
1377 for (
auto Index : It.getSecond()) {
1378 const auto &RAcc = State.getAccess(
Index);
1379 if (IsByval && !RAcc.isRead())
1381 bool UsedAssumedInformation =
false;
1383 auto Content =
A.translateArgumentToCallSiteContent(
1384 RAcc.getContent(), CB, *
this, UsedAssumedInformation);
1385 AK =
AccessKind(AK & (IsByval ? AccessKind::AK_R : AccessKind::AK_RW));
1386 AK =
AccessKind(AK | (RAcc.isMayAccess() ? AK_MAY : AK_MUST));
1388 Changed |= addAccess(
A, RAcc.getRanges(), CB,
Content, AK,
1389 RAcc.getType(), RAcc.getRemoteInst());
1396 const OffsetInfo &Offsets,
CallBase &CB,
1398 using namespace AA::PointerInfo;
1400 return indicatePessimisticFixpoint();
1402 const auto &OtherAAImpl =
static_cast<const AAPointerInfoImpl &
>(OtherAA);
1406 const auto &State = OtherAAImpl.getState();
1407 for (
const auto &It : State) {
1408 for (
auto Index : It.getSecond()) {
1409 const auto &RAcc = State.getAccess(
Index);
1410 if (!IsMustAcc && RAcc.isAssumption())
1412 for (
auto Offset : Offsets) {
1416 if (!NewRanges.isUnknown()) {
1417 NewRanges.addToAllOffsets(
Offset);
1422 Changed |= addAccess(
A, NewRanges, CB, RAcc.getContent(), AK,
1423 RAcc.getType(), RAcc.getRemoteInst());
1432 void trackPointerInfoStatistics(
const IRPosition &IRP)
const {}
1436 for (
auto &It : OffsetBins) {
1437 O <<
"[" << It.first.Offset <<
"-" << It.first.Offset + It.first.Size
1438 <<
"] : " << It.getSecond().size() <<
"\n";
1439 for (
auto AccIndex : It.getSecond()) {
1440 auto &Acc = AccessList[AccIndex];
1441 O <<
" - " << Acc.getKind() <<
" - " << *Acc.getLocalInst() <<
"\n";
1442 if (Acc.getLocalInst() != Acc.getRemoteInst())
1443 O <<
" --> " << *Acc.getRemoteInst()
1445 if (!Acc.isWrittenValueYetUndetermined()) {
1446 if (isa_and_nonnull<Function>(Acc.getWrittenValue()))
1447 O <<
" - c: func " << Acc.getWrittenValue()->getName()
1449 else if (Acc.getWrittenValue())
1450 O <<
" - c: " << *Acc.getWrittenValue() <<
"\n";
1452 O <<
" - c: <unknown>\n";
1459struct AAPointerInfoFloating :
public AAPointerInfoImpl {
1462 : AAPointerInfoImpl(IRP,
A) {}
1469 using namespace AA::PointerInfo;
1472 TypeSize AccessSize =
DL.getTypeStoreSize(&Ty);
1481 if (!VT || VT->getElementCount().isScalable() ||
1483 (*Content)->getType() != VT ||
1484 DL.getTypeStoreSize(VT->getElementType()).isScalable()) {
1495 int64_t ElementSize =
DL.getTypeStoreSize(ElementType).getFixedValue();
1496 auto *ConstContent = cast<Constant>(*
Content);
1500 for (
int i = 0, e = VT->getElementCount().getFixedValue(); i != e; ++i) {
1502 ConstContent, ConstantInt::get(Int32Ty, i));
1505 Changed = Changed | addAccess(
A, {ElementOffsets, ElementSize},
I,
1509 for (
auto &ElementOffset : ElementOffsets)
1510 ElementOffset += ElementSize;
1524 OffsetInfo &UsrOI,
const OffsetInfo &PtrOI,
1528 void trackStatistics()
const override {
1529 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1533bool AAPointerInfoFloating::collectConstantsForGEP(
Attributor &
A,
1536 const OffsetInfo &PtrOI,
1538 unsigned BitWidth =
DL.getIndexTypeSizeInBits(
GEP->getType());
1542 assert(!UsrOI.isUnknown() && !PtrOI.isUnknown() &&
1543 "Don't look for constant values if the offset has already been "
1544 "determined to be unknown.");
1546 if (!
GEP->collectOffset(
DL,
BitWidth, VariableOffsets, ConstantOffset)) {
1552 << (VariableOffsets.
empty() ?
"" :
"not") <<
" constant "
1556 Union.addToAll(ConstantOffset.getSExtValue());
1561 for (
const auto &VI : VariableOffsets) {
1564 if (!PotentialConstantsAA || !PotentialConstantsAA->isValidState()) {
1570 if (PotentialConstantsAA->undefIsContained())
1578 if (AssumedSet.empty())
1582 for (
const auto &ConstOffset : AssumedSet) {
1583 auto CopyPerOffset =
Union;
1584 CopyPerOffset.addToAll(ConstOffset.getSExtValue() *
1585 VI.second.getZExtValue());
1586 Product.merge(CopyPerOffset);
1591 UsrOI = std::move(Union);
1596 using namespace AA::PointerInfo;
1599 Value &AssociatedValue = getAssociatedValue();
1602 OffsetInfoMap[&AssociatedValue].
insert(0);
1604 auto HandlePassthroughUser = [&](
Value *Usr,
Value *CurPtr,
bool &Follow) {
1615 "CurPtr does not exist in the map!");
1617 auto &UsrOI = OffsetInfoMap[Usr];
1618 auto &PtrOI = OffsetInfoMap[CurPtr];
1619 assert(!PtrOI.isUnassigned() &&
1620 "Cannot pass through if the input Ptr was not visited!");
1626 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
1628 User *Usr =
U.getUser();
1629 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Analyze " << *CurPtr <<
" in " << *Usr
1632 "The current pointer offset should have been seeded!");
1633 assert(!OffsetInfoMap[CurPtr].isUnassigned() &&
1634 "Current pointer should be assigned");
1638 return HandlePassthroughUser(Usr, CurPtr, Follow);
1639 if (!isa<GEPOperator>(CE)) {
1640 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Unhandled constant user " << *CE
1645 if (
auto *
GEP = dyn_cast<GEPOperator>(Usr)) {
1648 auto &UsrOI = OffsetInfoMap[Usr];
1649 auto &PtrOI = OffsetInfoMap[CurPtr];
1651 if (UsrOI.isUnknown())
1654 if (PtrOI.isUnknown()) {
1660 Follow = collectConstantsForGEP(
A,
DL, UsrOI, PtrOI,
GEP);
1663 if (isa<PtrToIntInst>(Usr))
1665 if (isa<CastInst>(Usr) || isa<SelectInst>(Usr))
1666 return HandlePassthroughUser(Usr, CurPtr, Follow);
1670 if (
auto *RI = dyn_cast<ReturnInst>(Usr)) {
1671 if (RI->getFunction() == getAssociatedFunction()) {
1672 auto &PtrOI = OffsetInfoMap[CurPtr];
1673 Changed |= setReachesReturn(PtrOI);
1682 if (
auto *
PHI = dyn_cast<PHINode>(Usr)) {
1685 bool IsFirstPHIUser = !OffsetInfoMap.
count(
PHI);
1686 auto &UsrOI = OffsetInfoMap[
PHI];
1687 auto &PtrOI = OffsetInfoMap[CurPtr];
1691 if (PtrOI.isUnknown()) {
1692 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI operand offset unknown "
1693 << *CurPtr <<
" in " << *
PHI <<
"\n");
1694 Follow = !UsrOI.isUnknown();
1700 if (UsrOI == PtrOI) {
1701 assert(!PtrOI.isUnassigned() &&
1702 "Cannot assign if the current Ptr was not visited!");
1703 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI is invariant (so far)");
1713 auto It = OffsetInfoMap.
find(CurPtrBase);
1714 if (It == OffsetInfoMap.
end()) {
1715 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI operand is too complex "
1716 << *CurPtr <<
" in " << *
PHI
1717 <<
" (base: " << *CurPtrBase <<
")\n");
1732 *
PHI->getFunction());
1734 auto BaseOI = It->getSecond();
1735 BaseOI.addToAll(
Offset.getZExtValue());
1736 if (IsFirstPHIUser || BaseOI == UsrOI) {
1737 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI is invariant " << *CurPtr
1738 <<
" in " << *Usr <<
"\n");
1739 return HandlePassthroughUser(Usr, CurPtr, Follow);
1743 dbgs() <<
"[AAPointerInfo] PHI operand pointer offset mismatch "
1744 << *CurPtr <<
" in " << *
PHI <<
"\n");
1755 if (
auto *LoadI = dyn_cast<LoadInst>(Usr)) {
1763 if (!handleAccess(
A, *LoadI,
nullptr, AK,
1764 OffsetInfoMap[CurPtr].Offsets, Changed,
1769 if (
auto *
II = dyn_cast<IntrinsicInst>(&
I))
1770 return II->isAssumeLikeIntrinsic();
1781 }
while (FromI && FromI != ToI);
1787 if (IntrI.getIntrinsicID() != Intrinsic::assume)
1790 if (IntrI.getParent() == BB) {
1791 if (IsImpactedInRange(LoadI->getNextNonDebugInstruction(), &IntrI))
1797 if ((*PredIt) != BB)
1802 if (SuccBB == IntrBB)
1804 if (isa<UnreachableInst>(SuccBB->getTerminator()))
1808 if (IsImpactedInRange(LoadI->getNextNonDebugInstruction(),
1811 if (IsImpactedInRange(&IntrBB->
front(), &IntrI))
1817 std::pair<Value *, IntrinsicInst *> Assumption;
1818 for (
const Use &LoadU : LoadI->
uses()) {
1819 if (
auto *CmpI = dyn_cast<CmpInst>(LoadU.getUser())) {
1820 if (!CmpI->isEquality() || !CmpI->isTrueWhenEqual())
1822 for (
const Use &CmpU : CmpI->
uses()) {
1823 if (
auto *IntrI = dyn_cast<IntrinsicInst>(CmpU.getUser())) {
1824 if (!IsValidAssume(*IntrI))
1826 int Idx = CmpI->getOperandUse(0) == LoadU;
1827 Assumption = {CmpI->getOperand(
Idx), IntrI};
1832 if (Assumption.first)
1837 if (!Assumption.first || !Assumption.second)
1841 << *Assumption.second <<
": " << *LoadI
1842 <<
" == " << *Assumption.first <<
"\n");
1843 bool UsedAssumedInformation =
false;
1844 std::optional<Value *>
Content =
nullptr;
1845 if (Assumption.first)
1847 A.getAssumedSimplified(*Assumption.first, *
this,
1849 return handleAccess(
1850 A, *Assumption.second,
Content, AccessKind::AK_ASSUMPTION,
1851 OffsetInfoMap[CurPtr].Offsets, Changed, *LoadI->getType());
1856 for (
auto *OtherOp : OtherOps) {
1857 if (OtherOp == CurPtr) {
1860 <<
"[AAPointerInfo] Escaping use in store like instruction " <<
I
1872 bool UsedAssumedInformation =
false;
1873 std::optional<Value *>
Content =
nullptr;
1877 return handleAccess(
A,
I,
Content, AK, OffsetInfoMap[CurPtr].Offsets,
1881 if (
auto *StoreI = dyn_cast<StoreInst>(Usr))
1882 return HandleStoreLike(*StoreI, StoreI->getValueOperand(),
1883 *StoreI->getValueOperand()->getType(),
1884 {StoreI->getValueOperand()}, AccessKind::AK_W);
1885 if (
auto *RMWI = dyn_cast<AtomicRMWInst>(Usr))
1886 return HandleStoreLike(*RMWI,
nullptr, *RMWI->getValOperand()->getType(),
1887 {RMWI->getValOperand()}, AccessKind::AK_RW);
1888 if (
auto *CXI = dyn_cast<AtomicCmpXchgInst>(Usr))
1889 return HandleStoreLike(
1890 *CXI,
nullptr, *CXI->getNewValOperand()->getType(),
1891 {CXI->getCompareOperand(), CXI->getNewValOperand()},
1894 if (
auto *CB = dyn_cast<CallBase>(Usr)) {
1898 A.getInfoCache().getTargetLibraryInfoForFunction(*CB->
getFunction());
1909 Changed = translateAndAddState(
A, *CSArgPI, OffsetInfoMap[CurPtr], *CB,
1912 if (!CSArgPI->reachesReturn())
1913 return isValidState();
1916 if (!Callee ||
Callee->arg_size() <= ArgNo)
1918 bool UsedAssumedInformation =
false;
1919 auto ReturnedValue =
A.getAssumedSimplified(
1923 dyn_cast_or_null<Argument>(ReturnedValue.value_or(
nullptr));
1924 auto *Arg =
Callee->getArg(ArgNo);
1925 if (ReturnedArg && Arg != ReturnedArg)
1927 bool IsRetMustAcc = IsArgMustAcc && (ReturnedArg == Arg);
1932 OffsetInfo OI = OffsetInfoMap[CurPtr];
1933 CSArgPI->addReturnedOffsetsTo(OI);
1935 translateAndAddState(
A, *CSRetPI, OI, *CB, IsRetMustAcc) | Changed;
1936 return isValidState();
1938 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Call user not handled " << *CB
1943 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] User not handled " << *Usr <<
"\n");
1946 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
1947 assert(OffsetInfoMap.
count(OldU) &&
"Old use should be known already!");
1948 assert(!OffsetInfoMap[OldU].isUnassigned() &&
"Old use should be assinged");
1949 if (OffsetInfoMap.
count(NewU)) {
1951 if (!(OffsetInfoMap[NewU] == OffsetInfoMap[OldU])) {
1952 dbgs() <<
"[AAPointerInfo] Equivalent use callback failed: "
1953 << OffsetInfoMap[NewU] <<
" vs " << OffsetInfoMap[OldU]
1957 return OffsetInfoMap[NewU] == OffsetInfoMap[OldU];
1960 return HandlePassthroughUser(NewU.get(), OldU.
get(), Unused);
1962 if (!
A.checkForAllUses(UsePred, *
this, AssociatedValue,
1964 true, EquivalentUseCB)) {
1965 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Check for all uses failed, abort!\n");
1966 return indicatePessimisticFixpoint();
1970 dbgs() <<
"Accesses by bin after update:\n";
1977struct AAPointerInfoReturned final : AAPointerInfoImpl {
1979 : AAPointerInfoImpl(IRP,
A) {}
1983 return indicatePessimisticFixpoint();
1987 void trackStatistics()
const override {
1988 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1992struct AAPointerInfoArgument final : AAPointerInfoFloating {
1994 : AAPointerInfoFloating(IRP,
A) {}
1997 void trackStatistics()
const override {
1998 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
2002struct AAPointerInfoCallSiteArgument final : AAPointerInfoFloating {
2004 : AAPointerInfoFloating(IRP,
A) {}
2008 using namespace AA::PointerInfo;
2012 if (
auto *
MI = dyn_cast_or_null<MemIntrinsic>(getCtxI())) {
2016 LengthVal =
Length->getSExtValue();
2017 unsigned ArgNo = getIRPosition().getCallSiteArgNo();
2020 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Unhandled memory intrinsic "
2022 return indicatePessimisticFixpoint();
2025 ArgNo == 0 ? AccessKind::AK_MUST_WRITE : AccessKind::AK_MUST_READ;
2027 Changed | addAccess(
A, {0, LengthVal}, *
MI,
nullptr,
Kind,
nullptr);
2030 dbgs() <<
"Accesses by bin after update:\n";
2041 Argument *Arg = getAssociatedArgument();
2046 if (ArgAA && ArgAA->getState().isValidState())
2047 return translateAndAddStateFromCallee(
A, *ArgAA,
2048 *cast<CallBase>(getCtxI()));
2050 return indicatePessimisticFixpoint();
2053 bool IsKnownNoCapture;
2054 if (!AA::hasAssumedIRAttr<Attribute::NoCapture>(
2055 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNoCapture))
2056 return indicatePessimisticFixpoint();
2058 bool IsKnown =
false;
2060 return ChangeStatus::UNCHANGED;
2063 ReadOnly ? AccessKind::AK_MAY_READ : AccessKind::AK_MAY_READ_WRITE;
2069 void trackStatistics()
const override {
2070 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
2074struct AAPointerInfoCallSiteReturned final : AAPointerInfoFloating {
2076 : AAPointerInfoFloating(IRP,
A) {}
2079 void trackStatistics()
const override {
2080 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
2094 assert(!AA::hasAssumedIRAttr<Attribute::NoUnwind>(
2095 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2099 const std::string getAsStr(
Attributor *
A)
const override {
2100 return getAssumed() ?
"nounwind" :
"may-unwind";
2106 (
unsigned)Instruction::Invoke, (
unsigned)Instruction::CallBr,
2107 (
unsigned)Instruction::Call, (
unsigned)Instruction::CleanupRet,
2108 (
unsigned)Instruction::CatchSwitch, (
unsigned)Instruction::Resume};
2111 if (!
I.mayThrow(
true))
2114 if (
const auto *CB = dyn_cast<CallBase>(&
I)) {
2115 bool IsKnownNoUnwind;
2116 return AA::hasAssumedIRAttr<Attribute::NoUnwind>(
2123 bool UsedAssumedInformation =
false;
2124 if (!
A.checkForAllInstructions(CheckForNoUnwind, *
this, Opcodes,
2125 UsedAssumedInformation))
2126 return indicatePessimisticFixpoint();
2128 return ChangeStatus::UNCHANGED;
2132struct AANoUnwindFunction final :
public AANoUnwindImpl {
2134 : AANoUnwindImpl(IRP,
A) {}
2141struct AANoUnwindCallSite final
2142 : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl> {
2144 : AACalleeToCallSite<
AANoUnwind, AANoUnwindImpl>(IRP,
A) {}
2155 case Intrinsic::nvvm_barrier0:
2156 case Intrinsic::nvvm_barrier0_and:
2157 case Intrinsic::nvvm_barrier0_or:
2158 case Intrinsic::nvvm_barrier0_popc:
2160 case Intrinsic::amdgcn_s_barrier:
2161 if (ExecutedAligned)
2174 if (
auto *FI = dyn_cast<FenceInst>(
I))
2177 if (
auto *AI = dyn_cast<AtomicCmpXchgInst>(
I)) {
2184 switch (
I->getOpcode()) {
2185 case Instruction::AtomicRMW:
2186 Ordering = cast<AtomicRMWInst>(
I)->getOrdering();
2188 case Instruction::Store:
2189 Ordering = cast<StoreInst>(
I)->getOrdering();
2191 case Instruction::Load:
2192 Ordering = cast<LoadInst>(
I)->getOrdering();
2196 "New atomic operations need to be known in the attributor.");
2207 if (
auto *
MI = dyn_cast<MemIntrinsic>(
I))
2208 return !
MI->isVolatile();
2219 assert(!AA::hasAssumedIRAttr<Attribute::NoSync>(
A,
nullptr, getIRPosition(),
2220 DepClassTy::NONE, IsKnown));
2224 const std::string getAsStr(
Attributor *
A)
const override {
2225 return getAssumed() ?
"nosync" :
"may-sync";
2241 if (
I.mayReadOrWriteMemory())
2246 if (AA::hasAssumedIRAttr<Attribute::NoSync>(
2255 bool UsedAssumedInformation =
false;
2256 if (!
A.checkForAllReadWriteInstructions(CheckRWInstForNoSync, *
this,
2257 UsedAssumedInformation) ||
2258 !
A.checkForAllCallLikeInstructions(CheckForNoSync, *
this,
2259 UsedAssumedInformation))
2260 return indicatePessimisticFixpoint();
2265struct AANoSyncFunction final :
public AANoSyncImpl {
2267 : AANoSyncImpl(IRP,
A) {}
2274struct AANoSyncCallSite final : AACalleeToCallSite<AANoSync, AANoSyncImpl> {
2276 : AACalleeToCallSite<
AANoSync, AANoSyncImpl>(IRP,
A) {}
2286struct AANoFreeImpl :
public AANoFree {
2292 assert(!AA::hasAssumedIRAttr<Attribute::NoFree>(
A,
nullptr, getIRPosition(),
2293 DepClassTy::NONE, IsKnown));
2301 return AA::hasAssumedIRAttr<Attribute::NoFree>(
2303 DepClassTy::REQUIRED, IsKnown);
2306 bool UsedAssumedInformation =
false;
2307 if (!
A.checkForAllCallLikeInstructions(CheckForNoFree, *
this,
2308 UsedAssumedInformation))
2309 return indicatePessimisticFixpoint();
2310 return ChangeStatus::UNCHANGED;
2314 const std::string getAsStr(
Attributor *
A)
const override {
2315 return getAssumed() ?
"nofree" :
"may-free";
2319struct AANoFreeFunction final :
public AANoFreeImpl {
2321 : AANoFreeImpl(IRP,
A) {}
2328struct AANoFreeCallSite final : AACalleeToCallSite<AANoFree, AANoFreeImpl> {
2330 : AACalleeToCallSite<
AANoFree, AANoFreeImpl>(IRP,
A) {}
2337struct AANoFreeFloating : AANoFreeImpl {
2339 : AANoFreeImpl(IRP,
A) {}
2349 if (AA::hasAssumedIRAttr<Attribute::NoFree>(
A,
this,
2351 DepClassTy::OPTIONAL, IsKnown))
2352 return ChangeStatus::UNCHANGED;
2354 Value &AssociatedValue = getIRPosition().getAssociatedValue();
2355 auto Pred = [&](
const Use &
U,
bool &Follow) ->
bool {
2357 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
2365 return AA::hasAssumedIRAttr<Attribute::NoFree>(
2367 DepClassTy::REQUIRED, IsKnown);
2370 if (isa<GetElementPtrInst>(UserI) || isa<PHINode>(UserI) ||
2371 isa<SelectInst>(UserI)) {
2375 if (isa<StoreInst>(UserI) || isa<LoadInst>(UserI))
2378 if (isa<ReturnInst>(UserI) && getIRPosition().isArgumentPosition())
2384 if (!
A.checkForAllUses(Pred, *
this, AssociatedValue))
2385 return indicatePessimisticFixpoint();
2387 return ChangeStatus::UNCHANGED;
2392struct AANoFreeArgument final : AANoFreeFloating {
2394 : AANoFreeFloating(IRP,
A) {}
2401struct AANoFreeCallSiteArgument final : AANoFreeFloating {
2403 : AANoFreeFloating(IRP,
A) {}
2411 Argument *Arg = getAssociatedArgument();
2413 return indicatePessimisticFixpoint();
2416 if (AA::hasAssumedIRAttr<Attribute::NoFree>(
A,
this, ArgPos,
2417 DepClassTy::REQUIRED, IsKnown))
2418 return ChangeStatus::UNCHANGED;
2419 return indicatePessimisticFixpoint();
2427struct AANoFreeReturned final : AANoFreeFloating {
2429 : AANoFreeFloating(IRP,
A) {
2444 void trackStatistics()
const override {}
2448struct AANoFreeCallSiteReturned final : AANoFreeFloating {
2450 : AANoFreeFloating(IRP,
A) {}
2453 return ChangeStatus::UNCHANGED;
2464 bool IgnoreSubsumingPositions) {
2466 AttrKinds.
push_back(Attribute::NonNull);
2469 AttrKinds.
push_back(Attribute::Dereferenceable);
2470 if (
A.hasAttr(IRP, AttrKinds, IgnoreSubsumingPositions, Attribute::NonNull))
2477 if (!Fn->isDeclaration()) {
2487 bool UsedAssumedInformation =
false;
2488 if (!
A.checkForAllInstructions(
2490 Worklist.push_back({*cast<ReturnInst>(I).getReturnValue(), &I});
2494 UsedAssumedInformation,
false,
true))
2506 Attribute::NonNull)});
2511static int64_t getKnownNonNullAndDerefBytesForUse(
2513 const Use *U,
const Instruction *
I,
bool &IsNonNull,
bool &TrackUse) {
2516 const Value *UseV =
U->get();
2523 if (isa<CastInst>(
I)) {
2528 if (isa<GetElementPtrInst>(
I)) {
2538 if (
const auto *CB = dyn_cast<CallBase>(
I)) {
2541 U, {Attribute::NonNull, Attribute::Dereferenceable})) {
2558 bool IsKnownNonNull;
2559 AA::hasAssumedIRAttr<Attribute::NonNull>(
A, &QueryingAA, IRP,
2561 IsNonNull |= IsKnownNonNull;
2568 if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
2569 Loc->Size.isScalable() ||
I->isVolatile())
2575 if (
Base &&
Base == &AssociatedValue) {
2576 int64_t DerefBytes = Loc->Size.getValue() +
Offset;
2578 return std::max(int64_t(0), DerefBytes);
2585 int64_t DerefBytes = Loc->Size.getValue();
2587 return std::max(int64_t(0), DerefBytes);
2598 Value &
V = *getAssociatedValue().stripPointerCasts();
2599 if (isa<ConstantPointerNull>(V)) {
2600 indicatePessimisticFixpoint();
2605 followUsesInMBEC(*
this,
A, getState(), *CtxI);
2611 bool IsNonNull =
false;
2612 bool TrackUse =
false;
2613 getKnownNonNullAndDerefBytesForUse(
A, *
this, getAssociatedValue(), U,
I,
2614 IsNonNull, TrackUse);
2615 State.setKnown(IsNonNull);
2620 const std::string getAsStr(
Attributor *
A)
const override {
2621 return getAssumed() ?
"nonnull" :
"may-null";
2626struct AANonNullFloating :
public AANonNullImpl {
2628 : AANonNullImpl(IRP,
A) {}
2633 bool IsKnownNonNull;
2634 return AA::hasAssumedIRAttr<Attribute::NonNull>(
2635 A, *
this, IRP, DepClassTy::OPTIONAL, IsKnownNonNull);
2639 bool UsedAssumedInformation =
false;
2640 Value *AssociatedValue = &getAssociatedValue();
2642 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
2647 Values.
size() != 1 || Values.
front().getValue() != AssociatedValue;
2651 if (
auto *
PHI = dyn_cast<PHINode>(AssociatedValue))
2653 return AA::hasAssumedIRAttr<Attribute::NonNull>(
2654 A, this, IRPosition::value(*Op), DepClassTy::OPTIONAL,
2657 return ChangeStatus::UNCHANGED;
2658 if (
auto *
Select = dyn_cast<SelectInst>(AssociatedValue))
2659 if (AA::hasAssumedIRAttr<Attribute::NonNull>(
2661 DepClassTy::OPTIONAL, IsKnown) &&
2662 AA::hasAssumedIRAttr<Attribute::NonNull>(
2664 DepClassTy::OPTIONAL, IsKnown))
2665 return ChangeStatus::UNCHANGED;
2672 if (AVIRP == getIRPosition() || !CheckIRP(AVIRP))
2673 return indicatePessimisticFixpoint();
2674 return ChangeStatus::UNCHANGED;
2677 for (
const auto &VAC : Values)
2679 return indicatePessimisticFixpoint();
2681 return ChangeStatus::UNCHANGED;
2689struct AANonNullReturned final
2690 : AAReturnedFromReturnedValues<AANonNull, AANonNull, AANonNull::StateType,
2691 false, AANonNull::IRAttributeKind, false> {
2698 const std::string getAsStr(
Attributor *
A)
const override {
2699 return getAssumed() ?
"nonnull" :
"may-null";
2707struct AANonNullArgument final
2708 : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl> {
2710 : AAArgumentFromCallSiteArguments<
AANonNull, AANonNullImpl>(IRP,
A) {}
2716struct AANonNullCallSiteArgument final : AANonNullFloating {
2718 : AANonNullFloating(IRP,
A) {}
2725struct AANonNullCallSiteReturned final
2726 : AACalleeToCallSite<AANonNull, AANonNullImpl> {
2728 : AACalleeToCallSite<
AANonNull, AANonNullImpl>(IRP,
A) {}
2744 assert(!AA::hasAssumedIRAttr<Attribute::MustProgress>(
2745 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2750 const std::string getAsStr(
Attributor *
A)
const override {
2751 return getAssumed() ?
"mustprogress" :
"may-not-progress";
2755struct AAMustProgressFunction final : AAMustProgressImpl {
2757 : AAMustProgressImpl(IRP,
A) {}
2762 if (AA::hasAssumedIRAttr<Attribute::WillReturn>(
2763 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnown)) {
2765 return indicateOptimisticFixpoint();
2766 return ChangeStatus::UNCHANGED;
2771 bool IsKnownMustProgress;
2772 return AA::hasAssumedIRAttr<Attribute::MustProgress>(
2773 A,
this, IPos, DepClassTy::REQUIRED, IsKnownMustProgress,
2777 bool AllCallSitesKnown =
true;
2778 if (!
A.checkForAllCallSites(CheckForMustProgress, *
this,
2781 return indicatePessimisticFixpoint();
2783 return ChangeStatus::UNCHANGED;
2787 void trackStatistics()
const override {
2793struct AAMustProgressCallSite final : AAMustProgressImpl {
2795 : AAMustProgressImpl(IRP,
A) {}
2804 bool IsKnownMustProgress;
2805 if (!AA::hasAssumedIRAttr<Attribute::MustProgress>(
2806 A,
this, FnPos, DepClassTy::REQUIRED, IsKnownMustProgress))
2807 return indicatePessimisticFixpoint();
2808 return ChangeStatus::UNCHANGED;
2812 void trackStatistics()
const override {
2827 assert(!AA::hasAssumedIRAttr<Attribute::NoRecurse>(
2828 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2833 const std::string getAsStr(
Attributor *
A)
const override {
2834 return getAssumed() ?
"norecurse" :
"may-recurse";
2838struct AANoRecurseFunction final : AANoRecurseImpl {
2840 : AANoRecurseImpl(IRP,
A) {}
2847 bool IsKnownNoRecurse;
2848 if (!AA::hasAssumedIRAttr<Attribute::NoRecurse>(
2851 DepClassTy::NONE, IsKnownNoRecurse))
2853 return IsKnownNoRecurse;
2855 bool UsedAssumedInformation =
false;
2856 if (
A.checkForAllCallSites(CallSitePred, *
this,
true,
2857 UsedAssumedInformation)) {
2863 if (!UsedAssumedInformation)
2864 indicateOptimisticFixpoint();
2865 return ChangeStatus::UNCHANGED;
2870 DepClassTy::REQUIRED);
2871 if (EdgeReachability && EdgeReachability->
canReach(
A, *getAnchorScope()))
2872 return indicatePessimisticFixpoint();
2873 return ChangeStatus::UNCHANGED;
2880struct AANoRecurseCallSite final
2881 : AACalleeToCallSite<AANoRecurse, AANoRecurseImpl> {
2883 : AACalleeToCallSite<
AANoRecurse, AANoRecurseImpl>(IRP,
A) {}
2898 const std::string getAsStr(
Attributor *
A)
const override {
2899 return getAssumed() ?
"non-convergent" :
"may-be-convergent";
2903struct AANonConvergentFunction final : AANonConvergentImpl {
2905 : AANonConvergentImpl(IRP,
A) {}
2911 auto CalleeIsNotConvergent = [&](
Instruction &Inst) {
2912 CallBase &CB = cast<CallBase>(Inst);
2914 if (!Callee ||
Callee->isIntrinsic()) {
2917 if (
Callee->isDeclaration()) {
2918 return !
Callee->hasFnAttribute(Attribute::Convergent);
2925 bool UsedAssumedInformation =
false;
2926 if (!
A.checkForAllCallLikeInstructions(CalleeIsNotConvergent, *
this,
2927 UsedAssumedInformation)) {
2928 return indicatePessimisticFixpoint();
2930 return ChangeStatus::UNCHANGED;
2934 if (isKnownNotConvergent() &&
2935 A.hasAttr(getIRPosition(), Attribute::Convergent)) {
2936 A.removeAttrs(getIRPosition(), {Attribute::Convergent});
2937 return ChangeStatus::CHANGED;
2939 return ChangeStatus::UNCHANGED;
2956 const size_t UBPrevSize = KnownUBInsts.size();
2957 const size_t NoUBPrevSize = AssumedNoUBInsts.size();
2961 if (
I.isVolatile() &&
I.mayWriteToMemory())
2965 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
2974 "Expected pointer operand of memory accessing instruction");
2978 std::optional<Value *> SimplifiedPtrOp =
2979 stopOnUndefOrAssumed(
A, PtrOp, &
I);
2980 if (!SimplifiedPtrOp || !*SimplifiedPtrOp)
2982 const Value *PtrOpVal = *SimplifiedPtrOp;
2987 if (!isa<ConstantPointerNull>(PtrOpVal)) {
2988 AssumedNoUBInsts.insert(&
I);
3000 AssumedNoUBInsts.insert(&
I);
3002 KnownUBInsts.insert(&
I);
3011 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
3015 auto *BrInst = cast<BranchInst>(&
I);
3018 if (BrInst->isUnconditional())
3023 std::optional<Value *> SimplifiedCond =
3024 stopOnUndefOrAssumed(
A, BrInst->getCondition(), BrInst);
3025 if (!SimplifiedCond || !*SimplifiedCond)
3027 AssumedNoUBInsts.insert(&
I);
3035 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
3044 for (
unsigned idx = 0; idx < CB.
arg_size(); idx++) {
3050 if (idx >=
Callee->arg_size())
3062 bool IsKnownNoUndef;
3063 AA::hasAssumedIRAttr<Attribute::NoUndef>(
3064 A,
this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNoUndef);
3065 if (!IsKnownNoUndef)
3067 bool UsedAssumedInformation =
false;
3068 std::optional<Value *> SimplifiedVal =
3071 if (UsedAssumedInformation)
3073 if (SimplifiedVal && !*SimplifiedVal)
3075 if (!SimplifiedVal || isa<UndefValue>(**SimplifiedVal)) {
3076 KnownUBInsts.insert(&
I);
3080 !isa<ConstantPointerNull>(**SimplifiedVal))
3082 bool IsKnownNonNull;
3083 AA::hasAssumedIRAttr<Attribute::NonNull>(
3084 A,
this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNonNull);
3086 KnownUBInsts.insert(&
I);
3092 auto &RI = cast<ReturnInst>(
I);
3095 std::optional<Value *> SimplifiedRetValue =
3096 stopOnUndefOrAssumed(
A, RI.getReturnValue(), &
I);
3097 if (!SimplifiedRetValue || !*SimplifiedRetValue)
3114 if (isa<ConstantPointerNull>(*SimplifiedRetValue)) {
3115 bool IsKnownNonNull;
3116 AA::hasAssumedIRAttr<Attribute::NonNull>(
3120 KnownUBInsts.insert(&
I);
3126 bool UsedAssumedInformation =
false;
3127 A.checkForAllInstructions(InspectMemAccessInstForUB, *
this,
3128 {Instruction::Load, Instruction::Store,
3129 Instruction::AtomicCmpXchg,
3130 Instruction::AtomicRMW},
3131 UsedAssumedInformation,
3133 A.checkForAllInstructions(InspectBrInstForUB, *
this, {Instruction::Br},
3134 UsedAssumedInformation,
3136 A.checkForAllCallLikeInstructions(InspectCallSiteForUB, *
this,
3137 UsedAssumedInformation);
3141 if (!getAnchorScope()->getReturnType()->isVoidTy()) {
3143 if (!
A.isAssumedDead(ReturnIRP,
this,
nullptr, UsedAssumedInformation)) {
3144 bool IsKnownNoUndef;
3145 AA::hasAssumedIRAttr<Attribute::NoUndef>(
3146 A,
this, ReturnIRP, DepClassTy::NONE, IsKnownNoUndef);
3148 A.checkForAllInstructions(InspectReturnInstForUB, *
this,
3149 {Instruction::Ret}, UsedAssumedInformation,
3154 if (NoUBPrevSize != AssumedNoUBInsts.size() ||
3155 UBPrevSize != KnownUBInsts.size())
3156 return ChangeStatus::CHANGED;
3157 return ChangeStatus::UNCHANGED;
3161 return KnownUBInsts.count(
I);
3164 bool isAssumedToCauseUB(
Instruction *
I)
const override {
3171 switch (
I->getOpcode()) {
3172 case Instruction::Load:
3173 case Instruction::Store:
3174 case Instruction::AtomicCmpXchg:
3175 case Instruction::AtomicRMW:
3176 return !AssumedNoUBInsts.count(
I);
3177 case Instruction::Br: {
3178 auto *BrInst = cast<BranchInst>(
I);
3179 if (BrInst->isUnconditional())
3181 return !AssumedNoUBInsts.count(
I);
3190 if (KnownUBInsts.empty())
3191 return ChangeStatus::UNCHANGED;
3193 A.changeToUnreachableAfterManifest(
I);
3194 return ChangeStatus::CHANGED;
3198 const std::string getAsStr(
Attributor *
A)
const override {
3199 return getAssumed() ?
"undefined-behavior" :
"no-ub";
3244 bool UsedAssumedInformation =
false;
3245 std::optional<Value *> SimplifiedV =
3248 if (!UsedAssumedInformation) {
3253 KnownUBInsts.insert(
I);
3254 return std::nullopt;
3260 if (isa<UndefValue>(V)) {
3261 KnownUBInsts.insert(
I);
3262 return std::nullopt;
3268struct AAUndefinedBehaviorFunction final : AAUndefinedBehaviorImpl {
3270 : AAUndefinedBehaviorImpl(IRP,
A) {}
3273 void trackStatistics()
const override {
3275 "Number of instructions known to have UB");
3277 KnownUBInsts.size();
3298 if (SCCI.hasCycle())
3308 for (
auto *L : LI->getLoopsInPreorder()) {
3322 assert(!AA::hasAssumedIRAttr<Attribute::WillReturn>(
3323 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
3328 bool isImpliedByMustprogressAndReadonly(
Attributor &
A,
bool KnownOnly) {
3329 if (!
A.hasAttr(getIRPosition(), {Attribute::MustProgress}))
3334 return IsKnown || !KnownOnly;
3340 if (isImpliedByMustprogressAndReadonly(
A,
false))
3341 return ChangeStatus::UNCHANGED;
3346 if (AA::hasAssumedIRAttr<Attribute::WillReturn>(
3347 A,
this, IPos, DepClassTy::REQUIRED, IsKnown)) {
3353 bool IsKnownNoRecurse;
3354 return AA::hasAssumedIRAttr<Attribute::NoRecurse>(
3355 A,
this, IPos, DepClassTy::REQUIRED, IsKnownNoRecurse);
3358 bool UsedAssumedInformation =
false;
3359 if (!
A.checkForAllCallLikeInstructions(CheckForWillReturn, *
this,
3360 UsedAssumedInformation))
3361 return indicatePessimisticFixpoint();
3363 return ChangeStatus::UNCHANGED;
3367 const std::string getAsStr(
Attributor *
A)
const override {
3368 return getAssumed() ?
"willreturn" :
"may-noreturn";
3372struct AAWillReturnFunction final : AAWillReturnImpl {
3374 : AAWillReturnImpl(IRP,
A) {}
3378 AAWillReturnImpl::initialize(
A);
3381 assert(
F &&
"Did expect an anchor function");
3382 if (
F->isDeclaration() || mayContainUnboundedCycle(*
F,
A))
3383 indicatePessimisticFixpoint();
3391struct AAWillReturnCallSite final
3392 : AACalleeToCallSite<AAWillReturn, AAWillReturnImpl> {
3394 : AACalleeToCallSite<
AAWillReturn, AAWillReturnImpl>(IRP,
A) {}
3398 if (isImpliedByMustprogressAndReadonly(
A,
false))
3399 return ChangeStatus::UNCHANGED;
3401 return AACalleeToCallSite::updateImpl(
A);
3423 const ToTy *To =
nullptr;
3433 assert(Hash == 0 &&
"Computed hash twice!");
3437 detail::combineHashValue(PairDMI ::getHashValue({
From, To}),
3438 InstSetDMI::getHashValue(ExclusionSet));
3448 :
From(&
From), To(&To), ExclusionSet(ES) {
3450 if (!ES || ES->
empty()) {
3451 ExclusionSet =
nullptr;
3452 }
else if (MakeUnique) {
3453 ExclusionSet =
A.getInfoCache().getOrCreateUniqueBlockExecutionSet(ES);
3458 :
From(RQI.
From), To(RQI.To), ExclusionSet(RQI.ExclusionSet) {}
3471 return &TombstoneKey;
3478 if (!PairDMI::isEqual({
LHS->From,
LHS->To}, {
RHS->From,
RHS->To}))
3480 return InstSetDMI::isEqual(
LHS->ExclusionSet,
RHS->ExclusionSet);
3484#define DefineKeys(ToTy) \
3486 ReachabilityQueryInfo<ToTy> \
3487 DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::EmptyKey = \
3488 ReachabilityQueryInfo<ToTy>( \
3489 DenseMapInfo<const Instruction *>::getEmptyKey(), \
3490 DenseMapInfo<const ToTy *>::getEmptyKey()); \
3492 ReachabilityQueryInfo<ToTy> \
3493 DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::TombstoneKey = \
3494 ReachabilityQueryInfo<ToTy>( \
3495 DenseMapInfo<const Instruction *>::getTombstoneKey(), \
3496 DenseMapInfo<const ToTy *>::getTombstoneKey());
3505template <
typename BaseTy,
typename ToTy>
3506struct CachedReachabilityAA :
public BaseTy {
3512 bool isQueryAA()
const override {
return true; }
3517 for (
unsigned u = 0,
e = QueryVector.size();
u <
e; ++
u) {
3518 RQITy *RQI = QueryVector[
u];
3519 if (RQI->Result == RQITy::Reachable::No &&
3521 Changed = ChangeStatus::CHANGED;
3527 bool IsTemporaryRQI) = 0;
3530 RQITy &RQI,
bool UsedExclusionSet,
bool IsTemporaryRQI) {
3535 QueryCache.erase(&RQI);
3541 if (
Result == RQITy::Reachable::Yes || !UsedExclusionSet) {
3542 RQITy PlainRQI(RQI.From, RQI.To);
3543 if (!QueryCache.count(&PlainRQI)) {
3544 RQITy *RQIPtr =
new (
A.Allocator) RQITy(RQI.From, RQI.To);
3546 QueryVector.push_back(RQIPtr);
3547 QueryCache.insert(RQIPtr);
3552 if (IsTemporaryRQI &&
Result != RQITy::Reachable::Yes && UsedExclusionSet) {
3553 assert((!RQI.ExclusionSet || !RQI.ExclusionSet->empty()) &&
3554 "Did not expect empty set!");
3555 RQITy *RQIPtr =
new (
A.Allocator)
3556 RQITy(
A, *RQI.From, *RQI.To, RQI.ExclusionSet,
true);
3557 assert(RQIPtr->Result == RQITy::Reachable::No &&
"Already reachable?");
3559 assert(!QueryCache.count(RQIPtr));
3560 QueryVector.push_back(RQIPtr);
3561 QueryCache.insert(RQIPtr);
3564 if (
Result == RQITy::Reachable::No && IsTemporaryRQI)
3565 A.registerForUpdate(*
this);
3566 return Result == RQITy::Reachable::Yes;
3569 const std::string getAsStr(
Attributor *
A)
const override {
3571 return "#queries(" + std::to_string(QueryVector.size()) +
")";
3574 bool checkQueryCache(
Attributor &
A, RQITy &StackRQI,
3575 typename RQITy::Reachable &
Result) {
3576 if (!this->getState().isValidState()) {
3577 Result = RQITy::Reachable::Yes;
3583 if (StackRQI.ExclusionSet) {
3584 RQITy PlainRQI(StackRQI.From, StackRQI.To);
3585 auto It = QueryCache.find(&PlainRQI);
3586 if (It != QueryCache.end() && (*It)->Result == RQITy::Reachable::No) {
3587 Result = RQITy::Reachable::No;
3592 auto It = QueryCache.find(&StackRQI);
3593 if (It != QueryCache.end()) {
3600 QueryCache.insert(&StackRQI);
3609struct AAIntraFnReachabilityFunction final
3610 :
public CachedReachabilityAA<AAIntraFnReachability, Instruction> {
3611 using Base = CachedReachabilityAA<AAIntraFnReachability, Instruction>;
3618 bool isAssumedReachable(
3621 auto *NonConstThis =
const_cast<AAIntraFnReachabilityFunction *
>(
this);
3625 RQITy StackRQI(
A,
From, To, ExclusionSet,
false);
3626 typename RQITy::Reachable
Result;
3627 if (!NonConstThis->checkQueryCache(
A, StackRQI, Result))
3628 return NonConstThis->isReachableImpl(
A, StackRQI,
3630 return Result == RQITy::Reachable::Yes;
3637 A.getAAFor<
AAIsDead>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
3640 [&](
const auto &DeadEdge) {
3641 return LivenessAA->isEdgeDead(DeadEdge.first,
3645 return LivenessAA->isAssumedDead(BB);
3647 return ChangeStatus::UNCHANGED;
3651 return Base::updateImpl(
A);
3655 bool IsTemporaryRQI)
override {
3657 bool UsedExclusionSet =
false;
3662 while (IP && IP != &To) {
3663 if (ExclusionSet && IP != Origin && ExclusionSet->
count(IP)) {
3664 UsedExclusionSet =
true;
3675 "Not an intra-procedural query!");
3679 if (FromBB == ToBB &&
3680 WillReachInBlock(*RQI.From, *RQI.To, RQI.ExclusionSet))
3681 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3686 if (!WillReachInBlock(ToBB->
front(), *RQI.To, RQI.ExclusionSet))
3687 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3692 if (RQI.ExclusionSet)
3693 for (
auto *
I : *RQI.ExclusionSet)
3694 if (
I->getFunction() == Fn)
3695 ExclusionBlocks.
insert(
I->getParent());
3698 if (ExclusionBlocks.
count(FromBB) &&
3701 return rememberResult(
A, RQITy::Reachable::No, RQI,
true, IsTemporaryRQI);
3704 A.getAAFor<
AAIsDead>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
3705 if (LivenessAA && LivenessAA->isAssumedDead(ToBB)) {
3706 DeadBlocks.insert(ToBB);
3707 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3716 while (!Worklist.
empty()) {
3718 if (!Visited.
insert(BB).second)
3721 if (LivenessAA && LivenessAA->isEdgeDead(BB, SuccBB)) {
3722 LocalDeadEdges.
insert({BB, SuccBB});
3727 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3730 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3733 if (ExclusionBlocks.
count(SuccBB)) {
3734 UsedExclusionSet =
true;
3741 DeadEdges.insert(LocalDeadEdges.
begin(), LocalDeadEdges.
end());
3742 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3747 void trackStatistics()
const override {}
3767 bool IgnoreSubsumingPositions) {
3768 assert(ImpliedAttributeKind == Attribute::NoAlias &&
3769 "Unexpected attribute kind");
3772 if (isa<AllocaInst>(Val))
3775 IgnoreSubsumingPositions =
true;
3778 if (isa<UndefValue>(Val))
3781 if (isa<ConstantPointerNull>(Val) &&
3786 if (
A.hasAttr(IRP, {Attribute::ByVal, Attribute::NoAlias},
3787 IgnoreSubsumingPositions, Attribute::NoAlias))
3797 "Noalias is a pointer attribute");
3800 const std::string getAsStr(
Attributor *
A)
const override {
3801 return getAssumed() ?
"noalias" :
"may-alias";
3806struct AANoAliasFloating final : AANoAliasImpl {
3808 : AANoAliasImpl(IRP,
A) {}
3813 return indicatePessimisticFixpoint();
3817 void trackStatistics()
const override {
3823struct AANoAliasArgument final
3824 : AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl> {
3825 using Base = AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl>;
3837 if (AA::hasAssumedIRAttr<Attribute::NoSync>(
3839 DepClassTy::OPTIONAL, IsKnownNoSycn))
3840 return Base::updateImpl(
A);
3845 return Base::updateImpl(
A);
3849 bool UsedAssumedInformation =
false;
3850 if (
A.checkForAllCallSites(
3852 true, UsedAssumedInformation))
3853 return Base::updateImpl(
A);
3861 return indicatePessimisticFixpoint();
3868struct AANoAliasCallSiteArgument final : AANoAliasImpl {
3870 : AANoAliasImpl(IRP,
A) {}
3876 const CallBase &CB,
unsigned OtherArgNo) {
3878 if (this->getCalleeArgNo() == (
int)OtherArgNo)
3890 if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadNone()) {
3891 A.recordDependence(*CBArgMemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3898 if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadOnly() &&
3900 A.recordDependence(MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3901 A.recordDependence(*CBArgMemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3907 AAR =
A.getInfoCache().getAnalysisResultForFunction<
AAManager>(
3911 bool IsAliasing = !AAR || !AAR->
isNoAlias(&getAssociatedValue(), ArgOp);
3913 "callsite arguments: "
3914 << getAssociatedValue() <<
" " << *ArgOp <<
" => "
3915 << (IsAliasing ?
"" :
"no-") <<
"alias \n");
3920 bool isKnownNoAliasDueToNoAliasPreservation(
3940 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
3951 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
3956 bool IsKnownNoCapture;
3957 if (AA::hasAssumedIRAttr<Attribute::NoCapture>(
3959 DepClassTy::OPTIONAL, IsKnownNoCapture))
3965 A, *UserI, *getCtxI(), *
this,
nullptr,
3966 [ScopeFn](
const Function &Fn) {
return &Fn != ScopeFn; }))
3974 case UseCaptureKind::NO_CAPTURE:
3976 case UseCaptureKind::MAY_CAPTURE:
3980 case UseCaptureKind::PASSTHROUGH:
3987 bool IsKnownNoCapture;
3989 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
3990 A,
this, VIRP, DepClassTy::NONE, IsKnownNoCapture,
false, &NoCaptureAA);
3991 if (!IsAssumedNoCapture &&
3993 if (!
A.checkForAllUses(UsePred, *
this, getAssociatedValue())) {
3995 dbgs() <<
"[AANoAliasCSArg] " << getAssociatedValue()
3996 <<
" cannot be noalias as it is potentially captured\n");
4001 A.recordDependence(*NoCaptureAA, *
this, DepClassTy::OPTIONAL);
4006 const auto &CB = cast<CallBase>(getAnchorValue());
4007 for (
unsigned OtherArgNo = 0; OtherArgNo < CB.
arg_size(); OtherArgNo++)
4008 if (mayAliasWithArgument(
A, AAR, MemBehaviorAA, CB, OtherArgNo))
4018 auto *MemBehaviorAA =
4021 A.recordDependence(*MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
4022 return ChangeStatus::UNCHANGED;
4025 bool IsKnownNoAlias;
4027 if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
4028 A,
this, VIRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
4030 <<
" is not no-alias at the definition\n");
4031 return indicatePessimisticFixpoint();
4035 if (MemBehaviorAA &&
4036 isKnownNoAliasDueToNoAliasPreservation(
A, AAR, *MemBehaviorAA)) {
4038 dbgs() <<
"[AANoAlias] No-Alias deduced via no-alias preservation\n");
4039 return ChangeStatus::UNCHANGED;
4042 return indicatePessimisticFixpoint();
4050struct AANoAliasReturned final : AANoAliasImpl {
4052 : AANoAliasImpl(IRP,
A) {}
4057 auto CheckReturnValue = [&](
Value &RV) ->
bool {
4058 if (
Constant *
C = dyn_cast<Constant>(&RV))
4059 if (
C->isNullValue() || isa<UndefValue>(
C))
4064 if (!isa<CallBase>(&RV))
4068 bool IsKnownNoAlias;
4069 if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
4070 A,
this, RVPos, DepClassTy::REQUIRED, IsKnownNoAlias))
4073 bool IsKnownNoCapture;
4075 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
4076 A,
this, RVPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
4078 return IsAssumedNoCapture ||
4082 if (!
A.checkForAllReturnedValues(CheckReturnValue, *
this))
4083 return indicatePessimisticFixpoint();
4085 return ChangeStatus::UNCHANGED;
4093struct AANoAliasCallSiteReturned final
4094 : AACalleeToCallSite<AANoAlias, AANoAliasImpl> {
4096 : AACalleeToCallSite<
AANoAlias, AANoAliasImpl>(IRP,
A) {}
4106struct AAIsDeadValueImpl :
public AAIsDead {
4110 bool isAssumedDead()
const override {
return isAssumed(IS_DEAD); }
4113 bool isKnownDead()
const override {
return isKnown(IS_DEAD); }
4116 bool isAssumedDead(
const BasicBlock *BB)
const override {
return false; }
4119 bool isKnownDead(
const BasicBlock *BB)
const override {
return false; }
4122 bool isAssumedDead(
const Instruction *
I)
const override {
4123 return I == getCtxI() && isAssumedDead();
4127 bool isKnownDead(
const Instruction *
I)
const override {
4128 return isAssumedDead(
I) && isKnownDead();
4132 const std::string getAsStr(
Attributor *
A)
const override {
4133 return isAssumedDead() ?
"assumed-dead" :
"assumed-live";
4139 if (
V.getType()->isVoidTy() ||
V.use_empty())
4143 if (!isa<Constant>(V)) {
4144 if (
auto *
I = dyn_cast<Instruction>(&V))
4145 if (!
A.isRunOn(*
I->getFunction()))
4147 bool UsedAssumedInformation =
false;
4148 std::optional<Constant *>
C =
4149 A.getAssumedConstant(V, *
this, UsedAssumedInformation);
4154 auto UsePred = [&](
const Use &
U,
bool &Follow) {
return false; };
4159 return A.checkForAllUses(UsePred, *
this, V,
false,
4160 DepClassTy::REQUIRED,
4169 auto *CB = dyn_cast<CallBase>(
I);
4170 if (!CB || isa<IntrinsicInst>(CB))
4175 bool IsKnownNoUnwind;
4176 if (!AA::hasAssumedIRAttr<Attribute::NoUnwind>(
4177 A,
this, CallIRP, DepClassTy::OPTIONAL, IsKnownNoUnwind))
4185struct AAIsDeadFloating :
public AAIsDeadValueImpl {
4187 : AAIsDeadValueImpl(IRP,
A) {}
4191 AAIsDeadValueImpl::initialize(
A);
4193 if (isa<UndefValue>(getAssociatedValue())) {
4194 indicatePessimisticFixpoint();
4198 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
4199 if (!isAssumedSideEffectFree(
A,
I)) {
4200 if (!isa_and_nonnull<StoreInst>(
I) && !isa_and_nonnull<FenceInst>(
I))
4201 indicatePessimisticFixpoint();
4203 removeAssumedBits(HAS_NO_EFFECT);
4210 if (!ExecDomainAA || !ExecDomainAA->isNoOpFence(FI))
4212 A.recordDependence(*ExecDomainAA, *
this, DepClassTy::OPTIONAL);
4219 if (
SI.isVolatile())
4225 bool UsedAssumedInformation =
false;
4226 if (!AssumeOnlyInst) {
4227 PotentialCopies.clear();
4229 UsedAssumedInformation)) {
4232 <<
"[AAIsDead] Could not determine potential copies of store!\n");
4236 LLVM_DEBUG(
dbgs() <<
"[AAIsDead] Store has " << PotentialCopies.size()
4237 <<
" potential copies.\n");
4242 UsedAssumedInformation))
4244 if (
auto *LI = dyn_cast<LoadInst>(V)) {
4246 auto &UserI = cast<Instruction>(*U.getUser());
4247 if (InfoCache.isOnlyUsedByAssume(UserI)) {
4249 AssumeOnlyInst->insert(&UserI);
4252 return A.isAssumedDead(U,
this,
nullptr, UsedAssumedInformation);
4258 <<
" is assumed live!\n");
4264 const std::string getAsStr(
Attributor *
A)
const override {
4265 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
4266 if (isa_and_nonnull<StoreInst>(
I))
4268 return "assumed-dead-store";
4269 if (isa_and_nonnull<FenceInst>(
I))
4271 return "assumed-dead-fence";
4272 return AAIsDeadValueImpl::getAsStr(
A);
4277 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
4278 if (
auto *SI = dyn_cast_or_null<StoreInst>(
I)) {
4279 if (!isDeadStore(
A, *SI))
4280 return indicatePessimisticFixpoint();
4281 }
else if (
auto *FI = dyn_cast_or_null<FenceInst>(
I)) {
4282 if (!isDeadFence(
A, *FI))
4283 return indicatePessimisticFixpoint();
4285 if (!isAssumedSideEffectFree(
A,
I))
4286 return indicatePessimisticFixpoint();
4287 if (!areAllUsesAssumedDead(
A, getAssociatedValue()))
4288 return indicatePessimisticFixpoint();
4293 bool isRemovableStore()
const override {
4294 return isAssumed(IS_REMOVABLE) && isa<StoreInst>(&getAssociatedValue());
4299 Value &
V = getAssociatedValue();
4300 if (
auto *
I = dyn_cast<Instruction>(&V)) {
4305 if (
auto *SI = dyn_cast<StoreInst>(
I)) {
4307 bool IsDead = isDeadStore(
A, *SI, &AssumeOnlyInst);
4310 A.deleteAfterManifest(*
I);
4311 for (
size_t i = 0; i < AssumeOnlyInst.
size(); ++i) {
4313 for (
auto *Usr : AOI->
users())
4314 AssumeOnlyInst.
insert(cast<Instruction>(Usr));
4315 A.deleteAfterManifest(*AOI);
4319 if (
auto *FI = dyn_cast<FenceInst>(
I)) {
4321 A.deleteAfterManifest(*FI);
4324 if (isAssumedSideEffectFree(
A,
I) && !isa<InvokeInst>(
I)) {
4325 A.deleteAfterManifest(*
I);
4333 void trackStatistics()
const override {
4342struct AAIsDeadArgument :
public AAIsDeadFloating {
4344 : AAIsDeadFloating(IRP,
A) {}
4348 Argument &Arg = *getAssociatedArgument();
4349 if (
A.isValidFunctionSignatureRewrite(Arg, {}))
4350 if (
A.registerFunctionSignatureRewrite(
4354 return ChangeStatus::CHANGED;
4356 return ChangeStatus::UNCHANGED;
4363struct AAIsDeadCallSiteArgument :
public AAIsDeadValueImpl {
4365 : AAIsDeadValueImpl(IRP,
A) {}
4369 AAIsDeadValueImpl::initialize(
A);
4370 if (isa<UndefValue>(getAssociatedValue()))
4371 indicatePessimisticFixpoint();
4380 Argument *Arg = getAssociatedArgument();
4382 return indicatePessimisticFixpoint();
4384 auto *ArgAA =
A.getAAFor<
AAIsDead>(*
this, ArgPos, DepClassTy::REQUIRED);
4386 return indicatePessimisticFixpoint();
4392 CallBase &CB = cast<CallBase>(getAnchorValue());
4394 assert(!isa<UndefValue>(
U.get()) &&
4395 "Expected undef values to be filtered out!");
4397 if (
A.changeUseAfterManifest(U, UV))
4398 return ChangeStatus::CHANGED;
4399 return ChangeStatus::UNCHANGED;
4406struct AAIsDeadCallSiteReturned :
public AAIsDeadFloating {
4408 : AAIsDeadFloating(IRP,
A) {}
4411 bool isAssumedDead()
const override {
4412 return AAIsDeadFloating::isAssumedDead() && IsAssumedSideEffectFree;
4417 AAIsDeadFloating::initialize(
A);
4418 if (isa<UndefValue>(getAssociatedValue())) {
4419 indicatePessimisticFixpoint();
4424 IsAssumedSideEffectFree = isAssumedSideEffectFree(
A, getCtxI());
4430 if (IsAssumedSideEffectFree && !isAssumedSideEffectFree(
A, getCtxI())) {
4431 IsAssumedSideEffectFree =
false;
4432 Changed = ChangeStatus::CHANGED;
4434 if (!areAllUsesAssumedDead(
A, getAssociatedValue()))
4435 return indicatePessimisticFixpoint();
4440 void trackStatistics()
const override {
4441 if (IsAssumedSideEffectFree)
4448 const std::string getAsStr(
Attributor *
A)
const override {
4449 return isAssumedDead()
4451 : (getAssumed() ?
"assumed-dead-users" :
"assumed-live");
4455 bool IsAssumedSideEffectFree =
true;
4458struct AAIsDeadReturned :
public AAIsDeadValueImpl {
4460 : AAIsDeadValueImpl(IRP,
A) {}
4465 bool UsedAssumedInformation =
false;
4466 A.checkForAllInstructions([](
Instruction &) {
return true; }, *
this,
4467 {Instruction::Ret}, UsedAssumedInformation);
4470 if (ACS.isCallbackCall() || !ACS.getInstruction())
4472 return areAllUsesAssumedDead(
A, *ACS.getInstruction());
4475 if (!
A.checkForAllCallSites(PredForCallSite, *
this,
true,
4476 UsedAssumedInformation))
4477 return indicatePessimisticFixpoint();
4479 return ChangeStatus::UNCHANGED;
4485 bool AnyChange =
false;
4493 bool UsedAssumedInformation =
false;
4494 A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
4495 UsedAssumedInformation);
4496 return AnyChange ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
4503struct AAIsDeadFunction :
public AAIsDead {
4509 assert(
F &&
"Did expect an anchor function");
4510 if (!isAssumedDeadInternalFunction(
A)) {
4511 ToBeExploredFrom.insert(&
F->getEntryBlock().front());
4512 assumeLive(
A,
F->getEntryBlock());
4516 bool isAssumedDeadInternalFunction(
Attributor &
A) {
4517 if (!getAnchorScope()->hasLocalLinkage())
4519 bool UsedAssumedInformation =
false;
4521 true, UsedAssumedInformation);
4525 const std::string getAsStr(
Attributor *
A)
const override {
4526 return "Live[#BB " + std::to_string(AssumedLiveBlocks.size()) +
"/" +
4527 std::to_string(getAnchorScope()->
size()) +
"][#TBEP " +
4528 std::to_string(ToBeExploredFrom.size()) +
"][#KDE " +
4529 std::to_string(KnownDeadEnds.size()) +
"]";
4534 assert(getState().isValidState() &&
4535 "Attempted to manifest an invalid state!");
4540 if (AssumedLiveBlocks.empty()) {
4541 A.deleteAfterManifest(
F);
4542 return ChangeStatus::CHANGED;
4548 bool Invoke2CallAllowed = !mayCatchAsynchronousExceptions(
F);
4550 KnownDeadEnds.set_union(ToBeExploredFrom);
4551 for (
const Instruction *DeadEndI : KnownDeadEnds) {
4552 auto *CB = dyn_cast<CallBase>(DeadEndI);
4555 bool IsKnownNoReturn;
4556 bool MayReturn = !AA::hasAssumedIRAttr<Attribute::NoReturn>(
4559 if (MayReturn && (!Invoke2CallAllowed || !isa<InvokeInst>(CB)))
4562 if (
auto *
II = dyn_cast<InvokeInst>(DeadEndI))
4563 A.registerInvokeWithDeadSuccessor(
const_cast<InvokeInst &
>(*
II));
4565 A.changeToUnreachableAfterManifest(
4566 const_cast<Instruction *
>(DeadEndI->getNextNode()));
4567 HasChanged = ChangeStatus::CHANGED;
4572 if (!AssumedLiveBlocks.count(&BB)) {
4573 A.deleteAfterManifest(BB);
4575 HasChanged = ChangeStatus::CHANGED;
4585 assert(
From->getParent() == getAnchorScope() &&
4587 "Used AAIsDead of the wrong function");
4588 return isValidState() && !AssumedLiveEdges.count(std::make_pair(
From, To));
4592 void trackStatistics()
const override {}
4595 bool isAssumedDead()
const override {
return false; }
4598 bool isKnownDead()
const override {
return false; }
4601 bool isAssumedDead(
const BasicBlock *BB)
const override {
4603 "BB must be in the same anchor scope function.");
4607 return !AssumedLiveBlocks.count(BB);
4611 bool isKnownDead(
const BasicBlock *BB)
const override {
4612 return getKnown() && isAssumedDead(BB);
4616 bool isAssumedDead(
const Instruction *
I)
const override {
4617 assert(
I->getParent()->getParent() == getAnchorScope() &&
4618 "Instruction must be in the same anchor scope function.");
4625 if (!AssumedLiveBlocks.count(
I->getParent()))
4631 if (KnownDeadEnds.count(PrevI) || ToBeExploredFrom.count(PrevI))
4639 bool isKnownDead(
const Instruction *
I)
const override {
4640 return getKnown() && isAssumedDead(
I);
4646 if (!AssumedLiveBlocks.insert(&BB).second)
4654 if (
const auto *CB = dyn_cast<CallBase>(&
I))
4656 if (
F->hasLocalLinkage())
4657 A.markLiveInternalFunction(*
F);
4681 bool IsKnownNoReturn;
4682 if (AA::hasAssumedIRAttr<Attribute::NoReturn>(
4684 return !IsKnownNoReturn;
4696 bool UsedAssumedInformation =
4697 identifyAliveSuccessors(
A, cast<CallBase>(
II), AA, AliveSuccessors);
4702 if (AAIsDeadFunction::mayCatchAsynchronousExceptions(*
II.getFunction())) {
4703 AliveSuccessors.
push_back(&
II.getUnwindDest()->front());
4707 bool IsKnownNoUnwind;
4708 if (AA::hasAssumedIRAttr<Attribute::NoUnwind>(
4710 UsedAssumedInformation |= !IsKnownNoUnwind;
4712 AliveSuccessors.
push_back(&
II.getUnwindDest()->front());
4715 return UsedAssumedInformation;
4722 bool UsedAssumedInformation =
false;
4726 std::optional<Constant *>
C =
4727 A.getAssumedConstant(*BI.
getCondition(), AA, UsedAssumedInformation);
4728 if (!
C || isa_and_nonnull<UndefValue>(*
C)) {
4730 }
else if (isa_and_nonnull<ConstantInt>(*
C)) {
4732 BI.
getSuccessor(1 - cast<ConstantInt>(*C)->getValue().getZExtValue());
4737 UsedAssumedInformation =
false;
4740 return UsedAssumedInformation;
4747 bool UsedAssumedInformation =
false;
4751 UsedAssumedInformation)) {
4758 if (Values.
empty() ||
4759 (Values.
size() == 1 &&
4760 isa_and_nonnull<UndefValue>(Values.
front().getValue()))) {
4762 return UsedAssumedInformation;
4765 Type &Ty = *
SI.getCondition()->getType();
4767 auto CheckForConstantInt = [&](
Value *
V) {
4768 if (
auto *CI = dyn_cast_if_present<ConstantInt>(
AA::getWithType(*V, Ty))) {
4776 return CheckForConstantInt(
VAC.getValue());
4780 return UsedAssumedInformation;
4783 unsigned MatchedCases = 0;
4784 for (
const auto &CaseIt :
SI.cases()) {
4785 if (
Constants.count(CaseIt.getCaseValue())) {
4787 AliveSuccessors.
push_back(&CaseIt.getCaseSuccessor()->front());
4794 AliveSuccessors.
push_back(&
SI.getDefaultDest()->front());
4795 return UsedAssumedInformation;
4801 if (AssumedLiveBlocks.empty()) {
4802 if (isAssumedDeadInternalFunction(
A))
4806 ToBeExploredFrom.insert(&
F->getEntryBlock().front());
4807 assumeLive(
A,
F->getEntryBlock());
4811 LLVM_DEBUG(
dbgs() <<
"[AAIsDead] Live [" << AssumedLiveBlocks.size() <<
"/"
4812 << getAnchorScope()->
size() <<
"] BBs and "
4813 << ToBeExploredFrom.size() <<
" exploration points and "
4814 << KnownDeadEnds.size() <<
" known dead ends\n");
4819 ToBeExploredFrom.end());
4820 decltype(ToBeExploredFrom) NewToBeExploredFrom;
4823 while (!Worklist.
empty()) {
4829 while (!
I->isTerminator() && !isa<CallBase>(
I))
4830 I =
I->getNextNode();
4832 AliveSuccessors.
clear();
4834 bool UsedAssumedInformation =
false;
4835 switch (
I->getOpcode()) {
4839 "Expected non-terminators to be handled already!");
4843 case Instruction::Call:
4844 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<CallInst>(*
I),
4845 *
this, AliveSuccessors);
4847 case Instruction::Invoke:
4848 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<InvokeInst>(*
I),
4849 *
this, AliveSuccessors);
4851 case Instruction::Br:
4852 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<BranchInst>(*
I),
4853 *
this, AliveSuccessors);
4855 case Instruction::Switch:
4856 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<SwitchInst>(*
I),
4857 *
this, AliveSuccessors);
4861 if (UsedAssumedInformation) {
4862 NewToBeExploredFrom.insert(
I);
4863 }
else if (AliveSuccessors.
empty() ||
4864 (
I->isTerminator() &&
4865 AliveSuccessors.
size() <
I->getNumSuccessors())) {
4866 if (KnownDeadEnds.insert(
I))
4871 << AliveSuccessors.
size() <<
" UsedAssumedInformation: "
4872 << UsedAssumedInformation <<
"\n");
4874 for (
const Instruction *AliveSuccessor : AliveSuccessors) {
4875 if (!
I->isTerminator()) {
4876 assert(AliveSuccessors.size() == 1 &&
4877 "Non-terminator expected to have a single successor!");
4881 auto Edge = std::make_pair(
I->getParent(), AliveSuccessor->getParent());
4882 if (AssumedLiveEdges.insert(Edge).second)
4884 if (assumeLive(
A, *AliveSuccessor->getParent()))
4891 if (NewToBeExploredFrom.size() != ToBeExploredFrom.size() ||
4893 return !ToBeExploredFrom.count(I);
4896 ToBeExploredFrom = std::move(NewToBeExploredFrom);
4905 if (ToBeExploredFrom.empty() &&
4906 getAnchorScope()->
size() == AssumedLiveBlocks.size() &&
4908 return DeadEndI->isTerminator() && DeadEndI->getNumSuccessors() == 0;
4910 return indicatePessimisticFixpoint();
4915struct AAIsDeadCallSite final : AAIsDeadFunction {
4917 : AAIsDeadFunction(IRP,
A) {}
4926 "supported for call sites yet!");
4931 return indicatePessimisticFixpoint();
4935 void trackStatistics()
const override {}
4949 Value &
V = *getAssociatedValue().stripPointerCasts();
4951 A.getAttrs(getIRPosition(),
4952 {Attribute::Dereferenceable, Attribute::DereferenceableOrNull},
4955 takeKnownDerefBytesMaximum(Attr.getValueAsInt());
4958 bool IsKnownNonNull;
4959 AA::hasAssumedIRAttr<Attribute::NonNull>(
4960 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNonNull);
4962 bool CanBeNull, CanBeFreed;
4963 takeKnownDerefBytesMaximum(
V.getPointerDereferenceableBytes(
4964 A.getDataLayout(), CanBeNull, CanBeFreed));
4967 followUsesInMBEC(*
this,
A, getState(), *CtxI);
4972 StateType &getState()
override {
return *
this; }
4973 const StateType &getState()
const override {
return *
this; }
4979 const Value *UseV =
U->get();
4984 if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
I->isVolatile())
4989 Loc->Ptr,
Offset,
A.getDataLayout(),
true);
4990 if (
Base &&
Base == &getAssociatedValue())
4991 State.addAccessedBytes(
Offset, Loc->Size.getValue());
4997 bool IsNonNull =
false;
4998 bool TrackUse =
false;
4999 int64_t DerefBytes = getKnownNonNullAndDerefBytesForUse(
5000 A, *
this, getAssociatedValue(), U,
I, IsNonNull, TrackUse);
5001 LLVM_DEBUG(
dbgs() <<
"[AADereferenceable] Deref bytes: " << DerefBytes
5002 <<
" for instruction " << *
I <<
"\n");
5004 addAccessedBytesForUse(
A, U,
I, State);
5005 State.takeKnownDerefBytesMaximum(DerefBytes);
5012 bool IsKnownNonNull;
5013 bool IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
5014 A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
5015 if (IsAssumedNonNull &&
5016 A.hasAttr(getIRPosition(), Attribute::DereferenceableOrNull)) {
5017 A.removeAttrs(getIRPosition(), {Attribute::DereferenceableOrNull});
5018 return ChangeStatus::CHANGED;
5026 bool IsKnownNonNull;
5027 bool IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
5028 A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
5029 if (IsAssumedNonNull)
5031 Ctx, getAssumedDereferenceableBytes()));
5034 Ctx, getAssumedDereferenceableBytes()));
5038 const std::string getAsStr(
Attributor *
A)
const override {
5039 if (!getAssumedDereferenceableBytes())
5040 return "unknown-dereferenceable";
5041 bool IsKnownNonNull;
5042 bool IsAssumedNonNull =
false;
5044 IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
5045 *
A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
5046 return std::string(
"dereferenceable") +
5047 (IsAssumedNonNull ?
"" :
"_or_null") +
5048 (isAssumedGlobal() ?
"_globally" :
"") +
"<" +
5049 std::to_string(getKnownDereferenceableBytes()) +
"-" +
5050 std::to_string(getAssumedDereferenceableBytes()) +
">" +
5051 (!
A ?
" [non-null is unknown]" :
"");
5056struct AADereferenceableFloating : AADereferenceableImpl {
5058 : AADereferenceableImpl(IRP,
A) {}
5063 bool UsedAssumedInformation =
false;
5065 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
5067 Values.
push_back({getAssociatedValue(), getCtxI()});
5070 Stripped = Values.
size() != 1 ||
5071 Values.
front().getValue() != &getAssociatedValue();
5077 auto VisitValueCB = [&](
const Value &
V) ->
bool {
5079 DL.getIndexSizeInBits(
V.getType()->getPointerAddressSpace());
5087 int64_t DerefBytes = 0;
5088 if (!AA || (!Stripped &&
this == AA)) {
5091 bool CanBeNull, CanBeFreed;
5093 Base->getPointerDereferenceableBytes(
DL, CanBeNull, CanBeFreed);
5094 T.GlobalState.indicatePessimisticFixpoint();
5097 DerefBytes =
DS.DerefBytesState.getAssumed();
5098 T.GlobalState &=
DS.GlobalState;
5104 int64_t OffsetSExt =
Offset.getSExtValue();
5108 T.takeAssumedDerefBytesMinimum(
5109 std::max(int64_t(0), DerefBytes - OffsetSExt));
5114 T.takeKnownDerefBytesMaximum(
5115 std::max(int64_t(0), DerefBytes - OffsetSExt));
5116 T.indicatePessimisticFixpoint();
5117 }
else if (OffsetSExt > 0) {
5123 T.indicatePessimisticFixpoint();
5127 return T.isValidState();
5130 for (
const auto &VAC : Values)
5131 if (!VisitValueCB(*
VAC.getValue()))
5132 return indicatePessimisticFixpoint();
5138 void trackStatistics()
const override {
5144struct AADereferenceableReturned final
5145 : AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl> {
5147 AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl>;
5152 void trackStatistics()
const override {
5158struct AADereferenceableArgument final
5159 : AAArgumentFromCallSiteArguments<AADereferenceable,
5160 AADereferenceableImpl> {
5162 AAArgumentFromCallSiteArguments<AADereferenceable, AADereferenceableImpl>;
5167 void trackStatistics()
const override {
5173struct AADereferenceableCallSiteArgument final : AADereferenceableFloating {
5175 : AADereferenceableFloating(IRP,
A) {}
5178 void trackStatistics()
const override {
5184struct AADereferenceableCallSiteReturned final
5185 : AACalleeToCallSite<AADereferenceable, AADereferenceableImpl> {
5186 using Base = AACalleeToCallSite<AADereferenceable, AADereferenceableImpl>;
5191 void trackStatistics()
const override {
5201 Value &AssociatedValue,
const Use *U,
5205 if (isa<CastInst>(
I)) {
5207 TrackUse = !isa<PtrToIntInst>(
I);
5210 if (
auto *
GEP = dyn_cast<GetElementPtrInst>(
I)) {
5211 if (
GEP->hasAllConstantIndices())
5217 if (
const auto *CB = dyn_cast<CallBase>(
I)) {
5231 const Value *UseV =
U->get();
5232 if (
auto *SI = dyn_cast<StoreInst>(
I)) {
5233 if (
SI->getPointerOperand() == UseV)
5234 MA =
SI->getAlign();
5235 }
else if (
auto *LI = dyn_cast<LoadInst>(
I)) {
5236 if (LI->getPointerOperand() == UseV)
5237 MA = LI->getAlign();
5238 }
else if (
auto *AI = dyn_cast<AtomicRMWInst>(
I)) {
5239 if (AI->getPointerOperand() == UseV)
5240 MA = AI->getAlign();
5241 }
else if (
auto *AI = dyn_cast<AtomicCmpXchgInst>(
I)) {
5242 if (AI->getPointerOperand() == UseV)
5243 MA = AI->getAlign();
5249 unsigned Alignment = MA->value();
5253 if (
Base == &AssociatedValue) {
5272 A.getAttrs(getIRPosition(), {Attribute::Alignment},
Attrs);
5274 takeKnownMaximum(Attr.getValueAsInt());
5276 Value &
V = *getAssociatedValue().stripPointerCasts();
5277 takeKnownMaximum(
V.getPointerAlignment(
A.getDataLayout()).value());
5280 followUsesInMBEC(*
this,
A, getState(), *CtxI);
5285 ChangeStatus LoadStoreChanged = ChangeStatus::UNCHANGED;
5288 Value &AssociatedValue = getAssociatedValue();
5289 for (
const Use &U : AssociatedValue.
uses()) {
5290 if (
auto *SI = dyn_cast<StoreInst>(
U.getUser())) {
5291 if (
SI->getPointerOperand() == &AssociatedValue)
5292 if (
SI->getAlign() < getAssumedAlign()) {
5294 "Number of times alignment added to a store");
5295 SI->setAlignment(getAssumedAlign());
5296 LoadStoreChanged = ChangeStatus::CHANGED;
5298 }
else if (
auto *LI = dyn_cast<LoadInst>(
U.getUser())) {
5299 if (LI->getPointerOperand() == &AssociatedValue)
5300 if (LI->getAlign() < getAssumedAlign()) {
5301 LI->setAlignment(getAssumedAlign());
5303 "Number of times alignment added to a load");
5304 LoadStoreChanged = ChangeStatus::CHANGED;
5311 Align InheritAlign =
5312 getAssociatedValue().getPointerAlignment(
A.getDataLayout());
5313 if (InheritAlign >= getAssumedAlign())
5314 return LoadStoreChanged;
5315 return Changed | LoadStoreChanged;
5325 if (getAssumedAlign() > 1)
5333 bool TrackUse =
false;
5335 unsigned int KnownAlign =
5336 getKnownAlignForUse(
A, *
this, getAssociatedValue(), U,
I, TrackUse);
5337 State.takeKnownMaximum(KnownAlign);
5343 const std::string getAsStr(
Attributor *
A)
const override {
5344 return "align<" + std::to_string(getKnownAlign().
value()) +
"-" +
5345 std::to_string(getAssumedAlign().
value()) +
">";
5350struct AAAlignFloating : AAAlignImpl {
5358 bool UsedAssumedInformation =
false;
5360 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
5362 Values.
push_back({getAssociatedValue(), getCtxI()});
5365 Stripped = Values.
size() != 1 ||
5366 Values.
front().getValue() != &getAssociatedValue();
5370 auto VisitValueCB = [&](
Value &
V) ->
bool {
5371 if (isa<UndefValue>(V) || isa<ConstantPointerNull>(V))
5374 DepClassTy::REQUIRED);
5375 if (!AA || (!Stripped &&
this == AA)) {
5377 unsigned Alignment = 1;
5390 Alignment =
V.getPointerAlignment(
DL).value();
5393 T.takeKnownMaximum(Alignment);
5394 T.indicatePessimisticFixpoint();
5400 return T.isValidState();
5403 for (
const auto &VAC : Values) {
5404 if (!VisitValueCB(*
VAC.getValue()))
5405 return indicatePessimisticFixpoint();
5418struct AAAlignReturned final
5419 : AAReturnedFromReturnedValues<AAAlign, AAAlignImpl> {
5420 using Base = AAReturnedFromReturnedValues<AAAlign, AAAlignImpl>;
5428struct AAAlignArgument final
5429 : AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl> {
5430 using Base = AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl>;
5438 if (
A.getInfoCache().isInvolvedInMustTailCall(*getAssociatedArgument()))
5439 return ChangeStatus::UNCHANGED;
5440 return Base::manifest(
A);
5447struct AAAlignCallSiteArgument final : AAAlignFloating {
5449 : AAAlignFloating(IRP,
A) {}
5456 if (
Argument *Arg = getAssociatedArgument())
5457 if (
A.getInfoCache().isInvolvedInMustTailCall(*Arg))
5458 return ChangeStatus::UNCHANGED;
5460 Align InheritAlign =
5461 getAssociatedValue().getPointerAlignment(
A.getDataLayout());
5462 if (InheritAlign >= getAssumedAlign())
5463 Changed = ChangeStatus::UNCHANGED;
5470 if (
Argument *Arg = getAssociatedArgument()) {
5473 const auto *ArgAlignAA =
A.getAAFor<
AAAlign>(
5476 takeKnownMaximum(ArgAlignAA->getKnownAlign().value());
5486struct AAAlignCallSiteReturned final
5487 : AACalleeToCallSite<AAAlign, AAAlignImpl> {
5488 using Base = AACalleeToCallSite<AAAlign, AAAlignImpl>;
5505 assert(!AA::hasAssumedIRAttr<Attribute::NoReturn>(
5506 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
5511 const std::string getAsStr(
Attributor *
A)
const override {
5512 return getAssumed() ?
"noreturn" :
"may-return";
5517 auto CheckForNoReturn = [](
Instruction &) {
return false; };
5518 bool UsedAssumedInformation =
false;
5519 if (!
A.checkForAllInstructions(CheckForNoReturn, *
this,
5520 {(unsigned)Instruction::Ret},
5521 UsedAssumedInformation))
5522 return indicatePessimisticFixpoint();
5523 return ChangeStatus::UNCHANGED;
5527struct AANoReturnFunction final : AANoReturnImpl {
5529 : AANoReturnImpl(IRP,
A) {}
5536struct AANoReturnCallSite final
5537 : AACalleeToCallSite<AANoReturn, AANoReturnImpl> {
5539 : AACalleeToCallSite<
AANoReturn, AANoReturnImpl>(IRP,
A) {}
5556 Value &
V = getAssociatedValue();
5557 if (
auto *
C = dyn_cast<Constant>(&V)) {
5558 if (
C->isThreadDependent())
5559 indicatePessimisticFixpoint();
5561 indicateOptimisticFixpoint();
5564 if (
auto *CB = dyn_cast<CallBase>(&V))
5567 indicateOptimisticFixpoint();
5570 if (
auto *
I = dyn_cast<Instruction>(&V)) {
5575 indicatePessimisticFixpoint();
5585 Value &
V = getAssociatedValue();
5587 if (
auto *
I = dyn_cast<Instruction>(&V))
5588 Scope =
I->getFunction();
5589 if (
auto *
A = dyn_cast<Argument>(&V)) {
5591 if (!
Scope->hasLocalLinkage())
5595 return indicateOptimisticFixpoint();
5597 bool IsKnownNoRecurse;
5598 if (AA::hasAssumedIRAttr<Attribute::NoRecurse>(
5603 auto UsePred = [&](
const Use &
U,
bool &Follow) {
5604 const Instruction *UserI = dyn_cast<Instruction>(
U.getUser());
5605 if (!UserI || isa<GetElementPtrInst>(UserI) || isa<CastInst>(UserI) ||
5606 isa<PHINode>(UserI) || isa<SelectInst>(UserI)) {
5610 if (isa<LoadInst>(UserI) || isa<CmpInst>(UserI) ||
5611 (isa<StoreInst>(UserI) &&
5612 cast<StoreInst>(UserI)->getValueOperand() !=
U.get()))
5614 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
5618 if (!Callee || !
Callee->hasLocalLinkage())
5624 DepClassTy::OPTIONAL);
5625 if (!ArgInstanceInfoAA ||
5626 !ArgInstanceInfoAA->isAssumedUniqueForAnalysis())
5631 A, *CB, *Scope, *
this,
nullptr,
5639 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
5640 if (
auto *SI = dyn_cast<StoreInst>(OldU.
getUser())) {
5641 auto *
Ptr =
SI->getPointerOperand()->stripPointerCasts();
5649 if (!
A.checkForAllUses(UsePred, *
this, V,
true,
5650 DepClassTy::OPTIONAL,
5651 true, EquivalentUseCB))
5652 return indicatePessimisticFixpoint();
5658 const std::string getAsStr(
Attributor *
A)
const override {
5659 return isAssumedUniqueForAnalysis() ?
"<unique [fAa]>" :
"<unknown>";
5663 void trackStatistics()
const override {}
5667struct AAInstanceInfoFloating : AAInstanceInfoImpl {
5669 : AAInstanceInfoImpl(IRP,
A) {}
5673struct AAInstanceInfoArgument final : AAInstanceInfoFloating {
5675 : AAInstanceInfoFloating(IRP,
A) {}
5679struct AAInstanceInfoCallSiteArgument final : AAInstanceInfoImpl {
5681 : AAInstanceInfoImpl(IRP,
A) {}
5689 Argument *Arg = getAssociatedArgument();
5691 return indicatePessimisticFixpoint();
5696 return indicatePessimisticFixpoint();
5702struct AAInstanceInfoReturned final : AAInstanceInfoImpl {
5704 : AAInstanceInfoImpl(IRP,
A) {
5720struct AAInstanceInfoCallSiteReturned final : AAInstanceInfoFloating {
5722 : AAInstanceInfoFloating(IRP,
A) {}
5729 bool IgnoreSubsumingPositions) {
5730 assert(ImpliedAttributeKind == Attribute::NoCapture &&
5731 "Unexpected attribute kind");
5734 return V.use_empty();
5740 if (isa<UndefValue>(V) || (isa<ConstantPointerNull>(V) &&
5741 V.getType()->getPointerAddressSpace() == 0)) {
5745 if (
A.hasAttr(IRP, {Attribute::NoCapture},
5746 true, Attribute::NoCapture))
5752 {Attribute::NoCapture, Attribute::ByVal},
5754 A.manifestAttrs(IRP,
5762 determineFunctionCaptureCapabilities(IRP, *
F, State);
5764 A.manifestAttrs(IRP,
5783 bool ReadOnly =
F.onlyReadsMemory();
5784 bool NoThrow =
F.doesNotThrow();
5785 bool IsVoidReturn =
F.getReturnType()->isVoidTy();
5786 if (ReadOnly && NoThrow && IsVoidReturn) {
5799 if (NoThrow && IsVoidReturn)
5804 if (!NoThrow || ArgNo < 0 ||
5805 !
F.getAttributes().hasAttrSomewhere(Attribute::Returned))
5808 for (
unsigned U = 0, E =
F.arg_size(); U < E; ++U)
5809 if (
F.hasParamAttribute(U, Attribute::Returned)) {
5810 if (U ==
unsigned(ArgNo))
5828 assert(!AA::hasAssumedIRAttr<Attribute::NoCapture>(
5829 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
5839 if (!isAssumedNoCaptureMaybeReturned())
5842 if (isArgumentPosition()) {
5843 if (isAssumedNoCapture())
5851 const std::string getAsStr(
Attributor *
A)
const override {
5852 if (isKnownNoCapture())
5853 return "known not-captured";
5854 if (isAssumedNoCapture())
5855 return "assumed not-captured";
5856 if (isKnownNoCaptureMaybeReturned())
5857 return "known not-captured-maybe-returned";
5858 if (isAssumedNoCaptureMaybeReturned())
5859 return "assumed not-captured-maybe-returned";
5860 return "assumed-captured";
5868 LLVM_DEBUG(
dbgs() <<
"[AANoCapture] Check use: " << *
U.get() <<
" in "
5872 if (isa<PtrToIntInst>(UInst)) {
5874 return isCapturedIn(State,
true,
true,
5880 if (isa<StoreInst>(UInst))
5881 return isCapturedIn(State,
true,
true,
5885 if (isa<ReturnInst>(UInst)) {
5887 return isCapturedIn(State,
false,
false,
5889 return isCapturedIn(State,
true,
true,
5895 auto *CB = dyn_cast<CallBase>(UInst);
5897 return isCapturedIn(State,
true,
true,
5904 bool IsKnownNoCapture;
5906 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
5907 A,
this, CSArgPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
5909 if (IsAssumedNoCapture)
5910 return isCapturedIn(State,
false,
false,
5914 return isCapturedIn(State,
false,
false,
5919 return isCapturedIn(State,
true,
true,
5927 bool CapturedInInt,
bool CapturedInRet) {
5928 LLVM_DEBUG(
dbgs() <<
" - captures [Mem " << CapturedInMem <<
"|Int "
5929 << CapturedInInt <<
"|Ret " << CapturedInRet <<
"]\n");
5945 return indicatePessimisticFixpoint();
5952 return indicatePessimisticFixpoint();
5960 T.addKnownBits(NOT_CAPTURED_IN_MEM);
5962 addKnownBits(NOT_CAPTURED_IN_MEM);
5969 auto CheckReturnedArgs = [&](
bool &UsedAssumedInformation) {
5973 UsedAssumedInformation))
5975 bool SeenConstant =
false;
5977 if (isa<Constant>(
VAC.getValue())) {
5980 SeenConstant =
true;
5981 }
else if (!isa<Argument>(
VAC.getValue()) ||
5982 VAC.getValue() == getAssociatedArgument())
5988 bool IsKnownNoUnwind;
5989 if (AA::hasAssumedIRAttr<Attribute::NoUnwind>(
5991 bool IsVoidTy =
F->getReturnType()->isVoidTy();
5992 bool UsedAssumedInformation =
false;
5993 if (IsVoidTy || CheckReturnedArgs(UsedAssumedInformation)) {
5994 T.addKnownBits(NOT_CAPTURED_IN_RET);
5995 if (
T.isKnown(NOT_CAPTURED_IN_MEM))
5997 if (IsKnownNoUnwind && (IsVoidTy || !UsedAssumedInformation)) {
5998 addKnownBits(NOT_CAPTURED_IN_RET);
5999 if (isKnown(NOT_CAPTURED_IN_MEM))
6000 return indicateOptimisticFixpoint();
6011 auto UseCheck = [&](
const Use &
U,
bool &Follow) ->
bool {
6016 return checkUse(
A,
T, U, Follow);
6024 if (!
A.checkForAllUses(UseCheck, *
this, *V))
6025 return indicatePessimisticFixpoint();
6028 auto Assumed = S.getAssumed();
6029 S.intersectAssumedBits(
T.getAssumed());
6030 if (!isAssumedNoCaptureMaybeReturned())
6031 return indicatePessimisticFixpoint();
6037struct AANoCaptureArgument final : AANoCaptureImpl {
6039 : AANoCaptureImpl(IRP,
A) {}
6046struct AANoCaptureCallSiteArgument final : AANoCaptureImpl {
6048 : AANoCaptureImpl(IRP,
A) {}
6056 Argument *Arg = getAssociatedArgument();
6058 return indicatePessimisticFixpoint();
6060 bool IsKnownNoCapture;
6062 if (AA::hasAssumedIRAttr<Attribute::NoCapture>(
6063 A,
this, ArgPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
6065 return ChangeStatus::UNCHANGED;
6067 return indicatePessimisticFixpoint();
6076struct AANoCaptureFloating final : AANoCaptureImpl {
6078 : AANoCaptureImpl(IRP,
A) {}
6081 void trackStatistics()
const override {
6087struct AANoCaptureReturned final : AANoCaptureImpl {
6089 : AANoCaptureImpl(IRP,
A) {
6104 void trackStatistics()
const override {}
6108struct AANoCaptureCallSiteReturned final : AANoCaptureImpl {
6110 : AANoCaptureImpl(IRP,
A) {}
6116 determineFunctionCaptureCapabilities(getIRPosition(), *
F, *
this);
6120 void trackStatistics()
const override {
6131 SimplifiedAssociatedValue,
Other, Ty);
6132 if (SimplifiedAssociatedValue == std::optional<Value *>(
nullptr))
6136 if (SimplifiedAssociatedValue)
6137 dbgs() <<
"[ValueSimplify] is assumed to be "
6138 << **SimplifiedAssociatedValue <<
"\n";
6140 dbgs() <<
"[ValueSimplify] is assumed to be <none>\n";
6152 if (getAssociatedValue().
getType()->isVoidTy())
6153 indicatePessimisticFixpoint();
6154 if (
A.hasSimplificationCallback(getIRPosition()))
6155 indicatePessimisticFixpoint();
6159 const std::string getAsStr(
Attributor *
A)
const override {
6161 dbgs() <<
"SAV: " << (
bool)SimplifiedAssociatedValue <<
" ";
6162 if (SimplifiedAssociatedValue && *SimplifiedAssociatedValue)
6163 dbgs() <<
"SAV: " << **SimplifiedAssociatedValue <<
" ";
6165 return isValidState() ? (isAtFixpoint() ?
"simplified" :
"maybe-simple")
6170 void trackStatistics()
const override {}
6173 std::optional<Value *>
6174 getAssumedSimplifiedValue(
Attributor &
A)
const override {
6175 return SimplifiedAssociatedValue;
6186 if (CtxI &&
V.getType()->canLosslesslyBitCastTo(&Ty))
6188 : BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
6201 assert(CtxI &&
"Cannot reproduce an instruction without context!");
6202 if (
Check && (
I.mayReadFromMemory() ||
6207 Value *NewOp = reproduceValue(
A, QueryingAA, *
Op, Ty, CtxI,
Check, VMap);
6209 assert(
Check &&
"Manifest of new value unexpectedly failed!");
6235 if (
const auto &NewV = VMap.
lookup(&V))
6237 bool UsedAssumedInformation =
false;
6238 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
6240 if (!SimpleV.has_value())
6244 EffectiveV = *SimpleV;
6245 if (
auto *
C = dyn_cast<Constant>(EffectiveV))
6249 return ensureType(
A, *EffectiveV, Ty, CtxI,
Check);
6250 if (
auto *
I = dyn_cast<Instruction>(EffectiveV))
6251 if (
Value *NewV = reproduceInst(
A, QueryingAA, *
I, Ty, CtxI,
Check, VMap))
6252 return ensureType(
A, *NewV, Ty, CtxI,
Check);
6259 Value *NewV = SimplifiedAssociatedValue
6260 ? *SimplifiedAssociatedValue
6262 if (NewV && NewV != &getAssociatedValue()) {
6266 if (reproduceValue(
A, *
this, *NewV, *getAssociatedType(), CtxI,
6268 return reproduceValue(
A, *
this, *NewV, *getAssociatedType(), CtxI,
6277 const IRPosition &IRP,
bool Simplify =
true) {
6278 bool UsedAssumedInformation =
false;
6281 QueryingValueSimplified =
A.getAssumedSimplified(
6283 return unionAssumed(QueryingValueSimplified);
6287 template <
typename AAType>
bool askSimplifiedValueFor(
Attributor &
A) {
6288 if (!getAssociatedValue().
getType()->isIntegerTy())
6293 A.getAAFor<AAType>(*
this, getIRPosition(), DepClassTy::NONE);
6297 std::optional<Constant *> COpt = AA->getAssumedConstant(
A);
6300 SimplifiedAssociatedValue = std::nullopt;
6301 A.recordDependence(*AA, *
this, DepClassTy::OPTIONAL);
6304 if (
auto *
C = *COpt) {
6305 SimplifiedAssociatedValue =
C;
6306 A.recordDependence(*AA, *
this, DepClassTy::OPTIONAL);
6312 bool askSimplifiedValueForOtherAAs(
Attributor &
A) {
6313 if (askSimplifiedValueFor<AAValueConstantRange>(
A))
6315 if (askSimplifiedValueFor<AAPotentialConstantValues>(
A))
6323 for (
auto &U : getAssociatedValue().
uses()) {
6327 if (
auto *
PHI = dyn_cast_or_null<PHINode>(IP))
6328 IP =
PHI->getIncomingBlock(U)->getTerminator();
6329 if (
auto *NewV = manifestReplacementValue(
A, IP)) {
6331 <<
" -> " << *NewV <<
" :: " << *
this <<
"\n");
6332 if (
A.changeUseAfterManifest(U, *NewV))
6333 Changed = ChangeStatus::CHANGED;
6337 return Changed | AAValueSimplify::manifest(
A);
6342 SimplifiedAssociatedValue = &getAssociatedValue();
6343 return AAValueSimplify::indicatePessimisticFixpoint();
6347struct AAValueSimplifyArgument final : AAValueSimplifyImpl {
6349 : AAValueSimplifyImpl(IRP,
A) {}
6352 AAValueSimplifyImpl::initialize(
A);
6353 if (
A.hasAttr(getIRPosition(),
6354 {Attribute::InAlloca, Attribute::Preallocated,
6355 Attribute::StructRet, Attribute::Nest, Attribute::ByVal},
6357 indicatePessimisticFixpoint();
6364 Argument *Arg = getAssociatedArgument();
6370 return indicatePessimisticFixpoint();
6373 auto Before = SimplifiedAssociatedValue;
6387 bool UsedAssumedInformation =
false;
6388 std::optional<Constant *> SimpleArgOp =
6389 A.getAssumedConstant(ACSArgPos, *
this, UsedAssumedInformation);
6396 return unionAssumed(*SimpleArgOp);
6401 bool UsedAssumedInformation =
false;
6402 if (hasCallBaseContext() &&
6403 getCallBaseContext()->getCalledOperand() == Arg->
getParent())
6407 Success =
A.checkForAllCallSites(PredForCallSite, *
this,
true,
6408 UsedAssumedInformation);
6411 if (!askSimplifiedValueForOtherAAs(
A))
6412 return indicatePessimisticFixpoint();
6415 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6416 : ChangeStatus ::CHANGED;
6420 void trackStatistics()
const override {
6425struct AAValueSimplifyReturned : AAValueSimplifyImpl {
6427 : AAValueSimplifyImpl(IRP,
A) {}
6430 std::optional<Value *>
6431 getAssumedSimplifiedValue(
Attributor &
A)
const override {
6432 if (!isValidState())
6434 return SimplifiedAssociatedValue;
6439 auto Before = SimplifiedAssociatedValue;
6442 auto &RI = cast<ReturnInst>(
I);
6443 return checkAndUpdate(
6448 bool UsedAssumedInformation =
false;
6449 if (!
A.checkForAllInstructions(ReturnInstCB, *
this, {Instruction::Ret},
6450 UsedAssumedInformation))
6451 if (!askSimplifiedValueForOtherAAs(
A))
6452 return indicatePessimisticFixpoint();
6455 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6456 : ChangeStatus ::CHANGED;
6462 return ChangeStatus::UNCHANGED;
6466 void trackStatistics()
const override {
6471struct AAValueSimplifyFloating : AAValueSimplifyImpl {
6473 : AAValueSimplifyImpl(IRP,
A) {}
6477 AAValueSimplifyImpl::initialize(
A);
6478 Value &
V = getAnchorValue();
6481 if (isa<Constant>(V))
6482 indicatePessimisticFixpoint();
6487 auto Before = SimplifiedAssociatedValue;
6488 if (!askSimplifiedValueForOtherAAs(
A))
6489 return indicatePessimisticFixpoint();
6492 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6493 : ChangeStatus ::CHANGED;
6497 void trackStatistics()
const override {
6502struct AAValueSimplifyFunction : AAValueSimplifyImpl {
6504 : AAValueSimplifyImpl(IRP,
A) {}
6508 SimplifiedAssociatedValue =
nullptr;
6509 indicateOptimisticFixpoint();
6514 "AAValueSimplify(Function|CallSite)::updateImpl will not be called");
6517 void trackStatistics()
const override {
6522struct AAValueSimplifyCallSite : AAValueSimplifyFunction {
6524 : AAValueSimplifyFunction(IRP,
A) {}
6526 void trackStatistics()
const override {
6531struct AAValueSimplifyCallSiteReturned : AAValueSimplifyImpl {
6533 : AAValueSimplifyImpl(IRP,
A) {}
6536 AAValueSimplifyImpl::initialize(
A);
6537 Function *Fn = getAssociatedFunction();
6538 assert(Fn &&
"Did expect an associted function");
6544 checkAndUpdate(
A, *
this, IRP))
6545 indicateOptimisticFixpoint();
6547 indicatePessimisticFixpoint();
6555 return indicatePessimisticFixpoint();
6558 void trackStatistics()
const override {
6563struct AAValueSimplifyCallSiteArgument : AAValueSimplifyFloating {
6565 : AAValueSimplifyFloating(IRP,
A) {}
6573 if (FloatAA && FloatAA->getState().isValidState())
6576 if (
auto *NewV = manifestReplacementValue(
A, getCtxI())) {
6577 Use &
U = cast<CallBase>(&getAnchorValue())
6578 ->getArgOperandUse(getCallSiteArgNo());
6579 if (
A.changeUseAfterManifest(U, *NewV))
6580 Changed = ChangeStatus::CHANGED;
6583 return Changed | AAValueSimplify::manifest(
A);
6586 void trackStatistics()
const override {
6596 struct AllocationInfo {
6608 }
Status = STACK_DUE_TO_USE;
6612 bool HasPotentiallyFreeingUnknownUses =
false;
6616 bool MoveAllocaIntoEntry =
true;
6622 struct DeallocationInfo {
6630 bool MightFreeUnknownObjects =
false;
6639 ~AAHeapToStackFunction() {
6642 for (
auto &It : AllocationInfos)
6643 It.second->~AllocationInfo();
6644 for (
auto &It : DeallocationInfos)
6645 It.second->~DeallocationInfo();
6649 AAHeapToStack::initialize(
A);
6652 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6659 DeallocationInfos[CB] =
new (
A.Allocator) DeallocationInfo{CB, FreedOp};
6668 AllocationInfo *AI =
new (
A.Allocator) AllocationInfo{CB};
6669 AllocationInfos[CB] = AI;
6671 TLI->getLibFunc(*CB, AI->LibraryFunctionId);
6677 bool UsedAssumedInformation =
false;
6678 bool Success =
A.checkForAllCallLikeInstructions(
6679 AllocationIdentifierCB, *
this, UsedAssumedInformation,
6683 assert(
Success &&
"Did not expect the call base visit callback to fail!");
6687 bool &) -> std::optional<Value *> {
return nullptr; };
6688 for (
const auto &It : AllocationInfos)
6691 for (
const auto &It : DeallocationInfos)
6696 const std::string getAsStr(
Attributor *
A)
const override {
6697 unsigned NumH2SMallocs = 0, NumInvalidMallocs = 0;
6698 for (
const auto &It : AllocationInfos) {
6699 if (It.second->Status == AllocationInfo::INVALID)
6700 ++NumInvalidMallocs;
6704 return "[H2S] Mallocs Good/Bad: " + std::to_string(NumH2SMallocs) +
"/" +
6705 std::to_string(NumInvalidMallocs);
6709 void trackStatistics()
const override {
6712 "Number of malloc/calloc/aligned_alloc calls converted to allocas");
6713 for (
const auto &It : AllocationInfos)
6714 if (It.second->Status != AllocationInfo::INVALID)
6718 bool isAssumedHeapToStack(
const CallBase &CB)
const override {
6720 if (AllocationInfo *AI =
6721 AllocationInfos.lookup(
const_cast<CallBase *
>(&CB)))
6722 return AI->Status != AllocationInfo::INVALID;
6726 bool isAssumedHeapToStackRemovedFree(
CallBase &CB)
const override {
6727 if (!isValidState())
6730 for (
const auto &It : AllocationInfos) {
6731 AllocationInfo &AI = *It.second;
6732 if (AI.Status == AllocationInfo::INVALID)
6735 if (AI.PotentialFreeCalls.count(&CB))
6743 assert(getState().isValidState() &&
6744 "Attempted to manifest an invalid state!");
6748 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6750 for (
auto &It : AllocationInfos) {
6751 AllocationInfo &AI = *It.second;
6752 if (AI.Status == AllocationInfo::INVALID)
6755 for (
CallBase *FreeCall : AI.PotentialFreeCalls) {
6756 LLVM_DEBUG(
dbgs() <<
"H2S: Removing free call: " << *FreeCall <<
"\n");
6757 A.deleteAfterManifest(*FreeCall);
6758 HasChanged = ChangeStatus::CHANGED;
6761 LLVM_DEBUG(
dbgs() <<
"H2S: Removing malloc-like call: " << *AI.CB
6766 if (TLI->getLibFunc(*AI.CB, IsAllocShared))
6767 if (IsAllocShared == LibFunc___kmpc_alloc_shared)
6768 return OR <<
"Moving globalized variable to the stack.";
6769 return OR <<
"Moving memory allocation from the heap to the stack.";
6771 if (AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared)
6778 std::optional<APInt> SizeAPI =
getSize(
A, *
this, AI);
6780 Size = ConstantInt::get(AI.CB->getContext(), *SizeAPI);
6787 cast<ConstantInt>(SizeOffsetPair.
Offset)->isZero());
6792 ?
F->getEntryBlock().begin()
6793 : AI.CB->getIterator();
6796 if (
MaybeAlign RetAlign = AI.CB->getRetAlign())
6797 Alignment = std::max(Alignment, *RetAlign);
6799 std::optional<APInt> AlignmentAPI = getAPInt(
A, *
this, *
Align);
6800 assert(AlignmentAPI && AlignmentAPI->getZExtValue() > 0 &&
6801 "Expected an alignment during manifest!");
6803 std::max(Alignment,
assumeAligned(AlignmentAPI->getZExtValue()));
6807 unsigned AS =
DL.getAllocaAddrSpace();
6810 AI.CB->getName() +
".h2s", IP);
6812 if (Alloca->
getType() != AI.CB->getType())
6813 Alloca = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
6814 Alloca, AI.CB->getType(),
"malloc_cast", AI.CB->getIterator());
6819 "Must be able to materialize initial memory state of allocation");
6823 if (
auto *
II = dyn_cast<InvokeInst>(AI.CB)) {
6824 auto *NBB =
II->getNormalDest();
6826 A.deleteAfterManifest(*AI.CB);
6828 A.deleteAfterManifest(*AI.CB);
6834 if (!isa<UndefValue>(InitVal)) {
6837 Builder.CreateMemSet(Alloca, InitVal,
Size, std::nullopt);
6839 HasChanged = ChangeStatus::CHANGED;
6847 bool UsedAssumedInformation =
false;
6848 std::optional<Constant *> SimpleV =
6849 A.getAssumedConstant(V, AA, UsedAssumedInformation);
6851 return APInt(64, 0);
6852 if (
auto *CI = dyn_cast_or_null<ConstantInt>(*SimpleV))
6853 return CI->getValue();
6854 return std::nullopt;
6858 AllocationInfo &AI) {
6859 auto Mapper = [&](
const Value *
V) ->
const Value * {
6860 bool UsedAssumedInformation =
false;
6861 if (std::optional<Constant *> SimpleV =
6862 A.getAssumedConstant(*V, AA, UsedAssumedInformation))
6869 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6887 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6889 const auto *LivenessAA =
6893 A.getInfoCache().getMustBeExecutedContextExplorer();
6895 bool StackIsAccessibleByOtherThreads =
6896 A.getInfoCache().stackIsAccessibleByOtherThreads();
6899 A.getInfoCache().getAnalysisResultForFunction<
LoopAnalysis>(*F);
6900 std::optional<bool> MayContainIrreducibleControl;
6902 if (&
F->getEntryBlock() == &BB)
6904 if (!MayContainIrreducibleControl.has_value())
6906 if (*MayContainIrreducibleControl)
6915 bool HasUpdatedFrees =
false;
6917 auto UpdateFrees = [&]() {
6918 HasUpdatedFrees =
true;
6920 for (
auto &It : DeallocationInfos) {
6921 DeallocationInfo &DI = *It.second;
6924 if (DI.MightFreeUnknownObjects)
6928 bool UsedAssumedInformation =
false;
6929 if (
A.isAssumedDead(*DI.CB,
this, LivenessAA, UsedAssumedInformation,
6936 LLVM_DEBUG(
dbgs() <<
"[H2S] Unknown underlying object for free!\n");
6937 DI.MightFreeUnknownObjects =
true;
6943 if (isa<ConstantPointerNull>(Obj) || isa<UndefValue>(Obj))
6946 CallBase *ObjCB = dyn_cast<CallBase>(Obj);
6950 DI.MightFreeUnknownObjects =
true;
6954 AllocationInfo *AI = AllocationInfos.lookup(ObjCB);
6956 LLVM_DEBUG(
dbgs() <<
"[H2S] Free of a non-allocation object: " << *Obj
6958 DI.MightFreeUnknownObjects =
true;
6962 DI.PotentialAllocationCalls.insert(ObjCB);
6966 auto FreeCheck = [&](AllocationInfo &AI) {
6970 if (!StackIsAccessibleByOtherThreads) {
6972 if (!AA::hasAssumedIRAttr<Attribute::NoSync>(
6975 dbgs() <<
"[H2S] found an escaping use, stack is not accessible by "
6976 "other threads and function is not nosync:\n");
6980 if (!HasUpdatedFrees)
6984 if (AI.PotentialFreeCalls.size() != 1) {
6986 << AI.PotentialFreeCalls.size() <<
"\n");
6989 CallBase *UniqueFree = *AI.PotentialFreeCalls.begin();
6990 DeallocationInfo *DI = DeallocationInfos.lookup(UniqueFree);
6993 dbgs() <<
"[H2S] unique free call was not known as deallocation call "
6994 << *UniqueFree <<
"\n");
6997 if (DI->MightFreeUnknownObjects) {
6999 dbgs() <<
"[H2S] unique free call might free unknown allocations\n");
7002 if (DI->PotentialAllocationCalls.empty())
7004 if (DI->PotentialAllocationCalls.size() > 1) {
7006 << DI->PotentialAllocationCalls.size()
7007 <<
" different allocations\n");
7010 if (*DI->PotentialAllocationCalls.begin() != AI.CB) {
7013 <<
"[H2S] unique free call not known to free this allocation but "
7014 << **DI->PotentialAllocationCalls.begin() <<
"\n");
7019 if (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared) {
7021 if (!Explorer || !Explorer->findInContextOf(UniqueFree, CtxI)) {
7022 LLVM_DEBUG(
dbgs() <<
"[H2S] unique free call might not be executed "
7023 "with the allocation "
7024 << *UniqueFree <<
"\n");
7031 auto UsesCheck = [&](AllocationInfo &AI) {
7032 bool ValidUsesOnly =
true;
7034 auto Pred = [&](
const Use &
U,
bool &Follow) ->
bool {
7036 if (isa<LoadInst>(UserI))
7038 if (
auto *SI = dyn_cast<StoreInst>(UserI)) {
7039 if (
SI->getValueOperand() ==
U.get()) {
7041 <<
"[H2S] escaping store to memory: " << *UserI <<
"\n");
7042 ValidUsesOnly =
false;
7048 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
7051 if (DeallocationInfos.count(CB)) {
7052 AI.PotentialFreeCalls.insert(CB);
7059 bool IsKnownNoCapture;
7060 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
7065 bool IsAssumedNoFree = AA::hasAssumedIRAttr<Attribute::NoFree>(
7068 if (!IsAssumedNoCapture ||
7069 (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared &&
7070 !IsAssumedNoFree)) {
7071 AI.HasPotentiallyFreeingUnknownUses |= !IsAssumedNoFree;
7076 <<
"Could not move globalized variable to the stack. "
7077 "Variable is potentially captured in call. Mark "
7078 "parameter as `__attribute__((noescape))` to override.";
7081 if (ValidUsesOnly &&
7082 AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared)
7086 ValidUsesOnly =
false;
7091 if (isa<GetElementPtrInst>(UserI) || isa<BitCastInst>(UserI) ||
7092 isa<PHINode>(UserI) || isa<SelectInst>(UserI)) {
7099 ValidUsesOnly =
false;
7102 if (!
A.checkForAllUses(Pred, *
this, *AI.CB,
false,
7104 [&](
const Use &OldU,
const Use &NewU) {
7105 auto *SI = dyn_cast<StoreInst>(OldU.getUser());
7106 return !SI || StackIsAccessibleByOtherThreads ||
7107 AA::isAssumedThreadLocalObject(
7108 A, *SI->getPointerOperand(), *this);
7111 return ValidUsesOnly;
7116 for (
auto &It : AllocationInfos) {
7117 AllocationInfo &AI = *It.second;
7118 if (AI.Status == AllocationInfo::INVALID)
7122 std::optional<APInt> APAlign = getAPInt(
A, *
this, *
Align);
7126 LLVM_DEBUG(
dbgs() <<
"[H2S] Unknown allocation alignment: " << *AI.CB
7128 AI.Status = AllocationInfo::INVALID;
7133 !APAlign->isPowerOf2()) {
7134 LLVM_DEBUG(
dbgs() <<
"[H2S] Invalid allocation alignment: " << APAlign
7136 AI.Status = AllocationInfo::INVALID;
7143 if (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared &&
7148 dbgs() <<
"[H2S] Unknown allocation size: " << *AI.CB <<
"\n";
7150 dbgs() <<
"[H2S] Allocation size too large: " << *AI.CB <<
" vs. "
7154 AI.Status = AllocationInfo::INVALID;
7160 switch (AI.Status) {
7161 case AllocationInfo::STACK_DUE_TO_USE:
7164 AI.Status = AllocationInfo::STACK_DUE_TO_FREE;
7166 case AllocationInfo::STACK_DUE_TO_FREE:
7169 AI.Status = AllocationInfo::INVALID;
7172 case AllocationInfo::INVALID:
7179 bool IsGlobalizedLocal =
7180 AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared;
7181 if (AI.MoveAllocaIntoEntry &&
7182 (!
Size.has_value() ||
7183 (!IsGlobalizedLocal && IsInLoop(*AI.CB->getParent()))))
7184 AI.MoveAllocaIntoEntry =
false;
7198 AAPrivatizablePtr::indicatePessimisticFixpoint();
7199 PrivatizableType =
nullptr;
7200 return ChangeStatus::CHANGED;
7206 virtual std::optional<Type *> identifyPrivatizableType(
Attributor &
A) = 0;
7210 std::optional<Type *> combineTypes(std::optional<Type *> T0,
7211 std::optional<Type *>
T1) {
7221 std::optional<Type *> getPrivatizableType()
const override {
7222 return PrivatizableType;
7225 const std::string getAsStr(
Attributor *
A)
const override {
7226 return isAssumedPrivatizablePtr() ?
"[priv]" :
"[no-priv]";
7230 std::optional<Type *> PrivatizableType;
7235struct AAPrivatizablePtrArgument final :
public AAPrivatizablePtrImpl {
7237 : AAPrivatizablePtrImpl(IRP,
A) {}
7240 std::optional<Type *> identifyPrivatizableType(
Attributor &
A)
override {
7243 bool UsedAssumedInformation =
false;
7245 A.getAttrs(getIRPosition(), {Attribute::ByVal},
Attrs,
7247 if (!
Attrs.empty() &&
7249 true, UsedAssumedInformation))
7250 return Attrs[0].getValueAsType();
7252 std::optional<Type *> Ty;
7253 unsigned ArgNo = getIRPosition().getCallSiteArgNo();
7276 dbgs() <<
"[AAPrivatizablePtr] ACSPos: " << ACSArgPos <<
", CSTy: ";
7280 dbgs() <<
"<nullptr>";
7285 Ty = combineTypes(Ty, CSTy);
7288 dbgs() <<
" : New Type: ";
7290 (*Ty)->print(
dbgs());
7292 dbgs() <<
"<nullptr>";
7301 if (!
A.checkForAllCallSites(CallSiteCheck, *
this,
true,
7302 UsedAssumedInformation))
7309 PrivatizableType = identifyPrivatizableType(
A);
7310 if (!PrivatizableType)
7311 return ChangeStatus::UNCHANGED;
7312 if (!*PrivatizableType)
7313 return indicatePessimisticFixpoint();
7318 DepClassTy::OPTIONAL);
7321 if (!
A.hasAttr(getIRPosition(), Attribute::ByVal) &&
7324 return indicatePessimisticFixpoint();
7330 identifyReplacementTypes(*PrivatizableType, ReplacementTypes);
7334 Function &Fn = *getIRPosition().getAnchorScope();
7338 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] Missing TTI for function "
7340 return indicatePessimisticFixpoint();
7350 bool UsedAssumedInformation =
false;
7351 if (!
A.checkForAllCallSites(CallSiteCheck, *
this,
true,
7352 UsedAssumedInformation)) {
7354 dbgs() <<
"[AAPrivatizablePtr] ABI incompatibility detected for "
7356 return indicatePessimisticFixpoint();
7360 Argument *Arg = getAssociatedArgument();
7361 if (!
A.isValidFunctionSignatureRewrite(*Arg, ReplacementTypes)) {
7363 return indicatePessimisticFixpoint();
7370 auto IsCompatiblePrivArgOfCallback = [&](
CallBase &CB) {
7373 for (
const Use *U : CallbackUses) {
7375 assert(CBACS && CBACS.isCallbackCall());
7376 for (
Argument &CBArg : CBACS.getCalledFunction()->args()) {
7377 int CBArgNo = CBACS.getCallArgOperandNo(CBArg);
7381 <<
"[AAPrivatizablePtr] Argument " << *Arg
7382 <<
"check if can be privatized in the context of its parent ("
7384 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7386 << CBArgNo <<
"@" << CBACS.getCalledFunction()->getName()
7387 <<
")\n[AAPrivatizablePtr] " << CBArg <<
" : "
7388 << CBACS.getCallArgOperand(CBArg) <<
" vs "
7390 <<
"[AAPrivatizablePtr] " << CBArg <<
" : "
7391 << CBACS.getCallArgOperandNo(CBArg) <<
" vs " << ArgNo <<
"\n";
7394 if (CBArgNo !=
int(ArgNo))
7398 if (CBArgPrivAA && CBArgPrivAA->isValidState()) {
7402 if (*CBArgPrivTy == PrivatizableType)
7407 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7408 <<
" cannot be privatized in the context of its parent ("
7410 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7412 << CBArgNo <<
"@" << CBACS.getCalledFunction()->getName()
7413 <<
").\n[AAPrivatizablePtr] for which the argument "
7414 "privatization is not compatible.\n";
7428 "Expected a direct call operand for callback call operand");
7433 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7434 <<
" check if be privatized in the context of its parent ("
7436 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7438 << DCArgNo <<
"@" << DCCallee->
getName() <<
").\n";
7441 if (
unsigned(DCArgNo) < DCCallee->
arg_size()) {
7444 DepClassTy::REQUIRED);
7445 if (DCArgPrivAA && DCArgPrivAA->isValidState()) {
7449 if (*DCArgPrivTy == PrivatizableType)
7455 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7456 <<
" cannot be privatized in the context of its parent ("
7458 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7461 <<
").\n[AAPrivatizablePtr] for which the argument "
7462 "privatization is not compatible.\n";
7474 return IsCompatiblePrivArgOfDirectCS(ACS);
7478 if (!
A.checkForAllCallSites(IsCompatiblePrivArgOfOtherCallSite, *
this,
true,
7479 UsedAssumedInformation))
7480 return indicatePessimisticFixpoint();
7482 return ChangeStatus::UNCHANGED;
7488 identifyReplacementTypes(
Type *PrivType,
7492 assert(PrivType &&
"Expected privatizable type!");
7495 if (
auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
7496 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++)
7497 ReplacementTypes.
push_back(PrivStructType->getElementType(u));
7498 }
else if (
auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
7499 ReplacementTypes.
append(PrivArrayType->getNumElements(),
7500 PrivArrayType->getElementType());
7511 assert(PrivType &&
"Expected privatizable type!");
7517 if (
auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
7518 const StructLayout *PrivStructLayout =
DL.getStructLayout(PrivStructType);
7519 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
7524 }
else if (
auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
7525 Type *PointeeTy = PrivArrayType->getElementType();
7526 uint64_t PointeeTySize =
DL.getTypeStoreSize(PointeeTy);
7527 for (
unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
7538 void createReplacementValues(
Align Alignment,
Type *PrivType,
7542 assert(PrivType &&
"Expected privatizable type!");
7549 if (
auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
7550 const StructLayout *PrivStructLayout =
DL.getStructLayout(PrivStructType);
7551 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
7552 Type *PointeeTy = PrivStructType->getElementType(u);
7556 L->setAlignment(Alignment);
7559 }
else if (
auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
7560 Type *PointeeTy = PrivArrayType->getElementType();
7561 uint64_t PointeeTySize =
DL.getTypeStoreSize(PointeeTy);
7562 for (
unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
7565 L->setAlignment(Alignment);
7570 L->setAlignment(Alignment);
7577 if (!PrivatizableType)
7578 return ChangeStatus::UNCHANGED;
7579 assert(*PrivatizableType &&
"Expected privatizable type!");
7585 bool UsedAssumedInformation =
false;
7586 if (!
A.checkForAllInstructions(
7588 CallInst &CI = cast<CallInst>(I);
7589 if (CI.isTailCall())
7590 TailCalls.push_back(&CI);
7593 *
this, {Instruction::Call}, UsedAssumedInformation))
7594 return ChangeStatus::UNCHANGED;
7596 Argument *Arg = getAssociatedArgument();
7599 const auto *AlignAA =
7608 BasicBlock &EntryBB = ReplacementFn.getEntryBlock();
7611 unsigned AS =
DL.getAllocaAddrSpace();
7613 Arg->
getName() +
".priv", IP);
7614 createInitialization(*PrivatizableType, *AI, ReplacementFn,
7615 ArgIt->getArgNo(), IP);
7618 AI = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
7623 CI->setTailCall(
false);
7634 createReplacementValues(
7635 AlignAA ? AlignAA->getAssumedAlign() :
Align(0),
7636 *PrivatizableType, ACS,
7644 identifyReplacementTypes(*PrivatizableType, ReplacementTypes);
7647 if (
A.registerFunctionSignatureRewrite(*Arg, ReplacementTypes,
7648 std::move(FnRepairCB),
7649 std::move(ACSRepairCB)))
7650 return ChangeStatus::CHANGED;
7651 return ChangeStatus::UNCHANGED;
7655 void trackStatistics()
const override {
7660struct AAPrivatizablePtrFloating :
public AAPrivatizablePtrImpl {
7662 : AAPrivatizablePtrImpl(IRP,
A) {}
7667 indicatePessimisticFixpoint();
7672 "updateImpl will not be called");
7676 std::optional<Type *> identifyPrivatizableType(
Attributor &
A)
override {
7679 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] No underlying object found!\n");
7683 if (
auto *AI = dyn_cast<AllocaInst>(Obj))
7684 if (
auto *CI = dyn_cast<ConstantInt>(AI->getArraySize()))
7686 return AI->getAllocatedType();
7687 if (
auto *Arg = dyn_cast<Argument>(Obj)) {
7690 if (PrivArgAA && PrivArgAA->isAssumedPrivatizablePtr())
7694 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] Underlying object neither valid "
7695 "alloca nor privatizable argument: "
7701 void trackStatistics()
const override {
7706struct AAPrivatizablePtrCallSiteArgument final
7707 :
public AAPrivatizablePtrFloating {
7709 : AAPrivatizablePtrFloating(IRP,
A) {}
7713 if (
A.hasAttr(getIRPosition(), Attribute::ByVal))
7714 indicateOptimisticFixpoint();
7719 PrivatizableType = identifyPrivatizableType(
A);
7720 if (!PrivatizableType)
7721 return ChangeStatus::UNCHANGED;
7722 if (!*PrivatizableType)
7723 return indicatePessimisticFixpoint();
7726 bool IsKnownNoCapture;
7727 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
7728 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoCapture);
7729 if (!IsAssumedNoCapture) {
7730 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer might be captured!\n");
7731 return indicatePessimisticFixpoint();
7734 bool IsKnownNoAlias;
7735 if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
7736 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
7737 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer might alias!\n");
7738 return indicatePessimisticFixpoint();
7743 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer is written!\n");
7744 return indicatePessimisticFixpoint();
7747 return ChangeStatus::UNCHANGED;
7751 void trackStatistics()
const override {
7756struct AAPrivatizablePtrCallSiteReturned final
7757 :
public AAPrivatizablePtrFloating {
7759 : AAPrivatizablePtrFloating(IRP,
A) {}
7764 indicatePessimisticFixpoint();
7768 void trackStatistics()
const override {
7773struct AAPrivatizablePtrReturned final :
public AAPrivatizablePtrFloating {
7775 : AAPrivatizablePtrFloating(IRP,
A) {}
7780 indicatePessimisticFixpoint();
7784 void trackStatistics()
const override {
7800 intersectAssumedBits(BEST_STATE);
7801 getKnownStateFromValue(
A, getIRPosition(), getState());
7802 AAMemoryBehavior::initialize(
A);
7808 bool IgnoreSubsumingPositions =
false) {
7810 A.getAttrs(IRP, AttrKinds, Attrs, IgnoreSubsumingPositions);
7812 switch (Attr.getKindAsEnum()) {
7813 case Attribute::ReadNone:
7816 case Attribute::ReadOnly:
7819 case Attribute::WriteOnly:
7828 if (!
I->mayReadFromMemory())
7830 if (!
I->mayWriteToMemory())
7843 else if (isAssumedWriteOnly())
7852 if (
A.hasAttr(IRP, Attribute::ReadNone,
7854 return ChangeStatus::UNCHANGED;
7863 return ChangeStatus::UNCHANGED;
7866 A.removeAttrs(IRP, AttrKinds);
7869 A.removeAttrs(IRP, Attribute::Writable);
7876 const std::string getAsStr(
Attributor *
A)
const override {
7881 if (isAssumedWriteOnly())
7883 return "may-read/write";
7891 Attribute::ReadNone, Attribute::ReadOnly, Attribute::WriteOnly};
7894struct AAMemoryBehaviorFloating : AAMemoryBehaviorImpl {
7896 : AAMemoryBehaviorImpl(IRP,
A) {}
7902 void trackStatistics()
const override {
7907 else if (isAssumedWriteOnly())
7922struct AAMemoryBehaviorArgument : AAMemoryBehaviorFloating {
7924 : AAMemoryBehaviorFloating(IRP,
A) {}
7928 intersectAssumedBits(BEST_STATE);
7933 bool HasByVal =
A.hasAttr(IRP, {Attribute::ByVal},
7935 getKnownStateFromValue(
A, IRP, getState(),
7942 return ChangeStatus::UNCHANGED;
7946 if (
A.hasAttr(getIRPosition(),
7947 {Attribute::InAlloca, Attribute::Preallocated})) {
7948 removeKnownBits(NO_WRITES);
7949 removeAssumedBits(NO_WRITES);
7951 A.removeAttrs(getIRPosition(), AttrKinds);
7952 return AAMemoryBehaviorFloating::manifest(
A);
7956 void trackStatistics()
const override {
7961 else if (isAssumedWriteOnly())
7966struct AAMemoryBehaviorCallSiteArgument final : AAMemoryBehaviorArgument {
7968 : AAMemoryBehaviorArgument(IRP,
A) {}
7974 Argument *Arg = getAssociatedArgument();
7976 indicatePessimisticFixpoint();
7980 addKnownBits(NO_WRITES);
7981 removeKnownBits(NO_READS);
7982 removeAssumedBits(NO_READS);
7984 AAMemoryBehaviorArgument::initialize(
A);
7985 if (getAssociatedFunction()->isDeclaration())
7986 indicatePessimisticFixpoint();
7995 Argument *Arg = getAssociatedArgument();
8000 return indicatePessimisticFixpoint();
8005 void trackStatistics()
const override {
8010 else if (isAssumedWriteOnly())
8016struct AAMemoryBehaviorCallSiteReturned final : AAMemoryBehaviorFloating {
8018 : AAMemoryBehaviorFloating(IRP,
A) {}
8022 AAMemoryBehaviorImpl::initialize(
A);
8027 return ChangeStatus::UNCHANGED;
8031 void trackStatistics()
const override {}
8035struct AAMemoryBehaviorFunction final :
public AAMemoryBehaviorImpl {
8037 : AAMemoryBehaviorImpl(IRP,
A) {}
8047 Function &
F = cast<Function>(getAnchorValue());
8053 else if (isAssumedWriteOnly())
8056 A.removeAttrs(getIRPosition(), AttrKinds);
8061 return A.manifestAttrs(getIRPosition(),
8066 void trackStatistics()
const override {
8071 else if (isAssumedWriteOnly())
8077struct AAMemoryBehaviorCallSite final
8078 : AACalleeToCallSite<AAMemoryBehavior, AAMemoryBehaviorImpl> {
8085 CallBase &CB = cast<CallBase>(getAnchorValue());
8091 else if (isAssumedWriteOnly())
8094 A.removeAttrs(getIRPosition(), AttrKinds);
8099 Attribute::Writable);
8100 return A.manifestAttrs(
8105 void trackStatistics()
const override {
8110 else if (isAssumedWriteOnly())
8118 auto AssumedState = getAssumed();
8124 if (
const auto *CB = dyn_cast<CallBase>(&
I)) {
8127 if (MemBehaviorAA) {
8128 intersectAssumedBits(MemBehaviorAA->
getAssumed());
8129 return !isAtFixpoint();
8134 if (
I.mayReadFromMemory())
8135 removeAssumedBits(NO_READS);
8136 if (
I.mayWriteToMemory())
8137 removeAssumedBits(NO_WRITES);
8138 return !isAtFixpoint();
8141 bool UsedAssumedInformation =
false;
8142 if (!
A.checkForAllReadWriteInstructions(CheckRWInst, *
this,
8143 UsedAssumedInformation))
8144 return indicatePessimisticFixpoint();
8163 const auto *FnMemAA =
8167 S.addKnownBits(FnMemAA->getKnown());
8168 if ((S.getAssumed() & FnMemAA->getAssumed()) == S.getAssumed())
8174 auto AssumedState = S.getAssumed();
8180 bool IsKnownNoCapture;
8182 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
8186 if (!IsAssumedNoCapture &&
8188 S.intersectAssumedBits(FnMemAssumedState);
8194 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
8196 LLVM_DEBUG(
dbgs() <<
"[AAMemoryBehavior] Use: " << *U <<
" in " << *UserI
8204 Follow = followUsersOfUseIn(
A, U, UserI);
8208 analyzeUseIn(
A, U, UserI);
8210 return !isAtFixpoint();
8213 if (!
A.checkForAllUses(UsePred, *
this, getAssociatedValue()))
8214 return indicatePessimisticFixpoint();
8220bool AAMemoryBehaviorFloating::followUsersOfUseIn(
Attributor &
A,
const Use &U,
8224 if (isa<LoadInst>(UserI) || isa<ReturnInst>(UserI))
8229 const auto *CB = dyn_cast<CallBase>(UserI);
8239 if (
U.get()->getType()->isPointerTy()) {
8241 bool IsKnownNoCapture;
8242 return !AA::hasAssumedIRAttr<Attribute::NoCapture>(
8250void AAMemoryBehaviorFloating::analyzeUseIn(
Attributor &
A,
const Use &U,
8258 case Instruction::Load:
8260 removeAssumedBits(NO_READS);
8263 case Instruction::Store:
8268 removeAssumedBits(NO_WRITES);
8270 indicatePessimisticFixpoint();
8273 case Instruction::Call:
8274 case Instruction::CallBr:
8275 case Instruction::Invoke: {
8278 const auto *CB = cast<CallBase>(UserI);
8282 indicatePessimisticFixpoint();
8289 removeAssumedBits(NO_READS);
8296 if (
U.get()->getType()->isPointerTy())
8300 const auto *MemBehaviorAA =
8306 intersectAssumedBits(MemBehaviorAA->
getAssumed());
8314 removeAssumedBits(NO_READS);
8316 removeAssumedBits(NO_WRITES);
8328 return "all memory";
8331 std::string S =
"memory:";
8337 S +=
"internal global,";
8339 S +=
"external global,";
8343 S +=
"inaccessible,";
8357 AccessKind2Accesses.fill(
nullptr);
8360 ~AAMemoryLocationImpl() {
8363 for (AccessSet *AS : AccessKind2Accesses)
8370 intersectAssumedBits(BEST_STATE);
8371 getKnownStateFromValue(
A, getIRPosition(), getState());
8372 AAMemoryLocation::initialize(
A);
8378 bool IgnoreSubsumingPositions =
false) {
8387 bool UseArgMemOnly =
true;
8389 if (AnchorFn &&
A.isRunOn(*AnchorFn))
8393 A.getAttrs(IRP, {Attribute::Memory},
Attrs, IgnoreSubsumingPositions);
8402 State.
addKnownBits(inverseLocation(NO_INACCESSIBLE_MEM,
true,
true));
8407 State.
addKnownBits(inverseLocation(NO_ARGUMENT_MEM,
true,
true));
8411 A.manifestAttrs(IRP,
8421 NO_INACCESSIBLE_MEM | NO_ARGUMENT_MEM,
true,
true));
8425 A.manifestAttrs(IRP,
8444 else if (isAssumedInaccessibleMemOnly())
8447 else if (isAssumedArgMemOnly())
8450 else if (isAssumedInaccessibleOrArgMemOnly())
8465 if (DeducedAttrs.
size() != 1)
8466 return ChangeStatus::UNCHANGED;
8474 bool checkForAllAccessesToMemoryKind(
8476 MemoryLocationsKind)>
8478 MemoryLocationsKind RequestedMLK)
const override {
8479 if (!isValidState())
8482 MemoryLocationsKind AssumedMLK = getAssumedNotAccessedLocation();
8483 if (AssumedMLK == NO_LOCATIONS)
8487 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS;
8488 CurMLK *= 2, ++
Idx) {
8489 if (CurMLK & RequestedMLK)
8492 if (
const AccessSet *Accesses = AccessKind2Accesses[
Idx])
8493 for (
const AccessInfo &AI : *Accesses)
8494 if (!Pred(AI.I, AI.Ptr, AI.Kind, CurMLK))
8506 bool Changed =
false;
8507 MemoryLocationsKind KnownMLK = getKnown();
8508 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
8509 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2)
8510 if (!(CurMLK & KnownMLK))
8511 updateStateAndAccessesMap(getState(), CurMLK,
I,
nullptr, Changed,
8512 getAccessKindFromInst(
I));
8513 return AAMemoryLocation::indicatePessimisticFixpoint();
8533 bool operator()(
const AccessInfo &
LHS,
const AccessInfo &
RHS)
const {
8537 return LHS.Ptr <
RHS.Ptr;
8538 if (
LHS.Kind !=
RHS.Kind)
8539 return LHS.Kind <
RHS.Kind;
8547 std::array<AccessSet *, llvm::CTLog2<VALID_STATE>()> AccessKind2Accesses;
8564 AK =
I->mayReadFromMemory() ? READ :
NONE;
8582 Changed |= Accesses->insert(AccessInfo{
I,
Ptr, AK}).second;
8583 if (MLK == NO_UNKOWN_MEM)
8585 State.removeAssumedBits(MLK);
8592 unsigned AccessAS = 0);
8598void AAMemoryLocationImpl::categorizePtrValue(
8601 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Categorize pointer locations for "
8603 << getMemoryLocationsAsStr(State.getAssumed()) <<
"]\n");
8605 auto Pred = [&](
Value &Obj) {
8608 MemoryLocationsKind MLK = NO_LOCATIONS;
8618 if (isa<UndefValue>(&Obj))
8620 if (isa<Argument>(&Obj)) {
8627 MLK = NO_ARGUMENT_MEM;
8628 }
else if (
auto *GV = dyn_cast<GlobalValue>(&Obj)) {
8632 if (
auto *GVar = dyn_cast<GlobalVariable>(GV))
8633 if (GVar->isConstant())
8636 if (GV->hasLocalLinkage())
8637 MLK = NO_GLOBAL_INTERNAL_MEM;
8639 MLK = NO_GLOBAL_EXTERNAL_MEM;
8640 }
else if (isa<ConstantPointerNull>(&Obj) &&
8644 }
else if (isa<AllocaInst>(&Obj)) {
8646 }
else if (
const auto *CB = dyn_cast<CallBase>(&Obj)) {
8647 bool IsKnownNoAlias;
8648 if (AA::hasAssumedIRAttr<Attribute::NoAlias>(
8651 MLK = NO_MALLOCED_MEM;
8653 MLK = NO_UNKOWN_MEM;
8655 MLK = NO_UNKOWN_MEM;
8658 assert(MLK != NO_LOCATIONS &&
"No location specified!");
8659 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Ptr value can be categorized: "
8660 << Obj <<
" -> " << getMemoryLocationsAsStr(MLK) <<
"\n");
8661 updateStateAndAccessesMap(State, MLK, &
I, &Obj, Changed,
8662 getAccessKindFromInst(&
I));
8671 dbgs() <<
"[AAMemoryLocation] Pointer locations not categorized\n");
8672 updateStateAndAccessesMap(State, NO_UNKOWN_MEM, &
I,
nullptr, Changed,
8673 getAccessKindFromInst(&
I));
8678 dbgs() <<
"[AAMemoryLocation] Accessed locations with pointer locations: "
8679 << getMemoryLocationsAsStr(State.getAssumed()) <<
"\n");
8682void AAMemoryLocationImpl::categorizeArgumentPointerLocations(
8685 for (
unsigned ArgNo = 0, E = CB.
arg_size(); ArgNo < E; ++ArgNo) {
8694 const auto *ArgOpMemLocationAA =
8697 if (ArgOpMemLocationAA && ArgOpMemLocationAA->isAssumedReadNone())
8702 categorizePtrValue(
A, CB, *ArgOp, AccessedLocs, Changed);
8709 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Categorize accessed locations for "
8713 AccessedLocs.intersectAssumedBits(NO_LOCATIONS);
8715 if (
auto *CB = dyn_cast<CallBase>(&
I)) {
8721 <<
" [" << CBMemLocationAA <<
"]\n");
8722 if (!CBMemLocationAA) {
8723 updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &
I,
nullptr,
8724 Changed, getAccessKindFromInst(&
I));
8725 return NO_UNKOWN_MEM;
8728 if (CBMemLocationAA->isAssumedReadNone())
8729 return NO_LOCATIONS;
8731 if (CBMemLocationAA->isAssumedInaccessibleMemOnly()) {
8732 updateStateAndAccessesMap(AccessedLocs, NO_INACCESSIBLE_MEM, &
I,
nullptr,
8733 Changed, getAccessKindFromInst(&
I));
8734 return AccessedLocs.getAssumed();
8737 uint32_t CBAssumedNotAccessedLocs =
8738 CBMemLocationAA->getAssumedNotAccessedLocation();
8741 uint32_t CBAssumedNotAccessedLocsNoArgMem =
8742 CBAssumedNotAccessedLocs | NO_ARGUMENT_MEM | NO_GLOBAL_MEM;
8744 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2) {
8745 if (CBAssumedNotAccessedLocsNoArgMem & CurMLK)
8747 updateStateAndAccessesMap(AccessedLocs, CurMLK, &
I,
nullptr, Changed,
8748 getAccessKindFromInst(&
I));
8753 bool HasGlobalAccesses = ((~CBAssumedNotAccessedLocs) & NO_GLOBAL_MEM);
8754 if (HasGlobalAccesses) {
8757 updateStateAndAccessesMap(AccessedLocs, MLK, &
I,
Ptr, Changed,
8758 getAccessKindFromInst(&
I));
8761 if (!CBMemLocationAA->checkForAllAccessesToMemoryKind(
8762 AccessPred, inverseLocation(NO_GLOBAL_MEM,
false,
false)))
8763 return AccessedLocs.getWorstState();
8767 dbgs() <<
"[AAMemoryLocation] Accessed state before argument handling: "
8768 << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) <<
"\n");
8771 bool HasArgAccesses = ((~CBAssumedNotAccessedLocs) & NO_ARGUMENT_MEM);
8773 categorizeArgumentPointerLocations(
A, *CB, AccessedLocs, Changed);
8776 dbgs() <<
"[AAMemoryLocation] Accessed state after argument handling: "
8777 << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) <<
"\n");
8779 return AccessedLocs.getAssumed();
8784 dbgs() <<
"[AAMemoryLocation] Categorize memory access with pointer: "
8785 <<
I <<
" [" << *
Ptr <<
"]\n");
8786 categorizePtrValue(
A,
I, *
Ptr, AccessedLocs, Changed,
8787 Ptr->getType()->getPointerAddressSpace());
8788 return AccessedLocs.getAssumed();
8791 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Failed to categorize instruction: "
8793 updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &
I,
nullptr, Changed,
8794 getAccessKindFromInst(&
I));
8795 return AccessedLocs.getAssumed();
8799struct AAMemoryLocationFunction final :
public AAMemoryLocationImpl {
8801 : AAMemoryLocationImpl(IRP,
A) {}
8806 const auto *MemBehaviorAA =
8810 return indicateOptimisticFixpoint();
8812 "AAMemoryLocation was not read-none but AAMemoryBehavior was!");
8813 A.recordDependence(*MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
8814 return ChangeStatus::UNCHANGED;
8818 auto AssumedState = getAssumed();
8819 bool Changed =
false;
8822 MemoryLocationsKind MLK = categorizeAccessedLocations(
A,
I, Changed);
8823 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Accessed locations for " <<
I
8824 <<
": " << getMemoryLocationsAsStr(MLK) <<
"\n");
8825 removeAssumedBits(inverseLocation(MLK,
false,
false));
8828 return getAssumedNotAccessedLocation() != VALID_STATE;
8831 bool UsedAssumedInformation =
false;
8832 if (!
A.checkForAllReadWriteInstructions(CheckRWInst, *
this,
8833 UsedAssumedInformation))
8834 return indicatePessimisticFixpoint();
8836 Changed |= AssumedState != getAssumed();
8837 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
8841 void trackStatistics()
const override {
8844 else if (isAssumedArgMemOnly())
8846 else if (isAssumedInaccessibleMemOnly())
8848 else if (isAssumedInaccessibleOrArgMemOnly())
8854struct AAMemoryLocationCallSite final : AAMemoryLocationImpl {
8856 : AAMemoryLocationImpl(IRP,
A) {}
8869 return indicatePessimisticFixpoint();
8870 bool Changed =
false;
8873 updateStateAndAccessesMap(getState(), MLK,
I,
Ptr, Changed,
8874 getAccessKindFromInst(
I));
8877 if (!FnAA->checkForAllAccessesToMemoryKind(AccessPred, ALL_LOCATIONS))
8878 return indicatePessimisticFixpoint();
8879 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
8883 void trackStatistics()
const override {
8897 const std::string getAsStr(
Attributor *
A)
const override {
8898 std::string Str(
"AADenormalFPMath[");
8901 DenormalState Known = getKnown();
8902 if (Known.Mode.isValid())
8903 OS <<
"denormal-fp-math=" << Known.Mode;
8907 if (Known.ModeF32.isValid())
8908 OS <<
" denormal-fp-math-f32=" << Known.ModeF32;
8914struct AADenormalFPMathFunction final : AADenormalFPMathImpl {
8916 : AADenormalFPMathImpl(IRP,
A) {}
8928 Known = DenormalState{
Mode, ModeF32};
8939 <<
"->" << getAssociatedFunction()->
getName() <<
'\n');
8947 CallerInfo->getState());
8951 bool AllCallSitesKnown =
true;
8952 if (!
A.checkForAllCallSites(CheckCallSite, *
this,
true, AllCallSitesKnown))
8953 return indicatePessimisticFixpoint();
8955 if (Change == ChangeStatus::CHANGED && isModeFixed())
8961 LLVMContext &Ctx = getAssociatedFunction()->getContext();
8966 AttrToRemove.
push_back(
"denormal-fp-math");
8972 if (Known.ModeF32 != Known.Mode) {
8974 Attribute::get(Ctx,
"denormal-fp-math-f32", Known.ModeF32.str()));
8976 AttrToRemove.
push_back(
"denormal-fp-math-f32");
8979 auto &IRP = getIRPosition();
8982 return A.removeAttrs(IRP, AttrToRemove) |
8983 A.manifestAttrs(IRP, AttrToAdd,
true);
8986 void trackStatistics()
const override {
9002 if (
A.hasSimplificationCallback(getIRPosition())) {
9003 indicatePessimisticFixpoint();
9008 intersectKnown(getConstantRangeFromSCEV(
A, getCtxI()));
9011 intersectKnown(getConstantRangeFromLVI(
A, getCtxI()));
9015 const std::string getAsStr(
Attributor *
A)
const override {
9019 getKnown().print(
OS);
9021 getAssumed().print(
OS);
9029 if (!getAnchorScope())
9042 const SCEV *S = SE->
getSCEV(&getAssociatedValue());
9053 if (!getAnchorScope())
9060 const SCEV *S = getSCEV(
A,
I);
9072 if (!getAnchorScope())
9091 bool isValidCtxInstructionForOutsideAnalysis(
Attributor &
A,
9093 bool AllowAACtxI)
const {
9094 if (!CtxI || (!AllowAACtxI && CtxI == getCtxI()))
9105 if (
auto *
I = dyn_cast<Instruction>(&getAssociatedValue())) {
9119 const Instruction *CtxI =
nullptr)
const override {
9120 if (!isValidCtxInstructionForOutsideAnalysis(
A, CtxI,
9126 return getKnown().intersectWith(SCEVR).intersectWith(LVIR);
9132 const Instruction *CtxI =
nullptr)
const override {
9137 if (!isValidCtxInstructionForOutsideAnalysis(
A, CtxI,
9139 return getAssumed();
9143 return getAssumed().intersectWith(SCEVR).intersectWith(LVIR);
9151 Ty, AssumedConstantRange.
getLower())),
9153 Ty, AssumedConstantRange.
getUpper()))};
9175 mdconst::extract<ConstantInt>(KnownRanges->
getOperand(0));
9177 mdconst::extract<ConstantInt>(KnownRanges->
getOperand(1));
9180 return Known.contains(Assumed) && Known != Assumed;
9187 auto *OldRangeMD =
I->getMetadata(LLVMContext::MD_range);
9188 if (isBetterRange(AssumedConstantRange, OldRangeMD)) {
9190 I->setMetadata(LLVMContext::MD_range,
9191 getMDNodeForConstantRange(
I->getType(),
I->getContext(),
9192 AssumedConstantRange));
9205 auto &
V = getAssociatedValue();
9209 assert(
I == getCtxI() &&
"Should not annotate an instruction which is "
9210 "not the context instruction");
9211 if (isa<CallInst>(
I) || isa<LoadInst>(
I))
9212 if (setRangeMetadataIfisBetterRange(
I, AssumedConstantRange))
9213 Changed = ChangeStatus::CHANGED;
9221struct AAValueConstantRangeArgument final
9222 : AAArgumentFromCallSiteArguments<
9223 AAValueConstantRange, AAValueConstantRangeImpl, IntegerRangeState,
9225 using Base = AAArgumentFromCallSiteArguments<
9232 void trackStatistics()
const override {
9237struct AAValueConstantRangeReturned
9238 : AAReturnedFromReturnedValues<AAValueConstantRange,
9239 AAValueConstantRangeImpl,
9240 AAValueConstantRangeImpl::StateType,
9244 AAValueConstantRangeImpl,
9252 if (!
A.isFunctionIPOAmendable(*getAssociatedFunction()))
9253 indicatePessimisticFixpoint();
9257 void trackStatistics()
const override {
9262struct AAValueConstantRangeFloating : AAValueConstantRangeImpl {
9264 : AAValueConstantRangeImpl(IRP,
A) {}
9268 AAValueConstantRangeImpl::initialize(
A);
9272 Value &
V = getAssociatedValue();
9274 if (
auto *
C = dyn_cast<ConstantInt>(&V)) {
9276 indicateOptimisticFixpoint();
9280 if (isa<UndefValue>(&V)) {
9283 indicateOptimisticFixpoint();
9287 if (isa<CallBase>(&V))
9290 if (isa<BinaryOperator>(&V) || isa<CmpInst>(&V) || isa<CastInst>(&V))
9294 if (
LoadInst *LI = dyn_cast<LoadInst>(&V))
9295 if (
auto *RangeMD = LI->getMetadata(LLVMContext::MD_range)) {
9302 if (isa<SelectInst>(V) || isa<PHINode>(V))
9306 indicatePessimisticFixpoint();
9309 << getAssociatedValue() <<
"\n");
9312 bool calculateBinaryOperator(
9320 bool UsedAssumedInformation =
false;
9321 const auto &SimplifiedLHS =
A.getAssumedSimplified(
9324 if (!SimplifiedLHS.has_value())
9326 if (!*SimplifiedLHS)
9328 LHS = *SimplifiedLHS;
9330 const auto &SimplifiedRHS =
A.getAssumedSimplified(
9333 if (!SimplifiedRHS.has_value())
9335 if (!*SimplifiedRHS)
9337 RHS = *SimplifiedRHS;
9345 DepClassTy::REQUIRED);
9349 auto LHSAARange = LHSAA->getAssumedConstantRange(
A, CtxI);
9353 DepClassTy::REQUIRED);
9357 auto RHSAARange = RHSAA->getAssumedConstantRange(
A, CtxI);
9359 auto AssumedRange = LHSAARange.binaryOp(BinOp->
getOpcode(), RHSAARange);
9361 T.unionAssumed(AssumedRange);
9365 return T.isValidState();
9368 bool calculateCastInst(
9377 bool UsedAssumedInformation =
false;
9378 const auto &SimplifiedOpV =
A.getAssumedSimplified(
9381 if (!SimplifiedOpV.has_value())
9383 if (!*SimplifiedOpV)
9385 OpV = *SimplifiedOpV;
9392 DepClassTy::REQUIRED);
9396 T.unionAssumed(OpAA->getAssumed().castOp(CastI->
getOpcode(),
9398 return T.isValidState();
9409 bool UsedAssumedInformation =
false;
9410 const auto &SimplifiedLHS =
A.getAssumedSimplified(
9413 if (!SimplifiedLHS.has_value())
9415 if (!*SimplifiedLHS)
9417 LHS = *SimplifiedLHS;
9419 const auto &SimplifiedRHS =
A.getAssumedSimplified(
9422 if (!SimplifiedRHS.has_value())
9424 if (!*SimplifiedRHS)
9426 RHS = *SimplifiedRHS;
9434 DepClassTy::REQUIRED);
9440 DepClassTy::REQUIRED);
9444 auto LHSAARange = LHSAA->getAssumedConstantRange(
A, CtxI);
9445 auto RHSAARange = RHSAA->getAssumedConstantRange(
A, CtxI);
9448 if (LHSAARange.isEmptySet() || RHSAARange.isEmptySet())
9451 bool MustTrue =
false, MustFalse =
false;
9453 auto AllowedRegion =
9456 if (AllowedRegion.intersectWith(LHSAARange).isEmptySet())
9462 assert((!MustTrue || !MustFalse) &&
9463 "Either MustTrue or MustFalse should be false!");
9472 LLVM_DEBUG(
dbgs() <<
"[AAValueConstantRange] " << *CmpI <<
" after "
9473 << (MustTrue ?
"true" : (MustFalse ?
"false" :
"unknown"))
9474 <<
": " <<
T <<
"\n\t" << *LHSAA <<
"\t<op>\n\t"
9478 return T.isValidState();
9487 if (!
I || isa<CallBase>(
I)) {
9490 bool UsedAssumedInformation =
false;
9491 const auto &SimplifiedOpV =
A.getAssumedSimplified(
9494 if (!SimplifiedOpV.has_value())
9496 if (!*SimplifiedOpV)
9498 Value *VPtr = *SimplifiedOpV;
9503 DepClassTy::REQUIRED);
9507 T.unionAssumed(AA->getAssumedConstantRange(
A, CtxI));
9511 return T.isValidState();
9515 if (
auto *BinOp = dyn_cast<BinaryOperator>(
I)) {
9516 if (!calculateBinaryOperator(
A, BinOp,
T, CtxI, QuerriedAAs))
9518 }
else if (
auto *CmpI = dyn_cast<CmpInst>(
I)) {
9519 if (!calculateCmpInst(
A, CmpI,
T, CtxI, QuerriedAAs))
9521 }
else if (
auto *CastI = dyn_cast<CastInst>(
I)) {
9522 if (!calculateCastInst(
A, CastI,
T, CtxI, QuerriedAAs))
9528 T.indicatePessimisticFixpoint();
9536 if (QueriedAA !=
this)
9539 if (
T.getAssumed() == getState().getAssumed())
9541 T.indicatePessimisticFixpoint();
9544 return T.isValidState();
9547 if (!VisitValueCB(getAssociatedValue(), getCtxI()))
9548 return indicatePessimisticFixpoint();
9553 return ChangeStatus::UNCHANGED;
9554 if (++NumChanges > MaxNumChanges) {
9555 LLVM_DEBUG(
dbgs() <<
"[AAValueConstantRange] performed " << NumChanges
9556 <<
" but only " << MaxNumChanges
9557 <<
" are allowed to avoid cyclic reasoning.");
9558 return indicatePessimisticFixpoint();
9560 return ChangeStatus::CHANGED;
9564 void trackStatistics()
const override {
9573 static constexpr int MaxNumChanges = 5;
9576struct AAValueConstantRangeFunction : AAValueConstantRangeImpl {
9578 : AAValueConstantRangeImpl(IRP,
A) {}
9582 llvm_unreachable(
"AAValueConstantRange(Function|CallSite)::updateImpl will "
9590struct AAValueConstantRangeCallSite : AAValueConstantRangeFunction {
9592 : AAValueConstantRangeFunction(IRP,
A) {}
9598struct AAValueConstantRangeCallSiteReturned
9599 : AACalleeToCallSite<AAValueConstantRange, AAValueConstantRangeImpl,
9600 AAValueConstantRangeImpl::StateType,
9604 AAValueConstantRangeImpl::StateType,
9610 if (
CallInst *CI = dyn_cast<CallInst>(&getAssociatedValue()))
9611 if (
auto *RangeMD = CI->getMetadata(LLVMContext::MD_range))
9614 AAValueConstantRangeImpl::initialize(
A);
9618 void trackStatistics()
const override {
9622struct AAValueConstantRangeCallSiteArgument : AAValueConstantRangeFloating {
9624 : AAValueConstantRangeFloating(IRP,
A) {}
9628 return ChangeStatus::UNCHANGED;
9632 void trackStatistics()
const override {
9649 if (
A.hasSimplificationCallback(getIRPosition()))
9650 indicatePessimisticFixpoint();
9652 AAPotentialConstantValues::initialize(
A);
9656 bool &ContainsUndef,
bool ForSelf) {
9658 bool UsedAssumedInformation =
false;
9660 UsedAssumedInformation)) {
9668 *
this, IRP, DepClassTy::REQUIRED);
9669 if (!PotentialValuesAA || !PotentialValuesAA->getState().isValidState())
9671 ContainsUndef = PotentialValuesAA->getState().undefIsContained();
9672 S = PotentialValuesAA->getState().getAssumedSet();
9679 ContainsUndef =
false;
9680 for (
auto &It : Values) {
9681 if (isa<UndefValue>(It.getValue())) {
9682 ContainsUndef =
true;
9685 auto *CI = dyn_cast<ConstantInt>(It.getValue());
9688 S.insert(CI->getValue());
9690 ContainsUndef &= S.empty();
9696 const std::string getAsStr(
Attributor *
A)
const override {
9705 return indicatePessimisticFixpoint();
9709struct AAPotentialConstantValuesArgument final
9710 : AAArgumentFromCallSiteArguments<AAPotentialConstantValues,
9711 AAPotentialConstantValuesImpl,
9712 PotentialConstantIntValuesState> {
9714 AAPotentialConstantValuesImpl,
9720 void trackStatistics()
const override {
9725struct AAPotentialConstantValuesReturned
9726 : AAReturnedFromReturnedValues<AAPotentialConstantValues,
9727 AAPotentialConstantValuesImpl> {
9729 AAPotentialConstantValuesImpl>;
9734 if (!
A.isFunctionIPOAmendable(*getAssociatedFunction()))
9735 indicatePessimisticFixpoint();
9736 Base::initialize(
A);
9740 void trackStatistics()
const override {
9745struct AAPotentialConstantValuesFloating : AAPotentialConstantValuesImpl {
9747 : AAPotentialConstantValuesImpl(IRP,
A) {}
9751 AAPotentialConstantValuesImpl::initialize(
A);
9755 Value &
V = getAssociatedValue();
9757 if (
auto *
C = dyn_cast<ConstantInt>(&V)) {
9758 unionAssumed(
C->getValue());
9759 indicateOptimisticFixpoint();
9763 if (isa<UndefValue>(&V)) {
9764 unionAssumedWithUndef();
9765 indicateOptimisticFixpoint();
9769 if (isa<BinaryOperator>(&V) || isa<ICmpInst>(&V) || isa<CastInst>(&V))
9772 if (isa<SelectInst>(V) || isa<PHINode>(V) || isa<LoadInst>(V))
9775 indicatePessimisticFixpoint();
9778 << getAssociatedValue() <<
"\n");
9792 case Instruction::Trunc:
9793 return Src.trunc(ResultBitWidth);
9794 case Instruction::SExt:
9795 return Src.sext(ResultBitWidth);
9796 case Instruction::ZExt:
9797 return Src.zext(ResultBitWidth);
9798 case Instruction::BitCast:
9805 bool &SkipOperation,
bool &Unsupported) {
9812 switch (BinOpcode) {
9816 case Instruction::Add:
9818 case Instruction::Sub:
9820 case Instruction::Mul:
9822 case Instruction::UDiv:
9824 SkipOperation =
true;
9828 case Instruction::SDiv:
9830 SkipOperation =
true;
9834 case Instruction::URem:
9836 SkipOperation =
true;
9840 case Instruction::SRem:
9842 SkipOperation =
true;
9846 case Instruction::Shl:
9848 case Instruction::LShr:
9850 case Instruction::AShr:
9852 case Instruction::And:
9854 case Instruction::Or:
9856 case Instruction::Xor:
9861 bool calculateBinaryOperatorAndTakeUnion(
const BinaryOperator *BinOp,
9863 bool SkipOperation =
false;
9866 calculateBinaryOperator(BinOp,
LHS,
RHS, SkipOperation, Unsupported);
9871 unionAssumed(Result);
9872 return isValidState();
9876 auto AssumedBefore = getAssumed();
9880 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
9881 SetTy LHSAAPVS, RHSAAPVS;
9883 LHSContainsUndef,
false) ||
9885 RHSContainsUndef,
false))
9886 return indicatePessimisticFixpoint();
9889 bool MaybeTrue =
false, MaybeFalse =
false;
9891 if (LHSContainsUndef && RHSContainsUndef) {
9894 unionAssumedWithUndef();
9895 }
else if (LHSContainsUndef) {
9896 for (
const APInt &R : RHSAAPVS) {
9897 bool CmpResult = calculateICmpInst(ICI, Zero, R);
9898 MaybeTrue |= CmpResult;
9899 MaybeFalse |= !CmpResult;
9900 if (MaybeTrue & MaybeFalse)
9901 return indicatePessimisticFixpoint();
9903 }
else if (RHSContainsUndef) {
9904 for (
const APInt &L : LHSAAPVS) {
9905 bool CmpResult = calculateICmpInst(ICI, L, Zero);
9906 MaybeTrue |= CmpResult;
9907 MaybeFalse |= !CmpResult;
9908 if (MaybeTrue & MaybeFalse)
9909 return indicatePessimisticFixpoint();
9912 for (
const APInt &L : LHSAAPVS) {
9913 for (
const APInt &R : RHSAAPVS) {
9914 bool CmpResult = calculateICmpInst(ICI, L, R);
9915 MaybeTrue |= CmpResult;
9916 MaybeFalse |= !CmpResult;
9917 if (MaybeTrue & MaybeFalse)
9918 return indicatePessimisticFixpoint();
9923 unionAssumed(
APInt( 1, 1));
9925 unionAssumed(
APInt( 1, 0));
9926 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
9927 : ChangeStatus::CHANGED;
9931 auto AssumedBefore = getAssumed();
9935 bool UsedAssumedInformation =
false;
9936 std::optional<Constant *>
C =
A.getAssumedConstant(
9937 *
SI->getCondition(), *
this, UsedAssumedInformation);
9940 bool OnlyLeft =
false, OnlyRight =
false;
9941 if (
C && *
C && (*C)->isOneValue())
9943 else if (
C && *
C && (*C)->isZeroValue())
9946 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
9947 SetTy LHSAAPVS, RHSAAPVS;
9950 LHSContainsUndef,
false))
9951 return indicatePessimisticFixpoint();
9955 RHSContainsUndef,
false))
9956 return indicatePessimisticFixpoint();
9958 if (OnlyLeft || OnlyRight) {
9960 auto *OpAA = OnlyLeft ? &LHSAAPVS : &RHSAAPVS;
9961 auto Undef = OnlyLeft ? LHSContainsUndef : RHSContainsUndef;
9964 unionAssumedWithUndef();
9966 for (
const auto &It : *OpAA)
9970 }
else if (LHSContainsUndef && RHSContainsUndef) {
9972 unionAssumedWithUndef();
9974 for (
const auto &It : LHSAAPVS)
9976 for (
const auto &It : RHSAAPVS)
9979 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
9980 : ChangeStatus::CHANGED;
9984 auto AssumedBefore = getAssumed();
9986 return indicatePessimisticFixpoint();
9991 bool SrcContainsUndef =
false;
9994 SrcContainsUndef,
false))
9995 return indicatePessimisticFixpoint();
9997 if (SrcContainsUndef)
9998 unionAssumedWithUndef();
10000 for (
const APInt &S : SrcPVS) {
10001 APInt T = calculateCastInst(CI, S, ResultBitWidth);
10005 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10006 : ChangeStatus::CHANGED;
10010 auto AssumedBefore = getAssumed();
10014 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
10015 SetTy LHSAAPVS, RHSAAPVS;
10017 LHSContainsUndef,
false) ||
10019 RHSContainsUndef,
false))
10020 return indicatePessimisticFixpoint();
10025 if (LHSContainsUndef && RHSContainsUndef) {
10026 if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, Zero))
10027 return indicatePessimisticFixpoint();
10028 }
else if (LHSContainsUndef) {
10029 for (
const APInt &R : RHSAAPVS) {
10030 if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, R))
10031 return indicatePessimisticFixpoint();
10033 }
else if (RHSContainsUndef) {
10034 for (
const APInt &L : LHSAAPVS) {
10035 if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, Zero))
10036 return indicatePessimisticFixpoint();
10039 for (
const APInt &L : LHSAAPVS) {
10040 for (
const APInt &R : RHSAAPVS) {
10041 if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, R))
10042 return indicatePessimisticFixpoint();
10046 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10047 : ChangeStatus::CHANGED;
10051 auto AssumedBefore = getAssumed();
10053 bool ContainsUndef;
10055 ContainsUndef,
true))
10056 return indicatePessimisticFixpoint();
10057 if (ContainsUndef) {
10058 unionAssumedWithUndef();
10063 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10064 : ChangeStatus::CHANGED;
10069 Value &
V = getAssociatedValue();
10072 if (
auto *ICI = dyn_cast<ICmpInst>(
I))
10073 return updateWithICmpInst(
A, ICI);
10075 if (
auto *SI = dyn_cast<SelectInst>(
I))
10076 return updateWithSelectInst(
A, SI);
10078 if (
auto *CI = dyn_cast<CastInst>(
I))
10079 return updateWithCastInst(
A, CI);
10081 if (
auto *BinOp = dyn_cast<BinaryOperator>(
I))
10082 return updateWithBinaryOperator(
A, BinOp);
10084 if (isa<PHINode>(
I) || isa<LoadInst>(
I))
10085 return updateWithInstruction(
A,
I);
10087 return indicatePessimisticFixpoint();
10091 void trackStatistics()
const override {
10096struct AAPotentialConstantValuesFunction : AAPotentialConstantValuesImpl {
10098 : AAPotentialConstantValuesImpl(IRP,
A) {}
10103 "AAPotentialConstantValues(Function|CallSite)::updateImpl will "
10108 void trackStatistics()
const override {
10113struct AAPotentialConstantValuesCallSite : AAPotentialConstantValuesFunction {
10115 : AAPotentialConstantValuesFunction(IRP,
A) {}
10118 void trackStatistics()
const override {
10123struct AAPotentialConstantValuesCallSiteReturned
10124 : AACalleeToCallSite<AAPotentialConstantValues,
10125 AAPotentialConstantValuesImpl> {
10126 AAPotentialConstantValuesCallSiteReturned(
const IRPosition &IRP,
10129 AAPotentialConstantValuesImpl>(IRP,
A) {}
10132 void trackStatistics()
const override {
10137struct AAPotentialConstantValuesCallSiteArgument
10138 : AAPotentialConstantValuesFloating {
10139 AAPotentialConstantValuesCallSiteArgument(
const IRPosition &IRP,
10141 : AAPotentialConstantValuesFloating(IRP,
A) {}
10145 AAPotentialConstantValuesImpl::initialize(
A);
10146 if (isAtFixpoint())
10149 Value &
V = getAssociatedValue();
10151 if (
auto *
C = dyn_cast<ConstantInt>(&V)) {
10152 unionAssumed(
C->getValue());
10153 indicateOptimisticFixpoint();
10157 if (isa<UndefValue>(&V)) {
10158 unionAssumedWithUndef();
10159 indicateOptimisticFixpoint();
10166 Value &
V = getAssociatedValue();
10167 auto AssumedBefore = getAssumed();
10171 return indicatePessimisticFixpoint();
10172 const auto &S = AA->getAssumed();
10174 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10175 : ChangeStatus::CHANGED;
10179 void trackStatistics()
const override {
10188 bool IgnoreSubsumingPositions) {
10189 assert(ImpliedAttributeKind == Attribute::NoUndef &&
10190 "Unexpected attribute kind");
10191 if (
A.hasAttr(IRP, {Attribute::NoUndef}, IgnoreSubsumingPositions,
10192 Attribute::NoUndef))
10212 Value &V = getAssociatedValue();
10213 if (isa<UndefValue>(V))
10214 indicatePessimisticFixpoint();
10215 assert(!isImpliedByIR(
A, getIRPosition(), Attribute::NoUndef));
10221 const Value *UseV =
U->get();
10230 bool TrackUse =
false;
10233 if (isa<CastInst>(*
I) || isa<GetElementPtrInst>(*
I))
10239 const std::string getAsStr(
Attributor *
A)
const override {
10240 return getAssumed() ?
"noundef" :
"may-undef-or-poison";
10247 bool UsedAssumedInformation =
false;
10248 if (
A.isAssumedDead(getIRPosition(),
nullptr,
nullptr,
10249 UsedAssumedInformation))
10250 return ChangeStatus::UNCHANGED;
10254 if (!
A.getAssumedSimplified(getIRPosition(), *
this, UsedAssumedInformation,
10257 return ChangeStatus::UNCHANGED;
10258 return AANoUndef::manifest(
A);
10262struct AANoUndefFloating :
public AANoUndefImpl {
10264 : AANoUndefImpl(IRP,
A) {}
10268 AANoUndefImpl::initialize(
A);
10269 if (!getState().isAtFixpoint() && getAnchorScope() &&
10270 !getAnchorScope()->isDeclaration())
10272 followUsesInMBEC(*
this,
A, getState(), *CtxI);
10277 auto VisitValueCB = [&](
const IRPosition &IRP) ->
bool {
10278 bool IsKnownNoUndef;
10279 return AA::hasAssumedIRAttr<Attribute::NoUndef>(
10280 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoUndef);
10284 bool UsedAssumedInformation =
false;
10285 Value *AssociatedValue = &getAssociatedValue();
10287 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
10292 Values.
size() != 1 || Values.
front().getValue() != AssociatedValue;
10300 if (AVIRP == getIRPosition() || !VisitValueCB(AVIRP))
10301 return indicatePessimisticFixpoint();
10302 return ChangeStatus::UNCHANGED;
10305 for (
const auto &VAC : Values)
10307 return indicatePessimisticFixpoint();
10309 return ChangeStatus::UNCHANGED;
10316struct AANoUndefReturned final
10317 : AAReturnedFromReturnedValues<AANoUndef, AANoUndefImpl> {
10319 : AAReturnedFromReturnedValues<
AANoUndef, AANoUndefImpl>(IRP,
A) {}
10325struct AANoUndefArgument final
10326 : AAArgumentFromCallSiteArguments<AANoUndef, AANoUndefImpl> {
10328 : AAArgumentFromCallSiteArguments<
AANoUndef, AANoUndefImpl>(IRP,
A) {}
10334struct AANoUndefCallSiteArgument final : AANoUndefFloating {
10336 : AANoUndefFloating(IRP,
A) {}
10342struct AANoUndefCallSiteReturned final
10343 : AACalleeToCallSite<AANoUndef, AANoUndefImpl> {
10345 : AACalleeToCallSite<
AANoUndef, AANoUndefImpl>(IRP,
A) {}
10360 if (isa<UndefValue>(V)) {
10361 indicateOptimisticFixpoint();
10366 A.getAttrs(getIRPosition(), {Attribute::NoFPClass},
Attrs,
false);
10367 for (
const auto &Attr : Attrs) {
10378 followUsesInMBEC(*
this,
A, getState(), *CtxI);
10385 auto *CB = dyn_cast<CallBase>(
I);
10394 if (
auto *NoFPAA =
A.getAAFor<
AANoFPClass>(*
this, IRP, DepClassTy::NONE))
10395 State.addKnownBits(NoFPAA->getState().getKnown());
10399 const std::string getAsStr(
Attributor *
A)
const override {
10400 std::string
Result =
"nofpclass";
10402 OS << getKnownNoFPClass() <<
'/' << getAssumedNoFPClass();
10412struct AANoFPClassFloating :
public AANoFPClassImpl {
10414 : AANoFPClassImpl(IRP,
A) {}
10419 bool UsedAssumedInformation =
false;
10420 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
10422 Values.
push_back({getAssociatedValue(), getCtxI()});
10428 DepClassTy::REQUIRED);
10429 if (!AA ||
this == AA) {
10430 T.indicatePessimisticFixpoint();
10436 return T.isValidState();
10439 for (
const auto &VAC : Values)
10440 if (!VisitValueCB(*
VAC.getValue(),
VAC.getCtxI()))
10441 return indicatePessimisticFixpoint();
10447 void trackStatistics()
const override {
10452struct AANoFPClassReturned final
10453 : AAReturnedFromReturnedValues<AANoFPClass, AANoFPClassImpl,
10454 AANoFPClassImpl::StateType, false,
10455 Attribute::None, false> {
10457 : AAReturnedFromReturnedValues<
AANoFPClass, AANoFPClassImpl,
10458 AANoFPClassImpl::StateType,
false,
10462 void trackStatistics()
const override {
10467struct AANoFPClassArgument final
10468 : AAArgumentFromCallSiteArguments<AANoFPClass, AANoFPClassImpl> {
10470 : AAArgumentFromCallSiteArguments<
AANoFPClass, AANoFPClassImpl>(IRP,
A) {}
10476struct AANoFPClassCallSiteArgument final : AANoFPClassFloating {
10478 : AANoFPClassFloating(IRP,
A) {}
10481 void trackStatistics()
const override {
10486struct AANoFPClassCallSiteReturned final
10487 : AACalleeToCallSite<AANoFPClass, AANoFPClassImpl> {
10489 : AACalleeToCallSite<
AANoFPClass, AANoFPClassImpl>(IRP,
A) {}
10492 void trackStatistics()
const override {
10501 return CalledFunctions;
10504 bool hasUnknownCallee()
const override {
return HasUnknownCallee; }
10506 bool hasNonAsmUnknownCallee()
const override {
10507 return HasUnknownCalleeNonAsm;
10510 const std::string getAsStr(
Attributor *
A)
const override {
10511 return "CallEdges[" + std::to_string(HasUnknownCallee) +
"," +
10512 std::to_string(CalledFunctions.size()) +
"]";
10515 void trackStatistics()
const override {}
10519 if (CalledFunctions.insert(Fn)) {
10520 Change = ChangeStatus::CHANGED;
10526 void setHasUnknownCallee(
bool NonAsm,
ChangeStatus &Change) {
10527 if (!HasUnknownCallee)
10528 Change = ChangeStatus::CHANGED;
10529 if (NonAsm && !HasUnknownCalleeNonAsm)
10530 Change = ChangeStatus::CHANGED;
10531 HasUnknownCalleeNonAsm |= NonAsm;
10532 HasUnknownCallee =
true;
10540 bool HasUnknownCallee =
false;
10543 bool HasUnknownCalleeNonAsm =
false;
10546struct AACallEdgesCallSite :
public AACallEdgesImpl {
10548 : AACallEdgesImpl(IRP,
A) {}
10554 if (
Function *Fn = dyn_cast<Function>(&V)) {
10555 addCalledFunction(Fn, Change);
10557 LLVM_DEBUG(
dbgs() <<
"[AACallEdges] Unrecognized value: " << V <<
"\n");
10558 setHasUnknownCallee(
true, Change);
10568 if (isa<Constant>(V)) {
10569 VisitValue(*V, CtxI);
10573 bool UsedAssumedInformation =
false;
10579 for (
auto &VAC : Values)
10580 VisitValue(*
VAC.getValue(),
VAC.getCtxI());
10583 CallBase *CB = cast<CallBase>(getCtxI());
10586 if (
IA->hasSideEffects() &&
10589 setHasUnknownCallee(
false, Change);
10596 *
this, getIRPosition(), DepClassTy::OPTIONAL))
10597 if (IndirectCallAA->foreachCallee(
10598 [&](
Function *Fn) { return VisitValue(*Fn, CB); }))
10607 for (
const Use *U : CallbackUses)
10608 ProcessCalledOperand(
U->get(), CB);
10614struct AACallEdgesFunction :
public AACallEdgesImpl {
10616 : AACallEdgesImpl(IRP,
A) {}
10623 CallBase &CB = cast<CallBase>(Inst);
10629 if (CBEdges->hasNonAsmUnknownCallee())
10630 setHasUnknownCallee(
true, Change);
10631 if (CBEdges->hasUnknownCallee())
10632 setHasUnknownCallee(
false, Change);
10634 for (
Function *
F : CBEdges->getOptimisticEdges())
10635 addCalledFunction(
F, Change);
10641 bool UsedAssumedInformation =
false;
10642 if (!
A.checkForAllCallLikeInstructions(ProcessCallInst, *
this,
10643 UsedAssumedInformation,
10647 setHasUnknownCallee(
true, Change);
10656struct AAInterFnReachabilityFunction
10657 :
public CachedReachabilityAA<AAInterFnReachability, Function> {
10658 using Base = CachedReachabilityAA<AAInterFnReachability, Function>;
10662 bool instructionCanReach(
10665 assert(
From.getFunction() == getAnchorScope() &&
"Queried the wrong AA!");
10666 auto *NonConstThis =
const_cast<AAInterFnReachabilityFunction *
>(
this);
10668 RQITy StackRQI(
A,
From, To, ExclusionSet,
false);
10669 typename RQITy::Reachable
Result;
10670 if (!NonConstThis->checkQueryCache(
A, StackRQI, Result))
10671 return NonConstThis->isReachableImpl(
A, StackRQI,
10673 return Result == RQITy::Reachable::Yes;
10677 bool IsTemporaryRQI)
override {
10680 if (EntryI != RQI.From &&
10681 !instructionCanReach(
A, *EntryI, *RQI.To,
nullptr))
10682 return rememberResult(
A, RQITy::Reachable::No, RQI,
false,
10685 auto CheckReachableCallBase = [&](
CallBase *CB) {
10688 if (!CBEdges || !CBEdges->getState().isValidState())
10691 if (CBEdges->hasUnknownCallee())
10694 for (
Function *Fn : CBEdges->getOptimisticEdges()) {
10705 if (Fn == getAnchorScope()) {
10706 if (EntryI == RQI.From)
10713 DepClassTy::OPTIONAL);
10716 if (!InterFnReachability ||
10726 DepClassTy::OPTIONAL);
10732 if (CheckReachableCallBase(cast<CallBase>(&CBInst)))
10735 A, *RQI.From, CBInst, RQI.ExclusionSet);
10738 bool UsedExclusionSet =
true;
10739 bool UsedAssumedInformation =
false;
10740 if (!
A.checkForAllCallLikeInstructions(CheckCallBase, *
this,
10741 UsedAssumedInformation,
10743 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
10746 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
10750 void trackStatistics()
const override {}
10754template <
typename AAType>
10755static std::optional<Constant *>
10766 std::optional<Constant *> COpt = AA->getAssumedConstant(
A);
10768 if (!COpt.has_value()) {
10770 return std::nullopt;
10772 if (
auto *
C = *COpt) {
10783 std::optional<Value *> V;
10784 for (
auto &It : Values) {
10786 if (V.has_value() && !*V)
10789 if (!V.has_value())
10803 if (
A.hasSimplificationCallback(getIRPosition())) {
10804 indicatePessimisticFixpoint();
10807 Value *Stripped = getAssociatedValue().stripPointerCasts();
10808 if (isa<Constant>(Stripped) && !isa<ConstantExpr>(Stripped)) {
10809 addValue(
A, getState(), *Stripped, getCtxI(),
AA::AnyScope,
10811 indicateOptimisticFixpoint();
10814 AAPotentialValues::initialize(
A);
10818 const std::string getAsStr(
Attributor *
A)
const override {
10825 template <
typename AAType>
10826 static std::optional<Value *> askOtherAA(
Attributor &
A,
10831 std::optional<Constant *>
C = askForAssumedConstant<AAType>(
A, AA, IRP, Ty);
10833 return std::nullopt;
10845 if (
auto *CB = dyn_cast_or_null<CallBase>(CtxI)) {
10846 for (
const auto &U : CB->
args()) {
10856 Type &Ty = *getAssociatedType();
10857 std::optional<Value *> SimpleV =
10858 askOtherAA<AAValueConstantRange>(
A, *
this, ValIRP, Ty);
10859 if (SimpleV.has_value() && !*SimpleV) {
10861 *
this, ValIRP, DepClassTy::OPTIONAL);
10862 if (PotentialConstantsAA && PotentialConstantsAA->isValidState()) {
10863 for (
const auto &It : PotentialConstantsAA->getAssumedSet())
10864 State.
unionAssumed({{*ConstantInt::get(&Ty, It),
nullptr}, S});
10865 if (PotentialConstantsAA->undefIsContained())
10870 if (!SimpleV.has_value())
10877 if (isa<ConstantInt>(VPtr))
10882 State.unionAssumed({{*VPtr, CtxI}, S});
10892 return II.I ==
I &&
II.S == S;
10907 bool UsedAssumedInformation =
false;
10909 if (!
A.getAssumedSimplifiedValues(IRP,
this, Values, CS,
10910 UsedAssumedInformation))
10913 for (
auto &It : Values)
10914 ValueScopeMap[It] += CS;
10916 for (
auto &It : ValueScopeMap)
10917 addValue(
A, getState(), *It.first.getValue(), It.first.getCtxI(),
10924 auto NewS = StateType::getBestState(getState());
10925 for (
const auto &It : getAssumedSet()) {
10928 addValue(
A, NewS, *It.first.getValue(), It.first.getCtxI(),
10931 assert(!undefIsContained() &&
"Undef should be an explicit value!");
10939 getState() = StateType::getBestState(getState());
10940 getState().unionAssumed({{getAssociatedValue(), getCtxI()},
AA::AnyScope});
10947 return indicatePessimisticFixpoint();
10955 if (!getAssumedSimplifiedValues(
A, Values, S))
10957 Value &OldV = getAssociatedValue();
10958 if (isa<UndefValue>(OldV))
10960 Value *NewV = getSingleValue(
A, *
this, getIRPosition(), Values);
10961 if (!NewV || NewV == &OldV)
10966 if (
A.changeAfterManifest(getIRPosition(), *NewV))
10972 bool getAssumedSimplifiedValues(
10974 AA::ValueScope S,
bool RecurseForSelectAndPHI =
false)
const override {
10975 if (!isValidState())
10977 bool UsedAssumedInformation =
false;
10978 for (
const auto &It : getAssumedSet())
10979 if (It.second & S) {
10980 if (RecurseForSelectAndPHI && (isa<PHINode>(It.first.getValue()) ||
10981 isa<SelectInst>(It.first.getValue()))) {
10982 if (
A.getAssumedSimplifiedValues(
10984 this, Values, S, UsedAssumedInformation))
10989 assert(!undefIsContained() &&
"Undef should be an explicit value!");
10994struct AAPotentialValuesFloating : AAPotentialValuesImpl {
10996 : AAPotentialValuesImpl(IRP,
A) {}
11000 auto AssumedBefore = getAssumed();
11002 genericValueTraversal(
A, &getAssociatedValue());
11004 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11005 : ChangeStatus::CHANGED;
11009 struct LivenessInfo {
11010 const AAIsDead *LivenessAA =
nullptr;
11011 bool AnyDead =
false;
11024 bool UsedAssumedInformation =
false;
11026 auto GetSimplifiedValues = [&](
Value &
V,
11028 if (!
A.getAssumedSimplifiedValues(
11034 return Values.
empty();
11036 if (GetSimplifiedValues(*
LHS, LHSValues))
11038 if (GetSimplifiedValues(*
RHS, RHSValues))
11050 F ?
A.getInfoCache().getTargetLibraryInfoForFunction(*
F) :
nullptr;
11058 auto CheckPair = [&](
Value &LHSV,
Value &RHSV) {
11059 if (isa<UndefValue>(LHSV) || isa<UndefValue>(RHSV)) {
11061 nullptr,
II.S, getAnchorScope());
11067 if (&LHSV == &RHSV &&
11071 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11078 if (TypedLHS && TypedRHS) {
11080 if (NewV && NewV != &Cmp) {
11081 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11091 bool LHSIsNull = isa<ConstantPointerNull>(LHSV);
11092 bool RHSIsNull = isa<ConstantPointerNull>(RHSV);
11093 if (!LHSIsNull && !RHSIsNull)
11099 assert((LHSIsNull || RHSIsNull) &&
11100 "Expected nullptr versus non-nullptr comparison at this point");
11103 unsigned PtrIdx = LHSIsNull;
11104 bool IsKnownNonNull;
11105 bool IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
11107 DepClassTy::REQUIRED, IsKnownNonNull);
11108 if (!IsAssumedNonNull)
11114 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11119 for (
auto &LHSValue : LHSValues)
11120 for (
auto &RHSValue : RHSValues)
11121 if (!CheckPair(*LHSValue.getValue(), *RHSValue.getValue()))
11129 bool UsedAssumedInformation =
false;
11131 std::optional<Constant *>
C =
11132 A.getAssumedConstant(*
SI.getCondition(), *
this, UsedAssumedInformation);
11133 bool NoValueYet = !
C.has_value();
11134 if (NoValueYet || isa_and_nonnull<UndefValue>(*
C))
11136 if (
auto *CI = dyn_cast_or_null<ConstantInt>(*
C)) {
11141 }
else if (&SI == &getAssociatedValue()) {
11146 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
11148 if (!SimpleV.has_value())
11151 addValue(
A, getState(), **SimpleV, CtxI,
II.S, getAnchorScope());
11163 bool UsedAssumedInformation =
false;
11165 PotentialValueOrigins, *
this,
11166 UsedAssumedInformation,
11168 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Failed to get potentially "
11169 "loaded values for load instruction "
11180 if (!
I || isa<AssumeInst>(
I))
11182 if (
auto *SI = dyn_cast<StoreInst>(
I))
11183 return A.isAssumedDead(
SI->getOperandUse(0),
this,
11185 UsedAssumedInformation,
11187 return A.isAssumedDead(*
I,
this,
nullptr,
11188 UsedAssumedInformation,
11191 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Load is onl used by assumes "
11192 "and we cannot delete all the stores: "
11203 bool AllLocal = ScopeIsLocal;
11208 if (!DynamicallyUnique) {
11209 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Not all potentially loaded "
11210 "values are dynamically unique: "
11215 for (
auto *PotentialCopy : PotentialCopies) {
11217 Worklist.
push_back({{*PotentialCopy, CtxI},
II.S});
11222 if (!AllLocal && ScopeIsLocal)
11227 bool handlePHINode(
11231 auto GetLivenessInfo = [&](
const Function &
F) -> LivenessInfo & {
11232 LivenessInfo &LI = LivenessAAs[&
F];
11233 if (!LI.LivenessAA)
11239 if (&
PHI == &getAssociatedValue()) {
11240 LivenessInfo &LI = GetLivenessInfo(*
PHI.getFunction());
11243 *
PHI.getFunction());
11247 for (
unsigned u = 0, e =
PHI.getNumIncomingValues(); u < e; u++) {
11249 if (LI.LivenessAA &&
11250 LI.LivenessAA->isEdgeDead(IncomingBB,
PHI.getParent())) {
11260 if (CyclePHI && isa<Instruction>(V) &&
11261 (!
C ||
C->contains(cast<Instruction>(V)->getParent())))
11269 bool UsedAssumedInformation =
false;
11270 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
11272 if (!SimpleV.has_value())
11276 addValue(
A, getState(), **SimpleV, &
PHI,
II.S, getAnchorScope());
11285 bool SomeSimplified =
false;
11286 bool UsedAssumedInformation =
false;
11291 const auto &SimplifiedOp =
A.getAssumedSimplified(
11296 if (!SimplifiedOp.has_value())
11300 NewOps[
Idx] = *SimplifiedOp;
11304 SomeSimplified |= (NewOps[
Idx] !=
Op);
11310 if (!SomeSimplified)
11317 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
11323 if (!NewV || NewV == &
I)
11326 LLVM_DEBUG(
dbgs() <<
"Generic inst " <<
I <<
" assumed simplified to "
11336 if (
auto *CI = dyn_cast<CmpInst>(&
I))
11338 CI->getPredicate(),
II, Worklist);
11340 switch (
I.getOpcode()) {
11341 case Instruction::Select:
11342 return handleSelectInst(
A, cast<SelectInst>(
I),
II, Worklist);
11343 case Instruction::PHI:
11344 return handlePHINode(
A, cast<PHINode>(
I),
II, Worklist, LivenessAAs);
11345 case Instruction::Load:
11346 return handleLoadInst(
A, cast<LoadInst>(
I),
II, Worklist);
11348 return handleGenericInst(
A,
I,
II, Worklist);
11375 LLVM_DEBUG(
dbgs() <<
"Generic value traversal reached iteration limit: "
11376 << Iteration <<
"!\n");
11377 addValue(
A, getState(), *V, CtxI, S, getAnchorScope());
11383 Value *NewV =
nullptr;
11384 if (
V->getType()->isPointerTy()) {
11387 if (
auto *CB = dyn_cast<CallBase>(V))
11397 if (NewV && NewV != V) {
11398 Worklist.
push_back({{*NewV, CtxI}, S});
11402 if (
auto *
I = dyn_cast<Instruction>(V)) {
11407 if (V != InitialV || isa<Argument>(V))
11412 if (V == InitialV && CtxI == getCtxI()) {
11413 indicatePessimisticFixpoint();
11417 addValue(
A, getState(), *V, CtxI, S, getAnchorScope());
11418 }
while (!Worklist.
empty());
11422 for (
auto &It : LivenessAAs)
11423 if (It.second.AnyDead)
11424 A.recordDependence(*It.second.LivenessAA, *
this, DepClassTy::OPTIONAL);
11428 void trackStatistics()
const override {
11433struct AAPotentialValuesArgument final : AAPotentialValuesImpl {
11434 using Base = AAPotentialValuesImpl;
11440 auto &Arg = cast<Argument>(getAssociatedValue());
11442 indicatePessimisticFixpoint();
11447 auto AssumedBefore = getAssumed();
11449 unsigned ArgNo = getCalleeArgNo();
11451 bool UsedAssumedInformation =
false;
11455 if (CSArgIRP.getPositionKind() == IRP_INVALID)
11458 if (!
A.getAssumedSimplifiedValues(CSArgIRP,
this, Values,
11460 UsedAssumedInformation))
11463 return isValidState();
11466 if (!
A.checkForAllCallSites(CallSitePred, *
this,
11468 UsedAssumedInformation))
11469 return indicatePessimisticFixpoint();
11471 Function *Fn = getAssociatedFunction();
11472 bool AnyNonLocal =
false;
11473 for (
auto &It : Values) {
11474 if (isa<Constant>(It.getValue())) {
11475 addValue(
A, getState(), *It.getValue(), It.getCtxI(),
AA::AnyScope,
11480 return indicatePessimisticFixpoint();
11482 if (
auto *Arg = dyn_cast<Argument>(It.getValue()))
11484 addValue(
A, getState(), *It.getValue(), It.getCtxI(),
AA::AnyScope,
11490 AnyNonLocal =
true;
11492 assert(!undefIsContained() &&
"Undef should be an explicit value!");
11494 giveUpOnIntraprocedural(
A);
11496 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11497 : ChangeStatus::CHANGED;
11501 void trackStatistics()
const override {
11506struct AAPotentialValuesReturned :
public AAPotentialValuesFloating {
11507 using Base = AAPotentialValuesFloating;
11514 if (!
F ||
F->isDeclaration() ||
F->getReturnType()->isVoidTy()) {
11515 indicatePessimisticFixpoint();
11522 ReturnedArg = &Arg;
11525 if (!
A.isFunctionIPOAmendable(*
F) ||
11526 A.hasSimplificationCallback(getIRPosition())) {
11528 indicatePessimisticFixpoint();
11530 indicateOptimisticFixpoint();
11536 auto AssumedBefore = getAssumed();
11537 bool UsedAssumedInformation =
false;
11540 Function *AnchorScope = getAnchorScope();
11546 UsedAssumedInformation,
11552 bool AllInterAreIntra =
false;
11560 addValue(
A, getState(), *
VAC.getValue(),
11561 VAC.getCtxI() ?
VAC.getCtxI() : CtxI,
11564 if (AllInterAreIntra)
11571 HandleReturnedValue(*ReturnedArg,
nullptr,
true);
11574 bool AddValues =
true;
11575 if (isa<PHINode>(RetI.getOperand(0)) ||
11576 isa<SelectInst>(RetI.getOperand(0))) {
11577 addValue(
A, getState(), *RetI.getOperand(0), &RetI,
AA::AnyScope,
11581 return HandleReturnedValue(*RetI.getOperand(0), &RetI, AddValues);
11584 if (!
A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
11585 UsedAssumedInformation,
11587 return indicatePessimisticFixpoint();
11590 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11591 : ChangeStatus::CHANGED;
11596 return ChangeStatus::UNCHANGED;
11598 if (!getAssumedSimplifiedValues(
A, Values, AA::ValueScope::Intraprocedural,
11600 return ChangeStatus::UNCHANGED;
11601 Value *NewVal = getSingleValue(
A, *
this, getIRPosition(), Values);
11603 return ChangeStatus::UNCHANGED;
11606 if (
auto *Arg = dyn_cast<Argument>(NewVal)) {
11608 "Number of function with unique return");
11609 Changed |=
A.manifestAttrs(
11616 Value *RetOp = RetI.getOperand(0);
11617 if (isa<UndefValue>(RetOp) || RetOp == NewVal)
11620 if (
A.changeUseAfterManifest(RetI.getOperandUse(0), *NewVal))
11621 Changed = ChangeStatus::CHANGED;
11624 bool UsedAssumedInformation =
false;
11625 (void)
A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
11626 UsedAssumedInformation,
11636 void trackStatistics()
const override{
11643struct AAPotentialValuesFunction : AAPotentialValuesImpl {
11645 : AAPotentialValuesImpl(IRP,
A) {}
11654 void trackStatistics()
const override {
11659struct AAPotentialValuesCallSite : AAPotentialValuesFunction {
11661 : AAPotentialValuesFunction(IRP,
A) {}
11664 void trackStatistics()
const override {
11669struct AAPotentialValuesCallSiteReturned : AAPotentialValuesImpl {
11671 : AAPotentialValuesImpl(IRP,
A) {}
11675 auto AssumedBefore = getAssumed();
11679 return indicatePessimisticFixpoint();
11681 bool UsedAssumedInformation =
false;
11682 auto *CB = cast<CallBase>(getCtxI());
11685 UsedAssumedInformation))
11686 return indicatePessimisticFixpoint();
11693 Values, S, UsedAssumedInformation))
11696 for (
auto &It : Values) {
11697 Value *
V = It.getValue();
11698 std::optional<Value *> CallerV =
A.translateArgumentToCallSiteContent(
11699 V, *CB, *
this, UsedAssumedInformation);
11700 if (!CallerV.has_value()) {
11704 V = *CallerV ? *CallerV :
V;
11710 giveUpOnIntraprocedural(
A);
11713 addValue(
A, getState(), *V, CB, S, getAnchorScope());
11718 return indicatePessimisticFixpoint();
11720 return indicatePessimisticFixpoint();
11721 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11722 : ChangeStatus::CHANGED;
11726 return AAPotentialValues::indicatePessimisticFixpoint();
11730 void trackStatistics()
const override {
11735struct AAPotentialValuesCallSiteArgument : AAPotentialValuesFloating {
11737 : AAPotentialValuesFloating(IRP,
A) {}
11740 void trackStatistics()
const override {
11756 if (getKnown().isUniversal())
11757 return ChangeStatus::UNCHANGED;
11761 getAssumed().getSet().
end());
11763 return A.manifestAttrs(IRP,
11766 llvm::join(Set,
",")),
11771 return isValidState() && setContains(Assumption);
11775 const std::string getAsStr(
Attributor *
A)
const override {
11776 const SetContents &Known = getKnown();
11777 const SetContents &Assumed = getAssumed();
11781 const std::string KnownStr = llvm::join(Set,
",");
11783 std::string AssumedStr =
"Universal";
11784 if (!Assumed.isUniversal()) {
11785 Set.assign(Assumed.getSet().begin(), Assumed.getSet().end());
11786 AssumedStr = llvm::join(Set,
",");
11788 return "Known [" + KnownStr +
"]," +
" Assumed [" + AssumedStr +
"]";
11803struct AAAssumptionInfoFunction final : AAAssumptionInfoImpl {
11805 : AAAssumptionInfoImpl(IRP,
A,
11810 bool Changed =
false;
11815 DepClassTy::REQUIRED);
11819 Changed |= getIntersection(AssumptionAA->getAssumed());
11820 return !getAssumed().empty() || !getKnown().empty();
11823 bool UsedAssumedInformation =
false;
11828 if (!
A.checkForAllCallSites(CallSitePred, *
this,
true,
11829 UsedAssumedInformation))
11830 return indicatePessimisticFixpoint();
11832 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11835 void trackStatistics()
const override {}
11839struct AAAssumptionInfoCallSite final : AAAssumptionInfoImpl {
11842 : AAAssumptionInfoImpl(IRP,
A, getInitialAssumptions(IRP)) {}
11853 auto *AssumptionAA =
11856 return indicatePessimisticFixpoint();
11857 bool Changed = getIntersection(AssumptionAA->getAssumed());
11858 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11862 void trackStatistics()
const override {}
11874 return Assumptions;
11889struct AAUnderlyingObjectsImpl
11895 const std::string getAsStr(
Attributor *
A)
const override {
11896 if (!isValidState())
11897 return "<invalid>";
11900 OS <<
"underlying objects: inter " << InterAssumedUnderlyingObjects.size()
11901 <<
" objects, intra " << IntraAssumedUnderlyingObjects.size()
11903 if (!InterAssumedUnderlyingObjects.empty()) {
11904 OS <<
"inter objects:\n";
11905 for (
auto *Obj : InterAssumedUnderlyingObjects)
11906 OS << *Obj <<
'\n';
11908 if (!IntraAssumedUnderlyingObjects.empty()) {
11909 OS <<
"intra objects:\n";
11910 for (
auto *Obj : IntraAssumedUnderlyingObjects)
11911 OS << *Obj <<
'\n';
11917 void trackStatistics()
const override {}
11921 auto &
Ptr = getAssociatedValue();
11923 bool UsedAssumedInformation =
false;
11930 Scope, UsedAssumedInformation))
11933 bool Changed =
false;
11935 for (
unsigned I = 0;
I < Values.
size(); ++
I) {
11936 auto &
VAC = Values[
I];
11937 auto *Obj =
VAC.getValue();
11939 if (!SeenObjects.
insert(UO ? UO : Obj).second)
11941 if (UO && UO != Obj) {
11942 if (isa<AllocaInst>(UO) || isa<GlobalValue>(UO)) {
11943 Changed |= UnderlyingObjects.
insert(UO);
11949 auto Pred = [&](
Value &
V) {
11951 Changed |= UnderlyingObjects.
insert(UO);
11957 if (!OtherAA || !OtherAA->forallUnderlyingObjects(Pred, Scope))
11959 "The forall call should not return false at this position");
11964 if (isa<SelectInst>(Obj)) {
11965 Changed |= handleIndirect(
A, *Obj, UnderlyingObjects, Scope,
11966 UsedAssumedInformation);
11969 if (
auto *
PHI = dyn_cast<PHINode>(Obj)) {
11972 for (
unsigned u = 0, e =
PHI->getNumIncomingValues(); u < e; u++) {
11974 handleIndirect(
A, *
PHI->getIncomingValue(u), UnderlyingObjects,
11975 Scope, UsedAssumedInformation);
11980 Changed |= UnderlyingObjects.
insert(Obj);
11986 bool Changed =
false;
11989 if (!UsedAssumedInformation)
11990 indicateOptimisticFixpoint();
11991 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11994 bool forallUnderlyingObjects(
11997 if (!isValidState())
11998 return Pred(getAssociatedValue());
12001 ? IntraAssumedUnderlyingObjects
12002 : InterAssumedUnderlyingObjects;
12003 for (
Value *Obj : AssumedUnderlyingObjects)
12016 bool Changed =
false;
12019 auto Pred = [&](
Value &
V) {
12020 Changed |= UnderlyingObjects.
insert(&V);
12023 if (!AA || !AA->forallUnderlyingObjects(Pred, Scope))
12025 "The forall call should not return false at this position");
12036struct AAUnderlyingObjectsFloating final : AAUnderlyingObjectsImpl {
12038 : AAUnderlyingObjectsImpl(IRP,
A) {}
12041struct AAUnderlyingObjectsArgument final : AAUnderlyingObjectsImpl {
12043 : AAUnderlyingObjectsImpl(IRP,
A) {}
12046struct AAUnderlyingObjectsCallSite final : AAUnderlyingObjectsImpl {
12048 : AAUnderlyingObjectsImpl(IRP,
A) {}
12051struct AAUnderlyingObjectsCallSiteArgument final : AAUnderlyingObjectsImpl {
12053 : AAUnderlyingObjectsImpl(IRP,
A) {}
12056struct AAUnderlyingObjectsReturned final : AAUnderlyingObjectsImpl {
12058 : AAUnderlyingObjectsImpl(IRP,
A) {}
12061struct AAUnderlyingObjectsCallSiteReturned final : AAUnderlyingObjectsImpl {
12063 : AAUnderlyingObjectsImpl(IRP,
A) {}
12066struct AAUnderlyingObjectsFunction final : AAUnderlyingObjectsImpl {
12068 : AAUnderlyingObjectsImpl(IRP,
A) {}
12083 Instruction *UInst = dyn_cast<Instruction>(
U.getUser());
12089 LLVM_DEBUG(
dbgs() <<
"[AAGlobalValueInfo] Check use: " << *
U.get() <<
" in "
12090 << *UInst <<
"\n");
12092 if (
auto *Cmp = dyn_cast<ICmpInst>(
U.getUser())) {
12093 int Idx = &
Cmp->getOperandUse(0) == &
U;
12094 if (isa<Constant>(
Cmp->getOperand(
Idx)))
12096 return U == &getAnchorValue();
12100 if (isa<ReturnInst>(UInst)) {
12102 Worklist.
push_back(ACS.getInstruction());
12105 bool UsedAssumedInformation =
false;
12107 if (!
A.checkForAllCallSites(CallSitePred, *UInst->
getFunction(),
12109 UsedAssumedInformation))
12116 auto *CB = dyn_cast<CallBase>(UInst);
12127 if (!Fn || !
A.isFunctionIPOAmendable(*Fn))
12136 unsigned NumUsesBefore =
Uses.size();
12142 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
12145 case UseCaptureKind::NO_CAPTURE:
12146 return checkUse(
A, U, Follow, Worklist);
12147 case UseCaptureKind::MAY_CAPTURE:
12148 return checkUse(
A, U, Follow, Worklist);
12149 case UseCaptureKind::PASSTHROUGH:
12155 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
12156 Uses.insert(&OldU);
12160 while (!Worklist.
empty()) {
12162 if (!Visited.
insert(V).second)
12164 if (!
A.checkForAllUses(UsePred, *
this, *V,
12166 DepClassTy::OPTIONAL,
12167 true, EquivalentUseCB)) {
12168 return indicatePessimisticFixpoint();
12172 return Uses.size() == NumUsesBefore ? ChangeStatus::UNCHANGED
12173 : ChangeStatus::CHANGED;
12176 bool isPotentialUse(
const Use &U)
const override {
12177 return !isValidState() ||
Uses.contains(&U);
12182 return ChangeStatus::UNCHANGED;
12186 const std::string getAsStr(
Attributor *
A)
const override {
12187 return "[" + std::to_string(
Uses.size()) +
" uses]";
12190 void trackStatistics()
const override {
12208 auto *MD = getCtxI()->getMetadata(LLVMContext::MD_callees);
12209 if (!MD && !
A.isClosedWorldModule())
12213 for (
const auto &
Op : MD->operands())
12214 if (
Function *Callee = mdconst::dyn_extract_or_null<Function>(
Op))
12215 PotentialCallees.insert(Callee);
12216 }
else if (
A.isClosedWorldModule()) {
12218 A.getInfoCache().getIndirectlyCallableFunctions(
A);
12219 PotentialCallees.insert(IndirectlyCallableFunctions.
begin(),
12220 IndirectlyCallableFunctions.
end());
12223 if (PotentialCallees.empty())
12224 indicateOptimisticFixpoint();
12228 CallBase *CB = cast<CallBase>(getCtxI());
12233 bool AllCalleesKnownNow = AllCalleesKnown;
12235 auto CheckPotentialCalleeUse = [&](
Function &PotentialCallee,
12236 bool &UsedAssumedInformation) {
12239 if (!GIAA || GIAA->isPotentialUse(CalleeUse))
12241 UsedAssumedInformation = !GIAA->isAtFixpoint();
12245 auto AddPotentialCallees = [&]() {
12246 for (
auto *PotentialCallee : PotentialCallees) {
12247 bool UsedAssumedInformation =
false;
12248 if (CheckPotentialCalleeUse(*PotentialCallee, UsedAssumedInformation))
12249 AssumedCalleesNow.
insert(PotentialCallee);
12255 bool UsedAssumedInformation =
false;
12258 AA::ValueScope::AnyScope,
12259 UsedAssumedInformation)) {
12260 if (PotentialCallees.empty())
12261 return indicatePessimisticFixpoint();
12262 AddPotentialCallees();
12267 auto CheckPotentialCallee = [&](
Function &Fn) {
12268 if (!PotentialCallees.empty() && !PotentialCallees.count(&Fn))
12271 auto &CachedResult = FilterResults[&Fn];
12272 if (CachedResult.has_value())
12273 return CachedResult.value();
12275 bool UsedAssumedInformation =
false;
12276 if (!CheckPotentialCalleeUse(Fn, UsedAssumedInformation)) {
12277 if (!UsedAssumedInformation)
12278 CachedResult =
false;
12287 for (
int I = NumCBArgs;
I < NumFnArgs; ++
I) {
12288 bool IsKnown =
false;
12289 if (AA::hasAssumedIRAttr<Attribute::NoUndef>(
12291 DepClassTy::OPTIONAL, IsKnown)) {
12293 CachedResult =
false;
12298 CachedResult =
true;
12304 for (
auto &VAC : Values) {
12305 if (isa<UndefValue>(
VAC.getValue()))
12307 if (isa<ConstantPointerNull>(
VAC.getValue()) &&
12308 VAC.getValue()->getType()->getPointerAddressSpace() == 0)
12311 if (
auto *VACFn = dyn_cast<Function>(
VAC.getValue())) {
12312 if (CheckPotentialCallee(*VACFn))
12313 AssumedCalleesNow.
insert(VACFn);
12316 if (!PotentialCallees.empty()) {
12317 AddPotentialCallees();
12320 AllCalleesKnownNow =
false;
12323 if (AssumedCalleesNow == AssumedCallees &&
12324 AllCalleesKnown == AllCalleesKnownNow)
12325 return ChangeStatus::UNCHANGED;
12327 std::swap(AssumedCallees, AssumedCalleesNow);
12328 AllCalleesKnown = AllCalleesKnownNow;
12329 return ChangeStatus::CHANGED;
12335 if (!AllCalleesKnown && AssumedCallees.empty())
12336 return ChangeStatus::UNCHANGED;
12338 CallBase *CB = cast<CallBase>(getCtxI());
12339 bool UsedAssumedInformation =
false;
12340 if (
A.isAssumedDead(*CB,
this,
nullptr,
12341 UsedAssumedInformation))
12342 return ChangeStatus::UNCHANGED;
12346 if (
FP->getType()->getPointerAddressSpace())
12357 if (AssumedCallees.empty()) {
12358 assert(AllCalleesKnown &&
12359 "Expected all callees to be known if there are none.");
12360 A.changeToUnreachableAfterManifest(CB);
12361 return ChangeStatus::CHANGED;
12365 if (AllCalleesKnown && AssumedCallees.size() == 1) {
12366 auto *NewCallee = AssumedCallees.front();
12369 NumIndirectCallsPromoted++;
12370 return ChangeStatus::CHANGED;
12377 A.deleteAfterManifest(*CB);
12378 return ChangeStatus::CHANGED;
12388 bool SpecializedForAnyCallees =
false;
12389 bool SpecializedForAllCallees = AllCalleesKnown;
12393 for (
Function *NewCallee : AssumedCallees) {
12394 if (!
A.shouldSpecializeCallSiteForCallee(*
this, *CB, *NewCallee,
12395 AssumedCallees.size())) {
12396 SkippedAssumedCallees.
push_back(NewCallee);
12397 SpecializedForAllCallees =
false;
12400 SpecializedForAnyCallees =
true;
12406 A.registerManifestAddedBasicBlock(*ThenTI->
getParent());
12407 A.registerManifestAddedBasicBlock(*IP->getParent());
12408 auto *SplitTI = cast<BranchInst>(LastCmp->
getNextNode());
12413 A.registerManifestAddedBasicBlock(*ElseBB);
12415 SplitTI->replaceUsesOfWith(CBBB, ElseBB);
12423 auto *CBClone = cast<CallBase>(CB->
clone());
12424 CBClone->insertBefore(ThenTI);
12425 NewCall = &cast<CallInst>(
promoteCall(*CBClone, NewCallee, &RetBC));
12426 NumIndirectCallsPromoted++;
12434 auto AttachCalleeMetadata = [&](
CallBase &IndirectCB) {
12435 if (!AllCalleesKnown)
12436 return ChangeStatus::UNCHANGED;
12437 MDBuilder MDB(IndirectCB.getContext());
12438 MDNode *Callees = MDB.createCallees(SkippedAssumedCallees);
12439 IndirectCB.setMetadata(LLVMContext::MD_callees, Callees);
12440 return ChangeStatus::CHANGED;
12443 if (!SpecializedForAnyCallees)
12444 return AttachCalleeMetadata(*CB);
12447 if (SpecializedForAllCallees) {
12451 IP->eraseFromParent();
12453 auto *CBClone = cast<CallInst>(CB->
clone());
12454 CBClone->setName(CB->
getName());
12455 CBClone->insertBefore(*IP->getParent(), IP);
12456 NewCalls.
push_back({CBClone,
nullptr});
12457 AttachCalleeMetadata(*CBClone);
12464 CB->
getParent()->getFirstInsertionPt());
12465 for (
auto &It : NewCalls) {
12467 Instruction *CallRet = It.second ? It.second : It.first;
12479 A.deleteAfterManifest(*CB);
12480 Changed = ChangeStatus::CHANGED;
12486 const std::string getAsStr(
Attributor *
A)
const override {
12487 return std::string(AllCalleesKnown ?
"eliminate" :
"specialize") +
12488 " indirect call site with " + std::to_string(AssumedCallees.size()) +
12492 void trackStatistics()
const override {
12493 if (AllCalleesKnown) {
12495 Eliminated, CallSites,
12496 "Number of indirect call sites eliminated via specialization")
12499 "Number of indirect call sites specialized")
12504 return isValidState() && AllCalleesKnown &&
all_of(AssumedCallees, CB);
12521 bool AllCalleesKnown =
true;
12528template <
typename InstType>
12529static bool makeChange(
Attributor &
A, InstType *MemInst,
const Use &U,
12531 bool UseOriginalValue) {
12532 if (
U.getOperandNo() != InstType::getPointerOperandIndex())
12535 if (MemInst->isVolatile()) {
12537 *MemInst->getFunction());
12538 unsigned NewAS = NewPtrTy->getPointerAddressSpace();
12543 if (UseOriginalValue) {
12544 A.changeUseAfterManifest(
const_cast<Use &
>(U), *OriginalValue);
12550 A.changeUseAfterManifest(
const_cast<Use &
>(U), *
CastInst);
12559 assert(isValidState() &&
"the AA is invalid");
12560 return AssumedAddressSpace;
12565 assert(getAssociatedType()->isPtrOrPtrVectorTy() &&
12566 "Associated value is not a pointer");
12568 if (!
A.getInfoCache().getFlatAddressSpace().has_value()) {
12569 indicatePessimisticFixpoint();
12573 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
12574 unsigned AS = getAssociatedType()->getPointerAddressSpace();
12575 if (AS != FlatAS) {
12576 [[maybe_unused]]
bool R = takeAddressSpace(AS);
12577 assert(R &&
"The take should happen");
12578 indicateOptimisticFixpoint();
12583 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
12584 uint32_t OldAddressSpace = AssumedAddressSpace;
12586 auto CheckAddressSpace = [&](
Value &Obj) {
12587 if (isa<UndefValue>(&Obj))
12591 if (
auto *Arg = dyn_cast<Argument>(&Obj)) {
12593 unsigned CastAddrSpace = FlatAS;
12594 for (
auto *U : Arg->
users()) {
12595 auto *ASCI = dyn_cast<AddrSpaceCastInst>(U);
12597 return takeAddressSpace(Obj.getType()->getPointerAddressSpace());
12598 if (CastAddrSpace != FlatAS &&
12599 CastAddrSpace != ASCI->getDestAddressSpace())
12601 CastAddrSpace = ASCI->getDestAddressSpace();
12603 if (CastAddrSpace != FlatAS)
12604 return takeAddressSpace(CastAddrSpace);
12607 return takeAddressSpace(Obj.getType()->getPointerAddressSpace());
12611 DepClassTy::REQUIRED);
12612 if (!AUO->forallUnderlyingObjects(CheckAddressSpace))
12613 return indicatePessimisticFixpoint();
12615 return OldAddressSpace == AssumedAddressSpace ? ChangeStatus::UNCHANGED
12616 : ChangeStatus::CHANGED;
12623 if (NewAS == InvalidAddressSpace ||
12625 return ChangeStatus::UNCHANGED;
12627 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
12629 Value *AssociatedValue = &getAssociatedValue();
12630 Value *OriginalValue = peelAddrspacecast(AssociatedValue, FlatAS);
12633 PointerType::get(getAssociatedType()->getContext(), NewAS);
12634 bool UseOriginalValue =
12637 bool Changed =
false;
12639 auto Pred = [&](
const Use &
U,
bool &) {
12640 if (
U.get() != AssociatedValue)
12642 auto *Inst = dyn_cast<Instruction>(
U.getUser());
12649 if (
auto *LI = dyn_cast<LoadInst>(Inst)) {
12651 makeChange(
A, LI, U, OriginalValue, NewPtrTy, UseOriginalValue);
12652 }
else if (
auto *SI = dyn_cast<StoreInst>(Inst)) {
12654 makeChange(
A, SI, U, OriginalValue, NewPtrTy, UseOriginalValue);
12655 }
else if (
auto *RMW = dyn_cast<AtomicRMWInst>(Inst)) {
12657 makeChange(
A, RMW, U, OriginalValue, NewPtrTy, UseOriginalValue);
12658 }
else if (
auto *CmpX = dyn_cast<AtomicCmpXchgInst>(Inst)) {
12660 makeChange(
A, CmpX, U, OriginalValue, NewPtrTy, UseOriginalValue);
12667 (void)
A.checkForAllUses(Pred, *
this, getAssociatedValue(),
12670 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
12674 const std::string getAsStr(
Attributor *
A)
const override {
12675 if (!isValidState())
12676 return "addrspace(<invalid>)";
12677 return "addrspace(" +
12678 (AssumedAddressSpace == InvalidAddressSpace
12680 : std::to_string(AssumedAddressSpace)) +
12685 uint32_t AssumedAddressSpace = InvalidAddressSpace;
12687 bool takeAddressSpace(
uint32_t AS) {
12688 if (AssumedAddressSpace == InvalidAddressSpace) {
12689 AssumedAddressSpace = AS;
12692 return AssumedAddressSpace == AS;
12695 static Value *peelAddrspacecast(
Value *V,
unsigned FlatAS) {
12696 if (
auto *
I = dyn_cast<AddrSpaceCastInst>(V)) {
12697 assert(
I->getSrcAddressSpace() != FlatAS &&
12698 "there should not be flat AS -> non-flat AS");
12699 return I->getPointerOperand();
12701 if (
auto *
C = dyn_cast<ConstantExpr>(V))
12702 if (
C->getOpcode() == Instruction::AddrSpaceCast) {
12703 assert(
C->getOperand(0)->getType()->getPointerAddressSpace() !=
12705 "there should not be flat AS -> non-flat AS X");
12706 return C->getOperand(0);
12712struct AAAddressSpaceFloating final : AAAddressSpaceImpl {
12714 : AAAddressSpaceImpl(IRP,
A) {}
12716 void trackStatistics()
const override {
12721struct AAAddressSpaceReturned final : AAAddressSpaceImpl {
12723 : AAAddressSpaceImpl(IRP,
A) {}
12729 (void)indicatePessimisticFixpoint();
12732 void trackStatistics()
const override {
12737struct AAAddressSpaceCallSiteReturned final : AAAddressSpaceImpl {
12739 : AAAddressSpaceImpl(IRP,
A) {}
12741 void trackStatistics()
const override {
12746struct AAAddressSpaceArgument final : AAAddressSpaceImpl {
12748 : AAAddressSpaceImpl(IRP,
A) {}
12753struct AAAddressSpaceCallSiteArgument final : AAAddressSpaceImpl {
12755 : AAAddressSpaceImpl(IRP,
A) {}
12761 (void)indicatePessimisticFixpoint();
12764 void trackStatistics()
const override {
12776 std::optional<TypeSize> getAllocatedSize()
const override {
12777 assert(isValidState() &&
"the AA is invalid");
12778 return AssumedAllocatedSize;
12781 std::optional<TypeSize> findInitialAllocationSize(
Instruction *
I,
12785 switch (
I->getOpcode()) {
12786 case Instruction::Alloca: {
12791 return std::nullopt;
12801 if (!isa<AllocaInst>(
I))
12802 return indicatePessimisticFixpoint();
12804 bool IsKnownNoCapture;
12805 if (!AA::hasAssumedIRAttr<Attribute::NoCapture>(
12806 A,
this, IRP, DepClassTy::OPTIONAL, IsKnownNoCapture))
12807 return indicatePessimisticFixpoint();
12810 A.getOrCreateAAFor<
AAPointerInfo>(IRP, *
this, DepClassTy::REQUIRED);
12813 return indicatePessimisticFixpoint();
12816 return indicatePessimisticFixpoint();
12819 const auto AllocationSize = findInitialAllocationSize(
I,
DL);
12822 if (!AllocationSize)
12823 return indicatePessimisticFixpoint();
12827 if (*AllocationSize == 0)
12828 return indicatePessimisticFixpoint();
12834 return indicatePessimisticFixpoint();
12836 if (BinSize == 0) {
12837 auto NewAllocationSize = std::optional<TypeSize>(
TypeSize(0,
false));
12838 if (!changeAllocationSize(NewAllocationSize))
12839 return ChangeStatus::UNCHANGED;
12840 return ChangeStatus::CHANGED;
12844 const auto &It = PI->
begin();
12847 if (It->first.Offset != 0)
12848 return indicatePessimisticFixpoint();
12850 uint64_t SizeOfBin = It->first.Offset + It->first.Size;
12852 if (SizeOfBin >= *AllocationSize)
12853 return indicatePessimisticFixpoint();
12855 auto NewAllocationSize =
12856 std::optional<TypeSize>(
TypeSize(SizeOfBin * 8,
false));
12858 if (!changeAllocationSize(NewAllocationSize))
12859 return ChangeStatus::UNCHANGED;
12861 return ChangeStatus::CHANGED;
12867 assert(isValidState() &&
12868 "Manifest should only be called if the state is valid.");
12872 auto FixedAllocatedSizeInBits = getAllocatedSize()->getFixedValue();
12874 unsigned long NumBytesToAllocate = (FixedAllocatedSizeInBits + 7) / 8;
12876 switch (
I->getOpcode()) {
12878 case Instruction::Alloca: {
12884 auto *NumBytesToValue =
12885 ConstantInt::get(
I->getContext(),
APInt(32, NumBytesToAllocate));
12888 insertPt = std::next(insertPt);
12894 return ChangeStatus::CHANGED;
12902 return ChangeStatus::UNCHANGED;
12906 const std::string getAsStr(
Attributor *
A)
const override {
12907 if (!isValidState())
12908 return "allocationinfo(<invalid>)";
12909 return "allocationinfo(" +
12910 (AssumedAllocatedSize == HasNoAllocationSize
12912 : std::to_string(AssumedAllocatedSize->getFixedValue())) +
12917 std::optional<TypeSize> AssumedAllocatedSize = HasNoAllocationSize;
12921 bool changeAllocationSize(std::optional<TypeSize>
Size) {
12922 if (AssumedAllocatedSize == HasNoAllocationSize ||
12923 AssumedAllocatedSize !=
Size) {
12924 AssumedAllocatedSize =
Size;
12931struct AAAllocationInfoFloating : AAAllocationInfoImpl {
12933 : AAAllocationInfoImpl(IRP,
A) {}
12935 void trackStatistics()
const override {
12940struct AAAllocationInfoReturned : AAAllocationInfoImpl {
12942 : AAAllocationInfoImpl(IRP,
A) {}
12948 (void)indicatePessimisticFixpoint();
12951 void trackStatistics()
const override {
12956struct AAAllocationInfoCallSiteReturned : AAAllocationInfoImpl {
12958 : AAAllocationInfoImpl(IRP,
A) {}
12960 void trackStatistics()
const override {
12965struct AAAllocationInfoArgument : AAAllocationInfoImpl {
12967 : AAAllocationInfoImpl(IRP,
A) {}
12969 void trackStatistics()
const override {
12974struct AAAllocationInfoCallSiteArgument : AAAllocationInfoImpl {
12976 : AAAllocationInfoImpl(IRP,
A) {}
12981 (void)indicatePessimisticFixpoint();
12984 void trackStatistics()
const override {
13031#define SWITCH_PK_INV(CLASS, PK, POS_NAME) \
13032 case IRPosition::PK: \
13033 llvm_unreachable("Cannot create " #CLASS " for a " POS_NAME " position!");
13035#define SWITCH_PK_CREATE(CLASS, IRP, PK, SUFFIX) \
13036 case IRPosition::PK: \
13037 AA = new (A.Allocator) CLASS##SUFFIX(IRP, A); \
13041#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13042 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13043 CLASS *AA = nullptr; \
13044 switch (IRP.getPositionKind()) { \
13045 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13046 SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
13047 SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
13048 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13049 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
13050 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
13051 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13052 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13057#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13058 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13059 CLASS *AA = nullptr; \
13060 switch (IRP.getPositionKind()) { \
13061 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13062 SWITCH_PK_INV(CLASS, IRP_FUNCTION, "function") \
13063 SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
13064 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13065 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13066 SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
13067 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13068 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13073#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS) \
13074 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13075 CLASS *AA = nullptr; \
13076 switch (IRP.getPositionKind()) { \
13077 SWITCH_PK_CREATE(CLASS, IRP, POS, SUFFIX) \
13079 llvm_unreachable("Cannot create " #CLASS " for position otherthan " #POS \
13085#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13086 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13087 CLASS *AA = nullptr; \
13088 switch (IRP.getPositionKind()) { \
13089 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13090 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13091 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13092 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13093 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13094 SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
13095 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13096 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13101#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13102 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13103 CLASS *AA = nullptr; \
13104 switch (IRP.getPositionKind()) { \
13105 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13106 SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
13107 SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
13108 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13109 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
13110 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
13111 SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
13112 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13117#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13118 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13119 CLASS *AA = nullptr; \
13120 switch (IRP.getPositionKind()) { \
13121 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13122 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13123 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13124 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13125 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13126 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13127 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13128 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13178#undef CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION
13179#undef CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION
13180#undef CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION
13181#undef CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION
13182#undef CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION
13183#undef CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION
13184#undef SWITCH_PK_CREATE
13185#undef SWITCH_PK_INV
AMDGPU Register Bank Select
This file implements a class to represent arbitrary precision integral constant values and operations...
ReachingDefAnalysis InstSet & ToRemove
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
This file contains the simple types necessary to represent the attributes associated with functions a...
#define STATS_DECLTRACK(NAME, TYPE, MSG)
static std::optional< Constant * > askForAssumedConstant(Attributor &A, const AbstractAttribute &QueryingAA, const IRPosition &IRP, Type &Ty)
static cl::opt< unsigned, true > MaxPotentialValues("attributor-max-potential-values", cl::Hidden, cl::desc("Maximum number of potential values to be " "tracked for each position."), cl::location(llvm::PotentialConstantIntValuesState::MaxPotentialValues), cl::init(7))
static const Value * getPointerOperand(const Instruction *I, bool AllowVolatile)
Get pointer operand of memory accessing instruction.
static void clampReturnedValueStates(Attributor &A, const AAType &QueryingAA, StateType &S, const IRPosition::CallBaseContext *CBContext=nullptr)
Clamp the information known for all returned values of a function (identified by QueryingAA) into S.
#define STATS_DECLTRACK_FN_ATTR(NAME)
#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxPotentialValuesIterations("attributor-max-potential-values-iterations", cl::Hidden, cl::desc("Maximum number of iterations we keep dismantling potential values."), cl::init(64))
#define STATS_DECLTRACK_CS_ATTR(NAME)
#define PIPE_OPERATOR(CLASS)
static bool mayBeInCycle(const CycleInfo *CI, const Instruction *I, bool HeaderOnly, Cycle **CPtr=nullptr)
#define STATS_DECLTRACK_ARG_ATTR(NAME)
static const Value * stripAndAccumulateOffsets(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Val, const DataLayout &DL, APInt &Offset, bool GetMinOffset, bool AllowNonInbounds, bool UseAssumed=false)
#define STATS_DECLTRACK_CSRET_ATTR(NAME)
static cl::opt< bool > ManifestInternal("attributor-manifest-internal", cl::Hidden, cl::desc("Manifest Attributor internal string attributes."), cl::init(false))
static Value * constructPointer(Value *Ptr, int64_t Offset, IRBuilder< NoFolder > &IRB)
Helper function to create a pointer based on Ptr, and advanced by Offset bytes.
#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define BUILD_STAT_NAME(NAME, TYPE)
static bool isDenselyPacked(Type *Ty, const DataLayout &DL)
Checks if a type could have padding bytes.
#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static const Value * getMinimalBaseOfPointer(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Ptr, int64_t &BytesOffset, const DataLayout &DL, bool AllowNonInbounds=false)
#define STATS_DECLTRACK_FNRET_ATTR(NAME)
#define STATS_DECLTRACK_CSARG_ATTR(NAME)
#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS)
#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxHeapToStackSize("max-heap-to-stack-size", cl::init(128), cl::Hidden)
#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define STATS_DECLTRACK_FLOATING_ATTR(NAME)
#define STATS_DECL(NAME, TYPE, MSG)
BlockVerifier::State From
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
static GCRegistry::Add< ErlangGC > A("erlang", "erlang-compatible garbage collector")
static bool isReachableImpl(SmallVectorImpl< BasicBlock * > &Worklist, const StopSetT &StopSet, const SmallPtrSetImpl< BasicBlock * > *ExclusionSet, const DominatorTree *DT, const LoopInfo *LI)
This file contains the declarations for the subclasses of Constant, which represent the different fla...
This file declares an analysis pass that computes CycleInfo for LLVM IR, specialized from GenericCycl...
Returns the sub type a function will return at a given Idx Should correspond to the result type of an ExtractValue instruction executed with just that one unsigned Idx
Given that RA is a live propagate it s liveness to any other values it uses(according to Uses). void DeadArgumentEliminationPass
Performs the initial survey of the specified function
Given that RA is a live value
This file defines DenseMapInfo traits for DenseMap.
This file implements a map that provides insertion order iteration.
static unsigned getAddressSpace(const Value *V, unsigned MaxLookup)
ConstantRange Range(APInt(BitWidth, Low), APInt(BitWidth, High))
uint64_t IntrinsicInst * II
static StringRef getName(Value *V)
static cl::opt< RegAllocEvictionAdvisorAnalysis::AdvisorMode > Mode("regalloc-enable-advisor", cl::Hidden, cl::init(RegAllocEvictionAdvisorAnalysis::AdvisorMode::Default), cl::desc("Enable regalloc advisor mode"), cl::values(clEnumValN(RegAllocEvictionAdvisorAnalysis::AdvisorMode::Default, "default", "Default"), clEnumValN(RegAllocEvictionAdvisorAnalysis::AdvisorMode::Release, "release", "precompiled"), clEnumValN(RegAllocEvictionAdvisorAnalysis::AdvisorMode::Development, "development", "for training")))
Remove Loads Into Fake Uses
This builds on the llvm/ADT/GraphTraits.h file to find the strongly connected components (SCCs) of a ...
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
This file defines generic set operations that may be used on set's of different types,...
This file implements a set that has insertion order iteration characteristics.
This file defines the SmallPtrSet class.
This file defines the SmallVector class.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
static SymbolRef::Type getType(const Symbol *Sym)
static void initialize(TargetLibraryInfoImpl &TLI, const Triple &T, ArrayRef< StringLiteral > StandardNames)
Initialize the set of available library functions based on the specified target triple.
static unsigned getBitWidth(Type *Ty, const DataLayout &DL)
Returns the bitwidth of the given scalar or pointer type.
static unsigned getSize(unsigned Kind)
AACallGraphNode * operator*() const
A manager for alias analyses.
bool isNoAlias(const MemoryLocation &LocA, const MemoryLocation &LocB)
A trivial helper function to check to see if the specified pointers are no-alias.
Class for arbitrary precision integers.
int64_t getSExtValue() const
Get sign extended value.
CallBase * getInstruction() const
Return the underlying instruction.
bool isCallbackCall() const
Return true if this ACS represents a callback call.
bool isDirectCall() const
Return true if this ACS represents a direct call.
static void getCallbackUses(const CallBase &CB, SmallVectorImpl< const Use * > &CallbackUses)
Add operand uses of CB that represent callback uses into CallbackUses.
int getCallArgOperandNo(Argument &Arg) const
Return the operand index of the underlying instruction associated with Arg.
This class represents a conversion between pointers from one address space to another.
an instruction to allocate memory on the stack
Align getAlign() const
Return the alignment of the memory that is being allocated by the instruction.
unsigned getAddressSpace() const
Return the address space for the allocation.
std::optional< TypeSize > getAllocationSize(const DataLayout &DL) const
Get allocation size in bytes.
This class represents an incoming formal argument to a Function.
bool hasPointeeInMemoryValueAttr() const
Return true if this argument has the byval, sret, inalloca, preallocated, or byref attribute.
bool hasReturnedAttr() const
Return true if this argument has the returned attribute.
bool hasByValAttr() const
Return true if this argument has the byval attribute.
const Function * getParent() const
unsigned getArgNo() const
Return the index of this formal argument in its containing function.
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
A function analysis which provides an AssumptionCache.
A cache of @llvm.assume calls within a function.
static Attribute get(LLVMContext &Context, AttrKind Kind, uint64_t Val=0)
Return a uniquified Attribute object.
static Attribute getWithDereferenceableBytes(LLVMContext &Context, uint64_t Bytes)
FPClassTest getNoFPClass() const
Return the FPClassTest for nofpclass.
Attribute::AttrKind getKindAsEnum() const
Return the attribute's kind as an enum (Attribute::AttrKind).
MemoryEffects getMemoryEffects() const
Returns memory effects.
static Attribute getWithDereferenceableOrNullBytes(LLVMContext &Context, uint64_t Bytes)
static Attribute getWithNoFPClass(LLVMContext &Context, FPClassTest Mask)
AttrKind
This enumeration lists the attributes that can be associated with parameters, function results,...
static bool isEnumAttrKind(AttrKind Kind)
static Attribute getWithMemoryEffects(LLVMContext &Context, MemoryEffects ME)
static Attribute getWithAlignment(LLVMContext &Context, Align Alignment)
Return a uniquified Attribute object that has the specific alignment set.
LLVM Basic Block Representation.
const_iterator getFirstInsertionPt() const
Returns an iterator to the first instruction in this block that is suitable for inserting a non-PHI i...
const Instruction & front() const
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
const Function * getParent() const
Return the enclosing method, or null if none.
InstListType::iterator iterator
Instruction iterators...
const Instruction * getTerminator() const LLVM_READONLY
Returns the terminator instruction if the block is well formed or null if the block is not well forme...
BinaryOps getOpcode() const
Conditional or Unconditional Branch instruction.
unsigned getNumSuccessors() const
static BranchInst * Create(BasicBlock *IfTrue, InsertPosition InsertBefore=nullptr)
BasicBlock * getSuccessor(unsigned i) const
Value * getCondition() const
Allocate memory in an ever growing pool, as if by bump-pointer.
Base class for all callable instructions (InvokeInst and CallInst) Holds everything related to callin...
Function * getCalledFunction() const
Returns the function called, or null if this is an indirect function invocation or the function signa...
bool isMustTailCall() const
Tests if this call site must be tail call optimized.
bool isIndirectCall() const
Return true if the callsite is an indirect call.
bool isCallee(Value::const_user_iterator UI) const
Determine whether the passed iterator points to the callee operand's Use.
Value * getCalledOperand() const
const Use & getCalledOperandUse() const
const Use & getArgOperandUse(unsigned i) const
Wrappers for getting the Use of a call argument.
Value * getArgOperand(unsigned i) const
bool isBundleOperand(unsigned Idx) const
Return true if the operand at index Idx is a bundle operand.
bool isConvergent() const
Determine if the invoke is convergent.
FunctionType * getFunctionType() const
Intrinsic::ID getIntrinsicID() const
Returns the intrinsic ID of the intrinsic called or Intrinsic::not_intrinsic if the called function i...
iterator_range< User::op_iterator > args()
Iteration adapter for range-for loops.
unsigned getArgOperandNo(const Use *U) const
Given a use for a arg operand, get the arg operand number that corresponds to it.
unsigned arg_size() const
bool isArgOperand(const Use *U) const
Function * getCaller()
Helper to get the caller (the parent function).
This class represents a function call, abstracting a target machine's calling convention.
static CallInst * Create(FunctionType *Ty, Value *F, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
This is the base class for all instructions that perform data casts.
Instruction::CastOps getOpcode() const
Return the opcode of this CastInst.
bool isIntegerCast() const
There are several places where we need to know if a cast instruction only deals with integer source a...
Type * getDestTy() const
Return the destination type, as a convenience.
This class is the base class for the comparison instructions.
bool isEquality() const
Determine if this is an equals/not equals predicate.
bool isFalseWhenEqual() const
This is just a convenience.
Predicate
This enumeration lists the possible predicates for CmpInst subclasses.
bool isTrueWhenEqual() const
This is just a convenience.
Predicate getPredicate() const
Return the predicate for this instruction.
A constant value that is initialized with an expression using other constant values.
static Constant * getExtractElement(Constant *Vec, Constant *Idx, Type *OnlyIfReducedTy=nullptr)
This is the shared class of boolean and integer constants.
static ConstantInt * getTrue(LLVMContext &Context)
This class represents a range of values.
const APInt & getLower() const
Return the lower value for this range.
bool isFullSet() const
Return true if this set contains all of the elements possible for this data-type.
bool isEmptySet() const
Return true if this set contains no members.
APInt getSignedMin() const
Return the smallest signed value contained in the ConstantRange.
bool isSingleElement() const
Return true if this set contains exactly one member.
static ConstantRange makeAllowedICmpRegion(CmpInst::Predicate Pred, const ConstantRange &Other)
Produce the smallest range such that all values that may satisfy the given predicate with any value c...
const APInt & getUpper() const
Return the upper value for this range.
APInt getSignedMax() const
Return the largest signed value contained in the ConstantRange.
This is an important base class in LLVM.
Analysis pass which computes a CycleInfo.
This class represents an Operation in the Expression.
A parsed version of the target data layout string in and methods for querying it.
iterator find(const_arg_type_t< KeyT > Val)
size_type count(const_arg_type_t< KeyT > Val) const
Return 1 if the specified key is in the map, 0 otherwise.
bool contains(const_arg_type_t< KeyT > Val) const
Return true if the specified key is in the map, false otherwise.
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
Implements a dense probed hash-table based set.
Analysis pass which computes a DominatorTree.
Concrete subclass of DominatorTreeBase that is used to compute a normal dominator tree.
bool dominates(const BasicBlock *BB, const Use &U) const
Return true if the (end of the) basic block BB dominates the use U.
An instruction for ordering other memory operations.
A handy container for a FunctionType+Callee-pointer pair, which can be passed around as a single enti...
const BasicBlock & getEntryBlock() const
iterator_range< arg_iterator > args()
const Function & getFunction() const
Argument * getArg(unsigned i) const
bool hasFnAttribute(Attribute::AttrKind Kind) const
Return true if the function has the attribute.
CycleT * getCycle(const BlockT *Block) const
Find the innermost cycle containing a given block.
A possibly irreducible generalization of a Loop.
bool isDeclaration() const
Return true if the primary definition of this global value is outside of the current translation unit...
bool hasLocalLinkage() const
This instruction compares its operands according to the predicate given to the constructor.
static bool compare(const APInt &LHS, const APInt &RHS, ICmpInst::Predicate Pred)
Return result of LHS Pred RHS comparison.
Value * CreatePtrAdd(Value *Ptr, Value *Offset, const Twine &Name="", GEPNoWrapFlags NW=GEPNoWrapFlags::none())
ConstantInt * getInt64(uint64_t C)
Get a constant 64-bit value.
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
Instruction * clone() const
Create a copy of 'this' instruction that is identical in all ways except the following:
bool isLifetimeStartOrEnd() const LLVM_READONLY
Return true if the instruction is a llvm.lifetime.start or llvm.lifetime.end marker.
bool mayReadOrWriteMemory() const
Return true if this instruction may read or write memory.
bool mayWriteToMemory() const LLVM_READONLY
Return true if this instruction may modify memory.
void insertBefore(Instruction *InsertPos)
Insert an unlinked instruction into a basic block immediately before the specified instruction.
InstListType::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
const Function * getFunction() const
Return the function this instruction belongs to.
BasicBlock * getSuccessor(unsigned Idx) const LLVM_READONLY
Return the specified successor. This instruction must be a terminator.
bool mayHaveSideEffects() const LLVM_READONLY
Return true if the instruction may have side effects.
bool isTerminator() const
bool mayReadFromMemory() const LLVM_READONLY
Return true if this instruction may read memory.
const Instruction * getNextNonDebugInstruction(bool SkipPseudoOp=false) const
Return a pointer to the next non-debug instruction in the same basic block as 'this',...
unsigned getOpcode() const
Returns a member of one of the enums like Instruction::Add.
void setDebugLoc(DebugLoc Loc)
Set the debug location information for this instruction.
const DataLayout & getDataLayout() const
Get the data layout of the module this instruction belongs to.
A wrapper class for inspecting calls to intrinsic functions.
This is an important class for using LLVM in a threaded context.
Analysis to compute lazy value information.
This pass computes, caches, and vends lazy value constraint information.
ConstantRange getConstantRange(Value *V, Instruction *CxtI, bool UndefAllowed)
Return the ConstantRange constraint that is known to hold for the specified value at the specified in...
An instruction for reading from memory.
Analysis pass that exposes the LoopInfo for a function.
LoopT * getLoopFor(const BlockT *BB) const
Return the inner most loop that BB lives in.
const MDOperand & getOperand(unsigned I) const
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
unsigned getNumOperands() const
Return number of MDNode operands.
This class implements a map that also provides access to all stored values in a deterministic order.
static MemoryEffectsBase readOnly()
Create MemoryEffectsBase that can read any memory.
bool doesNotAccessMemory() const
Whether this function accesses no memory.
static MemoryEffectsBase argMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
Create MemoryEffectsBase that can only access argument memory.
static MemoryEffectsBase inaccessibleMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
Create MemoryEffectsBase that can only access inaccessible memory.
bool onlyAccessesInaccessibleMem() const
Whether this function only (at most) accesses inaccessible memory.
ModRefInfo getModRef(Location Loc) const
Get ModRefInfo for the given Location.
bool onlyAccessesArgPointees() const
Whether this function only (at most) accesses argument memory.
bool onlyReadsMemory() const
Whether this function only (at most) reads memory.
static MemoryEffectsBase writeOnly()
Create MemoryEffectsBase that can write any memory.
static MemoryEffectsBase inaccessibleOrArgMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
Create MemoryEffectsBase that can only access inaccessible or argument memory.
static MemoryEffectsBase none()
Create MemoryEffectsBase that cannot read or write any memory.
bool onlyAccessesInaccessibleOrArgMem() const
Whether this function only (at most) accesses argument and inaccessible memory.
static MemoryEffectsBase unknown()
Create MemoryEffectsBase that can read and write any memory.
static std::optional< MemoryLocation > getOrNone(const Instruction *Inst)
A Module instance is used to store all the information related to an LLVM module.
Evaluate the size and offset of an object pointed to by a Value*.
static SizeOffsetValue unknown()
static PHINode * Create(Type *Ty, unsigned NumReservedValues, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
Constructors - NumReservedValues is a hint for the number of incoming edges that this phi node will h...
static PoisonValue * get(Type *T)
Static factory methods - Return an 'poison' object of the specified type.
Return a value (possibly void), from a function.
Value * getReturnValue() const
Convenience accessor. Returns null if there is no return value.
This class represents an analyzed expression in the program.
Analysis pass that exposes the ScalarEvolution for a function.
The main scalar evolution driver.
const SCEV * getSCEVAtScope(const SCEV *S, const Loop *L)
Return a SCEV expression for the specified value at the specified scope in the program.
const SCEV * getSCEV(Value *V)
Return a SCEV expression for the full generality of the specified expression.
unsigned getSmallConstantMaxTripCount(const Loop *L, SmallVectorImpl< const SCEVPredicate * > *Predicates=nullptr)
Returns the upper bound of the loop trip count as a normal unsigned value.
ConstantRange getUnsignedRange(const SCEV *S)
Determine the unsigned range for a particular SCEV.
This class represents the LLVM 'select' instruction.
A vector that has set insertion semantics.
size_type size() const
Determine the number of elements in the SetVector.
bool insert(const value_type &X)
Insert a new element into the SetVector.
bool erase(PtrType Ptr)
Remove pointer from the set.
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
A SetVector that performs no allocations if smaller than a certain size.
SmallSet - This maintains a set of unique values, optimizing for the case when the set is small (less...
std::pair< const_iterator, bool > insert(const T &V)
insert - Insert an element into the set if it isn't already there.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
reference emplace_back(ArgTypes &&... Args)
void append(ItTy in_start, ItTy in_end)
Add the specified range to the end of the SmallVector.
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
An instruction for storing to memory.
StringRef - Represent a constant reference to a string, i.e.
Used to lazily calculate structure layout information for a target machine, based on the DataLayout s...
TypeSize getElementOffset(unsigned Idx) const
TypeSize getElementOffsetInBits(unsigned Idx) const
Class to represent struct types.
unsigned getNumElements() const
Random access to the elements.
Type * getElementType(unsigned N) const
Analysis pass providing the TargetTransformInfo.
Twine - A lightweight data structure for efficiently representing the concatenation of temporary valu...
The instances of the Type class are immutable: once they are created, they are never changed.
unsigned getIntegerBitWidth() const
bool isPointerTy() const
True if this is an instance of PointerType.
static IntegerType * getInt1Ty(LLVMContext &C)
unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
bool isSized(SmallPtrSetImpl< Type * > *Visited=nullptr) const
Return true if it makes sense to take the size of this type.
static IntegerType * getInt8Ty(LLVMContext &C)
bool isPtrOrPtrVectorTy() const
Return true if this is a pointer type or a vector of pointer types.
static IntegerType * getInt32Ty(LLVMContext &C)
bool isIntegerTy() const
True if this is an instance of IntegerType.
bool isVoidTy() const
Return true if this is 'void'.
'undef' values are things that do not have specified contents.
static UndefValue * get(Type *T)
Static factory methods - Return an 'undef' object of the specified type.
This function has undefined behavior.
A Use represents the edge between a Value definition and its users.
User * getUser() const
Returns the User that contains this Use.
bool replaceUsesOfWith(Value *From, Value *To)
Replace uses of one Value with another.
const Use & getOperandUse(unsigned i) const
Value * getOperand(unsigned i) const
unsigned getNumOperands() const
bool isDroppable() const
A droppable user is a user for which uses can be dropped without affecting correctness and should be ...
ValueT lookup(const KeyT &Val) const
lookup - Return the entry for the specified key, or a default constructed value if no such entry exis...
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
static constexpr uint64_t MaximumAlignment
const Value * stripAndAccumulateConstantOffsets(const DataLayout &DL, APInt &Offset, bool AllowNonInbounds, bool AllowInvariantGroup=false, function_ref< bool(Value &Value, APInt &Offset)> ExternalAnalysis=nullptr) const
Accumulate the constant offset this value has compared to a base pointer.
void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
iterator_range< user_iterator > users()
LLVMContext & getContext() const
All values hold a context through their type.
iterator_range< use_iterator > uses()
StringRef getName() const
Return a constant reference to the value's name.
std::pair< iterator, bool > insert(const ValueT &V)
constexpr ScalarTy getFixedValue() const
constexpr bool isScalable() const
Returns whether the quantity is scaled by a runtime quantity (vscale).
An efficient, type-erasing, non-owning reference to a callable.
const ParentTy * getParent() const
self_iterator getIterator()
NodeTy * getNextNode()
Get the next node, or nullptr for the list tail.
This class implements an extremely fast bulk output stream that can only output to a stream.
A raw_ostream that writes to an std::string.
Enumerate the SCCs of a directed graph in reverse topological order of the SCC DAG.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
bool isAssumedReadNone(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readnone.
bool isAssumedReadOnly(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readonly.
raw_ostream & operator<<(raw_ostream &OS, const RangeTy &R)
std::optional< Value * > combineOptionalValuesInAAValueLatice(const std::optional< Value * > &A, const std::optional< Value * > &B, Type *Ty)
Return the combination of A and B such that the result is a possible value of both.
bool isValidAtPosition(const ValueAndContext &VAC, InformationCache &InfoCache)
Return true if the value of VAC is valid at the position of VAC, that is a constant,...
bool isAssumedThreadLocalObject(Attributor &A, Value &Obj, const AbstractAttribute &QueryingAA)
Return true if Obj is assumed to be a thread local object.
bool isDynamicallyUnique(Attributor &A, const AbstractAttribute &QueryingAA, const Value &V, bool ForAnalysisOnly=true)
Return true if V is dynamically unique, that is, there are no two "instances" of V at runtime with di...
bool getPotentialCopiesOfStoredValue(Attributor &A, StoreInst &SI, SmallSetVector< Value *, 4 > &PotentialCopies, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values of the one stored by SI into PotentialCopies.
bool isGPU(const Module &M)
Return true iff M target a GPU (and we can use GPU AS reasoning).
ValueScope
Flags to distinguish intra-procedural queries from potentially inter-procedural queries.
bool isValidInScope(const Value &V, const Function *Scope)
Return true if V is a valid value in Scope, that is a constant or an instruction/argument of Scope.
bool isPotentiallyReachable(Attributor &A, const Instruction &FromI, const Instruction &ToI, const AbstractAttribute &QueryingAA, const AA::InstExclusionSetTy *ExclusionSet=nullptr, std::function< bool(const Function &F)> GoBackwardsCB=nullptr)
Return true if ToI is potentially reachable from FromI without running into any instruction in Exclus...
bool isNoSyncInst(Attributor &A, const Instruction &I, const AbstractAttribute &QueryingAA)
Return true if I is a nosync instruction.
bool getPotentiallyLoadedValues(Attributor &A, LoadInst &LI, SmallSetVector< Value *, 4 > &PotentialValues, SmallSetVector< Instruction *, 4 > &PotentialValueOrigins, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values LI could read into PotentialValues.
Value * getWithType(Value &V, Type &Ty)
Try to convert V to type Ty without introducing new instructions.
constexpr char Attrs[]
Key for Kernel::Metadata::mAttrs.
@ C
The default llvm calling convention, compatible with C.
@ Unsupported
This operation is completely unsupported on the target.
@ Undef
Value of the register doesn't matter.
@ SingleThread
Synchronized with respect to signal handlers executing in the same thread.
@ CE
Windows NT (Windows on ARM)
initializer< Ty > init(const Ty &Val)
LocationClass< Ty > location(Ty &L)
unsigned combineHashValue(unsigned a, unsigned b)
Simplistic combination of 32-bit hash values into 32-bit hash values.
ElementType
The element type of an SRV or UAV resource.
Scope
Defines the scope in which this symbol should be visible: Default – Visible in the public interface o...
const_iterator begin(StringRef path LLVM_LIFETIME_BOUND, Style style=Style::native)
Get begin iterator over path.
const_iterator end(StringRef path LLVM_LIFETIME_BOUND)
Get end iterator over path.
This is an optimization pass for GlobalISel generic memory operations.
bool operator<(int64_t V1, const APSInt &V2)
LLVM_ATTRIBUTE_ALWAYS_INLINE DynamicAPInt gcd(const DynamicAPInt &A, const DynamicAPInt &B)
bool all_of(R &&range, UnaryPredicate P)
Provide wrappers to std::all_of which take ranges instead of having to pass begin/end explicitly.
bool isLegalToPromote(const CallBase &CB, Function *Callee, const char **FailureReason=nullptr)
Return true if the given indirect call site can be made to call Callee.
Constant * getInitialValueOfAllocation(const Value *V, const TargetLibraryInfo *TLI, Type *Ty)
If this is a call to an allocation function that initializes memory to a fixed value,...
auto size(R &&Range, std::enable_if_t< std::is_base_of< std::random_access_iterator_tag, typename std::iterator_traits< decltype(Range.begin())>::iterator_category >::value, void > *=nullptr)
Get the size of a range.
auto pred_end(const MachineBasicBlock *BB)
unsigned getPointerAddressSpace(const Type *T)
auto successors(const MachineBasicBlock *BB)
bool isRemovableAlloc(const CallBase *V, const TargetLibraryInfo *TLI)
Return true if this is a call to an allocation function that does not have side effects that we are r...
APFloat abs(APFloat X)
Returns the absolute value of the argument.
raw_fd_ostream & outs()
This returns a reference to a raw_fd_ostream for standard output.
UseCaptureKind DetermineUseCaptureKind(const Use &U, llvm::function_ref< bool(Value *, const DataLayout &)> IsDereferenceableOrNull)
Determine what kind of capture behaviour U may exhibit.
Value * getAllocAlignment(const CallBase *V, const TargetLibraryInfo *TLI)
Gets the alignment argument for an aligned_alloc-like function, using either built-in knowledge based...
Value * simplifyInstructionWithOperands(Instruction *I, ArrayRef< Value * > NewOps, const SimplifyQuery &Q)
Like simplifyInstruction but the operands of I are replaced with NewOps.
Value * GetPointerBaseWithConstantOffset(Value *Ptr, int64_t &Offset, const DataLayout &DL, bool AllowNonInbounds=true)
Analyze the specified pointer to see if it can be expressed as a base pointer plus a constant offset.
const Value * getUnderlyingObject(const Value *V, unsigned MaxLookup=6)
This method strips off any GEP address adjustments, pointer casts or llvm.threadlocal....
scc_iterator< T > scc_begin(const T &G)
Construct the begin iterator for a deduced graph type T.
PotentialValuesState< std::pair< AA::ValueAndContext, AA::ValueScope > > PotentialLLVMValuesState
bool isNoAliasCall(const Value *V)
Return true if this pointer is returned by a noalias function.
raw_ostream & WriteGraph(raw_ostream &O, const GraphType &G, bool ShortNames=false, const Twine &Title="")
bool operator==(const AddressRangeValuePair &LHS, const AddressRangeValuePair &RHS)
ConstantRange getConstantRangeFromMetadata(const MDNode &RangeMD)
Parse out a conservative ConstantRange from !range metadata.
auto map_range(ContainerTy &&C, FuncTy F)
const Value * getPointerOperand(const Value *V)
A helper function that returns the pointer operand of a load, store or GEP instruction.
Value * simplifyInstruction(Instruction *I, const SimplifyQuery &Q)
See if we can compute a simplified version of this instruction.
bool any_of(R &&range, UnaryPredicate P)
Provide wrappers to std::any_of which take ranges instead of having to pass begin/end explicitly.
unsigned Log2_32(uint32_t Value)
Return the floor log base 2 of the specified value, -1 if the value is zero.
constexpr bool isPowerOf2_32(uint32_t Value)
Return true if the argument is a power of two > 0.
void sort(IteratorTy Start, IteratorTy End)
MemoryEffectsBase< IRMemLocation > MemoryEffects
Summary of how a function affects memory in the program.
bool NullPointerIsDefined(const Function *F, unsigned AS=0)
Check whether null pointer dereferencing is considered undefined behavior for a given function or an ...
raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
bool isPointerTy(const Type *T)
bool wouldInstructionBeTriviallyDead(const Instruction *I, const TargetLibraryInfo *TLI=nullptr)
Return true if the result produced by the instruction would have no side effects if it was not used.
bool isSafeToSpeculativelyExecute(const Instruction *I, const Instruction *CtxI=nullptr, AssumptionCache *AC=nullptr, const DominatorTree *DT=nullptr, const TargetLibraryInfo *TLI=nullptr, bool UseVariableInfo=true)
Return true if the instruction does not have any effects besides calculating the result and does not ...
bool set_union(S1Ty &S1, const S2Ty &S2)
set_union(A, B) - Compute A := A u B, return whether A changed.
void RemapInstruction(Instruction *I, ValueToValueMapTy &VM, RemapFlags Flags=RF_None, ValueMapTypeRemapper *TypeMapper=nullptr, ValueMaterializer *Materializer=nullptr)
Convert the instruction operands from referencing the current values into those specified by VM.
CallBase & promoteCall(CallBase &CB, Function *Callee, CastInst **RetBitCast=nullptr)
Promote the given indirect call site to unconditionally call Callee.
bool hasAssumption(const Function &F, const KnownAssumptionString &AssumptionStr)
Return true if F has the assumption AssumptionStr attached.
RetainedKnowledge getKnowledgeFromUse(const Use *U, ArrayRef< Attribute::AttrKind > AttrKinds)
Return a valid Knowledge associated to the Use U if its Attribute kind is in AttrKinds.
bool isKnownNonZero(const Value *V, const SimplifyQuery &Q, unsigned Depth=0)
Return true if the given value is known to be non-zero when defined.
AtomicOrdering
Atomic ordering for LLVM's memory model.
ChangeStatus clampStateAndIndicateChange< DerefState >(DerefState &S, const DerefState &R)
PotentialValuesState< APInt > PotentialConstantIntValuesState
DWARFExpression::Operation Op
bool isGuaranteedNotToBeUndefOrPoison(const Value *V, AssumptionCache *AC=nullptr, const Instruction *CtxI=nullptr, const DominatorTree *DT=nullptr, unsigned Depth=0)
Return true if this function can prove that V does not have undef bits and is never poison.
Value * getFreedOperand(const CallBase *CB, const TargetLibraryInfo *TLI)
If this is a call to a free function, return the freed operand.
ChangeStatus clampStateAndIndicateChange(StateType &S, const StateType &R)
Helper function to clamp a state S of type StateType with the information in R and indicate/return if...
constexpr unsigned BitWidth
auto pred_begin(const MachineBasicBlock *BB)
std::optional< APInt > getAllocSize(const CallBase *CB, const TargetLibraryInfo *TLI, function_ref< const Value *(const Value *)> Mapper=[](const Value *V) { return V;})
Return the size of the requested allocation.
DenseSet< StringRef > getAssumptions(const Function &F)
Return the set of all assumptions for the function F.
Align assumeAligned(uint64_t Value)
Treats the value 0 as a 1, so Align is always at least 1.
Instruction * SplitBlockAndInsertIfThen(Value *Cond, BasicBlock::iterator SplitBefore, bool Unreachable, MDNode *BranchWeights=nullptr, DomTreeUpdater *DTU=nullptr, LoopInfo *LI=nullptr, BasicBlock *ThenBlock=nullptr)
Split the containing block at the specified instruction - everything before SplitBefore stays in the ...
@ OPTIONAL
The target may be valid if the source is not.
@ NONE
Do not track a dependence between source and target.
@ REQUIRED
The target cannot be valid if the source is not.
Value * simplifyCmpInst(CmpPredicate Predicate, Value *LHS, Value *RHS, const SimplifyQuery &Q)
Given operands for a CmpInst, fold the result or return null.
bool mayContainIrreducibleControl(const Function &F, const LoopInfo *LI)
T bit_floor(T Value)
Returns the largest integral power of two no greater than Value if Value is nonzero.
KnownFPClass computeKnownFPClass(const Value *V, const APInt &DemandedElts, FPClassTest InterestedClasses, unsigned Depth, const SimplifyQuery &SQ)
Determine which floating-point classes are valid for V, and return them in KnownFPClass bit sets.
bool isIdentifiedObject(const Value *V)
Return true if this pointer refers to a distinct and identifiable object.
constexpr StringRef AssumptionAttrKey
The key we use for assumption attributes.
Implement std::hash so that hash_code can be used in STL containers.
void swap(llvm::BitVector &LHS, llvm::BitVector &RHS)
Implement std::swap in terms of BitVector swap.
A type to track pointer/struct usage and accesses for AAPointerInfo.
bool forallInterferingAccesses(AA::RangeTy Range, F CB) const
See AAPointerInfo::forallInterferingAccesses.
AAPointerInfo::const_bin_iterator end() const
ChangeStatus addAccess(Attributor &A, const AAPointerInfo::RangeList &Ranges, Instruction &I, std::optional< Value * > Content, AAPointerInfo::AccessKind Kind, Type *Ty, Instruction *RemoteI=nullptr)
Add a new Access to the state at offset Offset and with size Size.
DenseMap< const Instruction *, SmallVector< unsigned > > RemoteIMap
AAPointerInfo::const_bin_iterator begin() const
AAPointerInfo::OffsetInfo ReturnedOffsets
Flag to determine if the underlying pointer is reaching a return statement in the associated function...
State & operator=(State &&R)
State(State &&SIS)=default
const AAPointerInfo::Access & getAccess(unsigned Index) const
SmallVector< AAPointerInfo::Access > AccessList
bool isAtFixpoint() const override
See AbstractState::isAtFixpoint().
bool forallInterferingAccesses(Instruction &I, F CB, AA::RangeTy &Range) const
See AAPointerInfo::forallInterferingAccesses.
static State getWorstState(const State &SIS)
Return the worst possible representable state.
int64_t numOffsetBins() const
AAPointerInfo::OffsetBinsTy OffsetBins
ChangeStatus indicateOptimisticFixpoint() override
See AbstractState::indicateOptimisticFixpoint().
State & operator=(const State &R)
ChangeStatus indicatePessimisticFixpoint() override
See AbstractState::indicatePessimisticFixpoint().
const State & getAssumed() const
static State getBestState(const State &SIS)
Return the best possible representable state.
bool isValidState() const override
See AbstractState::isValidState().
------------------------ AAIntraFnReachability Attribute ------------------------
ReachabilityQueryInfo(const ReachabilityQueryInfo &RQI)
unsigned Hash
Precomputed hash for this RQI.
ReachabilityQueryInfo(const Instruction *From, const ToTy *To)
ReachabilityQueryInfo(Attributor &A, const Instruction &From, const ToTy &To, const AA::InstExclusionSetTy *ES, bool MakeUnique)
Constructor replacement to ensure unique and stable sets are used for the cache.
unsigned computeHashValue() const
An abstract interface for address space information.
static const char ID
Unique ID (due to the unique address)
An abstract interface for all align attributes.
static const char ID
Unique ID (due to the unique address)
Align getKnownAlign() const
Return known alignment.
An abstract attribute for getting assumption information.
static const char ID
Unique ID (due to the unique address)
An abstract state for querying live call edges.
static const char ID
Unique ID (due to the unique address)
An abstract Attribute for specializing "dynamic" components of "denormal-fp-math" and "denormal-fp-ma...
static const char ID
Unique ID (due to the unique address)
An abstract interface for all dereferenceable attribute.
uint32_t getKnownDereferenceableBytes() const
Return known dereferenceable bytes.
uint32_t getAssumedDereferenceableBytes() const
Return assumed dereferenceable bytes.
static const char ID
Unique ID (due to the unique address)
An abstract interface for llvm::GlobalValue information interference.
static const char ID
Unique ID (due to the unique address)
static const char ID
Unique ID (due to the unique address)
An abstract interface for indirect call information interference.
static const char ID
Unique ID (due to the unique address)
An abstract interface to track if a value leaves its defining function instance.
static const char ID
Unique ID (due to the unique address)
An abstract Attribute for computing reachability between functions.
static const char ID
Unique ID (due to the unique address)
bool canReach(Attributor &A, const Function &Fn) const
If the function represented by this position can reach Fn.
virtual bool instructionCanReach(Attributor &A, const Instruction &Inst, const Function &Fn, const AA::InstExclusionSetTy *ExclusionSet=nullptr) const =0
Can Inst reach Fn.
An abstract interface to determine reachability of point A to B.
static const char ID
Unique ID (due to the unique address)
virtual bool isAssumedReachable(Attributor &A, const Instruction &From, const Instruction &To, const AA::InstExclusionSetTy *ExclusionSet=nullptr) const =0
Returns true if the 'From' instruction is assumed to reach the 'To' instruction.
An abstract interface for liveness abstract attribute.
static const char ID
Unique ID (due to the unique address)
An abstract interface for memory access kind related attributes (readnone/readonly/writeonly).
bool isAssumedReadOnly() const
Return true if we assume that the underlying value is not accessed (=written) in its respective scope...
bool isKnownReadNone() const
Return true if we know that the underlying value is not read or accessed in its respective scope.
static const char ID
Unique ID (due to the unique address)
bool isAssumedReadNone() const
Return true if we assume that the underlying value is not read or accessed in its respective scope.
An abstract interface for all memory location attributes (readnone/argmemonly/inaccessiblememonly/ina...
static std::string getMemoryLocationsAsStr(MemoryLocationsKind MLK)
Return the locations encoded by MLK as a readable string.
static const char ID
Unique ID (due to the unique address)
StateType::base_t MemoryLocationsKind
An abstract interface for all nonnull attributes.
static const char ID
Unique ID (due to the unique address)
An abstract interface for all noalias attributes.
static bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static const char ID
Unique ID (due to the unique address)
An abstract interface for all nocapture attributes.
static const char ID
Unique ID (due to the unique address)
@ NO_CAPTURE_MAYBE_RETURNED
If we do not capture the value in memory or through integers we can only communicate it back as a der...
bool isAssumedNoCaptureMaybeReturned() const
Return true if we assume that the underlying value is not captured in its respective scope but we all...
static bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static void determineFunctionCaptureCapabilities(const IRPosition &IRP, const Function &F, BitIntegerState &State)
Update State according to the capture capabilities of F for position IRP.
static const char ID
Unique ID (due to the unique address)
An AbstractAttribute for nofree.
static const char ID
Unique ID (due to the unique address)
An abstract attribute for norecurse.
static const char ID
Unique ID (due to the unique address)
An AbstractAttribute for noreturn.
static const char ID
Unique ID (due to the unique address)
static const char ID
Unique ID (due to the unique address)
static bool isAlignedBarrier(const CallBase &CB, bool ExecutedAligned)
Helper function to determine if CB is an aligned (GPU) barrier.
static bool isNonRelaxedAtomic(const Instruction *I)
Helper function used to determine whether an instruction is non-relaxed atomic.
static bool isNoSyncIntrinsic(const Instruction *I)
Helper function specific for intrinsics which are potentially volatile.
An abstract interface for all noundef attributes.
static const char ID
Unique ID (due to the unique address)
static bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static const char ID
Unique ID (due to the unique address)
An abstract Attribute for determining the necessity of the convergent attribute.
static const char ID
Unique ID (due to the unique address)
bool isAssumedNotConvergent() const
Return true if "non-convergent" is assumed.
An abstract interface for all nonnull attributes.
static const char ID
Unique ID (due to the unique address)
static bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See AbstractAttribute::isImpliedByIR(...).
A helper containing a list of offsets computed for a Use.
bool isUnassigned() const
A container for a list of ranges.
static void set_difference(const RangeList &L, const RangeList &R, RangeList &D)
Copy ranges from L that are not in R, into D.
An abstract interface for struct information.
virtual bool reachesReturn() const =0
virtual const_bin_iterator begin() const =0
static const char ID
Unique ID (due to the unique address)
virtual int64_t numOffsetBins() const =0
An abstract interface for potential values analysis.
static const char ID
Unique ID (due to the unique address)
static const char ID
Unique ID (due to the unique address)
static Value * getSingleValue(Attributor &A, const AbstractAttribute &AA, const IRPosition &IRP, SmallVectorImpl< AA::ValueAndContext > &Values)
Extract the single value in Values if any.
An abstract interface for privatizability.
virtual std::optional< Type * > getPrivatizableType() const =0
Return the type we can choose for a private copy of the underlying value.
static const char ID
Unique ID (due to the unique address)
An abstract attribute for undefined behavior.
static const char ID
Unique ID (due to the unique address)
An abstract attribute for getting all assumption underlying objects.
static const char ID
Unique ID (due to the unique address)
An abstract interface for range value analysis.
static const char ID
Unique ID (due to the unique address)
An abstract interface for value simplify abstract attribute.
static const char ID
Unique ID (due to the unique address)
An abstract attribute for willreturn.
static const char ID
Unique ID (due to the unique address)
Helper to represent an access offset and size, with logic to deal with uncertainty and check for over...
static constexpr int64_t Unknown
static RangeTy getUnknown()
Base struct for all "concrete attribute" deductions.
void print(raw_ostream &OS) const
Helper functions, for debug purposes only.
virtual StateType & getState()=0
Return the internal abstract state for inspection.
An interface to query the internal state of an abstract attribute.
virtual ChangeStatus indicatePessimisticFixpoint()=0
Indicate that the abstract state should converge to the pessimistic state.
virtual bool isAtFixpoint() const =0
Return if this abstract state is fixed, thus does not need to be updated if information changes as it...
virtual bool isValidState() const =0
Return if this abstract state is in a valid state.
virtual ChangeStatus indicateOptimisticFixpoint()=0
Indicate that the abstract state should converge to the optimistic state.
Helper for AA::PointerInfo::Access DenseMap/Set usage ignoring everything but the instruction.
static unsigned getHashValue(const Access &A)
static Access getTombstoneKey()
static bool isEqual(const Access &LHS, const Access &RHS)
static Access getEmptyKey()
This struct is a compact representation of a valid (non-zero power of two) alignment.
uint64_t value() const
This is a hole in the type system and should not be abused.
Helper struct used in the communication between an abstract attribute (AA) that wants to change the s...
std::function< void(const ArgumentReplacementInfo &, AbstractCallSite, SmallVectorImpl< Value * > &)> ACSRepairCBTy
Abstract call site (ACS) repair callback type.
const Argument & getReplacedArg() const
std::function< void(const ArgumentReplacementInfo &, Function &, Function::arg_iterator)> CalleeRepairCBTy
Callee repair callback type.
The fixpoint analysis framework that orchestrates the attribute deduction.
std::function< std::optional< Value * >(const IRPosition &, const AbstractAttribute *, bool &)> SimplifictionCallbackTy
Register CB as a simplification callback.
Specialization of the integer state for a bit-wise encoding.
BitIntegerState & removeAssumedBits(base_t BitsEncoding)
Remove the bits in BitsEncoding from the "assumed bits" if not known.
BitIntegerState & addKnownBits(base_t Bits)
Add the bits in BitsEncoding to the "known bits".
Simple wrapper for a single bit (boolean) state.
Represent subnormal handling kind for floating point instruction inputs and outputs.
static constexpr DenormalMode getDefault()
Return the assumed default mode for a function without denormal-fp-math.
static constexpr DenormalMode getInvalid()
static Access getTombstoneKey()
static unsigned getHashValue(const Access &A)
static Access getEmptyKey()
static bool isEqual(const Access &LHS, const Access &RHS)
static bool isEqual(const AA::RangeTy &A, const AA::RangeTy B)
static AA::RangeTy getTombstoneKey()
static unsigned getHashValue(const AA::RangeTy &Range)
static AA::RangeTy getEmptyKey()
static ReachabilityQueryInfo< ToTy > EmptyKey
static ReachabilityQueryInfo< ToTy > TombstoneKey
static ReachabilityQueryInfo< ToTy > * getEmptyKey()
static ReachabilityQueryInfo< ToTy > * getTombstoneKey()
static bool isEqual(const ReachabilityQueryInfo< ToTy > *LHS, const ReachabilityQueryInfo< ToTy > *RHS)
static unsigned getHashValue(const ReachabilityQueryInfo< ToTy > *RQI)
An information struct used to provide DenseMap with the various necessary components for a given valu...
State for dereferenceable attribute.
IncIntegerState DerefBytesState
State representing for dereferenceable bytes.
ChangeStatus manifest(Attributor &A) override
See AbstractAttribute::manifest(...).
Helper to describe and deal with positions in the LLVM-IR.
Function * getAssociatedFunction() const
Return the associated function, if any.
static const IRPosition callsite_returned(const CallBase &CB)
Create a position describing the returned value of CB.
static const IRPosition returned(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the returned value of F.
Argument * getAssociatedArgument() const
Return the associated argument, if any.
static const IRPosition value(const Value &V, const CallBaseContext *CBContext=nullptr)
Create a position describing the value of V.
int getCalleeArgNo() const
Return the callee argument number of the associated value if it is an argument or call site argument,...
static const IRPosition inst(const Instruction &I, const CallBaseContext *CBContext=nullptr)
Create a position describing the instruction I.
static const IRPosition callsite_argument(const CallBase &CB, unsigned ArgNo)
Create a position describing the argument of CB at position ArgNo.
@ IRP_ARGUMENT
An attribute for a function argument.
@ IRP_RETURNED
An attribute for the function return value.
@ IRP_CALL_SITE
An attribute for a call site (function scope).
@ IRP_CALL_SITE_RETURNED
An attribute for a call site return value.
@ IRP_FUNCTION
An attribute for a function (scope).
@ IRP_CALL_SITE_ARGUMENT
An attribute for a call site argument.
@ IRP_INVALID
An invalid position.
Instruction * getCtxI() const
Return the context instruction, if any.
static const IRPosition argument(const Argument &Arg, const CallBaseContext *CBContext=nullptr)
Create a position describing the argument Arg.
Type * getAssociatedType() const
Return the type this abstract attribute is associated with.
static const IRPosition function(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the function scope of F.
const CallBaseContext * getCallBaseContext() const
Get the call base context from the position.
Value & getAssociatedValue() const
Return the value this abstract attribute is associated with.
Value & getAnchorValue() const
Return the value this abstract attribute is anchored with.
int getCallSiteArgNo() const
Return the call site argument number of the associated value if it is an argument or call site argume...
static const IRPosition function_scope(const IRPosition &IRP, const CallBaseContext *CBContext=nullptr)
Create a position with function scope matching the "context" of IRP.
Kind getPositionKind() const
Return the associated position kind.
bool isArgumentPosition() const
Return true if the position is an argument or call site argument.
static const IRPosition callsite_function(const CallBase &CB)
Create a position describing the function scope of CB.
Function * getAnchorScope() const
Return the Function surrounding the anchor value.
Incoming for lane maks phi as machine instruction, incoming register Reg and incoming block Block are...
State for an integer range.
ConstantRange getKnown() const
Return the known state encoding.
ConstantRange getAssumed() const
Return the assumed state encoding.
bool isValidState() const override
See AbstractState::isValidState() NOTE: For now we simply pretend that the worst possible state is in...
bool isAtFixpoint() const override
See AbstractState::isAtFixpoint()
ChangeStatus indicateOptimisticFixpoint() override
See AbstractState::indicateOptimisticFixpoint(...)
base_t getAssumed() const
Return the assumed state encoding.
static constexpr base_t getWorstState()
Return the worst possible representable state.
ChangeStatus indicatePessimisticFixpoint() override
See AbstractState::indicatePessimisticFixpoint(...)
Helper that allows to insert a new assumption string in the known assumption set by creating a (stati...
FPClassTest KnownFPClasses
Floating-point classes the value could be one of.
This struct is a compact representation of a valid (power of two) or undefined (0) alignment.
A "must be executed context" for a given program point PP is the set of instructions,...
iterator & end()
Return an universal end iterator.
bool findInContextOf(const Instruction *I, const Instruction *PP)
Helper to look for I in the context of PP.
iterator & begin(const Instruction *PP)
Return an iterator to explore the context around PP.
bool checkForAllContext(const Instruction *PP, function_ref< bool(const Instruction *)> Pred)
}
Various options to control the behavior of getObjectSize.
static unsigned MaxPotentialValues
Maximum number of potential values to be tracked.
void unionAssumed(const MemberTy &C)
Union assumed set with the passed value.
const SetTy & getAssumedSet() const
Return this set.
Represent one information held inside an operand bundle of an llvm.assume.
A MapVector that performs no allocations if smaller than a certain size.
Helper to tie a abstract state implementation to an abstract attribute.
StateType & getState() override
See AbstractAttribute::getState(...).
bool unionAssumed(std::optional< Value * > Other)
Merge Other into the currently assumed simplified value.