54#include "llvm/IR/IntrinsicsAMDGPU.h"
55#include "llvm/IR/IntrinsicsNVPTX.h"
82#define DEBUG_TYPE "attributor"
86 cl::desc(
"Manifest Attributor internal string attributes."),
99 cl::desc(
"Maximum number of potential values to be "
100 "tracked for each position."),
105 "attributor-max-potential-values-iterations",
cl::Hidden,
107 "Maximum number of iterations we keep dismantling potential values."),
// NOTE(review): the leading numerals fused onto these lines (110, 126, ...)
// look like pasted source line numbers from an extraction step, not code —
// confirm against the upstream file and strip them before building.
// Counter: total abstract attributes the Attributor instantiated.
110STATISTIC(NumAAs,
"Number of abstract attributes created");
// Counter: indirect call sites the Attributor rewrote to direct calls.
111STATISTIC(NumIndirectCallsPromoted,
"Number of indirect calls promoted");
// Statistics-tracking macro family: each STATS_DECLTRACK_* both declares a
// named STATISTIC counter and increments it at the point of use, so call
// sites can record "attribute NAME manifested on position kind TYPE" in one
// statement.
// Builds the human-readable statistic description string via stringization.
126#define BUILD_STAT_MSG_IR_ATTR(TYPE, NAME) \
127 ("Number of " #TYPE " marked '" #NAME "'")
// Builds the counter identifier, e.g. NumIRFunction_nounwind.
128#define BUILD_STAT_NAME(NAME, TYPE) NumIR##TYPE##_##NAME
// Indirection layer so BUILD_STAT_NAME is expanded before STATISTIC pastes.
129#define STATS_DECL_(NAME, MSG) STATISTIC(NAME, MSG);
130#define STATS_DECL(NAME, TYPE, MSG) \
131 STATS_DECL_(BUILD_STAT_NAME(NAME, TYPE), MSG);
// Increments the previously declared counter.
132#define STATS_TRACK(NAME, TYPE) ++(BUILD_STAT_NAME(NAME, TYPE));
// Declare + increment in one braced statement (usable inside a function).
133#define STATS_DECLTRACK(NAME, TYPE, MSG) \
134 {STATS_DECL(NAME, TYPE, MSG) STATS_TRACK(NAME, TYPE)}
// Convenience wrappers, one per IR position kind the Attributor manifests on:
// function arguments.
135#define STATS_DECLTRACK_ARG_ATTR(NAME) \
136 STATS_DECLTRACK(NAME, Arguments, BUILD_STAT_MSG_IR_ATTR(arguments, NAME))
// call-site arguments.
137#define STATS_DECLTRACK_CSARG_ATTR(NAME) \
138 STATS_DECLTRACK(NAME, CSArguments, \
139 BUILD_STAT_MSG_IR_ATTR(call site arguments, NAME))
// whole functions.
140#define STATS_DECLTRACK_FN_ATTR(NAME) \
141 STATS_DECLTRACK(NAME, Function, BUILD_STAT_MSG_IR_ATTR(functions, NAME))
// call sites.
142#define STATS_DECLTRACK_CS_ATTR(NAME) \
143 STATS_DECLTRACK(NAME, CS, BUILD_STAT_MSG_IR_ATTR(call site, NAME))
// function return positions.
144#define STATS_DECLTRACK_FNRET_ATTR(NAME) \
145 STATS_DECLTRACK(NAME, FunctionReturn, \
146 BUILD_STAT_MSG_IR_ATTR(function returns, NAME))
// call-site return positions.
147#define STATS_DECLTRACK_CSRET_ATTR(NAME) \
148 STATS_DECLTRACK(NAME, CSReturn, \
149 BUILD_STAT_MSG_IR_ATTR(call site returns, NAME))
// floating (position-less) values; uses a custom message instead of
// BUILD_STAT_MSG_IR_ATTR.
150#define STATS_DECLTRACK_FLOATING_ATTR(NAME) \
151 STATS_DECLTRACK(NAME, Floating, \
152 ("Number of floating values known to be '" #NAME "'"))
157#define PIPE_OPERATOR(CLASS) \
158 raw_ostream &operator<<(raw_ostream &OS, const CLASS &AA) { \
159 return OS << static_cast<const AbstractAttribute &>(AA); \
216 bool HeaderOnly,
Cycle **CPtr =
nullptr) {
219 auto *BB =
I->getParent();
225 return !HeaderOnly || BB ==
C->getHeader();
236 if (
DL.getTypeSizeInBits(Ty) !=
DL.getTypeAllocSizeInBits(Ty))
261 StartPos +=
DL.getTypeAllocSizeInBits(ElTy);
271 bool AllowVolatile) {
272 if (!AllowVolatile &&
I->isVolatile())
276 return LI->getPointerOperand();
280 return SI->getPointerOperand();
284 return CXI->getPointerOperand();
288 return RMWI->getPointerOperand();
310 bool GetMinOffset,
bool AllowNonInbounds,
311 bool UseAssumed =
false) {
313 auto AttributorAnalysis = [&](
Value &V,
APInt &ROffset) ->
bool {
320 if (!ValueConstantRangeAA)
324 if (
Range.isFullSet())
330 ROffset =
Range.getSignedMin();
332 ROffset =
Range.getSignedMax();
343 const Value *Ptr, int64_t &BytesOffset,
348 true, AllowNonInbounds);
356template <
typename AAType,
typename StateType =
typename AAType::StateType,
358 bool RecurseForSelectAndPHI =
true>
360 Attributor &
A,
const AAType &QueryingAA, StateType &S,
362 LLVM_DEBUG(
dbgs() <<
"[Attributor] Clamp return value states for "
363 << QueryingAA <<
" into " << S <<
"\n");
365 assert((QueryingAA.getIRPosition().getPositionKind() ==
367 QueryingAA.getIRPosition().getPositionKind() ==
369 "Can only clamp returned value states for a function returned or call "
370 "site returned position!");
374 std::optional<StateType>
T;
377 auto CheckReturnValue = [&](
Value &RV) ->
bool {
391 <<
" AA: " <<
AA->getAsStr(&
A) <<
" @ " << RVPos <<
"\n");
392 const StateType &AAS =
AA->getState();
394 T = StateType::getBestState(AAS);
396 LLVM_DEBUG(
dbgs() <<
"[Attributor] AA State: " << AAS <<
" RV State: " <<
T
398 return T->isValidState();
401 if (!
A.checkForAllReturnedValues(CheckReturnValue, QueryingAA,
403 RecurseForSelectAndPHI))
404 S.indicatePessimisticFixpoint();
411template <
typename AAType,
typename BaseType,
412 typename StateType =
typename BaseType::StateType,
413 bool PropagateCallBaseContext =
false,
415 bool RecurseForSelectAndPHI =
true>
416struct AAReturnedFromReturnedValues :
public BaseType {
417 AAReturnedFromReturnedValues(
const IRPosition &IRP, Attributor &
A)
422 StateType S(StateType::getBestState(this->getState()));
424 RecurseForSelectAndPHI>(
426 PropagateCallBaseContext ? this->getCallBaseContext() : nullptr);
435template <
typename AAType,
typename StateType =
typename AAType::StateType,
437static void clampCallSiteArgumentStates(
Attributor &
A,
const AAType &QueryingAA,
439 LLVM_DEBUG(
dbgs() <<
"[Attributor] Clamp call site argument states for "
440 << QueryingAA <<
" into " << S <<
"\n");
442 assert(QueryingAA.getIRPosition().getPositionKind() ==
444 "Can only clamp call site argument states for an argument position!");
448 std::optional<StateType>
T;
451 unsigned ArgNo = QueryingAA.getIRPosition().getCallSiteArgNo();
471 LLVM_DEBUG(
dbgs() <<
"[Attributor] ACS: " << *ACS.getInstruction()
472 <<
" AA: " <<
AA->getAsStr(&
A) <<
" @" << ACSArgPos
474 const StateType &AAS =
AA->getState();
476 T = StateType::getBestState(AAS);
478 LLVM_DEBUG(
dbgs() <<
"[Attributor] AA State: " << AAS <<
" CSA State: " <<
T
480 return T->isValidState();
483 bool UsedAssumedInformation =
false;
484 if (!
A.checkForAllCallSites(CallSiteCheck, QueryingAA,
true,
485 UsedAssumedInformation))
486 S.indicatePessimisticFixpoint();
493template <
typename AAType,
typename BaseType,
494 typename StateType =
typename AAType::StateType,
496bool getArgumentStateFromCallBaseContext(
Attributor &
A,
500 "Expected an 'argument' position !");
506 assert(ArgNo >= 0 &&
"Invalid Arg No!");
520 const StateType &CBArgumentState =
521 static_cast<const StateType &
>(
AA->getState());
523 LLVM_DEBUG(
dbgs() <<
"[Attributor] Briding Call site context to argument"
524 <<
"Position:" << Pos <<
"CB Arg state:" << CBArgumentState
528 State ^= CBArgumentState;
533template <
typename AAType,
typename BaseType,
534 typename StateType =
typename AAType::StateType,
535 bool BridgeCallBaseContext =
false,
537struct AAArgumentFromCallSiteArguments :
public BaseType {
538 AAArgumentFromCallSiteArguments(
const IRPosition &IRP, Attributor &
A)
543 StateType S = StateType::getBestState(this->getState());
545 if (BridgeCallBaseContext) {
547 getArgumentStateFromCallBaseContext<AAType,
BaseType, StateType,
549 A, *
this, this->getIRPosition(), S);
553 clampCallSiteArgumentStates<AAType, StateType, IRAttributeKind>(
A, *
this,
563template <
typename AAType,
typename BaseType,
564 typename StateType =
typename BaseType::StateType,
565 bool IntroduceCallBaseContext =
false,
567struct AACalleeToCallSite :
public BaseType {
568 AACalleeToCallSite(
const IRPosition &IRP, Attributor &
A) :
BaseType(IRP,
A) {}
572 auto IRPKind = this->getIRPosition().getPositionKind();
575 "Can only wrap function returned positions for call site "
576 "returned positions!");
577 auto &S = this->getState();
580 if (IntroduceCallBaseContext)
581 LLVM_DEBUG(
dbgs() <<
"[Attributor] Introducing call base context:" << CB
586 for (
const Function *Callee : Callees) {
590 IntroduceCallBaseContext ? &CB :
nullptr)
592 *
Callee, IntroduceCallBaseContext ? &CB : nullptr);
594 if (Attribute::isEnumAttrKind(IRAttributeKind)) {
597 A,
this, FnPos, DepClassTy::REQUIRED, IsKnown))
603 A.getAAFor<AAType>(*
this, FnPos, DepClassTy::REQUIRED);
607 if (S.isAtFixpoint())
608 return S.isValidState();
612 if (!
A.checkForAllCallees(CalleePred, *
this, CB))
613 return S.indicatePessimisticFixpoint();
619template <
class AAType,
typename StateType =
typename AAType::StateType>
625 auto EIt = Explorer.
begin(CtxI), EEnd = Explorer.
end(CtxI);
626 for (
unsigned u = 0;
u <
Uses.size(); ++
u) {
630 if (Found &&
AA.followUseInMBEC(
A, U, UserI, State))
645template <
class AAType,
typename StateType =
typename AAType::StateType>
646static void followUsesInMBEC(AAType &
AA,
Attributor &
A, StateType &S,
648 const Value &Val =
AA.getIRPosition().getAssociatedValue();
653 A.getInfoCache().getMustBeExecutedContextExplorer();
659 for (
const Use &U : Val.
uses())
662 followUsesInContext<AAType>(
AA,
A, *Explorer, &CtxI,
Uses, S);
664 if (S.isAtFixpoint())
708 StateType ParentState;
712 ParentState.indicateOptimisticFixpoint();
714 for (
const BasicBlock *BB : Br->successors()) {
715 StateType ChildState;
717 size_t BeforeSize =
Uses.size();
718 followUsesInContext(
AA,
A, *Explorer, &BB->front(),
Uses, ChildState);
721 for (
auto It =
Uses.begin() + BeforeSize; It !=
Uses.end();)
724 ParentState &= ChildState;
798 R.indicatePessimisticFixpoint();
815 BS.indicateOptimisticFixpoint();
821 BS.indicatePessimisticFixpoint();
891 template <
typename F>
898 if (!
Range.mayOverlap(ItRange))
900 bool IsExact =
Range == ItRange && !
Range.offsetOrSizeAreUnknown();
901 for (
auto Index : It.getSecond()) {
911 template <
typename F>
922 for (
unsigned Index : LocalList->getSecond()) {
925 if (
Range.offsetAndSizeAreUnknown())
941 RemoteI = RemoteI ? RemoteI : &
I;
945 bool AccExists =
false;
947 for (
auto Index : LocalList) {
949 if (
A.getLocalInst() == &
I) {
958 <<
"[AAPointerInfo] Inserting access in new offset bins\n";);
960 for (
auto Key : ToAdd) {
967 AccessList.emplace_back(&
I, RemoteI, Ranges, Content, Kind, Ty);
969 "New Access should have been at AccIndex");
970 LocalList.push_back(AccIndex);
979 auto Before = Current;
981 if (Current == Before)
984 auto &ExistingRanges = Before.getRanges();
985 auto &NewRanges = Current.getRanges();
992 <<
"[AAPointerInfo] Removing access from old offset bins\n";);
999 "Expected bin to actually contain the Access.");
1000 Bin.erase(AccIndex);
1021struct AAPointerInfoImpl
1022 :
public StateWrapper<AA::PointerInfo::State, AAPointerInfo> {
1027 const std::string getAsStr(
Attributor *
A)
const override {
1028 return std::string(
"PointerInfo ") +
1029 (isValidState() ? (std::string(
"#") +
1030 std::to_string(OffsetBins.size()) +
" bins")
1035 [](int64_t O) {
return std::to_string(O); }),
1043 return AAPointerInfo::manifest(
A);
1046 const_bin_iterator
begin()
const override {
return State::begin(); }
1047 const_bin_iterator
end()
const override {
return State::end(); }
1048 int64_t numOffsetBins()
const override {
return State::numOffsetBins(); }
1049 bool reachesReturn()
const override {
1050 return !ReturnedOffsets.isUnassigned();
1052 void addReturnedOffsetsTo(OffsetInfo &OI)
const override {
1053 if (ReturnedOffsets.isUnknown()) {
1058 OffsetInfo MergedOI;
1059 for (
auto Offset : ReturnedOffsets) {
1060 OffsetInfo TmpOI = OI;
1062 MergedOI.merge(TmpOI);
1064 OI = std::move(MergedOI);
1067 ChangeStatus setReachesReturn(
const OffsetInfo &ReachedReturnedOffsets) {
1068 if (ReturnedOffsets.isUnknown())
1069 return ChangeStatus::UNCHANGED;
1070 if (ReachedReturnedOffsets.isUnknown()) {
1071 ReturnedOffsets.setUnknown();
1072 return ChangeStatus::CHANGED;
1074 if (ReturnedOffsets.merge(ReachedReturnedOffsets))
1075 return ChangeStatus::CHANGED;
1076 return ChangeStatus::UNCHANGED;
1079 bool forallInterferingAccesses(
1081 function_ref<
bool(
const AAPointerInfo::Access &,
bool)> CB)
1083 return State::forallInterferingAccesses(
Range, CB);
1086 bool forallInterferingAccesses(
1087 Attributor &
A,
const AbstractAttribute &QueryingAA, Instruction &
I,
1088 bool FindInterferingWrites,
bool FindInterferingReads,
1089 function_ref<
bool(
const Access &,
bool)> UserCB,
bool &HasBeenWrittenTo,
1091 function_ref<
bool(
const Access &)> SkipCB)
const override {
1092 HasBeenWrittenTo =
false;
1094 SmallPtrSet<const Access *, 8> DominatingWrites;
1102 const auto *ExecDomainAA =
A.lookupAAFor<AAExecutionDomain>(
1104 bool AllInSameNoSyncFn = IsAssumedNoSync;
1105 bool InstIsExecutedByInitialThreadOnly =
1106 ExecDomainAA && ExecDomainAA->isExecutedByInitialThreadOnly(
I);
1113 bool InstIsExecutedInAlignedRegion =
1114 FindInterferingReads && ExecDomainAA &&
1115 ExecDomainAA->isExecutedInAlignedRegion(
A,
I);
1117 if (InstIsExecutedInAlignedRegion || InstIsExecutedByInitialThreadOnly)
1118 A.recordDependence(*ExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1120 InformationCache &InfoCache =
A.getInfoCache();
1121 bool IsThreadLocalObj =
1130 auto CanIgnoreThreadingForInst = [&](
const Instruction &
I) ->
bool {
1131 if (IsThreadLocalObj || AllInSameNoSyncFn)
1133 const auto *FnExecDomainAA =
1134 I.getFunction() == &
Scope
1136 :
A.lookupAAFor<AAExecutionDomain>(
1139 if (!FnExecDomainAA)
1141 if (InstIsExecutedInAlignedRegion ||
1142 (FindInterferingWrites &&
1143 FnExecDomainAA->isExecutedInAlignedRegion(
A,
I))) {
1144 A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1147 if (InstIsExecutedByInitialThreadOnly &&
1148 FnExecDomainAA->isExecutedByInitialThreadOnly(
I)) {
1149 A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1158 auto CanIgnoreThreading = [&](
const Access &Acc) ->
bool {
1159 return CanIgnoreThreadingForInst(*Acc.getRemoteInst()) ||
1160 (Acc.getRemoteInst() != Acc.getLocalInst() &&
1161 CanIgnoreThreadingForInst(*Acc.getLocalInst()));
1165 bool IsKnownNoRecurse;
1173 bool InstInKernel =
A.getInfoCache().isKernel(Scope);
1174 bool ObjHasKernelLifetime =
false;
1175 const bool UseDominanceReasoning =
1176 FindInterferingWrites && IsKnownNoRecurse;
1177 const DominatorTree *DT =
1186 unsigned VAS =
V->getType()->getPointerAddressSpace();
1197 std::function<bool(
const Function &)> IsLiveInCalleeCB;
1202 const Function *AIFn = AI->getFunction();
1203 ObjHasKernelLifetime =
A.getInfoCache().isKernel(*AIFn);
1204 bool IsKnownNoRecurse;
1207 IsKnownNoRecurse)) {
1208 IsLiveInCalleeCB = [AIFn](
const Function &Fn) {
return AIFn != &Fn; };
1213 ObjHasKernelLifetime = HasKernelLifetime(GV, *GV->getParent());
1214 if (ObjHasKernelLifetime)
1215 IsLiveInCalleeCB = [&
A](
const Function &Fn) {
1216 return !
A.getInfoCache().isKernel(Fn);
1224 auto AccessCB = [&](
const Access &Acc,
bool Exact) {
1225 Function *AccScope = Acc.getRemoteInst()->getFunction();
1226 bool AccInSameScope = AccScope == &
Scope;
1230 if (InstInKernel && ObjHasKernelLifetime && !AccInSameScope &&
1231 A.getInfoCache().isKernel(*AccScope))
1234 if (Exact && Acc.isMustAccess() && Acc.getRemoteInst() != &
I) {
1235 if (Acc.isWrite() || (
isa<LoadInst>(
I) && Acc.isWriteOrAssumption()))
1236 ExclusionSet.
insert(Acc.getRemoteInst());
1239 if ((!FindInterferingWrites || !Acc.isWriteOrAssumption()) &&
1240 (!FindInterferingReads || !Acc.isRead()))
1243 bool Dominates = FindInterferingWrites && DT && Exact &&
1244 Acc.isMustAccess() && AccInSameScope &&
1247 DominatingWrites.
insert(&Acc);
1251 AllInSameNoSyncFn &= Acc.getRemoteInst()->getFunction() == &
Scope;
1253 InterferingAccesses.
push_back({&Acc, Exact});
1256 if (!State::forallInterferingAccesses(
I, AccessCB,
Range))
1259 HasBeenWrittenTo = !DominatingWrites.
empty();
1263 for (
const Access *Acc : DominatingWrites) {
1264 if (!LeastDominatingWriteInst) {
1265 LeastDominatingWriteInst = Acc->getRemoteInst();
1266 }
else if (DT->
dominates(LeastDominatingWriteInst,
1267 Acc->getRemoteInst())) {
1268 LeastDominatingWriteInst = Acc->getRemoteInst();
1273 auto CanSkipAccess = [&](
const Access &Acc,
bool Exact) {
1274 if (SkipCB && SkipCB(Acc))
1276 if (!CanIgnoreThreading(Acc))
1282 bool ReadChecked = !FindInterferingReads;
1283 bool WriteChecked = !FindInterferingWrites;
1289 &ExclusionSet, IsLiveInCalleeCB))
1294 if (!WriteChecked) {
1296 &ExclusionSet, IsLiveInCalleeCB))
1297 WriteChecked =
true;
1311 if (!WriteChecked && HasBeenWrittenTo &&
1312 Acc.getRemoteInst()->getFunction() != &Scope) {
1314 const auto *FnReachabilityAA =
A.getAAFor<AAInterFnReachability>(
1316 if (FnReachabilityAA) {
1322 if (!FnReachabilityAA->instructionCanReach(
1323 A, *LeastDominatingWriteInst,
1324 *Acc.getRemoteInst()->getFunction(), &ExclusionSet))
1325 WriteChecked =
true;
1332 if (ReadChecked && WriteChecked)
1335 if (!DT || !UseDominanceReasoning)
1337 if (!DominatingWrites.count(&Acc))
1339 return LeastDominatingWriteInst != Acc.getRemoteInst();
1344 for (
auto &It : InterferingAccesses) {
1345 if ((!AllInSameNoSyncFn && !IsThreadLocalObj && !ExecDomainAA) ||
1346 !CanSkipAccess(*It.first, It.second)) {
1347 if (!UserCB(*It.first, It.second))
1355 const AAPointerInfo &OtherAA,
1357 using namespace AA::PointerInfo;
1359 return indicatePessimisticFixpoint();
1362 const auto &OtherAAImpl =
static_cast<const AAPointerInfoImpl &
>(OtherAA);
1363 bool IsByval = OtherAAImpl.getAssociatedArgument()->hasByValAttr();
1364 Changed |= setReachesReturn(OtherAAImpl.ReturnedOffsets);
1367 const auto &State = OtherAAImpl.getState();
1368 for (
const auto &It : State) {
1369 for (
auto Index : It.getSecond()) {
1370 const auto &RAcc = State.getAccess(Index);
1371 if (IsByval && !RAcc.isRead())
1373 bool UsedAssumedInformation =
false;
1375 auto Content =
A.translateArgumentToCallSiteContent(
1376 RAcc.getContent(), CB, *
this, UsedAssumedInformation);
1377 AK =
AccessKind(AK & (IsByval ? AccessKind::AK_R : AccessKind::AK_RW));
1378 AK =
AccessKind(AK | (RAcc.isMayAccess() ? AK_MAY : AK_MUST));
1380 Changed |= addAccess(
A, RAcc.getRanges(), CB, Content, AK,
1381 RAcc.getType(), RAcc.getRemoteInst());
1387 ChangeStatus translateAndAddState(Attributor &
A,
const AAPointerInfo &OtherAA,
1388 const OffsetInfo &Offsets, CallBase &CB,
1390 using namespace AA::PointerInfo;
1392 return indicatePessimisticFixpoint();
1394 const auto &OtherAAImpl =
static_cast<const AAPointerInfoImpl &
>(OtherAA);
1398 const auto &State = OtherAAImpl.getState();
1399 for (
const auto &It : State) {
1400 for (
auto Index : It.getSecond()) {
1401 const auto &RAcc = State.getAccess(Index);
1402 if (!IsMustAcc && RAcc.isAssumption())
1404 for (
auto Offset : Offsets) {
1408 if (!NewRanges.isUnknown()) {
1409 NewRanges.addToAllOffsets(Offset);
1414 Changed |= addAccess(
A, NewRanges, CB, RAcc.getContent(), AK,
1415 RAcc.getType(), RAcc.getRemoteInst());
1424 void trackPointerInfoStatistics(
const IRPosition &IRP)
const {}
1427 void dumpState(raw_ostream &O) {
1428 for (
auto &It : OffsetBins) {
1429 O <<
"[" << It.first.Offset <<
"-" << It.first.Offset + It.first.Size
1430 <<
"] : " << It.getSecond().size() <<
"\n";
1431 for (
auto AccIndex : It.getSecond()) {
1432 auto &Acc = AccessList[AccIndex];
1433 O <<
" - " << Acc.getKind() <<
" - " << *Acc.getLocalInst() <<
"\n";
1434 if (Acc.getLocalInst() != Acc.getRemoteInst())
1435 O <<
" --> " << *Acc.getRemoteInst()
1437 if (!Acc.isWrittenValueYetUndetermined()) {
1439 O <<
" - c: func " << Acc.getWrittenValue()->getName()
1441 else if (Acc.getWrittenValue())
1442 O <<
" - c: " << *Acc.getWrittenValue() <<
"\n";
1444 O <<
" - c: <unknown>\n";
1451struct AAPointerInfoFloating :
public AAPointerInfoImpl {
1453 AAPointerInfoFloating(
const IRPosition &IRP, Attributor &
A)
1454 : AAPointerInfoImpl(IRP,
A) {}
1457 bool handleAccess(Attributor &
A, Instruction &
I,
1458 std::optional<Value *> Content,
AccessKind Kind,
1461 using namespace AA::PointerInfo;
1463 const DataLayout &
DL =
A.getDataLayout();
1464 TypeSize AccessSize =
DL.getTypeStoreSize(&Ty);
1473 if (!VT || VT->getElementCount().isScalable() ||
1475 (*Content)->getType() != VT ||
1476 DL.getTypeStoreSize(VT->getElementType()).isScalable()) {
1487 int64_t ElementSize =
DL.getTypeStoreSize(ElementType).getFixedValue();
1492 for (
int i = 0, e = VT->getElementCount().getFixedValue(); i != e; ++i) {
1494 ConstContent, ConstantInt::get(
Int32Ty, i));
1501 for (
auto &ElementOffset : ElementOffsets)
1502 ElementOffset += ElementSize;
1515 bool collectConstantsForGEP(Attributor &
A,
const DataLayout &
DL,
1516 OffsetInfo &UsrOI,
const OffsetInfo &PtrOI,
1517 const GEPOperator *
GEP);
1520 void trackStatistics()
const override {
1521 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1525bool AAPointerInfoFloating::collectConstantsForGEP(Attributor &
A,
1526 const DataLayout &
DL,
1528 const OffsetInfo &PtrOI,
1529 const GEPOperator *
GEP) {
1530 unsigned BitWidth =
DL.getIndexTypeSizeInBits(
GEP->getType());
1531 SmallMapVector<Value *, APInt, 4> VariableOffsets;
1534 assert(!UsrOI.isUnknown() && !PtrOI.isUnknown() &&
1535 "Don't look for constant values if the offset has already been "
1536 "determined to be unknown.");
1538 if (!
GEP->collectOffset(
DL,
BitWidth, VariableOffsets, ConstantOffset)) {
1544 << (VariableOffsets.
empty() ?
"" :
"not") <<
" constant "
1548 Union.addToAll(ConstantOffset.getSExtValue());
1553 for (
const auto &VI : VariableOffsets) {
1554 auto *PotentialConstantsAA =
A.getAAFor<AAPotentialConstantValues>(
1556 if (!PotentialConstantsAA || !PotentialConstantsAA->isValidState()) {
1562 if (PotentialConstantsAA->undefIsContained())
1569 auto &AssumedSet = PotentialConstantsAA->getAssumedSet();
1570 if (AssumedSet.empty())
1574 for (
const auto &ConstOffset : AssumedSet) {
1575 auto CopyPerOffset =
Union;
1576 CopyPerOffset.addToAll(ConstOffset.getSExtValue() *
1577 VI.second.getZExtValue());
1578 Product.merge(CopyPerOffset);
1583 UsrOI = std::move(Union);
1587ChangeStatus AAPointerInfoFloating::updateImpl(Attributor &
A) {
1588 using namespace AA::PointerInfo;
1590 const DataLayout &
DL =
A.getDataLayout();
1591 Value &AssociatedValue = getAssociatedValue();
1593 DenseMap<Value *, OffsetInfo> OffsetInfoMap;
1594 OffsetInfoMap[&AssociatedValue].
insert(0);
1596 auto HandlePassthroughUser = [&](
Value *Usr,
Value *CurPtr,
bool &Follow) {
1607 "CurPtr does not exist in the map!");
1609 auto &UsrOI = OffsetInfoMap[Usr];
1610 auto &PtrOI = OffsetInfoMap[CurPtr];
1611 assert(!PtrOI.isUnassigned() &&
1612 "Cannot pass through if the input Ptr was not visited!");
1618 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
1620 User *Usr =
U.getUser();
1621 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Analyze " << *CurPtr <<
" in " << *Usr
1624 "The current pointer offset should have been seeded!");
1625 assert(!OffsetInfoMap[CurPtr].isUnassigned() &&
1626 "Current pointer should be assigned");
1630 return HandlePassthroughUser(Usr, CurPtr, Follow);
1632 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Unhandled constant user " << *CE
1640 auto &UsrOI = OffsetInfoMap[Usr];
1641 auto &PtrOI = OffsetInfoMap[CurPtr];
1643 if (UsrOI.isUnknown())
1646 if (PtrOI.isUnknown()) {
1652 Follow = collectConstantsForGEP(
A,
DL, UsrOI, PtrOI,
GEP);
1658 return HandlePassthroughUser(Usr, CurPtr, Follow);
1663 if (RI->getFunction() == getAssociatedFunction()) {
1664 auto &PtrOI = OffsetInfoMap[CurPtr];
1665 Changed |= setReachesReturn(PtrOI);
1678 auto &UsrOI = PhiIt->second;
1679 auto &PtrOI = OffsetInfoMap[CurPtr];
1683 if (PtrOI.isUnknown()) {
1684 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI operand offset unknown "
1685 << *CurPtr <<
" in " << *
PHI <<
"\n");
1686 Follow = !UsrOI.isUnknown();
1692 if (UsrOI == PtrOI) {
1693 assert(!PtrOI.isUnassigned() &&
1694 "Cannot assign if the current Ptr was not visited!");
1695 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI is invariant (so far)");
1705 auto It = OffsetInfoMap.
find(CurPtrBase);
1706 if (It == OffsetInfoMap.
end()) {
1707 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI operand is too complex "
1708 << *CurPtr <<
" in " << *
PHI
1709 <<
" (base: " << *CurPtrBase <<
")\n");
1723 A.getInfoCache().getAnalysisResultForFunction<CycleAnalysis>(
1724 *
PHI->getFunction());
1726 auto BaseOI = It->getSecond();
1727 BaseOI.addToAll(
Offset.getZExtValue());
1728 if (IsFirstPHIUser || BaseOI == UsrOI) {
1729 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI is invariant " << *CurPtr
1730 <<
" in " << *Usr <<
"\n");
1731 return HandlePassthroughUser(Usr, CurPtr, Follow);
1735 dbgs() <<
"[AAPointerInfo] PHI operand pointer offset mismatch "
1736 << *CurPtr <<
" in " << *
PHI <<
"\n");
1755 if (!handleAccess(
A, *LoadI,
nullptr, AK,
1756 OffsetInfoMap[CurPtr].Offsets,
Changed,
1762 return II->isAssumeLikeIntrinsic();
1773 }
while (FromI && FromI != ToI);
1778 auto IsValidAssume = [&](IntrinsicInst &IntrI) {
1779 if (IntrI.getIntrinsicID() != Intrinsic::assume)
1782 if (IntrI.getParent() == BB) {
1783 if (IsImpactedInRange(LoadI->getNextNode(), &IntrI))
1789 if ((*PredIt) != BB)
1794 if (SuccBB == IntrBB)
1800 if (IsImpactedInRange(LoadI->getNextNode(), BB->
getTerminator()))
1802 if (IsImpactedInRange(&IntrBB->
front(), &IntrI))
1808 std::pair<Value *, IntrinsicInst *> Assumption;
1809 for (
const Use &LoadU : LoadI->uses()) {
1811 if (!CmpI->isEquality() || !CmpI->isTrueWhenEqual())
1813 for (
const Use &CmpU : CmpI->uses()) {
1815 if (!IsValidAssume(*IntrI))
1817 int Idx = CmpI->getOperandUse(0) == LoadU;
1818 Assumption = {CmpI->getOperand(Idx), IntrI};
1823 if (Assumption.first)
1828 if (!Assumption.first || !Assumption.second)
1832 << *Assumption.second <<
": " << *LoadI
1833 <<
" == " << *Assumption.first <<
"\n");
1834 bool UsedAssumedInformation =
false;
1835 std::optional<Value *> Content =
nullptr;
1836 if (Assumption.first)
1838 A.getAssumedSimplified(*Assumption.first, *
this,
1840 return handleAccess(
1841 A, *Assumption.second, Content, AccessKind::AK_ASSUMPTION,
1842 OffsetInfoMap[CurPtr].Offsets,
Changed, *LoadI->getType());
1847 for (
auto *OtherOp : OtherOps) {
1848 if (OtherOp == CurPtr) {
1851 <<
"[AAPointerInfo] Escaping use in store like instruction " <<
I
1863 bool UsedAssumedInformation =
false;
1864 std::optional<Value *> Content =
nullptr;
1866 Content =
A.getAssumedSimplified(
1868 return handleAccess(
A,
I, Content, AK, OffsetInfoMap[CurPtr].Offsets,
1873 return HandleStoreLike(*StoreI, StoreI->getValueOperand(),
1874 *StoreI->getValueOperand()->getType(),
1875 {StoreI->getValueOperand()}, AccessKind::AK_W);
1877 return HandleStoreLike(*RMWI,
nullptr, *RMWI->getValOperand()->getType(),
1878 {RMWI->getValOperand()}, AccessKind::AK_RW);
1880 return HandleStoreLike(
1881 *CXI,
nullptr, *CXI->getNewValOperand()->getType(),
1882 {CXI->getCompareOperand(), CXI->getNewValOperand()},
1889 A.getInfoCache().getTargetLibraryInfoForFunction(*CB->
getFunction());
1894 const auto *CSArgPI =
A.getAAFor<AAPointerInfo>(
1900 Changed = translateAndAddState(
A, *CSArgPI, OffsetInfoMap[CurPtr], *CB,
1903 if (!CSArgPI->reachesReturn())
1904 return isValidState();
1907 if (!Callee ||
Callee->arg_size() <= ArgNo)
1909 bool UsedAssumedInformation =
false;
1910 auto ReturnedValue =
A.getAssumedSimplified(
1915 auto *Arg =
Callee->getArg(ArgNo);
1916 if (ReturnedArg && Arg != ReturnedArg)
1918 bool IsRetMustAcc = IsArgMustAcc && (ReturnedArg == Arg);
1919 const auto *CSRetPI =
A.getAAFor<AAPointerInfo>(
1923 OffsetInfo OI = OffsetInfoMap[CurPtr];
1924 CSArgPI->addReturnedOffsetsTo(OI);
1926 translateAndAddState(
A, *CSRetPI, OI, *CB, IsRetMustAcc) |
Changed;
1927 return isValidState();
1929 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Call user not handled " << *CB
1934 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] User not handled " << *Usr <<
"\n");
1937 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
1938 assert(OffsetInfoMap.
count(OldU) &&
"Old use should be known already!");
1939 assert(!OffsetInfoMap[OldU].isUnassigned() &&
"Old use should be assinged");
1940 if (OffsetInfoMap.
count(NewU)) {
1942 if (!(OffsetInfoMap[NewU] == OffsetInfoMap[OldU])) {
1943 dbgs() <<
"[AAPointerInfo] Equivalent use callback failed: "
1944 << OffsetInfoMap[NewU] <<
" vs " << OffsetInfoMap[OldU]
1948 return OffsetInfoMap[NewU] == OffsetInfoMap[OldU];
1951 return HandlePassthroughUser(NewU.get(), OldU.
get(), Unused);
1953 if (!
A.checkForAllUses(UsePred, *
this, AssociatedValue,
1955 true, EquivalentUseCB)) {
1956 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Check for all uses failed, abort!\n");
1957 return indicatePessimisticFixpoint();
1961 dbgs() <<
"Accesses by bin after update:\n";
1968struct AAPointerInfoReturned final : AAPointerInfoImpl {
1969 AAPointerInfoReturned(
const IRPosition &IRP, Attributor &
A)
1970 : AAPointerInfoImpl(IRP,
A) {}
1974 return indicatePessimisticFixpoint();
1978 void trackStatistics()
const override {
1979 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1983struct AAPointerInfoArgument final : AAPointerInfoFloating {
1984 AAPointerInfoArgument(
const IRPosition &IRP, Attributor &
A)
1985 : AAPointerInfoFloating(IRP,
A) {}
1988 void trackStatistics()
const override {
1989 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1993struct AAPointerInfoCallSiteArgument final : AAPointerInfoFloating {
1994 AAPointerInfoCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
1995 : AAPointerInfoFloating(IRP,
A) {}
1999 using namespace AA::PointerInfo;
2005 if (
auto Length =
MI->getLengthInBytes())
2006 LengthVal =
Length->getSExtValue();
2007 unsigned ArgNo = getIRPosition().getCallSiteArgNo();
2010 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Unhandled memory intrinsic "
2012 return indicatePessimisticFixpoint();
2015 ArgNo == 0 ? AccessKind::AK_MUST_WRITE : AccessKind::AK_MUST_READ;
2017 Changed | addAccess(
A, {0, LengthVal}, *
MI,
nullptr,
Kind,
nullptr);
2020 dbgs() <<
"Accesses by bin after update:\n";
2031 Argument *Arg = getAssociatedArgument();
2035 A.getAAFor<AAPointerInfo>(*
this, ArgPos, DepClassTy::REQUIRED);
2036 if (ArgAA && ArgAA->getState().isValidState())
2037 return translateAndAddStateFromCallee(
A, *ArgAA,
2040 return indicatePessimisticFixpoint();
2043 bool IsKnownNoCapture;
2045 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNoCapture))
2046 return indicatePessimisticFixpoint();
2048 bool IsKnown =
false;
2050 return ChangeStatus::UNCHANGED;
2053 ReadOnly ? AccessKind::AK_MAY_READ : AccessKind::AK_MAY_READ_WRITE;
2059 void trackStatistics()
const override {
2060 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
2064struct AAPointerInfoCallSiteReturned final : AAPointerInfoFloating {
2065 AAPointerInfoCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
2066 : AAPointerInfoFloating(IRP,
A) {}
2069 void trackStatistics()
const override {
2070 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
2078struct AANoUnwindImpl : AANoUnwind {
2079 AANoUnwindImpl(
const IRPosition &IRP, Attributor &
A) : AANoUnwind(IRP,
A) {}
2085 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2089 const std::string getAsStr(Attributor *
A)
const override {
2090 return getAssumed() ?
"nounwind" :
"may-unwind";
2096 (unsigned)Instruction::Invoke, (
unsigned)Instruction::CallBr,
2097 (unsigned)Instruction::Call, (
unsigned)Instruction::CleanupRet,
2098 (unsigned)Instruction::CatchSwitch, (
unsigned)Instruction::Resume};
2101 if (!
I.mayThrow(
true))
2105 bool IsKnownNoUnwind;
2113 bool UsedAssumedInformation =
false;
2114 if (!
A.checkForAllInstructions(CheckForNoUnwind, *
this, Opcodes,
2115 UsedAssumedInformation))
2116 return indicatePessimisticFixpoint();
2118 return ChangeStatus::UNCHANGED;
2122struct AANoUnwindFunction final :
public AANoUnwindImpl {
2123 AANoUnwindFunction(
const IRPosition &IRP, Attributor &
A)
2124 : AANoUnwindImpl(IRP,
A) {}
2131struct AANoUnwindCallSite final
2132 : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl> {
2133 AANoUnwindCallSite(
const IRPosition &IRP, Attributor &
A)
2134 : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl>(IRP,
A) {}
2145 case Intrinsic::nvvm_barrier_cta_sync_aligned_all:
2146 case Intrinsic::nvvm_barrier_cta_sync_aligned_count:
2147 case Intrinsic::nvvm_barrier_cta_red_and_aligned_all:
2148 case Intrinsic::nvvm_barrier_cta_red_and_aligned_count:
2149 case Intrinsic::nvvm_barrier_cta_red_or_aligned_all:
2150 case Intrinsic::nvvm_barrier_cta_red_or_aligned_count:
2151 case Intrinsic::nvvm_barrier_cta_red_popc_aligned_all:
2152 case Intrinsic::nvvm_barrier_cta_red_popc_aligned_count:
2154 case Intrinsic::amdgcn_s_barrier:
2155 if (ExecutedAligned)
2178 switch (
I->getOpcode()) {
2179 case Instruction::AtomicRMW:
2182 case Instruction::Store:
2185 case Instruction::Load:
2190 "New atomic operations need to be known in the attributor.");
2202 return !
MI->isVolatile();
2218 const std::string getAsStr(Attributor *
A)
const override {
2219 return getAssumed() ?
"nosync" :
"may-sync";
2235 if (
I.mayReadOrWriteMemory())
2249 bool UsedAssumedInformation =
false;
2250 if (!
A.checkForAllReadWriteInstructions(CheckRWInstForNoSync, *
this,
2251 UsedAssumedInformation) ||
2252 !
A.checkForAllCallLikeInstructions(CheckForNoSync, *
this,
2253 UsedAssumedInformation))
2254 return indicatePessimisticFixpoint();
2259struct AANoSyncFunction final :
public AANoSyncImpl {
2260 AANoSyncFunction(
const IRPosition &IRP, Attributor &
A)
2261 : AANoSyncImpl(IRP,
A) {}
2268struct AANoSyncCallSite final : AACalleeToCallSite<AANoSync, AANoSyncImpl> {
2269 AANoSyncCallSite(
const IRPosition &IRP, Attributor &
A)
2270 : AACalleeToCallSite<AANoSync, AANoSyncImpl>(IRP,
A) {}
2280struct AANoFreeImpl :
public AANoFree {
2281 AANoFreeImpl(
const IRPosition &IRP, Attributor &
A) : AANoFree(IRP,
A) {}
2287 DepClassTy::NONE, IsKnown));
2297 DepClassTy::REQUIRED, IsKnown);
2300 bool UsedAssumedInformation =
false;
2301 if (!
A.checkForAllCallLikeInstructions(CheckForNoFree, *
this,
2302 UsedAssumedInformation))
2303 return indicatePessimisticFixpoint();
2304 return ChangeStatus::UNCHANGED;
2308 const std::string getAsStr(Attributor *
A)
const override {
2309 return getAssumed() ?
"nofree" :
"may-free";
2313struct AANoFreeFunction final :
public AANoFreeImpl {
2314 AANoFreeFunction(
const IRPosition &IRP, Attributor &
A)
2315 : AANoFreeImpl(IRP,
A) {}
2322struct AANoFreeCallSite final : AACalleeToCallSite<AANoFree, AANoFreeImpl> {
2323 AANoFreeCallSite(
const IRPosition &IRP, Attributor &
A)
2324 : AACalleeToCallSite<AANoFree, AANoFreeImpl>(IRP,
A) {}
2331struct AANoFreeFloating : AANoFreeImpl {
2332 AANoFreeFloating(
const IRPosition &IRP, Attributor &
A)
2333 : AANoFreeImpl(IRP,
A) {}
2340 const IRPosition &IRP = getIRPosition();
2345 DepClassTy::OPTIONAL, IsKnown))
2346 return ChangeStatus::UNCHANGED;
2348 Value &AssociatedValue = getIRPosition().getAssociatedValue();
2349 auto Pred = [&](
const Use &
U,
bool &Follow) ->
bool {
2361 DepClassTy::REQUIRED, IsKnown);
2378 if (!
A.checkForAllUses(Pred, *
this, AssociatedValue))
2379 return indicatePessimisticFixpoint();
2381 return ChangeStatus::UNCHANGED;
2386struct AANoFreeArgument final : AANoFreeFloating {
2387 AANoFreeArgument(
const IRPosition &IRP, Attributor &
A)
2388 : AANoFreeFloating(IRP,
A) {}
2395struct AANoFreeCallSiteArgument final : AANoFreeFloating {
2396 AANoFreeCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
2397 : AANoFreeFloating(IRP,
A) {}
2405 Argument *Arg = getAssociatedArgument();
2407 return indicatePessimisticFixpoint();
2411 DepClassTy::REQUIRED, IsKnown))
2412 return ChangeStatus::UNCHANGED;
2413 return indicatePessimisticFixpoint();
2421struct AANoFreeReturned final : AANoFreeFloating {
2422 AANoFreeReturned(
const IRPosition &IRP, Attributor &
A)
2423 : AANoFreeFloating(IRP,
A) {
2438 void trackStatistics()
const override {}
2442struct AANoFreeCallSiteReturned final : AANoFreeFloating {
2443 AANoFreeCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
2444 : AANoFreeFloating(IRP,
A) {}
2447 return ChangeStatus::UNCHANGED;
2458 bool IgnoreSubsumingPositions) {
2460 AttrKinds.
push_back(Attribute::NonNull);
2463 AttrKinds.
push_back(Attribute::Dereferenceable);
2464 if (
A.hasAttr(IRP, AttrKinds, IgnoreSubsumingPositions, Attribute::NonNull))
2471 if (!Fn->isDeclaration()) {
2481 bool UsedAssumedInformation =
false;
2482 if (!
A.checkForAllInstructions(
2484 Worklist.push_back({*cast<ReturnInst>(I).getReturnValue(), &I});
2488 UsedAssumedInformation,
false,
true))
2500 Attribute::NonNull)});
2505static int64_t getKnownNonNullAndDerefBytesForUse(
2506 Attributor &
A,
const AbstractAttribute &QueryingAA,
Value &AssociatedValue,
2507 const Use *U,
const Instruction *
I,
bool &IsNonNull,
bool &TrackUse) {
2510 const Value *UseV =
U->get();
2531 const DataLayout &
DL =
A.getInfoCache().getDL();
2535 U, {Attribute::NonNull, Attribute::Dereferenceable})) {
2552 bool IsKnownNonNull;
2555 IsNonNull |= IsKnownNonNull;
2558 return DerefAA ? DerefAA->getKnownDereferenceableBytes() : 0;
2562 if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
2563 Loc->Size.isScalable() ||
I->isVolatile())
2569 if (
Base &&
Base == &AssociatedValue) {
2570 int64_t DerefBytes = Loc->Size.getValue() +
Offset;
2572 return std::max(int64_t(0), DerefBytes);
2579 int64_t DerefBytes = Loc->Size.getValue();
2581 return std::max(int64_t(0), DerefBytes);
2587struct AANonNullImpl : AANonNull {
2588 AANonNullImpl(
const IRPosition &IRP, Attributor &
A) : AANonNull(IRP,
A) {}
2592 Value &
V = *getAssociatedValue().stripPointerCasts();
2594 indicatePessimisticFixpoint();
2598 if (Instruction *CtxI = getCtxI())
2599 followUsesInMBEC(*
this,
A, getState(), *CtxI);
2603 bool followUseInMBEC(Attributor &
A,
const Use *U,
const Instruction *
I,
2604 AANonNull::StateType &State) {
2605 bool IsNonNull =
false;
2606 bool TrackUse =
false;
2607 getKnownNonNullAndDerefBytesForUse(
A, *
this, getAssociatedValue(), U,
I,
2608 IsNonNull, TrackUse);
2609 State.setKnown(IsNonNull);
2614 const std::string getAsStr(Attributor *
A)
const override {
2615 return getAssumed() ?
"nonnull" :
"may-null";
2620struct AANonNullFloating :
public AANonNullImpl {
2621 AANonNullFloating(
const IRPosition &IRP, Attributor &
A)
2622 : AANonNullImpl(IRP,
A) {}
2626 auto CheckIRP = [&](
const IRPosition &IRP) {
2627 bool IsKnownNonNull;
2629 A, *
this, IRP, DepClassTy::OPTIONAL, IsKnownNonNull);
2633 bool UsedAssumedInformation =
false;
2634 Value *AssociatedValue = &getAssociatedValue();
2636 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
2641 Values.
size() != 1 || Values.
front().getValue() != AssociatedValue;
2647 return AA::hasAssumedIRAttr<Attribute::NonNull>(
2648 A, this, IRPosition::value(*Op), DepClassTy::OPTIONAL,
2651 return ChangeStatus::UNCHANGED;
2655 DepClassTy::OPTIONAL, IsKnown) &&
2658 DepClassTy::OPTIONAL, IsKnown))
2659 return ChangeStatus::UNCHANGED;
2666 if (AVIRP == getIRPosition() || !CheckIRP(AVIRP))
2667 return indicatePessimisticFixpoint();
2668 return ChangeStatus::UNCHANGED;
2671 for (
const auto &VAC : Values)
2673 return indicatePessimisticFixpoint();
2675 return ChangeStatus::UNCHANGED;
2683struct AANonNullReturned final
2684 : AAReturnedFromReturnedValues<AANonNull, AANonNull, AANonNull::StateType,
2685 false, AANonNull::IRAttributeKind, false> {
2686 AANonNullReturned(
const IRPosition &IRP, Attributor &
A)
2687 : AAReturnedFromReturnedValues<AANonNull, AANonNull, AANonNull::StateType,
2692 const std::string getAsStr(Attributor *
A)
const override {
2693 return getAssumed() ?
"nonnull" :
"may-null";
2701struct AANonNullArgument final
2702 : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl> {
2703 AANonNullArgument(
const IRPosition &IRP, Attributor &
A)
2704 : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl>(IRP,
A) {}
2710struct AANonNullCallSiteArgument final : AANonNullFloating {
2711 AANonNullCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
2712 : AANonNullFloating(IRP,
A) {}
2719struct AANonNullCallSiteReturned final
2720 : AACalleeToCallSite<AANonNull, AANonNullImpl> {
2721 AANonNullCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
2722 : AACalleeToCallSite<AANonNull, AANonNullImpl>(IRP,
A) {}
2731struct AAMustProgressImpl :
public AAMustProgress {
2732 AAMustProgressImpl(
const IRPosition &IRP, Attributor &
A)
2733 : AAMustProgress(IRP,
A) {}
2739 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2744 const std::string getAsStr(Attributor *
A)
const override {
2745 return getAssumed() ?
"mustprogress" :
"may-not-progress";
2749struct AAMustProgressFunction final : AAMustProgressImpl {
2750 AAMustProgressFunction(
const IRPosition &IRP, Attributor &
A)
2751 : AAMustProgressImpl(IRP,
A) {}
2757 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnown)) {
2759 return indicateOptimisticFixpoint();
2760 return ChangeStatus::UNCHANGED;
2763 auto CheckForMustProgress = [&](AbstractCallSite ACS) {
2765 bool IsKnownMustProgress;
2767 A,
this, IPos, DepClassTy::REQUIRED, IsKnownMustProgress,
2771 bool AllCallSitesKnown =
true;
2772 if (!
A.checkForAllCallSites(CheckForMustProgress, *
this,
2775 return indicatePessimisticFixpoint();
2777 return ChangeStatus::UNCHANGED;
2781 void trackStatistics()
const override {
2787struct AAMustProgressCallSite final : AAMustProgressImpl {
2788 AAMustProgressCallSite(
const IRPosition &IRP, Attributor &
A)
2789 : AAMustProgressImpl(IRP,
A) {}
2798 bool IsKnownMustProgress;
2800 A,
this, FnPos, DepClassTy::REQUIRED, IsKnownMustProgress))
2801 return indicatePessimisticFixpoint();
2802 return ChangeStatus::UNCHANGED;
2806 void trackStatistics()
const override {
2815struct AANoRecurseImpl :
public AANoRecurse {
2816 AANoRecurseImpl(
const IRPosition &IRP, Attributor &
A) : AANoRecurse(IRP,
A) {}
2822 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2827 const std::string getAsStr(Attributor *
A)
const override {
2828 return getAssumed() ?
"norecurse" :
"may-recurse";
2832struct AANoRecurseFunction final : AANoRecurseImpl {
2833 AANoRecurseFunction(
const IRPosition &IRP, Attributor &
A)
2834 : AANoRecurseImpl(IRP,
A) {}
2840 auto CallSitePred = [&](AbstractCallSite ACS) {
2841 bool IsKnownNoRecurse;
2845 DepClassTy::NONE, IsKnownNoRecurse))
2847 return IsKnownNoRecurse;
2849 bool UsedAssumedInformation =
false;
2850 if (
A.checkForAllCallSites(CallSitePred, *
this,
true,
2851 UsedAssumedInformation)) {
2857 if (!UsedAssumedInformation)
2858 indicateOptimisticFixpoint();
2859 return ChangeStatus::UNCHANGED;
2862 const AAInterFnReachability *EdgeReachability =
2863 A.getAAFor<AAInterFnReachability>(*
this, getIRPosition(),
2864 DepClassTy::REQUIRED);
2865 if (EdgeReachability && EdgeReachability->
canReach(
A, *getAnchorScope()))
2866 return indicatePessimisticFixpoint();
2867 return ChangeStatus::UNCHANGED;
2874struct AANoRecurseCallSite final
2875 : AACalleeToCallSite<AANoRecurse, AANoRecurseImpl> {
2876 AANoRecurseCallSite(
const IRPosition &IRP, Attributor &
A)
2877 : AACalleeToCallSite<AANoRecurse, AANoRecurseImpl>(IRP,
A) {}
2887struct AANonConvergentImpl :
public AANonConvergent {
2888 AANonConvergentImpl(
const IRPosition &IRP, Attributor &
A)
2889 : AANonConvergent(IRP,
A) {}
2892 const std::string getAsStr(Attributor *
A)
const override {
2893 return getAssumed() ?
"non-convergent" :
"may-be-convergent";
2897struct AANonConvergentFunction final : AANonConvergentImpl {
2898 AANonConvergentFunction(
const IRPosition &IRP, Attributor &
A)
2899 : AANonConvergentImpl(IRP,
A) {}
2905 auto CalleeIsNotConvergent = [&](
Instruction &Inst) {
2908 if (!Callee ||
Callee->isIntrinsic()) {
2911 if (
Callee->isDeclaration()) {
2912 return !
Callee->hasFnAttribute(Attribute::Convergent);
2914 const auto *ConvergentAA =
A.getAAFor<AANonConvergent>(
2916 return ConvergentAA && ConvergentAA->isAssumedNotConvergent();
2919 bool UsedAssumedInformation =
false;
2920 if (!
A.checkForAllCallLikeInstructions(CalleeIsNotConvergent, *
this,
2921 UsedAssumedInformation)) {
2922 return indicatePessimisticFixpoint();
2924 return ChangeStatus::UNCHANGED;
2928 if (isKnownNotConvergent() &&
2929 A.hasAttr(getIRPosition(), Attribute::Convergent)) {
2930 A.removeAttrs(getIRPosition(), {Attribute::Convergent});
2931 return ChangeStatus::CHANGED;
2933 return ChangeStatus::UNCHANGED;
2943struct AAUndefinedBehaviorImpl :
public AAUndefinedBehavior {
2944 AAUndefinedBehaviorImpl(
const IRPosition &IRP, Attributor &
A)
2945 : AAUndefinedBehavior(IRP,
A) {}
2950 const size_t UBPrevSize = KnownUBInsts.size();
2951 const size_t NoUBPrevSize = AssumedNoUBInsts.size();
2959 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
2968 "Expected pointer operand of memory accessing instruction");
2972 std::optional<Value *> SimplifiedPtrOp =
2973 stopOnUndefOrAssumed(
A, PtrOp, &
I);
2974 if (!SimplifiedPtrOp || !*SimplifiedPtrOp)
2976 const Value *PtrOpVal = *SimplifiedPtrOp;
2982 AssumedNoUBInsts.insert(&
I);
2994 AssumedNoUBInsts.insert(&
I);
2996 KnownUBInsts.insert(&
I);
3005 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
3013 std::optional<Value *> SimplifiedCond =
3014 stopOnUndefOrAssumed(
A, BrInst->getCondition(), BrInst);
3015 if (!SimplifiedCond || !*SimplifiedCond)
3017 AssumedNoUBInsts.insert(&
I);
3025 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
3034 for (
unsigned idx = 0; idx < CB.
arg_size(); idx++) {
3040 if (idx >=
Callee->arg_size())
3052 bool IsKnownNoUndef;
3054 A,
this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNoUndef);
3055 if (!IsKnownNoUndef)
3057 bool UsedAssumedInformation =
false;
3058 std::optional<Value *> SimplifiedVal =
3061 if (UsedAssumedInformation)
3063 if (SimplifiedVal && !*SimplifiedVal)
3066 KnownUBInsts.insert(&
I);
3072 bool IsKnownNonNull;
3074 A,
this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNonNull);
3076 KnownUBInsts.insert(&
I);
3085 std::optional<Value *> SimplifiedRetValue =
3086 stopOnUndefOrAssumed(
A, RI.getReturnValue(), &
I);
3087 if (!SimplifiedRetValue || !*SimplifiedRetValue)
3105 bool IsKnownNonNull;
3110 KnownUBInsts.insert(&
I);
3116 bool UsedAssumedInformation =
false;
3117 A.checkForAllInstructions(InspectMemAccessInstForUB, *
this,
3118 {Instruction::Load, Instruction::Store,
3119 Instruction::AtomicCmpXchg,
3120 Instruction::AtomicRMW},
3121 UsedAssumedInformation,
3123 A.checkForAllInstructions(InspectBrInstForUB, *
this, {Instruction::CondBr},
3124 UsedAssumedInformation,
3126 A.checkForAllCallLikeInstructions(InspectCallSiteForUB, *
this,
3127 UsedAssumedInformation);
3131 if (!getAnchorScope()->getReturnType()->isVoidTy()) {
3133 if (!
A.isAssumedDead(ReturnIRP,
this,
nullptr, UsedAssumedInformation)) {
3134 bool IsKnownNoUndef;
3136 A,
this, ReturnIRP, DepClassTy::NONE, IsKnownNoUndef);
3138 A.checkForAllInstructions(InspectReturnInstForUB, *
this,
3139 {Instruction::Ret}, UsedAssumedInformation,
3144 if (NoUBPrevSize != AssumedNoUBInsts.size() ||
3145 UBPrevSize != KnownUBInsts.size())
3146 return ChangeStatus::CHANGED;
3147 return ChangeStatus::UNCHANGED;
3150 bool isKnownToCauseUB(Instruction *
I)
const override {
3151 return KnownUBInsts.count(
I);
3154 bool isAssumedToCauseUB(Instruction *
I)
const override {
3161 switch (
I->getOpcode()) {
3162 case Instruction::Load:
3163 case Instruction::Store:
3164 case Instruction::AtomicCmpXchg:
3165 case Instruction::AtomicRMW:
3166 case Instruction::CondBr:
3167 return !AssumedNoUBInsts.count(
I);
3175 if (KnownUBInsts.empty())
3176 return ChangeStatus::UNCHANGED;
3177 for (Instruction *
I : KnownUBInsts)
3178 A.changeToUnreachableAfterManifest(
I);
3179 return ChangeStatus::CHANGED;
3183 const std::string getAsStr(Attributor *
A)
const override {
3184 return getAssumed() ?
"undefined-behavior" :
"no-ub";
3212 SmallPtrSet<Instruction *, 8> KnownUBInsts;
3216 SmallPtrSet<Instruction *, 8> AssumedNoUBInsts;
3227 std::optional<Value *> stopOnUndefOrAssumed(Attributor &
A,
Value *V,
3229 bool UsedAssumedInformation =
false;
3230 std::optional<Value *> SimplifiedV =
3233 if (!UsedAssumedInformation) {
3238 KnownUBInsts.insert(
I);
3239 return std::nullopt;
3246 KnownUBInsts.insert(
I);
3247 return std::nullopt;
3253struct AAUndefinedBehaviorFunction final : AAUndefinedBehaviorImpl {
3254 AAUndefinedBehaviorFunction(
const IRPosition &IRP, Attributor &
A)
3255 : AAUndefinedBehaviorImpl(IRP,
A) {}
3258 void trackStatistics()
const override {
3259 STATS_DECL(UndefinedBehaviorInstruction, Instruction,
3260 "Number of instructions known to have UB");
3262 KnownUBInsts.size();
3273static bool mayContainUnboundedCycle(Function &
F, Attributor &
A) {
3274 ScalarEvolution *SE =
3275 A.getInfoCache().getAnalysisResultForFunction<ScalarEvolutionAnalysis>(
F);
3276 LoopInfo *LI =
A.getInfoCache().getAnalysisResultForFunction<LoopAnalysis>(
F);
3282 for (scc_iterator<Function *> SCCI =
scc_begin(&
F); !SCCI.isAtEnd(); ++SCCI)
3283 if (SCCI.hasCycle())
3293 for (
auto *L : LI->getLoopsInPreorder()) {
3300struct AAWillReturnImpl :
public AAWillReturn {
3301 AAWillReturnImpl(
const IRPosition &IRP, Attributor &
A)
3302 : AAWillReturn(IRP,
A) {}
3308 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
3313 bool isImpliedByMustprogressAndReadonly(Attributor &
A,
bool KnownOnly) {
3314 if (!
A.hasAttr(getIRPosition(), {Attribute::MustProgress}))
3319 return IsKnown || !KnownOnly;
3325 if (isImpliedByMustprogressAndReadonly(
A,
false))
3326 return ChangeStatus::UNCHANGED;
3332 A,
this, IPos, DepClassTy::REQUIRED, IsKnown)) {
3338 bool IsKnownNoRecurse;
3340 A,
this, IPos, DepClassTy::REQUIRED, IsKnownNoRecurse);
3343 bool UsedAssumedInformation =
false;
3344 if (!
A.checkForAllCallLikeInstructions(CheckForWillReturn, *
this,
3345 UsedAssumedInformation))
3346 return indicatePessimisticFixpoint();
3350 return !
I.isVolatile();
3352 if (!
A.checkForAllInstructions(CheckForVolatile, *
this,
3353 {Instruction::Load, Instruction::Store,
3354 Instruction::AtomicCmpXchg,
3355 Instruction::AtomicRMW},
3356 UsedAssumedInformation))
3357 return indicatePessimisticFixpoint();
3359 return ChangeStatus::UNCHANGED;
3363 const std::string getAsStr(Attributor *
A)
const override {
3364 return getAssumed() ?
"willreturn" :
"may-noreturn";
3368struct AAWillReturnFunction final : AAWillReturnImpl {
3369 AAWillReturnFunction(
const IRPosition &IRP, Attributor &
A)
3370 : AAWillReturnImpl(IRP,
A) {}
3374 AAWillReturnImpl::initialize(
A);
3377 assert(
F &&
"Did expect an anchor function");
3378 if (
F->isDeclaration() || mayContainUnboundedCycle(*
F,
A))
3379 indicatePessimisticFixpoint();
3387struct AAWillReturnCallSite final
3388 : AACalleeToCallSite<AAWillReturn, AAWillReturnImpl> {
3389 AAWillReturnCallSite(
const IRPosition &IRP, Attributor &
A)
3390 : AACalleeToCallSite<AAWillReturn, AAWillReturnImpl>(IRP,
A) {}
3394 if (isImpliedByMustprogressAndReadonly(
A,
false))
3395 return ChangeStatus::UNCHANGED;
3397 return AACalleeToCallSite::updateImpl(
A);
3419 const ToTy *
To =
nullptr;
3446 if (!ES || ES->
empty()) {
3447 ExclusionSet = nullptr;
3448 }
else if (MakeUnique) {
3449 ExclusionSet =
A.getInfoCache().getOrCreateUniqueBlockExecutionSet(ES);
3474 if (!PairDMI::isEqual({LHS->From, LHS->To}, {RHS->From, RHS->To}))
3476 return InstSetDMI::isEqual(LHS->ExclusionSet, RHS->ExclusionSet);
3480#define DefineKeys(ToTy) \
3482 ReachabilityQueryInfo<ToTy> \
3483 DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::EmptyKey = \
3484 ReachabilityQueryInfo<ToTy>( \
3485 DenseMapInfo<const Instruction *>::getEmptyKey(), \
3486 DenseMapInfo<const ToTy *>::getEmptyKey()); \
3488 ReachabilityQueryInfo<ToTy> \
3489 DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::TombstoneKey = \
3490 ReachabilityQueryInfo<ToTy>( \
3491 DenseMapInfo<const Instruction *>::getTombstoneKey(), \
3492 DenseMapInfo<const ToTy *>::getTombstoneKey());
3501template <
typename BaseTy,
typename ToTy>
3502struct CachedReachabilityAA :
public BaseTy {
3503 using RQITy = ReachabilityQueryInfo<ToTy>;
3505 CachedReachabilityAA(
const IRPosition &IRP, Attributor &
A) : BaseTy(IRP,
A) {}
3508 bool isQueryAA()
const override {
return true; }
3513 for (
unsigned u = 0,
e = QueryVector.size();
u <
e; ++
u) {
3514 RQITy *RQI = QueryVector[
u];
3515 if (RQI->Result == RQITy::Reachable::No &&
3517 Changed = ChangeStatus::CHANGED;
3523 bool IsTemporaryRQI) = 0;
3525 bool rememberResult(Attributor &
A,
typename RQITy::Reachable
Result,
3526 RQITy &RQI,
bool UsedExclusionSet,
bool IsTemporaryRQI) {
3531 QueryCache.erase(&RQI);
3537 if (
Result == RQITy::Reachable::Yes || !UsedExclusionSet) {
3538 RQITy PlainRQI(RQI.From, RQI.To);
3539 if (!QueryCache.count(&PlainRQI)) {
3540 RQITy *RQIPtr =
new (
A.Allocator) RQITy(RQI.From, RQI.To);
3542 QueryVector.push_back(RQIPtr);
3543 QueryCache.insert(RQIPtr);
3548 if (IsTemporaryRQI &&
Result != RQITy::Reachable::Yes && UsedExclusionSet) {
3549 assert((!RQI.ExclusionSet || !RQI.ExclusionSet->empty()) &&
3550 "Did not expect empty set!");
3551 RQITy *RQIPtr =
new (
A.Allocator)
3552 RQITy(
A, *RQI.From, *RQI.To, RQI.ExclusionSet,
true);
3553 assert(RQIPtr->Result == RQITy::Reachable::No &&
"Already reachable?");
3555 assert(!QueryCache.count(RQIPtr));
3556 QueryVector.push_back(RQIPtr);
3557 QueryCache.insert(RQIPtr);
3560 if (
Result == RQITy::Reachable::No && IsTemporaryRQI)
3561 A.registerForUpdate(*
this);
3562 return Result == RQITy::Reachable::Yes;
3565 const std::string getAsStr(Attributor *
A)
const override {
3567 return "#queries(" + std::to_string(QueryVector.size()) +
")";
3570 bool checkQueryCache(Attributor &
A, RQITy &StackRQI,
3571 typename RQITy::Reachable &
Result) {
3572 if (!this->getState().isValidState()) {
3573 Result = RQITy::Reachable::Yes;
3579 if (StackRQI.ExclusionSet) {
3580 RQITy PlainRQI(StackRQI.From, StackRQI.To);
3581 auto It = QueryCache.find(&PlainRQI);
3582 if (It != QueryCache.end() && (*It)->Result == RQITy::Reachable::No) {
3583 Result = RQITy::Reachable::No;
3588 auto It = QueryCache.find(&StackRQI);
3589 if (It != QueryCache.end()) {
3596 QueryCache.insert(&StackRQI);
3602 DenseSet<RQITy *> QueryCache;
3605struct AAIntraFnReachabilityFunction final
3606 :
public CachedReachabilityAA<AAIntraFnReachability, Instruction> {
3607 using Base = CachedReachabilityAA<AAIntraFnReachability, Instruction>;
3608 AAIntraFnReachabilityFunction(
const IRPosition &IRP, Attributor &
A)
3610 DT =
A.getInfoCache().getAnalysisResultForFunction<DominatorTreeAnalysis>(
3614 bool isAssumedReachable(
3615 Attributor &
A,
const Instruction &From,
const Instruction &To,
3617 auto *NonConstThis =
const_cast<AAIntraFnReachabilityFunction *
>(
this);
3621 RQITy StackRQI(
A, From, To, ExclusionSet,
false);
3623 if (!NonConstThis->checkQueryCache(
A, StackRQI, Result))
3624 return NonConstThis->isReachableImpl(
A, StackRQI,
3626 return Result == RQITy::Reachable::Yes;
3633 A.getAAFor<AAIsDead>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
3636 [&](
const auto &DeadEdge) {
3637 return LivenessAA->isEdgeDead(DeadEdge.first,
3641 return LivenessAA->isAssumedDead(BB);
3643 return ChangeStatus::UNCHANGED;
3647 return Base::updateImpl(
A);
3651 bool IsTemporaryRQI)
override {
3653 bool UsedExclusionSet =
false;
3658 while (IP && IP != &To) {
3659 if (ExclusionSet && IP != Origin && ExclusionSet->
count(IP)) {
3660 UsedExclusionSet =
true;
3668 const BasicBlock *FromBB = RQI.From->getParent();
3669 const BasicBlock *ToBB = RQI.To->getParent();
3671 "Not an intra-procedural query!");
3675 if (FromBB == ToBB &&
3676 WillReachInBlock(*RQI.From, *RQI.To, RQI.ExclusionSet))
3677 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3682 if (!WillReachInBlock(ToBB->
front(), *RQI.To, RQI.ExclusionSet))
3683 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3687 SmallPtrSet<const BasicBlock *, 16> ExclusionBlocks;
3688 if (RQI.ExclusionSet)
3689 for (
auto *
I : *RQI.ExclusionSet)
3690 if (
I->getFunction() == Fn)
3691 ExclusionBlocks.
insert(
I->getParent());
3694 if (ExclusionBlocks.
count(FromBB) &&
3697 return rememberResult(
A, RQITy::Reachable::No, RQI,
true, IsTemporaryRQI);
3700 A.getAAFor<AAIsDead>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
3701 if (LivenessAA && LivenessAA->isAssumedDead(ToBB)) {
3702 DeadBlocks.insert(ToBB);
3703 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3707 SmallPtrSet<const BasicBlock *, 16> Visited;
3711 DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> LocalDeadEdges;
3712 while (!Worklist.
empty()) {
3714 if (!Visited.
insert(BB).second)
3716 for (
const BasicBlock *SuccBB :
successors(BB)) {
3717 if (LivenessAA && LivenessAA->isEdgeDead(BB, SuccBB)) {
3718 LocalDeadEdges.
insert({BB, SuccBB});
3723 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3726 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3729 if (ExclusionBlocks.
count(SuccBB)) {
3730 UsedExclusionSet =
true;
3737 DeadEdges.insert_range(LocalDeadEdges);
3738 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3743 void trackStatistics()
const override {}
3748 DenseSet<const BasicBlock *> DeadBlocks;
3752 DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> DeadEdges;
3755 const DominatorTree *DT =
nullptr;
3763 bool IgnoreSubsumingPositions) {
3764 assert(ImpliedAttributeKind == Attribute::NoAlias &&
3765 "Unexpected attribute kind");
3771 IgnoreSubsumingPositions =
true;
3782 if (
A.hasAttr(IRP, {Attribute::ByVal, Attribute::NoAlias},
3783 IgnoreSubsumingPositions, Attribute::NoAlias))
3793 "Noalias is a pointer attribute");
3796 const std::string getAsStr(
Attributor *
A)
const override {
3797 return getAssumed() ?
"noalias" :
"may-alias";
3802struct AANoAliasFloating final : AANoAliasImpl {
3803 AANoAliasFloating(
const IRPosition &IRP, Attributor &
A)
3804 : AANoAliasImpl(IRP,
A) {}
3809 return indicatePessimisticFixpoint();
3813 void trackStatistics()
const override {
3819struct AANoAliasArgument final
3820 : AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl> {
3821 using Base = AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl>;
3822 AANoAliasArgument(
const IRPosition &IRP, Attributor &
A) :
Base(IRP,
A) {}
3835 DepClassTy::OPTIONAL, IsKnownNoSycn))
3836 return Base::updateImpl(
A);
3841 return Base::updateImpl(
A);
3845 bool UsedAssumedInformation =
false;
3846 if (
A.checkForAllCallSites(
3847 [](AbstractCallSite ACS) { return !ACS.isCallbackCall(); }, *
this,
3848 true, UsedAssumedInformation))
3849 return Base::updateImpl(
A);
3857 return indicatePessimisticFixpoint();
3864struct AANoAliasCallSiteArgument final : AANoAliasImpl {
3865 AANoAliasCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
3866 : AANoAliasImpl(IRP,
A) {}
3870 bool mayAliasWithArgument(Attributor &
A, AAResults *&AAR,
3871 const AAMemoryBehavior &MemBehaviorAA,
3872 const CallBase &CB,
unsigned OtherArgNo) {
3874 if (this->getCalleeArgNo() == (
int)OtherArgNo)
3882 auto *CBArgMemBehaviorAA =
A.getAAFor<AAMemoryBehavior>(
3886 if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadNone()) {
3887 A.recordDependence(*CBArgMemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3894 if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadOnly() &&
3896 A.recordDependence(MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3897 A.recordDependence(*CBArgMemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3903 AAR =
A.getInfoCache().getAnalysisResultForFunction<AAManager>(
3907 bool IsAliasing = !AAR || !AAR->
isNoAlias(&getAssociatedValue(), ArgOp);
3909 "callsite arguments: "
3910 << getAssociatedValue() <<
" " << *ArgOp <<
" => "
3911 << (IsAliasing ?
"" :
"no-") <<
"alias \n");
3916 bool isKnownNoAliasDueToNoAliasPreservation(
3917 Attributor &
A, AAResults *&AAR,
const AAMemoryBehavior &MemBehaviorAA) {
3930 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
3946 bool IsKnownNoCapture;
3949 DepClassTy::OPTIONAL, IsKnownNoCapture))
3955 A, *UserI, *getCtxI(), *
this,
nullptr,
3956 [ScopeFn](
const Function &Fn) {
return &Fn != ScopeFn; }))
3971 LLVM_DEBUG(
dbgs() <<
"[AANoAliasCSArg] Unknown user: " << *UserI <<
"\n");
3975 bool IsKnownNoCapture;
3976 const AANoCapture *NoCaptureAA =
nullptr;
3978 A,
this, VIRP, DepClassTy::NONE, IsKnownNoCapture,
false, &NoCaptureAA);
3979 if (!IsAssumedNoCapture &&
3981 if (!
A.checkForAllUses(UsePred, *
this, getAssociatedValue())) {
3983 dbgs() <<
"[AANoAliasCSArg] " << getAssociatedValue()
3984 <<
" cannot be noalias as it is potentially captured\n");
3989 A.recordDependence(*NoCaptureAA, *
this, DepClassTy::OPTIONAL);
3995 for (
unsigned OtherArgNo = 0; OtherArgNo < CB.
arg_size(); OtherArgNo++)
3996 if (mayAliasWithArgument(
A, AAR, MemBehaviorAA, CB, OtherArgNo))
4006 auto *MemBehaviorAA =
4007 A.getAAFor<AAMemoryBehavior>(*
this, getIRPosition(), DepClassTy::NONE);
4009 A.recordDependence(*MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
4010 return ChangeStatus::UNCHANGED;
4013 bool IsKnownNoAlias;
4016 A,
this, VIRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
4018 <<
" is not no-alias at the definition\n");
4019 return indicatePessimisticFixpoint();
4022 AAResults *AAR =
nullptr;
4023 if (MemBehaviorAA &&
4024 isKnownNoAliasDueToNoAliasPreservation(
A, AAR, *MemBehaviorAA)) {
4026 dbgs() <<
"[AANoAlias] No-Alias deduced via no-alias preservation\n");
4027 return ChangeStatus::UNCHANGED;
4030 return indicatePessimisticFixpoint();
4038struct AANoAliasReturned final : AANoAliasImpl {
4039 AANoAliasReturned(
const IRPosition &IRP, Attributor &
A)
4040 : AANoAliasImpl(IRP,
A) {}
4045 auto CheckReturnValue = [&](
Value &RV) ->
bool {
4056 bool IsKnownNoAlias;
4058 A,
this, RVPos, DepClassTy::REQUIRED, IsKnownNoAlias))
4061 bool IsKnownNoCapture;
4062 const AANoCapture *NoCaptureAA =
nullptr;
4064 A,
this, RVPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
4066 return IsAssumedNoCapture ||
4070 if (!
A.checkForAllReturnedValues(CheckReturnValue, *
this))
4071 return indicatePessimisticFixpoint();
4073 return ChangeStatus::UNCHANGED;
4081struct AANoAliasCallSiteReturned final
4082 : AACalleeToCallSite<AANoAlias, AANoAliasImpl> {
4083 AANoAliasCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
4084 : AACalleeToCallSite<AANoAlias, AANoAliasImpl>(IRP,
A) {}
4094struct AAIsDeadValueImpl :
public AAIsDead {
4095 AAIsDeadValueImpl(
const IRPosition &IRP, Attributor &
A) : AAIsDead(IRP,
A) {}
4098 bool isAssumedDead()
const override {
return isAssumed(IS_DEAD); }
4101 bool isKnownDead()
const override {
return isKnown(IS_DEAD); }
4104 bool isAssumedDead(
const BasicBlock *BB)
const override {
return false; }
4107 bool isKnownDead(
const BasicBlock *BB)
const override {
return false; }
4110 bool isAssumedDead(
const Instruction *
I)
const override {
4111 return I == getCtxI() && isAssumedDead();
4115 bool isKnownDead(
const Instruction *
I)
const override {
4116 return isAssumedDead(
I) && isKnownDead();
4120 const std::string getAsStr(Attributor *
A)
const override {
4121 return isAssumedDead() ?
"assumed-dead" :
"assumed-live";
4125 bool areAllUsesAssumedDead(Attributor &
A,
Value &V) {
4127 if (
V.getType()->isVoidTy() ||
V.use_empty())
4133 if (!
A.isRunOn(*
I->getFunction()))
4135 bool UsedAssumedInformation =
false;
4136 std::optional<Constant *>
C =
4137 A.getAssumedConstant(V, *
this, UsedAssumedInformation);
4142 auto UsePred = [&](
const Use &
U,
bool &Follow) {
return false; };
4147 return A.checkForAllUses(UsePred, *
this, V,
false,
4148 DepClassTy::REQUIRED,
4153 bool isAssumedSideEffectFree(Attributor &
A, Instruction *
I) {
4157 if (!
I->isTerminator() && !
I->mayHaveSideEffects())
4166 bool IsKnownNoUnwind;
4168 A,
this, CallIRP, DepClassTy::OPTIONAL, IsKnownNoUnwind))
4176struct AAIsDeadFloating :
public AAIsDeadValueImpl {
4177 AAIsDeadFloating(
const IRPosition &IRP, Attributor &
A)
4178 : AAIsDeadValueImpl(IRP,
A) {}
4182 AAIsDeadValueImpl::initialize(
A);
4185 indicatePessimisticFixpoint();
4190 if (!isAssumedSideEffectFree(
A,
I)) {
4192 indicatePessimisticFixpoint();
4194 removeAssumedBits(HAS_NO_EFFECT);
4198 bool isDeadFence(Attributor &
A, FenceInst &FI) {
4199 const auto *ExecDomainAA =
A.lookupAAFor<AAExecutionDomain>(
4201 if (!ExecDomainAA || !ExecDomainAA->isNoOpFence(FI))
4203 A.recordDependence(*ExecDomainAA, *
this, DepClassTy::OPTIONAL);
4207 bool isDeadStore(Attributor &
A, StoreInst &SI,
4208 SmallSetVector<Instruction *, 8> *AssumeOnlyInst =
nullptr) {
4210 if (
SI.isVolatile())
4216 bool UsedAssumedInformation =
false;
4217 if (!AssumeOnlyInst) {
4218 PotentialCopies.clear();
4220 UsedAssumedInformation)) {
4223 <<
"[AAIsDead] Could not determine potential copies of store!\n");
4227 LLVM_DEBUG(
dbgs() <<
"[AAIsDead] Store has " << PotentialCopies.size()
4228 <<
" potential copies.\n");
4230 InformationCache &InfoCache =
A.getInfoCache();
4233 UsedAssumedInformation))
4237 auto &UserI = cast<Instruction>(*U.getUser());
4238 if (InfoCache.isOnlyUsedByAssume(UserI)) {
4240 AssumeOnlyInst->insert(&UserI);
4243 return A.isAssumedDead(U,
this,
nullptr, UsedAssumedInformation);
4249 <<
" is assumed live!\n");
4255 const std::string getAsStr(Attributor *
A)
const override {
4259 return "assumed-dead-store";
4262 return "assumed-dead-fence";
4263 return AAIsDeadValueImpl::getAsStr(
A);
4270 if (!isDeadStore(
A, *SI))
4271 return indicatePessimisticFixpoint();
4273 if (!isDeadFence(
A, *FI))
4274 return indicatePessimisticFixpoint();
4276 if (!isAssumedSideEffectFree(
A,
I))
4277 return indicatePessimisticFixpoint();
4278 if (!areAllUsesAssumedDead(
A, getAssociatedValue()))
4279 return indicatePessimisticFixpoint();
4284 bool isRemovableStore()
const override {
4285 return isAssumed(IS_REMOVABLE) &&
isa<StoreInst>(&getAssociatedValue());
4290 Value &
V = getAssociatedValue();
4297 SmallSetVector<Instruction *, 8> AssumeOnlyInst;
4298 bool IsDead = isDeadStore(
A, *SI, &AssumeOnlyInst);
4301 A.deleteAfterManifest(*
I);
4302 for (
size_t i = 0; i < AssumeOnlyInst.
size(); ++i) {
4304 for (
auto *Usr : AOI->
users())
4306 A.deleteAfterManifest(*AOI);
4312 A.deleteAfterManifest(*FI);
4316 A.deleteAfterManifest(*
I);
4324 void trackStatistics()
const override {
4330 SmallSetVector<Value *, 4> PotentialCopies;
4333struct AAIsDeadArgument :
public AAIsDeadFloating {
4334 AAIsDeadArgument(
const IRPosition &IRP, Attributor &
A)
4335 : AAIsDeadFloating(IRP,
A) {}
4339 Argument &Arg = *getAssociatedArgument();
4340 if (
A.isValidFunctionSignatureRewrite(Arg, {}))
4341 if (
A.registerFunctionSignatureRewrite(
4345 return ChangeStatus::CHANGED;
4347 return ChangeStatus::UNCHANGED;
4354struct AAIsDeadCallSiteArgument :
public AAIsDeadValueImpl {
4355 AAIsDeadCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
4356 : AAIsDeadValueImpl(IRP,
A) {}
4360 AAIsDeadValueImpl::initialize(
A);
4362 indicatePessimisticFixpoint();
4371 Argument *Arg = getAssociatedArgument();
4373 return indicatePessimisticFixpoint();
4375 auto *ArgAA =
A.getAAFor<AAIsDead>(*
this, ArgPos, DepClassTy::REQUIRED);
4377 return indicatePessimisticFixpoint();
4386 "Expected undef values to be filtered out!");
4388 if (
A.changeUseAfterManifest(U, UV))
4389 return ChangeStatus::CHANGED;
4390 return ChangeStatus::UNCHANGED;
4397struct AAIsDeadCallSiteReturned :
public AAIsDeadFloating {
4398 AAIsDeadCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
4399 : AAIsDeadFloating(IRP,
A) {}
4402 bool isAssumedDead()
const override {
4403 return AAIsDeadFloating::isAssumedDead() && IsAssumedSideEffectFree;
4408 AAIsDeadFloating::initialize(
A);
4410 indicatePessimisticFixpoint();
4415 IsAssumedSideEffectFree = isAssumedSideEffectFree(
A, getCtxI());
4421 if (IsAssumedSideEffectFree && !isAssumedSideEffectFree(
A, getCtxI())) {
4422 IsAssumedSideEffectFree =
false;
4423 Changed = ChangeStatus::CHANGED;
4425 if (!areAllUsesAssumedDead(
A, getAssociatedValue()))
4426 return indicatePessimisticFixpoint();
4431 void trackStatistics()
const override {
4432 if (IsAssumedSideEffectFree)
4439 const std::string getAsStr(Attributor *
A)
const override {
4440 return isAssumedDead()
4442 : (getAssumed() ?
"assumed-dead-users" :
"assumed-live");
4446 bool IsAssumedSideEffectFree =
true;
4449struct AAIsDeadReturned :
public AAIsDeadValueImpl {
4450 AAIsDeadReturned(
const IRPosition &IRP, Attributor &
A)
4451 : AAIsDeadValueImpl(IRP,
A) {}
4456 bool UsedAssumedInformation =
false;
4457 A.checkForAllInstructions([](Instruction &) {
return true; }, *
this,
4458 {Instruction::Ret}, UsedAssumedInformation);
4460 auto PredForCallSite = [&](AbstractCallSite ACS) {
4461 if (ACS.isCallbackCall() || !ACS.getInstruction())
4463 return areAllUsesAssumedDead(
A, *ACS.getInstruction());
4466 if (!
A.checkForAllCallSites(PredForCallSite, *
this,
true,
4467 UsedAssumedInformation))
4468 return indicatePessimisticFixpoint();
4470 return ChangeStatus::UNCHANGED;
4476 bool AnyChange =
false;
4477 UndefValue &UV = *
UndefValue::get(getAssociatedFunction()->getReturnType());
4484 bool UsedAssumedInformation =
false;
4485 A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
4486 UsedAssumedInformation);
4487 return AnyChange ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
4494struct AAIsDeadFunction :
public AAIsDead {
4495 AAIsDeadFunction(
const IRPosition &IRP, Attributor &
A) : AAIsDead(IRP,
A) {}
4500 assert(
F &&
"Did expect an anchor function");
4501 if (!isAssumedDeadInternalFunction(
A)) {
4502 ToBeExploredFrom.insert(&
F->getEntryBlock().front());
4503 assumeLive(
A,
F->getEntryBlock());
4507 bool isAssumedDeadInternalFunction(Attributor &
A) {
4508 if (!getAnchorScope()->hasLocalLinkage())
4510 bool UsedAssumedInformation =
false;
4511 return A.checkForAllCallSites([](AbstractCallSite) {
return false; }, *
this,
4512 true, UsedAssumedInformation);
4516 const std::string getAsStr(Attributor *
A)
const override {
4517 return "Live[#BB " + std::to_string(AssumedLiveBlocks.size()) +
"/" +
4518 std::to_string(getAnchorScope()->
size()) +
"][#TBEP " +
4519 std::to_string(ToBeExploredFrom.size()) +
"][#KDE " +
4520 std::to_string(KnownDeadEnds.size()) +
"]";
4525 assert(getState().isValidState() &&
4526 "Attempted to manifest an invalid state!");
4531 if (AssumedLiveBlocks.empty()) {
4532 A.deleteAfterManifest(
F);
4533 return ChangeStatus::CHANGED;
4539 bool Invoke2CallAllowed = !mayCatchAsynchronousExceptions(
F);
4541 KnownDeadEnds.set_union(ToBeExploredFrom);
4542 for (
const Instruction *DeadEndI : KnownDeadEnds) {
4546 bool IsKnownNoReturn;
4554 A.registerInvokeWithDeadSuccessor(
const_cast<InvokeInst &
>(*
II));
4556 A.changeToUnreachableAfterManifest(
4557 const_cast<Instruction *
>(DeadEndI->getNextNode()));
4558 HasChanged = ChangeStatus::CHANGED;
4561 STATS_DECL(AAIsDead, BasicBlock,
"Number of dead basic blocks deleted.");
4562 for (BasicBlock &BB :
F)
4563 if (!AssumedLiveBlocks.count(&BB)) {
4564 A.deleteAfterManifest(BB);
4566 HasChanged = ChangeStatus::CHANGED;
4575 bool isEdgeDead(
const BasicBlock *From,
const BasicBlock *To)
const override {
4578 "Used AAIsDead of the wrong function");
4579 return isValidState() && !AssumedLiveEdges.count(std::make_pair(From, To));
4583 void trackStatistics()
const override {}
4586 bool isAssumedDead()
const override {
return false; }
4589 bool isKnownDead()
const override {
return false; }
4592 bool isAssumedDead(
const BasicBlock *BB)
const override {
4594 "BB must be in the same anchor scope function.");
4598 return !AssumedLiveBlocks.count(BB);
4602 bool isKnownDead(
const BasicBlock *BB)
const override {
4603 return getKnown() && isAssumedDead(BB);
4607 bool isAssumedDead(
const Instruction *
I)
const override {
4608 assert(
I->getParent()->getParent() == getAnchorScope() &&
4609 "Instruction must be in the same anchor scope function.");
4616 if (!AssumedLiveBlocks.count(
I->getParent()))
4622 if (KnownDeadEnds.count(PrevI) || ToBeExploredFrom.count(PrevI))
4630 bool isKnownDead(
const Instruction *
I)
const override {
4631 return getKnown() && isAssumedDead(
I);
4636 bool assumeLive(Attributor &
A,
const BasicBlock &BB) {
4637 if (!AssumedLiveBlocks.insert(&BB).second)
4644 for (
const Instruction &
I : BB)
4647 if (
F->hasLocalLinkage())
4648 A.markLiveInternalFunction(*
F);
4654 SmallSetVector<const Instruction *, 8> ToBeExploredFrom;
4657 SmallSetVector<const Instruction *, 8> KnownDeadEnds;
4660 DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> AssumedLiveEdges;
4663 DenseSet<const BasicBlock *> AssumedLiveBlocks;
4667identifyAliveSuccessors(Attributor &
A,
const CallBase &CB,
4668 AbstractAttribute &AA,
4669 SmallVectorImpl<const Instruction *> &AliveSuccessors) {
4672 bool IsKnownNoReturn;
4675 return !IsKnownNoReturn;
4684identifyAliveSuccessors(Attributor &
A,
const InvokeInst &
II,
4685 AbstractAttribute &AA,
4686 SmallVectorImpl<const Instruction *> &AliveSuccessors) {
4687 bool UsedAssumedInformation =
4693 if (AAIsDeadFunction::mayCatchAsynchronousExceptions(*
II.getFunction())) {
4694 AliveSuccessors.
push_back(&
II.getUnwindDest()->front());
4698 bool IsKnownNoUnwind;
4701 UsedAssumedInformation |= !IsKnownNoUnwind;
4703 AliveSuccessors.
push_back(&
II.getUnwindDest()->front());
4706 return UsedAssumedInformation;
4710identifyAliveSuccessors(Attributor &,
const UncondBrInst &BI,
4711 AbstractAttribute &,
4712 SmallVectorImpl<const Instruction *> &AliveSuccessors) {
4718identifyAliveSuccessors(Attributor &
A,
const CondBrInst &BI,
4719 AbstractAttribute &AA,
4720 SmallVectorImpl<const Instruction *> &AliveSuccessors) {
4721 bool UsedAssumedInformation =
false;
4722 std::optional<Constant *>
C =
4723 A.getAssumedConstant(*BI.
getCondition(), AA, UsedAssumedInformation);
4733 UsedAssumedInformation =
false;
4735 return UsedAssumedInformation;
4739identifyAliveSuccessors(Attributor &
A,
const SwitchInst &SI,
4740 AbstractAttribute &AA,
4741 SmallVectorImpl<const Instruction *> &AliveSuccessors) {
4742 bool UsedAssumedInformation =
false;
4746 UsedAssumedInformation)) {
4748 for (
const BasicBlock *SuccBB :
successors(
SI.getParent()))
4753 if (Values.
empty() ||
4754 (Values.
size() == 1 &&
4757 return UsedAssumedInformation;
4760 Type &Ty = *
SI.getCondition()->getType();
4761 SmallPtrSet<ConstantInt *, 8>
Constants;
4762 auto CheckForConstantInt = [&](
Value *
V) {
4770 if (!
all_of(Values, [&](AA::ValueAndContext &VAC) {
4771 return CheckForConstantInt(VAC.
getValue());
4773 for (
const BasicBlock *SuccBB :
successors(
SI.getParent()))
4775 return UsedAssumedInformation;
4778 unsigned MatchedCases = 0;
4779 for (
const auto &CaseIt :
SI.cases()) {
4780 if (
Constants.count(CaseIt.getCaseValue())) {
4782 AliveSuccessors.
push_back(&CaseIt.getCaseSuccessor()->front());
4789 AliveSuccessors.
push_back(&
SI.getDefaultDest()->front());
4790 return UsedAssumedInformation;
4796 if (AssumedLiveBlocks.empty()) {
4797 if (isAssumedDeadInternalFunction(
A))
4801 ToBeExploredFrom.insert(&
F->getEntryBlock().front());
4802 assumeLive(
A,
F->getEntryBlock());
4806 LLVM_DEBUG(
dbgs() <<
"[AAIsDead] Live [" << AssumedLiveBlocks.size() <<
"/"
4807 << getAnchorScope()->
size() <<
"] BBs and "
4808 << ToBeExploredFrom.size() <<
" exploration points and "
4809 << KnownDeadEnds.size() <<
" known dead ends\n");
4814 ToBeExploredFrom.end());
4815 decltype(ToBeExploredFrom) NewToBeExploredFrom;
4818 while (!Worklist.
empty()) {
4825 I =
I->getNextNode();
4827 AliveSuccessors.
clear();
4829 bool UsedAssumedInformation =
false;
4830 switch (
I->getOpcode()) {
4834 "Expected non-terminators to be handled already!");
4835 for (
const BasicBlock *SuccBB :
successors(
I->getParent()))
4838 case Instruction::Call:
4840 *
this, AliveSuccessors);
4842 case Instruction::Invoke:
4844 *
this, AliveSuccessors);
4846 case Instruction::UncondBr:
4847 UsedAssumedInformation = identifyAliveSuccessors(
4850 case Instruction::CondBr:
4852 *
this, AliveSuccessors);
4854 case Instruction::Switch:
4856 *
this, AliveSuccessors);
4860 if (UsedAssumedInformation) {
4861 NewToBeExploredFrom.insert(
I);
4862 }
else if (AliveSuccessors.
empty() ||
4863 (
I->isTerminator() &&
4864 AliveSuccessors.
size() <
I->getNumSuccessors())) {
4865 if (KnownDeadEnds.insert(
I))
4870 << AliveSuccessors.
size() <<
" UsedAssumedInformation: "
4871 << UsedAssumedInformation <<
"\n");
4873 for (
const Instruction *AliveSuccessor : AliveSuccessors) {
4874 if (!
I->isTerminator()) {
4875 assert(AliveSuccessors.size() == 1 &&
4876 "Non-terminator expected to have a single successor!");
4880 auto Edge = std::make_pair(
I->getParent(), AliveSuccessor->getParent());
4881 if (AssumedLiveEdges.insert(
Edge).second)
4883 if (assumeLive(
A, *AliveSuccessor->getParent()))
4890 if (NewToBeExploredFrom.size() != ToBeExploredFrom.size() ||
4891 llvm::any_of(NewToBeExploredFrom, [&](
const Instruction *
I) {
4892 return !ToBeExploredFrom.count(I);
4895 ToBeExploredFrom = std::move(NewToBeExploredFrom);
4904 if (ToBeExploredFrom.empty() &&
4905 getAnchorScope()->
size() == AssumedLiveBlocks.size() &&
4906 llvm::all_of(KnownDeadEnds, [](
const Instruction *DeadEndI) {
4907 return DeadEndI->isTerminator() && DeadEndI->getNumSuccessors() == 0;
4909 return indicatePessimisticFixpoint();
4914struct AAIsDeadCallSite final : AAIsDeadFunction {
4915 AAIsDeadCallSite(
const IRPosition &IRP, Attributor &
A)
4916 : AAIsDeadFunction(IRP,
A) {}
4925 "supported for call sites yet!");
4930 return indicatePessimisticFixpoint();
4934 void trackStatistics()
const override {}
4941struct AADereferenceableImpl : AADereferenceable {
4942 AADereferenceableImpl(
const IRPosition &IRP, Attributor &
A)
4943 : AADereferenceable(IRP,
A) {}
4944 using StateType = DerefState;
4948 Value &
V = *getAssociatedValue().stripPointerCasts();
4950 A.getAttrs(getIRPosition(),
4951 {Attribute::Dereferenceable, Attribute::DereferenceableOrNull},
4954 takeKnownDerefBytesMaximum(Attr.getValueAsInt());
4957 bool IsKnownNonNull;
4959 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNonNull);
4961 bool CanBeNull, CanBeFreed;
4962 takeKnownDerefBytesMaximum(
V.getPointerDereferenceableBytes(
4963 A.getDataLayout(), CanBeNull, CanBeFreed));
4965 if (Instruction *CtxI = getCtxI())
4966 followUsesInMBEC(*
this,
A, getState(), *CtxI);
4971 StateType &getState()
override {
return *
this; }
4972 const StateType &getState()
const override {
return *
this; }
4976 void addAccessedBytesForUse(Attributor &
A,
const Use *U,
const Instruction *
I,
4977 DerefState &State) {
4978 const Value *UseV =
U->get();
4983 if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
I->isVolatile())
4988 Loc->Ptr,
Offset,
A.getDataLayout(),
true);
4989 if (
Base &&
Base == &getAssociatedValue())
4990 State.addAccessedBytes(
Offset, Loc->Size.getValue());
4994 bool followUseInMBEC(Attributor &
A,
const Use *U,
const Instruction *
I,
4995 AADereferenceable::StateType &State) {
4996 bool IsNonNull =
false;
4997 bool TrackUse =
false;
4998 int64_t DerefBytes = getKnownNonNullAndDerefBytesForUse(
4999 A, *
this, getAssociatedValue(), U,
I, IsNonNull, TrackUse);
5000 LLVM_DEBUG(
dbgs() <<
"[AADereferenceable] Deref bytes: " << DerefBytes
5001 <<
" for instruction " << *
I <<
"\n");
5003 addAccessedBytesForUse(
A, U,
I, State);
5004 State.takeKnownDerefBytesMaximum(DerefBytes);
5011 bool IsKnownNonNull;
5013 A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
5014 if (IsAssumedNonNull &&
5015 A.hasAttr(getIRPosition(), Attribute::DereferenceableOrNull)) {
5016 A.removeAttrs(getIRPosition(), {Attribute::DereferenceableOrNull});
5017 return ChangeStatus::CHANGED;
5022 void getDeducedAttributes(Attributor &
A, LLVMContext &Ctx,
5023 SmallVectorImpl<Attribute> &Attrs)
const override {
5025 bool IsKnownNonNull;
5027 A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
5028 if (IsAssumedNonNull)
5029 Attrs.emplace_back(Attribute::getWithDereferenceableBytes(
5030 Ctx, getAssumedDereferenceableBytes()));
5032 Attrs.emplace_back(Attribute::getWithDereferenceableOrNullBytes(
5033 Ctx, getAssumedDereferenceableBytes()));
5037 const std::string getAsStr(Attributor *
A)
const override {
5038 if (!getAssumedDereferenceableBytes())
5039 return "unknown-dereferenceable";
5040 bool IsKnownNonNull;
5041 bool IsAssumedNonNull =
false;
5044 *
A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
5045 return std::string(
"dereferenceable") +
5046 (IsAssumedNonNull ?
"" :
"_or_null") +
5047 (isAssumedGlobal() ?
"_globally" :
"") +
"<" +
5048 std::to_string(getKnownDereferenceableBytes()) +
"-" +
5049 std::to_string(getAssumedDereferenceableBytes()) +
">" +
5050 (!
A ?
" [non-null is unknown]" :
"");
5055struct AADereferenceableFloating : AADereferenceableImpl {
5056 AADereferenceableFloating(
const IRPosition &IRP, Attributor &
A)
5057 : AADereferenceableImpl(IRP,
A) {}
5062 bool UsedAssumedInformation =
false;
5064 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
5066 Values.
push_back({getAssociatedValue(), getCtxI()});
5069 Stripped = Values.
size() != 1 ||
5070 Values.
front().getValue() != &getAssociatedValue();
5073 const DataLayout &
DL =
A.getDataLayout();
5076 auto VisitValueCB = [&](
const Value &
V) ->
bool {
5078 DL.getIndexSizeInBits(
V.getType()->getPointerAddressSpace());
5079 APInt
Offset(IdxWidth, 0);
5084 const auto *AA =
A.getAAFor<AADereferenceable>(
5086 int64_t DerefBytes = 0;
5087 if (!AA || (!Stripped &&
this == AA)) {
5090 bool CanBeNull, CanBeFreed;
5092 Base->getPointerDereferenceableBytes(
DL, CanBeNull, CanBeFreed);
5093 T.GlobalState.indicatePessimisticFixpoint();
5096 DerefBytes =
DS.DerefBytesState.getAssumed();
5097 T.GlobalState &=
DS.GlobalState;
5103 int64_t OffsetSExt =
Offset.getSExtValue();
5107 T.takeAssumedDerefBytesMinimum(
5108 std::max(int64_t(0), DerefBytes - OffsetSExt));
5113 T.takeKnownDerefBytesMaximum(
5114 std::max(int64_t(0), DerefBytes - OffsetSExt));
5115 T.indicatePessimisticFixpoint();
5116 }
else if (OffsetSExt > 0) {
5122 T.indicatePessimisticFixpoint();
5126 return T.isValidState();
5129 for (
const auto &VAC : Values)
5130 if (!VisitValueCB(*VAC.
getValue()))
5131 return indicatePessimisticFixpoint();
5137 void trackStatistics()
const override {
5143struct AADereferenceableReturned final
5144 : AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl> {
5146 AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl>;
5147 AADereferenceableReturned(
const IRPosition &IRP, Attributor &
A)
5151 void trackStatistics()
const override {
5157struct AADereferenceableArgument final
5158 : AAArgumentFromCallSiteArguments<AADereferenceable,
5159 AADereferenceableImpl> {
5161 AAArgumentFromCallSiteArguments<AADereferenceable, AADereferenceableImpl>;
5162 AADereferenceableArgument(
const IRPosition &IRP, Attributor &
A)
5166 void trackStatistics()
const override {
5172struct AADereferenceableCallSiteArgument final : AADereferenceableFloating {
5173 AADereferenceableCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
5174 : AADereferenceableFloating(IRP,
A) {}
5177 void trackStatistics()
const override {
5183struct AADereferenceableCallSiteReturned final
5184 : AACalleeToCallSite<AADereferenceable, AADereferenceableImpl> {
5185 using Base = AACalleeToCallSite<AADereferenceable, AADereferenceableImpl>;
5186 AADereferenceableCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
5190 void trackStatistics()
const override {
5200static unsigned getKnownAlignForUse(Attributor &
A, AAAlign &QueryingAA,
5201 Value &AssociatedValue,
const Use *U,
5202 const Instruction *
I,
bool &TrackUse) {
5211 if (
GEP->hasAllConstantIndices())
5216 switch (
II->getIntrinsicID()) {
5217 case Intrinsic::ptrmask: {
5219 const auto *ConstVals =
A.getAAFor<AAPotentialConstantValues>(
5221 const auto *AlignAA =
A.getAAFor<AAAlign>(
5223 if (ConstVals && ConstVals->isValidState() && ConstVals->isAtFixpoint()) {
5224 unsigned ShiftValue = std::min(ConstVals->getAssumedMinTrailingZeros(),
5226 Align ConstAlign(UINT64_C(1) << ShiftValue);
5227 if (ConstAlign >= AlignAA->getKnownAlign())
5228 return Align(1).value();
5231 return AlignAA->getKnownAlign().
value();
5234 case Intrinsic::amdgcn_make_buffer_rsrc: {
5235 const auto *AlignAA =
A.getAAFor<AAAlign>(
5238 return AlignAA->getKnownAlign().
value();
5256 MA = MaybeAlign(AlignAA->getKnownAlign());
5259 const DataLayout &
DL =
A.getDataLayout();
5260 const Value *UseV =
U->get();
5262 if (
SI->getPointerOperand() == UseV)
5263 MA =
SI->getAlign();
5265 if (LI->getPointerOperand() == UseV)
5266 MA = LI->getAlign();
5268 if (AI->getPointerOperand() == UseV)
5269 MA = AI->getAlign();
5271 if (AI->getPointerOperand() == UseV)
5272 MA = AI->getAlign();
5278 unsigned Alignment = MA->value();
5282 if (
Base == &AssociatedValue) {
5287 uint32_t
gcd = std::gcd(uint32_t(
abs((int32_t)
Offset)), Alignment);
5295struct AAAlignImpl : AAAlign {
5296 AAAlignImpl(
const IRPosition &IRP, Attributor &
A) : AAAlign(IRP,
A) {}
5301 A.getAttrs(getIRPosition(), {Attribute::Alignment},
Attrs);
5303 takeKnownMaximum(Attr.getValueAsInt());
5305 Value &
V = *getAssociatedValue().stripPointerCasts();
5306 takeKnownMaximum(
V.getPointerAlignment(
A.getDataLayout()).value());
5308 if (Instruction *CtxI = getCtxI())
5309 followUsesInMBEC(*
this,
A, getState(), *CtxI);
5317 Value &AssociatedValue = getAssociatedValue();
5319 return ChangeStatus::UNCHANGED;
5321 for (
const Use &U : AssociatedValue.
uses()) {
5323 if (
SI->getPointerOperand() == &AssociatedValue)
5324 if (
SI->getAlign() < getAssumedAlign()) {
5326 "Number of times alignment added to a store");
5327 SI->setAlignment(getAssumedAlign());
5328 InstrChanged = ChangeStatus::CHANGED;
5331 if (LI->getPointerOperand() == &AssociatedValue)
5332 if (LI->getAlign() < getAssumedAlign()) {
5333 LI->setAlignment(getAssumedAlign());
5335 "Number of times alignment added to a load");
5336 InstrChanged = ChangeStatus::CHANGED;
5339 if (RMW->getPointerOperand() == &AssociatedValue) {
5340 if (RMW->getAlign() < getAssumedAlign()) {
5342 "Number of times alignment added to atomicrmw");
5344 RMW->setAlignment(getAssumedAlign());
5345 InstrChanged = ChangeStatus::CHANGED;
5349 if (CAS->getPointerOperand() == &AssociatedValue) {
5350 if (CAS->getAlign() < getAssumedAlign()) {
5352 "Number of times alignment added to cmpxchg");
5353 CAS->setAlignment(getAssumedAlign());
5354 InstrChanged = ChangeStatus::CHANGED;
5362 Align InheritAlign =
5363 getAssociatedValue().getPointerAlignment(
A.getDataLayout());
5364 if (InheritAlign >= getAssumedAlign())
5365 return InstrChanged;
5366 return Changed | InstrChanged;
5374 void getDeducedAttributes(Attributor &
A, LLVMContext &Ctx,
5375 SmallVectorImpl<Attribute> &Attrs)
const override {
5376 if (getAssumedAlign() > 1)
5378 Attribute::getWithAlignment(Ctx,
Align(getAssumedAlign())));
5382 bool followUseInMBEC(Attributor &
A,
const Use *U,
const Instruction *
I,
5383 AAAlign::StateType &State) {
5384 bool TrackUse =
false;
5386 unsigned int KnownAlign =
5387 getKnownAlignForUse(
A, *
this, getAssociatedValue(), U,
I, TrackUse);
5388 State.takeKnownMaximum(KnownAlign);
5394 const std::string getAsStr(Attributor *
A)
const override {
5395 return "align<" + std::to_string(getKnownAlign().value()) +
"-" +
5396 std::to_string(getAssumedAlign().value()) +
">";
5401struct AAAlignFloating : AAAlignImpl {
5402 AAAlignFloating(
const IRPosition &IRP, Attributor &
A) : AAAlignImpl(IRP,
A) {}
5406 const DataLayout &
DL =
A.getDataLayout();
5409 bool UsedAssumedInformation =
false;
5411 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
5413 Values.
push_back({getAssociatedValue(), getCtxI()});
5416 Stripped = Values.
size() != 1 ||
5417 Values.
front().getValue() != &getAssociatedValue();
5421 auto VisitValueCB = [&](
Value &
V) ->
bool {
5425 DepClassTy::REQUIRED);
5426 if (!AA || (!Stripped &&
this == AA)) {
5428 unsigned Alignment = 1;
5441 Alignment =
V.getPointerAlignment(
DL).value();
5444 T.takeKnownMaximum(Alignment);
5445 T.indicatePessimisticFixpoint();
5448 const AAAlign::StateType &
DS = AA->
getState();
5451 return T.isValidState();
5454 for (
const auto &VAC : Values) {
5455 if (!VisitValueCB(*VAC.
getValue()))
5456 return indicatePessimisticFixpoint();
5469struct AAAlignReturned final
5470 : AAReturnedFromReturnedValues<AAAlign, AAAlignImpl> {
5471 using Base = AAReturnedFromReturnedValues<AAAlign, AAAlignImpl>;
5472 AAAlignReturned(
const IRPosition &IRP, Attributor &
A) :
Base(IRP,
A) {}
5479struct AAAlignArgument final
5480 : AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl> {
5481 using Base = AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl>;
5482 AAAlignArgument(
const IRPosition &IRP, Attributor &
A) :
Base(IRP,
A) {}
5489 if (
A.getInfoCache().isInvolvedInMustTailCall(*getAssociatedArgument()))
5490 return ChangeStatus::UNCHANGED;
5491 return Base::manifest(
A);
5498struct AAAlignCallSiteArgument final : AAAlignFloating {
5499 AAAlignCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
5500 : AAAlignFloating(IRP,
A) {}
5507 if (Argument *Arg = getAssociatedArgument())
5508 if (
A.getInfoCache().isInvolvedInMustTailCall(*Arg))
5509 return ChangeStatus::UNCHANGED;
5511 Align InheritAlign =
5512 getAssociatedValue().getPointerAlignment(
A.getDataLayout());
5513 if (InheritAlign >= getAssumedAlign())
5514 Changed = ChangeStatus::UNCHANGED;
5521 if (Argument *Arg = getAssociatedArgument()) {
5524 const auto *ArgAlignAA =
A.getAAFor<AAAlign>(
5527 takeKnownMaximum(ArgAlignAA->getKnownAlign().value());
5537struct AAAlignCallSiteReturned final
5538 : AACalleeToCallSite<AAAlign, AAAlignImpl> {
5539 using Base = AACalleeToCallSite<AAAlign, AAAlignImpl>;
5540 AAAlignCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
5546 switch (
II->getIntrinsicID()) {
5547 case Intrinsic::ptrmask: {
5551 const auto *ConstVals =
A.getAAFor<AAPotentialConstantValues>(
5553 if (ConstVals && ConstVals->isValidState()) {
5554 unsigned ShiftValue =
5555 std::min(ConstVals->getAssumedMinTrailingZeros(),
5556 Value::MaxAlignmentExponent);
5557 Alignment =
Align(UINT64_C(1) << ShiftValue);
5561 const auto *AlignAA =
5563 DepClassTy::REQUIRED);
5565 Alignment = std::max(AlignAA->getAssumedAlign(), Alignment);
5572 std::min(this->getAssumedAlign(), Alignment).value());
5578 case Intrinsic::amdgcn_make_buffer_rsrc: {
5579 const auto *AlignAA =
5581 DepClassTy::REQUIRED);
5584 this->getState(), AlignAA->getAssumedAlign().
value());
5591 return Base::updateImpl(
A);
5600struct AANoReturnImpl :
public AANoReturn {
5601 AANoReturnImpl(
const IRPosition &IRP, Attributor &
A) : AANoReturn(IRP,
A) {}
5607 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
5612 const std::string getAsStr(Attributor *
A)
const override {
5613 return getAssumed() ?
"noreturn" :
"may-return";
5618 auto CheckForNoReturn = [](
Instruction &) {
return false; };
5619 bool UsedAssumedInformation =
false;
5620 if (!
A.checkForAllInstructions(CheckForNoReturn, *
this,
5621 {(unsigned)Instruction::Ret},
5622 UsedAssumedInformation))
5623 return indicatePessimisticFixpoint();
5624 return ChangeStatus::UNCHANGED;
5628struct AANoReturnFunction final : AANoReturnImpl {
5629 AANoReturnFunction(
const IRPosition &IRP, Attributor &
A)
5630 : AANoReturnImpl(IRP,
A) {}
5637struct AANoReturnCallSite final
5638 : AACalleeToCallSite<AANoReturn, AANoReturnImpl> {
5639 AANoReturnCallSite(
const IRPosition &IRP, Attributor &
A)
5640 : AACalleeToCallSite<AANoReturn, AANoReturnImpl>(IRP,
A) {}
5651struct AAInstanceInfoImpl :
public AAInstanceInfo {
5652 AAInstanceInfoImpl(
const IRPosition &IRP, Attributor &
A)
5653 : AAInstanceInfo(IRP,
A) {}
5657 Value &
V = getAssociatedValue();
5659 if (
C->isThreadDependent())
5660 indicatePessimisticFixpoint();
5662 indicateOptimisticFixpoint();
5668 indicateOptimisticFixpoint();
5673 A.getInfoCache().getAnalysisResultForFunction<CycleAnalysis>(
5676 indicatePessimisticFixpoint();
5686 Value &
V = getAssociatedValue();
5689 Scope =
I->getFunction();
5692 if (!
Scope->hasLocalLinkage())
5696 return indicateOptimisticFixpoint();
5698 bool IsKnownNoRecurse;
5704 auto UsePred = [&](
const Use &
U,
bool &Follow) {
5719 if (!Callee || !
Callee->hasLocalLinkage())
5723 const auto *ArgInstanceInfoAA =
A.getAAFor<AAInstanceInfo>(
5725 DepClassTy::OPTIONAL);
5726 if (!ArgInstanceInfoAA ||
5727 !ArgInstanceInfoAA->isAssumedUniqueForAnalysis())
5732 A, *CB, *Scope, *
this,
nullptr,
5733 [Scope](
const Function &Fn) {
return &Fn !=
Scope; }))
5740 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
5742 auto *Ptr =
SI->getPointerOperand()->stripPointerCasts();
5750 if (!
A.checkForAllUses(UsePred, *
this, V,
true,
5751 DepClassTy::OPTIONAL,
5752 true, EquivalentUseCB))
5753 return indicatePessimisticFixpoint();
5759 const std::string getAsStr(Attributor *
A)
const override {
5760 return isAssumedUniqueForAnalysis() ?
"<unique [fAa]>" :
"<unknown>";
5764 void trackStatistics()
const override {}
5768struct AAInstanceInfoFloating : AAInstanceInfoImpl {
5769 AAInstanceInfoFloating(
const IRPosition &IRP, Attributor &
A)
5770 : AAInstanceInfoImpl(IRP,
A) {}
5774struct AAInstanceInfoArgument final : AAInstanceInfoFloating {
5775 AAInstanceInfoArgument(
const IRPosition &IRP, Attributor &
A)
5776 : AAInstanceInfoFloating(IRP,
A) {}
5780struct AAInstanceInfoCallSiteArgument final : AAInstanceInfoImpl {
5781 AAInstanceInfoCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
5782 : AAInstanceInfoImpl(IRP,
A) {}
5790 Argument *Arg = getAssociatedArgument();
5792 return indicatePessimisticFixpoint();
5795 A.getAAFor<AAInstanceInfo>(*
this, ArgPos, DepClassTy::REQUIRED);
5797 return indicatePessimisticFixpoint();
5803struct AAInstanceInfoReturned final : AAInstanceInfoImpl {
5804 AAInstanceInfoReturned(
const IRPosition &IRP, Attributor &
A)
5805 : AAInstanceInfoImpl(IRP,
A) {
5821struct AAInstanceInfoCallSiteReturned final : AAInstanceInfoFloating {
5822 AAInstanceInfoCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
5823 : AAInstanceInfoFloating(IRP,
A) {}
5830 bool IgnoreSubsumingPositions) {
5831 assert(ImpliedAttributeKind == Attribute::Captures &&
5832 "Unexpected attribute kind");
5842 V.getType()->getPointerAddressSpace() == 0)) {
5847 A.getAttrs(IRP, {Attribute::Captures}, Attrs,
5857 {Attribute::Captures, Attribute::ByVal}, Attrs,
5895 bool NoThrow =
F.doesNotThrow();
5896 bool IsVoidReturn =
F.getReturnType()->isVoidTy();
5897 if (
ReadOnly && NoThrow && IsVoidReturn) {
5910 if (NoThrow && IsVoidReturn)
5915 if (!NoThrow || ArgNo < 0 ||
5916 !
F.getAttributes().hasAttrSomewhere(Attribute::Returned))
5919 for (
unsigned U = 0, E =
F.arg_size(); U < E; ++U)
5920 if (
F.hasParamAttribute(U, Attribute::Returned)) {
5921 if (U ==
unsigned(ArgNo))
5948 void getDeducedAttributes(Attributor &
A, LLVMContext &Ctx,
5949 SmallVectorImpl<Attribute> &Attrs)
const override {
5950 if (!isAssumedNoCaptureMaybeReturned())
5953 if (isArgumentPosition()) {
5954 if (isAssumedNoCapture())
5955 Attrs.emplace_back(Attribute::get(Ctx, Attribute::Captures));
5957 Attrs.emplace_back(Attribute::get(Ctx,
"no-capture-maybe-returned"));
5962 const std::string getAsStr(Attributor *
A)
const override {
5963 if (isKnownNoCapture())
5964 return "known not-captured";
5965 if (isAssumedNoCapture())
5966 return "assumed not-captured";
5967 if (isKnownNoCaptureMaybeReturned())
5968 return "known not-captured-maybe-returned";
5969 if (isAssumedNoCaptureMaybeReturned())
5970 return "assumed not-captured-maybe-returned";
5971 return "assumed-captured";
5976 bool checkUse(Attributor &
A, AANoCapture::StateType &State,
const Use &U,
5979 LLVM_DEBUG(
dbgs() <<
"[AANoCapture] Check use: " << *
U.get() <<
" in "
5985 return isCapturedIn(State,
true,
true,
5992 return isCapturedIn(State,
true,
true,
5998 return isCapturedIn(State,
false,
false,
6000 return isCapturedIn(State,
true,
true,
6008 return isCapturedIn(State,
true,
true,
6015 bool IsKnownNoCapture;
6016 const AANoCapture *ArgNoCaptureAA =
nullptr;
6018 A,
this, CSArgPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
6020 if (IsAssumedNoCapture)
6021 return isCapturedIn(State,
false,
false,
6025 return isCapturedIn(State,
false,
false,
6030 return isCapturedIn(State,
true,
true,
6037 static bool isCapturedIn(AANoCapture::StateType &State,
bool CapturedInMem,
6038 bool CapturedInInt,
bool CapturedInRet) {
6039 LLVM_DEBUG(
dbgs() <<
" - captures [Mem " << CapturedInMem <<
"|Int "
6040 << CapturedInInt <<
"|Ret " << CapturedInRet <<
"]\n");
6052 const IRPosition &IRP = getIRPosition();
6056 return indicatePessimisticFixpoint();
6063 return indicatePessimisticFixpoint();
6071 T.addKnownBits(NOT_CAPTURED_IN_MEM);
6073 addKnownBits(NOT_CAPTURED_IN_MEM);
6080 auto CheckReturnedArgs = [&](
bool &UsedAssumedInformation) {
6084 UsedAssumedInformation))
6086 bool SeenConstant =
false;
6087 for (
const AA::ValueAndContext &VAC : Values) {
6091 SeenConstant =
true;
6093 VAC.
getValue() == getAssociatedArgument())
6099 bool IsKnownNoUnwind;
6102 bool IsVoidTy =
F->getReturnType()->isVoidTy();
6103 bool UsedAssumedInformation =
false;
6104 if (IsVoidTy || CheckReturnedArgs(UsedAssumedInformation)) {
6105 T.addKnownBits(NOT_CAPTURED_IN_RET);
6106 if (
T.isKnown(NOT_CAPTURED_IN_MEM))
6108 if (IsKnownNoUnwind && (IsVoidTy || !UsedAssumedInformation)) {
6109 addKnownBits(NOT_CAPTURED_IN_RET);
6110 if (isKnown(NOT_CAPTURED_IN_MEM))
6111 return indicateOptimisticFixpoint();
6116 auto UseCheck = [&](
const Use &
U,
bool &Follow) ->
bool {
6125 return checkUse(
A,
T, U, Follow);
6128 if (!
A.checkForAllUses(UseCheck, *
this, *V))
6129 return indicatePessimisticFixpoint();
6132 auto Assumed = S.getAssumed();
6133 S.intersectAssumedBits(
T.getAssumed());
6134 if (!isAssumedNoCaptureMaybeReturned())
6135 return indicatePessimisticFixpoint();
6141struct AANoCaptureArgument final : AANoCaptureImpl {
6142 AANoCaptureArgument(
const IRPosition &IRP, Attributor &
A)
6143 : AANoCaptureImpl(IRP,
A) {}
6150struct AANoCaptureCallSiteArgument final : AANoCaptureImpl {
6151 AANoCaptureCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
6152 : AANoCaptureImpl(IRP,
A) {}
6160 Argument *Arg = getAssociatedArgument();
6162 return indicatePessimisticFixpoint();
6164 bool IsKnownNoCapture;
6165 const AANoCapture *ArgAA =
nullptr;
6167 A,
this, ArgPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
6169 return ChangeStatus::UNCHANGED;
6171 return indicatePessimisticFixpoint();
6176 void trackStatistics()
const override {
6182struct AANoCaptureFloating final : AANoCaptureImpl {
6183 AANoCaptureFloating(
const IRPosition &IRP, Attributor &
A)
6184 : AANoCaptureImpl(IRP,
A) {}
6187 void trackStatistics()
const override {
6193struct AANoCaptureReturned final : AANoCaptureImpl {
6194 AANoCaptureReturned(
const IRPosition &IRP, Attributor &
A)
6195 : AANoCaptureImpl(IRP,
A) {
6210 void trackStatistics()
const override {}
6214struct AANoCaptureCallSiteReturned final : AANoCaptureImpl {
6215 AANoCaptureCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
6216 : AANoCaptureImpl(IRP,
A) {}
6222 determineFunctionCaptureCapabilities(getIRPosition(), *
F, *
this);
6226 void trackStatistics()
const override {
6243 dbgs() <<
"[ValueSimplify] is assumed to be "
6246 dbgs() <<
"[ValueSimplify] is assumed to be <none>\n";
6258 if (getAssociatedValue().
getType()->isVoidTy())
6259 indicatePessimisticFixpoint();
6260 if (
A.hasSimplificationCallback(getIRPosition()))
6261 indicatePessimisticFixpoint();
6265 const std::string getAsStr(Attributor *
A)
const override {
6267 dbgs() <<
"SAV: " << (bool)SimplifiedAssociatedValue <<
" ";
6268 if (SimplifiedAssociatedValue && *SimplifiedAssociatedValue)
6269 dbgs() <<
"SAV: " << **SimplifiedAssociatedValue <<
" ";
6271 return isValidState() ? (isAtFixpoint() ?
"simplified" :
"maybe-simple")
6276 void trackStatistics()
const override {}
6279 std::optional<Value *>
6280 getAssumedSimplifiedValue(Attributor &
A)
const override {
6281 return SimplifiedAssociatedValue;
6288 static Value *ensureType(Attributor &
A,
Value &V,
Type &Ty, Instruction *CtxI,
6292 if (CtxI &&
V.getType()->canLosslesslyBitCastTo(&Ty))
6294 : BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
6303 static Value *reproduceInst(Attributor &
A,
6304 const AbstractAttribute &QueryingAA,
6305 Instruction &
I,
Type &Ty, Instruction *CtxI,
6307 assert(CtxI &&
"Cannot reproduce an instruction without context!");
6308 if (
Check && (
I.mayReadFromMemory() ||
6313 Value *NewOp = reproduceValue(
A, QueryingAA, *
Op, Ty, CtxI,
Check, VMap);
6315 assert(
Check &&
"Manifest of new value unexpectedly failed!");
6337 static Value *reproduceValue(Attributor &
A,
6338 const AbstractAttribute &QueryingAA,
Value &V,
6339 Type &Ty, Instruction *CtxI,
bool Check,
6341 if (
const auto &NewV = VMap.
lookup(&V))
6343 bool UsedAssumedInformation =
false;
6344 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
6346 if (!SimpleV.has_value())
6350 EffectiveV = *SimpleV;
6355 return ensureType(
A, *EffectiveV, Ty, CtxI,
Check);
6357 if (
Value *NewV = reproduceInst(
A, QueryingAA, *
I, Ty, CtxI,
Check, VMap))
6358 return ensureType(
A, *NewV, Ty, CtxI,
Check);
6364 Value *manifestReplacementValue(Attributor &
A, Instruction *CtxI)
const {
6365 Value *NewV = SimplifiedAssociatedValue
6366 ? *SimplifiedAssociatedValue
6368 if (NewV && NewV != &getAssociatedValue()) {
6372 if (reproduceValue(
A, *
this, *NewV, *getAssociatedType(), CtxI,
6374 return reproduceValue(
A, *
this, *NewV, *getAssociatedType(), CtxI,
6382 bool checkAndUpdate(Attributor &
A,
const AbstractAttribute &QueryingAA,
6383 const IRPosition &IRP,
bool Simplify =
true) {
6384 bool UsedAssumedInformation =
false;
6387 QueryingValueSimplified =
A.getAssumedSimplified(
6389 return unionAssumed(QueryingValueSimplified);
6393 template <
typename AAType>
bool askSimplifiedValueFor(Attributor &
A) {
6394 if (!getAssociatedValue().
getType()->isIntegerTy())
6399 A.getAAFor<AAType>(*
this, getIRPosition(), DepClassTy::NONE);
6403 std::optional<Constant *> COpt = AA->getAssumedConstant(
A);
6406 SimplifiedAssociatedValue = std::nullopt;
6407 A.recordDependence(*AA, *
this, DepClassTy::OPTIONAL);
6410 if (
auto *
C = *COpt) {
6411 SimplifiedAssociatedValue =
C;
6412 A.recordDependence(*AA, *
this, DepClassTy::OPTIONAL);
6418 bool askSimplifiedValueForOtherAAs(Attributor &
A) {
6419 if (askSimplifiedValueFor<AAValueConstantRange>(
A))
6421 if (askSimplifiedValueFor<AAPotentialConstantValues>(
A))
6429 for (
auto &U : getAssociatedValue().uses()) {
6434 IP =
PHI->getIncomingBlock(U)->getTerminator();
6435 if (
auto *NewV = manifestReplacementValue(
A, IP)) {
6437 <<
" -> " << *NewV <<
" :: " << *
this <<
"\n");
6438 if (
A.changeUseAfterManifest(U, *NewV))
6439 Changed = ChangeStatus::CHANGED;
6443 return Changed | AAValueSimplify::manifest(
A);
6448 SimplifiedAssociatedValue = &getAssociatedValue();
6449 return AAValueSimplify::indicatePessimisticFixpoint();
6453struct AAValueSimplifyArgument final : AAValueSimplifyImpl {
6454 AAValueSimplifyArgument(
const IRPosition &IRP, Attributor &
A)
6455 : AAValueSimplifyImpl(IRP,
A) {}
6458 AAValueSimplifyImpl::initialize(
A);
6459 if (
A.hasAttr(getIRPosition(),
6460 {Attribute::InAlloca, Attribute::Preallocated,
6461 Attribute::StructRet, Attribute::Nest, Attribute::ByVal},
6463 indicatePessimisticFixpoint();
6470 Argument *Arg = getAssociatedArgument();
6476 return indicatePessimisticFixpoint();
6479 auto Before = SimplifiedAssociatedValue;
6481 auto PredForCallSite = [&](AbstractCallSite ACS) {
6482 const IRPosition &ACSArgPos =
6493 bool UsedAssumedInformation =
false;
6494 std::optional<Constant *> SimpleArgOp =
6495 A.getAssumedConstant(ACSArgPos, *
this, UsedAssumedInformation);
6502 return unionAssumed(*SimpleArgOp);
6507 bool UsedAssumedInformation =
false;
6508 if (hasCallBaseContext() &&
6509 getCallBaseContext()->getCalledOperand() == Arg->
getParent())
6511 AbstractCallSite(&getCallBaseContext()->getCalledOperandUse()));
6513 Success =
A.checkForAllCallSites(PredForCallSite, *
this,
true,
6514 UsedAssumedInformation);
6517 if (!askSimplifiedValueForOtherAAs(
A))
6518 return indicatePessimisticFixpoint();
6521 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6522 : ChangeStatus ::CHANGED;
6526 void trackStatistics()
const override {
6531struct AAValueSimplifyReturned : AAValueSimplifyImpl {
6532 AAValueSimplifyReturned(
const IRPosition &IRP, Attributor &
A)
6533 : AAValueSimplifyImpl(IRP,
A) {}
6536 std::optional<Value *>
6537 getAssumedSimplifiedValue(Attributor &
A)
const override {
6538 if (!isValidState())
6540 return SimplifiedAssociatedValue;
6545 auto Before = SimplifiedAssociatedValue;
6549 return checkAndUpdate(
6554 bool UsedAssumedInformation =
false;
6555 if (!
A.checkForAllInstructions(ReturnInstCB, *
this, {Instruction::Ret},
6556 UsedAssumedInformation))
6557 if (!askSimplifiedValueForOtherAAs(
A))
6558 return indicatePessimisticFixpoint();
6561 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6562 : ChangeStatus ::CHANGED;
6568 return ChangeStatus::UNCHANGED;
6572 void trackStatistics()
const override {
6577struct AAValueSimplifyFloating : AAValueSimplifyImpl {
6578 AAValueSimplifyFloating(
const IRPosition &IRP, Attributor &
A)
6579 : AAValueSimplifyImpl(IRP,
A) {}
6583 AAValueSimplifyImpl::initialize(
A);
6584 Value &
V = getAnchorValue();
6588 indicatePessimisticFixpoint();
6593 auto Before = SimplifiedAssociatedValue;
6594 if (!askSimplifiedValueForOtherAAs(
A))
6595 return indicatePessimisticFixpoint();
6598 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6599 : ChangeStatus ::CHANGED;
6603 void trackStatistics()
const override {
6608struct AAValueSimplifyFunction : AAValueSimplifyImpl {
6609 AAValueSimplifyFunction(
const IRPosition &IRP, Attributor &
A)
6610 : AAValueSimplifyImpl(IRP,
A) {}
6614 SimplifiedAssociatedValue =
nullptr;
6615 indicateOptimisticFixpoint();
6620 "AAValueSimplify(Function|CallSite)::updateImpl will not be called");
6623 void trackStatistics()
const override {
6628struct AAValueSimplifyCallSite : AAValueSimplifyFunction {
6629 AAValueSimplifyCallSite(
const IRPosition &IRP, Attributor &
A)
6630 : AAValueSimplifyFunction(IRP,
A) {}
6632 void trackStatistics()
const override {
6637struct AAValueSimplifyCallSiteReturned : AAValueSimplifyImpl {
6638 AAValueSimplifyCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
6639 : AAValueSimplifyImpl(IRP,
A) {}
6642 AAValueSimplifyImpl::initialize(
A);
6643 Function *Fn = getAssociatedFunction();
6644 assert(Fn &&
"Did expect an associted function");
6645 for (Argument &Arg : Fn->
args()) {
6650 checkAndUpdate(
A, *
this, IRP))
6651 indicateOptimisticFixpoint();
6653 indicatePessimisticFixpoint();
6661 return indicatePessimisticFixpoint();
6664 void trackStatistics()
const override {
6669struct AAValueSimplifyCallSiteArgument : AAValueSimplifyFloating {
6670 AAValueSimplifyCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
6671 : AAValueSimplifyFloating(IRP,
A) {}
6677 auto *FloatAA =
A.lookupAAFor<AAValueSimplify>(
6679 if (FloatAA && FloatAA->getState().isValidState())
6682 if (
auto *NewV = manifestReplacementValue(
A, getCtxI())) {
6684 ->getArgOperandUse(getCallSiteArgNo());
6685 if (
A.changeUseAfterManifest(U, *NewV))
6686 Changed = ChangeStatus::CHANGED;
6689 return Changed | AAValueSimplify::manifest(
A);
6692 void trackStatistics()
const override {
6700struct AAHeapToStackFunction final :
public AAHeapToStack {
6702 static bool isGlobalizedLocal(
const CallBase &CB) {
6704 return A.
isValid() &&
A.getValueAsString() ==
"__kmpc_alloc_shared";
6707 struct AllocationInfo {
6712 bool IsGlobalizedLocal =
false;
6719 } Status = STACK_DUE_TO_USE;
6723 bool HasPotentiallyFreeingUnknownUses =
false;
6727 bool MoveAllocaIntoEntry =
true;
6730 SmallSetVector<CallBase *, 1> PotentialFreeCalls{};
6733 struct DeallocationInfo {
6741 bool MightFreeUnknownObjects =
false;
6744 SmallSetVector<CallBase *, 1> PotentialAllocationCalls{};
6747 AAHeapToStackFunction(
const IRPosition &IRP, Attributor &
A)
6748 : AAHeapToStack(IRP,
A) {}
6750 ~AAHeapToStackFunction()
override {
6753 for (
auto &It : AllocationInfos)
6754 It.second->~AllocationInfo();
6755 for (
auto &It : DeallocationInfos)
6756 It.second->~DeallocationInfo();
6760 AAHeapToStack::initialize(
A);
6763 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6770 DeallocationInfos[CB] =
new (
A.Allocator) DeallocationInfo{CB, FreedOp};
6777 auto *I8Ty = Type::getInt8Ty(CB->
getParent()->getContext());
6779 AllocationInfo *AI =
new (
A.Allocator) AllocationInfo{CB};
6780 AllocationInfos[CB] = AI;
6781 AI->IsGlobalizedLocal = isGlobalizedLocal(*CB);
6787 bool UsedAssumedInformation =
false;
6788 bool Success =
A.checkForAllCallLikeInstructions(
6789 AllocationIdentifierCB, *
this, UsedAssumedInformation,
6793 assert(
Success &&
"Did not expect the call base visit callback to fail!");
6796 [](
const IRPosition &,
const AbstractAttribute *,
6797 bool &) -> std::optional<Value *> {
return nullptr; };
6798 for (
const auto &It : AllocationInfos)
6801 for (
const auto &It : DeallocationInfos)
6806 const std::string getAsStr(Attributor *
A)
const override {
6807 unsigned NumH2SMallocs = 0, NumInvalidMallocs = 0;
6808 for (
const auto &It : AllocationInfos) {
6809 if (It.second->Status == AllocationInfo::INVALID)
6810 ++NumInvalidMallocs;
6814 return "[H2S] Mallocs Good/Bad: " + std::to_string(NumH2SMallocs) +
"/" +
6815 std::to_string(NumInvalidMallocs);
6819 void trackStatistics()
const override {
6821 MallocCalls, Function,
6822 "Number of malloc/calloc/aligned_alloc calls converted to allocas");
6823 for (
const auto &It : AllocationInfos)
6824 if (It.second->Status != AllocationInfo::INVALID)
6828 bool isAssumedHeapToStack(
const CallBase &CB)
const override {
6830 if (AllocationInfo *AI =
6831 AllocationInfos.lookup(
const_cast<CallBase *
>(&CB)))
6832 return AI->Status != AllocationInfo::INVALID;
6836 bool isAssumedHeapToStackRemovedFree(CallBase &CB)
const override {
6837 if (!isValidState())
6840 for (
const auto &It : AllocationInfos) {
6841 AllocationInfo &AI = *It.second;
6842 if (AI.Status == AllocationInfo::INVALID)
6845 if (AI.PotentialFreeCalls.count(&CB))
6853 assert(getState().isValidState() &&
6854 "Attempted to manifest an invalid state!");
6858 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6860 for (
auto &It : AllocationInfos) {
6861 AllocationInfo &AI = *It.second;
6862 if (AI.Status == AllocationInfo::INVALID)
6865 for (CallBase *FreeCall : AI.PotentialFreeCalls) {
6866 LLVM_DEBUG(
dbgs() <<
"H2S: Removing free call: " << *FreeCall <<
"\n");
6867 A.deleteAfterManifest(*FreeCall);
6868 HasChanged = ChangeStatus::CHANGED;
6871 LLVM_DEBUG(
dbgs() <<
"H2S: Removing malloc-like call: " << *AI.CB
6874 auto Remark = [&](OptimizationRemark
OR) {
6875 if (AI.IsGlobalizedLocal)
6876 return OR <<
"Moving globalized variable to the stack.";
6877 return OR <<
"Moving memory allocation from the heap to the stack.";
6879 if (AI.IsGlobalizedLocal)
6880 A.emitRemark<OptimizationRemark>(AI.CB,
"OMP110",
Remark);
6882 A.emitRemark<OptimizationRemark>(AI.CB,
"HeapToStack",
Remark);
6884 const DataLayout &
DL =
A.getInfoCache().getDL();
6886 std::optional<APInt> SizeAPI =
getSize(
A, *
this, AI);
6888 Size = ConstantInt::get(AI.CB->getContext(), *SizeAPI);
6890 LLVMContext &Ctx = AI.CB->getContext();
6891 ObjectSizeOpts Opts;
6892 ObjectSizeOffsetEvaluator Eval(
DL, TLI, Ctx, Opts);
6893 SizeOffsetValue SizeOffsetPair = Eval.compute(AI.CB);
6900 ?
F->getEntryBlock().begin()
6901 : AI.CB->getIterator();
6904 if (MaybeAlign RetAlign = AI.CB->getRetAlign())
6905 Alignment = std::max(Alignment, *RetAlign);
6907 std::optional<APInt> AlignmentAPI = getAPInt(
A, *
this, *Align);
6908 assert(AlignmentAPI && AlignmentAPI->getZExtValue() > 0 &&
6909 "Expected an alignment during manifest!");
6911 std::max(Alignment,
assumeAligned(AlignmentAPI->getZExtValue()));
6915 unsigned AS =
DL.getAllocaAddrSpace();
6917 new AllocaInst(Type::getInt8Ty(
F->getContext()), AS,
Size, Alignment,
6918 AI.CB->getName() +
".h2s", IP);
6920 if (Alloca->
getType() != AI.CB->getType())
6921 Alloca = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
6922 Alloca, AI.CB->getType(),
"malloc_cast", AI.CB->getIterator());
6924 auto *I8Ty = Type::getInt8Ty(
F->getContext());
6927 "Must be able to materialize initial memory state of allocation");
6932 auto *NBB =
II->getNormalDest();
6934 A.deleteAfterManifest(*AI.CB);
6936 A.deleteAfterManifest(*AI.CB);
6945 Builder.CreateMemSet(Alloca, InitVal,
Size, std::nullopt);
6947 HasChanged = ChangeStatus::CHANGED;
6953 std::optional<APInt> getAPInt(Attributor &
A,
const AbstractAttribute &AA,
6955 bool UsedAssumedInformation =
false;
6956 std::optional<Constant *> SimpleV =
6957 A.getAssumedConstant(V, AA, UsedAssumedInformation);
6959 return APInt(64, 0);
6961 return CI->getValue();
6962 return std::nullopt;
6965 std::optional<APInt>
getSize(Attributor &
A,
const AbstractAttribute &AA,
6966 AllocationInfo &AI) {
6967 auto Mapper = [&](
const Value *
V) ->
const Value * {
6968 bool UsedAssumedInformation =
false;
6969 if (std::optional<Constant *> SimpleV =
6970 A.getAssumedConstant(*V, AA, UsedAssumedInformation))
6977 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6983 MapVector<CallBase *, AllocationInfo *> AllocationInfos;
6987 MapVector<CallBase *, DeallocationInfo *> DeallocationInfos;
6992ChangeStatus AAHeapToStackFunction::updateImpl(Attributor &
A) {
6995 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6997 const auto *LivenessAA =
7000 MustBeExecutedContextExplorer *Explorer =
7001 A.getInfoCache().getMustBeExecutedContextExplorer();
7003 bool StackIsAccessibleByOtherThreads =
7004 A.getInfoCache().stackIsAccessibleByOtherThreads();
7007 A.getInfoCache().getAnalysisResultForFunction<LoopAnalysis>(*F);
7008 std::optional<bool> MayContainIrreducibleControl;
7010 if (&
F->getEntryBlock() == &BB)
7012 if (!MayContainIrreducibleControl.has_value())
7014 if (*MayContainIrreducibleControl)
7023 bool HasUpdatedFrees =
false;
7025 auto UpdateFrees = [&]() {
7026 HasUpdatedFrees =
true;
7028 for (
auto &It : DeallocationInfos) {
7029 DeallocationInfo &DI = *It.second;
7032 if (DI.MightFreeUnknownObjects)
7036 bool UsedAssumedInformation =
false;
7037 if (
A.isAssumedDead(*DI.CB,
this, LivenessAA, UsedAssumedInformation,
7044 LLVM_DEBUG(
dbgs() <<
"[H2S] Unknown underlying object for free!\n");
7045 DI.MightFreeUnknownObjects =
true;
7058 DI.MightFreeUnknownObjects =
true;
7062 AllocationInfo *AI = AllocationInfos.lookup(ObjCB);
7064 LLVM_DEBUG(
dbgs() <<
"[H2S] Free of a non-allocation object: " << *Obj
7066 DI.MightFreeUnknownObjects =
true;
7070 DI.PotentialAllocationCalls.insert(ObjCB);
7074 auto FreeCheck = [&](AllocationInfo &AI) {
7078 if (!StackIsAccessibleByOtherThreads) {
7083 dbgs() <<
"[H2S] found an escaping use, stack is not accessible by "
7084 "other threads and function is not nosync:\n");
7088 if (!HasUpdatedFrees)
7092 if (AI.PotentialFreeCalls.size() != 1) {
7094 << AI.PotentialFreeCalls.size() <<
"\n");
7097 CallBase *UniqueFree = *AI.PotentialFreeCalls.begin();
7098 DeallocationInfo *DI = DeallocationInfos.lookup(UniqueFree);
7101 dbgs() <<
"[H2S] unique free call was not known as deallocation call "
7102 << *UniqueFree <<
"\n");
7105 if (DI->MightFreeUnknownObjects) {
7107 dbgs() <<
"[H2S] unique free call might free unknown allocations\n");
7110 if (DI->PotentialAllocationCalls.empty())
7112 if (DI->PotentialAllocationCalls.size() > 1) {
7114 << DI->PotentialAllocationCalls.size()
7115 <<
" different allocations\n");
7118 if (*DI->PotentialAllocationCalls.begin() != AI.CB) {
7121 <<
"[H2S] unique free call not known to free this allocation but "
7122 << **DI->PotentialAllocationCalls.begin() <<
"\n");
7127 if (!AI.IsGlobalizedLocal) {
7129 if (!Explorer || !Explorer->findInContextOf(UniqueFree, CtxI)) {
7130 LLVM_DEBUG(
dbgs() <<
"[H2S] unique free call might not be executed "
7131 "with the allocation "
7132 << *UniqueFree <<
"\n");
7139 auto UsesCheck = [&](AllocationInfo &AI) {
7140 bool ValidUsesOnly =
true;
7142 auto Pred = [&](
const Use &
U,
bool &Follow) ->
bool {
7147 if (
SI->getValueOperand() ==
U.get()) {
7149 <<
"[H2S] escaping store to memory: " << *UserI <<
"\n");
7150 ValidUsesOnly =
false;
7159 if (DeallocationInfos.count(CB)) {
7160 AI.PotentialFreeCalls.insert(CB);
7167 bool IsKnownNoCapture;
7176 if (!IsAssumedNoCapture ||
7177 (!AI.IsGlobalizedLocal && !IsAssumedNoFree)) {
7178 AI.HasPotentiallyFreeingUnknownUses |= !IsAssumedNoFree;
7181 auto Remark = [&](OptimizationRemarkMissed ORM) {
7183 <<
"Could not move globalized variable to the stack. "
7184 "Variable is potentially captured in call. Mark "
7185 "parameter as `__attribute__((noescape))` to override.";
7188 if (ValidUsesOnly && AI.IsGlobalizedLocal)
7189 A.emitRemark<OptimizationRemarkMissed>(CB,
"OMP113",
Remark);
7192 ValidUsesOnly =
false;
7205 ValidUsesOnly =
false;
7208 if (!
A.checkForAllUses(Pred, *
this, *AI.CB,
false,
7210 [&](
const Use &OldU,
const Use &NewU) {
7211 auto *SI = dyn_cast<StoreInst>(OldU.getUser());
7212 return !SI || StackIsAccessibleByOtherThreads ||
7213 AA::isAssumedThreadLocalObject(
7214 A, *SI->getPointerOperand(), *this);
7217 return ValidUsesOnly;
7222 for (
auto &It : AllocationInfos) {
7223 AllocationInfo &AI = *It.second;
7224 if (AI.Status == AllocationInfo::INVALID)
7228 std::optional<APInt> APAlign = getAPInt(
A, *
this, *Align);
7232 LLVM_DEBUG(
dbgs() <<
"[H2S] Unknown allocation alignment: " << *AI.CB
7234 AI.Status = AllocationInfo::INVALID;
7239 !APAlign->isPowerOf2()) {
7240 LLVM_DEBUG(
dbgs() <<
"[H2S] Invalid allocation alignment: " << APAlign
7242 AI.Status = AllocationInfo::INVALID;
7253 dbgs() <<
"[H2S] Unknown allocation size: " << *AI.CB <<
"\n";
7255 dbgs() <<
"[H2S] Allocation size too large: " << *AI.CB <<
" vs. "
7259 AI.Status = AllocationInfo::INVALID;
7265 switch (AI.Status) {
7266 case AllocationInfo::STACK_DUE_TO_USE:
7269 AI.Status = AllocationInfo::STACK_DUE_TO_FREE;
7271 case AllocationInfo::STACK_DUE_TO_FREE:
7274 AI.Status = AllocationInfo::INVALID;
7277 case AllocationInfo::INVALID:
7284 bool IsGlobalizedLocal = AI.IsGlobalizedLocal;
7285 if (AI.MoveAllocaIntoEntry &&
7286 (!
Size.has_value() ||
7287 (!IsGlobalizedLocal && IsInLoop(*AI.CB->getParent()))))
7288 AI.MoveAllocaIntoEntry =
false;
7297struct AAPrivatizablePtrImpl :
public AAPrivatizablePtr {
7298 AAPrivatizablePtrImpl(
const IRPosition &IRP, Attributor &
A)
7299 : AAPrivatizablePtr(IRP,
A), PrivatizableType(std::nullopt) {}
7302 AAPrivatizablePtr::indicatePessimisticFixpoint();
7303 PrivatizableType =
nullptr;
7304 return ChangeStatus::CHANGED;
7310 virtual std::optional<Type *> identifyPrivatizableType(Attributor &
A) = 0;
7314 std::optional<Type *> combineTypes(std::optional<Type *> T0,
7315 std::optional<Type *>
T1) {
7325 std::optional<Type *> getPrivatizableType()
const override {
7326 return PrivatizableType;
7329 const std::string getAsStr(Attributor *
A)
const override {
7330 return isAssumedPrivatizablePtr() ?
"[priv]" :
"[no-priv]";
7334 std::optional<Type *> PrivatizableType;
7339struct AAPrivatizablePtrArgument final :
public AAPrivatizablePtrImpl {
7340 AAPrivatizablePtrArgument(
const IRPosition &IRP, Attributor &
A)
7341 : AAPrivatizablePtrImpl(IRP,
A) {}
7344 std::optional<Type *> identifyPrivatizableType(Attributor &
A)
override {
7347 bool UsedAssumedInformation =
false;
7349 A.getAttrs(getIRPosition(), {Attribute::ByVal},
Attrs,
7351 if (!
Attrs.empty() &&
7352 A.checkForAllCallSites([](AbstractCallSite ACS) { return true; }, *
this,
7353 true, UsedAssumedInformation))
7354 return Attrs[0].getValueAsType();
7356 std::optional<Type *> Ty;
7357 unsigned ArgNo = getIRPosition().getCallSiteArgNo();
7365 auto CallSiteCheck = [&](AbstractCallSite ACS) {
7374 A.getAAFor<AAPrivatizablePtr>(*
this, ACSArgPos, DepClassTy::REQUIRED);
7377 std::optional<Type *> CSTy = PrivCSArgAA->getPrivatizableType();
7380 dbgs() <<
"[AAPrivatizablePtr] ACSPos: " << ACSArgPos <<
", CSTy: ";
7384 dbgs() <<
"<nullptr>";
7389 Ty = combineTypes(Ty, CSTy);
7392 dbgs() <<
" : New Type: ";
7394 (*Ty)->print(
dbgs());
7396 dbgs() <<
"<nullptr>";
7405 if (!
A.checkForAllCallSites(CallSiteCheck, *
this,
true,
7406 UsedAssumedInformation))
7413 PrivatizableType = identifyPrivatizableType(
A);
7414 if (!PrivatizableType)
7415 return ChangeStatus::UNCHANGED;
7416 if (!*PrivatizableType)
7417 return indicatePessimisticFixpoint();
7422 DepClassTy::OPTIONAL);
7425 if (!
A.hasAttr(getIRPosition(), Attribute::ByVal) &&
7428 return indicatePessimisticFixpoint();
7434 identifyReplacementTypes(*PrivatizableType, ReplacementTypes);
7438 Function &Fn = *getIRPosition().getAnchorScope();
7440 A.getInfoCache().getAnalysisResultForFunction<TargetIRAnalysis>(Fn);
7442 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] Missing TTI for function "
7444 return indicatePessimisticFixpoint();
7447 auto CallSiteCheck = [&](AbstractCallSite ACS) {
7454 bool UsedAssumedInformation =
false;
7455 if (!
A.checkForAllCallSites(CallSiteCheck, *
this,
true,
7456 UsedAssumedInformation)) {
7458 dbgs() <<
"[AAPrivatizablePtr] ABI incompatibility detected for "
7460 return indicatePessimisticFixpoint();
7464 Argument *Arg = getAssociatedArgument();
7465 if (!
A.isValidFunctionSignatureRewrite(*Arg, ReplacementTypes)) {
7467 return indicatePessimisticFixpoint();
7474 auto IsCompatiblePrivArgOfCallback = [&](CallBase &CB) {
7477 for (
const Use *U : CallbackUses) {
7478 AbstractCallSite CBACS(U);
7479 assert(CBACS && CBACS.isCallbackCall());
7480 for (Argument &CBArg : CBACS.getCalledFunction()->args()) {
7481 int CBArgNo = CBACS.getCallArgOperandNo(CBArg);
7485 <<
"[AAPrivatizablePtr] Argument " << *Arg
7486 <<
"check if can be privatized in the context of its parent ("
7488 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7490 << CBArgNo <<
"@" << CBACS.getCalledFunction()->getName()
7491 <<
")\n[AAPrivatizablePtr] " << CBArg <<
" : "
7492 << CBACS.getCallArgOperand(CBArg) <<
" vs "
7494 <<
"[AAPrivatizablePtr] " << CBArg <<
" : "
7495 << CBACS.getCallArgOperandNo(CBArg) <<
" vs " << ArgNo <<
"\n";
7498 if (CBArgNo !=
int(ArgNo))
7500 const auto *CBArgPrivAA =
A.getAAFor<AAPrivatizablePtr>(
7502 if (CBArgPrivAA && CBArgPrivAA->isValidState()) {
7503 auto CBArgPrivTy = CBArgPrivAA->getPrivatizableType();
7506 if (*CBArgPrivTy == PrivatizableType)
7511 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7512 <<
" cannot be privatized in the context of its parent ("
7514 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7516 << CBArgNo <<
"@" << CBACS.getCalledFunction()->getName()
7517 <<
").\n[AAPrivatizablePtr] for which the argument "
7518 "privatization is not compatible.\n";
7528 auto IsCompatiblePrivArgOfDirectCS = [&](AbstractCallSite ACS) {
7532 "Expected a direct call operand for callback call operand");
7537 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7538 <<
" check if be privatized in the context of its parent ("
7540 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7542 << DCArgNo <<
"@" << DCCallee->
getName() <<
").\n";
7545 if (
unsigned(DCArgNo) < DCCallee->
arg_size()) {
7546 const auto *DCArgPrivAA =
A.getAAFor<AAPrivatizablePtr>(
7548 DepClassTy::REQUIRED);
7549 if (DCArgPrivAA && DCArgPrivAA->isValidState()) {
7550 auto DCArgPrivTy = DCArgPrivAA->getPrivatizableType();
7553 if (*DCArgPrivTy == PrivatizableType)
7559 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7560 <<
" cannot be privatized in the context of its parent ("
7562 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7565 <<
").\n[AAPrivatizablePtr] for which the argument "
7566 "privatization is not compatible.\n";
7574 auto IsCompatiblePrivArgOfOtherCallSite = [&](AbstractCallSite ACS) {
7578 return IsCompatiblePrivArgOfDirectCS(ACS);
7582 if (!
A.checkForAllCallSites(IsCompatiblePrivArgOfOtherCallSite, *
this,
true,
7583 UsedAssumedInformation))
7584 return indicatePessimisticFixpoint();
7586 return ChangeStatus::UNCHANGED;
7592 identifyReplacementTypes(
Type *PrivType,
7593 SmallVectorImpl<Type *> &ReplacementTypes) {
7596 assert(PrivType &&
"Expected privatizable type!");
7600 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++)
7601 ReplacementTypes.
push_back(PrivStructType->getElementType(u));
7603 ReplacementTypes.
append(PrivArrayType->getNumElements(),
7604 PrivArrayType->getElementType());
7613 static void createInitialization(
Type *PrivType,
Value &
Base, Function &
F,
7615 assert(PrivType &&
"Expected privatizable type!");
7618 const DataLayout &
DL =
F.getDataLayout();
7622 const StructLayout *PrivStructLayout =
DL.getStructLayout(PrivStructType);
7623 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
7626 new StoreInst(
F.getArg(ArgNo + u), Ptr, IP);
7629 Type *PointeeTy = PrivArrayType->getElementType();
7630 uint64_t PointeeTySize =
DL.getTypeStoreSize(PointeeTy);
7631 for (
unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
7633 new StoreInst(
F.getArg(ArgNo + u), Ptr, IP);
7636 new StoreInst(
F.getArg(ArgNo), &
Base, IP);
7642 void createReplacementValues(Align Alignment,
Type *PrivType,
7644 SmallVectorImpl<Value *> &ReplacementValues) {
7646 assert(PrivType &&
"Expected privatizable type!");
7654 const StructLayout *PrivStructLayout =
DL.getStructLayout(PrivStructType);
7655 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
7656 Type *PointeeTy = PrivStructType->getElementType(u);
7659 LoadInst *
L =
new LoadInst(PointeeTy, Ptr,
"", IP->
getIterator());
7660 L->setAlignment(Alignment);
7664 Type *PointeeTy = PrivArrayType->getElementType();
7665 uint64_t PointeeTySize =
DL.getTypeStoreSize(PointeeTy);
7666 for (
unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
7668 LoadInst *
L =
new LoadInst(PointeeTy, Ptr,
"", IP->
getIterator());
7669 L->setAlignment(Alignment);
7674 L->setAlignment(Alignment);
7681 if (!PrivatizableType)
7682 return ChangeStatus::UNCHANGED;
7683 assert(*PrivatizableType &&
"Expected privatizable type!");
7689 bool UsedAssumedInformation =
false;
7690 if (!
A.checkForAllInstructions(
7691 [&](Instruction &
I) {
7692 CallInst &CI = cast<CallInst>(I);
7693 if (CI.isTailCall())
7694 TailCalls.push_back(&CI);
7697 *
this, {Instruction::Call}, UsedAssumedInformation))
7698 return ChangeStatus::UNCHANGED;
7700 Argument *Arg = getAssociatedArgument();
7703 const auto *AlignAA =
7710 [=](
const Attributor::ArgumentReplacementInfo &ARI,
7712 BasicBlock &EntryBB = ReplacementFn.getEntryBlock();
7714 const DataLayout &
DL = IP->getDataLayout();
7715 unsigned AS =
DL.getAllocaAddrSpace();
7716 Instruction *AI =
new AllocaInst(*PrivatizableType, AS,
7717 Arg->
getName() +
".priv", IP);
7718 createInitialization(*PrivatizableType, *AI, ReplacementFn,
7719 ArgIt->getArgNo(), IP);
7722 AI = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
7726 for (CallInst *CI : TailCalls)
7727 CI->setTailCall(
false);
7734 [=](
const Attributor::ArgumentReplacementInfo &ARI,
7735 AbstractCallSite ACS, SmallVectorImpl<Value *> &NewArgOperands) {
7738 createReplacementValues(
7739 AlignAA ? AlignAA->getAssumedAlign() :
Align(0),
7740 *PrivatizableType, ACS,
7748 identifyReplacementTypes(*PrivatizableType, ReplacementTypes);
7751 if (
A.registerFunctionSignatureRewrite(*Arg, ReplacementTypes,
7752 std::move(FnRepairCB),
7753 std::move(ACSRepairCB)))
7754 return ChangeStatus::CHANGED;
7755 return ChangeStatus::UNCHANGED;
7759 void trackStatistics()
const override {
7764struct AAPrivatizablePtrFloating :
public AAPrivatizablePtrImpl {
7765 AAPrivatizablePtrFloating(
const IRPosition &IRP, Attributor &
A)
7766 : AAPrivatizablePtrImpl(IRP,
A) {}
7771 indicatePessimisticFixpoint();
7776 "updateImpl will not be called");
7780 std::optional<Type *> identifyPrivatizableType(Attributor &
A)
override {
7783 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] No underlying object found!\n");
7790 return AI->getAllocatedType();
7792 auto *PrivArgAA =
A.getAAFor<AAPrivatizablePtr>(
7794 if (PrivArgAA && PrivArgAA->isAssumedPrivatizablePtr())
7795 return PrivArgAA->getPrivatizableType();
7798 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] Underlying object neither valid "
7799 "alloca nor privatizable argument: "
7805 void trackStatistics()
const override {
7810struct AAPrivatizablePtrCallSiteArgument final
7811 :
public AAPrivatizablePtrFloating {
7812 AAPrivatizablePtrCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
7813 : AAPrivatizablePtrFloating(IRP,
A) {}
7817 if (
A.hasAttr(getIRPosition(), Attribute::ByVal))
7818 indicateOptimisticFixpoint();
7823 PrivatizableType = identifyPrivatizableType(
A);
7824 if (!PrivatizableType)
7825 return ChangeStatus::UNCHANGED;
7826 if (!*PrivatizableType)
7827 return indicatePessimisticFixpoint();
7829 const IRPosition &IRP = getIRPosition();
7830 bool IsKnownNoCapture;
7832 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoCapture);
7833 if (!IsAssumedNoCapture) {
7834 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer might be captured!\n");
7835 return indicatePessimisticFixpoint();
7838 bool IsKnownNoAlias;
7840 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
7841 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer might alias!\n");
7842 return indicatePessimisticFixpoint();
7847 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer is written!\n");
7848 return indicatePessimisticFixpoint();
7851 return ChangeStatus::UNCHANGED;
7855 void trackStatistics()
const override {
7860struct AAPrivatizablePtrCallSiteReturned final
7861 :
public AAPrivatizablePtrFloating {
7862 AAPrivatizablePtrCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
7863 : AAPrivatizablePtrFloating(IRP,
A) {}
7868 indicatePessimisticFixpoint();
7872 void trackStatistics()
const override {
7877struct AAPrivatizablePtrReturned final :
public AAPrivatizablePtrFloating {
7878 AAPrivatizablePtrReturned(
const IRPosition &IRP, Attributor &
A)
7879 : AAPrivatizablePtrFloating(IRP,
A) {}
7884 indicatePessimisticFixpoint();
7888 void trackStatistics()
const override {
7898struct AAMemoryBehaviorImpl :
public AAMemoryBehavior {
7899 AAMemoryBehaviorImpl(
const IRPosition &IRP, Attributor &
A)
7900 : AAMemoryBehavior(IRP,
A) {}
7904 intersectAssumedBits(BEST_STATE);
7905 getKnownStateFromValue(
A, getIRPosition(), getState());
7906 AAMemoryBehavior::initialize(
A);
7910 static void getKnownStateFromValue(Attributor &
A,
const IRPosition &IRP,
7911 BitIntegerState &State,
7912 bool IgnoreSubsumingPositions =
false) {
7914 A.getAttrs(IRP, AttrKinds, Attrs, IgnoreSubsumingPositions);
7916 switch (Attr.getKindAsEnum()) {
7917 case Attribute::ReadNone:
7920 case Attribute::ReadOnly:
7923 case Attribute::WriteOnly:
7932 if (!
I->mayReadFromMemory())
7934 if (!
I->mayWriteToMemory())
7940 void getDeducedAttributes(Attributor &
A, LLVMContext &Ctx,
7941 SmallVectorImpl<Attribute> &Attrs)
const override {
7944 Attrs.push_back(Attribute::get(Ctx, Attribute::ReadNone));
7946 Attrs.push_back(Attribute::get(Ctx, Attribute::ReadOnly));
7947 else if (isAssumedWriteOnly())
7948 Attrs.push_back(Attribute::get(Ctx, Attribute::WriteOnly));
7954 const IRPosition &IRP = getIRPosition();
7956 if (
A.hasAttr(IRP, Attribute::ReadNone,
7958 return ChangeStatus::UNCHANGED;
7967 return ChangeStatus::UNCHANGED;
7970 A.removeAttrs(IRP, AttrKinds);
7973 A.removeAttrs(IRP, Attribute::Writable);
7980 const std::string getAsStr(Attributor *
A)
const override {
7985 if (isAssumedWriteOnly())
7987 return "may-read/write";
7991 static const Attribute::AttrKind AttrKinds[3];
7995 Attribute::ReadNone, Attribute::ReadOnly, Attribute::WriteOnly};
7998struct AAMemoryBehaviorFloating : AAMemoryBehaviorImpl {
7999 AAMemoryBehaviorFloating(
const IRPosition &IRP, Attributor &
A)
8000 : AAMemoryBehaviorImpl(IRP,
A) {}
8006 void trackStatistics()
const override {
8011 else if (isAssumedWriteOnly())
8018 bool followUsersOfUseIn(Attributor &
A,
const Use &U,
8019 const Instruction *UserI);
8022 void analyzeUseIn(Attributor &
A,
const Use &U,
const Instruction *UserI);
8026struct AAMemoryBehaviorArgument : AAMemoryBehaviorFloating {
8027 AAMemoryBehaviorArgument(
const IRPosition &IRP, Attributor &
A)
8028 : AAMemoryBehaviorFloating(IRP,
A) {}
8032 intersectAssumedBits(BEST_STATE);
8033 const IRPosition &IRP = getIRPosition();
8037 bool HasByVal =
A.hasAttr(IRP, {Attribute::ByVal},
8039 getKnownStateFromValue(
A, IRP, getState(),
8046 return ChangeStatus::UNCHANGED;
8050 if (
A.hasAttr(getIRPosition(),
8051 {Attribute::InAlloca, Attribute::Preallocated})) {
8052 removeKnownBits(NO_WRITES);
8053 removeAssumedBits(NO_WRITES);
8055 A.removeAttrs(getIRPosition(), AttrKinds);
8056 return AAMemoryBehaviorFloating::manifest(
A);
8060 void trackStatistics()
const override {
8065 else if (isAssumedWriteOnly())
8070struct AAMemoryBehaviorCallSiteArgument final : AAMemoryBehaviorArgument {
8071 AAMemoryBehaviorCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
8072 : AAMemoryBehaviorArgument(IRP,
A) {}
8078 Argument *Arg = getAssociatedArgument();
8080 indicatePessimisticFixpoint();
8084 addKnownBits(NO_WRITES);
8085 removeKnownBits(NO_READS);
8086 removeAssumedBits(NO_READS);
8088 AAMemoryBehaviorArgument::initialize(
A);
8089 if (getAssociatedFunction()->isDeclaration())
8090 indicatePessimisticFixpoint();
8099 Argument *Arg = getAssociatedArgument();
8102 A.getAAFor<AAMemoryBehavior>(*
this, ArgPos, DepClassTy::REQUIRED);
8104 return indicatePessimisticFixpoint();
8109 void trackStatistics()
const override {
8114 else if (isAssumedWriteOnly())
8120struct AAMemoryBehaviorCallSiteReturned final : AAMemoryBehaviorFloating {
8121 AAMemoryBehaviorCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
8122 : AAMemoryBehaviorFloating(IRP,
A) {}
8126 AAMemoryBehaviorImpl::initialize(
A);
8131 return ChangeStatus::UNCHANGED;
8135 void trackStatistics()
const override {}
8139struct AAMemoryBehaviorFunction final :
public AAMemoryBehaviorImpl {
8140 AAMemoryBehaviorFunction(
const IRPosition &IRP, Attributor &
A)
8141 : AAMemoryBehaviorImpl(IRP,
A) {}
8157 else if (isAssumedWriteOnly())
8160 A.removeAttrs(getIRPosition(), AttrKinds);
8163 for (Argument &Arg :
F.args())
8165 return A.manifestAttrs(getIRPosition(),
8166 Attribute::getWithMemoryEffects(
F.getContext(), ME));
8170 void trackStatistics()
const override {
8175 else if (isAssumedWriteOnly())
8181struct AAMemoryBehaviorCallSite final
8182 : AACalleeToCallSite<AAMemoryBehavior, AAMemoryBehaviorImpl> {
8183 AAMemoryBehaviorCallSite(
const IRPosition &IRP, Attributor &
A)
8184 : AACalleeToCallSite<AAMemoryBehavior, AAMemoryBehaviorImpl>(IRP,
A) {}
8195 else if (isAssumedWriteOnly())
8198 A.removeAttrs(getIRPosition(), AttrKinds);
8201 for (Use &U : CB.
args())
8203 Attribute::Writable);
8204 return A.manifestAttrs(
8205 getIRPosition(), Attribute::getWithMemoryEffects(CB.
getContext(), ME));
8209 void trackStatistics()
const override {
8214 else if (isAssumedWriteOnly())
8219ChangeStatus AAMemoryBehaviorFunction::updateImpl(Attributor &
A) {
8222 auto AssumedState = getAssumed();
8229 const auto *MemBehaviorAA =
A.getAAFor<AAMemoryBehavior>(
8231 if (MemBehaviorAA) {
8232 intersectAssumedBits(MemBehaviorAA->
getAssumed());
8233 return !isAtFixpoint();
8238 if (
I.mayReadFromMemory())
8239 removeAssumedBits(NO_READS);
8240 if (
I.mayWriteToMemory())
8241 removeAssumedBits(NO_WRITES);
8242 return !isAtFixpoint();
8245 bool UsedAssumedInformation =
false;
8246 if (!
A.checkForAllReadWriteInstructions(CheckRWInst, *
this,
8247 UsedAssumedInformation))
8248 return indicatePessimisticFixpoint();
8254ChangeStatus AAMemoryBehaviorFloating::updateImpl(Attributor &
A) {
8256 const IRPosition &IRP = getIRPosition();
8267 const auto *FnMemAA =
8270 FnMemAssumedState = FnMemAA->getAssumed();
8271 S.addKnownBits(FnMemAA->getKnown());
8272 if ((S.getAssumed() & FnMemAA->getAssumed()) == S.getAssumed())
8278 auto AssumedState = S.getAssumed();
8284 bool IsKnownNoCapture;
8285 const AANoCapture *ArgNoCaptureAA =
nullptr;
8290 if (!IsAssumedNoCapture &&
8292 S.intersectAssumedBits(FnMemAssumedState);
8298 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
8300 LLVM_DEBUG(
dbgs() <<
"[AAMemoryBehavior] Use: " << *U <<
" in " << *UserI
8308 Follow = followUsersOfUseIn(
A, U, UserI);
8312 analyzeUseIn(
A, U, UserI);
8314 return !isAtFixpoint();
8317 if (!
A.checkForAllUses(UsePred, *
this, getAssociatedValue()))
8318 return indicatePessimisticFixpoint();
8324bool AAMemoryBehaviorFloating::followUsersOfUseIn(Attributor &
A,
const Use &U,
8325 const Instruction *UserI) {
8343 if (
U.get()->getType()->isPointerTy()) {
8345 bool IsKnownNoCapture;
8354void AAMemoryBehaviorFloating::analyzeUseIn(Attributor &
A,
const Use &U,
8355 const Instruction *UserI) {
8362 case Instruction::Load:
8364 removeAssumedBits(NO_READS);
8367 case Instruction::Store:
8372 removeAssumedBits(NO_WRITES);
8374 indicatePessimisticFixpoint();
8377 case Instruction::Call:
8378 case Instruction::CallBr:
8379 case Instruction::Invoke: {
8386 indicatePessimisticFixpoint();
8393 removeAssumedBits(NO_READS);
8400 if (
U.get()->getType()->isPointerTy())
8404 const auto *MemBehaviorAA =
8410 intersectAssumedBits(MemBehaviorAA->
getAssumed());
8418 removeAssumedBits(NO_READS);
8420 removeAssumedBits(NO_WRITES);
8432 return "all memory";
8435 std::string S =
"memory:";
8441 S +=
"internal global,";
8443 S +=
"external global,";
8447 S +=
"inaccessible,";
8461 AccessKind2Accesses.fill(
nullptr);
8464 ~AAMemoryLocationImpl()
override {
8467 for (AccessSet *AS : AccessKind2Accesses)
8474 intersectAssumedBits(BEST_STATE);
8475 getKnownStateFromValue(
A, getIRPosition(), getState());
8476 AAMemoryLocation::initialize(
A);
8480 static void getKnownStateFromValue(Attributor &
A,
const IRPosition &IRP,
8481 BitIntegerState &State,
8482 bool IgnoreSubsumingPositions =
false) {
8491 bool UseArgMemOnly =
true;
8493 if (AnchorFn &&
A.isRunOn(*AnchorFn))
8497 A.getAttrs(IRP, {Attribute::Memory},
Attrs, IgnoreSubsumingPositions);
8506 State.
addKnownBits(inverseLocation(NO_INACCESSIBLE_MEM,
true,
true));
8511 State.
addKnownBits(inverseLocation(NO_ARGUMENT_MEM,
true,
true));
8515 A.manifestAttrs(IRP,
8516 Attribute::getWithMemoryEffects(
8525 NO_INACCESSIBLE_MEM | NO_ARGUMENT_MEM,
true,
true));
8529 A.manifestAttrs(IRP,
8530 Attribute::getWithMemoryEffects(
8540 void getDeducedAttributes(Attributor &
A, LLVMContext &Ctx,
8541 SmallVectorImpl<Attribute> &Attrs)
const override {
8548 else if (isAssumedInaccessibleMemOnly())
8549 Attrs.push_back(Attribute::getWithMemoryEffects(
8551 else if (isAssumedArgMemOnly())
8554 else if (isAssumedInaccessibleOrArgMemOnly())
8555 Attrs.push_back(Attribute::getWithMemoryEffects(
8565 const IRPosition &IRP = getIRPosition();
8569 if (DeducedAttrs.
size() != 1)
8570 return ChangeStatus::UNCHANGED;
8573 return A.manifestAttrs(IRP, Attribute::getWithMemoryEffects(
8578 bool checkForAllAccessesToMemoryKind(
8580 MemoryLocationsKind)>
8582 MemoryLocationsKind RequestedMLK)
const override {
8583 if (!isValidState())
8586 MemoryLocationsKind AssumedMLK = getAssumedNotAccessedLocation();
8587 if (AssumedMLK == NO_LOCATIONS)
8591 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS;
8592 CurMLK *= 2, ++Idx) {
8593 if (CurMLK & RequestedMLK)
8596 if (
const AccessSet *
Accesses = AccessKind2Accesses[Idx])
8597 for (
const AccessInfo &AI : *
Accesses)
8598 if (!Pred(AI.I, AI.Ptr, AI.Kind, CurMLK))
8611 MemoryLocationsKind KnownMLK = getKnown();
8613 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2)
8614 if (!(CurMLK & KnownMLK))
8615 updateStateAndAccessesMap(getState(), CurMLK,
I,
nullptr,
Changed,
8616 getAccessKindFromInst(
I));
8617 return AAMemoryLocation::indicatePessimisticFixpoint();
8637 bool operator()(
const AccessInfo &
LHS,
const AccessInfo &
RHS)
const {
8641 return LHS.Ptr <
RHS.Ptr;
8642 if (
LHS.Kind !=
RHS.Kind)
8643 return LHS.Kind <
RHS.Kind;
8650 using AccessSet = SmallSet<AccessInfo, 2, AccessInfo>;
8651 std::array<AccessSet *, llvm::ConstantLog2<VALID_STATE>()>
8652 AccessKind2Accesses;
8657 categorizeArgumentPointerLocations(Attributor &
A, CallBase &CB,
8658 AAMemoryLocation::StateType &AccessedLocs,
8663 categorizeAccessedLocations(Attributor &
A, Instruction &
I,
bool &
Changed);
8666 AccessKind getAccessKindFromInst(
const Instruction *
I) {
8669 AK =
I->mayReadFromMemory() ? READ :
NONE;
8678 void updateStateAndAccessesMap(AAMemoryLocation::StateType &State,
8679 MemoryLocationsKind MLK,
const Instruction *
I,
8688 if (MLK == NO_UNKOWN_MEM)
8690 State.removeAssumedBits(MLK);
8695 void categorizePtrValue(Attributor &
A,
const Instruction &
I,
const Value &Ptr,
8696 AAMemoryLocation::StateType &State,
bool &
Changed,
8697 unsigned AccessAS = 0);
8703void AAMemoryLocationImpl::categorizePtrValue(
8704 Attributor &
A,
const Instruction &
I,
const Value &Ptr,
8706 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Categorize pointer locations for "
8711 unsigned ObjectAS =
Obj.getType()->getPointerAddressSpace();
8713 MemoryLocationsKind MLK = NO_LOCATIONS;
8733 MLK = NO_ARGUMENT_MEM;
8739 if (GVar->isConstant())
8742 if (GV->hasLocalLinkage())
8743 MLK = NO_GLOBAL_INTERNAL_MEM;
8745 MLK = NO_GLOBAL_EXTERNAL_MEM;
8753 bool IsKnownNoAlias;
8757 MLK = NO_MALLOCED_MEM;
8759 MLK = NO_UNKOWN_MEM;
8761 MLK = NO_UNKOWN_MEM;
8764 assert(MLK != NO_LOCATIONS &&
"No location specified!");
8765 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Ptr value can be categorized: "
8766 << Obj <<
" -> " << getMemoryLocationsAsStr(MLK) <<
"\n");
8768 getAccessKindFromInst(&
I));
8773 const auto *AA =
A.getAAFor<AAUnderlyingObjects>(
8777 dbgs() <<
"[AAMemoryLocation] Pointer locations not categorized\n");
8778 updateStateAndAccessesMap(
State, NO_UNKOWN_MEM, &
I,
nullptr,
Changed,
8779 getAccessKindFromInst(&
I));
8784 dbgs() <<
"[AAMemoryLocation] Accessed locations with pointer locations: "
8788void AAMemoryLocationImpl::categorizeArgumentPointerLocations(
8791 for (
unsigned ArgNo = 0,
E = CB.
arg_size(); ArgNo <
E; ++ArgNo) {
8800 const auto *ArgOpMemLocationAA =
8803 if (ArgOpMemLocationAA && ArgOpMemLocationAA->isAssumedReadNone())
8808 categorizePtrValue(
A, CB, *ArgOp, AccessedLocs,
Changed);
8813AAMemoryLocationImpl::categorizeAccessedLocations(Attributor &
A, Instruction &
I,
8815 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Categorize accessed locations for "
8819 AccessedLocs.intersectAssumedBits(NO_LOCATIONS);
8824 const auto *CBMemLocationAA =
A.getAAFor<AAMemoryLocation>(
8827 <<
" [" << CBMemLocationAA <<
"]\n");
8828 if (!CBMemLocationAA) {
8829 updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &
I,
nullptr,
8830 Changed, getAccessKindFromInst(&
I));
8831 return NO_UNKOWN_MEM;
8834 if (CBMemLocationAA->isAssumedReadNone())
8835 return NO_LOCATIONS;
8837 if (CBMemLocationAA->isAssumedInaccessibleMemOnly()) {
8838 updateStateAndAccessesMap(AccessedLocs, NO_INACCESSIBLE_MEM, &
I,
nullptr,
8839 Changed, getAccessKindFromInst(&
I));
8840 return AccessedLocs.getAssumed();
8843 uint32_t CBAssumedNotAccessedLocs =
8844 CBMemLocationAA->getAssumedNotAccessedLocation();
8847 uint32_t CBAssumedNotAccessedLocsNoArgMem =
8848 CBAssumedNotAccessedLocs | NO_ARGUMENT_MEM | NO_GLOBAL_MEM;
8850 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2) {
8851 if (CBAssumedNotAccessedLocsNoArgMem & CurMLK)
8853 updateStateAndAccessesMap(AccessedLocs, CurMLK, &
I,
nullptr,
Changed,
8854 getAccessKindFromInst(&
I));
8859 bool HasGlobalAccesses = ((~CBAssumedNotAccessedLocs) & NO_GLOBAL_MEM);
8860 if (HasGlobalAccesses) {
8863 updateStateAndAccessesMap(AccessedLocs, MLK, &
I, Ptr,
Changed,
8864 getAccessKindFromInst(&
I));
8867 if (!CBMemLocationAA->checkForAllAccessesToMemoryKind(
8868 AccessPred, inverseLocation(NO_GLOBAL_MEM,
false,
false)))
8869 return AccessedLocs.getWorstState();
8873 dbgs() <<
"[AAMemoryLocation] Accessed state before argument handling: "
8874 << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) <<
"\n");
8877 bool HasArgAccesses = ((~CBAssumedNotAccessedLocs) & NO_ARGUMENT_MEM);
8879 categorizeArgumentPointerLocations(
A, *CB, AccessedLocs,
Changed);
8882 dbgs() <<
"[AAMemoryLocation] Accessed state after argument handling: "
8883 << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) <<
"\n");
8885 return AccessedLocs.getAssumed();
8890 dbgs() <<
"[AAMemoryLocation] Categorize memory access with pointer: "
8891 <<
I <<
" [" << *Ptr <<
"]\n");
8892 categorizePtrValue(
A,
I, *Ptr, AccessedLocs,
Changed,
8893 Ptr->getType()->getPointerAddressSpace());
8894 return AccessedLocs.getAssumed();
8897 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Failed to categorize instruction: "
8899 updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &
I,
nullptr,
Changed,
8900 getAccessKindFromInst(&
I));
8901 return AccessedLocs.getAssumed();
8905struct AAMemoryLocationFunction final :
public AAMemoryLocationImpl {
8906 AAMemoryLocationFunction(
const IRPosition &IRP, Attributor &
A)
8907 : AAMemoryLocationImpl(IRP,
A) {}
8912 const auto *MemBehaviorAA =
8913 A.getAAFor<AAMemoryBehavior>(*
this, getIRPosition(), DepClassTy::NONE);
8916 return indicateOptimisticFixpoint();
8918 "AAMemoryLocation was not read-none but AAMemoryBehavior was!");
8919 A.recordDependence(*MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
8920 return ChangeStatus::UNCHANGED;
8924 auto AssumedState = getAssumed();
8928 MemoryLocationsKind MLK = categorizeAccessedLocations(
A,
I,
Changed);
8929 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Accessed locations for " <<
I
8930 <<
": " << getMemoryLocationsAsStr(MLK) <<
"\n");
8931 removeAssumedBits(inverseLocation(MLK,
false,
false));
8934 return getAssumedNotAccessedLocation() != VALID_STATE;
8937 bool UsedAssumedInformation =
false;
8938 if (!
A.checkForAllReadWriteInstructions(CheckRWInst, *
this,
8939 UsedAssumedInformation))
8940 return indicatePessimisticFixpoint();
8942 Changed |= AssumedState != getAssumed();
8943 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
8947 void trackStatistics()
const override {
8950 else if (isAssumedArgMemOnly())
8952 else if (isAssumedInaccessibleMemOnly())
8954 else if (isAssumedInaccessibleOrArgMemOnly())
8960struct AAMemoryLocationCallSite final : AAMemoryLocationImpl {
8961 AAMemoryLocationCallSite(
const IRPosition &IRP, Attributor &
A)
8962 : AAMemoryLocationImpl(IRP,
A) {}
8973 A.getAAFor<AAMemoryLocation>(*
this, FnPos, DepClassTy::REQUIRED);
8975 return indicatePessimisticFixpoint();
8979 updateStateAndAccessesMap(getState(), MLK,
I, Ptr,
Changed,
8980 getAccessKindFromInst(
I));
8983 if (!FnAA->checkForAllAccessesToMemoryKind(AccessPred, ALL_LOCATIONS))
8984 return indicatePessimisticFixpoint();
8985 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
8989 void trackStatistics()
const override {
8999struct AADenormalFPMathImpl :
public AADenormalFPMath {
9000 AADenormalFPMathImpl(
const IRPosition &IRP, Attributor &
A)
9001 : AADenormalFPMath(IRP,
A) {}
9003 const std::string getAsStr(Attributor *
A)
const override {
9004 std::string Str(
"AADenormalFPMath[");
9005 raw_string_ostream OS(Str);
9007 DenormalState Known = getKnown();
9008 if (Known.Mode.isValid())
9009 OS <<
"denormal-fp-math=" << Known.Mode;
9013 if (Known.ModeF32.isValid())
9014 OS <<
" denormal-fp-math-f32=" << Known.ModeF32;
9020struct AADenormalFPMathFunction final : AADenormalFPMathImpl {
9021 AADenormalFPMathFunction(
const IRPosition &IRP, Attributor &
A)
9022 : AADenormalFPMathImpl(IRP,
A) {}
9026 DenormalFPEnv DenormEnv =
F->getDenormalFPEnv();
9036 auto CheckCallSite = [=, &Change, &
A](AbstractCallSite CS) {
9039 <<
"->" << getAssociatedFunction()->
getName() <<
'\n');
9041 const auto *CallerInfo =
A.getAAFor<AADenormalFPMath>(
9047 CallerInfo->getState());
9051 bool AllCallSitesKnown =
true;
9052 if (!
A.checkForAllCallSites(CheckCallSite, *
this,
true, AllCallSitesKnown))
9053 return indicatePessimisticFixpoint();
9055 if (Change == ChangeStatus::CHANGED && isModeFixed())
9061 LLVMContext &Ctx = getAssociatedFunction()->getContext();
9067 DenormalFPEnv KnownEnv(Known.Mode, Known.ModeF32);
9070 AttrToRemove.
push_back(Attribute::DenormalFPEnv);
9073 Ctx, Attribute::DenormalFPEnv,
9074 DenormalFPEnv(Known.Mode, Known.ModeF32).toIntValue()));
9077 auto &IRP = getIRPosition();
9080 return A.removeAttrs(IRP, AttrToRemove) |
9081 A.manifestAttrs(IRP, AttrToAdd,
true);
9084 void trackStatistics()
const override {
9093struct AAValueConstantRangeImpl : AAValueConstantRange {
9094 using StateType = IntegerRangeState;
9095 AAValueConstantRangeImpl(
const IRPosition &IRP, Attributor &
A)
9096 : AAValueConstantRange(IRP,
A) {}
9100 if (
A.hasSimplificationCallback(getIRPosition())) {
9101 indicatePessimisticFixpoint();
9106 intersectKnown(getConstantRangeFromSCEV(
A, getCtxI()));
9109 intersectKnown(getConstantRangeFromLVI(
A, getCtxI()));
9113 const std::string getAsStr(Attributor *
A)
const override {
9115 llvm::raw_string_ostream OS(Str);
9117 getKnown().print(OS);
9119 getAssumed().print(OS);
9126 const SCEV *getSCEV(Attributor &
A,
const Instruction *
I =
nullptr)
const {
9127 if (!getAnchorScope())
9130 ScalarEvolution *SE =
9131 A.getInfoCache().getAnalysisResultForFunction<ScalarEvolutionAnalysis>(
9134 LoopInfo *LI =
A.getInfoCache().getAnalysisResultForFunction<LoopAnalysis>(
9140 const SCEV *S = SE->
getSCEV(&getAssociatedValue());
9149 ConstantRange getConstantRangeFromSCEV(Attributor &
A,
9150 const Instruction *
I =
nullptr)
const {
9151 if (!getAnchorScope())
9154 ScalarEvolution *SE =
9155 A.getInfoCache().getAnalysisResultForFunction<ScalarEvolutionAnalysis>(
9158 const SCEV *S = getSCEV(
A,
I);
9168 getConstantRangeFromLVI(Attributor &
A,
9169 const Instruction *CtxI =
nullptr)
const {
9170 if (!getAnchorScope())
9173 LazyValueInfo *LVI =
9174 A.getInfoCache().getAnalysisResultForFunction<LazyValueAnalysis>(
9189 bool isValidCtxInstructionForOutsideAnalysis(Attributor &
A,
9190 const Instruction *CtxI,
9191 bool AllowAACtxI)
const {
9192 if (!CtxI || (!AllowAACtxI && CtxI == getCtxI()))
9204 InformationCache &InfoCache =
A.getInfoCache();
9205 const DominatorTree *DT =
9216 getKnownConstantRange(Attributor &
A,
9217 const Instruction *CtxI =
nullptr)
const override {
9218 if (!isValidCtxInstructionForOutsideAnalysis(
A, CtxI,
9222 ConstantRange LVIR = getConstantRangeFromLVI(
A, CtxI);
9223 ConstantRange SCEVR = getConstantRangeFromSCEV(
A, CtxI);
9224 return getKnown().intersectWith(SCEVR).intersectWith(LVIR);
9229 getAssumedConstantRange(Attributor &
A,
9230 const Instruction *CtxI =
nullptr)
const override {
9235 if (!isValidCtxInstructionForOutsideAnalysis(
A, CtxI,
9237 return getAssumed();
9239 ConstantRange LVIR = getConstantRangeFromLVI(
A, CtxI);
9240 ConstantRange SCEVR = getConstantRangeFromSCEV(
A, CtxI);
9241 return getAssumed().intersectWith(SCEVR).intersectWith(LVIR);
9246 getMDNodeForConstantRange(
Type *Ty, LLVMContext &Ctx,
9247 const ConstantRange &AssumedConstantRange) {
9249 Ty, AssumedConstantRange.
getLower())),
9251 Ty, AssumedConstantRange.
getUpper()))};
9256 static bool isBetterRange(
const ConstantRange &Assumed,
9257 const Instruction &
I) {
9261 std::optional<ConstantRange> Known;
9265 }
else if (MDNode *KnownRanges =
I.getMetadata(LLVMContext::MD_range)) {
9271 if (KnownRanges->getNumOperands() > 2)
9274 ConstantInt *
Lower =
9276 ConstantInt *
Upper =
9279 Known.emplace(
Lower->getValue(),
Upper->getValue());
9281 return !Known || (*Known != Assumed && Known->contains(Assumed));
9286 setRangeMetadataIfisBetterRange(Instruction *
I,
9287 const ConstantRange &AssumedConstantRange) {
9288 if (isBetterRange(AssumedConstantRange, *
I)) {
9289 I->setMetadata(LLVMContext::MD_range,
9290 getMDNodeForConstantRange(
I->getType(),
I->getContext(),
9291 AssumedConstantRange));
9298 setRangeRetAttrIfisBetterRange(Attributor &
A,
const IRPosition &IRP,
9300 const ConstantRange &AssumedConstantRange) {
9301 if (isBetterRange(AssumedConstantRange, *
I)) {
9302 A.manifestAttrs(IRP,
9303 Attribute::get(
I->getContext(), Attribute::Range,
9304 AssumedConstantRange),
9314 ConstantRange AssumedConstantRange = getAssumedConstantRange(
A);
9317 auto &
V = getAssociatedValue();
9321 assert(
I == getCtxI() &&
"Should not annotate an instruction which is "
9322 "not the context instruction");
9324 if (setRangeMetadataIfisBetterRange(
I, AssumedConstantRange))
9325 Changed = ChangeStatus::CHANGED;
9327 if (setRangeRetAttrIfisBetterRange(
A, getIRPosition(),
I,
9328 AssumedConstantRange))
9329 Changed = ChangeStatus::CHANGED;
9337struct AAValueConstantRangeArgument final
9338 : AAArgumentFromCallSiteArguments<
9339 AAValueConstantRange, AAValueConstantRangeImpl, IntegerRangeState,
9341 using Base = AAArgumentFromCallSiteArguments<
9342 AAValueConstantRange, AAValueConstantRangeImpl, IntegerRangeState,
9344 AAValueConstantRangeArgument(
const IRPosition &IRP, Attributor &
A)
9348 void trackStatistics()
const override {
9353struct AAValueConstantRangeReturned
9354 : AAReturnedFromReturnedValues<AAValueConstantRange,
9355 AAValueConstantRangeImpl,
9356 AAValueConstantRangeImpl::StateType,
9359 AAReturnedFromReturnedValues<AAValueConstantRange,
9360 AAValueConstantRangeImpl,
9361 AAValueConstantRangeImpl::StateType,
9363 AAValueConstantRangeReturned(
const IRPosition &IRP, Attributor &
A)
9368 if (!
A.isFunctionIPOAmendable(*getAssociatedFunction()))
9369 indicatePessimisticFixpoint();
9373 void trackStatistics()
const override {
9378struct AAValueConstantRangeFloating : AAValueConstantRangeImpl {
9379 AAValueConstantRangeFloating(
const IRPosition &IRP, Attributor &
A)
9380 : AAValueConstantRangeImpl(IRP,
A) {}
9384 AAValueConstantRangeImpl::initialize(
A);
9388 Value &
V = getAssociatedValue();
9391 unionAssumed(ConstantRange(
C->getValue()));
9392 indicateOptimisticFixpoint();
9398 unionAssumed(ConstantRange(APInt(
getBitWidth(), 0)));
9399 indicateOptimisticFixpoint();
9411 if (
auto *RangeMD = LI->getMetadata(LLVMContext::MD_range)) {
9422 indicatePessimisticFixpoint();
9425 << getAssociatedValue() <<
"\n");
9428 bool calculateBinaryOperator(
9429 Attributor &
A, BinaryOperator *BinOp, IntegerRangeState &
T,
9430 const Instruction *CtxI,
9431 SmallVectorImpl<const AAValueConstantRange *> &QuerriedAAs) {
9436 bool UsedAssumedInformation =
false;
9437 const auto &SimplifiedLHS =
A.getAssumedSimplified(
9440 if (!SimplifiedLHS.has_value())
9442 if (!*SimplifiedLHS)
9444 LHS = *SimplifiedLHS;
9446 const auto &SimplifiedRHS =
A.getAssumedSimplified(
9449 if (!SimplifiedRHS.has_value())
9451 if (!*SimplifiedRHS)
9453 RHS = *SimplifiedRHS;
9459 auto *LHSAA =
A.getAAFor<AAValueConstantRange>(
9461 DepClassTy::REQUIRED);
9465 auto LHSAARange = LHSAA->getAssumedConstantRange(
A, CtxI);
9467 auto *RHSAA =
A.getAAFor<AAValueConstantRange>(
9469 DepClassTy::REQUIRED);
9473 auto RHSAARange = RHSAA->getAssumedConstantRange(
A, CtxI);
9475 auto AssumedRange = LHSAARange.binaryOp(BinOp->
getOpcode(), RHSAARange);
9477 T.unionAssumed(AssumedRange);
9481 return T.isValidState();
9484 bool calculateCastInst(
9485 Attributor &
A, CastInst *CastI, IntegerRangeState &
T,
9486 const Instruction *CtxI,
9487 SmallVectorImpl<const AAValueConstantRange *> &QuerriedAAs) {
9493 bool UsedAssumedInformation =
false;
9494 const auto &SimplifiedOpV =
A.getAssumedSimplified(
9497 if (!SimplifiedOpV.has_value())
9499 if (!*SimplifiedOpV)
9501 OpV = *SimplifiedOpV;
9506 auto *OpAA =
A.getAAFor<AAValueConstantRange>(
9508 DepClassTy::REQUIRED);
9512 T.unionAssumed(OpAA->getAssumed().castOp(CastI->
getOpcode(),
9514 return T.isValidState();
9518 calculateCmpInst(Attributor &
A, CmpInst *CmpI, IntegerRangeState &
T,
9519 const Instruction *CtxI,
9520 SmallVectorImpl<const AAValueConstantRange *> &QuerriedAAs) {
9525 bool UsedAssumedInformation =
false;
9526 const auto &SimplifiedLHS =
A.getAssumedSimplified(
9529 if (!SimplifiedLHS.has_value())
9531 if (!*SimplifiedLHS)
9533 LHS = *SimplifiedLHS;
9535 const auto &SimplifiedRHS =
A.getAssumedSimplified(
9538 if (!SimplifiedRHS.has_value())
9540 if (!*SimplifiedRHS)
9542 RHS = *SimplifiedRHS;
9548 auto *LHSAA =
A.getAAFor<AAValueConstantRange>(
9550 DepClassTy::REQUIRED);
9554 auto *RHSAA =
A.getAAFor<AAValueConstantRange>(
9556 DepClassTy::REQUIRED);
9560 auto LHSAARange = LHSAA->getAssumedConstantRange(
A, CtxI);
9561 auto RHSAARange = RHSAA->getAssumedConstantRange(
A, CtxI);
9564 if (LHSAARange.isEmptySet() || RHSAARange.isEmptySet())
9567 bool MustTrue =
false, MustFalse =
false;
9569 auto AllowedRegion =
9572 if (AllowedRegion.intersectWith(LHSAARange).isEmptySet())
9578 assert((!MustTrue || !MustFalse) &&
9579 "Either MustTrue or MustFalse should be false!");
9582 T.unionAssumed(ConstantRange(APInt( 1, 1)));
9584 T.unionAssumed(ConstantRange(APInt( 1, 0)));
9586 T.unionAssumed(ConstantRange( 1,
true));
9588 LLVM_DEBUG(
dbgs() <<
"[AAValueConstantRange] " << *CmpI <<
" after "
9589 << (MustTrue ?
"true" : (MustFalse ?
"false" :
"unknown"))
9590 <<
": " <<
T <<
"\n\t" << *LHSAA <<
"\t<op>\n\t"
9594 return T.isValidState();
9606 bool UsedAssumedInformation =
false;
9607 const auto &SimplifiedOpV =
A.getAssumedSimplified(
9610 if (!SimplifiedOpV.has_value())
9612 if (!*SimplifiedOpV)
9614 Value *VPtr = *SimplifiedOpV;
9617 const auto *AA =
A.getAAFor<AAValueConstantRange>(
9619 DepClassTy::REQUIRED);
9623 T.unionAssumed(AA->getAssumedConstantRange(
A, CtxI));
9627 return T.isValidState();
9632 if (!calculateBinaryOperator(
A, BinOp,
T, CtxI, QuerriedAAs))
9635 if (!calculateCmpInst(
A, CmpI,
T, CtxI, QuerriedAAs))
9638 if (!calculateCastInst(
A, CastI,
T, CtxI, QuerriedAAs))
9644 T.indicatePessimisticFixpoint();
9651 for (
const AAValueConstantRange *QueriedAA : QuerriedAAs) {
9652 if (QueriedAA !=
this)
9655 if (
T.getAssumed() == getState().getAssumed())
9657 T.indicatePessimisticFixpoint();
9660 return T.isValidState();
9663 if (!VisitValueCB(getAssociatedValue(), getCtxI()))
9664 return indicatePessimisticFixpoint();
9669 return ChangeStatus::UNCHANGED;
9670 if (++NumChanges > MaxNumChanges) {
9671 LLVM_DEBUG(
dbgs() <<
"[AAValueConstantRange] performed " << NumChanges
9672 <<
" but only " << MaxNumChanges
9673 <<
" are allowed to avoid cyclic reasoning.");
9674 return indicatePessimisticFixpoint();
9676 return ChangeStatus::CHANGED;
9680 void trackStatistics()
const override {
9689 static constexpr int MaxNumChanges = 5;
9692struct AAValueConstantRangeFunction : AAValueConstantRangeImpl {
9693 AAValueConstantRangeFunction(
const IRPosition &IRP, Attributor &
A)
9694 : AAValueConstantRangeImpl(IRP,
A) {}
9698 llvm_unreachable(
"AAValueConstantRange(Function|CallSite)::updateImpl will "
9706struct AAValueConstantRangeCallSite : AAValueConstantRangeFunction {
9707 AAValueConstantRangeCallSite(
const IRPosition &IRP, Attributor &
A)
9708 : AAValueConstantRangeFunction(IRP,
A) {}
9714struct AAValueConstantRangeCallSiteReturned
9715 : AACalleeToCallSite<AAValueConstantRange, AAValueConstantRangeImpl,
9716 AAValueConstantRangeImpl::StateType,
9718 AAValueConstantRangeCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
9719 : AACalleeToCallSite<AAValueConstantRange, AAValueConstantRangeImpl,
9720 AAValueConstantRangeImpl::StateType,
9727 if (std::optional<ConstantRange>
Range = CI->getRange())
9728 intersectKnown(*
Range);
9731 AAValueConstantRangeImpl::initialize(
A);
9735 void trackStatistics()
const override {
9739struct AAValueConstantRangeCallSiteArgument : AAValueConstantRangeFloating {
9740 AAValueConstantRangeCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
9741 : AAValueConstantRangeFloating(IRP,
A) {}
9745 return ChangeStatus::UNCHANGED;
9749 void trackStatistics()
const override {
9758struct AAPotentialConstantValuesImpl : AAPotentialConstantValues {
9761 AAPotentialConstantValuesImpl(
const IRPosition &IRP, Attributor &
A)
9762 : AAPotentialConstantValues(IRP,
A) {}
9766 if (
A.hasSimplificationCallback(getIRPosition()))
9767 indicatePessimisticFixpoint();
9769 AAPotentialConstantValues::initialize(
A);
9772 bool fillSetWithConstantValues(Attributor &
A,
const IRPosition &IRP, SetTy &S,
9773 bool &ContainsUndef,
bool ForSelf) {
9775 bool UsedAssumedInformation =
false;
9777 UsedAssumedInformation)) {
9784 auto *PotentialValuesAA =
A.getAAFor<AAPotentialConstantValues>(
9785 *
this, IRP, DepClassTy::REQUIRED);
9786 if (!PotentialValuesAA || !PotentialValuesAA->getState().isValidState())
9788 ContainsUndef = PotentialValuesAA->getState().undefIsContained();
9789 S = PotentialValuesAA->getState().getAssumedSet();
9796 ContainsUndef =
false;
9797 for (
auto &It : Values) {
9799 ContainsUndef =
true;
9805 S.insert(CI->getValue());
9807 ContainsUndef &= S.empty();
9813 const std::string getAsStr(Attributor *
A)
const override {
9815 llvm::raw_string_ostream OS(Str);
9822 return indicatePessimisticFixpoint();
9826struct AAPotentialConstantValuesArgument final
9827 : AAArgumentFromCallSiteArguments<AAPotentialConstantValues,
9828 AAPotentialConstantValuesImpl,
9829 PotentialConstantIntValuesState> {
9830 using Base = AAArgumentFromCallSiteArguments<AAPotentialConstantValues,
9831 AAPotentialConstantValuesImpl,
9833 AAPotentialConstantValuesArgument(
const IRPosition &IRP, Attributor &
A)
9837 void trackStatistics()
const override {
9842struct AAPotentialConstantValuesReturned
9843 : AAReturnedFromReturnedValues<AAPotentialConstantValues,
9844 AAPotentialConstantValuesImpl> {
9845 using Base = AAReturnedFromReturnedValues<AAPotentialConstantValues,
9846 AAPotentialConstantValuesImpl>;
9847 AAPotentialConstantValuesReturned(
const IRPosition &IRP, Attributor &
A)
9851 if (!
A.isFunctionIPOAmendable(*getAssociatedFunction()))
9852 indicatePessimisticFixpoint();
9853 Base::initialize(
A);
9857 void trackStatistics()
const override {
9862struct AAPotentialConstantValuesFloating : AAPotentialConstantValuesImpl {
9863 AAPotentialConstantValuesFloating(
const IRPosition &IRP, Attributor &
A)
9864 : AAPotentialConstantValuesImpl(IRP,
A) {}
9868 AAPotentialConstantValuesImpl::initialize(
A);
9872 Value &
V = getAssociatedValue();
9875 unionAssumed(
C->getValue());
9876 indicateOptimisticFixpoint();
9881 unionAssumedWithUndef();
9882 indicateOptimisticFixpoint();
9892 indicatePessimisticFixpoint();
9895 << getAssociatedValue() <<
"\n");
9898 static bool calculateICmpInst(
const ICmpInst *ICI,
const APInt &
LHS,
9903 static APInt calculateCastInst(
const CastInst *CI,
const APInt &Src,
9904 uint32_t ResultBitWidth) {
9909 case Instruction::Trunc:
9910 return Src.trunc(ResultBitWidth);
9911 case Instruction::SExt:
9912 return Src.sext(ResultBitWidth);
9913 case Instruction::ZExt:
9914 return Src.zext(ResultBitWidth);
9915 case Instruction::BitCast:
9920 static APInt calculateBinaryOperator(
const BinaryOperator *BinOp,
9921 const APInt &
LHS,
const APInt &
RHS,
9922 bool &SkipOperation,
bool &Unsupported) {
9929 switch (BinOpcode) {
9933 case Instruction::Add:
9935 case Instruction::Sub:
9937 case Instruction::Mul:
9939 case Instruction::UDiv:
9941 SkipOperation =
true;
9945 case Instruction::SDiv:
9947 SkipOperation =
true;
9951 case Instruction::URem:
9953 SkipOperation =
true;
9957 case Instruction::SRem:
9959 SkipOperation =
true;
9963 case Instruction::Shl:
9965 case Instruction::LShr:
9967 case Instruction::AShr:
9969 case Instruction::And:
9971 case Instruction::Or:
9973 case Instruction::Xor:
9978 bool calculateBinaryOperatorAndTakeUnion(
const BinaryOperator *BinOp,
9979 const APInt &
LHS,
const APInt &
RHS) {
9980 bool SkipOperation =
false;
9983 calculateBinaryOperator(BinOp,
LHS,
RHS, SkipOperation, Unsupported);
9988 unionAssumed(Result);
9989 return isValidState();
9992 ChangeStatus updateWithICmpInst(Attributor &
A, ICmpInst *ICI) {
9993 auto AssumedBefore = getAssumed();
9997 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
9998 SetTy LHSAAPVS, RHSAAPVS;
10000 LHSContainsUndef,
false) ||
10002 RHSContainsUndef,
false))
10003 return indicatePessimisticFixpoint();
10006 bool MaybeTrue =
false, MaybeFalse =
false;
10008 if (LHSContainsUndef && RHSContainsUndef) {
10011 unionAssumedWithUndef();
10012 }
else if (LHSContainsUndef) {
10013 for (
const APInt &R : RHSAAPVS) {
10014 bool CmpResult = calculateICmpInst(ICI, Zero, R);
10015 MaybeTrue |= CmpResult;
10016 MaybeFalse |= !CmpResult;
10017 if (MaybeTrue & MaybeFalse)
10018 return indicatePessimisticFixpoint();
10020 }
else if (RHSContainsUndef) {
10021 for (
const APInt &L : LHSAAPVS) {
10022 bool CmpResult = calculateICmpInst(ICI, L, Zero);
10023 MaybeTrue |= CmpResult;
10024 MaybeFalse |= !CmpResult;
10025 if (MaybeTrue & MaybeFalse)
10026 return indicatePessimisticFixpoint();
10029 for (
const APInt &L : LHSAAPVS) {
10030 for (
const APInt &R : RHSAAPVS) {
10031 bool CmpResult = calculateICmpInst(ICI, L, R);
10032 MaybeTrue |= CmpResult;
10033 MaybeFalse |= !CmpResult;
10034 if (MaybeTrue & MaybeFalse)
10035 return indicatePessimisticFixpoint();
10040 unionAssumed(APInt( 1, 1));
10042 unionAssumed(APInt( 1, 0));
10043 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10044 : ChangeStatus::CHANGED;
10047 ChangeStatus updateWithSelectInst(Attributor &
A, SelectInst *SI) {
10048 auto AssumedBefore = getAssumed();
10052 bool UsedAssumedInformation =
false;
10053 std::optional<Constant *>
C =
A.getAssumedConstant(
10054 *
SI->getCondition(), *
this, UsedAssumedInformation);
10057 bool OnlyLeft =
false, OnlyRight =
false;
10058 if (
C && *
C && (*C)->isOneValue())
10060 else if (
C && *
C && (*C)->isNullValue())
10063 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
10064 SetTy LHSAAPVS, RHSAAPVS;
10067 LHSContainsUndef,
false))
10068 return indicatePessimisticFixpoint();
10072 RHSContainsUndef,
false))
10073 return indicatePessimisticFixpoint();
10075 if (OnlyLeft || OnlyRight) {
10077 auto *OpAA = OnlyLeft ? &LHSAAPVS : &RHSAAPVS;
10078 auto Undef = OnlyLeft ? LHSContainsUndef : RHSContainsUndef;
10081 unionAssumedWithUndef();
10083 for (
const auto &It : *OpAA)
10087 }
else if (LHSContainsUndef && RHSContainsUndef) {
10089 unionAssumedWithUndef();
10091 for (
const auto &It : LHSAAPVS)
10093 for (
const auto &It : RHSAAPVS)
10096 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10097 : ChangeStatus::CHANGED;
10100 ChangeStatus updateWithCastInst(Attributor &
A, CastInst *CI) {
10101 auto AssumedBefore = getAssumed();
10103 return indicatePessimisticFixpoint();
10108 bool SrcContainsUndef =
false;
10111 SrcContainsUndef,
false))
10112 return indicatePessimisticFixpoint();
10114 if (SrcContainsUndef)
10115 unionAssumedWithUndef();
10117 for (
const APInt &S : SrcPVS) {
10118 APInt
T = calculateCastInst(CI, S, ResultBitWidth);
10122 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10123 : ChangeStatus::CHANGED;
10126 ChangeStatus updateWithBinaryOperator(Attributor &
A, BinaryOperator *BinOp) {
10127 auto AssumedBefore = getAssumed();
10131 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
10132 SetTy LHSAAPVS, RHSAAPVS;
10134 LHSContainsUndef,
false) ||
10136 RHSContainsUndef,
false))
10137 return indicatePessimisticFixpoint();
10142 if (LHSContainsUndef && RHSContainsUndef) {
10143 if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, Zero))
10144 return indicatePessimisticFixpoint();
10145 }
else if (LHSContainsUndef) {
10146 for (
const APInt &R : RHSAAPVS) {
10147 if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, R))
10148 return indicatePessimisticFixpoint();
10150 }
else if (RHSContainsUndef) {
10151 for (
const APInt &L : LHSAAPVS) {
10152 if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, Zero))
10153 return indicatePessimisticFixpoint();
10156 for (
const APInt &L : LHSAAPVS) {
10157 for (
const APInt &R : RHSAAPVS) {
10158 if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, R))
10159 return indicatePessimisticFixpoint();
10163 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10164 : ChangeStatus::CHANGED;
10167 ChangeStatus updateWithInstruction(Attributor &
A, Instruction *Inst) {
10168 auto AssumedBefore = getAssumed();
10170 bool ContainsUndef;
10172 ContainsUndef,
true))
10173 return indicatePessimisticFixpoint();
10174 if (ContainsUndef) {
10175 unionAssumedWithUndef();
10177 for (
const auto &It : Incoming)
10180 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10181 : ChangeStatus::CHANGED;
10186 Value &
V = getAssociatedValue();
10190 return updateWithICmpInst(
A, ICI);
10193 return updateWithSelectInst(
A, SI);
10196 return updateWithCastInst(
A, CI);
10199 return updateWithBinaryOperator(
A, BinOp);
10202 return updateWithInstruction(
A,
I);
10204 return indicatePessimisticFixpoint();
10208 void trackStatistics()
const override {
10213struct AAPotentialConstantValuesFunction : AAPotentialConstantValuesImpl {
10214 AAPotentialConstantValuesFunction(
const IRPosition &IRP, Attributor &
A)
10215 : AAPotentialConstantValuesImpl(IRP,
A) {}
10220 "AAPotentialConstantValues(Function|CallSite)::updateImpl will "
10225 void trackStatistics()
const override {
10230struct AAPotentialConstantValuesCallSite : AAPotentialConstantValuesFunction {
10231 AAPotentialConstantValuesCallSite(
const IRPosition &IRP, Attributor &
A)
10232 : AAPotentialConstantValuesFunction(IRP,
A) {}
10235 void trackStatistics()
const override {
10240struct AAPotentialConstantValuesCallSiteReturned
10241 : AACalleeToCallSite<AAPotentialConstantValues,
10242 AAPotentialConstantValuesImpl> {
10243 AAPotentialConstantValuesCallSiteReturned(
const IRPosition &IRP,
10245 : AACalleeToCallSite<AAPotentialConstantValues,
10246 AAPotentialConstantValuesImpl>(IRP,
A) {}
10249 void trackStatistics()
const override {
10254struct AAPotentialConstantValuesCallSiteArgument
10255 : AAPotentialConstantValuesFloating {
10256 AAPotentialConstantValuesCallSiteArgument(
const IRPosition &IRP,
10258 : AAPotentialConstantValuesFloating(IRP,
A) {}
10262 AAPotentialConstantValuesImpl::initialize(
A);
10263 if (isAtFixpoint())
10266 Value &
V = getAssociatedValue();
10269 unionAssumed(
C->getValue());
10270 indicateOptimisticFixpoint();
10275 unionAssumedWithUndef();
10276 indicateOptimisticFixpoint();
10283 Value &
V = getAssociatedValue();
10284 auto AssumedBefore = getAssumed();
10285 auto *AA =
A.getAAFor<AAPotentialConstantValues>(
10288 return indicatePessimisticFixpoint();
10289 const auto &S = AA->getAssumed();
10291 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10292 : ChangeStatus::CHANGED;
10296 void trackStatistics()
const override {
10305 bool IgnoreSubsumingPositions) {
10306 assert(ImpliedAttributeKind == Attribute::NoUndef &&
10307 "Unexpected attribute kind");
10308 if (
A.hasAttr(IRP, {Attribute::NoUndef}, IgnoreSubsumingPositions,
10309 Attribute::NoUndef))
10329 Value &V = getAssociatedValue();
10331 indicatePessimisticFixpoint();
10332 assert(!isImpliedByIR(
A, getIRPosition(), Attribute::NoUndef));
10336 bool followUseInMBEC(Attributor &
A,
const Use *U,
const Instruction *
I,
10337 AANoUndef::StateType &State) {
10338 const Value *UseV =
U->get();
10339 const DominatorTree *DT =
nullptr;
10340 AssumptionCache *AC =
nullptr;
10341 InformationCache &InfoCache =
A.getInfoCache();
10342 if (Function *
F = getAnchorScope()) {
10347 bool TrackUse =
false;
10356 const std::string getAsStr(Attributor *
A)
const override {
10357 return getAssumed() ?
"noundef" :
"may-undef-or-poison";
10364 bool UsedAssumedInformation =
false;
10365 if (
A.isAssumedDead(getIRPosition(),
nullptr,
nullptr,
10366 UsedAssumedInformation))
10367 return ChangeStatus::UNCHANGED;
10371 if (!
A.getAssumedSimplified(getIRPosition(), *
this, UsedAssumedInformation,
10374 return ChangeStatus::UNCHANGED;
10375 return AANoUndef::manifest(
A);
10379struct AANoUndefFloating :
public AANoUndefImpl {
10380 AANoUndefFloating(
const IRPosition &IRP, Attributor &
A)
10381 : AANoUndefImpl(IRP,
A) {}
10385 AANoUndefImpl::initialize(
A);
10386 if (!getState().isAtFixpoint() && getAnchorScope() &&
10387 !getAnchorScope()->isDeclaration())
10388 if (Instruction *CtxI = getCtxI())
10389 followUsesInMBEC(*
this,
A, getState(), *CtxI);
10394 auto VisitValueCB = [&](
const IRPosition &IRP) ->
bool {
10395 bool IsKnownNoUndef;
10397 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoUndef);
10401 bool UsedAssumedInformation =
false;
10402 Value *AssociatedValue = &getAssociatedValue();
10404 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
10409 Values.
size() != 1 || Values.
front().getValue() != AssociatedValue;
10417 if (AVIRP == getIRPosition() || !VisitValueCB(AVIRP))
10418 return indicatePessimisticFixpoint();
10419 return ChangeStatus::UNCHANGED;
10422 for (
const auto &VAC : Values)
10424 return indicatePessimisticFixpoint();
10426 return ChangeStatus::UNCHANGED;
10433struct AANoUndefReturned final
10434 : AAReturnedFromReturnedValues<AANoUndef, AANoUndefImpl> {
10435 AANoUndefReturned(
const IRPosition &IRP, Attributor &
A)
10436 : AAReturnedFromReturnedValues<AANoUndef, AANoUndefImpl>(IRP,
A) {}
10442struct AANoUndefArgument final
10443 : AAArgumentFromCallSiteArguments<AANoUndef, AANoUndefImpl> {
10444 AANoUndefArgument(
const IRPosition &IRP, Attributor &
A)
10445 : AAArgumentFromCallSiteArguments<AANoUndef, AANoUndefImpl>(IRP,
A) {}
10451struct AANoUndefCallSiteArgument final : AANoUndefFloating {
10452 AANoUndefCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
10453 : AANoUndefFloating(IRP,
A) {}
10459struct AANoUndefCallSiteReturned final
10460 : AACalleeToCallSite<AANoUndef, AANoUndefImpl> {
10461 AANoUndefCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
10462 : AACalleeToCallSite<AANoUndef, AANoUndefImpl>(IRP,
A) {}
10470struct AANoFPClassImpl : AANoFPClass {
10471 AANoFPClassImpl(
const IRPosition &IRP, Attributor &
A) : AANoFPClass(IRP,
A) {}
10474 const IRPosition &IRP = getIRPosition();
10478 indicateOptimisticFixpoint();
10483 A.getAttrs(getIRPosition(), {Attribute::NoFPClass},
Attrs,
false);
10484 for (
const auto &Attr : Attrs) {
10491 const DataLayout &
DL =
A.getDataLayout();
10492 InformationCache &InfoCache =
A.getInfoCache();
10494 const DominatorTree *DT =
nullptr;
10495 AssumptionCache *AC =
nullptr;
10496 const TargetLibraryInfo *TLI =
nullptr;
10500 if (!
F->isDeclaration()) {
10507 SimplifyQuery Q(
DL, TLI, DT, AC, CtxI);
10514 followUsesInMBEC(*
this,
A, getState(), *CtxI);
10518 bool followUseInMBEC(Attributor &
A,
const Use *U,
const Instruction *
I,
10519 AANoFPClass::StateType &State) {
10530 if (
auto *NoFPAA =
A.getAAFor<AANoFPClass>(*
this, IRP, DepClassTy::NONE))
10531 State.addKnownBits(NoFPAA->getState().getKnown());
10535 const std::string getAsStr(Attributor *
A)
const override {
10536 std::string
Result =
"nofpclass";
10537 raw_string_ostream OS(Result);
10538 OS << getKnownNoFPClass() <<
'/' << getAssumedNoFPClass();
10542 void getDeducedAttributes(Attributor &
A, LLVMContext &Ctx,
10543 SmallVectorImpl<Attribute> &Attrs)
const override {
10544 Attrs.emplace_back(Attribute::getWithNoFPClass(Ctx, getAssumedNoFPClass()));
10548struct AANoFPClassFloating :
public AANoFPClassImpl {
10549 AANoFPClassFloating(
const IRPosition &IRP, Attributor &
A)
10550 : AANoFPClassImpl(IRP,
A) {}
10555 bool UsedAssumedInformation =
false;
10556 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
10558 Values.
push_back({getAssociatedValue(), getCtxI()});
10564 DepClassTy::REQUIRED);
10565 if (!AA ||
this == AA) {
10566 T.indicatePessimisticFixpoint();
10568 const AANoFPClass::StateType &S =
10569 static_cast<const AANoFPClass::StateType &
>(AA->
getState());
10572 return T.isValidState();
10575 for (
const auto &VAC : Values)
10577 return indicatePessimisticFixpoint();
10583 void trackStatistics()
const override {
10588struct AANoFPClassReturned final
10589 : AAReturnedFromReturnedValues<AANoFPClass, AANoFPClassImpl,
10590 AANoFPClassImpl::StateType, false,
10591 Attribute::None, false> {
10592 AANoFPClassReturned(
const IRPosition &IRP, Attributor &
A)
10593 : AAReturnedFromReturnedValues<AANoFPClass, AANoFPClassImpl,
10594 AANoFPClassImpl::StateType,
false,
10598 void trackStatistics()
const override {
10603struct AANoFPClassArgument final
10604 : AAArgumentFromCallSiteArguments<AANoFPClass, AANoFPClassImpl> {
10605 AANoFPClassArgument(
const IRPosition &IRP, Attributor &
A)
10606 : AAArgumentFromCallSiteArguments<AANoFPClass, AANoFPClassImpl>(IRP,
A) {}
10612struct AANoFPClassCallSiteArgument final : AANoFPClassFloating {
10613 AANoFPClassCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
10614 : AANoFPClassFloating(IRP,
A) {}
10617 void trackStatistics()
const override {
10622struct AANoFPClassCallSiteReturned final
10623 : AACalleeToCallSite<AANoFPClass, AANoFPClassImpl> {
10624 AANoFPClassCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
10625 : AACalleeToCallSite<AANoFPClass, AANoFPClassImpl>(IRP,
A) {}
10628 void trackStatistics()
const override {
10633struct AACallEdgesImpl :
public AACallEdges {
10634 AACallEdgesImpl(
const IRPosition &IRP, Attributor &
A) : AACallEdges(IRP,
A) {}
10636 const SetVector<Function *> &getOptimisticEdges()
const override {
10637 return CalledFunctions;
10640 bool hasUnknownCallee()
const override {
return HasUnknownCallee; }
10642 bool hasNonAsmUnknownCallee()
const override {
10643 return HasUnknownCalleeNonAsm;
10646 const std::string getAsStr(Attributor *
A)
const override {
10647 return "CallEdges[" + std::to_string(HasUnknownCallee) +
"," +
10648 std::to_string(CalledFunctions.size()) +
"]";
10651 void trackStatistics()
const override {}
10654 void addCalledFunction(Function *Fn,
ChangeStatus &Change) {
10655 if (CalledFunctions.insert(Fn)) {
10656 Change = ChangeStatus::CHANGED;
10662 void setHasUnknownCallee(
bool NonAsm,
ChangeStatus &Change) {
10663 if (!HasUnknownCallee)
10664 Change = ChangeStatus::CHANGED;
10665 if (NonAsm && !HasUnknownCalleeNonAsm)
10666 Change = ChangeStatus::CHANGED;
10667 HasUnknownCalleeNonAsm |= NonAsm;
10668 HasUnknownCallee =
true;
10673 SetVector<Function *> CalledFunctions;
10676 bool HasUnknownCallee =
false;
10679 bool HasUnknownCalleeNonAsm =
false;
10682struct AACallEdgesCallSite :
public AACallEdgesImpl {
10683 AACallEdgesCallSite(
const IRPosition &IRP, Attributor &
A)
10684 : AACallEdgesImpl(IRP,
A) {}
10691 addCalledFunction(Fn, Change);
10693 LLVM_DEBUG(
dbgs() <<
"[AACallEdges] Unrecognized value: " << V <<
"\n");
10694 setHasUnknownCallee(
true, Change);
10705 VisitValue(*V, CtxI);
10709 bool UsedAssumedInformation =
false;
10715 for (
auto &VAC : Values)
10722 if (
IA->hasSideEffects() &&
10725 setHasUnknownCallee(
false, Change);
10731 if (
auto *IndirectCallAA =
A.getAAFor<AAIndirectCallInfo>(
10732 *
this, getIRPosition(), DepClassTy::OPTIONAL))
10733 if (IndirectCallAA->foreachCallee(
10734 [&](Function *Fn) { return VisitValue(*Fn, CB); }))
10743 for (
const Use *U : CallbackUses)
10744 ProcessCalledOperand(
U->get(), CB);
10750struct AACallEdgesFunction :
public AACallEdgesImpl {
10751 AACallEdgesFunction(
const IRPosition &IRP, Attributor &
A)
10752 : AACallEdgesImpl(IRP,
A) {}
10761 auto *CBEdges =
A.getAAFor<AACallEdges>(
10765 if (CBEdges->hasNonAsmUnknownCallee())
10766 setHasUnknownCallee(
true, Change);
10767 if (CBEdges->hasUnknownCallee())
10768 setHasUnknownCallee(
false, Change);
10770 for (Function *
F : CBEdges->getOptimisticEdges())
10771 addCalledFunction(
F, Change);
10777 bool UsedAssumedInformation =
false;
10778 if (!
A.checkForAllCallLikeInstructions(ProcessCallInst, *
this,
10779 UsedAssumedInformation,
10783 setHasUnknownCallee(
true, Change);
10792struct AAInterFnReachabilityFunction
10793 :
public CachedReachabilityAA<AAInterFnReachability, Function> {
10794 using Base = CachedReachabilityAA<AAInterFnReachability, Function>;
10795 AAInterFnReachabilityFunction(
const IRPosition &IRP, Attributor &
A)
10798 bool instructionCanReach(
10799 Attributor &
A,
const Instruction &From,
const Function &To,
10802 auto *NonConstThis =
const_cast<AAInterFnReachabilityFunction *
>(
this);
10804 RQITy StackRQI(
A, From, To, ExclusionSet,
false);
10805 RQITy::Reachable
Result;
10806 if (!NonConstThis->checkQueryCache(
A, StackRQI, Result))
10807 return NonConstThis->isReachableImpl(
A, StackRQI,
10809 return Result == RQITy::Reachable::Yes;
10813 bool IsTemporaryRQI)
override {
10815 &RQI.From->getFunction()->getEntryBlock().front();
10816 if (EntryI != RQI.From &&
10817 !instructionCanReach(
A, *EntryI, *RQI.To,
nullptr))
10818 return rememberResult(
A, RQITy::Reachable::No, RQI,
false,
10821 auto CheckReachableCallBase = [&](CallBase *CB) {
10822 auto *CBEdges =
A.getAAFor<AACallEdges>(
10824 if (!CBEdges || !CBEdges->getState().isValidState())
10827 if (CBEdges->hasUnknownCallee())
10830 for (Function *Fn : CBEdges->getOptimisticEdges()) {
10841 if (Fn == getAnchorScope()) {
10842 if (EntryI == RQI.From)
10847 const AAInterFnReachability *InterFnReachability =
10849 DepClassTy::OPTIONAL);
10852 if (!InterFnReachability ||
10860 const auto *IntraFnReachability =
A.getAAFor<AAIntraFnReachability>(
10862 DepClassTy::OPTIONAL);
10870 return IntraFnReachability && !IntraFnReachability->isAssumedReachable(
10871 A, *RQI.From, CBInst, RQI.ExclusionSet);
10874 bool UsedExclusionSet =
true;
10875 bool UsedAssumedInformation =
false;
10876 if (!
A.checkForAllCallLikeInstructions(CheckCallBase, *
this,
10877 UsedAssumedInformation,
10879 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
10882 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
10886 void trackStatistics()
const override {}
10890template <
typename AAType>
10891static std::optional<Constant *>
10894 if (!Ty.isIntegerTy())
10902 std::optional<Constant *> COpt =
AA->getAssumedConstant(
A);
10904 if (!COpt.has_value()) {
10906 return std::nullopt;
10908 if (
auto *
C = *COpt) {
10919 std::optional<Value *> V;
10920 for (
auto &It : Values) {
10922 if (V.has_value() && !*V)
10925 if (!V.has_value())
10939 if (
A.hasSimplificationCallback(getIRPosition())) {
10940 indicatePessimisticFixpoint();
10943 Value *Stripped = getAssociatedValue().stripPointerCasts();
10945 addValue(
A, getState(), *Stripped, getCtxI(),
AA::AnyScope,
10947 indicateOptimisticFixpoint();
10950 AAPotentialValues::initialize(
A);
10954 const std::string getAsStr(Attributor *
A)
const override {
10956 llvm::raw_string_ostream OS(Str);
10961 template <
typename AAType>
10962 static std::optional<Value *> askOtherAA(Attributor &
A,
10963 const AbstractAttribute &AA,
10964 const IRPosition &IRP,
Type &Ty) {
10969 return std::nullopt;
10976 virtual void addValue(Attributor &
A, StateType &State,
Value &V,
10978 Function *AnchorScope)
const {
10982 for (
const auto &U : CB->
args()) {
10992 Type &Ty = *getAssociatedType();
10993 std::optional<Value *> SimpleV =
10994 askOtherAA<AAValueConstantRange>(
A, *
this, ValIRP, Ty);
10995 if (SimpleV.has_value() && !*SimpleV) {
10996 auto *PotentialConstantsAA =
A.getAAFor<AAPotentialConstantValues>(
10997 *
this, ValIRP, DepClassTy::OPTIONAL);
10998 if (PotentialConstantsAA && PotentialConstantsAA->isValidState()) {
10999 for (
const auto &It : PotentialConstantsAA->getAssumedSet())
11000 State.unionAssumed({{*ConstantInt::get(&Ty, It),
nullptr}, S});
11001 if (PotentialConstantsAA->undefIsContained())
11006 if (!SimpleV.has_value())
11018 State.unionAssumed({{*VPtr, CtxI}, S});
11024 AA::ValueAndContext
I;
11028 return II.I ==
I &&
II.S == S;
11031 return std::tie(
I, S) < std::tie(
II.I,
II.S);
11035 bool recurseForValue(Attributor &
A,
const IRPosition &IRP,
AA::ValueScope S) {
11036 SmallMapVector<AA::ValueAndContext, int, 8> ValueScopeMap;
11041 bool UsedAssumedInformation =
false;
11043 if (!
A.getAssumedSimplifiedValues(IRP,
this, Values, CS,
11044 UsedAssumedInformation))
11047 for (
auto &It : Values)
11048 ValueScopeMap[It] += CS;
11050 for (
auto &It : ValueScopeMap)
11051 addValue(
A, getState(), *It.first.getValue(), It.first.getCtxI(),
11057 void giveUpOnIntraprocedural(Attributor &
A) {
11058 auto NewS = StateType::getBestState(getState());
11059 for (
const auto &It : getAssumedSet()) {
11062 addValue(
A, NewS, *It.first.getValue(), It.first.getCtxI(),
11065 assert(!undefIsContained() &&
"Undef should be an explicit value!");
11073 getState() = StateType::getBestState(getState());
11074 getState().unionAssumed({{getAssociatedValue(), getCtxI()},
AA::AnyScope});
11075 AAPotentialValues::indicateOptimisticFixpoint();
11076 return ChangeStatus::CHANGED;
11081 return indicatePessimisticFixpoint();
11089 if (!getAssumedSimplifiedValues(
A, Values, S))
11091 Value &OldV = getAssociatedValue();
11094 Value *NewV = getSingleValue(
A, *
this, getIRPosition(), Values);
11095 if (!NewV || NewV == &OldV)
11100 if (
A.changeAfterManifest(getIRPosition(), *NewV))
11101 return ChangeStatus::CHANGED;
11103 return ChangeStatus::UNCHANGED;
11106 bool getAssumedSimplifiedValues(
11107 Attributor &
A, SmallVectorImpl<AA::ValueAndContext> &Values,
11108 AA::ValueScope S,
bool RecurseForSelectAndPHI =
false)
const override {
11109 if (!isValidState())
11111 bool UsedAssumedInformation =
false;
11112 for (
const auto &It : getAssumedSet())
11113 if (It.second & S) {
11114 if (RecurseForSelectAndPHI && (
isa<PHINode>(It.first.getValue()) ||
11116 if (
A.getAssumedSimplifiedValues(
11118 this, Values, S, UsedAssumedInformation))
11123 assert(!undefIsContained() &&
"Undef should be an explicit value!");
11128struct AAPotentialValuesFloating : AAPotentialValuesImpl {
11129 AAPotentialValuesFloating(
const IRPosition &IRP, Attributor &
A)
11130 : AAPotentialValuesImpl(IRP,
A) {}
11134 auto AssumedBefore = getAssumed();
11136 genericValueTraversal(
A, &getAssociatedValue());
11138 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11139 : ChangeStatus::CHANGED;
11143 struct LivenessInfo {
11144 const AAIsDead *LivenessAA =
nullptr;
11145 bool AnyDead =
false;
11155 SmallVectorImpl<ItemInfo> &Worklist) {
11158 bool UsedAssumedInformation =
false;
11160 auto GetSimplifiedValues = [&](
Value &
V,
11162 if (!
A.getAssumedSimplifiedValues(
11166 Values.
push_back(AA::ValueAndContext{
V,
II.I.getCtxI()});
11168 return Values.
empty();
11170 if (GetSimplifiedValues(*
LHS, LHSValues))
11172 if (GetSimplifiedValues(*
RHS, RHSValues))
11177 InformationCache &InfoCache =
A.getInfoCache();
11184 F ?
A.getInfoCache().getTargetLibraryInfoForFunction(*
F) :
nullptr;
11189 const DataLayout &
DL =
A.getDataLayout();
11190 SimplifyQuery Q(
DL, TLI, DT, AC, CmpI);
11192 auto CheckPair = [&](
Value &LHSV,
Value &RHSV) {
11195 nullptr,
II.S, getAnchorScope());
11201 if (&LHSV == &RHSV &&
11203 Constant *NewV = ConstantInt::get(Type::getInt1Ty(Ctx),
11205 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11212 if (TypedLHS && TypedRHS) {
11214 if (NewV && NewV != &Cmp) {
11215 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11227 if (!LHSIsNull && !RHSIsNull)
11233 assert((LHSIsNull || RHSIsNull) &&
11234 "Expected nullptr versus non-nullptr comparison at this point");
11237 unsigned PtrIdx = LHSIsNull;
11238 bool IsKnownNonNull;
11241 DepClassTy::REQUIRED, IsKnownNonNull);
11242 if (!IsAssumedNonNull)
11248 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11253 for (
auto &LHSValue : LHSValues)
11254 for (
auto &RHSValue : RHSValues)
11255 if (!CheckPair(*LHSValue.getValue(), *RHSValue.getValue()))
11260 bool handleSelectInst(Attributor &
A, SelectInst &SI, ItemInfo
II,
11261 SmallVectorImpl<ItemInfo> &Worklist) {
11263 bool UsedAssumedInformation =
false;
11265 std::optional<Constant *>
C =
11266 A.getAssumedConstant(*
SI.getCondition(), *
this, UsedAssumedInformation);
11267 bool NoValueYet = !
C.has_value();
11275 }
else if (&SI == &getAssociatedValue()) {
11280 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
11282 if (!SimpleV.has_value())
11285 addValue(
A, getState(), **SimpleV, CtxI,
II.S, getAnchorScope());
11293 bool handleLoadInst(Attributor &
A, LoadInst &LI, ItemInfo
II,
11294 SmallVectorImpl<ItemInfo> &Worklist) {
11295 SmallSetVector<Value *, 4> PotentialCopies;
11296 SmallSetVector<Instruction *, 4> PotentialValueOrigins;
11297 bool UsedAssumedInformation =
false;
11299 PotentialValueOrigins, *
this,
11300 UsedAssumedInformation,
11302 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Failed to get potentially "
11303 "loaded values for load instruction "
11311 InformationCache &InfoCache =
A.getInfoCache();
11313 if (!
llvm::all_of(PotentialValueOrigins, [&](Instruction *
I) {
11317 return A.isAssumedDead(
SI->getOperandUse(0),
this,
11319 UsedAssumedInformation,
11321 return A.isAssumedDead(*
I,
this,
nullptr,
11322 UsedAssumedInformation,
11325 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Load is onl used by assumes "
11326 "and we cannot delete all the stores: "
11337 bool AllLocal = ScopeIsLocal;
11342 if (!DynamicallyUnique) {
11343 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Not all potentially loaded "
11344 "values are dynamically unique: "
11349 for (
auto *PotentialCopy : PotentialCopies) {
11351 Worklist.
push_back({{*PotentialCopy, CtxI},
II.S});
11356 if (!AllLocal && ScopeIsLocal)
11361 bool handlePHINode(
11362 Attributor &
A, PHINode &
PHI, ItemInfo
II,
11363 SmallVectorImpl<ItemInfo> &Worklist,
11364 SmallMapVector<const Function *, LivenessInfo, 4> &LivenessAAs) {
11365 auto GetLivenessInfo = [&](
const Function &
F) -> LivenessInfo & {
11366 LivenessInfo &LI = LivenessAAs[&
F];
11367 if (!LI.LivenessAA)
11373 if (&
PHI == &getAssociatedValue()) {
11374 LivenessInfo &LI = GetLivenessInfo(*
PHI.getFunction());
11376 A.getInfoCache().getAnalysisResultForFunction<CycleAnalysis>(
11377 *
PHI.getFunction());
11381 for (
unsigned u = 0, e =
PHI.getNumIncomingValues(); u < e; u++) {
11383 if (LI.LivenessAA &&
11384 LI.LivenessAA->isEdgeDead(IncomingBB,
PHI.getParent())) {
11403 bool UsedAssumedInformation =
false;
11404 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
11406 if (!SimpleV.has_value())
11410 addValue(
A, getState(), **SimpleV, &
PHI,
II.S, getAnchorScope());
11417 bool handleGenericInst(Attributor &
A, Instruction &
I, ItemInfo
II,
11418 SmallVectorImpl<ItemInfo> &Worklist) {
11419 bool SomeSimplified =
false;
11420 bool UsedAssumedInformation =
false;
11422 SmallVector<Value *, 8> NewOps(
I.getNumOperands());
11425 const auto &SimplifiedOp =
A.getAssumedSimplified(
11430 if (!SimplifiedOp.has_value())
11434 NewOps[Idx] = *SimplifiedOp;
11438 SomeSimplified |= (NewOps[Idx] !=
Op);
11444 if (!SomeSimplified)
11447 InformationCache &InfoCache =
A.getInfoCache();
11451 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
11454 const DataLayout &
DL =
I.getDataLayout();
11455 SimplifyQuery Q(
DL, TLI, DT, AC, &
I);
11457 if (!NewV || NewV == &
I)
11460 LLVM_DEBUG(
dbgs() <<
"Generic inst " <<
I <<
" assumed simplified to "
11467 Attributor &
A, Instruction &
I, ItemInfo
II,
11468 SmallVectorImpl<ItemInfo> &Worklist,
11469 SmallMapVector<const Function *, LivenessInfo, 4> &LivenessAAs) {
11472 CI->getPredicate(),
II, Worklist);
11474 switch (
I.getOpcode()) {
11475 case Instruction::Select:
11477 case Instruction::PHI:
11479 case Instruction::Load:
11482 return handleGenericInst(
A,
I,
II, Worklist);
11487 void genericValueTraversal(Attributor &
A,
Value *InitialV) {
11488 SmallMapVector<const Function *, LivenessInfo, 4> LivenessAAs;
11490 SmallSet<ItemInfo, 16> Visited;
11509 LLVM_DEBUG(
dbgs() <<
"Generic value traversal reached iteration limit: "
11510 << Iteration <<
"!\n");
11511 addValue(
A, getState(), *V, CtxI, S, getAnchorScope());
11517 Value *NewV =
nullptr;
11518 if (
V->getType()->isPointerTy()) {
11524 for (Argument &Arg :
Callee->args())
11531 if (NewV && NewV != V) {
11532 Worklist.
push_back({{*NewV, CtxI}, S});
11546 if (V == InitialV && CtxI == getCtxI()) {
11547 indicatePessimisticFixpoint();
11551 addValue(
A, getState(), *V, CtxI, S, getAnchorScope());
11552 }
while (!Worklist.
empty());
11556 for (
auto &It : LivenessAAs)
11557 if (It.second.AnyDead)
11558 A.recordDependence(*It.second.LivenessAA, *
this, DepClassTy::OPTIONAL);
11562 void trackStatistics()
const override {
11567struct AAPotentialValuesArgument final : AAPotentialValuesImpl {
11568 using Base = AAPotentialValuesImpl;
11569 AAPotentialValuesArgument(
const IRPosition &IRP, Attributor &
A)
11576 indicatePessimisticFixpoint();
11581 auto AssumedBefore = getAssumed();
11583 unsigned ArgNo = getCalleeArgNo();
11585 bool UsedAssumedInformation =
false;
11587 auto CallSitePred = [&](AbstractCallSite ACS) {
11589 if (CSArgIRP.getPositionKind() == IRP_INVALID)
11592 if (!
A.getAssumedSimplifiedValues(CSArgIRP,
this, Values,
11594 UsedAssumedInformation))
11597 return isValidState();
11600 if (!
A.checkForAllCallSites(CallSitePred, *
this,
11602 UsedAssumedInformation))
11603 return indicatePessimisticFixpoint();
11605 Function *Fn = getAssociatedFunction();
11606 bool AnyNonLocal =
false;
11607 for (
auto &It : Values) {
11609 addValue(
A, getState(), *It.getValue(), It.getCtxI(),
AA::AnyScope,
11614 return indicatePessimisticFixpoint();
11618 addValue(
A, getState(), *It.getValue(), It.getCtxI(),
AA::AnyScope,
11624 AnyNonLocal =
true;
11626 assert(!undefIsContained() &&
"Undef should be an explicit value!");
11628 giveUpOnIntraprocedural(
A);
11630 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11631 : ChangeStatus::CHANGED;
11635 void trackStatistics()
const override {
11640struct AAPotentialValuesReturned :
public AAPotentialValuesFloating {
11641 using Base = AAPotentialValuesFloating;
11642 AAPotentialValuesReturned(
const IRPosition &IRP, Attributor &
A)
11648 if (!
F ||
F->isDeclaration() ||
F->getReturnType()->isVoidTy()) {
11649 indicatePessimisticFixpoint();
11653 for (Argument &Arg :
F->args())
11656 ReturnedArg = &Arg;
11659 if (!
A.isFunctionIPOAmendable(*
F) ||
11660 A.hasSimplificationCallback(getIRPosition())) {
11662 indicatePessimisticFixpoint();
11664 indicateOptimisticFixpoint();
11670 auto AssumedBefore = getAssumed();
11671 bool UsedAssumedInformation =
false;
11674 Function *AnchorScope = getAnchorScope();
11680 UsedAssumedInformation,
11686 bool AllInterAreIntra =
false;
11689 llvm::all_of(Values, [&](
const AA::ValueAndContext &VAC) {
11693 for (
const AA::ValueAndContext &VAC : Values) {
11694 addValue(
A, getState(), *VAC.
getValue(),
11698 if (AllInterAreIntra)
11705 HandleReturnedValue(*ReturnedArg,
nullptr,
true);
11708 bool AddValues =
true;
11711 addValue(
A, getState(), *RetI.getOperand(0), &RetI,
AA::AnyScope,
11715 return HandleReturnedValue(*RetI.getOperand(0), &RetI, AddValues);
11718 if (!
A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
11719 UsedAssumedInformation,
11721 return indicatePessimisticFixpoint();
11724 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11725 : ChangeStatus::CHANGED;
11730 return ChangeStatus::UNCHANGED;
11732 if (!getAssumedSimplifiedValues(
A, Values, AA::ValueScope::Intraprocedural,
11734 return ChangeStatus::UNCHANGED;
11735 Value *NewVal = getSingleValue(
A, *
this, getIRPosition(), Values);
11737 return ChangeStatus::UNCHANGED;
11742 "Number of function with unique return");
11745 {Attribute::get(Arg->
getContext(), Attribute::Returned)});
11750 Value *RetOp = RetI.getOperand(0);
11754 if (
A.changeUseAfterManifest(RetI.getOperandUse(0), *NewVal))
11755 Changed = ChangeStatus::CHANGED;
11758 bool UsedAssumedInformation =
false;
11759 (void)
A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
11760 UsedAssumedInformation,
11766 return AAPotentialValues::indicatePessimisticFixpoint();
11770 void trackStatistics()
const override{
11777struct AAPotentialValuesFunction : AAPotentialValuesImpl {
11778 AAPotentialValuesFunction(
const IRPosition &IRP, Attributor &
A)
11779 : AAPotentialValuesImpl(IRP,
A) {}
11788 void trackStatistics()
const override {
11793struct AAPotentialValuesCallSite : AAPotentialValuesFunction {
11794 AAPotentialValuesCallSite(
const IRPosition &IRP, Attributor &
A)
11795 : AAPotentialValuesFunction(IRP,
A) {}
11798 void trackStatistics()
const override {
11803struct AAPotentialValuesCallSiteReturned : AAPotentialValuesImpl {
11804 AAPotentialValuesCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
11805 : AAPotentialValuesImpl(IRP,
A) {}
11809 auto AssumedBefore = getAssumed();
11813 return indicatePessimisticFixpoint();
11815 bool UsedAssumedInformation =
false;
11819 UsedAssumedInformation))
11820 return indicatePessimisticFixpoint();
11827 Values, S, UsedAssumedInformation))
11830 for (
auto &It : Values) {
11831 Value *
V = It.getValue();
11832 std::optional<Value *> CallerV =
A.translateArgumentToCallSiteContent(
11833 V, *CB, *
this, UsedAssumedInformation);
11834 if (!CallerV.has_value()) {
11838 V = *CallerV ? *CallerV :
V;
11844 giveUpOnIntraprocedural(
A);
11847 addValue(
A, getState(), *V, CB, S, getAnchorScope());
11852 return indicatePessimisticFixpoint();
11854 return indicatePessimisticFixpoint();
11855 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11856 : ChangeStatus::CHANGED;
11860 return AAPotentialValues::indicatePessimisticFixpoint();
11864 void trackStatistics()
const override {
11869struct AAPotentialValuesCallSiteArgument : AAPotentialValuesFloating {
11870 AAPotentialValuesCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
11871 : AAPotentialValuesFloating(IRP,
A) {}
11874 void trackStatistics()
const override {
11882struct AAAssumptionInfoImpl :
public AAAssumptionInfo {
11883 AAAssumptionInfoImpl(
const IRPosition &IRP, Attributor &
A,
11884 const DenseSet<StringRef> &Known)
11885 : AAAssumptionInfo(IRP,
A, Known) {}
11890 if (getKnown().isUniversal())
11891 return ChangeStatus::UNCHANGED;
11893 const IRPosition &IRP = getIRPosition();
11895 getAssumed().getSet().
end());
11897 return A.manifestAttrs(IRP,
11904 bool hasAssumption(
const StringRef Assumption)
const override {
11905 return isValidState() && setContains(Assumption);
11909 const std::string getAsStr(Attributor *
A)
const override {
11910 const SetContents &Known = getKnown();
11911 const SetContents &Assumed = getAssumed();
11915 const std::string KnownStr =
llvm::join(Set,
",");
11917 std::string AssumedStr =
"Universal";
11918 if (!Assumed.isUniversal()) {
11919 Set.assign(Assumed.getSet().begin(), Assumed.getSet().end());
11922 return "Known [" + KnownStr +
"]," +
" Assumed [" + AssumedStr +
"]";
11937struct AAAssumptionInfoFunction final : AAAssumptionInfoImpl {
11938 AAAssumptionInfoFunction(
const IRPosition &IRP, Attributor &
A)
11939 : AAAssumptionInfoImpl(IRP,
A,
11946 auto CallSitePred = [&](AbstractCallSite ACS) {
11947 const auto *AssumptionAA =
A.getAAFor<AAAssumptionInfo>(
11949 DepClassTy::REQUIRED);
11953 Changed |= getIntersection(AssumptionAA->getAssumed());
11954 return !getAssumed().empty() || !getKnown().empty();
11957 bool UsedAssumedInformation =
false;
11962 if (!
A.checkForAllCallSites(CallSitePred, *
this,
true,
11963 UsedAssumedInformation))
11964 return indicatePessimisticFixpoint();
11966 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11969 void trackStatistics()
const override {}
11973struct AAAssumptionInfoCallSite final : AAAssumptionInfoImpl {
11975 AAAssumptionInfoCallSite(
const IRPosition &IRP, Attributor &
A)
11976 : AAAssumptionInfoImpl(IRP,
A, getInitialAssumptions(IRP)) {}
11981 A.getAAFor<AAAssumptionInfo>(*
this, FnPos, DepClassTy::REQUIRED);
11987 auto *AssumptionAA =
11988 A.getAAFor<AAAssumptionInfo>(*
this, FnPos, DepClassTy::REQUIRED);
11990 return indicatePessimisticFixpoint();
11991 bool Changed = getIntersection(AssumptionAA->getAssumed());
11992 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11996 void trackStatistics()
const override {}
12001 DenseSet<StringRef> getInitialAssumptions(
const IRPosition &IRP) {
12008 return Assumptions;
12023struct AAUnderlyingObjectsImpl
12029 const std::string getAsStr(
Attributor *
A)
const override {
12030 if (!isValidState())
12031 return "<invalid>";
12034 OS <<
"underlying objects: inter " << InterAssumedUnderlyingObjects.size()
12035 <<
" objects, intra " << IntraAssumedUnderlyingObjects.size()
12037 if (!InterAssumedUnderlyingObjects.empty()) {
12038 OS <<
"inter objects:\n";
12039 for (
auto *Obj : InterAssumedUnderlyingObjects)
12040 OS << *Obj <<
'\n';
12042 if (!IntraAssumedUnderlyingObjects.empty()) {
12043 OS <<
"intra objects:\n";
12044 for (
auto *Obj : IntraAssumedUnderlyingObjects)
12045 OS << *
Obj <<
'\n';
12051 void trackStatistics()
const override {}
12055 auto &Ptr = getAssociatedValue();
12057 bool UsedAssumedInformation =
false;
12058 auto DoUpdate = [&](SmallSetVector<Value *, 8> &UnderlyingObjects,
12060 SmallPtrSet<Value *, 8> SeenObjects;
12064 Scope, UsedAssumedInformation))
12065 return UnderlyingObjects.
insert(&Ptr);
12069 for (
unsigned I = 0;
I < Values.
size(); ++
I) {
12070 auto &VAC = Values[
I];
12073 if (!SeenObjects.
insert(UO ? UO : Obj).second)
12075 if (UO && UO != Obj) {
12081 const auto *OtherAA =
A.getAAFor<AAUnderlyingObjects>(
12083 auto Pred = [&](
Value &
V) {
12091 if (!OtherAA || !OtherAA->forallUnderlyingObjects(Pred, Scope))
12093 "The forall call should not return false at this position");
12099 Changed |= handleIndirect(
A, *Obj, UnderlyingObjects, Scope,
12100 UsedAssumedInformation);
12106 for (
unsigned u = 0, e =
PHI->getNumIncomingValues(); u < e; u++) {
12108 handleIndirect(
A, *
PHI->getIncomingValue(u), UnderlyingObjects,
12109 Scope, UsedAssumedInformation);
12123 if (!UsedAssumedInformation)
12124 indicateOptimisticFixpoint();
12125 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
12128 bool forallUnderlyingObjects(
12129 function_ref<
bool(
Value &)> Pred,
12131 if (!isValidState())
12132 return Pred(getAssociatedValue());
12135 ? IntraAssumedUnderlyingObjects
12136 : InterAssumedUnderlyingObjects;
12137 for (
Value *Obj : AssumedUnderlyingObjects)
12147 bool handleIndirect(Attributor &
A,
Value &V,
12148 SmallSetVector<Value *, 8> &UnderlyingObjects,
12151 const auto *AA =
A.getAAFor<AAUnderlyingObjects>(
12153 auto Pred = [&](
Value &
V) {
12157 if (!AA || !AA->forallUnderlyingObjects(Pred, Scope))
12159 "The forall call should not return false at this position");
12165 SmallSetVector<Value *, 8> IntraAssumedUnderlyingObjects;
12167 SmallSetVector<Value *, 8> InterAssumedUnderlyingObjects;
12170struct AAUnderlyingObjectsFloating final : AAUnderlyingObjectsImpl {
12171 AAUnderlyingObjectsFloating(
const IRPosition &IRP, Attributor &
A)
12172 : AAUnderlyingObjectsImpl(IRP,
A) {}
12175struct AAUnderlyingObjectsArgument final : AAUnderlyingObjectsImpl {
12176 AAUnderlyingObjectsArgument(
const IRPosition &IRP, Attributor &
A)
12177 : AAUnderlyingObjectsImpl(IRP,
A) {}
12180struct AAUnderlyingObjectsCallSite final : AAUnderlyingObjectsImpl {
12181 AAUnderlyingObjectsCallSite(
const IRPosition &IRP, Attributor &
A)
12182 : AAUnderlyingObjectsImpl(IRP,
A) {}
12185struct AAUnderlyingObjectsCallSiteArgument final : AAUnderlyingObjectsImpl {
12186 AAUnderlyingObjectsCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
12187 : AAUnderlyingObjectsImpl(IRP,
A) {}
12190struct AAUnderlyingObjectsReturned final : AAUnderlyingObjectsImpl {
12191 AAUnderlyingObjectsReturned(
const IRPosition &IRP, Attributor &
A)
12192 : AAUnderlyingObjectsImpl(IRP,
A) {}
12195struct AAUnderlyingObjectsCallSiteReturned final : AAUnderlyingObjectsImpl {
12196 AAUnderlyingObjectsCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
12197 : AAUnderlyingObjectsImpl(IRP,
A) {}
12200struct AAUnderlyingObjectsFunction final : AAUnderlyingObjectsImpl {
12201 AAUnderlyingObjectsFunction(
const IRPosition &IRP, Attributor &
A)
12202 : AAUnderlyingObjectsImpl(IRP,
A) {}
12208struct AAGlobalValueInfoFloating :
public AAGlobalValueInfo {
12209 AAGlobalValueInfoFloating(
const IRPosition &IRP, Attributor &
A)
12210 : AAGlobalValueInfo(IRP,
A) {}
12215 bool checkUse(Attributor &
A,
const Use &U,
bool &Follow,
12216 SmallVectorImpl<const Value *> &Worklist) {
12223 LLVM_DEBUG(
dbgs() <<
"[AAGlobalValueInfo] Check use: " << *
U.get() <<
" in "
12224 << *UInst <<
"\n");
12227 int Idx = &
Cmp->getOperandUse(0) == &
U;
12230 return U == &getAnchorValue();
12235 auto CallSitePred = [&](AbstractCallSite ACS) {
12236 Worklist.
push_back(ACS.getInstruction());
12239 bool UsedAssumedInformation =
false;
12241 if (!
A.checkForAllCallSites(CallSitePred, *UInst->
getFunction(),
12243 UsedAssumedInformation))
12261 if (!Fn || !
A.isFunctionIPOAmendable(*Fn))
12270 unsigned NumUsesBefore =
Uses.size();
12272 SmallPtrSet<const Value *, 8> Visited;
12276 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
12284 return checkUse(
A, U, Follow, Worklist);
12286 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
12287 Uses.insert(&OldU);
12291 while (!Worklist.
empty()) {
12293 if (!Visited.
insert(V).second)
12295 if (!
A.checkForAllUses(UsePred, *
this, *V,
12297 DepClassTy::OPTIONAL,
12298 true, EquivalentUseCB)) {
12299 return indicatePessimisticFixpoint();
12303 return Uses.size() == NumUsesBefore ? ChangeStatus::UNCHANGED
12304 : ChangeStatus::CHANGED;
12307 bool isPotentialUse(
const Use &U)
const override {
12308 return !isValidState() ||
Uses.contains(&U);
12313 return ChangeStatus::UNCHANGED;
12317 const std::string getAsStr(Attributor *
A)
const override {
12318 return "[" + std::to_string(
Uses.size()) +
" uses]";
12321 void trackStatistics()
const override {
12327 SmallPtrSet<const Use *, 8>
Uses;
12333struct AAIndirectCallInfoCallSite :
public AAIndirectCallInfo {
12334 AAIndirectCallInfoCallSite(
const IRPosition &IRP, Attributor &
A)
12335 : AAIndirectCallInfo(IRP,
A) {}
12339 auto *MD = getCtxI()->getMetadata(LLVMContext::MD_callees);
12340 if (!MD && !
A.isClosedWorldModule())
12344 for (
const auto &
Op : MD->operands())
12346 PotentialCallees.insert(Callee);
12347 }
else if (
A.isClosedWorldModule()) {
12349 A.getInfoCache().getIndirectlyCallableFunctions(
A);
12350 PotentialCallees.insert_range(IndirectlyCallableFunctions);
12353 if (PotentialCallees.empty())
12354 indicateOptimisticFixpoint();
12362 SmallSetVector<Function *, 4> AssumedCalleesNow;
12363 bool AllCalleesKnownNow = AllCalleesKnown;
12365 auto CheckPotentialCalleeUse = [&](
Function &PotentialCallee,
12366 bool &UsedAssumedInformation) {
12367 const auto *GIAA =
A.getAAFor<AAGlobalValueInfo>(
12369 if (!GIAA || GIAA->isPotentialUse(CalleeUse))
12371 UsedAssumedInformation = !GIAA->isAtFixpoint();
12375 auto AddPotentialCallees = [&]() {
12376 for (
auto *PotentialCallee : PotentialCallees) {
12377 bool UsedAssumedInformation =
false;
12378 if (CheckPotentialCalleeUse(*PotentialCallee, UsedAssumedInformation))
12379 AssumedCalleesNow.
insert(PotentialCallee);
12385 bool UsedAssumedInformation =
false;
12388 AA::ValueScope::AnyScope,
12389 UsedAssumedInformation)) {
12390 if (PotentialCallees.empty())
12391 return indicatePessimisticFixpoint();
12392 AddPotentialCallees();
12397 auto CheckPotentialCallee = [&](
Function &Fn) {
12398 if (!PotentialCallees.empty() && !PotentialCallees.count(&Fn))
12401 auto &CachedResult = FilterResults[&Fn];
12402 if (CachedResult.has_value())
12403 return CachedResult.value();
12405 bool UsedAssumedInformation =
false;
12406 if (!CheckPotentialCalleeUse(Fn, UsedAssumedInformation)) {
12407 if (!UsedAssumedInformation)
12408 CachedResult =
false;
12417 for (
int I = NumCBArgs;
I < NumFnArgs; ++
I) {
12418 bool IsKnown =
false;
12421 DepClassTy::OPTIONAL, IsKnown)) {
12423 CachedResult =
false;
12428 CachedResult =
true;
12434 for (
auto &VAC : Values) {
12442 if (CheckPotentialCallee(*VACFn))
12443 AssumedCalleesNow.
insert(VACFn);
12446 if (!PotentialCallees.empty()) {
12447 AddPotentialCallees();
12450 AllCalleesKnownNow =
false;
12453 if (AssumedCalleesNow == AssumedCallees &&
12454 AllCalleesKnown == AllCalleesKnownNow)
12455 return ChangeStatus::UNCHANGED;
12457 std::swap(AssumedCallees, AssumedCalleesNow);
12458 AllCalleesKnown = AllCalleesKnownNow;
12459 return ChangeStatus::CHANGED;
12465 if (!AllCalleesKnown && AssumedCallees.empty())
12466 return ChangeStatus::UNCHANGED;
12469 bool UsedAssumedInformation =
false;
12470 if (
A.isAssumedDead(*CB,
this,
nullptr,
12471 UsedAssumedInformation))
12472 return ChangeStatus::UNCHANGED;
12476 if (
FP->getType()->getPointerAddressSpace())
12477 FP =
new AddrSpaceCastInst(
FP, PointerType::get(
FP->getContext(), 0),
12487 if (AssumedCallees.empty()) {
12488 assert(AllCalleesKnown &&
12489 "Expected all callees to be known if there are none.");
12490 A.changeToUnreachableAfterManifest(CB);
12491 return ChangeStatus::CHANGED;
12495 if (AllCalleesKnown && AssumedCallees.size() == 1) {
12496 auto *NewCallee = AssumedCallees.front();
12499 NumIndirectCallsPromoted++;
12500 return ChangeStatus::CHANGED;
12507 A.deleteAfterManifest(*CB);
12508 return ChangeStatus::CHANGED;
12518 bool SpecializedForAnyCallees =
false;
12519 bool SpecializedForAllCallees = AllCalleesKnown;
12520 ICmpInst *LastCmp =
nullptr;
12523 for (Function *NewCallee : AssumedCallees) {
12524 if (!
A.shouldSpecializeCallSiteForCallee(*
this, *CB, *NewCallee,
12525 AssumedCallees.size())) {
12526 SkippedAssumedCallees.
push_back(NewCallee);
12527 SpecializedForAllCallees =
false;
12530 SpecializedForAnyCallees =
true;
12536 A.registerManifestAddedBasicBlock(*ThenTI->
getParent());
12537 A.registerManifestAddedBasicBlock(*IP->getParent());
12543 A.registerManifestAddedBasicBlock(*ElseBB);
12545 SplitTI->replaceUsesOfWith(CBBB, ElseBB);
12550 CastInst *RetBC =
nullptr;
12551 CallInst *NewCall =
nullptr;
12556 NumIndirectCallsPromoted++;
12564 auto AttachCalleeMetadata = [&](CallBase &IndirectCB) {
12565 if (!AllCalleesKnown)
12566 return ChangeStatus::UNCHANGED;
12567 MDBuilder MDB(IndirectCB.getContext());
12568 MDNode *Callees = MDB.createCallees(SkippedAssumedCallees);
12569 IndirectCB.setMetadata(LLVMContext::MD_callees, Callees);
12570 return ChangeStatus::CHANGED;
12573 if (!SpecializedForAnyCallees)
12574 return AttachCalleeMetadata(*CB);
12577 if (SpecializedForAllCallees) {
12580 new UnreachableInst(IP->getContext(), IP);
12581 IP->eraseFromParent();
12584 CBClone->setName(CB->
getName());
12585 CBClone->insertBefore(*IP->getParent(), IP);
12586 NewCalls.
push_back({CBClone,
nullptr});
12587 AttachCalleeMetadata(*CBClone);
12594 CB->
getParent()->getFirstInsertionPt());
12595 for (
auto &It : NewCalls) {
12596 CallBase *NewCall = It.first;
12597 Instruction *CallRet = It.second ? It.second : It.first;
12609 A.deleteAfterManifest(*CB);
12610 Changed = ChangeStatus::CHANGED;
12616 const std::string getAsStr(Attributor *
A)
const override {
12617 return std::string(AllCalleesKnown ?
"eliminate" :
"specialize") +
12618 " indirect call site with " + std::to_string(AssumedCallees.size()) +
12622 void trackStatistics()
const override {
12623 if (AllCalleesKnown) {
12625 Eliminated, CallSites,
12626 "Number of indirect call sites eliminated via specialization")
12629 "Number of indirect call sites specialized")
12633 bool foreachCallee(function_ref<
bool(Function *)> CB)
const override {
12634 return isValidState() && AllCalleesKnown &&
all_of(AssumedCallees, CB);
12639 DenseMap<Function *, std::optional<bool>> FilterResults;
12643 SmallSetVector<Function *, 4> PotentialCallees;
12647 SmallSetVector<Function *, 4> AssumedCallees;
12651 bool AllCalleesKnown =
true;
12658struct AAInvariantLoadPointerImpl
12659 :
public StateWrapper<BitIntegerState<uint8_t, 15>,
12660 AAInvariantLoadPointer> {
12664 IS_NOALIAS = 1 << 0,
12667 IS_NOEFFECT = 1 << 1,
12669 IS_LOCALLY_INVARIANT = 1 << 2,
12671 IS_LOCALLY_CONSTRAINED = 1 << 3,
12673 IS_BEST_STATE = IS_NOALIAS | IS_NOEFFECT | IS_LOCALLY_INVARIANT |
12674 IS_LOCALLY_CONSTRAINED,
12676 static_assert(getBestState() == IS_BEST_STATE,
"Unexpected best state");
12679 StateWrapper<BitIntegerState<uint8_t, 15>, AAInvariantLoadPointer>;
12683 AAInvariantLoadPointerImpl(
const IRPosition &IRP, Attributor &
A)
12686 bool isKnownInvariant()
const final {
12687 return isKnownLocallyInvariant() && isKnown(IS_LOCALLY_CONSTRAINED);
12690 bool isKnownLocallyInvariant()
const final {
12691 if (isKnown(IS_LOCALLY_INVARIANT))
12693 return isKnown(IS_NOALIAS | IS_NOEFFECT);
12696 bool isAssumedInvariant()
const final {
12697 return isAssumedLocallyInvariant() && isAssumed(IS_LOCALLY_CONSTRAINED);
12700 bool isAssumedLocallyInvariant()
const final {
12701 if (isAssumed(IS_LOCALLY_INVARIANT))
12703 return isAssumed(IS_NOALIAS | IS_NOEFFECT);
12710 if (requiresNoAlias() && !isAssumed(IS_NOALIAS))
12711 return indicatePessimisticFixpoint();
12715 Changed |= updateLocalInvariance(
A);
12721 if (!isKnownInvariant())
12722 return ChangeStatus::UNCHANGED;
12725 const Value *Ptr = &getAssociatedValue();
12726 const auto TagInvariantLoads = [&](
const Use &
U,
bool &) {
12727 if (
U.get() != Ptr)
12735 if (!
A.isRunOn(
I->getFunction()))
12738 if (
I->hasMetadata(LLVMContext::MD_invariant_load))
12742 LI->setMetadata(LLVMContext::MD_invariant_load,
12744 Changed = ChangeStatus::CHANGED;
12749 (void)
A.checkForAllUses(TagInvariantLoads, *
this, *Ptr);
12754 const std::string getAsStr(Attributor *)
const override {
12755 if (isKnownInvariant())
12756 return "load-invariant pointer";
12757 return "non-invariant pointer";
12761 void trackStatistics()
const override {}
12765 bool requiresNoAlias()
const {
12766 switch (getPositionKind()) {
12772 case IRP_CALL_SITE:
12774 case IRP_CALL_SITE_RETURNED: {
12779 case IRP_ARGUMENT: {
12780 const Function *
F = getAssociatedFunction();
12781 assert(
F &&
"no associated function for argument");
12787 bool isExternal()
const {
12788 const Function *
F = getAssociatedFunction();
12792 getPositionKind() != IRP_CALL_SITE_RETURNED;
12796 if (isKnown(IS_NOALIAS) || !isAssumed(IS_NOALIAS))
12797 return ChangeStatus::UNCHANGED;
12800 if (
const auto *ANoAlias =
A.getOrCreateAAFor<AANoAlias>(
12801 getIRPosition(),
this, DepClassTy::REQUIRED)) {
12802 if (ANoAlias->isKnownNoAlias()) {
12803 addKnownBits(IS_NOALIAS);
12804 return ChangeStatus::CHANGED;
12807 if (!ANoAlias->isAssumedNoAlias()) {
12808 removeAssumedBits(IS_NOALIAS);
12809 return ChangeStatus::CHANGED;
12812 return ChangeStatus::UNCHANGED;
12817 if (
const Argument *Arg = getAssociatedArgument()) {
12819 addKnownBits(IS_NOALIAS);
12820 return ChangeStatus::UNCHANGED;
12825 removeAssumedBits(IS_NOALIAS);
12826 return ChangeStatus::CHANGED;
12829 return ChangeStatus::UNCHANGED;
12833 if (isKnown(IS_NOEFFECT) || !isAssumed(IS_NOEFFECT))
12834 return ChangeStatus::UNCHANGED;
12836 if (!getAssociatedFunction())
12837 return indicatePessimisticFixpoint();
12840 return indicatePessimisticFixpoint();
12842 const auto HasNoEffectLoads = [&](
const Use &
U,
bool &) {
12844 return !LI || !LI->mayHaveSideEffects();
12846 if (!
A.checkForAllUses(HasNoEffectLoads, *
this, getAssociatedValue()))
12847 return indicatePessimisticFixpoint();
12849 if (
const auto *AMemoryBehavior =
A.getOrCreateAAFor<AAMemoryBehavior>(
12850 getIRPosition(),
this, DepClassTy::REQUIRED)) {
12853 if (!AMemoryBehavior->isAssumedReadOnly())
12854 return indicatePessimisticFixpoint();
12856 if (AMemoryBehavior->isKnownReadOnly()) {
12857 addKnownBits(IS_NOEFFECT);
12858 return ChangeStatus::UNCHANGED;
12861 return ChangeStatus::UNCHANGED;
12864 if (
const Argument *Arg = getAssociatedArgument()) {
12866 addKnownBits(IS_NOEFFECT);
12867 return ChangeStatus::UNCHANGED;
12872 return indicatePessimisticFixpoint();
12875 return ChangeStatus::UNCHANGED;
12879 if (isKnown(IS_LOCALLY_INVARIANT) || !isAssumed(IS_LOCALLY_INVARIANT))
12880 return ChangeStatus::UNCHANGED;
12883 const auto *AUO =
A.getOrCreateAAFor<AAUnderlyingObjects>(
12884 getIRPosition(),
this, DepClassTy::REQUIRED);
12886 return ChangeStatus::UNCHANGED;
12888 bool UsedAssumedInformation =
false;
12889 const auto IsLocallyInvariantLoadIfPointer = [&](
const Value &
V) {
12890 if (!
V.getType()->isPointerTy())
12892 const auto *IsInvariantLoadPointer =
12894 DepClassTy::REQUIRED);
12896 if (!IsInvariantLoadPointer)
12899 if (IsInvariantLoadPointer->isKnownLocallyInvariant())
12901 if (!IsInvariantLoadPointer->isAssumedLocallyInvariant())
12904 UsedAssumedInformation =
true;
12907 if (!AUO->forallUnderlyingObjects(IsLocallyInvariantLoadIfPointer))
12908 return indicatePessimisticFixpoint();
12914 if (!IsLocallyInvariantLoadIfPointer(*Arg))
12915 return indicatePessimisticFixpoint();
12920 if (!UsedAssumedInformation) {
12922 addKnownBits(IS_LOCALLY_INVARIANT);
12923 return ChangeStatus::CHANGED;
12926 return ChangeStatus::UNCHANGED;
12930struct AAInvariantLoadPointerFloating final : AAInvariantLoadPointerImpl {
12931 AAInvariantLoadPointerFloating(
const IRPosition &IRP, Attributor &
A)
12932 : AAInvariantLoadPointerImpl(IRP,
A) {}
12935struct AAInvariantLoadPointerReturned final : AAInvariantLoadPointerImpl {
12936 AAInvariantLoadPointerReturned(
const IRPosition &IRP, Attributor &
A)
12937 : AAInvariantLoadPointerImpl(IRP,
A) {}
12940 removeAssumedBits(IS_LOCALLY_CONSTRAINED);
12944struct AAInvariantLoadPointerCallSiteReturned final
12945 : AAInvariantLoadPointerImpl {
12946 AAInvariantLoadPointerCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
12947 : AAInvariantLoadPointerImpl(IRP,
A) {}
12950 const Function *
F = getAssociatedFunction();
12951 assert(
F &&
"no associated function for return from call");
12953 if (!
F->isDeclaration() && !
F->isIntrinsic())
12954 return AAInvariantLoadPointerImpl::initialize(
A);
12959 return AAInvariantLoadPointerImpl::initialize(
A);
12961 if (
F->onlyReadsMemory() &&
F->hasNoSync())
12962 return AAInvariantLoadPointerImpl::initialize(
A);
12966 indicatePessimisticFixpoint();
12970struct AAInvariantLoadPointerArgument final : AAInvariantLoadPointerImpl {
12971 AAInvariantLoadPointerArgument(
const IRPosition &IRP, Attributor &
A)
12972 : AAInvariantLoadPointerImpl(IRP,
A) {}
12975 const Function *
F = getAssociatedFunction();
12976 assert(
F &&
"no associated function for argument");
12979 addKnownBits(IS_LOCALLY_CONSTRAINED);
12983 if (!
F->hasLocalLinkage())
12984 removeAssumedBits(IS_LOCALLY_CONSTRAINED);
12988struct AAInvariantLoadPointerCallSiteArgument final
12989 : AAInvariantLoadPointerImpl {
12990 AAInvariantLoadPointerCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
12991 : AAInvariantLoadPointerImpl(IRP,
A) {}
12998template <
typename InstType>
12999static bool makeChange(Attributor &
A, InstType *MemInst,
const Use &U,
13000 Value *OriginalValue, PointerType *NewPtrTy,
13001 bool UseOriginalValue) {
13002 if (
U.getOperandNo() != InstType::getPointerOperandIndex())
13005 if (MemInst->isVolatile()) {
13006 auto *
TTI =
A.getInfoCache().getAnalysisResultForFunction<TargetIRAnalysis>(
13007 *MemInst->getFunction());
13008 unsigned NewAS = NewPtrTy->getPointerAddressSpace();
13013 if (UseOriginalValue) {
13014 A.changeUseAfterManifest(
const_cast<Use &
>(U), *OriginalValue);
13018 Instruction *CastInst =
new AddrSpaceCastInst(OriginalValue, NewPtrTy);
13020 A.changeUseAfterManifest(
const_cast<Use &
>(U), *CastInst);
13024struct AAAddressSpaceImpl :
public AAAddressSpace {
13025 AAAddressSpaceImpl(
const IRPosition &IRP, Attributor &
A)
13026 : AAAddressSpace(IRP,
A) {}
13029 assert(isValidState() &&
"the AA is invalid");
13030 return AssumedAddressSpace;
13035 assert(getAssociatedType()->isPtrOrPtrVectorTy() &&
13036 "Associated value is not a pointer");
13038 if (!
A.getInfoCache().getFlatAddressSpace().has_value()) {
13039 indicatePessimisticFixpoint();
13043 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
13044 unsigned AS = getAssociatedType()->getPointerAddressSpace();
13045 if (AS != FlatAS) {
13046 [[maybe_unused]]
bool R = takeAddressSpace(AS);
13047 assert(R &&
"The take should happen");
13048 indicateOptimisticFixpoint();
13053 uint32_t OldAddressSpace = AssumedAddressSpace;
13054 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
13056 auto CheckAddressSpace = [&](
Value &
Obj) {
13062 unsigned ObjAS =
Obj.getType()->getPointerAddressSpace();
13063 if (ObjAS != FlatAS)
13064 return takeAddressSpace(ObjAS);
13078 A.getInfoCache().getAnalysisResultForFunction<TargetIRAnalysis>(*F);
13080 if (AssumedAS != ~0U)
13081 return takeAddressSpace(AssumedAS);
13085 return takeAddressSpace(FlatAS);
13088 auto *AUO =
A.getOrCreateAAFor<AAUnderlyingObjects>(getIRPosition(),
this,
13089 DepClassTy::REQUIRED);
13090 if (!AUO->forallUnderlyingObjects(CheckAddressSpace))
13091 return indicatePessimisticFixpoint();
13093 return OldAddressSpace == AssumedAddressSpace ? ChangeStatus::UNCHANGED
13094 : ChangeStatus::CHANGED;
13101 if (NewAS == InvalidAddressSpace ||
13103 return ChangeStatus::UNCHANGED;
13105 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
13107 Value *AssociatedValue = &getAssociatedValue();
13108 Value *OriginalValue = peelAddrspacecast(AssociatedValue, FlatAS);
13111 PointerType::get(getAssociatedType()->
getContext(), NewAS);
13112 bool UseOriginalValue =
13117 auto Pred = [&](
const Use &
U,
bool &) {
13118 if (
U.get() != AssociatedValue)
13129 makeChange(
A, LI, U, OriginalValue, NewPtrTy, UseOriginalValue);
13132 makeChange(
A, SI, U, OriginalValue, NewPtrTy, UseOriginalValue);
13135 makeChange(
A, RMW, U, OriginalValue, NewPtrTy, UseOriginalValue);
13138 makeChange(
A, CmpX, U, OriginalValue, NewPtrTy, UseOriginalValue);
13145 (void)
A.checkForAllUses(Pred, *
this, getAssociatedValue(),
13148 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
13152 const std::string getAsStr(Attributor *
A)
const override {
13153 if (!isValidState())
13154 return "addrspace(<invalid>)";
13155 return "addrspace(" +
13156 (AssumedAddressSpace == InvalidAddressSpace
13158 : std::to_string(AssumedAddressSpace)) +
13163 uint32_t AssumedAddressSpace = InvalidAddressSpace;
13165 bool takeAddressSpace(uint32_t AS) {
13166 if (AssumedAddressSpace == InvalidAddressSpace) {
13167 AssumedAddressSpace = AS;
13170 return AssumedAddressSpace == AS;
13173 static Value *peelAddrspacecast(
Value *V,
unsigned FlatAS) {
13175 assert(
I->getSrcAddressSpace() != FlatAS &&
13176 "there should not be flat AS -> non-flat AS");
13177 return I->getPointerOperand();
13180 if (
C->getOpcode() == Instruction::AddrSpaceCast) {
13181 assert(
C->getOperand(0)->getType()->getPointerAddressSpace() !=
13183 "there should not be flat AS -> non-flat AS X");
13184 return C->getOperand(0);
13190struct AAAddressSpaceFloating final : AAAddressSpaceImpl {
13191 AAAddressSpaceFloating(
const IRPosition &IRP, Attributor &
A)
13192 : AAAddressSpaceImpl(IRP,
A) {}
13194 void trackStatistics()
const override {
13199struct AAAddressSpaceReturned final : AAAddressSpaceImpl {
13200 AAAddressSpaceReturned(
const IRPosition &IRP, Attributor &
A)
13201 : AAAddressSpaceImpl(IRP,
A) {}
13207 (void)indicatePessimisticFixpoint();
13210 void trackStatistics()
const override {
13215struct AAAddressSpaceCallSiteReturned final : AAAddressSpaceImpl {
13216 AAAddressSpaceCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
13217 : AAAddressSpaceImpl(IRP,
A) {}
13219 void trackStatistics()
const override {
13224struct AAAddressSpaceArgument final : AAAddressSpaceImpl {
13225 AAAddressSpaceArgument(
const IRPosition &IRP, Attributor &
A)
13226 : AAAddressSpaceImpl(IRP,
A) {}
13231struct AAAddressSpaceCallSiteArgument final : AAAddressSpaceImpl {
13232 AAAddressSpaceCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
13233 : AAAddressSpaceImpl(IRP,
A) {}
13239 (void)indicatePessimisticFixpoint();
13242 void trackStatistics()
const override {
13257struct AANoAliasAddrSpaceImpl :
public AANoAliasAddrSpace {
13258 AANoAliasAddrSpaceImpl(
const IRPosition &IRP, Attributor &
A)
13259 : AANoAliasAddrSpace(IRP,
A) {}
13262 assert(getAssociatedType()->isPtrOrPtrVectorTy() &&
13263 "Associated value is not a pointer");
13267 std::optional<unsigned> FlatAS =
A.getInfoCache().getFlatAddressSpace();
13268 if (!FlatAS.has_value()) {
13269 indicatePessimisticFixpoint();
13275 unsigned AS = getAssociatedType()->getPointerAddressSpace();
13276 if (AS != *FlatAS) {
13278 indicateOptimisticFixpoint();
13283 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
13284 uint32_t OldAssumed = getAssumed();
13286 auto CheckAddressSpace = [&](
Value &
Obj) {
13290 unsigned AS =
Obj.getType()->getPointerAddressSpace();
13294 removeAS(
Obj.getType()->getPointerAddressSpace());
13298 const AAUnderlyingObjects *AUO =
A.getOrCreateAAFor<AAUnderlyingObjects>(
13299 getIRPosition(),
this, DepClassTy::REQUIRED);
13301 return indicatePessimisticFixpoint();
13303 return OldAssumed == getAssumed() ? ChangeStatus::UNCHANGED
13304 : ChangeStatus::CHANGED;
13309 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
13311 unsigned AS = getAssociatedType()->getPointerAddressSpace();
13312 if (AS != FlatAS ||
Map.empty())
13313 return ChangeStatus::UNCHANGED;
13315 LLVMContext &Ctx = getAssociatedValue().getContext();
13316 MDNode *NoAliasASNode =
nullptr;
13317 MDBuilder MDB(Ctx);
13319 for (RangeMap::const_iterator
I =
Map.begin();
I !=
Map.end();
I++) {
13322 unsigned Upper =
I.stop();
13323 unsigned Lower =
I.start();
13324 if (!NoAliasASNode) {
13325 NoAliasASNode = MDB.createRange(APInt(32,
Lower), APInt(32,
Upper + 1));
13328 MDNode *ASRange = MDB.createRange(APInt(32,
Lower), APInt(32,
Upper + 1));
13332 Value *AssociatedValue = &getAssociatedValue();
13335 auto AddNoAliasAttr = [&](
const Use &
U,
bool &) {
13336 if (
U.get() != AssociatedValue)
13339 if (!Inst || Inst->
hasMetadata(LLVMContext::MD_noalias_addrspace))
13346 Inst->
setMetadata(LLVMContext::MD_noalias_addrspace, NoAliasASNode);
13350 (void)
A.checkForAllUses(AddNoAliasAttr, *
this, *AssociatedValue,
13352 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
13356 const std::string getAsStr(Attributor *
A)
const override {
13357 if (!isValidState())
13358 return "<invalid>";
13360 raw_string_ostream OS(Str);
13361 OS <<
"CanNotBeAddrSpace(";
13362 for (RangeMap::const_iterator
I =
Map.begin();
I !=
Map.end();
I++) {
13363 unsigned Upper =
I.stop();
13364 unsigned Lower =
I.start();
13365 OS <<
' ' <<
'[' <<
Upper <<
',' <<
Lower + 1 <<
')';
13372 void removeAS(
unsigned AS) {
13373 RangeMap::iterator
I =
Map.find(AS);
13375 if (
I !=
Map.end()) {
13376 unsigned Upper =
I.stop();
13377 unsigned Lower =
I.start();
13381 if (AS != ~((
unsigned)0) && AS + 1 <=
Upper)
13383 if (AS != 0 &&
Lower <= AS - 1)
13388 void resetASRanges(Attributor &
A) {
13390 Map.insert(0,
A.getInfoCache().getMaxAddrSpace(),
true);
13394struct AANoAliasAddrSpaceFloating final : AANoAliasAddrSpaceImpl {
13395 AANoAliasAddrSpaceFloating(
const IRPosition &IRP, Attributor &
A)
13396 : AANoAliasAddrSpaceImpl(IRP,
A) {}
13398 void trackStatistics()
const override {
13403struct AANoAliasAddrSpaceReturned final : AANoAliasAddrSpaceImpl {
13404 AANoAliasAddrSpaceReturned(
const IRPosition &IRP, Attributor &
A)
13405 : AANoAliasAddrSpaceImpl(IRP,
A) {}
13407 void trackStatistics()
const override {
13412struct AANoAliasAddrSpaceCallSiteReturned final : AANoAliasAddrSpaceImpl {
13413 AANoAliasAddrSpaceCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
13414 : AANoAliasAddrSpaceImpl(IRP,
A) {}
13416 void trackStatistics()
const override {
13421struct AANoAliasAddrSpaceArgument final : AANoAliasAddrSpaceImpl {
13422 AANoAliasAddrSpaceArgument(
const IRPosition &IRP, Attributor &
A)
13423 : AANoAliasAddrSpaceImpl(IRP,
A) {}
13425 void trackStatistics()
const override {
13430struct AANoAliasAddrSpaceCallSiteArgument final : AANoAliasAddrSpaceImpl {
13431 AANoAliasAddrSpaceCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
13432 : AANoAliasAddrSpaceImpl(IRP,
A) {}
13434 void trackStatistics()
const override {
13441struct AAAllocationInfoImpl :
public AAAllocationInfo {
13442 AAAllocationInfoImpl(
const IRPosition &IRP, Attributor &
A)
13443 : AAAllocationInfo(IRP,
A) {}
13445 std::optional<TypeSize> getAllocatedSize()
const override {
13446 assert(isValidState() &&
"the AA is invalid");
13447 return AssumedAllocatedSize;
13450 std::optional<TypeSize> findInitialAllocationSize(Instruction *
I,
13451 const DataLayout &
DL) {
13454 switch (
I->getOpcode()) {
13455 case Instruction::Alloca: {
13460 return std::nullopt;
13466 const IRPosition &IRP = getIRPosition();
13471 return indicatePessimisticFixpoint();
13473 bool IsKnownNoCapture;
13475 A,
this, IRP, DepClassTy::OPTIONAL, IsKnownNoCapture))
13476 return indicatePessimisticFixpoint();
13478 const AAPointerInfo *PI =
13479 A.getOrCreateAAFor<AAPointerInfo>(IRP, *
this, DepClassTy::REQUIRED);
13482 return indicatePessimisticFixpoint();
13485 return indicatePessimisticFixpoint();
13487 const DataLayout &
DL =
A.getDataLayout();
13488 const auto AllocationSize = findInitialAllocationSize(
I,
DL);
13491 if (!AllocationSize)
13492 return indicatePessimisticFixpoint();
13496 if (*AllocationSize == 0)
13497 return indicatePessimisticFixpoint();
13503 return indicatePessimisticFixpoint();
13505 if (BinSize == 0) {
13506 auto NewAllocationSize = std::make_optional<TypeSize>(0,
false);
13507 if (!changeAllocationSize(NewAllocationSize))
13508 return ChangeStatus::UNCHANGED;
13509 return ChangeStatus::CHANGED;
13513 const auto &It = PI->
begin();
13516 if (It->first.Offset != 0)
13517 return indicatePessimisticFixpoint();
13519 uint64_t SizeOfBin = It->first.Offset + It->first.Size;
13521 if (SizeOfBin >= *AllocationSize)
13522 return indicatePessimisticFixpoint();
13524 auto NewAllocationSize = std::make_optional<TypeSize>(SizeOfBin * 8,
false);
13526 if (!changeAllocationSize(NewAllocationSize))
13527 return ChangeStatus::UNCHANGED;
13529 return ChangeStatus::CHANGED;
13535 assert(isValidState() &&
13536 "Manifest should only be called if the state is valid.");
13540 auto FixedAllocatedSizeInBits = getAllocatedSize()->getFixedValue();
13542 unsigned long NumBytesToAllocate = (FixedAllocatedSizeInBits + 7) / 8;
13544 switch (
I->getOpcode()) {
13546 case Instruction::Alloca: {
13550 Type *CharType = Type::getInt8Ty(
I->getContext());
13552 auto *NumBytesToValue =
13553 ConstantInt::get(
I->getContext(), APInt(32, NumBytesToAllocate));
13556 insertPt = std::next(insertPt);
13557 AllocaInst *NewAllocaInst =
13562 return ChangeStatus::CHANGED;
13570 return ChangeStatus::UNCHANGED;
13574 const std::string getAsStr(Attributor *
A)
const override {
13575 if (!isValidState())
13576 return "allocationinfo(<invalid>)";
13577 return "allocationinfo(" +
13578 (AssumedAllocatedSize == HasNoAllocationSize
13580 : std::to_string(AssumedAllocatedSize->getFixedValue())) +
13585 std::optional<TypeSize> AssumedAllocatedSize = HasNoAllocationSize;
13589 bool changeAllocationSize(std::optional<TypeSize>
Size) {
13590 if (AssumedAllocatedSize == HasNoAllocationSize ||
13591 AssumedAllocatedSize !=
Size) {
13592 AssumedAllocatedSize =
Size;
13599struct AAAllocationInfoFloating : AAAllocationInfoImpl {
13600 AAAllocationInfoFloating(
const IRPosition &IRP, Attributor &
A)
13601 : AAAllocationInfoImpl(IRP,
A) {}
13603 void trackStatistics()
const override {
13608struct AAAllocationInfoReturned : AAAllocationInfoImpl {
13609 AAAllocationInfoReturned(
const IRPosition &IRP, Attributor &
A)
13610 : AAAllocationInfoImpl(IRP,
A) {}
13616 (void)indicatePessimisticFixpoint();
13619 void trackStatistics()
const override {
13624struct AAAllocationInfoCallSiteReturned : AAAllocationInfoImpl {
13625 AAAllocationInfoCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
13626 : AAAllocationInfoImpl(IRP,
A) {}
13628 void trackStatistics()
const override {
13633struct AAAllocationInfoArgument : AAAllocationInfoImpl {
13634 AAAllocationInfoArgument(
const IRPosition &IRP, Attributor &
A)
13635 : AAAllocationInfoImpl(IRP,
A) {}
13637 void trackStatistics()
const override {
13642struct AAAllocationInfoCallSiteArgument : AAAllocationInfoImpl {
13643 AAAllocationInfoCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
13644 : AAAllocationInfoImpl(IRP,
A) {}
13649 (void)indicatePessimisticFixpoint();
13652 void trackStatistics()
const override {
13701#define SWITCH_PK_INV(CLASS, PK, POS_NAME) \
13702 case IRPosition::PK: \
13703 llvm_unreachable("Cannot create " #CLASS " for a " POS_NAME " position!");
13705#define SWITCH_PK_CREATE(CLASS, IRP, PK, SUFFIX) \
13706 case IRPosition::PK: \
13707 AA = new (A.Allocator) CLASS##SUFFIX(IRP, A); \
13711#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13712 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13713 CLASS *AA = nullptr; \
13714 switch (IRP.getPositionKind()) { \
13715 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13716 SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
13717 SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
13718 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13719 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
13720 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
13721 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13722 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13727#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13728 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13729 CLASS *AA = nullptr; \
13730 switch (IRP.getPositionKind()) { \
13731 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13732 SWITCH_PK_INV(CLASS, IRP_FUNCTION, "function") \
13733 SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
13734 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13735 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13736 SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
13737 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13738 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13743#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS) \
13744 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13745 CLASS *AA = nullptr; \
13746 switch (IRP.getPositionKind()) { \
13747 SWITCH_PK_CREATE(CLASS, IRP, POS, SUFFIX) \
13749 llvm_unreachable("Cannot create " #CLASS " for position otherthan " #POS \
13755#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13756 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13757 CLASS *AA = nullptr; \
13758 switch (IRP.getPositionKind()) { \
13759 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13760 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13761 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13762 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13763 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13764 SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
13765 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13766 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13771#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13772 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13773 CLASS *AA = nullptr; \
13774 switch (IRP.getPositionKind()) { \
13775 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13776 SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
13777 SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
13778 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13779 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
13780 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
13781 SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
13782 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13787#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13788 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13789 CLASS *AA = nullptr; \
13790 switch (IRP.getPositionKind()) { \
13791 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13792 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13793 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13794 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13795 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13796 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13797 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13798 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13850#undef CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION
13851#undef CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION
13852#undef CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION
13853#undef CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION
13854#undef CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION
13855#undef CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION
13856#undef SWITCH_PK_CREATE
13857#undef SWITCH_PK_INV
assert(UImm &&(UImm !=~static_cast< T >(0)) &&"Invalid immediate!")
AMDGPU Register Bank Select
This file implements a class to represent arbitrary precision integral constant values and operations...
ReachingDefInfo InstSet & ToRemove
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
Function Alias Analysis false
This file contains the simple types necessary to represent the attributes associated with functions a...
#define STATS_DECLTRACK(NAME, TYPE, MSG)
static std::optional< Constant * > askForAssumedConstant(Attributor &A, const AbstractAttribute &QueryingAA, const IRPosition &IRP, Type &Ty)
static cl::opt< unsigned, true > MaxPotentialValues("attributor-max-potential-values", cl::Hidden, cl::desc("Maximum number of potential values to be " "tracked for each position."), cl::location(llvm::PotentialConstantIntValuesState::MaxPotentialValues), cl::init(7))
static void clampReturnedValueStates(Attributor &A, const AAType &QueryingAA, StateType &S, const IRPosition::CallBaseContext *CBContext=nullptr)
Clamp the information known for all returned values of a function (identified by QueryingAA) into S.
#define STATS_DECLTRACK_FN_ATTR(NAME)
#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxPotentialValuesIterations("attributor-max-potential-values-iterations", cl::Hidden, cl::desc("Maximum number of iterations we keep dismantling potential values."), cl::init(64))
#define STATS_DECLTRACK_CS_ATTR(NAME)
#define PIPE_OPERATOR(CLASS)
static bool mayBeInCycle(const CycleInfo *CI, const Instruction *I, bool HeaderOnly, Cycle **CPtr=nullptr)
#define STATS_DECLTRACK_ARG_ATTR(NAME)
static const Value * stripAndAccumulateOffsets(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Val, const DataLayout &DL, APInt &Offset, bool GetMinOffset, bool AllowNonInbounds, bool UseAssumed=false)
#define STATS_DECLTRACK_CSRET_ATTR(NAME)
static cl::opt< bool > ManifestInternal("attributor-manifest-internal", cl::Hidden, cl::desc("Manifest Attributor internal string attributes."), cl::init(false))
static Value * constructPointer(Value *Ptr, int64_t Offset, IRBuilder< NoFolder > &IRB)
Helper function to create a pointer based on Ptr, and advanced by Offset bytes.
#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define BUILD_STAT_NAME(NAME, TYPE)
static bool isDenselyPacked(Type *Ty, const DataLayout &DL)
Checks if a type could have padding bytes.
#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static const Value * getMinimalBaseOfPointer(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Ptr, int64_t &BytesOffset, const DataLayout &DL, bool AllowNonInbounds=false)
#define STATS_DECLTRACK_FNRET_ATTR(NAME)
#define STATS_DECLTRACK_CSARG_ATTR(NAME)
#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS)
#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxHeapToStackSize("max-heap-to-stack-size", cl::init(128), cl::Hidden)
#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define STATS_DECLTRACK_FLOATING_ATTR(NAME)
#define STATS_DECL(NAME, TYPE, MSG)
static GCRegistry::Add< ErlangGC > A("erlang", "erlang-compatible garbage collector")
static GCRegistry::Add< CoreCLRGC > E("coreclr", "CoreCLR-compatible GC")
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
static bool isReachableImpl(SmallVectorImpl< BasicBlock * > &Worklist, const StopSetT &StopSet, const SmallPtrSetImpl< BasicBlock * > *ExclusionSet, const DominatorTree *DT, const LoopInfo *LI, const CycleInfo *CI)
This file contains the declarations for the subclasses of Constant, which represent the different fla...
This file declares an analysis pass that computes CycleInfo for LLVM IR, specialized from GenericCycl...
DXIL Forward Handle Accesses
This file defines DenseMapInfo traits for DenseMap.
Machine Check Debug Module
This file implements a map that provides insertion order iteration.
static unsigned getAddressSpace(const Value *V, unsigned MaxLookup)
ConstantRange Range(APInt(BitWidth, Low), APInt(BitWidth, High))
uint64_t IntrinsicInst * II
static StringRef getName(Value *V)
dot regions Print regions of function to dot true view regions View regions of function(with no function bodies)"
Remove Loads Into Fake Uses
This builds on the llvm/ADT/GraphTraits.h file to find the strongly connected components (SCCs) of a ...
std::pair< BasicBlock *, BasicBlock * > Edge
BaseType
A given derived pointer can have multiple base pointers through phi/selects.
This file defines generic set operations that may be used on set's of different types,...
This file implements a set that has insertion order iteration characteristics.
This file defines the SmallPtrSet class.
This file defines the SmallVector class.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
static SymbolRef::Type getType(const Symbol *Sym)
static void initialize(TargetLibraryInfoImpl &TLI, const Triple &T, const llvm::StringTable &StandardNames, VectorLibrary VecLib)
Initialize the set of available library functions based on the specified target triple.
static unsigned getBitWidth(Type *Ty, const DataLayout &DL)
Returns the bitwidth of the given scalar or pointer type.
static unsigned getSize(unsigned Kind)
LLVM_ABI AACallGraphNode * operator*() const
bool isNoAlias(const MemoryLocation &LocA, const MemoryLocation &LocB)
A trivial helper function to check to see if the specified pointers are no-alias.
Class for arbitrary precision integers.
int64_t getSExtValue() const
Get sign extended value.
CallBase * getInstruction() const
Return the underlying instruction.
bool isCallbackCall() const
Return true if this ACS represents a callback call.
bool isDirectCall() const
Return true if this ACS represents a direct call.
static LLVM_ABI void getCallbackUses(const CallBase &CB, SmallVectorImpl< const Use * > &CallbackUses)
Add operand uses of CB that represent callback uses into CallbackUses.
int getCallArgOperandNo(Argument &Arg) const
Return the operand index of the underlying instruction associated with Arg.
Align getAlign() const
Return the alignment of the memory that is being allocated by the instruction.
unsigned getAddressSpace() const
Return the address space for the allocation.
LLVM_ABI std::optional< TypeSize > getAllocationSize(const DataLayout &DL) const
Get allocation size in bytes.
This class represents an incoming formal argument to a Function.
LLVM_ABI bool hasNoAliasAttr() const
Return true if this argument has the noalias attribute.
LLVM_ABI bool onlyReadsMemory() const
Return true if this argument has the readonly or readnone attribute.
LLVM_ABI bool hasPointeeInMemoryValueAttr() const
Return true if this argument has the byval, sret, inalloca, preallocated, or byref attribute.
LLVM_ABI bool hasReturnedAttr() const
Return true if this argument has the returned attribute.
LLVM_ABI bool hasByValAttr() const
Return true if this argument has the byval attribute.
const Function * getParent() const
unsigned getArgNo() const
Return the index of this formal argument in its containing function.
A function analysis which provides an AssumptionCache.
A cache of @llvm.assume calls within a function.
Functions, function parameters, and return types can have attributes to indicate how they should be t...
static LLVM_ABI Attribute get(LLVMContext &Context, AttrKind Kind, uint64_t Val=0)
Return a uniquified Attribute object.
LLVM_ABI FPClassTest getNoFPClass() const
Return the FPClassTest for nofpclass.
LLVM_ABI Attribute::AttrKind getKindAsEnum() const
Return the attribute's kind as an enum (Attribute::AttrKind).
LLVM_ABI MemoryEffects getMemoryEffects() const
Returns memory effects.
AttrKind
This enumeration lists the attributes that can be associated with parameters, function results,...
static LLVM_ABI Attribute getWithCaptureInfo(LLVMContext &Context, CaptureInfo CI)
static bool isEnumAttrKind(AttrKind Kind)
bool isValid() const
Return true if the attribute is any kind of attribute.
LLVM_ABI CaptureInfo getCaptureInfo() const
Returns information from captures attribute.
LLVM Basic Block Representation.
LLVM_ABI const_iterator getFirstInsertionPt() const
Returns an iterator to the first instruction in this block that is suitable for inserting a non-PHI i...
const Function * getParent() const
Return the enclosing method, or null if none.
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
const Instruction & front() const
InstListType::iterator iterator
Instruction iterators...
const Instruction * getTerminator() const LLVM_READONLY
Returns the terminator instruction; assumes that the block is well-formed.
BinaryOps getOpcode() const
Base class for all callable instructions (InvokeInst and CallInst) Holds everything related to callin...
Function * getCalledFunction() const
Returns the function called, or null if this is an indirect function invocation or the function signa...
LLVM_ABI bool isMustTailCall() const
Tests if this call site must be tail call optimized.
LLVM_ABI bool isIndirectCall() const
Return true if the callsite is an indirect call.
bool isCallee(Value::const_user_iterator UI) const
Determine whether the passed iterator points to the callee operand's Use.
Value * getCalledOperand() const
const Use & getCalledOperandUse() const
Attribute getFnAttr(StringRef Kind) const
Get the attribute of a given kind for the function.
const Use & getArgOperandUse(unsigned i) const
Wrappers for getting the Use of a call argument.
LLVM_ABI std::optional< ConstantRange > getRange() const
If this return value has a range attribute, return the value range of the argument.
Value * getArgOperand(unsigned i) const
bool isBundleOperand(unsigned Idx) const
Return true if the operand at index Idx is a bundle operand.
bool isConvergent() const
Determine if the invoke is convergent.
FunctionType * getFunctionType() const
LLVM_ABI Intrinsic::ID getIntrinsicID() const
Returns the intrinsic ID of the intrinsic called or Intrinsic::not_intrinsic if the called function i...
iterator_range< User::op_iterator > args()
Iteration adapter for range-for loops.
unsigned getArgOperandNo(const Use *U) const
Given a use for a arg operand, get the arg operand number that corresponds to it.
unsigned arg_size() const
bool isArgOperand(const Use *U) const
LLVM_ABI Function * getCaller()
Helper to get the caller (the parent function).
static CallInst * Create(FunctionType *Ty, Value *F, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
static CaptureInfo none()
Create CaptureInfo that does not capture any components of the pointer.
Instruction::CastOps getOpcode() const
Return the opcode of this CastInst.
LLVM_ABI bool isIntegerCast() const
There are several places where we need to know if a cast instruction only deals with integer source a...
Type * getDestTy() const
Return the destination type, as a convenience.
bool isEquality() const
Determine if this is an equals/not equals predicate.
bool isFalseWhenEqual() const
This is just a convenience.
Predicate
This enumeration lists the possible predicates for CmpInst subclasses.
bool isTrueWhenEqual() const
This is just a convenience.
Predicate getPredicate() const
Return the predicate for this instruction.
Conditional Branch instruction.
Value * getCondition() const
BasicBlock * getSuccessor(unsigned i) const
static LLVM_ABI Constant * getExtractElement(Constant *Vec, Constant *Idx, Type *OnlyIfReducedTy=nullptr)
static LLVM_ABI ConstantInt * getTrue(LLVMContext &Context)
This class represents a range of values.
const APInt & getLower() const
Return the lower value for this range.
LLVM_ABI bool isFullSet() const
Return true if this set contains all of the elements possible for this data-type.
LLVM_ABI bool isEmptySet() const
Return true if this set contains no members.
bool isSingleElement() const
Return true if this set contains exactly one member.
static LLVM_ABI ConstantRange makeAllowedICmpRegion(CmpInst::Predicate Pred, const ConstantRange &Other)
Produce the smallest range such that all values that may satisfy the given predicate with any value c...
const APInt & getUpper() const
Return the upper value for this range.
A parsed version of the target data layout string in and methods for querying it.
iterator find(const_arg_type_t< KeyT > Val)
std::pair< iterator, bool > try_emplace(KeyT &&Key, Ts &&...Args)
size_type count(const_arg_type_t< KeyT > Val) const
Return 1 if the specified key is in the map, 0 otherwise.
bool contains(const_arg_type_t< KeyT > Val) const
Return true if the specified key is in the map, false otherwise.
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
Analysis pass which computes a DominatorTree.
Concrete subclass of DominatorTreeBase that is used to compute a normal dominator tree.
LLVM_ABI bool dominates(const BasicBlock *BB, const Use &U) const
Return true if the (end of the) basic block BB dominates the use U.
const BasicBlock & getEntryBlock() const
iterator_range< arg_iterator > args()
const Function & getFunction() const
Argument * getArg(unsigned i) const
bool hasFnAttribute(Attribute::AttrKind Kind) const
Return true if the function has the attribute.
CycleT * getCycle(const BlockT *Block) const
Find the innermost cycle containing a given block.
LLVM_ABI bool isDeclaration() const
Return true if the primary definition of this global value is outside of the current translation unit...
bool hasLocalLinkage() const
static LLVM_ABI bool compare(const APInt &LHS, const APInt &RHS, ICmpInst::Predicate Pred)
Return result of LHS Pred RHS comparison.
Value * CreatePtrAdd(Value *Ptr, Value *Offset, const Twine &Name="", GEPNoWrapFlags NW=GEPNoWrapFlags::none())
ConstantInt * getInt64(uint64_t C)
Get a constant 64-bit value.
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
LLVM_ABI Instruction * clone() const
Create a copy of 'this' instruction that is identical in all ways except the following:
LLVM_ABI bool isLifetimeStartOrEnd() const LLVM_READONLY
Return true if the instruction is a llvm.lifetime.start or llvm.lifetime.end marker.
bool mayReadOrWriteMemory() const
Return true if this instruction may read or write memory.
LLVM_ABI bool mayWriteToMemory() const LLVM_READONLY
Return true if this instruction may modify memory.
bool hasMetadata() const
Return true if this instruction has any metadata attached to it.
LLVM_ABI void insertBefore(InstListType::iterator InsertPos)
Insert an unlinked instruction into a basic block immediately before the specified position.
LLVM_ABI InstListType::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
LLVM_ABI const Function * getFunction() const
Return the function this instruction belongs to.
LLVM_ABI BasicBlock * getSuccessor(unsigned Idx) const LLVM_READONLY
Return the specified successor. This instruction must be a terminator.
LLVM_ABI bool mayHaveSideEffects() const LLVM_READONLY
Return true if the instruction may have side effects.
bool isTerminator() const
LLVM_ABI bool mayReadFromMemory() const LLVM_READONLY
Return true if this instruction may read memory.
LLVM_ABI void setMetadata(unsigned KindID, MDNode *Node)
Set the metadata of the specified kind to the specified node.
unsigned getOpcode() const
Returns a member of one of the enums like Instruction::Add.
void setDebugLoc(DebugLoc Loc)
Set the debug location information for this instruction.
LLVM_ABI const DataLayout & getDataLayout() const
Get the data layout of the module this instruction belongs to.
This is an important class for using LLVM in a threaded context.
ConstantRange getConstantRange(Value *V, Instruction *CxtI, bool UndefAllowed)
Return the ConstantRange constraint that is known to hold for the specified value at the specified in...
LoopT * getLoopFor(const BlockT *BB) const
Return the inner most loop that BB lives in.
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
static LLVM_ABI MDNode * getMostGenericRange(MDNode *A, MDNode *B)
static MemoryEffectsBase readOnly()
bool doesNotAccessMemory() const
Whether this function accesses no memory.
static MemoryEffectsBase argMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
static MemoryEffectsBase inaccessibleMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
bool onlyAccessesInaccessibleMem() const
Whether this function only (at most) accesses inaccessible memory.
ModRefInfo getModRef(Location Loc) const
Get ModRefInfo for the given Location.
bool onlyAccessesArgPointees() const
Whether this function only (at most) accesses argument memory.
bool onlyReadsMemory() const
Whether this function only (at most) reads memory.
static MemoryEffectsBase writeOnly()
static MemoryEffectsBase inaccessibleOrArgMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
static MemoryEffectsBase none()
bool onlyAccessesInaccessibleOrArgMem() const
Whether this function only (at most) accesses argument and inaccessible memory.
static MemoryEffectsBase unknown()
static LLVM_ABI std::optional< MemoryLocation > getOrNone(const Instruction *Inst)
static SizeOffsetValue unknown()
static PHINode * Create(Type *Ty, unsigned NumReservedValues, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
Constructors - NumReservedValues is a hint for the number of incoming edges that this phi node will h...
static LLVM_ABI PoisonValue * get(Type *T)
Static factory methods - Return an 'poison' object of the specified type.
Value * getReturnValue() const
Convenience accessor. Returns null if there is no return value.
LLVM_ABI const SCEV * getSCEVAtScope(const SCEV *S, const Loop *L)
Return a SCEV expression for the specified value at the specified scope in the program.
LLVM_ABI const SCEV * getSCEV(Value *V)
Return a SCEV expression for the full generality of the specified expression.
LLVM_ABI unsigned getSmallConstantMaxTripCount(const Loop *L, SmallVectorImpl< const SCEVPredicate * > *Predicates=nullptr)
Returns the upper bound of the loop trip count as a normal unsigned value.
ConstantRange getUnsignedRange(const SCEV *S)
Determine the unsigned range for a particular SCEV.
A vector that has set insertion semantics.
size_type size() const
Determine the number of elements in the SetVector.
bool insert(const value_type &X)
Insert a new element into the SetVector.
bool erase(PtrType Ptr)
Remove pointer from the set.
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
std::pair< const_iterator, bool > insert(const T &V)
insert - Insert an element into the set if it isn't already there.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
reference emplace_back(ArgTypes &&... Args)
void append(ItTy in_start, ItTy in_end)
Add the specified range to the end of the SmallVector.
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
Used to lazily calculate structure layout information for a target machine, based on the DataLayout s...
TypeSize getElementOffset(unsigned Idx) const
TypeSize getElementOffsetInBits(unsigned Idx) const
Class to represent struct types.
unsigned getNumElements() const
Random access to the elements.
Type * getElementType(unsigned N) const
Twine - A lightweight data structure for efficiently representing the concatenation of temporary valu...
The instances of the Type class are immutable: once they are created, they are never changed.
LLVM_ABI unsigned getIntegerBitWidth() const
bool isPointerTy() const
True if this is an instance of PointerType.
LLVM_ABI unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
bool isPtrOrPtrVectorTy() const
Return true if this is a pointer type or a vector of pointer types.
bool isIntegerTy() const
True if this is an instance of IntegerType.
bool isVoidTy() const
Return true if this is 'void'.
static UncondBrInst * Create(BasicBlock *Target, InsertPosition InsertBefore=nullptr)
BasicBlock * getSuccessor(unsigned i=0) const
static LLVM_ABI UndefValue * get(Type *T)
Static factory methods - Return an 'undef' object of the specified type.
A Use represents the edge between a Value definition and its users.
User * getUser() const
Returns the User that contains this Use.
const Use & getOperandUse(unsigned i) const
LLVM_ABI bool isDroppable() const
A droppable user is a user for which uses can be dropped without affecting correctness and should be ...
LLVM_ABI bool replaceUsesOfWith(Value *From, Value *To)
Replace uses of one Value with another.
Value * getOperand(unsigned i) const
unsigned getNumOperands() const
ValueT lookup(const KeyT &Val) const
lookup - Return the entry for the specified key, or a default constructed value if no such entry exis...
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
static constexpr uint64_t MaximumAlignment
LLVM_ABI void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
LLVMContext & getContext() const
All values hold a context through their type.
iterator_range< user_iterator > users()
LLVM_ABI const Value * stripAndAccumulateConstantOffsets(const DataLayout &DL, APInt &Offset, bool AllowNonInbounds, bool AllowInvariantGroup=false, function_ref< bool(Value &Value, APInt &Offset)> ExternalAnalysis=nullptr, bool LookThroughIntToPtr=false) const
Accumulate the constant offset this value has compared to a base pointer.
static constexpr unsigned MaxAlignmentExponent
The maximum alignment for instructions.
iterator_range< use_iterator > uses()
LLVM_ABI StringRef getName() const
Return a constant reference to the value's name.
std::pair< iterator, bool > insert(const ValueT &V)
constexpr ScalarTy getFixedValue() const
constexpr bool isScalable() const
Returns whether the quantity is scaled by a runtime quantity (vscale).
const ParentTy * getParent() const
self_iterator getIterator()
NodeTy * getNextNode()
Get the next node, or nullptr for the list tail.
SetVector< Function * >::iterator I
This class implements an extremely fast bulk output stream that can only output to a stream.
A raw_ostream that writes to an std::string.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
Abstract Attribute helper functions.
LLVM_ABI bool isAssumedReadNone(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readnone.
LLVM_ABI bool isAssumedReadOnly(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readonly.
raw_ostream & operator<<(raw_ostream &OS, const RangeTy &R)
LLVM_ABI std::optional< Value * > combineOptionalValuesInAAValueLatice(const std::optional< Value * > &A, const std::optional< Value * > &B, Type *Ty)
Return the combination of A and B such that the result is a possible value of both.
LLVM_ABI bool isValidAtPosition(const ValueAndContext &VAC, InformationCache &InfoCache)
Return true if the value of VAC is valid at the position of VAC, that is a constant,...
LLVM_ABI bool isAssumedThreadLocalObject(Attributor &A, Value &Obj, const AbstractAttribute &QueryingAA)
Return true if Obj is assumed to be a thread local object.
LLVM_ABI bool isGPUConstantAddressSpace(const Module &M, unsigned AS)
Check if the given address space AS corresponds to a GPU constant address space for the target triple...
LLVM_ABI bool isDynamicallyUnique(Attributor &A, const AbstractAttribute &QueryingAA, const Value &V, bool ForAnalysisOnly=true)
Return true if V is dynamically unique, that is, there are no two "instances" of V at runtime with di...
LLVM_ABI bool getPotentialCopiesOfStoredValue(Attributor &A, StoreInst &SI, SmallSetVector< Value *, 4 > &PotentialCopies, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values of the one stored by SI into PotentialCopies.
LLVM_ABI bool isGPUSharedAddressSpace(const Module &M, unsigned AS)
Check if the given address space AS corresponds to a GPU shared address space for the target triple i...
LLVM_ABI bool isGPULocalAddressSpace(const Module &M, unsigned AS)
Check if the given address space AS corresponds to a GPU local/private address space for the target t...
SmallPtrSet< Instruction *, 4 > InstExclusionSetTy
LLVM_ABI bool isGPU(const Module &M)
Return true iff M targets a GPU (and we can use GPU AS reasoning).
ValueScope
Flags to distinguish intra-procedural queries from potentially inter-procedural queries.
LLVM_ABI bool isValidInScope(const Value &V, const Function *Scope)
Return true if V is a valid value in Scope, that is a constant or an instruction/argument of Scope.
LLVM_ABI bool isPotentiallyReachable(Attributor &A, const Instruction &FromI, const Instruction &ToI, const AbstractAttribute &QueryingAA, const AA::InstExclusionSetTy *ExclusionSet=nullptr, std::function< bool(const Function &F)> GoBackwardsCB=nullptr)
Return true if ToI is potentially reachable from FromI without running into any instruction in Exclus...
LLVM_ABI bool isNoSyncInst(Attributor &A, const Instruction &I, const AbstractAttribute &QueryingAA)
Return true if I is a nosync instruction.
bool hasAssumedIRAttr(Attributor &A, const AbstractAttribute *QueryingAA, const IRPosition &IRP, DepClassTy DepClass, bool &IsKnown, bool IgnoreSubsumingPositions=false, const AAType **AAPtr=nullptr)
Helper to avoid creating an AA for IR Attributes that might already be set.
LLVM_ABI bool getPotentiallyLoadedValues(Attributor &A, LoadInst &LI, SmallSetVector< Value *, 4 > &PotentialValues, SmallSetVector< Instruction *, 4 > &PotentialValueOrigins, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values LI could read into PotentialValues.
LLVM_ABI Value * getWithType(Value &V, Type &Ty)
Try to convert V to type Ty without introducing new instructions.
constexpr char Align[]
Key for Kernel::Arg::Metadata::mAlign.
constexpr char Attrs[]
Key for Kernel::Metadata::mAttrs.
@ C
The default llvm calling convention, compatible with C.
@ BasicBlock
Various leaf nodes.
@ Unsupported
This operation is completely unsupported on the target.
@ SingleThread
Synchronized with respect to signal handlers executing in the same thread.
@ CE
Windows NT (Windows on ARM)
@ Valid
The data is already valid.
initializer< Ty > init(const Ty &Val)
LocationClass< Ty > location(Ty &L)
unsigned combineHashValue(unsigned a, unsigned b)
Simplistic combination of 32-bit hash values into 32-bit hash values.
ElementType
The element type of an SRV or UAV resource.
Scope
Defines the scope in which this symbol should be visible: Default – Visible in the public interface o...
std::enable_if_t< detail::IsValidPointer< X, Y >::value, X * > dyn_extract_or_null(Y &&MD)
Extract a Value from Metadata, if any, allowing null.
std::enable_if_t< detail::IsValidPointer< X, Y >::value, X * > extract(Y &&MD)
Extract a Value from Metadata.
@ User
could "use" a pointer
NodeAddr< UseNode * > Use
Context & getContext() const
friend class Instruction
Iterator for Instructions in a `BasicBlock.
LLVM_ABI iterator begin() const
This is an optimization pass for GlobalISel generic memory operations.
bool operator<(int64_t V1, const APSInt &V2)
FunctionAddr VTableAddr Value
LLVM_ATTRIBUTE_ALWAYS_INLINE DynamicAPInt gcd(const DynamicAPInt &A, const DynamicAPInt &B)
LLVM_ABI KnownFPClass computeKnownFPClass(const Value *V, const APInt &DemandedElts, FPClassTest InterestedClasses, const SimplifyQuery &SQ, unsigned Depth=0)
Determine which floating-point classes are valid for V, and return them in KnownFPClass bit sets.
bool all_of(R &&range, UnaryPredicate P)
Provide wrappers to std::all_of which take ranges instead of having to pass begin/end explicitly.
LLVM_ABI bool isLegalToPromote(const CallBase &CB, Function *Callee, const char **FailureReason=nullptr)
Return true if the given indirect call site can be made to call Callee.
LLVM_ABI Constant * getInitialValueOfAllocation(const Value *V, const TargetLibraryInfo *TLI, Type *Ty)
If this is a call to an allocation function that initializes memory to a fixed value,...
auto size(R &&Range, std::enable_if_t< std::is_base_of< std::random_access_iterator_tag, typename std::iterator_traits< decltype(Range.begin())>::iterator_category >::value, void > *=nullptr)
Get the size of a range.
@ Undef
Value of the register doesn't matter.
auto pred_end(const MachineBasicBlock *BB)
unsigned getPointerAddressSpace(const Type *T)
decltype(auto) dyn_cast(const From &Val)
dyn_cast<X> - Return the argument parameter cast to the specified type.
FunctionAddr VTableAddr uintptr_t uintptr_t Int32Ty
auto successors(const MachineBasicBlock *BB)
LLVM_ABI bool isRemovableAlloc(const CallBase *V, const TargetLibraryInfo *TLI)
Return true if this is a call to an allocation function that does not have side effects that we are r...
APFloat abs(APFloat X)
Returns the absolute value of the argument.
LLVM_ABI raw_fd_ostream & outs()
This returns a reference to a raw_fd_ostream for standard output.
LLVM_ABI Value * getAllocAlignment(const CallBase *V, const TargetLibraryInfo *TLI)
Gets the alignment argument for an aligned_alloc-like function, using either built-in knowledge based...
auto dyn_cast_if_present(const Y &Val)
dyn_cast_if_present<X> - Functionally identical to dyn_cast, except that a null (or none in the case ...
LLVM_ABI Value * simplifyInstructionWithOperands(Instruction *I, ArrayRef< Value * > NewOps, const SimplifyQuery &Q)
Like simplifyInstruction but the operands of I are replaced with NewOps.
Value * GetPointerBaseWithConstantOffset(Value *Ptr, int64_t &Offset, const DataLayout &DL, bool AllowNonInbounds=true)
Analyze the specified pointer to see if it can be expressed as a base pointer plus a constant offset.
scc_iterator< T > scc_begin(const T &G)
Construct the begin iterator for a deduced graph type T.
LLVM_ABI bool isNoAliasCall(const Value *V)
Return true if this pointer is returned by a noalias function.
MemoryEffectsBase< IRMemLocation > MemoryEffects
Summary of how a function affects memory in the program.
raw_ostream & WriteGraph(raw_ostream &O, const GraphType &G, bool ShortNames=false, const Twine &Title="")
LLVM_ABI bool isSafeToSpeculativelyExecute(const Instruction *I, const Instruction *CtxI=nullptr, AssumptionCache *AC=nullptr, const DominatorTree *DT=nullptr, const TargetLibraryInfo *TLI=nullptr, bool UseVariableInfo=true, bool IgnoreUBImplyingAttrs=true)
Return true if the instruction does not have any effects besides calculating the result and does not ...
bool isa_and_nonnull(const Y &Val)
bool operator==(const AddressRangeValuePair &LHS, const AddressRangeValuePair &RHS)
LLVM_ABI ConstantRange getConstantRangeFromMetadata(const MDNode &RangeMD)
Parse out a conservative ConstantRange from !range metadata.
auto map_range(ContainerTy &&C, FuncTy F)
Return a range that applies F to the elements of C.
const Value * getPointerOperand(const Value *V)
A helper function that returns the pointer operand of a load, store or GEP instruction.
LLVM_ABI Value * simplifyInstruction(Instruction *I, const SimplifyQuery &Q)
See if we can compute a simplified version of this instruction.
auto dyn_cast_or_null(const Y &Val)
bool any_of(R &&range, UnaryPredicate P)
Provide wrappers to std::any_of which take ranges instead of having to pass begin/end explicitly.
unsigned Log2_32(uint32_t Value)
Return the floor log base 2 of the specified value, -1 if the value is zero.
LLVM_ABI bool isIntrinsicReturningPointerAliasingArgumentWithoutCapturing(const CallBase *Call, bool MustPreserveNullness)
{launder,strip}.invariant.group returns pointer that aliases its argument, and it only captures point...
constexpr bool isPowerOf2_32(uint32_t Value)
Return true if the argument is a power of two > 0.
PotentialValuesState< std::pair< AA::ValueAndContext, AA::ValueScope > > PotentialLLVMValuesState
void sort(IteratorTy Start, IteratorTy End)
LLVM_ABI bool NullPointerIsDefined(const Function *F, unsigned AS=0)
Check whether null pointer dereferencing is considered undefined behavior for a given function or an ...
LLVM_ABI raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
bool isPointerTy(const Type *T)
LLVM_ABI bool wouldInstructionBeTriviallyDead(const Instruction *I, const TargetLibraryInfo *TLI=nullptr)
Return true if the result produced by the instruction would have no side effects if it was not used.
bool set_union(S1Ty &S1, const S2Ty &S2)
set_union(A, B) - Compute A := A u B, return whether A changed.
class LLVM_GSL_OWNER SmallVector
Forward declaration of SmallVector so that calculateSmallVectorDefaultInlinedElements can reference s...
LLVM_ABI CallBase & promoteCall(CallBase &CB, Function *Callee, CastInst **RetBitCast=nullptr)
Promote the given indirect call site to unconditionally call Callee.
bool isa(const From &Val)
isa<X> - Return true if the parameter to the template is an instance of one of the template type argu...
LLVM_ABI bool hasAssumption(const Function &F, const KnownAssumptionString &AssumptionStr)
Return true if F has the assumption AssumptionStr attached.
LLVM_ABI RetainedKnowledge getKnowledgeFromUse(const Use *U, ArrayRef< Attribute::AttrKind > AttrKinds)
Return a valid Knowledge associated to the Use U if its Attribute kind is in AttrKinds.
@ Success
The lock was released successfully.
LLVM_ATTRIBUTE_VISIBILITY_DEFAULT AnalysisKey InnerAnalysisManagerProxy< AnalysisManagerT, IRUnitT, ExtraArgTs... >::Key
LLVM_ABI bool isKnownNonZero(const Value *V, const SimplifyQuery &Q, unsigned Depth=0)
Return true if the given value is known to be non-zero when defined.
AtomicOrdering
Atomic ordering for LLVM's memory model.
PotentialValuesState< APInt > PotentialConstantIntValuesState
std::string join(IteratorT Begin, IteratorT End, StringRef Separator)
Joins the strings in the range [Begin, End), adding Separator between the elements.
IRBuilder(LLVMContext &, FolderTy, InserterTy, MDNode *, ArrayRef< OperandBundleDef >) -> IRBuilder< FolderTy, InserterTy >
InterleavedRange< Range > interleaved_array(const Range &R, StringRef Separator=", ")
Output range R as an array of interleaved elements.
ChangeStatus clampStateAndIndicateChange< DerefState >(DerefState &S, const DerefState &R)
void RemapInstruction(Instruction *I, ValueToValueMapTy &VM, RemapFlags Flags=RF_None, ValueMapTypeRemapper *TypeMapper=nullptr, ValueMaterializer *Materializer=nullptr, const MetadataPredicate *IdentityMD=nullptr)
Convert the instruction operands from referencing the current values into those specified by VM.
DWARFExpression::Operation Op
LLVM_ABI bool isGuaranteedNotToBeUndefOrPoison(const Value *V, AssumptionCache *AC=nullptr, const Instruction *CtxI=nullptr, const DominatorTree *DT=nullptr, unsigned Depth=0)
Return true if this function can prove that V does not have undef bits and is never poison.
ArrayRef(const T &OneElt) -> ArrayRef< T >
LLVM_ABI Value * getFreedOperand(const CallBase *CB, const TargetLibraryInfo *TLI)
If this if a call to a free function, return the freed operand.
ChangeStatus clampStateAndIndicateChange(StateType &S, const StateType &R)
Helper function to clamp a state S of type StateType with the information in R and indicate/return if...
constexpr unsigned BitWidth
ValueMap< const Value *, WeakTrackingVH > ValueToValueMapTy
auto pred_begin(const MachineBasicBlock *BB)
decltype(auto) cast(const From &Val)
cast<X> - Return the argument parameter cast to the specified type.
iterator_range< pointer_iterator< WrappedIteratorT > > make_pointer_range(RangeT &&Range)
LLVM_ABI std::optional< APInt > getAllocSize(const CallBase *CB, const TargetLibraryInfo *TLI, function_ref< const Value *(const Value *)> Mapper=[](const Value *V) { return V;})
Return the size of the requested allocation.
LLVM_ABI DenseSet< StringRef > getAssumptions(const Function &F)
Return the set of all assumptions for the function F.
Align assumeAligned(uint64_t Value)
Treats the value 0 as a 1, so Align is always at least 1.
LLVM_ABI Instruction * SplitBlockAndInsertIfThen(Value *Cond, BasicBlock::iterator SplitBefore, bool Unreachable, MDNode *BranchWeights=nullptr, DomTreeUpdater *DTU=nullptr, LoopInfo *LI=nullptr, BasicBlock *ThenBlock=nullptr)
Split the containing block at the specified instruction - everything before SplitBefore stays in the ...
@ OPTIONAL
The target may be valid if the source is not.
@ NONE
Do not track a dependence between source and target.
@ REQUIRED
The target cannot be valid if the source is not.
LLVM_ABI UseCaptureInfo DetermineUseCaptureKind(const Use &U, const Value *Base)
Determine what kind of capture behaviour U may exhibit.
LLVM_ABI Value * simplifyCmpInst(CmpPredicate Predicate, Value *LHS, Value *RHS, const SimplifyQuery &Q)
Given operands for a CmpInst, fold the result or return null.
LLVM_ABI bool mayContainIrreducibleControl(const Function &F, const LoopInfo *LI)
BumpPtrAllocatorImpl<> BumpPtrAllocator
The standard BumpPtrAllocator which just uses the default template parameters.
T bit_floor(T Value)
Returns the largest integral power of two no greater than Value if Value is nonzero.
LLVM_ABI const Value * getUnderlyingObject(const Value *V, unsigned MaxLookup=MaxLookupSearchDepth)
This method strips off any GEP address adjustments, pointer casts or llvm.threadlocal....
bool capturesNothing(CaptureComponents CC)
LLVM_ABI bool isIdentifiedObject(const Value *V)
Return true if this pointer refers to a distinct and identifiable object.
constexpr StringRef AssumptionAttrKey
The key we use for assumption attributes.
constexpr bool isCallableCC(CallingConv::ID CC)
void swap(llvm::BitVector &LHS, llvm::BitVector &RHS)
Implement std::swap in terms of BitVector swap.
A type to track pointer/struct usage and accesses for AAPointerInfo.
bool forallInterferingAccesses(AA::RangeTy Range, F CB) const
See AAPointerInfo::forallInterferingAccesses.
AAPointerInfo::const_bin_iterator end() const
ChangeStatus addAccess(Attributor &A, const AAPointerInfo::RangeList &Ranges, Instruction &I, std::optional< Value * > Content, AAPointerInfo::AccessKind Kind, Type *Ty, Instruction *RemoteI=nullptr)
Add a new Access to the state at offset Offset and with size Size.
DenseMap< const Instruction *, SmallVector< unsigned > > RemoteIMap
AAPointerInfo::const_bin_iterator begin() const
AAPointerInfo::OffsetInfo ReturnedOffsets
Flag to determine if the underlying pointer is reaching a return statement in the associated function...
State & operator=(State &&R)
State(State &&SIS)=default
const AAPointerInfo::Access & getAccess(unsigned Index) const
SmallVector< AAPointerInfo::Access > AccessList
bool isAtFixpoint() const override
See AbstractState::isAtFixpoint().
bool forallInterferingAccesses(Instruction &I, F CB, AA::RangeTy &Range) const
See AAPointerInfo::forallInterferingAccesses.
static State getWorstState(const State &SIS)
Return the worst possible representable state.
int64_t numOffsetBins() const
AAPointerInfo::OffsetBinsTy OffsetBins
ChangeStatus indicateOptimisticFixpoint() override
See AbstractState::indicateOptimisticFixpoint().
State & operator=(const State &R)
ChangeStatus indicatePessimisticFixpoint() override
See AbstractState::indicatePessimisticFixpoint().
const State & getAssumed() const
static State getBestState(const State &SIS)
Return the best possible representable state.
bool isValidState() const override
See AbstractState::isValidState().
---------------------- AAIntraFnReachability Attribute ----------------------
ReachabilityQueryInfo(const ReachabilityQueryInfo &RQI)
unsigned Hash
Precomputed hash for this RQI.
const Instruction * From
Start here,.
Reachable Result
and remember if it worked:
ReachabilityQueryInfo(const Instruction *From, const ToTy *To)
ReachabilityQueryInfo(Attributor &A, const Instruction &From, const ToTy &To, const AA::InstExclusionSetTy *ES, bool MakeUnique)
Constructor replacement to ensure unique and stable sets are used for the cache.
const ToTy * To
reach this place,
const AA::InstExclusionSetTy * ExclusionSet
without going through any of these instructions,
unsigned computeHashValue() const
An abstract interface for address space information.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all align attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
Align getKnownAlign() const
Return known alignment.
static LLVM_ABI const char ID
An abstract attribute for getting assumption information.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract state for querying live call edges.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract Attribute for specializing "dynamic" components of denormal_fpenv to a known denormal mod...
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all dereferenceable attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for llvm::GlobalValue information interference.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for indirect call information interference.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface to track if a value leaves its defining function instance.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract Attribute for computing reachability between functions.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
bool canReach(Attributor &A, const Function &Fn) const
If the function represented by this position can reach Fn.
virtual bool instructionCanReach(Attributor &A, const Instruction &Inst, const Function &Fn, const AA::InstExclusionSetTy *ExclusionSet=nullptr) const =0
Can Inst reach Fn.
An abstract interface to determine reachability of point A to B.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for identifying pointers from which loads can be marked invariant.
static LLVM_ABI const char ID
Unique ID (due to the unique address).
An abstract interface for liveness abstract attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for memory access kind related attributes (readnone/readonly/writeonly).
bool isAssumedReadOnly() const
Return true if we assume that the underlying value is not accessed (=written) in its respective scope...
bool isKnownReadNone() const
Return true if we know that the underlying value is not read or accessed in its respective scope.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
bool isAssumedReadNone() const
Return true if we assume that the underlying value is not read or accessed in its respective scope.
An abstract interface for all memory location attributes (readnone/argmemonly/inaccessiblememonly/ina...
static LLVM_ABI std::string getMemoryLocationsAsStr(MemoryLocationsKind MLK)
Return the locations encoded by MLK as a readable string.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
StateType::base_t MemoryLocationsKind
An abstract interface for all nonnull attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for potential address space information.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all noalias attributes.
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all nocapture attributes.
@ NO_CAPTURE_MAYBE_RETURNED
If we do not capture the value in memory or through integers we can only communicate it back as a der...
@ NO_CAPTURE
If we do not capture the value in memory, through integers, or as a derived pointer we know it is not...
static LLVM_ABI const char ID
Unique ID (due to the unique address)
bool isAssumedNoCaptureMaybeReturned() const
Return true if we assume that the underlying value is not captured in its respective scope but we all...
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static LLVM_ABI void determineFunctionCaptureCapabilities(const IRPosition &IRP, const Function &F, BitIntegerState &State)
Update State according to the capture capabilities of F for position IRP.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An AbstractAttribute for nofree.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for norecurse.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An AbstractAttribute for noreturn.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI bool isAlignedBarrier(const CallBase &CB, bool ExecutedAligned)
Helper function to determine if CB is an aligned (GPU) barrier.
static LLVM_ABI bool isNonRelaxedAtomic(const Instruction *I)
Helper function used to determine whether an instruction is non-relaxed atomic.
static LLVM_ABI bool isNoSyncIntrinsic(const Instruction *I)
Helper function specific for intrinsics which are potentially volatile.
An abstract interface for all noundef attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract Attribute for determining the necessity of the convergent attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all nonnull attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See AbstractAttribute::isImpliedByIR(...).
A helper containing a list of offsets computed for a Use.
A container for a list of ranges.
static void set_difference(const RangeList &L, const RangeList &R, RangeList &D)
Copy ranges from L that are not in R, into D.
An abstract interface for struct information.
virtual bool reachesReturn() const =0
OffsetBinsTy::const_iterator const_bin_iterator
virtual const_bin_iterator begin() const =0
DenseMap< AA::RangeTy, SmallSet< unsigned, 4 > > OffsetBinsTy
static LLVM_ABI const char ID
Unique ID (due to the unique address)
virtual int64_t numOffsetBins() const =0
An abstract interface for potential values analysis.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI Value * getSingleValue(Attributor &A, const AbstractAttribute &AA, const IRPosition &IRP, SmallVectorImpl< AA::ValueAndContext > &Values)
Extract the single value in Values if any.
An abstract interface for privatizability.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for undefined behavior.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for getting all assumption underlying objects.
virtual bool forallUnderlyingObjects(function_ref< bool(Value &)> Pred, AA::ValueScope Scope=AA::Interprocedural) const =0
Check Pred on all underlying objects in Scope collected so far.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for range value analysis.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for value simplify abstract attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for willreturn.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
Helper to represent an access offset and size, with logic to deal with uncertainty and check for over...
static constexpr int64_t Unknown
static RangeTy getUnknown()
const Instruction * getCtxI() const
Base struct for all "concrete attribute" deductions.
void print(raw_ostream &OS) const
Helper functions, for debug purposes only.
virtual StateType & getState()=0
Return the internal abstract state for inspection.
An interface to query the internal state of an abstract attribute.
virtual bool isAtFixpoint() const =0
Return if this abstract state is fixed, thus does not need to be updated if information changes as it...
virtual bool isValidState() const =0
Return if this abstract state is in a valid state.
Helper for AA::PointerInfo::Access DenseMap/Set usage ignoring everything but the instruction.
static unsigned getHashValue(const Access &A)
AAPointerInfo::Access Access
static Access getTombstoneKey()
DenseMapInfo< Instruction * > Base
static bool isEqual(const Access &LHS, const Access &RHS)
static Access getEmptyKey()
constexpr uint64_t value() const
This is a hole in the type system and should not be abused.
std::function< void( const ArgumentReplacementInfo &, Function &, Function::arg_iterator)> CalleeRepairCBTy
Callee repair callback type.
const Argument & getReplacedArg() const
std::function< void(const ArgumentReplacementInfo &, AbstractCallSite, SmallVectorImpl< Value * > &)> ACSRepairCBTy
Abstract call site (ACS) repair callback type.
The fixpoint analysis framework that orchestrates the attribute deduction.
std::function< std::optional< Value * >( const IRPosition &, const AbstractAttribute *, bool &)> SimplifictionCallbackTy
Register CB as a simplification callback.
Specialization of the integer state for a bit-wise encoding.
BitIntegerState & addKnownBits(base_t Bits)
Add the bits in BitsEncoding to the "known bits".
Simple wrapper for a single bit (boolean) state.
static constexpr DenormalFPEnv getDefault()
static Access getTombstoneKey()
static unsigned getHashValue(const Access &A)
static Access getEmptyKey()
AAPointerInfo::Access Access
static bool isEqual(const Access &LHS, const Access &RHS)
static bool isEqual(const AA::RangeTy &A, const AA::RangeTy B)
static AA::RangeTy getTombstoneKey()
static unsigned getHashValue(const AA::RangeTy &Range)
static AA::RangeTy getEmptyKey()
static ReachabilityQueryInfo< ToTy > EmptyKey
static ReachabilityQueryInfo< ToTy > TombstoneKey
static ReachabilityQueryInfo< ToTy > * getEmptyKey()
DenseMapInfo< std::pair< const Instruction *, const ToTy * > > PairDMI
static ReachabilityQueryInfo< ToTy > * getTombstoneKey()
static bool isEqual(const ReachabilityQueryInfo< ToTy > *LHS, const ReachabilityQueryInfo< ToTy > *RHS)
DenseMapInfo< const AA::InstExclusionSetTy * > InstSetDMI
static unsigned getHashValue(const ReachabilityQueryInfo< ToTy > *RQI)
An information struct used to provide DenseMap with the various necessary components for a given valu...
State for dereferenceable attribute.
IncIntegerState DerefBytesState
State representing the number of dereferenceable bytes.
ChangeStatus manifest(Attributor &A) override
See AbstractAttribute::manifest(...).
Helper to describe and deal with positions in the LLVM-IR.
Function * getAssociatedFunction() const
Return the associated function, if any.
static const IRPosition callsite_returned(const CallBase &CB)
Create a position describing the returned value of CB.
static const IRPosition returned(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the returned value of F.
LLVM_ABI Argument * getAssociatedArgument() const
Return the associated argument, if any.
static const IRPosition value(const Value &V, const CallBaseContext *CBContext=nullptr)
Create a position describing the value of V.
int getCalleeArgNo() const
Return the callee argument number of the associated value if it is an argument or call site argument,...
static const IRPosition inst(const Instruction &I, const CallBaseContext *CBContext=nullptr)
Create a position describing the instruction I.
static const IRPosition callsite_argument(const CallBase &CB, unsigned ArgNo)
Create a position describing the argument of CB at position ArgNo.
@ IRP_ARGUMENT
An attribute for a function argument.
@ IRP_RETURNED
An attribute for the function return value.
@ IRP_CALL_SITE
An attribute for a call site (function scope).
@ IRP_CALL_SITE_RETURNED
An attribute for a call site return value.
@ IRP_FUNCTION
An attribute for a function (scope).
@ IRP_CALL_SITE_ARGUMENT
An attribute for a call site argument.
@ IRP_INVALID
An invalid position.
Instruction * getCtxI() const
Return the context instruction, if any.
static const IRPosition argument(const Argument &Arg, const CallBaseContext *CBContext=nullptr)
Create a position describing the argument Arg.
Type * getAssociatedType() const
Return the type this abstract attribute is associated with.
static const IRPosition function(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the function scope of F.
const CallBaseContext * getCallBaseContext() const
Get the call base context from the position.
Value & getAssociatedValue() const
Return the value this abstract attribute is associated with.
Value & getAnchorValue() const
Return the value this abstract attribute is anchored with.
int getCallSiteArgNo() const
Return the call site argument number of the associated value if it is an argument or call site argument, otherwise a sentinel value.
static const IRPosition function_scope(const IRPosition &IRP, const CallBaseContext *CBContext=nullptr)
Create a position with function scope matching the "context" of IRP.
Kind getPositionKind() const
Return the associated position kind.
bool isArgumentPosition() const
Return true if the position is an argument or call site argument.
static const IRPosition callsite_function(const CallBase &CB)
Create a position describing the function scope of CB.
Function * getAnchorScope() const
Return the Function surrounding the anchor value.
ConstantRange getKnown() const
Return the known state encoding.
ConstantRange getAssumed() const
Return the assumed state encoding.
base_t getAssumed() const
Return the assumed state encoding.
static constexpr base_t getWorstState()
Helper that allows to insert a new assumption string in the known assumption set by creating a (static) object.
FPClassTest KnownFPClasses
Floating-point classes the value could be one of.
A "must be executed context" for a given program point PP is the set of instructions, potentially before and after PP, that are executed always when PP is reached.
iterator & end()
Return a universal end iterator.
bool findInContextOf(const Instruction *I, const Instruction *PP)
Helper to look for I in the context of PP.
iterator & begin(const Instruction *PP)
Return an iterator to explore the context around PP.
bool checkForAllContext(const Instruction *PP, function_ref< bool(const Instruction *)> Pred)
}
static unsigned MaxPotentialValues
Helper to tie an abstract state implementation to an abstract attribute.
StateType & getState() override
See AbstractAttribute::getState(...).
bool isPassthrough() const
LLVM_ABI bool unionAssumed(std::optional< Value * > Other)
Merge Other into the currently assumed simplified value.
std::optional< Value * > SimplifiedAssociatedValue
An assumed simplified value.
Type * Ty
The type of the original value.