54#include "llvm/IR/IntrinsicsAMDGPU.h"
55#include "llvm/IR/IntrinsicsNVPTX.h"
// Debug type used by LLVM_DEBUG / -debug-only for this file.
#define DEBUG_TYPE "attributor"
86 cl::desc(
"Manifest Attributor internal string attributes."),
99 cl::desc(
"Maximum number of potential values to be "
100 "tracked for each position."),
105 "attributor-max-potential-values-iterations",
cl::Hidden,
107 "Maximum number of iterations we keep dismantling potential values."),
110STATISTIC(NumAAs,
"Number of abstract attributes created");
111STATISTIC(NumIndirectCallsPromoted,
"Number of indirect calls promoted");
// Some helper macros to deal with statistics tracking.
//
// Usage:
// For simple IR attribute tracking overload trackStatistics in the abstract
// attribute and choose the right STATS_DECLTRACK_********* macro, e.g.:
//   void trackStatistics() const override {
//     STATS_DECLTRACK_ARG_ATTR(returned)
//   }
// If there is a single "increment" that is to be recorded, use
// STATS_DECL(NAME, TYPE, MSG) and STATS_TRACK(NAME, TYPE).
//
// Build the human-readable statistic message for an IR attribute.
#define BUILD_STAT_MSG_IR_ATTR(TYPE, NAME)                                     \
  ("Number of " #TYPE " marked '" #NAME "'")
// Build the statistic counter identifier, e.g. NumIRArguments_returned.
#define BUILD_STAT_NAME(NAME, TYPE) NumIR##TYPE##_##NAME
// Declare a STATISTIC counter with a given name and message.
#define STATS_DECL_(NAME, MSG) STATISTIC(NAME, MSG);
#define STATS_DECL(NAME, TYPE, MSG)                                            \
  STATS_DECL_(BUILD_STAT_NAME(NAME, TYPE), MSG);
// Increment the (already declared) statistic counter.
#define STATS_TRACK(NAME, TYPE) ++(BUILD_STAT_NAME(NAME, TYPE));
// Declare and immediately increment a statistic counter.
#define STATS_DECLTRACK(NAME, TYPE, MSG)                                       \
  {STATS_DECL(NAME, TYPE, MSG) STATS_TRACK(NAME, TYPE)}
// Convenience wrappers per IR position kind.
#define STATS_DECLTRACK_ARG_ATTR(NAME)                                         \
  STATS_DECLTRACK(NAME, Arguments, BUILD_STAT_MSG_IR_ATTR(arguments, NAME))
#define STATS_DECLTRACK_CSARG_ATTR(NAME)                                       \
  STATS_DECLTRACK(NAME, CSArguments,                                           \
                  BUILD_STAT_MSG_IR_ATTR(call site arguments, NAME))
#define STATS_DECLTRACK_FN_ATTR(NAME)                                          \
  STATS_DECLTRACK(NAME, Function, BUILD_STAT_MSG_IR_ATTR(functions, NAME))
#define STATS_DECLTRACK_CS_ATTR(NAME)                                          \
  STATS_DECLTRACK(NAME, CS, BUILD_STAT_MSG_IR_ATTR(call site, NAME))
#define STATS_DECLTRACK_FNRET_ATTR(NAME)                                       \
  STATS_DECLTRACK(NAME, FunctionReturn,                                        \
                  BUILD_STAT_MSG_IR_ATTR(function returns, NAME))
#define STATS_DECLTRACK_CSRET_ATTR(NAME)                                       \
  STATS_DECLTRACK(NAME, CSReturn,                                              \
                  BUILD_STAT_MSG_IR_ATTR(call site returns, NAME))
#define STATS_DECLTRACK_FLOATING_ATTR(NAME)                                    \
  STATS_DECLTRACK(NAME, Floating,                                              \
                  ("Number of floating values known to be '" #NAME "'"))
157#define PIPE_OPERATOR(CLASS) \
158 raw_ostream &operator<<(raw_ostream &OS, const CLASS &AA) { \
159 return OS << static_cast<const AbstractAttribute &>(AA); \
216 bool HeaderOnly,
Cycle **CPtr =
nullptr) {
219 auto *BB =
I->getParent();
225 return !HeaderOnly || BB ==
C->getHeader();
236 if (
DL.getTypeSizeInBits(Ty) !=
DL.getTypeAllocSizeInBits(Ty))
261 StartPos +=
DL.getTypeAllocSizeInBits(ElTy);
271 bool AllowVolatile) {
272 if (!AllowVolatile &&
I->isVolatile())
276 return LI->getPointerOperand();
280 return SI->getPointerOperand();
284 return CXI->getPointerOperand();
288 return RMWI->getPointerOperand();
310 bool GetMinOffset,
bool AllowNonInbounds,
311 bool UseAssumed =
false) {
313 auto AttributorAnalysis = [&](
Value &V,
APInt &ROffset) ->
bool {
320 if (!ValueConstantRangeAA)
324 if (
Range.isFullSet())
330 ROffset =
Range.getSignedMin();
332 ROffset =
Range.getSignedMax();
343 const Value *Ptr, int64_t &BytesOffset,
348 true, AllowNonInbounds);
356template <
typename AAType,
typename StateType =
typename AAType::StateType,
358 bool RecurseForSelectAndPHI =
true>
360 Attributor &
A,
const AAType &QueryingAA, StateType &S,
362 LLVM_DEBUG(
dbgs() <<
"[Attributor] Clamp return value states for "
363 << QueryingAA <<
" into " << S <<
"\n");
365 assert((QueryingAA.getIRPosition().getPositionKind() ==
367 QueryingAA.getIRPosition().getPositionKind() ==
369 "Can only clamp returned value states for a function returned or call "
370 "site returned position!");
374 std::optional<StateType>
T;
377 auto CheckReturnValue = [&](
Value &RV) ->
bool {
391 <<
" AA: " <<
AA->getAsStr(&
A) <<
" @ " << RVPos <<
"\n");
392 const StateType &AAS =
AA->getState();
394 T = StateType::getBestState(AAS);
396 LLVM_DEBUG(
dbgs() <<
"[Attributor] AA State: " << AAS <<
" RV State: " <<
T
398 return T->isValidState();
401 if (!
A.checkForAllReturnedValues(CheckReturnValue, QueryingAA,
403 RecurseForSelectAndPHI))
404 S.indicatePessimisticFixpoint();
411template <
typename AAType,
typename BaseType,
412 typename StateType =
typename BaseType::StateType,
413 bool PropagateCallBaseContext =
false,
415 bool RecurseForSelectAndPHI =
true>
416struct AAReturnedFromReturnedValues :
public BaseType {
417 AAReturnedFromReturnedValues(
const IRPosition &IRP, Attributor &
A)
422 StateType S(StateType::getBestState(this->getState()));
424 RecurseForSelectAndPHI>(
426 PropagateCallBaseContext ? this->getCallBaseContext() : nullptr);
435template <
typename AAType,
typename StateType =
typename AAType::StateType,
437static void clampCallSiteArgumentStates(
Attributor &
A,
const AAType &QueryingAA,
439 LLVM_DEBUG(
dbgs() <<
"[Attributor] Clamp call site argument states for "
440 << QueryingAA <<
" into " << S <<
"\n");
442 assert(QueryingAA.getIRPosition().getPositionKind() ==
444 "Can only clamp call site argument states for an argument position!");
448 std::optional<StateType>
T;
451 unsigned ArgNo = QueryingAA.getIRPosition().getCallSiteArgNo();
471 LLVM_DEBUG(
dbgs() <<
"[Attributor] ACS: " << *ACS.getInstruction()
472 <<
" AA: " <<
AA->getAsStr(&
A) <<
" @" << ACSArgPos
474 const StateType &AAS =
AA->getState();
476 T = StateType::getBestState(AAS);
478 LLVM_DEBUG(
dbgs() <<
"[Attributor] AA State: " << AAS <<
" CSA State: " <<
T
480 return T->isValidState();
483 bool UsedAssumedInformation =
false;
484 if (!
A.checkForAllCallSites(CallSiteCheck, QueryingAA,
true,
485 UsedAssumedInformation))
486 S.indicatePessimisticFixpoint();
493template <
typename AAType,
typename BaseType,
494 typename StateType =
typename AAType::StateType,
496bool getArgumentStateFromCallBaseContext(
Attributor &
A,
500 "Expected an 'argument' position !");
506 assert(ArgNo >= 0 &&
"Invalid Arg No!");
520 const StateType &CBArgumentState =
521 static_cast<const StateType &
>(
AA->getState());
523 LLVM_DEBUG(
dbgs() <<
"[Attributor] Briding Call site context to argument"
524 <<
"Position:" << Pos <<
"CB Arg state:" << CBArgumentState
528 State ^= CBArgumentState;
533template <
typename AAType,
typename BaseType,
534 typename StateType =
typename AAType::StateType,
535 bool BridgeCallBaseContext =
false,
537struct AAArgumentFromCallSiteArguments :
public BaseType {
538 AAArgumentFromCallSiteArguments(
const IRPosition &IRP, Attributor &
A)
543 StateType S = StateType::getBestState(this->getState());
545 if (BridgeCallBaseContext) {
547 getArgumentStateFromCallBaseContext<AAType,
BaseType, StateType,
549 A, *
this, this->getIRPosition(), S);
553 clampCallSiteArgumentStates<AAType, StateType, IRAttributeKind>(
A, *
this,
563template <
typename AAType,
typename BaseType,
564 typename StateType =
typename BaseType::StateType,
565 bool IntroduceCallBaseContext =
false,
567struct AACalleeToCallSite :
public BaseType {
568 AACalleeToCallSite(
const IRPosition &IRP, Attributor &
A) :
BaseType(IRP,
A) {}
572 auto IRPKind = this->getIRPosition().getPositionKind();
575 "Can only wrap function returned positions for call site "
576 "returned positions!");
577 auto &S = this->getState();
580 if (IntroduceCallBaseContext)
581 LLVM_DEBUG(
dbgs() <<
"[Attributor] Introducing call base context:" << CB
586 for (
const Function *Callee : Callees) {
590 IntroduceCallBaseContext ? &CB :
nullptr)
592 *
Callee, IntroduceCallBaseContext ? &CB : nullptr);
594 if (Attribute::isEnumAttrKind(IRAttributeKind)) {
597 A,
this, FnPos, DepClassTy::REQUIRED, IsKnown))
603 A.getAAFor<AAType>(*
this, FnPos, DepClassTy::REQUIRED);
607 if (S.isAtFixpoint())
608 return S.isValidState();
612 if (!
A.checkForAllCallees(CalleePred, *
this, CB))
613 return S.indicatePessimisticFixpoint();
619template <
class AAType,
typename StateType =
typename AAType::StateType>
625 auto EIt = Explorer.
begin(CtxI), EEnd = Explorer.
end(CtxI);
626 for (
unsigned u = 0;
u <
Uses.size(); ++
u) {
630 if (Found &&
AA.followUseInMBEC(
A, U, UserI, State))
645template <
class AAType,
typename StateType =
typename AAType::StateType>
646static void followUsesInMBEC(AAType &
AA,
Attributor &
A, StateType &S,
648 const Value &Val =
AA.getIRPosition().getAssociatedValue();
653 A.getInfoCache().getMustBeExecutedContextExplorer();
659 for (
const Use &U : Val.
uses())
662 followUsesInContext<AAType>(
AA,
A, *Explorer, &CtxI,
Uses, S);
664 if (S.isAtFixpoint())
708 StateType ParentState;
712 ParentState.indicateOptimisticFixpoint();
714 for (
const BasicBlock *BB : Br->successors()) {
715 StateType ChildState;
717 size_t BeforeSize =
Uses.size();
718 followUsesInContext(
AA,
A, *Explorer, &BB->front(),
Uses, ChildState);
721 for (
auto It =
Uses.begin() + BeforeSize; It !=
Uses.end();)
724 ParentState &= ChildState;
798 R.indicatePessimisticFixpoint();
815 BS.indicateOptimisticFixpoint();
821 BS.indicatePessimisticFixpoint();
891 template <
typename F>
898 if (!
Range.mayOverlap(ItRange))
900 bool IsExact =
Range == ItRange && !
Range.offsetOrSizeAreUnknown();
901 for (
auto Index : It.getSecond()) {
911 template <
typename F>
922 for (
unsigned Index : LocalList->getSecond()) {
925 if (
Range.offsetAndSizeAreUnknown())
941 RemoteI = RemoteI ? RemoteI : &
I;
945 bool AccExists =
false;
947 for (
auto Index : LocalList) {
949 if (
A.getLocalInst() == &
I) {
958 <<
"[AAPointerInfo] Inserting access in new offset bins\n";);
960 for (
auto Key : ToAdd) {
967 AccessList.emplace_back(&
I, RemoteI, Ranges, Content, Kind, Ty);
969 "New Access should have been at AccIndex");
970 LocalList.push_back(AccIndex);
979 auto Before = Current;
981 if (Current == Before)
984 auto &ExistingRanges = Before.getRanges();
985 auto &NewRanges = Current.getRanges();
992 <<
"[AAPointerInfo] Removing access from old offset bins\n";);
999 "Expected bin to actually contain the Access.");
1000 Bin.erase(AccIndex);
1021struct AAPointerInfoImpl
1022 :
public StateWrapper<AA::PointerInfo::State, AAPointerInfo> {
1027 const std::string getAsStr(
Attributor *
A)
const override {
1028 return std::string(
"PointerInfo ") +
1029 (isValidState() ? (std::string(
"#") +
1030 std::to_string(OffsetBins.size()) +
" bins")
1035 [](int64_t O) {
return std::to_string(O); }),
1043 return AAPointerInfo::manifest(
A);
1046 const_bin_iterator
begin()
const override {
return State::begin(); }
1047 const_bin_iterator
end()
const override {
return State::end(); }
1048 int64_t numOffsetBins()
const override {
return State::numOffsetBins(); }
1049 bool reachesReturn()
const override {
1050 return !ReturnedOffsets.isUnassigned();
1052 void addReturnedOffsetsTo(OffsetInfo &OI)
const override {
1053 if (ReturnedOffsets.isUnknown()) {
1058 OffsetInfo MergedOI;
1059 for (
auto Offset : ReturnedOffsets) {
1060 OffsetInfo TmpOI = OI;
1062 MergedOI.merge(TmpOI);
1064 OI = std::move(MergedOI);
1067 ChangeStatus setReachesReturn(
const OffsetInfo &ReachedReturnedOffsets) {
1068 if (ReturnedOffsets.isUnknown())
1069 return ChangeStatus::UNCHANGED;
1070 if (ReachedReturnedOffsets.isUnknown()) {
1071 ReturnedOffsets.setUnknown();
1072 return ChangeStatus::CHANGED;
1074 if (ReturnedOffsets.merge(ReachedReturnedOffsets))
1075 return ChangeStatus::CHANGED;
1076 return ChangeStatus::UNCHANGED;
1079 bool forallInterferingAccesses(
1081 function_ref<
bool(
const AAPointerInfo::Access &,
bool)> CB)
1083 return State::forallInterferingAccesses(
Range, CB);
1086 bool forallInterferingAccesses(
1087 Attributor &
A,
const AbstractAttribute &QueryingAA, Instruction &
I,
1088 bool FindInterferingWrites,
bool FindInterferingReads,
1089 function_ref<
bool(
const Access &,
bool)> UserCB,
bool &HasBeenWrittenTo,
1091 function_ref<
bool(
const Access &)> SkipCB)
const override {
1092 HasBeenWrittenTo =
false;
1094 SmallPtrSet<const Access *, 8> DominatingWrites;
1102 const auto *ExecDomainAA =
A.lookupAAFor<AAExecutionDomain>(
1104 bool AllInSameNoSyncFn = IsAssumedNoSync;
1105 bool InstIsExecutedByInitialThreadOnly =
1106 ExecDomainAA && ExecDomainAA->isExecutedByInitialThreadOnly(
I);
1113 bool InstIsExecutedInAlignedRegion =
1114 FindInterferingReads && ExecDomainAA &&
1115 ExecDomainAA->isExecutedInAlignedRegion(
A,
I);
1117 if (InstIsExecutedInAlignedRegion || InstIsExecutedByInitialThreadOnly)
1118 A.recordDependence(*ExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1120 InformationCache &InfoCache =
A.getInfoCache();
1121 bool IsThreadLocalObj =
1130 auto CanIgnoreThreadingForInst = [&](
const Instruction &
I) ->
bool {
1131 if (IsThreadLocalObj || AllInSameNoSyncFn)
1133 const auto *FnExecDomainAA =
1134 I.getFunction() == &
Scope
1136 :
A.lookupAAFor<AAExecutionDomain>(
1139 if (!FnExecDomainAA)
1141 if (InstIsExecutedInAlignedRegion ||
1142 (FindInterferingWrites &&
1143 FnExecDomainAA->isExecutedInAlignedRegion(
A,
I))) {
1144 A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1147 if (InstIsExecutedByInitialThreadOnly &&
1148 FnExecDomainAA->isExecutedByInitialThreadOnly(
I)) {
1149 A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1158 auto CanIgnoreThreading = [&](
const Access &Acc) ->
bool {
1159 return CanIgnoreThreadingForInst(*Acc.getRemoteInst()) ||
1160 (Acc.getRemoteInst() != Acc.getLocalInst() &&
1161 CanIgnoreThreadingForInst(*Acc.getLocalInst()));
1165 bool IsKnownNoRecurse;
1173 bool InstInKernel =
A.getInfoCache().isKernel(Scope);
1174 bool ObjHasKernelLifetime =
false;
1175 const bool UseDominanceReasoning =
1176 FindInterferingWrites && IsKnownNoRecurse;
1177 const DominatorTree *DT =
1187 case AA::GPUAddressSpace::Shared:
1188 case AA::GPUAddressSpace::Constant:
1189 case AA::GPUAddressSpace::Local:
1201 std::function<bool(
const Function &)> IsLiveInCalleeCB;
1206 const Function *AIFn = AI->getFunction();
1207 ObjHasKernelLifetime =
A.getInfoCache().isKernel(*AIFn);
1208 bool IsKnownNoRecurse;
1211 IsKnownNoRecurse)) {
1212 IsLiveInCalleeCB = [AIFn](
const Function &Fn) {
return AIFn != &Fn; };
1217 ObjHasKernelLifetime = HasKernelLifetime(GV, *GV->getParent());
1218 if (ObjHasKernelLifetime)
1219 IsLiveInCalleeCB = [&
A](
const Function &Fn) {
1220 return !
A.getInfoCache().isKernel(Fn);
1228 auto AccessCB = [&](
const Access &Acc,
bool Exact) {
1229 Function *AccScope = Acc.getRemoteInst()->getFunction();
1230 bool AccInSameScope = AccScope == &
Scope;
1234 if (InstInKernel && ObjHasKernelLifetime && !AccInSameScope &&
1235 A.getInfoCache().isKernel(*AccScope))
1238 if (Exact && Acc.isMustAccess() && Acc.getRemoteInst() != &
I) {
1239 if (Acc.isWrite() || (
isa<LoadInst>(
I) && Acc.isWriteOrAssumption()))
1240 ExclusionSet.
insert(Acc.getRemoteInst());
1243 if ((!FindInterferingWrites || !Acc.isWriteOrAssumption()) &&
1244 (!FindInterferingReads || !Acc.isRead()))
1247 bool Dominates = FindInterferingWrites && DT && Exact &&
1248 Acc.isMustAccess() && AccInSameScope &&
1251 DominatingWrites.
insert(&Acc);
1255 AllInSameNoSyncFn &= Acc.getRemoteInst()->getFunction() == &
Scope;
1257 InterferingAccesses.
push_back({&Acc, Exact});
1260 if (!State::forallInterferingAccesses(
I, AccessCB,
Range))
1263 HasBeenWrittenTo = !DominatingWrites.
empty();
1267 for (
const Access *Acc : DominatingWrites) {
1268 if (!LeastDominatingWriteInst) {
1269 LeastDominatingWriteInst = Acc->getRemoteInst();
1270 }
else if (DT->
dominates(LeastDominatingWriteInst,
1271 Acc->getRemoteInst())) {
1272 LeastDominatingWriteInst = Acc->getRemoteInst();
1277 auto CanSkipAccess = [&](
const Access &Acc,
bool Exact) {
1278 if (SkipCB && SkipCB(Acc))
1280 if (!CanIgnoreThreading(Acc))
1286 bool ReadChecked = !FindInterferingReads;
1287 bool WriteChecked = !FindInterferingWrites;
1293 &ExclusionSet, IsLiveInCalleeCB))
1298 if (!WriteChecked) {
1300 &ExclusionSet, IsLiveInCalleeCB))
1301 WriteChecked =
true;
1315 if (!WriteChecked && HasBeenWrittenTo &&
1316 Acc.getRemoteInst()->getFunction() != &Scope) {
1318 const auto *FnReachabilityAA =
A.getAAFor<AAInterFnReachability>(
1320 if (FnReachabilityAA) {
1326 if (!FnReachabilityAA->instructionCanReach(
1327 A, *LeastDominatingWriteInst,
1328 *Acc.getRemoteInst()->getFunction(), &ExclusionSet))
1329 WriteChecked =
true;
1336 if (ReadChecked && WriteChecked)
1339 if (!DT || !UseDominanceReasoning)
1341 if (!DominatingWrites.count(&Acc))
1343 return LeastDominatingWriteInst != Acc.getRemoteInst();
1348 for (
auto &It : InterferingAccesses) {
1349 if ((!AllInSameNoSyncFn && !IsThreadLocalObj && !ExecDomainAA) ||
1350 !CanSkipAccess(*It.first, It.second)) {
1351 if (!UserCB(*It.first, It.second))
1359 const AAPointerInfo &OtherAA,
1361 using namespace AA::PointerInfo;
1363 return indicatePessimisticFixpoint();
1366 const auto &OtherAAImpl =
static_cast<const AAPointerInfoImpl &
>(OtherAA);
1367 bool IsByval = OtherAAImpl.getAssociatedArgument()->hasByValAttr();
1368 Changed |= setReachesReturn(OtherAAImpl.ReturnedOffsets);
1371 const auto &State = OtherAAImpl.getState();
1372 for (
const auto &It : State) {
1373 for (
auto Index : It.getSecond()) {
1374 const auto &RAcc = State.getAccess(Index);
1375 if (IsByval && !RAcc.isRead())
1377 bool UsedAssumedInformation =
false;
1379 auto Content =
A.translateArgumentToCallSiteContent(
1380 RAcc.getContent(), CB, *
this, UsedAssumedInformation);
1381 AK =
AccessKind(AK & (IsByval ? AccessKind::AK_R : AccessKind::AK_RW));
1382 AK =
AccessKind(AK | (RAcc.isMayAccess() ? AK_MAY : AK_MUST));
1384 Changed |= addAccess(
A, RAcc.getRanges(), CB, Content, AK,
1385 RAcc.getType(), RAcc.getRemoteInst());
1391 ChangeStatus translateAndAddState(Attributor &
A,
const AAPointerInfo &OtherAA,
1392 const OffsetInfo &Offsets, CallBase &CB,
1394 using namespace AA::PointerInfo;
1396 return indicatePessimisticFixpoint();
1398 const auto &OtherAAImpl =
static_cast<const AAPointerInfoImpl &
>(OtherAA);
1402 const auto &State = OtherAAImpl.getState();
1403 for (
const auto &It : State) {
1404 for (
auto Index : It.getSecond()) {
1405 const auto &RAcc = State.getAccess(Index);
1406 if (!IsMustAcc && RAcc.isAssumption())
1408 for (
auto Offset : Offsets) {
1412 if (!NewRanges.isUnknown()) {
1413 NewRanges.addToAllOffsets(Offset);
1418 Changed |= addAccess(
A, NewRanges, CB, RAcc.getContent(), AK,
1419 RAcc.getType(), RAcc.getRemoteInst());
1428 void trackPointerInfoStatistics(
const IRPosition &IRP)
const {}
1431 void dumpState(raw_ostream &O) {
1432 for (
auto &It : OffsetBins) {
1433 O <<
"[" << It.first.Offset <<
"-" << It.first.Offset + It.first.Size
1434 <<
"] : " << It.getSecond().size() <<
"\n";
1435 for (
auto AccIndex : It.getSecond()) {
1436 auto &Acc = AccessList[AccIndex];
1437 O <<
" - " << Acc.getKind() <<
" - " << *Acc.getLocalInst() <<
"\n";
1438 if (Acc.getLocalInst() != Acc.getRemoteInst())
1439 O <<
" --> " << *Acc.getRemoteInst()
1441 if (!Acc.isWrittenValueYetUndetermined()) {
1443 O <<
" - c: func " << Acc.getWrittenValue()->getName()
1445 else if (Acc.getWrittenValue())
1446 O <<
" - c: " << *Acc.getWrittenValue() <<
"\n";
1448 O <<
" - c: <unknown>\n";
1455struct AAPointerInfoFloating :
public AAPointerInfoImpl {
1457 AAPointerInfoFloating(
const IRPosition &IRP, Attributor &
A)
1458 : AAPointerInfoImpl(IRP,
A) {}
1461 bool handleAccess(Attributor &
A, Instruction &
I,
1462 std::optional<Value *> Content,
AccessKind Kind,
1465 using namespace AA::PointerInfo;
1467 const DataLayout &
DL =
A.getDataLayout();
1468 TypeSize AccessSize =
DL.getTypeStoreSize(&Ty);
1477 if (!VT || VT->getElementCount().isScalable() ||
1479 (*Content)->getType() != VT ||
1480 DL.getTypeStoreSize(VT->getElementType()).isScalable()) {
1491 int64_t ElementSize =
DL.getTypeStoreSize(ElementType).getFixedValue();
1496 for (
int i = 0, e = VT->getElementCount().getFixedValue(); i != e; ++i) {
1498 ConstContent, ConstantInt::get(
Int32Ty, i));
1505 for (
auto &ElementOffset : ElementOffsets)
1506 ElementOffset += ElementSize;
1519 bool collectConstantsForGEP(Attributor &
A,
const DataLayout &
DL,
1520 OffsetInfo &UsrOI,
const OffsetInfo &PtrOI,
1521 const GEPOperator *
GEP);
1524 void trackStatistics()
const override {
1525 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1529bool AAPointerInfoFloating::collectConstantsForGEP(Attributor &
A,
1530 const DataLayout &
DL,
1532 const OffsetInfo &PtrOI,
1533 const GEPOperator *
GEP) {
1534 unsigned BitWidth =
DL.getIndexTypeSizeInBits(
GEP->getType());
1535 SmallMapVector<Value *, APInt, 4> VariableOffsets;
1538 assert(!UsrOI.isUnknown() && !PtrOI.isUnknown() &&
1539 "Don't look for constant values if the offset has already been "
1540 "determined to be unknown.");
1542 if (!
GEP->collectOffset(
DL,
BitWidth, VariableOffsets, ConstantOffset)) {
1548 << (VariableOffsets.
empty() ?
"" :
"not") <<
" constant "
1552 Union.addToAll(ConstantOffset.getSExtValue());
1557 for (
const auto &VI : VariableOffsets) {
1558 auto *PotentialConstantsAA =
A.getAAFor<AAPotentialConstantValues>(
1560 if (!PotentialConstantsAA || !PotentialConstantsAA->isValidState()) {
1566 if (PotentialConstantsAA->undefIsContained())
1573 auto &AssumedSet = PotentialConstantsAA->getAssumedSet();
1574 if (AssumedSet.empty())
1578 for (
const auto &ConstOffset : AssumedSet) {
1579 auto CopyPerOffset =
Union;
1580 CopyPerOffset.addToAll(ConstOffset.getSExtValue() *
1581 VI.second.getZExtValue());
1582 Product.merge(CopyPerOffset);
1587 UsrOI = std::move(Union);
1591ChangeStatus AAPointerInfoFloating::updateImpl(Attributor &
A) {
1592 using namespace AA::PointerInfo;
1594 const DataLayout &
DL =
A.getDataLayout();
1595 Value &AssociatedValue = getAssociatedValue();
1597 DenseMap<Value *, OffsetInfo> OffsetInfoMap;
1598 OffsetInfoMap[&AssociatedValue].
insert(0);
1600 auto HandlePassthroughUser = [&](
Value *Usr,
Value *CurPtr,
bool &Follow) {
1611 "CurPtr does not exist in the map!");
1613 auto &UsrOI = OffsetInfoMap[Usr];
1614 auto &PtrOI = OffsetInfoMap[CurPtr];
1615 assert(!PtrOI.isUnassigned() &&
1616 "Cannot pass through if the input Ptr was not visited!");
1622 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
1624 User *Usr =
U.getUser();
1625 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Analyze " << *CurPtr <<
" in " << *Usr
1628 "The current pointer offset should have been seeded!");
1629 assert(!OffsetInfoMap[CurPtr].isUnassigned() &&
1630 "Current pointer should be assigned");
1634 return HandlePassthroughUser(Usr, CurPtr, Follow);
1636 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Unhandled constant user " << *CE
1644 auto &UsrOI = OffsetInfoMap[Usr];
1645 auto &PtrOI = OffsetInfoMap[CurPtr];
1647 if (UsrOI.isUnknown())
1650 if (PtrOI.isUnknown()) {
1656 Follow = collectConstantsForGEP(
A,
DL, UsrOI, PtrOI,
GEP);
1662 return HandlePassthroughUser(Usr, CurPtr, Follow);
1667 if (RI->getFunction() == getAssociatedFunction()) {
1668 auto &PtrOI = OffsetInfoMap[CurPtr];
1669 Changed |= setReachesReturn(PtrOI);
1682 auto &UsrOI = PhiIt->second;
1683 auto &PtrOI = OffsetInfoMap[CurPtr];
1687 if (PtrOI.isUnknown()) {
1688 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI operand offset unknown "
1689 << *CurPtr <<
" in " << *
PHI <<
"\n");
1690 Follow = !UsrOI.isUnknown();
1696 if (UsrOI == PtrOI) {
1697 assert(!PtrOI.isUnassigned() &&
1698 "Cannot assign if the current Ptr was not visited!");
1699 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI is invariant (so far)");
1709 auto It = OffsetInfoMap.
find(CurPtrBase);
1710 if (It == OffsetInfoMap.
end()) {
1711 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI operand is too complex "
1712 << *CurPtr <<
" in " << *
PHI
1713 <<
" (base: " << *CurPtrBase <<
")\n");
1727 A.getInfoCache().getAnalysisResultForFunction<CycleAnalysis>(
1728 *
PHI->getFunction());
1730 auto BaseOI = It->getSecond();
1731 BaseOI.addToAll(
Offset.getZExtValue());
1732 if (IsFirstPHIUser || BaseOI == UsrOI) {
1733 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI is invariant " << *CurPtr
1734 <<
" in " << *Usr <<
"\n");
1735 return HandlePassthroughUser(Usr, CurPtr, Follow);
1739 dbgs() <<
"[AAPointerInfo] PHI operand pointer offset mismatch "
1740 << *CurPtr <<
" in " << *
PHI <<
"\n");
1759 if (!handleAccess(
A, *LoadI,
nullptr, AK,
1760 OffsetInfoMap[CurPtr].Offsets,
Changed,
1766 return II->isAssumeLikeIntrinsic();
1777 }
while (FromI && FromI != ToI);
1782 auto IsValidAssume = [&](IntrinsicInst &IntrI) {
1783 if (IntrI.getIntrinsicID() != Intrinsic::assume)
1786 if (IntrI.getParent() == BB) {
1787 if (IsImpactedInRange(LoadI->getNextNode(), &IntrI))
1793 if ((*PredIt) != BB)
1798 if (SuccBB == IntrBB)
1804 if (IsImpactedInRange(LoadI->getNextNode(), BB->
getTerminator()))
1806 if (IsImpactedInRange(&IntrBB->
front(), &IntrI))
1812 std::pair<Value *, IntrinsicInst *> Assumption;
1813 for (
const Use &LoadU : LoadI->uses()) {
1815 if (!CmpI->isEquality() || !CmpI->isTrueWhenEqual())
1817 for (
const Use &CmpU : CmpI->uses()) {
1819 if (!IsValidAssume(*IntrI))
1821 int Idx = CmpI->getOperandUse(0) == LoadU;
1822 Assumption = {CmpI->getOperand(Idx), IntrI};
1827 if (Assumption.first)
1832 if (!Assumption.first || !Assumption.second)
1836 << *Assumption.second <<
": " << *LoadI
1837 <<
" == " << *Assumption.first <<
"\n");
1838 bool UsedAssumedInformation =
false;
1839 std::optional<Value *> Content =
nullptr;
1840 if (Assumption.first)
1842 A.getAssumedSimplified(*Assumption.first, *
this,
1844 return handleAccess(
1845 A, *Assumption.second, Content, AccessKind::AK_ASSUMPTION,
1846 OffsetInfoMap[CurPtr].Offsets,
Changed, *LoadI->getType());
1851 for (
auto *OtherOp : OtherOps) {
1852 if (OtherOp == CurPtr) {
1855 <<
"[AAPointerInfo] Escaping use in store like instruction " <<
I
1867 bool UsedAssumedInformation =
false;
1868 std::optional<Value *> Content =
nullptr;
1870 Content =
A.getAssumedSimplified(
1872 return handleAccess(
A,
I, Content, AK, OffsetInfoMap[CurPtr].Offsets,
1877 return HandleStoreLike(*StoreI, StoreI->getValueOperand(),
1878 *StoreI->getValueOperand()->getType(),
1879 {StoreI->getValueOperand()}, AccessKind::AK_W);
1881 return HandleStoreLike(*RMWI,
nullptr, *RMWI->getValOperand()->getType(),
1882 {RMWI->getValOperand()}, AccessKind::AK_RW);
1884 return HandleStoreLike(
1885 *CXI,
nullptr, *CXI->getNewValOperand()->getType(),
1886 {CXI->getCompareOperand(), CXI->getNewValOperand()},
1893 A.getInfoCache().getTargetLibraryInfoForFunction(*CB->
getFunction());
1898 const auto *CSArgPI =
A.getAAFor<AAPointerInfo>(
1904 Changed = translateAndAddState(
A, *CSArgPI, OffsetInfoMap[CurPtr], *CB,
1907 if (!CSArgPI->reachesReturn())
1908 return isValidState();
1911 if (!Callee ||
Callee->arg_size() <= ArgNo)
1913 bool UsedAssumedInformation =
false;
1914 auto ReturnedValue =
A.getAssumedSimplified(
1919 auto *Arg =
Callee->getArg(ArgNo);
1920 if (ReturnedArg && Arg != ReturnedArg)
1922 bool IsRetMustAcc = IsArgMustAcc && (ReturnedArg == Arg);
1923 const auto *CSRetPI =
A.getAAFor<AAPointerInfo>(
1927 OffsetInfo OI = OffsetInfoMap[CurPtr];
1928 CSArgPI->addReturnedOffsetsTo(OI);
1930 translateAndAddState(
A, *CSRetPI, OI, *CB, IsRetMustAcc) |
Changed;
1931 return isValidState();
1933 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Call user not handled " << *CB
1938 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] User not handled " << *Usr <<
"\n");
1941 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
1942 assert(OffsetInfoMap.
count(OldU) &&
"Old use should be known already!");
1943 assert(!OffsetInfoMap[OldU].isUnassigned() &&
"Old use should be assinged");
1944 if (OffsetInfoMap.
count(NewU)) {
1946 if (!(OffsetInfoMap[NewU] == OffsetInfoMap[OldU])) {
1947 dbgs() <<
"[AAPointerInfo] Equivalent use callback failed: "
1948 << OffsetInfoMap[NewU] <<
" vs " << OffsetInfoMap[OldU]
1952 return OffsetInfoMap[NewU] == OffsetInfoMap[OldU];
1955 return HandlePassthroughUser(NewU.get(), OldU.
get(), Unused);
1957 if (!
A.checkForAllUses(UsePred, *
this, AssociatedValue,
1959 true, EquivalentUseCB)) {
1960 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Check for all uses failed, abort!\n");
1961 return indicatePessimisticFixpoint();
1965 dbgs() <<
"Accesses by bin after update:\n";
1972struct AAPointerInfoReturned final : AAPointerInfoImpl {
1973 AAPointerInfoReturned(
const IRPosition &IRP, Attributor &
A)
1974 : AAPointerInfoImpl(IRP,
A) {}
1978 return indicatePessimisticFixpoint();
1982 void trackStatistics()
const override {
1983 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1987struct AAPointerInfoArgument final : AAPointerInfoFloating {
1988 AAPointerInfoArgument(
const IRPosition &IRP, Attributor &
A)
1989 : AAPointerInfoFloating(IRP,
A) {}
1992 void trackStatistics()
const override {
1993 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1997struct AAPointerInfoCallSiteArgument final : AAPointerInfoFloating {
1998 AAPointerInfoCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
1999 : AAPointerInfoFloating(IRP,
A) {}
2003 using namespace AA::PointerInfo;
2009 if (
auto Length =
MI->getLengthInBytes())
2010 LengthVal =
Length->getSExtValue();
2011 unsigned ArgNo = getIRPosition().getCallSiteArgNo();
2014 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Unhandled memory intrinsic "
2016 return indicatePessimisticFixpoint();
2019 ArgNo == 0 ? AccessKind::AK_MUST_WRITE : AccessKind::AK_MUST_READ;
2021 Changed | addAccess(
A, {0, LengthVal}, *
MI,
nullptr,
Kind,
nullptr);
2024 dbgs() <<
"Accesses by bin after update:\n";
2035 Argument *Arg = getAssociatedArgument();
2039 A.getAAFor<AAPointerInfo>(*
this, ArgPos, DepClassTy::REQUIRED);
2040 if (ArgAA && ArgAA->getState().isValidState())
2041 return translateAndAddStateFromCallee(
A, *ArgAA,
2044 return indicatePessimisticFixpoint();
2047 bool IsKnownNoCapture;
2049 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNoCapture))
2050 return indicatePessimisticFixpoint();
2052 bool IsKnown =
false;
2054 return ChangeStatus::UNCHANGED;
2057 ReadOnly ? AccessKind::AK_MAY_READ : AccessKind::AK_MAY_READ_WRITE;
2063 void trackStatistics()
const override {
2064 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
2068struct AAPointerInfoCallSiteReturned final : AAPointerInfoFloating {
2069 AAPointerInfoCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
2070 : AAPointerInfoFloating(IRP,
A) {}
2073 void trackStatistics()
const override {
2074 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
2082struct AANoUnwindImpl : AANoUnwind {
2083 AANoUnwindImpl(
const IRPosition &IRP, Attributor &
A) : AANoUnwind(IRP,
A) {}
2089 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2093 const std::string getAsStr(Attributor *
A)
const override {
2094 return getAssumed() ?
"nounwind" :
"may-unwind";
2100 (unsigned)Instruction::Invoke, (
unsigned)Instruction::CallBr,
2101 (unsigned)Instruction::Call, (
unsigned)Instruction::CleanupRet,
2102 (unsigned)Instruction::CatchSwitch, (
unsigned)Instruction::Resume};
2105 if (!
I.mayThrow(
true))
2109 bool IsKnownNoUnwind;
2117 bool UsedAssumedInformation =
false;
2118 if (!
A.checkForAllInstructions(CheckForNoUnwind, *
this, Opcodes,
2119 UsedAssumedInformation))
2120 return indicatePessimisticFixpoint();
2122 return ChangeStatus::UNCHANGED;
2126struct AANoUnwindFunction final :
public AANoUnwindImpl {
2127 AANoUnwindFunction(
const IRPosition &IRP, Attributor &
A)
2128 : AANoUnwindImpl(IRP,
A) {}
2135struct AANoUnwindCallSite final
2136 : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl> {
2137 AANoUnwindCallSite(
const IRPosition &IRP, Attributor &
A)
2138 : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl>(IRP,
A) {}
2149 case Intrinsic::nvvm_barrier_cta_sync_aligned_all:
2150 case Intrinsic::nvvm_barrier_cta_sync_aligned_count:
2151 case Intrinsic::nvvm_barrier_cta_red_and_aligned_all:
2152 case Intrinsic::nvvm_barrier_cta_red_and_aligned_count:
2153 case Intrinsic::nvvm_barrier_cta_red_or_aligned_all:
2154 case Intrinsic::nvvm_barrier_cta_red_or_aligned_count:
2155 case Intrinsic::nvvm_barrier_cta_red_popc_aligned_all:
2156 case Intrinsic::nvvm_barrier_cta_red_popc_aligned_count:
2158 case Intrinsic::amdgcn_s_barrier:
2159 if (ExecutedAligned)
2182 switch (
I->getOpcode()) {
2183 case Instruction::AtomicRMW:
2186 case Instruction::Store:
2189 case Instruction::Load:
2194 "New atomic operations need to be known in the attributor.");
2206 return !
MI->isVolatile();
2222 const std::string getAsStr(Attributor *
A)
const override {
2223 return getAssumed() ?
"nosync" :
"may-sync";
2239 if (
I.mayReadOrWriteMemory())
2253 bool UsedAssumedInformation =
false;
2254 if (!
A.checkForAllReadWriteInstructions(CheckRWInstForNoSync, *
this,
2255 UsedAssumedInformation) ||
2256 !
A.checkForAllCallLikeInstructions(CheckForNoSync, *
this,
2257 UsedAssumedInformation))
2258 return indicatePessimisticFixpoint();
2263struct AANoSyncFunction final :
public AANoSyncImpl {
2264 AANoSyncFunction(
const IRPosition &IRP, Attributor &
A)
2265 : AANoSyncImpl(IRP,
A) {}
2272struct AANoSyncCallSite final : AACalleeToCallSite<AANoSync, AANoSyncImpl> {
2273 AANoSyncCallSite(
const IRPosition &IRP, Attributor &
A)
2274 : AACalleeToCallSite<AANoSync, AANoSyncImpl>(IRP,
A) {}
2284struct AANoFreeImpl :
public AANoFree {
2285 AANoFreeImpl(
const IRPosition &IRP, Attributor &
A) : AANoFree(IRP,
A) {}
2291 DepClassTy::NONE, IsKnown));
2301 DepClassTy::REQUIRED, IsKnown);
2304 bool UsedAssumedInformation =
false;
2305 if (!
A.checkForAllCallLikeInstructions(CheckForNoFree, *
this,
2306 UsedAssumedInformation))
2307 return indicatePessimisticFixpoint();
2308 return ChangeStatus::UNCHANGED;
2312 const std::string getAsStr(Attributor *
A)
const override {
2313 return getAssumed() ?
"nofree" :
"may-free";
2317struct AANoFreeFunction final :
public AANoFreeImpl {
2318 AANoFreeFunction(
const IRPosition &IRP, Attributor &
A)
2319 : AANoFreeImpl(IRP,
A) {}
2326struct AANoFreeCallSite final : AACalleeToCallSite<AANoFree, AANoFreeImpl> {
2327 AANoFreeCallSite(
const IRPosition &IRP, Attributor &
A)
2328 : AACalleeToCallSite<AANoFree, AANoFreeImpl>(IRP,
A) {}
2335struct AANoFreeFloating : AANoFreeImpl {
2336 AANoFreeFloating(
const IRPosition &IRP, Attributor &
A)
2337 : AANoFreeImpl(IRP,
A) {}
2344 const IRPosition &IRP = getIRPosition();
2349 DepClassTy::OPTIONAL, IsKnown))
2350 return ChangeStatus::UNCHANGED;
2352 Value &AssociatedValue = getIRPosition().getAssociatedValue();
2353 auto Pred = [&](
const Use &
U,
bool &Follow) ->
bool {
2365 DepClassTy::REQUIRED, IsKnown);
2382 if (!
A.checkForAllUses(Pred, *
this, AssociatedValue))
2383 return indicatePessimisticFixpoint();
2385 return ChangeStatus::UNCHANGED;
2390struct AANoFreeArgument final : AANoFreeFloating {
2391 AANoFreeArgument(
const IRPosition &IRP, Attributor &
A)
2392 : AANoFreeFloating(IRP,
A) {}
2399struct AANoFreeCallSiteArgument final : AANoFreeFloating {
2400 AANoFreeCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
2401 : AANoFreeFloating(IRP,
A) {}
2409 Argument *Arg = getAssociatedArgument();
2411 return indicatePessimisticFixpoint();
2415 DepClassTy::REQUIRED, IsKnown))
2416 return ChangeStatus::UNCHANGED;
2417 return indicatePessimisticFixpoint();
2425struct AANoFreeReturned final : AANoFreeFloating {
2426 AANoFreeReturned(
const IRPosition &IRP, Attributor &
A)
2427 : AANoFreeFloating(IRP,
A) {
2442 void trackStatistics()
const override {}
2446struct AANoFreeCallSiteReturned final : AANoFreeFloating {
2447 AANoFreeCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
2448 : AANoFreeFloating(IRP,
A) {}
2451 return ChangeStatus::UNCHANGED;
2462 bool IgnoreSubsumingPositions) {
2464 AttrKinds.
push_back(Attribute::NonNull);
2467 AttrKinds.
push_back(Attribute::Dereferenceable);
2468 if (
A.hasAttr(IRP, AttrKinds, IgnoreSubsumingPositions, Attribute::NonNull))
2475 if (!Fn->isDeclaration()) {
2485 bool UsedAssumedInformation =
false;
2486 if (!
A.checkForAllInstructions(
2488 Worklist.push_back({*cast<ReturnInst>(I).getReturnValue(), &I});
2492 UsedAssumedInformation,
false,
true))
2504 Attribute::NonNull)});
2509static int64_t getKnownNonNullAndDerefBytesForUse(
2510 Attributor &
A,
const AbstractAttribute &QueryingAA,
Value &AssociatedValue,
2511 const Use *U,
const Instruction *
I,
bool &IsNonNull,
bool &TrackUse) {
2514 const Value *UseV =
U->get();
2535 const DataLayout &
DL =
A.getInfoCache().getDL();
2539 U, {Attribute::NonNull, Attribute::Dereferenceable})) {
2556 bool IsKnownNonNull;
2559 IsNonNull |= IsKnownNonNull;
2562 return DerefAA ? DerefAA->getKnownDereferenceableBytes() : 0;
2566 if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
2567 Loc->Size.isScalable() ||
I->isVolatile())
2573 if (
Base &&
Base == &AssociatedValue) {
2574 int64_t DerefBytes = Loc->Size.getValue() +
Offset;
2576 return std::max(int64_t(0), DerefBytes);
2583 int64_t DerefBytes = Loc->Size.getValue();
2585 return std::max(int64_t(0), DerefBytes);
2591struct AANonNullImpl : AANonNull {
2592 AANonNullImpl(
const IRPosition &IRP, Attributor &
A) : AANonNull(IRP,
A) {}
2596 Value &
V = *getAssociatedValue().stripPointerCasts();
2598 indicatePessimisticFixpoint();
2602 if (Instruction *CtxI = getCtxI())
2603 followUsesInMBEC(*
this,
A, getState(), *CtxI);
2607 bool followUseInMBEC(Attributor &
A,
const Use *U,
const Instruction *
I,
2608 AANonNull::StateType &State) {
2609 bool IsNonNull =
false;
2610 bool TrackUse =
false;
2611 getKnownNonNullAndDerefBytesForUse(
A, *
this, getAssociatedValue(), U,
I,
2612 IsNonNull, TrackUse);
2613 State.setKnown(IsNonNull);
2618 const std::string getAsStr(Attributor *
A)
const override {
2619 return getAssumed() ?
"nonnull" :
"may-null";
2624struct AANonNullFloating :
public AANonNullImpl {
2625 AANonNullFloating(
const IRPosition &IRP, Attributor &
A)
2626 : AANonNullImpl(IRP,
A) {}
2630 auto CheckIRP = [&](
const IRPosition &IRP) {
2631 bool IsKnownNonNull;
2633 A, *
this, IRP, DepClassTy::OPTIONAL, IsKnownNonNull);
2637 bool UsedAssumedInformation =
false;
2638 Value *AssociatedValue = &getAssociatedValue();
2640 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
2645 Values.
size() != 1 || Values.
front().getValue() != AssociatedValue;
2651 return AA::hasAssumedIRAttr<Attribute::NonNull>(
2652 A, this, IRPosition::value(*Op), DepClassTy::OPTIONAL,
2655 return ChangeStatus::UNCHANGED;
2659 DepClassTy::OPTIONAL, IsKnown) &&
2662 DepClassTy::OPTIONAL, IsKnown))
2663 return ChangeStatus::UNCHANGED;
2670 if (AVIRP == getIRPosition() || !CheckIRP(AVIRP))
2671 return indicatePessimisticFixpoint();
2672 return ChangeStatus::UNCHANGED;
2675 for (
const auto &VAC : Values)
2677 return indicatePessimisticFixpoint();
2679 return ChangeStatus::UNCHANGED;
2687struct AANonNullReturned final
2688 : AAReturnedFromReturnedValues<AANonNull, AANonNull, AANonNull::StateType,
2689 false, AANonNull::IRAttributeKind, false> {
2690 AANonNullReturned(
const IRPosition &IRP, Attributor &
A)
2691 : AAReturnedFromReturnedValues<AANonNull, AANonNull, AANonNull::StateType,
2696 const std::string getAsStr(Attributor *
A)
const override {
2697 return getAssumed() ?
"nonnull" :
"may-null";
2705struct AANonNullArgument final
2706 : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl> {
2707 AANonNullArgument(
const IRPosition &IRP, Attributor &
A)
2708 : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl>(IRP,
A) {}
2714struct AANonNullCallSiteArgument final : AANonNullFloating {
2715 AANonNullCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
2716 : AANonNullFloating(IRP,
A) {}
2723struct AANonNullCallSiteReturned final
2724 : AACalleeToCallSite<AANonNull, AANonNullImpl> {
2725 AANonNullCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
2726 : AACalleeToCallSite<AANonNull, AANonNullImpl>(IRP,
A) {}
2735struct AAMustProgressImpl :
public AAMustProgress {
2736 AAMustProgressImpl(
const IRPosition &IRP, Attributor &
A)
2737 : AAMustProgress(IRP,
A) {}
2743 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2748 const std::string getAsStr(Attributor *
A)
const override {
2749 return getAssumed() ?
"mustprogress" :
"may-not-progress";
2753struct AAMustProgressFunction final : AAMustProgressImpl {
2754 AAMustProgressFunction(
const IRPosition &IRP, Attributor &
A)
2755 : AAMustProgressImpl(IRP,
A) {}
2761 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnown)) {
2763 return indicateOptimisticFixpoint();
2764 return ChangeStatus::UNCHANGED;
2767 auto CheckForMustProgress = [&](AbstractCallSite ACS) {
2769 bool IsKnownMustProgress;
2771 A,
this, IPos, DepClassTy::REQUIRED, IsKnownMustProgress,
2775 bool AllCallSitesKnown =
true;
2776 if (!
A.checkForAllCallSites(CheckForMustProgress, *
this,
2779 return indicatePessimisticFixpoint();
2781 return ChangeStatus::UNCHANGED;
2785 void trackStatistics()
const override {
2791struct AAMustProgressCallSite final : AAMustProgressImpl {
2792 AAMustProgressCallSite(
const IRPosition &IRP, Attributor &
A)
2793 : AAMustProgressImpl(IRP,
A) {}
2802 bool IsKnownMustProgress;
2804 A,
this, FnPos, DepClassTy::REQUIRED, IsKnownMustProgress))
2805 return indicatePessimisticFixpoint();
2806 return ChangeStatus::UNCHANGED;
2810 void trackStatistics()
const override {
2819struct AANoRecurseImpl :
public AANoRecurse {
2820 AANoRecurseImpl(
const IRPosition &IRP, Attributor &
A) : AANoRecurse(IRP,
A) {}
2826 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2831 const std::string getAsStr(Attributor *
A)
const override {
2832 return getAssumed() ?
"norecurse" :
"may-recurse";
2836struct AANoRecurseFunction final : AANoRecurseImpl {
2837 AANoRecurseFunction(
const IRPosition &IRP, Attributor &
A)
2838 : AANoRecurseImpl(IRP,
A) {}
2844 auto CallSitePred = [&](AbstractCallSite ACS) {
2845 bool IsKnownNoRecurse;
2849 DepClassTy::NONE, IsKnownNoRecurse))
2851 return IsKnownNoRecurse;
2853 bool UsedAssumedInformation =
false;
2854 if (
A.checkForAllCallSites(CallSitePred, *
this,
true,
2855 UsedAssumedInformation)) {
2861 if (!UsedAssumedInformation)
2862 indicateOptimisticFixpoint();
2863 return ChangeStatus::UNCHANGED;
2866 const AAInterFnReachability *EdgeReachability =
2867 A.getAAFor<AAInterFnReachability>(*
this, getIRPosition(),
2868 DepClassTy::REQUIRED);
2869 if (EdgeReachability && EdgeReachability->
canReach(
A, *getAnchorScope()))
2870 return indicatePessimisticFixpoint();
2871 return ChangeStatus::UNCHANGED;
2878struct AANoRecurseCallSite final
2879 : AACalleeToCallSite<AANoRecurse, AANoRecurseImpl> {
2880 AANoRecurseCallSite(
const IRPosition &IRP, Attributor &
A)
2881 : AACalleeToCallSite<AANoRecurse, AANoRecurseImpl>(IRP,
A) {}
2891struct AANonConvergentImpl :
public AANonConvergent {
2892 AANonConvergentImpl(
const IRPosition &IRP, Attributor &
A)
2893 : AANonConvergent(IRP,
A) {}
2896 const std::string getAsStr(Attributor *
A)
const override {
2897 return getAssumed() ?
"non-convergent" :
"may-be-convergent";
2901struct AANonConvergentFunction final : AANonConvergentImpl {
2902 AANonConvergentFunction(
const IRPosition &IRP, Attributor &
A)
2903 : AANonConvergentImpl(IRP,
A) {}
2909 auto CalleeIsNotConvergent = [&](
Instruction &Inst) {
2912 if (!Callee ||
Callee->isIntrinsic()) {
2915 if (
Callee->isDeclaration()) {
2916 return !
Callee->hasFnAttribute(Attribute::Convergent);
2918 const auto *ConvergentAA =
A.getAAFor<AANonConvergent>(
2920 return ConvergentAA && ConvergentAA->isAssumedNotConvergent();
2923 bool UsedAssumedInformation =
false;
2924 if (!
A.checkForAllCallLikeInstructions(CalleeIsNotConvergent, *
this,
2925 UsedAssumedInformation)) {
2926 return indicatePessimisticFixpoint();
2928 return ChangeStatus::UNCHANGED;
2932 if (isKnownNotConvergent() &&
2933 A.hasAttr(getIRPosition(), Attribute::Convergent)) {
2934 A.removeAttrs(getIRPosition(), {Attribute::Convergent});
2935 return ChangeStatus::CHANGED;
2937 return ChangeStatus::UNCHANGED;
2947struct AAUndefinedBehaviorImpl :
public AAUndefinedBehavior {
2948 AAUndefinedBehaviorImpl(
const IRPosition &IRP, Attributor &
A)
2949 : AAUndefinedBehavior(IRP,
A) {}
2954 const size_t UBPrevSize = KnownUBInsts.size();
2955 const size_t NoUBPrevSize = AssumedNoUBInsts.size();
2959 if (
I.isVolatile() &&
I.mayWriteToMemory())
2963 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
2972 "Expected pointer operand of memory accessing instruction");
2976 std::optional<Value *> SimplifiedPtrOp =
2977 stopOnUndefOrAssumed(
A, PtrOp, &
I);
2978 if (!SimplifiedPtrOp || !*SimplifiedPtrOp)
2980 const Value *PtrOpVal = *SimplifiedPtrOp;
2986 AssumedNoUBInsts.insert(&
I);
2998 AssumedNoUBInsts.insert(&
I);
3000 KnownUBInsts.insert(&
I);
3009 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
3017 std::optional<Value *> SimplifiedCond =
3018 stopOnUndefOrAssumed(
A, BrInst->getCondition(), BrInst);
3019 if (!SimplifiedCond || !*SimplifiedCond)
3021 AssumedNoUBInsts.insert(&
I);
3029 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
3038 for (
unsigned idx = 0; idx < CB.
arg_size(); idx++) {
3044 if (idx >=
Callee->arg_size())
3056 bool IsKnownNoUndef;
3058 A,
this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNoUndef);
3059 if (!IsKnownNoUndef)
3061 bool UsedAssumedInformation =
false;
3062 std::optional<Value *> SimplifiedVal =
3065 if (UsedAssumedInformation)
3067 if (SimplifiedVal && !*SimplifiedVal)
3070 KnownUBInsts.insert(&
I);
3076 bool IsKnownNonNull;
3078 A,
this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNonNull);
3080 KnownUBInsts.insert(&
I);
3089 std::optional<Value *> SimplifiedRetValue =
3090 stopOnUndefOrAssumed(
A, RI.getReturnValue(), &
I);
3091 if (!SimplifiedRetValue || !*SimplifiedRetValue)
3109 bool IsKnownNonNull;
3114 KnownUBInsts.insert(&
I);
3120 bool UsedAssumedInformation =
false;
3121 A.checkForAllInstructions(InspectMemAccessInstForUB, *
this,
3122 {Instruction::Load, Instruction::Store,
3123 Instruction::AtomicCmpXchg,
3124 Instruction::AtomicRMW},
3125 UsedAssumedInformation,
3127 A.checkForAllInstructions(InspectBrInstForUB, *
this, {Instruction::CondBr},
3128 UsedAssumedInformation,
3130 A.checkForAllCallLikeInstructions(InspectCallSiteForUB, *
this,
3131 UsedAssumedInformation);
3135 if (!getAnchorScope()->getReturnType()->isVoidTy()) {
3137 if (!
A.isAssumedDead(ReturnIRP,
this,
nullptr, UsedAssumedInformation)) {
3138 bool IsKnownNoUndef;
3140 A,
this, ReturnIRP, DepClassTy::NONE, IsKnownNoUndef);
3142 A.checkForAllInstructions(InspectReturnInstForUB, *
this,
3143 {Instruction::Ret}, UsedAssumedInformation,
3148 if (NoUBPrevSize != AssumedNoUBInsts.size() ||
3149 UBPrevSize != KnownUBInsts.size())
3150 return ChangeStatus::CHANGED;
3151 return ChangeStatus::UNCHANGED;
3154 bool isKnownToCauseUB(Instruction *
I)
const override {
3155 return KnownUBInsts.count(
I);
3158 bool isAssumedToCauseUB(Instruction *
I)
const override {
3165 switch (
I->getOpcode()) {
3166 case Instruction::Load:
3167 case Instruction::Store:
3168 case Instruction::AtomicCmpXchg:
3169 case Instruction::AtomicRMW:
3170 case Instruction::CondBr:
3171 return !AssumedNoUBInsts.count(
I);
3179 if (KnownUBInsts.empty())
3180 return ChangeStatus::UNCHANGED;
3181 for (Instruction *
I : KnownUBInsts)
3182 A.changeToUnreachableAfterManifest(
I);
3183 return ChangeStatus::CHANGED;
3187 const std::string getAsStr(Attributor *
A)
const override {
3188 return getAssumed() ?
"undefined-behavior" :
"no-ub";
3216 SmallPtrSet<Instruction *, 8> KnownUBInsts;
3220 SmallPtrSet<Instruction *, 8> AssumedNoUBInsts;
3231 std::optional<Value *> stopOnUndefOrAssumed(Attributor &
A,
Value *V,
3233 bool UsedAssumedInformation =
false;
3234 std::optional<Value *> SimplifiedV =
3237 if (!UsedAssumedInformation) {
3242 KnownUBInsts.insert(
I);
3243 return std::nullopt;
3250 KnownUBInsts.insert(
I);
3251 return std::nullopt;
3257struct AAUndefinedBehaviorFunction final : AAUndefinedBehaviorImpl {
3258 AAUndefinedBehaviorFunction(
const IRPosition &IRP, Attributor &
A)
3259 : AAUndefinedBehaviorImpl(IRP,
A) {}
3262 void trackStatistics()
const override {
3263 STATS_DECL(UndefinedBehaviorInstruction, Instruction,
3264 "Number of instructions known to have UB");
3266 KnownUBInsts.size();
3277static bool mayContainUnboundedCycle(Function &
F, Attributor &
A) {
3278 ScalarEvolution *SE =
3279 A.getInfoCache().getAnalysisResultForFunction<ScalarEvolutionAnalysis>(
F);
3280 LoopInfo *LI =
A.getInfoCache().getAnalysisResultForFunction<LoopAnalysis>(
F);
3286 for (scc_iterator<Function *> SCCI =
scc_begin(&
F); !SCCI.isAtEnd(); ++SCCI)
3287 if (SCCI.hasCycle())
3297 for (
auto *L : LI->getLoopsInPreorder()) {
3304struct AAWillReturnImpl :
public AAWillReturn {
3305 AAWillReturnImpl(
const IRPosition &IRP, Attributor &
A)
3306 : AAWillReturn(IRP,
A) {}
3312 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
3317 bool isImpliedByMustprogressAndReadonly(Attributor &
A,
bool KnownOnly) {
3318 if (!
A.hasAttr(getIRPosition(), {Attribute::MustProgress}))
3323 return IsKnown || !KnownOnly;
3329 if (isImpliedByMustprogressAndReadonly(
A,
false))
3330 return ChangeStatus::UNCHANGED;
3336 A,
this, IPos, DepClassTy::REQUIRED, IsKnown)) {
3342 bool IsKnownNoRecurse;
3344 A,
this, IPos, DepClassTy::REQUIRED, IsKnownNoRecurse);
3347 bool UsedAssumedInformation =
false;
3348 if (!
A.checkForAllCallLikeInstructions(CheckForWillReturn, *
this,
3349 UsedAssumedInformation))
3350 return indicatePessimisticFixpoint();
3352 return ChangeStatus::UNCHANGED;
3356 const std::string getAsStr(Attributor *
A)
const override {
3357 return getAssumed() ?
"willreturn" :
"may-noreturn";
3361struct AAWillReturnFunction final : AAWillReturnImpl {
3362 AAWillReturnFunction(
const IRPosition &IRP, Attributor &
A)
3363 : AAWillReturnImpl(IRP,
A) {}
3367 AAWillReturnImpl::initialize(
A);
3370 assert(
F &&
"Did expect an anchor function");
3371 if (
F->isDeclaration() || mayContainUnboundedCycle(*
F,
A))
3372 indicatePessimisticFixpoint();
3380struct AAWillReturnCallSite final
3381 : AACalleeToCallSite<AAWillReturn, AAWillReturnImpl> {
3382 AAWillReturnCallSite(
const IRPosition &IRP, Attributor &
A)
3383 : AACalleeToCallSite<AAWillReturn, AAWillReturnImpl>(IRP,
A) {}
3387 if (isImpliedByMustprogressAndReadonly(
A,
false))
3388 return ChangeStatus::UNCHANGED;
3390 return AACalleeToCallSite::updateImpl(
A);
3412 const ToTy *
To =
nullptr;
3439 if (!ES || ES->
empty()) {
3440 ExclusionSet = nullptr;
3441 }
else if (MakeUnique) {
3442 ExclusionSet =
A.getInfoCache().getOrCreateUniqueBlockExecutionSet(ES);
3467 if (!PairDMI::isEqual({LHS->From, LHS->To}, {RHS->From, RHS->To}))
3469 return InstSetDMI::isEqual(LHS->ExclusionSet, RHS->ExclusionSet);
3473#define DefineKeys(ToTy) \
3475 ReachabilityQueryInfo<ToTy> \
3476 DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::EmptyKey = \
3477 ReachabilityQueryInfo<ToTy>( \
3478 DenseMapInfo<const Instruction *>::getEmptyKey(), \
3479 DenseMapInfo<const ToTy *>::getEmptyKey()); \
3481 ReachabilityQueryInfo<ToTy> \
3482 DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::TombstoneKey = \
3483 ReachabilityQueryInfo<ToTy>( \
3484 DenseMapInfo<const Instruction *>::getTombstoneKey(), \
3485 DenseMapInfo<const ToTy *>::getTombstoneKey());
3494template <
typename BaseTy,
typename ToTy>
3495struct CachedReachabilityAA :
public BaseTy {
3496 using RQITy = ReachabilityQueryInfo<ToTy>;
3498 CachedReachabilityAA(
const IRPosition &IRP, Attributor &
A) : BaseTy(IRP,
A) {}
3501 bool isQueryAA()
const override {
return true; }
3506 for (
unsigned u = 0,
e = QueryVector.size();
u <
e; ++
u) {
3507 RQITy *RQI = QueryVector[
u];
3508 if (RQI->Result == RQITy::Reachable::No &&
3510 Changed = ChangeStatus::CHANGED;
3516 bool IsTemporaryRQI) = 0;
3518 bool rememberResult(Attributor &
A,
typename RQITy::Reachable
Result,
3519 RQITy &RQI,
bool UsedExclusionSet,
bool IsTemporaryRQI) {
3524 QueryCache.erase(&RQI);
3530 if (
Result == RQITy::Reachable::Yes || !UsedExclusionSet) {
3531 RQITy PlainRQI(RQI.From, RQI.To);
3532 if (!QueryCache.count(&PlainRQI)) {
3533 RQITy *RQIPtr =
new (
A.Allocator) RQITy(RQI.From, RQI.To);
3535 QueryVector.push_back(RQIPtr);
3536 QueryCache.insert(RQIPtr);
3541 if (IsTemporaryRQI &&
Result != RQITy::Reachable::Yes && UsedExclusionSet) {
3542 assert((!RQI.ExclusionSet || !RQI.ExclusionSet->empty()) &&
3543 "Did not expect empty set!");
3544 RQITy *RQIPtr =
new (
A.Allocator)
3545 RQITy(
A, *RQI.From, *RQI.To, RQI.ExclusionSet,
true);
3546 assert(RQIPtr->Result == RQITy::Reachable::No &&
"Already reachable?");
3548 assert(!QueryCache.count(RQIPtr));
3549 QueryVector.push_back(RQIPtr);
3550 QueryCache.insert(RQIPtr);
3553 if (
Result == RQITy::Reachable::No && IsTemporaryRQI)
3554 A.registerForUpdate(*
this);
3555 return Result == RQITy::Reachable::Yes;
3558 const std::string getAsStr(Attributor *
A)
const override {
3560 return "#queries(" + std::to_string(QueryVector.size()) +
")";
3563 bool checkQueryCache(Attributor &
A, RQITy &StackRQI,
3564 typename RQITy::Reachable &
Result) {
3565 if (!this->getState().isValidState()) {
3566 Result = RQITy::Reachable::Yes;
3572 if (StackRQI.ExclusionSet) {
3573 RQITy PlainRQI(StackRQI.From, StackRQI.To);
3574 auto It = QueryCache.find(&PlainRQI);
3575 if (It != QueryCache.end() && (*It)->Result == RQITy::Reachable::No) {
3576 Result = RQITy::Reachable::No;
3581 auto It = QueryCache.find(&StackRQI);
3582 if (It != QueryCache.end()) {
3589 QueryCache.insert(&StackRQI);
3595 DenseSet<RQITy *> QueryCache;
3598struct AAIntraFnReachabilityFunction final
3599 :
public CachedReachabilityAA<AAIntraFnReachability, Instruction> {
3600 using Base = CachedReachabilityAA<AAIntraFnReachability, Instruction>;
3601 AAIntraFnReachabilityFunction(
const IRPosition &IRP, Attributor &
A)
3603 DT =
A.getInfoCache().getAnalysisResultForFunction<DominatorTreeAnalysis>(
3607 bool isAssumedReachable(
3608 Attributor &
A,
const Instruction &From,
const Instruction &To,
3610 auto *NonConstThis =
const_cast<AAIntraFnReachabilityFunction *
>(
this);
3614 RQITy StackRQI(
A, From, To, ExclusionSet,
false);
3616 if (!NonConstThis->checkQueryCache(
A, StackRQI, Result))
3617 return NonConstThis->isReachableImpl(
A, StackRQI,
3619 return Result == RQITy::Reachable::Yes;
3626 A.getAAFor<AAIsDead>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
3629 [&](
const auto &DeadEdge) {
3630 return LivenessAA->isEdgeDead(DeadEdge.first,
3634 return LivenessAA->isAssumedDead(BB);
3636 return ChangeStatus::UNCHANGED;
3640 return Base::updateImpl(
A);
3644 bool IsTemporaryRQI)
override {
3646 bool UsedExclusionSet =
false;
3651 while (IP && IP != &To) {
3652 if (ExclusionSet && IP != Origin && ExclusionSet->
count(IP)) {
3653 UsedExclusionSet =
true;
3661 const BasicBlock *FromBB = RQI.From->getParent();
3662 const BasicBlock *ToBB = RQI.To->getParent();
3664 "Not an intra-procedural query!");
3668 if (FromBB == ToBB &&
3669 WillReachInBlock(*RQI.From, *RQI.To, RQI.ExclusionSet))
3670 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3675 if (!WillReachInBlock(ToBB->
front(), *RQI.To, RQI.ExclusionSet))
3676 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3680 SmallPtrSet<const BasicBlock *, 16> ExclusionBlocks;
3681 if (RQI.ExclusionSet)
3682 for (
auto *
I : *RQI.ExclusionSet)
3683 if (
I->getFunction() == Fn)
3684 ExclusionBlocks.
insert(
I->getParent());
3687 if (ExclusionBlocks.
count(FromBB) &&
3690 return rememberResult(
A, RQITy::Reachable::No, RQI,
true, IsTemporaryRQI);
3693 A.getAAFor<AAIsDead>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
3694 if (LivenessAA && LivenessAA->isAssumedDead(ToBB)) {
3695 DeadBlocks.insert(ToBB);
3696 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3700 SmallPtrSet<const BasicBlock *, 16> Visited;
3704 DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> LocalDeadEdges;
3705 while (!Worklist.
empty()) {
3707 if (!Visited.
insert(BB).second)
3709 for (
const BasicBlock *SuccBB :
successors(BB)) {
3710 if (LivenessAA && LivenessAA->isEdgeDead(BB, SuccBB)) {
3711 LocalDeadEdges.
insert({BB, SuccBB});
3716 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3719 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3722 if (ExclusionBlocks.
count(SuccBB)) {
3723 UsedExclusionSet =
true;
3730 DeadEdges.insert_range(LocalDeadEdges);
3731 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3736 void trackStatistics()
const override {}
3741 DenseSet<const BasicBlock *> DeadBlocks;
3745 DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> DeadEdges;
3748 const DominatorTree *DT =
nullptr;
3756 bool IgnoreSubsumingPositions) {
3757 assert(ImpliedAttributeKind == Attribute::NoAlias &&
3758 "Unexpected attribute kind");
3764 IgnoreSubsumingPositions =
true;
3775 if (
A.hasAttr(IRP, {Attribute::ByVal, Attribute::NoAlias},
3776 IgnoreSubsumingPositions, Attribute::NoAlias))
3786 "Noalias is a pointer attribute");
3789 const std::string getAsStr(
Attributor *
A)
const override {
3790 return getAssumed() ?
"noalias" :
"may-alias";
3795struct AANoAliasFloating final : AANoAliasImpl {
3796 AANoAliasFloating(
const IRPosition &IRP, Attributor &
A)
3797 : AANoAliasImpl(IRP,
A) {}
3802 return indicatePessimisticFixpoint();
3806 void trackStatistics()
const override {
3812struct AANoAliasArgument final
3813 : AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl> {
3814 using Base = AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl>;
3815 AANoAliasArgument(
const IRPosition &IRP, Attributor &
A) :
Base(IRP,
A) {}
3828 DepClassTy::OPTIONAL, IsKnownNoSycn))
3829 return Base::updateImpl(
A);
3834 return Base::updateImpl(
A);
3838 bool UsedAssumedInformation =
false;
3839 if (
A.checkForAllCallSites(
3840 [](AbstractCallSite ACS) { return !ACS.isCallbackCall(); }, *
this,
3841 true, UsedAssumedInformation))
3842 return Base::updateImpl(
A);
3850 return indicatePessimisticFixpoint();
3857struct AANoAliasCallSiteArgument final : AANoAliasImpl {
3858 AANoAliasCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
3859 : AANoAliasImpl(IRP,
A) {}
3863 bool mayAliasWithArgument(Attributor &
A, AAResults *&AAR,
3864 const AAMemoryBehavior &MemBehaviorAA,
3865 const CallBase &CB,
unsigned OtherArgNo) {
3867 if (this->getCalleeArgNo() == (
int)OtherArgNo)
3875 auto *CBArgMemBehaviorAA =
A.getAAFor<AAMemoryBehavior>(
3879 if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadNone()) {
3880 A.recordDependence(*CBArgMemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3887 if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadOnly() &&
3889 A.recordDependence(MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3890 A.recordDependence(*CBArgMemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3896 AAR =
A.getInfoCache().getAnalysisResultForFunction<AAManager>(
3900 bool IsAliasing = !AAR || !AAR->
isNoAlias(&getAssociatedValue(), ArgOp);
3902 "callsite arguments: "
3903 << getAssociatedValue() <<
" " << *ArgOp <<
" => "
3904 << (IsAliasing ?
"" :
"no-") <<
"alias \n");
3909 bool isKnownNoAliasDueToNoAliasPreservation(
3910 Attributor &
A, AAResults *&AAR,
const AAMemoryBehavior &MemBehaviorAA) {
3923 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
3939 bool IsKnownNoCapture;
3942 DepClassTy::OPTIONAL, IsKnownNoCapture))
3948 A, *UserI, *getCtxI(), *
this,
nullptr,
3949 [ScopeFn](
const Function &Fn) {
return &Fn != ScopeFn; }))
3964 LLVM_DEBUG(
dbgs() <<
"[AANoAliasCSArg] Unknown user: " << *UserI <<
"\n");
3968 bool IsKnownNoCapture;
3969 const AANoCapture *NoCaptureAA =
nullptr;
3971 A,
this, VIRP, DepClassTy::NONE, IsKnownNoCapture,
false, &NoCaptureAA);
3972 if (!IsAssumedNoCapture &&
3974 if (!
A.checkForAllUses(UsePred, *
this, getAssociatedValue())) {
3976 dbgs() <<
"[AANoAliasCSArg] " << getAssociatedValue()
3977 <<
" cannot be noalias as it is potentially captured\n");
3982 A.recordDependence(*NoCaptureAA, *
this, DepClassTy::OPTIONAL);
3988 for (
unsigned OtherArgNo = 0; OtherArgNo < CB.
arg_size(); OtherArgNo++)
3989 if (mayAliasWithArgument(
A, AAR, MemBehaviorAA, CB, OtherArgNo))
3999 auto *MemBehaviorAA =
4000 A.getAAFor<AAMemoryBehavior>(*
this, getIRPosition(), DepClassTy::NONE);
4002 A.recordDependence(*MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
4003 return ChangeStatus::UNCHANGED;
4006 bool IsKnownNoAlias;
4009 A,
this, VIRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
4011 <<
" is not no-alias at the definition\n");
4012 return indicatePessimisticFixpoint();
4015 AAResults *AAR =
nullptr;
4016 if (MemBehaviorAA &&
4017 isKnownNoAliasDueToNoAliasPreservation(
A, AAR, *MemBehaviorAA)) {
4019 dbgs() <<
"[AANoAlias] No-Alias deduced via no-alias preservation\n");
4020 return ChangeStatus::UNCHANGED;
4023 return indicatePessimisticFixpoint();
4031struct AANoAliasReturned final : AANoAliasImpl {
4032 AANoAliasReturned(
const IRPosition &IRP, Attributor &
A)
4033 : AANoAliasImpl(IRP,
A) {}
4038 auto CheckReturnValue = [&](
Value &RV) ->
bool {
4049 bool IsKnownNoAlias;
4051 A,
this, RVPos, DepClassTy::REQUIRED, IsKnownNoAlias))
4054 bool IsKnownNoCapture;
4055 const AANoCapture *NoCaptureAA =
nullptr;
4057 A,
this, RVPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
4059 return IsAssumedNoCapture ||
4063 if (!
A.checkForAllReturnedValues(CheckReturnValue, *
this))
4064 return indicatePessimisticFixpoint();
4066 return ChangeStatus::UNCHANGED;
4074struct AANoAliasCallSiteReturned final
4075 : AACalleeToCallSite<AANoAlias, AANoAliasImpl> {
4076 AANoAliasCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
4077 : AACalleeToCallSite<AANoAlias, AANoAliasImpl>(IRP,
A) {}
4087struct AAIsDeadValueImpl :
public AAIsDead {
4088 AAIsDeadValueImpl(
const IRPosition &IRP, Attributor &
A) : AAIsDead(IRP,
A) {}
4091 bool isAssumedDead()
const override {
return isAssumed(IS_DEAD); }
4094 bool isKnownDead()
const override {
return isKnown(IS_DEAD); }
4097 bool isAssumedDead(
const BasicBlock *BB)
const override {
return false; }
4100 bool isKnownDead(
const BasicBlock *BB)
const override {
return false; }
4103 bool isAssumedDead(
const Instruction *
I)
const override {
4104 return I == getCtxI() && isAssumedDead();
4108 bool isKnownDead(
const Instruction *
I)
const override {
4109 return isAssumedDead(
I) && isKnownDead();
4113 const std::string getAsStr(Attributor *
A)
const override {
4114 return isAssumedDead() ?
"assumed-dead" :
"assumed-live";
4118 bool areAllUsesAssumedDead(Attributor &
A,
Value &V) {
4120 if (
V.getType()->isVoidTy() ||
V.use_empty())
4126 if (!
A.isRunOn(*
I->getFunction()))
4128 bool UsedAssumedInformation =
false;
4129 std::optional<Constant *>
C =
4130 A.getAssumedConstant(V, *
this, UsedAssumedInformation);
4135 auto UsePred = [&](
const Use &
U,
bool &Follow) {
return false; };
4140 return A.checkForAllUses(UsePred, *
this, V,
false,
4141 DepClassTy::REQUIRED,
4146 bool isAssumedSideEffectFree(Attributor &
A, Instruction *
I) {
4156 bool IsKnownNoUnwind;
4158 A,
this, CallIRP, DepClassTy::OPTIONAL, IsKnownNoUnwind))
4166struct AAIsDeadFloating :
public AAIsDeadValueImpl {
4167 AAIsDeadFloating(
const IRPosition &IRP, Attributor &
A)
4168 : AAIsDeadValueImpl(IRP,
A) {}
4172 AAIsDeadValueImpl::initialize(
A);
4175 indicatePessimisticFixpoint();
4180 if (!isAssumedSideEffectFree(
A,
I)) {
4182 indicatePessimisticFixpoint();
4184 removeAssumedBits(HAS_NO_EFFECT);
4188 bool isDeadFence(Attributor &
A, FenceInst &FI) {
4189 const auto *ExecDomainAA =
A.lookupAAFor<AAExecutionDomain>(
4191 if (!ExecDomainAA || !ExecDomainAA->isNoOpFence(FI))
4193 A.recordDependence(*ExecDomainAA, *
this, DepClassTy::OPTIONAL);
4197 bool isDeadStore(Attributor &
A, StoreInst &SI,
4198 SmallSetVector<Instruction *, 8> *AssumeOnlyInst =
nullptr) {
4200 if (
SI.isVolatile())
4206 bool UsedAssumedInformation =
false;
4207 if (!AssumeOnlyInst) {
4208 PotentialCopies.clear();
4210 UsedAssumedInformation)) {
4213 <<
"[AAIsDead] Could not determine potential copies of store!\n");
4217 LLVM_DEBUG(
dbgs() <<
"[AAIsDead] Store has " << PotentialCopies.size()
4218 <<
" potential copies.\n");
4220 InformationCache &InfoCache =
A.getInfoCache();
4223 UsedAssumedInformation))
4227 auto &UserI = cast<Instruction>(*U.getUser());
4228 if (InfoCache.isOnlyUsedByAssume(UserI)) {
4230 AssumeOnlyInst->insert(&UserI);
4233 return A.isAssumedDead(U,
this,
nullptr, UsedAssumedInformation);
4239 <<
" is assumed live!\n");
4245 const std::string getAsStr(Attributor *
A)
const override {
4249 return "assumed-dead-store";
4252 return "assumed-dead-fence";
4253 return AAIsDeadValueImpl::getAsStr(
A);
4260 if (!isDeadStore(
A, *SI))
4261 return indicatePessimisticFixpoint();
4263 if (!isDeadFence(
A, *FI))
4264 return indicatePessimisticFixpoint();
4266 if (!isAssumedSideEffectFree(
A,
I))
4267 return indicatePessimisticFixpoint();
4268 if (!areAllUsesAssumedDead(
A, getAssociatedValue()))
4269 return indicatePessimisticFixpoint();
4274 bool isRemovableStore()
const override {
4275 return isAssumed(IS_REMOVABLE) &&
isa<StoreInst>(&getAssociatedValue());
4280 Value &
V = getAssociatedValue();
4287 SmallSetVector<Instruction *, 8> AssumeOnlyInst;
4288 bool IsDead = isDeadStore(
A, *SI, &AssumeOnlyInst);
4291 A.deleteAfterManifest(*
I);
4292 for (
size_t i = 0; i < AssumeOnlyInst.
size(); ++i) {
4294 for (
auto *Usr : AOI->
users())
4296 A.deleteAfterManifest(*AOI);
4302 A.deleteAfterManifest(*FI);
4306 A.deleteAfterManifest(*
I);
4314 void trackStatistics()
const override {
4320 SmallSetVector<Value *, 4> PotentialCopies;
4323struct AAIsDeadArgument :
public AAIsDeadFloating {
4324 AAIsDeadArgument(
const IRPosition &IRP, Attributor &
A)
4325 : AAIsDeadFloating(IRP,
A) {}
4329 Argument &Arg = *getAssociatedArgument();
4330 if (
A.isValidFunctionSignatureRewrite(Arg, {}))
4331 if (
A.registerFunctionSignatureRewrite(
4335 return ChangeStatus::CHANGED;
4337 return ChangeStatus::UNCHANGED;
4344struct AAIsDeadCallSiteArgument :
public AAIsDeadValueImpl {
4345 AAIsDeadCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
4346 : AAIsDeadValueImpl(IRP,
A) {}
4350 AAIsDeadValueImpl::initialize(
A);
4352 indicatePessimisticFixpoint();
4361 Argument *Arg = getAssociatedArgument();
4363 return indicatePessimisticFixpoint();
4365 auto *ArgAA =
A.getAAFor<AAIsDead>(*
this, ArgPos, DepClassTy::REQUIRED);
4367 return indicatePessimisticFixpoint();
4376 "Expected undef values to be filtered out!");
4378 if (
A.changeUseAfterManifest(U, UV))
4379 return ChangeStatus::CHANGED;
4380 return ChangeStatus::UNCHANGED;
4387struct AAIsDeadCallSiteReturned :
public AAIsDeadFloating {
4388 AAIsDeadCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
4389 : AAIsDeadFloating(IRP,
A) {}
4392 bool isAssumedDead()
const override {
4393 return AAIsDeadFloating::isAssumedDead() && IsAssumedSideEffectFree;
4398 AAIsDeadFloating::initialize(
A);
4400 indicatePessimisticFixpoint();
4405 IsAssumedSideEffectFree = isAssumedSideEffectFree(
A, getCtxI());
4411 if (IsAssumedSideEffectFree && !isAssumedSideEffectFree(
A, getCtxI())) {
4412 IsAssumedSideEffectFree =
false;
4413 Changed = ChangeStatus::CHANGED;
4415 if (!areAllUsesAssumedDead(
A, getAssociatedValue()))
4416 return indicatePessimisticFixpoint();
4421 void trackStatistics()
const override {
4422 if (IsAssumedSideEffectFree)
4429 const std::string getAsStr(Attributor *
A)
const override {
4430 return isAssumedDead()
4432 : (getAssumed() ?
"assumed-dead-users" :
"assumed-live");
4436 bool IsAssumedSideEffectFree =
true;
4439struct AAIsDeadReturned :
public AAIsDeadValueImpl {
4440 AAIsDeadReturned(
const IRPosition &IRP, Attributor &
A)
4441 : AAIsDeadValueImpl(IRP,
A) {}
4446 bool UsedAssumedInformation =
false;
4447 A.checkForAllInstructions([](Instruction &) {
return true; }, *
this,
4448 {Instruction::Ret}, UsedAssumedInformation);
4450 auto PredForCallSite = [&](AbstractCallSite ACS) {
4451 if (ACS.isCallbackCall() || !ACS.getInstruction())
4453 return areAllUsesAssumedDead(
A, *ACS.getInstruction());
4456 if (!
A.checkForAllCallSites(PredForCallSite, *
this,
true,
4457 UsedAssumedInformation))
4458 return indicatePessimisticFixpoint();
4460 return ChangeStatus::UNCHANGED;
4466 bool AnyChange =
false;
4467 UndefValue &UV = *
UndefValue::get(getAssociatedFunction()->getReturnType());
4474 bool UsedAssumedInformation =
false;
4475 A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
4476 UsedAssumedInformation);
4477 return AnyChange ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
4484struct AAIsDeadFunction :
public AAIsDead {
4485 AAIsDeadFunction(
const IRPosition &IRP, Attributor &
A) : AAIsDead(IRP,
A) {}
4490 assert(
F &&
"Did expect an anchor function");
4491 if (!isAssumedDeadInternalFunction(
A)) {
4492 ToBeExploredFrom.insert(&
F->getEntryBlock().front());
4493 assumeLive(
A,
F->getEntryBlock());
4497 bool isAssumedDeadInternalFunction(Attributor &
A) {
4498 if (!getAnchorScope()->hasLocalLinkage())
4500 bool UsedAssumedInformation =
false;
4501 return A.checkForAllCallSites([](AbstractCallSite) {
return false; }, *
this,
4502 true, UsedAssumedInformation);
4506 const std::string getAsStr(Attributor *
A)
const override {
4507 return "Live[#BB " + std::to_string(AssumedLiveBlocks.size()) +
"/" +
4508 std::to_string(getAnchorScope()->
size()) +
"][#TBEP " +
4509 std::to_string(ToBeExploredFrom.size()) +
"][#KDE " +
4510 std::to_string(KnownDeadEnds.size()) +
"]";
4515 assert(getState().isValidState() &&
4516 "Attempted to manifest an invalid state!");
4521 if (AssumedLiveBlocks.empty()) {
4522 A.deleteAfterManifest(
F);
4523 return ChangeStatus::CHANGED;
4529 bool Invoke2CallAllowed = !mayCatchAsynchronousExceptions(
F);
4531 KnownDeadEnds.set_union(ToBeExploredFrom);
4532 for (
const Instruction *DeadEndI : KnownDeadEnds) {
4536 bool IsKnownNoReturn;
4544 A.registerInvokeWithDeadSuccessor(
const_cast<InvokeInst &
>(*
II));
4546 A.changeToUnreachableAfterManifest(
4547 const_cast<Instruction *
>(DeadEndI->getNextNode()));
4548 HasChanged = ChangeStatus::CHANGED;
4551 STATS_DECL(AAIsDead, BasicBlock,
"Number of dead basic blocks deleted.");
4552 for (BasicBlock &BB :
F)
4553 if (!AssumedLiveBlocks.count(&BB)) {
4554 A.deleteAfterManifest(BB);
4556 HasChanged = ChangeStatus::CHANGED;
4565 bool isEdgeDead(
const BasicBlock *From,
const BasicBlock *To)
const override {
4568 "Used AAIsDead of the wrong function");
4569 return isValidState() && !AssumedLiveEdges.count(std::make_pair(From, To));
4573 void trackStatistics()
const override {}
4576 bool isAssumedDead()
const override {
return false; }
4579 bool isKnownDead()
const override {
return false; }
4582 bool isAssumedDead(
const BasicBlock *BB)
const override {
4584 "BB must be in the same anchor scope function.");
4588 return !AssumedLiveBlocks.count(BB);
4592 bool isKnownDead(
const BasicBlock *BB)
const override {
4593 return getKnown() && isAssumedDead(BB);
4597 bool isAssumedDead(
const Instruction *
I)
const override {
4598 assert(
I->getParent()->getParent() == getAnchorScope() &&
4599 "Instruction must be in the same anchor scope function.");
4606 if (!AssumedLiveBlocks.count(
I->getParent()))
4612 if (KnownDeadEnds.count(PrevI) || ToBeExploredFrom.count(PrevI))
4620 bool isKnownDead(
const Instruction *
I)
const override {
4621 return getKnown() && isAssumedDead(
I);
4626 bool assumeLive(Attributor &
A,
const BasicBlock &BB) {
4627 if (!AssumedLiveBlocks.insert(&BB).second)
4634 for (
const Instruction &
I : BB)
4637 if (
F->hasLocalLinkage())
4638 A.markLiveInternalFunction(*
F);
4644 SmallSetVector<const Instruction *, 8> ToBeExploredFrom;
4647 SmallSetVector<const Instruction *, 8> KnownDeadEnds;
4650 DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> AssumedLiveEdges;
4653 DenseSet<const BasicBlock *> AssumedLiveBlocks;
4657identifyAliveSuccessors(Attributor &
A,
const CallBase &CB,
4658 AbstractAttribute &AA,
4659 SmallVectorImpl<const Instruction *> &AliveSuccessors) {
4662 bool IsKnownNoReturn;
4665 return !IsKnownNoReturn;
4674identifyAliveSuccessors(Attributor &
A,
const InvokeInst &
II,
4675 AbstractAttribute &AA,
4676 SmallVectorImpl<const Instruction *> &AliveSuccessors) {
4677 bool UsedAssumedInformation =
4683 if (AAIsDeadFunction::mayCatchAsynchronousExceptions(*
II.getFunction())) {
4684 AliveSuccessors.
push_back(&
II.getUnwindDest()->front());
4688 bool IsKnownNoUnwind;
4691 UsedAssumedInformation |= !IsKnownNoUnwind;
4693 AliveSuccessors.
push_back(&
II.getUnwindDest()->front());
4696 return UsedAssumedInformation;
4700identifyAliveSuccessors(Attributor &,
const UncondBrInst &BI,
4701 AbstractAttribute &,
4702 SmallVectorImpl<const Instruction *> &AliveSuccessors) {
4708identifyAliveSuccessors(Attributor &
A,
const CondBrInst &BI,
4709 AbstractAttribute &AA,
4710 SmallVectorImpl<const Instruction *> &AliveSuccessors) {
4711 bool UsedAssumedInformation =
false;
4712 std::optional<Constant *>
C =
4713 A.getAssumedConstant(*BI.
getCondition(), AA, UsedAssumedInformation);
4723 UsedAssumedInformation =
false;
4725 return UsedAssumedInformation;
4729identifyAliveSuccessors(Attributor &
A,
const SwitchInst &SI,
4730 AbstractAttribute &AA,
4731 SmallVectorImpl<const Instruction *> &AliveSuccessors) {
4732 bool UsedAssumedInformation =
false;
4736 UsedAssumedInformation)) {
4738 for (
const BasicBlock *SuccBB :
successors(
SI.getParent()))
4743 if (Values.
empty() ||
4744 (Values.
size() == 1 &&
4747 return UsedAssumedInformation;
4750 Type &Ty = *
SI.getCondition()->getType();
4751 SmallPtrSet<ConstantInt *, 8>
Constants;
4752 auto CheckForConstantInt = [&](
Value *
V) {
4760 if (!
all_of(Values, [&](AA::ValueAndContext &VAC) {
4761 return CheckForConstantInt(VAC.
getValue());
4763 for (
const BasicBlock *SuccBB :
successors(
SI.getParent()))
4765 return UsedAssumedInformation;
4768 unsigned MatchedCases = 0;
4769 for (
const auto &CaseIt :
SI.cases()) {
4770 if (
Constants.count(CaseIt.getCaseValue())) {
4772 AliveSuccessors.
push_back(&CaseIt.getCaseSuccessor()->front());
4779 AliveSuccessors.
push_back(&
SI.getDefaultDest()->front());
4780 return UsedAssumedInformation;
4786 if (AssumedLiveBlocks.empty()) {
4787 if (isAssumedDeadInternalFunction(
A))
4791 ToBeExploredFrom.insert(&
F->getEntryBlock().front());
4792 assumeLive(
A,
F->getEntryBlock());
4796 LLVM_DEBUG(
dbgs() <<
"[AAIsDead] Live [" << AssumedLiveBlocks.size() <<
"/"
4797 << getAnchorScope()->
size() <<
"] BBs and "
4798 << ToBeExploredFrom.size() <<
" exploration points and "
4799 << KnownDeadEnds.size() <<
" known dead ends\n");
4804 ToBeExploredFrom.end());
4805 decltype(ToBeExploredFrom) NewToBeExploredFrom;
4808 while (!Worklist.
empty()) {
4815 I =
I->getNextNode();
4817 AliveSuccessors.
clear();
4819 bool UsedAssumedInformation =
false;
4820 switch (
I->getOpcode()) {
4824 "Expected non-terminators to be handled already!");
4825 for (
const BasicBlock *SuccBB :
successors(
I->getParent()))
4828 case Instruction::Call:
4830 *
this, AliveSuccessors);
4832 case Instruction::Invoke:
4834 *
this, AliveSuccessors);
4836 case Instruction::UncondBr:
4837 UsedAssumedInformation = identifyAliveSuccessors(
4840 case Instruction::CondBr:
4842 *
this, AliveSuccessors);
4844 case Instruction::Switch:
4846 *
this, AliveSuccessors);
4850 if (UsedAssumedInformation) {
4851 NewToBeExploredFrom.insert(
I);
4852 }
else if (AliveSuccessors.
empty() ||
4853 (
I->isTerminator() &&
4854 AliveSuccessors.
size() <
I->getNumSuccessors())) {
4855 if (KnownDeadEnds.insert(
I))
4860 << AliveSuccessors.
size() <<
" UsedAssumedInformation: "
4861 << UsedAssumedInformation <<
"\n");
4863 for (
const Instruction *AliveSuccessor : AliveSuccessors) {
4864 if (!
I->isTerminator()) {
4865 assert(AliveSuccessors.size() == 1 &&
4866 "Non-terminator expected to have a single successor!");
4870 auto Edge = std::make_pair(
I->getParent(), AliveSuccessor->getParent());
4871 if (AssumedLiveEdges.insert(
Edge).second)
4873 if (assumeLive(
A, *AliveSuccessor->getParent()))
4880 if (NewToBeExploredFrom.size() != ToBeExploredFrom.size() ||
4881 llvm::any_of(NewToBeExploredFrom, [&](
const Instruction *
I) {
4882 return !ToBeExploredFrom.count(I);
4885 ToBeExploredFrom = std::move(NewToBeExploredFrom);
4894 if (ToBeExploredFrom.empty() &&
4895 getAnchorScope()->
size() == AssumedLiveBlocks.size() &&
4896 llvm::all_of(KnownDeadEnds, [](
const Instruction *DeadEndI) {
4897 return DeadEndI->isTerminator() && DeadEndI->getNumSuccessors() == 0;
4899 return indicatePessimisticFixpoint();
4904struct AAIsDeadCallSite final : AAIsDeadFunction {
4905 AAIsDeadCallSite(
const IRPosition &IRP, Attributor &
A)
4906 : AAIsDeadFunction(IRP,
A) {}
4915 "supported for call sites yet!");
4920 return indicatePessimisticFixpoint();
4924 void trackStatistics()
const override {}
4931struct AADereferenceableImpl : AADereferenceable {
4932 AADereferenceableImpl(
const IRPosition &IRP, Attributor &
A)
4933 : AADereferenceable(IRP,
A) {}
4934 using StateType = DerefState;
4938 Value &
V = *getAssociatedValue().stripPointerCasts();
4940 A.getAttrs(getIRPosition(),
4941 {Attribute::Dereferenceable, Attribute::DereferenceableOrNull},
4944 takeKnownDerefBytesMaximum(Attr.getValueAsInt());
4947 bool IsKnownNonNull;
4949 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNonNull);
4951 bool CanBeNull, CanBeFreed;
4952 takeKnownDerefBytesMaximum(
V.getPointerDereferenceableBytes(
4953 A.getDataLayout(), CanBeNull, CanBeFreed));
4955 if (Instruction *CtxI = getCtxI())
4956 followUsesInMBEC(*
this,
A, getState(), *CtxI);
4961 StateType &getState()
override {
return *
this; }
4962 const StateType &getState()
const override {
return *
this; }
4966 void addAccessedBytesForUse(Attributor &
A,
const Use *U,
const Instruction *
I,
4967 DerefState &State) {
4968 const Value *UseV =
U->get();
4973 if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
I->isVolatile())
4978 Loc->Ptr,
Offset,
A.getDataLayout(),
true);
4979 if (
Base &&
Base == &getAssociatedValue())
4980 State.addAccessedBytes(
Offset, Loc->Size.getValue());
4984 bool followUseInMBEC(Attributor &
A,
const Use *U,
const Instruction *
I,
4985 AADereferenceable::StateType &State) {
4986 bool IsNonNull =
false;
4987 bool TrackUse =
false;
4988 int64_t DerefBytes = getKnownNonNullAndDerefBytesForUse(
4989 A, *
this, getAssociatedValue(), U,
I, IsNonNull, TrackUse);
4990 LLVM_DEBUG(
dbgs() <<
"[AADereferenceable] Deref bytes: " << DerefBytes
4991 <<
" for instruction " << *
I <<
"\n");
4993 addAccessedBytesForUse(
A, U,
I, State);
4994 State.takeKnownDerefBytesMaximum(DerefBytes);
5001 bool IsKnownNonNull;
5003 A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
5004 if (IsAssumedNonNull &&
5005 A.hasAttr(getIRPosition(), Attribute::DereferenceableOrNull)) {
5006 A.removeAttrs(getIRPosition(), {Attribute::DereferenceableOrNull});
5007 return ChangeStatus::CHANGED;
5012 void getDeducedAttributes(Attributor &
A, LLVMContext &Ctx,
5013 SmallVectorImpl<Attribute> &Attrs)
const override {
5015 bool IsKnownNonNull;
5017 A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
5018 if (IsAssumedNonNull)
5019 Attrs.emplace_back(Attribute::getWithDereferenceableBytes(
5020 Ctx, getAssumedDereferenceableBytes()));
5022 Attrs.emplace_back(Attribute::getWithDereferenceableOrNullBytes(
5023 Ctx, getAssumedDereferenceableBytes()));
5027 const std::string getAsStr(Attributor *
A)
const override {
5028 if (!getAssumedDereferenceableBytes())
5029 return "unknown-dereferenceable";
5030 bool IsKnownNonNull;
5031 bool IsAssumedNonNull =
false;
5034 *
A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
5035 return std::string(
"dereferenceable") +
5036 (IsAssumedNonNull ?
"" :
"_or_null") +
5037 (isAssumedGlobal() ?
"_globally" :
"") +
"<" +
5038 std::to_string(getKnownDereferenceableBytes()) +
"-" +
5039 std::to_string(getAssumedDereferenceableBytes()) +
">" +
5040 (!
A ?
" [non-null is unknown]" :
"");
5045struct AADereferenceableFloating : AADereferenceableImpl {
5046 AADereferenceableFloating(
const IRPosition &IRP, Attributor &
A)
5047 : AADereferenceableImpl(IRP,
A) {}
5052 bool UsedAssumedInformation =
false;
5054 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
5056 Values.
push_back({getAssociatedValue(), getCtxI()});
5059 Stripped = Values.
size() != 1 ||
5060 Values.
front().getValue() != &getAssociatedValue();
5063 const DataLayout &
DL =
A.getDataLayout();
5066 auto VisitValueCB = [&](
const Value &
V) ->
bool {
5068 DL.getIndexSizeInBits(
V.getType()->getPointerAddressSpace());
5069 APInt
Offset(IdxWidth, 0);
5074 const auto *AA =
A.getAAFor<AADereferenceable>(
5076 int64_t DerefBytes = 0;
5077 if (!AA || (!Stripped &&
this == AA)) {
5080 bool CanBeNull, CanBeFreed;
5082 Base->getPointerDereferenceableBytes(
DL, CanBeNull, CanBeFreed);
5083 T.GlobalState.indicatePessimisticFixpoint();
5086 DerefBytes =
DS.DerefBytesState.getAssumed();
5087 T.GlobalState &=
DS.GlobalState;
5093 int64_t OffsetSExt =
Offset.getSExtValue();
5097 T.takeAssumedDerefBytesMinimum(
5098 std::max(int64_t(0), DerefBytes - OffsetSExt));
5103 T.takeKnownDerefBytesMaximum(
5104 std::max(int64_t(0), DerefBytes - OffsetSExt));
5105 T.indicatePessimisticFixpoint();
5106 }
else if (OffsetSExt > 0) {
5112 T.indicatePessimisticFixpoint();
5116 return T.isValidState();
5119 for (
const auto &VAC : Values)
5120 if (!VisitValueCB(*VAC.
getValue()))
5121 return indicatePessimisticFixpoint();
5127 void trackStatistics()
const override {
5133struct AADereferenceableReturned final
5134 : AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl> {
5136 AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl>;
5137 AADereferenceableReturned(
const IRPosition &IRP, Attributor &
A)
5141 void trackStatistics()
const override {
5147struct AADereferenceableArgument final
5148 : AAArgumentFromCallSiteArguments<AADereferenceable,
5149 AADereferenceableImpl> {
5151 AAArgumentFromCallSiteArguments<AADereferenceable, AADereferenceableImpl>;
5152 AADereferenceableArgument(
const IRPosition &IRP, Attributor &
A)
5156 void trackStatistics()
const override {
5162struct AADereferenceableCallSiteArgument final : AADereferenceableFloating {
5163 AADereferenceableCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
5164 : AADereferenceableFloating(IRP,
A) {}
5167 void trackStatistics()
const override {
5173struct AADereferenceableCallSiteReturned final
5174 : AACalleeToCallSite<AADereferenceable, AADereferenceableImpl> {
5175 using Base = AACalleeToCallSite<AADereferenceable, AADereferenceableImpl>;
5176 AADereferenceableCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
5180 void trackStatistics()
const override {
5190static unsigned getKnownAlignForUse(Attributor &
A, AAAlign &QueryingAA,
5191 Value &AssociatedValue,
const Use *U,
5192 const Instruction *
I,
bool &TrackUse) {
5201 if (
GEP->hasAllConstantIndices())
5206 switch (
II->getIntrinsicID()) {
5207 case Intrinsic::ptrmask: {
5209 const auto *ConstVals =
A.getAAFor<AAPotentialConstantValues>(
5211 const auto *AlignAA =
A.getAAFor<AAAlign>(
5213 if (ConstVals && ConstVals->isValidState() && ConstVals->isAtFixpoint()) {
5214 unsigned ShiftValue = std::min(ConstVals->getAssumedMinTrailingZeros(),
5216 Align ConstAlign(UINT64_C(1) << ShiftValue);
5217 if (ConstAlign >= AlignAA->getKnownAlign())
5218 return Align(1).value();
5221 return AlignAA->getKnownAlign().
value();
5224 case Intrinsic::amdgcn_make_buffer_rsrc: {
5225 const auto *AlignAA =
A.getAAFor<AAAlign>(
5228 return AlignAA->getKnownAlign().
value();
5246 MA = MaybeAlign(AlignAA->getKnownAlign());
5249 const DataLayout &
DL =
A.getDataLayout();
5250 const Value *UseV =
U->get();
5252 if (
SI->getPointerOperand() == UseV)
5253 MA =
SI->getAlign();
5255 if (LI->getPointerOperand() == UseV)
5256 MA = LI->getAlign();
5258 if (AI->getPointerOperand() == UseV)
5259 MA = AI->getAlign();
5261 if (AI->getPointerOperand() == UseV)
5262 MA = AI->getAlign();
5268 unsigned Alignment = MA->value();
5272 if (
Base == &AssociatedValue) {
5277 uint32_t
gcd = std::gcd(uint32_t(
abs((int32_t)
Offset)), Alignment);
5285struct AAAlignImpl : AAAlign {
5286 AAAlignImpl(
const IRPosition &IRP, Attributor &
A) : AAAlign(IRP,
A) {}
5291 A.getAttrs(getIRPosition(), {Attribute::Alignment},
Attrs);
5293 takeKnownMaximum(Attr.getValueAsInt());
5295 Value &
V = *getAssociatedValue().stripPointerCasts();
5296 takeKnownMaximum(
V.getPointerAlignment(
A.getDataLayout()).value());
5298 if (Instruction *CtxI = getCtxI())
5299 followUsesInMBEC(*
this,
A, getState(), *CtxI);
5307 Value &AssociatedValue = getAssociatedValue();
5309 return ChangeStatus::UNCHANGED;
5311 for (
const Use &U : AssociatedValue.
uses()) {
5313 if (
SI->getPointerOperand() == &AssociatedValue)
5314 if (
SI->getAlign() < getAssumedAlign()) {
5316 "Number of times alignment added to a store");
5317 SI->setAlignment(getAssumedAlign());
5318 InstrChanged = ChangeStatus::CHANGED;
5321 if (LI->getPointerOperand() == &AssociatedValue)
5322 if (LI->getAlign() < getAssumedAlign()) {
5323 LI->setAlignment(getAssumedAlign());
5325 "Number of times alignment added to a load");
5326 InstrChanged = ChangeStatus::CHANGED;
5329 if (RMW->getPointerOperand() == &AssociatedValue) {
5330 if (RMW->getAlign() < getAssumedAlign()) {
5332 "Number of times alignment added to atomicrmw");
5334 RMW->setAlignment(getAssumedAlign());
5335 InstrChanged = ChangeStatus::CHANGED;
5339 if (CAS->getPointerOperand() == &AssociatedValue) {
5340 if (CAS->getAlign() < getAssumedAlign()) {
5342 "Number of times alignment added to cmpxchg");
5343 CAS->setAlignment(getAssumedAlign());
5344 InstrChanged = ChangeStatus::CHANGED;
5352 Align InheritAlign =
5353 getAssociatedValue().getPointerAlignment(
A.getDataLayout());
5354 if (InheritAlign >= getAssumedAlign())
5355 return InstrChanged;
5356 return Changed | InstrChanged;
5364 void getDeducedAttributes(Attributor &
A, LLVMContext &Ctx,
5365 SmallVectorImpl<Attribute> &Attrs)
const override {
5366 if (getAssumedAlign() > 1)
5368 Attribute::getWithAlignment(Ctx,
Align(getAssumedAlign())));
5372 bool followUseInMBEC(Attributor &
A,
const Use *U,
const Instruction *
I,
5373 AAAlign::StateType &State) {
5374 bool TrackUse =
false;
5376 unsigned int KnownAlign =
5377 getKnownAlignForUse(
A, *
this, getAssociatedValue(), U,
I, TrackUse);
5378 State.takeKnownMaximum(KnownAlign);
5384 const std::string getAsStr(Attributor *
A)
const override {
5385 return "align<" + std::to_string(getKnownAlign().value()) +
"-" +
5386 std::to_string(getAssumedAlign().value()) +
">";
5391struct AAAlignFloating : AAAlignImpl {
5392 AAAlignFloating(
const IRPosition &IRP, Attributor &
A) : AAAlignImpl(IRP,
A) {}
5396 const DataLayout &
DL =
A.getDataLayout();
5399 bool UsedAssumedInformation =
false;
5401 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
5403 Values.
push_back({getAssociatedValue(), getCtxI()});
5406 Stripped = Values.
size() != 1 ||
5407 Values.
front().getValue() != &getAssociatedValue();
5411 auto VisitValueCB = [&](
Value &
V) ->
bool {
5415 DepClassTy::REQUIRED);
5416 if (!AA || (!Stripped &&
this == AA)) {
5418 unsigned Alignment = 1;
5431 Alignment =
V.getPointerAlignment(
DL).value();
5434 T.takeKnownMaximum(Alignment);
5435 T.indicatePessimisticFixpoint();
5438 const AAAlign::StateType &
DS = AA->
getState();
5441 return T.isValidState();
5444 for (
const auto &VAC : Values) {
5445 if (!VisitValueCB(*VAC.
getValue()))
5446 return indicatePessimisticFixpoint();
5459struct AAAlignReturned final
5460 : AAReturnedFromReturnedValues<AAAlign, AAAlignImpl> {
5461 using Base = AAReturnedFromReturnedValues<AAAlign, AAAlignImpl>;
5462 AAAlignReturned(
const IRPosition &IRP, Attributor &
A) :
Base(IRP,
A) {}
5469struct AAAlignArgument final
5470 : AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl> {
5471 using Base = AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl>;
5472 AAAlignArgument(
const IRPosition &IRP, Attributor &
A) :
Base(IRP,
A) {}
5479 if (
A.getInfoCache().isInvolvedInMustTailCall(*getAssociatedArgument()))
5480 return ChangeStatus::UNCHANGED;
5481 return Base::manifest(
A);
5488struct AAAlignCallSiteArgument final : AAAlignFloating {
5489 AAAlignCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
5490 : AAAlignFloating(IRP,
A) {}
5497 if (Argument *Arg = getAssociatedArgument())
5498 if (
A.getInfoCache().isInvolvedInMustTailCall(*Arg))
5499 return ChangeStatus::UNCHANGED;
5501 Align InheritAlign =
5502 getAssociatedValue().getPointerAlignment(
A.getDataLayout());
5503 if (InheritAlign >= getAssumedAlign())
5504 Changed = ChangeStatus::UNCHANGED;
5511 if (Argument *Arg = getAssociatedArgument()) {
5514 const auto *ArgAlignAA =
A.getAAFor<AAAlign>(
5517 takeKnownMaximum(ArgAlignAA->getKnownAlign().value());
5527struct AAAlignCallSiteReturned final
5528 : AACalleeToCallSite<AAAlign, AAAlignImpl> {
5529 using Base = AACalleeToCallSite<AAAlign, AAAlignImpl>;
5530 AAAlignCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
5536 switch (
II->getIntrinsicID()) {
5537 case Intrinsic::ptrmask: {
5541 const auto *ConstVals =
A.getAAFor<AAPotentialConstantValues>(
5543 if (ConstVals && ConstVals->isValidState()) {
5544 unsigned ShiftValue =
5545 std::min(ConstVals->getAssumedMinTrailingZeros(),
5546 Value::MaxAlignmentExponent);
5547 Alignment =
Align(UINT64_C(1) << ShiftValue);
5551 const auto *AlignAA =
5553 DepClassTy::REQUIRED);
5555 Alignment = std::max(AlignAA->getAssumedAlign(), Alignment);
5562 std::min(this->getAssumedAlign(), Alignment).value());
5568 case Intrinsic::amdgcn_make_buffer_rsrc: {
5569 const auto *AlignAA =
5571 DepClassTy::REQUIRED);
5574 this->getState(), AlignAA->getAssumedAlign().
value());
5581 return Base::updateImpl(
A);
5590struct AANoReturnImpl :
public AANoReturn {
5591 AANoReturnImpl(
const IRPosition &IRP, Attributor &
A) : AANoReturn(IRP,
A) {}
5597 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
5602 const std::string getAsStr(Attributor *
A)
const override {
5603 return getAssumed() ?
"noreturn" :
"may-return";
5608 auto CheckForNoReturn = [](
Instruction &) {
return false; };
5609 bool UsedAssumedInformation =
false;
5610 if (!
A.checkForAllInstructions(CheckForNoReturn, *
this,
5611 {(unsigned)Instruction::Ret},
5612 UsedAssumedInformation))
5613 return indicatePessimisticFixpoint();
5614 return ChangeStatus::UNCHANGED;
5618struct AANoReturnFunction final : AANoReturnImpl {
5619 AANoReturnFunction(
const IRPosition &IRP, Attributor &
A)
5620 : AANoReturnImpl(IRP,
A) {}
5627struct AANoReturnCallSite final
5628 : AACalleeToCallSite<AANoReturn, AANoReturnImpl> {
5629 AANoReturnCallSite(
const IRPosition &IRP, Attributor &
A)
5630 : AACalleeToCallSite<AANoReturn, AANoReturnImpl>(IRP,
A) {}
5641struct AAInstanceInfoImpl :
public AAInstanceInfo {
5642 AAInstanceInfoImpl(
const IRPosition &IRP, Attributor &
A)
5643 : AAInstanceInfo(IRP,
A) {}
5647 Value &
V = getAssociatedValue();
5649 if (
C->isThreadDependent())
5650 indicatePessimisticFixpoint();
5652 indicateOptimisticFixpoint();
5658 indicateOptimisticFixpoint();
5663 A.getInfoCache().getAnalysisResultForFunction<CycleAnalysis>(
5666 indicatePessimisticFixpoint();
5676 Value &
V = getAssociatedValue();
5679 Scope =
I->getFunction();
5682 if (!
Scope->hasLocalLinkage())
5686 return indicateOptimisticFixpoint();
5688 bool IsKnownNoRecurse;
5694 auto UsePred = [&](
const Use &
U,
bool &Follow) {
5709 if (!Callee || !
Callee->hasLocalLinkage())
5713 const auto *ArgInstanceInfoAA =
A.getAAFor<AAInstanceInfo>(
5715 DepClassTy::OPTIONAL);
5716 if (!ArgInstanceInfoAA ||
5717 !ArgInstanceInfoAA->isAssumedUniqueForAnalysis())
5722 A, *CB, *Scope, *
this,
nullptr,
5723 [Scope](
const Function &Fn) {
return &Fn !=
Scope; }))
5730 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
5732 auto *Ptr =
SI->getPointerOperand()->stripPointerCasts();
5740 if (!
A.checkForAllUses(UsePred, *
this, V,
true,
5741 DepClassTy::OPTIONAL,
5742 true, EquivalentUseCB))
5743 return indicatePessimisticFixpoint();
5749 const std::string getAsStr(Attributor *
A)
const override {
5750 return isAssumedUniqueForAnalysis() ?
"<unique [fAa]>" :
"<unknown>";
5754 void trackStatistics()
const override {}
5758struct AAInstanceInfoFloating : AAInstanceInfoImpl {
5759 AAInstanceInfoFloating(
const IRPosition &IRP, Attributor &
A)
5760 : AAInstanceInfoImpl(IRP,
A) {}
5764struct AAInstanceInfoArgument final : AAInstanceInfoFloating {
5765 AAInstanceInfoArgument(
const IRPosition &IRP, Attributor &
A)
5766 : AAInstanceInfoFloating(IRP,
A) {}
5770struct AAInstanceInfoCallSiteArgument final : AAInstanceInfoImpl {
5771 AAInstanceInfoCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
5772 : AAInstanceInfoImpl(IRP,
A) {}
5780 Argument *Arg = getAssociatedArgument();
5782 return indicatePessimisticFixpoint();
5785 A.getAAFor<AAInstanceInfo>(*
this, ArgPos, DepClassTy::REQUIRED);
5787 return indicatePessimisticFixpoint();
5793struct AAInstanceInfoReturned final : AAInstanceInfoImpl {
5794 AAInstanceInfoReturned(
const IRPosition &IRP, Attributor &
A)
5795 : AAInstanceInfoImpl(IRP,
A) {
5811struct AAInstanceInfoCallSiteReturned final : AAInstanceInfoFloating {
5812 AAInstanceInfoCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
5813 : AAInstanceInfoFloating(IRP,
A) {}
5820 bool IgnoreSubsumingPositions) {
5821 assert(ImpliedAttributeKind == Attribute::Captures &&
5822 "Unexpected attribute kind");
5832 V.getType()->getPointerAddressSpace() == 0)) {
5837 A.getAttrs(IRP, {Attribute::Captures}, Attrs,
5847 {Attribute::Captures, Attribute::ByVal}, Attrs,
5885 bool NoThrow =
F.doesNotThrow();
5886 bool IsVoidReturn =
F.getReturnType()->isVoidTy();
5887 if (
ReadOnly && NoThrow && IsVoidReturn) {
5900 if (NoThrow && IsVoidReturn)
5905 if (!NoThrow || ArgNo < 0 ||
5906 !
F.getAttributes().hasAttrSomewhere(Attribute::Returned))
5909 for (
unsigned U = 0, E =
F.arg_size(); U < E; ++U)
5910 if (
F.hasParamAttribute(U, Attribute::Returned)) {
5911 if (U ==
unsigned(ArgNo))
5938 void getDeducedAttributes(Attributor &
A, LLVMContext &Ctx,
5939 SmallVectorImpl<Attribute> &Attrs)
const override {
5940 if (!isAssumedNoCaptureMaybeReturned())
5943 if (isArgumentPosition()) {
5944 if (isAssumedNoCapture())
5945 Attrs.emplace_back(Attribute::get(Ctx, Attribute::Captures));
5947 Attrs.emplace_back(Attribute::get(Ctx,
"no-capture-maybe-returned"));
5952 const std::string getAsStr(Attributor *
A)
const override {
5953 if (isKnownNoCapture())
5954 return "known not-captured";
5955 if (isAssumedNoCapture())
5956 return "assumed not-captured";
5957 if (isKnownNoCaptureMaybeReturned())
5958 return "known not-captured-maybe-returned";
5959 if (isAssumedNoCaptureMaybeReturned())
5960 return "assumed not-captured-maybe-returned";
5961 return "assumed-captured";
5966 bool checkUse(Attributor &
A, AANoCapture::StateType &State,
const Use &U,
5969 LLVM_DEBUG(
dbgs() <<
"[AANoCapture] Check use: " << *
U.get() <<
" in "
5975 return isCapturedIn(State,
true,
true,
5982 return isCapturedIn(State,
true,
true,
5988 return isCapturedIn(State,
false,
false,
5990 return isCapturedIn(State,
true,
true,
5998 return isCapturedIn(State,
true,
true,
6005 bool IsKnownNoCapture;
6006 const AANoCapture *ArgNoCaptureAA =
nullptr;
6008 A,
this, CSArgPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
6010 if (IsAssumedNoCapture)
6011 return isCapturedIn(State,
false,
false,
6015 return isCapturedIn(State,
false,
false,
6020 return isCapturedIn(State,
true,
true,
6027 static bool isCapturedIn(AANoCapture::StateType &State,
bool CapturedInMem,
6028 bool CapturedInInt,
bool CapturedInRet) {
6029 LLVM_DEBUG(
dbgs() <<
" - captures [Mem " << CapturedInMem <<
"|Int "
6030 << CapturedInInt <<
"|Ret " << CapturedInRet <<
"]\n");
6042 const IRPosition &IRP = getIRPosition();
6046 return indicatePessimisticFixpoint();
6053 return indicatePessimisticFixpoint();
6061 T.addKnownBits(NOT_CAPTURED_IN_MEM);
6063 addKnownBits(NOT_CAPTURED_IN_MEM);
6070 auto CheckReturnedArgs = [&](
bool &UsedAssumedInformation) {
6074 UsedAssumedInformation))
6076 bool SeenConstant =
false;
6077 for (
const AA::ValueAndContext &VAC : Values) {
6081 SeenConstant =
true;
6083 VAC.
getValue() == getAssociatedArgument())
6089 bool IsKnownNoUnwind;
6092 bool IsVoidTy =
F->getReturnType()->isVoidTy();
6093 bool UsedAssumedInformation =
false;
6094 if (IsVoidTy || CheckReturnedArgs(UsedAssumedInformation)) {
6095 T.addKnownBits(NOT_CAPTURED_IN_RET);
6096 if (
T.isKnown(NOT_CAPTURED_IN_MEM))
6098 if (IsKnownNoUnwind && (IsVoidTy || !UsedAssumedInformation)) {
6099 addKnownBits(NOT_CAPTURED_IN_RET);
6100 if (isKnown(NOT_CAPTURED_IN_MEM))
6101 return indicateOptimisticFixpoint();
6106 auto UseCheck = [&](
const Use &
U,
bool &Follow) ->
bool {
6115 return checkUse(
A,
T, U, Follow);
6118 if (!
A.checkForAllUses(UseCheck, *
this, *V))
6119 return indicatePessimisticFixpoint();
6122 auto Assumed = S.getAssumed();
6123 S.intersectAssumedBits(
T.getAssumed());
6124 if (!isAssumedNoCaptureMaybeReturned())
6125 return indicatePessimisticFixpoint();
6131struct AANoCaptureArgument final : AANoCaptureImpl {
6132 AANoCaptureArgument(
const IRPosition &IRP, Attributor &
A)
6133 : AANoCaptureImpl(IRP,
A) {}
6140struct AANoCaptureCallSiteArgument final : AANoCaptureImpl {
6141 AANoCaptureCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
6142 : AANoCaptureImpl(IRP,
A) {}
6150 Argument *Arg = getAssociatedArgument();
6152 return indicatePessimisticFixpoint();
6154 bool IsKnownNoCapture;
6155 const AANoCapture *ArgAA =
nullptr;
6157 A,
this, ArgPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
6159 return ChangeStatus::UNCHANGED;
6161 return indicatePessimisticFixpoint();
6166 void trackStatistics()
const override {
6172struct AANoCaptureFloating final : AANoCaptureImpl {
6173 AANoCaptureFloating(
const IRPosition &IRP, Attributor &
A)
6174 : AANoCaptureImpl(IRP,
A) {}
6177 void trackStatistics()
const override {
6183struct AANoCaptureReturned final : AANoCaptureImpl {
6184 AANoCaptureReturned(
const IRPosition &IRP, Attributor &
A)
6185 : AANoCaptureImpl(IRP,
A) {
6200 void trackStatistics()
const override {}
6204struct AANoCaptureCallSiteReturned final : AANoCaptureImpl {
6205 AANoCaptureCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
6206 : AANoCaptureImpl(IRP,
A) {}
6212 determineFunctionCaptureCapabilities(getIRPosition(), *
F, *
this);
6216 void trackStatistics()
const override {
6233 dbgs() <<
"[ValueSimplify] is assumed to be "
6236 dbgs() <<
"[ValueSimplify] is assumed to be <none>\n";
6248 if (getAssociatedValue().
getType()->isVoidTy())
6249 indicatePessimisticFixpoint();
6250 if (
A.hasSimplificationCallback(getIRPosition()))
6251 indicatePessimisticFixpoint();
6255 const std::string getAsStr(Attributor *
A)
const override {
6257 dbgs() <<
"SAV: " << (bool)SimplifiedAssociatedValue <<
" ";
6258 if (SimplifiedAssociatedValue && *SimplifiedAssociatedValue)
6259 dbgs() <<
"SAV: " << **SimplifiedAssociatedValue <<
" ";
6261 return isValidState() ? (isAtFixpoint() ?
"simplified" :
"maybe-simple")
6266 void trackStatistics()
const override {}
6269 std::optional<Value *>
6270 getAssumedSimplifiedValue(Attributor &
A)
const override {
6271 return SimplifiedAssociatedValue;
6278 static Value *ensureType(Attributor &
A,
Value &V,
Type &Ty, Instruction *CtxI,
6282 if (CtxI &&
V.getType()->canLosslesslyBitCastTo(&Ty))
6284 : BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
6293 static Value *reproduceInst(Attributor &
A,
6294 const AbstractAttribute &QueryingAA,
6295 Instruction &
I,
Type &Ty, Instruction *CtxI,
6297 assert(CtxI &&
"Cannot reproduce an instruction without context!");
6298 if (
Check && (
I.mayReadFromMemory() ||
6303 Value *NewOp = reproduceValue(
A, QueryingAA, *
Op, Ty, CtxI,
Check, VMap);
6305 assert(
Check &&
"Manifest of new value unexpectedly failed!");
6327 static Value *reproduceValue(Attributor &
A,
6328 const AbstractAttribute &QueryingAA,
Value &V,
6329 Type &Ty, Instruction *CtxI,
bool Check,
6331 if (
const auto &NewV = VMap.
lookup(&V))
6333 bool UsedAssumedInformation =
false;
6334 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
6336 if (!SimpleV.has_value())
6340 EffectiveV = *SimpleV;
6345 return ensureType(
A, *EffectiveV, Ty, CtxI,
Check);
6347 if (
Value *NewV = reproduceInst(
A, QueryingAA, *
I, Ty, CtxI,
Check, VMap))
6348 return ensureType(
A, *NewV, Ty, CtxI,
Check);
6354 Value *manifestReplacementValue(Attributor &
A, Instruction *CtxI)
const {
6355 Value *NewV = SimplifiedAssociatedValue
6356 ? *SimplifiedAssociatedValue
6358 if (NewV && NewV != &getAssociatedValue()) {
6362 if (reproduceValue(
A, *
this, *NewV, *getAssociatedType(), CtxI,
6364 return reproduceValue(
A, *
this, *NewV, *getAssociatedType(), CtxI,
6372 bool checkAndUpdate(Attributor &
A,
const AbstractAttribute &QueryingAA,
6373 const IRPosition &IRP,
bool Simplify =
true) {
6374 bool UsedAssumedInformation =
false;
6377 QueryingValueSimplified =
A.getAssumedSimplified(
6379 return unionAssumed(QueryingValueSimplified);
6383 template <
typename AAType>
bool askSimplifiedValueFor(Attributor &
A) {
6384 if (!getAssociatedValue().
getType()->isIntegerTy())
6389 A.getAAFor<AAType>(*
this, getIRPosition(), DepClassTy::NONE);
6393 std::optional<Constant *> COpt = AA->getAssumedConstant(
A);
6396 SimplifiedAssociatedValue = std::nullopt;
6397 A.recordDependence(*AA, *
this, DepClassTy::OPTIONAL);
6400 if (
auto *
C = *COpt) {
6401 SimplifiedAssociatedValue =
C;
6402 A.recordDependence(*AA, *
this, DepClassTy::OPTIONAL);
6408 bool askSimplifiedValueForOtherAAs(Attributor &
A) {
6409 if (askSimplifiedValueFor<AAValueConstantRange>(
A))
6411 if (askSimplifiedValueFor<AAPotentialConstantValues>(
A))
6419 for (
auto &U : getAssociatedValue().uses()) {
6424 IP =
PHI->getIncomingBlock(U)->getTerminator();
6425 if (
auto *NewV = manifestReplacementValue(
A, IP)) {
6427 <<
" -> " << *NewV <<
" :: " << *
this <<
"\n");
6428 if (
A.changeUseAfterManifest(U, *NewV))
6429 Changed = ChangeStatus::CHANGED;
6433 return Changed | AAValueSimplify::manifest(
A);
6438 SimplifiedAssociatedValue = &getAssociatedValue();
6439 return AAValueSimplify::indicatePessimisticFixpoint();
6443struct AAValueSimplifyArgument final : AAValueSimplifyImpl {
6444 AAValueSimplifyArgument(
const IRPosition &IRP, Attributor &
A)
6445 : AAValueSimplifyImpl(IRP,
A) {}
6448 AAValueSimplifyImpl::initialize(
A);
6449 if (
A.hasAttr(getIRPosition(),
6450 {Attribute::InAlloca, Attribute::Preallocated,
6451 Attribute::StructRet, Attribute::Nest, Attribute::ByVal},
6453 indicatePessimisticFixpoint();
6460 Argument *Arg = getAssociatedArgument();
6466 return indicatePessimisticFixpoint();
6469 auto Before = SimplifiedAssociatedValue;
6471 auto PredForCallSite = [&](AbstractCallSite ACS) {
6472 const IRPosition &ACSArgPos =
6483 bool UsedAssumedInformation =
false;
6484 std::optional<Constant *> SimpleArgOp =
6485 A.getAssumedConstant(ACSArgPos, *
this, UsedAssumedInformation);
6492 return unionAssumed(*SimpleArgOp);
6497 bool UsedAssumedInformation =
false;
6498 if (hasCallBaseContext() &&
6499 getCallBaseContext()->getCalledOperand() == Arg->
getParent())
6501 AbstractCallSite(&getCallBaseContext()->getCalledOperandUse()));
6503 Success =
A.checkForAllCallSites(PredForCallSite, *
this,
true,
6504 UsedAssumedInformation);
6507 if (!askSimplifiedValueForOtherAAs(
A))
6508 return indicatePessimisticFixpoint();
6511 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6512 : ChangeStatus ::CHANGED;
6516 void trackStatistics()
const override {
6521struct AAValueSimplifyReturned : AAValueSimplifyImpl {
6522 AAValueSimplifyReturned(
const IRPosition &IRP, Attributor &
A)
6523 : AAValueSimplifyImpl(IRP,
A) {}
6526 std::optional<Value *>
6527 getAssumedSimplifiedValue(Attributor &
A)
const override {
6528 if (!isValidState())
6530 return SimplifiedAssociatedValue;
6535 auto Before = SimplifiedAssociatedValue;
6539 return checkAndUpdate(
6544 bool UsedAssumedInformation =
false;
6545 if (!
A.checkForAllInstructions(ReturnInstCB, *
this, {Instruction::Ret},
6546 UsedAssumedInformation))
6547 if (!askSimplifiedValueForOtherAAs(
A))
6548 return indicatePessimisticFixpoint();
6551 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6552 : ChangeStatus ::CHANGED;
6558 return ChangeStatus::UNCHANGED;
6562 void trackStatistics()
const override {
6567struct AAValueSimplifyFloating : AAValueSimplifyImpl {
6568 AAValueSimplifyFloating(
const IRPosition &IRP, Attributor &
A)
6569 : AAValueSimplifyImpl(IRP,
A) {}
6573 AAValueSimplifyImpl::initialize(
A);
6574 Value &
V = getAnchorValue();
6578 indicatePessimisticFixpoint();
6583 auto Before = SimplifiedAssociatedValue;
6584 if (!askSimplifiedValueForOtherAAs(
A))
6585 return indicatePessimisticFixpoint();
6588 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6589 : ChangeStatus ::CHANGED;
6593 void trackStatistics()
const override {
6598struct AAValueSimplifyFunction : AAValueSimplifyImpl {
6599 AAValueSimplifyFunction(
const IRPosition &IRP, Attributor &
A)
6600 : AAValueSimplifyImpl(IRP,
A) {}
6604 SimplifiedAssociatedValue =
nullptr;
6605 indicateOptimisticFixpoint();
6610 "AAValueSimplify(Function|CallSite)::updateImpl will not be called");
6613 void trackStatistics()
const override {
6618struct AAValueSimplifyCallSite : AAValueSimplifyFunction {
6619 AAValueSimplifyCallSite(
const IRPosition &IRP, Attributor &
A)
6620 : AAValueSimplifyFunction(IRP,
A) {}
6622 void trackStatistics()
const override {
6627struct AAValueSimplifyCallSiteReturned : AAValueSimplifyImpl {
6628 AAValueSimplifyCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
6629 : AAValueSimplifyImpl(IRP,
A) {}
6632 AAValueSimplifyImpl::initialize(
A);
6633 Function *Fn = getAssociatedFunction();
6634 assert(Fn &&
"Did expect an associted function");
6635 for (Argument &Arg : Fn->
args()) {
6640 checkAndUpdate(
A, *
this, IRP))
6641 indicateOptimisticFixpoint();
6643 indicatePessimisticFixpoint();
6651 return indicatePessimisticFixpoint();
6654 void trackStatistics()
const override {
6659struct AAValueSimplifyCallSiteArgument : AAValueSimplifyFloating {
6660 AAValueSimplifyCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
6661 : AAValueSimplifyFloating(IRP,
A) {}
6667 auto *FloatAA =
A.lookupAAFor<AAValueSimplify>(
6669 if (FloatAA && FloatAA->getState().isValidState())
6672 if (
auto *NewV = manifestReplacementValue(
A, getCtxI())) {
6674 ->getArgOperandUse(getCallSiteArgNo());
6675 if (
A.changeUseAfterManifest(U, *NewV))
6676 Changed = ChangeStatus::CHANGED;
6679 return Changed | AAValueSimplify::manifest(
A);
6682 void trackStatistics()
const override {
6690struct AAHeapToStackFunction final :
public AAHeapToStack {
6692 static bool isGlobalizedLocal(
const CallBase &CB) {
6694 return A.
isValid() &&
A.getValueAsString() ==
"__kmpc_alloc_shared";
6697 struct AllocationInfo {
6702 bool IsGlobalizedLocal =
false;
6709 } Status = STACK_DUE_TO_USE;
6713 bool HasPotentiallyFreeingUnknownUses =
false;
6717 bool MoveAllocaIntoEntry =
true;
6720 SmallSetVector<CallBase *, 1> PotentialFreeCalls{};
6723 struct DeallocationInfo {
6731 bool MightFreeUnknownObjects =
false;
6734 SmallSetVector<CallBase *, 1> PotentialAllocationCalls{};
6737 AAHeapToStackFunction(
const IRPosition &IRP, Attributor &
A)
6738 : AAHeapToStack(IRP,
A) {}
6740 ~AAHeapToStackFunction()
override {
6743 for (
auto &It : AllocationInfos)
6744 It.second->~AllocationInfo();
6745 for (
auto &It : DeallocationInfos)
6746 It.second->~DeallocationInfo();
6750 AAHeapToStack::initialize(
A);
6753 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6760 DeallocationInfos[CB] =
new (
A.Allocator) DeallocationInfo{CB, FreedOp};
6767 auto *I8Ty = Type::getInt8Ty(CB->
getParent()->getContext());
6769 AllocationInfo *AI =
new (
A.Allocator) AllocationInfo{CB};
6770 AllocationInfos[CB] = AI;
6771 AI->IsGlobalizedLocal = isGlobalizedLocal(*CB);
6777 bool UsedAssumedInformation =
false;
6778 bool Success =
A.checkForAllCallLikeInstructions(
6779 AllocationIdentifierCB, *
this, UsedAssumedInformation,
6783 assert(
Success &&
"Did not expect the call base visit callback to fail!");
6786 [](
const IRPosition &,
const AbstractAttribute *,
6787 bool &) -> std::optional<Value *> {
return nullptr; };
6788 for (
const auto &It : AllocationInfos)
6791 for (
const auto &It : DeallocationInfos)
6796 const std::string getAsStr(Attributor *
A)
const override {
6797 unsigned NumH2SMallocs = 0, NumInvalidMallocs = 0;
6798 for (
const auto &It : AllocationInfos) {
6799 if (It.second->Status == AllocationInfo::INVALID)
6800 ++NumInvalidMallocs;
6804 return "[H2S] Mallocs Good/Bad: " + std::to_string(NumH2SMallocs) +
"/" +
6805 std::to_string(NumInvalidMallocs);
6809 void trackStatistics()
const override {
6811 MallocCalls, Function,
6812 "Number of malloc/calloc/aligned_alloc calls converted to allocas");
6813 for (
const auto &It : AllocationInfos)
6814 if (It.second->Status != AllocationInfo::INVALID)
6818 bool isAssumedHeapToStack(
const CallBase &CB)
const override {
6820 if (AllocationInfo *AI =
6821 AllocationInfos.lookup(
const_cast<CallBase *
>(&CB)))
6822 return AI->Status != AllocationInfo::INVALID;
6826 bool isAssumedHeapToStackRemovedFree(CallBase &CB)
const override {
6827 if (!isValidState())
6830 for (
const auto &It : AllocationInfos) {
6831 AllocationInfo &AI = *It.second;
6832 if (AI.Status == AllocationInfo::INVALID)
6835 if (AI.PotentialFreeCalls.count(&CB))
6843 assert(getState().isValidState() &&
6844 "Attempted to manifest an invalid state!");
6848 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6850 for (
auto &It : AllocationInfos) {
6851 AllocationInfo &AI = *It.second;
6852 if (AI.Status == AllocationInfo::INVALID)
6855 for (CallBase *FreeCall : AI.PotentialFreeCalls) {
6856 LLVM_DEBUG(
dbgs() <<
"H2S: Removing free call: " << *FreeCall <<
"\n");
6857 A.deleteAfterManifest(*FreeCall);
6858 HasChanged = ChangeStatus::CHANGED;
6861 LLVM_DEBUG(
dbgs() <<
"H2S: Removing malloc-like call: " << *AI.CB
6864 auto Remark = [&](OptimizationRemark
OR) {
6865 if (AI.IsGlobalizedLocal)
6866 return OR <<
"Moving globalized variable to the stack.";
6867 return OR <<
"Moving memory allocation from the heap to the stack.";
6869 if (AI.IsGlobalizedLocal)
6870 A.emitRemark<OptimizationRemark>(AI.CB,
"OMP110",
Remark);
6872 A.emitRemark<OptimizationRemark>(AI.CB,
"HeapToStack",
Remark);
6874 const DataLayout &
DL =
A.getInfoCache().getDL();
6876 std::optional<APInt> SizeAPI =
getSize(
A, *
this, AI);
6878 Size = ConstantInt::get(AI.CB->getContext(), *SizeAPI);
6880 LLVMContext &Ctx = AI.CB->getContext();
6881 ObjectSizeOpts Opts;
6882 ObjectSizeOffsetEvaluator Eval(
DL, TLI, Ctx, Opts);
6883 SizeOffsetValue SizeOffsetPair = Eval.compute(AI.CB);
6890 ?
F->getEntryBlock().begin()
6891 : AI.CB->getIterator();
6894 if (MaybeAlign RetAlign = AI.CB->getRetAlign())
6895 Alignment = std::max(Alignment, *RetAlign);
6897 std::optional<APInt> AlignmentAPI = getAPInt(
A, *
this, *Align);
6898 assert(AlignmentAPI && AlignmentAPI->getZExtValue() > 0 &&
6899 "Expected an alignment during manifest!");
6901 std::max(Alignment,
assumeAligned(AlignmentAPI->getZExtValue()));
6905 unsigned AS =
DL.getAllocaAddrSpace();
6907 new AllocaInst(Type::getInt8Ty(
F->getContext()), AS,
Size, Alignment,
6908 AI.CB->getName() +
".h2s", IP);
6910 if (Alloca->
getType() != AI.CB->getType())
6911 Alloca = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
6912 Alloca, AI.CB->getType(),
"malloc_cast", AI.CB->getIterator());
6914 auto *I8Ty = Type::getInt8Ty(
F->getContext());
6917 "Must be able to materialize initial memory state of allocation");
6922 auto *NBB =
II->getNormalDest();
6924 A.deleteAfterManifest(*AI.CB);
6926 A.deleteAfterManifest(*AI.CB);
6935 Builder.CreateMemSet(Alloca, InitVal,
Size, std::nullopt);
6937 HasChanged = ChangeStatus::CHANGED;
6943 std::optional<APInt> getAPInt(Attributor &
A,
const AbstractAttribute &AA,
6945 bool UsedAssumedInformation =
false;
6946 std::optional<Constant *> SimpleV =
6947 A.getAssumedConstant(V, AA, UsedAssumedInformation);
6949 return APInt(64, 0);
6951 return CI->getValue();
6952 return std::nullopt;
6955 std::optional<APInt>
getSize(Attributor &
A,
const AbstractAttribute &AA,
6956 AllocationInfo &AI) {
6957 auto Mapper = [&](
const Value *
V) ->
const Value * {
6958 bool UsedAssumedInformation =
false;
6959 if (std::optional<Constant *> SimpleV =
6960 A.getAssumedConstant(*V, AA, UsedAssumedInformation))
6967 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6973 MapVector<CallBase *, AllocationInfo *> AllocationInfos;
6977 MapVector<CallBase *, DeallocationInfo *> DeallocationInfos;
6982ChangeStatus AAHeapToStackFunction::updateImpl(Attributor &
A) {
6985 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6987 const auto *LivenessAA =
6990 MustBeExecutedContextExplorer *Explorer =
6991 A.getInfoCache().getMustBeExecutedContextExplorer();
6993 bool StackIsAccessibleByOtherThreads =
6994 A.getInfoCache().stackIsAccessibleByOtherThreads();
6997 A.getInfoCache().getAnalysisResultForFunction<LoopAnalysis>(*F);
6998 std::optional<bool> MayContainIrreducibleControl;
7000 if (&
F->getEntryBlock() == &BB)
7002 if (!MayContainIrreducibleControl.has_value())
7004 if (*MayContainIrreducibleControl)
7013 bool HasUpdatedFrees =
false;
7015 auto UpdateFrees = [&]() {
7016 HasUpdatedFrees =
true;
7018 for (
auto &It : DeallocationInfos) {
7019 DeallocationInfo &DI = *It.second;
7022 if (DI.MightFreeUnknownObjects)
7026 bool UsedAssumedInformation =
false;
7027 if (
A.isAssumedDead(*DI.CB,
this, LivenessAA, UsedAssumedInformation,
7034 LLVM_DEBUG(
dbgs() <<
"[H2S] Unknown underlying object for free!\n");
7035 DI.MightFreeUnknownObjects =
true;
7048 DI.MightFreeUnknownObjects =
true;
7052 AllocationInfo *AI = AllocationInfos.lookup(ObjCB);
7054 LLVM_DEBUG(
dbgs() <<
"[H2S] Free of a non-allocation object: " << *Obj
7056 DI.MightFreeUnknownObjects =
true;
7060 DI.PotentialAllocationCalls.insert(ObjCB);
7064 auto FreeCheck = [&](AllocationInfo &AI) {
7068 if (!StackIsAccessibleByOtherThreads) {
7073 dbgs() <<
"[H2S] found an escaping use, stack is not accessible by "
7074 "other threads and function is not nosync:\n");
7078 if (!HasUpdatedFrees)
7082 if (AI.PotentialFreeCalls.size() != 1) {
7084 << AI.PotentialFreeCalls.size() <<
"\n");
7087 CallBase *UniqueFree = *AI.PotentialFreeCalls.begin();
7088 DeallocationInfo *DI = DeallocationInfos.lookup(UniqueFree);
7091 dbgs() <<
"[H2S] unique free call was not known as deallocation call "
7092 << *UniqueFree <<
"\n");
7095 if (DI->MightFreeUnknownObjects) {
7097 dbgs() <<
"[H2S] unique free call might free unknown allocations\n");
7100 if (DI->PotentialAllocationCalls.empty())
7102 if (DI->PotentialAllocationCalls.size() > 1) {
7104 << DI->PotentialAllocationCalls.size()
7105 <<
" different allocations\n");
7108 if (*DI->PotentialAllocationCalls.begin() != AI.CB) {
7111 <<
"[H2S] unique free call not known to free this allocation but "
7112 << **DI->PotentialAllocationCalls.begin() <<
"\n");
7117 if (!AI.IsGlobalizedLocal) {
7119 if (!Explorer || !Explorer->findInContextOf(UniqueFree, CtxI)) {
7120 LLVM_DEBUG(
dbgs() <<
"[H2S] unique free call might not be executed "
7121 "with the allocation "
7122 << *UniqueFree <<
"\n");
7129 auto UsesCheck = [&](AllocationInfo &AI) {
7130 bool ValidUsesOnly =
true;
7132 auto Pred = [&](
const Use &
U,
bool &Follow) ->
bool {
7137 if (
SI->getValueOperand() ==
U.get()) {
7139 <<
"[H2S] escaping store to memory: " << *UserI <<
"\n");
7140 ValidUsesOnly =
false;
7149 if (DeallocationInfos.count(CB)) {
7150 AI.PotentialFreeCalls.insert(CB);
7157 bool IsKnownNoCapture;
7166 if (!IsAssumedNoCapture ||
7167 (!AI.IsGlobalizedLocal && !IsAssumedNoFree)) {
7168 AI.HasPotentiallyFreeingUnknownUses |= !IsAssumedNoFree;
7171 auto Remark = [&](OptimizationRemarkMissed ORM) {
7173 <<
"Could not move globalized variable to the stack. "
7174 "Variable is potentially captured in call. Mark "
7175 "parameter as `__attribute__((noescape))` to override.";
7178 if (ValidUsesOnly && AI.IsGlobalizedLocal)
7179 A.emitRemark<OptimizationRemarkMissed>(CB,
"OMP113",
Remark);
7182 ValidUsesOnly =
false;
7195 ValidUsesOnly =
false;
7198 if (!
A.checkForAllUses(Pred, *
this, *AI.CB,
false,
7200 [&](
const Use &OldU,
const Use &NewU) {
7201 auto *SI = dyn_cast<StoreInst>(OldU.getUser());
7202 return !SI || StackIsAccessibleByOtherThreads ||
7203 AA::isAssumedThreadLocalObject(
7204 A, *SI->getPointerOperand(), *this);
7207 return ValidUsesOnly;
7212 for (
auto &It : AllocationInfos) {
7213 AllocationInfo &AI = *It.second;
7214 if (AI.Status == AllocationInfo::INVALID)
7218 std::optional<APInt> APAlign = getAPInt(
A, *
this, *Align);
7222 LLVM_DEBUG(
dbgs() <<
"[H2S] Unknown allocation alignment: " << *AI.CB
7224 AI.Status = AllocationInfo::INVALID;
7229 !APAlign->isPowerOf2()) {
7230 LLVM_DEBUG(
dbgs() <<
"[H2S] Invalid allocation alignment: " << APAlign
7232 AI.Status = AllocationInfo::INVALID;
7243 dbgs() <<
"[H2S] Unknown allocation size: " << *AI.CB <<
"\n";
7245 dbgs() <<
"[H2S] Allocation size too large: " << *AI.CB <<
" vs. "
7249 AI.Status = AllocationInfo::INVALID;
7255 switch (AI.Status) {
7256 case AllocationInfo::STACK_DUE_TO_USE:
7259 AI.Status = AllocationInfo::STACK_DUE_TO_FREE;
7261 case AllocationInfo::STACK_DUE_TO_FREE:
7264 AI.Status = AllocationInfo::INVALID;
7267 case AllocationInfo::INVALID:
7274 bool IsGlobalizedLocal = AI.IsGlobalizedLocal;
7275 if (AI.MoveAllocaIntoEntry &&
7276 (!
Size.has_value() ||
7277 (!IsGlobalizedLocal && IsInLoop(*AI.CB->getParent()))))
7278 AI.MoveAllocaIntoEntry =
false;
7287struct AAPrivatizablePtrImpl :
public AAPrivatizablePtr {
7288 AAPrivatizablePtrImpl(
const IRPosition &IRP, Attributor &
A)
7289 : AAPrivatizablePtr(IRP,
A), PrivatizableType(std::nullopt) {}
7292 AAPrivatizablePtr::indicatePessimisticFixpoint();
7293 PrivatizableType =
nullptr;
7294 return ChangeStatus::CHANGED;
7300 virtual std::optional<Type *> identifyPrivatizableType(Attributor &
A) = 0;
7304 std::optional<Type *> combineTypes(std::optional<Type *> T0,
7305 std::optional<Type *>
T1) {
7315 std::optional<Type *> getPrivatizableType()
const override {
7316 return PrivatizableType;
7319 const std::string getAsStr(Attributor *
A)
const override {
7320 return isAssumedPrivatizablePtr() ?
"[priv]" :
"[no-priv]";
7324 std::optional<Type *> PrivatizableType;
7329struct AAPrivatizablePtrArgument final :
public AAPrivatizablePtrImpl {
7330 AAPrivatizablePtrArgument(
const IRPosition &IRP, Attributor &
A)
7331 : AAPrivatizablePtrImpl(IRP,
A) {}
7334 std::optional<Type *> identifyPrivatizableType(Attributor &
A)
override {
7337 bool UsedAssumedInformation =
false;
7339 A.getAttrs(getIRPosition(), {Attribute::ByVal},
Attrs,
7341 if (!
Attrs.empty() &&
7342 A.checkForAllCallSites([](AbstractCallSite ACS) { return true; }, *
this,
7343 true, UsedAssumedInformation))
7344 return Attrs[0].getValueAsType();
7346 std::optional<Type *> Ty;
7347 unsigned ArgNo = getIRPosition().getCallSiteArgNo();
7355 auto CallSiteCheck = [&](AbstractCallSite ACS) {
7364 A.getAAFor<AAPrivatizablePtr>(*
this, ACSArgPos, DepClassTy::REQUIRED);
7367 std::optional<Type *> CSTy = PrivCSArgAA->getPrivatizableType();
7370 dbgs() <<
"[AAPrivatizablePtr] ACSPos: " << ACSArgPos <<
", CSTy: ";
7374 dbgs() <<
"<nullptr>";
7379 Ty = combineTypes(Ty, CSTy);
7382 dbgs() <<
" : New Type: ";
7384 (*Ty)->print(
dbgs());
7386 dbgs() <<
"<nullptr>";
7395 if (!
A.checkForAllCallSites(CallSiteCheck, *
this,
true,
7396 UsedAssumedInformation))
7403 PrivatizableType = identifyPrivatizableType(
A);
7404 if (!PrivatizableType)
7405 return ChangeStatus::UNCHANGED;
7406 if (!*PrivatizableType)
7407 return indicatePessimisticFixpoint();
7412 DepClassTy::OPTIONAL);
7415 if (!
A.hasAttr(getIRPosition(), Attribute::ByVal) &&
7418 return indicatePessimisticFixpoint();
7424 identifyReplacementTypes(*PrivatizableType, ReplacementTypes);
7428 Function &Fn = *getIRPosition().getAnchorScope();
7430 A.getInfoCache().getAnalysisResultForFunction<TargetIRAnalysis>(Fn);
7432 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] Missing TTI for function "
7434 return indicatePessimisticFixpoint();
7437 auto CallSiteCheck = [&](AbstractCallSite ACS) {
7444 bool UsedAssumedInformation =
false;
7445 if (!
A.checkForAllCallSites(CallSiteCheck, *
this,
true,
7446 UsedAssumedInformation)) {
7448 dbgs() <<
"[AAPrivatizablePtr] ABI incompatibility detected for "
7450 return indicatePessimisticFixpoint();
7454 Argument *Arg = getAssociatedArgument();
7455 if (!
A.isValidFunctionSignatureRewrite(*Arg, ReplacementTypes)) {
7457 return indicatePessimisticFixpoint();
7464 auto IsCompatiblePrivArgOfCallback = [&](CallBase &CB) {
7467 for (
const Use *U : CallbackUses) {
7468 AbstractCallSite CBACS(U);
7469 assert(CBACS && CBACS.isCallbackCall());
7470 for (Argument &CBArg : CBACS.getCalledFunction()->args()) {
7471 int CBArgNo = CBACS.getCallArgOperandNo(CBArg);
7475 <<
"[AAPrivatizablePtr] Argument " << *Arg
7476 <<
"check if can be privatized in the context of its parent ("
7478 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7480 << CBArgNo <<
"@" << CBACS.getCalledFunction()->getName()
7481 <<
")\n[AAPrivatizablePtr] " << CBArg <<
" : "
7482 << CBACS.getCallArgOperand(CBArg) <<
" vs "
7484 <<
"[AAPrivatizablePtr] " << CBArg <<
" : "
7485 << CBACS.getCallArgOperandNo(CBArg) <<
" vs " << ArgNo <<
"\n";
7488 if (CBArgNo !=
int(ArgNo))
7490 const auto *CBArgPrivAA =
A.getAAFor<AAPrivatizablePtr>(
7492 if (CBArgPrivAA && CBArgPrivAA->isValidState()) {
7493 auto CBArgPrivTy = CBArgPrivAA->getPrivatizableType();
7496 if (*CBArgPrivTy == PrivatizableType)
7501 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7502 <<
" cannot be privatized in the context of its parent ("
7504 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7506 << CBArgNo <<
"@" << CBACS.getCalledFunction()->getName()
7507 <<
").\n[AAPrivatizablePtr] for which the argument "
7508 "privatization is not compatible.\n";
7518 auto IsCompatiblePrivArgOfDirectCS = [&](AbstractCallSite ACS) {
7522 "Expected a direct call operand for callback call operand");
7527 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7528 <<
" check if be privatized in the context of its parent ("
7530 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7532 << DCArgNo <<
"@" << DCCallee->
getName() <<
").\n";
7535 if (
unsigned(DCArgNo) < DCCallee->
arg_size()) {
7536 const auto *DCArgPrivAA =
A.getAAFor<AAPrivatizablePtr>(
7538 DepClassTy::REQUIRED);
7539 if (DCArgPrivAA && DCArgPrivAA->isValidState()) {
7540 auto DCArgPrivTy = DCArgPrivAA->getPrivatizableType();
7543 if (*DCArgPrivTy == PrivatizableType)
7549 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7550 <<
" cannot be privatized in the context of its parent ("
7552 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7555 <<
").\n[AAPrivatizablePtr] for which the argument "
7556 "privatization is not compatible.\n";
7564 auto IsCompatiblePrivArgOfOtherCallSite = [&](AbstractCallSite ACS) {
7568 return IsCompatiblePrivArgOfDirectCS(ACS);
7572 if (!
A.checkForAllCallSites(IsCompatiblePrivArgOfOtherCallSite, *
this,
true,
7573 UsedAssumedInformation))
7574 return indicatePessimisticFixpoint();
7576 return ChangeStatus::UNCHANGED;
7582 identifyReplacementTypes(
Type *PrivType,
7583 SmallVectorImpl<Type *> &ReplacementTypes) {
7586 assert(PrivType &&
"Expected privatizable type!");
7590 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++)
7591 ReplacementTypes.
push_back(PrivStructType->getElementType(u));
7593 ReplacementTypes.
append(PrivArrayType->getNumElements(),
7594 PrivArrayType->getElementType());
7603 static void createInitialization(
Type *PrivType,
Value &
Base, Function &
F,
7605 assert(PrivType &&
"Expected privatizable type!");
7608 const DataLayout &
DL =
F.getDataLayout();
7612 const StructLayout *PrivStructLayout =
DL.getStructLayout(PrivStructType);
7613 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
7616 new StoreInst(
F.getArg(ArgNo + u), Ptr, IP);
7619 Type *PointeeTy = PrivArrayType->getElementType();
7620 uint64_t PointeeTySize =
DL.getTypeStoreSize(PointeeTy);
7621 for (
unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
7623 new StoreInst(
F.getArg(ArgNo + u), Ptr, IP);
7626 new StoreInst(
F.getArg(ArgNo), &
Base, IP);
7632 void createReplacementValues(Align Alignment,
Type *PrivType,
7634 SmallVectorImpl<Value *> &ReplacementValues) {
7636 assert(PrivType &&
"Expected privatizable type!");
7644 const StructLayout *PrivStructLayout =
DL.getStructLayout(PrivStructType);
7645 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
7646 Type *PointeeTy = PrivStructType->getElementType(u);
7649 LoadInst *
L =
new LoadInst(PointeeTy, Ptr,
"", IP->
getIterator());
7650 L->setAlignment(Alignment);
7654 Type *PointeeTy = PrivArrayType->getElementType();
7655 uint64_t PointeeTySize =
DL.getTypeStoreSize(PointeeTy);
7656 for (
unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
7658 LoadInst *
L =
new LoadInst(PointeeTy, Ptr,
"", IP->
getIterator());
7659 L->setAlignment(Alignment);
7664 L->setAlignment(Alignment);
7671 if (!PrivatizableType)
7672 return ChangeStatus::UNCHANGED;
7673 assert(*PrivatizableType &&
"Expected privatizable type!");
7679 bool UsedAssumedInformation =
false;
7680 if (!
A.checkForAllInstructions(
7681 [&](Instruction &
I) {
7682 CallInst &CI = cast<CallInst>(I);
7683 if (CI.isTailCall())
7684 TailCalls.push_back(&CI);
7687 *
this, {Instruction::Call}, UsedAssumedInformation))
7688 return ChangeStatus::UNCHANGED;
7690 Argument *Arg = getAssociatedArgument();
7693 const auto *AlignAA =
7700 [=](
const Attributor::ArgumentReplacementInfo &ARI,
7702 BasicBlock &EntryBB = ReplacementFn.getEntryBlock();
7704 const DataLayout &
DL = IP->getDataLayout();
7705 unsigned AS =
DL.getAllocaAddrSpace();
7706 Instruction *AI =
new AllocaInst(*PrivatizableType, AS,
7707 Arg->
getName() +
".priv", IP);
7708 createInitialization(*PrivatizableType, *AI, ReplacementFn,
7709 ArgIt->getArgNo(), IP);
7712 AI = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
7716 for (CallInst *CI : TailCalls)
7717 CI->setTailCall(
false);
7724 [=](
const Attributor::ArgumentReplacementInfo &ARI,
7725 AbstractCallSite ACS, SmallVectorImpl<Value *> &NewArgOperands) {
7728 createReplacementValues(
7729 AlignAA ? AlignAA->getAssumedAlign() :
Align(0),
7730 *PrivatizableType, ACS,
7738 identifyReplacementTypes(*PrivatizableType, ReplacementTypes);
7741 if (
A.registerFunctionSignatureRewrite(*Arg, ReplacementTypes,
7742 std::move(FnRepairCB),
7743 std::move(ACSRepairCB)))
7744 return ChangeStatus::CHANGED;
7745 return ChangeStatus::UNCHANGED;
7749 void trackStatistics()
const override {
7754struct AAPrivatizablePtrFloating :
public AAPrivatizablePtrImpl {
7755 AAPrivatizablePtrFloating(
const IRPosition &IRP, Attributor &
A)
7756 : AAPrivatizablePtrImpl(IRP,
A) {}
7761 indicatePessimisticFixpoint();
7766 "updateImpl will not be called");
7770 std::optional<Type *> identifyPrivatizableType(Attributor &
A)
override {
7773 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] No underlying object found!\n");
7780 return AI->getAllocatedType();
7782 auto *PrivArgAA =
A.getAAFor<AAPrivatizablePtr>(
7784 if (PrivArgAA && PrivArgAA->isAssumedPrivatizablePtr())
7785 return PrivArgAA->getPrivatizableType();
7788 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] Underlying object neither valid "
7789 "alloca nor privatizable argument: "
7795 void trackStatistics()
const override {
7800struct AAPrivatizablePtrCallSiteArgument final
7801 :
public AAPrivatizablePtrFloating {
7802 AAPrivatizablePtrCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
7803 : AAPrivatizablePtrFloating(IRP,
A) {}
7807 if (
A.hasAttr(getIRPosition(), Attribute::ByVal))
7808 indicateOptimisticFixpoint();
7813 PrivatizableType = identifyPrivatizableType(
A);
7814 if (!PrivatizableType)
7815 return ChangeStatus::UNCHANGED;
7816 if (!*PrivatizableType)
7817 return indicatePessimisticFixpoint();
7819 const IRPosition &IRP = getIRPosition();
7820 bool IsKnownNoCapture;
7822 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoCapture);
7823 if (!IsAssumedNoCapture) {
7824 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer might be captured!\n");
7825 return indicatePessimisticFixpoint();
7828 bool IsKnownNoAlias;
7830 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
7831 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer might alias!\n");
7832 return indicatePessimisticFixpoint();
7837 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer is written!\n");
7838 return indicatePessimisticFixpoint();
7841 return ChangeStatus::UNCHANGED;
7845 void trackStatistics()
const override {
7850struct AAPrivatizablePtrCallSiteReturned final
7851 :
public AAPrivatizablePtrFloating {
7852 AAPrivatizablePtrCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
7853 : AAPrivatizablePtrFloating(IRP,
A) {}
7858 indicatePessimisticFixpoint();
7862 void trackStatistics()
const override {
7867struct AAPrivatizablePtrReturned final :
public AAPrivatizablePtrFloating {
7868 AAPrivatizablePtrReturned(
const IRPosition &IRP, Attributor &
A)
7869 : AAPrivatizablePtrFloating(IRP,
A) {}
7874 indicatePessimisticFixpoint();
7878 void trackStatistics()
const override {
7888struct AAMemoryBehaviorImpl :
public AAMemoryBehavior {
7889 AAMemoryBehaviorImpl(
const IRPosition &IRP, Attributor &
A)
7890 : AAMemoryBehavior(IRP,
A) {}
7894 intersectAssumedBits(BEST_STATE);
7895 getKnownStateFromValue(
A, getIRPosition(), getState());
7896 AAMemoryBehavior::initialize(
A);
7900 static void getKnownStateFromValue(Attributor &
A,
const IRPosition &IRP,
7901 BitIntegerState &State,
7902 bool IgnoreSubsumingPositions =
false) {
7904 A.getAttrs(IRP, AttrKinds, Attrs, IgnoreSubsumingPositions);
7906 switch (Attr.getKindAsEnum()) {
7907 case Attribute::ReadNone:
7910 case Attribute::ReadOnly:
7913 case Attribute::WriteOnly:
7922 if (!
I->mayReadFromMemory())
7924 if (!
I->mayWriteToMemory())
7930 void getDeducedAttributes(Attributor &
A, LLVMContext &Ctx,
7931 SmallVectorImpl<Attribute> &Attrs)
const override {
7934 Attrs.push_back(Attribute::get(Ctx, Attribute::ReadNone));
7936 Attrs.push_back(Attribute::get(Ctx, Attribute::ReadOnly));
7937 else if (isAssumedWriteOnly())
7938 Attrs.push_back(Attribute::get(Ctx, Attribute::WriteOnly));
7944 const IRPosition &IRP = getIRPosition();
7946 if (
A.hasAttr(IRP, Attribute::ReadNone,
7948 return ChangeStatus::UNCHANGED;
7957 return ChangeStatus::UNCHANGED;
7960 A.removeAttrs(IRP, AttrKinds);
7963 A.removeAttrs(IRP, Attribute::Writable);
7970 const std::string getAsStr(Attributor *
A)
const override {
7975 if (isAssumedWriteOnly())
7977 return "may-read/write";
7981 static const Attribute::AttrKind AttrKinds[3];
7985 Attribute::ReadNone, Attribute::ReadOnly, Attribute::WriteOnly};
7988struct AAMemoryBehaviorFloating : AAMemoryBehaviorImpl {
7989 AAMemoryBehaviorFloating(
const IRPosition &IRP, Attributor &
A)
7990 : AAMemoryBehaviorImpl(IRP,
A) {}
7996 void trackStatistics()
const override {
8001 else if (isAssumedWriteOnly())
8008 bool followUsersOfUseIn(Attributor &
A,
const Use &U,
8009 const Instruction *UserI);
8012 void analyzeUseIn(Attributor &
A,
const Use &U,
const Instruction *UserI);
8016struct AAMemoryBehaviorArgument : AAMemoryBehaviorFloating {
8017 AAMemoryBehaviorArgument(
const IRPosition &IRP, Attributor &
A)
8018 : AAMemoryBehaviorFloating(IRP,
A) {}
8022 intersectAssumedBits(BEST_STATE);
8023 const IRPosition &IRP = getIRPosition();
8027 bool HasByVal =
A.hasAttr(IRP, {Attribute::ByVal},
8029 getKnownStateFromValue(
A, IRP, getState(),
8036 return ChangeStatus::UNCHANGED;
8040 if (
A.hasAttr(getIRPosition(),
8041 {Attribute::InAlloca, Attribute::Preallocated})) {
8042 removeKnownBits(NO_WRITES);
8043 removeAssumedBits(NO_WRITES);
8045 A.removeAttrs(getIRPosition(), AttrKinds);
8046 return AAMemoryBehaviorFloating::manifest(
A);
8050 void trackStatistics()
const override {
8055 else if (isAssumedWriteOnly())
8060struct AAMemoryBehaviorCallSiteArgument final : AAMemoryBehaviorArgument {
8061 AAMemoryBehaviorCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
8062 : AAMemoryBehaviorArgument(IRP,
A) {}
8068 Argument *Arg = getAssociatedArgument();
8070 indicatePessimisticFixpoint();
8074 addKnownBits(NO_WRITES);
8075 removeKnownBits(NO_READS);
8076 removeAssumedBits(NO_READS);
8078 AAMemoryBehaviorArgument::initialize(
A);
8079 if (getAssociatedFunction()->isDeclaration())
8080 indicatePessimisticFixpoint();
8089 Argument *Arg = getAssociatedArgument();
8092 A.getAAFor<AAMemoryBehavior>(*
this, ArgPos, DepClassTy::REQUIRED);
8094 return indicatePessimisticFixpoint();
8099 void trackStatistics()
const override {
8104 else if (isAssumedWriteOnly())
8110struct AAMemoryBehaviorCallSiteReturned final : AAMemoryBehaviorFloating {
8111 AAMemoryBehaviorCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
8112 : AAMemoryBehaviorFloating(IRP,
A) {}
8116 AAMemoryBehaviorImpl::initialize(
A);
8121 return ChangeStatus::UNCHANGED;
8125 void trackStatistics()
const override {}
8129struct AAMemoryBehaviorFunction final :
public AAMemoryBehaviorImpl {
8130 AAMemoryBehaviorFunction(
const IRPosition &IRP, Attributor &
A)
8131 : AAMemoryBehaviorImpl(IRP,
A) {}
8147 else if (isAssumedWriteOnly())
8150 A.removeAttrs(getIRPosition(), AttrKinds);
8153 for (Argument &Arg :
F.args())
8155 return A.manifestAttrs(getIRPosition(),
8156 Attribute::getWithMemoryEffects(
F.getContext(), ME));
8160 void trackStatistics()
const override {
8165 else if (isAssumedWriteOnly())
8171struct AAMemoryBehaviorCallSite final
8172 : AACalleeToCallSite<AAMemoryBehavior, AAMemoryBehaviorImpl> {
8173 AAMemoryBehaviorCallSite(
const IRPosition &IRP, Attributor &
A)
8174 : AACalleeToCallSite<AAMemoryBehavior, AAMemoryBehaviorImpl>(IRP,
A) {}
8185 else if (isAssumedWriteOnly())
8188 A.removeAttrs(getIRPosition(), AttrKinds);
8191 for (Use &U : CB.
args())
8193 Attribute::Writable);
8194 return A.manifestAttrs(
8195 getIRPosition(), Attribute::getWithMemoryEffects(CB.
getContext(), ME));
8199 void trackStatistics()
const override {
8204 else if (isAssumedWriteOnly())
8209ChangeStatus AAMemoryBehaviorFunction::updateImpl(Attributor &
A) {
8212 auto AssumedState = getAssumed();
8219 const auto *MemBehaviorAA =
A.getAAFor<AAMemoryBehavior>(
8221 if (MemBehaviorAA) {
8222 intersectAssumedBits(MemBehaviorAA->
getAssumed());
8223 return !isAtFixpoint();
8228 if (
I.mayReadFromMemory())
8229 removeAssumedBits(NO_READS);
8230 if (
I.mayWriteToMemory())
8231 removeAssumedBits(NO_WRITES);
8232 return !isAtFixpoint();
8235 bool UsedAssumedInformation =
false;
8236 if (!
A.checkForAllReadWriteInstructions(CheckRWInst, *
this,
8237 UsedAssumedInformation))
8238 return indicatePessimisticFixpoint();
8244ChangeStatus AAMemoryBehaviorFloating::updateImpl(Attributor &
A) {
8246 const IRPosition &IRP = getIRPosition();
8257 const auto *FnMemAA =
8260 FnMemAssumedState = FnMemAA->getAssumed();
8261 S.addKnownBits(FnMemAA->getKnown());
8262 if ((S.getAssumed() & FnMemAA->getAssumed()) == S.getAssumed())
8268 auto AssumedState = S.getAssumed();
8274 bool IsKnownNoCapture;
8275 const AANoCapture *ArgNoCaptureAA =
nullptr;
8280 if (!IsAssumedNoCapture &&
8282 S.intersectAssumedBits(FnMemAssumedState);
8288 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
8290 LLVM_DEBUG(
dbgs() <<
"[AAMemoryBehavior] Use: " << *U <<
" in " << *UserI
8298 Follow = followUsersOfUseIn(
A, U, UserI);
8302 analyzeUseIn(
A, U, UserI);
8304 return !isAtFixpoint();
8307 if (!
A.checkForAllUses(UsePred, *
this, getAssociatedValue()))
8308 return indicatePessimisticFixpoint();
8314bool AAMemoryBehaviorFloating::followUsersOfUseIn(Attributor &
A,
const Use &U,
8315 const Instruction *UserI) {
8333 if (
U.get()->getType()->isPointerTy()) {
8335 bool IsKnownNoCapture;
8344void AAMemoryBehaviorFloating::analyzeUseIn(Attributor &
A,
const Use &U,
8345 const Instruction *UserI) {
8352 case Instruction::Load:
8354 removeAssumedBits(NO_READS);
8357 case Instruction::Store:
8362 removeAssumedBits(NO_WRITES);
8364 indicatePessimisticFixpoint();
8367 case Instruction::Call:
8368 case Instruction::CallBr:
8369 case Instruction::Invoke: {
8376 indicatePessimisticFixpoint();
8383 removeAssumedBits(NO_READS);
8390 if (
U.get()->getType()->isPointerTy())
8394 const auto *MemBehaviorAA =
8400 intersectAssumedBits(MemBehaviorAA->
getAssumed());
8408 removeAssumedBits(NO_READS);
8410 removeAssumedBits(NO_WRITES);
8422 return "all memory";
8425 std::string S =
"memory:";
8431 S +=
"internal global,";
8433 S +=
"external global,";
8437 S +=
"inaccessible,";
8451 AccessKind2Accesses.fill(
nullptr);
8454 ~AAMemoryLocationImpl()
override {
8457 for (AccessSet *AS : AccessKind2Accesses)
8464 intersectAssumedBits(BEST_STATE);
8465 getKnownStateFromValue(
A, getIRPosition(), getState());
8466 AAMemoryLocation::initialize(
A);
8470 static void getKnownStateFromValue(Attributor &
A,
const IRPosition &IRP,
8471 BitIntegerState &State,
8472 bool IgnoreSubsumingPositions =
false) {
8481 bool UseArgMemOnly =
true;
8483 if (AnchorFn &&
A.isRunOn(*AnchorFn))
8487 A.getAttrs(IRP, {Attribute::Memory},
Attrs, IgnoreSubsumingPositions);
8496 State.
addKnownBits(inverseLocation(NO_INACCESSIBLE_MEM,
true,
true));
8501 State.
addKnownBits(inverseLocation(NO_ARGUMENT_MEM,
true,
true));
8505 A.manifestAttrs(IRP,
8506 Attribute::getWithMemoryEffects(
8515 NO_INACCESSIBLE_MEM | NO_ARGUMENT_MEM,
true,
true));
8519 A.manifestAttrs(IRP,
8520 Attribute::getWithMemoryEffects(
8530 void getDeducedAttributes(Attributor &
A, LLVMContext &Ctx,
8531 SmallVectorImpl<Attribute> &Attrs)
const override {
8538 else if (isAssumedInaccessibleMemOnly())
8539 Attrs.push_back(Attribute::getWithMemoryEffects(
8541 else if (isAssumedArgMemOnly())
8544 else if (isAssumedInaccessibleOrArgMemOnly())
8545 Attrs.push_back(Attribute::getWithMemoryEffects(
8555 const IRPosition &IRP = getIRPosition();
8559 if (DeducedAttrs.
size() != 1)
8560 return ChangeStatus::UNCHANGED;
8563 return A.manifestAttrs(IRP, Attribute::getWithMemoryEffects(
8568 bool checkForAllAccessesToMemoryKind(
8570 MemoryLocationsKind)>
8572 MemoryLocationsKind RequestedMLK)
const override {
8573 if (!isValidState())
8576 MemoryLocationsKind AssumedMLK = getAssumedNotAccessedLocation();
8577 if (AssumedMLK == NO_LOCATIONS)
8581 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS;
8582 CurMLK *= 2, ++Idx) {
8583 if (CurMLK & RequestedMLK)
8586 if (
const AccessSet *
Accesses = AccessKind2Accesses[Idx])
8587 for (
const AccessInfo &AI : *
Accesses)
8588 if (!Pred(AI.I, AI.Ptr, AI.Kind, CurMLK))
8601 MemoryLocationsKind KnownMLK = getKnown();
8603 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2)
8604 if (!(CurMLK & KnownMLK))
8605 updateStateAndAccessesMap(getState(), CurMLK,
I,
nullptr,
Changed,
8606 getAccessKindFromInst(
I));
8607 return AAMemoryLocation::indicatePessimisticFixpoint();
8627 bool operator()(
const AccessInfo &
LHS,
const AccessInfo &
RHS)
const {
8631 return LHS.Ptr <
RHS.Ptr;
8632 if (
LHS.Kind !=
RHS.Kind)
8633 return LHS.Kind <
RHS.Kind;
8640 using AccessSet = SmallSet<AccessInfo, 2, AccessInfo>;
8641 std::array<AccessSet *, llvm::ConstantLog2<VALID_STATE>()>
8642 AccessKind2Accesses;
8647 categorizeArgumentPointerLocations(Attributor &
A, CallBase &CB,
8648 AAMemoryLocation::StateType &AccessedLocs,
8653 categorizeAccessedLocations(Attributor &
A, Instruction &
I,
bool &
Changed);
8656 AccessKind getAccessKindFromInst(
const Instruction *
I) {
8659 AK =
I->mayReadFromMemory() ? READ :
NONE;
8668 void updateStateAndAccessesMap(AAMemoryLocation::StateType &State,
8669 MemoryLocationsKind MLK,
const Instruction *
I,
8678 if (MLK == NO_UNKOWN_MEM)
8680 State.removeAssumedBits(MLK);
8685 void categorizePtrValue(Attributor &
A,
const Instruction &
I,
const Value &Ptr,
8686 AAMemoryLocation::StateType &State,
bool &
Changed,
8687 unsigned AccessAS = 0);
8693void AAMemoryLocationImpl::categorizePtrValue(
8694 Attributor &
A,
const Instruction &
I,
const Value &Ptr,
8696 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Categorize pointer locations for "
8701 unsigned ObjectAS =
Obj.getType()->getPointerAddressSpace();
8703 MemoryLocationsKind MLK = NO_LOCATIONS;
8722 MLK = NO_ARGUMENT_MEM;
8728 if (GVar->isConstant())
8731 if (GV->hasLocalLinkage())
8732 MLK = NO_GLOBAL_INTERNAL_MEM;
8734 MLK = NO_GLOBAL_EXTERNAL_MEM;
8742 bool IsKnownNoAlias;
8746 MLK = NO_MALLOCED_MEM;
8748 MLK = NO_UNKOWN_MEM;
8750 MLK = NO_UNKOWN_MEM;
8753 assert(MLK != NO_LOCATIONS &&
"No location specified!");
8754 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Ptr value can be categorized: "
8755 << Obj <<
" -> " << getMemoryLocationsAsStr(MLK) <<
"\n");
8757 getAccessKindFromInst(&
I));
8762 const auto *AA =
A.getAAFor<AAUnderlyingObjects>(
8766 dbgs() <<
"[AAMemoryLocation] Pointer locations not categorized\n");
8767 updateStateAndAccessesMap(
State, NO_UNKOWN_MEM, &
I,
nullptr,
Changed,
8768 getAccessKindFromInst(&
I));
8773 dbgs() <<
"[AAMemoryLocation] Accessed locations with pointer locations: "
8777void AAMemoryLocationImpl::categorizeArgumentPointerLocations(
8780 for (
unsigned ArgNo = 0,
E = CB.
arg_size(); ArgNo <
E; ++ArgNo) {
8789 const auto *ArgOpMemLocationAA =
8792 if (ArgOpMemLocationAA && ArgOpMemLocationAA->isAssumedReadNone())
8797 categorizePtrValue(
A, CB, *ArgOp, AccessedLocs,
Changed);
8802AAMemoryLocationImpl::categorizeAccessedLocations(Attributor &
A, Instruction &
I,
8804 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Categorize accessed locations for "
8808 AccessedLocs.intersectAssumedBits(NO_LOCATIONS);
8813 const auto *CBMemLocationAA =
A.getAAFor<AAMemoryLocation>(
8816 <<
" [" << CBMemLocationAA <<
"]\n");
8817 if (!CBMemLocationAA) {
8818 updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &
I,
nullptr,
8819 Changed, getAccessKindFromInst(&
I));
8820 return NO_UNKOWN_MEM;
8823 if (CBMemLocationAA->isAssumedReadNone())
8824 return NO_LOCATIONS;
8826 if (CBMemLocationAA->isAssumedInaccessibleMemOnly()) {
8827 updateStateAndAccessesMap(AccessedLocs, NO_INACCESSIBLE_MEM, &
I,
nullptr,
8828 Changed, getAccessKindFromInst(&
I));
8829 return AccessedLocs.getAssumed();
8832 uint32_t CBAssumedNotAccessedLocs =
8833 CBMemLocationAA->getAssumedNotAccessedLocation();
8836 uint32_t CBAssumedNotAccessedLocsNoArgMem =
8837 CBAssumedNotAccessedLocs | NO_ARGUMENT_MEM | NO_GLOBAL_MEM;
8839 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2) {
8840 if (CBAssumedNotAccessedLocsNoArgMem & CurMLK)
8842 updateStateAndAccessesMap(AccessedLocs, CurMLK, &
I,
nullptr,
Changed,
8843 getAccessKindFromInst(&
I));
8848 bool HasGlobalAccesses = ((~CBAssumedNotAccessedLocs) & NO_GLOBAL_MEM);
8849 if (HasGlobalAccesses) {
8852 updateStateAndAccessesMap(AccessedLocs, MLK, &
I, Ptr,
Changed,
8853 getAccessKindFromInst(&
I));
8856 if (!CBMemLocationAA->checkForAllAccessesToMemoryKind(
8857 AccessPred, inverseLocation(NO_GLOBAL_MEM,
false,
false)))
8858 return AccessedLocs.getWorstState();
8862 dbgs() <<
"[AAMemoryLocation] Accessed state before argument handling: "
8863 << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) <<
"\n");
8866 bool HasArgAccesses = ((~CBAssumedNotAccessedLocs) & NO_ARGUMENT_MEM);
8868 categorizeArgumentPointerLocations(
A, *CB, AccessedLocs,
Changed);
8871 dbgs() <<
"[AAMemoryLocation] Accessed state after argument handling: "
8872 << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) <<
"\n");
8874 return AccessedLocs.getAssumed();
8879 dbgs() <<
"[AAMemoryLocation] Categorize memory access with pointer: "
8880 <<
I <<
" [" << *Ptr <<
"]\n");
8881 categorizePtrValue(
A,
I, *Ptr, AccessedLocs,
Changed,
8882 Ptr->getType()->getPointerAddressSpace());
8883 return AccessedLocs.getAssumed();
8886 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Failed to categorize instruction: "
8888 updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &
I,
nullptr,
Changed,
8889 getAccessKindFromInst(&
I));
8890 return AccessedLocs.getAssumed();
8894struct AAMemoryLocationFunction final :
public AAMemoryLocationImpl {
8895 AAMemoryLocationFunction(
const IRPosition &IRP, Attributor &
A)
8896 : AAMemoryLocationImpl(IRP,
A) {}
8901 const auto *MemBehaviorAA =
8902 A.getAAFor<AAMemoryBehavior>(*
this, getIRPosition(), DepClassTy::NONE);
8905 return indicateOptimisticFixpoint();
8907 "AAMemoryLocation was not read-none but AAMemoryBehavior was!");
8908 A.recordDependence(*MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
8909 return ChangeStatus::UNCHANGED;
8913 auto AssumedState = getAssumed();
8917 MemoryLocationsKind MLK = categorizeAccessedLocations(
A,
I,
Changed);
8918 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Accessed locations for " <<
I
8919 <<
": " << getMemoryLocationsAsStr(MLK) <<
"\n");
8920 removeAssumedBits(inverseLocation(MLK,
false,
false));
8923 return getAssumedNotAccessedLocation() != VALID_STATE;
8926 bool UsedAssumedInformation =
false;
8927 if (!
A.checkForAllReadWriteInstructions(CheckRWInst, *
this,
8928 UsedAssumedInformation))
8929 return indicatePessimisticFixpoint();
8931 Changed |= AssumedState != getAssumed();
8932 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
8936 void trackStatistics()
const override {
8939 else if (isAssumedArgMemOnly())
8941 else if (isAssumedInaccessibleMemOnly())
8943 else if (isAssumedInaccessibleOrArgMemOnly())
8949struct AAMemoryLocationCallSite final : AAMemoryLocationImpl {
8950 AAMemoryLocationCallSite(
const IRPosition &IRP, Attributor &
A)
8951 : AAMemoryLocationImpl(IRP,
A) {}
8962 A.getAAFor<AAMemoryLocation>(*
this, FnPos, DepClassTy::REQUIRED);
8964 return indicatePessimisticFixpoint();
8968 updateStateAndAccessesMap(getState(), MLK,
I, Ptr,
Changed,
8969 getAccessKindFromInst(
I));
8972 if (!FnAA->checkForAllAccessesToMemoryKind(AccessPred, ALL_LOCATIONS))
8973 return indicatePessimisticFixpoint();
8974 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
8978 void trackStatistics()
const override {
8988struct AADenormalFPMathImpl :
public AADenormalFPMath {
8989 AADenormalFPMathImpl(
const IRPosition &IRP, Attributor &
A)
8990 : AADenormalFPMath(IRP,
A) {}
8992 const std::string getAsStr(Attributor *
A)
const override {
8993 std::string Str(
"AADenormalFPMath[");
8994 raw_string_ostream OS(Str);
8996 DenormalState Known = getKnown();
8997 if (Known.Mode.isValid())
8998 OS <<
"denormal-fp-math=" << Known.Mode;
9002 if (Known.ModeF32.isValid())
9003 OS <<
" denormal-fp-math-f32=" << Known.ModeF32;
9009struct AADenormalFPMathFunction final : AADenormalFPMathImpl {
9010 AADenormalFPMathFunction(
const IRPosition &IRP, Attributor &
A)
9011 : AADenormalFPMathImpl(IRP,
A) {}
9015 DenormalFPEnv DenormEnv =
F->getDenormalFPEnv();
9025 auto CheckCallSite = [=, &Change, &
A](AbstractCallSite CS) {
9028 <<
"->" << getAssociatedFunction()->
getName() <<
'\n');
9030 const auto *CallerInfo =
A.getAAFor<AADenormalFPMath>(
9036 CallerInfo->getState());
9040 bool AllCallSitesKnown =
true;
9041 if (!
A.checkForAllCallSites(CheckCallSite, *
this,
true, AllCallSitesKnown))
9042 return indicatePessimisticFixpoint();
9044 if (Change == ChangeStatus::CHANGED && isModeFixed())
9050 LLVMContext &Ctx = getAssociatedFunction()->getContext();
9056 DenormalFPEnv KnownEnv(Known.Mode, Known.ModeF32);
9059 AttrToRemove.
push_back(Attribute::DenormalFPEnv);
9062 Ctx, Attribute::DenormalFPEnv,
9063 DenormalFPEnv(Known.Mode, Known.ModeF32).toIntValue()));
9066 auto &IRP = getIRPosition();
9069 return A.removeAttrs(IRP, AttrToRemove) |
9070 A.manifestAttrs(IRP, AttrToAdd,
true);
9073 void trackStatistics()
const override {
9082struct AAValueConstantRangeImpl : AAValueConstantRange {
9083 using StateType = IntegerRangeState;
9084 AAValueConstantRangeImpl(
const IRPosition &IRP, Attributor &
A)
9085 : AAValueConstantRange(IRP,
A) {}
9089 if (
A.hasSimplificationCallback(getIRPosition())) {
9090 indicatePessimisticFixpoint();
9095 intersectKnown(getConstantRangeFromSCEV(
A, getCtxI()));
9098 intersectKnown(getConstantRangeFromLVI(
A, getCtxI()));
9102 const std::string getAsStr(Attributor *
A)
const override {
9104 llvm::raw_string_ostream OS(Str);
9106 getKnown().print(OS);
9108 getAssumed().print(OS);
9115 const SCEV *getSCEV(Attributor &
A,
const Instruction *
I =
nullptr)
const {
9116 if (!getAnchorScope())
9119 ScalarEvolution *SE =
9120 A.getInfoCache().getAnalysisResultForFunction<ScalarEvolutionAnalysis>(
9123 LoopInfo *LI =
A.getInfoCache().getAnalysisResultForFunction<LoopAnalysis>(
9129 const SCEV *S = SE->
getSCEV(&getAssociatedValue());
9138 ConstantRange getConstantRangeFromSCEV(Attributor &
A,
9139 const Instruction *
I =
nullptr)
const {
9140 if (!getAnchorScope())
9143 ScalarEvolution *SE =
9144 A.getInfoCache().getAnalysisResultForFunction<ScalarEvolutionAnalysis>(
9147 const SCEV *S = getSCEV(
A,
I);
9157 getConstantRangeFromLVI(Attributor &
A,
9158 const Instruction *CtxI =
nullptr)
const {
9159 if (!getAnchorScope())
9162 LazyValueInfo *LVI =
9163 A.getInfoCache().getAnalysisResultForFunction<LazyValueAnalysis>(
9178 bool isValidCtxInstructionForOutsideAnalysis(Attributor &
A,
9179 const Instruction *CtxI,
9180 bool AllowAACtxI)
const {
9181 if (!CtxI || (!AllowAACtxI && CtxI == getCtxI()))
9193 InformationCache &InfoCache =
A.getInfoCache();
9194 const DominatorTree *DT =
9205 getKnownConstantRange(Attributor &
A,
9206 const Instruction *CtxI =
nullptr)
const override {
9207 if (!isValidCtxInstructionForOutsideAnalysis(
A, CtxI,
9211 ConstantRange LVIR = getConstantRangeFromLVI(
A, CtxI);
9212 ConstantRange SCEVR = getConstantRangeFromSCEV(
A, CtxI);
9213 return getKnown().intersectWith(SCEVR).intersectWith(LVIR);
9218 getAssumedConstantRange(Attributor &
A,
9219 const Instruction *CtxI =
nullptr)
const override {
9224 if (!isValidCtxInstructionForOutsideAnalysis(
A, CtxI,
9226 return getAssumed();
9228 ConstantRange LVIR = getConstantRangeFromLVI(
A, CtxI);
9229 ConstantRange SCEVR = getConstantRangeFromSCEV(
A, CtxI);
9230 return getAssumed().intersectWith(SCEVR).intersectWith(LVIR);
9235 getMDNodeForConstantRange(
Type *Ty, LLVMContext &Ctx,
9236 const ConstantRange &AssumedConstantRange) {
9238 Ty, AssumedConstantRange.
getLower())),
9240 Ty, AssumedConstantRange.
getUpper()))};
9245 static bool isBetterRange(
const ConstantRange &Assumed,
9246 const Instruction &
I) {
9250 std::optional<ConstantRange> Known;
9254 }
else if (MDNode *KnownRanges =
I.getMetadata(LLVMContext::MD_range)) {
9260 if (KnownRanges->getNumOperands() > 2)
9263 ConstantInt *
Lower =
9265 ConstantInt *
Upper =
9268 Known.emplace(
Lower->getValue(),
Upper->getValue());
9270 return !Known || (*Known != Assumed && Known->contains(Assumed));
9275 setRangeMetadataIfisBetterRange(Instruction *
I,
9276 const ConstantRange &AssumedConstantRange) {
9277 if (isBetterRange(AssumedConstantRange, *
I)) {
9278 I->setMetadata(LLVMContext::MD_range,
9279 getMDNodeForConstantRange(
I->getType(),
I->getContext(),
9280 AssumedConstantRange));
9287 setRangeRetAttrIfisBetterRange(Attributor &
A,
const IRPosition &IRP,
9289 const ConstantRange &AssumedConstantRange) {
9290 if (isBetterRange(AssumedConstantRange, *
I)) {
9291 A.manifestAttrs(IRP,
9292 Attribute::get(
I->getContext(), Attribute::Range,
9293 AssumedConstantRange),
9303 ConstantRange AssumedConstantRange = getAssumedConstantRange(
A);
9306 auto &
V = getAssociatedValue();
9310 assert(
I == getCtxI() &&
"Should not annotate an instruction which is "
9311 "not the context instruction");
9313 if (setRangeMetadataIfisBetterRange(
I, AssumedConstantRange))
9314 Changed = ChangeStatus::CHANGED;
9316 if (setRangeRetAttrIfisBetterRange(
A, getIRPosition(),
I,
9317 AssumedConstantRange))
9318 Changed = ChangeStatus::CHANGED;
9326struct AAValueConstantRangeArgument final
9327 : AAArgumentFromCallSiteArguments<
9328 AAValueConstantRange, AAValueConstantRangeImpl, IntegerRangeState,
9330 using Base = AAArgumentFromCallSiteArguments<
9331 AAValueConstantRange, AAValueConstantRangeImpl, IntegerRangeState,
9333 AAValueConstantRangeArgument(
const IRPosition &IRP, Attributor &
A)
9337 void trackStatistics()
const override {
9342struct AAValueConstantRangeReturned
9343 : AAReturnedFromReturnedValues<AAValueConstantRange,
9344 AAValueConstantRangeImpl,
9345 AAValueConstantRangeImpl::StateType,
9348 AAReturnedFromReturnedValues<AAValueConstantRange,
9349 AAValueConstantRangeImpl,
9350 AAValueConstantRangeImpl::StateType,
9352 AAValueConstantRangeReturned(
const IRPosition &IRP, Attributor &
A)
9357 if (!
A.isFunctionIPOAmendable(*getAssociatedFunction()))
9358 indicatePessimisticFixpoint();
9362 void trackStatistics()
const override {
9367struct AAValueConstantRangeFloating : AAValueConstantRangeImpl {
9368 AAValueConstantRangeFloating(
const IRPosition &IRP, Attributor &
A)
9369 : AAValueConstantRangeImpl(IRP,
A) {}
9373 AAValueConstantRangeImpl::initialize(
A);
9377 Value &
V = getAssociatedValue();
9380 unionAssumed(ConstantRange(
C->getValue()));
9381 indicateOptimisticFixpoint();
9387 unionAssumed(ConstantRange(APInt(
getBitWidth(), 0)));
9388 indicateOptimisticFixpoint();
9400 if (
auto *RangeMD = LI->getMetadata(LLVMContext::MD_range)) {
9411 indicatePessimisticFixpoint();
9414 << getAssociatedValue() <<
"\n");
9417 bool calculateBinaryOperator(
9418 Attributor &
A, BinaryOperator *BinOp, IntegerRangeState &
T,
9419 const Instruction *CtxI,
9420 SmallVectorImpl<const AAValueConstantRange *> &QuerriedAAs) {
9425 bool UsedAssumedInformation =
false;
9426 const auto &SimplifiedLHS =
A.getAssumedSimplified(
9429 if (!SimplifiedLHS.has_value())
9431 if (!*SimplifiedLHS)
9433 LHS = *SimplifiedLHS;
9435 const auto &SimplifiedRHS =
A.getAssumedSimplified(
9438 if (!SimplifiedRHS.has_value())
9440 if (!*SimplifiedRHS)
9442 RHS = *SimplifiedRHS;
9448 auto *LHSAA =
A.getAAFor<AAValueConstantRange>(
9450 DepClassTy::REQUIRED);
9454 auto LHSAARange = LHSAA->getAssumedConstantRange(
A, CtxI);
9456 auto *RHSAA =
A.getAAFor<AAValueConstantRange>(
9458 DepClassTy::REQUIRED);
9462 auto RHSAARange = RHSAA->getAssumedConstantRange(
A, CtxI);
9464 auto AssumedRange = LHSAARange.binaryOp(BinOp->
getOpcode(), RHSAARange);
9466 T.unionAssumed(AssumedRange);
9470 return T.isValidState();
9473 bool calculateCastInst(
9474 Attributor &
A, CastInst *CastI, IntegerRangeState &
T,
9475 const Instruction *CtxI,
9476 SmallVectorImpl<const AAValueConstantRange *> &QuerriedAAs) {
9482 bool UsedAssumedInformation =
false;
9483 const auto &SimplifiedOpV =
A.getAssumedSimplified(
9486 if (!SimplifiedOpV.has_value())
9488 if (!*SimplifiedOpV)
9490 OpV = *SimplifiedOpV;
9495 auto *OpAA =
A.getAAFor<AAValueConstantRange>(
9497 DepClassTy::REQUIRED);
9501 T.unionAssumed(OpAA->getAssumed().castOp(CastI->
getOpcode(),
9503 return T.isValidState();
9507 calculateCmpInst(Attributor &
A, CmpInst *CmpI, IntegerRangeState &
T,
9508 const Instruction *CtxI,
9509 SmallVectorImpl<const AAValueConstantRange *> &QuerriedAAs) {
9514 bool UsedAssumedInformation =
false;
9515 const auto &SimplifiedLHS =
A.getAssumedSimplified(
9518 if (!SimplifiedLHS.has_value())
9520 if (!*SimplifiedLHS)
9522 LHS = *SimplifiedLHS;
9524 const auto &SimplifiedRHS =
A.getAssumedSimplified(
9527 if (!SimplifiedRHS.has_value())
9529 if (!*SimplifiedRHS)
9531 RHS = *SimplifiedRHS;
9537 auto *LHSAA =
A.getAAFor<AAValueConstantRange>(
9539 DepClassTy::REQUIRED);
9543 auto *RHSAA =
A.getAAFor<AAValueConstantRange>(
9545 DepClassTy::REQUIRED);
9549 auto LHSAARange = LHSAA->getAssumedConstantRange(
A, CtxI);
9550 auto RHSAARange = RHSAA->getAssumedConstantRange(
A, CtxI);
9553 if (LHSAARange.isEmptySet() || RHSAARange.isEmptySet())
9556 bool MustTrue =
false, MustFalse =
false;
9558 auto AllowedRegion =
9561 if (AllowedRegion.intersectWith(LHSAARange).isEmptySet())
9567 assert((!MustTrue || !MustFalse) &&
9568 "Either MustTrue or MustFalse should be false!");
9571 T.unionAssumed(ConstantRange(APInt( 1, 1)));
9573 T.unionAssumed(ConstantRange(APInt( 1, 0)));
9575 T.unionAssumed(ConstantRange( 1,
true));
9577 LLVM_DEBUG(
dbgs() <<
"[AAValueConstantRange] " << *CmpI <<
" after "
9578 << (MustTrue ?
"true" : (MustFalse ?
"false" :
"unknown"))
9579 <<
": " <<
T <<
"\n\t" << *LHSAA <<
"\t<op>\n\t"
9583 return T.isValidState();
9595 bool UsedAssumedInformation =
false;
9596 const auto &SimplifiedOpV =
A.getAssumedSimplified(
9599 if (!SimplifiedOpV.has_value())
9601 if (!*SimplifiedOpV)
9603 Value *VPtr = *SimplifiedOpV;
9606 const auto *AA =
A.getAAFor<AAValueConstantRange>(
9608 DepClassTy::REQUIRED);
9612 T.unionAssumed(AA->getAssumedConstantRange(
A, CtxI));
9616 return T.isValidState();
9621 if (!calculateBinaryOperator(
A, BinOp,
T, CtxI, QuerriedAAs))
9624 if (!calculateCmpInst(
A, CmpI,
T, CtxI, QuerriedAAs))
9627 if (!calculateCastInst(
A, CastI,
T, CtxI, QuerriedAAs))
9633 T.indicatePessimisticFixpoint();
9640 for (
const AAValueConstantRange *QueriedAA : QuerriedAAs) {
9641 if (QueriedAA !=
this)
9644 if (
T.getAssumed() == getState().getAssumed())
9646 T.indicatePessimisticFixpoint();
9649 return T.isValidState();
9652 if (!VisitValueCB(getAssociatedValue(), getCtxI()))
9653 return indicatePessimisticFixpoint();
9658 return ChangeStatus::UNCHANGED;
9659 if (++NumChanges > MaxNumChanges) {
9660 LLVM_DEBUG(
dbgs() <<
"[AAValueConstantRange] performed " << NumChanges
9661 <<
" but only " << MaxNumChanges
9662 <<
" are allowed to avoid cyclic reasoning.");
9663 return indicatePessimisticFixpoint();
9665 return ChangeStatus::CHANGED;
9669 void trackStatistics()
const override {
9678 static constexpr int MaxNumChanges = 5;
9681struct AAValueConstantRangeFunction : AAValueConstantRangeImpl {
9682 AAValueConstantRangeFunction(
const IRPosition &IRP, Attributor &
A)
9683 : AAValueConstantRangeImpl(IRP,
A) {}
9687 llvm_unreachable(
"AAValueConstantRange(Function|CallSite)::updateImpl will "
9695struct AAValueConstantRangeCallSite : AAValueConstantRangeFunction {
9696 AAValueConstantRangeCallSite(
const IRPosition &IRP, Attributor &
A)
9697 : AAValueConstantRangeFunction(IRP,
A) {}
9703struct AAValueConstantRangeCallSiteReturned
9704 : AACalleeToCallSite<AAValueConstantRange, AAValueConstantRangeImpl,
9705 AAValueConstantRangeImpl::StateType,
9707 AAValueConstantRangeCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
9708 : AACalleeToCallSite<AAValueConstantRange, AAValueConstantRangeImpl,
9709 AAValueConstantRangeImpl::StateType,
9716 if (std::optional<ConstantRange>
Range = CI->getRange())
9717 intersectKnown(*
Range);
9720 AAValueConstantRangeImpl::initialize(
A);
9724 void trackStatistics()
const override {
9728struct AAValueConstantRangeCallSiteArgument : AAValueConstantRangeFloating {
9729 AAValueConstantRangeCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
9730 : AAValueConstantRangeFloating(IRP,
A) {}
9734 return ChangeStatus::UNCHANGED;
9738 void trackStatistics()
const override {
9747struct AAPotentialConstantValuesImpl : AAPotentialConstantValues {
9750 AAPotentialConstantValuesImpl(
const IRPosition &IRP, Attributor &
A)
9751 : AAPotentialConstantValues(IRP,
A) {}
9755 if (
A.hasSimplificationCallback(getIRPosition()))
9756 indicatePessimisticFixpoint();
9758 AAPotentialConstantValues::initialize(
A);
9761 bool fillSetWithConstantValues(Attributor &
A,
const IRPosition &IRP, SetTy &S,
9762 bool &ContainsUndef,
bool ForSelf) {
9764 bool UsedAssumedInformation =
false;
9766 UsedAssumedInformation)) {
9773 auto *PotentialValuesAA =
A.getAAFor<AAPotentialConstantValues>(
9774 *
this, IRP, DepClassTy::REQUIRED);
9775 if (!PotentialValuesAA || !PotentialValuesAA->getState().isValidState())
9777 ContainsUndef = PotentialValuesAA->getState().undefIsContained();
9778 S = PotentialValuesAA->getState().getAssumedSet();
9785 ContainsUndef =
false;
9786 for (
auto &It : Values) {
9788 ContainsUndef =
true;
9794 S.insert(CI->getValue());
9796 ContainsUndef &= S.empty();
9802 const std::string getAsStr(Attributor *
A)
const override {
9804 llvm::raw_string_ostream OS(Str);
9811 return indicatePessimisticFixpoint();
9815struct AAPotentialConstantValuesArgument final
9816 : AAArgumentFromCallSiteArguments<AAPotentialConstantValues,
9817 AAPotentialConstantValuesImpl,
9818 PotentialConstantIntValuesState> {
9819 using Base = AAArgumentFromCallSiteArguments<AAPotentialConstantValues,
9820 AAPotentialConstantValuesImpl,
9822 AAPotentialConstantValuesArgument(
const IRPosition &IRP, Attributor &
A)
9826 void trackStatistics()
const override {
9831struct AAPotentialConstantValuesReturned
9832 : AAReturnedFromReturnedValues<AAPotentialConstantValues,
9833 AAPotentialConstantValuesImpl> {
9834 using Base = AAReturnedFromReturnedValues<AAPotentialConstantValues,
9835 AAPotentialConstantValuesImpl>;
9836 AAPotentialConstantValuesReturned(
const IRPosition &IRP, Attributor &
A)
9840 if (!
A.isFunctionIPOAmendable(*getAssociatedFunction()))
9841 indicatePessimisticFixpoint();
9842 Base::initialize(
A);
9846 void trackStatistics()
const override {
9851struct AAPotentialConstantValuesFloating : AAPotentialConstantValuesImpl {
9852 AAPotentialConstantValuesFloating(
const IRPosition &IRP, Attributor &
A)
9853 : AAPotentialConstantValuesImpl(IRP,
A) {}
9857 AAPotentialConstantValuesImpl::initialize(
A);
9861 Value &
V = getAssociatedValue();
9864 unionAssumed(
C->getValue());
9865 indicateOptimisticFixpoint();
9870 unionAssumedWithUndef();
9871 indicateOptimisticFixpoint();
9881 indicatePessimisticFixpoint();
9884 << getAssociatedValue() <<
"\n");
9887 static bool calculateICmpInst(
const ICmpInst *ICI,
const APInt &
LHS,
9892 static APInt calculateCastInst(
const CastInst *CI,
const APInt &Src,
9893 uint32_t ResultBitWidth) {
9898 case Instruction::Trunc:
9899 return Src.trunc(ResultBitWidth);
9900 case Instruction::SExt:
9901 return Src.sext(ResultBitWidth);
9902 case Instruction::ZExt:
9903 return Src.zext(ResultBitWidth);
9904 case Instruction::BitCast:
9909 static APInt calculateBinaryOperator(
const BinaryOperator *BinOp,
9910 const APInt &
LHS,
const APInt &
RHS,
9911 bool &SkipOperation,
bool &Unsupported) {
9918 switch (BinOpcode) {
9922 case Instruction::Add:
9924 case Instruction::Sub:
9926 case Instruction::Mul:
9928 case Instruction::UDiv:
9930 SkipOperation =
true;
9934 case Instruction::SDiv:
9936 SkipOperation =
true;
9940 case Instruction::URem:
9942 SkipOperation =
true;
9946 case Instruction::SRem:
9948 SkipOperation =
true;
9952 case Instruction::Shl:
9954 case Instruction::LShr:
9956 case Instruction::AShr:
9958 case Instruction::And:
9960 case Instruction::Or:
9962 case Instruction::Xor:
9967 bool calculateBinaryOperatorAndTakeUnion(
const BinaryOperator *BinOp,
9968 const APInt &
LHS,
const APInt &
RHS) {
9969 bool SkipOperation =
false;
9972 calculateBinaryOperator(BinOp,
LHS,
RHS, SkipOperation, Unsupported);
9977 unionAssumed(Result);
9978 return isValidState();
9981 ChangeStatus updateWithICmpInst(Attributor &
A, ICmpInst *ICI) {
9982 auto AssumedBefore = getAssumed();
9986 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
9987 SetTy LHSAAPVS, RHSAAPVS;
9989 LHSContainsUndef,
false) ||
9991 RHSContainsUndef,
false))
9992 return indicatePessimisticFixpoint();
9995 bool MaybeTrue =
false, MaybeFalse =
false;
9997 if (LHSContainsUndef && RHSContainsUndef) {
10000 unionAssumedWithUndef();
10001 }
else if (LHSContainsUndef) {
10002 for (
const APInt &R : RHSAAPVS) {
10003 bool CmpResult = calculateICmpInst(ICI, Zero, R);
10004 MaybeTrue |= CmpResult;
10005 MaybeFalse |= !CmpResult;
10006 if (MaybeTrue & MaybeFalse)
10007 return indicatePessimisticFixpoint();
10009 }
else if (RHSContainsUndef) {
10010 for (
const APInt &L : LHSAAPVS) {
10011 bool CmpResult = calculateICmpInst(ICI, L, Zero);
10012 MaybeTrue |= CmpResult;
10013 MaybeFalse |= !CmpResult;
10014 if (MaybeTrue & MaybeFalse)
10015 return indicatePessimisticFixpoint();
10018 for (
const APInt &L : LHSAAPVS) {
10019 for (
const APInt &R : RHSAAPVS) {
10020 bool CmpResult = calculateICmpInst(ICI, L, R);
10021 MaybeTrue |= CmpResult;
10022 MaybeFalse |= !CmpResult;
10023 if (MaybeTrue & MaybeFalse)
10024 return indicatePessimisticFixpoint();
10029 unionAssumed(APInt( 1, 1));
10031 unionAssumed(APInt( 1, 0));
10032 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10033 : ChangeStatus::CHANGED;
10036 ChangeStatus updateWithSelectInst(Attributor &
A, SelectInst *SI) {
10037 auto AssumedBefore = getAssumed();
10041 bool UsedAssumedInformation =
false;
10042 std::optional<Constant *>
C =
A.getAssumedConstant(
10043 *
SI->getCondition(), *
this, UsedAssumedInformation);
10046 bool OnlyLeft =
false, OnlyRight =
false;
10047 if (
C && *
C && (*C)->isOneValue())
10049 else if (
C && *
C && (*C)->isNullValue())
10052 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
10053 SetTy LHSAAPVS, RHSAAPVS;
10056 LHSContainsUndef,
false))
10057 return indicatePessimisticFixpoint();
10061 RHSContainsUndef,
false))
10062 return indicatePessimisticFixpoint();
10064 if (OnlyLeft || OnlyRight) {
10066 auto *OpAA = OnlyLeft ? &LHSAAPVS : &RHSAAPVS;
10067 auto Undef = OnlyLeft ? LHSContainsUndef : RHSContainsUndef;
10070 unionAssumedWithUndef();
10072 for (
const auto &It : *OpAA)
10076 }
else if (LHSContainsUndef && RHSContainsUndef) {
10078 unionAssumedWithUndef();
10080 for (
const auto &It : LHSAAPVS)
10082 for (
const auto &It : RHSAAPVS)
10085 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10086 : ChangeStatus::CHANGED;
10089 ChangeStatus updateWithCastInst(Attributor &
A, CastInst *CI) {
10090 auto AssumedBefore = getAssumed();
10092 return indicatePessimisticFixpoint();
10097 bool SrcContainsUndef =
false;
10100 SrcContainsUndef,
false))
10101 return indicatePessimisticFixpoint();
10103 if (SrcContainsUndef)
10104 unionAssumedWithUndef();
10106 for (
const APInt &S : SrcPVS) {
10107 APInt
T = calculateCastInst(CI, S, ResultBitWidth);
10111 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10112 : ChangeStatus::CHANGED;
10115 ChangeStatus updateWithBinaryOperator(Attributor &
A, BinaryOperator *BinOp) {
10116 auto AssumedBefore = getAssumed();
10120 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
10121 SetTy LHSAAPVS, RHSAAPVS;
10123 LHSContainsUndef,
false) ||
10125 RHSContainsUndef,
false))
10126 return indicatePessimisticFixpoint();
10131 if (LHSContainsUndef && RHSContainsUndef) {
10132 if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, Zero))
10133 return indicatePessimisticFixpoint();
10134 }
else if (LHSContainsUndef) {
10135 for (
const APInt &R : RHSAAPVS) {
10136 if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, R))
10137 return indicatePessimisticFixpoint();
10139 }
else if (RHSContainsUndef) {
10140 for (
const APInt &L : LHSAAPVS) {
10141 if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, Zero))
10142 return indicatePessimisticFixpoint();
10145 for (
const APInt &L : LHSAAPVS) {
10146 for (
const APInt &R : RHSAAPVS) {
10147 if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, R))
10148 return indicatePessimisticFixpoint();
10152 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10153 : ChangeStatus::CHANGED;
10156 ChangeStatus updateWithInstruction(Attributor &
A, Instruction *Inst) {
10157 auto AssumedBefore = getAssumed();
10159 bool ContainsUndef;
10161 ContainsUndef,
true))
10162 return indicatePessimisticFixpoint();
10163 if (ContainsUndef) {
10164 unionAssumedWithUndef();
10166 for (
const auto &It : Incoming)
10169 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10170 : ChangeStatus::CHANGED;
10175 Value &
V = getAssociatedValue();
10179 return updateWithICmpInst(
A, ICI);
10182 return updateWithSelectInst(
A, SI);
10185 return updateWithCastInst(
A, CI);
10188 return updateWithBinaryOperator(
A, BinOp);
10191 return updateWithInstruction(
A,
I);
10193 return indicatePessimisticFixpoint();
10197 void trackStatistics()
const override {
10202struct AAPotentialConstantValuesFunction : AAPotentialConstantValuesImpl {
10203 AAPotentialConstantValuesFunction(
const IRPosition &IRP, Attributor &
A)
10204 : AAPotentialConstantValuesImpl(IRP,
A) {}
10209 "AAPotentialConstantValues(Function|CallSite)::updateImpl will "
10214 void trackStatistics()
const override {
10219struct AAPotentialConstantValuesCallSite : AAPotentialConstantValuesFunction {
10220 AAPotentialConstantValuesCallSite(
const IRPosition &IRP, Attributor &
A)
10221 : AAPotentialConstantValuesFunction(IRP,
A) {}
10224 void trackStatistics()
const override {
10229struct AAPotentialConstantValuesCallSiteReturned
10230 : AACalleeToCallSite<AAPotentialConstantValues,
10231 AAPotentialConstantValuesImpl> {
10232 AAPotentialConstantValuesCallSiteReturned(
const IRPosition &IRP,
10234 : AACalleeToCallSite<AAPotentialConstantValues,
10235 AAPotentialConstantValuesImpl>(IRP,
A) {}
10238 void trackStatistics()
const override {
10243struct AAPotentialConstantValuesCallSiteArgument
10244 : AAPotentialConstantValuesFloating {
10245 AAPotentialConstantValuesCallSiteArgument(
const IRPosition &IRP,
10247 : AAPotentialConstantValuesFloating(IRP,
A) {}
10251 AAPotentialConstantValuesImpl::initialize(
A);
10252 if (isAtFixpoint())
10255 Value &
V = getAssociatedValue();
10258 unionAssumed(
C->getValue());
10259 indicateOptimisticFixpoint();
10264 unionAssumedWithUndef();
10265 indicateOptimisticFixpoint();
10272 Value &
V = getAssociatedValue();
10273 auto AssumedBefore = getAssumed();
10274 auto *AA =
A.getAAFor<AAPotentialConstantValues>(
10277 return indicatePessimisticFixpoint();
10278 const auto &S = AA->getAssumed();
10280 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10281 : ChangeStatus::CHANGED;
10285 void trackStatistics()
const override {
10294 bool IgnoreSubsumingPositions) {
10295 assert(ImpliedAttributeKind == Attribute::NoUndef &&
10296 "Unexpected attribute kind");
10297 if (
A.hasAttr(IRP, {Attribute::NoUndef}, IgnoreSubsumingPositions,
10298 Attribute::NoUndef))
10318 Value &V = getAssociatedValue();
10320 indicatePessimisticFixpoint();
10321 assert(!isImpliedByIR(
A, getIRPosition(), Attribute::NoUndef));
10325 bool followUseInMBEC(Attributor &
A,
const Use *U,
const Instruction *
I,
10326 AANoUndef::StateType &State) {
10327 const Value *UseV =
U->get();
10328 const DominatorTree *DT =
nullptr;
10329 AssumptionCache *AC =
nullptr;
10330 InformationCache &InfoCache =
A.getInfoCache();
10331 if (Function *
F = getAnchorScope()) {
10336 bool TrackUse =
false;
10345 const std::string getAsStr(Attributor *
A)
const override {
10346 return getAssumed() ?
"noundef" :
"may-undef-or-poison";
10353 bool UsedAssumedInformation =
false;
10354 if (
A.isAssumedDead(getIRPosition(),
nullptr,
nullptr,
10355 UsedAssumedInformation))
10356 return ChangeStatus::UNCHANGED;
10360 if (!
A.getAssumedSimplified(getIRPosition(), *
this, UsedAssumedInformation,
10363 return ChangeStatus::UNCHANGED;
10364 return AANoUndef::manifest(
A);
10368struct AANoUndefFloating :
public AANoUndefImpl {
10369 AANoUndefFloating(
const IRPosition &IRP, Attributor &
A)
10370 : AANoUndefImpl(IRP,
A) {}
10374 AANoUndefImpl::initialize(
A);
10375 if (!getState().isAtFixpoint() && getAnchorScope() &&
10376 !getAnchorScope()->isDeclaration())
10377 if (Instruction *CtxI = getCtxI())
10378 followUsesInMBEC(*
this,
A, getState(), *CtxI);
10383 auto VisitValueCB = [&](
const IRPosition &IRP) ->
bool {
10384 bool IsKnownNoUndef;
10386 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoUndef);
10390 bool UsedAssumedInformation =
false;
10391 Value *AssociatedValue = &getAssociatedValue();
10393 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
10398 Values.
size() != 1 || Values.
front().getValue() != AssociatedValue;
10406 if (AVIRP == getIRPosition() || !VisitValueCB(AVIRP))
10407 return indicatePessimisticFixpoint();
10408 return ChangeStatus::UNCHANGED;
10411 for (
const auto &VAC : Values)
10413 return indicatePessimisticFixpoint();
10415 return ChangeStatus::UNCHANGED;
10422struct AANoUndefReturned final
10423 : AAReturnedFromReturnedValues<AANoUndef, AANoUndefImpl> {
10424 AANoUndefReturned(
const IRPosition &IRP, Attributor &
A)
10425 : AAReturnedFromReturnedValues<AANoUndef, AANoUndefImpl>(IRP,
A) {}
10431struct AANoUndefArgument final
10432 : AAArgumentFromCallSiteArguments<AANoUndef, AANoUndefImpl> {
10433 AANoUndefArgument(
const IRPosition &IRP, Attributor &
A)
10434 : AAArgumentFromCallSiteArguments<AANoUndef, AANoUndefImpl>(IRP,
A) {}
10440struct AANoUndefCallSiteArgument final : AANoUndefFloating {
10441 AANoUndefCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
10442 : AANoUndefFloating(IRP,
A) {}
10448struct AANoUndefCallSiteReturned final
10449 : AACalleeToCallSite<AANoUndef, AANoUndefImpl> {
10450 AANoUndefCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
10451 : AACalleeToCallSite<AANoUndef, AANoUndefImpl>(IRP,
A) {}
10459struct AANoFPClassImpl : AANoFPClass {
10460 AANoFPClassImpl(
const IRPosition &IRP, Attributor &
A) : AANoFPClass(IRP,
A) {}
10463 const IRPosition &IRP = getIRPosition();
10467 indicateOptimisticFixpoint();
10472 A.getAttrs(getIRPosition(), {Attribute::NoFPClass},
Attrs,
false);
10473 for (
const auto &Attr : Attrs) {
10480 const DataLayout &
DL =
A.getDataLayout();
10481 InformationCache &InfoCache =
A.getInfoCache();
10483 const DominatorTree *DT =
nullptr;
10484 AssumptionCache *AC =
nullptr;
10485 const TargetLibraryInfo *TLI =
nullptr;
10489 if (!
F->isDeclaration()) {
10496 SimplifyQuery Q(
DL, TLI, DT, AC, CtxI);
10503 followUsesInMBEC(*
this,
A, getState(), *CtxI);
10507 bool followUseInMBEC(Attributor &
A,
const Use *U,
const Instruction *
I,
10508 AANoFPClass::StateType &State) {
10519 if (
auto *NoFPAA =
A.getAAFor<AANoFPClass>(*
this, IRP, DepClassTy::NONE))
10520 State.addKnownBits(NoFPAA->getState().getKnown());
10524 const std::string getAsStr(Attributor *
A)
const override {
10525 std::string
Result =
"nofpclass";
10526 raw_string_ostream OS(Result);
10527 OS << getKnownNoFPClass() <<
'/' << getAssumedNoFPClass();
10531 void getDeducedAttributes(Attributor &
A, LLVMContext &Ctx,
10532 SmallVectorImpl<Attribute> &Attrs)
const override {
10533 Attrs.emplace_back(Attribute::getWithNoFPClass(Ctx, getAssumedNoFPClass()));
10537struct AANoFPClassFloating :
public AANoFPClassImpl {
10538 AANoFPClassFloating(
const IRPosition &IRP, Attributor &
A)
10539 : AANoFPClassImpl(IRP,
A) {}
10544 bool UsedAssumedInformation =
false;
10545 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
10547 Values.
push_back({getAssociatedValue(), getCtxI()});
10553 DepClassTy::REQUIRED);
10554 if (!AA ||
this == AA) {
10555 T.indicatePessimisticFixpoint();
10557 const AANoFPClass::StateType &S =
10558 static_cast<const AANoFPClass::StateType &
>(AA->
getState());
10561 return T.isValidState();
10564 for (
const auto &VAC : Values)
10566 return indicatePessimisticFixpoint();
10572 void trackStatistics()
const override {
10577struct AANoFPClassReturned final
10578 : AAReturnedFromReturnedValues<AANoFPClass, AANoFPClassImpl,
10579 AANoFPClassImpl::StateType, false,
10580 Attribute::None, false> {
10581 AANoFPClassReturned(
const IRPosition &IRP, Attributor &
A)
10582 : AAReturnedFromReturnedValues<AANoFPClass, AANoFPClassImpl,
10583 AANoFPClassImpl::StateType,
false,
10587 void trackStatistics()
const override {
10592struct AANoFPClassArgument final
10593 : AAArgumentFromCallSiteArguments<AANoFPClass, AANoFPClassImpl> {
10594 AANoFPClassArgument(
const IRPosition &IRP, Attributor &
A)
10595 : AAArgumentFromCallSiteArguments<AANoFPClass, AANoFPClassImpl>(IRP,
A) {}
10601struct AANoFPClassCallSiteArgument final : AANoFPClassFloating {
10602 AANoFPClassCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
10603 : AANoFPClassFloating(IRP,
A) {}
10606 void trackStatistics()
const override {
10611struct AANoFPClassCallSiteReturned final
10612 : AACalleeToCallSite<AANoFPClass, AANoFPClassImpl> {
10613 AANoFPClassCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
10614 : AACalleeToCallSite<AANoFPClass, AANoFPClassImpl>(IRP,
A) {}
10617 void trackStatistics()
const override {
10622struct AACallEdgesImpl :
public AACallEdges {
10623 AACallEdgesImpl(
const IRPosition &IRP, Attributor &
A) : AACallEdges(IRP,
A) {}
10625 const SetVector<Function *> &getOptimisticEdges()
const override {
10626 return CalledFunctions;
10629 bool hasUnknownCallee()
const override {
return HasUnknownCallee; }
10631 bool hasNonAsmUnknownCallee()
const override {
10632 return HasUnknownCalleeNonAsm;
10635 const std::string getAsStr(Attributor *
A)
const override {
10636 return "CallEdges[" + std::to_string(HasUnknownCallee) +
"," +
10637 std::to_string(CalledFunctions.size()) +
"]";
10640 void trackStatistics()
const override {}
10643 void addCalledFunction(Function *Fn,
ChangeStatus &Change) {
10644 if (CalledFunctions.insert(Fn)) {
10645 Change = ChangeStatus::CHANGED;
10651 void setHasUnknownCallee(
bool NonAsm,
ChangeStatus &Change) {
10652 if (!HasUnknownCallee)
10653 Change = ChangeStatus::CHANGED;
10654 if (NonAsm && !HasUnknownCalleeNonAsm)
10655 Change = ChangeStatus::CHANGED;
10656 HasUnknownCalleeNonAsm |= NonAsm;
10657 HasUnknownCallee =
true;
10662 SetVector<Function *> CalledFunctions;
10665 bool HasUnknownCallee =
false;
10668 bool HasUnknownCalleeNonAsm =
false;
10671struct AACallEdgesCallSite :
public AACallEdgesImpl {
10672 AACallEdgesCallSite(
const IRPosition &IRP, Attributor &
A)
10673 : AACallEdgesImpl(IRP,
A) {}
10680 addCalledFunction(Fn, Change);
10682 LLVM_DEBUG(
dbgs() <<
"[AACallEdges] Unrecognized value: " << V <<
"\n");
10683 setHasUnknownCallee(
true, Change);
10694 VisitValue(*V, CtxI);
10698 bool UsedAssumedInformation =
false;
10704 for (
auto &VAC : Values)
10711 if (
IA->hasSideEffects() &&
10714 setHasUnknownCallee(
false, Change);
10720 if (
auto *IndirectCallAA =
A.getAAFor<AAIndirectCallInfo>(
10721 *
this, getIRPosition(), DepClassTy::OPTIONAL))
10722 if (IndirectCallAA->foreachCallee(
10723 [&](Function *Fn) { return VisitValue(*Fn, CB); }))
10732 for (
const Use *U : CallbackUses)
10733 ProcessCalledOperand(
U->get(), CB);
10739struct AACallEdgesFunction :
public AACallEdgesImpl {
10740 AACallEdgesFunction(
const IRPosition &IRP, Attributor &
A)
10741 : AACallEdgesImpl(IRP,
A) {}
10750 auto *CBEdges =
A.getAAFor<AACallEdges>(
10754 if (CBEdges->hasNonAsmUnknownCallee())
10755 setHasUnknownCallee(
true, Change);
10756 if (CBEdges->hasUnknownCallee())
10757 setHasUnknownCallee(
false, Change);
10759 for (Function *
F : CBEdges->getOptimisticEdges())
10760 addCalledFunction(
F, Change);
10766 bool UsedAssumedInformation =
false;
10767 if (!
A.checkForAllCallLikeInstructions(ProcessCallInst, *
this,
10768 UsedAssumedInformation,
10772 setHasUnknownCallee(
true, Change);
10781struct AAInterFnReachabilityFunction
10782 :
public CachedReachabilityAA<AAInterFnReachability, Function> {
10783 using Base = CachedReachabilityAA<AAInterFnReachability, Function>;
10784 AAInterFnReachabilityFunction(
const IRPosition &IRP, Attributor &
A)
10787 bool instructionCanReach(
10788 Attributor &
A,
const Instruction &From,
const Function &To,
10791 auto *NonConstThis =
const_cast<AAInterFnReachabilityFunction *
>(
this);
10793 RQITy StackRQI(
A, From, To, ExclusionSet,
false);
10794 RQITy::Reachable
Result;
10795 if (!NonConstThis->checkQueryCache(
A, StackRQI, Result))
10796 return NonConstThis->isReachableImpl(
A, StackRQI,
10798 return Result == RQITy::Reachable::Yes;
10802 bool IsTemporaryRQI)
override {
10804 &RQI.From->getFunction()->getEntryBlock().front();
10805 if (EntryI != RQI.From &&
10806 !instructionCanReach(
A, *EntryI, *RQI.To,
nullptr))
10807 return rememberResult(
A, RQITy::Reachable::No, RQI,
false,
10810 auto CheckReachableCallBase = [&](CallBase *CB) {
10811 auto *CBEdges =
A.getAAFor<AACallEdges>(
10813 if (!CBEdges || !CBEdges->getState().isValidState())
10816 if (CBEdges->hasUnknownCallee())
10819 for (Function *Fn : CBEdges->getOptimisticEdges()) {
10830 if (Fn == getAnchorScope()) {
10831 if (EntryI == RQI.From)
10836 const AAInterFnReachability *InterFnReachability =
10838 DepClassTy::OPTIONAL);
10841 if (!InterFnReachability ||
10849 const auto *IntraFnReachability =
A.getAAFor<AAIntraFnReachability>(
10851 DepClassTy::OPTIONAL);
10859 return IntraFnReachability && !IntraFnReachability->isAssumedReachable(
10860 A, *RQI.From, CBInst, RQI.ExclusionSet);
10863 bool UsedExclusionSet =
true;
10864 bool UsedAssumedInformation =
false;
10865 if (!
A.checkForAllCallLikeInstructions(CheckCallBase, *
this,
10866 UsedAssumedInformation,
10868 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
10871 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
10875 void trackStatistics()
const override {}
10879template <
typename AAType>
10880static std::optional<Constant *>
10883 if (!Ty.isIntegerTy())
10891 std::optional<Constant *> COpt =
AA->getAssumedConstant(
A);
10893 if (!COpt.has_value()) {
10895 return std::nullopt;
10897 if (
auto *
C = *COpt) {
10908 std::optional<Value *> V;
10909 for (
auto &It : Values) {
10911 if (V.has_value() && !*V)
10914 if (!V.has_value())
10928 if (
A.hasSimplificationCallback(getIRPosition())) {
10929 indicatePessimisticFixpoint();
10932 Value *Stripped = getAssociatedValue().stripPointerCasts();
10934 addValue(
A, getState(), *Stripped, getCtxI(),
AA::AnyScope,
10936 indicateOptimisticFixpoint();
10939 AAPotentialValues::initialize(
A);
10943 const std::string getAsStr(Attributor *
A)
const override {
10945 llvm::raw_string_ostream OS(Str);
10950 template <
typename AAType>
10951 static std::optional<Value *> askOtherAA(Attributor &
A,
10952 const AbstractAttribute &AA,
10953 const IRPosition &IRP,
Type &Ty) {
10958 return std::nullopt;
10965 virtual void addValue(Attributor &
A, StateType &State,
Value &V,
10967 Function *AnchorScope)
const {
10971 for (
const auto &U : CB->
args()) {
10981 Type &Ty = *getAssociatedType();
10982 std::optional<Value *> SimpleV =
10983 askOtherAA<AAValueConstantRange>(
A, *
this, ValIRP, Ty);
10984 if (SimpleV.has_value() && !*SimpleV) {
10985 auto *PotentialConstantsAA =
A.getAAFor<AAPotentialConstantValues>(
10986 *
this, ValIRP, DepClassTy::OPTIONAL);
10987 if (PotentialConstantsAA && PotentialConstantsAA->isValidState()) {
10988 for (
const auto &It : PotentialConstantsAA->getAssumedSet())
10989 State.unionAssumed({{*ConstantInt::get(&Ty, It),
nullptr}, S});
10990 if (PotentialConstantsAA->undefIsContained())
10995 if (!SimpleV.has_value())
11007 State.unionAssumed({{*VPtr, CtxI}, S});
11013 AA::ValueAndContext
I;
11017 return II.I ==
I &&
II.S == S;
11020 return std::tie(
I, S) < std::tie(
II.I,
II.S);
11024 bool recurseForValue(Attributor &
A,
const IRPosition &IRP,
AA::ValueScope S) {
11025 SmallMapVector<AA::ValueAndContext, int, 8> ValueScopeMap;
11030 bool UsedAssumedInformation =
false;
11032 if (!
A.getAssumedSimplifiedValues(IRP,
this, Values, CS,
11033 UsedAssumedInformation))
11036 for (
auto &It : Values)
11037 ValueScopeMap[It] += CS;
11039 for (
auto &It : ValueScopeMap)
11040 addValue(
A, getState(), *It.first.getValue(), It.first.getCtxI(),
11046 void giveUpOnIntraprocedural(Attributor &
A) {
11047 auto NewS = StateType::getBestState(getState());
11048 for (
const auto &It : getAssumedSet()) {
11051 addValue(
A, NewS, *It.first.getValue(), It.first.getCtxI(),
11054 assert(!undefIsContained() &&
"Undef should be an explicit value!");
11062 getState() = StateType::getBestState(getState());
11063 getState().unionAssumed({{getAssociatedValue(), getCtxI()},
AA::AnyScope});
11064 AAPotentialValues::indicateOptimisticFixpoint();
11065 return ChangeStatus::CHANGED;
11070 return indicatePessimisticFixpoint();
11078 if (!getAssumedSimplifiedValues(
A, Values, S))
11080 Value &OldV = getAssociatedValue();
11083 Value *NewV = getSingleValue(
A, *
this, getIRPosition(), Values);
11084 if (!NewV || NewV == &OldV)
11089 if (
A.changeAfterManifest(getIRPosition(), *NewV))
11090 return ChangeStatus::CHANGED;
11092 return ChangeStatus::UNCHANGED;
11095 bool getAssumedSimplifiedValues(
11096 Attributor &
A, SmallVectorImpl<AA::ValueAndContext> &Values,
11097 AA::ValueScope S,
bool RecurseForSelectAndPHI =
false)
const override {
11098 if (!isValidState())
11100 bool UsedAssumedInformation =
false;
11101 for (
const auto &It : getAssumedSet())
11102 if (It.second & S) {
11103 if (RecurseForSelectAndPHI && (
isa<PHINode>(It.first.getValue()) ||
11105 if (
A.getAssumedSimplifiedValues(
11107 this, Values, S, UsedAssumedInformation))
11112 assert(!undefIsContained() &&
"Undef should be an explicit value!");
11117struct AAPotentialValuesFloating : AAPotentialValuesImpl {
11118 AAPotentialValuesFloating(
const IRPosition &IRP, Attributor &
A)
11119 : AAPotentialValuesImpl(IRP,
A) {}
11123 auto AssumedBefore = getAssumed();
11125 genericValueTraversal(
A, &getAssociatedValue());
11127 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11128 : ChangeStatus::CHANGED;
11132 struct LivenessInfo {
11133 const AAIsDead *LivenessAA =
nullptr;
11134 bool AnyDead =
false;
11144 SmallVectorImpl<ItemInfo> &Worklist) {
11147 bool UsedAssumedInformation =
false;
11149 auto GetSimplifiedValues = [&](
Value &
V,
11151 if (!
A.getAssumedSimplifiedValues(
11155 Values.
push_back(AA::ValueAndContext{
V,
II.I.getCtxI()});
11157 return Values.
empty();
11159 if (GetSimplifiedValues(*
LHS, LHSValues))
11161 if (GetSimplifiedValues(*
RHS, RHSValues))
11166 InformationCache &InfoCache =
A.getInfoCache();
11173 F ?
A.getInfoCache().getTargetLibraryInfoForFunction(*
F) :
nullptr;
11178 const DataLayout &
DL =
A.getDataLayout();
11179 SimplifyQuery Q(
DL, TLI, DT, AC, CmpI);
11181 auto CheckPair = [&](
Value &LHSV,
Value &RHSV) {
11184 nullptr,
II.S, getAnchorScope());
11190 if (&LHSV == &RHSV &&
11192 Constant *NewV = ConstantInt::get(Type::getInt1Ty(Ctx),
11194 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11201 if (TypedLHS && TypedRHS) {
11203 if (NewV && NewV != &Cmp) {
11204 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11216 if (!LHSIsNull && !RHSIsNull)
11222 assert((LHSIsNull || RHSIsNull) &&
11223 "Expected nullptr versus non-nullptr comparison at this point");
11226 unsigned PtrIdx = LHSIsNull;
11227 bool IsKnownNonNull;
11230 DepClassTy::REQUIRED, IsKnownNonNull);
11231 if (!IsAssumedNonNull)
11237 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11242 for (
auto &LHSValue : LHSValues)
11243 for (
auto &RHSValue : RHSValues)
11244 if (!CheckPair(*LHSValue.getValue(), *RHSValue.getValue()))
11249 bool handleSelectInst(Attributor &
A, SelectInst &SI, ItemInfo
II,
11250 SmallVectorImpl<ItemInfo> &Worklist) {
11252 bool UsedAssumedInformation =
false;
11254 std::optional<Constant *>
C =
11255 A.getAssumedConstant(*
SI.getCondition(), *
this, UsedAssumedInformation);
11256 bool NoValueYet = !
C.has_value();
11264 }
else if (&SI == &getAssociatedValue()) {
11269 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
11271 if (!SimpleV.has_value())
11274 addValue(
A, getState(), **SimpleV, CtxI,
II.S, getAnchorScope());
11282 bool handleLoadInst(Attributor &
A, LoadInst &LI, ItemInfo
II,
11283 SmallVectorImpl<ItemInfo> &Worklist) {
11284 SmallSetVector<Value *, 4> PotentialCopies;
11285 SmallSetVector<Instruction *, 4> PotentialValueOrigins;
11286 bool UsedAssumedInformation =
false;
11288 PotentialValueOrigins, *
this,
11289 UsedAssumedInformation,
11291 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Failed to get potentially "
11292 "loaded values for load instruction "
11300 InformationCache &InfoCache =
A.getInfoCache();
11302 if (!
llvm::all_of(PotentialValueOrigins, [&](Instruction *
I) {
11306 return A.isAssumedDead(
SI->getOperandUse(0),
this,
11308 UsedAssumedInformation,
11310 return A.isAssumedDead(*
I,
this,
nullptr,
11311 UsedAssumedInformation,
11314 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Load is onl used by assumes "
11315 "and we cannot delete all the stores: "
11326 bool AllLocal = ScopeIsLocal;
11331 if (!DynamicallyUnique) {
11332 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Not all potentially loaded "
11333 "values are dynamically unique: "
11338 for (
auto *PotentialCopy : PotentialCopies) {
11340 Worklist.
push_back({{*PotentialCopy, CtxI},
II.S});
11345 if (!AllLocal && ScopeIsLocal)
11350 bool handlePHINode(
11351 Attributor &
A, PHINode &
PHI, ItemInfo
II,
11352 SmallVectorImpl<ItemInfo> &Worklist,
11353 SmallMapVector<const Function *, LivenessInfo, 4> &LivenessAAs) {
11354 auto GetLivenessInfo = [&](
const Function &
F) -> LivenessInfo & {
11355 LivenessInfo &LI = LivenessAAs[&
F];
11356 if (!LI.LivenessAA)
11362 if (&
PHI == &getAssociatedValue()) {
11363 LivenessInfo &LI = GetLivenessInfo(*
PHI.getFunction());
11365 A.getInfoCache().getAnalysisResultForFunction<CycleAnalysis>(
11366 *
PHI.getFunction());
11370 for (
unsigned u = 0, e =
PHI.getNumIncomingValues(); u < e; u++) {
11372 if (LI.LivenessAA &&
11373 LI.LivenessAA->isEdgeDead(IncomingBB,
PHI.getParent())) {
11392 bool UsedAssumedInformation =
false;
11393 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
11395 if (!SimpleV.has_value())
11399 addValue(
A, getState(), **SimpleV, &
PHI,
II.S, getAnchorScope());
11406 bool handleGenericInst(Attributor &
A, Instruction &
I, ItemInfo
II,
11407 SmallVectorImpl<ItemInfo> &Worklist) {
11408 bool SomeSimplified =
false;
11409 bool UsedAssumedInformation =
false;
11411 SmallVector<Value *, 8> NewOps(
I.getNumOperands());
11414 const auto &SimplifiedOp =
A.getAssumedSimplified(
11419 if (!SimplifiedOp.has_value())
11423 NewOps[Idx] = *SimplifiedOp;
11427 SomeSimplified |= (NewOps[Idx] !=
Op);
11433 if (!SomeSimplified)
11436 InformationCache &InfoCache =
A.getInfoCache();
11440 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
11443 const DataLayout &
DL =
I.getDataLayout();
11444 SimplifyQuery Q(
DL, TLI, DT, AC, &
I);
11446 if (!NewV || NewV == &
I)
11449 LLVM_DEBUG(
dbgs() <<
"Generic inst " <<
I <<
" assumed simplified to "
11456 Attributor &
A, Instruction &
I, ItemInfo
II,
11457 SmallVectorImpl<ItemInfo> &Worklist,
11458 SmallMapVector<const Function *, LivenessInfo, 4> &LivenessAAs) {
11461 CI->getPredicate(),
II, Worklist);
11463 switch (
I.getOpcode()) {
11464 case Instruction::Select:
11466 case Instruction::PHI:
11468 case Instruction::Load:
11471 return handleGenericInst(
A,
I,
II, Worklist);
11476 void genericValueTraversal(Attributor &
A,
Value *InitialV) {
11477 SmallMapVector<const Function *, LivenessInfo, 4> LivenessAAs;
11479 SmallSet<ItemInfo, 16> Visited;
11498 LLVM_DEBUG(
dbgs() <<
"Generic value traversal reached iteration limit: "
11499 << Iteration <<
"!\n");
11500 addValue(
A, getState(), *V, CtxI, S, getAnchorScope());
11506 Value *NewV =
nullptr;
11507 if (
V->getType()->isPointerTy()) {
11513 for (Argument &Arg :
Callee->args())
11520 if (NewV && NewV != V) {
11521 Worklist.
push_back({{*NewV, CtxI}, S});
11535 if (V == InitialV && CtxI == getCtxI()) {
11536 indicatePessimisticFixpoint();
11540 addValue(
A, getState(), *V, CtxI, S, getAnchorScope());
11541 }
while (!Worklist.
empty());
11545 for (
auto &It : LivenessAAs)
11546 if (It.second.AnyDead)
11547 A.recordDependence(*It.second.LivenessAA, *
this, DepClassTy::OPTIONAL);
11551 void trackStatistics()
const override {
11556struct AAPotentialValuesArgument final : AAPotentialValuesImpl {
11557 using Base = AAPotentialValuesImpl;
11558 AAPotentialValuesArgument(
const IRPosition &IRP, Attributor &
A)
11565 indicatePessimisticFixpoint();
11570 auto AssumedBefore = getAssumed();
11572 unsigned ArgNo = getCalleeArgNo();
11574 bool UsedAssumedInformation =
false;
11576 auto CallSitePred = [&](AbstractCallSite ACS) {
11578 if (CSArgIRP.getPositionKind() == IRP_INVALID)
11581 if (!
A.getAssumedSimplifiedValues(CSArgIRP,
this, Values,
11583 UsedAssumedInformation))
11586 return isValidState();
11589 if (!
A.checkForAllCallSites(CallSitePred, *
this,
11591 UsedAssumedInformation))
11592 return indicatePessimisticFixpoint();
11594 Function *Fn = getAssociatedFunction();
11595 bool AnyNonLocal =
false;
11596 for (
auto &It : Values) {
11598 addValue(
A, getState(), *It.getValue(), It.getCtxI(),
AA::AnyScope,
11603 return indicatePessimisticFixpoint();
11607 addValue(
A, getState(), *It.getValue(), It.getCtxI(),
AA::AnyScope,
11613 AnyNonLocal =
true;
11615 assert(!undefIsContained() &&
"Undef should be an explicit value!");
11617 giveUpOnIntraprocedural(
A);
11619 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11620 : ChangeStatus::CHANGED;
11624 void trackStatistics()
const override {
11629struct AAPotentialValuesReturned :
public AAPotentialValuesFloating {
11630 using Base = AAPotentialValuesFloating;
11631 AAPotentialValuesReturned(
const IRPosition &IRP, Attributor &
A)
11637 if (!
F ||
F->isDeclaration() ||
F->getReturnType()->isVoidTy()) {
11638 indicatePessimisticFixpoint();
11642 for (Argument &Arg :
F->args())
11645 ReturnedArg = &Arg;
11648 if (!
A.isFunctionIPOAmendable(*
F) ||
11649 A.hasSimplificationCallback(getIRPosition())) {
11651 indicatePessimisticFixpoint();
11653 indicateOptimisticFixpoint();
11659 auto AssumedBefore = getAssumed();
11660 bool UsedAssumedInformation =
false;
11663 Function *AnchorScope = getAnchorScope();
11669 UsedAssumedInformation,
11675 bool AllInterAreIntra =
false;
11678 llvm::all_of(Values, [&](
const AA::ValueAndContext &VAC) {
11682 for (
const AA::ValueAndContext &VAC : Values) {
11683 addValue(
A, getState(), *VAC.
getValue(),
11687 if (AllInterAreIntra)
11694 HandleReturnedValue(*ReturnedArg,
nullptr,
true);
11697 bool AddValues =
true;
11700 addValue(
A, getState(), *RetI.getOperand(0), &RetI,
AA::AnyScope,
11704 return HandleReturnedValue(*RetI.getOperand(0), &RetI, AddValues);
11707 if (!
A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
11708 UsedAssumedInformation,
11710 return indicatePessimisticFixpoint();
11713 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11714 : ChangeStatus::CHANGED;
11719 return ChangeStatus::UNCHANGED;
11721 if (!getAssumedSimplifiedValues(
A, Values, AA::ValueScope::Intraprocedural,
11723 return ChangeStatus::UNCHANGED;
11724 Value *NewVal = getSingleValue(
A, *
this, getIRPosition(), Values);
11726 return ChangeStatus::UNCHANGED;
11731 "Number of function with unique return");
11734 {Attribute::get(Arg->
getContext(), Attribute::Returned)});
11739 Value *RetOp = RetI.getOperand(0);
11743 if (
A.changeUseAfterManifest(RetI.getOperandUse(0), *NewVal))
11744 Changed = ChangeStatus::CHANGED;
11747 bool UsedAssumedInformation =
false;
11748 (void)
A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
11749 UsedAssumedInformation,
11755 return AAPotentialValues::indicatePessimisticFixpoint();
11759 void trackStatistics()
const override{
11766struct AAPotentialValuesFunction : AAPotentialValuesImpl {
11767 AAPotentialValuesFunction(
const IRPosition &IRP, Attributor &
A)
11768 : AAPotentialValuesImpl(IRP,
A) {}
11777 void trackStatistics()
const override {
11782struct AAPotentialValuesCallSite : AAPotentialValuesFunction {
11783 AAPotentialValuesCallSite(
const IRPosition &IRP, Attributor &
A)
11784 : AAPotentialValuesFunction(IRP,
A) {}
11787 void trackStatistics()
const override {
11792struct AAPotentialValuesCallSiteReturned : AAPotentialValuesImpl {
11793 AAPotentialValuesCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
11794 : AAPotentialValuesImpl(IRP,
A) {}
11798 auto AssumedBefore = getAssumed();
11802 return indicatePessimisticFixpoint();
11804 bool UsedAssumedInformation =
false;
11808 UsedAssumedInformation))
11809 return indicatePessimisticFixpoint();
11816 Values, S, UsedAssumedInformation))
11819 for (
auto &It : Values) {
11820 Value *
V = It.getValue();
11821 std::optional<Value *> CallerV =
A.translateArgumentToCallSiteContent(
11822 V, *CB, *
this, UsedAssumedInformation);
11823 if (!CallerV.has_value()) {
11827 V = *CallerV ? *CallerV :
V;
11833 giveUpOnIntraprocedural(
A);
11836 addValue(
A, getState(), *V, CB, S, getAnchorScope());
11841 return indicatePessimisticFixpoint();
11843 return indicatePessimisticFixpoint();
11844 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11845 : ChangeStatus::CHANGED;
11849 return AAPotentialValues::indicatePessimisticFixpoint();
11853 void trackStatistics()
const override {
11858struct AAPotentialValuesCallSiteArgument : AAPotentialValuesFloating {
11859 AAPotentialValuesCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
11860 : AAPotentialValuesFloating(IRP,
A) {}
11863 void trackStatistics()
const override {
11871struct AAAssumptionInfoImpl :
public AAAssumptionInfo {
11872 AAAssumptionInfoImpl(
const IRPosition &IRP, Attributor &
A,
11873 const DenseSet<StringRef> &Known)
11874 : AAAssumptionInfo(IRP,
A, Known) {}
11879 if (getKnown().isUniversal())
11880 return ChangeStatus::UNCHANGED;
11882 const IRPosition &IRP = getIRPosition();
11884 getAssumed().getSet().
end());
11886 return A.manifestAttrs(IRP,
11893 bool hasAssumption(
const StringRef Assumption)
const override {
11894 return isValidState() && setContains(Assumption);
11898 const std::string getAsStr(Attributor *
A)
const override {
11899 const SetContents &Known = getKnown();
11900 const SetContents &Assumed = getAssumed();
11904 const std::string KnownStr =
llvm::join(Set,
",");
11906 std::string AssumedStr =
"Universal";
11907 if (!Assumed.isUniversal()) {
11908 Set.assign(Assumed.getSet().begin(), Assumed.getSet().end());
11911 return "Known [" + KnownStr +
"]," +
" Assumed [" + AssumedStr +
"]";
11926struct AAAssumptionInfoFunction final : AAAssumptionInfoImpl {
11927 AAAssumptionInfoFunction(
const IRPosition &IRP, Attributor &
A)
11928 : AAAssumptionInfoImpl(IRP,
A,
11935 auto CallSitePred = [&](AbstractCallSite ACS) {
11936 const auto *AssumptionAA =
A.getAAFor<AAAssumptionInfo>(
11938 DepClassTy::REQUIRED);
11942 Changed |= getIntersection(AssumptionAA->getAssumed());
11943 return !getAssumed().empty() || !getKnown().empty();
11946 bool UsedAssumedInformation =
false;
11951 if (!
A.checkForAllCallSites(CallSitePred, *
this,
true,
11952 UsedAssumedInformation))
11953 return indicatePessimisticFixpoint();
11955 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11958 void trackStatistics()
const override {}
11962struct AAAssumptionInfoCallSite final : AAAssumptionInfoImpl {
11964 AAAssumptionInfoCallSite(
const IRPosition &IRP, Attributor &
A)
11965 : AAAssumptionInfoImpl(IRP,
A, getInitialAssumptions(IRP)) {}
11970 A.getAAFor<AAAssumptionInfo>(*
this, FnPos, DepClassTy::REQUIRED);
11976 auto *AssumptionAA =
11977 A.getAAFor<AAAssumptionInfo>(*
this, FnPos, DepClassTy::REQUIRED);
11979 return indicatePessimisticFixpoint();
11980 bool Changed = getIntersection(AssumptionAA->getAssumed());
11981 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11985 void trackStatistics()
const override {}
11990 DenseSet<StringRef> getInitialAssumptions(
const IRPosition &IRP) {
11997 return Assumptions;
12012struct AAUnderlyingObjectsImpl
12018 const std::string getAsStr(
Attributor *
A)
const override {
12019 if (!isValidState())
12020 return "<invalid>";
12023 OS <<
"underlying objects: inter " << InterAssumedUnderlyingObjects.size()
12024 <<
" objects, intra " << IntraAssumedUnderlyingObjects.size()
12026 if (!InterAssumedUnderlyingObjects.empty()) {
12027 OS <<
"inter objects:\n";
12028 for (
auto *Obj : InterAssumedUnderlyingObjects)
12029 OS << *Obj <<
'\n';
12031 if (!IntraAssumedUnderlyingObjects.empty()) {
12032 OS <<
"intra objects:\n";
12033 for (
auto *Obj : IntraAssumedUnderlyingObjects)
12034 OS << *
Obj <<
'\n';
12040 void trackStatistics()
const override {}
12044 auto &Ptr = getAssociatedValue();
12046 bool UsedAssumedInformation =
false;
12047 auto DoUpdate = [&](SmallSetVector<Value *, 8> &UnderlyingObjects,
12049 SmallPtrSet<Value *, 8> SeenObjects;
12053 Scope, UsedAssumedInformation))
12054 return UnderlyingObjects.
insert(&Ptr);
12058 for (
unsigned I = 0;
I < Values.
size(); ++
I) {
12059 auto &VAC = Values[
I];
12062 if (!SeenObjects.
insert(UO ? UO : Obj).second)
12064 if (UO && UO != Obj) {
12070 const auto *OtherAA =
A.getAAFor<AAUnderlyingObjects>(
12072 auto Pred = [&](
Value &
V) {
12080 if (!OtherAA || !OtherAA->forallUnderlyingObjects(Pred, Scope))
12082 "The forall call should not return false at this position");
12088 Changed |= handleIndirect(
A, *Obj, UnderlyingObjects, Scope,
12089 UsedAssumedInformation);
12095 for (
unsigned u = 0, e =
PHI->getNumIncomingValues(); u < e; u++) {
12097 handleIndirect(
A, *
PHI->getIncomingValue(u), UnderlyingObjects,
12098 Scope, UsedAssumedInformation);
12112 if (!UsedAssumedInformation)
12113 indicateOptimisticFixpoint();
12114 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
12117 bool forallUnderlyingObjects(
12118 function_ref<
bool(
Value &)> Pred,
12120 if (!isValidState())
12121 return Pred(getAssociatedValue());
12124 ? IntraAssumedUnderlyingObjects
12125 : InterAssumedUnderlyingObjects;
12126 for (
Value *Obj : AssumedUnderlyingObjects)
12136 bool handleIndirect(Attributor &
A,
Value &V,
12137 SmallSetVector<Value *, 8> &UnderlyingObjects,
12140 const auto *AA =
A.getAAFor<AAUnderlyingObjects>(
12142 auto Pred = [&](
Value &
V) {
12146 if (!AA || !AA->forallUnderlyingObjects(Pred, Scope))
12148 "The forall call should not return false at this position");
12154 SmallSetVector<Value *, 8> IntraAssumedUnderlyingObjects;
12156 SmallSetVector<Value *, 8> InterAssumedUnderlyingObjects;
12159struct AAUnderlyingObjectsFloating final : AAUnderlyingObjectsImpl {
12160 AAUnderlyingObjectsFloating(
const IRPosition &IRP, Attributor &
A)
12161 : AAUnderlyingObjectsImpl(IRP,
A) {}
12164struct AAUnderlyingObjectsArgument final : AAUnderlyingObjectsImpl {
12165 AAUnderlyingObjectsArgument(
const IRPosition &IRP, Attributor &
A)
12166 : AAUnderlyingObjectsImpl(IRP,
A) {}
12169struct AAUnderlyingObjectsCallSite final : AAUnderlyingObjectsImpl {
12170 AAUnderlyingObjectsCallSite(
const IRPosition &IRP, Attributor &
A)
12171 : AAUnderlyingObjectsImpl(IRP,
A) {}
12174struct AAUnderlyingObjectsCallSiteArgument final : AAUnderlyingObjectsImpl {
12175 AAUnderlyingObjectsCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
12176 : AAUnderlyingObjectsImpl(IRP,
A) {}
12179struct AAUnderlyingObjectsReturned final : AAUnderlyingObjectsImpl {
12180 AAUnderlyingObjectsReturned(
const IRPosition &IRP, Attributor &
A)
12181 : AAUnderlyingObjectsImpl(IRP,
A) {}
12184struct AAUnderlyingObjectsCallSiteReturned final : AAUnderlyingObjectsImpl {
12185 AAUnderlyingObjectsCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
12186 : AAUnderlyingObjectsImpl(IRP,
A) {}
12189struct AAUnderlyingObjectsFunction final : AAUnderlyingObjectsImpl {
12190 AAUnderlyingObjectsFunction(
const IRPosition &IRP, Attributor &
A)
12191 : AAUnderlyingObjectsImpl(IRP,
A) {}
12197struct AAGlobalValueInfoFloating :
public AAGlobalValueInfo {
12198 AAGlobalValueInfoFloating(
const IRPosition &IRP, Attributor &
A)
12199 : AAGlobalValueInfo(IRP,
A) {}
12204 bool checkUse(Attributor &
A,
const Use &U,
bool &Follow,
12205 SmallVectorImpl<const Value *> &Worklist) {
12212 LLVM_DEBUG(
dbgs() <<
"[AAGlobalValueInfo] Check use: " << *
U.get() <<
" in "
12213 << *UInst <<
"\n");
12216 int Idx = &
Cmp->getOperandUse(0) == &
U;
12219 return U == &getAnchorValue();
12224 auto CallSitePred = [&](AbstractCallSite ACS) {
12225 Worklist.
push_back(ACS.getInstruction());
12228 bool UsedAssumedInformation =
false;
12230 if (!
A.checkForAllCallSites(CallSitePred, *UInst->
getFunction(),
12232 UsedAssumedInformation))
12250 if (!Fn || !
A.isFunctionIPOAmendable(*Fn))
12259 unsigned NumUsesBefore =
Uses.size();
12261 SmallPtrSet<const Value *, 8> Visited;
12265 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
12273 return checkUse(
A, U, Follow, Worklist);
12275 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
12276 Uses.insert(&OldU);
12280 while (!Worklist.
empty()) {
12282 if (!Visited.
insert(V).second)
12284 if (!
A.checkForAllUses(UsePred, *
this, *V,
12286 DepClassTy::OPTIONAL,
12287 true, EquivalentUseCB)) {
12288 return indicatePessimisticFixpoint();
12292 return Uses.size() == NumUsesBefore ? ChangeStatus::UNCHANGED
12293 : ChangeStatus::CHANGED;
12296 bool isPotentialUse(
const Use &U)
const override {
12297 return !isValidState() ||
Uses.contains(&U);
12302 return ChangeStatus::UNCHANGED;
12306 const std::string getAsStr(Attributor *
A)
const override {
12307 return "[" + std::to_string(
Uses.size()) +
" uses]";
12310 void trackStatistics()
const override {
12316 SmallPtrSet<const Use *, 8>
Uses;
12322struct AAIndirectCallInfoCallSite :
public AAIndirectCallInfo {
12323 AAIndirectCallInfoCallSite(
const IRPosition &IRP, Attributor &
A)
12324 : AAIndirectCallInfo(IRP,
A) {}
12328 auto *MD = getCtxI()->getMetadata(LLVMContext::MD_callees);
12329 if (!MD && !
A.isClosedWorldModule())
12333 for (
const auto &
Op : MD->operands())
12335 PotentialCallees.insert(Callee);
12336 }
else if (
A.isClosedWorldModule()) {
12338 A.getInfoCache().getIndirectlyCallableFunctions(
A);
12339 PotentialCallees.insert_range(IndirectlyCallableFunctions);
12342 if (PotentialCallees.empty())
12343 indicateOptimisticFixpoint();
12351 SmallSetVector<Function *, 4> AssumedCalleesNow;
12352 bool AllCalleesKnownNow = AllCalleesKnown;
12354 auto CheckPotentialCalleeUse = [&](
Function &PotentialCallee,
12355 bool &UsedAssumedInformation) {
12356 const auto *GIAA =
A.getAAFor<AAGlobalValueInfo>(
12358 if (!GIAA || GIAA->isPotentialUse(CalleeUse))
12360 UsedAssumedInformation = !GIAA->isAtFixpoint();
12364 auto AddPotentialCallees = [&]() {
12365 for (
auto *PotentialCallee : PotentialCallees) {
12366 bool UsedAssumedInformation =
false;
12367 if (CheckPotentialCalleeUse(*PotentialCallee, UsedAssumedInformation))
12368 AssumedCalleesNow.
insert(PotentialCallee);
12374 bool UsedAssumedInformation =
false;
12377 AA::ValueScope::AnyScope,
12378 UsedAssumedInformation)) {
12379 if (PotentialCallees.empty())
12380 return indicatePessimisticFixpoint();
12381 AddPotentialCallees();
12386 auto CheckPotentialCallee = [&](
Function &Fn) {
12387 if (!PotentialCallees.empty() && !PotentialCallees.count(&Fn))
12390 auto &CachedResult = FilterResults[&Fn];
12391 if (CachedResult.has_value())
12392 return CachedResult.value();
12394 bool UsedAssumedInformation =
false;
12395 if (!CheckPotentialCalleeUse(Fn, UsedAssumedInformation)) {
12396 if (!UsedAssumedInformation)
12397 CachedResult =
false;
12406 for (
int I = NumCBArgs;
I < NumFnArgs; ++
I) {
12407 bool IsKnown =
false;
12410 DepClassTy::OPTIONAL, IsKnown)) {
12412 CachedResult =
false;
12417 CachedResult =
true;
12423 for (
auto &VAC : Values) {
12431 if (CheckPotentialCallee(*VACFn))
12432 AssumedCalleesNow.
insert(VACFn);
12435 if (!PotentialCallees.empty()) {
12436 AddPotentialCallees();
12439 AllCalleesKnownNow =
false;
12442 if (AssumedCalleesNow == AssumedCallees &&
12443 AllCalleesKnown == AllCalleesKnownNow)
12444 return ChangeStatus::UNCHANGED;
12446 std::swap(AssumedCallees, AssumedCalleesNow);
12447 AllCalleesKnown = AllCalleesKnownNow;
12448 return ChangeStatus::CHANGED;
12454 if (!AllCalleesKnown && AssumedCallees.empty())
12455 return ChangeStatus::UNCHANGED;
12458 bool UsedAssumedInformation =
false;
12459 if (
A.isAssumedDead(*CB,
this,
nullptr,
12460 UsedAssumedInformation))
12461 return ChangeStatus::UNCHANGED;
12465 if (
FP->getType()->getPointerAddressSpace())
12466 FP =
new AddrSpaceCastInst(
FP, PointerType::get(
FP->getContext(), 0),
12476 if (AssumedCallees.empty()) {
12477 assert(AllCalleesKnown &&
12478 "Expected all callees to be known if there are none.");
12479 A.changeToUnreachableAfterManifest(CB);
12480 return ChangeStatus::CHANGED;
12484 if (AllCalleesKnown && AssumedCallees.size() == 1) {
12485 auto *NewCallee = AssumedCallees.front();
12488 NumIndirectCallsPromoted++;
12489 return ChangeStatus::CHANGED;
12496 A.deleteAfterManifest(*CB);
12497 return ChangeStatus::CHANGED;
12507 bool SpecializedForAnyCallees =
false;
12508 bool SpecializedForAllCallees = AllCalleesKnown;
12509 ICmpInst *LastCmp =
nullptr;
12512 for (Function *NewCallee : AssumedCallees) {
12513 if (!
A.shouldSpecializeCallSiteForCallee(*
this, *CB, *NewCallee,
12514 AssumedCallees.size())) {
12515 SkippedAssumedCallees.
push_back(NewCallee);
12516 SpecializedForAllCallees =
false;
12519 SpecializedForAnyCallees =
true;
12525 A.registerManifestAddedBasicBlock(*ThenTI->
getParent());
12526 A.registerManifestAddedBasicBlock(*IP->getParent());
12532 A.registerManifestAddedBasicBlock(*ElseBB);
12534 SplitTI->replaceUsesOfWith(CBBB, ElseBB);
12539 CastInst *RetBC =
nullptr;
12540 CallInst *NewCall =
nullptr;
12545 NumIndirectCallsPromoted++;
12553 auto AttachCalleeMetadata = [&](CallBase &IndirectCB) {
12554 if (!AllCalleesKnown)
12555 return ChangeStatus::UNCHANGED;
12556 MDBuilder MDB(IndirectCB.getContext());
12557 MDNode *Callees = MDB.createCallees(SkippedAssumedCallees);
12558 IndirectCB.setMetadata(LLVMContext::MD_callees, Callees);
12559 return ChangeStatus::CHANGED;
12562 if (!SpecializedForAnyCallees)
12563 return AttachCalleeMetadata(*CB);
12566 if (SpecializedForAllCallees) {
12569 new UnreachableInst(IP->getContext(), IP);
12570 IP->eraseFromParent();
12573 CBClone->setName(CB->
getName());
12574 CBClone->insertBefore(*IP->getParent(), IP);
12575 NewCalls.
push_back({CBClone,
nullptr});
12576 AttachCalleeMetadata(*CBClone);
12583 CB->
getParent()->getFirstInsertionPt());
12584 for (
auto &It : NewCalls) {
12585 CallBase *NewCall = It.first;
12586 Instruction *CallRet = It.second ? It.second : It.first;
12598 A.deleteAfterManifest(*CB);
12599 Changed = ChangeStatus::CHANGED;
12605 const std::string getAsStr(Attributor *
A)
const override {
12606 return std::string(AllCalleesKnown ?
"eliminate" :
"specialize") +
12607 " indirect call site with " + std::to_string(AssumedCallees.size()) +
12611 void trackStatistics()
const override {
12612 if (AllCalleesKnown) {
12614 Eliminated, CallSites,
12615 "Number of indirect call sites eliminated via specialization")
12618 "Number of indirect call sites specialized")
12622 bool foreachCallee(function_ref<
bool(Function *)> CB)
const override {
12623 return isValidState() && AllCalleesKnown &&
all_of(AssumedCallees, CB);
12628 DenseMap<Function *, std::optional<bool>> FilterResults;
12632 SmallSetVector<Function *, 4> PotentialCallees;
12636 SmallSetVector<Function *, 4> AssumedCallees;
12640 bool AllCalleesKnown =
true;
12647struct AAInvariantLoadPointerImpl
12648 :
public StateWrapper<BitIntegerState<uint8_t, 15>,
12649 AAInvariantLoadPointer> {
12653 IS_NOALIAS = 1 << 0,
12656 IS_NOEFFECT = 1 << 1,
12658 IS_LOCALLY_INVARIANT = 1 << 2,
12660 IS_LOCALLY_CONSTRAINED = 1 << 3,
12662 IS_BEST_STATE = IS_NOALIAS | IS_NOEFFECT | IS_LOCALLY_INVARIANT |
12663 IS_LOCALLY_CONSTRAINED,
12665 static_assert(getBestState() == IS_BEST_STATE,
"Unexpected best state");
12668 StateWrapper<BitIntegerState<uint8_t, 15>, AAInvariantLoadPointer>;
12672 AAInvariantLoadPointerImpl(
const IRPosition &IRP, Attributor &
A)
12675 bool isKnownInvariant()
const final {
12676 return isKnownLocallyInvariant() && isKnown(IS_LOCALLY_CONSTRAINED);
12679 bool isKnownLocallyInvariant()
const final {
12680 if (isKnown(IS_LOCALLY_INVARIANT))
12682 return isKnown(IS_NOALIAS | IS_NOEFFECT);
12685 bool isAssumedInvariant()
const final {
12686 return isAssumedLocallyInvariant() && isAssumed(IS_LOCALLY_CONSTRAINED);
12689 bool isAssumedLocallyInvariant()
const final {
12690 if (isAssumed(IS_LOCALLY_INVARIANT))
12692 return isAssumed(IS_NOALIAS | IS_NOEFFECT);
12699 if (requiresNoAlias() && !isAssumed(IS_NOALIAS))
12700 return indicatePessimisticFixpoint();
12704 Changed |= updateLocalInvariance(
A);
12710 if (!isKnownInvariant())
12711 return ChangeStatus::UNCHANGED;
12714 const Value *Ptr = &getAssociatedValue();
12715 const auto TagInvariantLoads = [&](
const Use &
U,
bool &) {
12716 if (
U.get() != Ptr)
12724 if (!
A.isRunOn(
I->getFunction()))
12727 if (
I->hasMetadata(LLVMContext::MD_invariant_load))
12731 LI->setMetadata(LLVMContext::MD_invariant_load,
12733 Changed = ChangeStatus::CHANGED;
12738 (void)
A.checkForAllUses(TagInvariantLoads, *
this, *Ptr);
12743 const std::string getAsStr(Attributor *)
const override {
12744 if (isKnownInvariant())
12745 return "load-invariant pointer";
12746 return "non-invariant pointer";
12750 void trackStatistics()
const override {}
12754 bool requiresNoAlias()
const {
12755 switch (getPositionKind()) {
12761 case IRP_CALL_SITE:
12763 case IRP_CALL_SITE_RETURNED: {
12768 case IRP_ARGUMENT: {
12769 const Function *
F = getAssociatedFunction();
12770 assert(
F &&
"no associated function for argument");
12776 bool isExternal()
const {
12777 const Function *
F = getAssociatedFunction();
12781 getPositionKind() != IRP_CALL_SITE_RETURNED;
12785 if (isKnown(IS_NOALIAS) || !isAssumed(IS_NOALIAS))
12786 return ChangeStatus::UNCHANGED;
12789 if (
const auto *ANoAlias =
A.getOrCreateAAFor<AANoAlias>(
12790 getIRPosition(),
this, DepClassTy::REQUIRED)) {
12791 if (ANoAlias->isKnownNoAlias()) {
12792 addKnownBits(IS_NOALIAS);
12793 return ChangeStatus::CHANGED;
12796 if (!ANoAlias->isAssumedNoAlias()) {
12797 removeAssumedBits(IS_NOALIAS);
12798 return ChangeStatus::CHANGED;
12801 return ChangeStatus::UNCHANGED;
12806 if (
const Argument *Arg = getAssociatedArgument()) {
12808 addKnownBits(IS_NOALIAS);
12809 return ChangeStatus::UNCHANGED;
12814 removeAssumedBits(IS_NOALIAS);
12815 return ChangeStatus::CHANGED;
12818 return ChangeStatus::UNCHANGED;
12822 if (isKnown(IS_NOEFFECT) || !isAssumed(IS_NOEFFECT))
12823 return ChangeStatus::UNCHANGED;
12825 if (!getAssociatedFunction())
12826 return indicatePessimisticFixpoint();
12829 return indicatePessimisticFixpoint();
12831 const auto HasNoEffectLoads = [&](
const Use &
U,
bool &) {
12833 return !LI || !LI->mayHaveSideEffects();
12835 if (!
A.checkForAllUses(HasNoEffectLoads, *
this, getAssociatedValue()))
12836 return indicatePessimisticFixpoint();
12838 if (
const auto *AMemoryBehavior =
A.getOrCreateAAFor<AAMemoryBehavior>(
12839 getIRPosition(),
this, DepClassTy::REQUIRED)) {
12842 if (!AMemoryBehavior->isAssumedReadOnly())
12843 return indicatePessimisticFixpoint();
12845 if (AMemoryBehavior->isKnownReadOnly()) {
12846 addKnownBits(IS_NOEFFECT);
12847 return ChangeStatus::UNCHANGED;
12850 return ChangeStatus::UNCHANGED;
12853 if (
const Argument *Arg = getAssociatedArgument()) {
12855 addKnownBits(IS_NOEFFECT);
12856 return ChangeStatus::UNCHANGED;
12861 return indicatePessimisticFixpoint();
12864 return ChangeStatus::UNCHANGED;
12868 if (isKnown(IS_LOCALLY_INVARIANT) || !isAssumed(IS_LOCALLY_INVARIANT))
12869 return ChangeStatus::UNCHANGED;
12872 const auto *AUO =
A.getOrCreateAAFor<AAUnderlyingObjects>(
12873 getIRPosition(),
this, DepClassTy::REQUIRED);
12875 return ChangeStatus::UNCHANGED;
12877 bool UsedAssumedInformation =
false;
12878 const auto IsLocallyInvariantLoadIfPointer = [&](
const Value &
V) {
12879 if (!
V.getType()->isPointerTy())
12881 const auto *IsInvariantLoadPointer =
12883 DepClassTy::REQUIRED);
12885 if (!IsInvariantLoadPointer)
12888 if (IsInvariantLoadPointer->isKnownLocallyInvariant())
12890 if (!IsInvariantLoadPointer->isAssumedLocallyInvariant())
12893 UsedAssumedInformation =
true;
12896 if (!AUO->forallUnderlyingObjects(IsLocallyInvariantLoadIfPointer))
12897 return indicatePessimisticFixpoint();
12903 if (!IsLocallyInvariantLoadIfPointer(*Arg))
12904 return indicatePessimisticFixpoint();
12909 if (!UsedAssumedInformation) {
12911 addKnownBits(IS_LOCALLY_INVARIANT);
12912 return ChangeStatus::CHANGED;
12915 return ChangeStatus::UNCHANGED;
12919struct AAInvariantLoadPointerFloating final : AAInvariantLoadPointerImpl {
12920 AAInvariantLoadPointerFloating(
const IRPosition &IRP, Attributor &
A)
12921 : AAInvariantLoadPointerImpl(IRP,
A) {}
12924struct AAInvariantLoadPointerReturned final : AAInvariantLoadPointerImpl {
12925 AAInvariantLoadPointerReturned(
const IRPosition &IRP, Attributor &
A)
12926 : AAInvariantLoadPointerImpl(IRP,
A) {}
12929 removeAssumedBits(IS_LOCALLY_CONSTRAINED);
12933struct AAInvariantLoadPointerCallSiteReturned final
12934 : AAInvariantLoadPointerImpl {
12935 AAInvariantLoadPointerCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
12936 : AAInvariantLoadPointerImpl(IRP,
A) {}
12939 const Function *
F = getAssociatedFunction();
12940 assert(
F &&
"no associated function for return from call");
12942 if (!
F->isDeclaration() && !
F->isIntrinsic())
12943 return AAInvariantLoadPointerImpl::initialize(
A);
12948 return AAInvariantLoadPointerImpl::initialize(
A);
12950 if (
F->onlyReadsMemory() &&
F->hasNoSync())
12951 return AAInvariantLoadPointerImpl::initialize(
A);
12955 indicatePessimisticFixpoint();
12959struct AAInvariantLoadPointerArgument final : AAInvariantLoadPointerImpl {
12960 AAInvariantLoadPointerArgument(
const IRPosition &IRP, Attributor &
A)
12961 : AAInvariantLoadPointerImpl(IRP,
A) {}
12964 const Function *
F = getAssociatedFunction();
12965 assert(
F &&
"no associated function for argument");
12968 addKnownBits(IS_LOCALLY_CONSTRAINED);
12972 if (!
F->hasLocalLinkage())
12973 removeAssumedBits(IS_LOCALLY_CONSTRAINED);
12977struct AAInvariantLoadPointerCallSiteArgument final
12978 : AAInvariantLoadPointerImpl {
12979 AAInvariantLoadPointerCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
12980 : AAInvariantLoadPointerImpl(IRP,
A) {}
12987template <
typename InstType>
12988static bool makeChange(Attributor &
A, InstType *MemInst,
const Use &U,
12989 Value *OriginalValue, PointerType *NewPtrTy,
12990 bool UseOriginalValue) {
12991 if (
U.getOperandNo() != InstType::getPointerOperandIndex())
12994 if (MemInst->isVolatile()) {
12995 auto *
TTI =
A.getInfoCache().getAnalysisResultForFunction<TargetIRAnalysis>(
12996 *MemInst->getFunction());
12997 unsigned NewAS = NewPtrTy->getPointerAddressSpace();
13002 if (UseOriginalValue) {
13003 A.changeUseAfterManifest(
const_cast<Use &
>(U), *OriginalValue);
13007 Instruction *CastInst =
new AddrSpaceCastInst(OriginalValue, NewPtrTy);
13009 A.changeUseAfterManifest(
const_cast<Use &
>(U), *CastInst);
13013struct AAAddressSpaceImpl :
public AAAddressSpace {
13014 AAAddressSpaceImpl(
const IRPosition &IRP, Attributor &
A)
13015 : AAAddressSpace(IRP,
A) {}
13018 assert(isValidState() &&
"the AA is invalid");
13019 return AssumedAddressSpace;
13024 assert(getAssociatedType()->isPtrOrPtrVectorTy() &&
13025 "Associated value is not a pointer");
13027 if (!
A.getInfoCache().getFlatAddressSpace().has_value()) {
13028 indicatePessimisticFixpoint();
13032 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
13033 unsigned AS = getAssociatedType()->getPointerAddressSpace();
13034 if (AS != FlatAS) {
13035 [[maybe_unused]]
bool R = takeAddressSpace(AS);
13036 assert(R &&
"The take should happen");
13037 indicateOptimisticFixpoint();
13042 uint32_t OldAddressSpace = AssumedAddressSpace;
13043 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
13045 auto CheckAddressSpace = [&](
Value &
Obj) {
13051 unsigned ObjAS =
Obj.getType()->getPointerAddressSpace();
13052 if (ObjAS != FlatAS)
13053 return takeAddressSpace(ObjAS);
13067 A.getInfoCache().getAnalysisResultForFunction<TargetIRAnalysis>(*F);
13069 if (AssumedAS != ~0U)
13070 return takeAddressSpace(AssumedAS);
13074 return takeAddressSpace(FlatAS);
13077 auto *AUO =
A.getOrCreateAAFor<AAUnderlyingObjects>(getIRPosition(),
this,
13078 DepClassTy::REQUIRED);
13079 if (!AUO->forallUnderlyingObjects(CheckAddressSpace))
13080 return indicatePessimisticFixpoint();
13082 return OldAddressSpace == AssumedAddressSpace ? ChangeStatus::UNCHANGED
13083 : ChangeStatus::CHANGED;
13090 if (NewAS == InvalidAddressSpace ||
13092 return ChangeStatus::UNCHANGED;
13094 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
13096 Value *AssociatedValue = &getAssociatedValue();
13097 Value *OriginalValue = peelAddrspacecast(AssociatedValue, FlatAS);
13100 PointerType::get(getAssociatedType()->
getContext(), NewAS);
13101 bool UseOriginalValue =
13106 auto Pred = [&](
const Use &
U,
bool &) {
13107 if (
U.get() != AssociatedValue)
13118 makeChange(
A, LI, U, OriginalValue, NewPtrTy, UseOriginalValue);
13121 makeChange(
A, SI, U, OriginalValue, NewPtrTy, UseOriginalValue);
13124 makeChange(
A, RMW, U, OriginalValue, NewPtrTy, UseOriginalValue);
13127 makeChange(
A, CmpX, U, OriginalValue, NewPtrTy, UseOriginalValue);
13134 (void)
A.checkForAllUses(Pred, *
this, getAssociatedValue(),
13137 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
13141 const std::string getAsStr(Attributor *
A)
const override {
13142 if (!isValidState())
13143 return "addrspace(<invalid>)";
13144 return "addrspace(" +
13145 (AssumedAddressSpace == InvalidAddressSpace
13147 : std::to_string(AssumedAddressSpace)) +
13152 uint32_t AssumedAddressSpace = InvalidAddressSpace;
13154 bool takeAddressSpace(uint32_t AS) {
13155 if (AssumedAddressSpace == InvalidAddressSpace) {
13156 AssumedAddressSpace = AS;
13159 return AssumedAddressSpace == AS;
13162 static Value *peelAddrspacecast(
Value *V,
unsigned FlatAS) {
13164 assert(
I->getSrcAddressSpace() != FlatAS &&
13165 "there should not be flat AS -> non-flat AS");
13166 return I->getPointerOperand();
13169 if (
C->getOpcode() == Instruction::AddrSpaceCast) {
13170 assert(
C->getOperand(0)->getType()->getPointerAddressSpace() !=
13172 "there should not be flat AS -> non-flat AS X");
13173 return C->getOperand(0);
13179struct AAAddressSpaceFloating final : AAAddressSpaceImpl {
13180 AAAddressSpaceFloating(
const IRPosition &IRP, Attributor &
A)
13181 : AAAddressSpaceImpl(IRP,
A) {}
13183 void trackStatistics()
const override {
13188struct AAAddressSpaceReturned final : AAAddressSpaceImpl {
13189 AAAddressSpaceReturned(
const IRPosition &IRP, Attributor &
A)
13190 : AAAddressSpaceImpl(IRP,
A) {}
13196 (void)indicatePessimisticFixpoint();
13199 void trackStatistics()
const override {
13204struct AAAddressSpaceCallSiteReturned final : AAAddressSpaceImpl {
13205 AAAddressSpaceCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
13206 : AAAddressSpaceImpl(IRP,
A) {}
13208 void trackStatistics()
const override {
13213struct AAAddressSpaceArgument final : AAAddressSpaceImpl {
13214 AAAddressSpaceArgument(
const IRPosition &IRP, Attributor &
A)
13215 : AAAddressSpaceImpl(IRP,
A) {}
13220struct AAAddressSpaceCallSiteArgument final : AAAddressSpaceImpl {
13221 AAAddressSpaceCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
13222 : AAAddressSpaceImpl(IRP,
A) {}
13228 (void)indicatePessimisticFixpoint();
13231 void trackStatistics()
const override {
13246struct AANoAliasAddrSpaceImpl :
public AANoAliasAddrSpace {
13247 AANoAliasAddrSpaceImpl(
const IRPosition &IRP, Attributor &
A)
13248 : AANoAliasAddrSpace(IRP,
A) {}
13251 assert(getAssociatedType()->isPtrOrPtrVectorTy() &&
13252 "Associated value is not a pointer");
13256 std::optional<unsigned> FlatAS =
A.getInfoCache().getFlatAddressSpace();
13257 if (!FlatAS.has_value()) {
13258 indicatePessimisticFixpoint();
13264 unsigned AS = getAssociatedType()->getPointerAddressSpace();
13265 if (AS != *FlatAS) {
13267 indicateOptimisticFixpoint();
13272 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
13273 uint32_t OldAssumed = getAssumed();
13275 auto CheckAddressSpace = [&](
Value &
Obj) {
13279 unsigned AS =
Obj.getType()->getPointerAddressSpace();
13283 removeAS(
Obj.getType()->getPointerAddressSpace());
13287 const AAUnderlyingObjects *AUO =
A.getOrCreateAAFor<AAUnderlyingObjects>(
13288 getIRPosition(),
this, DepClassTy::REQUIRED);
13290 return indicatePessimisticFixpoint();
13292 return OldAssumed == getAssumed() ? ChangeStatus::UNCHANGED
13293 : ChangeStatus::CHANGED;
13298 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
13300 unsigned AS = getAssociatedType()->getPointerAddressSpace();
13301 if (AS != FlatAS ||
Map.empty())
13302 return ChangeStatus::UNCHANGED;
13304 LLVMContext &Ctx = getAssociatedValue().getContext();
13305 MDNode *NoAliasASNode =
nullptr;
13306 MDBuilder MDB(Ctx);
13308 for (RangeMap::const_iterator
I =
Map.begin();
I !=
Map.end();
I++) {
13311 unsigned Upper =
I.stop();
13312 unsigned Lower =
I.start();
13313 if (!NoAliasASNode) {
13314 NoAliasASNode = MDB.createRange(APInt(32,
Lower), APInt(32,
Upper + 1));
13317 MDNode *ASRange = MDB.createRange(APInt(32,
Lower), APInt(32,
Upper + 1));
13321 Value *AssociatedValue = &getAssociatedValue();
13324 auto AddNoAliasAttr = [&](
const Use &
U,
bool &) {
13325 if (
U.get() != AssociatedValue)
13328 if (!Inst || Inst->
hasMetadata(LLVMContext::MD_noalias_addrspace))
13335 Inst->
setMetadata(LLVMContext::MD_noalias_addrspace, NoAliasASNode);
13339 (void)
A.checkForAllUses(AddNoAliasAttr, *
this, *AssociatedValue,
13341 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
13345 const std::string getAsStr(Attributor *
A)
const override {
13346 if (!isValidState())
13347 return "<invalid>";
13349 raw_string_ostream OS(Str);
13350 OS <<
"CanNotBeAddrSpace(";
13351 for (RangeMap::const_iterator
I =
Map.begin();
I !=
Map.end();
I++) {
13352 unsigned Upper =
I.stop();
13353 unsigned Lower =
I.start();
13354 OS <<
' ' <<
'[' <<
Upper <<
',' <<
Lower + 1 <<
')';
13361 void removeAS(
unsigned AS) {
13362 RangeMap::iterator
I =
Map.find(AS);
13364 if (
I !=
Map.end()) {
13365 unsigned Upper =
I.stop();
13366 unsigned Lower =
I.start();
13370 if (AS != ~((
unsigned)0) && AS + 1 <=
Upper)
13372 if (AS != 0 &&
Lower <= AS - 1)
13377 void resetASRanges(Attributor &
A) {
13379 Map.insert(0,
A.getInfoCache().getMaxAddrSpace(),
true);
13383struct AANoAliasAddrSpaceFloating final : AANoAliasAddrSpaceImpl {
13384 AANoAliasAddrSpaceFloating(
const IRPosition &IRP, Attributor &
A)
13385 : AANoAliasAddrSpaceImpl(IRP,
A) {}
13387 void trackStatistics()
const override {
13392struct AANoAliasAddrSpaceReturned final : AANoAliasAddrSpaceImpl {
13393 AANoAliasAddrSpaceReturned(
const IRPosition &IRP, Attributor &
A)
13394 : AANoAliasAddrSpaceImpl(IRP,
A) {}
13396 void trackStatistics()
const override {
13401struct AANoAliasAddrSpaceCallSiteReturned final : AANoAliasAddrSpaceImpl {
13402 AANoAliasAddrSpaceCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
13403 : AANoAliasAddrSpaceImpl(IRP,
A) {}
13405 void trackStatistics()
const override {
13410struct AANoAliasAddrSpaceArgument final : AANoAliasAddrSpaceImpl {
13411 AANoAliasAddrSpaceArgument(
const IRPosition &IRP, Attributor &
A)
13412 : AANoAliasAddrSpaceImpl(IRP,
A) {}
13414 void trackStatistics()
const override {
13419struct AANoAliasAddrSpaceCallSiteArgument final : AANoAliasAddrSpaceImpl {
13420 AANoAliasAddrSpaceCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
13421 : AANoAliasAddrSpaceImpl(IRP,
A) {}
13423 void trackStatistics()
const override {
13430struct AAAllocationInfoImpl :
public AAAllocationInfo {
13431 AAAllocationInfoImpl(
const IRPosition &IRP, Attributor &
A)
13432 : AAAllocationInfo(IRP,
A) {}
13434 std::optional<TypeSize> getAllocatedSize()
const override {
13435 assert(isValidState() &&
"the AA is invalid");
13436 return AssumedAllocatedSize;
13439 std::optional<TypeSize> findInitialAllocationSize(Instruction *
I,
13440 const DataLayout &
DL) {
13443 switch (
I->getOpcode()) {
13444 case Instruction::Alloca: {
13449 return std::nullopt;
13455 const IRPosition &IRP = getIRPosition();
13460 return indicatePessimisticFixpoint();
13462 bool IsKnownNoCapture;
13464 A,
this, IRP, DepClassTy::OPTIONAL, IsKnownNoCapture))
13465 return indicatePessimisticFixpoint();
13467 const AAPointerInfo *PI =
13468 A.getOrCreateAAFor<AAPointerInfo>(IRP, *
this, DepClassTy::REQUIRED);
13471 return indicatePessimisticFixpoint();
13474 return indicatePessimisticFixpoint();
13476 const DataLayout &
DL =
A.getDataLayout();
13477 const auto AllocationSize = findInitialAllocationSize(
I,
DL);
13480 if (!AllocationSize)
13481 return indicatePessimisticFixpoint();
13485 if (*AllocationSize == 0)
13486 return indicatePessimisticFixpoint();
13492 return indicatePessimisticFixpoint();
13494 if (BinSize == 0) {
13495 auto NewAllocationSize = std::make_optional<TypeSize>(0,
false);
13496 if (!changeAllocationSize(NewAllocationSize))
13497 return ChangeStatus::UNCHANGED;
13498 return ChangeStatus::CHANGED;
13502 const auto &It = PI->
begin();
13505 if (It->first.Offset != 0)
13506 return indicatePessimisticFixpoint();
13508 uint64_t SizeOfBin = It->first.Offset + It->first.Size;
13510 if (SizeOfBin >= *AllocationSize)
13511 return indicatePessimisticFixpoint();
13513 auto NewAllocationSize = std::make_optional<TypeSize>(SizeOfBin * 8,
false);
13515 if (!changeAllocationSize(NewAllocationSize))
13516 return ChangeStatus::UNCHANGED;
13518 return ChangeStatus::CHANGED;
13524 assert(isValidState() &&
13525 "Manifest should only be called if the state is valid.");
13529 auto FixedAllocatedSizeInBits = getAllocatedSize()->getFixedValue();
13531 unsigned long NumBytesToAllocate = (FixedAllocatedSizeInBits + 7) / 8;
13533 switch (
I->getOpcode()) {
13535 case Instruction::Alloca: {
13539 Type *CharType = Type::getInt8Ty(
I->getContext());
13541 auto *NumBytesToValue =
13542 ConstantInt::get(
I->getContext(), APInt(32, NumBytesToAllocate));
13545 insertPt = std::next(insertPt);
13546 AllocaInst *NewAllocaInst =
13551 return ChangeStatus::CHANGED;
13559 return ChangeStatus::UNCHANGED;
13563 const std::string getAsStr(Attributor *
A)
const override {
13564 if (!isValidState())
13565 return "allocationinfo(<invalid>)";
13566 return "allocationinfo(" +
13567 (AssumedAllocatedSize == HasNoAllocationSize
13569 : std::to_string(AssumedAllocatedSize->getFixedValue())) +
13574 std::optional<TypeSize> AssumedAllocatedSize = HasNoAllocationSize;
13578 bool changeAllocationSize(std::optional<TypeSize>
Size) {
13579 if (AssumedAllocatedSize == HasNoAllocationSize ||
13580 AssumedAllocatedSize !=
Size) {
13581 AssumedAllocatedSize =
Size;
13588struct AAAllocationInfoFloating : AAAllocationInfoImpl {
13589 AAAllocationInfoFloating(
const IRPosition &IRP, Attributor &
A)
13590 : AAAllocationInfoImpl(IRP,
A) {}
13592 void trackStatistics()
const override {
13597struct AAAllocationInfoReturned : AAAllocationInfoImpl {
13598 AAAllocationInfoReturned(
const IRPosition &IRP, Attributor &
A)
13599 : AAAllocationInfoImpl(IRP,
A) {}
13605 (void)indicatePessimisticFixpoint();
13608 void trackStatistics()
const override {
13613struct AAAllocationInfoCallSiteReturned : AAAllocationInfoImpl {
13614 AAAllocationInfoCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
13615 : AAAllocationInfoImpl(IRP,
A) {}
13617 void trackStatistics()
const override {
13622struct AAAllocationInfoArgument : AAAllocationInfoImpl {
13623 AAAllocationInfoArgument(
const IRPosition &IRP, Attributor &
A)
13624 : AAAllocationInfoImpl(IRP,
A) {}
13626 void trackStatistics()
const override {
13631struct AAAllocationInfoCallSiteArgument : AAAllocationInfoImpl {
13632 AAAllocationInfoCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
13633 : AAAllocationInfoImpl(IRP,
A) {}
13638 (void)indicatePessimisticFixpoint();
13641 void trackStatistics()
const override {
13690#define SWITCH_PK_INV(CLASS, PK, POS_NAME) \
13691 case IRPosition::PK: \
13692 llvm_unreachable("Cannot create " #CLASS " for a " POS_NAME " position!");
13694#define SWITCH_PK_CREATE(CLASS, IRP, PK, SUFFIX) \
13695 case IRPosition::PK: \
13696 AA = new (A.Allocator) CLASS##SUFFIX(IRP, A); \
13700#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13701 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13702 CLASS *AA = nullptr; \
13703 switch (IRP.getPositionKind()) { \
13704 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13705 SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
13706 SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
13707 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13708 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
13709 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
13710 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13711 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13716#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13717 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13718 CLASS *AA = nullptr; \
13719 switch (IRP.getPositionKind()) { \
13720 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13721 SWITCH_PK_INV(CLASS, IRP_FUNCTION, "function") \
13722 SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
13723 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13724 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13725 SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
13726 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13727 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13732#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS) \
13733 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13734 CLASS *AA = nullptr; \
13735 switch (IRP.getPositionKind()) { \
13736 SWITCH_PK_CREATE(CLASS, IRP, POS, SUFFIX) \
13738 llvm_unreachable("Cannot create " #CLASS " for position otherthan " #POS \
13744#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13745 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13746 CLASS *AA = nullptr; \
13747 switch (IRP.getPositionKind()) { \
13748 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13749 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13750 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13751 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13752 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13753 SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
13754 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13755 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13760#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13761 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13762 CLASS *AA = nullptr; \
13763 switch (IRP.getPositionKind()) { \
13764 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13765 SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
13766 SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
13767 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13768 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
13769 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
13770 SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
13771 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13776#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13777 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13778 CLASS *AA = nullptr; \
13779 switch (IRP.getPositionKind()) { \
13780 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13781 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13782 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13783 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13784 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13785 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13786 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13787 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13839#undef CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION
13840#undef CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION
13841#undef CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION
13842#undef CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION
13843#undef CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION
13844#undef CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION
13845#undef SWITCH_PK_CREATE
13846#undef SWITCH_PK_INV
assert(UImm &&(UImm !=~static_cast< T >(0)) &&"Invalid immediate!")
AMDGPU Register Bank Select
This file implements a class to represent arbitrary precision integral constant values and operations...
ReachingDefInfo InstSet & ToRemove
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
Function Alias Analysis false
This file contains the simple types necessary to represent the attributes associated with functions a...
#define STATS_DECLTRACK(NAME, TYPE, MSG)
static std::optional< Constant * > askForAssumedConstant(Attributor &A, const AbstractAttribute &QueryingAA, const IRPosition &IRP, Type &Ty)
static cl::opt< unsigned, true > MaxPotentialValues("attributor-max-potential-values", cl::Hidden, cl::desc("Maximum number of potential values to be " "tracked for each position."), cl::location(llvm::PotentialConstantIntValuesState::MaxPotentialValues), cl::init(7))
static void clampReturnedValueStates(Attributor &A, const AAType &QueryingAA, StateType &S, const IRPosition::CallBaseContext *CBContext=nullptr)
Clamp the information known for all returned values of a function (identified by QueryingAA) into S.
#define STATS_DECLTRACK_FN_ATTR(NAME)
#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxPotentialValuesIterations("attributor-max-potential-values-iterations", cl::Hidden, cl::desc("Maximum number of iterations we keep dismantling potential values."), cl::init(64))
#define STATS_DECLTRACK_CS_ATTR(NAME)
#define PIPE_OPERATOR(CLASS)
static bool mayBeInCycle(const CycleInfo *CI, const Instruction *I, bool HeaderOnly, Cycle **CPtr=nullptr)
#define STATS_DECLTRACK_ARG_ATTR(NAME)
static const Value * stripAndAccumulateOffsets(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Val, const DataLayout &DL, APInt &Offset, bool GetMinOffset, bool AllowNonInbounds, bool UseAssumed=false)
#define STATS_DECLTRACK_CSRET_ATTR(NAME)
static cl::opt< bool > ManifestInternal("attributor-manifest-internal", cl::Hidden, cl::desc("Manifest Attributor internal string attributes."), cl::init(false))
static Value * constructPointer(Value *Ptr, int64_t Offset, IRBuilder< NoFolder > &IRB)
Helper function to create a pointer based on Ptr, and advanced by Offset bytes.
#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define BUILD_STAT_NAME(NAME, TYPE)
static bool isDenselyPacked(Type *Ty, const DataLayout &DL)
Checks if a type could have padding bytes.
#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static const Value * getMinimalBaseOfPointer(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Ptr, int64_t &BytesOffset, const DataLayout &DL, bool AllowNonInbounds=false)
#define STATS_DECLTRACK_FNRET_ATTR(NAME)
#define STATS_DECLTRACK_CSARG_ATTR(NAME)
#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS)
#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxHeapToStackSize("max-heap-to-stack-size", cl::init(128), cl::Hidden)
#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define STATS_DECLTRACK_FLOATING_ATTR(NAME)
#define STATS_DECL(NAME, TYPE, MSG)
static GCRegistry::Add< ErlangGC > A("erlang", "erlang-compatible garbage collector")
static GCRegistry::Add< CoreCLRGC > E("coreclr", "CoreCLR-compatible GC")
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
static bool isReachableImpl(SmallVectorImpl< BasicBlock * > &Worklist, const StopSetT &StopSet, const SmallPtrSetImpl< BasicBlock * > *ExclusionSet, const DominatorTree *DT, const LoopInfo *LI, const CycleInfo *CI)
This file contains the declarations for the subclasses of Constant, which represent the different fla...
This file declares an analysis pass that computes CycleInfo for LLVM IR, specialized from GenericCycl...
DXIL Forward Handle Accesses
This file defines DenseMapInfo traits for DenseMap.
Machine Check Debug Module
This file implements a map that provides insertion order iteration.
static unsigned getAddressSpace(const Value *V, unsigned MaxLookup)
ConstantRange Range(APInt(BitWidth, Low), APInt(BitWidth, High))
uint64_t IntrinsicInst * II
static StringRef getName(Value *V)
dot regions Print regions of function to dot true view regions View regions of function(with no function bodies)"
Remove Loads Into Fake Uses
This builds on the llvm/ADT/GraphTraits.h file to find the strongly connected components (SCCs) of a ...
std::pair< BasicBlock *, BasicBlock * > Edge
BaseType
A given derived pointer can have multiple base pointers through phi/selects.
This file defines generic set operations that may be used on set's of different types,...
This file implements a set that has insertion order iteration characteristics.
This file defines the SmallPtrSet class.
This file defines the SmallVector class.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
static SymbolRef::Type getType(const Symbol *Sym)
static void initialize(TargetLibraryInfoImpl &TLI, const Triple &T, const llvm::StringTable &StandardNames, VectorLibrary VecLib)
Initialize the set of available library functions based on the specified target triple.
static unsigned getBitWidth(Type *Ty, const DataLayout &DL)
Returns the bitwidth of the given scalar or pointer type.
static unsigned getSize(unsigned Kind)
LLVM_ABI AACallGraphNode * operator*() const
bool isNoAlias(const MemoryLocation &LocA, const MemoryLocation &LocB)
A trivial helper function to check to see if the specified pointers are no-alias.
Class for arbitrary precision integers.
int64_t getSExtValue() const
Get sign extended value.
CallBase * getInstruction() const
Return the underlying instruction.
bool isCallbackCall() const
Return true if this ACS represents a callback call.
bool isDirectCall() const
Return true if this ACS represents a direct call.
static LLVM_ABI void getCallbackUses(const CallBase &CB, SmallVectorImpl< const Use * > &CallbackUses)
Add operand uses of CB that represent callback uses into CallbackUses.
int getCallArgOperandNo(Argument &Arg) const
Return the operand index of the underlying instruction associated with Arg.
Align getAlign() const
Return the alignment of the memory that is being allocated by the instruction.
unsigned getAddressSpace() const
Return the address space for the allocation.
LLVM_ABI std::optional< TypeSize > getAllocationSize(const DataLayout &DL) const
Get allocation size in bytes.
This class represents an incoming formal argument to a Function.
LLVM_ABI bool hasNoAliasAttr() const
Return true if this argument has the noalias attribute.
LLVM_ABI bool onlyReadsMemory() const
Return true if this argument has the readonly or readnone attribute.
LLVM_ABI bool hasPointeeInMemoryValueAttr() const
Return true if this argument has the byval, sret, inalloca, preallocated, or byref attribute.
LLVM_ABI bool hasReturnedAttr() const
Return true if this argument has the returned attribute.
LLVM_ABI bool hasByValAttr() const
Return true if this argument has the byval attribute.
const Function * getParent() const
unsigned getArgNo() const
Return the index of this formal argument in its containing function.
A function analysis which provides an AssumptionCache.
A cache of @llvm.assume calls within a function.
Functions, function parameters, and return types can have attributes to indicate how they should be t...
static LLVM_ABI Attribute get(LLVMContext &Context, AttrKind Kind, uint64_t Val=0)
Return a uniquified Attribute object.
LLVM_ABI FPClassTest getNoFPClass() const
Return the FPClassTest for nofpclass.
LLVM_ABI Attribute::AttrKind getKindAsEnum() const
Return the attribute's kind as an enum (Attribute::AttrKind).
LLVM_ABI MemoryEffects getMemoryEffects() const
Returns memory effects.
AttrKind
This enumeration lists the attributes that can be associated with parameters, function results,...
static LLVM_ABI Attribute getWithCaptureInfo(LLVMContext &Context, CaptureInfo CI)
static bool isEnumAttrKind(AttrKind Kind)
bool isValid() const
Return true if the attribute is any kind of attribute.
LLVM_ABI CaptureInfo getCaptureInfo() const
Returns information from captures attribute.
LLVM Basic Block Representation.
LLVM_ABI const_iterator getFirstInsertionPt() const
Returns an iterator to the first instruction in this block that is suitable for inserting a non-PHI i...
const Function * getParent() const
Return the enclosing method, or null if none.
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
const Instruction & front() const
InstListType::iterator iterator
Instruction iterators...
const Instruction * getTerminator() const LLVM_READONLY
Returns the terminator instruction; assumes that the block is well-formed.
BinaryOps getOpcode() const
Base class for all callable instructions (InvokeInst and CallInst) Holds everything related to callin...
Function * getCalledFunction() const
Returns the function called, or null if this is an indirect function invocation or the function signa...
LLVM_ABI bool isMustTailCall() const
Tests if this call site must be tail call optimized.
LLVM_ABI bool isIndirectCall() const
Return true if the callsite is an indirect call.
bool isCallee(Value::const_user_iterator UI) const
Determine whether the passed iterator points to the callee operand's Use.
Value * getCalledOperand() const
const Use & getCalledOperandUse() const
Attribute getFnAttr(StringRef Kind) const
Get the attribute of a given kind for the function.
const Use & getArgOperandUse(unsigned i) const
Wrappers for getting the Use of a call argument.
LLVM_ABI std::optional< ConstantRange > getRange() const
If this return value has a range attribute, return the value range of the argument.
Value * getArgOperand(unsigned i) const
bool isBundleOperand(unsigned Idx) const
Return true if the operand at index Idx is a bundle operand.
bool isConvergent() const
Determine if the invoke is convergent.
FunctionType * getFunctionType() const
LLVM_ABI Intrinsic::ID getIntrinsicID() const
Returns the intrinsic ID of the intrinsic called or Intrinsic::not_intrinsic if the called function i...
iterator_range< User::op_iterator > args()
Iteration adapter for range-for loops.
unsigned getArgOperandNo(const Use *U) const
Given a use for a arg operand, get the arg operand number that corresponds to it.
unsigned arg_size() const
bool isArgOperand(const Use *U) const
LLVM_ABI Function * getCaller()
Helper to get the caller (the parent function).
static CallInst * Create(FunctionType *Ty, Value *F, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
static CaptureInfo none()
Create CaptureInfo that does not capture any components of the pointer.
Instruction::CastOps getOpcode() const
Return the opcode of this CastInst.
LLVM_ABI bool isIntegerCast() const
There are several places where we need to know if a cast instruction only deals with integer source a...
Type * getDestTy() const
Return the destination type, as a convenience.
bool isEquality() const
Determine if this is an equals/not equals predicate.
bool isFalseWhenEqual() const
This is just a convenience.
Predicate
This enumeration lists the possible predicates for CmpInst subclasses.
bool isTrueWhenEqual() const
This is just a convenience.
Predicate getPredicate() const
Return the predicate for this instruction.
Conditional Branch instruction.
Value * getCondition() const
BasicBlock * getSuccessor(unsigned i) const
static LLVM_ABI Constant * getExtractElement(Constant *Vec, Constant *Idx, Type *OnlyIfReducedTy=nullptr)
static LLVM_ABI ConstantInt * getTrue(LLVMContext &Context)
This class represents a range of values.
const APInt & getLower() const
Return the lower value for this range.
LLVM_ABI bool isFullSet() const
Return true if this set contains all of the elements possible for this data-type.
LLVM_ABI bool isEmptySet() const
Return true if this set contains no members.
bool isSingleElement() const
Return true if this set contains exactly one member.
static LLVM_ABI ConstantRange makeAllowedICmpRegion(CmpInst::Predicate Pred, const ConstantRange &Other)
Produce the smallest range such that all values that may satisfy the given predicate with any value c...
const APInt & getUpper() const
Return the upper value for this range.
A parsed version of the target data layout string in and methods for querying it.
iterator find(const_arg_type_t< KeyT > Val)
std::pair< iterator, bool > try_emplace(KeyT &&Key, Ts &&...Args)
size_type count(const_arg_type_t< KeyT > Val) const
Return 1 if the specified key is in the map, 0 otherwise.
bool contains(const_arg_type_t< KeyT > Val) const
Return true if the specified key is in the map, false otherwise.
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
Analysis pass which computes a DominatorTree.
Concrete subclass of DominatorTreeBase that is used to compute a normal dominator tree.
LLVM_ABI bool dominates(const BasicBlock *BB, const Use &U) const
Return true if the (end of the) basic block BB dominates the use U.
const BasicBlock & getEntryBlock() const
iterator_range< arg_iterator > args()
const Function & getFunction() const
Argument * getArg(unsigned i) const
bool hasFnAttribute(Attribute::AttrKind Kind) const
Return true if the function has the attribute.
CycleT * getCycle(const BlockT *Block) const
Find the innermost cycle containing a given block.
LLVM_ABI bool isDeclaration() const
Return true if the primary definition of this global value is outside of the current translation unit...
bool hasLocalLinkage() const
static LLVM_ABI bool compare(const APInt &LHS, const APInt &RHS, ICmpInst::Predicate Pred)
Return result of LHS Pred RHS comparison.
Value * CreatePtrAdd(Value *Ptr, Value *Offset, const Twine &Name="", GEPNoWrapFlags NW=GEPNoWrapFlags::none())
ConstantInt * getInt64(uint64_t C)
Get a constant 64-bit value.
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
LLVM_ABI Instruction * clone() const
Create a copy of 'this' instruction that is identical in all ways except the following:
LLVM_ABI bool isLifetimeStartOrEnd() const LLVM_READONLY
Return true if the instruction is a llvm.lifetime.start or llvm.lifetime.end marker.
bool mayReadOrWriteMemory() const
Return true if this instruction may read or write memory.
LLVM_ABI bool mayWriteToMemory() const LLVM_READONLY
Return true if this instruction may modify memory.
bool hasMetadata() const
Return true if this instruction has any metadata attached to it.
LLVM_ABI void insertBefore(InstListType::iterator InsertPos)
Insert an unlinked instruction into a basic block immediately before the specified position.
LLVM_ABI InstListType::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
LLVM_ABI const Function * getFunction() const
Return the function this instruction belongs to.
LLVM_ABI BasicBlock * getSuccessor(unsigned Idx) const LLVM_READONLY
Return the specified successor. This instruction must be a terminator.
LLVM_ABI bool mayHaveSideEffects() const LLVM_READONLY
Return true if the instruction may have side effects.
bool isTerminator() const
LLVM_ABI bool mayReadFromMemory() const LLVM_READONLY
Return true if this instruction may read memory.
LLVM_ABI void setMetadata(unsigned KindID, MDNode *Node)
Set the metadata of the specified kind to the specified node.
unsigned getOpcode() const
Returns a member of one of the enums like Instruction::Add.
void setDebugLoc(DebugLoc Loc)
Set the debug location information for this instruction.
LLVM_ABI const DataLayout & getDataLayout() const
Get the data layout of the module this instruction belongs to.
This is an important class for using LLVM in a threaded context.
ConstantRange getConstantRange(Value *V, Instruction *CxtI, bool UndefAllowed)
Return the ConstantRange constraint that is known to hold for the specified value at the specified in...
LoopT * getLoopFor(const BlockT *BB) const
Return the inner most loop that BB lives in.
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
static LLVM_ABI MDNode * getMostGenericRange(MDNode *A, MDNode *B)
static MemoryEffectsBase readOnly()
bool doesNotAccessMemory() const
Whether this function accesses no memory.
static MemoryEffectsBase argMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
static MemoryEffectsBase inaccessibleMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
bool onlyAccessesInaccessibleMem() const
Whether this function only (at most) accesses inaccessible memory.
ModRefInfo getModRef(Location Loc) const
Get ModRefInfo for the given Location.
bool onlyAccessesArgPointees() const
Whether this function only (at most) accesses argument memory.
bool onlyReadsMemory() const
Whether this function only (at most) reads memory.
static MemoryEffectsBase writeOnly()
static MemoryEffectsBase inaccessibleOrArgMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
static MemoryEffectsBase none()
bool onlyAccessesInaccessibleOrArgMem() const
Whether this function only (at most) accesses argument and inaccessible memory.
static MemoryEffectsBase unknown()
static LLVM_ABI std::optional< MemoryLocation > getOrNone(const Instruction *Inst)
static SizeOffsetValue unknown()
static PHINode * Create(Type *Ty, unsigned NumReservedValues, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
Constructors - NumReservedValues is a hint for the number of incoming edges that this phi node will h...
static LLVM_ABI PoisonValue * get(Type *T)
Static factory methods - Return an 'poison' object of the specified type.
Value * getReturnValue() const
Convenience accessor. Returns null if there is no return value.
LLVM_ABI const SCEV * getSCEVAtScope(const SCEV *S, const Loop *L)
Return a SCEV expression for the specified value at the specified scope in the program.
LLVM_ABI const SCEV * getSCEV(Value *V)
Return a SCEV expression for the full generality of the specified expression.
LLVM_ABI unsigned getSmallConstantMaxTripCount(const Loop *L, SmallVectorImpl< const SCEVPredicate * > *Predicates=nullptr)
Returns the upper bound of the loop trip count as a normal unsigned value.
ConstantRange getUnsignedRange(const SCEV *S)
Determine the unsigned range for a particular SCEV.
A vector that has set insertion semantics.
size_type size() const
Determine the number of elements in the SetVector.
bool insert(const value_type &X)
Insert a new element into the SetVector.
bool erase(PtrType Ptr)
Remove pointer from the set.
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
std::pair< const_iterator, bool > insert(const T &V)
insert - Insert an element into the set if it isn't already there.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
reference emplace_back(ArgTypes &&... Args)
void append(ItTy in_start, ItTy in_end)
Add the specified range to the end of the SmallVector.
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
Used to lazily calculate structure layout information for a target machine, based on the DataLayout s...
TypeSize getElementOffset(unsigned Idx) const
TypeSize getElementOffsetInBits(unsigned Idx) const
Class to represent struct types.
unsigned getNumElements() const
Random access to the elements.
Type * getElementType(unsigned N) const
Twine - A lightweight data structure for efficiently representing the concatenation of temporary valu...
The instances of the Type class are immutable: once they are created, they are never changed.
LLVM_ABI unsigned getIntegerBitWidth() const
bool isPointerTy() const
True if this is an instance of PointerType.
LLVM_ABI unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
bool isPtrOrPtrVectorTy() const
Return true if this is a pointer type or a vector of pointer types.
bool isIntegerTy() const
True if this is an instance of IntegerType.
bool isVoidTy() const
Return true if this is 'void'.
static UncondBrInst * Create(BasicBlock *Target, InsertPosition InsertBefore=nullptr)
BasicBlock * getSuccessor(unsigned i=0) const
static LLVM_ABI UndefValue * get(Type *T)
Static factory methods - Return an 'undef' object of the specified type.
A Use represents the edge between a Value definition and its users.
User * getUser() const
Returns the User that contains this Use.
const Use & getOperandUse(unsigned i) const
LLVM_ABI bool isDroppable() const
A droppable user is a user for which uses can be dropped without affecting correctness and should be ...
LLVM_ABI bool replaceUsesOfWith(Value *From, Value *To)
Replace uses of one Value with another.
Value * getOperand(unsigned i) const
unsigned getNumOperands() const
ValueT lookup(const KeyT &Val) const
lookup - Return the entry for the specified key, or a default constructed value if no such entry exis...
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
static constexpr uint64_t MaximumAlignment
LLVM_ABI void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
LLVMContext & getContext() const
All values hold a context through their type.
iterator_range< user_iterator > users()
LLVM_ABI const Value * stripAndAccumulateConstantOffsets(const DataLayout &DL, APInt &Offset, bool AllowNonInbounds, bool AllowInvariantGroup=false, function_ref< bool(Value &Value, APInt &Offset)> ExternalAnalysis=nullptr, bool LookThroughIntToPtr=false) const
Accumulate the constant offset this value has compared to a base pointer.
static constexpr unsigned MaxAlignmentExponent
The maximum alignment for instructions.
iterator_range< use_iterator > uses()
LLVM_ABI StringRef getName() const
Return a constant reference to the value's name.
std::pair< iterator, bool > insert(const ValueT &V)
constexpr ScalarTy getFixedValue() const
constexpr bool isScalable() const
Returns whether the quantity is scaled by a runtime quantity (vscale).
const ParentTy * getParent() const
self_iterator getIterator()
NodeTy * getNextNode()
Get the next node, or nullptr for the list tail.
SetVector< Function * >::iterator I
This class implements an extremely fast bulk output stream that can only output to a stream.
A raw_ostream that writes to an std::string.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
Abstract Attribute helper functions.
LLVM_ABI bool isAssumedReadNone(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readnone.
LLVM_ABI bool isAssumedReadOnly(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readonly.
raw_ostream & operator<<(raw_ostream &OS, const RangeTy &R)
LLVM_ABI std::optional< Value * > combineOptionalValuesInAAValueLatice(const std::optional< Value * > &A, const std::optional< Value * > &B, Type *Ty)
Return the combination of A and B such that the result is a possible value of both.
LLVM_ABI bool isValidAtPosition(const ValueAndContext &VAC, InformationCache &InfoCache)
Return true if the value of VAC is valid at the position of VAC, that is a constant,...
LLVM_ABI bool isAssumedThreadLocalObject(Attributor &A, Value &Obj, const AbstractAttribute &QueryingAA)
Return true if Obj is assumed to be a thread local object.
LLVM_ABI bool isDynamicallyUnique(Attributor &A, const AbstractAttribute &QueryingAA, const Value &V, bool ForAnalysisOnly=true)
Return true if V is dynamically unique, that is, there are no two "instances" of V at runtime with di...
LLVM_ABI bool getPotentialCopiesOfStoredValue(Attributor &A, StoreInst &SI, SmallSetVector< Value *, 4 > &PotentialCopies, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values of the one stored by SI into PotentialCopies.
SmallPtrSet< Instruction *, 4 > InstExclusionSetTy
LLVM_ABI bool isGPU(const Module &M)
Return true iff M target a GPU (and we can use GPU AS reasoning).
ValueScope
Flags to distinguish intra-procedural queries from potentially inter-procedural queries.
LLVM_ABI bool isValidInScope(const Value &V, const Function *Scope)
Return true if V is a valid value in Scope, that is a constant or an instruction/argument of Scope.
LLVM_ABI bool isPotentiallyReachable(Attributor &A, const Instruction &FromI, const Instruction &ToI, const AbstractAttribute &QueryingAA, const AA::InstExclusionSetTy *ExclusionSet=nullptr, std::function< bool(const Function &F)> GoBackwardsCB=nullptr)
Return true if ToI is potentially reachable from FromI without running into any instruction in Exclus...
LLVM_ABI bool isNoSyncInst(Attributor &A, const Instruction &I, const AbstractAttribute &QueryingAA)
Return true if I is a nosync instruction.
bool hasAssumedIRAttr(Attributor &A, const AbstractAttribute *QueryingAA, const IRPosition &IRP, DepClassTy DepClass, bool &IsKnown, bool IgnoreSubsumingPositions=false, const AAType **AAPtr=nullptr)
Helper to avoid creating an AA for IR Attributes that might already be set.
LLVM_ABI bool getPotentiallyLoadedValues(Attributor &A, LoadInst &LI, SmallSetVector< Value *, 4 > &PotentialValues, SmallSetVector< Instruction *, 4 > &PotentialValueOrigins, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values LI could read into PotentialValues.
LLVM_ABI Value * getWithType(Value &V, Type &Ty)
Try to convert V to type Ty without introducing new instructions.
constexpr char Align[]
Key for Kernel::Arg::Metadata::mAlign.
constexpr char Attrs[]
Key for Kernel::Metadata::mAttrs.
@ C
The default llvm calling convention, compatible with C.
@ BasicBlock
Various leaf nodes.
@ Unsupported
This operation is completely unsupported on the target.
@ SingleThread
Synchronized with respect to signal handlers executing in the same thread.
@ CE
Windows NT (Windows on ARM)
@ Valid
The data is already valid.
initializer< Ty > init(const Ty &Val)
LocationClass< Ty > location(Ty &L)
unsigned combineHashValue(unsigned a, unsigned b)
Simplistic combination of 32-bit hash values into 32-bit hash values.
ElementType
The element type of an SRV or UAV resource.
Scope
Defines the scope in which this symbol should be visible: Default – Visible in the public interface o...
std::enable_if_t< detail::IsValidPointer< X, Y >::value, X * > dyn_extract_or_null(Y &&MD)
Extract a Value from Metadata, if any, allowing null.
std::enable_if_t< detail::IsValidPointer< X, Y >::value, X * > extract(Y &&MD)
Extract a Value from Metadata.
@ User
could "use" a pointer
NodeAddr< UseNode * > Use
Context & getContext() const
friend class Instruction
Iterator for Instructions in a `BasicBlock.
LLVM_ABI iterator begin() const
This is an optimization pass for GlobalISel generic memory operations.
bool operator<(int64_t V1, const APSInt &V2)
FunctionAddr VTableAddr Value
LLVM_ATTRIBUTE_ALWAYS_INLINE DynamicAPInt gcd(const DynamicAPInt &A, const DynamicAPInt &B)
LLVM_ABI KnownFPClass computeKnownFPClass(const Value *V, const APInt &DemandedElts, FPClassTest InterestedClasses, const SimplifyQuery &SQ, unsigned Depth=0)
Determine which floating-point classes are valid for V, and return them in KnownFPClass bit sets.
bool all_of(R &&range, UnaryPredicate P)
Provide wrappers to std::all_of which take ranges instead of having to pass begin/end explicitly.
LLVM_ABI bool isLegalToPromote(const CallBase &CB, Function *Callee, const char **FailureReason=nullptr)
Return true if the given indirect call site can be made to call Callee.
LLVM_ABI Constant * getInitialValueOfAllocation(const Value *V, const TargetLibraryInfo *TLI, Type *Ty)
If this is a call to an allocation function that initializes memory to a fixed value,...
auto size(R &&Range, std::enable_if_t< std::is_base_of< std::random_access_iterator_tag, typename std::iterator_traits< decltype(Range.begin())>::iterator_category >::value, void > *=nullptr)
Get the size of a range.
@ Undef
Value of the register doesn't matter.
auto pred_end(const MachineBasicBlock *BB)
unsigned getPointerAddressSpace(const Type *T)
decltype(auto) dyn_cast(const From &Val)
dyn_cast<X> - Return the argument parameter cast to the specified type.
FunctionAddr VTableAddr uintptr_t uintptr_t Int32Ty
auto successors(const MachineBasicBlock *BB)
LLVM_ABI bool isRemovableAlloc(const CallBase *V, const TargetLibraryInfo *TLI)
Return true if this is a call to an allocation function that does not have side effects that we are r...
APFloat abs(APFloat X)
Returns the absolute value of the argument.
LLVM_ABI raw_fd_ostream & outs()
This returns a reference to a raw_fd_ostream for standard output.
LLVM_ABI Value * getAllocAlignment(const CallBase *V, const TargetLibraryInfo *TLI)
Gets the alignment argument for an aligned_alloc-like function, using either built-in knowledge based...
auto dyn_cast_if_present(const Y &Val)
dyn_cast_if_present<X> - Functionally identical to dyn_cast, except that a null (or none in the case ...
LLVM_ABI Value * simplifyInstructionWithOperands(Instruction *I, ArrayRef< Value * > NewOps, const SimplifyQuery &Q)
Like simplifyInstruction but the operands of I are replaced with NewOps.
Value * GetPointerBaseWithConstantOffset(Value *Ptr, int64_t &Offset, const DataLayout &DL, bool AllowNonInbounds=true)
Analyze the specified pointer to see if it can be expressed as a base pointer plus a constant offset.
scc_iterator< T > scc_begin(const T &G)
Construct the begin iterator for a deduced graph type T.
LLVM_ABI bool isNoAliasCall(const Value *V)
Return true if this pointer is returned by a noalias function.
MemoryEffectsBase< IRMemLocation > MemoryEffects
Summary of how a function affects memory in the program.
raw_ostream & WriteGraph(raw_ostream &O, const GraphType &G, bool ShortNames=false, const Twine &Title="")
LLVM_ABI bool isSafeToSpeculativelyExecute(const Instruction *I, const Instruction *CtxI=nullptr, AssumptionCache *AC=nullptr, const DominatorTree *DT=nullptr, const TargetLibraryInfo *TLI=nullptr, bool UseVariableInfo=true, bool IgnoreUBImplyingAttrs=true)
Return true if the instruction does not have any effects besides calculating the result and does not ...
bool isa_and_nonnull(const Y &Val)
bool operator==(const AddressRangeValuePair &LHS, const AddressRangeValuePair &RHS)
LLVM_ABI ConstantRange getConstantRangeFromMetadata(const MDNode &RangeMD)
Parse out a conservative ConstantRange from !range metadata.
auto map_range(ContainerTy &&C, FuncTy F)
Return a range that applies F to the elements of C.
const Value * getPointerOperand(const Value *V)
A helper function that returns the pointer operand of a load, store or GEP instruction.
LLVM_ABI Value * simplifyInstruction(Instruction *I, const SimplifyQuery &Q)
See if we can compute a simplified version of this instruction.
auto dyn_cast_or_null(const Y &Val)
bool any_of(R &&range, UnaryPredicate P)
Provide wrappers to std::any_of which take ranges instead of having to pass begin/end explicitly.
unsigned Log2_32(uint32_t Value)
Return the floor log base 2 of the specified value, -1 if the value is zero.
LLVM_ABI bool isIntrinsicReturningPointerAliasingArgumentWithoutCapturing(const CallBase *Call, bool MustPreserveNullness)
{launder,strip}.invariant.group returns pointer that aliases its argument, and it only captures point...
constexpr bool isPowerOf2_32(uint32_t Value)
Return true if the argument is a power of two > 0.
PotentialValuesState< std::pair< AA::ValueAndContext, AA::ValueScope > > PotentialLLVMValuesState
void sort(IteratorTy Start, IteratorTy End)
LLVM_ABI bool NullPointerIsDefined(const Function *F, unsigned AS=0)
Check whether null pointer dereferencing is considered undefined behavior for a given function or an ...
LLVM_ABI raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
bool isPointerTy(const Type *T)
LLVM_ABI bool wouldInstructionBeTriviallyDead(const Instruction *I, const TargetLibraryInfo *TLI=nullptr)
Return true if the result produced by the instruction would have no side effects if it was not used.
bool set_union(S1Ty &S1, const S2Ty &S2)
set_union(A, B) - Compute A := A u B, return whether A changed.
class LLVM_GSL_OWNER SmallVector
Forward declaration of SmallVector so that calculateSmallVectorDefaultInlinedElements can reference s...
LLVM_ABI CallBase & promoteCall(CallBase &CB, Function *Callee, CastInst **RetBitCast=nullptr)
Promote the given indirect call site to unconditionally call Callee.
bool isa(const From &Val)
isa<X> - Return true if the parameter to the template is an instance of one of the template type argu...
LLVM_ABI bool hasAssumption(const Function &F, const KnownAssumptionString &AssumptionStr)
Return true if F has the assumption AssumptionStr attached.
LLVM_ABI RetainedKnowledge getKnowledgeFromUse(const Use *U, ArrayRef< Attribute::AttrKind > AttrKinds)
Return a valid Knowledge associated to the Use U if its Attribute kind is in AttrKinds.
@ Success
The lock was released successfully.
LLVM_ATTRIBUTE_VISIBILITY_DEFAULT AnalysisKey InnerAnalysisManagerProxy< AnalysisManagerT, IRUnitT, ExtraArgTs... >::Key
LLVM_ABI bool isKnownNonZero(const Value *V, const SimplifyQuery &Q, unsigned Depth=0)
Return true if the given value is known to be non-zero when defined.
AtomicOrdering
Atomic ordering for LLVM's memory model.
PotentialValuesState< APInt > PotentialConstantIntValuesState
std::string join(IteratorT Begin, IteratorT End, StringRef Separator)
Joins the strings in the range [Begin, End), adding Separator between the elements.
IRBuilder(LLVMContext &, FolderTy, InserterTy, MDNode *, ArrayRef< OperandBundleDef >) -> IRBuilder< FolderTy, InserterTy >
InterleavedRange< Range > interleaved_array(const Range &R, StringRef Separator=", ")
Output range R as an array of interleaved elements.
ChangeStatus clampStateAndIndicateChange< DerefState >(DerefState &S, const DerefState &R)
void RemapInstruction(Instruction *I, ValueToValueMapTy &VM, RemapFlags Flags=RF_None, ValueMapTypeRemapper *TypeMapper=nullptr, ValueMaterializer *Materializer=nullptr, const MetadataPredicate *IdentityMD=nullptr)
Convert the instruction operands from referencing the current values into those specified by VM.
DWARFExpression::Operation Op
LLVM_ABI bool isGuaranteedNotToBeUndefOrPoison(const Value *V, AssumptionCache *AC=nullptr, const Instruction *CtxI=nullptr, const DominatorTree *DT=nullptr, unsigned Depth=0)
Return true if this function can prove that V does not have undef bits and is never poison.
ArrayRef(const T &OneElt) -> ArrayRef< T >
LLVM_ABI Value * getFreedOperand(const CallBase *CB, const TargetLibraryInfo *TLI)
If this if a call to a free function, return the freed operand.
ChangeStatus clampStateAndIndicateChange(StateType &S, const StateType &R)
Helper function to clamp a state S of type StateType with the information in R and indicate/return if...
constexpr unsigned BitWidth
ValueMap< const Value *, WeakTrackingVH > ValueToValueMapTy
auto pred_begin(const MachineBasicBlock *BB)
decltype(auto) cast(const From &Val)
cast<X> - Return the argument parameter cast to the specified type.
iterator_range< pointer_iterator< WrappedIteratorT > > make_pointer_range(RangeT &&Range)
LLVM_ABI std::optional< APInt > getAllocSize(const CallBase *CB, const TargetLibraryInfo *TLI, function_ref< const Value *(const Value *)> Mapper=[](const Value *V) { return V;})
Return the size of the requested allocation.
LLVM_ABI DenseSet< StringRef > getAssumptions(const Function &F)
Return the set of all assumptions for the function F.
Align assumeAligned(uint64_t Value)
Treats the value 0 as a 1, so Align is always at least 1.
LLVM_ABI Instruction * SplitBlockAndInsertIfThen(Value *Cond, BasicBlock::iterator SplitBefore, bool Unreachable, MDNode *BranchWeights=nullptr, DomTreeUpdater *DTU=nullptr, LoopInfo *LI=nullptr, BasicBlock *ThenBlock=nullptr)
Split the containing block at the specified instruction - everything before SplitBefore stays in the ...
@ OPTIONAL
The target may be valid if the source is not.
@ NONE
Do not track a dependence between source and target.
@ REQUIRED
The target cannot be valid if the source is not.
LLVM_ABI UseCaptureInfo DetermineUseCaptureKind(const Use &U, const Value *Base)
Determine what kind of capture behaviour U may exhibit.
LLVM_ABI Value * simplifyCmpInst(CmpPredicate Predicate, Value *LHS, Value *RHS, const SimplifyQuery &Q)
Given operands for a CmpInst, fold the result or return null.
LLVM_ABI bool mayContainIrreducibleControl(const Function &F, const LoopInfo *LI)
BumpPtrAllocatorImpl<> BumpPtrAllocator
The standard BumpPtrAllocator which just uses the default template parameters.
T bit_floor(T Value)
Returns the largest integral power of two no greater than Value if Value is nonzero.
LLVM_ABI const Value * getUnderlyingObject(const Value *V, unsigned MaxLookup=MaxLookupSearchDepth)
This method strips off any GEP address adjustments, pointer casts or llvm.threadlocal....
bool capturesNothing(CaptureComponents CC)
LLVM_ABI bool isIdentifiedObject(const Value *V)
Return true if this pointer refers to a distinct and identifiable object.
constexpr StringRef AssumptionAttrKey
The key we use for assumption attributes.
constexpr bool isCallableCC(CallingConv::ID CC)
void swap(llvm::BitVector &LHS, llvm::BitVector &RHS)
Implement std::swap in terms of BitVector swap.
A type to track pointer/struct usage and accesses for AAPointerInfo.
bool forallInterferingAccesses(AA::RangeTy Range, F CB) const
See AAPointerInfo::forallInterferingAccesses.
AAPointerInfo::const_bin_iterator end() const
ChangeStatus addAccess(Attributor &A, const AAPointerInfo::RangeList &Ranges, Instruction &I, std::optional< Value * > Content, AAPointerInfo::AccessKind Kind, Type *Ty, Instruction *RemoteI=nullptr)
Add a new Access to the state at offset Offset and with size Size.
DenseMap< const Instruction *, SmallVector< unsigned > > RemoteIMap
AAPointerInfo::const_bin_iterator begin() const
AAPointerInfo::OffsetInfo ReturnedOffsets
Flag to determine if the underlying pointer is reaching a return statement in the associated function...
State & operator=(State &&R)
State(State &&SIS)=default
const AAPointerInfo::Access & getAccess(unsigned Index) const
SmallVector< AAPointerInfo::Access > AccessList
bool isAtFixpoint() const override
See AbstractState::isAtFixpoint().
bool forallInterferingAccesses(Instruction &I, F CB, AA::RangeTy &Range) const
See AAPointerInfo::forallInterferingAccesses.
static State getWorstState(const State &SIS)
Return the worst possible representable state.
int64_t numOffsetBins() const
AAPointerInfo::OffsetBinsTy OffsetBins
ChangeStatus indicateOptimisticFixpoint() override
See AbstractState::indicateOptimisticFixpoint().
State & operator=(const State &R)
ChangeStatus indicatePessimisticFixpoint() override
See AbstractState::indicatePessimisticFixpoint().
const State & getAssumed() const
static State getBestState(const State &SIS)
Return the best possible representable state.
bool isValidState() const override
See AbstractState::isValidState().
---------------------- AAIntraFnReachability Attribute ----------------------
ReachabilityQueryInfo(const ReachabilityQueryInfo &RQI)
unsigned Hash
Precomputed hash for this RQI.
const Instruction * From
Start here,.
Reachable Result
and remember if it worked:
ReachabilityQueryInfo(const Instruction *From, const ToTy *To)
ReachabilityQueryInfo(Attributor &A, const Instruction &From, const ToTy &To, const AA::InstExclusionSetTy *ES, bool MakeUnique)
Constructor replacement to ensure unique and stable sets are used for the cache.
const ToTy * To
reach this place,
const AA::InstExclusionSetTy * ExclusionSet
without going through any of these instructions,
unsigned computeHashValue() const
An abstract interface for address space information.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all align attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
Align getKnownAlign() const
Return known alignment.
static LLVM_ABI const char ID
An abstract attribute for getting assumption information.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract state for querying live call edges.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract Attribute for specializing "dynamic" components of denormal_fpenv to a known denormal mod...
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all dereferenceable attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for llvm::GlobalValue information interference.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for indirect call information interference.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface to track if a value leaves its defining function instance.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract Attribute for computing reachability between functions.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
bool canReach(Attributor &A, const Function &Fn) const
If the function represented by this position can reach Fn.
virtual bool instructionCanReach(Attributor &A, const Instruction &Inst, const Function &Fn, const AA::InstExclusionSetTy *ExclusionSet=nullptr) const =0
Can Inst reach Fn.
An abstract interface to determine reachability of point A to B.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for identifying pointers from which loads can be marked invariant.
static LLVM_ABI const char ID
Unique ID (due to the unique address).
An abstract interface for liveness abstract attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for memory access kind related attributes (readnone/readonly/writeonly).
bool isAssumedReadOnly() const
Return true if we assume that the underlying value is not accessed (=written) in its respective scope...
bool isKnownReadNone() const
Return true if we know that the underlying value is not read or accessed in its respective scope.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
bool isAssumedReadNone() const
Return true if we assume that the underlying value is not read or accessed in its respective scope.
An abstract interface for all memory location attributes (readnone/argmemonly/inaccessiblememonly/ina...
static LLVM_ABI std::string getMemoryLocationsAsStr(MemoryLocationsKind MLK)
Return the locations encoded by MLK as a readable string.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
StateType::base_t MemoryLocationsKind
An abstract interface for all nonnull attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for potential address space information.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all noalias attributes.
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all nocapture attributes.
@ NO_CAPTURE_MAYBE_RETURNED
If we do not capture the value in memory or through integers we can only communicate it back as a der...
@ NO_CAPTURE
If we do not capture the value in memory, through integers, or as a derived pointer we know it is not...
static LLVM_ABI const char ID
Unique ID (due to the unique address)
bool isAssumedNoCaptureMaybeReturned() const
Return true if we assume that the underlying value is not captured in its respective scope but we all...
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static LLVM_ABI void determineFunctionCaptureCapabilities(const IRPosition &IRP, const Function &F, BitIntegerState &State)
Update State according to the capture capabilities of F for position IRP.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An AbstractAttribute for nofree.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for norecurse.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An AbstractAttribute for noreturn.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI bool isAlignedBarrier(const CallBase &CB, bool ExecutedAligned)
Helper function to determine if CB is an aligned (GPU) barrier.
static LLVM_ABI bool isNonRelaxedAtomic(const Instruction *I)
Helper function used to determine whether an instruction is non-relaxed atomic.
static LLVM_ABI bool isNoSyncIntrinsic(const Instruction *I)
Helper function specific for intrinsics which are potentially volatile.
An abstract interface for all noundef attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract Attribute for determining the necessity of the convergent attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all nonnull attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See AbstractAttribute::isImpliedByIR(...).
A helper containing a list of offsets computed for a Use.
A container for a list of ranges.
static void set_difference(const RangeList &L, const RangeList &R, RangeList &D)
Copy ranges from L that are not in R, into D.
An abstract interface for struct information.
virtual bool reachesReturn() const =0
OffsetBinsTy::const_iterator const_bin_iterator
virtual const_bin_iterator begin() const =0
DenseMap< AA::RangeTy, SmallSet< unsigned, 4 > > OffsetBinsTy
static LLVM_ABI const char ID
Unique ID (due to the unique address)
virtual int64_t numOffsetBins() const =0
An abstract interface for potential values analysis.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI Value * getSingleValue(Attributor &A, const AbstractAttribute &AA, const IRPosition &IRP, SmallVectorImpl< AA::ValueAndContext > &Values)
Extract the single value in Values if any.
An abstract interface for privatizability.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for undefined behavior.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for getting all assumption underlying objects.
virtual bool forallUnderlyingObjects(function_ref< bool(Value &)> Pred, AA::ValueScope Scope=AA::Interprocedural) const =0
Check Pred on all underlying objects in Scope collected so far.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for range value analysis.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for value simplify abstract attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for willreturn.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
Helper to represent an access offset and size, with logic to deal with uncertainty and check for over...
static constexpr int64_t Unknown
static RangeTy getUnknown()
const Instruction * getCtxI() const
Base struct for all "concrete attribute" deductions.
void print(raw_ostream &OS) const
Helper functions, for debug purposes only.
virtual StateType & getState()=0
Return the internal abstract state for inspection.
An interface to query the internal state of an abstract attribute.
virtual bool isAtFixpoint() const =0
Return if this abstract state is fixed, thus does not need to be updated if information changes as it...
virtual bool isValidState() const =0
Return if this abstract state is in a valid state.
Helper for AA::PointerInfo::Access DenseMap/Set usage ignoring everything but the instruction.
static unsigned getHashValue(const Access &A)
AAPointerInfo::Access Access
static Access getTombstoneKey()
DenseMapInfo< Instruction * > Base
static bool isEqual(const Access &LHS, const Access &RHS)
static Access getEmptyKey()
constexpr uint64_t value() const
This is a hole in the type system and should not be abused.
std::function< void( const ArgumentReplacementInfo &, Function &, Function::arg_iterator)> CalleeRepairCBTy
Callee repair callback type.
const Argument & getReplacedArg() const
std::function< void(const ArgumentReplacementInfo &, AbstractCallSite, SmallVectorImpl< Value * > &)> ACSRepairCBTy
Abstract call site (ACS) repair callback type.
The fixpoint analysis framework that orchestrates the attribute deduction.
std::function< std::optional< Value * >( const IRPosition &, const AbstractAttribute *, bool &)> SimplifictionCallbackTy
Register CB as a simplification callback.
Specialization of the integer state for a bit-wise encoding.
BitIntegerState & addKnownBits(base_t Bits)
Add the bits in BitsEncoding to the "known bits".
Simple wrapper for a single bit (boolean) state.
static constexpr DenormalFPEnv getDefault()
static Access getTombstoneKey()
static unsigned getHashValue(const Access &A)
static Access getEmptyKey()
AAPointerInfo::Access Access
static bool isEqual(const Access &LHS, const Access &RHS)
static bool isEqual(const AA::RangeTy &A, const AA::RangeTy B)
static AA::RangeTy getTombstoneKey()
static unsigned getHashValue(const AA::RangeTy &Range)
static AA::RangeTy getEmptyKey()
static ReachabilityQueryInfo< ToTy > EmptyKey
static ReachabilityQueryInfo< ToTy > TombstoneKey
static ReachabilityQueryInfo< ToTy > * getEmptyKey()
DenseMapInfo< std::pair< const Instruction *, const ToTy * > > PairDMI
static ReachabilityQueryInfo< ToTy > * getTombstoneKey()
static bool isEqual(const ReachabilityQueryInfo< ToTy > *LHS, const ReachabilityQueryInfo< ToTy > *RHS)
DenseMapInfo< const AA::InstExclusionSetTy * > InstSetDMI
static unsigned getHashValue(const ReachabilityQueryInfo< ToTy > *RQI)
An information struct used to provide DenseMap with the various necessary components for a given valu...
State for dereferenceable attribute.
IncIntegerState DerefBytesState
State representing for dereferenceable bytes.
ChangeStatus manifest(Attributor &A) override
See AbstractAttribute::manifest(...).
Helper to describe and deal with positions in the LLVM-IR.
Function * getAssociatedFunction() const
Return the associated function, if any.
static const IRPosition callsite_returned(const CallBase &CB)
Create a position describing the returned value of CB.
static const IRPosition returned(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the returned value of F.
LLVM_ABI Argument * getAssociatedArgument() const
Return the associated argument, if any.
static const IRPosition value(const Value &V, const CallBaseContext *CBContext=nullptr)
Create a position describing the value of V.
int getCalleeArgNo() const
Return the callee argument number of the associated value if it is an argument or call site argument,...
static const IRPosition inst(const Instruction &I, const CallBaseContext *CBContext=nullptr)
Create a position describing the instruction I.
static const IRPosition callsite_argument(const CallBase &CB, unsigned ArgNo)
Create a position describing the argument of CB at position ArgNo.
@ IRP_ARGUMENT
An attribute for a function argument.
@ IRP_RETURNED
An attribute for the function return value.
@ IRP_CALL_SITE
An attribute for a call site (function scope).
@ IRP_CALL_SITE_RETURNED
An attribute for a call site return value.
@ IRP_FUNCTION
An attribute for a function (scope).
@ IRP_CALL_SITE_ARGUMENT
An attribute for a call site argument.
@ IRP_INVALID
An invalid position.
Instruction * getCtxI() const
Return the context instruction, if any.
static const IRPosition argument(const Argument &Arg, const CallBaseContext *CBContext=nullptr)
Create a position describing the argument Arg.
Type * getAssociatedType() const
Return the type this abstract attribute is associated with.
static const IRPosition function(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the function scope of F.
const CallBaseContext * getCallBaseContext() const
Get the call base context from the position.
Value & getAssociatedValue() const
Return the value this abstract attribute is associated with.
Value & getAnchorValue() const
Return the value this abstract attribute is anchored with.
int getCallSiteArgNo() const
Return the call site argument number of the associated value if it is an argument or call site argument, otherwise a negative value.
static const IRPosition function_scope(const IRPosition &IRP, const CallBaseContext *CBContext=nullptr)
Create a position with function scope matching the "context" of IRP.
Kind getPositionKind() const
Return the associated position kind.
bool isArgumentPosition() const
Return true if the position is an argument or call site argument.
static const IRPosition callsite_function(const CallBase &CB)
Create a position describing the function scope of CB.
Function * getAnchorScope() const
Return the Function surrounding the anchor value.
ConstantRange getKnown() const
Return the known state encoding.
ConstantRange getAssumed() const
Return the assumed state encoding.
base_t getAssumed() const
Return the assumed state encoding.
static constexpr base_t getWorstState()
Helper that allows to insert a new assumption string in the known assumption set by creating a (static) object.
FPClassTest KnownFPClasses
Floating-point classes the value could be one of.
A "must be executed context" for a given program point PP is the set of instructions, potentially before and after PP, that are executed always when PP is reached.
iterator & end()
Return a universal end iterator.
bool findInContextOf(const Instruction *I, const Instruction *PP)
Helper to look for I in the context of PP.
iterator & begin(const Instruction *PP)
Return an iterator to explore the context around PP.
bool checkForAllContext(const Instruction *PP, function_ref< bool(const Instruction *)> Pred)
}
static unsigned MaxPotentialValues
Helper to tie an abstract state implementation to an abstract attribute.
StateType & getState() override
See AbstractAttribute::getState(...).
bool isPassthrough() const
LLVM_ABI bool unionAssumed(std::optional< Value * > Other)
Merge Other into the currently assumed simplified value.
std::optional< Value * > SimplifiedAssociatedValue
An assumed simplified value.
Type * Ty
The type of the original value.