#include "llvm/IR/IntrinsicsAMDGPU.h"
#include "llvm/IR/IntrinsicsNVPTX.h"

#define DEBUG_TYPE "attributor"

    cl::desc("Manifest Attributor internal string attributes."),

    cl::desc("Maximum number of potential values to be "
             "tracked for each position."),

    "attributor-max-potential-values-iterations", cl::Hidden,
    "Maximum number of iterations we keep dismantling potential values."),

STATISTIC(NumAAs, "Number of abstract attributes created");
STATISTIC(NumIndirectCallsPromoted, "Number of indirect calls promoted");
#define BUILD_STAT_MSG_IR_ATTR(TYPE, NAME)                                     \
  ("Number of " #TYPE " marked '" #NAME "'")
#define BUILD_STAT_NAME(NAME, TYPE) NumIR##TYPE##_##NAME
#define STATS_DECL_(NAME, MSG) STATISTIC(NAME, MSG);
#define STATS_DECL(NAME, TYPE, MSG)                                            \
  STATS_DECL_(BUILD_STAT_NAME(NAME, TYPE), MSG);
#define STATS_TRACK(NAME, TYPE) ++(BUILD_STAT_NAME(NAME, TYPE));
#define STATS_DECLTRACK(NAME, TYPE, MSG)                                       \
  {STATS_DECL(NAME, TYPE, MSG) STATS_TRACK(NAME, TYPE)}
#define STATS_DECLTRACK_ARG_ATTR(NAME)                                         \
  STATS_DECLTRACK(NAME, Arguments, BUILD_STAT_MSG_IR_ATTR(arguments, NAME))
#define STATS_DECLTRACK_CSARG_ATTR(NAME)                                       \
  STATS_DECLTRACK(NAME, CSArguments,                                           \
                  BUILD_STAT_MSG_IR_ATTR(call site arguments, NAME))
#define STATS_DECLTRACK_FN_ATTR(NAME)                                          \
  STATS_DECLTRACK(NAME, Function, BUILD_STAT_MSG_IR_ATTR(functions, NAME))
#define STATS_DECLTRACK_CS_ATTR(NAME)                                          \
  STATS_DECLTRACK(NAME, CS, BUILD_STAT_MSG_IR_ATTR(call site, NAME))
#define STATS_DECLTRACK_FNRET_ATTR(NAME)                                       \
  STATS_DECLTRACK(NAME, FunctionReturn,                                        \
                  BUILD_STAT_MSG_IR_ATTR(function returns, NAME))
#define STATS_DECLTRACK_CSRET_ATTR(NAME)                                       \
  STATS_DECLTRACK(NAME, CSReturn,                                              \
                  BUILD_STAT_MSG_IR_ATTR(call site returns, NAME))
#define STATS_DECLTRACK_FLOATING_ATTR(NAME)                                    \
  STATS_DECLTRACK(NAME, Floating,                                              \
                  ("Number of floating values known to be '" #NAME "'"))
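// Note on the statistics macros above: STATS_DECLTRACK(NAME, TYPE, MSG) both
// declares a STATISTIC counter named NumIR<TYPE>_<NAME> (via BUILD_STAT_NAME)
// and increments it. For example, a hypothetical use of
// STATS_DECLTRACK_ARG_ATTR(nonnull) inside a trackStatistics() override
// expands to a counter NumIRArguments_nonnull that is bumped every time the
// attribute is deduced for an argument position.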
#define PIPE_OPERATOR(CLASS)                                                   \
  raw_ostream &operator<<(raw_ostream &OS, const CLASS &AA) {                  \
    return OS << static_cast<const AbstractAttribute &>(AA);                   \
  }
                          bool HeaderOnly, Cycle **CPtr = nullptr) {
  auto *BB = I->getParent();
  return !HeaderOnly || BB == C->getHeader();

  if (DL.getTypeSizeInBits(Ty) != DL.getTypeAllocSizeInBits(Ty))

    StartPos += DL.getTypeAllocSizeInBits(ElTy);

                                      bool AllowVolatile) {
  if (!AllowVolatile && I->isVolatile())
    return nullptr;
  if (auto *LI = dyn_cast<LoadInst>(I))
    return LI->getPointerOperand();
  if (auto *SI = dyn_cast<StoreInst>(I))
    return SI->getPointerOperand();
  if (auto *CXI = dyn_cast<AtomicCmpXchgInst>(I))
    return CXI->getPointerOperand();
  if (auto *RMWI = dyn_cast<AtomicRMWInst>(I))
    return RMWI->getPointerOperand();
                                    bool GetMinOffset, bool AllowNonInbounds,
                                    bool UseAssumed = false) {

  auto AttributorAnalysis = [&](Value &V, APInt &ROffset) -> bool {
    if (!ValueConstantRangeAA)
      return false;
    if (Range.isFullSet())
      return false;
    if (GetMinOffset)
      ROffset = Range.getSignedMin();
    else
      ROffset = Range.getSignedMax();
    return true;
  };

                                        const Value *Ptr, int64_t &BytesOffset,
  APInt OffsetAPInt(DL.getIndexTypeSizeInBits(Ptr->getType()), 0);
                                                  true, AllowNonInbounds);
template <typename AAType, typename StateType = typename AAType::StateType,
          Attribute::AttrKind IRAttributeKind = AAType::IRAttributeKind,
          bool RecurseForSelectAndPHI = true>
static void clampReturnedValueStates(
    Attributor &A, const AAType &QueryingAA, StateType &S,
    const IRPosition::CallBaseContext *CBContext = nullptr) {
  LLVM_DEBUG(dbgs() << "[Attributor] Clamp return value states for "
                    << QueryingAA << " into " << S << "\n");

  assert((QueryingAA.getIRPosition().getPositionKind() ==
              IRPosition::IRP_RETURNED ||
          QueryingAA.getIRPosition().getPositionKind() ==
              IRPosition::IRP_CALL_SITE_RETURNED) &&
         "Can only clamp returned value states for a function returned or call "
         "site returned position!");

  std::optional<StateType> T;

  auto CheckReturnValue = [&](Value &RV) -> bool {
    LLVM_DEBUG(dbgs() << "[Attributor] RV: " << RV
                      << " AA: " << AA->getAsStr(&A) << " @ " << RVPos << "\n");
    const StateType &AAS = AA->getState();
    if (!T)
      T = StateType::getBestState(AAS);
    *T &= AAS;
    LLVM_DEBUG(dbgs() << "[Attributor] AA State: " << AAS << " RV State: " << T
                      << "\n");
    return T->isValidState();
  };

  if (!A.checkForAllReturnedValues(CheckReturnValue, QueryingAA,
                                   AA::ValueScope::Intraprocedural,
                                   RecurseForSelectAndPHI))
    S.indicatePessimisticFixpoint();
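// Summary: clampReturnedValueStates meets (&=) the AAType state of every
// potentially returned value into the temporary state T and, if all returned
// values could be inspected, the accumulated state is folded into S;
// otherwise S is moved to the pessimistic fixpoint.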
template <typename AAType, typename BaseType,
          typename StateType = typename BaseType::StateType,
          bool PropagateCallBaseContext = false,
          Attribute::AttrKind IRAttributeKind = AAType::IRAttributeKind,
          bool RecurseForSelectAndPHI = true>
struct AAReturnedFromReturnedValues : public BaseType {
  AAReturnedFromReturnedValues(const IRPosition &IRP, Attributor &A)
      : BaseType(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    StateType S(StateType::getBestState(this->getState()));
    clampReturnedValueStates<AAType, StateType, IRAttributeKind,
                             RecurseForSelectAndPHI>(
        A, *this, S,
        PropagateCallBaseContext ? this->getCallBaseContext() : nullptr);
    return clampStateAndIndicateChange<StateType>(this->getState(), S);
  }
};
template <typename AAType, typename StateType = typename AAType::StateType,
          Attribute::AttrKind IRAttributeKind = AAType::IRAttributeKind>
static void clampCallSiteArgumentStates(Attributor &A, const AAType &QueryingAA,
                                        StateType &S) {
  LLVM_DEBUG(dbgs() << "[Attributor] Clamp call site argument states for "
                    << QueryingAA << " into " << S << "\n");

  assert(QueryingAA.getIRPosition().getPositionKind() ==
             IRPosition::IRP_ARGUMENT &&
         "Can only clamp call site argument states for an argument position!");

  std::optional<StateType> T;

  // The argument number which is also the call site argument number.
  unsigned ArgNo = QueryingAA.getIRPosition().getCallSiteArgNo();

  auto CallSiteCheck = [&](AbstractCallSite ACS) {
    LLVM_DEBUG(dbgs() << "[Attributor] ACS: " << *ACS.getInstruction()
                      << " AA: " << AA->getAsStr(&A) << " @" << ACSArgPos
                      << "\n");
    const StateType &AAS = AA->getState();
    if (!T)
      T = StateType::getBestState(AAS);
    *T &= AAS;
    LLVM_DEBUG(dbgs() << "[Attributor] AA State: " << AAS << " CSA State: " << T
                      << "\n");
    return T->isValidState();
  };

  bool UsedAssumedInformation = false;
  if (!A.checkForAllCallSites(CallSiteCheck, QueryingAA, true,
                              UsedAssumedInformation))
    S.indicatePessimisticFixpoint();
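// Summary: the same clamping scheme as for returned values, but applied to
// every abstract call site of the function; the AAType state at the matching
// call site argument position is accumulated into T, and a failure to visit
// all call sites pessimizes S.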
template <typename AAType, typename BaseType,
          typename StateType = typename AAType::StateType,
          Attribute::AttrKind IRAttributeKind = AAType::IRAttributeKind>
bool getArgumentStateFromCallBaseContext(Attributor &A,
                                         BaseType &QueryingAttribute,
                                         IRPosition &Pos, StateType &State) {
  assert((Pos.getPositionKind() == IRPosition::IRP_ARGUMENT) &&
         "Expected an 'argument' position!");

  assert(ArgNo >= 0 && "Invalid Arg No!");

  const StateType &CBArgumentState =
      static_cast<const StateType &>(AA->getState());

  LLVM_DEBUG(dbgs() << "[Attributor] Bridging call site context to argument "
                    << "Position: " << Pos
                    << " CB Arg state: " << CBArgumentState << "\n");

  // NOTE: If we want to do call site grouping it should happen here.
  State ^= CBArgumentState;
template <typename AAType, typename BaseType,
          typename StateType = typename AAType::StateType,
          bool BridgeCallBaseContext = false,
          Attribute::AttrKind IRAttributeKind = AAType::IRAttributeKind>
struct AAArgumentFromCallSiteArguments : public BaseType {
  AAArgumentFromCallSiteArguments(const IRPosition &IRP, Attributor &A)
      : BaseType(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    StateType S = StateType::getBestState(this->getState());

    if (BridgeCallBaseContext) {
      bool Success = getArgumentStateFromCallBaseContext<AAType, BaseType,
                                                         StateType,
                                                         IRAttributeKind>(
          A, *this, this->getIRPosition(), S);
      if (Success)
        return clampStateAndIndicateChange<StateType>(this->getState(), S);
    }
    clampCallSiteArgumentStates<AAType, StateType, IRAttributeKind>(
        A, *this, S);
    return clampStateAndIndicateChange<StateType>(this->getState(), S);
  }
};
template <typename AAType, typename BaseType,
          typename StateType = typename BaseType::StateType,
          bool IntroduceCallBaseContext = false,
          Attribute::AttrKind IRAttributeKind = AAType::IRAttributeKind>
struct AACalleeToCallSite : public BaseType {
  AACalleeToCallSite(const IRPosition &IRP, Attributor &A) : BaseType(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    auto IRPKind = this->getIRPosition().getPositionKind();
    assert((IRPKind == IRPosition::IRP_CALL_SITE_RETURNED ||
            IRPKind == IRPosition::IRP_CALL_SITE) &&
           "Can only wrap function returned positions for call site "
           "returned positions!");
    auto &S = this->getState();

    CallBase &CB = cast<CallBase>(this->getAnchorValue());
    if (IntroduceCallBaseContext)
      LLVM_DEBUG(dbgs() << "[Attributor] Introducing call base context:" << CB
                        << "\n");

    auto CalleePred = [&](ArrayRef<const Function *> Callees) {
      for (const Function *Callee : Callees) {
        IRPosition FnPos =
            IRPKind == IRPosition::IRP_CALL_SITE_RETURNED
                ? IRPosition::returned(*Callee,
                                       IntroduceCallBaseContext ? &CB : nullptr)
                : IRPosition::function(
                      *Callee, IntroduceCallBaseContext ? &CB : nullptr);
        if (Attribute::isEnumAttrKind(IRAttributeKind)) {
          if (!AA::hasAssumedIRAttr<IRAttributeKind>(
                  A, this, FnPos, DepClassTy::REQUIRED, IsKnown))
            return false;
          continue;
        }
        const AAType *AA =
            A.getAAFor<AAType>(*this, FnPos, DepClassTy::REQUIRED);
        if (S.isAtFixpoint())
          return S.isValidState();
      }
      return true;
    };

    if (!A.checkForAllCallees(CalleePred, *this, CB))
      return S.indicatePessimisticFixpoint();
    return ChangeStatus::UNCHANGED;
  }
};
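// Summary: AACalleeToCallSite lifts the state deduced for a callee (function
// or function-return position) onto the corresponding call site position.
// Enum attribute kinds are queried through AA::hasAssumedIRAttr, everything
// else through A.getAAFor<AAType>, and failing to inspect all potential
// callees pessimizes the call-site state.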
template <class AAType, typename StateType = typename AAType::StateType>
static void followUsesInContext(AAType &AA, Attributor &A,
                                MustBeExecutedContextExplorer &Explorer,
                                const Instruction *CtxI,
                                SetVector<const Use *> &Uses,
                                StateType &State) {
  auto EIt = Explorer.begin(CtxI), EEnd = Explorer.end(CtxI);
  for (unsigned u = 0; u < Uses.size(); ++u) {
    const Use *U = Uses[u];
    if (const Instruction *UserI = dyn_cast<Instruction>(U->getUser())) {
      bool Found = Explorer.findInContextOf(UserI, EIt, EEnd);
      if (Found && AA.followUseInMBEC(A, U, UserI, State))
        for (const Use &Us : UserI->uses())
          Uses.insert(&Us);
    }
  }
}
template <class AAType, typename StateType = typename AAType::StateType>
static void followUsesInMBEC(AAType &AA, Attributor &A, StateType &S,
                             Instruction &CtxI) {
  const Value &Val = AA.getIRPosition().getAssociatedValue();

  MustBeExecutedContextExplorer *Explorer =
      A.getInfoCache().getMustBeExecutedContextExplorer();

  // Container for (transitive) uses of the associated value.
  SetVector<const Use *> Uses;
  for (const Use &U : Val.uses())
    Uses.insert(&U);

  followUsesInContext<AAType>(AA, A, *Explorer, &CtxI, Uses, S);

  if (S.isAtFixpoint())
    return;

  if (Br->isConditional())

  StateType ParentState;

  // The known state of the parent is the conjunction of the children's known
  // states.
  ParentState.indicateOptimisticFixpoint();

  for (const BasicBlock *BB : Br->successors()) {
    StateType ChildState;

    size_t BeforeSize = Uses.size();
    followUsesInContext(AA, A, *Explorer, &BB->front(), Uses, ChildState);

    // Erase uses which only appear in the child.
    for (auto It = Uses.begin() + BeforeSize; It != Uses.end();)
      It = Uses.erase(It);

    ParentState &= ChildState;
  }
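// Summary: followUsesInMBEC seeds the use worklist with the uses of the
// associated value, lets the AA process every use inside the must-be-executed
// context of CtxI, and, for a conditional branch in that context, meets the
// states computed for the individual successors back into the parent state.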
      R.indicatePessimisticFixpoint();

    BS.indicateOptimisticFixpoint();

    BS.indicatePessimisticFixpoint();
  template <typename F>
  bool forallInterferingAccesses(AA::RangeTy Range, F CB) const {
      if (!Range.mayOverlap(ItRange))
        continue;
      bool IsExact = Range == ItRange && !Range.offsetOrSizeAreUnknown();
      for (auto Index : It.getSecond()) {

  template <typename F>
    for (unsigned Index : LocalList->getSecond()) {
      if (Range.offsetAndSizeAreUnknown())

    RemoteI = RemoteI ? RemoteI : &I;

    bool AccExists = false;
    for (auto Index : LocalList) {
      if (A.getLocalInst() == &I) {

    LLVM_DEBUG(if (ToAdd.size()) dbgs()
               << "[AAPointerInfo] Inserting access in new offset bins\n";);
    for (auto Key : ToAdd) {

      AccessList.emplace_back(&I, RemoteI, Ranges, Content, Kind, Ty);
      assert(AccessList.size() == AccIndex + 1 &&
             "New Access should have been at AccIndex");
      LocalList.push_back(AccIndex);

    auto Before = Current;
    if (Current == Before)
      return ChangeStatus::UNCHANGED;

    auto &ExistingRanges = Before.getRanges();
    auto &NewRanges = Current.getRanges();

    LLVM_DEBUG(if (ToRemove.size()) dbgs()
               << "[AAPointerInfo] Removing access from old offset bins\n";);
      assert(Bin.count(AccIndex) &&
             "Expected bin to actually contain the Access.");
      Bin.erase(AccIndex);
struct AAPointerInfoImpl
    : public StateWrapper<AA::PointerInfo::State, AAPointerInfo> {

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return std::string("PointerInfo ") +
           (isValidState() ? (std::string("#") +
                              std::to_string(OffsetBins.size()) + " bins")
                      [](int64_t O) { return std::to_string(O); }),

  ChangeStatus manifest(Attributor &A) override {
    return AAPointerInfo::manifest(A);
  }

  const_bin_iterator begin() const override { return State::begin(); }
  const_bin_iterator end() const override { return State::end(); }
  int64_t numOffsetBins() const override { return State::numOffsetBins(); }

  bool reachesReturn() const override {
    return !ReturnedOffsets.isUnassigned();
  }

  void addReturnedOffsetsTo(OffsetInfo &OI) const override {
    if (ReturnedOffsets.isUnknown()) {
      OI.setUnknown();
      return;
    }

    OffsetInfo MergedOI;
    for (auto Offset : ReturnedOffsets) {
      OffsetInfo TmpOI = OI;
      TmpOI.addToAll(Offset);
      MergedOI.merge(TmpOI);
    }
    OI = std::move(MergedOI);
  }

  ChangeStatus setReachesReturn(const OffsetInfo &ReachedReturnedOffsets) {
    if (ReturnedOffsets.isUnknown())
      return ChangeStatus::UNCHANGED;
    if (ReachedReturnedOffsets.isUnknown()) {
      ReturnedOffsets.setUnknown();
      return ChangeStatus::CHANGED;
    }
    if (ReturnedOffsets.merge(ReachedReturnedOffsets))
      return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
  }
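// Summary: ReturnedOffsets forms a small lattice (unassigned, then a concrete
// set of offsets, then unknown). setReachesReturn only ever moves up that
// lattice, and addReturnedOffsetsTo adds every returned offset to a copy of
// the incoming OffsetInfo before merging the copies back together.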
  bool forallInterferingAccesses(
      function_ref<bool(const AAPointerInfo::Access &, bool)> CB)
    return State::forallInterferingAccesses(Range, CB);

  bool forallInterferingAccesses(
      Attributor &A, const AbstractAttribute &QueryingAA, Instruction &I,
      bool FindInterferingWrites, bool FindInterferingReads,
      function_ref<bool(const Access &, bool)> UserCB, bool &HasBeenWrittenTo,
      function_ref<bool(const Access &)> SkipCB) const override {
    HasBeenWrittenTo = false;

    SmallPtrSet<const Access *, 8> DominatingWrites;

    const auto *ExecDomainAA = A.lookupAAFor<AAExecutionDomain>(
    bool AllInSameNoSyncFn = IsAssumedNoSync;
    bool InstIsExecutedByInitialThreadOnly =
        ExecDomainAA && ExecDomainAA->isExecutedByInitialThreadOnly(I);

    bool InstIsExecutedInAlignedRegion =
        FindInterferingReads && ExecDomainAA &&
        ExecDomainAA->isExecutedInAlignedRegion(A, I);

    if (InstIsExecutedInAlignedRegion || InstIsExecutedByInitialThreadOnly)
      A.recordDependence(*ExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);

    InformationCache &InfoCache = A.getInfoCache();
    bool IsThreadLocalObj =

    auto CanIgnoreThreadingForInst = [&](const Instruction &I) -> bool {
      if (IsThreadLocalObj || AllInSameNoSyncFn)
      const auto *FnExecDomainAA =
          I.getFunction() == &Scope
              : A.lookupAAFor<AAExecutionDomain>(
      if (!FnExecDomainAA)
      if (InstIsExecutedInAlignedRegion ||
          (FindInterferingWrites &&
           FnExecDomainAA->isExecutedInAlignedRegion(A, I))) {
        A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
      if (InstIsExecutedByInitialThreadOnly &&
          FnExecDomainAA->isExecutedByInitialThreadOnly(I)) {
        A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);

    auto CanIgnoreThreading = [&](const Access &Acc) -> bool {
      return CanIgnoreThreadingForInst(*Acc.getRemoteInst()) ||
             (Acc.getRemoteInst() != Acc.getLocalInst() &&
              CanIgnoreThreadingForInst(*Acc.getLocalInst()));
    bool IsKnownNoRecurse;

    bool InstInKernel = A.getInfoCache().isKernel(Scope);
    bool ObjHasKernelLifetime = false;
    const bool UseDominanceReasoning =
        FindInterferingWrites && IsKnownNoRecurse;
    const DominatorTree *DT =

      case AA::GPUAddressSpace::Shared:
      case AA::GPUAddressSpace::Constant:
      case AA::GPUAddressSpace::Local:

    std::function<bool(const Function &)> IsLiveInCalleeCB;

      const Function *AIFn = AI->getFunction();
      ObjHasKernelLifetime = A.getInfoCache().isKernel(*AIFn);
      bool IsKnownNoRecurse;
                              IsKnownNoRecurse)) {
        IsLiveInCalleeCB = [AIFn](const Function &Fn) { return AIFn != &Fn; };

      ObjHasKernelLifetime = HasKernelLifetime(GV, *GV->getParent());
      if (ObjHasKernelLifetime)
        IsLiveInCalleeCB = [&A](const Function &Fn) {
          return !A.getInfoCache().isKernel(Fn);
        };

    auto AccessCB = [&](const Access &Acc, bool Exact) {
      Function *AccScope = Acc.getRemoteInst()->getFunction();
      bool AccInSameScope = AccScope == &Scope;

      if (InstInKernel && ObjHasKernelLifetime && !AccInSameScope &&
          A.getInfoCache().isKernel(*AccScope))

      if (Exact && Acc.isMustAccess() && Acc.getRemoteInst() != &I) {
        if (Acc.isWrite() || (isa<LoadInst>(I) && Acc.isWriteOrAssumption()))
          ExclusionSet.insert(Acc.getRemoteInst());
      }

      if ((!FindInterferingWrites || !Acc.isWriteOrAssumption()) &&
          (!FindInterferingReads || !Acc.isRead()))

      bool Dominates = FindInterferingWrites && DT && Exact &&
                       Acc.isMustAccess() && AccInSameScope &&
        DominatingWrites.insert(&Acc);

      AllInSameNoSyncFn &= Acc.getRemoteInst()->getFunction() == &Scope;

      InterferingAccesses.push_back({&Acc, Exact});

    if (!State::forallInterferingAccesses(I, AccessCB, Range))

    HasBeenWrittenTo = !DominatingWrites.empty();

    for (const Access *Acc : DominatingWrites) {
      if (!LeastDominatingWriteInst) {
        LeastDominatingWriteInst = Acc->getRemoteInst();
      } else if (DT->dominates(LeastDominatingWriteInst,
                               Acc->getRemoteInst())) {
        LeastDominatingWriteInst = Acc->getRemoteInst();
      }
    }

    auto CanSkipAccess = [&](const Access &Acc, bool Exact) {
      if (SkipCB && SkipCB(Acc))
      if (!CanIgnoreThreading(Acc))

      bool ReadChecked = !FindInterferingReads;
      bool WriteChecked = !FindInterferingWrites;
                                   &ExclusionSet, IsLiveInCalleeCB))

      if (!WriteChecked) {
                                     &ExclusionSet, IsLiveInCalleeCB))
          WriteChecked = true;
      }

      if (!WriteChecked && HasBeenWrittenTo &&
          Acc.getRemoteInst()->getFunction() != &Scope) {
        const auto *FnReachabilityAA = A.getAAFor<AAInterFnReachability>(
        if (FnReachabilityAA) {
          if (!FnReachabilityAA->instructionCanReach(
                  A, *LeastDominatingWriteInst,
                  *Acc.getRemoteInst()->getFunction(), &ExclusionSet))
            WriteChecked = true;
        }
      }

      if (ReadChecked && WriteChecked)

      if (!DT || !UseDominanceReasoning)
      if (!DominatingWrites.count(&Acc))
      return LeastDominatingWriteInst != Acc.getRemoteInst();
    };

    for (auto &It : InterferingAccesses) {
      if ((!AllInSameNoSyncFn && !IsThreadLocalObj && !ExecDomainAA) ||
          !CanSkipAccess(*It.first, It.second)) {
        if (!UserCB(*It.first, It.second))
  ChangeStatus translateAndAddStateFromCallee(Attributor &A,
                                              const AAPointerInfo &OtherAA,
                                              CallBase &CB) {
    using namespace AA::PointerInfo;
    if (!OtherAA.getState().isValidState() || !isValidState())
      return indicatePessimisticFixpoint();

    const auto &OtherAAImpl = static_cast<const AAPointerInfoImpl &>(OtherAA);
    bool IsByval = OtherAAImpl.getAssociatedArgument()->hasByValAttr();
    Changed |= setReachesReturn(OtherAAImpl.ReturnedOffsets);

    const auto &State = OtherAAImpl.getState();
    for (const auto &It : State) {
      for (auto Index : It.getSecond()) {
        const auto &RAcc = State.getAccess(Index);
        if (IsByval && !RAcc.isRead())
          continue;
        bool UsedAssumedInformation = false;
        auto Content = A.translateArgumentToCallSiteContent(
            RAcc.getContent(), CB, *this, UsedAssumedInformation);
        AK = AccessKind(AK & (IsByval ? AccessKind::AK_R : AccessKind::AK_RW));
        AK = AccessKind(AK | (RAcc.isMayAccess() ? AK_MAY : AK_MUST));

        Changed |= addAccess(A, RAcc.getRanges(), CB, Content, AK,
                             RAcc.getType(), RAcc.getRemoteInst());
      }
    }
  ChangeStatus translateAndAddState(Attributor &A, const AAPointerInfo &OtherAA,
                                    const OffsetInfo &Offsets, CallBase &CB,
                                    bool IsMustAcc) {
    using namespace AA::PointerInfo;
    if (!OtherAA.getState().isValidState() || !isValidState())
      return indicatePessimisticFixpoint();

    const auto &OtherAAImpl = static_cast<const AAPointerInfoImpl &>(OtherAA);

    const auto &State = OtherAAImpl.getState();
    for (const auto &It : State) {
      for (auto Index : It.getSecond()) {
        const auto &RAcc = State.getAccess(Index);
        if (!IsMustAcc && RAcc.isAssumption())
          continue;
        for (auto Offset : Offsets) {
          if (!NewRanges.isUnknown()) {
            NewRanges.addToAllOffsets(Offset);
          }
          Changed |= addAccess(A, NewRanges, CB, RAcc.getContent(), AK,
                               RAcc.getType(), RAcc.getRemoteInst());
        }
      }
    }
  /// Statistic tracking for all AAPointerInfo implementations.
  void trackPointerInfoStatistics(const IRPosition &IRP) const {}

  /// Dump the state into \p O.
  void dumpState(raw_ostream &O) {
    for (auto &It : OffsetBins) {
      O << "[" << It.first.Offset << "-" << It.first.Offset + It.first.Size
        << "] : " << It.getSecond().size() << "\n";
      for (auto AccIndex : It.getSecond()) {
        auto &Acc = AccessList[AccIndex];
        O << " - " << Acc.getKind() << " - " << *Acc.getLocalInst() << "\n";
        if (Acc.getLocalInst() != Acc.getRemoteInst())
          O << " --> " << *Acc.getRemoteInst()
            << "\n";
        if (!Acc.isWrittenValueYetUndetermined()) {
          if (isa_and_nonnull<Function>(Acc.getWrittenValue()))
            O << " - c: func " << Acc.getWrittenValue()->getName()
              << "\n";
          else if (Acc.getWrittenValue())
            O << " - c: " << *Acc.getWrittenValue() << "\n";
          else
            O << " - c: <unknown>\n";
        }
      }
    }
  }
struct AAPointerInfoFloating : public AAPointerInfoImpl {
  AAPointerInfoFloating(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoImpl(IRP, A) {}

  /// Deal with an access and signal if it was handled successfully.
  bool handleAccess(Attributor &A, Instruction &I,
                    std::optional<Value *> Content, AccessKind Kind,
    using namespace AA::PointerInfo;

    const DataLayout &DL = A.getDataLayout();
    TypeSize AccessSize = DL.getTypeStoreSize(&Ty);

    if (!VT || VT->getElementCount().isScalable() ||
        (*Content)->getType() != VT ||
        DL.getTypeStoreSize(VT->getElementType()).isScalable()) {

      int64_t ElementSize = DL.getTypeStoreSize(ElementType).getFixedValue();

      for (int i = 0, e = VT->getElementCount().getFixedValue(); i != e; ++i) {
            ConstContent, ConstantInt::get(Int32Ty, i));

        for (auto &ElementOffset : ElementOffsets)
          ElementOffset += ElementSize;
      }
    }

  bool collectConstantsForGEP(Attributor &A, const DataLayout &DL,
                              OffsetInfo &UsrOI, const OffsetInfo &PtrOI,
                              const GEPOperator *GEP);

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
  }
};
bool AAPointerInfoFloating::collectConstantsForGEP(Attributor &A,
                                                   const DataLayout &DL,
                                                   OffsetInfo &UsrOI,
                                                   const OffsetInfo &PtrOI,
                                                   const GEPOperator *GEP) {
  unsigned BitWidth = DL.getIndexTypeSizeInBits(GEP->getType());
  SmallMapVector<Value *, APInt, 4> VariableOffsets;

  assert(!UsrOI.isUnknown() && !PtrOI.isUnknown() &&
         "Don't look for constant values if the offset has already been "
         "determined to be unknown.");

  if (!GEP->collectOffset(DL, BitWidth, VariableOffsets, ConstantOffset)) {

             << (VariableOffsets.empty() ? "" : "not") << " constant "

  Union.addToAll(ConstantOffset.getSExtValue());

  for (const auto &VI : VariableOffsets) {
    auto *PotentialConstantsAA = A.getAAFor<AAPotentialConstantValues>(
    if (!PotentialConstantsAA || !PotentialConstantsAA->isValidState()) {

    if (PotentialConstantsAA->undefIsContained())

    auto &AssumedSet = PotentialConstantsAA->getAssumedSet();
    if (AssumedSet.empty())

    for (const auto &ConstOffset : AssumedSet) {
      auto CopyPerOffset = Union;
      CopyPerOffset.addToAll(ConstOffset.getSExtValue() *
                             VI.second.getZExtValue());
      Product.merge(CopyPerOffset);
    }
  }

  UsrOI = std::move(Union);
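// Summary: collectConstantsForGEP folds the GEP's constant offset into every
// already-known offset (Union.addToAll) and then, for each variable index,
// multiplies every potential constant value of that index by its scale and
// merges the resulting copies, effectively building the cross product of all
// index choices. Unknown or unbounded indices force UsrOI to the unknown
// state instead.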
ChangeStatus AAPointerInfoFloating::updateImpl(Attributor &A) {
  using namespace AA::PointerInfo;
  const DataLayout &DL = A.getDataLayout();
  Value &AssociatedValue = getAssociatedValue();

  DenseMap<Value *, OffsetInfo> OffsetInfoMap;
  OffsetInfoMap[&AssociatedValue].insert(0);

  auto HandlePassthroughUser = [&](Value *Usr, Value *CurPtr, bool &Follow) {
           "CurPtr does not exist in the map!");

    auto &UsrOI = OffsetInfoMap[Usr];
    auto &PtrOI = OffsetInfoMap[CurPtr];
    assert(!PtrOI.isUnassigned() &&
           "Cannot pass through if the input Ptr was not visited!");

  auto UsePred = [&](const Use &U, bool &Follow) -> bool {
    User *Usr = U.getUser();
    LLVM_DEBUG(dbgs() << "[AAPointerInfo] Analyze " << *CurPtr << " in " << *Usr
           "The current pointer offset should have been seeded!");
    assert(!OffsetInfoMap[CurPtr].isUnassigned() &&
           "Current pointer should be assigned");

      return HandlePassthroughUser(Usr, CurPtr, Follow);

      LLVM_DEBUG(dbgs() << "[AAPointerInfo] Unhandled constant user " << *CE

      auto &UsrOI = OffsetInfoMap[Usr];
      auto &PtrOI = OffsetInfoMap[CurPtr];

      if (UsrOI.isUnknown())

      if (PtrOI.isUnknown()) {

      Follow = collectConstantsForGEP(A, DL, UsrOI, PtrOI, GEP);

      return HandlePassthroughUser(Usr, CurPtr, Follow);

      if (RI->getFunction() == getAssociatedFunction()) {
        auto &PtrOI = OffsetInfoMap[CurPtr];
        Changed |= setReachesReturn(PtrOI);

      auto &UsrOI = PhiIt->second;
      auto &PtrOI = OffsetInfoMap[CurPtr];

      if (PtrOI.isUnknown()) {
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI operand offset unknown "
                          << *CurPtr << " in " << *PHI << "\n");
        Follow = !UsrOI.isUnknown();

      if (UsrOI == PtrOI) {
        assert(!PtrOI.isUnassigned() &&
               "Cannot assign if the current Ptr was not visited!");
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI is invariant (so far)");

      auto It = OffsetInfoMap.find(CurPtrBase);
      if (It == OffsetInfoMap.end()) {
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI operand is too complex "
                          << *CurPtr << " in " << *PHI
                          << " (base: " << *CurPtrBase << ")\n");

          A.getInfoCache().getAnalysisResultForFunction<CycleAnalysis>(
              *PHI->getFunction());

      auto BaseOI = It->getSecond();
      BaseOI.addToAll(Offset.getZExtValue());
      if (IsFirstPHIUser || BaseOI == UsrOI) {
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI is invariant " << *CurPtr
                          << " in " << *Usr << "\n");
        return HandlePassthroughUser(Usr, CurPtr, Follow);
      }

      LLVM_DEBUG(
          dbgs() << "[AAPointerInfo] PHI operand pointer offset mismatch "
                 << *CurPtr << " in " << *PHI << "\n");
      if (!handleAccess(A, *LoadI, nullptr, AK,
                        OffsetInfoMap[CurPtr].Offsets, Changed,

        return II->isAssumeLikeIntrinsic();
      } while (FromI && FromI != ToI);

      auto IsValidAssume = [&](IntrinsicInst &IntrI) {
        if (IntrI.getIntrinsicID() != Intrinsic::assume)
        if (IntrI.getParent() == BB) {
          if (IsImpactedInRange(LoadI->getNextNode(), &IntrI))
        if ((*PredIt) != BB)
        if (SuccBB == IntrBB)
        if (IsImpactedInRange(LoadI->getNextNode(), BB->getTerminator()))
        if (IsImpactedInRange(&IntrBB->front(), &IntrI))

      std::pair<Value *, IntrinsicInst *> Assumption;
      for (const Use &LoadU : LoadI->uses()) {
        if (!CmpI->isEquality() || !CmpI->isTrueWhenEqual())
        for (const Use &CmpU : CmpI->uses()) {
          if (!IsValidAssume(*IntrI))
          int Idx = CmpI->getOperandUse(0) == LoadU;
          Assumption = {CmpI->getOperand(Idx), IntrI};
        }
        if (Assumption.first)

      if (!Assumption.first || !Assumption.second)
                        << *Assumption.second << ": " << *LoadI
                        << " == " << *Assumption.first << "\n");
      bool UsedAssumedInformation = false;
      std::optional<Value *> Content = nullptr;
      if (Assumption.first)
            A.getAssumedSimplified(*Assumption.first, *this,
      return handleAccess(
          A, *Assumption.second, Content, AccessKind::AK_ASSUMPTION,
          OffsetInfoMap[CurPtr].Offsets, Changed, *LoadI->getType());

      for (auto *OtherOp : OtherOps) {
        if (OtherOp == CurPtr) {
              << "[AAPointerInfo] Escaping use in store like instruction " << I

      bool UsedAssumedInformation = false;
      std::optional<Value *> Content = nullptr;
        Content = A.getAssumedSimplified(
      return handleAccess(A, I, Content, AK, OffsetInfoMap[CurPtr].Offsets,

      return HandleStoreLike(*StoreI, StoreI->getValueOperand(),
                             *StoreI->getValueOperand()->getType(),
                             {StoreI->getValueOperand()}, AccessKind::AK_W);
      return HandleStoreLike(*RMWI, nullptr, *RMWI->getValOperand()->getType(),
                             {RMWI->getValOperand()}, AccessKind::AK_RW);
      return HandleStoreLike(
          *CXI, nullptr, *CXI->getNewValOperand()->getType(),
          {CXI->getCompareOperand(), CXI->getNewValOperand()},

          A.getInfoCache().getTargetLibraryInfoForFunction(*CB->getFunction());

      const auto *CSArgPI = A.getAAFor<AAPointerInfo>(
      Changed = translateAndAddState(A, *CSArgPI, OffsetInfoMap[CurPtr], *CB,
      if (!CSArgPI->reachesReturn())
        return isValidState();

      if (!Callee || Callee->arg_size() <= ArgNo)
      bool UsedAssumedInformation = false;
      auto ReturnedValue = A.getAssumedSimplified(
      auto *Arg = Callee->getArg(ArgNo);
      if (ReturnedArg && Arg != ReturnedArg)
      bool IsRetMustAcc = IsArgMustAcc && (ReturnedArg == Arg);
      const auto *CSRetPI = A.getAAFor<AAPointerInfo>(
      OffsetInfo OI = OffsetInfoMap[CurPtr];
      CSArgPI->addReturnedOffsetsTo(OI);
      Changed =
          translateAndAddState(A, *CSRetPI, OI, *CB, IsRetMustAcc) | Changed;
      return isValidState();

      LLVM_DEBUG(dbgs() << "[AAPointerInfo] Call user not handled " << *CB

    LLVM_DEBUG(dbgs() << "[AAPointerInfo] User not handled " << *Usr << "\n");

  auto EquivalentUseCB = [&](const Use &OldU, const Use &NewU) {
    assert(OffsetInfoMap.count(OldU) && "Old use should be known already!");
    assert(!OffsetInfoMap[OldU].isUnassigned() && "Old use should be assigned");
    if (OffsetInfoMap.count(NewU)) {
      if (!(OffsetInfoMap[NewU] == OffsetInfoMap[OldU])) {
        dbgs() << "[AAPointerInfo] Equivalent use callback failed: "
               << OffsetInfoMap[NewU] << " vs " << OffsetInfoMap[OldU]
      return OffsetInfoMap[NewU] == OffsetInfoMap[OldU];
    }
    return HandlePassthroughUser(NewU.get(), OldU.get(), Unused);

  if (!A.checkForAllUses(UsePred, *this, AssociatedValue,
                         true, EquivalentUseCB)) {
    LLVM_DEBUG(dbgs() << "[AAPointerInfo] Check for all uses failed, abort!\n");
    return indicatePessimisticFixpoint();
  }

    dbgs() << "Accesses by bin after update:\n";
struct AAPointerInfoReturned final : AAPointerInfoImpl {
  AAPointerInfoReturned(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoImpl(IRP, A) {}

  ChangeStatus updateImpl(Attributor &A) override {
    return indicatePessimisticFixpoint();
  }

  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
  }
};

struct AAPointerInfoArgument final : AAPointerInfoFloating {
  AAPointerInfoArgument(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoFloating(IRP, A) {}

  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
  }
};

struct AAPointerInfoCallSiteArgument final : AAPointerInfoFloating {
  AAPointerInfoCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoFloating(IRP, A) {}

    using namespace AA::PointerInfo;
    if (auto Length = MI->getLengthInBytes())
      LengthVal = Length->getSExtValue();
    unsigned ArgNo = getIRPosition().getCallSiteArgNo();
      LLVM_DEBUG(dbgs() << "[AAPointerInfo] Unhandled memory intrinsic "
      return indicatePessimisticFixpoint();
        ArgNo == 0 ? AccessKind::AK_MUST_WRITE : AccessKind::AK_MUST_READ;
    Changed =
        Changed | addAccess(A, {0, LengthVal}, *MI, nullptr, Kind, nullptr);

      dbgs() << "Accesses by bin after update:\n";

    Argument *Arg = getAssociatedArgument();
        A.getAAFor<AAPointerInfo>(*this, ArgPos, DepClassTy::REQUIRED);
    if (ArgAA && ArgAA->getState().isValidState())
      return translateAndAddStateFromCallee(A, *ArgAA,
    return indicatePessimisticFixpoint();

    bool IsKnownNoCapture;
            A, this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNoCapture))
      return indicatePessimisticFixpoint();

    bool IsKnown = false;
      return ChangeStatus::UNCHANGED;
        ReadOnly ? AccessKind::AK_MAY_READ : AccessKind::AK_MAY_READ_WRITE;

  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
  }
};

struct AAPointerInfoCallSiteReturned final : AAPointerInfoFloating {
  AAPointerInfoCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoFloating(IRP, A) {}

  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
  }
};
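// Summary: AAPointerInfoFloating::updateImpl above walks all uses of the
// pointer: pass-through users inherit the current offsets, GEPs go through
// collectConstantsForGEP, PHIs are checked for loop-invariant offsets,
// loads/stores/atomics become read/write accesses, and call sites are
// translated through the callee's AAPointerInfo. The Returned, Argument,
// CallSiteArgument, and CallSiteReturned structs specialize that logic for
// the respective IR positions; memory intrinsics at call sites are modeled
// directly as a must-read or must-write of the given length.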
struct AANoUnwindImpl : AANoUnwind {
  AANoUnwindImpl(const IRPosition &IRP, Attributor &A) : AANoUnwind(IRP, A) {}

        A, nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "nounwind" : "may-unwind";
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    auto Opcodes = {
        (unsigned)Instruction::Invoke,      (unsigned)Instruction::CallBr,
        (unsigned)Instruction::Call,        (unsigned)Instruction::CleanupRet,
        (unsigned)Instruction::CatchSwitch, (unsigned)Instruction::Resume};

    auto CheckForNoUnwind = [&](Instruction &I) {
      if (!I.mayThrow(true))

      bool IsKnownNoUnwind;
    };

    bool UsedAssumedInformation = false;
    if (!A.checkForAllInstructions(CheckForNoUnwind, *this, Opcodes,
                                   UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
  }
};

struct AANoUnwindFunction final : public AANoUnwindImpl {
  AANoUnwindFunction(const IRPosition &IRP, Attributor &A)
      : AANoUnwindImpl(IRP, A) {}

struct AANoUnwindCallSite final
    : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl> {
  AANoUnwindCallSite(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl>(IRP, A) {}
    case Intrinsic::nvvm_barrier_cta_sync_aligned_all:
    case Intrinsic::nvvm_barrier_cta_sync_aligned_count:
    case Intrinsic::nvvm_barrier0_and:
    case Intrinsic::nvvm_barrier0_or:
    case Intrinsic::nvvm_barrier0_popc:
    case Intrinsic::amdgcn_s_barrier:
      if (ExecutedAligned)

  switch (I->getOpcode()) {
  case Instruction::AtomicRMW:
  case Instruction::Store:
  case Instruction::Load:
        "New atomic operations need to be known in the attributor.");

    return !MI->isVolatile();

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "nosync" : "may-sync";
  }

    if (I.mayReadOrWriteMemory())

    bool UsedAssumedInformation = false;
    if (!A.checkForAllReadWriteInstructions(CheckRWInstForNoSync, *this,
                                            UsedAssumedInformation) ||
        !A.checkForAllCallLikeInstructions(CheckForNoSync, *this,
                                           UsedAssumedInformation))
      return indicatePessimisticFixpoint();

struct AANoSyncFunction final : public AANoSyncImpl {
  AANoSyncFunction(const IRPosition &IRP, Attributor &A)
      : AANoSyncImpl(IRP, A) {}

struct AANoSyncCallSite final : AACalleeToCallSite<AANoSync, AANoSyncImpl> {
  AANoSyncCallSite(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoSync, AANoSyncImpl>(IRP, A) {}
struct AANoFreeImpl : public AANoFree {
  AANoFreeImpl(const IRPosition &IRP, Attributor &A) : AANoFree(IRP, A) {}

                                            DepClassTy::NONE, IsKnown));

                                            DepClassTy::REQUIRED, IsKnown);

    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallLikeInstructions(CheckForNoFree, *this,
                                           UsedAssumedInformation))
      return indicatePessimisticFixpoint();
    return ChangeStatus::UNCHANGED;

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "nofree" : "may-free";
  }

struct AANoFreeFunction final : public AANoFreeImpl {
  AANoFreeFunction(const IRPosition &IRP, Attributor &A)
      : AANoFreeImpl(IRP, A) {}

struct AANoFreeCallSite final : AACalleeToCallSite<AANoFree, AANoFreeImpl> {
  AANoFreeCallSite(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoFree, AANoFreeImpl>(IRP, A) {}

struct AANoFreeFloating : AANoFreeImpl {
  AANoFreeFloating(const IRPosition &IRP, Attributor &A)
      : AANoFreeImpl(IRP, A) {}

    const IRPosition &IRP = getIRPosition();
                                        DepClassTy::OPTIONAL, IsKnown))
      return ChangeStatus::UNCHANGED;

    Value &AssociatedValue = getIRPosition().getAssociatedValue();
    auto Pred = [&](const Use &U, bool &Follow) -> bool {
                                              DepClassTy::REQUIRED, IsKnown);

    if (!A.checkForAllUses(Pred, *this, AssociatedValue))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;

struct AANoFreeArgument final : AANoFreeFloating {
  AANoFreeArgument(const IRPosition &IRP, Attributor &A)
      : AANoFreeFloating(IRP, A) {}

struct AANoFreeCallSiteArgument final : AANoFreeFloating {
  AANoFreeCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANoFreeFloating(IRP, A) {}

    Argument *Arg = getAssociatedArgument();
      return indicatePessimisticFixpoint();
                                        DepClassTy::REQUIRED, IsKnown))
      return ChangeStatus::UNCHANGED;
    return indicatePessimisticFixpoint();

struct AANoFreeReturned final : AANoFreeFloating {
  AANoFreeReturned(const IRPosition &IRP, Attributor &A)
      : AANoFreeFloating(IRP, A) {

  void trackStatistics() const override {}

struct AANoFreeCallSiteReturned final : AANoFreeFloating {
  AANoFreeCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AANoFreeFloating(IRP, A) {}

    return ChangeStatus::UNCHANGED;
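// Summary: the AANoFree family follows the usual pattern in this file: an
// *Impl struct holds the shared update logic, the Function variant checks all
// call-like instructions, the CallSite variant reuses the callee result via
// AACalleeToCallSite, and the Floating/Argument/CallSiteArgument/Returned
// variants track whether an individual pointer value may be freed by
// following its uses.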
                           bool IgnoreSubsumingPositions) {
  SmallVector<Attribute::AttrKind, 2> AttrKinds;
  AttrKinds.push_back(Attribute::NonNull);
    AttrKinds.push_back(Attribute::Dereferenceable);
  if (A.hasAttr(IRP, AttrKinds, IgnoreSubsumingPositions, Attribute::NonNull))
    return true;

  if (!Fn->isDeclaration()) {

    bool UsedAssumedInformation = false;
    if (!A.checkForAllInstructions(
            [&](Instruction &I) {
              Worklist.push_back({*cast<ReturnInst>(I).getReturnValue(), &I});
              return true;
            },
            UsedAssumedInformation, false, true))

                       Attribute::NonNull)});

static int64_t getKnownNonNullAndDerefBytesForUse(
    Attributor &A, const AbstractAttribute &QueryingAA, Value &AssociatedValue,
    const Use *U, const Instruction *I, bool &IsNonNull, bool &TrackUse) {

  const Value *UseV = U->get();

  const DataLayout &DL = A.getInfoCache().getDL();

          U, {Attribute::NonNull, Attribute::Dereferenceable})) {

    bool IsKnownNonNull;
    IsNonNull |= IsKnownNonNull;
    return DerefAA ? DerefAA->getKnownDereferenceableBytes() : 0;

  if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
      Loc->Size.isScalable() || I->isVolatile())

  if (Base && Base == &AssociatedValue) {
    int64_t DerefBytes = Loc->Size.getValue() + Offset;
    return std::max(int64_t(0), DerefBytes);
  }

    int64_t DerefBytes = Loc->Size.getValue();
    return std::max(int64_t(0), DerefBytes);
struct AANonNullImpl : AANonNull {
  AANonNullImpl(const IRPosition &IRP, Attributor &A) : AANonNull(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    Value &V = *getAssociatedValue().stripPointerCasts();
      indicatePessimisticFixpoint();

    if (Instruction *CtxI = getCtxI())
      followUsesInMBEC(*this, A, getState(), *CtxI);
  }

  /// See followUsesInMBEC.
  bool followUseInMBEC(Attributor &A, const Use *U, const Instruction *I,
                       AANonNull::StateType &State) {
    bool IsNonNull = false;
    bool TrackUse = false;
    getKnownNonNullAndDerefBytesForUse(A, *this, getAssociatedValue(), U, I,
                                       IsNonNull, TrackUse);
    State.setKnown(IsNonNull);
    return TrackUse;
  }

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "nonnull" : "may-null";
  }
};

struct AANonNullFloating : public AANonNullImpl {
  AANonNullFloating(const IRPosition &IRP, Attributor &A)
      : AANonNullImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    auto CheckIRP = [&](const IRPosition &IRP) {
      bool IsKnownNonNull;
      return AA::hasAssumedIRAttr<Attribute::NonNull>(
          A, *this, IRP, DepClassTy::OPTIONAL, IsKnownNonNull);
    };

    bool UsedAssumedInformation = false;
    Value *AssociatedValue = &getAssociatedValue();
    if (!A.getAssumedSimplifiedValues(getIRPosition(), *this, Values,

        Values.size() != 1 || Values.front().getValue() != AssociatedValue;

        return AA::hasAssumedIRAttr<Attribute::NonNull>(
            A, this, IRPosition::value(*Op), DepClassTy::OPTIONAL,
      return ChangeStatus::UNCHANGED;

                                     DepClassTy::OPTIONAL, IsKnown) &&
                                     DepClassTy::OPTIONAL, IsKnown))
        return ChangeStatus::UNCHANGED;

      if (AVIRP == getIRPosition() || !CheckIRP(AVIRP))
        return indicatePessimisticFixpoint();
      return ChangeStatus::UNCHANGED;

    for (const auto &VAC : Values)
        return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
  }
};

struct AANonNullReturned final
    : AAReturnedFromReturnedValues<AANonNull, AANonNull, AANonNull::StateType,
                                   false, AANonNull::IRAttributeKind, false> {
  AANonNullReturned(const IRPosition &IRP, Attributor &A)
      : AAReturnedFromReturnedValues<AANonNull, AANonNull, AANonNull::StateType,
                                     false, AANonNull::IRAttributeKind, false>(
            IRP, A) {}

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "nonnull" : "may-null";
  }
};

struct AANonNullArgument final
    : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl> {
  AANonNullArgument(const IRPosition &IRP, Attributor &A)
      : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl>(IRP, A) {}

struct AANonNullCallSiteArgument final : AANonNullFloating {
  AANonNullCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANonNullFloating(IRP, A) {}

struct AANonNullCallSiteReturned final
    : AACalleeToCallSite<AANonNull, AANonNullImpl> {
  AANonNullCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANonNull, AANonNullImpl>(IRP, A) {}
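// Summary: AANonNull follows the standard split in this file: the floating
// variant simplifies the value and queries nonnull on the simplified values,
// while the returned/argument/call-site variants are generated from the
// generic AAReturnedFromReturnedValues / AAArgumentFromCallSiteArguments /
// AACalleeToCallSite wrappers defined earlier.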
struct AAMustProgressImpl : public AAMustProgress {
  AAMustProgressImpl(const IRPosition &IRP, Attributor &A)
      : AAMustProgress(IRP, A) {}

        A, nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "mustprogress" : "may-not-progress";
  }

struct AAMustProgressFunction final : AAMustProgressImpl {
  AAMustProgressFunction(const IRPosition &IRP, Attributor &A)
      : AAMustProgressImpl(IRP, A) {}

            A, this, getIRPosition(), DepClassTy::OPTIONAL, IsKnown)) {
        return indicateOptimisticFixpoint();
      return ChangeStatus::UNCHANGED;
    }

    auto CheckForMustProgress = [&](AbstractCallSite ACS) {
      bool IsKnownMustProgress;
          A, this, IPos, DepClassTy::REQUIRED, IsKnownMustProgress,

    bool AllCallSitesKnown = true;
    if (!A.checkForAllCallSites(CheckForMustProgress, *this,
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;

  void trackStatistics() const override {

struct AAMustProgressCallSite final : AAMustProgressImpl {
  AAMustProgressCallSite(const IRPosition &IRP, Attributor &A)
      : AAMustProgressImpl(IRP, A) {}

    bool IsKnownMustProgress;
            A, this, FnPos, DepClassTy::REQUIRED, IsKnownMustProgress))
      return indicatePessimisticFixpoint();
    return ChangeStatus::UNCHANGED;

  void trackStatistics() const override {
struct AANoRecurseImpl : public AANoRecurse {
  AANoRecurseImpl(const IRPosition &IRP, Attributor &A) : AANoRecurse(IRP, A) {}

        A, nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "norecurse" : "may-recurse";
  }

struct AANoRecurseFunction final : AANoRecurseImpl {
  AANoRecurseFunction(const IRPosition &IRP, Attributor &A)
      : AANoRecurseImpl(IRP, A) {}

    auto CallSitePred = [&](AbstractCallSite ACS) {
      bool IsKnownNoRecurse;
              DepClassTy::NONE, IsKnownNoRecurse))
      return IsKnownNoRecurse;
    };
    bool UsedAssumedInformation = false;
    if (A.checkForAllCallSites(CallSitePred, *this, true,
                               UsedAssumedInformation)) {
      if (!UsedAssumedInformation)
        indicateOptimisticFixpoint();
      return ChangeStatus::UNCHANGED;
    }

    const AAInterFnReachability *EdgeReachability =
        A.getAAFor<AAInterFnReachability>(*this, getIRPosition(),
                                          DepClassTy::REQUIRED);
    if (EdgeReachability && EdgeReachability->canReach(A, *getAnchorScope()))
      return indicatePessimisticFixpoint();
    return ChangeStatus::UNCHANGED;

struct AANoRecurseCallSite final
    : AACalleeToCallSite<AANoRecurse, AANoRecurseImpl> {
  AANoRecurseCallSite(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoRecurse, AANoRecurseImpl>(IRP, A) {}
struct AANonConvergentImpl : public AANonConvergent {
  AANonConvergentImpl(const IRPosition &IRP, Attributor &A)
      : AANonConvergent(IRP, A) {}

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "non-convergent" : "may-be-convergent";
  }

struct AANonConvergentFunction final : AANonConvergentImpl {
  AANonConvergentFunction(const IRPosition &IRP, Attributor &A)
      : AANonConvergentImpl(IRP, A) {}

    auto CalleeIsNotConvergent = [&](Instruction &Inst) {
      if (!Callee || Callee->isIntrinsic()) {
      }
      if (Callee->isDeclaration()) {
        return !Callee->hasFnAttribute(Attribute::Convergent);
      }
      const auto *ConvergentAA = A.getAAFor<AANonConvergent>(
      return ConvergentAA && ConvergentAA->isAssumedNotConvergent();
    };

    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallLikeInstructions(CalleeIsNotConvergent, *this,
                                           UsedAssumedInformation)) {
      return indicatePessimisticFixpoint();
    }
    return ChangeStatus::UNCHANGED;

    if (isKnownNotConvergent() &&
        A.hasAttr(getIRPosition(), Attribute::Convergent)) {
      A.removeAttrs(getIRPosition(), {Attribute::Convergent});
      return ChangeStatus::CHANGED;
    }
    return ChangeStatus::UNCHANGED;
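// Summary: a function is assumed non-convergent if every callee of its
// call-like instructions is itself not convergent (declared intrinsics and
// declarations with the convergent attribute stop the deduction); once that
// is known, manifest() strips an existing convergent attribute.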
struct AAUndefinedBehaviorImpl : public AAUndefinedBehavior {
  AAUndefinedBehaviorImpl(const IRPosition &IRP, Attributor &A)
      : AAUndefinedBehavior(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    const size_t UBPrevSize = KnownUBInsts.size();
    const size_t NoUBPrevSize = AssumedNoUBInsts.size();

    auto InspectMemAccessInstForUB = [&](Instruction &I) {
      if (I.isVolatile() && I.mayWriteToMemory())
        return true;

      if (AssumedNoUBInsts.count(&I) || KnownUBInsts.count(&I))
        return true;

             "Expected pointer operand of memory accessing instruction");

      std::optional<Value *> SimplifiedPtrOp =
          stopOnUndefOrAssumed(A, PtrOp, &I);
      if (!SimplifiedPtrOp || !*SimplifiedPtrOp)
        return true;
      const Value *PtrOpVal = *SimplifiedPtrOp;

        AssumedNoUBInsts.insert(&I);

        AssumedNoUBInsts.insert(&I);
      else
        KnownUBInsts.insert(&I);
    };

    auto InspectBrInstForUB = [&](Instruction &I) {
      if (AssumedNoUBInsts.count(&I) || KnownUBInsts.count(&I))
        return true;

      if (BrInst->isUnconditional())
        return true;

      std::optional<Value *> SimplifiedCond =
          stopOnUndefOrAssumed(A, BrInst->getCondition(), BrInst);
      if (!SimplifiedCond || !*SimplifiedCond)
        return true;
      AssumedNoUBInsts.insert(&I);
    };

    auto InspectCallSiteForUB = [&](Instruction &I) {
      if (AssumedNoUBInsts.count(&I) || KnownUBInsts.count(&I))
        return true;

      for (unsigned idx = 0; idx < CB.arg_size(); idx++) {
        if (idx >= Callee->arg_size())
          break;

        bool IsKnownNoUndef;
        AA::hasAssumedIRAttr<Attribute::NoUndef>(
            A, this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNoUndef);
        if (!IsKnownNoUndef)
          continue;
        bool UsedAssumedInformation = false;
        std::optional<Value *> SimplifiedVal =
        if (UsedAssumedInformation)
          continue;
        if (SimplifiedVal && !*SimplifiedVal)
          KnownUBInsts.insert(&I);

        bool IsKnownNonNull;
        AA::hasAssumedIRAttr<Attribute::NonNull>(
            A, this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNonNull);
          KnownUBInsts.insert(&I);
      }
    };

      std::optional<Value *> SimplifiedRetValue =
          stopOnUndefOrAssumed(A, RI.getReturnValue(), &I);
      if (!SimplifiedRetValue || !*SimplifiedRetValue)
        return true;

        bool IsKnownNonNull;
          KnownUBInsts.insert(&I);

    bool UsedAssumedInformation = false;
    A.checkForAllInstructions(InspectMemAccessInstForUB, *this,
                              {Instruction::Load, Instruction::Store,
                               Instruction::AtomicCmpXchg,
                               Instruction::AtomicRMW},
                              UsedAssumedInformation,
    A.checkForAllInstructions(InspectBrInstForUB, *this, {Instruction::Br},
                              UsedAssumedInformation,
    A.checkForAllCallLikeInstructions(InspectCallSiteForUB, *this,
                                      UsedAssumedInformation);

    if (!getAnchorScope()->getReturnType()->isVoidTy()) {
      if (!A.isAssumedDead(ReturnIRP, this, nullptr, UsedAssumedInformation)) {
        bool IsKnownNoUndef;
        AA::hasAssumedIRAttr<Attribute::NoUndef>(
            A, this, ReturnIRP, DepClassTy::NONE, IsKnownNoUndef);
        A.checkForAllInstructions(InspectReturnInstForUB, *this,
                                  {Instruction::Ret}, UsedAssumedInformation,
      }
    }

    if (NoUBPrevSize != AssumedNoUBInsts.size() ||
        UBPrevSize != KnownUBInsts.size())
      return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
  }
  bool isKnownToCauseUB(Instruction *I) const override {
    return KnownUBInsts.count(I);
  }

  bool isAssumedToCauseUB(Instruction *I) const override {
    switch (I->getOpcode()) {
    case Instruction::Load:
    case Instruction::Store:
    case Instruction::AtomicCmpXchg:
    case Instruction::AtomicRMW:
      return !AssumedNoUBInsts.count(I);
    case Instruction::Br: {
      if (BrInst->isUnconditional())
        return false;
      return !AssumedNoUBInsts.count(I);
    }
    }
  }

  ChangeStatus manifest(Attributor &A) override {
    if (KnownUBInsts.empty())
      return ChangeStatus::UNCHANGED;
    for (Instruction *I : KnownUBInsts)
      A.changeToUnreachableAfterManifest(I);
    return ChangeStatus::CHANGED;
  }

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "undefined-behavior" : "no-ub";
  }

  SmallPtrSet<Instruction *, 8> KnownUBInsts;

  SmallPtrSet<Instruction *, 8> AssumedNoUBInsts;

  std::optional<Value *> stopOnUndefOrAssumed(Attributor &A, Value *V,
                                              Instruction *I) {
    bool UsedAssumedInformation = false;
    std::optional<Value *> SimplifiedV =
    if (!UsedAssumedInformation) {
        KnownUBInsts.insert(I);
        return std::nullopt;
      KnownUBInsts.insert(I);
      return std::nullopt;
  }
};

struct AAUndefinedBehaviorFunction final : AAUndefinedBehaviorImpl {
  AAUndefinedBehaviorFunction(const IRPosition &IRP, Attributor &A)
      : AAUndefinedBehaviorImpl(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECL(UndefinedBehaviorInstruction, Instruction,
               "Number of instructions known to have UB");
    BUILD_STAT_NAME(UndefinedBehaviorInstruction, Instruction) +=
        KnownUBInsts.size();
  }
};
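// Summary: KnownUBInsts holds instructions proven to trigger UB (they are
// replaced by unreachable in manifest()), while AssumedNoUBInsts caches
// instructions that were already inspected and found harmless so they are not
// re-analyzed in later update rounds; updateImpl reports CHANGED whenever
// either set grew.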
static bool mayContainUnboundedCycle(Function &F, Attributor &A) {
  ScalarEvolution *SE =
      A.getInfoCache().getAnalysisResultForFunction<ScalarEvolutionAnalysis>(F);
  LoopInfo *LI = A.getInfoCache().getAnalysisResultForFunction<LoopAnalysis>(F);

  for (scc_iterator<Function *> SCCI = scc_begin(&F); !SCCI.isAtEnd(); ++SCCI)
    if (SCCI.hasCycle())

  for (auto *L : LI->getLoopsInPreorder()) {
struct AAWillReturnImpl : public AAWillReturn {
  AAWillReturnImpl(const IRPosition &IRP, Attributor &A)
      : AAWillReturn(IRP, A) {}

        A, nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));

  /// Check for `mustprogress` and `readonly` as they imply `willreturn`.
  bool isImpliedByMustprogressAndReadonly(Attributor &A, bool KnownOnly) {
    if (!A.hasAttr(getIRPosition(), {Attribute::MustProgress}))
      return false;

    return IsKnown || !KnownOnly;
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    if (isImpliedByMustprogressAndReadonly(A, false))
      return ChangeStatus::UNCHANGED;

    auto CheckForWillReturn = [&](Instruction &I) {
              A, this, IPos, DepClassTy::REQUIRED, IsKnown)) {

      bool IsKnownNoRecurse;
          A, this, IPos, DepClassTy::REQUIRED, IsKnownNoRecurse);
    };

    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallLikeInstructions(CheckForWillReturn, *this,
                                           UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
  }

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "willreturn" : "may-noreturn";
  }
};

struct AAWillReturnFunction final : AAWillReturnImpl {
  AAWillReturnFunction(const IRPosition &IRP, Attributor &A)
      : AAWillReturnImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAWillReturnImpl::initialize(A);

    Function *F = getAnchorScope();
    assert(F && "Did expect an anchor function");
    if (F->isDeclaration() || mayContainUnboundedCycle(*F, A))
      indicatePessimisticFixpoint();
  }

struct AAWillReturnCallSite final
    : AACalleeToCallSite<AAWillReturn, AAWillReturnImpl> {
  AAWillReturnCallSite(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AAWillReturn, AAWillReturnImpl>(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    if (isImpliedByMustprogressAndReadonly(A, false))
      return ChangeStatus::UNCHANGED;

    return AACalleeToCallSite::updateImpl(A);
  }
  const ToTy *To = nullptr;

    if (!ES || ES->empty()) {
      ExclusionSet = nullptr;
    } else if (MakeUnique) {
      ExclusionSet = A.getInfoCache().getOrCreateUniqueBlockExecutionSet(ES);
    }

    if (!PairDMI::isEqual({LHS->From, LHS->To}, {RHS->From, RHS->To}))
      return false;
    return InstSetDMI::isEqual(LHS->ExclusionSet, RHS->ExclusionSet);

#define DefineKeys(ToTy)                                                       \
  template <>                                                                  \
  ReachabilityQueryInfo<ToTy>                                                  \
      DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::EmptyKey =                  \
          ReachabilityQueryInfo<ToTy>(                                         \
              DenseMapInfo<const Instruction *>::getEmptyKey(),                \
              DenseMapInfo<const ToTy *>::getEmptyKey());                      \
  template <>                                                                  \
  ReachabilityQueryInfo<ToTy>                                                  \
      DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::TombstoneKey =              \
          ReachabilityQueryInfo<ToTy>(                                         \
              DenseMapInfo<const Instruction *>::getTombstoneKey(),            \
              DenseMapInfo<const ToTy *>::getTombstoneKey());
template <typename BaseTy, typename ToTy>
struct CachedReachabilityAA : public BaseTy {
  using RQITy = ReachabilityQueryInfo<ToTy>;

  CachedReachabilityAA(const IRPosition &IRP, Attributor &A) : BaseTy(IRP, A) {}

  /// See AbstractAttribute::isQueryAA.
  bool isQueryAA() const override { return true; }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    for (unsigned u = 0, e = QueryVector.size(); u < e; ++u) {
      RQITy *RQI = QueryVector[u];
      if (RQI->Result == RQITy::Reachable::No &&
        Changed = ChangeStatus::CHANGED;
    }
    return Changed;
  }

  virtual bool isReachableImpl(Attributor &A, RQITy &RQI,
                               bool IsTemporaryRQI) = 0;

  bool rememberResult(Attributor &A, typename RQITy::Reachable Result,
                      RQITy &RQI, bool UsedExclusionSet, bool IsTemporaryRQI) {
      QueryCache.erase(&RQI);

    if (Result == RQITy::Reachable::Yes || !UsedExclusionSet) {
      RQITy PlainRQI(RQI.From, RQI.To);
      if (!QueryCache.count(&PlainRQI)) {
        RQITy *RQIPtr = new (A.Allocator) RQITy(RQI.From, RQI.To);
        QueryVector.push_back(RQIPtr);
        QueryCache.insert(RQIPtr);
      }
    }

    if (IsTemporaryRQI && Result != RQITy::Reachable::Yes && UsedExclusionSet) {
      assert((!RQI.ExclusionSet || !RQI.ExclusionSet->empty()) &&
             "Did not expect empty set!");
      RQITy *RQIPtr = new (A.Allocator)
          RQITy(A, *RQI.From, *RQI.To, RQI.ExclusionSet, true);
      assert(RQIPtr->Result == RQITy::Reachable::No && "Already reachable?");
      assert(!QueryCache.count(RQIPtr));
      QueryVector.push_back(RQIPtr);
      QueryCache.insert(RQIPtr);
    }

    if (Result == RQITy::Reachable::No && IsTemporaryRQI)
      A.registerForUpdate(*this);
    return Result == RQITy::Reachable::Yes;
  }

  const std::string getAsStr(Attributor *A) const override {
    return "#queries(" + std::to_string(QueryVector.size()) + ")";
  }

  bool checkQueryCache(Attributor &A, RQITy &StackRQI,
                       typename RQITy::Reachable &Result) {
    if (!this->getState().isValidState()) {
      Result = RQITy::Reachable::Yes;
      return true;
    }

    if (StackRQI.ExclusionSet) {
      RQITy PlainRQI(StackRQI.From, StackRQI.To);
      auto It = QueryCache.find(&PlainRQI);
      if (It != QueryCache.end() && (*It)->Result == RQITy::Reachable::No) {
        Result = RQITy::Reachable::No;
        return true;
      }
    }

    auto It = QueryCache.find(&StackRQI);
    if (It != QueryCache.end()) {
    }

    QueryCache.insert(&StackRQI);
    return false;
  }

  DenseSet<RQITy *> QueryCache;
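// Summary: reachability results are cached in two forms: a plain (From, To)
// entry for "Yes" answers and for answers that did not depend on an exclusion
// set, and a separate exclusion-set-qualified entry for temporary queries that
// were answered "No" only because of the exclusion set. Temporary "No"
// results also re-register the AA for another update round.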
struct AAIntraFnReachabilityFunction final
    : public CachedReachabilityAA<AAIntraFnReachability, Instruction> {
  using Base = CachedReachabilityAA<AAIntraFnReachability, Instruction>;

  AAIntraFnReachabilityFunction(const IRPosition &IRP, Attributor &A)
      : Base(IRP, A) {
    DT = A.getInfoCache().getAnalysisResultForFunction<DominatorTreeAnalysis>(
        *IRP.getAssociatedFunction());
  }

  bool isAssumedReachable(
      Attributor &A, const Instruction &From, const Instruction &To,
      const AA::InstExclusionSetTy *ExclusionSet) const override {
    auto *NonConstThis = const_cast<AAIntraFnReachabilityFunction *>(this);

    RQITy StackRQI(A, From, To, ExclusionSet, false);
    typename RQITy::Reachable Result;
    if (!NonConstThis->checkQueryCache(A, StackRQI, Result))
      return NonConstThis->isReachableImpl(A, StackRQI, true);
    return Result == RQITy::Reachable::Yes;
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    const auto *LivenessAA =
        A.getAAFor<AAIsDead>(*this, getIRPosition(), DepClassTy::OPTIONAL);
    if (llvm::all_of(DeadEdges,
                     [&](const auto &DeadEdge) {
                       return LivenessAA->isEdgeDead(DeadEdge.first,
                                                     DeadEdge.second);
                     }) &&
        llvm::all_of(DeadBlocks, [&](const BasicBlock *BB) {
          return LivenessAA->isAssumedDead(BB);
        }))
      return ChangeStatus::UNCHANGED;

    return Base::updateImpl(A);
  }

  bool isReachableImpl(Attributor &A, RQITy &RQI,
                       bool IsTemporaryRQI) override {
    bool UsedExclusionSet = false;

      while (IP && IP != &To) {
        if (ExclusionSet && IP != Origin && ExclusionSet->count(IP)) {
          UsedExclusionSet = true;
          break;
        }
      }

    const BasicBlock *FromBB = RQI.From->getParent();
    const BasicBlock *ToBB = RQI.To->getParent();
           "Not an intra-procedural query!");

    if (FromBB == ToBB &&
        WillReachInBlock(*RQI.From, *RQI.To, RQI.ExclusionSet))
      return rememberResult(A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
                            IsTemporaryRQI);

    if (!WillReachInBlock(ToBB->front(), *RQI.To, RQI.ExclusionSet))
      return rememberResult(A, RQITy::Reachable::No, RQI, UsedExclusionSet,
                            IsTemporaryRQI);

    SmallPtrSet<const BasicBlock *, 16> ExclusionBlocks;
    if (RQI.ExclusionSet)
      for (auto *I : *RQI.ExclusionSet)
        if (I->getFunction() == Fn)
          ExclusionBlocks.insert(I->getParent());

    if (ExclusionBlocks.count(FromBB) &&
      return rememberResult(A, RQITy::Reachable::No, RQI, true, IsTemporaryRQI);

    const auto *LivenessAA =
        A.getAAFor<AAIsDead>(*this, getIRPosition(), DepClassTy::OPTIONAL);
    if (LivenessAA && LivenessAA->isAssumedDead(ToBB)) {
      DeadBlocks.insert(ToBB);
      return rememberResult(A, RQITy::Reachable::No, RQI, UsedExclusionSet,
                            IsTemporaryRQI);
    }

    SmallPtrSet<const BasicBlock *, 16> Visited;
    DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> LocalDeadEdges;
    while (!Worklist.empty()) {
      if (!Visited.insert(BB).second)
        continue;
      for (const BasicBlock *SuccBB : successors(BB)) {
        if (LivenessAA && LivenessAA->isEdgeDead(BB, SuccBB)) {
          LocalDeadEdges.insert({BB, SuccBB});
          continue;
        }
          return rememberResult(A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
                                IsTemporaryRQI);
          return rememberResult(A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
                                IsTemporaryRQI);
        if (ExclusionBlocks.count(SuccBB)) {
          UsedExclusionSet = true;
          continue;
        }
      }
    }

    DeadEdges.insert_range(LocalDeadEdges);
    return rememberResult(A, RQITy::Reachable::No, RQI, UsedExclusionSet,
                          IsTemporaryRQI);
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {}

  DenseSet<const BasicBlock *> DeadBlocks;

  DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> DeadEdges;

  const DominatorTree *DT = nullptr;
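// Summary: DeadBlocks and DeadEdges remember which liveness facts previous
// "unreachable" answers relied on; updateImpl only falls back to
// Base::updateImpl (re-verifying the cached queries) when one of those facts
// is no longer guaranteed by the current AAIsDead state.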
                           bool IgnoreSubsumingPositions) {
  assert(ImpliedAttributeKind == Attribute::NoAlias &&
         "Unexpected attribute kind");

    IgnoreSubsumingPositions = true;

  if (A.hasAttr(IRP, {Attribute::ByVal, Attribute::NoAlias},
                IgnoreSubsumingPositions, Attribute::NoAlias))
    return true;

         "Noalias is a pointer attribute");

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "noalias" : "may-alias";
  }
};

struct AANoAliasFloating final : AANoAliasImpl {
  AANoAliasFloating(const IRPosition &IRP, Attributor &A)
      : AANoAliasImpl(IRP, A) {}

    return indicatePessimisticFixpoint();

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {

struct AANoAliasArgument final
    : AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl> {
  using Base = AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl>;
  AANoAliasArgument(const IRPosition &IRP, Attributor &A) : Base(IRP, A) {}

                                       DepClassTy::OPTIONAL, IsKnownNoSycn))
      return Base::updateImpl(A);

      return Base::updateImpl(A);

    bool UsedAssumedInformation = false;
    if (A.checkForAllCallSites(
            [](AbstractCallSite ACS) { return !ACS.isCallbackCall(); }, *this,
            true, UsedAssumedInformation))
      return Base::updateImpl(A);

    return indicatePessimisticFixpoint();
  }
3864struct AANoAliasCallSiteArgument final : AANoAliasImpl {
3865 AANoAliasCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
3866 : AANoAliasImpl(IRP,
A) {}
3870 bool mayAliasWithArgument(Attributor &
A, AAResults *&AAR,
3871 const AAMemoryBehavior &MemBehaviorAA,
3872 const CallBase &CB,
unsigned OtherArgNo) {
3874 if (this->getCalleeArgNo() == (
int)OtherArgNo)
3882 auto *CBArgMemBehaviorAA =
A.getAAFor<AAMemoryBehavior>(
3886 if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadNone()) {
3887 A.recordDependence(*CBArgMemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3894 if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadOnly() &&
3896 A.recordDependence(MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3897 A.recordDependence(*CBArgMemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3903 AAR =
A.getInfoCache().getAnalysisResultForFunction<AAManager>(
3907 bool IsAliasing = !AAR || !AAR->
isNoAlias(&getAssociatedValue(), ArgOp);
3909 "callsite arguments: "
3910 << getAssociatedValue() <<
" " << *ArgOp <<
" => "
3911 << (IsAliasing ?
"" :
"no-") <<
"alias \n");
3916 bool isKnownNoAliasDueToNoAliasPreservation(
3917 Attributor &
A, AAResults *&AAR,
const AAMemoryBehavior &MemBehaviorAA) {
3930 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
3946 bool IsKnownNoCapture;
3949 DepClassTy::OPTIONAL, IsKnownNoCapture))
3955 A, *UserI, *getCtxI(), *
this,
nullptr,
3956 [ScopeFn](
const Function &Fn) {
return &Fn != ScopeFn; }))
3971 LLVM_DEBUG(
dbgs() <<
"[AANoAliasCSArg] Unknown user: " << *UserI <<
"\n");
3975 bool IsKnownNoCapture;
3976 const AANoCapture *NoCaptureAA =
nullptr;
3978 A,
this, VIRP, DepClassTy::NONE, IsKnownNoCapture,
false, &NoCaptureAA);
3979 if (!IsAssumedNoCapture &&
3981 if (!
A.checkForAllUses(UsePred, *
this, getAssociatedValue())) {
3983 dbgs() <<
"[AANoAliasCSArg] " << getAssociatedValue()
3984 <<
" cannot be noalias as it is potentially captured\n");
3989 A.recordDependence(*NoCaptureAA, *
this, DepClassTy::OPTIONAL);
3995 for (
unsigned OtherArgNo = 0; OtherArgNo < CB.
arg_size(); OtherArgNo++)
3996 if (mayAliasWithArgument(
A, AAR, MemBehaviorAA, CB, OtherArgNo))
4006 auto *MemBehaviorAA =
4007 A.getAAFor<AAMemoryBehavior>(*
this, getIRPosition(), DepClassTy::NONE);
4009 A.recordDependence(*MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
4010 return ChangeStatus::UNCHANGED;
4013 bool IsKnownNoAlias;
4016 A,
this, VIRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
4018 <<
" is not no-alias at the definition\n");
4019 return indicatePessimisticFixpoint();
4022 AAResults *AAR =
nullptr;
4023 if (MemBehaviorAA &&
4024 isKnownNoAliasDueToNoAliasPreservation(
A, AAR, *MemBehaviorAA)) {
4026 dbgs() <<
"[AANoAlias] No-Alias deduced via no-alias preservation\n");
4027 return ChangeStatus::UNCHANGED;
4030 return indicatePessimisticFixpoint();
4038struct AANoAliasReturned final : AANoAliasImpl {
4039 AANoAliasReturned(
const IRPosition &IRP, Attributor &
A)
4040 : AANoAliasImpl(IRP,
A) {}
4045 auto CheckReturnValue = [&](
Value &RV) ->
bool {
4056 bool IsKnownNoAlias;
4058 A,
this, RVPos, DepClassTy::REQUIRED, IsKnownNoAlias))
4061 bool IsKnownNoCapture;
4062 const AANoCapture *NoCaptureAA =
nullptr;
4064 A,
this, RVPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
4066 return IsAssumedNoCapture ||
4070 if (!
A.checkForAllReturnedValues(CheckReturnValue, *
this))
4071 return indicatePessimisticFixpoint();
4073 return ChangeStatus::UNCHANGED;
4081struct AANoAliasCallSiteReturned final
4082 : AACalleeToCallSite<AANoAlias, AANoAliasImpl> {
4083 AANoAliasCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
4084 : AACalleeToCallSite<AANoAlias, AANoAliasImpl>(IRP,
A) {}
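// Editorial sketch (not part of the original source): AANoAliasReturned above
// deduces `noalias` for a return position when every returned value is itself
// assumed noalias and not captured, e.g. a thin malloc wrapper:
//
//   ; define ptr @wrap(i64 %n) {
//   ;   %p = call noalias ptr @malloc(i64 %n)
//   ;   ret ptr %p            ; the return of @wrap can be marked noalias
//   ; }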
struct AAIsDeadValueImpl : public AAIsDead {
  AAIsDeadValueImpl(const IRPosition &IRP, Attributor &A) : AAIsDead(IRP, A) {}

  // ...
  bool isAssumedDead() const override { return isAssumed(IS_DEAD); }

  // ...
  bool isKnownDead() const override { return isKnown(IS_DEAD); }

  // ...
  bool isAssumedDead(const BasicBlock *BB) const override { return false; }

  // ...
  bool isKnownDead(const BasicBlock *BB) const override { return false; }

  // ...
  bool isAssumedDead(const Instruction *I) const override {
    return I == getCtxI() && isAssumedDead();
  }

  // ...
  bool isKnownDead(const Instruction *I) const override {
    return isAssumedDead(I) && isKnownDead();
  }

  // ...
  const std::string getAsStr(Attributor *A) const override {
    return isAssumedDead() ? "assumed-dead" : "assumed-live";
  }

  // ...
  bool areAllUsesAssumedDead(Attributor &A, Value &V) {
    // ...
    if (V.getType()->isVoidTy() || V.use_empty())
      // ...
    if (!A.isRunOn(*I->getFunction()))
      // ...
    bool UsedAssumedInformation = false;
    std::optional<Constant *> C =
        A.getAssumedConstant(V, *this, UsedAssumedInformation);
    // ...
    auto UsePred = [&](const Use &U, bool &Follow) { return false; };
    // ...
    return A.checkForAllUses(UsePred, *this, V, false,
                             DepClassTy::REQUIRED, /*...*/);
  }

  // ...
  bool isAssumedSideEffectFree(Attributor &A, Instruction *I) {
    // ...
    bool IsKnownNoUnwind;
    // ...
            A, this, CallIRP, DepClassTy::OPTIONAL, IsKnownNoUnwind))
      // ...
  }
};

struct AAIsDeadFloating : public AAIsDeadValueImpl {
  AAIsDeadFloating(const IRPosition &IRP, Attributor &A)
      : AAIsDeadValueImpl(IRP, A) {}

  // ...
    AAIsDeadValueImpl::initialize(A);
    // ...
      indicatePessimisticFixpoint();
    // ...
    if (!isAssumedSideEffectFree(A, I)) {
      // ...
        indicatePessimisticFixpoint();
      // ...
        removeAssumedBits(HAS_NO_EFFECT);
    // ...

  bool isDeadFence(Attributor &A, FenceInst &FI) {
    const auto *ExecDomainAA = A.lookupAAFor<AAExecutionDomain>(/*...*/);
    if (!ExecDomainAA || !ExecDomainAA->isNoOpFence(FI))
      // ...
    A.recordDependence(*ExecDomainAA, *this, DepClassTy::OPTIONAL);
    // ...
  }

  bool isDeadStore(Attributor &A, StoreInst &SI,
                   SmallSetVector<Instruction *, 8> *AssumeOnlyInst = nullptr) {
    // ...
    if (SI.isVolatile())
      // ...
    bool UsedAssumedInformation = false;
    if (!AssumeOnlyInst) {
      PotentialCopies.clear();
      // ...
                          UsedAssumedInformation)) {
        // ...
            << "[AAIsDead] Could not determine potential copies of store!\n");
        // ...
    LLVM_DEBUG(dbgs() << "[AAIsDead] Store has " << PotentialCopies.size()
                      << " potential copies.\n");
    // ...
    InformationCache &InfoCache = A.getInfoCache();
    // ...
                          UsedAssumedInformation))
      // ...
      auto &UserI = cast<Instruction>(*U.getUser());
      if (InfoCache.isOnlyUsedByAssume(UserI)) {
        // ...
          AssumeOnlyInst->insert(&UserI);
        // ...
      return A.isAssumedDead(U, this, nullptr, UsedAssumedInformation);
      // ...
                        << " is assumed live!\n");
    // ...
  }

  const std::string getAsStr(Attributor *A) const override {
    // ...
      return "assumed-dead-store";
    // ...
      return "assumed-dead-fence";
    return AAIsDeadValueImpl::getAsStr(A);
  }

  // ...
      if (!isDeadStore(A, *SI))
        return indicatePessimisticFixpoint();
    // ...
      if (!isDeadFence(A, *FI))
        return indicatePessimisticFixpoint();
    // ...
    if (!isAssumedSideEffectFree(A, I))
      return indicatePessimisticFixpoint();
    if (!areAllUsesAssumedDead(A, getAssociatedValue()))
      return indicatePessimisticFixpoint();
    // ...

  bool isRemovableStore() const override {
    return isAssumed(IS_REMOVABLE) && isa<StoreInst>(&getAssociatedValue());
  }

  // ...
    Value &V = getAssociatedValue();
    // ...
      SmallSetVector<Instruction *, 8> AssumeOnlyInst;
      bool IsDead = isDeadStore(A, *SI, &AssumeOnlyInst);
      // ...
      A.deleteAfterManifest(*I);
      for (size_t i = 0; i < AssumeOnlyInst.size(); ++i) {
        // ...
        for (auto *Usr : AOI->users())
          // ...
        A.deleteAfterManifest(*AOI);
      }
      // ...
        A.deleteAfterManifest(*FI);
      // ...
      A.deleteAfterManifest(*I);
    // ...

  void trackStatistics() const override { /*...*/ }

  // ...
  SmallSetVector<Value *, 4> PotentialCopies;
};

struct AAIsDeadArgument : public AAIsDeadFloating {
  AAIsDeadArgument(const IRPosition &IRP, Attributor &A)
      : AAIsDeadFloating(IRP, A) {}

  // ...
    Argument &Arg = *getAssociatedArgument();
    if (A.isValidFunctionSignatureRewrite(Arg, {}))
      if (A.registerFunctionSignatureRewrite(/*...*/))
        // ...
        return ChangeStatus::CHANGED;
    // ...
    return ChangeStatus::UNCHANGED;
  // ...
};

struct AAIsDeadCallSiteArgument : public AAIsDeadValueImpl {
  AAIsDeadCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAIsDeadValueImpl(IRP, A) {}

  // ...
    AAIsDeadValueImpl::initialize(A);
    // ...
      indicatePessimisticFixpoint();
  // ...

  // ...
    Argument *Arg = getAssociatedArgument();
    // ...
      return indicatePessimisticFixpoint();
    // ...
    auto *ArgAA = A.getAAFor<AAIsDead>(*this, ArgPos, DepClassTy::REQUIRED);
    // ...
      return indicatePessimisticFixpoint();
  // ...
           "Expected undef values to be filtered out!");
    // ...
    if (A.changeUseAfterManifest(U, UV))
      return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
  // ...
};

struct AAIsDeadCallSiteReturned : public AAIsDeadFloating {
  AAIsDeadCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAIsDeadFloating(IRP, A) {}

  // ...
  bool isAssumedDead() const override {
    return AAIsDeadFloating::isAssumedDead() && IsAssumedSideEffectFree;
  }

  // ...
    AAIsDeadFloating::initialize(A);
    // ...
      indicatePessimisticFixpoint();
    // ...
    IsAssumedSideEffectFree = isAssumedSideEffectFree(A, getCtxI());
  // ...

  // ...
    if (IsAssumedSideEffectFree && !isAssumedSideEffectFree(A, getCtxI())) {
      IsAssumedSideEffectFree = false;
      Changed = ChangeStatus::CHANGED;
    }
    if (!areAllUsesAssumedDead(A, getAssociatedValue()))
      return indicatePessimisticFixpoint();
    // ...

  void trackStatistics() const override {
    if (IsAssumedSideEffectFree)
      // ...
  }

  // ...
  const std::string getAsStr(Attributor *A) const override {
    return isAssumedDead()
               /*...*/
               : (getAssumed() ? "assumed-dead-users" : "assumed-live");
  }

  // ...
  bool IsAssumedSideEffectFree = true;
};

struct AAIsDeadReturned : public AAIsDeadValueImpl {
  AAIsDeadReturned(const IRPosition &IRP, Attributor &A)
      : AAIsDeadValueImpl(IRP, A) {}

  // ...
    bool UsedAssumedInformation = false;
    A.checkForAllInstructions([](Instruction &) { return true; }, *this,
                              {Instruction::Ret}, UsedAssumedInformation);

    auto PredForCallSite = [&](AbstractCallSite ACS) {
      if (ACS.isCallbackCall() || !ACS.getInstruction())
        // ...
      return areAllUsesAssumedDead(A, *ACS.getInstruction());
    };

    if (!A.checkForAllCallSites(PredForCallSite, *this, true,
                                UsedAssumedInformation))
      return indicatePessimisticFixpoint();
    // ...
    return ChangeStatus::UNCHANGED;
  // ...

  // ...
    bool AnyChange = false;
    UndefValue &UV = *UndefValue::get(getAssociatedFunction()->getReturnType());
    // ...
    bool UsedAssumedInformation = false;
    A.checkForAllInstructions(RetInstPred, *this, {Instruction::Ret},
                              UsedAssumedInformation);
    return AnyChange ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
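// Editorial sketch (not part of the original source): the value-position
// AAIsDead variants above treat a value as dead when it is side-effect free
// and all of its uses are assumed dead, e.g.:
//
//   ; %x = add i32 %a, %b     ; no users, no side effects -> assumed dead
//   ; store i32 0, ptr %p     ; dead if every potential copy (reload) is dead
//   ;                         ; or the value only feeds llvm.assume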
struct AAIsDeadFunction : public AAIsDead {
  AAIsDeadFunction(const IRPosition &IRP, Attributor &A) : AAIsDead(IRP, A) {}

  // ...
    assert(F && "Did expect an anchor function");
    if (!isAssumedDeadInternalFunction(A)) {
      ToBeExploredFrom.insert(&F->getEntryBlock().front());
      assumeLive(A, F->getEntryBlock());
    }
  // ...

  bool isAssumedDeadInternalFunction(Attributor &A) {
    if (!getAnchorScope()->hasLocalLinkage())
      // ...
    bool UsedAssumedInformation = false;
    return A.checkForAllCallSites([](AbstractCallSite) { return false; }, *this,
                                  true, UsedAssumedInformation);
  }

  // ...
  const std::string getAsStr(Attributor *A) const override {
    return "Live[#BB " + std::to_string(AssumedLiveBlocks.size()) + "/" +
           std::to_string(getAnchorScope()->size()) + "][#TBEP " +
           std::to_string(ToBeExploredFrom.size()) + "][#KDE " +
           std::to_string(KnownDeadEnds.size()) + "]";
  }

  // ...
    assert(getState().isValidState() &&
           "Attempted to manifest an invalid state!");
    // ...
    if (AssumedLiveBlocks.empty()) {
      A.deleteAfterManifest(F);
      return ChangeStatus::CHANGED;
    }
    // ...
    bool Invoke2CallAllowed = !mayCatchAsynchronousExceptions(F);
    // ...
    KnownDeadEnds.set_union(ToBeExploredFrom);
    for (const Instruction *DeadEndI : KnownDeadEnds) {
      // ...
      bool IsKnownNoReturn;
      // ...
        A.registerInvokeWithDeadSuccessor(const_cast<InvokeInst &>(*II));
      // ...
        A.changeToUnreachableAfterManifest(
            const_cast<Instruction *>(DeadEndI->getNextNode()));
      HasChanged = ChangeStatus::CHANGED;
    }
    // ...
    STATS_DECL(AAIsDead, BasicBlock, "Number of dead basic blocks deleted.");
    for (BasicBlock &BB : F)
      if (!AssumedLiveBlocks.count(&BB)) {
        A.deleteAfterManifest(BB);
        // ...
        HasChanged = ChangeStatus::CHANGED;
      }
    // ...

  // ...
  bool isEdgeDead(const BasicBlock *From, const BasicBlock *To) const override {
    // ...
           "Used AAIsDead of the wrong function");
    return isValidState() && !AssumedLiveEdges.count(std::make_pair(From, To));
  }

  // ...
  void trackStatistics() const override {}

  // ...
  bool isAssumedDead() const override { return false; }

  // ...
  bool isKnownDead() const override { return false; }

  // ...
  bool isAssumedDead(const BasicBlock *BB) const override {
    // ...
           "BB must be in the same anchor scope function.");
    // ...
    return !AssumedLiveBlocks.count(BB);
  }

  // ...
  bool isKnownDead(const BasicBlock *BB) const override {
    return getKnown() && isAssumedDead(BB);
  }

  // ...
  bool isAssumedDead(const Instruction *I) const override {
    assert(I->getParent()->getParent() == getAnchorScope() &&
           "Instruction must be in the same anchor scope function.");
    // ...
    if (!AssumedLiveBlocks.count(I->getParent()))
      // ...
    if (KnownDeadEnds.count(PrevI) || ToBeExploredFrom.count(PrevI))
      // ...
  }

  // ...
  bool isKnownDead(const Instruction *I) const override {
    return getKnown() && isAssumedDead(I);
  }

  // ...
  bool assumeLive(Attributor &A, const BasicBlock &BB) {
    if (!AssumedLiveBlocks.insert(&BB).second)
      // ...
    for (const Instruction &I : BB)
      // ...
      if (F->hasLocalLinkage())
        A.markLiveInternalFunction(*F);
    // ...
  }

  // ...
  SmallSetVector<const Instruction *, 8> ToBeExploredFrom;

  // ...
  SmallSetVector<const Instruction *, 8> KnownDeadEnds;

  // ...
  DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> AssumedLiveEdges;

  // ...
  DenseSet<const BasicBlock *> AssumedLiveBlocks;
};
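// Editorial sketch (not part of the original source): the overloads of
// identifyAliveSuccessors below compute, per terminator kind, which successor
// instructions must still be explored. For a branch whose condition folds to
// a constant only one successor is enqueued, e.g.:
//
//   ; br i1 true, label %taken, label %untaken
//   ;   -> only the first instruction of %taken is added to AliveSuccessors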
static bool
identifyAliveSuccessors(Attributor &A, const CallBase &CB,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  // ...
  bool IsKnownNoReturn;
  // ...
  return !IsKnownNoReturn;
}

static bool
identifyAliveSuccessors(Attributor &A, const InvokeInst &II,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  bool UsedAssumedInformation =
      // ...
  if (AAIsDeadFunction::mayCatchAsynchronousExceptions(*II.getFunction())) {
    AliveSuccessors.push_back(&II.getUnwindDest()->front());
    // ...
    bool IsKnownNoUnwind;
    // ...
    UsedAssumedInformation |= !IsKnownNoUnwind;
    // ...
      AliveSuccessors.push_back(&II.getUnwindDest()->front());
  }
  // ...
  return UsedAssumedInformation;
}

static bool
identifyAliveSuccessors(Attributor &A, const BranchInst &BI,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  bool UsedAssumedInformation = false;
  // ...
  std::optional<Constant *> C =
      A.getAssumedConstant(*BI.getCondition(), AA, UsedAssumedInformation);
  // ...
    UsedAssumedInformation = false;
  // ...
  return UsedAssumedInformation;
}

static bool
identifyAliveSuccessors(Attributor &A, const SwitchInst &SI,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  bool UsedAssumedInformation = false;
  // ...
                                    UsedAssumedInformation)) {
    // ...
    for (const BasicBlock *SuccBB : successors(SI.getParent()))
      // ...
  if (Values.empty() ||
      (Values.size() == 1 &&
       // ...
    return UsedAssumedInformation;
  // ...
  Type &Ty = *SI.getCondition()->getType();
  SmallPtrSet<ConstantInt *, 8> Constants;
  auto CheckForConstantInt = [&](Value *V) {
    // ...
  };

  if (!all_of(Values, [&](AA::ValueAndContext &VAC) {
        return CheckForConstantInt(VAC.getValue());
      })) {
    for (const BasicBlock *SuccBB : successors(SI.getParent()))
      // ...
    return UsedAssumedInformation;
  }

  unsigned MatchedCases = 0;
  for (const auto &CaseIt : SI.cases()) {
    if (Constants.count(CaseIt.getCaseValue())) {
      // ...
      AliveSuccessors.push_back(&CaseIt.getCaseSuccessor()->front());
    }
  }
  // ...
    AliveSuccessors.push_back(&SI.getDefaultDest()->front());
  return UsedAssumedInformation;
}

// (fragment of AAIsDeadFunction::updateImpl)
  // ...
  if (AssumedLiveBlocks.empty()) {
    if (isAssumedDeadInternalFunction(A))
      // ...
    ToBeExploredFrom.insert(&F->getEntryBlock().front());
    assumeLive(A, F->getEntryBlock());
  }

  LLVM_DEBUG(dbgs() << "[AAIsDead] Live [" << AssumedLiveBlocks.size() << "/"
                    << getAnchorScope()->size() << "] BBs and "
                    << ToBeExploredFrom.size() << " exploration points and "
                    << KnownDeadEnds.size() << " known dead ends\n");
  // ...
                                  ToBeExploredFrom.end());
  decltype(ToBeExploredFrom) NewToBeExploredFrom;
  // ...
  while (!Worklist.empty()) {
    // ...
      I = I->getNextNode();
    // ...
    AliveSuccessors.clear();
    // ...
    bool UsedAssumedInformation = false;
    switch (I->getOpcode()) {
    // ...
             "Expected non-terminators to be handled already!");
      for (const BasicBlock *SuccBB : successors(I->getParent()))
        // ...
    case Instruction::Call:
      // ...
                                                *this, AliveSuccessors);
      // ...
    case Instruction::Invoke:
      // ...
                                                *this, AliveSuccessors);
      // ...
    case Instruction::Br:
      // ...
                                                *this, AliveSuccessors);
      // ...
    case Instruction::Switch:
      // ...
                                                *this, AliveSuccessors);
      // ...
    }

    if (UsedAssumedInformation) {
      NewToBeExploredFrom.insert(I);
    } else if (AliveSuccessors.empty() ||
               (I->isTerminator() &&
                AliveSuccessors.size() < I->getNumSuccessors())) {
      if (KnownDeadEnds.insert(I))
        // ...
    }
    // ...
                      << AliveSuccessors.size() << " UsedAssumedInformation: "
                      << UsedAssumedInformation << "\n");

    for (const Instruction *AliveSuccessor : AliveSuccessors) {
      if (!I->isTerminator()) {
        assert(AliveSuccessors.size() == 1 &&
               "Non-terminator expected to have a single successor!");
        // ...
      } else {
        auto Edge = std::make_pair(I->getParent(), AliveSuccessor->getParent());
        if (AssumedLiveEdges.insert(Edge).second)
          // ...
        if (assumeLive(A, *AliveSuccessor->getParent()))
          // ...
      }
    }
  }
  // ...
  if (NewToBeExploredFrom.size() != ToBeExploredFrom.size() ||
      llvm::any_of(NewToBeExploredFrom, [&](const Instruction *I) {
        return !ToBeExploredFrom.count(I);
      }))
    ToBeExploredFrom = std::move(NewToBeExploredFrom);
  // ...
  if (ToBeExploredFrom.empty() &&
      getAnchorScope()->size() == AssumedLiveBlocks.size() &&
      llvm::all_of(KnownDeadEnds, [](const Instruction *DeadEndI) {
        return DeadEndI->isTerminator() && DeadEndI->getNumSuccessors() == 0;
      }))
    return indicatePessimisticFixpoint();
  // ...

struct AAIsDeadCallSite final : AAIsDeadFunction {
  AAIsDeadCallSite(const IRPosition &IRP, Attributor &A)
      : AAIsDeadFunction(IRP, A) {}

  // ...
           "supported for call sites yet!");
    // ...
    return indicatePessimisticFixpoint();
  // ...

  void trackStatistics() const override {}
};
struct AADereferenceableImpl : AADereferenceable {
  AADereferenceableImpl(const IRPosition &IRP, Attributor &A)
      : AADereferenceable(IRP, A) {}
  using StateType = DerefState;

  // ...
    Value &V = *getAssociatedValue().stripPointerCasts();
    // ...
    A.getAttrs(getIRPosition(),
               {Attribute::Dereferenceable, Attribute::DereferenceableOrNull},
               /*...*/);
    // ...
      takeKnownDerefBytesMaximum(Attr.getValueAsInt());
    // ...
    bool IsKnownNonNull;
    // ...
        A, this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNonNull);
    // ...
    bool CanBeNull, CanBeFreed;
    takeKnownDerefBytesMaximum(V.getPointerDereferenceableBytes(
        A.getDataLayout(), CanBeNull, CanBeFreed));
    // ...
    if (Instruction *CtxI = getCtxI())
      followUsesInMBEC(*this, A, getState(), *CtxI);
  // ...

  // ...
  StateType &getState() override { return *this; }
  const StateType &getState() const override { return *this; }

  // ...
  void addAccessedBytesForUse(Attributor &A, const Use *U, const Instruction *I,
                              DerefState &State) {
    const Value *UseV = U->get();
    // ...
    if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() || I->isVolatile())
      // ...
        Loc->Ptr, Offset, A.getDataLayout(), true);
    if (Base && Base == &getAssociatedValue())
      State.addAccessedBytes(Offset, Loc->Size.getValue());
  }

  // ...
  bool followUseInMBEC(Attributor &A, const Use *U, const Instruction *I,
                       AADereferenceable::StateType &State) {
    bool IsNonNull = false;
    bool TrackUse = false;
    int64_t DerefBytes = getKnownNonNullAndDerefBytesForUse(
        A, *this, getAssociatedValue(), U, I, IsNonNull, TrackUse);
    LLVM_DEBUG(dbgs() << "[AADereferenceable] Deref bytes: " << DerefBytes
                      << " for instruction " << *I << "\n");
    // ...
    addAccessedBytesForUse(A, U, I, State);
    State.takeKnownDerefBytesMaximum(DerefBytes);
    // ...
  }

  // ...
    bool IsKnownNonNull;
    // ...
        A, this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
    if (IsAssumedNonNull &&
        A.hasAttr(getIRPosition(), Attribute::DereferenceableOrNull)) {
      A.removeAttrs(getIRPosition(), {Attribute::DereferenceableOrNull});
      return ChangeStatus::CHANGED;
    }
    // ...

  // ...
  void getDeducedAttributes(Attributor &A, LLVMContext &Ctx,
                            SmallVectorImpl<Attribute> &Attrs) const override {
    // ...
    bool IsKnownNonNull;
    // ...
        A, this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
    if (IsAssumedNonNull)
      Attrs.emplace_back(Attribute::getWithDereferenceableBytes(
          Ctx, getAssumedDereferenceableBytes()));
    // ...
      Attrs.emplace_back(Attribute::getWithDereferenceableOrNullBytes(
          Ctx, getAssumedDereferenceableBytes()));
  }

  // ...
  const std::string getAsStr(Attributor *A) const override {
    if (!getAssumedDereferenceableBytes())
      return "unknown-dereferenceable";
    bool IsKnownNonNull;
    bool IsAssumedNonNull = false;
    // ...
          *A, this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
    return std::string("dereferenceable") +
           (IsAssumedNonNull ? "" : "_or_null") +
           (isAssumedGlobal() ? "_globally" : "") + "<" +
           std::to_string(getKnownDereferenceableBytes()) + "-" +
           std::to_string(getAssumedDereferenceableBytes()) + ">" +
           (!A ? " [non-null is unknown]" : "");
  }
};
struct AADereferenceableFloating : AADereferenceableImpl {
  AADereferenceableFloating(const IRPosition &IRP, Attributor &A)
      : AADereferenceableImpl(IRP, A) {}

  // ...
    bool UsedAssumedInformation = false;
    // ...
    if (!A.getAssumedSimplifiedValues(getIRPosition(), *this, Values,
                                      /*...*/))
      Values.push_back({getAssociatedValue(), getCtxI()});
    // ...
      Stripped = Values.size() != 1 ||
                 Values.front().getValue() != &getAssociatedValue();
    // ...
    const DataLayout &DL = A.getDataLayout();
    // ...
    auto VisitValueCB = [&](const Value &V) -> bool {
      // ...
          DL.getIndexSizeInBits(V.getType()->getPointerAddressSpace());
      APInt Offset(IdxWidth, 0);
      // ...
      const auto *AA = A.getAAFor<AADereferenceable>(/*...*/);
      int64_t DerefBytes = 0;
      if (!AA || (!Stripped && this == AA)) {
        // ...
        bool CanBeNull, CanBeFreed;
        // ...
            Base->getPointerDereferenceableBytes(DL, CanBeNull, CanBeFreed);
        T.GlobalState.indicatePessimisticFixpoint();
      } else {
        // ...
        DerefBytes = DS.DerefBytesState.getAssumed();
        T.GlobalState &= DS.GlobalState;
      }
      // ...
      int64_t OffsetSExt = Offset.getSExtValue();
      // ...
      T.takeAssumedDerefBytesMinimum(
          std::max(int64_t(0), DerefBytes - OffsetSExt));
      // ...
        T.takeKnownDerefBytesMaximum(
            std::max(int64_t(0), DerefBytes - OffsetSExt));
        T.indicatePessimisticFixpoint();
      } else if (OffsetSExt > 0) {
        // ...
        T.indicatePessimisticFixpoint();
      }
      return T.isValidState();
    };

    for (const auto &VAC : Values)
      if (!VisitValueCB(*VAC.getValue()))
        return indicatePessimisticFixpoint();
    // ...

  // ...
  void trackStatistics() const override { /*...*/ }
};

struct AADereferenceableReturned final
    : AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl> {
  // ...
      AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl>;
  AADereferenceableReturned(const IRPosition &IRP, Attributor &A)
      // ...

  // ...
  void trackStatistics() const override { /*...*/ }
};

struct AADereferenceableArgument final
    : AAArgumentFromCallSiteArguments<AADereferenceable,
                                      AADereferenceableImpl> {
  // ...
      AAArgumentFromCallSiteArguments<AADereferenceable, AADereferenceableImpl>;
  AADereferenceableArgument(const IRPosition &IRP, Attributor &A)
      // ...

  // ...
  void trackStatistics() const override { /*...*/ }
};

struct AADereferenceableCallSiteArgument final : AADereferenceableFloating {
  AADereferenceableCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AADereferenceableFloating(IRP, A) {}

  // ...
  void trackStatistics() const override { /*...*/ }
};

struct AADereferenceableCallSiteReturned final
    : AACalleeToCallSite<AADereferenceable, AADereferenceableImpl> {
  using Base = AACalleeToCallSite<AADereferenceable, AADereferenceableImpl>;
  AADereferenceableCallSiteReturned(const IRPosition &IRP, Attributor &A)
      // ...

  // ...
  void trackStatistics() const override { /*...*/ }
};
static unsigned getKnownAlignForUse(Attributor &A, AAAlign &QueryingAA,
                                    Value &AssociatedValue, const Use *U,
                                    const Instruction *I, bool &TrackUse) {
  // ...
    if (GEP->hasAllConstantIndices())
      // ...
      MA = MaybeAlign(AlignAA->getKnownAlign());
  // ...
  const DataLayout &DL = A.getDataLayout();
  const Value *UseV = U->get();
  // ...
    if (SI->getPointerOperand() == UseV)
      MA = SI->getAlign();
  // ...
    if (LI->getPointerOperand() == UseV)
      MA = LI->getAlign();
  // ...
    if (AI->getPointerOperand() == UseV)
      MA = AI->getAlign();
  // ...
    if (AI->getPointerOperand() == UseV)
      MA = AI->getAlign();
  // ...
  unsigned Alignment = MA->value();
  // ...
  if (Base == &AssociatedValue) {
    // ...
    uint32_t gcd = std::gcd(uint32_t(abs((int32_t)Offset)), Alignment);
    // ...
}

struct AAAlignImpl : AAAlign {
  AAAlignImpl(const IRPosition &IRP, Attributor &A) : AAAlign(IRP, A) {}

  // ...
    A.getAttrs(getIRPosition(), {Attribute::Alignment}, Attrs);
    // ...
      takeKnownMaximum(Attr.getValueAsInt());
    // ...
    Value &V = *getAssociatedValue().stripPointerCasts();
    takeKnownMaximum(V.getPointerAlignment(A.getDataLayout()).value());
    // ...
    if (Instruction *CtxI = getCtxI())
      followUsesInMBEC(*this, A, getState(), *CtxI);
  // ...

  // ...
    Value &AssociatedValue = getAssociatedValue();
    // ...
      return ChangeStatus::UNCHANGED;
    // ...
    for (const Use &U : AssociatedValue.uses()) {
      // ...
        if (SI->getPointerOperand() == &AssociatedValue)
          if (SI->getAlign() < getAssumedAlign()) {
            // ...
                            "Number of times alignment added to a store");
            SI->setAlignment(getAssumedAlign());
            InstrChanged = ChangeStatus::CHANGED;
          }
      // ...
        if (LI->getPointerOperand() == &AssociatedValue)
          if (LI->getAlign() < getAssumedAlign()) {
            LI->setAlignment(getAssumedAlign());
            // ...
                            "Number of times alignment added to a load");
            InstrChanged = ChangeStatus::CHANGED;
          }
      // ...
        if (RMW->getPointerOperand() == &AssociatedValue) {
          if (RMW->getAlign() < getAssumedAlign()) {
            // ...
                            "Number of times alignment added to atomicrmw");
            RMW->setAlignment(getAssumedAlign());
            InstrChanged = ChangeStatus::CHANGED;
          }
        }
      // ...
        if (CAS->getPointerOperand() == &AssociatedValue) {
          if (CAS->getAlign() < getAssumedAlign()) {
            // ...
                            "Number of times alignment added to cmpxchg");
            CAS->setAlignment(getAssumedAlign());
            InstrChanged = ChangeStatus::CHANGED;
          }
        }
    }
    // ...
    Align InheritAlign =
        getAssociatedValue().getPointerAlignment(A.getDataLayout());
    if (InheritAlign >= getAssumedAlign())
      return InstrChanged;
    return Changed | InstrChanged;
  // ...

  // ...
  void getDeducedAttributes(Attributor &A, LLVMContext &Ctx,
                            SmallVectorImpl<Attribute> &Attrs) const override {
    if (getAssumedAlign() > 1)
      // ...
          Attribute::getWithAlignment(Ctx, Align(getAssumedAlign())));
  }

  // ...
  bool followUseInMBEC(Attributor &A, const Use *U, const Instruction *I,
                       AAAlign::StateType &State) {
    bool TrackUse = false;
    // ...
    unsigned int KnownAlign =
        getKnownAlignForUse(A, *this, getAssociatedValue(), U, I, TrackUse);
    State.takeKnownMaximum(KnownAlign);
    // ...
  }

  // ...
  const std::string getAsStr(Attributor *A) const override {
    return "align<" + std::to_string(getKnownAlign().value()) + "-" +
           std::to_string(getAssumedAlign().value()) + ">";
  }
};
struct AAAlignFloating : AAAlignImpl {
  AAAlignFloating(const IRPosition &IRP, Attributor &A) : AAAlignImpl(IRP, A) {}

  // ...
    const DataLayout &DL = A.getDataLayout();
    // ...
    bool UsedAssumedInformation = false;
    // ...
    if (!A.getAssumedSimplifiedValues(getIRPosition(), *this, Values,
                                      /*...*/))
      Values.push_back({getAssociatedValue(), getCtxI()});
    // ...
      Stripped = Values.size() != 1 ||
                 Values.front().getValue() != &getAssociatedValue();
    // ...
    auto VisitValueCB = [&](Value &V) -> bool {
      // ...
                                     DepClassTy::REQUIRED);
      if (!AA || (!Stripped && this == AA)) {
        // ...
        unsigned Alignment = 1;
        // ...
          Alignment = V.getPointerAlignment(DL).value();
        // ...
        T.takeKnownMaximum(Alignment);
        T.indicatePessimisticFixpoint();
      } else {
        // ...
        const AAAlign::StateType &DS = AA->getState();
        // ...
      }
      return T.isValidState();
    };

    for (const auto &VAC : Values) {
      if (!VisitValueCB(*VAC.getValue()))
        return indicatePessimisticFixpoint();
    }
    // ...
};

struct AAAlignReturned final
    : AAReturnedFromReturnedValues<AAAlign, AAAlignImpl> {
  using Base = AAReturnedFromReturnedValues<AAAlign, AAAlignImpl>;
  AAAlignReturned(const IRPosition &IRP, Attributor &A) : Base(IRP, A) {}
  // ...
};

struct AAAlignArgument final
    : AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl> {
  using Base = AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl>;
  AAAlignArgument(const IRPosition &IRP, Attributor &A) : Base(IRP, A) {}

  // ...
    if (A.getInfoCache().isInvolvedInMustTailCall(*getAssociatedArgument()))
      return ChangeStatus::UNCHANGED;
    return Base::manifest(A);
  // ...
};

struct AAAlignCallSiteArgument final : AAAlignFloating {
  AAAlignCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAAlignFloating(IRP, A) {}

  // ...
    if (Argument *Arg = getAssociatedArgument())
      if (A.getInfoCache().isInvolvedInMustTailCall(*Arg))
        return ChangeStatus::UNCHANGED;
    // ...
    Align InheritAlign =
        getAssociatedValue().getPointerAlignment(A.getDataLayout());
    if (InheritAlign >= getAssumedAlign())
      Changed = ChangeStatus::UNCHANGED;
    // ...

  // ...
    if (Argument *Arg = getAssociatedArgument()) {
      // ...
      const auto *ArgAlignAA = A.getAAFor<AAAlign>(/*...*/);
      // ...
        takeKnownMaximum(ArgAlignAA->getKnownAlign().value());
    }
    // ...
};

struct AAAlignCallSiteReturned final
    : AACalleeToCallSite<AAAlign, AAAlignImpl> {
  using Base = AACalleeToCallSite<AAAlign, AAAlignImpl>;
  AAAlignCallSiteReturned(const IRPosition &IRP, Attributor &A)
      // ...
};

struct AANoReturnImpl : public AANoReturn {
  AANoReturnImpl(const IRPosition &IRP, Attributor &A) : AANoReturn(IRP, A) {}

  // ...
        A, nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
  // ...

  // ...
  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "noreturn" : "may-return";
  }

  // ...
    auto CheckForNoReturn = [](Instruction &) { return false; };
    bool UsedAssumedInformation = false;
    if (!A.checkForAllInstructions(CheckForNoReturn, *this,
                                   {(unsigned)Instruction::Ret},
                                   UsedAssumedInformation))
      return indicatePessimisticFixpoint();
    return ChangeStatus::UNCHANGED;
  // ...
};

struct AANoReturnFunction final : AANoReturnImpl {
  AANoReturnFunction(const IRPosition &IRP, Attributor &A)
      : AANoReturnImpl(IRP, A) {}
  // ...
};

struct AANoReturnCallSite final
    : AACalleeToCallSite<AANoReturn, AANoReturnImpl> {
  AANoReturnCallSite(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoReturn, AANoReturnImpl>(IRP, A) {}
struct AAInstanceInfoImpl : public AAInstanceInfo {
  AAInstanceInfoImpl(const IRPosition &IRP, Attributor &A)
      : AAInstanceInfo(IRP, A) {}

  // ...
    Value &V = getAssociatedValue();
    // ...
      if (C->isThreadDependent())
        indicatePessimisticFixpoint();
      // ...
        indicateOptimisticFixpoint();
    // ...
      indicateOptimisticFixpoint();
    // ...
        A.getInfoCache().getAnalysisResultForFunction<CycleAnalysis>(/*...*/);
    // ...
      indicatePessimisticFixpoint();
  // ...

  // ...
    Value &V = getAssociatedValue();
    // ...
      Scope = I->getFunction();
    // ...
    if (!Scope->hasLocalLinkage())
      // ...
      return indicateOptimisticFixpoint();
    // ...
    bool IsKnownNoRecurse;
    // ...
    auto UsePred = [&](const Use &U, bool &Follow) {
      // ...
      if (!Callee || !Callee->hasLocalLinkage())
        // ...
      const auto *ArgInstanceInfoAA = A.getAAFor<AAInstanceInfo>(
          /*...*/ DepClassTy::OPTIONAL);
      if (!ArgInstanceInfoAA ||
          !ArgInstanceInfoAA->isAssumedUniqueForAnalysis())
        // ...
              A, *CB, *Scope, *this, nullptr,
              [Scope](const Function &Fn) { return &Fn != Scope; }))
        // ...
    };

    auto EquivalentUseCB = [&](const Use &OldU, const Use &NewU) {
      // ...
        auto *Ptr = SI->getPointerOperand()->stripPointerCasts();
      // ...
    };

    if (!A.checkForAllUses(UsePred, *this, V, true,
                           DepClassTy::OPTIONAL,
                           true, EquivalentUseCB))
      return indicatePessimisticFixpoint();
    // ...

  // ...
  const std::string getAsStr(Attributor *A) const override {
    return isAssumedUniqueForAnalysis() ? "<unique [fAa]>" : "<unknown>";
  }

  // ...
  void trackStatistics() const override {}
};

struct AAInstanceInfoFloating : AAInstanceInfoImpl {
  AAInstanceInfoFloating(const IRPosition &IRP, Attributor &A)
      : AAInstanceInfoImpl(IRP, A) {}
};

struct AAInstanceInfoArgument final : AAInstanceInfoFloating {
  AAInstanceInfoArgument(const IRPosition &IRP, Attributor &A)
      : AAInstanceInfoFloating(IRP, A) {}
};

struct AAInstanceInfoCallSiteArgument final : AAInstanceInfoImpl {
  AAInstanceInfoCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAInstanceInfoImpl(IRP, A) {}

  // ...
    Argument *Arg = getAssociatedArgument();
    // ...
      return indicatePessimisticFixpoint();
    // ...
        A.getAAFor<AAInstanceInfo>(*this, ArgPos, DepClassTy::REQUIRED);
    // ...
      return indicatePessimisticFixpoint();
  // ...
};

struct AAInstanceInfoReturned final : AAInstanceInfoImpl {
  AAInstanceInfoReturned(const IRPosition &IRP, Attributor &A)
      : AAInstanceInfoImpl(IRP, A) {
    // ...
};

struct AAInstanceInfoCallSiteReturned final : AAInstanceInfoFloating {
  AAInstanceInfoCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAInstanceInfoFloating(IRP, A) {}
                              bool IgnoreSubsumingPositions) {
    assert(ImpliedAttributeKind == Attribute::Captures &&
           "Unexpected attribute kind");
    // ...
         V.getType()->getPointerAddressSpace() == 0)) {
      // ...
      A.getAttrs(IRP, {Attribute::Captures}, Attrs, /*...*/);
      // ...
                 {Attribute::Captures, Attribute::ByVal}, Attrs, /*...*/);
    // ...

  // ...
    bool ReadOnly = F.onlyReadsMemory();
    bool NoThrow = F.doesNotThrow();
    bool IsVoidReturn = F.getReturnType()->isVoidTy();
    if (ReadOnly && NoThrow && IsVoidReturn) {
      // ...
    }
    // ...
    if (NoThrow && IsVoidReturn)
      // ...
    if (!NoThrow || ArgNo < 0 ||
        !F.getAttributes().hasAttrSomewhere(Attribute::Returned))
      // ...
    for (unsigned U = 0, E = F.arg_size(); U < E; ++U)
      if (F.hasParamAttribute(U, Attribute::Returned)) {
        if (U == unsigned(ArgNo))
          // ...
      }
  // ...

  // ...
  void getDeducedAttributes(Attributor &A, LLVMContext &Ctx,
                            SmallVectorImpl<Attribute> &Attrs) const override {
    if (!isAssumedNoCaptureMaybeReturned())
      // ...
    if (isArgumentPosition()) {
      if (isAssumedNoCapture())
        Attrs.emplace_back(Attribute::get(Ctx, Attribute::Captures));
      // ...
        Attrs.emplace_back(Attribute::get(Ctx, "no-capture-maybe-returned"));
    }
  }

  // ...
  const std::string getAsStr(Attributor *A) const override {
    if (isKnownNoCapture())
      return "known not-captured";
    if (isAssumedNoCapture())
      return "assumed not-captured";
    if (isKnownNoCaptureMaybeReturned())
      return "known not-captured-maybe-returned";
    if (isAssumedNoCaptureMaybeReturned())
      return "assumed not-captured-maybe-returned";
    return "assumed-captured";
  }

  // ...
  bool checkUse(Attributor &A, AANoCapture::StateType &State, const Use &U,
                /*...*/) {
    LLVM_DEBUG(dbgs() << "[AANoCapture] Check use: " << *U.get() << " in "
                      /*...*/);
    // ...
      return isCapturedIn(State, true, true, /*...*/);
    // ...
      return isCapturedIn(State, true, true, /*...*/);
    // ...
      return isCapturedIn(State, false, false, /*...*/);
    // ...
      return isCapturedIn(State, true, true, /*...*/);
    // ...
      return isCapturedIn(State, true, true, /*...*/);
    // ...
      bool IsKnownNoCapture;
      const AANoCapture *ArgNoCaptureAA = nullptr;
      // ...
          A, this, CSArgPos, DepClassTy::REQUIRED, IsKnownNoCapture, false,
          /*...*/);
      if (IsAssumedNoCapture)
        return isCapturedIn(State, false, false, /*...*/);
      // ...
        return isCapturedIn(State, false, false, /*...*/);
    // ...
    return isCapturedIn(State, true, true, /*...*/);
  }

  // ...
  static bool isCapturedIn(AANoCapture::StateType &State, bool CapturedInMem,
                           bool CapturedInInt, bool CapturedInRet) {
    LLVM_DEBUG(dbgs() << " - captures [Mem " << CapturedInMem << "|Int "
                      << CapturedInInt << "|Ret " << CapturedInRet << "]\n");
    // ...
  }

  // ...
    const IRPosition &IRP = getIRPosition();
    // ...
      return indicatePessimisticFixpoint();
    // ...
      return indicatePessimisticFixpoint();
    // ...
      T.addKnownBits(NOT_CAPTURED_IN_MEM);
      // ...
      addKnownBits(NOT_CAPTURED_IN_MEM);
    // ...
    auto CheckReturnedArgs = [&](bool &UsedAssumedInformation) {
      // ...
                                        UsedAssumedInformation))
        // ...
      bool SeenConstant = false;
      for (const AA::ValueAndContext &VAC : Values) {
        // ...
          SeenConstant = true;
        // ...
                 VAC.getValue() == getAssociatedArgument())
          // ...
      }
    };
    // ...
    bool IsKnownNoUnwind;
    // ...
    bool IsVoidTy = F->getReturnType()->isVoidTy();
    bool UsedAssumedInformation = false;
    if (IsVoidTy || CheckReturnedArgs(UsedAssumedInformation)) {
      T.addKnownBits(NOT_CAPTURED_IN_RET);
      if (T.isKnown(NOT_CAPTURED_IN_MEM))
        // ...
      if (IsKnownNoUnwind && (IsVoidTy || !UsedAssumedInformation)) {
        addKnownBits(NOT_CAPTURED_IN_RET);
        if (isKnown(NOT_CAPTURED_IN_MEM))
          return indicateOptimisticFixpoint();
      }
    }
    // ...
    auto UseCheck = [&](const Use &U, bool &Follow) -> bool {
      // ...
      return checkUse(A, T, U, Follow);
    };

    if (!A.checkForAllUses(UseCheck, *this, *V))
      return indicatePessimisticFixpoint();
    // ...
    auto Assumed = S.getAssumed();
    S.intersectAssumedBits(T.getAssumed());
    if (!isAssumedNoCaptureMaybeReturned())
      return indicatePessimisticFixpoint();
    // ...
};

struct AANoCaptureArgument final : AANoCaptureImpl {
  AANoCaptureArgument(const IRPosition &IRP, Attributor &A)
      : AANoCaptureImpl(IRP, A) {}
  // ...
};

struct AANoCaptureCallSiteArgument final : AANoCaptureImpl {
  AANoCaptureCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANoCaptureImpl(IRP, A) {}

  // ...
    Argument *Arg = getAssociatedArgument();
    // ...
      return indicatePessimisticFixpoint();
    // ...
    bool IsKnownNoCapture;
    const AANoCapture *ArgAA = nullptr;
    // ...
            A, this, ArgPos, DepClassTy::REQUIRED, IsKnownNoCapture, false,
            /*...*/))
      return ChangeStatus::UNCHANGED;
    // ...
    return indicatePessimisticFixpoint();
  // ...

  // ...
  void trackStatistics() const override { /*...*/ }
};

struct AANoCaptureFloating final : AANoCaptureImpl {
  AANoCaptureFloating(const IRPosition &IRP, Attributor &A)
      : AANoCaptureImpl(IRP, A) {}

  // ...
  void trackStatistics() const override { /*...*/ }
};

struct AANoCaptureReturned final : AANoCaptureImpl {
  AANoCaptureReturned(const IRPosition &IRP, Attributor &A)
      : AANoCaptureImpl(IRP, A) {
    // ...
  }

  // ...
  void trackStatistics() const override {}
};

struct AANoCaptureCallSiteReturned final : AANoCaptureImpl {
  AANoCaptureCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AANoCaptureImpl(IRP, A) {}

  // ...
    determineFunctionCaptureCapabilities(getIRPosition(), *F, *this);
  // ...

  // ...
  void trackStatistics() const override { /*...*/ }
};
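// Editorial sketch (not part of the original source): the capture tracking
// above records, per position, whether the pointer may escape into memory, an
// integer, or the return value (NOT_CAPTURED_IN_MEM/INT/RET). An argument that
// is only loaded from is not captured:
//
//   ; define internal i32 @get(ptr %p) {
//   ;   %v = load i32, ptr %p   ; no store/ptrtoint/return of %p
//   ;   ret i32 %v              ; -> %p can be marked as not captured
//   ; }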
      dbgs() << "[ValueSimplify] is assumed to be "
             // ...
      dbgs() << "[ValueSimplify] is assumed to be <none>\n";
    // ...

  // ...
    if (getAssociatedValue().getType()->isVoidTy())
      indicatePessimisticFixpoint();
    if (A.hasSimplificationCallback(getIRPosition()))
      indicatePessimisticFixpoint();
  // ...

  // ...
  const std::string getAsStr(Attributor *A) const override {
    // ...
      dbgs() << "SAV: " << (bool)SimplifiedAssociatedValue << " ";
      if (SimplifiedAssociatedValue && *SimplifiedAssociatedValue)
        dbgs() << "SAV: " << **SimplifiedAssociatedValue << " ";
    // ...
    return isValidState() ? (isAtFixpoint() ? "simplified" : "maybe-simple")
                          // ...
  }

  // ...
  void trackStatistics() const override {}

  // ...
  std::optional<Value *>
  getAssumedSimplifiedValue(Attributor &A) const override {
    return SimplifiedAssociatedValue;
  }

  // ...
  static Value *ensureType(Attributor &A, Value &V, Type &Ty, Instruction *CtxI,
                           /*...*/) {
    // ...
    if (CtxI && V.getType()->canLosslesslyBitCastTo(&Ty))
      // ...
          : BitCastInst::CreatePointerBitCastOrAddrSpaceCast(/*...*/);
    // ...
  }

  // ...
  static Value *reproduceInst(Attributor &A,
                              const AbstractAttribute &QueryingAA,
                              Instruction &I, Type &Ty, Instruction *CtxI,
                              /*...*/) {
    assert(CtxI && "Cannot reproduce an instruction without context!");
    if (Check && (I.mayReadFromMemory() ||
                  // ...
      Value *NewOp = reproduceValue(A, QueryingAA, *Op, Ty, CtxI, Check, VMap);
      // ...
        assert(Check && "Manifest of new value unexpectedly failed!");
    // ...
  }

  // ...
  static Value *reproduceValue(Attributor &A,
                               const AbstractAttribute &QueryingAA, Value &V,
                               Type &Ty, Instruction *CtxI, bool Check,
                               /*...*/) {
    if (const auto &NewV = VMap.lookup(&V))
      // ...
    bool UsedAssumedInformation = false;
    std::optional<Value *> SimpleV = A.getAssumedSimplified(/*...*/);
    if (!SimpleV.has_value())
      // ...
      EffectiveV = *SimpleV;
    // ...
      return ensureType(A, *EffectiveV, Ty, CtxI, Check);
    // ...
      if (Value *NewV = reproduceInst(A, QueryingAA, *I, Ty, CtxI, Check, VMap))
        return ensureType(A, *NewV, Ty, CtxI, Check);
    // ...
  }

  // ...
  Value *manifestReplacementValue(Attributor &A, Instruction *CtxI) const {
    Value *NewV = SimplifiedAssociatedValue
                      ? *SimplifiedAssociatedValue
                      // ...
    if (NewV && NewV != &getAssociatedValue()) {
      // ...
      if (reproduceValue(A, *this, *NewV, *getAssociatedType(), CtxI,
                         /*...*/))
        return reproduceValue(A, *this, *NewV, *getAssociatedType(), CtxI,
                              /*...*/);
    }
    // ...
  }

  // ...
  bool checkAndUpdate(Attributor &A, const AbstractAttribute &QueryingAA,
                      const IRPosition &IRP, bool Simplify = true) {
    bool UsedAssumedInformation = false;
    // ...
      QueryingValueSimplified = A.getAssumedSimplified(/*...*/);
    return unionAssumed(QueryingValueSimplified);
  }

  // ...
  template <typename AAType> bool askSimplifiedValueFor(Attributor &A) {
    if (!getAssociatedValue().getType()->isIntegerTy())
      // ...
        A.getAAFor<AAType>(*this, getIRPosition(), DepClassTy::NONE);
    // ...
    std::optional<Constant *> COpt = AA->getAssumedConstant(A);
    // ...
      SimplifiedAssociatedValue = std::nullopt;
      A.recordDependence(*AA, *this, DepClassTy::OPTIONAL);
      // ...
    if (auto *C = *COpt) {
      SimplifiedAssociatedValue = C;
      A.recordDependence(*AA, *this, DepClassTy::OPTIONAL);
      // ...
  }

  // ...
  bool askSimplifiedValueForOtherAAs(Attributor &A) {
    if (askSimplifiedValueFor<AAValueConstantRange>(A))
      // ...
    if (askSimplifiedValueFor<AAPotentialConstantValues>(A))
      // ...
  }

  // ...
    for (auto &U : getAssociatedValue().uses()) {
      // ...
        IP = PHI->getIncomingBlock(U)->getTerminator();
      if (auto *NewV = manifestReplacementValue(A, IP)) {
        // ...
                          << " -> " << *NewV << " :: " << *this << "\n");
        if (A.changeUseAfterManifest(U, *NewV))
          Changed = ChangeStatus::CHANGED;
      }
    }
    // ...
    return Changed | AAValueSimplify::manifest(A);
  // ...

  // ...
    SimplifiedAssociatedValue = &getAssociatedValue();
    return AAValueSimplify::indicatePessimisticFixpoint();
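// Editorial sketch (not part of the original source): AAValueSimplifyArgument
// below unions the simplified values seen at all call sites; if every caller
// passes the same constant, the argument simplifies to it:
//
//   ; call void @f(i32 42)
//   ; call void @f(i32 42)
//   ;   -> inside @f, the argument is assumed to be `i32 42`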
struct AAValueSimplifyArgument final : AAValueSimplifyImpl {
  AAValueSimplifyArgument(const IRPosition &IRP, Attributor &A)
      : AAValueSimplifyImpl(IRP, A) {}

  // ...
    AAValueSimplifyImpl::initialize(A);
    if (A.hasAttr(getIRPosition(),
                  {Attribute::InAlloca, Attribute::Preallocated,
                   Attribute::StructRet, Attribute::Nest, Attribute::ByVal},
                  /*...*/))
      indicatePessimisticFixpoint();
  // ...

  // ...
    Argument *Arg = getAssociatedArgument();
    // ...
      return indicatePessimisticFixpoint();
    // ...
    auto Before = SimplifiedAssociatedValue;

    auto PredForCallSite = [&](AbstractCallSite ACS) {
      const IRPosition &ACSArgPos =
          // ...
      bool UsedAssumedInformation = false;
      std::optional<Constant *> SimpleArgOp =
          A.getAssumedConstant(ACSArgPos, *this, UsedAssumedInformation);
      // ...
      return unionAssumed(*SimpleArgOp);
    };

    // ...
    bool UsedAssumedInformation = false;
    if (hasCallBaseContext() &&
        getCallBaseContext()->getCalledOperand() == Arg->getParent())
      // ...
          AbstractCallSite(&getCallBaseContext()->getCalledOperandUse()));
    // ...
      Success = A.checkForAllCallSites(PredForCallSite, *this, true,
                                       UsedAssumedInformation);
    // ...
      if (!askSimplifiedValueForOtherAAs(A))
        return indicatePessimisticFixpoint();
    // ...
    return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
                                               : ChangeStatus::CHANGED;
  // ...

  // ...
  void trackStatistics() const override { /*...*/ }
};

struct AAValueSimplifyReturned : AAValueSimplifyImpl {
  AAValueSimplifyReturned(const IRPosition &IRP, Attributor &A)
      : AAValueSimplifyImpl(IRP, A) {}

  // ...
  std::optional<Value *>
  getAssumedSimplifiedValue(Attributor &A) const override {
    if (!isValidState())
      // ...
    return SimplifiedAssociatedValue;
  }

  // ...
    auto Before = SimplifiedAssociatedValue;
    // ...
      return checkAndUpdate(/*...*/);
    // ...
    bool UsedAssumedInformation = false;
    if (!A.checkForAllInstructions(ReturnInstCB, *this, {Instruction::Ret},
                                   UsedAssumedInformation))
      if (!askSimplifiedValueForOtherAAs(A))
        return indicatePessimisticFixpoint();
    // ...
    return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
                                               : ChangeStatus::CHANGED;
  // ...

  // ...
    return ChangeStatus::UNCHANGED;
  // ...

  // ...
  void trackStatistics() const override { /*...*/ }
};

struct AAValueSimplifyFloating : AAValueSimplifyImpl {
  AAValueSimplifyFloating(const IRPosition &IRP, Attributor &A)
      : AAValueSimplifyImpl(IRP, A) {}

  // ...
    AAValueSimplifyImpl::initialize(A);
    Value &V = getAnchorValue();
    // ...
      indicatePessimisticFixpoint();
  // ...

  // ...
    auto Before = SimplifiedAssociatedValue;
    if (!askSimplifiedValueForOtherAAs(A))
      return indicatePessimisticFixpoint();
    // ...
    return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
                                               : ChangeStatus::CHANGED;
  // ...

  // ...
  void trackStatistics() const override { /*...*/ }
};

struct AAValueSimplifyFunction : AAValueSimplifyImpl {
  AAValueSimplifyFunction(const IRPosition &IRP, Attributor &A)
      : AAValueSimplifyImpl(IRP, A) {}

  // ...
    SimplifiedAssociatedValue = nullptr;
    indicateOptimisticFixpoint();
  // ...
        "AAValueSimplify(Function|CallSite)::updateImpl will not be called");
  // ...

  // ...
  void trackStatistics() const override { /*...*/ }
};

struct AAValueSimplifyCallSite : AAValueSimplifyFunction {
  AAValueSimplifyCallSite(const IRPosition &IRP, Attributor &A)
      : AAValueSimplifyFunction(IRP, A) {}

  // ...
  void trackStatistics() const override { /*...*/ }
};

struct AAValueSimplifyCallSiteReturned : AAValueSimplifyImpl {
  AAValueSimplifyCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAValueSimplifyImpl(IRP, A) {}

  // ...
    AAValueSimplifyImpl::initialize(A);
    Function *Fn = getAssociatedFunction();
    assert(Fn && "Did expect an associated function");
    for (Argument &Arg : Fn->args()) {
      // ...
          checkAndUpdate(A, *this, IRP))
        indicateOptimisticFixpoint();
      // ...
        indicatePessimisticFixpoint();
    }
  // ...

  // ...
    return indicatePessimisticFixpoint();
  // ...

  // ...
  void trackStatistics() const override { /*...*/ }
};

struct AAValueSimplifyCallSiteArgument : AAValueSimplifyFloating {
  AAValueSimplifyCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAValueSimplifyFloating(IRP, A) {}

  // ...
    auto *FloatAA = A.lookupAAFor<AAValueSimplify>(/*...*/);
    if (FloatAA && FloatAA->getState().isValidState())
      // ...
    if (auto *NewV = manifestReplacementValue(A, getCtxI())) {
      // ...
               ->getArgOperandUse(getCallSiteArgNo());
      if (A.changeUseAfterManifest(U, *NewV))
        Changed = ChangeStatus::CHANGED;
    }
    // ...
    return Changed | AAValueSimplify::manifest(A);
  // ...

  // ...
  void trackStatistics() const override { /*...*/ }
struct AAHeapToStackFunction final : public AAHeapToStack {
  // ...
  struct AllocationInfo {
    // ...
    } Status = STACK_DUE_TO_USE;

    // ...
    bool HasPotentiallyFreeingUnknownUses = false;

    // ...
    bool MoveAllocaIntoEntry = true;

    // ...
    SmallSetVector<CallBase *, 1> PotentialFreeCalls{};
  };

  struct DeallocationInfo {
    // ...
    bool MightFreeUnknownObjects = false;

    // ...
    SmallSetVector<CallBase *, 1> PotentialAllocationCalls{};
  };

  AAHeapToStackFunction(const IRPosition &IRP, Attributor &A)
      : AAHeapToStack(IRP, A) {}

  ~AAHeapToStackFunction() override {
    // ...
    for (auto &It : AllocationInfos)
      It.second->~AllocationInfo();
    for (auto &It : DeallocationInfos)
      It.second->~DeallocationInfo();
  }

  // ...
    AAHeapToStack::initialize(A);
    // ...
    const auto *TLI = A.getInfoCache().getTargetLibraryInfoForFunction(*F);
    // ...
        DeallocationInfos[CB] = new (A.Allocator) DeallocationInfo{CB, FreedOp};
      // ...
      auto *I8Ty = Type::getInt8Ty(CB->getParent()->getContext());
      // ...
      AllocationInfo *AI = new (A.Allocator) AllocationInfo{CB};
      AllocationInfos[CB] = AI;
      // ...
        TLI->getLibFunc(*CB, AI->LibraryFunctionId);
    // ...
    bool UsedAssumedInformation = false;
    bool Success = A.checkForAllCallLikeInstructions(
        AllocationIdentifierCB, *this, UsedAssumedInformation,
        /*...*/);
    // ...
    assert(Success && "Did not expect the call base visit callback to fail!");
    // ...
        [](const IRPosition &, const AbstractAttribute *,
           bool &) -> std::optional<Value *> { return nullptr; };
    for (const auto &It : AllocationInfos)
      // ...
    for (const auto &It : DeallocationInfos)
      // ...
  // ...

  const std::string getAsStr(Attributor *A) const override {
    unsigned NumH2SMallocs = 0, NumInvalidMallocs = 0;
    for (const auto &It : AllocationInfos) {
      if (It.second->Status == AllocationInfo::INVALID)
        ++NumInvalidMallocs;
      // ...
    }
    return "[H2S] Mallocs Good/Bad: " + std::to_string(NumH2SMallocs) + "/" +
           std::to_string(NumInvalidMallocs);
  }

  // ...
  void trackStatistics() const override {
    // ...
        MallocCalls, Function,
        "Number of malloc/calloc/aligned_alloc calls converted to allocas");
    for (const auto &It : AllocationInfos)
      if (It.second->Status != AllocationInfo::INVALID)
        // ...
  }

  bool isAssumedHeapToStack(const CallBase &CB) const override {
    // ...
      if (AllocationInfo *AI =
              AllocationInfos.lookup(const_cast<CallBase *>(&CB)))
        return AI->Status != AllocationInfo::INVALID;
    // ...
  }

  bool isAssumedHeapToStackRemovedFree(CallBase &CB) const override {
    if (!isValidState())
      // ...
    for (const auto &It : AllocationInfos) {
      AllocationInfo &AI = *It.second;
      if (AI.Status == AllocationInfo::INVALID)
        // ...
      if (AI.PotentialFreeCalls.count(&CB))
        // ...
    }
    // ...
  }

  // ...
    assert(getState().isValidState() &&
           "Attempted to manifest an invalid state!");
    // ...
    const auto *TLI = A.getInfoCache().getTargetLibraryInfoForFunction(*F);
    // ...
    for (auto &It : AllocationInfos) {
      AllocationInfo &AI = *It.second;
      if (AI.Status == AllocationInfo::INVALID)
        // ...
      for (CallBase *FreeCall : AI.PotentialFreeCalls) {
        LLVM_DEBUG(dbgs() << "H2S: Removing free call: " << *FreeCall << "\n");
        A.deleteAfterManifest(*FreeCall);
        HasChanged = ChangeStatus::CHANGED;
      }
      // ...
      LLVM_DEBUG(dbgs() << "H2S: Removing malloc-like call: " << *AI.CB
                        // ...
      auto Remark = [&](OptimizationRemark OR) {
        // ...
        if (TLI->getLibFunc(*AI.CB, IsAllocShared))
          if (IsAllocShared == LibFunc___kmpc_alloc_shared)
            return OR << "Moving globalized variable to the stack.";
        return OR << "Moving memory allocation from the heap to the stack.";
      };
      if (AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared)
        A.emitRemark<OptimizationRemark>(AI.CB, "OMP110", Remark);
      // ...
        A.emitRemark<OptimizationRemark>(AI.CB, "HeapToStack", Remark);

      const DataLayout &DL = A.getInfoCache().getDL();
      // ...
        std::optional<APInt> SizeAPI = getSize(A, *this, AI);
        // ...
        Size = ConstantInt::get(AI.CB->getContext(), *SizeAPI);
      // ...
        LLVMContext &Ctx = AI.CB->getContext();
        ObjectSizeOpts Opts;
        ObjectSizeOffsetEvaluator Eval(DL, TLI, Ctx, Opts);
        SizeOffsetValue SizeOffsetPair = Eval.compute(AI.CB);
        // ...
              ? F->getEntryBlock().begin()
              : AI.CB->getIterator();
      // ...
      if (MaybeAlign RetAlign = AI.CB->getRetAlign())
        Alignment = std::max(Alignment, *RetAlign);
      // ...
        std::optional<APInt> AlignmentAPI = getAPInt(A, *this, *Align);
        assert(AlignmentAPI && AlignmentAPI->getZExtValue() > 0 &&
               "Expected an alignment during manifest!");
        // ...
            std::max(Alignment, assumeAligned(AlignmentAPI->getZExtValue()));
      // ...
      unsigned AS = DL.getAllocaAddrSpace();
      // ...
          new AllocaInst(Type::getInt8Ty(F->getContext()), AS, Size, Alignment,
                         AI.CB->getName() + ".h2s", IP);

      if (Alloca->getType() != AI.CB->getType())
        Alloca = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
            Alloca, AI.CB->getType(), "malloc_cast", AI.CB->getIterator());

      auto *I8Ty = Type::getInt8Ty(F->getContext());
      // ...
             "Must be able to materialize initial memory state of allocation");
      // ...
          auto *NBB = II->getNormalDest();
          // ...
          A.deleteAfterManifest(*AI.CB);
        // ...
          A.deleteAfterManifest(*AI.CB);
      // ...
        Builder.CreateMemSet(Alloca, InitVal, Size, std::nullopt);
      // ...
      HasChanged = ChangeStatus::CHANGED;
    }
    // ...

  // ...
  std::optional<APInt> getAPInt(Attributor &A, const AbstractAttribute &AA,
                                /*...*/) {
    bool UsedAssumedInformation = false;
    std::optional<Constant *> SimpleV =
        A.getAssumedConstant(V, AA, UsedAssumedInformation);
    // ...
      return APInt(64, 0);
    // ...
      return CI->getValue();
    return std::nullopt;
  }

  // ...
  std::optional<APInt> getSize(Attributor &A, const AbstractAttribute &AA,
                               AllocationInfo &AI) {
    auto Mapper = [&](const Value *V) -> const Value * {
      bool UsedAssumedInformation = false;
      if (std::optional<Constant *> SimpleV =
              A.getAssumedConstant(*V, AA, UsedAssumedInformation))
        // ...
    };
    // ...
    const auto *TLI = A.getInfoCache().getTargetLibraryInfoForFunction(*F);
    // ...
  }

  // ...
  MapVector<CallBase *, AllocationInfo *> AllocationInfos;

  // ...
  MapVector<CallBase *, DeallocationInfo *> DeallocationInfos;
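// Editorial sketch (not part of the original source): updateImpl below pairs
// each allocation with the unique free that must execute with it, validates
// the size and alignment, and only then lets the manifest above rewrite e.g.
//
//   ; %p = call ptr @malloc(i64 8)   ->   %p = alloca i8, i64 8
//   ; call void @free(ptr %p)        ->   (free call removed)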
6899ChangeStatus AAHeapToStackFunction::updateImpl(Attributor &
A) {
6902 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6904 const auto *LivenessAA =
6907 MustBeExecutedContextExplorer *Explorer =
6908 A.getInfoCache().getMustBeExecutedContextExplorer();
6910 bool StackIsAccessibleByOtherThreads =
6911 A.getInfoCache().stackIsAccessibleByOtherThreads();
6914 A.getInfoCache().getAnalysisResultForFunction<LoopAnalysis>(*F);
6915 std::optional<bool> MayContainIrreducibleControl;
6917 if (&
F->getEntryBlock() == &BB)
6919 if (!MayContainIrreducibleControl.has_value())
6921 if (*MayContainIrreducibleControl)
6930 bool HasUpdatedFrees =
false;
6932 auto UpdateFrees = [&]() {
6933 HasUpdatedFrees =
true;
6935 for (
auto &It : DeallocationInfos) {
6936 DeallocationInfo &DI = *It.second;
6939 if (DI.MightFreeUnknownObjects)
6943 bool UsedAssumedInformation =
false;
6944 if (
A.isAssumedDead(*DI.CB,
this, LivenessAA, UsedAssumedInformation,
6951 LLVM_DEBUG(
dbgs() <<
"[H2S] Unknown underlying object for free!\n");
6952 DI.MightFreeUnknownObjects =
true;
6965 DI.MightFreeUnknownObjects =
true;
6969 AllocationInfo *AI = AllocationInfos.lookup(ObjCB);
6971 LLVM_DEBUG(
dbgs() <<
"[H2S] Free of a non-allocation object: " << *Obj
6973 DI.MightFreeUnknownObjects =
true;
6977 DI.PotentialAllocationCalls.insert(ObjCB);
6981 auto FreeCheck = [&](AllocationInfo &AI) {
6985 if (!StackIsAccessibleByOtherThreads) {
6990 dbgs() <<
"[H2S] found an escaping use, stack is not accessible by "
6991 "other threads and function is not nosync:\n");
6995 if (!HasUpdatedFrees)
6999 if (AI.PotentialFreeCalls.size() != 1) {
7001 << AI.PotentialFreeCalls.size() <<
"\n");
7004 CallBase *UniqueFree = *AI.PotentialFreeCalls.begin();
7005 DeallocationInfo *DI = DeallocationInfos.lookup(UniqueFree);
7008 dbgs() <<
"[H2S] unique free call was not known as deallocation call "
7009 << *UniqueFree <<
"\n");
7012 if (DI->MightFreeUnknownObjects) {
7014 dbgs() <<
"[H2S] unique free call might free unknown allocations\n");
7017 if (DI->PotentialAllocationCalls.empty())
7019 if (DI->PotentialAllocationCalls.size() > 1) {
7021 << DI->PotentialAllocationCalls.size()
7022 <<
" different allocations\n");
7025 if (*DI->PotentialAllocationCalls.begin() != AI.CB) {
7028 <<
"[H2S] unique free call not known to free this allocation but "
7029 << **DI->PotentialAllocationCalls.begin() <<
"\n");
7034 if (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared) {
7036 if (!Explorer || !Explorer->findInContextOf(UniqueFree, CtxI)) {
7037 LLVM_DEBUG(
dbgs() <<
"[H2S] unique free call might not be executed "
7038 "with the allocation "
7039 << *UniqueFree <<
"\n");
  auto UsesCheck = [&](AllocationInfo &AI) {
    bool ValidUsesOnly = true;

    auto Pred = [&](const Use &U, bool &Follow) -> bool {
      Instruction *UserI = cast<Instruction>(U.getUser());
      if (auto *SI = dyn_cast<StoreInst>(UserI)) {
        if (SI->getValueOperand() == U.get()) {
          LLVM_DEBUG(dbgs()
                     << "[H2S] escaping store to memory: " << *UserI << "\n");
          ValidUsesOnly = false;
        }
        // ...
        return true;
      }

      if (auto *CB = dyn_cast<CallBase>(UserI)) {
        if (DeallocationInfos.count(CB)) {
          AI.PotentialFreeCalls.insert(CB);
          return true;
        }
        // ...
        bool IsKnownNoCapture;
        // ... query nocapture / nofree of the call site argument ...
        if (!IsAssumedNoCapture ||
            (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared &&
             !IsAssumedNoFree)) {
          AI.HasPotentiallyFreeingUnknownUses |= !IsAssumedNoFree;

          // Emit a missed remark if this is missed OpenMP globalization.
          auto Remark = [&](OptimizationRemarkMissed ORM) {
            return ORM
                   << "Could not move globalized variable to the stack. "
                      "Variable is potentially captured in call. Mark "
                      "parameter as `__attribute__((noescape))` to override.";
          };

          if (ValidUsesOnly &&
              AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared)
            A.emitRemark<OptimizationRemarkMissed>(CB, "OMP113", Remark);

          // ...
          ValidUsesOnly = false;
        }
        return true;
      }

      // Unknown user for which we can not say anything useful.
      // ...
      ValidUsesOnly = false;
      return true;
    };

    if (!A.checkForAllUses(Pred, *this, *AI.CB, /* CheckBBLivenessOnly */ false,
                           /* ... */
                           [&](const Use &OldU, const Use &NewU) {
                             auto *SI = dyn_cast<StoreInst>(OldU.getUser());
                             return !SI || StackIsAccessibleByOtherThreads ||
                                    AA::isAssumedThreadLocalObject(
                                        A, *SI->getPointerOperand(), *this);
                           }))
      return false;
    return ValidUsesOnly;
  };
  // The actual update starts here. We look at all allocations and depending on
  // their status perform the appropriate check(s).
  for (auto &It : AllocationInfos) {
    AllocationInfo &AI = *It.second;
    if (AI.Status == AllocationInfo::INVALID)
      continue;

    if (/* the allocation has an alignment operand */ Align) {
      std::optional<APInt> APAlign = getAPInt(A, *this, *Align);
      if (!APAlign) {
        // Can't generate an alloca which respects the required alignment.
        LLVM_DEBUG(dbgs() << "[H2S] Unknown allocation alignment: " << *AI.CB
                          /* ... */);
        AI.Status = AllocationInfo::INVALID;
        continue;
      }
      if (/* alignment exceeds the supported maximum or */
          !APAlign->isPowerOf2()) {
        LLVM_DEBUG(dbgs() << "[H2S] Invalid allocation alignment: " << APAlign
                          /* ... */);
        AI.Status = AllocationInfo::INVALID;
        continue;
      }
    }

    // ...
    if (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared &&
        /* size unknown or above the heap-to-stack threshold */) {
      LLVM_DEBUG({
        if (!Size)
          dbgs() << "[H2S] Unknown allocation size: " << *AI.CB << "\n";
        else
          dbgs() << "[H2S] Allocation size too large: " << *AI.CB << " vs. "
                 /* ... */;
      });
      AI.Status = AllocationInfo::INVALID;
      continue;
    }

    switch (AI.Status) {
    case AllocationInfo::STACK_DUE_TO_USE:
      if (UsesCheck(AI))
        break;
      AI.Status = AllocationInfo::STACK_DUE_TO_FREE;
      [[fallthrough]];
    case AllocationInfo::STACK_DUE_TO_FREE:
      if (FreeCheck(AI))
        break;
      AI.Status = AllocationInfo::INVALID;
      // ...
      break;
    case AllocationInfo::INVALID:
      // ...
      break;
    }

    // Only moving the allocation into the entry block is legal if it is not
    // inside a loop (unless it is a globalized OpenMP local).
    bool IsGlobalizedLocal =
        AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared;
    if (AI.MoveAllocaIntoEntry &&
        (!Size.has_value() ||
         (!IsGlobalizedLocal && IsInLoop(*AI.CB->getParent()))))
      AI.MoveAllocaIntoEntry = false;
  }
  // ...
}
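// A minimal, self-contained sketch (not the Attributor API) of the decision
// logic above: an allocation is turned into a stack slot only if its size and
// alignment are known and sane, it does not escape, and any free of it is
// unique and attributable to this allocation. All names below are
// illustrative assumptions, not LLVM symbols.
#include <cstdint>
#include <optional>

struct AllocSketch {
  std::optional<uint64_t> Size;   // byte size, if known
  std::optional<uint64_t> Align;  // requested alignment, if known
  bool Escapes = false;           // stored/captured into unknown memory
  bool HasUnknownFree = false;    // a free we could not attribute
  unsigned NumPotentialFrees = 0; // frees that may release this object
};

static bool canMoveToStackSketch(const AllocSketch &AI, uint64_t MaxSize) {
  if (!AI.Size || *AI.Size > MaxSize)
    return false;                               // unknown or too large
  if (AI.Align && (*AI.Align & (*AI.Align - 1)) != 0)
    return false;                               // not a power of two
  if (AI.Escapes || AI.HasUnknownFree)
    return false;                               // may be freed/used elsewhere
  return AI.NumPotentialFrees <= 1;             // at most one matching free
}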
/// ----------------------- Privatizable Pointers ------------------------------
struct AAPrivatizablePtrImpl : public AAPrivatizablePtr {
  AAPrivatizablePtrImpl(const IRPosition &IRP, Attributor &A)
      : AAPrivatizablePtr(IRP, A), PrivatizableType(std::nullopt) {}

  ChangeStatus indicatePessimisticFixpoint() override {
    AAPrivatizablePtr::indicatePessimisticFixpoint();
    PrivatizableType = nullptr;
    return ChangeStatus::CHANGED;
  }

  /// Identify the type we can choose for a private copy of the underlying
  /// argument. std::nullopt means it is not clear yet, nullptr means there is
  /// none.
  virtual std::optional<Type *> identifyPrivatizableType(Attributor &A) = 0;

  /// Return a privatizable type that encloses both T0 and T1.
  std::optional<Type *> combineTypes(std::optional<Type *> T0,
                                     std::optional<Type *> T1) {
    // ...
  }

  std::optional<Type *> getPrivatizableType() const override {
    return PrivatizableType;
  }

  const std::string getAsStr(Attributor *A) const override {
    return isAssumedPrivatizablePtr() ? "[priv]" : "[no-priv]";
  }

protected:
  std::optional<Type *> PrivatizableType;
};
struct AAPrivatizablePtrArgument final : public AAPrivatizablePtrImpl {
  AAPrivatizablePtrArgument(const IRPosition &IRP, Attributor &A)
      : AAPrivatizablePtrImpl(IRP, A) {}

  /// See AAPrivatizablePtrImpl::identifyPrivatizableType(...)
  std::optional<Type *> identifyPrivatizableType(Attributor &A) override {
    // If this is a byval argument and we know all the call sites (so we can
    // rewrite them), there is no need to check them explicitly.
    bool UsedAssumedInformation = false;
    SmallVector<Attribute, 1> Attrs;
    A.getAttrs(getIRPosition(), {Attribute::ByVal}, Attrs,
               /* IgnoreSubsumingPositions */ true);
    if (!Attrs.empty() &&
        A.checkForAllCallSites([](AbstractCallSite ACS) { return true; }, *this,
                               true, UsedAssumedInformation))
      return Attrs[0].getValueAsType();

    std::optional<Type *> Ty;
    unsigned ArgNo = getIRPosition().getCallSiteArgNo();

    // Make sure the associated call site argument has the same type at all
    // call sites and it is an allocation we know is safe to privatize.
    auto CallSiteCheck = [&](AbstractCallSite ACS) {
      // ...
      const auto *PrivCSArgAA =
          A.getAAFor<AAPrivatizablePtr>(*this, ACSArgPos, DepClassTy::REQUIRED);
      // ...
      std::optional<Type *> CSTy = PrivCSArgAA->getPrivatizableType();

      LLVM_DEBUG({
        dbgs() << "[AAPrivatizablePtr] ACSPos: " << ACSArgPos << ", CSTy: ";
        if (CSTy && *CSTy)
          (*CSTy)->print(dbgs());
        else if (CSTy)
          dbgs() << "<nullptr>";
        // ...
      });

      Ty = combineTypes(Ty, CSTy);

      LLVM_DEBUG({
        dbgs() << " : New Type: ";
        if (Ty && *Ty)
          (*Ty)->print(dbgs());
        else if (Ty)
          dbgs() << "<nullptr>";
        // ...
      });

      // ...
    };

    if (!A.checkForAllCallSites(CallSiteCheck, *this, true,
                                UsedAssumedInformation))
      return nullptr;
    return Ty;
  }
  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    PrivatizableType = identifyPrivatizableType(A);
    if (!PrivatizableType)
      return ChangeStatus::UNCHANGED;
    if (!*PrivatizableType)
      return indicatePessimisticFixpoint();

    // The dependence is optional so we don't give up once we give up on the
    // alignment.
    A.getAAFor<AAAlign>(*this, IRPosition::value(getAssociatedValue()),
                        DepClassTy::OPTIONAL);

    // Avoid arguments with padding for now.
    if (!A.hasAttr(getIRPosition(), Attribute::ByVal) &&
        /* ... type has padding ... */)
      return indicatePessimisticFixpoint();

    // Collect the types that will replace the privatizable type in the
    // function signature.
    SmallVector<Type *, 16> ReplacementTypes;
    identifyReplacementTypes(*PrivatizableType, ReplacementTypes);

    // Verify callee and caller agree on how the promoted argument would be
    // passed.
    Function &Fn = *getIRPosition().getAnchorScope();
    const auto *TTI =
        A.getInfoCache().getAnalysisResultForFunction<TargetIRAnalysis>(Fn);
    if (!TTI) {
      LLVM_DEBUG(dbgs() << "[AAPrivatizablePtr] Missing TTI for function "
                        /* ... */);
      return indicatePessimisticFixpoint();
    }

    auto CallSiteCheck = [&](AbstractCallSite ACS) {
      // ... ABI compatibility check per call site ...
    };

    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallSites(CallSiteCheck, *this, true,
                                UsedAssumedInformation)) {
      LLVM_DEBUG(
          dbgs() << "[AAPrivatizablePtr] ABI incompatibility detected for "
                 /* ... */);
      return indicatePessimisticFixpoint();
    }

    // Register a rewrite of the argument.
    Argument *Arg = getAssociatedArgument();
    if (!A.isValidFunctionSignatureRewrite(*Arg, ReplacementTypes)) {
      // ...
      return indicatePessimisticFixpoint();
    }
    // Helper to check if for the given call site the associated argument is
    // passed to a callback where the privatization would be different.
    auto IsCompatiblePrivArgOfCallback = [&](CallBase &CB) {
      SmallVector<const Use *, 4> CallbackUses;
      AbstractCallSite::getCallbackUses(CB, CallbackUses);
      for (const Use *U : CallbackUses) {
        AbstractCallSite CBACS(U);
        assert(CBACS && CBACS.isCallbackCall());
        for (Argument &CBArg : CBACS.getCalledFunction()->args()) {
          int CBArgNo = CBACS.getCallArgOperandNo(CBArg);

          LLVM_DEBUG({
            dbgs() << "[AAPrivatizablePtr] Argument " << *Arg
                   << "check if can be privatized in the context of its parent ("
                   /* ... */
                   << ")\n[AAPrivatizablePtr] because it is an argument in a "
                   /* ... */
                   << CBArgNo << "@" << CBACS.getCalledFunction()->getName()
                   << ")\n[AAPrivatizablePtr] " << CBArg << " : "
                   << CBACS.getCallArgOperand(CBArg) << " vs "
                   /* ... */
                   << "[AAPrivatizablePtr] " << CBArg << " : "
                   << CBACS.getCallArgOperandNo(CBArg) << " vs " << ArgNo
                   << "\n";
          });

          if (CBArgNo != int(ArgNo))
            continue;
          const auto *CBArgPrivAA = A.getAAFor<AAPrivatizablePtr>(
              /* ... callback argument position ... */);
          if (CBArgPrivAA && CBArgPrivAA->isValidState()) {
            auto CBArgPrivTy = CBArgPrivAA->getPrivatizableType();
            // ...
            if (*CBArgPrivTy == PrivatizableType)
              continue;
          }

          LLVM_DEBUG({
            dbgs() << "[AAPrivatizablePtr] Argument " << *Arg
                   << " cannot be privatized in the context of its parent ("
                   /* ... */
                   << ")\n[AAPrivatizablePtr] because it is an argument in a "
                   /* ... */
                   << CBArgNo << "@" << CBACS.getCalledFunction()->getName()
                   << ").\n[AAPrivatizablePtr] for which the argument "
                      "privatization is not compatible.\n";
          });
          return false;
        }
      }
      return true;
    };

    // Helper to check if for the given call site the associated argument is
    // passed to a direct call where the privatization would be different.
    auto IsCompatiblePrivArgOfDirectCS = [&](AbstractCallSite ACS) {
      // ...
      assert(/* ... */
             "Expected a direct call operand for callback call operand");

      LLVM_DEBUG({
        dbgs() << "[AAPrivatizablePtr] Argument " << *Arg
               << " check if be privatized in the context of its parent ("
               /* ... */
               << ")\n[AAPrivatizablePtr] because it is an argument in a "
               /* ... */
               << DCArgNo << "@" << DCCallee->getName() << ").\n";
      });

      if (unsigned(DCArgNo) < DCCallee->arg_size()) {
        const auto *DCArgPrivAA = A.getAAFor<AAPrivatizablePtr>(
            /* ... direct callee argument position ... */,
            DepClassTy::REQUIRED);
        if (DCArgPrivAA && DCArgPrivAA->isValidState()) {
          auto DCArgPrivTy = DCArgPrivAA->getPrivatizableType();
          // ...
          if (*DCArgPrivTy == PrivatizableType)
            return true;
        }
      }

      LLVM_DEBUG({
        dbgs() << "[AAPrivatizablePtr] Argument " << *Arg
               << " cannot be privatized in the context of its parent ("
               /* ... */
               << ")\n[AAPrivatizablePtr] because it is an argument in a "
               /* ... */
               << ").\n[AAPrivatizablePtr] for which the argument "
                  "privatization is not compatible.\n";
      });
      return false;
    };

    // Helper to check if the associated argument is used at the given abstract
    // call site in a way that is incompatible with the privatization assumed
    // here.
    auto IsCompatiblePrivArgOfOtherCallSite = [&](AbstractCallSite ACS) {
      if (ACS.isDirectCall())
        return IsCompatiblePrivArgOfCallback(*ACS.getInstruction());
      if (ACS.isCallbackCall())
        return IsCompatiblePrivArgOfDirectCS(ACS);
      return false;
    };

    if (!A.checkForAllCallSites(IsCompatiblePrivArgOfOtherCallSite, *this, true,
                                UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
  }
  /// Given a privatizable type in \p PrivType, collect the replacements types.
  void identifyReplacementTypes(Type *PrivType,
                                SmallVectorImpl<Type *> &ReplacementTypes) {
    // TODO: For now we expand the privatization type to the fullest which can
    //       lead to dead arguments that need to be removed later.
    assert(PrivType && "Expected privatizable type!");

    if (auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
      for (unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++)
        ReplacementTypes.push_back(PrivStructType->getElementType(u));
    } else if (auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
      ReplacementTypes.append(PrivArrayType->getNumElements(),
                              PrivArrayType->getElementType());
    } else {
      ReplacementTypes.push_back(PrivType);
    }
  }

  /// Initialize \p Base according to the type \p PrivType at position \p IP.
  /// The values needed are taken from the arguments of \p F starting at
  /// position \p ArgNo.
  static void createInitialization(Type *PrivType, Value &Base, Function &F,
                                   unsigned ArgNo, BasicBlock::iterator IP) {
    assert(PrivType && "Expected privatizable type!");

    const DataLayout &DL = F.getDataLayout();

    // Traverse the type, build GEPs and stores.
    if (auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
      const StructLayout *PrivStructLayout = DL.getStructLayout(PrivStructType);
      for (unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
        Value *Ptr = /* ... GEP to struct element u ... */;
        new StoreInst(F.getArg(ArgNo + u), Ptr, IP);
      }
    } else if (auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
      Type *PointeeTy = PrivArrayType->getElementType();
      uint64_t PointeeTySize = DL.getTypeStoreSize(PointeeTy);
      for (unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
        Value *Ptr = /* ... GEP to array element u ... */;
        new StoreInst(F.getArg(ArgNo + u), Ptr, IP);
      }
    } else {
      new StoreInst(F.getArg(ArgNo), &Base, IP);
    }
  }
  /// Extract values from \p Base according to the type \p PrivType at the
  /// call position \p ACS. The values are appended to \p ReplacementValues.
  void createReplacementValues(Align Alignment, Type *PrivType,
                               AbstractCallSite ACS, Value *Base,
                               SmallVectorImpl<Value *> &ReplacementValues) {
    assert(PrivType && "Expected privatizable type!");
    // ...

    if (auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
      const StructLayout *PrivStructLayout = DL.getStructLayout(PrivStructType);
      for (unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
        Type *PointeeTy = PrivStructType->getElementType(u);
        // ... create a load of element u ...
        L->setAlignment(Alignment);
        ReplacementValues.push_back(L);
      }
    } else if (auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
      Type *PointeeTy = PrivArrayType->getElementType();
      uint64_t PointeeTySize = DL.getTypeStoreSize(PointeeTy);
      for (unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
        // ... create a load of element u ...
        L->setAlignment(Alignment);
        ReplacementValues.push_back(L);
      }
    } else {
      // ... create a load of the whole value ...
      L->setAlignment(Alignment);
      ReplacementValues.push_back(L);
    }
  }
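// Sketch only, mirroring identifyReplacementTypes/createReplacementValues
// above: a privatized aggregate argument is split into one scalar per element,
// each call site passes a load of each element, and the callee re-materializes
// the aggregate in a fresh alloca. Padding and alignment handling are omitted
// here on purpose (an assumption of the sketch), unlike the real StructLayout
// based code.
#include <cstddef>
#include <vector>

struct ElemSketch { size_t Size; };             // stand-in for an element type

static std::vector<size_t>
elementOffsetsSketch(const std::vector<ElemSketch> &Elems) {
  std::vector<size_t> Offsets;
  size_t Off = 0;
  for (const ElemSketch &E : Elems) {           // naive, padding-free layout
    Offsets.push_back(Off);
    Off += E.Size;
  }
  return Offsets;                               // one load/store per offset
}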
  /// See AbstractAttribute::manifest(...)
  ChangeStatus manifest(Attributor &A) override {
    if (!PrivatizableType)
      return ChangeStatus::UNCHANGED;
    assert(*PrivatizableType && "Expected privatizable type!");

    // Collect all tail calls in the function as we cannot allow new allocas to
    // escape into tail recursion.
    // TODO: Be smarter about new allocas escaping into tail calls.
    SmallVector<CallInst *, 16> TailCalls;
    bool UsedAssumedInformation = false;
    if (!A.checkForAllInstructions(
            [&](Instruction &I) {
              CallInst &CI = cast<CallInst>(I);
              if (CI.isTailCall())
                TailCalls.push_back(&CI);
              return true;
            },
            *this, {Instruction::Call}, UsedAssumedInformation))
      return ChangeStatus::UNCHANGED;

    Argument *Arg = getAssociatedArgument();
    // Query AAAlign attribute for alignment of associated argument to
    // determine the best alignment of loads.
    const auto *AlignAA =
        A.getAAFor<AAAlign>(*this, IRPosition::value(*Arg), DepClassTy::NONE);

    // Callback to repair the associated function. A new alloca is placed at
    // the beginning and initialized with the values passed through arguments.
    // The new alloca replaces the use of the old pointer argument.
    auto FnRepairCB =
        [=](const Attributor::ArgumentReplacementInfo &ARI,
            Function &ReplacementFn, Function::arg_iterator ArgIt) {
          BasicBlock &EntryBB = ReplacementFn.getEntryBlock();
          BasicBlock::iterator IP = EntryBB.getFirstInsertionPt();
          const DataLayout &DL = IP->getDataLayout();
          unsigned AS = DL.getAllocaAddrSpace();
          Instruction *AI = new AllocaInst(*PrivatizableType, AS,
                                           Arg->getName() + ".priv", IP);
          createInitialization(*PrivatizableType, *AI, ReplacementFn,
                               ArgIt->getArgNo(), IP);

          if (AI->getType() != Arg->getType())
            AI = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
                AI, Arg->getType(), "", IP);
          // ...
          for (CallInst *CI : TailCalls)
            CI->setTailCall(false);
        };

    // Callback to repair a call site of the associated function. The elements
    // of the privatizable type are loaded prior to the call and passed to the
    // new function version.
    auto ACSRepairCB =
        [=](const Attributor::ArgumentReplacementInfo &ARI,
            AbstractCallSite ACS, SmallVectorImpl<Value *> &NewArgOperands) {
          // ...
          createReplacementValues(
              AlignAA ? AlignAA->getAssumedAlign() : Align(0),
              *PrivatizableType, ACS,
              /* ... */ NewArgOperands);
        };

    // Collect the types that will replace the privatizable type in the
    // function signature.
    SmallVector<Type *, 16> ReplacementTypes;
    identifyReplacementTypes(*PrivatizableType, ReplacementTypes);

    // Register a rewrite of the argument.
    if (A.registerFunctionSignatureRewrite(*Arg, ReplacementTypes,
                                           std::move(FnRepairCB),
                                           std::move(ACSRepairCB)))
      return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_ARG_ATTR(privatizable_ptr)
  }
};
struct AAPrivatizablePtrFloating : public AAPrivatizablePtrImpl {
  AAPrivatizablePtrFloating(const IRPosition &IRP, Attributor &A)
      : AAPrivatizablePtrImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    // TODO: We can privatize more than arguments.
    indicatePessimisticFixpoint();
  }

  ChangeStatus updateImpl(Attributor &A) override {
    llvm_unreachable(
        "AAPrivatizablePtrFloating: updateImpl will not be called");
  }

  /// See AAPrivatizablePtrImpl::identifyPrivatizableType(...)
  std::optional<Type *> identifyPrivatizableType(Attributor &A) override {
    Value *Obj = /* ... underlying object of the associated value ... */;
    if (!Obj) {
      LLVM_DEBUG(dbgs() << "[AAPrivatizablePtr] No underlying object found!\n");
      return nullptr;
    }

    if (auto *AI = dyn_cast<AllocaInst>(Obj))
      if (/* ... single element alloca ... */)
        return AI->getAllocatedType();
    if (auto *Arg = dyn_cast<Argument>(Obj)) {
      auto *PrivArgAA = A.getAAFor<AAPrivatizablePtr>(
          *this, IRPosition::argument(*Arg), DepClassTy::REQUIRED);
      if (PrivArgAA && PrivArgAA->isAssumedPrivatizablePtr())
        return PrivArgAA->getPrivatizableType();
    }

    LLVM_DEBUG(dbgs() << "[AAPrivatizablePtr] Underlying object neither valid "
                         "alloca nor privatizable argument: "
                      << *Obj << "\n");
    return nullptr;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FLOATING_ATTR(privatizable_ptr)
  }
};

struct AAPrivatizablePtrCallSiteArgument final
    : public AAPrivatizablePtrFloating {
  AAPrivatizablePtrCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAPrivatizablePtrFloating(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    if (A.hasAttr(getIRPosition(), Attribute::ByVal))
      indicateOptimisticFixpoint();
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    PrivatizableType = identifyPrivatizableType(A);
    if (!PrivatizableType)
      return ChangeStatus::UNCHANGED;
    if (!*PrivatizableType)
      return indicatePessimisticFixpoint();

    const IRPosition &IRP = getIRPosition();
    bool IsKnownNoCapture;
    bool IsAssumedNoCapture = /* ... nocapture query ... */(
        A, this, IRP, DepClassTy::REQUIRED, IsKnownNoCapture);
    if (!IsAssumedNoCapture) {
      LLVM_DEBUG(dbgs() << "[AAPrivatizablePtr] pointer might be captured!\n");
      return indicatePessimisticFixpoint();
    }

    bool IsKnownNoAlias;
    if (!/* ... noalias query ... */(
            A, this, IRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
      LLVM_DEBUG(dbgs() << "[AAPrivatizablePtr] pointer might alias!\n");
      return indicatePessimisticFixpoint();
    }

    if (/* ... the pointee may be written ... */) {
      LLVM_DEBUG(dbgs() << "[AAPrivatizablePtr] pointer is written!\n");
      return indicatePessimisticFixpoint();
    }

    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_CSARG_ATTR(privatizable_ptr)
  }
};

struct AAPrivatizablePtrCallSiteReturned final
    : public AAPrivatizablePtrFloating {
  AAPrivatizablePtrCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAPrivatizablePtrFloating(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    // TODO: We can privatize more than arguments.
    indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_CSRET_ATTR(privatizable_ptr)
  }
};

struct AAPrivatizablePtrReturned final : public AAPrivatizablePtrFloating {
  AAPrivatizablePtrReturned(const IRPosition &IRP, Attributor &A)
      : AAPrivatizablePtrFloating(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    // TODO: We can privatize more than arguments.
    indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FNRET_ATTR(privatizable_ptr)
  }
};
/// -------------------- Memory Behavior Attributes ----------------------------
/// Includes read-none, read-only, and write-only.
struct AAMemoryBehaviorImpl : public AAMemoryBehavior {
  AAMemoryBehaviorImpl(const IRPosition &IRP, Attributor &A)
      : AAMemoryBehavior(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    intersectAssumedBits(BEST_STATE);
    getKnownStateFromValue(A, getIRPosition(), getState());
    AAMemoryBehavior::initialize(A);
  }

  /// Return the memory behavior information encoded in the IR for \p IRP.
  static void getKnownStateFromValue(Attributor &A, const IRPosition &IRP,
                                     BitIntegerState &State,
                                     bool IgnoreSubsumingPositions = false) {
    SmallVector<Attribute, 2> Attrs;
    A.getAttrs(IRP, AttrKinds, Attrs, IgnoreSubsumingPositions);
    for (const Attribute &Attr : Attrs) {
      switch (Attr.getKindAsEnum()) {
      case Attribute::ReadNone:
        State.addKnownBits(NO_ACCESSES);
        break;
      case Attribute::ReadOnly:
        State.addKnownBits(NO_WRITES);
        break;
      case Attribute::WriteOnly:
        State.addKnownBits(NO_READS);
        break;
      default:
        llvm_unreachable("Unexpected attribute!");
      }
    }

    if (const Instruction *I = dyn_cast<Instruction>(&IRP.getAnchorValue())) {
      if (!I->mayReadFromMemory())
        State.addKnownBits(NO_READS);
      if (!I->mayWriteToMemory())
        State.addKnownBits(NO_WRITES);
    }
  }

  /// See AbstractAttribute::getDeducedAttributes(...).
  void getDeducedAttributes(Attributor &A, LLVMContext &Ctx,
                            SmallVectorImpl<Attribute> &Attrs) const override {
    if (isAssumedReadNone())
      Attrs.push_back(Attribute::get(Ctx, Attribute::ReadNone));
    else if (isAssumedReadOnly())
      Attrs.push_back(Attribute::get(Ctx, Attribute::ReadOnly));
    else if (isAssumedWriteOnly())
      Attrs.push_back(Attribute::get(Ctx, Attribute::WriteOnly));
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    const IRPosition &IRP = getIRPosition();

    if (A.hasAttr(IRP, Attribute::ReadNone,
                  /* IgnoreSubsumingPositions */ true))
      return ChangeStatus::UNCHANGED;

    // ...
    if (/* ... nothing deducible ... */)
      return ChangeStatus::UNCHANGED;

    // ...
    A.removeAttrs(IRP, AttrKinds);
    // Clear conflicting writable attribute.
    if (isAssumedReadOnly())
      A.removeAttrs(IRP, Attribute::Writable);
    // ...
  }

  /// See AbstractState::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    if (isAssumedReadNone())
      return "readnone";
    if (isAssumedReadOnly())
      return "readonly";
    if (isAssumedWriteOnly())
      return "writeonly";
    return "may-read/write";
  }

  /// The set of IR attributes AAMemoryBehavior deals with.
  static const Attribute::AttrKind AttrKinds[3];
};

const Attribute::AttrKind AAMemoryBehaviorImpl::AttrKinds[] = {
    Attribute::ReadNone, Attribute::ReadOnly, Attribute::WriteOnly};
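// A minimal, standalone sketch of the bit encoding the code above works with
// (the numeric values are illustrative assumptions, not the real enum):
// "readnone" keeps both no-read and no-write bits, "readonly" keeps the
// no-write bit, "writeonly" keeps the no-read bit, and observing a read or a
// write clears the corresponding bit.
#include <cstdint>

enum : uint8_t {
  NO_READS_BIT = 1u << 0,
  NO_WRITES_BIT = 1u << 1,
  BEST_STATE_BITS = NO_READS_BIT | NO_WRITES_BIT
};

static const char *describeMemBehaviorSketch(uint8_t Assumed) {
  if (Assumed == BEST_STATE_BITS)
    return "readnone";
  if (Assumed & NO_WRITES_BIT)
    return "readonly";
  if (Assumed & NO_READS_BIT)
    return "writeonly";
  return "may-read/write";
}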
struct AAMemoryBehaviorFloating : AAMemoryBehaviorImpl {
  AAMemoryBehaviorFloating(const IRPosition &IRP, Attributor &A)
      : AAMemoryBehaviorImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override;

  /// See AbstractAttribute::trackStatistics().
  void trackStatistics() const override {
    if (isAssumedReadNone())
      /* ... */;
    else if (isAssumedWriteOnly())
      /* ... */;
    // ...
  }

private:
  /// Return true if users of \p UserI should also be followed.
  bool followUsersOfUseIn(Attributor &A, const Use &U,
                          const Instruction *UserI);

  /// Update the state according to the effect of use \p U in \p UserI.
  void analyzeUseIn(Attributor &A, const Use &U, const Instruction *UserI);
};

/// Memory behavior attribute for a function argument.
struct AAMemoryBehaviorArgument : AAMemoryBehaviorFloating {
  AAMemoryBehaviorArgument(const IRPosition &IRP, Attributor &A)
      : AAMemoryBehaviorFloating(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    intersectAssumedBits(BEST_STATE);
    const IRPosition &IRP = getIRPosition();
    // ...
    bool HasByVal = A.hasAttr(IRP, {Attribute::ByVal},
                              /* IgnoreSubsumingPositions */ true);
    getKnownStateFromValue(A, IRP, getState(),
                           /* IgnoreSubsumingPositions */ HasByVal);
  }

  ChangeStatus manifest(Attributor &A) override {
    // ...
    if (/* ... position not manifestable ... */)
      return ChangeStatus::UNCHANGED;

    // TODO: From readattrs.ll: "inalloca parameters are always considered
    //       written".
    if (A.hasAttr(getIRPosition(),
                  {Attribute::InAlloca, Attribute::Preallocated})) {
      removeKnownBits(NO_WRITES);
      removeAssumedBits(NO_WRITES);
    }
    A.removeAttrs(getIRPosition(), AttrKinds);
    return AAMemoryBehaviorFloating::manifest(A);
  }

  /// See AbstractAttribute::trackStatistics().
  void trackStatistics() const override {
    if (isAssumedReadNone())
      /* ... */;
    else if (isAssumedWriteOnly())
      /* ... */;
    // ...
  }
};

struct AAMemoryBehaviorCallSiteArgument final : AAMemoryBehaviorArgument {
  AAMemoryBehaviorCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAMemoryBehaviorArgument(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    // If we don't have an associated attribute this is either a variadic call
    // or an indirect call, either way, nothing to do here.
    Argument *Arg = getAssociatedArgument();
    if (!Arg)
      indicatePessimisticFixpoint();
    // ...
    if (/* ... byval argument ... */) {
      addKnownBits(NO_WRITES);
      removeKnownBits(NO_READS);
      removeAssumedBits(NO_READS);
    }
    AAMemoryBehaviorArgument::initialize(A);
    if (getAssociatedFunction()->isDeclaration())
      indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    // Argument is readnone/readonly/writeonly if the corresponding formal
    // argument is.
    Argument *Arg = getAssociatedArgument();
    const IRPosition &ArgPos = IRPosition::argument(*Arg);
    const auto *ArgAA =
        A.getAAFor<AAMemoryBehavior>(*this, ArgPos, DepClassTy::REQUIRED);
    if (!ArgAA)
      return indicatePessimisticFixpoint();
    return clampStateAndIndicateChange(getState(), ArgAA->getState());
  }

  /// See AbstractAttribute::trackStatistics().
  void trackStatistics() const override {
    if (isAssumedReadNone())
      /* ... */;
    else if (isAssumedWriteOnly())
      /* ... */;
    // ...
  }
};

/// Memory behavior attribute for a call site return position.
struct AAMemoryBehaviorCallSiteReturned final : AAMemoryBehaviorFloating {
  AAMemoryBehaviorCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAMemoryBehaviorFloating(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAMemoryBehaviorImpl::initialize(A);
  }

  /// See AbstractAttribute::manifest(...). We do not annotate returned values.
  ChangeStatus manifest(Attributor &A) override {
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics().
  void trackStatistics() const override {}
};

/// An AA to represent the memory behavior function attributes.
struct AAMemoryBehaviorFunction final : public AAMemoryBehaviorImpl {
  AAMemoryBehaviorFunction(const IRPosition &IRP, Attributor &A)
      : AAMemoryBehaviorImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(Attributor &A).
  ChangeStatus updateImpl(Attributor &A) override;

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    // ... map the assumed state onto MemoryEffects ...
    if (isAssumedReadNone())
      /* ME = none */;
    else if (isAssumedWriteOnly())
      /* ME = write-only */;
    // ...
    A.removeAttrs(getIRPosition(), AttrKinds);
    // Clear conflicting writable attributes on the arguments.
    for (Argument &Arg : F.args())
      /* ... remove Attribute::Writable ... */;
    return A.manifestAttrs(getIRPosition(),
                           Attribute::getWithMemoryEffects(F.getContext(), ME));
  }

  /// See AbstractAttribute::trackStatistics().
  void trackStatistics() const override {
    if (isAssumedReadNone())
      /* ... */;
    else if (isAssumedWriteOnly())
      /* ... */;
    // ...
  }
};

/// AAMemoryBehavior attribute for call sites.
struct AAMemoryBehaviorCallSite final
    : AACalleeToCallSite<AAMemoryBehavior, AAMemoryBehaviorImpl> {
  AAMemoryBehaviorCallSite(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AAMemoryBehavior, AAMemoryBehaviorImpl>(IRP, A) {}

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    // ... map the assumed state onto MemoryEffects ...
    if (isAssumedReadNone())
      /* ME = none */;
    else if (isAssumedWriteOnly())
      /* ME = write-only */;
    // ...
    A.removeAttrs(getIRPosition(), AttrKinds);
    // Clear conflicting writable attributes on the call site arguments.
    for (Use &U : CB.args())
      /* ... remove */ Attribute::Writable /* ... */;
    return A.manifestAttrs(
        getIRPosition(), Attribute::getWithMemoryEffects(CB.getContext(), ME));
  }

  /// See AbstractAttribute::trackStatistics().
  void trackStatistics() const override {
    if (isAssumedReadNone())
      /* ... */;
    else if (isAssumedWriteOnly())
      /* ... */;
    // ...
  }
};
ChangeStatus AAMemoryBehaviorFunction::updateImpl(Attributor &A) {
  // The current assumed state used to determine a change.
  auto AssumedState = getAssumed();

  auto CheckRWInst = [&](Instruction &I) {
    // If the instruction has an own memory behavior state, use it to restrict
    // the local state.
    if (const auto *CB = dyn_cast<CallBase>(&I)) {
      const auto *MemBehaviorAA = A.getAAFor<AAMemoryBehavior>(
          /* ... call site function position ... */);
      if (MemBehaviorAA) {
        intersectAssumedBits(MemBehaviorAA->getAssumed());
        return !isAtFixpoint();
      }
    }

    // Remove access kind modifiers if necessary.
    if (I.mayReadFromMemory())
      removeAssumedBits(NO_READS);
    if (I.mayWriteToMemory())
      removeAssumedBits(NO_WRITES);
    return !isAtFixpoint();
  };

  bool UsedAssumedInformation = false;
  if (!A.checkForAllReadWriteInstructions(CheckRWInst, *this,
                                          UsedAssumedInformation))
    return indicatePessimisticFixpoint();

  return (AssumedState != getAssumed()) ? ChangeStatus::CHANGED
                                        : ChangeStatus::UNCHANGED;
}

ChangeStatus AAMemoryBehaviorFloating::updateImpl(Attributor &A) {
  const IRPosition &IRP = getIRPosition();
  // ...

  // First, check the function scope. We take the known information and we
  // avoid work if the assumed information implies the current assumed
  // information for this attribute.
  const auto *FnMemAA = /* ... function scope AAMemoryBehavior ... */;
  if (FnMemAA) {
    FnMemAssumedState = FnMemAA->getAssumed();
    S.addKnownBits(FnMemAA->getKnown());
    if ((S.getAssumed() & FnMemAA->getAssumed()) == S.getAssumed())
      return ChangeStatus::UNCHANGED;
  }

  // The current assumed state used to determine a change.
  auto AssumedState = S.getAssumed();

  // If the value is captured, fall back to the function-level information.
  bool IsKnownNoCapture;
  const AANoCapture *ArgNoCaptureAA = nullptr;
  // ...
  if (!IsAssumedNoCapture &&
      /* ... not even no-capture-maybe-returned ... */) {
    S.intersectAssumedBits(FnMemAssumedState);
    return (AssumedState != getAssumed()) ? ChangeStatus::CHANGED
                                          : ChangeStatus::UNCHANGED;
  }

  // Visit and expand uses until all are analyzed or a fixpoint is reached.
  auto UsePred = [&](const Use &U, bool &Follow) -> bool {
    Instruction *UserI = cast<Instruction>(U.getUser());
    LLVM_DEBUG(dbgs() << "[AAMemoryBehavior] Use: " << *U << " in " << *UserI
                      /* ... */);

    // Check if the users of UserI should also be visited.
    Follow = followUsersOfUseIn(A, U, UserI);

    // If UserI might touch memory we analyze the use in detail.
    if (UserI->mayReadOrWriteMemory())
      analyzeUseIn(A, U, UserI);

    return !isAtFixpoint();
  };

  if (!A.checkForAllUses(UsePred, *this, getAssociatedValue()))
    return indicatePessimisticFixpoint();

  return (AssumedState != getAssumed()) ? ChangeStatus::CHANGED
                                        : ChangeStatus::UNCHANGED;
}

bool AAMemoryBehaviorFloating::followUsersOfUseIn(Attributor &A, const Use &U,
                                                  const Instruction *UserI) {
  // The loaded value is unrelated to the pointer argument, no need to follow
  // the users of the load. Stores and calls are handled separately.
  // ...

  // Check if the value is passed via a pointer argument that is not captured;
  // only then are the transitive users relevant.
  if (U.get()->getType()->isPointerTy()) {
    // ...
    bool IsKnownNoCapture;
    // ...
  }
  // ...
}

void AAMemoryBehaviorFloating::analyzeUseIn(Attributor &A, const Use &U,
                                            const Instruction *UserI) {
  switch (UserI->getOpcode()) {
  default:
    // TODO: Handle all atomics and other side-effect operations we know of.
    break;
  case Instruction::Load:
    // Loads cause the NO_READS property to disappear.
    removeAssumedBits(NO_READS);
    return;

  case Instruction::Store:
    // Stores cause the NO_WRITES property to disappear if the use is the
    // pointer operand. A store of the value itself escapes it.
    if (/* ... use is the pointer operand ... */)
      removeAssumedBits(NO_WRITES);
    else
      indicatePessimisticFixpoint();
    return;

  case Instruction::Call:
  case Instruction::CallBr:
  case Instruction::Invoke: {
    // For call sites we look at the argument memory behavior attribute (this
    // could be recursive!) in order to restrict our own state.
    // ...
    if (/* ... the use is the callee operand ... */) {
      indicatePessimisticFixpoint();
      return;
    }
    // ...
    removeAssumedBits(NO_READS);
    // ...

    // Adjust the possible access behavior based on the information on the
    // call site argument.
    if (U.get()->getType()->isPointerTy())
      /* ... query the call site argument position ... */;
    const auto *MemBehaviorAA = /* ... */;
    // ...
    intersectAssumedBits(MemBehaviorAA->getAssumed());
    return;
  }
  }

  // Generally, look at the "may-properties" and adjust the assumed state if we
  // did not trigger special handling before.
  if (UserI->mayReadFromMemory())
    removeAssumedBits(NO_READS);
  if (UserI->mayWriteToMemory())
    removeAssumedBits(NO_WRITES);
}
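// A minimal model of the per-use analysis just above (an assumed
// simplification, not the Attributor code): a load through the value clears
// the no-read bit, a store *to* it clears the no-write bit, and a store *of*
// it (the value escapes into memory) drops to the worst state. All enum names
// are illustrative.
#include <cstdint>

enum UseKindSketch { LoadOfValue, StoreToValue, StoreOfValue, OtherUse };

static uint8_t analyzeUseSketch(uint8_t State, UseKindSketch K) {
  constexpr uint8_t NoReads = 1u << 0, NoWrites = 1u << 1;
  switch (K) {
  case LoadOfValue:
    return State & uint8_t(~NoReads);   // a read was observed
  case StoreToValue:
    return State & uint8_t(~NoWrites);  // a write was observed
  case StoreOfValue:
    return 0;                           // escaped: give up entirely
  case OtherUse:
    return State;                       // handled elsewhere (calls, etc.)
  }
  return 0;
}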
/// -------------------- Memory Locations Attributes ---------------------------
/// Includes read-none, argmemonly, inaccessiblememonly,
/// inaccessiblemem_or_argmemonly and variants thereof.
std::string AAMemoryLocation::getMemoryLocationsAsStr(
    AAMemoryLocation::MemoryLocationsKind MLK) {
  if (/* ... no location excluded ... */)
    return "all memory";
  // ...
  std::string S = "memory:";
  // ...
  if (/* internal globals may be accessed */)
    S += "internal global,";
  if (/* external globals may be accessed */)
    S += "external global,";
  // ...
  if (/* inaccessible memory may be accessed */)
    S += "inaccessible,";
  // ...
  return S;
}

struct AAMemoryLocationImpl : public AAMemoryLocation {
  AAMemoryLocationImpl(const IRPosition &IRP, Attributor &A)
      : AAMemoryLocation(IRP, A) /* ... */ {
    AccessKind2Accesses.fill(nullptr);
  }

  ~AAMemoryLocationImpl() override {
    // The AccessSets are allocated via a BumpPtrAllocator, we call the
    // destructor manually.
    for (AccessSet *AS : AccessKind2Accesses)
      /* ... destroy AS ... */;
  }

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    intersectAssumedBits(BEST_STATE);
    getKnownStateFromValue(A, getIRPosition(), getState());
    AAMemoryLocation::initialize(A);
  }

  /// Return the memory behavior information encoded in the IR for \p IRP.
  static void getKnownStateFromValue(Attributor &A, const IRPosition &IRP,
                                     BitIntegerState &State,
                                     bool IgnoreSubsumingPositions = false) {
    // For internal functions we ignore `argmemonly` and
    // `inaccessiblemem_or_argmemonly` as we might break them via
    // interprocedural constant propagation.
    bool UseArgMemOnly = true;
    Function *AnchorFn = IRP.getAnchorScope();
    if (AnchorFn && A.isRunOn(*AnchorFn))
      UseArgMemOnly = !AnchorFn->hasLocalLinkage();

    SmallVector<Attribute, 2> Attrs;
    A.getAttrs(IRP, {Attribute::Memory}, Attrs, IgnoreSubsumingPositions);
    for (const Attribute &Attr : Attrs) {
      // ...
      if (/* only inaccessible memory */)
        State.addKnownBits(inverseLocation(NO_INACCESSIBLE_MEM, true, true));
      else if (/* only argument memory */ && UseArgMemOnly)
        State.addKnownBits(inverseLocation(NO_ARGUMENT_MEM, true, true));
      else if (/* only argument memory */ && !UseArgMemOnly)
        A.manifestAttrs(IRP,
                        Attribute::getWithMemoryEffects(/* ... */),
                        /* ForceReplace */ true);
      else if (/* inaccessible-or-argument memory */ && UseArgMemOnly)
        State.addKnownBits(inverseLocation(
            NO_INACCESSIBLE_MEM | NO_ARGUMENT_MEM, true, true));
      else if (/* inaccessible-or-argument memory */ && !UseArgMemOnly)
        A.manifestAttrs(IRP,
                        Attribute::getWithMemoryEffects(/* ... */),
                        /* ForceReplace */ true);
      // ...
    }
  }

  /// See AbstractAttribute::getDeducedAttributes(...).
  void getDeducedAttributes(Attributor &A, LLVMContext &Ctx,
                            SmallVectorImpl<Attribute> &Attrs) const override {
    // ...
    if (isAssumedReadNone())
      /* ... memory(none) ... */;
    else if (isAssumedInaccessibleMemOnly())
      Attrs.push_back(Attribute::getWithMemoryEffects(
          Ctx, /* ... inaccessiblemem only ... */));
    else if (isAssumedArgMemOnly())
      /* ... memory(argmem) ... */;
    else if (isAssumedInaccessibleOrArgMemOnly())
      Attrs.push_back(Attribute::getWithMemoryEffects(
          Ctx, /* ... inaccessiblemem or argmem ... */));
    // ...
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    const IRPosition &IRP = getIRPosition();
    SmallVector<Attribute, 1> DeducedAttrs;
    getDeducedAttributes(A, IRP.getAnchorValue().getContext(), DeducedAttrs);
    if (DeducedAttrs.size() != 1)
      return ChangeStatus::UNCHANGED;
    // ...
    return A.manifestAttrs(IRP, Attribute::getWithMemoryEffects(/* ... */));
  }

  /// See AAMemoryLocation::checkForAllAccessesToMemoryKind(...).
  bool checkForAllAccessesToMemoryKind(
      function_ref<bool(const Instruction *, const Value *, AccessKind,
                        MemoryLocationsKind)>
          Pred,
      MemoryLocationsKind RequestedMLK) const override {
    if (!isValidState())
      return false;

    MemoryLocationsKind AssumedMLK = getAssumedNotAccessedLocation();
    if (AssumedMLK == NO_LOCATIONS)
      return true;

    unsigned Idx = 0;
    for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS;
         CurMLK *= 2, ++Idx) {
      if (CurMLK & RequestedMLK)
        continue;

      if (const AccessSet *Accesses = AccessKind2Accesses[Idx])
        for (const AccessInfo &AI : *Accesses)
          if (!Pred(AI.I, AI.Ptr, AI.Kind, CurMLK))
            return false;
    }
    return true;
  }

  ChangeStatus indicatePessimisticFixpoint() override {
    // If we give up and indicate a pessimistic fixpoint this instruction will
    // become an access for all potential access kinds.
    bool Changed = false;
    MemoryLocationsKind KnownMLK = getKnown();
    Instruction *I = dyn_cast<Instruction>(&getAssociatedValue());
    for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2)
      if (!(CurMLK & KnownMLK))
        updateStateAndAccessesMap(getState(), CurMLK, I, nullptr, Changed,
                                  getAccessKindFromInst(I));
    return AAMemoryLocation::indicatePessimisticFixpoint();
  }

protected:
  /// Helper struct to tie an access to its instruction, pointer, and kind.
  struct AccessInfo {
    // ...
    bool operator()(const AccessInfo &LHS, const AccessInfo &RHS) const {
      // ...
      if (LHS.Ptr != RHS.Ptr)
        return LHS.Ptr < RHS.Ptr;
      if (LHS.Kind != RHS.Kind)
        return LHS.Kind < RHS.Kind;
      return false;
    }
  };

  /// Mapping from *single* memory location kinds to the accesses encountered
  /// for that kind.
  using AccessSet = SmallSet<AccessInfo, 2, AccessInfo>;
  std::array<AccessSet *, llvm::ConstantLog2<VALID_STATE>()>
      AccessKind2Accesses;

  /// Categorize the pointer arguments of \p CB and update \p AccessedLocs.
  void categorizeArgumentPointerLocations(
      Attributor &A, CallBase &CB, AAMemoryLocation::StateType &AccessedLocs,
      bool &Changed);

  /// Return the kind(s) of location that may be accessed by \p I.
  AAMemoryLocation::MemoryLocationsKind
  categorizeAccessedLocations(Attributor &A, Instruction &I, bool &Changed);

  /// Return the access kind as determined by \p I.
  AccessKind getAccessKindFromInst(const Instruction *I) {
    AccessKind AK = READ_WRITE;
    if (I) {
      AK = I->mayReadFromMemory() ? READ : NONE;
      // ...
    }
    return AK;
  }

  /// Update the state \p State and the AccessKind2Accesses map given that \p I
  /// is an access to a \p MLK memory location through the pointer \p Ptr.
  void updateStateAndAccessesMap(AAMemoryLocation::StateType &State,
                                 MemoryLocationsKind MLK, const Instruction *I,
                                 const Value *Ptr, bool &Changed,
                                 AccessKind AK = READ_WRITE) {
    // ...
    if (MLK == NO_UNKOWN_MEM)
      MLK = NO_LOCATIONS;
    State.removeAssumedBits(MLK);
  }

  /// Determine the underlying location kinds for \p Ptr, e.g., globals or
  /// arguments, and update the state and access map accordingly.
  void categorizePtrValue(Attributor &A, const Instruction &I, const Value &Ptr,
                          AAMemoryLocation::StateType &State, bool &Changed,
                          unsigned AccessAS = 0);
};
void AAMemoryLocationImpl::categorizePtrValue(
    Attributor &A, const Instruction &I, const Value &Ptr,
    AAMemoryLocation::StateType &State, bool &Changed, unsigned AccessAS) {
  LLVM_DEBUG(dbgs() << "[AAMemoryLocation] Categorize pointer locations for "
                    /* ... */);

  auto Pred = [&](Value &Obj) {
    // ...
    MemoryLocationsKind MLK = NO_LOCATIONS;

    if (isa<Argument>(&Obj)) {
      MLK = NO_ARGUMENT_MEM;
    } else if (auto *GV = dyn_cast<GlobalValue>(&Obj)) {
      // Reading constant memory is not treated as a location access.
      if (auto *GVar = dyn_cast<GlobalVariable>(GV))
        if (GVar->isConstant())
          return true;

      if (GV->hasLocalLinkage())
        MLK = NO_GLOBAL_INTERNAL_MEM;
      else
        MLK = NO_GLOBAL_EXTERNAL_MEM;
    } else if (/* ... object is a noalias call result ... */) {
      bool IsKnownNoAlias;
      // ...
      MLK = NO_MALLOCED_MEM;
      // ...
    } else {
      MLK = NO_UNKOWN_MEM;
    }

    assert(MLK != NO_LOCATIONS && "No location specified!");
    LLVM_DEBUG(dbgs() << "[AAMemoryLocation] Ptr value can be categorized: "
                      << Obj << " -> " << getMemoryLocationsAsStr(MLK) << "\n");
    updateStateAndAccessesMap(State, MLK, &I, &Obj, Changed,
                              getAccessKindFromInst(&I));
    return true;
  };

  const auto *AA = A.getAAFor<AAUnderlyingObjects>(
      /* ... value position of Ptr ... */);
  if (!AA || !AA->forallUnderlyingObjects(Pred /* ... */)) {
    LLVM_DEBUG(
        dbgs() << "[AAMemoryLocation] Pointer locations not categorized\n");
    updateStateAndAccessesMap(State, NO_UNKOWN_MEM, &I, nullptr, Changed,
                              getAccessKindFromInst(&I));
    return;
  }

  LLVM_DEBUG(
      dbgs() << "[AAMemoryLocation] Accessed locations with pointer locations: "
             /* ... */);
}

void AAMemoryLocationImpl::categorizeArgumentPointerLocations(
    Attributor &A, CallBase &CB, AAMemoryLocation::StateType &AccessedLocs,
    bool &Changed) {
  for (unsigned ArgNo = 0, E = CB.arg_size(); ArgNo < E; ++ArgNo) {
    // Skip non-pointer arguments.
    // ...

    // Skip readnone arguments.
    const auto *ArgOpMemLocationAA =
        /* ... AAMemoryBehavior of the call site argument ... */;
    if (ArgOpMemLocationAA && ArgOpMemLocationAA->isAssumedReadNone())
      continue;

    // Categorize potentially accessed pointer arguments as if we accessed them
    // ourselves.
    categorizePtrValue(A, CB, *ArgOp, AccessedLocs, Changed);
  }
}

AAMemoryLocation::MemoryLocationsKind
AAMemoryLocationImpl::categorizeAccessedLocations(Attributor &A, Instruction &I,
                                                  bool &Changed) {
  LLVM_DEBUG(dbgs() << "[AAMemoryLocation] Categorize accessed locations for "
                    /* ... */);

  AAMemoryLocation::StateType AccessedLocs;
  AccessedLocs.intersectAssumedBits(NO_LOCATIONS);

  if (auto *CB = dyn_cast<CallBase>(&I)) {
    // First check if we assume any memory is accessed at all.
    const auto *CBMemLocationAA = A.getAAFor<AAMemoryLocation>(
        /* ... call site return position ... */);
    LLVM_DEBUG(dbgs() /* ... */ << " [" << CBMemLocationAA << "]\n");
    if (!CBMemLocationAA) {
      updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &I, nullptr,
                                Changed, getAccessKindFromInst(&I));
      return NO_UNKOWN_MEM;
    }

    if (CBMemLocationAA->isAssumedReadNone())
      return NO_LOCATIONS;

    if (CBMemLocationAA->isAssumedInaccessibleMemOnly()) {
      updateStateAndAccessesMap(AccessedLocs, NO_INACCESSIBLE_MEM, &I, nullptr,
                                Changed, getAccessKindFromInst(&I));
      return AccessedLocs.getAssumed();
    }

    uint32_t CBAssumedNotAccessedLocs =
        CBMemLocationAA->getAssumedNotAccessedLocation();

    // Set the argmemonly and global bits as we handle them separately below.
    uint32_t CBAssumedNotAccessedLocsNoArgMem =
        CBAssumedNotAccessedLocs | NO_ARGUMENT_MEM | NO_GLOBAL_MEM;

    for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2) {
      if (CBAssumedNotAccessedLocsNoArgMem & CurMLK)
        continue;
      updateStateAndAccessesMap(AccessedLocs, CurMLK, &I, nullptr, Changed,
                                getAccessKindFromInst(&I));
    }

    // Now handle global memory if it might be accessed. This is slightly
    // tricky as NO_GLOBAL_MEM has multiple bits set.
    bool HasGlobalAccesses = ((~CBAssumedNotAccessedLocs) & NO_GLOBAL_MEM);
    if (HasGlobalAccesses) {
      auto AccessPred = [&](const Instruction *, const Value *Ptr,
                            AccessKind Kind, MemoryLocationsKind MLK) {
        updateStateAndAccessesMap(AccessedLocs, MLK, &I, Ptr, Changed,
                                  getAccessKindFromInst(&I));
        return true;
      };
      if (!CBMemLocationAA->checkForAllAccessesToMemoryKind(
              AccessPred, inverseLocation(NO_GLOBAL_MEM, false, false)))
        return AccessedLocs.getWorstState();
    }

    LLVM_DEBUG(
        dbgs() << "[AAMemoryLocation] Accessed state before argument handling: "
               << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) << "\n");

    // Now handle argument memory if it might be accessed.
    bool HasArgAccesses = ((~CBAssumedNotAccessedLocs) & NO_ARGUMENT_MEM);
    if (HasArgAccesses)
      categorizeArgumentPointerLocations(A, *CB, AccessedLocs, Changed);

    LLVM_DEBUG(
        dbgs() << "[AAMemoryLocation] Accessed state after argument handling: "
               << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) << "\n");

    return AccessedLocs.getAssumed();
  }

  if (const Value *Ptr = getPointerOperand(&I, /* AllowVolatile */ true)) {
    LLVM_DEBUG(
        dbgs() << "[AAMemoryLocation] Categorize memory access with pointer: "
               << I << " [" << *Ptr << "]\n");
    categorizePtrValue(A, I, *Ptr, AccessedLocs, Changed,
                       Ptr->getType()->getPointerAddressSpace());
    return AccessedLocs.getAssumed();
  }

  LLVM_DEBUG(dbgs() << "[AAMemoryLocation] Failed to categorize instruction: "
                    /* ... */);
  updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &I, nullptr, Changed,
                            getAccessKindFromInst(&I));
  return AccessedLocs.getAssumed();
}
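// Sketch of the categorization performed by categorizePtrValue above: the
// underlying object of a pointer maps onto a single location bit, with
// unknown objects falling back to "unknown memory". The enum values are
// illustrative assumptions, not the real MemoryLocationsKind bits.
enum LocSketch : unsigned {
  ArgMemLoc = 1,
  GlobalInternalLoc = 2,
  GlobalExternalLoc = 4,
  MallocedLoc = 8,
  UnknownLoc = 16
};

enum class ObjKindSketch {
  Argument,
  InternalGlobal,
  ExternalGlobal,
  NoAliasCall,
  Other
};

static LocSketch categorizeObjectSketch(ObjKindSketch K) {
  switch (K) {
  case ObjKindSketch::Argument:       return ArgMemLoc;
  case ObjKindSketch::InternalGlobal: return GlobalInternalLoc;
  case ObjKindSketch::ExternalGlobal: return GlobalExternalLoc;
  case ObjKindSketch::NoAliasCall:    return MallocedLoc;
  case ObjKindSketch::Other:          return UnknownLoc;
  }
  return UnknownLoc;
}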
/// An AA to represent the memory location function attributes.
struct AAMemoryLocationFunction final : public AAMemoryLocationImpl {
  AAMemoryLocationFunction(const IRPosition &IRP, Attributor &A)
      : AAMemoryLocationImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(Attributor &A).
  ChangeStatus updateImpl(Attributor &A) override {
    const auto *MemBehaviorAA =
        A.getAAFor<AAMemoryBehavior>(*this, getIRPosition(), DepClassTy::NONE);
    if (MemBehaviorAA && MemBehaviorAA->isAssumedReadNone()) {
      if (MemBehaviorAA->isKnownReadNone())
        return indicateOptimisticFixpoint();
      assert(isAssumedReadNone() &&
             "AAMemoryLocation was not read-none but AAMemoryBehavior was!");
      A.recordDependence(*MemBehaviorAA, *this, DepClassTy::OPTIONAL);
      return ChangeStatus::UNCHANGED;
    }

    // The current assumed state used to determine a change.
    auto AssumedState = getAssumed();
    bool Changed = false;

    auto CheckRWInst = [&](Instruction &I) {
      MemoryLocationsKind MLK = categorizeAccessedLocations(A, I, Changed);
      LLVM_DEBUG(dbgs() << "[AAMemoryLocation] Accessed locations for " << I
                        << ": " << getMemoryLocationsAsStr(MLK) << "\n");
      removeAssumedBits(inverseLocation(MLK, false, false));
      // Stop once only the valid bit set in the *not assumed location*, thus
      // once we don't actually exclude any memory locations in the state.
      return getAssumedNotAccessedLocation() != VALID_STATE;
    };

    bool UsedAssumedInformation = false;
    if (!A.checkForAllReadWriteInstructions(CheckRWInst, *this,
                                            UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    Changed |= AssumedState != getAssumed();
    return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics().
  void trackStatistics() const override {
    if (isAssumedReadNone())
      /* ... */;
    else if (isAssumedArgMemOnly())
      /* ... */;
    else if (isAssumedInaccessibleMemOnly())
      /* ... */;
    else if (isAssumedInaccessibleOrArgMemOnly())
      /* ... */;
  }
};

/// AAMemoryLocation attribute for call sites.
struct AAMemoryLocationCallSite final : AAMemoryLocationImpl {
  AAMemoryLocationCallSite(const IRPosition &IRP, Attributor &A)
      : AAMemoryLocationImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    // ...
    const auto *FnAA =
        A.getAAFor<AAMemoryLocation>(*this, FnPos, DepClassTy::REQUIRED);
    if (!FnAA)
      return indicatePessimisticFixpoint();
    bool Changed = false;
    auto AccessPred = [&](const Instruction *I, const Value *Ptr,
                          AccessKind Kind, MemoryLocationsKind MLK) {
      updateStateAndAccessesMap(getState(), MLK, I, Ptr, Changed,
                                getAccessKindFromInst(I));
      return true;
    };
    if (!FnAA->checkForAllAccessesToMemoryKind(AccessPred, ALL_LOCATIONS))
      return indicatePessimisticFixpoint();
    return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics().
  void trackStatistics() const override {
    // ...
  }
};
/// ------------------ denormal-fp-math Attribute -------------------------------
struct AADenormalFPMathImpl : public AADenormalFPMath {
  AADenormalFPMathImpl(const IRPosition &IRP, Attributor &A)
      : AADenormalFPMath(IRP, A) {}

  const std::string getAsStr(Attributor *A) const override {
    std::string Str("AADenormalFPMath[");
    raw_string_ostream OS(Str);

    DenormalState Known = getKnown();
    if (Known.Mode.isValid())
      OS << "denormal-fp-math=" << Known.Mode;
    else
      OS << "invalid";

    if (Known.ModeF32.isValid())
      OS << " denormal-fp-math-f32=" << Known.ModeF32;
    OS << ']';
    return Str;
  }
};

struct AADenormalFPMathFunction final : AADenormalFPMathImpl {
  AADenormalFPMathFunction(const IRPosition &IRP, Attributor &A)
      : AADenormalFPMathImpl(IRP, A) {}

  void initialize(Attributor &A) override {
    const Function *F = getAnchorScope();
    DenormalMode Mode = F->getDenormalModeRaw();
    DenormalMode ModeF32 = F->getDenormalModeF32Raw();
    // ...
    Known = DenormalState{Mode, ModeF32};
    // ...
  }

  ChangeStatus updateImpl(Attributor &A) override {
    ChangeStatus Change = ChangeStatus::UNCHANGED;

    auto CheckCallSite = [=, &Change, &A](AbstractCallSite CS) {
      Function *Caller = CS.getInstruction()->getFunction();
      LLVM_DEBUG(dbgs() << /* ... */ Caller->getName()
                        << "->" << getAssociatedFunction()->getName() << '\n');

      const auto *CallerInfo = A.getAAFor<AADenormalFPMath>(
          *this, IRPosition::function(*Caller), DepClassTy::REQUIRED);
      if (!CallerInfo)
        return false;

      Change = Change | clampStateAndIndicateChange(this->getState(),
                                                    CallerInfo->getState());
      return true;
    };

    bool AllCallSitesKnown = true;
    if (!A.checkForAllCallSites(CheckCallSite, *this, true, AllCallSitesKnown))
      return indicatePessimisticFixpoint();

    if (Change == ChangeStatus::CHANGED && isModeFixed())
      /* ... no further improvement possible ... */;
    return Change;
  }

  ChangeStatus manifest(Attributor &A) override {
    LLVMContext &Ctx = getAssociatedFunction()->getContext();

    SmallVector<Attribute, 2> AttrToAdd;
    SmallVector<StringRef, 2> AttrToRemove;
    if (/* ... the mode is the default ... */) {
      AttrToRemove.push_back("denormal-fp-math");
    } else {
      AttrToAdd.push_back(
          Attribute::get(Ctx, "denormal-fp-math", Known.Mode.str()));
    }

    if (Known.ModeF32 != Known.Mode) {
      AttrToAdd.push_back(
          Attribute::get(Ctx, "denormal-fp-math-f32", Known.ModeF32.str()));
    } else {
      AttrToRemove.push_back("denormal-fp-math-f32");
    }

    auto &IRP = getIRPosition();
    // TODO: There should be a combined add and remove API.
    return A.removeAttrs(IRP, AttrToRemove) |
           A.manifestAttrs(IRP, AttrToAdd, /* ForceReplace */ true);
  }

  void trackStatistics() const override {
    // ...
  }
};
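// Sketch of the call-site merge driving AADenormalFPMathFunction::updateImpl:
// the function's denormal mode is the meet over all callers, and any
// disagreement degrades it so the attribute is dropped. Purely illustrative,
// standalone strings instead of DenormalMode.
#include <optional>
#include <string>
#include <vector>

static std::optional<std::string>
mergeDenormalModesSketch(const std::vector<std::string> &CallerModes) {
  std::optional<std::string> Merged;
  for (const std::string &M : CallerModes) {
    if (!Merged) {
      Merged = M;                       // first caller seeds the mode
      continue;
    }
    if (*Merged != M)
      return std::nullopt;              // conflicting callers: give up
  }
  return Merged;                        // empty if there were no callers
}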
/// ------------------ Value Constant Range Attribute ---------------------------
struct AAValueConstantRangeImpl : AAValueConstantRange {
  using StateType = IntegerRangeState;
  AAValueConstantRangeImpl(const IRPosition &IRP, Attributor &A)
      : AAValueConstantRange(IRP, A) {}

  /// See AbstractAttribute::initialize(..).
  void initialize(Attributor &A) override {
    if (A.hasSimplificationCallback(getIRPosition())) {
      indicatePessimisticFixpoint();
      return;
    }

    // Intersect a range given by SCEV.
    intersectKnown(getConstantRangeFromSCEV(A, getCtxI()));

    // Intersect a range given by LVI.
    intersectKnown(getConstantRangeFromLVI(A, getCtxI()));
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    std::string Str;
    llvm::raw_string_ostream OS(Str);
    OS << "range(" << getBitWidth() << ")<";
    getKnown().print(OS);
    OS << " / ";
    getAssumed().print(OS);
    OS << ">";
    return Str;
  }

  /// Helper function to get a SCEV expr for the associated value at program
  /// point \p I.
  const SCEV *getSCEV(Attributor &A, const Instruction *I = nullptr) const {
    if (!getAnchorScope())
      return nullptr;

    ScalarEvolution *SE =
        A.getInfoCache().getAnalysisResultForFunction<ScalarEvolutionAnalysis>(
            *getAnchorScope());
    LoopInfo *LI = A.getInfoCache().getAnalysisResultForFunction<LoopAnalysis>(
        *getAnchorScope());
    if (!SE || !LI)
      return nullptr;

    const SCEV *S = SE->getSCEV(&getAssociatedValue());
    // ...
    return S;
  }

  /// Helper function to get a range from SCEV for the associated value at
  /// program point \p I.
  ConstantRange getConstantRangeFromSCEV(Attributor &A,
                                         const Instruction *I = nullptr) const {
    if (!getAnchorScope())
      return getWorstState(getBitWidth());

    ScalarEvolution *SE =
        A.getInfoCache().getAnalysisResultForFunction<ScalarEvolutionAnalysis>(
            *getAnchorScope());
    const SCEV *S = getSCEV(A, I);
    if (!SE || !S)
      return getWorstState(getBitWidth());
    // ...
  }

  /// Helper function to get a range from LVI for the associated value at
  /// program point \p CtxI.
  ConstantRange
  getConstantRangeFromLVI(Attributor &A,
                          const Instruction *CtxI = nullptr) const {
    if (!getAnchorScope())
      return getWorstState(getBitWidth());

    LazyValueInfo *LVI =
        A.getInfoCache().getAnalysisResultForFunction<LazyValueAnalysis>(
            *getAnchorScope());
    // ...
  }

  /// Return true if \p CtxI is valid for querying outside analyses.
  bool isValidCtxInstructionForOutsideAnalysis(Attributor &A,
                                               const Instruction *CtxI,
                                               bool AllowAACtxI) const {
    if (!CtxI || (!AllowAACtxI && CtxI == getCtxI()))
      return false;
    // ...
    InformationCache &InfoCache = A.getInfoCache();
    const DominatorTree *DT = /* ... */;
    // ...
  }

  /// See AAValueConstantRange::getKnownConstantRange(..).
  ConstantRange
  getKnownConstantRange(Attributor &A,
                        const Instruction *CtxI = nullptr) const override {
    if (!isValidCtxInstructionForOutsideAnalysis(A, CtxI,
                                                 /* AllowAACtxI */ false))
      return getKnown();

    ConstantRange LVIR = getConstantRangeFromLVI(A, CtxI);
    ConstantRange SCEVR = getConstantRangeFromSCEV(A, CtxI);
    return getKnown().intersectWith(SCEVR).intersectWith(LVIR);
  }

  /// See AAValueConstantRange::getAssumedConstantRange(..).
  ConstantRange
  getAssumedConstantRange(Attributor &A,
                          const Instruction *CtxI = nullptr) const override {
    // TODO: Make SCEV use Attributor assumption.
    if (!isValidCtxInstructionForOutsideAnalysis(A, CtxI,
                                                 /* AllowAACtxI */ false))
      return getAssumed();

    ConstantRange LVIR = getConstantRangeFromLVI(A, CtxI);
    ConstantRange SCEVR = getConstantRangeFromSCEV(A, CtxI);
    return getAssumed().intersectWith(SCEVR).intersectWith(LVIR);
  }

  /// Helper function to create MDNode for range metadata.
  static MDNode *
  getMDNodeForConstantRange(Type *Ty, LLVMContext &Ctx,
                            const ConstantRange &AssumedConstantRange) {
    Metadata *LowAndHigh[] = {
        ConstantAsMetadata::get(ConstantInt::get(
            Ty, AssumedConstantRange.getLower())),
        ConstantAsMetadata::get(ConstantInt::get(
            Ty, AssumedConstantRange.getUpper()))};
    return MDNode::get(Ctx, LowAndHigh);
  }

  /// Return true if \p Assumed is included in \p KnownRanges.
  static bool isBetterRange(const ConstantRange &Assumed,
                            const Instruction &I) {
    if (Assumed.isFullSet())
      return false;

    std::optional<ConstantRange> Known;
    // ...
    if (MDNode *KnownRanges = I.getMetadata(LLVMContext::MD_range)) {
      // If multiple ranges are annotated in IR, we give up to annotate assumed
      // range for now.
      if (KnownRanges->getNumOperands() > 2)
        return false;

      ConstantInt *Lower = /* ... first operand ... */;
      ConstantInt *Upper = /* ... second operand ... */;
      Known.emplace(Lower->getValue(), Upper->getValue());
    }
    return !Known || (*Known != Assumed && Known->contains(Assumed));
  }

  /// Helper function to set range metadata.
  static bool
  setRangeMetadataIfisBetterRange(Instruction *I,
                                  const ConstantRange &AssumedConstantRange) {
    if (isBetterRange(AssumedConstantRange, *I)) {
      I->setMetadata(LLVMContext::MD_range,
                     getMDNodeForConstantRange(I->getType(), I->getContext(),
                                               AssumedConstantRange));
      return true;
    }
    return false;
  }

  /// Helper function to set a range attribute on the return position.
  static bool
  setRangeRetAttrIfisBetterRange(Attributor &A, const IRPosition &IRP,
                                 Instruction *I,
                                 const ConstantRange &AssumedConstantRange) {
    if (isBetterRange(AssumedConstantRange, *I)) {
      A.manifestAttrs(IRP,
                      Attribute::get(I->getContext(), Attribute::Range,
                                     AssumedConstantRange),
                      /* ForceReplace */ true);
      return true;
    }
    return false;
  }

  /// See AbstractAttribute::manifest().
  ChangeStatus manifest(Attributor &A) override {
    ChangeStatus Changed = ChangeStatus::UNCHANGED;
    ConstantRange AssumedConstantRange = getAssumedConstantRange(A);
    assert(!AssumedConstantRange.isFullSet() && "Invalid state");

    auto &V = getAssociatedValue();
    if (auto *I = dyn_cast<Instruction>(&V)) {
      assert(I == getCtxI() && "Should not annotate an instruction which is "
                               "not the context instruction");
      if (setRangeMetadataIfisBetterRange(I, AssumedConstantRange))
        Changed = ChangeStatus::CHANGED;
    } else {
      if (setRangeRetAttrIfisBetterRange(A, getIRPosition(), I,
                                         AssumedConstantRange))
        Changed = ChangeStatus::CHANGED;
    }
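// Sketch of isBetterRange above for plain, non-wrapping ranges (wrapping is
// deliberately ignored here, an assumption the real ConstantRange does not
// make): a new range is only worth emitting as metadata or a `range`
// attribute when it is a strict subset of the currently known one.
#include <cstdint>

struct RangeSketch {
  int64_t Lo, Hi; // half-open [Lo, Hi)
};

static bool containsSketch(RangeSketch Outer, RangeSketch Inner) {
  return Outer.Lo <= Inner.Lo && Inner.Hi <= Outer.Hi;
}

static bool isBetterRangeSketch(RangeSketch Assumed, RangeSketch Known) {
  bool Equal = Assumed.Lo == Known.Lo && Assumed.Hi == Known.Hi;
  return !Equal && containsSketch(Known, Assumed);
}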
struct AAValueConstantRangeArgument final
    : AAArgumentFromCallSiteArguments<
          AAValueConstantRange, AAValueConstantRangeImpl, IntegerRangeState,
          true /* BridgeCallBaseContext */> {
  using Base = AAArgumentFromCallSiteArguments<
      AAValueConstantRange, AAValueConstantRangeImpl, IntegerRangeState,
      true /* BridgeCallBaseContext */>;
  AAValueConstantRangeArgument(const IRPosition &IRP, Attributor &A)
      : Base(IRP, A) {}

  /// See AbstractAttribute::trackStatistics().
  void trackStatistics() const override {
    STATS_DECLTRACK_ARG_ATTR(value_range)
  }
};

struct AAValueConstantRangeReturned
    : AAReturnedFromReturnedValues<AAValueConstantRange,
                                   AAValueConstantRangeImpl,
                                   AAValueConstantRangeImpl::StateType,
                                   /* PropagateCallBaseContext */ true> {
  using Base =
      AAReturnedFromReturnedValues<AAValueConstantRange,
                                   AAValueConstantRangeImpl,
                                   AAValueConstantRangeImpl::StateType,
                                   /* PropagateCallBaseContext */ true>;
  AAValueConstantRangeReturned(const IRPosition &IRP, Attributor &A)
      : Base(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    if (!A.isFunctionIPOAmendable(*getAssociatedFunction()))
      indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics().
  void trackStatistics() const override {
    STATS_DECLTRACK_FNRET_ATTR(value_range)
  }
};

struct AAValueConstantRangeFloating : AAValueConstantRangeImpl {
  AAValueConstantRangeFloating(const IRPosition &IRP, Attributor &A)
      : AAValueConstantRangeImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAValueConstantRangeImpl::initialize(A);
    if (isAtFixpoint())
      return;

    Value &V = getAssociatedValue();
    if (auto *C = dyn_cast<ConstantInt>(&V)) {
      unionAssumed(ConstantRange(C->getValue()));
      indicateOptimisticFixpoint();
      return;
    }

    if (isa<UndefValue>(&V)) {
      // Collapse the undef state to 0.
      unionAssumed(ConstantRange(APInt(getBitWidth(), 0)));
      indicateOptimisticFixpoint();
      return;
    }

    // ...
    if (auto *LI = dyn_cast<LoadInst>(&V))
      if (auto *RangeMD = LI->getMetadata(LLVMContext::MD_range)) {
        // ...
      }

    // ...
    indicatePessimisticFixpoint();

    LLVM_DEBUG(dbgs() << "[AAValueConstantRange] We give up: "
                      << getAssociatedValue() << "\n");
  }
  bool calculateBinaryOperator(
      Attributor &A, BinaryOperator *BinOp, IntegerRangeState &T,
      const Instruction *CtxI,
      SmallVectorImpl<const AAValueConstantRange *> &QuerriedAAs) {
    Value *LHS = BinOp->getOperand(0);
    Value *RHS = BinOp->getOperand(1);

    // Simplify the operands first.
    bool UsedAssumedInformation = false;
    const auto &SimplifiedLHS = A.getAssumedSimplified(
        /* ... LHS position ... */);
    if (!SimplifiedLHS.has_value())
      return true;
    if (!*SimplifiedLHS)
      return false;
    LHS = *SimplifiedLHS;

    const auto &SimplifiedRHS = A.getAssumedSimplified(
        /* ... RHS position ... */);
    if (!SimplifiedRHS.has_value())
      return true;
    if (!*SimplifiedRHS)
      return false;
    RHS = *SimplifiedRHS;

    // ...
    auto *LHSAA = A.getAAFor<AAValueConstantRange>(
        /* ... LHS value position ... */, DepClassTy::REQUIRED);
    if (!LHSAA)
      return false;
    QuerriedAAs.push_back(LHSAA);
    auto LHSAARange = LHSAA->getAssumedConstantRange(A, CtxI);

    auto *RHSAA = A.getAAFor<AAValueConstantRange>(
        /* ... RHS value position ... */, DepClassTy::REQUIRED);
    if (!RHSAA)
      return false;
    QuerriedAAs.push_back(RHSAA);
    auto RHSAARange = RHSAA->getAssumedConstantRange(A, CtxI);

    auto AssumedRange = LHSAARange.binaryOp(BinOp->getOpcode(), RHSAARange);

    T.unionAssumed(AssumedRange);

    // TODO: Track a known state too.
    return T.isValidState();
  }

  bool calculateCastInst(
      Attributor &A, CastInst *CastI, IntegerRangeState &T,
      const Instruction *CtxI,
      SmallVectorImpl<const AAValueConstantRange *> &QuerriedAAs) {
    // ...
    // Simplify the operand first.
    bool UsedAssumedInformation = false;
    const auto &SimplifiedOpV = A.getAssumedSimplified(
        /* ... operand position ... */);
    if (!SimplifiedOpV.has_value())
      return true;
    if (!*SimplifiedOpV)
      return false;
    Value *OpV = *SimplifiedOpV;

    // ...
    auto *OpAA = A.getAAFor<AAValueConstantRange>(
        /* ... operand value position ... */, DepClassTy::REQUIRED);
    if (!OpAA)
      return false;
    QuerriedAAs.push_back(OpAA);
    T.unionAssumed(OpAA->getAssumed().castOp(CastI->getOpcode(),
                                             getState().getBitWidth()));
    return T.isValidState();
  }

  bool
  calculateCmpInst(Attributor &A, CmpInst *CmpI, IntegerRangeState &T,
                   const Instruction *CtxI,
                   SmallVectorImpl<const AAValueConstantRange *> &QuerriedAAs) {
    Value *LHS = CmpI->getOperand(0);
    Value *RHS = CmpI->getOperand(1);

    // Simplify the operands first.
    bool UsedAssumedInformation = false;
    const auto &SimplifiedLHS = A.getAssumedSimplified(
        /* ... LHS position ... */);
    if (!SimplifiedLHS.has_value())
      return true;
    if (!*SimplifiedLHS)
      return false;
    LHS = *SimplifiedLHS;

    const auto &SimplifiedRHS = A.getAssumedSimplified(
        /* ... RHS position ... */);
    if (!SimplifiedRHS.has_value())
      return true;
    if (!*SimplifiedRHS)
      return false;
    RHS = *SimplifiedRHS;

    // ...
    auto *LHSAA = A.getAAFor<AAValueConstantRange>(
        /* ... LHS value position ... */, DepClassTy::REQUIRED);
    if (!LHSAA)
      return false;
    QuerriedAAs.push_back(LHSAA);
    auto *RHSAA = A.getAAFor<AAValueConstantRange>(
        /* ... RHS value position ... */, DepClassTy::REQUIRED);
    if (!RHSAA)
      return false;
    QuerriedAAs.push_back(RHSAA);
    auto LHSAARange = LHSAA->getAssumedConstantRange(A, CtxI);
    auto RHSAARange = RHSAA->getAssumedConstantRange(A, CtxI);

    // If one of them is empty set, we can't decide.
    if (LHSAARange.isEmptySet() || RHSAARange.isEmptySet())
      return true;

    bool MustTrue = false, MustFalse = false;

    auto AllowedRegion =
        ConstantRange::makeAllowedICmpRegion(CmpI->getPredicate(), RHSAARange);

    if (AllowedRegion.intersectWith(LHSAARange).isEmptySet())
      MustFalse = true;
    // ...

    assert((!MustTrue || !MustFalse) &&
           "Either MustTrue or MustFalse should be false!");

    if (MustTrue)
      T.unionAssumed(ConstantRange(APInt(/* numbits */ 1, /* val */ 1)));
    else if (MustFalse)
      T.unionAssumed(ConstantRange(APInt(/* numbits */ 1, /* val */ 0)));
    else
      T.unionAssumed(ConstantRange(/* BitWidth */ 1, /* isFullSet */ true));

    LLVM_DEBUG(dbgs() << "[AAValueConstantRange] " << *CmpI << " after "
                      << (MustTrue ? "true" : (MustFalse ? "false" : "unknown"))
                      << ": " << T << "\n\t" << *LHSAA << "\t<op>\n\t"
                      << *RHSAA << "\n");

    // TODO: Track a known state too.
    return T.isValidState();
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    auto VisitValueCB = [&](Value &V, const Instruction *CtxI) -> bool {
      IntegerRangeState T(getBitWidth());
      // ...
      bool UsedAssumedInformation = false;
      const auto &SimplifiedOpV = A.getAssumedSimplified(
          /* ... value position ... */);
      if (!SimplifiedOpV.has_value())
        return true;
      if (!*SimplifiedOpV)
        return false;
      Value *VPtr = *SimplifiedOpV;

      // If the value is not an instruction we look at its range directly.
      if (!isa<Instruction>(VPtr)) {
        const auto *AA = A.getAAFor<AAValueConstantRange>(
            /* ... value position of VPtr ... */, DepClassTy::REQUIRED);
        if (!AA)
          return false;
        T.unionAssumed(AA->getAssumedConstantRange(A, CtxI));
        return T.isValidState();
      }

      SmallVector<const AAValueConstantRange *, 4> QuerriedAAs;
      if (auto *BinOp = dyn_cast<BinaryOperator>(VPtr)) {
        if (!calculateBinaryOperator(A, BinOp, T, CtxI, QuerriedAAs))
          return false;
      } else if (auto *CmpI = dyn_cast<CmpInst>(VPtr)) {
        if (!calculateCmpInst(A, CmpI, T, CtxI, QuerriedAAs))
          return false;
      } else if (auto *CastI = dyn_cast<CastInst>(VPtr)) {
        if (!calculateCastInst(A, CastI, T, CtxI, QuerriedAAs))
          return false;
      } else {
        // Give up with other instructions.
        T.indicatePessimisticFixpoint();
      }

      // Catch circular reasoning in a pessimistic way for now.
      for (const AAValueConstantRange *QueriedAA : QuerriedAAs) {
        if (QueriedAA != this)
          continue;
        // If we are in a stady state we do not need to worry.
        if (T.getAssumed() == getState().getAssumed())
          continue;
        T.indicatePessimisticFixpoint();
      }

      return T.isValidState();
    };

    if (!VisitValueCB(getAssociatedValue(), getCtxI()))
      return indicatePessimisticFixpoint();

    // Ensure that long def-use chains can't cause circular reasoning either by
    // limiting the number of changes allowed.
    if (/* ... no change ... */)
      return ChangeStatus::UNCHANGED;
    if (++NumChanges > MaxNumChanges) {
      LLVM_DEBUG(dbgs() << "[AAValueConstantRange] performed " << NumChanges
                        << " but only " << MaxNumChanges
                        << " are allowed to avoid cyclic reasoning.");
      return indicatePessimisticFixpoint();
    }
    return ChangeStatus::CHANGED;
  }

  /// See AbstractAttribute::trackStatistics().
  void trackStatistics() const override {
    STATS_DECLTRACK_FLOATING_ATTR(value_range)
  }

  /// Tracker to limit the number of changes.
  int NumChanges = 0;

  /// Upper bound for the number of allowed changes (update steps) for the
  /// current AA to avoid cyclic reasoning.
  static constexpr int MaxNumChanges = 5;
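// A short, standalone illustration of the interval arithmetic the code above
// delegates to ConstantRange::binaryOp for an add: [a,b] + [c,d] = [a+c, b+d].
// Overflow and wrapping are ignored in this sketch (an assumption); the real
// ConstantRange handles both.
#include <cstdint>

struct IvalSketch {
  int64_t Lo, Hi; // closed interval [Lo, Hi]
};

static IvalSketch addIntervalsSketch(IvalSketch L, IvalSketch R) {
  return {L.Lo + R.Lo, L.Hi + R.Hi};
}

static IvalSketch unionIntervalsSketch(IvalSketch L, IvalSketch R) {
  return {L.Lo < R.Lo ? L.Lo : R.Lo, L.Hi > R.Hi ? L.Hi : R.Hi};
}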
struct AAValueConstantRangeFunction : AAValueConstantRangeImpl {
  AAValueConstantRangeFunction(const IRPosition &IRP, Attributor &A)
      : AAValueConstantRangeImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    llvm_unreachable(
        "AAValueConstantRange(Function|CallSite)::updateImpl will "
        "not be called");
  }
  // ...
};

struct AAValueConstantRangeCallSite : AAValueConstantRangeFunction {
  AAValueConstantRangeCallSite(const IRPosition &IRP, Attributor &A)
      : AAValueConstantRangeFunction(IRP, A) {}
  // ...
};

struct AAValueConstantRangeCallSiteReturned
    : AACalleeToCallSite<AAValueConstantRange, AAValueConstantRangeImpl,
                         AAValueConstantRangeImpl::StateType,
                         /* IntroduceCallBaseContext */ true> {
  AAValueConstantRangeCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AAValueConstantRange, AAValueConstantRangeImpl,
                           AAValueConstantRangeImpl::StateType,
                           /* IntroduceCallBaseContext */ true>(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    // If it is a call instruction with a `range` attribute, use it.
    if (auto *CI = dyn_cast<CallBase>(&getAssociatedValue()))
      if (std::optional<ConstantRange> Range = CI->getRange())
        intersectKnown(*Range);

    AAValueConstantRangeImpl::initialize(A);
  }

  /// See AbstractAttribute::trackStatistics().
  void trackStatistics() const override {
    STATS_DECLTRACK_CSRET_ATTR(value_range)
  }
};

struct AAValueConstantRangeCallSiteArgument : AAValueConstantRangeFloating {
  AAValueConstantRangeCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAValueConstantRangeFloating(IRP, A) {}

  /// See AbstractAttribute::manifest(). We do not annotate call site argument
  /// positions.
  ChangeStatus manifest(Attributor &A) override {
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics().
  void trackStatistics() const override {
    STATS_DECLTRACK_CSARG_ATTR(value_range)
  }
};
/// ------------------ Potential Constant Values Attribute ----------------------
struct AAPotentialConstantValuesImpl : AAPotentialConstantValues {
  using StateType = PotentialConstantIntValuesState;

  AAPotentialConstantValuesImpl(const IRPosition &IRP, Attributor &A)
      : AAPotentialConstantValues(IRP, A) {}

  /// See AbstractAttribute::initialize(..).
  void initialize(Attributor &A) override {
    if (A.hasSimplificationCallback(getIRPosition()))
      indicatePessimisticFixpoint();
    else
      AAPotentialConstantValues::initialize(A);
  }

  bool fillSetWithConstantValues(Attributor &A, const IRPosition &IRP, SetTy &S,
                                 bool &ContainsUndef, bool ForSelf) {
    SmallVector<AA::ValueAndContext> Values;
    bool UsedAssumedInformation = false;
    if (!A.getAssumedSimplifiedValues(IRP, *this, Values, /* ... */,
                                      UsedAssumedInformation)) {
      // Avoid recursion when the caller is computing constant values for this
      // IRP itself.
      if (ForSelf)
        return false;
      auto *PotentialValuesAA = A.getAAFor<AAPotentialConstantValues>(
          *this, IRP, DepClassTy::REQUIRED);
      if (!PotentialValuesAA || !PotentialValuesAA->getState().isValidState())
        return false;
      ContainsUndef = PotentialValuesAA->getState().undefIsContained();
      S = PotentialValuesAA->getState().getAssumedSet();
      return true;
    }

    // Copy all the constant values, except UndefValue. ContainsUndef is true
    // iff Values contains only UndefValue instances. If there are other known
    // constants, then UndefValue is dropped.
    ContainsUndef = false;
    for (auto &It : Values) {
      if (isa<UndefValue>(It.getValue())) {
        ContainsUndef = true;
        continue;
      }
      auto *CI = dyn_cast<ConstantInt>(It.getValue());
      if (!CI)
        return false;
      S.insert(CI->getValue());
    }
    ContainsUndef &= S.empty();

    return true;
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    std::string Str;
    llvm::raw_string_ostream OS(Str);
    OS << getState();
    return Str;
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    return indicatePessimisticFixpoint();
  }
};

struct AAPotentialConstantValuesArgument final
    : AAArgumentFromCallSiteArguments<AAPotentialConstantValues,
                                      AAPotentialConstantValuesImpl,
                                      PotentialConstantIntValuesState> {
  using Base = AAArgumentFromCallSiteArguments<AAPotentialConstantValues,
                                               AAPotentialConstantValuesImpl,
                                               PotentialConstantIntValuesState>;
  AAPotentialConstantValuesArgument(const IRPosition &IRP, Attributor &A)
      : Base(IRP, A) {}

  /// See AbstractAttribute::trackStatistics().
  void trackStatistics() const override {
    STATS_DECLTRACK_ARG_ATTR(potential_values)
  }
};

struct AAPotentialConstantValuesReturned
    : AAReturnedFromReturnedValues<AAPotentialConstantValues,
                                   AAPotentialConstantValuesImpl> {
  using Base = AAReturnedFromReturnedValues<AAPotentialConstantValues,
                                            AAPotentialConstantValuesImpl>;
  AAPotentialConstantValuesReturned(const IRPosition &IRP, Attributor &A)
      : Base(IRP, A) {}

  void initialize(Attributor &A) override {
    if (!A.isFunctionIPOAmendable(*getAssociatedFunction()))
      indicatePessimisticFixpoint();
    Base::initialize(A);
  }

  /// See AbstractAttribute::trackStatistics().
  void trackStatistics() const override {
    STATS_DECLTRACK_FNRET_ATTR(potential_values)
  }
};

struct AAPotentialConstantValuesFloating : AAPotentialConstantValuesImpl {
  AAPotentialConstantValuesFloating(const IRPosition &IRP, Attributor &A)
      : AAPotentialConstantValuesImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(..).
  void initialize(Attributor &A) override {
    AAPotentialConstantValuesImpl::initialize(A);
    if (isAtFixpoint())
      return;

    Value &V = getAssociatedValue();

    if (auto *C = dyn_cast<ConstantInt>(&V)) {
      unionAssumed(C->getValue());
      indicateOptimisticFixpoint();
      return;
    }

    if (isa<UndefValue>(&V)) {
      unionAssumedWithUndef();
      indicateOptimisticFixpoint();
      return;
    }

    // ...
    indicatePessimisticFixpoint();

    LLVM_DEBUG(dbgs() << "[AAPotentialConstantValues] We give up: "
                      << getAssociatedValue() << "\n");
  }
  static bool calculateICmpInst(const ICmpInst *ICI, const APInt &LHS,
                                const APInt &RHS) {
    // ...
  }

  static APInt calculateCastInst(const CastInst *CI, const APInt &Src,
                                 uint32_t ResultBitWidth) {
    switch (CI->getOpcode()) {
    default:
      llvm_unreachable("unsupported or not integer cast");
    case Instruction::Trunc:
      return Src.trunc(ResultBitWidth);
    case Instruction::SExt:
      return Src.sext(ResultBitWidth);
    case Instruction::ZExt:
      return Src.zext(ResultBitWidth);
    case Instruction::BitCast:
      return Src;
    }
  }

  static APInt calculateBinaryOperator(const BinaryOperator *BinOp,
                                       const APInt &LHS, const APInt &RHS,
                                       bool &SkipOperation, bool &Unsupported) {
    Instruction::BinaryOps BinOpcode = BinOp->getOpcode();
    // Unsupported is set to true when the binary operator is not supported.
    // SkipOperation is set to true when UB occurs with the given operand pair
    // (LHS, RHS).
    switch (BinOpcode) {
    default:
      Unsupported = true;
      return LHS;
    case Instruction::Add:
      return LHS + RHS;
    case Instruction::Sub:
      return LHS - RHS;
    case Instruction::Mul:
      return LHS * RHS;
    case Instruction::UDiv:
      if (RHS.isZero()) {
        SkipOperation = true;
        return LHS;
      }
      return LHS.udiv(RHS);
    case Instruction::SDiv:
      if (RHS.isZero()) {
        SkipOperation = true;
        return LHS;
      }
      return LHS.sdiv(RHS);
    case Instruction::URem:
      if (RHS.isZero()) {
        SkipOperation = true;
        return LHS;
      }
      return LHS.urem(RHS);
    case Instruction::SRem:
      if (RHS.isZero()) {
        SkipOperation = true;
        return LHS;
      }
      return LHS.srem(RHS);
    case Instruction::Shl:
      return LHS.shl(RHS);
    case Instruction::LShr:
      return LHS.lshr(RHS);
    case Instruction::AShr:
      return LHS.ashr(RHS);
    case Instruction::And:
      return LHS & RHS;
    case Instruction::Or:
      return LHS | RHS;
    case Instruction::Xor:
      return LHS ^ RHS;
    }
  }

  bool calculateBinaryOperatorAndTakeUnion(const BinaryOperator *BinOp,
                                           const APInt &LHS, const APInt &RHS) {
    bool SkipOperation = false;
    bool Unsupported = false;
    APInt Result =
        calculateBinaryOperator(BinOp, LHS, RHS, SkipOperation, Unsupported);
    if (Unsupported)
      return false;
    // If SkipOperation is true, we can ignore this operand pair (L, R).
    if (!SkipOperation)
      unionAssumed(Result);
    return isValidState();
  }

  ChangeStatus updateWithICmpInst(Attributor &A, ICmpInst *ICI) {
    auto AssumedBefore = getAssumed();
    Value *LHS = ICI->getOperand(0);
    Value *RHS = ICI->getOperand(1);

    bool LHSContainsUndef = false, RHSContainsUndef = false;
    SetTy LHSAAPVS, RHSAAPVS;
    if (!fillSetWithConstantValues(A, IRPosition::value(*LHS), LHSAAPVS,
                                   LHSContainsUndef, /* ForSelf */ false) ||
        !fillSetWithConstantValues(A, IRPosition::value(*RHS), RHSAAPVS,
                                   RHSContainsUndef, /* ForSelf */ false))
      return indicatePessimisticFixpoint();

    // TODO: make use of undef flag to limit potential values aggressively.
    bool MaybeTrue = false, MaybeFalse = false;
    const APInt Zero(RHS->getType()->getIntegerBitWidth(), 0);
    if (LHSContainsUndef && RHSContainsUndef) {
      // The result of any comparison between undefs can be soundly replaced
      // with undef.
      unionAssumedWithUndef();
    } else if (LHSContainsUndef) {
      for (const APInt &R : RHSAAPVS) {
        bool CmpResult = calculateICmpInst(ICI, Zero, R);
        MaybeTrue |= CmpResult;
        MaybeFalse |= !CmpResult;
        if (MaybeTrue & MaybeFalse)
          return indicatePessimisticFixpoint();
      }
    } else if (RHSContainsUndef) {
      for (const APInt &L : LHSAAPVS) {
        bool CmpResult = calculateICmpInst(ICI, L, Zero);
        MaybeTrue |= CmpResult;
        MaybeFalse |= !CmpResult;
        if (MaybeTrue & MaybeFalse)
          return indicatePessimisticFixpoint();
      }
    } else {
      for (const APInt &L : LHSAAPVS) {
        for (const APInt &R : RHSAAPVS) {
          bool CmpResult = calculateICmpInst(ICI, L, R);
          MaybeTrue |= CmpResult;
          MaybeFalse |= !CmpResult;
          if (MaybeTrue & MaybeFalse)
            return indicatePessimisticFixpoint();
        }
      }
    }
    if (MaybeTrue)
      unionAssumed(APInt(/* numBits */ 1, /* val */ 1));
    if (MaybeFalse)
      unionAssumed(APInt(/* numBits */ 1, /* val */ 0));
    return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
                                         : ChangeStatus::CHANGED;
  }
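// Sketch of the cross-product evaluation in updateWithICmpInst above: evaluate
// the predicate over both potential-constant sets and stop as soon as both a
// true and a false outcome have been seen, because the fold is then
// impossible anyway. Standalone types; `Pred` stands in for the icmp
// predicate.
#include <functional>
#include <optional>
#include <set>

static std::optional<bool>
foldCmpOverSetsSketch(const std::set<int> &L, const std::set<int> &R,
                      const std::function<bool(int, int)> &Pred) {
  bool MaybeTrue = false, MaybeFalse = false;
  for (int A : L)
    for (int B : R) {
      bool CmpResult = Pred(A, B);
      MaybeTrue |= CmpResult;
      MaybeFalse |= !CmpResult;
      if (MaybeTrue && MaybeFalse)
        return std::nullopt;            // both outcomes possible: no fold
    }
  if (MaybeTrue)
    return true;
  if (MaybeFalse)
    return false;
  return std::nullopt;                  // one of the sets was empty
}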
  ChangeStatus updateWithSelectInst(Attributor &A, SelectInst *SI) {
    auto AssumedBefore = getAssumed();
    Value *LHS = SI->getTrueValue();
    Value *RHS = SI->getFalseValue();

    bool UsedAssumedInformation = false;
    std::optional<Constant *> C = A.getAssumedConstant(
        *SI->getCondition(), *this, UsedAssumedInformation);

    // Check if we only need one operand.
    bool OnlyLeft = false, OnlyRight = false;
    if (C && *C && (*C)->isOneValue())
      OnlyLeft = true;
    else if (C && *C && (*C)->isZeroValue())
      OnlyRight = true;

    bool LHSContainsUndef = false, RHSContainsUndef = false;
    SetTy LHSAAPVS, RHSAAPVS;
    if (!OnlyRight &&
        !fillSetWithConstantValues(A, IRPosition::value(*LHS), LHSAAPVS,
                                   LHSContainsUndef, /* ForSelf */ false))
      return indicatePessimisticFixpoint();

    if (!OnlyLeft &&
        !fillSetWithConstantValues(A, IRPosition::value(*RHS), RHSAAPVS,
                                   RHSContainsUndef, /* ForSelf */ false))
      return indicatePessimisticFixpoint();

    if (OnlyLeft || OnlyRight) {
      // select (true/false), lhs, rhs
      auto *OpAA = OnlyLeft ? &LHSAAPVS : &RHSAAPVS;
      auto Undef = OnlyLeft ? LHSContainsUndef : RHSContainsUndef;

      if (Undef)
        unionAssumedWithUndef();
      else
        for (const auto &It : *OpAA)
          unionAssumed(It);
    } else if (LHSContainsUndef && RHSContainsUndef) {
      // select i1 *, undef , undef => undef
      unionAssumedWithUndef();
    } else {
      for (const auto &It : LHSAAPVS)
        unionAssumed(It);
      for (const auto &It : RHSAAPVS)
        unionAssumed(It);
    }
    return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
                                         : ChangeStatus::CHANGED;
  }

  ChangeStatus updateWithCastInst(Attributor &A, CastInst *CI) {
    auto AssumedBefore = getAssumed();
    if (!CI->isIntegerCast())
      return indicatePessimisticFixpoint();
    // ...
    uint32_t ResultBitWidth = CI->getDestTy()->getIntegerBitWidth();
    Value *Src = CI->getOperand(0);

    bool SrcContainsUndef = false;
    SetTy SrcPVS;
    if (!fillSetWithConstantValues(A, IRPosition::value(*Src), SrcPVS,
                                   SrcContainsUndef, /* ForSelf */ false))
      return indicatePessimisticFixpoint();

    if (SrcContainsUndef)
      unionAssumedWithUndef();
    else {
      for (const APInt &S : SrcPVS) {
        APInt T = calculateCastInst(CI, S, ResultBitWidth);
        unionAssumed(T);
      }
    }
    return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
                                         : ChangeStatus::CHANGED;
  }

  ChangeStatus updateWithBinaryOperator(Attributor &A, BinaryOperator *BinOp) {
    auto AssumedBefore = getAssumed();
    Value *LHS = BinOp->getOperand(0);
    Value *RHS = BinOp->getOperand(1);

    bool LHSContainsUndef = false, RHSContainsUndef = false;
    SetTy LHSAAPVS, RHSAAPVS;
    if (!fillSetWithConstantValues(A, IRPosition::value(*LHS), LHSAAPVS,
                                   LHSContainsUndef, /* ForSelf */ false) ||
        !fillSetWithConstantValues(A, IRPosition::value(*RHS), RHSAAPVS,
                                   RHSContainsUndef, /* ForSelf */ false))
      return indicatePessimisticFixpoint();

    const APInt Zero = APInt(LHS->getType()->getIntegerBitWidth(), 0);

    // TODO: make use of undef flag to limit potential values aggressively.
    if (LHSContainsUndef && RHSContainsUndef) {
      if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, Zero))
        return indicatePessimisticFixpoint();
    } else if (LHSContainsUndef) {
      for (const APInt &R : RHSAAPVS) {
        if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, R))
          return indicatePessimisticFixpoint();
      }
    } else if (RHSContainsUndef) {
      for (const APInt &L : LHSAAPVS) {
        if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, Zero))
          return indicatePessimisticFixpoint();
      }
    } else {
      for (const APInt &L : LHSAAPVS) {
        for (const APInt &R : RHSAAPVS) {
          if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, R))
            return indicatePessimisticFixpoint();
        }
      }
    }
    return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
                                         : ChangeStatus::CHANGED;
  }

  ChangeStatus updateWithInstruction(Attributor &A, Instruction *Inst) {
    auto AssumedBefore = getAssumed();
    SetTy Incoming;
    bool ContainsUndef;
    if (!fillSetWithConstantValues(A, IRPosition::value(*Inst), Incoming,
                                   ContainsUndef, /* ForSelf */ true))
      return indicatePessimisticFixpoint();
    if (ContainsUndef) {
      unionAssumedWithUndef();
    } else {
      for (const auto &It : Incoming)
        unionAssumed(It);
    }
    return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
                                         : ChangeStatus::CHANGED;
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    Value &V = getAssociatedValue();
    Instruction *I = dyn_cast<Instruction>(&V);

    if (auto *ICI = dyn_cast<ICmpInst>(I))
      return updateWithICmpInst(A, ICI);

    if (auto *SI = dyn_cast<SelectInst>(I))
      return updateWithSelectInst(A, SI);

    if (auto *CI = dyn_cast<CastInst>(I))
      return updateWithCastInst(A, CI);

    if (auto *BinOp = dyn_cast<BinaryOperator>(I))
      return updateWithBinaryOperator(A, BinOp);

    if (isa<PHINode>(I) || isa<LoadInst>(I))
      return updateWithInstruction(A, I);

    return indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics().
  void trackStatistics() const override {
    STATS_DECLTRACK_FLOATING_ATTR(potential_values)
  }
};
struct AAPotentialConstantValuesFunction : AAPotentialConstantValuesImpl {
  AAPotentialConstantValuesFunction(const IRPosition &IRP, Attributor &A)
      : AAPotentialConstantValuesImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    llvm_unreachable(
        "AAPotentialConstantValues(Function|CallSite)::updateImpl will "
        "not be called");
  }

  /// See AbstractAttribute::trackStatistics().
  void trackStatistics() const override {
    STATS_DECLTRACK_FN_ATTR(potential_values)
  }
};

struct AAPotentialConstantValuesCallSite : AAPotentialConstantValuesFunction {
  AAPotentialConstantValuesCallSite(const IRPosition &IRP, Attributor &A)
      : AAPotentialConstantValuesFunction(IRP, A) {}

  /// See AbstractAttribute::trackStatistics().
  void trackStatistics() const override {
    STATS_DECLTRACK_CS_ATTR(potential_values)
  }
};

struct AAPotentialConstantValuesCallSiteReturned
    : AACalleeToCallSite<AAPotentialConstantValues,
                         AAPotentialConstantValuesImpl> {
  AAPotentialConstantValuesCallSiteReturned(const IRPosition &IRP,
                                            Attributor &A)
      : AACalleeToCallSite<AAPotentialConstantValues,
                           AAPotentialConstantValuesImpl>(IRP, A) {}

  /// See AbstractAttribute::trackStatistics().
  void trackStatistics() const override {
    STATS_DECLTRACK_CSRET_ATTR(potential_values)
  }
};

struct AAPotentialConstantValuesCallSiteArgument
    : AAPotentialConstantValuesFloating {
  AAPotentialConstantValuesCallSiteArgument(const IRPosition &IRP,
                                            Attributor &A)
      : AAPotentialConstantValuesFloating(IRP, A) {}

  /// See AbstractAttribute::initialize(..).
  void initialize(Attributor &A) override {
    AAPotentialConstantValuesImpl::initialize(A);
    if (isAtFixpoint())
      return;

    Value &V = getAssociatedValue();

    if (auto *C = dyn_cast<ConstantInt>(&V)) {
      unionAssumed(C->getValue());
      indicateOptimisticFixpoint();
      return;
    }

    if (isa<UndefValue>(&V)) {
      unionAssumedWithUndef();
      indicateOptimisticFixpoint();
      return;
    }
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    Value &V = getAssociatedValue();
    auto AssumedBefore = getAssumed();
    auto *AA = A.getAAFor<AAPotentialConstantValues>(
        *this, IRPosition::value(V), DepClassTy::REQUIRED);
    if (!AA)
      return indicatePessimisticFixpoint();
    const auto &S = AA->getAssumed();
    unionAssumed(S);
    return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
                                         : ChangeStatus::CHANGED;
  }

  /// See AbstractAttribute::trackStatistics().
  void trackStatistics() const override {
    STATS_DECLTRACK_CSARG_ATTR(potential_values)
  }
};
/// ------------------------ NoUndef Attribute ---------------------------------
struct AANoUndefImpl : AANoUndef {
  AANoUndefImpl(const IRPosition &IRP, Attributor &A) : AANoUndef(IRP, A) {}

  /// See IRAttribute::isImpliedByIR
  static bool isImpliedByIR(Attributor &A, const IRPosition &IRP,
                            Attribute::AttrKind ImpliedAttributeKind,
                            bool IgnoreSubsumingPositions) {
    assert(ImpliedAttributeKind == Attribute::NoUndef &&
           "Unexpected attribute kind");
    if (A.hasAttr(IRP, {Attribute::NoUndef}, IgnoreSubsumingPositions,
                  Attribute::NoUndef))
      return true;
    // ...
  }

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    Value &V = getAssociatedValue();
    if (isa<UndefValue>(V))
      indicatePessimisticFixpoint();
    assert(!isImpliedByIR(A, getIRPosition(), Attribute::NoUndef));
    // ...
  }

  /// See followUsesInMBEC
  bool followUseInMBEC(Attributor &A, const Use *U, const Instruction *I,
                       AANoUndef::StateType &State) {
    const Value *UseV = U->get();
    const DominatorTree *DT = nullptr;
    AssumptionCache *AC = nullptr;
    InformationCache &InfoCache = A.getInfoCache();
    if (Function *F = getAnchorScope()) {
      DT = /* ... */;
      AC = /* ... */;
    }
    // ...
    bool TrackUse = false;
    // ...
    return TrackUse;
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "noundef" : "may-undef-or-poison";
  }

  ChangeStatus manifest(Attributor &A) override {
    // We don't manifest noundef attribute for dead positions because the
    // associated values with dead positions would be replaced with undef
    // values.
    bool UsedAssumedInformation = false;
    if (A.isAssumedDead(getIRPosition(), nullptr, nullptr,
                        UsedAssumedInformation))
      return ChangeStatus::UNCHANGED;
    // A position whose simplified value does not have any value is
    // considered to be dead. We don't manifest noundef in such positions for
    // the same reason above.
    if (!A.getAssumedSimplified(getIRPosition(), *this, UsedAssumedInformation,
                                /* ... */)
             .has_value())
      return ChangeStatus::UNCHANGED;
    return AANoUndef::manifest(A);
  }
};
10297struct AANoUndefFloating :
public AANoUndefImpl {
10298 AANoUndefFloating(
const IRPosition &IRP, Attributor &
A)
10299 : AANoUndefImpl(IRP,
A) {}
10303 AANoUndefImpl::initialize(
A);
10304 if (!getState().isAtFixpoint() && getAnchorScope() &&
10305 !getAnchorScope()->isDeclaration())
10306 if (Instruction *CtxI = getCtxI())
10307 followUsesInMBEC(*
this,
A, getState(), *CtxI);
10312 auto VisitValueCB = [&](
const IRPosition &IRP) ->
bool {
10313 bool IsKnownNoUndef;
10315 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoUndef);
10319 bool UsedAssumedInformation =
false;
10320 Value *AssociatedValue = &getAssociatedValue();
10322 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
10327 Values.
size() != 1 || Values.
front().getValue() != AssociatedValue;
10335 if (AVIRP == getIRPosition() || !VisitValueCB(AVIRP))
10336 return indicatePessimisticFixpoint();
10337 return ChangeStatus::UNCHANGED;
10340 for (
const auto &VAC : Values)
10342 return indicatePessimisticFixpoint();
10344 return ChangeStatus::UNCHANGED;
struct AANoUndefReturned final
    : AAReturnedFromReturnedValues<AANoUndef, AANoUndefImpl> {
  AANoUndefReturned(const IRPosition &IRP, Attributor &A)
      : AAReturnedFromReturnedValues<AANoUndef, AANoUndefImpl>(IRP, A) {}
  // ...
};

struct AANoUndefArgument final
    : AAArgumentFromCallSiteArguments<AANoUndef, AANoUndefImpl> {
  AANoUndefArgument(const IRPosition &IRP, Attributor &A)
      : AAArgumentFromCallSiteArguments<AANoUndef, AANoUndefImpl>(IRP, A) {}
  // ...
};

struct AANoUndefCallSiteArgument final : AANoUndefFloating {
  AANoUndefCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANoUndefFloating(IRP, A) {}
  // ...
};

struct AANoUndefCallSiteReturned final
    : AACalleeToCallSite<AANoUndef, AANoUndefImpl> {
  AANoUndefCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoUndef, AANoUndefImpl>(IRP, A) {}
  // ...
};
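/// Deduction of the `nofpclass` attribute: the bit state records which
/// floating-point classes (nan, inf, zero, subnormal, ...) the value is known
/// not to take, e.g. `define float @f(float nofpclass(nan inf) %x)` in IR.
/// The known bits are seeded from attributes already present in the IR and
/// then refined via value simplification and use-following.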
struct AANoFPClassImpl : AANoFPClass {
  AANoFPClassImpl(const IRPosition &IRP, Attributor &A) : AANoFPClass(IRP, A) {}

  // ...
    const IRPosition &IRP = getIRPosition();
    // ...
      indicateOptimisticFixpoint();
    // ...
    A.getAttrs(getIRPosition(), {Attribute::NoFPClass}, Attrs, false);
    for (const auto &Attr : Attrs) {
      // ...
    }
    const DataLayout &DL = A.getDataLayout();
    // ...
    if (Instruction *CtxI = getCtxI())
      followUsesInMBEC(*this, A, getState(), *CtxI);
  // ...
  bool followUseInMBEC(Attributor &A, const Use *U, const Instruction *I,
                       AANoFPClass::StateType &State) {
    // ...
    if (auto *NoFPAA = A.getAAFor<AANoFPClass>(*this, IRP, DepClassTy::NONE))
      State.addKnownBits(NoFPAA->getState().getKnown());
    // ...
  }

  const std::string getAsStr(Attributor *A) const override {
    std::string Result = "nofpclass";
    raw_string_ostream OS(Result);
    OS << getKnownNoFPClass() << '/' << getAssumedNoFPClass();
    // ...
  }

  void getDeducedAttributes(Attributor &A, LLVMContext &Ctx,
                            SmallVectorImpl<Attribute> &Attrs) const override {
    Attrs.emplace_back(Attribute::getWithNoFPClass(Ctx, getAssumedNoFPClass()));
  }
};
struct AANoFPClassFloating : public AANoFPClassImpl {
  AANoFPClassFloating(const IRPosition &IRP, Attributor &A)
      : AANoFPClassImpl(IRP, A) {}

  // ...
    bool UsedAssumedInformation = false;
    if (!A.getAssumedSimplifiedValues(getIRPosition(), *this, Values,
        // ...
      Values.push_back({getAssociatedValue(), getCtxI()});
    // ...
                                      DepClassTy::REQUIRED);
      if (!AA || this == AA) {
        T.indicatePessimisticFixpoint();
        // ...
      }
      const AANoFPClass::StateType &S =
          static_cast<const AANoFPClass::StateType &>(AA->getState());
      // ...
      return T.isValidState();
    // ...
    for (const auto &VAC : Values)
      if (!VisitValueCB(*VAC.getValue(), VAC.getCtxI()))
        return indicatePessimisticFixpoint();
    // ...

  void trackStatistics() const override {
    // ...
  }
};
struct AANoFPClassReturned final
    : AAReturnedFromReturnedValues<AANoFPClass, AANoFPClassImpl,
                                   AANoFPClassImpl::StateType, false,
                                   Attribute::None, false> {
  AANoFPClassReturned(const IRPosition &IRP, Attributor &A)
      : AAReturnedFromReturnedValues<AANoFPClass, AANoFPClassImpl,
                                     AANoFPClassImpl::StateType, false,
                                     // ...

  void trackStatistics() const override {
    // ...
  }
};

struct AANoFPClassArgument final
    : AAArgumentFromCallSiteArguments<AANoFPClass, AANoFPClassImpl> {
  AANoFPClassArgument(const IRPosition &IRP, Attributor &A)
      : AAArgumentFromCallSiteArguments<AANoFPClass, AANoFPClassImpl>(IRP, A) {}
  // ...
};

struct AANoFPClassCallSiteArgument final : AANoFPClassFloating {
  AANoFPClassCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANoFPClassFloating(IRP, A) {}

  void trackStatistics() const override {
    // ...
  }
};

struct AANoFPClassCallSiteReturned final
    : AACalleeToCallSite<AANoFPClass, AANoFPClassImpl> {
  AANoFPClassCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoFPClass, AANoFPClassImpl>(IRP, A) {}

  void trackStatistics() const override {
    // ...
  }
};
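/// Common base for call-edge deduction: it collects the set of functions that
/// may be called from the associated position (the optimistic edges) and
/// records whether an unknown callee, possibly inline asm, has to be assumed.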
struct AACallEdgesImpl : public AACallEdges {
  AACallEdgesImpl(const IRPosition &IRP, Attributor &A) : AACallEdges(IRP, A) {}

  const SetVector<Function *> &getOptimisticEdges() const override {
    return CalledFunctions;
  }

  bool hasUnknownCallee() const override { return HasUnknownCallee; }

  bool hasNonAsmUnknownCallee() const override {
    return HasUnknownCalleeNonAsm;
  }

  const std::string getAsStr(Attributor *A) const override {
    return "CallEdges[" + std::to_string(HasUnknownCallee) + "," +
           std::to_string(CalledFunctions.size()) + "]";
  }

  void trackStatistics() const override {}

  void addCalledFunction(Function *Fn, ChangeStatus &Change) {
    if (CalledFunctions.insert(Fn)) {
      Change = ChangeStatus::CHANGED;
      // ...
    }
  }

  void setHasUnknownCallee(bool NonAsm, ChangeStatus &Change) {
    if (!HasUnknownCallee)
      Change = ChangeStatus::CHANGED;
    if (NonAsm && !HasUnknownCalleeNonAsm)
      Change = ChangeStatus::CHANGED;
    HasUnknownCalleeNonAsm |= NonAsm;
    HasUnknownCallee = true;
  }

  SetVector<Function *> CalledFunctions;

  bool HasUnknownCallee = false;

  bool HasUnknownCalleeNonAsm = false;
};
struct AACallEdgesCallSite : public AACallEdgesImpl {
  AACallEdgesCallSite(const IRPosition &IRP, Attributor &A)
      : AACallEdgesImpl(IRP, A) {}

  // ...
      addCalledFunction(Fn, Change);
    // ...
      LLVM_DEBUG(dbgs() << "[AACallEdges] Unrecognized value: " << V << "\n");
      setHasUnknownCallee(true, Change);
    // ...
      VisitValue(*V, CtxI);
    // ...
    bool UsedAssumedInformation = false;
    // ...
    for (auto &VAC : Values)
      VisitValue(*VAC.getValue(), VAC.getCtxI());
    // ...
    if (IA->hasSideEffects() &&
        // ...
      setHasUnknownCallee(false, Change);
    // ...
    if (auto *IndirectCallAA = A.getAAFor<AAIndirectCallInfo>(
            *this, getIRPosition(), DepClassTy::OPTIONAL))
      if (IndirectCallAA->foreachCallee(
              [&](Function *Fn) { return VisitValue(*Fn, CB); }))
        // ...
    for (const Use *U : CallbackUses)
      ProcessCalledOperand(U->get(), CB);
    // ...
  }
};
struct AACallEdgesFunction : public AACallEdgesImpl {
  AACallEdgesFunction(const IRPosition &IRP, Attributor &A)
      : AACallEdgesImpl(IRP, A) {}

  // ...
      auto *CBEdges = A.getAAFor<AACallEdges>(
          // ...
      if (CBEdges->hasNonAsmUnknownCallee())
        setHasUnknownCallee(true, Change);
      if (CBEdges->hasUnknownCallee())
        setHasUnknownCallee(false, Change);
      // ...
      for (Function *F : CBEdges->getOptimisticEdges())
        addCalledFunction(F, Change);
    // ...
    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallLikeInstructions(ProcessCallInst, *this,
                                           UsedAssumedInformation,
        // ...
      setHasUnknownCallee(true, Change);
    // ...
  }
};
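/// Inter-procedural reachability: answers whether an instruction can reach a
/// given function transitively through the assumed call edges. Queries are
/// cached via CachedReachabilityAA and combined with intra-procedural
/// reachability results for the call-like instructions in between.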
struct AAInterFnReachabilityFunction
    : public CachedReachabilityAA<AAInterFnReachability, Function> {
  using Base = CachedReachabilityAA<AAInterFnReachability, Function>;
  AAInterFnReachabilityFunction(const IRPosition &IRP, Attributor &A)
      // ...

  bool instructionCanReach(
      Attributor &A, const Instruction &From, const Function &To,
      // ...
    auto *NonConstThis = const_cast<AAInterFnReachabilityFunction *>(this);
    // ...
    RQITy StackRQI(A, From, To, ExclusionSet, false);
    typename RQITy::Reachable Result;
    if (!NonConstThis->checkQueryCache(A, StackRQI, Result))
      return NonConstThis->isReachableImpl(A, StackRQI,
                                           // ...
    return Result == RQITy::Reachable::Yes;
  }

  // ...
                       bool IsTemporaryRQI) override {
    // ...
        &RQI.From->getFunction()->getEntryBlock().front();
    if (EntryI != RQI.From &&
        !instructionCanReach(A, *EntryI, *RQI.To, nullptr))
      return rememberResult(A, RQITy::Reachable::No, RQI, false,
                            // ...
    auto CheckReachableCallBase = [&](CallBase *CB) {
      auto *CBEdges = A.getAAFor<AACallEdges>(
          // ...
      if (!CBEdges || !CBEdges->getState().isValidState())
        // ...
      if (CBEdges->hasUnknownCallee())
        // ...
      for (Function *Fn : CBEdges->getOptimisticEdges()) {
        // ...
        if (Fn == getAnchorScope()) {
          if (EntryI == RQI.From)
            // ...
        const AAInterFnReachability *InterFnReachability =
            // ...
                DepClassTy::OPTIONAL);
        // ...
        if (!InterFnReachability ||
            // ...
      }
      const auto *IntraFnReachability = A.getAAFor<AAIntraFnReachability>(
          // ...
          DepClassTy::OPTIONAL);
      // ...
      return IntraFnReachability && !IntraFnReachability->isAssumedReachable(
                                        A, *RQI.From, CBInst, RQI.ExclusionSet);
    };

    bool UsedExclusionSet = true;
    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallLikeInstructions(CheckCallBase, *this,
                                           UsedAssumedInformation,
        // ...
      return rememberResult(A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
                            // ...
    return rememberResult(A, RQITy::Reachable::No, RQI, UsedExclusionSet,
                          // ...
  }

  void trackStatistics() const override {}
};
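/// Helpers and the common base for AAPotentialValues: the state is a set of
/// (value, context) pairs, each tagged with the scope (intra- or
/// inter-procedural) in which the value is a valid stand-in for the
/// associated value.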
template <typename AAType>
static std::optional<Constant *>
// ...
  if (!Ty.isIntegerTy())
    // ...
  std::optional<Constant *> COpt = AA->getAssumedConstant(A);
  // ...
  if (!COpt.has_value()) {
    // ...
    return std::nullopt;
  }
  if (auto *C = *COpt) {
    // ...
  }
// ...
  std::optional<Value *> V;
  for (auto &It : Values) {
    // ...
    if (V.has_value() && !*V)
      // ...
    if (!V.has_value())
      // ...
  }
// ...
    if (A.hasSimplificationCallback(getIRPosition())) {
      indicatePessimisticFixpoint();
      // ...
    }
    Value *Stripped = getAssociatedValue().stripPointerCasts();
    // ...
      addValue(A, getState(), *Stripped, getCtxI(), AA::AnyScope,
               // ...
      indicateOptimisticFixpoint();
    // ...
    AAPotentialValues::initialize(A);
  // ...
  const std::string getAsStr(Attributor *A) const override {
    // ...
    llvm::raw_string_ostream OS(Str);
    // ...
  }

  template <typename AAType>
  static std::optional<Value *> askOtherAA(Attributor &A,
                                           const AbstractAttribute &AA,
                                           const IRPosition &IRP, Type &Ty) {
    // ...
      return std::nullopt;
    // ...
  }

  virtual void addValue(Attributor &A, StateType &State, Value &V,
                        // ...
                        Function *AnchorScope) const {
    // ...
      for (const auto &U : CB->args()) {
        // ...
      }
    // ...
      Type &Ty = *getAssociatedType();
      std::optional<Value *> SimpleV =
          askOtherAA<AAValueConstantRange>(A, *this, ValIRP, Ty);
      if (SimpleV.has_value() && !*SimpleV) {
        auto *PotentialConstantsAA = A.getAAFor<AAPotentialConstantValues>(
            *this, ValIRP, DepClassTy::OPTIONAL);
        if (PotentialConstantsAA && PotentialConstantsAA->isValidState()) {
          for (const auto &It : PotentialConstantsAA->getAssumedSet())
            State.unionAssumed({{*ConstantInt::get(&Ty, It), nullptr}, S});
          if (PotentialConstantsAA->undefIsContained())
            // ...
        }
      }
      if (!SimpleV.has_value())
        // ...
    State.unionAssumed({{*VPtr, CtxI}, S});
  }

  // ...
    AA::ValueAndContext I;
    // ...
      return II.I == I && II.S == S;
    // ...
      return std::tie(I, S) < std::tie(II.I, II.S);
  // ...
  bool recurseForValue(Attributor &A, const IRPosition &IRP, AA::ValueScope S) {
    SmallMapVector<AA::ValueAndContext, int, 8> ValueScopeMap;
    // ...
      bool UsedAssumedInformation = false;
      // ...
      if (!A.getAssumedSimplifiedValues(IRP, this, Values, CS,
                                        UsedAssumedInformation))
        // ...
      for (auto &It : Values)
        ValueScopeMap[It] += CS;
    // ...
    for (auto &It : ValueScopeMap)
      addValue(A, getState(), *It.first.getValue(), It.first.getCtxI(),
               // ...
  }

  void giveUpOnIntraprocedural(Attributor &A) {
    auto NewS = StateType::getBestState(getState());
    for (const auto &It : getAssumedSet()) {
      // ...
      addValue(A, NewS, *It.first.getValue(), It.first.getCtxI(),
               // ...
    }
    assert(!undefIsContained() && "Undef should be an explicit value!");
    // ...
  }

  // ...
    getState() = StateType::getBestState(getState());
    getState().unionAssumed({{getAssociatedValue(), getCtxI()}, AA::AnyScope});
    AAPotentialValues::indicateOptimisticFixpoint();
    return ChangeStatus::CHANGED;
  // ...
      return indicatePessimisticFixpoint();
  // ...
    if (!getAssumedSimplifiedValues(A, Values, S))
      // ...
    Value &OldV = getAssociatedValue();
    // ...
    Value *NewV = getSingleValue(A, *this, getIRPosition(), Values);
    if (!NewV || NewV == &OldV)
      // ...
    if (A.changeAfterManifest(getIRPosition(), *NewV))
      return ChangeStatus::CHANGED;
    // ...
    return ChangeStatus::UNCHANGED;
  // ...
  bool getAssumedSimplifiedValues(
      Attributor &A, SmallVectorImpl<AA::ValueAndContext> &Values,
      AA::ValueScope S, bool RecurseForSelectAndPHI = false) const override {
    if (!isValidState())
      // ...
    bool UsedAssumedInformation = false;
    for (const auto &It : getAssumedSet())
      if (It.second & S) {
        if (RecurseForSelectAndPHI && (isa<PHINode>(It.first.getValue()) ||
            // ...
          if (A.getAssumedSimplifiedValues(
                  // ...
                  this, Values, S, UsedAssumedInformation))
            // ...
      }
    assert(!undefIsContained() && "Undef should be an explicit value!");
    // ...
  }
};
struct AAPotentialValuesFloating : AAPotentialValuesImpl {
  AAPotentialValuesFloating(const IRPosition &IRP, Attributor &A)
      : AAPotentialValuesImpl(IRP, A) {}

  // ...
    auto AssumedBefore = getAssumed();
    // ...
    genericValueTraversal(A, &getAssociatedValue());
    // ...
    return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
                                           : ChangeStatus::CHANGED;
  // ...
  struct LivenessInfo {
    const AAIsDead *LivenessAA = nullptr;
    bool AnyDead = false;
  };

  // ...
                 SmallVectorImpl<ItemInfo> &Worklist) {
    // ...
    bool UsedAssumedInformation = false;
    // ...
    auto GetSimplifiedValues = [&](Value &V,
                                   // ...
      if (!A.getAssumedSimplifiedValues(
              // ...
        Values.push_back(AA::ValueAndContext{V, II.I.getCtxI()});
      // ...
      return Values.empty();
    };
    if (GetSimplifiedValues(*LHS, LHSValues))
      // ...
    if (GetSimplifiedValues(*RHS, RHSValues))
      // ...
    InformationCache &InfoCache = A.getInfoCache();
    // ...
        F ? A.getInfoCache().getTargetLibraryInfoForFunction(*F) : nullptr;
    // ...
    const DataLayout &DL = A.getDataLayout();
    SimplifyQuery Q(DL, TLI, DT, AC, CmpI);

    auto CheckPair = [&](Value &LHSV, Value &RHSV) {
      // ...
               nullptr, II.S, getAnchorScope());
      // ...
      if (&LHSV == &RHSV &&
          // ...
        Constant *NewV = ConstantInt::get(Type::getInt1Ty(Ctx),
                                          // ...
        addValue(A, getState(), *NewV, nullptr, II.S,
                 // ...
      if (TypedLHS && TypedRHS) {
        // ...
        if (NewV && NewV != &Cmp) {
          addValue(A, getState(), *NewV, nullptr, II.S,
                   // ...
        }
      }
      // ...
      if (!LHSIsNull && !RHSIsNull)
        // ...
      assert((LHSIsNull || RHSIsNull) &&
             "Expected nullptr versus non-nullptr comparison at this point");
      // ...
      unsigned PtrIdx = LHSIsNull;
      bool IsKnownNonNull;
      // ...
          DepClassTy::REQUIRED, IsKnownNonNull);
      if (!IsAssumedNonNull)
        // ...
      addValue(A, getState(), *NewV, nullptr, II.S,
               // ...
    };

    for (auto &LHSValue : LHSValues)
      for (auto &RHSValue : RHSValues)
        if (!CheckPair(*LHSValue.getValue(), *RHSValue.getValue()))
          // ...
  }

  bool handleSelectInst(Attributor &A, SelectInst &SI, ItemInfo II,
                        SmallVectorImpl<ItemInfo> &Worklist) {
    // ...
    bool UsedAssumedInformation = false;
    // ...
    std::optional<Constant *> C =
        A.getAssumedConstant(*SI.getCondition(), *this, UsedAssumedInformation);
    bool NoValueYet = !C.has_value();
    // ...
    } else if (&SI == &getAssociatedValue()) {
      // ...
      std::optional<Value *> SimpleV = A.getAssumedSimplified(
          // ...
      if (!SimpleV.has_value())
        // ...
      addValue(A, getState(), **SimpleV, CtxI, II.S, getAnchorScope());
    // ...
  }

  bool handleLoadInst(Attributor &A, LoadInst &LI, ItemInfo II,
                      SmallVectorImpl<ItemInfo> &Worklist) {
    SmallSetVector<Value *, 4> PotentialCopies;
    SmallSetVector<Instruction *, 4> PotentialValueOrigins;
    bool UsedAssumedInformation = false;
    // ...
                                        PotentialValueOrigins, *this,
                                        UsedAssumedInformation,
        // ...
      LLVM_DEBUG(dbgs() << "[AAPotentialValues] Failed to get potentially "
                           "loaded values for load instruction "
                 // ...
    InformationCache &InfoCache = A.getInfoCache();
    // ...
    if (!llvm::all_of(PotentialValueOrigins, [&](Instruction *I) {
          // ...
            return A.isAssumedDead(SI->getOperandUse(0), this,
                                   // ...
                                   UsedAssumedInformation,
                                   // ...
          return A.isAssumedDead(*I, this, nullptr,
                                 UsedAssumedInformation,
                                 // ...
      LLVM_DEBUG(dbgs() << "[AAPotentialValues] Load is only used by assumes "
                           "and we cannot delete all the stores: "
                 // ...
    bool AllLocal = ScopeIsLocal;
    // ...
    if (!DynamicallyUnique) {
      LLVM_DEBUG(dbgs() << "[AAPotentialValues] Not all potentially loaded "
                           "values are dynamically unique: "
                 // ...
    }
    for (auto *PotentialCopy : PotentialCopies) {
      // ...
      Worklist.push_back({{*PotentialCopy, CtxI}, II.S});
      // ...
    }
    if (!AllLocal && ScopeIsLocal)
      // ...
  }

  bool handlePHINode(
      Attributor &A, PHINode &PHI, ItemInfo II,
      SmallVectorImpl<ItemInfo> &Worklist,
      SmallMapVector<const Function *, LivenessInfo, 4> &LivenessAAs) {
    auto GetLivenessInfo = [&](const Function &F) -> LivenessInfo & {
      LivenessInfo &LI = LivenessAAs[&F];
      if (!LI.LivenessAA)
        // ...
    };
    if (&PHI == &getAssociatedValue()) {
      LivenessInfo &LI = GetLivenessInfo(*PHI.getFunction());
      // ...
          A.getInfoCache().getAnalysisResultForFunction<CycleAnalysis>(
              *PHI.getFunction());
      // ...
      for (unsigned u = 0, e = PHI.getNumIncomingValues(); u < e; u++) {
        // ...
        if (LI.LivenessAA &&
            LI.LivenessAA->isEdgeDead(IncomingBB, PHI.getParent())) {
          // ...
        }
      }
    }
    // ...
    bool UsedAssumedInformation = false;
    std::optional<Value *> SimpleV = A.getAssumedSimplified(
        // ...
    if (!SimpleV.has_value())
      // ...
    addValue(A, getState(), **SimpleV, &PHI, II.S, getAnchorScope());
    // ...
  }

  bool handleGenericInst(Attributor &A, Instruction &I, ItemInfo II,
                         SmallVectorImpl<ItemInfo> &Worklist) {
    bool SomeSimplified = false;
    bool UsedAssumedInformation = false;
    // ...
    SmallVector<Value *, 8> NewOps(I.getNumOperands());
    // ...
      const auto &SimplifiedOp = A.getAssumedSimplified(
          // ...
      if (!SimplifiedOp.has_value())
        // ...
        NewOps[Idx] = *SimplifiedOp;
      // ...
      SomeSimplified |= (NewOps[Idx] != Op);
    // ...
    if (!SomeSimplified)
      // ...
    InformationCache &InfoCache = A.getInfoCache();
    // ...
    const auto *TLI = A.getInfoCache().getTargetLibraryInfoForFunction(*F);
    // ...
    const DataLayout &DL = I.getDataLayout();
    SimplifyQuery Q(DL, TLI, DT, AC, &I);
    // ...
    if (!NewV || NewV == &I)
      // ...
    LLVM_DEBUG(dbgs() << "Generic inst " << I << " assumed simplified to "
               // ...
  }

  // ...
      Attributor &A, Instruction &I, ItemInfo II,
      SmallVectorImpl<ItemInfo> &Worklist,
      SmallMapVector<const Function *, LivenessInfo, 4> &LivenessAAs) {
    // ...
                          CI->getPredicate(), II, Worklist);
    // ...
    switch (I.getOpcode()) {
    case Instruction::Select:
      // ...
    case Instruction::PHI:
      // ...
    case Instruction::Load:
      // ...
      return handleGenericInst(A, I, II, Worklist);
    }
  }

  void genericValueTraversal(Attributor &A, Value *InitialV) {
    SmallMapVector<const Function *, LivenessInfo, 4> LivenessAAs;
    // ...
    SmallSet<ItemInfo, 16> Visited;
    // ...
        LLVM_DEBUG(dbgs() << "Generic value traversal reached iteration limit: "
                          << Iteration << "!\n");
        addValue(A, getState(), *V, CtxI, S, getAnchorScope());
        // ...
      Value *NewV = nullptr;
      if (V->getType()->isPointerTy()) {
        // ...
          for (Argument &Arg : Callee->args())
            // ...
      }
      if (NewV && NewV != V) {
        Worklist.push_back({{*NewV, CtxI}, S});
        // ...
      }
      // ...
      if (V == InitialV && CtxI == getCtxI()) {
        indicatePessimisticFixpoint();
        // ...
      }
      addValue(A, getState(), *V, CtxI, S, getAnchorScope());
    } while (!Worklist.empty());
    // ...
    for (auto &It : LivenessAAs)
      if (It.second.AnyDead)
        A.recordDependence(*It.second.LivenessAA, *this, DepClassTy::OPTIONAL);
  }

  void trackStatistics() const override {
    // ...
  }
};
struct AAPotentialValuesArgument final : AAPotentialValuesImpl {
  using Base = AAPotentialValuesImpl;
  AAPotentialValuesArgument(const IRPosition &IRP, Attributor &A)
      // ...

  // ...
      indicatePessimisticFixpoint();
  // ...
    auto AssumedBefore = getAssumed();
    // ...
    unsigned ArgNo = getCalleeArgNo();
    // ...
    bool UsedAssumedInformation = false;
    // ...
    auto CallSitePred = [&](AbstractCallSite ACS) {
      // ...
      if (CSArgIRP.getPositionKind() == IRP_INVALID)
        // ...
      if (!A.getAssumedSimplifiedValues(CSArgIRP, this, Values,
                                        // ...
                                        UsedAssumedInformation))
        // ...
      return isValidState();
    };

    if (!A.checkForAllCallSites(CallSitePred, *this,
                                // ...
                                UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    Function *Fn = getAssociatedFunction();
    bool AnyNonLocal = false;
    for (auto &It : Values) {
      // ...
        addValue(A, getState(), *It.getValue(), It.getCtxI(), AA::AnyScope,
                 // ...
        return indicatePessimisticFixpoint();
      // ...
      addValue(A, getState(), *It.getValue(), It.getCtxI(), AA::AnyScope,
               // ...
      AnyNonLocal = true;
    }
    assert(!undefIsContained() && "Undef should be an explicit value!");
    // ...
      giveUpOnIntraprocedural(A);
    // ...
    return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
                                           : ChangeStatus::CHANGED;
  // ...
  void trackStatistics() const override {
    // ...
  }
};
struct AAPotentialValuesReturned : public AAPotentialValuesFloating {
  using Base = AAPotentialValuesFloating;
  AAPotentialValuesReturned(const IRPosition &IRP, Attributor &A)
      // ...

  // ...
    if (!F || F->isDeclaration() || F->getReturnType()->isVoidTy()) {
      indicatePessimisticFixpoint();
      // ...
    }
    for (Argument &Arg : F->args())
      // ...
        ReturnedArg = &Arg;
    // ...
    if (!A.isFunctionIPOAmendable(*F) ||
        A.hasSimplificationCallback(getIRPosition())) {
      // ...
        indicatePessimisticFixpoint();
      // ...
        indicateOptimisticFixpoint();
    }
  // ...
    auto AssumedBefore = getAssumed();
    bool UsedAssumedInformation = false;
    // ...
    Function *AnchorScope = getAnchorScope();
    // ...
                                        UsedAssumedInformation,
        // ...
      bool AllInterAreIntra = false;
      // ...
          llvm::all_of(Values, [&](const AA::ValueAndContext &VAC) {
            // ...
      for (const AA::ValueAndContext &VAC : Values) {
        addValue(A, getState(), *VAC.getValue(),
                 VAC.getCtxI() ? VAC.getCtxI() : CtxI,
                 // ...
      }
      if (AllInterAreIntra)
        // ...
      HandleReturnedValue(*ReturnedArg, nullptr, true);
    // ...
      bool AddValues = true;
      // ...
        addValue(A, getState(), *RetI.getOperand(0), &RetI, AA::AnyScope,
                 // ...
      return HandleReturnedValue(*RetI.getOperand(0), &RetI, AddValues);
    // ...
    if (!A.checkForAllInstructions(RetInstPred, *this, {Instruction::Ret},
                                   UsedAssumedInformation,
        // ...
      return indicatePessimisticFixpoint();
    // ...
    return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
                                           : ChangeStatus::CHANGED;
  // ...
      return ChangeStatus::UNCHANGED;
    // ...
    if (!getAssumedSimplifiedValues(A, Values, AA::ValueScope::Intraprocedural,
        // ...
      return ChangeStatus::UNCHANGED;
    Value *NewVal = getSingleValue(A, *this, getIRPosition(), Values);
    // ...
      return ChangeStatus::UNCHANGED;
    // ...
                    "Number of function with unique return");
    // ...
        {Attribute::get(Arg->getContext(), Attribute::Returned)});
    // ...
      Value *RetOp = RetI.getOperand(0);
      // ...
      if (A.changeUseAfterManifest(RetI.getOperandUse(0), *NewVal))
        Changed = ChangeStatus::CHANGED;
    // ...
    bool UsedAssumedInformation = false;
    (void)A.checkForAllInstructions(RetInstPred, *this, {Instruction::Ret},
                                    UsedAssumedInformation,
                                    // ...
  // ...
    return AAPotentialValues::indicatePessimisticFixpoint();
  // ...
  void trackStatistics() const override {
    // ...
  }
};
struct AAPotentialValuesFunction : AAPotentialValuesImpl {
  AAPotentialValuesFunction(const IRPosition &IRP, Attributor &A)
      : AAPotentialValuesImpl(IRP, A) {}

  // ...
  void trackStatistics() const override {
    // ...
  }
};

struct AAPotentialValuesCallSite : AAPotentialValuesFunction {
  AAPotentialValuesCallSite(const IRPosition &IRP, Attributor &A)
      : AAPotentialValuesFunction(IRP, A) {}

  void trackStatistics() const override {
    // ...
  }
};

struct AAPotentialValuesCallSiteReturned : AAPotentialValuesImpl {
  AAPotentialValuesCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAPotentialValuesImpl(IRP, A) {}

  // ...
    auto AssumedBefore = getAssumed();
    // ...
      return indicatePessimisticFixpoint();
    // ...
    bool UsedAssumedInformation = false;
    // ...
                                      UsedAssumedInformation))
      return indicatePessimisticFixpoint();
    // ...
                                      Values, S, UsedAssumedInformation))
      // ...
    for (auto &It : Values) {
      Value *V = It.getValue();
      std::optional<Value *> CallerV = A.translateArgumentToCallSiteContent(
          V, *CB, *this, UsedAssumedInformation);
      if (!CallerV.has_value()) {
        // ...
      }
      V = *CallerV ? *CallerV : V;
      // ...
        giveUpOnIntraprocedural(A);
      // ...
      addValue(A, getState(), *V, CB, S, getAnchorScope());
    }
    // ...
      return indicatePessimisticFixpoint();
    // ...
      return indicatePessimisticFixpoint();
    return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
                                           : ChangeStatus::CHANGED;
  // ...
    return AAPotentialValues::indicatePessimisticFixpoint();
  // ...
  void trackStatistics() const override {
    // ...
  }
};

struct AAPotentialValuesCallSiteArgument : AAPotentialValuesFloating {
  AAPotentialValuesCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAPotentialValuesFloating(IRP, A) {}

  void trackStatistics() const override {
    // ...
  }
};
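/// Tracks assumption strings (see llvm/IR/Assumptions.h) attached to
/// functions and call sites, e.g. via the "llvm.assume" string attribute.
/// The assumed set of a function is the intersection of the assumptions made
/// at all of its call sites.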
struct AAAssumptionInfoImpl : public AAAssumptionInfo {
  AAAssumptionInfoImpl(const IRPosition &IRP, Attributor &A,
                       const DenseSet<StringRef> &Known)
      : AAAssumptionInfo(IRP, A, Known) {}

  // ...
    if (getKnown().isUniversal())
      return ChangeStatus::UNCHANGED;
    // ...
    const IRPosition &IRP = getIRPosition();
    // ...
                               getAssumed().getSet().end());
    // ...
    return A.manifestAttrs(IRP,
                           // ...
  // ...
  bool hasAssumption(const StringRef Assumption) const override {
    return isValidState() && setContains(Assumption);
  }

  const std::string getAsStr(Attributor *A) const override {
    const SetContents &Known = getKnown();
    const SetContents &Assumed = getAssumed();
    // ...
    const std::string KnownStr = llvm::join(Set, ",");
    // ...
    std::string AssumedStr = "Universal";
    if (!Assumed.isUniversal()) {
      Set.assign(Assumed.getSet().begin(), Assumed.getSet().end());
      // ...
    }
    return "Known [" + KnownStr + "]," + " Assumed [" + AssumedStr + "]";
  }
};

// ...
struct AAAssumptionInfoFunction final : AAAssumptionInfoImpl {
  AAAssumptionInfoFunction(const IRPosition &IRP, Attributor &A)
      : AAAssumptionInfoImpl(IRP, A,
                             // ...

  // ...
    auto CallSitePred = [&](AbstractCallSite ACS) {
      const auto *AssumptionAA = A.getAAFor<AAAssumptionInfo>(
          // ...
          DepClassTy::REQUIRED);
      // ...
      Changed |= getIntersection(AssumptionAA->getAssumed());
      return !getAssumed().empty() || !getKnown().empty();
    };

    bool UsedAssumedInformation = false;
    // ...
    if (!A.checkForAllCallSites(CallSitePred, *this, true,
                                UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
  }

  void trackStatistics() const override {}
};

struct AAAssumptionInfoCallSite final : AAAssumptionInfoImpl {
  AAAssumptionInfoCallSite(const IRPosition &IRP, Attributor &A)
      : AAAssumptionInfoImpl(IRP, A, getInitialAssumptions(IRP)) {}

  // ...
    A.getAAFor<AAAssumptionInfo>(*this, FnPos, DepClassTy::REQUIRED);
  // ...
    auto *AssumptionAA =
        A.getAAFor<AAAssumptionInfo>(*this, FnPos, DepClassTy::REQUIRED);
    // ...
      return indicatePessimisticFixpoint();
    bool Changed = getIntersection(AssumptionAA->getAssumed());
    return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
  }

  void trackStatistics() const override {}

  // ...
  DenseSet<StringRef> getInitialAssumptions(const IRPosition &IRP) {
    // ...
    return Assumptions;
  }
};
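/// Computes the set of underlying objects of the associated pointer,
/// separately for the intra-procedural and the inter-procedural scope, by
/// simplifying the pointer and recursing through PHIs and indirectly
/// discovered objects.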
struct AAUnderlyingObjectsImpl
    // ...
  const std::string getAsStr(Attributor *A) const override {
    if (!isValidState())
      return "<invalid>";
    // ...
    OS << "underlying objects: inter " << InterAssumedUnderlyingObjects.size()
       << " objects, intra " << IntraAssumedUnderlyingObjects.size()
       // ...
    if (!InterAssumedUnderlyingObjects.empty()) {
      OS << "inter objects:\n";
      for (auto *Obj : InterAssumedUnderlyingObjects)
        OS << *Obj << '\n';
    }
    if (!IntraAssumedUnderlyingObjects.empty()) {
      OS << "intra objects:\n";
      for (auto *Obj : IntraAssumedUnderlyingObjects)
        OS << *Obj << '\n';
    }
    // ...
  }

  void trackStatistics() const override {}

  // ...
    auto &Ptr = getAssociatedValue();
    // ...
    bool UsedAssumedInformation = false;
    auto DoUpdate = [&](SmallSetVector<Value *, 8> &UnderlyingObjects,
                        // ...
      SmallPtrSet<Value *, 8> SeenObjects;
      // ...
                                        Scope, UsedAssumedInformation))
        // ...
      for (unsigned I = 0; I < Values.size(); ++I) {
        auto &VAC = Values[I];
        auto *Obj = VAC.getValue();
        // ...
        if (!SeenObjects.insert(UO ? UO : Obj).second)
          // ...
        if (UO && UO != Obj) {
          // ...
          const auto *OtherAA = A.getAAFor<AAUnderlyingObjects>(
              // ...
          auto Pred = [&](Value &V) {
            // ...
          };
          if (!OtherAA || !OtherAA->forallUnderlyingObjects(Pred, Scope))
            // ...
                "The forall call should not return false at this position");
          // ...
          Changed |= handleIndirect(A, *Obj, UnderlyingObjects, Scope,
                                    UsedAssumedInformation);
          // ...
          for (unsigned u = 0, e = PHI->getNumIncomingValues(); u < e; u++) {
            // ...
                handleIndirect(A, *PHI->getIncomingValue(u), UnderlyingObjects,
                               Scope, UsedAssumedInformation);
          }
          // ...
      }
    };
    // ...
    if (!UsedAssumedInformation)
      indicateOptimisticFixpoint();
    return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
  }

  bool forallUnderlyingObjects(
      function_ref<bool(Value &)> Pred,
      // ...
    if (!isValidState())
      return Pred(getAssociatedValue());
    // ...
                                         ? IntraAssumedUnderlyingObjects
                                         : InterAssumedUnderlyingObjects;
    for (Value *Obj : AssumedUnderlyingObjects)
      // ...
  }

  bool handleIndirect(Attributor &A, Value &V,
                      SmallSetVector<Value *, 8> &UnderlyingObjects,
                      // ...
    const auto *AA = A.getAAFor<AAUnderlyingObjects>(
        // ...
    auto Pred = [&](Value &V) {
      // ...
    };
    if (!AA || !AA->forallUnderlyingObjects(Pred, Scope))
      // ...
          "The forall call should not return false at this position");
    // ...
  }

  SmallSetVector<Value *, 8> IntraAssumedUnderlyingObjects;

  SmallSetVector<Value *, 8> InterAssumedUnderlyingObjects;
};

struct AAUnderlyingObjectsFloating final : AAUnderlyingObjectsImpl {
  AAUnderlyingObjectsFloating(const IRPosition &IRP, Attributor &A)
      : AAUnderlyingObjectsImpl(IRP, A) {}
};

struct AAUnderlyingObjectsArgument final : AAUnderlyingObjectsImpl {
  AAUnderlyingObjectsArgument(const IRPosition &IRP, Attributor &A)
      : AAUnderlyingObjectsImpl(IRP, A) {}
};

struct AAUnderlyingObjectsCallSite final : AAUnderlyingObjectsImpl {
  AAUnderlyingObjectsCallSite(const IRPosition &IRP, Attributor &A)
      : AAUnderlyingObjectsImpl(IRP, A) {}
};

struct AAUnderlyingObjectsCallSiteArgument final : AAUnderlyingObjectsImpl {
  AAUnderlyingObjectsCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAUnderlyingObjectsImpl(IRP, A) {}
};

struct AAUnderlyingObjectsReturned final : AAUnderlyingObjectsImpl {
  AAUnderlyingObjectsReturned(const IRPosition &IRP, Attributor &A)
      : AAUnderlyingObjectsImpl(IRP, A) {}
};

struct AAUnderlyingObjectsCallSiteReturned final : AAUnderlyingObjectsImpl {
  AAUnderlyingObjectsCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAUnderlyingObjectsImpl(IRP, A) {}
};

struct AAUnderlyingObjectsFunction final : AAUnderlyingObjectsImpl {
  AAUnderlyingObjectsFunction(const IRPosition &IRP, Attributor &A)
      : AAUnderlyingObjectsImpl(IRP, A) {}
};
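/// Collects the transitive uses of a global value, following it through call
/// arguments and returns across function boundaries; isPotentialUse() then
/// answers whether a particular use may actually be a use of the global.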
struct AAGlobalValueInfoFloating : public AAGlobalValueInfo {
  AAGlobalValueInfoFloating(const IRPosition &IRP, Attributor &A)
      : AAGlobalValueInfo(IRP, A) {}

  // ...
  bool checkUse(Attributor &A, const Use &U, bool &Follow,
                SmallVectorImpl<const Value *> &Worklist) {
    // ...
    LLVM_DEBUG(dbgs() << "[AAGlobalValueInfo] Check use: " << *U.get() << " in "
                      << *UInst << "\n");
    // ...
      int Idx = &Cmp->getOperandUse(0) == &U;
      // ...
      return U == &getAnchorValue();
    // ...
    auto CallSitePred = [&](AbstractCallSite ACS) {
      Worklist.push_back(ACS.getInstruction());
      // ...
    };
    bool UsedAssumedInformation = false;
    // ...
    if (!A.checkForAllCallSites(CallSitePred, *UInst->getFunction(),
                                // ...
                                UsedAssumedInformation))
      // ...
    if (!Fn || !A.isFunctionIPOAmendable(*Fn))
      // ...
  }

  // ...
    unsigned NumUsesBefore = Uses.size();
    // ...
    SmallPtrSet<const Value *, 8> Visited;
    // ...
    auto UsePred = [&](const Use &U, bool &Follow) -> bool {
      // ...
      return checkUse(A, U, Follow, Worklist);
    };
    auto EquivalentUseCB = [&](const Use &OldU, const Use &NewU) {
      Uses.insert(&OldU);
      // ...
    };
    while (!Worklist.empty()) {
      // ...
      if (!Visited.insert(V).second)
        // ...
      if (!A.checkForAllUses(UsePred, *this, *V,
                             // ...
                             DepClassTy::OPTIONAL,
                             true, EquivalentUseCB)) {
        return indicatePessimisticFixpoint();
      }
    }
    return Uses.size() == NumUsesBefore ? ChangeStatus::UNCHANGED
                                        : ChangeStatus::CHANGED;
  }

  bool isPotentialUse(const Use &U) const override {
    return !isValidState() || Uses.contains(&U);
  }

  // ...
    return ChangeStatus::UNCHANGED;
  // ...
  const std::string getAsStr(Attributor *A) const override {
    return "[" + std::to_string(Uses.size()) + " uses]";
  }

  void trackStatistics() const override {
    // ...
  }

  SmallPtrSet<const Use *, 8> Uses;
};
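/// Determines the potential callees of an indirect call site. Candidates are
/// seeded from `!callees` metadata (e.g. `call void %fp(), !callees !0` with
/// `!0 = !{ptr @f, ptr @g}`) or, in a closed-world module, from all
/// indirectly callable functions, and are filtered via AAGlobalValueInfo.
/// On manifest the call is promoted directly or specialized behind
/// compare-and-branch guards.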
struct AAIndirectCallInfoCallSite : public AAIndirectCallInfo {
  AAIndirectCallInfoCallSite(const IRPosition &IRP, Attributor &A)
      : AAIndirectCallInfo(IRP, A) {}

  // ...
    auto *MD = getCtxI()->getMetadata(LLVMContext::MD_callees);
    if (!MD && !A.isClosedWorldModule())
      // ...
      for (const auto &Op : MD->operands())
        // ...
          PotentialCallees.insert(Callee);
    } else if (A.isClosedWorldModule()) {
      // ...
          A.getInfoCache().getIndirectlyCallableFunctions(A);
      PotentialCallees.insert_range(IndirectlyCallableFunctions);
    }
    // ...
    if (PotentialCallees.empty())
      indicateOptimisticFixpoint();
  // ...
    SmallSetVector<Function *, 4> AssumedCalleesNow;
    bool AllCalleesKnownNow = AllCalleesKnown;

    auto CheckPotentialCalleeUse = [&](Function &PotentialCallee,
                                       bool &UsedAssumedInformation) {
      const auto *GIAA = A.getAAFor<AAGlobalValueInfo>(
          // ...
      if (!GIAA || GIAA->isPotentialUse(CalleeUse))
        // ...
      UsedAssumedInformation = !GIAA->isAtFixpoint();
      // ...
    };

    auto AddPotentialCallees = [&]() {
      for (auto *PotentialCallee : PotentialCallees) {
        bool UsedAssumedInformation = false;
        if (CheckPotentialCalleeUse(*PotentialCallee, UsedAssumedInformation))
          AssumedCalleesNow.insert(PotentialCallee);
        // ...
      }
    };

    bool UsedAssumedInformation = false;
    // ...
                                      AA::ValueScope::AnyScope,
                                      UsedAssumedInformation)) {
      if (PotentialCallees.empty())
        return indicatePessimisticFixpoint();
      AddPotentialCallees();
      // ...
    }

    auto CheckPotentialCallee = [&](Function &Fn) {
      if (!PotentialCallees.empty() && !PotentialCallees.count(&Fn))
        // ...
      auto &CachedResult = FilterResults[&Fn];
      if (CachedResult.has_value())
        return CachedResult.value();
      // ...
      bool UsedAssumedInformation = false;
      if (!CheckPotentialCalleeUse(Fn, UsedAssumedInformation)) {
        if (!UsedAssumedInformation)
          CachedResult = false;
        // ...
      }
      // ...
      for (int I = NumCBArgs; I < NumFnArgs; ++I) {
        bool IsKnown = false;
        // ...
                DepClassTy::OPTIONAL, IsKnown)) {
          // ...
          CachedResult = false;
          // ...
        }
      }
      CachedResult = true;
      // ...
    };

    // ...
    for (auto &VAC : Values) {
      // ...
          VAC.getValue()->getType()->getPointerAddressSpace() == 0)
        // ...
      if (CheckPotentialCallee(*VACFn))
        AssumedCalleesNow.insert(VACFn);
      // ...
      if (!PotentialCallees.empty()) {
        AddPotentialCallees();
        // ...
      }
      AllCalleesKnownNow = false;
    }

    if (AssumedCalleesNow == AssumedCallees &&
        AllCalleesKnown == AllCalleesKnownNow)
      return ChangeStatus::UNCHANGED;
    // ...
    std::swap(AssumedCallees, AssumedCalleesNow);
    AllCalleesKnown = AllCalleesKnownNow;
    return ChangeStatus::CHANGED;
  }

  // ...
    if (!AllCalleesKnown && AssumedCallees.empty())
      return ChangeStatus::UNCHANGED;
    // ...
    bool UsedAssumedInformation = false;
    if (A.isAssumedDead(*CB, this, nullptr,
                        UsedAssumedInformation))
      return ChangeStatus::UNCHANGED;
    // ...
    if (FP->getType()->getPointerAddressSpace())
      FP = new AddrSpaceCastInst(FP, PointerType::get(FP->getContext(), 0),
                                 // ...
    if (AssumedCallees.empty()) {
      assert(AllCalleesKnown &&
             "Expected all callees to be known if there are none.");
      A.changeToUnreachableAfterManifest(CB);
      return ChangeStatus::CHANGED;
    }
    // ...
    if (AllCalleesKnown && AssumedCallees.size() == 1) {
      auto *NewCallee = AssumedCallees.front();
      // ...
        NumIndirectCallsPromoted++;
        return ChangeStatus::CHANGED;
      // ...
      A.deleteAfterManifest(*CB);
      return ChangeStatus::CHANGED;
    }
    // ...
    bool SpecializedForAnyCallees = false;
    bool SpecializedForAllCallees = AllCalleesKnown;
    ICmpInst *LastCmp = nullptr;
    // ...
    for (Function *NewCallee : AssumedCallees) {
      if (!A.shouldSpecializeCallSiteForCallee(*this, *CB, *NewCallee,
                                               AssumedCallees.size())) {
        SkippedAssumedCallees.push_back(NewCallee);
        SpecializedForAllCallees = false;
        // ...
      }
      SpecializedForAnyCallees = true;
      // ...
      A.registerManifestAddedBasicBlock(*ThenTI->getParent());
      A.registerManifestAddedBasicBlock(*IP->getParent());
      // ...
        A.registerManifestAddedBasicBlock(*ElseBB);
        // ...
        SplitTI->replaceUsesOfWith(CBBB, ElseBB);
      // ...
      CastInst *RetBC = nullptr;
      CallInst *NewCall = nullptr;
      // ...
        NumIndirectCallsPromoted++;
      // ...
    }

    auto AttachCalleeMetadata = [&](CallBase &IndirectCB) {
      if (!AllCalleesKnown)
        return ChangeStatus::UNCHANGED;
      MDBuilder MDB(IndirectCB.getContext());
      MDNode *Callees = MDB.createCallees(SkippedAssumedCallees);
      IndirectCB.setMetadata(LLVMContext::MD_callees, Callees);
      return ChangeStatus::CHANGED;
    };

    if (!SpecializedForAnyCallees)
      return AttachCalleeMetadata(*CB);
    // ...
    if (SpecializedForAllCallees) {
      // ...
      new UnreachableInst(IP->getContext(), IP);
      IP->eraseFromParent();
      // ...
    }
    // ...
      CBClone->setName(CB->getName());
      CBClone->insertBefore(*IP->getParent(), IP);
      NewCalls.push_back({CBClone, nullptr});
      AttachCalleeMetadata(*CBClone);
    // ...
                    CB->getParent()->getFirstInsertionPt());
    for (auto &It : NewCalls) {
      CallBase *NewCall = It.first;
      Instruction *CallRet = It.second ? It.second : It.first;
      // ...
    }
    A.deleteAfterManifest(*CB);
    Changed = ChangeStatus::CHANGED;
    // ...
  }

  const std::string getAsStr(Attributor *A) const override {
    return std::string(AllCalleesKnown ? "eliminate" : "specialize") +
           " indirect call site with " + std::to_string(AssumedCallees.size()) +
           // ...
  }

  void trackStatistics() const override {
    if (AllCalleesKnown) {
      // ...
          Eliminated, CallSites,
          "Number of indirect call sites eliminated via specialization")
      // ...
          "Number of indirect call sites specialized")
    }
  }

  bool foreachCallee(function_ref<bool(Function *)> CB) const override {
    return isValidState() && AllCalleesKnown && all_of(AssumedCallees, CB);
  }

  // ...
  DenseMap<Function *, std::optional<bool>> FilterResults;

  // ...
  SmallSetVector<Function *, 4> PotentialCallees;

  // ...
  SmallSetVector<Function *, 4> AssumedCallees;

  // ...
  bool AllCalleesKnown = true;
};
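/// Deduces whether all loads through the associated pointer may be tagged
/// `!invariant.load`, tracked as a small bit state (noalias, no effects,
/// locally invariant, locally constrained); manifest attaches the metadata to
/// the qualifying loads.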
struct AAInvariantLoadPointerImpl
    : public StateWrapper<BitIntegerState<uint8_t, 15>,
                          AAInvariantLoadPointer> {
  // ...
    IS_NOALIAS = 1 << 0,
    IS_NOEFFECT = 1 << 1,
    IS_LOCALLY_INVARIANT = 1 << 2,
    IS_LOCALLY_CONSTRAINED = 1 << 3,
    IS_BEST_STATE = IS_NOALIAS | IS_NOEFFECT | IS_LOCALLY_INVARIANT |
                    IS_LOCALLY_CONSTRAINED,
  // ...
  static_assert(getBestState() == IS_BEST_STATE, "Unexpected best state");

  // ...
      StateWrapper<BitIntegerState<uint8_t, 15>, AAInvariantLoadPointer>;

  AAInvariantLoadPointerImpl(const IRPosition &IRP, Attributor &A)
      // ...

  bool isKnownInvariant() const final {
    return isKnownLocallyInvariant() && isKnown(IS_LOCALLY_CONSTRAINED);
  }

  bool isKnownLocallyInvariant() const final {
    if (isKnown(IS_LOCALLY_INVARIANT))
      // ...
    return isKnown(IS_NOALIAS | IS_NOEFFECT);
  }

  bool isAssumedInvariant() const final {
    return isAssumedLocallyInvariant() && isAssumed(IS_LOCALLY_CONSTRAINED);
  }

  bool isAssumedLocallyInvariant() const final {
    if (isAssumed(IS_LOCALLY_INVARIANT))
      // ...
    return isAssumed(IS_NOALIAS | IS_NOEFFECT);
  }

  // ...
    if (requiresNoAlias() && !isAssumed(IS_NOALIAS))
      return indicatePessimisticFixpoint();
    // ...
    Changed |= updateLocalInvariance(A);
    // ...
    if (!isKnownInvariant())
      return ChangeStatus::UNCHANGED;
    // ...
    const Value *Ptr = &getAssociatedValue();
    const auto TagInvariantLoads = [&](const Use &U, bool &) {
      if (U.get() != Ptr)
        // ...
      if (!A.isRunOn(I->getFunction()))
        // ...
      if (I->hasMetadata(LLVMContext::MD_invariant_load))
        // ...
        LI->setMetadata(LLVMContext::MD_invariant_load,
                        // ...
        Changed = ChangeStatus::CHANGED;
      // ...
    };
    (void)A.checkForAllUses(TagInvariantLoads, *this, *Ptr);
    // ...
  }

  const std::string getAsStr(Attributor *) const override {
    if (isKnownInvariant())
      return "load-invariant pointer";
    return "non-invariant pointer";
  }

  void trackStatistics() const override {}

  // ...
  bool requiresNoAlias() const {
    switch (getPositionKind()) {
    // ...
    case IRP_CALL_SITE:
      // ...
    case IRP_CALL_SITE_RETURNED: {
      // ...
    }
    case IRP_ARGUMENT: {
      const Function *F = getAssociatedFunction();
      assert(F && "no associated function for argument");
      // ...
    }
    // ...
  }

  bool isExternal() const {
    const Function *F = getAssociatedFunction();
    // ...
           getPositionKind() != IRP_CALL_SITE_RETURNED;
  }

  // ...
    if (isKnown(IS_NOALIAS) || !isAssumed(IS_NOALIAS))
      return ChangeStatus::UNCHANGED;
    // ...
    if (const auto *ANoAlias = A.getOrCreateAAFor<AANoAlias>(
            getIRPosition(), this, DepClassTy::REQUIRED)) {
      if (ANoAlias->isKnownNoAlias()) {
        addKnownBits(IS_NOALIAS);
        return ChangeStatus::CHANGED;
      }
      // ...
      if (!ANoAlias->isAssumedNoAlias()) {
        removeAssumedBits(IS_NOALIAS);
        return ChangeStatus::CHANGED;
      }
      // ...
      return ChangeStatus::UNCHANGED;
    }
    // ...
    if (const Argument *Arg = getAssociatedArgument()) {
      // ...
        addKnownBits(IS_NOALIAS);
        return ChangeStatus::UNCHANGED;
      // ...
    }
    removeAssumedBits(IS_NOALIAS);
    return ChangeStatus::CHANGED;
  // ...
    return ChangeStatus::UNCHANGED;
  // ...
    if (isKnown(IS_NOEFFECT) || !isAssumed(IS_NOEFFECT))
      return ChangeStatus::UNCHANGED;
    // ...
    if (!getAssociatedFunction())
      return indicatePessimisticFixpoint();
    // ...
      return indicatePessimisticFixpoint();
    // ...
    const auto HasNoEffectLoads = [&](const Use &U, bool &) {
      // ...
      return !LI || !LI->mayHaveSideEffects();
    };
    if (!A.checkForAllUses(HasNoEffectLoads, *this, getAssociatedValue()))
      return indicatePessimisticFixpoint();
    // ...
    if (const auto *AMemoryBehavior = A.getOrCreateAAFor<AAMemoryBehavior>(
            getIRPosition(), this, DepClassTy::REQUIRED)) {
      // ...
      if (!AMemoryBehavior->isAssumedReadOnly())
        return indicatePessimisticFixpoint();
      // ...
      if (AMemoryBehavior->isKnownReadOnly()) {
        addKnownBits(IS_NOEFFECT);
        return ChangeStatus::UNCHANGED;
      }
      // ...
      return ChangeStatus::UNCHANGED;
    }
    // ...
    if (const Argument *Arg = getAssociatedArgument()) {
      // ...
        addKnownBits(IS_NOEFFECT);
        return ChangeStatus::UNCHANGED;
      // ...
      return indicatePessimisticFixpoint();
    }
    // ...
    return ChangeStatus::UNCHANGED;
  // ...
    if (isKnown(IS_LOCALLY_INVARIANT) || !isAssumed(IS_LOCALLY_INVARIANT))
      return ChangeStatus::UNCHANGED;
    // ...
    const auto *AUO = A.getOrCreateAAFor<AAUnderlyingObjects>(
        getIRPosition(), this, DepClassTy::REQUIRED);
    // ...
      return ChangeStatus::UNCHANGED;
    // ...
    bool UsedAssumedInformation = false;
    const auto IsLocallyInvariantLoadIfPointer = [&](const Value &V) {
      if (!V.getType()->isPointerTy())
        // ...
      const auto *IsInvariantLoadPointer =
          // ...
              DepClassTy::REQUIRED);
      // ...
      if (!IsInvariantLoadPointer)
        // ...
      if (IsInvariantLoadPointer->isKnownLocallyInvariant())
        // ...
      if (!IsInvariantLoadPointer->isAssumedLocallyInvariant())
        // ...
      UsedAssumedInformation = true;
      // ...
    };
    if (!AUO->forallUnderlyingObjects(IsLocallyInvariantLoadIfPointer))
      return indicatePessimisticFixpoint();
    // ...
      if (!IsLocallyInvariantLoadIfPointer(*Arg))
        return indicatePessimisticFixpoint();
    // ...
    if (!UsedAssumedInformation) {
      // ...
      addKnownBits(IS_LOCALLY_INVARIANT);
      return ChangeStatus::CHANGED;
    }
    // ...
    return ChangeStatus::UNCHANGED;
  }
};

struct AAInvariantLoadPointerFloating final : AAInvariantLoadPointerImpl {
  AAInvariantLoadPointerFloating(const IRPosition &IRP, Attributor &A)
      : AAInvariantLoadPointerImpl(IRP, A) {}
  // ...
};

struct AAInvariantLoadPointerReturned final : AAInvariantLoadPointerImpl {
  AAInvariantLoadPointerReturned(const IRPosition &IRP, Attributor &A)
      : AAInvariantLoadPointerImpl(IRP, A) {}

  // ...
    removeAssumedBits(IS_LOCALLY_CONSTRAINED);
  // ...
};

struct AAInvariantLoadPointerCallSiteReturned final
    : AAInvariantLoadPointerImpl {
  AAInvariantLoadPointerCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAInvariantLoadPointerImpl(IRP, A) {}

  // ...
    const Function *F = getAssociatedFunction();
    assert(F && "no associated function for return from call");
    // ...
    if (!F->isDeclaration() && !F->isIntrinsic())
      return AAInvariantLoadPointerImpl::initialize(A);
    // ...
      return AAInvariantLoadPointerImpl::initialize(A);
    // ...
    if (F->onlyReadsMemory() && F->hasNoSync())
      return AAInvariantLoadPointerImpl::initialize(A);
    // ...
    indicatePessimisticFixpoint();
  }
};

struct AAInvariantLoadPointerArgument final : AAInvariantLoadPointerImpl {
  AAInvariantLoadPointerArgument(const IRPosition &IRP, Attributor &A)
      : AAInvariantLoadPointerImpl(IRP, A) {}

  // ...
    const Function *F = getAssociatedFunction();
    assert(F && "no associated function for argument");
    // ...
      addKnownBits(IS_LOCALLY_CONSTRAINED);
    // ...
    if (!F->hasLocalLinkage())
      removeAssumedBits(IS_LOCALLY_CONSTRAINED);
  }
};

struct AAInvariantLoadPointerCallSiteArgument final
    : AAInvariantLoadPointerImpl {
  AAInvariantLoadPointerCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAInvariantLoadPointerImpl(IRP, A) {}
};
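/// Rewrites the pointer operand of a memory instruction to a new address
/// space, either by reusing the original (pre-addrspacecast) value or by
/// inserting a fresh `addrspacecast`; volatile accesses are additionally
/// checked against target information (TTI) before any change is made.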
template <typename InstType>
static bool makeChange(Attributor &A, InstType *MemInst, const Use &U,
                       Value *OriginalValue, PointerType *NewPtrTy,
                       bool UseOriginalValue) {
  if (U.getOperandNo() != InstType::getPointerOperandIndex())
    // ...
  if (MemInst->isVolatile()) {
    auto *TTI = A.getInfoCache().getAnalysisResultForFunction<TargetIRAnalysis>(
        *MemInst->getFunction());
    unsigned NewAS = NewPtrTy->getPointerAddressSpace();
    // ...
  }
  if (UseOriginalValue) {
    A.changeUseAfterManifest(const_cast<Use &>(U), *OriginalValue);
    // ...
  }
  Instruction *CastInst = new AddrSpaceCastInst(OriginalValue, NewPtrTy);
  // ...
  A.changeUseAfterManifest(const_cast<Use &>(U), *CastInst);
  // ...
}
struct AAAddressSpaceImpl : public AAAddressSpace {
  AAAddressSpaceImpl(const IRPosition &IRP, Attributor &A)
      : AAAddressSpace(IRP, A) {}

  // ...
    assert(isValidState() && "the AA is invalid");
    return AssumedAddressSpace;
  // ...
    assert(getAssociatedType()->isPtrOrPtrVectorTy() &&
           "Associated value is not a pointer");
    // ...
    if (!A.getInfoCache().getFlatAddressSpace().has_value()) {
      indicatePessimisticFixpoint();
      // ...
    }
    unsigned FlatAS = A.getInfoCache().getFlatAddressSpace().value();
    unsigned AS = getAssociatedType()->getPointerAddressSpace();
    if (AS != FlatAS) {
      [[maybe_unused]] bool R = takeAddressSpace(AS);
      assert(R && "The take should happen");
      indicateOptimisticFixpoint();
    }
  // ...
    uint32_t OldAddressSpace = AssumedAddressSpace;
    unsigned FlatAS = A.getInfoCache().getFlatAddressSpace().value();
    // ...
    auto CheckAddressSpace = [&](Value &Obj) {
      // ...
      unsigned ObjAS = Obj.getType()->getPointerAddressSpace();
      if (ObjAS != FlatAS)
        return takeAddressSpace(ObjAS);
      // ...
          A.getInfoCache().getAnalysisResultForFunction<TargetIRAnalysis>(*F);
      // ...
      if (AssumedAS != ~0U)
        return takeAddressSpace(AssumedAS);
      // ...
      return takeAddressSpace(FlatAS);
    };

    auto *AUO = A.getOrCreateAAFor<AAUnderlyingObjects>(getIRPosition(), this,
                                                        DepClassTy::REQUIRED);
    if (!AUO->forallUnderlyingObjects(CheckAddressSpace))
      return indicatePessimisticFixpoint();
    // ...
    return OldAddressSpace == AssumedAddressSpace ? ChangeStatus::UNCHANGED
                                                  : ChangeStatus::CHANGED;
  // ...
    if (NewAS == InvalidAddressSpace ||
        // ...
      return ChangeStatus::UNCHANGED;
    // ...
    unsigned FlatAS = A.getInfoCache().getFlatAddressSpace().value();
    // ...
    Value *AssociatedValue = &getAssociatedValue();
    Value *OriginalValue = peelAddrspacecast(AssociatedValue, FlatAS);
    // ...
        PointerType::get(getAssociatedType()->getContext(), NewAS);
    bool UseOriginalValue =
        // ...
    auto Pred = [&](const Use &U, bool &) {
      if (U.get() != AssociatedValue)
        // ...
        makeChange(A, LI, U, OriginalValue, NewPtrTy, UseOriginalValue);
      // ...
        makeChange(A, SI, U, OriginalValue, NewPtrTy, UseOriginalValue);
      // ...
        makeChange(A, RMW, U, OriginalValue, NewPtrTy, UseOriginalValue);
      // ...
        makeChange(A, CmpX, U, OriginalValue, NewPtrTy, UseOriginalValue);
      // ...
    };
    (void)A.checkForAllUses(Pred, *this, getAssociatedValue(),
                            // ...
    return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
  // ...
  const std::string getAsStr(Attributor *A) const override {
    if (!isValidState())
      return "addrspace(<invalid>)";
    return "addrspace(" +
           (AssumedAddressSpace == InvalidAddressSpace
                // ...
                : std::to_string(AssumedAddressSpace)) +
           // ...
  }

  // ...
  uint32_t AssumedAddressSpace = InvalidAddressSpace;

  bool takeAddressSpace(uint32_t AS) {
    if (AssumedAddressSpace == InvalidAddressSpace) {
      AssumedAddressSpace = AS;
      // ...
    }
    return AssumedAddressSpace == AS;
  }

  static Value *peelAddrspacecast(Value *V, unsigned FlatAS) {
    // ...
      assert(I->getSrcAddressSpace() != FlatAS &&
             "there should not be flat AS -> non-flat AS");
      return I->getPointerOperand();
    // ...
      if (C->getOpcode() == Instruction::AddrSpaceCast) {
        assert(C->getOperand(0)->getType()->getPointerAddressSpace() !=
                   // ...
               "there should not be flat AS -> non-flat AS X");
        return C->getOperand(0);
      }
    // ...
  }
};

struct AAAddressSpaceFloating final : AAAddressSpaceImpl {
  AAAddressSpaceFloating(const IRPosition &IRP, Attributor &A)
      : AAAddressSpaceImpl(IRP, A) {}

  void trackStatistics() const override {
    // ...
  }
};

struct AAAddressSpaceReturned final : AAAddressSpaceImpl {
  AAAddressSpaceReturned(const IRPosition &IRP, Attributor &A)
      : AAAddressSpaceImpl(IRP, A) {}

  // ...
    (void)indicatePessimisticFixpoint();
  // ...
  void trackStatistics() const override {
    // ...
  }
};

struct AAAddressSpaceCallSiteReturned final : AAAddressSpaceImpl {
  AAAddressSpaceCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAAddressSpaceImpl(IRP, A) {}

  void trackStatistics() const override {
    // ...
  }
};

struct AAAddressSpaceArgument final : AAAddressSpaceImpl {
  AAAddressSpaceArgument(const IRPosition &IRP, Attributor &A)
      : AAAddressSpaceImpl(IRP, A) {}
  // ...
};

struct AAAddressSpaceCallSiteArgument final : AAAddressSpaceImpl {
  AAAddressSpaceCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAAddressSpaceImpl(IRP, A) {}

  // ...
    (void)indicatePessimisticFixpoint();
  // ...
  void trackStatistics() const override {
    // ...
  }
};
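/// Tracks which address spaces the underlying objects of a flat pointer can
/// not belong to, kept as an interval map of excluded ranges; manifest
/// attaches the result as `!noalias.addrspace` range metadata to the memory
/// instructions that use the pointer.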
struct AANoAliasAddrSpaceImpl : public AANoAliasAddrSpace {
  AANoAliasAddrSpaceImpl(const IRPosition &IRP, Attributor &A)
      : AANoAliasAddrSpace(IRP, A) {}

  // ...
    assert(getAssociatedType()->isPtrOrPtrVectorTy() &&
           "Associated value is not a pointer");
    // ...
    std::optional<unsigned> FlatAS = A.getInfoCache().getFlatAddressSpace();
    if (!FlatAS.has_value()) {
      indicatePessimisticFixpoint();
      // ...
    }
    unsigned AS = getAssociatedType()->getPointerAddressSpace();
    if (AS != *FlatAS) {
      // ...
      indicateOptimisticFixpoint();
    }
  // ...
    unsigned FlatAS = A.getInfoCache().getFlatAddressSpace().value();
    uint32_t OldAssumed = getAssumed();
    // ...
    auto CheckAddressSpace = [&](Value &Obj) {
      // ...
      unsigned AS = Obj.getType()->getPointerAddressSpace();
      // ...
      removeAS(Obj.getType()->getPointerAddressSpace());
      // ...
    };
    const AAUnderlyingObjects *AUO = A.getOrCreateAAFor<AAUnderlyingObjects>(
        getIRPosition(), this, DepClassTy::REQUIRED);
    // ...
      return indicatePessimisticFixpoint();
    // ...
    return OldAssumed == getAssumed() ? ChangeStatus::UNCHANGED
                                      : ChangeStatus::CHANGED;
  // ...
    unsigned FlatAS = A.getInfoCache().getFlatAddressSpace().value();
    // ...
    unsigned AS = getAssociatedType()->getPointerAddressSpace();
    if (AS != FlatAS || Map.empty())
      return ChangeStatus::UNCHANGED;
    // ...
    LLVMContext &Ctx = getAssociatedValue().getContext();
    MDNode *NoAliasASNode = nullptr;
    MDBuilder MDB(Ctx);
    // ...
    for (RangeMap::const_iterator I = Map.begin(); I != Map.end(); I++) {
      // ...
      unsigned Upper = I.stop();
      unsigned Lower = I.start();
      if (!NoAliasASNode) {
        NoAliasASNode = MDB.createRange(APInt(32, Lower), APInt(32, Upper + 1));
        // ...
      }
      MDNode *ASRange = MDB.createRange(APInt(32, Lower), APInt(32, Upper + 1));
      // ...
    }

    Value *AssociatedValue = &getAssociatedValue();
    // ...
    auto AddNoAliasAttr = [&](const Use &U, bool &) {
      if (U.get() != AssociatedValue)
        // ...
      if (!Inst || Inst->hasMetadata(LLVMContext::MD_noalias_addrspace))
        // ...
      Inst->setMetadata(LLVMContext::MD_noalias_addrspace, NoAliasASNode);
      // ...
    };
    (void)A.checkForAllUses(AddNoAliasAttr, *this, *AssociatedValue,
                            // ...
    return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
  // ...
  const std::string getAsStr(Attributor *A) const override {
    if (!isValidState())
      return "<invalid>";
    // ...
    raw_string_ostream OS(Str);
    OS << "CanNotBeAddrSpace(";
    for (RangeMap::const_iterator I = Map.begin(); I != Map.end(); I++) {
      unsigned Upper = I.stop();
      unsigned Lower = I.start();
      OS << ' ' << '[' << Upper << ',' << Lower + 1 << ')';
    }
    // ...
  }

  // ...
  void removeAS(unsigned AS) {
    RangeMap::iterator I = Map.find(AS);
    // ...
    if (I != Map.end()) {
      unsigned Upper = I.stop();
      unsigned Lower = I.start();
      // ...
      if (AS != ~((unsigned)0) && AS + 1 <= Upper)
        // ...
      if (AS != 0 && Lower <= AS - 1)
        // ...
    }
  }

  void resetASRanges(Attributor &A) {
    // ...
    Map.insert(0, A.getInfoCache().getMaxAddrSpace(), true);
  }
};

struct AANoAliasAddrSpaceFloating final : AANoAliasAddrSpaceImpl {
  AANoAliasAddrSpaceFloating(const IRPosition &IRP, Attributor &A)
      : AANoAliasAddrSpaceImpl(IRP, A) {}

  void trackStatistics() const override {
    // ...
  }
};

struct AANoAliasAddrSpaceReturned final : AANoAliasAddrSpaceImpl {
  AANoAliasAddrSpaceReturned(const IRPosition &IRP, Attributor &A)
      : AANoAliasAddrSpaceImpl(IRP, A) {}

  void trackStatistics() const override {
    // ...
  }
};

struct AANoAliasAddrSpaceCallSiteReturned final : AANoAliasAddrSpaceImpl {
  AANoAliasAddrSpaceCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AANoAliasAddrSpaceImpl(IRP, A) {}

  void trackStatistics() const override {
    // ...
  }
};

struct AANoAliasAddrSpaceArgument final : AANoAliasAddrSpaceImpl {
  AANoAliasAddrSpaceArgument(const IRPosition &IRP, Attributor &A)
      : AANoAliasAddrSpaceImpl(IRP, A) {}

  void trackStatistics() const override {
    // ...
  }
};

struct AANoAliasAddrSpaceCallSiteArgument final : AANoAliasAddrSpaceImpl {
  AANoAliasAddrSpaceCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANoAliasAddrSpaceImpl(IRP, A) {}

  void trackStatistics() const override {
    // ...
  }
};
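/// Computes how much of an allocation is actually accessed, based on the
/// access bins provided by AAPointerInfo, so that over-sized allocas can be
/// shrunk at manifest time to the number of bytes that are really used.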
13340struct AAAllocationInfoImpl :
public AAAllocationInfo {
13341 AAAllocationInfoImpl(
const IRPosition &IRP, Attributor &
A)
13342 : AAAllocationInfo(IRP,
A) {}
13344 std::optional<TypeSize> getAllocatedSize()
const override {
13345 assert(isValidState() &&
"the AA is invalid");
13346 return AssumedAllocatedSize;
13349 std::optional<TypeSize> findInitialAllocationSize(Instruction *
I,
13350 const DataLayout &
DL) {
13353 switch (
I->getOpcode()) {
13354 case Instruction::Alloca: {
13359 return std::nullopt;
13365 const IRPosition &IRP = getIRPosition();
13370 return indicatePessimisticFixpoint();
13372 bool IsKnownNoCapture;
13374 A,
this, IRP, DepClassTy::OPTIONAL, IsKnownNoCapture))
13375 return indicatePessimisticFixpoint();
13377 const AAPointerInfo *PI =
13378 A.getOrCreateAAFor<AAPointerInfo>(IRP, *
this, DepClassTy::REQUIRED);
13381 return indicatePessimisticFixpoint();
13384 return indicatePessimisticFixpoint();
13386 const DataLayout &
DL =
A.getDataLayout();
13387 const auto AllocationSize = findInitialAllocationSize(
I,
DL);
13390 if (!AllocationSize)
13391 return indicatePessimisticFixpoint();
13395 if (*AllocationSize == 0)
13396 return indicatePessimisticFixpoint();
13402 return indicatePessimisticFixpoint();
13404 if (BinSize == 0) {
13405 auto NewAllocationSize = std::make_optional<TypeSize>(0,
false);
13406 if (!changeAllocationSize(NewAllocationSize))
13407 return ChangeStatus::UNCHANGED;
13408 return ChangeStatus::CHANGED;
13412 const auto &It = PI->
begin();
13415 if (It->first.Offset != 0)
13416 return indicatePessimisticFixpoint();
13418 uint64_t SizeOfBin = It->first.Offset + It->first.Size;
13420 if (SizeOfBin >= *AllocationSize)
13421 return indicatePessimisticFixpoint();
13423 auto NewAllocationSize = std::make_optional<TypeSize>(SizeOfBin * 8,
false);
13425 if (!changeAllocationSize(NewAllocationSize))
13426 return ChangeStatus::UNCHANGED;
13428 return ChangeStatus::CHANGED;
13434    assert(isValidState() &&
13435           "Manifest should only be called if the state is valid.");
13439    auto FixedAllocatedSizeInBits = getAllocatedSize()->getFixedValue();
13441    unsigned long NumBytesToAllocate = (FixedAllocatedSizeInBits + 7) / 8;
13443    switch (I->getOpcode()) {
13445    case Instruction::Alloca: {
13449      Type *CharType = Type::getInt8Ty(I->getContext());
13451      auto *NumBytesToValue =
13452          ConstantInt::get(I->getContext(), APInt(32, NumBytesToAllocate));
13455      insertPt = std::next(insertPt);
13456      AllocaInst *NewAllocaInst =
13461      return ChangeStatus::CHANGED;
13469    return ChangeStatus::UNCHANGED;
13473  const std::string getAsStr(Attributor *A) const override {
13474    if (!isValidState())
13475      return "allocationinfo(<invalid>)";
13476    return "allocationinfo(" +
13477           (AssumedAllocatedSize == HasNoAllocationSize
13479                : std::to_string(AssumedAllocatedSize->getFixedValue())) +
13484  std::optional<TypeSize> AssumedAllocatedSize = HasNoAllocationSize;
13488  bool changeAllocationSize(std::optional<TypeSize> Size) {
13489    if (AssumedAllocatedSize == HasNoAllocationSize ||
13490        AssumedAllocatedSize != Size) {
13491      AssumedAllocatedSize = Size;
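// NOTE (editorial sketch): manifest, of which only fragments are shown,
// rewrites a shrinkable alloca as a plain byte allocation of the assumed size.
// Continuing the 32-bit example sketched above, the rewrite would look
// approximately like:
//
//   before:  %buf = alloca [64 x i8]
//   after:   %buf.shrunk = alloca i8, i32 4   ; NumBytesToAllocate == 4
//
// The elided lines pick the insertion point and redirect users of the original
// alloca; treat this IR (including the %buf.shrunk name) as a hedged
// illustration rather than the exact output of the transformation.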
13498struct AAAllocationInfoFloating : AAAllocationInfoImpl {
13499  AAAllocationInfoFloating(const IRPosition &IRP, Attributor &A)
13500      : AAAllocationInfoImpl(IRP, A) {}
13502  void trackStatistics() const override {
13507struct AAAllocationInfoReturned : AAAllocationInfoImpl {
13508  AAAllocationInfoReturned(const IRPosition &IRP, Attributor &A)
13509      : AAAllocationInfoImpl(IRP, A) {}
13515    (void)indicatePessimisticFixpoint();
13518  void trackStatistics() const override {
13523struct AAAllocationInfoCallSiteReturned : AAAllocationInfoImpl {
13524  AAAllocationInfoCallSiteReturned(const IRPosition &IRP, Attributor &A)
13525      : AAAllocationInfoImpl(IRP, A) {}
13527  void trackStatistics() const override {
13532struct AAAllocationInfoArgument : AAAllocationInfoImpl {
13533  AAAllocationInfoArgument(const IRPosition &IRP, Attributor &A)
13534      : AAAllocationInfoImpl(IRP, A) {}
13536  void trackStatistics() const override {
13541struct AAAllocationInfoCallSiteArgument : AAAllocationInfoImpl {
13542  AAAllocationInfoCallSiteArgument(const IRPosition &IRP, Attributor &A)
13543      : AAAllocationInfoImpl(IRP, A) {}
13548    (void)indicatePessimisticFixpoint();
13551  void trackStatistics() const override {
13600#define SWITCH_PK_INV(CLASS, PK, POS_NAME) \
13601 case IRPosition::PK: \
13602 llvm_unreachable("Cannot create " #CLASS " for a " POS_NAME " position!");
13604#define SWITCH_PK_CREATE(CLASS, IRP, PK, SUFFIX) \
13605 case IRPosition::PK: \
13606 AA = new (A.Allocator) CLASS##SUFFIX(IRP, A); \
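// NOTE (editorial sketch): for a concrete attribute class these two macros
// expand into ordinary switch cases.  Assuming a hypothetical class AAFoo with
// an AAFooFunction subclass, an instantiation would produce roughly:
//
//   case IRPosition::IRP_FLOAT:
//     llvm_unreachable("Cannot create AAFoo for a floating position!");
//   case IRPosition::IRP_FUNCTION:
//     AA = new (A.Allocator) AAFooFunction(IRP, A);
//     break;
//
// AAFoo and AAFooFunction are placeholders; the trailing `break;` belongs to
// the part of SWITCH_PK_CREATE elided from this listing.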
13610#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13611 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13612 CLASS *AA = nullptr; \
13613 switch (IRP.getPositionKind()) { \
13614 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13615 SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
13616 SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
13617 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13618 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
13619 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
13620 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13621 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13626#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13627 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13628 CLASS *AA = nullptr; \
13629 switch (IRP.getPositionKind()) { \
13630 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13631 SWITCH_PK_INV(CLASS, IRP_FUNCTION, "function") \
13632 SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
13633 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13634 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13635 SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
13636 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13637 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13642#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS) \
13643 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13644 CLASS *AA = nullptr; \
13645 switch (IRP.getPositionKind()) { \
13646 SWITCH_PK_CREATE(CLASS, IRP, POS, SUFFIX) \
13648 llvm_unreachable("Cannot create " #CLASS " for position other than " #POS \
13654#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13655 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13656 CLASS *AA = nullptr; \
13657 switch (IRP.getPositionKind()) { \
13658 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13659 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13660 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13661 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13662 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13663 SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
13664 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13665 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13670#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13671 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13672 CLASS *AA = nullptr; \
13673 switch (IRP.getPositionKind()) { \
13674 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13675 SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
13676 SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
13677 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13678 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
13679 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
13680 SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
13681 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13686#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13687 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13688 CLASS *AA = nullptr; \
13689 switch (IRP.getPositionKind()) { \
13690 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13691 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13692 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13693 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13694 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13695 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13696 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13697 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
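// NOTE (editorial sketch): each abstract attribute instantiates exactly one of
// the CREATE_* macros above in the lines elided before the #undef block.  A
// function-only attribute would, for example, use something like
//
//   CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(AANoUnwind)
//
// which defines AANoUnwind::createForPosition and rejects every position other
// than a function or call site.  The concrete instantiation list is not part
// of the shown excerpt, so take the example as a plausible illustration only.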
13749#undef CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION
13750#undef CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION
13751#undef CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION
13752#undef CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION
13753#undef CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION
13754#undef CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION
13755#undef SWITCH_PK_CREATE
13756#undef SWITCH_PK_INV
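// NOTE (editorial sketch): createForPosition is not called by passes directly;
// it is reached through the Attributor when an attribute is first requested
// for a position.  Conceptually (a simplified paraphrase, not a verbatim copy
// of Attributor.h):
//
//   template <typename AAType>
//   const AAType *Attributor::getOrCreateAAFor(IRPosition IRP, ...) {
//     if (const AAType *AA = lookupAAFor<AAType>(IRP, ...))
//       return AA;                                    // reuse the cached AA
//     return &registerAA(AAType::createForPosition(IRP, *this));
//   }
//
// The real implementation also seeds the initial state and records
// dependences; only the dispatch through createForPosition is relevant here.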
assert(UImm &&(UImm !=~static_cast< T >(0)) &&"Invalid immediate!")
AMDGPU Register Bank Select
This file implements a class to represent arbitrary precision integral constant values and operations...
ReachingDefInfo InstSet & ToRemove
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
Function Alias Analysis false
This file contains the simple types necessary to represent the attributes associated with functions a...
#define STATS_DECLTRACK(NAME, TYPE, MSG)
static std::optional< Constant * > askForAssumedConstant(Attributor &A, const AbstractAttribute &QueryingAA, const IRPosition &IRP, Type &Ty)
static cl::opt< unsigned, true > MaxPotentialValues("attributor-max-potential-values", cl::Hidden, cl::desc("Maximum number of potential values to be " "tracked for each position."), cl::location(llvm::PotentialConstantIntValuesState::MaxPotentialValues), cl::init(7))
static void clampReturnedValueStates(Attributor &A, const AAType &QueryingAA, StateType &S, const IRPosition::CallBaseContext *CBContext=nullptr)
Clamp the information known for all returned values of a function (identified by QueryingAA) into S.
#define STATS_DECLTRACK_FN_ATTR(NAME)
#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxPotentialValuesIterations("attributor-max-potential-values-iterations", cl::Hidden, cl::desc("Maximum number of iterations we keep dismantling potential values."), cl::init(64))
#define STATS_DECLTRACK_CS_ATTR(NAME)
#define PIPE_OPERATOR(CLASS)
static bool mayBeInCycle(const CycleInfo *CI, const Instruction *I, bool HeaderOnly, Cycle **CPtr=nullptr)
#define STATS_DECLTRACK_ARG_ATTR(NAME)
static const Value * stripAndAccumulateOffsets(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Val, const DataLayout &DL, APInt &Offset, bool GetMinOffset, bool AllowNonInbounds, bool UseAssumed=false)
#define STATS_DECLTRACK_CSRET_ATTR(NAME)
static cl::opt< bool > ManifestInternal("attributor-manifest-internal", cl::Hidden, cl::desc("Manifest Attributor internal string attributes."), cl::init(false))
static Value * constructPointer(Value *Ptr, int64_t Offset, IRBuilder< NoFolder > &IRB)
Helper function to create a pointer based on Ptr, and advanced by Offset bytes.
#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define BUILD_STAT_NAME(NAME, TYPE)
static bool isDenselyPacked(Type *Ty, const DataLayout &DL)
Checks if a type could have padding bytes.
#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static const Value * getMinimalBaseOfPointer(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Ptr, int64_t &BytesOffset, const DataLayout &DL, bool AllowNonInbounds=false)
#define STATS_DECLTRACK_FNRET_ATTR(NAME)
#define STATS_DECLTRACK_CSARG_ATTR(NAME)
#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS)
#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxHeapToStackSize("max-heap-to-stack-size", cl::init(128), cl::Hidden)
#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define STATS_DECLTRACK_FLOATING_ATTR(NAME)
#define STATS_DECL(NAME, TYPE, MSG)
static GCRegistry::Add< ErlangGC > A("erlang", "erlang-compatible garbage collector")
static GCRegistry::Add< CoreCLRGC > E("coreclr", "CoreCLR-compatible GC")
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
static bool isReachableImpl(SmallVectorImpl< BasicBlock * > &Worklist, const StopSetT &StopSet, const SmallPtrSetImpl< BasicBlock * > *ExclusionSet, const DominatorTree *DT, const LoopInfo *LI)
This file contains the declarations for the subclasses of Constant, which represent the different fla...
This file declares an analysis pass that computes CycleInfo for LLVM IR, specialized from GenericCycl...
DXIL Forward Handle Accesses
This file defines DenseMapInfo traits for DenseMap.
Machine Check Debug Module
This file implements a map that provides insertion order iteration.
static unsigned getAddressSpace(const Value *V, unsigned MaxLookup)
ConstantRange Range(APInt(BitWidth, Low), APInt(BitWidth, High))
uint64_t IntrinsicInst * II
static StringRef getName(Value *V)
static cl::opt< RegAllocEvictionAdvisorAnalysisLegacy::AdvisorMode > Mode("regalloc-enable-advisor", cl::Hidden, cl::init(RegAllocEvictionAdvisorAnalysisLegacy::AdvisorMode::Default), cl::desc("Enable regalloc advisor mode"), cl::values(clEnumValN(RegAllocEvictionAdvisorAnalysisLegacy::AdvisorMode::Default, "default", "Default"), clEnumValN(RegAllocEvictionAdvisorAnalysisLegacy::AdvisorMode::Release, "release", "precompiled"), clEnumValN(RegAllocEvictionAdvisorAnalysisLegacy::AdvisorMode::Development, "development", "for training")))
dot regions Print regions of function to dot true view regions View regions of function(with no function bodies)"
Remove Loads Into Fake Uses
This builds on the llvm/ADT/GraphTraits.h file to find the strongly connected components (SCCs) of a ...
std::pair< BasicBlock *, BasicBlock * > Edge
BaseType
A given derived pointer can have multiple base pointers through phi/selects.
This file defines generic set operations that may be used on set's of different types,...
This file implements a set that has insertion order iteration characteristics.
This file defines the SmallPtrSet class.
This file defines the SmallVector class.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
static SymbolRef::Type getType(const Symbol *Sym)
static void initialize(TargetLibraryInfoImpl &TLI, const Triple &T, ArrayRef< StringLiteral > StandardNames)
Initialize the set of available library functions based on the specified target triple.
static unsigned getBitWidth(Type *Ty, const DataLayout &DL)
Returns the bitwidth of the given scalar or pointer type.
static unsigned getSize(unsigned Kind)
LLVM_ABI AACallGraphNode * operator*() const
bool isNoAlias(const MemoryLocation &LocA, const MemoryLocation &LocB)
A trivial helper function to check to see if the specified pointers are no-alias.
Class for arbitrary precision integers.
int64_t getSExtValue() const
Get sign extended value.
CallBase * getInstruction() const
Return the underlying instruction.
bool isCallbackCall() const
Return true if this ACS represents a callback call.
bool isDirectCall() const
Return true if this ACS represents a direct call.
static LLVM_ABI void getCallbackUses(const CallBase &CB, SmallVectorImpl< const Use * > &CallbackUses)
Add operand uses of CB that represent callback uses into CallbackUses.
int getCallArgOperandNo(Argument &Arg) const
Return the operand index of the underlying instruction associated with Arg.
Align getAlign() const
Return the alignment of the memory that is being allocated by the instruction.
unsigned getAddressSpace() const
Return the address space for the allocation.
LLVM_ABI std::optional< TypeSize > getAllocationSize(const DataLayout &DL) const
Get allocation size in bytes.
This class represents an incoming formal argument to a Function.
LLVM_ABI bool hasNoAliasAttr() const
Return true if this argument has the noalias attribute.
LLVM_ABI bool onlyReadsMemory() const
Return true if this argument has the readonly or readnone attribute.
LLVM_ABI bool hasPointeeInMemoryValueAttr() const
Return true if this argument has the byval, sret, inalloca, preallocated, or byref attribute.
LLVM_ABI bool hasReturnedAttr() const
Return true if this argument has the returned attribute.
LLVM_ABI bool hasByValAttr() const
Return true if this argument has the byval attribute.
const Function * getParent() const
unsigned getArgNo() const
Return the index of this formal argument in its containing function.
A function analysis which provides an AssumptionCache.
A cache of @llvm.assume calls within a function.
Functions, function parameters, and return types can have attributes to indicate how they should be t...
static LLVM_ABI Attribute get(LLVMContext &Context, AttrKind Kind, uint64_t Val=0)
Return a uniquified Attribute object.
LLVM_ABI FPClassTest getNoFPClass() const
Return the FPClassTest for nofpclass.
LLVM_ABI Attribute::AttrKind getKindAsEnum() const
Return the attribute's kind as an enum (Attribute::AttrKind).
LLVM_ABI MemoryEffects getMemoryEffects() const
Returns memory effects.
AttrKind
This enumeration lists the attributes that can be associated with parameters, function results,...
static LLVM_ABI Attribute getWithCaptureInfo(LLVMContext &Context, CaptureInfo CI)
static bool isEnumAttrKind(AttrKind Kind)
LLVM_ABI CaptureInfo getCaptureInfo() const
Returns information from captures attribute.
LLVM Basic Block Representation.
LLVM_ABI const_iterator getFirstInsertionPt() const
Returns an iterator to the first instruction in this block that is suitable for inserting a non-PHI i...
const Function * getParent() const
Return the enclosing method, or null if none.
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
const Instruction & front() const
InstListType::iterator iterator
Instruction iterators...
const Instruction * getTerminator() const LLVM_READONLY
Returns the terminator instruction if the block is well formed or null if the block is not well forme...
BinaryOps getOpcode() const
Conditional or Unconditional Branch instruction.
unsigned getNumSuccessors() const
static BranchInst * Create(BasicBlock *IfTrue, InsertPosition InsertBefore=nullptr)
BasicBlock * getSuccessor(unsigned i) const
Value * getCondition() const
Base class for all callable instructions (InvokeInst and CallInst) Holds everything related to callin...
Function * getCalledFunction() const
Returns the function called, or null if this is an indirect function invocation or the function signa...
LLVM_ABI bool isMustTailCall() const
Tests if this call site must be tail call optimized.
LLVM_ABI bool isIndirectCall() const
Return true if the callsite is an indirect call.
bool isCallee(Value::const_user_iterator UI) const
Determine whether the passed iterator points to the callee operand's Use.
Value * getCalledOperand() const
const Use & getCalledOperandUse() const
const Use & getArgOperandUse(unsigned i) const
Wrappers for getting the Use of a call argument.
LLVM_ABI std::optional< ConstantRange > getRange() const
If this return value has a range attribute, return the value range of the argument.
Value * getArgOperand(unsigned i) const
bool isBundleOperand(unsigned Idx) const
Return true if the operand at index Idx is a bundle operand.
bool isConvergent() const
Determine if the invoke is convergent.
FunctionType * getFunctionType() const
LLVM_ABI Intrinsic::ID getIntrinsicID() const
Returns the intrinsic ID of the intrinsic called or Intrinsic::not_intrinsic if the called function i...
iterator_range< User::op_iterator > args()
Iteration adapter for range-for loops.
unsigned getArgOperandNo(const Use *U) const
Given a use for a arg operand, get the arg operand number that corresponds to it.
unsigned arg_size() const
bool isArgOperand(const Use *U) const
LLVM_ABI Function * getCaller()
Helper to get the caller (the parent function).
static CallInst * Create(FunctionType *Ty, Value *F, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
static CaptureInfo none()
Create CaptureInfo that does not capture any components of the pointer.
Instruction::CastOps getOpcode() const
Return the opcode of this CastInst.
LLVM_ABI bool isIntegerCast() const
There are several places where we need to know if a cast instruction only deals with integer source a...
Type * getDestTy() const
Return the destination type, as a convenience.
bool isEquality() const
Determine if this is an equals/not equals predicate.
bool isFalseWhenEqual() const
This is just a convenience.
Predicate
This enumeration lists the possible predicates for CmpInst subclasses.
bool isTrueWhenEqual() const
This is just a convenience.
Predicate getPredicate() const
Return the predicate for this instruction.
static LLVM_ABI Constant * getExtractElement(Constant *Vec, Constant *Idx, Type *OnlyIfReducedTy=nullptr)
static LLVM_ABI ConstantInt * getTrue(LLVMContext &Context)
This class represents a range of values.
const APInt & getLower() const
Return the lower value for this range.
LLVM_ABI bool isFullSet() const
Return true if this set contains all of the elements possible for this data-type.
LLVM_ABI bool isEmptySet() const
Return true if this set contains no members.
bool isSingleElement() const
Return true if this set contains exactly one member.
static LLVM_ABI ConstantRange makeAllowedICmpRegion(CmpInst::Predicate Pred, const ConstantRange &Other)
Produce the smallest range such that all values that may satisfy the given predicate with any value c...
const APInt & getUpper() const
Return the upper value for this range.
A parsed version of the target data layout string in and methods for querying it.
iterator find(const_arg_type_t< KeyT > Val)
std::pair< iterator, bool > try_emplace(KeyT &&Key, Ts &&...Args)
size_type count(const_arg_type_t< KeyT > Val) const
Return 1 if the specified key is in the map, 0 otherwise.
bool contains(const_arg_type_t< KeyT > Val) const
Return true if the specified key is in the map, false otherwise.
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
Analysis pass which computes a DominatorTree.
Concrete subclass of DominatorTreeBase that is used to compute a normal dominator tree.
LLVM_ABI bool dominates(const BasicBlock *BB, const Use &U) const
Return true if the (end of the) basic block BB dominates the use U.
const BasicBlock & getEntryBlock() const
iterator_range< arg_iterator > args()
const Function & getFunction() const
Argument * getArg(unsigned i) const
bool hasFnAttribute(Attribute::AttrKind Kind) const
Return true if the function has the attribute.
CycleT * getCycle(const BlockT *Block) const
Find the innermost cycle containing a given block.
LLVM_ABI bool isDeclaration() const
Return true if the primary definition of this global value is outside of the current translation unit...
bool hasLocalLinkage() const
static LLVM_ABI bool compare(const APInt &LHS, const APInt &RHS, ICmpInst::Predicate Pred)
Return result of LHS Pred RHS comparison.
Value * CreatePtrAdd(Value *Ptr, Value *Offset, const Twine &Name="", GEPNoWrapFlags NW=GEPNoWrapFlags::none())
ConstantInt * getInt64(uint64_t C)
Get a constant 64-bit value.
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
LLVM_ABI Instruction * clone() const
Create a copy of 'this' instruction that is identical in all ways except the following:
LLVM_ABI bool isLifetimeStartOrEnd() const LLVM_READONLY
Return true if the instruction is a llvm.lifetime.start or llvm.lifetime.end marker.
bool mayReadOrWriteMemory() const
Return true if this instruction may read or write memory.
LLVM_ABI bool mayWriteToMemory() const LLVM_READONLY
Return true if this instruction may modify memory.
bool hasMetadata() const
Return true if this instruction has any metadata attached to it.
LLVM_ABI void insertBefore(InstListType::iterator InsertPos)
Insert an unlinked instruction into a basic block immediately before the specified position.
LLVM_ABI InstListType::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
LLVM_ABI const Function * getFunction() const
Return the function this instruction belongs to.
LLVM_ABI BasicBlock * getSuccessor(unsigned Idx) const LLVM_READONLY
Return the specified successor. This instruction must be a terminator.
LLVM_ABI bool mayHaveSideEffects() const LLVM_READONLY
Return true if the instruction may have side effects.
bool isTerminator() const
LLVM_ABI bool mayReadFromMemory() const LLVM_READONLY
Return true if this instruction may read memory.
LLVM_ABI void setMetadata(unsigned KindID, MDNode *Node)
Set the metadata of the specified kind to the specified node.
unsigned getOpcode() const
Returns a member of one of the enums like Instruction::Add.
void setDebugLoc(DebugLoc Loc)
Set the debug location information for this instruction.
LLVM_ABI const DataLayout & getDataLayout() const
Get the data layout of the module this instruction belongs to.
This is an important class for using LLVM in a threaded context.
ConstantRange getConstantRange(Value *V, Instruction *CxtI, bool UndefAllowed)
Return the ConstantRange constraint that is known to hold for the specified value at the specified in...
LoopT * getLoopFor(const BlockT *BB) const
Return the inner most loop that BB lives in.
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
static LLVM_ABI MDNode * getMostGenericRange(MDNode *A, MDNode *B)
static MemoryEffectsBase readOnly()
bool doesNotAccessMemory() const
Whether this function accesses no memory.
static MemoryEffectsBase argMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
static MemoryEffectsBase inaccessibleMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
bool onlyAccessesInaccessibleMem() const
Whether this function only (at most) accesses inaccessible memory.
ModRefInfo getModRef(Location Loc) const
Get ModRefInfo for the given Location.
bool onlyAccessesArgPointees() const
Whether this function only (at most) accesses argument memory.
bool onlyReadsMemory() const
Whether this function only (at most) reads memory.
static MemoryEffectsBase writeOnly()
static MemoryEffectsBase inaccessibleOrArgMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
static MemoryEffectsBase none()
bool onlyAccessesInaccessibleOrArgMem() const
Whether this function only (at most) accesses argument and inaccessible memory.
static MemoryEffectsBase unknown()
static LLVM_ABI std::optional< MemoryLocation > getOrNone(const Instruction *Inst)
static SizeOffsetValue unknown()
static PHINode * Create(Type *Ty, unsigned NumReservedValues, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
Constructors - NumReservedValues is a hint for the number of incoming edges that this phi node will h...
static LLVM_ABI PoisonValue * get(Type *T)
Static factory methods - Return an 'poison' object of the specified type.
Value * getReturnValue() const
Convenience accessor. Returns null if there is no return value.
LLVM_ABI const SCEV * getSCEVAtScope(const SCEV *S, const Loop *L)
Return a SCEV expression for the specified value at the specified scope in the program.
LLVM_ABI const SCEV * getSCEV(Value *V)
Return a SCEV expression for the full generality of the specified expression.
LLVM_ABI unsigned getSmallConstantMaxTripCount(const Loop *L, SmallVectorImpl< const SCEVPredicate * > *Predicates=nullptr)
Returns the upper bound of the loop trip count as a normal unsigned value.
ConstantRange getUnsignedRange(const SCEV *S)
Determine the unsigned range for a particular SCEV.
A vector that has set insertion semantics.
size_type size() const
Determine the number of elements in the SetVector.
bool insert(const value_type &X)
Insert a new element into the SetVector.
bool erase(PtrType Ptr)
Remove pointer from the set.
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
std::pair< const_iterator, bool > insert(const T &V)
insert - Insert an element into the set if it isn't already there.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
reference emplace_back(ArgTypes &&... Args)
void append(ItTy in_start, ItTy in_end)
Add the specified range to the end of the SmallVector.
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
Used to lazily calculate structure layout information for a target machine, based on the DataLayout s...
TypeSize getElementOffset(unsigned Idx) const
TypeSize getElementOffsetInBits(unsigned Idx) const
Class to represent struct types.
unsigned getNumElements() const
Random access to the elements.
Type * getElementType(unsigned N) const
Twine - A lightweight data structure for efficiently representing the concatenation of temporary valu...
The instances of the Type class are immutable: once they are created, they are never changed.
LLVM_ABI unsigned getIntegerBitWidth() const
bool isPointerTy() const
True if this is an instance of PointerType.
LLVM_ABI unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
bool isPtrOrPtrVectorTy() const
Return true if this is a pointer type or a vector of pointer types.
bool isIntegerTy() const
True if this is an instance of IntegerType.
bool isVoidTy() const
Return true if this is 'void'.
static LLVM_ABI UndefValue * get(Type *T)
Static factory methods - Return an 'undef' object of the specified type.
A Use represents the edge between a Value definition and its users.
User * getUser() const
Returns the User that contains this Use.
LLVM_ABI bool replaceUsesOfWith(Value *From, Value *To)
Replace uses of one Value with another.
const Use & getOperandUse(unsigned i) const
Value * getOperand(unsigned i) const
unsigned getNumOperands() const
LLVM_ABI bool isDroppable() const
A droppable user is a user for which uses can be dropped without affecting correctness and should be ...
ValueT lookup(const KeyT &Val) const
lookup - Return the entry for the specified key, or a default constructed value if no such entry exis...
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
static constexpr uint64_t MaximumAlignment
LLVM_ABI void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
iterator_range< user_iterator > users()
LLVM_ABI const Value * stripAndAccumulateConstantOffsets(const DataLayout &DL, APInt &Offset, bool AllowNonInbounds, bool AllowInvariantGroup=false, function_ref< bool(Value &Value, APInt &Offset)> ExternalAnalysis=nullptr, bool LookThroughIntToPtr=false) const
Accumulate the constant offset this value has compared to a base pointer.
LLVM_ABI LLVMContext & getContext() const
All values hold a context through their type.
iterator_range< use_iterator > uses()
LLVM_ABI StringRef getName() const
Return a constant reference to the value's name.
std::pair< iterator, bool > insert(const ValueT &V)
constexpr ScalarTy getFixedValue() const
constexpr bool isScalable() const
Returns whether the quantity is scaled by a runtime quantity (vscale).
const ParentTy * getParent() const
self_iterator getIterator()
NodeTy * getNextNode()
Get the next node, or nullptr for the list tail.
SetVector< Function * >::iterator I
This class implements an extremely fast bulk output stream that can only output to a stream.
A raw_ostream that writes to an std::string.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
Abstract Attribute helper functions.
LLVM_ABI bool isAssumedReadNone(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readnone.
LLVM_ABI bool isAssumedReadOnly(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readonly.
raw_ostream & operator<<(raw_ostream &OS, const RangeTy &R)
LLVM_ABI std::optional< Value * > combineOptionalValuesInAAValueLatice(const std::optional< Value * > &A, const std::optional< Value * > &B, Type *Ty)
Return the combination of A and B such that the result is a possible value of both.
LLVM_ABI bool isValidAtPosition(const ValueAndContext &VAC, InformationCache &InfoCache)
Return true if the value of VAC is a valid at the position of VAC, that is a constant,...
LLVM_ABI bool isAssumedThreadLocalObject(Attributor &A, Value &Obj, const AbstractAttribute &QueryingAA)
Return true if Obj is assumed to be a thread local object.
LLVM_ABI bool isDynamicallyUnique(Attributor &A, const AbstractAttribute &QueryingAA, const Value &V, bool ForAnalysisOnly=true)
Return true if V is dynamically unique, that is, there are no two "instances" of V at runtime with di...
LLVM_ABI bool getPotentialCopiesOfStoredValue(Attributor &A, StoreInst &SI, SmallSetVector< Value *, 4 > &PotentialCopies, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values of the one stored by SI into PotentialCopies.
SmallPtrSet< Instruction *, 4 > InstExclusionSetTy
LLVM_ABI bool isGPU(const Module &M)
Return true iff M target a GPU (and we can use GPU AS reasoning).
ValueScope
Flags to distinguish intra-procedural queries from potentially inter-procedural queries.
LLVM_ABI bool isValidInScope(const Value &V, const Function *Scope)
Return true if V is a valid value in Scope, that is a constant or an instruction/argument of Scope.
LLVM_ABI bool isPotentiallyReachable(Attributor &A, const Instruction &FromI, const Instruction &ToI, const AbstractAttribute &QueryingAA, const AA::InstExclusionSetTy *ExclusionSet=nullptr, std::function< bool(const Function &F)> GoBackwardsCB=nullptr)
Return true if ToI is potentially reachable from FromI without running into any instruction in Exclus...
LLVM_ABI bool isNoSyncInst(Attributor &A, const Instruction &I, const AbstractAttribute &QueryingAA)
Return true if I is a nosync instruction.
bool hasAssumedIRAttr(Attributor &A, const AbstractAttribute *QueryingAA, const IRPosition &IRP, DepClassTy DepClass, bool &IsKnown, bool IgnoreSubsumingPositions=false, const AAType **AAPtr=nullptr)
Helper to avoid creating an AA for IR Attributes that might already be set.
LLVM_ABI bool getPotentiallyLoadedValues(Attributor &A, LoadInst &LI, SmallSetVector< Value *, 4 > &PotentialValues, SmallSetVector< Instruction *, 4 > &PotentialValueOrigins, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values LI could read into PotentialValues.
LLVM_ABI Value * getWithType(Value &V, Type &Ty)
Try to convert V to type Ty without introducing new instructions.
constexpr char Align[]
Key for Kernel::Arg::Metadata::mAlign.
constexpr char Attrs[]
Key for Kernel::Metadata::mAttrs.
@ C
The default llvm calling convention, compatible with C.
@ BasicBlock
Various leaf nodes.
@ Unsupported
This operation is completely unsupported on the target.
@ Undef
Value of the register doesn't matter.
@ SingleThread
Synchronized with respect to signal handlers executing in the same thread.
@ CE
Windows NT (Windows on ARM)
initializer< Ty > init(const Ty &Val)
LocationClass< Ty > location(Ty &L)
unsigned combineHashValue(unsigned a, unsigned b)
Simplistic combination of 32-bit hash values into 32-bit hash values.
ElementType
The element type of an SRV or UAV resource.
Scope
Defines the scope in which this symbol should be visible: Default – Visible in the public interface o...
std::enable_if_t< detail::IsValidPointer< X, Y >::value, X * > dyn_extract_or_null(Y &&MD)
Extract a Value from Metadata, if any, allowing null.
std::enable_if_t< detail::IsValidPointer< X, Y >::value, X * > extract(Y &&MD)
Extract a Value from Metadata.
@ User
could "use" a pointer
NodeAddr< UseNode * > Use
Context & getContext() const
friend class Instruction
Iterator for Instructions in a `BasicBlock.
LLVM_ABI iterator begin() const
This is an optimization pass for GlobalISel generic memory operations.
bool operator<(int64_t V1, const APSInt &V2)
FunctionAddr VTableAddr Value
LLVM_ATTRIBUTE_ALWAYS_INLINE DynamicAPInt gcd(const DynamicAPInt &A, const DynamicAPInt &B)
LLVM_ABI KnownFPClass computeKnownFPClass(const Value *V, const APInt &DemandedElts, FPClassTest InterestedClasses, const SimplifyQuery &SQ, unsigned Depth=0)
Determine which floating-point classes are valid for V, and return them in KnownFPClass bit sets.
bool all_of(R &&range, UnaryPredicate P)
Provide wrappers to std::all_of which take ranges instead of having to pass begin/end explicitly.
LLVM_ABI bool isLegalToPromote(const CallBase &CB, Function *Callee, const char **FailureReason=nullptr)
Return true if the given indirect call site can be made to call Callee.
LLVM_ABI Constant * getInitialValueOfAllocation(const Value *V, const TargetLibraryInfo *TLI, Type *Ty)
If this is a call to an allocation function that initializes memory to a fixed value,...
auto size(R &&Range, std::enable_if_t< std::is_base_of< std::random_access_iterator_tag, typename std::iterator_traits< decltype(Range.begin())>::iterator_category >::value, void > *=nullptr)
Get the size of a range.
auto pred_end(const MachineBasicBlock *BB)
unsigned getPointerAddressSpace(const Type *T)
decltype(auto) dyn_cast(const From &Val)
dyn_cast<X> - Return the argument parameter cast to the specified type.
FunctionAddr VTableAddr uintptr_t uintptr_t Int32Ty
auto successors(const MachineBasicBlock *BB)
LLVM_ABI bool isRemovableAlloc(const CallBase *V, const TargetLibraryInfo *TLI)
Return true if this is a call to an allocation function that does not have side effects that we are r...
APFloat abs(APFloat X)
Returns the absolute value of the argument.
LLVM_ABI raw_fd_ostream & outs()
This returns a reference to a raw_fd_ostream for standard output.
LLVM_ABI Value * getAllocAlignment(const CallBase *V, const TargetLibraryInfo *TLI)
Gets the alignment argument for an aligned_alloc-like function, using either built-in knowledge based...
auto dyn_cast_if_present(const Y &Val)
dyn_cast_if_present<X> - Functionally identical to dyn_cast, except that a null (or none in the case ...
LLVM_ABI Value * simplifyInstructionWithOperands(Instruction *I, ArrayRef< Value * > NewOps, const SimplifyQuery &Q)
Like simplifyInstruction but the operands of I are replaced with NewOps.
Value * GetPointerBaseWithConstantOffset(Value *Ptr, int64_t &Offset, const DataLayout &DL, bool AllowNonInbounds=true)
Analyze the specified pointer to see if it can be expressed as a base pointer plus a constant offset.
scc_iterator< T > scc_begin(const T &G)
Construct the begin iterator for a deduced graph type T.
LLVM_ABI bool isNoAliasCall(const Value *V)
Return true if this pointer is returned by a noalias function.
MemoryEffectsBase< IRMemLocation > MemoryEffects
Summary of how a function affects memory in the program.
raw_ostream & WriteGraph(raw_ostream &O, const GraphType &G, bool ShortNames=false, const Twine &Title="")
LLVM_ABI bool isSafeToSpeculativelyExecute(const Instruction *I, const Instruction *CtxI=nullptr, AssumptionCache *AC=nullptr, const DominatorTree *DT=nullptr, const TargetLibraryInfo *TLI=nullptr, bool UseVariableInfo=true, bool IgnoreUBImplyingAttrs=true)
Return true if the instruction does not have any effects besides calculating the result and does not ...
bool isa_and_nonnull(const Y &Val)
bool operator==(const AddressRangeValuePair &LHS, const AddressRangeValuePair &RHS)
LLVM_ABI ConstantRange getConstantRangeFromMetadata(const MDNode &RangeMD)
Parse out a conservative ConstantRange from !range metadata.
auto map_range(ContainerTy &&C, FuncTy F)
const Value * getPointerOperand(const Value *V)
A helper function that returns the pointer operand of a load, store or GEP instruction.
LLVM_ABI Value * simplifyInstruction(Instruction *I, const SimplifyQuery &Q)
See if we can compute a simplified version of this instruction.
auto dyn_cast_or_null(const Y &Val)
bool any_of(R &&range, UnaryPredicate P)
Provide wrappers to std::any_of which take ranges instead of having to pass begin/end explicitly.
unsigned Log2_32(uint32_t Value)
Return the floor log base 2 of the specified value, -1 if the value is zero.
LLVM_ABI bool isIntrinsicReturningPointerAliasingArgumentWithoutCapturing(const CallBase *Call, bool MustPreserveNullness)
{launder,strip}.invariant.group returns pointer that aliases its argument, and it only captures point...
constexpr bool isPowerOf2_32(uint32_t Value)
Return true if the argument is a power of two > 0.
PotentialValuesState< std::pair< AA::ValueAndContext, AA::ValueScope > > PotentialLLVMValuesState
void sort(IteratorTy Start, IteratorTy End)
LLVM_ABI bool NullPointerIsDefined(const Function *F, unsigned AS=0)
Check whether null pointer dereferencing is considered undefined behavior for a given function or an ...
LLVM_ABI raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
bool isPointerTy(const Type *T)
LLVM_ABI bool wouldInstructionBeTriviallyDead(const Instruction *I, const TargetLibraryInfo *TLI=nullptr)
Return true if the result produced by the instruction would have no side effects if it was not used.
bool set_union(S1Ty &S1, const S2Ty &S2)
set_union(A, B) - Compute A := A u B, return whether A changed.
BumpPtrAllocatorImpl BumpPtrAllocator
The standard BumpPtrAllocator which just uses the default template parameters.
class LLVM_GSL_OWNER SmallVector
Forward declaration of SmallVector so that calculateSmallVectorDefaultInlinedElements can reference s...
LLVM_ABI CallBase & promoteCall(CallBase &CB, Function *Callee, CastInst **RetBitCast=nullptr)
Promote the given indirect call site to unconditionally call Callee.
bool isa(const From &Val)
isa<X> - Return true if the parameter to the template is an instance of one of the template type argu...
LLVM_ABI bool hasAssumption(const Function &F, const KnownAssumptionString &AssumptionStr)
Return true if F has the assumption AssumptionStr attached.
LLVM_ABI RetainedKnowledge getKnowledgeFromUse(const Use *U, ArrayRef< Attribute::AttrKind > AttrKinds)
Return a valid Knowledge associated to the Use U if its Attribute kind is in AttrKinds.
@ Success
The lock was released successfully.
LLVM_ATTRIBUTE_VISIBILITY_DEFAULT AnalysisKey InnerAnalysisManagerProxy< AnalysisManagerT, IRUnitT, ExtraArgTs... >::Key
LLVM_ABI bool isKnownNonZero(const Value *V, const SimplifyQuery &Q, unsigned Depth=0)
Return true if the given value is known to be non-zero when defined.
AtomicOrdering
Atomic ordering for LLVM's memory model.
PotentialValuesState< APInt > PotentialConstantIntValuesState
std::string join(IteratorT Begin, IteratorT End, StringRef Separator)
Joins the strings in the range [Begin, End), adding Separator between the elements.
IRBuilder(LLVMContext &, FolderTy, InserterTy, MDNode *, ArrayRef< OperandBundleDef >) -> IRBuilder< FolderTy, InserterTy >
InterleavedRange< Range > interleaved_array(const Range &R, StringRef Separator=", ")
Output range R as an array of interleaved elements.
ChangeStatus clampStateAndIndicateChange< DerefState >(DerefState &S, const DerefState &R)
void RemapInstruction(Instruction *I, ValueToValueMapTy &VM, RemapFlags Flags=RF_None, ValueMapTypeRemapper *TypeMapper=nullptr, ValueMaterializer *Materializer=nullptr, const MetadataPredicate *IdentityMD=nullptr)
Convert the instruction operands from referencing the current values into those specified by VM.
DWARFExpression::Operation Op
LLVM_ABI bool isGuaranteedNotToBeUndefOrPoison(const Value *V, AssumptionCache *AC=nullptr, const Instruction *CtxI=nullptr, const DominatorTree *DT=nullptr, unsigned Depth=0)
Return true if this function can prove that V does not have undef bits and is never poison.
ArrayRef(const T &OneElt) -> ArrayRef< T >
LLVM_ABI Value * getFreedOperand(const CallBase *CB, const TargetLibraryInfo *TLI)
If this if a call to a free function, return the freed operand.
ChangeStatus clampStateAndIndicateChange(StateType &S, const StateType &R)
Helper function to clamp a state S of type StateType with the information in R and indicate/return if...
constexpr unsigned BitWidth
ValueMap< const Value *, WeakTrackingVH > ValueToValueMapTy
auto pred_begin(const MachineBasicBlock *BB)
decltype(auto) cast(const From &Val)
cast<X> - Return the argument parameter cast to the specified type.
iterator_range< pointer_iterator< WrappedIteratorT > > make_pointer_range(RangeT &&Range)
LLVM_ABI std::optional< APInt > getAllocSize(const CallBase *CB, const TargetLibraryInfo *TLI, function_ref< const Value *(const Value *)> Mapper=[](const Value *V) { return V;})
Return the size of the requested allocation.
LLVM_ABI DenseSet< StringRef > getAssumptions(const Function &F)
Return the set of all assumptions for the function F.
Align assumeAligned(uint64_t Value)
Treats the value 0 as a 1, so Align is always at least 1.
LLVM_ABI Instruction * SplitBlockAndInsertIfThen(Value *Cond, BasicBlock::iterator SplitBefore, bool Unreachable, MDNode *BranchWeights=nullptr, DomTreeUpdater *DTU=nullptr, LoopInfo *LI=nullptr, BasicBlock *ThenBlock=nullptr)
Split the containing block at the specified instruction - everything before SplitBefore stays in the ...
@ OPTIONAL
The target may be valid if the source is not.
@ NONE
Do not track a dependence between source and target.
@ REQUIRED
The target cannot be valid if the source is not.
LLVM_ABI UseCaptureInfo DetermineUseCaptureKind(const Use &U, const Value *Base)
Determine what kind of capture behaviour U may exhibit.
LLVM_ABI Value * simplifyCmpInst(CmpPredicate Predicate, Value *LHS, Value *RHS, const SimplifyQuery &Q)
Given operands for a CmpInst, fold the result or return null.
LLVM_ABI bool mayContainIrreducibleControl(const Function &F, const LoopInfo *LI)
T bit_floor(T Value)
Returns the largest integral power of two no greater than Value if Value is nonzero.
LLVM_ABI const Value * getUnderlyingObject(const Value *V, unsigned MaxLookup=MaxLookupSearchDepth)
This method strips off any GEP address adjustments, pointer casts or llvm.threadlocal....
bool capturesNothing(CaptureComponents CC)
LLVM_ABI bool isIdentifiedObject(const Value *V)
Return true if this pointer refers to a distinct and identifiable object.
constexpr StringRef AssumptionAttrKey
The key we use for assumption attributes.
constexpr bool isCallableCC(CallingConv::ID CC)
GenericCycleInfo< SSAContext > CycleInfo
void swap(llvm::BitVector &LHS, llvm::BitVector &RHS)
Implement std::swap in terms of BitVector swap.
A type to track pointer/struct usage and accesses for AAPointerInfo.
bool forallInterferingAccesses(AA::RangeTy Range, F CB) const
See AAPointerInfo::forallInterferingAccesses.
AAPointerInfo::const_bin_iterator end() const
ChangeStatus addAccess(Attributor &A, const AAPointerInfo::RangeList &Ranges, Instruction &I, std::optional< Value * > Content, AAPointerInfo::AccessKind Kind, Type *Ty, Instruction *RemoteI=nullptr)
Add a new Access to the state at offset Offset and with size Size.
DenseMap< const Instruction *, SmallVector< unsigned > > RemoteIMap
AAPointerInfo::const_bin_iterator begin() const
AAPointerInfo::OffsetInfo ReturnedOffsets
Flag to determine if the underlying pointer is reaching a return statement in the associated function...
State & operator=(State &&R)
State(State &&SIS)=default
const AAPointerInfo::Access & getAccess(unsigned Index) const
SmallVector< AAPointerInfo::Access > AccessList
bool isAtFixpoint() const override
See AbstractState::isAtFixpoint().
bool forallInterferingAccesses(Instruction &I, F CB, AA::RangeTy &Range) const
See AAPointerInfo::forallInterferingAccesses.
static State getWorstState(const State &SIS)
Return the worst possible representable state.
int64_t numOffsetBins() const
AAPointerInfo::OffsetBinsTy OffsetBins
ChangeStatus indicateOptimisticFixpoint() override
See AbstractState::indicateOptimisticFixpoint().
State & operator=(const State &R)
ChangeStatus indicatePessimisticFixpoint() override
See AbstractState::indicatePessimisticFixpoint().
const State & getAssumed() const
static State getBestState(const State &SIS)
Return the best possible representable state.
bool isValidState() const override
See AbstractState::isValidState().
----------------—AAIntraFnReachability Attribute-----------------------—
ReachabilityQueryInfo(const ReachabilityQueryInfo &RQI)
unsigned Hash
Precomputed hash for this RQI.
const Instruction * From
Start here,.
Reachable Result
and remember if it worked:
ReachabilityQueryInfo(const Instruction *From, const ToTy *To)
ReachabilityQueryInfo(Attributor &A, const Instruction &From, const ToTy &To, const AA::InstExclusionSetTy *ES, bool MakeUnique)
Constructor replacement to ensure unique and stable sets are used for the cache.
const ToTy * To
reach this place,
const AA::InstExclusionSetTy * ExclusionSet
without going through any of these instructions,
unsigned computeHashValue() const
An abstract interface for address space information.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all align attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
Align getKnownAlign() const
Return known alignment.
static LLVM_ABI const char ID
An abstract attribute for getting assumption information.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract state for querying live call edges.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract Attribute for specializing "dynamic" components of "denormal-fp-math" and "denormal-fp-ma...
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all dereferenceable attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for llvm::GlobalValue information interference.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for indirect call information interference.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface to track if a value leaves it's defining function instance.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract Attribute for computing reachability between functions.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
bool canReach(Attributor &A, const Function &Fn) const
If the function represented by this possition can reach Fn.
virtual bool instructionCanReach(Attributor &A, const Instruction &Inst, const Function &Fn, const AA::InstExclusionSetTy *ExclusionSet=nullptr) const =0
Can Inst reach Fn.
An abstract interface to determine reachability of point A to B.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for identifying pointers from which loads can be marked invariant.
static LLVM_ABI const char ID
Unique ID (due to the unique address).
An abstract interface for liveness abstract attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for memory access kind related attributes (readnone/readonly/writeonly).
bool isAssumedReadOnly() const
Return true if we assume that the underlying value is not accessed (=written) in its respective scope...
bool isKnownReadNone() const
Return true if we know that the underlying value is not read or accessed in its respective scope.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
bool isAssumedReadNone() const
Return true if we assume that the underlying value is not read or accessed in its respective scope.
An abstract interface for all memory location attributes (readnone/argmemonly/inaccessiblememonly/ina...
static LLVM_ABI std::string getMemoryLocationsAsStr(MemoryLocationsKind MLK)
Return the locations encoded by MLK as a readable string.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
StateType::base_t MemoryLocationsKind
An abstract interface for all nonnull attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for potential address space information.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all noalias attributes.
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all nocapture attributes.
@ NO_CAPTURE_MAYBE_RETURNED
If we do not capture the value in memory or through integers we can only communicate it back as a der...
@ NO_CAPTURE
If we do not capture the value in memory, through integers, or as a derived pointer we know it is not...
static LLVM_ABI const char ID
Unique ID (due to the unique address)
bool isAssumedNoCaptureMaybeReturned() const
Return true if we assume that the underlying value is not captured in its respective scope but we all...
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static LLVM_ABI void determineFunctionCaptureCapabilities(const IRPosition &IRP, const Function &F, BitIntegerState &State)
Update State according to the capture capabilities of F for position IRP.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An AbstractAttribute for nofree.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for norecurse.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An AbstractAttribute for noreturn.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI bool isAlignedBarrier(const CallBase &CB, bool ExecutedAligned)
Helper function to determine if CB is an aligned (GPU) barrier.
static LLVM_ABI bool isNonRelaxedAtomic(const Instruction *I)
Helper function used to determine whether an instruction is non-relaxed atomic.
static LLVM_ABI bool isNoSyncIntrinsic(const Instruction *I)
Helper function specific for intrinsics which are potentially volatile.
An abstract interface for all noundef attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract Attribute for determining the necessity of the convergent attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all nonnull attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See AbstractAttribute::isImpliedByIR(...).
A helper containing a list of offsets computed for a Use.
A container for a list of ranges.
static void set_difference(const RangeList &L, const RangeList &R, RangeList &D)
Copy ranges from L that are not in R, into D.
An abstract interface for struct information.
virtual bool reachesReturn() const =0
OffsetBinsTy::const_iterator const_bin_iterator
virtual const_bin_iterator begin() const =0
DenseMap< AA::RangeTy, SmallSet< unsigned, 4 > > OffsetBinsTy
static LLVM_ABI const char ID
Unique ID (due to the unique address)
virtual int64_t numOffsetBins() const =0
An abstract interface for potential values analysis.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI Value * getSingleValue(Attributor &A, const AbstractAttribute &AA, const IRPosition &IRP, SmallVectorImpl< AA::ValueAndContext > &Values)
Extract the single value in Values if any.
An abstract interface for privatizability.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for undefined behavior.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for getting all assumption underlying objects.
virtual bool forallUnderlyingObjects(function_ref< bool(Value &)> Pred, AA::ValueScope Scope=AA::Interprocedural) const =0
Check Pred on all underlying objects in Scope collected so far.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for range value analysis.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for value simplify abstract attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for willreturn.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
Helper to represent an access offset and size, with logic to deal with uncertainty and check for over...
static constexpr int64_t Unknown
static RangeTy getUnknown()
Base struct for all "concrete attribute" deductions.
void print(raw_ostream &OS) const
Helper functions, for debug purposes only.
virtual StateType & getState()=0
Return the internal abstract state for inspection.
An interface to query the internal state of an abstract attribute.
virtual bool isAtFixpoint() const =0
Return if this abstract state is fixed, thus does not need to be updated if information changes as it...
virtual bool isValidState() const =0
Return if this abstract state is in a valid state.
Helper for AA::PointerInfo::Access DenseMap/Set usage ignoring everythign but the instruction.
static unsigned getHashValue(const Access &A)
AAPointerInfo::Access Access
static Access getTombstoneKey()
DenseMapInfo< Instruction * > Base
static bool isEqual(const Access &LHS, const Access &RHS)
static Access getEmptyKey()
constexpr uint64_t value() const
This is a hole in the type system and should not be abused.
std::function< void( const ArgumentReplacementInfo &, Function &, Function::arg_iterator)> CalleeRepairCBTy
Callee repair callback type.
const Argument & getReplacedArg() const
std::function< void(const ArgumentReplacementInfo &, AbstractCallSite, SmallVectorImpl< Value * > &)> ACSRepairCBTy
Abstract call site (ACS) repair callback type.
The fixpoint analysis framework that orchestrates the attribute deduction.
std::function< std::optional< Value * >( const IRPosition &, const AbstractAttribute *, bool &)> SimplifictionCallbackTy
Register CB as a simplification callback.
Specialization of the integer state for a bit-wise encoding.
BitIntegerState & addKnownBits(base_t Bits)
Add the bits in BitsEncoding to the "known bits".
Simple wrapper for a single bit (boolean) state.
static constexpr DenormalMode getDefault()
Return the assumed default mode for a function without denormal-fp-math.
static constexpr DenormalMode getInvalid()
static Access getTombstoneKey()
static unsigned getHashValue(const Access &A)
static Access getEmptyKey()
AAPointerInfo::Access Access
static bool isEqual(const Access &LHS, const Access &RHS)
static bool isEqual(const AA::RangeTy &A, const AA::RangeTy B)
static AA::RangeTy getTombstoneKey()
static unsigned getHashValue(const AA::RangeTy &Range)
static AA::RangeTy getEmptyKey()
static ReachabilityQueryInfo< ToTy > EmptyKey
static ReachabilityQueryInfo< ToTy > TombstoneKey
static ReachabilityQueryInfo< ToTy > * getEmptyKey()
DenseMapInfo< std::pair< const Instruction *, const ToTy * > > PairDMI
static ReachabilityQueryInfo< ToTy > * getTombstoneKey()
static bool isEqual(const ReachabilityQueryInfo< ToTy > *LHS, const ReachabilityQueryInfo< ToTy > *RHS)
DenseMapInfo< const AA::InstExclusionSetTy * > InstSetDMI
static unsigned getHashValue(const ReachabilityQueryInfo< ToTy > *RQI)
An information struct used to provide DenseMap with the various necessary components for a given valu...
State for dereferenceable attribute.
IncIntegerState DerefBytesState
State representing the number of dereferenceable bytes.
ChangeStatus manifest(Attributor &A) override
See AbstractAttribute::manifest(...).
Helper to describe and deal with positions in the LLVM-IR.
Function * getAssociatedFunction() const
Return the associated function, if any.
static const IRPosition callsite_returned(const CallBase &CB)
Create a position describing the returned value of CB.
static const IRPosition returned(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the returned value of F.
LLVM_ABI Argument * getAssociatedArgument() const
Return the associated argument, if any.
static const IRPosition value(const Value &V, const CallBaseContext *CBContext=nullptr)
Create a position describing the value of V.
int getCalleeArgNo() const
Return the callee argument number of the associated value if it is an argument or call site argument,...
static const IRPosition inst(const Instruction &I, const CallBaseContext *CBContext=nullptr)
Create a position describing the instruction I.
static const IRPosition callsite_argument(const CallBase &CB, unsigned ArgNo)
Create a position describing the argument of CB at position ArgNo.
@ IRP_ARGUMENT
An attribute for a function argument.
@ IRP_RETURNED
An attribute for the function return value.
@ IRP_CALL_SITE
An attribute for a call site (function scope).
@ IRP_CALL_SITE_RETURNED
An attribute for a call site return value.
@ IRP_FUNCTION
An attribute for a function (scope).
@ IRP_CALL_SITE_ARGUMENT
An attribute for a call site argument.
@ IRP_INVALID
An invalid position.
Instruction * getCtxI() const
Return the context instruction, if any.
static const IRPosition argument(const Argument &Arg, const CallBaseContext *CBContext=nullptr)
Create a position describing the argument Arg.
Type * getAssociatedType() const
Return the type this abstract attribute is associated with.
static const IRPosition function(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the function scope of F.
const CallBaseContext * getCallBaseContext() const
Get the call base context from the position.
Value & getAssociatedValue() const
Return the value this abstract attribute is associated with.
Value & getAnchorValue() const
Return the value this abstract attribute is anchored with.
int getCallSiteArgNo() const
Return the call site argument number of the associated value if it is an argument or call site argume...
static const IRPosition function_scope(const IRPosition &IRP, const CallBaseContext *CBContext=nullptr)
Create a position with function scope matching the "context" of IRP.
Kind getPositionKind() const
Return the associated position kind.
bool isArgumentPosition() const
Return true if the position is an argument or call site argument.
static const IRPosition callsite_function(const CallBase &CB)
Create a position describing the function scope of CB.
Function * getAnchorScope() const
Return the Function surrounding the anchor value.
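A sketch tying the factory functions to the position kinds above; CB is assumed to be a CallBase in scope:
  const IRPosition CSArgPos = IRPosition::callsite_argument(CB, /*ArgNo=*/0);
  switch (CSArgPos.getPositionKind()) {
  case IRPosition::IRP_CALL_SITE_ARGUMENT:
    // getAssociatedValue() is CB's 0th argument operand,
    // getAssociatedArgument() the matching callee Argument (if known).
    break;
  case IRPosition::IRP_FUNCTION:
  case IRPosition::IRP_CALL_SITE:
    // Function-scope positions: getAssociatedFunction() is the subject.
    break;
  default:
    break;
  }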
ConstantRange getKnown() const
Return the known state encoding.
ConstantRange getAssumed() const
Return the assumed state encoding.
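A sketch (assumed usage) of consuming this range state through AAValueConstantRange, with A, QueryingAA, and a value V assumed to be in scope:
  const auto *RangeAA = A.getAAFor<AAValueConstantRange>(
      QueryingAA, IRPosition::value(V), DepClassTy::REQUIRED);
  if (RangeAA && RangeAA->getState().isValidState()) {
    ConstantRange Assumed = RangeAA->getAssumed();
    if (const APInt *C = Assumed.getSingleElement()) {
      // Under the current assumptions V evaluates to the constant *C.
    }
  }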
base_t getAssumed() const
Return the assumed state encoding.
static constexpr base_t getWorstState()
Helper that allows to insert a new assumption string in the known assumption set by creating a (stati...
FPClassTest KnownFPClasses
Floating-point classes the value could be one of.
A "must be executed context" for a given program point PP is the set of instructions,...
iterator & end()
Return a universal end iterator.
bool findInContextOf(const Instruction *I, const Instruction *PP)
Helper to look for I in the context of PP.
iterator & begin(const Instruction *PP)
Return an iterator to explore the context around PP.
bool checkForAllContext(const Instruction *PP, function_ref< bool(const Instruction *)> Pred)
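A sketch (assumed usage) of driving the explorer; the instance is assumed to come from the Attributor's information cache, and MaybeI/PP are placeholder instructions:
  MustBeExecutedContextExplorer *Explorer =
      A.getInfoCache().getMustBeExecutedContextExplorer();
  if (Explorer) {
    bool AlwaysReached = Explorer->findInContextOf(MaybeI, PP);
    Explorer->checkForAllContext(PP, [&](const Instruction *CtxI) {
      // Visit one instruction known to execute whenever PP executes;
      // returning false stops the exploration.
      return true;
    });
  }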
static unsigned MaxPotentialValues
Helper to tie an abstract state implementation to an abstract attribute.
StateType & getState() override
See AbstractAttribute::getState(...).
bool isPassthrough() const
LLVM_ABI bool unionAssumed(std::optional< Value * > Other)
Merge Other into the currently assumed simplified value.
std::optional< Value * > SimplifiedAssociatedValue
An assumed simplified value.
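A sketch of the consumer-side pattern, assuming A, QueryingAA, and a value V are in scope; the Attributor interface is typically queried instead of the AA directly:
  bool UsedAssumedInformation = false;
  std::optional<Value *> SimpleV = A.getAssumedSimplified(
      IRPosition::value(V), QueryingAA, UsedAssumedInformation,
      AA::Interprocedural);
  if (SimpleV && *SimpleV && *SimpleV != &V) {
    // V may be treated as *SimpleV under the current assumptions;
    // std::nullopt would mean "not clear yet", nullptr "no single value".
  }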
Type * Ty
The type of the original value.