55#include "llvm/IR/IntrinsicsAMDGPU.h"
56#include "llvm/IR/IntrinsicsNVPTX.h"
81#define DEBUG_TYPE "attributor"
85 cl::desc(
"Manifest Attributor internal string attributes."),
98 cl::desc(
"Maximum number of potential values to be "
99 "tracked for each position."),
104 "attributor-max-potential-values-iterations",
cl::Hidden,
106 "Maximum number of iterations we keep dismantling potential values."),
109STATISTIC(NumAAs,
"Number of abstract attributes created");
124#define BUILD_STAT_MSG_IR_ATTR(TYPE, NAME) \
125 ("Number of " #TYPE " marked '" #NAME "'")
126#define BUILD_STAT_NAME(NAME, TYPE) NumIR##TYPE##_##NAME
127#define STATS_DECL_(NAME, MSG) STATISTIC(NAME, MSG);
128#define STATS_DECL(NAME, TYPE, MSG) \
129 STATS_DECL_(BUILD_STAT_NAME(NAME, TYPE), MSG);
130#define STATS_TRACK(NAME, TYPE) ++(BUILD_STAT_NAME(NAME, TYPE));
131#define STATS_DECLTRACK(NAME, TYPE, MSG) \
133 STATS_DECL(NAME, TYPE, MSG) \
134 STATS_TRACK(NAME, TYPE) \
136#define STATS_DECLTRACK_ARG_ATTR(NAME) \
137 STATS_DECLTRACK(NAME, Arguments, BUILD_STAT_MSG_IR_ATTR(arguments, NAME))
138#define STATS_DECLTRACK_CSARG_ATTR(NAME) \
139 STATS_DECLTRACK(NAME, CSArguments, \
140 BUILD_STAT_MSG_IR_ATTR(call site arguments, NAME))
141#define STATS_DECLTRACK_FN_ATTR(NAME) \
142 STATS_DECLTRACK(NAME, Function, BUILD_STAT_MSG_IR_ATTR(functions, NAME))
143#define STATS_DECLTRACK_CS_ATTR(NAME) \
144 STATS_DECLTRACK(NAME, CS, BUILD_STAT_MSG_IR_ATTR(call site, NAME))
145#define STATS_DECLTRACK_FNRET_ATTR(NAME) \
146 STATS_DECLTRACK(NAME, FunctionReturn, \
147 BUILD_STAT_MSG_IR_ATTR(function returns, NAME))
148#define STATS_DECLTRACK_CSRET_ATTR(NAME) \
149 STATS_DECLTRACK(NAME, CSReturn, \
150 BUILD_STAT_MSG_IR_ATTR(call site returns, NAME))
151#define STATS_DECLTRACK_FLOATING_ATTR(NAME) \
152 STATS_DECLTRACK(NAME, Floating, \
153 ("Number of floating values known to be '" #NAME "'"))
158#define PIPE_OPERATOR(CLASS) \
159 raw_ostream &operator<<(raw_ostream &OS, const CLASS &AA) { \
160 return OS << static_cast<const AbstractAttribute &>(AA); \
215 bool HeaderOnly,
Cycle **CPtr =
nullptr) {
218 auto *BB =
I->getParent();
224 return !HeaderOnly || BB ==
C->getHeader();
235 if (
DL.getTypeSizeInBits(Ty) !=
DL.getTypeAllocSizeInBits(Ty))
240 if (
VectorType *SeqTy = dyn_cast<VectorType>(Ty))
244 if (
ArrayType *SeqTy = dyn_cast<ArrayType>(Ty))
247 if (!isa<StructType>(Ty))
260 StartPos +=
DL.getTypeAllocSizeInBits(ElTy);
270 bool AllowVolatile) {
271 if (!AllowVolatile &&
I->isVolatile())
274 if (
auto *LI = dyn_cast<LoadInst>(
I)) {
275 return LI->getPointerOperand();
278 if (
auto *SI = dyn_cast<StoreInst>(
I)) {
279 return SI->getPointerOperand();
282 if (
auto *CXI = dyn_cast<AtomicCmpXchgInst>(
I)) {
283 return CXI->getPointerOperand();
286 if (
auto *RMWI = dyn_cast<AtomicRMWInst>(
I)) {
287 return RMWI->getPointerOperand();
309 bool GetMinOffset,
bool AllowNonInbounds,
310 bool UseAssumed =
false) {
312 auto AttributorAnalysis = [&](
Value &V,
APInt &ROffset) ->
bool {
317 UseAssumed ? DepClassTy::OPTIONAL
319 if (!ValueConstantRangeAA)
323 if (Range.isFullSet())
329 ROffset = Range.getSignedMin();
331 ROffset = Range.getSignedMax();
342 const Value *
Ptr, int64_t &BytesOffset,
344 APInt OffsetAPInt(
DL.getIndexTypeSizeInBits(
Ptr->getType()), 0);
347 true, AllowNonInbounds);
355template <
typename AAType,
typename StateType =
typename AAType::StateType,
357 bool RecurseForSelectAndPHI =
true>
359 Attributor &
A,
const AAType &QueryingAA, StateType &S,
361 LLVM_DEBUG(
dbgs() <<
"[Attributor] Clamp return value states for "
362 << QueryingAA <<
" into " << S <<
"\n");
364 assert((QueryingAA.getIRPosition().getPositionKind() ==
366 QueryingAA.getIRPosition().getPositionKind() ==
368 "Can only clamp returned value states for a function returned or call "
369 "site returned position!");
373 std::optional<StateType>
T;
376 auto CheckReturnValue = [&](
Value &RV) ->
bool {
381 return AA::hasAssumedIRAttr<IRAttributeKind>(
382 A, &QueryingAA, RVPos, DepClassTy::REQUIRED, IsKnown);
386 A.getAAFor<AAType>(QueryingAA, RVPos, DepClassTy::REQUIRED);
390 <<
" AA: " << AA->getAsStr(&
A) <<
" @ " << RVPos <<
"\n");
391 const StateType &AAS = AA->getState();
393 T = StateType::getBestState(AAS);
395 LLVM_DEBUG(
dbgs() <<
"[Attributor] AA State: " << AAS <<
" RV State: " <<
T
397 return T->isValidState();
400 if (!
A.checkForAllReturnedValues(CheckReturnValue, QueryingAA,
401 AA::ValueScope::Intraprocedural,
402 RecurseForSelectAndPHI))
403 S.indicatePessimisticFixpoint();
410template <
typename AAType,
typename BaseType,
411 typename StateType =
typename BaseType::StateType,
412 bool PropagateCallBaseContext =
false,
414 bool RecurseForSelectAndPHI =
true>
415struct AAReturnedFromReturnedValues :
public BaseType {
421 StateType S(StateType::getBestState(this->getState()));
422 clampReturnedValueStates<AAType, StateType, IRAttributeKind, RecurseForSelectAndPHI>(
424 PropagateCallBaseContext ? this->getCallBaseContext() :
nullptr);
427 return clampStateAndIndicateChange<StateType>(this->getState(), S);
433template <
typename AAType,
typename StateType =
typename AAType::StateType,
435static void clampCallSiteArgumentStates(
Attributor &
A,
const AAType &QueryingAA,
437 LLVM_DEBUG(
dbgs() <<
"[Attributor] Clamp call site argument states for "
438 << QueryingAA <<
" into " << S <<
"\n");
440 assert(QueryingAA.getIRPosition().getPositionKind() ==
442 "Can only clamp call site argument states for an argument position!");
446 std::optional<StateType>
T;
449 unsigned ArgNo = QueryingAA.getIRPosition().getCallSiteArgNo();
461 return AA::hasAssumedIRAttr<IRAttributeKind>(
462 A, &QueryingAA, ACSArgPos, DepClassTy::REQUIRED, IsKnown);
466 A.getAAFor<AAType>(QueryingAA, ACSArgPos, DepClassTy::REQUIRED);
469 LLVM_DEBUG(
dbgs() <<
"[Attributor] ACS: " << *ACS.getInstruction()
470 <<
" AA: " << AA->getAsStr(&
A) <<
" @" << ACSArgPos
472 const StateType &AAS = AA->getState();
474 T = StateType::getBestState(AAS);
476 LLVM_DEBUG(
dbgs() <<
"[Attributor] AA State: " << AAS <<
" CSA State: " <<
T
478 return T->isValidState();
481 bool UsedAssumedInformation =
false;
482 if (!
A.checkForAllCallSites(CallSiteCheck, QueryingAA,
true,
483 UsedAssumedInformation))
484 S.indicatePessimisticFixpoint();
491template <
typename AAType,
typename BaseType,
492 typename StateType =
typename AAType::StateType,
494bool getArgumentStateFromCallBaseContext(
Attributor &
A,
498 "Expected an 'argument' position !");
504 assert(ArgNo >= 0 &&
"Invalid Arg No!");
510 return AA::hasAssumedIRAttr<IRAttributeKind>(
511 A, &QueryingAttribute, CBArgPos, DepClassTy::REQUIRED, IsKnown);
515 A.getAAFor<AAType>(QueryingAttribute, CBArgPos, DepClassTy::REQUIRED);
518 const StateType &CBArgumentState =
519 static_cast<const StateType &
>(AA->getState());
521 LLVM_DEBUG(
dbgs() <<
"[Attributor] Briding Call site context to argument"
522 <<
"Position:" << Pos <<
"CB Arg state:" << CBArgumentState
526 State ^= CBArgumentState;
531template <
typename AAType,
typename BaseType,
532 typename StateType =
typename AAType::StateType,
533 bool BridgeCallBaseContext =
false,
535struct AAArgumentFromCallSiteArguments :
public BaseType {
541 StateType S = StateType::getBestState(this->getState());
543 if (BridgeCallBaseContext) {
545 getArgumentStateFromCallBaseContext<AAType,
BaseType, StateType,
547 A, *
this, this->getIRPosition(), S);
549 return clampStateAndIndicateChange<StateType>(this->getState(), S);
551 clampCallSiteArgumentStates<AAType, StateType, IRAttributeKind>(
A, *
this,
556 return clampStateAndIndicateChange<StateType>(this->getState(), S);
561template <
typename AAType,
typename BaseType,
562 typename StateType =
typename BaseType::StateType,
563 bool IntroduceCallBaseContext =
false,
565struct AACalleeToCallSite :
public BaseType {
570 auto IRPKind = this->getIRPosition().getPositionKind();
573 "Can only wrap function returned positions for call site "
574 "returned positions!");
575 auto &S = this->getState();
577 CallBase &CB = cast<CallBase>(this->getAnchorValue());
578 if (IntroduceCallBaseContext)
579 LLVM_DEBUG(
dbgs() <<
"[Attributor] Introducing call base context:" << CB
584 for (
const Function *Callee : Callees) {
588 IntroduceCallBaseContext ? &CB :
nullptr)
590 *
Callee, IntroduceCallBaseContext ? &CB : nullptr);
594 if (!AA::hasAssumedIRAttr<IRAttributeKind>(
595 A,
this, FnPos, DepClassTy::REQUIRED, IsKnown))
601 A.getAAFor<AAType>(*
this, FnPos, DepClassTy::REQUIRED);
605 if (S.isAtFixpoint())
606 return S.isValidState();
610 if (!
A.checkForAllCallees(CalleePred, *
this, CB))
611 return S.indicatePessimisticFixpoint();
617template <
class AAType,
typename StateType =
typename AAType::StateType>
618static void followUsesInContext(AAType &AA,
Attributor &
A,
623 auto EIt = Explorer.
begin(CtxI), EEnd = Explorer.
end(CtxI);
624 for (
unsigned u = 0;
u <
Uses.size(); ++
u) {
626 if (
const Instruction *UserI = dyn_cast<Instruction>(
U->getUser())) {
628 if (Found && AA.followUseInMBEC(
A, U, UserI, State))
629 for (
const Use &Us : UserI->
uses())
644template <
class AAType,
typename StateType =
typename AAType::StateType>
645static void followUsesInMBEC(AAType &AA,
Attributor &
A, StateType &S,
648 A.getInfoCache().getMustBeExecutedContextExplorer();
654 for (
const Use &U : AA.getIRPosition().getAssociatedValue().uses())
657 followUsesInContext<AAType>(AA,
A, *Explorer, &CtxI,
Uses, S);
659 if (S.isAtFixpoint())
664 if (
const BranchInst *Br = dyn_cast<BranchInst>(
I))
665 if (Br->isConditional())
704 StateType ParentState;
708 ParentState.indicateOptimisticFixpoint();
710 for (
const BasicBlock *BB : Br->successors()) {
711 StateType ChildState;
713 size_t BeforeSize =
Uses.size();
714 followUsesInContext(AA,
A, *Explorer, &BB->front(),
Uses, ChildState);
717 for (
auto It =
Uses.begin() + BeforeSize; It !=
Uses.end();)
720 ParentState &= ChildState;
733namespace PointerInfo {
794 R.indicatePessimisticFixpoint();
888 if (!Range.mayOverlap(ItRange))
890 bool IsExact = Range == ItRange && !Range.offsetOrSizeAreUnknown();
891 for (
auto Index : It.getSecond()) {
893 if (!CB(Access, IsExact))
913 for (
unsigned Index : LocalList->getSecond()) {
916 if (Range.offsetAndSizeAreUnknown())
932 RemoteI = RemoteI ? RemoteI : &
I;
936 bool AccExists =
false;
938 for (
auto Index : LocalList) {
940 if (
A.getLocalInst() == &
I) {
949 <<
"[AAPointerInfo] Inserting access in new offset bins\n";);
951 for (
auto Key : ToAdd) {
960 "New Access should have been at AccIndex");
961 LocalList.push_back(AccIndex);
975 auto &ExistingRanges =
Before.getRanges();
976 auto &NewRanges = Current.getRanges();
983 <<
"[AAPointerInfo] Removing access from old offset bins\n";);
990 "Expected bin to actually contain the Access.");
1009 using const_iterator = VecTy::const_iterator;
1012 const_iterator begin()
const {
return Offsets.begin(); }
1013 const_iterator end()
const {
return Offsets.end(); }
1016 return Offsets ==
RHS.Offsets;
1022 bool isUnassigned()
const {
return Offsets.size() == 0; }
1024 bool isUnknown()
const {
1037 void addToAll(int64_t Inc) {
1038 for (
auto &
Offset : Offsets) {
1047 void merge(
const OffsetInfo &R) {
Offsets.append(
R.Offsets); }
1062struct AAPointerInfoImpl
1063 :
public StateWrapper<AA::PointerInfo::State, AAPointerInfo> {
1068 const std::string getAsStr(
Attributor *
A)
const override {
1069 return std::string(
"PointerInfo ") +
1070 (isValidState() ? (std::string(
"#") +
1071 std::to_string(OffsetBins.
size()) +
" bins")
1077 return AAPointerInfo::manifest(
A);
1080 virtual const_bin_iterator
begin()
const override {
return State::begin(); }
1081 virtual const_bin_iterator
end()
const override {
return State::end(); }
1082 virtual int64_t numOffsetBins()
const override {
1083 return State::numOffsetBins();
1086 bool forallInterferingAccesses(
1090 return State::forallInterferingAccesses(Range, CB);
1093 bool forallInterferingAccesses(
1095 bool FindInterferingWrites,
bool FindInterferingReads,
1096 function_ref<
bool(
const Access &,
bool)> UserCB,
bool &HasBeenWrittenTo,
1098 function_ref<
bool(
const Access &)> SkipCB)
const override {
1099 HasBeenWrittenTo =
false;
1106 bool IsAssumedNoSync = AA::hasAssumedIRAttr<Attribute::NoSync>(
1111 bool AllInSameNoSyncFn = IsAssumedNoSync;
1112 bool InstIsExecutedByInitialThreadOnly =
1113 ExecDomainAA && ExecDomainAA->isExecutedByInitialThreadOnly(
I);
1120 bool InstIsExecutedInAlignedRegion =
1121 FindInterferingReads && ExecDomainAA &&
1122 ExecDomainAA->isExecutedInAlignedRegion(
A,
I);
1124 if (InstIsExecutedInAlignedRegion || InstIsExecutedByInitialThreadOnly)
1125 A.recordDependence(*ExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1128 bool IsThreadLocalObj =
1137 auto CanIgnoreThreadingForInst = [&](
const Instruction &
I) ->
bool {
1138 if (IsThreadLocalObj || AllInSameNoSyncFn)
1140 const auto *FnExecDomainAA =
1141 I.getFunction() == &
Scope
1146 if (!FnExecDomainAA)
1148 if (InstIsExecutedInAlignedRegion ||
1149 (FindInterferingWrites &&
1150 FnExecDomainAA->isExecutedInAlignedRegion(
A,
I))) {
1151 A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1154 if (InstIsExecutedByInitialThreadOnly &&
1155 FnExecDomainAA->isExecutedByInitialThreadOnly(
I)) {
1156 A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1165 auto CanIgnoreThreading = [&](
const Access &Acc) ->
bool {
1166 return CanIgnoreThreadingForInst(*Acc.getRemoteInst()) ||
1167 (Acc.getRemoteInst() != Acc.getLocalInst() &&
1168 CanIgnoreThreadingForInst(*Acc.getLocalInst()));
1172 bool IsKnownNoRecurse;
1173 AA::hasAssumedIRAttr<Attribute::NoRecurse>(
1180 bool InstInKernel =
Scope.hasFnAttribute(
"kernel");
1181 bool ObjHasKernelLifetime =
false;
1182 const bool UseDominanceReasoning =
1183 FindInterferingWrites && IsKnownNoRecurse;
1194 case AA::GPUAddressSpace::Shared:
1195 case AA::GPUAddressSpace::Constant:
1196 case AA::GPUAddressSpace::Local:
1208 std::function<
bool(
const Function &)> IsLiveInCalleeCB;
1210 if (
auto *AI = dyn_cast<AllocaInst>(&getAssociatedValue())) {
1215 bool IsKnownNoRecurse;
1216 if (AA::hasAssumedIRAttr<Attribute::NoRecurse>(
1218 IsKnownNoRecurse)) {
1219 IsLiveInCalleeCB = [AIFn](
const Function &Fn) {
return AIFn != &Fn; };
1221 }
else if (
auto *GV = dyn_cast<GlobalValue>(&getAssociatedValue())) {
1224 ObjHasKernelLifetime = HasKernelLifetime(GV, *GV->getParent());
1225 if (ObjHasKernelLifetime)
1226 IsLiveInCalleeCB = [](
const Function &Fn) {
1227 return !Fn.hasFnAttribute(
"kernel");
1235 auto AccessCB = [&](
const Access &Acc,
bool Exact) {
1237 bool AccInSameScope = AccScope == &
Scope;
1241 if (InstInKernel && ObjHasKernelLifetime && !AccInSameScope &&
1245 if (Exact && Acc.isMustAccess() && Acc.getRemoteInst() != &
I) {
1246 if (Acc.isWrite() || (isa<LoadInst>(
I) && Acc.isWriteOrAssumption()))
1247 ExclusionSet.
insert(Acc.getRemoteInst());
1250 if ((!FindInterferingWrites || !Acc.isWriteOrAssumption()) &&
1251 (!FindInterferingReads || !Acc.isRead()))
1254 bool Dominates = FindInterferingWrites && DT && Exact &&
1255 Acc.isMustAccess() && AccInSameScope &&
1258 DominatingWrites.
insert(&Acc);
1262 AllInSameNoSyncFn &= Acc.getRemoteInst()->getFunction() == &
Scope;
1264 InterferingAccesses.
push_back({&Acc, Exact});
1267 if (!State::forallInterferingAccesses(
I, AccessCB, Range))
1270 HasBeenWrittenTo = !DominatingWrites.
empty();
1274 for (
const Access *Acc : DominatingWrites) {
1275 if (!LeastDominatingWriteInst) {
1276 LeastDominatingWriteInst = Acc->getRemoteInst();
1277 }
else if (DT->
dominates(LeastDominatingWriteInst,
1278 Acc->getRemoteInst())) {
1279 LeastDominatingWriteInst = Acc->getRemoteInst();
1284 auto CanSkipAccess = [&](
const Access &Acc,
bool Exact) {
1285 if (SkipCB && SkipCB(Acc))
1287 if (!CanIgnoreThreading(Acc))
1293 bool ReadChecked = !FindInterferingReads;
1294 bool WriteChecked = !FindInterferingWrites;
1300 &ExclusionSet, IsLiveInCalleeCB))
1305 if (!WriteChecked) {
1307 &ExclusionSet, IsLiveInCalleeCB))
1308 WriteChecked =
true;
1322 if (!WriteChecked && HasBeenWrittenTo &&
1323 Acc.getRemoteInst()->getFunction() != &
Scope) {
1333 if (!FnReachabilityAA ||
1334 !FnReachabilityAA->instructionCanReach(
1335 A, *LeastDominatingWriteInst,
1336 *Acc.getRemoteInst()->getFunction(), &ExclusionSet))
1337 WriteChecked =
true;
1343 if (ReadChecked && WriteChecked)
1346 if (!DT || !UseDominanceReasoning)
1348 if (!DominatingWrites.count(&Acc))
1350 return LeastDominatingWriteInst != Acc.getRemoteInst();
1355 for (
auto &It : InterferingAccesses) {
1356 if ((!AllInSameNoSyncFn && !IsThreadLocalObj && !ExecDomainAA) ||
1357 !CanSkipAccess(*It.first, It.second)) {
1358 if (!UserCB(*It.first, It.second))
1368 using namespace AA::PointerInfo;
1370 return indicatePessimisticFixpoint();
1372 const auto &OtherAAImpl =
static_cast<const AAPointerInfoImpl &
>(OtherAA);
1373 bool IsByval = OtherAAImpl.getAssociatedArgument()->hasByValAttr();
1377 const auto &State = OtherAAImpl.getState();
1378 for (
const auto &It : State) {
1379 for (
auto Index : It.getSecond()) {
1380 const auto &RAcc = State.getAccess(
Index);
1381 if (IsByval && !RAcc.isRead())
1383 bool UsedAssumedInformation =
false;
1385 auto Content =
A.translateArgumentToCallSiteContent(
1386 RAcc.getContent(), CB, *
this, UsedAssumedInformation);
1387 AK =
AccessKind(AK & (IsByval ? AccessKind::AK_R : AccessKind::AK_RW));
1388 AK =
AccessKind(AK | (RAcc.isMayAccess() ? AK_MAY : AK_MUST));
1390 Changed |= addAccess(
A, RAcc.getRanges(), CB,
Content, AK,
1391 RAcc.getType(), RAcc.getRemoteInst());
1398 const OffsetInfo &Offsets,
CallBase &CB) {
1399 using namespace AA::PointerInfo;
1401 return indicatePessimisticFixpoint();
1403 const auto &OtherAAImpl =
static_cast<const AAPointerInfoImpl &
>(OtherAA);
1407 const auto &State = OtherAAImpl.getState();
1408 for (
const auto &It : State) {
1409 for (
auto Index : It.getSecond()) {
1410 const auto &RAcc = State.getAccess(
Index);
1411 for (
auto Offset : Offsets) {
1415 if (!NewRanges.isUnknown()) {
1416 NewRanges.addToAllOffsets(
Offset);
1419 addAccess(
A, NewRanges, CB, RAcc.getContent(), RAcc.getKind(),
1420 RAcc.getType(), RAcc.getRemoteInst());
1429 void trackPointerInfoStatistics(
const IRPosition &IRP)
const {}
1433 for (
auto &It : OffsetBins) {
1434 O <<
"[" << It.first.Offset <<
"-" << It.first.Offset + It.first.Size
1435 <<
"] : " << It.getSecond().size() <<
"\n";
1436 for (
auto AccIndex : It.getSecond()) {
1437 auto &Acc = AccessList[AccIndex];
1438 O <<
" - " << Acc.getKind() <<
" - " << *Acc.getLocalInst() <<
"\n";
1439 if (Acc.getLocalInst() != Acc.getRemoteInst())
1440 O <<
" --> " << *Acc.getRemoteInst()
1442 if (!Acc.isWrittenValueYetUndetermined()) {
1443 if (isa_and_nonnull<Function>(Acc.getWrittenValue()))
1444 O <<
" - c: func " << Acc.getWrittenValue()->getName()
1446 else if (Acc.getWrittenValue())
1447 O <<
" - c: " << *Acc.getWrittenValue() <<
"\n";
1449 O <<
" - c: <unknown>\n";
1456struct AAPointerInfoFloating :
public AAPointerInfoImpl {
1459 : AAPointerInfoImpl(IRP,
A) {}
1466 using namespace AA::PointerInfo;
1469 TypeSize AccessSize =
DL.getTypeStoreSize(&Ty);
1479 if (!VT || VT->getElementCount().isScalable() ||
1481 (*Content)->getType() != VT ||
1482 DL.getTypeStoreSize(VT->getElementType()).isScalable()) {
1492 int64_t ElementSize =
DL.getTypeStoreSize(ElementType).getFixedValue();
1493 auto *ConstContent = cast<Constant>(*
Content);
1497 for (
int i = 0, e = VT->getElementCount().getFixedValue(); i != e; ++i) {
1499 ConstContent, ConstantInt::get(
Int32Ty, i));
1502 Changed = Changed | addAccess(
A, {ElementOffsets, ElementSize},
I,
1506 for (
auto &ElementOffset : ElementOffsets)
1507 ElementOffset += ElementSize;
1521 OffsetInfo &UsrOI,
const OffsetInfo &PtrOI,
1525 void trackStatistics()
const override {
1526 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1530bool AAPointerInfoFloating::collectConstantsForGEP(
Attributor &
A,
1533 const OffsetInfo &PtrOI,
1535 unsigned BitWidth =
DL.getIndexTypeSizeInBits(
GEP->getType());
1539 assert(!UsrOI.isUnknown() && !PtrOI.isUnknown() &&
1540 "Don't look for constant values if the offset has already been "
1541 "determined to be unknown.");
1543 if (!
GEP->collectOffset(
DL,
BitWidth, VariableOffsets, ConstantOffset)) {
1549 << (VariableOffsets.
empty() ?
"" :
"not") <<
" constant "
1553 Union.addToAll(ConstantOffset.getSExtValue());
1558 for (
const auto &VI : VariableOffsets) {
1561 if (!PotentialConstantsAA || !PotentialConstantsAA->isValidState()) {
1567 if (PotentialConstantsAA->undefIsContained())
1575 if (AssumedSet.empty())
1579 for (
const auto &ConstOffset : AssumedSet) {
1580 auto CopyPerOffset =
Union;
1581 CopyPerOffset.addToAll(ConstOffset.getSExtValue() *
1582 VI.second.getZExtValue());
1583 Product.merge(CopyPerOffset);
1588 UsrOI = std::move(Union);
1593 using namespace AA::PointerInfo;
1596 Value &AssociatedValue = getAssociatedValue();
1599 OffsetInfoMap[&AssociatedValue].
insert(0);
1601 auto HandlePassthroughUser = [&](
Value *Usr,
Value *CurPtr,
bool &Follow) {
1610 auto &UsrOI = OffsetInfoMap[Usr];
1611 auto &PtrOI = OffsetInfoMap[CurPtr];
1612 assert(!PtrOI.isUnassigned() &&
1613 "Cannot pass through if the input Ptr was not visited!");
1619 const auto *
F = getAnchorScope();
1624 F ?
A.getInfoCache().getTargetLibraryInfoForFunction(*
F) :
nullptr;
1626 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
1628 User *Usr =
U.getUser();
1629 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Analyze " << *CurPtr <<
" in " << *Usr
1632 "The current pointer offset should have been seeded!");
1636 return HandlePassthroughUser(Usr, CurPtr, Follow);
1637 if (
CE->isCompare())
1639 if (!isa<GEPOperator>(CE)) {
1640 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Unhandled constant user " << *CE
1645 if (
auto *
GEP = dyn_cast<GEPOperator>(Usr)) {
1648 auto &UsrOI = OffsetInfoMap[Usr];
1649 auto &PtrOI = OffsetInfoMap[CurPtr];
1651 if (UsrOI.isUnknown())
1654 if (PtrOI.isUnknown()) {
1660 Follow = collectConstantsForGEP(
A,
DL, UsrOI, PtrOI,
GEP);
1663 if (isa<PtrToIntInst>(Usr))
1665 if (isa<CastInst>(Usr) || isa<SelectInst>(Usr) || isa<ReturnInst>(Usr))
1666 return HandlePassthroughUser(Usr, CurPtr, Follow);
1671 if (isa<PHINode>(Usr)) {
1674 bool IsFirstPHIUser = !OffsetInfoMap.
count(Usr);
1675 auto &UsrOI = OffsetInfoMap[Usr];
1676 auto &PtrOI = OffsetInfoMap[CurPtr];
1680 if (PtrOI.isUnknown()) {
1681 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI operand offset unknown "
1682 << *CurPtr <<
" in " << *Usr <<
"\n");
1683 Follow = !UsrOI.isUnknown();
1689 if (UsrOI == PtrOI) {
1690 assert(!PtrOI.isUnassigned() &&
1691 "Cannot assign if the current Ptr was not visited!");
1692 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI is invariant (so far)");
1702 auto It = OffsetInfoMap.
find(CurPtrBase);
1703 if (It == OffsetInfoMap.
end()) {
1704 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI operand is too complex "
1705 << *CurPtr <<
" in " << *Usr <<
"\n");
1719 auto BaseOI = It->getSecond();
1720 BaseOI.addToAll(
Offset.getZExtValue());
1721 if (IsFirstPHIUser || BaseOI == UsrOI) {
1722 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI is invariant " << *CurPtr
1723 <<
" in " << *Usr <<
"\n");
1724 return HandlePassthroughUser(Usr, CurPtr, Follow);
1728 dbgs() <<
"[AAPointerInfo] PHI operand pointer offset mismatch "
1729 << *CurPtr <<
" in " << *Usr <<
"\n");
1740 if (
auto *LoadI = dyn_cast<LoadInst>(Usr)) {
1748 if (!handleAccess(
A, *LoadI,
nullptr, AK,
1749 OffsetInfoMap[CurPtr].Offsets, Changed,
1754 if (
auto *II = dyn_cast<IntrinsicInst>(&
I))
1755 return II->isAssumeLikeIntrinsic();
1766 }
while (FromI && FromI != ToI);
1772 if (IntrI.getIntrinsicID() != Intrinsic::assume)
1775 if (IntrI.getParent() == BB) {
1776 if (IsImpactedInRange(LoadI->getNextNonDebugInstruction(), &IntrI))
1782 if ((*PredIt) != BB)
1787 if (SuccBB == IntrBB)
1789 if (isa<UnreachableInst>(SuccBB->getTerminator()))
1793 if (IsImpactedInRange(LoadI->getNextNonDebugInstruction(),
1796 if (IsImpactedInRange(&IntrBB->
front(), &IntrI))
1802 std::pair<Value *, IntrinsicInst *> Assumption;
1803 for (
const Use &LoadU : LoadI->
uses()) {
1804 if (
auto *CmpI = dyn_cast<CmpInst>(LoadU.getUser())) {
1805 if (!CmpI->isEquality() || !CmpI->isTrueWhenEqual())
1807 for (
const Use &CmpU : CmpI->
uses()) {
1808 if (
auto *IntrI = dyn_cast<IntrinsicInst>(CmpU.getUser())) {
1809 if (!IsValidAssume(*IntrI))
1811 int Idx = CmpI->getOperandUse(0) == LoadU;
1812 Assumption = {CmpI->getOperand(
Idx), IntrI};
1817 if (Assumption.first)
1822 if (!Assumption.first || !Assumption.second)
1826 << *Assumption.second <<
": " << *LoadI
1827 <<
" == " << *Assumption.first <<
"\n");
1828 bool UsedAssumedInformation =
false;
1829 std::optional<Value *>
Content =
nullptr;
1830 if (Assumption.first)
1832 A.getAssumedSimplified(*Assumption.first, *
this,
1834 return handleAccess(
1835 A, *Assumption.second,
Content, AccessKind::AK_ASSUMPTION,
1836 OffsetInfoMap[CurPtr].Offsets, Changed, *LoadI->getType());
1841 for (
auto *OtherOp : OtherOps) {
1842 if (OtherOp == CurPtr) {
1845 <<
"[AAPointerInfo] Escaping use in store like instruction " <<
I
1857 bool UsedAssumedInformation =
false;
1858 std::optional<Value *>
Content =
nullptr;
1862 return handleAccess(
A,
I,
Content, AK, OffsetInfoMap[CurPtr].Offsets,
1866 if (
auto *StoreI = dyn_cast<StoreInst>(Usr))
1867 return HandleStoreLike(*StoreI, StoreI->getValueOperand(),
1868 *StoreI->getValueOperand()->getType(),
1869 {StoreI->getValueOperand()}, AccessKind::AK_W);
1870 if (
auto *RMWI = dyn_cast<AtomicRMWInst>(Usr))
1871 return HandleStoreLike(*RMWI,
nullptr, *RMWI->getValOperand()->getType(),
1872 {RMWI->getValOperand()}, AccessKind::AK_RW);
1873 if (
auto *CXI = dyn_cast<AtomicCmpXchgInst>(Usr))
1874 return HandleStoreLike(
1875 *CXI,
nullptr, *CXI->getNewValOperand()->getType(),
1876 {CXI->getCompareOperand(), CXI->getNewValOperand()},
1879 if (
auto *CB = dyn_cast<CallBase>(Usr)) {
1892 translateAndAddState(
A, *CSArgPI, OffsetInfoMap[CurPtr], *CB) |
1894 return isValidState();
1896 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Call user not handled " << *CB
1902 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] User not handled " << *Usr <<
"\n");
1905 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
1906 assert(OffsetInfoMap.
count(OldU) &&
"Old use should be known already!");
1907 if (OffsetInfoMap.
count(NewU)) {
1909 if (!(OffsetInfoMap[NewU] == OffsetInfoMap[OldU])) {
1910 dbgs() <<
"[AAPointerInfo] Equivalent use callback failed: "
1911 << OffsetInfoMap[NewU] <<
" vs " << OffsetInfoMap[OldU]
1915 return OffsetInfoMap[NewU] == OffsetInfoMap[OldU];
1917 OffsetInfoMap[NewU] = OffsetInfoMap[OldU];
1920 if (!
A.checkForAllUses(UsePred, *
this, AssociatedValue,
1922 true, EquivalentUseCB)) {
1923 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Check for all uses failed, abort!\n");
1924 return indicatePessimisticFixpoint();
1928 dbgs() <<
"Accesses by bin after update:\n";
1935struct AAPointerInfoReturned final : AAPointerInfoImpl {
1937 : AAPointerInfoImpl(IRP,
A) {}
1941 return indicatePessimisticFixpoint();
1945 void trackStatistics()
const override {
1946 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1950struct AAPointerInfoArgument final : AAPointerInfoFloating {
1952 : AAPointerInfoFloating(IRP,
A) {}
1955 void trackStatistics()
const override {
1956 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1960struct AAPointerInfoCallSiteArgument final : AAPointerInfoFloating {
1962 : AAPointerInfoFloating(IRP,
A) {}
1966 using namespace AA::PointerInfo;
1970 if (
auto *
MI = dyn_cast_or_null<MemIntrinsic>(getCtxI())) {
1974 LengthVal =
Length->getSExtValue();
1975 unsigned ArgNo = getIRPosition().getCallSiteArgNo();
1978 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Unhandled memory intrinsic "
1980 return indicatePessimisticFixpoint();
1983 ArgNo == 0 ? AccessKind::AK_MUST_WRITE : AccessKind::AK_MUST_READ;
1985 Changed | addAccess(
A, {0, LengthVal}, *
MI,
nullptr,
Kind,
nullptr);
1988 dbgs() <<
"Accesses by bin after update:\n";
1999 Argument *Arg = getAssociatedArgument();
2004 if (ArgAA && ArgAA->getState().isValidState())
2005 return translateAndAddStateFromCallee(
A, *ArgAA,
2006 *cast<CallBase>(getCtxI()));
2008 return indicatePessimisticFixpoint();
2011 bool IsKnownNoCapture;
2012 if (!AA::hasAssumedIRAttr<Attribute::NoCapture>(
2013 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNoCapture))
2014 return indicatePessimisticFixpoint();
2016 bool IsKnown =
false;
2018 return ChangeStatus::UNCHANGED;
2021 ReadOnly ? AccessKind::AK_MAY_READ : AccessKind::AK_MAY_READ_WRITE;
2027 void trackStatistics()
const override {
2028 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
2032struct AAPointerInfoCallSiteReturned final : AAPointerInfoFloating {
2034 : AAPointerInfoFloating(IRP,
A) {}
2037 void trackStatistics()
const override {
2038 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
2052 assert(!AA::hasAssumedIRAttr<Attribute::NoUnwind>(
2053 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2057 const std::string getAsStr(
Attributor *
A)
const override {
2058 return getAssumed() ?
"nounwind" :
"may-unwind";
2064 (
unsigned)Instruction::Invoke, (
unsigned)Instruction::CallBr,
2065 (
unsigned)Instruction::Call, (
unsigned)Instruction::CleanupRet,
2066 (
unsigned)Instruction::CatchSwitch, (
unsigned)Instruction::Resume};
2069 if (!
I.mayThrow(
true))
2072 if (
const auto *CB = dyn_cast<CallBase>(&
I)) {
2073 bool IsKnownNoUnwind;
2074 return AA::hasAssumedIRAttr<Attribute::NoUnwind>(
2081 bool UsedAssumedInformation =
false;
2082 if (!
A.checkForAllInstructions(CheckForNoUnwind, *
this, Opcodes,
2083 UsedAssumedInformation))
2084 return indicatePessimisticFixpoint();
2086 return ChangeStatus::UNCHANGED;
2090struct AANoUnwindFunction final :
public AANoUnwindImpl {
2092 : AANoUnwindImpl(IRP,
A) {}
2099struct AANoUnwindCallSite final
2100 : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl> {
2102 : AACalleeToCallSite<
AANoUnwind, AANoUnwindImpl>(IRP,
A) {}
2113 case Intrinsic::nvvm_barrier0:
2114 case Intrinsic::nvvm_barrier0_and:
2115 case Intrinsic::nvvm_barrier0_or:
2116 case Intrinsic::nvvm_barrier0_popc:
2118 case Intrinsic::amdgcn_s_barrier:
2119 if (ExecutedAligned)
2132 if (
auto *FI = dyn_cast<FenceInst>(
I))
2135 if (
auto *AI = dyn_cast<AtomicCmpXchgInst>(
I)) {
2142 switch (
I->getOpcode()) {
2143 case Instruction::AtomicRMW:
2144 Ordering = cast<AtomicRMWInst>(
I)->getOrdering();
2146 case Instruction::Store:
2147 Ordering = cast<StoreInst>(
I)->getOrdering();
2149 case Instruction::Load:
2150 Ordering = cast<LoadInst>(
I)->getOrdering();
2154 "New atomic operations need to be known in the attributor.");
2165 if (
auto *
MI = dyn_cast<MemIntrinsic>(
I))
2166 return !
MI->isVolatile();
2177 assert(!AA::hasAssumedIRAttr<Attribute::NoSync>(
A,
nullptr, getIRPosition(),
2178 DepClassTy::NONE, IsKnown));
2182 const std::string getAsStr(
Attributor *
A)
const override {
2183 return getAssumed() ?
"nosync" :
"may-sync";
2199 if (
I.mayReadOrWriteMemory())
2204 if (AA::hasAssumedIRAttr<Attribute::NoSync>(
2213 bool UsedAssumedInformation =
false;
2214 if (!
A.checkForAllReadWriteInstructions(CheckRWInstForNoSync, *
this,
2215 UsedAssumedInformation) ||
2216 !
A.checkForAllCallLikeInstructions(CheckForNoSync, *
this,
2217 UsedAssumedInformation))
2218 return indicatePessimisticFixpoint();
2223struct AANoSyncFunction final :
public AANoSyncImpl {
2225 : AANoSyncImpl(IRP,
A) {}
2232struct AANoSyncCallSite final : AACalleeToCallSite<AANoSync, AANoSyncImpl> {
2234 : AACalleeToCallSite<
AANoSync, AANoSyncImpl>(IRP,
A) {}
2244struct AANoFreeImpl :
public AANoFree {
2250 assert(!AA::hasAssumedIRAttr<Attribute::NoFree>(
A,
nullptr, getIRPosition(),
2251 DepClassTy::NONE, IsKnown));
2259 return AA::hasAssumedIRAttr<Attribute::NoFree>(
2261 DepClassTy::REQUIRED, IsKnown);
2264 bool UsedAssumedInformation =
false;
2265 if (!
A.checkForAllCallLikeInstructions(CheckForNoFree, *
this,
2266 UsedAssumedInformation))
2267 return indicatePessimisticFixpoint();
2268 return ChangeStatus::UNCHANGED;
2272 const std::string getAsStr(
Attributor *
A)
const override {
2273 return getAssumed() ?
"nofree" :
"may-free";
2277struct AANoFreeFunction final :
public AANoFreeImpl {
2279 : AANoFreeImpl(IRP,
A) {}
2286struct AANoFreeCallSite final : AACalleeToCallSite<AANoFree, AANoFreeImpl> {
2288 : AACalleeToCallSite<
AANoFree, AANoFreeImpl>(IRP,
A) {}
2295struct AANoFreeFloating : AANoFreeImpl {
2297 : AANoFreeImpl(IRP,
A) {}
2307 if (AA::hasAssumedIRAttr<Attribute::NoFree>(
A,
this,
2309 DepClassTy::OPTIONAL, IsKnown))
2310 return ChangeStatus::UNCHANGED;
2312 Value &AssociatedValue = getIRPosition().getAssociatedValue();
2313 auto Pred = [&](
const Use &
U,
bool &Follow) ->
bool {
2315 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
2323 return AA::hasAssumedIRAttr<Attribute::NoFree>(
2325 DepClassTy::REQUIRED, IsKnown);
2328 if (isa<GetElementPtrInst>(UserI) || isa<BitCastInst>(UserI) ||
2329 isa<PHINode>(UserI) || isa<SelectInst>(UserI)) {
2333 if (isa<StoreInst>(UserI) || isa<LoadInst>(UserI) ||
2334 isa<ReturnInst>(UserI))
2340 if (!
A.checkForAllUses(Pred, *
this, AssociatedValue))
2341 return indicatePessimisticFixpoint();
2343 return ChangeStatus::UNCHANGED;
2348struct AANoFreeArgument final : AANoFreeFloating {
2350 : AANoFreeFloating(IRP,
A) {}
2357struct AANoFreeCallSiteArgument final : AANoFreeFloating {
2359 : AANoFreeFloating(IRP,
A) {}
2367 Argument *Arg = getAssociatedArgument();
2369 return indicatePessimisticFixpoint();
2372 if (AA::hasAssumedIRAttr<Attribute::NoFree>(
A,
this, ArgPos,
2373 DepClassTy::REQUIRED, IsKnown))
2374 return ChangeStatus::UNCHANGED;
2375 return indicatePessimisticFixpoint();
2383struct AANoFreeReturned final : AANoFreeFloating {
2385 : AANoFreeFloating(IRP,
A) {
2400 void trackStatistics()
const override {}
2404struct AANoFreeCallSiteReturned final : AANoFreeFloating {
2406 : AANoFreeFloating(IRP,
A) {}
2409 return ChangeStatus::UNCHANGED;
2420 bool IgnoreSubsumingPositions) {
2422 AttrKinds.
push_back(Attribute::NonNull);
2425 AttrKinds.
push_back(Attribute::Dereferenceable);
2426 if (
A.hasAttr(IRP, AttrKinds, IgnoreSubsumingPositions, Attribute::NonNull))
2433 if (!Fn->isDeclaration()) {
2443 bool UsedAssumedInformation =
false;
2444 if (!
A.checkForAllInstructions(
2446 Worklist.push_back({*cast<ReturnInst>(I).getReturnValue(), &I});
2450 UsedAssumedInformation))
2462 Attribute::NonNull)});
2467static int64_t getKnownNonNullAndDerefBytesForUse(
2469 const Use *U,
const Instruction *
I,
bool &IsNonNull,
bool &TrackUse) {
2472 const Value *UseV =
U->get();
2479 if (isa<CastInst>(
I)) {
2484 if (isa<GetElementPtrInst>(
I)) {
2494 if (
const auto *CB = dyn_cast<CallBase>(
I)) {
2497 U, {Attribute::NonNull, Attribute::Dereferenceable})) {
2514 bool IsKnownNonNull;
2515 AA::hasAssumedIRAttr<Attribute::NonNull>(
A, &QueryingAA, IRP,
2517 IsNonNull |= IsKnownNonNull;
2524 if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
2525 Loc->Size.isScalable() ||
I->isVolatile())
2531 if (
Base &&
Base == &AssociatedValue) {
2532 int64_t DerefBytes = Loc->Size.getValue() +
Offset;
2534 return std::max(int64_t(0), DerefBytes);
2541 int64_t DerefBytes = Loc->Size.getValue();
2543 return std::max(int64_t(0), DerefBytes);
2554 Value &
V = *getAssociatedValue().stripPointerCasts();
2555 if (isa<ConstantPointerNull>(V)) {
2556 indicatePessimisticFixpoint();
2561 followUsesInMBEC(*
this,
A, getState(), *CtxI);
2567 bool IsNonNull =
false;
2568 bool TrackUse =
false;
2569 getKnownNonNullAndDerefBytesForUse(
A, *
this, getAssociatedValue(), U,
I,
2570 IsNonNull, TrackUse);
2571 State.setKnown(IsNonNull);
2576 const std::string getAsStr(
Attributor *
A)
const override {
2577 return getAssumed() ?
"nonnull" :
"may-null";
2582struct AANonNullFloating :
public AANonNullImpl {
2584 : AANonNullImpl(IRP,
A) {}
2589 bool IsKnownNonNull;
2590 return AA::hasAssumedIRAttr<Attribute::NonNull>(
2591 A, *
this, IRP, DepClassTy::OPTIONAL, IsKnownNonNull);
2595 bool UsedAssumedInformation =
false;
2596 Value *AssociatedValue = &getAssociatedValue();
2598 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
2603 Values.
size() != 1 || Values.
front().getValue() != AssociatedValue;
2607 if (
auto *
PHI = dyn_cast<PHINode>(AssociatedValue))
2609 return AA::hasAssumedIRAttr<Attribute::NonNull>(
2610 A, this, IRPosition::value(*Op), DepClassTy::OPTIONAL,
2613 return ChangeStatus::UNCHANGED;
2614 if (
auto *
Select = dyn_cast<SelectInst>(AssociatedValue))
2615 if (AA::hasAssumedIRAttr<Attribute::NonNull>(
2617 DepClassTy::OPTIONAL, IsKnown) &&
2618 AA::hasAssumedIRAttr<Attribute::NonNull>(
2620 DepClassTy::OPTIONAL, IsKnown))
2621 return ChangeStatus::UNCHANGED;
2628 if (AVIRP == getIRPosition() || !CheckIRP(AVIRP))
2629 return indicatePessimisticFixpoint();
2630 return ChangeStatus::UNCHANGED;
2633 for (
const auto &VAC : Values)
2635 return indicatePessimisticFixpoint();
2637 return ChangeStatus::UNCHANGED;
2645struct AANonNullReturned final
2646 : AAReturnedFromReturnedValues<AANonNull, AANonNull, AANonNull::StateType,
2647 false, AANonNull::IRAttributeKind, false> {
2654 const std::string getAsStr(
Attributor *
A)
const override {
2655 return getAssumed() ?
"nonnull" :
"may-null";
2663struct AANonNullArgument final
2664 : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl> {
2666 : AAArgumentFromCallSiteArguments<
AANonNull, AANonNullImpl>(IRP,
A) {}
2672struct AANonNullCallSiteArgument final : AANonNullFloating {
2674 : AANonNullFloating(IRP,
A) {}
2681struct AANonNullCallSiteReturned final
2682 : AACalleeToCallSite<AANonNull, AANonNullImpl> {
2684 : AACalleeToCallSite<
AANonNull, AANonNullImpl>(IRP,
A) {}
2700 assert(!AA::hasAssumedIRAttr<Attribute::MustProgress>(
2701 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2706 const std::string getAsStr(
Attributor *
A)
const override {
2707 return getAssumed() ?
"mustprogress" :
"may-not-progress";
2711struct AAMustProgressFunction final : AAMustProgressImpl {
2713 : AAMustProgressImpl(IRP,
A) {}
2718 if (AA::hasAssumedIRAttr<Attribute::WillReturn>(
2719 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnown)) {
2721 return indicateOptimisticFixpoint();
2722 return ChangeStatus::UNCHANGED;
2727 bool IsKnownMustProgress;
2728 return AA::hasAssumedIRAttr<Attribute::MustProgress>(
2729 A,
this, IPos, DepClassTy::REQUIRED, IsKnownMustProgress,
2733 bool AllCallSitesKnown =
true;
2734 if (!
A.checkForAllCallSites(CheckForMustProgress, *
this,
2737 return indicatePessimisticFixpoint();
2739 return ChangeStatus::UNCHANGED;
2743 void trackStatistics()
const override {
2749struct AAMustProgressCallSite final : AAMustProgressImpl {
2751 : AAMustProgressImpl(IRP,
A) {}
2760 bool IsKnownMustProgress;
2761 if (!AA::hasAssumedIRAttr<Attribute::MustProgress>(
2762 A,
this, FnPos, DepClassTy::REQUIRED, IsKnownMustProgress))
2763 return indicatePessimisticFixpoint();
2764 return ChangeStatus::UNCHANGED;
2768 void trackStatistics()
const override {
2783 assert(!AA::hasAssumedIRAttr<Attribute::NoRecurse>(
2784 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2789 const std::string getAsStr(
Attributor *
A)
const override {
2790 return getAssumed() ?
"norecurse" :
"may-recurse";
2794struct AANoRecurseFunction final : AANoRecurseImpl {
2796 : AANoRecurseImpl(IRP,
A) {}
2803 bool IsKnownNoRecurse;
2804 if (!AA::hasAssumedIRAttr<Attribute::NoRecurse>(
2807 DepClassTy::NONE, IsKnownNoRecurse))
2809 return IsKnownNoRecurse;
2811 bool UsedAssumedInformation =
false;
2812 if (
A.checkForAllCallSites(CallSitePred, *
this,
true,
2813 UsedAssumedInformation)) {
2819 if (!UsedAssumedInformation)
2820 indicateOptimisticFixpoint();
2821 return ChangeStatus::UNCHANGED;
2826 DepClassTy::REQUIRED);
2827 if (EdgeReachability && EdgeReachability->
canReach(
A, *getAnchorScope()))
2828 return indicatePessimisticFixpoint();
2829 return ChangeStatus::UNCHANGED;
2836struct AANoRecurseCallSite final
2837 : AACalleeToCallSite<AANoRecurse, AANoRecurseImpl> {
2839 : AACalleeToCallSite<
AANoRecurse, AANoRecurseImpl>(IRP,
A) {}
2854 const std::string getAsStr(
Attributor *
A)
const override {
2855 return getAssumed() ?
"non-convergent" :
"may-be-convergent";
2859struct AANonConvergentFunction final : AANonConvergentImpl {
2861 : AANonConvergentImpl(IRP,
A) {}
2867 auto CalleeIsNotConvergent = [&](
Instruction &Inst) {
2868 CallBase &CB = cast<CallBase>(Inst);
2870 if (!Callee ||
Callee->isIntrinsic()) {
2873 if (
Callee->isDeclaration()) {
2874 return !
Callee->hasFnAttribute(Attribute::Convergent);
2881 bool UsedAssumedInformation =
false;
2882 if (!
A.checkForAllCallLikeInstructions(CalleeIsNotConvergent, *
this,
2883 UsedAssumedInformation)) {
2884 return indicatePessimisticFixpoint();
2886 return ChangeStatus::UNCHANGED;
2890 if (isKnownNotConvergent() &&
2891 A.hasAttr(getIRPosition(), Attribute::Convergent)) {
2892 A.removeAttrs(getIRPosition(), {Attribute::Convergent});
2893 return ChangeStatus::CHANGED;
2895 return ChangeStatus::UNCHANGED;
2912 const size_t UBPrevSize = KnownUBInsts.size();
2913 const size_t NoUBPrevSize = AssumedNoUBInsts.size();
2917 if (
I.isVolatile() &&
I.mayWriteToMemory())
2921 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
2930 "Expected pointer operand of memory accessing instruction");
2934 std::optional<Value *> SimplifiedPtrOp =
2935 stopOnUndefOrAssumed(
A, PtrOp, &
I);
2936 if (!SimplifiedPtrOp || !*SimplifiedPtrOp)
2938 const Value *PtrOpVal = *SimplifiedPtrOp;
2943 if (!isa<ConstantPointerNull>(PtrOpVal)) {
2944 AssumedNoUBInsts.insert(&
I);
2956 AssumedNoUBInsts.insert(&
I);
2958 KnownUBInsts.insert(&
I);
2967 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
2971 auto *BrInst = cast<BranchInst>(&
I);
2974 if (BrInst->isUnconditional())
2979 std::optional<Value *> SimplifiedCond =
2980 stopOnUndefOrAssumed(
A, BrInst->getCondition(), BrInst);
2981 if (!SimplifiedCond || !*SimplifiedCond)
2983 AssumedNoUBInsts.insert(&
I);
2991 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
3000 for (
unsigned idx = 0; idx < CB.
arg_size(); idx++) {
3006 if (idx >=
Callee->arg_size())
3018 bool IsKnownNoUndef;
3019 AA::hasAssumedIRAttr<Attribute::NoUndef>(
3020 A,
this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNoUndef);
3021 if (!IsKnownNoUndef)
3023 bool UsedAssumedInformation =
false;
3024 std::optional<Value *> SimplifiedVal =
3027 if (UsedAssumedInformation)
3029 if (SimplifiedVal && !*SimplifiedVal)
3031 if (!SimplifiedVal || isa<UndefValue>(**SimplifiedVal)) {
3032 KnownUBInsts.insert(&
I);
3036 !isa<ConstantPointerNull>(**SimplifiedVal))
3038 bool IsKnownNonNull;
3039 AA::hasAssumedIRAttr<Attribute::NonNull>(
3040 A,
this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNonNull);
3042 KnownUBInsts.insert(&
I);
3048 auto &RI = cast<ReturnInst>(
I);
3051 std::optional<Value *> SimplifiedRetValue =
3052 stopOnUndefOrAssumed(
A, RI.getReturnValue(), &
I);
3053 if (!SimplifiedRetValue || !*SimplifiedRetValue)
3070 if (isa<ConstantPointerNull>(*SimplifiedRetValue)) {
3071 bool IsKnownNonNull;
3072 AA::hasAssumedIRAttr<Attribute::NonNull>(
3076 KnownUBInsts.insert(&
I);
3082 bool UsedAssumedInformation =
false;
3083 A.checkForAllInstructions(InspectMemAccessInstForUB, *
this,
3084 {Instruction::Load, Instruction::Store,
3085 Instruction::AtomicCmpXchg,
3086 Instruction::AtomicRMW},
3087 UsedAssumedInformation,
3089 A.checkForAllInstructions(InspectBrInstForUB, *
this, {Instruction::Br},
3090 UsedAssumedInformation,
3092 A.checkForAllCallLikeInstructions(InspectCallSiteForUB, *
this,
3093 UsedAssumedInformation);
3097 if (!getAnchorScope()->getReturnType()->isVoidTy()) {
3099 if (!
A.isAssumedDead(ReturnIRP,
this,
nullptr, UsedAssumedInformation)) {
3100 bool IsKnownNoUndef;
3101 AA::hasAssumedIRAttr<Attribute::NoUndef>(
3102 A,
this, ReturnIRP, DepClassTy::NONE, IsKnownNoUndef);
3104 A.checkForAllInstructions(InspectReturnInstForUB, *
this,
3105 {Instruction::Ret}, UsedAssumedInformation,
3110 if (NoUBPrevSize != AssumedNoUBInsts.size() ||
3111 UBPrevSize != KnownUBInsts.size())
3112 return ChangeStatus::CHANGED;
3113 return ChangeStatus::UNCHANGED;
3117 return KnownUBInsts.count(
I);
3120 bool isAssumedToCauseUB(
Instruction *
I)
const override {
3127 switch (
I->getOpcode()) {
3128 case Instruction::Load:
3129 case Instruction::Store:
3130 case Instruction::AtomicCmpXchg:
3131 case Instruction::AtomicRMW:
3132 return !AssumedNoUBInsts.count(
I);
3133 case Instruction::Br: {
3134 auto *BrInst = cast<BranchInst>(
I);
3135 if (BrInst->isUnconditional())
3137 return !AssumedNoUBInsts.count(
I);
3146 if (KnownUBInsts.empty())
3147 return ChangeStatus::UNCHANGED;
3149 A.changeToUnreachableAfterManifest(
I);
3150 return ChangeStatus::CHANGED;
3154 const std::string getAsStr(
Attributor *
A)
const override {
3155 return getAssumed() ?
"undefined-behavior" :
"no-ub";
3200 bool UsedAssumedInformation =
false;
3201 std::optional<Value *> SimplifiedV =
3204 if (!UsedAssumedInformation) {
3209 KnownUBInsts.insert(
I);
3210 return std::nullopt;
3216 if (isa<UndefValue>(V)) {
3217 KnownUBInsts.insert(
I);
3218 return std::nullopt;
3224struct AAUndefinedBehaviorFunction final : AAUndefinedBehaviorImpl {
3226 : AAUndefinedBehaviorImpl(IRP,
A) {}
3229 void trackStatistics()
const override {
3231 "Number of instructions known to have UB");
3233 KnownUBInsts.size();
3254 if (SCCI.hasCycle())
3264 for (
auto *L : LI->getLoopsInPreorder()) {
3278 assert(!AA::hasAssumedIRAttr<Attribute::WillReturn>(
3279 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
3284 bool isImpliedByMustprogressAndReadonly(
Attributor &
A,
bool KnownOnly) {
3285 if (!
A.hasAttr(getIRPosition(), {Attribute::MustProgress}))
3290 return IsKnown || !KnownOnly;
3296 if (isImpliedByMustprogressAndReadonly(
A,
false))
3297 return ChangeStatus::UNCHANGED;
3302 if (AA::hasAssumedIRAttr<Attribute::WillReturn>(
3303 A,
this, IPos, DepClassTy::REQUIRED, IsKnown)) {
3309 bool IsKnownNoRecurse;
3310 return AA::hasAssumedIRAttr<Attribute::NoRecurse>(
3311 A,
this, IPos, DepClassTy::REQUIRED, IsKnownNoRecurse);
3314 bool UsedAssumedInformation =
false;
3315 if (!
A.checkForAllCallLikeInstructions(CheckForWillReturn, *
this,
3316 UsedAssumedInformation))
3317 return indicatePessimisticFixpoint();
3319 return ChangeStatus::UNCHANGED;
3323 const std::string getAsStr(
Attributor *
A)
const override {
3324 return getAssumed() ?
"willreturn" :
"may-noreturn";
3328struct AAWillReturnFunction final : AAWillReturnImpl {
3330 : AAWillReturnImpl(IRP,
A) {}
3334 AAWillReturnImpl::initialize(
A);
3337 assert(
F &&
"Did expect an anchor function");
3338 if (
F->isDeclaration() || mayContainUnboundedCycle(*
F,
A))
3339 indicatePessimisticFixpoint();
3347struct AAWillReturnCallSite final
3348 : AACalleeToCallSite<AAWillReturn, AAWillReturnImpl> {
3350 : AACalleeToCallSite<
AAWillReturn, AAWillReturnImpl>(IRP,
A) {}
3354 if (isImpliedByMustprogressAndReadonly(
A,
false))
3355 return ChangeStatus::UNCHANGED;
3357 return AACalleeToCallSite::updateImpl(
A);
3379 const ToTy *To =
nullptr;
3389 assert(Hash == 0 &&
"Computed hash twice!");
3393 detail::combineHashValue(PairDMI ::getHashValue({
From, To}),
3394 InstSetDMI::getHashValue(ExclusionSet));
3404 :
From(&
From), To(&To), ExclusionSet(ES) {
3406 if (!ES || ES->
empty()) {
3407 ExclusionSet =
nullptr;
3408 }
else if (MakeUnique) {
3409 ExclusionSet =
A.getInfoCache().getOrCreateUniqueBlockExecutionSet(ES);
3414 :
From(RQI.
From), To(RQI.To), ExclusionSet(RQI.ExclusionSet) {}
3427 return &TombstoneKey;
3434 if (!PairDMI::isEqual({
LHS->From,
LHS->To}, {
RHS->From,
RHS->To}))
3436 return InstSetDMI::isEqual(
LHS->ExclusionSet,
RHS->ExclusionSet);
3440#define DefineKeys(ToTy) \
3442 ReachabilityQueryInfo<ToTy> \
3443 DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::EmptyKey = \
3444 ReachabilityQueryInfo<ToTy>( \
3445 DenseMapInfo<const Instruction *>::getEmptyKey(), \
3446 DenseMapInfo<const ToTy *>::getEmptyKey()); \
3448 ReachabilityQueryInfo<ToTy> \
3449 DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::TombstoneKey = \
3450 ReachabilityQueryInfo<ToTy>( \
3451 DenseMapInfo<const Instruction *>::getTombstoneKey(), \
3452 DenseMapInfo<const ToTy *>::getTombstoneKey());
3461template <
typename BaseTy,
typename ToTy>
3462struct CachedReachabilityAA :
public BaseTy {
3468 bool isQueryAA()
const override {
return true; }
3473 for (
unsigned u = 0,
e = QueryVector.size();
u <
e; ++
u) {
3474 RQITy *RQI = QueryVector[
u];
3475 if (RQI->Result == RQITy::Reachable::No &&
3476 isReachableImpl(
A, *RQI,
false))
3477 Changed = ChangeStatus::CHANGED;
3482 virtual bool isReachableImpl(
Attributor &
A, RQITy &RQI,
3483 bool IsTemporaryRQI) = 0;
3486 RQITy &RQI,
bool UsedExclusionSet,
bool IsTemporaryRQI) {
3491 QueryCache.erase(&RQI);
3497 if (
Result == RQITy::Reachable::Yes || !UsedExclusionSet) {
3498 RQITy PlainRQI(RQI.From, RQI.To);
3499 if (!QueryCache.count(&PlainRQI)) {
3500 RQITy *RQIPtr =
new (
A.Allocator) RQITy(RQI.From, RQI.To);
3502 QueryVector.push_back(RQIPtr);
3503 QueryCache.insert(RQIPtr);
3508 if (IsTemporaryRQI &&
Result != RQITy::Reachable::Yes && UsedExclusionSet) {
3509 assert((!RQI.ExclusionSet || !RQI.ExclusionSet->empty()) &&
3510 "Did not expect empty set!");
3511 RQITy *RQIPtr =
new (
A.Allocator)
3512 RQITy(
A, *RQI.From, *RQI.To, RQI.ExclusionSet,
true);
3513 assert(RQIPtr->Result == RQITy::Reachable::No &&
"Already reachable?");
3515 assert(!QueryCache.count(RQIPtr));
3516 QueryVector.push_back(RQIPtr);
3517 QueryCache.insert(RQIPtr);
3520 if (
Result == RQITy::Reachable::No && IsTemporaryRQI)
3521 A.registerForUpdate(*
this);
3522 return Result == RQITy::Reachable::Yes;
3525 const std::string getAsStr(
Attributor *
A)
const override {
3527 return "#queries(" + std::to_string(QueryVector.size()) +
")";
3530 bool checkQueryCache(
Attributor &
A, RQITy &StackRQI,
3531 typename RQITy::Reachable &
Result) {
3532 if (!this->getState().isValidState()) {
3533 Result = RQITy::Reachable::Yes;
3539 if (StackRQI.ExclusionSet) {
3540 RQITy PlainRQI(StackRQI.From, StackRQI.To);
3541 auto It = QueryCache.find(&PlainRQI);
3542 if (It != QueryCache.end() && (*It)->Result == RQITy::Reachable::No) {
3543 Result = RQITy::Reachable::No;
3548 auto It = QueryCache.find(&StackRQI);
3549 if (It != QueryCache.end()) {
3556 QueryCache.insert(&StackRQI);
3565struct AAIntraFnReachabilityFunction final
3566 :
public CachedReachabilityAA<AAIntraFnReachability, Instruction> {
3567 using Base = CachedReachabilityAA<AAIntraFnReachability, Instruction>;
3574 bool isAssumedReachable(
3577 auto *NonConstThis =
const_cast<AAIntraFnReachabilityFunction *
>(
this);
3581 RQITy StackRQI(
A,
From, To, ExclusionSet,
false);
3582 typename RQITy::Reachable
Result;
3583 if (!NonConstThis->checkQueryCache(
A, StackRQI, Result))
3584 return NonConstThis->isReachableImpl(
A, StackRQI,
3586 return Result == RQITy::Reachable::Yes;
3593 A.getAAFor<
AAIsDead>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
3596 [&](
const auto &DeadEdge) {
3597 return LivenessAA->isEdgeDead(DeadEdge.first,
3601 return LivenessAA->isAssumedDead(BB);
3603 return ChangeStatus::UNCHANGED;
3607 return Base::updateImpl(
A);
3611 bool IsTemporaryRQI)
override {
3613 bool UsedExclusionSet =
false;
3618 while (IP && IP != &To) {
3619 if (ExclusionSet && IP != Origin && ExclusionSet->
count(IP)) {
3620 UsedExclusionSet =
true;
3631 "Not an intra-procedural query!");
3635 if (FromBB == ToBB &&
3636 WillReachInBlock(*RQI.From, *RQI.To, RQI.ExclusionSet))
3637 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3642 if (!WillReachInBlock(ToBB->
front(), *RQI.To, RQI.ExclusionSet))
3643 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3648 if (RQI.ExclusionSet)
3649 for (
auto *
I : *RQI.ExclusionSet)
3650 if (
I->getFunction() == Fn)
3651 ExclusionBlocks.
insert(
I->getParent());
3654 if (ExclusionBlocks.
count(FromBB) &&
3657 return rememberResult(
A, RQITy::Reachable::No, RQI,
true, IsTemporaryRQI);
3660 A.getAAFor<
AAIsDead>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
3661 if (LivenessAA && LivenessAA->isAssumedDead(ToBB)) {
3662 DeadBlocks.insert(ToBB);
3663 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3672 while (!Worklist.
empty()) {
3674 if (!Visited.
insert(BB).second)
3677 if (LivenessAA && LivenessAA->isEdgeDead(BB, SuccBB)) {
3678 LocalDeadEdges.
insert({BB, SuccBB});
3683 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3686 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3689 if (ExclusionBlocks.
count(SuccBB)) {
3690 UsedExclusionSet =
true;
3697 DeadEdges.insert(LocalDeadEdges.
begin(), LocalDeadEdges.
end());
3698 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3703 void trackStatistics()
const override {}
3723 bool IgnoreSubsumingPositions) {
3724 assert(ImpliedAttributeKind == Attribute::NoAlias &&
3725 "Unexpected attribute kind");
3728 if (isa<AllocaInst>(Val))
3731 IgnoreSubsumingPositions =
true;
3734 if (isa<UndefValue>(Val))
3737 if (isa<ConstantPointerNull>(Val) &&
3742 if (
A.hasAttr(IRP, {Attribute::ByVal, Attribute::NoAlias},
3743 IgnoreSubsumingPositions, Attribute::NoAlias))
3753 "Noalias is a pointer attribute");
3756 const std::string getAsStr(
Attributor *
A)
const override {
3757 return getAssumed() ?
"noalias" :
"may-alias";
3762struct AANoAliasFloating final : AANoAliasImpl {
3764 : AANoAliasImpl(IRP,
A) {}
3769 return indicatePessimisticFixpoint();
3773 void trackStatistics()
const override {
3779struct AANoAliasArgument final
3780 : AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl> {
3781 using Base = AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl>;
3793 if (AA::hasAssumedIRAttr<Attribute::NoSync>(
3795 DepClassTy::OPTIONAL, IsKnownNoSycn))
3796 return Base::updateImpl(
A);
3801 return Base::updateImpl(
A);
3805 bool UsedAssumedInformation =
false;
3806 if (
A.checkForAllCallSites(
3808 true, UsedAssumedInformation))
3809 return Base::updateImpl(
A);
3817 return indicatePessimisticFixpoint();
3824struct AANoAliasCallSiteArgument final : AANoAliasImpl {
3826 : AANoAliasImpl(IRP,
A) {}
3832 const CallBase &CB,
unsigned OtherArgNo) {
3834 if (this->getCalleeArgNo() == (
int)OtherArgNo)
3846 if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadNone()) {
3847 A.recordDependence(*CBArgMemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3854 if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadOnly() &&
3856 A.recordDependence(MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3857 A.recordDependence(*CBArgMemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3863 AAR =
A.getInfoCache().getAnalysisResultForFunction<
AAManager>(
3867 bool IsAliasing = !AAR || !AAR->
isNoAlias(&getAssociatedValue(), ArgOp);
3869 "callsite arguments: "
3870 << getAssociatedValue() <<
" " << *ArgOp <<
" => "
3871 << (IsAliasing ?
"" :
"no-") <<
"alias \n");
3876 bool isKnownNoAliasDueToNoAliasPreservation(
3896 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
3907 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
3912 bool IsKnownNoCapture;
3913 if (AA::hasAssumedIRAttr<Attribute::NoCapture>(
3915 DepClassTy::OPTIONAL, IsKnownNoCapture))
3921 A, *UserI, *getCtxI(), *
this,
nullptr,
3922 [ScopeFn](
const Function &Fn) {
return &Fn != ScopeFn; }))
3930 case UseCaptureKind::NO_CAPTURE:
3932 case UseCaptureKind::MAY_CAPTURE:
3936 case UseCaptureKind::PASSTHROUGH:
3943 bool IsKnownNoCapture;
3945 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
3946 A,
this, VIRP, DepClassTy::NONE, IsKnownNoCapture,
false, &NoCaptureAA);
3947 if (!IsAssumedNoCapture &&
3949 if (!
A.checkForAllUses(UsePred, *
this, getAssociatedValue())) {
3951 dbgs() <<
"[AANoAliasCSArg] " << getAssociatedValue()
3952 <<
" cannot be noalias as it is potentially captured\n");
3957 A.recordDependence(*NoCaptureAA, *
this, DepClassTy::OPTIONAL);
3962 const auto &CB = cast<CallBase>(getAnchorValue());
3963 for (
unsigned OtherArgNo = 0; OtherArgNo < CB.
arg_size(); OtherArgNo++)
3964 if (mayAliasWithArgument(
A, AAR, MemBehaviorAA, CB, OtherArgNo))
3974 auto *MemBehaviorAA =
3977 A.recordDependence(*MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3978 return ChangeStatus::UNCHANGED;
3981 bool IsKnownNoAlias;
3983 if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
3984 A,
this, VIRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
3986 <<
" is not no-alias at the definition\n");
3987 return indicatePessimisticFixpoint();
3991 if (MemBehaviorAA &&
3992 isKnownNoAliasDueToNoAliasPreservation(
A, AAR, *MemBehaviorAA)) {
3994 dbgs() <<
"[AANoAlias] No-Alias deduced via no-alias preservation\n");
3995 return ChangeStatus::UNCHANGED;
3998 return indicatePessimisticFixpoint();
4006struct AANoAliasReturned final : AANoAliasImpl {
4008 : AANoAliasImpl(IRP,
A) {}
4013 auto CheckReturnValue = [&](
Value &RV) ->
bool {
4014 if (
Constant *
C = dyn_cast<Constant>(&RV))
4015 if (
C->isNullValue() || isa<UndefValue>(
C))
4020 if (!isa<CallBase>(&RV))
4024 bool IsKnownNoAlias;
4025 if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
4026 A,
this, RVPos, DepClassTy::REQUIRED, IsKnownNoAlias))
4029 bool IsKnownNoCapture;
4031 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
4032 A,
this, RVPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
4034 return IsAssumedNoCapture ||
4038 if (!
A.checkForAllReturnedValues(CheckReturnValue, *
this))
4039 return indicatePessimisticFixpoint();
4041 return ChangeStatus::UNCHANGED;
4049struct AANoAliasCallSiteReturned final
4050 : AACalleeToCallSite<AANoAlias, AANoAliasImpl> {
4052 : AACalleeToCallSite<
AANoAlias, AANoAliasImpl>(IRP,
A) {}
4062struct AAIsDeadValueImpl :
public AAIsDead {
4066 bool isAssumedDead()
const override {
return isAssumed(IS_DEAD); }
4069 bool isKnownDead()
const override {
return isKnown(IS_DEAD); }
4072 bool isAssumedDead(
const BasicBlock *BB)
const override {
return false; }
4075 bool isKnownDead(
const BasicBlock *BB)
const override {
return false; }
4078 bool isAssumedDead(
const Instruction *
I)
const override {
4079 return I == getCtxI() && isAssumedDead();
4083 bool isKnownDead(
const Instruction *
I)
const override {
4084 return isAssumedDead(
I) && isKnownDead();
4088 const std::string getAsStr(
Attributor *
A)
const override {
4089 return isAssumedDead() ?
"assumed-dead" :
"assumed-live";
4095 if (
V.getType()->isVoidTy() ||
V.use_empty())
4099 if (!isa<Constant>(V)) {
4100 if (
auto *
I = dyn_cast<Instruction>(&V))
4101 if (!
A.isRunOn(*
I->getFunction()))
4103 bool UsedAssumedInformation =
false;
4104 std::optional<Constant *>
C =
4105 A.getAssumedConstant(V, *
this, UsedAssumedInformation);
4110 auto UsePred = [&](
const Use &
U,
bool &Follow) {
return false; };
4115 return A.checkForAllUses(UsePred, *
this, V,
false,
4116 DepClassTy::REQUIRED,
4125 auto *CB = dyn_cast<CallBase>(
I);
4126 if (!CB || isa<IntrinsicInst>(CB))
4131 bool IsKnownNoUnwind;
4132 if (!AA::hasAssumedIRAttr<Attribute::NoUnwind>(
4133 A,
this, CallIRP, DepClassTy::OPTIONAL, IsKnownNoUnwind))
4141struct AAIsDeadFloating :
public AAIsDeadValueImpl {
4143 : AAIsDeadValueImpl(IRP,
A) {}
4147 AAIsDeadValueImpl::initialize(
A);
4149 if (isa<UndefValue>(getAssociatedValue())) {
4150 indicatePessimisticFixpoint();
4154 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
4155 if (!isAssumedSideEffectFree(
A,
I)) {
4156 if (!isa_and_nonnull<StoreInst>(
I) && !isa_and_nonnull<FenceInst>(
I))
4157 indicatePessimisticFixpoint();
4159 removeAssumedBits(HAS_NO_EFFECT);
4166 if (!ExecDomainAA || !ExecDomainAA->isNoOpFence(FI))
4168 A.recordDependence(*ExecDomainAA, *
this, DepClassTy::OPTIONAL);
4175 if (
SI.isVolatile())
4181 bool UsedAssumedInformation =
false;
4182 if (!AssumeOnlyInst) {
4183 PotentialCopies.clear();
4185 UsedAssumedInformation)) {
4188 <<
"[AAIsDead] Could not determine potential copies of store!\n");
4192 LLVM_DEBUG(
dbgs() <<
"[AAIsDead] Store has " << PotentialCopies.size()
4193 <<
" potential copies.\n");
4198 UsedAssumedInformation))
4200 if (
auto *LI = dyn_cast<LoadInst>(V)) {
4202 auto &UserI = cast<Instruction>(*U.getUser());
4203 if (InfoCache.isOnlyUsedByAssume(UserI)) {
4205 AssumeOnlyInst->insert(&UserI);
4208 return A.isAssumedDead(U,
this,
nullptr, UsedAssumedInformation);
4214 <<
" is assumed live!\n");
4220 const std::string getAsStr(
Attributor *
A)
const override {
4221 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
4222 if (isa_and_nonnull<StoreInst>(
I))
4224 return "assumed-dead-store";
4225 if (isa_and_nonnull<FenceInst>(
I))
4227 return "assumed-dead-fence";
4228 return AAIsDeadValueImpl::getAsStr(
A);
4233 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
4234 if (
auto *SI = dyn_cast_or_null<StoreInst>(
I)) {
4235 if (!isDeadStore(
A, *SI))
4236 return indicatePessimisticFixpoint();
4237 }
else if (
auto *FI = dyn_cast_or_null<FenceInst>(
I)) {
4238 if (!isDeadFence(
A, *FI))
4239 return indicatePessimisticFixpoint();
4241 if (!isAssumedSideEffectFree(
A,
I))
4242 return indicatePessimisticFixpoint();
4243 if (!areAllUsesAssumedDead(
A, getAssociatedValue()))
4244 return indicatePessimisticFixpoint();
4249 bool isRemovableStore()
const override {
4250 return isAssumed(IS_REMOVABLE) && isa<StoreInst>(&getAssociatedValue());
4255 Value &
V = getAssociatedValue();
4256 if (
auto *
I = dyn_cast<Instruction>(&V)) {
4261 if (
auto *SI = dyn_cast<StoreInst>(
I)) {
4263 bool IsDead = isDeadStore(
A, *SI, &AssumeOnlyInst);
4266 A.deleteAfterManifest(*
I);
4267 for (
size_t i = 0; i < AssumeOnlyInst.
size(); ++i) {
4269 for (
auto *Usr : AOI->
users())
4270 AssumeOnlyInst.
insert(cast<Instruction>(Usr));
4271 A.deleteAfterManifest(*AOI);
4275 if (
auto *FI = dyn_cast<FenceInst>(
I)) {
4277 A.deleteAfterManifest(*FI);
4280 if (isAssumedSideEffectFree(
A,
I) && !isa<InvokeInst>(
I)) {
4281 A.deleteAfterManifest(*
I);
4289 void trackStatistics()
const override {
4298struct AAIsDeadArgument :
public AAIsDeadFloating {
4300 : AAIsDeadFloating(IRP,
A) {}
4304 Argument &Arg = *getAssociatedArgument();
4305 if (
A.isValidFunctionSignatureRewrite(Arg, {}))
4306 if (
A.registerFunctionSignatureRewrite(
4310 return ChangeStatus::CHANGED;
4312 return ChangeStatus::UNCHANGED;
4319struct AAIsDeadCallSiteArgument :
public AAIsDeadValueImpl {
4321 : AAIsDeadValueImpl(IRP,
A) {}
4325 AAIsDeadValueImpl::initialize(
A);
4326 if (isa<UndefValue>(getAssociatedValue()))
4327 indicatePessimisticFixpoint();
4336 Argument *Arg = getAssociatedArgument();
4338 return indicatePessimisticFixpoint();
4340 auto *ArgAA =
A.getAAFor<
AAIsDead>(*
this, ArgPos, DepClassTy::REQUIRED);
4342 return indicatePessimisticFixpoint();
4348 CallBase &CB = cast<CallBase>(getAnchorValue());
4350 assert(!isa<UndefValue>(
U.get()) &&
4351 "Expected undef values to be filtered out!");
4353 if (
A.changeUseAfterManifest(U, UV))
4354 return ChangeStatus::CHANGED;
4355 return ChangeStatus::UNCHANGED;
4362struct AAIsDeadCallSiteReturned :
public AAIsDeadFloating {
4364 : AAIsDeadFloating(IRP,
A) {}
4367 bool isAssumedDead()
const override {
4368 return AAIsDeadFloating::isAssumedDead() && IsAssumedSideEffectFree;
4373 AAIsDeadFloating::initialize(
A);
4374 if (isa<UndefValue>(getAssociatedValue())) {
4375 indicatePessimisticFixpoint();
4380 IsAssumedSideEffectFree = isAssumedSideEffectFree(
A, getCtxI());
4386 if (IsAssumedSideEffectFree && !isAssumedSideEffectFree(
A, getCtxI())) {
4387 IsAssumedSideEffectFree =
false;
4388 Changed = ChangeStatus::CHANGED;
4390 if (!areAllUsesAssumedDead(
A, getAssociatedValue()))
4391 return indicatePessimisticFixpoint();
4396 void trackStatistics()
const override {
4397 if (IsAssumedSideEffectFree)
4404 const std::string getAsStr(
Attributor *
A)
const override {
4405 return isAssumedDead()
4407 : (getAssumed() ?
"assumed-dead-users" :
"assumed-live");
4411 bool IsAssumedSideEffectFree =
true;
4414struct AAIsDeadReturned :
public AAIsDeadValueImpl {
4416 : AAIsDeadValueImpl(IRP,
A) {}
4421 bool UsedAssumedInformation =
false;
4422 A.checkForAllInstructions([](
Instruction &) {
return true; }, *
this,
4423 {Instruction::Ret}, UsedAssumedInformation);
4426 if (ACS.isCallbackCall() || !ACS.getInstruction())
4428 return areAllUsesAssumedDead(
A, *ACS.getInstruction());
4431 if (!
A.checkForAllCallSites(PredForCallSite, *
this,
true,
4432 UsedAssumedInformation))
4433 return indicatePessimisticFixpoint();
4435 return ChangeStatus::UNCHANGED;
4441 bool AnyChange =
false;
4449 bool UsedAssumedInformation =
false;
4450 A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
4451 UsedAssumedInformation);
4452 return AnyChange ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
4459struct AAIsDeadFunction :
public AAIsDead {
4465 assert(
F &&
"Did expect an anchor function");
4466 if (!isAssumedDeadInternalFunction(
A)) {
4467 ToBeExploredFrom.insert(&
F->getEntryBlock().front());
4468 assumeLive(
A,
F->getEntryBlock());
4472 bool isAssumedDeadInternalFunction(
Attributor &
A) {
4473 if (!getAnchorScope()->hasLocalLinkage())
4475 bool UsedAssumedInformation =
false;
4477 true, UsedAssumedInformation);
4481 const std::string getAsStr(
Attributor *
A)
const override {
4482 return "Live[#BB " + std::to_string(AssumedLiveBlocks.size()) +
"/" +
4483 std::to_string(getAnchorScope()->
size()) +
"][#TBEP " +
4484 std::to_string(ToBeExploredFrom.size()) +
"][#KDE " +
4485 std::to_string(KnownDeadEnds.size()) +
"]";
4490 assert(getState().isValidState() &&
4491 "Attempted to manifest an invalid state!");
4496 if (AssumedLiveBlocks.empty()) {
4497 A.deleteAfterManifest(
F);
4498 return ChangeStatus::CHANGED;
4504 bool Invoke2CallAllowed = !mayCatchAsynchronousExceptions(
F);
4506 KnownDeadEnds.set_union(ToBeExploredFrom);
4507 for (
const Instruction *DeadEndI : KnownDeadEnds) {
4508 auto *CB = dyn_cast<CallBase>(DeadEndI);
4511 bool IsKnownNoReturn;
4512 bool MayReturn = !AA::hasAssumedIRAttr<Attribute::NoReturn>(
4515 if (MayReturn && (!Invoke2CallAllowed || !isa<InvokeInst>(CB)))
4518 if (
auto *II = dyn_cast<InvokeInst>(DeadEndI))
4519 A.registerInvokeWithDeadSuccessor(
const_cast<InvokeInst &
>(*II));
4521 A.changeToUnreachableAfterManifest(
4522 const_cast<Instruction *
>(DeadEndI->getNextNode()));
4523 HasChanged = ChangeStatus::CHANGED;
4528 if (!AssumedLiveBlocks.count(&BB)) {
4529 A.deleteAfterManifest(BB);
4531 HasChanged = ChangeStatus::CHANGED;
4541 assert(
From->getParent() == getAnchorScope() &&
4543 "Used AAIsDead of the wrong function");
4544 return isValidState() && !AssumedLiveEdges.count(std::make_pair(
From, To));
4548 void trackStatistics()
const override {}
4551 bool isAssumedDead()
const override {
return false; }
4554 bool isKnownDead()
const override {
return false; }
4557 bool isAssumedDead(
const BasicBlock *BB)
const override {
4559 "BB must be in the same anchor scope function.");
4563 return !AssumedLiveBlocks.count(BB);
4567 bool isKnownDead(
const BasicBlock *BB)
const override {
4568 return getKnown() && isAssumedDead(BB);
4572 bool isAssumedDead(
const Instruction *
I)
const override {
4573 assert(
I->getParent()->getParent() == getAnchorScope() &&
4574 "Instruction must be in the same anchor scope function.");
4581 if (!AssumedLiveBlocks.count(
I->getParent()))
4587 if (KnownDeadEnds.count(PrevI) || ToBeExploredFrom.count(PrevI))
4595 bool isKnownDead(
const Instruction *
I)
const override {
4596 return getKnown() && isAssumedDead(
I);
4602 if (!AssumedLiveBlocks.insert(&BB).second)
4610 if (
const auto *CB = dyn_cast<CallBase>(&
I))
4612 if (
F->hasLocalLinkage())
4613 A.markLiveInternalFunction(*
F);
4637 bool IsKnownNoReturn;
4638 if (AA::hasAssumedIRAttr<Attribute::NoReturn>(
4640 return !IsKnownNoReturn;
4652 bool UsedAssumedInformation =
4653 identifyAliveSuccessors(
A, cast<CallBase>(II), AA, AliveSuccessors);
4658 if (AAIsDeadFunction::mayCatchAsynchronousExceptions(*II.
getFunction())) {
4663 bool IsKnownNoUnwind;
4664 if (AA::hasAssumedIRAttr<Attribute::NoUnwind>(
4666 UsedAssumedInformation |= !IsKnownNoUnwind;
4671 return UsedAssumedInformation;
4678 bool UsedAssumedInformation =
false;
4682 std::optional<Constant *>
C =
4683 A.getAssumedConstant(*BI.
getCondition(), AA, UsedAssumedInformation);
4684 if (!
C || isa_and_nonnull<UndefValue>(*
C)) {
4686 }
else if (isa_and_nonnull<ConstantInt>(*
C)) {
4688 BI.
getSuccessor(1 - cast<ConstantInt>(*C)->getValue().getZExtValue());
4693 UsedAssumedInformation =
false;
4696 return UsedAssumedInformation;
4703 bool UsedAssumedInformation =
false;
4707 UsedAssumedInformation)) {
4714 if (Values.
empty() ||
4715 (Values.
size() == 1 &&
4716 isa_and_nonnull<UndefValue>(Values.
front().getValue()))) {
4718 return UsedAssumedInformation;
4721 Type &Ty = *
SI.getCondition()->getType();
4723 auto CheckForConstantInt = [&](
Value *
V) {
4724 if (
auto *CI = dyn_cast_if_present<ConstantInt>(
AA::getWithType(*V, Ty))) {
4732 return CheckForConstantInt(
VAC.getValue());
4736 return UsedAssumedInformation;
4739 unsigned MatchedCases = 0;
4740 for (
const auto &CaseIt :
SI.cases()) {
4741 if (
Constants.count(CaseIt.getCaseValue())) {
4743 AliveSuccessors.
push_back(&CaseIt.getCaseSuccessor()->front());
4750 AliveSuccessors.
push_back(&
SI.getDefaultDest()->front());
4751 return UsedAssumedInformation;
4757 if (AssumedLiveBlocks.empty()) {
4758 if (isAssumedDeadInternalFunction(
A))
4762 ToBeExploredFrom.insert(&
F->getEntryBlock().front());
4763 assumeLive(
A,
F->getEntryBlock());
4767 LLVM_DEBUG(
dbgs() <<
"[AAIsDead] Live [" << AssumedLiveBlocks.size() <<
"/"
4768 << getAnchorScope()->
size() <<
"] BBs and "
4769 << ToBeExploredFrom.size() <<
" exploration points and "
4770 << KnownDeadEnds.size() <<
" known dead ends\n");
4775 ToBeExploredFrom.end());
4776 decltype(ToBeExploredFrom) NewToBeExploredFrom;
4779 while (!Worklist.
empty()) {
4785 while (!
I->isTerminator() && !isa<CallBase>(
I))
4786 I =
I->getNextNode();
4788 AliveSuccessors.
clear();
4790 bool UsedAssumedInformation =
false;
4791 switch (
I->getOpcode()) {
4795 "Expected non-terminators to be handled already!");
4799 case Instruction::Call:
4800 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<CallInst>(*
I),
4801 *
this, AliveSuccessors);
4803 case Instruction::Invoke:
4804 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<InvokeInst>(*
I),
4805 *
this, AliveSuccessors);
4807 case Instruction::Br:
4808 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<BranchInst>(*
I),
4809 *
this, AliveSuccessors);
4811 case Instruction::Switch:
4812 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<SwitchInst>(*
I),
4813 *
this, AliveSuccessors);
4817 if (UsedAssumedInformation) {
4818 NewToBeExploredFrom.insert(
I);
4819 }
else if (AliveSuccessors.
empty() ||
4820 (
I->isTerminator() &&
4821 AliveSuccessors.
size() <
I->getNumSuccessors())) {
4822 if (KnownDeadEnds.insert(
I))
4827 << AliveSuccessors.
size() <<
" UsedAssumedInformation: "
4828 << UsedAssumedInformation <<
"\n");
4830 for (
const Instruction *AliveSuccessor : AliveSuccessors) {
4831 if (!
I->isTerminator()) {
4832 assert(AliveSuccessors.size() == 1 &&
4833 "Non-terminator expected to have a single successor!");
4837 auto Edge = std::make_pair(
I->getParent(), AliveSuccessor->getParent());
4838 if (AssumedLiveEdges.insert(Edge).second)
4840 if (assumeLive(
A, *AliveSuccessor->getParent()))
4847 if (NewToBeExploredFrom.size() != ToBeExploredFrom.size() ||
4849 return !ToBeExploredFrom.count(I);
4852 ToBeExploredFrom = std::move(NewToBeExploredFrom);
4861 if (ToBeExploredFrom.empty() &&
4862 getAnchorScope()->
size() == AssumedLiveBlocks.size() &&
4864 return DeadEndI->isTerminator() && DeadEndI->getNumSuccessors() == 0;
4866 return indicatePessimisticFixpoint();
4871struct AAIsDeadCallSite final : AAIsDeadFunction {
4873 : AAIsDeadFunction(IRP,
A) {}
4882 "supported for call sites yet!");
4887 return indicatePessimisticFixpoint();
4891 void trackStatistics()
const override {}
4905 Value &
V = *getAssociatedValue().stripPointerCasts();
4907 A.getAttrs(getIRPosition(),
4908 {Attribute::Dereferenceable, Attribute::DereferenceableOrNull},
4911 takeKnownDerefBytesMaximum(Attr.getValueAsInt());
4914 bool IsKnownNonNull;
4915 AA::hasAssumedIRAttr<Attribute::NonNull>(
4916 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNonNull);
4918 bool CanBeNull, CanBeFreed;
4919 takeKnownDerefBytesMaximum(
V.getPointerDereferenceableBytes(
4920 A.getDataLayout(), CanBeNull, CanBeFreed));
4923 followUsesInMBEC(*
this,
A, getState(), *CtxI);
4928 StateType &getState()
override {
return *
this; }
4929 const StateType &getState()
const override {
return *
this; }
4935 const Value *UseV =
U->get();
4940 if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
I->isVolatile())
4945 Loc->Ptr,
Offset,
A.getDataLayout(),
true);
4946 if (
Base &&
Base == &getAssociatedValue())
4947 State.addAccessedBytes(
Offset, Loc->Size.getValue());
4953 bool IsNonNull =
false;
4954 bool TrackUse =
false;
4955 int64_t DerefBytes = getKnownNonNullAndDerefBytesForUse(
4956 A, *
this, getAssociatedValue(), U,
I, IsNonNull, TrackUse);
4957 LLVM_DEBUG(
dbgs() <<
"[AADereferenceable] Deref bytes: " << DerefBytes
4958 <<
" for instruction " << *
I <<
"\n");
4960 addAccessedBytesForUse(
A, U,
I, State);
4961 State.takeKnownDerefBytesMaximum(DerefBytes);
4968 bool IsKnownNonNull;
4969 bool IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
4970 A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
4971 if (IsAssumedNonNull &&
4972 A.hasAttr(getIRPosition(), Attribute::DereferenceableOrNull)) {
4973 A.removeAttrs(getIRPosition(), {Attribute::DereferenceableOrNull});
4974 return ChangeStatus::CHANGED;
4982 bool IsKnownNonNull;
4983 bool IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
4984 A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
4985 if (IsAssumedNonNull)
4987 Ctx, getAssumedDereferenceableBytes()));
4990 Ctx, getAssumedDereferenceableBytes()));
4994 const std::string getAsStr(
Attributor *
A)
const override {
4995 if (!getAssumedDereferenceableBytes())
4996 return "unknown-dereferenceable";
4997 bool IsKnownNonNull;
4998 bool IsAssumedNonNull =
false;
5000 IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
5001 *
A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
5002 return std::string(
"dereferenceable") +
5003 (IsAssumedNonNull ?
"" :
"_or_null") +
5004 (isAssumedGlobal() ?
"_globally" :
"") +
"<" +
5005 std::to_string(getKnownDereferenceableBytes()) +
"-" +
5006 std::to_string(getAssumedDereferenceableBytes()) +
">" +
5007 (!
A ?
" [non-null is unknown]" :
"");
5012struct AADereferenceableFloating : AADereferenceableImpl {
5014 : AADereferenceableImpl(IRP,
A) {}
5019 bool UsedAssumedInformation =
false;
5021 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
5023 Values.
push_back({getAssociatedValue(), getCtxI()});
5026 Stripped = Values.
size() != 1 ||
5027 Values.
front().getValue() != &getAssociatedValue();
5033 auto VisitValueCB = [&](
const Value &
V) ->
bool {
5035 DL.getIndexSizeInBits(
V.getType()->getPointerAddressSpace());
5043 int64_t DerefBytes = 0;
5044 if (!AA || (!Stripped &&
this == AA)) {
5047 bool CanBeNull, CanBeFreed;
5049 Base->getPointerDereferenceableBytes(
DL, CanBeNull, CanBeFreed);
5050 T.GlobalState.indicatePessimisticFixpoint();
5053 DerefBytes =
DS.DerefBytesState.getAssumed();
5054 T.GlobalState &=
DS.GlobalState;
5060 int64_t OffsetSExt =
Offset.getSExtValue();
5064 T.takeAssumedDerefBytesMinimum(
5065 std::max(int64_t(0), DerefBytes - OffsetSExt));
5070 T.takeKnownDerefBytesMaximum(
5071 std::max(int64_t(0), DerefBytes - OffsetSExt));
5072 T.indicatePessimisticFixpoint();
5073 }
else if (OffsetSExt > 0) {
5079 T.indicatePessimisticFixpoint();
5083 return T.isValidState();
5086 for (
const auto &VAC : Values)
5087 if (!VisitValueCB(*
VAC.getValue()))
5088 return indicatePessimisticFixpoint();
5094 void trackStatistics()
const override {
5100struct AADereferenceableReturned final
5101 : AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl> {
5103 AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl>;
5108 void trackStatistics()
const override {
5114struct AADereferenceableArgument final
5115 : AAArgumentFromCallSiteArguments<AADereferenceable,
5116 AADereferenceableImpl> {
5118 AAArgumentFromCallSiteArguments<AADereferenceable, AADereferenceableImpl>;
5123 void trackStatistics()
const override {
5129struct AADereferenceableCallSiteArgument final : AADereferenceableFloating {
5131 : AADereferenceableFloating(IRP,
A) {}
5134 void trackStatistics()
const override {
5140struct AADereferenceableCallSiteReturned final
5141 : AACalleeToCallSite<AADereferenceable, AADereferenceableImpl> {
5142 using Base = AACalleeToCallSite<AADereferenceable, AADereferenceableImpl>;
5147 void trackStatistics()
const override {
5157 Value &AssociatedValue,
const Use *U,
5161 if (isa<CastInst>(
I)) {
5163 TrackUse = !isa<PtrToIntInst>(
I);
5166 if (
auto *
GEP = dyn_cast<GetElementPtrInst>(
I)) {
5167 if (
GEP->hasAllConstantIndices())
5173 if (
const auto *CB = dyn_cast<CallBase>(
I)) {
5187 const Value *UseV =
U->get();
5188 if (
auto *SI = dyn_cast<StoreInst>(
I)) {
5189 if (
SI->getPointerOperand() == UseV)
5190 MA =
SI->getAlign();
5191 }
else if (
auto *LI = dyn_cast<LoadInst>(
I)) {
5192 if (LI->getPointerOperand() == UseV)
5193 MA = LI->getAlign();
5194 }
else if (
auto *AI = dyn_cast<AtomicRMWInst>(
I)) {
5195 if (AI->getPointerOperand() == UseV)
5196 MA = AI->getAlign();
5197 }
else if (
auto *AI = dyn_cast<AtomicCmpXchgInst>(
I)) {
5198 if (AI->getPointerOperand() == UseV)
5199 MA = AI->getAlign();
5205 unsigned Alignment = MA->value();
5209 if (
Base == &AssociatedValue) {
5228 A.getAttrs(getIRPosition(), {Attribute::Alignment},
Attrs);
5230 takeKnownMaximum(Attr.getValueAsInt());
5232 Value &
V = *getAssociatedValue().stripPointerCasts();
5233 takeKnownMaximum(
V.getPointerAlignment(
A.getDataLayout()).value());
5236 followUsesInMBEC(*
this,
A, getState(), *CtxI);
5241 ChangeStatus LoadStoreChanged = ChangeStatus::UNCHANGED;
5244 Value &AssociatedValue = getAssociatedValue();
5245 for (
const Use &U : AssociatedValue.
uses()) {
5246 if (
auto *SI = dyn_cast<StoreInst>(
U.getUser())) {
5247 if (
SI->getPointerOperand() == &AssociatedValue)
5248 if (
SI->getAlign() < getAssumedAlign()) {
5250 "Number of times alignment added to a store");
5251 SI->setAlignment(getAssumedAlign());
5252 LoadStoreChanged = ChangeStatus::CHANGED;
5254 }
else if (
auto *LI = dyn_cast<LoadInst>(
U.getUser())) {
5255 if (LI->getPointerOperand() == &AssociatedValue)
5256 if (LI->getAlign() < getAssumedAlign()) {
5257 LI->setAlignment(getAssumedAlign());
5259 "Number of times alignment added to a load");
5260 LoadStoreChanged = ChangeStatus::CHANGED;
5267 Align InheritAlign =
5268 getAssociatedValue().getPointerAlignment(
A.getDataLayout());
5269 if (InheritAlign >= getAssumedAlign())
5270 return LoadStoreChanged;
5271 return Changed | LoadStoreChanged;
5281 if (getAssumedAlign() > 1)
5289 bool TrackUse =
false;
5291 unsigned int KnownAlign =
5292 getKnownAlignForUse(
A, *
this, getAssociatedValue(), U,
I, TrackUse);
5293 State.takeKnownMaximum(KnownAlign);
5299 const std::string getAsStr(
Attributor *
A)
const override {
5300 return "align<" + std::to_string(getKnownAlign().
value()) +
"-" +
5301 std::to_string(getAssumedAlign().
value()) +
">";
5306struct AAAlignFloating : AAAlignImpl {
5314 bool UsedAssumedInformation =
false;
5316 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
5318 Values.
push_back({getAssociatedValue(), getCtxI()});
5321 Stripped = Values.
size() != 1 ||
5322 Values.
front().getValue() != &getAssociatedValue();
5326 auto VisitValueCB = [&](
Value &
V) ->
bool {
5327 if (isa<UndefValue>(V) || isa<ConstantPointerNull>(V))
5330 DepClassTy::REQUIRED);
5331 if (!AA || (!Stripped &&
this == AA)) {
5333 unsigned Alignment = 1;
5346 Alignment =
V.getPointerAlignment(
DL).value();
5349 T.takeKnownMaximum(Alignment);
5350 T.indicatePessimisticFixpoint();
5356 return T.isValidState();
5359 for (
const auto &VAC : Values) {
5360 if (!VisitValueCB(*
VAC.getValue()))
5361 return indicatePessimisticFixpoint();
5374struct AAAlignReturned final
5375 : AAReturnedFromReturnedValues<AAAlign, AAAlignImpl> {
5376 using Base = AAReturnedFromReturnedValues<AAAlign, AAAlignImpl>;
5384struct AAAlignArgument final
5385 : AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl> {
5386 using Base = AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl>;
5394 if (
A.getInfoCache().isInvolvedInMustTailCall(*getAssociatedArgument()))
5395 return ChangeStatus::UNCHANGED;
5396 return Base::manifest(
A);
5403struct AAAlignCallSiteArgument final : AAAlignFloating {
5405 : AAAlignFloating(IRP,
A) {}
5412 if (
Argument *Arg = getAssociatedArgument())
5413 if (
A.getInfoCache().isInvolvedInMustTailCall(*Arg))
5414 return ChangeStatus::UNCHANGED;
5416 Align InheritAlign =
5417 getAssociatedValue().getPointerAlignment(
A.getDataLayout());
5418 if (InheritAlign >= getAssumedAlign())
5419 Changed = ChangeStatus::UNCHANGED;
5426 if (
Argument *Arg = getAssociatedArgument()) {
5429 const auto *ArgAlignAA =
A.getAAFor<
AAAlign>(
5432 takeKnownMaximum(ArgAlignAA->getKnownAlign().value());
5442struct AAAlignCallSiteReturned final
5443 : AACalleeToCallSite<AAAlign, AAAlignImpl> {
5444 using Base = AACalleeToCallSite<AAAlign, AAAlignImpl>;
5461 assert(!AA::hasAssumedIRAttr<Attribute::NoReturn>(
5462 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
5467 const std::string getAsStr(
Attributor *
A)
const override {
5468 return getAssumed() ?
"noreturn" :
"may-return";
5473 auto CheckForNoReturn = [](
Instruction &) {
return false; };
5474 bool UsedAssumedInformation =
false;
5475 if (!
A.checkForAllInstructions(CheckForNoReturn, *
this,
5476 {(unsigned)Instruction::Ret},
5477 UsedAssumedInformation))
5478 return indicatePessimisticFixpoint();
5479 return ChangeStatus::UNCHANGED;
5483struct AANoReturnFunction final : AANoReturnImpl {
5485 : AANoReturnImpl(IRP,
A) {}
5492struct AANoReturnCallSite final
5493 : AACalleeToCallSite<AANoReturn, AANoReturnImpl> {
5495 : AACalleeToCallSite<
AANoReturn, AANoReturnImpl>(IRP,
A) {}
5512 Value &
V = getAssociatedValue();
5513 if (
auto *
C = dyn_cast<Constant>(&V)) {
5514 if (
C->isThreadDependent())
5515 indicatePessimisticFixpoint();
5517 indicateOptimisticFixpoint();
5520 if (
auto *CB = dyn_cast<CallBase>(&V))
5523 indicateOptimisticFixpoint();
5526 if (
auto *
I = dyn_cast<Instruction>(&V)) {
5531 indicatePessimisticFixpoint();
5541 Value &
V = getAssociatedValue();
5543 if (
auto *
I = dyn_cast<Instruction>(&V))
5544 Scope =
I->getFunction();
5545 if (
auto *
A = dyn_cast<Argument>(&V)) {
5547 if (!
Scope->hasLocalLinkage())
5551 return indicateOptimisticFixpoint();
5553 bool IsKnownNoRecurse;
5554 if (AA::hasAssumedIRAttr<Attribute::NoRecurse>(
5559 auto UsePred = [&](
const Use &
U,
bool &Follow) {
5560 const Instruction *UserI = dyn_cast<Instruction>(
U.getUser());
5561 if (!UserI || isa<GetElementPtrInst>(UserI) || isa<CastInst>(UserI) ||
5562 isa<PHINode>(UserI) || isa<SelectInst>(UserI)) {
5566 if (isa<LoadInst>(UserI) || isa<CmpInst>(UserI) ||
5567 (isa<StoreInst>(UserI) &&
5568 cast<StoreInst>(UserI)->getValueOperand() !=
U.get()))
5570 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
5574 if (!Callee || !
Callee->hasLocalLinkage())
5580 DepClassTy::OPTIONAL);
5581 if (!ArgInstanceInfoAA ||
5582 !ArgInstanceInfoAA->isAssumedUniqueForAnalysis())
5587 A, *CB, *Scope, *
this,
nullptr,
5595 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
5596 if (
auto *SI = dyn_cast<StoreInst>(OldU.
getUser())) {
5597 auto *
Ptr =
SI->getPointerOperand()->stripPointerCasts();
5605 if (!
A.checkForAllUses(UsePred, *
this, V,
true,
5606 DepClassTy::OPTIONAL,
5607 true, EquivalentUseCB))
5608 return indicatePessimisticFixpoint();
5614 const std::string getAsStr(
Attributor *
A)
const override {
5615 return isAssumedUniqueForAnalysis() ?
"<unique [fAa]>" :
"<unknown>";
5619 void trackStatistics()
const override {}
5623struct AAInstanceInfoFloating : AAInstanceInfoImpl {
5625 : AAInstanceInfoImpl(IRP,
A) {}
5629struct AAInstanceInfoArgument final : AAInstanceInfoFloating {
5631 : AAInstanceInfoFloating(IRP,
A) {}
5635struct AAInstanceInfoCallSiteArgument final : AAInstanceInfoImpl {
5637 : AAInstanceInfoImpl(IRP,
A) {}
5645 Argument *Arg = getAssociatedArgument();
5647 return indicatePessimisticFixpoint();
5652 return indicatePessimisticFixpoint();
5658struct AAInstanceInfoReturned final : AAInstanceInfoImpl {
5660 : AAInstanceInfoImpl(IRP,
A) {
5676struct AAInstanceInfoCallSiteReturned final : AAInstanceInfoFloating {
5678 : AAInstanceInfoFloating(IRP,
A) {}
5685 bool IgnoreSubsumingPositions) {
5686 assert(ImpliedAttributeKind == Attribute::NoCapture &&
5687 "Unexpected attribute kind");
5690 return V.use_empty();
5693 if (isa<UndefValue>(V) || (isa<ConstantPointerNull>(V) &&
5694 V.getType()->getPointerAddressSpace() == 0)) {
5698 if (
A.hasAttr(IRP, {Attribute::NoCapture},
5699 true, Attribute::NoCapture))
5705 {Attribute::NoCapture, Attribute::ByVal},
5707 A.manifestAttrs(IRP,
5715 determineFunctionCaptureCapabilities(IRP, *
F, State);
5717 A.manifestAttrs(IRP,
5736 bool ReadOnly =
F.onlyReadsMemory();
5737 bool NoThrow =
F.doesNotThrow();
5738 bool IsVoidReturn =
F.getReturnType()->isVoidTy();
5739 if (ReadOnly && NoThrow && IsVoidReturn) {
5752 if (NoThrow && IsVoidReturn)
5757 if (!NoThrow || ArgNo < 0 ||
5758 !
F.getAttributes().hasAttrSomewhere(Attribute::Returned))
5761 for (
unsigned U = 0, E =
F.arg_size(); U < E; ++U)
5762 if (
F.hasParamAttribute(U, Attribute::Returned)) {
5763 if (U ==
unsigned(ArgNo))
5781 assert(!AA::hasAssumedIRAttr<Attribute::NoCapture>(
5782 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
5792 if (!isAssumedNoCaptureMaybeReturned())
5795 if (isArgumentPosition()) {
5796 if (isAssumedNoCapture())
5804 const std::string getAsStr(
Attributor *
A)
const override {
5805 if (isKnownNoCapture())
5806 return "known not-captured";
5807 if (isAssumedNoCapture())
5808 return "assumed not-captured";
5809 if (isKnownNoCaptureMaybeReturned())
5810 return "known not-captured-maybe-returned";
5811 if (isAssumedNoCaptureMaybeReturned())
5812 return "assumed not-captured-maybe-returned";
5813 return "assumed-captured";
5821 LLVM_DEBUG(
dbgs() <<
"[AANoCapture] Check use: " << *
U.get() <<
" in "
5825 if (isa<PtrToIntInst>(UInst)) {
5827 return isCapturedIn(State,
true,
true,
5833 if (isa<StoreInst>(UInst))
5834 return isCapturedIn(State,
true,
true,
5838 if (isa<ReturnInst>(UInst)) {
5840 return isCapturedIn(State,
false,
false,
5842 return isCapturedIn(State,
true,
true,
5848 auto *CB = dyn_cast<CallBase>(UInst);
5850 return isCapturedIn(State,
true,
true,
5857 bool IsKnownNoCapture;
5859 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
5860 A,
this, CSArgPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
5862 if (IsAssumedNoCapture)
5863 return isCapturedIn(State,
false,
false,
5867 return isCapturedIn(State,
false,
false,
5872 return isCapturedIn(State,
true,
true,
5880 bool CapturedInInt,
bool CapturedInRet) {
5881 LLVM_DEBUG(
dbgs() <<
" - captures [Mem " << CapturedInMem <<
"|Int "
5882 << CapturedInInt <<
"|Ret " << CapturedInRet <<
"]\n");
5898 return indicatePessimisticFixpoint();
5902 assert(
F &&
"Expected a function!");
5910 T.addKnownBits(NOT_CAPTURED_IN_MEM);
5912 addKnownBits(NOT_CAPTURED_IN_MEM);
5919 auto CheckReturnedArgs = [&](
bool &UsedAssumedInformation) {
5923 UsedAssumedInformation))
5925 bool SeenConstant =
false;
5927 if (isa<Constant>(
VAC.getValue())) {
5930 SeenConstant =
true;
5931 }
else if (!isa<Argument>(
VAC.getValue()) ||
5932 VAC.getValue() == getAssociatedArgument())
5938 bool IsKnownNoUnwind;
5939 if (AA::hasAssumedIRAttr<Attribute::NoUnwind>(
5941 bool IsVoidTy =
F->getReturnType()->isVoidTy();
5942 bool UsedAssumedInformation =
false;
5943 if (IsVoidTy || CheckReturnedArgs(UsedAssumedInformation)) {
5944 T.addKnownBits(NOT_CAPTURED_IN_RET);
5945 if (
T.isKnown(NOT_CAPTURED_IN_MEM))
5947 if (IsKnownNoUnwind && (IsVoidTy || !UsedAssumedInformation)) {
5948 addKnownBits(NOT_CAPTURED_IN_RET);
5949 if (isKnown(NOT_CAPTURED_IN_MEM))
5950 return indicateOptimisticFixpoint();
5961 auto UseCheck = [&](
const Use &
U,
bool &Follow) ->
bool {
5966 return checkUse(
A,
T, U, Follow);
5974 if (!
A.checkForAllUses(UseCheck, *
this, *V))
5975 return indicatePessimisticFixpoint();
5978 auto Assumed = S.getAssumed();
5979 S.intersectAssumedBits(
T.getAssumed());
5980 if (!isAssumedNoCaptureMaybeReturned())
5981 return indicatePessimisticFixpoint();
5987struct AANoCaptureArgument final : AANoCaptureImpl {
5989 : AANoCaptureImpl(IRP,
A) {}
5996struct AANoCaptureCallSiteArgument final : AANoCaptureImpl {
5998 : AANoCaptureImpl(IRP,
A) {}
6006 Argument *Arg = getAssociatedArgument();
6008 return indicatePessimisticFixpoint();
6010 bool IsKnownNoCapture;
6012 if (AA::hasAssumedIRAttr<Attribute::NoCapture>(
6013 A,
this, ArgPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
6015 return ChangeStatus::UNCHANGED;
6017 return indicatePessimisticFixpoint();
6026struct AANoCaptureFloating final : AANoCaptureImpl {
6028 : AANoCaptureImpl(IRP,
A) {}
6031 void trackStatistics()
const override {
6037struct AANoCaptureReturned final : AANoCaptureImpl {
6039 : AANoCaptureImpl(IRP,
A) {
6054 void trackStatistics()
const override {}
6058struct AANoCaptureCallSiteReturned final : AANoCaptureImpl {
6060 : AANoCaptureImpl(IRP,
A) {}
6066 determineFunctionCaptureCapabilities(getIRPosition(), *
F, *
this);
6070 void trackStatistics()
const override {
6081 SimplifiedAssociatedValue,
Other, Ty);
6082 if (SimplifiedAssociatedValue == std::optional<Value *>(
nullptr))
6086 if (SimplifiedAssociatedValue)
6087 dbgs() <<
"[ValueSimplify] is assumed to be "
6088 << **SimplifiedAssociatedValue <<
"\n";
6090 dbgs() <<
"[ValueSimplify] is assumed to be <none>\n";
6102 if (getAssociatedValue().
getType()->isVoidTy())
6103 indicatePessimisticFixpoint();
6104 if (
A.hasSimplificationCallback(getIRPosition()))
6105 indicatePessimisticFixpoint();
6109 const std::string getAsStr(
Attributor *
A)
const override {
6111 dbgs() <<
"SAV: " << (
bool)SimplifiedAssociatedValue <<
" ";
6112 if (SimplifiedAssociatedValue && *SimplifiedAssociatedValue)
6113 dbgs() <<
"SAV: " << **SimplifiedAssociatedValue <<
" ";
6115 return isValidState() ? (isAtFixpoint() ?
"simplified" :
"maybe-simple")
6120 void trackStatistics()
const override {}
6123 std::optional<Value *>
6124 getAssumedSimplifiedValue(
Attributor &
A)
const override {
6125 return SimplifiedAssociatedValue;
6136 if (CtxI &&
V.getType()->canLosslesslyBitCastTo(&Ty))
6138 : BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
6151 assert(CtxI &&
"Cannot reproduce an instruction without context!");
6152 if (
Check && (
I.mayReadFromMemory() ||
6157 Value *NewOp = reproduceValue(
A, QueryingAA, *
Op, Ty, CtxI,
Check, VMap);
6159 assert(
Check &&
"Manifest of new value unexpectedly failed!");
6185 if (
const auto &NewV = VMap.
lookup(&V))
6187 bool UsedAssumedInformation =
false;
6188 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
6190 if (!SimpleV.has_value())
6194 EffectiveV = *SimpleV;
6195 if (
auto *
C = dyn_cast<Constant>(EffectiveV))
6199 return ensureType(
A, *EffectiveV, Ty, CtxI,
Check);
6200 if (
auto *
I = dyn_cast<Instruction>(EffectiveV))
6201 if (
Value *NewV = reproduceInst(
A, QueryingAA, *
I, Ty, CtxI,
Check, VMap))
6202 return ensureType(
A, *NewV, Ty, CtxI,
Check);
6209 Value *NewV = SimplifiedAssociatedValue
6210 ? *SimplifiedAssociatedValue
6212 if (NewV && NewV != &getAssociatedValue()) {
6216 if (reproduceValue(
A, *
this, *NewV, *getAssociatedType(), CtxI,
6218 return reproduceValue(
A, *
this, *NewV, *getAssociatedType(), CtxI,
6227 const IRPosition &IRP,
bool Simplify =
true) {
6228 bool UsedAssumedInformation =
false;
6231 QueryingValueSimplified =
A.getAssumedSimplified(
6233 return unionAssumed(QueryingValueSimplified);
6237 template <
typename AAType>
bool askSimplifiedValueFor(
Attributor &
A) {
6238 if (!getAssociatedValue().
getType()->isIntegerTy())
6243 A.getAAFor<AAType>(*
this, getIRPosition(), DepClassTy::NONE);
6247 std::optional<Constant *> COpt = AA->getAssumedConstant(
A);
6250 SimplifiedAssociatedValue = std::nullopt;
6251 A.recordDependence(*AA, *
this, DepClassTy::OPTIONAL);
6254 if (
auto *
C = *COpt) {
6255 SimplifiedAssociatedValue =
C;
6256 A.recordDependence(*AA, *
this, DepClassTy::OPTIONAL);
6262 bool askSimplifiedValueForOtherAAs(
Attributor &
A) {
6263 if (askSimplifiedValueFor<AAValueConstantRange>(
A))
6265 if (askSimplifiedValueFor<AAPotentialConstantValues>(
A))
6273 for (
auto &U : getAssociatedValue().
uses()) {
6277 if (
auto *
PHI = dyn_cast_or_null<PHINode>(IP))
6278 IP =
PHI->getIncomingBlock(U)->getTerminator();
6279 if (
auto *NewV = manifestReplacementValue(
A, IP)) {
6281 <<
" -> " << *NewV <<
" :: " << *
this <<
"\n");
6282 if (
A.changeUseAfterManifest(U, *NewV))
6283 Changed = ChangeStatus::CHANGED;
6287 return Changed | AAValueSimplify::manifest(
A);
6292 SimplifiedAssociatedValue = &getAssociatedValue();
6293 return AAValueSimplify::indicatePessimisticFixpoint();
6297struct AAValueSimplifyArgument final : AAValueSimplifyImpl {
6299 : AAValueSimplifyImpl(IRP,
A) {}
6302 AAValueSimplifyImpl::initialize(
A);
6303 if (
A.hasAttr(getIRPosition(),
6304 {Attribute::InAlloca, Attribute::Preallocated,
6305 Attribute::StructRet, Attribute::Nest, Attribute::ByVal},
6307 indicatePessimisticFixpoint();
6314 Argument *Arg = getAssociatedArgument();
6320 return indicatePessimisticFixpoint();
6323 auto Before = SimplifiedAssociatedValue;
6337 bool UsedAssumedInformation =
false;
6338 std::optional<Constant *> SimpleArgOp =
6339 A.getAssumedConstant(ACSArgPos, *
this, UsedAssumedInformation);
6346 return unionAssumed(*SimpleArgOp);
6351 bool UsedAssumedInformation =
false;
6352 if (hasCallBaseContext() &&
6353 getCallBaseContext()->getCalledOperand() == Arg->
getParent())
6357 Success =
A.checkForAllCallSites(PredForCallSite, *
this,
true,
6358 UsedAssumedInformation);
6361 if (!askSimplifiedValueForOtherAAs(
A))
6362 return indicatePessimisticFixpoint();
6365 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6366 : ChangeStatus ::CHANGED;
6370 void trackStatistics()
const override {
6375struct AAValueSimplifyReturned : AAValueSimplifyImpl {
6377 : AAValueSimplifyImpl(IRP,
A) {}
6380 std::optional<Value *>
6381 getAssumedSimplifiedValue(
Attributor &
A)
const override {
6382 if (!isValidState())
6384 return SimplifiedAssociatedValue;
6389 auto Before = SimplifiedAssociatedValue;
6392 auto &RI = cast<ReturnInst>(
I);
6393 return checkAndUpdate(
6398 bool UsedAssumedInformation =
false;
6399 if (!
A.checkForAllInstructions(ReturnInstCB, *
this, {Instruction::Ret},
6400 UsedAssumedInformation))
6401 if (!askSimplifiedValueForOtherAAs(
A))
6402 return indicatePessimisticFixpoint();
6405 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6406 : ChangeStatus ::CHANGED;
6412 return ChangeStatus::UNCHANGED;
6416 void trackStatistics()
const override {
6421struct AAValueSimplifyFloating : AAValueSimplifyImpl {
6423 : AAValueSimplifyImpl(IRP,
A) {}
6427 AAValueSimplifyImpl::initialize(
A);
6428 Value &
V = getAnchorValue();
6431 if (isa<Constant>(V))
6432 indicatePessimisticFixpoint();
6437 auto Before = SimplifiedAssociatedValue;
6438 if (!askSimplifiedValueForOtherAAs(
A))
6439 return indicatePessimisticFixpoint();
6442 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6443 : ChangeStatus ::CHANGED;
6447 void trackStatistics()
const override {
6452struct AAValueSimplifyFunction : AAValueSimplifyImpl {
6454 : AAValueSimplifyImpl(IRP,
A) {}
6458 SimplifiedAssociatedValue =
nullptr;
6459 indicateOptimisticFixpoint();
6464 "AAValueSimplify(Function|CallSite)::updateImpl will not be called");
6467 void trackStatistics()
const override {
6472struct AAValueSimplifyCallSite : AAValueSimplifyFunction {
6474 : AAValueSimplifyFunction(IRP,
A) {}
6476 void trackStatistics()
const override {
6481struct AAValueSimplifyCallSiteReturned : AAValueSimplifyImpl {
6483 : AAValueSimplifyImpl(IRP,
A) {}
6486 AAValueSimplifyImpl::initialize(
A);
6487 Function *Fn = getAssociatedFunction();
6488 assert(Fn &&
"Did expect an associted function");
6494 checkAndUpdate(
A, *
this, IRP))
6495 indicateOptimisticFixpoint();
6497 indicatePessimisticFixpoint();
6505 return indicatePessimisticFixpoint();
6508 void trackStatistics()
const override {
6513struct AAValueSimplifyCallSiteArgument : AAValueSimplifyFloating {
6515 : AAValueSimplifyFloating(IRP,
A) {}
6523 if (FloatAA && FloatAA->getState().isValidState())
6526 if (
auto *NewV = manifestReplacementValue(
A, getCtxI())) {
6527 Use &
U = cast<CallBase>(&getAnchorValue())
6528 ->getArgOperandUse(getCallSiteArgNo());
6529 if (
A.changeUseAfterManifest(U, *NewV))
6530 Changed = ChangeStatus::CHANGED;
6533 return Changed | AAValueSimplify::manifest(
A);
6536 void trackStatistics()
const override {
6546 struct AllocationInfo {
6558 }
Status = STACK_DUE_TO_USE;
6562 bool HasPotentiallyFreeingUnknownUses =
false;
6566 bool MoveAllocaIntoEntry =
true;
6572 struct DeallocationInfo {
6580 bool MightFreeUnknownObjects =
false;
6589 ~AAHeapToStackFunction() {
6592 for (
auto &It : AllocationInfos)
6593 It.second->~AllocationInfo();
6594 for (
auto &It : DeallocationInfos)
6595 It.second->~DeallocationInfo();
6599 AAHeapToStack::initialize(
A);
6602 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6609 DeallocationInfos[CB] =
new (
A.Allocator) DeallocationInfo{CB, FreedOp};
6618 AllocationInfo *AI =
new (
A.Allocator) AllocationInfo{CB};
6619 AllocationInfos[CB] = AI;
6621 TLI->getLibFunc(*CB, AI->LibraryFunctionId);
6627 bool UsedAssumedInformation =
false;
6628 bool Success =
A.checkForAllCallLikeInstructions(
6629 AllocationIdentifierCB, *
this, UsedAssumedInformation,
6633 assert(
Success &&
"Did not expect the call base visit callback to fail!");
6637 bool &) -> std::optional<Value *> {
return nullptr; };
6638 for (
const auto &It : AllocationInfos)
6641 for (
const auto &It : DeallocationInfos)
6646 const std::string getAsStr(
Attributor *
A)
const override {
6647 unsigned NumH2SMallocs = 0, NumInvalidMallocs = 0;
6648 for (
const auto &It : AllocationInfos) {
6649 if (It.second->Status == AllocationInfo::INVALID)
6650 ++NumInvalidMallocs;
6654 return "[H2S] Mallocs Good/Bad: " + std::to_string(NumH2SMallocs) +
"/" +
6655 std::to_string(NumInvalidMallocs);
6659 void trackStatistics()
const override {
6662 "Number of malloc/calloc/aligned_alloc calls converted to allocas");
6663 for (
const auto &It : AllocationInfos)
6664 if (It.second->Status != AllocationInfo::INVALID)
6668 bool isAssumedHeapToStack(
const CallBase &CB)
const override {
6670 if (AllocationInfo *AI =
6671 AllocationInfos.lookup(
const_cast<CallBase *
>(&CB)))
6672 return AI->Status != AllocationInfo::INVALID;
6676 bool isAssumedHeapToStackRemovedFree(
CallBase &CB)
const override {
6677 if (!isValidState())
6680 for (
const auto &It : AllocationInfos) {
6681 AllocationInfo &AI = *It.second;
6682 if (AI.Status == AllocationInfo::INVALID)
6685 if (AI.PotentialFreeCalls.count(&CB))
6693 assert(getState().isValidState() &&
6694 "Attempted to manifest an invalid state!");
6698 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6700 for (
auto &It : AllocationInfos) {
6701 AllocationInfo &AI = *It.second;
6702 if (AI.Status == AllocationInfo::INVALID)
6705 for (
CallBase *FreeCall : AI.PotentialFreeCalls) {
6706 LLVM_DEBUG(
dbgs() <<
"H2S: Removing free call: " << *FreeCall <<
"\n");
6707 A.deleteAfterManifest(*FreeCall);
6708 HasChanged = ChangeStatus::CHANGED;
6711 LLVM_DEBUG(
dbgs() <<
"H2S: Removing malloc-like call: " << *AI.CB
6716 if (TLI->getLibFunc(*AI.CB, IsAllocShared))
6717 if (IsAllocShared == LibFunc___kmpc_alloc_shared)
6718 return OR <<
"Moving globalized variable to the stack.";
6719 return OR <<
"Moving memory allocation from the heap to the stack.";
6721 if (AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared)
6728 std::optional<APInt> SizeAPI =
getSize(
A, *
this, AI);
6730 Size = ConstantInt::get(AI.CB->getContext(), *SizeAPI);
6737 cast<ConstantInt>(SizeOffsetPair.
Offset)->isZero());
6742 ?
F->getEntryBlock().begin()
6743 : AI.CB->getIterator();
6746 if (
MaybeAlign RetAlign = AI.CB->getRetAlign())
6747 Alignment = std::max(Alignment, *RetAlign);
6749 std::optional<APInt> AlignmentAPI = getAPInt(
A, *
this, *
Align);
6750 assert(AlignmentAPI && AlignmentAPI->getZExtValue() > 0 &&
6751 "Expected an alignment during manifest!");
6753 std::max(Alignment,
assumeAligned(AlignmentAPI->getZExtValue()));
6757 unsigned AS =
DL.getAllocaAddrSpace();
6760 AI.CB->getName() +
".h2s", IP);
6762 if (Alloca->
getType() != AI.CB->getType())
6763 Alloca = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
6764 Alloca, AI.CB->getType(),
"malloc_cast", AI.CB->getIterator());
6769 "Must be able to materialize initial memory state of allocation");
6773 if (
auto *II = dyn_cast<InvokeInst>(AI.CB)) {
6776 A.deleteAfterManifest(*AI.CB);
6778 A.deleteAfterManifest(*AI.CB);
6784 if (!isa<UndefValue>(InitVal)) {
6787 Builder.CreateMemSet(Alloca, InitVal,
Size, std::nullopt);
6789 HasChanged = ChangeStatus::CHANGED;
6797 bool UsedAssumedInformation =
false;
6798 std::optional<Constant *> SimpleV =
6799 A.getAssumedConstant(V, AA, UsedAssumedInformation);
6801 return APInt(64, 0);
6802 if (
auto *CI = dyn_cast_or_null<ConstantInt>(*SimpleV))
6803 return CI->getValue();
6804 return std::nullopt;
6808 AllocationInfo &AI) {
6809 auto Mapper = [&](
const Value *
V) ->
const Value * {
6810 bool UsedAssumedInformation =
false;
6811 if (std::optional<Constant *> SimpleV =
6812 A.getAssumedConstant(*V, AA, UsedAssumedInformation))
6819 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6837 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6839 const auto *LivenessAA =
6843 A.getInfoCache().getMustBeExecutedContextExplorer();
6845 bool StackIsAccessibleByOtherThreads =
6846 A.getInfoCache().stackIsAccessibleByOtherThreads();
6849 A.getInfoCache().getAnalysisResultForFunction<
LoopAnalysis>(*F);
6850 std::optional<bool> MayContainIrreducibleControl;
6852 if (&
F->getEntryBlock() == &BB)
6854 if (!MayContainIrreducibleControl.has_value())
6856 if (*MayContainIrreducibleControl)
6865 bool HasUpdatedFrees =
false;
6867 auto UpdateFrees = [&]() {
6868 HasUpdatedFrees =
true;
6870 for (
auto &It : DeallocationInfos) {
6871 DeallocationInfo &DI = *It.second;
6874 if (DI.MightFreeUnknownObjects)
6878 bool UsedAssumedInformation =
false;
6879 if (
A.isAssumedDead(*DI.CB,
this, LivenessAA, UsedAssumedInformation,
6886 LLVM_DEBUG(
dbgs() <<
"[H2S] Unknown underlying object for free!\n");
6887 DI.MightFreeUnknownObjects =
true;
6893 if (isa<ConstantPointerNull>(Obj) || isa<UndefValue>(Obj))
6896 CallBase *ObjCB = dyn_cast<CallBase>(Obj);
6900 DI.MightFreeUnknownObjects =
true;
6904 AllocationInfo *AI = AllocationInfos.lookup(ObjCB);
6906 LLVM_DEBUG(
dbgs() <<
"[H2S] Free of a non-allocation object: " << *Obj
6908 DI.MightFreeUnknownObjects =
true;
6912 DI.PotentialAllocationCalls.insert(ObjCB);
6916 auto FreeCheck = [&](AllocationInfo &AI) {
6920 if (!StackIsAccessibleByOtherThreads) {
6922 if (!AA::hasAssumedIRAttr<Attribute::NoSync>(
6925 dbgs() <<
"[H2S] found an escaping use, stack is not accessible by "
6926 "other threads and function is not nosync:\n");
6930 if (!HasUpdatedFrees)
6934 if (AI.PotentialFreeCalls.size() != 1) {
6936 << AI.PotentialFreeCalls.size() <<
"\n");
6939 CallBase *UniqueFree = *AI.PotentialFreeCalls.begin();
6940 DeallocationInfo *DI = DeallocationInfos.lookup(UniqueFree);
6943 dbgs() <<
"[H2S] unique free call was not known as deallocation call "
6944 << *UniqueFree <<
"\n");
6947 if (DI->MightFreeUnknownObjects) {
6949 dbgs() <<
"[H2S] unique free call might free unknown allocations\n");
6952 if (DI->PotentialAllocationCalls.empty())
6954 if (DI->PotentialAllocationCalls.size() > 1) {
6956 << DI->PotentialAllocationCalls.size()
6957 <<
" different allocations\n");
6960 if (*DI->PotentialAllocationCalls.begin() != AI.CB) {
6963 <<
"[H2S] unique free call not known to free this allocation but "
6964 << **DI->PotentialAllocationCalls.begin() <<
"\n");
6969 if (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared) {
6971 if (!Explorer || !Explorer->findInContextOf(UniqueFree, CtxI)) {
6974 <<
"[H2S] unique free call might not be executed with the allocation "
6975 << *UniqueFree <<
"\n");
6982 auto UsesCheck = [&](AllocationInfo &AI) {
6983 bool ValidUsesOnly =
true;
6985 auto Pred = [&](
const Use &
U,
bool &Follow) ->
bool {
6987 if (isa<LoadInst>(UserI))
6989 if (
auto *SI = dyn_cast<StoreInst>(UserI)) {
6990 if (
SI->getValueOperand() ==
U.get()) {
6992 <<
"[H2S] escaping store to memory: " << *UserI <<
"\n");
6993 ValidUsesOnly =
false;
6999 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
7002 if (DeallocationInfos.count(CB)) {
7003 AI.PotentialFreeCalls.insert(CB);
7010 bool IsKnownNoCapture;
7011 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
7016 bool IsAssumedNoFree = AA::hasAssumedIRAttr<Attribute::NoFree>(
7019 if (!IsAssumedNoCapture ||
7020 (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared &&
7021 !IsAssumedNoFree)) {
7022 AI.HasPotentiallyFreeingUnknownUses |= !IsAssumedNoFree;
7027 <<
"Could not move globalized variable to the stack. "
7028 "Variable is potentially captured in call. Mark "
7029 "parameter as `__attribute__((noescape))` to override.";
7032 if (ValidUsesOnly &&
7033 AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared)
7037 ValidUsesOnly =
false;
7042 if (isa<GetElementPtrInst>(UserI) || isa<BitCastInst>(UserI) ||
7043 isa<PHINode>(UserI) || isa<SelectInst>(UserI)) {
7050 ValidUsesOnly =
false;
7053 if (!
A.checkForAllUses(Pred, *
this, *AI.CB,
false,
7055 [&](
const Use &OldU,
const Use &NewU) {
7056 auto *SI = dyn_cast<StoreInst>(OldU.getUser());
7057 return !SI || StackIsAccessibleByOtherThreads ||
7058 AA::isAssumedThreadLocalObject(
7059 A, *SI->getPointerOperand(), *this);
7062 return ValidUsesOnly;
7067 for (
auto &It : AllocationInfos) {
7068 AllocationInfo &AI = *It.second;
7069 if (AI.Status == AllocationInfo::INVALID)
7073 std::optional<APInt> APAlign = getAPInt(
A, *
this, *
Align);
7077 LLVM_DEBUG(
dbgs() <<
"[H2S] Unknown allocation alignment: " << *AI.CB
7079 AI.Status = AllocationInfo::INVALID;
7084 !APAlign->isPowerOf2()) {
7085 LLVM_DEBUG(
dbgs() <<
"[H2S] Invalid allocation alignment: " << APAlign
7087 AI.Status = AllocationInfo::INVALID;
7094 if (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared &&
7099 dbgs() <<
"[H2S] Unknown allocation size: " << *AI.CB <<
"\n";
7101 dbgs() <<
"[H2S] Allocation size too large: " << *AI.CB <<
" vs. "
7105 AI.Status = AllocationInfo::INVALID;
7111 switch (AI.Status) {
7112 case AllocationInfo::STACK_DUE_TO_USE:
7115 AI.Status = AllocationInfo::STACK_DUE_TO_FREE;
7117 case AllocationInfo::STACK_DUE_TO_FREE:
7120 AI.Status = AllocationInfo::INVALID;
7123 case AllocationInfo::INVALID:
7130 bool IsGlobalizedLocal =
7131 AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared;
7132 if (AI.MoveAllocaIntoEntry &&
7133 (!
Size.has_value() ||
7134 (!IsGlobalizedLocal && IsInLoop(*AI.CB->getParent()))))
7135 AI.MoveAllocaIntoEntry =
false;
7149 AAPrivatizablePtr::indicatePessimisticFixpoint();
7150 PrivatizableType =
nullptr;
7151 return ChangeStatus::CHANGED;
7157 virtual std::optional<Type *> identifyPrivatizableType(
Attributor &
A) = 0;
7161 std::optional<Type *> combineTypes(std::optional<Type *> T0,
7162 std::optional<Type *>
T1) {
7172 std::optional<Type *> getPrivatizableType()
const override {
7173 return PrivatizableType;
7176 const std::string getAsStr(
Attributor *
A)
const override {
7177 return isAssumedPrivatizablePtr() ?
"[priv]" :
"[no-priv]";
7181 std::optional<Type *> PrivatizableType;
7186struct AAPrivatizablePtrArgument final :
public AAPrivatizablePtrImpl {
7188 : AAPrivatizablePtrImpl(IRP,
A) {}
7191 std::optional<Type *> identifyPrivatizableType(
Attributor &
A)
override {
7194 bool UsedAssumedInformation =
false;
7196 A.getAttrs(getIRPosition(), {Attribute::ByVal},
Attrs,
7198 if (!
Attrs.empty() &&
7200 true, UsedAssumedInformation))
7201 return Attrs[0].getValueAsType();
7203 std::optional<Type *> Ty;
7204 unsigned ArgNo = getIRPosition().getCallSiteArgNo();
7227 dbgs() <<
"[AAPrivatizablePtr] ACSPos: " << ACSArgPos <<
", CSTy: ";
7231 dbgs() <<
"<nullptr>";
7236 Ty = combineTypes(Ty, CSTy);
7239 dbgs() <<
" : New Type: ";
7241 (*Ty)->print(
dbgs());
7243 dbgs() <<
"<nullptr>";
7252 if (!
A.checkForAllCallSites(CallSiteCheck, *
this,
true,
7253 UsedAssumedInformation))
7260 PrivatizableType = identifyPrivatizableType(
A);
7261 if (!PrivatizableType)
7262 return ChangeStatus::UNCHANGED;
7263 if (!*PrivatizableType)
7264 return indicatePessimisticFixpoint();
7269 DepClassTy::OPTIONAL);
7272 if (!
A.hasAttr(getIRPosition(), Attribute::ByVal) &&
7275 return indicatePessimisticFixpoint();
7281 identifyReplacementTypes(*PrivatizableType, ReplacementTypes);
7285 Function &Fn = *getIRPosition().getAnchorScope();
7289 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] Missing TTI for function "
7291 return indicatePessimisticFixpoint();
7301 bool UsedAssumedInformation =
false;
7302 if (!
A.checkForAllCallSites(CallSiteCheck, *
this,
true,
7303 UsedAssumedInformation)) {
7305 dbgs() <<
"[AAPrivatizablePtr] ABI incompatibility detected for "
7307 return indicatePessimisticFixpoint();
7311 Argument *Arg = getAssociatedArgument();
7312 if (!
A.isValidFunctionSignatureRewrite(*Arg, ReplacementTypes)) {
7314 return indicatePessimisticFixpoint();
7321 auto IsCompatiblePrivArgOfCallback = [&](
CallBase &CB) {
7324 for (
const Use *U : CallbackUses) {
7326 assert(CBACS && CBACS.isCallbackCall());
7327 for (
Argument &CBArg : CBACS.getCalledFunction()->args()) {
7328 int CBArgNo = CBACS.getCallArgOperandNo(CBArg);
7332 <<
"[AAPrivatizablePtr] Argument " << *Arg
7333 <<
"check if can be privatized in the context of its parent ("
7335 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7337 << CBArgNo <<
"@" << CBACS.getCalledFunction()->getName()
7338 <<
")\n[AAPrivatizablePtr] " << CBArg <<
" : "
7339 << CBACS.getCallArgOperand(CBArg) <<
" vs "
7341 <<
"[AAPrivatizablePtr] " << CBArg <<
" : "
7342 << CBACS.getCallArgOperandNo(CBArg) <<
" vs " << ArgNo <<
"\n";
7345 if (CBArgNo !=
int(ArgNo))
7349 if (CBArgPrivAA && CBArgPrivAA->isValidState()) {
7353 if (*CBArgPrivTy == PrivatizableType)
7358 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7359 <<
" cannot be privatized in the context of its parent ("
7361 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7363 << CBArgNo <<
"@" << CBACS.getCalledFunction()->getName()
7364 <<
").\n[AAPrivatizablePtr] for which the argument "
7365 "privatization is not compatible.\n";
7379 "Expected a direct call operand for callback call operand");
7384 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7385 <<
" check if be privatized in the context of its parent ("
7387 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7389 << DCArgNo <<
"@" << DCCallee->
getName() <<
").\n";
7392 if (
unsigned(DCArgNo) < DCCallee->
arg_size()) {
7395 DepClassTy::REQUIRED);
7396 if (DCArgPrivAA && DCArgPrivAA->isValidState()) {
7400 if (*DCArgPrivTy == PrivatizableType)
7406 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7407 <<
" cannot be privatized in the context of its parent ("
7409 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7412 <<
").\n[AAPrivatizablePtr] for which the argument "
7413 "privatization is not compatible.\n";
7425 return IsCompatiblePrivArgOfDirectCS(ACS);
7429 if (!
A.checkForAllCallSites(IsCompatiblePrivArgOfOtherCallSite, *
this,
true,
7430 UsedAssumedInformation))
7431 return indicatePessimisticFixpoint();
7433 return ChangeStatus::UNCHANGED;
7439 identifyReplacementTypes(
Type *PrivType,
7443 assert(PrivType &&
"Expected privatizable type!");
7446 if (
auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
7447 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++)
7448 ReplacementTypes.
push_back(PrivStructType->getElementType(u));
7449 }
else if (
auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
7450 ReplacementTypes.
append(PrivArrayType->getNumElements(),
7451 PrivArrayType->getElementType());
7462 assert(PrivType &&
"Expected privatizable type!");
7468 if (
auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
7469 const StructLayout *PrivStructLayout =
DL.getStructLayout(PrivStructType);
7470 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
7475 }
else if (
auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
7476 Type *PointeeTy = PrivArrayType->getElementType();
7477 uint64_t PointeeTySize =
DL.getTypeStoreSize(PointeeTy);
7478 for (
unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
7489 void createReplacementValues(
Align Alignment,
Type *PrivType,
7493 assert(PrivType &&
"Expected privatizable type!");
7500 if (
auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
7501 const StructLayout *PrivStructLayout =
DL.getStructLayout(PrivStructType);
7502 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
7503 Type *PointeeTy = PrivStructType->getElementType(u);
7507 L->setAlignment(Alignment);
7510 }
else if (
auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
7511 Type *PointeeTy = PrivArrayType->getElementType();
7512 uint64_t PointeeTySize =
DL.getTypeStoreSize(PointeeTy);
7513 for (
unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
7516 L->setAlignment(Alignment);
7521 L->setAlignment(Alignment);
7528 if (!PrivatizableType)
7529 return ChangeStatus::UNCHANGED;
7530 assert(*PrivatizableType &&
"Expected privatizable type!");
7536 bool UsedAssumedInformation =
false;
7537 if (!
A.checkForAllInstructions(
7539 CallInst &CI = cast<CallInst>(I);
7540 if (CI.isTailCall())
7541 TailCalls.push_back(&CI);
7544 *
this, {Instruction::Call}, UsedAssumedInformation))
7545 return ChangeStatus::UNCHANGED;
7547 Argument *Arg = getAssociatedArgument();
7550 const auto *AlignAA =
7559 BasicBlock &EntryBB = ReplacementFn.getEntryBlock();
7561 const DataLayout &
DL = IP->getModule()->getDataLayout();
7562 unsigned AS =
DL.getAllocaAddrSpace();
7564 Arg->
getName() +
".priv", IP);
7565 createInitialization(*PrivatizableType, *AI, ReplacementFn,
7566 ArgIt->getArgNo(), IP);
7569 AI = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
7574 CI->setTailCall(
false);
7585 createReplacementValues(
7586 AlignAA ? AlignAA->getAssumedAlign() :
Align(0),
7587 *PrivatizableType, ACS,
7595 identifyReplacementTypes(*PrivatizableType, ReplacementTypes);
7598 if (
A.registerFunctionSignatureRewrite(*Arg, ReplacementTypes,
7599 std::move(FnRepairCB),
7600 std::move(ACSRepairCB)))
7601 return ChangeStatus::CHANGED;
7602 return ChangeStatus::UNCHANGED;
7606 void trackStatistics()
const override {
7611struct AAPrivatizablePtrFloating :
public AAPrivatizablePtrImpl {
7613 : AAPrivatizablePtrImpl(IRP,
A) {}
7618 indicatePessimisticFixpoint();
7623 "updateImpl will not be called");
7627 std::optional<Type *> identifyPrivatizableType(
Attributor &
A)
override {
7630 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] No underlying object found!\n");
7634 if (
auto *AI = dyn_cast<AllocaInst>(Obj))
7635 if (
auto *CI = dyn_cast<ConstantInt>(AI->getArraySize()))
7637 return AI->getAllocatedType();
7638 if (
auto *Arg = dyn_cast<Argument>(Obj)) {
7641 if (PrivArgAA && PrivArgAA->isAssumedPrivatizablePtr())
7645 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] Underlying object neither valid "
7646 "alloca nor privatizable argument: "
7652 void trackStatistics()
const override {
7657struct AAPrivatizablePtrCallSiteArgument final
7658 :
public AAPrivatizablePtrFloating {
7660 : AAPrivatizablePtrFloating(IRP,
A) {}
7664 if (
A.hasAttr(getIRPosition(), Attribute::ByVal))
7665 indicateOptimisticFixpoint();
7670 PrivatizableType = identifyPrivatizableType(
A);
7671 if (!PrivatizableType)
7672 return ChangeStatus::UNCHANGED;
7673 if (!*PrivatizableType)
7674 return indicatePessimisticFixpoint();
7677 bool IsKnownNoCapture;
7678 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
7679 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoCapture);
7680 if (!IsAssumedNoCapture) {
7681 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer might be captured!\n");
7682 return indicatePessimisticFixpoint();
7685 bool IsKnownNoAlias;
7686 if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
7687 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
7688 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer might alias!\n");
7689 return indicatePessimisticFixpoint();
7694 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer is written!\n");
7695 return indicatePessimisticFixpoint();
7698 return ChangeStatus::UNCHANGED;
7702 void trackStatistics()
const override {
7707struct AAPrivatizablePtrCallSiteReturned final
7708 :
public AAPrivatizablePtrFloating {
7710 : AAPrivatizablePtrFloating(IRP,
A) {}
7715 indicatePessimisticFixpoint();
7719 void trackStatistics()
const override {
7724struct AAPrivatizablePtrReturned final :
public AAPrivatizablePtrFloating {
7726 : AAPrivatizablePtrFloating(IRP,
A) {}
7731 indicatePessimisticFixpoint();
7735 void trackStatistics()
const override {
7751 intersectAssumedBits(BEST_STATE);
7752 getKnownStateFromValue(
A, getIRPosition(), getState());
7753 AAMemoryBehavior::initialize(
A);
7759 bool IgnoreSubsumingPositions =
false) {
7761 A.getAttrs(IRP, AttrKinds, Attrs, IgnoreSubsumingPositions);
7763 switch (Attr.getKindAsEnum()) {
7764 case Attribute::ReadNone:
7767 case Attribute::ReadOnly:
7770 case Attribute::WriteOnly:
7779 if (!
I->mayReadFromMemory())
7781 if (!
I->mayWriteToMemory())
7794 else if (isAssumedWriteOnly())
7803 if (
A.hasAttr(IRP, Attribute::ReadNone,
7805 return ChangeStatus::UNCHANGED;
7814 return ChangeStatus::UNCHANGED;
7817 A.removeAttrs(IRP, AttrKinds);
7820 A.removeAttrs(IRP, Attribute::Writable);
7827 const std::string getAsStr(
Attributor *
A)
const override {
7832 if (isAssumedWriteOnly())
7834 return "may-read/write";
7842 Attribute::ReadNone, Attribute::ReadOnly, Attribute::WriteOnly};
7845struct AAMemoryBehaviorFloating : AAMemoryBehaviorImpl {
7847 : AAMemoryBehaviorImpl(IRP,
A) {}
7853 void trackStatistics()
const override {
7858 else if (isAssumedWriteOnly())
7873struct AAMemoryBehaviorArgument : AAMemoryBehaviorFloating {
7875 : AAMemoryBehaviorFloating(IRP,
A) {}
7879 intersectAssumedBits(BEST_STATE);
7884 bool HasByVal =
A.hasAttr(IRP, {Attribute::ByVal},
7886 getKnownStateFromValue(
A, IRP, getState(),
7893 return ChangeStatus::UNCHANGED;
7897 if (
A.hasAttr(getIRPosition(),
7898 {Attribute::InAlloca, Attribute::Preallocated})) {
7899 removeKnownBits(NO_WRITES);
7900 removeAssumedBits(NO_WRITES);
7902 A.removeAttrs(getIRPosition(), AttrKinds);
7903 return AAMemoryBehaviorFloating::manifest(
A);
7907 void trackStatistics()
const override {
7912 else if (isAssumedWriteOnly())
7917struct AAMemoryBehaviorCallSiteArgument final : AAMemoryBehaviorArgument {
7919 : AAMemoryBehaviorArgument(IRP,
A) {}
7925 Argument *Arg = getAssociatedArgument();
7927 indicatePessimisticFixpoint();
7931 addKnownBits(NO_WRITES);
7932 removeKnownBits(NO_READS);
7933 removeAssumedBits(NO_READS);
7935 AAMemoryBehaviorArgument::initialize(
A);
7936 if (getAssociatedFunction()->isDeclaration())
7937 indicatePessimisticFixpoint();
7946 Argument *Arg = getAssociatedArgument();
7951 return indicatePessimisticFixpoint();
7956 void trackStatistics()
const override {
7961 else if (isAssumedWriteOnly())
7967struct AAMemoryBehaviorCallSiteReturned final : AAMemoryBehaviorFloating {
7969 : AAMemoryBehaviorFloating(IRP,
A) {}
7973 AAMemoryBehaviorImpl::initialize(
A);
7978 return ChangeStatus::UNCHANGED;
7982 void trackStatistics()
const override {}
7986struct AAMemoryBehaviorFunction final :
public AAMemoryBehaviorImpl {
7988 : AAMemoryBehaviorImpl(IRP,
A) {}
7998 Function &
F = cast<Function>(getAnchorValue());
8004 else if (isAssumedWriteOnly())
8007 A.removeAttrs(getIRPosition(), AttrKinds);
8012 return A.manifestAttrs(getIRPosition(),
8017 void trackStatistics()
const override {
8022 else if (isAssumedWriteOnly())
8028struct AAMemoryBehaviorCallSite final
8029 : AACalleeToCallSite<AAMemoryBehavior, AAMemoryBehaviorImpl> {
8036 CallBase &CB = cast<CallBase>(getAnchorValue());
8042 else if (isAssumedWriteOnly())
8045 A.removeAttrs(getIRPosition(), AttrKinds);
8050 Attribute::Writable);
8051 return A.manifestAttrs(
8056 void trackStatistics()
const override {
8061 else if (isAssumedWriteOnly())
8069 auto AssumedState = getAssumed();
8075 if (
const auto *CB = dyn_cast<CallBase>(&
I)) {
8078 if (MemBehaviorAA) {
8079 intersectAssumedBits(MemBehaviorAA->
getAssumed());
8080 return !isAtFixpoint();
8085 if (
I.mayReadFromMemory())
8086 removeAssumedBits(NO_READS);
8087 if (
I.mayWriteToMemory())
8088 removeAssumedBits(NO_WRITES);
8089 return !isAtFixpoint();
8092 bool UsedAssumedInformation =
false;
8093 if (!
A.checkForAllReadWriteInstructions(CheckRWInst, *
this,
8094 UsedAssumedInformation))
8095 return indicatePessimisticFixpoint();
8114 const auto *FnMemAA =
8118 S.addKnownBits(FnMemAA->getKnown());
8119 if ((S.getAssumed() & FnMemAA->getAssumed()) == S.getAssumed())
8125 auto AssumedState = S.getAssumed();
8131 bool IsKnownNoCapture;
8133 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
8137 if (!IsAssumedNoCapture &&
8139 S.intersectAssumedBits(FnMemAssumedState);
8145 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
8147 LLVM_DEBUG(
dbgs() <<
"[AAMemoryBehavior] Use: " << *U <<
" in " << *UserI
8155 Follow = followUsersOfUseIn(
A, U, UserI);
8159 analyzeUseIn(
A, U, UserI);
8161 return !isAtFixpoint();
8164 if (!
A.checkForAllUses(UsePred, *
this, getAssociatedValue()))
8165 return indicatePessimisticFixpoint();
8171bool AAMemoryBehaviorFloating::followUsersOfUseIn(
Attributor &
A,
const Use &U,
8175 if (isa<LoadInst>(UserI) || isa<ReturnInst>(UserI))
8180 const auto *CB = dyn_cast<CallBase>(UserI);
8190 if (
U.get()->getType()->isPointerTy()) {
8192 bool IsKnownNoCapture;
8193 return !AA::hasAssumedIRAttr<Attribute::NoCapture>(
8201void AAMemoryBehaviorFloating::analyzeUseIn(
Attributor &
A,
const Use &U,
8209 case Instruction::Load:
8211 removeAssumedBits(NO_READS);
8214 case Instruction::Store:
8219 removeAssumedBits(NO_WRITES);
8221 indicatePessimisticFixpoint();
8224 case Instruction::Call:
8225 case Instruction::CallBr:
8226 case Instruction::Invoke: {
8229 const auto *CB = cast<CallBase>(UserI);
8233 indicatePessimisticFixpoint();
8240 removeAssumedBits(NO_READS);
8247 if (
U.get()->getType()->isPointerTy())
8251 const auto *MemBehaviorAA =
8257 intersectAssumedBits(MemBehaviorAA->
getAssumed());
8265 removeAssumedBits(NO_READS);
8267 removeAssumedBits(NO_WRITES);
8279 return "all memory";
8282 std::string S =
"memory:";
8288 S +=
"internal global,";
8290 S +=
"external global,";
8294 S +=
"inaccessible,";
8308 AccessKind2Accesses.fill(
nullptr);
8311 ~AAMemoryLocationImpl() {
8314 for (AccessSet *AS : AccessKind2Accesses)
8321 intersectAssumedBits(BEST_STATE);
8322 getKnownStateFromValue(
A, getIRPosition(), getState());
8323 AAMemoryLocation::initialize(
A);
8329 bool IgnoreSubsumingPositions =
false) {
8338 bool UseArgMemOnly =
true;
8340 if (AnchorFn &&
A.isRunOn(*AnchorFn))
8344 A.getAttrs(IRP, {Attribute::Memory},
Attrs, IgnoreSubsumingPositions);
8353 State.
addKnownBits(inverseLocation(NO_INACCESSIBLE_MEM,
true,
true));
8358 State.
addKnownBits(inverseLocation(NO_ARGUMENT_MEM,
true,
true));
8362 A.manifestAttrs(IRP,
8372 NO_INACCESSIBLE_MEM | NO_ARGUMENT_MEM,
true,
true));
8376 A.manifestAttrs(IRP,
8395 else if (isAssumedInaccessibleMemOnly())
8398 else if (isAssumedArgMemOnly())
8401 else if (isAssumedInaccessibleOrArgMemOnly())
8416 if (DeducedAttrs.
size() != 1)
8417 return ChangeStatus::UNCHANGED;
8425 bool checkForAllAccessesToMemoryKind(
8427 MemoryLocationsKind)>
8429 MemoryLocationsKind RequestedMLK)
const override {
8430 if (!isValidState())
8433 MemoryLocationsKind AssumedMLK = getAssumedNotAccessedLocation();
8434 if (AssumedMLK == NO_LOCATIONS)
8438 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS;
8439 CurMLK *= 2, ++
Idx) {
8440 if (CurMLK & RequestedMLK)
8443 if (
const AccessSet *Accesses = AccessKind2Accesses[
Idx])
8444 for (
const AccessInfo &AI : *Accesses)
8445 if (!Pred(AI.I, AI.Ptr, AI.Kind, CurMLK))
8457 bool Changed =
false;
8458 MemoryLocationsKind KnownMLK = getKnown();
8459 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
8460 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2)
8461 if (!(CurMLK & KnownMLK))
8462 updateStateAndAccessesMap(getState(), CurMLK,
I,
nullptr, Changed,
8463 getAccessKindFromInst(
I));
8464 return AAMemoryLocation::indicatePessimisticFixpoint();
8484 bool operator()(
const AccessInfo &
LHS,
const AccessInfo &
RHS)
const {
8488 return LHS.Ptr <
RHS.Ptr;
8489 if (
LHS.Kind !=
RHS.Kind)
8490 return LHS.Kind <
RHS.Kind;
8498 std::array<AccessSet *, llvm::CTLog2<VALID_STATE>()> AccessKind2Accesses;
8515 AK =
I->mayReadFromMemory() ? READ :
NONE;
8533 Changed |= Accesses->insert(AccessInfo{
I,
Ptr, AK}).second;
8534 if (MLK == NO_UNKOWN_MEM)
8536 State.removeAssumedBits(MLK);
8543 unsigned AccessAS = 0);
8549void AAMemoryLocationImpl::categorizePtrValue(
8552 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Categorize pointer locations for "
8554 << getMemoryLocationsAsStr(State.getAssumed()) <<
"]\n");
8556 auto Pred = [&](
Value &Obj) {
8559 MemoryLocationsKind MLK = NO_LOCATIONS;
8569 if (isa<UndefValue>(&Obj))
8571 if (isa<Argument>(&Obj)) {
8578 MLK = NO_ARGUMENT_MEM;
8579 }
else if (
auto *GV = dyn_cast<GlobalValue>(&Obj)) {
8583 if (
auto *GVar = dyn_cast<GlobalVariable>(GV))
8584 if (GVar->isConstant())
8587 if (GV->hasLocalLinkage())
8588 MLK = NO_GLOBAL_INTERNAL_MEM;
8590 MLK = NO_GLOBAL_EXTERNAL_MEM;
8591 }
else if (isa<ConstantPointerNull>(&Obj) &&
8595 }
else if (isa<AllocaInst>(&Obj)) {
8597 }
else if (
const auto *CB = dyn_cast<CallBase>(&Obj)) {
8598 bool IsKnownNoAlias;
8599 if (AA::hasAssumedIRAttr<Attribute::NoAlias>(
8602 MLK = NO_MALLOCED_MEM;
8604 MLK = NO_UNKOWN_MEM;
8606 MLK = NO_UNKOWN_MEM;
8609 assert(MLK != NO_LOCATIONS &&
"No location specified!");
8610 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Ptr value can be categorized: "
8611 << Obj <<
" -> " << getMemoryLocationsAsStr(MLK) <<
"\n");
8612 updateStateAndAccessesMap(State, MLK, &
I, &Obj, Changed,
8613 getAccessKindFromInst(&
I));
8622 dbgs() <<
"[AAMemoryLocation] Pointer locations not categorized\n");
8623 updateStateAndAccessesMap(State, NO_UNKOWN_MEM, &
I,
nullptr, Changed,
8624 getAccessKindFromInst(&
I));
8629 dbgs() <<
"[AAMemoryLocation] Accessed locations with pointer locations: "
8630 << getMemoryLocationsAsStr(State.getAssumed()) <<
"\n");
8633void AAMemoryLocationImpl::categorizeArgumentPointerLocations(
8636 for (
unsigned ArgNo = 0, E = CB.
arg_size(); ArgNo < E; ++ArgNo) {
8645 const auto *ArgOpMemLocationAA =
8648 if (ArgOpMemLocationAA && ArgOpMemLocationAA->isAssumedReadNone())
8653 categorizePtrValue(
A, CB, *ArgOp, AccessedLocs, Changed);
8660 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Categorize accessed locations for "
8664 AccessedLocs.intersectAssumedBits(NO_LOCATIONS);
8666 if (
auto *CB = dyn_cast<CallBase>(&
I)) {
8672 <<
" [" << CBMemLocationAA <<
"]\n");
8673 if (!CBMemLocationAA) {
8674 updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &
I,
nullptr,
8675 Changed, getAccessKindFromInst(&
I));
8676 return NO_UNKOWN_MEM;
8679 if (CBMemLocationAA->isAssumedReadNone())
8680 return NO_LOCATIONS;
8682 if (CBMemLocationAA->isAssumedInaccessibleMemOnly()) {
8683 updateStateAndAccessesMap(AccessedLocs, NO_INACCESSIBLE_MEM, &
I,
nullptr,
8684 Changed, getAccessKindFromInst(&
I));
8685 return AccessedLocs.getAssumed();
8688 uint32_t CBAssumedNotAccessedLocs =
8689 CBMemLocationAA->getAssumedNotAccessedLocation();
8692 uint32_t CBAssumedNotAccessedLocsNoArgMem =
8693 CBAssumedNotAccessedLocs | NO_ARGUMENT_MEM | NO_GLOBAL_MEM;
8695 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2) {
8696 if (CBAssumedNotAccessedLocsNoArgMem & CurMLK)
8698 updateStateAndAccessesMap(AccessedLocs, CurMLK, &
I,
nullptr, Changed,
8699 getAccessKindFromInst(&
I));
8704 bool HasGlobalAccesses = ((~CBAssumedNotAccessedLocs) & NO_GLOBAL_MEM);
8705 if (HasGlobalAccesses) {
8708 updateStateAndAccessesMap(AccessedLocs, MLK, &
I,
Ptr, Changed,
8709 getAccessKindFromInst(&
I));
8712 if (!CBMemLocationAA->checkForAllAccessesToMemoryKind(
8713 AccessPred, inverseLocation(NO_GLOBAL_MEM,
false,
false)))
8714 return AccessedLocs.getWorstState();
8718 dbgs() <<
"[AAMemoryLocation] Accessed state before argument handling: "
8719 << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) <<
"\n");
8722 bool HasArgAccesses = ((~CBAssumedNotAccessedLocs) & NO_ARGUMENT_MEM);
8724 categorizeArgumentPointerLocations(
A, *CB, AccessedLocs, Changed);
8727 dbgs() <<
"[AAMemoryLocation] Accessed state after argument handling: "
8728 << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) <<
"\n");
8730 return AccessedLocs.getAssumed();
8735 dbgs() <<
"[AAMemoryLocation] Categorize memory access with pointer: "
8736 <<
I <<
" [" << *
Ptr <<
"]\n");
8737 categorizePtrValue(
A,
I, *
Ptr, AccessedLocs, Changed,
8738 Ptr->getType()->getPointerAddressSpace());
8739 return AccessedLocs.getAssumed();
8742 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Failed to categorize instruction: "
8744 updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &
I,
nullptr, Changed,
8745 getAccessKindFromInst(&
I));
8746 return AccessedLocs.getAssumed();
8750struct AAMemoryLocationFunction final :
public AAMemoryLocationImpl {
8752 : AAMemoryLocationImpl(IRP,
A) {}
8757 const auto *MemBehaviorAA =
8761 return indicateOptimisticFixpoint();
8763 "AAMemoryLocation was not read-none but AAMemoryBehavior was!");
8764 A.recordDependence(*MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
8765 return ChangeStatus::UNCHANGED;
8769 auto AssumedState = getAssumed();
8770 bool Changed =
false;
8773 MemoryLocationsKind MLK = categorizeAccessedLocations(
A,
I, Changed);
8774 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Accessed locations for " <<
I
8775 <<
": " << getMemoryLocationsAsStr(MLK) <<
"\n");
8776 removeAssumedBits(inverseLocation(MLK,
false,
false));
8779 return getAssumedNotAccessedLocation() != VALID_STATE;
8782 bool UsedAssumedInformation =
false;
8783 if (!
A.checkForAllReadWriteInstructions(CheckRWInst, *
this,
8784 UsedAssumedInformation))
8785 return indicatePessimisticFixpoint();
8787 Changed |= AssumedState != getAssumed();
8788 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
8792 void trackStatistics()
const override {
8795 else if (isAssumedArgMemOnly())
8797 else if (isAssumedInaccessibleMemOnly())
8799 else if (isAssumedInaccessibleOrArgMemOnly())
8805struct AAMemoryLocationCallSite final : AAMemoryLocationImpl {
8807 : AAMemoryLocationImpl(IRP,
A) {}
8820 return indicatePessimisticFixpoint();
8821 bool Changed =
false;
8824 updateStateAndAccessesMap(getState(), MLK,
I,
Ptr, Changed,
8825 getAccessKindFromInst(
I));
8828 if (!FnAA->checkForAllAccessesToMemoryKind(AccessPred, ALL_LOCATIONS))
8829 return indicatePessimisticFixpoint();
8830 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
8834 void trackStatistics()
const override {
8848 const std::string getAsStr(
Attributor *
A)
const override {
8849 std::string Str(
"AADenormalFPMath[");
8852 DenormalState Known = getKnown();
8853 if (Known.Mode.isValid())
8854 OS <<
"denormal-fp-math=" << Known.Mode;
8858 if (Known.ModeF32.isValid())
8859 OS <<
" denormal-fp-math-f32=" << Known.ModeF32;
8865struct AADenormalFPMathFunction final : AADenormalFPMathImpl {
8867 : AADenormalFPMathImpl(IRP,
A) {}
8879 Known = DenormalState{
Mode, ModeF32};
8890 <<
"->" << getAssociatedFunction()->
getName() <<
'\n');
8898 CallerInfo->getState());
8902 bool AllCallSitesKnown =
true;
8903 if (!
A.checkForAllCallSites(CheckCallSite, *
this,
true, AllCallSitesKnown))
8904 return indicatePessimisticFixpoint();
8906 if (Change == ChangeStatus::CHANGED && isModeFixed())
8912 LLVMContext &Ctx = getAssociatedFunction()->getContext();
8917 AttrToRemove.
push_back(
"denormal-fp-math");
8923 if (Known.ModeF32 != Known.Mode) {
8925 Attribute::get(Ctx,
"denormal-fp-math-f32", Known.ModeF32.str()));
8927 AttrToRemove.
push_back(
"denormal-fp-math-f32");
8930 auto &IRP = getIRPosition();
8933 return A.removeAttrs(IRP, AttrToRemove) |
8934 A.manifestAttrs(IRP, AttrToAdd,
true);
8937 void trackStatistics()
const override {
8953 if (
A.hasSimplificationCallback(getIRPosition())) {
8954 indicatePessimisticFixpoint();
8959 intersectKnown(getConstantRangeFromSCEV(
A, getCtxI()));
8962 intersectKnown(getConstantRangeFromLVI(
A, getCtxI()));
8966 const std::string getAsStr(
Attributor *
A)
const override {
8970 getKnown().print(
OS);
8972 getAssumed().print(
OS);
8980 if (!getAnchorScope())
8993 const SCEV *S = SE->
getSCEV(&getAssociatedValue());
9004 if (!getAnchorScope())
9011 const SCEV *S = getSCEV(
A,
I);
9023 if (!getAnchorScope())
9042 bool isValidCtxInstructionForOutsideAnalysis(
Attributor &
A,
9044 bool AllowAACtxI)
const {
9045 if (!CtxI || (!AllowAACtxI && CtxI == getCtxI()))
9056 if (
auto *
I = dyn_cast<Instruction>(&getAssociatedValue())) {
9070 const Instruction *CtxI =
nullptr)
const override {
9071 if (!isValidCtxInstructionForOutsideAnalysis(
A, CtxI,
9077 return getKnown().intersectWith(SCEVR).intersectWith(LVIR);
9083 const Instruction *CtxI =
nullptr)
const override {
9088 if (!isValidCtxInstructionForOutsideAnalysis(
A, CtxI,
9090 return getAssumed();
9094 return getAssumed().intersectWith(SCEVR).intersectWith(LVIR);
9102 Ty, AssumedConstantRange.
getLower())),
9104 Ty, AssumedConstantRange.
getUpper()))};
9126 mdconst::extract<ConstantInt>(KnownRanges->
getOperand(0));
9128 mdconst::extract<ConstantInt>(KnownRanges->
getOperand(1));
9131 return Known.contains(Assumed) && Known != Assumed;
9138 auto *OldRangeMD =
I->getMetadata(LLVMContext::MD_range);
9139 if (isBetterRange(AssumedConstantRange, OldRangeMD)) {
9141 I->setMetadata(LLVMContext::MD_range,
9142 getMDNodeForConstantRange(
I->getType(),
I->getContext(),
9143 AssumedConstantRange));
9156 auto &
V = getAssociatedValue();
9160 assert(
I == getCtxI() &&
"Should not annotate an instruction which is "
9161 "not the context instruction");
9162 if (isa<CallInst>(
I) || isa<LoadInst>(
I))
9163 if (setRangeMetadataIfisBetterRange(
I, AssumedConstantRange))
9164 Changed = ChangeStatus::CHANGED;
9172struct AAValueConstantRangeArgument final
9173 : AAArgumentFromCallSiteArguments<
9174 AAValueConstantRange, AAValueConstantRangeImpl, IntegerRangeState,
9176 using Base = AAArgumentFromCallSiteArguments<
9183 void trackStatistics()
const override {
9188struct AAValueConstantRangeReturned
9189 : AAReturnedFromReturnedValues<AAValueConstantRange,
9190 AAValueConstantRangeImpl,
9191 AAValueConstantRangeImpl::StateType,
9195 AAValueConstantRangeImpl,
9203 if (!
A.isFunctionIPOAmendable(*getAssociatedFunction()))
9204 indicatePessimisticFixpoint();
9208 void trackStatistics()
const override {
9213struct AAValueConstantRangeFloating : AAValueConstantRangeImpl {
9215 : AAValueConstantRangeImpl(IRP,
A) {}
9219 AAValueConstantRangeImpl::initialize(
A);
9223 Value &
V = getAssociatedValue();
9225 if (
auto *
C = dyn_cast<ConstantInt>(&V)) {
9227 indicateOptimisticFixpoint();
9231 if (isa<UndefValue>(&V)) {
9234 indicateOptimisticFixpoint();
9238 if (isa<CallBase>(&V))
9241 if (isa<BinaryOperator>(&V) || isa<CmpInst>(&V) || isa<CastInst>(&V))
9245 if (
LoadInst *LI = dyn_cast<LoadInst>(&V))
9246 if (
auto *RangeMD = LI->getMetadata(LLVMContext::MD_range)) {
9253 if (isa<SelectInst>(V) || isa<PHINode>(V))
9257 indicatePessimisticFixpoint();
9260 << getAssociatedValue() <<
"\n");
9263 bool calculateBinaryOperator(
9271 bool UsedAssumedInformation =
false;
9272 const auto &SimplifiedLHS =
A.getAssumedSimplified(
9275 if (!SimplifiedLHS.has_value())
9277 if (!*SimplifiedLHS)
9279 LHS = *SimplifiedLHS;
9281 const auto &SimplifiedRHS =
A.getAssumedSimplified(
9284 if (!SimplifiedRHS.has_value())
9286 if (!*SimplifiedRHS)
9288 RHS = *SimplifiedRHS;
9296 DepClassTy::REQUIRED);
9300 auto LHSAARange = LHSAA->getAssumedConstantRange(
A, CtxI);
9304 DepClassTy::REQUIRED);
9308 auto RHSAARange = RHSAA->getAssumedConstantRange(
A, CtxI);
9310 auto AssumedRange = LHSAARange.binaryOp(BinOp->
getOpcode(), RHSAARange);
9312 T.unionAssumed(AssumedRange);
9316 return T.isValidState();
9319 bool calculateCastInst(
9328 bool UsedAssumedInformation =
false;
9329 const auto &SimplifiedOpV =
A.getAssumedSimplified(
9332 if (!SimplifiedOpV.has_value())
9334 if (!*SimplifiedOpV)
9336 OpV = *SimplifiedOpV;
9343 DepClassTy::REQUIRED);
9347 T.unionAssumed(OpAA->getAssumed().castOp(CastI->
getOpcode(),
9349 return T.isValidState();
9360 bool UsedAssumedInformation =
false;
9361 const auto &SimplifiedLHS =
A.getAssumedSimplified(
9364 if (!SimplifiedLHS.has_value())
9366 if (!*SimplifiedLHS)
9368 LHS = *SimplifiedLHS;
9370 const auto &SimplifiedRHS =
A.getAssumedSimplified(
9373 if (!SimplifiedRHS.has_value())
9375 if (!*SimplifiedRHS)
9377 RHS = *SimplifiedRHS;
9385 DepClassTy::REQUIRED);
9391 DepClassTy::REQUIRED);
9395 auto LHSAARange = LHSAA->getAssumedConstantRange(
A, CtxI);
9396 auto RHSAARange = RHSAA->getAssumedConstantRange(
A, CtxI);
9399 if (LHSAARange.isEmptySet() || RHSAARange.isEmptySet())
9402 bool MustTrue =
false, MustFalse =
false;
9404 auto AllowedRegion =
9407 if (AllowedRegion.intersectWith(LHSAARange).isEmptySet())
9413 assert((!MustTrue || !MustFalse) &&
9414 "Either MustTrue or MustFalse should be false!");
9423 LLVM_DEBUG(
dbgs() <<
"[AAValueConstantRange] " << *CmpI <<
" after "
9424 << (MustTrue ?
"true" : (MustFalse ?
"false" :
"unknown"))
9425 <<
": " <<
T <<
"\n\t" << *LHSAA <<
"\t<op>\n\t"
9429 return T.isValidState();
9438 if (!
I || isa<CallBase>(
I)) {
9441 bool UsedAssumedInformation =
false;
9442 const auto &SimplifiedOpV =
A.getAssumedSimplified(
9445 if (!SimplifiedOpV.has_value())
9447 if (!*SimplifiedOpV)
9449 Value *VPtr = *SimplifiedOpV;
9454 DepClassTy::REQUIRED);
9458 T.unionAssumed(AA->getAssumedConstantRange(
A, CtxI));
9462 return T.isValidState();
9466 if (
auto *BinOp = dyn_cast<BinaryOperator>(
I)) {
9467 if (!calculateBinaryOperator(
A, BinOp,
T, CtxI, QuerriedAAs))
9469 }
else if (
auto *CmpI = dyn_cast<CmpInst>(
I)) {
9470 if (!calculateCmpInst(
A, CmpI,
T, CtxI, QuerriedAAs))
9472 }
else if (
auto *CastI = dyn_cast<CastInst>(
I)) {
9473 if (!calculateCastInst(
A, CastI,
T, CtxI, QuerriedAAs))
9479 T.indicatePessimisticFixpoint();
9487 if (QueriedAA !=
this)
9490 if (
T.getAssumed() == getState().getAssumed())
9492 T.indicatePessimisticFixpoint();
9495 return T.isValidState();
9498 if (!VisitValueCB(getAssociatedValue(), getCtxI()))
9499 return indicatePessimisticFixpoint();
9504 return ChangeStatus::UNCHANGED;
9505 if (++NumChanges > MaxNumChanges) {
9506 LLVM_DEBUG(
dbgs() <<
"[AAValueConstantRange] performed " << NumChanges
9507 <<
" but only " << MaxNumChanges
9508 <<
" are allowed to avoid cyclic reasoning.");
9509 return indicatePessimisticFixpoint();
9511 return ChangeStatus::CHANGED;
9515 void trackStatistics()
const override {
9524 static constexpr int MaxNumChanges = 5;
9527struct AAValueConstantRangeFunction : AAValueConstantRangeImpl {
9529 : AAValueConstantRangeImpl(IRP,
A) {}
9533 llvm_unreachable(
"AAValueConstantRange(Function|CallSite)::updateImpl will "
9541struct AAValueConstantRangeCallSite : AAValueConstantRangeFunction {
9543 : AAValueConstantRangeFunction(IRP,
A) {}
9549struct AAValueConstantRangeCallSiteReturned
9550 : AACalleeToCallSite<AAValueConstantRange, AAValueConstantRangeImpl,
9551 AAValueConstantRangeImpl::StateType,
9555 AAValueConstantRangeImpl::StateType,
9561 if (
CallInst *CI = dyn_cast<CallInst>(&getAssociatedValue()))
9562 if (
auto *RangeMD = CI->getMetadata(LLVMContext::MD_range))
9565 AAValueConstantRangeImpl::initialize(
A);
9569 void trackStatistics()
const override {
9573struct AAValueConstantRangeCallSiteArgument : AAValueConstantRangeFloating {
9575 : AAValueConstantRangeFloating(IRP,
A) {}
9579 return ChangeStatus::UNCHANGED;
9583 void trackStatistics()
const override {
9600 if (
A.hasSimplificationCallback(getIRPosition()))
9601 indicatePessimisticFixpoint();
9603 AAPotentialConstantValues::initialize(
A);
9607 bool &ContainsUndef,
bool ForSelf) {
9609 bool UsedAssumedInformation =
false;
9611 UsedAssumedInformation)) {
9619 *
this, IRP, DepClassTy::REQUIRED);
9620 if (!PotentialValuesAA || !PotentialValuesAA->getState().isValidState())
9622 ContainsUndef = PotentialValuesAA->getState().undefIsContained();
9623 S = PotentialValuesAA->getState().getAssumedSet();
9630 ContainsUndef =
false;
9631 for (
auto &It : Values) {
9632 if (isa<UndefValue>(It.getValue())) {
9633 ContainsUndef =
true;
9636 auto *CI = dyn_cast<ConstantInt>(It.getValue());
9639 S.insert(CI->getValue());
9641 ContainsUndef &= S.empty();
9647 const std::string getAsStr(
Attributor *
A)
const override {
9656 return indicatePessimisticFixpoint();
9660struct AAPotentialConstantValuesArgument final
9661 : AAArgumentFromCallSiteArguments<AAPotentialConstantValues,
9662 AAPotentialConstantValuesImpl,
9663 PotentialConstantIntValuesState> {
9665 AAPotentialConstantValuesImpl,
9671 void trackStatistics()
const override {
9676struct AAPotentialConstantValuesReturned
9677 : AAReturnedFromReturnedValues<AAPotentialConstantValues,
9678 AAPotentialConstantValuesImpl> {
9680 AAPotentialConstantValuesImpl>;
9685 if (!
A.isFunctionIPOAmendable(*getAssociatedFunction()))
9686 indicatePessimisticFixpoint();
9687 Base::initialize(
A);
9691 void trackStatistics()
const override {
9696struct AAPotentialConstantValuesFloating : AAPotentialConstantValuesImpl {
9698 : AAPotentialConstantValuesImpl(IRP,
A) {}
9702 AAPotentialConstantValuesImpl::initialize(
A);
9706 Value &
V = getAssociatedValue();
9708 if (
auto *
C = dyn_cast<ConstantInt>(&V)) {
9709 unionAssumed(
C->getValue());
9710 indicateOptimisticFixpoint();
9714 if (isa<UndefValue>(&V)) {
9715 unionAssumedWithUndef();
9716 indicateOptimisticFixpoint();
9720 if (isa<BinaryOperator>(&V) || isa<ICmpInst>(&V) || isa<CastInst>(&V))
9723 if (isa<SelectInst>(V) || isa<PHINode>(V) || isa<LoadInst>(V))
9726 indicatePessimisticFixpoint();
9729 << getAssociatedValue() <<
"\n");
9743 case Instruction::Trunc:
9744 return Src.trunc(ResultBitWidth);
9745 case Instruction::SExt:
9746 return Src.sext(ResultBitWidth);
9747 case Instruction::ZExt:
9748 return Src.zext(ResultBitWidth);
9749 case Instruction::BitCast:
9756 bool &SkipOperation,
bool &Unsupported) {
9763 switch (BinOpcode) {
9767 case Instruction::Add:
9769 case Instruction::Sub:
9771 case Instruction::Mul:
9773 case Instruction::UDiv:
9775 SkipOperation =
true;
9779 case Instruction::SDiv:
9781 SkipOperation =
true;
9785 case Instruction::URem:
9787 SkipOperation =
true;
9791 case Instruction::SRem:
9793 SkipOperation =
true;
9797 case Instruction::Shl:
9799 case Instruction::LShr:
9801 case Instruction::AShr:
9803 case Instruction::And:
9805 case Instruction::Or:
9807 case Instruction::Xor:
9812 bool calculateBinaryOperatorAndTakeUnion(
const BinaryOperator *BinOp,
9814 bool SkipOperation =
false;
9817 calculateBinaryOperator(BinOp,
LHS,
RHS, SkipOperation, Unsupported);
9822 unionAssumed(Result);
9823 return isValidState();
9827 auto AssumedBefore = getAssumed();
9831 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
9832 SetTy LHSAAPVS, RHSAAPVS;
9834 LHSContainsUndef,
false) ||
9836 RHSContainsUndef,
false))
9837 return indicatePessimisticFixpoint();
9840 bool MaybeTrue =
false, MaybeFalse =
false;
9842 if (LHSContainsUndef && RHSContainsUndef) {
9845 unionAssumedWithUndef();
9846 }
else if (LHSContainsUndef) {
9847 for (
const APInt &R : RHSAAPVS) {
9848 bool CmpResult = calculateICmpInst(ICI, Zero, R);
9849 MaybeTrue |= CmpResult;
9850 MaybeFalse |= !CmpResult;
9851 if (MaybeTrue & MaybeFalse)
9852 return indicatePessimisticFixpoint();
9854 }
else if (RHSContainsUndef) {
9855 for (
const APInt &L : LHSAAPVS) {
9856 bool CmpResult = calculateICmpInst(ICI, L, Zero);
9857 MaybeTrue |= CmpResult;
9858 MaybeFalse |= !CmpResult;
9859 if (MaybeTrue & MaybeFalse)
9860 return indicatePessimisticFixpoint();
9863 for (
const APInt &L : LHSAAPVS) {
9864 for (
const APInt &R : RHSAAPVS) {
9865 bool CmpResult = calculateICmpInst(ICI, L, R);
9866 MaybeTrue |= CmpResult;
9867 MaybeFalse |= !CmpResult;
9868 if (MaybeTrue & MaybeFalse)
9869 return indicatePessimisticFixpoint();
9874 unionAssumed(
APInt( 1, 1));
9876 unionAssumed(
APInt( 1, 0));
9877 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
9878 : ChangeStatus::CHANGED;
9882 auto AssumedBefore = getAssumed();
9886 bool UsedAssumedInformation =
false;
9887 std::optional<Constant *>
C =
A.getAssumedConstant(
9888 *
SI->getCondition(), *
this, UsedAssumedInformation);
9891 bool OnlyLeft =
false, OnlyRight =
false;
9892 if (
C && *
C && (*C)->isOneValue())
9894 else if (
C && *
C && (*C)->isZeroValue())
9897 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
9898 SetTy LHSAAPVS, RHSAAPVS;
9901 LHSContainsUndef,
false))
9902 return indicatePessimisticFixpoint();
9906 RHSContainsUndef,
false))
9907 return indicatePessimisticFixpoint();
9909 if (OnlyLeft || OnlyRight) {
9911 auto *OpAA = OnlyLeft ? &LHSAAPVS : &RHSAAPVS;
9912 auto Undef = OnlyLeft ? LHSContainsUndef : RHSContainsUndef;
9915 unionAssumedWithUndef();
9917 for (
const auto &It : *OpAA)
9921 }
else if (LHSContainsUndef && RHSContainsUndef) {
9923 unionAssumedWithUndef();
9925 for (
const auto &It : LHSAAPVS)
9927 for (
const auto &It : RHSAAPVS)
9930 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
9931 : ChangeStatus::CHANGED;
9935 auto AssumedBefore = getAssumed();
9937 return indicatePessimisticFixpoint();
9942 bool SrcContainsUndef =
false;
9945 SrcContainsUndef,
false))
9946 return indicatePessimisticFixpoint();
9948 if (SrcContainsUndef)
9949 unionAssumedWithUndef();
9951 for (
const APInt &S : SrcPVS) {
9952 APInt T = calculateCastInst(CI, S, ResultBitWidth);
9956 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
9957 : ChangeStatus::CHANGED;
9961 auto AssumedBefore = getAssumed();
9965 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
9966 SetTy LHSAAPVS, RHSAAPVS;
9968 LHSContainsUndef,
false) ||
9970 RHSContainsUndef,
false))
9971 return indicatePessimisticFixpoint();
9976 if (LHSContainsUndef && RHSContainsUndef) {
9977 if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, Zero))
9978 return indicatePessimisticFixpoint();
9979 }
else if (LHSContainsUndef) {
9980 for (
const APInt &R : RHSAAPVS) {
9981 if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, R))
9982 return indicatePessimisticFixpoint();
9984 }
else if (RHSContainsUndef) {
9985 for (
const APInt &L : LHSAAPVS) {
9986 if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, Zero))
9987 return indicatePessimisticFixpoint();
9990 for (
const APInt &L : LHSAAPVS) {
9991 for (
const APInt &R : RHSAAPVS) {
9992 if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, R))
9993 return indicatePessimisticFixpoint();
9997 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
9998 : ChangeStatus::CHANGED;
10002 auto AssumedBefore = getAssumed();
10004 bool ContainsUndef;
10006 ContainsUndef,
true))
10007 return indicatePessimisticFixpoint();
10008 if (ContainsUndef) {
10009 unionAssumedWithUndef();
10014 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10015 : ChangeStatus::CHANGED;
10020 Value &
V = getAssociatedValue();
10023 if (
auto *ICI = dyn_cast<ICmpInst>(
I))
10024 return updateWithICmpInst(
A, ICI);
10026 if (
auto *SI = dyn_cast<SelectInst>(
I))
10027 return updateWithSelectInst(
A, SI);
10029 if (
auto *CI = dyn_cast<CastInst>(
I))
10030 return updateWithCastInst(
A, CI);
10032 if (
auto *BinOp = dyn_cast<BinaryOperator>(
I))
10033 return updateWithBinaryOperator(
A, BinOp);
10035 if (isa<PHINode>(
I) || isa<LoadInst>(
I))
10036 return updateWithInstruction(
A,
I);
10038 return indicatePessimisticFixpoint();
10042 void trackStatistics()
const override {
10047struct AAPotentialConstantValuesFunction : AAPotentialConstantValuesImpl {
10049 : AAPotentialConstantValuesImpl(IRP,
A) {}
10054 "AAPotentialConstantValues(Function|CallSite)::updateImpl will "
10059 void trackStatistics()
const override {
10064struct AAPotentialConstantValuesCallSite : AAPotentialConstantValuesFunction {
10066 : AAPotentialConstantValuesFunction(IRP,
A) {}
10069 void trackStatistics()
const override {
10074struct AAPotentialConstantValuesCallSiteReturned
10075 : AACalleeToCallSite<AAPotentialConstantValues,
10076 AAPotentialConstantValuesImpl> {
10077 AAPotentialConstantValuesCallSiteReturned(
const IRPosition &IRP,
10080 AAPotentialConstantValuesImpl>(IRP,
A) {}
10083 void trackStatistics()
const override {
10088struct AAPotentialConstantValuesCallSiteArgument
10089 : AAPotentialConstantValuesFloating {
10090 AAPotentialConstantValuesCallSiteArgument(
const IRPosition &IRP,
10092 : AAPotentialConstantValuesFloating(IRP,
A) {}
10096 AAPotentialConstantValuesImpl::initialize(
A);
10097 if (isAtFixpoint())
10100 Value &
V = getAssociatedValue();
10102 if (
auto *
C = dyn_cast<ConstantInt>(&V)) {
10103 unionAssumed(
C->getValue());
10104 indicateOptimisticFixpoint();
10108 if (isa<UndefValue>(&V)) {
10109 unionAssumedWithUndef();
10110 indicateOptimisticFixpoint();
10117 Value &
V = getAssociatedValue();
10118 auto AssumedBefore = getAssumed();
10122 return indicatePessimisticFixpoint();
10123 const auto &S = AA->getAssumed();
10125 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10126 : ChangeStatus::CHANGED;
10130 void trackStatistics()
const override {
10139 bool IgnoreSubsumingPositions) {
10140 assert(ImpliedAttributeKind == Attribute::NoUndef &&
10141 "Unexpected attribute kind");
10142 if (
A.hasAttr(IRP, {Attribute::NoUndef}, IgnoreSubsumingPositions,
10143 Attribute::NoUndef))
10163 Value &V = getAssociatedValue();
10164 if (isa<UndefValue>(V))
10165 indicatePessimisticFixpoint();
10166 assert(!isImpliedByIR(
A, getIRPosition(), Attribute::NoUndef));
10172 const Value *UseV =
U->get();
10181 bool TrackUse =
false;
10184 if (isa<CastInst>(*
I) || isa<GetElementPtrInst>(*
I))
10190 const std::string getAsStr(
Attributor *
A)
const override {
10191 return getAssumed() ?
"noundef" :
"may-undef-or-poison";
10198 bool UsedAssumedInformation =
false;
10199 if (
A.isAssumedDead(getIRPosition(),
nullptr,
nullptr,
10200 UsedAssumedInformation))
10201 return ChangeStatus::UNCHANGED;
10205 if (!
A.getAssumedSimplified(getIRPosition(), *
this, UsedAssumedInformation,
10208 return ChangeStatus::UNCHANGED;
10209 return AANoUndef::manifest(
A);
10213struct AANoUndefFloating :
public AANoUndefImpl {
10215 : AANoUndefImpl(IRP,
A) {}
10219 AANoUndefImpl::initialize(
A);
10220 if (!getState().isAtFixpoint() && getAnchorScope() &&
10221 !getAnchorScope()->isDeclaration())
10223 followUsesInMBEC(*
this,
A, getState(), *CtxI);
10228 auto VisitValueCB = [&](
const IRPosition &IRP) ->
bool {
10229 bool IsKnownNoUndef;
10230 return AA::hasAssumedIRAttr<Attribute::NoUndef>(
10231 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoUndef);
10235 bool UsedAssumedInformation =
false;
10236 Value *AssociatedValue = &getAssociatedValue();
10238 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
10243 Values.
size() != 1 || Values.
front().getValue() != AssociatedValue;
10251 if (AVIRP == getIRPosition() || !VisitValueCB(AVIRP))
10252 return indicatePessimisticFixpoint();
10253 return ChangeStatus::UNCHANGED;
10256 for (
const auto &VAC : Values)
10258 return indicatePessimisticFixpoint();
10260 return ChangeStatus::UNCHANGED;
10267struct AANoUndefReturned final
10268 : AAReturnedFromReturnedValues<AANoUndef, AANoUndefImpl> {
10270 : AAReturnedFromReturnedValues<
AANoUndef, AANoUndefImpl>(IRP,
A) {}
10276struct AANoUndefArgument final
10277 : AAArgumentFromCallSiteArguments<AANoUndef, AANoUndefImpl> {
10279 : AAArgumentFromCallSiteArguments<
AANoUndef, AANoUndefImpl>(IRP,
A) {}
10285struct AANoUndefCallSiteArgument final : AANoUndefFloating {
10287 : AANoUndefFloating(IRP,
A) {}
10293struct AANoUndefCallSiteReturned final
10294 : AACalleeToCallSite<AANoUndef, AANoUndefImpl> {
10296 : AACalleeToCallSite<
AANoUndef, AANoUndefImpl>(IRP,
A) {}
10311 if (isa<UndefValue>(V)) {
10312 indicateOptimisticFixpoint();
10317 A.getAttrs(getIRPosition(), {Attribute::NoFPClass},
Attrs,
false);
10318 for (
const auto &Attr : Attrs) {
10329 followUsesInMBEC(*
this,
A, getState(), *CtxI);
10335 const Value *UseV =
U->get();
10352 0, TLI, AC,
I, DT);
10355 if (
auto *CI = dyn_cast<CallInst>(UseV)) {
10357 switch (CI->getIntrinsicID()) {
10358 case Intrinsic::frexp:
10370 return !isa<LoadInst, AtomicRMWInst>(UseV);
10373 const std::string getAsStr(
Attributor *
A)
const override {
10374 std::string
Result =
"nofpclass";
10376 OS << getAssumedNoFPClass();
10386struct AANoFPClassFloating :
public AANoFPClassImpl {
10388 : AANoFPClassImpl(IRP,
A) {}
10393 bool UsedAssumedInformation =
false;
10394 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
10396 Values.
push_back({getAssociatedValue(), getCtxI()});
10402 DepClassTy::REQUIRED);
10403 if (!AA ||
this == AA) {
10404 T.indicatePessimisticFixpoint();
10410 return T.isValidState();
10413 for (
const auto &VAC : Values)
10414 if (!VisitValueCB(*
VAC.getValue(),
VAC.getCtxI()))
10415 return indicatePessimisticFixpoint();
10421 void trackStatistics()
const override {
10426struct AANoFPClassReturned final
10427 : AAReturnedFromReturnedValues<AANoFPClass, AANoFPClassImpl,
10428 AANoFPClassImpl::StateType, false, Attribute::None, false> {
10430 : AAReturnedFromReturnedValues<
AANoFPClass, AANoFPClassImpl,
10435 void trackStatistics()
const override {
10440struct AANoFPClassArgument final
10441 : AAArgumentFromCallSiteArguments<AANoFPClass, AANoFPClassImpl> {
10443 : AAArgumentFromCallSiteArguments<
AANoFPClass, AANoFPClassImpl>(IRP,
A) {}
10449struct AANoFPClassCallSiteArgument final : AANoFPClassFloating {
10451 : AANoFPClassFloating(IRP,
A) {}
10454 void trackStatistics()
const override {
10459struct AANoFPClassCallSiteReturned final
10460 : AACalleeToCallSite<AANoFPClass, AANoFPClassImpl> {
10462 : AACalleeToCallSite<
AANoFPClass, AANoFPClassImpl>(IRP,
A) {}
10465 void trackStatistics()
const override {
10474 return CalledFunctions;
10477 bool hasUnknownCallee()
const override {
return HasUnknownCallee; }
10479 bool hasNonAsmUnknownCallee()
const override {
10480 return HasUnknownCalleeNonAsm;
10483 const std::string getAsStr(
Attributor *
A)
const override {
10484 return "CallEdges[" + std::to_string(HasUnknownCallee) +
"," +
10485 std::to_string(CalledFunctions.size()) +
"]";
10488 void trackStatistics()
const override {}
10492 if (CalledFunctions.insert(Fn)) {
10493 Change = ChangeStatus::CHANGED;
10499 void setHasUnknownCallee(
bool NonAsm,
ChangeStatus &Change) {
10500 if (!HasUnknownCallee)
10501 Change = ChangeStatus::CHANGED;
10502 if (NonAsm && !HasUnknownCalleeNonAsm)
10503 Change = ChangeStatus::CHANGED;
10504 HasUnknownCalleeNonAsm |= NonAsm;
10505 HasUnknownCallee =
true;
10513 bool HasUnknownCallee =
false;
10516 bool HasUnknownCalleeNonAsm =
false;
10519struct AACallEdgesCallSite :
public AACallEdgesImpl {
10521 : AACallEdgesImpl(IRP,
A) {}
10527 if (
Function *Fn = dyn_cast<Function>(&V)) {
10528 addCalledFunction(Fn, Change);
10530 LLVM_DEBUG(
dbgs() <<
"[AACallEdges] Unrecognized value: " << V <<
"\n");
10531 setHasUnknownCallee(
true, Change);
10541 if (isa<Constant>(V)) {
10542 VisitValue(*V, CtxI);
10546 bool UsedAssumedInformation =
false;
10552 for (
auto &VAC : Values)
10553 VisitValue(*
VAC.getValue(),
VAC.getCtxI());
10556 CallBase *CB = cast<CallBase>(getCtxI());
10559 if (
IA->hasSideEffects() &&
10562 setHasUnknownCallee(
false, Change);
10569 *
this, getIRPosition(), DepClassTy::OPTIONAL))
10570 if (IndirectCallAA->foreachCallee(
10571 [&](
Function *Fn) { return VisitValue(*Fn, CB); }))
10580 for (
const Use *U : CallbackUses)
10581 ProcessCalledOperand(
U->get(), CB);
10587struct AACallEdgesFunction :
public AACallEdgesImpl {
10589 : AACallEdgesImpl(IRP,
A) {}
10596 CallBase &CB = cast<CallBase>(Inst);
10602 if (CBEdges->hasNonAsmUnknownCallee())
10603 setHasUnknownCallee(
true, Change);
10604 if (CBEdges->hasUnknownCallee())
10605 setHasUnknownCallee(
false, Change);
10607 for (
Function *
F : CBEdges->getOptimisticEdges())
10608 addCalledFunction(
F, Change);
10614 bool UsedAssumedInformation =
false;
10615 if (!
A.checkForAllCallLikeInstructions(ProcessCallInst, *
this,
10616 UsedAssumedInformation,
10620 setHasUnknownCallee(
true, Change);
10629struct AAInterFnReachabilityFunction
10630 :
public CachedReachabilityAA<AAInterFnReachability, Function> {
10631 using Base = CachedReachabilityAA<AAInterFnReachability, Function>;
10635 bool instructionCanReach(
10638 assert(
From.getFunction() == getAnchorScope() &&
"Queried the wrong AA!");
10639 auto *NonConstThis =
const_cast<AAInterFnReachabilityFunction *
>(
this);
10641 RQITy StackRQI(
A,
From, To, ExclusionSet,
false);
10642 typename RQITy::Reachable
Result;
10643 if (!NonConstThis->checkQueryCache(
A, StackRQI, Result))
10644 return NonConstThis->isReachableImpl(
A, StackRQI,
10646 return Result == RQITy::Reachable::Yes;
10650 bool IsTemporaryRQI)
override {
10653 if (EntryI != RQI.From &&
10654 !instructionCanReach(
A, *EntryI, *RQI.To,
nullptr))
10655 return rememberResult(
A, RQITy::Reachable::No, RQI,
false,
10658 auto CheckReachableCallBase = [&](
CallBase *CB) {
10661 if (!CBEdges || !CBEdges->getState().isValidState())
10664 if (CBEdges->hasUnknownCallee())
10667 for (
Function *Fn : CBEdges->getOptimisticEdges()) {
10678 if (Fn == getAnchorScope()) {
10679 if (EntryI == RQI.From)
10686 DepClassTy::OPTIONAL);
10689 if (!InterFnReachability ||
10699 DepClassTy::OPTIONAL);
10705 if (CheckReachableCallBase(cast<CallBase>(&CBInst)))
10708 A, *RQI.From, CBInst, RQI.ExclusionSet);
10711 bool UsedExclusionSet =
true;
10712 bool UsedAssumedInformation =
false;
10713 if (!
A.checkForAllCallLikeInstructions(CheckCallBase, *
this,
10714 UsedAssumedInformation,
10716 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
10719 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
10723 void trackStatistics()
const override {}
10727template <
typename AAType>
10728static std::optional<Constant *>
10739 std::optional<Constant *> COpt = AA->getAssumedConstant(
A);
10741 if (!COpt.has_value()) {
10743 return std::nullopt;
10745 if (
auto *
C = *COpt) {
10756 std::optional<Value *> V;
10757 for (
auto &It : Values) {
10759 if (V.has_value() && !*V)
10762 if (!V.has_value())
10776 if (
A.hasSimplificationCallback(getIRPosition())) {
10777 indicatePessimisticFixpoint();
10780 Value *Stripped = getAssociatedValue().stripPointerCasts();
10781 auto *
CE = dyn_cast<ConstantExpr>(Stripped);
10782 if (isa<Constant>(Stripped) &&
10783 (!CE ||
CE->getOpcode() != Instruction::ICmp)) {
10784 addValue(
A, getState(), *Stripped, getCtxI(),
AA::AnyScope,
10786 indicateOptimisticFixpoint();
10789 AAPotentialValues::initialize(
A);
10793 const std::string getAsStr(
Attributor *
A)
const override {
10800 template <
typename AAType>
10801 static std::optional<Value *> askOtherAA(
Attributor &
A,
10806 std::optional<Constant *>
C = askForAssumedConstant<AAType>(
A, AA, IRP, Ty);
10808 return std::nullopt;
10820 if (
auto *CB = dyn_cast_or_null<CallBase>(CtxI)) {
10821 for (
const auto &U : CB->
args()) {
10831 Type &Ty = *getAssociatedType();
10832 std::optional<Value *> SimpleV =
10833 askOtherAA<AAValueConstantRange>(
A, *
this, ValIRP, Ty);
10834 if (SimpleV.has_value() && !*SimpleV) {
10836 *
this, ValIRP, DepClassTy::OPTIONAL);
10837 if (PotentialConstantsAA && PotentialConstantsAA->isValidState()) {
10838 for (
const auto &It : PotentialConstantsAA->getAssumedSet())
10839 State.
unionAssumed({{*ConstantInt::get(&Ty, It),
nullptr}, S});
10840 if (PotentialConstantsAA->undefIsContained())
10845 if (!SimpleV.has_value())
10852 if (isa<ConstantInt>(VPtr))
10857 State.unionAssumed({{*VPtr, CtxI}, S});
10867 return II.I ==
I && II.S == S;
10869 bool operator<(
const ItemInfo &II)
const {
10882 bool UsedAssumedInformation =
false;
10884 if (!
A.getAssumedSimplifiedValues(IRP,
this, Values, CS,
10885 UsedAssumedInformation))
10888 for (
auto &It : Values)
10889 ValueScopeMap[It] += CS;
10891 for (
auto &It : ValueScopeMap)
10892 addValue(
A, getState(), *It.first.getValue(), It.first.getCtxI(),
10899 auto NewS = StateType::getBestState(getState());
10900 for (
const auto &It : getAssumedSet()) {
10903 addValue(
A, NewS, *It.first.getValue(), It.first.getCtxI(),
10906 assert(!undefIsContained() &&
"Undef should be an explicit value!");
10914 getState() = StateType::getBestState(getState());
10915 getState().unionAssumed({{getAssociatedValue(), getCtxI()},
AA::AnyScope});
10922 return indicatePessimisticFixpoint();
10930 if (!getAssumedSimplifiedValues(
A, Values, S))
10932 Value &OldV = getAssociatedValue();
10933 if (isa<UndefValue>(OldV))
10935 Value *NewV = getSingleValue(
A, *
this, getIRPosition(), Values);
10936 if (!NewV || NewV == &OldV)
10941 if (
A.changeAfterManifest(getIRPosition(), *NewV))
10947 bool getAssumedSimplifiedValues(
10949 AA::ValueScope S,
bool RecurseForSelectAndPHI =
false)
const override {
10950 if (!isValidState())
10952 bool UsedAssumedInformation =
false;
10953 for (
const auto &It : getAssumedSet())
10954 if (It.second & S) {
10955 if (RecurseForSelectAndPHI && (isa<PHINode>(It.first.getValue()) ||
10956 isa<SelectInst>(It.first.getValue()))) {
10957 if (
A.getAssumedSimplifiedValues(
10959 this, Values, S, UsedAssumedInformation))
10964 assert(!undefIsContained() &&
"Undef should be an explicit value!");
10969struct AAPotentialValuesFloating : AAPotentialValuesImpl {
10971 : AAPotentialValuesImpl(IRP,
A) {}
10975 auto AssumedBefore = getAssumed();
10977 genericValueTraversal(
A, &getAssociatedValue());
10979 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
10980 : ChangeStatus::CHANGED;
10984 struct LivenessInfo {
10985 const AAIsDead *LivenessAA =
nullptr;
10986 bool AnyDead =
false;
10999 bool UsedAssumedInformation =
false;
11001 auto GetSimplifiedValues = [&](
Value &
V,
11003 if (!
A.getAssumedSimplifiedValues(
11009 return Values.
empty();
11011 if (GetSimplifiedValues(*
LHS, LHSValues))
11013 if (GetSimplifiedValues(*
RHS, RHSValues))
11025 F ?
A.getInfoCache().getTargetLibraryInfoForFunction(*
F) :
nullptr;
11033 auto CheckPair = [&](
Value &LHSV,
Value &RHSV) {
11034 if (isa<UndefValue>(LHSV) || isa<UndefValue>(RHSV)) {
11036 nullptr, II.S, getAnchorScope());
11042 if (&LHSV == &RHSV &&
11046 addValue(
A, getState(), *NewV,
nullptr, II.S,
11053 if (TypedLHS && TypedRHS) {
11055 if (NewV && NewV != &Cmp) {
11056 addValue(
A, getState(), *NewV,
nullptr, II.S,
11066 bool LHSIsNull = isa<ConstantPointerNull>(LHSV);
11067 bool RHSIsNull = isa<ConstantPointerNull>(RHSV);
11068 if (!LHSIsNull && !RHSIsNull)
11074 assert((LHSIsNull || RHSIsNull) &&
11075 "Expected nullptr versus non-nullptr comparison at this point");
11078 unsigned PtrIdx = LHSIsNull;
11079 bool IsKnownNonNull;
11080 bool IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
11082 DepClassTy::REQUIRED, IsKnownNonNull);
11083 if (!IsAssumedNonNull)
11089 addValue(
A, getState(), *NewV,
nullptr, II.S,
11094 for (
auto &LHSValue : LHSValues)
11095 for (
auto &RHSValue : RHSValues)
11096 if (!CheckPair(*LHSValue.getValue(), *RHSValue.getValue()))
11104 bool UsedAssumedInformation =
false;
11106 std::optional<Constant *>
C =
11107 A.getAssumedConstant(*
SI.getCondition(), *
this, UsedAssumedInformation);
11108 bool NoValueYet = !
C.has_value();
11109 if (NoValueYet || isa_and_nonnull<UndefValue>(*
C))
11111 if (
auto *CI = dyn_cast_or_null<ConstantInt>(*
C)) {
11113 Worklist.
push_back({{*
SI.getFalseValue(), CtxI}, II.S});
11115 Worklist.
push_back({{*
SI.getTrueValue(), CtxI}, II.S});
11116 }
else if (&SI == &getAssociatedValue()) {
11118 Worklist.
push_back({{*
SI.getTrueValue(), CtxI}, II.S});
11119 Worklist.
push_back({{*
SI.getFalseValue(), CtxI}, II.S});
11121 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
11123 if (!SimpleV.has_value())
11126 addValue(
A, getState(), **SimpleV, CtxI, II.S, getAnchorScope());
11138 bool UsedAssumedInformation =
false;
11140 PotentialValueOrigins, *
this,
11141 UsedAssumedInformation,
11143 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Failed to get potentially "
11144 "loaded values for load instruction "
11155 if (!
I || isa<AssumeInst>(
I))
11157 if (
auto *SI = dyn_cast<StoreInst>(
I))
11158 return A.isAssumedDead(
SI->getOperandUse(0),
this,
11160 UsedAssumedInformation,
11162 return A.isAssumedDead(*
I,
this,
nullptr,
11163 UsedAssumedInformation,
11166 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Load is onl used by assumes "
11167 "and we cannot delete all the stores: "
11178 bool AllLocal = ScopeIsLocal;
11183 if (!DynamicallyUnique) {
11184 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Not all potentially loaded "
11185 "values are dynamically unique: "
11190 for (
auto *PotentialCopy : PotentialCopies) {
11192 Worklist.
push_back({{*PotentialCopy, CtxI}, II.S});
11197 if (!AllLocal && ScopeIsLocal)
11202 bool handlePHINode(
11206 auto GetLivenessInfo = [&](
const Function &
F) -> LivenessInfo & {
11207 LivenessInfo &LI = LivenessAAs[&
F];
11208 if (!LI.LivenessAA)
11214 if (&
PHI == &getAssociatedValue()) {
11215 LivenessInfo &LI = GetLivenessInfo(*
PHI.getFunction());
11218 *
PHI.getFunction());
11222 for (
unsigned u = 0, e =
PHI.getNumIncomingValues(); u < e; u++) {
11224 if (LI.LivenessAA &&
11225 LI.LivenessAA->isEdgeDead(IncomingBB,
PHI.getParent())) {
11235 if (CyclePHI && isa<Instruction>(V) &&
11236 (!
C ||
C->contains(cast<Instruction>(V)->getParent())))
11244 bool UsedAssumedInformation =
false;
11245 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
11247 if (!SimpleV.has_value())
11251 addValue(
A, getState(), **SimpleV, &
PHI, II.S, getAnchorScope());
11260 bool SomeSimplified =
false;
11261 bool UsedAssumedInformation =
false;
11266 const auto &SimplifiedOp =
A.getAssumedSimplified(
11271 if (!SimplifiedOp.has_value())
11275 NewOps[
Idx] = *SimplifiedOp;
11279 SomeSimplified |= (NewOps[
Idx] !=
Op);
11285 if (!SomeSimplified)
11292 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
11298 if (!NewV || NewV == &
I)
11301 LLVM_DEBUG(
dbgs() <<
"Generic inst " <<
I <<
" assumed simplified to "
11303 Worklist.
push_back({{*NewV, II.I.getCtxI()}, II.S});
11311 if (
auto *CI = dyn_cast<CmpInst>(&
I))
11313 CI->getPredicate(), II, Worklist);
11315 switch (
I.getOpcode()) {
11316 case Instruction::Select:
11317 return handleSelectInst(
A, cast<SelectInst>(
I), II, Worklist);
11318 case Instruction::PHI:
11319 return handlePHINode(
A, cast<PHINode>(
I), II, Worklist, LivenessAAs);
11320 case Instruction::Load:
11321 return handleLoadInst(
A, cast<LoadInst>(
I), II, Worklist);
11323 return handleGenericInst(
A,
I, II, Worklist);
11338 Value *
V = II.I.getValue();
11345 if (!Visited.
insert(II).second)
11350 LLVM_DEBUG(
dbgs() <<
"Generic value traversal reached iteration limit: "
11351 << Iteration <<
"!\n");
11352 addValue(
A, getState(), *V, CtxI, S, getAnchorScope());
11358 Value *NewV =
nullptr;
11359 if (
V->getType()->isPointerTy()) {
11362 if (
auto *CB = dyn_cast<CallBase>(V))
11372 if (NewV && NewV != V) {
11373 Worklist.
push_back({{*NewV, CtxI}, S});
11377 if (
auto *CE = dyn_cast<ConstantExpr>(V)) {
11378 if (
CE->getOpcode() == Instruction::ICmp)
11379 if (handleCmp(
A, *CE,
CE->getOperand(0),
CE->getOperand(1),
11384 if (
auto *
I = dyn_cast<Instruction>(V)) {
11389 if (V != InitialV || isa<Argument>(V))
11394 if (V == InitialV && CtxI == getCtxI()) {
11395 indicatePessimisticFixpoint();
11399 addValue(
A, getState(), *V, CtxI, S, getAnchorScope());
11400 }
while (!Worklist.
empty());
11404 for (
auto &It : LivenessAAs)
11405 if (It.second.AnyDead)
11406 A.recordDependence(*It.second.LivenessAA, *
this, DepClassTy::OPTIONAL);
11410 void trackStatistics()
const override {
11415struct AAPotentialValuesArgument final : AAPotentialValuesImpl {
11416 using Base = AAPotentialValuesImpl;
11422 auto &Arg = cast<Argument>(getAssociatedValue());
11424 indicatePessimisticFixpoint();
11429 auto AssumedBefore = getAssumed();
11431 unsigned ArgNo = getCalleeArgNo();
11433 bool UsedAssumedInformation =
false;
11437 if (CSArgIRP.getPositionKind() == IRP_INVALID)
11440 if (!
A.getAssumedSimplifiedValues(CSArgIRP,
this, Values,
11442 UsedAssumedInformation))
11445 return isValidState();
11448 if (!
A.checkForAllCallSites(CallSitePred, *
this,
11450 UsedAssumedInformation))
11451 return indicatePessimisticFixpoint();
11453 Function *Fn = getAssociatedFunction();
11454 bool AnyNonLocal =
false;
11455 for (
auto &It : Values) {
11456 if (isa<Constant>(It.getValue())) {
11457 addValue(
A, getState(), *It.getValue(), It.getCtxI(),
AA::AnyScope,
11462 return indicatePessimisticFixpoint();
11464 if (
auto *Arg = dyn_cast<Argument>(It.getValue()))
11466 addValue(
A, getState(), *It.getValue(), It.getCtxI(),
AA::AnyScope,
11472 AnyNonLocal =
true;
11474 assert(!undefIsContained() &&
"Undef should be an explicit value!");
11476 giveUpOnIntraprocedural(
A);
11478 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11479 : ChangeStatus::CHANGED;
11483 void trackStatistics()
const override {
11488struct AAPotentialValuesReturned :
public AAPotentialValuesFloating {
11489 using Base = AAPotentialValuesFloating;
11496 if (!
F ||
F->isDeclaration() ||
F->getReturnType()->isVoidTy()) {
11497 indicatePessimisticFixpoint();
11504 ReturnedArg = &Arg;
11507 if (!
A.isFunctionIPOAmendable(*
F) ||
11508 A.hasSimplificationCallback(getIRPosition())) {
11510 indicatePessimisticFixpoint();
11512 indicateOptimisticFixpoint();
11518 auto AssumedBefore = getAssumed();
11519 bool UsedAssumedInformation =
false;
11522 Function *AnchorScope = getAnchorScope();
11528 UsedAssumedInformation,
11534 addValue(
A, getState(), *
VAC.getValue(),
11535 VAC.getCtxI() ?
VAC.getCtxI() : CtxI, S, AnchorScope);
11541 HandleReturnedValue(*ReturnedArg,
nullptr,
true);
11544 bool AddValues =
true;
11545 if (isa<PHINode>(RetI.getOperand(0)) ||
11546 isa<SelectInst>(RetI.getOperand(0))) {
11547 addValue(
A, getState(), *RetI.getOperand(0), &RetI,
AA::AnyScope,
11551 return HandleReturnedValue(*RetI.getOperand(0), &RetI, AddValues);
11554 if (!
A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
11555 UsedAssumedInformation,
11557 return indicatePessimisticFixpoint();
11560 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11561 : ChangeStatus::CHANGED;
11566 Function *AnchorScope)
const override {
11568 if (
auto *CB = dyn_cast<CallBase>(&V))
11571 Base::addValue(
A, State, V, CtxI, S, AnchorScope);
11576 return ChangeStatus::UNCHANGED;
11578 if (!getAssumedSimplifiedValues(
A, Values, AA::ValueScope::Intraprocedural,
11580 return ChangeStatus::UNCHANGED;
11581 Value *NewVal = getSingleValue(
A, *
this, getIRPosition(), Values);
11583 return ChangeStatus::UNCHANGED;
11586 if (
auto *Arg = dyn_cast<Argument>(NewVal)) {
11588 "Number of function with unique return");
11589 Changed |=
A.manifestAttrs(
11596 Value *RetOp = RetI.getOperand(0);
11597 if (isa<UndefValue>(RetOp) || RetOp == NewVal)
11600 if (
A.changeUseAfterManifest(RetI.getOperandUse(0), *NewVal))
11601 Changed = ChangeStatus::CHANGED;
11604 bool UsedAssumedInformation =
false;
11605 (void)
A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
11606 UsedAssumedInformation,
11616 void trackStatistics()
const override{
11623struct AAPotentialValuesFunction : AAPotentialValuesImpl {
11625 : AAPotentialValuesImpl(IRP,
A) {}
11634 void trackStatistics()
const override {
11639struct AAPotentialValuesCallSite : AAPotentialValuesFunction {
11641 : AAPotentialValuesFunction(IRP,
A) {}
11644 void trackStatistics()
const override {
11649struct AAPotentialValuesCallSiteReturned : AAPotentialValuesImpl {
11651 : AAPotentialValuesImpl(IRP,
A) {}
11655 auto AssumedBefore = getAssumed();
11659 return indicatePessimisticFixpoint();
11661 bool UsedAssumedInformation =
false;
11662 auto *CB = cast<CallBase>(getCtxI());
11665 UsedAssumedInformation))
11666 return indicatePessimisticFixpoint();
11671 UsedAssumedInformation))
11672 return indicatePessimisticFixpoint();
11676 bool AnyNonLocal =
false;
11677 for (
auto &It : Values) {
11678 Value *
V = It.getValue();
11679 std::optional<Value *> CallerV =
A.translateArgumentToCallSiteContent(
11680 V, *CB, *
this, UsedAssumedInformation);
11681 if (!CallerV.has_value()) {
11685 V = *CallerV ? *CallerV :
V;
11691 if (
auto *Arg = dyn_cast<Argument>(V))
11697 addValue(
A, getState(), *V, CB,
AA::AnyScope, getAnchorScope());
11699 AnyNonLocal =
true;
11707 UsedAssumedInformation))
11708 return indicatePessimisticFixpoint();
11709 AnyNonLocal =
false;
11711 for (
auto &It : Values) {
11712 Value *
V = It.getValue();
11714 return indicatePessimisticFixpoint();
11716 addValue(
A, getState(), *V, CB,
AA::AnyScope, getAnchorScope());
11718 AnyNonLocal =
true;
11724 giveUpOnIntraprocedural(
A);
11726 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11727 : ChangeStatus::CHANGED;
11731 return AAPotentialValues::indicatePessimisticFixpoint();
11735 void trackStatistics()
const override {
11740struct AAPotentialValuesCallSiteArgument : AAPotentialValuesFloating {
11742 : AAPotentialValuesFloating(IRP,
A) {}
11745 void trackStatistics()
const override {
11761 if (getKnown().isUniversal())
11762 return ChangeStatus::UNCHANGED;
11765 return A.manifestAttrs(
11768 llvm::join(getAssumed().getSet(),
",")),
11773 return isValidState() && setContains(Assumption);
11777 const std::string getAsStr(
Attributor *
A)
const override {
11778 const SetContents &Known = getKnown();
11779 const SetContents &Assumed = getAssumed();
11781 const std::string KnownStr =
11782 llvm::join(Known.getSet().begin(), Known.getSet().end(),
",");
11783 const std::string AssumedStr =
11784 (Assumed.isUniversal())
11786 : llvm::join(Assumed.getSet().begin(), Assumed.getSet().end(),
",");
11788 return "Known [" + KnownStr +
"]," +
" Assumed [" + AssumedStr +
"]";
11803struct AAAssumptionInfoFunction final : AAAssumptionInfoImpl {
11805 : AAAssumptionInfoImpl(IRP,
A,
11810 bool Changed =
false;
11815 DepClassTy::REQUIRED);
11819 Changed |= getIntersection(AssumptionAA->getAssumed());
11820 return !getAssumed().empty() || !getKnown().empty();
11823 bool UsedAssumedInformation =
false;
11828 if (!
A.checkForAllCallSites(CallSitePred, *
this,
true,
11829 UsedAssumedInformation))
11830 return indicatePessimisticFixpoint();
11832 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11835 void trackStatistics()
const override {}
11839struct AAAssumptionInfoCallSite final : AAAssumptionInfoImpl {
11842 : AAAssumptionInfoImpl(IRP,
A, getInitialAssumptions(IRP)) {}
11853 auto *AssumptionAA =
11856 return indicatePessimisticFixpoint();
11857 bool Changed = getIntersection(AssumptionAA->getAssumed());
11858 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11862 void trackStatistics()
const override {}
11874 return Assumptions;
11889struct AAUnderlyingObjectsImpl
11895 const std::string getAsStr(
Attributor *
A)
const override {
11896 return std::string(
"UnderlyingObjects ") +
11898 ? (std::string(
"inter #") +
11899 std::to_string(InterAssumedUnderlyingObjects.size()) +
11900 " objs" + std::string(
", intra #") +
11901 std::to_string(IntraAssumedUnderlyingObjects.size()) +
11907 void trackStatistics()
const override {}
11911 auto &
Ptr = getAssociatedValue();
11915 bool UsedAssumedInformation =
false;
11920 Scope, UsedAssumedInformation))
11923 bool Changed =
false;
11925 for (
unsigned I = 0;
I < Values.
size(); ++
I) {
11926 auto &
VAC = Values[
I];
11927 auto *Obj =
VAC.getValue();
11929 if (UO && UO !=
VAC.getValue() && SeenObjects.
insert(UO).second) {
11932 auto Pred = [&Values](
Value &
V) {
11937 if (!OtherAA || !OtherAA->forallUnderlyingObjects(Pred, Scope))
11939 "The forall call should not return false at this position");
11944 if (isa<SelectInst>(Obj)) {
11945 Changed |= handleIndirect(
A, *Obj, UnderlyingObjects, Scope);
11948 if (
auto *
PHI = dyn_cast<PHINode>(Obj)) {
11951 for (
unsigned u = 0, e =
PHI->getNumIncomingValues(); u < e; u++) {
11952 Changed |= handleIndirect(
A, *
PHI->getIncomingValue(u),
11953 UnderlyingObjects, Scope);
11958 Changed |= UnderlyingObjects.
insert(Obj);
11964 bool Changed =
false;
11968 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11971 bool forallUnderlyingObjects(
11974 if (!isValidState())
11975 return Pred(getAssociatedValue());
11978 ? IntraAssumedUnderlyingObjects
11979 : InterAssumedUnderlyingObjects;
11980 for (
Value *Obj : AssumedUnderlyingObjects)
11993 bool Changed =
false;
11996 auto Pred = [&](
Value &
V) {
11997 Changed |= UnderlyingObjects.
insert(&V);
12000 if (!AA || !AA->forallUnderlyingObjects(Pred, Scope))
12002 "The forall call should not return false at this position");
12012struct AAUnderlyingObjectsFloating final : AAUnderlyingObjectsImpl {
12014 : AAUnderlyingObjectsImpl(IRP,
A) {}
12017struct AAUnderlyingObjectsArgument final : AAUnderlyingObjectsImpl {
12019 : AAUnderlyingObjectsImpl(IRP,
A) {}
12022struct AAUnderlyingObjectsCallSite final : AAUnderlyingObjectsImpl {
12024 : AAUnderlyingObjectsImpl(IRP,
A) {}
12027struct AAUnderlyingObjectsCallSiteArgument final : AAUnderlyingObjectsImpl {
12029 : AAUnderlyingObjectsImpl(IRP,
A) {}
12032struct AAUnderlyingObjectsReturned final : AAUnderlyingObjectsImpl {
12034 : AAUnderlyingObjectsImpl(IRP,
A) {}
12037struct AAUnderlyingObjectsCallSiteReturned final : AAUnderlyingObjectsImpl {
12039 : AAUnderlyingObjectsImpl(IRP,
A) {}
12042struct AAUnderlyingObjectsFunction final : AAUnderlyingObjectsImpl {
12044 : AAUnderlyingObjectsImpl(IRP,
A) {}
12059 Instruction *UInst = dyn_cast<Instruction>(
U.getUser());
12065 LLVM_DEBUG(
dbgs() <<
"[AAGlobalValueInfo] Check use: " << *
U.get() <<
" in "
12066 << *UInst <<
"\n");
12068 if (
auto *Cmp = dyn_cast<ICmpInst>(
U.getUser())) {
12069 int Idx = &
Cmp->getOperandUse(0) == &
U;
12070 if (isa<Constant>(
Cmp->getOperand(
Idx)))
12072 return U == &getAnchorValue();
12076 if (isa<ReturnInst>(UInst)) {
12078 Worklist.
push_back(ACS.getInstruction());
12081 bool UsedAssumedInformation =
false;
12083 if (!
A.checkForAllCallSites(CallSitePred, *UInst->
getFunction(),
12085 UsedAssumedInformation))
12092 auto *CB = dyn_cast<CallBase>(UInst);
12103 if (!Fn || !
A.isFunctionIPOAmendable(*Fn))
12112 unsigned NumUsesBefore =
Uses.size();
12118 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
12121 case UseCaptureKind::NO_CAPTURE:
12122 return checkUse(
A, U, Follow, Worklist);
12123 case UseCaptureKind::MAY_CAPTURE:
12124 return checkUse(
A, U, Follow, Worklist);
12125 case UseCaptureKind::PASSTHROUGH:
12131 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
12132 Uses.insert(&OldU);
12136 while (!Worklist.
empty()) {
12138 if (!Visited.
insert(V).second)
12140 if (!
A.checkForAllUses(UsePred, *
this, *V,
12142 DepClassTy::OPTIONAL,
12143 true, EquivalentUseCB)) {
12144 return indicatePessimisticFixpoint();
12148 return Uses.size() == NumUsesBefore ? ChangeStatus::UNCHANGED
12149 : ChangeStatus::CHANGED;
12152 bool isPotentialUse(
const Use &U)
const override {
12153 return !isValidState() ||
Uses.contains(&U);
12158 return ChangeStatus::UNCHANGED;
12162 const std::string getAsStr(
Attributor *
A)
const override {
12163 return "[" + std::to_string(
Uses.size()) +
" uses]";
12166 void trackStatistics()
const override {
12184 auto *MD = getCtxI()->getMetadata(LLVMContext::MD_callees);
12185 if (!MD && !
A.isClosedWorldModule())
12189 for (
const auto &
Op : MD->operands())
12190 if (
Function *Callee = mdconst::dyn_extract_or_null<Function>(
Op))
12191 PotentialCallees.insert(Callee);
12192 }
else if (
A.isClosedWorldModule()) {
12194 A.getInfoCache().getIndirectlyCallableFunctions(
A);
12195 PotentialCallees.insert(IndirectlyCallableFunctions.
begin(),
12196 IndirectlyCallableFunctions.
end());
12199 if (PotentialCallees.empty())
12200 indicateOptimisticFixpoint();
12204 CallBase *CB = cast<CallBase>(getCtxI());
12209 bool AllCalleesKnownNow = AllCalleesKnown;
12211 auto CheckPotentialCalleeUse = [&](
Function &PotentialCallee,
12212 bool &UsedAssumedInformation) {
12215 if (!GIAA || GIAA->isPotentialUse(CalleeUse))
12217 UsedAssumedInformation = !GIAA->isAtFixpoint();
12221 auto AddPotentialCallees = [&]() {
12222 for (
auto *PotentialCallee : PotentialCallees) {
12223 bool UsedAssumedInformation =
false;
12224 if (CheckPotentialCalleeUse(*PotentialCallee, UsedAssumedInformation))
12225 AssumedCalleesNow.
insert(PotentialCallee);
12231 bool UsedAssumedInformation =
false;
12234 AA::ValueScope::AnyScope,
12235 UsedAssumedInformation)) {
12236 if (PotentialCallees.empty())
12237 return indicatePessimisticFixpoint();
12238 AddPotentialCallees();
12243 auto CheckPotentialCallee = [&](
Function &Fn) {
12244 if (!PotentialCallees.empty() && !PotentialCallees.count(&Fn))
12247 auto &CachedResult = FilterResults[&Fn];
12248 if (CachedResult.has_value())
12249 return CachedResult.value();
12251 bool UsedAssumedInformation =
false;
12252 if (!CheckPotentialCalleeUse(Fn, UsedAssumedInformation)) {
12253 if (!UsedAssumedInformation)
12254 CachedResult =
false;
12263 for (
int I = NumCBArgs;
I < NumFnArgs; ++
I) {
12264 bool IsKnown =
false;
12265 if (AA::hasAssumedIRAttr<Attribute::NoUndef>(
12267 DepClassTy::OPTIONAL, IsKnown)) {
12269 CachedResult =
false;
12274 CachedResult =
true;
12280 for (
auto &VAC : Values) {
12281 if (isa<UndefValue>(
VAC.getValue()))
12283 if (isa<ConstantPointerNull>(
VAC.getValue()) &&
12284 VAC.getValue()->getType()->getPointerAddressSpace() == 0)
12287 if (
auto *VACFn = dyn_cast<Function>(
VAC.getValue())) {
12288 if (CheckPotentialCallee(*VACFn))
12289 AssumedCalleesNow.
insert(VACFn);
12292 if (!PotentialCallees.empty()) {
12293 AddPotentialCallees();
12296 AllCalleesKnownNow =
false;
12299 if (AssumedCalleesNow == AssumedCallees &&
12300 AllCalleesKnown == AllCalleesKnownNow)
12301 return ChangeStatus::UNCHANGED;
12303 std::swap(AssumedCallees, AssumedCalleesNow);
12304 AllCalleesKnown = AllCalleesKnownNow;
12305 return ChangeStatus::CHANGED;
12311 if (!AllCalleesKnown && AssumedCallees.empty())
12312 return ChangeStatus::UNCHANGED;
12314 CallBase *CB = cast<CallBase>(getCtxI());
12315 bool UsedAssumedInformation =
false;
12316 if (
A.isAssumedDead(*CB,
this,
nullptr,
12317 UsedAssumedInformation))
12318 return ChangeStatus::UNCHANGED;
12322 if (
FP->getType()->getPointerAddressSpace())
12333 if (AssumedCallees.empty()) {
12334 assert(AllCalleesKnown &&
12335 "Expected all callees to be known if there are none.");
12336 A.changeToUnreachableAfterManifest(CB);
12337 return ChangeStatus::CHANGED;
12341 if (AllCalleesKnown && AssumedCallees.size() == 1) {
12342 auto *NewCallee = AssumedCallees.front();
12345 return ChangeStatus::CHANGED;
12352 A.deleteAfterManifest(*CB);
12353 return ChangeStatus::CHANGED;
12363 bool SpecializedForAnyCallees =
false;
12364 bool SpecializedForAllCallees = AllCalleesKnown;
12368 for (
Function *NewCallee : AssumedCallees) {
12369 if (!
A.shouldSpecializeCallSiteForCallee(*
this, *CB, *NewCallee)) {
12370 SkippedAssumedCallees.
push_back(NewCallee);
12371 SpecializedForAllCallees =
false;
12374 SpecializedForAnyCallees =
true;
12380 A.registerManifestAddedBasicBlock(*ThenTI->
getParent());
12381 A.registerManifestAddedBasicBlock(*IP->getParent());
12382 auto *SplitTI = cast<BranchInst>(LastCmp->
getNextNode());
12387 A.registerManifestAddedBasicBlock(*ElseBB);
12389 SplitTI->replaceUsesOfWith(CBBB, ElseBB);
12397 auto *CBClone = cast<CallBase>(CB->
clone());
12398 CBClone->insertBefore(ThenTI);
12399 NewCall = &cast<CallInst>(
promoteCall(*CBClone, NewCallee, &RetBC));
12407 auto AttachCalleeMetadata = [&](
CallBase &IndirectCB) {
12408 if (!AllCalleesKnown)
12409 return ChangeStatus::UNCHANGED;
12410 MDBuilder MDB(IndirectCB.getContext());
12411 MDNode *Callees = MDB.createCallees(SkippedAssumedCallees);
12412 IndirectCB.setMetadata(LLVMContext::MD_callees, Callees);
12413 return ChangeStatus::CHANGED;
12416 if (!SpecializedForAnyCallees)
12417 return AttachCalleeMetadata(*CB);
12420 if (SpecializedForAllCallees) {
12424 IP->eraseFromParent();
12426 auto *CBClone = cast<CallInst>(CB->
clone());
12427 CBClone->setName(CB->
getName());
12428 CBClone->insertBefore(*IP->getParent(), IP);
12429 NewCalls.
push_back({CBClone,
nullptr});
12430 AttachCalleeMetadata(*CBClone);
12438 for (
auto &It : NewCalls) {
12440 Instruction *CallRet = It.second ? It.second : It.first;
12452 A.deleteAfterManifest(*CB);
12453 Changed = ChangeStatus::CHANGED;
12459 const std::string getAsStr(
Attributor *
A)
const override {
12460 return std::string(AllCalleesKnown ?
"eliminate" :
"specialize") +
12461 " indirect call site with " + std::to_string(AssumedCallees.size()) +
12465 void trackStatistics()
const override {
12466 if (AllCalleesKnown) {
12468 Eliminated, CallSites,
12469 "Number of indirect call sites eliminated via specialization")
12472 "Number of indirect call sites specialized")
12477 return isValidState() && AllCalleesKnown &&
all_of(AssumedCallees, CB);
12494 bool AllCalleesKnown =
true;
12505 assert(isValidState() &&
"the AA is invalid");
12506 return AssumedAddressSpace;
12511 assert(getAssociatedType()->isPtrOrPtrVectorTy() &&
12512 "Associated value is not a pointer");
12516 int32_t OldAddressSpace = AssumedAddressSpace;
12518 DepClassTy::REQUIRED);
12519 auto Pred = [&](
Value &Obj) {
12520 if (isa<UndefValue>(&Obj))
12525 if (!AUO->forallUnderlyingObjects(Pred))
12526 return indicatePessimisticFixpoint();
12528 return OldAddressSpace == AssumedAddressSpace ? ChangeStatus::UNCHANGED
12529 : ChangeStatus::CHANGED;
12534 Value *AssociatedValue = &getAssociatedValue();
12535 Value *OriginalValue = peelAddrspacecast(AssociatedValue);
12539 return ChangeStatus::UNCHANGED;
12541 Type *NewPtrTy = PointerType::get(getAssociatedType()->getContext(),
12543 bool UseOriginalValue =
12547 bool Changed =
false;
12551 if (UseOriginalValue) {
12552 A.changeUseAfterManifest(U, *OriginalValue);
12557 A.changeUseAfterManifest(U, *
CastInst);
12560 auto Pred = [&](
const Use &
U,
bool &) {
12561 if (
U.get() != AssociatedValue)
12563 auto *Inst = dyn_cast<Instruction>(
U.getUser());
12570 if (isa<LoadInst>(Inst))
12571 MakeChange(Inst,
const_cast<Use &
>(U));
12572 if (isa<StoreInst>(Inst)) {
12574 if (
U.getOperandNo() == 1)
12575 MakeChange(Inst,
const_cast<Use &
>(U));
12582 (void)
A.checkForAllUses(Pred, *
this, getAssociatedValue(),
12585 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
12589 const std::string getAsStr(
Attributor *
A)
const override {
12590 if (!isValidState())
12591 return "addrspace(<invalid>)";
12592 return "addrspace(" +
12593 (AssumedAddressSpace == NoAddressSpace
12595 : std::to_string(AssumedAddressSpace)) +
12600 int32_t AssumedAddressSpace = NoAddressSpace;
12602 bool takeAddressSpace(int32_t AS) {
12603 if (AssumedAddressSpace == NoAddressSpace) {
12604 AssumedAddressSpace = AS;
12607 return AssumedAddressSpace == AS;
12611 if (
auto *
I = dyn_cast<AddrSpaceCastInst>(V))
12612 return peelAddrspacecast(
I->getPointerOperand());
12613 if (
auto *
C = dyn_cast<ConstantExpr>(V))
12614 if (
C->getOpcode() == Instruction::AddrSpaceCast)
12615 return peelAddrspacecast(
C->getOperand(0));
12620struct AAAddressSpaceFloating final : AAAddressSpaceImpl {
12622 : AAAddressSpaceImpl(IRP,
A) {}
12624 void trackStatistics()
const override {
12629struct AAAddressSpaceReturned final : AAAddressSpaceImpl {
12631 : AAAddressSpaceImpl(IRP,
A) {}
12637 (void)indicatePessimisticFixpoint();
12640 void trackStatistics()
const override {
12645struct AAAddressSpaceCallSiteReturned final : AAAddressSpaceImpl {
12647 : AAAddressSpaceImpl(IRP,
A) {}
12649 void trackStatistics()
const override {
12654struct AAAddressSpaceArgument final : AAAddressSpaceImpl {
12656 : AAAddressSpaceImpl(IRP,
A) {}
12661struct AAAddressSpaceCallSiteArgument final : AAAddressSpaceImpl {
12663 : AAAddressSpaceImpl(IRP,
A) {}
12669 (void)indicatePessimisticFixpoint();
12672 void trackStatistics()
const override {
12684 std::optional<TypeSize> getAllocatedSize()
const override {
12685 assert(isValidState() &&
"the AA is invalid");
12686 return AssumedAllocatedSize;
12689 std::optional<TypeSize> findInitialAllocationSize(
Instruction *
I,
12693 switch (
I->getOpcode()) {
12694 case Instruction::Alloca: {
12699 return std::nullopt;
12709 if (!isa<AllocaInst>(
I))
12710 return indicatePessimisticFixpoint();
12712 bool IsKnownNoCapture;
12713 if (!AA::hasAssumedIRAttr<Attribute::NoCapture>(
12714 A,
this, IRP, DepClassTy::OPTIONAL, IsKnownNoCapture))
12715 return indicatePessimisticFixpoint();
12718 A.getOrCreateAAFor<
AAPointerInfo>(IRP, *
this, DepClassTy::REQUIRED);
12721 return indicatePessimisticFixpoint();
12724 return indicatePessimisticFixpoint();
12727 const auto AllocationSize = findInitialAllocationSize(
I,
DL);
12730 if (!AllocationSize)
12731 return indicatePessimisticFixpoint();
12735 if (*AllocationSize == 0)
12736 return indicatePessimisticFixpoint();
12742 return indicatePessimisticFixpoint();
12744 if (BinSize == 0) {
12745 auto NewAllocationSize = std::optional<TypeSize>(
TypeSize(0,
false));
12746 if (!changeAllocationSize(NewAllocationSize))
12747 return ChangeStatus::UNCHANGED;
12748 return ChangeStatus::CHANGED;
12752 const auto &It = PI->
begin();
12755 if (It->first.Offset != 0)
12756 return indicatePessimisticFixpoint();
12758 uint64_t SizeOfBin = It->first.Offset + It->first.Size;
12760 if (SizeOfBin >= *AllocationSize)
12761 return indicatePessimisticFixpoint();
12763 auto NewAllocationSize =
12764 std::optional<TypeSize>(
TypeSize(SizeOfBin * 8,
false));
12766 if (!changeAllocationSize(NewAllocationSize))
12767 return ChangeStatus::UNCHANGED;
12769 return ChangeStatus::CHANGED;
12775 assert(isValidState() &&
12776 "Manifest should only be called if the state is valid.");
12780 auto FixedAllocatedSizeInBits = getAllocatedSize()->getFixedValue();
12782 unsigned long NumBytesToAllocate = (FixedAllocatedSizeInBits + 7) / 8;
12784 switch (
I->getOpcode()) {
12786 case Instruction::Alloca: {
12792 auto *NumBytesToValue =
12793 ConstantInt::get(
I->getContext(),
APInt(32, NumBytesToAllocate));
12796 insertPt = std::next(insertPt);
12802 return ChangeStatus::CHANGED;
12810 return ChangeStatus::UNCHANGED;
12814 const std::string getAsStr(
Attributor *
A)
const override {
12815 if (!isValidState())
12816 return "allocationinfo(<invalid>)";
12817 return "allocationinfo(" +
12818 (AssumedAllocatedSize == HasNoAllocationSize
12820 : std::to_string(AssumedAllocatedSize->getFixedValue())) +
12825 std::optional<TypeSize> AssumedAllocatedSize = HasNoAllocationSize;
12829 bool changeAllocationSize(std::optional<TypeSize>
Size) {
12830 if (AssumedAllocatedSize == HasNoAllocationSize ||
12831 AssumedAllocatedSize !=
Size) {
12832 AssumedAllocatedSize =
Size;
12839struct AAAllocationInfoFloating : AAAllocationInfoImpl {
12841 : AAAllocationInfoImpl(IRP,
A) {}
12843 void trackStatistics()
const override {
12848struct AAAllocationInfoReturned : AAAllocationInfoImpl {
12850 : AAAllocationInfoImpl(IRP,
A) {}
12856 (void)indicatePessimisticFixpoint();
12859 void trackStatistics()
const override {
12864struct AAAllocationInfoCallSiteReturned : AAAllocationInfoImpl {
12866 : AAAllocationInfoImpl(IRP,
A) {}
12868 void trackStatistics()
const override {
12873struct AAAllocationInfoArgument : AAAllocationInfoImpl {
12875 : AAAllocationInfoImpl(IRP,
A) {}
12877 void trackStatistics()
const override {
12882struct AAAllocationInfoCallSiteArgument : AAAllocationInfoImpl {
12884 : AAAllocationInfoImpl(IRP,
A) {}
12889 (void)indicatePessimisticFixpoint();
12892 void trackStatistics()
const override {
12939#define SWITCH_PK_INV(CLASS, PK, POS_NAME) \
12940 case IRPosition::PK: \
12941 llvm_unreachable("Cannot create " #CLASS " for a " POS_NAME " position!");
12943#define SWITCH_PK_CREATE(CLASS, IRP, PK, SUFFIX) \
12944 case IRPosition::PK: \
12945 AA = new (A.Allocator) CLASS##SUFFIX(IRP, A); \
12949#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
12950 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
12951 CLASS *AA = nullptr; \
12952 switch (IRP.getPositionKind()) { \
12953 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
12954 SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
12955 SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
12956 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
12957 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
12958 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
12959 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
12960 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
12965#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
12966 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
12967 CLASS *AA = nullptr; \
12968 switch (IRP.getPositionKind()) { \
12969 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
12970 SWITCH_PK_INV(CLASS, IRP_FUNCTION, "function") \
12971 SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
12972 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
12973 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
12974 SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
12975 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
12976 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
12981#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS) \
12982 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
12983 CLASS *AA = nullptr; \
12984 switch (IRP.getPositionKind()) { \
12985 SWITCH_PK_CREATE(CLASS, IRP, POS, SUFFIX) \
12987 llvm_unreachable("Cannot create " #CLASS " for position otherthan " #POS \
12993#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
12994 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
12995 CLASS *AA = nullptr; \
12996 switch (IRP.getPositionKind()) { \
12997 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
12998 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
12999 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13000 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13001 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13002 SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
13003 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13004 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13009#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13010 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13011 CLASS *AA = nullptr; \
13012 switch (IRP.getPositionKind()) { \
13013 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13014 SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
13015 SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
13016 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13017 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
13018 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
13019 SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
13020 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13025#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13026 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13027 CLASS *AA = nullptr; \
13028 switch (IRP.getPositionKind()) { \
13029 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13030 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13031 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13032 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13033 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13034 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13035 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13036 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13086#undef CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION
13087#undef CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION
13088#undef CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION
13089#undef CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION
13090#undef CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION
13091#undef CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION
13092#undef SWITCH_PK_CREATE
13093#undef SWITCH_PK_INV
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
amdgpu AMDGPU Register Bank Select
This file implements a class to represent arbitrary precision integral constant values and operations...
ReachingDefAnalysis InstSet & ToRemove
This file contains the simple types necessary to represent the attributes associated with functions a...
#define STATS_DECLTRACK(NAME, TYPE, MSG)
static std::optional< Constant * > askForAssumedConstant(Attributor &A, const AbstractAttribute &QueryingAA, const IRPosition &IRP, Type &Ty)
static cl::opt< unsigned, true > MaxPotentialValues("attributor-max-potential-values", cl::Hidden, cl::desc("Maximum number of potential values to be " "tracked for each position."), cl::location(llvm::PotentialConstantIntValuesState::MaxPotentialValues), cl::init(7))
static const Value * getPointerOperand(const Instruction *I, bool AllowVolatile)
Get pointer operand of memory accessing instruction.
static void clampReturnedValueStates(Attributor &A, const AAType &QueryingAA, StateType &S, const IRPosition::CallBaseContext *CBContext=nullptr)
Clamp the information known for all returned values of a function (identified by QueryingAA) into S.
#define STATS_DECLTRACK_FN_ATTR(NAME)
#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxPotentialValuesIterations("attributor-max-potential-values-iterations", cl::Hidden, cl::desc("Maximum number of iterations we keep dismantling potential values."), cl::init(64))
#define STATS_DECLTRACK_CS_ATTR(NAME)
#define PIPE_OPERATOR(CLASS)
static bool mayBeInCycle(const CycleInfo *CI, const Instruction *I, bool HeaderOnly, Cycle **CPtr=nullptr)
#define STATS_DECLTRACK_ARG_ATTR(NAME)
static const Value * stripAndAccumulateOffsets(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Val, const DataLayout &DL, APInt &Offset, bool GetMinOffset, bool AllowNonInbounds, bool UseAssumed=false)
#define STATS_DECLTRACK_CSRET_ATTR(NAME)
static cl::opt< bool > ManifestInternal("attributor-manifest-internal", cl::Hidden, cl::desc("Manifest Attributor internal string attributes."), cl::init(false))
static Value * constructPointer(Value *Ptr, int64_t Offset, IRBuilder< NoFolder > &IRB)
Helper function to create a pointer based on Ptr, and advanced by Offset bytes.
#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define BUILD_STAT_NAME(NAME, TYPE)
static bool isDenselyPacked(Type *Ty, const DataLayout &DL)
Checks if a type could have padding bytes.
#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static const Value * getMinimalBaseOfPointer(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Ptr, int64_t &BytesOffset, const DataLayout &DL, bool AllowNonInbounds=false)
#define STATS_DECLTRACK_FNRET_ATTR(NAME)
#define STATS_DECLTRACK_CSARG_ATTR(NAME)
#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS)
#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxHeapToStackSize("max-heap-to-stack-size", cl::init(128), cl::Hidden)
#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define STATS_DECLTRACK_FLOATING_ATTR(NAME)
#define STATS_DECL(NAME, TYPE, MSG)
BlockVerifier::State From
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
static GCRegistry::Add< ErlangGC > A("erlang", "erlang-compatible garbage collector")
This file contains the declarations for the subclasses of Constant, which represent the different fla...
This file declares an analysis pass that computes CycleInfo for LLVM IR, specialized from GenericCycl...
Returns the sub type a function will return at a given Idx Should correspond to the result type of an ExtractValue instruction executed with just that one unsigned Idx
Given that RA is a live propagate it s liveness to any other values it uses(according to Uses). void DeadArgumentEliminationPass
Performs the initial survey of the specified function
Given that RA is a live value
This file defines DenseMapInfo traits for DenseMap.
Rewrite Partial Register Uses
static LoopDeletionResult merge(LoopDeletionResult A, LoopDeletionResult B)
This file implements a map that provides insertion order iteration.
static StringRef getName(Value *V)
static cl::opt< RegAllocEvictionAdvisorAnalysis::AdvisorMode > Mode("regalloc-enable-advisor", cl::Hidden, cl::init(RegAllocEvictionAdvisorAnalysis::AdvisorMode::Default), cl::desc("Enable regalloc advisor mode"), cl::values(clEnumValN(RegAllocEvictionAdvisorAnalysis::AdvisorMode::Default, "default", "Default"), clEnumValN(RegAllocEvictionAdvisorAnalysis::AdvisorMode::Release, "release", "precompiled"), clEnumValN(RegAllocEvictionAdvisorAnalysis::AdvisorMode::Development, "development", "for training")))
This builds on the llvm/ADT/GraphTraits.h file to find the strongly connected components (SCCs) of a ...
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
APInt gcd(const SCEVConstant *C1, const SCEVConstant *C2)
This file defines generic set operations that may be used on set's of different types,...
This file implements a set that has insertion order iteration characteristics.
This file defines the SmallPtrSet class.
This file defines the SmallVector class.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
static SymbolRef::Type getType(const Symbol *Sym)
static void initialize(TargetLibraryInfoImpl &TLI, const Triple &T, ArrayRef< StringLiteral > StandardNames)
Initialize the set of available library functions based on the specified target triple.
static unsigned getBitWidth(Type *Ty, const DataLayout &DL)
Returns the bitwidth of the given scalar or pointer type.
static unsigned getSize(unsigned Kind)
AACallGraphNode * operator*() const
A manager for alias analyses.
bool isNoAlias(const MemoryLocation &LocA, const MemoryLocation &LocB)
A trivial helper function to check to see if the specified pointers are no-alias.
Class for arbitrary precision integers.
int64_t getSExtValue() const
Get sign extended value.
CallBase * getInstruction() const
Return the underlying instruction.
bool isCallbackCall() const
Return true if this ACS represents a callback call.
bool isDirectCall() const
Return true if this ACS represents a direct call.
static void getCallbackUses(const CallBase &CB, SmallVectorImpl< const Use * > &CallbackUses)
Add operand uses of CB that represent callback uses into CallbackUses.
int getCallArgOperandNo(Argument &Arg) const
Return the operand index of the underlying instruction associated with Arg.
This class represents a conversion between pointers from one address space to another.
an instruction to allocate memory on the stack
Align getAlign() const
Return the alignment of the memory that is being allocated by the instruction.
unsigned getAddressSpace() const
Return the address space for the allocation.
std::optional< TypeSize > getAllocationSize(const DataLayout &DL) const
Get allocation size in bytes.
This class represents an incoming formal argument to a Function.
bool hasPointeeInMemoryValueAttr() const
Return true if this argument has the byval, sret, inalloca, preallocated, or byref attribute.
bool hasReturnedAttr() const
Return true if this argument has the returned attribute.
bool hasByValAttr() const
Return true if this argument has the byval attribute.
const Function * getParent() const
unsigned getArgNo() const
Return the index of this formal argument in its containing function.
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
A function analysis which provides an AssumptionCache.
A cache of @llvm.assume calls within a function.
static Attribute get(LLVMContext &Context, AttrKind Kind, uint64_t Val=0)
Return a uniquified Attribute object.
static Attribute getWithDereferenceableBytes(LLVMContext &Context, uint64_t Bytes)
FPClassTest getNoFPClass() const
Return the FPClassTest for nofpclass.
Attribute::AttrKind getKindAsEnum() const
Return the attribute's kind as an enum (Attribute::AttrKind).
MemoryEffects getMemoryEffects() const
Returns memory effects.
static Attribute getWithDereferenceableOrNullBytes(LLVMContext &Context, uint64_t Bytes)
static Attribute getWithNoFPClass(LLVMContext &Context, FPClassTest Mask)
AttrKind
This enumeration lists the attributes that can be associated with parameters, function results,...
static bool isEnumAttrKind(AttrKind Kind)
static Attribute getWithMemoryEffects(LLVMContext &Context, MemoryEffects ME)
static Attribute getWithAlignment(LLVMContext &Context, Align Alignment)
Return a uniquified Attribute object that has the specific alignment set.
LLVM Basic Block Representation.
const_iterator getFirstInsertionPt() const
Returns an iterator to the first instruction in this block that is suitable for inserting a non-PHI i...
const Instruction & front() const
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
const Function * getParent() const
Return the enclosing method, or null if none.
InstListType::iterator iterator
Instruction iterators...
LLVMContext & getContext() const
Get the context in which this basic block lives.
const Instruction * getTerminator() const LLVM_READONLY
Returns the terminator instruction if the block is well formed or null if the block is not well forme...
BinaryOps getOpcode() const
Conditional or Unconditional Branch instruction.
static BranchInst * Create(BasicBlock *IfTrue, BasicBlock::iterator InsertBefore)
unsigned getNumSuccessors() const
BasicBlock * getSuccessor(unsigned i) const
Value * getCondition() const
Allocate memory in an ever growing pool, as if by bump-pointer.
Base class for all callable instructions (InvokeInst and CallInst) Holds everything related to callin...
User::op_iterator arg_begin()
Return the iterator pointing to the beginning of the argument list.
bool isMustTailCall() const
Tests if this call site must be tail call optimized.
bool isIndirectCall() const
Return true if the callsite is an indirect call.
bool isCallee(Value::const_user_iterator UI) const
Determine whether the passed iterator points to the callee operand's Use.
Value * getCalledOperand() const
const Use & getCalledOperandUse() const
const Use & getArgOperandUse(unsigned i) const
Wrappers for getting the Use of a call argument.
Value * getArgOperand(unsigned i) const
User::op_iterator arg_end()
Return the iterator pointing to the end of the argument list.
bool isBundleOperand(unsigned Idx) const
Return true if the operand at index Idx is a bundle operand.
bool isConvergent() const
Determine if the invoke is convergent.
FunctionType * getFunctionType() const
Intrinsic::ID getIntrinsicID() const
Returns the intrinsic ID of the intrinsic called or Intrinsic::not_intrinsic if the called function i...
iterator_range< User::op_iterator > args()
Iteration adapter for range-for loops.
unsigned getArgOperandNo(const Use *U) const
Given a use for a arg operand, get the arg operand number that corresponds to it.
unsigned arg_size() const
bool isArgOperand(const Use *U) const
Function * getCaller()
Helper to get the caller (the parent function).
This class represents a function call, abstracting a target machine's calling convention.
static CallInst * Create(FunctionType *Ty, Value *F, const Twine &NameStr, BasicBlock::iterator InsertBefore)
This is the base class for all instructions that perform data casts.
Instruction::CastOps getOpcode() const
Return the opcode of this CastInst.
bool isIntegerCast() const
There are several places where we need to know if a cast instruction only deals with integer source a...
Type * getDestTy() const
Return the destination type, as a convenience.
This class is the base class for the comparison instructions.
bool isEquality() const
Determine if this is an equals/not equals predicate.
bool isFalseWhenEqual() const
This is just a convenience.
Predicate
This enumeration lists the possible predicates for CmpInst subclasses.
bool isTrueWhenEqual() const
This is just a convenience.
Predicate getPredicate() const
Return the predicate for this instruction.
A constant value that is initialized with an expression using other constant values.
static Constant * getExtractElement(Constant *Vec, Constant *Idx, Type *OnlyIfReducedTy=nullptr)
This is the shared class of boolean and integer constants.
static ConstantInt * getTrue(LLVMContext &Context)
This class represents a range of values.
const APInt & getLower() const
Return the lower value for this range.
bool isFullSet() const
Return true if this set contains all of the elements possible for this data-type.
bool isEmptySet() const
Return true if this set contains no members.
bool isSingleElement() const
Return true if this set contains exactly one member.
static ConstantRange makeAllowedICmpRegion(CmpInst::Predicate Pred, const ConstantRange &Other)
Produce the smallest range such that all values that may satisfy the given predicate with any value c...
const APInt & getUpper() const
Return the upper value for this range.
This is an important base class in LLVM.
Analysis pass which computes a CycleInfo.
This class represents an Operation in the Expression.
A parsed version of the target data layout string in and methods for querying it.
iterator find(const_arg_type_t< KeyT > Val)
size_type count(const_arg_type_t< KeyT > Val) const
Return 1 if the specified key is in the map, 0 otherwise.
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
Implements a dense probed hash-table based set.
Analysis pass which computes a DominatorTree.
Concrete subclass of DominatorTreeBase that is used to compute a normal dominator tree.
bool dominates(const BasicBlock *BB, const Use &U) const
Return true if the (end of the) basic block BB dominates the use U.
An instruction for ordering other memory operations.
A handy container for a FunctionType+Callee-pointer pair, which can be passed around as a single enti...
const BasicBlock & getEntryBlock() const
iterator_range< arg_iterator > args()
const Function & getFunction() const
Argument * getArg(unsigned i) const
bool hasFnAttribute(Attribute::AttrKind Kind) const
Return true if the function has the attribute.
CycleT * getCycle(const BlockT *Block) const
Find the innermost cycle containing a given block.
A possibly irreducible generalization of a Loop.
bool isDeclaration() const
Return true if the primary definition of this global value is outside of the current translation unit...
bool hasLocalLinkage() const
This instruction compares its operands according to the predicate given to the constructor.
static bool compare(const APInt &LHS, const APInt &RHS, ICmpInst::Predicate Pred)
Return result of LHS Pred RHS comparison.
Value * CreatePtrAdd(Value *Ptr, Value *Offset, const Twine &Name="", bool IsInBounds=false)
ConstantInt * getInt64(uint64_t C)
Get a constant 64-bit value.
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
Instruction * clone() const
Create a copy of 'this' instruction that is identical in all ways except the following:
bool isLifetimeStartOrEnd() const LLVM_READONLY
Return true if the instruction is a llvm.lifetime.start or llvm.lifetime.end marker.
bool mayReadOrWriteMemory() const
Return true if this instruction may read or write memory.
bool mayWriteToMemory() const LLVM_READONLY
Return true if this instruction may modify memory.
void insertBefore(Instruction *InsertPos)
Insert an unlinked instruction into a basic block immediately before the specified instruction.
const Module * getModule() const
Return the module owning the function this instruction belongs to or nullptr it the function does not...
const BasicBlock * getParent() const
InstListType::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
const Function * getFunction() const
Return the function this instruction belongs to.
BasicBlock * getSuccessor(unsigned Idx) const LLVM_READONLY
Return the specified successor. This instruction must be a terminator.
bool mayHaveSideEffects() const LLVM_READONLY
Return true if the instruction may have side effects.
bool isTerminator() const
bool mayReadFromMemory() const LLVM_READONLY
Return true if this instruction may read memory.
const Instruction * getNextNonDebugInstruction(bool SkipPseudoOp=false) const
Return a pointer to the next non-debug instruction in the same basic block as 'this',...
unsigned getOpcode() const
Returns a member of one of the enums like Instruction::Add.
void setDebugLoc(DebugLoc Loc)
Set the debug location information for this instruction.
A wrapper class for inspecting calls to intrinsic functions.
BasicBlock * getUnwindDest() const
BasicBlock * getNormalDest() const
This is an important class for using LLVM in a threaded context.
Analysis to compute lazy value information.
This pass computes, caches, and vends lazy value constraint information.
ConstantRange getConstantRange(Value *V, Instruction *CxtI, bool UndefAllowed)
Return the ConstantRange constraint that is known to hold for the specified value at the specified in...
An instruction for reading from memory.
Analysis pass that exposes the LoopInfo for a function.
LoopT * getLoopFor(const BlockT *BB) const
Return the inner most loop that BB lives in.
const MDOperand & getOperand(unsigned I) const
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
unsigned getNumOperands() const
Return number of MDNode operands.
This class implements a map that also provides access to all stored values in a deterministic order.
static MemoryEffectsBase readOnly()
Create MemoryEffectsBase that can read any memory.
bool doesNotAccessMemory() const
Whether this function accesses no memory.
static MemoryEffectsBase argMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
Create MemoryEffectsBase that can only access argument memory.
static MemoryEffectsBase inaccessibleMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
Create MemoryEffectsBase that can only access inaccessible memory.
bool onlyAccessesInaccessibleMem() const
Whether this function only (at most) accesses inaccessible memory.
ModRefInfo getModRef(Location Loc) const
Get ModRefInfo for the given Location.
bool onlyAccessesArgPointees() const
Whether this function only (at most) accesses argument memory.
bool onlyReadsMemory() const
Whether this function only (at most) reads memory.
static MemoryEffectsBase writeOnly()
Create MemoryEffectsBase that can write any memory.
static MemoryEffectsBase inaccessibleOrArgMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
Create MemoryEffectsBase that can only access inaccessible or argument memory.
static MemoryEffectsBase none()
Create MemoryEffectsBase that cannot read or write any memory.
bool onlyAccessesInaccessibleOrArgMem() const
Whether this function only (at most) accesses argument and inaccessible memory.
static MemoryEffectsBase unknown()
Create MemoryEffectsBase that can read and write any memory.
static std::optional< MemoryLocation > getOrNone(const Instruction *Inst)
A Module instance is used to store all the information related to an LLVM module.
const DataLayout & getDataLayout() const
Get the data layout for the module's target platform.
Evaluate the size and offset of an object pointed to by a Value*.
static SizeOffsetValue unknown()
static PHINode * Create(Type *Ty, unsigned NumReservedValues, const Twine &NameStr, BasicBlock::iterator InsertBefore)
Constructors - NumReservedValues is a hint for the number of incoming edges that this phi node will h...
static PoisonValue * get(Type *T)
Static factory methods - Return an 'poison' object of the specified type.
Return a value (possibly void), from a function.
Value * getReturnValue() const
Convenience accessor. Returns null if there is no return value.
This class represents an analyzed expression in the program.
Analysis pass that exposes the ScalarEvolution for a function.
The main scalar evolution driver.
const SCEV * getSCEVAtScope(const SCEV *S, const Loop *L)
Return a SCEV expression for the specified value at the specified scope in the program.
const SCEV * getSCEV(Value *V)
Return a SCEV expression for the full generality of the specified expression.
unsigned getSmallConstantMaxTripCount(const Loop *L)
Returns the upper bound of the loop trip count as a normal unsigned value.
ConstantRange getUnsignedRange(const SCEV *S)
Determine the unsigned range for a particular SCEV.
This class represents the LLVM 'select' instruction.
A vector that has set insertion semantics.
size_type size() const
Determine the number of elements in the SetVector.
bool insert(const value_type &X)
Insert a new element into the SetVector.
bool erase(PtrType Ptr)
erase - If the set contains the specified pointer, remove it and return true, otherwise return false.
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
A SetVector that performs no allocations if smaller than a certain size.
SmallSet - This maintains a set of unique values, optimizing for the case when the set is small (less...
std::pair< const_iterator, bool > insert(const T &V)
insert - Insert an element into the set if it isn't already there.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
reference emplace_back(ArgTypes &&... Args)
void append(ItTy in_start, ItTy in_end)
Add the specified range to the end of the SmallVector.
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
An instruction for storing to memory.
StringRef - Represent a constant reference to a string, i.e.
Used to lazily calculate structure layout information for a target machine, based on the DataLayout s...
TypeSize getElementOffset(unsigned Idx) const
TypeSize getElementOffsetInBits(unsigned Idx) const
Class to represent struct types.
unsigned getNumElements() const
Random access to the elements.
Type * getElementType(unsigned N) const
Analysis pass providing the TargetTransformInfo.
Provides information about what library functions are available for the current target.
Twine - A lightweight data structure for efficiently representing the concatenation of temporary valu...
The instances of the Type class are immutable: once they are created, they are never changed.
unsigned getIntegerBitWidth() const
bool isPointerTy() const
True if this is an instance of PointerType.
static IntegerType * getInt1Ty(LLVMContext &C)
unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
bool isSized(SmallPtrSetImpl< Type * > *Visited=nullptr) const
Return true if it makes sense to take the size of this type.
static IntegerType * getInt8Ty(LLVMContext &C)
bool isPtrOrPtrVectorTy() const
Return true if this is a pointer type or a vector of pointer types.
static IntegerType * getInt32Ty(LLVMContext &C)
bool isIntegerTy() const
True if this is an instance of IntegerType.
bool isFPOrFPVectorTy() const
Return true if this is a FP type or a vector of FP.
bool isVoidTy() const
Return true if this is 'void'.
'undef' values are things that do not have specified contents.
static UndefValue * get(Type *T)
Static factory methods - Return an 'undef' object of the specified type.
This function has undefined behavior.
A Use represents the edge between a Value definition and its users.
User * getUser() const
Returns the User that contains this Use.
bool replaceUsesOfWith(Value *From, Value *To)
Replace uses of one Value with another.
const Use & getOperandUse(unsigned i) const
Value * getOperand(unsigned i) const
unsigned getNumOperands() const
bool isDroppable() const
A droppable user is a user for which uses can be dropped without affecting correctness and should be ...
ValueT lookup(const KeyT &Val) const
lookup - Return the entry for the specified key, or a default constructed value if no such entry exis...
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
static constexpr uint64_t MaximumAlignment
const Value * stripAndAccumulateConstantOffsets(const DataLayout &DL, APInt &Offset, bool AllowNonInbounds, bool AllowInvariantGroup=false, function_ref< bool(Value &Value, APInt &Offset)> ExternalAnalysis=nullptr) const
Accumulate the constant offset this value has compared to a base pointer.
void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
iterator_range< user_iterator > users()
LLVMContext & getContext() const
All values hold a context through their type.
iterator_range< use_iterator > uses()
StringRef getName() const
Return a constant reference to the value's name.
std::pair< iterator, bool > insert(const ValueT &V)
constexpr ScalarTy getFixedValue() const
constexpr bool isScalable() const
Returns whether the quantity is scaled by a runtime quantity (vscale).
An efficient, type-erasing, non-owning reference to a callable.
self_iterator getIterator()
NodeTy * getNextNode()
Get the next node, or nullptr for the list tail.
This class implements an extremely fast bulk output stream that can only output to a stream.
A raw_ostream that writes to an std::string.
Enumerate the SCCs of a directed graph in reverse topological order of the SCC DAG.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
bool isAssumedReadNone(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readnone.
bool isAssumedReadOnly(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readonly.
raw_ostream & operator<<(raw_ostream &OS, const RangeTy &R)
std::optional< Value * > combineOptionalValuesInAAValueLatice(const std::optional< Value * > &A, const std::optional< Value * > &B, Type *Ty)
Return the combination of A and B such that the result is a possible value of both.
bool isValidAtPosition(const ValueAndContext &VAC, InformationCache &InfoCache)
Return true if the value of VAC is a valid at the position of VAC, that is a constant,...
bool isAssumedThreadLocalObject(Attributor &A, Value &Obj, const AbstractAttribute &QueryingAA)
Return true if Obj is assumed to be a thread local object.
bool isDynamicallyUnique(Attributor &A, const AbstractAttribute &QueryingAA, const Value &V, bool ForAnalysisOnly=true)
Return true if V is dynamically unique, that is, there are no two "instances" of V at runtime with di...
bool getPotentialCopiesOfStoredValue(Attributor &A, StoreInst &SI, SmallSetVector< Value *, 4 > &PotentialCopies, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values of the one stored by SI into PotentialCopies.
bool isGPU(const Module &M)
Return true iff M target a GPU (and we can use GPU AS reasoning).
ValueScope
Flags to distinguish intra-procedural queries from potentially inter-procedural queries.
bool isValidInScope(const Value &V, const Function *Scope)
Return true if V is a valid value in Scope, that is a constant or an instruction/argument of Scope.
bool isPotentiallyReachable(Attributor &A, const Instruction &FromI, const Instruction &ToI, const AbstractAttribute &QueryingAA, const AA::InstExclusionSetTy *ExclusionSet=nullptr, std::function< bool(const Function &F)> GoBackwardsCB=nullptr)
Return true if ToI is potentially reachable from FromI without running into any instruction in Exclus...
bool isNoSyncInst(Attributor &A, const Instruction &I, const AbstractAttribute &QueryingAA)
Return true if I is a nosync instruction.
bool getPotentiallyLoadedValues(Attributor &A, LoadInst &LI, SmallSetVector< Value *, 4 > &PotentialValues, SmallSetVector< Instruction *, 4 > &PotentialValueOrigins, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values LI could read into PotentialValues.
Value * getWithType(Value &V, Type &Ty)
Try to convert V to type Ty without introducing new instructions.
constexpr char Attrs[]
Key for Kernel::Metadata::mAttrs.
AddressSpace getAddressSpace(T *V)
@ C
The default llvm calling convention, compatible with C.
@ Unsupported
This operation is completely unsupported on the target.
@ Undef
Value of the register doesn't matter.
@ SingleThread
Synchronized with respect to signal handlers executing in the same thread.
@ CE
Windows NT (Windows on ARM)
initializer< Ty > init(const Ty &Val)
LocationClass< Ty > location(Ty &L)
static unsigned combineHashValue(unsigned a, unsigned b)
Simplistic combination of 32-bit hash values into 32-bit hash values.
ElementType
The element type of an SRV or UAV resource.
Scope
Defines the scope in which this symbol should be visible: Default – Visible in the public interface o...
const_iterator begin(StringRef path, Style style=Style::native)
Get begin iterator over path.
const_iterator end(StringRef path)
Get end iterator over path.
This is an optimization pass for GlobalISel generic memory operations.
pred_iterator pred_end(BasicBlock *BB)
bool operator<(int64_t V1, const APSInt &V2)
bool all_of(R &&range, UnaryPredicate P)
Provide wrappers to std::all_of which take ranges instead of having to pass begin/end explicitly.
bool isLegalToPromote(const CallBase &CB, Function *Callee, const char **FailureReason=nullptr)
Return true if the given indirect call site can be made to call Callee.
Constant * getInitialValueOfAllocation(const Value *V, const TargetLibraryInfo *TLI, Type *Ty)
If this is a call to an allocation function that initializes memory to a fixed value,...
auto size(R &&Range, std::enable_if_t< std::is_base_of< std::random_access_iterator_tag, typename std::iterator_traits< decltype(Range.begin())>::iterator_category >::value, void > *=nullptr)
Get the size of a range.
unsigned getPointerAddressSpace(const Type *T)
auto successors(const MachineBasicBlock *BB)
bool isRemovableAlloc(const CallBase *V, const TargetLibraryInfo *TLI)
Return true if this is a call to an allocation function that does not have side effects that we are r...
APFloat abs(APFloat X)
Returns the absolute value of the argument.
raw_fd_ostream & outs()
This returns a reference to a raw_fd_ostream for standard output.
bool operator!=(uint64_t V1, const APInt &V2)
UseCaptureKind DetermineUseCaptureKind(const Use &U, llvm::function_ref< bool(Value *, const DataLayout &)> IsDereferenceableOrNull)
Determine what kind of capture behaviour U may exhibit.
Value * getAllocAlignment(const CallBase *V, const TargetLibraryInfo *TLI)
Gets the alignment argument for an aligned_alloc-like function, using either built-in knowledge based...
Value * simplifyInstructionWithOperands(Instruction *I, ArrayRef< Value * > NewOps, const SimplifyQuery &Q)
Like simplifyInstruction but the operands of I are replaced with NewOps.
Value * GetPointerBaseWithConstantOffset(Value *Ptr, int64_t &Offset, const DataLayout &DL, bool AllowNonInbounds=true)
Analyze the specified pointer to see if it can be expressed as a base pointer plus a constant offset.
const Value * getUnderlyingObject(const Value *V, unsigned MaxLookup=6)
This method strips off any GEP address adjustments and pointer casts from the specified value,...
scc_iterator< T > scc_begin(const T &G)
Construct the begin iterator for a deduced graph type T.
PotentialValuesState< std::pair< AA::ValueAndContext, AA::ValueScope > > PotentialLLVMValuesState
bool isNoAliasCall(const Value *V)
Return true if this pointer is returned by a noalias function.
raw_ostream & WriteGraph(raw_ostream &O, const GraphType &G, bool ShortNames=false, const Twine &Title="")
bool operator==(const AddressRangeValuePair &LHS, const AddressRangeValuePair &RHS)
ConstantRange getConstantRangeFromMetadata(const MDNode &RangeMD)
Parse out a conservative ConstantRange from !range metadata.
const Value * getPointerOperand(const Value *V)
A helper function that returns the pointer operand of a load, store or GEP instruction.
Value * simplifyInstruction(Instruction *I, const SimplifyQuery &Q)
See if we can compute a simplified version of this instruction.
bool any_of(R &&range, UnaryPredicate P)
Provide wrappers to std::any_of which take ranges instead of having to pass begin/end explicitly.
pred_iterator pred_begin(BasicBlock *BB)
unsigned Log2_32(uint32_t Value)
Return the floor log base 2 of the specified value, -1 if the value is zero.
constexpr bool isPowerOf2_32(uint32_t Value)
Return true if the argument is a power of two > 0.
void sort(IteratorTy Start, IteratorTy End)
MemoryEffectsBase< IRMemLocation > MemoryEffects
Summary of how a function affects memory in the program.
bool NullPointerIsDefined(const Function *F, unsigned AS=0)
Check whether null pointer dereferencing is considered undefined behavior for a given function or an ...
raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
bool isPointerTy(const Type *T)
bool wouldInstructionBeTriviallyDead(const Instruction *I, const TargetLibraryInfo *TLI=nullptr)
Return true if the result produced by the instruction would have no side effects if it was not used.
bool set_union(S1Ty &S1, const S2Ty &S2)
set_union(A, B) - Compute A := A u B, return whether A changed.
void RemapInstruction(Instruction *I, ValueToValueMapTy &VM, RemapFlags Flags=RF_None, ValueMapTypeRemapper *TypeMapper=nullptr, ValueMaterializer *Materializer=nullptr)
Convert the instruction operands from referencing the current values into those specified by VM.
CallBase & promoteCall(CallBase &CB, Function *Callee, CastInst **RetBitCast=nullptr)
Promote the given indirect call site to unconditionally call Callee.
bool hasAssumption(const Function &F, const KnownAssumptionString &AssumptionStr)
Return true if F has the assumption AssumptionStr attached.
RetainedKnowledge getKnowledgeFromUse(const Use *U, ArrayRef< Attribute::AttrKind > AttrKinds)
Return a valid Knowledge associated to the Use U if its Attribute kind is in AttrKinds.
bool isKnownNonZero(const Value *V, const SimplifyQuery &Q, unsigned Depth=0)
Return true if the given value is known to be non-zero when defined.
AtomicOrdering
Atomic ordering for LLVM's memory model.
ChangeStatus clampStateAndIndicateChange< DerefState >(DerefState &S, const DerefState &R)
Value * simplifyCmpInst(unsigned Predicate, Value *LHS, Value *RHS, const SimplifyQuery &Q)
Given operands for a CmpInst, fold the result or return null.
PotentialValuesState< APInt > PotentialConstantIntValuesState
DWARFExpression::Operation Op
bool isGuaranteedNotToBeUndefOrPoison(const Value *V, AssumptionCache *AC=nullptr, const Instruction *CtxI=nullptr, const DominatorTree *DT=nullptr, unsigned Depth=0)
Return true if this function can prove that V does not have undef bits and is never poison.
Value * getFreedOperand(const CallBase *CB, const TargetLibraryInfo *TLI)
If this if a call to a free function, return the freed operand.
ChangeStatus clampStateAndIndicateChange(StateType &S, const StateType &R)
Helper function to clamp a state S of type StateType with the information in R and indicate/return if...
bool isSafeToSpeculativelyExecute(const Instruction *I, const Instruction *CtxI=nullptr, AssumptionCache *AC=nullptr, const DominatorTree *DT=nullptr, const TargetLibraryInfo *TLI=nullptr)
Return true if the instruction does not have any effects besides calculating the result and does not ...
constexpr unsigned BitWidth
std::optional< APInt > getAllocSize(const CallBase *CB, const TargetLibraryInfo *TLI, function_ref< const Value *(const Value *)> Mapper=[](const Value *V) { return V;})
Return the size of the requested allocation.
DenseSet< StringRef > getAssumptions(const Function &F)
Return the set of all assumptions for the function F.
Align assumeAligned(uint64_t Value)
Treats the value 0 as a 1, so Align is always at least 1.
Instruction * SplitBlockAndInsertIfThen(Value *Cond, BasicBlock::iterator SplitBefore, bool Unreachable, MDNode *BranchWeights=nullptr, DomTreeUpdater *DTU=nullptr, LoopInfo *LI=nullptr, BasicBlock *ThenBlock=nullptr)
Split the containing block at the specified instruction - everything before SplitBefore stays in the ...
@ OPTIONAL
The target may be valid if the source is not.
@ NONE
Do not track a dependence between source and target.
@ REQUIRED
The target cannot be valid if the source is not.
bool mayContainIrreducibleControl(const Function &F, const LoopInfo *LI)
T bit_floor(T Value)
Returns the largest integral power of two no greater than Value if Value is nonzero.
KnownFPClass computeKnownFPClass(const Value *V, const APInt &DemandedElts, FPClassTest InterestedClasses, unsigned Depth, const SimplifyQuery &SQ)
Determine which floating-point classes are valid for V, and return them in KnownFPClass bit sets.
bool isIdentifiedObject(const Value *V)
Return true if this pointer refers to a distinct and identifiable object.
constexpr StringRef AssumptionAttrKey
The key we use for assumption attributes.
Implement std::hash so that hash_code can be used in STL containers.
void swap(llvm::BitVector &LHS, llvm::BitVector &RHS)
Implement std::swap in terms of BitVector swap.
A type to track pointer/struct usage and accesses for AAPointerInfo.
AAPointerInfo::const_bin_iterator end() const
ChangeStatus addAccess(Attributor &A, const AAPointerInfo::RangeList &Ranges, Instruction &I, std::optional< Value * > Content, AAPointerInfo::AccessKind Kind, Type *Ty, Instruction *RemoteI=nullptr)
Add a new Access to the state at offset Offset and with size Size.
DenseMap< const Instruction *, SmallVector< unsigned > > RemoteIMap
AAPointerInfo::const_bin_iterator begin() const
bool forallInterferingAccesses(Instruction &I, function_ref< bool(const AAPointerInfo::Access &, bool)> CB, AA::RangeTy &Range) const
See AAPointerInfo::forallInterferingAccesses.
State & operator=(State &&R)
State(State &&SIS)=default
const AAPointerInfo::Access & getAccess(unsigned Index) const
bool forallInterferingAccesses(AA::RangeTy Range, function_ref< bool(const AAPointerInfo::Access &, bool)> CB) const
See AAPointerInfo::forallInterferingAccesses.
SmallVector< AAPointerInfo::Access > AccessList
bool isAtFixpoint() const override
See AbstractState::isAtFixpoint().
static State getWorstState(const State &SIS)
Return the worst possible representable state.
int64_t numOffsetBins() const
AAPointerInfo::OffsetBinsTy OffsetBins
ChangeStatus indicateOptimisticFixpoint() override
See AbstractState::indicateOptimisticFixpoint().
State & operator=(const State &R)
ChangeStatus indicatePessimisticFixpoint() override
See AbstractState::indicatePessimisticFixpoint().
const State & getAssumed() const
static State getBestState(const State &SIS)
Return the best possible representable state.
bool isValidState() const override
See AbstractState::isValidState().
----------------—AAIntraFnReachability Attribute-----------------------—
ReachabilityQueryInfo(const ReachabilityQueryInfo &RQI)
unsigned Hash
Precomputed hash for this RQI.
ReachabilityQueryInfo(const Instruction *From, const ToTy *To)
ReachabilityQueryInfo(Attributor &A, const Instruction &From, const ToTy &To, const AA::InstExclusionSetTy *ES, bool MakeUnique)
Constructor replacement to ensure unique and stable sets are used for the cache.
unsigned computeHashValue() const
An abstract interface for address space information.
static const char ID
Unique ID (due to the unique address)
An abstract interface for all align attributes.
static const char ID
Unique ID (due to the unique address)
Align getKnownAlign() const
Return known alignment.
An abstract attribute for getting assumption information.
static const char ID
Unique ID (due to the unique address)
An abstract state for querying live call edges.
static const char ID
Unique ID (due to the unique address)
An abstract Attribute for specializing "dynamic" components of "denormal-fp-math" and "denormal-fp-ma...
static const char ID
Unique ID (due to the unique address)
An abstract interface for all dereferenceable attribute.
uint32_t getKnownDereferenceableBytes() const
Return known dereferenceable bytes.
uint32_t getAssumedDereferenceableBytes() const
Return assumed dereferenceable bytes.
static const char ID
Unique ID (due to the unique address)
An abstract interface for llvm::GlobalValue information interference.
static const char ID
Unique ID (due to the unique address)
static const char ID
Unique ID (due to the unique address)
An abstract interface for indirect call information interference.
static const char ID
Unique ID (due to the unique address)
An abstract interface to track if a value leaves its defining function instance.
static const char ID
Unique ID (due to the unique address)
An abstract Attribute for computing reachability between functions.
static const char ID
Unique ID (due to the unique address)
bool canReach(Attributor &A, const Function &Fn) const
If the function represented by this position can reach Fn.
virtual bool instructionCanReach(Attributor &A, const Instruction &Inst, const Function &Fn, const AA::InstExclusionSetTy *ExclusionSet=nullptr) const =0
Can Inst reach Fn.
An abstract interface to determine reachability of point A to B.
static const char ID
Unique ID (due to the unique address)
virtual bool isAssumedReachable(Attributor &A, const Instruction &From, const Instruction &To, const AA::InstExclusionSetTy *ExclusionSet=nullptr) const =0
Returns true if the 'From' instruction is assumed to reach the 'To' instruction.
An abstract interface for liveness abstract attribute.
static const char ID
Unique ID (due to the unique address)
An abstract interface for memory access kind related attributes (readnone/readonly/writeonly).
bool isAssumedReadOnly() const
Return true if we assume that the underlying value is not accessed (=written) in its respective scope...
bool isKnownReadNone() const
Return true if we know that the underlying value is not read or accessed in its respective scope.
static const char ID
Unique ID (due to the unique address)
bool isAssumedReadNone() const
Return true if we assume that the underlying value is not read or accessed in its respective scope.
An abstract interface for all memory location attributes (readnone/argmemonly/inaccessiblememonly/ina...
static std::string getMemoryLocationsAsStr(MemoryLocationsKind MLK)
Return the locations encoded by MLK as a readable string.
static const char ID
Unique ID (due to the unique address)
StateType::base_t MemoryLocationsKind
An abstract interface for all nonnull attributes.
static const char ID
Unique ID (due to the unique address)
An abstract interface for all noalias attributes.
static bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static const char ID
Unique ID (due to the unique address)
An abstract interface for all nocapture attributes.
@ NO_CAPTURE_MAYBE_RETURNED
If we do not capture the value in memory or through integers we can only communicate it back as a der...
static const char ID
Unique ID (due to the unique address)
bool isAssumedNoCaptureMaybeReturned() const
Return true if we assume that the underlying value is not captured in its respective scope but we all...
static bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static void determineFunctionCaptureCapabilities(const IRPosition &IRP, const Function &F, BitIntegerState &State)
Update State according to the capture capabilities of F for position IRP.
static const char ID
Unique ID (due to the unique address)
An AbstractAttribute for nofree.
static const char ID
Unique ID (due to the unique address)
An abstract attribute for norecurse.
static const char ID
Unique ID (due to the unique address)
An AbstractAttribute for noreturn.
static const char ID
Unique ID (due to the unique address)
static const char ID
Unique ID (due to the unique address)
static bool isAlignedBarrier(const CallBase &CB, bool ExecutedAligned)
Helper function to determine if CB is an aligned (GPU) barrier.
static bool isNonRelaxedAtomic(const Instruction *I)
Helper function used to determine whether an instruction is non-relaxed atomic.
static bool isNoSyncIntrinsic(const Instruction *I)
Helper function specific for intrinsics which are potentially volatile.
An abstract interface for all noundef attributes.
static const char ID
Unique ID (due to the unique address)
static bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static const char ID
Unique ID (due to the unique address)
An abstract Attribute for determining the necessity of the convergent attribute.
static const char ID
Unique ID (due to the unique address)
bool isAssumedNotConvergent() const
Return true if "non-convergent" is assumed.
An abstract interface for all nonnull attributes.
static const char ID
Unique ID (due to the unique address)
static bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See AbstractAttribute::isImpliedByIR(...).
A container for a list of ranges.
static void set_difference(const RangeList &L, const RangeList &R, RangeList &D)
Copy ranges from L that are not in R, into D.
An abstract interface for struct information.
virtual const_bin_iterator begin() const =0
static const char ID
Unique ID (due to the unique address)
virtual int64_t numOffsetBins() const =0
An abstract interface for potential values analysis.
static const char ID
Unique ID (due to the unique address)
static const char ID
Unique ID (due to the unique address)
static Value * getSingleValue(Attributor &A, const AbstractAttribute &AA, const IRPosition &IRP, SmallVectorImpl< AA::ValueAndContext > &Values)
Extract the single value in Values if any.
An abstract interface for privatizability.
virtual std::optional< Type * > getPrivatizableType() const =0
Return the type we can choose for a private copy of the underlying value.
static const char ID
Unique ID (due to the unique address)
An abstract attribute for undefined behavior.
static const char ID
Unique ID (due to the unique address)
An abstract attribute for getting all assumption underlying objects.
static const char ID
Unique ID (due to the unique address)
An abstract interface for range value analysis.
static const char ID
Unique ID (due to the unique address)
An abstract interface for value simplify abstract attribute.
static const char ID
Unique ID (due to the unique address)
An abstract attribute for willreturn.
static const char ID
Unique ID (due to the unique address)
Helper to represent an access offset and size, with logic to deal with uncertainty and check for over...
static constexpr int64_t Unknown
static RangeTy getUnknown()
Base struct for all "concrete attribute" deductions.
void print(raw_ostream &OS) const
Helper functions, for debug purposes only.
virtual StateType & getState()=0
Return the internal abstract state for inspection.
An interface to query the internal state of an abstract attribute.
virtual ChangeStatus indicatePessimisticFixpoint()=0
Indicate that the abstract state should converge to the pessimistic state.
virtual bool isValidState() const =0
Return if this abstract state is in a valid state.
virtual ChangeStatus indicateOptimisticFixpoint()=0
Indicate that the abstract state should converge to the optimistic state.
Helper for AA::PointerInfo::Access DenseMap/Set usage ignoring everything but the instruction.
static unsigned getHashValue(const Access &A)
static Access getTombstoneKey()
static bool isEqual(const Access &LHS, const Access &RHS)
static Access getEmptyKey()
This struct is a compact representation of a valid (non-zero power of two) alignment.
uint64_t value() const
This is a hole in the type system and should not be abused.
Helper struct used in the communication between an abstract attribute (AA) that wants to change the s...
std::function< void(const ArgumentReplacementInfo &, AbstractCallSite, SmallVectorImpl< Value * > &)> ACSRepairCBTy
Abstract call site (ACS) repair callback type.
const Argument & getReplacedArg() const
std::function< void(const ArgumentReplacementInfo &, Function &, Function::arg_iterator)> CalleeRepairCBTy
Callee repair callback type.
The fixpoint analysis framework that orchestrates the attribute deduction.
std::function< std::optional< Value * >(const IRPosition &, const AbstractAttribute *, bool &)> SimplifictionCallbackTy
Register CB as a simplification callback.
Specialization of the integer state for a bit-wise encoding.
BitIntegerState & removeAssumedBits(base_t BitsEncoding)
Remove the bits in BitsEncoding from the "assumed bits" if not known.
BitIntegerState & addKnownBits(base_t Bits)
Add the bits in BitsEncoding to the "known bits".
Simple wrapper for a single bit (boolean) state.
Represent subnormal handling kind for floating point instruction inputs and outputs.
static constexpr DenormalMode getDefault()
Return the assumed default mode for a function without denormal-fp-math.
static constexpr DenormalMode getInvalid()
static Access getTombstoneKey()
static unsigned getHashValue(const Access &A)
static Access getEmptyKey()
static bool isEqual(const Access &LHS, const Access &RHS)
static bool isEqual(const AA::RangeTy &A, const AA::RangeTy B)
static AA::RangeTy getTombstoneKey()
static unsigned getHashValue(const AA::RangeTy &Range)
static AA::RangeTy getEmptyKey()
static ReachabilityQueryInfo< ToTy > EmptyKey
static ReachabilityQueryInfo< ToTy > TombstoneKey
static ReachabilityQueryInfo< ToTy > * getEmptyKey()
static ReachabilityQueryInfo< ToTy > * getTombstoneKey()
static bool isEqual(const ReachabilityQueryInfo< ToTy > *LHS, const ReachabilityQueryInfo< ToTy > *RHS)
static unsigned getHashValue(const ReachabilityQueryInfo< ToTy > *RQI)
An information struct used to provide DenseMap with the various necessary components for a given valu...
State for dereferenceable attribute.
IncIntegerState DerefBytesState
State representing for dereferenceable bytes.
ChangeStatus manifest(Attributor &A) override
See AbstractAttribute::manifest(...).
Helper to describe and deal with positions in the LLVM-IR.
Function * getAssociatedFunction() const
Return the associated function, if any.
static const IRPosition callsite_returned(const CallBase &CB)
Create a position describing the returned value of CB.
static const IRPosition returned(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the returned value of F.
Argument * getAssociatedArgument() const
Return the associated argument, if any.
static const IRPosition value(const Value &V, const CallBaseContext *CBContext=nullptr)
Create a position describing the value of V.
int getCalleeArgNo() const
Return the callee argument number of the associated value if it is an argument or call site argument,...
static const IRPosition inst(const Instruction &I, const CallBaseContext *CBContext=nullptr)
Create a position describing the instruction I.
static const IRPosition callsite_argument(const CallBase &CB, unsigned ArgNo)
Create a position describing the argument of CB at position ArgNo.
@ IRP_ARGUMENT
An attribute for a function argument.
@ IRP_RETURNED
An attribute for the function return value.
@ IRP_CALL_SITE
An attribute for a call site (function scope).
@ IRP_CALL_SITE_RETURNED
An attribute for a call site return value.
@ IRP_FUNCTION
An attribute for a function (scope).
@ IRP_CALL_SITE_ARGUMENT
An attribute for a call site argument.
@ IRP_INVALID
An invalid position.
Instruction * getCtxI() const
Return the context instruction, if any.
static const IRPosition argument(const Argument &Arg, const CallBaseContext *CBContext=nullptr)
Create a position describing the argument Arg.
Type * getAssociatedType() const
Return the type this abstract attribute is associated with.
static const IRPosition function(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the function scope of F.
const CallBaseContext * getCallBaseContext() const
Get the call base context from the position.
Value & getAssociatedValue() const
Return the value this abstract attribute is associated with.
Value & getAnchorValue() const
Return the value this abstract attribute is anchored with.
int getCallSiteArgNo() const
Return the call site argument number of the associated value if it is an argument or call site argume...
static const IRPosition function_scope(const IRPosition &IRP, const CallBaseContext *CBContext=nullptr)
Create a position with function scope matching the "context" of IRP.
Kind getPositionKind() const
Return the associated position kind.
bool isArgumentPosition() const
Return true if the position is an argument or call site argument.
static const IRPosition callsite_function(const CallBase &CB)
Create a position describing the function scope of CB.
Function * getAnchorScope() const
Return the Function surrounding the anchor value.
Incoming for lane mask phi as machine instruction, incoming register Reg and incoming block Block are...
State for an integer range.
ConstantRange getKnown() const
Return the known state encoding.
ConstantRange getAssumed() const
Return the assumed state encoding.
bool isValidState() const override
See AbstractState::isValidState() NOTE: For now we simply pretend that the worst possible state is in...
bool isAtFixpoint() const override
See AbstractState::isAtFixpoint()
ChangeStatus indicateOptimisticFixpoint() override
See AbstractState::indicateOptimisticFixpoint(...)
base_t getAssumed() const
Return the assumed state encoding.
static constexpr base_t getWorstState()
Return the worst possible representable state.
ChangeStatus indicatePessimisticFixpoint() override
See AbstractState::indicatePessimisticFixpoint(...)
Helper that allows to insert a new assumption string in the known assumption set by creating a (stati...
FPClassTest KnownFPClasses
Floating-point classes the value could be one of.
This struct is a compact representation of a valid (power of two) or undefined (0) alignment.
A "must be executed context" for a given program point PP is the set of instructions,...
iterator & end()
Return an universal end iterator.
bool findInContextOf(const Instruction *I, const Instruction *PP)
Helper to look for I in the context of PP.
iterator & begin(const Instruction *PP)
Return an iterator to explore the context around PP.
bool checkForAllContext(const Instruction *PP, function_ref< bool(const Instruction *)> Pred)
}
Various options to control the behavior of getObjectSize.
static unsigned MaxPotentialValues
Maximum number of potential values to be tracked.
void unionAssumed(const MemberTy &C)
Union assumed set with the passed value.
static PotentialValuesState getBestState()
Return empty set as the best state of potential values.
const SetTy & getAssumedSet() const
Return this set.
Represent one information held inside an operand bundle of an llvm.assume.
A MapVector that performs no allocations if smaller than a certain size.
Helper to tie an abstract state implementation to an abstract attribute.
StateType & getState() override
See AbstractAttribute::getState(...).
bool unionAssumed(std::optional< Value * > Other)
Merge Other into the currently assumed simplified value.