55#include "llvm/IR/IntrinsicsAMDGPU.h"
56#include "llvm/IR/IntrinsicsNVPTX.h"
81#define DEBUG_TYPE "attributor"
85 cl::desc(
"Manifest Attributor internal string attributes."),
98 cl::desc(
"Maximum number of potential values to be "
99 "tracked for each position."),
104 "attributor-max-potential-values-iterations",
cl::Hidden,
106 "Maximum number of iterations we keep dismantling potential values."),
109STATISTIC(NumAAs,
"Number of abstract attributes created");
124#define BUILD_STAT_MSG_IR_ATTR(TYPE, NAME) \
125 ("Number of " #TYPE " marked '" #NAME "'")
126#define BUILD_STAT_NAME(NAME, TYPE) NumIR##TYPE##_##NAME
127#define STATS_DECL_(NAME, MSG) STATISTIC(NAME, MSG);
128#define STATS_DECL(NAME, TYPE, MSG) \
129 STATS_DECL_(BUILD_STAT_NAME(NAME, TYPE), MSG);
130#define STATS_TRACK(NAME, TYPE) ++(BUILD_STAT_NAME(NAME, TYPE));
131#define STATS_DECLTRACK(NAME, TYPE, MSG) \
133 STATS_DECL(NAME, TYPE, MSG) \
134 STATS_TRACK(NAME, TYPE) \
136#define STATS_DECLTRACK_ARG_ATTR(NAME) \
137 STATS_DECLTRACK(NAME, Arguments, BUILD_STAT_MSG_IR_ATTR(arguments, NAME))
138#define STATS_DECLTRACK_CSARG_ATTR(NAME) \
139 STATS_DECLTRACK(NAME, CSArguments, \
140 BUILD_STAT_MSG_IR_ATTR(call site arguments, NAME))
141#define STATS_DECLTRACK_FN_ATTR(NAME) \
142 STATS_DECLTRACK(NAME, Function, BUILD_STAT_MSG_IR_ATTR(functions, NAME))
143#define STATS_DECLTRACK_CS_ATTR(NAME) \
144 STATS_DECLTRACK(NAME, CS, BUILD_STAT_MSG_IR_ATTR(call site, NAME))
145#define STATS_DECLTRACK_FNRET_ATTR(NAME) \
146 STATS_DECLTRACK(NAME, FunctionReturn, \
147 BUILD_STAT_MSG_IR_ATTR(function returns, NAME))
148#define STATS_DECLTRACK_CSRET_ATTR(NAME) \
149 STATS_DECLTRACK(NAME, CSReturn, \
150 BUILD_STAT_MSG_IR_ATTR(call site returns, NAME))
151#define STATS_DECLTRACK_FLOATING_ATTR(NAME) \
152 STATS_DECLTRACK(NAME, Floating, \
153 ("Number of floating values known to be '" #NAME "'"))
158#define PIPE_OPERATOR(CLASS) \
159 raw_ostream &operator<<(raw_ostream &OS, const CLASS &AA) { \
160 return OS << static_cast<const AbstractAttribute &>(AA); \
215 bool HeaderOnly,
Cycle **CPtr =
nullptr) {
218 auto *BB =
I->getParent();
224 return !HeaderOnly || BB ==
C->getHeader();
235 if (
DL.getTypeSizeInBits(Ty) !=
DL.getTypeAllocSizeInBits(Ty))
240 if (
VectorType *SeqTy = dyn_cast<VectorType>(Ty))
244 if (
ArrayType *SeqTy = dyn_cast<ArrayType>(Ty))
247 if (!isa<StructType>(Ty))
260 StartPos +=
DL.getTypeAllocSizeInBits(ElTy);
270 bool AllowVolatile) {
271 if (!AllowVolatile &&
I->isVolatile())
274 if (
auto *LI = dyn_cast<LoadInst>(
I)) {
275 return LI->getPointerOperand();
278 if (
auto *SI = dyn_cast<StoreInst>(
I)) {
279 return SI->getPointerOperand();
282 if (
auto *CXI = dyn_cast<AtomicCmpXchgInst>(
I)) {
283 return CXI->getPointerOperand();
286 if (
auto *RMWI = dyn_cast<AtomicRMWInst>(
I)) {
287 return RMWI->getPointerOperand();
309 bool GetMinOffset,
bool AllowNonInbounds,
310 bool UseAssumed =
false) {
312 auto AttributorAnalysis = [&](
Value &V,
APInt &ROffset) ->
bool {
317 UseAssumed ? DepClassTy::OPTIONAL
319 if (!ValueConstantRangeAA)
342 const Value *
Ptr, int64_t &BytesOffset,
344 APInt OffsetAPInt(
DL.getIndexTypeSizeInBits(
Ptr->getType()), 0);
347 true, AllowNonInbounds);
355template <
typename AAType,
typename StateType =
typename AAType::StateType,
357 bool RecurseForSelectAndPHI =
true>
359 Attributor &
A,
const AAType &QueryingAA, StateType &S,
361 LLVM_DEBUG(
dbgs() <<
"[Attributor] Clamp return value states for "
362 << QueryingAA <<
" into " << S <<
"\n");
364 assert((QueryingAA.getIRPosition().getPositionKind() ==
366 QueryingAA.getIRPosition().getPositionKind() ==
368 "Can only clamp returned value states for a function returned or call "
369 "site returned position!");
373 std::optional<StateType>
T;
376 auto CheckReturnValue = [&](
Value &RV) ->
bool {
381 return AA::hasAssumedIRAttr<IRAttributeKind>(
382 A, &QueryingAA, RVPos, DepClassTy::REQUIRED, IsKnown);
386 A.getAAFor<AAType>(QueryingAA, RVPos, DepClassTy::REQUIRED);
390 <<
" AA: " << AA->getAsStr(&
A) <<
" @ " << RVPos <<
"\n");
391 const StateType &AAS = AA->getState();
393 T = StateType::getBestState(AAS);
395 LLVM_DEBUG(
dbgs() <<
"[Attributor] AA State: " << AAS <<
" RV State: " <<
T
397 return T->isValidState();
400 if (!
A.checkForAllReturnedValues(CheckReturnValue, QueryingAA,
401 AA::ValueScope::Intraprocedural,
402 RecurseForSelectAndPHI))
403 S.indicatePessimisticFixpoint();
410template <
typename AAType,
typename BaseType,
411 typename StateType =
typename BaseType::StateType,
412 bool PropagateCallBaseContext =
false,
414 bool RecurseForSelectAndPHI =
true>
415struct AAReturnedFromReturnedValues :
public BaseType {
421 StateType S(StateType::getBestState(this->getState()));
423 RecurseForSelectAndPHI>(
425 PropagateCallBaseContext ? this->getCallBaseContext() : nullptr);
428 return clampStateAndIndicateChange<StateType>(this->getState(), S);
434template <
typename AAType,
typename StateType =
typename AAType::StateType,
436static void clampCallSiteArgumentStates(
Attributor &
A,
const AAType &QueryingAA,
438 LLVM_DEBUG(
dbgs() <<
"[Attributor] Clamp call site argument states for "
439 << QueryingAA <<
" into " << S <<
"\n");
441 assert(QueryingAA.getIRPosition().getPositionKind() ==
443 "Can only clamp call site argument states for an argument position!");
447 std::optional<StateType>
T;
450 unsigned ArgNo = QueryingAA.getIRPosition().getCallSiteArgNo();
462 return AA::hasAssumedIRAttr<IRAttributeKind>(
463 A, &QueryingAA, ACSArgPos, DepClassTy::REQUIRED, IsKnown);
467 A.getAAFor<AAType>(QueryingAA, ACSArgPos, DepClassTy::REQUIRED);
470 LLVM_DEBUG(
dbgs() <<
"[Attributor] ACS: " << *ACS.getInstruction()
471 <<
" AA: " << AA->getAsStr(&
A) <<
" @" << ACSArgPos
473 const StateType &AAS = AA->getState();
475 T = StateType::getBestState(AAS);
477 LLVM_DEBUG(
dbgs() <<
"[Attributor] AA State: " << AAS <<
" CSA State: " <<
T
479 return T->isValidState();
482 bool UsedAssumedInformation =
false;
483 if (!
A.checkForAllCallSites(CallSiteCheck, QueryingAA,
true,
484 UsedAssumedInformation))
485 S.indicatePessimisticFixpoint();
492template <
typename AAType,
typename BaseType,
493 typename StateType =
typename AAType::StateType,
495bool getArgumentStateFromCallBaseContext(
Attributor &
A,
499 "Expected an 'argument' position !");
505 assert(ArgNo >= 0 &&
"Invalid Arg No!");
511 return AA::hasAssumedIRAttr<IRAttributeKind>(
512 A, &QueryingAttribute, CBArgPos, DepClassTy::REQUIRED, IsKnown);
516 A.getAAFor<AAType>(QueryingAttribute, CBArgPos, DepClassTy::REQUIRED);
519 const StateType &CBArgumentState =
520 static_cast<const StateType &
>(AA->getState());
522 LLVM_DEBUG(
dbgs() <<
"[Attributor] Briding Call site context to argument"
523 <<
"Position:" << Pos <<
"CB Arg state:" << CBArgumentState
527 State ^= CBArgumentState;
532template <
typename AAType,
typename BaseType,
533 typename StateType =
typename AAType::StateType,
534 bool BridgeCallBaseContext =
false,
536struct AAArgumentFromCallSiteArguments :
public BaseType {
542 StateType S = StateType::getBestState(this->getState());
544 if (BridgeCallBaseContext) {
546 getArgumentStateFromCallBaseContext<AAType,
BaseType, StateType,
548 A, *
this, this->getIRPosition(), S);
550 return clampStateAndIndicateChange<StateType>(this->getState(), S);
552 clampCallSiteArgumentStates<AAType, StateType, IRAttributeKind>(
A, *
this,
557 return clampStateAndIndicateChange<StateType>(this->getState(), S);
562template <
typename AAType,
typename BaseType,
563 typename StateType =
typename BaseType::StateType,
564 bool IntroduceCallBaseContext =
false,
566struct AACalleeToCallSite :
public BaseType {
571 auto IRPKind = this->getIRPosition().getPositionKind();
574 "Can only wrap function returned positions for call site "
575 "returned positions!");
576 auto &S = this->getState();
578 CallBase &CB = cast<CallBase>(this->getAnchorValue());
579 if (IntroduceCallBaseContext)
580 LLVM_DEBUG(
dbgs() <<
"[Attributor] Introducing call base context:" << CB
585 for (
const Function *Callee : Callees) {
589 IntroduceCallBaseContext ? &CB :
nullptr)
591 *
Callee, IntroduceCallBaseContext ? &CB : nullptr);
595 if (!AA::hasAssumedIRAttr<IRAttributeKind>(
596 A,
this, FnPos, DepClassTy::REQUIRED, IsKnown))
602 A.getAAFor<AAType>(*
this, FnPos, DepClassTy::REQUIRED);
606 if (S.isAtFixpoint())
607 return S.isValidState();
611 if (!
A.checkForAllCallees(CalleePred, *
this, CB))
612 return S.indicatePessimisticFixpoint();
618template <
class AAType,
typename StateType =
typename AAType::StateType>
619static void followUsesInContext(AAType &AA,
Attributor &
A,
624 auto EIt = Explorer.
begin(CtxI), EEnd = Explorer.
end(CtxI);
625 for (
unsigned u = 0;
u <
Uses.size(); ++
u) {
627 if (
const Instruction *UserI = dyn_cast<Instruction>(
U->getUser())) {
629 if (Found && AA.followUseInMBEC(
A, U, UserI, State))
630 for (
const Use &Us : UserI->
uses())
645template <
class AAType,
typename StateType =
typename AAType::StateType>
646static void followUsesInMBEC(AAType &AA,
Attributor &
A, StateType &S,
649 A.getInfoCache().getMustBeExecutedContextExplorer();
655 for (
const Use &U : AA.getIRPosition().getAssociatedValue().uses())
658 followUsesInContext<AAType>(AA,
A, *Explorer, &CtxI,
Uses, S);
660 if (S.isAtFixpoint())
665 if (
const BranchInst *Br = dyn_cast<BranchInst>(
I))
666 if (Br->isConditional())
705 StateType ParentState;
709 ParentState.indicateOptimisticFixpoint();
711 for (
const BasicBlock *BB : Br->successors()) {
712 StateType ChildState;
714 size_t BeforeSize =
Uses.size();
715 followUsesInContext(AA,
A, *Explorer, &BB->front(),
Uses, ChildState);
718 for (
auto It =
Uses.begin() + BeforeSize; It !=
Uses.end();)
721 ParentState &= ChildState;
734namespace PointerInfo {
795 R.indicatePessimisticFixpoint();
889 if (!
Range.mayOverlap(ItRange))
891 bool IsExact =
Range == ItRange && !
Range.offsetOrSizeAreUnknown();
892 for (
auto Index : It.getSecond()) {
894 if (!CB(Access, IsExact))
914 for (
unsigned Index : LocalList->getSecond()) {
917 if (
Range.offsetAndSizeAreUnknown())
933 RemoteI = RemoteI ? RemoteI : &
I;
937 bool AccExists =
false;
939 for (
auto Index : LocalList) {
941 if (
A.getLocalInst() == &
I) {
950 <<
"[AAPointerInfo] Inserting access in new offset bins\n";);
952 for (
auto Key : ToAdd) {
961 "New Access should have been at AccIndex");
962 LocalList.push_back(AccIndex);
976 auto &ExistingRanges =
Before.getRanges();
977 auto &NewRanges = Current.getRanges();
984 <<
"[AAPointerInfo] Removing access from old offset bins\n";);
991 "Expected bin to actually contain the Access.");
1010 using const_iterator = VecTy::const_iterator;
1013 const_iterator begin()
const {
return Offsets.begin(); }
1014 const_iterator end()
const {
return Offsets.end(); }
1017 return Offsets ==
RHS.Offsets;
1023 bool isUnassigned()
const {
return Offsets.size() == 0; }
1025 bool isUnknown()
const {
1038 void addToAll(int64_t Inc) {
1039 for (
auto &
Offset : Offsets) {
1048 void merge(
const OffsetInfo &R) {
Offsets.append(
R.Offsets); }
1063struct AAPointerInfoImpl
1064 :
public StateWrapper<AA::PointerInfo::State, AAPointerInfo> {
1069 const std::string getAsStr(
Attributor *
A)
const override {
1070 return std::string(
"PointerInfo ") +
1071 (isValidState() ? (std::string(
"#") +
1072 std::to_string(OffsetBins.
size()) +
" bins")
1078 return AAPointerInfo::manifest(
A);
1081 virtual const_bin_iterator
begin()
const override {
return State::begin(); }
1082 virtual const_bin_iterator
end()
const override {
return State::end(); }
1083 virtual int64_t numOffsetBins()
const override {
1084 return State::numOffsetBins();
1087 bool forallInterferingAccesses(
1091 return State::forallInterferingAccesses(
Range, CB);
1094 bool forallInterferingAccesses(
1096 bool FindInterferingWrites,
bool FindInterferingReads,
1097 function_ref<
bool(
const Access &,
bool)> UserCB,
bool &HasBeenWrittenTo,
1099 function_ref<
bool(
const Access &)> SkipCB)
const override {
1100 HasBeenWrittenTo =
false;
1107 bool IsAssumedNoSync = AA::hasAssumedIRAttr<Attribute::NoSync>(
1112 bool AllInSameNoSyncFn = IsAssumedNoSync;
1113 bool InstIsExecutedByInitialThreadOnly =
1114 ExecDomainAA && ExecDomainAA->isExecutedByInitialThreadOnly(
I);
1121 bool InstIsExecutedInAlignedRegion =
1122 FindInterferingReads && ExecDomainAA &&
1123 ExecDomainAA->isExecutedInAlignedRegion(
A,
I);
1125 if (InstIsExecutedInAlignedRegion || InstIsExecutedByInitialThreadOnly)
1126 A.recordDependence(*ExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1129 bool IsThreadLocalObj =
1138 auto CanIgnoreThreadingForInst = [&](
const Instruction &
I) ->
bool {
1139 if (IsThreadLocalObj || AllInSameNoSyncFn)
1141 const auto *FnExecDomainAA =
1142 I.getFunction() == &
Scope
1147 if (!FnExecDomainAA)
1149 if (InstIsExecutedInAlignedRegion ||
1150 (FindInterferingWrites &&
1151 FnExecDomainAA->isExecutedInAlignedRegion(
A,
I))) {
1152 A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1155 if (InstIsExecutedByInitialThreadOnly &&
1156 FnExecDomainAA->isExecutedByInitialThreadOnly(
I)) {
1157 A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1166 auto CanIgnoreThreading = [&](
const Access &Acc) ->
bool {
1167 return CanIgnoreThreadingForInst(*Acc.getRemoteInst()) ||
1168 (Acc.getRemoteInst() != Acc.getLocalInst() &&
1169 CanIgnoreThreadingForInst(*Acc.getLocalInst()));
1173 bool IsKnownNoRecurse;
1174 AA::hasAssumedIRAttr<Attribute::NoRecurse>(
1181 bool InstInKernel =
Scope.hasFnAttribute(
"kernel");
1182 bool ObjHasKernelLifetime =
false;
1183 const bool UseDominanceReasoning =
1184 FindInterferingWrites && IsKnownNoRecurse;
1195 case AA::GPUAddressSpace::Shared:
1196 case AA::GPUAddressSpace::Constant:
1197 case AA::GPUAddressSpace::Local:
1209 std::function<
bool(
const Function &)> IsLiveInCalleeCB;
1211 if (
auto *AI = dyn_cast<AllocaInst>(&getAssociatedValue())) {
1216 bool IsKnownNoRecurse;
1217 if (AA::hasAssumedIRAttr<Attribute::NoRecurse>(
1219 IsKnownNoRecurse)) {
1220 IsLiveInCalleeCB = [AIFn](
const Function &Fn) {
return AIFn != &Fn; };
1222 }
else if (
auto *GV = dyn_cast<GlobalValue>(&getAssociatedValue())) {
1225 ObjHasKernelLifetime = HasKernelLifetime(GV, *GV->getParent());
1226 if (ObjHasKernelLifetime)
1227 IsLiveInCalleeCB = [](
const Function &Fn) {
1228 return !Fn.hasFnAttribute(
"kernel");
1236 auto AccessCB = [&](
const Access &Acc,
bool Exact) {
1238 bool AccInSameScope = AccScope == &
Scope;
1242 if (InstInKernel && ObjHasKernelLifetime && !AccInSameScope &&
1246 if (Exact && Acc.isMustAccess() && Acc.getRemoteInst() != &
I) {
1247 if (Acc.isWrite() || (isa<LoadInst>(
I) && Acc.isWriteOrAssumption()))
1248 ExclusionSet.
insert(Acc.getRemoteInst());
1251 if ((!FindInterferingWrites || !Acc.isWriteOrAssumption()) &&
1252 (!FindInterferingReads || !Acc.isRead()))
1255 bool Dominates = FindInterferingWrites && DT && Exact &&
1256 Acc.isMustAccess() && AccInSameScope &&
1259 DominatingWrites.
insert(&Acc);
1263 AllInSameNoSyncFn &= Acc.getRemoteInst()->getFunction() == &
Scope;
1265 InterferingAccesses.
push_back({&Acc, Exact});
1268 if (!State::forallInterferingAccesses(
I, AccessCB,
Range))
1271 HasBeenWrittenTo = !DominatingWrites.
empty();
1275 for (
const Access *Acc : DominatingWrites) {
1276 if (!LeastDominatingWriteInst) {
1277 LeastDominatingWriteInst = Acc->getRemoteInst();
1278 }
else if (DT->
dominates(LeastDominatingWriteInst,
1279 Acc->getRemoteInst())) {
1280 LeastDominatingWriteInst = Acc->getRemoteInst();
1285 auto CanSkipAccess = [&](
const Access &Acc,
bool Exact) {
1286 if (SkipCB && SkipCB(Acc))
1288 if (!CanIgnoreThreading(Acc))
1294 bool ReadChecked = !FindInterferingReads;
1295 bool WriteChecked = !FindInterferingWrites;
1301 &ExclusionSet, IsLiveInCalleeCB))
1306 if (!WriteChecked) {
1308 &ExclusionSet, IsLiveInCalleeCB))
1309 WriteChecked =
true;
1323 if (!WriteChecked && HasBeenWrittenTo &&
1324 Acc.getRemoteInst()->getFunction() != &
Scope) {
1328 if (FnReachabilityAA) {
1334 if (!FnReachabilityAA->instructionCanReach(
1335 A, *LeastDominatingWriteInst,
1336 *Acc.getRemoteInst()->getFunction(), &ExclusionSet))
1337 WriteChecked =
true;
1344 if (ReadChecked && WriteChecked)
1347 if (!DT || !UseDominanceReasoning)
1349 if (!DominatingWrites.count(&Acc))
1351 return LeastDominatingWriteInst != Acc.getRemoteInst();
1356 for (
auto &It : InterferingAccesses) {
1357 if ((!AllInSameNoSyncFn && !IsThreadLocalObj && !ExecDomainAA) ||
1358 !CanSkipAccess(*It.first, It.second)) {
1359 if (!UserCB(*It.first, It.second))
1369 using namespace AA::PointerInfo;
1371 return indicatePessimisticFixpoint();
1373 const auto &OtherAAImpl =
static_cast<const AAPointerInfoImpl &
>(OtherAA);
1374 bool IsByval = OtherAAImpl.getAssociatedArgument()->hasByValAttr();
1378 const auto &State = OtherAAImpl.getState();
1379 for (
const auto &It : State) {
1380 for (
auto Index : It.getSecond()) {
1381 const auto &RAcc = State.getAccess(
Index);
1382 if (IsByval && !RAcc.isRead())
1384 bool UsedAssumedInformation =
false;
1386 auto Content =
A.translateArgumentToCallSiteContent(
1387 RAcc.getContent(), CB, *
this, UsedAssumedInformation);
1388 AK =
AccessKind(AK & (IsByval ? AccessKind::AK_R : AccessKind::AK_RW));
1389 AK =
AccessKind(AK | (RAcc.isMayAccess() ? AK_MAY : AK_MUST));
1391 Changed |= addAccess(
A, RAcc.getRanges(), CB,
Content, AK,
1392 RAcc.getType(), RAcc.getRemoteInst());
1399 const OffsetInfo &Offsets,
CallBase &CB) {
1400 using namespace AA::PointerInfo;
1402 return indicatePessimisticFixpoint();
1404 const auto &OtherAAImpl =
static_cast<const AAPointerInfoImpl &
>(OtherAA);
1408 const auto &State = OtherAAImpl.getState();
1409 for (
const auto &It : State) {
1410 for (
auto Index : It.getSecond()) {
1411 const auto &RAcc = State.getAccess(
Index);
1412 for (
auto Offset : Offsets) {
1416 if (!NewRanges.isUnknown()) {
1417 NewRanges.addToAllOffsets(
Offset);
1420 addAccess(
A, NewRanges, CB, RAcc.getContent(), RAcc.getKind(),
1421 RAcc.getType(), RAcc.getRemoteInst());
1430 void trackPointerInfoStatistics(
const IRPosition &IRP)
const {}
1434 for (
auto &It : OffsetBins) {
1435 O <<
"[" << It.first.Offset <<
"-" << It.first.Offset + It.first.Size
1436 <<
"] : " << It.getSecond().size() <<
"\n";
1437 for (
auto AccIndex : It.getSecond()) {
1438 auto &Acc = AccessList[AccIndex];
1439 O <<
" - " << Acc.getKind() <<
" - " << *Acc.getLocalInst() <<
"\n";
1440 if (Acc.getLocalInst() != Acc.getRemoteInst())
1441 O <<
" --> " << *Acc.getRemoteInst()
1443 if (!Acc.isWrittenValueYetUndetermined()) {
1444 if (isa_and_nonnull<Function>(Acc.getWrittenValue()))
1445 O <<
" - c: func " << Acc.getWrittenValue()->getName()
1447 else if (Acc.getWrittenValue())
1448 O <<
" - c: " << *Acc.getWrittenValue() <<
"\n";
1450 O <<
" - c: <unknown>\n";
1457struct AAPointerInfoFloating :
public AAPointerInfoImpl {
1460 : AAPointerInfoImpl(IRP,
A) {}
1467 using namespace AA::PointerInfo;
1470 TypeSize AccessSize =
DL.getTypeStoreSize(&Ty);
1480 if (!VT || VT->getElementCount().isScalable() ||
1482 (*Content)->getType() != VT ||
1483 DL.getTypeStoreSize(VT->getElementType()).isScalable()) {
1493 int64_t ElementSize =
DL.getTypeStoreSize(ElementType).getFixedValue();
1494 auto *ConstContent = cast<Constant>(*
Content);
1498 for (
int i = 0, e = VT->getElementCount().getFixedValue(); i != e; ++i) {
1500 ConstContent, ConstantInt::get(Int32Ty, i));
1503 Changed = Changed | addAccess(
A, {ElementOffsets, ElementSize},
I,
1507 for (
auto &ElementOffset : ElementOffsets)
1508 ElementOffset += ElementSize;
1522 OffsetInfo &UsrOI,
const OffsetInfo &PtrOI,
1526 void trackStatistics()
const override {
1527 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1531bool AAPointerInfoFloating::collectConstantsForGEP(
Attributor &
A,
1534 const OffsetInfo &PtrOI,
1536 unsigned BitWidth =
DL.getIndexTypeSizeInBits(
GEP->getType());
1540 assert(!UsrOI.isUnknown() && !PtrOI.isUnknown() &&
1541 "Don't look for constant values if the offset has already been "
1542 "determined to be unknown.");
1544 if (!
GEP->collectOffset(
DL,
BitWidth, VariableOffsets, ConstantOffset)) {
1550 << (VariableOffsets.
empty() ?
"" :
"not") <<
" constant "
1554 Union.addToAll(ConstantOffset.getSExtValue());
1559 for (
const auto &VI : VariableOffsets) {
1562 if (!PotentialConstantsAA || !PotentialConstantsAA->isValidState()) {
1568 if (PotentialConstantsAA->undefIsContained())
1576 if (AssumedSet.empty())
1580 for (
const auto &ConstOffset : AssumedSet) {
1581 auto CopyPerOffset =
Union;
1582 CopyPerOffset.addToAll(ConstOffset.getSExtValue() *
1583 VI.second.getZExtValue());
1584 Product.merge(CopyPerOffset);
1589 UsrOI = std::move(Union);
1594 using namespace AA::PointerInfo;
1597 Value &AssociatedValue = getAssociatedValue();
1600 OffsetInfoMap[&AssociatedValue].
insert(0);
1602 auto HandlePassthroughUser = [&](
Value *Usr,
Value *CurPtr,
bool &Follow) {
1613 "CurPtr does not exist in the map!");
1615 auto &UsrOI = OffsetInfoMap[Usr];
1616 auto &PtrOI = OffsetInfoMap[CurPtr];
1617 assert(!PtrOI.isUnassigned() &&
1618 "Cannot pass through if the input Ptr was not visited!");
1624 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
1626 User *Usr =
U.getUser();
1627 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Analyze " << *CurPtr <<
" in " << *Usr
1630 "The current pointer offset should have been seeded!");
1631 assert(!OffsetInfoMap[CurPtr].isUnassigned() &&
1632 "Current pointer should be assigned");
1636 return HandlePassthroughUser(Usr, CurPtr, Follow);
1637 if (!isa<GEPOperator>(CE)) {
1638 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Unhandled constant user " << *CE
1643 if (
auto *
GEP = dyn_cast<GEPOperator>(Usr)) {
1646 auto &UsrOI = OffsetInfoMap[Usr];
1647 auto &PtrOI = OffsetInfoMap[CurPtr];
1649 if (UsrOI.isUnknown())
1652 if (PtrOI.isUnknown()) {
1658 Follow = collectConstantsForGEP(
A,
DL, UsrOI, PtrOI,
GEP);
1661 if (isa<PtrToIntInst>(Usr))
1663 if (isa<CastInst>(Usr) || isa<SelectInst>(Usr) || isa<ReturnInst>(Usr))
1664 return HandlePassthroughUser(Usr, CurPtr, Follow);
1669 if (
auto *
PHI = dyn_cast<PHINode>(Usr)) {
1672 bool IsFirstPHIUser = !OffsetInfoMap.
count(
PHI);
1673 auto &UsrOI = OffsetInfoMap[
PHI];
1674 auto &PtrOI = OffsetInfoMap[CurPtr];
1678 if (PtrOI.isUnknown()) {
1679 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI operand offset unknown "
1680 << *CurPtr <<
" in " << *
PHI <<
"\n");
1681 Follow = !UsrOI.isUnknown();
1687 if (UsrOI == PtrOI) {
1688 assert(!PtrOI.isUnassigned() &&
1689 "Cannot assign if the current Ptr was not visited!");
1690 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI is invariant (so far)");
1700 auto It = OffsetInfoMap.
find(CurPtrBase);
1701 if (It == OffsetInfoMap.
end()) {
1702 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI operand is too complex "
1703 << *CurPtr <<
" in " << *
PHI
1704 <<
" (base: " << *CurPtrBase <<
")\n");
1719 *
PHI->getFunction());
1721 auto BaseOI = It->getSecond();
1722 BaseOI.addToAll(
Offset.getZExtValue());
1723 if (IsFirstPHIUser || BaseOI == UsrOI) {
1724 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI is invariant " << *CurPtr
1725 <<
" in " << *Usr <<
"\n");
1726 return HandlePassthroughUser(Usr, CurPtr, Follow);
1730 dbgs() <<
"[AAPointerInfo] PHI operand pointer offset mismatch "
1731 << *CurPtr <<
" in " << *
PHI <<
"\n");
1742 if (
auto *LoadI = dyn_cast<LoadInst>(Usr)) {
1750 if (!handleAccess(
A, *LoadI,
nullptr, AK,
1751 OffsetInfoMap[CurPtr].Offsets, Changed,
1756 if (
auto *
II = dyn_cast<IntrinsicInst>(&
I))
1757 return II->isAssumeLikeIntrinsic();
1768 }
while (FromI && FromI != ToI);
1774 if (IntrI.getIntrinsicID() != Intrinsic::assume)
1777 if (IntrI.getParent() == BB) {
1778 if (IsImpactedInRange(LoadI->getNextNonDebugInstruction(), &IntrI))
1784 if ((*PredIt) != BB)
1789 if (SuccBB == IntrBB)
1791 if (isa<UnreachableInst>(SuccBB->getTerminator()))
1795 if (IsImpactedInRange(LoadI->getNextNonDebugInstruction(),
1798 if (IsImpactedInRange(&IntrBB->
front(), &IntrI))
1804 std::pair<Value *, IntrinsicInst *> Assumption;
1805 for (
const Use &LoadU : LoadI->
uses()) {
1806 if (
auto *CmpI = dyn_cast<CmpInst>(LoadU.getUser())) {
1807 if (!CmpI->isEquality() || !CmpI->isTrueWhenEqual())
1809 for (
const Use &CmpU : CmpI->
uses()) {
1810 if (
auto *IntrI = dyn_cast<IntrinsicInst>(CmpU.getUser())) {
1811 if (!IsValidAssume(*IntrI))
1813 int Idx = CmpI->getOperandUse(0) == LoadU;
1814 Assumption = {CmpI->getOperand(
Idx), IntrI};
1819 if (Assumption.first)
1824 if (!Assumption.first || !Assumption.second)
1828 << *Assumption.second <<
": " << *LoadI
1829 <<
" == " << *Assumption.first <<
"\n");
1830 bool UsedAssumedInformation =
false;
1831 std::optional<Value *>
Content =
nullptr;
1832 if (Assumption.first)
1834 A.getAssumedSimplified(*Assumption.first, *
this,
1836 return handleAccess(
1837 A, *Assumption.second,
Content, AccessKind::AK_ASSUMPTION,
1838 OffsetInfoMap[CurPtr].Offsets, Changed, *LoadI->getType());
1843 for (
auto *OtherOp : OtherOps) {
1844 if (OtherOp == CurPtr) {
1847 <<
"[AAPointerInfo] Escaping use in store like instruction " <<
I
1859 bool UsedAssumedInformation =
false;
1860 std::optional<Value *>
Content =
nullptr;
1864 return handleAccess(
A,
I,
Content, AK, OffsetInfoMap[CurPtr].Offsets,
1868 if (
auto *StoreI = dyn_cast<StoreInst>(Usr))
1869 return HandleStoreLike(*StoreI, StoreI->getValueOperand(),
1870 *StoreI->getValueOperand()->getType(),
1871 {StoreI->getValueOperand()}, AccessKind::AK_W);
1872 if (
auto *RMWI = dyn_cast<AtomicRMWInst>(Usr))
1873 return HandleStoreLike(*RMWI,
nullptr, *RMWI->getValOperand()->getType(),
1874 {RMWI->getValOperand()}, AccessKind::AK_RW);
1875 if (
auto *CXI = dyn_cast<AtomicCmpXchgInst>(Usr))
1876 return HandleStoreLike(
1877 *CXI,
nullptr, *CXI->getNewValOperand()->getType(),
1878 {CXI->getCompareOperand(), CXI->getNewValOperand()},
1881 if (
auto *CB = dyn_cast<CallBase>(Usr)) {
1885 A.getInfoCache().getTargetLibraryInfoForFunction(*CB->
getFunction());
1896 translateAndAddState(
A, *CSArgPI, OffsetInfoMap[CurPtr], *CB) |
1898 return isValidState();
1900 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Call user not handled " << *CB
1906 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] User not handled " << *Usr <<
"\n");
1909 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
1910 assert(OffsetInfoMap.
count(OldU) &&
"Old use should be known already!");
1911 assert(!OffsetInfoMap[OldU].isUnassigned() &&
"Old use should be assinged");
1912 if (OffsetInfoMap.
count(NewU)) {
1914 if (!(OffsetInfoMap[NewU] == OffsetInfoMap[OldU])) {
1915 dbgs() <<
"[AAPointerInfo] Equivalent use callback failed: "
1916 << OffsetInfoMap[NewU] <<
" vs " << OffsetInfoMap[OldU]
1920 return OffsetInfoMap[NewU] == OffsetInfoMap[OldU];
1923 return HandlePassthroughUser(NewU.get(), OldU.
get(), Unused);
1925 if (!
A.checkForAllUses(UsePred, *
this, AssociatedValue,
1927 true, EquivalentUseCB)) {
1928 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Check for all uses failed, abort!\n");
1929 return indicatePessimisticFixpoint();
1933 dbgs() <<
"Accesses by bin after update:\n";
1940struct AAPointerInfoReturned final : AAPointerInfoImpl {
1942 : AAPointerInfoImpl(IRP,
A) {}
1946 return indicatePessimisticFixpoint();
1950 void trackStatistics()
const override {
1951 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1955struct AAPointerInfoArgument final : AAPointerInfoFloating {
1957 : AAPointerInfoFloating(IRP,
A) {}
1960 void trackStatistics()
const override {
1961 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1965struct AAPointerInfoCallSiteArgument final : AAPointerInfoFloating {
1967 : AAPointerInfoFloating(IRP,
A) {}
1971 using namespace AA::PointerInfo;
1975 if (
auto *
MI = dyn_cast_or_null<MemIntrinsic>(getCtxI())) {
1979 LengthVal =
Length->getSExtValue();
1980 unsigned ArgNo = getIRPosition().getCallSiteArgNo();
1983 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Unhandled memory intrinsic "
1985 return indicatePessimisticFixpoint();
1988 ArgNo == 0 ? AccessKind::AK_MUST_WRITE : AccessKind::AK_MUST_READ;
1990 Changed | addAccess(
A, {0, LengthVal}, *
MI,
nullptr,
Kind,
nullptr);
1993 dbgs() <<
"Accesses by bin after update:\n";
2004 Argument *Arg = getAssociatedArgument();
2009 if (ArgAA && ArgAA->getState().isValidState())
2010 return translateAndAddStateFromCallee(
A, *ArgAA,
2011 *cast<CallBase>(getCtxI()));
2013 return indicatePessimisticFixpoint();
2016 bool IsKnownNoCapture;
2017 if (!AA::hasAssumedIRAttr<Attribute::NoCapture>(
2018 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNoCapture))
2019 return indicatePessimisticFixpoint();
2021 bool IsKnown =
false;
2023 return ChangeStatus::UNCHANGED;
2026 ReadOnly ? AccessKind::AK_MAY_READ : AccessKind::AK_MAY_READ_WRITE;
2032 void trackStatistics()
const override {
2033 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
2037struct AAPointerInfoCallSiteReturned final : AAPointerInfoFloating {
2039 : AAPointerInfoFloating(IRP,
A) {}
2042 void trackStatistics()
const override {
2043 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
2057 assert(!AA::hasAssumedIRAttr<Attribute::NoUnwind>(
2058 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2062 const std::string getAsStr(
Attributor *
A)
const override {
2063 return getAssumed() ?
"nounwind" :
"may-unwind";
2069 (
unsigned)Instruction::Invoke, (
unsigned)Instruction::CallBr,
2070 (
unsigned)Instruction::Call, (
unsigned)Instruction::CleanupRet,
2071 (
unsigned)Instruction::CatchSwitch, (
unsigned)Instruction::Resume};
2074 if (!
I.mayThrow(
true))
2077 if (
const auto *CB = dyn_cast<CallBase>(&
I)) {
2078 bool IsKnownNoUnwind;
2079 return AA::hasAssumedIRAttr<Attribute::NoUnwind>(
2086 bool UsedAssumedInformation =
false;
2087 if (!
A.checkForAllInstructions(CheckForNoUnwind, *
this, Opcodes,
2088 UsedAssumedInformation))
2089 return indicatePessimisticFixpoint();
2091 return ChangeStatus::UNCHANGED;
2095struct AANoUnwindFunction final :
public AANoUnwindImpl {
2097 : AANoUnwindImpl(IRP,
A) {}
2104struct AANoUnwindCallSite final
2105 : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl> {
2107 : AACalleeToCallSite<
AANoUnwind, AANoUnwindImpl>(IRP,
A) {}
2118 case Intrinsic::nvvm_barrier0:
2119 case Intrinsic::nvvm_barrier0_and:
2120 case Intrinsic::nvvm_barrier0_or:
2121 case Intrinsic::nvvm_barrier0_popc:
2123 case Intrinsic::amdgcn_s_barrier:
2124 if (ExecutedAligned)
2137 if (
auto *FI = dyn_cast<FenceInst>(
I))
2140 if (
auto *AI = dyn_cast<AtomicCmpXchgInst>(
I)) {
2147 switch (
I->getOpcode()) {
2148 case Instruction::AtomicRMW:
2149 Ordering = cast<AtomicRMWInst>(
I)->getOrdering();
2151 case Instruction::Store:
2152 Ordering = cast<StoreInst>(
I)->getOrdering();
2154 case Instruction::Load:
2155 Ordering = cast<LoadInst>(
I)->getOrdering();
2159 "New atomic operations need to be known in the attributor.");
2170 if (
auto *
MI = dyn_cast<MemIntrinsic>(
I))
2171 return !
MI->isVolatile();
2182 assert(!AA::hasAssumedIRAttr<Attribute::NoSync>(
A,
nullptr, getIRPosition(),
2183 DepClassTy::NONE, IsKnown));
2187 const std::string getAsStr(
Attributor *
A)
const override {
2188 return getAssumed() ?
"nosync" :
"may-sync";
2204 if (
I.mayReadOrWriteMemory())
2209 if (AA::hasAssumedIRAttr<Attribute::NoSync>(
2218 bool UsedAssumedInformation =
false;
2219 if (!
A.checkForAllReadWriteInstructions(CheckRWInstForNoSync, *
this,
2220 UsedAssumedInformation) ||
2221 !
A.checkForAllCallLikeInstructions(CheckForNoSync, *
this,
2222 UsedAssumedInformation))
2223 return indicatePessimisticFixpoint();
2228struct AANoSyncFunction final :
public AANoSyncImpl {
2230 : AANoSyncImpl(IRP,
A) {}
2237struct AANoSyncCallSite final : AACalleeToCallSite<AANoSync, AANoSyncImpl> {
2239 : AACalleeToCallSite<
AANoSync, AANoSyncImpl>(IRP,
A) {}
2249struct AANoFreeImpl :
public AANoFree {
2255 assert(!AA::hasAssumedIRAttr<Attribute::NoFree>(
A,
nullptr, getIRPosition(),
2256 DepClassTy::NONE, IsKnown));
2264 return AA::hasAssumedIRAttr<Attribute::NoFree>(
2266 DepClassTy::REQUIRED, IsKnown);
2269 bool UsedAssumedInformation =
false;
2270 if (!
A.checkForAllCallLikeInstructions(CheckForNoFree, *
this,
2271 UsedAssumedInformation))
2272 return indicatePessimisticFixpoint();
2273 return ChangeStatus::UNCHANGED;
2277 const std::string getAsStr(
Attributor *
A)
const override {
2278 return getAssumed() ?
"nofree" :
"may-free";
2282struct AANoFreeFunction final :
public AANoFreeImpl {
2284 : AANoFreeImpl(IRP,
A) {}
2291struct AANoFreeCallSite final : AACalleeToCallSite<AANoFree, AANoFreeImpl> {
2293 : AACalleeToCallSite<
AANoFree, AANoFreeImpl>(IRP,
A) {}
2300struct AANoFreeFloating : AANoFreeImpl {
2302 : AANoFreeImpl(IRP,
A) {}
2312 if (AA::hasAssumedIRAttr<Attribute::NoFree>(
A,
this,
2314 DepClassTy::OPTIONAL, IsKnown))
2315 return ChangeStatus::UNCHANGED;
2317 Value &AssociatedValue = getIRPosition().getAssociatedValue();
2318 auto Pred = [&](
const Use &
U,
bool &Follow) ->
bool {
2320 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
2328 return AA::hasAssumedIRAttr<Attribute::NoFree>(
2330 DepClassTy::REQUIRED, IsKnown);
2333 if (isa<GetElementPtrInst>(UserI) || isa<PHINode>(UserI) ||
2334 isa<SelectInst>(UserI)) {
2338 if (isa<StoreInst>(UserI) || isa<LoadInst>(UserI) ||
2339 isa<ReturnInst>(UserI))
2345 if (!
A.checkForAllUses(Pred, *
this, AssociatedValue))
2346 return indicatePessimisticFixpoint();
2348 return ChangeStatus::UNCHANGED;
2353struct AANoFreeArgument final : AANoFreeFloating {
2355 : AANoFreeFloating(IRP,
A) {}
2362struct AANoFreeCallSiteArgument final : AANoFreeFloating {
2364 : AANoFreeFloating(IRP,
A) {}
2372 Argument *Arg = getAssociatedArgument();
2374 return indicatePessimisticFixpoint();
2377 if (AA::hasAssumedIRAttr<Attribute::NoFree>(
A,
this, ArgPos,
2378 DepClassTy::REQUIRED, IsKnown))
2379 return ChangeStatus::UNCHANGED;
2380 return indicatePessimisticFixpoint();
2388struct AANoFreeReturned final : AANoFreeFloating {
2390 : AANoFreeFloating(IRP,
A) {
2405 void trackStatistics()
const override {}
2409struct AANoFreeCallSiteReturned final : AANoFreeFloating {
2411 : AANoFreeFloating(IRP,
A) {}
2414 return ChangeStatus::UNCHANGED;
2425 bool IgnoreSubsumingPositions) {
2427 AttrKinds.
push_back(Attribute::NonNull);
2430 AttrKinds.
push_back(Attribute::Dereferenceable);
2431 if (
A.hasAttr(IRP, AttrKinds, IgnoreSubsumingPositions, Attribute::NonNull))
2438 if (!Fn->isDeclaration()) {
2448 bool UsedAssumedInformation =
false;
2449 if (!
A.checkForAllInstructions(
2451 Worklist.push_back({*cast<ReturnInst>(I).getReturnValue(), &I});
2455 UsedAssumedInformation,
false,
true))
2467 Attribute::NonNull)});
2472static int64_t getKnownNonNullAndDerefBytesForUse(
2474 const Use *U,
const Instruction *
I,
bool &IsNonNull,
bool &TrackUse) {
2477 const Value *UseV =
U->get();
2484 if (isa<CastInst>(
I)) {
2489 if (isa<GetElementPtrInst>(
I)) {
2499 if (
const auto *CB = dyn_cast<CallBase>(
I)) {
2502 U, {Attribute::NonNull, Attribute::Dereferenceable})) {
2519 bool IsKnownNonNull;
2520 AA::hasAssumedIRAttr<Attribute::NonNull>(
A, &QueryingAA, IRP,
2522 IsNonNull |= IsKnownNonNull;
2529 if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
2530 Loc->Size.isScalable() ||
I->isVolatile())
2536 if (
Base &&
Base == &AssociatedValue) {
2537 int64_t DerefBytes = Loc->Size.getValue() +
Offset;
2539 return std::max(int64_t(0), DerefBytes);
2546 int64_t DerefBytes = Loc->Size.getValue();
2548 return std::max(int64_t(0), DerefBytes);
2559 Value &
V = *getAssociatedValue().stripPointerCasts();
2560 if (isa<ConstantPointerNull>(V)) {
2561 indicatePessimisticFixpoint();
2566 followUsesInMBEC(*
this,
A, getState(), *CtxI);
2572 bool IsNonNull =
false;
2573 bool TrackUse =
false;
2574 getKnownNonNullAndDerefBytesForUse(
A, *
this, getAssociatedValue(), U,
I,
2575 IsNonNull, TrackUse);
2576 State.setKnown(IsNonNull);
2581 const std::string getAsStr(
Attributor *
A)
const override {
2582 return getAssumed() ?
"nonnull" :
"may-null";
2587struct AANonNullFloating :
public AANonNullImpl {
2589 : AANonNullImpl(IRP,
A) {}
2594 bool IsKnownNonNull;
2595 return AA::hasAssumedIRAttr<Attribute::NonNull>(
2596 A, *
this, IRP, DepClassTy::OPTIONAL, IsKnownNonNull);
2600 bool UsedAssumedInformation =
false;
2601 Value *AssociatedValue = &getAssociatedValue();
2603 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
2608 Values.
size() != 1 || Values.
front().getValue() != AssociatedValue;
2612 if (
auto *
PHI = dyn_cast<PHINode>(AssociatedValue))
2614 return AA::hasAssumedIRAttr<Attribute::NonNull>(
2615 A, this, IRPosition::value(*Op), DepClassTy::OPTIONAL,
2618 return ChangeStatus::UNCHANGED;
2619 if (
auto *
Select = dyn_cast<SelectInst>(AssociatedValue))
2620 if (AA::hasAssumedIRAttr<Attribute::NonNull>(
2622 DepClassTy::OPTIONAL, IsKnown) &&
2623 AA::hasAssumedIRAttr<Attribute::NonNull>(
2625 DepClassTy::OPTIONAL, IsKnown))
2626 return ChangeStatus::UNCHANGED;
2633 if (AVIRP == getIRPosition() || !CheckIRP(AVIRP))
2634 return indicatePessimisticFixpoint();
2635 return ChangeStatus::UNCHANGED;
2638 for (
const auto &VAC : Values)
2640 return indicatePessimisticFixpoint();
2642 return ChangeStatus::UNCHANGED;
2650struct AANonNullReturned final
2651 : AAReturnedFromReturnedValues<AANonNull, AANonNull, AANonNull::StateType,
2652 false, AANonNull::IRAttributeKind, false> {
2659 const std::string getAsStr(
Attributor *
A)
const override {
2660 return getAssumed() ?
"nonnull" :
"may-null";
2668struct AANonNullArgument final
2669 : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl> {
2671 : AAArgumentFromCallSiteArguments<
AANonNull, AANonNullImpl>(IRP,
A) {}
2677struct AANonNullCallSiteArgument final : AANonNullFloating {
2679 : AANonNullFloating(IRP,
A) {}
2686struct AANonNullCallSiteReturned final
2687 : AACalleeToCallSite<AANonNull, AANonNullImpl> {
2689 : AACalleeToCallSite<
AANonNull, AANonNullImpl>(IRP,
A) {}
2705 assert(!AA::hasAssumedIRAttr<Attribute::MustProgress>(
2706 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2711 const std::string getAsStr(
Attributor *
A)
const override {
2712 return getAssumed() ?
"mustprogress" :
"may-not-progress";
2716struct AAMustProgressFunction final : AAMustProgressImpl {
2718 : AAMustProgressImpl(IRP,
A) {}
2723 if (AA::hasAssumedIRAttr<Attribute::WillReturn>(
2724 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnown)) {
2726 return indicateOptimisticFixpoint();
2727 return ChangeStatus::UNCHANGED;
2732 bool IsKnownMustProgress;
2733 return AA::hasAssumedIRAttr<Attribute::MustProgress>(
2734 A,
this, IPos, DepClassTy::REQUIRED, IsKnownMustProgress,
2738 bool AllCallSitesKnown =
true;
2739 if (!
A.checkForAllCallSites(CheckForMustProgress, *
this,
2742 return indicatePessimisticFixpoint();
2744 return ChangeStatus::UNCHANGED;
2748 void trackStatistics()
const override {
2754struct AAMustProgressCallSite final : AAMustProgressImpl {
2756 : AAMustProgressImpl(IRP,
A) {}
2765 bool IsKnownMustProgress;
2766 if (!AA::hasAssumedIRAttr<Attribute::MustProgress>(
2767 A,
this, FnPos, DepClassTy::REQUIRED, IsKnownMustProgress))
2768 return indicatePessimisticFixpoint();
2769 return ChangeStatus::UNCHANGED;
2773 void trackStatistics()
const override {
2788 assert(!AA::hasAssumedIRAttr<Attribute::NoRecurse>(
2789 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2794 const std::string getAsStr(
Attributor *
A)
const override {
2795 return getAssumed() ?
"norecurse" :
"may-recurse";
2799struct AANoRecurseFunction final : AANoRecurseImpl {
2801 : AANoRecurseImpl(IRP,
A) {}
2808 bool IsKnownNoRecurse;
2809 if (!AA::hasAssumedIRAttr<Attribute::NoRecurse>(
2812 DepClassTy::NONE, IsKnownNoRecurse))
2814 return IsKnownNoRecurse;
2816 bool UsedAssumedInformation =
false;
2817 if (
A.checkForAllCallSites(CallSitePred, *
this,
true,
2818 UsedAssumedInformation)) {
2824 if (!UsedAssumedInformation)
2825 indicateOptimisticFixpoint();
2826 return ChangeStatus::UNCHANGED;
2831 DepClassTy::REQUIRED);
2832 if (EdgeReachability && EdgeReachability->
canReach(
A, *getAnchorScope()))
2833 return indicatePessimisticFixpoint();
2834 return ChangeStatus::UNCHANGED;
2841struct AANoRecurseCallSite final
2842 : AACalleeToCallSite<AANoRecurse, AANoRecurseImpl> {
2844 : AACalleeToCallSite<
AANoRecurse, AANoRecurseImpl>(IRP,
A) {}
2859 const std::string getAsStr(
Attributor *
A)
const override {
2860 return getAssumed() ?
"non-convergent" :
"may-be-convergent";
2864struct AANonConvergentFunction final : AANonConvergentImpl {
2866 : AANonConvergentImpl(IRP,
A) {}
2872 auto CalleeIsNotConvergent = [&](
Instruction &Inst) {
2873 CallBase &CB = cast<CallBase>(Inst);
2875 if (!Callee ||
Callee->isIntrinsic()) {
2878 if (
Callee->isDeclaration()) {
2879 return !
Callee->hasFnAttribute(Attribute::Convergent);
2886 bool UsedAssumedInformation =
false;
2887 if (!
A.checkForAllCallLikeInstructions(CalleeIsNotConvergent, *
this,
2888 UsedAssumedInformation)) {
2889 return indicatePessimisticFixpoint();
2891 return ChangeStatus::UNCHANGED;
2895 if (isKnownNotConvergent() &&
2896 A.hasAttr(getIRPosition(), Attribute::Convergent)) {
2897 A.removeAttrs(getIRPosition(), {Attribute::Convergent});
2898 return ChangeStatus::CHANGED;
2900 return ChangeStatus::UNCHANGED;
2917 const size_t UBPrevSize = KnownUBInsts.size();
2918 const size_t NoUBPrevSize = AssumedNoUBInsts.size();
2922 if (
I.isVolatile() &&
I.mayWriteToMemory())
2926 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
2935 "Expected pointer operand of memory accessing instruction");
2939 std::optional<Value *> SimplifiedPtrOp =
2940 stopOnUndefOrAssumed(
A, PtrOp, &
I);
2941 if (!SimplifiedPtrOp || !*SimplifiedPtrOp)
2943 const Value *PtrOpVal = *SimplifiedPtrOp;
2948 if (!isa<ConstantPointerNull>(PtrOpVal)) {
2949 AssumedNoUBInsts.insert(&
I);
2961 AssumedNoUBInsts.insert(&
I);
2963 KnownUBInsts.insert(&
I);
2972 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
2976 auto *BrInst = cast<BranchInst>(&
I);
2979 if (BrInst->isUnconditional())
2984 std::optional<Value *> SimplifiedCond =
2985 stopOnUndefOrAssumed(
A, BrInst->getCondition(), BrInst);
2986 if (!SimplifiedCond || !*SimplifiedCond)
2988 AssumedNoUBInsts.insert(&
I);
2996 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
3005 for (
unsigned idx = 0; idx < CB.
arg_size(); idx++) {
3011 if (idx >=
Callee->arg_size())
3023 bool IsKnownNoUndef;
3024 AA::hasAssumedIRAttr<Attribute::NoUndef>(
3025 A,
this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNoUndef);
3026 if (!IsKnownNoUndef)
3028 bool UsedAssumedInformation =
false;
3029 std::optional<Value *> SimplifiedVal =
3032 if (UsedAssumedInformation)
3034 if (SimplifiedVal && !*SimplifiedVal)
3036 if (!SimplifiedVal || isa<UndefValue>(**SimplifiedVal)) {
3037 KnownUBInsts.insert(&
I);
3041 !isa<ConstantPointerNull>(**SimplifiedVal))
3043 bool IsKnownNonNull;
3044 AA::hasAssumedIRAttr<Attribute::NonNull>(
3045 A,
this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNonNull);
3047 KnownUBInsts.insert(&
I);
3053 auto &RI = cast<ReturnInst>(
I);
3056 std::optional<Value *> SimplifiedRetValue =
3057 stopOnUndefOrAssumed(
A, RI.getReturnValue(), &
I);
3058 if (!SimplifiedRetValue || !*SimplifiedRetValue)
3075 if (isa<ConstantPointerNull>(*SimplifiedRetValue)) {
3076 bool IsKnownNonNull;
3077 AA::hasAssumedIRAttr<Attribute::NonNull>(
3081 KnownUBInsts.insert(&
I);
3087 bool UsedAssumedInformation =
false;
3088 A.checkForAllInstructions(InspectMemAccessInstForUB, *
this,
3089 {Instruction::Load, Instruction::Store,
3090 Instruction::AtomicCmpXchg,
3091 Instruction::AtomicRMW},
3092 UsedAssumedInformation,
3094 A.checkForAllInstructions(InspectBrInstForUB, *
this, {Instruction::Br},
3095 UsedAssumedInformation,
3097 A.checkForAllCallLikeInstructions(InspectCallSiteForUB, *
this,
3098 UsedAssumedInformation);
3102 if (!getAnchorScope()->getReturnType()->isVoidTy()) {
3104 if (!
A.isAssumedDead(ReturnIRP,
this,
nullptr, UsedAssumedInformation)) {
3105 bool IsKnownNoUndef;
3106 AA::hasAssumedIRAttr<Attribute::NoUndef>(
3107 A,
this, ReturnIRP, DepClassTy::NONE, IsKnownNoUndef);
3109 A.checkForAllInstructions(InspectReturnInstForUB, *
this,
3110 {Instruction::Ret}, UsedAssumedInformation,
3115 if (NoUBPrevSize != AssumedNoUBInsts.size() ||
3116 UBPrevSize != KnownUBInsts.size())
3117 return ChangeStatus::CHANGED;
3118 return ChangeStatus::UNCHANGED;
3122 return KnownUBInsts.count(
I);
3125 bool isAssumedToCauseUB(
Instruction *
I)
const override {
3132 switch (
I->getOpcode()) {
3133 case Instruction::Load:
3134 case Instruction::Store:
3135 case Instruction::AtomicCmpXchg:
3136 case Instruction::AtomicRMW:
3137 return !AssumedNoUBInsts.count(
I);
3138 case Instruction::Br: {
3139 auto *BrInst = cast<BranchInst>(
I);
3140 if (BrInst->isUnconditional())
3142 return !AssumedNoUBInsts.count(
I);
3151 if (KnownUBInsts.empty())
3152 return ChangeStatus::UNCHANGED;
3154 A.changeToUnreachableAfterManifest(
I);
3155 return ChangeStatus::CHANGED;
3159 const std::string getAsStr(
Attributor *
A)
const override {
3160 return getAssumed() ?
"undefined-behavior" :
"no-ub";
3205 bool UsedAssumedInformation =
false;
3206 std::optional<Value *> SimplifiedV =
3209 if (!UsedAssumedInformation) {
3214 KnownUBInsts.insert(
I);
3215 return std::nullopt;
3221 if (isa<UndefValue>(V)) {
3222 KnownUBInsts.insert(
I);
3223 return std::nullopt;
3229struct AAUndefinedBehaviorFunction final : AAUndefinedBehaviorImpl {
3231 : AAUndefinedBehaviorImpl(IRP,
A) {}
3234 void trackStatistics()
const override {
3236 "Number of instructions known to have UB");
3238 KnownUBInsts.size();
3259 if (SCCI.hasCycle())
3269 for (
auto *L : LI->getLoopsInPreorder()) {
3283 assert(!AA::hasAssumedIRAttr<Attribute::WillReturn>(
3284 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
3289 bool isImpliedByMustprogressAndReadonly(
Attributor &
A,
bool KnownOnly) {
3290 if (!
A.hasAttr(getIRPosition(), {Attribute::MustProgress}))
3295 return IsKnown || !KnownOnly;
3301 if (isImpliedByMustprogressAndReadonly(
A,
false))
3302 return ChangeStatus::UNCHANGED;
3307 if (AA::hasAssumedIRAttr<Attribute::WillReturn>(
3308 A,
this, IPos, DepClassTy::REQUIRED, IsKnown)) {
3314 bool IsKnownNoRecurse;
3315 return AA::hasAssumedIRAttr<Attribute::NoRecurse>(
3316 A,
this, IPos, DepClassTy::REQUIRED, IsKnownNoRecurse);
3319 bool UsedAssumedInformation =
false;
3320 if (!
A.checkForAllCallLikeInstructions(CheckForWillReturn, *
this,
3321 UsedAssumedInformation))
3322 return indicatePessimisticFixpoint();
3324 return ChangeStatus::UNCHANGED;
3328 const std::string getAsStr(
Attributor *
A)
const override {
3329 return getAssumed() ?
"willreturn" :
"may-noreturn";
3333struct AAWillReturnFunction final : AAWillReturnImpl {
3335 : AAWillReturnImpl(IRP,
A) {}
3339 AAWillReturnImpl::initialize(
A);
3342 assert(
F &&
"Did expect an anchor function");
3343 if (
F->isDeclaration() || mayContainUnboundedCycle(*
F,
A))
3344 indicatePessimisticFixpoint();
3352struct AAWillReturnCallSite final
3353 : AACalleeToCallSite<AAWillReturn, AAWillReturnImpl> {
3355 : AACalleeToCallSite<
AAWillReturn, AAWillReturnImpl>(IRP,
A) {}
3359 if (isImpliedByMustprogressAndReadonly(
A,
false))
3360 return ChangeStatus::UNCHANGED;
3362 return AACalleeToCallSite::updateImpl(
A);
3384 const ToTy *To =
nullptr;
3394 assert(Hash == 0 &&
"Computed hash twice!");
3398 detail::combineHashValue(PairDMI ::getHashValue({
From, To}),
3399 InstSetDMI::getHashValue(ExclusionSet));
3409 :
From(&
From), To(&To), ExclusionSet(ES) {
3411 if (!ES || ES->
empty()) {
3412 ExclusionSet =
nullptr;
3413 }
else if (MakeUnique) {
3414 ExclusionSet =
A.getInfoCache().getOrCreateUniqueBlockExecutionSet(ES);
3419 :
From(RQI.
From), To(RQI.To), ExclusionSet(RQI.ExclusionSet) {}
3432 return &TombstoneKey;
3439 if (!PairDMI::isEqual({
LHS->From,
LHS->To}, {
RHS->From,
RHS->To}))
3441 return InstSetDMI::isEqual(
LHS->ExclusionSet,
RHS->ExclusionSet);
3445#define DefineKeys(ToTy) \
3447 ReachabilityQueryInfo<ToTy> \
3448 DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::EmptyKey = \
3449 ReachabilityQueryInfo<ToTy>( \
3450 DenseMapInfo<const Instruction *>::getEmptyKey(), \
3451 DenseMapInfo<const ToTy *>::getEmptyKey()); \
3453 ReachabilityQueryInfo<ToTy> \
3454 DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::TombstoneKey = \
3455 ReachabilityQueryInfo<ToTy>( \
3456 DenseMapInfo<const Instruction *>::getTombstoneKey(), \
3457 DenseMapInfo<const ToTy *>::getTombstoneKey());
3466template <
typename BaseTy,
typename ToTy>
3467struct CachedReachabilityAA :
public BaseTy {
3473 bool isQueryAA()
const override {
return true; }
3478 for (
unsigned u = 0,
e = QueryVector.size();
u <
e; ++
u) {
3479 RQITy *RQI = QueryVector[
u];
3480 if (RQI->Result == RQITy::Reachable::No &&
3482 Changed = ChangeStatus::CHANGED;
3488 bool IsTemporaryRQI) = 0;
3491 RQITy &RQI,
bool UsedExclusionSet,
bool IsTemporaryRQI) {
3496 QueryCache.erase(&RQI);
3502 if (
Result == RQITy::Reachable::Yes || !UsedExclusionSet) {
3503 RQITy PlainRQI(RQI.From, RQI.To);
3504 if (!QueryCache.count(&PlainRQI)) {
3505 RQITy *RQIPtr =
new (
A.Allocator) RQITy(RQI.From, RQI.To);
3507 QueryVector.push_back(RQIPtr);
3508 QueryCache.insert(RQIPtr);
3513 if (IsTemporaryRQI &&
Result != RQITy::Reachable::Yes && UsedExclusionSet) {
3514 assert((!RQI.ExclusionSet || !RQI.ExclusionSet->empty()) &&
3515 "Did not expect empty set!");
3516 RQITy *RQIPtr =
new (
A.Allocator)
3517 RQITy(
A, *RQI.From, *RQI.To, RQI.ExclusionSet,
true);
3518 assert(RQIPtr->Result == RQITy::Reachable::No &&
"Already reachable?");
3520 assert(!QueryCache.count(RQIPtr));
3521 QueryVector.push_back(RQIPtr);
3522 QueryCache.insert(RQIPtr);
3525 if (
Result == RQITy::Reachable::No && IsTemporaryRQI)
3526 A.registerForUpdate(*
this);
3527 return Result == RQITy::Reachable::Yes;
3530 const std::string getAsStr(
Attributor *
A)
const override {
3532 return "#queries(" + std::to_string(QueryVector.size()) +
")";
3535 bool checkQueryCache(
Attributor &
A, RQITy &StackRQI,
3536 typename RQITy::Reachable &
Result) {
3537 if (!this->getState().isValidState()) {
3538 Result = RQITy::Reachable::Yes;
3544 if (StackRQI.ExclusionSet) {
3545 RQITy PlainRQI(StackRQI.From, StackRQI.To);
3546 auto It = QueryCache.find(&PlainRQI);
3547 if (It != QueryCache.end() && (*It)->Result == RQITy::Reachable::No) {
3548 Result = RQITy::Reachable::No;
3553 auto It = QueryCache.find(&StackRQI);
3554 if (It != QueryCache.end()) {
3561 QueryCache.insert(&StackRQI);
3570struct AAIntraFnReachabilityFunction final
3571 :
public CachedReachabilityAA<AAIntraFnReachability, Instruction> {
3572 using Base = CachedReachabilityAA<AAIntraFnReachability, Instruction>;
3579 bool isAssumedReachable(
3582 auto *NonConstThis =
const_cast<AAIntraFnReachabilityFunction *
>(
this);
3586 RQITy StackRQI(
A,
From, To, ExclusionSet,
false);
3587 typename RQITy::Reachable
Result;
3588 if (!NonConstThis->checkQueryCache(
A, StackRQI, Result))
3589 return NonConstThis->isReachableImpl(
A, StackRQI,
3591 return Result == RQITy::Reachable::Yes;
3598 A.getAAFor<
AAIsDead>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
3601 [&](
const auto &DeadEdge) {
3602 return LivenessAA->isEdgeDead(DeadEdge.first,
3606 return LivenessAA->isAssumedDead(BB);
3608 return ChangeStatus::UNCHANGED;
3612 return Base::updateImpl(
A);
3616 bool IsTemporaryRQI)
override {
3618 bool UsedExclusionSet =
false;
3623 while (IP && IP != &To) {
3624 if (ExclusionSet && IP != Origin && ExclusionSet->
count(IP)) {
3625 UsedExclusionSet =
true;
3636 "Not an intra-procedural query!");
3640 if (FromBB == ToBB &&
3641 WillReachInBlock(*RQI.From, *RQI.To, RQI.ExclusionSet))
3642 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3647 if (!WillReachInBlock(ToBB->
front(), *RQI.To, RQI.ExclusionSet))
3648 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3653 if (RQI.ExclusionSet)
3654 for (
auto *
I : *RQI.ExclusionSet)
3655 if (
I->getFunction() == Fn)
3656 ExclusionBlocks.
insert(
I->getParent());
3659 if (ExclusionBlocks.
count(FromBB) &&
3662 return rememberResult(
A, RQITy::Reachable::No, RQI,
true, IsTemporaryRQI);
3665 A.getAAFor<
AAIsDead>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
3666 if (LivenessAA && LivenessAA->isAssumedDead(ToBB)) {
3667 DeadBlocks.insert(ToBB);
3668 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3677 while (!Worklist.
empty()) {
3679 if (!Visited.
insert(BB).second)
3682 if (LivenessAA && LivenessAA->isEdgeDead(BB, SuccBB)) {
3683 LocalDeadEdges.
insert({BB, SuccBB});
3688 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3691 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3694 if (ExclusionBlocks.
count(SuccBB)) {
3695 UsedExclusionSet =
true;
3702 DeadEdges.insert(LocalDeadEdges.
begin(), LocalDeadEdges.
end());
3703 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3708 void trackStatistics()
const override {}
3728 bool IgnoreSubsumingPositions) {
3729 assert(ImpliedAttributeKind == Attribute::NoAlias &&
3730 "Unexpected attribute kind");
3733 if (isa<AllocaInst>(Val))
3736 IgnoreSubsumingPositions =
true;
3739 if (isa<UndefValue>(Val))
3742 if (isa<ConstantPointerNull>(Val) &&
3747 if (
A.hasAttr(IRP, {Attribute::ByVal, Attribute::NoAlias},
3748 IgnoreSubsumingPositions, Attribute::NoAlias))
3758 "Noalias is a pointer attribute");
3761 const std::string getAsStr(
Attributor *
A)
const override {
3762 return getAssumed() ?
"noalias" :
"may-alias";
3767struct AANoAliasFloating final : AANoAliasImpl {
3769 : AANoAliasImpl(IRP,
A) {}
3774 return indicatePessimisticFixpoint();
3778 void trackStatistics()
const override {
3784struct AANoAliasArgument final
3785 : AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl> {
3786 using Base = AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl>;
3798 if (AA::hasAssumedIRAttr<Attribute::NoSync>(
3800 DepClassTy::OPTIONAL, IsKnownNoSycn))
3801 return Base::updateImpl(
A);
3806 return Base::updateImpl(
A);
3810 bool UsedAssumedInformation =
false;
3811 if (
A.checkForAllCallSites(
3813 true, UsedAssumedInformation))
3814 return Base::updateImpl(
A);
3822 return indicatePessimisticFixpoint();
3829struct AANoAliasCallSiteArgument final : AANoAliasImpl {
3831 : AANoAliasImpl(IRP,
A) {}
3837 const CallBase &CB,
unsigned OtherArgNo) {
3839 if (this->getCalleeArgNo() == (
int)OtherArgNo)
3851 if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadNone()) {
3852 A.recordDependence(*CBArgMemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3859 if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadOnly() &&
3861 A.recordDependence(MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3862 A.recordDependence(*CBArgMemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3868 AAR =
A.getInfoCache().getAnalysisResultForFunction<
AAManager>(
3872 bool IsAliasing = !AAR || !AAR->
isNoAlias(&getAssociatedValue(), ArgOp);
3874 "callsite arguments: "
3875 << getAssociatedValue() <<
" " << *ArgOp <<
" => "
3876 << (IsAliasing ?
"" :
"no-") <<
"alias \n");
3881 bool isKnownNoAliasDueToNoAliasPreservation(
3901 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
3912 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
3917 bool IsKnownNoCapture;
3918 if (AA::hasAssumedIRAttr<Attribute::NoCapture>(
3920 DepClassTy::OPTIONAL, IsKnownNoCapture))
3926 A, *UserI, *getCtxI(), *
this,
nullptr,
3927 [ScopeFn](
const Function &Fn) {
return &Fn != ScopeFn; }))
3935 case UseCaptureKind::NO_CAPTURE:
3937 case UseCaptureKind::MAY_CAPTURE:
3941 case UseCaptureKind::PASSTHROUGH:
3948 bool IsKnownNoCapture;
3950 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
3951 A,
this, VIRP, DepClassTy::NONE, IsKnownNoCapture,
false, &NoCaptureAA);
3952 if (!IsAssumedNoCapture &&
3954 if (!
A.checkForAllUses(UsePred, *
this, getAssociatedValue())) {
3956 dbgs() <<
"[AANoAliasCSArg] " << getAssociatedValue()
3957 <<
" cannot be noalias as it is potentially captured\n");
3962 A.recordDependence(*NoCaptureAA, *
this, DepClassTy::OPTIONAL);
3967 const auto &CB = cast<CallBase>(getAnchorValue());
3968 for (
unsigned OtherArgNo = 0; OtherArgNo < CB.
arg_size(); OtherArgNo++)
3969 if (mayAliasWithArgument(
A, AAR, MemBehaviorAA, CB, OtherArgNo))
3979 auto *MemBehaviorAA =
3982 A.recordDependence(*MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3983 return ChangeStatus::UNCHANGED;
3986 bool IsKnownNoAlias;
3988 if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
3989 A,
this, VIRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
3991 <<
" is not no-alias at the definition\n");
3992 return indicatePessimisticFixpoint();
3996 if (MemBehaviorAA &&
3997 isKnownNoAliasDueToNoAliasPreservation(
A, AAR, *MemBehaviorAA)) {
3999 dbgs() <<
"[AANoAlias] No-Alias deduced via no-alias preservation\n");
4000 return ChangeStatus::UNCHANGED;
4003 return indicatePessimisticFixpoint();
4011struct AANoAliasReturned final : AANoAliasImpl {
4013 : AANoAliasImpl(IRP,
A) {}
4018 auto CheckReturnValue = [&](
Value &RV) ->
bool {
4019 if (
Constant *
C = dyn_cast<Constant>(&RV))
4020 if (
C->isNullValue() || isa<UndefValue>(
C))
4025 if (!isa<CallBase>(&RV))
4029 bool IsKnownNoAlias;
4030 if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
4031 A,
this, RVPos, DepClassTy::REQUIRED, IsKnownNoAlias))
4034 bool IsKnownNoCapture;
4036 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
4037 A,
this, RVPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
4039 return IsAssumedNoCapture ||
4043 if (!
A.checkForAllReturnedValues(CheckReturnValue, *
this))
4044 return indicatePessimisticFixpoint();
4046 return ChangeStatus::UNCHANGED;
4054struct AANoAliasCallSiteReturned final
4055 : AACalleeToCallSite<AANoAlias, AANoAliasImpl> {
4057 : AACalleeToCallSite<
AANoAlias, AANoAliasImpl>(IRP,
A) {}
4067struct AAIsDeadValueImpl :
public AAIsDead {
4071 bool isAssumedDead()
const override {
return isAssumed(IS_DEAD); }
4074 bool isKnownDead()
const override {
return isKnown(IS_DEAD); }
4077 bool isAssumedDead(
const BasicBlock *BB)
const override {
return false; }
4080 bool isKnownDead(
const BasicBlock *BB)
const override {
return false; }
4083 bool isAssumedDead(
const Instruction *
I)
const override {
4084 return I == getCtxI() && isAssumedDead();
4088 bool isKnownDead(
const Instruction *
I)
const override {
4089 return isAssumedDead(
I) && isKnownDead();
4093 const std::string getAsStr(
Attributor *
A)
const override {
4094 return isAssumedDead() ?
"assumed-dead" :
"assumed-live";
4100 if (
V.getType()->isVoidTy() ||
V.use_empty())
4104 if (!isa<Constant>(V)) {
4105 if (
auto *
I = dyn_cast<Instruction>(&V))
4106 if (!
A.isRunOn(*
I->getFunction()))
4108 bool UsedAssumedInformation =
false;
4109 std::optional<Constant *>
C =
4110 A.getAssumedConstant(V, *
this, UsedAssumedInformation);
4115 auto UsePred = [&](
const Use &
U,
bool &Follow) {
return false; };
4120 return A.checkForAllUses(UsePred, *
this, V,
false,
4121 DepClassTy::REQUIRED,
4130 auto *CB = dyn_cast<CallBase>(
I);
4131 if (!CB || isa<IntrinsicInst>(CB))
4136 bool IsKnownNoUnwind;
4137 if (!AA::hasAssumedIRAttr<Attribute::NoUnwind>(
4138 A,
this, CallIRP, DepClassTy::OPTIONAL, IsKnownNoUnwind))
4146struct AAIsDeadFloating :
public AAIsDeadValueImpl {
4148 : AAIsDeadValueImpl(IRP,
A) {}
4152 AAIsDeadValueImpl::initialize(
A);
4154 if (isa<UndefValue>(getAssociatedValue())) {
4155 indicatePessimisticFixpoint();
4159 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
4160 if (!isAssumedSideEffectFree(
A,
I)) {
4161 if (!isa_and_nonnull<StoreInst>(
I) && !isa_and_nonnull<FenceInst>(
I))
4162 indicatePessimisticFixpoint();
4164 removeAssumedBits(HAS_NO_EFFECT);
4171 if (!ExecDomainAA || !ExecDomainAA->isNoOpFence(FI))
4173 A.recordDependence(*ExecDomainAA, *
this, DepClassTy::OPTIONAL);
4180 if (
SI.isVolatile())
4186 bool UsedAssumedInformation =
false;
4187 if (!AssumeOnlyInst) {
4188 PotentialCopies.clear();
4190 UsedAssumedInformation)) {
4193 <<
"[AAIsDead] Could not determine potential copies of store!\n");
4197 LLVM_DEBUG(
dbgs() <<
"[AAIsDead] Store has " << PotentialCopies.size()
4198 <<
" potential copies.\n");
4203 UsedAssumedInformation))
4205 if (
auto *LI = dyn_cast<LoadInst>(V)) {
4207 auto &UserI = cast<Instruction>(*U.getUser());
4208 if (InfoCache.isOnlyUsedByAssume(UserI)) {
4210 AssumeOnlyInst->insert(&UserI);
4213 return A.isAssumedDead(U,
this,
nullptr, UsedAssumedInformation);
4219 <<
" is assumed live!\n");
4225 const std::string getAsStr(
Attributor *
A)
const override {
4226 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
4227 if (isa_and_nonnull<StoreInst>(
I))
4229 return "assumed-dead-store";
4230 if (isa_and_nonnull<FenceInst>(
I))
4232 return "assumed-dead-fence";
4233 return AAIsDeadValueImpl::getAsStr(
A);
4238 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
4239 if (
auto *SI = dyn_cast_or_null<StoreInst>(
I)) {
4240 if (!isDeadStore(
A, *SI))
4241 return indicatePessimisticFixpoint();
4242 }
else if (
auto *FI = dyn_cast_or_null<FenceInst>(
I)) {
4243 if (!isDeadFence(
A, *FI))
4244 return indicatePessimisticFixpoint();
4246 if (!isAssumedSideEffectFree(
A,
I))
4247 return indicatePessimisticFixpoint();
4248 if (!areAllUsesAssumedDead(
A, getAssociatedValue()))
4249 return indicatePessimisticFixpoint();
4254 bool isRemovableStore()
const override {
4255 return isAssumed(IS_REMOVABLE) && isa<StoreInst>(&getAssociatedValue());
4260 Value &
V = getAssociatedValue();
4261 if (
auto *
I = dyn_cast<Instruction>(&V)) {
4266 if (
auto *SI = dyn_cast<StoreInst>(
I)) {
4268 bool IsDead = isDeadStore(
A, *SI, &AssumeOnlyInst);
4271 A.deleteAfterManifest(*
I);
4272 for (
size_t i = 0; i < AssumeOnlyInst.
size(); ++i) {
4274 for (
auto *Usr : AOI->
users())
4275 AssumeOnlyInst.
insert(cast<Instruction>(Usr));
4276 A.deleteAfterManifest(*AOI);
4280 if (
auto *FI = dyn_cast<FenceInst>(
I)) {
4282 A.deleteAfterManifest(*FI);
4285 if (isAssumedSideEffectFree(
A,
I) && !isa<InvokeInst>(
I)) {
4286 A.deleteAfterManifest(*
I);
4294 void trackStatistics()
const override {
4303struct AAIsDeadArgument :
public AAIsDeadFloating {
4305 : AAIsDeadFloating(IRP,
A) {}
4309 Argument &Arg = *getAssociatedArgument();
4310 if (
A.isValidFunctionSignatureRewrite(Arg, {}))
4311 if (
A.registerFunctionSignatureRewrite(
4315 return ChangeStatus::CHANGED;
4317 return ChangeStatus::UNCHANGED;
4324struct AAIsDeadCallSiteArgument :
public AAIsDeadValueImpl {
4326 : AAIsDeadValueImpl(IRP,
A) {}
4330 AAIsDeadValueImpl::initialize(
A);
4331 if (isa<UndefValue>(getAssociatedValue()))
4332 indicatePessimisticFixpoint();
4341 Argument *Arg = getAssociatedArgument();
4343 return indicatePessimisticFixpoint();
4345 auto *ArgAA =
A.getAAFor<
AAIsDead>(*
this, ArgPos, DepClassTy::REQUIRED);
4347 return indicatePessimisticFixpoint();
4353 CallBase &CB = cast<CallBase>(getAnchorValue());
4355 assert(!isa<UndefValue>(
U.get()) &&
4356 "Expected undef values to be filtered out!");
4358 if (
A.changeUseAfterManifest(U, UV))
4359 return ChangeStatus::CHANGED;
4360 return ChangeStatus::UNCHANGED;
4367struct AAIsDeadCallSiteReturned :
public AAIsDeadFloating {
4369 : AAIsDeadFloating(IRP,
A) {}
4372 bool isAssumedDead()
const override {
4373 return AAIsDeadFloating::isAssumedDead() && IsAssumedSideEffectFree;
4378 AAIsDeadFloating::initialize(
A);
4379 if (isa<UndefValue>(getAssociatedValue())) {
4380 indicatePessimisticFixpoint();
4385 IsAssumedSideEffectFree = isAssumedSideEffectFree(
A, getCtxI());
4391 if (IsAssumedSideEffectFree && !isAssumedSideEffectFree(
A, getCtxI())) {
4392 IsAssumedSideEffectFree =
false;
4393 Changed = ChangeStatus::CHANGED;
4395 if (!areAllUsesAssumedDead(
A, getAssociatedValue()))
4396 return indicatePessimisticFixpoint();
4401 void trackStatistics()
const override {
4402 if (IsAssumedSideEffectFree)
4409 const std::string getAsStr(
Attributor *
A)
const override {
4410 return isAssumedDead()
4412 : (getAssumed() ?
"assumed-dead-users" :
"assumed-live");
4416 bool IsAssumedSideEffectFree =
true;
4419struct AAIsDeadReturned :
public AAIsDeadValueImpl {
4421 : AAIsDeadValueImpl(IRP,
A) {}
4426 bool UsedAssumedInformation =
false;
4427 A.checkForAllInstructions([](
Instruction &) {
return true; }, *
this,
4428 {Instruction::Ret}, UsedAssumedInformation);
4431 if (ACS.isCallbackCall() || !ACS.getInstruction())
4433 return areAllUsesAssumedDead(
A, *ACS.getInstruction());
4436 if (!
A.checkForAllCallSites(PredForCallSite, *
this,
true,
4437 UsedAssumedInformation))
4438 return indicatePessimisticFixpoint();
4440 return ChangeStatus::UNCHANGED;
4446 bool AnyChange =
false;
4454 bool UsedAssumedInformation =
false;
4455 A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
4456 UsedAssumedInformation);
4457 return AnyChange ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
4464struct AAIsDeadFunction :
public AAIsDead {
4470 assert(
F &&
"Did expect an anchor function");
4471 if (!isAssumedDeadInternalFunction(
A)) {
4472 ToBeExploredFrom.insert(&
F->getEntryBlock().front());
4473 assumeLive(
A,
F->getEntryBlock());
4477 bool isAssumedDeadInternalFunction(
Attributor &
A) {
4478 if (!getAnchorScope()->hasLocalLinkage())
4480 bool UsedAssumedInformation =
false;
4482 true, UsedAssumedInformation);
4486 const std::string getAsStr(
Attributor *
A)
const override {
4487 return "Live[#BB " + std::to_string(AssumedLiveBlocks.size()) +
"/" +
4488 std::to_string(getAnchorScope()->
size()) +
"][#TBEP " +
4489 std::to_string(ToBeExploredFrom.size()) +
"][#KDE " +
4490 std::to_string(KnownDeadEnds.size()) +
"]";
4495 assert(getState().isValidState() &&
4496 "Attempted to manifest an invalid state!");
4501 if (AssumedLiveBlocks.empty()) {
4502 A.deleteAfterManifest(
F);
4503 return ChangeStatus::CHANGED;
4509 bool Invoke2CallAllowed = !mayCatchAsynchronousExceptions(
F);
4511 KnownDeadEnds.set_union(ToBeExploredFrom);
4512 for (
const Instruction *DeadEndI : KnownDeadEnds) {
4513 auto *CB = dyn_cast<CallBase>(DeadEndI);
4516 bool IsKnownNoReturn;
4517 bool MayReturn = !AA::hasAssumedIRAttr<Attribute::NoReturn>(
4520 if (MayReturn && (!Invoke2CallAllowed || !isa<InvokeInst>(CB)))
4523 if (
auto *
II = dyn_cast<InvokeInst>(DeadEndI))
4524 A.registerInvokeWithDeadSuccessor(
const_cast<InvokeInst &
>(*
II));
4526 A.changeToUnreachableAfterManifest(
4527 const_cast<Instruction *
>(DeadEndI->getNextNode()));
4528 HasChanged = ChangeStatus::CHANGED;
4533 if (!AssumedLiveBlocks.count(&BB)) {
4534 A.deleteAfterManifest(BB);
4536 HasChanged = ChangeStatus::CHANGED;
4546 assert(
From->getParent() == getAnchorScope() &&
4548 "Used AAIsDead of the wrong function");
4549 return isValidState() && !AssumedLiveEdges.count(std::make_pair(
From, To));
4553 void trackStatistics()
const override {}
4556 bool isAssumedDead()
const override {
return false; }
4559 bool isKnownDead()
const override {
return false; }
4562 bool isAssumedDead(
const BasicBlock *BB)
const override {
4564 "BB must be in the same anchor scope function.");
4568 return !AssumedLiveBlocks.count(BB);
4572 bool isKnownDead(
const BasicBlock *BB)
const override {
4573 return getKnown() && isAssumedDead(BB);
4577 bool isAssumedDead(
const Instruction *
I)
const override {
4578 assert(
I->getParent()->getParent() == getAnchorScope() &&
4579 "Instruction must be in the same anchor scope function.");
4586 if (!AssumedLiveBlocks.count(
I->getParent()))
4592 if (KnownDeadEnds.count(PrevI) || ToBeExploredFrom.count(PrevI))
4600 bool isKnownDead(
const Instruction *
I)
const override {
4601 return getKnown() && isAssumedDead(
I);
4607 if (!AssumedLiveBlocks.insert(&BB).second)
4615 if (
const auto *CB = dyn_cast<CallBase>(&
I))
4617 if (
F->hasLocalLinkage())
4618 A.markLiveInternalFunction(*
F);
4642 bool IsKnownNoReturn;
4643 if (AA::hasAssumedIRAttr<Attribute::NoReturn>(
4645 return !IsKnownNoReturn;
4657 bool UsedAssumedInformation =
4658 identifyAliveSuccessors(
A, cast<CallBase>(
II), AA, AliveSuccessors);
4663 if (AAIsDeadFunction::mayCatchAsynchronousExceptions(*
II.getFunction())) {
4664 AliveSuccessors.
push_back(&
II.getUnwindDest()->front());
4668 bool IsKnownNoUnwind;
4669 if (AA::hasAssumedIRAttr<Attribute::NoUnwind>(
4671 UsedAssumedInformation |= !IsKnownNoUnwind;
4673 AliveSuccessors.
push_back(&
II.getUnwindDest()->front());
4676 return UsedAssumedInformation;
4683 bool UsedAssumedInformation =
false;
4687 std::optional<Constant *>
C =
4688 A.getAssumedConstant(*BI.
getCondition(), AA, UsedAssumedInformation);
4689 if (!
C || isa_and_nonnull<UndefValue>(*
C)) {
4691 }
else if (isa_and_nonnull<ConstantInt>(*
C)) {
4693 BI.
getSuccessor(1 - cast<ConstantInt>(*C)->getValue().getZExtValue());
4698 UsedAssumedInformation =
false;
4701 return UsedAssumedInformation;
4708 bool UsedAssumedInformation =
false;
4712 UsedAssumedInformation)) {
4719 if (Values.
empty() ||
4720 (Values.
size() == 1 &&
4721 isa_and_nonnull<UndefValue>(Values.
front().getValue()))) {
4723 return UsedAssumedInformation;
4726 Type &Ty = *
SI.getCondition()->getType();
4728 auto CheckForConstantInt = [&](
Value *
V) {
4729 if (
auto *CI = dyn_cast_if_present<ConstantInt>(
AA::getWithType(*V, Ty))) {
4737 return CheckForConstantInt(
VAC.getValue());
4741 return UsedAssumedInformation;
4744 unsigned MatchedCases = 0;
4745 for (
const auto &CaseIt :
SI.cases()) {
4746 if (
Constants.count(CaseIt.getCaseValue())) {
4748 AliveSuccessors.
push_back(&CaseIt.getCaseSuccessor()->front());
4755 AliveSuccessors.
push_back(&
SI.getDefaultDest()->front());
4756 return UsedAssumedInformation;
4762 if (AssumedLiveBlocks.empty()) {
4763 if (isAssumedDeadInternalFunction(
A))
4767 ToBeExploredFrom.insert(&
F->getEntryBlock().front());
4768 assumeLive(
A,
F->getEntryBlock());
4772 LLVM_DEBUG(
dbgs() <<
"[AAIsDead] Live [" << AssumedLiveBlocks.size() <<
"/"
4773 << getAnchorScope()->
size() <<
"] BBs and "
4774 << ToBeExploredFrom.size() <<
" exploration points and "
4775 << KnownDeadEnds.size() <<
" known dead ends\n");
4780 ToBeExploredFrom.end());
4781 decltype(ToBeExploredFrom) NewToBeExploredFrom;
4784 while (!Worklist.
empty()) {
4790 while (!
I->isTerminator() && !isa<CallBase>(
I))
4791 I =
I->getNextNode();
4793 AliveSuccessors.
clear();
4795 bool UsedAssumedInformation =
false;
4796 switch (
I->getOpcode()) {
4800 "Expected non-terminators to be handled already!");
4804 case Instruction::Call:
4805 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<CallInst>(*
I),
4806 *
this, AliveSuccessors);
4808 case Instruction::Invoke:
4809 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<InvokeInst>(*
I),
4810 *
this, AliveSuccessors);
4812 case Instruction::Br:
4813 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<BranchInst>(*
I),
4814 *
this, AliveSuccessors);
4816 case Instruction::Switch:
4817 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<SwitchInst>(*
I),
4818 *
this, AliveSuccessors);
4822 if (UsedAssumedInformation) {
4823 NewToBeExploredFrom.insert(
I);
4824 }
else if (AliveSuccessors.
empty() ||
4825 (
I->isTerminator() &&
4826 AliveSuccessors.
size() <
I->getNumSuccessors())) {
4827 if (KnownDeadEnds.insert(
I))
4832 << AliveSuccessors.
size() <<
" UsedAssumedInformation: "
4833 << UsedAssumedInformation <<
"\n");
4835 for (
const Instruction *AliveSuccessor : AliveSuccessors) {
4836 if (!
I->isTerminator()) {
4837 assert(AliveSuccessors.size() == 1 &&
4838 "Non-terminator expected to have a single successor!");
4842 auto Edge = std::make_pair(
I->getParent(), AliveSuccessor->getParent());
4843 if (AssumedLiveEdges.insert(Edge).second)
4845 if (assumeLive(
A, *AliveSuccessor->getParent()))
4852 if (NewToBeExploredFrom.size() != ToBeExploredFrom.size() ||
4854 return !ToBeExploredFrom.count(I);
4857 ToBeExploredFrom = std::move(NewToBeExploredFrom);
4866 if (ToBeExploredFrom.empty() &&
4867 getAnchorScope()->
size() == AssumedLiveBlocks.size() &&
4869 return DeadEndI->isTerminator() && DeadEndI->getNumSuccessors() == 0;
4871 return indicatePessimisticFixpoint();
4876struct AAIsDeadCallSite final : AAIsDeadFunction {
4878 : AAIsDeadFunction(IRP,
A) {}
4887 "supported for call sites yet!");
4892 return indicatePessimisticFixpoint();
4896 void trackStatistics()
const override {}
4910 Value &
V = *getAssociatedValue().stripPointerCasts();
4912 A.getAttrs(getIRPosition(),
4913 {Attribute::Dereferenceable, Attribute::DereferenceableOrNull},
4916 takeKnownDerefBytesMaximum(Attr.getValueAsInt());
4919 bool IsKnownNonNull;
4920 AA::hasAssumedIRAttr<Attribute::NonNull>(
4921 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNonNull);
4923 bool CanBeNull, CanBeFreed;
4924 takeKnownDerefBytesMaximum(
V.getPointerDereferenceableBytes(
4925 A.getDataLayout(), CanBeNull, CanBeFreed));
4928 followUsesInMBEC(*
this,
A, getState(), *CtxI);
4933 StateType &getState()
override {
return *
this; }
4934 const StateType &getState()
const override {
return *
this; }
4940 const Value *UseV =
U->get();
4945 if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
I->isVolatile())
4950 Loc->Ptr,
Offset,
A.getDataLayout(),
true);
4951 if (
Base &&
Base == &getAssociatedValue())
4952 State.addAccessedBytes(
Offset, Loc->Size.getValue());
4958 bool IsNonNull =
false;
4959 bool TrackUse =
false;
4960 int64_t DerefBytes = getKnownNonNullAndDerefBytesForUse(
4961 A, *
this, getAssociatedValue(), U,
I, IsNonNull, TrackUse);
4962 LLVM_DEBUG(
dbgs() <<
"[AADereferenceable] Deref bytes: " << DerefBytes
4963 <<
" for instruction " << *
I <<
"\n");
4965 addAccessedBytesForUse(
A, U,
I, State);
4966 State.takeKnownDerefBytesMaximum(DerefBytes);
4973 bool IsKnownNonNull;
4974 bool IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
4975 A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
4976 if (IsAssumedNonNull &&
4977 A.hasAttr(getIRPosition(), Attribute::DereferenceableOrNull)) {
4978 A.removeAttrs(getIRPosition(), {Attribute::DereferenceableOrNull});
4979 return ChangeStatus::CHANGED;
4987 bool IsKnownNonNull;
4988 bool IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
4989 A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
4990 if (IsAssumedNonNull)
4992 Ctx, getAssumedDereferenceableBytes()));
4995 Ctx, getAssumedDereferenceableBytes()));
4999 const std::string getAsStr(
Attributor *
A)
const override {
5000 if (!getAssumedDereferenceableBytes())
5001 return "unknown-dereferenceable";
5002 bool IsKnownNonNull;
5003 bool IsAssumedNonNull =
false;
5005 IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
5006 *
A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
5007 return std::string(
"dereferenceable") +
5008 (IsAssumedNonNull ?
"" :
"_or_null") +
5009 (isAssumedGlobal() ?
"_globally" :
"") +
"<" +
5010 std::to_string(getKnownDereferenceableBytes()) +
"-" +
5011 std::to_string(getAssumedDereferenceableBytes()) +
">" +
5012 (!
A ?
" [non-null is unknown]" :
"");
5017struct AADereferenceableFloating : AADereferenceableImpl {
5019 : AADereferenceableImpl(IRP,
A) {}
5024 bool UsedAssumedInformation =
false;
5026 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
5028 Values.
push_back({getAssociatedValue(), getCtxI()});
5031 Stripped = Values.
size() != 1 ||
5032 Values.
front().getValue() != &getAssociatedValue();
5038 auto VisitValueCB = [&](
const Value &
V) ->
bool {
5040 DL.getIndexSizeInBits(
V.getType()->getPointerAddressSpace());
5048 int64_t DerefBytes = 0;
5049 if (!AA || (!Stripped &&
this == AA)) {
5052 bool CanBeNull, CanBeFreed;
5054 Base->getPointerDereferenceableBytes(
DL, CanBeNull, CanBeFreed);
5055 T.GlobalState.indicatePessimisticFixpoint();
5058 DerefBytes =
DS.DerefBytesState.getAssumed();
5059 T.GlobalState &=
DS.GlobalState;
5065 int64_t OffsetSExt =
Offset.getSExtValue();
5069 T.takeAssumedDerefBytesMinimum(
5070 std::max(int64_t(0), DerefBytes - OffsetSExt));
5075 T.takeKnownDerefBytesMaximum(
5076 std::max(int64_t(0), DerefBytes - OffsetSExt));
5077 T.indicatePessimisticFixpoint();
5078 }
else if (OffsetSExt > 0) {
5084 T.indicatePessimisticFixpoint();
5088 return T.isValidState();
5091 for (
const auto &VAC : Values)
5092 if (!VisitValueCB(*
VAC.getValue()))
5093 return indicatePessimisticFixpoint();
5099 void trackStatistics()
const override {
5105struct AADereferenceableReturned final
5106 : AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl> {
5108 AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl>;
5113 void trackStatistics()
const override {
5119struct AADereferenceableArgument final
5120 : AAArgumentFromCallSiteArguments<AADereferenceable,
5121 AADereferenceableImpl> {
5123 AAArgumentFromCallSiteArguments<AADereferenceable, AADereferenceableImpl>;
5128 void trackStatistics()
const override {
5134struct AADereferenceableCallSiteArgument final : AADereferenceableFloating {
5136 : AADereferenceableFloating(IRP,
A) {}
5139 void trackStatistics()
const override {
5145struct AADereferenceableCallSiteReturned final
5146 : AACalleeToCallSite<AADereferenceable, AADereferenceableImpl> {
5147 using Base = AACalleeToCallSite<AADereferenceable, AADereferenceableImpl>;
5152 void trackStatistics()
const override {
5162 Value &AssociatedValue,
const Use *U,
5166 if (isa<CastInst>(
I)) {
5168 TrackUse = !isa<PtrToIntInst>(
I);
5171 if (
auto *
GEP = dyn_cast<GetElementPtrInst>(
I)) {
5172 if (
GEP->hasAllConstantIndices())
5178 if (
const auto *CB = dyn_cast<CallBase>(
I)) {
5192 const Value *UseV =
U->get();
5193 if (
auto *SI = dyn_cast<StoreInst>(
I)) {
5194 if (
SI->getPointerOperand() == UseV)
5195 MA =
SI->getAlign();
5196 }
else if (
auto *LI = dyn_cast<LoadInst>(
I)) {
5197 if (LI->getPointerOperand() == UseV)
5198 MA = LI->getAlign();
5199 }
else if (
auto *AI = dyn_cast<AtomicRMWInst>(
I)) {
5200 if (AI->getPointerOperand() == UseV)
5201 MA = AI->getAlign();
5202 }
else if (
auto *AI = dyn_cast<AtomicCmpXchgInst>(
I)) {
5203 if (AI->getPointerOperand() == UseV)
5204 MA = AI->getAlign();
5210 unsigned Alignment = MA->value();
5214 if (
Base == &AssociatedValue) {
5233 A.getAttrs(getIRPosition(), {Attribute::Alignment},
Attrs);
5235 takeKnownMaximum(Attr.getValueAsInt());
5237 Value &
V = *getAssociatedValue().stripPointerCasts();
5238 takeKnownMaximum(
V.getPointerAlignment(
A.getDataLayout()).value());
5241 followUsesInMBEC(*
this,
A, getState(), *CtxI);
5246 ChangeStatus LoadStoreChanged = ChangeStatus::UNCHANGED;
5249 Value &AssociatedValue = getAssociatedValue();
5250 for (
const Use &U : AssociatedValue.
uses()) {
5251 if (
auto *SI = dyn_cast<StoreInst>(
U.getUser())) {
5252 if (
SI->getPointerOperand() == &AssociatedValue)
5253 if (
SI->getAlign() < getAssumedAlign()) {
5255 "Number of times alignment added to a store");
5256 SI->setAlignment(getAssumedAlign());
5257 LoadStoreChanged = ChangeStatus::CHANGED;
5259 }
else if (
auto *LI = dyn_cast<LoadInst>(
U.getUser())) {
5260 if (LI->getPointerOperand() == &AssociatedValue)
5261 if (LI->getAlign() < getAssumedAlign()) {
5262 LI->setAlignment(getAssumedAlign());
5264 "Number of times alignment added to a load");
5265 LoadStoreChanged = ChangeStatus::CHANGED;
5272 Align InheritAlign =
5273 getAssociatedValue().getPointerAlignment(
A.getDataLayout());
5274 if (InheritAlign >= getAssumedAlign())
5275 return LoadStoreChanged;
5276 return Changed | LoadStoreChanged;
5286 if (getAssumedAlign() > 1)
5294 bool TrackUse =
false;
5296 unsigned int KnownAlign =
5297 getKnownAlignForUse(
A, *
this, getAssociatedValue(), U,
I, TrackUse);
5298 State.takeKnownMaximum(KnownAlign);
5304 const std::string getAsStr(
Attributor *
A)
const override {
5305 return "align<" + std::to_string(getKnownAlign().
value()) +
"-" +
5306 std::to_string(getAssumedAlign().
value()) +
">";
5311struct AAAlignFloating : AAAlignImpl {
5319 bool UsedAssumedInformation =
false;
5321 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
5323 Values.
push_back({getAssociatedValue(), getCtxI()});
5326 Stripped = Values.
size() != 1 ||
5327 Values.
front().getValue() != &getAssociatedValue();
5331 auto VisitValueCB = [&](
Value &
V) ->
bool {
5332 if (isa<UndefValue>(V) || isa<ConstantPointerNull>(V))
5335 DepClassTy::REQUIRED);
5336 if (!AA || (!Stripped &&
this == AA)) {
5338 unsigned Alignment = 1;
5351 Alignment =
V.getPointerAlignment(
DL).value();
5354 T.takeKnownMaximum(Alignment);
5355 T.indicatePessimisticFixpoint();
5361 return T.isValidState();
5364 for (
const auto &VAC : Values) {
5365 if (!VisitValueCB(*
VAC.getValue()))
5366 return indicatePessimisticFixpoint();
5379struct AAAlignReturned final
5380 : AAReturnedFromReturnedValues<AAAlign, AAAlignImpl> {
5381 using Base = AAReturnedFromReturnedValues<AAAlign, AAAlignImpl>;
5389struct AAAlignArgument final
5390 : AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl> {
5391 using Base = AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl>;
5399 if (
A.getInfoCache().isInvolvedInMustTailCall(*getAssociatedArgument()))
5400 return ChangeStatus::UNCHANGED;
5401 return Base::manifest(
A);
5408struct AAAlignCallSiteArgument final : AAAlignFloating {
5410 : AAAlignFloating(IRP,
A) {}
5417 if (
Argument *Arg = getAssociatedArgument())
5418 if (
A.getInfoCache().isInvolvedInMustTailCall(*Arg))
5419 return ChangeStatus::UNCHANGED;
5421 Align InheritAlign =
5422 getAssociatedValue().getPointerAlignment(
A.getDataLayout());
5423 if (InheritAlign >= getAssumedAlign())
5424 Changed = ChangeStatus::UNCHANGED;
5431 if (
Argument *Arg = getAssociatedArgument()) {
5434 const auto *ArgAlignAA =
A.getAAFor<
AAAlign>(
5437 takeKnownMaximum(ArgAlignAA->getKnownAlign().value());
5447struct AAAlignCallSiteReturned final
5448 : AACalleeToCallSite<AAAlign, AAAlignImpl> {
5449 using Base = AACalleeToCallSite<AAAlign, AAAlignImpl>;
5466 assert(!AA::hasAssumedIRAttr<Attribute::NoReturn>(
5467 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
5472 const std::string getAsStr(
Attributor *
A)
const override {
5473 return getAssumed() ?
"noreturn" :
"may-return";
5478 auto CheckForNoReturn = [](
Instruction &) {
return false; };
5479 bool UsedAssumedInformation =
false;
5480 if (!
A.checkForAllInstructions(CheckForNoReturn, *
this,
5481 {(unsigned)Instruction::Ret},
5482 UsedAssumedInformation))
5483 return indicatePessimisticFixpoint();
5484 return ChangeStatus::UNCHANGED;
5488struct AANoReturnFunction final : AANoReturnImpl {
5490 : AANoReturnImpl(IRP,
A) {}
5497struct AANoReturnCallSite final
5498 : AACalleeToCallSite<AANoReturn, AANoReturnImpl> {
5500 : AACalleeToCallSite<
AANoReturn, AANoReturnImpl>(IRP,
A) {}
5517 Value &
V = getAssociatedValue();
5518 if (
auto *
C = dyn_cast<Constant>(&V)) {
5519 if (
C->isThreadDependent())
5520 indicatePessimisticFixpoint();
5522 indicateOptimisticFixpoint();
5525 if (
auto *CB = dyn_cast<CallBase>(&V))
5528 indicateOptimisticFixpoint();
5531 if (
auto *
I = dyn_cast<Instruction>(&V)) {
5536 indicatePessimisticFixpoint();
5546 Value &
V = getAssociatedValue();
5548 if (
auto *
I = dyn_cast<Instruction>(&V))
5549 Scope =
I->getFunction();
5550 if (
auto *
A = dyn_cast<Argument>(&V)) {
5552 if (!
Scope->hasLocalLinkage())
5556 return indicateOptimisticFixpoint();
5558 bool IsKnownNoRecurse;
5559 if (AA::hasAssumedIRAttr<Attribute::NoRecurse>(
5564 auto UsePred = [&](
const Use &
U,
bool &Follow) {
5565 const Instruction *UserI = dyn_cast<Instruction>(
U.getUser());
5566 if (!UserI || isa<GetElementPtrInst>(UserI) || isa<CastInst>(UserI) ||
5567 isa<PHINode>(UserI) || isa<SelectInst>(UserI)) {
5571 if (isa<LoadInst>(UserI) || isa<CmpInst>(UserI) ||
5572 (isa<StoreInst>(UserI) &&
5573 cast<StoreInst>(UserI)->getValueOperand() !=
U.get()))
5575 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
5579 if (!Callee || !
Callee->hasLocalLinkage())
5585 DepClassTy::OPTIONAL);
5586 if (!ArgInstanceInfoAA ||
5587 !ArgInstanceInfoAA->isAssumedUniqueForAnalysis())
5592 A, *CB, *Scope, *
this,
nullptr,
5600 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
5601 if (
auto *SI = dyn_cast<StoreInst>(OldU.
getUser())) {
5602 auto *
Ptr =
SI->getPointerOperand()->stripPointerCasts();
5610 if (!
A.checkForAllUses(UsePred, *
this, V,
true,
5611 DepClassTy::OPTIONAL,
5612 true, EquivalentUseCB))
5613 return indicatePessimisticFixpoint();
5619 const std::string getAsStr(
Attributor *
A)
const override {
5620 return isAssumedUniqueForAnalysis() ?
"<unique [fAa]>" :
"<unknown>";
5624 void trackStatistics()
const override {}
5628struct AAInstanceInfoFloating : AAInstanceInfoImpl {
5630 : AAInstanceInfoImpl(IRP,
A) {}
5634struct AAInstanceInfoArgument final : AAInstanceInfoFloating {
5636 : AAInstanceInfoFloating(IRP,
A) {}
5640struct AAInstanceInfoCallSiteArgument final : AAInstanceInfoImpl {
5642 : AAInstanceInfoImpl(IRP,
A) {}
5650 Argument *Arg = getAssociatedArgument();
5652 return indicatePessimisticFixpoint();
5657 return indicatePessimisticFixpoint();
5663struct AAInstanceInfoReturned final : AAInstanceInfoImpl {
5665 : AAInstanceInfoImpl(IRP,
A) {
5681struct AAInstanceInfoCallSiteReturned final : AAInstanceInfoFloating {
5683 : AAInstanceInfoFloating(IRP,
A) {}
5690 bool IgnoreSubsumingPositions) {
5691 assert(ImpliedAttributeKind == Attribute::NoCapture &&
5692 "Unexpected attribute kind");
5695 return V.use_empty();
5701 if (isa<UndefValue>(V) || (isa<ConstantPointerNull>(V) &&
5702 V.getType()->getPointerAddressSpace() == 0)) {
5706 if (
A.hasAttr(IRP, {Attribute::NoCapture},
5707 true, Attribute::NoCapture))
5713 {Attribute::NoCapture, Attribute::ByVal},
5715 A.manifestAttrs(IRP,
5723 determineFunctionCaptureCapabilities(IRP, *
F, State);
5725 A.manifestAttrs(IRP,
5744 bool ReadOnly =
F.onlyReadsMemory();
5745 bool NoThrow =
F.doesNotThrow();
5746 bool IsVoidReturn =
F.getReturnType()->isVoidTy();
5747 if (ReadOnly && NoThrow && IsVoidReturn) {
5760 if (NoThrow && IsVoidReturn)
5765 if (!NoThrow || ArgNo < 0 ||
5766 !
F.getAttributes().hasAttrSomewhere(Attribute::Returned))
5769 for (
unsigned U = 0, E =
F.arg_size(); U < E; ++U)
5770 if (
F.hasParamAttribute(U, Attribute::Returned)) {
5771 if (U ==
unsigned(ArgNo))
5789 assert(!AA::hasAssumedIRAttr<Attribute::NoCapture>(
5790 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
5800 if (!isAssumedNoCaptureMaybeReturned())
5803 if (isArgumentPosition()) {
5804 if (isAssumedNoCapture())
5812 const std::string getAsStr(
Attributor *
A)
const override {
5813 if (isKnownNoCapture())
5814 return "known not-captured";
5815 if (isAssumedNoCapture())
5816 return "assumed not-captured";
5817 if (isKnownNoCaptureMaybeReturned())
5818 return "known not-captured-maybe-returned";
5819 if (isAssumedNoCaptureMaybeReturned())
5820 return "assumed not-captured-maybe-returned";
5821 return "assumed-captured";
5829 LLVM_DEBUG(
dbgs() <<
"[AANoCapture] Check use: " << *
U.get() <<
" in "
5833 if (isa<PtrToIntInst>(UInst)) {
5835 return isCapturedIn(State,
true,
true,
5841 if (isa<StoreInst>(UInst))
5842 return isCapturedIn(State,
true,
true,
5846 if (isa<ReturnInst>(UInst)) {
5848 return isCapturedIn(State,
false,
false,
5850 return isCapturedIn(State,
true,
true,
5856 auto *CB = dyn_cast<CallBase>(UInst);
5858 return isCapturedIn(State,
true,
true,
5865 bool IsKnownNoCapture;
5867 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
5868 A,
this, CSArgPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
5870 if (IsAssumedNoCapture)
5871 return isCapturedIn(State,
false,
false,
5875 return isCapturedIn(State,
false,
false,
5880 return isCapturedIn(State,
true,
true,
5888 bool CapturedInInt,
bool CapturedInRet) {
5889 LLVM_DEBUG(
dbgs() <<
" - captures [Mem " << CapturedInMem <<
"|Int "
5890 << CapturedInInt <<
"|Ret " << CapturedInRet <<
"]\n");
5906 return indicatePessimisticFixpoint();
5913 return indicatePessimisticFixpoint();
5921 T.addKnownBits(NOT_CAPTURED_IN_MEM);
5923 addKnownBits(NOT_CAPTURED_IN_MEM);
5930 auto CheckReturnedArgs = [&](
bool &UsedAssumedInformation) {
5934 UsedAssumedInformation))
5936 bool SeenConstant =
false;
5938 if (isa<Constant>(
VAC.getValue())) {
5941 SeenConstant =
true;
5942 }
else if (!isa<Argument>(
VAC.getValue()) ||
5943 VAC.getValue() == getAssociatedArgument())
5949 bool IsKnownNoUnwind;
5950 if (AA::hasAssumedIRAttr<Attribute::NoUnwind>(
5952 bool IsVoidTy =
F->getReturnType()->isVoidTy();
5953 bool UsedAssumedInformation =
false;
5954 if (IsVoidTy || CheckReturnedArgs(UsedAssumedInformation)) {
5955 T.addKnownBits(NOT_CAPTURED_IN_RET);
5956 if (
T.isKnown(NOT_CAPTURED_IN_MEM))
5958 if (IsKnownNoUnwind && (IsVoidTy || !UsedAssumedInformation)) {
5959 addKnownBits(NOT_CAPTURED_IN_RET);
5960 if (isKnown(NOT_CAPTURED_IN_MEM))
5961 return indicateOptimisticFixpoint();
5972 auto UseCheck = [&](
const Use &
U,
bool &Follow) ->
bool {
5977 return checkUse(
A,
T, U, Follow);
5985 if (!
A.checkForAllUses(UseCheck, *
this, *V))
5986 return indicatePessimisticFixpoint();
5989 auto Assumed = S.getAssumed();
5990 S.intersectAssumedBits(
T.getAssumed());
5991 if (!isAssumedNoCaptureMaybeReturned())
5992 return indicatePessimisticFixpoint();
5998struct AANoCaptureArgument final : AANoCaptureImpl {
6000 : AANoCaptureImpl(IRP,
A) {}
6007struct AANoCaptureCallSiteArgument final : AANoCaptureImpl {
6009 : AANoCaptureImpl(IRP,
A) {}
6017 Argument *Arg = getAssociatedArgument();
6019 return indicatePessimisticFixpoint();
6021 bool IsKnownNoCapture;
6023 if (AA::hasAssumedIRAttr<Attribute::NoCapture>(
6024 A,
this, ArgPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
6026 return ChangeStatus::UNCHANGED;
6028 return indicatePessimisticFixpoint();
6037struct AANoCaptureFloating final : AANoCaptureImpl {
6039 : AANoCaptureImpl(IRP,
A) {}
6042 void trackStatistics()
const override {
6048struct AANoCaptureReturned final : AANoCaptureImpl {
6050 : AANoCaptureImpl(IRP,
A) {
6065 void trackStatistics()
const override {}
6069struct AANoCaptureCallSiteReturned final : AANoCaptureImpl {
6071 : AANoCaptureImpl(IRP,
A) {}
6077 determineFunctionCaptureCapabilities(getIRPosition(), *
F, *
this);
6081 void trackStatistics()
const override {
6092 SimplifiedAssociatedValue,
Other, Ty);
6093 if (SimplifiedAssociatedValue == std::optional<Value *>(
nullptr))
6097 if (SimplifiedAssociatedValue)
6098 dbgs() <<
"[ValueSimplify] is assumed to be "
6099 << **SimplifiedAssociatedValue <<
"\n";
6101 dbgs() <<
"[ValueSimplify] is assumed to be <none>\n";
6113 if (getAssociatedValue().
getType()->isVoidTy())
6114 indicatePessimisticFixpoint();
6115 if (
A.hasSimplificationCallback(getIRPosition()))
6116 indicatePessimisticFixpoint();
6120 const std::string getAsStr(
Attributor *
A)
const override {
6122 dbgs() <<
"SAV: " << (
bool)SimplifiedAssociatedValue <<
" ";
6123 if (SimplifiedAssociatedValue && *SimplifiedAssociatedValue)
6124 dbgs() <<
"SAV: " << **SimplifiedAssociatedValue <<
" ";
6126 return isValidState() ? (isAtFixpoint() ?
"simplified" :
"maybe-simple")
6131 void trackStatistics()
const override {}
6134 std::optional<Value *>
6135 getAssumedSimplifiedValue(
Attributor &
A)
const override {
6136 return SimplifiedAssociatedValue;
6147 if (CtxI &&
V.getType()->canLosslesslyBitCastTo(&Ty))
6149 : BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
6162 assert(CtxI &&
"Cannot reproduce an instruction without context!");
6163 if (
Check && (
I.mayReadFromMemory() ||
6168 Value *NewOp = reproduceValue(
A, QueryingAA, *
Op, Ty, CtxI,
Check, VMap);
6170 assert(
Check &&
"Manifest of new value unexpectedly failed!");
6196 if (
const auto &NewV = VMap.
lookup(&V))
6198 bool UsedAssumedInformation =
false;
6199 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
6201 if (!SimpleV.has_value())
6205 EffectiveV = *SimpleV;
6206 if (
auto *
C = dyn_cast<Constant>(EffectiveV))
6210 return ensureType(
A, *EffectiveV, Ty, CtxI,
Check);
6211 if (
auto *
I = dyn_cast<Instruction>(EffectiveV))
6212 if (
Value *NewV = reproduceInst(
A, QueryingAA, *
I, Ty, CtxI,
Check, VMap))
6213 return ensureType(
A, *NewV, Ty, CtxI,
Check);
6220 Value *NewV = SimplifiedAssociatedValue
6221 ? *SimplifiedAssociatedValue
6223 if (NewV && NewV != &getAssociatedValue()) {
6227 if (reproduceValue(
A, *
this, *NewV, *getAssociatedType(), CtxI,
6229 return reproduceValue(
A, *
this, *NewV, *getAssociatedType(), CtxI,
6238 const IRPosition &IRP,
bool Simplify =
true) {
6239 bool UsedAssumedInformation =
false;
6242 QueryingValueSimplified =
A.getAssumedSimplified(
6244 return unionAssumed(QueryingValueSimplified);
6248 template <
typename AAType>
bool askSimplifiedValueFor(
Attributor &
A) {
6249 if (!getAssociatedValue().
getType()->isIntegerTy())
6254 A.getAAFor<AAType>(*
this, getIRPosition(), DepClassTy::NONE);
6258 std::optional<Constant *> COpt = AA->getAssumedConstant(
A);
6261 SimplifiedAssociatedValue = std::nullopt;
6262 A.recordDependence(*AA, *
this, DepClassTy::OPTIONAL);
6265 if (
auto *
C = *COpt) {
6266 SimplifiedAssociatedValue =
C;
6267 A.recordDependence(*AA, *
this, DepClassTy::OPTIONAL);
6273 bool askSimplifiedValueForOtherAAs(
Attributor &
A) {
6274 if (askSimplifiedValueFor<AAValueConstantRange>(
A))
6276 if (askSimplifiedValueFor<AAPotentialConstantValues>(
A))
6284 for (
auto &U : getAssociatedValue().
uses()) {
6288 if (
auto *
PHI = dyn_cast_or_null<PHINode>(IP))
6289 IP =
PHI->getIncomingBlock(U)->getTerminator();
6290 if (
auto *NewV = manifestReplacementValue(
A, IP)) {
6292 <<
" -> " << *NewV <<
" :: " << *
this <<
"\n");
6293 if (
A.changeUseAfterManifest(U, *NewV))
6294 Changed = ChangeStatus::CHANGED;
6298 return Changed | AAValueSimplify::manifest(
A);
6303 SimplifiedAssociatedValue = &getAssociatedValue();
6304 return AAValueSimplify::indicatePessimisticFixpoint();
6308struct AAValueSimplifyArgument final : AAValueSimplifyImpl {
6310 : AAValueSimplifyImpl(IRP,
A) {}
6313 AAValueSimplifyImpl::initialize(
A);
6314 if (
A.hasAttr(getIRPosition(),
6315 {Attribute::InAlloca, Attribute::Preallocated,
6316 Attribute::StructRet, Attribute::Nest, Attribute::ByVal},
6318 indicatePessimisticFixpoint();
6325 Argument *Arg = getAssociatedArgument();
6331 return indicatePessimisticFixpoint();
6334 auto Before = SimplifiedAssociatedValue;
6348 bool UsedAssumedInformation =
false;
6349 std::optional<Constant *> SimpleArgOp =
6350 A.getAssumedConstant(ACSArgPos, *
this, UsedAssumedInformation);
6357 return unionAssumed(*SimpleArgOp);
6362 bool UsedAssumedInformation =
false;
6363 if (hasCallBaseContext() &&
6364 getCallBaseContext()->getCalledOperand() == Arg->
getParent())
6368 Success =
A.checkForAllCallSites(PredForCallSite, *
this,
true,
6369 UsedAssumedInformation);
6372 if (!askSimplifiedValueForOtherAAs(
A))
6373 return indicatePessimisticFixpoint();
6376 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6377 : ChangeStatus ::CHANGED;
6381 void trackStatistics()
const override {
6386struct AAValueSimplifyReturned : AAValueSimplifyImpl {
6388 : AAValueSimplifyImpl(IRP,
A) {}
6391 std::optional<Value *>
6392 getAssumedSimplifiedValue(
Attributor &
A)
const override {
6393 if (!isValidState())
6395 return SimplifiedAssociatedValue;
6400 auto Before = SimplifiedAssociatedValue;
6403 auto &RI = cast<ReturnInst>(
I);
6404 return checkAndUpdate(
6409 bool UsedAssumedInformation =
false;
6410 if (!
A.checkForAllInstructions(ReturnInstCB, *
this, {Instruction::Ret},
6411 UsedAssumedInformation))
6412 if (!askSimplifiedValueForOtherAAs(
A))
6413 return indicatePessimisticFixpoint();
6416 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6417 : ChangeStatus ::CHANGED;
6423 return ChangeStatus::UNCHANGED;
6427 void trackStatistics()
const override {
6432struct AAValueSimplifyFloating : AAValueSimplifyImpl {
6434 : AAValueSimplifyImpl(IRP,
A) {}
6438 AAValueSimplifyImpl::initialize(
A);
6439 Value &
V = getAnchorValue();
6442 if (isa<Constant>(V))
6443 indicatePessimisticFixpoint();
6448 auto Before = SimplifiedAssociatedValue;
6449 if (!askSimplifiedValueForOtherAAs(
A))
6450 return indicatePessimisticFixpoint();
6453 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6454 : ChangeStatus ::CHANGED;
6458 void trackStatistics()
const override {
6463struct AAValueSimplifyFunction : AAValueSimplifyImpl {
6465 : AAValueSimplifyImpl(IRP,
A) {}
6469 SimplifiedAssociatedValue =
nullptr;
6470 indicateOptimisticFixpoint();
6475 "AAValueSimplify(Function|CallSite)::updateImpl will not be called");
6478 void trackStatistics()
const override {
6483struct AAValueSimplifyCallSite : AAValueSimplifyFunction {
6485 : AAValueSimplifyFunction(IRP,
A) {}
6487 void trackStatistics()
const override {
6492struct AAValueSimplifyCallSiteReturned : AAValueSimplifyImpl {
6494 : AAValueSimplifyImpl(IRP,
A) {}
6497 AAValueSimplifyImpl::initialize(
A);
6498 Function *Fn = getAssociatedFunction();
6499 assert(Fn &&
"Did expect an associted function");
6505 checkAndUpdate(
A, *
this, IRP))
6506 indicateOptimisticFixpoint();
6508 indicatePessimisticFixpoint();
6516 return indicatePessimisticFixpoint();
6519 void trackStatistics()
const override {
6524struct AAValueSimplifyCallSiteArgument : AAValueSimplifyFloating {
6526 : AAValueSimplifyFloating(IRP,
A) {}
6534 if (FloatAA && FloatAA->getState().isValidState())
6537 if (
auto *NewV = manifestReplacementValue(
A, getCtxI())) {
6538 Use &
U = cast<CallBase>(&getAnchorValue())
6539 ->getArgOperandUse(getCallSiteArgNo());
6540 if (
A.changeUseAfterManifest(U, *NewV))
6541 Changed = ChangeStatus::CHANGED;
6544 return Changed | AAValueSimplify::manifest(
A);
6547 void trackStatistics()
const override {
6557 struct AllocationInfo {
6569 }
Status = STACK_DUE_TO_USE;
6573 bool HasPotentiallyFreeingUnknownUses =
false;
6577 bool MoveAllocaIntoEntry =
true;
6583 struct DeallocationInfo {
6591 bool MightFreeUnknownObjects =
false;
6600 ~AAHeapToStackFunction() {
6603 for (
auto &It : AllocationInfos)
6604 It.second->~AllocationInfo();
6605 for (
auto &It : DeallocationInfos)
6606 It.second->~DeallocationInfo();
6610 AAHeapToStack::initialize(
A);
6613 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6620 DeallocationInfos[CB] =
new (
A.Allocator) DeallocationInfo{CB, FreedOp};
6629 AllocationInfo *AI =
new (
A.Allocator) AllocationInfo{CB};
6630 AllocationInfos[CB] = AI;
6632 TLI->getLibFunc(*CB, AI->LibraryFunctionId);
6638 bool UsedAssumedInformation =
false;
6639 bool Success =
A.checkForAllCallLikeInstructions(
6640 AllocationIdentifierCB, *
this, UsedAssumedInformation,
6644 assert(
Success &&
"Did not expect the call base visit callback to fail!");
6648 bool &) -> std::optional<Value *> {
return nullptr; };
6649 for (
const auto &It : AllocationInfos)
6652 for (
const auto &It : DeallocationInfos)
6657 const std::string getAsStr(
Attributor *
A)
const override {
6658 unsigned NumH2SMallocs = 0, NumInvalidMallocs = 0;
6659 for (
const auto &It : AllocationInfos) {
6660 if (It.second->Status == AllocationInfo::INVALID)
6661 ++NumInvalidMallocs;
6665 return "[H2S] Mallocs Good/Bad: " + std::to_string(NumH2SMallocs) +
"/" +
6666 std::to_string(NumInvalidMallocs);
6670 void trackStatistics()
const override {
6673 "Number of malloc/calloc/aligned_alloc calls converted to allocas");
6674 for (
const auto &It : AllocationInfos)
6675 if (It.second->Status != AllocationInfo::INVALID)
6679 bool isAssumedHeapToStack(
const CallBase &CB)
const override {
6681 if (AllocationInfo *AI =
6682 AllocationInfos.lookup(
const_cast<CallBase *
>(&CB)))
6683 return AI->Status != AllocationInfo::INVALID;
6687 bool isAssumedHeapToStackRemovedFree(
CallBase &CB)
const override {
6688 if (!isValidState())
6691 for (
const auto &It : AllocationInfos) {
6692 AllocationInfo &AI = *It.second;
6693 if (AI.Status == AllocationInfo::INVALID)
6696 if (AI.PotentialFreeCalls.count(&CB))
6704 assert(getState().isValidState() &&
6705 "Attempted to manifest an invalid state!");
6709 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6711 for (
auto &It : AllocationInfos) {
6712 AllocationInfo &AI = *It.second;
6713 if (AI.Status == AllocationInfo::INVALID)
6716 for (
CallBase *FreeCall : AI.PotentialFreeCalls) {
6717 LLVM_DEBUG(
dbgs() <<
"H2S: Removing free call: " << *FreeCall <<
"\n");
6718 A.deleteAfterManifest(*FreeCall);
6719 HasChanged = ChangeStatus::CHANGED;
6722 LLVM_DEBUG(
dbgs() <<
"H2S: Removing malloc-like call: " << *AI.CB
6727 if (TLI->getLibFunc(*AI.CB, IsAllocShared))
6728 if (IsAllocShared == LibFunc___kmpc_alloc_shared)
6729 return OR <<
"Moving globalized variable to the stack.";
6730 return OR <<
"Moving memory allocation from the heap to the stack.";
6732 if (AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared)
6739 std::optional<APInt> SizeAPI =
getSize(
A, *
this, AI);
6741 Size = ConstantInt::get(AI.CB->getContext(), *SizeAPI);
6748 cast<ConstantInt>(SizeOffsetPair.
Offset)->isZero());
6753 ?
F->getEntryBlock().begin()
6754 : AI.CB->getIterator();
6757 if (
MaybeAlign RetAlign = AI.CB->getRetAlign())
6758 Alignment = std::max(Alignment, *RetAlign);
6760 std::optional<APInt> AlignmentAPI = getAPInt(
A, *
this, *
Align);
6761 assert(AlignmentAPI && AlignmentAPI->getZExtValue() > 0 &&
6762 "Expected an alignment during manifest!");
6764 std::max(Alignment,
assumeAligned(AlignmentAPI->getZExtValue()));
6768 unsigned AS =
DL.getAllocaAddrSpace();
6771 AI.CB->getName() +
".h2s", IP);
6773 if (Alloca->
getType() != AI.CB->getType())
6774 Alloca = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
6775 Alloca, AI.CB->getType(),
"malloc_cast", AI.CB->getIterator());
6780 "Must be able to materialize initial memory state of allocation");
6784 if (
auto *
II = dyn_cast<InvokeInst>(AI.CB)) {
6785 auto *NBB =
II->getNormalDest();
6787 A.deleteAfterManifest(*AI.CB);
6789 A.deleteAfterManifest(*AI.CB);
6795 if (!isa<UndefValue>(InitVal)) {
6798 Builder.CreateMemSet(Alloca, InitVal,
Size, std::nullopt);
6800 HasChanged = ChangeStatus::CHANGED;
6808 bool UsedAssumedInformation =
false;
6809 std::optional<Constant *> SimpleV =
6810 A.getAssumedConstant(V, AA, UsedAssumedInformation);
6812 return APInt(64, 0);
6813 if (
auto *CI = dyn_cast_or_null<ConstantInt>(*SimpleV))
6814 return CI->getValue();
6815 return std::nullopt;
6819 AllocationInfo &AI) {
6820 auto Mapper = [&](
const Value *
V) ->
const Value * {
6821 bool UsedAssumedInformation =
false;
6822 if (std::optional<Constant *> SimpleV =
6823 A.getAssumedConstant(*V, AA, UsedAssumedInformation))
6830 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6848 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6850 const auto *LivenessAA =
6854 A.getInfoCache().getMustBeExecutedContextExplorer();
6856 bool StackIsAccessibleByOtherThreads =
6857 A.getInfoCache().stackIsAccessibleByOtherThreads();
6860 A.getInfoCache().getAnalysisResultForFunction<
LoopAnalysis>(*F);
6861 std::optional<bool> MayContainIrreducibleControl;
6863 if (&
F->getEntryBlock() == &BB)
6865 if (!MayContainIrreducibleControl.has_value())
6867 if (*MayContainIrreducibleControl)
6876 bool HasUpdatedFrees =
false;
6878 auto UpdateFrees = [&]() {
6879 HasUpdatedFrees =
true;
6881 for (
auto &It : DeallocationInfos) {
6882 DeallocationInfo &DI = *It.second;
6885 if (DI.MightFreeUnknownObjects)
6889 bool UsedAssumedInformation =
false;
6890 if (
A.isAssumedDead(*DI.CB,
this, LivenessAA, UsedAssumedInformation,
6897 LLVM_DEBUG(
dbgs() <<
"[H2S] Unknown underlying object for free!\n");
6898 DI.MightFreeUnknownObjects =
true;
6904 if (isa<ConstantPointerNull>(Obj) || isa<UndefValue>(Obj))
6907 CallBase *ObjCB = dyn_cast<CallBase>(Obj);
6911 DI.MightFreeUnknownObjects =
true;
6915 AllocationInfo *AI = AllocationInfos.lookup(ObjCB);
6917 LLVM_DEBUG(
dbgs() <<
"[H2S] Free of a non-allocation object: " << *Obj
6919 DI.MightFreeUnknownObjects =
true;
6923 DI.PotentialAllocationCalls.insert(ObjCB);
6927 auto FreeCheck = [&](AllocationInfo &AI) {
6931 if (!StackIsAccessibleByOtherThreads) {
6933 if (!AA::hasAssumedIRAttr<Attribute::NoSync>(
6936 dbgs() <<
"[H2S] found an escaping use, stack is not accessible by "
6937 "other threads and function is not nosync:\n");
6941 if (!HasUpdatedFrees)
6945 if (AI.PotentialFreeCalls.size() != 1) {
6947 << AI.PotentialFreeCalls.size() <<
"\n");
6950 CallBase *UniqueFree = *AI.PotentialFreeCalls.begin();
6951 DeallocationInfo *DI = DeallocationInfos.lookup(UniqueFree);
6954 dbgs() <<
"[H2S] unique free call was not known as deallocation call "
6955 << *UniqueFree <<
"\n");
6958 if (DI->MightFreeUnknownObjects) {
6960 dbgs() <<
"[H2S] unique free call might free unknown allocations\n");
6963 if (DI->PotentialAllocationCalls.empty())
6965 if (DI->PotentialAllocationCalls.size() > 1) {
6967 << DI->PotentialAllocationCalls.size()
6968 <<
" different allocations\n");
6971 if (*DI->PotentialAllocationCalls.begin() != AI.CB) {
6974 <<
"[H2S] unique free call not known to free this allocation but "
6975 << **DI->PotentialAllocationCalls.begin() <<
"\n");
6980 if (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared) {
6982 if (!Explorer || !Explorer->findInContextOf(UniqueFree, CtxI)) {
6983 LLVM_DEBUG(
dbgs() <<
"[H2S] unique free call might not be executed "
6984 "with the allocation "
6985 << *UniqueFree <<
"\n");
6992 auto UsesCheck = [&](AllocationInfo &AI) {
6993 bool ValidUsesOnly =
true;
6995 auto Pred = [&](
const Use &
U,
bool &Follow) ->
bool {
6997 if (isa<LoadInst>(UserI))
6999 if (
auto *SI = dyn_cast<StoreInst>(UserI)) {
7000 if (
SI->getValueOperand() ==
U.get()) {
7002 <<
"[H2S] escaping store to memory: " << *UserI <<
"\n");
7003 ValidUsesOnly =
false;
7009 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
7012 if (DeallocationInfos.count(CB)) {
7013 AI.PotentialFreeCalls.insert(CB);
7020 bool IsKnownNoCapture;
7021 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
7026 bool IsAssumedNoFree = AA::hasAssumedIRAttr<Attribute::NoFree>(
7029 if (!IsAssumedNoCapture ||
7030 (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared &&
7031 !IsAssumedNoFree)) {
7032 AI.HasPotentiallyFreeingUnknownUses |= !IsAssumedNoFree;
7037 <<
"Could not move globalized variable to the stack. "
7038 "Variable is potentially captured in call. Mark "
7039 "parameter as `__attribute__((noescape))` to override.";
7042 if (ValidUsesOnly &&
7043 AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared)
7047 ValidUsesOnly =
false;
7052 if (isa<GetElementPtrInst>(UserI) || isa<BitCastInst>(UserI) ||
7053 isa<PHINode>(UserI) || isa<SelectInst>(UserI)) {
7060 ValidUsesOnly =
false;
7063 if (!
A.checkForAllUses(Pred, *
this, *AI.CB,
false,
7065 [&](
const Use &OldU,
const Use &NewU) {
7066 auto *SI = dyn_cast<StoreInst>(OldU.getUser());
7067 return !SI || StackIsAccessibleByOtherThreads ||
7068 AA::isAssumedThreadLocalObject(
7069 A, *SI->getPointerOperand(), *this);
7072 return ValidUsesOnly;
7077 for (
auto &It : AllocationInfos) {
7078 AllocationInfo &AI = *It.second;
7079 if (AI.Status == AllocationInfo::INVALID)
7083 std::optional<APInt> APAlign = getAPInt(
A, *
this, *
Align);
7087 LLVM_DEBUG(
dbgs() <<
"[H2S] Unknown allocation alignment: " << *AI.CB
7089 AI.Status = AllocationInfo::INVALID;
7094 !APAlign->isPowerOf2()) {
7095 LLVM_DEBUG(
dbgs() <<
"[H2S] Invalid allocation alignment: " << APAlign
7097 AI.Status = AllocationInfo::INVALID;
7104 if (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared &&
7109 dbgs() <<
"[H2S] Unknown allocation size: " << *AI.CB <<
"\n";
7111 dbgs() <<
"[H2S] Allocation size too large: " << *AI.CB <<
" vs. "
7115 AI.Status = AllocationInfo::INVALID;
7121 switch (AI.Status) {
7122 case AllocationInfo::STACK_DUE_TO_USE:
7125 AI.Status = AllocationInfo::STACK_DUE_TO_FREE;
7127 case AllocationInfo::STACK_DUE_TO_FREE:
7130 AI.Status = AllocationInfo::INVALID;
7133 case AllocationInfo::INVALID:
7140 bool IsGlobalizedLocal =
7141 AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared;
7142 if (AI.MoveAllocaIntoEntry &&
7143 (!
Size.has_value() ||
7144 (!IsGlobalizedLocal && IsInLoop(*AI.CB->getParent()))))
7145 AI.MoveAllocaIntoEntry =
false;
7159 AAPrivatizablePtr::indicatePessimisticFixpoint();
7160 PrivatizableType =
nullptr;
7161 return ChangeStatus::CHANGED;
7167 virtual std::optional<Type *> identifyPrivatizableType(
Attributor &
A) = 0;
7171 std::optional<Type *> combineTypes(std::optional<Type *> T0,
7172 std::optional<Type *>
T1) {
7182 std::optional<Type *> getPrivatizableType()
const override {
7183 return PrivatizableType;
7186 const std::string getAsStr(
Attributor *
A)
const override {
7187 return isAssumedPrivatizablePtr() ?
"[priv]" :
"[no-priv]";
7191 std::optional<Type *> PrivatizableType;
7196struct AAPrivatizablePtrArgument final :
public AAPrivatizablePtrImpl {
7198 : AAPrivatizablePtrImpl(IRP,
A) {}
7201 std::optional<Type *> identifyPrivatizableType(
Attributor &
A)
override {
7204 bool UsedAssumedInformation =
false;
7206 A.getAttrs(getIRPosition(), {Attribute::ByVal},
Attrs,
7208 if (!
Attrs.empty() &&
7210 true, UsedAssumedInformation))
7211 return Attrs[0].getValueAsType();
7213 std::optional<Type *> Ty;
7214 unsigned ArgNo = getIRPosition().getCallSiteArgNo();
7237 dbgs() <<
"[AAPrivatizablePtr] ACSPos: " << ACSArgPos <<
", CSTy: ";
7241 dbgs() <<
"<nullptr>";
7246 Ty = combineTypes(Ty, CSTy);
7249 dbgs() <<
" : New Type: ";
7251 (*Ty)->print(
dbgs());
7253 dbgs() <<
"<nullptr>";
7262 if (!
A.checkForAllCallSites(CallSiteCheck, *
this,
true,
7263 UsedAssumedInformation))
7270 PrivatizableType = identifyPrivatizableType(
A);
7271 if (!PrivatizableType)
7272 return ChangeStatus::UNCHANGED;
7273 if (!*PrivatizableType)
7274 return indicatePessimisticFixpoint();
7279 DepClassTy::OPTIONAL);
7282 if (!
A.hasAttr(getIRPosition(), Attribute::ByVal) &&
7285 return indicatePessimisticFixpoint();
7291 identifyReplacementTypes(*PrivatizableType, ReplacementTypes);
7295 Function &Fn = *getIRPosition().getAnchorScope();
7299 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] Missing TTI for function "
7301 return indicatePessimisticFixpoint();
7311 bool UsedAssumedInformation =
false;
7312 if (!
A.checkForAllCallSites(CallSiteCheck, *
this,
true,
7313 UsedAssumedInformation)) {
7315 dbgs() <<
"[AAPrivatizablePtr] ABI incompatibility detected for "
7317 return indicatePessimisticFixpoint();
7321 Argument *Arg = getAssociatedArgument();
7322 if (!
A.isValidFunctionSignatureRewrite(*Arg, ReplacementTypes)) {
7324 return indicatePessimisticFixpoint();
7331 auto IsCompatiblePrivArgOfCallback = [&](
CallBase &CB) {
7334 for (
const Use *U : CallbackUses) {
7336 assert(CBACS && CBACS.isCallbackCall());
7337 for (
Argument &CBArg : CBACS.getCalledFunction()->args()) {
7338 int CBArgNo = CBACS.getCallArgOperandNo(CBArg);
7342 <<
"[AAPrivatizablePtr] Argument " << *Arg
7343 <<
"check if can be privatized in the context of its parent ("
7345 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7347 << CBArgNo <<
"@" << CBACS.getCalledFunction()->getName()
7348 <<
")\n[AAPrivatizablePtr] " << CBArg <<
" : "
7349 << CBACS.getCallArgOperand(CBArg) <<
" vs "
7351 <<
"[AAPrivatizablePtr] " << CBArg <<
" : "
7352 << CBACS.getCallArgOperandNo(CBArg) <<
" vs " << ArgNo <<
"\n";
7355 if (CBArgNo !=
int(ArgNo))
7359 if (CBArgPrivAA && CBArgPrivAA->isValidState()) {
7363 if (*CBArgPrivTy == PrivatizableType)
7368 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7369 <<
" cannot be privatized in the context of its parent ("
7371 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7373 << CBArgNo <<
"@" << CBACS.getCalledFunction()->getName()
7374 <<
").\n[AAPrivatizablePtr] for which the argument "
7375 "privatization is not compatible.\n";
7389 "Expected a direct call operand for callback call operand");
7394 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7395 <<
" check if be privatized in the context of its parent ("
7397 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7399 << DCArgNo <<
"@" << DCCallee->
getName() <<
").\n";
7402 if (
unsigned(DCArgNo) < DCCallee->
arg_size()) {
7405 DepClassTy::REQUIRED);
7406 if (DCArgPrivAA && DCArgPrivAA->isValidState()) {
7410 if (*DCArgPrivTy == PrivatizableType)
7416 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7417 <<
" cannot be privatized in the context of its parent ("
7419 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7422 <<
").\n[AAPrivatizablePtr] for which the argument "
7423 "privatization is not compatible.\n";
7435 return IsCompatiblePrivArgOfDirectCS(ACS);
7439 if (!
A.checkForAllCallSites(IsCompatiblePrivArgOfOtherCallSite, *
this,
true,
7440 UsedAssumedInformation))
7441 return indicatePessimisticFixpoint();
7443 return ChangeStatus::UNCHANGED;
7449 identifyReplacementTypes(
Type *PrivType,
7453 assert(PrivType &&
"Expected privatizable type!");
7456 if (
auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
7457 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++)
7458 ReplacementTypes.
push_back(PrivStructType->getElementType(u));
7459 }
else if (
auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
7460 ReplacementTypes.
append(PrivArrayType->getNumElements(),
7461 PrivArrayType->getElementType());
7472 assert(PrivType &&
"Expected privatizable type!");
7478 if (
auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
7479 const StructLayout *PrivStructLayout =
DL.getStructLayout(PrivStructType);
7480 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
7485 }
else if (
auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
7486 Type *PointeeTy = PrivArrayType->getElementType();
7487 uint64_t PointeeTySize =
DL.getTypeStoreSize(PointeeTy);
7488 for (
unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
7499 void createReplacementValues(
Align Alignment,
Type *PrivType,
7503 assert(PrivType &&
"Expected privatizable type!");
7510 if (
auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
7511 const StructLayout *PrivStructLayout =
DL.getStructLayout(PrivStructType);
7512 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
7513 Type *PointeeTy = PrivStructType->getElementType(u);
7517 L->setAlignment(Alignment);
7520 }
else if (
auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
7521 Type *PointeeTy = PrivArrayType->getElementType();
7522 uint64_t PointeeTySize =
DL.getTypeStoreSize(PointeeTy);
7523 for (
unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
7526 L->setAlignment(Alignment);
7531 L->setAlignment(Alignment);
7538 if (!PrivatizableType)
7539 return ChangeStatus::UNCHANGED;
7540 assert(*PrivatizableType &&
"Expected privatizable type!");
7546 bool UsedAssumedInformation =
false;
7547 if (!
A.checkForAllInstructions(
7549 CallInst &CI = cast<CallInst>(I);
7550 if (CI.isTailCall())
7551 TailCalls.push_back(&CI);
7554 *
this, {Instruction::Call}, UsedAssumedInformation))
7555 return ChangeStatus::UNCHANGED;
7557 Argument *Arg = getAssociatedArgument();
7560 const auto *AlignAA =
7569 BasicBlock &EntryBB = ReplacementFn.getEntryBlock();
7572 unsigned AS =
DL.getAllocaAddrSpace();
7574 Arg->
getName() +
".priv", IP);
7575 createInitialization(*PrivatizableType, *AI, ReplacementFn,
7576 ArgIt->getArgNo(), IP);
7579 AI = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
7584 CI->setTailCall(
false);
7595 createReplacementValues(
7596 AlignAA ? AlignAA->getAssumedAlign() :
Align(0),
7597 *PrivatizableType, ACS,
7605 identifyReplacementTypes(*PrivatizableType, ReplacementTypes);
7608 if (
A.registerFunctionSignatureRewrite(*Arg, ReplacementTypes,
7609 std::move(FnRepairCB),
7610 std::move(ACSRepairCB)))
7611 return ChangeStatus::CHANGED;
7612 return ChangeStatus::UNCHANGED;
7616 void trackStatistics()
const override {
7621struct AAPrivatizablePtrFloating :
public AAPrivatizablePtrImpl {
7623 : AAPrivatizablePtrImpl(IRP,
A) {}
7628 indicatePessimisticFixpoint();
7633 "updateImpl will not be called");
7637 std::optional<Type *> identifyPrivatizableType(
Attributor &
A)
override {
7640 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] No underlying object found!\n");
7644 if (
auto *AI = dyn_cast<AllocaInst>(Obj))
7645 if (
auto *CI = dyn_cast<ConstantInt>(AI->getArraySize()))
7647 return AI->getAllocatedType();
7648 if (
auto *Arg = dyn_cast<Argument>(Obj)) {
7651 if (PrivArgAA && PrivArgAA->isAssumedPrivatizablePtr())
7655 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] Underlying object neither valid "
7656 "alloca nor privatizable argument: "
7662 void trackStatistics()
const override {
7667struct AAPrivatizablePtrCallSiteArgument final
7668 :
public AAPrivatizablePtrFloating {
7670 : AAPrivatizablePtrFloating(IRP,
A) {}
7674 if (
A.hasAttr(getIRPosition(), Attribute::ByVal))
7675 indicateOptimisticFixpoint();
7680 PrivatizableType = identifyPrivatizableType(
A);
7681 if (!PrivatizableType)
7682 return ChangeStatus::UNCHANGED;
7683 if (!*PrivatizableType)
7684 return indicatePessimisticFixpoint();
7687 bool IsKnownNoCapture;
7688 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
7689 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoCapture);
7690 if (!IsAssumedNoCapture) {
7691 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer might be captured!\n");
7692 return indicatePessimisticFixpoint();
7695 bool IsKnownNoAlias;
7696 if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
7697 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
7698 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer might alias!\n");
7699 return indicatePessimisticFixpoint();
7704 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer is written!\n");
7705 return indicatePessimisticFixpoint();
7708 return ChangeStatus::UNCHANGED;
7712 void trackStatistics()
const override {
7717struct AAPrivatizablePtrCallSiteReturned final
7718 :
public AAPrivatizablePtrFloating {
7720 : AAPrivatizablePtrFloating(IRP,
A) {}
7725 indicatePessimisticFixpoint();
7729 void trackStatistics()
const override {
7734struct AAPrivatizablePtrReturned final :
public AAPrivatizablePtrFloating {
7736 : AAPrivatizablePtrFloating(IRP,
A) {}
7741 indicatePessimisticFixpoint();
7745 void trackStatistics()
const override {
7761 intersectAssumedBits(BEST_STATE);
7762 getKnownStateFromValue(
A, getIRPosition(), getState());
7763 AAMemoryBehavior::initialize(
A);
7769 bool IgnoreSubsumingPositions =
false) {
7771 A.getAttrs(IRP, AttrKinds, Attrs, IgnoreSubsumingPositions);
7773 switch (Attr.getKindAsEnum()) {
7774 case Attribute::ReadNone:
7777 case Attribute::ReadOnly:
7780 case Attribute::WriteOnly:
7789 if (!
I->mayReadFromMemory())
7791 if (!
I->mayWriteToMemory())
7804 else if (isAssumedWriteOnly())
7813 if (
A.hasAttr(IRP, Attribute::ReadNone,
7815 return ChangeStatus::UNCHANGED;
7824 return ChangeStatus::UNCHANGED;
7827 A.removeAttrs(IRP, AttrKinds);
7830 A.removeAttrs(IRP, Attribute::Writable);
7837 const std::string getAsStr(
Attributor *
A)
const override {
7842 if (isAssumedWriteOnly())
7844 return "may-read/write";
7852 Attribute::ReadNone, Attribute::ReadOnly, Attribute::WriteOnly};
7855struct AAMemoryBehaviorFloating : AAMemoryBehaviorImpl {
7857 : AAMemoryBehaviorImpl(IRP,
A) {}
7863 void trackStatistics()
const override {
7868 else if (isAssumedWriteOnly())
7883struct AAMemoryBehaviorArgument : AAMemoryBehaviorFloating {
7885 : AAMemoryBehaviorFloating(IRP,
A) {}
7889 intersectAssumedBits(BEST_STATE);
7894 bool HasByVal =
A.hasAttr(IRP, {Attribute::ByVal},
7896 getKnownStateFromValue(
A, IRP, getState(),
7903 return ChangeStatus::UNCHANGED;
7907 if (
A.hasAttr(getIRPosition(),
7908 {Attribute::InAlloca, Attribute::Preallocated})) {
7909 removeKnownBits(NO_WRITES);
7910 removeAssumedBits(NO_WRITES);
7912 A.removeAttrs(getIRPosition(), AttrKinds);
7913 return AAMemoryBehaviorFloating::manifest(
A);
7917 void trackStatistics()
const override {
7922 else if (isAssumedWriteOnly())
7927struct AAMemoryBehaviorCallSiteArgument final : AAMemoryBehaviorArgument {
7929 : AAMemoryBehaviorArgument(IRP,
A) {}
7935 Argument *Arg = getAssociatedArgument();
7937 indicatePessimisticFixpoint();
7941 addKnownBits(NO_WRITES);
7942 removeKnownBits(NO_READS);
7943 removeAssumedBits(NO_READS);
7945 AAMemoryBehaviorArgument::initialize(
A);
7946 if (getAssociatedFunction()->isDeclaration())
7947 indicatePessimisticFixpoint();
7956 Argument *Arg = getAssociatedArgument();
7961 return indicatePessimisticFixpoint();
7966 void trackStatistics()
const override {
7971 else if (isAssumedWriteOnly())
7977struct AAMemoryBehaviorCallSiteReturned final : AAMemoryBehaviorFloating {
7979 : AAMemoryBehaviorFloating(IRP,
A) {}
7983 AAMemoryBehaviorImpl::initialize(
A);
7988 return ChangeStatus::UNCHANGED;
7992 void trackStatistics()
const override {}
7996struct AAMemoryBehaviorFunction final :
public AAMemoryBehaviorImpl {
7998 : AAMemoryBehaviorImpl(IRP,
A) {}
8008 Function &
F = cast<Function>(getAnchorValue());
8014 else if (isAssumedWriteOnly())
8017 A.removeAttrs(getIRPosition(), AttrKinds);
8022 return A.manifestAttrs(getIRPosition(),
8027 void trackStatistics()
const override {
8032 else if (isAssumedWriteOnly())
8038struct AAMemoryBehaviorCallSite final
8039 : AACalleeToCallSite<AAMemoryBehavior, AAMemoryBehaviorImpl> {
8046 CallBase &CB = cast<CallBase>(getAnchorValue());
8052 else if (isAssumedWriteOnly())
8055 A.removeAttrs(getIRPosition(), AttrKinds);
8060 Attribute::Writable);
8061 return A.manifestAttrs(
8066 void trackStatistics()
const override {
8071 else if (isAssumedWriteOnly())
8079 auto AssumedState = getAssumed();
8085 if (
const auto *CB = dyn_cast<CallBase>(&
I)) {
8088 if (MemBehaviorAA) {
8089 intersectAssumedBits(MemBehaviorAA->
getAssumed());
8090 return !isAtFixpoint();
8095 if (
I.mayReadFromMemory())
8096 removeAssumedBits(NO_READS);
8097 if (
I.mayWriteToMemory())
8098 removeAssumedBits(NO_WRITES);
8099 return !isAtFixpoint();
8102 bool UsedAssumedInformation =
false;
8103 if (!
A.checkForAllReadWriteInstructions(CheckRWInst, *
this,
8104 UsedAssumedInformation))
8105 return indicatePessimisticFixpoint();
8124 const auto *FnMemAA =
8128 S.addKnownBits(FnMemAA->getKnown());
8129 if ((S.getAssumed() & FnMemAA->getAssumed()) == S.getAssumed())
8135 auto AssumedState = S.getAssumed();
8141 bool IsKnownNoCapture;
8143 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
8147 if (!IsAssumedNoCapture &&
8149 S.intersectAssumedBits(FnMemAssumedState);
8155 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
8157 LLVM_DEBUG(
dbgs() <<
"[AAMemoryBehavior] Use: " << *U <<
" in " << *UserI
8165 Follow = followUsersOfUseIn(
A, U, UserI);
8169 analyzeUseIn(
A, U, UserI);
8171 return !isAtFixpoint();
8174 if (!
A.checkForAllUses(UsePred, *
this, getAssociatedValue()))
8175 return indicatePessimisticFixpoint();
8181bool AAMemoryBehaviorFloating::followUsersOfUseIn(
Attributor &
A,
const Use &U,
8185 if (isa<LoadInst>(UserI) || isa<ReturnInst>(UserI))
8190 const auto *CB = dyn_cast<CallBase>(UserI);
8200 if (
U.get()->getType()->isPointerTy()) {
8202 bool IsKnownNoCapture;
8203 return !AA::hasAssumedIRAttr<Attribute::NoCapture>(
8211void AAMemoryBehaviorFloating::analyzeUseIn(
Attributor &
A,
const Use &U,
8219 case Instruction::Load:
8221 removeAssumedBits(NO_READS);
8224 case Instruction::Store:
8229 removeAssumedBits(NO_WRITES);
8231 indicatePessimisticFixpoint();
8234 case Instruction::Call:
8235 case Instruction::CallBr:
8236 case Instruction::Invoke: {
8239 const auto *CB = cast<CallBase>(UserI);
8243 indicatePessimisticFixpoint();
8250 removeAssumedBits(NO_READS);
8257 if (
U.get()->getType()->isPointerTy())
8261 const auto *MemBehaviorAA =
8267 intersectAssumedBits(MemBehaviorAA->
getAssumed());
8275 removeAssumedBits(NO_READS);
8277 removeAssumedBits(NO_WRITES);
8289 return "all memory";
8292 std::string S =
"memory:";
8298 S +=
"internal global,";
8300 S +=
"external global,";
8304 S +=
"inaccessible,";
8318 AccessKind2Accesses.fill(
nullptr);
8321 ~AAMemoryLocationImpl() {
8324 for (AccessSet *AS : AccessKind2Accesses)
8331 intersectAssumedBits(BEST_STATE);
8332 getKnownStateFromValue(
A, getIRPosition(), getState());
8333 AAMemoryLocation::initialize(
A);
8339 bool IgnoreSubsumingPositions =
false) {
8348 bool UseArgMemOnly =
true;
8350 if (AnchorFn &&
A.isRunOn(*AnchorFn))
8354 A.getAttrs(IRP, {Attribute::Memory},
Attrs, IgnoreSubsumingPositions);
8363 State.
addKnownBits(inverseLocation(NO_INACCESSIBLE_MEM,
true,
true));
8368 State.
addKnownBits(inverseLocation(NO_ARGUMENT_MEM,
true,
true));
8372 A.manifestAttrs(IRP,
8382 NO_INACCESSIBLE_MEM | NO_ARGUMENT_MEM,
true,
true));
8386 A.manifestAttrs(IRP,
8405 else if (isAssumedInaccessibleMemOnly())
8408 else if (isAssumedArgMemOnly())
8411 else if (isAssumedInaccessibleOrArgMemOnly())
8426 if (DeducedAttrs.
size() != 1)
8427 return ChangeStatus::UNCHANGED;
8435 bool checkForAllAccessesToMemoryKind(
8437 MemoryLocationsKind)>
8439 MemoryLocationsKind RequestedMLK)
const override {
8440 if (!isValidState())
8443 MemoryLocationsKind AssumedMLK = getAssumedNotAccessedLocation();
8444 if (AssumedMLK == NO_LOCATIONS)
8448 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS;
8449 CurMLK *= 2, ++
Idx) {
8450 if (CurMLK & RequestedMLK)
8453 if (
const AccessSet *Accesses = AccessKind2Accesses[
Idx])
8454 for (
const AccessInfo &AI : *Accesses)
8455 if (!Pred(AI.I, AI.Ptr, AI.Kind, CurMLK))
8467 bool Changed =
false;
8468 MemoryLocationsKind KnownMLK = getKnown();
8469 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
8470 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2)
8471 if (!(CurMLK & KnownMLK))
8472 updateStateAndAccessesMap(getState(), CurMLK,
I,
nullptr, Changed,
8473 getAccessKindFromInst(
I));
8474 return AAMemoryLocation::indicatePessimisticFixpoint();
8494 bool operator()(
const AccessInfo &
LHS,
const AccessInfo &
RHS)
const {
8498 return LHS.Ptr <
RHS.Ptr;
8499 if (
LHS.Kind !=
RHS.Kind)
8500 return LHS.Kind <
RHS.Kind;
8508 std::array<AccessSet *, llvm::CTLog2<VALID_STATE>()> AccessKind2Accesses;
8525 AK =
I->mayReadFromMemory() ? READ :
NONE;
8543 Changed |= Accesses->insert(AccessInfo{
I,
Ptr, AK}).second;
8544 if (MLK == NO_UNKOWN_MEM)
8546 State.removeAssumedBits(MLK);
8553 unsigned AccessAS = 0);
8559void AAMemoryLocationImpl::categorizePtrValue(
8562 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Categorize pointer locations for "
8564 << getMemoryLocationsAsStr(State.getAssumed()) <<
"]\n");
8566 auto Pred = [&](
Value &Obj) {
8569 MemoryLocationsKind MLK = NO_LOCATIONS;
8579 if (isa<UndefValue>(&Obj))
8581 if (isa<Argument>(&Obj)) {
8588 MLK = NO_ARGUMENT_MEM;
8589 }
else if (
auto *GV = dyn_cast<GlobalValue>(&Obj)) {
8593 if (
auto *GVar = dyn_cast<GlobalVariable>(GV))
8594 if (GVar->isConstant())
8597 if (GV->hasLocalLinkage())
8598 MLK = NO_GLOBAL_INTERNAL_MEM;
8600 MLK = NO_GLOBAL_EXTERNAL_MEM;
8601 }
else if (isa<ConstantPointerNull>(&Obj) &&
8605 }
else if (isa<AllocaInst>(&Obj)) {
8607 }
else if (
const auto *CB = dyn_cast<CallBase>(&Obj)) {
8608 bool IsKnownNoAlias;
8609 if (AA::hasAssumedIRAttr<Attribute::NoAlias>(
8612 MLK = NO_MALLOCED_MEM;
8614 MLK = NO_UNKOWN_MEM;
8616 MLK = NO_UNKOWN_MEM;
8619 assert(MLK != NO_LOCATIONS &&
"No location specified!");
8620 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Ptr value can be categorized: "
8621 << Obj <<
" -> " << getMemoryLocationsAsStr(MLK) <<
"\n");
8622 updateStateAndAccessesMap(State, MLK, &
I, &Obj, Changed,
8623 getAccessKindFromInst(&
I));
8632 dbgs() <<
"[AAMemoryLocation] Pointer locations not categorized\n");
8633 updateStateAndAccessesMap(State, NO_UNKOWN_MEM, &
I,
nullptr, Changed,
8634 getAccessKindFromInst(&
I));
8639 dbgs() <<
"[AAMemoryLocation] Accessed locations with pointer locations: "
8640 << getMemoryLocationsAsStr(State.getAssumed()) <<
"\n");
8643void AAMemoryLocationImpl::categorizeArgumentPointerLocations(
8646 for (
unsigned ArgNo = 0, E = CB.
arg_size(); ArgNo < E; ++ArgNo) {
8655 const auto *ArgOpMemLocationAA =
8658 if (ArgOpMemLocationAA && ArgOpMemLocationAA->isAssumedReadNone())
8663 categorizePtrValue(
A, CB, *ArgOp, AccessedLocs, Changed);
8670 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Categorize accessed locations for "
8674 AccessedLocs.intersectAssumedBits(NO_LOCATIONS);
8676 if (
auto *CB = dyn_cast<CallBase>(&
I)) {
8682 <<
" [" << CBMemLocationAA <<
"]\n");
8683 if (!CBMemLocationAA) {
8684 updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &
I,
nullptr,
8685 Changed, getAccessKindFromInst(&
I));
8686 return NO_UNKOWN_MEM;
8689 if (CBMemLocationAA->isAssumedReadNone())
8690 return NO_LOCATIONS;
8692 if (CBMemLocationAA->isAssumedInaccessibleMemOnly()) {
8693 updateStateAndAccessesMap(AccessedLocs, NO_INACCESSIBLE_MEM, &
I,
nullptr,
8694 Changed, getAccessKindFromInst(&
I));
8695 return AccessedLocs.getAssumed();
8698 uint32_t CBAssumedNotAccessedLocs =
8699 CBMemLocationAA->getAssumedNotAccessedLocation();
8702 uint32_t CBAssumedNotAccessedLocsNoArgMem =
8703 CBAssumedNotAccessedLocs | NO_ARGUMENT_MEM | NO_GLOBAL_MEM;
8705 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2) {
8706 if (CBAssumedNotAccessedLocsNoArgMem & CurMLK)
8708 updateStateAndAccessesMap(AccessedLocs, CurMLK, &
I,
nullptr, Changed,
8709 getAccessKindFromInst(&
I));
8714 bool HasGlobalAccesses = ((~CBAssumedNotAccessedLocs) & NO_GLOBAL_MEM);
8715 if (HasGlobalAccesses) {
8718 updateStateAndAccessesMap(AccessedLocs, MLK, &
I,
Ptr, Changed,
8719 getAccessKindFromInst(&
I));
8722 if (!CBMemLocationAA->checkForAllAccessesToMemoryKind(
8723 AccessPred, inverseLocation(NO_GLOBAL_MEM,
false,
false)))
8724 return AccessedLocs.getWorstState();
8728 dbgs() <<
"[AAMemoryLocation] Accessed state before argument handling: "
8729 << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) <<
"\n");
8732 bool HasArgAccesses = ((~CBAssumedNotAccessedLocs) & NO_ARGUMENT_MEM);
8734 categorizeArgumentPointerLocations(
A, *CB, AccessedLocs, Changed);
8737 dbgs() <<
"[AAMemoryLocation] Accessed state after argument handling: "
8738 << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) <<
"\n");
8740 return AccessedLocs.getAssumed();
8745 dbgs() <<
"[AAMemoryLocation] Categorize memory access with pointer: "
8746 <<
I <<
" [" << *
Ptr <<
"]\n");
8747 categorizePtrValue(
A,
I, *
Ptr, AccessedLocs, Changed,
8748 Ptr->getType()->getPointerAddressSpace());
8749 return AccessedLocs.getAssumed();
8752 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Failed to categorize instruction: "
8754 updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &
I,
nullptr, Changed,
8755 getAccessKindFromInst(&
I));
8756 return AccessedLocs.getAssumed();
8760struct AAMemoryLocationFunction final :
public AAMemoryLocationImpl {
8762 : AAMemoryLocationImpl(IRP,
A) {}
8767 const auto *MemBehaviorAA =
8771 return indicateOptimisticFixpoint();
8773 "AAMemoryLocation was not read-none but AAMemoryBehavior was!");
8774 A.recordDependence(*MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
8775 return ChangeStatus::UNCHANGED;
8779 auto AssumedState = getAssumed();
8780 bool Changed =
false;
8783 MemoryLocationsKind MLK = categorizeAccessedLocations(
A,
I, Changed);
8784 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Accessed locations for " <<
I
8785 <<
": " << getMemoryLocationsAsStr(MLK) <<
"\n");
8786 removeAssumedBits(inverseLocation(MLK,
false,
false));
8789 return getAssumedNotAccessedLocation() != VALID_STATE;
8792 bool UsedAssumedInformation =
false;
8793 if (!
A.checkForAllReadWriteInstructions(CheckRWInst, *
this,
8794 UsedAssumedInformation))
8795 return indicatePessimisticFixpoint();
8797 Changed |= AssumedState != getAssumed();
8798 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
8802 void trackStatistics()
const override {
8805 else if (isAssumedArgMemOnly())
8807 else if (isAssumedInaccessibleMemOnly())
8809 else if (isAssumedInaccessibleOrArgMemOnly())
8815struct AAMemoryLocationCallSite final : AAMemoryLocationImpl {
8817 : AAMemoryLocationImpl(IRP,
A) {}
8830 return indicatePessimisticFixpoint();
8831 bool Changed =
false;
8834 updateStateAndAccessesMap(getState(), MLK,
I,
Ptr, Changed,
8835 getAccessKindFromInst(
I));
8838 if (!FnAA->checkForAllAccessesToMemoryKind(AccessPred, ALL_LOCATIONS))
8839 return indicatePessimisticFixpoint();
8840 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
8844 void trackStatistics()
const override {
8858 const std::string getAsStr(
Attributor *
A)
const override {
8859 std::string Str(
"AADenormalFPMath[");
8862 DenormalState Known = getKnown();
8863 if (Known.Mode.isValid())
8864 OS <<
"denormal-fp-math=" << Known.Mode;
8868 if (Known.ModeF32.isValid())
8869 OS <<
" denormal-fp-math-f32=" << Known.ModeF32;
8875struct AADenormalFPMathFunction final : AADenormalFPMathImpl {
8877 : AADenormalFPMathImpl(IRP,
A) {}
8889 Known = DenormalState{
Mode, ModeF32};
8900 <<
"->" << getAssociatedFunction()->
getName() <<
'\n');
8908 CallerInfo->getState());
8912 bool AllCallSitesKnown =
true;
8913 if (!
A.checkForAllCallSites(CheckCallSite, *
this,
true, AllCallSitesKnown))
8914 return indicatePessimisticFixpoint();
8916 if (Change == ChangeStatus::CHANGED && isModeFixed())
8922 LLVMContext &Ctx = getAssociatedFunction()->getContext();
8927 AttrToRemove.
push_back(
"denormal-fp-math");
8933 if (Known.ModeF32 != Known.Mode) {
8935 Attribute::get(Ctx,
"denormal-fp-math-f32", Known.ModeF32.str()));
8937 AttrToRemove.
push_back(
"denormal-fp-math-f32");
8940 auto &IRP = getIRPosition();
8943 return A.removeAttrs(IRP, AttrToRemove) |
8944 A.manifestAttrs(IRP, AttrToAdd,
true);
8947 void trackStatistics()
const override {
8963 if (
A.hasSimplificationCallback(getIRPosition())) {
8964 indicatePessimisticFixpoint();
8969 intersectKnown(getConstantRangeFromSCEV(
A, getCtxI()));
8972 intersectKnown(getConstantRangeFromLVI(
A, getCtxI()));
8976 const std::string getAsStr(
Attributor *
A)
const override {
8980 getKnown().print(
OS);
8982 getAssumed().print(
OS);
8990 if (!getAnchorScope())
9003 const SCEV *S = SE->
getSCEV(&getAssociatedValue());
9014 if (!getAnchorScope())
9021 const SCEV *S = getSCEV(
A,
I);
9033 if (!getAnchorScope())
9052 bool isValidCtxInstructionForOutsideAnalysis(
Attributor &
A,
9054 bool AllowAACtxI)
const {
9055 if (!CtxI || (!AllowAACtxI && CtxI == getCtxI()))
9066 if (
auto *
I = dyn_cast<Instruction>(&getAssociatedValue())) {
9080 const Instruction *CtxI =
nullptr)
const override {
9081 if (!isValidCtxInstructionForOutsideAnalysis(
A, CtxI,
9087 return getKnown().intersectWith(SCEVR).intersectWith(LVIR);
9093 const Instruction *CtxI =
nullptr)
const override {
9098 if (!isValidCtxInstructionForOutsideAnalysis(
A, CtxI,
9100 return getAssumed();
9104 return getAssumed().intersectWith(SCEVR).intersectWith(LVIR);
9112 Ty, AssumedConstantRange.
getLower())),
9114 Ty, AssumedConstantRange.
getUpper()))};
9136 mdconst::extract<ConstantInt>(KnownRanges->
getOperand(0));
9138 mdconst::extract<ConstantInt>(KnownRanges->
getOperand(1));
9141 return Known.contains(Assumed) && Known != Assumed;
9148 auto *OldRangeMD =
I->getMetadata(LLVMContext::MD_range);
9149 if (isBetterRange(AssumedConstantRange, OldRangeMD)) {
9151 I->setMetadata(LLVMContext::MD_range,
9152 getMDNodeForConstantRange(
I->getType(),
I->getContext(),
9153 AssumedConstantRange));
9166 auto &
V = getAssociatedValue();
9170 assert(
I == getCtxI() &&
"Should not annotate an instruction which is "
9171 "not the context instruction");
9172 if (isa<CallInst>(
I) || isa<LoadInst>(
I))
9173 if (setRangeMetadataIfisBetterRange(
I, AssumedConstantRange))
9174 Changed = ChangeStatus::CHANGED;
9182struct AAValueConstantRangeArgument final
9183 : AAArgumentFromCallSiteArguments<
9184 AAValueConstantRange, AAValueConstantRangeImpl, IntegerRangeState,
9186 using Base = AAArgumentFromCallSiteArguments<
9193 void trackStatistics()
const override {
9198struct AAValueConstantRangeReturned
9199 : AAReturnedFromReturnedValues<AAValueConstantRange,
9200 AAValueConstantRangeImpl,
9201 AAValueConstantRangeImpl::StateType,
9205 AAValueConstantRangeImpl,
9213 if (!
A.isFunctionIPOAmendable(*getAssociatedFunction()))
9214 indicatePessimisticFixpoint();
9218 void trackStatistics()
const override {
9223struct AAValueConstantRangeFloating : AAValueConstantRangeImpl {
9225 : AAValueConstantRangeImpl(IRP,
A) {}
9229 AAValueConstantRangeImpl::initialize(
A);
9233 Value &
V = getAssociatedValue();
9235 if (
auto *
C = dyn_cast<ConstantInt>(&V)) {
9237 indicateOptimisticFixpoint();
9241 if (isa<UndefValue>(&V)) {
9244 indicateOptimisticFixpoint();
9248 if (isa<CallBase>(&V))
9251 if (isa<BinaryOperator>(&V) || isa<CmpInst>(&V) || isa<CastInst>(&V))
9255 if (
LoadInst *LI = dyn_cast<LoadInst>(&V))
9256 if (
auto *RangeMD = LI->getMetadata(LLVMContext::MD_range)) {
9263 if (isa<SelectInst>(V) || isa<PHINode>(V))
9267 indicatePessimisticFixpoint();
9270 << getAssociatedValue() <<
"\n");
9273 bool calculateBinaryOperator(
9281 bool UsedAssumedInformation =
false;
9282 const auto &SimplifiedLHS =
A.getAssumedSimplified(
9285 if (!SimplifiedLHS.has_value())
9287 if (!*SimplifiedLHS)
9289 LHS = *SimplifiedLHS;
9291 const auto &SimplifiedRHS =
A.getAssumedSimplified(
9294 if (!SimplifiedRHS.has_value())
9296 if (!*SimplifiedRHS)
9298 RHS = *SimplifiedRHS;
9306 DepClassTy::REQUIRED);
9310 auto LHSAARange = LHSAA->getAssumedConstantRange(
A, CtxI);
9314 DepClassTy::REQUIRED);
9318 auto RHSAARange = RHSAA->getAssumedConstantRange(
A, CtxI);
9320 auto AssumedRange = LHSAARange.binaryOp(BinOp->
getOpcode(), RHSAARange);
9322 T.unionAssumed(AssumedRange);
9326 return T.isValidState();
9329 bool calculateCastInst(
9338 bool UsedAssumedInformation =
false;
9339 const auto &SimplifiedOpV =
A.getAssumedSimplified(
9342 if (!SimplifiedOpV.has_value())
9344 if (!*SimplifiedOpV)
9346 OpV = *SimplifiedOpV;
9353 DepClassTy::REQUIRED);
9357 T.unionAssumed(OpAA->getAssumed().castOp(CastI->
getOpcode(),
9359 return T.isValidState();
9370 bool UsedAssumedInformation =
false;
9371 const auto &SimplifiedLHS =
A.getAssumedSimplified(
9374 if (!SimplifiedLHS.has_value())
9376 if (!*SimplifiedLHS)
9378 LHS = *SimplifiedLHS;
9380 const auto &SimplifiedRHS =
A.getAssumedSimplified(
9383 if (!SimplifiedRHS.has_value())
9385 if (!*SimplifiedRHS)
9387 RHS = *SimplifiedRHS;
9395 DepClassTy::REQUIRED);
9401 DepClassTy::REQUIRED);
9405 auto LHSAARange = LHSAA->getAssumedConstantRange(
A, CtxI);
9406 auto RHSAARange = RHSAA->getAssumedConstantRange(
A, CtxI);
9409 if (LHSAARange.isEmptySet() || RHSAARange.isEmptySet())
9412 bool MustTrue =
false, MustFalse =
false;
9414 auto AllowedRegion =
9417 if (AllowedRegion.intersectWith(LHSAARange).isEmptySet())
9423 assert((!MustTrue || !MustFalse) &&
9424 "Either MustTrue or MustFalse should be false!");
9433 LLVM_DEBUG(
dbgs() <<
"[AAValueConstantRange] " << *CmpI <<
" after "
9434 << (MustTrue ?
"true" : (MustFalse ?
"false" :
"unknown"))
9435 <<
": " <<
T <<
"\n\t" << *LHSAA <<
"\t<op>\n\t"
9439 return T.isValidState();
9448 if (!
I || isa<CallBase>(
I)) {
9451 bool UsedAssumedInformation =
false;
9452 const auto &SimplifiedOpV =
A.getAssumedSimplified(
9455 if (!SimplifiedOpV.has_value())
9457 if (!*SimplifiedOpV)
9459 Value *VPtr = *SimplifiedOpV;
9464 DepClassTy::REQUIRED);
9468 T.unionAssumed(AA->getAssumedConstantRange(
A, CtxI));
9472 return T.isValidState();
9476 if (
auto *BinOp = dyn_cast<BinaryOperator>(
I)) {
9477 if (!calculateBinaryOperator(
A, BinOp,
T, CtxI, QuerriedAAs))
9479 }
else if (
auto *CmpI = dyn_cast<CmpInst>(
I)) {
9480 if (!calculateCmpInst(
A, CmpI,
T, CtxI, QuerriedAAs))
9482 }
else if (
auto *CastI = dyn_cast<CastInst>(
I)) {
9483 if (!calculateCastInst(
A, CastI,
T, CtxI, QuerriedAAs))
9489 T.indicatePessimisticFixpoint();
9497 if (QueriedAA !=
this)
9500 if (
T.getAssumed() == getState().getAssumed())
9502 T.indicatePessimisticFixpoint();
9505 return T.isValidState();
9508 if (!VisitValueCB(getAssociatedValue(), getCtxI()))
9509 return indicatePessimisticFixpoint();
9514 return ChangeStatus::UNCHANGED;
9515 if (++NumChanges > MaxNumChanges) {
9516 LLVM_DEBUG(
dbgs() <<
"[AAValueConstantRange] performed " << NumChanges
9517 <<
" but only " << MaxNumChanges
9518 <<
" are allowed to avoid cyclic reasoning.");
9519 return indicatePessimisticFixpoint();
9521 return ChangeStatus::CHANGED;
9525 void trackStatistics()
const override {
9534 static constexpr int MaxNumChanges = 5;
9537struct AAValueConstantRangeFunction : AAValueConstantRangeImpl {
9539 : AAValueConstantRangeImpl(IRP,
A) {}
9543 llvm_unreachable(
"AAValueConstantRange(Function|CallSite)::updateImpl will "
9551struct AAValueConstantRangeCallSite : AAValueConstantRangeFunction {
9553 : AAValueConstantRangeFunction(IRP,
A) {}
9559struct AAValueConstantRangeCallSiteReturned
9560 : AACalleeToCallSite<AAValueConstantRange, AAValueConstantRangeImpl,
9561 AAValueConstantRangeImpl::StateType,
9565 AAValueConstantRangeImpl::StateType,
9571 if (
CallInst *CI = dyn_cast<CallInst>(&getAssociatedValue()))
9572 if (
auto *RangeMD = CI->getMetadata(LLVMContext::MD_range))
9575 AAValueConstantRangeImpl::initialize(
A);
9579 void trackStatistics()
const override {
9583struct AAValueConstantRangeCallSiteArgument : AAValueConstantRangeFloating {
9585 : AAValueConstantRangeFloating(IRP,
A) {}
9589 return ChangeStatus::UNCHANGED;
9593 void trackStatistics()
const override {
9610 if (
A.hasSimplificationCallback(getIRPosition()))
9611 indicatePessimisticFixpoint();
9613 AAPotentialConstantValues::initialize(
A);
9617 bool &ContainsUndef,
bool ForSelf) {
9619 bool UsedAssumedInformation =
false;
9621 UsedAssumedInformation)) {
9629 *
this, IRP, DepClassTy::REQUIRED);
9630 if (!PotentialValuesAA || !PotentialValuesAA->getState().isValidState())
9632 ContainsUndef = PotentialValuesAA->getState().undefIsContained();
9633 S = PotentialValuesAA->getState().getAssumedSet();
9640 ContainsUndef =
false;
9641 for (
auto &It : Values) {
9642 if (isa<UndefValue>(It.getValue())) {
9643 ContainsUndef =
true;
9646 auto *CI = dyn_cast<ConstantInt>(It.getValue());
9649 S.insert(CI->getValue());
9651 ContainsUndef &= S.empty();
9657 const std::string getAsStr(
Attributor *
A)
const override {
9666 return indicatePessimisticFixpoint();
9670struct AAPotentialConstantValuesArgument final
9671 : AAArgumentFromCallSiteArguments<AAPotentialConstantValues,
9672 AAPotentialConstantValuesImpl,
9673 PotentialConstantIntValuesState> {
9675 AAPotentialConstantValuesImpl,
9681 void trackStatistics()
const override {
9686struct AAPotentialConstantValuesReturned
9687 : AAReturnedFromReturnedValues<AAPotentialConstantValues,
9688 AAPotentialConstantValuesImpl> {
9690 AAPotentialConstantValuesImpl>;
9695 if (!
A.isFunctionIPOAmendable(*getAssociatedFunction()))
9696 indicatePessimisticFixpoint();
9697 Base::initialize(
A);
9701 void trackStatistics()
const override {
9706struct AAPotentialConstantValuesFloating : AAPotentialConstantValuesImpl {
9708 : AAPotentialConstantValuesImpl(IRP,
A) {}
9712 AAPotentialConstantValuesImpl::initialize(
A);
9716 Value &
V = getAssociatedValue();
9718 if (
auto *
C = dyn_cast<ConstantInt>(&V)) {
9719 unionAssumed(
C->getValue());
9720 indicateOptimisticFixpoint();
9724 if (isa<UndefValue>(&V)) {
9725 unionAssumedWithUndef();
9726 indicateOptimisticFixpoint();
9730 if (isa<BinaryOperator>(&V) || isa<ICmpInst>(&V) || isa<CastInst>(&V))
9733 if (isa<SelectInst>(V) || isa<PHINode>(V) || isa<LoadInst>(V))
9736 indicatePessimisticFixpoint();
9739 << getAssociatedValue() <<
"\n");
9753 case Instruction::Trunc:
9754 return Src.trunc(ResultBitWidth);
9755 case Instruction::SExt:
9756 return Src.sext(ResultBitWidth);
9757 case Instruction::ZExt:
9758 return Src.zext(ResultBitWidth);
9759 case Instruction::BitCast:
9766 bool &SkipOperation,
bool &Unsupported) {
9773 switch (BinOpcode) {
9777 case Instruction::Add:
9779 case Instruction::Sub:
9781 case Instruction::Mul:
9783 case Instruction::UDiv:
9785 SkipOperation =
true;
9789 case Instruction::SDiv:
9791 SkipOperation =
true;
9795 case Instruction::URem:
9797 SkipOperation =
true;
9801 case Instruction::SRem:
9803 SkipOperation =
true;
9807 case Instruction::Shl:
9809 case Instruction::LShr:
9811 case Instruction::AShr:
9813 case Instruction::And:
9815 case Instruction::Or:
9817 case Instruction::Xor:
9822 bool calculateBinaryOperatorAndTakeUnion(
const BinaryOperator *BinOp,
9824 bool SkipOperation =
false;
9827 calculateBinaryOperator(BinOp,
LHS,
RHS, SkipOperation, Unsupported);
9832 unionAssumed(Result);
9833 return isValidState();
9837 auto AssumedBefore = getAssumed();
9841 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
9842 SetTy LHSAAPVS, RHSAAPVS;
9844 LHSContainsUndef,
false) ||
9846 RHSContainsUndef,
false))
9847 return indicatePessimisticFixpoint();
9850 bool MaybeTrue =
false, MaybeFalse =
false;
9852 if (LHSContainsUndef && RHSContainsUndef) {
9855 unionAssumedWithUndef();
9856 }
else if (LHSContainsUndef) {
9857 for (
const APInt &R : RHSAAPVS) {
9858 bool CmpResult = calculateICmpInst(ICI, Zero, R);
9859 MaybeTrue |= CmpResult;
9860 MaybeFalse |= !CmpResult;
9861 if (MaybeTrue & MaybeFalse)
9862 return indicatePessimisticFixpoint();
9864 }
else if (RHSContainsUndef) {
9865 for (
const APInt &L : LHSAAPVS) {
9866 bool CmpResult = calculateICmpInst(ICI, L, Zero);
9867 MaybeTrue |= CmpResult;
9868 MaybeFalse |= !CmpResult;
9869 if (MaybeTrue & MaybeFalse)
9870 return indicatePessimisticFixpoint();
9873 for (
const APInt &L : LHSAAPVS) {
9874 for (
const APInt &R : RHSAAPVS) {
9875 bool CmpResult = calculateICmpInst(ICI, L, R);
9876 MaybeTrue |= CmpResult;
9877 MaybeFalse |= !CmpResult;
9878 if (MaybeTrue & MaybeFalse)
9879 return indicatePessimisticFixpoint();
9884 unionAssumed(
APInt( 1, 1));
9886 unionAssumed(
APInt( 1, 0));
9887 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
9888 : ChangeStatus::CHANGED;
9892 auto AssumedBefore = getAssumed();
9896 bool UsedAssumedInformation =
false;
9897 std::optional<Constant *>
C =
A.getAssumedConstant(
9898 *
SI->getCondition(), *
this, UsedAssumedInformation);
9901 bool OnlyLeft =
false, OnlyRight =
false;
9902 if (
C && *
C && (*C)->isOneValue())
9904 else if (
C && *
C && (*C)->isZeroValue())
9907 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
9908 SetTy LHSAAPVS, RHSAAPVS;
9911 LHSContainsUndef,
false))
9912 return indicatePessimisticFixpoint();
9916 RHSContainsUndef,
false))
9917 return indicatePessimisticFixpoint();
9919 if (OnlyLeft || OnlyRight) {
9921 auto *OpAA = OnlyLeft ? &LHSAAPVS : &RHSAAPVS;
9922 auto Undef = OnlyLeft ? LHSContainsUndef : RHSContainsUndef;
9925 unionAssumedWithUndef();
9927 for (
const auto &It : *OpAA)
9931 }
else if (LHSContainsUndef && RHSContainsUndef) {
9933 unionAssumedWithUndef();
9935 for (
const auto &It : LHSAAPVS)
9937 for (
const auto &It : RHSAAPVS)
9940 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
9941 : ChangeStatus::CHANGED;
9945 auto AssumedBefore = getAssumed();
9947 return indicatePessimisticFixpoint();
9952 bool SrcContainsUndef =
false;
9955 SrcContainsUndef,
false))
9956 return indicatePessimisticFixpoint();
9958 if (SrcContainsUndef)
9959 unionAssumedWithUndef();
9961 for (
const APInt &S : SrcPVS) {
9962 APInt T = calculateCastInst(CI, S, ResultBitWidth);
9966 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
9967 : ChangeStatus::CHANGED;
9971 auto AssumedBefore = getAssumed();
9975 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
9976 SetTy LHSAAPVS, RHSAAPVS;
9978 LHSContainsUndef,
false) ||
9980 RHSContainsUndef,
false))
9981 return indicatePessimisticFixpoint();
9986 if (LHSContainsUndef && RHSContainsUndef) {
9987 if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, Zero))
9988 return indicatePessimisticFixpoint();
9989 }
else if (LHSContainsUndef) {
9990 for (
const APInt &R : RHSAAPVS) {
9991 if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, R))
9992 return indicatePessimisticFixpoint();
9994 }
else if (RHSContainsUndef) {
9995 for (
const APInt &L : LHSAAPVS) {
9996 if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, Zero))
9997 return indicatePessimisticFixpoint();
10000 for (
const APInt &L : LHSAAPVS) {
10001 for (
const APInt &R : RHSAAPVS) {
10002 if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, R))
10003 return indicatePessimisticFixpoint();
10007 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10008 : ChangeStatus::CHANGED;
10012 auto AssumedBefore = getAssumed();
10014 bool ContainsUndef;
10016 ContainsUndef,
true))
10017 return indicatePessimisticFixpoint();
10018 if (ContainsUndef) {
10019 unionAssumedWithUndef();
10024 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10025 : ChangeStatus::CHANGED;
10030 Value &
V = getAssociatedValue();
10033 if (
auto *ICI = dyn_cast<ICmpInst>(
I))
10034 return updateWithICmpInst(
A, ICI);
10036 if (
auto *SI = dyn_cast<SelectInst>(
I))
10037 return updateWithSelectInst(
A, SI);
10039 if (
auto *CI = dyn_cast<CastInst>(
I))
10040 return updateWithCastInst(
A, CI);
10042 if (
auto *BinOp = dyn_cast<BinaryOperator>(
I))
10043 return updateWithBinaryOperator(
A, BinOp);
10045 if (isa<PHINode>(
I) || isa<LoadInst>(
I))
10046 return updateWithInstruction(
A,
I);
10048 return indicatePessimisticFixpoint();
10052 void trackStatistics()
const override {
10057struct AAPotentialConstantValuesFunction : AAPotentialConstantValuesImpl {
10059 : AAPotentialConstantValuesImpl(IRP,
A) {}
10064 "AAPotentialConstantValues(Function|CallSite)::updateImpl will "
10069 void trackStatistics()
const override {
10074struct AAPotentialConstantValuesCallSite : AAPotentialConstantValuesFunction {
10076 : AAPotentialConstantValuesFunction(IRP,
A) {}
10079 void trackStatistics()
const override {
10084struct AAPotentialConstantValuesCallSiteReturned
10085 : AACalleeToCallSite<AAPotentialConstantValues,
10086 AAPotentialConstantValuesImpl> {
10087 AAPotentialConstantValuesCallSiteReturned(
const IRPosition &IRP,
10090 AAPotentialConstantValuesImpl>(IRP,
A) {}
10093 void trackStatistics()
const override {
10098struct AAPotentialConstantValuesCallSiteArgument
10099 : AAPotentialConstantValuesFloating {
10100 AAPotentialConstantValuesCallSiteArgument(
const IRPosition &IRP,
10102 : AAPotentialConstantValuesFloating(IRP,
A) {}
10106 AAPotentialConstantValuesImpl::initialize(
A);
10107 if (isAtFixpoint())
10110 Value &
V = getAssociatedValue();
10112 if (
auto *
C = dyn_cast<ConstantInt>(&V)) {
10113 unionAssumed(
C->getValue());
10114 indicateOptimisticFixpoint();
10118 if (isa<UndefValue>(&V)) {
10119 unionAssumedWithUndef();
10120 indicateOptimisticFixpoint();
10127 Value &
V = getAssociatedValue();
10128 auto AssumedBefore = getAssumed();
10132 return indicatePessimisticFixpoint();
10133 const auto &S = AA->getAssumed();
10135 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10136 : ChangeStatus::CHANGED;
10140 void trackStatistics()
const override {
10149 bool IgnoreSubsumingPositions) {
10150 assert(ImpliedAttributeKind == Attribute::NoUndef &&
10151 "Unexpected attribute kind");
10152 if (
A.hasAttr(IRP, {Attribute::NoUndef}, IgnoreSubsumingPositions,
10153 Attribute::NoUndef))
10173 Value &V = getAssociatedValue();
10174 if (isa<UndefValue>(V))
10175 indicatePessimisticFixpoint();
10176 assert(!isImpliedByIR(
A, getIRPosition(), Attribute::NoUndef));
10182 const Value *UseV =
U->get();
10191 bool TrackUse =
false;
10194 if (isa<CastInst>(*
I) || isa<GetElementPtrInst>(*
I))
10200 const std::string getAsStr(
Attributor *
A)
const override {
10201 return getAssumed() ?
"noundef" :
"may-undef-or-poison";
10208 bool UsedAssumedInformation =
false;
10209 if (
A.isAssumedDead(getIRPosition(),
nullptr,
nullptr,
10210 UsedAssumedInformation))
10211 return ChangeStatus::UNCHANGED;
10215 if (!
A.getAssumedSimplified(getIRPosition(), *
this, UsedAssumedInformation,
10218 return ChangeStatus::UNCHANGED;
10219 return AANoUndef::manifest(
A);
10223struct AANoUndefFloating :
public AANoUndefImpl {
10225 : AANoUndefImpl(IRP,
A) {}
10229 AANoUndefImpl::initialize(
A);
10230 if (!getState().isAtFixpoint() && getAnchorScope() &&
10231 !getAnchorScope()->isDeclaration())
10233 followUsesInMBEC(*
this,
A, getState(), *CtxI);
10238 auto VisitValueCB = [&](
const IRPosition &IRP) ->
bool {
10239 bool IsKnownNoUndef;
10240 return AA::hasAssumedIRAttr<Attribute::NoUndef>(
10241 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoUndef);
10245 bool UsedAssumedInformation =
false;
10246 Value *AssociatedValue = &getAssociatedValue();
10248 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
10253 Values.
size() != 1 || Values.
front().getValue() != AssociatedValue;
10261 if (AVIRP == getIRPosition() || !VisitValueCB(AVIRP))
10262 return indicatePessimisticFixpoint();
10263 return ChangeStatus::UNCHANGED;
10266 for (
const auto &VAC : Values)
10268 return indicatePessimisticFixpoint();
10270 return ChangeStatus::UNCHANGED;
10277struct AANoUndefReturned final
10278 : AAReturnedFromReturnedValues<AANoUndef, AANoUndefImpl> {
10280 : AAReturnedFromReturnedValues<
AANoUndef, AANoUndefImpl>(IRP,
A) {}
10286struct AANoUndefArgument final
10287 : AAArgumentFromCallSiteArguments<AANoUndef, AANoUndefImpl> {
10289 : AAArgumentFromCallSiteArguments<
AANoUndef, AANoUndefImpl>(IRP,
A) {}
10295struct AANoUndefCallSiteArgument final : AANoUndefFloating {
10297 : AANoUndefFloating(IRP,
A) {}
10303struct AANoUndefCallSiteReturned final
10304 : AACalleeToCallSite<AANoUndef, AANoUndefImpl> {
10306 : AACalleeToCallSite<
AANoUndef, AANoUndefImpl>(IRP,
A) {}
10321 if (isa<UndefValue>(V)) {
10322 indicateOptimisticFixpoint();
10327 A.getAttrs(getIRPosition(), {Attribute::NoFPClass},
Attrs,
false);
10328 for (
const auto &Attr : Attrs) {
10339 followUsesInMBEC(*
this,
A, getState(), *CtxI);
10346 auto *CB = dyn_cast<CallBase>(
I);
10355 if (
auto *NoFPAA =
A.getAAFor<
AANoFPClass>(*
this, IRP, DepClassTy::NONE))
10356 State.addKnownBits(NoFPAA->getState().getKnown());
10360 const std::string getAsStr(
Attributor *
A)
const override {
10361 std::string
Result =
"nofpclass";
10363 OS << getKnownNoFPClass() <<
'/' << getAssumedNoFPClass();
10373struct AANoFPClassFloating :
public AANoFPClassImpl {
10375 : AANoFPClassImpl(IRP,
A) {}
10380 bool UsedAssumedInformation =
false;
10381 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
10383 Values.
push_back({getAssociatedValue(), getCtxI()});
10389 DepClassTy::REQUIRED);
10390 if (!AA ||
this == AA) {
10391 T.indicatePessimisticFixpoint();
10397 return T.isValidState();
10400 for (
const auto &VAC : Values)
10401 if (!VisitValueCB(*
VAC.getValue(),
VAC.getCtxI()))
10402 return indicatePessimisticFixpoint();
10408 void trackStatistics()
const override {
10413struct AANoFPClassReturned final
10414 : AAReturnedFromReturnedValues<AANoFPClass, AANoFPClassImpl,
10415 AANoFPClassImpl::StateType, false,
10416 Attribute::None, false> {
10418 : AAReturnedFromReturnedValues<
AANoFPClass, AANoFPClassImpl,
10419 AANoFPClassImpl::StateType,
false,
10423 void trackStatistics()
const override {
10428struct AANoFPClassArgument final
10429 : AAArgumentFromCallSiteArguments<AANoFPClass, AANoFPClassImpl> {
10431 : AAArgumentFromCallSiteArguments<
AANoFPClass, AANoFPClassImpl>(IRP,
A) {}
10437struct AANoFPClassCallSiteArgument final : AANoFPClassFloating {
10439 : AANoFPClassFloating(IRP,
A) {}
10442 void trackStatistics()
const override {
10447struct AANoFPClassCallSiteReturned final
10448 : AACalleeToCallSite<AANoFPClass, AANoFPClassImpl> {
10450 : AACalleeToCallSite<
AANoFPClass, AANoFPClassImpl>(IRP,
A) {}
10453 void trackStatistics()
const override {
10462 return CalledFunctions;
10465 bool hasUnknownCallee()
const override {
return HasUnknownCallee; }
10467 bool hasNonAsmUnknownCallee()
const override {
10468 return HasUnknownCalleeNonAsm;
10471 const std::string getAsStr(
Attributor *
A)
const override {
10472 return "CallEdges[" + std::to_string(HasUnknownCallee) +
"," +
10473 std::to_string(CalledFunctions.size()) +
"]";
10476 void trackStatistics()
const override {}
10480 if (CalledFunctions.insert(Fn)) {
10481 Change = ChangeStatus::CHANGED;
10487 void setHasUnknownCallee(
bool NonAsm,
ChangeStatus &Change) {
10488 if (!HasUnknownCallee)
10489 Change = ChangeStatus::CHANGED;
10490 if (NonAsm && !HasUnknownCalleeNonAsm)
10491 Change = ChangeStatus::CHANGED;
10492 HasUnknownCalleeNonAsm |= NonAsm;
10493 HasUnknownCallee =
true;
10501 bool HasUnknownCallee =
false;
10504 bool HasUnknownCalleeNonAsm =
false;
10507struct AACallEdgesCallSite :
public AACallEdgesImpl {
10509 : AACallEdgesImpl(IRP,
A) {}
10515 if (
Function *Fn = dyn_cast<Function>(&V)) {
10516 addCalledFunction(Fn, Change);
10518 LLVM_DEBUG(
dbgs() <<
"[AACallEdges] Unrecognized value: " << V <<
"\n");
10519 setHasUnknownCallee(
true, Change);
10529 if (isa<Constant>(V)) {
10530 VisitValue(*V, CtxI);
10534 bool UsedAssumedInformation =
false;
10540 for (
auto &VAC : Values)
10541 VisitValue(*
VAC.getValue(),
VAC.getCtxI());
10544 CallBase *CB = cast<CallBase>(getCtxI());
10547 if (
IA->hasSideEffects() &&
10550 setHasUnknownCallee(
false, Change);
10557 *
this, getIRPosition(), DepClassTy::OPTIONAL))
10558 if (IndirectCallAA->foreachCallee(
10559 [&](
Function *Fn) { return VisitValue(*Fn, CB); }))
10568 for (
const Use *U : CallbackUses)
10569 ProcessCalledOperand(
U->get(), CB);
10575struct AACallEdgesFunction :
public AACallEdgesImpl {
10577 : AACallEdgesImpl(IRP,
A) {}
10584 CallBase &CB = cast<CallBase>(Inst);
10590 if (CBEdges->hasNonAsmUnknownCallee())
10591 setHasUnknownCallee(
true, Change);
10592 if (CBEdges->hasUnknownCallee())
10593 setHasUnknownCallee(
false, Change);
10595 for (
Function *
F : CBEdges->getOptimisticEdges())
10596 addCalledFunction(
F, Change);
10602 bool UsedAssumedInformation =
false;
10603 if (!
A.checkForAllCallLikeInstructions(ProcessCallInst, *
this,
10604 UsedAssumedInformation,
10608 setHasUnknownCallee(
true, Change);
10617struct AAInterFnReachabilityFunction
10618 :
public CachedReachabilityAA<AAInterFnReachability, Function> {
10619 using Base = CachedReachabilityAA<AAInterFnReachability, Function>;
10623 bool instructionCanReach(
10626 assert(
From.getFunction() == getAnchorScope() &&
"Queried the wrong AA!");
10627 auto *NonConstThis =
const_cast<AAInterFnReachabilityFunction *
>(
this);
10629 RQITy StackRQI(
A,
From, To, ExclusionSet,
false);
10630 typename RQITy::Reachable
Result;
10631 if (!NonConstThis->checkQueryCache(
A, StackRQI, Result))
10632 return NonConstThis->isReachableImpl(
A, StackRQI,
10634 return Result == RQITy::Reachable::Yes;
10638 bool IsTemporaryRQI)
override {
10641 if (EntryI != RQI.From &&
10642 !instructionCanReach(
A, *EntryI, *RQI.To,
nullptr))
10643 return rememberResult(
A, RQITy::Reachable::No, RQI,
false,
10646 auto CheckReachableCallBase = [&](
CallBase *CB) {
10649 if (!CBEdges || !CBEdges->getState().isValidState())
10652 if (CBEdges->hasUnknownCallee())
10655 for (
Function *Fn : CBEdges->getOptimisticEdges()) {
10666 if (Fn == getAnchorScope()) {
10667 if (EntryI == RQI.From)
10674 DepClassTy::OPTIONAL);
10677 if (!InterFnReachability ||
10687 DepClassTy::OPTIONAL);
10693 if (CheckReachableCallBase(cast<CallBase>(&CBInst)))
10696 A, *RQI.From, CBInst, RQI.ExclusionSet);
10699 bool UsedExclusionSet =
true;
10700 bool UsedAssumedInformation =
false;
10701 if (!
A.checkForAllCallLikeInstructions(CheckCallBase, *
this,
10702 UsedAssumedInformation,
10704 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
10707 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
10711 void trackStatistics()
const override {}
10715template <
typename AAType>
10716static std::optional<Constant *>
10727 std::optional<Constant *> COpt = AA->getAssumedConstant(
A);
10729 if (!COpt.has_value()) {
10731 return std::nullopt;
10733 if (
auto *
C = *COpt) {
10744 std::optional<Value *> V;
10745 for (
auto &It : Values) {
10747 if (V.has_value() && !*V)
10750 if (!V.has_value())
10764 if (
A.hasSimplificationCallback(getIRPosition())) {
10765 indicatePessimisticFixpoint();
10768 Value *Stripped = getAssociatedValue().stripPointerCasts();
10769 if (isa<Constant>(Stripped) && !isa<ConstantExpr>(Stripped)) {
10770 addValue(
A, getState(), *Stripped, getCtxI(),
AA::AnyScope,
10772 indicateOptimisticFixpoint();
10775 AAPotentialValues::initialize(
A);
10779 const std::string getAsStr(
Attributor *
A)
const override {
10786 template <
typename AAType>
10787 static std::optional<Value *> askOtherAA(
Attributor &
A,
10792 std::optional<Constant *>
C = askForAssumedConstant<AAType>(
A, AA, IRP, Ty);
10794 return std::nullopt;
10806 if (
auto *CB = dyn_cast_or_null<CallBase>(CtxI)) {
10807 for (
const auto &U : CB->
args()) {
10817 Type &Ty = *getAssociatedType();
10818 std::optional<Value *> SimpleV =
10819 askOtherAA<AAValueConstantRange>(
A, *
this, ValIRP, Ty);
10820 if (SimpleV.has_value() && !*SimpleV) {
10822 *
this, ValIRP, DepClassTy::OPTIONAL);
10823 if (PotentialConstantsAA && PotentialConstantsAA->isValidState()) {
10824 for (
const auto &It : PotentialConstantsAA->getAssumedSet())
10825 State.
unionAssumed({{*ConstantInt::get(&Ty, It),
nullptr}, S});
10826 if (PotentialConstantsAA->undefIsContained())
10831 if (!SimpleV.has_value())
10838 if (isa<ConstantInt>(VPtr))
10843 State.unionAssumed({{*VPtr, CtxI}, S});
10853 return II.I ==
I &&
II.S == S;
10868 bool UsedAssumedInformation =
false;
10870 if (!
A.getAssumedSimplifiedValues(IRP,
this, Values, CS,
10871 UsedAssumedInformation))
10874 for (
auto &It : Values)
10875 ValueScopeMap[It] += CS;
10877 for (
auto &It : ValueScopeMap)
10878 addValue(
A, getState(), *It.first.getValue(), It.first.getCtxI(),
10885 auto NewS = StateType::getBestState(getState());
10886 for (
const auto &It : getAssumedSet()) {
10889 addValue(
A, NewS, *It.first.getValue(), It.first.getCtxI(),
10892 assert(!undefIsContained() &&
"Undef should be an explicit value!");
10900 getState() = StateType::getBestState(getState());
10901 getState().unionAssumed({{getAssociatedValue(), getCtxI()},
AA::AnyScope});
10908 return indicatePessimisticFixpoint();
10916 if (!getAssumedSimplifiedValues(
A, Values, S))
10918 Value &OldV = getAssociatedValue();
10919 if (isa<UndefValue>(OldV))
10921 Value *NewV = getSingleValue(
A, *
this, getIRPosition(), Values);
10922 if (!NewV || NewV == &OldV)
10927 if (
A.changeAfterManifest(getIRPosition(), *NewV))
10933 bool getAssumedSimplifiedValues(
10935 AA::ValueScope S,
bool RecurseForSelectAndPHI =
false)
const override {
10936 if (!isValidState())
10938 bool UsedAssumedInformation =
false;
10939 for (
const auto &It : getAssumedSet())
10940 if (It.second & S) {
10941 if (RecurseForSelectAndPHI && (isa<PHINode>(It.first.getValue()) ||
10942 isa<SelectInst>(It.first.getValue()))) {
10943 if (
A.getAssumedSimplifiedValues(
10945 this, Values, S, UsedAssumedInformation))
10950 assert(!undefIsContained() &&
"Undef should be an explicit value!");
10955struct AAPotentialValuesFloating : AAPotentialValuesImpl {
10957 : AAPotentialValuesImpl(IRP,
A) {}
10961 auto AssumedBefore = getAssumed();
10963 genericValueTraversal(
A, &getAssociatedValue());
10965 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
10966 : ChangeStatus::CHANGED;
10970 struct LivenessInfo {
10971 const AAIsDead *LivenessAA =
nullptr;
10972 bool AnyDead =
false;
10985 bool UsedAssumedInformation =
false;
10987 auto GetSimplifiedValues = [&](
Value &
V,
10989 if (!
A.getAssumedSimplifiedValues(
10995 return Values.
empty();
10997 if (GetSimplifiedValues(*
LHS, LHSValues))
10999 if (GetSimplifiedValues(*
RHS, RHSValues))
11011 F ?
A.getInfoCache().getTargetLibraryInfoForFunction(*
F) :
nullptr;
11019 auto CheckPair = [&](
Value &LHSV,
Value &RHSV) {
11020 if (isa<UndefValue>(LHSV) || isa<UndefValue>(RHSV)) {
11022 nullptr,
II.S, getAnchorScope());
11028 if (&LHSV == &RHSV &&
11032 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11039 if (TypedLHS && TypedRHS) {
11041 if (NewV && NewV != &Cmp) {
11042 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11052 bool LHSIsNull = isa<ConstantPointerNull>(LHSV);
11053 bool RHSIsNull = isa<ConstantPointerNull>(RHSV);
11054 if (!LHSIsNull && !RHSIsNull)
11060 assert((LHSIsNull || RHSIsNull) &&
11061 "Expected nullptr versus non-nullptr comparison at this point");
11064 unsigned PtrIdx = LHSIsNull;
11065 bool IsKnownNonNull;
11066 bool IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
11068 DepClassTy::REQUIRED, IsKnownNonNull);
11069 if (!IsAssumedNonNull)
11075 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11080 for (
auto &LHSValue : LHSValues)
11081 for (
auto &RHSValue : RHSValues)
11082 if (!CheckPair(*LHSValue.getValue(), *RHSValue.getValue()))
11090 bool UsedAssumedInformation =
false;
11092 std::optional<Constant *>
C =
11093 A.getAssumedConstant(*
SI.getCondition(), *
this, UsedAssumedInformation);
11094 bool NoValueYet = !
C.has_value();
11095 if (NoValueYet || isa_and_nonnull<UndefValue>(*
C))
11097 if (
auto *CI = dyn_cast_or_null<ConstantInt>(*
C)) {
11102 }
else if (&SI == &getAssociatedValue()) {
11107 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
11109 if (!SimpleV.has_value())
11112 addValue(
A, getState(), **SimpleV, CtxI,
II.S, getAnchorScope());
11124 bool UsedAssumedInformation =
false;
11126 PotentialValueOrigins, *
this,
11127 UsedAssumedInformation,
11129 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Failed to get potentially "
11130 "loaded values for load instruction "
11141 if (!
I || isa<AssumeInst>(
I))
11143 if (
auto *SI = dyn_cast<StoreInst>(
I))
11144 return A.isAssumedDead(
SI->getOperandUse(0),
this,
11146 UsedAssumedInformation,
11148 return A.isAssumedDead(*
I,
this,
nullptr,
11149 UsedAssumedInformation,
11152 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Load is onl used by assumes "
11153 "and we cannot delete all the stores: "
11164 bool AllLocal = ScopeIsLocal;
11169 if (!DynamicallyUnique) {
11170 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Not all potentially loaded "
11171 "values are dynamically unique: "
11176 for (
auto *PotentialCopy : PotentialCopies) {
11178 Worklist.
push_back({{*PotentialCopy, CtxI},
II.S});
11183 if (!AllLocal && ScopeIsLocal)
11188 bool handlePHINode(
11192 auto GetLivenessInfo = [&](
const Function &
F) -> LivenessInfo & {
11193 LivenessInfo &LI = LivenessAAs[&
F];
11194 if (!LI.LivenessAA)
11200 if (&
PHI == &getAssociatedValue()) {
11201 LivenessInfo &LI = GetLivenessInfo(*
PHI.getFunction());
11204 *
PHI.getFunction());
11208 for (
unsigned u = 0, e =
PHI.getNumIncomingValues(); u < e; u++) {
11210 if (LI.LivenessAA &&
11211 LI.LivenessAA->isEdgeDead(IncomingBB,
PHI.getParent())) {
11221 if (CyclePHI && isa<Instruction>(V) &&
11222 (!
C ||
C->contains(cast<Instruction>(V)->getParent())))
11230 bool UsedAssumedInformation =
false;
11231 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
11233 if (!SimpleV.has_value())
11237 addValue(
A, getState(), **SimpleV, &
PHI,
II.S, getAnchorScope());
11246 bool SomeSimplified =
false;
11247 bool UsedAssumedInformation =
false;
11252 const auto &SimplifiedOp =
A.getAssumedSimplified(
11257 if (!SimplifiedOp.has_value())
11261 NewOps[
Idx] = *SimplifiedOp;
11265 SomeSimplified |= (NewOps[
Idx] !=
Op);
11271 if (!SomeSimplified)
11278 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
11284 if (!NewV || NewV == &
I)
11287 LLVM_DEBUG(
dbgs() <<
"Generic inst " <<
I <<
" assumed simplified to "
11297 if (
auto *CI = dyn_cast<CmpInst>(&
I))
11299 CI->getPredicate(),
II, Worklist);
11301 switch (
I.getOpcode()) {
11302 case Instruction::Select:
11303 return handleSelectInst(
A, cast<SelectInst>(
I),
II, Worklist);
11304 case Instruction::PHI:
11305 return handlePHINode(
A, cast<PHINode>(
I),
II, Worklist, LivenessAAs);
11306 case Instruction::Load:
11307 return handleLoadInst(
A, cast<LoadInst>(
I),
II, Worklist);
11309 return handleGenericInst(
A,
I,
II, Worklist);
11336 LLVM_DEBUG(
dbgs() <<
"Generic value traversal reached iteration limit: "
11337 << Iteration <<
"!\n");
11338 addValue(
A, getState(), *V, CtxI, S, getAnchorScope());
11344 Value *NewV =
nullptr;
11345 if (
V->getType()->isPointerTy()) {
11348 if (
auto *CB = dyn_cast<CallBase>(V))
11358 if (NewV && NewV != V) {
11359 Worklist.
push_back({{*NewV, CtxI}, S});
11363 if (
auto *
I = dyn_cast<Instruction>(V)) {
11368 if (V != InitialV || isa<Argument>(V))
11373 if (V == InitialV && CtxI == getCtxI()) {
11374 indicatePessimisticFixpoint();
11378 addValue(
A, getState(), *V, CtxI, S, getAnchorScope());
11379 }
while (!Worklist.
empty());
11383 for (
auto &It : LivenessAAs)
11384 if (It.second.AnyDead)
11385 A.recordDependence(*It.second.LivenessAA, *
this, DepClassTy::OPTIONAL);
11389 void trackStatistics()
const override {
11394struct AAPotentialValuesArgument final : AAPotentialValuesImpl {
11395 using Base = AAPotentialValuesImpl;
11401 auto &Arg = cast<Argument>(getAssociatedValue());
11403 indicatePessimisticFixpoint();
11408 auto AssumedBefore = getAssumed();
11410 unsigned ArgNo = getCalleeArgNo();
11412 bool UsedAssumedInformation =
false;
11416 if (CSArgIRP.getPositionKind() == IRP_INVALID)
11419 if (!
A.getAssumedSimplifiedValues(CSArgIRP,
this, Values,
11421 UsedAssumedInformation))
11424 return isValidState();
11427 if (!
A.checkForAllCallSites(CallSitePred, *
this,
11429 UsedAssumedInformation))
11430 return indicatePessimisticFixpoint();
11432 Function *Fn = getAssociatedFunction();
11433 bool AnyNonLocal =
false;
11434 for (
auto &It : Values) {
11435 if (isa<Constant>(It.getValue())) {
11436 addValue(
A, getState(), *It.getValue(), It.getCtxI(),
AA::AnyScope,
11441 return indicatePessimisticFixpoint();
11443 if (
auto *Arg = dyn_cast<Argument>(It.getValue()))
11445 addValue(
A, getState(), *It.getValue(), It.getCtxI(),
AA::AnyScope,
11451 AnyNonLocal =
true;
11453 assert(!undefIsContained() &&
"Undef should be an explicit value!");
11455 giveUpOnIntraprocedural(
A);
11457 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11458 : ChangeStatus::CHANGED;
11462 void trackStatistics()
const override {
11467struct AAPotentialValuesReturned :
public AAPotentialValuesFloating {
11468 using Base = AAPotentialValuesFloating;
11475 if (!
F ||
F->isDeclaration() ||
F->getReturnType()->isVoidTy()) {
11476 indicatePessimisticFixpoint();
11483 ReturnedArg = &Arg;
11486 if (!
A.isFunctionIPOAmendable(*
F) ||
11487 A.hasSimplificationCallback(getIRPosition())) {
11489 indicatePessimisticFixpoint();
11491 indicateOptimisticFixpoint();
11497 auto AssumedBefore = getAssumed();
11498 bool UsedAssumedInformation =
false;
11501 Function *AnchorScope = getAnchorScope();
11507 UsedAssumedInformation,
11513 addValue(
A, getState(), *
VAC.getValue(),
11514 VAC.getCtxI() ?
VAC.getCtxI() : CtxI, S, AnchorScope);
11520 HandleReturnedValue(*ReturnedArg,
nullptr,
true);
11523 bool AddValues =
true;
11524 if (isa<PHINode>(RetI.getOperand(0)) ||
11525 isa<SelectInst>(RetI.getOperand(0))) {
11526 addValue(
A, getState(), *RetI.getOperand(0), &RetI,
AA::AnyScope,
11530 return HandleReturnedValue(*RetI.getOperand(0), &RetI, AddValues);
11533 if (!
A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
11534 UsedAssumedInformation,
11536 return indicatePessimisticFixpoint();
11539 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11540 : ChangeStatus::CHANGED;
11545 Function *AnchorScope)
const override {
11547 if (
auto *CB = dyn_cast<CallBase>(&V))
11550 Base::addValue(
A, State, V, CtxI, S, AnchorScope);
11555 return ChangeStatus::UNCHANGED;
11557 if (!getAssumedSimplifiedValues(
A, Values, AA::ValueScope::Intraprocedural,
11559 return ChangeStatus::UNCHANGED;
11560 Value *NewVal = getSingleValue(
A, *
this, getIRPosition(), Values);
11562 return ChangeStatus::UNCHANGED;
11565 if (
auto *Arg = dyn_cast<Argument>(NewVal)) {
11567 "Number of function with unique return");
11568 Changed |=
A.manifestAttrs(
11575 Value *RetOp = RetI.getOperand(0);
11576 if (isa<UndefValue>(RetOp) || RetOp == NewVal)
11579 if (
A.changeUseAfterManifest(RetI.getOperandUse(0), *NewVal))
11580 Changed = ChangeStatus::CHANGED;
11583 bool UsedAssumedInformation =
false;
11584 (void)
A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
11585 UsedAssumedInformation,
11595 void trackStatistics()
const override{
11602struct AAPotentialValuesFunction : AAPotentialValuesImpl {
11604 : AAPotentialValuesImpl(IRP,
A) {}
11613 void trackStatistics()
const override {
11618struct AAPotentialValuesCallSite : AAPotentialValuesFunction {
11620 : AAPotentialValuesFunction(IRP,
A) {}
11623 void trackStatistics()
const override {
11628struct AAPotentialValuesCallSiteReturned : AAPotentialValuesImpl {
11630 : AAPotentialValuesImpl(IRP,
A) {}
11634 auto AssumedBefore = getAssumed();
11638 return indicatePessimisticFixpoint();
11640 bool UsedAssumedInformation =
false;
11641 auto *CB = cast<CallBase>(getCtxI());
11644 UsedAssumedInformation))
11645 return indicatePessimisticFixpoint();
11650 UsedAssumedInformation))
11651 return indicatePessimisticFixpoint();
11655 bool AnyNonLocal =
false;
11656 for (
auto &It : Values) {
11657 Value *
V = It.getValue();
11658 std::optional<Value *> CallerV =
A.translateArgumentToCallSiteContent(
11659 V, *CB, *
this, UsedAssumedInformation);
11660 if (!CallerV.has_value()) {
11664 V = *CallerV ? *CallerV :
V;
11670 if (
auto *Arg = dyn_cast<Argument>(V))
11676 addValue(
A, getState(), *V, CB,
AA::AnyScope, getAnchorScope());
11678 AnyNonLocal =
true;
11686 UsedAssumedInformation))
11687 return indicatePessimisticFixpoint();
11688 AnyNonLocal =
false;
11690 for (
auto &It : Values) {
11691 Value *
V = It.getValue();
11693 return indicatePessimisticFixpoint();
11695 addValue(
A, getState(), *V, CB,
AA::AnyScope, getAnchorScope());
11697 AnyNonLocal =
true;
11703 giveUpOnIntraprocedural(
A);
11705 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11706 : ChangeStatus::CHANGED;
11710 return AAPotentialValues::indicatePessimisticFixpoint();
11714 void trackStatistics()
const override {
11719struct AAPotentialValuesCallSiteArgument : AAPotentialValuesFloating {
11721 : AAPotentialValuesFloating(IRP,
A) {}
11724 void trackStatistics()
const override {
11740 if (getKnown().isUniversal())
11741 return ChangeStatus::UNCHANGED;
11745 getAssumed().getSet().
end());
11747 return A.manifestAttrs(IRP,
11750 llvm::join(Set,
",")),
11755 return isValidState() && setContains(Assumption);
11759 const std::string getAsStr(
Attributor *
A)
const override {
11760 const SetContents &Known = getKnown();
11761 const SetContents &Assumed = getAssumed();
11765 const std::string KnownStr = llvm::join(Set,
",");
11767 std::string AssumedStr =
"Universal";
11768 if (!Assumed.isUniversal()) {
11769 Set.assign(Assumed.getSet().begin(), Assumed.getSet().end());
11770 AssumedStr = llvm::join(Set,
",");
11772 return "Known [" + KnownStr +
"]," +
" Assumed [" + AssumedStr +
"]";
11787struct AAAssumptionInfoFunction final : AAAssumptionInfoImpl {
11789 : AAAssumptionInfoImpl(IRP,
A,
11794 bool Changed =
false;
11799 DepClassTy::REQUIRED);
11803 Changed |= getIntersection(AssumptionAA->getAssumed());
11804 return !getAssumed().empty() || !getKnown().empty();
11807 bool UsedAssumedInformation =
false;
11812 if (!
A.checkForAllCallSites(CallSitePred, *
this,
true,
11813 UsedAssumedInformation))
11814 return indicatePessimisticFixpoint();
11816 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11819 void trackStatistics()
const override {}
11823struct AAAssumptionInfoCallSite final : AAAssumptionInfoImpl {
11826 : AAAssumptionInfoImpl(IRP,
A, getInitialAssumptions(IRP)) {}
11837 auto *AssumptionAA =
11840 return indicatePessimisticFixpoint();
11841 bool Changed = getIntersection(AssumptionAA->getAssumed());
11842 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11846 void trackStatistics()
const override {}
11858 return Assumptions;
11873struct AAUnderlyingObjectsImpl
11879 const std::string getAsStr(
Attributor *
A)
const override {
11880 if (!isValidState())
11881 return "<invalid>";
11884 OS <<
"underlying objects: inter " << InterAssumedUnderlyingObjects.size()
11885 <<
" objects, intra " << IntraAssumedUnderlyingObjects.size()
11887 if (!InterAssumedUnderlyingObjects.empty()) {
11888 OS <<
"inter objects:\n";
11889 for (
auto *Obj : InterAssumedUnderlyingObjects)
11890 OS << *Obj <<
'\n';
11892 if (!IntraAssumedUnderlyingObjects.empty()) {
11893 OS <<
"intra objects:\n";
11894 for (
auto *Obj : IntraAssumedUnderlyingObjects)
11895 OS << *Obj <<
'\n';
11901 void trackStatistics()
const override {}
11905 auto &
Ptr = getAssociatedValue();
11909 bool UsedAssumedInformation =
false;
11914 Scope, UsedAssumedInformation))
11917 bool Changed =
false;
11919 for (
unsigned I = 0;
I < Values.
size(); ++
I) {
11920 auto &
VAC = Values[
I];
11921 auto *Obj =
VAC.getValue();
11923 if (UO && UO !=
VAC.getValue() && SeenObjects.
insert(UO).second) {
11926 auto Pred = [&Values](
Value &
V) {
11931 if (!OtherAA || !OtherAA->forallUnderlyingObjects(Pred, Scope))
11933 "The forall call should not return false at this position");
11938 if (isa<SelectInst>(Obj)) {
11939 Changed |= handleIndirect(
A, *Obj, UnderlyingObjects, Scope);
11942 if (
auto *
PHI = dyn_cast<PHINode>(Obj)) {
11945 for (
unsigned u = 0, e =
PHI->getNumIncomingValues(); u < e; u++) {
11946 Changed |= handleIndirect(
A, *
PHI->getIncomingValue(u),
11947 UnderlyingObjects, Scope);
11952 Changed |= UnderlyingObjects.
insert(Obj);
11958 bool Changed =
false;
11962 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11965 bool forallUnderlyingObjects(
11968 if (!isValidState())
11969 return Pred(getAssociatedValue());
11972 ? IntraAssumedUnderlyingObjects
11973 : InterAssumedUnderlyingObjects;
11974 for (
Value *Obj : AssumedUnderlyingObjects)
11987 bool Changed =
false;
11990 auto Pred = [&](
Value &
V) {
11991 Changed |= UnderlyingObjects.
insert(&V);
11994 if (!AA || !AA->forallUnderlyingObjects(Pred, Scope))
11996 "The forall call should not return false at this position");
12006struct AAUnderlyingObjectsFloating final : AAUnderlyingObjectsImpl {
12008 : AAUnderlyingObjectsImpl(IRP,
A) {}
12011struct AAUnderlyingObjectsArgument final : AAUnderlyingObjectsImpl {
12013 : AAUnderlyingObjectsImpl(IRP,
A) {}
12016struct AAUnderlyingObjectsCallSite final : AAUnderlyingObjectsImpl {
12018 : AAUnderlyingObjectsImpl(IRP,
A) {}
12021struct AAUnderlyingObjectsCallSiteArgument final : AAUnderlyingObjectsImpl {
12023 : AAUnderlyingObjectsImpl(IRP,
A) {}
12026struct AAUnderlyingObjectsReturned final : AAUnderlyingObjectsImpl {
12028 : AAUnderlyingObjectsImpl(IRP,
A) {}
12031struct AAUnderlyingObjectsCallSiteReturned final : AAUnderlyingObjectsImpl {
12033 : AAUnderlyingObjectsImpl(IRP,
A) {}
12036struct AAUnderlyingObjectsFunction final : AAUnderlyingObjectsImpl {
12038 : AAUnderlyingObjectsImpl(IRP,
A) {}
12053 Instruction *UInst = dyn_cast<Instruction>(
U.getUser());
12059 LLVM_DEBUG(
dbgs() <<
"[AAGlobalValueInfo] Check use: " << *
U.get() <<
" in "
12060 << *UInst <<
"\n");
12062 if (
auto *Cmp = dyn_cast<ICmpInst>(
U.getUser())) {
12063 int Idx = &
Cmp->getOperandUse(0) == &
U;
12064 if (isa<Constant>(
Cmp->getOperand(
Idx)))
12066 return U == &getAnchorValue();
12070 if (isa<ReturnInst>(UInst)) {
12072 Worklist.
push_back(ACS.getInstruction());
12075 bool UsedAssumedInformation =
false;
12077 if (!
A.checkForAllCallSites(CallSitePred, *UInst->
getFunction(),
12079 UsedAssumedInformation))
12086 auto *CB = dyn_cast<CallBase>(UInst);
12097 if (!Fn || !
A.isFunctionIPOAmendable(*Fn))
12106 unsigned NumUsesBefore =
Uses.size();
12112 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
12115 case UseCaptureKind::NO_CAPTURE:
12116 return checkUse(
A, U, Follow, Worklist);
12117 case UseCaptureKind::MAY_CAPTURE:
12118 return checkUse(
A, U, Follow, Worklist);
12119 case UseCaptureKind::PASSTHROUGH:
12125 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
12126 Uses.insert(&OldU);
12130 while (!Worklist.
empty()) {
12132 if (!Visited.
insert(V).second)
12134 if (!
A.checkForAllUses(UsePred, *
this, *V,
12136 DepClassTy::OPTIONAL,
12137 true, EquivalentUseCB)) {
12138 return indicatePessimisticFixpoint();
12142 return Uses.size() == NumUsesBefore ? ChangeStatus::UNCHANGED
12143 : ChangeStatus::CHANGED;
12146 bool isPotentialUse(
const Use &U)
const override {
12147 return !isValidState() ||
Uses.contains(&U);
12152 return ChangeStatus::UNCHANGED;
12156 const std::string getAsStr(
Attributor *
A)
const override {
12157 return "[" + std::to_string(
Uses.size()) +
" uses]";
12160 void trackStatistics()
const override {
12178 auto *MD = getCtxI()->getMetadata(LLVMContext::MD_callees);
12179 if (!MD && !
A.isClosedWorldModule())
12183 for (
const auto &
Op : MD->operands())
12184 if (
Function *Callee = mdconst::dyn_extract_or_null<Function>(
Op))
12185 PotentialCallees.insert(Callee);
12186 }
else if (
A.isClosedWorldModule()) {
12188 A.getInfoCache().getIndirectlyCallableFunctions(
A);
12189 PotentialCallees.insert(IndirectlyCallableFunctions.
begin(),
12190 IndirectlyCallableFunctions.
end());
12193 if (PotentialCallees.empty())
12194 indicateOptimisticFixpoint();
12198 CallBase *CB = cast<CallBase>(getCtxI());
12203 bool AllCalleesKnownNow = AllCalleesKnown;
12205 auto CheckPotentialCalleeUse = [&](
Function &PotentialCallee,
12206 bool &UsedAssumedInformation) {
12209 if (!GIAA || GIAA->isPotentialUse(CalleeUse))
12211 UsedAssumedInformation = !GIAA->isAtFixpoint();
12215 auto AddPotentialCallees = [&]() {
12216 for (
auto *PotentialCallee : PotentialCallees) {
12217 bool UsedAssumedInformation =
false;
12218 if (CheckPotentialCalleeUse(*PotentialCallee, UsedAssumedInformation))
12219 AssumedCalleesNow.
insert(PotentialCallee);
12225 bool UsedAssumedInformation =
false;
12228 AA::ValueScope::AnyScope,
12229 UsedAssumedInformation)) {
12230 if (PotentialCallees.empty())
12231 return indicatePessimisticFixpoint();
12232 AddPotentialCallees();
12237 auto CheckPotentialCallee = [&](
Function &Fn) {
12238 if (!PotentialCallees.empty() && !PotentialCallees.count(&Fn))
12241 auto &CachedResult = FilterResults[&Fn];
12242 if (CachedResult.has_value())
12243 return CachedResult.value();
12245 bool UsedAssumedInformation =
false;
12246 if (!CheckPotentialCalleeUse(Fn, UsedAssumedInformation)) {
12247 if (!UsedAssumedInformation)
12248 CachedResult =
false;
12257 for (
int I = NumCBArgs;
I < NumFnArgs; ++
I) {
12258 bool IsKnown =
false;
12259 if (AA::hasAssumedIRAttr<Attribute::NoUndef>(
12261 DepClassTy::OPTIONAL, IsKnown)) {
12263 CachedResult =
false;
12268 CachedResult =
true;
12274 for (
auto &VAC : Values) {
12275 if (isa<UndefValue>(
VAC.getValue()))
12277 if (isa<ConstantPointerNull>(
VAC.getValue()) &&
12278 VAC.getValue()->getType()->getPointerAddressSpace() == 0)
12281 if (
auto *VACFn = dyn_cast<Function>(
VAC.getValue())) {
12282 if (CheckPotentialCallee(*VACFn))
12283 AssumedCalleesNow.
insert(VACFn);
12286 if (!PotentialCallees.empty()) {
12287 AddPotentialCallees();
12290 AllCalleesKnownNow =
false;
12293 if (AssumedCalleesNow == AssumedCallees &&
12294 AllCalleesKnown == AllCalleesKnownNow)
12295 return ChangeStatus::UNCHANGED;
12297 std::swap(AssumedCallees, AssumedCalleesNow);
12298 AllCalleesKnown = AllCalleesKnownNow;
12299 return ChangeStatus::CHANGED;
12305 if (!AllCalleesKnown && AssumedCallees.empty())
12306 return ChangeStatus::UNCHANGED;
12308 CallBase *CB = cast<CallBase>(getCtxI());
12309 bool UsedAssumedInformation =
false;
12310 if (
A.isAssumedDead(*CB,
this,
nullptr,
12311 UsedAssumedInformation))
12312 return ChangeStatus::UNCHANGED;
12316 if (
FP->getType()->getPointerAddressSpace())
12327 if (AssumedCallees.empty()) {
12328 assert(AllCalleesKnown &&
12329 "Expected all callees to be known if there are none.");
12330 A.changeToUnreachableAfterManifest(CB);
12331 return ChangeStatus::CHANGED;
12335 if (AllCalleesKnown && AssumedCallees.size() == 1) {
12336 auto *NewCallee = AssumedCallees.front();
12339 return ChangeStatus::CHANGED;
12346 A.deleteAfterManifest(*CB);
12347 return ChangeStatus::CHANGED;
12357 bool SpecializedForAnyCallees =
false;
12358 bool SpecializedForAllCallees = AllCalleesKnown;
12362 for (
Function *NewCallee : AssumedCallees) {
12363 if (!
A.shouldSpecializeCallSiteForCallee(*
this, *CB, *NewCallee,
12364 AssumedCallees.size())) {
12365 SkippedAssumedCallees.
push_back(NewCallee);
12366 SpecializedForAllCallees =
false;
12369 SpecializedForAnyCallees =
true;
12375 A.registerManifestAddedBasicBlock(*ThenTI->
getParent());
12376 A.registerManifestAddedBasicBlock(*IP->getParent());
12377 auto *SplitTI = cast<BranchInst>(LastCmp->
getNextNode());
12382 A.registerManifestAddedBasicBlock(*ElseBB);
12384 SplitTI->replaceUsesOfWith(CBBB, ElseBB);
12392 auto *CBClone = cast<CallBase>(CB->
clone());
12393 CBClone->insertBefore(ThenTI);
12394 NewCall = &cast<CallInst>(
promoteCall(*CBClone, NewCallee, &RetBC));
12402 auto AttachCalleeMetadata = [&](
CallBase &IndirectCB) {
12403 if (!AllCalleesKnown)
12404 return ChangeStatus::UNCHANGED;
12405 MDBuilder MDB(IndirectCB.getContext());
12406 MDNode *Callees = MDB.createCallees(SkippedAssumedCallees);
12407 IndirectCB.setMetadata(LLVMContext::MD_callees, Callees);
12408 return ChangeStatus::CHANGED;
12411 if (!SpecializedForAnyCallees)
12412 return AttachCalleeMetadata(*CB);
12415 if (SpecializedForAllCallees) {
12419 IP->eraseFromParent();
12421 auto *CBClone = cast<CallInst>(CB->
clone());
12422 CBClone->setName(CB->
getName());
12423 CBClone->insertBefore(*IP->getParent(), IP);
12424 NewCalls.
push_back({CBClone,
nullptr});
12425 AttachCalleeMetadata(*CBClone);
12432 CB->
getParent()->getFirstInsertionPt());
12433 for (
auto &It : NewCalls) {
12435 Instruction *CallRet = It.second ? It.second : It.first;
12447 A.deleteAfterManifest(*CB);
12448 Changed = ChangeStatus::CHANGED;
12454 const std::string getAsStr(
Attributor *
A)
const override {
12455 return std::string(AllCalleesKnown ?
"eliminate" :
"specialize") +
12456 " indirect call site with " + std::to_string(AssumedCallees.size()) +
12460 void trackStatistics()
const override {
12461 if (AllCalleesKnown) {
12463 Eliminated, CallSites,
12464 "Number of indirect call sites eliminated via specialization")
12467 "Number of indirect call sites specialized")
12472 return isValidState() && AllCalleesKnown &&
all_of(AssumedCallees, CB);
12489 bool AllCalleesKnown =
true;
12500 assert(isValidState() &&
"the AA is invalid");
12501 return AssumedAddressSpace;
12506 assert(getAssociatedType()->isPtrOrPtrVectorTy() &&
12507 "Associated value is not a pointer");
12509 indicateOptimisticFixpoint();
12513 int32_t OldAddressSpace = AssumedAddressSpace;
12515 DepClassTy::REQUIRED);
12516 auto Pred = [&](
Value &Obj) {
12517 if (isa<UndefValue>(&Obj))
12522 if (!AUO->forallUnderlyingObjects(Pred))
12523 return indicatePessimisticFixpoint();
12525 return OldAddressSpace == AssumedAddressSpace ? ChangeStatus::UNCHANGED
12526 : ChangeStatus::CHANGED;
12531 Value *AssociatedValue = &getAssociatedValue();
12532 Value *OriginalValue = peelAddrspacecast(AssociatedValue);
12536 return ChangeStatus::UNCHANGED;
12538 Type *NewPtrTy = PointerType::get(getAssociatedType()->getContext(),
12540 bool UseOriginalValue =
12544 bool Changed =
false;
12548 if (UseOriginalValue) {
12549 A.changeUseAfterManifest(U, *OriginalValue);
12554 A.changeUseAfterManifest(U, *
CastInst);
12557 auto Pred = [&](
const Use &
U,
bool &) {
12558 if (
U.get() != AssociatedValue)
12560 auto *Inst = dyn_cast<Instruction>(
U.getUser());
12567 if (isa<LoadInst>(Inst))
12568 MakeChange(Inst,
const_cast<Use &
>(U));
12569 if (isa<StoreInst>(Inst)) {
12571 if (
U.getOperandNo() == 1)
12572 MakeChange(Inst,
const_cast<Use &
>(U));
12579 (void)
A.checkForAllUses(Pred, *
this, getAssociatedValue(),
12582 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
12586 const std::string getAsStr(
Attributor *
A)
const override {
12587 if (!isValidState())
12588 return "addrspace(<invalid>)";
12589 return "addrspace(" +
12590 (AssumedAddressSpace == NoAddressSpace
12592 : std::to_string(AssumedAddressSpace)) +
12597 int32_t AssumedAddressSpace = NoAddressSpace;
12599 bool takeAddressSpace(int32_t AS) {
12600 if (AssumedAddressSpace == NoAddressSpace) {
12601 AssumedAddressSpace = AS;
12604 return AssumedAddressSpace == AS;
12608 if (
auto *
I = dyn_cast<AddrSpaceCastInst>(V))
12609 return peelAddrspacecast(
I->getPointerOperand());
12610 if (
auto *
C = dyn_cast<ConstantExpr>(V))
12611 if (
C->getOpcode() == Instruction::AddrSpaceCast)
12612 return peelAddrspacecast(
C->getOperand(0));
12617struct AAAddressSpaceFloating final : AAAddressSpaceImpl {
12619 : AAAddressSpaceImpl(IRP,
A) {}
12621 void trackStatistics()
const override {
12626struct AAAddressSpaceReturned final : AAAddressSpaceImpl {
12628 : AAAddressSpaceImpl(IRP,
A) {}
12634 (void)indicatePessimisticFixpoint();
12637 void trackStatistics()
const override {
12642struct AAAddressSpaceCallSiteReturned final : AAAddressSpaceImpl {
12644 : AAAddressSpaceImpl(IRP,
A) {}
12646 void trackStatistics()
const override {
12651struct AAAddressSpaceArgument final : AAAddressSpaceImpl {
12653 : AAAddressSpaceImpl(IRP,
A) {}
12658struct AAAddressSpaceCallSiteArgument final : AAAddressSpaceImpl {
12660 : AAAddressSpaceImpl(IRP,
A) {}
12666 (void)indicatePessimisticFixpoint();
12669 void trackStatistics()
const override {
12681 std::optional<TypeSize> getAllocatedSize()
const override {
12682 assert(isValidState() &&
"the AA is invalid");
12683 return AssumedAllocatedSize;
12686 std::optional<TypeSize> findInitialAllocationSize(
Instruction *
I,
12690 switch (
I->getOpcode()) {
12691 case Instruction::Alloca: {
12696 return std::nullopt;
12706 if (!isa<AllocaInst>(
I))
12707 return indicatePessimisticFixpoint();
12709 bool IsKnownNoCapture;
12710 if (!AA::hasAssumedIRAttr<Attribute::NoCapture>(
12711 A,
this, IRP, DepClassTy::OPTIONAL, IsKnownNoCapture))
12712 return indicatePessimisticFixpoint();
12715 A.getOrCreateAAFor<
AAPointerInfo>(IRP, *
this, DepClassTy::REQUIRED);
12718 return indicatePessimisticFixpoint();
12721 return indicatePessimisticFixpoint();
12724 const auto AllocationSize = findInitialAllocationSize(
I,
DL);
12727 if (!AllocationSize)
12728 return indicatePessimisticFixpoint();
12732 if (*AllocationSize == 0)
12733 return indicatePessimisticFixpoint();
12739 return indicatePessimisticFixpoint();
12741 if (BinSize == 0) {
12742 auto NewAllocationSize = std::optional<TypeSize>(
TypeSize(0,
false));
12743 if (!changeAllocationSize(NewAllocationSize))
12744 return ChangeStatus::UNCHANGED;
12745 return ChangeStatus::CHANGED;
12749 const auto &It = PI->
begin();
12752 if (It->first.Offset != 0)
12753 return indicatePessimisticFixpoint();
12755 uint64_t SizeOfBin = It->first.Offset + It->first.Size;
12757 if (SizeOfBin >= *AllocationSize)
12758 return indicatePessimisticFixpoint();
12760 auto NewAllocationSize =
12761 std::optional<TypeSize>(
TypeSize(SizeOfBin * 8,
false));
12763 if (!changeAllocationSize(NewAllocationSize))
12764 return ChangeStatus::UNCHANGED;
12766 return ChangeStatus::CHANGED;
12772 assert(isValidState() &&
12773 "Manifest should only be called if the state is valid.");
12777 auto FixedAllocatedSizeInBits = getAllocatedSize()->getFixedValue();
12779 unsigned long NumBytesToAllocate = (FixedAllocatedSizeInBits + 7) / 8;
12781 switch (
I->getOpcode()) {
12783 case Instruction::Alloca: {
12789 auto *NumBytesToValue =
12790 ConstantInt::get(
I->getContext(),
APInt(32, NumBytesToAllocate));
12793 insertPt = std::next(insertPt);
12799 return ChangeStatus::CHANGED;
12807 return ChangeStatus::UNCHANGED;
12811 const std::string getAsStr(
Attributor *
A)
const override {
12812 if (!isValidState())
12813 return "allocationinfo(<invalid>)";
12814 return "allocationinfo(" +
12815 (AssumedAllocatedSize == HasNoAllocationSize
12817 : std::to_string(AssumedAllocatedSize->getFixedValue())) +
12822 std::optional<TypeSize> AssumedAllocatedSize = HasNoAllocationSize;
12826 bool changeAllocationSize(std::optional<TypeSize>
Size) {
12827 if (AssumedAllocatedSize == HasNoAllocationSize ||
12828 AssumedAllocatedSize !=
Size) {
12829 AssumedAllocatedSize =
Size;
12836struct AAAllocationInfoFloating : AAAllocationInfoImpl {
12838 : AAAllocationInfoImpl(IRP,
A) {}
12840 void trackStatistics()
const override {
12845struct AAAllocationInfoReturned : AAAllocationInfoImpl {
12847 : AAAllocationInfoImpl(IRP,
A) {}
12853 (void)indicatePessimisticFixpoint();
12856 void trackStatistics()
const override {
12861struct AAAllocationInfoCallSiteReturned : AAAllocationInfoImpl {
12863 : AAAllocationInfoImpl(IRP,
A) {}
12865 void trackStatistics()
const override {
12870struct AAAllocationInfoArgument : AAAllocationInfoImpl {
12872 : AAAllocationInfoImpl(IRP,
A) {}
12874 void trackStatistics()
const override {
12879struct AAAllocationInfoCallSiteArgument : AAAllocationInfoImpl {
12881 : AAAllocationInfoImpl(IRP,
A) {}
12886 (void)indicatePessimisticFixpoint();
12889 void trackStatistics()
const override {
12936#define SWITCH_PK_INV(CLASS, PK, POS_NAME) \
12937 case IRPosition::PK: \
12938 llvm_unreachable("Cannot create " #CLASS " for a " POS_NAME " position!");
12940#define SWITCH_PK_CREATE(CLASS, IRP, PK, SUFFIX) \
12941 case IRPosition::PK: \
12942 AA = new (A.Allocator) CLASS##SUFFIX(IRP, A); \
12946#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
12947 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
12948 CLASS *AA = nullptr; \
12949 switch (IRP.getPositionKind()) { \
12950 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
12951 SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
12952 SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
12953 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
12954 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
12955 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
12956 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
12957 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
12962#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
12963 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
12964 CLASS *AA = nullptr; \
12965 switch (IRP.getPositionKind()) { \
12966 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
12967 SWITCH_PK_INV(CLASS, IRP_FUNCTION, "function") \
12968 SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
12969 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
12970 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
12971 SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
12972 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
12973 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
12978#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS) \
12979 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
12980 CLASS *AA = nullptr; \
12981 switch (IRP.getPositionKind()) { \
12982 SWITCH_PK_CREATE(CLASS, IRP, POS, SUFFIX) \
12984 llvm_unreachable("Cannot create " #CLASS " for position otherthan " #POS \
12990#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
12991 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
12992 CLASS *AA = nullptr; \
12993 switch (IRP.getPositionKind()) { \
12994 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
12995 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
12996 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
12997 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
12998 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
12999 SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
13000 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13001 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13006#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13007 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13008 CLASS *AA = nullptr; \
13009 switch (IRP.getPositionKind()) { \
13010 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13011 SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
13012 SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
13013 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13014 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
13015 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
13016 SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
13017 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13022#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13023 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13024 CLASS *AA = nullptr; \
13025 switch (IRP.getPositionKind()) { \
13026 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13027 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13028 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13029 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13030 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13031 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13032 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13033 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13083#undef CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION
13084#undef CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION
13085#undef CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION
13086#undef CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION
13087#undef CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION
13088#undef CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION
13089#undef SWITCH_PK_CREATE
13090#undef SWITCH_PK_INV
amdgpu AMDGPU Register Bank Select
This file implements a class to represent arbitrary precision integral constant values and operations...
ReachingDefAnalysis InstSet & ToRemove
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
This file contains the simple types necessary to represent the attributes associated with functions a...
#define STATS_DECLTRACK(NAME, TYPE, MSG)
static std::optional< Constant * > askForAssumedConstant(Attributor &A, const AbstractAttribute &QueryingAA, const IRPosition &IRP, Type &Ty)
static cl::opt< unsigned, true > MaxPotentialValues("attributor-max-potential-values", cl::Hidden, cl::desc("Maximum number of potential values to be " "tracked for each position."), cl::location(llvm::PotentialConstantIntValuesState::MaxPotentialValues), cl::init(7))
static const Value * getPointerOperand(const Instruction *I, bool AllowVolatile)
Get pointer operand of memory accessing instruction.
static void clampReturnedValueStates(Attributor &A, const AAType &QueryingAA, StateType &S, const IRPosition::CallBaseContext *CBContext=nullptr)
Clamp the information known for all returned values of a function (identified by QueryingAA) into S.
#define STATS_DECLTRACK_FN_ATTR(NAME)
#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxPotentialValuesIterations("attributor-max-potential-values-iterations", cl::Hidden, cl::desc("Maximum number of iterations we keep dismantling potential values."), cl::init(64))
#define STATS_DECLTRACK_CS_ATTR(NAME)
#define PIPE_OPERATOR(CLASS)
static bool mayBeInCycle(const CycleInfo *CI, const Instruction *I, bool HeaderOnly, Cycle **CPtr=nullptr)
#define STATS_DECLTRACK_ARG_ATTR(NAME)
static const Value * stripAndAccumulateOffsets(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Val, const DataLayout &DL, APInt &Offset, bool GetMinOffset, bool AllowNonInbounds, bool UseAssumed=false)
#define STATS_DECLTRACK_CSRET_ATTR(NAME)
static cl::opt< bool > ManifestInternal("attributor-manifest-internal", cl::Hidden, cl::desc("Manifest Attributor internal string attributes."), cl::init(false))
static Value * constructPointer(Value *Ptr, int64_t Offset, IRBuilder< NoFolder > &IRB)
Helper function to create a pointer based on Ptr, and advanced by Offset bytes.
#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define BUILD_STAT_NAME(NAME, TYPE)
static bool isDenselyPacked(Type *Ty, const DataLayout &DL)
Checks if a type could have padding bytes.
#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static const Value * getMinimalBaseOfPointer(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Ptr, int64_t &BytesOffset, const DataLayout &DL, bool AllowNonInbounds=false)
#define STATS_DECLTRACK_FNRET_ATTR(NAME)
#define STATS_DECLTRACK_CSARG_ATTR(NAME)
#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS)
#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxHeapToStackSize("max-heap-to-stack-size", cl::init(128), cl::Hidden)
#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define STATS_DECLTRACK_FLOATING_ATTR(NAME)
#define STATS_DECL(NAME, TYPE, MSG)
BlockVerifier::State From
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
static GCRegistry::Add< ErlangGC > A("erlang", "erlang-compatible garbage collector")
static bool isReachableImpl(SmallVectorImpl< BasicBlock * > &Worklist, const StopSetT &StopSet, const SmallPtrSetImpl< BasicBlock * > *ExclusionSet, const DominatorTree *DT, const LoopInfo *LI)
This file contains the declarations for the subclasses of Constant, which represent the different fla...
This file declares an analysis pass that computes CycleInfo for LLVM IR, specialized from GenericCycl...
Returns the sub type a function will return at a given Idx Should correspond to the result type of an ExtractValue instruction executed with just that one unsigned Idx
Given that RA is a live propagate it s liveness to any other values it uses(according to Uses). void DeadArgumentEliminationPass
Performs the initial survey of the specified function
Given that RA is a live value
This file defines DenseMapInfo traits for DenseMap.
Rewrite Partial Register Uses
static LoopDeletionResult merge(LoopDeletionResult A, LoopDeletionResult B)
This file implements a map that provides insertion order iteration.
ConstantRange Range(APInt(BitWidth, Low), APInt(BitWidth, High))
uint64_t IntrinsicInst * II
static StringRef getName(Value *V)
static cl::opt< RegAllocEvictionAdvisorAnalysis::AdvisorMode > Mode("regalloc-enable-advisor", cl::Hidden, cl::init(RegAllocEvictionAdvisorAnalysis::AdvisorMode::Default), cl::desc("Enable regalloc advisor mode"), cl::values(clEnumValN(RegAllocEvictionAdvisorAnalysis::AdvisorMode::Default, "default", "Default"), clEnumValN(RegAllocEvictionAdvisorAnalysis::AdvisorMode::Release, "release", "precompiled"), clEnumValN(RegAllocEvictionAdvisorAnalysis::AdvisorMode::Development, "development", "for training")))
This builds on the llvm/ADT/GraphTraits.h file to find the strongly connected components (SCCs) of a ...
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
This file defines generic set operations that may be used on set's of different types,...
This file implements a set that has insertion order iteration characteristics.
This file defines the SmallPtrSet class.
This file defines the SmallVector class.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
static SymbolRef::Type getType(const Symbol *Sym)
static void initialize(TargetLibraryInfoImpl &TLI, const Triple &T, ArrayRef< StringLiteral > StandardNames)
Initialize the set of available library functions based on the specified target triple.
static unsigned getBitWidth(Type *Ty, const DataLayout &DL)
Returns the bitwidth of the given scalar or pointer type.
static unsigned getSize(unsigned Kind)
AACallGraphNode * operator*() const
A manager for alias analyses.
bool isNoAlias(const MemoryLocation &LocA, const MemoryLocation &LocB)
A trivial helper function to check to see if the specified pointers are no-alias.
Class for arbitrary precision integers.
int64_t getSExtValue() const
Get sign extended value.
CallBase * getInstruction() const
Return the underlying instruction.
bool isCallbackCall() const
Return true if this ACS represents a callback call.
bool isDirectCall() const
Return true if this ACS represents a direct call.
static void getCallbackUses(const CallBase &CB, SmallVectorImpl< const Use * > &CallbackUses)
Add operand uses of CB that represent callback uses into CallbackUses.
int getCallArgOperandNo(Argument &Arg) const
Return the operand index of the underlying instruction associated with Arg.
This class represents a conversion between pointers from one address space to another.
an instruction to allocate memory on the stack
Align getAlign() const
Return the alignment of the memory that is being allocated by the instruction.
unsigned getAddressSpace() const
Return the address space for the allocation.
std::optional< TypeSize > getAllocationSize(const DataLayout &DL) const
Get allocation size in bytes.
This class represents an incoming formal argument to a Function.
bool hasPointeeInMemoryValueAttr() const
Return true if this argument has the byval, sret, inalloca, preallocated, or byref attribute.
bool hasReturnedAttr() const
Return true if this argument has the returned attribute.
bool hasByValAttr() const
Return true if this argument has the byval attribute.
const Function * getParent() const
unsigned getArgNo() const
Return the index of this formal argument in its containing function.
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
A function analysis which provides an AssumptionCache.
A cache of @llvm.assume calls within a function.
static Attribute get(LLVMContext &Context, AttrKind Kind, uint64_t Val=0)
Return a uniquified Attribute object.
static Attribute getWithDereferenceableBytes(LLVMContext &Context, uint64_t Bytes)
FPClassTest getNoFPClass() const
Return the FPClassTest for nofpclass.
Attribute::AttrKind getKindAsEnum() const
Return the attribute's kind as an enum (Attribute::AttrKind).
MemoryEffects getMemoryEffects() const
Returns memory effects.
static Attribute getWithDereferenceableOrNullBytes(LLVMContext &Context, uint64_t Bytes)
static Attribute getWithNoFPClass(LLVMContext &Context, FPClassTest Mask)
AttrKind
This enumeration lists the attributes that can be associated with parameters, function results,...
static bool isEnumAttrKind(AttrKind Kind)
static Attribute getWithMemoryEffects(LLVMContext &Context, MemoryEffects ME)
static Attribute getWithAlignment(LLVMContext &Context, Align Alignment)
Return a uniquified Attribute object that has the specific alignment set.
LLVM Basic Block Representation.
const_iterator getFirstInsertionPt() const
Returns an iterator to the first instruction in this block that is suitable for inserting a non-PHI i...
const Instruction & front() const
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
const Function * getParent() const
Return the enclosing method, or null if none.
InstListType::iterator iterator
Instruction iterators...
const Instruction * getTerminator() const LLVM_READONLY
Returns the terminator instruction if the block is well formed or null if the block is not well forme...
BinaryOps getOpcode() const
Conditional or Unconditional Branch instruction.
unsigned getNumSuccessors() const
static BranchInst * Create(BasicBlock *IfTrue, InsertPosition InsertBefore=nullptr)
BasicBlock * getSuccessor(unsigned i) const
Value * getCondition() const
Allocate memory in an ever growing pool, as if by bump-pointer.
Base class for all callable instructions (InvokeInst and CallInst) Holds everything related to callin...
User::op_iterator arg_begin()
Return the iterator pointing to the beginning of the argument list.
bool isMustTailCall() const
Tests if this call site must be tail call optimized.
bool isIndirectCall() const
Return true if the callsite is an indirect call.
bool isCallee(Value::const_user_iterator UI) const
Determine whether the passed iterator points to the callee operand's Use.
Value * getCalledOperand() const
const Use & getCalledOperandUse() const
const Use & getArgOperandUse(unsigned i) const
Wrappers for getting the Use of a call argument.
Value * getArgOperand(unsigned i) const
User::op_iterator arg_end()
Return the iterator pointing to the end of the argument list.
bool isBundleOperand(unsigned Idx) const
Return true if the operand at index Idx is a bundle operand.
bool isConvergent() const
Determine if the invoke is convergent.
FunctionType * getFunctionType() const
Intrinsic::ID getIntrinsicID() const
Returns the intrinsic ID of the intrinsic called or Intrinsic::not_intrinsic if the called function i...
iterator_range< User::op_iterator > args()
Iteration adapter for range-for loops.
unsigned getArgOperandNo(const Use *U) const
Given a use for a arg operand, get the arg operand number that corresponds to it.
unsigned arg_size() const
bool isArgOperand(const Use *U) const
Function * getCaller()
Helper to get the caller (the parent function).
This class represents a function call, abstracting a target machine's calling convention.
static CallInst * Create(FunctionType *Ty, Value *F, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
This is the base class for all instructions that perform data casts.
Instruction::CastOps getOpcode() const
Return the opcode of this CastInst.
bool isIntegerCast() const
There are several places where we need to know if a cast instruction only deals with integer source a...
Type * getDestTy() const
Return the destination type, as a convenience.
This class is the base class for the comparison instructions.
bool isEquality() const
Determine if this is an equals/not equals predicate.
bool isFalseWhenEqual() const
This is just a convenience.
Predicate
This enumeration lists the possible predicates for CmpInst subclasses.
bool isTrueWhenEqual() const
This is just a convenience.
Predicate getPredicate() const
Return the predicate for this instruction.
A constant value that is initialized with an expression using other constant values.
static Constant * getExtractElement(Constant *Vec, Constant *Idx, Type *OnlyIfReducedTy=nullptr)
This is the shared class of boolean and integer constants.
static ConstantInt * getTrue(LLVMContext &Context)
This class represents a range of values.
const APInt & getLower() const
Return the lower value for this range.
bool isFullSet() const
Return true if this set contains all of the elements possible for this data-type.
bool isEmptySet() const
Return true if this set contains no members.
APInt getSignedMin() const
Return the smallest signed value contained in the ConstantRange.
bool isSingleElement() const
Return true if this set contains exactly one member.
static ConstantRange makeAllowedICmpRegion(CmpInst::Predicate Pred, const ConstantRange &Other)
Produce the smallest range such that all values that may satisfy the given predicate with any value c...
const APInt & getUpper() const
Return the upper value for this range.
APInt getSignedMax() const
Return the largest signed value contained in the ConstantRange.
This is an important base class in LLVM.
Analysis pass which computes a CycleInfo.
This class represents an Operation in the Expression.
A parsed version of the target data layout string in and methods for querying it.
iterator find(const_arg_type_t< KeyT > Val)
size_type count(const_arg_type_t< KeyT > Val) const
Return 1 if the specified key is in the map, 0 otherwise.
bool contains(const_arg_type_t< KeyT > Val) const
Return true if the specified key is in the map, false otherwise.
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
Implements a dense probed hash-table based set.
Analysis pass which computes a DominatorTree.
Concrete subclass of DominatorTreeBase that is used to compute a normal dominator tree.
bool dominates(const BasicBlock *BB, const Use &U) const
Return true if the (end of the) basic block BB dominates the use U.
An instruction for ordering other memory operations.
A handy container for a FunctionType+Callee-pointer pair, which can be passed around as a single enti...
const BasicBlock & getEntryBlock() const
iterator_range< arg_iterator > args()
const Function & getFunction() const
Argument * getArg(unsigned i) const
bool hasFnAttribute(Attribute::AttrKind Kind) const
Return true if the function has the attribute.
CycleT * getCycle(const BlockT *Block) const
Find the innermost cycle containing a given block.
A possibly irreducible generalization of a Loop.
bool isDeclaration() const
Return true if the primary definition of this global value is outside of the current translation unit...
bool hasLocalLinkage() const
This instruction compares its operands according to the predicate given to the constructor.
static bool compare(const APInt &LHS, const APInt &RHS, ICmpInst::Predicate Pred)
Return result of LHS Pred RHS comparison.
Value * CreatePtrAdd(Value *Ptr, Value *Offset, const Twine &Name="", GEPNoWrapFlags NW=GEPNoWrapFlags::none())
ConstantInt * getInt64(uint64_t C)
Get a constant 64-bit value.
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
Instruction * clone() const
Create a copy of 'this' instruction that is identical in all ways except the following:
bool isLifetimeStartOrEnd() const LLVM_READONLY
Return true if the instruction is a llvm.lifetime.start or llvm.lifetime.end marker.
bool mayReadOrWriteMemory() const
Return true if this instruction may read or write memory.
bool mayWriteToMemory() const LLVM_READONLY
Return true if this instruction may modify memory.
void insertBefore(Instruction *InsertPos)
Insert an unlinked instruction into a basic block immediately before the specified instruction.
InstListType::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
const Function * getFunction() const
Return the function this instruction belongs to.
BasicBlock * getSuccessor(unsigned Idx) const LLVM_READONLY
Return the specified successor. This instruction must be a terminator.
bool mayHaveSideEffects() const LLVM_READONLY
Return true if the instruction may have side effects.
bool isTerminator() const
bool mayReadFromMemory() const LLVM_READONLY
Return true if this instruction may read memory.
const Instruction * getNextNonDebugInstruction(bool SkipPseudoOp=false) const
Return a pointer to the next non-debug instruction in the same basic block as 'this',...
unsigned getOpcode() const
Returns a member of one of the enums like Instruction::Add.
void setDebugLoc(DebugLoc Loc)
Set the debug location information for this instruction.
const DataLayout & getDataLayout() const
Get the data layout of the module this instruction belongs to.
A wrapper class for inspecting calls to intrinsic functions.
This is an important class for using LLVM in a threaded context.
Analysis to compute lazy value information.
This pass computes, caches, and vends lazy value constraint information.
ConstantRange getConstantRange(Value *V, Instruction *CxtI, bool UndefAllowed)
Return the ConstantRange constraint that is known to hold for the specified value at the specified in...
An instruction for reading from memory.
Analysis pass that exposes the LoopInfo for a function.
LoopT * getLoopFor(const BlockT *BB) const
Return the inner most loop that BB lives in.
const MDOperand & getOperand(unsigned I) const
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
unsigned getNumOperands() const
Return number of MDNode operands.
This class implements a map that also provides access to all stored values in a deterministic order.
static MemoryEffectsBase readOnly()
Create MemoryEffectsBase that can read any memory.
bool doesNotAccessMemory() const
Whether this function accesses no memory.
static MemoryEffectsBase argMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
Create MemoryEffectsBase that can only access argument memory.
static MemoryEffectsBase inaccessibleMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
Create MemoryEffectsBase that can only access inaccessible memory.
bool onlyAccessesInaccessibleMem() const
Whether this function only (at most) accesses inaccessible memory.
ModRefInfo getModRef(Location Loc) const
Get ModRefInfo for the given Location.
bool onlyAccessesArgPointees() const
Whether this function only (at most) accesses argument memory.
bool onlyReadsMemory() const
Whether this function only (at most) reads memory.
static MemoryEffectsBase writeOnly()
Create MemoryEffectsBase that can write any memory.
static MemoryEffectsBase inaccessibleOrArgMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
Create MemoryEffectsBase that can only access inaccessible or argument memory.
static MemoryEffectsBase none()
Create MemoryEffectsBase that cannot read or write any memory.
bool onlyAccessesInaccessibleOrArgMem() const
Whether this function only (at most) accesses argument and inaccessible memory.
static MemoryEffectsBase unknown()
Create MemoryEffectsBase that can read and write any memory.
static std::optional< MemoryLocation > getOrNone(const Instruction *Inst)
A Module instance is used to store all the information related to an LLVM module.
Evaluate the size and offset of an object pointed to by a Value*.
static SizeOffsetValue unknown()
static PHINode * Create(Type *Ty, unsigned NumReservedValues, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
Constructors - NumReservedValues is a hint for the number of incoming edges that this phi node will h...
static PoisonValue * get(Type *T)
Static factory methods - Return an 'poison' object of the specified type.
Return a value (possibly void), from a function.
Value * getReturnValue() const
Convenience accessor. Returns null if there is no return value.
This class represents an analyzed expression in the program.
Analysis pass that exposes the ScalarEvolution for a function.
The main scalar evolution driver.
const SCEV * getSCEVAtScope(const SCEV *S, const Loop *L)
Return a SCEV expression for the specified value at the specified scope in the program.
const SCEV * getSCEV(Value *V)
Return a SCEV expression for the full generality of the specified expression.
unsigned getSmallConstantMaxTripCount(const Loop *L)
Returns the upper bound of the loop trip count as a normal unsigned value.
ConstantRange getUnsignedRange(const SCEV *S)
Determine the unsigned range for a particular SCEV.
This class represents the LLVM 'select' instruction.
A vector that has set insertion semantics.
size_type size() const
Determine the number of elements in the SetVector.
bool insert(const value_type &X)
Insert a new element into the SetVector.
bool erase(PtrType Ptr)
Remove pointer from the set.
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
A SetVector that performs no allocations if smaller than a certain size.
SmallSet - This maintains a set of unique values, optimizing for the case when the set is small (less...
std::pair< const_iterator, bool > insert(const T &V)
insert - Insert an element into the set if it isn't already there.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
reference emplace_back(ArgTypes &&... Args)
void append(ItTy in_start, ItTy in_end)
Add the specified range to the end of the SmallVector.
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
An instruction for storing to memory.
StringRef - Represent a constant reference to a string, i.e.
Used to lazily calculate structure layout information for a target machine, based on the DataLayout s...
TypeSize getElementOffset(unsigned Idx) const
TypeSize getElementOffsetInBits(unsigned Idx) const
Class to represent struct types.
unsigned getNumElements() const
Random access to the elements.
Type * getElementType(unsigned N) const
Analysis pass providing the TargetTransformInfo.
Twine - A lightweight data structure for efficiently representing the concatenation of temporary valu...
The instances of the Type class are immutable: once they are created, they are never changed.
unsigned getIntegerBitWidth() const
bool isPointerTy() const
True if this is an instance of PointerType.
static IntegerType * getInt1Ty(LLVMContext &C)
unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
bool isSized(SmallPtrSetImpl< Type * > *Visited=nullptr) const
Return true if it makes sense to take the size of this type.
static IntegerType * getInt8Ty(LLVMContext &C)
bool isPtrOrPtrVectorTy() const
Return true if this is a pointer type or a vector of pointer types.
static IntegerType * getInt32Ty(LLVMContext &C)
bool isIntegerTy() const
True if this is an instance of IntegerType.
bool isVoidTy() const
Return true if this is 'void'.
'undef' values are things that do not have specified contents.
static UndefValue * get(Type *T)
Static factory methods - Return an 'undef' object of the specified type.
This function has undefined behavior.
A Use represents the edge between a Value definition and its users.
User * getUser() const
Returns the User that contains this Use.
bool replaceUsesOfWith(Value *From, Value *To)
Replace uses of one Value with another.
const Use & getOperandUse(unsigned i) const
Value * getOperand(unsigned i) const
unsigned getNumOperands() const
bool isDroppable() const
A droppable user is a user for which uses can be dropped without affecting correctness and should be ...
ValueT lookup(const KeyT &Val) const
lookup - Return the entry for the specified key, or a default constructed value if no such entry exis...
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
static constexpr uint64_t MaximumAlignment
const Value * stripAndAccumulateConstantOffsets(const DataLayout &DL, APInt &Offset, bool AllowNonInbounds, bool AllowInvariantGroup=false, function_ref< bool(Value &Value, APInt &Offset)> ExternalAnalysis=nullptr) const
Accumulate the constant offset this value has compared to a base pointer.
void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
iterator_range< user_iterator > users()
LLVMContext & getContext() const
All values hold a context through their type.
iterator_range< use_iterator > uses()
StringRef getName() const
Return a constant reference to the value's name.
std::pair< iterator, bool > insert(const ValueT &V)
constexpr ScalarTy getFixedValue() const
constexpr bool isScalable() const
Returns whether the quantity is scaled by a runtime quantity (vscale).
An efficient, type-erasing, non-owning reference to a callable.
const ParentTy * getParent() const
self_iterator getIterator()
NodeTy * getNextNode()
Get the next node, or nullptr for the list tail.
This class implements an extremely fast bulk output stream that can only output to a stream.
A raw_ostream that writes to an std::string.
Enumerate the SCCs of a directed graph in reverse topological order of the SCC DAG.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
bool isAssumedReadNone(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readnone.
bool isAssumedReadOnly(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readonly.
raw_ostream & operator<<(raw_ostream &OS, const RangeTy &R)
std::optional< Value * > combineOptionalValuesInAAValueLatice(const std::optional< Value * > &A, const std::optional< Value * > &B, Type *Ty)
Return the combination of A and B such that the result is a possible value of both.
bool isValidAtPosition(const ValueAndContext &VAC, InformationCache &InfoCache)
Return true if the value of VAC is a valid at the position of VAC, that is a constant,...
bool isAssumedThreadLocalObject(Attributor &A, Value &Obj, const AbstractAttribute &QueryingAA)
Return true if Obj is assumed to be a thread local object.
bool isDynamicallyUnique(Attributor &A, const AbstractAttribute &QueryingAA, const Value &V, bool ForAnalysisOnly=true)
Return true if V is dynamically unique, that is, there are no two "instances" of V at runtime with di...
bool getPotentialCopiesOfStoredValue(Attributor &A, StoreInst &SI, SmallSetVector< Value *, 4 > &PotentialCopies, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values of the one stored by SI into PotentialCopies.
bool isGPU(const Module &M)
Return true iff M targets a GPU (and we can use GPU AS reasoning).
ValueScope
Flags to distinguish intra-procedural queries from potentially inter-procedural queries.
bool isValidInScope(const Value &V, const Function *Scope)
Return true if V is a valid value in Scope, that is a constant or an instruction/argument of Scope.
bool isPotentiallyReachable(Attributor &A, const Instruction &FromI, const Instruction &ToI, const AbstractAttribute &QueryingAA, const AA::InstExclusionSetTy *ExclusionSet=nullptr, std::function< bool(const Function &F)> GoBackwardsCB=nullptr)
Return true if ToI is potentially reachable from FromI without running into any instruction in Exclus...
bool isNoSyncInst(Attributor &A, const Instruction &I, const AbstractAttribute &QueryingAA)
Return true if I is a nosync instruction.
bool getPotentiallyLoadedValues(Attributor &A, LoadInst &LI, SmallSetVector< Value *, 4 > &PotentialValues, SmallSetVector< Instruction *, 4 > &PotentialValueOrigins, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values LI could read into PotentialValues.
Value * getWithType(Value &V, Type &Ty)
Try to convert V to type Ty without introducing new instructions.
constexpr char Attrs[]
Key for Kernel::Metadata::mAttrs.
AddressSpace getAddressSpace(T *V)
@ C
The default llvm calling convention, compatible with C.
@ Unsupported
This operation is completely unsupported on the target.
@ Undef
Value of the register doesn't matter.
@ SingleThread
Synchronized with respect to signal handlers executing in the same thread.
@ CE
Windows NT (Windows on ARM)
initializer< Ty > init(const Ty &Val)
LocationClass< Ty > location(Ty &L)
unsigned combineHashValue(unsigned a, unsigned b)
Simplistic combination of 32-bit hash values into 32-bit hash values.
ElementType
The element type of an SRV or UAV resource.
Scope
Defines the scope in which this symbol should be visible: Default – Visible in the public interface o...
const_iterator begin(StringRef path, Style style=Style::native)
Get begin iterator over path.
const_iterator end(StringRef path)
Get end iterator over path.
This is an optimization pass for GlobalISel generic memory operations.
pred_iterator pred_end(BasicBlock *BB)
bool operator<(int64_t V1, const APSInt &V2)
LLVM_ATTRIBUTE_ALWAYS_INLINE DynamicAPInt gcd(const DynamicAPInt &A, const DynamicAPInt &B)
bool all_of(R &&range, UnaryPredicate P)
Provide wrappers to std::all_of which take ranges instead of having to pass begin/end explicitly.
bool isLegalToPromote(const CallBase &CB, Function *Callee, const char **FailureReason=nullptr)
Return true if the given indirect call site can be made to call Callee.
Constant * getInitialValueOfAllocation(const Value *V, const TargetLibraryInfo *TLI, Type *Ty)
If this is a call to an allocation function that initializes memory to a fixed value,...
auto size(R &&Range, std::enable_if_t< std::is_base_of< std::random_access_iterator_tag, typename std::iterator_traits< decltype(Range.begin())>::iterator_category >::value, void > *=nullptr)
Get the size of a range.
unsigned getPointerAddressSpace(const Type *T)
auto successors(const MachineBasicBlock *BB)
bool isRemovableAlloc(const CallBase *V, const TargetLibraryInfo *TLI)
Return true if this is a call to an allocation function that does not have side effects that we are r...
APFloat abs(APFloat X)
Returns the absolute value of the argument.
raw_fd_ostream & outs()
This returns a reference to a raw_fd_ostream for standard output.
bool operator!=(uint64_t V1, const APInt &V2)
UseCaptureKind DetermineUseCaptureKind(const Use &U, llvm::function_ref< bool(Value *, const DataLayout &)> IsDereferenceableOrNull)
Determine what kind of capture behaviour U may exhibit.
Value * getAllocAlignment(const CallBase *V, const TargetLibraryInfo *TLI)
Gets the alignment argument for an aligned_alloc-like function, using either built-in knowledge based...
Value * simplifyInstructionWithOperands(Instruction *I, ArrayRef< Value * > NewOps, const SimplifyQuery &Q)
Like simplifyInstruction but the operands of I are replaced with NewOps.
Value * GetPointerBaseWithConstantOffset(Value *Ptr, int64_t &Offset, const DataLayout &DL, bool AllowNonInbounds=true)
Analyze the specified pointer to see if it can be expressed as a base pointer plus a constant offset.
const Value * getUnderlyingObject(const Value *V, unsigned MaxLookup=6)
This method strips off any GEP address adjustments, pointer casts or llvm.threadlocal....
scc_iterator< T > scc_begin(const T &G)
Construct the begin iterator for a deduced graph type T.
PotentialValuesState< std::pair< AA::ValueAndContext, AA::ValueScope > > PotentialLLVMValuesState
bool isNoAliasCall(const Value *V)
Return true if this pointer is returned by a noalias function.
raw_ostream & WriteGraph(raw_ostream &O, const GraphType &G, bool ShortNames=false, const Twine &Title="")
auto unique(Range &&R, Predicate P)
bool operator==(const AddressRangeValuePair &LHS, const AddressRangeValuePair &RHS)
ConstantRange getConstantRangeFromMetadata(const MDNode &RangeMD)
Parse out a conservative ConstantRange from !range metadata.
const Value * getPointerOperand(const Value *V)
A helper function that returns the pointer operand of a load, store or GEP instruction.
Value * simplifyInstruction(Instruction *I, const SimplifyQuery &Q)
See if we can compute a simplified version of this instruction.
bool any_of(R &&range, UnaryPredicate P)
Provide wrappers to std::any_of which take ranges instead of having to pass begin/end explicitly.
pred_iterator pred_begin(BasicBlock *BB)
unsigned Log2_32(uint32_t Value)
Return the floor log base 2 of the specified value, -1 if the value is zero.
constexpr bool isPowerOf2_32(uint32_t Value)
Return true if the argument is a power of two > 0.
void sort(IteratorTy Start, IteratorTy End)
MemoryEffectsBase< IRMemLocation > MemoryEffects
Summary of how a function affects memory in the program.
bool NullPointerIsDefined(const Function *F, unsigned AS=0)
Check whether null pointer dereferencing is considered undefined behavior for a given function or an ...
raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
bool isPointerTy(const Type *T)
bool wouldInstructionBeTriviallyDead(const Instruction *I, const TargetLibraryInfo *TLI=nullptr)
Return true if the result produced by the instruction would have no side effects if it was not used.
bool isSafeToSpeculativelyExecute(const Instruction *I, const Instruction *CtxI=nullptr, AssumptionCache *AC=nullptr, const DominatorTree *DT=nullptr, const TargetLibraryInfo *TLI=nullptr, bool UseVariableInfo=true)
Return true if the instruction does not have any effects besides calculating the result and does not ...
bool set_union(S1Ty &S1, const S2Ty &S2)
set_union(A, B) - Compute A := A u B, return whether A changed.
void RemapInstruction(Instruction *I, ValueToValueMapTy &VM, RemapFlags Flags=RF_None, ValueMapTypeRemapper *TypeMapper=nullptr, ValueMaterializer *Materializer=nullptr)
Convert the instruction operands from referencing the current values into those specified by VM.
CallBase & promoteCall(CallBase &CB, Function *Callee, CastInst **RetBitCast=nullptr)
Promote the given indirect call site to unconditionally call Callee.
bool hasAssumption(const Function &F, const KnownAssumptionString &AssumptionStr)
Return true if F has the assumption AssumptionStr attached.
RetainedKnowledge getKnowledgeFromUse(const Use *U, ArrayRef< Attribute::AttrKind > AttrKinds)
Return a valid Knowledge associated to the Use U if its Attribute kind is in AttrKinds.
bool isKnownNonZero(const Value *V, const SimplifyQuery &Q, unsigned Depth=0)
Return true if the given value is known to be non-zero when defined.
AtomicOrdering
Atomic ordering for LLVM's memory model.
ChangeStatus clampStateAndIndicateChange< DerefState >(DerefState &S, const DerefState &R)
Value * simplifyCmpInst(unsigned Predicate, Value *LHS, Value *RHS, const SimplifyQuery &Q)
Given operands for a CmpInst, fold the result or return null.
PotentialValuesState< APInt > PotentialConstantIntValuesState
DWARFExpression::Operation Op
bool isGuaranteedNotToBeUndefOrPoison(const Value *V, AssumptionCache *AC=nullptr, const Instruction *CtxI=nullptr, const DominatorTree *DT=nullptr, unsigned Depth=0)
Return true if this function can prove that V does not have undef bits and is never poison.
Value * getFreedOperand(const CallBase *CB, const TargetLibraryInfo *TLI)
If this if a call to a free function, return the freed operand.
ChangeStatus clampStateAndIndicateChange(StateType &S, const StateType &R)
Helper function to clamp a state S of type StateType with the information in R and indicate/return if...
constexpr unsigned BitWidth
std::optional< APInt > getAllocSize(const CallBase *CB, const TargetLibraryInfo *TLI, function_ref< const Value *(const Value *)> Mapper=[](const Value *V) { return V;})
Return the size of the requested allocation.
DenseSet< StringRef > getAssumptions(const Function &F)
Return the set of all assumptions for the function F.
Align assumeAligned(uint64_t Value)
Treats the value 0 as a 1, so Align is always at least 1.
Instruction * SplitBlockAndInsertIfThen(Value *Cond, BasicBlock::iterator SplitBefore, bool Unreachable, MDNode *BranchWeights=nullptr, DomTreeUpdater *DTU=nullptr, LoopInfo *LI=nullptr, BasicBlock *ThenBlock=nullptr)
Split the containing block at the specified instruction - everything before SplitBefore stays in the ...
@ OPTIONAL
The target may be valid if the source is not.
@ NONE
Do not track a dependence between source and target.
@ REQUIRED
The target cannot be valid if the source is not.
bool mayContainIrreducibleControl(const Function &F, const LoopInfo *LI)
T bit_floor(T Value)
Returns the largest integral power of two no greater than Value if Value is nonzero.
KnownFPClass computeKnownFPClass(const Value *V, const APInt &DemandedElts, FPClassTest InterestedClasses, unsigned Depth, const SimplifyQuery &SQ)
Determine which floating-point classes are valid for V, and return them in KnownFPClass bit sets.
bool isIdentifiedObject(const Value *V)
Return true if this pointer refers to a distinct and identifiable object.
constexpr StringRef AssumptionAttrKey
The key we use for assumption attributes.
Implement std::hash so that hash_code can be used in STL containers.
void swap(llvm::BitVector &LHS, llvm::BitVector &RHS)
Implement std::swap in terms of BitVector swap.
A type to track pointer/struct usage and accesses for AAPointerInfo.
AAPointerInfo::const_bin_iterator end() const
ChangeStatus addAccess(Attributor &A, const AAPointerInfo::RangeList &Ranges, Instruction &I, std::optional< Value * > Content, AAPointerInfo::AccessKind Kind, Type *Ty, Instruction *RemoteI=nullptr)
Add a new Access to the state at offset Offset and with size Size.
DenseMap< const Instruction *, SmallVector< unsigned > > RemoteIMap
AAPointerInfo::const_bin_iterator begin() const
bool forallInterferingAccesses(Instruction &I, function_ref< bool(const AAPointerInfo::Access &, bool)> CB, AA::RangeTy &Range) const
See AAPointerInfo::forallInterferingAccesses.
State & operator=(State &&R)
State(State &&SIS)=default
const AAPointerInfo::Access & getAccess(unsigned Index) const
bool forallInterferingAccesses(AA::RangeTy Range, function_ref< bool(const AAPointerInfo::Access &, bool)> CB) const
See AAPointerInfo::forallInterferingAccesses.
SmallVector< AAPointerInfo::Access > AccessList
bool isAtFixpoint() const override
See AbstractState::isAtFixpoint().
static State getWorstState(const State &SIS)
Return the worst possible representable state.
int64_t numOffsetBins() const
AAPointerInfo::OffsetBinsTy OffsetBins
ChangeStatus indicateOptimisticFixpoint() override
See AbstractState::indicateOptimisticFixpoint().
State & operator=(const State &R)
ChangeStatus indicatePessimisticFixpoint() override
See AbstractState::indicatePessimisticFixpoint().
const State & getAssumed() const
static State getBestState(const State &SIS)
Return the best possible representable state.
bool isValidState() const override
See AbstractState::isValidState().
------------------- AAIntraFnReachability Attribute -------------------
ReachabilityQueryInfo(const ReachabilityQueryInfo &RQI)
unsigned Hash
Precomputed hash for this RQI.
ReachabilityQueryInfo(const Instruction *From, const ToTy *To)
ReachabilityQueryInfo(Attributor &A, const Instruction &From, const ToTy &To, const AA::InstExclusionSetTy *ES, bool MakeUnique)
Constructor replacement to ensure unique and stable sets are used for the cache.
unsigned computeHashValue() const
An abstract interface for address space information.
static const char ID
Unique ID (due to the unique address)
An abstract interface for all align attributes.
static const char ID
Unique ID (due to the unique address)
Align getKnownAlign() const
Return known alignment.
An abstract attribute for getting assumption information.
static const char ID
Unique ID (due to the unique address)
An abstract state for querying live call edges.
static const char ID
Unique ID (due to the unique address)
An abstract Attribute for specializing "dynamic" components of "denormal-fp-math" and "denormal-fp-ma...
static const char ID
Unique ID (due to the unique address)
An abstract interface for all dereferenceable attribute.
uint32_t getKnownDereferenceableBytes() const
Return known dereferenceable bytes.
uint32_t getAssumedDereferenceableBytes() const
Return assumed dereferenceable bytes.
static const char ID
Unique ID (due to the unique address)
An abstract interface for llvm::GlobalValue information interference.
static const char ID
Unique ID (due to the unique address)
static const char ID
Unique ID (due to the unique address)
An abstract interface for indirect call information interference.
static const char ID
Unique ID (due to the unique address)
An abstract interface to track if a value leaves its defining function instance.
static const char ID
Unique ID (due to the unique address)
An abstract Attribute for computing reachability between functions.
static const char ID
Unique ID (due to the unique address)
bool canReach(Attributor &A, const Function &Fn) const
If the function represented by this position can reach Fn.
virtual bool instructionCanReach(Attributor &A, const Instruction &Inst, const Function &Fn, const AA::InstExclusionSetTy *ExclusionSet=nullptr) const =0
Can Inst reach Fn.
An abstract interface to determine reachability of point A to B.
static const char ID
Unique ID (due to the unique address)
virtual bool isAssumedReachable(Attributor &A, const Instruction &From, const Instruction &To, const AA::InstExclusionSetTy *ExclusionSet=nullptr) const =0
Returns true if 'From' instruction is assumed to reach 'To' instruction.
An abstract interface for liveness abstract attribute.
static const char ID
Unique ID (due to the unique address)
An abstract interface for memory access kind related attributes (readnone/readonly/writeonly).
bool isAssumedReadOnly() const
Return true if we assume that the underlying value is not accessed (=written) in its respective scope...
bool isKnownReadNone() const
Return true if we know that the underlying value is not read or accessed in its respective scope.
static const char ID
Unique ID (due to the unique address)
bool isAssumedReadNone() const
Return true if we assume that the underlying value is not read or accessed in its respective scope.
An abstract interface for all memory location attributes (readnone/argmemonly/inaccessiblememonly/ina...
static std::string getMemoryLocationsAsStr(MemoryLocationsKind MLK)
Return the locations encoded by MLK as a readable string.
static const char ID
Unique ID (due to the unique address)
StateType::base_t MemoryLocationsKind
An abstract interface for all nonnull attributes.
static const char ID
Unique ID (due to the unique address)
An abstract interface for all noalias attributes.
static bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static const char ID
Unique ID (due to the unique address)
An abstract interface for all nocapture attributes.
static const char ID
Unique ID (due to the unique address)
@ NO_CAPTURE_MAYBE_RETURNED
If we do not capture the value in memory or through integers we can only communicate it back as a der...
bool isAssumedNoCaptureMaybeReturned() const
Return true if we assume that the underlying value is not captured in its respective scope but we all...
static bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static void determineFunctionCaptureCapabilities(const IRPosition &IRP, const Function &F, BitIntegerState &State)
Update State according to the capture capabilities of F for position IRP.
static const char ID
Unique ID (due to the unique address)
An AbstractAttribute for nofree.
static const char ID
Unique ID (due to the unique address)
An abstract attribute for norecurse.
static const char ID
Unique ID (due to the unique address)
An AbstractAttribute for noreturn.
static const char ID
Unique ID (due to the unique address)
static const char ID
Unique ID (due to the unique address)
static bool isAlignedBarrier(const CallBase &CB, bool ExecutedAligned)
Helper function to determine if CB is an aligned (GPU) barrier.
static bool isNonRelaxedAtomic(const Instruction *I)
Helper function used to determine whether an instruction is non-relaxed atomic.
static bool isNoSyncIntrinsic(const Instruction *I)
Helper function specific for intrinsics which are potentially volatile.
An abstract interface for all noundef attributes.
static const char ID
Unique ID (due to the unique address)
static bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static const char ID
Unique ID (due to the unique address)
An abstract Attribute for determining the necessity of the convergent attribute.
static const char ID
Unique ID (due to the unique address)
bool isAssumedNotConvergent() const
Return true if "non-convergent" is assumed.
An abstract interface for all nonnull attributes.
static const char ID
Unique ID (due to the unique address)
static bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See AbstractAttribute::isImpliedByIR(...).
A container for a list of ranges.
static void set_difference(const RangeList &L, const RangeList &R, RangeList &D)
Copy ranges from L that are not in R, into D.
An abstract interface for struct information.
virtual const_bin_iterator begin() const =0
static const char ID
Unique ID (due to the unique address)
virtual int64_t numOffsetBins() const =0
An abstract interface for potential values analysis.
static const char ID
Unique ID (due to the unique address)
static const char ID
Unique ID (due to the unique address)
static Value * getSingleValue(Attributor &A, const AbstractAttribute &AA, const IRPosition &IRP, SmallVectorImpl< AA::ValueAndContext > &Values)
Extract the single value in Values if any.
An abstract interface for privatizability.
virtual std::optional< Type * > getPrivatizableType() const =0
Return the type we can choose for a private copy of the underlying value.
static const char ID
Unique ID (due to the unique address)
An abstract attribute for undefined behavior.
static const char ID
Unique ID (due to the unique address)
An abstract attribute for getting all assumption underlying objects.
static const char ID
Unique ID (due to the unique address)
An abstract interface for range value analysis.
static const char ID
Unique ID (due to the unique address)
An abstract interface for value simplify abstract attribute.
static const char ID
Unique ID (due to the unique address)
An abstract attribute for willreturn.
static const char ID
Unique ID (due to the unique address)
Helper to represent an access offset and size, with logic to deal with uncertainty and check for over...
static constexpr int64_t Unknown
static RangeTy getUnknown()
Base struct for all "concrete attribute" deductions.
void print(raw_ostream &OS) const
Helper functions, for debug purposes only.
virtual StateType & getState()=0
Return the internal abstract state for inspection.
An interface to query the internal state of an abstract attribute.
virtual ChangeStatus indicatePessimisticFixpoint()=0
Indicate that the abstract state should converge to the pessimistic state.
virtual bool isValidState() const =0
Return if this abstract state is in a valid state.
virtual ChangeStatus indicateOptimisticFixpoint()=0
Indicate that the abstract state should converge to the optimistic state.
Helper for AA::PointerInfo::Access DenseMap/Set usage ignoring everything but the instruction.
static unsigned getHashValue(const Access &A)
static Access getTombstoneKey()
static bool isEqual(const Access &LHS, const Access &RHS)
static Access getEmptyKey()
This struct is a compact representation of a valid (non-zero power of two) alignment.
uint64_t value() const
This is a hole in the type system and should not be abused.
Helper struct used in the communication between an abstract attribute (AA) that wants to change the s...
std::function< void(const ArgumentReplacementInfo &, AbstractCallSite, SmallVectorImpl< Value * > &)> ACSRepairCBTy
Abstract call site (ACS) repair callback type.
const Argument & getReplacedArg() const
std::function< void(const ArgumentReplacementInfo &, Function &, Function::arg_iterator)> CalleeRepairCBTy
Callee repair callback type.
The fixpoint analysis framework that orchestrates the attribute deduction.
std::function< std::optional< Value * >(const IRPosition &, const AbstractAttribute *, bool &)> SimplifictionCallbackTy
Register CB as a simplification callback.
Specialization of the integer state for a bit-wise encoding.
BitIntegerState & removeAssumedBits(base_t BitsEncoding)
Remove the bits in BitsEncoding from the "assumed bits" if not known.
BitIntegerState & addKnownBits(base_t Bits)
Add the bits in BitsEncoding to the "known bits".
Simple wrapper for a single bit (boolean) state.
Represent subnormal handling kind for floating point instruction inputs and outputs.
static constexpr DenormalMode getDefault()
Return the assumed default mode for a function without denormal-fp-math.
static constexpr DenormalMode getInvalid()
static Access getTombstoneKey()
static unsigned getHashValue(const Access &A)
static Access getEmptyKey()
static bool isEqual(const Access &LHS, const Access &RHS)
static bool isEqual(const AA::RangeTy &A, const AA::RangeTy B)
static AA::RangeTy getTombstoneKey()
static unsigned getHashValue(const AA::RangeTy &Range)
static AA::RangeTy getEmptyKey()
static ReachabilityQueryInfo< ToTy > EmptyKey
static ReachabilityQueryInfo< ToTy > TombstoneKey
static ReachabilityQueryInfo< ToTy > * getEmptyKey()
static ReachabilityQueryInfo< ToTy > * getTombstoneKey()
static bool isEqual(const ReachabilityQueryInfo< ToTy > *LHS, const ReachabilityQueryInfo< ToTy > *RHS)
static unsigned getHashValue(const ReachabilityQueryInfo< ToTy > *RQI)
An information struct used to provide DenseMap with the various necessary components for a given valu...
State for dereferenceable attribute.
IncIntegerState DerefBytesState
State representing for dereferenceable bytes.
ChangeStatus manifest(Attributor &A) override
See AbstractAttribute::manifest(...).
Helper to describe and deal with positions in the LLVM-IR.
Function * getAssociatedFunction() const
Return the associated function, if any.
static const IRPosition callsite_returned(const CallBase &CB)
Create a position describing the returned value of CB.
static const IRPosition returned(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the returned value of F.
Argument * getAssociatedArgument() const
Return the associated argument, if any.
static const IRPosition value(const Value &V, const CallBaseContext *CBContext=nullptr)
Create a position describing the value of V.
int getCalleeArgNo() const
Return the callee argument number of the associated value if it is an argument or call site argument,...
static const IRPosition inst(const Instruction &I, const CallBaseContext *CBContext=nullptr)
Create a position describing the instruction I.
static const IRPosition callsite_argument(const CallBase &CB, unsigned ArgNo)
Create a position describing the argument of CB at position ArgNo.
@ IRP_ARGUMENT
An attribute for a function argument.
@ IRP_RETURNED
An attribute for the function return value.
@ IRP_CALL_SITE
An attribute for a call site (function scope).
@ IRP_CALL_SITE_RETURNED
An attribute for a call site return value.
@ IRP_FUNCTION
An attribute for a function (scope).
@ IRP_CALL_SITE_ARGUMENT
An attribute for a call site argument.
@ IRP_INVALID
An invalid position.
Instruction * getCtxI() const
Return the context instruction, if any.
static const IRPosition argument(const Argument &Arg, const CallBaseContext *CBContext=nullptr)
Create a position describing the argument Arg.
Type * getAssociatedType() const
Return the type this abstract attribute is associated with.
static const IRPosition function(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the function scope of F.
const CallBaseContext * getCallBaseContext() const
Get the call base context from the position.
Value & getAssociatedValue() const
Return the value this abstract attribute is associated with.
Value & getAnchorValue() const
Return the value this abstract attribute is anchored with.
int getCallSiteArgNo() const
Return the call site argument number of the associated value if it is an argument or call site argume...
static const IRPosition function_scope(const IRPosition &IRP, const CallBaseContext *CBContext=nullptr)
Create a position with function scope matching the "context" of IRP.
Kind getPositionKind() const
Return the associated position kind.
bool isArgumentPosition() const
Return true if the position is an argument or call site argument.
static const IRPosition callsite_function(const CallBase &CB)
Create a position describing the function scope of CB.
Function * getAnchorScope() const
Return the Function surrounding the anchor value.
Incoming for lane maks phi as machine instruction, incoming register Reg and incoming block Block are...
State for an integer range.
ConstantRange getKnown() const
Return the known state encoding.
ConstantRange getAssumed() const
Return the assumed state encoding.
bool isValidState() const override
See AbstractState::isValidState() NOTE: For now we simply pretend that the worst possible state is in...
bool isAtFixpoint() const override
See AbstractState::isAtFixpoint()
ChangeStatus indicateOptimisticFixpoint() override
See AbstractState::indicateOptimisticFixpoint(...)
base_t getAssumed() const
Return the assumed state encoding.
static constexpr base_t getWorstState()
Return the worst possible representable state.
ChangeStatus indicatePessimisticFixpoint() override
See AbstractState::indicatePessimisticFixpoint(...)
Helper that allows to insert a new assumption string in the known assumption set by creating a (stati...
FPClassTest KnownFPClasses
Floating-point classes the value could be one of.
This struct is a compact representation of a valid (power of two) or undefined (0) alignment.
A "must be executed context" for a given program point PP is the set of instructions,...
iterator & end()
Return an universal end iterator.
bool findInContextOf(const Instruction *I, const Instruction *PP)
Helper to look for I in the context of PP.
iterator & begin(const Instruction *PP)
Return an iterator to explore the context around PP.
bool checkForAllContext(const Instruction *PP, function_ref< bool(const Instruction *)> Pred)
}
Various options to control the behavior of getObjectSize.
static unsigned MaxPotentialValues
Maximum number of potential values to be tracked.
void unionAssumed(const MemberTy &C)
Union assumed set with the passed value.
static PotentialValuesState getBestState()
Return empty set as the best state of potential values.
const SetTy & getAssumedSet() const
Return this set.
Represent one information held inside an operand bundle of an llvm.assume.
A MapVector that performs no allocations if smaller than a certain size.
Helper to tie a abstract state implementation to an abstract attribute.
StateType & getState() override
See AbstractAttribute::getState(...).
bool unionAssumed(std::optional< Value * > Other)
Merge Other into the currently assumed simplified value.