55#include "llvm/IR/IntrinsicsAMDGPU.h"
56#include "llvm/IR/IntrinsicsNVPTX.h"
81#define DEBUG_TYPE "attributor"
85 cl::desc(
"Manifest Attributor internal string attributes."),
98 cl::desc(
"Maximum number of potential values to be "
99 "tracked for each position."),
104 "attributor-max-potential-values-iterations",
cl::Hidden,
106 "Maximum number of iterations we keep dismantling potential values."),
109STATISTIC(NumAAs,
"Number of abstract attributes created");
124#define BUILD_STAT_MSG_IR_ATTR(TYPE, NAME) \
125 ("Number of " #TYPE " marked '" #NAME "'")
126#define BUILD_STAT_NAME(NAME, TYPE) NumIR##TYPE##_##NAME
127#define STATS_DECL_(NAME, MSG) STATISTIC(NAME, MSG);
128#define STATS_DECL(NAME, TYPE, MSG) \
129 STATS_DECL_(BUILD_STAT_NAME(NAME, TYPE), MSG);
130#define STATS_TRACK(NAME, TYPE) ++(BUILD_STAT_NAME(NAME, TYPE));
131#define STATS_DECLTRACK(NAME, TYPE, MSG) \
133 STATS_DECL(NAME, TYPE, MSG) \
134 STATS_TRACK(NAME, TYPE) \
136#define STATS_DECLTRACK_ARG_ATTR(NAME) \
137 STATS_DECLTRACK(NAME, Arguments, BUILD_STAT_MSG_IR_ATTR(arguments, NAME))
138#define STATS_DECLTRACK_CSARG_ATTR(NAME) \
139 STATS_DECLTRACK(NAME, CSArguments, \
140 BUILD_STAT_MSG_IR_ATTR(call site arguments, NAME))
141#define STATS_DECLTRACK_FN_ATTR(NAME) \
142 STATS_DECLTRACK(NAME, Function, BUILD_STAT_MSG_IR_ATTR(functions, NAME))
143#define STATS_DECLTRACK_CS_ATTR(NAME) \
144 STATS_DECLTRACK(NAME, CS, BUILD_STAT_MSG_IR_ATTR(call site, NAME))
145#define STATS_DECLTRACK_FNRET_ATTR(NAME) \
146 STATS_DECLTRACK(NAME, FunctionReturn, \
147 BUILD_STAT_MSG_IR_ATTR(function returns, NAME))
148#define STATS_DECLTRACK_CSRET_ATTR(NAME) \
149 STATS_DECLTRACK(NAME, CSReturn, \
150 BUILD_STAT_MSG_IR_ATTR(call site returns, NAME))
151#define STATS_DECLTRACK_FLOATING_ATTR(NAME) \
152 STATS_DECLTRACK(NAME, Floating, \
153 ("Number of floating values known to be '" #NAME "'"))
158#define PIPE_OPERATOR(CLASS) \
159 raw_ostream &operator<<(raw_ostream &OS, const CLASS &AA) { \
160 return OS << static_cast<const AbstractAttribute &>(AA); \
215 bool HeaderOnly,
Cycle **CPtr =
nullptr) {
218 auto *BB =
I->getParent();
224 return !HeaderOnly || BB ==
C->getHeader();
235 if (
DL.getTypeSizeInBits(Ty) !=
DL.getTypeAllocSizeInBits(Ty))
240 if (
VectorType *SeqTy = dyn_cast<VectorType>(Ty))
244 if (
ArrayType *SeqTy = dyn_cast<ArrayType>(Ty))
247 if (!isa<StructType>(Ty))
260 StartPos +=
DL.getTypeAllocSizeInBits(ElTy);
270 bool AllowVolatile) {
271 if (!AllowVolatile &&
I->isVolatile())
274 if (
auto *LI = dyn_cast<LoadInst>(
I)) {
275 return LI->getPointerOperand();
278 if (
auto *SI = dyn_cast<StoreInst>(
I)) {
279 return SI->getPointerOperand();
282 if (
auto *CXI = dyn_cast<AtomicCmpXchgInst>(
I)) {
283 return CXI->getPointerOperand();
286 if (
auto *RMWI = dyn_cast<AtomicRMWInst>(
I)) {
287 return RMWI->getPointerOperand();
309 bool GetMinOffset,
bool AllowNonInbounds,
310 bool UseAssumed =
false) {
312 auto AttributorAnalysis = [&](
Value &V,
APInt &ROffset) ->
bool {
317 UseAssumed ? DepClassTy::OPTIONAL
319 if (!ValueConstantRangeAA)
342 const Value *
Ptr, int64_t &BytesOffset,
344 APInt OffsetAPInt(
DL.getIndexTypeSizeInBits(
Ptr->getType()), 0);
347 true, AllowNonInbounds);
355template <
typename AAType,
typename StateType =
typename AAType::StateType,
357 bool RecurseForSelectAndPHI =
true>
359 Attributor &
A,
const AAType &QueryingAA, StateType &S,
361 LLVM_DEBUG(
dbgs() <<
"[Attributor] Clamp return value states for "
362 << QueryingAA <<
" into " << S <<
"\n");
364 assert((QueryingAA.getIRPosition().getPositionKind() ==
366 QueryingAA.getIRPosition().getPositionKind() ==
368 "Can only clamp returned value states for a function returned or call "
369 "site returned position!");
373 std::optional<StateType>
T;
376 auto CheckReturnValue = [&](
Value &RV) ->
bool {
381 return AA::hasAssumedIRAttr<IRAttributeKind>(
382 A, &QueryingAA, RVPos, DepClassTy::REQUIRED, IsKnown);
386 A.getAAFor<AAType>(QueryingAA, RVPos, DepClassTy::REQUIRED);
390 <<
" AA: " << AA->getAsStr(&
A) <<
" @ " << RVPos <<
"\n");
391 const StateType &AAS = AA->getState();
393 T = StateType::getBestState(AAS);
395 LLVM_DEBUG(
dbgs() <<
"[Attributor] AA State: " << AAS <<
" RV State: " <<
T
397 return T->isValidState();
400 if (!
A.checkForAllReturnedValues(CheckReturnValue, QueryingAA,
401 AA::ValueScope::Intraprocedural,
402 RecurseForSelectAndPHI))
403 S.indicatePessimisticFixpoint();
410template <
typename AAType,
typename BaseType,
411 typename StateType =
typename BaseType::StateType,
412 bool PropagateCallBaseContext =
false,
414 bool RecurseForSelectAndPHI =
true>
415struct AAReturnedFromReturnedValues :
public BaseType {
421 StateType S(StateType::getBestState(this->getState()));
423 RecurseForSelectAndPHI>(
425 PropagateCallBaseContext ? this->getCallBaseContext() : nullptr);
428 return clampStateAndIndicateChange<StateType>(this->getState(), S);
434template <
typename AAType,
typename StateType =
typename AAType::StateType,
436static void clampCallSiteArgumentStates(
Attributor &
A,
const AAType &QueryingAA,
438 LLVM_DEBUG(
dbgs() <<
"[Attributor] Clamp call site argument states for "
439 << QueryingAA <<
" into " << S <<
"\n");
441 assert(QueryingAA.getIRPosition().getPositionKind() ==
443 "Can only clamp call site argument states for an argument position!");
447 std::optional<StateType>
T;
450 unsigned ArgNo = QueryingAA.getIRPosition().getCallSiteArgNo();
462 return AA::hasAssumedIRAttr<IRAttributeKind>(
463 A, &QueryingAA, ACSArgPos, DepClassTy::REQUIRED, IsKnown);
467 A.getAAFor<AAType>(QueryingAA, ACSArgPos, DepClassTy::REQUIRED);
470 LLVM_DEBUG(
dbgs() <<
"[Attributor] ACS: " << *ACS.getInstruction()
471 <<
" AA: " << AA->getAsStr(&
A) <<
" @" << ACSArgPos
473 const StateType &AAS = AA->getState();
475 T = StateType::getBestState(AAS);
477 LLVM_DEBUG(
dbgs() <<
"[Attributor] AA State: " << AAS <<
" CSA State: " <<
T
479 return T->isValidState();
482 bool UsedAssumedInformation =
false;
483 if (!
A.checkForAllCallSites(CallSiteCheck, QueryingAA,
true,
484 UsedAssumedInformation))
485 S.indicatePessimisticFixpoint();
492template <
typename AAType,
typename BaseType,
493 typename StateType =
typename AAType::StateType,
495bool getArgumentStateFromCallBaseContext(
Attributor &
A,
499 "Expected an 'argument' position !");
505 assert(ArgNo >= 0 &&
"Invalid Arg No!");
511 return AA::hasAssumedIRAttr<IRAttributeKind>(
512 A, &QueryingAttribute, CBArgPos, DepClassTy::REQUIRED, IsKnown);
516 A.getAAFor<AAType>(QueryingAttribute, CBArgPos, DepClassTy::REQUIRED);
519 const StateType &CBArgumentState =
520 static_cast<const StateType &
>(AA->getState());
522 LLVM_DEBUG(
dbgs() <<
"[Attributor] Briding Call site context to argument"
523 <<
"Position:" << Pos <<
"CB Arg state:" << CBArgumentState
527 State ^= CBArgumentState;
532template <
typename AAType,
typename BaseType,
533 typename StateType =
typename AAType::StateType,
534 bool BridgeCallBaseContext =
false,
536struct AAArgumentFromCallSiteArguments :
public BaseType {
542 StateType S = StateType::getBestState(this->getState());
544 if (BridgeCallBaseContext) {
546 getArgumentStateFromCallBaseContext<AAType,
BaseType, StateType,
548 A, *
this, this->getIRPosition(), S);
550 return clampStateAndIndicateChange<StateType>(this->getState(), S);
552 clampCallSiteArgumentStates<AAType, StateType, IRAttributeKind>(
A, *
this,
557 return clampStateAndIndicateChange<StateType>(this->getState(), S);
562template <
typename AAType,
typename BaseType,
563 typename StateType =
typename BaseType::StateType,
564 bool IntroduceCallBaseContext =
false,
566struct AACalleeToCallSite :
public BaseType {
571 auto IRPKind = this->getIRPosition().getPositionKind();
574 "Can only wrap function returned positions for call site "
575 "returned positions!");
576 auto &S = this->getState();
578 CallBase &CB = cast<CallBase>(this->getAnchorValue());
579 if (IntroduceCallBaseContext)
580 LLVM_DEBUG(
dbgs() <<
"[Attributor] Introducing call base context:" << CB
585 for (
const Function *Callee : Callees) {
589 IntroduceCallBaseContext ? &CB :
nullptr)
591 *
Callee, IntroduceCallBaseContext ? &CB : nullptr);
595 if (!AA::hasAssumedIRAttr<IRAttributeKind>(
596 A,
this, FnPos, DepClassTy::REQUIRED, IsKnown))
602 A.getAAFor<AAType>(*
this, FnPos, DepClassTy::REQUIRED);
606 if (S.isAtFixpoint())
607 return S.isValidState();
611 if (!
A.checkForAllCallees(CalleePred, *
this, CB))
612 return S.indicatePessimisticFixpoint();
618template <
class AAType,
typename StateType =
typename AAType::StateType>
619static void followUsesInContext(AAType &AA,
Attributor &
A,
624 auto EIt = Explorer.
begin(CtxI), EEnd = Explorer.
end(CtxI);
625 for (
unsigned u = 0;
u <
Uses.size(); ++
u) {
627 if (
const Instruction *UserI = dyn_cast<Instruction>(
U->getUser())) {
629 if (Found && AA.followUseInMBEC(
A, U, UserI, State))
630 for (
const Use &Us : UserI->
uses())
645template <
class AAType,
typename StateType =
typename AAType::StateType>
646static void followUsesInMBEC(AAType &AA,
Attributor &
A, StateType &S,
649 A.getInfoCache().getMustBeExecutedContextExplorer();
655 for (
const Use &U : AA.getIRPosition().getAssociatedValue().uses())
658 followUsesInContext<AAType>(AA,
A, *Explorer, &CtxI,
Uses, S);
660 if (S.isAtFixpoint())
665 if (
const BranchInst *Br = dyn_cast<BranchInst>(
I))
666 if (Br->isConditional())
705 StateType ParentState;
709 ParentState.indicateOptimisticFixpoint();
711 for (
const BasicBlock *BB : Br->successors()) {
712 StateType ChildState;
714 size_t BeforeSize =
Uses.size();
715 followUsesInContext(AA,
A, *Explorer, &BB->front(),
Uses, ChildState);
718 for (
auto It =
Uses.begin() + BeforeSize; It !=
Uses.end();)
721 ParentState &= ChildState;
734namespace PointerInfo {
795 R.indicatePessimisticFixpoint();
889 if (!
Range.mayOverlap(ItRange))
891 bool IsExact =
Range == ItRange && !
Range.offsetOrSizeAreUnknown();
892 for (
auto Index : It.getSecond()) {
894 if (!CB(Access, IsExact))
914 for (
unsigned Index : LocalList->getSecond()) {
917 if (
Range.offsetAndSizeAreUnknown())
933 RemoteI = RemoteI ? RemoteI : &
I;
937 bool AccExists =
false;
939 for (
auto Index : LocalList) {
941 if (
A.getLocalInst() == &
I) {
950 <<
"[AAPointerInfo] Inserting access in new offset bins\n";);
952 for (
auto Key : ToAdd) {
961 "New Access should have been at AccIndex");
962 LocalList.push_back(AccIndex);
976 auto &ExistingRanges =
Before.getRanges();
977 auto &NewRanges = Current.getRanges();
984 <<
"[AAPointerInfo] Removing access from old offset bins\n";);
991 "Expected bin to actually contain the Access.");
1010 using const_iterator = VecTy::const_iterator;
1013 const_iterator begin()
const {
return Offsets.begin(); }
1014 const_iterator end()
const {
return Offsets.end(); }
1017 return Offsets ==
RHS.Offsets;
1023 bool isUnassigned()
const {
return Offsets.size() == 0; }
1025 bool isUnknown()
const {
1038 void addToAll(int64_t Inc) {
1039 for (
auto &
Offset : Offsets) {
1048 void merge(
const OffsetInfo &R) {
Offsets.append(
R.Offsets); }
1063struct AAPointerInfoImpl
1064 :
public StateWrapper<AA::PointerInfo::State, AAPointerInfo> {
1069 const std::string getAsStr(
Attributor *
A)
const override {
1070 return std::string(
"PointerInfo ") +
1071 (isValidState() ? (std::string(
"#") +
1072 std::to_string(OffsetBins.
size()) +
" bins")
1078 return AAPointerInfo::manifest(
A);
1081 virtual const_bin_iterator
begin()
const override {
return State::begin(); }
1082 virtual const_bin_iterator
end()
const override {
return State::end(); }
1083 virtual int64_t numOffsetBins()
const override {
1084 return State::numOffsetBins();
1087 bool forallInterferingAccesses(
1091 return State::forallInterferingAccesses(
Range, CB);
1094 bool forallInterferingAccesses(
1096 bool FindInterferingWrites,
bool FindInterferingReads,
1097 function_ref<
bool(
const Access &,
bool)> UserCB,
bool &HasBeenWrittenTo,
1099 function_ref<
bool(
const Access &)> SkipCB)
const override {
1100 HasBeenWrittenTo =
false;
1107 bool IsAssumedNoSync = AA::hasAssumedIRAttr<Attribute::NoSync>(
1112 bool AllInSameNoSyncFn = IsAssumedNoSync;
1113 bool InstIsExecutedByInitialThreadOnly =
1114 ExecDomainAA && ExecDomainAA->isExecutedByInitialThreadOnly(
I);
1121 bool InstIsExecutedInAlignedRegion =
1122 FindInterferingReads && ExecDomainAA &&
1123 ExecDomainAA->isExecutedInAlignedRegion(
A,
I);
1125 if (InstIsExecutedInAlignedRegion || InstIsExecutedByInitialThreadOnly)
1126 A.recordDependence(*ExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1129 bool IsThreadLocalObj =
1138 auto CanIgnoreThreadingForInst = [&](
const Instruction &
I) ->
bool {
1139 if (IsThreadLocalObj || AllInSameNoSyncFn)
1141 const auto *FnExecDomainAA =
1142 I.getFunction() == &
Scope
1147 if (!FnExecDomainAA)
1149 if (InstIsExecutedInAlignedRegion ||
1150 (FindInterferingWrites &&
1151 FnExecDomainAA->isExecutedInAlignedRegion(
A,
I))) {
1152 A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1155 if (InstIsExecutedByInitialThreadOnly &&
1156 FnExecDomainAA->isExecutedByInitialThreadOnly(
I)) {
1157 A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1166 auto CanIgnoreThreading = [&](
const Access &Acc) ->
bool {
1167 return CanIgnoreThreadingForInst(*Acc.getRemoteInst()) ||
1168 (Acc.getRemoteInst() != Acc.getLocalInst() &&
1169 CanIgnoreThreadingForInst(*Acc.getLocalInst()));
1173 bool IsKnownNoRecurse;
1174 AA::hasAssumedIRAttr<Attribute::NoRecurse>(
1181 bool InstInKernel =
Scope.hasFnAttribute(
"kernel");
1182 bool ObjHasKernelLifetime =
false;
1183 const bool UseDominanceReasoning =
1184 FindInterferingWrites && IsKnownNoRecurse;
1195 case AA::GPUAddressSpace::Shared:
1196 case AA::GPUAddressSpace::Constant:
1197 case AA::GPUAddressSpace::Local:
1209 std::function<
bool(
const Function &)> IsLiveInCalleeCB;
1211 if (
auto *AI = dyn_cast<AllocaInst>(&getAssociatedValue())) {
1216 bool IsKnownNoRecurse;
1217 if (AA::hasAssumedIRAttr<Attribute::NoRecurse>(
1219 IsKnownNoRecurse)) {
1220 IsLiveInCalleeCB = [AIFn](
const Function &Fn) {
return AIFn != &Fn; };
1222 }
else if (
auto *GV = dyn_cast<GlobalValue>(&getAssociatedValue())) {
1225 ObjHasKernelLifetime = HasKernelLifetime(GV, *GV->getParent());
1226 if (ObjHasKernelLifetime)
1227 IsLiveInCalleeCB = [](
const Function &Fn) {
1228 return !Fn.hasFnAttribute(
"kernel");
1236 auto AccessCB = [&](
const Access &Acc,
bool Exact) {
1238 bool AccInSameScope = AccScope == &
Scope;
1242 if (InstInKernel && ObjHasKernelLifetime && !AccInSameScope &&
1246 if (Exact && Acc.isMustAccess() && Acc.getRemoteInst() != &
I) {
1247 if (Acc.isWrite() || (isa<LoadInst>(
I) && Acc.isWriteOrAssumption()))
1248 ExclusionSet.
insert(Acc.getRemoteInst());
1251 if ((!FindInterferingWrites || !Acc.isWriteOrAssumption()) &&
1252 (!FindInterferingReads || !Acc.isRead()))
1255 bool Dominates = FindInterferingWrites && DT && Exact &&
1256 Acc.isMustAccess() && AccInSameScope &&
1259 DominatingWrites.
insert(&Acc);
1263 AllInSameNoSyncFn &= Acc.getRemoteInst()->getFunction() == &
Scope;
1265 InterferingAccesses.
push_back({&Acc, Exact});
1268 if (!State::forallInterferingAccesses(
I, AccessCB,
Range))
1271 HasBeenWrittenTo = !DominatingWrites.
empty();
1275 for (
const Access *Acc : DominatingWrites) {
1276 if (!LeastDominatingWriteInst) {
1277 LeastDominatingWriteInst = Acc->getRemoteInst();
1278 }
else if (DT->
dominates(LeastDominatingWriteInst,
1279 Acc->getRemoteInst())) {
1280 LeastDominatingWriteInst = Acc->getRemoteInst();
1285 auto CanSkipAccess = [&](
const Access &Acc,
bool Exact) {
1286 if (SkipCB && SkipCB(Acc))
1288 if (!CanIgnoreThreading(Acc))
1294 bool ReadChecked = !FindInterferingReads;
1295 bool WriteChecked = !FindInterferingWrites;
1301 &ExclusionSet, IsLiveInCalleeCB))
1306 if (!WriteChecked) {
1308 &ExclusionSet, IsLiveInCalleeCB))
1309 WriteChecked =
true;
1323 if (!WriteChecked && HasBeenWrittenTo &&
1324 Acc.getRemoteInst()->getFunction() != &
Scope) {
1334 if (!FnReachabilityAA ||
1335 !FnReachabilityAA->instructionCanReach(
1336 A, *LeastDominatingWriteInst,
1337 *Acc.getRemoteInst()->getFunction(), &ExclusionSet))
1338 WriteChecked =
true;
1344 if (ReadChecked && WriteChecked)
1347 if (!DT || !UseDominanceReasoning)
1349 if (!DominatingWrites.count(&Acc))
1351 return LeastDominatingWriteInst != Acc.getRemoteInst();
1356 for (
auto &It : InterferingAccesses) {
1357 if ((!AllInSameNoSyncFn && !IsThreadLocalObj && !ExecDomainAA) ||
1358 !CanSkipAccess(*It.first, It.second)) {
1359 if (!UserCB(*It.first, It.second))
1369 using namespace AA::PointerInfo;
1371 return indicatePessimisticFixpoint();
1373 const auto &OtherAAImpl =
static_cast<const AAPointerInfoImpl &
>(OtherAA);
1374 bool IsByval = OtherAAImpl.getAssociatedArgument()->hasByValAttr();
1378 const auto &State = OtherAAImpl.getState();
1379 for (
const auto &It : State) {
1380 for (
auto Index : It.getSecond()) {
1381 const auto &RAcc = State.getAccess(
Index);
1382 if (IsByval && !RAcc.isRead())
1384 bool UsedAssumedInformation =
false;
1386 auto Content =
A.translateArgumentToCallSiteContent(
1387 RAcc.getContent(), CB, *
this, UsedAssumedInformation);
1388 AK =
AccessKind(AK & (IsByval ? AccessKind::AK_R : AccessKind::AK_RW));
1389 AK =
AccessKind(AK | (RAcc.isMayAccess() ? AK_MAY : AK_MUST));
1391 Changed |= addAccess(
A, RAcc.getRanges(), CB,
Content, AK,
1392 RAcc.getType(), RAcc.getRemoteInst());
1399 const OffsetInfo &Offsets,
CallBase &CB) {
1400 using namespace AA::PointerInfo;
1402 return indicatePessimisticFixpoint();
1404 const auto &OtherAAImpl =
static_cast<const AAPointerInfoImpl &
>(OtherAA);
1408 const auto &State = OtherAAImpl.getState();
1409 for (
const auto &It : State) {
1410 for (
auto Index : It.getSecond()) {
1411 const auto &RAcc = State.getAccess(
Index);
1412 for (
auto Offset : Offsets) {
1416 if (!NewRanges.isUnknown()) {
1417 NewRanges.addToAllOffsets(
Offset);
1420 addAccess(
A, NewRanges, CB, RAcc.getContent(), RAcc.getKind(),
1421 RAcc.getType(), RAcc.getRemoteInst());
1430 void trackPointerInfoStatistics(
const IRPosition &IRP)
const {}
1434 for (
auto &It : OffsetBins) {
1435 O <<
"[" << It.first.Offset <<
"-" << It.first.Offset + It.first.Size
1436 <<
"] : " << It.getSecond().size() <<
"\n";
1437 for (
auto AccIndex : It.getSecond()) {
1438 auto &Acc = AccessList[AccIndex];
1439 O <<
" - " << Acc.getKind() <<
" - " << *Acc.getLocalInst() <<
"\n";
1440 if (Acc.getLocalInst() != Acc.getRemoteInst())
1441 O <<
" --> " << *Acc.getRemoteInst()
1443 if (!Acc.isWrittenValueYetUndetermined()) {
1444 if (isa_and_nonnull<Function>(Acc.getWrittenValue()))
1445 O <<
" - c: func " << Acc.getWrittenValue()->getName()
1447 else if (Acc.getWrittenValue())
1448 O <<
" - c: " << *Acc.getWrittenValue() <<
"\n";
1450 O <<
" - c: <unknown>\n";
1457struct AAPointerInfoFloating :
public AAPointerInfoImpl {
1460 : AAPointerInfoImpl(IRP,
A) {}
1467 using namespace AA::PointerInfo;
1470 TypeSize AccessSize =
DL.getTypeStoreSize(&Ty);
1480 if (!VT || VT->getElementCount().isScalable() ||
1482 (*Content)->getType() != VT ||
1483 DL.getTypeStoreSize(VT->getElementType()).isScalable()) {
1493 int64_t ElementSize =
DL.getTypeStoreSize(ElementType).getFixedValue();
1494 auto *ConstContent = cast<Constant>(*
Content);
1498 for (
int i = 0, e = VT->getElementCount().getFixedValue(); i != e; ++i) {
1500 ConstContent, ConstantInt::get(Int32Ty, i));
1503 Changed = Changed | addAccess(
A, {ElementOffsets, ElementSize},
I,
1507 for (
auto &ElementOffset : ElementOffsets)
1508 ElementOffset += ElementSize;
1522 OffsetInfo &UsrOI,
const OffsetInfo &PtrOI,
1526 void trackStatistics()
const override {
1527 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1531bool AAPointerInfoFloating::collectConstantsForGEP(
Attributor &
A,
1534 const OffsetInfo &PtrOI,
1536 unsigned BitWidth =
DL.getIndexTypeSizeInBits(
GEP->getType());
1540 assert(!UsrOI.isUnknown() && !PtrOI.isUnknown() &&
1541 "Don't look for constant values if the offset has already been "
1542 "determined to be unknown.");
1544 if (!
GEP->collectOffset(
DL,
BitWidth, VariableOffsets, ConstantOffset)) {
1550 << (VariableOffsets.
empty() ?
"" :
"not") <<
" constant "
1554 Union.addToAll(ConstantOffset.getSExtValue());
1559 for (
const auto &VI : VariableOffsets) {
1562 if (!PotentialConstantsAA || !PotentialConstantsAA->isValidState()) {
1568 if (PotentialConstantsAA->undefIsContained())
1576 if (AssumedSet.empty())
1580 for (
const auto &ConstOffset : AssumedSet) {
1581 auto CopyPerOffset =
Union;
1582 CopyPerOffset.addToAll(ConstOffset.getSExtValue() *
1583 VI.second.getZExtValue());
1584 Product.merge(CopyPerOffset);
1589 UsrOI = std::move(Union);
1594 using namespace AA::PointerInfo;
1597 Value &AssociatedValue = getAssociatedValue();
1600 OffsetInfoMap[&AssociatedValue].
insert(0);
1602 auto HandlePassthroughUser = [&](
Value *Usr,
Value *CurPtr,
bool &Follow) {
1613 "CurPtr does not exist in the map!");
1615 auto &UsrOI = OffsetInfoMap[Usr];
1616 auto &PtrOI = OffsetInfoMap[CurPtr];
1617 assert(!PtrOI.isUnassigned() &&
1618 "Cannot pass through if the input Ptr was not visited!");
1624 const auto *
F = getAnchorScope();
1629 F ?
A.getInfoCache().getTargetLibraryInfoForFunction(*
F) :
nullptr;
1631 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
1633 User *Usr =
U.getUser();
1634 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Analyze " << *CurPtr <<
" in " << *Usr
1637 "The current pointer offset should have been seeded!");
1641 return HandlePassthroughUser(Usr, CurPtr, Follow);
1642 if (!isa<GEPOperator>(CE)) {
1643 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Unhandled constant user " << *CE
1648 if (
auto *
GEP = dyn_cast<GEPOperator>(Usr)) {
1651 auto &UsrOI = OffsetInfoMap[Usr];
1652 auto &PtrOI = OffsetInfoMap[CurPtr];
1654 if (UsrOI.isUnknown())
1657 if (PtrOI.isUnknown()) {
1663 Follow = collectConstantsForGEP(
A,
DL, UsrOI, PtrOI,
GEP);
1666 if (isa<PtrToIntInst>(Usr))
1668 if (isa<CastInst>(Usr) || isa<SelectInst>(Usr) || isa<ReturnInst>(Usr))
1669 return HandlePassthroughUser(Usr, CurPtr, Follow);
1674 if (isa<PHINode>(Usr)) {
1677 bool IsFirstPHIUser = !OffsetInfoMap.
count(Usr);
1678 auto &UsrOI = OffsetInfoMap[Usr];
1679 auto &PtrOI = OffsetInfoMap[CurPtr];
1683 if (PtrOI.isUnknown()) {
1684 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI operand offset unknown "
1685 << *CurPtr <<
" in " << *Usr <<
"\n");
1686 Follow = !UsrOI.isUnknown();
1692 if (UsrOI == PtrOI) {
1693 assert(!PtrOI.isUnassigned() &&
1694 "Cannot assign if the current Ptr was not visited!");
1695 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI is invariant (so far)");
1705 auto It = OffsetInfoMap.
find(CurPtrBase);
1706 if (It == OffsetInfoMap.
end()) {
1707 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI operand is too complex "
1708 << *CurPtr <<
" in " << *Usr <<
"\n");
1722 auto BaseOI = It->getSecond();
1723 BaseOI.addToAll(
Offset.getZExtValue());
1724 if (IsFirstPHIUser || BaseOI == UsrOI) {
1725 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI is invariant " << *CurPtr
1726 <<
" in " << *Usr <<
"\n");
1727 return HandlePassthroughUser(Usr, CurPtr, Follow);
1731 dbgs() <<
"[AAPointerInfo] PHI operand pointer offset mismatch "
1732 << *CurPtr <<
" in " << *Usr <<
"\n");
1743 if (
auto *LoadI = dyn_cast<LoadInst>(Usr)) {
1751 if (!handleAccess(
A, *LoadI,
nullptr, AK,
1752 OffsetInfoMap[CurPtr].Offsets, Changed,
1757 if (
auto *
II = dyn_cast<IntrinsicInst>(&
I))
1758 return II->isAssumeLikeIntrinsic();
1769 }
while (FromI && FromI != ToI);
1775 if (IntrI.getIntrinsicID() != Intrinsic::assume)
1778 if (IntrI.getParent() == BB) {
1779 if (IsImpactedInRange(LoadI->getNextNonDebugInstruction(), &IntrI))
1785 if ((*PredIt) != BB)
1790 if (SuccBB == IntrBB)
1792 if (isa<UnreachableInst>(SuccBB->getTerminator()))
1796 if (IsImpactedInRange(LoadI->getNextNonDebugInstruction(),
1799 if (IsImpactedInRange(&IntrBB->
front(), &IntrI))
1805 std::pair<Value *, IntrinsicInst *> Assumption;
1806 for (
const Use &LoadU : LoadI->
uses()) {
1807 if (
auto *CmpI = dyn_cast<CmpInst>(LoadU.getUser())) {
1808 if (!CmpI->isEquality() || !CmpI->isTrueWhenEqual())
1810 for (
const Use &CmpU : CmpI->
uses()) {
1811 if (
auto *IntrI = dyn_cast<IntrinsicInst>(CmpU.getUser())) {
1812 if (!IsValidAssume(*IntrI))
1814 int Idx = CmpI->getOperandUse(0) == LoadU;
1815 Assumption = {CmpI->getOperand(
Idx), IntrI};
1820 if (Assumption.first)
1825 if (!Assumption.first || !Assumption.second)
1829 << *Assumption.second <<
": " << *LoadI
1830 <<
" == " << *Assumption.first <<
"\n");
1831 bool UsedAssumedInformation =
false;
1832 std::optional<Value *>
Content =
nullptr;
1833 if (Assumption.first)
1835 A.getAssumedSimplified(*Assumption.first, *
this,
1837 return handleAccess(
1838 A, *Assumption.second,
Content, AccessKind::AK_ASSUMPTION,
1839 OffsetInfoMap[CurPtr].Offsets, Changed, *LoadI->getType());
1844 for (
auto *OtherOp : OtherOps) {
1845 if (OtherOp == CurPtr) {
1848 <<
"[AAPointerInfo] Escaping use in store like instruction " <<
I
1860 bool UsedAssumedInformation =
false;
1861 std::optional<Value *>
Content =
nullptr;
1865 return handleAccess(
A,
I,
Content, AK, OffsetInfoMap[CurPtr].Offsets,
1869 if (
auto *StoreI = dyn_cast<StoreInst>(Usr))
1870 return HandleStoreLike(*StoreI, StoreI->getValueOperand(),
1871 *StoreI->getValueOperand()->getType(),
1872 {StoreI->getValueOperand()}, AccessKind::AK_W);
1873 if (
auto *RMWI = dyn_cast<AtomicRMWInst>(Usr))
1874 return HandleStoreLike(*RMWI,
nullptr, *RMWI->getValOperand()->getType(),
1875 {RMWI->getValOperand()}, AccessKind::AK_RW);
1876 if (
auto *CXI = dyn_cast<AtomicCmpXchgInst>(Usr))
1877 return HandleStoreLike(
1878 *CXI,
nullptr, *CXI->getNewValOperand()->getType(),
1879 {CXI->getCompareOperand(), CXI->getNewValOperand()},
1882 if (
auto *CB = dyn_cast<CallBase>(Usr)) {
1895 translateAndAddState(
A, *CSArgPI, OffsetInfoMap[CurPtr], *CB) |
1897 return isValidState();
1899 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Call user not handled " << *CB
1905 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] User not handled " << *Usr <<
"\n");
1908 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
1909 assert(OffsetInfoMap.
count(OldU) &&
"Old use should be known already!");
1910 if (OffsetInfoMap.
count(NewU)) {
1912 if (!(OffsetInfoMap[NewU] == OffsetInfoMap[OldU])) {
1913 dbgs() <<
"[AAPointerInfo] Equivalent use callback failed: "
1914 << OffsetInfoMap[NewU] <<
" vs " << OffsetInfoMap[OldU]
1918 return OffsetInfoMap[NewU] == OffsetInfoMap[OldU];
1920 OffsetInfoMap[NewU] = OffsetInfoMap[OldU];
1923 if (!
A.checkForAllUses(UsePred, *
this, AssociatedValue,
1925 true, EquivalentUseCB)) {
1926 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Check for all uses failed, abort!\n");
1927 return indicatePessimisticFixpoint();
1931 dbgs() <<
"Accesses by bin after update:\n";
1938struct AAPointerInfoReturned final : AAPointerInfoImpl {
1940 : AAPointerInfoImpl(IRP,
A) {}
1944 return indicatePessimisticFixpoint();
1948 void trackStatistics()
const override {
1949 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1953struct AAPointerInfoArgument final : AAPointerInfoFloating {
1955 : AAPointerInfoFloating(IRP,
A) {}
1958 void trackStatistics()
const override {
1959 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1963struct AAPointerInfoCallSiteArgument final : AAPointerInfoFloating {
1965 : AAPointerInfoFloating(IRP,
A) {}
1969 using namespace AA::PointerInfo;
1973 if (
auto *
MI = dyn_cast_or_null<MemIntrinsic>(getCtxI())) {
1977 LengthVal =
Length->getSExtValue();
1978 unsigned ArgNo = getIRPosition().getCallSiteArgNo();
1981 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Unhandled memory intrinsic "
1983 return indicatePessimisticFixpoint();
1986 ArgNo == 0 ? AccessKind::AK_MUST_WRITE : AccessKind::AK_MUST_READ;
1988 Changed | addAccess(
A, {0, LengthVal}, *
MI,
nullptr,
Kind,
nullptr);
1991 dbgs() <<
"Accesses by bin after update:\n";
2002 Argument *Arg = getAssociatedArgument();
2007 if (ArgAA && ArgAA->getState().isValidState())
2008 return translateAndAddStateFromCallee(
A, *ArgAA,
2009 *cast<CallBase>(getCtxI()));
2011 return indicatePessimisticFixpoint();
2014 bool IsKnownNoCapture;
2015 if (!AA::hasAssumedIRAttr<Attribute::NoCapture>(
2016 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNoCapture))
2017 return indicatePessimisticFixpoint();
2019 bool IsKnown =
false;
2021 return ChangeStatus::UNCHANGED;
2024 ReadOnly ? AccessKind::AK_MAY_READ : AccessKind::AK_MAY_READ_WRITE;
2030 void trackStatistics()
const override {
2031 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
2035struct AAPointerInfoCallSiteReturned final : AAPointerInfoFloating {
2037 : AAPointerInfoFloating(IRP,
A) {}
2040 void trackStatistics()
const override {
2041 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
2055 assert(!AA::hasAssumedIRAttr<Attribute::NoUnwind>(
2056 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2060 const std::string getAsStr(
Attributor *
A)
const override {
2061 return getAssumed() ?
"nounwind" :
"may-unwind";
2067 (
unsigned)Instruction::Invoke, (
unsigned)Instruction::CallBr,
2068 (
unsigned)Instruction::Call, (
unsigned)Instruction::CleanupRet,
2069 (
unsigned)Instruction::CatchSwitch, (
unsigned)Instruction::Resume};
2072 if (!
I.mayThrow(
true))
2075 if (
const auto *CB = dyn_cast<CallBase>(&
I)) {
2076 bool IsKnownNoUnwind;
2077 return AA::hasAssumedIRAttr<Attribute::NoUnwind>(
2084 bool UsedAssumedInformation =
false;
2085 if (!
A.checkForAllInstructions(CheckForNoUnwind, *
this, Opcodes,
2086 UsedAssumedInformation))
2087 return indicatePessimisticFixpoint();
2089 return ChangeStatus::UNCHANGED;
2093struct AANoUnwindFunction final :
public AANoUnwindImpl {
2095 : AANoUnwindImpl(IRP,
A) {}
2102struct AANoUnwindCallSite final
2103 : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl> {
2105 : AACalleeToCallSite<
AANoUnwind, AANoUnwindImpl>(IRP,
A) {}
2116 case Intrinsic::nvvm_barrier0:
2117 case Intrinsic::nvvm_barrier0_and:
2118 case Intrinsic::nvvm_barrier0_or:
2119 case Intrinsic::nvvm_barrier0_popc:
2121 case Intrinsic::amdgcn_s_barrier:
2122 if (ExecutedAligned)
2135 if (
auto *FI = dyn_cast<FenceInst>(
I))
2138 if (
auto *AI = dyn_cast<AtomicCmpXchgInst>(
I)) {
2145 switch (
I->getOpcode()) {
2146 case Instruction::AtomicRMW:
2147 Ordering = cast<AtomicRMWInst>(
I)->getOrdering();
2149 case Instruction::Store:
2150 Ordering = cast<StoreInst>(
I)->getOrdering();
2152 case Instruction::Load:
2153 Ordering = cast<LoadInst>(
I)->getOrdering();
2157 "New atomic operations need to be known in the attributor.");
2168 if (
auto *
MI = dyn_cast<MemIntrinsic>(
I))
2169 return !
MI->isVolatile();
2180 assert(!AA::hasAssumedIRAttr<Attribute::NoSync>(
A,
nullptr, getIRPosition(),
2181 DepClassTy::NONE, IsKnown));
2185 const std::string getAsStr(
Attributor *
A)
const override {
2186 return getAssumed() ?
"nosync" :
"may-sync";
2202 if (
I.mayReadOrWriteMemory())
2207 if (AA::hasAssumedIRAttr<Attribute::NoSync>(
2216 bool UsedAssumedInformation =
false;
2217 if (!
A.checkForAllReadWriteInstructions(CheckRWInstForNoSync, *
this,
2218 UsedAssumedInformation) ||
2219 !
A.checkForAllCallLikeInstructions(CheckForNoSync, *
this,
2220 UsedAssumedInformation))
2221 return indicatePessimisticFixpoint();
2226struct AANoSyncFunction final :
public AANoSyncImpl {
2228 : AANoSyncImpl(IRP,
A) {}
2235struct AANoSyncCallSite final : AACalleeToCallSite<AANoSync, AANoSyncImpl> {
2237 : AACalleeToCallSite<
AANoSync, AANoSyncImpl>(IRP,
A) {}
2247struct AANoFreeImpl :
public AANoFree {
2253 assert(!AA::hasAssumedIRAttr<Attribute::NoFree>(
A,
nullptr, getIRPosition(),
2254 DepClassTy::NONE, IsKnown));
2262 return AA::hasAssumedIRAttr<Attribute::NoFree>(
2264 DepClassTy::REQUIRED, IsKnown);
2267 bool UsedAssumedInformation =
false;
2268 if (!
A.checkForAllCallLikeInstructions(CheckForNoFree, *
this,
2269 UsedAssumedInformation))
2270 return indicatePessimisticFixpoint();
2271 return ChangeStatus::UNCHANGED;
2275 const std::string getAsStr(
Attributor *
A)
const override {
2276 return getAssumed() ?
"nofree" :
"may-free";
2280struct AANoFreeFunction final :
public AANoFreeImpl {
2282 : AANoFreeImpl(IRP,
A) {}
2289struct AANoFreeCallSite final : AACalleeToCallSite<AANoFree, AANoFreeImpl> {
2291 : AACalleeToCallSite<
AANoFree, AANoFreeImpl>(IRP,
A) {}
2298struct AANoFreeFloating : AANoFreeImpl {
2300 : AANoFreeImpl(IRP,
A) {}
2310 if (AA::hasAssumedIRAttr<Attribute::NoFree>(
A,
this,
2312 DepClassTy::OPTIONAL, IsKnown))
2313 return ChangeStatus::UNCHANGED;
2315 Value &AssociatedValue = getIRPosition().getAssociatedValue();
2316 auto Pred = [&](
const Use &
U,
bool &Follow) ->
bool {
2318 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
2326 return AA::hasAssumedIRAttr<Attribute::NoFree>(
2328 DepClassTy::REQUIRED, IsKnown);
2331 if (isa<GetElementPtrInst>(UserI) || isa<BitCastInst>(UserI) ||
2332 isa<PHINode>(UserI) || isa<SelectInst>(UserI)) {
2336 if (isa<StoreInst>(UserI) || isa<LoadInst>(UserI) ||
2337 isa<ReturnInst>(UserI))
2343 if (!
A.checkForAllUses(Pred, *
this, AssociatedValue))
2344 return indicatePessimisticFixpoint();
2346 return ChangeStatus::UNCHANGED;
2351struct AANoFreeArgument final : AANoFreeFloating {
2353 : AANoFreeFloating(IRP,
A) {}
2360struct AANoFreeCallSiteArgument final : AANoFreeFloating {
2362 : AANoFreeFloating(IRP,
A) {}
2370 Argument *Arg = getAssociatedArgument();
2372 return indicatePessimisticFixpoint();
2375 if (AA::hasAssumedIRAttr<Attribute::NoFree>(
A,
this, ArgPos,
2376 DepClassTy::REQUIRED, IsKnown))
2377 return ChangeStatus::UNCHANGED;
2378 return indicatePessimisticFixpoint();
2386struct AANoFreeReturned final : AANoFreeFloating {
2388 : AANoFreeFloating(IRP,
A) {
2403 void trackStatistics()
const override {}
2407struct AANoFreeCallSiteReturned final : AANoFreeFloating {
2409 : AANoFreeFloating(IRP,
A) {}
2412 return ChangeStatus::UNCHANGED;
2423 bool IgnoreSubsumingPositions) {
2425 AttrKinds.
push_back(Attribute::NonNull);
2428 AttrKinds.
push_back(Attribute::Dereferenceable);
2429 if (
A.hasAttr(IRP, AttrKinds, IgnoreSubsumingPositions, Attribute::NonNull))
2436 if (!Fn->isDeclaration()) {
2446 bool UsedAssumedInformation =
false;
2447 if (!
A.checkForAllInstructions(
2449 Worklist.push_back({*cast<ReturnInst>(I).getReturnValue(), &I});
2453 UsedAssumedInformation,
false,
true))
2465 Attribute::NonNull)});
2470static int64_t getKnownNonNullAndDerefBytesForUse(
2472 const Use *U,
const Instruction *
I,
bool &IsNonNull,
bool &TrackUse) {
2475 const Value *UseV =
U->get();
2482 if (isa<CastInst>(
I)) {
2487 if (isa<GetElementPtrInst>(
I)) {
2497 if (
const auto *CB = dyn_cast<CallBase>(
I)) {
2500 U, {Attribute::NonNull, Attribute::Dereferenceable})) {
2517 bool IsKnownNonNull;
2518 AA::hasAssumedIRAttr<Attribute::NonNull>(
A, &QueryingAA, IRP,
2520 IsNonNull |= IsKnownNonNull;
2527 if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
2528 Loc->Size.isScalable() ||
I->isVolatile())
2534 if (
Base &&
Base == &AssociatedValue) {
2535 int64_t DerefBytes = Loc->Size.getValue() +
Offset;
2537 return std::max(int64_t(0), DerefBytes);
2544 int64_t DerefBytes = Loc->Size.getValue();
2546 return std::max(int64_t(0), DerefBytes);
2557 Value &
V = *getAssociatedValue().stripPointerCasts();
2558 if (isa<ConstantPointerNull>(V)) {
2559 indicatePessimisticFixpoint();
2564 followUsesInMBEC(*
this,
A, getState(), *CtxI);
2570 bool IsNonNull =
false;
2571 bool TrackUse =
false;
2572 getKnownNonNullAndDerefBytesForUse(
A, *
this, getAssociatedValue(), U,
I,
2573 IsNonNull, TrackUse);
2574 State.setKnown(IsNonNull);
2579 const std::string getAsStr(
Attributor *
A)
const override {
2580 return getAssumed() ?
"nonnull" :
"may-null";
2585struct AANonNullFloating :
public AANonNullImpl {
2587 : AANonNullImpl(IRP,
A) {}
2592 bool IsKnownNonNull;
2593 return AA::hasAssumedIRAttr<Attribute::NonNull>(
2594 A, *
this, IRP, DepClassTy::OPTIONAL, IsKnownNonNull);
2598 bool UsedAssumedInformation =
false;
2599 Value *AssociatedValue = &getAssociatedValue();
2601 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
2606 Values.
size() != 1 || Values.
front().getValue() != AssociatedValue;
2610 if (
auto *
PHI = dyn_cast<PHINode>(AssociatedValue))
2612 return AA::hasAssumedIRAttr<Attribute::NonNull>(
2613 A, this, IRPosition::value(*Op), DepClassTy::OPTIONAL,
2616 return ChangeStatus::UNCHANGED;
2617 if (
auto *
Select = dyn_cast<SelectInst>(AssociatedValue))
2618 if (AA::hasAssumedIRAttr<Attribute::NonNull>(
2620 DepClassTy::OPTIONAL, IsKnown) &&
2621 AA::hasAssumedIRAttr<Attribute::NonNull>(
2623 DepClassTy::OPTIONAL, IsKnown))
2624 return ChangeStatus::UNCHANGED;
2631 if (AVIRP == getIRPosition() || !CheckIRP(AVIRP))
2632 return indicatePessimisticFixpoint();
2633 return ChangeStatus::UNCHANGED;
2636 for (
const auto &VAC : Values)
2638 return indicatePessimisticFixpoint();
2640 return ChangeStatus::UNCHANGED;
2648struct AANonNullReturned final
2649 : AAReturnedFromReturnedValues<AANonNull, AANonNull, AANonNull::StateType,
2650 false, AANonNull::IRAttributeKind, false> {
2657 const std::string getAsStr(
Attributor *
A)
const override {
2658 return getAssumed() ?
"nonnull" :
"may-null";
2666struct AANonNullArgument final
2667 : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl> {
2669 : AAArgumentFromCallSiteArguments<
AANonNull, AANonNullImpl>(IRP,
A) {}
2675struct AANonNullCallSiteArgument final : AANonNullFloating {
2677 : AANonNullFloating(IRP,
A) {}
2684struct AANonNullCallSiteReturned final
2685 : AACalleeToCallSite<AANonNull, AANonNullImpl> {
2687 : AACalleeToCallSite<
AANonNull, AANonNullImpl>(IRP,
A) {}
2703 assert(!AA::hasAssumedIRAttr<Attribute::MustProgress>(
2704 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2709 const std::string getAsStr(
Attributor *
A)
const override {
2710 return getAssumed() ?
"mustprogress" :
"may-not-progress";
2714struct AAMustProgressFunction final : AAMustProgressImpl {
2716 : AAMustProgressImpl(IRP,
A) {}
2721 if (AA::hasAssumedIRAttr<Attribute::WillReturn>(
2722 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnown)) {
2724 return indicateOptimisticFixpoint();
2725 return ChangeStatus::UNCHANGED;
2730 bool IsKnownMustProgress;
2731 return AA::hasAssumedIRAttr<Attribute::MustProgress>(
2732 A,
this, IPos, DepClassTy::REQUIRED, IsKnownMustProgress,
2736 bool AllCallSitesKnown =
true;
2737 if (!
A.checkForAllCallSites(CheckForMustProgress, *
this,
2740 return indicatePessimisticFixpoint();
2742 return ChangeStatus::UNCHANGED;
2746 void trackStatistics()
const override {
2752struct AAMustProgressCallSite final : AAMustProgressImpl {
2754 : AAMustProgressImpl(IRP,
A) {}
2763 bool IsKnownMustProgress;
2764 if (!AA::hasAssumedIRAttr<Attribute::MustProgress>(
2765 A,
this, FnPos, DepClassTy::REQUIRED, IsKnownMustProgress))
2766 return indicatePessimisticFixpoint();
2767 return ChangeStatus::UNCHANGED;
2771 void trackStatistics()
const override {
2786 assert(!AA::hasAssumedIRAttr<Attribute::NoRecurse>(
2787 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2792 const std::string getAsStr(
Attributor *
A)
const override {
2793 return getAssumed() ?
"norecurse" :
"may-recurse";
2797struct AANoRecurseFunction final : AANoRecurseImpl {
2799 : AANoRecurseImpl(IRP,
A) {}
2806 bool IsKnownNoRecurse;
2807 if (!AA::hasAssumedIRAttr<Attribute::NoRecurse>(
2810 DepClassTy::NONE, IsKnownNoRecurse))
2812 return IsKnownNoRecurse;
2814 bool UsedAssumedInformation =
false;
2815 if (
A.checkForAllCallSites(CallSitePred, *
this,
true,
2816 UsedAssumedInformation)) {
2822 if (!UsedAssumedInformation)
2823 indicateOptimisticFixpoint();
2824 return ChangeStatus::UNCHANGED;
2829 DepClassTy::REQUIRED);
2830 if (EdgeReachability && EdgeReachability->
canReach(
A, *getAnchorScope()))
2831 return indicatePessimisticFixpoint();
2832 return ChangeStatus::UNCHANGED;
2839struct AANoRecurseCallSite final
2840 : AACalleeToCallSite<AANoRecurse, AANoRecurseImpl> {
2842 : AACalleeToCallSite<
AANoRecurse, AANoRecurseImpl>(IRP,
A) {}
2857 const std::string getAsStr(
Attributor *
A)
const override {
2858 return getAssumed() ?
"non-convergent" :
"may-be-convergent";
2862struct AANonConvergentFunction final : AANonConvergentImpl {
2864 : AANonConvergentImpl(IRP,
A) {}
2870 auto CalleeIsNotConvergent = [&](
Instruction &Inst) {
2871 CallBase &CB = cast<CallBase>(Inst);
2873 if (!Callee ||
Callee->isIntrinsic()) {
2876 if (
Callee->isDeclaration()) {
2877 return !
Callee->hasFnAttribute(Attribute::Convergent);
2884 bool UsedAssumedInformation =
false;
2885 if (!
A.checkForAllCallLikeInstructions(CalleeIsNotConvergent, *
this,
2886 UsedAssumedInformation)) {
2887 return indicatePessimisticFixpoint();
2889 return ChangeStatus::UNCHANGED;
2893 if (isKnownNotConvergent() &&
2894 A.hasAttr(getIRPosition(), Attribute::Convergent)) {
2895 A.removeAttrs(getIRPosition(), {Attribute::Convergent});
2896 return ChangeStatus::CHANGED;
2898 return ChangeStatus::UNCHANGED;
2915 const size_t UBPrevSize = KnownUBInsts.size();
2916 const size_t NoUBPrevSize = AssumedNoUBInsts.size();
2920 if (
I.isVolatile() &&
I.mayWriteToMemory())
2924 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
2933 "Expected pointer operand of memory accessing instruction");
2937 std::optional<Value *> SimplifiedPtrOp =
2938 stopOnUndefOrAssumed(
A, PtrOp, &
I);
2939 if (!SimplifiedPtrOp || !*SimplifiedPtrOp)
2941 const Value *PtrOpVal = *SimplifiedPtrOp;
2946 if (!isa<ConstantPointerNull>(PtrOpVal)) {
2947 AssumedNoUBInsts.insert(&
I);
2959 AssumedNoUBInsts.insert(&
I);
2961 KnownUBInsts.insert(&
I);
2970 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
2974 auto *BrInst = cast<BranchInst>(&
I);
2977 if (BrInst->isUnconditional())
2982 std::optional<Value *> SimplifiedCond =
2983 stopOnUndefOrAssumed(
A, BrInst->getCondition(), BrInst);
2984 if (!SimplifiedCond || !*SimplifiedCond)
2986 AssumedNoUBInsts.insert(&
I);
2994 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
3003 for (
unsigned idx = 0; idx < CB.
arg_size(); idx++) {
3009 if (idx >=
Callee->arg_size())
3021 bool IsKnownNoUndef;
3022 AA::hasAssumedIRAttr<Attribute::NoUndef>(
3023 A,
this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNoUndef);
3024 if (!IsKnownNoUndef)
3026 bool UsedAssumedInformation =
false;
3027 std::optional<Value *> SimplifiedVal =
3030 if (UsedAssumedInformation)
3032 if (SimplifiedVal && !*SimplifiedVal)
3034 if (!SimplifiedVal || isa<UndefValue>(**SimplifiedVal)) {
3035 KnownUBInsts.insert(&
I);
3039 !isa<ConstantPointerNull>(**SimplifiedVal))
3041 bool IsKnownNonNull;
3042 AA::hasAssumedIRAttr<Attribute::NonNull>(
3043 A,
this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNonNull);
3045 KnownUBInsts.insert(&
I);
3051 auto &RI = cast<ReturnInst>(
I);
3054 std::optional<Value *> SimplifiedRetValue =
3055 stopOnUndefOrAssumed(
A, RI.getReturnValue(), &
I);
3056 if (!SimplifiedRetValue || !*SimplifiedRetValue)
3073 if (isa<ConstantPointerNull>(*SimplifiedRetValue)) {
3074 bool IsKnownNonNull;
3075 AA::hasAssumedIRAttr<Attribute::NonNull>(
3079 KnownUBInsts.insert(&
I);
3085 bool UsedAssumedInformation =
false;
3086 A.checkForAllInstructions(InspectMemAccessInstForUB, *
this,
3087 {Instruction::Load, Instruction::Store,
3088 Instruction::AtomicCmpXchg,
3089 Instruction::AtomicRMW},
3090 UsedAssumedInformation,
3092 A.checkForAllInstructions(InspectBrInstForUB, *
this, {Instruction::Br},
3093 UsedAssumedInformation,
3095 A.checkForAllCallLikeInstructions(InspectCallSiteForUB, *
this,
3096 UsedAssumedInformation);
3100 if (!getAnchorScope()->getReturnType()->isVoidTy()) {
3102 if (!
A.isAssumedDead(ReturnIRP,
this,
nullptr, UsedAssumedInformation)) {
3103 bool IsKnownNoUndef;
3104 AA::hasAssumedIRAttr<Attribute::NoUndef>(
3105 A,
this, ReturnIRP, DepClassTy::NONE, IsKnownNoUndef);
3107 A.checkForAllInstructions(InspectReturnInstForUB, *
this,
3108 {Instruction::Ret}, UsedAssumedInformation,
3113 if (NoUBPrevSize != AssumedNoUBInsts.size() ||
3114 UBPrevSize != KnownUBInsts.size())
3115 return ChangeStatus::CHANGED;
3116 return ChangeStatus::UNCHANGED;
3120 return KnownUBInsts.count(
I);
3123 bool isAssumedToCauseUB(
Instruction *
I)
const override {
3130 switch (
I->getOpcode()) {
3131 case Instruction::Load:
3132 case Instruction::Store:
3133 case Instruction::AtomicCmpXchg:
3134 case Instruction::AtomicRMW:
3135 return !AssumedNoUBInsts.count(
I);
3136 case Instruction::Br: {
3137 auto *BrInst = cast<BranchInst>(
I);
3138 if (BrInst->isUnconditional())
3140 return !AssumedNoUBInsts.count(
I);
3149 if (KnownUBInsts.empty())
3150 return ChangeStatus::UNCHANGED;
3152 A.changeToUnreachableAfterManifest(
I);
3153 return ChangeStatus::CHANGED;
3157 const std::string getAsStr(
Attributor *
A)
const override {
3158 return getAssumed() ?
"undefined-behavior" :
"no-ub";
3203 bool UsedAssumedInformation =
false;
3204 std::optional<Value *> SimplifiedV =
3207 if (!UsedAssumedInformation) {
3212 KnownUBInsts.insert(
I);
3213 return std::nullopt;
3219 if (isa<UndefValue>(V)) {
3220 KnownUBInsts.insert(
I);
3221 return std::nullopt;
3227struct AAUndefinedBehaviorFunction final : AAUndefinedBehaviorImpl {
3229 : AAUndefinedBehaviorImpl(IRP,
A) {}
3232 void trackStatistics()
const override {
3234 "Number of instructions known to have UB");
3236 KnownUBInsts.size();
3257 if (SCCI.hasCycle())
3267 for (
auto *L : LI->getLoopsInPreorder()) {
3281 assert(!AA::hasAssumedIRAttr<Attribute::WillReturn>(
3282 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
3287 bool isImpliedByMustprogressAndReadonly(
Attributor &
A,
bool KnownOnly) {
3288 if (!
A.hasAttr(getIRPosition(), {Attribute::MustProgress}))
3293 return IsKnown || !KnownOnly;
3299 if (isImpliedByMustprogressAndReadonly(
A,
false))
3300 return ChangeStatus::UNCHANGED;
3305 if (AA::hasAssumedIRAttr<Attribute::WillReturn>(
3306 A,
this, IPos, DepClassTy::REQUIRED, IsKnown)) {
3312 bool IsKnownNoRecurse;
3313 return AA::hasAssumedIRAttr<Attribute::NoRecurse>(
3314 A,
this, IPos, DepClassTy::REQUIRED, IsKnownNoRecurse);
3317 bool UsedAssumedInformation =
false;
3318 if (!
A.checkForAllCallLikeInstructions(CheckForWillReturn, *
this,
3319 UsedAssumedInformation))
3320 return indicatePessimisticFixpoint();
3322 return ChangeStatus::UNCHANGED;
3326 const std::string getAsStr(
Attributor *
A)
const override {
3327 return getAssumed() ?
"willreturn" :
"may-noreturn";
3331struct AAWillReturnFunction final : AAWillReturnImpl {
3333 : AAWillReturnImpl(IRP,
A) {}
3337 AAWillReturnImpl::initialize(
A);
3340 assert(
F &&
"Did expect an anchor function");
3341 if (
F->isDeclaration() || mayContainUnboundedCycle(*
F,
A))
3342 indicatePessimisticFixpoint();
3350struct AAWillReturnCallSite final
3351 : AACalleeToCallSite<AAWillReturn, AAWillReturnImpl> {
3353 : AACalleeToCallSite<
AAWillReturn, AAWillReturnImpl>(IRP,
A) {}
3357 if (isImpliedByMustprogressAndReadonly(
A,
false))
3358 return ChangeStatus::UNCHANGED;
3360 return AACalleeToCallSite::updateImpl(
A);
3382 const ToTy *To =
nullptr;
3392 assert(Hash == 0 &&
"Computed hash twice!");
3396 detail::combineHashValue(PairDMI ::getHashValue({
From, To}),
3397 InstSetDMI::getHashValue(ExclusionSet));
3407 :
From(&
From), To(&To), ExclusionSet(ES) {
3409 if (!ES || ES->
empty()) {
3410 ExclusionSet =
nullptr;
3411 }
else if (MakeUnique) {
3412 ExclusionSet =
A.getInfoCache().getOrCreateUniqueBlockExecutionSet(ES);
3417 :
From(RQI.
From), To(RQI.To), ExclusionSet(RQI.ExclusionSet) {}
3430 return &TombstoneKey;
3437 if (!PairDMI::isEqual({
LHS->From,
LHS->To}, {
RHS->From,
RHS->To}))
3439 return InstSetDMI::isEqual(
LHS->ExclusionSet,
RHS->ExclusionSet);
3443#define DefineKeys(ToTy) \
3445 ReachabilityQueryInfo<ToTy> \
3446 DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::EmptyKey = \
3447 ReachabilityQueryInfo<ToTy>( \
3448 DenseMapInfo<const Instruction *>::getEmptyKey(), \
3449 DenseMapInfo<const ToTy *>::getEmptyKey()); \
3451 ReachabilityQueryInfo<ToTy> \
3452 DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::TombstoneKey = \
3453 ReachabilityQueryInfo<ToTy>( \
3454 DenseMapInfo<const Instruction *>::getTombstoneKey(), \
3455 DenseMapInfo<const ToTy *>::getTombstoneKey());
3464template <
typename BaseTy,
typename ToTy>
3465struct CachedReachabilityAA :
public BaseTy {
3471 bool isQueryAA()
const override {
return true; }
3476 for (
unsigned u = 0,
e = QueryVector.size();
u <
e; ++
u) {
3477 RQITy *RQI = QueryVector[
u];
3478 if (RQI->Result == RQITy::Reachable::No &&
3480 Changed = ChangeStatus::CHANGED;
3486 bool IsTemporaryRQI) = 0;
3489 RQITy &RQI,
bool UsedExclusionSet,
bool IsTemporaryRQI) {
3494 QueryCache.erase(&RQI);
3500 if (
Result == RQITy::Reachable::Yes || !UsedExclusionSet) {
3501 RQITy PlainRQI(RQI.From, RQI.To);
3502 if (!QueryCache.count(&PlainRQI)) {
3503 RQITy *RQIPtr =
new (
A.Allocator) RQITy(RQI.From, RQI.To);
3505 QueryVector.push_back(RQIPtr);
3506 QueryCache.insert(RQIPtr);
3511 if (IsTemporaryRQI &&
Result != RQITy::Reachable::Yes && UsedExclusionSet) {
3512 assert((!RQI.ExclusionSet || !RQI.ExclusionSet->empty()) &&
3513 "Did not expect empty set!");
3514 RQITy *RQIPtr =
new (
A.Allocator)
3515 RQITy(
A, *RQI.From, *RQI.To, RQI.ExclusionSet,
true);
3516 assert(RQIPtr->Result == RQITy::Reachable::No &&
"Already reachable?");
3518 assert(!QueryCache.count(RQIPtr));
3519 QueryVector.push_back(RQIPtr);
3520 QueryCache.insert(RQIPtr);
3523 if (
Result == RQITy::Reachable::No && IsTemporaryRQI)
3524 A.registerForUpdate(*
this);
3525 return Result == RQITy::Reachable::Yes;
3528 const std::string getAsStr(
Attributor *
A)
const override {
3530 return "#queries(" + std::to_string(QueryVector.size()) +
")";
3533 bool checkQueryCache(
Attributor &
A, RQITy &StackRQI,
3534 typename RQITy::Reachable &
Result) {
3535 if (!this->getState().isValidState()) {
3536 Result = RQITy::Reachable::Yes;
3542 if (StackRQI.ExclusionSet) {
3543 RQITy PlainRQI(StackRQI.From, StackRQI.To);
3544 auto It = QueryCache.find(&PlainRQI);
3545 if (It != QueryCache.end() && (*It)->Result == RQITy::Reachable::No) {
3546 Result = RQITy::Reachable::No;
3551 auto It = QueryCache.find(&StackRQI);
3552 if (It != QueryCache.end()) {
3559 QueryCache.insert(&StackRQI);
3568struct AAIntraFnReachabilityFunction final
3569 :
public CachedReachabilityAA<AAIntraFnReachability, Instruction> {
3570 using Base = CachedReachabilityAA<AAIntraFnReachability, Instruction>;
3577 bool isAssumedReachable(
3580 auto *NonConstThis =
const_cast<AAIntraFnReachabilityFunction *
>(
this);
3584 RQITy StackRQI(
A,
From, To, ExclusionSet,
false);
3585 typename RQITy::Reachable
Result;
3586 if (!NonConstThis->checkQueryCache(
A, StackRQI, Result))
3587 return NonConstThis->isReachableImpl(
A, StackRQI,
3589 return Result == RQITy::Reachable::Yes;
3596 A.getAAFor<
AAIsDead>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
3599 [&](
const auto &DeadEdge) {
3600 return LivenessAA->isEdgeDead(DeadEdge.first,
3604 return LivenessAA->isAssumedDead(BB);
3606 return ChangeStatus::UNCHANGED;
3610 return Base::updateImpl(
A);
3614 bool IsTemporaryRQI)
override {
3616 bool UsedExclusionSet =
false;
3621 while (IP && IP != &To) {
3622 if (ExclusionSet && IP != Origin && ExclusionSet->
count(IP)) {
3623 UsedExclusionSet =
true;
3634 "Not an intra-procedural query!");
3638 if (FromBB == ToBB &&
3639 WillReachInBlock(*RQI.From, *RQI.To, RQI.ExclusionSet))
3640 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3645 if (!WillReachInBlock(ToBB->
front(), *RQI.To, RQI.ExclusionSet))
3646 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3651 if (RQI.ExclusionSet)
3652 for (
auto *
I : *RQI.ExclusionSet)
3653 if (
I->getFunction() == Fn)
3654 ExclusionBlocks.
insert(
I->getParent());
3657 if (ExclusionBlocks.
count(FromBB) &&
3660 return rememberResult(
A, RQITy::Reachable::No, RQI,
true, IsTemporaryRQI);
3663 A.getAAFor<
AAIsDead>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
3664 if (LivenessAA && LivenessAA->isAssumedDead(ToBB)) {
3665 DeadBlocks.insert(ToBB);
3666 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3675 while (!Worklist.
empty()) {
3677 if (!Visited.
insert(BB).second)
3680 if (LivenessAA && LivenessAA->isEdgeDead(BB, SuccBB)) {
3681 LocalDeadEdges.
insert({BB, SuccBB});
3686 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3689 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3692 if (ExclusionBlocks.
count(SuccBB)) {
3693 UsedExclusionSet =
true;
3700 DeadEdges.insert(LocalDeadEdges.
begin(), LocalDeadEdges.
end());
3701 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3706 void trackStatistics()
const override {}
3726 bool IgnoreSubsumingPositions) {
3727 assert(ImpliedAttributeKind == Attribute::NoAlias &&
3728 "Unexpected attribute kind");
3731 if (isa<AllocaInst>(Val))
3734 IgnoreSubsumingPositions =
true;
3737 if (isa<UndefValue>(Val))
3740 if (isa<ConstantPointerNull>(Val) &&
3745 if (
A.hasAttr(IRP, {Attribute::ByVal, Attribute::NoAlias},
3746 IgnoreSubsumingPositions, Attribute::NoAlias))
3756 "Noalias is a pointer attribute");
3759 const std::string getAsStr(
Attributor *
A)
const override {
3760 return getAssumed() ?
"noalias" :
"may-alias";
3765struct AANoAliasFloating final : AANoAliasImpl {
3767 : AANoAliasImpl(IRP,
A) {}
3772 return indicatePessimisticFixpoint();
3776 void trackStatistics()
const override {
3782struct AANoAliasArgument final
3783 : AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl> {
3784 using Base = AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl>;
3796 if (AA::hasAssumedIRAttr<Attribute::NoSync>(
3798 DepClassTy::OPTIONAL, IsKnownNoSycn))
3799 return Base::updateImpl(
A);
3804 return Base::updateImpl(
A);
3808 bool UsedAssumedInformation =
false;
3809 if (
A.checkForAllCallSites(
3811 true, UsedAssumedInformation))
3812 return Base::updateImpl(
A);
3820 return indicatePessimisticFixpoint();
3827struct AANoAliasCallSiteArgument final : AANoAliasImpl {
3829 : AANoAliasImpl(IRP,
A) {}
3835 const CallBase &CB,
unsigned OtherArgNo) {
3837 if (this->getCalleeArgNo() == (
int)OtherArgNo)
3849 if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadNone()) {
3850 A.recordDependence(*CBArgMemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3857 if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadOnly() &&
3859 A.recordDependence(MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3860 A.recordDependence(*CBArgMemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3866 AAR =
A.getInfoCache().getAnalysisResultForFunction<
AAManager>(
3870 bool IsAliasing = !AAR || !AAR->
isNoAlias(&getAssociatedValue(), ArgOp);
3872 "callsite arguments: "
3873 << getAssociatedValue() <<
" " << *ArgOp <<
" => "
3874 << (IsAliasing ?
"" :
"no-") <<
"alias \n");
3879 bool isKnownNoAliasDueToNoAliasPreservation(
3899 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
3910 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
3915 bool IsKnownNoCapture;
3916 if (AA::hasAssumedIRAttr<Attribute::NoCapture>(
3918 DepClassTy::OPTIONAL, IsKnownNoCapture))
3924 A, *UserI, *getCtxI(), *
this,
nullptr,
3925 [ScopeFn](
const Function &Fn) {
return &Fn != ScopeFn; }))
3933 case UseCaptureKind::NO_CAPTURE:
3935 case UseCaptureKind::MAY_CAPTURE:
3939 case UseCaptureKind::PASSTHROUGH:
3946 bool IsKnownNoCapture;
3948 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
3949 A,
this, VIRP, DepClassTy::NONE, IsKnownNoCapture,
false, &NoCaptureAA);
3950 if (!IsAssumedNoCapture &&
3952 if (!
A.checkForAllUses(UsePred, *
this, getAssociatedValue())) {
3954 dbgs() <<
"[AANoAliasCSArg] " << getAssociatedValue()
3955 <<
" cannot be noalias as it is potentially captured\n");
3960 A.recordDependence(*NoCaptureAA, *
this, DepClassTy::OPTIONAL);
3965 const auto &CB = cast<CallBase>(getAnchorValue());
3966 for (
unsigned OtherArgNo = 0; OtherArgNo < CB.
arg_size(); OtherArgNo++)
3967 if (mayAliasWithArgument(
A, AAR, MemBehaviorAA, CB, OtherArgNo))
3977 auto *MemBehaviorAA =
3980 A.recordDependence(*MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3981 return ChangeStatus::UNCHANGED;
3984 bool IsKnownNoAlias;
3986 if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
3987 A,
this, VIRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
3989 <<
" is not no-alias at the definition\n");
3990 return indicatePessimisticFixpoint();
3994 if (MemBehaviorAA &&
3995 isKnownNoAliasDueToNoAliasPreservation(
A, AAR, *MemBehaviorAA)) {
3997 dbgs() <<
"[AANoAlias] No-Alias deduced via no-alias preservation\n");
3998 return ChangeStatus::UNCHANGED;
4001 return indicatePessimisticFixpoint();
4009struct AANoAliasReturned final : AANoAliasImpl {
4011 : AANoAliasImpl(IRP,
A) {}
4016 auto CheckReturnValue = [&](
Value &RV) ->
bool {
4017 if (
Constant *
C = dyn_cast<Constant>(&RV))
4018 if (
C->isNullValue() || isa<UndefValue>(
C))
4023 if (!isa<CallBase>(&RV))
4027 bool IsKnownNoAlias;
4028 if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
4029 A,
this, RVPos, DepClassTy::REQUIRED, IsKnownNoAlias))
4032 bool IsKnownNoCapture;
4034 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
4035 A,
this, RVPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
4037 return IsAssumedNoCapture ||
4041 if (!
A.checkForAllReturnedValues(CheckReturnValue, *
this))
4042 return indicatePessimisticFixpoint();
4044 return ChangeStatus::UNCHANGED;
4052struct AANoAliasCallSiteReturned final
4053 : AACalleeToCallSite<AANoAlias, AANoAliasImpl> {
4055 : AACalleeToCallSite<
AANoAlias, AANoAliasImpl>(IRP,
A) {}
4065struct AAIsDeadValueImpl :
public AAIsDead {
4069 bool isAssumedDead()
const override {
return isAssumed(IS_DEAD); }
4072 bool isKnownDead()
const override {
return isKnown(IS_DEAD); }
4075 bool isAssumedDead(
const BasicBlock *BB)
const override {
return false; }
4078 bool isKnownDead(
const BasicBlock *BB)
const override {
return false; }
4081 bool isAssumedDead(
const Instruction *
I)
const override {
4082 return I == getCtxI() && isAssumedDead();
4086 bool isKnownDead(
const Instruction *
I)
const override {
4087 return isAssumedDead(
I) && isKnownDead();
4091 const std::string getAsStr(
Attributor *
A)
const override {
4092 return isAssumedDead() ?
"assumed-dead" :
"assumed-live";
4098 if (
V.getType()->isVoidTy() ||
V.use_empty())
4102 if (!isa<Constant>(V)) {
4103 if (
auto *
I = dyn_cast<Instruction>(&V))
4104 if (!
A.isRunOn(*
I->getFunction()))
4106 bool UsedAssumedInformation =
false;
4107 std::optional<Constant *>
C =
4108 A.getAssumedConstant(V, *
this, UsedAssumedInformation);
4113 auto UsePred = [&](
const Use &
U,
bool &Follow) {
return false; };
4118 return A.checkForAllUses(UsePred, *
this, V,
false,
4119 DepClassTy::REQUIRED,
4128 auto *CB = dyn_cast<CallBase>(
I);
4129 if (!CB || isa<IntrinsicInst>(CB))
4134 bool IsKnownNoUnwind;
4135 if (!AA::hasAssumedIRAttr<Attribute::NoUnwind>(
4136 A,
this, CallIRP, DepClassTy::OPTIONAL, IsKnownNoUnwind))
4144struct AAIsDeadFloating :
public AAIsDeadValueImpl {
4146 : AAIsDeadValueImpl(IRP,
A) {}
4150 AAIsDeadValueImpl::initialize(
A);
4152 if (isa<UndefValue>(getAssociatedValue())) {
4153 indicatePessimisticFixpoint();
4157 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
4158 if (!isAssumedSideEffectFree(
A,
I)) {
4159 if (!isa_and_nonnull<StoreInst>(
I) && !isa_and_nonnull<FenceInst>(
I))
4160 indicatePessimisticFixpoint();
4162 removeAssumedBits(HAS_NO_EFFECT);
4169 if (!ExecDomainAA || !ExecDomainAA->isNoOpFence(FI))
4171 A.recordDependence(*ExecDomainAA, *
this, DepClassTy::OPTIONAL);
4178 if (
SI.isVolatile())
4184 bool UsedAssumedInformation =
false;
4185 if (!AssumeOnlyInst) {
4186 PotentialCopies.clear();
4188 UsedAssumedInformation)) {
4191 <<
"[AAIsDead] Could not determine potential copies of store!\n");
4195 LLVM_DEBUG(
dbgs() <<
"[AAIsDead] Store has " << PotentialCopies.size()
4196 <<
" potential copies.\n");
4201 UsedAssumedInformation))
4203 if (
auto *LI = dyn_cast<LoadInst>(V)) {
4205 auto &UserI = cast<Instruction>(*U.getUser());
4206 if (InfoCache.isOnlyUsedByAssume(UserI)) {
4208 AssumeOnlyInst->insert(&UserI);
4211 return A.isAssumedDead(U,
this,
nullptr, UsedAssumedInformation);
4217 <<
" is assumed live!\n");
4223 const std::string getAsStr(
Attributor *
A)
const override {
4224 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
4225 if (isa_and_nonnull<StoreInst>(
I))
4227 return "assumed-dead-store";
4228 if (isa_and_nonnull<FenceInst>(
I))
4230 return "assumed-dead-fence";
4231 return AAIsDeadValueImpl::getAsStr(
A);
4236 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
4237 if (
auto *SI = dyn_cast_or_null<StoreInst>(
I)) {
4238 if (!isDeadStore(
A, *SI))
4239 return indicatePessimisticFixpoint();
4240 }
else if (
auto *FI = dyn_cast_or_null<FenceInst>(
I)) {
4241 if (!isDeadFence(
A, *FI))
4242 return indicatePessimisticFixpoint();
4244 if (!isAssumedSideEffectFree(
A,
I))
4245 return indicatePessimisticFixpoint();
4246 if (!areAllUsesAssumedDead(
A, getAssociatedValue()))
4247 return indicatePessimisticFixpoint();
4252 bool isRemovableStore()
const override {
4253 return isAssumed(IS_REMOVABLE) && isa<StoreInst>(&getAssociatedValue());
4258 Value &
V = getAssociatedValue();
4259 if (
auto *
I = dyn_cast<Instruction>(&V)) {
4264 if (
auto *SI = dyn_cast<StoreInst>(
I)) {
4266 bool IsDead = isDeadStore(
A, *SI, &AssumeOnlyInst);
4269 A.deleteAfterManifest(*
I);
4270 for (
size_t i = 0; i < AssumeOnlyInst.
size(); ++i) {
4272 for (
auto *Usr : AOI->
users())
4273 AssumeOnlyInst.
insert(cast<Instruction>(Usr));
4274 A.deleteAfterManifest(*AOI);
4278 if (
auto *FI = dyn_cast<FenceInst>(
I)) {
4280 A.deleteAfterManifest(*FI);
4283 if (isAssumedSideEffectFree(
A,
I) && !isa<InvokeInst>(
I)) {
4284 A.deleteAfterManifest(*
I);
4292 void trackStatistics()
const override {
4301struct AAIsDeadArgument :
public AAIsDeadFloating {
4303 : AAIsDeadFloating(IRP,
A) {}
4307 Argument &Arg = *getAssociatedArgument();
4308 if (
A.isValidFunctionSignatureRewrite(Arg, {}))
4309 if (
A.registerFunctionSignatureRewrite(
4313 return ChangeStatus::CHANGED;
4315 return ChangeStatus::UNCHANGED;
4322struct AAIsDeadCallSiteArgument :
public AAIsDeadValueImpl {
4324 : AAIsDeadValueImpl(IRP,
A) {}
4328 AAIsDeadValueImpl::initialize(
A);
4329 if (isa<UndefValue>(getAssociatedValue()))
4330 indicatePessimisticFixpoint();
4339 Argument *Arg = getAssociatedArgument();
4341 return indicatePessimisticFixpoint();
4343 auto *ArgAA =
A.getAAFor<
AAIsDead>(*
this, ArgPos, DepClassTy::REQUIRED);
4345 return indicatePessimisticFixpoint();
4351 CallBase &CB = cast<CallBase>(getAnchorValue());
4353 assert(!isa<UndefValue>(
U.get()) &&
4354 "Expected undef values to be filtered out!");
4356 if (
A.changeUseAfterManifest(U, UV))
4357 return ChangeStatus::CHANGED;
4358 return ChangeStatus::UNCHANGED;
4365struct AAIsDeadCallSiteReturned :
public AAIsDeadFloating {
4367 : AAIsDeadFloating(IRP,
A) {}
4370 bool isAssumedDead()
const override {
4371 return AAIsDeadFloating::isAssumedDead() && IsAssumedSideEffectFree;
4376 AAIsDeadFloating::initialize(
A);
4377 if (isa<UndefValue>(getAssociatedValue())) {
4378 indicatePessimisticFixpoint();
4383 IsAssumedSideEffectFree = isAssumedSideEffectFree(
A, getCtxI());
4389 if (IsAssumedSideEffectFree && !isAssumedSideEffectFree(
A, getCtxI())) {
4390 IsAssumedSideEffectFree =
false;
4391 Changed = ChangeStatus::CHANGED;
4393 if (!areAllUsesAssumedDead(
A, getAssociatedValue()))
4394 return indicatePessimisticFixpoint();
4399 void trackStatistics()
const override {
4400 if (IsAssumedSideEffectFree)
4407 const std::string getAsStr(
Attributor *
A)
const override {
4408 return isAssumedDead()
4410 : (getAssumed() ?
"assumed-dead-users" :
"assumed-live");
4414 bool IsAssumedSideEffectFree =
true;
4417struct AAIsDeadReturned :
public AAIsDeadValueImpl {
4419 : AAIsDeadValueImpl(IRP,
A) {}
4424 bool UsedAssumedInformation =
false;
4425 A.checkForAllInstructions([](
Instruction &) {
return true; }, *
this,
4426 {Instruction::Ret}, UsedAssumedInformation);
4429 if (ACS.isCallbackCall() || !ACS.getInstruction())
4431 return areAllUsesAssumedDead(
A, *ACS.getInstruction());
4434 if (!
A.checkForAllCallSites(PredForCallSite, *
this,
true,
4435 UsedAssumedInformation))
4436 return indicatePessimisticFixpoint();
4438 return ChangeStatus::UNCHANGED;
4444 bool AnyChange =
false;
4452 bool UsedAssumedInformation =
false;
4453 A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
4454 UsedAssumedInformation);
4455 return AnyChange ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
4462struct AAIsDeadFunction :
public AAIsDead {
4468 assert(
F &&
"Did expect an anchor function");
4469 if (!isAssumedDeadInternalFunction(
A)) {
4470 ToBeExploredFrom.insert(&
F->getEntryBlock().front());
4471 assumeLive(
A,
F->getEntryBlock());
4475 bool isAssumedDeadInternalFunction(
Attributor &
A) {
4476 if (!getAnchorScope()->hasLocalLinkage())
4478 bool UsedAssumedInformation =
false;
4480 true, UsedAssumedInformation);
4484 const std::string getAsStr(
Attributor *
A)
const override {
4485 return "Live[#BB " + std::to_string(AssumedLiveBlocks.size()) +
"/" +
4486 std::to_string(getAnchorScope()->
size()) +
"][#TBEP " +
4487 std::to_string(ToBeExploredFrom.size()) +
"][#KDE " +
4488 std::to_string(KnownDeadEnds.size()) +
"]";
4493 assert(getState().isValidState() &&
4494 "Attempted to manifest an invalid state!");
4499 if (AssumedLiveBlocks.empty()) {
4500 A.deleteAfterManifest(
F);
4501 return ChangeStatus::CHANGED;
4507 bool Invoke2CallAllowed = !mayCatchAsynchronousExceptions(
F);
4509 KnownDeadEnds.set_union(ToBeExploredFrom);
4510 for (
const Instruction *DeadEndI : KnownDeadEnds) {
4511 auto *CB = dyn_cast<CallBase>(DeadEndI);
4514 bool IsKnownNoReturn;
4515 bool MayReturn = !AA::hasAssumedIRAttr<Attribute::NoReturn>(
4518 if (MayReturn && (!Invoke2CallAllowed || !isa<InvokeInst>(CB)))
4521 if (
auto *
II = dyn_cast<InvokeInst>(DeadEndI))
4522 A.registerInvokeWithDeadSuccessor(
const_cast<InvokeInst &
>(*
II));
4524 A.changeToUnreachableAfterManifest(
4525 const_cast<Instruction *
>(DeadEndI->getNextNode()));
4526 HasChanged = ChangeStatus::CHANGED;
4531 if (!AssumedLiveBlocks.count(&BB)) {
4532 A.deleteAfterManifest(BB);
4534 HasChanged = ChangeStatus::CHANGED;
4544 assert(
From->getParent() == getAnchorScope() &&
4546 "Used AAIsDead of the wrong function");
4547 return isValidState() && !AssumedLiveEdges.count(std::make_pair(
From, To));
4551 void trackStatistics()
const override {}
4554 bool isAssumedDead()
const override {
return false; }
4557 bool isKnownDead()
const override {
return false; }
4560 bool isAssumedDead(
const BasicBlock *BB)
const override {
4562 "BB must be in the same anchor scope function.");
4566 return !AssumedLiveBlocks.count(BB);
4570 bool isKnownDead(
const BasicBlock *BB)
const override {
4571 return getKnown() && isAssumedDead(BB);
4575 bool isAssumedDead(
const Instruction *
I)
const override {
4576 assert(
I->getParent()->getParent() == getAnchorScope() &&
4577 "Instruction must be in the same anchor scope function.");
4584 if (!AssumedLiveBlocks.count(
I->getParent()))
4590 if (KnownDeadEnds.count(PrevI) || ToBeExploredFrom.count(PrevI))
4598 bool isKnownDead(
const Instruction *
I)
const override {
4599 return getKnown() && isAssumedDead(
I);
4605 if (!AssumedLiveBlocks.insert(&BB).second)
4613 if (
const auto *CB = dyn_cast<CallBase>(&
I))
4615 if (
F->hasLocalLinkage())
4616 A.markLiveInternalFunction(*
F);
4640 bool IsKnownNoReturn;
4641 if (AA::hasAssumedIRAttr<Attribute::NoReturn>(
4643 return !IsKnownNoReturn;
4655 bool UsedAssumedInformation =
4656 identifyAliveSuccessors(
A, cast<CallBase>(
II), AA, AliveSuccessors);
4661 if (AAIsDeadFunction::mayCatchAsynchronousExceptions(*
II.getFunction())) {
4662 AliveSuccessors.
push_back(&
II.getUnwindDest()->front());
4666 bool IsKnownNoUnwind;
4667 if (AA::hasAssumedIRAttr<Attribute::NoUnwind>(
4669 UsedAssumedInformation |= !IsKnownNoUnwind;
4671 AliveSuccessors.
push_back(&
II.getUnwindDest()->front());
4674 return UsedAssumedInformation;
4681 bool UsedAssumedInformation =
false;
4685 std::optional<Constant *>
C =
4686 A.getAssumedConstant(*BI.
getCondition(), AA, UsedAssumedInformation);
4687 if (!
C || isa_and_nonnull<UndefValue>(*
C)) {
4689 }
else if (isa_and_nonnull<ConstantInt>(*
C)) {
4691 BI.
getSuccessor(1 - cast<ConstantInt>(*C)->getValue().getZExtValue());
4696 UsedAssumedInformation =
false;
4699 return UsedAssumedInformation;
4706 bool UsedAssumedInformation =
false;
4710 UsedAssumedInformation)) {
4717 if (Values.
empty() ||
4718 (Values.
size() == 1 &&
4719 isa_and_nonnull<UndefValue>(Values.
front().getValue()))) {
4721 return UsedAssumedInformation;
4724 Type &Ty = *
SI.getCondition()->getType();
4726 auto CheckForConstantInt = [&](
Value *
V) {
4727 if (
auto *CI = dyn_cast_if_present<ConstantInt>(
AA::getWithType(*V, Ty))) {
4735 return CheckForConstantInt(
VAC.getValue());
4739 return UsedAssumedInformation;
4742 unsigned MatchedCases = 0;
4743 for (
const auto &CaseIt :
SI.cases()) {
4744 if (
Constants.count(CaseIt.getCaseValue())) {
4746 AliveSuccessors.
push_back(&CaseIt.getCaseSuccessor()->front());
4753 AliveSuccessors.
push_back(&
SI.getDefaultDest()->front());
4754 return UsedAssumedInformation;
4760 if (AssumedLiveBlocks.empty()) {
4761 if (isAssumedDeadInternalFunction(
A))
4765 ToBeExploredFrom.insert(&
F->getEntryBlock().front());
4766 assumeLive(
A,
F->getEntryBlock());
4770 LLVM_DEBUG(
dbgs() <<
"[AAIsDead] Live [" << AssumedLiveBlocks.size() <<
"/"
4771 << getAnchorScope()->
size() <<
"] BBs and "
4772 << ToBeExploredFrom.size() <<
" exploration points and "
4773 << KnownDeadEnds.size() <<
" known dead ends\n");
4778 ToBeExploredFrom.end());
4779 decltype(ToBeExploredFrom) NewToBeExploredFrom;
4782 while (!Worklist.
empty()) {
4788 while (!
I->isTerminator() && !isa<CallBase>(
I))
4789 I =
I->getNextNode();
4791 AliveSuccessors.
clear();
4793 bool UsedAssumedInformation =
false;
4794 switch (
I->getOpcode()) {
4798 "Expected non-terminators to be handled already!");
4802 case Instruction::Call:
4803 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<CallInst>(*
I),
4804 *
this, AliveSuccessors);
4806 case Instruction::Invoke:
4807 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<InvokeInst>(*
I),
4808 *
this, AliveSuccessors);
4810 case Instruction::Br:
4811 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<BranchInst>(*
I),
4812 *
this, AliveSuccessors);
4814 case Instruction::Switch:
4815 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<SwitchInst>(*
I),
4816 *
this, AliveSuccessors);
4820 if (UsedAssumedInformation) {
4821 NewToBeExploredFrom.insert(
I);
4822 }
else if (AliveSuccessors.
empty() ||
4823 (
I->isTerminator() &&
4824 AliveSuccessors.
size() <
I->getNumSuccessors())) {
4825 if (KnownDeadEnds.insert(
I))
4830 << AliveSuccessors.
size() <<
" UsedAssumedInformation: "
4831 << UsedAssumedInformation <<
"\n");
4833 for (
const Instruction *AliveSuccessor : AliveSuccessors) {
4834 if (!
I->isTerminator()) {
4835 assert(AliveSuccessors.size() == 1 &&
4836 "Non-terminator expected to have a single successor!");
4840 auto Edge = std::make_pair(
I->getParent(), AliveSuccessor->getParent());
4841 if (AssumedLiveEdges.insert(Edge).second)
4843 if (assumeLive(
A, *AliveSuccessor->getParent()))
4850 if (NewToBeExploredFrom.size() != ToBeExploredFrom.size() ||
4852 return !ToBeExploredFrom.count(I);
4855 ToBeExploredFrom = std::move(NewToBeExploredFrom);
4864 if (ToBeExploredFrom.empty() &&
4865 getAnchorScope()->
size() == AssumedLiveBlocks.size() &&
4867 return DeadEndI->isTerminator() && DeadEndI->getNumSuccessors() == 0;
4869 return indicatePessimisticFixpoint();
4874struct AAIsDeadCallSite final : AAIsDeadFunction {
4876 : AAIsDeadFunction(IRP,
A) {}
4885 "supported for call sites yet!");
4890 return indicatePessimisticFixpoint();
4894 void trackStatistics()
const override {}
4908 Value &
V = *getAssociatedValue().stripPointerCasts();
4910 A.getAttrs(getIRPosition(),
4911 {Attribute::Dereferenceable, Attribute::DereferenceableOrNull},
4914 takeKnownDerefBytesMaximum(Attr.getValueAsInt());
4917 bool IsKnownNonNull;
4918 AA::hasAssumedIRAttr<Attribute::NonNull>(
4919 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNonNull);
4921 bool CanBeNull, CanBeFreed;
4922 takeKnownDerefBytesMaximum(
V.getPointerDereferenceableBytes(
4923 A.getDataLayout(), CanBeNull, CanBeFreed));
4926 followUsesInMBEC(*
this,
A, getState(), *CtxI);
4931 StateType &getState()
override {
return *
this; }
4932 const StateType &getState()
const override {
return *
this; }
4938 const Value *UseV =
U->get();
4943 if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
I->isVolatile())
4948 Loc->Ptr,
Offset,
A.getDataLayout(),
true);
4949 if (
Base &&
Base == &getAssociatedValue())
4950 State.addAccessedBytes(
Offset, Loc->Size.getValue());
4956 bool IsNonNull =
false;
4957 bool TrackUse =
false;
4958 int64_t DerefBytes = getKnownNonNullAndDerefBytesForUse(
4959 A, *
this, getAssociatedValue(), U,
I, IsNonNull, TrackUse);
4960 LLVM_DEBUG(
dbgs() <<
"[AADereferenceable] Deref bytes: " << DerefBytes
4961 <<
" for instruction " << *
I <<
"\n");
4963 addAccessedBytesForUse(
A, U,
I, State);
4964 State.takeKnownDerefBytesMaximum(DerefBytes);
4971 bool IsKnownNonNull;
4972 bool IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
4973 A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
4974 if (IsAssumedNonNull &&
4975 A.hasAttr(getIRPosition(), Attribute::DereferenceableOrNull)) {
4976 A.removeAttrs(getIRPosition(), {Attribute::DereferenceableOrNull});
4977 return ChangeStatus::CHANGED;
4985 bool IsKnownNonNull;
4986 bool IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
4987 A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
4988 if (IsAssumedNonNull)
4990 Ctx, getAssumedDereferenceableBytes()));
4993 Ctx, getAssumedDereferenceableBytes()));
4997 const std::string getAsStr(
Attributor *
A)
const override {
4998 if (!getAssumedDereferenceableBytes())
4999 return "unknown-dereferenceable";
5000 bool IsKnownNonNull;
5001 bool IsAssumedNonNull =
false;
5003 IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
5004 *
A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
5005 return std::string(
"dereferenceable") +
5006 (IsAssumedNonNull ?
"" :
"_or_null") +
5007 (isAssumedGlobal() ?
"_globally" :
"") +
"<" +
5008 std::to_string(getKnownDereferenceableBytes()) +
"-" +
5009 std::to_string(getAssumedDereferenceableBytes()) +
">" +
5010 (!
A ?
" [non-null is unknown]" :
"");
5015struct AADereferenceableFloating : AADereferenceableImpl {
5017 : AADereferenceableImpl(IRP,
A) {}
5022 bool UsedAssumedInformation =
false;
5024 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
5026 Values.
push_back({getAssociatedValue(), getCtxI()});
5029 Stripped = Values.
size() != 1 ||
5030 Values.
front().getValue() != &getAssociatedValue();
5036 auto VisitValueCB = [&](
const Value &
V) ->
bool {
5038 DL.getIndexSizeInBits(
V.getType()->getPointerAddressSpace());
5046 int64_t DerefBytes = 0;
5047 if (!AA || (!Stripped &&
this == AA)) {
5050 bool CanBeNull, CanBeFreed;
5052 Base->getPointerDereferenceableBytes(
DL, CanBeNull, CanBeFreed);
5053 T.GlobalState.indicatePessimisticFixpoint();
5056 DerefBytes =
DS.DerefBytesState.getAssumed();
5057 T.GlobalState &=
DS.GlobalState;
5063 int64_t OffsetSExt =
Offset.getSExtValue();
5067 T.takeAssumedDerefBytesMinimum(
5068 std::max(int64_t(0), DerefBytes - OffsetSExt));
5073 T.takeKnownDerefBytesMaximum(
5074 std::max(int64_t(0), DerefBytes - OffsetSExt));
5075 T.indicatePessimisticFixpoint();
5076 }
else if (OffsetSExt > 0) {
5082 T.indicatePessimisticFixpoint();
5086 return T.isValidState();
5089 for (
const auto &VAC : Values)
5090 if (!VisitValueCB(*
VAC.getValue()))
5091 return indicatePessimisticFixpoint();
5097 void trackStatistics()
const override {
5103struct AADereferenceableReturned final
5104 : AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl> {
5106 AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl>;
5111 void trackStatistics()
const override {
5117struct AADereferenceableArgument final
5118 : AAArgumentFromCallSiteArguments<AADereferenceable,
5119 AADereferenceableImpl> {
5121 AAArgumentFromCallSiteArguments<AADereferenceable, AADereferenceableImpl>;
5126 void trackStatistics()
const override {
5132struct AADereferenceableCallSiteArgument final : AADereferenceableFloating {
5134 : AADereferenceableFloating(IRP,
A) {}
5137 void trackStatistics()
const override {
5143struct AADereferenceableCallSiteReturned final
5144 : AACalleeToCallSite<AADereferenceable, AADereferenceableImpl> {
5145 using Base = AACalleeToCallSite<AADereferenceable, AADereferenceableImpl>;
5150 void trackStatistics()
const override {
5160 Value &AssociatedValue,
const Use *U,
5164 if (isa<CastInst>(
I)) {
5166 TrackUse = !isa<PtrToIntInst>(
I);
5169 if (
auto *
GEP = dyn_cast<GetElementPtrInst>(
I)) {
5170 if (
GEP->hasAllConstantIndices())
5176 if (
const auto *CB = dyn_cast<CallBase>(
I)) {
5190 const Value *UseV =
U->get();
5191 if (
auto *SI = dyn_cast<StoreInst>(
I)) {
5192 if (
SI->getPointerOperand() == UseV)
5193 MA =
SI->getAlign();
5194 }
else if (
auto *LI = dyn_cast<LoadInst>(
I)) {
5195 if (LI->getPointerOperand() == UseV)
5196 MA = LI->getAlign();
5197 }
else if (
auto *AI = dyn_cast<AtomicRMWInst>(
I)) {
5198 if (AI->getPointerOperand() == UseV)
5199 MA = AI->getAlign();
5200 }
else if (
auto *AI = dyn_cast<AtomicCmpXchgInst>(
I)) {
5201 if (AI->getPointerOperand() == UseV)
5202 MA = AI->getAlign();
5208 unsigned Alignment = MA->value();
5212 if (
Base == &AssociatedValue) {
5231 A.getAttrs(getIRPosition(), {Attribute::Alignment},
Attrs);
5233 takeKnownMaximum(Attr.getValueAsInt());
5235 Value &
V = *getAssociatedValue().stripPointerCasts();
5236 takeKnownMaximum(
V.getPointerAlignment(
A.getDataLayout()).value());
5239 followUsesInMBEC(*
this,
A, getState(), *CtxI);
5244 ChangeStatus LoadStoreChanged = ChangeStatus::UNCHANGED;
5247 Value &AssociatedValue = getAssociatedValue();
5248 for (
const Use &U : AssociatedValue.
uses()) {
5249 if (
auto *SI = dyn_cast<StoreInst>(
U.getUser())) {
5250 if (
SI->getPointerOperand() == &AssociatedValue)
5251 if (
SI->getAlign() < getAssumedAlign()) {
5253 "Number of times alignment added to a store");
5254 SI->setAlignment(getAssumedAlign());
5255 LoadStoreChanged = ChangeStatus::CHANGED;
5257 }
else if (
auto *LI = dyn_cast<LoadInst>(
U.getUser())) {
5258 if (LI->getPointerOperand() == &AssociatedValue)
5259 if (LI->getAlign() < getAssumedAlign()) {
5260 LI->setAlignment(getAssumedAlign());
5262 "Number of times alignment added to a load");
5263 LoadStoreChanged = ChangeStatus::CHANGED;
5270 Align InheritAlign =
5271 getAssociatedValue().getPointerAlignment(
A.getDataLayout());
5272 if (InheritAlign >= getAssumedAlign())
5273 return LoadStoreChanged;
5274 return Changed | LoadStoreChanged;
5284 if (getAssumedAlign() > 1)
5292 bool TrackUse =
false;
5294 unsigned int KnownAlign =
5295 getKnownAlignForUse(
A, *
this, getAssociatedValue(), U,
I, TrackUse);
5296 State.takeKnownMaximum(KnownAlign);
5302 const std::string getAsStr(
Attributor *
A)
const override {
5303 return "align<" + std::to_string(getKnownAlign().
value()) +
"-" +
5304 std::to_string(getAssumedAlign().
value()) +
">";
5309struct AAAlignFloating : AAAlignImpl {
5317 bool UsedAssumedInformation =
false;
5319 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
5321 Values.
push_back({getAssociatedValue(), getCtxI()});
5324 Stripped = Values.
size() != 1 ||
5325 Values.
front().getValue() != &getAssociatedValue();
5329 auto VisitValueCB = [&](
Value &
V) ->
bool {
5330 if (isa<UndefValue>(V) || isa<ConstantPointerNull>(V))
5333 DepClassTy::REQUIRED);
5334 if (!AA || (!Stripped &&
this == AA)) {
5336 unsigned Alignment = 1;
5349 Alignment =
V.getPointerAlignment(
DL).value();
5352 T.takeKnownMaximum(Alignment);
5353 T.indicatePessimisticFixpoint();
5359 return T.isValidState();
5362 for (
const auto &VAC : Values) {
5363 if (!VisitValueCB(*
VAC.getValue()))
5364 return indicatePessimisticFixpoint();
5377struct AAAlignReturned final
5378 : AAReturnedFromReturnedValues<AAAlign, AAAlignImpl> {
5379 using Base = AAReturnedFromReturnedValues<AAAlign, AAAlignImpl>;
5387struct AAAlignArgument final
5388 : AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl> {
5389 using Base = AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl>;
5397 if (
A.getInfoCache().isInvolvedInMustTailCall(*getAssociatedArgument()))
5398 return ChangeStatus::UNCHANGED;
5399 return Base::manifest(
A);
5406struct AAAlignCallSiteArgument final : AAAlignFloating {
5408 : AAAlignFloating(IRP,
A) {}
5415 if (
Argument *Arg = getAssociatedArgument())
5416 if (
A.getInfoCache().isInvolvedInMustTailCall(*Arg))
5417 return ChangeStatus::UNCHANGED;
5419 Align InheritAlign =
5420 getAssociatedValue().getPointerAlignment(
A.getDataLayout());
5421 if (InheritAlign >= getAssumedAlign())
5422 Changed = ChangeStatus::UNCHANGED;
5429 if (
Argument *Arg = getAssociatedArgument()) {
5432 const auto *ArgAlignAA =
A.getAAFor<
AAAlign>(
5435 takeKnownMaximum(ArgAlignAA->getKnownAlign().value());
5445struct AAAlignCallSiteReturned final
5446 : AACalleeToCallSite<AAAlign, AAAlignImpl> {
5447 using Base = AACalleeToCallSite<AAAlign, AAAlignImpl>;
5464 assert(!AA::hasAssumedIRAttr<Attribute::NoReturn>(
5465 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
5470 const std::string getAsStr(
Attributor *
A)
const override {
5471 return getAssumed() ?
"noreturn" :
"may-return";
5476 auto CheckForNoReturn = [](
Instruction &) {
return false; };
5477 bool UsedAssumedInformation =
false;
5478 if (!
A.checkForAllInstructions(CheckForNoReturn, *
this,
5479 {(unsigned)Instruction::Ret},
5480 UsedAssumedInformation))
5481 return indicatePessimisticFixpoint();
5482 return ChangeStatus::UNCHANGED;
5486struct AANoReturnFunction final : AANoReturnImpl {
5488 : AANoReturnImpl(IRP,
A) {}
5495struct AANoReturnCallSite final
5496 : AACalleeToCallSite<AANoReturn, AANoReturnImpl> {
5498 : AACalleeToCallSite<
AANoReturn, AANoReturnImpl>(IRP,
A) {}
5515 Value &
V = getAssociatedValue();
5516 if (
auto *
C = dyn_cast<Constant>(&V)) {
5517 if (
C->isThreadDependent())
5518 indicatePessimisticFixpoint();
5520 indicateOptimisticFixpoint();
5523 if (
auto *CB = dyn_cast<CallBase>(&V))
5526 indicateOptimisticFixpoint();
5529 if (
auto *
I = dyn_cast<Instruction>(&V)) {
5534 indicatePessimisticFixpoint();
5544 Value &
V = getAssociatedValue();
5546 if (
auto *
I = dyn_cast<Instruction>(&V))
5547 Scope =
I->getFunction();
5548 if (
auto *
A = dyn_cast<Argument>(&V)) {
5550 if (!
Scope->hasLocalLinkage())
5554 return indicateOptimisticFixpoint();
5556 bool IsKnownNoRecurse;
5557 if (AA::hasAssumedIRAttr<Attribute::NoRecurse>(
5562 auto UsePred = [&](
const Use &
U,
bool &Follow) {
5563 const Instruction *UserI = dyn_cast<Instruction>(
U.getUser());
5564 if (!UserI || isa<GetElementPtrInst>(UserI) || isa<CastInst>(UserI) ||
5565 isa<PHINode>(UserI) || isa<SelectInst>(UserI)) {
5569 if (isa<LoadInst>(UserI) || isa<CmpInst>(UserI) ||
5570 (isa<StoreInst>(UserI) &&
5571 cast<StoreInst>(UserI)->getValueOperand() !=
U.get()))
5573 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
5577 if (!Callee || !
Callee->hasLocalLinkage())
5583 DepClassTy::OPTIONAL);
5584 if (!ArgInstanceInfoAA ||
5585 !ArgInstanceInfoAA->isAssumedUniqueForAnalysis())
5590 A, *CB, *Scope, *
this,
nullptr,
5598 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
5599 if (
auto *SI = dyn_cast<StoreInst>(OldU.
getUser())) {
5600 auto *
Ptr =
SI->getPointerOperand()->stripPointerCasts();
5608 if (!
A.checkForAllUses(UsePred, *
this, V,
true,
5609 DepClassTy::OPTIONAL,
5610 true, EquivalentUseCB))
5611 return indicatePessimisticFixpoint();
5617 const std::string getAsStr(
Attributor *
A)
const override {
5618 return isAssumedUniqueForAnalysis() ?
"<unique [fAa]>" :
"<unknown>";
5622 void trackStatistics()
const override {}
5626struct AAInstanceInfoFloating : AAInstanceInfoImpl {
5628 : AAInstanceInfoImpl(IRP,
A) {}
5632struct AAInstanceInfoArgument final : AAInstanceInfoFloating {
5634 : AAInstanceInfoFloating(IRP,
A) {}
5638struct AAInstanceInfoCallSiteArgument final : AAInstanceInfoImpl {
5640 : AAInstanceInfoImpl(IRP,
A) {}
5648 Argument *Arg = getAssociatedArgument();
5650 return indicatePessimisticFixpoint();
5655 return indicatePessimisticFixpoint();
5661struct AAInstanceInfoReturned final : AAInstanceInfoImpl {
5663 : AAInstanceInfoImpl(IRP,
A) {
5679struct AAInstanceInfoCallSiteReturned final : AAInstanceInfoFloating {
5681 : AAInstanceInfoFloating(IRP,
A) {}
5688 bool IgnoreSubsumingPositions) {
5689 assert(ImpliedAttributeKind == Attribute::NoCapture &&
5690 "Unexpected attribute kind");
5693 return V.use_empty();
5699 if (isa<UndefValue>(V) || (isa<ConstantPointerNull>(V) &&
5700 V.getType()->getPointerAddressSpace() == 0)) {
5704 if (
A.hasAttr(IRP, {Attribute::NoCapture},
5705 true, Attribute::NoCapture))
5711 {Attribute::NoCapture, Attribute::ByVal},
5713 A.manifestAttrs(IRP,
5721 determineFunctionCaptureCapabilities(IRP, *
F, State);
5723 A.manifestAttrs(IRP,
5742 bool ReadOnly =
F.onlyReadsMemory();
5743 bool NoThrow =
F.doesNotThrow();
5744 bool IsVoidReturn =
F.getReturnType()->isVoidTy();
5745 if (ReadOnly && NoThrow && IsVoidReturn) {
5758 if (NoThrow && IsVoidReturn)
5763 if (!NoThrow || ArgNo < 0 ||
5764 !
F.getAttributes().hasAttrSomewhere(Attribute::Returned))
5767 for (
unsigned U = 0, E =
F.arg_size(); U < E; ++U)
5768 if (
F.hasParamAttribute(U, Attribute::Returned)) {
5769 if (U ==
unsigned(ArgNo))
5787 assert(!AA::hasAssumedIRAttr<Attribute::NoCapture>(
5788 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
5798 if (!isAssumedNoCaptureMaybeReturned())
5801 if (isArgumentPosition()) {
5802 if (isAssumedNoCapture())
5810 const std::string getAsStr(
Attributor *
A)
const override {
5811 if (isKnownNoCapture())
5812 return "known not-captured";
5813 if (isAssumedNoCapture())
5814 return "assumed not-captured";
5815 if (isKnownNoCaptureMaybeReturned())
5816 return "known not-captured-maybe-returned";
5817 if (isAssumedNoCaptureMaybeReturned())
5818 return "assumed not-captured-maybe-returned";
5819 return "assumed-captured";
5827 LLVM_DEBUG(
dbgs() <<
"[AANoCapture] Check use: " << *
U.get() <<
" in "
5831 if (isa<PtrToIntInst>(UInst)) {
5833 return isCapturedIn(State,
true,
true,
5839 if (isa<StoreInst>(UInst))
5840 return isCapturedIn(State,
true,
true,
5844 if (isa<ReturnInst>(UInst)) {
5846 return isCapturedIn(State,
false,
false,
5848 return isCapturedIn(State,
true,
true,
5854 auto *CB = dyn_cast<CallBase>(UInst);
5856 return isCapturedIn(State,
true,
true,
5863 bool IsKnownNoCapture;
5865 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
5866 A,
this, CSArgPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
5868 if (IsAssumedNoCapture)
5869 return isCapturedIn(State,
false,
false,
5873 return isCapturedIn(State,
false,
false,
5878 return isCapturedIn(State,
true,
true,
5886 bool CapturedInInt,
bool CapturedInRet) {
5887 LLVM_DEBUG(
dbgs() <<
" - captures [Mem " << CapturedInMem <<
"|Int "
5888 << CapturedInInt <<
"|Ret " << CapturedInRet <<
"]\n");
5904 return indicatePessimisticFixpoint();
5911 return indicatePessimisticFixpoint();
5919 T.addKnownBits(NOT_CAPTURED_IN_MEM);
5921 addKnownBits(NOT_CAPTURED_IN_MEM);
5928 auto CheckReturnedArgs = [&](
bool &UsedAssumedInformation) {
5932 UsedAssumedInformation))
5934 bool SeenConstant =
false;
5936 if (isa<Constant>(
VAC.getValue())) {
5939 SeenConstant =
true;
5940 }
else if (!isa<Argument>(
VAC.getValue()) ||
5941 VAC.getValue() == getAssociatedArgument())
5947 bool IsKnownNoUnwind;
5948 if (AA::hasAssumedIRAttr<Attribute::NoUnwind>(
5950 bool IsVoidTy =
F->getReturnType()->isVoidTy();
5951 bool UsedAssumedInformation =
false;
5952 if (IsVoidTy || CheckReturnedArgs(UsedAssumedInformation)) {
5953 T.addKnownBits(NOT_CAPTURED_IN_RET);
5954 if (
T.isKnown(NOT_CAPTURED_IN_MEM))
5956 if (IsKnownNoUnwind && (IsVoidTy || !UsedAssumedInformation)) {
5957 addKnownBits(NOT_CAPTURED_IN_RET);
5958 if (isKnown(NOT_CAPTURED_IN_MEM))
5959 return indicateOptimisticFixpoint();
5970 auto UseCheck = [&](
const Use &
U,
bool &Follow) ->
bool {
5975 return checkUse(
A,
T, U, Follow);
5983 if (!
A.checkForAllUses(UseCheck, *
this, *V))
5984 return indicatePessimisticFixpoint();
5987 auto Assumed = S.getAssumed();
5988 S.intersectAssumedBits(
T.getAssumed());
5989 if (!isAssumedNoCaptureMaybeReturned())
5990 return indicatePessimisticFixpoint();
5996struct AANoCaptureArgument final : AANoCaptureImpl {
5998 : AANoCaptureImpl(IRP,
A) {}
6005struct AANoCaptureCallSiteArgument final : AANoCaptureImpl {
6007 : AANoCaptureImpl(IRP,
A) {}
6015 Argument *Arg = getAssociatedArgument();
6017 return indicatePessimisticFixpoint();
6019 bool IsKnownNoCapture;
6021 if (AA::hasAssumedIRAttr<Attribute::NoCapture>(
6022 A,
this, ArgPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
6024 return ChangeStatus::UNCHANGED;
6026 return indicatePessimisticFixpoint();
6035struct AANoCaptureFloating final : AANoCaptureImpl {
6037 : AANoCaptureImpl(IRP,
A) {}
6040 void trackStatistics()
const override {
6046struct AANoCaptureReturned final : AANoCaptureImpl {
6048 : AANoCaptureImpl(IRP,
A) {
6063 void trackStatistics()
const override {}
6067struct AANoCaptureCallSiteReturned final : AANoCaptureImpl {
6069 : AANoCaptureImpl(IRP,
A) {}
6075 determineFunctionCaptureCapabilities(getIRPosition(), *
F, *
this);
6079 void trackStatistics()
const override {
6090 SimplifiedAssociatedValue,
Other, Ty);
6091 if (SimplifiedAssociatedValue == std::optional<Value *>(
nullptr))
6095 if (SimplifiedAssociatedValue)
6096 dbgs() <<
"[ValueSimplify] is assumed to be "
6097 << **SimplifiedAssociatedValue <<
"\n";
6099 dbgs() <<
"[ValueSimplify] is assumed to be <none>\n";
6111 if (getAssociatedValue().
getType()->isVoidTy())
6112 indicatePessimisticFixpoint();
6113 if (
A.hasSimplificationCallback(getIRPosition()))
6114 indicatePessimisticFixpoint();
6118 const std::string getAsStr(
Attributor *
A)
const override {
6120 dbgs() <<
"SAV: " << (
bool)SimplifiedAssociatedValue <<
" ";
6121 if (SimplifiedAssociatedValue && *SimplifiedAssociatedValue)
6122 dbgs() <<
"SAV: " << **SimplifiedAssociatedValue <<
" ";
6124 return isValidState() ? (isAtFixpoint() ?
"simplified" :
"maybe-simple")
6129 void trackStatistics()
const override {}
6132 std::optional<Value *>
6133 getAssumedSimplifiedValue(
Attributor &
A)
const override {
6134 return SimplifiedAssociatedValue;
6145 if (CtxI &&
V.getType()->canLosslesslyBitCastTo(&Ty))
6147 : BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
6160 assert(CtxI &&
"Cannot reproduce an instruction without context!");
6161 if (
Check && (
I.mayReadFromMemory() ||
6166 Value *NewOp = reproduceValue(
A, QueryingAA, *
Op, Ty, CtxI,
Check, VMap);
6168 assert(
Check &&
"Manifest of new value unexpectedly failed!");
6194 if (
const auto &NewV = VMap.
lookup(&V))
6196 bool UsedAssumedInformation =
false;
6197 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
6199 if (!SimpleV.has_value())
6203 EffectiveV = *SimpleV;
6204 if (
auto *
C = dyn_cast<Constant>(EffectiveV))
6208 return ensureType(
A, *EffectiveV, Ty, CtxI,
Check);
6209 if (
auto *
I = dyn_cast<Instruction>(EffectiveV))
6210 if (
Value *NewV = reproduceInst(
A, QueryingAA, *
I, Ty, CtxI,
Check, VMap))
6211 return ensureType(
A, *NewV, Ty, CtxI,
Check);
6218 Value *NewV = SimplifiedAssociatedValue
6219 ? *SimplifiedAssociatedValue
6221 if (NewV && NewV != &getAssociatedValue()) {
6225 if (reproduceValue(
A, *
this, *NewV, *getAssociatedType(), CtxI,
6227 return reproduceValue(
A, *
this, *NewV, *getAssociatedType(), CtxI,
6236 const IRPosition &IRP,
bool Simplify =
true) {
6237 bool UsedAssumedInformation =
false;
6240 QueryingValueSimplified =
A.getAssumedSimplified(
6242 return unionAssumed(QueryingValueSimplified);
6246 template <
typename AAType>
bool askSimplifiedValueFor(
Attributor &
A) {
6247 if (!getAssociatedValue().
getType()->isIntegerTy())
6252 A.getAAFor<AAType>(*
this, getIRPosition(), DepClassTy::NONE);
6256 std::optional<Constant *> COpt = AA->getAssumedConstant(
A);
6259 SimplifiedAssociatedValue = std::nullopt;
6260 A.recordDependence(*AA, *
this, DepClassTy::OPTIONAL);
6263 if (
auto *
C = *COpt) {
6264 SimplifiedAssociatedValue =
C;
6265 A.recordDependence(*AA, *
this, DepClassTy::OPTIONAL);
6271 bool askSimplifiedValueForOtherAAs(
Attributor &
A) {
6272 if (askSimplifiedValueFor<AAValueConstantRange>(
A))
6274 if (askSimplifiedValueFor<AAPotentialConstantValues>(
A))
6282 for (
auto &U : getAssociatedValue().
uses()) {
6286 if (
auto *
PHI = dyn_cast_or_null<PHINode>(IP))
6287 IP =
PHI->getIncomingBlock(U)->getTerminator();
6288 if (
auto *NewV = manifestReplacementValue(
A, IP)) {
6290 <<
" -> " << *NewV <<
" :: " << *
this <<
"\n");
6291 if (
A.changeUseAfterManifest(U, *NewV))
6292 Changed = ChangeStatus::CHANGED;
6296 return Changed | AAValueSimplify::manifest(
A);
6301 SimplifiedAssociatedValue = &getAssociatedValue();
6302 return AAValueSimplify::indicatePessimisticFixpoint();
6306struct AAValueSimplifyArgument final : AAValueSimplifyImpl {
6308 : AAValueSimplifyImpl(IRP,
A) {}
6311 AAValueSimplifyImpl::initialize(
A);
6312 if (
A.hasAttr(getIRPosition(),
6313 {Attribute::InAlloca, Attribute::Preallocated,
6314 Attribute::StructRet, Attribute::Nest, Attribute::ByVal},
6316 indicatePessimisticFixpoint();
6323 Argument *Arg = getAssociatedArgument();
6329 return indicatePessimisticFixpoint();
6332 auto Before = SimplifiedAssociatedValue;
6346 bool UsedAssumedInformation =
false;
6347 std::optional<Constant *> SimpleArgOp =
6348 A.getAssumedConstant(ACSArgPos, *
this, UsedAssumedInformation);
6355 return unionAssumed(*SimpleArgOp);
6360 bool UsedAssumedInformation =
false;
6361 if (hasCallBaseContext() &&
6362 getCallBaseContext()->getCalledOperand() == Arg->
getParent())
6366 Success =
A.checkForAllCallSites(PredForCallSite, *
this,
true,
6367 UsedAssumedInformation);
6370 if (!askSimplifiedValueForOtherAAs(
A))
6371 return indicatePessimisticFixpoint();
6374 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6375 : ChangeStatus ::CHANGED;
6379 void trackStatistics()
const override {
6384struct AAValueSimplifyReturned : AAValueSimplifyImpl {
6386 : AAValueSimplifyImpl(IRP,
A) {}
6389 std::optional<Value *>
6390 getAssumedSimplifiedValue(
Attributor &
A)
const override {
6391 if (!isValidState())
6393 return SimplifiedAssociatedValue;
6398 auto Before = SimplifiedAssociatedValue;
6401 auto &RI = cast<ReturnInst>(
I);
6402 return checkAndUpdate(
6407 bool UsedAssumedInformation =
false;
6408 if (!
A.checkForAllInstructions(ReturnInstCB, *
this, {Instruction::Ret},
6409 UsedAssumedInformation))
6410 if (!askSimplifiedValueForOtherAAs(
A))
6411 return indicatePessimisticFixpoint();
6414 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6415 : ChangeStatus ::CHANGED;
6421 return ChangeStatus::UNCHANGED;
6425 void trackStatistics()
const override {
6430struct AAValueSimplifyFloating : AAValueSimplifyImpl {
6432 : AAValueSimplifyImpl(IRP,
A) {}
6436 AAValueSimplifyImpl::initialize(
A);
6437 Value &
V = getAnchorValue();
6440 if (isa<Constant>(V))
6441 indicatePessimisticFixpoint();
6446 auto Before = SimplifiedAssociatedValue;
6447 if (!askSimplifiedValueForOtherAAs(
A))
6448 return indicatePessimisticFixpoint();
6451 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6452 : ChangeStatus ::CHANGED;
6456 void trackStatistics()
const override {
6461struct AAValueSimplifyFunction : AAValueSimplifyImpl {
6463 : AAValueSimplifyImpl(IRP,
A) {}
6467 SimplifiedAssociatedValue =
nullptr;
6468 indicateOptimisticFixpoint();
6473 "AAValueSimplify(Function|CallSite)::updateImpl will not be called");
6476 void trackStatistics()
const override {
6481struct AAValueSimplifyCallSite : AAValueSimplifyFunction {
6483 : AAValueSimplifyFunction(IRP,
A) {}
6485 void trackStatistics()
const override {
6490struct AAValueSimplifyCallSiteReturned : AAValueSimplifyImpl {
6492 : AAValueSimplifyImpl(IRP,
A) {}
6495 AAValueSimplifyImpl::initialize(
A);
6496 Function *Fn = getAssociatedFunction();
6497 assert(Fn &&
"Did expect an associted function");
6503 checkAndUpdate(
A, *
this, IRP))
6504 indicateOptimisticFixpoint();
6506 indicatePessimisticFixpoint();
6514 return indicatePessimisticFixpoint();
6517 void trackStatistics()
const override {
6522struct AAValueSimplifyCallSiteArgument : AAValueSimplifyFloating {
6524 : AAValueSimplifyFloating(IRP,
A) {}
6532 if (FloatAA && FloatAA->getState().isValidState())
6535 if (
auto *NewV = manifestReplacementValue(
A, getCtxI())) {
6536 Use &
U = cast<CallBase>(&getAnchorValue())
6537 ->getArgOperandUse(getCallSiteArgNo());
6538 if (
A.changeUseAfterManifest(U, *NewV))
6539 Changed = ChangeStatus::CHANGED;
6542 return Changed | AAValueSimplify::manifest(
A);
6545 void trackStatistics()
const override {
6555 struct AllocationInfo {
6567 }
Status = STACK_DUE_TO_USE;
6571 bool HasPotentiallyFreeingUnknownUses =
false;
6575 bool MoveAllocaIntoEntry =
true;
6581 struct DeallocationInfo {
6589 bool MightFreeUnknownObjects =
false;
6598 ~AAHeapToStackFunction() {
6601 for (
auto &It : AllocationInfos)
6602 It.second->~AllocationInfo();
6603 for (
auto &It : DeallocationInfos)
6604 It.second->~DeallocationInfo();
6608 AAHeapToStack::initialize(
A);
6611 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6618 DeallocationInfos[CB] =
new (
A.Allocator) DeallocationInfo{CB, FreedOp};
6627 AllocationInfo *AI =
new (
A.Allocator) AllocationInfo{CB};
6628 AllocationInfos[CB] = AI;
6630 TLI->getLibFunc(*CB, AI->LibraryFunctionId);
6636 bool UsedAssumedInformation =
false;
6637 bool Success =
A.checkForAllCallLikeInstructions(
6638 AllocationIdentifierCB, *
this, UsedAssumedInformation,
6642 assert(
Success &&
"Did not expect the call base visit callback to fail!");
6646 bool &) -> std::optional<Value *> {
return nullptr; };
6647 for (
const auto &It : AllocationInfos)
6650 for (
const auto &It : DeallocationInfos)
6655 const std::string getAsStr(
Attributor *
A)
const override {
6656 unsigned NumH2SMallocs = 0, NumInvalidMallocs = 0;
6657 for (
const auto &It : AllocationInfos) {
6658 if (It.second->Status == AllocationInfo::INVALID)
6659 ++NumInvalidMallocs;
6663 return "[H2S] Mallocs Good/Bad: " + std::to_string(NumH2SMallocs) +
"/" +
6664 std::to_string(NumInvalidMallocs);
6668 void trackStatistics()
const override {
6671 "Number of malloc/calloc/aligned_alloc calls converted to allocas");
6672 for (
const auto &It : AllocationInfos)
6673 if (It.second->Status != AllocationInfo::INVALID)
6677 bool isAssumedHeapToStack(
const CallBase &CB)
const override {
6679 if (AllocationInfo *AI =
6680 AllocationInfos.lookup(
const_cast<CallBase *
>(&CB)))
6681 return AI->Status != AllocationInfo::INVALID;
6685 bool isAssumedHeapToStackRemovedFree(
CallBase &CB)
const override {
6686 if (!isValidState())
6689 for (
const auto &It : AllocationInfos) {
6690 AllocationInfo &AI = *It.second;
6691 if (AI.Status == AllocationInfo::INVALID)
6694 if (AI.PotentialFreeCalls.count(&CB))
6702 assert(getState().isValidState() &&
6703 "Attempted to manifest an invalid state!");
6707 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6709 for (
auto &It : AllocationInfos) {
6710 AllocationInfo &AI = *It.second;
6711 if (AI.Status == AllocationInfo::INVALID)
6714 for (
CallBase *FreeCall : AI.PotentialFreeCalls) {
6715 LLVM_DEBUG(
dbgs() <<
"H2S: Removing free call: " << *FreeCall <<
"\n");
6716 A.deleteAfterManifest(*FreeCall);
6717 HasChanged = ChangeStatus::CHANGED;
6720 LLVM_DEBUG(
dbgs() <<
"H2S: Removing malloc-like call: " << *AI.CB
6725 if (TLI->getLibFunc(*AI.CB, IsAllocShared))
6726 if (IsAllocShared == LibFunc___kmpc_alloc_shared)
6727 return OR <<
"Moving globalized variable to the stack.";
6728 return OR <<
"Moving memory allocation from the heap to the stack.";
6730 if (AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared)
6737 std::optional<APInt> SizeAPI =
getSize(
A, *
this, AI);
6739 Size = ConstantInt::get(AI.CB->getContext(), *SizeAPI);
6746 cast<ConstantInt>(SizeOffsetPair.
Offset)->isZero());
6751 ?
F->getEntryBlock().begin()
6752 : AI.CB->getIterator();
6755 if (
MaybeAlign RetAlign = AI.CB->getRetAlign())
6756 Alignment = std::max(Alignment, *RetAlign);
6758 std::optional<APInt> AlignmentAPI = getAPInt(
A, *
this, *
Align);
6759 assert(AlignmentAPI && AlignmentAPI->getZExtValue() > 0 &&
6760 "Expected an alignment during manifest!");
6762 std::max(Alignment,
assumeAligned(AlignmentAPI->getZExtValue()));
6766 unsigned AS =
DL.getAllocaAddrSpace();
6769 AI.CB->getName() +
".h2s", IP);
6771 if (Alloca->
getType() != AI.CB->getType())
6772 Alloca = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
6773 Alloca, AI.CB->getType(),
"malloc_cast", AI.CB->getIterator());
6778 "Must be able to materialize initial memory state of allocation");
6782 if (
auto *
II = dyn_cast<InvokeInst>(AI.CB)) {
6783 auto *NBB =
II->getNormalDest();
6785 A.deleteAfterManifest(*AI.CB);
6787 A.deleteAfterManifest(*AI.CB);
6793 if (!isa<UndefValue>(InitVal)) {
6796 Builder.CreateMemSet(Alloca, InitVal,
Size, std::nullopt);
6798 HasChanged = ChangeStatus::CHANGED;
6806 bool UsedAssumedInformation =
false;
6807 std::optional<Constant *> SimpleV =
6808 A.getAssumedConstant(V, AA, UsedAssumedInformation);
6810 return APInt(64, 0);
6811 if (
auto *CI = dyn_cast_or_null<ConstantInt>(*SimpleV))
6812 return CI->getValue();
6813 return std::nullopt;
6817 AllocationInfo &AI) {
6818 auto Mapper = [&](
const Value *
V) ->
const Value * {
6819 bool UsedAssumedInformation =
false;
6820 if (std::optional<Constant *> SimpleV =
6821 A.getAssumedConstant(*V, AA, UsedAssumedInformation))
6828 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6846 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6848 const auto *LivenessAA =
6852 A.getInfoCache().getMustBeExecutedContextExplorer();
6854 bool StackIsAccessibleByOtherThreads =
6855 A.getInfoCache().stackIsAccessibleByOtherThreads();
6858 A.getInfoCache().getAnalysisResultForFunction<
LoopAnalysis>(*F);
6859 std::optional<bool> MayContainIrreducibleControl;
6861 if (&
F->getEntryBlock() == &BB)
6863 if (!MayContainIrreducibleControl.has_value())
6865 if (*MayContainIrreducibleControl)
6874 bool HasUpdatedFrees =
false;
6876 auto UpdateFrees = [&]() {
6877 HasUpdatedFrees =
true;
6879 for (
auto &It : DeallocationInfos) {
6880 DeallocationInfo &DI = *It.second;
6883 if (DI.MightFreeUnknownObjects)
6887 bool UsedAssumedInformation =
false;
6888 if (
A.isAssumedDead(*DI.CB,
this, LivenessAA, UsedAssumedInformation,
6895 LLVM_DEBUG(
dbgs() <<
"[H2S] Unknown underlying object for free!\n");
6896 DI.MightFreeUnknownObjects =
true;
6902 if (isa<ConstantPointerNull>(Obj) || isa<UndefValue>(Obj))
6905 CallBase *ObjCB = dyn_cast<CallBase>(Obj);
6909 DI.MightFreeUnknownObjects =
true;
6913 AllocationInfo *AI = AllocationInfos.lookup(ObjCB);
6915 LLVM_DEBUG(
dbgs() <<
"[H2S] Free of a non-allocation object: " << *Obj
6917 DI.MightFreeUnknownObjects =
true;
6921 DI.PotentialAllocationCalls.insert(ObjCB);
6925 auto FreeCheck = [&](AllocationInfo &AI) {
6929 if (!StackIsAccessibleByOtherThreads) {
6931 if (!AA::hasAssumedIRAttr<Attribute::NoSync>(
6934 dbgs() <<
"[H2S] found an escaping use, stack is not accessible by "
6935 "other threads and function is not nosync:\n");
6939 if (!HasUpdatedFrees)
6943 if (AI.PotentialFreeCalls.size() != 1) {
6945 << AI.PotentialFreeCalls.size() <<
"\n");
6948 CallBase *UniqueFree = *AI.PotentialFreeCalls.begin();
6949 DeallocationInfo *DI = DeallocationInfos.lookup(UniqueFree);
6952 dbgs() <<
"[H2S] unique free call was not known as deallocation call "
6953 << *UniqueFree <<
"\n");
6956 if (DI->MightFreeUnknownObjects) {
6958 dbgs() <<
"[H2S] unique free call might free unknown allocations\n");
6961 if (DI->PotentialAllocationCalls.empty())
6963 if (DI->PotentialAllocationCalls.size() > 1) {
6965 << DI->PotentialAllocationCalls.size()
6966 <<
" different allocations\n");
6969 if (*DI->PotentialAllocationCalls.begin() != AI.CB) {
6972 <<
"[H2S] unique free call not known to free this allocation but "
6973 << **DI->PotentialAllocationCalls.begin() <<
"\n");
6978 if (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared) {
6980 if (!Explorer || !Explorer->findInContextOf(UniqueFree, CtxI)) {
6981 LLVM_DEBUG(
dbgs() <<
"[H2S] unique free call might not be executed "
6982 "with the allocation "
6983 << *UniqueFree <<
"\n");
6990 auto UsesCheck = [&](AllocationInfo &AI) {
6991 bool ValidUsesOnly =
true;
6993 auto Pred = [&](
const Use &
U,
bool &Follow) ->
bool {
6995 if (isa<LoadInst>(UserI))
6997 if (
auto *SI = dyn_cast<StoreInst>(UserI)) {
6998 if (
SI->getValueOperand() ==
U.get()) {
7000 <<
"[H2S] escaping store to memory: " << *UserI <<
"\n");
7001 ValidUsesOnly =
false;
7007 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
7010 if (DeallocationInfos.count(CB)) {
7011 AI.PotentialFreeCalls.insert(CB);
7018 bool IsKnownNoCapture;
7019 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
7024 bool IsAssumedNoFree = AA::hasAssumedIRAttr<Attribute::NoFree>(
7027 if (!IsAssumedNoCapture ||
7028 (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared &&
7029 !IsAssumedNoFree)) {
7030 AI.HasPotentiallyFreeingUnknownUses |= !IsAssumedNoFree;
7035 <<
"Could not move globalized variable to the stack. "
7036 "Variable is potentially captured in call. Mark "
7037 "parameter as `__attribute__((noescape))` to override.";
7040 if (ValidUsesOnly &&
7041 AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared)
7045 ValidUsesOnly =
false;
7050 if (isa<GetElementPtrInst>(UserI) || isa<BitCastInst>(UserI) ||
7051 isa<PHINode>(UserI) || isa<SelectInst>(UserI)) {
7058 ValidUsesOnly =
false;
7061 if (!
A.checkForAllUses(Pred, *
this, *AI.CB,
false,
7063 [&](
const Use &OldU,
const Use &NewU) {
7064 auto *SI = dyn_cast<StoreInst>(OldU.getUser());
7065 return !SI || StackIsAccessibleByOtherThreads ||
7066 AA::isAssumedThreadLocalObject(
7067 A, *SI->getPointerOperand(), *this);
7070 return ValidUsesOnly;
7075 for (
auto &It : AllocationInfos) {
7076 AllocationInfo &AI = *It.second;
7077 if (AI.Status == AllocationInfo::INVALID)
7081 std::optional<APInt> APAlign = getAPInt(
A, *
this, *
Align);
7085 LLVM_DEBUG(
dbgs() <<
"[H2S] Unknown allocation alignment: " << *AI.CB
7087 AI.Status = AllocationInfo::INVALID;
7092 !APAlign->isPowerOf2()) {
7093 LLVM_DEBUG(
dbgs() <<
"[H2S] Invalid allocation alignment: " << APAlign
7095 AI.Status = AllocationInfo::INVALID;
7102 if (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared &&
7107 dbgs() <<
"[H2S] Unknown allocation size: " << *AI.CB <<
"\n";
7109 dbgs() <<
"[H2S] Allocation size too large: " << *AI.CB <<
" vs. "
7113 AI.Status = AllocationInfo::INVALID;
7119 switch (AI.Status) {
7120 case AllocationInfo::STACK_DUE_TO_USE:
7123 AI.Status = AllocationInfo::STACK_DUE_TO_FREE;
7125 case AllocationInfo::STACK_DUE_TO_FREE:
7128 AI.Status = AllocationInfo::INVALID;
7131 case AllocationInfo::INVALID:
7138 bool IsGlobalizedLocal =
7139 AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared;
7140 if (AI.MoveAllocaIntoEntry &&
7141 (!
Size.has_value() ||
7142 (!IsGlobalizedLocal && IsInLoop(*AI.CB->getParent()))))
7143 AI.MoveAllocaIntoEntry =
false;
7157 AAPrivatizablePtr::indicatePessimisticFixpoint();
7158 PrivatizableType =
nullptr;
7159 return ChangeStatus::CHANGED;
7165 virtual std::optional<Type *> identifyPrivatizableType(
Attributor &
A) = 0;
7169 std::optional<Type *> combineTypes(std::optional<Type *> T0,
7170 std::optional<Type *>
T1) {
7180 std::optional<Type *> getPrivatizableType()
const override {
7181 return PrivatizableType;
7184 const std::string getAsStr(
Attributor *
A)
const override {
7185 return isAssumedPrivatizablePtr() ?
"[priv]" :
"[no-priv]";
7189 std::optional<Type *> PrivatizableType;
7194struct AAPrivatizablePtrArgument final :
public AAPrivatizablePtrImpl {
7196 : AAPrivatizablePtrImpl(IRP,
A) {}
7199 std::optional<Type *> identifyPrivatizableType(
Attributor &
A)
override {
7202 bool UsedAssumedInformation =
false;
7204 A.getAttrs(getIRPosition(), {Attribute::ByVal},
Attrs,
7206 if (!
Attrs.empty() &&
7208 true, UsedAssumedInformation))
7209 return Attrs[0].getValueAsType();
7211 std::optional<Type *> Ty;
7212 unsigned ArgNo = getIRPosition().getCallSiteArgNo();
7235 dbgs() <<
"[AAPrivatizablePtr] ACSPos: " << ACSArgPos <<
", CSTy: ";
7239 dbgs() <<
"<nullptr>";
7244 Ty = combineTypes(Ty, CSTy);
7247 dbgs() <<
" : New Type: ";
7249 (*Ty)->print(
dbgs());
7251 dbgs() <<
"<nullptr>";
7260 if (!
A.checkForAllCallSites(CallSiteCheck, *
this,
true,
7261 UsedAssumedInformation))
7268 PrivatizableType = identifyPrivatizableType(
A);
7269 if (!PrivatizableType)
7270 return ChangeStatus::UNCHANGED;
7271 if (!*PrivatizableType)
7272 return indicatePessimisticFixpoint();
7277 DepClassTy::OPTIONAL);
7280 if (!
A.hasAttr(getIRPosition(), Attribute::ByVal) &&
7283 return indicatePessimisticFixpoint();
7289 identifyReplacementTypes(*PrivatizableType, ReplacementTypes);
7293 Function &Fn = *getIRPosition().getAnchorScope();
7297 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] Missing TTI for function "
7299 return indicatePessimisticFixpoint();
7309 bool UsedAssumedInformation =
false;
7310 if (!
A.checkForAllCallSites(CallSiteCheck, *
this,
true,
7311 UsedAssumedInformation)) {
7313 dbgs() <<
"[AAPrivatizablePtr] ABI incompatibility detected for "
7315 return indicatePessimisticFixpoint();
7319 Argument *Arg = getAssociatedArgument();
7320 if (!
A.isValidFunctionSignatureRewrite(*Arg, ReplacementTypes)) {
7322 return indicatePessimisticFixpoint();
7329 auto IsCompatiblePrivArgOfCallback = [&](
CallBase &CB) {
7332 for (
const Use *U : CallbackUses) {
7334 assert(CBACS && CBACS.isCallbackCall());
7335 for (
Argument &CBArg : CBACS.getCalledFunction()->args()) {
7336 int CBArgNo = CBACS.getCallArgOperandNo(CBArg);
7340 <<
"[AAPrivatizablePtr] Argument " << *Arg
7341 <<
"check if can be privatized in the context of its parent ("
7343 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7345 << CBArgNo <<
"@" << CBACS.getCalledFunction()->getName()
7346 <<
")\n[AAPrivatizablePtr] " << CBArg <<
" : "
7347 << CBACS.getCallArgOperand(CBArg) <<
" vs "
7349 <<
"[AAPrivatizablePtr] " << CBArg <<
" : "
7350 << CBACS.getCallArgOperandNo(CBArg) <<
" vs " << ArgNo <<
"\n";
7353 if (CBArgNo !=
int(ArgNo))
7357 if (CBArgPrivAA && CBArgPrivAA->isValidState()) {
7361 if (*CBArgPrivTy == PrivatizableType)
7366 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7367 <<
" cannot be privatized in the context of its parent ("
7369 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7371 << CBArgNo <<
"@" << CBACS.getCalledFunction()->getName()
7372 <<
").\n[AAPrivatizablePtr] for which the argument "
7373 "privatization is not compatible.\n";
7387 "Expected a direct call operand for callback call operand");
7392 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7393 <<
" check if be privatized in the context of its parent ("
7395 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7397 << DCArgNo <<
"@" << DCCallee->
getName() <<
").\n";
7400 if (
unsigned(DCArgNo) < DCCallee->
arg_size()) {
7403 DepClassTy::REQUIRED);
7404 if (DCArgPrivAA && DCArgPrivAA->isValidState()) {
7408 if (*DCArgPrivTy == PrivatizableType)
7414 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7415 <<
" cannot be privatized in the context of its parent ("
7417 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7420 <<
").\n[AAPrivatizablePtr] for which the argument "
7421 "privatization is not compatible.\n";
7433 return IsCompatiblePrivArgOfDirectCS(ACS);
7437 if (!
A.checkForAllCallSites(IsCompatiblePrivArgOfOtherCallSite, *
this,
true,
7438 UsedAssumedInformation))
7439 return indicatePessimisticFixpoint();
7441 return ChangeStatus::UNCHANGED;
7447 identifyReplacementTypes(
Type *PrivType,
7451 assert(PrivType &&
"Expected privatizable type!");
7454 if (
auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
7455 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++)
7456 ReplacementTypes.
push_back(PrivStructType->getElementType(u));
7457 }
else if (
auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
7458 ReplacementTypes.
append(PrivArrayType->getNumElements(),
7459 PrivArrayType->getElementType());
7470 assert(PrivType &&
"Expected privatizable type!");
7476 if (
auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
7477 const StructLayout *PrivStructLayout =
DL.getStructLayout(PrivStructType);
7478 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
7483 }
else if (
auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
7484 Type *PointeeTy = PrivArrayType->getElementType();
7485 uint64_t PointeeTySize =
DL.getTypeStoreSize(PointeeTy);
7486 for (
unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
7497 void createReplacementValues(
Align Alignment,
Type *PrivType,
7501 assert(PrivType &&
"Expected privatizable type!");
7508 if (
auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
7509 const StructLayout *PrivStructLayout =
DL.getStructLayout(PrivStructType);
7510 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
7511 Type *PointeeTy = PrivStructType->getElementType(u);
7515 L->setAlignment(Alignment);
7518 }
else if (
auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
7519 Type *PointeeTy = PrivArrayType->getElementType();
7520 uint64_t PointeeTySize =
DL.getTypeStoreSize(PointeeTy);
7521 for (
unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
7524 L->setAlignment(Alignment);
7529 L->setAlignment(Alignment);
7536 if (!PrivatizableType)
7537 return ChangeStatus::UNCHANGED;
7538 assert(*PrivatizableType &&
"Expected privatizable type!");
7544 bool UsedAssumedInformation =
false;
7545 if (!
A.checkForAllInstructions(
7547 CallInst &CI = cast<CallInst>(I);
7548 if (CI.isTailCall())
7549 TailCalls.push_back(&CI);
7552 *
this, {Instruction::Call}, UsedAssumedInformation))
7553 return ChangeStatus::UNCHANGED;
7555 Argument *Arg = getAssociatedArgument();
7558 const auto *AlignAA =
7567 BasicBlock &EntryBB = ReplacementFn.getEntryBlock();
7570 unsigned AS =
DL.getAllocaAddrSpace();
7572 Arg->
getName() +
".priv", IP);
7573 createInitialization(*PrivatizableType, *AI, ReplacementFn,
7574 ArgIt->getArgNo(), IP);
7577 AI = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
7582 CI->setTailCall(
false);
7593 createReplacementValues(
7594 AlignAA ? AlignAA->getAssumedAlign() :
Align(0),
7595 *PrivatizableType, ACS,
7603 identifyReplacementTypes(*PrivatizableType, ReplacementTypes);
7606 if (
A.registerFunctionSignatureRewrite(*Arg, ReplacementTypes,
7607 std::move(FnRepairCB),
7608 std::move(ACSRepairCB)))
7609 return ChangeStatus::CHANGED;
7610 return ChangeStatus::UNCHANGED;
7614 void trackStatistics()
const override {
7619struct AAPrivatizablePtrFloating :
public AAPrivatizablePtrImpl {
7621 : AAPrivatizablePtrImpl(IRP,
A) {}
7626 indicatePessimisticFixpoint();
7631 "updateImpl will not be called");
7635 std::optional<Type *> identifyPrivatizableType(
Attributor &
A)
override {
7638 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] No underlying object found!\n");
7642 if (
auto *AI = dyn_cast<AllocaInst>(Obj))
7643 if (
auto *CI = dyn_cast<ConstantInt>(AI->getArraySize()))
7645 return AI->getAllocatedType();
7646 if (
auto *Arg = dyn_cast<Argument>(Obj)) {
7649 if (PrivArgAA && PrivArgAA->isAssumedPrivatizablePtr())
7653 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] Underlying object neither valid "
7654 "alloca nor privatizable argument: "
7660 void trackStatistics()
const override {
7665struct AAPrivatizablePtrCallSiteArgument final
7666 :
public AAPrivatizablePtrFloating {
7668 : AAPrivatizablePtrFloating(IRP,
A) {}
7672 if (
A.hasAttr(getIRPosition(), Attribute::ByVal))
7673 indicateOptimisticFixpoint();
7678 PrivatizableType = identifyPrivatizableType(
A);
7679 if (!PrivatizableType)
7680 return ChangeStatus::UNCHANGED;
7681 if (!*PrivatizableType)
7682 return indicatePessimisticFixpoint();
7685 bool IsKnownNoCapture;
7686 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
7687 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoCapture);
7688 if (!IsAssumedNoCapture) {
7689 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer might be captured!\n");
7690 return indicatePessimisticFixpoint();
7693 bool IsKnownNoAlias;
7694 if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
7695 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
7696 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer might alias!\n");
7697 return indicatePessimisticFixpoint();
7702 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer is written!\n");
7703 return indicatePessimisticFixpoint();
7706 return ChangeStatus::UNCHANGED;
7710 void trackStatistics()
const override {
7715struct AAPrivatizablePtrCallSiteReturned final
7716 :
public AAPrivatizablePtrFloating {
7718 : AAPrivatizablePtrFloating(IRP,
A) {}
7723 indicatePessimisticFixpoint();
7727 void trackStatistics()
const override {
7732struct AAPrivatizablePtrReturned final :
public AAPrivatizablePtrFloating {
7734 : AAPrivatizablePtrFloating(IRP,
A) {}
7739 indicatePessimisticFixpoint();
7743 void trackStatistics()
const override {
7759 intersectAssumedBits(BEST_STATE);
7760 getKnownStateFromValue(
A, getIRPosition(), getState());
7761 AAMemoryBehavior::initialize(
A);
7767 bool IgnoreSubsumingPositions =
false) {
7769 A.getAttrs(IRP, AttrKinds, Attrs, IgnoreSubsumingPositions);
7771 switch (Attr.getKindAsEnum()) {
7772 case Attribute::ReadNone:
7775 case Attribute::ReadOnly:
7778 case Attribute::WriteOnly:
7787 if (!
I->mayReadFromMemory())
7789 if (!
I->mayWriteToMemory())
7802 else if (isAssumedWriteOnly())
7811 if (
A.hasAttr(IRP, Attribute::ReadNone,
7813 return ChangeStatus::UNCHANGED;
7822 return ChangeStatus::UNCHANGED;
7825 A.removeAttrs(IRP, AttrKinds);
7828 A.removeAttrs(IRP, Attribute::Writable);
7835 const std::string getAsStr(
Attributor *
A)
const override {
7840 if (isAssumedWriteOnly())
7842 return "may-read/write";
7850 Attribute::ReadNone, Attribute::ReadOnly, Attribute::WriteOnly};
7853struct AAMemoryBehaviorFloating : AAMemoryBehaviorImpl {
7855 : AAMemoryBehaviorImpl(IRP,
A) {}
7861 void trackStatistics()
const override {
7866 else if (isAssumedWriteOnly())
7881struct AAMemoryBehaviorArgument : AAMemoryBehaviorFloating {
7883 : AAMemoryBehaviorFloating(IRP,
A) {}
7887 intersectAssumedBits(BEST_STATE);
7892 bool HasByVal =
A.hasAttr(IRP, {Attribute::ByVal},
7894 getKnownStateFromValue(
A, IRP, getState(),
7901 return ChangeStatus::UNCHANGED;
7905 if (
A.hasAttr(getIRPosition(),
7906 {Attribute::InAlloca, Attribute::Preallocated})) {
7907 removeKnownBits(NO_WRITES);
7908 removeAssumedBits(NO_WRITES);
7910 A.removeAttrs(getIRPosition(), AttrKinds);
7911 return AAMemoryBehaviorFloating::manifest(
A);
7915 void trackStatistics()
const override {
7920 else if (isAssumedWriteOnly())
7925struct AAMemoryBehaviorCallSiteArgument final : AAMemoryBehaviorArgument {
7927 : AAMemoryBehaviorArgument(IRP,
A) {}
7933 Argument *Arg = getAssociatedArgument();
7935 indicatePessimisticFixpoint();
7939 addKnownBits(NO_WRITES);
7940 removeKnownBits(NO_READS);
7941 removeAssumedBits(NO_READS);
7943 AAMemoryBehaviorArgument::initialize(
A);
7944 if (getAssociatedFunction()->isDeclaration())
7945 indicatePessimisticFixpoint();
7954 Argument *Arg = getAssociatedArgument();
7959 return indicatePessimisticFixpoint();
7964 void trackStatistics()
const override {
7969 else if (isAssumedWriteOnly())
7975struct AAMemoryBehaviorCallSiteReturned final : AAMemoryBehaviorFloating {
7977 : AAMemoryBehaviorFloating(IRP,
A) {}
7981 AAMemoryBehaviorImpl::initialize(
A);
7986 return ChangeStatus::UNCHANGED;
7990 void trackStatistics()
const override {}
7994struct AAMemoryBehaviorFunction final :
public AAMemoryBehaviorImpl {
7996 : AAMemoryBehaviorImpl(IRP,
A) {}
8006 Function &
F = cast<Function>(getAnchorValue());
8012 else if (isAssumedWriteOnly())
8015 A.removeAttrs(getIRPosition(), AttrKinds);
8020 return A.manifestAttrs(getIRPosition(),
8025 void trackStatistics()
const override {
8030 else if (isAssumedWriteOnly())
8036struct AAMemoryBehaviorCallSite final
8037 : AACalleeToCallSite<AAMemoryBehavior, AAMemoryBehaviorImpl> {
8044 CallBase &CB = cast<CallBase>(getAnchorValue());
8050 else if (isAssumedWriteOnly())
8053 A.removeAttrs(getIRPosition(), AttrKinds);
8058 Attribute::Writable);
8059 return A.manifestAttrs(
8064 void trackStatistics()
const override {
8069 else if (isAssumedWriteOnly())
8077 auto AssumedState = getAssumed();
8083 if (
const auto *CB = dyn_cast<CallBase>(&
I)) {
8086 if (MemBehaviorAA) {
8087 intersectAssumedBits(MemBehaviorAA->
getAssumed());
8088 return !isAtFixpoint();
8093 if (
I.mayReadFromMemory())
8094 removeAssumedBits(NO_READS);
8095 if (
I.mayWriteToMemory())
8096 removeAssumedBits(NO_WRITES);
8097 return !isAtFixpoint();
8100 bool UsedAssumedInformation =
false;
8101 if (!
A.checkForAllReadWriteInstructions(CheckRWInst, *
this,
8102 UsedAssumedInformation))
8103 return indicatePessimisticFixpoint();
8122 const auto *FnMemAA =
8126 S.addKnownBits(FnMemAA->getKnown());
8127 if ((S.getAssumed() & FnMemAA->getAssumed()) == S.getAssumed())
8133 auto AssumedState = S.getAssumed();
8139 bool IsKnownNoCapture;
8141 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
8145 if (!IsAssumedNoCapture &&
8147 S.intersectAssumedBits(FnMemAssumedState);
8153 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
8155 LLVM_DEBUG(
dbgs() <<
"[AAMemoryBehavior] Use: " << *U <<
" in " << *UserI
8163 Follow = followUsersOfUseIn(
A, U, UserI);
8167 analyzeUseIn(
A, U, UserI);
8169 return !isAtFixpoint();
8172 if (!
A.checkForAllUses(UsePred, *
this, getAssociatedValue()))
8173 return indicatePessimisticFixpoint();
8179bool AAMemoryBehaviorFloating::followUsersOfUseIn(
Attributor &
A,
const Use &U,
8183 if (isa<LoadInst>(UserI) || isa<ReturnInst>(UserI))
8188 const auto *CB = dyn_cast<CallBase>(UserI);
8198 if (
U.get()->getType()->isPointerTy()) {
8200 bool IsKnownNoCapture;
8201 return !AA::hasAssumedIRAttr<Attribute::NoCapture>(
8209void AAMemoryBehaviorFloating::analyzeUseIn(
Attributor &
A,
const Use &U,
8217 case Instruction::Load:
8219 removeAssumedBits(NO_READS);
8222 case Instruction::Store:
8227 removeAssumedBits(NO_WRITES);
8229 indicatePessimisticFixpoint();
8232 case Instruction::Call:
8233 case Instruction::CallBr:
8234 case Instruction::Invoke: {
8237 const auto *CB = cast<CallBase>(UserI);
8241 indicatePessimisticFixpoint();
8248 removeAssumedBits(NO_READS);
8255 if (
U.get()->getType()->isPointerTy())
8259 const auto *MemBehaviorAA =
8265 intersectAssumedBits(MemBehaviorAA->
getAssumed());
8273 removeAssumedBits(NO_READS);
8275 removeAssumedBits(NO_WRITES);
8287 return "all memory";
8290 std::string S =
"memory:";
8296 S +=
"internal global,";
8298 S +=
"external global,";
8302 S +=
"inaccessible,";
8316 AccessKind2Accesses.fill(
nullptr);
8319 ~AAMemoryLocationImpl() {
8322 for (AccessSet *AS : AccessKind2Accesses)
8329 intersectAssumedBits(BEST_STATE);
8330 getKnownStateFromValue(
A, getIRPosition(), getState());
8331 AAMemoryLocation::initialize(
A);
8337 bool IgnoreSubsumingPositions =
false) {
8346 bool UseArgMemOnly =
true;
8348 if (AnchorFn &&
A.isRunOn(*AnchorFn))
8352 A.getAttrs(IRP, {Attribute::Memory},
Attrs, IgnoreSubsumingPositions);
8361 State.
addKnownBits(inverseLocation(NO_INACCESSIBLE_MEM,
true,
true));
8366 State.
addKnownBits(inverseLocation(NO_ARGUMENT_MEM,
true,
true));
8370 A.manifestAttrs(IRP,
8380 NO_INACCESSIBLE_MEM | NO_ARGUMENT_MEM,
true,
true));
8384 A.manifestAttrs(IRP,
8403 else if (isAssumedInaccessibleMemOnly())
8406 else if (isAssumedArgMemOnly())
8409 else if (isAssumedInaccessibleOrArgMemOnly())
8424 if (DeducedAttrs.
size() != 1)
8425 return ChangeStatus::UNCHANGED;
8433 bool checkForAllAccessesToMemoryKind(
8435 MemoryLocationsKind)>
8437 MemoryLocationsKind RequestedMLK)
const override {
8438 if (!isValidState())
8441 MemoryLocationsKind AssumedMLK = getAssumedNotAccessedLocation();
8442 if (AssumedMLK == NO_LOCATIONS)
8446 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS;
8447 CurMLK *= 2, ++
Idx) {
8448 if (CurMLK & RequestedMLK)
8451 if (
const AccessSet *Accesses = AccessKind2Accesses[
Idx])
8452 for (
const AccessInfo &AI : *Accesses)
8453 if (!Pred(AI.I, AI.Ptr, AI.Kind, CurMLK))
8465 bool Changed =
false;
8466 MemoryLocationsKind KnownMLK = getKnown();
8467 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
8468 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2)
8469 if (!(CurMLK & KnownMLK))
8470 updateStateAndAccessesMap(getState(), CurMLK,
I,
nullptr, Changed,
8471 getAccessKindFromInst(
I));
8472 return AAMemoryLocation::indicatePessimisticFixpoint();
8492 bool operator()(
const AccessInfo &
LHS,
const AccessInfo &
RHS)
const {
8496 return LHS.Ptr <
RHS.Ptr;
8497 if (
LHS.Kind !=
RHS.Kind)
8498 return LHS.Kind <
RHS.Kind;
8506 std::array<AccessSet *, llvm::CTLog2<VALID_STATE>()> AccessKind2Accesses;
8523 AK =
I->mayReadFromMemory() ? READ :
NONE;
8541 Changed |= Accesses->insert(AccessInfo{
I,
Ptr, AK}).second;
8542 if (MLK == NO_UNKOWN_MEM)
8544 State.removeAssumedBits(MLK);
8551 unsigned AccessAS = 0);
8557void AAMemoryLocationImpl::categorizePtrValue(
8560 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Categorize pointer locations for "
8562 << getMemoryLocationsAsStr(State.getAssumed()) <<
"]\n");
8564 auto Pred = [&](
Value &Obj) {
8567 MemoryLocationsKind MLK = NO_LOCATIONS;
8577 if (isa<UndefValue>(&Obj))
8579 if (isa<Argument>(&Obj)) {
8586 MLK = NO_ARGUMENT_MEM;
8587 }
else if (
auto *GV = dyn_cast<GlobalValue>(&Obj)) {
8591 if (
auto *GVar = dyn_cast<GlobalVariable>(GV))
8592 if (GVar->isConstant())
8595 if (GV->hasLocalLinkage())
8596 MLK = NO_GLOBAL_INTERNAL_MEM;
8598 MLK = NO_GLOBAL_EXTERNAL_MEM;
8599 }
else if (isa<ConstantPointerNull>(&Obj) &&
8603 }
else if (isa<AllocaInst>(&Obj)) {
8605 }
else if (
const auto *CB = dyn_cast<CallBase>(&Obj)) {
8606 bool IsKnownNoAlias;
8607 if (AA::hasAssumedIRAttr<Attribute::NoAlias>(
8610 MLK = NO_MALLOCED_MEM;
8612 MLK = NO_UNKOWN_MEM;
8614 MLK = NO_UNKOWN_MEM;
8617 assert(MLK != NO_LOCATIONS &&
"No location specified!");
8618 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Ptr value can be categorized: "
8619 << Obj <<
" -> " << getMemoryLocationsAsStr(MLK) <<
"\n");
8620 updateStateAndAccessesMap(State, MLK, &
I, &Obj, Changed,
8621 getAccessKindFromInst(&
I));
8630 dbgs() <<
"[AAMemoryLocation] Pointer locations not categorized\n");
8631 updateStateAndAccessesMap(State, NO_UNKOWN_MEM, &
I,
nullptr, Changed,
8632 getAccessKindFromInst(&
I));
8637 dbgs() <<
"[AAMemoryLocation] Accessed locations with pointer locations: "
8638 << getMemoryLocationsAsStr(State.getAssumed()) <<
"\n");
8641void AAMemoryLocationImpl::categorizeArgumentPointerLocations(
8644 for (
unsigned ArgNo = 0, E = CB.
arg_size(); ArgNo < E; ++ArgNo) {
8653 const auto *ArgOpMemLocationAA =
8656 if (ArgOpMemLocationAA && ArgOpMemLocationAA->isAssumedReadNone())
8661 categorizePtrValue(
A, CB, *ArgOp, AccessedLocs, Changed);
8668 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Categorize accessed locations for "
8672 AccessedLocs.intersectAssumedBits(NO_LOCATIONS);
8674 if (
auto *CB = dyn_cast<CallBase>(&
I)) {
8680 <<
" [" << CBMemLocationAA <<
"]\n");
8681 if (!CBMemLocationAA) {
8682 updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &
I,
nullptr,
8683 Changed, getAccessKindFromInst(&
I));
8684 return NO_UNKOWN_MEM;
8687 if (CBMemLocationAA->isAssumedReadNone())
8688 return NO_LOCATIONS;
8690 if (CBMemLocationAA->isAssumedInaccessibleMemOnly()) {
8691 updateStateAndAccessesMap(AccessedLocs, NO_INACCESSIBLE_MEM, &
I,
nullptr,
8692 Changed, getAccessKindFromInst(&
I));
8693 return AccessedLocs.getAssumed();
8696 uint32_t CBAssumedNotAccessedLocs =
8697 CBMemLocationAA->getAssumedNotAccessedLocation();
8700 uint32_t CBAssumedNotAccessedLocsNoArgMem =
8701 CBAssumedNotAccessedLocs | NO_ARGUMENT_MEM | NO_GLOBAL_MEM;
8703 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2) {
8704 if (CBAssumedNotAccessedLocsNoArgMem & CurMLK)
8706 updateStateAndAccessesMap(AccessedLocs, CurMLK, &
I,
nullptr, Changed,
8707 getAccessKindFromInst(&
I));
8712 bool HasGlobalAccesses = ((~CBAssumedNotAccessedLocs) & NO_GLOBAL_MEM);
8713 if (HasGlobalAccesses) {
8716 updateStateAndAccessesMap(AccessedLocs, MLK, &
I,
Ptr, Changed,
8717 getAccessKindFromInst(&
I));
8720 if (!CBMemLocationAA->checkForAllAccessesToMemoryKind(
8721 AccessPred, inverseLocation(NO_GLOBAL_MEM,
false,
false)))
8722 return AccessedLocs.getWorstState();
8726 dbgs() <<
"[AAMemoryLocation] Accessed state before argument handling: "
8727 << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) <<
"\n");
8730 bool HasArgAccesses = ((~CBAssumedNotAccessedLocs) & NO_ARGUMENT_MEM);
8732 categorizeArgumentPointerLocations(
A, *CB, AccessedLocs, Changed);
8735 dbgs() <<
"[AAMemoryLocation] Accessed state after argument handling: "
8736 << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) <<
"\n");
8738 return AccessedLocs.getAssumed();
8743 dbgs() <<
"[AAMemoryLocation] Categorize memory access with pointer: "
8744 <<
I <<
" [" << *
Ptr <<
"]\n");
8745 categorizePtrValue(
A,
I, *
Ptr, AccessedLocs, Changed,
8746 Ptr->getType()->getPointerAddressSpace());
8747 return AccessedLocs.getAssumed();
8750 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Failed to categorize instruction: "
8752 updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &
I,
nullptr, Changed,
8753 getAccessKindFromInst(&
I));
8754 return AccessedLocs.getAssumed();
8758struct AAMemoryLocationFunction final :
public AAMemoryLocationImpl {
8760 : AAMemoryLocationImpl(IRP,
A) {}
8765 const auto *MemBehaviorAA =
8769 return indicateOptimisticFixpoint();
8771 "AAMemoryLocation was not read-none but AAMemoryBehavior was!");
8772 A.recordDependence(*MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
8773 return ChangeStatus::UNCHANGED;
8777 auto AssumedState = getAssumed();
8778 bool Changed =
false;
8781 MemoryLocationsKind MLK = categorizeAccessedLocations(
A,
I, Changed);
8782 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Accessed locations for " <<
I
8783 <<
": " << getMemoryLocationsAsStr(MLK) <<
"\n");
8784 removeAssumedBits(inverseLocation(MLK,
false,
false));
8787 return getAssumedNotAccessedLocation() != VALID_STATE;
8790 bool UsedAssumedInformation =
false;
8791 if (!
A.checkForAllReadWriteInstructions(CheckRWInst, *
this,
8792 UsedAssumedInformation))
8793 return indicatePessimisticFixpoint();
8795 Changed |= AssumedState != getAssumed();
8796 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
8800 void trackStatistics()
const override {
8803 else if (isAssumedArgMemOnly())
8805 else if (isAssumedInaccessibleMemOnly())
8807 else if (isAssumedInaccessibleOrArgMemOnly())
8813struct AAMemoryLocationCallSite final : AAMemoryLocationImpl {
8815 : AAMemoryLocationImpl(IRP,
A) {}
8828 return indicatePessimisticFixpoint();
8829 bool Changed =
false;
8832 updateStateAndAccessesMap(getState(), MLK,
I,
Ptr, Changed,
8833 getAccessKindFromInst(
I));
8836 if (!FnAA->checkForAllAccessesToMemoryKind(AccessPred, ALL_LOCATIONS))
8837 return indicatePessimisticFixpoint();
8838 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
8842 void trackStatistics()
const override {
8856 const std::string getAsStr(
Attributor *
A)
const override {
8857 std::string Str(
"AADenormalFPMath[");
8860 DenormalState Known = getKnown();
8861 if (Known.Mode.isValid())
8862 OS <<
"denormal-fp-math=" << Known.Mode;
8866 if (Known.ModeF32.isValid())
8867 OS <<
" denormal-fp-math-f32=" << Known.ModeF32;
8873struct AADenormalFPMathFunction final : AADenormalFPMathImpl {
8875 : AADenormalFPMathImpl(IRP,
A) {}
8887 Known = DenormalState{
Mode, ModeF32};
8898 <<
"->" << getAssociatedFunction()->
getName() <<
'\n');
8906 CallerInfo->getState());
8910 bool AllCallSitesKnown =
true;
8911 if (!
A.checkForAllCallSites(CheckCallSite, *
this,
true, AllCallSitesKnown))
8912 return indicatePessimisticFixpoint();
8914 if (Change == ChangeStatus::CHANGED && isModeFixed())
8920 LLVMContext &Ctx = getAssociatedFunction()->getContext();
8925 AttrToRemove.
push_back(
"denormal-fp-math");
8931 if (Known.ModeF32 != Known.Mode) {
8933 Attribute::get(Ctx,
"denormal-fp-math-f32", Known.ModeF32.str()));
8935 AttrToRemove.
push_back(
"denormal-fp-math-f32");
8938 auto &IRP = getIRPosition();
8941 return A.removeAttrs(IRP, AttrToRemove) |
8942 A.manifestAttrs(IRP, AttrToAdd,
true);
8945 void trackStatistics()
const override {
8961 if (
A.hasSimplificationCallback(getIRPosition())) {
8962 indicatePessimisticFixpoint();
8967 intersectKnown(getConstantRangeFromSCEV(
A, getCtxI()));
8970 intersectKnown(getConstantRangeFromLVI(
A, getCtxI()));
8974 const std::string getAsStr(
Attributor *
A)
const override {
8978 getKnown().print(
OS);
8980 getAssumed().print(
OS);
8988 if (!getAnchorScope())
9001 const SCEV *S = SE->
getSCEV(&getAssociatedValue());
9012 if (!getAnchorScope())
9019 const SCEV *S = getSCEV(
A,
I);
9031 if (!getAnchorScope())
9050 bool isValidCtxInstructionForOutsideAnalysis(
Attributor &
A,
9052 bool AllowAACtxI)
const {
9053 if (!CtxI || (!AllowAACtxI && CtxI == getCtxI()))
9064 if (
auto *
I = dyn_cast<Instruction>(&getAssociatedValue())) {
9078 const Instruction *CtxI =
nullptr)
const override {
9079 if (!isValidCtxInstructionForOutsideAnalysis(
A, CtxI,
9085 return getKnown().intersectWith(SCEVR).intersectWith(LVIR);
9091 const Instruction *CtxI =
nullptr)
const override {
9096 if (!isValidCtxInstructionForOutsideAnalysis(
A, CtxI,
9098 return getAssumed();
9102 return getAssumed().intersectWith(SCEVR).intersectWith(LVIR);
9110 Ty, AssumedConstantRange.
getLower())),
9112 Ty, AssumedConstantRange.
getUpper()))};
9134 mdconst::extract<ConstantInt>(KnownRanges->
getOperand(0));
9136 mdconst::extract<ConstantInt>(KnownRanges->
getOperand(1));
9139 return Known.contains(Assumed) && Known != Assumed;
9146 auto *OldRangeMD =
I->getMetadata(LLVMContext::MD_range);
9147 if (isBetterRange(AssumedConstantRange, OldRangeMD)) {
9149 I->setMetadata(LLVMContext::MD_range,
9150 getMDNodeForConstantRange(
I->getType(),
I->getContext(),
9151 AssumedConstantRange));
9164 auto &
V = getAssociatedValue();
9168 assert(
I == getCtxI() &&
"Should not annotate an instruction which is "
9169 "not the context instruction");
9170 if (isa<CallInst>(
I) || isa<LoadInst>(
I))
9171 if (setRangeMetadataIfisBetterRange(
I, AssumedConstantRange))
9172 Changed = ChangeStatus::CHANGED;
9180struct AAValueConstantRangeArgument final
9181 : AAArgumentFromCallSiteArguments<
9182 AAValueConstantRange, AAValueConstantRangeImpl, IntegerRangeState,
9184 using Base = AAArgumentFromCallSiteArguments<
9191 void trackStatistics()
const override {
9196struct AAValueConstantRangeReturned
9197 : AAReturnedFromReturnedValues<AAValueConstantRange,
9198 AAValueConstantRangeImpl,
9199 AAValueConstantRangeImpl::StateType,
9203 AAValueConstantRangeImpl,
9211 if (!
A.isFunctionIPOAmendable(*getAssociatedFunction()))
9212 indicatePessimisticFixpoint();
9216 void trackStatistics()
const override {
9221struct AAValueConstantRangeFloating : AAValueConstantRangeImpl {
9223 : AAValueConstantRangeImpl(IRP,
A) {}
9227 AAValueConstantRangeImpl::initialize(
A);
9231 Value &
V = getAssociatedValue();
9233 if (
auto *
C = dyn_cast<ConstantInt>(&V)) {
9235 indicateOptimisticFixpoint();
9239 if (isa<UndefValue>(&V)) {
9242 indicateOptimisticFixpoint();
9246 if (isa<CallBase>(&V))
9249 if (isa<BinaryOperator>(&V) || isa<CmpInst>(&V) || isa<CastInst>(&V))
9253 if (
LoadInst *LI = dyn_cast<LoadInst>(&V))
9254 if (
auto *RangeMD = LI->getMetadata(LLVMContext::MD_range)) {
9261 if (isa<SelectInst>(V) || isa<PHINode>(V))
9265 indicatePessimisticFixpoint();
9268 << getAssociatedValue() <<
"\n");
9271 bool calculateBinaryOperator(
9279 bool UsedAssumedInformation =
false;
9280 const auto &SimplifiedLHS =
A.getAssumedSimplified(
9283 if (!SimplifiedLHS.has_value())
9285 if (!*SimplifiedLHS)
9287 LHS = *SimplifiedLHS;
9289 const auto &SimplifiedRHS =
A.getAssumedSimplified(
9292 if (!SimplifiedRHS.has_value())
9294 if (!*SimplifiedRHS)
9296 RHS = *SimplifiedRHS;
9304 DepClassTy::REQUIRED);
9308 auto LHSAARange = LHSAA->getAssumedConstantRange(
A, CtxI);
9312 DepClassTy::REQUIRED);
9316 auto RHSAARange = RHSAA->getAssumedConstantRange(
A, CtxI);
9318 auto AssumedRange = LHSAARange.binaryOp(BinOp->
getOpcode(), RHSAARange);
9320 T.unionAssumed(AssumedRange);
9324 return T.isValidState();
9327 bool calculateCastInst(
9336 bool UsedAssumedInformation =
false;
9337 const auto &SimplifiedOpV =
A.getAssumedSimplified(
9340 if (!SimplifiedOpV.has_value())
9342 if (!*SimplifiedOpV)
9344 OpV = *SimplifiedOpV;
9351 DepClassTy::REQUIRED);
9355 T.unionAssumed(OpAA->getAssumed().castOp(CastI->
getOpcode(),
9357 return T.isValidState();
9368 bool UsedAssumedInformation =
false;
9369 const auto &SimplifiedLHS =
A.getAssumedSimplified(
9372 if (!SimplifiedLHS.has_value())
9374 if (!*SimplifiedLHS)
9376 LHS = *SimplifiedLHS;
9378 const auto &SimplifiedRHS =
A.getAssumedSimplified(
9381 if (!SimplifiedRHS.has_value())
9383 if (!*SimplifiedRHS)
9385 RHS = *SimplifiedRHS;
9393 DepClassTy::REQUIRED);
9399 DepClassTy::REQUIRED);
9403 auto LHSAARange = LHSAA->getAssumedConstantRange(
A, CtxI);
9404 auto RHSAARange = RHSAA->getAssumedConstantRange(
A, CtxI);
9407 if (LHSAARange.isEmptySet() || RHSAARange.isEmptySet())
9410 bool MustTrue =
false, MustFalse =
false;
9412 auto AllowedRegion =
9415 if (AllowedRegion.intersectWith(LHSAARange).isEmptySet())
9421 assert((!MustTrue || !MustFalse) &&
9422 "Either MustTrue or MustFalse should be false!");
9431 LLVM_DEBUG(
dbgs() <<
"[AAValueConstantRange] " << *CmpI <<
" after "
9432 << (MustTrue ?
"true" : (MustFalse ?
"false" :
"unknown"))
9433 <<
": " <<
T <<
"\n\t" << *LHSAA <<
"\t<op>\n\t"
9437 return T.isValidState();
9446 if (!
I || isa<CallBase>(
I)) {
9449 bool UsedAssumedInformation =
false;
9450 const auto &SimplifiedOpV =
A.getAssumedSimplified(
9453 if (!SimplifiedOpV.has_value())
9455 if (!*SimplifiedOpV)
9457 Value *VPtr = *SimplifiedOpV;
9462 DepClassTy::REQUIRED);
9466 T.unionAssumed(AA->getAssumedConstantRange(
A, CtxI));
9470 return T.isValidState();
9474 if (
auto *BinOp = dyn_cast<BinaryOperator>(
I)) {
9475 if (!calculateBinaryOperator(
A, BinOp,
T, CtxI, QuerriedAAs))
9477 }
else if (
auto *CmpI = dyn_cast<CmpInst>(
I)) {
9478 if (!calculateCmpInst(
A, CmpI,
T, CtxI, QuerriedAAs))
9480 }
else if (
auto *CastI = dyn_cast<CastInst>(
I)) {
9481 if (!calculateCastInst(
A, CastI,
T, CtxI, QuerriedAAs))
9487 T.indicatePessimisticFixpoint();
9495 if (QueriedAA !=
this)
9498 if (
T.getAssumed() == getState().getAssumed())
9500 T.indicatePessimisticFixpoint();
9503 return T.isValidState();
9506 if (!VisitValueCB(getAssociatedValue(), getCtxI()))
9507 return indicatePessimisticFixpoint();
9512 return ChangeStatus::UNCHANGED;
9513 if (++NumChanges > MaxNumChanges) {
9514 LLVM_DEBUG(
dbgs() <<
"[AAValueConstantRange] performed " << NumChanges
9515 <<
" but only " << MaxNumChanges
9516 <<
" are allowed to avoid cyclic reasoning.");
9517 return indicatePessimisticFixpoint();
9519 return ChangeStatus::CHANGED;
9523 void trackStatistics()
const override {
9532 static constexpr int MaxNumChanges = 5;
9535struct AAValueConstantRangeFunction : AAValueConstantRangeImpl {
9537 : AAValueConstantRangeImpl(IRP,
A) {}
9541 llvm_unreachable(
"AAValueConstantRange(Function|CallSite)::updateImpl will "
9549struct AAValueConstantRangeCallSite : AAValueConstantRangeFunction {
9551 : AAValueConstantRangeFunction(IRP,
A) {}
9557struct AAValueConstantRangeCallSiteReturned
9558 : AACalleeToCallSite<AAValueConstantRange, AAValueConstantRangeImpl,
9559 AAValueConstantRangeImpl::StateType,
9563 AAValueConstantRangeImpl::StateType,
9569 if (
CallInst *CI = dyn_cast<CallInst>(&getAssociatedValue()))
9570 if (
auto *RangeMD = CI->getMetadata(LLVMContext::MD_range))
9573 AAValueConstantRangeImpl::initialize(
A);
9577 void trackStatistics()
const override {
9581struct AAValueConstantRangeCallSiteArgument : AAValueConstantRangeFloating {
9583 : AAValueConstantRangeFloating(IRP,
A) {}
9587 return ChangeStatus::UNCHANGED;
9591 void trackStatistics()
const override {
9608 if (
A.hasSimplificationCallback(getIRPosition()))
9609 indicatePessimisticFixpoint();
9611 AAPotentialConstantValues::initialize(
A);
9615 bool &ContainsUndef,
bool ForSelf) {
9617 bool UsedAssumedInformation =
false;
9619 UsedAssumedInformation)) {
9627 *
this, IRP, DepClassTy::REQUIRED);
9628 if (!PotentialValuesAA || !PotentialValuesAA->getState().isValidState())
9630 ContainsUndef = PotentialValuesAA->getState().undefIsContained();
9631 S = PotentialValuesAA->getState().getAssumedSet();
9638 ContainsUndef =
false;
9639 for (
auto &It : Values) {
9640 if (isa<UndefValue>(It.getValue())) {
9641 ContainsUndef =
true;
9644 auto *CI = dyn_cast<ConstantInt>(It.getValue());
9647 S.insert(CI->getValue());
9649 ContainsUndef &= S.empty();
9655 const std::string getAsStr(
Attributor *
A)
const override {
9664 return indicatePessimisticFixpoint();
9668struct AAPotentialConstantValuesArgument final
9669 : AAArgumentFromCallSiteArguments<AAPotentialConstantValues,
9670 AAPotentialConstantValuesImpl,
9671 PotentialConstantIntValuesState> {
9673 AAPotentialConstantValuesImpl,
9679 void trackStatistics()
const override {
9684struct AAPotentialConstantValuesReturned
9685 : AAReturnedFromReturnedValues<AAPotentialConstantValues,
9686 AAPotentialConstantValuesImpl> {
9688 AAPotentialConstantValuesImpl>;
9693 if (!
A.isFunctionIPOAmendable(*getAssociatedFunction()))
9694 indicatePessimisticFixpoint();
9695 Base::initialize(
A);
9699 void trackStatistics()
const override {
9704struct AAPotentialConstantValuesFloating : AAPotentialConstantValuesImpl {
9706 : AAPotentialConstantValuesImpl(IRP,
A) {}
9710 AAPotentialConstantValuesImpl::initialize(
A);
9714 Value &
V = getAssociatedValue();
9716 if (
auto *
C = dyn_cast<ConstantInt>(&V)) {
9717 unionAssumed(
C->getValue());
9718 indicateOptimisticFixpoint();
9722 if (isa<UndefValue>(&V)) {
9723 unionAssumedWithUndef();
9724 indicateOptimisticFixpoint();
9728 if (isa<BinaryOperator>(&V) || isa<ICmpInst>(&V) || isa<CastInst>(&V))
9731 if (isa<SelectInst>(V) || isa<PHINode>(V) || isa<LoadInst>(V))
9734 indicatePessimisticFixpoint();
9737 << getAssociatedValue() <<
"\n");
9751 case Instruction::Trunc:
9752 return Src.trunc(ResultBitWidth);
9753 case Instruction::SExt:
9754 return Src.sext(ResultBitWidth);
9755 case Instruction::ZExt:
9756 return Src.zext(ResultBitWidth);
9757 case Instruction::BitCast:
9764 bool &SkipOperation,
bool &Unsupported) {
9771 switch (BinOpcode) {
9775 case Instruction::Add:
9777 case Instruction::Sub:
9779 case Instruction::Mul:
9781 case Instruction::UDiv:
9783 SkipOperation =
true;
9787 case Instruction::SDiv:
9789 SkipOperation =
true;
9793 case Instruction::URem:
9795 SkipOperation =
true;
9799 case Instruction::SRem:
9801 SkipOperation =
true;
9805 case Instruction::Shl:
9807 case Instruction::LShr:
9809 case Instruction::AShr:
9811 case Instruction::And:
9813 case Instruction::Or:
9815 case Instruction::Xor:
9820 bool calculateBinaryOperatorAndTakeUnion(
const BinaryOperator *BinOp,
9822 bool SkipOperation =
false;
9825 calculateBinaryOperator(BinOp,
LHS,
RHS, SkipOperation, Unsupported);
9830 unionAssumed(Result);
9831 return isValidState();
9835 auto AssumedBefore = getAssumed();
9839 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
9840 SetTy LHSAAPVS, RHSAAPVS;
9842 LHSContainsUndef,
false) ||
9844 RHSContainsUndef,
false))
9845 return indicatePessimisticFixpoint();
9848 bool MaybeTrue =
false, MaybeFalse =
false;
9850 if (LHSContainsUndef && RHSContainsUndef) {
9853 unionAssumedWithUndef();
9854 }
else if (LHSContainsUndef) {
9855 for (
const APInt &R : RHSAAPVS) {
9856 bool CmpResult = calculateICmpInst(ICI, Zero, R);
9857 MaybeTrue |= CmpResult;
9858 MaybeFalse |= !CmpResult;
9859 if (MaybeTrue & MaybeFalse)
9860 return indicatePessimisticFixpoint();
9862 }
else if (RHSContainsUndef) {
9863 for (
const APInt &L : LHSAAPVS) {
9864 bool CmpResult = calculateICmpInst(ICI, L, Zero);
9865 MaybeTrue |= CmpResult;
9866 MaybeFalse |= !CmpResult;
9867 if (MaybeTrue & MaybeFalse)
9868 return indicatePessimisticFixpoint();
9871 for (
const APInt &L : LHSAAPVS) {
9872 for (
const APInt &R : RHSAAPVS) {
9873 bool CmpResult = calculateICmpInst(ICI, L, R);
9874 MaybeTrue |= CmpResult;
9875 MaybeFalse |= !CmpResult;
9876 if (MaybeTrue & MaybeFalse)
9877 return indicatePessimisticFixpoint();
9882 unionAssumed(
APInt( 1, 1));
9884 unionAssumed(
APInt( 1, 0));
9885 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
9886 : ChangeStatus::CHANGED;
9890 auto AssumedBefore = getAssumed();
9894 bool UsedAssumedInformation =
false;
9895 std::optional<Constant *>
C =
A.getAssumedConstant(
9896 *
SI->getCondition(), *
this, UsedAssumedInformation);
9899 bool OnlyLeft =
false, OnlyRight =
false;
9900 if (
C && *
C && (*C)->isOneValue())
9902 else if (
C && *
C && (*C)->isZeroValue())
9905 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
9906 SetTy LHSAAPVS, RHSAAPVS;
9909 LHSContainsUndef,
false))
9910 return indicatePessimisticFixpoint();
9914 RHSContainsUndef,
false))
9915 return indicatePessimisticFixpoint();
9917 if (OnlyLeft || OnlyRight) {
9919 auto *OpAA = OnlyLeft ? &LHSAAPVS : &RHSAAPVS;
9920 auto Undef = OnlyLeft ? LHSContainsUndef : RHSContainsUndef;
9923 unionAssumedWithUndef();
9925 for (
const auto &It : *OpAA)
9929 }
else if (LHSContainsUndef && RHSContainsUndef) {
9931 unionAssumedWithUndef();
9933 for (
const auto &It : LHSAAPVS)
9935 for (
const auto &It : RHSAAPVS)
9938 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
9939 : ChangeStatus::CHANGED;
9943 auto AssumedBefore = getAssumed();
9945 return indicatePessimisticFixpoint();
9950 bool SrcContainsUndef =
false;
9953 SrcContainsUndef,
false))
9954 return indicatePessimisticFixpoint();
9956 if (SrcContainsUndef)
9957 unionAssumedWithUndef();
9959 for (
const APInt &S : SrcPVS) {
9960 APInt T = calculateCastInst(CI, S, ResultBitWidth);
9964 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
9965 : ChangeStatus::CHANGED;
9969 auto AssumedBefore = getAssumed();
9973 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
9974 SetTy LHSAAPVS, RHSAAPVS;
9976 LHSContainsUndef,
false) ||
9978 RHSContainsUndef,
false))
9979 return indicatePessimisticFixpoint();
9984 if (LHSContainsUndef && RHSContainsUndef) {
9985 if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, Zero))
9986 return indicatePessimisticFixpoint();
9987 }
else if (LHSContainsUndef) {
9988 for (
const APInt &R : RHSAAPVS) {
9989 if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, R))
9990 return indicatePessimisticFixpoint();
9992 }
else if (RHSContainsUndef) {
9993 for (
const APInt &L : LHSAAPVS) {
9994 if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, Zero))
9995 return indicatePessimisticFixpoint();
9998 for (
const APInt &L : LHSAAPVS) {
9999 for (
const APInt &R : RHSAAPVS) {
10000 if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, R))
10001 return indicatePessimisticFixpoint();
10005 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10006 : ChangeStatus::CHANGED;
10010 auto AssumedBefore = getAssumed();
10012 bool ContainsUndef;
10014 ContainsUndef,
true))
10015 return indicatePessimisticFixpoint();
10016 if (ContainsUndef) {
10017 unionAssumedWithUndef();
10022 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10023 : ChangeStatus::CHANGED;
10028 Value &
V = getAssociatedValue();
10031 if (
auto *ICI = dyn_cast<ICmpInst>(
I))
10032 return updateWithICmpInst(
A, ICI);
10034 if (
auto *SI = dyn_cast<SelectInst>(
I))
10035 return updateWithSelectInst(
A, SI);
10037 if (
auto *CI = dyn_cast<CastInst>(
I))
10038 return updateWithCastInst(
A, CI);
10040 if (
auto *BinOp = dyn_cast<BinaryOperator>(
I))
10041 return updateWithBinaryOperator(
A, BinOp);
10043 if (isa<PHINode>(
I) || isa<LoadInst>(
I))
10044 return updateWithInstruction(
A,
I);
10046 return indicatePessimisticFixpoint();
10050 void trackStatistics()
const override {
10055struct AAPotentialConstantValuesFunction : AAPotentialConstantValuesImpl {
10057 : AAPotentialConstantValuesImpl(IRP,
A) {}
10062 "AAPotentialConstantValues(Function|CallSite)::updateImpl will "
10067 void trackStatistics()
const override {
10072struct AAPotentialConstantValuesCallSite : AAPotentialConstantValuesFunction {
10074 : AAPotentialConstantValuesFunction(IRP,
A) {}
10077 void trackStatistics()
const override {
10082struct AAPotentialConstantValuesCallSiteReturned
10083 : AACalleeToCallSite<AAPotentialConstantValues,
10084 AAPotentialConstantValuesImpl> {
10085 AAPotentialConstantValuesCallSiteReturned(
const IRPosition &IRP,
10088 AAPotentialConstantValuesImpl>(IRP,
A) {}
10091 void trackStatistics()
const override {
10096struct AAPotentialConstantValuesCallSiteArgument
10097 : AAPotentialConstantValuesFloating {
10098 AAPotentialConstantValuesCallSiteArgument(
const IRPosition &IRP,
10100 : AAPotentialConstantValuesFloating(IRP,
A) {}
10104 AAPotentialConstantValuesImpl::initialize(
A);
10105 if (isAtFixpoint())
10108 Value &
V = getAssociatedValue();
10110 if (
auto *
C = dyn_cast<ConstantInt>(&V)) {
10111 unionAssumed(
C->getValue());
10112 indicateOptimisticFixpoint();
10116 if (isa<UndefValue>(&V)) {
10117 unionAssumedWithUndef();
10118 indicateOptimisticFixpoint();
10125 Value &
V = getAssociatedValue();
10126 auto AssumedBefore = getAssumed();
10130 return indicatePessimisticFixpoint();
10131 const auto &S = AA->getAssumed();
10133 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10134 : ChangeStatus::CHANGED;
10138 void trackStatistics()
const override {
10147 bool IgnoreSubsumingPositions) {
10148 assert(ImpliedAttributeKind == Attribute::NoUndef &&
10149 "Unexpected attribute kind");
10150 if (
A.hasAttr(IRP, {Attribute::NoUndef}, IgnoreSubsumingPositions,
10151 Attribute::NoUndef))
10171 Value &V = getAssociatedValue();
10172 if (isa<UndefValue>(V))
10173 indicatePessimisticFixpoint();
10174 assert(!isImpliedByIR(
A, getIRPosition(), Attribute::NoUndef));
10180 const Value *UseV =
U->get();
10189 bool TrackUse =
false;
10192 if (isa<CastInst>(*
I) || isa<GetElementPtrInst>(*
I))
10198 const std::string getAsStr(
Attributor *
A)
const override {
10199 return getAssumed() ?
"noundef" :
"may-undef-or-poison";
10206 bool UsedAssumedInformation =
false;
10207 if (
A.isAssumedDead(getIRPosition(),
nullptr,
nullptr,
10208 UsedAssumedInformation))
10209 return ChangeStatus::UNCHANGED;
10213 if (!
A.getAssumedSimplified(getIRPosition(), *
this, UsedAssumedInformation,
10216 return ChangeStatus::UNCHANGED;
10217 return AANoUndef::manifest(
A);
10221struct AANoUndefFloating :
public AANoUndefImpl {
10223 : AANoUndefImpl(IRP,
A) {}
10227 AANoUndefImpl::initialize(
A);
10228 if (!getState().isAtFixpoint() && getAnchorScope() &&
10229 !getAnchorScope()->isDeclaration())
10231 followUsesInMBEC(*
this,
A, getState(), *CtxI);
10236 auto VisitValueCB = [&](
const IRPosition &IRP) ->
bool {
10237 bool IsKnownNoUndef;
10238 return AA::hasAssumedIRAttr<Attribute::NoUndef>(
10239 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoUndef);
10243 bool UsedAssumedInformation =
false;
10244 Value *AssociatedValue = &getAssociatedValue();
10246 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
10251 Values.
size() != 1 || Values.
front().getValue() != AssociatedValue;
10259 if (AVIRP == getIRPosition() || !VisitValueCB(AVIRP))
10260 return indicatePessimisticFixpoint();
10261 return ChangeStatus::UNCHANGED;
10264 for (
const auto &VAC : Values)
10266 return indicatePessimisticFixpoint();
10268 return ChangeStatus::UNCHANGED;
10275struct AANoUndefReturned final
10276 : AAReturnedFromReturnedValues<AANoUndef, AANoUndefImpl> {
10278 : AAReturnedFromReturnedValues<
AANoUndef, AANoUndefImpl>(IRP,
A) {}
10284struct AANoUndefArgument final
10285 : AAArgumentFromCallSiteArguments<AANoUndef, AANoUndefImpl> {
10287 : AAArgumentFromCallSiteArguments<
AANoUndef, AANoUndefImpl>(IRP,
A) {}
10293struct AANoUndefCallSiteArgument final : AANoUndefFloating {
10295 : AANoUndefFloating(IRP,
A) {}
10301struct AANoUndefCallSiteReturned final
10302 : AACalleeToCallSite<AANoUndef, AANoUndefImpl> {
10304 : AACalleeToCallSite<
AANoUndef, AANoUndefImpl>(IRP,
A) {}
10319 if (isa<UndefValue>(V)) {
10320 indicateOptimisticFixpoint();
10325 A.getAttrs(getIRPosition(), {Attribute::NoFPClass},
Attrs,
false);
10326 for (
const auto &Attr : Attrs) {
10337 followUsesInMBEC(*
this,
A, getState(), *CtxI);
10344 auto *CB = dyn_cast<CallBase>(
I);
10353 if (
auto *NoFPAA =
A.getAAFor<
AANoFPClass>(*
this, IRP, DepClassTy::NONE))
10354 State.addKnownBits(NoFPAA->getState().getKnown());
10358 const std::string getAsStr(
Attributor *
A)
const override {
10359 std::string
Result =
"nofpclass";
10361 OS << getKnownNoFPClass() <<
'/' << getAssumedNoFPClass();
10371struct AANoFPClassFloating :
public AANoFPClassImpl {
10373 : AANoFPClassImpl(IRP,
A) {}
10378 bool UsedAssumedInformation =
false;
10379 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
10381 Values.
push_back({getAssociatedValue(), getCtxI()});
10387 DepClassTy::REQUIRED);
10388 if (!AA ||
this == AA) {
10389 T.indicatePessimisticFixpoint();
10395 return T.isValidState();
10398 for (
const auto &VAC : Values)
10399 if (!VisitValueCB(*
VAC.getValue(),
VAC.getCtxI()))
10400 return indicatePessimisticFixpoint();
10406 void trackStatistics()
const override {
10411struct AANoFPClassReturned final
10412 : AAReturnedFromReturnedValues<AANoFPClass, AANoFPClassImpl,
10413 AANoFPClassImpl::StateType, false,
10414 Attribute::None, false> {
10416 : AAReturnedFromReturnedValues<
AANoFPClass, AANoFPClassImpl,
10417 AANoFPClassImpl::StateType,
false,
10421 void trackStatistics()
const override {
10426struct AANoFPClassArgument final
10427 : AAArgumentFromCallSiteArguments<AANoFPClass, AANoFPClassImpl> {
10429 : AAArgumentFromCallSiteArguments<
AANoFPClass, AANoFPClassImpl>(IRP,
A) {}
10435struct AANoFPClassCallSiteArgument final : AANoFPClassFloating {
10437 : AANoFPClassFloating(IRP,
A) {}
10440 void trackStatistics()
const override {
10445struct AANoFPClassCallSiteReturned final
10446 : AACalleeToCallSite<AANoFPClass, AANoFPClassImpl> {
10448 : AACalleeToCallSite<
AANoFPClass, AANoFPClassImpl>(IRP,
A) {}
10451 void trackStatistics()
const override {
10460 return CalledFunctions;
10463 bool hasUnknownCallee()
const override {
return HasUnknownCallee; }
10465 bool hasNonAsmUnknownCallee()
const override {
10466 return HasUnknownCalleeNonAsm;
10469 const std::string getAsStr(
Attributor *
A)
const override {
10470 return "CallEdges[" + std::to_string(HasUnknownCallee) +
"," +
10471 std::to_string(CalledFunctions.size()) +
"]";
10474 void trackStatistics()
const override {}
10478 if (CalledFunctions.insert(Fn)) {
10479 Change = ChangeStatus::CHANGED;
10485 void setHasUnknownCallee(
bool NonAsm,
ChangeStatus &Change) {
10486 if (!HasUnknownCallee)
10487 Change = ChangeStatus::CHANGED;
10488 if (NonAsm && !HasUnknownCalleeNonAsm)
10489 Change = ChangeStatus::CHANGED;
10490 HasUnknownCalleeNonAsm |= NonAsm;
10491 HasUnknownCallee =
true;
10499 bool HasUnknownCallee =
false;
10502 bool HasUnknownCalleeNonAsm =
false;
10505struct AACallEdgesCallSite :
public AACallEdgesImpl {
10507 : AACallEdgesImpl(IRP,
A) {}
10513 if (
Function *Fn = dyn_cast<Function>(&V)) {
10514 addCalledFunction(Fn, Change);
10516 LLVM_DEBUG(
dbgs() <<
"[AACallEdges] Unrecognized value: " << V <<
"\n");
10517 setHasUnknownCallee(
true, Change);
10527 if (isa<Constant>(V)) {
10528 VisitValue(*V, CtxI);
10532 bool UsedAssumedInformation =
false;
10538 for (
auto &VAC : Values)
10539 VisitValue(*
VAC.getValue(),
VAC.getCtxI());
10542 CallBase *CB = cast<CallBase>(getCtxI());
10545 if (
IA->hasSideEffects() &&
10548 setHasUnknownCallee(
false, Change);
10555 *
this, getIRPosition(), DepClassTy::OPTIONAL))
10556 if (IndirectCallAA->foreachCallee(
10557 [&](
Function *Fn) { return VisitValue(*Fn, CB); }))
10566 for (
const Use *U : CallbackUses)
10567 ProcessCalledOperand(
U->get(), CB);
10573struct AACallEdgesFunction :
public AACallEdgesImpl {
10575 : AACallEdgesImpl(IRP,
A) {}
10582 CallBase &CB = cast<CallBase>(Inst);
10588 if (CBEdges->hasNonAsmUnknownCallee())
10589 setHasUnknownCallee(
true, Change);
10590 if (CBEdges->hasUnknownCallee())
10591 setHasUnknownCallee(
false, Change);
10593 for (
Function *
F : CBEdges->getOptimisticEdges())
10594 addCalledFunction(
F, Change);
10600 bool UsedAssumedInformation =
false;
10601 if (!
A.checkForAllCallLikeInstructions(ProcessCallInst, *
this,
10602 UsedAssumedInformation,
10606 setHasUnknownCallee(
true, Change);
10615struct AAInterFnReachabilityFunction
10616 :
public CachedReachabilityAA<AAInterFnReachability, Function> {
10617 using Base = CachedReachabilityAA<AAInterFnReachability, Function>;
10621 bool instructionCanReach(
10624 assert(
From.getFunction() == getAnchorScope() &&
"Queried the wrong AA!");
10625 auto *NonConstThis =
const_cast<AAInterFnReachabilityFunction *
>(
this);
10627 RQITy StackRQI(
A,
From, To, ExclusionSet,
false);
10628 typename RQITy::Reachable
Result;
10629 if (!NonConstThis->checkQueryCache(
A, StackRQI, Result))
10630 return NonConstThis->isReachableImpl(
A, StackRQI,
10632 return Result == RQITy::Reachable::Yes;
10636 bool IsTemporaryRQI)
override {
10639 if (EntryI != RQI.From &&
10640 !instructionCanReach(
A, *EntryI, *RQI.To,
nullptr))
10641 return rememberResult(
A, RQITy::Reachable::No, RQI,
false,
10644 auto CheckReachableCallBase = [&](
CallBase *CB) {
10647 if (!CBEdges || !CBEdges->getState().isValidState())
10650 if (CBEdges->hasUnknownCallee())
10653 for (
Function *Fn : CBEdges->getOptimisticEdges()) {
10664 if (Fn == getAnchorScope()) {
10665 if (EntryI == RQI.From)
10672 DepClassTy::OPTIONAL);
10675 if (!InterFnReachability ||
10685 DepClassTy::OPTIONAL);
10691 if (CheckReachableCallBase(cast<CallBase>(&CBInst)))
10694 A, *RQI.From, CBInst, RQI.ExclusionSet);
10697 bool UsedExclusionSet =
true;
10698 bool UsedAssumedInformation =
false;
10699 if (!
A.checkForAllCallLikeInstructions(CheckCallBase, *
this,
10700 UsedAssumedInformation,
10702 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
10705 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
10709 void trackStatistics()
const override {}
10713template <
typename AAType>
10714static std::optional<Constant *>
10725 std::optional<Constant *> COpt = AA->getAssumedConstant(
A);
10727 if (!COpt.has_value()) {
10729 return std::nullopt;
10731 if (
auto *
C = *COpt) {
10742 std::optional<Value *> V;
10743 for (
auto &It : Values) {
10745 if (V.has_value() && !*V)
10748 if (!V.has_value())
10762 if (
A.hasSimplificationCallback(getIRPosition())) {
10763 indicatePessimisticFixpoint();
10766 Value *Stripped = getAssociatedValue().stripPointerCasts();
10767 if (isa<Constant>(Stripped) && !isa<ConstantExpr>(Stripped)) {
10768 addValue(
A, getState(), *Stripped, getCtxI(),
AA::AnyScope,
10770 indicateOptimisticFixpoint();
10773 AAPotentialValues::initialize(
A);
10777 const std::string getAsStr(
Attributor *
A)
const override {
10784 template <
typename AAType>
10785 static std::optional<Value *> askOtherAA(
Attributor &
A,
10790 std::optional<Constant *>
C = askForAssumedConstant<AAType>(
A, AA, IRP, Ty);
10792 return std::nullopt;
10804 if (
auto *CB = dyn_cast_or_null<CallBase>(CtxI)) {
10805 for (
const auto &U : CB->
args()) {
10815 Type &Ty = *getAssociatedType();
10816 std::optional<Value *> SimpleV =
10817 askOtherAA<AAValueConstantRange>(
A, *
this, ValIRP, Ty);
10818 if (SimpleV.has_value() && !*SimpleV) {
10820 *
this, ValIRP, DepClassTy::OPTIONAL);
10821 if (PotentialConstantsAA && PotentialConstantsAA->isValidState()) {
10822 for (
const auto &It : PotentialConstantsAA->getAssumedSet())
10823 State.
unionAssumed({{*ConstantInt::get(&Ty, It),
nullptr}, S});
10824 if (PotentialConstantsAA->undefIsContained())
10829 if (!SimpleV.has_value())
10836 if (isa<ConstantInt>(VPtr))
10841 State.unionAssumed({{*VPtr, CtxI}, S});
10851 return II.I ==
I &&
II.S == S;
10866 bool UsedAssumedInformation =
false;
10868 if (!
A.getAssumedSimplifiedValues(IRP,
this, Values, CS,
10869 UsedAssumedInformation))
10872 for (
auto &It : Values)
10873 ValueScopeMap[It] += CS;
10875 for (
auto &It : ValueScopeMap)
10876 addValue(
A, getState(), *It.first.getValue(), It.first.getCtxI(),
10883 auto NewS = StateType::getBestState(getState());
10884 for (
const auto &It : getAssumedSet()) {
10887 addValue(
A, NewS, *It.first.getValue(), It.first.getCtxI(),
10890 assert(!undefIsContained() &&
"Undef should be an explicit value!");
10898 getState() = StateType::getBestState(getState());
10899 getState().unionAssumed({{getAssociatedValue(), getCtxI()},
AA::AnyScope});
10906 return indicatePessimisticFixpoint();
10914 if (!getAssumedSimplifiedValues(
A, Values, S))
10916 Value &OldV = getAssociatedValue();
10917 if (isa<UndefValue>(OldV))
10919 Value *NewV = getSingleValue(
A, *
this, getIRPosition(), Values);
10920 if (!NewV || NewV == &OldV)
10925 if (
A.changeAfterManifest(getIRPosition(), *NewV))
10931 bool getAssumedSimplifiedValues(
10933 AA::ValueScope S,
bool RecurseForSelectAndPHI =
false)
const override {
10934 if (!isValidState())
10936 bool UsedAssumedInformation =
false;
10937 for (
const auto &It : getAssumedSet())
10938 if (It.second & S) {
10939 if (RecurseForSelectAndPHI && (isa<PHINode>(It.first.getValue()) ||
10940 isa<SelectInst>(It.first.getValue()))) {
10941 if (
A.getAssumedSimplifiedValues(
10943 this, Values, S, UsedAssumedInformation))
10948 assert(!undefIsContained() &&
"Undef should be an explicit value!");
10953struct AAPotentialValuesFloating : AAPotentialValuesImpl {
10955 : AAPotentialValuesImpl(IRP,
A) {}
10959 auto AssumedBefore = getAssumed();
10961 genericValueTraversal(
A, &getAssociatedValue());
10963 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
10964 : ChangeStatus::CHANGED;
10968 struct LivenessInfo {
10969 const AAIsDead *LivenessAA =
nullptr;
10970 bool AnyDead =
false;
10983 bool UsedAssumedInformation =
false;
10985 auto GetSimplifiedValues = [&](
Value &
V,
10987 if (!
A.getAssumedSimplifiedValues(
10993 return Values.
empty();
10995 if (GetSimplifiedValues(*
LHS, LHSValues))
10997 if (GetSimplifiedValues(*
RHS, RHSValues))
11009 F ?
A.getInfoCache().getTargetLibraryInfoForFunction(*
F) :
nullptr;
11017 auto CheckPair = [&](
Value &LHSV,
Value &RHSV) {
11018 if (isa<UndefValue>(LHSV) || isa<UndefValue>(RHSV)) {
11020 nullptr,
II.S, getAnchorScope());
11026 if (&LHSV == &RHSV &&
11030 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11037 if (TypedLHS && TypedRHS) {
11039 if (NewV && NewV != &Cmp) {
11040 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11050 bool LHSIsNull = isa<ConstantPointerNull>(LHSV);
11051 bool RHSIsNull = isa<ConstantPointerNull>(RHSV);
11052 if (!LHSIsNull && !RHSIsNull)
11058 assert((LHSIsNull || RHSIsNull) &&
11059 "Expected nullptr versus non-nullptr comparison at this point");
11062 unsigned PtrIdx = LHSIsNull;
11063 bool IsKnownNonNull;
11064 bool IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
11066 DepClassTy::REQUIRED, IsKnownNonNull);
11067 if (!IsAssumedNonNull)
11073 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11078 for (
auto &LHSValue : LHSValues)
11079 for (
auto &RHSValue : RHSValues)
11080 if (!CheckPair(*LHSValue.getValue(), *RHSValue.getValue()))
11088 bool UsedAssumedInformation =
false;
11090 std::optional<Constant *>
C =
11091 A.getAssumedConstant(*
SI.getCondition(), *
this, UsedAssumedInformation);
11092 bool NoValueYet = !
C.has_value();
11093 if (NoValueYet || isa_and_nonnull<UndefValue>(*
C))
11095 if (
auto *CI = dyn_cast_or_null<ConstantInt>(*
C)) {
11100 }
else if (&SI == &getAssociatedValue()) {
11105 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
11107 if (!SimpleV.has_value())
11110 addValue(
A, getState(), **SimpleV, CtxI,
II.S, getAnchorScope());
11122 bool UsedAssumedInformation =
false;
11124 PotentialValueOrigins, *
this,
11125 UsedAssumedInformation,
11127 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Failed to get potentially "
11128 "loaded values for load instruction "
11139 if (!
I || isa<AssumeInst>(
I))
11141 if (
auto *SI = dyn_cast<StoreInst>(
I))
11142 return A.isAssumedDead(
SI->getOperandUse(0),
this,
11144 UsedAssumedInformation,
11146 return A.isAssumedDead(*
I,
this,
nullptr,
11147 UsedAssumedInformation,
11150 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Load is onl used by assumes "
11151 "and we cannot delete all the stores: "
11162 bool AllLocal = ScopeIsLocal;
11167 if (!DynamicallyUnique) {
11168 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Not all potentially loaded "
11169 "values are dynamically unique: "
11174 for (
auto *PotentialCopy : PotentialCopies) {
11176 Worklist.
push_back({{*PotentialCopy, CtxI},
II.S});
11181 if (!AllLocal && ScopeIsLocal)
11186 bool handlePHINode(
11190 auto GetLivenessInfo = [&](
const Function &
F) -> LivenessInfo & {
11191 LivenessInfo &LI = LivenessAAs[&
F];
11192 if (!LI.LivenessAA)
11198 if (&
PHI == &getAssociatedValue()) {
11199 LivenessInfo &LI = GetLivenessInfo(*
PHI.getFunction());
11202 *
PHI.getFunction());
11206 for (
unsigned u = 0, e =
PHI.getNumIncomingValues(); u < e; u++) {
11208 if (LI.LivenessAA &&
11209 LI.LivenessAA->isEdgeDead(IncomingBB,
PHI.getParent())) {
11219 if (CyclePHI && isa<Instruction>(V) &&
11220 (!
C ||
C->contains(cast<Instruction>(V)->getParent())))
11228 bool UsedAssumedInformation =
false;
11229 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
11231 if (!SimpleV.has_value())
11235 addValue(
A, getState(), **SimpleV, &
PHI,
II.S, getAnchorScope());
11244 bool SomeSimplified =
false;
11245 bool UsedAssumedInformation =
false;
11250 const auto &SimplifiedOp =
A.getAssumedSimplified(
11255 if (!SimplifiedOp.has_value())
11259 NewOps[
Idx] = *SimplifiedOp;
11263 SomeSimplified |= (NewOps[
Idx] !=
Op);
11269 if (!SomeSimplified)
11276 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
11282 if (!NewV || NewV == &
I)
11285 LLVM_DEBUG(
dbgs() <<
"Generic inst " <<
I <<
" assumed simplified to "
11295 if (
auto *CI = dyn_cast<CmpInst>(&
I))
11297 CI->getPredicate(),
II, Worklist);
11299 switch (
I.getOpcode()) {
11300 case Instruction::Select:
11301 return handleSelectInst(
A, cast<SelectInst>(
I),
II, Worklist);
11302 case Instruction::PHI:
11303 return handlePHINode(
A, cast<PHINode>(
I),
II, Worklist, LivenessAAs);
11304 case Instruction::Load:
11305 return handleLoadInst(
A, cast<LoadInst>(
I),
II, Worklist);
11307 return handleGenericInst(
A,
I,
II, Worklist);
11334 LLVM_DEBUG(
dbgs() <<
"Generic value traversal reached iteration limit: "
11335 << Iteration <<
"!\n");
11336 addValue(
A, getState(), *V, CtxI, S, getAnchorScope());
11342 Value *NewV =
nullptr;
11343 if (
V->getType()->isPointerTy()) {
11346 if (
auto *CB = dyn_cast<CallBase>(V))
11356 if (NewV && NewV != V) {
11357 Worklist.
push_back({{*NewV, CtxI}, S});
11361 if (
auto *
I = dyn_cast<Instruction>(V)) {
11366 if (V != InitialV || isa<Argument>(V))
11371 if (V == InitialV && CtxI == getCtxI()) {
11372 indicatePessimisticFixpoint();
11376 addValue(
A, getState(), *V, CtxI, S, getAnchorScope());
11377 }
while (!Worklist.
empty());
11381 for (
auto &It : LivenessAAs)
11382 if (It.second.AnyDead)
11383 A.recordDependence(*It.second.LivenessAA, *
this, DepClassTy::OPTIONAL);
11387 void trackStatistics()
const override {
11392struct AAPotentialValuesArgument final : AAPotentialValuesImpl {
11393 using Base = AAPotentialValuesImpl;
11399 auto &Arg = cast<Argument>(getAssociatedValue());
11401 indicatePessimisticFixpoint();
11406 auto AssumedBefore = getAssumed();
11408 unsigned ArgNo = getCalleeArgNo();
11410 bool UsedAssumedInformation =
false;
11414 if (CSArgIRP.getPositionKind() == IRP_INVALID)
11417 if (!
A.getAssumedSimplifiedValues(CSArgIRP,
this, Values,
11419 UsedAssumedInformation))
11422 return isValidState();
11425 if (!
A.checkForAllCallSites(CallSitePred, *
this,
11427 UsedAssumedInformation))
11428 return indicatePessimisticFixpoint();
11430 Function *Fn = getAssociatedFunction();
11431 bool AnyNonLocal =
false;
11432 for (
auto &It : Values) {
11433 if (isa<Constant>(It.getValue())) {
11434 addValue(
A, getState(), *It.getValue(), It.getCtxI(),
AA::AnyScope,
11439 return indicatePessimisticFixpoint();
11441 if (
auto *Arg = dyn_cast<Argument>(It.getValue()))
11443 addValue(
A, getState(), *It.getValue(), It.getCtxI(),
AA::AnyScope,
11449 AnyNonLocal =
true;
11451 assert(!undefIsContained() &&
"Undef should be an explicit value!");
11453 giveUpOnIntraprocedural(
A);
11455 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11456 : ChangeStatus::CHANGED;
11460 void trackStatistics()
const override {
11465struct AAPotentialValuesReturned :
public AAPotentialValuesFloating {
11466 using Base = AAPotentialValuesFloating;
11473 if (!
F ||
F->isDeclaration() ||
F->getReturnType()->isVoidTy()) {
11474 indicatePessimisticFixpoint();
11481 ReturnedArg = &Arg;
11484 if (!
A.isFunctionIPOAmendable(*
F) ||
11485 A.hasSimplificationCallback(getIRPosition())) {
11487 indicatePessimisticFixpoint();
11489 indicateOptimisticFixpoint();
11495 auto AssumedBefore = getAssumed();
11496 bool UsedAssumedInformation =
false;
11499 Function *AnchorScope = getAnchorScope();
11505 UsedAssumedInformation,
11511 addValue(
A, getState(), *
VAC.getValue(),
11512 VAC.getCtxI() ?
VAC.getCtxI() : CtxI, S, AnchorScope);
11518 HandleReturnedValue(*ReturnedArg,
nullptr,
true);
11521 bool AddValues =
true;
11522 if (isa<PHINode>(RetI.getOperand(0)) ||
11523 isa<SelectInst>(RetI.getOperand(0))) {
11524 addValue(
A, getState(), *RetI.getOperand(0), &RetI,
AA::AnyScope,
11528 return HandleReturnedValue(*RetI.getOperand(0), &RetI, AddValues);
11531 if (!
A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
11532 UsedAssumedInformation,
11534 return indicatePessimisticFixpoint();
11537 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11538 : ChangeStatus::CHANGED;
11543 Function *AnchorScope)
const override {
11545 if (
auto *CB = dyn_cast<CallBase>(&V))
11548 Base::addValue(
A, State, V, CtxI, S, AnchorScope);
11553 return ChangeStatus::UNCHANGED;
11555 if (!getAssumedSimplifiedValues(
A, Values, AA::ValueScope::Intraprocedural,
11557 return ChangeStatus::UNCHANGED;
11558 Value *NewVal = getSingleValue(
A, *
this, getIRPosition(), Values);
11560 return ChangeStatus::UNCHANGED;
11563 if (
auto *Arg = dyn_cast<Argument>(NewVal)) {
11565 "Number of function with unique return");
11566 Changed |=
A.manifestAttrs(
11573 Value *RetOp = RetI.getOperand(0);
11574 if (isa<UndefValue>(RetOp) || RetOp == NewVal)
11577 if (
A.changeUseAfterManifest(RetI.getOperandUse(0), *NewVal))
11578 Changed = ChangeStatus::CHANGED;
11581 bool UsedAssumedInformation =
false;
11582 (void)
A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
11583 UsedAssumedInformation,
11593 void trackStatistics()
const override{
11600struct AAPotentialValuesFunction : AAPotentialValuesImpl {
11602 : AAPotentialValuesImpl(IRP,
A) {}
11611 void trackStatistics()
const override {
11616struct AAPotentialValuesCallSite : AAPotentialValuesFunction {
11618 : AAPotentialValuesFunction(IRP,
A) {}
11621 void trackStatistics()
const override {
11626struct AAPotentialValuesCallSiteReturned : AAPotentialValuesImpl {
11628 : AAPotentialValuesImpl(IRP,
A) {}
11632 auto AssumedBefore = getAssumed();
11636 return indicatePessimisticFixpoint();
11638 bool UsedAssumedInformation =
false;
11639 auto *CB = cast<CallBase>(getCtxI());
11642 UsedAssumedInformation))
11643 return indicatePessimisticFixpoint();
11648 UsedAssumedInformation))
11649 return indicatePessimisticFixpoint();
11653 bool AnyNonLocal =
false;
11654 for (
auto &It : Values) {
11655 Value *
V = It.getValue();
11656 std::optional<Value *> CallerV =
A.translateArgumentToCallSiteContent(
11657 V, *CB, *
this, UsedAssumedInformation);
11658 if (!CallerV.has_value()) {
11662 V = *CallerV ? *CallerV :
V;
11668 if (
auto *Arg = dyn_cast<Argument>(V))
11674 addValue(
A, getState(), *V, CB,
AA::AnyScope, getAnchorScope());
11676 AnyNonLocal =
true;
11684 UsedAssumedInformation))
11685 return indicatePessimisticFixpoint();
11686 AnyNonLocal =
false;
11688 for (
auto &It : Values) {
11689 Value *
V = It.getValue();
11691 return indicatePessimisticFixpoint();
11693 addValue(
A, getState(), *V, CB,
AA::AnyScope, getAnchorScope());
11695 AnyNonLocal =
true;
11701 giveUpOnIntraprocedural(
A);
11703 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11704 : ChangeStatus::CHANGED;
11708 return AAPotentialValues::indicatePessimisticFixpoint();
11712 void trackStatistics()
const override {
11717struct AAPotentialValuesCallSiteArgument : AAPotentialValuesFloating {
11719 : AAPotentialValuesFloating(IRP,
A) {}
11722 void trackStatistics()
const override {
11738 if (getKnown().isUniversal())
11739 return ChangeStatus::UNCHANGED;
11743 getAssumed().getSet().
end());
11745 return A.manifestAttrs(IRP,
11748 llvm::join(Set,
",")),
11753 return isValidState() && setContains(Assumption);
11757 const std::string getAsStr(
Attributor *
A)
const override {
11758 const SetContents &Known = getKnown();
11759 const SetContents &Assumed = getAssumed();
11763 const std::string KnownStr = llvm::join(Set,
",");
11765 std::string AssumedStr =
"Universal";
11766 if (!Assumed.isUniversal()) {
11767 Set.assign(Assumed.getSet().begin(), Assumed.getSet().end());
11768 AssumedStr = llvm::join(Set,
",");
11770 return "Known [" + KnownStr +
"]," +
" Assumed [" + AssumedStr +
"]";
11785struct AAAssumptionInfoFunction final : AAAssumptionInfoImpl {
11787 : AAAssumptionInfoImpl(IRP,
A,
11792 bool Changed =
false;
11797 DepClassTy::REQUIRED);
11801 Changed |= getIntersection(AssumptionAA->getAssumed());
11802 return !getAssumed().empty() || !getKnown().empty();
11805 bool UsedAssumedInformation =
false;
11810 if (!
A.checkForAllCallSites(CallSitePred, *
this,
true,
11811 UsedAssumedInformation))
11812 return indicatePessimisticFixpoint();
11814 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11817 void trackStatistics()
const override {}
11821struct AAAssumptionInfoCallSite final : AAAssumptionInfoImpl {
11824 : AAAssumptionInfoImpl(IRP,
A, getInitialAssumptions(IRP)) {}
11835 auto *AssumptionAA =
11838 return indicatePessimisticFixpoint();
11839 bool Changed = getIntersection(AssumptionAA->getAssumed());
11840 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11844 void trackStatistics()
const override {}
11856 return Assumptions;
11871struct AAUnderlyingObjectsImpl
11877 const std::string getAsStr(
Attributor *
A)
const override {
11878 return std::string(
"UnderlyingObjects ") +
11880 ? (std::string(
"inter #") +
11881 std::to_string(InterAssumedUnderlyingObjects.size()) +
11882 " objs" + std::string(
", intra #") +
11883 std::to_string(IntraAssumedUnderlyingObjects.size()) +
11889 void trackStatistics()
const override {}
11893 auto &
Ptr = getAssociatedValue();
11897 bool UsedAssumedInformation =
false;
11902 Scope, UsedAssumedInformation))
11905 bool Changed =
false;
11907 for (
unsigned I = 0;
I < Values.
size(); ++
I) {
11908 auto &
VAC = Values[
I];
11909 auto *Obj =
VAC.getValue();
11911 if (UO && UO !=
VAC.getValue() && SeenObjects.
insert(UO).second) {
11914 auto Pred = [&Values](
Value &
V) {
11919 if (!OtherAA || !OtherAA->forallUnderlyingObjects(Pred, Scope))
11921 "The forall call should not return false at this position");
11926 if (isa<SelectInst>(Obj)) {
11927 Changed |= handleIndirect(
A, *Obj, UnderlyingObjects, Scope);
11930 if (
auto *
PHI = dyn_cast<PHINode>(Obj)) {
11933 for (
unsigned u = 0, e =
PHI->getNumIncomingValues(); u < e; u++) {
11934 Changed |= handleIndirect(
A, *
PHI->getIncomingValue(u),
11935 UnderlyingObjects, Scope);
11940 Changed |= UnderlyingObjects.
insert(Obj);
11946 bool Changed =
false;
11950 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11953 bool forallUnderlyingObjects(
11956 if (!isValidState())
11957 return Pred(getAssociatedValue());
11960 ? IntraAssumedUnderlyingObjects
11961 : InterAssumedUnderlyingObjects;
11962 for (
Value *Obj : AssumedUnderlyingObjects)
11975 bool Changed =
false;
11978 auto Pred = [&](
Value &
V) {
11979 Changed |= UnderlyingObjects.
insert(&V);
11982 if (!AA || !AA->forallUnderlyingObjects(Pred, Scope))
11984 "The forall call should not return false at this position");
11994struct AAUnderlyingObjectsFloating final : AAUnderlyingObjectsImpl {
11996 : AAUnderlyingObjectsImpl(IRP,
A) {}
11999struct AAUnderlyingObjectsArgument final : AAUnderlyingObjectsImpl {
12001 : AAUnderlyingObjectsImpl(IRP,
A) {}
12004struct AAUnderlyingObjectsCallSite final : AAUnderlyingObjectsImpl {
12006 : AAUnderlyingObjectsImpl(IRP,
A) {}
12009struct AAUnderlyingObjectsCallSiteArgument final : AAUnderlyingObjectsImpl {
12011 : AAUnderlyingObjectsImpl(IRP,
A) {}
12014struct AAUnderlyingObjectsReturned final : AAUnderlyingObjectsImpl {
12016 : AAUnderlyingObjectsImpl(IRP,
A) {}
12019struct AAUnderlyingObjectsCallSiteReturned final : AAUnderlyingObjectsImpl {
12021 : AAUnderlyingObjectsImpl(IRP,
A) {}
12024struct AAUnderlyingObjectsFunction final : AAUnderlyingObjectsImpl {
12026 : AAUnderlyingObjectsImpl(IRP,
A) {}
12041 Instruction *UInst = dyn_cast<Instruction>(
U.getUser());
12047 LLVM_DEBUG(
dbgs() <<
"[AAGlobalValueInfo] Check use: " << *
U.get() <<
" in "
12048 << *UInst <<
"\n");
12050 if (
auto *Cmp = dyn_cast<ICmpInst>(
U.getUser())) {
12051 int Idx = &
Cmp->getOperandUse(0) == &
U;
12052 if (isa<Constant>(
Cmp->getOperand(
Idx)))
12054 return U == &getAnchorValue();
12058 if (isa<ReturnInst>(UInst)) {
12060 Worklist.
push_back(ACS.getInstruction());
12063 bool UsedAssumedInformation =
false;
12065 if (!
A.checkForAllCallSites(CallSitePred, *UInst->
getFunction(),
12067 UsedAssumedInformation))
12074 auto *CB = dyn_cast<CallBase>(UInst);
12085 if (!Fn || !
A.isFunctionIPOAmendable(*Fn))
12094 unsigned NumUsesBefore =
Uses.size();
12100 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
12103 case UseCaptureKind::NO_CAPTURE:
12104 return checkUse(
A, U, Follow, Worklist);
12105 case UseCaptureKind::MAY_CAPTURE:
12106 return checkUse(
A, U, Follow, Worklist);
12107 case UseCaptureKind::PASSTHROUGH:
12113 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
12114 Uses.insert(&OldU);
12118 while (!Worklist.
empty()) {
12120 if (!Visited.
insert(V).second)
12122 if (!
A.checkForAllUses(UsePred, *
this, *V,
12124 DepClassTy::OPTIONAL,
12125 true, EquivalentUseCB)) {
12126 return indicatePessimisticFixpoint();
12130 return Uses.size() == NumUsesBefore ? ChangeStatus::UNCHANGED
12131 : ChangeStatus::CHANGED;
12134 bool isPotentialUse(
const Use &U)
const override {
12135 return !isValidState() ||
Uses.contains(&U);
12140 return ChangeStatus::UNCHANGED;
12144 const std::string getAsStr(
Attributor *
A)
const override {
12145 return "[" + std::to_string(
Uses.size()) +
" uses]";
12148 void trackStatistics()
const override {
12166 auto *MD = getCtxI()->getMetadata(LLVMContext::MD_callees);
12167 if (!MD && !
A.isClosedWorldModule())
12171 for (
const auto &
Op : MD->operands())
12172 if (
Function *Callee = mdconst::dyn_extract_or_null<Function>(
Op))
12173 PotentialCallees.insert(Callee);
12174 }
else if (
A.isClosedWorldModule()) {
12176 A.getInfoCache().getIndirectlyCallableFunctions(
A);
12177 PotentialCallees.insert(IndirectlyCallableFunctions.
begin(),
12178 IndirectlyCallableFunctions.
end());
12181 if (PotentialCallees.empty())
12182 indicateOptimisticFixpoint();
12186 CallBase *CB = cast<CallBase>(getCtxI());
12191 bool AllCalleesKnownNow = AllCalleesKnown;
12193 auto CheckPotentialCalleeUse = [&](
Function &PotentialCallee,
12194 bool &UsedAssumedInformation) {
12197 if (!GIAA || GIAA->isPotentialUse(CalleeUse))
12199 UsedAssumedInformation = !GIAA->isAtFixpoint();
12203 auto AddPotentialCallees = [&]() {
12204 for (
auto *PotentialCallee : PotentialCallees) {
12205 bool UsedAssumedInformation =
false;
12206 if (CheckPotentialCalleeUse(*PotentialCallee, UsedAssumedInformation))
12207 AssumedCalleesNow.
insert(PotentialCallee);
12213 bool UsedAssumedInformation =
false;
12216 AA::ValueScope::AnyScope,
12217 UsedAssumedInformation)) {
12218 if (PotentialCallees.empty())
12219 return indicatePessimisticFixpoint();
12220 AddPotentialCallees();
12225 auto CheckPotentialCallee = [&](
Function &Fn) {
12226 if (!PotentialCallees.empty() && !PotentialCallees.count(&Fn))
12229 auto &CachedResult = FilterResults[&Fn];
12230 if (CachedResult.has_value())
12231 return CachedResult.value();
12233 bool UsedAssumedInformation =
false;
12234 if (!CheckPotentialCalleeUse(Fn, UsedAssumedInformation)) {
12235 if (!UsedAssumedInformation)
12236 CachedResult =
false;
12245 for (
int I = NumCBArgs;
I < NumFnArgs; ++
I) {
12246 bool IsKnown =
false;
12247 if (AA::hasAssumedIRAttr<Attribute::NoUndef>(
12249 DepClassTy::OPTIONAL, IsKnown)) {
12251 CachedResult =
false;
12256 CachedResult =
true;
12262 for (
auto &VAC : Values) {
12263 if (isa<UndefValue>(
VAC.getValue()))
12265 if (isa<ConstantPointerNull>(
VAC.getValue()) &&
12266 VAC.getValue()->getType()->getPointerAddressSpace() == 0)
12269 if (
auto *VACFn = dyn_cast<Function>(
VAC.getValue())) {
12270 if (CheckPotentialCallee(*VACFn))
12271 AssumedCalleesNow.
insert(VACFn);
12274 if (!PotentialCallees.empty()) {
12275 AddPotentialCallees();
12278 AllCalleesKnownNow =
false;
12281 if (AssumedCalleesNow == AssumedCallees &&
12282 AllCalleesKnown == AllCalleesKnownNow)
12283 return ChangeStatus::UNCHANGED;
12285 std::swap(AssumedCallees, AssumedCalleesNow);
12286 AllCalleesKnown = AllCalleesKnownNow;
12287 return ChangeStatus::CHANGED;
12293 if (!AllCalleesKnown && AssumedCallees.empty())
12294 return ChangeStatus::UNCHANGED;
12296 CallBase *CB = cast<CallBase>(getCtxI());
12297 bool UsedAssumedInformation =
false;
12298 if (
A.isAssumedDead(*CB,
this,
nullptr,
12299 UsedAssumedInformation))
12300 return ChangeStatus::UNCHANGED;
12304 if (
FP->getType()->getPointerAddressSpace())
12315 if (AssumedCallees.empty()) {
12316 assert(AllCalleesKnown &&
12317 "Expected all callees to be known if there are none.");
12318 A.changeToUnreachableAfterManifest(CB);
12319 return ChangeStatus::CHANGED;
12323 if (AllCalleesKnown && AssumedCallees.size() == 1) {
12324 auto *NewCallee = AssumedCallees.front();
12327 return ChangeStatus::CHANGED;
12334 A.deleteAfterManifest(*CB);
12335 return ChangeStatus::CHANGED;
12345 bool SpecializedForAnyCallees =
false;
12346 bool SpecializedForAllCallees = AllCalleesKnown;
12350 for (
Function *NewCallee : AssumedCallees) {
12351 if (!
A.shouldSpecializeCallSiteForCallee(*
this, *CB, *NewCallee)) {
12352 SkippedAssumedCallees.
push_back(NewCallee);
12353 SpecializedForAllCallees =
false;
12356 SpecializedForAnyCallees =
true;
12362 A.registerManifestAddedBasicBlock(*ThenTI->
getParent());
12363 A.registerManifestAddedBasicBlock(*IP->getParent());
12364 auto *SplitTI = cast<BranchInst>(LastCmp->
getNextNode());
12369 A.registerManifestAddedBasicBlock(*ElseBB);
12371 SplitTI->replaceUsesOfWith(CBBB, ElseBB);
12379 auto *CBClone = cast<CallBase>(CB->
clone());
12380 CBClone->insertBefore(ThenTI);
12381 NewCall = &cast<CallInst>(
promoteCall(*CBClone, NewCallee, &RetBC));
12389 auto AttachCalleeMetadata = [&](
CallBase &IndirectCB) {
12390 if (!AllCalleesKnown)
12391 return ChangeStatus::UNCHANGED;
12392 MDBuilder MDB(IndirectCB.getContext());
12393 MDNode *Callees = MDB.createCallees(SkippedAssumedCallees);
12394 IndirectCB.setMetadata(LLVMContext::MD_callees, Callees);
12395 return ChangeStatus::CHANGED;
12398 if (!SpecializedForAnyCallees)
12399 return AttachCalleeMetadata(*CB);
12402 if (SpecializedForAllCallees) {
12406 IP->eraseFromParent();
12408 auto *CBClone = cast<CallInst>(CB->
clone());
12409 CBClone->setName(CB->
getName());
12410 CBClone->insertBefore(*IP->getParent(), IP);
12411 NewCalls.
push_back({CBClone,
nullptr});
12412 AttachCalleeMetadata(*CBClone);
12419 CB->
getParent()->getFirstInsertionPt());
12420 for (
auto &It : NewCalls) {
12422 Instruction *CallRet = It.second ? It.second : It.first;
12434 A.deleteAfterManifest(*CB);
12435 Changed = ChangeStatus::CHANGED;
12441 const std::string getAsStr(
Attributor *
A)
const override {
12442 return std::string(AllCalleesKnown ?
"eliminate" :
"specialize") +
12443 " indirect call site with " + std::to_string(AssumedCallees.size()) +
12447 void trackStatistics()
const override {
12448 if (AllCalleesKnown) {
12450 Eliminated, CallSites,
12451 "Number of indirect call sites eliminated via specialization")
12454 "Number of indirect call sites specialized")
12459 return isValidState() && AllCalleesKnown &&
all_of(AssumedCallees, CB);
12476 bool AllCalleesKnown =
true;
12487 assert(isValidState() &&
"the AA is invalid");
12488 return AssumedAddressSpace;
12493 assert(getAssociatedType()->isPtrOrPtrVectorTy() &&
12494 "Associated value is not a pointer");
12498 int32_t OldAddressSpace = AssumedAddressSpace;
12500 DepClassTy::REQUIRED);
12501 auto Pred = [&](
Value &Obj) {
12502 if (isa<UndefValue>(&Obj))
12507 if (!AUO->forallUnderlyingObjects(Pred))
12508 return indicatePessimisticFixpoint();
12510 return OldAddressSpace == AssumedAddressSpace ? ChangeStatus::UNCHANGED
12511 : ChangeStatus::CHANGED;
12516 Value *AssociatedValue = &getAssociatedValue();
12517 Value *OriginalValue = peelAddrspacecast(AssociatedValue);
12521 return ChangeStatus::UNCHANGED;
12523 Type *NewPtrTy = PointerType::get(getAssociatedType()->getContext(),
12525 bool UseOriginalValue =
12529 bool Changed =
false;
12533 if (UseOriginalValue) {
12534 A.changeUseAfterManifest(U, *OriginalValue);
12539 A.changeUseAfterManifest(U, *
CastInst);
12542 auto Pred = [&](
const Use &
U,
bool &) {
12543 if (
U.get() != AssociatedValue)
12545 auto *Inst = dyn_cast<Instruction>(
U.getUser());
12552 if (isa<LoadInst>(Inst))
12553 MakeChange(Inst,
const_cast<Use &
>(U));
12554 if (isa<StoreInst>(Inst)) {
12556 if (
U.getOperandNo() == 1)
12557 MakeChange(Inst,
const_cast<Use &
>(U));
12564 (void)
A.checkForAllUses(Pred, *
this, getAssociatedValue(),
12567 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
12571 const std::string getAsStr(
Attributor *
A)
const override {
12572 if (!isValidState())
12573 return "addrspace(<invalid>)";
12574 return "addrspace(" +
12575 (AssumedAddressSpace == NoAddressSpace
12577 : std::to_string(AssumedAddressSpace)) +
12582 int32_t AssumedAddressSpace = NoAddressSpace;
12584 bool takeAddressSpace(int32_t AS) {
12585 if (AssumedAddressSpace == NoAddressSpace) {
12586 AssumedAddressSpace = AS;
12589 return AssumedAddressSpace == AS;
12593 if (
auto *
I = dyn_cast<AddrSpaceCastInst>(V))
12594 return peelAddrspacecast(
I->getPointerOperand());
12595 if (
auto *
C = dyn_cast<ConstantExpr>(V))
12596 if (
C->getOpcode() == Instruction::AddrSpaceCast)
12597 return peelAddrspacecast(
C->getOperand(0));
12602struct AAAddressSpaceFloating final : AAAddressSpaceImpl {
12604 : AAAddressSpaceImpl(IRP,
A) {}
12606 void trackStatistics()
const override {
12611struct AAAddressSpaceReturned final : AAAddressSpaceImpl {
12613 : AAAddressSpaceImpl(IRP,
A) {}
12619 (void)indicatePessimisticFixpoint();
12622 void trackStatistics()
const override {
12627struct AAAddressSpaceCallSiteReturned final : AAAddressSpaceImpl {
12629 : AAAddressSpaceImpl(IRP,
A) {}
12631 void trackStatistics()
const override {
12636struct AAAddressSpaceArgument final : AAAddressSpaceImpl {
12638 : AAAddressSpaceImpl(IRP,
A) {}
12643struct AAAddressSpaceCallSiteArgument final : AAAddressSpaceImpl {
12645 : AAAddressSpaceImpl(IRP,
A) {}
12651 (void)indicatePessimisticFixpoint();
12654 void trackStatistics()
const override {
12666 std::optional<TypeSize> getAllocatedSize()
const override {
12667 assert(isValidState() &&
"the AA is invalid");
12668 return AssumedAllocatedSize;
12671 std::optional<TypeSize> findInitialAllocationSize(
Instruction *
I,
12675 switch (
I->getOpcode()) {
12676 case Instruction::Alloca: {
12681 return std::nullopt;
12691 if (!isa<AllocaInst>(
I))
12692 return indicatePessimisticFixpoint();
12694 bool IsKnownNoCapture;
12695 if (!AA::hasAssumedIRAttr<Attribute::NoCapture>(
12696 A,
this, IRP, DepClassTy::OPTIONAL, IsKnownNoCapture))
12697 return indicatePessimisticFixpoint();
12700 A.getOrCreateAAFor<
AAPointerInfo>(IRP, *
this, DepClassTy::REQUIRED);
12703 return indicatePessimisticFixpoint();
12706 return indicatePessimisticFixpoint();
12709 const auto AllocationSize = findInitialAllocationSize(
I,
DL);
12712 if (!AllocationSize)
12713 return indicatePessimisticFixpoint();
12717 if (*AllocationSize == 0)
12718 return indicatePessimisticFixpoint();
12724 return indicatePessimisticFixpoint();
12726 if (BinSize == 0) {
12727 auto NewAllocationSize = std::optional<TypeSize>(
TypeSize(0,
false));
12728 if (!changeAllocationSize(NewAllocationSize))
12729 return ChangeStatus::UNCHANGED;
12730 return ChangeStatus::CHANGED;
12734 const auto &It = PI->
begin();
12737 if (It->first.Offset != 0)
12738 return indicatePessimisticFixpoint();
12740 uint64_t SizeOfBin = It->first.Offset + It->first.Size;
12742 if (SizeOfBin >= *AllocationSize)
12743 return indicatePessimisticFixpoint();
12745 auto NewAllocationSize =
12746 std::optional<TypeSize>(
TypeSize(SizeOfBin * 8,
false));
12748 if (!changeAllocationSize(NewAllocationSize))
12749 return ChangeStatus::UNCHANGED;
12751 return ChangeStatus::CHANGED;
12757 assert(isValidState() &&
12758 "Manifest should only be called if the state is valid.");
12762 auto FixedAllocatedSizeInBits = getAllocatedSize()->getFixedValue();
12764 unsigned long NumBytesToAllocate = (FixedAllocatedSizeInBits + 7) / 8;
12766 switch (
I->getOpcode()) {
12768 case Instruction::Alloca: {
12774 auto *NumBytesToValue =
12775 ConstantInt::get(
I->getContext(),
APInt(32, NumBytesToAllocate));
12778 insertPt = std::next(insertPt);
12784 return ChangeStatus::CHANGED;
12792 return ChangeStatus::UNCHANGED;
12796 const std::string getAsStr(
Attributor *
A)
const override {
12797 if (!isValidState())
12798 return "allocationinfo(<invalid>)";
12799 return "allocationinfo(" +
12800 (AssumedAllocatedSize == HasNoAllocationSize
12802 : std::to_string(AssumedAllocatedSize->getFixedValue())) +
12807 std::optional<TypeSize> AssumedAllocatedSize = HasNoAllocationSize;
12811 bool changeAllocationSize(std::optional<TypeSize>
Size) {
12812 if (AssumedAllocatedSize == HasNoAllocationSize ||
12813 AssumedAllocatedSize !=
Size) {
12814 AssumedAllocatedSize =
Size;
12821struct AAAllocationInfoFloating : AAAllocationInfoImpl {
12823 : AAAllocationInfoImpl(IRP,
A) {}
12825 void trackStatistics()
const override {
12830struct AAAllocationInfoReturned : AAAllocationInfoImpl {
12832 : AAAllocationInfoImpl(IRP,
A) {}
12838 (void)indicatePessimisticFixpoint();
12841 void trackStatistics()
const override {
12846struct AAAllocationInfoCallSiteReturned : AAAllocationInfoImpl {
12848 : AAAllocationInfoImpl(IRP,
A) {}
12850 void trackStatistics()
const override {
12855struct AAAllocationInfoArgument : AAAllocationInfoImpl {
12857 : AAAllocationInfoImpl(IRP,
A) {}
12859 void trackStatistics()
const override {
12864struct AAAllocationInfoCallSiteArgument : AAAllocationInfoImpl {
12866 : AAAllocationInfoImpl(IRP,
A) {}
12871 (void)indicatePessimisticFixpoint();
12874 void trackStatistics()
const override {
12921#define SWITCH_PK_INV(CLASS, PK, POS_NAME) \
12922 case IRPosition::PK: \
12923 llvm_unreachable("Cannot create " #CLASS " for a " POS_NAME " position!");
12925#define SWITCH_PK_CREATE(CLASS, IRP, PK, SUFFIX) \
12926 case IRPosition::PK: \
12927 AA = new (A.Allocator) CLASS##SUFFIX(IRP, A); \
12931#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
12932 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
12933 CLASS *AA = nullptr; \
12934 switch (IRP.getPositionKind()) { \
12935 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
12936 SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
12937 SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
12938 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
12939 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
12940 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
12941 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
12942 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
12947#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
12948 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
12949 CLASS *AA = nullptr; \
12950 switch (IRP.getPositionKind()) { \
12951 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
12952 SWITCH_PK_INV(CLASS, IRP_FUNCTION, "function") \
12953 SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
12954 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
12955 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
12956 SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
12957 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
12958 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
12963#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS) \
12964 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
12965 CLASS *AA = nullptr; \
12966 switch (IRP.getPositionKind()) { \
12967 SWITCH_PK_CREATE(CLASS, IRP, POS, SUFFIX) \
12969 llvm_unreachable("Cannot create " #CLASS " for position otherthan " #POS \
12975#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
12976 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
12977 CLASS *AA = nullptr; \
12978 switch (IRP.getPositionKind()) { \
12979 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
12980 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
12981 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
12982 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
12983 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
12984 SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
12985 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
12986 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
12991#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
12992 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
12993 CLASS *AA = nullptr; \
12994 switch (IRP.getPositionKind()) { \
12995 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
12996 SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
12997 SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
12998 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
12999 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
13000 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
13001 SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
13002 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13007#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13008 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13009 CLASS *AA = nullptr; \
13010 switch (IRP.getPositionKind()) { \
13011 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13012 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13013 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13014 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13015 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13016 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13017 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13018 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13068#undef CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION
13069#undef CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION
13070#undef CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION
13071#undef CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION
13072#undef CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION
13073#undef CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION
13074#undef SWITCH_PK_CREATE
13075#undef SWITCH_PK_INV
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
amdgpu AMDGPU Register Bank Select
This file implements a class to represent arbitrary precision integral constant values and operations...
ReachingDefAnalysis InstSet & ToRemove
This file contains the simple types necessary to represent the attributes associated with functions a...
#define STATS_DECLTRACK(NAME, TYPE, MSG)
static std::optional< Constant * > askForAssumedConstant(Attributor &A, const AbstractAttribute &QueryingAA, const IRPosition &IRP, Type &Ty)
static cl::opt< unsigned, true > MaxPotentialValues("attributor-max-potential-values", cl::Hidden, cl::desc("Maximum number of potential values to be " "tracked for each position."), cl::location(llvm::PotentialConstantIntValuesState::MaxPotentialValues), cl::init(7))
static const Value * getPointerOperand(const Instruction *I, bool AllowVolatile)
Get pointer operand of memory accessing instruction.
static void clampReturnedValueStates(Attributor &A, const AAType &QueryingAA, StateType &S, const IRPosition::CallBaseContext *CBContext=nullptr)
Clamp the information known for all returned values of a function (identified by QueryingAA) into S.
#define STATS_DECLTRACK_FN_ATTR(NAME)
#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxPotentialValuesIterations("attributor-max-potential-values-iterations", cl::Hidden, cl::desc("Maximum number of iterations we keep dismantling potential values."), cl::init(64))
#define STATS_DECLTRACK_CS_ATTR(NAME)
#define PIPE_OPERATOR(CLASS)
static bool mayBeInCycle(const CycleInfo *CI, const Instruction *I, bool HeaderOnly, Cycle **CPtr=nullptr)
#define STATS_DECLTRACK_ARG_ATTR(NAME)
static const Value * stripAndAccumulateOffsets(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Val, const DataLayout &DL, APInt &Offset, bool GetMinOffset, bool AllowNonInbounds, bool UseAssumed=false)
#define STATS_DECLTRACK_CSRET_ATTR(NAME)
static cl::opt< bool > ManifestInternal("attributor-manifest-internal", cl::Hidden, cl::desc("Manifest Attributor internal string attributes."), cl::init(false))
static Value * constructPointer(Value *Ptr, int64_t Offset, IRBuilder< NoFolder > &IRB)
Helper function to create a pointer based on Ptr, and advanced by Offset bytes.
#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define BUILD_STAT_NAME(NAME, TYPE)
static bool isDenselyPacked(Type *Ty, const DataLayout &DL)
Checks if a type could have padding bytes.
#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static const Value * getMinimalBaseOfPointer(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Ptr, int64_t &BytesOffset, const DataLayout &DL, bool AllowNonInbounds=false)
#define STATS_DECLTRACK_FNRET_ATTR(NAME)
#define STATS_DECLTRACK_CSARG_ATTR(NAME)
#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS)
#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxHeapToStackSize("max-heap-to-stack-size", cl::init(128), cl::Hidden)
#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define STATS_DECLTRACK_FLOATING_ATTR(NAME)
#define STATS_DECL(NAME, TYPE, MSG)
BlockVerifier::State From
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
static GCRegistry::Add< ErlangGC > A("erlang", "erlang-compatible garbage collector")
static bool isReachableImpl(SmallVectorImpl< BasicBlock * > &Worklist, const StopSetT &StopSet, const SmallPtrSetImpl< BasicBlock * > *ExclusionSet, const DominatorTree *DT, const LoopInfo *LI)
This file contains the declarations for the subclasses of Constant, which represent the different fla...
This file declares an analysis pass that computes CycleInfo for LLVM IR, specialized from GenericCycl...
Returns the sub type a function will return at a given Idx. Should correspond to the result type of an ExtractValue instruction executed with just that one unsigned Idx.
Given that RA is a live value, propagate its liveness to any other values it uses (according to Uses). void DeadArgumentEliminationPass
Performs the initial survey of the specified function
Given that RA is a live value
This file defines DenseMapInfo traits for DenseMap.
Rewrite Partial Register Uses
static LoopDeletionResult merge(LoopDeletionResult A, LoopDeletionResult B)
This file implements a map that provides insertion order iteration.
ConstantRange Range(APInt(BitWidth, Low), APInt(BitWidth, High))
uint64_t IntrinsicInst * II
static StringRef getName(Value *V)
static cl::opt< RegAllocEvictionAdvisorAnalysis::AdvisorMode > Mode("regalloc-enable-advisor", cl::Hidden, cl::init(RegAllocEvictionAdvisorAnalysis::AdvisorMode::Default), cl::desc("Enable regalloc advisor mode"), cl::values(clEnumValN(RegAllocEvictionAdvisorAnalysis::AdvisorMode::Default, "default", "Default"), clEnumValN(RegAllocEvictionAdvisorAnalysis::AdvisorMode::Release, "release", "precompiled"), clEnumValN(RegAllocEvictionAdvisorAnalysis::AdvisorMode::Development, "development", "for training")))
This builds on the llvm/ADT/GraphTraits.h file to find the strongly connected components (SCCs) of a ...
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
This file defines generic set operations that may be used on sets of different types,...
This file implements a set that has insertion order iteration characteristics.
This file defines the SmallPtrSet class.
This file defines the SmallVector class.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
static SymbolRef::Type getType(const Symbol *Sym)
static void initialize(TargetLibraryInfoImpl &TLI, const Triple &T, ArrayRef< StringLiteral > StandardNames)
Initialize the set of available library functions based on the specified target triple.
static unsigned getBitWidth(Type *Ty, const DataLayout &DL)
Returns the bitwidth of the given scalar or pointer type.
static unsigned getSize(unsigned Kind)
AACallGraphNode * operator*() const
A manager for alias analyses.
bool isNoAlias(const MemoryLocation &LocA, const MemoryLocation &LocB)
A trivial helper function to check to see if the specified pointers are no-alias.
Class for arbitrary precision integers.
int64_t getSExtValue() const
Get sign extended value.
CallBase * getInstruction() const
Return the underlying instruction.
bool isCallbackCall() const
Return true if this ACS represents a callback call.
bool isDirectCall() const
Return true if this ACS represents a direct call.
static void getCallbackUses(const CallBase &CB, SmallVectorImpl< const Use * > &CallbackUses)
Add operand uses of CB that represent callback uses into CallbackUses.
int getCallArgOperandNo(Argument &Arg) const
Return the operand index of the underlying instruction associated with Arg.
This class represents a conversion between pointers from one address space to another.
an instruction to allocate memory on the stack
Align getAlign() const
Return the alignment of the memory that is being allocated by the instruction.
unsigned getAddressSpace() const
Return the address space for the allocation.
std::optional< TypeSize > getAllocationSize(const DataLayout &DL) const
Get allocation size in bytes.
This class represents an incoming formal argument to a Function.
bool hasPointeeInMemoryValueAttr() const
Return true if this argument has the byval, sret, inalloca, preallocated, or byref attribute.
bool hasReturnedAttr() const
Return true if this argument has the returned attribute.
bool hasByValAttr() const
Return true if this argument has the byval attribute.
const Function * getParent() const
unsigned getArgNo() const
Return the index of this formal argument in its containing function.
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
A function analysis which provides an AssumptionCache.
A cache of @llvm.assume calls within a function.
static Attribute get(LLVMContext &Context, AttrKind Kind, uint64_t Val=0)
Return a uniquified Attribute object.
static Attribute getWithDereferenceableBytes(LLVMContext &Context, uint64_t Bytes)
FPClassTest getNoFPClass() const
Return the FPClassTest for nofpclass.
Attribute::AttrKind getKindAsEnum() const
Return the attribute's kind as an enum (Attribute::AttrKind).
MemoryEffects getMemoryEffects() const
Returns memory effects.
static Attribute getWithDereferenceableOrNullBytes(LLVMContext &Context, uint64_t Bytes)
static Attribute getWithNoFPClass(LLVMContext &Context, FPClassTest Mask)
AttrKind
This enumeration lists the attributes that can be associated with parameters, function results,...
static bool isEnumAttrKind(AttrKind Kind)
static Attribute getWithMemoryEffects(LLVMContext &Context, MemoryEffects ME)
static Attribute getWithAlignment(LLVMContext &Context, Align Alignment)
Return a uniquified Attribute object that has the specific alignment set.
LLVM Basic Block Representation.
const_iterator getFirstInsertionPt() const
Returns an iterator to the first instruction in this block that is suitable for inserting a non-PHI i...
const Instruction & front() const
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
const Function * getParent() const
Return the enclosing method, or null if none.
InstListType::iterator iterator
Instruction iterators...
const Instruction * getTerminator() const LLVM_READONLY
Returns the terminator instruction if the block is well formed or null if the block is not well forme...
BinaryOps getOpcode() const
Conditional or Unconditional Branch instruction.
unsigned getNumSuccessors() const
static BranchInst * Create(BasicBlock *IfTrue, InsertPosition InsertBefore=nullptr)
BasicBlock * getSuccessor(unsigned i) const
Value * getCondition() const
Allocate memory in an ever growing pool, as if by bump-pointer.
Base class for all callable instructions (InvokeInst and CallInst) Holds everything related to callin...
User::op_iterator arg_begin()
Return the iterator pointing to the beginning of the argument list.
bool isMustTailCall() const
Tests if this call site must be tail call optimized.
bool isIndirectCall() const
Return true if the callsite is an indirect call.
bool isCallee(Value::const_user_iterator UI) const
Determine whether the passed iterator points to the callee operand's Use.
Value * getCalledOperand() const
const Use & getCalledOperandUse() const
const Use & getArgOperandUse(unsigned i) const
Wrappers for getting the Use of a call argument.
Value * getArgOperand(unsigned i) const
User::op_iterator arg_end()
Return the iterator pointing to the end of the argument list.
bool isBundleOperand(unsigned Idx) const
Return true if the operand at index Idx is a bundle operand.
bool isConvergent() const
Determine if the invoke is convergent.
FunctionType * getFunctionType() const
Intrinsic::ID getIntrinsicID() const
Returns the intrinsic ID of the intrinsic called or Intrinsic::not_intrinsic if the called function i...
iterator_range< User::op_iterator > args()
Iteration adapter for range-for loops.
unsigned getArgOperandNo(const Use *U) const
Given a use for a arg operand, get the arg operand number that corresponds to it.
unsigned arg_size() const
bool isArgOperand(const Use *U) const
Function * getCaller()
Helper to get the caller (the parent function).
This class represents a function call, abstracting a target machine's calling convention.
static CallInst * Create(FunctionType *Ty, Value *F, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
This is the base class for all instructions that perform data casts.
Instruction::CastOps getOpcode() const
Return the opcode of this CastInst.
bool isIntegerCast() const
There are several places where we need to know if a cast instruction only deals with integer source a...
Type * getDestTy() const
Return the destination type, as a convenience.
This class is the base class for the comparison instructions.
bool isEquality() const
Determine if this is an equals/not equals predicate.
bool isFalseWhenEqual() const
This is just a convenience.
Predicate
This enumeration lists the possible predicates for CmpInst subclasses.
bool isTrueWhenEqual() const
This is just a convenience.
Predicate getPredicate() const
Return the predicate for this instruction.
A constant value that is initialized with an expression using other constant values.
static Constant * getExtractElement(Constant *Vec, Constant *Idx, Type *OnlyIfReducedTy=nullptr)
This is the shared class of boolean and integer constants.
static ConstantInt * getTrue(LLVMContext &Context)
This class represents a range of values.
const APInt & getLower() const
Return the lower value for this range.
bool isFullSet() const
Return true if this set contains all of the elements possible for this data-type.
bool isEmptySet() const
Return true if this set contains no members.
APInt getSignedMin() const
Return the smallest signed value contained in the ConstantRange.
bool isSingleElement() const
Return true if this set contains exactly one member.
static ConstantRange makeAllowedICmpRegion(CmpInst::Predicate Pred, const ConstantRange &Other)
Produce the smallest range such that all values that may satisfy the given predicate with any value c...
const APInt & getUpper() const
Return the upper value for this range.
APInt getSignedMax() const
Return the largest signed value contained in the ConstantRange.
This is an important base class in LLVM.
Analysis pass which computes a CycleInfo.
This class represents an Operation in the Expression.
A parsed version of the target data layout string in and methods for querying it.
iterator find(const_arg_type_t< KeyT > Val)
size_type count(const_arg_type_t< KeyT > Val) const
Return 1 if the specified key is in the map, 0 otherwise.
bool contains(const_arg_type_t< KeyT > Val) const
Return true if the specified key is in the map, false otherwise.
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
Implements a dense probed hash-table based set.
Analysis pass which computes a DominatorTree.
Concrete subclass of DominatorTreeBase that is used to compute a normal dominator tree.
bool dominates(const BasicBlock *BB, const Use &U) const
Return true if the (end of the) basic block BB dominates the use U.
An instruction for ordering other memory operations.
A handy container for a FunctionType+Callee-pointer pair, which can be passed around as a single enti...
const BasicBlock & getEntryBlock() const
iterator_range< arg_iterator > args()
const Function & getFunction() const
Argument * getArg(unsigned i) const
bool hasFnAttribute(Attribute::AttrKind Kind) const
Return true if the function has the attribute.
CycleT * getCycle(const BlockT *Block) const
Find the innermost cycle containing a given block.
A possibly irreducible generalization of a Loop.
bool isDeclaration() const
Return true if the primary definition of this global value is outside of the current translation unit...
bool hasLocalLinkage() const
This instruction compares its operands according to the predicate given to the constructor.
static bool compare(const APInt &LHS, const APInt &RHS, ICmpInst::Predicate Pred)
Return result of LHS Pred RHS comparison.
Value * CreatePtrAdd(Value *Ptr, Value *Offset, const Twine &Name="", GEPNoWrapFlags NW=GEPNoWrapFlags::none())
ConstantInt * getInt64(uint64_t C)
Get a constant 64-bit value.
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
Instruction * clone() const
Create a copy of 'this' instruction that is identical in all ways except the following:
bool isLifetimeStartOrEnd() const LLVM_READONLY
Return true if the instruction is a llvm.lifetime.start or llvm.lifetime.end marker.
bool mayReadOrWriteMemory() const
Return true if this instruction may read or write memory.
bool mayWriteToMemory() const LLVM_READONLY
Return true if this instruction may modify memory.
void insertBefore(Instruction *InsertPos)
Insert an unlinked instruction into a basic block immediately before the specified instruction.
InstListType::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
const Function * getFunction() const
Return the function this instruction belongs to.
BasicBlock * getSuccessor(unsigned Idx) const LLVM_READONLY
Return the specified successor. This instruction must be a terminator.
bool mayHaveSideEffects() const LLVM_READONLY
Return true if the instruction may have side effects.
bool isTerminator() const
bool mayReadFromMemory() const LLVM_READONLY
Return true if this instruction may read memory.
const Instruction * getNextNonDebugInstruction(bool SkipPseudoOp=false) const
Return a pointer to the next non-debug instruction in the same basic block as 'this',...
unsigned getOpcode() const
Returns a member of one of the enums like Instruction::Add.
void setDebugLoc(DebugLoc Loc)
Set the debug location information for this instruction.
const DataLayout & getDataLayout() const
Get the data layout of the module this instruction belongs to.
A wrapper class for inspecting calls to intrinsic functions.
This is an important class for using LLVM in a threaded context.
Analysis to compute lazy value information.
This pass computes, caches, and vends lazy value constraint information.
ConstantRange getConstantRange(Value *V, Instruction *CxtI, bool UndefAllowed)
Return the ConstantRange constraint that is known to hold for the specified value at the specified in...
An instruction for reading from memory.
Analysis pass that exposes the LoopInfo for a function.
LoopT * getLoopFor(const BlockT *BB) const
Return the inner most loop that BB lives in.
const MDOperand & getOperand(unsigned I) const
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
unsigned getNumOperands() const
Return number of MDNode operands.
This class implements a map that also provides access to all stored values in a deterministic order.
static MemoryEffectsBase readOnly()
Create MemoryEffectsBase that can read any memory.
bool doesNotAccessMemory() const
Whether this function accesses no memory.
static MemoryEffectsBase argMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
Create MemoryEffectsBase that can only access argument memory.
static MemoryEffectsBase inaccessibleMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
Create MemoryEffectsBase that can only access inaccessible memory.
bool onlyAccessesInaccessibleMem() const
Whether this function only (at most) accesses inaccessible memory.
ModRefInfo getModRef(Location Loc) const
Get ModRefInfo for the given Location.
bool onlyAccessesArgPointees() const
Whether this function only (at most) accesses argument memory.
bool onlyReadsMemory() const
Whether this function only (at most) reads memory.
static MemoryEffectsBase writeOnly()
Create MemoryEffectsBase that can write any memory.
static MemoryEffectsBase inaccessibleOrArgMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
Create MemoryEffectsBase that can only access inaccessible or argument memory.
static MemoryEffectsBase none()
Create MemoryEffectsBase that cannot read or write any memory.
bool onlyAccessesInaccessibleOrArgMem() const
Whether this function only (at most) accesses argument and inaccessible memory.
static MemoryEffectsBase unknown()
Create MemoryEffectsBase that can read and write any memory.
static std::optional< MemoryLocation > getOrNone(const Instruction *Inst)
A Module instance is used to store all the information related to an LLVM module.
Evaluate the size and offset of an object pointed to by a Value*.
static SizeOffsetValue unknown()
static PHINode * Create(Type *Ty, unsigned NumReservedValues, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
Constructors - NumReservedValues is a hint for the number of incoming edges that this phi node will h...
static PoisonValue * get(Type *T)
Static factory methods - Return an 'poison' object of the specified type.
Return a value (possibly void), from a function.
Value * getReturnValue() const
Convenience accessor. Returns null if there is no return value.
This class represents an analyzed expression in the program.
Analysis pass that exposes the ScalarEvolution for a function.
The main scalar evolution driver.
const SCEV * getSCEVAtScope(const SCEV *S, const Loop *L)
Return a SCEV expression for the specified value at the specified scope in the program.
const SCEV * getSCEV(Value *V)
Return a SCEV expression for the full generality of the specified expression.
unsigned getSmallConstantMaxTripCount(const Loop *L)
Returns the upper bound of the loop trip count as a normal unsigned value.
ConstantRange getUnsignedRange(const SCEV *S)
Determine the unsigned range for a particular SCEV.
This class represents the LLVM 'select' instruction.
A vector that has set insertion semantics.
size_type size() const
Determine the number of elements in the SetVector.
bool insert(const value_type &X)
Insert a new element into the SetVector.
bool erase(PtrType Ptr)
Remove pointer from the set.
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
A SetVector that performs no allocations if smaller than a certain size.
SmallSet - This maintains a set of unique values, optimizing for the case when the set is small (less...
std::pair< const_iterator, bool > insert(const T &V)
insert - Insert an element into the set if it isn't already there.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
reference emplace_back(ArgTypes &&... Args)
void append(ItTy in_start, ItTy in_end)
Add the specified range to the end of the SmallVector.
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
An instruction for storing to memory.
StringRef - Represent a constant reference to a string, i.e.
Used to lazily calculate structure layout information for a target machine, based on the DataLayout s...
TypeSize getElementOffset(unsigned Idx) const
TypeSize getElementOffsetInBits(unsigned Idx) const
Class to represent struct types.
unsigned getNumElements() const
Random access to the elements.
Type * getElementType(unsigned N) const
Analysis pass providing the TargetTransformInfo.
Twine - A lightweight data structure for efficiently representing the concatenation of temporary valu...
The instances of the Type class are immutable: once they are created, they are never changed.
unsigned getIntegerBitWidth() const
bool isPointerTy() const
True if this is an instance of PointerType.
static IntegerType * getInt1Ty(LLVMContext &C)
unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
bool isSized(SmallPtrSetImpl< Type * > *Visited=nullptr) const
Return true if it makes sense to take the size of this type.
static IntegerType * getInt8Ty(LLVMContext &C)
bool isPtrOrPtrVectorTy() const
Return true if this is a pointer type or a vector of pointer types.
static IntegerType * getInt32Ty(LLVMContext &C)
bool isIntegerTy() const
True if this is an instance of IntegerType.
bool isVoidTy() const
Return true if this is 'void'.
'undef' values are things that do not have specified contents.
static UndefValue * get(Type *T)
Static factory methods - Return an 'undef' object of the specified type.
This function has undefined behavior.
A Use represents the edge between a Value definition and its users.
User * getUser() const
Returns the User that contains this Use.
bool replaceUsesOfWith(Value *From, Value *To)
Replace uses of one Value with another.
const Use & getOperandUse(unsigned i) const
Value * getOperand(unsigned i) const
unsigned getNumOperands() const
bool isDroppable() const
A droppable user is a user for which uses can be dropped without affecting correctness and should be ...
ValueT lookup(const KeyT &Val) const
lookup - Return the entry for the specified key, or a default constructed value if no such entry exis...
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
static constexpr uint64_t MaximumAlignment
const Value * stripAndAccumulateConstantOffsets(const DataLayout &DL, APInt &Offset, bool AllowNonInbounds, bool AllowInvariantGroup=false, function_ref< bool(Value &Value, APInt &Offset)> ExternalAnalysis=nullptr) const
Accumulate the constant offset this value has compared to a base pointer.
void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
iterator_range< user_iterator > users()
LLVMContext & getContext() const
All values hold a context through their type.
iterator_range< use_iterator > uses()
StringRef getName() const
Return a constant reference to the value's name.
std::pair< iterator, bool > insert(const ValueT &V)
constexpr ScalarTy getFixedValue() const
constexpr bool isScalable() const
Returns whether the quantity is scaled by a runtime quantity (vscale).
An efficient, type-erasing, non-owning reference to a callable.
const ParentTy * getParent() const
self_iterator getIterator()
NodeTy * getNextNode()
Get the next node, or nullptr for the list tail.
This class implements an extremely fast bulk output stream that can only output to a stream.
A raw_ostream that writes to an std::string.
Enumerate the SCCs of a directed graph in reverse topological order of the SCC DAG.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
bool isAssumedReadNone(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readnone.
bool isAssumedReadOnly(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readonly.
raw_ostream & operator<<(raw_ostream &OS, const RangeTy &R)
std::optional< Value * > combineOptionalValuesInAAValueLatice(const std::optional< Value * > &A, const std::optional< Value * > &B, Type *Ty)
Return the combination of A and B such that the result is a possible value of both.
bool isValidAtPosition(const ValueAndContext &VAC, InformationCache &InfoCache)
Return true if the value of VAC is a valid at the position of VAC, that is a constant,...
bool isAssumedThreadLocalObject(Attributor &A, Value &Obj, const AbstractAttribute &QueryingAA)
Return true if Obj is assumed to be a thread local object.
bool isDynamicallyUnique(Attributor &A, const AbstractAttribute &QueryingAA, const Value &V, bool ForAnalysisOnly=true)
Return true if V is dynamically unique, that is, there are no two "instances" of V at runtime with di...
bool getPotentialCopiesOfStoredValue(Attributor &A, StoreInst &SI, SmallSetVector< Value *, 4 > &PotentialCopies, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values of the one stored by SI into PotentialCopies.
bool isGPU(const Module &M)
Return true iff M target a GPU (and we can use GPU AS reasoning).
ValueScope
Flags to distinguish intra-procedural queries from potentially inter-procedural queries.
bool isValidInScope(const Value &V, const Function *Scope)
Return true if V is a valid value in Scope, that is a constant or an instruction/argument of Scope.
bool isPotentiallyReachable(Attributor &A, const Instruction &FromI, const Instruction &ToI, const AbstractAttribute &QueryingAA, const AA::InstExclusionSetTy *ExclusionSet=nullptr, std::function< bool(const Function &F)> GoBackwardsCB=nullptr)
Return true if ToI is potentially reachable from FromI without running into any instruction in Exclus...
bool isNoSyncInst(Attributor &A, const Instruction &I, const AbstractAttribute &QueryingAA)
Return true if I is a nosync instruction.
bool getPotentiallyLoadedValues(Attributor &A, LoadInst &LI, SmallSetVector< Value *, 4 > &PotentialValues, SmallSetVector< Instruction *, 4 > &PotentialValueOrigins, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values LI could read into PotentialValues.
Value * getWithType(Value &V, Type &Ty)
Try to convert V to type Ty without introducing new instructions.
constexpr char Attrs[]
Key for Kernel::Metadata::mAttrs.
AddressSpace getAddressSpace(T *V)
@ C
The default llvm calling convention, compatible with C.
@ Unsupported
This operation is completely unsupported on the target.
@ Undef
Value of the register doesn't matter.
@ SingleThread
Synchronized with respect to signal handlers executing in the same thread.
@ CE
Windows NT (Windows on ARM)
initializer< Ty > init(const Ty &Val)
LocationClass< Ty > location(Ty &L)
unsigned combineHashValue(unsigned a, unsigned b)
Simplistic combination of 32-bit hash values into 32-bit hash values.
ElementType
The element type of an SRV or UAV resource.
Scope
Defines the scope in which this symbol should be visible: Default – Visible in the public interface o...
const_iterator begin(StringRef path, Style style=Style::native)
Get begin iterator over path.
const_iterator end(StringRef path)
Get end iterator over path.
This is an optimization pass for GlobalISel generic memory operations.
pred_iterator pred_end(BasicBlock *BB)
bool operator<(int64_t V1, const APSInt &V2)
LLVM_ATTRIBUTE_ALWAYS_INLINE DynamicAPInt gcd(const DynamicAPInt &A, const DynamicAPInt &B)
bool all_of(R &&range, UnaryPredicate P)
Provide wrappers to std::all_of which take ranges instead of having to pass begin/end explicitly.
bool isLegalToPromote(const CallBase &CB, Function *Callee, const char **FailureReason=nullptr)
Return true if the given indirect call site can be made to call Callee.
Constant * getInitialValueOfAllocation(const Value *V, const TargetLibraryInfo *TLI, Type *Ty)
If this is a call to an allocation function that initializes memory to a fixed value,...
auto size(R &&Range, std::enable_if_t< std::is_base_of< std::random_access_iterator_tag, typename std::iterator_traits< decltype(Range.begin())>::iterator_category >::value, void > *=nullptr)
Get the size of a range.
unsigned getPointerAddressSpace(const Type *T)
auto successors(const MachineBasicBlock *BB)
bool isRemovableAlloc(const CallBase *V, const TargetLibraryInfo *TLI)
Return true if this is a call to an allocation function that does not have side effects that we are r...
APFloat abs(APFloat X)
Returns the absolute value of the argument.
raw_fd_ostream & outs()
This returns a reference to a raw_fd_ostream for standard output.
bool operator!=(uint64_t V1, const APInt &V2)
UseCaptureKind DetermineUseCaptureKind(const Use &U, llvm::function_ref< bool(Value *, const DataLayout &)> IsDereferenceableOrNull)
Determine what kind of capture behaviour U may exhibit.
Value * getAllocAlignment(const CallBase *V, const TargetLibraryInfo *TLI)
Gets the alignment argument for an aligned_alloc-like function, using either built-in knowledge based...
Value * simplifyInstructionWithOperands(Instruction *I, ArrayRef< Value * > NewOps, const SimplifyQuery &Q)
Like simplifyInstruction but the operands of I are replaced with NewOps.
Value * GetPointerBaseWithConstantOffset(Value *Ptr, int64_t &Offset, const DataLayout &DL, bool AllowNonInbounds=true)
Analyze the specified pointer to see if it can be expressed as a base pointer plus a constant offset.
const Value * getUnderlyingObject(const Value *V, unsigned MaxLookup=6)
This method strips off any GEP address adjustments, pointer casts or llvm.threadlocal....
scc_iterator< T > scc_begin(const T &G)
Construct the begin iterator for a deduced graph type T.
PotentialValuesState< std::pair< AA::ValueAndContext, AA::ValueScope > > PotentialLLVMValuesState
bool isNoAliasCall(const Value *V)
Return true if this pointer is returned by a noalias function.
raw_ostream & WriteGraph(raw_ostream &O, const GraphType &G, bool ShortNames=false, const Twine &Title="")
auto unique(Range &&R, Predicate P)
bool operator==(const AddressRangeValuePair &LHS, const AddressRangeValuePair &RHS)
ConstantRange getConstantRangeFromMetadata(const MDNode &RangeMD)
Parse out a conservative ConstantRange from !range metadata.
const Value * getPointerOperand(const Value *V)
A helper function that returns the pointer operand of a load, store or GEP instruction.
Value * simplifyInstruction(Instruction *I, const SimplifyQuery &Q)
See if we can compute a simplified version of this instruction.
bool any_of(R &&range, UnaryPredicate P)
Provide wrappers to std::any_of which take ranges instead of having to pass begin/end explicitly.
pred_iterator pred_begin(BasicBlock *BB)
unsigned Log2_32(uint32_t Value)
Return the floor log base 2 of the specified value, -1 if the value is zero.
constexpr bool isPowerOf2_32(uint32_t Value)
Return true if the argument is a power of two > 0.
void sort(IteratorTy Start, IteratorTy End)
MemoryEffectsBase< IRMemLocation > MemoryEffects
Summary of how a function affects memory in the program.
bool NullPointerIsDefined(const Function *F, unsigned AS=0)
Check whether null pointer dereferencing is considered undefined behavior for a given function or an ...
raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
bool isPointerTy(const Type *T)
bool wouldInstructionBeTriviallyDead(const Instruction *I, const TargetLibraryInfo *TLI=nullptr)
Return true if the result produced by the instruction would have no side effects if it was not used.
bool set_union(S1Ty &S1, const S2Ty &S2)
set_union(A, B) - Compute A := A u B, return whether A changed.
void RemapInstruction(Instruction *I, ValueToValueMapTy &VM, RemapFlags Flags=RF_None, ValueMapTypeRemapper *TypeMapper=nullptr, ValueMaterializer *Materializer=nullptr)
Convert the instruction operands from referencing the current values into those specified by VM.
CallBase & promoteCall(CallBase &CB, Function *Callee, CastInst **RetBitCast=nullptr)
Promote the given indirect call site to unconditionally call Callee.
bool hasAssumption(const Function &F, const KnownAssumptionString &AssumptionStr)
Return true if F has the assumption AssumptionStr attached.
RetainedKnowledge getKnowledgeFromUse(const Use *U, ArrayRef< Attribute::AttrKind > AttrKinds)
Return a valid Knowledge associated to the Use U if its Attribute kind is in AttrKinds.
bool isKnownNonZero(const Value *V, const SimplifyQuery &Q, unsigned Depth=0)
Return true if the given value is known to be non-zero when defined.
AtomicOrdering
Atomic ordering for LLVM's memory model.
ChangeStatus clampStateAndIndicateChange< DerefState >(DerefState &S, const DerefState &R)
Value * simplifyCmpInst(unsigned Predicate, Value *LHS, Value *RHS, const SimplifyQuery &Q)
Given operands for a CmpInst, fold the result or return null.
PotentialValuesState< APInt > PotentialConstantIntValuesState
DWARFExpression::Operation Op
bool isGuaranteedNotToBeUndefOrPoison(const Value *V, AssumptionCache *AC=nullptr, const Instruction *CtxI=nullptr, const DominatorTree *DT=nullptr, unsigned Depth=0)
Return true if this function can prove that V does not have undef bits and is never poison.
Value * getFreedOperand(const CallBase *CB, const TargetLibraryInfo *TLI)
If this if a call to a free function, return the freed operand.
ChangeStatus clampStateAndIndicateChange(StateType &S, const StateType &R)
Helper function to clamp a state S of type StateType with the information in R and indicate/return if...
bool isSafeToSpeculativelyExecute(const Instruction *I, const Instruction *CtxI=nullptr, AssumptionCache *AC=nullptr, const DominatorTree *DT=nullptr, const TargetLibraryInfo *TLI=nullptr)
Return true if the instruction does not have any effects besides calculating the result and does not ...
constexpr unsigned BitWidth
std::optional< APInt > getAllocSize(const CallBase *CB, const TargetLibraryInfo *TLI, function_ref< const Value *(const Value *)> Mapper=[](const Value *V) { return V;})
Return the size of the requested allocation.
DenseSet< StringRef > getAssumptions(const Function &F)
Return the set of all assumptions for the function F.
Align assumeAligned(uint64_t Value)
Treats the value 0 as a 1, so Align is always at least 1.
Instruction * SplitBlockAndInsertIfThen(Value *Cond, BasicBlock::iterator SplitBefore, bool Unreachable, MDNode *BranchWeights=nullptr, DomTreeUpdater *DTU=nullptr, LoopInfo *LI=nullptr, BasicBlock *ThenBlock=nullptr)
Split the containing block at the specified instruction - everything before SplitBefore stays in the ...
@ OPTIONAL
The target may be valid if the source is not.
@ NONE
Do not track a dependence between source and target.
@ REQUIRED
The target cannot be valid if the source is not.
bool mayContainIrreducibleControl(const Function &F, const LoopInfo *LI)
T bit_floor(T Value)
Returns the largest integral power of two no greater than Value if Value is nonzero.
KnownFPClass computeKnownFPClass(const Value *V, const APInt &DemandedElts, FPClassTest InterestedClasses, unsigned Depth, const SimplifyQuery &SQ)
Determine which floating-point classes are valid for V, and return them in KnownFPClass bit sets.
bool isIdentifiedObject(const Value *V)
Return true if this pointer refers to a distinct and identifiable object.
constexpr StringRef AssumptionAttrKey
The key we use for assumption attributes.
Implement std::hash so that hash_code can be used in STL containers.
void swap(llvm::BitVector &LHS, llvm::BitVector &RHS)
Implement std::swap in terms of BitVector swap.
A type to track pointer/struct usage and accesses for AAPointerInfo.
AAPointerInfo::const_bin_iterator end() const
ChangeStatus addAccess(Attributor &A, const AAPointerInfo::RangeList &Ranges, Instruction &I, std::optional< Value * > Content, AAPointerInfo::AccessKind Kind, Type *Ty, Instruction *RemoteI=nullptr)
Add a new Access to the state at offset Offset and with size Size.
DenseMap< const Instruction *, SmallVector< unsigned > > RemoteIMap
AAPointerInfo::const_bin_iterator begin() const
bool forallInterferingAccesses(Instruction &I, function_ref< bool(const AAPointerInfo::Access &, bool)> CB, AA::RangeTy &Range) const
See AAPointerInfo::forallInterferingAccesses.
State & operator=(State &&R)
State(State &&SIS)=default
const AAPointerInfo::Access & getAccess(unsigned Index) const
bool forallInterferingAccesses(AA::RangeTy Range, function_ref< bool(const AAPointerInfo::Access &, bool)> CB) const
See AAPointerInfo::forallInterferingAccesses.
SmallVector< AAPointerInfo::Access > AccessList
bool isAtFixpoint() const override
See AbstractState::isAtFixpoint().
static State getWorstState(const State &SIS)
Return the worst possible representable state.
int64_t numOffsetBins() const
AAPointerInfo::OffsetBinsTy OffsetBins
ChangeStatus indicateOptimisticFixpoint() override
See AbstractState::indicateOptimisticFixpoint().
State & operator=(const State &R)
ChangeStatus indicatePessimisticFixpoint() override
See AbstractState::indicatePessimisticFixpoint().
const State & getAssumed() const
static State getBestState(const State &SIS)
Return the best possible representable state.
bool isValidState() const override
See AbstractState::isValidState().
----------------—AAIntraFnReachability Attribute-----------------------—
ReachabilityQueryInfo(const ReachabilityQueryInfo &RQI)
unsigned Hash
Precomputed hash for this RQI.
ReachabilityQueryInfo(const Instruction *From, const ToTy *To)
ReachabilityQueryInfo(Attributor &A, const Instruction &From, const ToTy &To, const AA::InstExclusionSetTy *ES, bool MakeUnique)
Constructor replacement to ensure unique and stable sets are used for the cache.
unsigned computeHashValue() const
An abstract interface for address space information.
static const char ID
Unique ID (due to the unique address)
An abstract interface for all align attributes.
static const char ID
Unique ID (due to the unique address)
Align getKnownAlign() const
Return known alignment.
An abstract attribute for getting assumption information.
static const char ID
Unique ID (due to the unique address)
An abstract state for querying live call edges.
static const char ID
Unique ID (due to the unique address)
An abstract Attribute for specializing "dynamic" components of "denormal-fp-math" and "denormal-fp-ma...
static const char ID
Unique ID (due to the unique address)
An abstract interface for all dereferenceable attribute.
uint32_t getKnownDereferenceableBytes() const
Return known dereferenceable bytes.
uint32_t getAssumedDereferenceableBytes() const
Return assumed dereferenceable bytes.
static const char ID
Unique ID (due to the unique address)
An abstract interface for llvm::GlobalValue information interference.
static const char ID
Unique ID (due to the unique address)
static const char ID
Unique ID (due to the unique address)
An abstract interface for indirect call information interference.
static const char ID
Unique ID (due to the unique address)
An abstract interface to track if a value leaves it's defining function instance.
static const char ID
Unique ID (due to the unique address)
An abstract Attribute for computing reachability between functions.
static const char ID
Unique ID (due to the unique address)
bool canReach(Attributor &A, const Function &Fn) const
If the function represented by this possition can reach Fn.
virtual bool instructionCanReach(Attributor &A, const Instruction &Inst, const Function &Fn, const AA::InstExclusionSetTy *ExclusionSet=nullptr) const =0
Can Inst reach Fn.
An abstract interface to determine reachability of point A to B.
static const char ID
Unique ID (due to the unique address)
virtual bool isAssumedReachable(Attributor &A, const Instruction &From, const Instruction &To, const AA::InstExclusionSetTy *ExclusionSet=nullptr) const =0
Returns true if 'From' instruction is assumed to reach, 'To' instruction.
An abstract interface for liveness abstract attribute.
static const char ID
Unique ID (due to the unique address)
An abstract interface for memory access kind related attributes (readnone/readonly/writeonly).
bool isAssumedReadOnly() const
Return true if we assume that the underlying value is not accessed (=written) in its respective scope...
bool isKnownReadNone() const
Return true if we know that the underlying value is not read or accessed in its respective scope.
static const char ID
Unique ID (due to the unique address)
bool isAssumedReadNone() const
Return true if we assume that the underlying value is not read or accessed in its respective scope.
An abstract interface for all memory location attributes (readnone/argmemonly/inaccessiblememonly/ina...
static std::string getMemoryLocationsAsStr(MemoryLocationsKind MLK)
Return the locations encoded by MLK as a readable string.
static const char ID
Unique ID (due to the unique address)
StateType::base_t MemoryLocationsKind
An abstract interface for all nonnull attributes.
static const char ID
Unique ID (due to the unique address)
An abstract interface for all noalias attributes.
static bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static const char ID
Unique ID (due to the unique address)
An abstract interface for all nocapture attributes.
static const char ID
Unique ID (due to the unique address)
bool isAssumedNoCaptureMaybeReturned() const
Return true if we assume that the underlying value is not captured in its respective scope but we all...
static bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
@ NO_CAPTURE_MAYBE_RETURNED
If we do not capture the value in memory or through integers we can only communicate it back as a der...
static void determineFunctionCaptureCapabilities(const IRPosition &IRP, const Function &F, BitIntegerState &State)
Update State according to the capture capabilities of F for position IRP.
static const char ID
Unique ID (due to the unique address)
An AbstractAttribute for nofree.
static const char ID
Unique ID (due to the unique address)
An abstract attribute for norecurse.
static const char ID
Unique ID (due to the unique address)
An AbstractAttribute for noreturn.
static const char ID
Unique ID (due to the unique address)
static const char ID
Unique ID (due to the unique address)
static bool isAlignedBarrier(const CallBase &CB, bool ExecutedAligned)
Helper function to determine if CB is an aligned (GPU) barrier.
static bool isNonRelaxedAtomic(const Instruction *I)
Helper function used to determine whether an instruction is non-relaxed atomic.
static bool isNoSyncIntrinsic(const Instruction *I)
Helper function specific for intrinsics which are potentially volatile.
An abstract interface for all noundef attributes.
static const char ID
Unique ID (due to the unique address)
static bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static const char ID
Unique ID (due to the unique address)
An abstract Attribute for determining the necessity of the convergent attribute.
static const char ID
Unique ID (due to the unique address)
bool isAssumedNotConvergent() const
Return true if "non-convergent" is assumed.
An abstract interface for all nonnull attributes.
static const char ID
Unique ID (due to the unique address)
static bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See AbstractAttribute::isImpliedByIR(...).
A container for a list of ranges.
static void set_difference(const RangeList &L, const RangeList &R, RangeList &D)
Copy ranges from L that are not in R, into D.
An abstract interface for struct information.
virtual const_bin_iterator begin() const =0
static const char ID
Unique ID (due to the unique address)
virtual int64_t numOffsetBins() const =0
An abstract interface for potential values analysis.
static const char ID
Unique ID (due to the unique address)
static const char ID
Unique ID (due to the unique address)
static Value * getSingleValue(Attributor &A, const AbstractAttribute &AA, const IRPosition &IRP, SmallVectorImpl< AA::ValueAndContext > &Values)
Extract the single value in Values if any.
An abstract interface for privatizability.
virtual std::optional< Type * > getPrivatizableType() const =0
Return the type we can choose for a private copy of the underlying value.
static const char ID
Unique ID (due to the unique address)
An abstract attribute for undefined behavior.
static const char ID
Unique ID (due to the unique address)
An abstract attribute for getting all assumption underlying objects.
static const char ID
Unique ID (due to the unique address)
An abstract interface for range value analysis.
static const char ID
Unique ID (due to the unique address)
An abstract interface for value simplify abstract attribute.
static const char ID
Unique ID (due to the unique address)
An abstract attribute for willreturn.
static const char ID
Unique ID (due to the unique address)
Helper to represent an access offset and size, with logic to deal with uncertainty and check for over...
static constexpr int64_t Unknown
static RangeTy getUnknown()
Base struct for all "concrete attribute" deductions.
void print(raw_ostream &OS) const
Helper functions, for debug purposes only.
virtual StateType & getState()=0
Return the internal abstract state for inspection.
An interface to query the internal state of an abstract attribute.
virtual ChangeStatus indicatePessimisticFixpoint()=0
Indicate that the abstract state should converge to the pessimistic state.
virtual bool isValidState() const =0
Return if this abstract state is in a valid state.
virtual ChangeStatus indicateOptimisticFixpoint()=0
Indicate that the abstract state should converge to the optimistic state.
Helper for AA::PointerInfo::Access DenseMap/Set usage ignoring everythign but the instruction.
static unsigned getHashValue(const Access &A)
static Access getTombstoneKey()
static bool isEqual(const Access &LHS, const Access &RHS)
static Access getEmptyKey()
This struct is a compact representation of a valid (non-zero power of two) alignment.
uint64_t value() const
This is a hole in the type system and should not be abused.
Helper struct used in the communication between an abstract attribute (AA) that wants to change the s...
std::function< void(const ArgumentReplacementInfo &, AbstractCallSite, SmallVectorImpl< Value * > &)> ACSRepairCBTy
Abstract call site (ACS) repair callback type.
const Argument & getReplacedArg() const
std::function< void(const ArgumentReplacementInfo &, Function &, Function::arg_iterator)> CalleeRepairCBTy
Callee repair callback type.
The fixpoint analysis framework that orchestrates the attribute deduction.
std::function< std::optional< Value * >(const IRPosition &, const AbstractAttribute *, bool &)> SimplifictionCallbackTy
Register CB as a simplification callback.
Specialization of the integer state for a bit-wise encoding.
BitIntegerState & removeAssumedBits(base_t BitsEncoding)
Remove the bits in BitsEncoding from the "assumed bits" if not known.
BitIntegerState & addKnownBits(base_t Bits)
Add the bits in BitsEncoding to the "known bits".
Simple wrapper for a single bit (boolean) state.
Represent subnormal handling kind for floating point instruction inputs and outputs.
static constexpr DenormalMode getDefault()
Return the assumed default mode for a function without denormal-fp-math.
static constexpr DenormalMode getInvalid()
static Access getTombstoneKey()
static unsigned getHashValue(const Access &A)
static Access getEmptyKey()
static bool isEqual(const Access &LHS, const Access &RHS)
static bool isEqual(const AA::RangeTy &A, const AA::RangeTy B)
static AA::RangeTy getTombstoneKey()
static unsigned getHashValue(const AA::RangeTy &Range)
static AA::RangeTy getEmptyKey()
static ReachabilityQueryInfo< ToTy > EmptyKey
static ReachabilityQueryInfo< ToTy > TombstoneKey
static ReachabilityQueryInfo< ToTy > * getEmptyKey()
static ReachabilityQueryInfo< ToTy > * getTombstoneKey()
static bool isEqual(const ReachabilityQueryInfo< ToTy > *LHS, const ReachabilityQueryInfo< ToTy > *RHS)
static unsigned getHashValue(const ReachabilityQueryInfo< ToTy > *RQI)
An information struct used to provide DenseMap with the various necessary components for a given valu...
State for dereferenceable attribute.
IncIntegerState DerefBytesState
State representing for dereferenceable bytes.
ChangeStatus manifest(Attributor &A) override
See AbstractAttribute::manifest(...).
Helper to describe and deal with positions in the LLVM-IR.
Function * getAssociatedFunction() const
Return the associated function, if any.
static const IRPosition callsite_returned(const CallBase &CB)
Create a position describing the returned value of CB.
static const IRPosition returned(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the returned value of F.
Argument * getAssociatedArgument() const
Return the associated argument, if any.
static const IRPosition value(const Value &V, const CallBaseContext *CBContext=nullptr)
Create a position describing the value of V.
int getCalleeArgNo() const
Return the callee argument number of the associated value if it is an argument or call site argument,...
static const IRPosition inst(const Instruction &I, const CallBaseContext *CBContext=nullptr)
Create a position describing the instruction I.
static const IRPosition callsite_argument(const CallBase &CB, unsigned ArgNo)
Create a position describing the argument of CB at position ArgNo.
@ IRP_ARGUMENT
An attribute for a function argument.
@ IRP_RETURNED
An attribute for the function return value.
@ IRP_CALL_SITE
An attribute for a call site (function scope).
@ IRP_CALL_SITE_RETURNED
An attribute for a call site return value.
@ IRP_FUNCTION
An attribute for a function (scope).
@ IRP_CALL_SITE_ARGUMENT
An attribute for a call site argument.
@ IRP_INVALID
An invalid position.
Instruction * getCtxI() const
Return the context instruction, if any.
static const IRPosition argument(const Argument &Arg, const CallBaseContext *CBContext=nullptr)
Create a position describing the argument Arg.
Type * getAssociatedType() const
Return the type this abstract attribute is associated with.
static const IRPosition function(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the function scope of F.
const CallBaseContext * getCallBaseContext() const
Get the call base context from the position.
Value & getAssociatedValue() const
Return the value this abstract attribute is associated with.
Value & getAnchorValue() const
Return the value this abstract attribute is anchored with.
int getCallSiteArgNo() const
Return the call site argument number of the associated value if it is an argument or call site argume...
static const IRPosition function_scope(const IRPosition &IRP, const CallBaseContext *CBContext=nullptr)
Create a position with function scope matching the "context" of IRP.
Kind getPositionKind() const
Return the associated position kind.
bool isArgumentPosition() const
Return true if the position is an argument or call site argument.
static const IRPosition callsite_function(const CallBase &CB)
Create a position describing the function scope of CB.
Function * getAnchorScope() const
Return the Function surrounding the anchor value.
Incoming for lane maks phi as machine instruction, incoming register Reg and incoming block Block are...
State for an integer range.
ConstantRange getKnown() const
Return the known state encoding.
ConstantRange getAssumed() const
Return the assumed state encoding.
bool isValidState() const override
See AbstractState::isValidState() NOTE: For now we simply pretend that the worst possible state is in...
bool isAtFixpoint() const override
See AbstractState::isAtFixpoint()
ChangeStatus indicateOptimisticFixpoint() override
See AbstractState::indicateOptimisticFixpoint(...)
base_t getAssumed() const
Return the assumed state encoding.
static constexpr base_t getWorstState()
Return the worst possible representable state.
ChangeStatus indicatePessimisticFixpoint() override
See AbstractState::indicatePessimisticFixpoint(...)
Helper that allows to insert a new assumption string in the known assumption set by creating a (stati...
FPClassTest KnownFPClasses
Floating-point classes the value could be one of.
This struct is a compact representation of a valid (power of two) or undefined (0) alignment.
A "must be executed context" for a given program point PP is the set of instructions,...
iterator & end()
Return an universal end iterator.
bool findInContextOf(const Instruction *I, const Instruction *PP)
Helper to look for I in the context of PP.
iterator & begin(const Instruction *PP)
Return an iterator to explore the context around PP.
bool checkForAllContext(const Instruction *PP, function_ref< bool(const Instruction *)> Pred)
}
Various options to control the behavior of getObjectSize.
static unsigned MaxPotentialValues
Maximum number of potential values to be tracked.
void unionAssumed(const MemberTy &C)
Union assumed set with the passed value.
static PotentialValuesState getBestState()
Return empty set as the best state of potential values.
const SetTy & getAssumedSet() const
Return this set.
Represent one information held inside an operand bundle of an llvm.assume.
A MapVector that performs no allocations if smaller than a certain size.
Helper to tie a abstract state implementation to an abstract attribute.
StateType & getState() override
See AbstractAttribute::getState(...).
bool unionAssumed(std::optional< Value * > Other)
Merge Other into the currently assumed simplified value.