54#include "llvm/IR/IntrinsicsAMDGPU.h"
55#include "llvm/IR/IntrinsicsNVPTX.h"
80#define DEBUG_TYPE "attributor"
84 cl::desc(
"Manifest Attributor internal string attributes."),
97 cl::desc(
"Maximum number of potential values to be "
98 "tracked for each position."),
103 "attributor-max-potential-values-iterations",
cl::Hidden,
105 "Maximum number of iterations we keep dismantling potential values."),
108STATISTIC(NumAAs,
"Number of abstract attributes created");
109STATISTIC(NumIndirectCallsPromoted,
"Number of indirect calls promoted");
124#define BUILD_STAT_MSG_IR_ATTR(TYPE, NAME) \
125 ("Number of " #TYPE " marked '" #NAME "'")
126#define BUILD_STAT_NAME(NAME, TYPE) NumIR##TYPE##_##NAME
127#define STATS_DECL_(NAME, MSG) STATISTIC(NAME, MSG);
128#define STATS_DECL(NAME, TYPE, MSG) \
129 STATS_DECL_(BUILD_STAT_NAME(NAME, TYPE), MSG);
130#define STATS_TRACK(NAME, TYPE) ++(BUILD_STAT_NAME(NAME, TYPE));
131#define STATS_DECLTRACK(NAME, TYPE, MSG) \
133 STATS_DECL(NAME, TYPE, MSG) \
134 STATS_TRACK(NAME, TYPE) \
136#define STATS_DECLTRACK_ARG_ATTR(NAME) \
137 STATS_DECLTRACK(NAME, Arguments, BUILD_STAT_MSG_IR_ATTR(arguments, NAME))
138#define STATS_DECLTRACK_CSARG_ATTR(NAME) \
139 STATS_DECLTRACK(NAME, CSArguments, \
140 BUILD_STAT_MSG_IR_ATTR(call site arguments, NAME))
141#define STATS_DECLTRACK_FN_ATTR(NAME) \
142 STATS_DECLTRACK(NAME, Function, BUILD_STAT_MSG_IR_ATTR(functions, NAME))
143#define STATS_DECLTRACK_CS_ATTR(NAME) \
144 STATS_DECLTRACK(NAME, CS, BUILD_STAT_MSG_IR_ATTR(call site, NAME))
145#define STATS_DECLTRACK_FNRET_ATTR(NAME) \
146 STATS_DECLTRACK(NAME, FunctionReturn, \
147 BUILD_STAT_MSG_IR_ATTR(function returns, NAME))
148#define STATS_DECLTRACK_CSRET_ATTR(NAME) \
149 STATS_DECLTRACK(NAME, CSReturn, \
150 BUILD_STAT_MSG_IR_ATTR(call site returns, NAME))
151#define STATS_DECLTRACK_FLOATING_ATTR(NAME) \
152 STATS_DECLTRACK(NAME, Floating, \
153 ("Number of floating values known to be '" #NAME "'"))
158#define PIPE_OPERATOR(CLASS) \
159 raw_ostream &operator<<(raw_ostream &OS, const CLASS &AA) { \
160 return OS << static_cast<const AbstractAttribute &>(AA); \
215 bool HeaderOnly,
Cycle **CPtr =
nullptr) {
218 auto *BB =
I->getParent();
224 return !HeaderOnly || BB ==
C->getHeader();
235 if (
DL.getTypeSizeInBits(Ty) !=
DL.getTypeAllocSizeInBits(Ty))
240 if (
VectorType *SeqTy = dyn_cast<VectorType>(Ty))
244 if (
ArrayType *SeqTy = dyn_cast<ArrayType>(Ty))
247 if (!isa<StructType>(Ty))
260 StartPos +=
DL.getTypeAllocSizeInBits(ElTy);
270 bool AllowVolatile) {
271 if (!AllowVolatile &&
I->isVolatile())
274 if (
auto *LI = dyn_cast<LoadInst>(
I)) {
275 return LI->getPointerOperand();
278 if (
auto *SI = dyn_cast<StoreInst>(
I)) {
279 return SI->getPointerOperand();
282 if (
auto *CXI = dyn_cast<AtomicCmpXchgInst>(
I)) {
283 return CXI->getPointerOperand();
286 if (
auto *RMWI = dyn_cast<AtomicRMWInst>(
I)) {
287 return RMWI->getPointerOperand();
309 bool GetMinOffset,
bool AllowNonInbounds,
310 bool UseAssumed =
false) {
312 auto AttributorAnalysis = [&](
Value &V,
APInt &ROffset) ->
bool {
317 UseAssumed ? DepClassTy::OPTIONAL
319 if (!ValueConstantRangeAA)
342 const Value *
Ptr, int64_t &BytesOffset,
344 APInt OffsetAPInt(
DL.getIndexTypeSizeInBits(
Ptr->getType()), 0);
347 true, AllowNonInbounds);
355template <
typename AAType,
typename StateType =
typename AAType::StateType,
357 bool RecurseForSelectAndPHI =
true>
359 Attributor &
A,
const AAType &QueryingAA, StateType &S,
361 LLVM_DEBUG(
dbgs() <<
"[Attributor] Clamp return value states for "
362 << QueryingAA <<
" into " << S <<
"\n");
364 assert((QueryingAA.getIRPosition().getPositionKind() ==
366 QueryingAA.getIRPosition().getPositionKind() ==
368 "Can only clamp returned value states for a function returned or call "
369 "site returned position!");
373 std::optional<StateType>
T;
376 auto CheckReturnValue = [&](
Value &RV) ->
bool {
381 return AA::hasAssumedIRAttr<IRAttributeKind>(
382 A, &QueryingAA, RVPos, DepClassTy::REQUIRED, IsKnown);
386 A.getAAFor<AAType>(QueryingAA, RVPos, DepClassTy::REQUIRED);
390 <<
" AA: " << AA->getAsStr(&
A) <<
" @ " << RVPos <<
"\n");
391 const StateType &AAS = AA->getState();
393 T = StateType::getBestState(AAS);
395 LLVM_DEBUG(
dbgs() <<
"[Attributor] AA State: " << AAS <<
" RV State: " <<
T
397 return T->isValidState();
400 if (!
A.checkForAllReturnedValues(CheckReturnValue, QueryingAA,
401 AA::ValueScope::Intraprocedural,
402 RecurseForSelectAndPHI))
403 S.indicatePessimisticFixpoint();
410template <
typename AAType,
typename BaseType,
411 typename StateType =
typename BaseType::StateType,
412 bool PropagateCallBaseContext =
false,
414 bool RecurseForSelectAndPHI =
true>
415struct AAReturnedFromReturnedValues :
public BaseType {
421 StateType S(StateType::getBestState(this->getState()));
423 RecurseForSelectAndPHI>(
425 PropagateCallBaseContext ? this->getCallBaseContext() : nullptr);
428 return clampStateAndIndicateChange<StateType>(this->getState(), S);
434template <
typename AAType,
typename StateType =
typename AAType::StateType,
436static void clampCallSiteArgumentStates(
Attributor &
A,
const AAType &QueryingAA,
438 LLVM_DEBUG(
dbgs() <<
"[Attributor] Clamp call site argument states for "
439 << QueryingAA <<
" into " << S <<
"\n");
441 assert(QueryingAA.getIRPosition().getPositionKind() ==
443 "Can only clamp call site argument states for an argument position!");
447 std::optional<StateType>
T;
450 unsigned ArgNo = QueryingAA.getIRPosition().getCallSiteArgNo();
462 return AA::hasAssumedIRAttr<IRAttributeKind>(
463 A, &QueryingAA, ACSArgPos, DepClassTy::REQUIRED, IsKnown);
467 A.getAAFor<AAType>(QueryingAA, ACSArgPos, DepClassTy::REQUIRED);
470 LLVM_DEBUG(
dbgs() <<
"[Attributor] ACS: " << *ACS.getInstruction()
471 <<
" AA: " << AA->getAsStr(&
A) <<
" @" << ACSArgPos
473 const StateType &AAS = AA->getState();
475 T = StateType::getBestState(AAS);
477 LLVM_DEBUG(
dbgs() <<
"[Attributor] AA State: " << AAS <<
" CSA State: " <<
T
479 return T->isValidState();
482 bool UsedAssumedInformation =
false;
483 if (!
A.checkForAllCallSites(CallSiteCheck, QueryingAA,
true,
484 UsedAssumedInformation))
485 S.indicatePessimisticFixpoint();
492template <
typename AAType,
typename BaseType,
493 typename StateType =
typename AAType::StateType,
495bool getArgumentStateFromCallBaseContext(
Attributor &
A,
499 "Expected an 'argument' position !");
505 assert(ArgNo >= 0 &&
"Invalid Arg No!");
511 return AA::hasAssumedIRAttr<IRAttributeKind>(
512 A, &QueryingAttribute, CBArgPos, DepClassTy::REQUIRED, IsKnown);
516 A.getAAFor<AAType>(QueryingAttribute, CBArgPos, DepClassTy::REQUIRED);
519 const StateType &CBArgumentState =
520 static_cast<const StateType &
>(AA->getState());
522 LLVM_DEBUG(
dbgs() <<
"[Attributor] Briding Call site context to argument"
523 <<
"Position:" << Pos <<
"CB Arg state:" << CBArgumentState
527 State ^= CBArgumentState;
532template <
typename AAType,
typename BaseType,
533 typename StateType =
typename AAType::StateType,
534 bool BridgeCallBaseContext =
false,
536struct AAArgumentFromCallSiteArguments :
public BaseType {
542 StateType S = StateType::getBestState(this->getState());
544 if (BridgeCallBaseContext) {
546 getArgumentStateFromCallBaseContext<AAType,
BaseType, StateType,
548 A, *
this, this->getIRPosition(), S);
550 return clampStateAndIndicateChange<StateType>(this->getState(), S);
552 clampCallSiteArgumentStates<AAType, StateType, IRAttributeKind>(
A, *
this,
557 return clampStateAndIndicateChange<StateType>(this->getState(), S);
562template <
typename AAType,
typename BaseType,
563 typename StateType =
typename BaseType::StateType,
564 bool IntroduceCallBaseContext =
false,
566struct AACalleeToCallSite :
public BaseType {
571 auto IRPKind = this->getIRPosition().getPositionKind();
574 "Can only wrap function returned positions for call site "
575 "returned positions!");
576 auto &S = this->getState();
578 CallBase &CB = cast<CallBase>(this->getAnchorValue());
579 if (IntroduceCallBaseContext)
580 LLVM_DEBUG(
dbgs() <<
"[Attributor] Introducing call base context:" << CB
585 for (
const Function *Callee : Callees) {
589 IntroduceCallBaseContext ? &CB :
nullptr)
591 *
Callee, IntroduceCallBaseContext ? &CB : nullptr);
595 if (!AA::hasAssumedIRAttr<IRAttributeKind>(
596 A,
this, FnPos, DepClassTy::REQUIRED, IsKnown))
602 A.getAAFor<AAType>(*
this, FnPos, DepClassTy::REQUIRED);
606 if (S.isAtFixpoint())
607 return S.isValidState();
611 if (!
A.checkForAllCallees(CalleePred, *
this, CB))
612 return S.indicatePessimisticFixpoint();
618template <
class AAType,
typename StateType =
typename AAType::StateType>
619static void followUsesInContext(AAType &AA,
Attributor &
A,
624 auto EIt = Explorer.
begin(CtxI), EEnd = Explorer.
end(CtxI);
625 for (
unsigned u = 0;
u <
Uses.size(); ++
u) {
627 if (
const Instruction *UserI = dyn_cast<Instruction>(
U->getUser())) {
629 if (Found && AA.followUseInMBEC(
A, U, UserI, State))
630 for (
const Use &Us : UserI->
uses())
645template <
class AAType,
typename StateType =
typename AAType::StateType>
646static void followUsesInMBEC(AAType &AA,
Attributor &
A, StateType &S,
649 A.getInfoCache().getMustBeExecutedContextExplorer();
655 for (
const Use &U : AA.getIRPosition().getAssociatedValue().uses())
658 followUsesInContext<AAType>(AA,
A, *Explorer, &CtxI,
Uses, S);
660 if (S.isAtFixpoint())
665 if (
const BranchInst *Br = dyn_cast<BranchInst>(
I))
666 if (Br->isConditional())
705 StateType ParentState;
709 ParentState.indicateOptimisticFixpoint();
711 for (
const BasicBlock *BB : Br->successors()) {
712 StateType ChildState;
714 size_t BeforeSize =
Uses.size();
715 followUsesInContext(AA,
A, *Explorer, &BB->front(),
Uses, ChildState);
718 for (
auto It =
Uses.begin() + BeforeSize; It !=
Uses.end();)
721 ParentState &= ChildState;
734namespace PointerInfo {
795 R.indicatePessimisticFixpoint();
896 if (!
Range.mayOverlap(ItRange))
898 bool IsExact =
Range == ItRange && !
Range.offsetOrSizeAreUnknown();
899 for (
auto Index : It.getSecond()) {
921 for (
unsigned Index : LocalList->getSecond()) {
924 if (
Range.offsetAndSizeAreUnknown())
940 RemoteI = RemoteI ? RemoteI : &
I;
944 bool AccExists =
false;
946 for (
auto Index : LocalList) {
948 if (
A.getLocalInst() == &
I) {
957 <<
"[AAPointerInfo] Inserting access in new offset bins\n";);
959 for (
auto Key : ToAdd) {
968 "New Access should have been at AccIndex");
969 LocalList.push_back(AccIndex);
983 auto &ExistingRanges =
Before.getRanges();
984 auto &NewRanges = Current.getRanges();
991 <<
"[AAPointerInfo] Removing access from old offset bins\n";);
998 "Expected bin to actually contain the Access.");
1025struct AAPointerInfoImpl
1026 :
public StateWrapper<AA::PointerInfo::State, AAPointerInfo> {
1031 const std::string getAsStr(
Attributor *
A)
const override {
1032 return std::string(
"PointerInfo ") +
1033 (isValidState() ? (std::string(
"#") +
1034 std::to_string(OffsetBins.
size()) +
" bins")
1039 [](int64_t O) {
return std::to_string(O); }),
1047 return AAPointerInfo::manifest(
A);
1050 virtual const_bin_iterator
begin()
const override {
return State::begin(); }
1051 virtual const_bin_iterator
end()
const override {
return State::end(); }
1052 virtual int64_t numOffsetBins()
const override {
1053 return State::numOffsetBins();
1055 virtual bool reachesReturn()
const override {
1058 virtual void addReturnedOffsetsTo(OffsetInfo &OI)
const override {
1064 OffsetInfo MergedOI;
1065 for (
auto Offset : ReturnedOffsets) {
1066 OffsetInfo TmpOI = OI;
1068 MergedOI.merge(TmpOI);
1070 OI = std::move(MergedOI);
1073 ChangeStatus setReachesReturn(
const OffsetInfo &ReachedReturnedOffsets) {
1074 if (ReturnedOffsets.isUnknown())
1075 return ChangeStatus::UNCHANGED;
1076 if (ReachedReturnedOffsets.isUnknown()) {
1077 ReturnedOffsets.setUnknown();
1078 return ChangeStatus::CHANGED;
1080 if (ReturnedOffsets.merge(ReachedReturnedOffsets))
1081 return ChangeStatus::CHANGED;
1082 return ChangeStatus::UNCHANGED;
1085 bool forallInterferingAccesses(
1089 return State::forallInterferingAccesses(
Range, CB);
1092 bool forallInterferingAccesses(
1094 bool FindInterferingWrites,
bool FindInterferingReads,
1098 HasBeenWrittenTo =
false;
1105 bool IsAssumedNoSync = AA::hasAssumedIRAttr<Attribute::NoSync>(
1110 bool AllInSameNoSyncFn = IsAssumedNoSync;
1111 bool InstIsExecutedByInitialThreadOnly =
1112 ExecDomainAA && ExecDomainAA->isExecutedByInitialThreadOnly(
I);
1119 bool InstIsExecutedInAlignedRegion =
1120 FindInterferingReads && ExecDomainAA &&
1121 ExecDomainAA->isExecutedInAlignedRegion(
A,
I);
1123 if (InstIsExecutedInAlignedRegion || InstIsExecutedByInitialThreadOnly)
1124 A.recordDependence(*ExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1127 bool IsThreadLocalObj =
1136 auto CanIgnoreThreadingForInst = [&](
const Instruction &
I) ->
bool {
1137 if (IsThreadLocalObj || AllInSameNoSyncFn)
1139 const auto *FnExecDomainAA =
1140 I.getFunction() == &
Scope
1145 if (!FnExecDomainAA)
1147 if (InstIsExecutedInAlignedRegion ||
1148 (FindInterferingWrites &&
1149 FnExecDomainAA->isExecutedInAlignedRegion(
A,
I))) {
1150 A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1153 if (InstIsExecutedByInitialThreadOnly &&
1154 FnExecDomainAA->isExecutedByInitialThreadOnly(
I)) {
1155 A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1164 auto CanIgnoreThreading = [&](
const Access &Acc) ->
bool {
1165 return CanIgnoreThreadingForInst(*Acc.getRemoteInst()) ||
1166 (Acc.getRemoteInst() != Acc.getLocalInst() &&
1167 CanIgnoreThreadingForInst(*Acc.getLocalInst()));
1171 bool IsKnownNoRecurse;
1172 AA::hasAssumedIRAttr<Attribute::NoRecurse>(
1179 bool InstInKernel =
Scope.hasFnAttribute(
"kernel");
1180 bool ObjHasKernelLifetime =
false;
1181 const bool UseDominanceReasoning =
1182 FindInterferingWrites && IsKnownNoRecurse;
1193 case AA::GPUAddressSpace::Shared:
1194 case AA::GPUAddressSpace::Constant:
1195 case AA::GPUAddressSpace::Local:
1207 std::function<
bool(
const Function &)> IsLiveInCalleeCB;
1209 if (
auto *AI = dyn_cast<AllocaInst>(&getAssociatedValue())) {
1214 bool IsKnownNoRecurse;
1215 if (AA::hasAssumedIRAttr<Attribute::NoRecurse>(
1217 IsKnownNoRecurse)) {
1218 IsLiveInCalleeCB = [AIFn](
const Function &Fn) {
return AIFn != &Fn; };
1220 }
else if (
auto *GV = dyn_cast<GlobalValue>(&getAssociatedValue())) {
1223 ObjHasKernelLifetime = HasKernelLifetime(GV, *GV->getParent());
1224 if (ObjHasKernelLifetime)
1225 IsLiveInCalleeCB = [](
const Function &Fn) {
1226 return !Fn.hasFnAttribute(
"kernel");
1234 auto AccessCB = [&](
const Access &Acc,
bool Exact) {
1236 bool AccInSameScope = AccScope == &
Scope;
1240 if (InstInKernel && ObjHasKernelLifetime && !AccInSameScope &&
1244 if (Exact && Acc.isMustAccess() && Acc.getRemoteInst() != &
I) {
1245 if (Acc.isWrite() || (isa<LoadInst>(
I) && Acc.isWriteOrAssumption()))
1246 ExclusionSet.
insert(Acc.getRemoteInst());
1249 if ((!FindInterferingWrites || !Acc.isWriteOrAssumption()) &&
1250 (!FindInterferingReads || !Acc.isRead()))
1253 bool Dominates = FindInterferingWrites && DT && Exact &&
1254 Acc.isMustAccess() && AccInSameScope &&
1257 DominatingWrites.
insert(&Acc);
1261 AllInSameNoSyncFn &= Acc.getRemoteInst()->getFunction() == &
Scope;
1263 InterferingAccesses.
push_back({&Acc, Exact});
1266 if (!State::forallInterferingAccesses(
I, AccessCB,
Range))
1269 HasBeenWrittenTo = !DominatingWrites.
empty();
1273 for (
const Access *Acc : DominatingWrites) {
1274 if (!LeastDominatingWriteInst) {
1275 LeastDominatingWriteInst = Acc->getRemoteInst();
1276 }
else if (DT->
dominates(LeastDominatingWriteInst,
1277 Acc->getRemoteInst())) {
1278 LeastDominatingWriteInst = Acc->getRemoteInst();
1283 auto CanSkipAccess = [&](
const Access &Acc,
bool Exact) {
1284 if (SkipCB && SkipCB(Acc))
1286 if (!CanIgnoreThreading(Acc))
1292 bool ReadChecked = !FindInterferingReads;
1293 bool WriteChecked = !FindInterferingWrites;
1299 &ExclusionSet, IsLiveInCalleeCB))
1304 if (!WriteChecked) {
1306 &ExclusionSet, IsLiveInCalleeCB))
1307 WriteChecked =
true;
1321 if (!WriteChecked && HasBeenWrittenTo &&
1322 Acc.getRemoteInst()->getFunction() != &
Scope) {
1326 if (FnReachabilityAA) {
1332 if (!FnReachabilityAA->instructionCanReach(
1333 A, *LeastDominatingWriteInst,
1334 *Acc.getRemoteInst()->getFunction(), &ExclusionSet))
1335 WriteChecked =
true;
1342 if (ReadChecked && WriteChecked)
1345 if (!DT || !UseDominanceReasoning)
1347 if (!DominatingWrites.count(&Acc))
1349 return LeastDominatingWriteInst != Acc.getRemoteInst();
1354 for (
auto &It : InterferingAccesses) {
1355 if ((!AllInSameNoSyncFn && !IsThreadLocalObj && !ExecDomainAA) ||
1356 !CanSkipAccess(*It.first, It.second)) {
1357 if (!UserCB(*It.first, It.second))
1367 using namespace AA::PointerInfo;
1369 return indicatePessimisticFixpoint();
1372 const auto &OtherAAImpl =
static_cast<const AAPointerInfoImpl &
>(OtherAA);
1373 bool IsByval = OtherAAImpl.getAssociatedArgument()->hasByValAttr();
1374 Changed |= setReachesReturn(OtherAAImpl.ReturnedOffsets);
1377 const auto &State = OtherAAImpl.getState();
1378 for (
const auto &It : State) {
1379 for (
auto Index : It.getSecond()) {
1380 const auto &RAcc = State.getAccess(
Index);
1381 if (IsByval && !RAcc.isRead())
1383 bool UsedAssumedInformation =
false;
1385 auto Content =
A.translateArgumentToCallSiteContent(
1386 RAcc.getContent(), CB, *
this, UsedAssumedInformation);
1387 AK =
AccessKind(AK & (IsByval ? AccessKind::AK_R : AccessKind::AK_RW));
1388 AK =
AccessKind(AK | (RAcc.isMayAccess() ? AK_MAY : AK_MUST));
1390 Changed |= addAccess(
A, RAcc.getRanges(), CB,
Content, AK,
1391 RAcc.getType(), RAcc.getRemoteInst());
1398 const OffsetInfo &Offsets,
CallBase &CB,
1400 using namespace AA::PointerInfo;
1402 return indicatePessimisticFixpoint();
1404 const auto &OtherAAImpl =
static_cast<const AAPointerInfoImpl &
>(OtherAA);
1408 const auto &State = OtherAAImpl.getState();
1409 for (
const auto &It : State) {
1410 for (
auto Index : It.getSecond()) {
1411 const auto &RAcc = State.getAccess(
Index);
1412 if (!IsMustAcc && RAcc.isAssumption())
1414 for (
auto Offset : Offsets) {
1418 if (!NewRanges.isUnknown()) {
1419 NewRanges.addToAllOffsets(
Offset);
1424 Changed |= addAccess(
A, NewRanges, CB, RAcc.getContent(), AK,
1425 RAcc.getType(), RAcc.getRemoteInst());
// Statistics hook invoked by the AAPointerInfo subclasses' trackStatistics()
// overrides; intentionally an empty no-op (the IRP parameter is unused and no
// counter is incremented for pointer-info attributes).
1434 void trackPointerInfoStatistics(
const IRPosition &IRP)
const {}
1438 for (
auto &It : OffsetBins) {
1439 O <<
"[" << It.first.Offset <<
"-" << It.first.Offset + It.first.Size
1440 <<
"] : " << It.getSecond().size() <<
"\n";
1441 for (
auto AccIndex : It.getSecond()) {
1442 auto &Acc = AccessList[AccIndex];
1443 O <<
" - " << Acc.getKind() <<
" - " << *Acc.getLocalInst() <<
"\n";
1444 if (Acc.getLocalInst() != Acc.getRemoteInst())
1445 O <<
" --> " << *Acc.getRemoteInst()
1447 if (!Acc.isWrittenValueYetUndetermined()) {
1448 if (isa_and_nonnull<Function>(Acc.getWrittenValue()))
1449 O <<
" - c: func " << Acc.getWrittenValue()->getName()
1451 else if (Acc.getWrittenValue())
1452 O <<
" - c: " << *Acc.getWrittenValue() <<
"\n";
1454 O <<
" - c: <unknown>\n";
1461struct AAPointerInfoFloating :
public AAPointerInfoImpl {
1464 : AAPointerInfoImpl(IRP,
A) {}
1471 using namespace AA::PointerInfo;
1474 TypeSize AccessSize =
DL.getTypeStoreSize(&Ty);
1483 if (!VT || VT->getElementCount().isScalable() ||
1485 (*Content)->getType() != VT ||
1486 DL.getTypeStoreSize(VT->getElementType()).isScalable()) {
1497 int64_t ElementSize =
DL.getTypeStoreSize(ElementType).getFixedValue();
1498 auto *ConstContent = cast<Constant>(*
Content);
1502 for (
int i = 0, e = VT->getElementCount().getFixedValue(); i != e; ++i) {
1504 ConstContent, ConstantInt::get(Int32Ty, i));
1507 Changed = Changed | addAccess(
A, {ElementOffsets, ElementSize},
I,
1511 for (
auto &ElementOffset : ElementOffsets)
1512 ElementOffset += ElementSize;
1526 OffsetInfo &UsrOI,
const OffsetInfo &PtrOI,
1530 void trackStatistics()
const override {
1531 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1535bool AAPointerInfoFloating::collectConstantsForGEP(
Attributor &
A,
1538 const OffsetInfo &PtrOI,
1540 unsigned BitWidth =
DL.getIndexTypeSizeInBits(
GEP->getType());
1544 assert(!UsrOI.isUnknown() && !PtrOI.isUnknown() &&
1545 "Don't look for constant values if the offset has already been "
1546 "determined to be unknown.");
1548 if (!
GEP->collectOffset(
DL,
BitWidth, VariableOffsets, ConstantOffset)) {
1554 << (VariableOffsets.
empty() ?
"" :
"not") <<
" constant "
1558 Union.addToAll(ConstantOffset.getSExtValue());
1563 for (
const auto &VI : VariableOffsets) {
1566 if (!PotentialConstantsAA || !PotentialConstantsAA->isValidState()) {
1572 if (PotentialConstantsAA->undefIsContained())
1580 if (AssumedSet.empty())
1584 for (
const auto &ConstOffset : AssumedSet) {
1585 auto CopyPerOffset =
Union;
1586 CopyPerOffset.addToAll(ConstOffset.getSExtValue() *
1587 VI.second.getZExtValue());
1588 Product.merge(CopyPerOffset);
1593 UsrOI = std::move(Union);
1598 using namespace AA::PointerInfo;
1601 Value &AssociatedValue = getAssociatedValue();
1604 OffsetInfoMap[&AssociatedValue].
insert(0);
1606 auto HandlePassthroughUser = [&](
Value *Usr,
Value *CurPtr,
bool &Follow) {
1617 "CurPtr does not exist in the map!");
1619 auto &UsrOI = OffsetInfoMap[Usr];
1620 auto &PtrOI = OffsetInfoMap[CurPtr];
1621 assert(!PtrOI.isUnassigned() &&
1622 "Cannot pass through if the input Ptr was not visited!");
1628 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
1630 User *Usr =
U.getUser();
1631 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Analyze " << *CurPtr <<
" in " << *Usr
1634 "The current pointer offset should have been seeded!");
1635 assert(!OffsetInfoMap[CurPtr].isUnassigned() &&
1636 "Current pointer should be assigned");
1640 return HandlePassthroughUser(Usr, CurPtr, Follow);
1641 if (!isa<GEPOperator>(CE)) {
1642 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Unhandled constant user " << *CE
1647 if (
auto *
GEP = dyn_cast<GEPOperator>(Usr)) {
1650 auto &UsrOI = OffsetInfoMap[Usr];
1651 auto &PtrOI = OffsetInfoMap[CurPtr];
1653 if (UsrOI.isUnknown())
1656 if (PtrOI.isUnknown()) {
1662 Follow = collectConstantsForGEP(
A,
DL, UsrOI, PtrOI,
GEP);
1665 if (isa<PtrToIntInst>(Usr))
1667 if (isa<CastInst>(Usr) || isa<SelectInst>(Usr))
1668 return HandlePassthroughUser(Usr, CurPtr, Follow);
1672 if (
auto *RI = dyn_cast<ReturnInst>(Usr)) {
1673 if (RI->getFunction() == getAssociatedFunction()) {
1674 auto &PtrOI = OffsetInfoMap[CurPtr];
1675 Changed |= setReachesReturn(PtrOI);
1684 if (
auto *
PHI = dyn_cast<PHINode>(Usr)) {
1687 bool IsFirstPHIUser = !OffsetInfoMap.
count(
PHI);
1688 auto &UsrOI = OffsetInfoMap[
PHI];
1689 auto &PtrOI = OffsetInfoMap[CurPtr];
1693 if (PtrOI.isUnknown()) {
1694 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI operand offset unknown "
1695 << *CurPtr <<
" in " << *
PHI <<
"\n");
1696 Follow = !UsrOI.isUnknown();
1702 if (UsrOI == PtrOI) {
1703 assert(!PtrOI.isUnassigned() &&
1704 "Cannot assign if the current Ptr was not visited!");
1705 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI is invariant (so far)");
1715 auto It = OffsetInfoMap.
find(CurPtrBase);
1716 if (It == OffsetInfoMap.
end()) {
1717 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI operand is too complex "
1718 << *CurPtr <<
" in " << *
PHI
1719 <<
" (base: " << *CurPtrBase <<
")\n");
1734 *
PHI->getFunction());
1736 auto BaseOI = It->getSecond();
1737 BaseOI.addToAll(
Offset.getZExtValue());
1738 if (IsFirstPHIUser || BaseOI == UsrOI) {
1739 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI is invariant " << *CurPtr
1740 <<
" in " << *Usr <<
"\n");
1741 return HandlePassthroughUser(Usr, CurPtr, Follow);
1745 dbgs() <<
"[AAPointerInfo] PHI operand pointer offset mismatch "
1746 << *CurPtr <<
" in " << *
PHI <<
"\n");
1757 if (
auto *LoadI = dyn_cast<LoadInst>(Usr)) {
1765 if (!handleAccess(
A, *LoadI,
nullptr, AK,
1766 OffsetInfoMap[CurPtr].Offsets, Changed,
1771 if (
auto *
II = dyn_cast<IntrinsicInst>(&
I))
1772 return II->isAssumeLikeIntrinsic();
1783 }
while (FromI && FromI != ToI);
1789 if (IntrI.getIntrinsicID() != Intrinsic::assume)
1792 if (IntrI.getParent() == BB) {
1793 if (IsImpactedInRange(LoadI->getNextNonDebugInstruction(), &IntrI))
1799 if ((*PredIt) != BB)
1804 if (SuccBB == IntrBB)
1806 if (isa<UnreachableInst>(SuccBB->getTerminator()))
1810 if (IsImpactedInRange(LoadI->getNextNonDebugInstruction(),
1813 if (IsImpactedInRange(&IntrBB->
front(), &IntrI))
1819 std::pair<Value *, IntrinsicInst *> Assumption;
1820 for (
const Use &LoadU : LoadI->
uses()) {
1821 if (
auto *CmpI = dyn_cast<CmpInst>(LoadU.getUser())) {
1822 if (!CmpI->isEquality() || !CmpI->isTrueWhenEqual())
1824 for (
const Use &CmpU : CmpI->
uses()) {
1825 if (
auto *IntrI = dyn_cast<IntrinsicInst>(CmpU.getUser())) {
1826 if (!IsValidAssume(*IntrI))
1828 int Idx = CmpI->getOperandUse(0) == LoadU;
1829 Assumption = {CmpI->getOperand(
Idx), IntrI};
1834 if (Assumption.first)
1839 if (!Assumption.first || !Assumption.second)
1843 << *Assumption.second <<
": " << *LoadI
1844 <<
" == " << *Assumption.first <<
"\n");
1845 bool UsedAssumedInformation =
false;
1846 std::optional<Value *>
Content =
nullptr;
1847 if (Assumption.first)
1849 A.getAssumedSimplified(*Assumption.first, *
this,
1851 return handleAccess(
1852 A, *Assumption.second,
Content, AccessKind::AK_ASSUMPTION,
1853 OffsetInfoMap[CurPtr].Offsets, Changed, *LoadI->getType());
1858 for (
auto *OtherOp : OtherOps) {
1859 if (OtherOp == CurPtr) {
1862 <<
"[AAPointerInfo] Escaping use in store like instruction " <<
I
1874 bool UsedAssumedInformation =
false;
1875 std::optional<Value *>
Content =
nullptr;
1879 return handleAccess(
A,
I,
Content, AK, OffsetInfoMap[CurPtr].Offsets,
1883 if (
auto *StoreI = dyn_cast<StoreInst>(Usr))
1884 return HandleStoreLike(*StoreI, StoreI->getValueOperand(),
1885 *StoreI->getValueOperand()->getType(),
1886 {StoreI->getValueOperand()}, AccessKind::AK_W);
1887 if (
auto *RMWI = dyn_cast<AtomicRMWInst>(Usr))
1888 return HandleStoreLike(*RMWI,
nullptr, *RMWI->getValOperand()->getType(),
1889 {RMWI->getValOperand()}, AccessKind::AK_RW);
1890 if (
auto *CXI = dyn_cast<AtomicCmpXchgInst>(Usr))
1891 return HandleStoreLike(
1892 *CXI,
nullptr, *CXI->getNewValOperand()->getType(),
1893 {CXI->getCompareOperand(), CXI->getNewValOperand()},
1896 if (
auto *CB = dyn_cast<CallBase>(Usr)) {
1900 A.getInfoCache().getTargetLibraryInfoForFunction(*CB->
getFunction());
1911 Changed = translateAndAddState(
A, *CSArgPI, OffsetInfoMap[CurPtr], *CB,
1914 if (!CSArgPI->reachesReturn())
1915 return isValidState();
1918 if (!Callee ||
Callee->arg_size() <= ArgNo)
1920 bool UsedAssumedInformation =
false;
1921 auto ReturnedValue =
A.getAssumedSimplified(
1925 dyn_cast_or_null<Argument>(ReturnedValue.value_or(
nullptr));
1926 auto *Arg =
Callee->getArg(ArgNo);
1927 if (ReturnedArg && Arg != ReturnedArg)
1929 bool IsRetMustAcc = IsArgMustAcc && (ReturnedArg == Arg);
1934 OffsetInfo OI = OffsetInfoMap[CurPtr];
1935 CSArgPI->addReturnedOffsetsTo(OI);
1937 translateAndAddState(
A, *CSRetPI, OI, *CB, IsRetMustAcc) | Changed;
1938 return isValidState();
1940 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Call user not handled " << *CB
1945 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] User not handled " << *Usr <<
"\n");
1948 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
1949 assert(OffsetInfoMap.
count(OldU) &&
"Old use should be known already!");
1950 assert(!OffsetInfoMap[OldU].isUnassigned() &&
"Old use should be assinged");
1951 if (OffsetInfoMap.
count(NewU)) {
1953 if (!(OffsetInfoMap[NewU] == OffsetInfoMap[OldU])) {
1954 dbgs() <<
"[AAPointerInfo] Equivalent use callback failed: "
1955 << OffsetInfoMap[NewU] <<
" vs " << OffsetInfoMap[OldU]
1959 return OffsetInfoMap[NewU] == OffsetInfoMap[OldU];
1962 return HandlePassthroughUser(NewU.get(), OldU.
get(), Unused);
1964 if (!
A.checkForAllUses(UsePred, *
this, AssociatedValue,
1966 true, EquivalentUseCB)) {
1967 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Check for all uses failed, abort!\n");
1968 return indicatePessimisticFixpoint();
1972 dbgs() <<
"Accesses by bin after update:\n";
1979struct AAPointerInfoReturned final : AAPointerInfoImpl {
1981 : AAPointerInfoImpl(IRP,
A) {}
1985 return indicatePessimisticFixpoint();
1989 void trackStatistics()
const override {
1990 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1994struct AAPointerInfoArgument final : AAPointerInfoFloating {
1996 : AAPointerInfoFloating(IRP,
A) {}
1999 void trackStatistics()
const override {
2000 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
2004struct AAPointerInfoCallSiteArgument final : AAPointerInfoFloating {
2006 : AAPointerInfoFloating(IRP,
A) {}
2010 using namespace AA::PointerInfo;
2014 if (
auto *
MI = dyn_cast_or_null<MemIntrinsic>(getCtxI())) {
2018 LengthVal =
Length->getSExtValue();
2019 unsigned ArgNo = getIRPosition().getCallSiteArgNo();
2022 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Unhandled memory intrinsic "
2024 return indicatePessimisticFixpoint();
2027 ArgNo == 0 ? AccessKind::AK_MUST_WRITE : AccessKind::AK_MUST_READ;
2029 Changed | addAccess(
A, {0, LengthVal}, *
MI,
nullptr,
Kind,
nullptr);
2032 dbgs() <<
"Accesses by bin after update:\n";
2043 Argument *Arg = getAssociatedArgument();
2048 if (ArgAA && ArgAA->getState().isValidState())
2049 return translateAndAddStateFromCallee(
A, *ArgAA,
2050 *cast<CallBase>(getCtxI()));
2052 return indicatePessimisticFixpoint();
2055 bool IsKnownNoCapture;
2056 if (!AA::hasAssumedIRAttr<Attribute::NoCapture>(
2057 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNoCapture))
2058 return indicatePessimisticFixpoint();
2060 bool IsKnown =
false;
2062 return ChangeStatus::UNCHANGED;
2065 ReadOnly ? AccessKind::AK_MAY_READ : AccessKind::AK_MAY_READ_WRITE;
2071 void trackStatistics()
const override {
2072 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
2076struct AAPointerInfoCallSiteReturned final : AAPointerInfoFloating {
2078 : AAPointerInfoFloating(IRP,
A) {}
2081 void trackStatistics()
const override {
2082 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
2096 assert(!AA::hasAssumedIRAttr<Attribute::NoUnwind>(
2097 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2101 const std::string getAsStr(
Attributor *
A)
const override {
2102 return getAssumed() ?
"nounwind" :
"may-unwind";
2108 (
unsigned)Instruction::Invoke, (
unsigned)Instruction::CallBr,
2109 (
unsigned)Instruction::Call, (
unsigned)Instruction::CleanupRet,
2110 (
unsigned)Instruction::CatchSwitch, (
unsigned)Instruction::Resume};
2113 if (!
I.mayThrow(
true))
2116 if (
const auto *CB = dyn_cast<CallBase>(&
I)) {
2117 bool IsKnownNoUnwind;
2118 return AA::hasAssumedIRAttr<Attribute::NoUnwind>(
2125 bool UsedAssumedInformation =
false;
2126 if (!
A.checkForAllInstructions(CheckForNoUnwind, *
this, Opcodes,
2127 UsedAssumedInformation))
2128 return indicatePessimisticFixpoint();
2130 return ChangeStatus::UNCHANGED;
2134struct AANoUnwindFunction final :
public AANoUnwindImpl {
2136 : AANoUnwindImpl(IRP,
A) {}
2143struct AANoUnwindCallSite final
2144 : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl> {
2146 : AACalleeToCallSite<
AANoUnwind, AANoUnwindImpl>(IRP,
A) {}
2157 case Intrinsic::nvvm_barrier0:
2158 case Intrinsic::nvvm_barrier0_and:
2159 case Intrinsic::nvvm_barrier0_or:
2160 case Intrinsic::nvvm_barrier0_popc:
2162 case Intrinsic::amdgcn_s_barrier:
2163 if (ExecutedAligned)
2176 if (
auto *FI = dyn_cast<FenceInst>(
I))
2179 if (
auto *AI = dyn_cast<AtomicCmpXchgInst>(
I)) {
2186 switch (
I->getOpcode()) {
2187 case Instruction::AtomicRMW:
2188 Ordering = cast<AtomicRMWInst>(
I)->getOrdering();
2190 case Instruction::Store:
2191 Ordering = cast<StoreInst>(
I)->getOrdering();
2193 case Instruction::Load:
2194 Ordering = cast<LoadInst>(
I)->getOrdering();
2198 "New atomic operations need to be known in the attributor.");
2209 if (
auto *
MI = dyn_cast<MemIntrinsic>(
I))
2210 return !
MI->isVolatile();
2221 assert(!AA::hasAssumedIRAttr<Attribute::NoSync>(
A,
nullptr, getIRPosition(),
2222 DepClassTy::NONE, IsKnown));
2226 const std::string getAsStr(
Attributor *
A)
const override {
2227 return getAssumed() ?
"nosync" :
"may-sync";
2243 if (
I.mayReadOrWriteMemory())
2248 if (AA::hasAssumedIRAttr<Attribute::NoSync>(
2257 bool UsedAssumedInformation =
false;
2258 if (!
A.checkForAllReadWriteInstructions(CheckRWInstForNoSync, *
this,
2259 UsedAssumedInformation) ||
2260 !
A.checkForAllCallLikeInstructions(CheckForNoSync, *
this,
2261 UsedAssumedInformation))
2262 return indicatePessimisticFixpoint();
2267struct AANoSyncFunction final :
public AANoSyncImpl {
2269 : AANoSyncImpl(IRP,
A) {}
2276struct AANoSyncCallSite final : AACalleeToCallSite<AANoSync, AANoSyncImpl> {
2278 : AACalleeToCallSite<
AANoSync, AANoSyncImpl>(IRP,
A) {}
2288struct AANoFreeImpl :
public AANoFree {
2294 assert(!AA::hasAssumedIRAttr<Attribute::NoFree>(
A,
nullptr, getIRPosition(),
2295 DepClassTy::NONE, IsKnown));
2303 return AA::hasAssumedIRAttr<Attribute::NoFree>(
2305 DepClassTy::REQUIRED, IsKnown);
2308 bool UsedAssumedInformation =
false;
2309 if (!
A.checkForAllCallLikeInstructions(CheckForNoFree, *
this,
2310 UsedAssumedInformation))
2311 return indicatePessimisticFixpoint();
2312 return ChangeStatus::UNCHANGED;
2316 const std::string getAsStr(
Attributor *
A)
const override {
2317 return getAssumed() ?
"nofree" :
"may-free";
2321struct AANoFreeFunction final :
public AANoFreeImpl {
2323 : AANoFreeImpl(IRP,
A) {}
2330struct AANoFreeCallSite final : AACalleeToCallSite<AANoFree, AANoFreeImpl> {
2332 : AACalleeToCallSite<
AANoFree, AANoFreeImpl>(IRP,
A) {}
2339struct AANoFreeFloating : AANoFreeImpl {
2341 : AANoFreeImpl(IRP,
A) {}
2351 if (AA::hasAssumedIRAttr<Attribute::NoFree>(
A,
this,
2353 DepClassTy::OPTIONAL, IsKnown))
2354 return ChangeStatus::UNCHANGED;
2356 Value &AssociatedValue = getIRPosition().getAssociatedValue();
2357 auto Pred = [&](
const Use &
U,
bool &Follow) ->
bool {
2359 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
2367 return AA::hasAssumedIRAttr<Attribute::NoFree>(
2369 DepClassTy::REQUIRED, IsKnown);
2372 if (isa<GetElementPtrInst>(UserI) || isa<PHINode>(UserI) ||
2373 isa<SelectInst>(UserI)) {
2377 if (isa<StoreInst>(UserI) || isa<LoadInst>(UserI))
2380 if (isa<ReturnInst>(UserI) && getIRPosition().isArgumentPosition())
2386 if (!
A.checkForAllUses(Pred, *
this, AssociatedValue))
2387 return indicatePessimisticFixpoint();
2389 return ChangeStatus::UNCHANGED;
2394struct AANoFreeArgument final : AANoFreeFloating {
2396 : AANoFreeFloating(IRP,
A) {}
2403struct AANoFreeCallSiteArgument final : AANoFreeFloating {
2405 : AANoFreeFloating(IRP,
A) {}
2413 Argument *Arg = getAssociatedArgument();
2415 return indicatePessimisticFixpoint();
2418 if (AA::hasAssumedIRAttr<Attribute::NoFree>(
A,
this, ArgPos,
2419 DepClassTy::REQUIRED, IsKnown))
2420 return ChangeStatus::UNCHANGED;
2421 return indicatePessimisticFixpoint();
2429struct AANoFreeReturned final : AANoFreeFloating {
2431 : AANoFreeFloating(IRP,
A) {
2446 void trackStatistics()
const override {}
2450struct AANoFreeCallSiteReturned final : AANoFreeFloating {
2452 : AANoFreeFloating(IRP,
A) {}
2455 return ChangeStatus::UNCHANGED;
2466 bool IgnoreSubsumingPositions) {
2468 AttrKinds.
push_back(Attribute::NonNull);
2471 AttrKinds.
push_back(Attribute::Dereferenceable);
2472 if (
A.hasAttr(IRP, AttrKinds, IgnoreSubsumingPositions, Attribute::NonNull))
2479 if (!Fn->isDeclaration()) {
2489 bool UsedAssumedInformation =
false;
2490 if (!
A.checkForAllInstructions(
2492 Worklist.push_back({*cast<ReturnInst>(I).getReturnValue(), &I});
2496 UsedAssumedInformation,
false,
true))
2508 Attribute::NonNull)});
2513static int64_t getKnownNonNullAndDerefBytesForUse(
2515 const Use *U,
const Instruction *
I,
bool &IsNonNull,
bool &TrackUse) {
2518 const Value *UseV =
U->get();
2525 if (isa<CastInst>(
I)) {
2530 if (isa<GetElementPtrInst>(
I)) {
2540 if (
const auto *CB = dyn_cast<CallBase>(
I)) {
2543 U, {Attribute::NonNull, Attribute::Dereferenceable})) {
2560 bool IsKnownNonNull;
2561 AA::hasAssumedIRAttr<Attribute::NonNull>(
A, &QueryingAA, IRP,
2563 IsNonNull |= IsKnownNonNull;
2570 if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
2571 Loc->Size.isScalable() ||
I->isVolatile())
2577 if (
Base &&
Base == &AssociatedValue) {
2578 int64_t DerefBytes = Loc->Size.getValue() +
Offset;
2580 return std::max(int64_t(0), DerefBytes);
2587 int64_t DerefBytes = Loc->Size.getValue();
2589 return std::max(int64_t(0), DerefBytes);
2600 Value &
V = *getAssociatedValue().stripPointerCasts();
2601 if (isa<ConstantPointerNull>(V)) {
2602 indicatePessimisticFixpoint();
2607 followUsesInMBEC(*
this,
A, getState(), *CtxI);
2613 bool IsNonNull =
false;
2614 bool TrackUse =
false;
2615 getKnownNonNullAndDerefBytesForUse(
A, *
this, getAssociatedValue(), U,
I,
2616 IsNonNull, TrackUse);
2617 State.setKnown(IsNonNull);
2622 const std::string getAsStr(
Attributor *
A)
const override {
2623 return getAssumed() ?
"nonnull" :
"may-null";
2628struct AANonNullFloating :
public AANonNullImpl {
2630 : AANonNullImpl(IRP,
A) {}
2635 bool IsKnownNonNull;
2636 return AA::hasAssumedIRAttr<Attribute::NonNull>(
2637 A, *
this, IRP, DepClassTy::OPTIONAL, IsKnownNonNull);
2641 bool UsedAssumedInformation =
false;
2642 Value *AssociatedValue = &getAssociatedValue();
2644 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
2649 Values.
size() != 1 || Values.
front().getValue() != AssociatedValue;
2653 if (
auto *
PHI = dyn_cast<PHINode>(AssociatedValue))
2655 return AA::hasAssumedIRAttr<Attribute::NonNull>(
2656 A, this, IRPosition::value(*Op), DepClassTy::OPTIONAL,
2659 return ChangeStatus::UNCHANGED;
2660 if (
auto *
Select = dyn_cast<SelectInst>(AssociatedValue))
2661 if (AA::hasAssumedIRAttr<Attribute::NonNull>(
2663 DepClassTy::OPTIONAL, IsKnown) &&
2664 AA::hasAssumedIRAttr<Attribute::NonNull>(
2666 DepClassTy::OPTIONAL, IsKnown))
2667 return ChangeStatus::UNCHANGED;
2674 if (AVIRP == getIRPosition() || !CheckIRP(AVIRP))
2675 return indicatePessimisticFixpoint();
2676 return ChangeStatus::UNCHANGED;
2679 for (
const auto &VAC : Values)
2681 return indicatePessimisticFixpoint();
2683 return ChangeStatus::UNCHANGED;
2691struct AANonNullReturned final
2692 : AAReturnedFromReturnedValues<AANonNull, AANonNull, AANonNull::StateType,
2693 false, AANonNull::IRAttributeKind, false> {
2700 const std::string getAsStr(
Attributor *
A)
const override {
2701 return getAssumed() ?
"nonnull" :
"may-null";
2709struct AANonNullArgument final
2710 : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl> {
2712 : AAArgumentFromCallSiteArguments<
AANonNull, AANonNullImpl>(IRP,
A) {}
2718struct AANonNullCallSiteArgument final : AANonNullFloating {
2720 : AANonNullFloating(IRP,
A) {}
2727struct AANonNullCallSiteReturned final
2728 : AACalleeToCallSite<AANonNull, AANonNullImpl> {
2730 : AACalleeToCallSite<
AANonNull, AANonNullImpl>(IRP,
A) {}
2746 assert(!AA::hasAssumedIRAttr<Attribute::MustProgress>(
2747 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2752 const std::string getAsStr(
Attributor *
A)
const override {
2753 return getAssumed() ?
"mustprogress" :
"may-not-progress";
2757struct AAMustProgressFunction final : AAMustProgressImpl {
2759 : AAMustProgressImpl(IRP,
A) {}
2764 if (AA::hasAssumedIRAttr<Attribute::WillReturn>(
2765 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnown)) {
2767 return indicateOptimisticFixpoint();
2768 return ChangeStatus::UNCHANGED;
2773 bool IsKnownMustProgress;
2774 return AA::hasAssumedIRAttr<Attribute::MustProgress>(
2775 A,
this, IPos, DepClassTy::REQUIRED, IsKnownMustProgress,
2779 bool AllCallSitesKnown =
true;
2780 if (!
A.checkForAllCallSites(CheckForMustProgress, *
this,
2783 return indicatePessimisticFixpoint();
2785 return ChangeStatus::UNCHANGED;
2789 void trackStatistics()
const override {
2795struct AAMustProgressCallSite final : AAMustProgressImpl {
2797 : AAMustProgressImpl(IRP,
A) {}
2806 bool IsKnownMustProgress;
2807 if (!AA::hasAssumedIRAttr<Attribute::MustProgress>(
2808 A,
this, FnPos, DepClassTy::REQUIRED, IsKnownMustProgress))
2809 return indicatePessimisticFixpoint();
2810 return ChangeStatus::UNCHANGED;
2814 void trackStatistics()
const override {
2829 assert(!AA::hasAssumedIRAttr<Attribute::NoRecurse>(
2830 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2835 const std::string getAsStr(
Attributor *
A)
const override {
2836 return getAssumed() ?
"norecurse" :
"may-recurse";
2840struct AANoRecurseFunction final : AANoRecurseImpl {
2842 : AANoRecurseImpl(IRP,
A) {}
2849 bool IsKnownNoRecurse;
2850 if (!AA::hasAssumedIRAttr<Attribute::NoRecurse>(
2853 DepClassTy::NONE, IsKnownNoRecurse))
2855 return IsKnownNoRecurse;
2857 bool UsedAssumedInformation =
false;
2858 if (
A.checkForAllCallSites(CallSitePred, *
this,
true,
2859 UsedAssumedInformation)) {
2865 if (!UsedAssumedInformation)
2866 indicateOptimisticFixpoint();
2867 return ChangeStatus::UNCHANGED;
2872 DepClassTy::REQUIRED);
2873 if (EdgeReachability && EdgeReachability->
canReach(
A, *getAnchorScope()))
2874 return indicatePessimisticFixpoint();
2875 return ChangeStatus::UNCHANGED;
2882struct AANoRecurseCallSite final
2883 : AACalleeToCallSite<AANoRecurse, AANoRecurseImpl> {
2885 : AACalleeToCallSite<
AANoRecurse, AANoRecurseImpl>(IRP,
A) {}
2900 const std::string getAsStr(
Attributor *
A)
const override {
2901 return getAssumed() ?
"non-convergent" :
"may-be-convergent";
2905struct AANonConvergentFunction final : AANonConvergentImpl {
2907 : AANonConvergentImpl(IRP,
A) {}
2913 auto CalleeIsNotConvergent = [&](
Instruction &Inst) {
2914 CallBase &CB = cast<CallBase>(Inst);
2916 if (!Callee ||
Callee->isIntrinsic()) {
2919 if (
Callee->isDeclaration()) {
2920 return !
Callee->hasFnAttribute(Attribute::Convergent);
2927 bool UsedAssumedInformation =
false;
2928 if (!
A.checkForAllCallLikeInstructions(CalleeIsNotConvergent, *
this,
2929 UsedAssumedInformation)) {
2930 return indicatePessimisticFixpoint();
2932 return ChangeStatus::UNCHANGED;
2936 if (isKnownNotConvergent() &&
2937 A.hasAttr(getIRPosition(), Attribute::Convergent)) {
2938 A.removeAttrs(getIRPosition(), {Attribute::Convergent});
2939 return ChangeStatus::CHANGED;
2941 return ChangeStatus::UNCHANGED;
2958 const size_t UBPrevSize = KnownUBInsts.size();
2959 const size_t NoUBPrevSize = AssumedNoUBInsts.size();
2963 if (
I.isVolatile() &&
I.mayWriteToMemory())
2967 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
2976 "Expected pointer operand of memory accessing instruction");
2980 std::optional<Value *> SimplifiedPtrOp =
2981 stopOnUndefOrAssumed(
A, PtrOp, &
I);
2982 if (!SimplifiedPtrOp || !*SimplifiedPtrOp)
2984 const Value *PtrOpVal = *SimplifiedPtrOp;
2989 if (!isa<ConstantPointerNull>(PtrOpVal)) {
2990 AssumedNoUBInsts.insert(&
I);
3002 AssumedNoUBInsts.insert(&
I);
3004 KnownUBInsts.insert(&
I);
3013 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
3017 auto *BrInst = cast<BranchInst>(&
I);
3020 if (BrInst->isUnconditional())
3025 std::optional<Value *> SimplifiedCond =
3026 stopOnUndefOrAssumed(
A, BrInst->getCondition(), BrInst);
3027 if (!SimplifiedCond || !*SimplifiedCond)
3029 AssumedNoUBInsts.insert(&
I);
3037 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
3046 for (
unsigned idx = 0; idx < CB.
arg_size(); idx++) {
3052 if (idx >=
Callee->arg_size())
3064 bool IsKnownNoUndef;
3065 AA::hasAssumedIRAttr<Attribute::NoUndef>(
3066 A,
this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNoUndef);
3067 if (!IsKnownNoUndef)
3069 bool UsedAssumedInformation =
false;
3070 std::optional<Value *> SimplifiedVal =
3073 if (UsedAssumedInformation)
3075 if (SimplifiedVal && !*SimplifiedVal)
3077 if (!SimplifiedVal || isa<UndefValue>(**SimplifiedVal)) {
3078 KnownUBInsts.insert(&
I);
3082 !isa<ConstantPointerNull>(**SimplifiedVal))
3084 bool IsKnownNonNull;
3085 AA::hasAssumedIRAttr<Attribute::NonNull>(
3086 A,
this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNonNull);
3088 KnownUBInsts.insert(&
I);
3094 auto &RI = cast<ReturnInst>(
I);
3097 std::optional<Value *> SimplifiedRetValue =
3098 stopOnUndefOrAssumed(
A, RI.getReturnValue(), &
I);
3099 if (!SimplifiedRetValue || !*SimplifiedRetValue)
3116 if (isa<ConstantPointerNull>(*SimplifiedRetValue)) {
3117 bool IsKnownNonNull;
3118 AA::hasAssumedIRAttr<Attribute::NonNull>(
3122 KnownUBInsts.insert(&
I);
3128 bool UsedAssumedInformation =
false;
3129 A.checkForAllInstructions(InspectMemAccessInstForUB, *
this,
3130 {Instruction::Load, Instruction::Store,
3131 Instruction::AtomicCmpXchg,
3132 Instruction::AtomicRMW},
3133 UsedAssumedInformation,
3135 A.checkForAllInstructions(InspectBrInstForUB, *
this, {Instruction::Br},
3136 UsedAssumedInformation,
3138 A.checkForAllCallLikeInstructions(InspectCallSiteForUB, *
this,
3139 UsedAssumedInformation);
3143 if (!getAnchorScope()->getReturnType()->isVoidTy()) {
3145 if (!
A.isAssumedDead(ReturnIRP,
this,
nullptr, UsedAssumedInformation)) {
3146 bool IsKnownNoUndef;
3147 AA::hasAssumedIRAttr<Attribute::NoUndef>(
3148 A,
this, ReturnIRP, DepClassTy::NONE, IsKnownNoUndef);
3150 A.checkForAllInstructions(InspectReturnInstForUB, *
this,
3151 {Instruction::Ret}, UsedAssumedInformation,
3156 if (NoUBPrevSize != AssumedNoUBInsts.size() ||
3157 UBPrevSize != KnownUBInsts.size())
3158 return ChangeStatus::CHANGED;
3159 return ChangeStatus::UNCHANGED;
3163 return KnownUBInsts.count(
I);
3166 bool isAssumedToCauseUB(
Instruction *
I)
const override {
3173 switch (
I->getOpcode()) {
3174 case Instruction::Load:
3175 case Instruction::Store:
3176 case Instruction::AtomicCmpXchg:
3177 case Instruction::AtomicRMW:
3178 return !AssumedNoUBInsts.count(
I);
3179 case Instruction::Br: {
3180 auto *BrInst = cast<BranchInst>(
I);
3181 if (BrInst->isUnconditional())
3183 return !AssumedNoUBInsts.count(
I);
3192 if (KnownUBInsts.empty())
3193 return ChangeStatus::UNCHANGED;
3195 A.changeToUnreachableAfterManifest(
I);
3196 return ChangeStatus::CHANGED;
3200 const std::string getAsStr(
Attributor *
A)
const override {
3201 return getAssumed() ?
"undefined-behavior" :
"no-ub";
3246 bool UsedAssumedInformation =
false;
3247 std::optional<Value *> SimplifiedV =
3250 if (!UsedAssumedInformation) {
3255 KnownUBInsts.insert(
I);
3256 return std::nullopt;
3262 if (isa<UndefValue>(V)) {
3263 KnownUBInsts.insert(
I);
3264 return std::nullopt;
3270struct AAUndefinedBehaviorFunction final : AAUndefinedBehaviorImpl {
3272 : AAUndefinedBehaviorImpl(IRP,
A) {}
3275 void trackStatistics()
const override {
3277 "Number of instructions known to have UB");
3279 KnownUBInsts.size();
3300 if (SCCI.hasCycle())
3310 for (
auto *L : LI->getLoopsInPreorder()) {
3324 assert(!AA::hasAssumedIRAttr<Attribute::WillReturn>(
3325 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
3330 bool isImpliedByMustprogressAndReadonly(
Attributor &
A,
bool KnownOnly) {
3331 if (!
A.hasAttr(getIRPosition(), {Attribute::MustProgress}))
3336 return IsKnown || !KnownOnly;
3342 if (isImpliedByMustprogressAndReadonly(
A,
false))
3343 return ChangeStatus::UNCHANGED;
3348 if (AA::hasAssumedIRAttr<Attribute::WillReturn>(
3349 A,
this, IPos, DepClassTy::REQUIRED, IsKnown)) {
3355 bool IsKnownNoRecurse;
3356 return AA::hasAssumedIRAttr<Attribute::NoRecurse>(
3357 A,
this, IPos, DepClassTy::REQUIRED, IsKnownNoRecurse);
3360 bool UsedAssumedInformation =
false;
3361 if (!
A.checkForAllCallLikeInstructions(CheckForWillReturn, *
this,
3362 UsedAssumedInformation))
3363 return indicatePessimisticFixpoint();
3365 return ChangeStatus::UNCHANGED;
3369 const std::string getAsStr(
Attributor *
A)
const override {
3370 return getAssumed() ?
"willreturn" :
"may-noreturn";
3374struct AAWillReturnFunction final : AAWillReturnImpl {
3376 : AAWillReturnImpl(IRP,
A) {}
3380 AAWillReturnImpl::initialize(
A);
3383 assert(
F &&
"Did expect an anchor function");
3384 if (
F->isDeclaration() || mayContainUnboundedCycle(*
F,
A))
3385 indicatePessimisticFixpoint();
3393struct AAWillReturnCallSite final
3394 : AACalleeToCallSite<AAWillReturn, AAWillReturnImpl> {
3396 : AACalleeToCallSite<
AAWillReturn, AAWillReturnImpl>(IRP,
A) {}
3400 if (isImpliedByMustprogressAndReadonly(
A,
false))
3401 return ChangeStatus::UNCHANGED;
3403 return AACalleeToCallSite::updateImpl(
A);
3425 const ToTy *To =
nullptr;
3435 assert(Hash == 0 &&
"Computed hash twice!");
3439 detail::combineHashValue(PairDMI ::getHashValue({
From, To}),
3440 InstSetDMI::getHashValue(ExclusionSet));
3450 :
From(&
From), To(&To), ExclusionSet(ES) {
3452 if (!ES || ES->
empty()) {
3453 ExclusionSet =
nullptr;
3454 }
else if (MakeUnique) {
3455 ExclusionSet =
A.getInfoCache().getOrCreateUniqueBlockExecutionSet(ES);
3460 :
From(RQI.
From), To(RQI.To), ExclusionSet(RQI.ExclusionSet) {}
3473 return &TombstoneKey;
3480 if (!PairDMI::isEqual({
LHS->From,
LHS->To}, {
RHS->From,
RHS->To}))
3482 return InstSetDMI::isEqual(
LHS->ExclusionSet,
RHS->ExclusionSet);
3486#define DefineKeys(ToTy) \
3488 ReachabilityQueryInfo<ToTy> \
3489 DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::EmptyKey = \
3490 ReachabilityQueryInfo<ToTy>( \
3491 DenseMapInfo<const Instruction *>::getEmptyKey(), \
3492 DenseMapInfo<const ToTy *>::getEmptyKey()); \
3494 ReachabilityQueryInfo<ToTy> \
3495 DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::TombstoneKey = \
3496 ReachabilityQueryInfo<ToTy>( \
3497 DenseMapInfo<const Instruction *>::getTombstoneKey(), \
3498 DenseMapInfo<const ToTy *>::getTombstoneKey());
3507template <
typename BaseTy,
typename ToTy>
3508struct CachedReachabilityAA :
public BaseTy {
3514 bool isQueryAA()
const override {
return true; }
3519 for (
unsigned u = 0,
e = QueryVector.size();
u <
e; ++
u) {
3520 RQITy *RQI = QueryVector[
u];
3521 if (RQI->Result == RQITy::Reachable::No &&
3523 Changed = ChangeStatus::CHANGED;
3529 bool IsTemporaryRQI) = 0;
3532 RQITy &RQI,
bool UsedExclusionSet,
bool IsTemporaryRQI) {
3537 QueryCache.erase(&RQI);
3543 if (
Result == RQITy::Reachable::Yes || !UsedExclusionSet) {
3544 RQITy PlainRQI(RQI.From, RQI.To);
3545 if (!QueryCache.count(&PlainRQI)) {
3546 RQITy *RQIPtr =
new (
A.Allocator) RQITy(RQI.From, RQI.To);
3548 QueryVector.push_back(RQIPtr);
3549 QueryCache.insert(RQIPtr);
3554 if (IsTemporaryRQI &&
Result != RQITy::Reachable::Yes && UsedExclusionSet) {
3555 assert((!RQI.ExclusionSet || !RQI.ExclusionSet->empty()) &&
3556 "Did not expect empty set!");
3557 RQITy *RQIPtr =
new (
A.Allocator)
3558 RQITy(
A, *RQI.From, *RQI.To, RQI.ExclusionSet,
true);
3559 assert(RQIPtr->Result == RQITy::Reachable::No &&
"Already reachable?");
3561 assert(!QueryCache.count(RQIPtr));
3562 QueryVector.push_back(RQIPtr);
3563 QueryCache.insert(RQIPtr);
3566 if (
Result == RQITy::Reachable::No && IsTemporaryRQI)
3567 A.registerForUpdate(*
this);
3568 return Result == RQITy::Reachable::Yes;
3571 const std::string getAsStr(
Attributor *
A)
const override {
3573 return "#queries(" + std::to_string(QueryVector.size()) +
")";
3576 bool checkQueryCache(
Attributor &
A, RQITy &StackRQI,
3577 typename RQITy::Reachable &
Result) {
3578 if (!this->getState().isValidState()) {
3579 Result = RQITy::Reachable::Yes;
3585 if (StackRQI.ExclusionSet) {
3586 RQITy PlainRQI(StackRQI.From, StackRQI.To);
3587 auto It = QueryCache.find(&PlainRQI);
3588 if (It != QueryCache.end() && (*It)->Result == RQITy::Reachable::No) {
3589 Result = RQITy::Reachable::No;
3594 auto It = QueryCache.find(&StackRQI);
3595 if (It != QueryCache.end()) {
3602 QueryCache.insert(&StackRQI);
3611struct AAIntraFnReachabilityFunction final
3612 :
public CachedReachabilityAA<AAIntraFnReachability, Instruction> {
3613 using Base = CachedReachabilityAA<AAIntraFnReachability, Instruction>;
3620 bool isAssumedReachable(
3623 auto *NonConstThis =
const_cast<AAIntraFnReachabilityFunction *
>(
this);
3627 RQITy StackRQI(
A,
From, To, ExclusionSet,
false);
3628 typename RQITy::Reachable
Result;
3629 if (!NonConstThis->checkQueryCache(
A, StackRQI, Result))
3630 return NonConstThis->isReachableImpl(
A, StackRQI,
3632 return Result == RQITy::Reachable::Yes;
3639 A.getAAFor<
AAIsDead>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
3642 [&](
const auto &DeadEdge) {
3643 return LivenessAA->isEdgeDead(DeadEdge.first,
3647 return LivenessAA->isAssumedDead(BB);
3649 return ChangeStatus::UNCHANGED;
3653 return Base::updateImpl(
A);
3657 bool IsTemporaryRQI)
override {
3659 bool UsedExclusionSet =
false;
3664 while (IP && IP != &To) {
3665 if (ExclusionSet && IP != Origin && ExclusionSet->
count(IP)) {
3666 UsedExclusionSet =
true;
3677 "Not an intra-procedural query!");
3681 if (FromBB == ToBB &&
3682 WillReachInBlock(*RQI.From, *RQI.To, RQI.ExclusionSet))
3683 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3688 if (!WillReachInBlock(ToBB->
front(), *RQI.To, RQI.ExclusionSet))
3689 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3694 if (RQI.ExclusionSet)
3695 for (
auto *
I : *RQI.ExclusionSet)
3696 if (
I->getFunction() == Fn)
3697 ExclusionBlocks.
insert(
I->getParent());
3700 if (ExclusionBlocks.
count(FromBB) &&
3703 return rememberResult(
A, RQITy::Reachable::No, RQI,
true, IsTemporaryRQI);
3706 A.getAAFor<
AAIsDead>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
3707 if (LivenessAA && LivenessAA->isAssumedDead(ToBB)) {
3708 DeadBlocks.insert(ToBB);
3709 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3718 while (!Worklist.
empty()) {
3720 if (!Visited.
insert(BB).second)
3723 if (LivenessAA && LivenessAA->isEdgeDead(BB, SuccBB)) {
3724 LocalDeadEdges.
insert({BB, SuccBB});
3729 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3732 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3735 if (ExclusionBlocks.
count(SuccBB)) {
3736 UsedExclusionSet =
true;
3743 DeadEdges.insert(LocalDeadEdges.
begin(), LocalDeadEdges.
end());
3744 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3749 void trackStatistics()
const override {}
3769 bool IgnoreSubsumingPositions) {
3770 assert(ImpliedAttributeKind == Attribute::NoAlias &&
3771 "Unexpected attribute kind");
3774 if (isa<AllocaInst>(Val))
3777 IgnoreSubsumingPositions =
true;
3780 if (isa<UndefValue>(Val))
3783 if (isa<ConstantPointerNull>(Val) &&
3788 if (
A.hasAttr(IRP, {Attribute::ByVal, Attribute::NoAlias},
3789 IgnoreSubsumingPositions, Attribute::NoAlias))
3799 "Noalias is a pointer attribute");
3802 const std::string getAsStr(
Attributor *
A)
const override {
3803 return getAssumed() ?
"noalias" :
"may-alias";
3808struct AANoAliasFloating final : AANoAliasImpl {
3810 : AANoAliasImpl(IRP,
A) {}
3815 return indicatePessimisticFixpoint();
3819 void trackStatistics()
const override {
3825struct AANoAliasArgument final
3826 : AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl> {
3827 using Base = AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl>;
3839 if (AA::hasAssumedIRAttr<Attribute::NoSync>(
3841 DepClassTy::OPTIONAL, IsKnownNoSycn))
3842 return Base::updateImpl(
A);
3847 return Base::updateImpl(
A);
3851 bool UsedAssumedInformation =
false;
3852 if (
A.checkForAllCallSites(
3854 true, UsedAssumedInformation))
3855 return Base::updateImpl(
A);
3863 return indicatePessimisticFixpoint();
3870struct AANoAliasCallSiteArgument final : AANoAliasImpl {
3872 : AANoAliasImpl(IRP,
A) {}
3878 const CallBase &CB,
unsigned OtherArgNo) {
3880 if (this->getCalleeArgNo() == (
int)OtherArgNo)
3892 if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadNone()) {
3893 A.recordDependence(*CBArgMemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3900 if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadOnly() &&
3902 A.recordDependence(MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3903 A.recordDependence(*CBArgMemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3909 AAR =
A.getInfoCache().getAnalysisResultForFunction<
AAManager>(
3913 bool IsAliasing = !AAR || !AAR->
isNoAlias(&getAssociatedValue(), ArgOp);
3915 "callsite arguments: "
3916 << getAssociatedValue() <<
" " << *ArgOp <<
" => "
3917 << (IsAliasing ?
"" :
"no-") <<
"alias \n");
3922 bool isKnownNoAliasDueToNoAliasPreservation(
3942 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
3953 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
3958 bool IsKnownNoCapture;
3959 if (AA::hasAssumedIRAttr<Attribute::NoCapture>(
3961 DepClassTy::OPTIONAL, IsKnownNoCapture))
3967 A, *UserI, *getCtxI(), *
this,
nullptr,
3968 [ScopeFn](
const Function &Fn) {
return &Fn != ScopeFn; }))
3976 case UseCaptureKind::NO_CAPTURE:
3978 case UseCaptureKind::MAY_CAPTURE:
3982 case UseCaptureKind::PASSTHROUGH:
3989 bool IsKnownNoCapture;
3991 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
3992 A,
this, VIRP, DepClassTy::NONE, IsKnownNoCapture,
false, &NoCaptureAA);
3993 if (!IsAssumedNoCapture &&
3995 if (!
A.checkForAllUses(UsePred, *
this, getAssociatedValue())) {
3997 dbgs() <<
"[AANoAliasCSArg] " << getAssociatedValue()
3998 <<
" cannot be noalias as it is potentially captured\n");
4003 A.recordDependence(*NoCaptureAA, *
this, DepClassTy::OPTIONAL);
4008 const auto &CB = cast<CallBase>(getAnchorValue());
4009 for (
unsigned OtherArgNo = 0; OtherArgNo < CB.
arg_size(); OtherArgNo++)
4010 if (mayAliasWithArgument(
A, AAR, MemBehaviorAA, CB, OtherArgNo))
4020 auto *MemBehaviorAA =
4023 A.recordDependence(*MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
4024 return ChangeStatus::UNCHANGED;
4027 bool IsKnownNoAlias;
4029 if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
4030 A,
this, VIRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
4032 <<
" is not no-alias at the definition\n");
4033 return indicatePessimisticFixpoint();
4037 if (MemBehaviorAA &&
4038 isKnownNoAliasDueToNoAliasPreservation(
A, AAR, *MemBehaviorAA)) {
4040 dbgs() <<
"[AANoAlias] No-Alias deduced via no-alias preservation\n");
4041 return ChangeStatus::UNCHANGED;
4044 return indicatePessimisticFixpoint();
4052struct AANoAliasReturned final : AANoAliasImpl {
4054 : AANoAliasImpl(IRP,
A) {}
4059 auto CheckReturnValue = [&](
Value &RV) ->
bool {
4060 if (
Constant *
C = dyn_cast<Constant>(&RV))
4061 if (
C->isNullValue() || isa<UndefValue>(
C))
4066 if (!isa<CallBase>(&RV))
4070 bool IsKnownNoAlias;
4071 if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
4072 A,
this, RVPos, DepClassTy::REQUIRED, IsKnownNoAlias))
4075 bool IsKnownNoCapture;
4077 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
4078 A,
this, RVPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
4080 return IsAssumedNoCapture ||
4084 if (!
A.checkForAllReturnedValues(CheckReturnValue, *
this))
4085 return indicatePessimisticFixpoint();
4087 return ChangeStatus::UNCHANGED;
4095struct AANoAliasCallSiteReturned final
4096 : AACalleeToCallSite<AANoAlias, AANoAliasImpl> {
4098 : AACalleeToCallSite<
AANoAlias, AANoAliasImpl>(IRP,
A) {}
4108struct AAIsDeadValueImpl :
public AAIsDead {
4112 bool isAssumedDead()
const override {
return isAssumed(IS_DEAD); }
4115 bool isKnownDead()
const override {
return isKnown(IS_DEAD); }
4118 bool isAssumedDead(
const BasicBlock *BB)
const override {
return false; }
4121 bool isKnownDead(
const BasicBlock *BB)
const override {
return false; }
4124 bool isAssumedDead(
const Instruction *
I)
const override {
4125 return I == getCtxI() && isAssumedDead();
4129 bool isKnownDead(
const Instruction *
I)
const override {
4130 return isAssumedDead(
I) && isKnownDead();
4134 const std::string getAsStr(
Attributor *
A)
const override {
4135 return isAssumedDead() ?
"assumed-dead" :
"assumed-live";
4141 if (
V.getType()->isVoidTy() ||
V.use_empty())
4145 if (!isa<Constant>(V)) {
4146 if (
auto *
I = dyn_cast<Instruction>(&V))
4147 if (!
A.isRunOn(*
I->getFunction()))
4149 bool UsedAssumedInformation =
false;
4150 std::optional<Constant *>
C =
4151 A.getAssumedConstant(V, *
this, UsedAssumedInformation);
4156 auto UsePred = [&](
const Use &
U,
bool &Follow) {
return false; };
4161 return A.checkForAllUses(UsePred, *
this, V,
false,
4162 DepClassTy::REQUIRED,
4171 auto *CB = dyn_cast<CallBase>(
I);
4172 if (!CB || isa<IntrinsicInst>(CB))
4177 bool IsKnownNoUnwind;
4178 if (!AA::hasAssumedIRAttr<Attribute::NoUnwind>(
4179 A,
this, CallIRP, DepClassTy::OPTIONAL, IsKnownNoUnwind))
4187struct AAIsDeadFloating :
public AAIsDeadValueImpl {
4189 : AAIsDeadValueImpl(IRP,
A) {}
4193 AAIsDeadValueImpl::initialize(
A);
4195 if (isa<UndefValue>(getAssociatedValue())) {
4196 indicatePessimisticFixpoint();
4200 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
4201 if (!isAssumedSideEffectFree(
A,
I)) {
4202 if (!isa_and_nonnull<StoreInst>(
I) && !isa_and_nonnull<FenceInst>(
I))
4203 indicatePessimisticFixpoint();
4205 removeAssumedBits(HAS_NO_EFFECT);
4212 if (!ExecDomainAA || !ExecDomainAA->isNoOpFence(FI))
4214 A.recordDependence(*ExecDomainAA, *
this, DepClassTy::OPTIONAL);
4221 if (
SI.isVolatile())
4227 bool UsedAssumedInformation =
false;
4228 if (!AssumeOnlyInst) {
4229 PotentialCopies.clear();
4231 UsedAssumedInformation)) {
4234 <<
"[AAIsDead] Could not determine potential copies of store!\n");
4238 LLVM_DEBUG(
dbgs() <<
"[AAIsDead] Store has " << PotentialCopies.size()
4239 <<
" potential copies.\n");
4244 UsedAssumedInformation))
4246 if (
auto *LI = dyn_cast<LoadInst>(V)) {
4248 auto &UserI = cast<Instruction>(*U.getUser());
4249 if (InfoCache.isOnlyUsedByAssume(UserI)) {
4251 AssumeOnlyInst->insert(&UserI);
4254 return A.isAssumedDead(U,
this,
nullptr, UsedAssumedInformation);
4260 <<
" is assumed live!\n");
4266 const std::string getAsStr(
Attributor *
A)
const override {
4267 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
4268 if (isa_and_nonnull<StoreInst>(
I))
4270 return "assumed-dead-store";
4271 if (isa_and_nonnull<FenceInst>(
I))
4273 return "assumed-dead-fence";
4274 return AAIsDeadValueImpl::getAsStr(
A);
4279 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
4280 if (
auto *SI = dyn_cast_or_null<StoreInst>(
I)) {
4281 if (!isDeadStore(
A, *SI))
4282 return indicatePessimisticFixpoint();
4283 }
else if (
auto *FI = dyn_cast_or_null<FenceInst>(
I)) {
4284 if (!isDeadFence(
A, *FI))
4285 return indicatePessimisticFixpoint();
4287 if (!isAssumedSideEffectFree(
A,
I))
4288 return indicatePessimisticFixpoint();
4289 if (!areAllUsesAssumedDead(
A, getAssociatedValue()))
4290 return indicatePessimisticFixpoint();
4295 bool isRemovableStore()
const override {
4296 return isAssumed(IS_REMOVABLE) && isa<StoreInst>(&getAssociatedValue());
4301 Value &
V = getAssociatedValue();
4302 if (
auto *
I = dyn_cast<Instruction>(&V)) {
4307 if (
auto *SI = dyn_cast<StoreInst>(
I)) {
4309 bool IsDead = isDeadStore(
A, *SI, &AssumeOnlyInst);
4312 A.deleteAfterManifest(*
I);
4313 for (
size_t i = 0; i < AssumeOnlyInst.
size(); ++i) {
4315 for (
auto *Usr : AOI->
users())
4316 AssumeOnlyInst.
insert(cast<Instruction>(Usr));
4317 A.deleteAfterManifest(*AOI);
4321 if (
auto *FI = dyn_cast<FenceInst>(
I)) {
4323 A.deleteAfterManifest(*FI);
4326 if (isAssumedSideEffectFree(
A,
I) && !isa<InvokeInst>(
I)) {
4327 A.deleteAfterManifest(*
I);
4335 void trackStatistics()
const override {
4344struct AAIsDeadArgument :
public AAIsDeadFloating {
4346 : AAIsDeadFloating(IRP,
A) {}
4350 Argument &Arg = *getAssociatedArgument();
4351 if (
A.isValidFunctionSignatureRewrite(Arg, {}))
4352 if (
A.registerFunctionSignatureRewrite(
4356 return ChangeStatus::CHANGED;
4358 return ChangeStatus::UNCHANGED;
4365struct AAIsDeadCallSiteArgument :
public AAIsDeadValueImpl {
4367 : AAIsDeadValueImpl(IRP,
A) {}
4371 AAIsDeadValueImpl::initialize(
A);
4372 if (isa<UndefValue>(getAssociatedValue()))
4373 indicatePessimisticFixpoint();
4382 Argument *Arg = getAssociatedArgument();
4384 return indicatePessimisticFixpoint();
4386 auto *ArgAA =
A.getAAFor<
AAIsDead>(*
this, ArgPos, DepClassTy::REQUIRED);
4388 return indicatePessimisticFixpoint();
4394 CallBase &CB = cast<CallBase>(getAnchorValue());
4396 assert(!isa<UndefValue>(
U.get()) &&
4397 "Expected undef values to be filtered out!");
4399 if (
A.changeUseAfterManifest(U, UV))
4400 return ChangeStatus::CHANGED;
4401 return ChangeStatus::UNCHANGED;
4408struct AAIsDeadCallSiteReturned :
public AAIsDeadFloating {
4410 : AAIsDeadFloating(IRP,
A) {}
4413 bool isAssumedDead()
const override {
4414 return AAIsDeadFloating::isAssumedDead() && IsAssumedSideEffectFree;
4419 AAIsDeadFloating::initialize(
A);
4420 if (isa<UndefValue>(getAssociatedValue())) {
4421 indicatePessimisticFixpoint();
4426 IsAssumedSideEffectFree = isAssumedSideEffectFree(
A, getCtxI());
4432 if (IsAssumedSideEffectFree && !isAssumedSideEffectFree(
A, getCtxI())) {
4433 IsAssumedSideEffectFree =
false;
4434 Changed = ChangeStatus::CHANGED;
4436 if (!areAllUsesAssumedDead(
A, getAssociatedValue()))
4437 return indicatePessimisticFixpoint();
4442 void trackStatistics()
const override {
4443 if (IsAssumedSideEffectFree)
4450 const std::string getAsStr(
Attributor *
A)
const override {
4451 return isAssumedDead()
4453 : (getAssumed() ?
"assumed-dead-users" :
"assumed-live");
4457 bool IsAssumedSideEffectFree =
true;
4460struct AAIsDeadReturned :
public AAIsDeadValueImpl {
4462 : AAIsDeadValueImpl(IRP,
A) {}
4467 bool UsedAssumedInformation =
false;
4468 A.checkForAllInstructions([](
Instruction &) {
return true; }, *
this,
4469 {Instruction::Ret}, UsedAssumedInformation);
4472 if (ACS.isCallbackCall() || !ACS.getInstruction())
4474 return areAllUsesAssumedDead(
A, *ACS.getInstruction());
4477 if (!
A.checkForAllCallSites(PredForCallSite, *
this,
true,
4478 UsedAssumedInformation))
4479 return indicatePessimisticFixpoint();
4481 return ChangeStatus::UNCHANGED;
4487 bool AnyChange =
false;
4495 bool UsedAssumedInformation =
false;
4496 A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
4497 UsedAssumedInformation);
4498 return AnyChange ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
4505struct AAIsDeadFunction :
public AAIsDead {
4511 assert(
F &&
"Did expect an anchor function");
4512 if (!isAssumedDeadInternalFunction(
A)) {
4513 ToBeExploredFrom.insert(&
F->getEntryBlock().front());
4514 assumeLive(
A,
F->getEntryBlock());
4518 bool isAssumedDeadInternalFunction(
Attributor &
A) {
4519 if (!getAnchorScope()->hasLocalLinkage())
4521 bool UsedAssumedInformation =
false;
4523 true, UsedAssumedInformation);
4527 const std::string getAsStr(
Attributor *
A)
const override {
4528 return "Live[#BB " + std::to_string(AssumedLiveBlocks.size()) +
"/" +
4529 std::to_string(getAnchorScope()->
size()) +
"][#TBEP " +
4530 std::to_string(ToBeExploredFrom.size()) +
"][#KDE " +
4531 std::to_string(KnownDeadEnds.size()) +
"]";
4536 assert(getState().isValidState() &&
4537 "Attempted to manifest an invalid state!");
4542 if (AssumedLiveBlocks.empty()) {
4543 A.deleteAfterManifest(
F);
4544 return ChangeStatus::CHANGED;
4550 bool Invoke2CallAllowed = !mayCatchAsynchronousExceptions(
F);
4552 KnownDeadEnds.set_union(ToBeExploredFrom);
4553 for (
const Instruction *DeadEndI : KnownDeadEnds) {
4554 auto *CB = dyn_cast<CallBase>(DeadEndI);
4557 bool IsKnownNoReturn;
4558 bool MayReturn = !AA::hasAssumedIRAttr<Attribute::NoReturn>(
4561 if (MayReturn && (!Invoke2CallAllowed || !isa<InvokeInst>(CB)))
4564 if (
auto *
II = dyn_cast<InvokeInst>(DeadEndI))
4565 A.registerInvokeWithDeadSuccessor(
const_cast<InvokeInst &
>(*
II));
4567 A.changeToUnreachableAfterManifest(
4568 const_cast<Instruction *
>(DeadEndI->getNextNode()));
4569 HasChanged = ChangeStatus::CHANGED;
4574 if (!AssumedLiveBlocks.count(&BB)) {
4575 A.deleteAfterManifest(BB);
4577 HasChanged = ChangeStatus::CHANGED;
4587 assert(
From->getParent() == getAnchorScope() &&
4589 "Used AAIsDead of the wrong function");
4590 return isValidState() && !AssumedLiveEdges.count(std::make_pair(
From, To));
4594 void trackStatistics()
const override {}
4597 bool isAssumedDead()
const override {
return false; }
4600 bool isKnownDead()
const override {
return false; }
4603 bool isAssumedDead(
const BasicBlock *BB)
const override {
4605 "BB must be in the same anchor scope function.");
4609 return !AssumedLiveBlocks.count(BB);
4613 bool isKnownDead(
const BasicBlock *BB)
const override {
4614 return getKnown() && isAssumedDead(BB);
4618 bool isAssumedDead(
const Instruction *
I)
const override {
4619 assert(
I->getParent()->getParent() == getAnchorScope() &&
4620 "Instruction must be in the same anchor scope function.");
4627 if (!AssumedLiveBlocks.count(
I->getParent()))
4633 if (KnownDeadEnds.count(PrevI) || ToBeExploredFrom.count(PrevI))
4641 bool isKnownDead(
const Instruction *
I)
const override {
4642 return getKnown() && isAssumedDead(
I);
4648 if (!AssumedLiveBlocks.insert(&BB).second)
4656 if (
const auto *CB = dyn_cast<CallBase>(&
I))
4658 if (
F->hasLocalLinkage())
4659 A.markLiveInternalFunction(*
F);
4683 bool IsKnownNoReturn;
4684 if (AA::hasAssumedIRAttr<Attribute::NoReturn>(
4686 return !IsKnownNoReturn;
4698 bool UsedAssumedInformation =
4699 identifyAliveSuccessors(
A, cast<CallBase>(
II), AA, AliveSuccessors);
4704 if (AAIsDeadFunction::mayCatchAsynchronousExceptions(*
II.getFunction())) {
4705 AliveSuccessors.
push_back(&
II.getUnwindDest()->front());
4709 bool IsKnownNoUnwind;
4710 if (AA::hasAssumedIRAttr<Attribute::NoUnwind>(
4712 UsedAssumedInformation |= !IsKnownNoUnwind;
4714 AliveSuccessors.
push_back(&
II.getUnwindDest()->front());
4717 return UsedAssumedInformation;
4724 bool UsedAssumedInformation =
false;
4728 std::optional<Constant *>
C =
4729 A.getAssumedConstant(*BI.
getCondition(), AA, UsedAssumedInformation);
4730 if (!
C || isa_and_nonnull<UndefValue>(*
C)) {
4732 }
else if (isa_and_nonnull<ConstantInt>(*
C)) {
4734 BI.
getSuccessor(1 - cast<ConstantInt>(*C)->getValue().getZExtValue());
4739 UsedAssumedInformation =
false;
4742 return UsedAssumedInformation;
4749 bool UsedAssumedInformation =
false;
4753 UsedAssumedInformation)) {
4760 if (Values.
empty() ||
4761 (Values.
size() == 1 &&
4762 isa_and_nonnull<UndefValue>(Values.
front().getValue()))) {
4764 return UsedAssumedInformation;
4767 Type &Ty = *
SI.getCondition()->getType();
4769 auto CheckForConstantInt = [&](
Value *
V) {
4770 if (
auto *CI = dyn_cast_if_present<ConstantInt>(
AA::getWithType(*V, Ty))) {
4778 return CheckForConstantInt(
VAC.getValue());
4782 return UsedAssumedInformation;
4785 unsigned MatchedCases = 0;
4786 for (
const auto &CaseIt :
SI.cases()) {
4787 if (
Constants.count(CaseIt.getCaseValue())) {
4789 AliveSuccessors.
push_back(&CaseIt.getCaseSuccessor()->front());
4796 AliveSuccessors.
push_back(&
SI.getDefaultDest()->front());
4797 return UsedAssumedInformation;
4803 if (AssumedLiveBlocks.empty()) {
4804 if (isAssumedDeadInternalFunction(
A))
4808 ToBeExploredFrom.insert(&
F->getEntryBlock().front());
4809 assumeLive(
A,
F->getEntryBlock());
4813 LLVM_DEBUG(
dbgs() <<
"[AAIsDead] Live [" << AssumedLiveBlocks.size() <<
"/"
4814 << getAnchorScope()->
size() <<
"] BBs and "
4815 << ToBeExploredFrom.size() <<
" exploration points and "
4816 << KnownDeadEnds.size() <<
" known dead ends\n");
4821 ToBeExploredFrom.end());
4822 decltype(ToBeExploredFrom) NewToBeExploredFrom;
4825 while (!Worklist.
empty()) {
4831 while (!
I->isTerminator() && !isa<CallBase>(
I))
4832 I =
I->getNextNode();
4834 AliveSuccessors.
clear();
4836 bool UsedAssumedInformation =
false;
4837 switch (
I->getOpcode()) {
4841 "Expected non-terminators to be handled already!");
4845 case Instruction::Call:
4846 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<CallInst>(*
I),
4847 *
this, AliveSuccessors);
4849 case Instruction::Invoke:
4850 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<InvokeInst>(*
I),
4851 *
this, AliveSuccessors);
4853 case Instruction::Br:
4854 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<BranchInst>(*
I),
4855 *
this, AliveSuccessors);
4857 case Instruction::Switch:
4858 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<SwitchInst>(*
I),
4859 *
this, AliveSuccessors);
4863 if (UsedAssumedInformation) {
4864 NewToBeExploredFrom.insert(
I);
4865 }
else if (AliveSuccessors.
empty() ||
4866 (
I->isTerminator() &&
4867 AliveSuccessors.
size() <
I->getNumSuccessors())) {
4868 if (KnownDeadEnds.insert(
I))
4873 << AliveSuccessors.
size() <<
" UsedAssumedInformation: "
4874 << UsedAssumedInformation <<
"\n");
4876 for (
const Instruction *AliveSuccessor : AliveSuccessors) {
4877 if (!
I->isTerminator()) {
4878 assert(AliveSuccessors.size() == 1 &&
4879 "Non-terminator expected to have a single successor!");
4883 auto Edge = std::make_pair(
I->getParent(), AliveSuccessor->getParent());
4884 if (AssumedLiveEdges.insert(Edge).second)
4886 if (assumeLive(
A, *AliveSuccessor->getParent()))
4893 if (NewToBeExploredFrom.size() != ToBeExploredFrom.size() ||
4895 return !ToBeExploredFrom.count(I);
4898 ToBeExploredFrom = std::move(NewToBeExploredFrom);
4907 if (ToBeExploredFrom.empty() &&
4908 getAnchorScope()->
size() == AssumedLiveBlocks.size() &&
4910 return DeadEndI->isTerminator() && DeadEndI->getNumSuccessors() == 0;
4912 return indicatePessimisticFixpoint();
4917struct AAIsDeadCallSite final : AAIsDeadFunction {
4919 : AAIsDeadFunction(IRP,
A) {}
4928 "supported for call sites yet!");
4933 return indicatePessimisticFixpoint();
4937 void trackStatistics()
const override {}
4951 Value &
V = *getAssociatedValue().stripPointerCasts();
4953 A.getAttrs(getIRPosition(),
4954 {Attribute::Dereferenceable, Attribute::DereferenceableOrNull},
4957 takeKnownDerefBytesMaximum(Attr.getValueAsInt());
4960 bool IsKnownNonNull;
4961 AA::hasAssumedIRAttr<Attribute::NonNull>(
4962 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNonNull);
4964 bool CanBeNull, CanBeFreed;
4965 takeKnownDerefBytesMaximum(
V.getPointerDereferenceableBytes(
4966 A.getDataLayout(), CanBeNull, CanBeFreed));
4969 followUsesInMBEC(*
this,
A, getState(), *CtxI);
4974 StateType &getState()
override {
return *
this; }
4975 const StateType &getState()
const override {
return *
this; }
4981 const Value *UseV =
U->get();
4986 if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
I->isVolatile())
4991 Loc->Ptr,
Offset,
A.getDataLayout(),
true);
4992 if (
Base &&
Base == &getAssociatedValue())
4993 State.addAccessedBytes(
Offset, Loc->Size.getValue());
4999 bool IsNonNull =
false;
5000 bool TrackUse =
false;
5001 int64_t DerefBytes = getKnownNonNullAndDerefBytesForUse(
5002 A, *
this, getAssociatedValue(), U,
I, IsNonNull, TrackUse);
5003 LLVM_DEBUG(
dbgs() <<
"[AADereferenceable] Deref bytes: " << DerefBytes
5004 <<
" for instruction " << *
I <<
"\n");
5006 addAccessedBytesForUse(
A, U,
I, State);
5007 State.takeKnownDerefBytesMaximum(DerefBytes);
5014 bool IsKnownNonNull;
5015 bool IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
5016 A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
5017 if (IsAssumedNonNull &&
5018 A.hasAttr(getIRPosition(), Attribute::DereferenceableOrNull)) {
5019 A.removeAttrs(getIRPosition(), {Attribute::DereferenceableOrNull});
5020 return ChangeStatus::CHANGED;
5028 bool IsKnownNonNull;
5029 bool IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
5030 A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
5031 if (IsAssumedNonNull)
5033 Ctx, getAssumedDereferenceableBytes()));
5036 Ctx, getAssumedDereferenceableBytes()));
5040 const std::string getAsStr(
Attributor *
A)
const override {
5041 if (!getAssumedDereferenceableBytes())
5042 return "unknown-dereferenceable";
5043 bool IsKnownNonNull;
5044 bool IsAssumedNonNull =
false;
5046 IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
5047 *
A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
5048 return std::string(
"dereferenceable") +
5049 (IsAssumedNonNull ?
"" :
"_or_null") +
5050 (isAssumedGlobal() ?
"_globally" :
"") +
"<" +
5051 std::to_string(getKnownDereferenceableBytes()) +
"-" +
5052 std::to_string(getAssumedDereferenceableBytes()) +
">" +
5053 (!
A ?
" [non-null is unknown]" :
"");
5058struct AADereferenceableFloating : AADereferenceableImpl {
5060 : AADereferenceableImpl(IRP,
A) {}
5065 bool UsedAssumedInformation =
false;
5067 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
5069 Values.
push_back({getAssociatedValue(), getCtxI()});
5072 Stripped = Values.
size() != 1 ||
5073 Values.
front().getValue() != &getAssociatedValue();
5079 auto VisitValueCB = [&](
const Value &
V) ->
bool {
5081 DL.getIndexSizeInBits(
V.getType()->getPointerAddressSpace());
5089 int64_t DerefBytes = 0;
5090 if (!AA || (!Stripped &&
this == AA)) {
5093 bool CanBeNull, CanBeFreed;
5095 Base->getPointerDereferenceableBytes(
DL, CanBeNull, CanBeFreed);
5096 T.GlobalState.indicatePessimisticFixpoint();
5099 DerefBytes =
DS.DerefBytesState.getAssumed();
5100 T.GlobalState &=
DS.GlobalState;
5106 int64_t OffsetSExt =
Offset.getSExtValue();
5110 T.takeAssumedDerefBytesMinimum(
5111 std::max(int64_t(0), DerefBytes - OffsetSExt));
5116 T.takeKnownDerefBytesMaximum(
5117 std::max(int64_t(0), DerefBytes - OffsetSExt));
5118 T.indicatePessimisticFixpoint();
5119 }
else if (OffsetSExt > 0) {
5125 T.indicatePessimisticFixpoint();
5129 return T.isValidState();
5132 for (
const auto &VAC : Values)
5133 if (!VisitValueCB(*
VAC.getValue()))
5134 return indicatePessimisticFixpoint();
5140 void trackStatistics()
const override {
5146struct AADereferenceableReturned final
5147 : AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl> {
5149 AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl>;
5154 void trackStatistics()
const override {
5160struct AADereferenceableArgument final
5161 : AAArgumentFromCallSiteArguments<AADereferenceable,
5162 AADereferenceableImpl> {
5164 AAArgumentFromCallSiteArguments<AADereferenceable, AADereferenceableImpl>;
5169 void trackStatistics()
const override {
5175struct AADereferenceableCallSiteArgument final : AADereferenceableFloating {
5177 : AADereferenceableFloating(IRP,
A) {}
5180 void trackStatistics()
const override {
5186struct AADereferenceableCallSiteReturned final
5187 : AACalleeToCallSite<AADereferenceable, AADereferenceableImpl> {
5188 using Base = AACalleeToCallSite<AADereferenceable, AADereferenceableImpl>;
5193 void trackStatistics()
const override {
5203 Value &AssociatedValue,
const Use *U,
5207 if (isa<CastInst>(
I)) {
5209 TrackUse = !isa<PtrToIntInst>(
I);
5212 if (
auto *
GEP = dyn_cast<GetElementPtrInst>(
I)) {
5213 if (
GEP->hasAllConstantIndices())
5219 if (
const auto *CB = dyn_cast<CallBase>(
I)) {
5233 const Value *UseV =
U->get();
5234 if (
auto *SI = dyn_cast<StoreInst>(
I)) {
5235 if (
SI->getPointerOperand() == UseV)
5236 MA =
SI->getAlign();
5237 }
else if (
auto *LI = dyn_cast<LoadInst>(
I)) {
5238 if (LI->getPointerOperand() == UseV)
5239 MA = LI->getAlign();
5240 }
else if (
auto *AI = dyn_cast<AtomicRMWInst>(
I)) {
5241 if (AI->getPointerOperand() == UseV)
5242 MA = AI->getAlign();
5243 }
else if (
auto *AI = dyn_cast<AtomicCmpXchgInst>(
I)) {
5244 if (AI->getPointerOperand() == UseV)
5245 MA = AI->getAlign();
5251 unsigned Alignment = MA->value();
5255 if (
Base == &AssociatedValue) {
5274 A.getAttrs(getIRPosition(), {Attribute::Alignment},
Attrs);
5276 takeKnownMaximum(Attr.getValueAsInt());
5278 Value &
V = *getAssociatedValue().stripPointerCasts();
5279 takeKnownMaximum(
V.getPointerAlignment(
A.getDataLayout()).value());
5282 followUsesInMBEC(*
this,
A, getState(), *CtxI);
5287 ChangeStatus LoadStoreChanged = ChangeStatus::UNCHANGED;
5290 Value &AssociatedValue = getAssociatedValue();
5291 for (
const Use &U : AssociatedValue.
uses()) {
5292 if (
auto *SI = dyn_cast<StoreInst>(
U.getUser())) {
5293 if (
SI->getPointerOperand() == &AssociatedValue)
5294 if (
SI->getAlign() < getAssumedAlign()) {
5296 "Number of times alignment added to a store");
5297 SI->setAlignment(getAssumedAlign());
5298 LoadStoreChanged = ChangeStatus::CHANGED;
5300 }
else if (
auto *LI = dyn_cast<LoadInst>(
U.getUser())) {
5301 if (LI->getPointerOperand() == &AssociatedValue)
5302 if (LI->getAlign() < getAssumedAlign()) {
5303 LI->setAlignment(getAssumedAlign());
5305 "Number of times alignment added to a load");
5306 LoadStoreChanged = ChangeStatus::CHANGED;
5313 Align InheritAlign =
5314 getAssociatedValue().getPointerAlignment(
A.getDataLayout());
5315 if (InheritAlign >= getAssumedAlign())
5316 return LoadStoreChanged;
5317 return Changed | LoadStoreChanged;
5327 if (getAssumedAlign() > 1)
5335 bool TrackUse =
false;
5337 unsigned int KnownAlign =
5338 getKnownAlignForUse(
A, *
this, getAssociatedValue(), U,
I, TrackUse);
5339 State.takeKnownMaximum(KnownAlign);
5345 const std::string getAsStr(
Attributor *
A)
const override {
5346 return "align<" + std::to_string(getKnownAlign().
value()) +
"-" +
5347 std::to_string(getAssumedAlign().
value()) +
">";
5352struct AAAlignFloating : AAAlignImpl {
5360 bool UsedAssumedInformation =
false;
5362 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
5364 Values.
push_back({getAssociatedValue(), getCtxI()});
5367 Stripped = Values.
size() != 1 ||
5368 Values.
front().getValue() != &getAssociatedValue();
5372 auto VisitValueCB = [&](
Value &
V) ->
bool {
5373 if (isa<UndefValue>(V) || isa<ConstantPointerNull>(V))
5376 DepClassTy::REQUIRED);
5377 if (!AA || (!Stripped &&
this == AA)) {
5379 unsigned Alignment = 1;
5392 Alignment =
V.getPointerAlignment(
DL).value();
5395 T.takeKnownMaximum(Alignment);
5396 T.indicatePessimisticFixpoint();
5402 return T.isValidState();
5405 for (
const auto &VAC : Values) {
5406 if (!VisitValueCB(*
VAC.getValue()))
5407 return indicatePessimisticFixpoint();
5420struct AAAlignReturned final
5421 : AAReturnedFromReturnedValues<AAAlign, AAAlignImpl> {
5422 using Base = AAReturnedFromReturnedValues<AAAlign, AAAlignImpl>;
5430struct AAAlignArgument final
5431 : AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl> {
5432 using Base = AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl>;
5440 if (
A.getInfoCache().isInvolvedInMustTailCall(*getAssociatedArgument()))
5441 return ChangeStatus::UNCHANGED;
5442 return Base::manifest(
A);
5449struct AAAlignCallSiteArgument final : AAAlignFloating {
5451 : AAAlignFloating(IRP,
A) {}
5458 if (
Argument *Arg = getAssociatedArgument())
5459 if (
A.getInfoCache().isInvolvedInMustTailCall(*Arg))
5460 return ChangeStatus::UNCHANGED;
5462 Align InheritAlign =
5463 getAssociatedValue().getPointerAlignment(
A.getDataLayout());
5464 if (InheritAlign >= getAssumedAlign())
5465 Changed = ChangeStatus::UNCHANGED;
5472 if (
Argument *Arg = getAssociatedArgument()) {
5475 const auto *ArgAlignAA =
A.getAAFor<
AAAlign>(
5478 takeKnownMaximum(ArgAlignAA->getKnownAlign().value());
5488struct AAAlignCallSiteReturned final
5489 : AACalleeToCallSite<AAAlign, AAAlignImpl> {
5490 using Base = AACalleeToCallSite<AAAlign, AAAlignImpl>;
5507 assert(!AA::hasAssumedIRAttr<Attribute::NoReturn>(
5508 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
5513 const std::string getAsStr(
Attributor *
A)
const override {
5514 return getAssumed() ?
"noreturn" :
"may-return";
5519 auto CheckForNoReturn = [](
Instruction &) {
return false; };
5520 bool UsedAssumedInformation =
false;
5521 if (!
A.checkForAllInstructions(CheckForNoReturn, *
this,
5522 {(unsigned)Instruction::Ret},
5523 UsedAssumedInformation))
5524 return indicatePessimisticFixpoint();
5525 return ChangeStatus::UNCHANGED;
5529struct AANoReturnFunction final : AANoReturnImpl {
5531 : AANoReturnImpl(IRP,
A) {}
5538struct AANoReturnCallSite final
5539 : AACalleeToCallSite<AANoReturn, AANoReturnImpl> {
5541 : AACalleeToCallSite<
AANoReturn, AANoReturnImpl>(IRP,
A) {}
5558 Value &
V = getAssociatedValue();
5559 if (
auto *
C = dyn_cast<Constant>(&V)) {
5560 if (
C->isThreadDependent())
5561 indicatePessimisticFixpoint();
5563 indicateOptimisticFixpoint();
5566 if (
auto *CB = dyn_cast<CallBase>(&V))
5569 indicateOptimisticFixpoint();
5572 if (
auto *
I = dyn_cast<Instruction>(&V)) {
5577 indicatePessimisticFixpoint();
5587 Value &
V = getAssociatedValue();
5589 if (
auto *
I = dyn_cast<Instruction>(&V))
5590 Scope =
I->getFunction();
5591 if (
auto *
A = dyn_cast<Argument>(&V)) {
5593 if (!
Scope->hasLocalLinkage())
5597 return indicateOptimisticFixpoint();
5599 bool IsKnownNoRecurse;
5600 if (AA::hasAssumedIRAttr<Attribute::NoRecurse>(
5605 auto UsePred = [&](
const Use &
U,
bool &Follow) {
5606 const Instruction *UserI = dyn_cast<Instruction>(
U.getUser());
5607 if (!UserI || isa<GetElementPtrInst>(UserI) || isa<CastInst>(UserI) ||
5608 isa<PHINode>(UserI) || isa<SelectInst>(UserI)) {
5612 if (isa<LoadInst>(UserI) || isa<CmpInst>(UserI) ||
5613 (isa<StoreInst>(UserI) &&
5614 cast<StoreInst>(UserI)->getValueOperand() !=
U.get()))
5616 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
5620 if (!Callee || !
Callee->hasLocalLinkage())
5626 DepClassTy::OPTIONAL);
5627 if (!ArgInstanceInfoAA ||
5628 !ArgInstanceInfoAA->isAssumedUniqueForAnalysis())
5633 A, *CB, *Scope, *
this,
nullptr,
5641 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
5642 if (
auto *SI = dyn_cast<StoreInst>(OldU.
getUser())) {
5643 auto *
Ptr =
SI->getPointerOperand()->stripPointerCasts();
5651 if (!
A.checkForAllUses(UsePred, *
this, V,
true,
5652 DepClassTy::OPTIONAL,
5653 true, EquivalentUseCB))
5654 return indicatePessimisticFixpoint();
5660 const std::string getAsStr(
Attributor *
A)
const override {
5661 return isAssumedUniqueForAnalysis() ?
"<unique [fAa]>" :
"<unknown>";
5665 void trackStatistics()
const override {}
5669struct AAInstanceInfoFloating : AAInstanceInfoImpl {
5671 : AAInstanceInfoImpl(IRP,
A) {}
5675struct AAInstanceInfoArgument final : AAInstanceInfoFloating {
5677 : AAInstanceInfoFloating(IRP,
A) {}
5681struct AAInstanceInfoCallSiteArgument final : AAInstanceInfoImpl {
5683 : AAInstanceInfoImpl(IRP,
A) {}
5691 Argument *Arg = getAssociatedArgument();
5693 return indicatePessimisticFixpoint();
5698 return indicatePessimisticFixpoint();
5704struct AAInstanceInfoReturned final : AAInstanceInfoImpl {
5706 : AAInstanceInfoImpl(IRP,
A) {
5722struct AAInstanceInfoCallSiteReturned final : AAInstanceInfoFloating {
5724 : AAInstanceInfoFloating(IRP,
A) {}
5731 bool IgnoreSubsumingPositions) {
5732 assert(ImpliedAttributeKind == Attribute::NoCapture &&
5733 "Unexpected attribute kind");
5736 return V.use_empty();
5742 if (isa<UndefValue>(V) || (isa<ConstantPointerNull>(V) &&
5743 V.getType()->getPointerAddressSpace() == 0)) {
5747 if (
A.hasAttr(IRP, {Attribute::NoCapture},
5748 true, Attribute::NoCapture))
5754 {Attribute::NoCapture, Attribute::ByVal},
5756 A.manifestAttrs(IRP,
5764 determineFunctionCaptureCapabilities(IRP, *
F, State);
5766 A.manifestAttrs(IRP,
5785 bool ReadOnly =
F.onlyReadsMemory();
5786 bool NoThrow =
F.doesNotThrow();
5787 bool IsVoidReturn =
F.getReturnType()->isVoidTy();
5788 if (ReadOnly && NoThrow && IsVoidReturn) {
5801 if (NoThrow && IsVoidReturn)
5806 if (!NoThrow || ArgNo < 0 ||
5807 !
F.getAttributes().hasAttrSomewhere(Attribute::Returned))
5810 for (
unsigned U = 0, E =
F.arg_size(); U < E; ++U)
5811 if (
F.hasParamAttribute(U, Attribute::Returned)) {
5812 if (U ==
unsigned(ArgNo))
5830 assert(!AA::hasAssumedIRAttr<Attribute::NoCapture>(
5831 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
5841 if (!isAssumedNoCaptureMaybeReturned())
5844 if (isArgumentPosition()) {
5845 if (isAssumedNoCapture())
5853 const std::string getAsStr(
Attributor *
A)
const override {
5854 if (isKnownNoCapture())
5855 return "known not-captured";
5856 if (isAssumedNoCapture())
5857 return "assumed not-captured";
5858 if (isKnownNoCaptureMaybeReturned())
5859 return "known not-captured-maybe-returned";
5860 if (isAssumedNoCaptureMaybeReturned())
5861 return "assumed not-captured-maybe-returned";
5862 return "assumed-captured";
5870 LLVM_DEBUG(
dbgs() <<
"[AANoCapture] Check use: " << *
U.get() <<
" in "
5874 if (isa<PtrToIntInst>(UInst)) {
5876 return isCapturedIn(State,
true,
true,
5882 if (isa<StoreInst>(UInst))
5883 return isCapturedIn(State,
true,
true,
5887 if (isa<ReturnInst>(UInst)) {
5889 return isCapturedIn(State,
false,
false,
5891 return isCapturedIn(State,
true,
true,
5897 auto *CB = dyn_cast<CallBase>(UInst);
5899 return isCapturedIn(State,
true,
true,
5906 bool IsKnownNoCapture;
5908 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
5909 A,
this, CSArgPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
5911 if (IsAssumedNoCapture)
5912 return isCapturedIn(State,
false,
false,
5916 return isCapturedIn(State,
false,
false,
5921 return isCapturedIn(State,
true,
true,
5929 bool CapturedInInt,
bool CapturedInRet) {
5930 LLVM_DEBUG(
dbgs() <<
" - captures [Mem " << CapturedInMem <<
"|Int "
5931 << CapturedInInt <<
"|Ret " << CapturedInRet <<
"]\n");
5947 return indicatePessimisticFixpoint();
5954 return indicatePessimisticFixpoint();
5962 T.addKnownBits(NOT_CAPTURED_IN_MEM);
5964 addKnownBits(NOT_CAPTURED_IN_MEM);
5971 auto CheckReturnedArgs = [&](
bool &UsedAssumedInformation) {
5975 UsedAssumedInformation))
5977 bool SeenConstant =
false;
5979 if (isa<Constant>(
VAC.getValue())) {
5982 SeenConstant =
true;
5983 }
else if (!isa<Argument>(
VAC.getValue()) ||
5984 VAC.getValue() == getAssociatedArgument())
5990 bool IsKnownNoUnwind;
5991 if (AA::hasAssumedIRAttr<Attribute::NoUnwind>(
5993 bool IsVoidTy =
F->getReturnType()->isVoidTy();
5994 bool UsedAssumedInformation =
false;
5995 if (IsVoidTy || CheckReturnedArgs(UsedAssumedInformation)) {
5996 T.addKnownBits(NOT_CAPTURED_IN_RET);
5997 if (
T.isKnown(NOT_CAPTURED_IN_MEM))
5999 if (IsKnownNoUnwind && (IsVoidTy || !UsedAssumedInformation)) {
6000 addKnownBits(NOT_CAPTURED_IN_RET);
6001 if (isKnown(NOT_CAPTURED_IN_MEM))
6002 return indicateOptimisticFixpoint();
6013 auto UseCheck = [&](
const Use &
U,
bool &Follow) ->
bool {
6018 return checkUse(
A,
T, U, Follow);
6026 if (!
A.checkForAllUses(UseCheck, *
this, *V))
6027 return indicatePessimisticFixpoint();
6030 auto Assumed = S.getAssumed();
6031 S.intersectAssumedBits(
T.getAssumed());
6032 if (!isAssumedNoCaptureMaybeReturned())
6033 return indicatePessimisticFixpoint();
6039struct AANoCaptureArgument final : AANoCaptureImpl {
6041 : AANoCaptureImpl(IRP,
A) {}
6048struct AANoCaptureCallSiteArgument final : AANoCaptureImpl {
6050 : AANoCaptureImpl(IRP,
A) {}
6058 Argument *Arg = getAssociatedArgument();
6060 return indicatePessimisticFixpoint();
6062 bool IsKnownNoCapture;
6064 if (AA::hasAssumedIRAttr<Attribute::NoCapture>(
6065 A,
this, ArgPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
6067 return ChangeStatus::UNCHANGED;
6069 return indicatePessimisticFixpoint();
6078struct AANoCaptureFloating final : AANoCaptureImpl {
6080 : AANoCaptureImpl(IRP,
A) {}
6083 void trackStatistics()
const override {
6089struct AANoCaptureReturned final : AANoCaptureImpl {
6091 : AANoCaptureImpl(IRP,
A) {
6106 void trackStatistics()
const override {}
6110struct AANoCaptureCallSiteReturned final : AANoCaptureImpl {
6112 : AANoCaptureImpl(IRP,
A) {}
6118 determineFunctionCaptureCapabilities(getIRPosition(), *
F, *
this);
6122 void trackStatistics()
const override {
6133 SimplifiedAssociatedValue,
Other, Ty);
6134 if (SimplifiedAssociatedValue == std::optional<Value *>(
nullptr))
6138 if (SimplifiedAssociatedValue)
6139 dbgs() <<
"[ValueSimplify] is assumed to be "
6140 << **SimplifiedAssociatedValue <<
"\n";
6142 dbgs() <<
"[ValueSimplify] is assumed to be <none>\n";
6154 if (getAssociatedValue().
getType()->isVoidTy())
6155 indicatePessimisticFixpoint();
6156 if (
A.hasSimplificationCallback(getIRPosition()))
6157 indicatePessimisticFixpoint();
6161 const std::string getAsStr(
Attributor *
A)
const override {
6163 dbgs() <<
"SAV: " << (
bool)SimplifiedAssociatedValue <<
" ";
6164 if (SimplifiedAssociatedValue && *SimplifiedAssociatedValue)
6165 dbgs() <<
"SAV: " << **SimplifiedAssociatedValue <<
" ";
6167 return isValidState() ? (isAtFixpoint() ?
"simplified" :
"maybe-simple")
6172 void trackStatistics()
const override {}
6175 std::optional<Value *>
6176 getAssumedSimplifiedValue(
Attributor &
A)
const override {
6177 return SimplifiedAssociatedValue;
6188 if (CtxI &&
V.getType()->canLosslesslyBitCastTo(&Ty))
6190 : BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
6203 assert(CtxI &&
"Cannot reproduce an instruction without context!");
6204 if (
Check && (
I.mayReadFromMemory() ||
6209 Value *NewOp = reproduceValue(
A, QueryingAA, *
Op, Ty, CtxI,
Check, VMap);
6211 assert(
Check &&
"Manifest of new value unexpectedly failed!");
6237 if (
const auto &NewV = VMap.
lookup(&V))
6239 bool UsedAssumedInformation =
false;
6240 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
6242 if (!SimpleV.has_value())
6246 EffectiveV = *SimpleV;
6247 if (
auto *
C = dyn_cast<Constant>(EffectiveV))
6251 return ensureType(
A, *EffectiveV, Ty, CtxI,
Check);
6252 if (
auto *
I = dyn_cast<Instruction>(EffectiveV))
6253 if (
Value *NewV = reproduceInst(
A, QueryingAA, *
I, Ty, CtxI,
Check, VMap))
6254 return ensureType(
A, *NewV, Ty, CtxI,
Check);
6261 Value *NewV = SimplifiedAssociatedValue
6262 ? *SimplifiedAssociatedValue
6264 if (NewV && NewV != &getAssociatedValue()) {
6268 if (reproduceValue(
A, *
this, *NewV, *getAssociatedType(), CtxI,
6270 return reproduceValue(
A, *
this, *NewV, *getAssociatedType(), CtxI,
6279 const IRPosition &IRP,
bool Simplify =
true) {
6280 bool UsedAssumedInformation =
false;
6283 QueryingValueSimplified =
A.getAssumedSimplified(
6285 return unionAssumed(QueryingValueSimplified);
6289 template <
typename AAType>
bool askSimplifiedValueFor(
Attributor &
A) {
6290 if (!getAssociatedValue().
getType()->isIntegerTy())
6295 A.getAAFor<AAType>(*
this, getIRPosition(), DepClassTy::NONE);
6299 std::optional<Constant *> COpt = AA->getAssumedConstant(
A);
6302 SimplifiedAssociatedValue = std::nullopt;
6303 A.recordDependence(*AA, *
this, DepClassTy::OPTIONAL);
6306 if (
auto *
C = *COpt) {
6307 SimplifiedAssociatedValue =
C;
6308 A.recordDependence(*AA, *
this, DepClassTy::OPTIONAL);
6314 bool askSimplifiedValueForOtherAAs(
Attributor &
A) {
6315 if (askSimplifiedValueFor<AAValueConstantRange>(
A))
6317 if (askSimplifiedValueFor<AAPotentialConstantValues>(
A))
6325 for (
auto &U : getAssociatedValue().
uses()) {
6329 if (
auto *
PHI = dyn_cast_or_null<PHINode>(IP))
6330 IP =
PHI->getIncomingBlock(U)->getTerminator();
6331 if (
auto *NewV = manifestReplacementValue(
A, IP)) {
6333 <<
" -> " << *NewV <<
" :: " << *
this <<
"\n");
6334 if (
A.changeUseAfterManifest(U, *NewV))
6335 Changed = ChangeStatus::CHANGED;
6339 return Changed | AAValueSimplify::manifest(
A);
6344 SimplifiedAssociatedValue = &getAssociatedValue();
6345 return AAValueSimplify::indicatePessimisticFixpoint();
6349struct AAValueSimplifyArgument final : AAValueSimplifyImpl {
6351 : AAValueSimplifyImpl(IRP,
A) {}
6354 AAValueSimplifyImpl::initialize(
A);
6355 if (
A.hasAttr(getIRPosition(),
6356 {Attribute::InAlloca, Attribute::Preallocated,
6357 Attribute::StructRet, Attribute::Nest, Attribute::ByVal},
6359 indicatePessimisticFixpoint();
6366 Argument *Arg = getAssociatedArgument();
6372 return indicatePessimisticFixpoint();
6375 auto Before = SimplifiedAssociatedValue;
6389 bool UsedAssumedInformation =
false;
6390 std::optional<Constant *> SimpleArgOp =
6391 A.getAssumedConstant(ACSArgPos, *
this, UsedAssumedInformation);
6398 return unionAssumed(*SimpleArgOp);
6403 bool UsedAssumedInformation =
false;
6404 if (hasCallBaseContext() &&
6405 getCallBaseContext()->getCalledOperand() == Arg->
getParent())
6409 Success =
A.checkForAllCallSites(PredForCallSite, *
this,
true,
6410 UsedAssumedInformation);
6413 if (!askSimplifiedValueForOtherAAs(
A))
6414 return indicatePessimisticFixpoint();
6417 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6418 : ChangeStatus ::CHANGED;
6422 void trackStatistics()
const override {
6427struct AAValueSimplifyReturned : AAValueSimplifyImpl {
6429 : AAValueSimplifyImpl(IRP,
A) {}
6432 std::optional<Value *>
6433 getAssumedSimplifiedValue(
Attributor &
A)
const override {
6434 if (!isValidState())
6436 return SimplifiedAssociatedValue;
6441 auto Before = SimplifiedAssociatedValue;
6444 auto &RI = cast<ReturnInst>(
I);
6445 return checkAndUpdate(
6450 bool UsedAssumedInformation =
false;
6451 if (!
A.checkForAllInstructions(ReturnInstCB, *
this, {Instruction::Ret},
6452 UsedAssumedInformation))
6453 if (!askSimplifiedValueForOtherAAs(
A))
6454 return indicatePessimisticFixpoint();
6457 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6458 : ChangeStatus ::CHANGED;
6464 return ChangeStatus::UNCHANGED;
6468 void trackStatistics()
const override {
6473struct AAValueSimplifyFloating : AAValueSimplifyImpl {
6475 : AAValueSimplifyImpl(IRP,
A) {}
6479 AAValueSimplifyImpl::initialize(
A);
6480 Value &
V = getAnchorValue();
6483 if (isa<Constant>(V))
6484 indicatePessimisticFixpoint();
6489 auto Before = SimplifiedAssociatedValue;
6490 if (!askSimplifiedValueForOtherAAs(
A))
6491 return indicatePessimisticFixpoint();
6494 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6495 : ChangeStatus ::CHANGED;
6499 void trackStatistics()
const override {
6504struct AAValueSimplifyFunction : AAValueSimplifyImpl {
6506 : AAValueSimplifyImpl(IRP,
A) {}
6510 SimplifiedAssociatedValue =
nullptr;
6511 indicateOptimisticFixpoint();
6516 "AAValueSimplify(Function|CallSite)::updateImpl will not be called");
6519 void trackStatistics()
const override {
6524struct AAValueSimplifyCallSite : AAValueSimplifyFunction {
6526 : AAValueSimplifyFunction(IRP,
A) {}
6528 void trackStatistics()
const override {
6533struct AAValueSimplifyCallSiteReturned : AAValueSimplifyImpl {
6535 : AAValueSimplifyImpl(IRP,
A) {}
6538 AAValueSimplifyImpl::initialize(
A);
6539 Function *Fn = getAssociatedFunction();
6540 assert(Fn &&
"Did expect an associted function");
6546 checkAndUpdate(
A, *
this, IRP))
6547 indicateOptimisticFixpoint();
6549 indicatePessimisticFixpoint();
6557 return indicatePessimisticFixpoint();
6560 void trackStatistics()
const override {
6565struct AAValueSimplifyCallSiteArgument : AAValueSimplifyFloating {
6567 : AAValueSimplifyFloating(IRP,
A) {}
6575 if (FloatAA && FloatAA->getState().isValidState())
6578 if (
auto *NewV = manifestReplacementValue(
A, getCtxI())) {
6579 Use &
U = cast<CallBase>(&getAnchorValue())
6580 ->getArgOperandUse(getCallSiteArgNo());
6581 if (
A.changeUseAfterManifest(U, *NewV))
6582 Changed = ChangeStatus::CHANGED;
6585 return Changed | AAValueSimplify::manifest(
A);
6588 void trackStatistics()
const override {
6598 struct AllocationInfo {
6610 }
Status = STACK_DUE_TO_USE;
6614 bool HasPotentiallyFreeingUnknownUses =
false;
6618 bool MoveAllocaIntoEntry =
true;
6624 struct DeallocationInfo {
6632 bool MightFreeUnknownObjects =
false;
6641 ~AAHeapToStackFunction() {
6644 for (
auto &It : AllocationInfos)
6645 It.second->~AllocationInfo();
6646 for (
auto &It : DeallocationInfos)
6647 It.second->~DeallocationInfo();
6651 AAHeapToStack::initialize(
A);
6654 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6661 DeallocationInfos[CB] =
new (
A.Allocator) DeallocationInfo{CB, FreedOp};
6670 AllocationInfo *AI =
new (
A.Allocator) AllocationInfo{CB};
6671 AllocationInfos[CB] = AI;
6673 TLI->getLibFunc(*CB, AI->LibraryFunctionId);
6679 bool UsedAssumedInformation =
false;
6680 bool Success =
A.checkForAllCallLikeInstructions(
6681 AllocationIdentifierCB, *
this, UsedAssumedInformation,
6685 assert(
Success &&
"Did not expect the call base visit callback to fail!");
6689 bool &) -> std::optional<Value *> {
return nullptr; };
6690 for (
const auto &It : AllocationInfos)
6693 for (
const auto &It : DeallocationInfos)
6698 const std::string getAsStr(
Attributor *
A)
const override {
6699 unsigned NumH2SMallocs = 0, NumInvalidMallocs = 0;
6700 for (
const auto &It : AllocationInfos) {
6701 if (It.second->Status == AllocationInfo::INVALID)
6702 ++NumInvalidMallocs;
6706 return "[H2S] Mallocs Good/Bad: " + std::to_string(NumH2SMallocs) +
"/" +
6707 std::to_string(NumInvalidMallocs);
6711 void trackStatistics()
const override {
6714 "Number of malloc/calloc/aligned_alloc calls converted to allocas");
6715 for (
const auto &It : AllocationInfos)
6716 if (It.second->Status != AllocationInfo::INVALID)
6720 bool isAssumedHeapToStack(
const CallBase &CB)
const override {
6722 if (AllocationInfo *AI =
6723 AllocationInfos.lookup(
const_cast<CallBase *
>(&CB)))
6724 return AI->Status != AllocationInfo::INVALID;
6728 bool isAssumedHeapToStackRemovedFree(
CallBase &CB)
const override {
6729 if (!isValidState())
6732 for (
const auto &It : AllocationInfos) {
6733 AllocationInfo &AI = *It.second;
6734 if (AI.Status == AllocationInfo::INVALID)
6737 if (AI.PotentialFreeCalls.count(&CB))
6745 assert(getState().isValidState() &&
6746 "Attempted to manifest an invalid state!");
6750 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6752 for (
auto &It : AllocationInfos) {
6753 AllocationInfo &AI = *It.second;
6754 if (AI.Status == AllocationInfo::INVALID)
6757 for (
CallBase *FreeCall : AI.PotentialFreeCalls) {
6758 LLVM_DEBUG(
dbgs() <<
"H2S: Removing free call: " << *FreeCall <<
"\n");
6759 A.deleteAfterManifest(*FreeCall);
6760 HasChanged = ChangeStatus::CHANGED;
6763 LLVM_DEBUG(
dbgs() <<
"H2S: Removing malloc-like call: " << *AI.CB
6768 if (TLI->getLibFunc(*AI.CB, IsAllocShared))
6769 if (IsAllocShared == LibFunc___kmpc_alloc_shared)
6770 return OR <<
"Moving globalized variable to the stack.";
6771 return OR <<
"Moving memory allocation from the heap to the stack.";
6773 if (AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared)
6780 std::optional<APInt> SizeAPI =
getSize(
A, *
this, AI);
6782 Size = ConstantInt::get(AI.CB->getContext(), *SizeAPI);
6789 cast<ConstantInt>(SizeOffsetPair.
Offset)->isZero());
6794 ?
F->getEntryBlock().begin()
6795 : AI.CB->getIterator();
6798 if (
MaybeAlign RetAlign = AI.CB->getRetAlign())
6799 Alignment = std::max(Alignment, *RetAlign);
6801 std::optional<APInt> AlignmentAPI = getAPInt(
A, *
this, *
Align);
6802 assert(AlignmentAPI && AlignmentAPI->getZExtValue() > 0 &&
6803 "Expected an alignment during manifest!");
6805 std::max(Alignment,
assumeAligned(AlignmentAPI->getZExtValue()));
6809 unsigned AS =
DL.getAllocaAddrSpace();
6812 AI.CB->getName() +
".h2s", IP);
6814 if (Alloca->
getType() != AI.CB->getType())
6815 Alloca = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
6816 Alloca, AI.CB->getType(),
"malloc_cast", AI.CB->getIterator());
6821 "Must be able to materialize initial memory state of allocation");
6825 if (
auto *
II = dyn_cast<InvokeInst>(AI.CB)) {
6826 auto *NBB =
II->getNormalDest();
6828 A.deleteAfterManifest(*AI.CB);
6830 A.deleteAfterManifest(*AI.CB);
6836 if (!isa<UndefValue>(InitVal)) {
6839 Builder.CreateMemSet(Alloca, InitVal,
Size, std::nullopt);
6841 HasChanged = ChangeStatus::CHANGED;
6849 bool UsedAssumedInformation =
false;
6850 std::optional<Constant *> SimpleV =
6851 A.getAssumedConstant(V, AA, UsedAssumedInformation);
6853 return APInt(64, 0);
6854 if (
auto *CI = dyn_cast_or_null<ConstantInt>(*SimpleV))
6855 return CI->getValue();
6856 return std::nullopt;
6860 AllocationInfo &AI) {
6861 auto Mapper = [&](
const Value *
V) ->
const Value * {
6862 bool UsedAssumedInformation =
false;
6863 if (std::optional<Constant *> SimpleV =
6864 A.getAssumedConstant(*V, AA, UsedAssumedInformation))
6871 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6889 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6891 const auto *LivenessAA =
6895 A.getInfoCache().getMustBeExecutedContextExplorer();
6897 bool StackIsAccessibleByOtherThreads =
6898 A.getInfoCache().stackIsAccessibleByOtherThreads();
6901 A.getInfoCache().getAnalysisResultForFunction<
LoopAnalysis>(*F);
6902 std::optional<bool> MayContainIrreducibleControl;
6904 if (&
F->getEntryBlock() == &BB)
6906 if (!MayContainIrreducibleControl.has_value())
6908 if (*MayContainIrreducibleControl)
6917 bool HasUpdatedFrees =
false;
6919 auto UpdateFrees = [&]() {
6920 HasUpdatedFrees =
true;
6922 for (
auto &It : DeallocationInfos) {
6923 DeallocationInfo &DI = *It.second;
6926 if (DI.MightFreeUnknownObjects)
6930 bool UsedAssumedInformation =
false;
6931 if (
A.isAssumedDead(*DI.CB,
this, LivenessAA, UsedAssumedInformation,
6938 LLVM_DEBUG(
dbgs() <<
"[H2S] Unknown underlying object for free!\n");
6939 DI.MightFreeUnknownObjects =
true;
6945 if (isa<ConstantPointerNull>(Obj) || isa<UndefValue>(Obj))
6948 CallBase *ObjCB = dyn_cast<CallBase>(Obj);
6952 DI.MightFreeUnknownObjects =
true;
6956 AllocationInfo *AI = AllocationInfos.lookup(ObjCB);
6958 LLVM_DEBUG(
dbgs() <<
"[H2S] Free of a non-allocation object: " << *Obj
6960 DI.MightFreeUnknownObjects =
true;
6964 DI.PotentialAllocationCalls.insert(ObjCB);
6968 auto FreeCheck = [&](AllocationInfo &AI) {
6972 if (!StackIsAccessibleByOtherThreads) {
6974 if (!AA::hasAssumedIRAttr<Attribute::NoSync>(
6977 dbgs() <<
"[H2S] found an escaping use, stack is not accessible by "
6978 "other threads and function is not nosync:\n");
6982 if (!HasUpdatedFrees)
6986 if (AI.PotentialFreeCalls.size() != 1) {
6988 << AI.PotentialFreeCalls.size() <<
"\n");
6991 CallBase *UniqueFree = *AI.PotentialFreeCalls.begin();
6992 DeallocationInfo *DI = DeallocationInfos.lookup(UniqueFree);
6995 dbgs() <<
"[H2S] unique free call was not known as deallocation call "
6996 << *UniqueFree <<
"\n");
6999 if (DI->MightFreeUnknownObjects) {
7001 dbgs() <<
"[H2S] unique free call might free unknown allocations\n");
7004 if (DI->PotentialAllocationCalls.empty())
7006 if (DI->PotentialAllocationCalls.size() > 1) {
7008 << DI->PotentialAllocationCalls.size()
7009 <<
" different allocations\n");
7012 if (*DI->PotentialAllocationCalls.begin() != AI.CB) {
7015 <<
"[H2S] unique free call not known to free this allocation but "
7016 << **DI->PotentialAllocationCalls.begin() <<
"\n");
7021 if (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared) {
7023 if (!Explorer || !Explorer->findInContextOf(UniqueFree, CtxI)) {
7024 LLVM_DEBUG(
dbgs() <<
"[H2S] unique free call might not be executed "
7025 "with the allocation "
7026 << *UniqueFree <<
"\n");
7033 auto UsesCheck = [&](AllocationInfo &AI) {
7034 bool ValidUsesOnly =
true;
7036 auto Pred = [&](
const Use &
U,
bool &Follow) ->
bool {
7038 if (isa<LoadInst>(UserI))
7040 if (
auto *SI = dyn_cast<StoreInst>(UserI)) {
7041 if (
SI->getValueOperand() ==
U.get()) {
7043 <<
"[H2S] escaping store to memory: " << *UserI <<
"\n");
7044 ValidUsesOnly =
false;
7050 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
7053 if (DeallocationInfos.count(CB)) {
7054 AI.PotentialFreeCalls.insert(CB);
7061 bool IsKnownNoCapture;
7062 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
7067 bool IsAssumedNoFree = AA::hasAssumedIRAttr<Attribute::NoFree>(
7070 if (!IsAssumedNoCapture ||
7071 (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared &&
7072 !IsAssumedNoFree)) {
7073 AI.HasPotentiallyFreeingUnknownUses |= !IsAssumedNoFree;
7078 <<
"Could not move globalized variable to the stack. "
7079 "Variable is potentially captured in call. Mark "
7080 "parameter as `__attribute__((noescape))` to override.";
7083 if (ValidUsesOnly &&
7084 AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared)
7088 ValidUsesOnly =
false;
7093 if (isa<GetElementPtrInst>(UserI) || isa<BitCastInst>(UserI) ||
7094 isa<PHINode>(UserI) || isa<SelectInst>(UserI)) {
7101 ValidUsesOnly =
false;
7104 if (!
A.checkForAllUses(Pred, *
this, *AI.CB,
false,
7106 [&](
const Use &OldU,
const Use &NewU) {
7107 auto *SI = dyn_cast<StoreInst>(OldU.getUser());
7108 return !SI || StackIsAccessibleByOtherThreads ||
7109 AA::isAssumedThreadLocalObject(
7110 A, *SI->getPointerOperand(), *this);
7113 return ValidUsesOnly;
7118 for (
auto &It : AllocationInfos) {
7119 AllocationInfo &AI = *It.second;
7120 if (AI.Status == AllocationInfo::INVALID)
7124 std::optional<APInt> APAlign = getAPInt(
A, *
this, *
Align);
7128 LLVM_DEBUG(
dbgs() <<
"[H2S] Unknown allocation alignment: " << *AI.CB
7130 AI.Status = AllocationInfo::INVALID;
7135 !APAlign->isPowerOf2()) {
7136 LLVM_DEBUG(
dbgs() <<
"[H2S] Invalid allocation alignment: " << APAlign
7138 AI.Status = AllocationInfo::INVALID;
7145 if (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared &&
7150 dbgs() <<
"[H2S] Unknown allocation size: " << *AI.CB <<
"\n";
7152 dbgs() <<
"[H2S] Allocation size too large: " << *AI.CB <<
" vs. "
7156 AI.Status = AllocationInfo::INVALID;
7162 switch (AI.Status) {
7163 case AllocationInfo::STACK_DUE_TO_USE:
7166 AI.Status = AllocationInfo::STACK_DUE_TO_FREE;
7168 case AllocationInfo::STACK_DUE_TO_FREE:
7171 AI.Status = AllocationInfo::INVALID;
7174 case AllocationInfo::INVALID:
7181 bool IsGlobalizedLocal =
7182 AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared;
7183 if (AI.MoveAllocaIntoEntry &&
7184 (!
Size.has_value() ||
7185 (!IsGlobalizedLocal && IsInLoop(*AI.CB->getParent()))))
7186 AI.MoveAllocaIntoEntry =
false;
7200 AAPrivatizablePtr::indicatePessimisticFixpoint();
7201 PrivatizableType =
nullptr;
7202 return ChangeStatus::CHANGED;
7208 virtual std::optional<Type *> identifyPrivatizableType(
Attributor &
A) = 0;
7212 std::optional<Type *> combineTypes(std::optional<Type *> T0,
7213 std::optional<Type *>
T1) {
7223 std::optional<Type *> getPrivatizableType()
const override {
7224 return PrivatizableType;
7227 const std::string getAsStr(
Attributor *
A)
const override {
7228 return isAssumedPrivatizablePtr() ?
"[priv]" :
"[no-priv]";
7232 std::optional<Type *> PrivatizableType;
7237struct AAPrivatizablePtrArgument final :
public AAPrivatizablePtrImpl {
7239 : AAPrivatizablePtrImpl(IRP,
A) {}
7242 std::optional<Type *> identifyPrivatizableType(
Attributor &
A)
override {
7245 bool UsedAssumedInformation =
false;
7247 A.getAttrs(getIRPosition(), {Attribute::ByVal},
Attrs,
7249 if (!
Attrs.empty() &&
7251 true, UsedAssumedInformation))
7252 return Attrs[0].getValueAsType();
7254 std::optional<Type *> Ty;
7255 unsigned ArgNo = getIRPosition().getCallSiteArgNo();
7278 dbgs() <<
"[AAPrivatizablePtr] ACSPos: " << ACSArgPos <<
", CSTy: ";
7282 dbgs() <<
"<nullptr>";
7287 Ty = combineTypes(Ty, CSTy);
7290 dbgs() <<
" : New Type: ";
7292 (*Ty)->print(
dbgs());
7294 dbgs() <<
"<nullptr>";
7303 if (!
A.checkForAllCallSites(CallSiteCheck, *
this,
true,
7304 UsedAssumedInformation))
7311 PrivatizableType = identifyPrivatizableType(
A);
7312 if (!PrivatizableType)
7313 return ChangeStatus::UNCHANGED;
7314 if (!*PrivatizableType)
7315 return indicatePessimisticFixpoint();
7320 DepClassTy::OPTIONAL);
7323 if (!
A.hasAttr(getIRPosition(), Attribute::ByVal) &&
7326 return indicatePessimisticFixpoint();
7332 identifyReplacementTypes(*PrivatizableType, ReplacementTypes);
7336 Function &Fn = *getIRPosition().getAnchorScope();
7340 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] Missing TTI for function "
7342 return indicatePessimisticFixpoint();
7352 bool UsedAssumedInformation =
false;
7353 if (!
A.checkForAllCallSites(CallSiteCheck, *
this,
true,
7354 UsedAssumedInformation)) {
7356 dbgs() <<
"[AAPrivatizablePtr] ABI incompatibility detected for "
7358 return indicatePessimisticFixpoint();
7362 Argument *Arg = getAssociatedArgument();
7363 if (!
A.isValidFunctionSignatureRewrite(*Arg, ReplacementTypes)) {
7365 return indicatePessimisticFixpoint();
7372 auto IsCompatiblePrivArgOfCallback = [&](
CallBase &CB) {
7375 for (
const Use *U : CallbackUses) {
7377 assert(CBACS && CBACS.isCallbackCall());
7378 for (
Argument &CBArg : CBACS.getCalledFunction()->args()) {
7379 int CBArgNo = CBACS.getCallArgOperandNo(CBArg);
7383 <<
"[AAPrivatizablePtr] Argument " << *Arg
7384 <<
"check if can be privatized in the context of its parent ("
7386 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7388 << CBArgNo <<
"@" << CBACS.getCalledFunction()->getName()
7389 <<
")\n[AAPrivatizablePtr] " << CBArg <<
" : "
7390 << CBACS.getCallArgOperand(CBArg) <<
" vs "
7392 <<
"[AAPrivatizablePtr] " << CBArg <<
" : "
7393 << CBACS.getCallArgOperandNo(CBArg) <<
" vs " << ArgNo <<
"\n";
7396 if (CBArgNo !=
int(ArgNo))
7400 if (CBArgPrivAA && CBArgPrivAA->isValidState()) {
7404 if (*CBArgPrivTy == PrivatizableType)
7409 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7410 <<
" cannot be privatized in the context of its parent ("
7412 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7414 << CBArgNo <<
"@" << CBACS.getCalledFunction()->getName()
7415 <<
").\n[AAPrivatizablePtr] for which the argument "
7416 "privatization is not compatible.\n";
7430 "Expected a direct call operand for callback call operand");
7435 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7436 <<
" check if be privatized in the context of its parent ("
7438 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7440 << DCArgNo <<
"@" << DCCallee->
getName() <<
").\n";
7443 if (
unsigned(DCArgNo) < DCCallee->
arg_size()) {
7446 DepClassTy::REQUIRED);
7447 if (DCArgPrivAA && DCArgPrivAA->isValidState()) {
7451 if (*DCArgPrivTy == PrivatizableType)
7457 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7458 <<
" cannot be privatized in the context of its parent ("
7460 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7463 <<
").\n[AAPrivatizablePtr] for which the argument "
7464 "privatization is not compatible.\n";
7476 return IsCompatiblePrivArgOfDirectCS(ACS);
7480 if (!
A.checkForAllCallSites(IsCompatiblePrivArgOfOtherCallSite, *
this,
true,
7481 UsedAssumedInformation))
7482 return indicatePessimisticFixpoint();
7484 return ChangeStatus::UNCHANGED;
7490 identifyReplacementTypes(
Type *PrivType,
7494 assert(PrivType &&
"Expected privatizable type!");
7497 if (
auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
7498 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++)
7499 ReplacementTypes.
push_back(PrivStructType->getElementType(u));
7500 }
else if (
auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
7501 ReplacementTypes.
append(PrivArrayType->getNumElements(),
7502 PrivArrayType->getElementType());
7513 assert(PrivType &&
"Expected privatizable type!");
7519 if (
auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
7520 const StructLayout *PrivStructLayout =
DL.getStructLayout(PrivStructType);
7521 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
7526 }
else if (
auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
7527 Type *PointeeTy = PrivArrayType->getElementType();
7528 uint64_t PointeeTySize =
DL.getTypeStoreSize(PointeeTy);
7529 for (
unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
7540 void createReplacementValues(
Align Alignment,
Type *PrivType,
7544 assert(PrivType &&
"Expected privatizable type!");
7551 if (
auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
7552 const StructLayout *PrivStructLayout =
DL.getStructLayout(PrivStructType);
7553 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
7554 Type *PointeeTy = PrivStructType->getElementType(u);
7558 L->setAlignment(Alignment);
7561 }
else if (
auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
7562 Type *PointeeTy = PrivArrayType->getElementType();
7563 uint64_t PointeeTySize =
DL.getTypeStoreSize(PointeeTy);
7564 for (
unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
7567 L->setAlignment(Alignment);
7572 L->setAlignment(Alignment);
7579 if (!PrivatizableType)
7580 return ChangeStatus::UNCHANGED;
7581 assert(*PrivatizableType &&
"Expected privatizable type!");
7587 bool UsedAssumedInformation =
false;
7588 if (!
A.checkForAllInstructions(
7590 CallInst &CI = cast<CallInst>(I);
7591 if (CI.isTailCall())
7592 TailCalls.push_back(&CI);
7595 *
this, {Instruction::Call}, UsedAssumedInformation))
7596 return ChangeStatus::UNCHANGED;
7598 Argument *Arg = getAssociatedArgument();
7601 const auto *AlignAA =
7610 BasicBlock &EntryBB = ReplacementFn.getEntryBlock();
7613 unsigned AS =
DL.getAllocaAddrSpace();
7615 Arg->
getName() +
".priv", IP);
7616 createInitialization(*PrivatizableType, *AI, ReplacementFn,
7617 ArgIt->getArgNo(), IP);
7620 AI = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
7625 CI->setTailCall(
false);
7636 createReplacementValues(
7637 AlignAA ? AlignAA->getAssumedAlign() :
Align(0),
7638 *PrivatizableType, ACS,
7646 identifyReplacementTypes(*PrivatizableType, ReplacementTypes);
7649 if (
A.registerFunctionSignatureRewrite(*Arg, ReplacementTypes,
7650 std::move(FnRepairCB),
7651 std::move(ACSRepairCB)))
7652 return ChangeStatus::CHANGED;
7653 return ChangeStatus::UNCHANGED;
7657 void trackStatistics()
const override {
7662struct AAPrivatizablePtrFloating :
public AAPrivatizablePtrImpl {
7664 : AAPrivatizablePtrImpl(IRP,
A) {}
7669 indicatePessimisticFixpoint();
7674 "updateImpl will not be called");
7678 std::optional<Type *> identifyPrivatizableType(
Attributor &
A)
override {
7681 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] No underlying object found!\n");
7685 if (
auto *AI = dyn_cast<AllocaInst>(Obj))
7686 if (
auto *CI = dyn_cast<ConstantInt>(AI->getArraySize()))
7688 return AI->getAllocatedType();
7689 if (
auto *Arg = dyn_cast<Argument>(Obj)) {
7692 if (PrivArgAA && PrivArgAA->isAssumedPrivatizablePtr())
7696 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] Underlying object neither valid "
7697 "alloca nor privatizable argument: "
7703 void trackStatistics()
const override {
7708struct AAPrivatizablePtrCallSiteArgument final
7709 :
public AAPrivatizablePtrFloating {
7711 : AAPrivatizablePtrFloating(IRP,
A) {}
7715 if (
A.hasAttr(getIRPosition(), Attribute::ByVal))
7716 indicateOptimisticFixpoint();
7721 PrivatizableType = identifyPrivatizableType(
A);
7722 if (!PrivatizableType)
7723 return ChangeStatus::UNCHANGED;
7724 if (!*PrivatizableType)
7725 return indicatePessimisticFixpoint();
7728 bool IsKnownNoCapture;
7729 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
7730 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoCapture);
7731 if (!IsAssumedNoCapture) {
7732 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer might be captured!\n");
7733 return indicatePessimisticFixpoint();
7736 bool IsKnownNoAlias;
7737 if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
7738 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
7739 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer might alias!\n");
7740 return indicatePessimisticFixpoint();
7745 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer is written!\n");
7746 return indicatePessimisticFixpoint();
7749 return ChangeStatus::UNCHANGED;
7753 void trackStatistics()
const override {
7758struct AAPrivatizablePtrCallSiteReturned final
7759 :
public AAPrivatizablePtrFloating {
7761 : AAPrivatizablePtrFloating(IRP,
A) {}
7766 indicatePessimisticFixpoint();
7770 void trackStatistics()
const override {
7775struct AAPrivatizablePtrReturned final :
public AAPrivatizablePtrFloating {
7777 : AAPrivatizablePtrFloating(IRP,
A) {}
7782 indicatePessimisticFixpoint();
7786 void trackStatistics()
const override {
7802 intersectAssumedBits(BEST_STATE);
7803 getKnownStateFromValue(
A, getIRPosition(), getState());
7804 AAMemoryBehavior::initialize(
A);
7810 bool IgnoreSubsumingPositions =
false) {
7812 A.getAttrs(IRP, AttrKinds, Attrs, IgnoreSubsumingPositions);
7814 switch (Attr.getKindAsEnum()) {
7815 case Attribute::ReadNone:
7818 case Attribute::ReadOnly:
7821 case Attribute::WriteOnly:
7830 if (!
I->mayReadFromMemory())
7832 if (!
I->mayWriteToMemory())
7845 else if (isAssumedWriteOnly())
7854 if (
A.hasAttr(IRP, Attribute::ReadNone,
7856 return ChangeStatus::UNCHANGED;
7865 return ChangeStatus::UNCHANGED;
7868 A.removeAttrs(IRP, AttrKinds);
7871 A.removeAttrs(IRP, Attribute::Writable);
7878 const std::string getAsStr(
Attributor *
A)
const override {
7883 if (isAssumedWriteOnly())
7885 return "may-read/write";
7893 Attribute::ReadNone, Attribute::ReadOnly, Attribute::WriteOnly};
7896struct AAMemoryBehaviorFloating : AAMemoryBehaviorImpl {
7898 : AAMemoryBehaviorImpl(IRP,
A) {}
7904 void trackStatistics()
const override {
7909 else if (isAssumedWriteOnly())
7924struct AAMemoryBehaviorArgument : AAMemoryBehaviorFloating {
7926 : AAMemoryBehaviorFloating(IRP,
A) {}
7930 intersectAssumedBits(BEST_STATE);
7935 bool HasByVal =
A.hasAttr(IRP, {Attribute::ByVal},
7937 getKnownStateFromValue(
A, IRP, getState(),
7944 return ChangeStatus::UNCHANGED;
7948 if (
A.hasAttr(getIRPosition(),
7949 {Attribute::InAlloca, Attribute::Preallocated})) {
7950 removeKnownBits(NO_WRITES);
7951 removeAssumedBits(NO_WRITES);
7953 A.removeAttrs(getIRPosition(), AttrKinds);
7954 return AAMemoryBehaviorFloating::manifest(
A);
7958 void trackStatistics()
const override {
7963 else if (isAssumedWriteOnly())
7968struct AAMemoryBehaviorCallSiteArgument final : AAMemoryBehaviorArgument {
7970 : AAMemoryBehaviorArgument(IRP,
A) {}
7976 Argument *Arg = getAssociatedArgument();
7978 indicatePessimisticFixpoint();
7982 addKnownBits(NO_WRITES);
7983 removeKnownBits(NO_READS);
7984 removeAssumedBits(NO_READS);
7986 AAMemoryBehaviorArgument::initialize(
A);
7987 if (getAssociatedFunction()->isDeclaration())
7988 indicatePessimisticFixpoint();
7997 Argument *Arg = getAssociatedArgument();
8002 return indicatePessimisticFixpoint();
8007 void trackStatistics()
const override {
8012 else if (isAssumedWriteOnly())
8018struct AAMemoryBehaviorCallSiteReturned final : AAMemoryBehaviorFloating {
8020 : AAMemoryBehaviorFloating(IRP,
A) {}
8024 AAMemoryBehaviorImpl::initialize(
A);
8029 return ChangeStatus::UNCHANGED;
8033 void trackStatistics()
const override {}
8037struct AAMemoryBehaviorFunction final :
public AAMemoryBehaviorImpl {
8039 : AAMemoryBehaviorImpl(IRP,
A) {}
8049 Function &
F = cast<Function>(getAnchorValue());
8055 else if (isAssumedWriteOnly())
8058 A.removeAttrs(getIRPosition(), AttrKinds);
8063 return A.manifestAttrs(getIRPosition(),
8068 void trackStatistics()
const override {
8073 else if (isAssumedWriteOnly())
8079struct AAMemoryBehaviorCallSite final
8080 : AACalleeToCallSite<AAMemoryBehavior, AAMemoryBehaviorImpl> {
8087 CallBase &CB = cast<CallBase>(getAnchorValue());
8093 else if (isAssumedWriteOnly())
8096 A.removeAttrs(getIRPosition(), AttrKinds);
8101 Attribute::Writable);
8102 return A.manifestAttrs(
8107 void trackStatistics()
const override {
8112 else if (isAssumedWriteOnly())
8120 auto AssumedState = getAssumed();
8126 if (
const auto *CB = dyn_cast<CallBase>(&
I)) {
8129 if (MemBehaviorAA) {
8130 intersectAssumedBits(MemBehaviorAA->
getAssumed());
8131 return !isAtFixpoint();
8136 if (
I.mayReadFromMemory())
8137 removeAssumedBits(NO_READS);
8138 if (
I.mayWriteToMemory())
8139 removeAssumedBits(NO_WRITES);
8140 return !isAtFixpoint();
8143 bool UsedAssumedInformation =
false;
8144 if (!
A.checkForAllReadWriteInstructions(CheckRWInst, *
this,
8145 UsedAssumedInformation))
8146 return indicatePessimisticFixpoint();
8165 const auto *FnMemAA =
8169 S.addKnownBits(FnMemAA->getKnown());
8170 if ((S.getAssumed() & FnMemAA->getAssumed()) == S.getAssumed())
8176 auto AssumedState = S.getAssumed();
8182 bool IsKnownNoCapture;
8184 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
8188 if (!IsAssumedNoCapture &&
8190 S.intersectAssumedBits(FnMemAssumedState);
8196 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
8198 LLVM_DEBUG(
dbgs() <<
"[AAMemoryBehavior] Use: " << *U <<
" in " << *UserI
8206 Follow = followUsersOfUseIn(
A, U, UserI);
8210 analyzeUseIn(
A, U, UserI);
8212 return !isAtFixpoint();
8215 if (!
A.checkForAllUses(UsePred, *
this, getAssociatedValue()))
8216 return indicatePessimisticFixpoint();
8222bool AAMemoryBehaviorFloating::followUsersOfUseIn(
Attributor &
A,
const Use &U,
8226 if (isa<LoadInst>(UserI) || isa<ReturnInst>(UserI))
8231 const auto *CB = dyn_cast<CallBase>(UserI);
8241 if (
U.get()->getType()->isPointerTy()) {
8243 bool IsKnownNoCapture;
8244 return !AA::hasAssumedIRAttr<Attribute::NoCapture>(
8252void AAMemoryBehaviorFloating::analyzeUseIn(
Attributor &
A,
const Use &U,
8260 case Instruction::Load:
8262 removeAssumedBits(NO_READS);
8265 case Instruction::Store:
8270 removeAssumedBits(NO_WRITES);
8272 indicatePessimisticFixpoint();
8275 case Instruction::Call:
8276 case Instruction::CallBr:
8277 case Instruction::Invoke: {
8280 const auto *CB = cast<CallBase>(UserI);
8284 indicatePessimisticFixpoint();
8291 removeAssumedBits(NO_READS);
8298 if (
U.get()->getType()->isPointerTy())
8302 const auto *MemBehaviorAA =
8308 intersectAssumedBits(MemBehaviorAA->
getAssumed());
8316 removeAssumedBits(NO_READS);
8318 removeAssumedBits(NO_WRITES);
8330 return "all memory";
8333 std::string S =
"memory:";
8339 S +=
"internal global,";
8341 S +=
"external global,";
8345 S +=
"inaccessible,";
8359 AccessKind2Accesses.fill(
nullptr);
8362 ~AAMemoryLocationImpl() {
8365 for (AccessSet *AS : AccessKind2Accesses)
8372 intersectAssumedBits(BEST_STATE);
8373 getKnownStateFromValue(
A, getIRPosition(), getState());
8374 AAMemoryLocation::initialize(
A);
8380 bool IgnoreSubsumingPositions =
false) {
8389 bool UseArgMemOnly =
true;
8391 if (AnchorFn &&
A.isRunOn(*AnchorFn))
8395 A.getAttrs(IRP, {Attribute::Memory},
Attrs, IgnoreSubsumingPositions);
8404 State.
addKnownBits(inverseLocation(NO_INACCESSIBLE_MEM,
true,
true));
8409 State.
addKnownBits(inverseLocation(NO_ARGUMENT_MEM,
true,
true));
8413 A.manifestAttrs(IRP,
8423 NO_INACCESSIBLE_MEM | NO_ARGUMENT_MEM,
true,
true));
8427 A.manifestAttrs(IRP,
8446 else if (isAssumedInaccessibleMemOnly())
8449 else if (isAssumedArgMemOnly())
8452 else if (isAssumedInaccessibleOrArgMemOnly())
8467 if (DeducedAttrs.
size() != 1)
8468 return ChangeStatus::UNCHANGED;
8476 bool checkForAllAccessesToMemoryKind(
8478 MemoryLocationsKind)>
8480 MemoryLocationsKind RequestedMLK)
const override {
8481 if (!isValidState())
8484 MemoryLocationsKind AssumedMLK = getAssumedNotAccessedLocation();
8485 if (AssumedMLK == NO_LOCATIONS)
8489 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS;
8490 CurMLK *= 2, ++
Idx) {
8491 if (CurMLK & RequestedMLK)
8494 if (
const AccessSet *Accesses = AccessKind2Accesses[
Idx])
8495 for (
const AccessInfo &AI : *Accesses)
8496 if (!Pred(AI.I, AI.Ptr, AI.Kind, CurMLK))
8508 bool Changed =
false;
8509 MemoryLocationsKind KnownMLK = getKnown();
8510 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
8511 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2)
8512 if (!(CurMLK & KnownMLK))
8513 updateStateAndAccessesMap(getState(), CurMLK,
I,
nullptr, Changed,
8514 getAccessKindFromInst(
I));
8515 return AAMemoryLocation::indicatePessimisticFixpoint();
8535 bool operator()(
const AccessInfo &
LHS,
const AccessInfo &
RHS)
const {
8539 return LHS.Ptr <
RHS.Ptr;
8540 if (
LHS.Kind !=
RHS.Kind)
8541 return LHS.Kind <
RHS.Kind;
8549 std::array<AccessSet *, llvm::CTLog2<VALID_STATE>()> AccessKind2Accesses;
8566 AK =
I->mayReadFromMemory() ? READ :
NONE;
8584 Changed |= Accesses->insert(AccessInfo{
I,
Ptr, AK}).second;
8585 if (MLK == NO_UNKOWN_MEM)
8587 State.removeAssumedBits(MLK);
8594 unsigned AccessAS = 0);
8600void AAMemoryLocationImpl::categorizePtrValue(
8603 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Categorize pointer locations for "
8605 << getMemoryLocationsAsStr(State.getAssumed()) <<
"]\n");
8607 auto Pred = [&](
Value &Obj) {
8610 MemoryLocationsKind MLK = NO_LOCATIONS;
8620 if (isa<UndefValue>(&Obj))
8622 if (isa<Argument>(&Obj)) {
8629 MLK = NO_ARGUMENT_MEM;
8630 }
else if (
auto *GV = dyn_cast<GlobalValue>(&Obj)) {
8634 if (
auto *GVar = dyn_cast<GlobalVariable>(GV))
8635 if (GVar->isConstant())
8638 if (GV->hasLocalLinkage())
8639 MLK = NO_GLOBAL_INTERNAL_MEM;
8641 MLK = NO_GLOBAL_EXTERNAL_MEM;
8642 }
else if (isa<ConstantPointerNull>(&Obj) &&
8646 }
else if (isa<AllocaInst>(&Obj)) {
8648 }
else if (
const auto *CB = dyn_cast<CallBase>(&Obj)) {
8649 bool IsKnownNoAlias;
8650 if (AA::hasAssumedIRAttr<Attribute::NoAlias>(
8653 MLK = NO_MALLOCED_MEM;
8655 MLK = NO_UNKOWN_MEM;
8657 MLK = NO_UNKOWN_MEM;
8660 assert(MLK != NO_LOCATIONS &&
"No location specified!");
8661 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Ptr value can be categorized: "
8662 << Obj <<
" -> " << getMemoryLocationsAsStr(MLK) <<
"\n");
8663 updateStateAndAccessesMap(State, MLK, &
I, &Obj, Changed,
8664 getAccessKindFromInst(&
I));
8673 dbgs() <<
"[AAMemoryLocation] Pointer locations not categorized\n");
8674 updateStateAndAccessesMap(State, NO_UNKOWN_MEM, &
I,
nullptr, Changed,
8675 getAccessKindFromInst(&
I));
8680 dbgs() <<
"[AAMemoryLocation] Accessed locations with pointer locations: "
8681 << getMemoryLocationsAsStr(State.getAssumed()) <<
"\n");
8684void AAMemoryLocationImpl::categorizeArgumentPointerLocations(
8687 for (
unsigned ArgNo = 0, E = CB.
arg_size(); ArgNo < E; ++ArgNo) {
8696 const auto *ArgOpMemLocationAA =
8699 if (ArgOpMemLocationAA && ArgOpMemLocationAA->isAssumedReadNone())
8704 categorizePtrValue(
A, CB, *ArgOp, AccessedLocs, Changed);
8711 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Categorize accessed locations for "
8715 AccessedLocs.intersectAssumedBits(NO_LOCATIONS);
8717 if (
auto *CB = dyn_cast<CallBase>(&
I)) {
8723 <<
" [" << CBMemLocationAA <<
"]\n");
8724 if (!CBMemLocationAA) {
8725 updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &
I,
nullptr,
8726 Changed, getAccessKindFromInst(&
I));
8727 return NO_UNKOWN_MEM;
8730 if (CBMemLocationAA->isAssumedReadNone())
8731 return NO_LOCATIONS;
8733 if (CBMemLocationAA->isAssumedInaccessibleMemOnly()) {
8734 updateStateAndAccessesMap(AccessedLocs, NO_INACCESSIBLE_MEM, &
I,
nullptr,
8735 Changed, getAccessKindFromInst(&
I));
8736 return AccessedLocs.getAssumed();
8739 uint32_t CBAssumedNotAccessedLocs =
8740 CBMemLocationAA->getAssumedNotAccessedLocation();
8743 uint32_t CBAssumedNotAccessedLocsNoArgMem =
8744 CBAssumedNotAccessedLocs | NO_ARGUMENT_MEM | NO_GLOBAL_MEM;
8746 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2) {
8747 if (CBAssumedNotAccessedLocsNoArgMem & CurMLK)
8749 updateStateAndAccessesMap(AccessedLocs, CurMLK, &
I,
nullptr, Changed,
8750 getAccessKindFromInst(&
I));
8755 bool HasGlobalAccesses = ((~CBAssumedNotAccessedLocs) & NO_GLOBAL_MEM);
8756 if (HasGlobalAccesses) {
8759 updateStateAndAccessesMap(AccessedLocs, MLK, &
I,
Ptr, Changed,
8760 getAccessKindFromInst(&
I));
8763 if (!CBMemLocationAA->checkForAllAccessesToMemoryKind(
8764 AccessPred, inverseLocation(NO_GLOBAL_MEM,
false,
false)))
8765 return AccessedLocs.getWorstState();
8769 dbgs() <<
"[AAMemoryLocation] Accessed state before argument handling: "
8770 << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) <<
"\n");
8773 bool HasArgAccesses = ((~CBAssumedNotAccessedLocs) & NO_ARGUMENT_MEM);
8775 categorizeArgumentPointerLocations(
A, *CB, AccessedLocs, Changed);
8778 dbgs() <<
"[AAMemoryLocation] Accessed state after argument handling: "
8779 << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) <<
"\n");
8781 return AccessedLocs.getAssumed();
8786 dbgs() <<
"[AAMemoryLocation] Categorize memory access with pointer: "
8787 <<
I <<
" [" << *
Ptr <<
"]\n");
8788 categorizePtrValue(
A,
I, *
Ptr, AccessedLocs, Changed,
8789 Ptr->getType()->getPointerAddressSpace());
8790 return AccessedLocs.getAssumed();
8793 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Failed to categorize instruction: "
8795 updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &
I,
nullptr, Changed,
8796 getAccessKindFromInst(&
I));
8797 return AccessedLocs.getAssumed();
8801struct AAMemoryLocationFunction final :
public AAMemoryLocationImpl {
8803 : AAMemoryLocationImpl(IRP,
A) {}
8808 const auto *MemBehaviorAA =
8812 return indicateOptimisticFixpoint();
8814 "AAMemoryLocation was not read-none but AAMemoryBehavior was!");
8815 A.recordDependence(*MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
8816 return ChangeStatus::UNCHANGED;
8820 auto AssumedState = getAssumed();
8821 bool Changed =
false;
8824 MemoryLocationsKind MLK = categorizeAccessedLocations(
A,
I, Changed);
8825 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Accessed locations for " <<
I
8826 <<
": " << getMemoryLocationsAsStr(MLK) <<
"\n");
8827 removeAssumedBits(inverseLocation(MLK,
false,
false));
8830 return getAssumedNotAccessedLocation() != VALID_STATE;
8833 bool UsedAssumedInformation =
false;
8834 if (!
A.checkForAllReadWriteInstructions(CheckRWInst, *
this,
8835 UsedAssumedInformation))
8836 return indicatePessimisticFixpoint();
8838 Changed |= AssumedState != getAssumed();
8839 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
8843 void trackStatistics()
const override {
8846 else if (isAssumedArgMemOnly())
8848 else if (isAssumedInaccessibleMemOnly())
8850 else if (isAssumedInaccessibleOrArgMemOnly())
8856struct AAMemoryLocationCallSite final : AAMemoryLocationImpl {
8858 : AAMemoryLocationImpl(IRP,
A) {}
8871 return indicatePessimisticFixpoint();
8872 bool Changed =
false;
8875 updateStateAndAccessesMap(getState(), MLK,
I,
Ptr, Changed,
8876 getAccessKindFromInst(
I));
8879 if (!FnAA->checkForAllAccessesToMemoryKind(AccessPred, ALL_LOCATIONS))
8880 return indicatePessimisticFixpoint();
8881 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
8885 void trackStatistics()
const override {
8899 const std::string getAsStr(
Attributor *
A)
const override {
8900 std::string Str(
"AADenormalFPMath[");
8903 DenormalState Known = getKnown();
8904 if (Known.Mode.isValid())
8905 OS <<
"denormal-fp-math=" << Known.Mode;
8909 if (Known.ModeF32.isValid())
8910 OS <<
" denormal-fp-math-f32=" << Known.ModeF32;
8916struct AADenormalFPMathFunction final : AADenormalFPMathImpl {
8918 : AADenormalFPMathImpl(IRP,
A) {}
8930 Known = DenormalState{
Mode, ModeF32};
8941 <<
"->" << getAssociatedFunction()->
getName() <<
'\n');
8949 CallerInfo->getState());
8953 bool AllCallSitesKnown =
true;
8954 if (!
A.checkForAllCallSites(CheckCallSite, *
this,
true, AllCallSitesKnown))
8955 return indicatePessimisticFixpoint();
8957 if (Change == ChangeStatus::CHANGED && isModeFixed())
8963 LLVMContext &Ctx = getAssociatedFunction()->getContext();
8968 AttrToRemove.
push_back(
"denormal-fp-math");
8974 if (Known.ModeF32 != Known.Mode) {
8976 Attribute::get(Ctx,
"denormal-fp-math-f32", Known.ModeF32.str()));
8978 AttrToRemove.
push_back(
"denormal-fp-math-f32");
8981 auto &IRP = getIRPosition();
8984 return A.removeAttrs(IRP, AttrToRemove) |
8985 A.manifestAttrs(IRP, AttrToAdd,
true);
8988 void trackStatistics()
const override {
9004 if (
A.hasSimplificationCallback(getIRPosition())) {
9005 indicatePessimisticFixpoint();
9010 intersectKnown(getConstantRangeFromSCEV(
A, getCtxI()));
9013 intersectKnown(getConstantRangeFromLVI(
A, getCtxI()));
9017 const std::string getAsStr(
Attributor *
A)
const override {
9021 getKnown().print(
OS);
9023 getAssumed().print(
OS);
9031 if (!getAnchorScope())
9044 const SCEV *S = SE->
getSCEV(&getAssociatedValue());
9055 if (!getAnchorScope())
9062 const SCEV *S = getSCEV(
A,
I);
9074 if (!getAnchorScope())
9093 bool isValidCtxInstructionForOutsideAnalysis(
Attributor &
A,
9095 bool AllowAACtxI)
const {
9096 if (!CtxI || (!AllowAACtxI && CtxI == getCtxI()))
9107 if (
auto *
I = dyn_cast<Instruction>(&getAssociatedValue())) {
9121 const Instruction *CtxI =
nullptr)
const override {
9122 if (!isValidCtxInstructionForOutsideAnalysis(
A, CtxI,
9128 return getKnown().intersectWith(SCEVR).intersectWith(LVIR);
9134 const Instruction *CtxI =
nullptr)
const override {
9139 if (!isValidCtxInstructionForOutsideAnalysis(
A, CtxI,
9141 return getAssumed();
9145 return getAssumed().intersectWith(SCEVR).intersectWith(LVIR);
9153 Ty, AssumedConstantRange.
getLower())),
9155 Ty, AssumedConstantRange.
getUpper()))};
9177 mdconst::extract<ConstantInt>(KnownRanges->
getOperand(0));
9179 mdconst::extract<ConstantInt>(KnownRanges->
getOperand(1));
9182 return Known.contains(Assumed) && Known != Assumed;
9189 auto *OldRangeMD =
I->getMetadata(LLVMContext::MD_range);
9190 if (isBetterRange(AssumedConstantRange, OldRangeMD)) {
9192 I->setMetadata(LLVMContext::MD_range,
9193 getMDNodeForConstantRange(
I->getType(),
I->getContext(),
9194 AssumedConstantRange));
9207 auto &
V = getAssociatedValue();
9211 assert(
I == getCtxI() &&
"Should not annotate an instruction which is "
9212 "not the context instruction");
9213 if (isa<CallInst>(
I) || isa<LoadInst>(
I))
9214 if (setRangeMetadataIfisBetterRange(
I, AssumedConstantRange))
9215 Changed = ChangeStatus::CHANGED;
9223struct AAValueConstantRangeArgument final
9224 : AAArgumentFromCallSiteArguments<
9225 AAValueConstantRange, AAValueConstantRangeImpl, IntegerRangeState,
9227 using Base = AAArgumentFromCallSiteArguments<
9234 void trackStatistics()
const override {
9239struct AAValueConstantRangeReturned
9240 : AAReturnedFromReturnedValues<AAValueConstantRange,
9241 AAValueConstantRangeImpl,
9242 AAValueConstantRangeImpl::StateType,
9246 AAValueConstantRangeImpl,
9254 if (!
A.isFunctionIPOAmendable(*getAssociatedFunction()))
9255 indicatePessimisticFixpoint();
9259 void trackStatistics()
const override {
9264struct AAValueConstantRangeFloating : AAValueConstantRangeImpl {
9266 : AAValueConstantRangeImpl(IRP,
A) {}
9270 AAValueConstantRangeImpl::initialize(
A);
9274 Value &
V = getAssociatedValue();
9276 if (
auto *
C = dyn_cast<ConstantInt>(&V)) {
9278 indicateOptimisticFixpoint();
9282 if (isa<UndefValue>(&V)) {
9285 indicateOptimisticFixpoint();
9289 if (isa<CallBase>(&V))
9292 if (isa<BinaryOperator>(&V) || isa<CmpInst>(&V) || isa<CastInst>(&V))
9296 if (
LoadInst *LI = dyn_cast<LoadInst>(&V))
9297 if (
auto *RangeMD = LI->getMetadata(LLVMContext::MD_range)) {
9304 if (isa<SelectInst>(V) || isa<PHINode>(V))
9308 indicatePessimisticFixpoint();
9311 << getAssociatedValue() <<
"\n");
9314 bool calculateBinaryOperator(
9322 bool UsedAssumedInformation =
false;
9323 const auto &SimplifiedLHS =
A.getAssumedSimplified(
9326 if (!SimplifiedLHS.has_value())
9328 if (!*SimplifiedLHS)
9330 LHS = *SimplifiedLHS;
9332 const auto &SimplifiedRHS =
A.getAssumedSimplified(
9335 if (!SimplifiedRHS.has_value())
9337 if (!*SimplifiedRHS)
9339 RHS = *SimplifiedRHS;
9347 DepClassTy::REQUIRED);
9351 auto LHSAARange = LHSAA->getAssumedConstantRange(
A, CtxI);
9355 DepClassTy::REQUIRED);
9359 auto RHSAARange = RHSAA->getAssumedConstantRange(
A, CtxI);
9361 auto AssumedRange = LHSAARange.binaryOp(BinOp->
getOpcode(), RHSAARange);
9363 T.unionAssumed(AssumedRange);
9367 return T.isValidState();
9370 bool calculateCastInst(
9379 bool UsedAssumedInformation =
false;
9380 const auto &SimplifiedOpV =
A.getAssumedSimplified(
9383 if (!SimplifiedOpV.has_value())
9385 if (!*SimplifiedOpV)
9387 OpV = *SimplifiedOpV;
9394 DepClassTy::REQUIRED);
9398 T.unionAssumed(OpAA->getAssumed().castOp(CastI->
getOpcode(),
9400 return T.isValidState();
9411 bool UsedAssumedInformation =
false;
9412 const auto &SimplifiedLHS =
A.getAssumedSimplified(
9415 if (!SimplifiedLHS.has_value())
9417 if (!*SimplifiedLHS)
9419 LHS = *SimplifiedLHS;
9421 const auto &SimplifiedRHS =
A.getAssumedSimplified(
9424 if (!SimplifiedRHS.has_value())
9426 if (!*SimplifiedRHS)
9428 RHS = *SimplifiedRHS;
9436 DepClassTy::REQUIRED);
9442 DepClassTy::REQUIRED);
9446 auto LHSAARange = LHSAA->getAssumedConstantRange(
A, CtxI);
9447 auto RHSAARange = RHSAA->getAssumedConstantRange(
A, CtxI);
9450 if (LHSAARange.isEmptySet() || RHSAARange.isEmptySet())
9453 bool MustTrue =
false, MustFalse =
false;
9455 auto AllowedRegion =
9458 if (AllowedRegion.intersectWith(LHSAARange).isEmptySet())
9464 assert((!MustTrue || !MustFalse) &&
9465 "Either MustTrue or MustFalse should be false!");
9474 LLVM_DEBUG(
dbgs() <<
"[AAValueConstantRange] " << *CmpI <<
" after "
9475 << (MustTrue ?
"true" : (MustFalse ?
"false" :
"unknown"))
9476 <<
": " <<
T <<
"\n\t" << *LHSAA <<
"\t<op>\n\t"
9480 return T.isValidState();
9489 if (!
I || isa<CallBase>(
I)) {
9492 bool UsedAssumedInformation =
false;
9493 const auto &SimplifiedOpV =
A.getAssumedSimplified(
9496 if (!SimplifiedOpV.has_value())
9498 if (!*SimplifiedOpV)
9500 Value *VPtr = *SimplifiedOpV;
9505 DepClassTy::REQUIRED);
9509 T.unionAssumed(AA->getAssumedConstantRange(
A, CtxI));
9513 return T.isValidState();
9517 if (
auto *BinOp = dyn_cast<BinaryOperator>(
I)) {
9518 if (!calculateBinaryOperator(
A, BinOp,
T, CtxI, QuerriedAAs))
9520 }
else if (
auto *CmpI = dyn_cast<CmpInst>(
I)) {
9521 if (!calculateCmpInst(
A, CmpI,
T, CtxI, QuerriedAAs))
9523 }
else if (
auto *CastI = dyn_cast<CastInst>(
I)) {
9524 if (!calculateCastInst(
A, CastI,
T, CtxI, QuerriedAAs))
9530 T.indicatePessimisticFixpoint();
9538 if (QueriedAA !=
this)
9541 if (
T.getAssumed() == getState().getAssumed())
9543 T.indicatePessimisticFixpoint();
9546 return T.isValidState();
9549 if (!VisitValueCB(getAssociatedValue(), getCtxI()))
9550 return indicatePessimisticFixpoint();
9555 return ChangeStatus::UNCHANGED;
9556 if (++NumChanges > MaxNumChanges) {
9557 LLVM_DEBUG(
dbgs() <<
"[AAValueConstantRange] performed " << NumChanges
9558 <<
" but only " << MaxNumChanges
9559 <<
" are allowed to avoid cyclic reasoning.");
9560 return indicatePessimisticFixpoint();
9562 return ChangeStatus::CHANGED;
9566 void trackStatistics()
const override {
9575 static constexpr int MaxNumChanges = 5;
9578struct AAValueConstantRangeFunction : AAValueConstantRangeImpl {
9580 : AAValueConstantRangeImpl(IRP,
A) {}
9584 llvm_unreachable(
"AAValueConstantRange(Function|CallSite)::updateImpl will "
9592struct AAValueConstantRangeCallSite : AAValueConstantRangeFunction {
9594 : AAValueConstantRangeFunction(IRP,
A) {}
9600struct AAValueConstantRangeCallSiteReturned
9601 : AACalleeToCallSite<AAValueConstantRange, AAValueConstantRangeImpl,
9602 AAValueConstantRangeImpl::StateType,
9606 AAValueConstantRangeImpl::StateType,
9612 if (
CallInst *CI = dyn_cast<CallInst>(&getAssociatedValue()))
9613 if (
auto *RangeMD = CI->getMetadata(LLVMContext::MD_range))
9616 AAValueConstantRangeImpl::initialize(
A);
9620 void trackStatistics()
const override {
9624struct AAValueConstantRangeCallSiteArgument : AAValueConstantRangeFloating {
9626 : AAValueConstantRangeFloating(IRP,
A) {}
9630 return ChangeStatus::UNCHANGED;
9634 void trackStatistics()
const override {
9651 if (
A.hasSimplificationCallback(getIRPosition()))
9652 indicatePessimisticFixpoint();
9654 AAPotentialConstantValues::initialize(
A);
9658 bool &ContainsUndef,
bool ForSelf) {
9660 bool UsedAssumedInformation =
false;
9662 UsedAssumedInformation)) {
9670 *
this, IRP, DepClassTy::REQUIRED);
9671 if (!PotentialValuesAA || !PotentialValuesAA->getState().isValidState())
9673 ContainsUndef = PotentialValuesAA->getState().undefIsContained();
9674 S = PotentialValuesAA->getState().getAssumedSet();
9681 ContainsUndef =
false;
9682 for (
auto &It : Values) {
9683 if (isa<UndefValue>(It.getValue())) {
9684 ContainsUndef =
true;
9687 auto *CI = dyn_cast<ConstantInt>(It.getValue());
9690 S.insert(CI->getValue());
9692 ContainsUndef &= S.empty();
9698 const std::string getAsStr(
Attributor *
A)
const override {
9707 return indicatePessimisticFixpoint();
9711struct AAPotentialConstantValuesArgument final
9712 : AAArgumentFromCallSiteArguments<AAPotentialConstantValues,
9713 AAPotentialConstantValuesImpl,
9714 PotentialConstantIntValuesState> {
9716 AAPotentialConstantValuesImpl,
9722 void trackStatistics()
const override {
9727struct AAPotentialConstantValuesReturned
9728 : AAReturnedFromReturnedValues<AAPotentialConstantValues,
9729 AAPotentialConstantValuesImpl> {
9731 AAPotentialConstantValuesImpl>;
9736 if (!
A.isFunctionIPOAmendable(*getAssociatedFunction()))
9737 indicatePessimisticFixpoint();
9738 Base::initialize(
A);
9742 void trackStatistics()
const override {
9747struct AAPotentialConstantValuesFloating : AAPotentialConstantValuesImpl {
9749 : AAPotentialConstantValuesImpl(IRP,
A) {}
9753 AAPotentialConstantValuesImpl::initialize(
A);
9757 Value &
V = getAssociatedValue();
9759 if (
auto *
C = dyn_cast<ConstantInt>(&V)) {
9760 unionAssumed(
C->getValue());
9761 indicateOptimisticFixpoint();
9765 if (isa<UndefValue>(&V)) {
9766 unionAssumedWithUndef();
9767 indicateOptimisticFixpoint();
9771 if (isa<BinaryOperator>(&V) || isa<ICmpInst>(&V) || isa<CastInst>(&V))
9774 if (isa<SelectInst>(V) || isa<PHINode>(V) || isa<LoadInst>(V))
9777 indicatePessimisticFixpoint();
9780 << getAssociatedValue() <<
"\n");
9794 case Instruction::Trunc:
9795 return Src.trunc(ResultBitWidth);
9796 case Instruction::SExt:
9797 return Src.sext(ResultBitWidth);
9798 case Instruction::ZExt:
9799 return Src.zext(ResultBitWidth);
9800 case Instruction::BitCast:
9807 bool &SkipOperation,
bool &Unsupported) {
9814 switch (BinOpcode) {
9818 case Instruction::Add:
9820 case Instruction::Sub:
9822 case Instruction::Mul:
9824 case Instruction::UDiv:
9826 SkipOperation =
true;
9830 case Instruction::SDiv:
9832 SkipOperation =
true;
9836 case Instruction::URem:
9838 SkipOperation =
true;
9842 case Instruction::SRem:
9844 SkipOperation =
true;
9848 case Instruction::Shl:
9850 case Instruction::LShr:
9852 case Instruction::AShr:
9854 case Instruction::And:
9856 case Instruction::Or:
9858 case Instruction::Xor:
9863 bool calculateBinaryOperatorAndTakeUnion(
const BinaryOperator *BinOp,
9865 bool SkipOperation =
false;
9868 calculateBinaryOperator(BinOp,
LHS,
RHS, SkipOperation, Unsupported);
9873 unionAssumed(Result);
9874 return isValidState();
9878 auto AssumedBefore = getAssumed();
9882 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
9883 SetTy LHSAAPVS, RHSAAPVS;
9885 LHSContainsUndef,
false) ||
9887 RHSContainsUndef,
false))
9888 return indicatePessimisticFixpoint();
9891 bool MaybeTrue =
false, MaybeFalse =
false;
9893 if (LHSContainsUndef && RHSContainsUndef) {
9896 unionAssumedWithUndef();
9897 }
else if (LHSContainsUndef) {
9898 for (
const APInt &R : RHSAAPVS) {
9899 bool CmpResult = calculateICmpInst(ICI, Zero, R);
9900 MaybeTrue |= CmpResult;
9901 MaybeFalse |= !CmpResult;
9902 if (MaybeTrue & MaybeFalse)
9903 return indicatePessimisticFixpoint();
9905 }
else if (RHSContainsUndef) {
9906 for (
const APInt &L : LHSAAPVS) {
9907 bool CmpResult = calculateICmpInst(ICI, L, Zero);
9908 MaybeTrue |= CmpResult;
9909 MaybeFalse |= !CmpResult;
9910 if (MaybeTrue & MaybeFalse)
9911 return indicatePessimisticFixpoint();
9914 for (
const APInt &L : LHSAAPVS) {
9915 for (
const APInt &R : RHSAAPVS) {
9916 bool CmpResult = calculateICmpInst(ICI, L, R);
9917 MaybeTrue |= CmpResult;
9918 MaybeFalse |= !CmpResult;
9919 if (MaybeTrue & MaybeFalse)
9920 return indicatePessimisticFixpoint();
9925 unionAssumed(
APInt( 1, 1));
9927 unionAssumed(
APInt( 1, 0));
9928 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
9929 : ChangeStatus::CHANGED;
9933 auto AssumedBefore = getAssumed();
9937 bool UsedAssumedInformation =
false;
9938 std::optional<Constant *>
C =
A.getAssumedConstant(
9939 *
SI->getCondition(), *
this, UsedAssumedInformation);
9942 bool OnlyLeft =
false, OnlyRight =
false;
9943 if (
C && *
C && (*C)->isOneValue())
9945 else if (
C && *
C && (*C)->isZeroValue())
9948 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
9949 SetTy LHSAAPVS, RHSAAPVS;
9952 LHSContainsUndef,
false))
9953 return indicatePessimisticFixpoint();
9957 RHSContainsUndef,
false))
9958 return indicatePessimisticFixpoint();
9960 if (OnlyLeft || OnlyRight) {
9962 auto *OpAA = OnlyLeft ? &LHSAAPVS : &RHSAAPVS;
9963 auto Undef = OnlyLeft ? LHSContainsUndef : RHSContainsUndef;
9966 unionAssumedWithUndef();
9968 for (
const auto &It : *OpAA)
9972 }
else if (LHSContainsUndef && RHSContainsUndef) {
9974 unionAssumedWithUndef();
9976 for (
const auto &It : LHSAAPVS)
9978 for (
const auto &It : RHSAAPVS)
9981 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
9982 : ChangeStatus::CHANGED;
9986 auto AssumedBefore = getAssumed();
9988 return indicatePessimisticFixpoint();
9993 bool SrcContainsUndef =
false;
9996 SrcContainsUndef,
false))
9997 return indicatePessimisticFixpoint();
9999 if (SrcContainsUndef)
10000 unionAssumedWithUndef();
10002 for (
const APInt &S : SrcPVS) {
10003 APInt T = calculateCastInst(CI, S, ResultBitWidth);
10007 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10008 : ChangeStatus::CHANGED;
10012 auto AssumedBefore = getAssumed();
10016 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
10017 SetTy LHSAAPVS, RHSAAPVS;
10019 LHSContainsUndef,
false) ||
10021 RHSContainsUndef,
false))
10022 return indicatePessimisticFixpoint();
10027 if (LHSContainsUndef && RHSContainsUndef) {
10028 if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, Zero))
10029 return indicatePessimisticFixpoint();
10030 }
else if (LHSContainsUndef) {
10031 for (
const APInt &R : RHSAAPVS) {
10032 if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, R))
10033 return indicatePessimisticFixpoint();
10035 }
else if (RHSContainsUndef) {
10036 for (
const APInt &L : LHSAAPVS) {
10037 if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, Zero))
10038 return indicatePessimisticFixpoint();
10041 for (
const APInt &L : LHSAAPVS) {
10042 for (
const APInt &R : RHSAAPVS) {
10043 if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, R))
10044 return indicatePessimisticFixpoint();
10048 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10049 : ChangeStatus::CHANGED;
10053 auto AssumedBefore = getAssumed();
10055 bool ContainsUndef;
10057 ContainsUndef,
true))
10058 return indicatePessimisticFixpoint();
10059 if (ContainsUndef) {
10060 unionAssumedWithUndef();
10065 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10066 : ChangeStatus::CHANGED;
10071 Value &
V = getAssociatedValue();
10074 if (
auto *ICI = dyn_cast<ICmpInst>(
I))
10075 return updateWithICmpInst(
A, ICI);
10077 if (
auto *SI = dyn_cast<SelectInst>(
I))
10078 return updateWithSelectInst(
A, SI);
10080 if (
auto *CI = dyn_cast<CastInst>(
I))
10081 return updateWithCastInst(
A, CI);
10083 if (
auto *BinOp = dyn_cast<BinaryOperator>(
I))
10084 return updateWithBinaryOperator(
A, BinOp);
10086 if (isa<PHINode>(
I) || isa<LoadInst>(
I))
10087 return updateWithInstruction(
A,
I);
10089 return indicatePessimisticFixpoint();
10093 void trackStatistics()
const override {
10098struct AAPotentialConstantValuesFunction : AAPotentialConstantValuesImpl {
10100 : AAPotentialConstantValuesImpl(IRP,
A) {}
10105 "AAPotentialConstantValues(Function|CallSite)::updateImpl will "
10110 void trackStatistics()
const override {
10115struct AAPotentialConstantValuesCallSite : AAPotentialConstantValuesFunction {
10117 : AAPotentialConstantValuesFunction(IRP,
A) {}
10120 void trackStatistics()
const override {
10125struct AAPotentialConstantValuesCallSiteReturned
10126 : AACalleeToCallSite<AAPotentialConstantValues,
10127 AAPotentialConstantValuesImpl> {
10128 AAPotentialConstantValuesCallSiteReturned(
const IRPosition &IRP,
10131 AAPotentialConstantValuesImpl>(IRP,
A) {}
10134 void trackStatistics()
const override {
10139struct AAPotentialConstantValuesCallSiteArgument
10140 : AAPotentialConstantValuesFloating {
10141 AAPotentialConstantValuesCallSiteArgument(
const IRPosition &IRP,
10143 : AAPotentialConstantValuesFloating(IRP,
A) {}
10147 AAPotentialConstantValuesImpl::initialize(
A);
10148 if (isAtFixpoint())
10151 Value &
V = getAssociatedValue();
10153 if (
auto *
C = dyn_cast<ConstantInt>(&V)) {
10154 unionAssumed(
C->getValue());
10155 indicateOptimisticFixpoint();
10159 if (isa<UndefValue>(&V)) {
10160 unionAssumedWithUndef();
10161 indicateOptimisticFixpoint();
10168 Value &
V = getAssociatedValue();
10169 auto AssumedBefore = getAssumed();
10173 return indicatePessimisticFixpoint();
10174 const auto &S = AA->getAssumed();
10176 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10177 : ChangeStatus::CHANGED;
10181 void trackStatistics()
const override {
10190 bool IgnoreSubsumingPositions) {
10191 assert(ImpliedAttributeKind == Attribute::NoUndef &&
10192 "Unexpected attribute kind");
10193 if (
A.hasAttr(IRP, {Attribute::NoUndef}, IgnoreSubsumingPositions,
10194 Attribute::NoUndef))
10214 Value &V = getAssociatedValue();
10215 if (isa<UndefValue>(V))
10216 indicatePessimisticFixpoint();
10217 assert(!isImpliedByIR(
A, getIRPosition(), Attribute::NoUndef));
10223 const Value *UseV =
U->get();
10232 bool TrackUse =
false;
10235 if (isa<CastInst>(*
I) || isa<GetElementPtrInst>(*
I))
10241 const std::string getAsStr(
Attributor *
A)
const override {
10242 return getAssumed() ?
"noundef" :
"may-undef-or-poison";
10249 bool UsedAssumedInformation =
false;
10250 if (
A.isAssumedDead(getIRPosition(),
nullptr,
nullptr,
10251 UsedAssumedInformation))
10252 return ChangeStatus::UNCHANGED;
10256 if (!
A.getAssumedSimplified(getIRPosition(), *
this, UsedAssumedInformation,
10259 return ChangeStatus::UNCHANGED;
10260 return AANoUndef::manifest(
A);
10264struct AANoUndefFloating :
public AANoUndefImpl {
10266 : AANoUndefImpl(IRP,
A) {}
10270 AANoUndefImpl::initialize(
A);
10271 if (!getState().isAtFixpoint() && getAnchorScope() &&
10272 !getAnchorScope()->isDeclaration())
10274 followUsesInMBEC(*
this,
A, getState(), *CtxI);
10279 auto VisitValueCB = [&](
const IRPosition &IRP) ->
bool {
10280 bool IsKnownNoUndef;
10281 return AA::hasAssumedIRAttr<Attribute::NoUndef>(
10282 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoUndef);
10286 bool UsedAssumedInformation =
false;
10287 Value *AssociatedValue = &getAssociatedValue();
10289 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
10294 Values.
size() != 1 || Values.
front().getValue() != AssociatedValue;
10302 if (AVIRP == getIRPosition() || !VisitValueCB(AVIRP))
10303 return indicatePessimisticFixpoint();
10304 return ChangeStatus::UNCHANGED;
10307 for (
const auto &VAC : Values)
10309 return indicatePessimisticFixpoint();
10311 return ChangeStatus::UNCHANGED;
10318struct AANoUndefReturned final
10319 : AAReturnedFromReturnedValues<AANoUndef, AANoUndefImpl> {
10321 : AAReturnedFromReturnedValues<
AANoUndef, AANoUndefImpl>(IRP,
A) {}
10327struct AANoUndefArgument final
10328 : AAArgumentFromCallSiteArguments<AANoUndef, AANoUndefImpl> {
10330 : AAArgumentFromCallSiteArguments<
AANoUndef, AANoUndefImpl>(IRP,
A) {}
10336struct AANoUndefCallSiteArgument final : AANoUndefFloating {
10338 : AANoUndefFloating(IRP,
A) {}
10344struct AANoUndefCallSiteReturned final
10345 : AACalleeToCallSite<AANoUndef, AANoUndefImpl> {
10347 : AACalleeToCallSite<
AANoUndef, AANoUndefImpl>(IRP,
A) {}
10362 if (isa<UndefValue>(V)) {
10363 indicateOptimisticFixpoint();
10368 A.getAttrs(getIRPosition(), {Attribute::NoFPClass},
Attrs,
false);
10369 for (
const auto &Attr : Attrs) {
10380 followUsesInMBEC(*
this,
A, getState(), *CtxI);
10387 auto *CB = dyn_cast<CallBase>(
I);
10396 if (
auto *NoFPAA =
A.getAAFor<
AANoFPClass>(*
this, IRP, DepClassTy::NONE))
10397 State.addKnownBits(NoFPAA->getState().getKnown());
10401 const std::string getAsStr(
Attributor *
A)
const override {
10402 std::string
Result =
"nofpclass";
10404 OS << getKnownNoFPClass() <<
'/' << getAssumedNoFPClass();
10414struct AANoFPClassFloating :
public AANoFPClassImpl {
10416 : AANoFPClassImpl(IRP,
A) {}
10421 bool UsedAssumedInformation =
false;
10422 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
10424 Values.
push_back({getAssociatedValue(), getCtxI()});
10430 DepClassTy::REQUIRED);
10431 if (!AA ||
this == AA) {
10432 T.indicatePessimisticFixpoint();
10438 return T.isValidState();
10441 for (
const auto &VAC : Values)
10442 if (!VisitValueCB(*
VAC.getValue(),
VAC.getCtxI()))
10443 return indicatePessimisticFixpoint();
10449 void trackStatistics()
const override {
10454struct AANoFPClassReturned final
10455 : AAReturnedFromReturnedValues<AANoFPClass, AANoFPClassImpl,
10456 AANoFPClassImpl::StateType, false,
10457 Attribute::None, false> {
10459 : AAReturnedFromReturnedValues<
AANoFPClass, AANoFPClassImpl,
10460 AANoFPClassImpl::StateType,
false,
10464 void trackStatistics()
const override {
10469struct AANoFPClassArgument final
10470 : AAArgumentFromCallSiteArguments<AANoFPClass, AANoFPClassImpl> {
10472 : AAArgumentFromCallSiteArguments<
AANoFPClass, AANoFPClassImpl>(IRP,
A) {}
10478struct AANoFPClassCallSiteArgument final : AANoFPClassFloating {
10480 : AANoFPClassFloating(IRP,
A) {}
10483 void trackStatistics()
const override {
10488struct AANoFPClassCallSiteReturned final
10489 : AACalleeToCallSite<AANoFPClass, AANoFPClassImpl> {
10491 : AACalleeToCallSite<
AANoFPClass, AANoFPClassImpl>(IRP,
A) {}
10494 void trackStatistics()
const override {
10503 return CalledFunctions;
10506 bool hasUnknownCallee()
const override {
return HasUnknownCallee; }
10508 bool hasNonAsmUnknownCallee()
const override {
10509 return HasUnknownCalleeNonAsm;
10512 const std::string getAsStr(
Attributor *
A)
const override {
10513 return "CallEdges[" + std::to_string(HasUnknownCallee) +
"," +
10514 std::to_string(CalledFunctions.size()) +
"]";
10517 void trackStatistics()
const override {}
10521 if (CalledFunctions.insert(Fn)) {
10522 Change = ChangeStatus::CHANGED;
10528 void setHasUnknownCallee(
bool NonAsm,
ChangeStatus &Change) {
10529 if (!HasUnknownCallee)
10530 Change = ChangeStatus::CHANGED;
10531 if (NonAsm && !HasUnknownCalleeNonAsm)
10532 Change = ChangeStatus::CHANGED;
10533 HasUnknownCalleeNonAsm |= NonAsm;
10534 HasUnknownCallee =
true;
10542 bool HasUnknownCallee =
false;
10545 bool HasUnknownCalleeNonAsm =
false;
10548struct AACallEdgesCallSite :
public AACallEdgesImpl {
10550 : AACallEdgesImpl(IRP,
A) {}
10556 if (
Function *Fn = dyn_cast<Function>(&V)) {
10557 addCalledFunction(Fn, Change);
10559 LLVM_DEBUG(
dbgs() <<
"[AACallEdges] Unrecognized value: " << V <<
"\n");
10560 setHasUnknownCallee(
true, Change);
10570 if (isa<Constant>(V)) {
10571 VisitValue(*V, CtxI);
10575 bool UsedAssumedInformation =
false;
10581 for (
auto &VAC : Values)
10582 VisitValue(*
VAC.getValue(),
VAC.getCtxI());
10585 CallBase *CB = cast<CallBase>(getCtxI());
10588 if (
IA->hasSideEffects() &&
10591 setHasUnknownCallee(
false, Change);
10598 *
this, getIRPosition(), DepClassTy::OPTIONAL))
10599 if (IndirectCallAA->foreachCallee(
10600 [&](
Function *Fn) { return VisitValue(*Fn, CB); }))
10609 for (
const Use *U : CallbackUses)
10610 ProcessCalledOperand(
U->get(), CB);
10616struct AACallEdgesFunction :
public AACallEdgesImpl {
10618 : AACallEdgesImpl(IRP,
A) {}
10625 CallBase &CB = cast<CallBase>(Inst);
10631 if (CBEdges->hasNonAsmUnknownCallee())
10632 setHasUnknownCallee(
true, Change);
10633 if (CBEdges->hasUnknownCallee())
10634 setHasUnknownCallee(
false, Change);
10636 for (
Function *
F : CBEdges->getOptimisticEdges())
10637 addCalledFunction(
F, Change);
10643 bool UsedAssumedInformation =
false;
10644 if (!
A.checkForAllCallLikeInstructions(ProcessCallInst, *
this,
10645 UsedAssumedInformation,
10649 setHasUnknownCallee(
true, Change);
10658struct AAInterFnReachabilityFunction
10659 :
public CachedReachabilityAA<AAInterFnReachability, Function> {
10660 using Base = CachedReachabilityAA<AAInterFnReachability, Function>;
10664 bool instructionCanReach(
10667 assert(
From.getFunction() == getAnchorScope() &&
"Queried the wrong AA!");
10668 auto *NonConstThis =
const_cast<AAInterFnReachabilityFunction *
>(
this);
10670 RQITy StackRQI(
A,
From, To, ExclusionSet,
false);
10671 typename RQITy::Reachable
Result;
10672 if (!NonConstThis->checkQueryCache(
A, StackRQI, Result))
10673 return NonConstThis->isReachableImpl(
A, StackRQI,
10675 return Result == RQITy::Reachable::Yes;
10679 bool IsTemporaryRQI)
override {
10682 if (EntryI != RQI.From &&
10683 !instructionCanReach(
A, *EntryI, *RQI.To,
nullptr))
10684 return rememberResult(
A, RQITy::Reachable::No, RQI,
false,
10687 auto CheckReachableCallBase = [&](
CallBase *CB) {
10690 if (!CBEdges || !CBEdges->getState().isValidState())
10693 if (CBEdges->hasUnknownCallee())
10696 for (
Function *Fn : CBEdges->getOptimisticEdges()) {
10707 if (Fn == getAnchorScope()) {
10708 if (EntryI == RQI.From)
10715 DepClassTy::OPTIONAL);
10718 if (!InterFnReachability ||
10728 DepClassTy::OPTIONAL);
10734 if (CheckReachableCallBase(cast<CallBase>(&CBInst)))
10737 A, *RQI.From, CBInst, RQI.ExclusionSet);
10740 bool UsedExclusionSet =
true;
10741 bool UsedAssumedInformation =
false;
10742 if (!
A.checkForAllCallLikeInstructions(CheckCallBase, *
this,
10743 UsedAssumedInformation,
10745 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
10748 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
10752 void trackStatistics()
const override {}
10756template <
typename AAType>
10757static std::optional<Constant *>
10768 std::optional<Constant *> COpt = AA->getAssumedConstant(
A);
10770 if (!COpt.has_value()) {
10772 return std::nullopt;
10774 if (
auto *
C = *COpt) {
10785 std::optional<Value *> V;
10786 for (
auto &It : Values) {
10788 if (V.has_value() && !*V)
10791 if (!V.has_value())
10805 if (
A.hasSimplificationCallback(getIRPosition())) {
10806 indicatePessimisticFixpoint();
10809 Value *Stripped = getAssociatedValue().stripPointerCasts();
10810 if (isa<Constant>(Stripped) && !isa<ConstantExpr>(Stripped)) {
10811 addValue(
A, getState(), *Stripped, getCtxI(),
AA::AnyScope,
10813 indicateOptimisticFixpoint();
10816 AAPotentialValues::initialize(
A);
10820 const std::string getAsStr(
Attributor *
A)
const override {
10827 template <
typename AAType>
10828 static std::optional<Value *> askOtherAA(
Attributor &
A,
10833 std::optional<Constant *>
C = askForAssumedConstant<AAType>(
A, AA, IRP, Ty);
10835 return std::nullopt;
10847 if (
auto *CB = dyn_cast_or_null<CallBase>(CtxI)) {
10848 for (
const auto &U : CB->
args()) {
10858 Type &Ty = *getAssociatedType();
10859 std::optional<Value *> SimpleV =
10860 askOtherAA<AAValueConstantRange>(
A, *
this, ValIRP, Ty);
10861 if (SimpleV.has_value() && !*SimpleV) {
10863 *
this, ValIRP, DepClassTy::OPTIONAL);
10864 if (PotentialConstantsAA && PotentialConstantsAA->isValidState()) {
10865 for (
const auto &It : PotentialConstantsAA->getAssumedSet())
10866 State.
unionAssumed({{*ConstantInt::get(&Ty, It),
nullptr}, S});
10867 if (PotentialConstantsAA->undefIsContained())
10872 if (!SimpleV.has_value())
10879 if (isa<ConstantInt>(VPtr))
10884 State.unionAssumed({{*VPtr, CtxI}, S});
10894 return II.I ==
I &&
II.S == S;
10909 bool UsedAssumedInformation =
false;
10911 if (!
A.getAssumedSimplifiedValues(IRP,
this, Values, CS,
10912 UsedAssumedInformation))
10915 for (
auto &It : Values)
10916 ValueScopeMap[It] += CS;
10918 for (
auto &It : ValueScopeMap)
10919 addValue(
A, getState(), *It.first.getValue(), It.first.getCtxI(),
10926 auto NewS = StateType::getBestState(getState());
10927 for (
const auto &It : getAssumedSet()) {
10930 addValue(
A, NewS, *It.first.getValue(), It.first.getCtxI(),
10933 assert(!undefIsContained() &&
"Undef should be an explicit value!");
10941 getState() = StateType::getBestState(getState());
10942 getState().unionAssumed({{getAssociatedValue(), getCtxI()},
AA::AnyScope});
10949 return indicatePessimisticFixpoint();
10957 if (!getAssumedSimplifiedValues(
A, Values, S))
10959 Value &OldV = getAssociatedValue();
10960 if (isa<UndefValue>(OldV))
10962 Value *NewV = getSingleValue(
A, *
this, getIRPosition(), Values);
10963 if (!NewV || NewV == &OldV)
10968 if (
A.changeAfterManifest(getIRPosition(), *NewV))
10974 bool getAssumedSimplifiedValues(
10976 AA::ValueScope S,
bool RecurseForSelectAndPHI =
false)
const override {
10977 if (!isValidState())
10979 bool UsedAssumedInformation =
false;
10980 for (
const auto &It : getAssumedSet())
10981 if (It.second & S) {
10982 if (RecurseForSelectAndPHI && (isa<PHINode>(It.first.getValue()) ||
10983 isa<SelectInst>(It.first.getValue()))) {
10984 if (
A.getAssumedSimplifiedValues(
10986 this, Values, S, UsedAssumedInformation))
10991 assert(!undefIsContained() &&
"Undef should be an explicit value!");
10996struct AAPotentialValuesFloating : AAPotentialValuesImpl {
10998 : AAPotentialValuesImpl(IRP,
A) {}
11002 auto AssumedBefore = getAssumed();
11004 genericValueTraversal(
A, &getAssociatedValue());
11006 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11007 : ChangeStatus::CHANGED;
11011 struct LivenessInfo {
11012 const AAIsDead *LivenessAA =
nullptr;
11013 bool AnyDead =
false;
11026 bool UsedAssumedInformation =
false;
11028 auto GetSimplifiedValues = [&](
Value &
V,
11030 if (!
A.getAssumedSimplifiedValues(
11036 return Values.
empty();
11038 if (GetSimplifiedValues(*
LHS, LHSValues))
11040 if (GetSimplifiedValues(*
RHS, RHSValues))
11052 F ?
A.getInfoCache().getTargetLibraryInfoForFunction(*
F) :
nullptr;
11060 auto CheckPair = [&](
Value &LHSV,
Value &RHSV) {
11061 if (isa<UndefValue>(LHSV) || isa<UndefValue>(RHSV)) {
11063 nullptr,
II.S, getAnchorScope());
11069 if (&LHSV == &RHSV &&
11073 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11080 if (TypedLHS && TypedRHS) {
11082 if (NewV && NewV != &Cmp) {
11083 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11093 bool LHSIsNull = isa<ConstantPointerNull>(LHSV);
11094 bool RHSIsNull = isa<ConstantPointerNull>(RHSV);
11095 if (!LHSIsNull && !RHSIsNull)
11101 assert((LHSIsNull || RHSIsNull) &&
11102 "Expected nullptr versus non-nullptr comparison at this point");
11105 unsigned PtrIdx = LHSIsNull;
11106 bool IsKnownNonNull;
11107 bool IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
11109 DepClassTy::REQUIRED, IsKnownNonNull);
11110 if (!IsAssumedNonNull)
11116 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11121 for (
auto &LHSValue : LHSValues)
11122 for (
auto &RHSValue : RHSValues)
11123 if (!CheckPair(*LHSValue.getValue(), *RHSValue.getValue()))
11131 bool UsedAssumedInformation =
false;
11133 std::optional<Constant *>
C =
11134 A.getAssumedConstant(*
SI.getCondition(), *
this, UsedAssumedInformation);
11135 bool NoValueYet = !
C.has_value();
11136 if (NoValueYet || isa_and_nonnull<UndefValue>(*
C))
11138 if (
auto *CI = dyn_cast_or_null<ConstantInt>(*
C)) {
11143 }
else if (&SI == &getAssociatedValue()) {
11148 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
11150 if (!SimpleV.has_value())
11153 addValue(
A, getState(), **SimpleV, CtxI,
II.S, getAnchorScope());
11165 bool UsedAssumedInformation =
false;
11167 PotentialValueOrigins, *
this,
11168 UsedAssumedInformation,
11170 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Failed to get potentially "
11171 "loaded values for load instruction "
11182 if (!
I || isa<AssumeInst>(
I))
11184 if (
auto *SI = dyn_cast<StoreInst>(
I))
11185 return A.isAssumedDead(
SI->getOperandUse(0),
this,
11187 UsedAssumedInformation,
11189 return A.isAssumedDead(*
I,
this,
nullptr,
11190 UsedAssumedInformation,
11193 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Load is onl used by assumes "
11194 "and we cannot delete all the stores: "
11205 bool AllLocal = ScopeIsLocal;
11210 if (!DynamicallyUnique) {
11211 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Not all potentially loaded "
11212 "values are dynamically unique: "
11217 for (
auto *PotentialCopy : PotentialCopies) {
11219 Worklist.
push_back({{*PotentialCopy, CtxI},
II.S});
11224 if (!AllLocal && ScopeIsLocal)
11229 bool handlePHINode(
11233 auto GetLivenessInfo = [&](
const Function &
F) -> LivenessInfo & {
11234 LivenessInfo &LI = LivenessAAs[&
F];
11235 if (!LI.LivenessAA)
11241 if (&
PHI == &getAssociatedValue()) {
11242 LivenessInfo &LI = GetLivenessInfo(*
PHI.getFunction());
11245 *
PHI.getFunction());
11249 for (
unsigned u = 0, e =
PHI.getNumIncomingValues(); u < e; u++) {
11251 if (LI.LivenessAA &&
11252 LI.LivenessAA->isEdgeDead(IncomingBB,
PHI.getParent())) {
11262 if (CyclePHI && isa<Instruction>(V) &&
11263 (!
C ||
C->contains(cast<Instruction>(V)->getParent())))
11271 bool UsedAssumedInformation =
false;
11272 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
11274 if (!SimpleV.has_value())
11278 addValue(
A, getState(), **SimpleV, &
PHI,
II.S, getAnchorScope());
11287 bool SomeSimplified =
false;
11288 bool UsedAssumedInformation =
false;
11293 const auto &SimplifiedOp =
A.getAssumedSimplified(
11298 if (!SimplifiedOp.has_value())
11302 NewOps[
Idx] = *SimplifiedOp;
11306 SomeSimplified |= (NewOps[
Idx] !=
Op);
11312 if (!SomeSimplified)
11319 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
11325 if (!NewV || NewV == &
I)
11328 LLVM_DEBUG(
dbgs() <<
"Generic inst " <<
I <<
" assumed simplified to "
11338 if (
auto *CI = dyn_cast<CmpInst>(&
I))
11340 CI->getPredicate(),
II, Worklist);
11342 switch (
I.getOpcode()) {
11343 case Instruction::Select:
11344 return handleSelectInst(
A, cast<SelectInst>(
I),
II, Worklist);
11345 case Instruction::PHI:
11346 return handlePHINode(
A, cast<PHINode>(
I),
II, Worklist, LivenessAAs);
11347 case Instruction::Load:
11348 return handleLoadInst(
A, cast<LoadInst>(
I),
II, Worklist);
11350 return handleGenericInst(
A,
I,
II, Worklist);
11377 LLVM_DEBUG(
dbgs() <<
"Generic value traversal reached iteration limit: "
11378 << Iteration <<
"!\n");
11379 addValue(
A, getState(), *V, CtxI, S, getAnchorScope());
11385 Value *NewV =
nullptr;
11386 if (
V->getType()->isPointerTy()) {
11389 if (
auto *CB = dyn_cast<CallBase>(V))
11399 if (NewV && NewV != V) {
11400 Worklist.
push_back({{*NewV, CtxI}, S});
11404 if (
auto *
I = dyn_cast<Instruction>(V)) {
11409 if (V != InitialV || isa<Argument>(V))
11414 if (V == InitialV && CtxI == getCtxI()) {
11415 indicatePessimisticFixpoint();
11419 addValue(
A, getState(), *V, CtxI, S, getAnchorScope());
11420 }
while (!Worklist.
empty());
11424 for (
auto &It : LivenessAAs)
11425 if (It.second.AnyDead)
11426 A.recordDependence(*It.second.LivenessAA, *
this, DepClassTy::OPTIONAL);
11430 void trackStatistics()
const override {
11435struct AAPotentialValuesArgument final : AAPotentialValuesImpl {
11436 using Base = AAPotentialValuesImpl;
11442 auto &Arg = cast<Argument>(getAssociatedValue());
11444 indicatePessimisticFixpoint();
11449 auto AssumedBefore = getAssumed();
11451 unsigned ArgNo = getCalleeArgNo();
11453 bool UsedAssumedInformation =
false;
11457 if (CSArgIRP.getPositionKind() == IRP_INVALID)
11460 if (!
A.getAssumedSimplifiedValues(CSArgIRP,
this, Values,
11462 UsedAssumedInformation))
11465 return isValidState();
11468 if (!
A.checkForAllCallSites(CallSitePred, *
this,
11470 UsedAssumedInformation))
11471 return indicatePessimisticFixpoint();
11473 Function *Fn = getAssociatedFunction();
11474 bool AnyNonLocal =
false;
11475 for (
auto &It : Values) {
11476 if (isa<Constant>(It.getValue())) {
11477 addValue(
A, getState(), *It.getValue(), It.getCtxI(),
AA::AnyScope,
11482 return indicatePessimisticFixpoint();
11484 if (
auto *Arg = dyn_cast<Argument>(It.getValue()))
11486 addValue(
A, getState(), *It.getValue(), It.getCtxI(),
AA::AnyScope,
11492 AnyNonLocal =
true;
11494 assert(!undefIsContained() &&
"Undef should be an explicit value!");
11496 giveUpOnIntraprocedural(
A);
11498 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11499 : ChangeStatus::CHANGED;
11503 void trackStatistics()
const override {
11508struct AAPotentialValuesReturned :
public AAPotentialValuesFloating {
11509 using Base = AAPotentialValuesFloating;
11516 if (!
F ||
F->isDeclaration() ||
F->getReturnType()->isVoidTy()) {
11517 indicatePessimisticFixpoint();
11524 ReturnedArg = &Arg;
11527 if (!
A.isFunctionIPOAmendable(*
F) ||
11528 A.hasSimplificationCallback(getIRPosition())) {
11530 indicatePessimisticFixpoint();
11532 indicateOptimisticFixpoint();
11538 auto AssumedBefore = getAssumed();
11539 bool UsedAssumedInformation =
false;
11542 Function *AnchorScope = getAnchorScope();
11548 UsedAssumedInformation,
11554 bool AllInterAreIntra =
false;
11562 addValue(
A, getState(), *
VAC.getValue(),
11563 VAC.getCtxI() ?
VAC.getCtxI() : CtxI,
11566 if (AllInterAreIntra)
11573 HandleReturnedValue(*ReturnedArg,
nullptr,
true);
11576 bool AddValues =
true;
11577 if (isa<PHINode>(RetI.getOperand(0)) ||
11578 isa<SelectInst>(RetI.getOperand(0))) {
11579 addValue(
A, getState(), *RetI.getOperand(0), &RetI,
AA::AnyScope,
11583 return HandleReturnedValue(*RetI.getOperand(0), &RetI, AddValues);
11586 if (!
A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
11587 UsedAssumedInformation,
11589 return indicatePessimisticFixpoint();
11592 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11593 : ChangeStatus::CHANGED;
11598 return ChangeStatus::UNCHANGED;
11600 if (!getAssumedSimplifiedValues(
A, Values, AA::ValueScope::Intraprocedural,
11602 return ChangeStatus::UNCHANGED;
11603 Value *NewVal = getSingleValue(
A, *
this, getIRPosition(), Values);
11605 return ChangeStatus::UNCHANGED;
11608 if (
auto *Arg = dyn_cast<Argument>(NewVal)) {
11610 "Number of function with unique return");
11611 Changed |=
A.manifestAttrs(
11618 Value *RetOp = RetI.getOperand(0);
11619 if (isa<UndefValue>(RetOp) || RetOp == NewVal)
11622 if (
A.changeUseAfterManifest(RetI.getOperandUse(0), *NewVal))
11623 Changed = ChangeStatus::CHANGED;
11626 bool UsedAssumedInformation =
false;
11627 (void)
A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
11628 UsedAssumedInformation,
11638 void trackStatistics()
const override{
11645struct AAPotentialValuesFunction : AAPotentialValuesImpl {
11647 : AAPotentialValuesImpl(IRP,
A) {}
11656 void trackStatistics()
const override {
11661struct AAPotentialValuesCallSite : AAPotentialValuesFunction {
11663 : AAPotentialValuesFunction(IRP,
A) {}
11666 void trackStatistics()
const override {
11671struct AAPotentialValuesCallSiteReturned : AAPotentialValuesImpl {
11673 : AAPotentialValuesImpl(IRP,
A) {}
11677 auto AssumedBefore = getAssumed();
11681 return indicatePessimisticFixpoint();
11683 bool UsedAssumedInformation =
false;
11684 auto *CB = cast<CallBase>(getCtxI());
11687 UsedAssumedInformation))
11688 return indicatePessimisticFixpoint();
11695 Values, S, UsedAssumedInformation))
11698 for (
auto &It : Values) {
11699 Value *
V = It.getValue();
11700 std::optional<Value *> CallerV =
A.translateArgumentToCallSiteContent(
11701 V, *CB, *
this, UsedAssumedInformation);
11702 if (!CallerV.has_value()) {
11706 V = *CallerV ? *CallerV :
V;
11712 giveUpOnIntraprocedural(
A);
11715 addValue(
A, getState(), *V, CB, S, getAnchorScope());
11720 return indicatePessimisticFixpoint();
11722 return indicatePessimisticFixpoint();
11723 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11724 : ChangeStatus::CHANGED;
11728 return AAPotentialValues::indicatePessimisticFixpoint();
11732 void trackStatistics()
const override {
11737struct AAPotentialValuesCallSiteArgument : AAPotentialValuesFloating {
11739 : AAPotentialValuesFloating(IRP,
A) {}
11742 void trackStatistics()
const override {
11758 if (getKnown().isUniversal())
11759 return ChangeStatus::UNCHANGED;
11763 getAssumed().getSet().
end());
11765 return A.manifestAttrs(IRP,
11768 llvm::join(Set,
",")),
11773 return isValidState() && setContains(Assumption);
11777 const std::string getAsStr(
Attributor *
A)
const override {
11778 const SetContents &Known = getKnown();
11779 const SetContents &Assumed = getAssumed();
11783 const std::string KnownStr = llvm::join(Set,
",");
11785 std::string AssumedStr =
"Universal";
11786 if (!Assumed.isUniversal()) {
11787 Set.assign(Assumed.getSet().begin(), Assumed.getSet().end());
11788 AssumedStr = llvm::join(Set,
",");
11790 return "Known [" + KnownStr +
"]," +
" Assumed [" + AssumedStr +
"]";
11805struct AAAssumptionInfoFunction final : AAAssumptionInfoImpl {
11807 : AAAssumptionInfoImpl(IRP,
A,
11812 bool Changed =
false;
11817 DepClassTy::REQUIRED);
11821 Changed |= getIntersection(AssumptionAA->getAssumed());
11822 return !getAssumed().empty() || !getKnown().empty();
11825 bool UsedAssumedInformation =
false;
11830 if (!
A.checkForAllCallSites(CallSitePred, *
this,
true,
11831 UsedAssumedInformation))
11832 return indicatePessimisticFixpoint();
11834 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11837 void trackStatistics()
const override {}
11841struct AAAssumptionInfoCallSite final : AAAssumptionInfoImpl {
11844 : AAAssumptionInfoImpl(IRP,
A, getInitialAssumptions(IRP)) {}
11855 auto *AssumptionAA =
11858 return indicatePessimisticFixpoint();
11859 bool Changed = getIntersection(AssumptionAA->getAssumed());
11860 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11864 void trackStatistics()
const override {}
11876 return Assumptions;
11891struct AAUnderlyingObjectsImpl
11897 const std::string getAsStr(
Attributor *
A)
const override {
11898 if (!isValidState())
11899 return "<invalid>";
11902 OS <<
"underlying objects: inter " << InterAssumedUnderlyingObjects.size()
11903 <<
" objects, intra " << IntraAssumedUnderlyingObjects.size()
11905 if (!InterAssumedUnderlyingObjects.empty()) {
11906 OS <<
"inter objects:\n";
11907 for (
auto *Obj : InterAssumedUnderlyingObjects)
11908 OS << *Obj <<
'\n';
11910 if (!IntraAssumedUnderlyingObjects.empty()) {
11911 OS <<
"intra objects:\n";
11912 for (
auto *Obj : IntraAssumedUnderlyingObjects)
11913 OS << *Obj <<
'\n';
11919 void trackStatistics()
const override {}
11923 auto &
Ptr = getAssociatedValue();
11925 bool UsedAssumedInformation =
false;
11932 Scope, UsedAssumedInformation))
11935 bool Changed =
false;
11937 for (
unsigned I = 0;
I < Values.
size(); ++
I) {
11938 auto &
VAC = Values[
I];
11939 auto *Obj =
VAC.getValue();
11941 if (!SeenObjects.
insert(UO ? UO : Obj).second)
11943 if (UO && UO != Obj) {
11944 if (isa<AllocaInst>(UO) || isa<GlobalValue>(UO)) {
11945 Changed |= UnderlyingObjects.
insert(UO);
11951 auto Pred = [&](
Value &
V) {
11953 Changed |= UnderlyingObjects.
insert(UO);
11959 if (!OtherAA || !OtherAA->forallUnderlyingObjects(Pred, Scope))
11961 "The forall call should not return false at this position");
11966 if (isa<SelectInst>(Obj)) {
11967 Changed |= handleIndirect(
A, *Obj, UnderlyingObjects, Scope,
11968 UsedAssumedInformation);
11971 if (
auto *
PHI = dyn_cast<PHINode>(Obj)) {
11974 for (
unsigned u = 0, e =
PHI->getNumIncomingValues(); u < e; u++) {
11976 handleIndirect(
A, *
PHI->getIncomingValue(u), UnderlyingObjects,
11977 Scope, UsedAssumedInformation);
11982 Changed |= UnderlyingObjects.
insert(Obj);
11988 bool Changed =
false;
11991 if (!UsedAssumedInformation)
11992 indicateOptimisticFixpoint();
11993 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11996 bool forallUnderlyingObjects(
11999 if (!isValidState())
12000 return Pred(getAssociatedValue());
12003 ? IntraAssumedUnderlyingObjects
12004 : InterAssumedUnderlyingObjects;
12005 for (
Value *Obj : AssumedUnderlyingObjects)
12018 bool Changed =
false;
12021 auto Pred = [&](
Value &
V) {
12022 Changed |= UnderlyingObjects.
insert(&V);
12025 if (!AA || !AA->forallUnderlyingObjects(Pred, Scope))
12027 "The forall call should not return false at this position");
12038struct AAUnderlyingObjectsFloating final : AAUnderlyingObjectsImpl {
12040 : AAUnderlyingObjectsImpl(IRP,
A) {}
12043struct AAUnderlyingObjectsArgument final : AAUnderlyingObjectsImpl {
12045 : AAUnderlyingObjectsImpl(IRP,
A) {}
12048struct AAUnderlyingObjectsCallSite final : AAUnderlyingObjectsImpl {
12050 : AAUnderlyingObjectsImpl(IRP,
A) {}
12053struct AAUnderlyingObjectsCallSiteArgument final : AAUnderlyingObjectsImpl {
12055 : AAUnderlyingObjectsImpl(IRP,
A) {}
12058struct AAUnderlyingObjectsReturned final : AAUnderlyingObjectsImpl {
12060 : AAUnderlyingObjectsImpl(IRP,
A) {}
12063struct AAUnderlyingObjectsCallSiteReturned final : AAUnderlyingObjectsImpl {
12065 : AAUnderlyingObjectsImpl(IRP,
A) {}
12068struct AAUnderlyingObjectsFunction final : AAUnderlyingObjectsImpl {
12070 : AAUnderlyingObjectsImpl(IRP,
A) {}
12085 Instruction *UInst = dyn_cast<Instruction>(
U.getUser());
12091 LLVM_DEBUG(
dbgs() <<
"[AAGlobalValueInfo] Check use: " << *
U.get() <<
" in "
12092 << *UInst <<
"\n");
12094 if (
auto *Cmp = dyn_cast<ICmpInst>(
U.getUser())) {
12095 int Idx = &
Cmp->getOperandUse(0) == &
U;
12096 if (isa<Constant>(
Cmp->getOperand(
Idx)))
12098 return U == &getAnchorValue();
12102 if (isa<ReturnInst>(UInst)) {
12104 Worklist.
push_back(ACS.getInstruction());
12107 bool UsedAssumedInformation =
false;
12109 if (!
A.checkForAllCallSites(CallSitePred, *UInst->
getFunction(),
12111 UsedAssumedInformation))
12118 auto *CB = dyn_cast<CallBase>(UInst);
12129 if (!Fn || !
A.isFunctionIPOAmendable(*Fn))
12138 unsigned NumUsesBefore =
Uses.size();
12144 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
12147 case UseCaptureKind::NO_CAPTURE:
12148 return checkUse(
A, U, Follow, Worklist);
12149 case UseCaptureKind::MAY_CAPTURE:
12150 return checkUse(
A, U, Follow, Worklist);
12151 case UseCaptureKind::PASSTHROUGH:
12157 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
12158 Uses.insert(&OldU);
12162 while (!Worklist.
empty()) {
12164 if (!Visited.
insert(V).second)
12166 if (!
A.checkForAllUses(UsePred, *
this, *V,
12168 DepClassTy::OPTIONAL,
12169 true, EquivalentUseCB)) {
12170 return indicatePessimisticFixpoint();
12174 return Uses.size() == NumUsesBefore ? ChangeStatus::UNCHANGED
12175 : ChangeStatus::CHANGED;
12178 bool isPotentialUse(
const Use &U)
const override {
12179 return !isValidState() ||
Uses.contains(&U);
12184 return ChangeStatus::UNCHANGED;
12188 const std::string getAsStr(
Attributor *
A)
const override {
12189 return "[" + std::to_string(
Uses.size()) +
" uses]";
12192 void trackStatistics()
const override {
12210 auto *MD = getCtxI()->getMetadata(LLVMContext::MD_callees);
12211 if (!MD && !
A.isClosedWorldModule())
12215 for (
const auto &
Op : MD->operands())
12216 if (
Function *Callee = mdconst::dyn_extract_or_null<Function>(
Op))
12217 PotentialCallees.insert(Callee);
12218 }
else if (
A.isClosedWorldModule()) {
12220 A.getInfoCache().getIndirectlyCallableFunctions(
A);
12221 PotentialCallees.insert(IndirectlyCallableFunctions.
begin(),
12222 IndirectlyCallableFunctions.
end());
12225 if (PotentialCallees.empty())
12226 indicateOptimisticFixpoint();
12230 CallBase *CB = cast<CallBase>(getCtxI());
12235 bool AllCalleesKnownNow = AllCalleesKnown;
12237 auto CheckPotentialCalleeUse = [&](
Function &PotentialCallee,
12238 bool &UsedAssumedInformation) {
12241 if (!GIAA || GIAA->isPotentialUse(CalleeUse))
12243 UsedAssumedInformation = !GIAA->isAtFixpoint();
12247 auto AddPotentialCallees = [&]() {
12248 for (
auto *PotentialCallee : PotentialCallees) {
12249 bool UsedAssumedInformation =
false;
12250 if (CheckPotentialCalleeUse(*PotentialCallee, UsedAssumedInformation))
12251 AssumedCalleesNow.
insert(PotentialCallee);
12257 bool UsedAssumedInformation =
false;
12260 AA::ValueScope::AnyScope,
12261 UsedAssumedInformation)) {
12262 if (PotentialCallees.empty())
12263 return indicatePessimisticFixpoint();
12264 AddPotentialCallees();
12269 auto CheckPotentialCallee = [&](
Function &Fn) {
12270 if (!PotentialCallees.empty() && !PotentialCallees.count(&Fn))
12273 auto &CachedResult = FilterResults[&Fn];
12274 if (CachedResult.has_value())
12275 return CachedResult.value();
12277 bool UsedAssumedInformation =
false;
12278 if (!CheckPotentialCalleeUse(Fn, UsedAssumedInformation)) {
12279 if (!UsedAssumedInformation)
12280 CachedResult =
false;
12289 for (
int I = NumCBArgs;
I < NumFnArgs; ++
I) {
12290 bool IsKnown =
false;
12291 if (AA::hasAssumedIRAttr<Attribute::NoUndef>(
12293 DepClassTy::OPTIONAL, IsKnown)) {
12295 CachedResult =
false;
12300 CachedResult =
true;
12306 for (
auto &VAC : Values) {
12307 if (isa<UndefValue>(
VAC.getValue()))
12309 if (isa<ConstantPointerNull>(
VAC.getValue()) &&
12310 VAC.getValue()->getType()->getPointerAddressSpace() == 0)
12313 if (
auto *VACFn = dyn_cast<Function>(
VAC.getValue())) {
12314 if (CheckPotentialCallee(*VACFn))
12315 AssumedCalleesNow.
insert(VACFn);
12318 if (!PotentialCallees.empty()) {
12319 AddPotentialCallees();
12322 AllCalleesKnownNow =
false;
12325 if (AssumedCalleesNow == AssumedCallees &&
12326 AllCalleesKnown == AllCalleesKnownNow)
12327 return ChangeStatus::UNCHANGED;
12329 std::swap(AssumedCallees, AssumedCalleesNow);
12330 AllCalleesKnown = AllCalleesKnownNow;
12331 return ChangeStatus::CHANGED;
12337 if (!AllCalleesKnown && AssumedCallees.empty())
12338 return ChangeStatus::UNCHANGED;
12340 CallBase *CB = cast<CallBase>(getCtxI());
12341 bool UsedAssumedInformation =
false;
12342 if (
A.isAssumedDead(*CB,
this,
nullptr,
12343 UsedAssumedInformation))
12344 return ChangeStatus::UNCHANGED;
12348 if (
FP->getType()->getPointerAddressSpace())
12359 if (AssumedCallees.empty()) {
12360 assert(AllCalleesKnown &&
12361 "Expected all callees to be known if there are none.");
12362 A.changeToUnreachableAfterManifest(CB);
12363 return ChangeStatus::CHANGED;
12367 if (AllCalleesKnown && AssumedCallees.size() == 1) {
12368 auto *NewCallee = AssumedCallees.front();
12371 NumIndirectCallsPromoted++;
12372 return ChangeStatus::CHANGED;
12379 A.deleteAfterManifest(*CB);
12380 return ChangeStatus::CHANGED;
12390 bool SpecializedForAnyCallees =
false;
12391 bool SpecializedForAllCallees = AllCalleesKnown;
12395 for (
Function *NewCallee : AssumedCallees) {
12396 if (!
A.shouldSpecializeCallSiteForCallee(*
this, *CB, *NewCallee,
12397 AssumedCallees.size())) {
12398 SkippedAssumedCallees.
push_back(NewCallee);
12399 SpecializedForAllCallees =
false;
12402 SpecializedForAnyCallees =
true;
12408 A.registerManifestAddedBasicBlock(*ThenTI->
getParent());
12409 A.registerManifestAddedBasicBlock(*IP->getParent());
12410 auto *SplitTI = cast<BranchInst>(LastCmp->
getNextNode());
12415 A.registerManifestAddedBasicBlock(*ElseBB);
12417 SplitTI->replaceUsesOfWith(CBBB, ElseBB);
12425 auto *CBClone = cast<CallBase>(CB->
clone());
12426 CBClone->insertBefore(ThenTI);
12427 NewCall = &cast<CallInst>(
promoteCall(*CBClone, NewCallee, &RetBC));
12428 NumIndirectCallsPromoted++;
12436 auto AttachCalleeMetadata = [&](
CallBase &IndirectCB) {
12437 if (!AllCalleesKnown)
12438 return ChangeStatus::UNCHANGED;
12439 MDBuilder MDB(IndirectCB.getContext());
12440 MDNode *Callees = MDB.createCallees(SkippedAssumedCallees);
12441 IndirectCB.setMetadata(LLVMContext::MD_callees, Callees);
12442 return ChangeStatus::CHANGED;
12445 if (!SpecializedForAnyCallees)
12446 return AttachCalleeMetadata(*CB);
12449 if (SpecializedForAllCallees) {
12453 IP->eraseFromParent();
12455 auto *CBClone = cast<CallInst>(CB->
clone());
12456 CBClone->setName(CB->
getName());
12457 CBClone->insertBefore(*IP->getParent(), IP);
12458 NewCalls.
push_back({CBClone,
nullptr});
12459 AttachCalleeMetadata(*CBClone);
12466 CB->
getParent()->getFirstInsertionPt());
12467 for (
auto &It : NewCalls) {
12469 Instruction *CallRet = It.second ? It.second : It.first;
12481 A.deleteAfterManifest(*CB);
12482 Changed = ChangeStatus::CHANGED;
12488 const std::string getAsStr(
Attributor *
A)
const override {
12489 return std::string(AllCalleesKnown ?
"eliminate" :
"specialize") +
12490 " indirect call site with " + std::to_string(AssumedCallees.size()) +
12494 void trackStatistics()
const override {
12495 if (AllCalleesKnown) {
12497 Eliminated, CallSites,
12498 "Number of indirect call sites eliminated via specialization")
12501 "Number of indirect call sites specialized")
12506 return isValidState() && AllCalleesKnown &&
all_of(AssumedCallees, CB);
12523 bool AllCalleesKnown =
true;
12530template <
typename InstType>
12531static bool makeChange(
Attributor &
A, InstType *MemInst,
const Use &U,
12533 bool UseOriginalValue) {
12534 if (
U.getOperandNo() != InstType::getPointerOperandIndex())
12537 if (MemInst->isVolatile()) {
12539 *MemInst->getFunction());
12540 unsigned NewAS = NewPtrTy->getPointerAddressSpace();
12545 if (UseOriginalValue) {
12546 A.changeUseAfterManifest(
const_cast<Use &
>(U), *OriginalValue);
12552 A.changeUseAfterManifest(
const_cast<Use &
>(U), *
CastInst);
12561 assert(isValidState() &&
"the AA is invalid");
12562 return AssumedAddressSpace;
12567 assert(getAssociatedType()->isPtrOrPtrVectorTy() &&
12568 "Associated value is not a pointer");
12570 if (!
A.getInfoCache().getFlatAddressSpace().has_value()) {
12571 indicatePessimisticFixpoint();
12575 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
12576 unsigned AS = getAssociatedType()->getPointerAddressSpace();
12577 if (AS != FlatAS) {
12578 [[maybe_unused]]
bool R = takeAddressSpace(AS);
12579 assert(R &&
"The take should happen");
12580 indicateOptimisticFixpoint();
12585 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
12586 uint32_t OldAddressSpace = AssumedAddressSpace;
12588 auto CheckAddressSpace = [&](
Value &Obj) {
12589 if (isa<UndefValue>(&Obj))
12593 if (
auto *Arg = dyn_cast<Argument>(&Obj)) {
12595 unsigned CastAddrSpace = FlatAS;
12596 for (
auto *U : Arg->
users()) {
12597 auto *ASCI = dyn_cast<AddrSpaceCastInst>(U);
12599 return takeAddressSpace(Obj.getType()->getPointerAddressSpace());
12600 if (CastAddrSpace != FlatAS &&
12601 CastAddrSpace != ASCI->getDestAddressSpace())
12603 CastAddrSpace = ASCI->getDestAddressSpace();
12605 if (CastAddrSpace != FlatAS)
12606 return takeAddressSpace(CastAddrSpace);
12609 return takeAddressSpace(Obj.getType()->getPointerAddressSpace());
12613 DepClassTy::REQUIRED);
12614 if (!AUO->forallUnderlyingObjects(CheckAddressSpace))
12615 return indicatePessimisticFixpoint();
12617 return OldAddressSpace == AssumedAddressSpace ? ChangeStatus::UNCHANGED
12618 : ChangeStatus::CHANGED;
12625 if (NewAS == InvalidAddressSpace ||
12627 return ChangeStatus::UNCHANGED;
12629 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
12631 Value *AssociatedValue = &getAssociatedValue();
12632 Value *OriginalValue = peelAddrspacecast(AssociatedValue, FlatAS);
12635 PointerType::get(getAssociatedType()->getContext(), NewAS);
12636 bool UseOriginalValue =
12639 bool Changed =
false;
12641 auto Pred = [&](
const Use &
U,
bool &) {
12642 if (
U.get() != AssociatedValue)
12644 auto *Inst = dyn_cast<Instruction>(
U.getUser());
12651 if (
auto *LI = dyn_cast<LoadInst>(Inst)) {
12653 makeChange(
A, LI, U, OriginalValue, NewPtrTy, UseOriginalValue);
12654 }
else if (
auto *SI = dyn_cast<StoreInst>(Inst)) {
12656 makeChange(
A, SI, U, OriginalValue, NewPtrTy, UseOriginalValue);
12657 }
else if (
auto *RMW = dyn_cast<AtomicRMWInst>(Inst)) {
12659 makeChange(
A, RMW, U, OriginalValue, NewPtrTy, UseOriginalValue);
12660 }
else if (
auto *CmpX = dyn_cast<AtomicCmpXchgInst>(Inst)) {
12662 makeChange(
A, CmpX, U, OriginalValue, NewPtrTy, UseOriginalValue);
12669 (void)
A.checkForAllUses(Pred, *
this, getAssociatedValue(),
12672 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
12676 const std::string getAsStr(
Attributor *
A)
const override {
12677 if (!isValidState())
12678 return "addrspace(<invalid>)";
12679 return "addrspace(" +
12680 (AssumedAddressSpace == InvalidAddressSpace
12682 : std::to_string(AssumedAddressSpace)) +
12687 uint32_t AssumedAddressSpace = InvalidAddressSpace;
12689 bool takeAddressSpace(
uint32_t AS) {
12690 if (AssumedAddressSpace == InvalidAddressSpace) {
12691 AssumedAddressSpace = AS;
12694 return AssumedAddressSpace == AS;
12697 static Value *peelAddrspacecast(
Value *V,
unsigned FlatAS) {
12698 if (
auto *
I = dyn_cast<AddrSpaceCastInst>(V)) {
12699 assert(
I->getSrcAddressSpace() != FlatAS &&
12700 "there should not be flat AS -> non-flat AS");
12701 return I->getPointerOperand();
12703 if (
auto *
C = dyn_cast<ConstantExpr>(V))
12704 if (
C->getOpcode() == Instruction::AddrSpaceCast) {
12705 assert(
C->getOperand(0)->getType()->getPointerAddressSpace() !=
12707 "there should not be flat AS -> non-flat AS X");
12708 return C->getOperand(0);
12714struct AAAddressSpaceFloating final : AAAddressSpaceImpl {
12716 : AAAddressSpaceImpl(IRP,
A) {}
12718 void trackStatistics()
const override {
12723struct AAAddressSpaceReturned final : AAAddressSpaceImpl {
12725 : AAAddressSpaceImpl(IRP,
A) {}
12731 (void)indicatePessimisticFixpoint();
12734 void trackStatistics()
const override {
12739struct AAAddressSpaceCallSiteReturned final : AAAddressSpaceImpl {
12741 : AAAddressSpaceImpl(IRP,
A) {}
12743 void trackStatistics()
const override {
12748struct AAAddressSpaceArgument final : AAAddressSpaceImpl {
12750 : AAAddressSpaceImpl(IRP,
A) {}
12755struct AAAddressSpaceCallSiteArgument final : AAAddressSpaceImpl {
12757 : AAAddressSpaceImpl(IRP,
A) {}
12763 (void)indicatePessimisticFixpoint();
12766 void trackStatistics()
const override {
12778 std::optional<TypeSize> getAllocatedSize()
const override {
12779 assert(isValidState() &&
"the AA is invalid");
12780 return AssumedAllocatedSize;
12783 std::optional<TypeSize> findInitialAllocationSize(
Instruction *
I,
12787 switch (
I->getOpcode()) {
12788 case Instruction::Alloca: {
12793 return std::nullopt;
12803 if (!isa<AllocaInst>(
I))
12804 return indicatePessimisticFixpoint();
12806 bool IsKnownNoCapture;
12807 if (!AA::hasAssumedIRAttr<Attribute::NoCapture>(
12808 A,
this, IRP, DepClassTy::OPTIONAL, IsKnownNoCapture))
12809 return indicatePessimisticFixpoint();
12812 A.getOrCreateAAFor<
AAPointerInfo>(IRP, *
this, DepClassTy::REQUIRED);
12815 return indicatePessimisticFixpoint();
12818 return indicatePessimisticFixpoint();
12821 const auto AllocationSize = findInitialAllocationSize(
I,
DL);
12824 if (!AllocationSize)
12825 return indicatePessimisticFixpoint();
12829 if (*AllocationSize == 0)
12830 return indicatePessimisticFixpoint();
12836 return indicatePessimisticFixpoint();
12838 if (BinSize == 0) {
12839 auto NewAllocationSize = std::optional<TypeSize>(
TypeSize(0,
false));
12840 if (!changeAllocationSize(NewAllocationSize))
12841 return ChangeStatus::UNCHANGED;
12842 return ChangeStatus::CHANGED;
12846 const auto &It = PI->
begin();
12849 if (It->first.Offset != 0)
12850 return indicatePessimisticFixpoint();
12852 uint64_t SizeOfBin = It->first.Offset + It->first.Size;
12854 if (SizeOfBin >= *AllocationSize)
12855 return indicatePessimisticFixpoint();
12857 auto NewAllocationSize =
12858 std::optional<TypeSize>(
TypeSize(SizeOfBin * 8,
false));
12860 if (!changeAllocationSize(NewAllocationSize))
12861 return ChangeStatus::UNCHANGED;
12863 return ChangeStatus::CHANGED;
12869 assert(isValidState() &&
12870 "Manifest should only be called if the state is valid.");
12874 auto FixedAllocatedSizeInBits = getAllocatedSize()->getFixedValue();
12876 unsigned long NumBytesToAllocate = (FixedAllocatedSizeInBits + 7) / 8;
12878 switch (
I->getOpcode()) {
12880 case Instruction::Alloca: {
12886 auto *NumBytesToValue =
12887 ConstantInt::get(
I->getContext(),
APInt(32, NumBytesToAllocate));
12890 insertPt = std::next(insertPt);
12896 return ChangeStatus::CHANGED;
12904 return ChangeStatus::UNCHANGED;
12908 const std::string getAsStr(
Attributor *
A)
const override {
12909 if (!isValidState())
12910 return "allocationinfo(<invalid>)";
12911 return "allocationinfo(" +
12912 (AssumedAllocatedSize == HasNoAllocationSize
12914 : std::to_string(AssumedAllocatedSize->getFixedValue())) +
12919 std::optional<TypeSize> AssumedAllocatedSize = HasNoAllocationSize;
12923 bool changeAllocationSize(std::optional<TypeSize>
Size) {
12924 if (AssumedAllocatedSize == HasNoAllocationSize ||
12925 AssumedAllocatedSize !=
Size) {
12926 AssumedAllocatedSize =
Size;
12933struct AAAllocationInfoFloating : AAAllocationInfoImpl {
12935 : AAAllocationInfoImpl(IRP,
A) {}
12937 void trackStatistics()
const override {
12942struct AAAllocationInfoReturned : AAAllocationInfoImpl {
12944 : AAAllocationInfoImpl(IRP,
A) {}
12950 (void)indicatePessimisticFixpoint();
12953 void trackStatistics()
const override {
12958struct AAAllocationInfoCallSiteReturned : AAAllocationInfoImpl {
12960 : AAAllocationInfoImpl(IRP,
A) {}
12962 void trackStatistics()
const override {
12967struct AAAllocationInfoArgument : AAAllocationInfoImpl {
12969 : AAAllocationInfoImpl(IRP,
A) {}
12971 void trackStatistics()
const override {
12976struct AAAllocationInfoCallSiteArgument : AAAllocationInfoImpl {
12978 : AAAllocationInfoImpl(IRP,
A) {}
12983 (void)indicatePessimisticFixpoint();
12986 void trackStatistics()
const override {
13033#define SWITCH_PK_INV(CLASS, PK, POS_NAME) \
13034 case IRPosition::PK: \
13035 llvm_unreachable("Cannot create " #CLASS " for a " POS_NAME " position!");
13037#define SWITCH_PK_CREATE(CLASS, IRP, PK, SUFFIX) \
13038 case IRPosition::PK: \
13039 AA = new (A.Allocator) CLASS##SUFFIX(IRP, A); \
13043#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13044 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13045 CLASS *AA = nullptr; \
13046 switch (IRP.getPositionKind()) { \
13047 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13048 SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
13049 SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
13050 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13051 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
13052 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
13053 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13054 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13059#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13060 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13061 CLASS *AA = nullptr; \
13062 switch (IRP.getPositionKind()) { \
13063 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13064 SWITCH_PK_INV(CLASS, IRP_FUNCTION, "function") \
13065 SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
13066 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13067 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13068 SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
13069 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13070 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13075#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS) \
13076 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13077 CLASS *AA = nullptr; \
13078 switch (IRP.getPositionKind()) { \
13079 SWITCH_PK_CREATE(CLASS, IRP, POS, SUFFIX) \
13081 llvm_unreachable("Cannot create " #CLASS " for position otherthan " #POS \
13087#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13088 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13089 CLASS *AA = nullptr; \
13090 switch (IRP.getPositionKind()) { \
13091 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13092 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13093 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13094 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13095 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13096 SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
13097 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13098 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13103#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13104 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13105 CLASS *AA = nullptr; \
13106 switch (IRP.getPositionKind()) { \
13107 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13108 SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
13109 SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
13110 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13111 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
13112 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
13113 SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
13114 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13119#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13120 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13121 CLASS *AA = nullptr; \
13122 switch (IRP.getPositionKind()) { \
13123 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13124 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13125 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13126 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13127 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13128 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13129 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13130 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13180#undef CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION
13181#undef CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION
13182#undef CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION
13183#undef CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION
13184#undef CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION
13185#undef CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION
13186#undef SWITCH_PK_CREATE
13187#undef SWITCH_PK_INV
AMDGPU Register Bank Select
This file implements a class to represent arbitrary precision integral constant values and operations...
ReachingDefAnalysis InstSet & ToRemove
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
This file contains the simple types necessary to represent the attributes associated with functions a...
#define STATS_DECLTRACK(NAME, TYPE, MSG)
static std::optional< Constant * > askForAssumedConstant(Attributor &A, const AbstractAttribute &QueryingAA, const IRPosition &IRP, Type &Ty)
static cl::opt< unsigned, true > MaxPotentialValues("attributor-max-potential-values", cl::Hidden, cl::desc("Maximum number of potential values to be " "tracked for each position."), cl::location(llvm::PotentialConstantIntValuesState::MaxPotentialValues), cl::init(7))
static const Value * getPointerOperand(const Instruction *I, bool AllowVolatile)
Get pointer operand of memory accessing instruction.
static void clampReturnedValueStates(Attributor &A, const AAType &QueryingAA, StateType &S, const IRPosition::CallBaseContext *CBContext=nullptr)
Clamp the information known for all returned values of a function (identified by QueryingAA) into S.
#define STATS_DECLTRACK_FN_ATTR(NAME)
#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxPotentialValuesIterations("attributor-max-potential-values-iterations", cl::Hidden, cl::desc("Maximum number of iterations we keep dismantling potential values."), cl::init(64))
#define STATS_DECLTRACK_CS_ATTR(NAME)
#define PIPE_OPERATOR(CLASS)
static bool mayBeInCycle(const CycleInfo *CI, const Instruction *I, bool HeaderOnly, Cycle **CPtr=nullptr)
#define STATS_DECLTRACK_ARG_ATTR(NAME)
static const Value * stripAndAccumulateOffsets(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Val, const DataLayout &DL, APInt &Offset, bool GetMinOffset, bool AllowNonInbounds, bool UseAssumed=false)
#define STATS_DECLTRACK_CSRET_ATTR(NAME)
static cl::opt< bool > ManifestInternal("attributor-manifest-internal", cl::Hidden, cl::desc("Manifest Attributor internal string attributes."), cl::init(false))
static Value * constructPointer(Value *Ptr, int64_t Offset, IRBuilder< NoFolder > &IRB)
Helper function to create a pointer based on Ptr, and advanced by Offset bytes.
#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define BUILD_STAT_NAME(NAME, TYPE)
static bool isDenselyPacked(Type *Ty, const DataLayout &DL)
Checks if a type could have padding bytes.
#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static const Value * getMinimalBaseOfPointer(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Ptr, int64_t &BytesOffset, const DataLayout &DL, bool AllowNonInbounds=false)
#define STATS_DECLTRACK_FNRET_ATTR(NAME)
#define STATS_DECLTRACK_CSARG_ATTR(NAME)
#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS)
#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxHeapToStackSize("max-heap-to-stack-size", cl::init(128), cl::Hidden)
#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define STATS_DECLTRACK_FLOATING_ATTR(NAME)
#define STATS_DECL(NAME, TYPE, MSG)
BlockVerifier::State From
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
static GCRegistry::Add< ErlangGC > A("erlang", "erlang-compatible garbage collector")
static bool isReachableImpl(SmallVectorImpl< BasicBlock * > &Worklist, const StopSetT &StopSet, const SmallPtrSetImpl< BasicBlock * > *ExclusionSet, const DominatorTree *DT, const LoopInfo *LI)
This file contains the declarations for the subclasses of Constant, which represent the different fla...
This file declares an analysis pass that computes CycleInfo for LLVM IR, specialized from GenericCycl...
Returns the sub type a function will return at a given Idx. Should correspond to the result type of an ExtractValue instruction executed with just that one unsigned Idx.
Given that RA is a live value, propagate its liveness to any other values it uses (according to Uses). void DeadArgumentEliminationPass
Performs the initial survey of the specified function
Given that RA is a live value
This file defines DenseMapInfo traits for DenseMap.
This file implements a map that provides insertion order iteration.
static unsigned getAddressSpace(const Value *V, unsigned MaxLookup)
ConstantRange Range(APInt(BitWidth, Low), APInt(BitWidth, High))
uint64_t IntrinsicInst * II
static StringRef getName(Value *V)
static cl::opt< RegAllocEvictionAdvisorAnalysis::AdvisorMode > Mode("regalloc-enable-advisor", cl::Hidden, cl::init(RegAllocEvictionAdvisorAnalysis::AdvisorMode::Default), cl::desc("Enable regalloc advisor mode"), cl::values(clEnumValN(RegAllocEvictionAdvisorAnalysis::AdvisorMode::Default, "default", "Default"), clEnumValN(RegAllocEvictionAdvisorAnalysis::AdvisorMode::Release, "release", "precompiled"), clEnumValN(RegAllocEvictionAdvisorAnalysis::AdvisorMode::Development, "development", "for training")))
Remove Loads Into Fake Uses
This builds on the llvm/ADT/GraphTraits.h file to find the strongly connected components (SCCs) of a ...
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
This file defines generic set operations that may be used on sets of different types,...
This file implements a set that has insertion order iteration characteristics.
This file defines the SmallPtrSet class.
This file defines the SmallVector class.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
static SymbolRef::Type getType(const Symbol *Sym)
static void initialize(TargetLibraryInfoImpl &TLI, const Triple &T, ArrayRef< StringLiteral > StandardNames)
Initialize the set of available library functions based on the specified target triple.
static unsigned getBitWidth(Type *Ty, const DataLayout &DL)
Returns the bitwidth of the given scalar or pointer type.
static unsigned getSize(unsigned Kind)
AACallGraphNode * operator*() const
A manager for alias analyses.
bool isNoAlias(const MemoryLocation &LocA, const MemoryLocation &LocB)
A trivial helper function to check to see if the specified pointers are no-alias.
Class for arbitrary precision integers.
int64_t getSExtValue() const
Get sign extended value.
CallBase * getInstruction() const
Return the underlying instruction.
bool isCallbackCall() const
Return true if this ACS represents a callback call.
bool isDirectCall() const
Return true if this ACS represents a direct call.
static void getCallbackUses(const CallBase &CB, SmallVectorImpl< const Use * > &CallbackUses)
Add operand uses of CB that represent callback uses into CallbackUses.
int getCallArgOperandNo(Argument &Arg) const
Return the operand index of the underlying instruction associated with Arg.
This class represents a conversion between pointers from one address space to another.
an instruction to allocate memory on the stack
Align getAlign() const
Return the alignment of the memory that is being allocated by the instruction.
unsigned getAddressSpace() const
Return the address space for the allocation.
std::optional< TypeSize > getAllocationSize(const DataLayout &DL) const
Get allocation size in bytes.
This class represents an incoming formal argument to a Function.
bool hasPointeeInMemoryValueAttr() const
Return true if this argument has the byval, sret, inalloca, preallocated, or byref attribute.
bool hasReturnedAttr() const
Return true if this argument has the returned attribute.
bool hasByValAttr() const
Return true if this argument has the byval attribute.
const Function * getParent() const
unsigned getArgNo() const
Return the index of this formal argument in its containing function.
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
A function analysis which provides an AssumptionCache.
A cache of @llvm.assume calls within a function.
static Attribute get(LLVMContext &Context, AttrKind Kind, uint64_t Val=0)
Return a uniquified Attribute object.
static Attribute getWithDereferenceableBytes(LLVMContext &Context, uint64_t Bytes)
FPClassTest getNoFPClass() const
Return the FPClassTest for nofpclass.
Attribute::AttrKind getKindAsEnum() const
Return the attribute's kind as an enum (Attribute::AttrKind).
MemoryEffects getMemoryEffects() const
Returns memory effects.
static Attribute getWithDereferenceableOrNullBytes(LLVMContext &Context, uint64_t Bytes)
static Attribute getWithNoFPClass(LLVMContext &Context, FPClassTest Mask)
AttrKind
This enumeration lists the attributes that can be associated with parameters, function results,...
static bool isEnumAttrKind(AttrKind Kind)
static Attribute getWithMemoryEffects(LLVMContext &Context, MemoryEffects ME)
static Attribute getWithAlignment(LLVMContext &Context, Align Alignment)
Return a uniquified Attribute object that has the specific alignment set.
LLVM Basic Block Representation.
const_iterator getFirstInsertionPt() const
Returns an iterator to the first instruction in this block that is suitable for inserting a non-PHI i...
const Instruction & front() const
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
const Function * getParent() const
Return the enclosing method, or null if none.
InstListType::iterator iterator
Instruction iterators...
const Instruction * getTerminator() const LLVM_READONLY
Returns the terminator instruction if the block is well formed or null if the block is not well forme...
BinaryOps getOpcode() const
Conditional or Unconditional Branch instruction.
unsigned getNumSuccessors() const
static BranchInst * Create(BasicBlock *IfTrue, InsertPosition InsertBefore=nullptr)
BasicBlock * getSuccessor(unsigned i) const
Value * getCondition() const
Allocate memory in an ever growing pool, as if by bump-pointer.
Base class for all callable instructions (InvokeInst and CallInst) Holds everything related to callin...
Function * getCalledFunction() const
Returns the function called, or null if this is an indirect function invocation or the function signa...
bool isMustTailCall() const
Tests if this call site must be tail call optimized.
bool isIndirectCall() const
Return true if the callsite is an indirect call.
bool isCallee(Value::const_user_iterator UI) const
Determine whether the passed iterator points to the callee operand's Use.
Value * getCalledOperand() const
const Use & getCalledOperandUse() const
const Use & getArgOperandUse(unsigned i) const
Wrappers for getting the Use of a call argument.
Value * getArgOperand(unsigned i) const
bool isBundleOperand(unsigned Idx) const
Return true if the operand at index Idx is a bundle operand.
bool isConvergent() const
Determine if the invoke is convergent.
FunctionType * getFunctionType() const
Intrinsic::ID getIntrinsicID() const
Returns the intrinsic ID of the intrinsic called or Intrinsic::not_intrinsic if the called function i...
iterator_range< User::op_iterator > args()
Iteration adapter for range-for loops.
unsigned getArgOperandNo(const Use *U) const
Given a use for a arg operand, get the arg operand number that corresponds to it.
unsigned arg_size() const
bool isArgOperand(const Use *U) const
Function * getCaller()
Helper to get the caller (the parent function).
This class represents a function call, abstracting a target machine's calling convention.
static CallInst * Create(FunctionType *Ty, Value *F, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
This is the base class for all instructions that perform data casts.
Instruction::CastOps getOpcode() const
Return the opcode of this CastInst.
bool isIntegerCast() const
There are several places where we need to know if a cast instruction only deals with integer source a...
Type * getDestTy() const
Return the destination type, as a convenience.
This class is the base class for the comparison instructions.
bool isEquality() const
Determine if this is an equals/not equals predicate.
bool isFalseWhenEqual() const
This is just a convenience.
Predicate
This enumeration lists the possible predicates for CmpInst subclasses.
bool isTrueWhenEqual() const
This is just a convenience.
Predicate getPredicate() const
Return the predicate for this instruction.
A constant value that is initialized with an expression using other constant values.
static Constant * getExtractElement(Constant *Vec, Constant *Idx, Type *OnlyIfReducedTy=nullptr)
This is the shared class of boolean and integer constants.
static ConstantInt * getTrue(LLVMContext &Context)
This class represents a range of values.
const APInt & getLower() const
Return the lower value for this range.
bool isFullSet() const
Return true if this set contains all of the elements possible for this data-type.
bool isEmptySet() const
Return true if this set contains no members.
APInt getSignedMin() const
Return the smallest signed value contained in the ConstantRange.
bool isSingleElement() const
Return true if this set contains exactly one member.
static ConstantRange makeAllowedICmpRegion(CmpInst::Predicate Pred, const ConstantRange &Other)
Produce the smallest range such that all values that may satisfy the given predicate with any value c...
const APInt & getUpper() const
Return the upper value for this range.
APInt getSignedMax() const
Return the largest signed value contained in the ConstantRange.
This is an important base class in LLVM.
Analysis pass which computes a CycleInfo.
This class represents an Operation in the Expression.
A parsed version of the target data layout string in and methods for querying it.
iterator find(const_arg_type_t< KeyT > Val)
size_type count(const_arg_type_t< KeyT > Val) const
Return 1 if the specified key is in the map, 0 otherwise.
bool contains(const_arg_type_t< KeyT > Val) const
Return true if the specified key is in the map, false otherwise.
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
Implements a dense probed hash-table based set.
Analysis pass which computes a DominatorTree.
Concrete subclass of DominatorTreeBase that is used to compute a normal dominator tree.
bool dominates(const BasicBlock *BB, const Use &U) const
Return true if the (end of the) basic block BB dominates the use U.
An instruction for ordering other memory operations.
A handy container for a FunctionType+Callee-pointer pair, which can be passed around as a single enti...
const BasicBlock & getEntryBlock() const
iterator_range< arg_iterator > args()
const Function & getFunction() const
Argument * getArg(unsigned i) const
bool hasFnAttribute(Attribute::AttrKind Kind) const
Return true if the function has the attribute.
CycleT * getCycle(const BlockT *Block) const
Find the innermost cycle containing a given block.
A possibly irreducible generalization of a Loop.
bool isDeclaration() const
Return true if the primary definition of this global value is outside of the current translation unit...
bool hasLocalLinkage() const
This instruction compares its operands according to the predicate given to the constructor.
static bool compare(const APInt &LHS, const APInt &RHS, ICmpInst::Predicate Pred)
Return result of LHS Pred RHS comparison.
Value * CreatePtrAdd(Value *Ptr, Value *Offset, const Twine &Name="", GEPNoWrapFlags NW=GEPNoWrapFlags::none())
ConstantInt * getInt64(uint64_t C)
Get a constant 64-bit value.
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
Instruction * clone() const
Create a copy of 'this' instruction that is identical in all ways except the following:
bool isLifetimeStartOrEnd() const LLVM_READONLY
Return true if the instruction is a llvm.lifetime.start or llvm.lifetime.end marker.
bool mayReadOrWriteMemory() const
Return true if this instruction may read or write memory.
bool mayWriteToMemory() const LLVM_READONLY
Return true if this instruction may modify memory.
void insertBefore(Instruction *InsertPos)
Insert an unlinked instruction into a basic block immediately before the specified instruction.
InstListType::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
const Function * getFunction() const
Return the function this instruction belongs to.
BasicBlock * getSuccessor(unsigned Idx) const LLVM_READONLY
Return the specified successor. This instruction must be a terminator.
bool mayHaveSideEffects() const LLVM_READONLY
Return true if the instruction may have side effects.
bool isTerminator() const
bool mayReadFromMemory() const LLVM_READONLY
Return true if this instruction may read memory.
const Instruction * getNextNonDebugInstruction(bool SkipPseudoOp=false) const
Return a pointer to the next non-debug instruction in the same basic block as 'this',...
unsigned getOpcode() const
Returns a member of one of the enums like Instruction::Add.
void setDebugLoc(DebugLoc Loc)
Set the debug location information for this instruction.
const DataLayout & getDataLayout() const
Get the data layout of the module this instruction belongs to.
A wrapper class for inspecting calls to intrinsic functions.
This is an important class for using LLVM in a threaded context.
Analysis to compute lazy value information.
This pass computes, caches, and vends lazy value constraint information.
ConstantRange getConstantRange(Value *V, Instruction *CxtI, bool UndefAllowed)
Return the ConstantRange constraint that is known to hold for the specified value at the specified in...
An instruction for reading from memory.
Analysis pass that exposes the LoopInfo for a function.
LoopT * getLoopFor(const BlockT *BB) const
Return the inner most loop that BB lives in.
const MDOperand & getOperand(unsigned I) const
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
unsigned getNumOperands() const
Return number of MDNode operands.
This class implements a map that also provides access to all stored values in a deterministic order.
static MemoryEffectsBase readOnly()
Create MemoryEffectsBase that can read any memory.
bool doesNotAccessMemory() const
Whether this function accesses no memory.
static MemoryEffectsBase argMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
Create MemoryEffectsBase that can only access argument memory.
static MemoryEffectsBase inaccessibleMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
Create MemoryEffectsBase that can only access inaccessible memory.
bool onlyAccessesInaccessibleMem() const
Whether this function only (at most) accesses inaccessible memory.
ModRefInfo getModRef(Location Loc) const
Get ModRefInfo for the given Location.
bool onlyAccessesArgPointees() const
Whether this function only (at most) accesses argument memory.
bool onlyReadsMemory() const
Whether this function only (at most) reads memory.
static MemoryEffectsBase writeOnly()
Create MemoryEffectsBase that can write any memory.
static MemoryEffectsBase inaccessibleOrArgMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
Create MemoryEffectsBase that can only access inaccessible or argument memory.
static MemoryEffectsBase none()
Create MemoryEffectsBase that cannot read or write any memory.
bool onlyAccessesInaccessibleOrArgMem() const
Whether this function only (at most) accesses argument and inaccessible memory.
static MemoryEffectsBase unknown()
Create MemoryEffectsBase that can read and write any memory.
static std::optional< MemoryLocation > getOrNone(const Instruction *Inst)
A Module instance is used to store all the information related to an LLVM module.
Evaluate the size and offset of an object pointed to by a Value*.
static SizeOffsetValue unknown()
static PHINode * Create(Type *Ty, unsigned NumReservedValues, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
Constructors - NumReservedValues is a hint for the number of incoming edges that this phi node will h...
static PoisonValue * get(Type *T)
Static factory methods - Return an 'poison' object of the specified type.
Return a value (possibly void), from a function.
Value * getReturnValue() const
Convenience accessor. Returns null if there is no return value.
This class represents an analyzed expression in the program.
Analysis pass that exposes the ScalarEvolution for a function.
The main scalar evolution driver.
const SCEV * getSCEVAtScope(const SCEV *S, const Loop *L)
Return a SCEV expression for the specified value at the specified scope in the program.
const SCEV * getSCEV(Value *V)
Return a SCEV expression for the full generality of the specified expression.
unsigned getSmallConstantMaxTripCount(const Loop *L, SmallVectorImpl< const SCEVPredicate * > *Predicates=nullptr)
Returns the upper bound of the loop trip count as a normal unsigned value.
ConstantRange getUnsignedRange(const SCEV *S)
Determine the unsigned range for a particular SCEV.
This class represents the LLVM 'select' instruction.
A vector that has set insertion semantics.
size_type size() const
Determine the number of elements in the SetVector.
bool insert(const value_type &X)
Insert a new element into the SetVector.
bool erase(PtrType Ptr)
Remove pointer from the set.
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
A SetVector that performs no allocations if smaller than a certain size.
SmallSet - This maintains a set of unique values, optimizing for the case when the set is small (less...
std::pair< const_iterator, bool > insert(const T &V)
insert - Insert an element into the set if it isn't already there.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
reference emplace_back(ArgTypes &&... Args)
void append(ItTy in_start, ItTy in_end)
Add the specified range to the end of the SmallVector.
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
An instruction for storing to memory.
StringRef - Represent a constant reference to a string, i.e.
Used to lazily calculate structure layout information for a target machine, based on the DataLayout s...
TypeSize getElementOffset(unsigned Idx) const
TypeSize getElementOffsetInBits(unsigned Idx) const
Class to represent struct types.
unsigned getNumElements() const
Random access to the elements.
Type * getElementType(unsigned N) const
Analysis pass providing the TargetTransformInfo.
Twine - A lightweight data structure for efficiently representing the concatenation of temporary valu...
The instances of the Type class are immutable: once they are created, they are never changed.
unsigned getIntegerBitWidth() const
bool isPointerTy() const
True if this is an instance of PointerType.
static IntegerType * getInt1Ty(LLVMContext &C)
unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
bool isSized(SmallPtrSetImpl< Type * > *Visited=nullptr) const
Return true if it makes sense to take the size of this type.
static IntegerType * getInt8Ty(LLVMContext &C)
bool isPtrOrPtrVectorTy() const
Return true if this is a pointer type or a vector of pointer types.
static IntegerType * getInt32Ty(LLVMContext &C)
bool isIntegerTy() const
True if this is an instance of IntegerType.
bool isVoidTy() const
Return true if this is 'void'.
'undef' values are things that do not have specified contents.
static UndefValue * get(Type *T)
Static factory methods - Return an 'undef' object of the specified type.
This function has undefined behavior.
A Use represents the edge between a Value definition and its users.
User * getUser() const
Returns the User that contains this Use.
bool replaceUsesOfWith(Value *From, Value *To)
Replace uses of one Value with another.
const Use & getOperandUse(unsigned i) const
Value * getOperand(unsigned i) const
unsigned getNumOperands() const
bool isDroppable() const
A droppable user is a user for which uses can be dropped without affecting correctness and should be ...
ValueT lookup(const KeyT &Val) const
lookup - Return the entry for the specified key, or a default constructed value if no such entry exis...
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
static constexpr uint64_t MaximumAlignment
const Value * stripAndAccumulateConstantOffsets(const DataLayout &DL, APInt &Offset, bool AllowNonInbounds, bool AllowInvariantGroup=false, function_ref< bool(Value &Value, APInt &Offset)> ExternalAnalysis=nullptr) const
Accumulate the constant offset this value has compared to a base pointer.
void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
iterator_range< user_iterator > users()
LLVMContext & getContext() const
All values hold a context through their type.
iterator_range< use_iterator > uses()
StringRef getName() const
Return a constant reference to the value's name.
std::pair< iterator, bool > insert(const ValueT &V)
constexpr ScalarTy getFixedValue() const
constexpr bool isScalable() const
Returns whether the quantity is scaled by a runtime quantity (vscale).
An efficient, type-erasing, non-owning reference to a callable.
const ParentTy * getParent() const
self_iterator getIterator()
NodeTy * getNextNode()
Get the next node, or nullptr for the list tail.
This class implements an extremely fast bulk output stream that can only output to a stream.
A raw_ostream that writes to an std::string.
Enumerate the SCCs of a directed graph in reverse topological order of the SCC DAG.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
bool isAssumedReadNone(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readnone.
bool isAssumedReadOnly(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readonly.
raw_ostream & operator<<(raw_ostream &OS, const RangeTy &R)
std::optional< Value * > combineOptionalValuesInAAValueLatice(const std::optional< Value * > &A, const std::optional< Value * > &B, Type *Ty)
Return the combination of A and B such that the result is a possible value of both.
bool isValidAtPosition(const ValueAndContext &VAC, InformationCache &InfoCache)
Return true if the value of VAC is valid at the position of VAC, that is a constant,...
bool isAssumedThreadLocalObject(Attributor &A, Value &Obj, const AbstractAttribute &QueryingAA)
Return true if Obj is assumed to be a thread local object.
bool isDynamicallyUnique(Attributor &A, const AbstractAttribute &QueryingAA, const Value &V, bool ForAnalysisOnly=true)
Return true if V is dynamically unique, that is, there are no two "instances" of V at runtime with di...
bool getPotentialCopiesOfStoredValue(Attributor &A, StoreInst &SI, SmallSetVector< Value *, 4 > &PotentialCopies, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values of the one stored by SI into PotentialCopies.
bool isGPU(const Module &M)
Return true iff M targets a GPU (and we can use GPU AS reasoning).
ValueScope
Flags to distinguish intra-procedural queries from potentially inter-procedural queries.
bool isValidInScope(const Value &V, const Function *Scope)
Return true if V is a valid value in Scope, that is a constant or an instruction/argument of Scope.
bool isPotentiallyReachable(Attributor &A, const Instruction &FromI, const Instruction &ToI, const AbstractAttribute &QueryingAA, const AA::InstExclusionSetTy *ExclusionSet=nullptr, std::function< bool(const Function &F)> GoBackwardsCB=nullptr)
Return true if ToI is potentially reachable from FromI without running into any instruction in Exclus...
bool isNoSyncInst(Attributor &A, const Instruction &I, const AbstractAttribute &QueryingAA)
Return true if I is a nosync instruction.
bool getPotentiallyLoadedValues(Attributor &A, LoadInst &LI, SmallSetVector< Value *, 4 > &PotentialValues, SmallSetVector< Instruction *, 4 > &PotentialValueOrigins, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values LI could read into PotentialValues.
Value * getWithType(Value &V, Type &Ty)
Try to convert V to type Ty without introducing new instructions.
constexpr char Attrs[]
Key for Kernel::Metadata::mAttrs.
@ C
The default llvm calling convention, compatible with C.
@ Unsupported
This operation is completely unsupported on the target.
@ Undef
Value of the register doesn't matter.
@ SingleThread
Synchronized with respect to signal handlers executing in the same thread.
@ CE
Windows NT (Windows on ARM)
initializer< Ty > init(const Ty &Val)
LocationClass< Ty > location(Ty &L)
unsigned combineHashValue(unsigned a, unsigned b)
Simplistic combination of 32-bit hash values into 32-bit hash values.
ElementType
The element type of an SRV or UAV resource.
Scope
Defines the scope in which this symbol should be visible: Default – Visible in the public interface o...
const_iterator begin(StringRef path, Style style=Style::native)
Get begin iterator over path.
const_iterator end(StringRef path)
Get end iterator over path.
This is an optimization pass for GlobalISel generic memory operations.
bool operator<(int64_t V1, const APSInt &V2)
LLVM_ATTRIBUTE_ALWAYS_INLINE DynamicAPInt gcd(const DynamicAPInt &A, const DynamicAPInt &B)
bool all_of(R &&range, UnaryPredicate P)
Provide wrappers to std::all_of which take ranges instead of having to pass begin/end explicitly.
bool isLegalToPromote(const CallBase &CB, Function *Callee, const char **FailureReason=nullptr)
Return true if the given indirect call site can be made to call Callee.
Constant * getInitialValueOfAllocation(const Value *V, const TargetLibraryInfo *TLI, Type *Ty)
If this is a call to an allocation function that initializes memory to a fixed value,...
auto size(R &&Range, std::enable_if_t< std::is_base_of< std::random_access_iterator_tag, typename std::iterator_traits< decltype(Range.begin())>::iterator_category >::value, void > *=nullptr)
Get the size of a range.
auto pred_end(const MachineBasicBlock *BB)
unsigned getPointerAddressSpace(const Type *T)
auto successors(const MachineBasicBlock *BB)
bool isRemovableAlloc(const CallBase *V, const TargetLibraryInfo *TLI)
Return true if this is a call to an allocation function that does not have side effects that we are r...
APFloat abs(APFloat X)
Returns the absolute value of the argument.
raw_fd_ostream & outs()
This returns a reference to a raw_fd_ostream for standard output.
UseCaptureKind DetermineUseCaptureKind(const Use &U, llvm::function_ref< bool(Value *, const DataLayout &)> IsDereferenceableOrNull)
Determine what kind of capture behaviour U may exhibit.
Value * getAllocAlignment(const CallBase *V, const TargetLibraryInfo *TLI)
Gets the alignment argument for an aligned_alloc-like function, using either built-in knowledge based...
Value * simplifyInstructionWithOperands(Instruction *I, ArrayRef< Value * > NewOps, const SimplifyQuery &Q)
Like simplifyInstruction but the operands of I are replaced with NewOps.
Value * GetPointerBaseWithConstantOffset(Value *Ptr, int64_t &Offset, const DataLayout &DL, bool AllowNonInbounds=true)
Analyze the specified pointer to see if it can be expressed as a base pointer plus a constant offset.
const Value * getUnderlyingObject(const Value *V, unsigned MaxLookup=6)
This method strips off any GEP address adjustments, pointer casts or llvm.threadlocal....
scc_iterator< T > scc_begin(const T &G)
Construct the begin iterator for a deduced graph type T.
PotentialValuesState< std::pair< AA::ValueAndContext, AA::ValueScope > > PotentialLLVMValuesState
bool isNoAliasCall(const Value *V)
Return true if this pointer is returned by a noalias function.
raw_ostream & WriteGraph(raw_ostream &O, const GraphType &G, bool ShortNames=false, const Twine &Title="")
bool operator==(const AddressRangeValuePair &LHS, const AddressRangeValuePair &RHS)
ConstantRange getConstantRangeFromMetadata(const MDNode &RangeMD)
Parse out a conservative ConstantRange from !range metadata.
auto map_range(ContainerTy &&C, FuncTy F)
const Value * getPointerOperand(const Value *V)
A helper function that returns the pointer operand of a load, store or GEP instruction.
Value * simplifyInstruction(Instruction *I, const SimplifyQuery &Q)
See if we can compute a simplified version of this instruction.
bool any_of(R &&range, UnaryPredicate P)
Provide wrappers to std::any_of which take ranges instead of having to pass begin/end explicitly.
unsigned Log2_32(uint32_t Value)
Return the floor log base 2 of the specified value, -1 if the value is zero.
constexpr bool isPowerOf2_32(uint32_t Value)
Return true if the argument is a power of two > 0.
void sort(IteratorTy Start, IteratorTy End)
MemoryEffectsBase< IRMemLocation > MemoryEffects
Summary of how a function affects memory in the program.
bool NullPointerIsDefined(const Function *F, unsigned AS=0)
Check whether null pointer dereferencing is considered undefined behavior for a given function or an ...
raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
bool isPointerTy(const Type *T)
bool wouldInstructionBeTriviallyDead(const Instruction *I, const TargetLibraryInfo *TLI=nullptr)
Return true if the result produced by the instruction would have no side effects if it was not used.
bool isSafeToSpeculativelyExecute(const Instruction *I, const Instruction *CtxI=nullptr, AssumptionCache *AC=nullptr, const DominatorTree *DT=nullptr, const TargetLibraryInfo *TLI=nullptr, bool UseVariableInfo=true)
Return true if the instruction does not have any effects besides calculating the result and does not ...
bool set_union(S1Ty &S1, const S2Ty &S2)
set_union(A, B) - Compute A := A u B, return whether A changed.
void RemapInstruction(Instruction *I, ValueToValueMapTy &VM, RemapFlags Flags=RF_None, ValueMapTypeRemapper *TypeMapper=nullptr, ValueMaterializer *Materializer=nullptr)
Convert the instruction operands from referencing the current values into those specified by VM.
CallBase & promoteCall(CallBase &CB, Function *Callee, CastInst **RetBitCast=nullptr)
Promote the given indirect call site to unconditionally call Callee.
bool hasAssumption(const Function &F, const KnownAssumptionString &AssumptionStr)
Return true if F has the assumption AssumptionStr attached.
RetainedKnowledge getKnowledgeFromUse(const Use *U, ArrayRef< Attribute::AttrKind > AttrKinds)
Return a valid Knowledge associated to the Use U if its Attribute kind is in AttrKinds.
bool isKnownNonZero(const Value *V, const SimplifyQuery &Q, unsigned Depth=0)
Return true if the given value is known to be non-zero when defined.
AtomicOrdering
Atomic ordering for LLVM's memory model.
ChangeStatus clampStateAndIndicateChange< DerefState >(DerefState &S, const DerefState &R)
PotentialValuesState< APInt > PotentialConstantIntValuesState
DWARFExpression::Operation Op
bool isGuaranteedNotToBeUndefOrPoison(const Value *V, AssumptionCache *AC=nullptr, const Instruction *CtxI=nullptr, const DominatorTree *DT=nullptr, unsigned Depth=0)
Return true if this function can prove that V does not have undef bits and is never poison.
Value * getFreedOperand(const CallBase *CB, const TargetLibraryInfo *TLI)
If this if a call to a free function, return the freed operand.
ChangeStatus clampStateAndIndicateChange(StateType &S, const StateType &R)
Helper function to clamp a state S of type StateType with the information in R and indicate/return if...
constexpr unsigned BitWidth
auto pred_begin(const MachineBasicBlock *BB)
std::optional< APInt > getAllocSize(const CallBase *CB, const TargetLibraryInfo *TLI, function_ref< const Value *(const Value *)> Mapper=[](const Value *V) { return V;})
Return the size of the requested allocation.
DenseSet< StringRef > getAssumptions(const Function &F)
Return the set of all assumptions for the function F.
Align assumeAligned(uint64_t Value)
Treats the value 0 as a 1, so Align is always at least 1.
Instruction * SplitBlockAndInsertIfThen(Value *Cond, BasicBlock::iterator SplitBefore, bool Unreachable, MDNode *BranchWeights=nullptr, DomTreeUpdater *DTU=nullptr, LoopInfo *LI=nullptr, BasicBlock *ThenBlock=nullptr)
Split the containing block at the specified instruction - everything before SplitBefore stays in the ...
@ OPTIONAL
The target may be valid if the source is not.
@ NONE
Do not track a dependence between source and target.
@ REQUIRED
The target cannot be valid if the source is not.
Value * simplifyCmpInst(CmpPredicate Predicate, Value *LHS, Value *RHS, const SimplifyQuery &Q)
Given operands for a CmpInst, fold the result or return null.
bool mayContainIrreducibleControl(const Function &F, const LoopInfo *LI)
T bit_floor(T Value)
Returns the largest integral power of two no greater than Value if Value is nonzero.
KnownFPClass computeKnownFPClass(const Value *V, const APInt &DemandedElts, FPClassTest InterestedClasses, unsigned Depth, const SimplifyQuery &SQ)
Determine which floating-point classes are valid for V, and return them in KnownFPClass bit sets.
bool isIdentifiedObject(const Value *V)
Return true if this pointer refers to a distinct and identifiable object.
constexpr StringRef AssumptionAttrKey
The key we use for assumption attributes.
Implement std::hash so that hash_code can be used in STL containers.
void swap(llvm::BitVector &LHS, llvm::BitVector &RHS)
Implement std::swap in terms of BitVector swap.
A type to track pointer/struct usage and accesses for AAPointerInfo.
AAPointerInfo::const_bin_iterator end() const
ChangeStatus addAccess(Attributor &A, const AAPointerInfo::RangeList &Ranges, Instruction &I, std::optional< Value * > Content, AAPointerInfo::AccessKind Kind, Type *Ty, Instruction *RemoteI=nullptr)
Add a new Access to the state at offset Offset and with size Size.
DenseMap< const Instruction *, SmallVector< unsigned > > RemoteIMap
AAPointerInfo::const_bin_iterator begin() const
AAPointerInfo::OffsetInfo ReturnedOffsets
Flag to determine if the underlying pointer is reaching a return statement in the associated function...
bool forallInterferingAccesses(Instruction &I, function_ref< bool(const AAPointerInfo::Access &, bool)> CB, AA::RangeTy &Range) const
See AAPointerInfo::forallInterferingAccesses.
State & operator=(State &&R)
State(State &&SIS)=default
const AAPointerInfo::Access & getAccess(unsigned Index) const
bool forallInterferingAccesses(AA::RangeTy Range, function_ref< bool(const AAPointerInfo::Access &, bool)> CB) const
See AAPointerInfo::forallInterferingAccesses.
SmallVector< AAPointerInfo::Access > AccessList
bool isAtFixpoint() const override
See AbstractState::isAtFixpoint().
static State getWorstState(const State &SIS)
Return the worst possible representable state.
int64_t numOffsetBins() const
AAPointerInfo::OffsetBinsTy OffsetBins
ChangeStatus indicateOptimisticFixpoint() override
See AbstractState::indicateOptimisticFixpoint().
State & operator=(const State &R)
ChangeStatus indicatePessimisticFixpoint() override
See AbstractState::indicatePessimisticFixpoint().
const State & getAssumed() const
static State getBestState(const State &SIS)
Return the best possible representable state.
bool isValidState() const override
See AbstractState::isValidState().
---------------------- AAIntraFnReachability Attribute ----------------------
ReachabilityQueryInfo(const ReachabilityQueryInfo &RQI)
unsigned Hash
Precomputed hash for this RQI.
ReachabilityQueryInfo(const Instruction *From, const ToTy *To)
ReachabilityQueryInfo(Attributor &A, const Instruction &From, const ToTy &To, const AA::InstExclusionSetTy *ES, bool MakeUnique)
Constructor replacement to ensure unique and stable sets are used for the cache.
unsigned computeHashValue() const
An abstract interface for address space information.
static const char ID
Unique ID (due to the unique address)
An abstract interface for all align attributes.
static const char ID
Unique ID (due to the unique address)
Align getKnownAlign() const
Return known alignment.
An abstract attribute for getting assumption information.
static const char ID
Unique ID (due to the unique address)
An abstract state for querying live call edges.
static const char ID
Unique ID (due to the unique address)
An abstract Attribute for specializing "dynamic" components of "denormal-fp-math" and "denormal-fp-ma...
static const char ID
Unique ID (due to the unique address)
An abstract interface for all dereferenceable attribute.
uint32_t getKnownDereferenceableBytes() const
Return known dereferenceable bytes.
uint32_t getAssumedDereferenceableBytes() const
Return assumed dereferenceable bytes.
static const char ID
Unique ID (due to the unique address)
An abstract interface for llvm::GlobalValue information interference.
static const char ID
Unique ID (due to the unique address)
static const char ID
Unique ID (due to the unique address)
An abstract interface for indirect call information interference.
static const char ID
Unique ID (due to the unique address)
An abstract interface to track if a value leaves its defining function instance.
static const char ID
Unique ID (due to the unique address)
An abstract Attribute for computing reachability between functions.
static const char ID
Unique ID (due to the unique address)
bool canReach(Attributor &A, const Function &Fn) const
If the function represented by this position can reach Fn.
virtual bool instructionCanReach(Attributor &A, const Instruction &Inst, const Function &Fn, const AA::InstExclusionSetTy *ExclusionSet=nullptr) const =0
Can Inst reach Fn.
An abstract interface to determine reachability of point A to B.
static const char ID
Unique ID (due to the unique address)
virtual bool isAssumedReachable(Attributor &A, const Instruction &From, const Instruction &To, const AA::InstExclusionSetTy *ExclusionSet=nullptr) const =0
Returns true if the 'From' instruction is assumed to reach the 'To' instruction.
An abstract interface for liveness abstract attribute.
static const char ID
Unique ID (due to the unique address)
An abstract interface for memory access kind related attributes (readnone/readonly/writeonly).
bool isAssumedReadOnly() const
Return true if we assume that the underlying value is not accessed (=written) in its respective scope...
bool isKnownReadNone() const
Return true if we know that the underlying value is not read or accessed in its respective scope.
static const char ID
Unique ID (due to the unique address)
bool isAssumedReadNone() const
Return true if we assume that the underlying value is not read or accessed in its respective scope.
An abstract interface for all memory location attributes (readnone/argmemonly/inaccessiblememonly/ina...
static std::string getMemoryLocationsAsStr(MemoryLocationsKind MLK)
Return the locations encoded by MLK as a readable string.
static const char ID
Unique ID (due to the unique address)
StateType::base_t MemoryLocationsKind
An abstract interface for all nonnull attributes.
static const char ID
Unique ID (due to the unique address)
An abstract interface for all noalias attributes.
static bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static const char ID
Unique ID (due to the unique address)
An abstract interface for all nocapture attributes.
static const char ID
Unique ID (due to the unique address)
@ NO_CAPTURE_MAYBE_RETURNED
If we do not capture the value in memory or through integers we can only communicate it back as a der...
bool isAssumedNoCaptureMaybeReturned() const
Return true if we assume that the underlying value is not captured in its respective scope but we all...
static bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static void determineFunctionCaptureCapabilities(const IRPosition &IRP, const Function &F, BitIntegerState &State)
Update State according to the capture capabilities of F for position IRP.
static const char ID
Unique ID (due to the unique address)
An AbstractAttribute for nofree.
static const char ID
Unique ID (due to the unique address)
An abstract attribute for norecurse.
static const char ID
Unique ID (due to the unique address)
An AbstractAttribute for noreturn.
static const char ID
Unique ID (due to the unique address)
static const char ID
Unique ID (due to the unique address)
static bool isAlignedBarrier(const CallBase &CB, bool ExecutedAligned)
Helper function to determine if CB is an aligned (GPU) barrier.
static bool isNonRelaxedAtomic(const Instruction *I)
Helper function used to determine whether an instruction is non-relaxed atomic.
static bool isNoSyncIntrinsic(const Instruction *I)
Helper function specific for intrinsics which are potentially volatile.
An abstract interface for all noundef attributes.
static const char ID
Unique ID (due to the unique address)
static bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static const char ID
Unique ID (due to the unique address)
An abstract Attribute for determining the necessity of the convergent attribute.
static const char ID
Unique ID (due to the unique address)
bool isAssumedNotConvergent() const
Return true if "non-convergent" is assumed.
An abstract interface for all nonnull attributes.
static const char ID
Unique ID (due to the unique address)
static bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See AbstractAttribute::isImpliedByIR(...).
A helper containing a list of offsets computed for a Use.
bool isUnassigned() const
A container for a list of ranges.
static void set_difference(const RangeList &L, const RangeList &R, RangeList &D)
Copy ranges from L that are not in R, into D.
An abstract interface for struct information.
virtual bool reachesReturn() const =0
virtual const_bin_iterator begin() const =0
static const char ID
Unique ID (due to the unique address)
virtual int64_t numOffsetBins() const =0
An abstract interface for potential values analysis.
static const char ID
Unique ID (due to the unique address)
static const char ID
Unique ID (due to the unique address)
static Value * getSingleValue(Attributor &A, const AbstractAttribute &AA, const IRPosition &IRP, SmallVectorImpl< AA::ValueAndContext > &Values)
Extract the single value in Values if any.
An abstract interface for privatizability.
virtual std::optional< Type * > getPrivatizableType() const =0
Return the type we can choose for a private copy of the underlying value.
static const char ID
Unique ID (due to the unique address)
An abstract attribute for undefined behavior.
static const char ID
Unique ID (due to the unique address)
An abstract attribute for getting all assumption underlying objects.
static const char ID
Unique ID (due to the unique address)
An abstract interface for range value analysis.
static const char ID
Unique ID (due to the unique address)
An abstract interface for value simplify abstract attribute.
static const char ID
Unique ID (due to the unique address)
An abstract attribute for willreturn.
static const char ID
Unique ID (due to the unique address)
Helper to represent an access offset and size, with logic to deal with uncertainty and check for over...
static constexpr int64_t Unknown
static RangeTy getUnknown()
Base struct for all "concrete attribute" deductions.
void print(raw_ostream &OS) const
Helper functions, for debug purposes only.
virtual StateType & getState()=0
Return the internal abstract state for inspection.
An interface to query the internal state of an abstract attribute.
virtual ChangeStatus indicatePessimisticFixpoint()=0
Indicate that the abstract state should converge to the pessimistic state.
virtual bool isAtFixpoint() const =0
Return if this abstract state is fixed, thus does not need to be updated if information changes as it...
virtual bool isValidState() const =0
Return if this abstract state is in a valid state.
virtual ChangeStatus indicateOptimisticFixpoint()=0
Indicate that the abstract state should converge to the optimistic state.
Helper for AA::PointerInfo::Access DenseMap/Set usage ignoring everything but the instruction.
static unsigned getHashValue(const Access &A)
static Access getTombstoneKey()
static bool isEqual(const Access &LHS, const Access &RHS)
static Access getEmptyKey()
This struct is a compact representation of a valid (non-zero power of two) alignment.
uint64_t value() const
This is a hole in the type system and should not be abused.
Helper struct used in the communication between an abstract attribute (AA) that wants to change the s...
std::function< void(const ArgumentReplacementInfo &, AbstractCallSite, SmallVectorImpl< Value * > &)> ACSRepairCBTy
Abstract call site (ACS) repair callback type.
const Argument & getReplacedArg() const
std::function< void(const ArgumentReplacementInfo &, Function &, Function::arg_iterator)> CalleeRepairCBTy
Callee repair callback type.
The fixpoint analysis framework that orchestrates the attribute deduction.
std::function< std::optional< Value * >(const IRPosition &, const AbstractAttribute *, bool &)> SimplifictionCallbackTy
Register CB as a simplification callback.
Specialization of the integer state for a bit-wise encoding.
BitIntegerState & removeAssumedBits(base_t BitsEncoding)
Remove the bits in BitsEncoding from the "assumed bits" if not known.
BitIntegerState & addKnownBits(base_t Bits)
Add the bits in BitsEncoding to the "known bits".
Simple wrapper for a single bit (boolean) state.
Represent subnormal handling kind for floating point instruction inputs and outputs.
static constexpr DenormalMode getDefault()
Return the assumed default mode for a function without denormal-fp-math.
static constexpr DenormalMode getInvalid()
static Access getTombstoneKey()
static unsigned getHashValue(const Access &A)
static Access getEmptyKey()
static bool isEqual(const Access &LHS, const Access &RHS)
static bool isEqual(const AA::RangeTy &A, const AA::RangeTy B)
static AA::RangeTy getTombstoneKey()
static unsigned getHashValue(const AA::RangeTy &Range)
static AA::RangeTy getEmptyKey()
static ReachabilityQueryInfo< ToTy > EmptyKey
static ReachabilityQueryInfo< ToTy > TombstoneKey
static ReachabilityQueryInfo< ToTy > * getEmptyKey()
static ReachabilityQueryInfo< ToTy > * getTombstoneKey()
static bool isEqual(const ReachabilityQueryInfo< ToTy > *LHS, const ReachabilityQueryInfo< ToTy > *RHS)
static unsigned getHashValue(const ReachabilityQueryInfo< ToTy > *RQI)
An information struct used to provide DenseMap with the various necessary components for a given valu...
State for dereferenceable attribute.
IncIntegerState DerefBytesState
State representing for dereferenceable bytes.
ChangeStatus manifest(Attributor &A) override
See AbstractAttribute::manifest(...).
Helper to describe and deal with positions in the LLVM-IR.
Function * getAssociatedFunction() const
Return the associated function, if any.
static const IRPosition callsite_returned(const CallBase &CB)
Create a position describing the returned value of CB.
static const IRPosition returned(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the returned value of F.
Argument * getAssociatedArgument() const
Return the associated argument, if any.
static const IRPosition value(const Value &V, const CallBaseContext *CBContext=nullptr)
Create a position describing the value of V.
int getCalleeArgNo() const
Return the callee argument number of the associated value if it is an argument or call site argument,...
static const IRPosition inst(const Instruction &I, const CallBaseContext *CBContext=nullptr)
Create a position describing the instruction I.
static const IRPosition callsite_argument(const CallBase &CB, unsigned ArgNo)
Create a position describing the argument of CB at position ArgNo.
@ IRP_ARGUMENT
An attribute for a function argument.
@ IRP_RETURNED
An attribute for the function return value.
@ IRP_CALL_SITE
An attribute for a call site (function scope).
@ IRP_CALL_SITE_RETURNED
An attribute for a call site return value.
@ IRP_FUNCTION
An attribute for a function (scope).
@ IRP_CALL_SITE_ARGUMENT
An attribute for a call site argument.
@ IRP_INVALID
An invalid position.
Instruction * getCtxI() const
Return the context instruction, if any.
static const IRPosition argument(const Argument &Arg, const CallBaseContext *CBContext=nullptr)
Create a position describing the argument Arg.
Type * getAssociatedType() const
Return the type this abstract attribute is associated with.
static const IRPosition function(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the function scope of F.
const CallBaseContext * getCallBaseContext() const
Get the call base context from the position.
Value & getAssociatedValue() const
Return the value this abstract attribute is associated with.
Value & getAnchorValue() const
Return the value this abstract attribute is anchored with.
int getCallSiteArgNo() const
Return the call site argument number of the associated value if it is an argument or call site argume...
static const IRPosition function_scope(const IRPosition &IRP, const CallBaseContext *CBContext=nullptr)
Create a position with function scope matching the "context" of IRP.
Kind getPositionKind() const
Return the associated position kind.
bool isArgumentPosition() const
Return true if the position is an argument or call site argument.
static const IRPosition callsite_function(const CallBase &CB)
Create a position describing the function scope of CB.
Function * getAnchorScope() const
Return the Function surrounding the anchor value.
Incoming for lane maks phi as machine instruction, incoming register Reg and incoming block Block are...
State for an integer range.
ConstantRange getKnown() const
Return the known state encoding.
ConstantRange getAssumed() const
Return the assumed state encoding.
bool isValidState() const override
See AbstractState::isValidState() NOTE: For now we simply pretend that the worst possible state is in...
bool isAtFixpoint() const override
See AbstractState::isAtFixpoint()
ChangeStatus indicateOptimisticFixpoint() override
See AbstractState::indicateOptimisticFixpoint(...)
base_t getAssumed() const
Return the assumed state encoding.
static constexpr base_t getWorstState()
Return the worst possible representable state.
ChangeStatus indicatePessimisticFixpoint() override
See AbstractState::indicatePessimisticFixpoint(...)
Helper that allows to insert a new assumption string in the known assumption set by creating a (stati...
FPClassTest KnownFPClasses
Floating-point classes the value could be one of.
This struct is a compact representation of a valid (power of two) or undefined (0) alignment.
A "must be executed context" for a given program point PP is the set of instructions,...
iterator & end()
Return an universal end iterator.
bool findInContextOf(const Instruction *I, const Instruction *PP)
Helper to look for I in the context of PP.
iterator & begin(const Instruction *PP)
Return an iterator to explore the context around PP.
bool checkForAllContext(const Instruction *PP, function_ref< bool(const Instruction *)> Pred)
}
Various options to control the behavior of getObjectSize.
static unsigned MaxPotentialValues
Maximum number of potential values to be tracked.
void unionAssumed(const MemberTy &C)
Union assumed set with the passed value.
const SetTy & getAssumedSet() const
Return this set.
Represent one information held inside an operand bundle of an llvm.assume.
A MapVector that performs no allocations if smaller than a certain size.
Helper to tie a abstract state implementation to an abstract attribute.
StateType & getState() override
See AbstractAttribute::getState(...).
bool unionAssumed(std::optional< Value * > Other)
Merge Other into the currently assumed simplified value.