#define DEBUG_TYPE "objc-arc-opts"
static cl::opt<unsigned> MaxPtrStates(
    "arc-opt-max-ptr-states", cl::Hidden,
    cl::desc("Maximum number of ptr states the optimizer keeps track of"),
    cl::init(4095));
  if (isa<ConstantData>(Arg))

  if (const BitCastInst *BC = dyn_cast<BitCastInst>(Arg))

  if (GEP->hasAllZeroIndices())

      cast<CallInst>(Arg)->getArgOperand(0));
STATISTIC(NumNoops, "Number of no-op objc calls eliminated");
STATISTIC(NumPartialNoops, "Number of partially no-op objc calls eliminated");
STATISTIC(NumAutoreleases, "Number of autoreleases converted to releases");
STATISTIC(NumRets, "Number of return value forwarding "
                   "retain+autoreleases eliminated");
STATISTIC(NumRRs, "Number of retain+release paths eliminated");
STATISTIC(NumPeeps, "Number of calls peephole-optimized");
STATISTIC(NumRetainsBeforeOpt, "Number of retains before optimization");
STATISTIC(NumReleasesBeforeOpt, "Number of releases before optimization");
STATISTIC(NumRetainsAfterOpt, "Number of retains after optimization");
STATISTIC(NumReleasesAfterOpt, "Number of releases after optimization");
  unsigned TopDownPathCount = 0;

  unsigned BottomUpPathCount = 0;
  using top_down_ptr_iterator = decltype(PerPtrTopDown)::iterator;
  using const_top_down_ptr_iterator = decltype(PerPtrTopDown)::const_iterator;

  top_down_ptr_iterator top_down_ptr_begin() { return PerPtrTopDown.begin(); }
  top_down_ptr_iterator top_down_ptr_end() { return PerPtrTopDown.end(); }
  const_top_down_ptr_iterator top_down_ptr_begin() const {
    return PerPtrTopDown.begin();
  }
  const_top_down_ptr_iterator top_down_ptr_end() const {
    return PerPtrTopDown.end();
  }
  bool hasTopDownPtrs() const {
    return !PerPtrTopDown.empty();
  }

  unsigned top_down_ptr_list_size() const {
    return std::distance(top_down_ptr_begin(), top_down_ptr_end());
  }
  using bottom_up_ptr_iterator = decltype(PerPtrBottomUp)::iterator;
  using const_bottom_up_ptr_iterator = decltype(PerPtrBottomUp)::const_iterator;

  bottom_up_ptr_iterator bottom_up_ptr_begin() {
    return PerPtrBottomUp.begin();
  }
  bottom_up_ptr_iterator bottom_up_ptr_end() { return PerPtrBottomUp.end(); }
  const_bottom_up_ptr_iterator bottom_up_ptr_begin() const {
    return PerPtrBottomUp.begin();
  }
  const_bottom_up_ptr_iterator bottom_up_ptr_end() const {
    return PerPtrBottomUp.end();
  }
  bool hasBottomUpPtrs() const {
    return !PerPtrBottomUp.empty();
  }

  unsigned bottom_up_ptr_list_size() const {
    return std::distance(bottom_up_ptr_begin(), bottom_up_ptr_end());
  }
  // Entry blocks have exactly one top-down path; exit blocks have exactly one
  // bottom-up path.
  void SetAsEntry() { TopDownPathCount = 1; }
  void SetAsExit() { BottomUpPathCount = 1; }

  TopDownPtrState &getPtrTopDownState(const Value *Arg) {
    return PerPtrTopDown[Arg];
  }
  BottomUpPtrState &getPtrBottomUpState(const Value *Arg) {
    return PerPtrBottomUp[Arg];
  }

  bottom_up_ptr_iterator findPtrBottomUpState(const Value *Arg) {
    return PerPtrBottomUp.find(Arg);
  }

  void clearBottomUpPointers() {
    PerPtrBottomUp.clear();
  }

  void clearTopDownPointers() {
    PerPtrTopDown.clear();
  }
  void InitFromPred(const BBState &Other);
  void InitFromSucc(const BBState &Other);
  void MergePred(const BBState &Other);
  void MergeSucc(const BBState &Other);

  // Returns true if the path count overflowed; otherwise PathCount is set to
  // the product of the top-down and bottom-up path counts.
  bool GetAllPathCountWithOverflow(unsigned &PathCount) const {

    unsigned long long Product =
        (unsigned long long)TopDownPathCount * BottomUpPathCount;

    // Overflow occurred if any of the upper bits of Product are set, or if
    // the truncated count equals the overflow sentinel.
    return (Product >> 32) ||
           ((PathCount = Product) == OverflowOccurredValue);
  }
  edge_iterator pred_end() const { return Preds.end(); }

  edge_iterator succ_end() const { return Succs.end(); }

  bool isExit() const { return Succs.empty(); }
void BBState::InitFromPred(const BBState &Other) {
  PerPtrTopDown = Other.PerPtrTopDown;
  TopDownPathCount = Other.TopDownPathCount;
}

void BBState::InitFromSucc(const BBState &Other) {
  PerPtrBottomUp = Other.PerPtrBottomUp;
  BottomUpPathCount = Other.BottomUpPathCount;
}
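// The top-down traversal uses MergePred to fold a predecessor's state into the
// current block's state; the bottom-up traversal does the same with MergeSucc
// for successors. Path counts saturate at OverflowOccurredValue, at which
// point the per-pointer state is dropped conservatively.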
void BBState::MergePred(const BBState &Other) {
  if (TopDownPathCount == OverflowOccurredValue)
    return;

  TopDownPathCount += Other.TopDownPathCount;

  // If the path count has hit the overflow sentinel, be conservative and
  // clear the per-pointer state.
  if (TopDownPathCount == OverflowOccurredValue) {
    clearTopDownPointers();
    return;
  }

  // Check for unsigned overflow of the addition itself.
  if (TopDownPathCount < Other.TopDownPathCount) {
    TopDownPathCount = OverflowOccurredValue;
    clearTopDownPointers();
    return;
  }

  // Merge each pointer state tracked by the predecessor into our own map.
  for (auto MI = Other.top_down_ptr_begin(), ME = Other.top_down_ptr_end();
       MI != ME; ++MI) {
    auto Pair = PerPtrTopDown.insert(*MI);

  // Pointers we track that the predecessor does not know about must be merged
  // conservatively as well.
  for (auto MI = top_down_ptr_begin(), ME = top_down_ptr_end(); MI != ME; ++MI)
    if (Other.PerPtrTopDown.find(MI->first) == Other.PerPtrTopDown.end())
void BBState::MergeSucc(const BBState &Other) {
  if (BottomUpPathCount == OverflowOccurredValue)
    return;

  BottomUpPathCount += Other.BottomUpPathCount;

  // If the path count has hit the overflow sentinel, be conservative and
  // clear the per-pointer state.
  if (BottomUpPathCount == OverflowOccurredValue) {
    clearBottomUpPointers();
    return;
  }

  // Check for unsigned overflow of the addition itself.
  if (BottomUpPathCount < Other.BottomUpPathCount) {
    BottomUpPathCount = OverflowOccurredValue;
    clearBottomUpPointers();
    return;
  }

  // Merge each pointer state tracked by the successor into our own map.
  for (auto MI = Other.bottom_up_ptr_begin(), ME = Other.bottom_up_ptr_end();
       MI != ME; ++MI) {
    auto Pair = PerPtrBottomUp.insert(*MI);

  // Pointers we track that the successor does not know about must be merged
  // conservatively as well.
  for (auto MI = bottom_up_ptr_begin(), ME = bottom_up_ptr_end(); MI != ME; ++MI)
    if (Other.PerPtrBottomUp.find(MI->first) == Other.PerPtrBottomUp.end())
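// Pretty-print a BBState for debug output: dump the top-down and bottom-up
// per-pointer states tracked for the block.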
  OS << " TopDown State:\n";
  if (!BBInfo.hasTopDownPtrs()) {

  for (auto I = BBInfo.top_down_ptr_begin(), E = BBInfo.top_down_ptr_end();
       I != E; ++I) {
    OS << " Ptr: " << *I->first
       << "\n KnownSafe: " << (P.IsKnownSafe() ? "true" : "false")
       << "\n ImpreciseRelease: "
       << (P.IsTrackingImpreciseReleases() ? "true" : "false") << "\n"
       << " HasCFGHazards: "
       << (P.IsCFGHazardAfflicted() ? "true" : "false") << "\n"
       << " KnownPositive: "
       << (P.HasKnownPositiveRefCount() ? "true" : "false") << "\n"
       << P.GetSeq() << "\n";
  OS << " BottomUp State:\n";
  if (!BBInfo.hasBottomUpPtrs()) {

  for (auto I = BBInfo.bottom_up_ptr_begin(), E = BBInfo.bottom_up_ptr_end();
       I != E; ++I) {
    OS << " Ptr: " << *I->first
       << "\n KnownSafe: " << (P.IsKnownSafe() ? "true" : "false")
       << "\n ImpreciseRelease: "
       << (P.IsTrackingImpreciseReleases() ? "true" : "false") << "\n"
       << " HasCFGHazards: "
       << (P.IsCFGHazardAfflicted() ? "true" : "false") << "\n"
       << " KnownPositive: "
       << (P.HasKnownPositiveRefCount() ? "true" : "false") << "\n"
       << P.GetSeq() << "\n";
    bool Changed = false;
    bool CFGChanged = false;

    bool DisableRetainReleasePairing = false;

    unsigned UsedInThisFunction;

    void OptimizeIndividualCalls(Function &F);

        const Value *&AutoreleaseRVArg);

        BBState &MyStates) const;

    bool VisitInstructionTopDown(

        &ReleaseInsertPtToRCIdentityRoots);

        &ReleaseInsertPtToRCIdentityRoots);

        Value *Arg, bool KnownSafe,
        bool &AnyPairsCompletelyEliminated);

    template <typename PredicateT>
    static void cloneOpBundlesIf(CallBase *CI,

      if (!BlockEHColors.empty()) {
        if (auto *EHPad = dyn_cast<FuncletPadInst>(EHPadBB->getFirstNonPHI())) {

    void GatherStatistics(Function &F, bool AfterOptimization = false);

    bool hasCFGChanged() const { return CFGChanged; }
    if (II->getNormalDest() == RetainRVParent) {

         "a bundled retainRV's argument should be a call");

  LLVM_DEBUG(dbgs() << "Transforming objc_retainAutoreleasedReturnValue => "
                       "objc_retain since the operand is not a return value.\n"

  Function *NewDecl = EP.get(ARCRuntimeEntryPointKind::Retain);
  cast<CallInst>(RetainRV)->setCalledFunction(NewDecl);
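// When a retainRV or unsafeClaimRV call consumes the result of an inlined
// autoreleaseRV call on the same RC-identical argument, the autoreleaseRV and
// the retain cancel out; an unsafeClaimRV is instead rewritten to an
// objc_release, which is then fed back through the per-call optimizer.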
bool ObjCARCOpt::OptimizeInlinedAutoreleaseRVCall(

  if (Arg != AutoreleaseRVArg) {
    const PHINode *PN = dyn_cast<PHINode>(Arg);

  LLVM_DEBUG(dbgs() << "Found inlined objc_autoreleaseReturnValue '"

  if (Class == ARCInstKind::RetainRV) {

  assert(Class == ARCInstKind::UnsafeClaimRV);
  Value *CallArg = cast<CallInst>(Inst)->getArgOperand(0);

         "Expected UnsafeClaimRV to be safe to tail call");

  OptimizeIndividualCallImpl(F, Release, ARCInstKind::Release, Arg);
void ObjCARCOpt::OptimizeAutoreleaseRVCall(Function &F,

  if (isa<ConstantData>(Ptr))

  if (const PHINode *PN = dyn_cast<PHINode>(Ptr))

    for (const User *U : Ptr->users()) {
      if (isa<BitCastInst>(U))

  } while (!Users.empty());

  dbgs() << "Transforming objc_autoreleaseReturnValue => "
            "objc_autorelease since its operand is not used as a return "

  Function *NewDecl = EP.get(ARCRuntimeEntryPointKind::Autorelease);

  Class = ARCInstKind::Autorelease;
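// Visit each call, one at a time, and make simplifications without doing any
// additional analysis. AutoreleaseRV calls are delayed so they can be paired
// with a following retainRV/unsafeClaimRV that survives inlining.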
void ObjCARCOpt::OptimizeIndividualCalls(Function &F) {
  LLVM_DEBUG(dbgs() << "\n== ObjCARCOpt::OptimizeIndividualCalls ==\n");

  UsedInThisFunction = 0;

  const Value *DelayedAutoreleaseRVArg = nullptr;

    DelayedAutoreleaseRVArg = nullptr;

  auto optimizeDelayedAutoreleaseRV = [&]() {
    if (!DelayedAutoreleaseRV)
      return;
    OptimizeIndividualCallImpl(F, DelayedAutoreleaseRV,
                               ARCInstKind::AutoreleaseRV,
                               DelayedAutoreleaseRVArg);
    setDelayedAutoreleaseRV(nullptr);
  };

  auto shouldDelayAutoreleaseRV = [&](Instruction *NonARCInst) {
    if (!DelayedAutoreleaseRV)

    if (NonARCInst->isTerminator())

    auto *CB = dyn_cast<CallBase>(NonARCInst);

    if (auto *CI = dyn_cast<CallInst>(Inst))

    const Value *Arg = nullptr;

      optimizeDelayedAutoreleaseRV();

    case ARCInstKind::CallOrUser:
    case ARCInstKind::User:
    case ARCInstKind::None:
      if (!shouldDelayAutoreleaseRV(Inst))
        optimizeDelayedAutoreleaseRV();

    case ARCInstKind::AutoreleaseRV:
      optimizeDelayedAutoreleaseRV();
      setDelayedAutoreleaseRV(Inst);

    case ARCInstKind::RetainRV:
    case ARCInstKind::UnsafeClaimRV:
      if (DelayedAutoreleaseRV) {

        if (OptimizeInlinedAutoreleaseRVCall(F, Inst, Arg, Class,
                                             DelayedAutoreleaseRV,
                                             DelayedAutoreleaseRVArg)) {
          setDelayedAutoreleaseRV(nullptr);

        optimizeDelayedAutoreleaseRV();

    OptimizeIndividualCallImpl(F, Inst, Class, Arg);

  optimizeDelayedAutoreleaseRV();
  V = V->stripPointerCasts();

  if (auto *GV = dyn_cast<GlobalVariable>(V))
    if (GV->hasAttribute("objc_arc_inert"))

  if (auto PN = dyn_cast<PHINode>(V)) {
    if (!VisitedPhis.insert(PN).second)

  LLVM_DEBUG(dbgs() << "Visiting: Class: " << Class << "; " << *Inst << "\n");

  UsedInThisFunction |= 1 << unsigned(Class);
  case ARCInstKind::NoopCast:

  case ARCInstKind::StoreWeak:
  case ARCInstKind::LoadWeak:
  case ARCInstKind::LoadWeakRetained:
  case ARCInstKind::InitWeak:
  case ARCInstKind::DestroyWeak: {
    CallInst *CI = cast<CallInst>(Inst);

      dbgs() << "A null pointer-to-weak-pointer is undefined behavior."
             << *CI << "\nNew = " << *NewValue << "\n");

  case ARCInstKind::CopyWeak:
  case ARCInstKind::MoveWeak: {
    CallInst *CI = cast<CallInst>(Inst);

      dbgs() << "A null pointer-to-weak-pointer is undefined behavior."
             << *CI << "\nNew = " << *NewValue << "\n");
  case ARCInstKind::RetainRV:
    if (OptimizeRetainRVCall(F, Inst))

  case ARCInstKind::AutoreleaseRV:
    OptimizeAutoreleaseRVCall(F, Inst, Class);

    const Value *Arg = Call->getArgOperand(0);

    Function *Decl = EP.get(ARCRuntimeEntryPointKind::Release);

                     Call->getIterator());
    NewCall->setMetadata(MDKindCache.get(ARCMDKindID::ImpreciseRelease),

    LLVM_DEBUG(dbgs() << "Replacing autorelease{,RV}(x) with objc_release(x) "
                         "since x is otherwise unused.\nOld: "
                      << *Call << "\nNew: " << *NewCall << "\n");

    Class = ARCInstKind::Release;
  if (IsAlwaysTail(Class) && !cast<CallInst>(Inst)->isNoTailCall()) {

      dbgs() << "Adding tail keyword to function since it can never be "
                "passed stack args: "

    cast<CallInst>(Inst)->setTailCall();

    LLVM_DEBUG(dbgs() << "Removing tail keyword from function: " << *Inst

    cast<CallInst>(Inst)->setTailCall(false);

    LLVM_DEBUG(dbgs() << "Found no throw class. Setting nounwind on: " << *Inst

    cast<CallInst>(Inst)->setDoesNotThrow();

  UsedInThisFunction |= 1 << unsigned(Class);

  LLVM_DEBUG(dbgs() << "ARC calls with null are no-ops. Erasing: " << *Inst

  UsedInThisFunction |= 1 << unsigned(Class);
  if (Class == ARCInstKind::Release &&
      !Inst->getMetadata(MDKindCache.get(ARCMDKindID::ImpreciseRelease)))

  Worklist.push_back(std::make_pair(Inst, Arg));

    std::pair<Instruction *, const Value *> Pair = Worklist.pop_back_val();

    const PHINode *PN = dyn_cast<PHINode>(Arg);

    bool HasNull = false;
    bool HasCriticalEdges = false;

        HasCriticalEdges = true;

    if (HasCriticalEdges)

        case ARCInstKind::Retain:
        case ARCInstKind::RetainBlock:

        case ARCInstKind::Release:

        case ARCInstKind::Autorelease:

        case ARCInstKind::UnsafeClaimRV:
        case ARCInstKind::RetainRV:
        case ARCInstKind::AutoreleaseRV:

      CallInst *CInst = cast<CallInst>(Inst);

      addOpBundleForFunclet(InsertPos->getParent(), OpBundles);

      if (Op->getType() != ParamTy)

      Clone->insertBefore(*InsertPos->getParent(), InsertPos);

                         "And inserting clone at "
                         << *InsertPos << "\n");

  } while (!Worklist.empty());
                                 const bool SuccSRRIKnownSafe,

                                 bool &SomeSuccHasSame,
                                 bool &AllSuccsHaveSame,
                                 bool &NotAllSeqEqualButKnownSafe,
                                 bool &ShouldContinue) {

      ShouldContinue = true;

      SomeSuccHasSame = true;

        AllSuccsHaveSame = false;

      NotAllSeqEqualButKnownSafe = true;
                                        const bool SuccSRRIKnownSafe,

                                        bool &SomeSuccHasSame,
                                        bool &AllSuccsHaveSame,
                                        bool &NotAllSeqEqualButKnownSafe) {

    SomeSuccHasSame = true;

    AllSuccsHaveSame = false;

    NotAllSeqEqualButKnownSafe = true;
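// Check for critical edges, loop boundaries, irreducible control flow, and
// other CFG structures where moving code across the edge would result in it
// being executed more often than in the original code.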
ObjCARCOpt::CheckForCFGHazards(const BasicBlock *BB,

                               BBState &MyStates) const {

  for (auto I = MyStates.top_down_ptr_begin(), E = MyStates.top_down_ptr_end();

    const Sequence Seq = I->second.GetSeq();

               "Unknown top down sequence state.");

    const Value *Arg = I->first;
    bool SomeSuccHasSame = false;
    bool AllSuccsHaveSame = true;
    bool NotAllSeqEqualButKnownSafe = false;

          BBStates.find(Succ);

      if (SuccSSeq == S_None) {

      const bool SuccSRRIKnownSafe = SuccS.IsKnownSafe();

        bool ShouldContinue = false;

                             AllSuccsHaveSame, NotAllSeqEqualButKnownSafe,

                                      SomeSuccHasSame, AllSuccsHaveSame,
                                      NotAllSeqEqualButKnownSafe);

    if (SomeSuccHasSame && !AllSuccsHaveSame) {

    } else if (NotAllSeqEqualButKnownSafe) {
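// Bottom-up transfer function for a single instruction: a release starts
// tracking its pointer, a retain tries to pair with a tracked release, and an
// autorelease pool pop conservatively clears all bottom-up state.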
bool ObjCARCOpt::VisitInstructionBottomUp(

                                          BBState &MyStates) {
  bool NestingDetected = false;

  const Value *Arg = nullptr;

  case ARCInstKind::Release: {

  case ARCInstKind::RetainBlock:

  case ARCInstKind::Retain:
  case ARCInstKind::RetainRV: {

    if (Class != ARCInstKind::RetainRV) {

  case ARCInstKind::AutoreleasepoolPop:
    MyStates.clearBottomUpPointers();
    return NestingDetected;
  case ARCInstKind::AutoreleasepoolPush:
  case ARCInstKind::None:
    return NestingDetected;

  for (auto MI = MyStates.bottom_up_ptr_begin(),
            ME = MyStates.bottom_up_ptr_end();

  return NestingDetected;
bool ObjCARCOpt::VisitBottomUp(BasicBlock *BB,

  bool NestingDetected = false;
  BBState &MyStates = BBStates[BB];

  BBState::edge_iterator SI(MyStates.succ_begin()),
                         SE(MyStates.succ_end());

    MyStates.InitFromSucc(I->second);

    for (; SI != SE; ++SI) {

      I = BBStates.find(Succ);

      MyStates.MergeSucc(I->second);

                    << BBStates[BB] << "\n"
                    << "Performing Dataflow:\n");

    if (isa<InvokeInst>(Inst))

    NestingDetected |= VisitInstructionBottomUp(Inst, BB, Retains, MyStates);

    if (MyStates.bottom_up_ptr_list_size() > MaxPtrStates) {
      DisableRetainReleasePairing = true;

  for (BBState::edge_iterator PI(MyStates.pred_begin()),
                              PE(MyStates.pred_end()); PI != PE; ++PI) {

    NestingDetected |= VisitInstructionBottomUp(II, BB, Retains, MyStates);

  LLVM_DEBUG(dbgs() << "\nFinal State:\n" << BBStates[BB] << "\n");

  return NestingDetected;
        &ReleaseInsertPtToRCIdentityRoots) {
  for (const auto &P : Retains) {

    for (const Instruction *InsertPt : P.second.ReverseInsertPts)
      ReleaseInsertPtToRCIdentityRoots[InsertPt].insert(Root);

        &ReleaseInsertPtToRCIdentityRoots) {
  auto I = ReleaseInsertPtToRCIdentityRoots.find(InsertPt);
  if (I == ReleaseInsertPtToRCIdentityRoots.end())
bool ObjCARCOpt::VisitInstructionTopDown(

        &ReleaseInsertPtToRCIdentityRoots) {
  bool NestingDetected = false;

  const Value *Arg = nullptr;

          Inst, ReleaseInsertPtToRCIdentityRoots))
    for (const auto *Root : *Roots) {

  case ARCInstKind::RetainBlock:

  case ARCInstKind::Retain:
  case ARCInstKind::RetainRV: {

  case ARCInstKind::Release: {

  case ARCInstKind::AutoreleasepoolPop:
    MyStates.clearTopDownPointers();

  case ARCInstKind::AutoreleasepoolPush:
  case ARCInstKind::None:

  for (auto MI = MyStates.top_down_ptr_begin(),
            ME = MyStates.top_down_ptr_end();

  return NestingDetected;
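// Top-down dataflow over a basic block: merge the states of all predecessors,
// then visit each instruction in order, pairing retains with the releases they
// reach; finally check the result for CFG hazards.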
bool ObjCARCOpt::VisitTopDown(

        &ReleaseInsertPtToRCIdentityRoots) {
  bool NestingDetected = false;
  BBState &MyStates = BBStates[BB];

  BBState::edge_iterator PI(MyStates.pred_begin()),
                         PE(MyStates.pred_end());

    MyStates.InitFromPred(I->second);

    for (; PI != PE; ++PI) {

      I = BBStates.find(Pred);

      MyStates.MergePred(I->second);

    for (auto I = MyStates.top_down_ptr_begin(),
              E = MyStates.top_down_ptr_end();

      I->second.SetCFGHazardAfflicted(true);

                    << BBStates[BB] << "\n"
                    << "Performing Dataflow:\n");

    NestingDetected |= VisitInstructionTopDown(
        &Inst, Releases, MyStates, ReleaseInsertPtToRCIdentityRoots);

    if (MyStates.top_down_ptr_list_size() > MaxPtrStates) {
      DisableRetainReleasePairing = true;

  LLVM_DEBUG(dbgs() << "\nState Before Checking for CFG Hazards:\n"
                    << BBStates[BB] << "\n\n");
  CheckForCFGHazards(BB, BBStates, MyStates);

  return NestingDetected;
                              unsigned NoObjCARCExceptionsMDKind,

  BBState &MyStates = BBStates[EntryBB];
  MyStates.SetAsEntry();

    while (SuccStack.back().second != SE) {

      if (Visited.insert(SuccBB).second) {

        BBStates[CurrBB].addSucc(SuccBB);
        BBState &SuccStates = BBStates[SuccBB];
        SuccStates.addPred(CurrBB);

      if (!OnStack.count(SuccBB)) {
        BBStates[CurrBB].addSucc(SuccBB);
        BBStates[SuccBB].addPred(CurrBB);

    OnStack.erase(CurrBB);

  } while (!SuccStack.empty());
    BBState &MyStates = BBStates[&ExitBB];
    if (!MyStates.isExit())

    MyStates.SetAsExit();

    PredStack.push_back(std::make_pair(&ExitBB, MyStates.pred_begin()));

    while (!PredStack.empty()) {
    reverse_dfs_next_succ:
      BBState::edge_iterator PE = BBStates[PredStack.back().first].pred_end();
      while (PredStack.back().second != PE) {

        if (Visited.insert(BB).second) {

          goto reverse_dfs_next_succ;
                    MDKindCache.get(ARCMDKindID::NoObjCARCExceptions),

  bool BottomUpNestingDetected = false;

    BottomUpNestingDetected |= VisitBottomUp(BB, BBStates, Retains);
    if (DisableRetainReleasePairing)

      ReleaseInsertPtToRCIdentityRoots;

  bool TopDownNestingDetected = false;

    TopDownNestingDetected |=
        VisitTopDown(BB, BBStates, Releases, ReleaseInsertPtToRCIdentityRoots);
    if (DisableRetainReleasePairing)

  return TopDownNestingDetected && BottomUpNestingDetected;
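// Move the calls in RetainsToMove and ReleasesToMove to their optimal
// insertion points, then delete the original retain and release calls they
// replace.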
void ObjCARCOpt::MoveCalls(Value *Arg, RRInfo &RetainsToMove,

    Value *MyArg = ArgTy == ParamTy ? Arg

    Function *Decl = EP.get(ARCRuntimeEntryPointKind::Retain);

    addOpBundleForFunclet(InsertPt->getParent(), BundleList);

    Call->setDoesNotThrow();
    Call->setTailCall();

                      "At insertion point: "
                      << *InsertPt << "\n");

    Value *MyArg = ArgTy == ParamTy ? Arg

    Function *Decl = EP.get(ARCRuntimeEntryPointKind::Release);

    addOpBundleForFunclet(InsertPt->getParent(), BundleList);

      Call->setMetadata(MDKindCache.get(ARCMDKindID::ImpreciseRelease), M);
    Call->setDoesNotThrow();

      Call->setTailCall();

                      "At insertion point: "
                      << *InsertPt << "\n");

    Retains.blot(OrigRetain);

    LLVM_DEBUG(dbgs() << "Deleting retain: " << *OrigRetain << "\n");

    Releases.erase(OrigRelease);

    LLVM_DEBUG(dbgs() << "Deleting release: " << *OrigRelease << "\n");
bool ObjCARCOpt::PairUpRetainsAndReleases(

                                          RRInfo &ReleasesToMove, Value *Arg,
                                          bool KnownSafe,
                                          bool &AnyPairsCompletelyEliminated) {

  bool KnownSafeTD = true, KnownSafeBU = true;
  bool CFGHazardAfflicted = false;

  unsigned OldDelta = 0;
  unsigned NewDelta = 0;
  unsigned OldCount = 0;
  unsigned NewCount = 0;
  bool FirstRelease = true;
    auto It = Retains.find(NewRetain);

    const RRInfo &NewRetainRRI = It->second;

      auto Jt = Releases.find(NewRetainRelease);
      if (Jt == Releases.end())

      const RRInfo &NewRetainReleaseRRI = Jt->second;

      if (!NewRetainReleaseRRI.Calls.count(NewRetain))

      if (ReleasesToMove.Calls.insert(NewRetainRelease).second) {

        const BBState &NRRBBState = BBStates[NewRetainRelease->getParent()];

        if (NRRBBState.GetAllPathCountWithOverflow(PathCount))

               "PathCount at this point can not be "
               "OverflowOccurredValue.");
        OldDelta -= PathCount;

          FirstRelease = false;

            const BBState &RIPBBState = BBStates[RIP->getParent()];

            if (RIPBBState.GetAllPathCountWithOverflow(PathCount))

                   "PathCount at this point can not be "
                   "OverflowOccurredValue.");
            NewDelta -= PathCount;

        NewReleases.push_back(NewRetainRelease);

    if (NewReleases.empty()) break;

      auto It = Releases.find(NewRelease);

      const RRInfo &NewReleaseRRI = It->second;

        auto Jt = Retains.find(NewReleaseRetain);
        if (Jt == Retains.end())

        const RRInfo &NewReleaseRetainRRI = Jt->second;

        if (!NewReleaseRetainRRI.Calls.count(NewRelease))

        if (RetainsToMove.Calls.insert(NewReleaseRetain).second) {

          const BBState &NRRBBState = BBStates[NewReleaseRetain->getParent()];

          if (NRRBBState.GetAllPathCountWithOverflow(PathCount))

                 "PathCount at this point can not be "
                 "OverflowOccurredValue.");
          OldDelta += PathCount;
          OldCount += PathCount;

            const BBState &RIPBBState = BBStates[RIP->getParent()];

            if (RIPBBState.GetAllPathCountWithOverflow(PathCount))

                   "PathCount at this point can not be "
                   "OverflowOccurredValue.");
            NewDelta += PathCount;
            NewCount += PathCount;

        NewRetains.push_back(NewReleaseRetain);

    if (NewRetains.empty()) break;
  bool UnconditionallySafe = KnownSafeTD && KnownSafeBU;
  if (UnconditionallySafe) {

  const bool WillPerformCodeMotion =

  if (CFGHazardAfflicted && WillPerformCodeMotion)

  assert(OldCount != 0 && "Unreachable code?");
  NumRRs += OldCount - NewCount;

  AnyPairsCompletelyEliminated = NewCount == 0;
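// Identify pairings between the retains and releases, and delete and/or move
// them.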
bool ObjCARCOpt::PerformCodePlacement(

  LLVM_DEBUG(dbgs() << "\n== ObjCARCOpt::PerformCodePlacement ==\n");

  bool AnyPairsCompletelyEliminated = false;

    bool KnownSafe = isa<Constant>(Arg) || isa<AllocaInst>(Arg);

    if (const LoadInst *LI = dyn_cast<LoadInst>(Arg))
          dyn_cast<GlobalVariable>(
        if (GV->isConstant())

    RRInfo RetainsToMove, ReleasesToMove;

    bool PerformMoveCalls = PairUpRetainsAndReleases(
        BBStates, Retains, Releases, M, Retain, DeadInsts,
        RetainsToMove, ReleasesToMove, Arg, KnownSafe,
        AnyPairsCompletelyEliminated);

    if (PerformMoveCalls) {

      MoveCalls(Arg, RetainsToMove, ReleasesToMove,
                Retains, Releases, DeadInsts, M);

  while (!DeadInsts.empty())

  return AnyPairsCompletelyEliminated;
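// Weak pointer optimizations: forward objc_loadWeak calls that reload a value
// that was just stored or loaded, and remove weak alloca slots that are only
// ever initialized, copied, and destroyed.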
void ObjCARCOpt::OptimizeWeakCalls(Function &F) {

    if (Class != ARCInstKind::LoadWeak &&
        Class != ARCInstKind::LoadWeakRetained)

    if (Class == ARCInstKind::LoadWeak && Inst->use_empty()) {

      switch (EarlierClass) {
      case ARCInstKind::LoadWeak:
      case ARCInstKind::LoadWeakRetained: {

        CallInst *EarlierCall = cast<CallInst>(EarlierInst);

        switch (PA.getAA()->alias(Arg, EarlierArg)) {

          if (Class == ARCInstKind::LoadWeakRetained) {
            Function *Decl = EP.get(ARCRuntimeEntryPointKind::Retain);

          Call->replaceAllUsesWith(EarlierCall);
          Call->eraseFromParent();

      case ARCInstKind::StoreWeak:
      case ARCInstKind::InitWeak: {

        CallInst *EarlierCall = cast<CallInst>(EarlierInst);

        switch (PA.getAA()->alias(Arg, EarlierArg)) {

          if (Class == ARCInstKind::LoadWeakRetained) {
            Function *Decl = EP.get(ARCRuntimeEntryPointKind::Retain);

          Call->eraseFromParent();

      case ARCInstKind::MoveWeak:
      case ARCInstKind::CopyWeak:

      case ARCInstKind::AutoreleasepoolPush:
      case ARCInstKind::None:
      case ARCInstKind::IntrinsicUser:
      case ARCInstKind::User:

    if (Class != ARCInstKind::DestroyWeak)

    if (AllocaInst *Alloca = dyn_cast<AllocaInst>(Arg)) {

        const Instruction *UserInst = cast<Instruction>(U);

        case ARCInstKind::InitWeak:
        case ARCInstKind::StoreWeak:
        case ARCInstKind::DestroyWeak:

        CallInst *UserInst = cast<CallInst>(U);

        case ARCInstKind::InitWeak:
        case ARCInstKind::StoreWeak:

        case ARCInstKind::DestroyWeak:

      Alloca->eraseFromParent();
bool ObjCARCOpt::OptimizeSequences(Function &F) {

  bool NestingDetected = Visit(F, BBStates, Retains, Releases);

  if (DisableRetainReleasePairing)

  bool AnyPairsCompletelyEliminated = PerformCodePlacement(BBStates, Retains,

  return AnyPairsCompletelyEliminated && NestingDetected;

  return Class == ARCInstKind::CallOrUser || Class == ARCInstKind::Call

  auto *Retain = dyn_cast_or_null<CallInst>(
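// Look for retain+autorelease pairs that forward the function's pointer return
// value; when the autorelease feeds the return and a matching retain precedes
// it with no intervening decrement, the pair can be removed.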
void ObjCARCOpt::OptimizeReturns(Function &F) {
  if (!F.getReturnType()->isPointerTy())

        (!Call->isTailCall() &&
ObjCARCOpt::GatherStatistics(Function &F, bool AfterOptimization) {
      AfterOptimization ? NumRetainsAfterOpt : NumRetainsBeforeOpt;

      AfterOptimization ? NumReleasesAfterOpt : NumReleasesBeforeOpt;

    case ARCInstKind::Retain:

    case ARCInstKind::Release:
  MDKindCache.init(F.getParent());

  EP.init(F.getParent());

  if (F.hasPersonalityFn() &&

  Changed = CFGChanged = false;

  BundledInsts = &BRV;

  LLVM_DEBUG(dbgs() << "<<< ObjCARCOpt: Visiting Function: " << F.getName()

  CFGChanged |= R.second;

    GatherStatistics(F, false);

  OptimizeIndividualCalls(F);

  if (UsedInThisFunction & ((1 << unsigned(ARCInstKind::LoadWeak)) |
                            (1 << unsigned(ARCInstKind::LoadWeakRetained)) |
                            (1 << unsigned(ARCInstKind::StoreWeak)) |
                            (1 << unsigned(ARCInstKind::InitWeak)) |
                            (1 << unsigned(ARCInstKind::CopyWeak)) |
                            (1 << unsigned(ARCInstKind::MoveWeak)) |
                            (1 << unsigned(ARCInstKind::DestroyWeak))))
    OptimizeWeakCalls(F);

  if (UsedInThisFunction & ((1 << unsigned(ARCInstKind::Retain)) |
                            (1 << unsigned(ARCInstKind::RetainRV)) |
                            (1 << unsigned(ARCInstKind::RetainBlock))))
    if (UsedInThisFunction & (1 << unsigned(ARCInstKind::Release)))

      while (OptimizeSequences(F)) {}

  if (UsedInThisFunction & ((1 << unsigned(ARCInstKind::Autorelease)) |
                            (1 << unsigned(ARCInstKind::AutoreleaseRV))))

    GatherStatistics(F, true);

  bool CFGChanged = OCAO.hasCFGChanged();