#define DEBUG_TYPE "objc-arc-opts"

static cl::opt<unsigned> MaxPtrStates(
    "arc-opt-max-ptr-states", cl::Hidden,
    cl::desc("Maximum number of ptr states the optimizer keeps track of"),
    cl::init(4095));

// ...
    if (GEP->hasAllZeroIndices())
// ...

STATISTIC(NumNoops, "Number of no-op objc calls eliminated");
STATISTIC(NumPartialNoops, "Number of partially no-op objc calls eliminated");
STATISTIC(NumAutoreleases, "Number of autoreleases converted to releases");
STATISTIC(NumRets, "Number of return value forwarding "
                   "retain+autoreleases eliminated");
STATISTIC(NumRRs, "Number of retain+release paths eliminated");
STATISTIC(NumPeeps, "Number of calls peephole-optimized");
STATISTIC(NumRetainsBeforeOpt, "Number of retains before optimization");
STATISTIC(NumReleasesBeforeOpt, "Number of releases before optimization");
STATISTIC(NumRetainsAfterOpt, "Number of retains after optimization");
STATISTIC(NumReleasesAfterOpt, "Number of releases after optimization");
  unsigned TopDownPathCount = 0;
  unsigned BottomUpPathCount = 0;

  using top_down_ptr_iterator = decltype(PerPtrTopDown)::iterator;
  using const_top_down_ptr_iterator = decltype(PerPtrTopDown)::const_iterator;

  top_down_ptr_iterator top_down_ptr_begin() { return PerPtrTopDown.begin(); }
  top_down_ptr_iterator top_down_ptr_end() { return PerPtrTopDown.end(); }
  const_top_down_ptr_iterator top_down_ptr_begin() const {
    return PerPtrTopDown.begin();
  }
  const_top_down_ptr_iterator top_down_ptr_end() const {
    return PerPtrTopDown.end();
  }
  bool hasTopDownPtrs() const {
    return !PerPtrTopDown.empty();
  }

  unsigned top_down_ptr_list_size() const {
    return std::distance(top_down_ptr_begin(), top_down_ptr_end());
  }

  using bottom_up_ptr_iterator = decltype(PerPtrBottomUp)::iterator;
  using const_bottom_up_ptr_iterator = decltype(PerPtrBottomUp)::const_iterator;

  bottom_up_ptr_iterator bottom_up_ptr_begin() {
    return PerPtrBottomUp.begin();
  }
  bottom_up_ptr_iterator bottom_up_ptr_end() { return PerPtrBottomUp.end(); }
  const_bottom_up_ptr_iterator bottom_up_ptr_begin() const {
    return PerPtrBottomUp.begin();
  }
  const_bottom_up_ptr_iterator bottom_up_ptr_end() const {
    return PerPtrBottomUp.end();
  }
  bool hasBottomUpPtrs() const {
    return !PerPtrBottomUp.empty();
  }

  unsigned bottom_up_ptr_list_size() const {
    return std::distance(bottom_up_ptr_begin(), bottom_up_ptr_end());
  }

  void SetAsEntry() { TopDownPathCount = 1; }
  void SetAsExit() { BottomUpPathCount = 1; }

  TopDownPtrState &getPtrTopDownState(const Value *Arg) {
    return PerPtrTopDown[Arg];
  }

  BottomUpPtrState &getPtrBottomUpState(const Value *Arg) {
    return PerPtrBottomUp[Arg];
  }

  bottom_up_ptr_iterator findPtrBottomUpState(const Value *Arg) {
    return PerPtrBottomUp.find(Arg);
  }

  void clearBottomUpPointers() {
    PerPtrBottomUp.clear();
  }

  void clearTopDownPointers() {
    PerPtrTopDown.clear();
  }

  void InitFromPred(const BBState &Other);
  void InitFromSucc(const BBState &Other);
  void MergePred(const BBState &Other);
  void MergeSucc(const BBState &Other);
  bool GetAllPathCountWithOverflow(unsigned &PathCount) const {
    // ...
    unsigned long long Product =
        (unsigned long long)TopDownPathCount * BottomUpPathCount;
    return (Product >> 32) ||
           ((PathCount = Product) == OverflowOccurredValue);
  }

  edge_iterator pred_begin() const { return Preds.begin(); }
  edge_iterator pred_end() const { return Preds.end(); }
  edge_iterator succ_begin() const { return Succs.begin(); }
  edge_iterator succ_end() const { return Succs.end(); }

  void addSucc(BasicBlock *Succ) { Succs.push_back(Succ); }
  void addPred(BasicBlock *Pred) { Preds.push_back(Pred); }

  bool isExit() const { return Succs.empty(); }
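// --- Illustrative sketch (not part of the original file) --------------------
// GetAllPathCountWithOverflow above multiplies the two 32-bit path counts in
// a 64-bit temporary and treats either a nonzero upper half or a product that
// collides with the OverflowOccurredValue sentinel as overflow. A minimal
// standalone version of that check, assuming 32-bit unsigned counts:
static bool pathCountProductOverflows(unsigned TopDown, unsigned BottomUp,
                                      unsigned Sentinel, unsigned &Product32) {
  unsigned long long Product = (unsigned long long)TopDown * BottomUp;
  Product32 = (unsigned)Product;      // low 32 bits, as the pass would store
  return (Product >> 32) != 0 || Product32 == Sentinel;
}
// -----------------------------------------------------------------------------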
void BBState::InitFromPred(const BBState &Other) {
  PerPtrTopDown = Other.PerPtrTopDown;
  TopDownPathCount = Other.TopDownPathCount;
}

void BBState::InitFromSucc(const BBState &Other) {
  PerPtrBottomUp = Other.PerPtrBottomUp;
  BottomUpPathCount = Other.BottomUpPathCount;
}

void BBState::MergePred(const BBState &Other) {
  if (TopDownPathCount == OverflowOccurredValue)
    return;

  TopDownPathCount += Other.TopDownPathCount;

  if (TopDownPathCount == OverflowOccurredValue) {
    clearTopDownPointers();
    return;
  }

  if (TopDownPathCount < Other.TopDownPathCount) {
    TopDownPathCount = OverflowOccurredValue;
    clearTopDownPointers();
    return;
  }

  for (auto MI = Other.top_down_ptr_begin(), ME = Other.top_down_ptr_end();
       MI != ME; ++MI) {
    auto Pair = PerPtrTopDown.insert(*MI);
    Pair.first->second.Merge(Pair.second ? TopDownPtrState() : MI->second,
                             /*TopDown=*/true);
  }

  for (auto MI = top_down_ptr_begin(), ME = top_down_ptr_end(); MI != ME; ++MI)
    if (Other.PerPtrTopDown.find(MI->first) == Other.PerPtrTopDown.end())
      MI->second.Merge(TopDownPtrState(), /*TopDown=*/true);
}

void BBState::MergeSucc(const BBState &Other) {
  if (BottomUpPathCount == OverflowOccurredValue)
    return;

  BottomUpPathCount += Other.BottomUpPathCount;

  if (BottomUpPathCount == OverflowOccurredValue) {
    clearBottomUpPointers();
    return;
  }

  if (BottomUpPathCount < Other.BottomUpPathCount) {
    BottomUpPathCount = OverflowOccurredValue;
    clearBottomUpPointers();
    return;
  }

  for (auto MI = Other.bottom_up_ptr_begin(), ME = Other.bottom_up_ptr_end();
       MI != ME; ++MI) {
    auto Pair = PerPtrBottomUp.insert(*MI);
    Pair.first->second.Merge(Pair.second ? BottomUpPtrState() : MI->second,
                             /*TopDown=*/false);
  }

  for (auto MI = bottom_up_ptr_begin(), ME = bottom_up_ptr_end(); MI != ME;
       ++MI)
    if (Other.PerPtrBottomUp.find(MI->first) == Other.PerPtrBottomUp.end())
      MI->second.Merge(BottomUpPtrState(), /*TopDown=*/false);
}
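// Note on the two merge routines above: path counts from the other block are
// accumulated first, and a sum that wraps (detected as the sum becoming
// smaller than the addend, or colliding with OverflowOccurredValue) makes the
// state give up and clear its per-pointer map. The per-pointer merge is
// symmetric: an entry present on only one side is merged against a
// default-constructed PtrState so missing information is treated
// conservatively.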
  OS << " TopDown State:\n";
  if (!BBInfo.hasTopDownPtrs()) {
    // ...
  }
  // ...
  for (auto I = BBInfo.top_down_ptr_begin(), E = BBInfo.top_down_ptr_end();
       I != E; ++I) {
    // ...
    OS << " Ptr: " << *I->first
       << "\n KnownSafe: " << (P.IsKnownSafe() ? "true" : "false")
       << "\n ImpreciseRelease: "
       << (P.IsTrackingImpreciseReleases() ? "true" : "false") << "\n"
       << " HasCFGHazards: "
       << (P.IsCFGHazardAfflicted() ? "true" : "false") << "\n"
       << " KnownPositive: "
       << (P.HasKnownPositiveRefCount() ? "true" : "false") << "\n"
       // ...
       << P.GetSeq() << "\n";
  }
  OS << " BottomUp State:\n";
  if (!BBInfo.hasBottomUpPtrs()) {
    // ...
  }
  // ...
  for (auto I = BBInfo.bottom_up_ptr_begin(), E = BBInfo.bottom_up_ptr_end();
       I != E; ++I) {
    // ...
    OS << " Ptr: " << *I->first
       << "\n KnownSafe: " << (P.IsKnownSafe() ? "true" : "false")
       << "\n ImpreciseRelease: "
       << (P.IsTrackingImpreciseReleases() ? "true" : "false") << "\n"
       << " HasCFGHazards: "
       << (P.IsCFGHazardAfflicted() ? "true" : "false") << "\n"
       << " KnownPositive: "
       << (P.HasKnownPositiveRefCount() ? "true" : "false") << "\n"
       // ...
       << P.GetSeq() << "\n";
  }
  bool CFGChanged = false;
  // ...
  bool DisableRetainReleasePairing = false;
  // ...
  unsigned UsedInThisFunction;

  // ...
  void OptimizeIndividualCalls(Function &F);
  // ...
  bool OptimizeInlinedAutoreleaseRVCall(/* ... */,
                                        const Value *&AutoreleaseRVArg);
  // ...
  void CheckForCFGHazards(/* ... */,
                          BBState &MyStates) const;
  // ...
  bool VisitInstructionTopDown(
      /* ... */
      &ReleaseInsertPtToRCIdentityRoots);
  // ...
  bool VisitTopDown(
      /* ... */
      &ReleaseInsertPtToRCIdentityRoots);
  // ...
  bool PairUpRetainsAndReleases(/* ... */,
                                Value *Arg, bool KnownSafe,
                                bool &AnyPairsCompletelyEliminated);
  // ...
  void OptimizeAutoreleasePools(Function &F);

  template <typename PredicateT>
  static void cloneOpBundlesIf(CallBase *CI, /* ... */);

  void addOpBundleForFunclet(BasicBlock *BB,
                             SmallVectorImpl<OperandBundleDef> &OpBundles) {
    if (!BlockEHColors.empty()) {
      // ...
      for (BasicBlock *EHPadBB : CV)
        /* ... */;
      // ...
    }
  }

  // ...
  void GatherStatistics(Function &F, bool AfterOptimization = false);

  // ...
  void init(Function &F);
  bool run(Function &F, AAResults &AA);
  bool hasCFGChanged() const { return CFGChanged; }
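// The ObjCARCOpt members above are driven by run() further down, roughly in
// this order: OptimizeIndividualCalls performs per-call peepholes and records
// the ARCInstKind bits in UsedInThisFunction, OptimizeWeakCalls handles the
// objc_*Weak entry points, OptimizeSequences pairs retains with releases via
// the BBState dataflow, OptimizeReturns (guarded by the autorelease bits)
// handles return-value autoreleases, and OptimizeAutoreleasePools removes
// empty push/pop pairs.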
bool ObjCARCOpt::OptimizeRetainRVCall(Function &F, Instruction *RetainRV) {
  // ...
  if (II->getNormalDest() == RetainRVParent) {
    // ...
  }
  // ...
  assert(/* ... */ && "a bundled retainRV's argument should be a call");
  // ...
  LLVM_DEBUG(dbgs() << "Transforming objc_retainAutoreleasedReturnValue => "
                       "objc_retain since the operand is not a return value.\n"
                    /* ... */);
  // ...
  Function *NewDecl = EP.get(ARCRuntimeEntryPointKind::Retain);
  // ...
}
bool ObjCARCOpt::OptimizeInlinedAutoreleaseRVCall(
    /* ... */) {
  // ...
  if (Arg != AutoreleaseRVArg) {
    // ...
  }
  // ...
  LLVM_DEBUG(dbgs() << "Found inlined objc_autoreleaseReturnValue '"
                    /* ... */);
  // ...
  if (Class == ARCInstKind::RetainRV) {
    // ...
  }
  // ...
  assert(Class == ARCInstKind::UnsafeClaimRV);
  // ...
  assert(/* ... */ && "Expected UnsafeClaimRV to be safe to tail call");
  // ...
  OptimizeIndividualCallImpl(F, Release, ARCInstKind::Release, Arg);
  // ...
}
void ObjCARCOpt::OptimizeAutoreleaseRVCall(Function &F,
                                           /* ... */) {
  // ...
  SmallVector<const Value *, 2> Users;
  // ...
  do {
    // ...
    for (const User *U : Ptr->users()) {
      // ...
    }
  } while (!Users.empty());
  // ...
  LLVM_DEBUG(
      dbgs() << "Transforming objc_autoreleaseReturnValue => "
                "objc_autorelease since its operand is not used as a return "
             /* ... */);
  // ...
  Function *NewDecl = EP.get(ARCRuntimeEntryPointKind::Autorelease);
  // ...
  Class = ARCInstKind::Autorelease;
  // ...
}
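// The three helpers above implement the return-value peepholes visible in
// their debug strings: a retainAutoreleasedReturnValue whose operand is not a
// forwarded return value is demoted to a plain objc_retain, an
// autoreleaseReturnValue whose result is not actually returned is demoted to
// objc_autorelease, and an autoreleaseRV that became adjacent to a
// retainRV/unsafeClaimRV after inlining is resolved by
// OptimizeInlinedAutoreleaseRVCall.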
void ObjCARCOpt::OptimizeIndividualCalls(Function &F) {
  LLVM_DEBUG(dbgs() << "\n== ObjCARCOpt::OptimizeIndividualCalls ==\n");
  // ...
  UsedInThisFunction = 0;

  // ...
  const Value *DelayedAutoreleaseRVArg = nullptr;
  // ...
    DelayedAutoreleaseRVArg = nullptr;
  // ...
  auto optimizeDelayedAutoreleaseRV = [&]() {
    if (!DelayedAutoreleaseRV)
      return;
    OptimizeIndividualCallImpl(F, DelayedAutoreleaseRV,
                               ARCInstKind::AutoreleaseRV,
                               DelayedAutoreleaseRVArg);
    setDelayedAutoreleaseRV(nullptr);
  };
  auto shouldDelayAutoreleaseRV = [&](Instruction *NonARCInst) {
    // ...
    if (!DelayedAutoreleaseRV)
      /* ... */;
    // ...
    if (NonARCInst->isTerminator())
      /* ... */;
    // ...
  };

  // ...
    const Value *Arg = nullptr;
    // ...
    switch (Class) {
    default:
      optimizeDelayedAutoreleaseRV();
      break;
    case ARCInstKind::CallOrUser:
    case ARCInstKind::User:
    case ARCInstKind::None:
      // ...
      if (!shouldDelayAutoreleaseRV(Inst))
        optimizeDelayedAutoreleaseRV();
      // ...
    case ARCInstKind::AutoreleaseRV:
      optimizeDelayedAutoreleaseRV();
      setDelayedAutoreleaseRV(Inst);
      // ...
    case ARCInstKind::RetainRV:
    case ARCInstKind::UnsafeClaimRV:
      if (DelayedAutoreleaseRV) {
        // ...
        if (OptimizeInlinedAutoreleaseRVCall(F, Inst, Arg, Class,
                                             DelayedAutoreleaseRV,
                                             DelayedAutoreleaseRVArg)) {
          setDelayedAutoreleaseRV(nullptr);
          // ...
        }
        optimizeDelayedAutoreleaseRV();
      }
      // ...
    }

    OptimizeIndividualCallImpl(F, Inst, Class, Arg);
  // ...

  // ...
  optimizeDelayedAutoreleaseRV();
}

// isInertARCValue: returns true if the value is inert from an ARC perspective.
static bool isInertARCValue(Value *V, SmallPtrSet<Value *, 1> &VisitedPhis) {
  V = V->stripPointerCasts();
  // ...
  if (GV->hasAttribute("objc_arc_inert"))
    return true;
  // ...
  if (!VisitedPhis.insert(PN).second)
    return true;
  // ...
}
void ObjCARCOpt::OptimizeIndividualCallImpl(Function &F, Instruction *Inst,
                                            ARCInstKind Class,
                                            const Value *Arg) {
  LLVM_DEBUG(dbgs() << "Visiting: Class: " << Class << "; " << *Inst << "\n");

  // ...
  SmallPtrSet<Value *, 1> VisitedPhis;
  // ...
  UsedInThisFunction |= 1 << unsigned(Class);
  // ...

  switch (Class) {
  // ...
  case ARCInstKind::NoopCast:
    // ...
  case ARCInstKind::StoreWeak:
  case ARCInstKind::LoadWeak:
  case ARCInstKind::LoadWeakRetained:
  case ARCInstKind::InitWeak:
  case ARCInstKind::DestroyWeak: {
    // ...
    LLVM_DEBUG(
        dbgs() << "A null pointer-to-weak-pointer is undefined behavior."
               /* ... */
               << *CI << "\nNew = " << *NewValue << "\n");
    // ...
  }
  case ARCInstKind::CopyWeak:
  case ARCInstKind::MoveWeak: {
    // ...
    LLVM_DEBUG(
        dbgs() << "A null pointer-to-weak-pointer is undefined behavior."
               /* ... */
               << *CI << "\nNew = " << *NewValue << "\n");
    // ...
  }
  case ARCInstKind::RetainRV:
    if (OptimizeRetainRVCall(F, Inst))
      return;
    break;
  case ARCInstKind::AutoreleaseRV:
    OptimizeAutoreleaseRVCall(F, Inst, Class);
    break;
  // ...
  }

  // objc_autorelease(x) whose result is otherwise unused becomes a release.
  // ...
    Function *Decl = EP.get(ARCRuntimeEntryPointKind::Release);
    // ...
    NewCall->setMetadata(MDKindCache.get(ARCMDKindID::ImpreciseRelease),
                         /* ... */);

    LLVM_DEBUG(dbgs() << "Replacing autorelease{,RV}(x) with objc_release(x) "
                         "since x is otherwise unused.\nOld: "
                      << *Call << "\nNew: " << *NewCall << "\n");
    // ...
    Class = ARCInstKind::Release;
  // ...

  LLVM_DEBUG(dbgs() << "Adding tail keyword to function since it can never be "
                       "passed stack args: "
                    /* ... */);
  // ...
  LLVM_DEBUG(dbgs() << "Removing tail keyword from function: " << *Inst
                    /* ... */);
  // ...
  LLVM_DEBUG(dbgs() << "Found no throw class. Setting nounwind on: " << *Inst
                    /* ... */);
  // ...
  UsedInThisFunction |= 1 << unsigned(Class);

  // ...
  LLVM_DEBUG(dbgs() << "ARC calls with null are no-ops. Erasing: " << *Inst
                    /* ... */);
  // ...
  UsedInThisFunction |= 1 << unsigned(Class);

  // ...
  if (Class == ARCInstKind::Release &&
      !Inst->getMetadata(MDKindCache.get(ARCMDKindID::ImpreciseRelease)))
    /* ... */;

  // ...
  Worklist.push_back(std::make_pair(Inst, Arg));
  do {
    std::pair<Instruction *, const Value *> Pair = Worklist.pop_back_val();
    // ...
    bool HasNull = false;
    bool HasCriticalEdges = false;
    // ...
        HasCriticalEdges = true;
    // ...
    if (HasCriticalEdges)
      continue;
    // ...
    switch (Class) {
    case ARCInstKind::Retain:
    case ARCInstKind::RetainBlock:
      // ...
    case ARCInstKind::Release:
      // ...
    case ARCInstKind::Autorelease:
      // ...
    case ARCInstKind::UnsafeClaimRV:
    case ARCInstKind::RetainRV:
    case ARCInstKind::AutoreleaseRV:
      // ...
    }
    // ...
    cloneOpBundlesIf(CInst, OpBundles, [](const OperandBundleUse &B) {
      // ...
    });
    addOpBundleForFunclet(InsertPos->getParent(), OpBundles);
    // ...
    if (Op->getType() != ParamTy)
      Op = new BitCastInst(Op, ParamTy, "", InsertPos);
    // ...
    Clone->insertBefore(*InsertPos->getParent(), InsertPos);

    LLVM_DEBUG(dbgs() /* ... */ << "And inserting clone at "
                      << *InsertPos << "\n");
    Worklist.push_back(std::make_pair(Clone, Incoming));
    // ...
  } while (!Worklist.empty());
}
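// The worklist at the end of OptimizeIndividualCallImpl is the null-pointer
// cleanup: ARC calls whose argument is provably null are erased outright
// ("ARC calls with null are no-ops"), and when the argument is a PHI that is
// null only on some incoming edges, the call is cloned per incoming value at
// suitable insertion points (subject to the critical-edge check) and the
// clones are pushed back onto the same worklist so they can be revisited.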
static void CheckForUseCFGHazard(const Sequence SuccSSeq,
                                 const bool SuccSRRIKnownSafe,
                                 TopDownPtrState &S,
                                 bool &SomeSuccHasSame,
                                 bool &AllSuccsHaveSame,
                                 bool &NotAllSeqEqualButKnownSafe,
                                 bool &ShouldContinue) {
  // ...
      ShouldContinue = true;
  // ...
      SomeSuccHasSame = true;
  // ...
      AllSuccsHaveSame = false;
  // ...
      NotAllSeqEqualButKnownSafe = true;
  // ...
}

static void CheckForCanReleaseCFGHazard(const Sequence SuccSSeq,
                                        const bool SuccSRRIKnownSafe,
                                        TopDownPtrState &S,
                                        bool &SomeSuccHasSame,
                                        bool &AllSuccsHaveSame,
                                        bool &NotAllSeqEqualButKnownSafe) {
  // ...
    SomeSuccHasSame = true;
  // ...
    AllSuccsHaveSame = false;
  // ...
    NotAllSeqEqualButKnownSafe = true;
  // ...
}
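// CheckForUseCFGHazard handles a top-down pointer in the S_Use state: it makes
// sure there are no CFG hazards by checking the bottom-up states of the
// successor blocks, reporting through the output flags above.
// CheckForCanReleaseCFGHazard does the same for a pointer in the S_CanRelease
// state.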
void ObjCARCOpt::CheckForCFGHazards(
    const BasicBlock *BB, DenseMap<const BasicBlock *, BBState> &BBStates,
    BBState &MyStates) const {
  // ...
  for (auto I = MyStates.top_down_ptr_begin(), E = MyStates.top_down_ptr_end();
       I != E; ++I) {
    TopDownPtrState &S = I->second;
    const Sequence Seq = I->second.GetSeq();
    // ...
           "Unknown top down sequence state.");
    // ...
    const Value *Arg = I->first;
    bool SomeSuccHasSame = false;
    bool AllSuccsHaveSame = true;
    bool NotAllSeqEqualButKnownSafe = false;

    for (const BasicBlock *Succ : successors(BB)) {
      // ...
      const DenseMap<const BasicBlock *, BBState>::iterator BBI =
          BBStates.find(Succ);
      // ...
      const BottomUpPtrState &SuccS = BBI->second.getPtrBottomUpState(Arg);
      // ...
      if (SuccSSeq == S_None) {
        // ...
      }
      // ...
      const bool SuccSRRIKnownSafe = SuccS.IsKnownSafe();
      // ...
        bool ShouldContinue = false;
        CheckForUseCFGHazard(/* ... */, SomeSuccHasSame,
                             AllSuccsHaveSame, NotAllSeqEqualButKnownSafe,
                             ShouldContinue);
        // ...
        CheckForCanReleaseCFGHazard(/* ... */,
                                    SomeSuccHasSame, AllSuccsHaveSame,
                                    NotAllSeqEqualButKnownSafe);
        // ...
    }

    // ...
    if (SomeSuccHasSame && !AllSuccsHaveSame) {
      // ...
    } else if (NotAllSeqEqualButKnownSafe) {
      // ...
    }
  }
}
bool ObjCARCOpt::VisitInstructionBottomUp(
    Instruction *Inst, BasicBlock *BB, BlotMapVector<Value *, RRInfo> &Retains,
    BBState &MyStates) {
  bool NestingDetected = false;
  // ...
  const Value *Arg = nullptr;
  // ...
  switch (Class) {
  case ARCInstKind::Release: {
    // ...
    BottomUpPtrState &S = MyStates.getPtrBottomUpState(Arg);
    // ...
  }
  case ARCInstKind::RetainBlock:
    // ...
  case ARCInstKind::Retain:
  case ARCInstKind::RetainRV: {
    // ...
    BottomUpPtrState &S = MyStates.getPtrBottomUpState(Arg);
    // ...
    if (Class != ARCInstKind::RetainRV) {
      // ...
    }
    // ...
  }
  case ARCInstKind::AutoreleasepoolPop:
    // ...
    MyStates.clearBottomUpPointers();
    return NestingDetected;
  case ARCInstKind::AutoreleasepoolPush:
  case ARCInstKind::None:
    // ...
    return NestingDetected;
  // ...
  }

  // ...
  for (auto MI = MyStates.bottom_up_ptr_begin(),
            ME = MyStates.bottom_up_ptr_end();
       MI != ME; ++MI) {
    // ...
    BottomUpPtrState &S = MI->second;
    // ...
  }

  return NestingDetected;
}

bool ObjCARCOpt::VisitBottomUp(BasicBlock *BB,
                               DenseMap<const BasicBlock *, BBState> &BBStates,
                               BlotMapVector<Value *, RRInfo> &Retains) {
  // ...
  bool NestingDetected = false;
  BBState &MyStates = BBStates[BB];

  // ...
  BBState::edge_iterator SI(MyStates.succ_begin()),
                         SE(MyStates.succ_end());
  if (SI != SE) {
    // ...
    DenseMap<const BasicBlock *, BBState>::iterator I = BBStates.find(Succ);
    // ...
    MyStates.InitFromSucc(I->second);
    // ...
    for (; SI != SE; ++SI) {
      // ...
      I = BBStates.find(Succ);
      // ...
      MyStates.MergeSucc(I->second);
    }
  }

  LLVM_DEBUG(dbgs() /* ... */ << BBStates[BB] << "\n"
                    << "Performing Dataflow:\n");

  // ...
    NestingDetected |= VisitInstructionBottomUp(Inst, BB, Retains, MyStates);
    // ...
    if (MyStates.bottom_up_ptr_list_size() > MaxPtrStates) {
      DisableRetainReleasePairing = true;
      // ...
    }
  // ...

  for (BBState::edge_iterator PI(MyStates.pred_begin()),
                              PE(MyStates.pred_end()); PI != PE; ++PI) {
    // ...
    NestingDetected |= VisitInstructionBottomUp(II, BB, Retains, MyStates);
  }

  LLVM_DEBUG(dbgs() << "\nFinal State:\n" << BBStates[BB] << "\n");

  return NestingDetected;
}
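// VisitBottomUp seeds the block state from its successors (InitFromSucc for
// the first, MergeSucc for the rest), then walks the block's instructions
// through VisitInstructionBottomUp to build the Retains candidates. If the
// number of tracked pointer states ever exceeds the arc-opt-max-ptr-states
// threshold, DisableRetainReleasePairing is set and pairing is abandoned for
// the whole function.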
static void collectReleaseInsertPts(
    const BlotMapVector<Value *, RRInfo> &Retains,
    DenseMap<const Instruction *, SmallPtrSet<const Value *, 2>>
        &ReleaseInsertPtToRCIdentityRoots) {
  for (const auto &P : Retains) {
    // ...
    const Value *Root = GetRCIdentityRoot(P.first);
    for (const Instruction *InsertPt : P.second.ReverseInsertPts)
      ReleaseInsertPtToRCIdentityRoots[InsertPt].insert(Root);
  }
}

static const SmallPtrSet<const Value *, 2> *
getRCIdentityRootsFromReleaseInsertPt(
    const Instruction *InsertPt,
    const DenseMap<const Instruction *, SmallPtrSet<const Value *, 2>>
        &ReleaseInsertPtToRCIdentityRoots) {
  auto I = ReleaseInsertPtToRCIdentityRoots.find(InsertPt);
  if (I == ReleaseInsertPtToRCIdentityRoots.end())
    return nullptr;
  // ...
}

bool ObjCARCOpt::VisitInstructionTopDown(
    Instruction *Inst, DenseMap<Value *, RRInfo> &Releases, BBState &MyStates,
    const DenseMap<const Instruction *, SmallPtrSet<const Value *, 2>>
        &ReleaseInsertPtToRCIdentityRoots) {
  bool NestingDetected = false;
  // ...
  const Value *Arg = nullptr;
  // ...
  if (const SmallPtrSet<const Value *, 2> *Roots =
          getRCIdentityRootsFromReleaseInsertPt(
              Inst, ReleaseInsertPtToRCIdentityRoots))
    for (const auto *Root : *Roots) {
      TopDownPtrState &S = MyStates.getPtrTopDownState(Root);
      // ...
    }

  // ...
  switch (Class) {
  case ARCInstKind::RetainBlock:
    // ...
  case ARCInstKind::Retain:
  case ARCInstKind::RetainRV: {
    // ...
    TopDownPtrState &S = MyStates.getPtrTopDownState(Arg);
    // ...
  }
  case ARCInstKind::Release: {
    // ...
    TopDownPtrState &S = MyStates.getPtrTopDownState(Arg);
    // ...
  }
  case ARCInstKind::AutoreleasepoolPop:
    // ...
    MyStates.clearTopDownPointers();
    // ...
  case ARCInstKind::AutoreleasepoolPush:
  case ARCInstKind::None:
    // ...
  }

  // ...
  for (auto MI = MyStates.top_down_ptr_begin(),
            ME = MyStates.top_down_ptr_end();
       MI != ME; ++MI) {
    // ...
    TopDownPtrState &S = MI->second;
    // ...
  }

  return NestingDetected;
}
bool ObjCARCOpt::VisitTopDown(
    BasicBlock *BB, DenseMap<const BasicBlock *, BBState> &BBStates,
    DenseMap<Value *, RRInfo> &Releases,
    const DenseMap<const Instruction *, SmallPtrSet<const Value *, 2>>
        &ReleaseInsertPtToRCIdentityRoots) {
  // ...
  bool NestingDetected = false;
  BBState &MyStates = BBStates[BB];

  // ...
  BBState::edge_iterator PI(MyStates.pred_begin()),
                         PE(MyStates.pred_end());
  if (PI != PE) {
    // ...
    DenseMap<const BasicBlock *, BBState>::iterator I = BBStates.find(Pred);
    // ...
    MyStates.InitFromPred(I->second);
    // ...
    for (; PI != PE; ++PI) {
      // ...
      I = BBStates.find(Pred);
      // ...
      MyStates.MergePred(I->second);
    }
  }

  // ...
  for (auto I = MyStates.top_down_ptr_begin(),
            E = MyStates.top_down_ptr_end();
       I != E; ++I)
    I->second.SetCFGHazardAfflicted(true);

  LLVM_DEBUG(dbgs() /* ... */ << BBStates[BB] << "\n"
                    << "Performing Dataflow:\n");

  for (Instruction &Inst : *BB) {
    // ...
    NestingDetected |= VisitInstructionTopDown(
        &Inst, Releases, MyStates, ReleaseInsertPtToRCIdentityRoots);
    // ...
    if (MyStates.top_down_ptr_list_size() > MaxPtrStates) {
      DisableRetainReleasePairing = true;
      // ...
    }
  }

  LLVM_DEBUG(dbgs() << "\nState Before Checking for CFG Hazards:\n"
                    << BBStates[BB] << "\n\n");
  CheckForCFGHazards(BB, BBStates, MyStates);
  // ...
  return NestingDetected;
}
static void ComputePostOrders(Function &F,
                              SmallVectorImpl<BasicBlock *> &PostOrder,
                              SmallVectorImpl<BasicBlock *> &ReverseCFGPostOrder,
                              unsigned NoObjCARCExceptionsMDKind,
                              DenseMap<const BasicBlock *, BBState> &BBStates) {
  // ...
  BBState &MyStates = BBStates[EntryBB];
  MyStates.SetAsEntry();
  // ...
  do {
    // ...
    while (SuccStack.back().second != SE) {
      // ...
      if (Visited.insert(SuccBB).second) {
        // ...
        BBStates[CurrBB].addSucc(SuccBB);
        BBState &SuccStates = BBStates[SuccBB];
        SuccStates.addPred(CurrBB);
        // ...
      }
      // ...
      if (!OnStack.count(SuccBB)) {
        BBStates[CurrBB].addSucc(SuccBB);
        BBStates[SuccBB].addPred(CurrBB);
      }
      // ...
    }
    // ...
    OnStack.erase(CurrBB);
    // ...
  } while (!SuccStack.empty());

  // ...
  for (BasicBlock &ExitBB : F) {
    BBState &MyStates = BBStates[&ExitBB];
    if (!MyStates.isExit())
      continue;

    MyStates.SetAsExit();
    // ...
    PredStack.push_back(std::make_pair(&ExitBB, MyStates.pred_begin()));
    // ...
    while (!PredStack.empty()) {
    reverse_dfs_next_succ:
      BBState::edge_iterator PE = BBStates[PredStack.back().first].pred_end();
      while (PredStack.back().second != PE) {
        // ...
        if (Visited.insert(BB).second) {
          // ...
          goto reverse_dfs_next_succ;
        }
        // ...
      }
      // ...
    }
  }
}
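// --- Illustrative sketch (not part of the original file) --------------------
// ComputePostOrders above avoids recursion by keeping an explicit stack of
// (block, next-successor-iterator) pairs: descend into unvisited successors,
// and emit a block in post-order once its successor iterator is exhausted.
// A minimal standalone version over a hypothetical adjacency list:
#include <vector>
struct POFrame { int Node; unsigned NextSucc; };
static void postOrderSketch(const std::vector<std::vector<int>> &Succs,
                            int Entry, std::vector<int> &PostOrder) {
  std::vector<bool> Visited(Succs.size(), false);
  std::vector<POFrame> Stack;
  Visited[Entry] = true;
  Stack.push_back({Entry, 0});
  while (!Stack.empty()) {
    POFrame &Top = Stack.back();
    if (Top.NextSucc < Succs[Top.Node].size()) {
      int S = Succs[Top.Node][Top.NextSucc++];
      if (!Visited[S]) {           // descend into an unvisited successor
        Visited[S] = true;
        Stack.push_back({S, 0});   // may invalidate Top; not used afterwards
      }
    } else {
      PostOrder.push_back(Top.Node); // all successors done: emit in post-order
      Stack.pop_back();
    }
  }
}
// -----------------------------------------------------------------------------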
bool ObjCARCOpt::Visit(Function &F,
                       DenseMap<const BasicBlock *, BBState> &BBStates,
                       BlotMapVector<Value *, RRInfo> &Retains,
                       DenseMap<Value *, RRInfo> &Releases) {
  // ...
  SmallVector<BasicBlock *, 16> PostOrder;
  SmallVector<BasicBlock *, 16> ReverseCFGPostOrder;
  ComputePostOrders(F, PostOrder, ReverseCFGPostOrder,
                    MDKindCache.get(ARCMDKindID::NoObjCARCExceptions),
                    BBStates);

  // ...
  bool BottomUpNestingDetected = false;
  for (BasicBlock *BB : llvm::reverse(ReverseCFGPostOrder)) {
    BottomUpNestingDetected |= VisitBottomUp(BB, BBStates, Retains);
    if (DisableRetainReleasePairing)
      return false;
  }

  DenseMap<const Instruction *, SmallPtrSet<const Value *, 2>>
      ReleaseInsertPtToRCIdentityRoots;
  collectReleaseInsertPts(Retains, ReleaseInsertPtToRCIdentityRoots);

  // ...
  bool TopDownNestingDetected = false;
  for (BasicBlock *BB : llvm::reverse(PostOrder)) {
    TopDownNestingDetected |=
        VisitTopDown(BB, BBStates, Releases, ReleaseInsertPtToRCIdentityRoots);
    if (DisableRetainReleasePairing)
      return false;
  }

  return TopDownNestingDetected && BottomUpNestingDetected;
}
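// Visit drives both dataflow passes over the orders computed by
// ComputePostOrders: the bottom-up walk fills in the Retains candidates, the
// top-down walk fills in Releases, and the function only reports nesting when
// both directions detected it. Either walk bails out early once
// DisableRetainReleasePairing has been set.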
void ObjCARCOpt::MoveCalls(Value *Arg, RRInfo &RetainsToMove,
                           RRInfo &ReleasesToMove,
                           BlotMapVector<Value *, RRInfo> &Retains,
                           DenseMap<Value *, RRInfo> &Releases,
                           SmallVectorImpl<Instruction *> &DeadInsts,
                           Module *M) {
  // ...
  for (Instruction *InsertPt : ReleasesToMove.ReverseInsertPts) {
    Function *Decl = EP.get(ARCRuntimeEntryPointKind::Retain);
    // ...
    addOpBundleForFunclet(InsertPt->getParent(), BundleList);
    // ...
    LLVM_DEBUG(dbgs() /* ... */ << "At insertion point: "
                      << *InsertPt << "\n");
  }
  for (Instruction *InsertPt : RetainsToMove.ReverseInsertPts) {
    Function *Decl = EP.get(ARCRuntimeEntryPointKind::Release);
    // ...
    addOpBundleForFunclet(InsertPt->getParent(), BundleList);
    // ...
    LLVM_DEBUG(dbgs() /* ... */ << "At insertion point: "
                      << *InsertPt << "\n");
  }

  // Delete the original retain and release calls.
  for (Instruction *OrigRetain : RetainsToMove.Calls) {
    Retains.blot(OrigRetain);
    // ...
    LLVM_DEBUG(dbgs() << "Deleting retain: " << *OrigRetain << "\n");
  }
  for (Instruction *OrigRelease : ReleasesToMove.Calls) {
    Releases.erase(OrigRelease);
    // ...
    LLVM_DEBUG(dbgs() << "Deleting release: " << *OrigRelease << "\n");
  }
}
bool ObjCARCOpt::PairUpRetainsAndReleases(
    DenseMap<const BasicBlock *, BBState> &BBStates,
    BlotMapVector<Value *, RRInfo> &Retains,
    DenseMap<Value *, RRInfo> &Releases, Module *M, Instruction *Retain,
    SmallVectorImpl<Instruction *> &DeadInsts, RRInfo &RetainsToMove,
    RRInfo &ReleasesToMove, Value *Arg, bool KnownSafe,
    bool &AnyPairsCompletelyEliminated) {
  // ...
  bool KnownSafeTD = true, KnownSafeBU = true;
  bool CFGHazardAfflicted = false;

  // ...
  unsigned OldDelta = 0;
  unsigned NewDelta = 0;
  unsigned OldCount = 0;
  unsigned NewCount = 0;
  bool FirstRelease = true;
  for (SmallVector<Instruction *, 4> NewRetains{Retain};;) {
    SmallVector<Instruction *, 4> NewReleases;
    for (Instruction *NewRetain : NewRetains) {
      auto It = Retains.find(NewRetain);
      // ...
      const RRInfo &NewRetainRRI = It->second;
      // ...
      for (Instruction *NewRetainRelease : NewRetainRRI.Calls) {
        auto Jt = Releases.find(NewRetainRelease);
        if (Jt == Releases.end())
          return false;
        const RRInfo &NewRetainReleaseRRI = Jt->second;

        // ...
        if (!NewRetainReleaseRRI.Calls.count(NewRetain))
          return false;

        if (ReleasesToMove.Calls.insert(NewRetainRelease).second) {
          // ...
          const BBState &NRRBBState = BBStates[NewRetainRelease->getParent()];
          // ...
          if (NRRBBState.GetAllPathCountWithOverflow(PathCount))
            return false;
          assert(/* ... */ &&
                 "PathCount at this point can not be "
                 "OverflowOccurredValue.");
          OldDelta -= PathCount;

          // ...
            FirstRelease = false;
          // ...
              const BBState &RIPBBState = BBStates[RIP->getParent()];
              // ...
              if (RIPBBState.GetAllPathCountWithOverflow(PathCount))
                return false;
              assert(/* ... */ &&
                     "PathCount at this point can not be "
                     "OverflowOccurredValue.");
              NewDelta -= PathCount;
          // ...
          NewReleases.push_back(NewRetainRelease);
        }
      }
    }
    // ...
    if (NewReleases.empty())
      break;

    // ...
    for (Instruction *NewRelease : NewReleases) {
      auto It = Releases.find(NewRelease);
      // ...
      const RRInfo &NewReleaseRRI = It->second;
      // ...
      for (Instruction *NewReleaseRetain : NewReleaseRRI.Calls) {
        auto Jt = Retains.find(NewReleaseRetain);
        if (Jt == Retains.end())
          return false;
        const RRInfo &NewReleaseRetainRRI = Jt->second;

        // ...
        if (!NewReleaseRetainRRI.Calls.count(NewRelease))
          return false;

        if (RetainsToMove.Calls.insert(NewReleaseRetain).second) {
          // ...
          const BBState &NRRBBState = BBStates[NewReleaseRetain->getParent()];
          // ...
          if (NRRBBState.GetAllPathCountWithOverflow(PathCount))
            return false;
          assert(/* ... */ &&
                 "PathCount at this point can not be "
                 "OverflowOccurredValue.");
          OldDelta += PathCount;
          OldCount += PathCount;

          // ...
              const BBState &RIPBBState = BBStates[RIP->getParent()];
              // ...
              if (RIPBBState.GetAllPathCountWithOverflow(PathCount))
                return false;
              assert(/* ... */ &&
                     "PathCount at this point can not be "
                     "OverflowOccurredValue.");
              NewDelta += PathCount;
              NewCount += PathCount;
          // ...
          NewRetains.push_back(NewReleaseRetain);
        }
      }
    }
    // ...
    if (NewRetains.empty())
      break;
  }

  // ...
  bool UnconditionallySafe = KnownSafeTD && KnownSafeBU;
  if (UnconditionallySafe) {
    // ...
  }
  // ...
  const bool WillPerformCodeMotion =
      /* ... */;
  if (CFGHazardAfflicted && WillPerformCodeMotion)
    return false;

  // ...
  assert(OldCount != 0 && "Unreachable code?");
  NumRRs += OldCount - NewCount;
  // ...
  AnyPairsCompletelyEliminated = NewCount == 0;

  // ...
  return true;
}
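// The counters above are a path-count audit: OldDelta/OldCount accumulate the
// paths covered by the original retains and releases, while NewDelta/NewCount
// track the paths covered by the calls that would remain after moving. NumRRs
// is bumped by OldCount - NewCount, and AnyPairsCompletelyEliminated is set
// only when no calls remain on any path (NewCount == 0).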
bool ObjCARCOpt::PerformCodePlacement(
    DenseMap<const BasicBlock *, BBState> &BBStates,
    BlotMapVector<Value *, RRInfo> &Retains,
    DenseMap<Value *, RRInfo> &Releases, Module *M) {
  LLVM_DEBUG(dbgs() << "\n== ObjCARCOpt::PerformCodePlacement ==\n");

  bool AnyPairsCompletelyEliminated = false;
  SmallVector<Instruction *, 8> DeadInsts;

  // ...
    if (const GlobalVariable *GV =
            /* ... */)
      if (GV->isConstant())
        /* ... */;

    // ...
    RRInfo RetainsToMove, ReleasesToMove;

    bool PerformMoveCalls = PairUpRetainsAndReleases(
        BBStates, Retains, Releases, M, Retain, DeadInsts,
        RetainsToMove, ReleasesToMove, Arg, KnownSafe,
        AnyPairsCompletelyEliminated);

    if (PerformMoveCalls) {
      // ...
      MoveCalls(Arg, RetainsToMove, ReleasesToMove,
                Retains, Releases, DeadInsts, M);
    }
  // ...

  while (!DeadInsts.empty())
    /* ... */;

  return AnyPairsCompletelyEliminated;
}
void ObjCARCOpt::OptimizeWeakCalls(Function &F) {
  // ...
    if (Class != ARCInstKind::LoadWeak &&
        Class != ARCInstKind::LoadWeakRetained)
      continue;

    // ...
    if (Class == ARCInstKind::LoadWeak && Inst->use_empty()) {
      // ...
    }

    // ...
      switch (EarlierClass) {
      case ARCInstKind::LoadWeak:
      case ARCInstKind::LoadWeakRetained: {
        // ...
        switch (PA.getAA()->alias(Arg, EarlierArg)) {
        // ...
          if (Class == ARCInstKind::LoadWeakRetained) {
            // ...
            Function *Decl = EP.get(ARCRuntimeEntryPointKind::Retain);
            // ...
          }
        // ...
        }
        break;
      }
      case ARCInstKind::StoreWeak:
      case ARCInstKind::InitWeak: {
        // ...
        switch (PA.getAA()->alias(Arg, EarlierArg)) {
        // ...
          if (Class == ARCInstKind::LoadWeakRetained) {
            // ...
            Function *Decl = EP.get(ARCRuntimeEntryPointKind::Retain);
            // ...
          }
        // ...
        }
        break;
      }
      case ARCInstKind::MoveWeak:
      case ARCInstKind::CopyWeak:
        // ...
      case ARCInstKind::AutoreleasepoolPush:
      case ARCInstKind::None:
      case ARCInstKind::IntrinsicUser:
      case ARCInstKind::User:
        // ...
      }

  // ...
    if (Class != ARCInstKind::DestroyWeak)
      continue;

    // ...
    for (User *U : Alloca->users()) {
      // ...
      case ARCInstKind::InitWeak:
      case ARCInstKind::StoreWeak:
      case ARCInstKind::DestroyWeak:
        // ...
    }
    // ...
      case ARCInstKind::InitWeak:
      case ARCInstKind::StoreWeak:
        // ...
      case ARCInstKind::DestroyWeak:
        // ...
    // ...
    Alloca->eraseFromParent();
  // ...
}
bool ObjCARCOpt::OptimizeSequences(Function &F) {
  // ...
  DenseMap<Value *, RRInfo> Releases;
  BlotMapVector<Value *, RRInfo> Retains;
  // ...
  DenseMap<const BasicBlock *, BBState> BBStates;

  // Analyze the CFG of the function, and all instructions.
  bool NestingDetected = Visit(F, BBStates, Retains, Releases);

  if (DisableRetainReleasePairing)
    return false;

  // Transform.
  bool AnyPairsCompletelyEliminated =
      PerformCodePlacement(BBStates, Retains, Releases, F.getParent());

  return AnyPairsCompletelyEliminated && NestingDetected;
}
static CallInst *FindPredecessorAutoreleaseWithSafePath(
    const Value *Arg, BasicBlock *BB, ReturnInst *Ret, ProvenanceAnalysis &PA) {
  // ...
}

void ObjCARCOpt::OptimizeReturns(Function &F) {
  if (!F.getReturnType()->isPointerTy())
    return;
  // ...
  for (BasicBlock &BB : F) {
    // ...
    CallInst *Autorelease =
        FindPredecessorAutoreleaseWithSafePath(Arg, &BB, Ret, PA);
    // ...
        (!Call->isTailCall() &&
         /* ... */)
    // ...
  }
}

void ObjCARCOpt::GatherStatistics(Function &F, bool AfterOptimization) {
  Statistic &NumRetains =
      AfterOptimization ? NumRetainsAfterOpt : NumRetainsBeforeOpt;
  Statistic &NumReleases =
      AfterOptimization ? NumReleasesAfterOpt : NumReleasesBeforeOpt;
  // ...
    case ARCInstKind::Retain:
      // ...
    case ARCInstKind::Release:
      // ...
  // ...
}
void ObjCARCOpt::init(Function &F) {
  // ...
  MDKindCache.init(F.getParent());
  // ...
  EP.init(F.getParent());
  // ...
  if (F.hasPersonalityFn() &&
      /* ... */) {
    // ...
  }
}
bool ObjCARCOpt::run(Function &F, AAResults &AA) {
  // ...
  BundledRetainClaimRVs BRV(EP, false, false);
  BundledInsts = &BRV;

  LLVM_DEBUG(dbgs() << "<<< ObjCARCOpt: Visiting Function: " << F.getName()
                    /* ... */);

  // ...
  CFGChanged |= R.second;

  // ...
  GatherStatistics(F, false);

  // ...
  OptimizeIndividualCalls(F);

  // Optimizations for weak pointers.
  if (UsedInThisFunction & ((1 << unsigned(ARCInstKind::LoadWeak)) |
                            (1 << unsigned(ARCInstKind::LoadWeakRetained)) |
                            (1 << unsigned(ARCInstKind::StoreWeak)) |
                            (1 << unsigned(ARCInstKind::InitWeak)) |
                            (1 << unsigned(ARCInstKind::CopyWeak)) |
                            (1 << unsigned(ARCInstKind::MoveWeak)) |
                            (1 << unsigned(ARCInstKind::DestroyWeak))))
    OptimizeWeakCalls(F);

  // Optimizations for retain+release pairs.
  if (UsedInThisFunction & ((1 << unsigned(ARCInstKind::Retain)) |
                            (1 << unsigned(ARCInstKind::RetainRV)) |
                            (1 << unsigned(ARCInstKind::RetainBlock))))
    if (UsedInThisFunction & (1 << unsigned(ARCInstKind::Release)))
      // Run OptimizeSequences until it either stops making changes or no
      // retain+release pair nesting is detected.
      while (OptimizeSequences(F)) {}

  // Optimizations if objc_autorelease is used.
  if (UsedInThisFunction & ((1 << unsigned(ARCInstKind::Autorelease)) |
                            (1 << unsigned(ARCInstKind::AutoreleaseRV))))
    OptimizeReturns(F);

  // Optimizations for autorelease pools.
  if (UsedInThisFunction & ((1 << unsigned(ARCInstKind::AutoreleasepoolPush)) |
                            (1 << unsigned(ARCInstKind::AutoreleasepoolPop))))
    OptimizeAutoreleasePools(F);

  // ...
  GatherStatistics(F, true);

  // ...
}
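// --- Illustrative sketch (not part of the original file) --------------------
// run() only enters each phase above when OptimizeIndividualCalls recorded the
// relevant ARCInstKind bits in UsedInThisFunction. A standalone model of that
// bitmask bookkeeping (DemoKind is a stand-in for ARCInstKind):
namespace {
enum class DemoKind : unsigned { Retain, RetainRV, RetainBlock, Release };
struct DemoUsage {
  unsigned Bits = 0;
  void record(DemoKind K) { Bits |= 1u << unsigned(K); }       // per call site
  bool any(unsigned Mask) const { return (Bits & Mask) != 0; } // per phase
};
} // namespace
// Usage: a retain/release pairing phase would run only when
//   U.any((1u << unsigned(DemoKind::Retain)) |
//         (1u << unsigned(DemoKind::RetainRV)) |
//         (1u << unsigned(DemoKind::RetainBlock)))
//   && U.any(1u << unsigned(DemoKind::Release)).
// -----------------------------------------------------------------------------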
// Excerpt from the interprocedural MayAutorelease() check:
  if (!Callee->hasExactDefinition())
    /* ... */;
void ObjCARCOpt::OptimizeAutoreleasePools(Function &F) {
  LLVM_DEBUG(dbgs() << "\n== ObjCARCOpt::OptimizeAutoreleasePools ==\n");

  OptimizationRemarkEmitter ORE(&F);

  // ...
  for (BasicBlock &BB : F) {
    // ...
      switch (Class) {
      case ARCInstKind::AutoreleasepoolPush: {
        // ...
        LLVM_DEBUG(dbgs() << "Found autorelease pool push: " << *Push << "\n");
        break;
      }
      case ARCInstKind::AutoreleasepoolPop: {
        // ...
        if (PoolStack.empty())
          /* ... */;

        auto &TopPool = PoolStack.back();
        CallInst *PendingPush = TopPool.first;
        bool HasAutoreleaseInScope = TopPool.second;

        // ...
        if (Pop->getArgOperand(0)->stripPointerCasts() != PendingPush)
          /* ... */;
        // ...
        if (HasAutoreleaseInScope)
          /* ... */;

        // ...
        ORE.emit([&]() {
          return OptimizationRemark(DEBUG_TYPE, "AutoreleasePoolElimination",
                                    /* ... */)
                 << "eliminated empty autorelease pool pair";
        });
        // ...
        Pop->eraseFromParent();
        break;
      }
      case ARCInstKind::CallOrUser:
      case ARCInstKind::Call:
        // ...
      case ARCInstKind::Autorelease:
      case ARCInstKind::AutoreleaseRV:
      case ARCInstKind::FusedRetainAutorelease:
      case ARCInstKind::FusedRetainAutoreleaseRV:
      case ARCInstKind::LoadWeak: {
        // ...
        if (!PoolStack.empty()) {
          PoolStack.back().second = true;
          LLVM_DEBUG(
              dbgs()
              << "Found autorelease or potential autorelease in pool scope: "
              /* ... */);
        }
        break;
      }
      case ARCInstKind::Retain:
      case ARCInstKind::RetainRV:
      case ARCInstKind::UnsafeClaimRV:
      case ARCInstKind::RetainBlock:
      case ARCInstKind::Release:
      case ARCInstKind::NoopCast:
      case ARCInstKind::LoadWeakRetained:
      case ARCInstKind::StoreWeak:
      case ARCInstKind::InitWeak:
      case ARCInstKind::MoveWeak:
      case ARCInstKind::CopyWeak:
      case ARCInstKind::DestroyWeak:
      case ARCInstKind::StoreStrong:
      case ARCInstKind::IntrinsicUser:
      case ARCInstKind::User:
      case ARCInstKind::None:
        // ...
        break;
      }
  }
}
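// --- Illustrative sketch (not part of the original file) --------------------
// OptimizeAutoreleasePools pairs pushes and pops with a stack of
// (push, sawAutoreleaseInScope) entries; a pop whose scope stayed clean can be
// deleted together with its push. A standalone model of that matching:
#include <vector>
enum class PoolEvent { Push, Pop, Autorelease };
// Returns the number of empty push/pop pairs that could be removed.
static unsigned countRemovablePools(const std::vector<PoolEvent> &Events) {
  std::vector<bool> ScopeSawAutorelease; // one flag per open pool scope
  unsigned Removable = 0;
  for (PoolEvent E : Events) {
    switch (E) {
    case PoolEvent::Push:
      ScopeSawAutorelease.push_back(false);
      break;
    case PoolEvent::Autorelease:
      if (!ScopeSawAutorelease.empty())
        ScopeSawAutorelease.back() = true; // only the innermost scope is dirtied
      break;
    case PoolEvent::Pop:
      if (!ScopeSawAutorelease.empty()) {
        if (!ScopeSawAutorelease.back())
          ++Removable;                     // empty scope: push/pop pair is dead
        ScopeSawAutorelease.pop_back();
      }
      break;
    }
  }
  return Removable;
}
// -----------------------------------------------------------------------------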
// From the pass-manager wrapper around ObjCARCOpt:
  bool CFGChanged = OCAO.hasCFGChanged();
  // ...