using namespace llvm::objcarc;

#define DEBUG_TYPE "objc-arc-opts"
/// Similar to GetRCIdentityRoot, but stops as soon as it finds a value with
/// multiple uses.
static const Value *FindSingleUseIdentifiedObject(const Value *Arg) {
  if (Arg->hasOneUse()) {
    if (const BitCastInst *BC = dyn_cast<BitCastInst>(Arg))
      return FindSingleUseIdentifiedObject(BC->getOperand(0));
    if (const GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(Arg))
      if (GEP->hasAllZeroIndices())
        return FindSingleUseIdentifiedObject(GEP->getPointerOperand());
    if (IsForwarding(GetBasicARCInstKind(Arg)))
      return FindSingleUseIdentifiedObject(
               cast<CallInst>(Arg)->getArgOperand(0));
    // ... (remaining single-use and multi-use handling elided)
  }
  return nullptr;
}

/// Wrapper around GetUnderlyingObjCPtr along the lines of
/// GetUnderlyingObjects, except that it returns as soon as it finds an alloca.
static bool AreAnyUnderlyingObjectsAnAlloca(const Value *V,
                                            const DataLayout &DL) {
  SmallPtrSet<const Value *, 4> Visited;
  SmallVector<const Value *, 4> Worklist;
  Worklist.push_back(V);
  do {
    const Value *P = GetUnderlyingObjCPtr(Worklist.pop_back_val(), DL);
    if (isa<AllocaInst>(P))
      return true;
    if (!Visited.insert(P).second)
      continue;
    if (const SelectInst *SI = dyn_cast<const SelectInst>(P)) {
      Worklist.push_back(SI->getTrueValue());
      Worklist.push_back(SI->getFalseValue());
      continue;
    }
    if (const PHINode *PN = dyn_cast<const PHINode>(P)) {
      for (Value *IncValue : PN->incoming_values())
        Worklist.push_back(IncValue);
      continue;
    }
  } while (!Worklist.empty());
  return false;
}
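// Illustrative note (not from the original source): for IR such as
//
//   %buf = alloca i8*
//   %p   = select i1 %c, i8** %x, i8** %buf
//
// AreAnyUnderlyingObjectsAnAlloca(%p, DL) walks the select back to both
// operands and returns true because one underlying object is the alloca %buf.
// The bottom-up visitor uses this when it sees a store, to flag tracked
// pointers that may pick up additional owners through stack memory.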
STATISTIC(NumNoops,        "Number of no-op objc calls eliminated");
STATISTIC(NumPartialNoops, "Number of partially no-op objc calls eliminated");
STATISTIC(NumAutoreleases, "Number of autoreleases converted to releases");
STATISTIC(NumRets,         "Number of return value forwarding "
                           "retain+autoreleases eliminated");
STATISTIC(NumRRs,          "Number of retain+release paths eliminated");
STATISTIC(NumPeeps,        "Number of calls peephole-optimized");

STATISTIC(NumRetainsBeforeOpt,  "Number of retains before optimization");
STATISTIC(NumReleasesBeforeOpt, "Number of releases before optimization");
STATISTIC(NumRetainsAfterOpt,   "Number of retains after optimization");
STATISTIC(NumReleasesAfterOpt,  "Number of releases after optimization");
namespace {
  /// Per-BasicBlock state used by the dataflow analysis.
  class BBState {
    /// The number of unique control paths from the entry which can reach this
    /// block.
    unsigned TopDownPathCount;

    /// The number of unique control paths to exits from this block.
    unsigned BottomUpPathCount;

    // ... (the per-pointer state maps PerPtrTopDown and PerPtrBottomUp, and
    //      the Preds/Succs edge lists, are declared here)

  public:
    static const unsigned OverflowOccurredValue;

    BBState() : TopDownPathCount(0), BottomUpPathCount(0) { }

    typedef decltype(PerPtrTopDown)::iterator top_down_ptr_iterator;
    typedef decltype(PerPtrTopDown)::const_iterator const_top_down_ptr_iterator;

    top_down_ptr_iterator top_down_ptr_begin() { return PerPtrTopDown.begin(); }
    top_down_ptr_iterator top_down_ptr_end() { return PerPtrTopDown.end(); }
    const_top_down_ptr_iterator top_down_ptr_begin() const {
      return PerPtrTopDown.begin();
    }
    const_top_down_ptr_iterator top_down_ptr_end() const {
      return PerPtrTopDown.end();
    }
    bool hasTopDownPtrs() const {
      return !PerPtrTopDown.empty();
    }

    typedef decltype(PerPtrBottomUp)::iterator bottom_up_ptr_iterator;
    typedef decltype(
        PerPtrBottomUp)::const_iterator const_bottom_up_ptr_iterator;

    bottom_up_ptr_iterator bottom_up_ptr_begin() {
      return PerPtrBottomUp.begin();
    }
    bottom_up_ptr_iterator bottom_up_ptr_end() { return PerPtrBottomUp.end(); }
    const_bottom_up_ptr_iterator bottom_up_ptr_begin() const {
      return PerPtrBottomUp.begin();
    }
    const_bottom_up_ptr_iterator bottom_up_ptr_end() const {
      return PerPtrBottomUp.end();
    }
    bool hasBottomUpPtrs() const {
      return !PerPtrBottomUp.empty();
    }

    /// Mark this block as being an entry block, which has one path from the
    /// entry by definition.
    void SetAsEntry() { TopDownPathCount = 1; }

    /// Mark this block as being an exit block, which has one path to an exit
    /// by definition.
    void SetAsExit() { BottomUpPathCount = 1; }

    /// Return (creating it if necessary) the top-down state for Arg.
    TopDownPtrState &getPtrTopDownState(const Value *Arg) {
      return PerPtrTopDown[Arg];
    }

    /// Return (creating it if necessary) the bottom-up state for Arg.
    BottomUpPtrState &getPtrBottomUpState(const Value *Arg) {
      return PerPtrBottomUp[Arg];
    }

    bottom_up_ptr_iterator findPtrBottomUpState(const Value *Arg) {
      return PerPtrBottomUp.find(Arg);
    }

    void clearBottomUpPointers() {
      PerPtrBottomUp.clear();
    }

    void clearTopDownPointers() {
      PerPtrTopDown.clear();
    }

    void InitFromPred(const BBState &Other);
    void InitFromSucc(const BBState &Other);
    void MergePred(const BBState &Other);
    void MergeSucc(const BBState &Other);

    /// Returns true if overflow occurred. Returns false if overflow did not
    /// occur.
    bool GetAllPathCountWithOverflow(unsigned &PathCount) const {
      if (TopDownPathCount == OverflowOccurredValue ||
          BottomUpPathCount == OverflowOccurredValue)
        return true;
      unsigned long long Product =
          (unsigned long long)TopDownPathCount * BottomUpPathCount;
      // Overflow occurred if any of the upper bits of Product are set, or if
      // the truncated product equals the overflow sentinel itself.
      return (Product >> 32) ||
             ((PathCount = Product) == OverflowOccurredValue);
    }

    // Specialized CFG utilities.
    typedef SmallVectorImpl<BasicBlock *>::const_iterator edge_iterator;
    edge_iterator pred_begin() const { return Preds.begin(); }
    edge_iterator pred_end() const { return Preds.end(); }
    edge_iterator succ_begin() const { return Succs.begin(); }
    edge_iterator succ_end() const { return Succs.end(); }

    void addSucc(BasicBlock *Succ) { Succs.push_back(Succ); }
    void addPred(BasicBlock *Pred) { Preds.push_back(Pred); }

    bool isExit() const { return Succs.empty(); }
  };

  const unsigned BBState::OverflowOccurredValue = 0xffffffff;
}
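// Worked example (illustrative, not from the original source): if a block can
// be reached from the entry along 3 distinct paths (TopDownPathCount == 3) and
// can reach an exit along 4 distinct paths (BottomUpPathCount == 4), then
// GetAllPathCountWithOverflow() sets PathCount to 12 and returns false. The
// product is computed in 64 bits; if any bit above the low 32 is set, or the
// truncated value happens to equal the 0xffffffff sentinel, the function
// returns true and callers fall back to conservative behavior instead of
// trusting the count.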
/// The top-down traversal uses this to build the initial state for a block
/// from its unique predecessor.
void BBState::InitFromPred(const BBState &Other) {
  PerPtrTopDown = Other.PerPtrTopDown;
  TopDownPathCount = Other.TopDownPathCount;
}

/// The bottom-up traversal uses this to build the initial state for a block
/// from its unique successor.
void BBState::InitFromSucc(const BBState &Other) {
  PerPtrBottomUp = Other.PerPtrBottomUp;
  BottomUpPathCount = Other.BottomUpPathCount;
}

/// The top-down traversal uses this to merge information about predecessors
/// to form the initial state for a new block.
void BBState::MergePred(const BBState &Other) {
  if (TopDownPathCount == OverflowOccurredValue)
    return;

  // Other.TopDownPathCount can be 0, in which case it is either dead or a
  // loop; in that case we just want to zero out the TopDownPathCount.
  TopDownPathCount += Other.TopDownPathCount;

  // For consistency, clear the top-down pointers if the addition itself
  // produced the OverflowOccurredValue sentinel, even though no "true"
  // overflow occurred.
  if (TopDownPathCount == OverflowOccurredValue) {
    clearTopDownPointers();
    return;
  }

  // Check for overflow. If we have overflow, fall back to conservative
  // behavior.
  if (TopDownPathCount < Other.TopDownPathCount) {
    TopDownPathCount = OverflowOccurredValue;
    clearTopDownPointers();
    return;
  }

  // For each entry in the other set, if our set has an entry with the same
  // key, merge the entries. Otherwise, copy the entry and merge it with an
  // empty entry.
  for (auto MI = Other.top_down_ptr_begin(), ME = Other.top_down_ptr_end();
       MI != ME; ++MI) {
    auto Pair = PerPtrTopDown.insert(*MI);
    Pair.first->second.Merge(Pair.second ? TopDownPtrState() : MI->second,
                             /*TopDown=*/true);
  }

  // For each entry in our set that the other set doesn't have, force a merge
  // with an empty entry.
  for (auto MI = top_down_ptr_begin(), ME = top_down_ptr_end(); MI != ME; ++MI)
    if (Other.PerPtrTopDown.find(MI->first) == Other.PerPtrTopDown.end())
      MI->second.Merge(TopDownPtrState(), /*TopDown=*/true);
}

/// The bottom-up traversal uses this to merge information about successors to
/// form the initial state for a new block.
void BBState::MergeSucc(const BBState &Other) {
  if (BottomUpPathCount == OverflowOccurredValue)
    return;

  BottomUpPathCount += Other.BottomUpPathCount;

  if (BottomUpPathCount == OverflowOccurredValue) {
    clearBottomUpPointers();
    return;
  }

  // Check for overflow. If we have overflow, fall back to conservative
  // behavior.
  if (BottomUpPathCount < Other.BottomUpPathCount) {
    BottomUpPathCount = OverflowOccurredValue;
    clearBottomUpPointers();
    return;
  }

  for (auto MI = Other.bottom_up_ptr_begin(), ME = Other.bottom_up_ptr_end();
       MI != ME; ++MI) {
    auto Pair = PerPtrBottomUp.insert(*MI);
    Pair.first->second.Merge(Pair.second ? BottomUpPtrState() : MI->second,
                             /*TopDown=*/false);
  }

  for (auto MI = bottom_up_ptr_begin(), ME = bottom_up_ptr_end(); MI != ME;
       ++MI)
    if (Other.PerPtrBottomUp.find(MI->first) == Other.PerPtrBottomUp.end())
      MI->second.Merge(BottomUpPtrState(), /*TopDown=*/false);
}
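// Note on the overflow check above (illustrative, not from the original
// source): the path counts are unsigned 32-bit values, so if the addition
// wraps around, the wrapped sum is necessarily smaller than either operand.
// For example, 0xfffffff0 + 0x20 wraps to 0x10, and 0x10 < 0x20 exposes the
// overflow; the count then degrades to OverflowOccurredValue and the
// per-pointer maps are cleared rather than trusted.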
/// Dump operator used from DEBUG() to print the per-block dataflow state.
raw_ostream &operator<<(raw_ostream &OS, BBState &BBInfo) {
  // Dump the pointers we are tracking.
  OS << "    TopDown State:\n";
  if (!BBInfo.hasTopDownPtrs()) {
    DEBUG(dbgs() << "        NONE!\n");
  } else {
    for (auto I = BBInfo.top_down_ptr_begin(), E = BBInfo.top_down_ptr_end();
         I != E; ++I) {
      const PtrState &P = I->second;
      OS << "        Ptr: " << *I->first
         << "\n            KnownSafe:        "
         << (P.IsKnownSafe() ? "true" : "false")
         << "\n            ImpreciseRelease: "
         << (P.IsTrackingImpreciseReleases() ? "true" : "false") << "\n"
         << "            HasCFGHazards:    "
         << (P.IsCFGHazardAfflicted() ? "true" : "false") << "\n"
         << "            KnownPositive:    "
         << (P.HasKnownPositiveRefCount() ? "true" : "false") << "\n"
         << "            Seq:              " << P.GetSeq() << "\n";
    }
  }

  OS << "    BottomUp State:\n";
  if (!BBInfo.hasBottomUpPtrs()) {
    DEBUG(dbgs() << "        NONE!\n");
  } else {
    for (auto I = BBInfo.bottom_up_ptr_begin(), E = BBInfo.bottom_up_ptr_end();
         I != E; ++I) {
      const PtrState &P = I->second;
      OS << "        Ptr: " << *I->first
         << "\n            KnownSafe:        "
         << (P.IsKnownSafe() ? "true" : "false")
         << "\n            ImpreciseRelease: "
         << (P.IsTrackingImpreciseReleases() ? "true" : "false") << "\n"
         << "            HasCFGHazards:    "
         << (P.IsCFGHazardAfflicted() ? "true" : "false") << "\n"
         << "            KnownPositive:    "
         << (P.HasKnownPositiveRefCount() ? "true" : "false") << "\n"
         << "            Seq:              " << P.GetSeq() << "\n";
    }
  }

  return OS;
}
namespace {
  /// The main ARC optimization pass.
  class ObjCARCOpt : public FunctionPass {
    // ... (cached runtime entry points, metadata kind cache, provenance
    //      analysis, MultiOwnersSet, and other per-function state elided)

    /// Flags which determine whether each of the interesting runtime
    /// functions is in fact used in the current function.
    unsigned UsedInThisFunction;

    void OptimizeIndividualCalls(Function &F);
    // ...
    void CheckForCFGHazards(const BasicBlock *BB,
                            DenseMap<const BasicBlock *, BBState> &BBStates,
                            BBState &MyStates) const;
    // ...
    bool PairUpRetainsAndReleases(
        /* ... dataflow results and scratch state, followed by: */
        Value *Arg, bool KnownSafe,
        bool &AnyPairsCompletelyEliminated);
    // ...
    void GatherStatistics(Function &F, bool AfterOptimization = false);

    bool doInitialization(Module &M) override;
    bool runOnFunction(Function &F) override;
    void releaseMemory() override;
    // ...
  };
}

char ObjCARCOpt::ID = 0;
INITIALIZE_PASS_BEGIN(ObjCARCOpt, "objc-arc", "ObjC ARC optimization",
                      false, false)
// ... (analysis dependencies)
INITIALIZE_PASS_END(ObjCARCOpt, "objc-arc", "ObjC ARC optimization",
                    false, false)

Pass *llvm::createObjCARCOptPass() {
  return new ObjCARCOpt();
}
/// Turn objc_retainAutoreleasedReturnValue into objc_retain if the operand is
/// not a return value, or erase it together with a preceding autoreleaseRV on
/// the same pointer.
bool ObjCARCOpt::OptimizeRetainRVCall(Function &F, Instruction *RetainRV) {
  // Check for the argument being from an immediately preceding call or invoke.
  // ...
    } else if (const InvokeInst *II = dyn_cast<InvokeInst>(Call)) {
      // An invoke's return value is only available in its normal destination.
      if (II->getNormalDest() == RetainRVParent) {
        // ...
      }
    }
  // ...

  // If the retainRV is directly preceded by an autoreleaseRV of the same
  // pointer, the two calls cancel out; erase both and report success.
  // ...
      DEBUG(dbgs() << "Erasing autoreleaseRV,retainRV pair: " << *I << "\n"
                   << "Erasing " << *RetainRV << "\n");
  // ...

  // Otherwise, turn it into a plain objc_retain call.
  DEBUG(dbgs() << "Transforming objc_retainAutoreleasedReturnValue => "
                  "objc_retain since the operand is not a return value.\n"
                  "Old = " << *RetainRV << "\n");

  // NewDecl is the cached objc_retain runtime declaration.
  cast<CallInst>(RetainRV)->setCalledFunction(NewDecl);

  DEBUG(dbgs() << "New = " << *RetainRV << "\n");
  return false;
}
/// Turn objc_autoreleaseReturnValue into objc_autorelease if the result is not
/// used as a return value.
void ObjCARCOpt::OptimizeAutoreleaseRVCall(Function &F,
                                           Instruction *AutoreleaseRV,
                                           ARCInstKind &Class) {
  // Check for a return of the pointer value.
  const Value *Ptr = GetArgRCIdentityRoot(AutoreleaseRV);
  SmallVector<const Value *, 2> Users;
  Users.push_back(Ptr);
  do {
    Ptr = Users.pop_back_val();
    for (const User *U : Ptr->users()) {
      if (isa<ReturnInst>(U) || GetBasicARCInstKind(U) == ARCInstKind::RetainRV)
        return;
      if (isa<BitCastInst>(U))
        Users.push_back(U);
    }
  } while (!Users.empty());

  Changed = true;
  ++NumPeeps;

  DEBUG(dbgs() << "Transforming objc_autoreleaseReturnValue => "
                  "objc_autorelease since its operand is not used as a return "
                  "value.\n"
                  "Old = " << *AutoreleaseRV << "\n");

  CallInst *AutoreleaseRVCI = cast<CallInst>(AutoreleaseRV);
  // ... (retarget the call at the plain objc_autorelease declaration, drop the
  //      tail marker, and update Class)

  DEBUG(dbgs() << "New: " << *AutoreleaseRV << "\n");
}
/// Visit each call, one at a time, and make simplifications without doing any
/// additional analysis.
void ObjCARCOpt::OptimizeIndividualCalls(Function &F) {
  DEBUG(dbgs() << "\n== ObjCARCOpt::OptimizeIndividualCalls ==\n");

  // Reset all the flags in preparation for recomputing them.
  UsedInThisFunction = 0;

  // Visit all objc_* calls in F.
  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
    Instruction *Inst = &*I++;
    ARCInstKind Class = GetBasicARCInstKind(Inst);

    DEBUG(dbgs() << "Visiting: Class: " << Class << "; " << *Inst << "\n");

    switch (Class) {
    default: break;

    // Delete no-op casts; they are not needed by ARC and they confuse the
    // later analyses.
    case ARCInstKind::NoopCast:
      // ...
      DEBUG(dbgs() << "Erasing no-op cast: " << *Inst << "\n");
      EraseInstruction(Inst);
      continue;

    // If the pointer-to-weak-pointer is null, it's undefined behavior.
    case ARCInstKind::StoreWeak:
    case ARCInstKind::LoadWeak:
    case ARCInstKind::LoadWeakRetained:
    case ARCInstKind::InitWeak:
    case ARCInstKind::DestroyWeak: {
      CallInst *CI = cast<CallInst>(Inst);
      if (IsNullOrUndef(CI->getArgOperand(0))) {
        // ... (make the UB explicit: store undef through a null pointer, then
        //      replace the call's uses with undef)
        DEBUG(dbgs() << "A null pointer-to-weak-pointer is undefined behavior."
                        "\nOld = " << *CI << "\nNew = " << *NewValue << "\n");
        // ...
        continue;
      }
      break;
    }
    case ARCInstKind::CopyWeak:
    case ARCInstKind::MoveWeak: {
      CallInst *CI = cast<CallInst>(Inst);
      if (IsNullOrUndef(CI->getArgOperand(0)) ||
          IsNullOrUndef(CI->getArgOperand(1))) {
        // ...
        DEBUG(dbgs() << "A null pointer-to-weak-pointer is undefined behavior."
                        "\nOld = " << *CI << "\nNew = " << *NewValue << "\n");
        // ...
        continue;
      }
      break;
    }
    case ARCInstKind::RetainRV:
      if (OptimizeRetainRVCall(F, Inst))
        continue;
      break;
    case ARCInstKind::AutoreleaseRV:
      OptimizeAutoreleaseRVCall(F, Inst, Class);
      break;
    }

    // objc_autorelease(x) -> objc_release(x) if x is otherwise unused.
    if (IsAutorelease(Class) && Inst->use_empty()) {
      // ... (build NewCall, an objc_release of the same argument tagged with
      //      clang.imprecise_release)
      DEBUG(dbgs() << "Replacing autorelease{,RV}(x) with objc_release(x) "
                      "since x is otherwise unused.\nOld: " << *Call << "\nNew: "
                   << *NewCall << "\n");
      // ...
    }

    // For functions which can never be passed stack arguments, add a tail
    // keyword.
    if (IsAlwaysTail(Class)) {
      // ...
      DEBUG(dbgs() << "Adding tail keyword to function since it can never be "
                      "passed stack args: " << *Inst << "\n");
      cast<CallInst>(Inst)->setTailCall();
    }

    // Ensure that functions that can never have a "tail" keyword due to the
    // semantics of ARC truly do not do so.
    if (IsNeverTail(Class)) {
      // ...
      DEBUG(dbgs() << "Removing tail keyword from function: " << *Inst << "\n");
      cast<CallInst>(Inst)->setTailCall(false);
    }

    // Set nounwind as needed.
    if (IsNoThrow(Class)) {
      // ...
      DEBUG(dbgs() << "Found no throw class. Setting nounwind on: " << *Inst
                   << "\n");
      cast<CallInst>(Inst)->setDoesNotThrow();
    }

    if (!IsNoopOnNull(Class)) {
      UsedInThisFunction |= 1 << unsigned(Class);
      continue;
    }

    // ARC calls with null are no-ops. Delete them.
    const Value *Arg = GetArgRCIdentityRoot(Inst);
    if (IsNullOrUndef(Arg)) {
      // ...
      DEBUG(dbgs() << "ARC calls with null are no-ops. Erasing: " << *Inst
                   << "\n");
      EraseInstruction(Inst);
      continue;
    }

    // Keep track of which of retain, release, autorelease, and retain_block
    // are actually present in this function.
    UsedInThisFunction |= 1 << unsigned(Class);

    // If Arg is a PHI and some of its incoming values are null, and the call
    // is control-equivalent to the PHI, sink clones of the call into the
    // predecessors with non-null incoming values and erase the original.
    SmallVector<std::pair<Instruction *, const Value *>, 4> Worklist;
    Worklist.push_back(std::make_pair(Inst, Arg));
    do {
      std::pair<Instruction *, const Value *> Pair = Worklist.pop_back_val();
      Inst = Pair.first;
      Arg = Pair.second;

      const PHINode *PN = dyn_cast<PHINode>(Arg);
      if (!PN)
        continue;

      // Determine if the PHI has any null operands, or any incoming critical
      // edges.
      bool HasNull = false;
      bool HasCriticalEdges = false;
      for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
        Value *Incoming = GetRCIdentityRoot(PN->getIncomingValue(i));
        if (IsNullOrUndef(Incoming))
          HasNull = true;
        else if (cast<TerminatorInst>(PN->getIncomingBlock(i)->back())
                     .getNumSuccessors() != 1) {
          HasCriticalEdges = true;
          break;
        }
      }
      // If we have null operands and no critical edges, optimize.
      if (!HasCriticalEdges && HasNull) {
        // ... (use FindDependencies with DependingInstructions, Visited and PA
        //      to prove the call is control-equivalent to the PHI)
        if (DependingInstructions.size() == 1 &&
            *DependingInstructions.begin() == PN) {
          CallInst *CInst = cast<CallInst>(Inst);
          // Clone the call into each predecessor with a non-null incoming
          // value, casting the operand to the parameter type if necessary.
          // ...
              if (Op->getType() != ParamTy)
                Op = new BitCastInst(Op, ParamTy, "", InsertPos);
          // ...
              DEBUG(dbgs() << "Cloning " << *CInst << "\n"
                              "And inserting clone at " << *InsertPos << "\n");
              Worklist.push_back(std::make_pair(Clone, Incoming));
          // ...
          // Erase the original call.
          DEBUG(dbgs() << "Erasing: " << *CInst << "\n");
          EraseInstruction(CInst);
          continue;
        }
      }
    } while (!Worklist.empty());
  }
}
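// Illustrative IR (not from the original source) for the PHI transformation
// above: given
//
//   merge:
//     %p = phi i8* [ %obj, %then ], [ null, %else ]
//     call void @objc_release(i8* %p)
//
// the release is a no-op along the %else edge, so a clone of the call is
// inserted at the end of %then (operating on %obj) and the original call in
// %merge is erased. This is the kind of call the NumPartialNoops statistic
// describes: one that is a no-op along only some incoming paths.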
/// If we have a top-down pointer in the S_Use state, make sure that there are
/// no CFG hazards by checking the states of various bottom-up pointers.
static void CheckForUseCFGHazard(const Sequence SuccSSeq,
                                 const bool SuccSRRIKnownSafe,
                                 TopDownPtrState &S,
                                 bool &SomeSuccHasSame,
                                 bool &AllSuccsHaveSame,
                                 bool &NotAllSeqEqualButKnownSafe,
                                 bool &ShouldContinue) {
  switch (SuccSSeq) {
  case S_CanRelease: {
    if (!S.IsKnownSafe() && !SuccSRRIKnownSafe) {
      S.ClearSequenceProgress();
      break;
    }
    S.SetCFGHazardAfflicted(true);
    ShouldContinue = true;
    break;
  }
  case S_Use:
    SomeSuccHasSame = true;
    break;
  case S_Stop:
  case S_Release:
  case S_MovableRelease:
    if (!S.IsKnownSafe() && !SuccSRRIKnownSafe)
      AllSuccsHaveSame = false;
    else
      NotAllSeqEqualButKnownSafe = true;
    break;
  default:
    // S_Retain and S_None cannot occur here.
    llvm_unreachable("This should have been handled earlier.");
  }
}

/// If we have a top-down pointer in the S_CanRelease state, make sure that
/// there are no CFG hazards by checking the states of various bottom-up
/// pointers.
static void CheckForCanReleaseCFGHazard(const Sequence SuccSSeq,
                                        const bool SuccSRRIKnownSafe,
                                        TopDownPtrState &S,
                                        bool &SomeSuccHasSame,
                                        bool &AllSuccsHaveSame,
                                        bool &NotAllSeqEqualButKnownSafe) {
  switch (SuccSSeq) {
  case S_CanRelease:
    SomeSuccHasSame = true;
    break;
  case S_Stop:
  case S_Release:
  case S_MovableRelease:
  case S_Use:
    if (!S.IsKnownSafe() && !SuccSRRIKnownSafe)
      AllSuccsHaveSame = false;
    else
      NotAllSeqEqualButKnownSafe = true;
    break;
  default:
    // S_Retain and S_None cannot occur here.
    llvm_unreachable("This should have been handled earlier.");
  }
}
/// Check for critical edges, loop boundaries, irreducible control flow, or
/// other CFG structures where moving code along this path of the CFG would be
/// unsafe.
void
ObjCARCOpt::CheckForCFGHazards(const BasicBlock *BB,
                               DenseMap<const BasicBlock *, BBState> &BBStates,
                               BBState &MyStates) const {
  // If any top-down local-use or possible-dec has a successor without these,
  // mark the state as conflicting with the successor's state.
  for (auto I = MyStates.top_down_ptr_begin(), E = MyStates.top_down_ptr_end();
       I != E; ++I) {
    TopDownPtrState &S = I->second;
    const Sequence Seq = I->second.GetSeq();

    // We only care about S_Retain, S_CanRelease and S_Use.
    if (Seq == S_None)
      continue;

    assert((Seq == S_Retain || Seq == S_CanRelease || Seq == S_Use) &&
           "Unknown top down sequence state.");

    const Value *Arg = I->first;
    bool SomeSuccHasSame = false;
    bool AllSuccsHaveSame = true;
    bool NotAllSeqEqualButKnownSafe = false;

    // ... (SI/SE iterate over the successors of BB)
    for (; SI != SE; ++SI) {
      // If VisitBottomUp has pointer information for this successor, take
      // what we know about it.
      const DenseMap<const BasicBlock *, BBState>::iterator BBI =
          BBStates.find(*SI);
      assert(BBI != BBStates.end());
      const BottomUpPtrState &SuccS = BBI->second.getPtrBottomUpState(Arg);
      const Sequence SuccSSeq = SuccS.GetSeq();

      // If, bottom-up, the pointer is in an S_None state, clear the sequence
      // progress: the bottom-up sequence finished, suggesting a mismatch
      // between retains and releases.
      if (SuccSSeq == S_None) {
        S.ClearSequenceProgress();
        continue;
      }

      // For S_Use and S_CanRelease, perform the CFG hazard checks.
      const bool SuccSRRIKnownSafe = SuccS.IsKnownSafe();
      switch (Seq) {
      case S_Use: {
        bool ShouldContinue = false;
        CheckForUseCFGHazard(SuccSSeq, SuccSRRIKnownSafe, S, SomeSuccHasSame,
                             AllSuccsHaveSame, NotAllSeqEqualButKnownSafe,
                             ShouldContinue);
        if (ShouldContinue)
          continue;
        break;
      }
      case S_CanRelease:
        CheckForCanReleaseCFGHazard(SuccSSeq, SuccSRRIKnownSafe, S,
                                    SomeSuccHasSame, AllSuccsHaveSame,
                                    NotAllSeqEqualButKnownSafe);
        break;
      default:
        break;
      }
    }

    // If the state at the other end of any of the successor edges matches the
    // current state, require all edges to match. This guards against loops in
    // the middle of a sequence.
    if (SomeSuccHasSame && !AllSuccsHaveSame) {
      S.ClearSequenceProgress();
    } else if (NotAllSeqEqualButKnownSafe) {
      // We would have cleared the state but for the known-safety, so stop
      // code motion: its safety depends on CFG properties we have not
      // verified.
      S.SetCFGHazardAfflicted(true);
    }
  }
}
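// Illustrative CFG hazard (not from the original source): consider a retain in
// block A whose two successors disagree about the pointer,
//
//   A:  call i8* @objc_retain(i8* %x); br i1 %c, label %B, label %C
//   B:  call void @use(i8* %x); call void @objc_release(i8* %x); br label %D
//   C:  br label %D
//
// Bottom-up, B sees a release for %x but C leaves it in S_None, so the
// successor states differ; the S_None successor clears the top-down sequence
// progress in A and the retain/release pair is left alone, since removing it
// based on the B arm alone would unbalance the A->C->D path.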
bool ObjCARCOpt::VisitInstructionBottomUp(
    Instruction *Inst, BasicBlock *BB, BlotMapVector<Value *, RRInfo> &Retains,
    BBState &MyStates) {
  bool NestingDetected = false;
  ARCInstKind Class = GetARCInstKind(Inst);
  const Value *Arg = nullptr;

  DEBUG(dbgs() << "        Class: " << Class << "\n");

  switch (Class) {
  // ... (releases and retains start or extend per-pointer sequences; matched
  //      retains are recorded in Retains, and nesting is noted)
  case ARCInstKind::AutoreleasepoolPop:
    // Conservatively, clear MyStates for all known pointers.
    MyStates.clearBottomUpPointers();
    return NestingDetected;
  case ARCInstKind::AutoreleasepoolPush:
  case ARCInstKind::None:
    // These are irrelevant.
    return NestingDetected;
  default:
    break;
  }

  // If this instruction stores a tracked pointer into stack memory, the
  // pointer may pick up additional owners; remember it in MultiOwnersSet.
  if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
    if (AreAnyUnderlyingObjectsAnAlloca(SI->getPointerOperand(),
                                        BB->getModule()->getDataLayout())) {
      auto I = MyStates.findPtrBottomUpState(
          GetRCIdentityRoot(SI->getValueOperand()));
      if (I != MyStates.bottom_up_ptr_end())
        MultiOwnersSet.insert(I->first);
    }
  }

  // Consider any other possible effects of this instruction on each pointer
  // being tracked.
  for (auto MI = MyStates.bottom_up_ptr_begin(),
            ME = MyStates.bottom_up_ptr_end();
       MI != ME; ++MI) {
    // ... (check whether Inst can alter the ref count of, or use, MI->first)
  }

  return NestingDetected;
}
bool ObjCARCOpt::VisitBottomUp(BasicBlock *BB,
                               DenseMap<const BasicBlock *, BBState> &BBStates,
                               BlotMapVector<Value *, RRInfo> &Retains) {
  DEBUG(dbgs() << "\n== ObjCARCOpt::VisitBottomUp ==\n");

  bool NestingDetected = false;
  BBState &MyStates = BBStates[BB];

  // Merge the states from each successor to compute the initial state for the
  // current block.
  BBState::edge_iterator SI(MyStates.succ_begin()),
                         SE(MyStates.succ_end());
  if (SI != SE) {
    const BasicBlock *Succ = *SI;
    DenseMap<const BasicBlock *, BBState>::iterator I = BBStates.find(Succ);
    assert(I != BBStates.end());
    MyStates.InitFromSucc(I->second);
    ++SI;
    for (; SI != SE; ++SI) {
      Succ = *SI;
      I = BBStates.find(Succ);
      assert(I != BBStates.end());
      MyStates.MergeSucc(I->second);
    }
  }

  DEBUG(dbgs() << "Before:\n" << BBStates[BB] << "\n"
               << "Performing Dataflow:\n");

  // Visit all the instructions, bottom-up.
  for (BasicBlock::iterator I = BB->end(), E = BB->begin(); I != E; --I) {
    Instruction *Inst = std::prev(I);

    // Invoke instructions are visited as part of their successors (below).
    if (isa<InvokeInst>(Inst))
      continue;

    DEBUG(dbgs() << "    Visiting " << *Inst << "\n");

    NestingDetected |= VisitInstructionBottomUp(Inst, BB, Retains, MyStates);
  }

  // If there's a predecessor with an invoke, visit the invoke as if it were
  // part of this block, since we can't insert code after an invoke in its own
  // block, and we don't want to split critical edges.
  for (BBState::edge_iterator PI(MyStates.pred_begin()),
       PE(MyStates.pred_end()); PI != PE; ++PI) {
    BasicBlock *Pred = *PI;
    if (InvokeInst *II = dyn_cast<InvokeInst>(&Pred->back()))
      NestingDetected |= VisitInstructionBottomUp(II, BB, Retains, MyStates);
  }

  return NestingDetected;
}
bool
ObjCARCOpt::VisitInstructionTopDown(Instruction *Inst,
                                    DenseMap<Value *, RRInfo> &Releases,
                                    BBState &MyStates) {
  bool NestingDetected = false;
  ARCInstKind Class = GetARCInstKind(Inst);
  const Value *Arg = nullptr;

  // ... (retains start per-pointer sequences, releases are matched against
  //      them and recorded in Releases; an autorelease-pool pop conservatively
  //      clears all tracked pointers)
      MyStates.clearTopDownPointers();
  // ...

  // Consider any other possible effects of this instruction on each pointer
  // being tracked.
  for (auto MI = MyStates.top_down_ptr_begin(),
            ME = MyStates.top_down_ptr_end();
       MI != ME; ++MI) {
    // ... (check whether Inst can alter the ref count of, or use, MI->first)
  }

  return NestingDetected;
}
bool ObjCARCOpt::VisitTopDown(BasicBlock *BB,
                              DenseMap<const BasicBlock *, BBState> &BBStates,
                              DenseMap<Value *, RRInfo> &Releases) {
  DEBUG(dbgs() << "\n== ObjCARCOpt::VisitTopDown ==\n");
  bool NestingDetected = false;
  BBState &MyStates = BBStates[BB];

  // Merge the states from each predecessor to compute the initial state for
  // the current block.
  BBState::edge_iterator PI(MyStates.pred_begin()),
                         PE(MyStates.pred_end());
  if (PI != PE) {
    const BasicBlock *Pred = *PI;
    DenseMap<const BasicBlock *, BBState>::iterator I = BBStates.find(Pred);
    assert(I != BBStates.end());
    MyStates.InitFromPred(I->second);
    ++PI;
    for (; PI != PE; ++PI) {
      Pred = *PI;
      I = BBStates.find(Pred);
      assert(I != BBStates.end());
      MyStates.MergePred(I->second);
    }
  }

  DEBUG(dbgs() << "Before:\n" << BBStates[BB] << "\n"
               << "Performing Dataflow:\n");

  // Visit all the instructions, top-down.
  for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) {
    Instruction *Inst = &*I;

    DEBUG(dbgs() << "    Visiting " << *Inst << "\n");

    NestingDetected |= VisitInstructionTopDown(Inst, Releases, MyStates);
  }

  DEBUG(dbgs() << "\nState Before Checking for CFG Hazards:\n"
               << BBStates[BB] << "\n\n");

  CheckForCFGHazards(BB, BBStates, MyStates);
  return NestingDetected;
}
static void ComputePostOrders(Function &F,
                              SmallVectorImpl<BasicBlock *> &PostOrder,
                              SmallVectorImpl<BasicBlock *> &ReverseCFGPostOrder,
                              unsigned NoObjCARCExceptionsMDKind,
                              DenseMap<const BasicBlock *, BBState> &BBStates) {
  /// The visited set, for doing DFS walks.
  SmallPtrSet<BasicBlock *, 16> Visited;

  // Do DFS, computing the PostOrder.
  SmallPtrSet<BasicBlock *, 16> OnStack;
  SmallVector<std::pair<BasicBlock *, succ_iterator>, 16> SuccStack;

  // Functions always have exactly one entry block, and we don't have
  // any other block that we treat like an entry block.
  BasicBlock *EntryBB = &F.getEntryBlock();
  BBState &MyStates = BBStates[EntryBB];
  MyStates.SetAsEntry();
  SuccStack.push_back(
      std::make_pair(EntryBB, succ_iterator(EntryBB->getTerminator())));
  Visited.insert(EntryBB);
  OnStack.insert(EntryBB);
  do {
  dfs_next_succ:
    BasicBlock *CurrBB = SuccStack.back().first;
    TerminatorInst *TI = cast<TerminatorInst>(&CurrBB->back());
    succ_iterator SE(TI, false);

    while (SuccStack.back().second != SE) {
      BasicBlock *SuccBB = *SuccStack.back().second++;
      if (Visited.insert(SuccBB).second) {
        // First visit: record the CFG edge and descend into SuccBB.
        SuccStack.push_back(std::make_pair(
            SuccBB, succ_iterator(cast<TerminatorInst>(&SuccBB->back()))));
        BBStates[CurrBB].addSucc(SuccBB);
        BBState &SuccStates = BBStates[SuccBB];
        SuccStates.addPred(CurrBB);
        OnStack.insert(SuccBB);
        goto dfs_next_succ;
      }

      if (!OnStack.count(SuccBB)) {
        // Already finished: still record the edge.
        BBStates[CurrBB].addSucc(SuccBB);
        BBStates[SuccBB].addPred(CurrBB);
      }
    }
    OnStack.erase(CurrBB);
    PostOrder.push_back(CurrBB);
    SuccStack.pop_back();
  } while (!SuccStack.empty());

  Visited.clear();

  // Do reverse-CFG DFS, computing the reverse-CFG PostOrder.
  // Functions may have many exits, and there also may be blocks which are
  // unreachable from the exits, so do a DFS from every exit block.
  SmallVector<std::pair<BasicBlock *, BBState::edge_iterator>, 16> PredStack;
  for (Function::iterator I = F.begin(), E = F.end(); I != E; ++I) {
    BasicBlock *ExitBB = &*I;
    BBState &MyStates = BBStates[ExitBB];
    if (!MyStates.isExit())
      continue;

    MyStates.SetAsExit();

    PredStack.push_back(std::make_pair(ExitBB, MyStates.pred_begin()));
    Visited.insert(ExitBB);
    while (!PredStack.empty()) {
    reverse_dfs_next_succ:
      BBState::edge_iterator PE = BBStates[PredStack.back().first].pred_end();
      while (PredStack.back().second != PE) {
        BasicBlock *BB = *PredStack.back().second++;
        if (Visited.insert(BB).second) {
          PredStack.push_back(std::make_pair(BB, BBStates[BB].pred_begin()));
          goto reverse_dfs_next_succ;
        }
      }
      ReverseCFGPostOrder.push_back(PredStack.pop_back_val().first);
    }
  }
}
// Visit the function both top-down and bottom-up.
bool ObjCARCOpt::Visit(Function &F,
                       DenseMap<const BasicBlock *, BBState> &BBStates,
                       BlotMapVector<Value *, RRInfo> &Retains,
                       DenseMap<Value *, RRInfo> &Releases) {
  SmallVector<BasicBlock *, 16> PostOrder;
  SmallVector<BasicBlock *, 16> ReverseCFGPostOrder;
  // ... (ComputePostOrders fills PostOrder and ReverseCFGPostOrder and the
  //      per-block edge lists in BBStates)

  // Use reverse-CFG postorder for bottom-up.
  bool BottomUpNestingDetected = false;
  for (SmallVectorImpl<BasicBlock *>::const_reverse_iterator I =
         ReverseCFGPostOrder.rbegin(), E = ReverseCFGPostOrder.rend();
       I != E; ++I)
    BottomUpNestingDetected |= VisitBottomUp(*I, BBStates, Retains);

  // Use regular postorder for top-down.
  bool TopDownNestingDetected = false;
  for (SmallVectorImpl<BasicBlock *>::const_reverse_iterator I =
         PostOrder.rbegin(), E = PostOrder.rend();
       I != E; ++I)
    TopDownNestingDetected |= VisitTopDown(*I, BBStates, Releases);

  return TopDownNestingDetected && BottomUpNestingDetected;
}
/// Move the calls in RetainsToMove and ReleasesToMove.
void ObjCARCOpt::MoveCalls(Value *Arg, RRInfo &RetainsToMove,
                           RRInfo &ReleasesToMove,
                           BlotMapVector<Value *, RRInfo> &Retains,
                           DenseMap<Value *, RRInfo> &Releases,
                           SmallVectorImpl<Instruction *> &DeadInsts,
                           Module *M) {
  Type *ArgTy = Arg->getType();
  Type *ParamTy = PointerType::getUnqual(Type::getInt8Ty(ArgTy->getContext()));

  DEBUG(dbgs() << "== ObjCARCOpt::MoveCalls ==\n");

  // Insert the new retain and release calls.
  for (Instruction *InsertPt : ReleasesToMove.ReverseInsertPts) {
    Value *MyArg = ArgTy == ParamTy ? Arg :
                   new BitCastInst(Arg, ParamTy, "", InsertPt);
    // ... (create a call to objc_retain at InsertPt and mark it tail)
    DEBUG(dbgs() << "Inserting new Retain: " << *Call << "\n"
                    "At insertion point: " << *InsertPt << "\n");
  }
  for (Instruction *InsertPt : RetainsToMove.ReverseInsertPts) {
    Value *MyArg = ArgTy == ParamTy ? Arg :
                   new BitCastInst(Arg, ParamTy, "", InsertPt);
    // ... (create a call to objc_release at InsertPt, attaching the
    //      clang.imprecise_release metadata if present)
    DEBUG(dbgs() << "Inserting new Release: " << *Call << "\n"
                    "At insertion point: " << *InsertPt << "\n");
  }

  // Delete the original retain and release calls.
  for (Instruction *OrigRetain : RetainsToMove.Calls) {
    Retains.blot(OrigRetain);
    DeadInsts.push_back(OrigRetain);
    DEBUG(dbgs() << "Deleting retain: " << *OrigRetain << "\n");
  }
  for (Instruction *OrigRelease : ReleasesToMove.Calls) {
    Releases.erase(OrigRelease);
    DeadInsts.push_back(OrigRelease);
    DEBUG(dbgs() << "Deleting release: " << *OrigRelease << "\n");
  }
}
// Connect the dots between the top-down-collected retains and the
// bottom-up-collected releases to form sets of related calls.
bool ObjCARCOpt::PairUpRetainsAndReleases(
    DenseMap<const BasicBlock *, BBState> &BBStates,
    BlotMapVector<Value *, RRInfo> &Retains,
    DenseMap<Value *, RRInfo> &Releases, Module *M,
    SmallVectorImpl<Instruction *> &NewRetains,
    SmallVectorImpl<Instruction *> &NewReleases,
    SmallVectorImpl<Instruction *> &DeadInsts, RRInfo &RetainsToMove,
    RRInfo &ReleasesToMove, Value *Arg, bool KnownSafe,
    bool &AnyPairsCompletelyEliminated) {
  // If a pair happens in a region where it is known that the reference count
  // is already incremented, we can similarly ignore possible decrements.
  bool KnownSafeTD = true, KnownSafeBU = true;
  bool MultipleOwners = false;
  bool CFGHazardAfflicted = false;

  // This is an iterative process so that we connect multiple releases to
  // multiple retains if needed.
  unsigned OldDelta = 0;
  unsigned NewDelta = 0;
  unsigned OldCount = 0;
  unsigned NewCount = 0;
  bool FirstRelease = true;
  for (;;) {
    for (SmallVectorImpl<Instruction *>::const_iterator
           NI = NewRetains.begin(), NE = NewRetains.end(); NI != NE; ++NI) {
      Instruction *NewRetain = *NI;
      auto It = Retains.find(NewRetain);
      assert(It != Retains.end());
      const RRInfo &NewRetainRRI = It->second;
      KnownSafeTD &= NewRetainRRI.KnownSafe;
      // ... (MultipleOwners is or'd with membership of the argument in
      //      MultiOwnersSet)
      for (Instruction *NewRetainRelease : NewRetainRRI.Calls) {
        auto Jt = Releases.find(NewRetainRelease);
        if (Jt == Releases.end())
          return false;
        const RRInfo &NewRetainReleaseRRI = Jt->second;

        // If the release does not have a reference to the retain as well,
        // something happened which is unaccounted for. Do not do anything.
        // This can happen if we catch an additive overflow during path count
        // merging.
        if (!NewRetainReleaseRRI.Calls.count(NewRetain))
          return false;

        if (ReleasesToMove.Calls.insert(NewRetainRelease).second) {
          // If we overflow when we compute the path count, don't remove/move
          // anything.
          const BBState &NRRBBState = BBStates[NewRetainRelease->getParent()];
          unsigned PathCount = BBState::OverflowOccurredValue;
          if (NRRBBState.GetAllPathCountWithOverflow(PathCount))
            return false;
          assert(PathCount != BBState::OverflowOccurredValue &&
                 "PathCount at this point can not be "
                 "OverflowOccurredValue.");
          OldDelta -= PathCount;

          // Merge the ReleaseMetadata and IsTailCallRelease values.
          if (FirstRelease) {
            ReleasesToMove.ReleaseMetadata =
                NewRetainReleaseRRI.ReleaseMetadata;
            ReleasesToMove.IsTailCallRelease =
                NewRetainReleaseRRI.IsTailCallRelease;
            FirstRelease = false;
          } else {
            // ... (clear the merged fields on mismatch)
          }

          // Collect the optimal insertion points.
          if (!KnownSafe)
            for (Instruction *RIP : NewRetainReleaseRRI.ReverseInsertPts) {
              if (ReleasesToMove.ReverseInsertPts.insert(RIP).second) {
                // If we overflow when we compute the path count, don't
                // remove/move anything.
                const BBState &RIPBBState = BBStates[RIP->getParent()];
                PathCount = BBState::OverflowOccurredValue;
                if (RIPBBState.GetAllPathCountWithOverflow(PathCount))
                  return false;
                assert(PathCount != BBState::OverflowOccurredValue &&
                       "PathCount at this point can not be "
                       "OverflowOccurredValue.");
                NewDelta -= PathCount;
              }
            }
          NewReleases.push_back(NewRetainRelease);
        }
      }
    }
    NewRetains.clear();
    if (NewReleases.empty()) break;

    // Back the other way.
    for (SmallVectorImpl<Instruction *>::const_iterator
           NI = NewReleases.begin(), NE = NewReleases.end(); NI != NE; ++NI) {
      Instruction *NewRelease = *NI;
      auto It = Releases.find(NewRelease);
      assert(It != Releases.end());
      const RRInfo &NewReleaseRRI = It->second;
      KnownSafeBU &= NewReleaseRRI.KnownSafe;
      CFGHazardAfflicted |= NewReleaseRRI.CFGHazardAfflicted;
      for (Instruction *NewReleaseRetain : NewReleaseRRI.Calls) {
        auto Jt = Retains.find(NewReleaseRetain);
        if (Jt == Retains.end())
          return false;
        const RRInfo &NewReleaseRetainRRI = Jt->second;

        // If the retain does not have a reference to the release as well,
        // something happened which is unaccounted for. Do not do anything.
        if (!NewReleaseRetainRRI.Calls.count(NewRelease))
          return false;

        if (RetainsToMove.Calls.insert(NewReleaseRetain).second) {
          // If we overflow when we compute the path count, don't remove/move
          // anything.
          const BBState &NRRBBState = BBStates[NewReleaseRetain->getParent()];
          unsigned PathCount = BBState::OverflowOccurredValue;
          if (NRRBBState.GetAllPathCountWithOverflow(PathCount))
            return false;
          assert(PathCount != BBState::OverflowOccurredValue &&
                 "PathCount at this point can not be "
                 "OverflowOccurredValue.");
          OldDelta += PathCount;
          OldCount += PathCount;

          // Collect the optimal insertion points.
          if (!KnownSafe)
            for (Instruction *RIP : NewReleaseRetainRRI.ReverseInsertPts) {
              if (RetainsToMove.ReverseInsertPts.insert(RIP).second) {
                const BBState &RIPBBState = BBStates[RIP->getParent()];

                PathCount = BBState::OverflowOccurredValue;
                if (RIPBBState.GetAllPathCountWithOverflow(PathCount))
                  return false;
                assert(PathCount != BBState::OverflowOccurredValue &&
                       "PathCount at this point can not be "
                       "OverflowOccurredValue.");
                NewDelta += PathCount;
                NewCount += PathCount;
              }
            }
          NewRetains.push_back(NewReleaseRetain);
        }
      }
    }
    NewReleases.clear();
    if (NewRetains.empty()) break;
  }

  // We can only remove pointers if we are known safe in both directions.
  bool UnconditionallySafe = KnownSafeTD && KnownSafeBU;
  if (UnconditionallySafe) {
    RetainsToMove.ReverseInsertPts.clear();
    ReleasesToMove.ReverseInsertPts.clear();
    NewCount = 0;
  } else {
    // Determine whether the new insertion points we computed preserve the
    // balance of retain and release calls through the program.
    if (NewDelta != 0)
      return false;

    // At this point we are not going to remove any RR pairs, but we still may
    // move them. If one of our pointers is afflicted with CFG hazards, we
    // cannot perform such code motion, so exit early.
    const bool WillPerformCodeMotion = RetainsToMove.ReverseInsertPts.size() ||
                                       ReleasesToMove.ReverseInsertPts.size();
    if (CFGHazardAfflicted && WillPerformCodeMotion)
      return false;
  }

  // Determine whether the original call points are balanced in the retain and
  // release calls through the program. If not, conservatively don't touch
  // them.
  if (OldDelta != 0)
    return false;

  Changed = true;
  assert(OldCount != 0 && "Unreachable code?");
  NumRRs += OldCount - NewCount;
  // Set to true if we completely removed any RR pairs.
  AnyPairsCompletelyEliminated = NewCount == 0;

  // We can move calls!
  return true;
}
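// Worked example of the bookkeeping above (illustrative, not from the original
// source): suppose one retain pairs with one release and each sits in a block
// whose path count is 2. Scanning the release subtracts 2 from OldDelta;
// scanning the matching retain adds 2 to OldDelta and to OldCount. OldDelta
// ends at 0 (the existing calls are balanced across all paths), OldCount is 2,
// and with no new insertion points NewCount stays 0, so NumRRs increases by 2
// and AnyPairsCompletelyEliminated is set.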
/// Identify pairings between the retains and releases, and delete and/or move
/// them.
bool ObjCARCOpt::PerformCodePlacement(
    DenseMap<const BasicBlock *, BBState> &BBStates,
    BlotMapVector<Value *, RRInfo> &Retains,
    DenseMap<Value *, RRInfo> &Releases, Module *M) {
  DEBUG(dbgs() << "\n== ObjCARCOpt::PerformCodePlacement ==\n");

  bool AnyPairsCompletelyEliminated = false;
  RRInfo RetainsToMove;
  RRInfo ReleasesToMove;
  SmallVector<Instruction *, 4> NewRetains;
  SmallVector<Instruction *, 4> NewReleases;
  SmallVector<Instruction *, 8> DeadInsts;

  // Visit each retain.
  for (BlotMapVector<Value *, RRInfo>::const_iterator I = Retains.begin(),
                                                      E = Retains.end();
       I != E; ++I) {
    Value *V = I->first;
    if (!V) continue; // blotted

    Instruction *Retain = cast<Instruction>(V);

    DEBUG(dbgs() << "Visiting: " << *Retain << "\n");

    Value *Arg = GetArgRCIdentityRoot(Retain);

    // If the object being released is in static or stack storage, we know it's
    // not being managed by ObjC reference counting, so we can delete pairs
    // regardless of what possible decrements or uses lie between them.
    bool KnownSafe = isa<Constant>(Arg) || isa<AllocaInst>(Arg);

    // A constant pointer can't be pointing to an object on the heap. It may be
    // reference-counted, but it won't be deleted.
    if (const LoadInst *LI = dyn_cast<LoadInst>(Arg))
      if (const GlobalVariable *GV =
            dyn_cast<GlobalVariable>(
              GetRCIdentityRoot(LI->getPointerOperand())))
        if (GV->isConstant())
          KnownSafe = true;

    // Connect the dots between the top-down and bottom-up datasets.
    NewRetains.push_back(Retain);
    bool PerformMoveCalls = PairUpRetainsAndReleases(
        BBStates, Retains, Releases, M, NewRetains, NewReleases, DeadInsts,
        RetainsToMove, ReleasesToMove, Arg, KnownSafe,
        AnyPairsCompletelyEliminated);

    if (PerformMoveCalls) {
      // Ok, everything checks out and we're all set. Let's move/delete some
      // code!
      MoveCalls(Arg, RetainsToMove, ReleasesToMove,
                Retains, Releases, DeadInsts, M);
    }

    // Clean up state for the next round, if any.
    NewReleases.clear();
    NewRetains.clear();
    RetainsToMove.clear();
    ReleasesToMove.clear();
  }

  // Now that we're done moving everything, we can delete the newly dead
  // instructions, as we no longer need them as insert points.
  while (!DeadInsts.empty())
    EraseInstruction(DeadInsts.pop_back_val());

  return AnyPairsCompletelyEliminated;
}
/// Weak pointer optimizations.
void ObjCARCOpt::OptimizeWeakCalls(Function &F) {
  DEBUG(dbgs() << "\n== ObjCARCOpt::OptimizeWeakCalls ==\n");

  // First, do memdep-style RLE and S2L forwarding for objc_loadWeak calls:
  // scan backwards in the block for an earlier objc_*Weak call on the same
  // pointer whose value can be reused.
  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
    Instruction *Inst = &*I++;

    DEBUG(dbgs() << "Visiting: " << *Inst << "\n");

    // ... (only objc_loadWeak / objc_loadWeakRetained are interesting; the
    //      scan walks EarlierInst candidates backwards in the same block)
      switch (EarlierClass) {
      case ARCInstKind::LoadWeak:
      case ARCInstKind::LoadWeakRetained: {
        // If this is loading from the same pointer, replace this load's value
        // with that one.
        CallInst *Call = cast<CallInst>(Inst);
        CallInst *EarlierCall = cast<CallInst>(EarlierInst);
        Value *EarlierArg = EarlierCall->getArgOperand(0);
        switch (PA.getAA()->alias(Arg, EarlierArg)) {
        // ... (MustAlias: forward the earlier value; MayAlias/PartialAlias:
        //      give up; NoAlias: keep scanning)
        }
        break;
      }
      case ARCInstKind::StoreWeak:
      case ARCInstKind::InitWeak: {
        // If this is storing to the same pointer, forward the stored value.
        CallInst *Call = cast<CallInst>(Inst);
        CallInst *EarlierCall = cast<CallInst>(EarlierInst);
        Value *EarlierArg = EarlierCall->getArgOperand(0);
        switch (PA.getAA()->alias(Arg, EarlierArg)) {
        // ... (as above)
        }
        break;
      }
      // ... (anything else ends the scan)
      }
    // ...
  }

  // Then, for each objc_destroyWeak acting on an alloca, check whether the
  // alloca is used only by the weak runtime calls; if so, rewrite the users
  // and delete the alloca.
  // ...
    CallInst *Call = cast<CallInst>(Inst);
    if (AllocaInst *Alloca = dyn_cast<AllocaInst>(Arg)) {
      // ... (bail out unless every user is an objc_*Weak call on the alloca)
        const Instruction *UserInst = cast<Instruction>(U);
      // ...
      for (auto UI = Alloca->user_begin(), UE = Alloca->user_end(); UI != UE;) {
        CallInst *UserInst = cast<CallInst>(*UI++);
        // ... (rewrite or erase each weak runtime call using the dead alloca)
      }
      Alloca->eraseFromParent();
    }
  // ...
}
/// Identify program paths which execute sequences of retains and releases
/// which can be eliminated.
bool ObjCARCOpt::OptimizeSequences(Function &F) {
  // ... (Retains, Releases and BBStates hold the results of the dataflow)

  // Analyze the CFG of the function, and all instructions.
  bool NestingDetected = Visit(F, BBStates, Retains, Releases);

  // Transform.
  bool AnyPairsCompletelyEliminated = PerformCodePlacement(BBStates, Retains,
                                                           Releases,
                                                           F.getParent());
  // Cleanup.
  MultiOwnersSet.clear();

  return AnyPairsCompletelyEliminated && NestingDetected;
}
// Excerpts from the return-value helpers (HasSafePathToPredecessorCall,
// FindPredecessorRetainWithSafePath and
// FindPredecessorAutoreleaseWithSafePath): each runs FindDependencies and only
// accepts a single, exact dependency.

  // In HasSafePathToPredecessorCall: the retain must depend on exactly one
  // earlier call whose return value is the retained pointer.
  FindDependencies(CanChangeRetainCount, Arg, Retain->getParent(), Retain,
                   DepInsts, Visited, PA);
  if (DepInsts.size() != 1)
    return false;
  auto *Call = dyn_cast_or_null<CallInst>(*DepInsts.begin());
  // Check that the pointer is the return value of the call.
  if (!Call || Arg != Call)
    return false;

  // In FindPredecessorRetainWithSafePath: the autorelease must depend on
  // exactly one earlier retain of the same argument.
  FindDependencies(CanChangeRetainCount, Arg,
                   BB, Autorelease, DepInsts, Visited, PA);
  if (DepInsts.size() != 1)
    return nullptr;
  auto *Retain = dyn_cast_or_null<CallInst>(*DepInsts.begin());

  // In FindPredecessorAutoreleaseWithSafePath: the return must depend on
  // exactly one autorelease of the returned pointer.
  FindDependencies(NeedsPositiveRetainCount, Arg,
                   BB, Ret, DepInsts, V, PA);
  if (DepInsts.size() != 1)
    return nullptr;
/// Look for a retain+autorelease pair that forwards a function's return value,
/// and delete the pair.
void ObjCARCOpt::OptimizeReturns(Function &F) {
  if (!F.getReturnType()->isPointerTy())
    return;

  DEBUG(dbgs() << "\n== ObjCARCOpt::OptimizeReturns ==\n");

  SmallPtrSet<Instruction *, 4> DependingInstructions;
  SmallPtrSet<const BasicBlock *, 4> Visited;
  for (BasicBlock &BB : F) {
    ReturnInst *Ret = dyn_cast<ReturnInst>(&BB.back());

    DEBUG(dbgs() << "Visiting: " << *Ret << "\n");

    if (!Ret)
      continue;

    const Value *Arg = GetRCIdentityRoot(Ret->getOperand(0));

    // Look for an autorelease that depends on Arg and precedes Ret, with
    // nothing in between that needs a positive ref count.
    CallInst *Autorelease = FindPredecessorAutoreleaseWithSafePath(
        Arg, &BB, Ret, DependingInstructions, Visited, PA);
    DependingInstructions.clear();
    Visited.clear();

    if (!Autorelease)
      continue;

    CallInst *Retain = FindPredecessorRetainWithSafePath(
        Arg, &BB, Autorelease, DependingInstructions, Visited, PA);
    DependingInstructions.clear();
    Visited.clear();

    if (!Retain)
      continue;

    // Check that there is nothing that can affect the reference count between
    // the retain and the call. Note that Retain need not be in BB.
    bool HasSafePathToCall =
        HasSafePathToPredecessorCall(Arg, Retain, DependingInstructions,
                                     Visited, PA);
    DependingInstructions.clear();
    Visited.clear();

    if (!HasSafePathToCall)
      continue;

    // If so, we can zap the retain and autorelease.
    Changed = true;
    ++NumRets;
    DEBUG(dbgs() << "Erasing: " << *Retain << "\nErasing: "
                 << *Autorelease << "\n");
    EraseInstruction(Retain);
    EraseInstruction(Autorelease);
  }
}
#ifndef NDEBUG
void
ObjCARCOpt::GatherStatistics(Function &F, bool AfterOptimization) {
  llvm::Statistic &NumRetains =
      AfterOptimization ? NumRetainsAfterOpt : NumRetainsBeforeOpt;
  llvm::Statistic &NumReleases =
      AfterOptimization ? NumReleasesAfterOpt : NumReleasesBeforeOpt;
  // ... (count the objc_retain and objc_release calls in F)
}
#endif
bool ObjCARCOpt::doInitialization(Module &M) {
  if (!EnableARCOpts)
    return false;

  // If nothing in the Module uses ARC, don't do anything.
  Run = ModuleHasARC(M);
  if (!Run)
    return false;

  MDKindCache.init(&M);
  // ... (initialize the cached runtime entry points)
  return false;
}

bool ObjCARCOpt::runOnFunction(Function &F) {
  if (!EnableARCOpts || !Run)
    return false;

  Changed = false;

  DEBUG(dbgs() << "<<< ObjCARCOpt: Visiting Function: " << F.getName() << " >>>"
               "\n");

  PA.setAA(&getAnalysis<AliasAnalysis>());

#ifndef NDEBUG
  if (AreStatisticsEnabled())
    GatherStatistics(F, false);
#endif

  // Preliminary optimizations. This also computes UsedInThisFunction.
  OptimizeIndividualCalls(F);

  // Optimizations for weak pointers, run only if weak runtime calls were seen.
  // ...
    OptimizeWeakCalls(F);

  // Optimizations for retain+release pairs; iterate until no more pairs are
  // eliminated.
  // ...
      while (OptimizeSequences(F)) {}

  // Optimizations if objc_autorelease is used.
  // ...
    OptimizeReturns(F);

#ifndef NDEBUG
  if (AreStatisticsEnabled())
    GatherStatistics(F, true);
#endif

  return Changed;
}

void ObjCARCOpt::releaseMemory() {
  PA.clear();
}
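// Usage sketch (illustrative, not from the original source): clients normally
// obtain this pass via createObjCARCOptPass(), e.g. with the legacy pass
// manager:
//
//   #include "llvm/Transforms/ObjCARC.h"
//   ...
//   legacy::PassManager PM;
//   PM.add(createObjCARCOptPass());
//   PM.run(*M);
//
// initializeObjCARCOptPass(Registry) must have been called for the pass to be
// available under the -objc-arc name, and the global EnableARCOpts flag can be
// used to disable all ARC optimizations.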