ObjCARCOpts.cpp
1 //===- ObjCARCOpts.cpp - ObjC ARC Optimization ----------------------------===//
2 //
3 // The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 /// \file
10 /// This file defines ObjC ARC optimizations. ARC stands for Automatic
11 /// Reference Counting and is a system for managing reference counts for objects
12 /// in Objective C.
13 ///
14 /// The optimizations performed include elimination of redundant, partially
15 /// redundant, and inconsequential reference count operations, elimination of
16 /// redundant weak pointer operations, and numerous minor simplifications.
17 ///
18 /// WARNING: This file knows about certain library functions. It recognizes them
19 /// by name, and hardwires knowledge of their semantics.
20 ///
21 /// WARNING: This file knows about how certain Objective-C library functions are
22 /// used. Naive LLVM IR transformations which would otherwise be
23 /// behavior-preserving may break these assumptions.
24 ///
25 //===----------------------------------------------------------------------===//
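// As a concrete illustration of the rewrites described above (a sketch, not
// code from this file), a retain/release pair on the same pointer with nothing
// in between that can observe the reference count:
//
//   %0 = tail call i8* @objc_retain(i8* %x)
//   call void @use(i8* %x)            ; assumed not to release any objects
//   tail call void @objc_release(i8* %x), !clang.imprecise_release !0
//
// can be deleted, leaving only the call to @use.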
26 
27 #include "ARCRuntimeEntryPoints.h"
28 #include "BlotMapVector.h"
29 #include "DependencyAnalysis.h"
30 #include "ObjCARC.h"
31 #include "ProvenanceAnalysis.h"
32 #include "PtrState.h"
33 #include "llvm/ADT/DenseMap.h"
34 #include "llvm/ADT/DenseSet.h"
35 #include "llvm/ADT/STLExtras.h"
36 #include "llvm/ADT/SmallPtrSet.h"
37 #include "llvm/ADT/Statistic.h"
39 #include "llvm/IR/CFG.h"
40 #include "llvm/IR/IRBuilder.h"
41 #include "llvm/IR/LLVMContext.h"
42 #include "llvm/Support/Debug.h"
44 
45 using namespace llvm;
46 using namespace llvm::objcarc;
47 
48 #define DEBUG_TYPE "objc-arc-opts"
49 
50 /// \defgroup ARCUtilities Utility declarations/definitions specific to ARC.
51 /// @{
52 
53 /// \brief This is similar to GetRCIdentityRoot but it stops as soon
54 /// as it finds a value with multiple uses.
55 static const Value *FindSingleUseIdentifiedObject(const Value *Arg) {
56  // ConstantData (like ConstantPointerNull and UndefValue) is used across
57  // modules. It's never a single-use value.
58  if (isa<ConstantData>(Arg))
59  return nullptr;
60 
61  if (Arg->hasOneUse()) {
62  if (const BitCastInst *BC = dyn_cast<BitCastInst>(Arg))
63  return FindSingleUseIdentifiedObject(BC->getOperand(0));
64  if (const GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(Arg))
65  if (GEP->hasAllZeroIndices())
66  return FindSingleUseIdentifiedObject(GEP->getPointerOperand());
67  if (IsForwarding(GetBasicARCInstKind(Arg)))
68  return FindSingleUseIdentifiedObject(
69  cast<CallInst>(Arg)->getArgOperand(0));
70  if (!IsObjCIdentifiedObject(Arg))
71  return nullptr;
72  return Arg;
73  }
74 
75  // If we found an identifiable object but it has multiple uses, and those
76  // uses are all trivial, we can still consider this to be a single-use value.
77  if (IsObjCIdentifiedObject(Arg)) {
78  for (const User *U : Arg->users())
79  if (!U->use_empty() || GetRCIdentityRoot(U) != Arg)
80  return nullptr;
81 
82  return Arg;
83  }
84 
85  return nullptr;
86 }
87 
88 /// @}
89 ///
90 /// \defgroup ARCOpt ARC Optimization.
91 /// @{
92 
93 // TODO: On code like this:
94 //
95 // objc_retain(%x)
96 // stuff_that_cannot_release()
97 // objc_autorelease(%x)
98 // stuff_that_cannot_release()
99 // objc_retain(%x)
100 // stuff_that_cannot_release()
101 // objc_autorelease(%x)
102 //
103 // The second retain and autorelease can be deleted.
104 
105 // TODO: It should be possible to delete
106 // objc_autoreleasePoolPush and objc_autoreleasePoolPop
107 // pairs if nothing is actually autoreleased between them. Also, autorelease
108 // calls followed by objc_autoreleasePoolPop calls (perhaps in ObjC++ code
109 // after inlining) can be turned into plain release calls.
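//
// A sketch of the push/pop case (illustrative IR, not from this file):
//
//   %pool = call i8* @objc_autoreleasePoolPush()
//   call void @work()                 ; assumed to autorelease nothing
//   call void @objc_autoreleasePoolPop(i8* %pool)
//
// Both runtime calls could be removed.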
110 
111 // TODO: Critical-edge splitting. If the optimal insertion point is
112 // a critical edge, the current algorithm has to fail, because it doesn't
113 // know how to split edges. It should be possible to make the optimizer
114 // think in terms of edges, rather than blocks, and then split critical
115 // edges on demand.
116 
117 // TODO: OptimizeSequences could generalized to be Interprocedural.
118 
119 // TODO: Recognize that a bunch of other objc runtime calls have
120 // non-escaping arguments and non-releasing arguments, and may be
121 // non-autoreleasing.
122 
123 // TODO: Sink autorelease calls as far as possible. Unfortunately we
124 // usually can't sink them past other calls, which would be the main
125 // case where it would be useful.
126 
127 // TODO: The pointer returned from objc_loadWeakRetained is retained.
128 
129 // TODO: Delete release+retain pairs (rare).
130 
131 STATISTIC(NumNoops, "Number of no-op objc calls eliminated");
132 STATISTIC(NumPartialNoops, "Number of partially no-op objc calls eliminated");
133 STATISTIC(NumAutoreleases,"Number of autoreleases converted to releases");
134 STATISTIC(NumRets, "Number of return value forwarding "
135  "retain+autoreleases eliminated");
136 STATISTIC(NumRRs, "Number of retain+release paths eliminated");
137 STATISTIC(NumPeeps, "Number of calls peephole-optimized");
138 #ifndef NDEBUG
139 STATISTIC(NumRetainsBeforeOpt,
140  "Number of retains before optimization");
141 STATISTIC(NumReleasesBeforeOpt,
142  "Number of releases before optimization");
143 STATISTIC(NumRetainsAfterOpt,
144  "Number of retains after optimization");
145 STATISTIC(NumReleasesAfterOpt,
146  "Number of releases after optimization");
147 #endif
148 
149 namespace {
150  /// \brief Per-BasicBlock state.
151  class BBState {
152  /// The number of unique control paths from the entry which can reach this
153  /// block.
154  unsigned TopDownPathCount;
155 
156  /// The number of unique control paths to exits from this block.
157  unsigned BottomUpPathCount;
158 
159  /// The top-down traversal uses this to record information known about a
160  /// pointer at the bottom of each block.
161  BlotMapVector<const Value *, TopDownPtrState> PerPtrTopDown;
162 
163  /// The bottom-up traversal uses this to record information known about a
164  /// pointer at the top of each block.
165  BlotMapVector<const Value *, BottomUpPtrState> PerPtrBottomUp;
166 
167  /// Effective predecessors of the current block ignoring ignorable edges and
168  /// ignored backedges.
169  SmallVector<BasicBlock *, 2> Preds;
170 
171  /// Effective successors of the current block ignoring ignorable edges and
172  /// ignored backedges.
173  SmallVector<BasicBlock *, 2> Succs;
174 
175  public:
176  static const unsigned OverflowOccurredValue;
177 
178  BBState() : TopDownPathCount(0), BottomUpPathCount(0) { }
179 
180  typedef decltype(PerPtrTopDown)::iterator top_down_ptr_iterator;
181  typedef decltype(PerPtrTopDown)::const_iterator const_top_down_ptr_iterator;
182 
183  top_down_ptr_iterator top_down_ptr_begin() { return PerPtrTopDown.begin(); }
184  top_down_ptr_iterator top_down_ptr_end() { return PerPtrTopDown.end(); }
185  const_top_down_ptr_iterator top_down_ptr_begin() const {
186  return PerPtrTopDown.begin();
187  }
188  const_top_down_ptr_iterator top_down_ptr_end() const {
189  return PerPtrTopDown.end();
190  }
191  bool hasTopDownPtrs() const {
192  return !PerPtrTopDown.empty();
193  }
194 
195  typedef decltype(PerPtrBottomUp)::iterator bottom_up_ptr_iterator;
196  typedef decltype(
197  PerPtrBottomUp)::const_iterator const_bottom_up_ptr_iterator;
198 
199  bottom_up_ptr_iterator bottom_up_ptr_begin() {
200  return PerPtrBottomUp.begin();
201  }
202  bottom_up_ptr_iterator bottom_up_ptr_end() { return PerPtrBottomUp.end(); }
203  const_bottom_up_ptr_iterator bottom_up_ptr_begin() const {
204  return PerPtrBottomUp.begin();
205  }
206  const_bottom_up_ptr_iterator bottom_up_ptr_end() const {
207  return PerPtrBottomUp.end();
208  }
209  bool hasBottomUpPtrs() const {
210  return !PerPtrBottomUp.empty();
211  }
212 
213  /// Mark this block as being an entry block, which has one path from the
214  /// entry by definition.
215  void SetAsEntry() { TopDownPathCount = 1; }
216 
217  /// Mark this block as being an exit block, which has one path to an exit by
218  /// definition.
219  void SetAsExit() { BottomUpPathCount = 1; }
220 
221  /// Attempt to find the PtrState object describing the top down state for
222  /// pointer Arg. Return a new initialized PtrState describing the top down
223  /// state for Arg if we do not find one.
224  TopDownPtrState &getPtrTopDownState(const Value *Arg) {
225  return PerPtrTopDown[Arg];
226  }
227 
228  /// Attempt to find the PtrState object describing the bottom up state for
229  /// pointer Arg. Return a new initialized PtrState describing the bottom up
230  /// state for Arg if we do not find one.
231  BottomUpPtrState &getPtrBottomUpState(const Value *Arg) {
232  return PerPtrBottomUp[Arg];
233  }
234 
235  /// Attempt to find the PtrState object describing the bottom up state for
236  /// pointer Arg.
237  bottom_up_ptr_iterator findPtrBottomUpState(const Value *Arg) {
238  return PerPtrBottomUp.find(Arg);
239  }
240 
241  void clearBottomUpPointers() {
242  PerPtrBottomUp.clear();
243  }
244 
245  void clearTopDownPointers() {
246  PerPtrTopDown.clear();
247  }
248 
249  void InitFromPred(const BBState &Other);
250  void InitFromSucc(const BBState &Other);
251  void MergePred(const BBState &Other);
252  void MergeSucc(const BBState &Other);
253 
254  /// Compute the number of possible unique paths from an entry to an exit
255  /// which pass through this block. This is only valid after both the
256  /// top-down and bottom-up traversals are complete.
257  ///
258  /// Returns true if overflow occurred. Returns false if overflow did not
259  /// occur.
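  /// For example, with TopDownPathCount == 3 and BottomUpPathCount == 4 the
  /// product is 12, PathCount is set to 12, and false is returned; if the
  /// 64-bit product has any bits above bit 31 set, or the truncated value
  /// equals OverflowOccurredValue, true is returned instead.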
260  bool GetAllPathCountWithOverflow(unsigned &PathCount) const {
261  if (TopDownPathCount == OverflowOccurredValue ||
262  BottomUpPathCount == OverflowOccurredValue)
263  return true;
264  unsigned long long Product =
265  (unsigned long long)TopDownPathCount*BottomUpPathCount;
266  // Overflow occurred if any of the upper bits of Product are set or if all
267  // the lower bits of Product are all set.
268  return (Product >> 32) ||
269  ((PathCount = Product) == OverflowOccurredValue);
270  }
271 
272  // Specialized CFG utilities.
273  typedef SmallVectorImpl<BasicBlock *>::const_iterator edge_iterator;
274  edge_iterator pred_begin() const { return Preds.begin(); }
275  edge_iterator pred_end() const { return Preds.end(); }
276  edge_iterator succ_begin() const { return Succs.begin(); }
277  edge_iterator succ_end() const { return Succs.end(); }
278 
279  void addSucc(BasicBlock *Succ) { Succs.push_back(Succ); }
280  void addPred(BasicBlock *Pred) { Preds.push_back(Pred); }
281 
282  bool isExit() const { return Succs.empty(); }
283  };
284 
285  const unsigned BBState::OverflowOccurredValue = 0xffffffff;
286 }
287 
288 namespace llvm {
289  raw_ostream &operator<<(raw_ostream &OS,
290  BBState &BBState) LLVM_ATTRIBUTE_UNUSED;
291 }
292 
293 void BBState::InitFromPred(const BBState &Other) {
294  PerPtrTopDown = Other.PerPtrTopDown;
295  TopDownPathCount = Other.TopDownPathCount;
296 }
297 
298 void BBState::InitFromSucc(const BBState &Other) {
299  PerPtrBottomUp = Other.PerPtrBottomUp;
300  BottomUpPathCount = Other.BottomUpPathCount;
301 }
302 
303 /// The top-down traversal uses this to merge information about predecessors to
304 /// form the initial state for a new block.
305 void BBState::MergePred(const BBState &Other) {
306  if (TopDownPathCount == OverflowOccurredValue)
307  return;
308 
309  // Other.TopDownPathCount can be 0, in which case it is either dead or a
310  // loop backedge. Loop backedges are special.
311  TopDownPathCount += Other.TopDownPathCount;
312 
313  // In order to be consistent, we clear the top down pointers when, through
314  // this addition, TopDownPathCount becomes OverflowOccurredValue even though
315  // "true" overflow has not occurred.
316  if (TopDownPathCount == OverflowOccurredValue) {
317  clearTopDownPointers();
318  return;
319  }
320 
321  // Check for overflow. If we have overflow, fall back to conservative
322  // behavior.
323  if (TopDownPathCount < Other.TopDownPathCount) {
324  TopDownPathCount = OverflowOccurredValue;
325  clearTopDownPointers();
326  return;
327  }
328 
329  // For each entry in the other set, if our set has an entry with the same key,
330  // merge the entries. Otherwise, copy the entry and merge it with an empty
331  // entry.
332  for (auto MI = Other.top_down_ptr_begin(), ME = Other.top_down_ptr_end();
333  MI != ME; ++MI) {
334  auto Pair = PerPtrTopDown.insert(*MI);
335  Pair.first->second.Merge(Pair.second ? TopDownPtrState() : MI->second,
336  /*TopDown=*/true);
337  }
338 
339  // For each entry in our set, if the other set doesn't have an entry with the
340  // same key, force it to merge with an empty entry.
341  for (auto MI = top_down_ptr_begin(), ME = top_down_ptr_end(); MI != ME; ++MI)
342  if (Other.PerPtrTopDown.find(MI->first) == Other.PerPtrTopDown.end())
343  MI->second.Merge(TopDownPtrState(), /*TopDown=*/true);
344 }
345 
346 /// The bottom-up traversal uses this to merge information about successors to
347 /// form the initial state for a new block.
348 void BBState::MergeSucc(const BBState &Other) {
349  if (BottomUpPathCount == OverflowOccurredValue)
350  return;
351 
352  // Other.BottomUpPathCount can be 0, in which case it is either dead or a
353  // loop backedge. Loop backedges are special.
354  BottomUpPathCount += Other.BottomUpPathCount;
355 
356  // In order to be consistent, we clear the bottom up pointers when, through
357  // this addition, BottomUpPathCount becomes OverflowOccurredValue even though
358  // "true" overflow has not occurred.
359  if (BottomUpPathCount == OverflowOccurredValue) {
360  clearBottomUpPointers();
361  return;
362  }
363 
364  // Check for overflow. If we have overflow, fall back to conservative
365  // behavior.
366  if (BottomUpPathCount < Other.BottomUpPathCount) {
367  BottomUpPathCount = OverflowOccurredValue;
368  clearBottomUpPointers();
369  return;
370  }
371 
372  // For each entry in the other set, if our set has an entry with the
373  // same key, merge the entries. Otherwise, copy the entry and merge
374  // it with an empty entry.
375  for (auto MI = Other.bottom_up_ptr_begin(), ME = Other.bottom_up_ptr_end();
376  MI != ME; ++MI) {
377  auto Pair = PerPtrBottomUp.insert(*MI);
378  Pair.first->second.Merge(Pair.second ? BottomUpPtrState() : MI->second,
379  /*TopDown=*/false);
380  }
381 
382  // For each entry in our set, if the other set doesn't have an entry
383  // with the same key, force it to merge with an empty entry.
384  for (auto MI = bottom_up_ptr_begin(), ME = bottom_up_ptr_end(); MI != ME;
385  ++MI)
386  if (Other.PerPtrBottomUp.find(MI->first) == Other.PerPtrBottomUp.end())
387  MI->second.Merge(BottomUpPtrState(), /*TopDown=*/false);
388 }
389 
390 raw_ostream &llvm::operator<<(raw_ostream &OS, BBState &BBInfo) {
391  // Dump the pointers we are tracking.
392  OS << " TopDown State:\n";
393  if (!BBInfo.hasTopDownPtrs()) {
394  DEBUG(llvm::dbgs() << " NONE!\n");
395  } else {
396  for (auto I = BBInfo.top_down_ptr_begin(), E = BBInfo.top_down_ptr_end();
397  I != E; ++I) {
398  const PtrState &P = I->second;
399  OS << " Ptr: " << *I->first
400  << "\n KnownSafe: " << (P.IsKnownSafe()?"true":"false")
401  << "\n ImpreciseRelease: "
402  << (P.IsTrackingImpreciseReleases()?"true":"false") << "\n"
403  << " HasCFGHazards: "
404  << (P.IsCFGHazardAfflicted()?"true":"false") << "\n"
405  << " KnownPositive: "
406  << (P.HasKnownPositiveRefCount()?"true":"false") << "\n"
407  << " Seq: "
408  << P.GetSeq() << "\n";
409  }
410  }
411 
412  OS << " BottomUp State:\n";
413  if (!BBInfo.hasBottomUpPtrs()) {
414  DEBUG(llvm::dbgs() << " NONE!\n");
415  } else {
416  for (auto I = BBInfo.bottom_up_ptr_begin(), E = BBInfo.bottom_up_ptr_end();
417  I != E; ++I) {
418  const PtrState &P = I->second;
419  OS << " Ptr: " << *I->first
420  << "\n KnownSafe: " << (P.IsKnownSafe()?"true":"false")
421  << "\n ImpreciseRelease: "
422  << (P.IsTrackingImpreciseReleases()?"true":"false") << "\n"
423  << " HasCFGHazards: "
424  << (P.IsCFGHazardAfflicted()?"true":"false") << "\n"
425  << " KnownPositive: "
426  << (P.HasKnownPositiveRefCount()?"true":"false") << "\n"
427  << " Seq: "
428  << P.GetSeq() << "\n";
429  }
430  }
431 
432  return OS;
433 }
434 
435 namespace {
436 
437  /// \brief The main ARC optimization pass.
438  class ObjCARCOpt : public FunctionPass {
439  bool Changed;
440  ProvenanceAnalysis PA;
441 
442  /// A cache of references to runtime entry point constants.
443  ARCRuntimeEntryPoints EP;
444 
445  /// A cache of MDKinds that can be passed into other functions to propagate
446  /// MDKind identifiers.
447  ARCMDKindCache MDKindCache;
448 
449  /// A flag indicating whether this optimization pass should run.
450  bool Run;
451 
452  /// Flags which determine whether each of the interesting runtime functions
453  /// is in fact used in the current function.
454  unsigned UsedInThisFunction;
455 
456  bool OptimizeRetainRVCall(Function &F, Instruction *RetainRV);
457  void OptimizeAutoreleaseRVCall(Function &F, Instruction *AutoreleaseRV,
458  ARCInstKind &Class);
459  void OptimizeIndividualCalls(Function &F);
460 
461  void CheckForCFGHazards(const BasicBlock *BB,
462  DenseMap<const BasicBlock *, BBState> &BBStates,
463  BBState &MyStates) const;
464  bool VisitInstructionBottomUp(Instruction *Inst, BasicBlock *BB,
465  BlotMapVector<Value *, RRInfo> &Retains,
466  BBState &MyStates);
467  bool VisitBottomUp(BasicBlock *BB,
468  DenseMap<const BasicBlock *, BBState> &BBStates,
469  BlotMapVector<Value *, RRInfo> &Retains);
470  bool VisitInstructionTopDown(Instruction *Inst,
471  DenseMap<Value *, RRInfo> &Releases,
472  BBState &MyStates);
473  bool VisitTopDown(BasicBlock *BB,
474  DenseMap<const BasicBlock *, BBState> &BBStates,
475  DenseMap<Value *, RRInfo> &Releases);
476  bool Visit(Function &F, DenseMap<const BasicBlock *, BBState> &BBStates,
477  BlotMapVector<Value *, RRInfo> &Retains,
478  DenseMap<Value *, RRInfo> &Releases);
479 
480  void MoveCalls(Value *Arg, RRInfo &RetainsToMove, RRInfo &ReleasesToMove,
481  BlotMapVector<Value *, RRInfo> &Retains,
482  DenseMap<Value *, RRInfo> &Releases,
483  SmallVectorImpl<Instruction *> &DeadInsts, Module *M);
484 
485  bool
486  PairUpRetainsAndReleases(DenseMap<const BasicBlock *, BBState> &BBStates,
487  BlotMapVector<Value *, RRInfo> &Retains,
488  DenseMap<Value *, RRInfo> &Releases, Module *M,
489  Instruction *Retain,
490  SmallVectorImpl<Instruction *> &DeadInsts,
491  RRInfo &RetainsToMove, RRInfo &ReleasesToMove,
492  Value *Arg, bool KnownSafe,
493  bool &AnyPairsCompletelyEliminated);
494 
495  bool PerformCodePlacement(DenseMap<const BasicBlock *, BBState> &BBStates,
496  BlotMapVector<Value *, RRInfo> &Retains,
497  DenseMap<Value *, RRInfo> &Releases, Module *M);
498 
499  void OptimizeWeakCalls(Function &F);
500 
501  bool OptimizeSequences(Function &F);
502 
503  void OptimizeReturns(Function &F);
504 
505 #ifndef NDEBUG
506  void GatherStatistics(Function &F, bool AfterOptimization = false);
507 #endif
508 
509  void getAnalysisUsage(AnalysisUsage &AU) const override;
510  bool doInitialization(Module &M) override;
511  bool runOnFunction(Function &F) override;
512  void releaseMemory() override;
513 
514  public:
515  static char ID;
516  ObjCARCOpt() : FunctionPass(ID) {
517  initializeObjCARCOptPass(*PassRegistry::getPassRegistry());
518  }
519  };
520 }
521 
522 char ObjCARCOpt::ID = 0;
523 INITIALIZE_PASS_BEGIN(ObjCARCOpt,
524  "objc-arc", "ObjC ARC optimization", false, false)
525 INITIALIZE_PASS_DEPENDENCY(ObjCARCAAWrapperPass)
526 INITIALIZE_PASS_END(ObjCARCOpt,
527  "objc-arc", "ObjC ARC optimization", false, false)
528 
529 Pass *llvm::createObjCARCOptPass() {
530  return new ObjCARCOpt();
531 }
532 
533 void ObjCARCOpt::getAnalysisUsage(AnalysisUsage &AU) const {
534  AU.addRequired<ObjCARCAAWrapperPass>();
535  AU.addRequired<AAResultsWrapperPass>();
536  // ARC optimization doesn't currently split critical edges.
537  AU.setPreservesCFG();
538 }
539 
540 /// Turn objc_retainAutoreleasedReturnValue into objc_retain if the operand is
541 /// not a return value. Or, if it can be paired with an
542 /// objc_autoreleaseReturnValue, delete the pair and return true.
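/// A sketch of the non-return-value case (illustrative IR, not from this file):
///   %x = load i8*, i8** %ivar
///   %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %x)
/// Here %x is not the result of the immediately preceding call, so the call is
/// demoted to @objc_retain. Conversely, an @objc_autoreleaseReturnValue of the
/// same pointer immediately before it allows both calls to be erased.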
543 bool
544 ObjCARCOpt::OptimizeRetainRVCall(Function &F, Instruction *RetainRV) {
545  // Check for the argument being from an immediately preceding call or invoke.
546  const Value *Arg = GetArgRCIdentityRoot(RetainRV);
547  ImmutableCallSite CS(Arg);
548  if (const Instruction *Call = CS.getInstruction()) {
549  if (Call->getParent() == RetainRV->getParent()) {
550  BasicBlock::const_iterator I = Call->getIterator();
551  ++I;
552  while (IsNoopInstruction(&*I))
553  ++I;
554  if (&*I == RetainRV)
555  return false;
556  } else if (const InvokeInst *II = dyn_cast<InvokeInst>(Call)) {
557  BasicBlock *RetainRVParent = RetainRV->getParent();
558  if (II->getNormalDest() == RetainRVParent) {
559  BasicBlock::const_iterator I = RetainRVParent->begin();
560  while (IsNoopInstruction(&*I))
561  ++I;
562  if (&*I == RetainRV)
563  return false;
564  }
565  }
566  }
567 
568  // Check for being preceded by an objc_autoreleaseReturnValue on the same
569  // pointer. In this case, we can delete the pair.
570  BasicBlock::iterator I = RetainRV->getIterator(),
571  Begin = RetainRV->getParent()->begin();
572  if (I != Begin) {
573  do
574  --I;
575  while (I != Begin && IsNoopInstruction(&*I));
576  if (GetBasicARCInstKind(&*I) == ARCInstKind::AutoreleaseRV &&
577  GetArgRCIdentityRoot(&*I) == Arg) {
578  Changed = true;
579  ++NumPeeps;
580 
581  DEBUG(dbgs() << "Erasing autoreleaseRV,retainRV pair: " << *I << "\n"
582  << "Erasing " << *RetainRV << "\n");
583 
584  EraseInstruction(&*I);
585  EraseInstruction(RetainRV);
586  return true;
587  }
588  }
589 
590  // Turn it to a plain objc_retain.
591  Changed = true;
592  ++NumPeeps;
593 
594  DEBUG(dbgs() << "Transforming objc_retainAutoreleasedReturnValue => "
595  "objc_retain since the operand is not a return value.\n"
596  "Old = " << *RetainRV << "\n");
597 
598  Constant *NewDecl = EP.get(ARCRuntimeEntryPointKind::Retain);
599  cast<CallInst>(RetainRV)->setCalledFunction(NewDecl);
600 
601  DEBUG(dbgs() << "New = " << *RetainRV << "\n");
602 
603  return false;
604 }
605 
606 /// Turn objc_autoreleaseReturnValue into objc_autorelease if the result is not
607 /// used as a return value.
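/// A sketch of the rewrite (illustrative IR, not from this file):
///   %0 = tail call i8* @objc_autoreleaseReturnValue(i8* %x)
///   call void @use(i8* %x)
///   ret void
/// Since no ret or retainRV consumes the pointer, the call becomes a plain
/// (non-tail) @objc_autorelease.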
608 void ObjCARCOpt::OptimizeAutoreleaseRVCall(Function &F,
609  Instruction *AutoreleaseRV,
610  ARCInstKind &Class) {
611  // Check for a return of the pointer value.
612  const Value *Ptr = GetArgRCIdentityRoot(AutoreleaseRV);
613 
614  // If the argument is ConstantPointerNull or UndefValue, its other users
615  // aren't actually interesting to look at.
616  if (isa<ConstantData>(Ptr))
617  return;
618 
619  SmallVector<const Value *, 2> Users;
620  Users.push_back(Ptr);
621  do {
622  Ptr = Users.pop_back_val();
623  for (const User *U : Ptr->users()) {
624  if (isa<ReturnInst>(U) || GetBasicARCInstKind(U) == ARCInstKind::RetainRV)
625  return;
626  if (isa<BitCastInst>(U))
627  Users.push_back(U);
628  }
629  } while (!Users.empty());
630 
631  Changed = true;
632  ++NumPeeps;
633 
634  DEBUG(dbgs() << "Transforming objc_autoreleaseReturnValue => "
635  "objc_autorelease since its operand is not used as a return "
636  "value.\n"
637  "Old = " << *AutoreleaseRV << "\n");
638 
639  CallInst *AutoreleaseRVCI = cast<CallInst>(AutoreleaseRV);
640  Constant *NewDecl = EP.get(ARCRuntimeEntryPointKind::Autorelease);
641  AutoreleaseRVCI->setCalledFunction(NewDecl);
642  AutoreleaseRVCI->setTailCall(false); // Never tail call objc_autorelease.
643  Class = ARCInstKind::Autorelease;
644 
645  DEBUG(dbgs() << "New: " << *AutoreleaseRV << "\n");
646 
647 }
648 
649 /// Visit each call, one at a time, and make simplifications without doing any
650 /// additional analysis.
651 void ObjCARCOpt::OptimizeIndividualCalls(Function &F) {
652  DEBUG(dbgs() << "\n== ObjCARCOpt::OptimizeIndividualCalls ==\n");
653  // Reset all the flags in preparation for recomputing them.
654  UsedInThisFunction = 0;
655 
656  // Visit all objc_* calls in F.
657  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
658  Instruction *Inst = &*I++;
659 
660  ARCInstKind Class = GetBasicARCInstKind(Inst);
661 
662  DEBUG(dbgs() << "Visiting: Class: " << Class << "; " << *Inst << "\n");
663 
664  switch (Class) {
665  default: break;
666 
667  // Delete no-op casts. These function calls have special semantics, but
668  // the semantics are entirely implemented via lowering in the front-end,
669  // so by the time they reach the optimizer, they are just no-op calls
670  // which return their argument.
671  //
672  // There are gray areas here, as the ability to cast reference-counted
673  // pointers to raw void* and back allows code to break ARC assumptions,
674  // however these are currently considered to be unimportant.
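  //
  // For example (an illustrative sketch), in
  //   %1 = call i8* @objc_retainedObject(i8* %0)
  // the call simply forwards %0, so its uses can be replaced with %0 and the
  // call erased.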
675  case ARCInstKind::NoopCast:
676  Changed = true;
677  ++NumNoops;
678  DEBUG(dbgs() << "Erasing no-op cast: " << *Inst << "\n");
679  EraseInstruction(Inst);
680  continue;
681 
682  // If the pointer-to-weak-pointer is null, it's undefined behavior.
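  //
  // For example (an illustrative sketch), a call such as
  //   %v = call i8* @objc_loadWeak(i8** null)
  // is undefined, so it is replaced by a store of undef through the pointer
  // and an undef result, as done below.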
683  case ARCInstKind::StoreWeak:
684  case ARCInstKind::InitWeak:
685  case ARCInstKind::LoadWeak:
686  case ARCInstKind::LoadWeakRetained:
687  case ARCInstKind::DestroyWeak: {
688  CallInst *CI = cast<CallInst>(Inst);
689  if (IsNullOrUndef(CI->getArgOperand(0))) {
690  Changed = true;
691  Type *Ty = CI->getArgOperand(0)->getType();
692  new StoreInst(UndefValue::get(cast<PointerType>(Ty)->getElementType()),
693  Constant::getNullValue(Ty),
694  CI);
695  llvm::Value *NewValue = UndefValue::get(CI->getType());
696  DEBUG(dbgs() << "A null pointer-to-weak-pointer is undefined behavior."
697  "\nOld = " << *CI << "\nNew = " << *NewValue << "\n");
698  CI->replaceAllUsesWith(NewValue);
699  CI->eraseFromParent();
700  continue;
701  }
702  break;
703  }
704  case ARCInstKind::CopyWeak:
705  case ARCInstKind::MoveWeak: {
706  CallInst *CI = cast<CallInst>(Inst);
707  if (IsNullOrUndef(CI->getArgOperand(0)) ||
708  IsNullOrUndef(CI->getArgOperand(1))) {
709  Changed = true;
710  Type *Ty = CI->getArgOperand(0)->getType();
711  new StoreInst(UndefValue::get(cast<PointerType>(Ty)->getElementType()),
712  Constant::getNullValue(Ty),
713  CI);
714 
715  llvm::Value *NewValue = UndefValue::get(CI->getType());
716  DEBUG(dbgs() << "A null pointer-to-weak-pointer is undefined behavior."
717  "\nOld = " << *CI << "\nNew = " << *NewValue << "\n");
718 
719  CI->replaceAllUsesWith(NewValue);
720  CI->eraseFromParent();
721  continue;
722  }
723  break;
724  }
725  case ARCInstKind::RetainRV:
726  if (OptimizeRetainRVCall(F, Inst))
727  continue;
728  break;
729  case ARCInstKind::AutoreleaseRV:
730  OptimizeAutoreleaseRVCall(F, Inst, Class);
731  break;
732  }
733 
734  // objc_autorelease(x) -> objc_release(x) if x is otherwise unused.
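  // For example (an illustrative sketch), if the result of
  //   %0 = call i8* @objc_autorelease(i8* %x)
  // is unused and %x is a single-use identified object, the call is replaced
  // with
  //   call void @objc_release(i8* %x), !clang.imprecise_release !0
  // since deferring the release is no longer needed.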
735  if (IsAutorelease(Class) && Inst->use_empty()) {
736  CallInst *Call = cast<CallInst>(Inst);
737  const Value *Arg = Call->getArgOperand(0);
738  Arg = FindSingleUseIdentifiedObject(Arg);
739  if (Arg) {
740  Changed = true;
741  ++NumAutoreleases;
742 
743  // Create the declaration lazily.
744  LLVMContext &C = Inst->getContext();
745 
747  CallInst *NewCall = CallInst::Create(Decl, Call->getArgOperand(0), "",
748  Call);
749  NewCall->setMetadata(MDKindCache.get(ARCMDKindID::ImpreciseRelease),
750  MDNode::get(C, None));
751 
752  DEBUG(dbgs() << "Replacing autorelease{,RV}(x) with objc_release(x) "
753  "since x is otherwise unused.\nOld: " << *Call << "\nNew: "
754  << *NewCall << "\n");
755 
756  EraseInstruction(Call);
757  Inst = NewCall;
758  Class = ARCInstKind::Release;
759  }
760  }
761 
762  // For functions which can never be passed stack arguments, add
763  // a tail keyword.
764  if (IsAlwaysTail(Class)) {
765  Changed = true;
766  DEBUG(dbgs() << "Adding tail keyword to function since it can never be "
767  "passed stack args: " << *Inst << "\n");
768  cast<CallInst>(Inst)->setTailCall();
769  }
770 
771  // Ensure that functions that can never have a "tail" keyword due to the
772  // semantics of ARC truly do not do so.
773  if (IsNeverTail(Class)) {
774  Changed = true;
775  DEBUG(dbgs() << "Removing tail keyword from function: " << *Inst <<
776  "\n");
777  cast<CallInst>(Inst)->setTailCall(false);
778  }
779 
780  // Set nounwind as needed.
781  if (IsNoThrow(Class)) {
782  Changed = true;
783  DEBUG(dbgs() << "Found no throw class. Setting nounwind on: " << *Inst
784  << "\n");
785  cast<CallInst>(Inst)->setDoesNotThrow();
786  }
787 
788  if (!IsNoopOnNull(Class)) {
789  UsedInThisFunction |= 1 << unsigned(Class);
790  continue;
791  }
792 
793  const Value *Arg = GetArgRCIdentityRoot(Inst);
794 
795  // ARC calls with null are no-ops. Delete them.
796  if (IsNullOrUndef(Arg)) {
797  Changed = true;
798  ++NumNoops;
799  DEBUG(dbgs() << "ARC calls with null are no-ops. Erasing: " << *Inst
800  << "\n");
801  EraseInstruction(Inst);
802  continue;
803  }
804 
805  // Keep track of which of retain, release, autorelease, and retain_block
806  // are actually present in this function.
807  UsedInThisFunction |= 1 << unsigned(Class);
808 
809  // If Arg is a PHI, and one or more incoming values to the
810  // PHI are null, and the call is control-equivalent to the PHI, and there
811  // are no relevant side effects between the PHI and the call, the call
812  // could be pushed up to just those paths with non-null incoming values.
813  // For now, don't bother splitting critical edges for this.
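  // For example (an illustrative sketch), given
  //   %x = phi i8* [ %p, %nonnull.bb ], [ null, %null.bb ]
  //   call void @objc_release(i8* %x)
  // the call is a no-op along %null.bb, so a clone of it can be placed at the
  // end of %nonnull.bb (operating on %p) and the original call erased.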
814  SmallVector<std::pair<Instruction *, const Value *>, 4> Worklist;
815  Worklist.push_back(std::make_pair(Inst, Arg));
816  do {
817  std::pair<Instruction *, const Value *> Pair = Worklist.pop_back_val();
818  Inst = Pair.first;
819  Arg = Pair.second;
820 
821  const PHINode *PN = dyn_cast<PHINode>(Arg);
822  if (!PN) continue;
823 
824  // Determine if the PHI has any null operands, or any incoming
825  // critical edges.
826  bool HasNull = false;
827  bool HasCriticalEdges = false;
828  for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
829  Value *Incoming =
830  GetRCIdentityRoot(PN->getIncomingValue(i));
831  if (IsNullOrUndef(Incoming))
832  HasNull = true;
833  else if (cast<TerminatorInst>(PN->getIncomingBlock(i)->back())
834  .getNumSuccessors() != 1) {
835  HasCriticalEdges = true;
836  break;
837  }
838  }
839  // If we have null operands and no critical edges, optimize.
840  if (!HasCriticalEdges && HasNull) {
841  SmallPtrSet<Instruction *, 4> DependingInstructions;
842  SmallPtrSet<const BasicBlock *, 4> Visited;
843 
844  // Check that there is nothing that cares about the reference
845  // count between the call and the phi.
846  switch (Class) {
847  case ARCInstKind::Retain:
848  case ARCInstKind::RetainBlock:
849  // These can always be moved up.
850  break;
851  case ARCInstKind::Release:
852  // These can't be moved across things that care about the retain
853  // count.
854  FindDependencies(NeedsPositiveRetainCount, Arg,
855  Inst->getParent(), Inst,
856  DependingInstructions, Visited, PA);
857  break;
858  case ARCInstKind::Autorelease:
859  // These can't be moved across autorelease pool scope boundaries.
860  FindDependencies(AutoreleasePoolBoundary, Arg,
861  Inst->getParent(), Inst,
862  DependingInstructions, Visited, PA);
863  break;
865  case ARCInstKind::RetainRV:
866  case ARCInstKind::AutoreleaseRV:
867  // Don't move these; the RV optimization depends on the autoreleaseRV
868  // being tail called, and the retainRV being immediately after a call
869  // (which might still happen if we get lucky with codegen layout, but
870  // it's not worth taking the chance).
871  continue;
872  default:
873  llvm_unreachable("Invalid dependence flavor");
874  }
875 
876  if (DependingInstructions.size() == 1 &&
877  *DependingInstructions.begin() == PN) {
878  Changed = true;
879  ++NumPartialNoops;
880  // Clone the call into each predecessor that has a non-null value.
881  CallInst *CInst = cast<CallInst>(Inst);
882  Type *ParamTy = CInst->getArgOperand(0)->getType();
883  for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
884  Value *Incoming =
885  GetRCIdentityRoot(PN->getIncomingValue(i));
886  if (!IsNullOrUndef(Incoming)) {
887  CallInst *Clone = cast<CallInst>(CInst->clone());
888  Value *Op = PN->getIncomingValue(i);
889  Instruction *InsertPos = &PN->getIncomingBlock(i)->back();
890  if (Op->getType() != ParamTy)
891  Op = new BitCastInst(Op, ParamTy, "", InsertPos);
892  Clone->setArgOperand(0, Op);
893  Clone->insertBefore(InsertPos);
894 
895  DEBUG(dbgs() << "Cloning "
896  << *CInst << "\n"
897  "And inserting clone at " << *InsertPos << "\n");
898  Worklist.push_back(std::make_pair(Clone, Incoming));
899  }
900  }
901  // Erase the original call.
902  DEBUG(dbgs() << "Erasing: " << *CInst << "\n");
903  EraseInstruction(CInst);
904  continue;
905  }
906  }
907  } while (!Worklist.empty());
908  }
909 }
910 
911 /// If we have a top down pointer in the S_Use state, make sure that there are
912 /// no CFG hazards by checking the states of various bottom up pointers.
913 static void CheckForUseCFGHazard(const Sequence SuccSSeq,
914  const bool SuccSRRIKnownSafe,
915  TopDownPtrState &S,
916  bool &SomeSuccHasSame,
917  bool &AllSuccsHaveSame,
918  bool &NotAllSeqEqualButKnownSafe,
919  bool &ShouldContinue) {
920  switch (SuccSSeq) {
921  case S_CanRelease: {
922  if (!S.IsKnownSafe() && !SuccSRRIKnownSafe) {
923  S.ClearSequenceProgress();
924  break;
925  }
926  S.SetCFGHazardAfflicted(true);
927  ShouldContinue = true;
928  break;
929  }
930  case S_Use:
931  SomeSuccHasSame = true;
932  break;
933  case S_Stop:
934  case S_Release:
935  case S_MovableRelease:
936  if (!S.IsKnownSafe() && !SuccSRRIKnownSafe)
937  AllSuccsHaveSame = false;
938  else
939  NotAllSeqEqualButKnownSafe = true;
940  break;
941  case S_Retain:
942  llvm_unreachable("bottom-up pointer in retain state!");
943  case S_None:
944  llvm_unreachable("This should have been handled earlier.");
945  }
946 }
947 
948 /// If we have a Top Down pointer in the S_CanRelease state, make sure that
949 /// there are no CFG hazards by checking the states of various bottom up
950 /// pointers.
951 static void CheckForCanReleaseCFGHazard(const Sequence SuccSSeq,
952  const bool SuccSRRIKnownSafe,
953  TopDownPtrState &S,
954  bool &SomeSuccHasSame,
955  bool &AllSuccsHaveSame,
956  bool &NotAllSeqEqualButKnownSafe) {
957  switch (SuccSSeq) {
958  case S_CanRelease:
959  SomeSuccHasSame = true;
960  break;
961  case S_Stop:
962  case S_Release:
963  case S_MovableRelease:
964  case S_Use:
965  if (!S.IsKnownSafe() && !SuccSRRIKnownSafe)
966  AllSuccsHaveSame = false;
967  else
968  NotAllSeqEqualButKnownSafe = true;
969  break;
970  case S_Retain:
971  llvm_unreachable("bottom-up pointer in retain state!");
972  case S_None:
973  llvm_unreachable("This should have been handled earlier.");
974  }
975 }
976 
977 /// Check for critical edges, loop boundaries, irreducible control flow, or
978 /// other CFG structures where moving code across the edge would result in it
979 /// being executed more.
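/// A sketch of the hazard (illustrative IR, not from this file):
///     entry:
///       %0 = call i8* @objc_retain(i8* %x)
///       br i1 %c, label %left, label %right
///     left:
///       call void @objc_release(i8* %x)
///       br label %merge
///     right:
///       br label %merge
/// Sinking the retain into both successors or hoisting the release into entry
/// would change how many times the calls execute along the %right path.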
980 void
981 ObjCARCOpt::CheckForCFGHazards(const BasicBlock *BB,
982  DenseMap<const BasicBlock *, BBState> &BBStates,
983  BBState &MyStates) const {
984  // If any top-down local-use or possible-dec has a succ which is earlier in
985  // the sequence, forget it.
986  for (auto I = MyStates.top_down_ptr_begin(), E = MyStates.top_down_ptr_end();
987  I != E; ++I) {
988  TopDownPtrState &S = I->second;
989  const Sequence Seq = I->second.GetSeq();
990 
991  // We only care about S_Retain, S_CanRelease, and S_Use.
992  if (Seq == S_None)
993  continue;
994 
995  // Make sure that if extra top down states are added in the future that this
996  // code is updated to handle it.
997  assert((Seq == S_Retain || Seq == S_CanRelease || Seq == S_Use) &&
998  "Unknown top down sequence state.");
999 
1000  const Value *Arg = I->first;
1001  const TerminatorInst *TI = cast<TerminatorInst>(&BB->back());
1002  bool SomeSuccHasSame = false;
1003  bool AllSuccsHaveSame = true;
1004  bool NotAllSeqEqualButKnownSafe = false;
1005 
1006  succ_const_iterator SI(TI), SE(TI, false);
1007 
1008  for (; SI != SE; ++SI) {
1009  // If VisitBottomUp has pointer information for this successor, take
1010  // what we know about it.
1011  DenseMap<const BasicBlock *, BBState>::const_iterator BBI =
1012  BBStates.find(*SI);
1013  assert(BBI != BBStates.end());
1014  const BottomUpPtrState &SuccS = BBI->second.getPtrBottomUpState(Arg);
1015  const Sequence SuccSSeq = SuccS.GetSeq();
1016 
1017  // If the bottom-up pointer is in an S_None state, clear the sequence
1018  // progress, since the sequence in the bottom-up state finished,
1019  // suggesting a mismatch between retains and releases. This is true for
1020  // all three cases that we are handling here: S_Retain, S_Use, and
1021  // S_CanRelease.
1022  if (SuccSSeq == S_None) {
1023  S.ClearSequenceProgress();
1024  continue;
1025  }
1026 
1027  // If we have S_Use or S_CanRelease, perform our CFG hazard
1028  // checks.
1029  const bool SuccSRRIKnownSafe = SuccS.IsKnownSafe();
1030 
1031  // *NOTE* We do not use Seq from above here since we are allowing for
1032  // S.GetSeq() to change while we are visiting basic blocks.
1033  switch(S.GetSeq()) {
1034  case S_Use: {
1035  bool ShouldContinue = false;
1036  CheckForUseCFGHazard(SuccSSeq, SuccSRRIKnownSafe, S, SomeSuccHasSame,
1037  AllSuccsHaveSame, NotAllSeqEqualButKnownSafe,
1038  ShouldContinue);
1039  if (ShouldContinue)
1040  continue;
1041  break;
1042  }
1043  case S_CanRelease: {
1044  CheckForCanReleaseCFGHazard(SuccSSeq, SuccSRRIKnownSafe, S,
1045  SomeSuccHasSame, AllSuccsHaveSame,
1046  NotAllSeqEqualButKnownSafe);
1047  break;
1048  }
1049  case S_Retain:
1050  case S_None:
1051  case S_Stop:
1052  case S_Release:
1053  case S_MovableRelease:
1054  break;
1055  }
1056  }
1057 
1058  // If the state at the other end of any of the successor edges
1059  // matches the current state, require all edges to match. This
1060  // guards against loops in the middle of a sequence.
1061  if (SomeSuccHasSame && !AllSuccsHaveSame) {
1062  S.ClearSequenceProgress();
1063  } else if (NotAllSeqEqualButKnownSafe) {
1064  // If we would have cleared the state foregoing the fact that we are known
1065  // safe, stop code motion. This is because whether or not it is safe to
1066  // remove RR pairs via KnownSafe is an orthogonal concept to whether we
1067  // are allowed to perform code motion.
1068  S.SetCFGHazardAfflicted(true);
1069  }
1070  }
1071 }
1072 
1073 bool ObjCARCOpt::VisitInstructionBottomUp(
1074  Instruction *Inst, BasicBlock *BB, BlotMapVector<Value *, RRInfo> &Retains,
1075  BBState &MyStates) {
1076  bool NestingDetected = false;
1077  ARCInstKind Class = GetARCInstKind(Inst);
1078  const Value *Arg = nullptr;
1079 
1080  DEBUG(dbgs() << " Class: " << Class << "\n");
1081 
1082  switch (Class) {
1083  case ARCInstKind::Release: {
1084  Arg = GetArgRCIdentityRoot(Inst);
1085 
1086  BottomUpPtrState &S = MyStates.getPtrBottomUpState(Arg);
1087  NestingDetected |= S.InitBottomUp(MDKindCache, Inst);
1088  break;
1089  }
1090  case ARCInstKind::RetainBlock:
1091  // In OptimizeIndividualCalls, we have strength reduced all optimizable
1092  // objc_retainBlocks to objc_retains. Thus at this point any
1093  // objc_retainBlocks that we see are not optimizable.
1094  break;
1095  case ARCInstKind::Retain:
1096  case ARCInstKind::RetainRV: {
1097  Arg = GetArgRCIdentityRoot(Inst);
1098  BottomUpPtrState &S = MyStates.getPtrBottomUpState(Arg);
1099  if (S.MatchWithRetain()) {
1100  // Don't do retain+release tracking for ARCInstKind::RetainRV, because
1101  // it's better to let it remain as the first instruction after a call.
1102  if (Class != ARCInstKind::RetainRV) {
1103  DEBUG(llvm::dbgs() << " Matching with: " << *Inst << "\n");
1104  Retains[Inst] = S.GetRRInfo();
1105  }
1106  S.ClearSequenceProgress();
1107  }
1108  // A retain moving bottom up can be a use.
1109  break;
1110  }
1111  case ARCInstKind::AutoreleasepoolPop:
1112  // Conservatively, clear MyStates for all known pointers.
1113  MyStates.clearBottomUpPointers();
1114  return NestingDetected;
1115  case ARCInstKind::AutoreleasepoolPush:
1116  case ARCInstKind::None:
1117  // These are irrelevant.
1118  return NestingDetected;
1119  default:
1120  break;
1121  }
1122 
1123  // Consider any other possible effects of this instruction on each
1124  // pointer being tracked.
1125  for (auto MI = MyStates.bottom_up_ptr_begin(),
1126  ME = MyStates.bottom_up_ptr_end();
1127  MI != ME; ++MI) {
1128  const Value *Ptr = MI->first;
1129  if (Ptr == Arg)
1130  continue; // Handled above.
1131  BottomUpPtrState &S = MI->second;
1132 
1133  if (S.HandlePotentialAlterRefCount(Inst, Ptr, PA, Class))
1134  continue;
1135 
1136  S.HandlePotentialUse(BB, Inst, Ptr, PA, Class);
1137  }
1138 
1139  return NestingDetected;
1140 }
1141 
1142 bool ObjCARCOpt::VisitBottomUp(BasicBlock *BB,
1143  DenseMap<const BasicBlock *, BBState> &BBStates,
1144  BlotMapVector<Value *, RRInfo> &Retains) {
1145 
1146  DEBUG(dbgs() << "\n== ObjCARCOpt::VisitBottomUp ==\n");
1147 
1148  bool NestingDetected = false;
1149  BBState &MyStates = BBStates[BB];
1150 
1151  // Merge the states from each successor to compute the initial state
1152  // for the current block.
1153  BBState::edge_iterator SI(MyStates.succ_begin()),
1154  SE(MyStates.succ_end());
1155  if (SI != SE) {
1156  const BasicBlock *Succ = *SI;
1157  DenseMap<const BasicBlock *, BBState>::iterator I = BBStates.find(Succ);
1158  assert(I != BBStates.end());
1159  MyStates.InitFromSucc(I->second);
1160  ++SI;
1161  for (; SI != SE; ++SI) {
1162  Succ = *SI;
1163  I = BBStates.find(Succ);
1164  assert(I != BBStates.end());
1165  MyStates.MergeSucc(I->second);
1166  }
1167  }
1168 
1169  DEBUG(llvm::dbgs() << "Before:\n" << BBStates[BB] << "\n"
1170  << "Performing Dataflow:\n");
1171 
1172  // Visit all the instructions, bottom-up.
1173  for (BasicBlock::iterator I = BB->end(), E = BB->begin(); I != E; --I) {
1174  Instruction *Inst = &*std::prev(I);
1175 
1176  // Invoke instructions are visited as part of their successors (below).
1177  if (isa<InvokeInst>(Inst))
1178  continue;
1179 
1180  DEBUG(dbgs() << " Visiting " << *Inst << "\n");
1181 
1182  NestingDetected |= VisitInstructionBottomUp(Inst, BB, Retains, MyStates);
1183  }
1184 
1185  // If there's a predecessor with an invoke, visit the invoke as if it were
1186  // part of this block, since we can't insert code after an invoke in its own
1187  // block, and we don't want to split critical edges.
1188  for (BBState::edge_iterator PI(MyStates.pred_begin()),
1189  PE(MyStates.pred_end()); PI != PE; ++PI) {
1190  BasicBlock *Pred = *PI;
1191  if (InvokeInst *II = dyn_cast<InvokeInst>(&Pred->back()))
1192  NestingDetected |= VisitInstructionBottomUp(II, BB, Retains, MyStates);
1193  }
1194 
1195  DEBUG(llvm::dbgs() << "\nFinal State:\n" << BBStates[BB] << "\n");
1196 
1197  return NestingDetected;
1198 }
1199 
1200 bool
1201 ObjCARCOpt::VisitInstructionTopDown(Instruction *Inst,
1202  DenseMap<Value *, RRInfo> &Releases,
1203  BBState &MyStates) {
1204  bool NestingDetected = false;
1205  ARCInstKind Class = GetARCInstKind(Inst);
1206  const Value *Arg = nullptr;
1207 
1208  DEBUG(llvm::dbgs() << " Class: " << Class << "\n");
1209 
1210  switch (Class) {
1211  case ARCInstKind::RetainBlock:
1212  // In OptimizeIndividualCalls, we have strength reduced all optimizable
1213  // objc_retainBlocks to objc_retains. Thus at this point any
1214  // objc_retainBlocks that we see are not optimizable. We need to break since
1215  // a retain can be a potential use.
1216  break;
1217  case ARCInstKind::Retain:
1218  case ARCInstKind::RetainRV: {
1219  Arg = GetArgRCIdentityRoot(Inst);
1220  TopDownPtrState &S = MyStates.getPtrTopDownState(Arg);
1221  NestingDetected |= S.InitTopDown(Class, Inst);
1222  // A retain can be a potential use; proceed to the generic checking
1223  // code below.
1224  break;
1225  }
1226  case ARCInstKind::Release: {
1227  Arg = GetArgRCIdentityRoot(Inst);
1228  TopDownPtrState &S = MyStates.getPtrTopDownState(Arg);
1229  // Try to form a tentative pair in between this release instruction and the
1230  // top down pointers that we are tracking.
1231  if (S.MatchWithRelease(MDKindCache, Inst)) {
1232  // If we succeed, copy S's RRInfo into the Release -> {Retain Set
1233  // Map}. Then we clear S.
1234  DEBUG(llvm::dbgs() << " Matching with: " << *Inst << "\n");
1235  Releases[Inst] = S.GetRRInfo();
1236  S.ClearSequenceProgress();
1237  }
1238  break;
1239  }
1241  case ARCInstKind::AutoreleasepoolPop:
1242  // Conservatively, clear MyStates for all known pointers.
1242  MyStates.clearTopDownPointers();
1243  return false;
1244  case ARCInstKind::AutoreleasepoolPush:
1245  case ARCInstKind::None:
1246  // These can not be uses of our pointer.
1247  return false;
1248  default:
1249  break;
1250  }
1251 
1252  // Consider any other possible effects of this instruction on each
1253  // pointer being tracked.
1254  for (auto MI = MyStates.top_down_ptr_begin(),
1255  ME = MyStates.top_down_ptr_end();
1256  MI != ME; ++MI) {
1257  const Value *Ptr = MI->first;
1258  if (Ptr == Arg)
1259  continue; // Handled above.
1260  TopDownPtrState &S = MI->second;
1261  if (S.HandlePotentialAlterRefCount(Inst, Ptr, PA, Class))
1262  continue;
1263 
1264  S.HandlePotentialUse(Inst, Ptr, PA, Class);
1265  }
1266 
1267  return NestingDetected;
1268 }
1269 
1270 bool
1271 ObjCARCOpt::VisitTopDown(BasicBlock *BB,
1272  DenseMap<const BasicBlock *, BBState> &BBStates,
1273  DenseMap<Value *, RRInfo> &Releases) {
1274  DEBUG(dbgs() << "\n== ObjCARCOpt::VisitTopDown ==\n");
1275  bool NestingDetected = false;
1276  BBState &MyStates = BBStates[BB];
1277 
1278  // Merge the states from each predecessor to compute the initial state
1279  // for the current block.
1280  BBState::edge_iterator PI(MyStates.pred_begin()),
1281  PE(MyStates.pred_end());
1282  if (PI != PE) {
1283  const BasicBlock *Pred = *PI;
1284  DenseMap<const BasicBlock *, BBState>::iterator I = BBStates.find(Pred);
1285  assert(I != BBStates.end());
1286  MyStates.InitFromPred(I->second);
1287  ++PI;
1288  for (; PI != PE; ++PI) {
1289  Pred = *PI;
1290  I = BBStates.find(Pred);
1291  assert(I != BBStates.end());
1292  MyStates.MergePred(I->second);
1293  }
1294  }
1295 
1296  DEBUG(llvm::dbgs() << "Before:\n" << BBStates[BB] << "\n"
1297  << "Performing Dataflow:\n");
1298 
1299  // Visit all the instructions, top-down.
1300  for (Instruction &Inst : *BB) {
1301  DEBUG(dbgs() << " Visiting " << Inst << "\n");
1302 
1303  NestingDetected |= VisitInstructionTopDown(&Inst, Releases, MyStates);
1304  }
1305 
1306  DEBUG(llvm::dbgs() << "\nState Before Checking for CFG Hazards:\n"
1307  << BBStates[BB] << "\n\n");
1308  CheckForCFGHazards(BB, BBStates, MyStates);
1309  DEBUG(llvm::dbgs() << "Final State:\n" << BBStates[BB] << "\n");
1310  return NestingDetected;
1311 }
1312 
1313 static void
1314 ComputePostOrders(Function &F,
1315  SmallVectorImpl<BasicBlock *> &PostOrder,
1316  SmallVectorImpl<BasicBlock *> &ReverseCFGPostOrder,
1317  unsigned NoObjCARCExceptionsMDKind,
1318  DenseMap<const BasicBlock *, BBState> &BBStates) {
1319  /// The visited set, for doing DFS walks.
1320  SmallPtrSet<BasicBlock *, 16> Visited;
1321 
1322  // Do DFS, computing the PostOrder.
1323  SmallPtrSet<BasicBlock *, 16> OnStack;
1324  SmallVector<std::pair<BasicBlock *, succ_iterator>, 16> SuccStack;
1325 
1326  // Functions always have exactly one entry block, and we don't have
1327  // any other block that we treat like an entry block.
1328  BasicBlock *EntryBB = &F.getEntryBlock();
1329  BBState &MyStates = BBStates[EntryBB];
1330  MyStates.SetAsEntry();
1331  TerminatorInst *EntryTI = cast<TerminatorInst>(&EntryBB->back());
1332  SuccStack.push_back(std::make_pair(EntryBB, succ_iterator(EntryTI)));
1333  Visited.insert(EntryBB);
1334  OnStack.insert(EntryBB);
1335  do {
1336  dfs_next_succ:
1337  BasicBlock *CurrBB = SuccStack.back().first;
1338  TerminatorInst *TI = cast<TerminatorInst>(&CurrBB->back());
1339  succ_iterator SE(TI, false);
1340 
1341  while (SuccStack.back().second != SE) {
1342  BasicBlock *SuccBB = *SuccStack.back().second++;
1343  if (Visited.insert(SuccBB).second) {
1344  TerminatorInst *TI = cast<TerminatorInst>(&SuccBB->back());
1345  SuccStack.push_back(std::make_pair(SuccBB, succ_iterator(TI)));
1346  BBStates[CurrBB].addSucc(SuccBB);
1347  BBState &SuccStates = BBStates[SuccBB];
1348  SuccStates.addPred(CurrBB);
1349  OnStack.insert(SuccBB);
1350  goto dfs_next_succ;
1351  }
1352 
1353  if (!OnStack.count(SuccBB)) {
1354  BBStates[CurrBB].addSucc(SuccBB);
1355  BBStates[SuccBB].addPred(CurrBB);
1356  }
1357  }
1358  OnStack.erase(CurrBB);
1359  PostOrder.push_back(CurrBB);
1360  SuccStack.pop_back();
1361  } while (!SuccStack.empty());
1362 
1363  Visited.clear();
1364 
1365  // Do reverse-CFG DFS, computing the reverse-CFG PostOrder.
1366  // Functions may have many exits, and there also blocks which we treat
1367  // as exits due to ignored edges.
1368  SmallVector<std::pair<BasicBlock *, BBState::edge_iterator>, 16> PredStack;
1369  for (BasicBlock &ExitBB : F) {
1370  BBState &MyStates = BBStates[&ExitBB];
1371  if (!MyStates.isExit())
1372  continue;
1373 
1374  MyStates.SetAsExit();
1375 
1376  PredStack.push_back(std::make_pair(&ExitBB, MyStates.pred_begin()));
1377  Visited.insert(&ExitBB);
1378  while (!PredStack.empty()) {
1379  reverse_dfs_next_succ:
1380  BBState::edge_iterator PE = BBStates[PredStack.back().first].pred_end();
1381  while (PredStack.back().second != PE) {
1382  BasicBlock *BB = *PredStack.back().second++;
1383  if (Visited.insert(BB).second) {
1384  PredStack.push_back(std::make_pair(BB, BBStates[BB].pred_begin()));
1385  goto reverse_dfs_next_succ;
1386  }
1387  }
1388  ReverseCFGPostOrder.push_back(PredStack.pop_back_val().first);
1389  }
1390  }
1391 }
1392 
1393 // Visit the function both top-down and bottom-up.
1394 bool ObjCARCOpt::Visit(Function &F,
1395  DenseMap<const BasicBlock *, BBState> &BBStates,
1396  BlotMapVector<Value *, RRInfo> &Retains,
1397  DenseMap<Value *, RRInfo> &Releases) {
1398 
1399  // Use reverse-postorder traversals, because we magically know that loops
1400  // will be well behaved, i.e. they won't repeatedly call retain on a single
1401  // pointer without doing a release. We can't use the ReversePostOrderTraversal
1402  // class here because we want the reverse-CFG postorder to consider each
1403  // function exit point, and we want to ignore selected cycle edges.
1404  SmallVector<BasicBlock *, 16> PostOrder;
1405  SmallVector<BasicBlock *, 16> ReverseCFGPostOrder;
1406  ComputePostOrders(F, PostOrder, ReverseCFGPostOrder,
1407  MDKindCache.get(ARCMDKindID::NoObjCARCExceptions),
1408  BBStates);
1409 
1410  // Use reverse-postorder on the reverse CFG for bottom-up.
1411  bool BottomUpNestingDetected = false;
1412  for (BasicBlock *BB : reverse(ReverseCFGPostOrder))
1413  BottomUpNestingDetected |= VisitBottomUp(BB, BBStates, Retains);
1414 
1415  // Use reverse-postorder for top-down.
1416  bool TopDownNestingDetected = false;
1417  for (BasicBlock *BB : reverse(PostOrder))
1418  TopDownNestingDetected |= VisitTopDown(BB, BBStates, Releases);
1419 
1420  return TopDownNestingDetected && BottomUpNestingDetected;
1421 }
1422 
1423 /// Move the calls in RetainsToMove and ReleasesToMove.
1424 void ObjCARCOpt::MoveCalls(Value *Arg, RRInfo &RetainsToMove,
1425  RRInfo &ReleasesToMove,
1426  BlotMapVector<Value *, RRInfo> &Retains,
1427  DenseMap<Value *, RRInfo> &Releases,
1428  SmallVectorImpl<Instruction *> &DeadInsts,
1429  Module *M) {
1430  Type *ArgTy = Arg->getType();
1431  Type *ParamTy = PointerType::getUnqual(Type::getInt8Ty(ArgTy->getContext()));
1432 
1433  DEBUG(dbgs() << "== ObjCARCOpt::MoveCalls ==\n");
1434 
1435  // Insert the new retain and release calls.
1436  for (Instruction *InsertPt : ReleasesToMove.ReverseInsertPts) {
1437  Value *MyArg = ArgTy == ParamTy ? Arg :
1438  new BitCastInst(Arg, ParamTy, "", InsertPt);
1440  CallInst *Call = CallInst::Create(Decl, MyArg, "", InsertPt);
1441  Call->setDoesNotThrow();
1442  Call->setTailCall();
1443 
1444  DEBUG(dbgs() << "Inserting new Retain: " << *Call << "\n"
1445  "At insertion point: " << *InsertPt << "\n");
1446  }
1447  for (Instruction *InsertPt : RetainsToMove.ReverseInsertPts) {
1448  Value *MyArg = ArgTy == ParamTy ? Arg :
1449  new BitCastInst(Arg, ParamTy, "", InsertPt);
1451  CallInst *Call = CallInst::Create(Decl, MyArg, "", InsertPt);
1452  // Attach a clang.imprecise_release metadata tag, if appropriate.
1453  if (MDNode *M = ReleasesToMove.ReleaseMetadata)
1454  Call->setMetadata(MDKindCache.get(ARCMDKindID::ImpreciseRelease), M);
1455  Call->setDoesNotThrow();
1456  if (ReleasesToMove.IsTailCallRelease)
1457  Call->setTailCall();
1458 
1459  DEBUG(dbgs() << "Inserting new Release: " << *Call << "\n"
1460  "At insertion point: " << *InsertPt << "\n");
1461  }
1462 
1463  // Delete the original retain and release calls.
1464  for (Instruction *OrigRetain : RetainsToMove.Calls) {
1465  Retains.blot(OrigRetain);
1466  DeadInsts.push_back(OrigRetain);
1467  DEBUG(dbgs() << "Deleting retain: " << *OrigRetain << "\n");
1468  }
1469  for (Instruction *OrigRelease : ReleasesToMove.Calls) {
1470  Releases.erase(OrigRelease);
1471  DeadInsts.push_back(OrigRelease);
1472  DEBUG(dbgs() << "Deleting release: " << *OrigRelease << "\n");
1473  }
1474 
1475 }
1476 
1477 bool ObjCARCOpt::PairUpRetainsAndReleases(
1478  DenseMap<const BasicBlock *, BBState> &BBStates,
1479  BlotMapVector<Value *, RRInfo> &Retains,
1480  DenseMap<Value *, RRInfo> &Releases, Module *M,
1481  Instruction *Retain,
1482  SmallVectorImpl<Instruction *> &DeadInsts, RRInfo &RetainsToMove,
1483  RRInfo &ReleasesToMove, Value *Arg, bool KnownSafe,
1484  bool &AnyPairsCompletelyEliminated) {
1485  // If a pair happens in a region where it is known that the reference count
1486  // is already incremented, we can similarly ignore possible decrements unless
1487  // we are dealing with a retainable object with multiple provenance sources.
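  //
  // For example (an illustrative sketch):
  //   %0 = call i8* @objc_retain(i8* %x)    ; outer, keeps %x alive below
  //   %1 = call i8* @objc_retain(i8* %x)    ; inner
  //   call void @callee()                   ; might release objects
  //   call void @objc_release(i8* %x)       ; inner
  //   call void @objc_release(i8* %x)       ; outer
  // The inner pair is known safe: even if @callee releases, the outer retain
  // keeps the count positive, so the inner pair can be removed.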
1488  bool KnownSafeTD = true, KnownSafeBU = true;
1489  bool CFGHazardAfflicted = false;
1490 
1491  // Connect the dots between the top-down-collected RetainsToMove and
1492  // bottom-up-collected ReleasesToMove to form sets of related calls.
1493  // This is an iterative process so that we connect multiple releases
1494  // to multiple retains if needed.
1495  unsigned OldDelta = 0;
1496  unsigned NewDelta = 0;
1497  unsigned OldCount = 0;
1498  unsigned NewCount = 0;
1499  bool FirstRelease = true;
1500  for (SmallVector<Instruction *, 4> NewRetains{Retain};;) {
1501  SmallVector<Instruction *, 4> NewReleases;
1502  for (Instruction *NewRetain : NewRetains) {
1503  auto It = Retains.find(NewRetain);
1504  assert(It != Retains.end());
1505  const RRInfo &NewRetainRRI = It->second;
1506  KnownSafeTD &= NewRetainRRI.KnownSafe;
1507  for (Instruction *NewRetainRelease : NewRetainRRI.Calls) {
1508  auto Jt = Releases.find(NewRetainRelease);
1509  if (Jt == Releases.end())
1510  return false;
1511  const RRInfo &NewRetainReleaseRRI = Jt->second;
1512 
1513  // If the release does not have a reference to the retain as well,
1514  // something happened which is unaccounted for. Do not do anything.
1515  //
1516  // This can happen if we catch an additive overflow during path count
1517  // merging.
1518  if (!NewRetainReleaseRRI.Calls.count(NewRetain))
1519  return false;
1520 
1521  if (ReleasesToMove.Calls.insert(NewRetainRelease).second) {
1522 
1523  // If we overflow when we compute the path count, don't remove/move
1524  // anything.
1525  const BBState &NRRBBState = BBStates[NewRetainRelease->getParent()];
1526  unsigned PathCount = BBState::OverflowOccurredValue;
1527  if (NRRBBState.GetAllPathCountWithOverflow(PathCount))
1528  return false;
1529  assert(PathCount != BBState::OverflowOccurredValue &&
1530  "PathCount at this point can not be "
1531  "OverflowOccurredValue.");
1532  OldDelta -= PathCount;
1533 
1534  // Merge the ReleaseMetadata and IsTailCallRelease values.
1535  if (FirstRelease) {
1536  ReleasesToMove.ReleaseMetadata =
1537  NewRetainReleaseRRI.ReleaseMetadata;
1538  ReleasesToMove.IsTailCallRelease =
1539  NewRetainReleaseRRI.IsTailCallRelease;
1540  FirstRelease = false;
1541  } else {
1542  if (ReleasesToMove.ReleaseMetadata !=
1543  NewRetainReleaseRRI.ReleaseMetadata)
1544  ReleasesToMove.ReleaseMetadata = nullptr;
1545  if (ReleasesToMove.IsTailCallRelease !=
1546  NewRetainReleaseRRI.IsTailCallRelease)
1547  ReleasesToMove.IsTailCallRelease = false;
1548  }
1549 
1550  // Collect the optimal insertion points.
1551  if (!KnownSafe)
1552  for (Instruction *RIP : NewRetainReleaseRRI.ReverseInsertPts) {
1553  if (ReleasesToMove.ReverseInsertPts.insert(RIP).second) {
1554  // If we overflow when we compute the path count, don't
1555  // remove/move anything.
1556  const BBState &RIPBBState = BBStates[RIP->getParent()];
1557  PathCount = BBState::OverflowOccurredValue;
1558  if (RIPBBState.GetAllPathCountWithOverflow(PathCount))
1559  return false;
1560  assert(PathCount != BBState::OverflowOccurredValue &&
1561  "PathCount at this point can not be "
1562  "OverflowOccurredValue.");
1563  NewDelta -= PathCount;
1564  }
1565  }
1566  NewReleases.push_back(NewRetainRelease);
1567  }
1568  }
1569  }
1570  NewRetains.clear();
1571  if (NewReleases.empty()) break;
1572 
1573  // Back the other way.
1574  for (Instruction *NewRelease : NewReleases) {
1575  auto It = Releases.find(NewRelease);
1576  assert(It != Releases.end());
1577  const RRInfo &NewReleaseRRI = It->second;
1578  KnownSafeBU &= NewReleaseRRI.KnownSafe;
1579  CFGHazardAfflicted |= NewReleaseRRI.CFGHazardAfflicted;
1580  for (Instruction *NewReleaseRetain : NewReleaseRRI.Calls) {
1581  auto Jt = Retains.find(NewReleaseRetain);
1582  if (Jt == Retains.end())
1583  return false;
1584  const RRInfo &NewReleaseRetainRRI = Jt->second;
1585 
1586  // If the retain does not have a reference to the release as well,
1587  // something happened which is unaccounted for. Do not do anything.
1588  //
1589  // This can happen if we catch an additive overflow during path count
1590  // merging.
1591  if (!NewReleaseRetainRRI.Calls.count(NewRelease))
1592  return false;
1593 
1594  if (RetainsToMove.Calls.insert(NewReleaseRetain).second) {
1595  // If we overflow when we compute the path count, don't remove/move
1596  // anything.
1597  const BBState &NRRBBState = BBStates[NewReleaseRetain->getParent()];
1598  unsigned PathCount = BBState::OverflowOccurredValue;
1599  if (NRRBBState.GetAllPathCountWithOverflow(PathCount))
1600  return false;
1601  assert(PathCount != BBState::OverflowOccurredValue &&
1602  "PathCount at this point can not be "
1603  "OverflowOccurredValue.");
1604  OldDelta += PathCount;
1605  OldCount += PathCount;
1606 
1607  // Collect the optimal insertion points.
1608  if (!KnownSafe)
1609  for (Instruction *RIP : NewReleaseRetainRRI.ReverseInsertPts) {
1610  if (RetainsToMove.ReverseInsertPts.insert(RIP).second) {
1611  // If we overflow when we compute the path count, don't
1612  // remove/move anything.
1613  const BBState &RIPBBState = BBStates[RIP->getParent()];
1614 
1615  PathCount = BBState::OverflowOccurredValue;
1616  if (RIPBBState.GetAllPathCountWithOverflow(PathCount))
1617  return false;
1618  assert(PathCount != BBState::OverflowOccurredValue &&
1619  "PathCount at this point can not be "
1620  "OverflowOccurredValue.");
1621  NewDelta += PathCount;
1622  NewCount += PathCount;
1623  }
1624  }
1625  NewRetains.push_back(NewReleaseRetain);
1626  }
1627  }
1628  }
1629  if (NewRetains.empty()) break;
1630  }
1631 
1632  // We can only remove pointers if we are known safe in both directions.
1633  bool UnconditionallySafe = KnownSafeTD && KnownSafeBU;
1634  if (UnconditionallySafe) {
1635  RetainsToMove.ReverseInsertPts.clear();
1636  ReleasesToMove.ReverseInsertPts.clear();
1637  NewCount = 0;
1638  } else {
1639  // Determine whether the new insertion points we computed preserve the
1640  // balance of retain and release calls through the program.
1641  // TODO: If the fully aggressive solution isn't valid, try to find a
1642  // less aggressive solution which is.
1643  if (NewDelta != 0)
1644  return false;
1645 
1646  // At this point, we are not going to remove any RR pairs, but we are still
1647  // able to move RR pairs. If one of our pointers is afflicted with
1648  // CFGHazards, we cannot perform such code motion, so exit early.
1649  const bool WillPerformCodeMotion = RetainsToMove.ReverseInsertPts.size() ||
1650  ReleasesToMove.ReverseInsertPts.size();
1651  if (CFGHazardAfflicted && WillPerformCodeMotion)
1652  return false;
1653  }
1654 
1655  // Determine whether the original call points are balanced in the retain and
1656  // release calls through the program. If not, conservatively don't touch
1657  // them.
1658  // TODO: It's theoretically possible to do code motion in this case, as
1659  // long as the existing imbalances are maintained.
1660  if (OldDelta != 0)
1661  return false;
1662 
1663  Changed = true;
1664  assert(OldCount != 0 && "Unreachable code?");
1665  NumRRs += OldCount - NewCount;
1666  // Set to true if we completely removed any RR pairs.
1667  AnyPairsCompletelyEliminated = NewCount == 0;
1668 
1669  // We can move calls!
1670  return true;
1671 }
1672 
1673 /// Identify pairings between the retains and releases, and delete and/or move
1674 /// them.
1675 bool ObjCARCOpt::PerformCodePlacement(
1676  DenseMap<const BasicBlock *, BBState> &BBStates,
1677  BlotMapVector<Value *, RRInfo> &Retains,
1678  DenseMap<Value *, RRInfo> &Releases, Module *M) {
1679  DEBUG(dbgs() << "\n== ObjCARCOpt::PerformCodePlacement ==\n");
1680 
1681  bool AnyPairsCompletelyEliminated = false;
1682  SmallVector<Instruction *, 8> DeadInsts;
1683 
1684  // Visit each retain.
1685  for (BlotMapVector<Value *, RRInfo>::const_iterator I = Retains.begin(),
1686  E = Retains.end();
1687  I != E; ++I) {
1688  Value *V = I->first;
1689  if (!V) continue; // blotted
1690 
1691  Instruction *Retain = cast<Instruction>(V);
1692 
1693  DEBUG(dbgs() << "Visiting: " << *Retain << "\n");
1694 
1695  Value *Arg = GetArgRCIdentityRoot(Retain);
1696 
1697  // If the object being released is in static or stack storage, we know it's
1698  // not being managed by ObjC reference counting, so we can delete pairs
1699  // regardless of what possible decrements or uses lie between them.
1700  bool KnownSafe = isa<Constant>(Arg) || isa<AllocaInst>(Arg);
1701 
1702  // A constant pointer can't be pointing to an object on the heap. It may
1703  // be reference-counted, but it won't be deleted.
1704  if (const LoadInst *LI = dyn_cast<LoadInst>(Arg))
1705  if (const GlobalVariable *GV =
1706  dyn_cast<GlobalVariable>(
1707  GetRCIdentityRoot(LI->getPointerOperand())))
1708  if (GV->isConstant())
1709  KnownSafe = true;
1710 
1711  // Connect the dots between the top-down-collected RetainsToMove and
1712  // bottom-up-collected ReleasesToMove to form sets of related calls.
1713  RRInfo RetainsToMove, ReleasesToMove;
1714 
1715  bool PerformMoveCalls = PairUpRetainsAndReleases(
1716  BBStates, Retains, Releases, M, Retain, DeadInsts,
1717  RetainsToMove, ReleasesToMove, Arg, KnownSafe,
1718  AnyPairsCompletelyEliminated);
1719 
1720  if (PerformMoveCalls) {
1721  // Ok, everything checks out and we're all set. Let's move/delete some
1722  // code!
1723  MoveCalls(Arg, RetainsToMove, ReleasesToMove,
1724  Retains, Releases, DeadInsts, M);
1725  }
1726  }
1727 
1728  // Now that we're done moving everything, we can delete the newly dead
1729  // instructions, as we no longer need them as insert points.
1730  while (!DeadInsts.empty())
1731  EraseInstruction(DeadInsts.pop_back_val());
1732 
1733  return AnyPairsCompletelyEliminated;
1734 }
1735 
1736 /// Weak pointer optimizations.
1737 void ObjCARCOpt::OptimizeWeakCalls(Function &F) {
1738  DEBUG(dbgs() << "\n== ObjCARCOpt::OptimizeWeakCalls ==\n");
1739 
1740  // First, do memdep-style RLE and S2L optimizations. We can't use memdep
1741  // itself because it uses AliasAnalysis and we need to do provenance
1742  // queries instead.
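 //
 // As an illustrative sketch (the value names here are made up, not taken
 // from real IR), the redundant-load-elimination case below turns
 //
 //   %x = call i8* @objc_loadWeak(i8** %p)
 //   %y = call i8* @objc_loadWeak(i8** %p)   ; nothing clobbers %p in between
 //
 // into a single load by replacing %y's uses with %x, and the store-to-load
 // forwarding case forwards the value stored by an earlier objc_storeWeak or
 // objc_initWeak on a must-aliasing pointer.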
1743  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
1744  Instruction *Inst = &*I++;
1745 
1746  DEBUG(dbgs() << "Visiting: " << *Inst << "\n");
1747 
1748  ARCInstKind Class = GetBasicARCInstKind(Inst);
1749  if (Class != ARCInstKind::LoadWeak &&
1750  Class != ARCInstKind::LoadWeakRetained)
1751  continue;
1752 
1753  // Delete objc_loadWeak calls with no users.
1754  if (Class == ARCInstKind::LoadWeak && Inst->use_empty()) {
1755  Inst->eraseFromParent();
1756  continue;
1757  }
1758 
1759  // TODO: For now, just look for an earlier available version of this value
1760  // within the same block. Theoretically, we could do memdep-style non-local
1761  // analysis too, but that would want caching. A better approach would be to
1762  // use the technique that EarlyCSE uses.
1763  inst_iterator Current = std::prev(I);
1764  BasicBlock *CurrentBB = &*Current.getBasicBlockIterator();
1765  for (BasicBlock::iterator B = CurrentBB->begin(),
1766  J = Current.getInstructionIterator();
1767  J != B; --J) {
1768  Instruction *EarlierInst = &*std::prev(J);
1769  ARCInstKind EarlierClass = GetARCInstKind(EarlierInst);
1770  switch (EarlierClass) {
1771  case ARCInstKind::LoadWeak:
1772  case ARCInstKind::LoadWeakRetained: {
1773  // If this is loading from the same pointer, replace this load's value
1774  // with that one.
1775  CallInst *Call = cast<CallInst>(Inst);
1776  CallInst *EarlierCall = cast<CallInst>(EarlierInst);
1777  Value *Arg = Call->getArgOperand(0);
1778  Value *EarlierArg = EarlierCall->getArgOperand(0);
1779  switch (PA.getAA()->alias(Arg, EarlierArg)) {
1780  case MustAlias:
1781  Changed = true;
1782  // If the load has a builtin retain, insert a plain retain for it.
1783  if (Class == ARCInstKind::LoadWeakRetained) {
1784  Constant *Decl = EP.get(ARCRuntimeEntryPointKind::Retain);
1785  CallInst *CI = CallInst::Create(Decl, EarlierCall, "", Call);
1786  CI->setTailCall();
1787  }
1788  // Zap the fully redundant load.
1789  Call->replaceAllUsesWith(EarlierCall);
1790  Call->eraseFromParent();
1791  goto clobbered;
1792  case MayAlias:
1793  case PartialAlias:
1794  goto clobbered;
1795  case NoAlias:
1796  break;
1797  }
1798  break;
1799  }
1800  case ARCInstKind::StoreWeak:
1801  case ARCInstKind::InitWeak: {
1802  // If this is storing to the same pointer and has the same size etc.,
1803  // replace this load's value with the stored value.
1804  CallInst *Call = cast<CallInst>(Inst);
1805  CallInst *EarlierCall = cast<CallInst>(EarlierInst);
1806  Value *Arg = Call->getArgOperand(0);
1807  Value *EarlierArg = EarlierCall->getArgOperand(0);
1808  switch (PA.getAA()->alias(Arg, EarlierArg)) {
1809  case MustAlias:
1810  Changed = true;
1811  // If the load has a builtin retain, insert a plain retain for it.
1812  if (Class == ARCInstKind::LoadWeakRetained) {
1813  Constant *Decl = EP.get(ARCRuntimeEntryPointKind::Retain);
1814  CallInst *CI = CallInst::Create(Decl, EarlierCall, "", Call);
1815  CI->setTailCall();
1816  }
1817  // Zap the fully redundant load.
1818  Call->replaceAllUsesWith(EarlierCall->getArgOperand(1));
1819  Call->eraseFromParent();
1820  goto clobbered;
1821  case MayAlias:
1822  case PartialAlias:
1823  goto clobbered;
1824  case NoAlias:
1825  break;
1826  }
1827  break;
1828  }
1829  case ARCInstKind::MoveWeak:
1830  case ARCInstKind::CopyWeak:
1831  // TODO: Grab the copied value.
1832  goto clobbered;
1833  case ARCInstKind::CallOrUser:
1834  case ARCInstKind::None:
1835  case ARCInstKind::IntrinsicUser:
1836  case ARCInstKind::User:
1837  // Weak pointers are only modified through the weak entry points
1838  // (and arbitrary calls, which could call the weak entry points).
1839  break;
1840  default:
1841  // Anything else could modify the weak pointer.
1842  goto clobbered;
1843  }
1844  }
1845  clobbered:;
1846  }
1847 
1848  // Then, for each destroyWeak with an alloca operand, check to see if
1849  // the alloca and all its users can be zapped.
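 //
 // For example (an illustrative sketch; the names are made up), a group like
 //
 //   %w = alloca i8*
 //   call i8* @objc_initWeak(i8** %w, i8* %obj)
 //   call void @objc_destroyWeak(i8** %w)
 //
 // where the alloca is only ever passed to weak entry points can be removed
 // entirely, with the init/store calls replaced by their second argument.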
1850  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
1851  Instruction *Inst = &*I++;
1852  ARCInstKind Class = GetBasicARCInstKind(Inst);
1853  if (Class != ARCInstKind::DestroyWeak)
1854  continue;
1855 
1856  CallInst *Call = cast<CallInst>(Inst);
1857  Value *Arg = Call->getArgOperand(0);
1858  if (AllocaInst *Alloca = dyn_cast<AllocaInst>(Arg)) {
1859  for (User *U : Alloca->users()) {
1860  const Instruction *UserInst = cast<Instruction>(U);
1861  switch (GetBasicARCInstKind(UserInst)) {
1862  case ARCInstKind::InitWeak:
1863  case ARCInstKind::StoreWeak:
1864  case ARCInstKind::DestroyWeak:
1865  continue;
1866  default:
1867  goto done;
1868  }
1869  }
1870  Changed = true;
1871  for (auto UI = Alloca->user_begin(), UE = Alloca->user_end(); UI != UE;) {
1872  CallInst *UserInst = cast<CallInst>(*UI++);
1873  switch (GetBasicARCInstKind(UserInst)) {
1874  case ARCInstKind::InitWeak:
1875  case ARCInstKind::StoreWeak:
1876  // These functions return their second argument.
1877  UserInst->replaceAllUsesWith(UserInst->getArgOperand(1));
1878  break;
1879  case ARCInstKind::DestroyWeak:
1880  // No return value.
1881  break;
1882  default:
1883  llvm_unreachable("alloca really is used!");
1884  }
1885  UserInst->eraseFromParent();
1886  }
1887  Alloca->eraseFromParent();
1888  done:;
1889  }
1890  }
1891 }
1892 
1893 /// Identify program paths which execute sequences of retains and releases which
1894 /// can be eliminated.
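///
/// For example (an illustrative sketch; the value names are made up):
/// \code
/// %0 = call i8* @objc_retain(i8* %x)
/// ... code that provably cannot decrement %x's reference count ...
/// call void @objc_release(i8* %x)
/// \endcode
/// When such a pairing is proven safe, both calls can be deleted.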
1895 bool ObjCARCOpt::OptimizeSequences(Function &F) {
1896  // Releases, Retains - These are used to store the results of the main flow
1897  // analysis. These use Value* as the key instead of Instruction* so that the
1898  // map stays valid when we get around to rewriting code and calls get
1899  // replaced by arguments.
1900  DenseMap<Value *, RRInfo> Releases;
1901  BlotMapVector<Value *, RRInfo> Retains;
1902 
1903  // This is used during the traversal of the function to track the
1904  // states for each identified object at each block.
1905  DenseMap<const BasicBlock *, BBState> BBStates;
1906 
1907  // Analyze the CFG of the function, and all instructions.
1908  bool NestingDetected = Visit(F, BBStates, Retains, Releases);
1909 
1910  // Transform.
1911  bool AnyPairsCompletelyEliminated = PerformCodePlacement(BBStates, Retains,
1912  Releases,
1913  F.getParent());
1914 
1915  return AnyPairsCompletelyEliminated && NestingDetected;
1916 }
1917 
1918 /// Check if there is a dependent call earlier that does not have anything in
1919 /// between the Retain and the call that can affect the reference count of their
1920 /// shared pointer argument. Note that Retain need not be in BB.
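///
/// As a sketch (with made-up names), this is the shape being checked for:
/// \code
/// %call = call i8* @something(...)
/// %0 = call i8* @objc_retain(i8* %call)
/// \endcode
/// i.e. the retained pointer is the return value of an ordinary call, with
/// nothing in between that could alter its reference count.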
1921 static bool
1922 HasSafePathToPredecessorCall(const Value *Arg, Instruction *Retain,
1923  SmallPtrSetImpl<Instruction *> &DepInsts,
1924  SmallPtrSetImpl<const BasicBlock *> &Visited,
1925  ProvenanceAnalysis &PA) {
1926  FindDependencies(CanChangeRetainCount, Arg, Retain->getParent(), Retain,
1927  DepInsts, Visited, PA);
1928  if (DepInsts.size() != 1)
1929  return false;
1930 
1931  auto *Call = dyn_cast_or_null<CallInst>(*DepInsts.begin());
1932 
1933  // Check that the pointer is the return value of the call.
1934  if (!Call || Arg != Call)
1935  return false;
1936 
1937  // Check that the call is a regular call.
1938  ARCInstKind Class = GetBasicARCInstKind(Call);
1939  return Class == ARCInstKind::CallOrUser || Class == ARCInstKind::Call;
1940 }
1941 
1942 /// Find a dependent retain that precedes the given autorelease for which there
1943 /// is nothing in between the two instructions that can affect the ref count of
1944 /// Arg.
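///
/// Sketch (illustrative names): given
/// \code
/// %0 = call i8* @objc_retain(i8* %x)
/// %1 = call i8* @objc_autorelease(i8* %0)
/// \endcode
/// this returns the retain when nothing between the two calls can affect %x's
/// reference count.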
1945 static CallInst *
1946 FindPredecessorRetainWithSafePath(const Value *Arg, BasicBlock *BB,
1947  Instruction *Autorelease,
1948  SmallPtrSetImpl<Instruction *> &DepInsts,
1949  SmallPtrSetImpl<const BasicBlock *> &Visited,
1950  ProvenanceAnalysis &PA) {
1951  FindDependencies(CanChangeRetainCount, Arg,
1952  BB, Autorelease, DepInsts, Visited, PA);
1953  if (DepInsts.size() != 1)
1954  return nullptr;
1955 
1956  auto *Retain = dyn_cast_or_null<CallInst>(*DepInsts.begin());
1957 
1958  // Check that we found a retain with the same argument.
1959  if (!Retain || !IsRetain(GetBasicARCInstKind(Retain)) ||
1960  GetArgRCIdentityRoot(Retain) != Arg) {
1961  return nullptr;
1962  }
1963 
1964  return Retain;
1965 }
1966 
1967 /// Look for an ``autorelease'' instruction dependent on Arg such that there are
1968 /// no instructions dependent on Arg that need a positive ref count in between
1969 /// the autorelease and the ret.
1970 static CallInst *
1971 FindPredecessorAutoreleaseWithSafePath(const Value *Arg, BasicBlock *BB,
1972  ReturnInst *Ret,
1973  SmallPtrSetImpl<Instruction *> &DepInsts,
1974  SmallPtrSetImpl<const BasicBlock *> &V,
1975  ProvenanceAnalysis &PA) {
1976  FindDependencies(NeedsPositiveRetainCount, Arg,
1977  BB, Ret, DepInsts, V, PA);
1978  if (DepInsts.size() != 1)
1979  return nullptr;
1980 
1981  auto *Autorelease = dyn_cast_or_null<CallInst>(*DepInsts.begin());
1982  if (!Autorelease)
1983  return nullptr;
1984  ARCInstKind AutoreleaseClass = GetBasicARCInstKind(Autorelease);
1985  if (!IsAutorelease(AutoreleaseClass))
1986  return nullptr;
1987  if (GetArgRCIdentityRoot(Autorelease) != Arg)
1988  return nullptr;
1989 
1990  return Autorelease;
1991 }
1992 
1993 /// Look for this pattern:
1994 /// \code
1995 /// %call = call i8* @something(...)
1996 /// %2 = call i8* @objc_retain(i8* %call)
1997 /// %3 = call i8* @objc_autorelease(i8* %2)
1998 /// ret i8* %3
1999 /// \endcode
2000 /// And delete the retain and autorelease.
2001 void ObjCARCOpt::OptimizeReturns(Function &F) {
2002  if (!F.getReturnType()->isPointerTy())
2003  return;
2004 
2005  DEBUG(dbgs() << "\n== ObjCARCOpt::OptimizeReturns ==\n");
2006 
2007  SmallPtrSet<Instruction *, 4> DependingInstructions;
2008  SmallPtrSet<const BasicBlock *, 4> Visited;
2009  for (BasicBlock &BB: F) {
2010  ReturnInst *Ret = dyn_cast<ReturnInst>(&BB.back());
2011  if (!Ret)
2012  continue;
2013 
2014  DEBUG(dbgs() << "Visiting: " << *Ret << "\n");
2015 
2016  const Value *Arg = GetRCIdentityRoot(Ret->getOperand(0));
2017 
2018  // Look for an ``autorelease'' instruction that is a predecessor of Ret and
2019  // dependent on Arg such that there are no instructions dependent on Arg
2020  // that need a positive ref count in between the autorelease and Ret.
2021  CallInst *Autorelease = FindPredecessorAutoreleaseWithSafePath(
2022  Arg, &BB, Ret, DependingInstructions, Visited, PA);
2023  DependingInstructions.clear();
2024  Visited.clear();
2025 
2026  if (!Autorelease)
2027  continue;
2028 
2030  Arg, &BB, Autorelease, DependingInstructions, Visited, PA);
2031  DependingInstructions.clear();
2032  Visited.clear();
2033 
2034  if (!Retain)
2035  continue;
2036 
2037  // Check that there is nothing that can affect the reference count
2038  // between the retain and the call. Note that Retain need not be in BB.
2039  bool HasSafePathToCall = HasSafePathToPredecessorCall(Arg, Retain,
2040  DependingInstructions,
2041  Visited, PA);
2042  DependingInstructions.clear();
2043  Visited.clear();
2044 
2045  if (!HasSafePathToCall)
2046  continue;
2047 
2048  // If so, we can zap the retain and autorelease.
2049  Changed = true;
2050  ++NumRets;
2051  DEBUG(dbgs() << "Erasing: " << *Retain << "\nErasing: "
2052  << *Autorelease << "\n");
2053  EraseInstruction(Retain);
2054  EraseInstruction(Autorelease);
2055  }
2056 }
2057 
2058 #ifndef NDEBUG
2059 void
2060 ObjCARCOpt::GatherStatistics(Function &F, bool AfterOptimization) {
2061  llvm::Statistic &NumRetains =
2062  AfterOptimization? NumRetainsAfterOpt : NumRetainsBeforeOpt;
2063  llvm::Statistic &NumReleases =
2064  AfterOptimization? NumReleasesAfterOpt : NumReleasesBeforeOpt;
2065 
2066  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
2067  Instruction *Inst = &*I++;
2068  switch (GetBasicARCInstKind(Inst)) {
2069  default:
2070  break;
2071  case ARCInstKind::Retain:
2072  ++NumRetains;
2073  break;
2074  case ARCInstKind::Release:
2075  ++NumReleases;
2076  break;
2077  }
2078  }
2079 }
2080 #endif
2081 
2082 bool ObjCARCOpt::doInitialization(Module &M) {
2083  if (!EnableARCOpts)
2084  return false;
2085 
2086  // If nothing in the Module uses ARC, don't do anything.
2087  Run = ModuleHasARC(M);
2088  if (!Run)
2089  return false;
2090 
2091  // Intuitively, objc_retain and others are nocapture, however in practice
2092  // they are not, because they return their argument value. And objc_release
2093  // calls finalizers which can have arbitrary side effects.
2094  MDKindCache.init(&M);
2095 
2096  // Initialize our runtime entry point cache.
2097  EP.init(&M);
2098 
2099  return false;
2100 }
2101 
2102 bool ObjCARCOpt::runOnFunction(Function &F) {
2103  if (!EnableARCOpts)
2104  return false;
2105 
2106  // If nothing in the Module uses ARC, don't do anything.
2107  if (!Run)
2108  return false;
2109 
2110  Changed = false;
2111 
2112  DEBUG(dbgs() << "<<< ObjCARCOpt: Visiting Function: " << F.getName() << " >>>"
2113  "\n");
2114 
2115  PA.setAA(&getAnalysis<AAResultsWrapperPass>().getAAResults());
2116 
2117 #ifndef NDEBUG
2118  if (AreStatisticsEnabled()) {
2119  GatherStatistics(F, false);
2120  }
2121 #endif
2122 
2123  // This pass performs several distinct transformations. As a compile-time aid
2124  // when compiling code that isn't ObjC, skip these if the relevant ObjC
2125  // library functions aren't declared.
2126 
2127  // Preliminary optimizations. This also computes UsedInThisFunction.
2128  OptimizeIndividualCalls(F);
2129 
2130  // Optimizations for weak pointers.
2131  if (UsedInThisFunction & ((1 << unsigned(ARCInstKind::LoadWeak)) |
2132  (1 << unsigned(ARCInstKind::LoadWeakRetained)) |
2133  (1 << unsigned(ARCInstKind::StoreWeak)) |
2134  (1 << unsigned(ARCInstKind::InitWeak)) |
2135  (1 << unsigned(ARCInstKind::CopyWeak)) |
2136  (1 << unsigned(ARCInstKind::MoveWeak)) |
2137  (1 << unsigned(ARCInstKind::DestroyWeak))))
2138  OptimizeWeakCalls(F);
2139 
2140  // Optimizations for retain+release pairs.
2141  if (UsedInThisFunction & ((1 << unsigned(ARCInstKind::Retain)) |
2142  (1 << unsigned(ARCInstKind::RetainRV)) |
2143  (1 << unsigned(ARCInstKind::RetainBlock))))
2144  if (UsedInThisFunction & (1 << unsigned(ARCInstKind::Release)))
2145  // Run OptimizeSequences until it either stops making changes or
2146  // no retain+release pair nesting is detected.
2147  while (OptimizeSequences(F)) {}
2148 
2149  // Optimizations if objc_autorelease is used.
2150  if (UsedInThisFunction & ((1 << unsigned(ARCInstKind::Autorelease)) |
2151  (1 << unsigned(ARCInstKind::AutoreleaseRV))))
2152  OptimizeReturns(F);
2153 
2154  // Gather statistics after optimization.
2155 #ifndef NDEBUG
2156  if (AreStatisticsEnabled()) {
2157  GatherStatistics(F, true);
2158  }
2159 #endif
2160 
2161  DEBUG(dbgs() << "\n");
2162 
2163  return Changed;
2164 }
2165 
2166 void ObjCARCOpt::releaseMemory() {
2167  PA.clear();
2168 }
2169 
2170 /// @}
2171 ///