//===- ObjCARCOpts.cpp - ObjC ARC Optimization ----------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
/// \file
/// This file defines ObjC ARC optimizations. ARC stands for Automatic
/// Reference Counting and is a system for managing reference counts for objects
/// in Objective C.
///
/// The optimizations performed include elimination of redundant, partially
/// redundant, and inconsequential reference count operations, elimination of
/// redundant weak pointer operations, and numerous minor simplifications.
///
/// WARNING: This file knows about certain library functions. It recognizes them
/// by name, and hardwires knowledge of their semantics.
///
/// WARNING: This file knows about how certain Objective-C library functions are
/// used. Naive LLVM IR transformations which would otherwise be
/// behavior-preserving may break these assumptions.
//
//===----------------------------------------------------------------------===//
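
// As a concrete illustration (a hand-written sketch, not from the original
// source), the simplest pattern this pass removes is a trivially balanced
// retain/release pair on one pointer:
//
//   %0 = tail call i8* @objc_retain(i8* %x)
//   ... code that provably cannot release %x ...
//   tail call void @objc_release(i8* %x)
//
// When the intervening code cannot touch the reference count of %x, both
// calls can be deleted outright.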
27 
28 #include "ARCRuntimeEntryPoints.h"
29 #include "BlotMapVector.h"
30 #include "DependencyAnalysis.h"
31 #include "ObjCARC.h"
32 #include "ProvenanceAnalysis.h"
33 #include "PtrState.h"
34 #include "llvm/ADT/DenseMap.h"
35 #include "llvm/ADT/None.h"
36 #include "llvm/ADT/STLExtras.h"
37 #include "llvm/ADT/SmallPtrSet.h"
38 #include "llvm/ADT/SmallVector.h"
39 #include "llvm/ADT/Statistic.h"
44 #include "llvm/IR/BasicBlock.h"
45 #include "llvm/IR/CFG.h"
46 #include "llvm/IR/CallSite.h"
47 #include "llvm/IR/Constant.h"
48 #include "llvm/IR/Constants.h"
49 #include "llvm/IR/DerivedTypes.h"
50 #include "llvm/IR/Function.h"
51 #include "llvm/IR/GlobalVariable.h"
52 #include "llvm/IR/InstIterator.h"
53 #include "llvm/IR/InstrTypes.h"
54 #include "llvm/IR/Instruction.h"
55 #include "llvm/IR/Instructions.h"
56 #include "llvm/IR/LLVMContext.h"
57 #include "llvm/IR/Metadata.h"
58 #include "llvm/IR/Type.h"
59 #include "llvm/IR/User.h"
60 #include "llvm/IR/Value.h"
61 #include "llvm/Pass.h"
62 #include "llvm/Support/Casting.h"
63 #include "llvm/Support/Compiler.h"
64 #include "llvm/Support/Debug.h"
67 #include <cassert>
68 #include <iterator>
69 #include <utility>
70 
71 using namespace llvm;
72 using namespace llvm::objcarc;
73 
74 #define DEBUG_TYPE "objc-arc-opts"
75 
76 /// \defgroup ARCUtilities Utility declarations/definitions specific to ARC.
77 /// @{
78 
79 /// \brief This is similar to GetRCIdentityRoot but it stops as soon
80 /// as it finds a value with multiple uses.
82  // ConstantData (like ConstantPointerNull and UndefValue) is used across
83  // modules. It's never a single-use value.
84  if (isa<ConstantData>(Arg))
85  return nullptr;
86 
87  if (Arg->hasOneUse()) {
88  if (const BitCastInst *BC = dyn_cast<BitCastInst>(Arg))
89  return FindSingleUseIdentifiedObject(BC->getOperand(0));
90  if (const GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(Arg))
91  if (GEP->hasAllZeroIndices())
92  return FindSingleUseIdentifiedObject(GEP->getPointerOperand());
95  cast<CallInst>(Arg)->getArgOperand(0));
96  if (!IsObjCIdentifiedObject(Arg))
97  return nullptr;
98  return Arg;
99  }
100 
101  // If we found an identifiable object but it has multiple uses, but they are
102  // trivial uses, we can still consider this to be a single-use value.
103  if (IsObjCIdentifiedObject(Arg)) {
104  for (const User *U : Arg->users())
105  if (!U->use_empty() || GetRCIdentityRoot(U) != Arg)
106  return nullptr;
107 
108  return Arg;
109  }
110 
111  return nullptr;
112 }
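
// Illustrative note (not part of the original file): for a chain such as
//
//   %obj = call i8* @some_alloc()            ; identified object
//   %c   = bitcast i8* %obj to i8*           ; sole use of %obj
//   call void @objc_release(i8* %c)
//
// the walk above looks through the single-use bitcast (and through forwarding
// calls such as objc_retain) and returns the identified object underneath,
// giving up with null as soon as any step has multiple non-trivial uses.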

/// @}
///
/// \defgroup ARCOpt ARC Optimization.
/// @{

// TODO: On code like this:
//
// objc_retain(%x)
// stuff_that_cannot_release()
// objc_autorelease(%x)
// stuff_that_cannot_release()
// objc_retain(%x)
// stuff_that_cannot_release()
// objc_autorelease(%x)
//
// The second retain and autorelease can be deleted.

// TODO: It should be possible to delete
// objc_autoreleasePoolPush and objc_autoreleasePoolPop
// pairs if nothing is actually autoreleased between them. Also, autorelease
// calls followed by objc_autoreleasePoolPop calls (perhaps in ObjC++ code
// after inlining) can be turned into plain release calls.

// TODO: Critical-edge splitting. If the optimal insertion point is
// a critical edge, the current algorithm has to fail, because it doesn't
// know how to split edges. It should be possible to make the optimizer
// think in terms of edges, rather than blocks, and then split critical
// edges on demand.

// TODO: OptimizeSequences could be generalized to be interprocedural.

// TODO: Recognize that a bunch of other objc runtime calls have
// non-escaping arguments and non-releasing arguments, and may be
// non-autoreleasing.

// TODO: Sink autorelease calls as far as possible. Unfortunately we
// usually can't sink them past other calls, which would be the main
// case where it would be useful.

// TODO: The pointer returned from objc_loadWeakRetained is retained.

// TODO: Delete release+retain pairs (rare).

STATISTIC(NumNoops,       "Number of no-op objc calls eliminated");
STATISTIC(NumPartialNoops, "Number of partially no-op objc calls eliminated");
STATISTIC(NumAutoreleases, "Number of autoreleases converted to releases");
STATISTIC(NumRets,        "Number of return value forwarding "
                          "retain+autoreleases eliminated");
STATISTIC(NumRRs,         "Number of retain+release paths eliminated");
STATISTIC(NumPeeps,       "Number of calls peephole-optimized");
#ifndef NDEBUG
STATISTIC(NumRetainsBeforeOpt,
          "Number of retains before optimization");
STATISTIC(NumReleasesBeforeOpt,
          "Number of releases before optimization");
STATISTIC(NumRetainsAfterOpt,
          "Number of retains after optimization");
STATISTIC(NumReleasesAfterOpt,
          "Number of releases after optimization");
#endif

namespace {

  /// \brief Per-BasicBlock state.
  class BBState {
    /// The number of unique control paths from the entry which can reach this
    /// block.
    unsigned TopDownPathCount = 0;

    /// The number of unique control paths to exits from this block.
    unsigned BottomUpPathCount = 0;

    /// The top-down traversal uses this to record information known about a
    /// pointer at the bottom of each block.
    BlotMapVector<const Value *, TopDownPtrState> PerPtrTopDown;

    /// The bottom-up traversal uses this to record information known about a
    /// pointer at the top of each block.
    BlotMapVector<const Value *, BottomUpPtrState> PerPtrBottomUp;

    /// Effective predecessors of the current block ignoring ignorable edges and
    /// ignored backedges.
    SmallVector<BasicBlock *, 2> Preds;

    /// Effective successors of the current block ignoring ignorable edges and
    /// ignored backedges.
    SmallVector<BasicBlock *, 2> Succs;

  public:
    static const unsigned OverflowOccurredValue;

    BBState() = default;

    using top_down_ptr_iterator = decltype(PerPtrTopDown)::iterator;
    using const_top_down_ptr_iterator = decltype(PerPtrTopDown)::const_iterator;

    top_down_ptr_iterator top_down_ptr_begin() { return PerPtrTopDown.begin(); }
    top_down_ptr_iterator top_down_ptr_end() { return PerPtrTopDown.end(); }
    const_top_down_ptr_iterator top_down_ptr_begin() const {
      return PerPtrTopDown.begin();
    }
    const_top_down_ptr_iterator top_down_ptr_end() const {
      return PerPtrTopDown.end();
    }
    bool hasTopDownPtrs() const {
      return !PerPtrTopDown.empty();
    }

    using bottom_up_ptr_iterator = decltype(PerPtrBottomUp)::iterator;
    using const_bottom_up_ptr_iterator =
        decltype(PerPtrBottomUp)::const_iterator;

    bottom_up_ptr_iterator bottom_up_ptr_begin() {
      return PerPtrBottomUp.begin();
    }
    bottom_up_ptr_iterator bottom_up_ptr_end() { return PerPtrBottomUp.end(); }
    const_bottom_up_ptr_iterator bottom_up_ptr_begin() const {
      return PerPtrBottomUp.begin();
    }
    const_bottom_up_ptr_iterator bottom_up_ptr_end() const {
      return PerPtrBottomUp.end();
    }
    bool hasBottomUpPtrs() const {
      return !PerPtrBottomUp.empty();
    }

    /// Mark this block as being an entry block, which has one path from the
    /// entry by definition.
    void SetAsEntry() { TopDownPathCount = 1; }

    /// Mark this block as being an exit block, which has one path to an exit by
    /// definition.
    void SetAsExit() { BottomUpPathCount = 1; }

    /// Attempt to find the PtrState object describing the top down state for
    /// pointer Arg. Return a new initialized PtrState describing the top down
    /// state for Arg if we do not find one.
    TopDownPtrState &getPtrTopDownState(const Value *Arg) {
      return PerPtrTopDown[Arg];
    }

    /// Attempt to find the PtrState object describing the bottom up state for
    /// pointer Arg. Return a new initialized PtrState describing the bottom up
    /// state for Arg if we do not find one.
    BottomUpPtrState &getPtrBottomUpState(const Value *Arg) {
      return PerPtrBottomUp[Arg];
    }

    /// Attempt to find the PtrState object describing the bottom up state for
    /// pointer Arg.
    bottom_up_ptr_iterator findPtrBottomUpState(const Value *Arg) {
      return PerPtrBottomUp.find(Arg);
    }

    void clearBottomUpPointers() {
      PerPtrBottomUp.clear();
    }

    void clearTopDownPointers() {
      PerPtrTopDown.clear();
    }

    void InitFromPred(const BBState &Other);
    void InitFromSucc(const BBState &Other);
    void MergePred(const BBState &Other);
    void MergeSucc(const BBState &Other);

    /// Compute the number of possible unique paths from an entry to an exit
    /// which pass through this block. This is only valid after both the
    /// top-down and bottom-up traversals are complete.
    ///
    /// Returns true if overflow occurred. Returns false if overflow did not
    /// occur.
    bool GetAllPathCountWithOverflow(unsigned &PathCount) const {
      if (TopDownPathCount == OverflowOccurredValue ||
          BottomUpPathCount == OverflowOccurredValue)
        return true;
      unsigned long long Product =
          (unsigned long long)TopDownPathCount*BottomUpPathCount;
      // Overflow occurred if any of the upper bits of Product are set or if all
      // the lower bits of Product are set.
      return (Product >> 32) ||
             ((PathCount = Product) == OverflowOccurredValue);
    }
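    // Worked example (illustrative, not from the original source): with
    // TopDownPathCount == 3 and BottomUpPathCount == 2, Product is 6, its
    // upper 32 bits are clear, and PathCount becomes 6. With counts of
    // 0x10000 and 0x10000, Product is 2^32, (Product >> 32) is nonzero, and
    // the function reports overflow so callers fall back to conservative
    // behavior.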

    // Specialized CFG utilities.
    using edge_iterator = SmallVectorImpl<BasicBlock *>::const_iterator;

    edge_iterator pred_begin() const { return Preds.begin(); }
    edge_iterator pred_end() const { return Preds.end(); }
    edge_iterator succ_begin() const { return Succs.begin(); }
    edge_iterator succ_end() const { return Succs.end(); }

    void addSucc(BasicBlock *Succ) { Succs.push_back(Succ); }
    void addPred(BasicBlock *Pred) { Preds.push_back(Pred); }

    bool isExit() const { return Succs.empty(); }
  };

} // end anonymous namespace

const unsigned BBState::OverflowOccurredValue = 0xffffffff;

namespace llvm {

raw_ostream &operator<<(raw_ostream &OS,
                        BBState &BBState) LLVM_ATTRIBUTE_UNUSED;

} // end namespace llvm

void BBState::InitFromPred(const BBState &Other) {
  PerPtrTopDown = Other.PerPtrTopDown;
  TopDownPathCount = Other.TopDownPathCount;
}

void BBState::InitFromSucc(const BBState &Other) {
  PerPtrBottomUp = Other.PerPtrBottomUp;
  BottomUpPathCount = Other.BottomUpPathCount;
}

/// The top-down traversal uses this to merge information about predecessors to
/// form the initial state for a new block.
void BBState::MergePred(const BBState &Other) {
  if (TopDownPathCount == OverflowOccurredValue)
    return;

  // Other.TopDownPathCount can be 0, in which case it is either dead or a
  // loop backedge. Loop backedges are special.
  TopDownPathCount += Other.TopDownPathCount;

  // In order to be consistent, we clear the top down pointers when, by this
  // addition, TopDownPathCount becomes OverflowOccurredValue even though
  // "true" overflow has not occurred.
  if (TopDownPathCount == OverflowOccurredValue) {
    clearTopDownPointers();
    return;
  }

  // Check for overflow. If we have overflow, fall back to conservative
  // behavior.
  if (TopDownPathCount < Other.TopDownPathCount) {
    TopDownPathCount = OverflowOccurredValue;
    clearTopDownPointers();
    return;
  }

  // For each entry in the other set, if our set has an entry with the same key,
  // merge the entries. Otherwise, copy the entry and merge it with an empty
  // entry.
  for (auto MI = Other.top_down_ptr_begin(), ME = Other.top_down_ptr_end();
       MI != ME; ++MI) {
    auto Pair = PerPtrTopDown.insert(*MI);
    Pair.first->second.Merge(Pair.second ? TopDownPtrState() : MI->second,
                             /*TopDown=*/true);
  }

  // For each entry in our set, if the other set doesn't have an entry with the
  // same key, force it to merge with an empty entry.
  for (auto MI = top_down_ptr_begin(), ME = top_down_ptr_end(); MI != ME; ++MI)
    if (Other.PerPtrTopDown.find(MI->first) == Other.PerPtrTopDown.end())
      MI->second.Merge(TopDownPtrState(), /*TopDown=*/true);
}
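
// Usage sketch (illustrative, not from the original source): if predecessor A
// tracks %p in some sequence state and predecessor B does not track %p at
// all, the second loop above merges A's entry with an empty TopDownPtrState,
// conservatively weakening whatever was known about %p along the A path
// alone.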

/// The bottom-up traversal uses this to merge information about successors to
/// form the initial state for a new block.
void BBState::MergeSucc(const BBState &Other) {
  if (BottomUpPathCount == OverflowOccurredValue)
    return;

  // Other.BottomUpPathCount can be 0, in which case it is either dead or a
  // loop backedge. Loop backedges are special.
  BottomUpPathCount += Other.BottomUpPathCount;

  // In order to be consistent, we clear the bottom up pointers when, by this
  // addition, BottomUpPathCount becomes OverflowOccurredValue even though
  // "true" overflow has not occurred.
  if (BottomUpPathCount == OverflowOccurredValue) {
    clearBottomUpPointers();
    return;
  }

  // Check for overflow. If we have overflow, fall back to conservative
  // behavior.
  if (BottomUpPathCount < Other.BottomUpPathCount) {
    BottomUpPathCount = OverflowOccurredValue;
    clearBottomUpPointers();
    return;
  }

  // For each entry in the other set, if our set has an entry with the
  // same key, merge the entries. Otherwise, copy the entry and merge
  // it with an empty entry.
  for (auto MI = Other.bottom_up_ptr_begin(), ME = Other.bottom_up_ptr_end();
       MI != ME; ++MI) {
    auto Pair = PerPtrBottomUp.insert(*MI);
    Pair.first->second.Merge(Pair.second ? BottomUpPtrState() : MI->second,
                             /*TopDown=*/false);
  }

  // For each entry in our set, if the other set doesn't have an entry
  // with the same key, force it to merge with an empty entry.
  for (auto MI = bottom_up_ptr_begin(), ME = bottom_up_ptr_end(); MI != ME;
       ++MI)
    if (Other.PerPtrBottomUp.find(MI->first) == Other.PerPtrBottomUp.end())
      MI->second.Merge(BottomUpPtrState(), /*TopDown=*/false);
}

raw_ostream &llvm::operator<<(raw_ostream &OS, BBState &BBInfo) {
  // Dump the pointers we are tracking.
  OS << "    TopDown State:\n";
  if (!BBInfo.hasTopDownPtrs()) {
    DEBUG(dbgs() << "        NONE!\n");
  } else {
    for (auto I = BBInfo.top_down_ptr_begin(), E = BBInfo.top_down_ptr_end();
         I != E; ++I) {
      const PtrState &P = I->second;
      OS << "        Ptr: " << *I->first
         << "\n            KnownSafe:        " << (P.IsKnownSafe()?"true":"false")
         << "\n            ImpreciseRelease: "
         << (P.IsTrackingImpreciseReleases()?"true":"false") << "\n"
         << "            HasCFGHazards:    "
         << (P.IsCFGHazardAfflicted()?"true":"false") << "\n"
         << "            KnownPositive:    "
         << (P.HasKnownPositiveRefCount()?"true":"false") << "\n"
         << "            Seq:              "
         << P.GetSeq() << "\n";
    }
  }

  OS << "    BottomUp State:\n";
  if (!BBInfo.hasBottomUpPtrs()) {
    DEBUG(dbgs() << "        NONE!\n");
  } else {
    for (auto I = BBInfo.bottom_up_ptr_begin(), E = BBInfo.bottom_up_ptr_end();
         I != E; ++I) {
      const PtrState &P = I->second;
      OS << "        Ptr: " << *I->first
         << "\n            KnownSafe:        " << (P.IsKnownSafe()?"true":"false")
         << "\n            ImpreciseRelease: "
         << (P.IsTrackingImpreciseReleases()?"true":"false") << "\n"
         << "            HasCFGHazards:    "
         << (P.IsCFGHazardAfflicted()?"true":"false") << "\n"
         << "            KnownPositive:    "
         << (P.HasKnownPositiveRefCount()?"true":"false") << "\n"
         << "            Seq:              "
         << P.GetSeq() << "\n";
    }
  }

  return OS;
}

namespace {

  /// \brief The main ARC optimization pass.
  class ObjCARCOpt : public FunctionPass {
    bool Changed;
    ProvenanceAnalysis PA;

    /// A cache of references to runtime entry point constants.
    ARCRuntimeEntryPoints EP;

    /// A cache of MDKinds that can be passed into other functions to propagate
    /// MDKind identifiers.
    ARCMDKindCache MDKindCache;

    /// A flag indicating whether this optimization pass should run.
    bool Run;

    /// Flags which determine whether each of the interesting runtime functions
    /// is in fact used in the current function.
    unsigned UsedInThisFunction;

    bool OptimizeRetainRVCall(Function &F, Instruction *RetainRV);
    void OptimizeAutoreleaseRVCall(Function &F, Instruction *AutoreleaseRV,
                                   ARCInstKind &Class);
    void OptimizeIndividualCalls(Function &F);

    void CheckForCFGHazards(const BasicBlock *BB,
                            DenseMap<const BasicBlock *, BBState> &BBStates,
                            BBState &MyStates) const;
    bool VisitInstructionBottomUp(Instruction *Inst, BasicBlock *BB,
                                  BlotMapVector<Value *, RRInfo> &Retains,
                                  BBState &MyStates);
    bool VisitBottomUp(BasicBlock *BB,
                       DenseMap<const BasicBlock *, BBState> &BBStates,
                       BlotMapVector<Value *, RRInfo> &Retains);
    bool VisitInstructionTopDown(Instruction *Inst,
                                 DenseMap<Value *, RRInfo> &Releases,
                                 BBState &MyStates);
    bool VisitTopDown(BasicBlock *BB,
                      DenseMap<const BasicBlock *, BBState> &BBStates,
                      DenseMap<Value *, RRInfo> &Releases);
    bool Visit(Function &F, DenseMap<const BasicBlock *, BBState> &BBStates,
               BlotMapVector<Value *, RRInfo> &Retains,
               DenseMap<Value *, RRInfo> &Releases);

    void MoveCalls(Value *Arg, RRInfo &RetainsToMove, RRInfo &ReleasesToMove,
                   BlotMapVector<Value *, RRInfo> &Retains,
                   DenseMap<Value *, RRInfo> &Releases,
                   SmallVectorImpl<Instruction *> &DeadInsts, Module *M);

    bool
    PairUpRetainsAndReleases(DenseMap<const BasicBlock *, BBState> &BBStates,
                             BlotMapVector<Value *, RRInfo> &Retains,
                             DenseMap<Value *, RRInfo> &Releases, Module *M,
                             Instruction *Retain,
                             SmallVectorImpl<Instruction *> &DeadInsts,
                             RRInfo &RetainsToMove, RRInfo &ReleasesToMove,
                             Value *Arg, bool KnownSafe,
                             bool &AnyPairsCompletelyEliminated);

    bool PerformCodePlacement(DenseMap<const BasicBlock *, BBState> &BBStates,
                              BlotMapVector<Value *, RRInfo> &Retains,
                              DenseMap<Value *, RRInfo> &Releases, Module *M);

    void OptimizeWeakCalls(Function &F);

    bool OptimizeSequences(Function &F);

    void OptimizeReturns(Function &F);

#ifndef NDEBUG
    void GatherStatistics(Function &F, bool AfterOptimization = false);
#endif

    void getAnalysisUsage(AnalysisUsage &AU) const override;
    bool doInitialization(Module &M) override;
    bool runOnFunction(Function &F) override;
    void releaseMemory() override;

  public:
    static char ID;

    ObjCARCOpt() : FunctionPass(ID) {
      initializeObjCARCOptPass(*PassRegistry::getPassRegistry());
    }
  };

} // end anonymous namespace

char ObjCARCOpt::ID = 0;

INITIALIZE_PASS_BEGIN(ObjCARCOpt,
                      "objc-arc", "ObjC ARC optimization", false, false)
INITIALIZE_PASS_DEPENDENCY(ObjCARCAAWrapperPass)
INITIALIZE_PASS_END(ObjCARCOpt,
                    "objc-arc", "ObjC ARC optimization", false, false)

Pass *llvm::createObjCARCOptPass() {
  return new ObjCARCOpt();
}

void ObjCARCOpt::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<ObjCARCAAWrapperPass>();
  AU.addRequired<AAResultsWrapperPass>();
  // ARC optimization doesn't currently split critical edges.
  AU.setPreservesCFG();
}

/// Turn objc_retainAutoreleasedReturnValue into objc_retain if the operand is
/// not a return value. Or, if it can be paired with an
/// objc_autoreleaseReturnValue, delete the pair and return true.
bool
ObjCARCOpt::OptimizeRetainRVCall(Function &F, Instruction *RetainRV) {
  // Check for the argument being from an immediately preceding call or invoke.
  const Value *Arg = GetArgRCIdentityRoot(RetainRV);
  ImmutableCallSite CS(Arg);
  if (const Instruction *Call = CS.getInstruction()) {
    if (Call->getParent() == RetainRV->getParent()) {
      BasicBlock::const_iterator I(Call);
      ++I;
      while (IsNoopInstruction(&*I))
        ++I;
      if (&*I == RetainRV)
        return false;
    } else if (const InvokeInst *II = dyn_cast<InvokeInst>(Call)) {
      BasicBlock *RetainRVParent = RetainRV->getParent();
      if (II->getNormalDest() == RetainRVParent) {
        BasicBlock::const_iterator I = RetainRVParent->begin();
        while (IsNoopInstruction(&*I))
          ++I;
        if (&*I == RetainRV)
          return false;
      }
    }
  }

  // Check for being preceded by an objc_autoreleaseReturnValue on the same
  // pointer. In this case, we can delete the pair.
  BasicBlock::iterator I = RetainRV->getIterator(),
                       Begin = RetainRV->getParent()->begin();
  if (I != Begin) {
    do
      --I;
    while (I != Begin && IsNoopInstruction(&*I));
    if (GetBasicARCInstKind(&*I) == ARCInstKind::AutoreleaseRV &&
        GetArgRCIdentityRoot(&*I) == Arg) {
      Changed = true;
      ++NumPeeps;

      DEBUG(dbgs() << "Erasing autoreleaseRV,retainRV pair: " << *I << "\n"
                   << "Erasing " << *RetainRV << "\n");

      EraseInstruction(&*I);
      EraseInstruction(RetainRV);
      return true;
    }
  }

  // Turn it to a plain objc_retain.
  Changed = true;
  ++NumPeeps;

  DEBUG(dbgs() << "Transforming objc_retainAutoreleasedReturnValue => "
                  "objc_retain since the operand is not a return value.\n"
                  "Old = " << *RetainRV << "\n");

  Constant *NewDecl = EP.get(ARCRuntimeEntryPointKind::Retain);
  cast<CallInst>(RetainRV)->setCalledFunction(NewDecl);

  DEBUG(dbgs() << "New = " << *RetainRV << "\n");

  return false;
}
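
// Illustrative sketch (not from the original source) of the pair this routine
// deletes. Given IR of the form:
//
//   %v = call i8* @objc_autoreleaseReturnValue(i8* %p)
//   %w = call i8* @objc_retainAutoreleasedReturnValue(i8* %v)
//
// both calls cancel out and are erased. When the retainRV's operand is not a
// return value at all, the call is merely downgraded to plain @objc_retain.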

/// Turn objc_autoreleaseReturnValue into objc_autorelease if the result is not
/// used as a return value.
void ObjCARCOpt::OptimizeAutoreleaseRVCall(Function &F,
                                           Instruction *AutoreleaseRV,
                                           ARCInstKind &Class) {
  // Check for a return of the pointer value.
  const Value *Ptr = GetArgRCIdentityRoot(AutoreleaseRV);

  // If the argument is ConstantPointerNull or UndefValue, its other users
  // aren't actually interesting to look at.
  if (isa<ConstantData>(Ptr))
    return;

  SmallVector<const Value *, 2> Users;
  Users.push_back(Ptr);
  do {
    Ptr = Users.pop_back_val();
    for (const User *U : Ptr->users()) {
      if (isa<ReturnInst>(U) || GetBasicARCInstKind(U) == ARCInstKind::RetainRV)
        return;
      if (isa<BitCastInst>(U))
        Users.push_back(U);
    }
  } while (!Users.empty());

  Changed = true;
  ++NumPeeps;

  DEBUG(dbgs() << "Transforming objc_autoreleaseReturnValue => "
                  "objc_autorelease since its operand is not used as a return "
                  "value.\n"
                  "Old = " << *AutoreleaseRV << "\n");

  CallInst *AutoreleaseRVCI = cast<CallInst>(AutoreleaseRV);
  Constant *NewDecl = EP.get(ARCRuntimeEntryPointKind::Autorelease);
  AutoreleaseRVCI->setCalledFunction(NewDecl);
  AutoreleaseRVCI->setTailCall(false); // Never tail call objc_autorelease.
  Class = ARCInstKind::Autorelease;

  DEBUG(dbgs() << "New: " << *AutoreleaseRV << "\n");
}
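
// Usage note (illustrative, not from the original source): the downgrade is
// only legal because no user of the value is a ret or a retainRV. For
// example, in
//
//   %v = call i8* @objc_autoreleaseReturnValue(i8* %p)
//   store i8* %v, i8** %slot
//
// the value never reaches a return, so a plain, non-tail @objc_autorelease
// call is equivalent and less constraining for code generation.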

/// Visit each call, one at a time, and make simplifications without doing any
/// additional analysis.
void ObjCARCOpt::OptimizeIndividualCalls(Function &F) {
  DEBUG(dbgs() << "\n== ObjCARCOpt::OptimizeIndividualCalls ==\n");
  // Reset all the flags in preparation for recomputing them.
  UsedInThisFunction = 0;

  // Visit all objc_* calls in F.
  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
    Instruction *Inst = &*I++;

    ARCInstKind Class = GetBasicARCInstKind(Inst);

    DEBUG(dbgs() << "Visiting: Class: " << Class << "; " << *Inst << "\n");

    switch (Class) {
    default: break;

    // Delete no-op casts. These function calls have special semantics, but
    // the semantics are entirely implemented via lowering in the front-end,
    // so by the time they reach the optimizer, they are just no-op calls
    // which return their argument.
    //
    // There are gray areas here, as the ability to cast reference-counted
    // pointers to raw void* and back allows code to break ARC assumptions,
    // however these are currently considered to be unimportant.
    case ARCInstKind::NoopCast:
      Changed = true;
      ++NumNoops;
      DEBUG(dbgs() << "Erasing no-op cast: " << *Inst << "\n");
      EraseInstruction(Inst);
      continue;

    // If the pointer-to-weak-pointer is null, it's undefined behavior.
    case ARCInstKind::StoreWeak:
    case ARCInstKind::LoadWeak:
    case ARCInstKind::LoadWeakRetained:
    case ARCInstKind::InitWeak:
    case ARCInstKind::DestroyWeak: {
      CallInst *CI = cast<CallInst>(Inst);
      if (IsNullOrUndef(CI->getArgOperand(0))) {
        Changed = true;
        Type *Ty = CI->getArgOperand(0)->getType();
        new StoreInst(UndefValue::get(cast<PointerType>(Ty)->getElementType()),
                      Constant::getNullValue(Ty),
                      CI);
        Value *NewValue = UndefValue::get(CI->getType());
        DEBUG(dbgs() << "A null pointer-to-weak-pointer is undefined behavior."
                        "\nOld = " << *CI << "\nNew = " << *NewValue << "\n");
        CI->replaceAllUsesWith(NewValue);
        CI->eraseFromParent();
        continue;
      }
      break;
    }
    case ARCInstKind::CopyWeak:
    case ARCInstKind::MoveWeak: {
      CallInst *CI = cast<CallInst>(Inst);
      if (IsNullOrUndef(CI->getArgOperand(0)) ||
          IsNullOrUndef(CI->getArgOperand(1))) {
        Changed = true;
        Type *Ty = CI->getArgOperand(0)->getType();
        new StoreInst(UndefValue::get(cast<PointerType>(Ty)->getElementType()),
                      Constant::getNullValue(Ty),
                      CI);

        Value *NewValue = UndefValue::get(CI->getType());
        DEBUG(dbgs() << "A null pointer-to-weak-pointer is undefined behavior."
                        "\nOld = " << *CI << "\nNew = " << *NewValue << "\n");

        CI->replaceAllUsesWith(NewValue);
        CI->eraseFromParent();
        continue;
      }
      break;
    }
    case ARCInstKind::RetainRV:
      if (OptimizeRetainRVCall(F, Inst))
        continue;
      break;
    case ARCInstKind::AutoreleaseRV:
      OptimizeAutoreleaseRVCall(F, Inst, Class);
      break;
    }

    // objc_autorelease(x) -> objc_release(x) if x is otherwise unused.
    if (IsAutorelease(Class) && Inst->use_empty()) {
      CallInst *Call = cast<CallInst>(Inst);
      const Value *Arg = Call->getArgOperand(0);
      Arg = FindSingleUseIdentifiedObject(Arg);
      if (Arg) {
        Changed = true;
        ++NumAutoreleases;

        // Create the declaration lazily.
        LLVMContext &C = Inst->getContext();

        Constant *Decl = EP.get(ARCRuntimeEntryPointKind::Release);
        CallInst *NewCall = CallInst::Create(Decl, Call->getArgOperand(0), "",
                                             Call);
        NewCall->setMetadata(MDKindCache.get(ARCMDKindID::ImpreciseRelease),
                             MDNode::get(C, None));

        DEBUG(dbgs() << "Replacing autorelease{,RV}(x) with objc_release(x) "
                        "since x is otherwise unused.\nOld: " << *Call << "\nNew: "
                     << *NewCall << "\n");

        EraseInstruction(Call);
        Inst = NewCall;
        Class = ARCInstKind::Release;
      }
    }

    // For functions which can never be passed stack arguments, add
    // a tail keyword.
    if (IsAlwaysTail(Class)) {
      Changed = true;
      DEBUG(dbgs() << "Adding tail keyword to function since it can never be "
                      "passed stack args: " << *Inst << "\n");
      cast<CallInst>(Inst)->setTailCall();
    }

    // Ensure that functions that can never have a "tail" keyword due to the
    // semantics of ARC truly do not do so.
    if (IsNeverTail(Class)) {
      Changed = true;
      DEBUG(dbgs() << "Removing tail keyword from function: " << *Inst <<
                      "\n");
      cast<CallInst>(Inst)->setTailCall(false);
    }

    // Set nounwind as needed.
    if (IsNoThrow(Class)) {
      Changed = true;
      DEBUG(dbgs() << "Found no throw class. Setting nounwind on: " << *Inst
                   << "\n");
      cast<CallInst>(Inst)->setDoesNotThrow();
    }

    if (!IsNoopOnNull(Class)) {
      UsedInThisFunction |= 1 << unsigned(Class);
      continue;
    }

    const Value *Arg = GetArgRCIdentityRoot(Inst);

    // ARC calls with null are no-ops. Delete them.
    if (IsNullOrUndef(Arg)) {
      Changed = true;
      ++NumNoops;
      DEBUG(dbgs() << "ARC calls with null are no-ops. Erasing: " << *Inst
                   << "\n");
      EraseInstruction(Inst);
      continue;
    }

    // Keep track of which of retain, release, autorelease, and retain_block
    // are actually present in this function.
    UsedInThisFunction |= 1 << unsigned(Class);

    // If Arg is a PHI, and one or more incoming values to the
    // PHI are null, and the call is control-equivalent to the PHI, and there
    // are no relevant side effects between the PHI and the call, and the call
    // is not a release that doesn't have the clang.imprecise_release tag, the
    // call could be pushed up to just those paths with non-null incoming
    // values. For now, don't bother splitting critical edges for this.
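    //
    // Illustrative sketch (not from the original source): given
    //
    //   %p = phi i8* [ null, %bb1 ], [ %x, %bb2 ]
    //   call void @objc_release(i8* %p)
    //
    // the release is a no-op along the %bb1 path, so it can be cloned into
    // %bb2 alone (operating directly on %x) and the original call erased,
    // which is what the worklist loop below does.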
    if (Class == ARCInstKind::Release &&
        !Inst->getMetadata(MDKindCache.get(ARCMDKindID::ImpreciseRelease)))
      continue;

    SmallVector<std::pair<Instruction *, const Value *>, 4> Worklist;
    Worklist.push_back(std::make_pair(Inst, Arg));
    do {
      std::pair<Instruction *, const Value *> Pair = Worklist.pop_back_val();
      Inst = Pair.first;
      Arg = Pair.second;

      const PHINode *PN = dyn_cast<PHINode>(Arg);
      if (!PN) continue;

      // Determine if the PHI has any null operands, or any incoming
      // critical edges.
      bool HasNull = false;
      bool HasCriticalEdges = false;
      for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
        Value *Incoming =
            GetRCIdentityRoot(PN->getIncomingValue(i));
        if (IsNullOrUndef(Incoming))
          HasNull = true;
        else if (cast<TerminatorInst>(PN->getIncomingBlock(i)->back())
                     .getNumSuccessors() != 1) {
          HasCriticalEdges = true;
          break;
        }
      }
      // If we have null operands and no critical edges, optimize.
      if (!HasCriticalEdges && HasNull) {
        SmallPtrSet<Instruction *, 4> DependingInstructions;
        SmallPtrSet<const BasicBlock *, 4> Visited;

        // Check that there is nothing that cares about the reference
        // count between the call and the phi.
        switch (Class) {
        case ARCInstKind::Retain:
        case ARCInstKind::RetainBlock:
          // These can always be moved up.
          break;
        case ARCInstKind::Release:
          // These can't be moved across things that care about the retain
          // count.
          FindDependencies(NeedsPositiveRetainCount, Arg,
                           Inst->getParent(), Inst,
                           DependingInstructions, Visited, PA);
          break;
        case ARCInstKind::Autorelease:
          // These can't be moved across autorelease pool scope boundaries.
          FindDependencies(AutoreleasePoolBoundary, Arg,
                           Inst->getParent(), Inst,
                           DependingInstructions, Visited, PA);
          break;
        case ARCInstKind::ClaimRV:
        case ARCInstKind::RetainRV:
        case ARCInstKind::AutoreleaseRV:
          // Don't move these; the RV optimization depends on the autoreleaseRV
          // being tail called, and the retainRV being immediately after a call
          // (which might still happen if we get lucky with codegen layout, but
          // it's not worth taking the chance).
          continue;
        default:
          llvm_unreachable("Invalid dependence flavor");
        }

        if (DependingInstructions.size() == 1 &&
            *DependingInstructions.begin() == PN) {
          Changed = true;
          ++NumPartialNoops;
          // Clone the call into each predecessor that has a non-null value.
          CallInst *CInst = cast<CallInst>(Inst);
          Type *ParamTy = CInst->getArgOperand(0)->getType();
          for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
            Value *Incoming =
                GetRCIdentityRoot(PN->getIncomingValue(i));
            if (!IsNullOrUndef(Incoming)) {
              CallInst *Clone = cast<CallInst>(CInst->clone());
              Value *Op = PN->getIncomingValue(i);
              Instruction *InsertPos = &PN->getIncomingBlock(i)->back();
              if (Op->getType() != ParamTy)
                Op = new BitCastInst(Op, ParamTy, "", InsertPos);
              Clone->setArgOperand(0, Op);
              Clone->insertBefore(InsertPos);

              DEBUG(dbgs() << "Cloning "
                           << *CInst << "\n"
                              "And inserting clone at " << *InsertPos << "\n");
              Worklist.push_back(std::make_pair(Clone, Incoming));
            }
          }
          // Erase the original call.
          DEBUG(dbgs() << "Erasing: " << *CInst << "\n");
          EraseInstruction(CInst);
          continue;
        }
      }
    } while (!Worklist.empty());
  }
}

/// If we have a top down pointer in the S_Use state, make sure that there are
/// no CFG hazards by checking the states of various bottom up pointers.
static void CheckForUseCFGHazard(const Sequence SuccSSeq,
                                 const bool SuccSRRIKnownSafe,
                                 TopDownPtrState &S,
                                 bool &SomeSuccHasSame,
                                 bool &AllSuccsHaveSame,
                                 bool &NotAllSeqEqualButKnownSafe,
                                 bool &ShouldContinue) {
  switch (SuccSSeq) {
  case S_CanRelease: {
    if (!S.IsKnownSafe() && !SuccSRRIKnownSafe) {
      S.ClearSequenceProgress();
      break;
    }
    S.SetCFGHazardAfflicted(true);
    ShouldContinue = true;
    break;
  }
  case S_Use:
    SomeSuccHasSame = true;
    break;
  case S_Stop:
  case S_Release:
  case S_MovableRelease:
    if (!S.IsKnownSafe() && !SuccSRRIKnownSafe)
      AllSuccsHaveSame = false;
    else
      NotAllSeqEqualButKnownSafe = true;
    break;
  case S_Retain:
    llvm_unreachable("bottom-up pointer in retain state!");
  case S_None:
    llvm_unreachable("This should have been handled earlier.");
  }
}

/// If we have a Top Down pointer in the S_CanRelease state, make sure that
/// there are no CFG hazards by checking the states of various bottom up
/// pointers.
static void CheckForCanReleaseCFGHazard(const Sequence SuccSSeq,
                                        const bool SuccSRRIKnownSafe,
                                        TopDownPtrState &S,
                                        bool &SomeSuccHasSame,
                                        bool &AllSuccsHaveSame,
                                        bool &NotAllSeqEqualButKnownSafe) {
  switch (SuccSSeq) {
  case S_CanRelease:
    SomeSuccHasSame = true;
    break;
  case S_Stop:
  case S_Release:
  case S_MovableRelease:
  case S_Use:
    if (!S.IsKnownSafe() && !SuccSRRIKnownSafe)
      AllSuccsHaveSame = false;
    else
      NotAllSeqEqualButKnownSafe = true;
    break;
  case S_Retain:
    llvm_unreachable("bottom-up pointer in retain state!");
  case S_None:
    llvm_unreachable("This should have been handled earlier.");
  }
}
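
// Illustrative CFG sketch (not from the original source): if a top-down
// pointer in S_Use flows into two successors where, bottom-up, one successor
// saw the matching release and the other did not, the states disagree across
// that edge. Moving the retain/release pair across it could change how often
// the calls execute, so the helpers above either clear the sequence progress
// or mark the state CFGHazardAfflicted.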

/// Check for critical edges, loop boundaries, irreducible control flow, or
/// other CFG structures where moving code across the edge would result in it
/// being executed more.
void
ObjCARCOpt::CheckForCFGHazards(const BasicBlock *BB,
                               DenseMap<const BasicBlock *, BBState> &BBStates,
                               BBState &MyStates) const {
  // If any top-down local-use or possible-dec has a succ which is earlier in
  // the sequence, forget it.
  for (auto I = MyStates.top_down_ptr_begin(), E = MyStates.top_down_ptr_end();
       I != E; ++I) {
    TopDownPtrState &S = I->second;
    const Sequence Seq = I->second.GetSeq();

    // We only care about S_Retain, S_CanRelease, and S_Use.
    if (Seq == S_None)
      continue;

    // Make sure that if extra top down states are added in the future that this
    // code is updated to handle it.
    assert((Seq == S_Retain || Seq == S_CanRelease || Seq == S_Use) &&
           "Unknown top down sequence state.");

    const Value *Arg = I->first;
    const TerminatorInst *TI = cast<TerminatorInst>(&BB->back());
    bool SomeSuccHasSame = false;
    bool AllSuccsHaveSame = true;
    bool NotAllSeqEqualButKnownSafe = false;

    succ_const_iterator SI(TI), SE(TI, false);

    for (; SI != SE; ++SI) {
      // If VisitBottomUp has pointer information for this successor, take
      // what we know about it.
      DenseMap<const BasicBlock *, BBState>::iterator BBI =
          BBStates.find(*SI);
      assert(BBI != BBStates.end());
      const BottomUpPtrState &SuccS = BBI->second.getPtrBottomUpState(Arg);
      const Sequence SuccSSeq = SuccS.GetSeq();

      // If, bottom up, the pointer is in an S_None state, clear the sequence
      // progress, since the sequence in the bottom up state finished,
      // suggesting a mismatch between retains and releases. This is true for
      // all three cases that we are handling here: S_Retain, S_Use, and
      // S_CanRelease.
      if (SuccSSeq == S_None) {
        S.ClearSequenceProgress();
        continue;
      }

      // If we have S_Use or S_CanRelease, perform our CFG hazard checks.
      const bool SuccSRRIKnownSafe = SuccS.IsKnownSafe();

      // *NOTE* We do not use Seq from above here since we are allowing for
      // S.GetSeq() to change while we are visiting basic blocks.
      switch(S.GetSeq()) {
      case S_Use: {
        bool ShouldContinue = false;
        CheckForUseCFGHazard(SuccSSeq, SuccSRRIKnownSafe, S, SomeSuccHasSame,
                             AllSuccsHaveSame, NotAllSeqEqualButKnownSafe,
                             ShouldContinue);
        if (ShouldContinue)
          continue;
        break;
      }
      case S_CanRelease:
        CheckForCanReleaseCFGHazard(SuccSSeq, SuccSRRIKnownSafe, S,
                                    SomeSuccHasSame, AllSuccsHaveSame,
                                    NotAllSeqEqualButKnownSafe);
        break;
      case S_Retain:
      case S_None:
      case S_Stop:
      case S_Release:
      case S_MovableRelease:
        break;
      }
    }

    // If the state at the other end of any of the successor edges
    // matches the current state, require all edges to match. This
    // guards against loops in the middle of a sequence.
    if (SomeSuccHasSame && !AllSuccsHaveSame) {
      S.ClearSequenceProgress();
    } else if (NotAllSeqEqualButKnownSafe) {
      // If we would have cleared the state foregoing the fact that we are known
      // safe, stop code motion. This is because whether or not it is safe to
      // remove RR pairs via KnownSafe is an orthogonal concept to whether we
      // are allowed to perform code motion.
      S.SetCFGHazardAfflicted(true);
    }
  }
}

bool ObjCARCOpt::VisitInstructionBottomUp(
    Instruction *Inst, BasicBlock *BB, BlotMapVector<Value *, RRInfo> &Retains,
    BBState &MyStates) {
  bool NestingDetected = false;
  ARCInstKind Class = GetARCInstKind(Inst);
  const Value *Arg = nullptr;

  DEBUG(dbgs() << "        Class: " << Class << "\n");

  switch (Class) {
  case ARCInstKind::Release: {
    Arg = GetArgRCIdentityRoot(Inst);

    BottomUpPtrState &S = MyStates.getPtrBottomUpState(Arg);
    NestingDetected |= S.InitBottomUp(MDKindCache, Inst);
    break;
  }
  case ARCInstKind::RetainBlock:
    // In OptimizeIndividualCalls, we have strength reduced all optimizable
    // objc_retainBlocks to objc_retains. Thus at this point any
    // objc_retainBlocks that we see are not optimizable.
    break;
  case ARCInstKind::Retain:
  case ARCInstKind::RetainRV: {
    Arg = GetArgRCIdentityRoot(Inst);
    BottomUpPtrState &S = MyStates.getPtrBottomUpState(Arg);
    if (S.MatchWithRetain()) {
      // Don't do retain+release tracking for ARCInstKind::RetainRV, because
      // it's better to let it remain as the first instruction after a call.
      if (Class != ARCInstKind::RetainRV) {
        DEBUG(dbgs() << "        Matching with: " << *Inst << "\n");
        Retains[Inst] = S.GetRRInfo();
      }
      S.ClearSequenceProgress();
    }
    // A retain moving bottom up can be a use.
    break;
  }
  case ARCInstKind::AutoreleasepoolPop:
    // Conservatively, clear MyStates for all known pointers.
    MyStates.clearBottomUpPointers();
    return NestingDetected;
  case ARCInstKind::AutoreleasepoolPush:
  case ARCInstKind::None:
    // These are irrelevant.
    return NestingDetected;
  default:
    break;
  }

  // Consider any other possible effects of this instruction on each
  // pointer being tracked.
  for (auto MI = MyStates.bottom_up_ptr_begin(),
            ME = MyStates.bottom_up_ptr_end();
       MI != ME; ++MI) {
    const Value *Ptr = MI->first;
    if (Ptr == Arg)
      continue; // Handled above.
    BottomUpPtrState &S = MI->second;

    if (S.HandlePotentialAlterRefCount(Inst, Ptr, PA, Class))
      continue;

    S.HandlePotentialUse(BB, Inst, Ptr, PA, Class);
  }

  return NestingDetected;
}

bool ObjCARCOpt::VisitBottomUp(BasicBlock *BB,
                               DenseMap<const BasicBlock *, BBState> &BBStates,
                               BlotMapVector<Value *, RRInfo> &Retains) {
  DEBUG(dbgs() << "\n== ObjCARCOpt::VisitBottomUp ==\n");

  bool NestingDetected = false;
  BBState &MyStates = BBStates[BB];

  // Merge the states from each successor to compute the initial state
  // for the current block.
  BBState::edge_iterator SI(MyStates.succ_begin()),
                         SE(MyStates.succ_end());
  if (SI != SE) {
    const BasicBlock *Succ = *SI;
    DenseMap<const BasicBlock *, BBState>::iterator I = BBStates.find(Succ);
    assert(I != BBStates.end());
    MyStates.InitFromSucc(I->second);
    ++SI;
    for (; SI != SE; ++SI) {
      Succ = *SI;
      I = BBStates.find(Succ);
      assert(I != BBStates.end());
      MyStates.MergeSucc(I->second);
    }
  }

  DEBUG(dbgs() << "Before:\n" << BBStates[BB] << "\n"
               << "Performing Dataflow:\n");

  // Visit all the instructions, bottom-up.
  for (BasicBlock::iterator I = BB->end(), E = BB->begin(); I != E; --I) {
    Instruction *Inst = &*std::prev(I);

    // Invoke instructions are visited as part of their successors (below).
    if (isa<InvokeInst>(Inst))
      continue;

    DEBUG(dbgs() << "    Visiting " << *Inst << "\n");

    NestingDetected |= VisitInstructionBottomUp(Inst, BB, Retains, MyStates);
  }

  // If there's a predecessor with an invoke, visit the invoke as if it were
  // part of this block, since we can't insert code after an invoke in its own
  // block, and we don't want to split critical edges.
  for (BBState::edge_iterator PI(MyStates.pred_begin()),
       PE(MyStates.pred_end()); PI != PE; ++PI) {
    BasicBlock *Pred = *PI;
    if (InvokeInst *II = dyn_cast<InvokeInst>(&Pred->back()))
      NestingDetected |= VisitInstructionBottomUp(II, BB, Retains, MyStates);
  }

  DEBUG(dbgs() << "\nFinal State:\n" << BBStates[BB] << "\n");

  return NestingDetected;
}

bool
ObjCARCOpt::VisitInstructionTopDown(Instruction *Inst,
                                    DenseMap<Value *, RRInfo> &Releases,
                                    BBState &MyStates) {
  bool NestingDetected = false;
  ARCInstKind Class = GetARCInstKind(Inst);
  const Value *Arg = nullptr;

  DEBUG(dbgs() << "        Class: " << Class << "\n");

  switch (Class) {
  case ARCInstKind::RetainBlock:
    // In OptimizeIndividualCalls, we have strength reduced all optimizable
    // objc_retainBlocks to objc_retains. Thus at this point any
    // objc_retainBlocks that we see are not optimizable. We need to break since
    // a retain can be a potential use.
    break;
  case ARCInstKind::Retain:
  case ARCInstKind::RetainRV: {
    Arg = GetArgRCIdentityRoot(Inst);
    TopDownPtrState &S = MyStates.getPtrTopDownState(Arg);
    NestingDetected |= S.InitTopDown(Class, Inst);
    // A retain can be a potential use; proceed to the generic checking
    // code below.
    break;
  }
  case ARCInstKind::Release: {
    Arg = GetArgRCIdentityRoot(Inst);
    TopDownPtrState &S = MyStates.getPtrTopDownState(Arg);
    // Try to form a tentative pair in between this release instruction and the
    // top down pointers that we are tracking.
    if (S.MatchWithRelease(MDKindCache, Inst)) {
      // If we succeed, copy S's RRInfo into the Release -> {Retain Set
      // Map}. Then we clear S.
      DEBUG(dbgs() << "        Matching with: " << *Inst << "\n");
      Releases[Inst] = S.GetRRInfo();
      S.ClearSequenceProgress();
    }
    break;
  }
  case ARCInstKind::AutoreleasepoolPop:
    // Conservatively, clear MyStates for all known pointers.
    MyStates.clearTopDownPointers();
    return false;
  case ARCInstKind::AutoreleasepoolPush:
  case ARCInstKind::None:
    // These cannot be uses of a tracked pointer.
    return false;
  default:
    break;
  }

  // Consider any other possible effects of this instruction on each
  // pointer being tracked.
  for (auto MI = MyStates.top_down_ptr_begin(),
            ME = MyStates.top_down_ptr_end();
       MI != ME; ++MI) {
    const Value *Ptr = MI->first;
    if (Ptr == Arg)
      continue; // Handled above.
    TopDownPtrState &S = MI->second;
    if (S.HandlePotentialAlterRefCount(Inst, Ptr, PA, Class))
      continue;

    S.HandlePotentialUse(Inst, Ptr, PA, Class);
  }

  return NestingDetected;
}

bool
ObjCARCOpt::VisitTopDown(BasicBlock *BB,
                         DenseMap<const BasicBlock *, BBState> &BBStates,
                         DenseMap<Value *, RRInfo> &Releases) {
  DEBUG(dbgs() << "\n== ObjCARCOpt::VisitTopDown ==\n");
  bool NestingDetected = false;
  BBState &MyStates = BBStates[BB];

  // Merge the states from each predecessor to compute the initial state
  // for the current block.
  BBState::edge_iterator PI(MyStates.pred_begin()),
                         PE(MyStates.pred_end());
  if (PI != PE) {
    const BasicBlock *Pred = *PI;
    DenseMap<const BasicBlock *, BBState>::iterator I = BBStates.find(Pred);
    assert(I != BBStates.end());
    MyStates.InitFromPred(I->second);
    ++PI;
    for (; PI != PE; ++PI) {
      Pred = *PI;
      I = BBStates.find(Pred);
      assert(I != BBStates.end());
      MyStates.MergePred(I->second);
    }
  }

  DEBUG(dbgs() << "Before:\n" << BBStates[BB] << "\n"
               << "Performing Dataflow:\n");

  // Visit all the instructions, top-down.
  for (Instruction &Inst : *BB) {
    DEBUG(dbgs() << "    Visiting " << Inst << "\n");

    NestingDetected |= VisitInstructionTopDown(&Inst, Releases, MyStates);
  }

  DEBUG(dbgs() << "\nState Before Checking for CFG Hazards:\n"
               << BBStates[BB] << "\n\n");
  CheckForCFGHazards(BB, BBStates, MyStates);
  DEBUG(dbgs() << "Final State:\n" << BBStates[BB] << "\n");
  return NestingDetected;
}

static void
ComputePostOrders(Function &F,
                  SmallVectorImpl<BasicBlock *> &PostOrder,
                  SmallVectorImpl<BasicBlock *> &ReverseCFGPostOrder,
                  unsigned NoObjCARCExceptionsMDKind,
                  DenseMap<const BasicBlock *, BBState> &BBStates) {
  /// The visited set, for doing DFS walks.
  SmallPtrSet<BasicBlock *, 16> Visited;

  // Do DFS, computing the PostOrder.
  SmallPtrSet<BasicBlock *, 16> OnStack;
  SmallVector<std::pair<BasicBlock *, succ_iterator>, 16> SuccStack;

  // Functions always have exactly one entry block, and we don't have
  // any other block that we treat like an entry block.
  BasicBlock *EntryBB = &F.getEntryBlock();
  BBState &MyStates = BBStates[EntryBB];
  MyStates.SetAsEntry();
  TerminatorInst *EntryTI = cast<TerminatorInst>(&EntryBB->back());
  SuccStack.push_back(std::make_pair(EntryBB, succ_iterator(EntryTI)));
  Visited.insert(EntryBB);
  OnStack.insert(EntryBB);
  do {
  dfs_next_succ:
    BasicBlock *CurrBB = SuccStack.back().first;
    TerminatorInst *TI = cast<TerminatorInst>(&CurrBB->back());
    succ_iterator SE(TI, false);

    while (SuccStack.back().second != SE) {
      BasicBlock *SuccBB = *SuccStack.back().second++;
      if (Visited.insert(SuccBB).second) {
        TerminatorInst *TI = cast<TerminatorInst>(&SuccBB->back());
        SuccStack.push_back(std::make_pair(SuccBB, succ_iterator(TI)));
        BBStates[CurrBB].addSucc(SuccBB);
        BBState &SuccStates = BBStates[SuccBB];
        SuccStates.addPred(CurrBB);
        OnStack.insert(SuccBB);
        goto dfs_next_succ;
      }

      if (!OnStack.count(SuccBB)) {
        BBStates[CurrBB].addSucc(SuccBB);
        BBStates[SuccBB].addPred(CurrBB);
      }
    }
    OnStack.erase(CurrBB);
    PostOrder.push_back(CurrBB);
    SuccStack.pop_back();
  } while (!SuccStack.empty());

  Visited.clear();

  // Do reverse-CFG DFS, computing the reverse-CFG PostOrder.
  // Functions may have many exits, and there are also blocks which we treat
  // as exits due to ignored edges.
  SmallVector<std::pair<BasicBlock *, BBState::edge_iterator>, 16> PredStack;
  for (BasicBlock &ExitBB : F) {
    BBState &MyStates = BBStates[&ExitBB];
    if (!MyStates.isExit())
      continue;

    MyStates.SetAsExit();

    PredStack.push_back(std::make_pair(&ExitBB, MyStates.pred_begin()));
    Visited.insert(&ExitBB);
    while (!PredStack.empty()) {
    reverse_dfs_next_succ:
      BBState::edge_iterator PE = BBStates[PredStack.back().first].pred_end();
      while (PredStack.back().second != PE) {
        BasicBlock *BB = *PredStack.back().second++;
        if (Visited.insert(BB).second) {
          PredStack.push_back(std::make_pair(BB, BBStates[BB].pred_begin()));
          goto reverse_dfs_next_succ;
        }
      }
      ReverseCFGPostOrder.push_back(PredStack.pop_back_val().first);
    }
  }
}

// Visit the function both top-down and bottom-up.
bool ObjCARCOpt::Visit(Function &F,
                       DenseMap<const BasicBlock *, BBState> &BBStates,
                       BlotMapVector<Value *, RRInfo> &Retains,
                       DenseMap<Value *, RRInfo> &Releases) {
  // Use reverse-postorder traversals, because we magically know that loops
  // will be well behaved, i.e. they won't repeatedly call retain on a single
  // pointer without doing a release. We can't use the ReversePostOrderTraversal
  // class here because we want the reverse-CFG postorder to consider each
  // function exit point, and we want to ignore selected cycle edges.
  SmallVector<BasicBlock *, 16> PostOrder;
  SmallVector<BasicBlock *, 16> ReverseCFGPostOrder;
  ComputePostOrders(F, PostOrder, ReverseCFGPostOrder,
                    MDKindCache.get(ARCMDKindID::NoObjCARCExceptions),
                    BBStates);

  // Use reverse-postorder on the reverse CFG for bottom-up.
  bool BottomUpNestingDetected = false;
  for (BasicBlock *BB : llvm::reverse(ReverseCFGPostOrder))
    BottomUpNestingDetected |= VisitBottomUp(BB, BBStates, Retains);

  // Use reverse-postorder for top-down.
  bool TopDownNestingDetected = false;
  for (BasicBlock *BB : llvm::reverse(PostOrder))
    TopDownNestingDetected |= VisitTopDown(BB, BBStates, Releases);

  return TopDownNestingDetected && BottomUpNestingDetected;
}

/// Move the calls in RetainsToMove and ReleasesToMove.
void ObjCARCOpt::MoveCalls(Value *Arg, RRInfo &RetainsToMove,
                           RRInfo &ReleasesToMove,
                           BlotMapVector<Value *, RRInfo> &Retains,
                           DenseMap<Value *, RRInfo> &Releases,
                           SmallVectorImpl<Instruction *> &DeadInsts,
                           Module *M) {
  Type *ArgTy = Arg->getType();
  Type *ParamTy = PointerType::getUnqual(Type::getInt8Ty(ArgTy->getContext()));

  DEBUG(dbgs() << "== ObjCARCOpt::MoveCalls ==\n");

  // Insert the new retain and release calls.
  for (Instruction *InsertPt : ReleasesToMove.ReverseInsertPts) {
    Value *MyArg = ArgTy == ParamTy ? Arg :
                   new BitCastInst(Arg, ParamTy, "", InsertPt);
    Constant *Decl = EP.get(ARCRuntimeEntryPointKind::Retain);
    CallInst *Call = CallInst::Create(Decl, MyArg, "", InsertPt);
    Call->setDoesNotThrow();
    Call->setTailCall();

    DEBUG(dbgs() << "Inserting new Retain: " << *Call << "\n"
                    "At insertion point: " << *InsertPt << "\n");
  }
  for (Instruction *InsertPt : RetainsToMove.ReverseInsertPts) {
    Value *MyArg = ArgTy == ParamTy ? Arg :
                   new BitCastInst(Arg, ParamTy, "", InsertPt);
    Constant *Decl = EP.get(ARCRuntimeEntryPointKind::Release);
    CallInst *Call = CallInst::Create(Decl, MyArg, "", InsertPt);
    // Attach a clang.imprecise_release metadata tag, if appropriate.
    if (MDNode *M = ReleasesToMove.ReleaseMetadata)
      Call->setMetadata(MDKindCache.get(ARCMDKindID::ImpreciseRelease), M);
    Call->setDoesNotThrow();
    if (ReleasesToMove.IsTailCallRelease)
      Call->setTailCall();

    DEBUG(dbgs() << "Inserting new Release: " << *Call << "\n"
                    "At insertion point: " << *InsertPt << "\n");
  }

  // Delete the original retain and release calls.
  for (Instruction *OrigRetain : RetainsToMove.Calls) {
    Retains.blot(OrigRetain);
    DeadInsts.push_back(OrigRetain);
    DEBUG(dbgs() << "Deleting retain: " << *OrigRetain << "\n");
  }
  for (Instruction *OrigRelease : ReleasesToMove.Calls) {
    Releases.erase(OrigRelease);
    DeadInsts.push_back(OrigRelease);
    DEBUG(dbgs() << "Deleting release: " << *OrigRelease << "\n");
  }
}
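
// Design note (an interpretation, not from the original source): new retains
// are inserted at the release set's ReverseInsertPts and vice versa because
// each side of the pair is being moved toward the other. The original calls
// are queued on DeadInsts rather than erased immediately, so iteration over
// Retains and Releases in the caller stays valid.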
1510 
1511 bool ObjCARCOpt::PairUpRetainsAndReleases(
1514  DenseMap<Value *, RRInfo> &Releases, Module *M,
1516  SmallVectorImpl<Instruction *> &DeadInsts, RRInfo &RetainsToMove,
1517  RRInfo &ReleasesToMove, Value *Arg, bool KnownSafe,
1518  bool &AnyPairsCompletelyEliminated) {
1519  // If a pair happens in a region where it is known that the reference count
1520  // is already incremented, we can similarly ignore possible decrements unless
1521  // we are dealing with a retainable object with multiple provenance sources.
1522  bool KnownSafeTD = true, KnownSafeBU = true;
1523  bool CFGHazardAfflicted = false;
1524 
1525  // Connect the dots between the top-down-collected RetainsToMove and
1526  // bottom-up-collected ReleasesToMove to form sets of related calls.
1527  // This is an iterative process so that we connect multiple releases
1528  // to multiple retains if needed.
1529  unsigned OldDelta = 0;
1530  unsigned NewDelta = 0;
1531  unsigned OldCount = 0;
1532  unsigned NewCount = 0;
1533  bool FirstRelease = true;
1534  for (SmallVector<Instruction *, 4> NewRetains{Retain};;) {
1535  SmallVector<Instruction *, 4> NewReleases;
1536  for (Instruction *NewRetain : NewRetains) {
1537  auto It = Retains.find(NewRetain);
1538  assert(It != Retains.end());
1539  const RRInfo &NewRetainRRI = It->second;
1540  KnownSafeTD &= NewRetainRRI.KnownSafe;
1541  for (Instruction *NewRetainRelease : NewRetainRRI.Calls) {
1542  auto Jt = Releases.find(NewRetainRelease);
1543  if (Jt == Releases.end())
1544  return false;
1545  const RRInfo &NewRetainReleaseRRI = Jt->second;
1546 
1547  // If the release does not have a reference to the retain as well,
1548  // something happened which is unaccounted for. Do not do anything.
1549  //
1550  // This can happen if we catch an additive overflow during path count
1551  // merging.
1552  if (!NewRetainReleaseRRI.Calls.count(NewRetain))
1553  return false;
1554 
1555  if (ReleasesToMove.Calls.insert(NewRetainRelease).second) {
1556  // If we overflow when we compute the path count, don't remove/move
1557  // anything.
1558  const BBState &NRRBBState = BBStates[NewRetainRelease->getParent()];
1559  unsigned PathCount = BBState::OverflowOccurredValue;
1560  if (NRRBBState.GetAllPathCountWithOverflow(PathCount))
1561  return false;
1562  assert(PathCount != BBState::OverflowOccurredValue &&
1563  "PathCount at this point can not be "
1564  "OverflowOccurredValue.");
1565  OldDelta -= PathCount;
1566 
1567  // Merge the ReleaseMetadata and IsTailCallRelease values.
1568  if (FirstRelease) {
1569  ReleasesToMove.ReleaseMetadata =
1570  NewRetainReleaseRRI.ReleaseMetadata;
1571  ReleasesToMove.IsTailCallRelease =
1572  NewRetainReleaseRRI.IsTailCallRelease;
1573  FirstRelease = false;
1574  } else {
1575  if (ReleasesToMove.ReleaseMetadata !=
1576  NewRetainReleaseRRI.ReleaseMetadata)
1577  ReleasesToMove.ReleaseMetadata = nullptr;
1578  if (ReleasesToMove.IsTailCallRelease !=
1579  NewRetainReleaseRRI.IsTailCallRelease)
1580  ReleasesToMove.IsTailCallRelease = false;
1581  }
1582 
1583  // Collect the optimal insertion points.
1584  if (!KnownSafe)
1585  for (Instruction *RIP : NewRetainReleaseRRI.ReverseInsertPts) {
1586  if (ReleasesToMove.ReverseInsertPts.insert(RIP).second) {
1587  // If we overflow when we compute the path count, don't
1588  // remove/move anything.
1589  const BBState &RIPBBState = BBStates[RIP->getParent()];
1590  PathCount = BBState::OverflowOccurredValue;
1591  if (RIPBBState.GetAllPathCountWithOverflow(PathCount))
1592  return false;
1593  assert(PathCount != BBState::OverflowOccurredValue &&
1594  "PathCount at this point can not be "
1595  "OverflowOccurredValue.");
1596  NewDelta -= PathCount;
1597  }
1598  }
1599  NewReleases.push_back(NewRetainRelease);
1600  }
1601  }
1602  }
1603  NewRetains.clear();
1604  if (NewReleases.empty()) break;
1605 
1606  // Back the other way.
1607  for (Instruction *NewRelease : NewReleases) {
1608  auto It = Releases.find(NewRelease);
1609  assert(It != Releases.end());
1610  const RRInfo &NewReleaseRRI = It->second;
1611  KnownSafeBU &= NewReleaseRRI.KnownSafe;
1612  CFGHazardAfflicted |= NewReleaseRRI.CFGHazardAfflicted;
1613  for (Instruction *NewReleaseRetain : NewReleaseRRI.Calls) {
1614  auto Jt = Retains.find(NewReleaseRetain);
1615  if (Jt == Retains.end())
1616  return false;
1617  const RRInfo &NewReleaseRetainRRI = Jt->second;
1618 
1619  // If the retain does not have a reference to the release as well,
1620  // something happened which is unaccounted for. Do not do anything.
1621  //
1622  // This can happen if we catch an additive overflow during path count
1623  // merging.
1624  if (!NewReleaseRetainRRI.Calls.count(NewRelease))
1625  return false;
1626 
1627  if (RetainsToMove.Calls.insert(NewReleaseRetain).second) {
1628  // If we overflow when we compute the path count, don't remove/move
1629  // anything.
1630  const BBState &NRRBBState = BBStates[NewReleaseRetain->getParent()];
1631  unsigned PathCount = BBState::OverflowOccurredValue;
1632  if (NRRBBState.GetAllPathCountWithOverflow(PathCount))
1633  return false;
1634  assert(PathCount != BBState::OverflowOccurredValue &&
1635  "PathCount at this point can not be "
1636  "OverflowOccurredValue.");
1637  OldDelta += PathCount;
1638  OldCount += PathCount;
1639 
1640  // Collect the optimal insertion points.
1641  if (!KnownSafe)
1642  for (Instruction *RIP : NewReleaseRetainRRI.ReverseInsertPts) {
1643  if (RetainsToMove.ReverseInsertPts.insert(RIP).second) {
1644  // If we overflow when we compute the path count, don't
1645  // remove/move anything.
1646  const BBState &RIPBBState = BBStates[RIP->getParent()];
1647 
1648  PathCount = BBState::OverflowOccurredValue;
1649  if (RIPBBState.GetAllPathCountWithOverflow(PathCount))
1650  return false;
1651  assert(PathCount != BBState::OverflowOccurredValue &&
1652  "PathCount at this point can not be "
1653  "OverflowOccurredValue.");
1654  NewDelta += PathCount;
1655  NewCount += PathCount;
1656  }
1657  }
1658  NewRetains.push_back(NewReleaseRetain);
1659  }
1660  }
1661  }
1662  if (NewRetains.empty()) break;
1663  }
1664 
1665  // We can only remove pointers if we are known safe in both directions.
1666  bool UnconditionallySafe = KnownSafeTD && KnownSafeBU;
1667  if (UnconditionallySafe) {
1668  RetainsToMove.ReverseInsertPts.clear();
1669  ReleasesToMove.ReverseInsertPts.clear();
1670  NewCount = 0;
1671  } else {
1672  // Determine whether the new insertion points we computed preserve the
1673  // balance of retain and release calls through the program.
1674  // TODO: If the fully aggressive solution isn't valid, try to find a
1675  // less aggressive solution which is.
1676  if (NewDelta != 0)
1677  return false;
1678 
1679  // At this point, we are not going to remove any RR pairs, but we are
1680  // still able to move RR pairs. If one of our pointers is afflicted with
1681  // CFGHazards, we cannot perform such code motion, so exit early.
1682  const bool WillPerformCodeMotion =
1683  !RetainsToMove.ReverseInsertPts.empty() ||
1684  !ReleasesToMove.ReverseInsertPts.empty();
1685  if (CFGHazardAfflicted && WillPerformCodeMotion)
1686  return false;
1687  }
1688 
1689  // Determine whether the original call points are balanced in the retain and
1690  // release calls through the program. If not, conservatively don't touch
1691  // them.
1692  // TODO: It's theoretically possible to do code motion in this case, as
1693  // long as the existing imbalances are maintained.
1694  if (OldDelta != 0)
1695  return false;
1696 
1697  Changed = true;
1698  assert(OldCount != 0 && "Unreachable code?");
1699  NumRRs += OldCount - NewCount;
1700  // Set to true if we completely removed any RR pairs.
1701  AnyPairsCompletelyEliminated = NewCount == 0;
1702 
1703  // We can move calls!
1704  return true;
1705 }
1706 
1707 /// Identify pairings between the retains and releases, and delete and/or move
1708 /// them.
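///
/// As an illustrative sketch only (hypothetical IR, not taken from any test
/// case; @use stands for code the analysis has shown to have no effect on
/// the reference count), a balanced pairing of this shape can be deleted:
/// \code
/// %0 = call i8* @objc_retain(i8* %p)
/// call void @use(i8* %0)
/// call void @objc_release(i8* %0)
/// \endcode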
1709 bool ObjCARCOpt::PerformCodePlacement(
1710  DenseMap<const BasicBlock *, BBState> &BBStates,
1711  BlotMapVector<Value *, RRInfo> &Retains,
1712  DenseMap<Value *, RRInfo> &Releases, Module *M) {
1713  DEBUG(dbgs() << "\n== ObjCARCOpt::PerformCodePlacement ==\n");
1714 
1715  bool AnyPairsCompletelyEliminated = false;
1716  SmallVector<Instruction *, 8> DeadInsts;
1717 
1718  // Visit each retain.
1719  for (BlotMapVector<Value *, RRInfo>::const_iterator I = Retains.begin(),
1720  E = Retains.end();
1721  I != E; ++I) {
1722  Value *V = I->first;
1723  if (!V) continue; // blotted
1724 
1725  Instruction *Retain = cast<Instruction>(V);
1726 
1727  DEBUG(dbgs() << "Visiting: " << *Retain << "\n");
1728 
1729  Value *Arg = GetArgRCIdentityRoot(Retain);
1730 
1731  // If the object being released is in static or stack storage, we know it's
1732  // not being managed by ObjC reference counting, so we can delete pairs
1733  // regardless of what possible decrements or uses lie between them.
1734  bool KnownSafe = isa<Constant>(Arg) || isa<AllocaInst>(Arg);
1735 
1736  // A constant pointer can't be pointing to an object on the heap. It may
1737  // be reference-counted, but it won't be deleted.
1738  if (const LoadInst *LI = dyn_cast<LoadInst>(Arg))
1739  if (const GlobalVariable *GV =
1740  dyn_cast<GlobalVariable>(
1741  GetRCIdentityRoot(LI->getPointerOperand())))
1742  if (GV->isConstant())
1743  KnownSafe = true;
1744 
1745  // Connect the dots between the top-down-collected RetainsToMove and
1746  // bottom-up-collected ReleasesToMove to form sets of related calls.
1747  RRInfo RetainsToMove, ReleasesToMove;
1748 
1749  bool PerformMoveCalls = PairUpRetainsAndReleases(
1750  BBStates, Retains, Releases, M, Retain, DeadInsts,
1751  RetainsToMove, ReleasesToMove, Arg, KnownSafe,
1752  AnyPairsCompletelyEliminated);
1753 
1754  if (PerformMoveCalls) {
1755  // Ok, everything checks out and we're all set. Let's move/delete some
1756  // code!
1757  MoveCalls(Arg, RetainsToMove, ReleasesToMove,
1758  Retains, Releases, DeadInsts, M);
1759  }
1760  }
1761 
1762  // Now that we're done moving everything, we can delete the newly dead
1763  // instructions, as we no longer need them as insert points.
1764  while (!DeadInsts.empty())
1765  EraseInstruction(DeadInsts.pop_back_val());
1766 
1767  return AnyPairsCompletelyEliminated;
1768 }
1769 
1770 /// Weak pointer optimizations.
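///
/// A hedged sketch of the redundant-load elimination performed below
/// (hypothetical IR; %wp is an illustrative weak slot): a repeated
/// objc_loadWeak of the same location with no intervening clobber can
/// reuse the earlier loaded value.
/// \code
/// %a = call i8* @objc_loadWeak(i8** %wp)
/// %b = call i8* @objc_loadWeak(i8** %wp) ; uses of %b become uses of %a
/// \endcode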
1771 void ObjCARCOpt::OptimizeWeakCalls(Function &F) {
1772  DEBUG(dbgs() << "\n== ObjCARCOpt::OptimizeWeakCalls ==\n");
1773 
1774  // First, do memdep-style RLE and S2L optimizations. We can't use memdep
1775  // itself because it uses AliasAnalysis and we need to do provenance
1776  // queries instead.
1777  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
1778  Instruction *Inst = &*I++;
1779 
1780  DEBUG(dbgs() << "Visiting: " << *Inst << "\n");
1781 
1782  ARCInstKind Class = GetBasicARCInstKind(Inst);
1783  if (Class != ARCInstKind::LoadWeak &&
1784  Class != ARCInstKind::LoadWeakRetained)
1785  continue;
1786 
1787  // Delete objc_loadWeak calls with no users.
1788  if (Class == ARCInstKind::LoadWeak && Inst->use_empty()) {
1789  Inst->eraseFromParent();
1790  continue;
1791  }
1792 
1793  // TODO: For now, just look for an earlier available version of this value
1794  // within the same block. Theoretically, we could do memdep-style non-local
1795  // analysis too, but that would want caching. A better approach would be to
1796  // use the technique that EarlyCSE uses.
1797  inst_iterator Current = std::prev(I);
1798  BasicBlock *CurrentBB = &*Current.getBasicBlockIterator();
1799  for (BasicBlock::iterator B = CurrentBB->begin(),
1800  J = Current.getInstructionIterator();
1801  J != B; --J) {
1802  Instruction *EarlierInst = &*std::prev(J);
1803  ARCInstKind EarlierClass = GetARCInstKind(EarlierInst);
1804  switch (EarlierClass) {
1805  case ARCInstKind::LoadWeak:
1806  case ARCInstKind::LoadWeakRetained: {
1807  // If this is loading from the same pointer, replace this load's value
1808  // with that one.
1809  CallInst *Call = cast<CallInst>(Inst);
1810  CallInst *EarlierCall = cast<CallInst>(EarlierInst);
1811  Value *Arg = Call->getArgOperand(0);
1812  Value *EarlierArg = EarlierCall->getArgOperand(0);
1813  switch (PA.getAA()->alias(Arg, EarlierArg)) {
1814  case MustAlias:
1815  Changed = true;
1816  // If the load has a builtin retain, insert a plain retain for it.
1817  if (Class == ARCInstKind::LoadWeakRetained) {
1818  Constant *Decl = EP.get(ARCRuntimeEntryPointKind::Retain);
1819  CallInst *CI = CallInst::Create(Decl, EarlierCall, "", Call);
1820  CI->setTailCall();
1821  }
1822  // Zap the fully redundant load.
1823  Call->replaceAllUsesWith(EarlierCall);
1824  Call->eraseFromParent();
1825  goto clobbered;
1826  case MayAlias:
1827  case PartialAlias:
1828  goto clobbered;
1829  case NoAlias:
1830  break;
1831  }
1832  break;
1833  }
1834  case ARCInstKind::StoreWeak:
1835  case ARCInstKind::InitWeak: {
1836  // If this is storing to the same pointer and has the same size etc.,
1837  // replace this load's value with the stored value.
1838  CallInst *Call = cast<CallInst>(Inst);
1839  CallInst *EarlierCall = cast<CallInst>(EarlierInst);
1840  Value *Arg = Call->getArgOperand(0);
1841  Value *EarlierArg = EarlierCall->getArgOperand(0);
1842  switch (PA.getAA()->alias(Arg, EarlierArg)) {
1843  case MustAlias:
1844  Changed = true;
1845  // If the load has a builtin retain, insert a plain retain for it.
1846  if (Class == ARCInstKind::LoadWeakRetained) {
1847  Constant *Decl = EP.get(ARCRuntimeEntryPointKind::Retain);
1848  CallInst *CI = CallInst::Create(Decl, EarlierCall, "", Call);
1849  CI->setTailCall();
1850  }
1851  // Zap the fully redundant load.
1852  Call->replaceAllUsesWith(EarlierCall->getArgOperand(1));
1853  Call->eraseFromParent();
1854  goto clobbered;
1855  case MayAlias:
1856  case PartialAlias:
1857  goto clobbered;
1858  case NoAlias:
1859  break;
1860  }
1861  break;
1862  }
1863  case ARCInstKind::MoveWeak:
1864  case ARCInstKind::CopyWeak:
1865  // TODO: Grab the copied value.
1866  goto clobbered;
1867  case ARCInstKind::Call:
1868  case ARCInstKind::None:
1869  case ARCInstKind::IntrinsicUser:
1870  case ARCInstKind::User:
1871  // Weak pointers are only modified through the weak entry points
1872  // (and arbitrary calls, which could call the weak entry points).
1873  break;
1874  default:
1875  // Anything else could modify the weak pointer.
1876  goto clobbered;
1877  }
1878  }
1879  clobbered:;
1880  }
1881 
1882  // Then, for each destroyWeak with an alloca operand, check to see if
1883  // the alloca and all its users can be zapped.
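 // As an illustrative sketch (hypothetical IR): a weak slot that is only
 // ever initialized, stored to, and destroyed, never loaded, is dead in
 // its entirety:
 //   %wp = alloca i8*
 //   call i8* @objc_initWeak(i8** %wp, i8* %v)
 //   call void @objc_destroyWeak(i8** %wp)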
1884  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
1885  Instruction *Inst = &*I++;
1886  ARCInstKind Class = GetBasicARCInstKind(Inst);
1887  if (Class != ARCInstKind::DestroyWeak)
1888  continue;
1889 
1890  CallInst *Call = cast<CallInst>(Inst);
1891  Value *Arg = Call->getArgOperand(0);
1892  if (AllocaInst *Alloca = dyn_cast<AllocaInst>(Arg)) {
1893  for (User *U : Alloca->users()) {
1894  const Instruction *UserInst = cast<Instruction>(U);
1895  switch (GetBasicARCInstKind(UserInst)) {
1896  case ARCInstKind::InitWeak:
1897  case ARCInstKind::StoreWeak:
1898  case ARCInstKind::DestroyWeak:
1899  continue;
1900  default:
1901  goto done;
1902  }
1903  }
1904  Changed = true;
1905  for (auto UI = Alloca->user_begin(), UE = Alloca->user_end(); UI != UE;) {
1906  CallInst *UserInst = cast<CallInst>(*UI++);
1907  switch (GetBasicARCInstKind(UserInst)) {
1908  case ARCInstKind::InitWeak:
1909  case ARCInstKind::StoreWeak:
1910  // These functions return their second argument.
1911  UserInst->replaceAllUsesWith(UserInst->getArgOperand(1));
1912  break;
1913  case ARCInstKind::DestroyWeak:
1914  // No return value.
1915  break;
1916  default:
1917  llvm_unreachable("alloca really is used!");
1918  }
1919  UserInst->eraseFromParent();
1920  }
1921  Alloca->eraseFromParent();
1922  done:;
1923  }
1924  }
1925 }
1926 
1927 /// Identify program paths which execute sequences of retains and releases
1928 /// that can be eliminated.
1929 bool ObjCARCOpt::OptimizeSequences(Function &F) {
1930  // Releases, Retains - These are used to store the results of the main flow
1931  // analysis. These use Value* as the key instead of Instruction* so that the
1932  // map stays valid when we get around to rewriting code and calls get
1933  // replaced by arguments.
1934  DenseMap<Value *, RRInfo> Releases;
1935  BlotMapVector<Value *, RRInfo> Retains;
1936 
1937  // This is used during the traversal of the function to track the
1938  // states for each identified object at each block.
1939  DenseMap<const BasicBlock *, BBState> BBStates;
1940 
1941  // Analyze the CFG of the function, and all instructions.
1942  bool NestingDetected = Visit(F, BBStates, Retains, Releases);
1943 
1944  // Transform.
1945  bool AnyPairsCompletelyEliminated = PerformCodePlacement(BBStates, Retains,
1946  Releases,
1947  F.getParent());
1948 
1949  return AnyPairsCompletelyEliminated && NestingDetected;
1950 }
1951 
1952 /// Check whether there is an earlier dependent call such that nothing between
1953 /// the Retain and that call can affect the reference count of their shared
1954 /// pointer argument. Note that Retain need not be in BB.
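///
/// The shape being checked, as a hedged sketch (hypothetical IR, reusing the
/// @something placeholder from the OptimizeReturns comment below):
/// \code
/// %call = call i8* @something(...)
/// %0 = call i8* @objc_retain(i8* %call)
/// \endcode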
1955 static bool
1956 HasSafePathToPredecessorCall(const Value *Arg, Instruction *Retain,
1957  SmallPtrSetImpl<Instruction *> &DepInsts,
1958  SmallPtrSetImpl<const BasicBlock *> &Visited,
1959  ProvenanceAnalysis &PA) {
1960  FindDependencies(CanChangeRetainCount, Arg, Retain->getParent(), Retain,
1961  DepInsts, Visited, PA);
1962  if (DepInsts.size() != 1)
1963  return false;
1964 
1965  auto *Call = dyn_cast_or_null<CallInst>(*DepInsts.begin());
1966 
1967  // Check that the pointer is the return value of the call.
1968  if (!Call || Arg != Call)
1969  return false;
1970 
1971  // Check that the call is a regular call.
1972  ARCInstKind Class = GetBasicARCInstKind(Call);
1973  return Class == ARCInstKind::CallOrUser || Class == ARCInstKind::Call;
1974 }
1975 
1976 /// Find a dependent retain that precedes the given autorelease for which there
1977 /// is nothing in between the two instructions that can affect the ref count of
1978 /// Arg.
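///
/// As a sketch (hypothetical IR), the retain being sought is the sole
/// dependency of the autorelease on the same RC identity root:
/// \code
/// %1 = call i8* @objc_retain(i8* %x)
/// %2 = call i8* @objc_autorelease(i8* %1)
/// \endcode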
1979 static CallInst *
1980 FindPredecessorRetainWithSafePath(const Value *Arg, BasicBlock *BB,
1981  Instruction *Autorelease,
1982  SmallPtrSetImpl<Instruction *> &DepInsts,
1983  SmallPtrSetImpl<const BasicBlock *> &Visited,
1984  ProvenanceAnalysis &PA) {
1985  FindDependencies(CanChangeRetainCount, Arg,
1986  BB, Autorelease, DepInsts, Visited, PA);
1987  if (DepInsts.size() != 1)
1988  return nullptr;
1989 
1990  auto *Retain = dyn_cast_or_null<CallInst>(*DepInsts.begin());
1991 
1992  // Check that we found a retain with the same argument.
1993  if (!Retain || !IsRetain(GetBasicARCInstKind(Retain)) ||
1994  GetArgRCIdentityRoot(Retain) != Arg) {
1995  return nullptr;
1996  }
1997 
1998  return Retain;
1999 }
2000 
2001 /// Look for an ``autorelease'' instruction dependent on Arg such that there are
2002 /// no instructions dependent on Arg that need a positive ref count in between
2003 /// the autorelease and the ret.
2004 static CallInst *
2005 FindPredecessorAutoreleaseWithSafePath(const Value *Arg, BasicBlock *BB,
2006  ReturnInst *Ret,
2007  SmallPtrSetImpl<Instruction *> &DepInsts,
2008  SmallPtrSetImpl<const BasicBlock *> &V,
2009  ProvenanceAnalysis &PA) {
2010  FindDependencies(NeedsPositiveRetainCount, Arg,
2011  BB, Ret, DepInsts, V, PA);
2012  if (DepInsts.size() != 1)
2013  return nullptr;
2014 
2015  auto *Autorelease = dyn_cast_or_null<CallInst>(*DepInsts.begin());
2016  if (!Autorelease)
2017  return nullptr;
2018  ARCInstKind AutoreleaseClass = GetBasicARCInstKind(Autorelease);
2019  if (!IsAutorelease(AutoreleaseClass))
2020  return nullptr;
2021  if (GetArgRCIdentityRoot(Autorelease) != Arg)
2022  return nullptr;
2023 
2024  return Autorelease;
2025 }
2026 
2027 /// Look for this pattern:
2028 /// \code
2029 /// %call = call i8* @something(...)
2030 /// %2 = call i8* @objc_retain(i8* %call)
2031 /// %3 = call i8* @objc_autorelease(i8* %2)
2032 /// ret i8* %3
2033 /// \endcode
2034 /// And delete the retain and autorelease.
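/// When the pattern matches, only the original call and the return remain
/// (a sketch of the intended result, not a verbatim dump):
/// \code
/// %call = call i8* @something(...)
/// ret i8* %call
/// \endcode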
2035 void ObjCARCOpt::OptimizeReturns(Function &F) {
2036  if (!F.getReturnType()->isPointerTy())
2037  return;
2038 
2039  DEBUG(dbgs() << "\n== ObjCARCOpt::OptimizeReturns ==\n");
2040 
2041  SmallPtrSet<Instruction *, 4> DependingInstructions;
2042  SmallPtrSet<const BasicBlock *, 4> Visited;
2043  for (BasicBlock &BB: F) {
2044  ReturnInst *Ret = dyn_cast<ReturnInst>(&BB.back());
2045  if (!Ret)
2046  continue;
2047 
2048  DEBUG(dbgs() << "Visiting: " << *Ret << "\n");
2049 
2050  const Value *Arg = GetRCIdentityRoot(Ret->getOperand(0));
2051 
2052  // Look for an ``autorelease'' instruction that is a predecessor of Ret and
2053  // dependent on Arg such that there are no instructions dependent on Arg
2054  // that need a positive ref count in between the autorelease and Ret.
2055  CallInst *Autorelease = FindPredecessorAutoreleaseWithSafePath(
2056  Arg, &BB, Ret, DependingInstructions, Visited, PA);
2057  DependingInstructions.clear();
2058  Visited.clear();
2059 
2060  if (!Autorelease)
2061  continue;
2062 
2063  CallInst *Retain = FindPredecessorRetainWithSafePath(
2064  Arg, Autorelease->getParent(), Autorelease, DependingInstructions,
2065  Visited, PA);
2066  DependingInstructions.clear();
2067  Visited.clear();
2068 
2069  if (!Retain)
2070  continue;
2071 
2072  // Check that there is nothing that can affect the reference count
2073  // between the retain and the call. Note that Retain need not be in BB.
2074  bool HasSafePathToCall = HasSafePathToPredecessorCall(Arg, Retain,
2075  DependingInstructions,
2076  Visited, PA);
2077  DependingInstructions.clear();
2078  Visited.clear();
2079 
2080  if (!HasSafePathToCall)
2081  continue;
2082 
2083  // If so, we can zap the retain and autorelease.
2084  Changed = true;
2085  ++NumRets;
2086  DEBUG(dbgs() << "Erasing: " << *Retain << "\nErasing: "
2087  << *Autorelease << "\n");
2088  EraseInstruction(Retain);
2089  EraseInstruction(Autorelease);
2090  }
2091 }
2092 
2093 #ifndef NDEBUG
2094 void
2095 ObjCARCOpt::GatherStatistics(Function &F, bool AfterOptimization) {
2096  Statistic &NumRetains =
2097  AfterOptimization ? NumRetainsAfterOpt : NumRetainsBeforeOpt;
2098  Statistic &NumReleases =
2099  AfterOptimization ? NumReleasesAfterOpt : NumReleasesBeforeOpt;
2100 
2101  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E; ) {
2102  Instruction *Inst = &*I++;
2103  switch (GetBasicARCInstKind(Inst)) {
2104  default:
2105  break;
2106  case ARCInstKind::Retain:
2107  ++NumRetains;
2108  break;
2109  case ARCInstKind::Release:
2110  ++NumReleases;
2111  break;
2112  }
2113  }
2114 }
2115 #endif
2116 
2117 bool ObjCARCOpt::doInitialization(Module &M) {
2118  if (!EnableARCOpts)
2119  return false;
2120 
2121  // If nothing in the Module uses ARC, don't do anything.
2122  Run = ModuleHasARC(M);
2123  if (!Run)
2124  return false;
2125 
2126  // Intuitively, objc_retain and others are nocapture; however, in practice
2127  // they are not, because they return their argument value. And objc_release
2128  // calls finalizers, which can have arbitrary side effects.
2129  MDKindCache.init(&M);
2130 
2131  // Initialize our runtime entry point cache.
2132  EP.init(&M);
2133 
2134  return false;
2135 }
2136 
2138  if (!EnableARCOpts)
2139  return false;
2140 
2141  // If nothing in the Module uses ARC, don't do anything.
2142  if (!Run)
2143  return false;
2144 
2145  Changed = false;
2146 
2147  DEBUG(dbgs() << "<<< ObjCARCOpt: Visiting Function: " << F.getName() << " >>>"
2148  "\n");
2149 
2150  PA.setAA(&getAnalysis<AAResultsWrapperPass>().getAAResults());
2151 
2152 #ifndef NDEBUG
2153  if (AreStatisticsEnabled()) {
2154  GatherStatistics(F, false);
2155  }
2156 #endif
2157 
2158  // This pass performs several distinct transformations. As a compile-time aid
2159  // when compiling code that isn't ObjC, skip these if the relevant ObjC
2160  // library functions aren't declared.
2161 
2162  // Preliminary optimizations. This also computes UsedInThisFunction.
2163  OptimizeIndividualCalls(F);
2164 
2165  // Optimizations for weak pointers.
2166  if (UsedInThisFunction & ((1 << unsigned(ARCInstKind::LoadWeak)) |
2167  (1 << unsigned(ARCInstKind::LoadWeakRetained)) |
2168  (1 << unsigned(ARCInstKind::StoreWeak)) |
2169  (1 << unsigned(ARCInstKind::InitWeak)) |
2170  (1 << unsigned(ARCInstKind::CopyWeak)) |
2171  (1 << unsigned(ARCInstKind::MoveWeak)) |
2172  (1 << unsigned(ARCInstKind::DestroyWeak))))
2173  OptimizeWeakCalls(F);
2174 
2175  // Optimizations for retain+release pairs.
2176  if (UsedInThisFunction & ((1 << unsigned(ARCInstKind::Retain)) |
2177  (1 << unsigned(ARCInstKind::RetainRV)) |
2178  (1 << unsigned(ARCInstKind::RetainBlock))))
2179  if (UsedInThisFunction & (1 << unsigned(ARCInstKind::Release)))
2180  // Run OptimizeSequences until it either stops making changes or
2181  // no retain+release pair nesting is detected.
2182  while (OptimizeSequences(F)) {}
2183 
2184  // Optimizations if objc_autorelease is used.
2185  if (UsedInThisFunction & ((1 << unsigned(ARCInstKind::Autorelease)) |
2186  (1 << unsigned(ARCInstKind::AutoreleaseRV))))
2187  OptimizeReturns(F);
2188 
2189  // Gather statistics after optimization.
2190 #ifndef NDEBUG
2191  if (AreStatisticsEnabled()) {
2192  GatherStatistics(F, true);
2193  }
2194 #endif
2195 
2196  DEBUG(dbgs() << "\n");
2197 
2198  return Changed;
2199 }
2200 
2201 void ObjCARCOpt::releaseMemory() {
2202  PA.clear();
2203 }
2204 
2205 /// @}
2206 ///