//===- ObjCARCContract.cpp - ObjC ARC Optimization ------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
/// \file
/// This file defines late ObjC ARC optimizations. ARC stands for Automatic
/// Reference Counting and is a system for managing reference counts for
/// objects in Objective C.
///
/// This specific file mainly deals with ``contracting'' multiple lower level
/// operations into singular higher level operations through pattern matching.
///
/// WARNING: This file knows about certain library functions. It recognizes them
/// by name, and hardwires knowledge of their semantics.
///
/// WARNING: This file knows about how certain Objective-C library functions are
/// used. Naive LLVM IR transformations which would otherwise be
/// behavior-preserving may break these assumptions.
///
//===----------------------------------------------------------------------===//
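// At a glance, the contractions performed here are (illustrative sketch; see
// the comments on the individual routines below for the precise conditions):
//
//   * objc_retain of a call's return value      -> objc_retainAutoreleasedReturnValue
//   * objc_retain + objc_autorelease[RV]        -> objc_retainAutorelease[ReturnValue]
//   * load / objc_retain / objc_release / store -> objc_storeStrong
//   * objc_initWeak(p, null)                    -> store null to p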

// TODO: ObjCARCContract could insert PHI nodes when uses aren't
// dominated by single calls.

#include "ARCRuntimeEntryPoints.h"
#include "DependencyAnalysis.h"
#include "ObjCARC.h"
#include "ProvenanceAnalysis.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/BasicAliasAnalysis.h"
#include "llvm/Analysis/ObjCARCUtil.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/EHPersonalities.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/PassManager.h"
#include "llvm/InitializePasses.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;
using namespace llvm::objcarc;

#define DEBUG_TYPE "objc-arc-contract"

STATISTIC(NumPeeps, "Number of calls peephole-optimized");
STATISTIC(NumStoreStrongs, "Number of objc_storeStrong calls formed");

//===----------------------------------------------------------------------===//
// Declarations
//===----------------------------------------------------------------------===//

namespace {
/// Late ARC optimizations
///
/// These change the IR in a way that makes it difficult to be analyzed by
/// ObjCARCOpt, so it's run late.
class ObjCARCContract {
  bool Changed;
  bool CFGChanged;
  AAResults *AA;
  DominatorTree *DT;
  ProvenanceAnalysis PA;
  ARCRuntimeEntryPoints EP;
  BundledRetainClaimRVs *BundledInsts = nullptr;

  /// A flag indicating whether this optimization pass should run.
  bool Run;

  /// The inline asm string to insert between calls and RetainRV calls to make
  /// the optimization work on targets which need it.
  const MDString *RVInstMarker;

  /// The set of inserted objc_storeStrong calls. If at the end of walking the
  /// function we have found no alloca instructions, these calls can be marked
  /// "tail".
  SmallPtrSet<CallInst *, 8> StoreStrongCalls;

  /// Returns true if we eliminated Inst.
  bool tryToPeepholeInstruction(
      Function &F, Instruction *Inst, inst_iterator &Iter,
      bool &TailOkForStoreStrongs,
      const DenseMap<BasicBlock *, ColorVector> &BlockColors);

  bool optimizeRetainCall(Function &F, Instruction *Retain);

  bool contractAutorelease(Function &F, Instruction *Autorelease,
                           ARCInstKind Class);

  void tryToContractReleaseIntoStoreStrong(
      Instruction *Release, inst_iterator &Iter,
      const DenseMap<BasicBlock *, ColorVector> &BlockColors);

public:
  bool init(Module &M);
  bool run(Function &F, AAResults *AA, DominatorTree *DT);
  bool hasCFGChanged() const { return CFGChanged; }
};

class ObjCARCContractLegacyPass : public FunctionPass {
public:
  void getAnalysisUsage(AnalysisUsage &AU) const override;
  bool runOnFunction(Function &F) override;

  static char ID;
  ObjCARCContractLegacyPass() : FunctionPass(ID) {
    initializeObjCARCContractLegacyPassPass(*PassRegistry::getPassRegistry());
  }
};
}

//===----------------------------------------------------------------------===//
// Implementation
//===----------------------------------------------------------------------===//

/// Turn objc_retain into objc_retainAutoreleasedReturnValue if the operand is a
/// return value. We do this late so we do not disrupt the dataflow analysis in
/// ObjCARCOpt.
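///
/// For illustration only (a sketch; the value names are made up, and types are
/// written in the pre-opaque-pointer i8* style used elsewhere in this file):
///
///   %v = call i8* @getObject()                ; call whose result is retained
///   %r = call i8* @llvm.objc.retain(i8* %v)
///
/// becomes, when the retain immediately follows the call that produced %v:
///
///   %r = call i8* @llvm.objc.retainAutoreleasedReturnValue(i8* %v)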
bool ObjCARCContract::optimizeRetainCall(Function &F, Instruction *Retain) {
  const auto *Call = dyn_cast<CallBase>(GetArgRCIdentityRoot(Retain));
  if (!Call)
    return false;
  if (Call->getParent() != Retain->getParent())
    return false;

  // Check that the call is next to the retain.
  BasicBlock::const_iterator I = ++Call->getIterator();
  while (IsNoopInstruction(&*I))
    ++I;
  if (&*I != Retain)
    return false;

  // Turn it into an objc_retainAutoreleasedReturnValue.
  Changed = true;
  ++NumPeeps;

  LLVM_DEBUG(
      dbgs() << "Transforming objc_retain => "
                "objc_retainAutoreleasedReturnValue since the operand is a "
                "return value.\nOld: "
             << *Retain << "\n");

  // We do not have to worry about tail calls or the nounwind property, since
  // retain and retainRV have the same properties.
  Function *Decl = EP.get(ARCRuntimeEntryPointKind::RetainRV);
  cast<CallInst>(Retain)->setCalledFunction(Decl);

  LLVM_DEBUG(dbgs() << "New: " << *Retain << "\n");
  return true;
}

/// Merge an autorelease with a retain into a fused call.
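///
/// For illustration only (a sketch, not taken from a specific test case):
///
///   %r = call i8* @llvm.objc.retain(i8* %p)
///   ...nothing in between that may change the reference count...
///   call i8* @llvm.objc.autorelease(i8* %p)
///
/// is fused into a single call at the retain's position:
///
///   %r = call i8* @llvm.objc.retainAutorelease(i8* %p)
///
/// (or @llvm.objc.retainAutoreleaseReturnValue when the second call was an
/// autoreleaseRV).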
bool ObjCARCContract::contractAutorelease(Function &F, Instruction *Autorelease,
                                          ARCInstKind Class) {
  Value *Arg = GetArgRCIdentityRoot(Autorelease);

  // Check that there are no instructions between the retain and the autorelease
  // (such as an autorelease_pop) which may change the count.
  DependenceKind DK = Class == ARCInstKind::AutoreleaseRV
                          ? RetainAutoreleaseRVDep
                          : RetainAutoreleaseDep;
  auto *Retain = dyn_cast_or_null<CallInst>(
      findSingleDependency(DK, Arg, Autorelease->getParent(), Autorelease, PA));

  if (!Retain || GetBasicARCInstKind(Retain) != ARCInstKind::Retain ||
      GetArgRCIdentityRoot(Retain) != Arg)
    return false;

  Changed = true;
  ++NumPeeps;

  LLVM_DEBUG(dbgs() << "    Fusing retain/autorelease!\n"
                       "        Autorelease:"
                    << *Autorelease << "\n"
                       "        Retain: " << *Retain << "\n");

  Function *Decl = EP.get(Class == ARCInstKind::AutoreleaseRV
                              ? ARCRuntimeEntryPointKind::RetainAutoreleaseRV
                              : ARCRuntimeEntryPointKind::RetainAutorelease);
  Retain->setCalledFunction(Decl);

  LLVM_DEBUG(dbgs() << "        New RetainAutorelease: " << *Retain << "\n");

  EraseInstruction(Autorelease);
  return true;
}

static StoreInst *findSafeStoreForStoreStrongContraction(LoadInst *Load,
                                                         Instruction *Release,
                                                         ProvenanceAnalysis &PA,
                                                         AAResults *AA) {
  StoreInst *Store = nullptr;
  bool SawRelease = false;

  // Get the location associated with Load.
  MemoryLocation Loc = MemoryLocation::get(Load);
  auto *LocPtr = Loc.Ptr->stripPointerCasts();

  // Walk down to find the store and the release, which may be in either order.
  for (auto I = std::next(BasicBlock::iterator(Load)),
            E = Load->getParent()->end();
       I != E; ++I) {
    // If we found the store we were looking for and saw the release,
    // break. There is no more work to be done.
    if (Store && SawRelease)
      break;

    // Now we know that we have not seen either the store or the release. If I
    // is the release, mark that we saw the release and continue.
    Instruction *Inst = &*I;
    if (Inst == Release) {
      SawRelease = true;
      continue;
    }

    // Otherwise, we check if Inst is a "good" store. Grab the instruction class
    // of Inst.
    ARCInstKind Class = GetBasicARCInstKind(Inst);

    // If we have seen the store, but not the release...
    if (Store) {
      // We need to make sure that it is safe to move the release from its
      // current position to the store. This implies proving that any
      // instruction in between Store and the Release conservatively can not
      // use the RCIdentityRoot of Release. If we can prove that, we can ignore
      // Inst and continue...
      if (!CanUse(Inst, Load, PA, Class)) {
        continue;
      }

      // Otherwise, be conservative and return nullptr.
      return nullptr;
    }

    // Ok, now we know we have not seen a store yet.

    // If Inst is a retain, we don't care about it as it doesn't prevent moving
    // the load to the store.
    //
    // TODO: This is one area where the optimization could be made more
    // aggressive.
    if (IsRetain(Class))
      continue;

    // See if Inst can write to our load location. If it can not, just ignore
    // the instruction.
    if (!isModSet(AA->getModRefInfo(Inst, Loc)))
      continue;

    Store = dyn_cast<StoreInst>(Inst);

    // If Inst can, then check if Inst is a simple store. If Inst is not a
    // store, or is a store that is not simple, then we have something we do
    // not understand writing to this memory, implying we can not move the
    // load over the write to any subsequent store that we may find.
    if (!Store || !Store->isSimple())
      return nullptr;

    // Then make sure that the pointer we are storing to is Ptr. If so, we
    // found our Store!
    if (Store->getPointerOperand()->stripPointerCasts() == LocPtr)
      continue;

    // Otherwise, we have an unknown store to some other ptr that clobbers
    // Loc.Ptr. Bail!
    return nullptr;
  }

  // If we did not find the store or did not see the release, fail.
  if (!Store || !SawRelease)
    return nullptr;

  // We succeeded!
  return Store;
}

static Instruction *
findRetainForStoreStrongContraction(Value *New, StoreInst *Store,
                                    Instruction *Release,
                                    ProvenanceAnalysis &PA) {
  // Walk up from the Store to find the retain.
  BasicBlock::iterator I = Store->getIterator();
  BasicBlock::iterator Begin = Store->getParent()->begin();
  while (I != Begin && GetBasicARCInstKind(&*I) != ARCInstKind::Retain) {
    Instruction *Inst = &*I;

    // It is only safe to move the retain to the store if we can prove
    // conservatively that nothing besides the release can decrement reference
    // counts in between the retain and the store.
    if (CanDecrementRefCount(Inst, New, PA) && Inst != Release)
      return nullptr;
    --I;
  }
  Instruction *Retain = &*I;
  if (GetBasicARCInstKind(Retain) != ARCInstKind::Retain)
    return nullptr;
  if (GetArgRCIdentityRoot(Retain) != New)
    return nullptr;
  return Retain;
}

/// Attempt to merge an objc_release with a store, load, and objc_retain to form
/// an objc_storeStrong. An objc_storeStrong:
///
///   objc_storeStrong(i8** %old_ptr, i8* new_value)
///
/// is equivalent to the following IR sequence:
///
///   ; Load old value.
///   %old_value = load i8** %old_ptr               (1)
///
///   ; Increment the new value and then release the old value. This must occur
///   ; in order in case old_value releases new_value in its destructor causing
///   ; us to potentially have a dangling ptr.
///   tail call i8* @objc_retain(i8* %new_value)    (2)
///   tail call void @objc_release(i8* %old_value)  (3)
///
///   ; Store the new_value into old_ptr
///   store i8* %new_value, i8** %old_ptr           (4)
///
/// The safety of this optimization is based around the following
/// considerations:
///
///  1. We are forming the store strong at the store. Thus to perform this
///     optimization it must be safe to move the retain, load, and release to
///     (4).
///  2. We need to make sure that any re-orderings of (1), (2), (3), (4) are
///     safe.
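///
/// For reference, the contracted form produced below is a single runtime call
/// (illustrative sketch):
///
///   call void @objc_storeStrong(i8** %old_ptr, i8* %new_value)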
void ObjCARCContract::tryToContractReleaseIntoStoreStrong(
    Instruction *Release, inst_iterator &Iter,
    const DenseMap<BasicBlock *, ColorVector> &BlockColors) {
  // See if we are releasing something that we just loaded.
  auto *Load = dyn_cast<LoadInst>(GetArgRCIdentityRoot(Release));
  if (!Load || !Load->isSimple())
    return;

  // For now, require everything to be in one basic block.
  BasicBlock *BB = Release->getParent();
  if (Load->getParent() != BB)
    return;

  // First scan down the BB from Load, looking for a store to the location that
  // Load loads from.
  StoreInst *Store =
      findSafeStoreForStoreStrongContraction(Load, Release, PA, AA);
  // If we fail, bail.
  if (!Store)
    return;

  // Then find what new_value's RCIdentity Root is.
  Value *New = GetRCIdentityRoot(Store->getValueOperand());

  // Then walk up the BB and look for a retain on New without any intervening
  // instructions which conservatively might decrement ref counts.
  Instruction *Retain =
      findRetainForStoreStrongContraction(New, Store, Release, PA);

  // If we fail, bail.
  if (!Retain)
    return;

  Changed = true;
  ++NumStoreStrongs;

  LLVM_DEBUG(
      llvm::dbgs() << "    Contracting retain, release into objc_storeStrong.\n"
                   << "        Old:\n"
                   << "            Store:   " << *Store << "\n"
                   << "            Release: " << *Release << "\n"
                   << "            Retain:  " << *Retain << "\n"
                   << "            Load:    " << *Load << "\n");

  LLVMContext &C = Release->getContext();
  Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
  Type *I8XX = PointerType::getUnqual(I8X);

  Value *Args[] = { Load->getPointerOperand(), New };
  if (Args[0]->getType() != I8XX)
    Args[0] = new BitCastInst(Args[0], I8XX, "", Store->getIterator());
  if (Args[1]->getType() != I8X)
    Args[1] = new BitCastInst(Args[1], I8X, "", Store->getIterator());
  Function *Decl = EP.get(ARCRuntimeEntryPointKind::StoreStrong);
  CallInst *StoreStrong = objcarc::createCallInstWithColors(
      Decl, Args, "", Store->getIterator(), BlockColors);
  StoreStrong->setDoesNotThrow();
  StoreStrong->setDebugLoc(Store->getDebugLoc());

  // We can't set the tail flag yet, because we haven't yet determined
  // whether there are any escaping allocas. Remember this call, so that
  // we can set the tail flag once we know it's safe.
  StoreStrongCalls.insert(StoreStrong);

  LLVM_DEBUG(llvm::dbgs() << "        New Store Strong: " << *StoreStrong
                          << "\n");

  if (&*Iter == Retain) ++Iter;
  if (&*Iter == Store) ++Iter;
  Store->eraseFromParent();
  Release->eraseFromParent();
  EraseInstruction(Retain);
  if (Load->use_empty())
    Load->eraseFromParent();
}

bool ObjCARCContract::tryToPeepholeInstruction(
    Function &F, Instruction *Inst, inst_iterator &Iter,
    bool &TailOkForStoreStrongs,
    const DenseMap<BasicBlock *, ColorVector> &BlockColors) {
  // Only these library routines return their argument. In particular,
  // objc_retainBlock does not necessarily return its argument.
  ARCInstKind Class = GetBasicARCInstKind(Inst);
  switch (Class) {
  case ARCInstKind::FusedRetainAutorelease:
  case ARCInstKind::FusedRetainAutoreleaseRV:
    return false;
  case ARCInstKind::Autorelease:
  case ARCInstKind::AutoreleaseRV:
    return contractAutorelease(F, Inst, Class);
  case ARCInstKind::Retain:
    // Attempt to convert retains to retainrvs if they are next to function
    // calls.
    if (!optimizeRetainCall(F, Inst))
      return false;
    // If we succeed in our optimization, fall through.
    [[fallthrough]];
  case ARCInstKind::RetainRV:
  case ARCInstKind::UnsafeClaimRV: {
    // Return true if this is a bundled retainRV/claimRV call, which is always
    // redundant with the attachedcall in the bundle, and is going to be erased
    // at the end of this pass. This avoids undoing objc-arc-expand and
    // replacing uses of the retainRV/claimRV call's argument with its result.
    if (BundledInsts->contains(Inst))
      return true;

    // If this isn't a bundled call, and the target doesn't need a special
    // inline-asm marker, we're done: return now, and let the caller undo
    // objc-arc-expand.
    if (!RVInstMarker)
      return false;

    // The target needs a special inline-asm marker. Insert it.
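    // (On AArch64, for example, the marker read from the
    // "clang.arc.retainAutoreleasedReturnValueMarker" module flag is typically
    // "mov fp, fp"; the exact string is target- and frontend-dependent, which
    // is why it is taken from RVInstMarker rather than assumed here.)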

    BasicBlock::iterator BBI = Inst->getIterator();
    BasicBlock *InstParent = Inst->getParent();

    // Step up to see if the call immediately precedes the RV call.
    // If it's an invoke, we have to cross a block boundary. And we have
    // to carefully dodge no-op instructions.
    do {
      if (BBI == InstParent->begin()) {
        BasicBlock *Pred = InstParent->getSinglePredecessor();
        if (!Pred)
          goto decline_rv_optimization;
        BBI = Pred->getTerminator()->getIterator();
        break;
      }
      --BBI;
    } while (IsNoopInstruction(&*BBI));

    if (GetRCIdentityRoot(&*BBI) == GetArgRCIdentityRoot(Inst)) {
      LLVM_DEBUG(dbgs() << "Adding inline asm marker for the return value "
                           "optimization.\n");
      Changed = true;
      InlineAsm *IA =
          InlineAsm::get(FunctionType::get(Type::getVoidTy(Inst->getContext()),
                                           /*isVarArg=*/false),
                         RVInstMarker->getString(),
                         /*Constraints=*/"", /*hasSideEffects=*/true);

      objcarc::createCallInstWithColors(IA, std::nullopt, "",
                                        Inst->getIterator(), BlockColors);
    }
  decline_rv_optimization:
    return false;
  }
  case ARCInstKind::InitWeak: {
    // objc_initWeak(p, null) => *p = null
    CallInst *CI = cast<CallInst>(Inst);
    if (IsNullOrUndef(CI->getArgOperand(1))) {
      Value *Null = ConstantPointerNull::get(cast<PointerType>(CI->getType()));
      Changed = true;
      new StoreInst(Null, CI->getArgOperand(0), CI->getIterator());

      LLVM_DEBUG(dbgs() << "OBJCARCContract: Old = " << *CI << "\n"
                        << "                 New = " << *Null << "\n");

      CI->replaceAllUsesWith(Null);
      CI->eraseFromParent();
    }
    return true;
  }
  case ARCInstKind::Release:
    // Try to form an objc store strong from our release. If we fail, there is
    // nothing further to do below, so continue.
    tryToContractReleaseIntoStoreStrong(Inst, Iter, BlockColors);
    return true;
  case ARCInstKind::User:
    // Be conservative if the function has any alloca instructions.
    // Technically we only care about escaping alloca instructions,
    // but this is sufficient to handle some interesting cases.
    if (isa<AllocaInst>(Inst))
      TailOkForStoreStrongs = false;
    return true;
  case ARCInstKind::IntrinsicUser:
    // Remove calls to @llvm.objc.clang.arc.use(...).
    Changed = true;
    Inst->eraseFromParent();
    return true;
  default:
    if (auto *CI = dyn_cast<CallInst>(Inst))
      if (CI->getIntrinsicID() == Intrinsic::objc_clang_arc_noop_use) {
        // Remove calls to @llvm.objc.clang.arc.noop.use(...).
        Changed = true;
        CI->eraseFromParent();
      }
    return true;
  }
}

//===----------------------------------------------------------------------===//
// Top Level Driver
//===----------------------------------------------------------------------===//

bool ObjCARCContract::init(Module &M) {
  Run = ModuleHasARC(M);
  if (!Run)
    return false;

  EP.init(&M);

  // Initialize RVInstMarker.
  RVInstMarker = getRVInstMarker(M);

  return false;
}

bool ObjCARCContract::run(Function &F, AAResults *A, DominatorTree *D) {
  if (!Run)
    return false;

  if (!EnableARCOpts)
    return false;

  Changed = CFGChanged = false;
  AA = A;
  DT = D;
  PA.setAA(A);
  BundledRetainClaimRVs BRV(/*ContractPass=*/true);
  BundledInsts = &BRV;

  std::pair<bool, bool> R = BundledInsts->insertAfterInvokes(F, DT);
  Changed |= R.first;
  CFGChanged |= R.second;

  DenseMap<BasicBlock *, ColorVector> BlockColors;
  if (F.hasPersonalityFn() &&
      isScopedEHPersonality(classifyEHPersonality(F.getPersonalityFn())))
    BlockColors = colorEHFunclets(F);

  LLVM_DEBUG(llvm::dbgs() << "**** ObjCARC Contract ****\n");

  // Track whether it's ok to mark objc_storeStrong calls with the "tail"
  // keyword. Be conservative if the function has variadic arguments.
  // Functions which "return twice" are also unsafe for the "tail" keyword,
  // because they are setjmp-like and could need to return to an earlier
  // stack state.
  bool TailOkForStoreStrongs =
      !F.isVarArg() && !F.callsFunctionThatReturnsTwice();

  // For ObjC library calls which return their argument, replace uses of the
  // argument with uses of the call return value, if it dominates the use. This
  // reduces register pressure.
  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E;) {
    Instruction *Inst = &*I++;

    LLVM_DEBUG(dbgs() << "Visiting: " << *Inst << "\n");

    if (auto *CI = dyn_cast<CallInst>(Inst))
      if (objcarc::hasAttachedCallOpBundle(CI)) {
        BundledInsts->insertRVCallWithColors(I->getIterator(), CI, BlockColors);
        --I;
        Changed = true;
      }

    // First try to peephole Inst. If there is nothing further we can do in
    // terms of undoing objc-arc-expand, process the next inst.
    if (tryToPeepholeInstruction(F, Inst, I, TailOkForStoreStrongs,
                                 BlockColors))
      continue;

    // Otherwise, try to undo objc-arc-expand.

    // Don't use GetArgRCIdentityRoot because we don't want to look through
    // bitcasts and such; to do the replacement, the argument must have
    // type i8*.

    // Function for replacing uses of Arg dominated by Inst.
    auto ReplaceArgUses = [Inst, this](Value *Arg) {
      // If we're compiling bugpointed code, don't get in trouble.
      if (!isa<Instruction>(Arg) && !isa<Argument>(Arg))
        return;

      // Look through the uses of the pointer.
      for (Value::use_iterator UI = Arg->use_begin(), UE = Arg->use_end();
           UI != UE; ) {
        // Increment UI now, because we may unlink its element.
        Use &U = *UI++;
        unsigned OperandNo = U.getOperandNo();

        // If the call's return value dominates a use of the call's argument
        // value, rewrite the use to use the return value. We check for
        // reachability here because an unreachable call is considered to
        // trivially dominate itself, which would lead us to rewriting its
        // argument in terms of its return value, which would lead to
        // infinite loops in GetArgRCIdentityRoot.
        if (!DT->isReachableFromEntry(U) || !DT->dominates(Inst, U))
          continue;

        Changed = true;
        Instruction *Replacement = Inst;
        Type *UseTy = U.get()->getType();
        if (PHINode *PHI = dyn_cast<PHINode>(U.getUser())) {
          // For PHI nodes, insert the bitcast in the predecessor block.
          unsigned ValNo = PHINode::getIncomingValueNumForOperand(OperandNo);
          BasicBlock *IncomingBB = PHI->getIncomingBlock(ValNo);
          if (Replacement->getType() != UseTy) {
            // A catchswitch is both a pad and a terminator, meaning a basic
            // block with a catchswitch has no insertion point. Keep going up
            // the dominator tree until we find a non-catchswitch.
            BasicBlock *InsertBB = IncomingBB;
            while (isa<CatchSwitchInst>(InsertBB->getFirstNonPHI())) {
              InsertBB = DT->getNode(InsertBB)->getIDom()->getBlock();
            }

            assert(DT->dominates(Inst, &InsertBB->back()) &&
                   "Invalid insertion point for bitcast");
            Replacement = new BitCastInst(Replacement, UseTy, "",
                                          InsertBB->back().getIterator());
          }

          // While we're here, rewrite all edges for this PHI, rather
          // than just one use at a time, to minimize the number of
          // bitcasts we emit.
          for (unsigned i = 0, e = PHI->getNumIncomingValues(); i != e; ++i)
            if (PHI->getIncomingBlock(i) == IncomingBB) {
              // Keep the UI iterator valid.
              if (UI != UE &&
                  &PHI->getOperandUse(
                      PHINode::getOperandNumForIncomingValue(i)) == &*UI)
                ++UI;
              PHI->setIncomingValue(i, Replacement);
            }
        } else {
          if (Replacement->getType() != UseTy)
            Replacement =
                new BitCastInst(Replacement, UseTy, "",
                                cast<Instruction>(U.getUser())->getIterator());
          U.set(Replacement);
        }
      }
    };

    Value *Arg = cast<CallInst>(Inst)->getArgOperand(0);
    Value *OrigArg = Arg;

    // TODO: Change this to a do-while.
    for (;;) {
      ReplaceArgUses(Arg);

      // If Arg is a no-op casted pointer, strip one level of casts and iterate.
      if (const BitCastInst *BI = dyn_cast<BitCastInst>(Arg))
        Arg = BI->getOperand(0);
      else if (isa<GEPOperator>(Arg) &&
               cast<GEPOperator>(Arg)->hasAllZeroIndices())
        Arg = cast<GEPOperator>(Arg)->getPointerOperand();
      else if (isa<GlobalAlias>(Arg) &&
               !cast<GlobalAlias>(Arg)->isInterposable())
        Arg = cast<GlobalAlias>(Arg)->getAliasee();
      else {
        // If Arg is a PHI node, get PHIs that are equivalent to it and replace
        // their uses.
        if (PHINode *PN = dyn_cast<PHINode>(Arg)) {
          SmallVector<Value *, 1> PHIList;
          getEquivalentPHIs(*PN, PHIList);
          for (Value *PHI : PHIList)
            ReplaceArgUses(PHI);
        }
        break;
      }
    }

    // Replace bitcast users of Arg that are dominated by Inst.
    SmallVector<BitCastInst *, 2> BitCastUsers;

    // Add all bitcast users of the function argument first.
    for (User *U : OrigArg->users())
      if (auto *BC = dyn_cast<BitCastInst>(U))
        BitCastUsers.push_back(BC);

    // Replace the bitcasts with the call return. Iterate until list is empty.
    while (!BitCastUsers.empty()) {
      auto *BC = BitCastUsers.pop_back_val();
      for (User *U : BC->users())
        if (auto *B = dyn_cast<BitCastInst>(U))
          BitCastUsers.push_back(B);

      ReplaceArgUses(BC);
    }
  }

  // If this function has no escaping allocas or suspicious vararg usage,
  // objc_storeStrong calls can be marked with the "tail" keyword.
  if (TailOkForStoreStrongs)
    for (CallInst *CI : StoreStrongCalls)
      CI->setTailCall();
  StoreStrongCalls.clear();

  return Changed;
}

//===----------------------------------------------------------------------===//
// Misc Pass Manager
//===----------------------------------------------------------------------===//

char ObjCARCContractLegacyPass::ID = 0;
INITIALIZE_PASS_BEGIN(ObjCARCContractLegacyPass, "objc-arc-contract",
                      "ObjC ARC contraction", false, false)
INITIALIZE_PASS_DEPENDENCY(AAResultsWrapperPass)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_END(ObjCARCContractLegacyPass, "objc-arc-contract",
                    "ObjC ARC contraction", false, false)

void ObjCARCContractLegacyPass::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<AAResultsWrapperPass>();
  AU.addRequired<DominatorTreeWrapperPass>();
  AU.addPreserved<AAResultsWrapperPass>();
  AU.addPreserved<BasicAAWrapperPass>();
  AU.addPreserved<DominatorTreeWrapperPass>();
}

Pass *llvm::createObjCARCContractPass() {
  return new ObjCARCContractLegacyPass();
}

bool ObjCARCContractLegacyPass::runOnFunction(Function &F) {
  ObjCARCContract OCARCC;
  OCARCC.init(*F.getParent());
  auto *AA = &getAnalysis<AAResultsWrapperPass>().getAAResults();
  auto *DT = &getAnalysis<DominatorTreeWrapperPass>().getDomTree();
  return OCARCC.run(F, AA, DT);
}

PreservedAnalyses ObjCARCContractPass::run(Function &F,
                                           FunctionAnalysisManager &AM) {
  ObjCARCContract OCAC;
  OCAC.init(*F.getParent());

  bool Changed = OCAC.run(F, &AM.getResult<AAManager>(F),
                          &AM.getResult<DominatorTreeAnalysis>(F));
  bool CFGChanged = OCAC.hasCFGChanged();
  if (Changed) {
    PreservedAnalyses PA;
    if (!CFGChanged)
      PA.preserveSet<CFGAnalyses>();
    return PA;
  }
  return PreservedAnalyses::all();
}