CGSCCPassManager.cpp
1 //===- CGSCCPassManager.cpp - Managing & running CGSCC passes -------------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 
9 #include "llvm/Analysis/CGSCCPassManager.h"
10 #include "llvm/ADT/ArrayRef.h"
11 #include "llvm/ADT/Optional.h"
12 #include "llvm/ADT/STLExtras.h"
13 #include "llvm/ADT/SetVector.h"
14 #include "llvm/ADT/SmallPtrSet.h"
15 #include "llvm/ADT/SmallVector.h"
16 #include "llvm/ADT/iterator_range.h"
17 #include "llvm/Analysis/LazyCallGraph.h"
18 #include "llvm/IR/CallSite.h"
19 #include "llvm/IR/Constant.h"
20 #include "llvm/IR/InstIterator.h"
21 #include "llvm/IR/Instruction.h"
22 #include "llvm/IR/PassManager.h"
23 #include "llvm/Support/Casting.h"
24 #include "llvm/Support/Debug.h"
25 #include "llvm/Support/raw_ostream.h"
26 #include <algorithm>
27 #include <cassert>
28 #include <iterator>
29 
30 #define DEBUG_TYPE "cgscc"
31 
32 using namespace llvm;
33 
34 // Explicit template instantiations and specialization definitions for core
35 // template typedefs.
36 namespace llvm {
37 
38 // Explicit instantiations for the core proxy templates.
39 template class AllAnalysesOn<LazyCallGraph::SCC>;
40 template class AnalysisManager<LazyCallGraph::SCC, LazyCallGraph &>;
41 template class PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager,
42                            LazyCallGraph &, CGSCCUpdateResult &>;
43 template class InnerAnalysisManagerProxy<CGSCCAnalysisManager, Module>;
44 template class OuterAnalysisManagerProxy<ModuleAnalysisManager,
45                                          LazyCallGraph::SCC, LazyCallGraph &>;
46 template class OuterAnalysisManagerProxy<CGSCCAnalysisManager, Function>;
47 
48 /// Explicitly specialize the pass manager run method to handle call graph
49 /// updates.
50 template <>
51 PreservedAnalyses
52 PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager, LazyCallGraph &,
53  CGSCCUpdateResult &>::run(LazyCallGraph::SCC &InitialC,
54  CGSCCAnalysisManager &AM,
55  LazyCallGraph &G, CGSCCUpdateResult &UR) {
56  // Request PassInstrumentation from analysis manager, will use it to run
57  // instrumenting callbacks for the passes later.
58   PassInstrumentation PI =
59       AM.getResult<PassInstrumentationAnalysis>(InitialC, G);
60 
61   PreservedAnalyses PA = PreservedAnalyses::all();
62 
63  if (DebugLogging)
64  dbgs() << "Starting CGSCC pass manager run.\n";
65 
66  // The SCC may be refined while we are running passes over it, so set up
67  // a pointer that we can update.
68  LazyCallGraph::SCC *C = &InitialC;
69 
70  for (auto &Pass : Passes) {
71  if (DebugLogging)
72  dbgs() << "Running pass: " << Pass->name() << " on " << *C << "\n";
73 
74  // Check the PassInstrumentation's BeforePass callbacks before running the
75  // pass, skip its execution completely if asked to (callback returns false).
76  if (!PI.runBeforePass(*Pass, *C))
77  continue;
78 
79  PreservedAnalyses PassPA = Pass->run(*C, AM, G, UR);
80 
81  if (UR.InvalidatedSCCs.count(C))
82  PI.runAfterPassInvalidated<LazyCallGraph::SCC>(*Pass);
83  else
84  PI.runAfterPass<LazyCallGraph::SCC>(*Pass, *C);
85 
86  // Update the SCC if necessary.
87  C = UR.UpdatedC ? UR.UpdatedC : C;
88 
89  // If the CGSCC pass wasn't able to provide a valid updated SCC, the
90  // current SCC may simply need to be skipped if invalid.
91  if (UR.InvalidatedSCCs.count(C)) {
92  LLVM_DEBUG(dbgs() << "Skipping invalidated root or island SCC!\n");
93  break;
94  }
95  // Check that we didn't miss any update scenario.
96  assert(C->begin() != C->end() && "Cannot have an empty SCC!");
97 
98  // Update the analysis manager as each pass runs and potentially
99  // invalidates analyses.
100  AM.invalidate(*C, PassPA);
101 
102  // Finally, we intersect the final preserved analyses to compute the
103  // aggregate preserved set for this pass manager.
104  PA.intersect(std::move(PassPA));
105 
106  // FIXME: Historically, the pass managers all called the LLVM context's
107  // yield function here. We don't have a generic way to acquire the
108  // context and it isn't yet clear what the right pattern is for yielding
109  // in the new pass manager so it is currently omitted.
110  // ...getContext().yield();
111  }
112 
113  // Invalidation was handled after each pass in the above loop for the current
114  // SCC. Therefore, the remaining analysis results in the AnalysisManager are
115  // preserved. We mark this with a set so that we don't need to inspect each
116  // one individually.
117  PA.preserveSet<AllAnalysesOn<LazyCallGraph::SCC>>();
118 
119  if (DebugLogging)
120  dbgs() << "Finished CGSCC pass manager run.\n";
121 
122  return PA;
123 }
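
For context (this sketch is not part of CGSCCPassManager.cpp): the specialized run() above only executes when a CGSCCPassManager is nested inside a module pipeline through the module-to-CGSCC adaptor. Below is one minimal way such a pipeline could be assembled with the new pass manager; the helper name runExampleCGSCCPipeline and the choice of PostOrderFunctionAttrsPass as the CGSCC pass are illustrative assumptions.

// Illustrative sketch only (not part of CGSCCPassManager.cpp): wiring a
// CGSCCPassManager into a module pipeline so the specialized run() above
// drives the SCC walk.
#include "llvm/IR/Module.h"
#include "llvm/Passes/PassBuilder.h"
#include "llvm/Transforms/IPO/FunctionAttrs.h"

static void runExampleCGSCCPipeline(llvm::Module &M) {
  using namespace llvm;

  PassBuilder PB;
  LoopAnalysisManager LAM;
  FunctionAnalysisManager FAM;
  CGSCCAnalysisManager CGAM;
  ModuleAnalysisManager MAM;

  // Register analyses and the proxies that let the nested managers
  // (module <-> CGSCC <-> function) find each other.
  PB.registerModuleAnalyses(MAM);
  PB.registerCGSCCAnalyses(CGAM);
  PB.registerFunctionAnalyses(FAM);
  PB.registerLoopAnalyses(LAM);
  PB.crossRegisterProxies(LAM, FAM, CGAM, MAM);

  // Any CGSCC pass could go here; functionattrs is just an example.
  CGSCCPassManager CGPM;
  CGPM.addPass(PostOrderFunctionAttrsPass());

  ModulePassManager MPM;
  MPM.addPass(createModuleToPostOrderCGSCCPassAdaptor(std::move(CGPM)));
  MPM.run(M, MAM);
}
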
124 
125 bool CGSCCAnalysisManagerModuleProxy::Result::invalidate(
126     Module &M, const PreservedAnalyses &PA,
127     ModuleAnalysisManager::Invalidator &Inv) {
128  // If literally everything is preserved, we're done.
129  if (PA.areAllPreserved())
130  return false; // This is still a valid proxy.
131 
132  // If this proxy or the call graph is going to be invalidated, we also need
133  // to clear all the keys coming from that analysis.
134  //
135  // We also directly invalidate the FAM's module proxy if necessary, and if
136  // that proxy isn't preserved we can't preserve this proxy either. We rely on
137  // it to handle module -> function analysis invalidation in the face of
138  // structural changes and so if it's unavailable we conservatively clear the
139  // entire SCC layer as well rather than trying to do invalidation ourselves.
140   auto PAC = PA.getChecker<CGSCCAnalysisManagerModuleProxy>();
141   if (!(PAC.preserved() || PAC.preservedSet<AllAnalysesOn<Module>>()) ||
142       Inv.invalidate<LazyCallGraphAnalysis>(M, PA) ||
143       Inv.invalidate<FunctionAnalysisManagerModuleProxy>(M, PA)) {
144     InnerAM->clear();
145 
146  // And the proxy itself should be marked as invalid so that we can observe
147  // the new call graph. This isn't strictly necessary because we cheat
148  // above, but is still useful.
149  return true;
150  }
151 
152  // Directly check if the relevant set is preserved so we can short circuit
153  // invalidating SCCs below.
154   bool AreSCCAnalysesPreserved =
155       PA.allAnalysesInSetPreserved<AllAnalysesOn<LazyCallGraph::SCC>>();
156 
157  // Ok, we have a graph, so we can propagate the invalidation down into it.
158  G->buildRefSCCs();
159  for (auto &RC : G->postorder_ref_sccs())
160   for (auto &C : RC) {
161     Optional<PreservedAnalyses> InnerPA;
162 
163  // Check to see whether the preserved set needs to be adjusted based on
164  // module-level analysis invalidation triggering deferred invalidation
165  // for this SCC.
166  if (auto *OuterProxy =
167  InnerAM->getCachedResult<ModuleAnalysisManagerCGSCCProxy>(C))
168  for (const auto &OuterInvalidationPair :
169  OuterProxy->getOuterInvalidations()) {
170  AnalysisKey *OuterAnalysisID = OuterInvalidationPair.first;
171  const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
172  if (Inv.invalidate(OuterAnalysisID, M, PA)) {
173  if (!InnerPA)
174  InnerPA = PA;
175  for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
176  InnerPA->abandon(InnerAnalysisID);
177  }
178  }
179 
180  // Check if we needed a custom PA set. If so we'll need to run the inner
181  // invalidation.
182  if (InnerPA) {
183  InnerAM->invalidate(C, *InnerPA);
184  continue;
185  }
186 
187  // Otherwise we only need to do invalidation if the original PA set didn't
188  // preserve all SCC analyses.
189  if (!AreSCCAnalysesPreserved)
190  InnerAM->invalidate(C, PA);
191  }
192 
193  // Return false to indicate that this result is still a valid proxy.
194  return false;
195 }
196 
197 template <>
198 CGSCCAnalysisManagerModuleProxy::Result
199 CGSCCAnalysisManagerModuleProxy::run(Module &M, ModuleAnalysisManager &AM) {
200  // Force the Function analysis manager to also be available so that it can
201  // be accessed in an SCC analysis and proxied onward to function passes.
202  // FIXME: It is pretty awkward to just drop the result here and assert that
203  // we can find it again later.
204   (void)AM.getResult<FunctionAnalysisManagerModuleProxy>(M);
205 
206  return Result(*InnerAM, AM.getResult<LazyCallGraphAnalysis>(M));
207 }
208 
209 AnalysisKey FunctionAnalysisManagerCGSCCProxy::Key;
210 
211 FunctionAnalysisManagerCGSCCProxy::Result
212 FunctionAnalysisManagerCGSCCProxy::run(LazyCallGraph::SCC &C,
213                                        CGSCCAnalysisManager &AM,
214  LazyCallGraph &CG) {
215  // Collect the FunctionAnalysisManager from the Module layer and use that to
216  // build the proxy result.
217  //
218  // This allows us to rely on the FunctionAnalysisManagerModuleProxy to
219  // invalidate the function analyses.
220  auto &MAM = AM.getResult<ModuleAnalysisManagerCGSCCProxy>(C, CG).getManager();
221  Module &M = *C.begin()->getFunction().getParent();
222  auto *FAMProxy = MAM.getCachedResult<FunctionAnalysisManagerModuleProxy>(M);
223  assert(FAMProxy && "The CGSCC pass manager requires that the FAM module "
224  "proxy is run on the module prior to entering the CGSCC "
225  "walk.");
226 
227  // Note that we special-case invalidation handling of this proxy in the CGSCC
228  // analysis manager's Module proxy. This avoids the need to do anything
229  // special here to recompute all of this if ever the FAM's module proxy goes
230  // away.
231  return Result(FAMProxy->getManager());
232 }
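
For context (this sketch is not part of this file): the proxy result produced above is what a CGSCC pass queries to reach function-level analyses for the functions in its SCC. The access pattern could look like the following; ExampleCGSCCPass is a hypothetical pass and TargetLibraryAnalysis is used only as a representative function analysis.

// Illustrative sketch only (not part of this file): how a CGSCC pass reaches
// function analyses through the FunctionAnalysisManagerCGSCCProxy computed
// above. ExampleCGSCCPass is hypothetical.
#include "llvm/Analysis/CGSCCPassManager.h"
#include "llvm/Analysis/LazyCallGraph.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/IR/PassManager.h"

namespace {
struct ExampleCGSCCPass : llvm::PassInfoMixin<ExampleCGSCCPass> {
  llvm::PreservedAnalyses run(llvm::LazyCallGraph::SCC &C,
                              llvm::CGSCCAnalysisManager &AM,
                              llvm::LazyCallGraph &CG,
                              llvm::CGSCCUpdateResult &UR) {
    (void)UR; // Not needed for this read-only sketch.

    // The proxy result hands back the FunctionAnalysisManager owned by the
    // module layer, so per-function results can be queried for SCC members.
    llvm::FunctionAnalysisManager &FAM =
        AM.getResult<llvm::FunctionAnalysisManagerCGSCCProxy>(C, CG)
            .getManager();

    for (llvm::LazyCallGraph::Node &N : C) {
      llvm::Function &F = N.getFunction();
      (void)FAM.getResult<llvm::TargetLibraryAnalysis>(F);
    }

    // Purely read-only, so everything is preserved.
    return llvm::PreservedAnalyses::all();
  }
};
} // end anonymous namespace
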
233 
234 bool FunctionAnalysisManagerCGSCCProxy::Result::invalidate(
235     LazyCallGraph::SCC &C, const PreservedAnalyses &PA,
236     CGSCCAnalysisManager::Invalidator &Inv) {
237  // If literally everything is preserved, we're done.
238  if (PA.areAllPreserved())
239  return false; // This is still a valid proxy.
240 
241  // If this proxy isn't marked as preserved, then even if the result remains
242  // valid, the key itself may no longer be valid, so we clear everything.
243  //
244  // Note that in order to preserve this proxy, a module pass must ensure that
245  // the FAM has been completely updated to handle the deletion of functions.
246  // Specifically, any FAM-cached results for those functions need to have been
247  // forcibly cleared. When preserved, this proxy will only invalidate results
248  // cached on functions *still in the module* at the end of the module pass.
249   auto PAC = PA.getChecker<FunctionAnalysisManagerCGSCCProxy>();
250   if (!PAC.preserved() && !PAC.preservedSet<AllAnalysesOn<LazyCallGraph::SCC>>()) {
251  for (LazyCallGraph::Node &N : C)
252  FAM->clear(N.getFunction(), N.getFunction().getName());
253 
254  return true;
255  }
256 
257  // Directly check if the relevant set is preserved.
258   bool AreFunctionAnalysesPreserved =
259       PA.allAnalysesInSetPreserved<AllAnalysesOn<Function>>();
260 
261  // Now walk all the functions to see if any inner analysis invalidation is
262  // necessary.
263  for (LazyCallGraph::Node &N : C) {
264  Function &F = N.getFunction();
265  Optional<PreservedAnalyses> FunctionPA;
266 
267  // Check to see whether the preserved set needs to be pruned based on
268  // SCC-level analysis invalidation that triggers deferred invalidation
269  // registered with the outer analysis manager proxy for this function.
270  if (auto *OuterProxy =
271  FAM->getCachedResult<CGSCCAnalysisManagerFunctionProxy>(F))
272  for (const auto &OuterInvalidationPair :
273  OuterProxy->getOuterInvalidations()) {
274  AnalysisKey *OuterAnalysisID = OuterInvalidationPair.first;
275  const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
276  if (Inv.invalidate(OuterAnalysisID, C, PA)) {
277  if (!FunctionPA)
278  FunctionPA = PA;
279  for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
280  FunctionPA->abandon(InnerAnalysisID);
281  }
282  }
283 
284  // Check if we needed a custom PA set, and if so we'll need to run the
285  // inner invalidation.
286  if (FunctionPA) {
287  FAM->invalidate(F, *FunctionPA);
288  continue;
289  }
290 
291  // Otherwise we only need to do invalidation if the original PA set didn't
292  // preserve all function analyses.
293  if (!AreFunctionAnalysesPreserved)
294  FAM->invalidate(F, PA);
295  }
296 
297  // Return false to indicate that this result is still a valid proxy.
298  return false;
299 }
300 
301 } // end namespace llvm
302 
303 /// When a new SCC is created for the graph and there might be function
304 /// analysis results cached for the functions now in that SCC two forms of
305 /// updates are required.
306 ///
307 /// First, a proxy from the SCC to the FunctionAnalysisManager needs to be
308 /// created so that any subsequent invalidation events to the SCC are
309 /// propagated to the function analysis results cached for functions within it.
310 ///
311 /// Second, if any of the functions within the SCC have analysis results with
312 /// outer analysis dependencies, then those dependencies would point to the
313 /// *wrong* SCC's analysis result. We forcibly invalidate the necessary
314 /// function analyses so that they don't retain stale handles.
315 static void updateNewSCCFunctionAnalyses(LazyCallGraph::SCC &C,
316                                          LazyCallGraph &G,
317  CGSCCAnalysisManager &AM) {
318  // Get the relevant function analysis manager.
319   auto &FAM =
320       AM.getResult<FunctionAnalysisManagerCGSCCProxy>(C, G).getManager();
321 
322  // Now walk the functions in this SCC and invalidate any function analysis
323  // results that might have outer dependencies on an SCC analysis.
324  for (LazyCallGraph::Node &N : C) {
325  Function &F = N.getFunction();
326 
327  auto *OuterProxy =
328  FAM.getCachedResult<CGSCCAnalysisManagerFunctionProxy>(F);
329  if (!OuterProxy)
330  // No outer analyses were queried, nothing to do.
331  continue;
332 
333  // Forcibly abandon all the inner analyses with dependencies, but
334  // invalidate nothing else.
335  auto PA = PreservedAnalyses::all();
336  for (const auto &OuterInvalidationPair :
337  OuterProxy->getOuterInvalidations()) {
338  const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
339  for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
340  PA.abandon(InnerAnalysisID);
341  }
342 
343  // Now invalidate anything we found.
344  FAM.invalidate(F, PA);
345  }
346 }
347 
348 /// Helper function to update both the \c CGSCCAnalysisManager \p AM and the \c
349 /// CGSCCPassManager's \c CGSCCUpdateResult \p UR based on a range of newly
350 /// added SCCs.
351 ///
352 /// The range of new SCCs must be in postorder already. The SCC they were split
353 /// out of must be provided as \p C. The current node being mutated and
354 /// triggering updates must be passed as \p N.
355 ///
356 /// This function returns the SCC containing \p N. This will be either \p C if
357 /// no new SCCs have been split out, or it will be the new SCC containing \p N.
358 template <typename SCCRangeT>
359 static LazyCallGraph::SCC *
360 incorporateNewSCCRange(const SCCRangeT &NewSCCRange, LazyCallGraph &G,
361                        LazyCallGraph::Node &N, LazyCallGraph::SCC *C,
362                        CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR) {
363  using SCC = LazyCallGraph::SCC;
364 
365  if (NewSCCRange.begin() == NewSCCRange.end())
366  return C;
367 
368  // Add the current SCC to the worklist as its shape has changed.
369  UR.CWorklist.insert(C);
370  LLVM_DEBUG(dbgs() << "Enqueuing the existing SCC in the worklist:" << *C
371  << "\n");
372 
373  SCC *OldC = C;
374 
375  // Update the current SCC. Note that if we have new SCCs, this must actually
376  // change the SCC.
377  assert(C != &*NewSCCRange.begin() &&
378  "Cannot insert new SCCs without changing current SCC!");
379  C = &*NewSCCRange.begin();
380  assert(G.lookupSCC(N) == C && "Failed to update current SCC!");
381 
382  // If we had a cached FAM proxy originally, we will want to create more of
383  // them for each SCC that was split off.
384   bool NeedFAMProxy =
385       AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(*OldC) != nullptr;
386 
387  // We need to propagate an invalidation call to all but the newly current SCC
388  // because the outer pass manager won't do that for us after splitting them.
389  // FIXME: We should accept a PreservedAnalysis from the CG updater so that if
390  // there are preserved analysis we can avoid invalidating them here for
391  // split-off SCCs.
392  // We know however that this will preserve any FAM proxy so go ahead and mark
393  // that.
394   auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
395   PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
396   AM.invalidate(*OldC, PA);
397 
398  // Ensure the now-current SCC's function analyses are updated.
399  if (NeedFAMProxy)
400  updateNewSCCFunctionAnalyses(*C, G, AM);
401 
402  for (SCC &NewC : llvm::reverse(make_range(std::next(NewSCCRange.begin()),
403  NewSCCRange.end()))) {
404  assert(C != &NewC && "No need to re-visit the current SCC!");
405  assert(OldC != &NewC && "Already handled the original SCC!");
406  UR.CWorklist.insert(&NewC);
407  LLVM_DEBUG(dbgs() << "Enqueuing a newly formed SCC:" << NewC << "\n");
408 
409  // Ensure new SCCs' function analyses are updated.
410  if (NeedFAMProxy)
411  updateNewSCCFunctionAnalyses(NewC, G, AM);
412 
413  // Also propagate a normal invalidation to the new SCC as only the current
414  // will get one from the pass manager infrastructure.
415  AM.invalidate(NewC, PA);
416  }
417  return C;
418 }
419 
420 LazyCallGraph::SCC &llvm::updateCGAndAnalysisManagerForFunctionPass(
421     LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
422     CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR) {
423   using Node = LazyCallGraph::Node;
424  using Edge = LazyCallGraph::Edge;
425  using SCC = LazyCallGraph::SCC;
426  using RefSCC = LazyCallGraph::RefSCC;
427 
428  RefSCC &InitialRC = InitialC.getOuterRefSCC();
429  SCC *C = &InitialC;
430  RefSCC *RC = &InitialRC;
431  Function &F = N.getFunction();
432 
433  // Walk the function body and build up the set of retained, promoted, and
434  // demoted edges.
435   SmallVector<Constant *, 16> Worklist;
436   SmallPtrSet<Constant *, 16> Visited;
437   SmallPtrSet<Node *, 16> RetainedEdges;
438  SmallSetVector<Node *, 4> PromotedRefTargets;
439  SmallSetVector<Node *, 4> DemotedCallTargets;
440 
441  // First walk the function and handle all called functions. We do this first
442  // because if there is a single call edge, whether there are ref edges is
443  // irrelevant.
444  for (Instruction &I : instructions(F))
445  if (auto CS = CallSite(&I))
446  if (Function *Callee = CS.getCalledFunction())
447  if (Visited.insert(Callee).second && !Callee->isDeclaration()) {
448  Node &CalleeN = *G.lookup(*Callee);
449  Edge *E = N->lookup(CalleeN);
450  // FIXME: We should really handle adding new calls. While it will
451  // make downstream usage more complex, there is no fundamental
452  // limitation and it will allow passes within the CGSCC to be a bit
453  // more flexible in what transforms they can do. Until then, we
454  // verify that new calls haven't been introduced.
455  assert(E && "No function transformations should introduce *new* "
456  "call edges! Any new calls should be modeled as "
457  "promoted existing ref edges!");
458  bool Inserted = RetainedEdges.insert(&CalleeN).second;
459  (void)Inserted;
460  assert(Inserted && "We should never visit a function twice.");
461  if (!E->isCall())
462  PromotedRefTargets.insert(&CalleeN);
463  }
464 
465  // Now walk all references.
466  for (Instruction &I : instructions(F))
467  for (Value *Op : I.operand_values())
468  if (auto *C = dyn_cast<Constant>(Op))
469  if (Visited.insert(C).second)
470  Worklist.push_back(C);
471 
472  auto VisitRef = [&](Function &Referee) {
473  Node &RefereeN = *G.lookup(Referee);
474  Edge *E = N->lookup(RefereeN);
475  // FIXME: Similarly to new calls, we also currently preclude
476  // introducing new references. See above for details.
477  assert(E && "No function transformations should introduce *new* ref "
478  "edges! Any new ref edges would require IPO which "
479  "function passes aren't allowed to do!");
480  bool Inserted = RetainedEdges.insert(&RefereeN).second;
481  (void)Inserted;
482  assert(Inserted && "We should never visit a function twice.");
483  if (E->isCall())
484  DemotedCallTargets.insert(&RefereeN);
485  };
486  LazyCallGraph::visitReferences(Worklist, Visited, VisitRef);
487 
488  // Include synthetic reference edges to known, defined lib functions.
489  for (auto *F : G.getLibFunctions())
490  // While the list of lib functions doesn't have repeats, don't re-visit
491  // anything handled above.
492  if (!Visited.count(F))
493  VisitRef(*F);
494 
495  // First remove all of the edges that are no longer present in this function.
496  // The first step makes these edges uniformly ref edges and accumulates them
497  // into a separate data structure so removal doesn't invalidate anything.
498  SmallVector<Node *, 4> DeadTargets;
499  for (Edge &E : *N) {
500  if (RetainedEdges.count(&E.getNode()))
501  continue;
502 
503  SCC &TargetC = *G.lookupSCC(E.getNode());
504  RefSCC &TargetRC = TargetC.getOuterRefSCC();
505  if (&TargetRC == RC && E.isCall()) {
506  if (C != &TargetC) {
507  // For separate SCCs this is trivial.
508  RC->switchTrivialInternalEdgeToRef(N, E.getNode());
509  } else {
510  // Now update the call graph.
511  C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, E.getNode()),
512  G, N, C, AM, UR);
513  }
514  }
515 
516  // Now that this is ready for actual removal, put it into our list.
517  DeadTargets.push_back(&E.getNode());
518  }
519  // Remove the easy cases quickly and actually pull them out of our list.
520  DeadTargets.erase(
521  llvm::remove_if(DeadTargets,
522  [&](Node *TargetN) {
523  SCC &TargetC = *G.lookupSCC(*TargetN);
524  RefSCC &TargetRC = TargetC.getOuterRefSCC();
525 
526  // We can't trivially remove internal targets, so skip
527  // those.
528  if (&TargetRC == RC)
529  return false;
530 
531  RC->removeOutgoingEdge(N, *TargetN);
532  LLVM_DEBUG(dbgs() << "Deleting outgoing edge from '"
533                                     << N << "' to '" << *TargetN << "'\n");
534  return true;
535  }),
536  DeadTargets.end());
537 
538  // Now do a batch removal of the internal ref edges left.
539  auto NewRefSCCs = RC->removeInternalRefEdge(N, DeadTargets);
540  if (!NewRefSCCs.empty()) {
541  // The old RefSCC is dead, mark it as such.
542  UR.InvalidatedRefSCCs.insert(RC);
543 
544  // Note that we don't bother to invalidate analyses as ref-edge
545  // connectivity is not really observable in any way and is intended
546  // exclusively to be used for ordering of transforms rather than for
547  // analysis conclusions.
548 
549  // Update RC to the "bottom".
550  assert(G.lookupSCC(N) == C && "Changed the SCC when splitting RefSCCs!");
551  RC = &C->getOuterRefSCC();
552  assert(G.lookupRefSCC(N) == RC && "Failed to update current RefSCC!");
553 
554  // The RC worklist is in reverse postorder, so we enqueue the new ones in
555  // RPO except for the one which contains the source node as that is the
556  // "bottom" we will continue processing in the bottom-up walk.
557  assert(NewRefSCCs.front() == RC &&
558  "New current RefSCC not first in the returned list!");
559  for (RefSCC *NewRC : llvm::reverse(make_range(std::next(NewRefSCCs.begin()),
560  NewRefSCCs.end()))) {
561  assert(NewRC != RC && "Should not encounter the current RefSCC further "
562  "in the postorder list of new RefSCCs.");
563  UR.RCWorklist.insert(NewRC);
564  LLVM_DEBUG(dbgs() << "Enqueuing a new RefSCC in the update worklist: "
565  << *NewRC << "\n");
566  }
567  }
568 
569  // Next demote all the call edges that are now ref edges. This helps make
570  // the SCCs small which should minimize the work below as we don't want to
571  // form cycles that this would break.
572  for (Node *RefTarget : DemotedCallTargets) {
573  SCC &TargetC = *G.lookupSCC(*RefTarget);
574  RefSCC &TargetRC = TargetC.getOuterRefSCC();
575 
576  // The easy case is when the target RefSCC is not this RefSCC. This is
577  // only supported when the target RefSCC is a child of this RefSCC.
578  if (&TargetRC != RC) {
579  assert(RC->isAncestorOf(TargetRC) &&
580  "Cannot potentially form RefSCC cycles here!");
581  RC->switchOutgoingEdgeToRef(N, *RefTarget);
582  LLVM_DEBUG(dbgs() << "Switch outgoing call edge to a ref edge from '" << N
583  << "' to '" << *RefTarget << "'\n");
584  continue;
585  }
586 
587  // We are switching an internal call edge to a ref edge. This may split up
588  // some SCCs.
589  if (C != &TargetC) {
590  // For separate SCCs this is trivial.
591  RC->switchTrivialInternalEdgeToRef(N, *RefTarget);
592  continue;
593  }
594 
595  // Now update the call graph.
596  C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, *RefTarget), G, N,
597  C, AM, UR);
598  }
599 
600  // Now promote ref edges into call edges.
601  for (Node *CallTarget : PromotedRefTargets) {
602  SCC &TargetC = *G.lookupSCC(*CallTarget);
603  RefSCC &TargetRC = TargetC.getOuterRefSCC();
604 
605  // The easy case is when the target RefSCC is not this RefSCC. This is
606  // only supported when the target RefSCC is a child of this RefSCC.
607  if (&TargetRC != RC) {
608  assert(RC->isAncestorOf(TargetRC) &&
609  "Cannot potentially form RefSCC cycles here!");
610  RC->switchOutgoingEdgeToCall(N, *CallTarget);
611  LLVM_DEBUG(dbgs() << "Switch outgoing ref edge to a call edge from '" << N
612  << "' to '" << *CallTarget << "'\n");
613  continue;
614  }
615  LLVM_DEBUG(dbgs() << "Switch an internal ref edge to a call edge from '"
616  << N << "' to '" << *CallTarget << "'\n");
617 
618  // Otherwise we are switching an internal ref edge to a call edge. This
619  // may merge away some SCCs, and we add those to the UpdateResult. We also
620  // need to make sure to update the worklist in the event SCCs have moved
621  // before the current one in the post-order sequence.
622  bool HasFunctionAnalysisProxy = false;
623  auto InitialSCCIndex = RC->find(*C) - RC->begin();
624  bool FormedCycle = RC->switchInternalEdgeToCall(
625  N, *CallTarget, [&](ArrayRef<SCC *> MergedSCCs) {
626  for (SCC *MergedC : MergedSCCs) {
627  assert(MergedC != &TargetC && "Cannot merge away the target SCC!");
628 
629             HasFunctionAnalysisProxy |=
630                 AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(
631  *MergedC) != nullptr;
632 
633  // Mark that this SCC will no longer be valid.
634  UR.InvalidatedSCCs.insert(MergedC);
635 
636  // FIXME: We should really do a 'clear' here to forcibly release
637  // memory, but we don't have a good way of doing that and
638  // preserving the function analyses.
639  auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
640  PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
641  AM.invalidate(*MergedC, PA);
642  }
643  });
644 
645  // If we formed a cycle by creating this call, we need to update more data
646  // structures.
647  if (FormedCycle) {
648  C = &TargetC;
649  assert(G.lookupSCC(N) == C && "Failed to update current SCC!");
650 
651  // If one of the invalidated SCCs had a cached proxy to a function
652  // analysis manager, we need to create a proxy in the new current SCC as
653  // the invalidated SCCs had their functions moved.
654     if (HasFunctionAnalysisProxy)
655       AM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, G);
656 
657  // Any analyses cached for this SCC are no longer precise as the shape
658  // has changed by introducing this cycle. However, we have taken care to
659   // update the proxies so it remains valid.
660  auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
661  PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
662  AM.invalidate(*C, PA);
663  }
664  auto NewSCCIndex = RC->find(*C) - RC->begin();
665  // If we have actually moved an SCC to be topologically "below" the current
666  // one due to merging, we will need to revisit the current SCC after
667  // visiting those moved SCCs.
668  //
669  // It is critical that we *do not* revisit the current SCC unless we
670  // actually move SCCs in the process of merging because otherwise we may
671  // form a cycle where an SCC is split apart, merged, split, merged and so
672  // on infinitely.
673  if (InitialSCCIndex < NewSCCIndex) {
674  // Put our current SCC back onto the worklist as we'll visit other SCCs
675  // that are now definitively ordered prior to the current one in the
676  // post-order sequence, and may end up observing more precise context to
677  // optimize the current SCC.
678  UR.CWorklist.insert(C);
679  LLVM_DEBUG(dbgs() << "Enqueuing the existing SCC in the worklist: " << *C
680  << "\n");
681  // Enqueue in reverse order as we pop off the back of the worklist.
682  for (SCC &MovedC : llvm::reverse(make_range(RC->begin() + InitialSCCIndex,
683  RC->begin() + NewSCCIndex))) {
684  UR.CWorklist.insert(&MovedC);
685  LLVM_DEBUG(dbgs() << "Enqueuing a newly earlier in post-order SCC: "
686  << MovedC << "\n");
687  }
688  }
689  }
690 
691  assert(!UR.InvalidatedSCCs.count(C) && "Invalidated the current SCC!");
692  assert(!UR.InvalidatedRefSCCs.count(RC) && "Invalidated the current RefSCC!");
693  assert(&C->getOuterRefSCC() == RC && "Current SCC not in current RefSCC!");
694 
695  // Record the current RefSCC and SCC for higher layers of the CGSCC pass
696  // manager now that all the updates have been applied.
697  if (RC != &InitialRC)
698  UR.UpdatedRC = RC;
699  if (C != &InitialC)
700  UR.UpdatedC = C;
701 
702  return *C;
703 }
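
For context (this sketch is not part of this file): updateCGAndAnalysisManagerForFunctionPass is invoked by the CGSCC-to-function adaptor after each nested function pass runs, so that call edges the pass added, removed, promoted, or demoted are reflected in the graph and the worklists handled above. One way such a nesting might be assembled is sketched below; the helper name buildExampleCGSCCPipeline and the particular passes are assumptions made for the example.

// Illustrative sketch only (not part of this file): nesting function passes
// under the CGSCC walk. After each nested function pass runs, the
// CGSCC-to-function adaptor calls updateCGAndAnalysisManagerForFunctionPass()
// so the lazy call graph and the CGSCCUpdateResult worklists stay consistent
// with any call sites the pass created or deleted.
#include "llvm/Analysis/CGSCCPassManager.h"
#include "llvm/IR/PassManager.h"
#include "llvm/Transforms/Scalar/SROA.h"
#include "llvm/Transforms/Scalar/SimplifyCFG.h"

static llvm::CGSCCPassManager buildExampleCGSCCPipeline() {
  using namespace llvm;

  // Function passes may create or delete call sites; the adaptor propagates
  // those IR changes back into the call graph after each of them runs.
  FunctionPassManager FPM;
  FPM.addPass(SROA());
  FPM.addPass(SimplifyCFGPass());

  CGSCCPassManager CGPM;
  CGPM.addPass(createCGSCCToFunctionPassAdaptor(std::move(FPM)));
  return CGPM;
}
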