LLVM  4.0.0
CGSCCPassManager.cpp
Go to the documentation of this file.
1 //===- CGSCCPassManager.cpp - Managing & running CGSCC passes -------------===//
2 //
3 // The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 
#include "llvm/Analysis/CGSCCPassManager.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/InstIterator.h"
13 
14 using namespace llvm;
15 
// Explicit template instantiations and specialization definitions for core
// template typedefs.
18 namespace llvm {
19 
20 // Explicit instantiations for the core proxy templates.
29 
30 /// Explicitly specialize the pass manager run method to handle call graph
31 /// updates.
32 template <>
39 
40  if (DebugLogging)
41  dbgs() << "Starting CGSCC pass manager run.\n";
42 
43  // The SCC may be refined while we are running passes over it, so set up
44  // a pointer that we can update.
45  LazyCallGraph::SCC *C = &InitialC;
46 
47  for (auto &Pass : Passes) {
48  if (DebugLogging)
49  dbgs() << "Running pass: " << Pass->name() << " on " << *C << "\n";
50 
51  PreservedAnalyses PassPA = Pass->run(*C, AM, G, UR);
52 
53  // Update the SCC if necessary.
54  C = UR.UpdatedC ? UR.UpdatedC : C;
55 
56  // Check that we didn't miss any update scenario.
57  assert(!UR.InvalidatedSCCs.count(C) && "Processing an invalid SCC!");
58  assert(C->begin() != C->end() && "Cannot have an empty SCC!");
59 
60  // Update the analysis manager as each pass runs and potentially
61  // invalidates analyses.
62  AM.invalidate(*C, PassPA);
63 
64  // Finally, we intersect the final preserved analyses to compute the
65  // aggregate preserved set for this pass manager.
66  PA.intersect(std::move(PassPA));
67 
68  // FIXME: Historically, the pass managers all called the LLVM context's
69  // yield function here. We don't have a generic way to acquire the
70  // context and it isn't yet clear what the right pattern is for yielding
71  // in the new pass manager so it is currently omitted.
72  // ...getContext().yield();
73  }
74 
75  // Invaliadtion was handled after each pass in the above loop for the current
76  // SCC. Therefore, the remaining analysis results in the AnalysisManager are
77  // preserved. We mark this with a set so that we don't need to inspect each
78  // one individually.
80 
81  if (DebugLogging)
82  dbgs() << "Finished CGSCC pass manager run.\n";
83 
84  return PA;
85 }
86 
88  Module &M, const PreservedAnalyses &PA,
90  // If literally everything is preserved, we're done.
91  if (PA.areAllPreserved())
92  return false; // This is still a valid proxy.
93 
94  // If this proxy or the call graph is going to be invalidated, we also need
95  // to clear all the keys coming from that analysis.
96  //
97  // We also directly invalidate the FAM's module proxy if necessary, and if
98  // that proxy isn't preserved we can't preserve this proxy either. We rely on
99  // it to handle module -> function analysis invalidation in the face of
100  // structural changes and so if it's unavailable we conservatively clear the
101  // entire SCC layer as well rather than trying to do invalidation ourselves.
103  if (!(PAC.preserved() || PAC.preservedSet<AllAnalysesOn<Module>>()) ||
104  Inv.invalidate<LazyCallGraphAnalysis>(M, PA) ||
106  InnerAM->clear();
107 
108  // And the proxy itself should be marked as invalid so that we can observe
109  // the new call graph. This isn't strictly necessary because we cheat
110  // above, but is still useful.
111  return true;
112  }
113 
114  // Directly check if the relevant set is preserved so we can short circuit
115  // invalidating SCCs below.
116  bool AreSCCAnalysesPreserved =
118 
119  // Ok, we have a graph, so we can propagate the invalidation down into it.
120  for (auto &RC : G->postorder_ref_sccs())
121  for (auto &C : RC) {
123 
124  // Check to see whether the preserved set needs to be adjusted based on
125  // module-level analysis invalidation triggering deferred invalidation
126  // for this SCC.
127  if (auto *OuterProxy =
128  InnerAM->getCachedResult<ModuleAnalysisManagerCGSCCProxy>(C))
129  for (const auto &OuterInvalidationPair :
130  OuterProxy->getOuterInvalidations()) {
131  AnalysisKey *OuterAnalysisID = OuterInvalidationPair.first;
132  const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
133  if (Inv.invalidate(OuterAnalysisID, M, PA)) {
134  if (!InnerPA)
135  InnerPA = PA;
136  for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
137  InnerPA->abandon(InnerAnalysisID);
138  }
139  }
140 
141  // Check if we needed a custom PA set. If so we'll need to run the inner
142  // invalidation.
143  if (InnerPA) {
144  InnerAM->invalidate(C, *InnerPA);
145  continue;
146  }
147 
148  // Otherwise we only need to do invalidation if the original PA set didn't
149  // preserve all SCC analyses.
150  if (!AreSCCAnalysesPreserved)
151  InnerAM->invalidate(C, PA);
152  }
153 
154  // Return false to indicate that this result is still a valid proxy.
155  return false;
156 }
157 
158 template <>
161  // Force the Function analysis manager to also be available so that it can
162  // be accessed in an SCC analysis and proxied onward to function passes.
163  // FIXME: It is pretty awkward to just drop the result here and assert that
164  // we can find it again later.
166 
167  return Result(*InnerAM, AM.getResult<LazyCallGraphAnalysis>(M));
168 }
169 
170 AnalysisKey FunctionAnalysisManagerCGSCCProxy::Key;
171 
175  LazyCallGraph &CG) {
176  // Collect the FunctionAnalysisManager from the Module layer and use that to
177  // build the proxy result.
178  //
179  // This allows us to rely on the FunctionAnalysisMangaerModuleProxy to
180  // invalidate the function analyses.
181  auto &MAM = AM.getResult<ModuleAnalysisManagerCGSCCProxy>(C, CG).getManager();
182  Module &M = *C.begin()->getFunction().getParent();
183  auto *FAMProxy = MAM.getCachedResult<FunctionAnalysisManagerModuleProxy>(M);
184  assert(FAMProxy && "The CGSCC pass manager requires that the FAM module "
185  "proxy is run on the module prior to entering the CGSCC "
186  "walk.");
187 
188  // Note that we special-case invalidation handling of this proxy in the CGSCC
189  // analysis manager's Module proxy. This avoids the need to do anything
190  // special here to recompute all of this if ever the FAM's module proxy goes
191  // away.
192  return Result(FAMProxy->getManager());
193 }
194 
198  for (LazyCallGraph::Node &N : C)
199  FAM->invalidate(N.getFunction(), PA);
200 
201  // This proxy doesn't need to handle invalidation itself. Instead, the
202  // module-level CGSCC proxy handles it above by ensuring that if the
203  // module-level FAM proxy becomes invalid the entire SCC layer, which
204  // includes this proxy, is cleared.
205  return false;
206 }
207 
208 } // End llvm namespace
209 
210 namespace {
211 /// Helper function to update both the \c CGSCCAnalysisManager \p AM and the \c
212 /// CGSCCPassManager's \c CGSCCUpdateResult \p UR based on a range of newly
213 /// added SCCs.
214 ///
215 /// The range of new SCCs must be in postorder already. The SCC they were split
216 /// out of must be provided as \p C. The current node being mutated and
217 /// triggering updates must be passed as \p N.
218 ///
219 /// This function returns the SCC containing \p N. This will be either \p C if
220 /// no new SCCs have been split out, or it will be the new SCC containing \p N.
221 template <typename SCCRangeT>
223 incorporateNewSCCRange(const SCCRangeT &NewSCCRange, LazyCallGraph &G,
226  bool DebugLogging = false) {
227  typedef LazyCallGraph::SCC SCC;
228 
229  if (NewSCCRange.begin() == NewSCCRange.end())
230  return C;
231 
232  // Add the current SCC to the worklist as its shape has changed.
233  UR.CWorklist.insert(C);
234  if (DebugLogging)
235  dbgs() << "Enqueuing the existing SCC in the worklist:" << *C << "\n";
236 
237  SCC *OldC = C;
238  (void)OldC;
239 
240  // Update the current SCC. Note that if we have new SCCs, this must actually
241  // change the SCC.
242  assert(C != &*NewSCCRange.begin() &&
243  "Cannot insert new SCCs without changing current SCC!");
244  C = &*NewSCCRange.begin();
245  assert(G.lookupSCC(N) == C && "Failed to update current SCC!");
246 
247  for (SCC &NewC :
248  reverse(make_range(std::next(NewSCCRange.begin()), NewSCCRange.end()))) {
249  assert(C != &NewC && "No need to re-visit the current SCC!");
250  assert(OldC != &NewC && "Already handled the original SCC!");
251  UR.CWorklist.insert(&NewC);
252  if (DebugLogging)
253  dbgs() << "Enqueuing a newly formed SCC:" << NewC << "\n";
254  }
255  return C;
256 }
257 }
258 
261  CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR, bool DebugLogging) {
262  typedef LazyCallGraph::Node Node;
263  typedef LazyCallGraph::Edge Edge;
264  typedef LazyCallGraph::SCC SCC;
265  typedef LazyCallGraph::RefSCC RefSCC;
266 
267  RefSCC &InitialRC = InitialC.getOuterRefSCC();
268  SCC *C = &InitialC;
269  RefSCC *RC = &InitialRC;
270  Function &F = N.getFunction();
271 
272  // Walk the function body and build up the set of retained, promoted, and
273  // demoted edges.
276  SmallPtrSet<Function *, 16> RetainedEdges;
277  SmallSetVector<Function *, 4> PromotedRefTargets;
278  SmallSetVector<Function *, 4> DemotedCallTargets;
279 
280  // First walk the function and handle all called functions. We do this first
281  // because if there is a single call edge, whether there are ref edges is
282  // irrelevant.
283  for (Instruction &I : instructions(F))
284  if (auto CS = CallSite(&I))
285  if (Function *Callee = CS.getCalledFunction())
286  if (Visited.insert(Callee).second && !Callee->isDeclaration()) {
287  const Edge *E = N.lookup(*Callee);
288  // FIXME: We should really handle adding new calls. While it will
289  // make downstream usage more complex, there is no fundamental
290  // limitation and it will allow passes within the CGSCC to be a bit
291  // more flexible in what transforms they can do. Until then, we
292  // verify that new calls haven't been introduced.
293  assert(E && "No function transformations should introduce *new* "
294  "call edges! Any new calls should be modeled as "
295  "promoted existing ref edges!");
296  RetainedEdges.insert(Callee);
297  if (!E->isCall())
298  PromotedRefTargets.insert(Callee);
299  }
300 
301  // Now walk all references.
302  for (Instruction &I : instructions(F))
303  for (Value *Op : I.operand_values())
304  if (Constant *C = dyn_cast<Constant>(Op))
305  if (Visited.insert(C).second)
306  Worklist.push_back(C);
307 
308  LazyCallGraph::visitReferences(Worklist, Visited, [&](Function &Referee) {
309  const Edge *E = N.lookup(Referee);
310  // FIXME: Similarly to new calls, we also currently preclude
311  // introducing new references. See above for details.
312  assert(E && "No function transformations should introduce *new* ref "
313  "edges! Any new ref edges would require IPO which "
314  "function passes aren't allowed to do!");
315  RetainedEdges.insert(&Referee);
316  if (E->isCall())
317  DemotedCallTargets.insert(&Referee);
318  });
319 
320  // First remove all of the edges that are no longer present in this function.
321  // We have to build a list of dead targets first and then remove them as the
322  // data structures will all be invalidated by removing them.
324  for (Edge &E : N)
325  if (!RetainedEdges.count(&E.getFunction()))
326  DeadTargets.push_back({E.getNode(), E.getKind()});
327  for (auto DeadTarget : DeadTargets) {
328  Node &TargetN = *DeadTarget.getPointer();
329  bool IsCall = DeadTarget.getInt() == Edge::Call;
330  SCC &TargetC = *G.lookupSCC(TargetN);
331  RefSCC &TargetRC = TargetC.getOuterRefSCC();
332 
333  if (&TargetRC != RC) {
334  RC->removeOutgoingEdge(N, TargetN);
335  if (DebugLogging)
336  dbgs() << "Deleting outgoing edge from '" << N << "' to '" << TargetN
337  << "'\n";
338  continue;
339  }
340  if (DebugLogging)
341  dbgs() << "Deleting internal " << (IsCall ? "call" : "ref")
342  << " edge from '" << N << "' to '" << TargetN << "'\n";
343 
344  if (IsCall) {
345  if (C != &TargetC) {
346  // For separate SCCs this is trivial.
347  RC->switchTrivialInternalEdgeToRef(N, TargetN);
348  } else {
349  // Otherwise we may end up re-structuring the call graph. First,
350  // invalidate any SCC analyses. We have to do this before we split
351  // functions into new SCCs and lose track of where their analyses are
352  // cached.
353  // FIXME: We should accept a more precise preserved set here. For
354  // example, it might be possible to preserve some function analyses
355  // even as the SCC structure is changed.
357  // Now update the call graph.
358  C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, TargetN), G,
359  N, C, AM, UR, DebugLogging);
360  }
361  }
362 
363  auto NewRefSCCs = RC->removeInternalRefEdge(N, TargetN);
364  if (!NewRefSCCs.empty()) {
365  // Note that we don't bother to invalidate analyses as ref-edge
366  // connectivity is not really observable in any way and is intended
367  // exclusively to be used for ordering of transforms rather than for
368  // analysis conclusions.
369 
370  // The RC worklist is in reverse postorder, so we first enqueue the
371  // current RefSCC as it will remain the parent of all split RefSCCs, then
372  // we enqueue the new ones in RPO except for the one which contains the
373  // source node as that is the "bottom" we will continue processing in the
374  // bottom-up walk.
375  UR.RCWorklist.insert(RC);
376  if (DebugLogging)
377  dbgs() << "Enqueuing the existing RefSCC in the update worklist: "
378  << *RC << "\n";
379  // Update the RC to the "bottom".
380  assert(G.lookupSCC(N) == C && "Changed the SCC when splitting RefSCCs!");
381  RC = &C->getOuterRefSCC();
382  assert(G.lookupRefSCC(N) == RC && "Failed to update current RefSCC!");
383  assert(NewRefSCCs.front() == RC &&
384  "New current RefSCC not first in the returned list!");
385  for (RefSCC *NewRC : reverse(
386  make_range(std::next(NewRefSCCs.begin()), NewRefSCCs.end()))) {
387  assert(NewRC != RC && "Should not encounter the current RefSCC further "
388  "in the postorder list of new RefSCCs.");
389  UR.RCWorklist.insert(NewRC);
390  if (DebugLogging)
391  dbgs() << "Enqueuing a new RefSCC in the update worklist: " << *NewRC
392  << "\n";
393  }
394  }
395  }
396 
397  // Next demote all the call edges that are now ref edges. This helps make
398  // the SCCs small which should minimize the work below as we don't want to
399  // form cycles that this would break.
400  for (Function *RefTarget : DemotedCallTargets) {
401  Node &TargetN = *G.lookup(*RefTarget);
402  SCC &TargetC = *G.lookupSCC(TargetN);
403  RefSCC &TargetRC = TargetC.getOuterRefSCC();
404 
405  // The easy case is when the target RefSCC is not this RefSCC. This is
406  // only supported when the target RefSCC is a child of this RefSCC.
407  if (&TargetRC != RC) {
408  assert(RC->isAncestorOf(TargetRC) &&
409  "Cannot potentially form RefSCC cycles here!");
410  RC->switchOutgoingEdgeToRef(N, TargetN);
411  if (DebugLogging)
412  dbgs() << "Switch outgoing call edge to a ref edge from '" << N
413  << "' to '" << TargetN << "'\n";
414  continue;
415  }
416 
417  // We are switching an internal call edge to a ref edge. This may split up
418  // some SCCs.
419  if (C != &TargetC) {
420  // For separate SCCs this is trivial.
421  RC->switchTrivialInternalEdgeToRef(N, TargetN);
422  continue;
423  }
424 
425  // Otherwise we may end up re-structuring the call graph. First, invalidate
426  // any SCC analyses. We have to do this before we split functions into new
427  // SCCs and lose track of where their analyses are cached.
428  // FIXME: We should accept a more precise preserved set here. For example,
429  // it might be possible to preserve some function analyses even as the SCC
430  // structure is changed.
432  // Now update the call graph.
433  C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, TargetN), G,
434  N, C, AM, UR, DebugLogging);
435  }
436 
437  // Now promote ref edges into call edges.
438  for (Function *CallTarget : PromotedRefTargets) {
439  Node &TargetN = *G.lookup(*CallTarget);
440  SCC &TargetC = *G.lookupSCC(TargetN);
441  RefSCC &TargetRC = TargetC.getOuterRefSCC();
442 
443  // The easy case is when the target RefSCC is not this RefSCC. This is
444  // only supported when the target RefSCC is a child of this RefSCC.
445  if (&TargetRC != RC) {
446  assert(RC->isAncestorOf(TargetRC) &&
447  "Cannot potentially form RefSCC cycles here!");
448  RC->switchOutgoingEdgeToCall(N, TargetN);
449  if (DebugLogging)
450  dbgs() << "Switch outgoing ref edge to a call edge from '" << N
451  << "' to '" << TargetN << "'\n";
452  continue;
453  }
454  if (DebugLogging)
455  dbgs() << "Switch an internal ref edge to a call edge from '" << N
456  << "' to '" << TargetN << "'\n";
457 
458  // Otherwise we are switching an internal ref edge to a call edge. This
459  // may merge away some SCCs, and we add those to the UpdateResult. We also
460  // need to make sure to update the worklist in the event SCCs have moved
461  // before the current one in the post-order sequence.
462  auto InitialSCCIndex = RC->find(*C) - RC->begin();
463  auto InvalidatedSCCs = RC->switchInternalEdgeToCall(N, TargetN);
464  if (!InvalidatedSCCs.empty()) {
465  C = &TargetC;
466  assert(G.lookupSCC(N) == C && "Failed to update current SCC!");
467 
468  // Any analyses cached for this SCC are no longer precise as the shape
469  // has changed by introducing this cycle.
471 
472  for (SCC *InvalidatedC : InvalidatedSCCs) {
473  assert(InvalidatedC != C && "Cannot invalidate the current SCC!");
474  UR.InvalidatedSCCs.insert(InvalidatedC);
475 
476  // Also clear any cached analyses for the SCCs that are dead. This
477  // isn't really necessary for correctness but can release memory.
478  AM.clear(*InvalidatedC);
479  }
480  }
481  auto NewSCCIndex = RC->find(*C) - RC->begin();
482  if (InitialSCCIndex < NewSCCIndex) {
483  // Put our current SCC back onto the worklist as we'll visit other SCCs
484  // that are now definitively ordered prior to the current one in the
485  // post-order sequence, and may end up observing more precise context to
486  // optimize the current SCC.
487  UR.CWorklist.insert(C);
488  if (DebugLogging)
489  dbgs() << "Enqueuing the existing SCC in the worklist: " << *C << "\n";
490  // Enqueue in reverse order as we pop off the back of the worklist.
491  for (SCC &MovedC : reverse(make_range(RC->begin() + InitialSCCIndex,
492  RC->begin() + NewSCCIndex))) {
493  UR.CWorklist.insert(&MovedC);
494  if (DebugLogging)
495  dbgs() << "Enqueuing a newly earlier in post-order SCC: " << MovedC
496  << "\n";
497  }
498  }
499  }
500 
501  assert(!UR.InvalidatedSCCs.count(C) && "Invalidated the current SCC!");
502  assert(!UR.InvalidatedRefSCCs.count(RC) && "Invalidated the current RefSCC!");
503  assert(&C->getOuterRefSCC() == RC && "Current SCC not in current RefSCC!");
504 
505  // Record the current RefSCC and SCC for higher layers of the CGSCC pass
506  // manager now that all the updates have been applied.
507  if (RC != &InitialRC)
508  UR.UpdatedRC = RC;
509  if (C != &InitialC)
510  UR.UpdatedC = C;
511 
512  return *C;
513 }
Pass interface - Implemented by all 'passes'.
Definition: Pass.h:81
void abandon()
Mark an analysis as abandoned.
Definition: PassManager.h:153
SCC * lookupSCC(Node &N) const
Lookup a function's SCC in the graph.
Result run(LazyCallGraph::SCC &C, CGSCCAnalysisManager &AM, LazyCallGraph &)
Computes the FunctionAnalysisManager and stores it in the result proxy.
bool invalidate(IRUnitT &IR, const PreservedAnalyses &PA)
Trigger the invalidation of some other analysis pass if not already handled and return whether it was...
Definition: PassManager.h:543
void removeOutgoingEdge(Node &SourceN, Node &TargetN)
Remove an edge whose source is in this RefSCC and target is not.
SmallPriorityWorklist< LazyCallGraph::RefSCC *, 1 > & RCWorklist
Worklist of the RefSCCs queued for processing.
void intersect(const PreservedAnalyses &Arg)
Intersect this set with another in place.
Definition: PassManager.h:171
A Module instance is used to store all the information related to an LLVM module. ...
Definition: Module.h:52
iterator begin() const
size_type count(PtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
Definition: SmallPtrSet.h:380
Support structure for SCC passes to communicate updates the call graph back to the CGSCC pass manager...
AnalysisManager< LazyCallGraph::SCC, LazyCallGraph & > CGSCCAnalysisManager
The CGSCC analysis manager.
LazyCallGraph::SCC * UpdatedC
If non-null, the updated current SCC being processed.
auto reverse(ContainerTy &&C, typename std::enable_if< has_rbegin< ContainerTy >::value >::type *=nullptr) -> decltype(make_range(C.rbegin(), C.rend()))
Definition: STLExtras.h:241
A RefSCC of the call graph.
#define F(x, y, z)
Definition: MD5.cpp:51
bool insert(const value_type &X)
Insert a new element into the SetVector.
Definition: SetVector.h:136
A lazily constructed view of the call graph of a module.
Result run(IRUnitT &IR, AnalysisManager< IRUnitT, ExtraArgTs...> &AM, ExtraArgTs...)
Run the analysis pass and create our proxy result object.
Definition: PassManager.h:965
static PreservedAnalyses none()
Convenience factory function for the empty preserved set.
Definition: PassManager.h:110
static GCRegistry::Add< CoreCLRGC > E("coreclr","CoreCLR-compatible GC")
A set of analyses that are preserved following a run of a transformation pass.
Definition: PassManager.h:107
PassT::Result & getResult(IRUnitT &IR, ExtraArgTs...ExtraArgs)
Get the result of an analysis pass for a given IR unit.
Definition: PassManager.h:653
This is an important base class in LLVM.
Definition: Constant.h:42
SmallPtrSetImpl< LazyCallGraph::RefSCC * > & InvalidatedRefSCCs
The set of invalidated RefSCCs which should be skipped if they are found in RCWorklist.
SmallPtrSetImpl< LazyCallGraph::SCC * > & InvalidatedSCCs
The set of invalidated SCCs which should be skipped if they are found in CWorklist.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
Definition: SmallPtrSet.h:368
LazyCallGraph::RefSCC * UpdatedRC
If non-null, the updated current RefSCC being processed.
PreservedAnalysisChecker getChecker() const
Build a checker for this PreservedAnalyses and the specified analysis type.
Definition: PassManager.h:250
RefSCC & getOuterRefSCC() const
A node in the call graph.
A class used to represent edges in the call graph.
static PreservedAnalyses all()
Construct a special preserved set that preserves all passes.
Definition: PassManager.h:113
bool areAllPreserved() const
Test whether all analyses are preserved (and none are abandoned).
Definition: PassManager.h:267
SmallPriorityWorklist< LazyCallGraph::SCC *, 1 > & CWorklist
Worklist of the SCCs queued for processing.
void clear(IRUnitT &IR)
Clear any cached analysis results for a single unit of IR.
Definition: PassManager.h:623
iterator_range< T > make_range(T x, T y)
Convenience function for iterating over sub-ranges.
Function & getFunction() const
A SetVector that performs no allocations if smaller than a certain size.
Definition: SetVector.h:292
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements...
Definition: SmallPtrSet.h:425
An analysis over an "inner" IR unit that provides access to an analysis manager over a "outer" IR uni...
Definition: PassManager.h:1017
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small...
Definition: SmallVector.h:843
const DataFlowGraph & G
Definition: RDFGraph.cpp:206
void invalidate(IRUnitT &IR)
Invalidate a specific analysis pass for an IR module.
Definition: PassManager.h:722
static GCRegistry::Add< ShadowStackGC > C("shadow-stack","Very portable GC for uncooperative code generators")
raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
Definition: Debug.cpp:132
const Edge * lookup(Function &F) const
static void visitReferences(SmallVectorImpl< Constant * > &Worklist, SmallPtrSetImpl< Constant * > &Visited, CallbackT Callback)
Recursively visits the defined functions whose address is reachable from every constant in the Workli...
bool invalidate(IRUnitT &IR, const PreservedAnalyses &PA, typename AnalysisManager< IRUnitT, ExtraArgTs...>::Invalidator &Inv)
Handler for invalidation of the outer IR unit, IRUnitT.
Node * lookup(const Function &F) const
Lookup a function in the graph which has already been scanned and added.
Manages a sequence of passes over a particular unit of IR.
Definition: PassManager.h:389
iterator end() const
void preserveSet()
Mark an analysis set as preserved.
Definition: PassManager.h:135
#define I(x, y, z)
Definition: MD5.cpp:54
#define N
AnalysisManager< Module > ModuleAnalysisManager
Convenience typedef for the Module analysis manager.
Definition: PassManager.h:882
An analysis pass which computes the call graph for a module.
This header provides classes for managing passes over SCCs of the call graph.
API to communicate dependencies between analyses during invalidation.
Definition: PassManager.h:525
bool invalidate(LazyCallGraph::SCC &C, const PreservedAnalyses &PA, CGSCCAnalysisManager::Invalidator &Inv)
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
This templated class represents "all analyses that operate over \<a particular IR unit\>" (e...
Definition: PassManager.h:361
bool allAnalysesInSetPreserved() const
Directly test whether a set of analyses is preserved.
Definition: PassManager.h:275
LLVM Value Representation.
Definition: Value.h:71
An SCC of the call graph.
inst_range instructions(Function *F)
Definition: InstIterator.h:132
A container for analyses that lazily runs them and caches their results.
RefSCC * lookupRefSCC(Node &N) const
Lookup a function's RefSCC in the graph.
LazyCallGraph::SCC & updateCGAndAnalysisManagerForFunctionPass(LazyCallGraph &G, LazyCallGraph::SCC &C, LazyCallGraph::Node &N, CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR, bool DebugLogging=false)
Helper to update the call graph after running a function pass.
A special type used by analysis passes to provide an address that identifies that particular analysis...
Definition: PassManager.h:64
An analysis over an "outer" IR unit that provides access to an analysis manager over an "inner" IR un...
Definition: PassManager.h:905