Bug Summary

File: build/llvm-toolchain-snapshot-16~++20221003111214+1fa2019828ca/llvm/lib/Analysis/CGSCCPassManager.cpp
Warning: line 226, column 11
Forming reference to null pointer

Annotated Source Code

Press '?' to see keyboard shortcuts

clang -cc1 -cc1 -triple x86_64-pc-linux-gnu -analyze -disable-free -clear-ast-before-backend -disable-llvm-verifier -discard-value-names -main-file-name CGSCCPassManager.cpp -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model pic -pic-level 2 -mframe-pointer=none -fmath-errno -ffp-contract=on -fno-rounding-math -mconstructor-aliases -funwind-tables=2 -target-cpu x86-64 -tune-cpu generic -debugger-tuning=gdb -ffunction-sections -fdata-sections -fcoverage-compilation-dir=/build/llvm-toolchain-snapshot-16~++20221003111214+1fa2019828ca/build-llvm -resource-dir /usr/lib/llvm-16/lib/clang/16.0.0 -D _DEBUG -D _GNU_SOURCE -D __STDC_CONSTANT_MACROS -D __STDC_FORMAT_MACROS -D __STDC_LIMIT_MACROS -I lib/Analysis -I /build/llvm-toolchain-snapshot-16~++20221003111214+1fa2019828ca/llvm/lib/Analysis -I include -I /build/llvm-toolchain-snapshot-16~++20221003111214+1fa2019828ca/llvm/include -D _FORTIFY_SOURCE=2 -D NDEBUG -U NDEBUG -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/10/../../../../include/c++/10 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/10/../../../../include/x86_64-linux-gnu/c++/10 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/10/../../../../include/c++/10/backward -internal-isystem /usr/lib/llvm-16/lib/clang/16.0.0/include -internal-isystem /usr/local/include -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/10/../../../../x86_64-linux-gnu/include -internal-externc-isystem 
/usr/include/x86_64-linux-gnu -internal-externc-isystem /include -internal-externc-isystem /usr/include -fmacro-prefix-map=/build/llvm-toolchain-snapshot-16~++20221003111214+1fa2019828ca/build-llvm=build-llvm -fmacro-prefix-map=/build/llvm-toolchain-snapshot-16~++20221003111214+1fa2019828ca/= -fcoverage-prefix-map=/build/llvm-toolchain-snapshot-16~++20221003111214+1fa2019828ca/build-llvm=build-llvm -fcoverage-prefix-map=/build/llvm-toolchain-snapshot-16~++20221003111214+1fa2019828ca/= -O3 -Wno-unused-command-line-argument -Wno-unused-parameter -Wwrite-strings -Wno-missing-field-initializers -Wno-long-long -Wno-maybe-uninitialized -Wno-class-memaccess -Wno-redundant-move -Wno-pessimizing-move -Wno-noexcept-type -Wno-comment -Wno-misleading-indentation -std=c++17 -fdeprecated-macro -fdebug-compilation-dir=/build/llvm-toolchain-snapshot-16~++20221003111214+1fa2019828ca/build-llvm -fdebug-prefix-map=/build/llvm-toolchain-snapshot-16~++20221003111214+1fa2019828ca/build-llvm=build-llvm -fdebug-prefix-map=/build/llvm-toolchain-snapshot-16~++20221003111214+1fa2019828ca/= -ferror-limit 19 -fvisibility-inlines-hidden -stack-protector 2 -fgnuc-version=4.2.1 -fcolor-diagnostics -vectorize-loops -vectorize-slp -analyzer-output=html -analyzer-config stable-report-filename=true -faddrsig -D__GCC_HAVE_DWARF2_CFI_ASM=1 -o /tmp/scan-build-2022-10-03-140002-15933-1 -x c++ /build/llvm-toolchain-snapshot-16~++20221003111214+1fa2019828ca/llvm/lib/Analysis/CGSCCPassManager.cpp

/build/llvm-toolchain-snapshot-16~++20221003111214+1fa2019828ca/llvm/lib/Analysis/CGSCCPassManager.cpp

1//===- CGSCCPassManager.cpp - Managing & running CGSCC passes -------------===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8
9#include "llvm/Analysis/CGSCCPassManager.h"
10#include "llvm/ADT/ArrayRef.h"
11#include "llvm/ADT/Optional.h"
12#include "llvm/ADT/PriorityWorklist.h"
13#include "llvm/ADT/STLExtras.h"
14#include "llvm/ADT/SetVector.h"
15#include "llvm/ADT/SmallPtrSet.h"
16#include "llvm/ADT/SmallVector.h"
17#include "llvm/ADT/iterator_range.h"
18#include "llvm/Analysis/LazyCallGraph.h"
19#include "llvm/IR/Constant.h"
20#include "llvm/IR/InstIterator.h"
21#include "llvm/IR/Instruction.h"
22#include "llvm/IR/PassManager.h"
23#include "llvm/IR/PassManagerImpl.h"
24#include "llvm/IR/ValueHandle.h"
25#include "llvm/Support/Casting.h"
26#include "llvm/Support/CommandLine.h"
27#include "llvm/Support/Debug.h"
28#include "llvm/Support/ErrorHandling.h"
29#include "llvm/Support/TimeProfiler.h"
30#include "llvm/Support/raw_ostream.h"
31#include <cassert>
32#include <iterator>
33
34#define DEBUG_TYPE"cgscc" "cgscc"
35
36using namespace llvm;
37
38// Explicit template instantiations and specialization definitions for core
39// template typedefs.
40namespace llvm {
41static cl::opt<bool> AbortOnMaxDevirtIterationsReached(
42 "abort-on-max-devirt-iterations-reached",
43 cl::desc("Abort when the max iterations for devirtualization CGSCC repeat "
44 "pass is reached"));
45
46AnalysisKey ShouldNotRunFunctionPassesAnalysis::Key;
47
48// Explicit instantiations for the core proxy templates.
49template class AllAnalysesOn<LazyCallGraph::SCC>;
50template class AnalysisManager<LazyCallGraph::SCC, LazyCallGraph &>;
51template class PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager,
52 LazyCallGraph &, CGSCCUpdateResult &>;
53template class InnerAnalysisManagerProxy<CGSCCAnalysisManager, Module>;
54template class OuterAnalysisManagerProxy<ModuleAnalysisManager,
55 LazyCallGraph::SCC, LazyCallGraph &>;
56template class OuterAnalysisManagerProxy<CGSCCAnalysisManager, Function>;
57
58/// Explicitly specialize the pass manager run method to handle call graph
59/// updates.
60template <>
61PreservedAnalyses
62PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager, LazyCallGraph &,
63 CGSCCUpdateResult &>::run(LazyCallGraph::SCC &InitialC,
64 CGSCCAnalysisManager &AM,
65 LazyCallGraph &G, CGSCCUpdateResult &UR) {
66 // Request PassInstrumentation from analysis manager, will use it to run
67 // instrumenting callbacks for the passes later.
68 PassInstrumentation PI =
69 AM.getResult<PassInstrumentationAnalysis>(InitialC, G);
70
71 PreservedAnalyses PA = PreservedAnalyses::all();
72
73 // The SCC may be refined while we are running passes over it, so set up
74 // a pointer that we can update.
75 LazyCallGraph::SCC *C = &InitialC;
76
77 // Get Function analysis manager from its proxy.
78 FunctionAnalysisManager &FAM =
79 AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(*C)->getManager();
80
81 for (auto &Pass : Passes) {
82 // Check the PassInstrumentation's BeforePass callbacks before running the
83 // pass, skip its execution completely if asked to (callback returns false).
84 if (!PI.runBeforePass(*Pass, *C))
85 continue;
86
87 PreservedAnalyses PassPA = Pass->run(*C, AM, G, UR);
88
89 if (UR.InvalidatedSCCs.count(C))
90 PI.runAfterPassInvalidated<LazyCallGraph::SCC>(*Pass, PassPA);
91 else
92 PI.runAfterPass<LazyCallGraph::SCC>(*Pass, *C, PassPA);
93
94 // Update the SCC if necessary.
95 C = UR.UpdatedC ? UR.UpdatedC : C;
96 if (UR.UpdatedC) {
97 // If C is updated, also create a proxy and update FAM inside the result.
98 auto *ResultFAMCP =
99 &AM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, G);
100 ResultFAMCP->updateFAM(FAM);
101 }
102
103 // Intersect the final preserved analyses to compute the aggregate
104 // preserved set for this pass manager.
105 PA.intersect(PassPA);
106
107 // If the CGSCC pass wasn't able to provide a valid updated SCC, the
108 // current SCC may simply need to be skipped if invalid.
109 if (UR.InvalidatedSCCs.count(C)) {
110 LLVM_DEBUG(dbgs() << "Skipping invalidated root or island SCC!\n")do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Skipping invalidated root or island SCC!\n"
; } } while (false)
;
111 break;
112 }
113
114 // Check that we didn't miss any update scenario.
115 assert(C->begin() != C->end() && "Cannot have an empty SCC!")(static_cast <bool> (C->begin() != C->end() &&
"Cannot have an empty SCC!") ? void (0) : __assert_fail ("C->begin() != C->end() && \"Cannot have an empty SCC!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 115, __extension__
__PRETTY_FUNCTION__))
;
116
117 // Update the analysis manager as each pass runs and potentially
118 // invalidates analyses.
119 AM.invalidate(*C, PassPA);
120 }
121
122 // Before we mark all of *this* SCC's analyses as preserved below, intersect
123 // this with the cross-SCC preserved analysis set. This is used to allow
124 // CGSCC passes to mutate ancestor SCCs and still trigger proper invalidation
125 // for them.
126 UR.CrossSCCPA.intersect(PA);
127
128 // Invalidation was handled after each pass in the above loop for the current
129 // SCC. Therefore, the remaining analysis results in the AnalysisManager are
130 // preserved. We mark this with a set so that we don't need to inspect each
131 // one individually.
132 PA.preserveSet<AllAnalysesOn<LazyCallGraph::SCC>>();
133
134 return PA;
135}
136
137PreservedAnalyses
138ModuleToPostOrderCGSCCPassAdaptor::run(Module &M, ModuleAnalysisManager &AM) {
139 // Setup the CGSCC analysis manager from its proxy.
140 CGSCCAnalysisManager &CGAM =
141 AM.getResult<CGSCCAnalysisManagerModuleProxy>(M).getManager();
142
143 // Get the call graph for this module.
144 LazyCallGraph &CG = AM.getResult<LazyCallGraphAnalysis>(M);
145
146 // Get Function analysis manager from its proxy.
147 FunctionAnalysisManager &FAM =
148 AM.getCachedResult<FunctionAnalysisManagerModuleProxy>(M)->getManager();
149
150 // We keep worklists to allow us to push more work onto the pass manager as
151 // the passes are run.
152 SmallPriorityWorklist<LazyCallGraph::RefSCC *, 1> RCWorklist;
153 SmallPriorityWorklist<LazyCallGraph::SCC *, 1> CWorklist;
154
155 // Keep sets for invalidated SCCs and RefSCCs that should be skipped when
156 // iterating off the worklists.
157 SmallPtrSet<LazyCallGraph::RefSCC *, 4> InvalidRefSCCSet;
158 SmallPtrSet<LazyCallGraph::SCC *, 4> InvalidSCCSet;
159
160 SmallDenseSet<std::pair<LazyCallGraph::Node *, LazyCallGraph::SCC *>, 4>
161 InlinedInternalEdges;
162
163 CGSCCUpdateResult UR = {
164 RCWorklist, CWorklist, InvalidRefSCCSet,
165 InvalidSCCSet, nullptr, PreservedAnalyses::all(),
166 InlinedInternalEdges, {}};
167
168 // Request PassInstrumentation from analysis manager, will use it to run
169 // instrumenting callbacks for the passes later.
170 PassInstrumentation PI = AM.getResult<PassInstrumentationAnalysis>(M);
171
172 PreservedAnalyses PA = PreservedAnalyses::all();
173 CG.buildRefSCCs();
174 for (LazyCallGraph::RefSCC &RC :
175 llvm::make_early_inc_range(CG.postorder_ref_sccs())) {
176 assert(RCWorklist.empty() &&(static_cast <bool> (RCWorklist.empty() && "Should always start with an empty RefSCC worklist"
) ? void (0) : __assert_fail ("RCWorklist.empty() && \"Should always start with an empty RefSCC worklist\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 177, __extension__
__PRETTY_FUNCTION__))
1
'?' condition is true
177 "Should always start with an empty RefSCC worklist")(static_cast <bool> (RCWorklist.empty() && "Should always start with an empty RefSCC worklist"
) ? void (0) : __assert_fail ("RCWorklist.empty() && \"Should always start with an empty RefSCC worklist\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 177, __extension__
__PRETTY_FUNCTION__))
;
178 // The postorder_ref_sccs range we are walking is lazily constructed, so
179 // we only push the first one onto the worklist. The worklist allows us
180 // to capture *new* RefSCCs created during transformations.
181 //
182 // We really want to form RefSCCs lazily because that makes them cheaper
183 // to update as the program is simplified and allows us to have greater
184 // cache locality as forming a RefSCC touches all the parts of all the
185 // functions within that RefSCC.
186 //
187 // We also eagerly increment the iterator to the next position because
188 // the CGSCC passes below may delete the current RefSCC.
189 RCWorklist.insert(&RC);
190
191 do {
192 LazyCallGraph::RefSCC *RC = RCWorklist.pop_back_val();
193 if (InvalidRefSCCSet.count(RC)) {
2
Assuming the condition is false
194 LLVM_DEBUG(dbgs() << "Skipping an invalid RefSCC...\n")do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Skipping an invalid RefSCC...\n"
; } } while (false)
;
195 continue;
196 }
197
198 assert(CWorklist.empty() &&(static_cast <bool> (CWorklist.empty() && "Should always start with an empty SCC worklist"
) ? void (0) : __assert_fail ("CWorklist.empty() && \"Should always start with an empty SCC worklist\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 199, __extension__
__PRETTY_FUNCTION__))
3
Taking false branch
4
'?' condition is true
199 "Should always start with an empty SCC worklist")(static_cast <bool> (CWorklist.empty() && "Should always start with an empty SCC worklist"
) ? void (0) : __assert_fail ("CWorklist.empty() && \"Should always start with an empty SCC worklist\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 199, __extension__
__PRETTY_FUNCTION__))
;
200
201 LLVM_DEBUG(dbgs() << "Running an SCC pass across the RefSCC: " << *RCdo { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Running an SCC pass across the RefSCC: "
<< *RC << "\n"; } } while (false)
5
Assuming 'DebugFlag' is false
6
Loop condition is false. Exiting loop
202 << "\n")do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Running an SCC pass across the RefSCC: "
<< *RC << "\n"; } } while (false)
;
203
204 // The top of the worklist may *also* be the same SCC we just ran over
205 // (and invalidated for). Keep track of that last SCC we processed due
206 // to SCC update to avoid redundant processing when an SCC is both just
207 // updated itself and at the top of the worklist.
208 LazyCallGraph::SCC *LastUpdatedC = nullptr;
209
210 // Push the initial SCCs in reverse post-order as we'll pop off the
211 // back and so see this in post-order.
212 for (LazyCallGraph::SCC &C : llvm::reverse(*RC))
213 CWorklist.insert(&C);
214
215 do {
216 LazyCallGraph::SCC *C = CWorklist.pop_back_val();
7
Calling 'PriorityWorklist::pop_back_val'
14
Returning from 'PriorityWorklist::pop_back_val'
15
'C' initialized here
217 // Due to call graph mutations, we may have invalid SCCs or SCCs from
218 // other RefSCCs in the worklist. The invalid ones are dead and the
219 // other RefSCCs should be queued above, so we just need to skip both
220 // scenarios here.
221 if (InvalidSCCSet.count(C)) {
16
Assuming the condition is false
17
Taking false branch
222 LLVM_DEBUG(dbgs() << "Skipping an invalid SCC...\n")do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Skipping an invalid SCC...\n"; }
} while (false)
;
223 continue;
224 }
225 if (LastUpdatedC == C) {
18
Assuming 'LastUpdatedC' is equal to 'C'
226 LLVM_DEBUG(dbgs() << "Skipping redundant run on SCC: " << *C << "\n")do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Skipping redundant run on SCC: "
<< *C << "\n"; } } while (false)
;
19
Taking true branch
20
Assuming 'DebugFlag' is true
21
Assuming the condition is true
22
Taking true branch
23
Forming reference to null pointer
227 continue;
228 }
229 // We used to also check if the current SCC is part of the current
230 // RefSCC and bail if it wasn't, since it should be in RCWorklist.
231 // However, this can cause compile time explosions in some cases on
232 // modules with a huge RefSCC. If a non-trivial amount of SCCs in the
233 // huge RefSCC can become their own child RefSCC, we create one child
234 // RefSCC, bail on the current RefSCC, visit the child RefSCC, revisit
235 // the huge RefSCC, and repeat. By visiting all SCCs in the original
236 // RefSCC we create all the child RefSCCs in one pass of the RefSCC,
237 // rather than one pass of the RefSCC creating one child RefSCC at a time.
238
239 // Ensure we can proxy analysis updates from the CGSCC analysis manager
240 // into the Function analysis manager by getting a proxy here.
241 // This also needs to update the FunctionAnalysisManager, as this may be
242 // the first time we see this SCC.
243 CGAM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, CG).updateFAM(
244 FAM);
245
246 // Each time we visit a new SCC pulled off the worklist,
247 // a transformation of a child SCC may have also modified this parent
248 // and invalidated analyses. So we invalidate using the update record's
249 // cross-SCC preserved set. This preserved set is intersected by any
250 // CGSCC pass that handles invalidation (primarily pass managers) prior
251 // to marking its SCC as preserved. That lets us track everything that
252 // might need invalidation across SCCs without excessive invalidations
253 // on a single SCC.
254 //
255 // This essentially allows SCC passes to freely invalidate analyses
256 // of any ancestor SCC. If this becomes detrimental to successfully
257 // caching analyses, we could force each SCC pass to manually
258 // invalidate the analyses for any SCCs other than themselves which
259 // are mutated. However, that seems to lose the robustness of the
260 // pass-manager driven invalidation scheme.
261 CGAM.invalidate(*C, UR.CrossSCCPA);
262
263 do {
264 // Check that we didn't miss any update scenario.
265 assert(!InvalidSCCSet.count(C) && "Processing an invalid SCC!")(static_cast <bool> (!InvalidSCCSet.count(C) &&
"Processing an invalid SCC!") ? void (0) : __assert_fail ("!InvalidSCCSet.count(C) && \"Processing an invalid SCC!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 265, __extension__
__PRETTY_FUNCTION__))
;
266 assert(C->begin() != C->end() && "Cannot have an empty SCC!")(static_cast <bool> (C->begin() != C->end() &&
"Cannot have an empty SCC!") ? void (0) : __assert_fail ("C->begin() != C->end() && \"Cannot have an empty SCC!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 266, __extension__
__PRETTY_FUNCTION__))
;
267
268 LastUpdatedC = UR.UpdatedC;
269 UR.UpdatedC = nullptr;
270
271 // Check the PassInstrumentation's BeforePass callbacks before
272 // running the pass, skip its execution completely if asked to
273 // (callback returns false).
274 if (!PI.runBeforePass<LazyCallGraph::SCC>(*Pass, *C))
275 continue;
276
277 PreservedAnalyses PassPA = Pass->run(*C, CGAM, CG, UR);
278
279 if (UR.InvalidatedSCCs.count(C))
280 PI.runAfterPassInvalidated<LazyCallGraph::SCC>(*Pass, PassPA);
281 else
282 PI.runAfterPass<LazyCallGraph::SCC>(*Pass, *C, PassPA);
283
284 // Update the SCC and RefSCC if necessary.
285 C = UR.UpdatedC ? UR.UpdatedC : C;
286
287 if (UR.UpdatedC) {
288 // If we're updating the SCC, also update the FAM inside the proxy's
289 // result.
290 CGAM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, CG).updateFAM(
291 FAM);
292 }
293
294 // Intersect with the cross-SCC preserved set to capture any
295 // cross-SCC invalidation.
296 UR.CrossSCCPA.intersect(PassPA);
297 // Intersect the preserved set so that invalidation of module
298 // analyses will eventually occur when the module pass completes.
299 PA.intersect(PassPA);
300
301 // If the CGSCC pass wasn't able to provide a valid updated SCC,
302 // the current SCC may simply need to be skipped if invalid.
303 if (UR.InvalidatedSCCs.count(C)) {
304 LLVM_DEBUG(dbgs() << "Skipping invalidated root or island SCC!\n")do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Skipping invalidated root or island SCC!\n"
; } } while (false)
;
305 break;
306 }
307
308 // Check that we didn't miss any update scenario.
309 assert(C->begin() != C->end() && "Cannot have an empty SCC!")(static_cast <bool> (C->begin() != C->end() &&
"Cannot have an empty SCC!") ? void (0) : __assert_fail ("C->begin() != C->end() && \"Cannot have an empty SCC!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 309, __extension__
__PRETTY_FUNCTION__))
;
310
311 // We handle invalidating the CGSCC analysis manager's information
312 // for the (potentially updated) SCC here. Note that any other SCCs
313 // whose structure has changed should have been invalidated by
314 // whatever was updating the call graph. This SCC gets invalidated
315 // late as it contains the nodes that were actively being
316 // processed.
317 CGAM.invalidate(*C, PassPA);
318
319 // The pass may have restructured the call graph and refined the
320 // current SCC and/or RefSCC. We need to update our current SCC and
321 // RefSCC pointers to follow these. Also, when the current SCC is
322 // refined, re-run the SCC pass over the newly refined SCC in order
323 // to observe the most precise SCC model available. This inherently
324 // cannot cycle excessively as it only happens when we split SCCs
325 // apart, at most converging on a DAG of single nodes.
326 // FIXME: If we ever start having RefSCC passes, we'll want to
327 // iterate there too.
328 if (UR.UpdatedC)
329 LLVM_DEBUG(dbgs()do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Re-running SCC passes after a refinement of the "
"current SCC: " << *UR.UpdatedC << "\n"; } } while
(false)
330 << "Re-running SCC passes after a refinement of the "do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Re-running SCC passes after a refinement of the "
"current SCC: " << *UR.UpdatedC << "\n"; } } while
(false)
331 "current SCC: "do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Re-running SCC passes after a refinement of the "
"current SCC: " << *UR.UpdatedC << "\n"; } } while
(false)
332 << *UR.UpdatedC << "\n")do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Re-running SCC passes after a refinement of the "
"current SCC: " << *UR.UpdatedC << "\n"; } } while
(false)
;
333
334 // Note that both `C` and `RC` may at this point refer to deleted,
335 // invalid SCC and RefSCCs respectively. But we will short circuit
336 // the processing when we check them in the loop above.
337 } while (UR.UpdatedC);
338 } while (!CWorklist.empty());
339
340 // We only need to keep internal inlined edge information within
341 // a RefSCC, clear it to save on space and let the next time we visit
342 // any of these functions have a fresh start.
343 InlinedInternalEdges.clear();
344 } while (!RCWorklist.empty());
345 }
346
347 // By definition we preserve the call graph, all SCC analyses, and the
348 // analysis proxies by handling them above and in any nested pass managers.
349 PA.preserveSet<AllAnalysesOn<LazyCallGraph::SCC>>();
350 PA.preserve<LazyCallGraphAnalysis>();
351 PA.preserve<CGSCCAnalysisManagerModuleProxy>();
352 PA.preserve<FunctionAnalysisManagerModuleProxy>();
353 return PA;
354}
355
356PreservedAnalyses DevirtSCCRepeatedPass::run(LazyCallGraph::SCC &InitialC,
357 CGSCCAnalysisManager &AM,
358 LazyCallGraph &CG,
359 CGSCCUpdateResult &UR) {
360 PreservedAnalyses PA = PreservedAnalyses::all();
361 PassInstrumentation PI =
362 AM.getResult<PassInstrumentationAnalysis>(InitialC, CG);
363
364 // The SCC may be refined while we are running passes over it, so set up
365 // a pointer that we can update.
366 LazyCallGraph::SCC *C = &InitialC;
367
368 // Struct to track the counts of direct and indirect calls in each function
369 // of the SCC.
370 struct CallCount {
371 int Direct;
372 int Indirect;
373 };
374
375 // Put value handles on all of the indirect calls and return the number of
376 // direct calls for each function in the SCC.
377 auto ScanSCC = [](LazyCallGraph::SCC &C,
378 SmallMapVector<Value *, WeakTrackingVH, 16> &CallHandles) {
379 assert(CallHandles.empty() && "Must start with a clear set of handles.")(static_cast <bool> (CallHandles.empty() && "Must start with a clear set of handles."
) ? void (0) : __assert_fail ("CallHandles.empty() && \"Must start with a clear set of handles.\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 379, __extension__
__PRETTY_FUNCTION__))
;
380
381 SmallDenseMap<Function *, CallCount> CallCounts;
382 CallCount CountLocal = {0, 0};
383 for (LazyCallGraph::Node &N : C) {
384 CallCount &Count =
385 CallCounts.insert(std::make_pair(&N.getFunction(), CountLocal))
386 .first->second;
387 for (Instruction &I : instructions(N.getFunction()))
388 if (auto *CB = dyn_cast<CallBase>(&I)) {
389 if (CB->getCalledFunction()) {
390 ++Count.Direct;
391 } else {
392 ++Count.Indirect;
393 CallHandles.insert({CB, WeakTrackingVH(CB)});
394 }
395 }
396 }
397
398 return CallCounts;
399 };
400
401 UR.IndirectVHs.clear();
402 // Populate the initial call handles and get the initial call counts.
403 auto CallCounts = ScanSCC(*C, UR.IndirectVHs);
404
405 for (int Iteration = 0;; ++Iteration) {
406 if (!PI.runBeforePass<LazyCallGraph::SCC>(*Pass, *C))
407 continue;
408
409 PreservedAnalyses PassPA = Pass->run(*C, AM, CG, UR);
410
411 if (UR.InvalidatedSCCs.count(C))
412 PI.runAfterPassInvalidated<LazyCallGraph::SCC>(*Pass, PassPA);
413 else
414 PI.runAfterPass<LazyCallGraph::SCC>(*Pass, *C, PassPA);
415
416 PA.intersect(PassPA);
417
418 // If the SCC structure has changed, bail immediately and let the outer
419 // CGSCC layer handle any iteration to reflect the refined structure.
420 if (UR.UpdatedC && UR.UpdatedC != C)
421 break;
422
423 // If the CGSCC pass wasn't able to provide a valid updated SCC, the
424 // current SCC may simply need to be skipped if invalid.
425 if (UR.InvalidatedSCCs.count(C)) {
426 LLVM_DEBUG(dbgs() << "Skipping invalidated root or island SCC!\n")do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Skipping invalidated root or island SCC!\n"
; } } while (false)
;
427 break;
428 }
429
430 assert(C->begin() != C->end() && "Cannot have an empty SCC!")(static_cast <bool> (C->begin() != C->end() &&
"Cannot have an empty SCC!") ? void (0) : __assert_fail ("C->begin() != C->end() && \"Cannot have an empty SCC!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 430, __extension__
__PRETTY_FUNCTION__))
;
431
432 // Check whether any of the handles were devirtualized.
433 bool Devirt = llvm::any_of(UR.IndirectVHs, [](auto &P) -> bool {
434 if (P.second) {
435 if (CallBase *CB = dyn_cast<CallBase>(P.second)) {
436 if (CB->getCalledFunction()) {
437 LLVM_DEBUG(dbgs() << "Found devirtualized call: " << *CB << "\n")do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Found devirtualized call: " <<
*CB << "\n"; } } while (false)
;
438 return true;
439 }
440 }
441 }
442 return false;
443 });
444
445 // Rescan to build up a new set of handles and count how many direct
446 // calls remain. If we decide to iterate, this also sets up the input to
447 // the next iteration.
448 UR.IndirectVHs.clear();
449 auto NewCallCounts = ScanSCC(*C, UR.IndirectVHs);
450
451 // If we haven't found an explicit devirtualization already see if we
452 // have decreased the number of indirect calls and increased the number
453 // of direct calls for any function in the SCC. This can be fooled by all
454 // manner of transformations such as DCE and other things, but seems to
455 // work well in practice.
456 if (!Devirt)
457 // Iterate over the keys in NewCallCounts, if Function also exists in
458 // CallCounts, make the check below.
459 for (auto &Pair : NewCallCounts) {
460 auto &CallCountNew = Pair.second;
461 auto CountIt = CallCounts.find(Pair.first);
462 if (CountIt != CallCounts.end()) {
463 const auto &CallCountOld = CountIt->second;
464 if (CallCountOld.Indirect > CallCountNew.Indirect &&
465 CallCountOld.Direct < CallCountNew.Direct) {
466 Devirt = true;
467 break;
468 }
469 }
470 }
471
472 if (!Devirt) {
473 break;
474 }
475
476 // Otherwise, if we've already hit our max, we're done.
477 if (Iteration >= MaxIterations) {
478 if (AbortOnMaxDevirtIterationsReached)
479 report_fatal_error("Max devirtualization iterations reached");
480 LLVM_DEBUG(do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Found another devirtualization after hitting the max "
"number of repetitions (" << MaxIterations << ") on SCC: "
<< *C << "\n"; } } while (false)
481 dbgs() << "Found another devirtualization after hitting the max "do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Found another devirtualization after hitting the max "
"number of repetitions (" << MaxIterations << ") on SCC: "
<< *C << "\n"; } } while (false)
482 "number of repetitions ("do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Found another devirtualization after hitting the max "
"number of repetitions (" << MaxIterations << ") on SCC: "
<< *C << "\n"; } } while (false)
483 << MaxIterations << ") on SCC: " << *C << "\n")do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Found another devirtualization after hitting the max "
"number of repetitions (" << MaxIterations << ") on SCC: "
<< *C << "\n"; } } while (false)
;
484 break;
485 }
486
487 LLVM_DEBUG(do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Repeating an SCC pass after finding a devirtualization in: "
<< *C << "\n"; } } while (false)
488 dbgs() << "Repeating an SCC pass after finding a devirtualization in: "do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Repeating an SCC pass after finding a devirtualization in: "
<< *C << "\n"; } } while (false)
489 << *C << "\n")do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Repeating an SCC pass after finding a devirtualization in: "
<< *C << "\n"; } } while (false)
;
490
491 // Move over the new call counts in preparation for iterating.
492 CallCounts = std::move(NewCallCounts);
493
494 // Update the analysis manager with each run and intersect the total set
495 // of preserved analyses so we're ready to iterate.
496 AM.invalidate(*C, PassPA);
497 }
498
499 // Note that we don't add any preserved entries here unlike a more normal
500 // "pass manager" because we only handle invalidation *between* iterations,
501 // not after the last iteration.
502 return PA;
503}
504
// Adaptor entry point: runs the wrapped function pass over every function in
// the SCC, handling function-analysis invalidation incrementally after each
// function and updating the call graph so the CGSCC walk stays consistent.
505PreservedAnalyses CGSCCToFunctionPassAdaptor::run(LazyCallGraph::SCC &C,
506 CGSCCAnalysisManager &AM,
507 LazyCallGraph &CG,
508 CGSCCUpdateResult &UR) {
509 // Setup the function analysis manager from its proxy.
510 FunctionAnalysisManager &FAM =
511 AM.getResult<FunctionAnalysisManagerCGSCCProxy>(C, CG).getManager();
512
// Snapshot the node pointers up front: running function passes below can
// split C, and we must not iterate the SCC itself while it is mutated.
513 SmallVector<LazyCallGraph::Node *, 4> Nodes;
514 for (LazyCallGraph::Node &N : C)
515 Nodes.push_back(&N);
516
517 // The SCC may get split while we are optimizing functions due to deleting
518 // edges. If this happens, the current SCC can shift, so keep track of
519 // a pointer we can overwrite.
520 LazyCallGraph::SCC *CurrentC = &C;
521
522 LLVM_DEBUG(dbgs() << "Running function passes across an SCC: " << C << "\n")do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Running function passes across an SCC: "
<< C << "\n"; } } while (false)
;
523
524 PreservedAnalyses PA = PreservedAnalyses::all();
525 for (LazyCallGraph::Node *N : Nodes) {
526 // Skip nodes from other SCCs. These may have been split out during
527 // processing. We'll eventually visit those SCCs and pick up the nodes
528 // there.
529 if (CG.lookupSCC(*N) != CurrentC)
530 continue;
531
532 Function &F = N->getFunction();
533
534 if (NoRerun && FAM.getCachedResult<ShouldNotRunFunctionPassesAnalysis>(F))
535 continue;
536
537 PassInstrumentation PI = FAM.getResult<PassInstrumentationAnalysis>(F);
538 if (!PI.runBeforePass<Function>(*Pass, F))
539 continue;
540
541 PreservedAnalyses PassPA = Pass->run(F, FAM);
542 PI.runAfterPass<Function>(*Pass, F, PassPA);
543
544 // We know that the function pass couldn't have invalidated any other
545 // function's analyses (that's the contract of a function pass), so
546 // directly handle the function analysis manager's invalidation here.
547 FAM.invalidate(F, EagerlyInvalidate ? PreservedAnalyses::none() : PassPA);
// Re-cache the marker analysis so the NoRerun check above can skip this
// function on a subsequent visit.
548 if (NoRerun)
549 (void)FAM.getResult<ShouldNotRunFunctionPassesAnalysis>(F);
550
551 // Then intersect the preserved set so that invalidation of module
552 // analyses will eventually occur when the module pass completes.
553 PA.intersect(std::move(PassPA));
554
555 // If the call graph hasn't been preserved, update it based on this
556 // function pass. This may also update the current SCC to point to
557 // a smaller, more refined SCC.
558 auto PAC = PA.getChecker<LazyCallGraphAnalysis>();
559 if (!PAC.preserved() && !PAC.preservedSet<AllAnalysesOn<Module>>()) {
560 CurrentC = &updateCGAndAnalysisManagerForFunctionPass(CG, *CurrentC, *N,
561 AM, UR, FAM);
562 assert(CG.lookupSCC(*N) == CurrentC &&(static_cast <bool> (CG.lookupSCC(*N) == CurrentC &&
"Current SCC not updated to the SCC containing the current node!"
) ? void (0) : __assert_fail ("CG.lookupSCC(*N) == CurrentC && \"Current SCC not updated to the SCC containing the current node!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 563, __extension__
__PRETTY_FUNCTION__))
563 "Current SCC not updated to the SCC containing the current node!")(static_cast <bool> (CG.lookupSCC(*N) == CurrentC &&
"Current SCC not updated to the SCC containing the current node!"
) ? void (0) : __assert_fail ("CG.lookupSCC(*N) == CurrentC && \"Current SCC not updated to the SCC containing the current node!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 563, __extension__
__PRETTY_FUNCTION__))
;
564 }
565 }
566
567 // By definition we preserve the proxy. And we preserve all analyses on
568 // Functions. This precludes *any* invalidation of function analyses by the
569 // proxy, but that's OK because we've taken care to invalidate analyses in
570 // the function analysis manager incrementally above.
571 PA.preserveSet<AllAnalysesOn<Function>>();
572 PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
573
574 // We've also ensured that we updated the call graph along the way.
575 PA.preserve<LazyCallGraphAnalysis>();
576
577 return PA;
578}
579
// Invalidation hook for the module -> SCC analysis-manager proxy: decides
// whether the proxy result stays valid and propagates module-level
// invalidation down to cached SCC analyses.
580bool CGSCCAnalysisManagerModuleProxy::Result::invalidate(
581 Module &M, const PreservedAnalyses &PA,
582 ModuleAnalysisManager::Invalidator &Inv) {
583 // If literally everything is preserved, we're done.
584 if (PA.areAllPreserved())
585 return false; // This is still a valid proxy.
586
587 // If this proxy or the call graph is going to be invalidated, we also need
588 // to clear all the keys coming from that analysis.
589 //
590 // We also directly invalidate the FAM's module proxy if necessary, and if
591 // that proxy isn't preserved we can't preserve this proxy either. We rely on
592 // it to handle module -> function analysis invalidation in the face of
593 // structural changes and so if it's unavailable we conservatively clear the
594 // entire SCC layer as well rather than trying to do invalidation ourselves.
595 auto PAC = PA.getChecker<CGSCCAnalysisManagerModuleProxy>();
596 if (!(PAC.preserved() || PAC.preservedSet<AllAnalysesOn<Module>>()) ||
597 Inv.invalidate<LazyCallGraphAnalysis>(M, PA) ||
598 Inv.invalidate<FunctionAnalysisManagerModuleProxy>(M, PA)) {
599 InnerAM->clear();
600
601 // And the proxy itself should be marked as invalid so that we can observe
602 // the new call graph. This isn't strictly necessary because we cheat
603 // above, but is still useful.
604 return true;
605 }
606
607 // Directly check if the relevant set is preserved so we can short circuit
608 // invalidating SCCs below.
609 bool AreSCCAnalysesPreserved =
610 PA.allAnalysesInSetPreserved<AllAnalysesOn<LazyCallGraph::SCC>>();
611
612 // Ok, we have a graph, so we can propagate the invalidation down into it.
613 G->buildRefSCCs();
614 for (auto &RC : G->postorder_ref_sccs())
615 for (auto &C : RC) {
// InnerPA is only copied from PA if deferred invalidation actually fires
// for this SCC, avoiding a PreservedAnalyses copy on every iteration.
616 Optional<PreservedAnalyses> InnerPA;
617
618 // Check to see whether the preserved set needs to be adjusted based on
619 // module-level analysis invalidation triggering deferred invalidation
620 // for this SCC.
621 if (auto *OuterProxy =
622 InnerAM->getCachedResult<ModuleAnalysisManagerCGSCCProxy>(C))
623 for (const auto &OuterInvalidationPair :
624 OuterProxy->getOuterInvalidations()) {
625 AnalysisKey *OuterAnalysisID = OuterInvalidationPair.first;
626 const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
627 if (Inv.invalidate(OuterAnalysisID, M, PA)) {
628 if (!InnerPA)
629 InnerPA = PA;
630 for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
631 InnerPA->abandon(InnerAnalysisID);
632 }
633 }
634
635 // Check if we needed a custom PA set. If so we'll need to run the inner
636 // invalidation.
637 if (InnerPA) {
638 InnerAM->invalidate(C, *InnerPA);
639 continue;
640 }
641
642 // Otherwise we only need to do invalidation if the original PA set didn't
643 // preserve all SCC analyses.
644 if (!AreSCCAnalysesPreserved)
645 InnerAM->invalidate(C, PA);
646 }
647
648 // Return false to indicate that this result is still a valid proxy.
649 return false;
650}
651
652template <>
653CGSCCAnalysisManagerModuleProxy::Result
654CGSCCAnalysisManagerModuleProxy::run(Module &M, ModuleAnalysisManager &AM) {
655 // Force the Function analysis manager to also be available so that it can
656 // be accessed in an SCC analysis and proxied onward to function passes.
657 // FIXME: It is pretty awkward to just drop the result here and assert that
658 // we can find it again later.
659 (void)AM.getResult<FunctionAnalysisManagerModuleProxy>(M);
660
661 return Result(*InnerAM, AM.getResult<LazyCallGraphAnalysis>(M));
662}
663
// Out-of-line definition of the static key the analysis-manager machinery
// uses to identify the FunctionAnalysisManagerCGSCCProxy analysis.
664AnalysisKey FunctionAnalysisManagerCGSCCProxy::Key;
665
// Computes the SCC -> FAM proxy result. The result itself is empty; the
// caller wires in the actual FunctionAnalysisManager via updateFAM().
666FunctionAnalysisManagerCGSCCProxy::Result
667FunctionAnalysisManagerCGSCCProxy::run(LazyCallGraph::SCC &C,
668 CGSCCAnalysisManager &AM,
669 LazyCallGraph &CG) {
670 // Note: unconditionally requesting the module proxy (to check that the FAM
671 // module proxy exists) may compute it at this point. In some cases this is
672 // run unnecessarily, but it is cheap and having the assertion in place is more valuable.
673 auto &MAMProxy = AM.getResult<ModuleAnalysisManagerCGSCCProxy>(C, CG);
// NOTE(review): the begin() dereference assumes the SCC is non-empty —
// LazyCallGraph SCCs are expected to contain at least one node.
674 Module &M = *C.begin()->getFunction().getParent();
675 bool ProxyExists =
676 MAMProxy.cachedResultExists<FunctionAnalysisManagerModuleProxy>(M);
677 assert(ProxyExists &&(static_cast <bool> (ProxyExists && "The CGSCC pass manager requires that the FAM module proxy is run "
"on the module prior to entering the CGSCC walk") ? void (0)
: __assert_fail ("ProxyExists && \"The CGSCC pass manager requires that the FAM module proxy is run \" \"on the module prior to entering the CGSCC walk\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 679, __extension__
__PRETTY_FUNCTION__))
678 "The CGSCC pass manager requires that the FAM module proxy is run "(static_cast <bool> (ProxyExists && "The CGSCC pass manager requires that the FAM module proxy is run "
"on the module prior to entering the CGSCC walk") ? void (0)
: __assert_fail ("ProxyExists && \"The CGSCC pass manager requires that the FAM module proxy is run \" \"on the module prior to entering the CGSCC walk\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 679, __extension__
__PRETTY_FUNCTION__))
679 "on the module prior to entering the CGSCC walk")(static_cast <bool> (ProxyExists && "The CGSCC pass manager requires that the FAM module proxy is run "
"on the module prior to entering the CGSCC walk") ? void (0)
: __assert_fail ("ProxyExists && \"The CGSCC pass manager requires that the FAM module proxy is run \" \"on the module prior to entering the CGSCC walk\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 679, __extension__
__PRETTY_FUNCTION__))
;
680 (void)ProxyExists;
681
682 // We just return an empty result. The caller will use the updateFAM interface
683 // to correctly register the relevant FunctionAnalysisManager based on the
684 // context in which this proxy is run.
685 return Result();
686}
687
// Invalidation hook for the SCC -> function analysis-manager proxy: pushes
// SCC-level invalidation down onto the cached analyses of each function in
// the SCC. Always returns false — the proxy result itself stays valid.
688bool FunctionAnalysisManagerCGSCCProxy::Result::invalidate(
689 LazyCallGraph::SCC &C, const PreservedAnalyses &PA,
690 CGSCCAnalysisManager::Invalidator &Inv) {
691 // If literally everything is preserved, we're done.
692 if (PA.areAllPreserved())
693 return false; // This is still a valid proxy.
694
695 // All updates to preserve valid results are done below, so we don't need to
696 // invalidate this proxy.
697 //
698 // Note that in order to preserve this proxy, a module pass must ensure that
699 // the FAM has been completely updated to handle the deletion of functions.
700 // Specifically, any FAM-cached results for those functions need to have been
701 // forcibly cleared. When preserved, this proxy will only invalidate results
702 // cached on functions *still in the module* at the end of the module pass.
703 auto PAC = PA.getChecker<FunctionAnalysisManagerCGSCCProxy>();
704 if (!PAC.preserved() && !PAC.preservedSet<AllAnalysesOn<LazyCallGraph::SCC>>()) {
705 for (LazyCallGraph::Node &N : C)
706 FAM->invalidate(N.getFunction(), PA);
707
708 return false;
709 }
710
711 // Directly check if the relevant set is preserved.
712 bool AreFunctionAnalysesPreserved =
713 PA.allAnalysesInSetPreserved<AllAnalysesOn<Function>>();
714
715 // Now walk all the functions to see if any inner analysis invalidation is
716 // necessary.
717 for (LazyCallGraph::Node &N : C) {
718 Function &F = N.getFunction();
// FunctionPA is only materialized (copied from PA) when deferred
// invalidation actually fires for this function.
719 Optional<PreservedAnalyses> FunctionPA;
720
721 // Check to see whether the preserved set needs to be pruned based on
722 // SCC-level analysis invalidation that triggers deferred invalidation
723 // registered with the outer analysis manager proxy for this function.
724 if (auto *OuterProxy =
725 FAM->getCachedResult<CGSCCAnalysisManagerFunctionProxy>(F))
726 for (const auto &OuterInvalidationPair :
727 OuterProxy->getOuterInvalidations()) {
728 AnalysisKey *OuterAnalysisID = OuterInvalidationPair.first;
729 const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
730 if (Inv.invalidate(OuterAnalysisID, C, PA)) {
731 if (!FunctionPA)
732 FunctionPA = PA;
733 for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
734 FunctionPA->abandon(InnerAnalysisID);
735 }
736 }
737
738 // Check if we needed a custom PA set, and if so we'll need to run the
739 // inner invalidation.
740 if (FunctionPA) {
741 FAM->invalidate(F, *FunctionPA);
742 continue;
743 }
744
745 // Otherwise we only need to do invalidation if the original PA set didn't
746 // preserve all function analyses.
747 if (!AreFunctionAnalysesPreserved)
748 FAM->invalidate(F, PA);
749 }
750
751 // Return false to indicate that this result is still a valid proxy.
752 return false;
753}
754
755} // end namespace llvm
756
757/// When a new SCC is created for the graph we first update the
758/// FunctionAnalysisManager in the Proxy's result.
759/// As there might be function analysis results cached for the functions now in
760/// that SCC, two forms of updates are required.
761///
762/// First, a proxy from the SCC to the FunctionAnalysisManager needs to be
763/// created so that any subsequent invalidation events to the SCC are
764/// propagated to the function analysis results cached for functions within it.
765///
766/// Second, if any of the functions within the SCC have analysis results with
767/// outer analysis dependencies, then those dependencies would point to the
768/// *wrong* SCC's analysis result. We forcibly invalidate the necessary
769/// function analyses so that they don't retain stale handles.
770static void updateNewSCCFunctionAnalyses(LazyCallGraph::SCC &C,
771 LazyCallGraph &G,
772 CGSCCAnalysisManager &AM,
773 FunctionAnalysisManager &FAM) {
774 AM.getResult<FunctionAnalysisManagerCGSCCProxy>(C, G).updateFAM(FAM);
775
776 // Now walk the functions in this SCC and invalidate any function analysis
777 // results that might have outer dependencies on an SCC analysis.
778 for (LazyCallGraph::Node &N : C) {
779 Function &F = N.getFunction();
780
781 auto *OuterProxy =
782 FAM.getCachedResult<CGSCCAnalysisManagerFunctionProxy>(F);
783 if (!OuterProxy)
784 // No outer analyses were queried, nothing to do.
785 continue;
786
787 // Forcibly abandon all the inner analyses with dependencies, but
788 // invalidate nothing else.
789 auto PA = PreservedAnalyses::all();
790 for (const auto &OuterInvalidationPair :
791 OuterProxy->getOuterInvalidations()) {
792 const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
793 for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
794 PA.abandon(InnerAnalysisID);
795 }
796
797 // Now invalidate anything we found.
798 FAM.invalidate(F, PA);
799 }
800}
801
802/// Helper function to update both the \c CGSCCAnalysisManager \p AM and the \c
803/// CGSCCPassManager's \c CGSCCUpdateResult \p UR based on a range of newly
804/// added SCCs.
805///
806/// The range of new SCCs must be in postorder already. The SCC they were split
807/// out of must be provided as \p C. The current node being mutated and
808/// triggering updates must be passed as \p N.
809///
810/// This function returns the SCC containing \p N. This will be either \p C if
811/// no new SCCs have been split out, or it will be the new SCC containing \p N.
812template <typename SCCRangeT>
813static LazyCallGraph::SCC *
814incorporateNewSCCRange(const SCCRangeT &NewSCCRange, LazyCallGraph &G,
815 LazyCallGraph::Node &N, LazyCallGraph::SCC *C,
816 CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR) {
817 using SCC = LazyCallGraph::SCC;
818
// Fast path: nothing was split out, the current SCC is unchanged.
819 if (NewSCCRange.empty())
820 return C;
821
822 // Add the current SCC to the worklist as its shape has changed.
823 UR.CWorklist.insert(C);
824 LLVM_DEBUG(dbgs() << "Enqueuing the existing SCC in the worklist:" << *Cdo { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Enqueuing the existing SCC in the worklist:"
<< *C << "\n"; } } while (false)
825 << "\n")do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Enqueuing the existing SCC in the worklist:"
<< *C << "\n"; } } while (false)
;
826
827 SCC *OldC = C;
828
829 // Update the current SCC. Note that if we have new SCCs, this must actually
830 // change the SCC.
831 assert(C != &*NewSCCRange.begin() &&(static_cast <bool> (C != &*NewSCCRange.begin() &&
"Cannot insert new SCCs without changing current SCC!") ? void
(0) : __assert_fail ("C != &*NewSCCRange.begin() && \"Cannot insert new SCCs without changing current SCC!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 832, __extension__
__PRETTY_FUNCTION__))
832 "Cannot insert new SCCs without changing current SCC!")(static_cast <bool> (C != &*NewSCCRange.begin() &&
"Cannot insert new SCCs without changing current SCC!") ? void
(0) : __assert_fail ("C != &*NewSCCRange.begin() && \"Cannot insert new SCCs without changing current SCC!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 832, __extension__
__PRETTY_FUNCTION__))
;
833 C = &*NewSCCRange.begin();
834 assert(G.lookupSCC(N) == C && "Failed to update current SCC!")(static_cast <bool> (G.lookupSCC(N) == C && "Failed to update current SCC!"
) ? void (0) : __assert_fail ("G.lookupSCC(N) == C && \"Failed to update current SCC!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 834, __extension__
__PRETTY_FUNCTION__))
;
835
836 // If we had a cached FAM proxy originally, we will want to create more of
837 // them for each SCC that was split off.
838 FunctionAnalysisManager *FAM = nullptr;
839 if (auto *FAMProxy =
840 AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(*OldC))
841 FAM = &FAMProxy->getManager();
842
843 // We need to propagate an invalidation call to all but the newly current SCC
844 // because the outer pass manager won't do that for us after splitting them.
845 // FIXME: We should accept a PreservedAnalysis from the CG updater so that if
846 // there are preserved analysis we can avoid invalidating them here for
847 // split-off SCCs.
848 // We know however that this will preserve any FAM proxy so go ahead and mark
849 // that.
850 auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
851 PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
852 AM.invalidate(*OldC, PA);
853
854 // Ensure the now-current SCC's function analyses are updated.
855 if (FAM)
856 updateNewSCCFunctionAnalyses(*C, G, AM, *FAM);
857
// The first SCC in NewSCCRange became the current SCC above; enqueue and
// update only the remaining split-off SCCs here.
858 for (SCC &NewC : llvm::reverse(llvm::drop_begin(NewSCCRange))) {
859 assert(C != &NewC && "No need to re-visit the current SCC!")(static_cast <bool> (C != &NewC && "No need to re-visit the current SCC!"
) ? void (0) : __assert_fail ("C != &NewC && \"No need to re-visit the current SCC!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 859, __extension__
__PRETTY_FUNCTION__))
;
860 assert(OldC != &NewC && "Already handled the original SCC!")(static_cast <bool> (OldC != &NewC && "Already handled the original SCC!"
) ? void (0) : __assert_fail ("OldC != &NewC && \"Already handled the original SCC!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 860, __extension__
__PRETTY_FUNCTION__))
;
861 UR.CWorklist.insert(&NewC);
862 LLVM_DEBUG(dbgs() << "Enqueuing a newly formed SCC:" << NewC << "\n")do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Enqueuing a newly formed SCC:" <<
NewC << "\n"; } } while (false)
;
863
864 // Ensure new SCCs' function analyses are updated.
865 if (FAM)
866 updateNewSCCFunctionAnalyses(NewC, G, AM, *FAM);
867
868 // Also propagate a normal invalidation to the new SCC as only the current
869 // will get one from the pass manager infrastructure.
870 AM.invalidate(NewC, PA);
871 }
872 return C;
873}
874
875static LazyCallGraph::SCC &updateCGAndAnalysisManagerForPass(
876 LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
877 CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR,
878 FunctionAnalysisManager &FAM, bool FunctionPass) {
879 using Node = LazyCallGraph::Node;
880 using Edge = LazyCallGraph::Edge;
881 using SCC = LazyCallGraph::SCC;
882 using RefSCC = LazyCallGraph::RefSCC;
883
884 RefSCC &InitialRC = InitialC.getOuterRefSCC();
885 SCC *C = &InitialC;
886 RefSCC *RC = &InitialRC;
887 Function &F = N.getFunction();
888
889 // Walk the function body and build up the set of retained, promoted, and
890 // demoted edges.
891 SmallVector<Constant *, 16> Worklist;
892 SmallPtrSet<Constant *, 16> Visited;
893 SmallPtrSet<Node *, 16> RetainedEdges;
894 SmallSetVector<Node *, 4> PromotedRefTargets;
895 SmallSetVector<Node *, 4> DemotedCallTargets;
896 SmallSetVector<Node *, 4> NewCallEdges;
897 SmallSetVector<Node *, 4> NewRefEdges;
898
899 // First walk the function and handle all called functions. We do this first
900 // because if there is a single call edge, whether there are ref edges is
901 // irrelevant.
902 for (Instruction &I : instructions(F)) {
903 if (auto *CB = dyn_cast<CallBase>(&I)) {
904 if (Function *Callee = CB->getCalledFunction()) {
905 if (Visited.insert(Callee).second && !Callee->isDeclaration()) {
906 Node *CalleeN = G.lookup(*Callee);
907 assert(CalleeN &&(static_cast <bool> (CalleeN && "Visited function should already have an associated node"
) ? void (0) : __assert_fail ("CalleeN && \"Visited function should already have an associated node\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 908, __extension__
__PRETTY_FUNCTION__))
908 "Visited function should already have an associated node")(static_cast <bool> (CalleeN && "Visited function should already have an associated node"
) ? void (0) : __assert_fail ("CalleeN && \"Visited function should already have an associated node\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 908, __extension__
__PRETTY_FUNCTION__))
;
909 Edge *E = N->lookup(*CalleeN);
910 assert((E || !FunctionPass) &&(static_cast <bool> ((E || !FunctionPass) && "No function transformations should introduce *new* "
"call edges! Any new calls should be modeled as " "promoted existing ref edges!"
) ? void (0) : __assert_fail ("(E || !FunctionPass) && \"No function transformations should introduce *new* \" \"call edges! Any new calls should be modeled as \" \"promoted existing ref edges!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 913, __extension__
__PRETTY_FUNCTION__))
911 "No function transformations should introduce *new* "(static_cast <bool> ((E || !FunctionPass) && "No function transformations should introduce *new* "
"call edges! Any new calls should be modeled as " "promoted existing ref edges!"
) ? void (0) : __assert_fail ("(E || !FunctionPass) && \"No function transformations should introduce *new* \" \"call edges! Any new calls should be modeled as \" \"promoted existing ref edges!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 913, __extension__
__PRETTY_FUNCTION__))
912 "call edges! Any new calls should be modeled as "(static_cast <bool> ((E || !FunctionPass) && "No function transformations should introduce *new* "
"call edges! Any new calls should be modeled as " "promoted existing ref edges!"
) ? void (0) : __assert_fail ("(E || !FunctionPass) && \"No function transformations should introduce *new* \" \"call edges! Any new calls should be modeled as \" \"promoted existing ref edges!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 913, __extension__
__PRETTY_FUNCTION__))
913 "promoted existing ref edges!")(static_cast <bool> ((E || !FunctionPass) && "No function transformations should introduce *new* "
"call edges! Any new calls should be modeled as " "promoted existing ref edges!"
) ? void (0) : __assert_fail ("(E || !FunctionPass) && \"No function transformations should introduce *new* \" \"call edges! Any new calls should be modeled as \" \"promoted existing ref edges!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 913, __extension__
__PRETTY_FUNCTION__))
;
914 bool Inserted = RetainedEdges.insert(CalleeN).second;
915 (void)Inserted;
916 assert(Inserted && "We should never visit a function twice.")(static_cast <bool> (Inserted && "We should never visit a function twice."
) ? void (0) : __assert_fail ("Inserted && \"We should never visit a function twice.\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 916, __extension__
__PRETTY_FUNCTION__))
;
917 if (!E)
918 NewCallEdges.insert(CalleeN);
919 else if (!E->isCall())
920 PromotedRefTargets.insert(CalleeN);
921 }
922 } else {
923 // We can miss devirtualization if an indirect call is created then
924 // promoted before updateCGAndAnalysisManagerForPass runs.
925 auto *Entry = UR.IndirectVHs.find(CB);
926 if (Entry == UR.IndirectVHs.end())
927 UR.IndirectVHs.insert({CB, WeakTrackingVH(CB)});
928 else if (!Entry->second)
929 Entry->second = WeakTrackingVH(CB);
930 }
931 }
932 }
933
934 // Now walk all references.
935 for (Instruction &I : instructions(F))
936 for (Value *Op : I.operand_values())
937 if (auto *OpC = dyn_cast<Constant>(Op))
938 if (Visited.insert(OpC).second)
939 Worklist.push_back(OpC);
940
941 auto VisitRef = [&](Function &Referee) {
942 Node *RefereeN = G.lookup(Referee);
943 assert(RefereeN &&(static_cast <bool> (RefereeN && "Visited function should already have an associated node"
) ? void (0) : __assert_fail ("RefereeN && \"Visited function should already have an associated node\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 944, __extension__
__PRETTY_FUNCTION__))
944 "Visited function should already have an associated node")(static_cast <bool> (RefereeN && "Visited function should already have an associated node"
) ? void (0) : __assert_fail ("RefereeN && \"Visited function should already have an associated node\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 944, __extension__
__PRETTY_FUNCTION__))
;
945 Edge *E = N->lookup(*RefereeN);
946 assert((E || !FunctionPass) &&(static_cast <bool> ((E || !FunctionPass) && "No function transformations should introduce *new* ref "
"edges! Any new ref edges would require IPO which " "function passes aren't allowed to do!"
) ? void (0) : __assert_fail ("(E || !FunctionPass) && \"No function transformations should introduce *new* ref \" \"edges! Any new ref edges would require IPO which \" \"function passes aren't allowed to do!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 949, __extension__
__PRETTY_FUNCTION__))
947 "No function transformations should introduce *new* ref "(static_cast <bool> ((E || !FunctionPass) && "No function transformations should introduce *new* ref "
"edges! Any new ref edges would require IPO which " "function passes aren't allowed to do!"
) ? void (0) : __assert_fail ("(E || !FunctionPass) && \"No function transformations should introduce *new* ref \" \"edges! Any new ref edges would require IPO which \" \"function passes aren't allowed to do!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 949, __extension__
__PRETTY_FUNCTION__))
948 "edges! Any new ref edges would require IPO which "(static_cast <bool> ((E || !FunctionPass) && "No function transformations should introduce *new* ref "
"edges! Any new ref edges would require IPO which " "function passes aren't allowed to do!"
) ? void (0) : __assert_fail ("(E || !FunctionPass) && \"No function transformations should introduce *new* ref \" \"edges! Any new ref edges would require IPO which \" \"function passes aren't allowed to do!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 949, __extension__
__PRETTY_FUNCTION__))
949 "function passes aren't allowed to do!")(static_cast <bool> ((E || !FunctionPass) && "No function transformations should introduce *new* ref "
"edges! Any new ref edges would require IPO which " "function passes aren't allowed to do!"
) ? void (0) : __assert_fail ("(E || !FunctionPass) && \"No function transformations should introduce *new* ref \" \"edges! Any new ref edges would require IPO which \" \"function passes aren't allowed to do!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 949, __extension__
__PRETTY_FUNCTION__))
;
950 bool Inserted = RetainedEdges.insert(RefereeN).second;
951 (void)Inserted;
952 assert(Inserted && "We should never visit a function twice.")(static_cast <bool> (Inserted && "We should never visit a function twice."
) ? void (0) : __assert_fail ("Inserted && \"We should never visit a function twice.\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 952, __extension__
__PRETTY_FUNCTION__))
;
953 if (!E)
954 NewRefEdges.insert(RefereeN);
955 else if (E->isCall())
956 DemotedCallTargets.insert(RefereeN);
957 };
958 LazyCallGraph::visitReferences(Worklist, Visited, VisitRef);
959
960 // Handle new ref edges.
961 for (Node *RefTarget : NewRefEdges) {
962 SCC &TargetC = *G.lookupSCC(*RefTarget);
963 RefSCC &TargetRC = TargetC.getOuterRefSCC();
964 (void)TargetRC;
965 // TODO: This only allows trivial edges to be added for now.
966#ifdef EXPENSIVE_CHECKS
967 assert((RC == &TargetRC ||(static_cast <bool> ((RC == &TargetRC || RC->isAncestorOf
(TargetRC)) && "New ref edge is not trivial!") ? void
(0) : __assert_fail ("(RC == &TargetRC || RC->isAncestorOf(TargetRC)) && \"New ref edge is not trivial!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 968, __extension__
__PRETTY_FUNCTION__))
968 RC->isAncestorOf(TargetRC)) && "New ref edge is not trivial!")(static_cast <bool> ((RC == &TargetRC || RC->isAncestorOf
(TargetRC)) && "New ref edge is not trivial!") ? void
(0) : __assert_fail ("(RC == &TargetRC || RC->isAncestorOf(TargetRC)) && \"New ref edge is not trivial!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 968, __extension__
__PRETTY_FUNCTION__))
;
969#endif
970 RC->insertTrivialRefEdge(N, *RefTarget);
971 }
972
973 // Handle new call edges.
974 for (Node *CallTarget : NewCallEdges) {
975 SCC &TargetC = *G.lookupSCC(*CallTarget);
976 RefSCC &TargetRC = TargetC.getOuterRefSCC();
977 (void)TargetRC;
978 // TODO: This only allows trivial edges to be added for now.
979#ifdef EXPENSIVE_CHECKS
980 assert((RC == &TargetRC ||(static_cast <bool> ((RC == &TargetRC || RC->isAncestorOf
(TargetRC)) && "New call edge is not trivial!") ? void
(0) : __assert_fail ("(RC == &TargetRC || RC->isAncestorOf(TargetRC)) && \"New call edge is not trivial!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 981, __extension__
__PRETTY_FUNCTION__))
981 RC->isAncestorOf(TargetRC)) && "New call edge is not trivial!")(static_cast <bool> ((RC == &TargetRC || RC->isAncestorOf
(TargetRC)) && "New call edge is not trivial!") ? void
(0) : __assert_fail ("(RC == &TargetRC || RC->isAncestorOf(TargetRC)) && \"New call edge is not trivial!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 981, __extension__
__PRETTY_FUNCTION__))
;
982#endif
983 // Add a trivial ref edge to be promoted later on alongside
984 // PromotedRefTargets.
985 RC->insertTrivialRefEdge(N, *CallTarget);
986 }
987
988 // Include synthetic reference edges to known, defined lib functions.
989 for (auto *LibFn : G.getLibFunctions())
990 // While the list of lib functions doesn't have repeats, don't re-visit
991 // anything handled above.
992 if (!Visited.count(LibFn))
993 VisitRef(*LibFn);
994
995 // First remove all of the edges that are no longer present in this function.
996 // The first step makes these edges uniformly ref edges and accumulates them
997 // into a separate data structure so removal doesn't invalidate anything.
998 SmallVector<Node *, 4> DeadTargets;
999 for (Edge &E : *N) {
1000 if (RetainedEdges.count(&E.getNode()))
1001 continue;
1002
1003 SCC &TargetC = *G.lookupSCC(E.getNode());
1004 RefSCC &TargetRC = TargetC.getOuterRefSCC();
1005 if (&TargetRC == RC && E.isCall()) {
1006 if (C != &TargetC) {
1007 // For separate SCCs this is trivial.
1008 RC->switchTrivialInternalEdgeToRef(N, E.getNode());
1009 } else {
1010 // Now update the call graph.
1011 C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, E.getNode()),
1012 G, N, C, AM, UR);
1013 }
1014 }
1015
1016 // Now that this is ready for actual removal, put it into our list.
1017 DeadTargets.push_back(&E.getNode());
1018 }
1019 // Remove the easy cases quickly and actually pull them out of our list.
1020 llvm::erase_if(DeadTargets, [&](Node *TargetN) {
1021 SCC &TargetC = *G.lookupSCC(*TargetN);
1022 RefSCC &TargetRC = TargetC.getOuterRefSCC();
1023
1024 // We can't trivially remove internal targets, so skip
1025 // those.
1026 if (&TargetRC == RC)
1027 return false;
1028
1029 LLVM_DEBUG(dbgs() << "Deleting outgoing edge from '" << N << "' to '"do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Deleting outgoing edge from '" <<
N << "' to '" << *TargetN << "'\n"; } } while
(false)
1030 << *TargetN << "'\n")do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Deleting outgoing edge from '" <<
N << "' to '" << *TargetN << "'\n"; } } while
(false)
;
1031 RC->removeOutgoingEdge(N, *TargetN);
1032 return true;
1033 });
1034
1035 // Now do a batch removal of the internal ref edges left.
1036 auto NewRefSCCs = RC->removeInternalRefEdge(N, DeadTargets);
1037 if (!NewRefSCCs.empty()) {
1038 // The old RefSCC is dead, mark it as such.
1039 UR.InvalidatedRefSCCs.insert(RC);
1040
1041 // Note that we don't bother to invalidate analyses as ref-edge
1042 // connectivity is not really observable in any way and is intended
1043 // exclusively to be used for ordering of transforms rather than for
1044 // analysis conclusions.
1045
1046 // Update RC to the "bottom".
1047 assert(G.lookupSCC(N) == C && "Changed the SCC when splitting RefSCCs!")(static_cast <bool> (G.lookupSCC(N) == C && "Changed the SCC when splitting RefSCCs!"
) ? void (0) : __assert_fail ("G.lookupSCC(N) == C && \"Changed the SCC when splitting RefSCCs!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 1047, __extension__
__PRETTY_FUNCTION__))
;
1048 RC = &C->getOuterRefSCC();
1049 assert(G.lookupRefSCC(N) == RC && "Failed to update current RefSCC!")(static_cast <bool> (G.lookupRefSCC(N) == RC &&
"Failed to update current RefSCC!") ? void (0) : __assert_fail
("G.lookupRefSCC(N) == RC && \"Failed to update current RefSCC!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 1049, __extension__
__PRETTY_FUNCTION__))
;
1050
1051 // The RC worklist is in reverse postorder, so we enqueue the new ones in
1052 // RPO except for the one which contains the source node as that is the
1053 // "bottom" we will continue processing in the bottom-up walk.
1054 assert(NewRefSCCs.front() == RC &&(static_cast <bool> (NewRefSCCs.front() == RC &&
"New current RefSCC not first in the returned list!") ? void
(0) : __assert_fail ("NewRefSCCs.front() == RC && \"New current RefSCC not first in the returned list!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 1055, __extension__
__PRETTY_FUNCTION__))
1055 "New current RefSCC not first in the returned list!")(static_cast <bool> (NewRefSCCs.front() == RC &&
"New current RefSCC not first in the returned list!") ? void
(0) : __assert_fail ("NewRefSCCs.front() == RC && \"New current RefSCC not first in the returned list!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 1055, __extension__
__PRETTY_FUNCTION__))
;
1056 for (RefSCC *NewRC : llvm::reverse(llvm::drop_begin(NewRefSCCs))) {
1057 assert(NewRC != RC && "Should not encounter the current RefSCC further "(static_cast <bool> (NewRC != RC && "Should not encounter the current RefSCC further "
"in the postorder list of new RefSCCs.") ? void (0) : __assert_fail
("NewRC != RC && \"Should not encounter the current RefSCC further \" \"in the postorder list of new RefSCCs.\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 1058, __extension__
__PRETTY_FUNCTION__))
1058 "in the postorder list of new RefSCCs.")(static_cast <bool> (NewRC != RC && "Should not encounter the current RefSCC further "
"in the postorder list of new RefSCCs.") ? void (0) : __assert_fail
("NewRC != RC && \"Should not encounter the current RefSCC further \" \"in the postorder list of new RefSCCs.\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 1058, __extension__
__PRETTY_FUNCTION__))
;
1059 UR.RCWorklist.insert(NewRC);
1060 LLVM_DEBUG(dbgs() << "Enqueuing a new RefSCC in the update worklist: "do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Enqueuing a new RefSCC in the update worklist: "
<< *NewRC << "\n"; } } while (false)
1061 << *NewRC << "\n")do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Enqueuing a new RefSCC in the update worklist: "
<< *NewRC << "\n"; } } while (false)
;
1062 }
1063 }
1064
1065 // Next demote all the call edges that are now ref edges. This helps make
1066 // the SCCs small which should minimize the work below as we don't want to
1067 // form cycles that this would break.
1068 for (Node *RefTarget : DemotedCallTargets) {
1069 SCC &TargetC = *G.lookupSCC(*RefTarget);
1070 RefSCC &TargetRC = TargetC.getOuterRefSCC();
1071
1072 // The easy case is when the target RefSCC is not this RefSCC. This is
1073 // only supported when the target RefSCC is a child of this RefSCC.
1074 if (&TargetRC != RC) {
1075#ifdef EXPENSIVE_CHECKS
1076 assert(RC->isAncestorOf(TargetRC) &&(static_cast <bool> (RC->isAncestorOf(TargetRC) &&
"Cannot potentially form RefSCC cycles here!") ? void (0) : __assert_fail
("RC->isAncestorOf(TargetRC) && \"Cannot potentially form RefSCC cycles here!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 1077, __extension__
__PRETTY_FUNCTION__))
1077 "Cannot potentially form RefSCC cycles here!")(static_cast <bool> (RC->isAncestorOf(TargetRC) &&
"Cannot potentially form RefSCC cycles here!") ? void (0) : __assert_fail
("RC->isAncestorOf(TargetRC) && \"Cannot potentially form RefSCC cycles here!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 1077, __extension__
__PRETTY_FUNCTION__))
;
1078#endif
1079 RC->switchOutgoingEdgeToRef(N, *RefTarget);
1080 LLVM_DEBUG(dbgs() << "Switch outgoing call edge to a ref edge from '" << Ndo { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Switch outgoing call edge to a ref edge from '"
<< N << "' to '" << *RefTarget << "'\n"
; } } while (false)
1081 << "' to '" << *RefTarget << "'\n")do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Switch outgoing call edge to a ref edge from '"
<< N << "' to '" << *RefTarget << "'\n"
; } } while (false)
;
1082 continue;
1083 }
1084
1085 // We are switching an internal call edge to a ref edge. This may split up
1086 // some SCCs.
1087 if (C != &TargetC) {
1088 // For separate SCCs this is trivial.
1089 RC->switchTrivialInternalEdgeToRef(N, *RefTarget);
1090 continue;
1091 }
1092
1093 // Now update the call graph.
1094 C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, *RefTarget), G, N,
1095 C, AM, UR);
1096 }
1097
1098 // We added a ref edge earlier for new call edges, promote those to call edges
1099 // alongside PromotedRefTargets.
1100 for (Node *E : NewCallEdges)
1101 PromotedRefTargets.insert(E);
1102
1103 // Now promote ref edges into call edges.
1104 for (Node *CallTarget : PromotedRefTargets) {
1105 SCC &TargetC = *G.lookupSCC(*CallTarget);
1106 RefSCC &TargetRC = TargetC.getOuterRefSCC();
1107
1108 // The easy case is when the target RefSCC is not this RefSCC. This is
1109 // only supported when the target RefSCC is a child of this RefSCC.
1110 if (&TargetRC != RC) {
1111#ifdef EXPENSIVE_CHECKS
1112 assert(RC->isAncestorOf(TargetRC) &&(static_cast <bool> (RC->isAncestorOf(TargetRC) &&
"Cannot potentially form RefSCC cycles here!") ? void (0) : __assert_fail
("RC->isAncestorOf(TargetRC) && \"Cannot potentially form RefSCC cycles here!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 1113, __extension__
__PRETTY_FUNCTION__))
1113 "Cannot potentially form RefSCC cycles here!")(static_cast <bool> (RC->isAncestorOf(TargetRC) &&
"Cannot potentially form RefSCC cycles here!") ? void (0) : __assert_fail
("RC->isAncestorOf(TargetRC) && \"Cannot potentially form RefSCC cycles here!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 1113, __extension__
__PRETTY_FUNCTION__))
;
1114#endif
1115 RC->switchOutgoingEdgeToCall(N, *CallTarget);
1116 LLVM_DEBUG(dbgs() << "Switch outgoing ref edge to a call edge from '" << Ndo { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Switch outgoing ref edge to a call edge from '"
<< N << "' to '" << *CallTarget << "'\n"
; } } while (false)
1117 << "' to '" << *CallTarget << "'\n")do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Switch outgoing ref edge to a call edge from '"
<< N << "' to '" << *CallTarget << "'\n"
; } } while (false)
;
1118 continue;
1119 }
1120 LLVM_DEBUG(dbgs() << "Switch an internal ref edge to a call edge from '"do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Switch an internal ref edge to a call edge from '"
<< N << "' to '" << *CallTarget << "'\n"
; } } while (false)
1121 << N << "' to '" << *CallTarget << "'\n")do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Switch an internal ref edge to a call edge from '"
<< N << "' to '" << *CallTarget << "'\n"
; } } while (false)
;
1122
1123 // Otherwise we are switching an internal ref edge to a call edge. This
1124 // may merge away some SCCs, and we add those to the UpdateResult. We also
1125 // need to make sure to update the worklist in the event SCCs have moved
1126 // before the current one in the post-order sequence
1127 bool HasFunctionAnalysisProxy = false;
1128 auto InitialSCCIndex = RC->find(*C) - RC->begin();
1129 bool FormedCycle = RC->switchInternalEdgeToCall(
1130 N, *CallTarget, [&](ArrayRef<SCC *> MergedSCCs) {
1131 for (SCC *MergedC : MergedSCCs) {
1132 assert(MergedC != &TargetC && "Cannot merge away the target SCC!")(static_cast <bool> (MergedC != &TargetC &&
"Cannot merge away the target SCC!") ? void (0) : __assert_fail
("MergedC != &TargetC && \"Cannot merge away the target SCC!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 1132, __extension__
__PRETTY_FUNCTION__))
;
1133
1134 HasFunctionAnalysisProxy |=
1135 AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(
1136 *MergedC) != nullptr;
1137
1138 // Mark that this SCC will no longer be valid.
1139 UR.InvalidatedSCCs.insert(MergedC);
1140
1141 // FIXME: We should really do a 'clear' here to forcibly release
1142 // memory, but we don't have a good way of doing that and
1143 // preserving the function analyses.
1144 auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
1145 PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
1146 AM.invalidate(*MergedC, PA);
1147 }
1148 });
1149
1150 // If we formed a cycle by creating this call, we need to update more data
1151 // structures.
1152 if (FormedCycle) {
1153 C = &TargetC;
1154 assert(G.lookupSCC(N) == C && "Failed to update current SCC!")(static_cast <bool> (G.lookupSCC(N) == C && "Failed to update current SCC!"
) ? void (0) : __assert_fail ("G.lookupSCC(N) == C && \"Failed to update current SCC!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 1154, __extension__
__PRETTY_FUNCTION__))
;
1155
1156 // If one of the invalidated SCCs had a cached proxy to a function
1157 // analysis manager, we need to create a proxy in the new current SCC as
1158 // the invalidated SCCs had their functions moved.
1159 if (HasFunctionAnalysisProxy)
1160 AM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, G).updateFAM(FAM);
1161
1162 // Any analyses cached for this SCC are no longer precise as the shape
1163 // has changed by introducing this cycle. However, we have taken care to
1164 // update the proxies so it remains valide.
1165 auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
1166 PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
1167 AM.invalidate(*C, PA);
1168 }
1169 auto NewSCCIndex = RC->find(*C) - RC->begin();
1170 // If we have actually moved an SCC to be topologically "below" the current
1171 // one due to merging, we will need to revisit the current SCC after
1172 // visiting those moved SCCs.
1173 //
1174 // It is critical that we *do not* revisit the current SCC unless we
1175 // actually move SCCs in the process of merging because otherwise we may
1176 // form a cycle where an SCC is split apart, merged, split, merged and so
1177 // on infinitely.
1178 if (InitialSCCIndex < NewSCCIndex) {
1179 // Put our current SCC back onto the worklist as we'll visit other SCCs
1180 // that are now definitively ordered prior to the current one in the
1181 // post-order sequence, and may end up observing more precise context to
1182 // optimize the current SCC.
1183 UR.CWorklist.insert(C);
1184 LLVM_DEBUG(dbgs() << "Enqueuing the existing SCC in the worklist: " << *Cdo { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Enqueuing the existing SCC in the worklist: "
<< *C << "\n"; } } while (false)
1185 << "\n")do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Enqueuing the existing SCC in the worklist: "
<< *C << "\n"; } } while (false)
;
1186 // Enqueue in reverse order as we pop off the back of the worklist.
1187 for (SCC &MovedC : llvm::reverse(make_range(RC->begin() + InitialSCCIndex,
1188 RC->begin() + NewSCCIndex))) {
1189 UR.CWorklist.insert(&MovedC);
1190 LLVM_DEBUG(dbgs() << "Enqueuing a newly earlier in post-order SCC: "do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Enqueuing a newly earlier in post-order SCC: "
<< MovedC << "\n"; } } while (false)
1191 << MovedC << "\n")do { if (::llvm::DebugFlag && ::llvm::isCurrentDebugType
("cgscc")) { dbgs() << "Enqueuing a newly earlier in post-order SCC: "
<< MovedC << "\n"; } } while (false)
;
1192 }
1193 }
1194 }
1195
1196 assert(!UR.InvalidatedSCCs.count(C) && "Invalidated the current SCC!")(static_cast <bool> (!UR.InvalidatedSCCs.count(C) &&
"Invalidated the current SCC!") ? void (0) : __assert_fail (
"!UR.InvalidatedSCCs.count(C) && \"Invalidated the current SCC!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 1196, __extension__
__PRETTY_FUNCTION__))
;
1197 assert(!UR.InvalidatedRefSCCs.count(RC) && "Invalidated the current RefSCC!")(static_cast <bool> (!UR.InvalidatedRefSCCs.count(RC) &&
"Invalidated the current RefSCC!") ? void (0) : __assert_fail
("!UR.InvalidatedRefSCCs.count(RC) && \"Invalidated the current RefSCC!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 1197, __extension__
__PRETTY_FUNCTION__))
;
1198 assert(&C->getOuterRefSCC() == RC && "Current SCC not in current RefSCC!")(static_cast <bool> (&C->getOuterRefSCC() == RC &&
"Current SCC not in current RefSCC!") ? void (0) : __assert_fail
("&C->getOuterRefSCC() == RC && \"Current SCC not in current RefSCC!\""
, "llvm/lib/Analysis/CGSCCPassManager.cpp", 1198, __extension__
__PRETTY_FUNCTION__))
;
1199
1200 // Record the current SCC for higher layers of the CGSCC pass manager now that
1201 // all the updates have been applied.
1202 if (C != &InitialC)
1203 UR.UpdatedC = C;
1204
1205 return *C;
1206}
1207
1208LazyCallGraph::SCC &llvm::updateCGAndAnalysisManagerForFunctionPass(
1209 LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
1210 CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR,
1211 FunctionAnalysisManager &FAM) {
1212 return updateCGAndAnalysisManagerForPass(G, InitialC, N, AM, UR, FAM,
1213 /* FunctionPass */ true);
1214}
1215LazyCallGraph::SCC &llvm::updateCGAndAnalysisManagerForCGSCCPass(
1216 LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
1217 CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR,
1218 FunctionAnalysisManager &FAM) {
1219 return updateCGAndAnalysisManagerForPass(G, InitialC, N, AM, UR, FAM,
1220 /* FunctionPass */ false);
1221}

/build/llvm-toolchain-snapshot-16~++20221003111214+1fa2019828ca/llvm/include/llvm/ADT/PriorityWorklist.h

1//===- PriorityWorklist.h - Worklist with insertion priority ----*- C++ -*-===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8///
9/// \file
10///
11/// This file provides a priority worklist. See the class comments for details.
12///
13//===----------------------------------------------------------------------===//
14
15#ifndef LLVM_ADT_PRIORITYWORKLIST_H
16#define LLVM_ADT_PRIORITYWORKLIST_H
17
18#include "llvm/ADT/DenseMap.h"
19#include "llvm/ADT/STLExtras.h"
20#include "llvm/ADT/SmallVector.h"
21#include "llvm/Support/Compiler.h"
22#include <cassert>
23#include <cstddef>
24#include <iterator>
25#include <type_traits>
26#include <vector>
27
28namespace llvm {
29
30/// A FILO worklist that prioritizes on re-insertion without duplication.
31///
32/// This is very similar to a \c SetVector with the primary difference that
33/// while re-insertion does not create a duplicate, it does adjust the
34/// visitation order to respect the last insertion point. This can be useful
35/// when the visit order needs to be prioritized based on insertion point
36/// without actually having duplicate visits.
37///
38/// Note that this doesn't prevent re-insertion of elements which have been
39/// visited -- if you need to break cycles, a set will still be necessary.
40///
41/// The type \c T must be default constructable to a null value that will be
42/// ignored. It is an error to insert such a value, and popping elements will
43/// never produce such a value. It is expected to be used with common nullable
44/// types like pointers or optionals.
45///
46/// Internally this uses a vector to store the worklist and a map to identify
47/// existing elements in the worklist. Both of these may be customized, but the
48/// map must support the basic DenseMap API for mapping from a T to an integer
49/// index into the vector.
50///
51/// A partial specialization is provided to automatically select a SmallVector
52/// and a SmallDenseMap if custom data structures are not provided.
53template <typename T, typename VectorT = std::vector<T>,
54 typename MapT = DenseMap<T, ptrdiff_t>>
55class PriorityWorklist {
56public:
57 using value_type = T;
58 using key_type = T;
59 using reference = T&;
60 using const_reference = const T&;
61 using size_type = typename MapT::size_type;
62
63 /// Construct an empty PriorityWorklist
64 PriorityWorklist() = default;
65
66 /// Determine if the PriorityWorklist is empty or not.
67 bool empty() const {
68 return V.empty();
69 }
70
71 /// Returns the number of elements in the worklist.
72 size_type size() const {
73 return M.size();
74 }
75
76 /// Count the number of elements of a given key in the PriorityWorklist.
77 /// \returns 0 if the element is not in the PriorityWorklist, 1 if it is.
78 size_type count(const key_type &key) const {
79 return M.count(key);
80 }
81
82 /// Return the last element of the PriorityWorklist.
83 const T &back() const {
84 assert(!empty() && "Cannot call back() on empty PriorityWorklist!")(static_cast <bool> (!empty() && "Cannot call back() on empty PriorityWorklist!"
) ? void (0) : __assert_fail ("!empty() && \"Cannot call back() on empty PriorityWorklist!\""
, "llvm/include/llvm/ADT/PriorityWorklist.h", 84, __extension__
__PRETTY_FUNCTION__))
;
9
'?' condition is true
85 return V.back();
10
Returning pointer
86 }
87
88 /// Insert a new element into the PriorityWorklist.
89 /// \returns true if the element was inserted into the PriorityWorklist.
90 bool insert(const T &X) {
91 assert(X != T() && "Cannot insert a null (default constructed) value!")(static_cast <bool> (X != T() && "Cannot insert a null (default constructed) value!"
) ? void (0) : __assert_fail ("X != T() && \"Cannot insert a null (default constructed) value!\""
, "llvm/include/llvm/ADT/PriorityWorklist.h", 91, __extension__
__PRETTY_FUNCTION__))
;
92 auto InsertResult = M.insert({X, V.size()});
93 if (InsertResult.second) {
94 // Fresh value, just append it to the vector.
95 V.push_back(X);
96 return true;
97 }
98
99 auto &Index = InsertResult.first->second;
100 assert(V[Index] == X && "Value not actually at index in map!")(static_cast <bool> (V[Index] == X && "Value not actually at index in map!"
) ? void (0) : __assert_fail ("V[Index] == X && \"Value not actually at index in map!\""
, "llvm/include/llvm/ADT/PriorityWorklist.h", 100, __extension__
__PRETTY_FUNCTION__))
;
101 if (Index != (ptrdiff_t)(V.size() - 1)) {
102 // If the element isn't at the back, null it out and append a fresh one.
103 V[Index] = T();
104 Index = (ptrdiff_t)V.size();
105 V.push_back(X);
106 }
107 return false;
108 }
109
110 /// Insert a sequence of new elements into the PriorityWorklist.
111 template <typename SequenceT>
112 std::enable_if_t<!std::is_convertible<SequenceT, T>::value>
113 insert(SequenceT &&Input) {
114 if (std::begin(Input) == std::end(Input))
115 // Nothing to do for an empty input sequence.
116 return;
117
118 // First pull the input sequence into the vector as a bulk append
119 // operation.
120 ptrdiff_t StartIndex = V.size();
121 V.insert(V.end(), std::begin(Input), std::end(Input));
122 // Now walk backwards fixing up the index map and deleting any duplicates.
123 for (ptrdiff_t i = V.size() - 1; i >= StartIndex; --i) {
124 auto InsertResult = M.insert({V[i], i});
125 if (InsertResult.second)
126 continue;
127
128 // If the existing index is before this insert's start, nuke that one and
129 // move it up.
130 ptrdiff_t &Index = InsertResult.first->second;
131 if (Index < StartIndex) {
132 V[Index] = T();
133 Index = i;
134 continue;
135 }
136
137 // Otherwise the existing one comes first so just clear out the value in
138 // this slot.
139 V[i] = T();
140 }
141 }
142
143 /// Remove the last element of the PriorityWorklist.
144 void pop_back() {
145 assert(!empty() && "Cannot remove an element when empty!")(static_cast <bool> (!empty() && "Cannot remove an element when empty!"
) ? void (0) : __assert_fail ("!empty() && \"Cannot remove an element when empty!\""
, "llvm/include/llvm/ADT/PriorityWorklist.h", 145, __extension__
__PRETTY_FUNCTION__))
;
146 assert(back() != T() && "Cannot have a null element at the back!")(static_cast <bool> (back() != T() && "Cannot have a null element at the back!"
) ? void (0) : __assert_fail ("back() != T() && \"Cannot have a null element at the back!\""
, "llvm/include/llvm/ADT/PriorityWorklist.h", 146, __extension__
__PRETTY_FUNCTION__))
;
147 M.erase(back());
148 do {
149 V.pop_back();
150 } while (!V.empty() && V.back() == T());
151 }
152
153 [[nodiscard]] T pop_back_val() {
154 T Ret = back();
8
Calling 'PriorityWorklist::back'
11
Returning from 'PriorityWorklist::back'
12
'Ret' initialized here
155 pop_back();
156 return Ret;
13
Returning pointer (loaded from 'Ret')
157 }
158
159 /// Erase an item from the worklist.
160 ///
161 /// Note that this is constant time due to the nature of the worklist implementation.
162 bool erase(const T& X) {
163 auto I = M.find(X);
164 if (I == M.end())
165 return false;
166
167 assert(V[I->second] == X && "Value not actually at index in map!")(static_cast <bool> (V[I->second] == X && "Value not actually at index in map!"
) ? void (0) : __assert_fail ("V[I->second] == X && \"Value not actually at index in map!\""
, "llvm/include/llvm/ADT/PriorityWorklist.h", 167, __extension__
__PRETTY_FUNCTION__))
;
168 if (I->second == (ptrdiff_t)(V.size() - 1)) {
169 do {
170 V.pop_back();
171 } while (!V.empty() && V.back() == T());
172 } else {
173 V[I->second] = T();
174 }
175 M.erase(I);
176 return true;
177 }
178
179 /// Erase items from the set vector based on a predicate function.
180 ///
181 /// This is intended to be equivalent to the following code, if we could
182 /// write it:
183 ///
184 /// \code
185 /// V.erase(remove_if(V, P), V.end());
186 /// \endcode
187 ///
188 /// However, PriorityWorklist doesn't expose non-const iterators, making any
189 /// algorithm like remove_if impossible to use.
190 ///
191 /// \returns true if any element is removed.
192 template <typename UnaryPredicate>
193 bool erase_if(UnaryPredicate P) {
194 typename VectorT::iterator E =
195 remove_if(V, TestAndEraseFromMap<UnaryPredicate>(P, M));
196 if (E == V.end())
197 return false;
198 for (auto I = V.begin(); I != E; ++I)
199 if (*I != T())
200 M[*I] = I - V.begin();
201 V.erase(E, V.end());
202 return true;
203 }
204
205 /// Reverse the items in the PriorityWorklist.
206 ///
207 /// This does an in-place reversal. Other kinds of reverse aren't easy to
208 /// support in the face of the worklist semantics.
209
210 /// Completely clear the PriorityWorklist
211 void clear() {
212 M.clear();
213 V.clear();
214 }
215
216private:
217 /// A wrapper predicate designed for use with std::remove_if.
218 ///
219 /// This predicate wraps a predicate suitable for use with std::remove_if to
220 /// call M.erase(x) on each element which is slated for removal. This just
221 /// allows the predicate to be move only which we can't do with lambdas
222 /// today.
223 template <typename UnaryPredicateT>
224 class TestAndEraseFromMap {
225 UnaryPredicateT P;
226 MapT &M;
227
228 public:
229 TestAndEraseFromMap(UnaryPredicateT P, MapT &M)
230 : P(std::move(P)), M(M) {}
231
232 bool operator()(const T &Arg) {
233 if (Arg == T())
234 // Skip null values in the PriorityWorklist.
235 return false;
236
237 if (P(Arg)) {
238 M.erase(Arg);
239 return true;
240 }
241 return false;
242 }
243 };
244
245 /// The map from value to index in the vector.
246 MapT M;
247
248 /// The vector of elements in insertion order.
249 VectorT V;
250};
251
252/// A version of \c PriorityWorklist that selects small size optimized data
253/// structures for the vector and map.
254template <typename T, unsigned N>
255class SmallPriorityWorklist
256 : public PriorityWorklist<T, SmallVector<T, N>,
257 SmallDenseMap<T, ptrdiff_t>> {
258public:
259 SmallPriorityWorklist() = default;
260};
261
262} // end namespace llvm
263
264#endif // LLVM_ADT_PRIORITYWORKLIST_H