Bug Summary

File: tools/clang/lib/StaticAnalyzer/Core/ExplodedGraph.cpp
Warning: line 37, column 65
Potential memory leak

Annotated Source Code

/build/llvm-toolchain-snapshot-6.0~svn318693/tools/clang/lib/StaticAnalyzer/Core/ExplodedGraph.cpp

1//=-- ExplodedGraph.cpp - Local, Path-Sens. "Exploded Graph" -*- C++ -*------=//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This file defines the template classes ExplodedNode and ExplodedGraph,
11// which represent a path-sensitive, intra-procedural "exploded graph."
12//
13//===----------------------------------------------------------------------===//
14
15#include "clang/StaticAnalyzer/Core/PathSensitive/ExplodedGraph.h"
16#include "clang/AST/ParentMap.h"
17#include "clang/AST/Stmt.h"
18#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
19#include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState.h"
20#include "llvm/ADT/DenseSet.h"
21#include "llvm/ADT/SmallVector.h"
22#include "llvm/ADT/Statistic.h"
23
24using namespace clang;
25using namespace ento;
26
27//===----------------------------------------------------------------------===//
28// Node auditing.
29//===----------------------------------------------------------------------===//
30
31// An out of line virtual method to provide a home for the class vtable.
32ExplodedNode::Auditor::~Auditor() {}
33
34#ifndef NDEBUG
35static ExplodedNode::Auditor* NodeAuditor = nullptr;
36#endif
37
38void ExplodedNode::SetAuditor(ExplodedNode::Auditor* A) {
39#ifndef NDEBUG
40 NodeAuditor = A;
41#endif
42}
43
44//===----------------------------------------------------------------------===//
45// Cleanup.
46//===----------------------------------------------------------------------===//
47
48ExplodedGraph::ExplodedGraph()
1
Calling default constructor for 'BumpVectorContext'
49 : NumNodes(0), ReclaimNodeInterval(0) {}
50
51ExplodedGraph::~ExplodedGraph() {}
52
53//===----------------------------------------------------------------------===//
54// Node reclamation.
55//===----------------------------------------------------------------------===//
56
57bool ExplodedGraph::isInterestingLValueExpr(const Expr *Ex) {
58 if (!Ex->isLValue())
59 return false;
60 return isa<DeclRefExpr>(Ex) ||
61 isa<MemberExpr>(Ex) ||
62 isa<ObjCIvarRefExpr>(Ex);
63}
64
65bool ExplodedGraph::shouldCollect(const ExplodedNode *node) {
66 // First, we only consider nodes for reclamation of the following
67 // conditions apply:
68 //
69 // (1) 1 predecessor (that has one successor)
70 // (2) 1 successor (that has one predecessor)
71 //
72 // If a node has no successor it is on the "frontier", while a node
73 // with no predecessor is a root.
74 //
75 // After these prerequisites, we discard all "filler" nodes that
76 // are used only for intermediate processing, and are not essential
77 // for analyzer history:
78 //
79 // (a) PreStmtPurgeDeadSymbols
80 //
81 // We then discard all other nodes where *all* of the following conditions
82 // apply:
83 //
84 // (3) The ProgramPoint is for a PostStmt, but not a PostStore.
85 // (4) There is no 'tag' for the ProgramPoint.
86 // (5) The 'store' is the same as the predecessor.
87 // (6) The 'GDM' is the same as the predecessor.
88 // (7) The LocationContext is the same as the predecessor.
89 // (8) Expressions that are *not* lvalue expressions.
90 // (9) The PostStmt isn't for a non-consumed Stmt or Expr.
91 // (10) The successor is neither a CallExpr StmtPoint nor a CallEnter or
92 // PreImplicitCall (so that we would be able to find it when retrying a
93 // call with no inlining).
94 // FIXME: It may be safe to reclaim PreCall and PostCall nodes as well.
95
96 // Conditions 1 and 2.
97 if (node->pred_size() != 1 || node->succ_size() != 1)
98 return false;
99
100 const ExplodedNode *pred = *(node->pred_begin());
101 if (pred->succ_size() != 1)
102 return false;
103
104 const ExplodedNode *succ = *(node->succ_begin());
105 if (succ->pred_size() != 1)
106 return false;
107
108 // Now reclaim any nodes that are (by definition) not essential to
109 // analysis history and are not consulted by any client code.
110 ProgramPoint progPoint = node->getLocation();
111 if (progPoint.getAs<PreStmtPurgeDeadSymbols>())
112 return !progPoint.getTag();
113
114 // Condition 3.
115 if (!progPoint.getAs<PostStmt>() || progPoint.getAs<PostStore>())
116 return false;
117
118 // Condition 4.
119 if (progPoint.getTag())
120 return false;
121
122 // Conditions 5, 6, and 7.
123 ProgramStateRef state = node->getState();
124 ProgramStateRef pred_state = pred->getState();
125 if (state->store != pred_state->store || state->GDM != pred_state->GDM ||
126 progPoint.getLocationContext() != pred->getLocationContext())
127 return false;
128
129 // All further checks require expressions. As per #3, we know that we have
130 // a PostStmt.
131 const Expr *Ex = dyn_cast<Expr>(progPoint.castAs<PostStmt>().getStmt());
132 if (!Ex)
133 return false;
134
135 // Condition 8.
136 // Do not collect nodes for "interesting" lvalue expressions since they are
137 // used extensively for generating path diagnostics.
138 if (isInterestingLValueExpr(Ex))
139 return false;
140
141 // Condition 9.
142 // Do not collect nodes for non-consumed Stmt or Expr to ensure precise
143 // diagnostic generation; specifically, so that we could anchor arrows
144 // pointing to the beginning of statements (as written in code).
145 ParentMap &PM = progPoint.getLocationContext()->getParentMap();
146 if (!PM.isConsumedExpr(Ex))
147 return false;
148
149 // Condition 10.
150 const ProgramPoint SuccLoc = succ->getLocation();
151 if (Optional<StmtPoint> SP = SuccLoc.getAs<StmtPoint>())
152 if (CallEvent::isCallStmt(SP->getStmt()))
153 return false;
154
155 // Condition 10, continuation.
156 if (SuccLoc.getAs<CallEnter>() || SuccLoc.getAs<PreImplicitCall>())
157 return false;
158
159 return true;
160}
161
162void ExplodedGraph::collectNode(ExplodedNode *node) {
163 // Removing a node means:
164 // (a) changing the predecessors successor to the successor of this node
165 // (b) changing the successors predecessor to the predecessor of this node
166 // (c) Putting 'node' onto freeNodes.
167 assert(node->pred_size() == 1 || node->succ_size() == 1)(static_cast <bool> (node->pred_size() == 1 || node->
succ_size() == 1) ? void (0) : __assert_fail ("node->pred_size() == 1 || node->succ_size() == 1"
, "/build/llvm-toolchain-snapshot-6.0~svn318693/tools/clang/lib/StaticAnalyzer/Core/ExplodedGraph.cpp"
, 167, __extension__ __PRETTY_FUNCTION__))
;
168 ExplodedNode *pred = *(node->pred_begin());
169 ExplodedNode *succ = *(node->succ_begin());
170 pred->replaceSuccessor(succ);
171 succ->replacePredecessor(pred);
172 FreeNodes.push_back(node);
173 Nodes.RemoveNode(node);
174 --NumNodes;
175 node->~ExplodedNode();
176}
177
178void ExplodedGraph::reclaimRecentlyAllocatedNodes() {
179 if (ChangedNodes.empty())
180 return;
181
182 // Only periodically reclaim nodes so that we can build up a set of
183 // nodes that meet the reclamation criteria. Freshly created nodes
184 // by definition have no successor, and thus cannot be reclaimed (see below).
185 assert(ReclaimCounter > 0)(static_cast <bool> (ReclaimCounter > 0) ? void (0) :
__assert_fail ("ReclaimCounter > 0", "/build/llvm-toolchain-snapshot-6.0~svn318693/tools/clang/lib/StaticAnalyzer/Core/ExplodedGraph.cpp"
, 185, __extension__ __PRETTY_FUNCTION__))
;
186 if (--ReclaimCounter != 0)
187 return;
188 ReclaimCounter = ReclaimNodeInterval;
189
190 for (NodeVector::iterator it = ChangedNodes.begin(), et = ChangedNodes.end();
191 it != et; ++it) {
192 ExplodedNode *node = *it;
193 if (shouldCollect(node))
194 collectNode(node);
195 }
196 ChangedNodes.clear();
197}
198
199//===----------------------------------------------------------------------===//
200// ExplodedNode.
201//===----------------------------------------------------------------------===//
202
203// An NodeGroup's storage type is actually very much like a TinyPtrVector:
204// it can be either a pointer to a single ExplodedNode, or a pointer to a
205// BumpVector allocated with the ExplodedGraph's allocator. This allows the
206// common case of single-node NodeGroups to be implemented with no extra memory.
207//
208// Consequently, each of the NodeGroup methods have up to four cases to handle:
209// 1. The flag is set and this group does not actually contain any nodes.
210// 2. The group is empty, in which case the storage value is null.
211// 3. The group contains a single node.
212// 4. The group contains more than one node.
213typedef BumpVector<ExplodedNode *> ExplodedNodeVector;
214typedef llvm::PointerUnion<ExplodedNode *, ExplodedNodeVector *> GroupStorage;
215
216void ExplodedNode::addPredecessor(ExplodedNode *V, ExplodedGraph &G) {
217 assert (!V->isSink())(static_cast <bool> (!V->isSink()) ? void (0) : __assert_fail
("!V->isSink()", "/build/llvm-toolchain-snapshot-6.0~svn318693/tools/clang/lib/StaticAnalyzer/Core/ExplodedGraph.cpp"
, 217, __extension__ __PRETTY_FUNCTION__))
;
218 Preds.addNode(V, G);
219 V->Succs.addNode(this, G);
220#ifndef NDEBUG
221 if (NodeAuditor) NodeAuditor->AddEdge(V, this);
222#endif
223}
224
225void ExplodedNode::NodeGroup::replaceNode(ExplodedNode *node) {
226 assert(!getFlag())(static_cast <bool> (!getFlag()) ? void (0) : __assert_fail
("!getFlag()", "/build/llvm-toolchain-snapshot-6.0~svn318693/tools/clang/lib/StaticAnalyzer/Core/ExplodedGraph.cpp"
, 226, __extension__ __PRETTY_FUNCTION__))
;
227
228 GroupStorage &Storage = reinterpret_cast<GroupStorage&>(P);
229 assert(Storage.is<ExplodedNode *>())(static_cast <bool> (Storage.is<ExplodedNode *>()
) ? void (0) : __assert_fail ("Storage.is<ExplodedNode *>()"
, "/build/llvm-toolchain-snapshot-6.0~svn318693/tools/clang/lib/StaticAnalyzer/Core/ExplodedGraph.cpp"
, 229, __extension__ __PRETTY_FUNCTION__))
;
230 Storage = node;
231 assert(Storage.is<ExplodedNode *>())(static_cast <bool> (Storage.is<ExplodedNode *>()
) ? void (0) : __assert_fail ("Storage.is<ExplodedNode *>()"
, "/build/llvm-toolchain-snapshot-6.0~svn318693/tools/clang/lib/StaticAnalyzer/Core/ExplodedGraph.cpp"
, 231, __extension__ __PRETTY_FUNCTION__))
;
232}
233
234void ExplodedNode::NodeGroup::addNode(ExplodedNode *N, ExplodedGraph &G) {
235 assert(!getFlag())(static_cast <bool> (!getFlag()) ? void (0) : __assert_fail
("!getFlag()", "/build/llvm-toolchain-snapshot-6.0~svn318693/tools/clang/lib/StaticAnalyzer/Core/ExplodedGraph.cpp"
, 235, __extension__ __PRETTY_FUNCTION__))
;
236
237 GroupStorage &Storage = reinterpret_cast<GroupStorage&>(P);
238 if (Storage.isNull()) {
239 Storage = N;
240 assert(Storage.is<ExplodedNode *>())(static_cast <bool> (Storage.is<ExplodedNode *>()
) ? void (0) : __assert_fail ("Storage.is<ExplodedNode *>()"
, "/build/llvm-toolchain-snapshot-6.0~svn318693/tools/clang/lib/StaticAnalyzer/Core/ExplodedGraph.cpp"
, 240, __extension__ __PRETTY_FUNCTION__))
;
241 return;
242 }
243
244 ExplodedNodeVector *V = Storage.dyn_cast<ExplodedNodeVector *>();
245
246 if (!V) {
247 // Switch from single-node to multi-node representation.
248 ExplodedNode *Old = Storage.get<ExplodedNode *>();
249
250 BumpVectorContext &Ctx = G.getNodeAllocator();
251 V = G.getAllocator().Allocate<ExplodedNodeVector>();
252 new (V) ExplodedNodeVector(Ctx, 4);
253 V->push_back(Old, Ctx);
254
255 Storage = V;
256 assert(!getFlag())(static_cast <bool> (!getFlag()) ? void (0) : __assert_fail
("!getFlag()", "/build/llvm-toolchain-snapshot-6.0~svn318693/tools/clang/lib/StaticAnalyzer/Core/ExplodedGraph.cpp"
, 256, __extension__ __PRETTY_FUNCTION__))
;
257 assert(Storage.is<ExplodedNodeVector *>())(static_cast <bool> (Storage.is<ExplodedNodeVector *
>()) ? void (0) : __assert_fail ("Storage.is<ExplodedNodeVector *>()"
, "/build/llvm-toolchain-snapshot-6.0~svn318693/tools/clang/lib/StaticAnalyzer/Core/ExplodedGraph.cpp"
, 257, __extension__ __PRETTY_FUNCTION__))
;
258 }
259
260 V->push_back(N, G.getNodeAllocator());
261}
262
263unsigned ExplodedNode::NodeGroup::size() const {
264 if (getFlag())
265 return 0;
266
267 const GroupStorage &Storage = reinterpret_cast<const GroupStorage &>(P);
268 if (Storage.isNull())
269 return 0;
270 if (ExplodedNodeVector *V = Storage.dyn_cast<ExplodedNodeVector *>())
271 return V->size();
272 return 1;
273}
274
275ExplodedNode * const *ExplodedNode::NodeGroup::begin() const {
276 if (getFlag())
277 return nullptr;
278
279 const GroupStorage &Storage = reinterpret_cast<const GroupStorage &>(P);
280 if (Storage.isNull())
281 return nullptr;
282 if (ExplodedNodeVector *V = Storage.dyn_cast<ExplodedNodeVector *>())
283 return V->begin();
284 return Storage.getAddrOfPtr1();
285}
286
287ExplodedNode * const *ExplodedNode::NodeGroup::end() const {
288 if (getFlag())
289 return nullptr;
290
291 const GroupStorage &Storage = reinterpret_cast<const GroupStorage &>(P);
292 if (Storage.isNull())
293 return nullptr;
294 if (ExplodedNodeVector *V = Storage.dyn_cast<ExplodedNodeVector *>())
295 return V->end();
296 return Storage.getAddrOfPtr1() + 1;
297}
298
299ExplodedNode *ExplodedGraph::getNode(const ProgramPoint &L,
300 ProgramStateRef State,
301 bool IsSink,
302 bool* IsNew) {
303 // Profile 'State' to determine if we already have an existing node.
304 llvm::FoldingSetNodeID profile;
305 void *InsertPos = nullptr;
306
307 NodeTy::Profile(profile, L, State, IsSink);
308 NodeTy* V = Nodes.FindNodeOrInsertPos(profile, InsertPos);
309
310 if (!V) {
311 if (!FreeNodes.empty()) {
312 V = FreeNodes.back();
313 FreeNodes.pop_back();
314 }
315 else {
316 // Allocate a new node.
317 V = (NodeTy*) getAllocator().Allocate<NodeTy>();
318 }
319
320 new (V) NodeTy(L, State, IsSink);
321
322 if (ReclaimNodeInterval)
323 ChangedNodes.push_back(V);
324
325 // Insert the node into the node set and return it.
326 Nodes.InsertNode(V, InsertPos);
327 ++NumNodes;
328
329 if (IsNew) *IsNew = true;
330 }
331 else
332 if (IsNew) *IsNew = false;
333
334 return V;
335}
336
337ExplodedNode *ExplodedGraph::createUncachedNode(const ProgramPoint &L,
338 ProgramStateRef State,
339 bool IsSink) {
340 NodeTy *V = (NodeTy *) getAllocator().Allocate<NodeTy>();
341 new (V) NodeTy(L, State, IsSink);
342 return V;
343}
344
345std::unique_ptr<ExplodedGraph>
346ExplodedGraph::trim(ArrayRef<const NodeTy *> Sinks,
347 InterExplodedGraphMap *ForwardMap,
348 InterExplodedGraphMap *InverseMap) const {
349
350 if (Nodes.empty())
351 return nullptr;
352
353 typedef llvm::DenseSet<const ExplodedNode*> Pass1Ty;
354 Pass1Ty Pass1;
355
356 typedef InterExplodedGraphMap Pass2Ty;
357 InterExplodedGraphMap Pass2Scratch;
358 Pass2Ty &Pass2 = ForwardMap ? *ForwardMap : Pass2Scratch;
359
360 SmallVector<const ExplodedNode*, 10> WL1, WL2;
361
362 // ===- Pass 1 (reverse DFS) -===
363 for (ArrayRef<const NodeTy *>::iterator I = Sinks.begin(), E = Sinks.end();
364 I != E; ++I) {
365 if (*I)
366 WL1.push_back(*I);
367 }
368
369 // Process the first worklist until it is empty.
370 while (!WL1.empty()) {
371 const ExplodedNode *N = WL1.pop_back_val();
372
373 // Have we already visited this node? If so, continue to the next one.
374 if (!Pass1.insert(N).second)
375 continue;
376
377 // If this is a root enqueue it to the second worklist.
378 if (N->Preds.empty()) {
379 WL2.push_back(N);
380 continue;
381 }
382
383 // Visit our predecessors and enqueue them.
384 WL1.append(N->Preds.begin(), N->Preds.end());
385 }
386
387 // We didn't hit a root? Return with a null pointer for the new graph.
388 if (WL2.empty())
389 return nullptr;
390
391 // Create an empty graph.
392 std::unique_ptr<ExplodedGraph> G = MakeEmptyGraph();
393
394 // ===- Pass 2 (forward DFS to construct the new graph) -===
395 while (!WL2.empty()) {
396 const ExplodedNode *N = WL2.pop_back_val();
397
398 // Skip this node if we have already processed it.
399 if (Pass2.find(N) != Pass2.end())
400 continue;
401
402 // Create the corresponding node in the new graph and record the mapping
403 // from the old node to the new node.
404 ExplodedNode *NewN = G->createUncachedNode(N->getLocation(), N->State, N->isSink());
405 Pass2[N] = NewN;
406
407 // Also record the reverse mapping from the new node to the old node.
408 if (InverseMap) (*InverseMap)[NewN] = N;
409
410 // If this node is a root, designate it as such in the graph.
411 if (N->Preds.empty())
412 G->addRoot(NewN);
413
414 // In the case that some of the intended predecessors of NewN have already
415 // been created, we should hook them up as predecessors.
416
417 // Walk through the predecessors of 'N' and hook up their corresponding
418 // nodes in the new graph (if any) to the freshly created node.
419 for (ExplodedNode::pred_iterator I = N->Preds.begin(), E = N->Preds.end();
420 I != E; ++I) {
421 Pass2Ty::iterator PI = Pass2.find(*I);
422 if (PI == Pass2.end())
423 continue;
424
425 NewN->addPredecessor(const_cast<ExplodedNode *>(PI->second), *G);
426 }
427
428 // In the case that some of the intended successors of NewN have already
429 // been created, we should hook them up as successors. Otherwise, enqueue
430 // the new nodes from the original graph that should have nodes created
431 // in the new graph.
432 for (ExplodedNode::succ_iterator I = N->Succs.begin(), E = N->Succs.end();
433 I != E; ++I) {
434 Pass2Ty::iterator PI = Pass2.find(*I);
435 if (PI != Pass2.end()) {
436 const_cast<ExplodedNode *>(PI->second)->addPredecessor(NewN, *G);
437 continue;
438 }
439
440 // Enqueue nodes to the worklist that were marked during pass 1.
441 if (Pass1.count(*I))
442 WL2.push_back(*I);
443 }
444 }
445
446 return G;
447}
448

/build/llvm-toolchain-snapshot-6.0~svn318693/tools/clang/include/clang/Analysis/Support/BumpVector.h

1//===-- BumpVector.h - Vector-like ADT that uses bump allocation --*- C++ -*-=//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This file provides BumpVector, a vector-like ADT whose contents are
11// allocated from a BumpPtrAllocator.
12//
13//===----------------------------------------------------------------------===//
14
15// FIXME: Most of this is copy-and-paste from SmallVector.h. We can
16// refactor this core logic into something common that is shared between
17// the two. The main thing that is different is the allocation strategy.
18
19#ifndef LLVM_CLANG_ANALYSIS_SUPPORT_BUMPVECTOR_H
20#define LLVM_CLANG_ANALYSIS_SUPPORT_BUMPVECTOR_H
21
22#include "llvm/ADT/PointerIntPair.h"
23#include "llvm/Support/Allocator.h"
24#include "llvm/Support/type_traits.h"
25#include <algorithm>
26#include <cstring>
27#include <iterator>
28#include <memory>
29
30namespace clang {
31
32class BumpVectorContext {
33 llvm::PointerIntPair<llvm::BumpPtrAllocator*, 1> Alloc;
34public:
35 /// Construct a new BumpVectorContext that creates a new BumpPtrAllocator
36 /// and destroys it when the BumpVectorContext object is destroyed.
37 BumpVectorContext() : Alloc(new llvm::BumpPtrAllocator(), 1) {}
2
Memory is allocated
3
Potential memory leak
38
39 BumpVectorContext(BumpVectorContext &&Other) : Alloc(Other.Alloc) {
40 Other.Alloc.setInt(false);
41 Other.Alloc.setPointer(nullptr);
42 }
43
44 /// Construct a new BumpVectorContext that reuses an existing
45 /// BumpPtrAllocator. This BumpPtrAllocator is not destroyed when the
46 /// BumpVectorContext object is destroyed.
47 BumpVectorContext(llvm::BumpPtrAllocator &A) : Alloc(&A, 0) {}
48
49 ~BumpVectorContext() {
50 if (Alloc.getInt())
51 delete Alloc.getPointer();
52 }
53
54 llvm::BumpPtrAllocator &getAllocator() { return *Alloc.getPointer(); }
55};
56
57template<typename T>
58class BumpVector {
59 T *Begin, *End, *Capacity;
60public:
61 // Default ctor - Initialize to empty.
62 explicit BumpVector(BumpVectorContext &C, unsigned N)
63 : Begin(nullptr), End(nullptr), Capacity(nullptr) {
64 reserve(C, N);
65 }
66
67 ~BumpVector() {
68 if (std::is_class<T>::value) {
69 // Destroy the constructed elements in the vector.
70 destroy_range(Begin, End);
71 }
72 }
73
74 typedef size_t size_type;
75 typedef ptrdiff_t difference_type;
76 typedef T value_type;
77 typedef T* iterator;
78 typedef const T* const_iterator;
79
80 typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
81 typedef std::reverse_iterator<iterator> reverse_iterator;
82
83 typedef T& reference;
84 typedef const T& const_reference;
85 typedef T* pointer;
86 typedef const T* const_pointer;
87
88 // forward iterator creation methods.
89 iterator begin() { return Begin; }
90 const_iterator begin() const { return Begin; }
91 iterator end() { return End; }
92 const_iterator end() const { return End; }
93
94 // reverse iterator creation methods.
95 reverse_iterator rbegin() { return reverse_iterator(end()); }
96 const_reverse_iterator rbegin() const{ return const_reverse_iterator(end()); }
97 reverse_iterator rend() { return reverse_iterator(begin()); }
98 const_reverse_iterator rend() const { return const_reverse_iterator(begin());}
99
100 bool empty() const { return Begin == End; }
101 size_type size() const { return End-Begin; }
102
103 reference operator[](unsigned idx) {
104 assert(Begin + idx < End)(static_cast <bool> (Begin + idx < End) ? void (0) :
__assert_fail ("Begin + idx < End", "/build/llvm-toolchain-snapshot-6.0~svn318693/tools/clang/include/clang/Analysis/Support/BumpVector.h"
, 104, __extension__ __PRETTY_FUNCTION__))
;
105 return Begin[idx];
106 }
107 const_reference operator[](unsigned idx) const {
108 assert(Begin + idx < End)(static_cast <bool> (Begin + idx < End) ? void (0) :
__assert_fail ("Begin + idx < End", "/build/llvm-toolchain-snapshot-6.0~svn318693/tools/clang/include/clang/Analysis/Support/BumpVector.h"
, 108, __extension__ __PRETTY_FUNCTION__))
;
109 return Begin[idx];
110 }
111
112 reference front() {
113 return begin()[0];
114 }
115 const_reference front() const {
116 return begin()[0];
117 }
118
119 reference back() {
120 return end()[-1];
121 }
122 const_reference back() const {
123 return end()[-1];
124 }
125
126 void pop_back() {
127 --End;
128 End->~T();
129 }
130
131 T pop_back_val() {
132 T Result = back();
133 pop_back();
134 return Result;
135 }
136
137 void clear() {
138 if (std::is_class<T>::value) {
139 destroy_range(Begin, End);
140 }
141 End = Begin;
142 }
143
144 /// data - Return a pointer to the vector's buffer, even if empty().
145 pointer data() {
146 return pointer(Begin);
147 }
148
149 /// data - Return a pointer to the vector's buffer, even if empty().
150 const_pointer data() const {
151 return const_pointer(Begin);
152 }
153
154 void push_back(const_reference Elt, BumpVectorContext &C) {
155 if (End < Capacity) {
156 Retry:
157 new (End) T(Elt);
158 ++End;
159 return;
160 }
161 grow(C);
162 goto Retry;
163 }
164
165 /// insert - Insert some number of copies of element into a position. Return
166 /// iterator to position after last inserted copy.
167 iterator insert(iterator I, size_t Cnt, const_reference E,
168 BumpVectorContext &C) {
169 assert (I >= Begin && I <= End && "Iterator out of bounds.")(static_cast <bool> (I >= Begin && I <= End
&& "Iterator out of bounds.") ? void (0) : __assert_fail
("I >= Begin && I <= End && \"Iterator out of bounds.\""
, "/build/llvm-toolchain-snapshot-6.0~svn318693/tools/clang/include/clang/Analysis/Support/BumpVector.h"
, 169, __extension__ __PRETTY_FUNCTION__))
;
170 if (End + Cnt <= Capacity) {
171 Retry:
172 move_range_right(I, End, Cnt);
173 construct_range(I, I + Cnt, E);
174 End += Cnt;
175 return I + Cnt;
176 }
177 ptrdiff_t D = I - Begin;
178 grow(C, size() + Cnt);
179 I = Begin + D;
180 goto Retry;
181 }
182
183 void reserve(BumpVectorContext &C, unsigned N) {
184 if (unsigned(Capacity-Begin) < N)
185 grow(C, N);
186 }
187
188 /// capacity - Return the total number of elements in the currently allocated
189 /// buffer.
190 size_t capacity() const { return Capacity - Begin; }
191
192private:
193 /// grow - double the size of the allocated memory, guaranteeing space for at
194 /// least one more element or MinSize if specified.
195 void grow(BumpVectorContext &C, size_type MinSize = 1);
196
197 void construct_range(T *S, T *E, const T &Elt) {
198 for (; S != E; ++S)
199 new (S) T(Elt);
200 }
201
202 void destroy_range(T *S, T *E) {
203 while (S != E) {
204 --E;
205 E->~T();
206 }
207 }
208
209 void move_range_right(T *S, T *E, size_t D) {
210 for (T *I = E + D - 1, *IL = S + D - 1; I != IL; --I) {
211 --E;
212 new (I) T(*E);
213 E->~T();
214 }
215 }
216};
217
218// Define this out-of-line to dissuade the C++ compiler from inlining it.
219template <typename T>
220void BumpVector<T>::grow(BumpVectorContext &C, size_t MinSize) {
221 size_t CurCapacity = Capacity-Begin;
222 size_t CurSize = size();
223 size_t NewCapacity = 2*CurCapacity;
224 if (NewCapacity < MinSize)
225 NewCapacity = MinSize;
226
227 // Allocate the memory from the BumpPtrAllocator.
228 T *NewElts = C.getAllocator().template Allocate<T>(NewCapacity);
229
230 // Copy the elements over.
231 if (Begin != End) {
232 if (std::is_class<T>::value) {
233 std::uninitialized_copy(Begin, End, NewElts);
234 // Destroy the original elements.
235 destroy_range(Begin, End);
236 } else {
237 // Use memcpy for PODs (std::uninitialized_copy optimizes to memmove).
238 memcpy(NewElts, Begin, CurSize * sizeof(T));
239 }
240 }
241
242 // For now, leak 'Begin'. We can add it back to a freelist in
243 // BumpVectorContext.
244 Begin = NewElts;
245 End = NewElts+CurSize;
246 Capacity = Begin+NewCapacity;
247}
248
249} // end: clang namespace
250#endif