Bug Summary

File: include/llvm/ADT/IntrusiveRefCntPtr.h
Warning: line 157, column 38
Potential leak of memory pointed to by field 'Obj'
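
This warning is reported against the release() call in IntrusiveRefCntPtr's destructor (line 157 of the header, listed at the bottom of this page), reached from RewriteRope::MakeRopeString in RewriteRope.cpp. The ownership pattern on that path reduces to the following self-contained sketch (hypothetical names, not the LLVM classes): a buffer is allocated with new char[], reinterpreted as an object whose reference count starts at 0, and handed to a smart pointer whose constructor performs the first retain.

#include <cstring>

struct RefCountedBuf {
  unsigned RefCount; // starts at 0; the first smart-pointer retain makes it 1
  char Data[1];      // payload bytes follow the header
};

struct BufPtr { // stand-in for IntrusiveRefCntPtr<RopeRefCountString>
  RefCountedBuf *Obj = nullptr;
  explicit BufPtr(RefCountedBuf *B) : Obj(B) { if (Obj) ++Obj->RefCount; }
  BufPtr(const BufPtr &S) : Obj(S.Obj) { if (Obj) ++Obj->RefCount; }
  ~BufPtr() { // the destructor the checker flags in the real header
    if (Obj && --Obj->RefCount == 0)
      delete[] reinterpret_cast<char *>(Obj); // matches the new char[]
  }
};

BufPtr makeBuf(const char *Start, const char *End) {
  unsigned Len = End - Start;
  auto *Res = reinterpret_cast<RefCountedBuf *>(
      new char[sizeof(RefCountedBuf) + Len - 1]);
  Res->RefCount = 0;                 // not owned by anything yet
  std::memcpy(Res->Data, Start, Len);
  return BufPtr(Res);                // retain to 1; freed when the last copy dies
}

Whether the allocation in MakeRopeString really leaks depends on whether the analyzer tracks the retain performed by the RopePiece temporary along steps 1-7 below; reports of this shape are often false positives when the reference count lives inside the pointed-to object itself.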

Annotated Source Code


clang -cc1 -triple x86_64-pc-linux-gnu -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name RewriteRope.cpp -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-eagerly-assume -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -mrelocation-model pic -pic-level 2 -mthread-model posix -relaxed-aliasing -fmath-errno -masm-verbose -mconstructor-aliases -munwind-tables -fuse-init-array -target-cpu x86-64 -dwarf-column-info -debugger-tuning=gdb -momit-leaf-frame-pointer -ffunction-sections -fdata-sections -resource-dir /usr/lib/llvm-7/lib/clang/7.0.0 -D _DEBUG -D _GNU_SOURCE -D __STDC_CONSTANT_MACROS -D __STDC_FORMAT_MACROS -D __STDC_LIMIT_MACROS -I /build/llvm-toolchain-snapshot-7~svn338205/build-llvm/tools/clang/lib/Rewrite -I /build/llvm-toolchain-snapshot-7~svn338205/tools/clang/lib/Rewrite -I /build/llvm-toolchain-snapshot-7~svn338205/tools/clang/include -I /build/llvm-toolchain-snapshot-7~svn338205/build-llvm/tools/clang/include -I /build/llvm-toolchain-snapshot-7~svn338205/build-llvm/include -I /build/llvm-toolchain-snapshot-7~svn338205/include -U NDEBUG -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/8/../../../../include/c++/8 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/8/../../../../include/x86_64-linux-gnu/c++/8 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/8/../../../../include/x86_64-linux-gnu/c++/8 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/8/../../../../include/c++/8/backward -internal-isystem /usr/include/clang/7.0.0/include/ -internal-isystem /usr/local/include -internal-isystem /usr/lib/llvm-7/lib/clang/7.0.0/include -internal-externc-isystem /usr/lib/gcc/x86_64-linux-gnu/8/include -internal-externc-isystem /usr/include/x86_64-linux-gnu -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-unused-parameter -Wwrite-strings -Wno-missing-field-initializers -Wno-long-long -Wno-maybe-uninitialized -Wno-class-memaccess -Wno-comment -std=c++11 -fdeprecated-macro -fdebug-compilation-dir /build/llvm-toolchain-snapshot-7~svn338205/build-llvm/tools/clang/lib/Rewrite -ferror-limit 19 -fmessage-length 0 -fvisibility-inlines-hidden -fobjc-runtime=gcc -fno-common -fdiagnostics-show-option -vectorize-loops -vectorize-slp -analyzer-output=html -analyzer-config stable-report-filename=true -o /tmp/scan-build-2018-07-29-043837-17923-1 -x c++ /build/llvm-toolchain-snapshot-7~svn338205/tools/clang/lib/Rewrite/RewriteRope.cpp -faddrsig

/build/llvm-toolchain-snapshot-7~svn338205/tools/clang/lib/Rewrite/RewriteRope.cpp

1//===- RewriteRope.cpp - Rope specialized for rewriter --------------------===//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This file implements the RewriteRope class, which is a powerful string class.
11//
12//===----------------------------------------------------------------------===//
13
14#include "clang/Rewrite/Core/RewriteRope.h"
15#include "clang/Basic/LLVM.h"
16#include "llvm/Support/Casting.h"
17#include <algorithm>
18#include <cassert>
19#include <cstring>
20
21using namespace clang;
22
23/// RewriteRope is a "strong" string class, designed to make insertions and
24/// deletions in the middle of the string nearly constant time (really, they are
25/// O(log N), but with a very low constant factor).
26///
27/// The implementation of this data structure is a conceptual linear sequence of
28/// RopePiece elements. Each RopePiece represents a view on a separately
29/// allocated and reference counted string. This means that splitting a very
30/// long string can be done in constant time by splitting a RopePiece that
31/// references the whole string into two rope pieces that reference each half.
32/// Once split, another string can be inserted in between the two halves by
33/// inserting a RopePiece in between the two others. All of this is very
34/// inexpensive: it takes time proportional to the number of RopePieces, not the
35/// length of the strings they represent.
36///
37/// While a linear sequence of RopePieces is the conceptual model, the actual
38/// implementation captures them in an adapted B+ Tree. Using a B+ tree (a
39/// tree that keeps the values in the leaves, where each node contains a
40/// reasonable number of pointers to children/values) allows us to
41/// maintain efficient operation when the RewriteRope contains a *huge* number
42/// of RopePieces. The basic idea of the B+ Tree is that it allows us to find
43/// the RopePiece corresponding to some offset very efficiently, and it
44/// automatically balances itself on insertions of RopePieces (which can happen
45/// for both insertions and erases of string ranges).
46///
47/// The one wrinkle on the theory is that we don't attempt to keep the tree
48/// properly balanced when erases happen. Erases of string data can either
49/// insert new RopePieces (e.g. when the middle of some other rope piece is
50/// deleted, which results in two rope pieces, just like an insert) or
51/// reduce the number of RopePieces maintained by the B+Tree. In the case when
52/// the number of RopePieces is reduced, we don't attempt to maintain the
53/// standard 'invariant' that each node in the tree contains at least
54/// 'WidthFactor' children/values. For our use cases, this doesn't seem to
55/// matter.
56///
57/// The implementation below is expressed primarily in terms of three classes:
58/// RopePieceBTreeNode - Common base class for:
59///
60/// RopePieceBTreeLeaf - Directly manages up to '2*WidthFactor' RopePiece
61/// nodes. This directly represents a chunk of the string with those
62/// RopePieces concatenated.
63/// RopePieceBTreeInterior - An interior node in the B+ Tree, which manages
64/// up to '2*WidthFactor' other nodes in the tree.
65
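As a concrete illustration of the constant-time split described above, here is a minimal standalone sketch (simplified types; std::shared_ptr stands in for the intrusive reference counting used by the real RopePiece):

#include <cassert>
#include <memory>
#include <string>
#include <utility>

// A piece is a view [Start, End) into a shared, immutable buffer.
struct Piece {
  std::shared_ptr<const std::string> Str;
  unsigned Start, End;
  unsigned size() const { return End - Start; }
};

// Splitting copies the cheap handle and adjusts two offsets; the underlying
// string is never touched, so the cost is independent of its length.
std::pair<Piece, Piece> splitPiece(const Piece &P, unsigned Off) {
  assert(Off > 0 && Off < P.size() && "split point must be interior");
  Piece Head{P.Str, P.Start, P.Start + Off};
  Piece Tail{P.Str, P.Start + Off, P.End};
  return {Head, Tail};
}
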
66namespace {
67
68//===----------------------------------------------------------------------===//
69// RopePieceBTreeNode Class
70//===----------------------------------------------------------------------===//
71
72 /// RopePieceBTreeNode - Common base class of RopePieceBTreeLeaf and
73 /// RopePieceBTreeInterior. This provides some 'virtual' dispatching methods
74 /// and a flag that determines which subclass the instance is. Also
75/// important, this node knows the full extent of the node, including any
76 /// children that it has. This allows efficient skipping over entire subtrees
77 /// when looking for an offset in the BTree.
78 class RopePieceBTreeNode {
79 protected:
80 /// WidthFactor - This controls the number of K/V slots held in the BTree:
81 /// how wide it is. Each level of the BTree is guaranteed to have at least
82/// 'WidthFactor' elements in it (either RopePieces or children), except
83/// the root, which may have fewer, and may have at most 2*WidthFactor
84/// elements.
85 enum { WidthFactor = 8 };
86
87 /// Size - This is the number of bytes of file this node (including any
88 /// potential children) covers.
89 unsigned Size = 0;
90
91 /// IsLeaf - True if this is an instance of RopePieceBTreeLeaf, false if it
92 /// is an instance of RopePieceBTreeInterior.
93 bool IsLeaf;
94
95 RopePieceBTreeNode(bool isLeaf) : IsLeaf(isLeaf) {}
96 ~RopePieceBTreeNode() = default;
97
98 public:
99 bool isLeaf() const { return IsLeaf; }
100 unsigned size() const { return Size; }
101
102 void Destroy();
103
104 /// split - Split the range containing the specified offset so that we are
105 /// guaranteed that there is a place to do an insertion at the specified
106 /// offset. The offset is relative, so "0" is the start of the node.
107 ///
108 /// If there is no space in this subtree for the extra piece, the extra tree
109 /// node is returned and must be inserted into a parent.
110 RopePieceBTreeNode *split(unsigned Offset);
111
112 /// insert - Insert the specified ropepiece into this tree node at the
113 /// specified offset. The offset is relative, so "0" is the start of the
114 /// node.
115 ///
116 /// If there is no space in this subtree for the extra piece, the extra tree
117 /// node is returned and must be inserted into a parent.
118 RopePieceBTreeNode *insert(unsigned Offset, const RopePiece &R);
119
120 /// erase - Remove NumBytes from this node at the specified offset. We are
121 /// guaranteed that there is a split at Offset.
122 void erase(unsigned Offset, unsigned NumBytes);
123 };
124
125//===----------------------------------------------------------------------===//
126// RopePieceBTreeLeaf Class
127//===----------------------------------------------------------------------===//
128
129 /// RopePieceBTreeLeaf - Directly manages up to '2*WidthFactor' RopePiece
130 /// nodes. This directly represents a chunk of the string with those
131/// RopePieces concatenated. Since this is a B+Tree, all values (in this case
132 /// instances of RopePiece) are stored in leaves like this. To make iteration
133 /// over the leaves efficient, they maintain a singly linked list through the
134 /// NextLeaf field. This allows the B+Tree forward iterator to be constant
135 /// time for all increments.
136 class RopePieceBTreeLeaf : public RopePieceBTreeNode {
137 /// NumPieces - This holds the number of rope pieces currently active in the
138 /// Pieces array.
139 unsigned char NumPieces = 0;
140
141 /// Pieces - This tracks the file chunks currently in this leaf.
142 RopePiece Pieces[2*WidthFactor];
143
144 /// NextLeaf - This is a pointer to the next leaf in the tree, allowing
145 /// efficient in-order forward iteration of the tree without traversal.
146 RopePieceBTreeLeaf **PrevLeaf = nullptr;
147 RopePieceBTreeLeaf *NextLeaf = nullptr;
148
149 public:
150 RopePieceBTreeLeaf() : RopePieceBTreeNode(true) {}
151
152 ~RopePieceBTreeLeaf() {
153 if (PrevLeaf || NextLeaf)
154 removeFromLeafInOrder();
155 clear();
156 }
157
158 bool isFull() const { return NumPieces == 2*WidthFactor; }
159
160 /// clear - Remove all rope pieces from this leaf.
161 void clear() {
162 while (NumPieces)
163 Pieces[--NumPieces] = RopePiece();
164 Size = 0;
165 }
166
167 unsigned getNumPieces() const { return NumPieces; }
168
169 const RopePiece &getPiece(unsigned i) const {
170 assert(i < getNumPieces() && "Invalid piece ID");
171 return Pieces[i];
172 }
173
174 const RopePieceBTreeLeaf *getNextLeafInOrder() const { return NextLeaf; }
175
176 void insertAfterLeafInOrder(RopePieceBTreeLeaf *Node) {
177 assert(!PrevLeaf && !NextLeaf && "Already in ordering");
178
179 NextLeaf = Node->NextLeaf;
180 if (NextLeaf)
181 NextLeaf->PrevLeaf = &NextLeaf;
182 PrevLeaf = &Node->NextLeaf;
183 Node->NextLeaf = this;
184 }
185
186 void removeFromLeafInOrder() {
187 if (PrevLeaf) {
188 *PrevLeaf = NextLeaf;
189 if (NextLeaf)
190 NextLeaf->PrevLeaf = PrevLeaf;
191 } else if (NextLeaf) {
192 NextLeaf->PrevLeaf = nullptr;
193 }
194 }
195
196 /// FullRecomputeSizeLocally - This method recomputes the 'Size' field by
197 /// summing the size of all RopePieces.
198 void FullRecomputeSizeLocally() {
199 Size = 0;
200 for (unsigned i = 0, e = getNumPieces(); i != e; ++i)
201 Size += getPiece(i).size();
202 }
203
204 /// split - Split the range containing the specified offset so that we are
205 /// guaranteed that there is a place to do an insertion at the specified
206 /// offset. The offset is relative, so "0" is the start of the node.
207 ///
208 /// If there is no space in this subtree for the extra piece, the extra tree
209 /// node is returned and must be inserted into a parent.
210 RopePieceBTreeNode *split(unsigned Offset);
211
212 /// insert - Insert the specified ropepiece into this tree node at the
213 /// specified offset. The offset is relative, so "0" is the start of the
214 /// node.
215 ///
216 /// If there is no space in this subtree for the extra piece, the extra tree
217 /// node is returned and must be inserted into a parent.
218 RopePieceBTreeNode *insert(unsigned Offset, const RopePiece &R);
219
220 /// erase - Remove NumBytes from this node at the specified offset. We are
221 /// guaranteed that there is a split at Offset.
222 void erase(unsigned Offset, unsigned NumBytes);
223
224 static bool classof(const RopePieceBTreeNode *N) {
225 return N->isLeaf();
226 }
227 };
228
229} // namespace
230
231/// split - Split the range containing the specified offset so that we are
232/// guaranteed that there is a place to do an insertion at the specified
233/// offset. The offset is relative, so "0" is the start of the node.
234///
235/// If there is no space in this subtree for the extra piece, the extra tree
236/// node is returned and must be inserted into a parent.
237RopePieceBTreeNode *RopePieceBTreeLeaf::split(unsigned Offset) {
238 // Find the insertion point. We are guaranteed that there is a split at the
239 // specified offset so find it.
240 if (Offset == 0 || Offset == size()) {
241 // Fastpath for a common case. There is already a splitpoint at the end.
242 return nullptr;
243 }
244
245 // Find the piece that this offset lands in.
246 unsigned PieceOffs = 0;
247 unsigned i = 0;
248 while (Offset >= PieceOffs+Pieces[i].size()) {
249 PieceOffs += Pieces[i].size();
250 ++i;
251 }
252
253 // If there is already a split point at the specified offset, just return
254 // success.
255 if (PieceOffs == Offset)
256 return nullptr;
257
258 // Otherwise, we need to split piece 'i' at Offset-PieceOffs. Convert Offset
259 // to being Piece relative.
260 unsigned IntraPieceOffset = Offset-PieceOffs;
261
262 // We do this by shrinking the RopePiece and then doing an insert of the tail.
263 RopePiece Tail(Pieces[i].StrData, Pieces[i].StartOffs+IntraPieceOffset,
264 Pieces[i].EndOffs);
265 Size -= Pieces[i].size();
266 Pieces[i].EndOffs = Pieces[i].StartOffs+IntraPieceOffset;
267 Size += Pieces[i].size();
268
269 return insert(Offset, Tail);
270}
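
Worked through with concrete numbers (a hypothetical piece, not taken from a real run): a piece with StartOffs == 10 and EndOffs == 30 split at IntraPieceOffset == 5 shrinks to cover [10, 15), while the inserted Tail covers [15, 30), so together they still describe the original 20 bytes:

#include <cassert>

int main() {
  unsigned StartOffs = 10, EndOffs = 30;              // piece covers 20 bytes
  unsigned IntraPieceOffset = 5;                      // split point, piece-relative
  unsigned TailStart = StartOffs + IntraPieceOffset;  // 15, where the tail begins
  unsigned HeadEnd = StartOffs + IntraPieceOffset;    // 15, the head's new EndOffs
  assert(HeadEnd - StartOffs == 5);                   // head keeps the first 5 bytes
  assert(EndOffs - TailStart == 15);                  // tail carries the other 15
  assert((HeadEnd - StartOffs) + (EndOffs - TailStart) == EndOffs - StartOffs);
}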
271
272/// insert - Insert the specified RopePiece into this tree node at the
273/// specified offset. The offset is relative, so "0" is the start of the node.
274///
275/// If there is no space in this subtree for the extra piece, the extra tree
276/// node is returned and must be inserted into a parent.
277RopePieceBTreeNode *RopePieceBTreeLeaf::insert(unsigned Offset,
278 const RopePiece &R) {
279 // If this node is not full, insert the piece.
280 if (!isFull()) {
281 // Find the insertion point. We are guaranteed that there is a split at the
282 // specified offset so find it.
283 unsigned i = 0, e = getNumPieces();
284 if (Offset == size()) {
285 // Fastpath for a common case.
286 i = e;
287 } else {
288 unsigned SlotOffs = 0;
289 for (; Offset > SlotOffs; ++i)
290 SlotOffs += getPiece(i).size();
291 assert(SlotOffs == Offset && "Split didn't occur before insertion!");
292 }
293
294 // For an insertion into a non-full leaf node, just insert the value in
295 // its sorted position. This requires moving later values over.
296 for (; i != e; --e)
297 Pieces[e] = Pieces[e-1];
298 Pieces[i] = R;
299 ++NumPieces;
300 Size += R.size();
301 return nullptr;
302 }
303
304 // Otherwise, if this leaf is full, split it in two halves. Since this
305 // node is full, it contains 2*WidthFactor values. We move the first
306 // 'WidthFactor' values to the LHS child (which we leave in this node) and
307 // move the last 'WidthFactor' values into the RHS child.
308
309 // Create the new node.
310 RopePieceBTreeLeaf *NewNode = new RopePieceBTreeLeaf();
311
312 // Move over the last 'WidthFactor' values from here to NewNode.
313 std::copy(&Pieces[WidthFactor], &Pieces[2*WidthFactor],
314 &NewNode->Pieces[0]);
315 // Replace old pieces with null RopePieces to drop refcounts.
316 std::fill(&Pieces[WidthFactor], &Pieces[2*WidthFactor], RopePiece());
317
318 // Decrease the number of values in the two nodes.
319 NewNode->NumPieces = NumPieces = WidthFactor;
320
321 // Recompute the two nodes' size.
322 NewNode->FullRecomputeSizeLocally();
323 FullRecomputeSizeLocally();
324
325 // Update the list of leaves.
326 NewNode->insertAfterLeafInOrder(this);
327
328 // These insertions can't fail.
329 if (this->size() >= Offset)
330 this->insert(Offset, R);
331 else
332 NewNode->insert(Offset - this->size(), R);
333 return NewNode;
334}
335
336/// erase - Remove NumBytes from this node at the specified offset. We are
337/// guaranteed that there is a split at Offset.
338void RopePieceBTreeLeaf::erase(unsigned Offset, unsigned NumBytes) {
339 // Since we are guaranteed that there is a split at Offset, we start by
340 // finding the Piece that starts there.
341 unsigned PieceOffs = 0;
342 unsigned i = 0;
343 for (; Offset > PieceOffs; ++i)
344 PieceOffs += getPiece(i).size();
345 assert(PieceOffs == Offset && "Split didn't occur before erase!");
346
347 unsigned StartPiece = i;
348
349 // Figure out how many pieces completely cover 'NumBytes'. We want to remove
350 // all of them.
351 for (; Offset+NumBytes > PieceOffs+getPiece(i).size(); ++i)
352 PieceOffs += getPiece(i).size();
353
354 // If we exactly include the last one, include it in the region to delete.
355 if (Offset+NumBytes == PieceOffs+getPiece(i).size()) {
356 PieceOffs += getPiece(i).size();
357 ++i;
358 }
359
360 // If we completely cover some RopePieces, erase them now.
361 if (i != StartPiece) {
362 unsigned NumDeleted = i-StartPiece;
363 for (; i != getNumPieces(); ++i)
364 Pieces[i-NumDeleted] = Pieces[i];
365
366 // Drop references to dead rope pieces.
367 std::fill(&Pieces[getNumPieces()-NumDeleted], &Pieces[getNumPieces()],
368 RopePiece());
369 NumPieces -= NumDeleted;
370
371 unsigned CoverBytes = PieceOffs-Offset;
372 NumBytes -= CoverBytes;
373 Size -= CoverBytes;
374 }
375
376 // If we completely removed some stuff, we could be done.
377 if (NumBytes == 0) return;
378
379 // Okay, now we might be erasing part of some Piece. If this is the case, then
380 // move the start point of the piece.
381 assert(getPiece(StartPiece).size() > NumBytes);
382 Pieces[StartPiece].StartOffs += NumBytes;
383
384 // The size of this node just shrank by NumBytes.
385 Size -= NumBytes;
386}
387
388//===----------------------------------------------------------------------===//
389// RopePieceBTreeInterior Class
390//===----------------------------------------------------------------------===//
391
392namespace {
393
394 /// RopePieceBTreeInterior - This represents an interior node in the B+Tree,
395 /// which holds up to 2*WidthFactor pointers to child nodes.
396 class RopePieceBTreeInterior : public RopePieceBTreeNode {
397 /// NumChildren - This holds the number of children currently active in the
398 /// Children array.
399 unsigned char NumChildren = 0;
400
401 RopePieceBTreeNode *Children[2*WidthFactor];
402
403 public:
404 RopePieceBTreeInterior() : RopePieceBTreeNode(false) {}
405
406 RopePieceBTreeInterior(RopePieceBTreeNode *LHS, RopePieceBTreeNode *RHS)
407 : RopePieceBTreeNode(false) {
408 Children[0] = LHS;
409 Children[1] = RHS;
410 NumChildren = 2;
411 Size = LHS->size() + RHS->size();
412 }
413
414 ~RopePieceBTreeInterior() {
415 for (unsigned i = 0, e = getNumChildren(); i != e; ++i)
416 Children[i]->Destroy();
417 }
418
419 bool isFull() const { return NumChildren == 2*WidthFactor; }
420
421 unsigned getNumChildren() const { return NumChildren; }
422
423 const RopePieceBTreeNode *getChild(unsigned i) const {
424 assert(i < NumChildren && "invalid child #");
425 return Children[i];
426 }
427
428 RopePieceBTreeNode *getChild(unsigned i) {
429 assert(i < NumChildren && "invalid child #");
430 return Children[i];
431 }
432
433 /// FullRecomputeSizeLocally - Recompute the Size field of this node by
434 /// summing up the sizes of the child nodes.
435 void FullRecomputeSizeLocally() {
436 Size = 0;
437 for (unsigned i = 0, e = getNumChildren(); i != e; ++i)
438 Size += getChild(i)->size();
439 }
440
441 /// split - Split the range containing the specified offset so that we are
442 /// guaranteed that there is a place to do an insertion at the specified
443 /// offset. The offset is relative, so "0" is the start of the node.
444 ///
445 /// If there is no space in this subtree for the extra piece, the extra tree
446 /// node is returned and must be inserted into a parent.
447 RopePieceBTreeNode *split(unsigned Offset);
448
449 /// insert - Insert the specified ropepiece into this tree node at the
450 /// specified offset. The offset is relative, so "0" is the start of the
451 /// node.
452 ///
453 /// If there is no space in this subtree for the extra piece, the extra tree
454 /// node is returned and must be inserted into a parent.
455 RopePieceBTreeNode *insert(unsigned Offset, const RopePiece &R);
456
457 /// HandleChildPiece - A child propagated an insertion result up to us.
458 /// Insert the new child, and/or propagate the result further up the tree.
459 RopePieceBTreeNode *HandleChildPiece(unsigned i, RopePieceBTreeNode *RHS);
460
461 /// erase - Remove NumBytes from this node at the specified offset. We are
462 /// guaranteed that there is a split at Offset.
463 void erase(unsigned Offset, unsigned NumBytes);
464
465 static bool classof(const RopePieceBTreeNode *N) {
466 return !N->isLeaf();
467 }
468 };
469
470} // namespace
471
472/// split - Split the range containing the specified offset so that we are
473/// guaranteed that there is a place to do an insertion at the specified
474/// offset. The offset is relative, so "0" is the start of the node.
475///
476/// If there is no space in this subtree for the extra piece, the extra tree
477/// node is returned and must be inserted into a parent.
478RopePieceBTreeNode *RopePieceBTreeInterior::split(unsigned Offset) {
479 // Figure out which child to split.
480 if (Offset == 0 || Offset == size())
481 return nullptr; // If we have an exact offset, we're already split.
482
483 unsigned ChildOffset = 0;
484 unsigned i = 0;
485 for (; Offset >= ChildOffset+getChild(i)->size(); ++i)
486 ChildOffset += getChild(i)->size();
487
488 // If already split there, we're done.
489 if (ChildOffset == Offset)
490 return nullptr;
491
492 // Otherwise, recursively split the child.
493 if (RopePieceBTreeNode *RHS = getChild(i)->split(Offset-ChildOffset))
494 return HandleChildPiece(i, RHS);
495 return nullptr; // Done!
496}
497
498/// insert - Insert the specified ropepiece into this tree node at the
499/// specified offset. The offset is relative, so "0" is the start of the
500/// node.
501///
502/// If there is no space in this subtree for the extra piece, the extra tree
503/// node is returned and must be inserted into a parent.
504RopePieceBTreeNode *RopePieceBTreeInterior::insert(unsigned Offset,
505 const RopePiece &R) {
506 // Find the insertion point. We are guaranteed that there is a split at the
507 // specified offset so find it.
508 unsigned i = 0, e = getNumChildren();
509
510 unsigned ChildOffs = 0;
511 if (Offset == size()) {
512 // Fastpath for a common case. Insert at end of last child.
513 i = e-1;
514 ChildOffs = size()-getChild(i)->size();
515 } else {
516 for (; Offset > ChildOffs+getChild(i)->size(); ++i)
517 ChildOffs += getChild(i)->size();
518 }
519
520 Size += R.size();
521
522 // Recursively insert into the child, with a child-relative offset.
523 if (RopePieceBTreeNode *RHS = getChild(i)->insert(Offset-ChildOffs, R))
524 return HandleChildPiece(i, RHS);
525
526 return nullptr;
527}
528
529/// HandleChildPiece - A child propagated an insertion result up to us.
530/// Insert the new child, and/or propagate the result further up the tree.
531RopePieceBTreeNode *
532RopePieceBTreeInterior::HandleChildPiece(unsigned i, RopePieceBTreeNode *RHS) {
533 // Otherwise the child propagated a subtree up to us as a new child. See if
534 // we have space for it here.
535 if (!isFull()) {
536 // Insert RHS after child 'i'.
537 if (i + 1 != getNumChildren())
538 memmove(&Children[i+2], &Children[i+1],
539 (getNumChildren()-i-1)*sizeof(Children[0]));
540 Children[i+1] = RHS;
541 ++NumChildren;
542 return nullptr;
543 }
544
545 // Okay, this node is full. Split it in half, moving WidthFactor children to
546 // a newly allocated interior node.
547
548 // Create the new node.
549 RopePieceBTreeInterior *NewNode = new RopePieceBTreeInterior();
550
551 // Move over the last 'WidthFactor' values from here to NewNode.
552 memcpy(&NewNode->Children[0], &Children[WidthFactor],
553 WidthFactor*sizeof(Children[0]));
554
555 // Decrease the number of values in the two nodes.
556 NewNode->NumChildren = NumChildren = WidthFactor;
557
558 // Finally, insert the new child on the side that can (now) hold it.
559 // These insertions can't fail.
560 if (i < WidthFactor)
561 this->HandleChildPiece(i, RHS);
562 else
563 NewNode->HandleChildPiece(i-WidthFactor, RHS);
564
565 // Recompute the two nodes' size.
566 NewNode->FullRecomputeSizeLocally();
567 FullRecomputeSizeLocally();
568 return NewNode;
569}
570
571/// erase - Remove NumBytes from this node at the specified offset. We are
572/// guaranteed that there is a split at Offset.
573void RopePieceBTreeInterior::erase(unsigned Offset, unsigned NumBytes) {
574 // This will shrink this node by NumBytes.
575 Size -= NumBytes;
576
577 // Find the first child that overlaps with Offset.
578 unsigned i = 0;
579 for (; Offset >= getChild(i)->size(); ++i)
580 Offset -= getChild(i)->size();
581
582 // Propagate the delete request into overlapping children, or completely
583 // delete the children as appropriate.
584 while (NumBytes) {
585 RopePieceBTreeNode *CurChild = getChild(i);
586
587 // If we are deleting something contained entirely in the child, pass on the
588 // request.
589 if (Offset+NumBytes < CurChild->size()) {
590 CurChild->erase(Offset, NumBytes);
591 return;
592 }
593
594 // If this deletion request starts somewhere in the middle of the child, it
595 // must be deleting to the end of the child.
596 if (Offset) {
597 unsigned BytesFromChild = CurChild->size()-Offset;
598 CurChild->erase(Offset, BytesFromChild);
599 NumBytes -= BytesFromChild;
600 // Start at the beginning of the next child.
601 Offset = 0;
602 ++i;
603 continue;
604 }
605
606 // If the deletion request completely covers the child, delete it and move
607 // the rest down.
608 NumBytes -= CurChild->size();
609 CurChild->Destroy();
610 --NumChildren;
611 if (i != getNumChildren())
612 memmove(&Children[i], &Children[i+1],
613 (getNumChildren()-i)*sizeof(Children[0]));
614 }
615}
616
617//===----------------------------------------------------------------------===//
618// RopePieceBTreeNode Implementation
619//===----------------------------------------------------------------------===//
620
621void RopePieceBTreeNode::Destroy() {
622 if (auto *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
623 delete Leaf;
624 else
625 delete cast<RopePieceBTreeInterior>(this);
626}
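
Destroy() relies on the LLVM-style RTTI hooks declared in the two classes above: dyn_cast consults the static classof of the target type, which here only inspects the IsLeaf flag set at construction. A minimal sketch of that mechanism, independent of llvm/Support/Casting.h (hypothetical names):

struct Node {
  const bool IsLeaf;
  explicit Node(bool L) : IsLeaf(L) {}
};
struct LeafNode : Node {
  LeafNode() : Node(true) {}
  static bool classof(const Node *N) { return N->IsLeaf; }
};
struct InteriorNode : Node {
  InteriorNode() : Node(false) {}
  static bool classof(const Node *N) { return !N->IsLeaf; }
};

// Roughly what dyn_cast<LeafNode>(N) expands to: test classof, then cast.
LeafNode *asLeaf(Node *N) {
  return LeafNode::classof(N) ? static_cast<LeafNode *>(N) : nullptr;
}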
627
628/// split - Split the range containing the specified offset so that we are
629/// guaranteed that there is a place to do an insertion at the specified
630/// offset. The offset is relative, so "0" is the start of the node.
631///
632/// If there is no space in this subtree for the extra piece, the extra tree
633/// node is returned and must be inserted into a parent.
634RopePieceBTreeNode *RopePieceBTreeNode::split(unsigned Offset) {
635 assert(Offset <= size() && "Invalid offset to split!");
636 if (auto *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
637 return Leaf->split(Offset);
638 return cast<RopePieceBTreeInterior>(this)->split(Offset);
639}
640
641/// insert - Insert the specified ropepiece into this tree node at the
642/// specified offset. The offset is relative, so "0" is the start of the
643/// node.
644///
645/// If there is no space in this subtree for the extra piece, the extra tree
646/// node is returned and must be inserted into a parent.
647RopePieceBTreeNode *RopePieceBTreeNode::insert(unsigned Offset,
648 const RopePiece &R) {
649 assert(Offset <= size() && "Invalid offset to insert!");
650 if (auto *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
651 return Leaf->insert(Offset, R);
652 return cast<RopePieceBTreeInterior>(this)->insert(Offset, R);
653}
654
655/// erase - Remove NumBytes from this node at the specified offset. We are
656/// guaranteed that there is a split at Offset.
657void RopePieceBTreeNode::erase(unsigned Offset, unsigned NumBytes) {
658 assert(Offset+NumBytes <= size() && "Invalid offset to erase!");
659 if (auto *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
660 return Leaf->erase(Offset, NumBytes);
661 return cast<RopePieceBTreeInterior>(this)->erase(Offset, NumBytes);
662}
663
664//===----------------------------------------------------------------------===//
665// RopePieceBTreeIterator Implementation
666//===----------------------------------------------------------------------===//
667
668static const RopePieceBTreeLeaf *getCN(const void *P) {
669 return static_cast<const RopePieceBTreeLeaf*>(P);
670}
671
672// begin iterator.
673RopePieceBTreeIterator::RopePieceBTreeIterator(const void *n) {
674 const auto *N = static_cast<const RopePieceBTreeNode *>(n);
675
676 // Walk down the left side of the tree until we get to a leaf.
677 while (const auto *IN = dyn_cast<RopePieceBTreeInterior>(N))
678 N = IN->getChild(0);
679
680 // We must have at least one leaf.
681 CurNode = cast<RopePieceBTreeLeaf>(N);
682
683 // If we found a leaf that happens to be empty, skip over it until we get
684 // to something full.
685 while (CurNode && getCN(CurNode)->getNumPieces() == 0)
686 CurNode = getCN(CurNode)->getNextLeafInOrder();
687
688 if (CurNode)
689 CurPiece = &getCN(CurNode)->getPiece(0);
690 else // Empty tree, this is an end() iterator.
691 CurPiece = nullptr;
692 CurChar = 0;
693}
694
695void RopePieceBTreeIterator::MoveToNextPiece() {
696 if (CurPiece != &getCN(CurNode)->getPiece(getCN(CurNode)->getNumPieces()-1)) {
697 CurChar = 0;
698 ++CurPiece;
699 return;
700 }
701
702 // Find the next non-empty leaf node.
703 do
704 CurNode = getCN(CurNode)->getNextLeafInOrder();
705 while (CurNode && getCN(CurNode)->getNumPieces() == 0);
706
707 if (CurNode)
708 CurPiece = &getCN(CurNode)->getPiece(0);
709 else // Hit end().
710 CurPiece = nullptr;
711 CurChar = 0;
712}
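
The constant-time increment promised by the RopePieceBTreeLeaf comment falls out of exactly this code: advancing either steps within the current leaf's Pieces array or follows one NextLeaf pointer, skipping empty leaves. A standalone sketch of that shape (hypothetical names):

struct LeafChain {
  int Pieces[4];
  unsigned NumPieces = 0;
  LeafChain *NextLeaf = nullptr;
};

// Advance the position (L, I) to the next piece, as MoveToNextPiece does;
// returns false once the end of the chain is reached.
bool advance(const LeafChain *&L, unsigned &I) {
  if (I + 1 < L->NumPieces) { // common case: stay inside this leaf
    ++I;
    return true;
  }
  do
    L = L->NextLeaf;          // one pointer hop per non-empty leaf
  while (L && L->NumPieces == 0);
  I = 0;
  return L != nullptr;
}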
713
714//===----------------------------------------------------------------------===//
715// RopePieceBTree Implementation
716//===----------------------------------------------------------------------===//
717
718static RopePieceBTreeNode *getRoot(void *P) {
719 return static_cast<RopePieceBTreeNode*>(P);
720}
721
722RopePieceBTree::RopePieceBTree() {
723 Root = new RopePieceBTreeLeaf();
724}
725
726RopePieceBTree::RopePieceBTree(const RopePieceBTree &RHS) {
727 assert(RHS.empty() && "Can't copy non-empty tree yet");
728 Root = new RopePieceBTreeLeaf();
729}
730
731RopePieceBTree::~RopePieceBTree() {
732 getRoot(Root)->Destroy();
733}
734
735unsigned RopePieceBTree::size() const {
736 return getRoot(Root)->size();
737}
738
739void RopePieceBTree::clear() {
740 if (auto *Leaf = dyn_cast<RopePieceBTreeLeaf>(getRoot(Root)))
741 Leaf->clear();
742 else {
743 getRoot(Root)->Destroy();
744 Root = new RopePieceBTreeLeaf();
745 }
746}
747
748void RopePieceBTree::insert(unsigned Offset, const RopePiece &R) {
749 // #1. Split at Offset.
750 if (RopePieceBTreeNode *RHS = getRoot(Root)->split(Offset))
751 Root = new RopePieceBTreeInterior(getRoot(Root), RHS);
752
753 // #2. Do the insertion.
754 if (RopePieceBTreeNode *RHS = getRoot(Root)->insert(Offset, R))
755 Root = new RopePieceBTreeInterior(getRoot(Root), RHS);
756}
757
758void RopePieceBTree::erase(unsigned Offset, unsigned NumBytes) {
759 // #1. Split at Offset.
760 if (RopePieceBTreeNode *RHS = getRoot(Root)->split(Offset))
761 Root = new RopePieceBTreeInterior(getRoot(Root), RHS);
762
763 // #2. Do the erasing.
764 getRoot(Root)->erase(Offset, NumBytes);
765}
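
Both mutators above share one two-phase shape: split the root at Offset so a piece boundary exists there, grow a new interior root if the split overflowed, then apply the edit. A compressed sketch of that shape (generic stand-in types, not the real signatures):

// NodeT must provide split(Offset), returning an overflow node or nullptr,
// and a two-node constructor for building a taller root, as the B+Tree
// node types in this file do.
template <typename NodeT, typename EditFn>
NodeT *editAt(NodeT *Root, unsigned Offset, EditFn Edit) {
  if (NodeT *RHS = Root->split(Offset)) // phase 1: ensure a boundary
    Root = new NodeT(Root, RHS);        // overflow grows the tree upward
  Edit(Root, Offset);                   // phase 2: insert (which handles a
                                        // second overflow the same way) or erase
  return Root;
}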
766
767//===----------------------------------------------------------------------===//
768// RewriteRope Implementation
769//===----------------------------------------------------------------------===//
770
771/// MakeRopeString - This copies the specified byte range into some instance of
772/// RopeRefCountString, and returns a RopePiece that represents it. This uses
773/// the AllocBuffer object to aggregate requests for small strings into one
774/// allocation instead of doing tons of tiny allocations.
775RopePiece RewriteRope::MakeRopeString(const char *Start, const char *End) {
776 unsigned Len = End-Start;
777 assert(Len && "Zero length RopePiece is invalid!");
778
779 // If we have space for this string in the current alloc buffer, use it.
780 if (AllocOffs+Len <= AllocChunkSize) {
 1. Assuming the condition is false
 2. Taking false branch
781 memcpy(AllocBuffer->Data+AllocOffs, Start, Len);
782 AllocOffs += Len;
783 return RopePiece(AllocBuffer, AllocOffs-Len, AllocOffs);
784 }
785
786 // If we don't have enough room because this specific allocation is huge,
787 // just allocate a new rope piece for it alone.
788 if (Len > AllocChunkSize) {
 3. Assuming 'Len' is > AllocChunkSize
 4. Taking true branch
789 unsigned Size = End-Start+sizeof(RopeRefCountString)-1;
790 auto *Res = reinterpret_cast<RopeRefCountString *>(new char[Size]);
 5. Memory is allocated
791 Res->RefCount = 0;
792 memcpy(Res->Data, Start, End-Start);
793 return RopePiece(Res, 0, End-Start);
 6. Calling '~IntrusiveRefCntPtr'
794 }
795
796 // Otherwise, this was a small request but we just don't have space for it.
797 // Make a new chunk and share it with later allocations.
798
799 unsigned AllocSize = offsetof(RopeRefCountString, Data) + AllocChunkSize;
800 auto *Res = reinterpret_cast<RopeRefCountString *>(new char[AllocSize]);
801 Res->RefCount = 0;
802 memcpy(Res->Data, Start, Len);
803 AllocBuffer = Res;
804 AllocOffs = Len;
805
806 return RopePiece(AllocBuffer, 0, Len);
807}
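
MakeRopeString is in essence a small bump allocator: short strings are copied into a shared RopeRefCountString chunk that later requests append to, while oversized requests get a dedicated allocation. The chunk-sharing logic reduces to the following standalone sketch (simplified; std::shared_ptr stands in for the intrusive count):

#include <cstring>
#include <memory>
#include <vector>

struct Chunk { std::vector<char> Data; };

class BumpStrings {
  static constexpr unsigned ChunkSize = 4096; // stand-in for AllocChunkSize
  std::shared_ptr<Chunk> Cur;                 // shared by the pieces carved from it
  unsigned Offs = ChunkSize;                  // "full", so the first use allocates

public:
  // Copies [S, S+Len) somewhere stable and reports the range via Start/End.
  std::shared_ptr<Chunk> copy(const char *S, unsigned Len,
                              unsigned &Start, unsigned &End) {
    if (Len > ChunkSize) {                    // huge request: dedicated chunk
      auto C = std::make_shared<Chunk>();
      C->Data.assign(S, S + Len);
      Start = 0; End = Len;
      return C;
    }
    if (Offs + Len > ChunkSize) {             // no room left: fresh shared chunk
      Cur = std::make_shared<Chunk>();
      Cur->Data.resize(ChunkSize);
      Offs = 0;
    }
    std::memcpy(Cur->Data.data() + Offs, S, Len);
    Start = Offs; End = Offs + Len; Offs = End;
    return Cur;                               // shared with earlier small pieces
  }
};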

/build/llvm-toolchain-snapshot-7~svn338205/include/llvm/ADT/IntrusiveRefCntPtr.h

1//==- llvm/ADT/IntrusiveRefCntPtr.h - Smart Refcounting Pointer --*- C++ -*-==//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This file defines the RefCountedBase, ThreadSafeRefCountedBase, and
11// IntrusiveRefCntPtr classes.
12//
13// IntrusiveRefCntPtr is a smart pointer to an object which maintains a
14// reference count. (ThreadSafe)RefCountedBase is a mixin class that adds a
15// refcount member variable and methods for updating the refcount. An object
16// that inherits from (ThreadSafe)RefCountedBase deletes itself when its
17// refcount hits zero.
18//
19// For example:
20//
21// class MyClass : public RefCountedBase<MyClass> {};
22//
23// void foo() {
24// // Constructing an IntrusiveRefCntPtr increases the pointee's refcount by
25// // 1 (from 0 in this case).
26// IntrusiveRefCntPtr<MyClass> Ptr1(new MyClass());
27//
28// // Copying an IntrusiveRefCntPtr increases the pointee's refcount by 1.
29// IntrusiveRefCntPtr<MyClass> Ptr2(Ptr1);
30//
31// // Move-constructing an IntrusiveRefCntPtr has no effect on the object's
32// // refcount. After a move, the moved-from pointer is null.
33// IntrusiveRefCntPtr<MyClass> Ptr3(std::move(Ptr1));
34// assert(Ptr1 == nullptr);
35//
36// // Clearing an IntrusiveRefCntPtr decreases the pointee's refcount by 1.
37// Ptr2.reset();
38//
39// // The object deletes itself when we return from the function, because
40// // Ptr3's destructor decrements its refcount to 0.
41// }
42//
43// You can use IntrusiveRefCntPtr with isa<T>(), dyn_cast<T>(), etc.:
44//
45// IntrusiveRefCntPtr<MyClass> Ptr(new MyClass());
46// OtherClass *Other = dyn_cast<OtherClass>(Ptr); // Ptr.get() not required
47//
48// IntrusiveRefCntPtr works with any class that
49//
50// - inherits from (ThreadSafe)RefCountedBase,
51// - has Retain() and Release() methods, or
52// - specializes IntrusiveRefCntPtrInfo.
53//
54//===----------------------------------------------------------------------===//
55
56#ifndef LLVM_ADT_INTRUSIVEREFCNTPTR_H
57#define LLVM_ADT_INTRUSIVEREFCNTPTR_H
58
59#include <atomic>
60#include <cassert>
61#include <cstddef>
62
63namespace llvm {
64
65/// A CRTP mixin class that adds reference counting to a type.
66///
67/// The lifetime of an object which inherits from RefCountedBase is managed by
68/// calls to Release() and Retain(), which increment and decrement the object's
69/// refcount, respectively. When a Release() call decrements the refcount to 0,
70/// the object deletes itself.
71template <class Derived> class RefCountedBase {
72 mutable unsigned RefCount = 0;
73
74public:
75 RefCountedBase() = default;
76 RefCountedBase(const RefCountedBase &) {}
77
78 void Retain() const { ++RefCount; }
79
80 void Release() const {
81 assert(RefCount > 0 && "Reference count is already zero.");
82 if (--RefCount == 0)
83 delete static_cast<const Derived *>(this);
84 }
85};
86
87/// A thread-safe version of \c RefCountedBase.
88template <class Derived> class ThreadSafeRefCountedBase {
89 mutable std::atomic<int> RefCount;
90
91protected:
92 ThreadSafeRefCountedBase() : RefCount(0) {}
93
94public:
95 void Retain() const { RefCount.fetch_add(1, std::memory_order_relaxed); }
96
97 void Release() const {
98 int NewRefCount = RefCount.fetch_sub(1, std::memory_order_acq_rel) - 1;
99 assert(NewRefCount >= 0 && "Reference count was already zero.");
100 if (NewRefCount == 0)
101 delete static_cast<const Derived *>(this);
102 }
103};
104
105/// Class you can specialize to provide custom retain/release functionality for
106/// a type.
107///
108/// Usually specializing this class is not necessary, as IntrusiveRefCntPtr
109/// works with any type which defines Retain() and Release() functions -- you
110/// can define those functions yourself if RefCountedBase doesn't work for you.
111///
112/// One case when you might want to specialize this type is if you have
113/// - Foo.h defines type Foo and includes Bar.h, and
114/// - Bar.h uses IntrusiveRefCntPtr<Foo> in inline functions.
115///
116/// Because Foo.h includes Bar.h, Bar.h can't include Foo.h in order to pull in
117/// the declaration of Foo. Without the declaration of Foo, normally Bar.h
118/// wouldn't be able to use IntrusiveRefCntPtr<Foo>, which wants to call
119/// T::Retain and T::Release.
120///
121/// To resolve this, Bar.h could include a third header, FooFwd.h, which
122/// forward-declares Foo and specializes IntrusiveRefCntPtrInfo<Foo>. Then
123/// Bar.h could use IntrusiveRefCntPtr<Foo>, although it still couldn't call any
124/// functions on Foo itself, because Foo would be an incomplete type.
125template <typename T> struct IntrusiveRefCntPtrInfo {
126 static void retain(T *obj) { obj->Retain(); }
127 static void release(T *obj) { obj->Release(); }
128};
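
The FooFwd.h arrangement described above would look roughly like this (Foo and the helper functions are the comment's hypothetical names, not real LLVM declarations):

// FooFwd.h -- forward-declares Foo and specializes the trait, so other
// headers can hold an IntrusiveRefCntPtr<Foo> without including Foo.h.
#include "llvm/ADT/IntrusiveRefCntPtr.h"

class Foo;              // incomplete here; defined in Foo.h
void retainFoo(Foo *);  // defined in Foo.cpp, where Foo is complete
void releaseFoo(Foo *);

namespace llvm {
template <> struct IntrusiveRefCntPtrInfo<Foo> {
  static void retain(Foo *F) { retainFoo(F); }
  static void release(Foo *F) { releaseFoo(F); }
};
} // namespace llvm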
129
130/// A smart pointer to a reference-counted object that inherits from
131/// RefCountedBase or ThreadSafeRefCountedBase.
132///
133/// This class increments its pointee's reference count when it is created, and
134/// decrements its refcount when it's destroyed (or is changed to point to a
135/// different object).
136template <typename T> class IntrusiveRefCntPtr {
137 T *Obj = nullptr;
138
139public:
140 using element_type = T;
141
142 explicit IntrusiveRefCntPtr() = default;
143 IntrusiveRefCntPtr(T *obj) : Obj(obj) { retain(); }
144 IntrusiveRefCntPtr(const IntrusiveRefCntPtr &S) : Obj(S.Obj) { retain(); }
145 IntrusiveRefCntPtr(IntrusiveRefCntPtr &&S) : Obj(S.Obj) { S.Obj = nullptr; }
146
147 template <class X>
148 IntrusiveRefCntPtr(IntrusiveRefCntPtr<X> &&S) : Obj(S.get()) {
149 S.Obj = nullptr;
150 }
151
152 template <class X>
153 IntrusiveRefCntPtr(const IntrusiveRefCntPtr<X> &S) : Obj(S.get()) {
154 retain();
155 }
156
157 ~IntrusiveRefCntPtr() { release(); }
 7. Potential leak of memory pointed to by field 'Obj'
158
159 IntrusiveRefCntPtr &operator=(IntrusiveRefCntPtr S) {
160 swap(S);
161 return *this;
162 }
163
164 T &operator*() const { return *Obj; }
165 T *operator->() const { return Obj; }
166 T *get() const { return Obj; }
167 explicit operator bool() const { return Obj; }
168
169 void swap(IntrusiveRefCntPtr &other) {
170 T *tmp = other.Obj;
171 other.Obj = Obj;
172 Obj = tmp;
173 }
174
175 void reset() {
176 release();
177 Obj = nullptr;
178 }
179
180 void resetWithoutRelease() { Obj = nullptr; }
181
182private:
183 void retain() {
184 if (Obj)
185 IntrusiveRefCntPtrInfo<T>::retain(Obj);
186 }
187
188 void release() {
189 if (Obj)
190 IntrusiveRefCntPtrInfo<T>::release(Obj);
191 }
192
193 template <typename X> friend class IntrusiveRefCntPtr;
194};
195
196template <class T, class U>
197inline bool operator==(const IntrusiveRefCntPtr<T> &A,
198 const IntrusiveRefCntPtr<U> &B) {
199 return A.get() == B.get();
200}
201
202template <class T, class U>
203inline bool operator!=(const IntrusiveRefCntPtr<T> &A,
204 const IntrusiveRefCntPtr<U> &B) {
205 return A.get() != B.get();
206}
207
208template <class T, class U>
209inline bool operator==(const IntrusiveRefCntPtr<T> &A, U *B) {
210 return A.get() == B;
211}
212
213template <class T, class U>
214inline bool operator!=(const IntrusiveRefCntPtr<T> &A, U *B) {
215 return A.get() != B;
216}
217
218template <class T, class U>
219inline bool operator==(T *A, const IntrusiveRefCntPtr<U> &B) {
220 return A == B.get();
221}
222
223template <class T, class U>
224inline bool operator!=(T *A, const IntrusiveRefCntPtr<U> &B) {
225 return A != B.get();
226}
227
228template <class T>
229bool operator==(std::nullptr_t A, const IntrusiveRefCntPtr<T> &B) {
230 return !B;
231}
232
233template <class T>
234bool operator==(const IntrusiveRefCntPtr<T> &A, std::nullptr_t B) {
235 return B == A;
236}
237
238template <class T>
239bool operator!=(std::nullptr_t A, const IntrusiveRefCntPtr<T> &B) {
240 return !(A == B);
241}
242
243template <class T>
244bool operator!=(const IntrusiveRefCntPtr<T> &A, std::nullptr_t B) {
245 return !(A == B);
246}
247
248// Make IntrusiveRefCntPtr work with dyn_cast, isa, and the other idioms from
249// Casting.h.
250template <typename From> struct simplify_type;
251
252template <class T> struct simplify_type<IntrusiveRefCntPtr<T>> {
253 using SimpleType = T *;
254
255 static SimpleType getSimplifiedValue(IntrusiveRefCntPtr<T> &Val) {
256 return Val.get();
257 }
258};
259
260template <class T> struct simplify_type<const IntrusiveRefCntPtr<T>> {
261 using SimpleType = /*const*/ T *;
262
263 static SimpleType getSimplifiedValue(const IntrusiveRefCntPtr<T> &Val) {
264 return Val.get();
265 }
266};
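
With these specializations in place, the casting machinery can look through the smart pointer, which is what permits the dyn_cast<OtherClass>(Ptr) usage shown in the header comment above. A hedged usage sketch (hypothetical hierarchy):

#include "llvm/ADT/IntrusiveRefCntPtr.h"
#include "llvm/Support/Casting.h"

struct Base : llvm::RefCountedBase<Base> {
  enum Kind { K_Base, K_Derived } TheKind;
  Base(Kind K = K_Base) : TheKind(K) {}
  virtual ~Base() = default;
};
struct Derived : Base {
  Derived() : Base(K_Derived) {}
  static bool classof(const Base *B) { return B->TheKind == K_Derived; }
};

void demo() {
  llvm::IntrusiveRefCntPtr<Base> P(new Derived());
  // simplify_type lets dyn_cast accept P directly, as if P.get() were passed;
  // the result is a non-owning raw pointer, and P keeps its reference.
  if (Derived *D = llvm::dyn_cast<Derived>(P))
    (void)D;
}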
267
268} // end namespace llvm
269
270#endif // LLVM_ADT_INTRUSIVEREFCNTPTR_H