//===- RewriteRope.cpp - Rope specialized for rewriter --------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the RewriteRope class, which is a powerful string class.
//
//===----------------------------------------------------------------------===//

#include "clang/Rewrite/Core/RewriteRope.h"
#include "clang/Basic/LLVM.h"
#include <algorithm>
#include <cassert>
#include <cstring>

using namespace llvm;

/// RewriteRope is a "strong" string class, designed to make insertions and
/// deletions in the middle of the string nearly constant time (really, they
/// are O(log N), but with a very low constant factor).
///
/// The implementation of this datastructure is a conceptual linear sequence of
/// RopePiece elements. Each RopePiece represents a view on a separately
/// allocated and reference counted string. This means that splitting a very
/// long string can be done in constant time by splitting a RopePiece that
/// references the whole string into two rope pieces that reference each half.
/// Once split, another string can be inserted in between the two halves by
/// inserting a RopePiece in between the two others. All of this is very
/// inexpensive: it takes time proportional to the number of RopePieces, not
/// the length of the strings they represent.
///
/// While a linear sequence of RopePieces is the conceptual model, the actual
/// implementation captures them in an adapted B+ Tree. Using a B+ tree (a
/// tree that keeps the values in the leaves, where each node contains a
/// reasonable number of pointers to children/values) allows us to maintain
/// efficient operation when the RewriteRope contains a *huge* number of
/// RopePieces. The basic idea of the B+ Tree is that it allows us to find
/// the RopePiece corresponding to some offset very efficiently, and it
/// automatically balances itself on insertions of RopePieces (which can
/// happen for both insertions and erases of string ranges).
///
/// The one wrinkle in the theory is that we don't attempt to keep the tree
/// properly balanced when erases happen. Erasing string data can either
/// insert new RopePieces (e.g. when the middle of some other rope piece is
/// deleted, which results in two rope pieces, which is just like an insert)
/// or it can reduce the number of RopePieces maintained by the B+Tree. In
/// the case when the number of RopePieces is reduced, we don't attempt to
/// maintain the standard 'invariant' that each node in the tree contains at
/// least 'WidthFactor' children/values. For our use cases, this doesn't seem
/// to matter.
///
/// The implementation below is built primarily in terms of three classes:
///
/// RopePieceBTreeNode - Common base class for:
///
/// RopePieceBTreeLeaf - Directly manages up to '2*WidthFactor' RopePiece
/// nodes. This directly represents a chunk of the string with those
/// RopePieces concatenated.
///
/// RopePieceBTreeInterior - An interior node in the B+ Tree, which manages
/// up to '2*WidthFactor' other nodes in the tree.

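// Illustrative sketch of the piece-splitting idea described above (not part
// of the original file): all three views alias one ref-counted buffer, so
// the split is O(1) no matter how long the underlying text is.
//
//   RopeRefCountString *Str = ...;  // holds "hello world" (11 bytes)
//   RopePiece Whole(Str, 0, 11);    // views "hello world"
//   RopePiece Head(Str, 0, 5);      // views "hello"
//   RopePiece Tail(Str, 5, 11);     // views " world"
//
// Replacing 'Whole' with the sequence [Head, Tail] opens a seam where a
// piece referencing newly inserted text can be spliced between them.
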
namespace {

//===----------------------------------------------------------------------===//
// RopePieceBTreeNode Class
//===----------------------------------------------------------------------===//

/// RopePieceBTreeNode - Common base class of RopePieceBTreeLeaf and
/// RopePieceBTreeInterior. This provides some 'virtual' dispatching methods
/// and a flag that determines which subclass the instance is. Also
/// important, this node knows the full extent of the node, including any
/// children that it has. This allows efficient skipping over entire subtrees
/// when looking for an offset in the BTree.
class RopePieceBTreeNode {
protected:
  /// WidthFactor - This controls the number of K/V slots held in the BTree:
  /// how wide it is. Each level of the BTree is guaranteed to have at least
  /// 'WidthFactor' elements in it (either ropepieces or children), (except
  /// the root, which may have fewer) and may have at most 2*WidthFactor
  /// elements.
  enum { WidthFactor = 8 };

  /// Size - This is the number of bytes of file this node (including any
  /// potential children) covers.
  unsigned Size = 0;

  /// IsLeaf - True if this is an instance of RopePieceBTreeLeaf, false if it
  /// is an instance of RopePieceBTreeInterior.
  bool IsLeaf;

  RopePieceBTreeNode(bool isLeaf) : IsLeaf(isLeaf) {}
  ~RopePieceBTreeNode() = default;

public:
  bool isLeaf() const { return IsLeaf; }
  unsigned size() const { return Size; }

  void Destroy();

  /// split - Split the range containing the specified offset so that we are
  /// guaranteed that there is a place to do an insertion at the specified
  /// offset. The offset is relative, so "0" is the start of the node.
  ///
  /// If there is no space in this subtree for the extra piece, the extra tree
  /// node is returned and must be inserted into a parent.
  RopePieceBTreeNode *split(unsigned Offset);

  /// insert - Insert the specified ropepiece into this tree node at the
  /// specified offset. The offset is relative, so "0" is the start of the
  /// node.
  ///
  /// If there is no space in this subtree for the extra piece, the extra tree
  /// node is returned and must be inserted into a parent.
  RopePieceBTreeNode *insert(unsigned Offset, const RopePiece &R);

  /// erase - Remove NumBytes from this node at the specified offset. We are
  /// guaranteed that there is a split at Offset.
  void erase(unsigned Offset, unsigned NumBytes);
};
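
// Sketch of the split/insert contract above (illustrative; it mirrors what
// RopePieceBTree::insert does at the root near the end of this file): when
// an operation overflows a node, the spilled right-hand sibling comes back
// as the return value and the caller grafts it in one level up.
//
//   if (RopePieceBTreeNode *RHS = Node->insert(Offset, Piece))
//     Node = new RopePieceBTreeInterior(Node, RHS); // tree grows at the top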

//===----------------------------------------------------------------------===//
// RopePieceBTreeLeaf Class
//===----------------------------------------------------------------------===//

/// RopePieceBTreeLeaf - Directly manages up to '2*WidthFactor' RopePiece
/// nodes. This directly represents a chunk of the string with those
/// RopePieces concatenated. Since this is a B+Tree, all values (in this case
/// instances of RopePiece) are stored in leaves like this. To make iteration
/// over the leaves efficient, they maintain a singly linked list through the
/// NextLeaf field. This allows the B+Tree forward iterator to be constant
/// time for all increments.
class RopePieceBTreeLeaf : public RopePieceBTreeNode {
  /// NumPieces - This holds the number of rope pieces currently active in the
  /// Pieces array.
  unsigned char NumPieces = 0;

  /// Pieces - This tracks the file chunks currently in this leaf.
  RopePiece Pieces[2 * WidthFactor];

  /// NextLeaf - This is a pointer to the next leaf in the tree, allowing
  /// efficient in-order forward iteration of the tree without traversal.
  RopePieceBTreeLeaf **PrevLeaf = nullptr;
  RopePieceBTreeLeaf *NextLeaf = nullptr;

public:
  RopePieceBTreeLeaf() : RopePieceBTreeNode(true) {}

  ~RopePieceBTreeLeaf() {
    if (PrevLeaf || NextLeaf)
      removeFromLeafInOrder();
    clear();
  }

  bool isFull() const { return NumPieces == 2 * WidthFactor; }

  /// clear - Remove all rope pieces from this leaf.
  void clear() {
    while (NumPieces)
      Pieces[--NumPieces] = RopePiece();
    Size = 0;
  }

  unsigned getNumPieces() const { return NumPieces; }

  const RopePiece &getPiece(unsigned i) const {
    assert(i < getNumPieces() && "Invalid piece ID");
    return Pieces[i];
  }

  const RopePieceBTreeLeaf *getNextLeafInOrder() const { return NextLeaf; }

  void insertAfterLeafInOrder(RopePieceBTreeLeaf *Node) {
    assert(!PrevLeaf && !NextLeaf && "Already in ordering");

    NextLeaf = Node->NextLeaf;
    if (NextLeaf)
      NextLeaf->PrevLeaf = &NextLeaf;
    PrevLeaf = &Node->NextLeaf;
    Node->NextLeaf = this;
  }

  void removeFromLeafInOrder() {
    if (PrevLeaf) {
      *PrevLeaf = NextLeaf;
      if (NextLeaf)
        NextLeaf->PrevLeaf = PrevLeaf;
    } else if (NextLeaf) {
      NextLeaf->PrevLeaf = nullptr;
    }
  }
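
  // Note on the two methods above (illustrative, not from the original
  // file): PrevLeaf points at the previous leaf's NextLeaf *field*, not at
  // the leaf itself, so unlinking works uniformly even for the first leaf.
  // After B->insertAfterLeafInOrder(A), with C as A's old successor:
  //
  //   A.NextLeaf --> B      B.PrevLeaf == &A.NextLeaf
  //   B.NextLeaf --> C      C.PrevLeaf == &B.NextLeaf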

  /// FullRecomputeSizeLocally - This method recomputes the 'Size' field by
  /// summing the size of all RopePieces.
  void FullRecomputeSizeLocally() {
    Size = 0;
    for (unsigned i = 0, e = getNumPieces(); i != e; ++i)
      Size += getPiece(i).size();
  }

  /// split - Split the range containing the specified offset so that we are
  /// guaranteed that there is a place to do an insertion at the specified
  /// offset. The offset is relative, so "0" is the start of the node.
  ///
  /// If there is no space in this subtree for the extra piece, the extra tree
  /// node is returned and must be inserted into a parent.
  RopePieceBTreeNode *split(unsigned Offset);

  /// insert - Insert the specified ropepiece into this tree node at the
  /// specified offset. The offset is relative, so "0" is the start of the
  /// node.
  ///
  /// If there is no space in this subtree for the extra piece, the extra tree
  /// node is returned and must be inserted into a parent.
  RopePieceBTreeNode *insert(unsigned Offset, const RopePiece &R);

  /// erase - Remove NumBytes from this node at the specified offset. We are
  /// guaranteed that there is a split at Offset.
  void erase(unsigned Offset, unsigned NumBytes);

  static bool classof(const RopePieceBTreeNode *N) { return N->isLeaf(); }
};

} // namespace

/// split - Split the range containing the specified offset so that we are
/// guaranteed that there is a place to do an insertion at the specified
/// offset. The offset is relative, so "0" is the start of the node.
///
/// If there is no space in this subtree for the extra piece, the extra tree
/// node is returned and must be inserted into a parent.
RopePieceBTreeNode *RopePieceBTreeLeaf::split(unsigned Offset) {
  // Find the insertion point. We are guaranteed that there is a split at the
  // specified offset so find it.
  if (Offset == 0 || Offset == size()) {
    // Fastpath for a common case. There is already a split point at the end.
    return nullptr;
  }

  // Find the piece that this offset lands in.
  unsigned PieceOffs = 0;
  unsigned i = 0;
  while (Offset >= PieceOffs + Pieces[i].size()) {
    PieceOffs += Pieces[i].size();
    ++i;
  }

  // If there is already a split point at the specified offset, just return
  // success.
  if (PieceOffs == Offset)
    return nullptr;

  // Otherwise, we need to split piece 'i' at Offset-PieceOffs. Convert Offset
  // to being Piece relative.
  unsigned IntraPieceOffset = Offset - PieceOffs;

  // We do this by shrinking the RopePiece and then doing an insert of the tail.
  RopePiece Tail(Pieces[i].StrData, Pieces[i].StartOffs + IntraPieceOffset,
                 Pieces[i].EndOffs);
  Size -= Pieces[i].size();
  Pieces[i].EndOffs = Pieces[i].StartOffs + IntraPieceOffset;
  Size += Pieces[i].size();

  return insert(Offset, Tail);
}
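
// Worked example for split() above (illustrative numbers): splitting at an
// offset that lands 4 bytes into a piece viewing buffer bytes [10,20)
// shrinks the piece in place and creates a tail view; no characters move.
//
//   before:  Pieces[i] == (Str, 10, 20)                  // 10 bytes
//   after:   Pieces[i] == (Str, 10, 14), Tail == (Str, 14, 20)
//
// The final insert(Offset, Tail) then slots Tail right after Pieces[i].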

/// insert - Insert the specified RopePiece into this tree node at the
/// specified offset. The offset is relative, so "0" is the start of the node.
///
/// If there is no space in this subtree for the extra piece, the extra tree
/// node is returned and must be inserted into a parent.
RopePieceBTreeNode *RopePieceBTreeLeaf::insert(unsigned Offset,
                                               const RopePiece &R) {
  // If this node is not full, insert the piece.
  if (!isFull()) {
    // Find the insertion point. We are guaranteed that there is a split at the
    // specified offset so find it.
    unsigned i = 0, e = getNumPieces();
    if (Offset == size()) {
      // Fastpath for a common case.
      i = e;
    } else {
      unsigned SlotOffs = 0;
      for (; Offset > SlotOffs; ++i)
        SlotOffs += getPiece(i).size();
      assert(SlotOffs == Offset && "Split didn't occur before insertion!");
    }

    // For an insertion into a non-full leaf node, just insert the value in
    // its sorted position. This requires moving later values over.
    for (; i != e; --e)
      Pieces[e] = Pieces[e - 1];
    Pieces[i] = R;
    ++NumPieces;
    Size += R.size();
    return nullptr;
  }

  // Otherwise, this leaf is full: split it in two halves. Since this
  // node is full, it contains 2*WidthFactor values. We move the first
  // 'WidthFactor' values to the LHS child (which we leave in this node) and
  // move the last 'WidthFactor' values into the RHS child.

  // Create the new node.
  RopePieceBTreeLeaf *NewNode = new RopePieceBTreeLeaf();

  // Move over the last 'WidthFactor' values from here to NewNode.
  std::copy(&Pieces[WidthFactor], &Pieces[2 * WidthFactor],
            &NewNode->Pieces[0]);
  // Replace old pieces with null RopePieces to drop refcounts.
  std::fill(&Pieces[WidthFactor], &Pieces[2 * WidthFactor], RopePiece());

  // Decrease the number of values in the two nodes.
  NewNode->NumPieces = NumPieces = WidthFactor;

  // Recompute the two nodes' size.
  NewNode->FullRecomputeSizeLocally();
  FullRecomputeSizeLocally();

  // Update the list of leaves.
  NewNode->insertAfterLeafInOrder(this);

  // These insertions can't fail.
  if (this->size() >= Offset)
    this->insert(Offset, R);
  else
    NewNode->insert(Offset - this->size(), R);
  return NewNode;
}
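
// Illustrative trace of the overflow path above, with WidthFactor == 8:
//
//   full leaf:  [p0 ... p15]           // 16 pieces, no free slot
//   split:      this == [p0 ... p7]    NewNode == [p8 ... p15]
//   retry:      the insert lands in whichever half now contains Offset
//   return:     NewNode, for the parent to graft in (HandleChildPiece)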

/// erase - Remove NumBytes from this node at the specified offset. We are
/// guaranteed that there is a split at Offset.
void RopePieceBTreeLeaf::erase(unsigned Offset, unsigned NumBytes) {
  // Since we are guaranteed that there is a split at Offset, we start by
  // finding the Piece that starts there.
  unsigned PieceOffs = 0;
  unsigned i = 0;
  for (; Offset > PieceOffs; ++i)
    PieceOffs += getPiece(i).size();
  assert(PieceOffs == Offset && "Split didn't occur before erase!");

  unsigned StartPiece = i;

  // Figure out how many pieces completely cover 'NumBytes'. We want to remove
  // all of them.
  for (; Offset + NumBytes > PieceOffs + getPiece(i).size(); ++i)
    PieceOffs += getPiece(i).size();

  // If we exactly include the last one, include it in the region to delete.
  if (Offset + NumBytes == PieceOffs + getPiece(i).size()) {
    PieceOffs += getPiece(i).size();
    ++i;
  }

  // If we completely cover some RopePieces, erase them now.
  if (i != StartPiece) {
    unsigned NumDeleted = i - StartPiece;
    for (; i != getNumPieces(); ++i)
      Pieces[i - NumDeleted] = Pieces[i];

    // Drop references to dead rope pieces.
    std::fill(&Pieces[getNumPieces() - NumDeleted], &Pieces[getNumPieces()],
              RopePiece());
    NumPieces -= NumDeleted;

    unsigned CoverBytes = PieceOffs - Offset;
    NumBytes -= CoverBytes;
    Size -= CoverBytes;
  }

  // If that removed everything requested, we're done.
  if (NumBytes == 0)
    return;

  // Otherwise we are erasing a prefix of the piece now at StartPiece, so
  // move its start point forward.
  assert(getPiece(StartPiece).size() > NumBytes);
  Pieces[StartPiece].StartOffs += NumBytes;

  // The size of this node just shrunk by NumBytes.
  Size -= NumBytes;
}
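
// Worked example for erase() above (illustrative numbers): with pieces of
// sizes [4, 6, 5] in a leaf, erase(Offset=4, NumBytes=8) drops the 6-byte
// piece outright (it is fully covered) and then trims the remaining 2 bytes
// off the front of the 5-byte piece:
//
//   [4][6][5]  -->  [4][3]             // Size shrinks by exactly 8 bytes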

//===----------------------------------------------------------------------===//
// RopePieceBTreeInterior Class
//===----------------------------------------------------------------------===//

namespace {

/// RopePieceBTreeInterior - This represents an interior node in the B+Tree,
/// which holds up to 2*WidthFactor pointers to child nodes.
class RopePieceBTreeInterior : public RopePieceBTreeNode {
  /// NumChildren - This holds the number of children currently active in the
  /// Children array.
  unsigned char NumChildren = 0;

  RopePieceBTreeNode *Children[2 * WidthFactor];

public:
  RopePieceBTreeInterior() : RopePieceBTreeNode(false) {}

  RopePieceBTreeInterior(RopePieceBTreeNode *LHS, RopePieceBTreeNode *RHS)
      : RopePieceBTreeNode(false) {
    Children[0] = LHS;
    Children[1] = RHS;
    NumChildren = 2;
    Size = LHS->size() + RHS->size();
  }

  ~RopePieceBTreeInterior() {
    for (unsigned i = 0, e = getNumChildren(); i != e; ++i)
      Children[i]->Destroy();
  }

  bool isFull() const { return NumChildren == 2 * WidthFactor; }

  unsigned getNumChildren() const { return NumChildren; }

  const RopePieceBTreeNode *getChild(unsigned i) const {
    assert(i < NumChildren && "invalid child #");
    return Children[i];
  }

  RopePieceBTreeNode *getChild(unsigned i) {
    assert(i < NumChildren && "invalid child #");
    return Children[i];
  }

  /// FullRecomputeSizeLocally - Recompute the Size field of this node by
  /// summing up the sizes of the child nodes.
  void FullRecomputeSizeLocally() {
    Size = 0;
    for (unsigned i = 0, e = getNumChildren(); i != e; ++i)
      Size += getChild(i)->size();
  }

  /// split - Split the range containing the specified offset so that we are
  /// guaranteed that there is a place to do an insertion at the specified
  /// offset. The offset is relative, so "0" is the start of the node.
  ///
  /// If there is no space in this subtree for the extra piece, the extra tree
  /// node is returned and must be inserted into a parent.
  RopePieceBTreeNode *split(unsigned Offset);

  /// insert - Insert the specified ropepiece into this tree node at the
  /// specified offset. The offset is relative, so "0" is the start of the
  /// node.
  ///
  /// If there is no space in this subtree for the extra piece, the extra tree
  /// node is returned and must be inserted into a parent.
  RopePieceBTreeNode *insert(unsigned Offset, const RopePiece &R);

  /// HandleChildPiece - A child propagated an insertion result up to us.
  /// Insert the new child, and/or propagate the result further up the tree.
  RopePieceBTreeNode *HandleChildPiece(unsigned i, RopePieceBTreeNode *RHS);

  /// erase - Remove NumBytes from this node at the specified offset. We are
  /// guaranteed that there is a split at Offset.
  void erase(unsigned Offset, unsigned NumBytes);

  static bool classof(const RopePieceBTreeNode *N) { return !N->isLeaf(); }
};

} // namespace

/// split - Split the range containing the specified offset so that we are
/// guaranteed that there is a place to do an insertion at the specified
/// offset. The offset is relative, so "0" is the start of the node.
///
/// If there is no space in this subtree for the extra piece, the extra tree
/// node is returned and must be inserted into a parent.
RopePieceBTreeNode *RopePieceBTreeInterior::split(unsigned Offset) {
  // Figure out which child to split.
  if (Offset == 0 || Offset == size())
    return nullptr; // If we have an exact offset, we're already split.

  unsigned ChildOffset = 0;
  unsigned i = 0;
  for (; Offset >= ChildOffset + getChild(i)->size(); ++i)
    ChildOffset += getChild(i)->size();

  // If already split there, we're done.
  if (ChildOffset == Offset)
    return nullptr;

  // Otherwise, recursively split the child.
  if (RopePieceBTreeNode *RHS = getChild(i)->split(Offset - ChildOffset))
    return HandleChildPiece(i, RHS);
  return nullptr; // Done!
}

/// insert - Insert the specified ropepiece into this tree node at the
/// specified offset. The offset is relative, so "0" is the start of the
/// node.
///
/// If there is no space in this subtree for the extra piece, the extra tree
/// node is returned and must be inserted into a parent.
RopePieceBTreeNode *RopePieceBTreeInterior::insert(unsigned Offset,
                                                   const RopePiece &R) {
  // Find the insertion point. We are guaranteed that there is a split at the
  // specified offset so find it.
  unsigned i = 0, e = getNumChildren();

  unsigned ChildOffs = 0;
  if (Offset == size()) {
    // Fastpath for a common case. Insert at end of last child.
    i = e - 1;
    ChildOffs = size() - getChild(i)->size();
  } else {
    for (; Offset > ChildOffs + getChild(i)->size(); ++i)
      ChildOffs += getChild(i)->size();
  }

  Size += R.size();

  // Insert into the child at its child-relative offset.
  if (RopePieceBTreeNode *RHS = getChild(i)->insert(Offset - ChildOffs, R))
    return HandleChildPiece(i, RHS);

  return nullptr;
}

/// HandleChildPiece - A child propagated an insertion result up to us.
/// Insert the new child, and/or propagate the result further up the tree.
RopePieceBTreeNode *
RopePieceBTreeInterior::HandleChildPiece(unsigned i, RopePieceBTreeNode *RHS) {
  // The child propagated a subtree up to us as a new child. See if we have
  // space for it here.
  if (!isFull()) {
    // Insert RHS after child 'i'.
    if (i + 1 != getNumChildren())
      memmove(&Children[i + 2], &Children[i + 1],
              (getNumChildren() - i - 1) * sizeof(Children[0]));
    Children[i + 1] = RHS;
    ++NumChildren;
    return nullptr;
  }

  // Okay, this node is full. Split it in half, moving WidthFactor children to
  // a newly allocated interior node.

  // Create the new node.
  RopePieceBTreeInterior *NewNode = new RopePieceBTreeInterior();

  // Move over the last 'WidthFactor' values from here to NewNode.
  memcpy(&NewNode->Children[0], &Children[WidthFactor],
         WidthFactor * sizeof(Children[0]));

  // Decrease the number of values in the two nodes.
  NewNode->NumChildren = NumChildren = WidthFactor;

  // Finally, insert the new child into the side that can (now) hold it.
  // This insertion can't fail.
  if (i < WidthFactor)
    this->HandleChildPiece(i, RHS);
  else
    NewNode->HandleChildPiece(i - WidthFactor, RHS);

  // Recompute the two nodes' size.
  NewNode->FullRecomputeSizeLocally();
  FullRecomputeSizeLocally();
  return NewNode;
}
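
// Illustrative trace of HandleChildPiece above, with WidthFactor == 8: if
// this node already has 16 children when child 'i' spills RHS, the node
// splits into two 8-child halves, RHS is re-inserted beside child 'i' in
// whichever half now holds it (that recursive call cannot overflow), and
// the right half is returned for the grandparent to graft in.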

/// erase - Remove NumBytes from this node at the specified offset. We are
/// guaranteed that there is a split at Offset.
void RopePieceBTreeInterior::erase(unsigned Offset, unsigned NumBytes) {
  // This will shrink this node by NumBytes.
  Size -= NumBytes;

  // Find the first child that overlaps with Offset.
  unsigned i = 0;
  for (; Offset >= getChild(i)->size(); ++i)
    Offset -= getChild(i)->size();

  // Propagate the delete request into overlapping children, or completely
  // delete the children as appropriate.
  while (NumBytes) {
    RopePieceBTreeNode *CurChild = getChild(i);

    // If we are deleting something contained entirely in the child, pass on
    // the request.
    if (Offset + NumBytes < CurChild->size()) {
      CurChild->erase(Offset, NumBytes);
      return;
    }

    // If this deletion request starts somewhere in the middle of the child, it
    // must be deleting to the end of the child.
    if (Offset) {
      unsigned BytesFromChild = CurChild->size() - Offset;
      CurChild->erase(Offset, BytesFromChild);
      NumBytes -= BytesFromChild;
      // Start at the beginning of the next child.
      Offset = 0;
      ++i;
      continue;
    }

    // If the deletion request completely covers the child, delete it and move
    // the rest down.
    NumBytes -= CurChild->size();
    CurChild->Destroy();
    --NumChildren;
    if (i != getNumChildren())
      memmove(&Children[i], &Children[i + 1],
              (getNumChildren() - i) * sizeof(Children[0]));
  }
}
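
// Illustrative trace of the erase loop above: erasing 10 bytes at Offset 3
// from children of sizes [5, 4, 6] exercises all three cases in order:
//
//   child0 (size 5): Offset != 0, erase its last 2 bytes, advance to child1
//   child1 (size 4): fully covered, Destroy() it and shift children down
//   child2 (size 6): the remaining 4 bytes fit inside it, recurse and return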

//===----------------------------------------------------------------------===//
// RopePieceBTreeNode Implementation
//===----------------------------------------------------------------------===//

void RopePieceBTreeNode::Destroy() {
  if (auto *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
    delete Leaf;
  else
    delete cast<RopePieceBTreeInterior>(this);
}

/// split - Split the range containing the specified offset so that we are
/// guaranteed that there is a place to do an insertion at the specified
/// offset. The offset is relative, so "0" is the start of the node.
///
/// If there is no space in this subtree for the extra piece, the extra tree
/// node is returned and must be inserted into a parent.
RopePieceBTreeNode *RopePieceBTreeNode::split(unsigned Offset) {
  assert(Offset <= size() && "Invalid offset to split!");
  if (auto *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
    return Leaf->split(Offset);
  return cast<RopePieceBTreeInterior>(this)->split(Offset);
}

/// insert - Insert the specified ropepiece into this tree node at the
/// specified offset. The offset is relative, so "0" is the start of the
/// node.
///
/// If there is no space in this subtree for the extra piece, the extra tree
/// node is returned and must be inserted into a parent.
RopePieceBTreeNode *RopePieceBTreeNode::insert(unsigned Offset,
                                               const RopePiece &R) {
  assert(Offset <= size() && "Invalid offset to insert!");
  if (auto *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
    return Leaf->insert(Offset, R);
  return cast<RopePieceBTreeInterior>(this)->insert(Offset, R);
}

/// erase - Remove NumBytes from this node at the specified offset. We are
/// guaranteed that there is a split at Offset.
void RopePieceBTreeNode::erase(unsigned Offset, unsigned NumBytes) {
  assert(Offset + NumBytes <= size() && "Invalid offset to erase!");
  if (auto *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
    return Leaf->erase(Offset, NumBytes);
  return cast<RopePieceBTreeInterior>(this)->erase(Offset, NumBytes);
}

//===----------------------------------------------------------------------===//
// RopePieceBTreeIterator Implementation
//===----------------------------------------------------------------------===//

static const RopePieceBTreeLeaf *getCN(const void *P) {
  return static_cast<const RopePieceBTreeLeaf *>(P);
}

// begin iterator.
RopePieceBTreeIterator::RopePieceBTreeIterator(const void *n) {
  const auto *N = static_cast<const RopePieceBTreeNode *>(n);

  // Walk down the left side of the tree until we get to a leaf.
  while (const auto *IN = dyn_cast<RopePieceBTreeInterior>(N))
    N = IN->getChild(0);

  // We must have at least one leaf.
  CurNode = cast<RopePieceBTreeLeaf>(N);

  // If we found a leaf that happens to be empty, skip over it until we get
  // to something full.
  while (CurNode && getCN(CurNode)->getNumPieces() == 0)
    CurNode = getCN(CurNode)->getNextLeafInOrder();

  if (CurNode)
    CurPiece = &getCN(CurNode)->getPiece(0);
  else // Empty tree, this is an end() iterator.
    CurPiece = nullptr;
  CurChar = 0;
}

void RopePieceBTreeIterator::MoveToNextPiece() {
  if (CurPiece !=
      &getCN(CurNode)->getPiece(getCN(CurNode)->getNumPieces() - 1)) {
    CurChar = 0;
    ++CurPiece;
    return;
  }

  // Find the next non-empty leaf node.
  do
    CurNode = getCN(CurNode)->getNextLeafInOrder();
  while (CurNode && getCN(CurNode)->getNumPieces() == 0);

  if (CurNode)
    CurPiece = &getCN(CurNode)->getPiece(0);
  else // Hit end().
    CurPiece = nullptr;
  CurChar = 0;
}
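
// Illustrative use of this iterator through RewriteRope's public interface
// (a sketch; it assumes the begin()/end() and insert() declarations in
// RewriteRope.h):
//
//   RewriteRope Rope;
//   const char *Text = "hello world";
//   Rope.insert(0, Text, Text + 11);
//   std::string Out;
//   for (RewriteRope::iterator I = Rope.begin(), E = Rope.end(); I != E; ++I)
//     Out += *I;  // reassembles "hello world", hopping leaves in O(1)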

//===----------------------------------------------------------------------===//
// RopePieceBTree Implementation
//===----------------------------------------------------------------------===//

static RopePieceBTreeNode *getRoot(void *P) {
  return static_cast<RopePieceBTreeNode *>(P);
}

RopePieceBTree::RopePieceBTree() { Root = new RopePieceBTreeLeaf(); }

RopePieceBTree::RopePieceBTree(const RopePieceBTree &RHS) {
  assert(RHS.empty() && "Can't copy non-empty tree yet");
  Root = new RopePieceBTreeLeaf();
}

RopePieceBTree::~RopePieceBTree() { getRoot(Root)->Destroy(); }

unsigned RopePieceBTree::size() const { return getRoot(Root)->size(); }

void RopePieceBTree::clear() {
  if (auto *Leaf = dyn_cast<RopePieceBTreeLeaf>(getRoot(Root)))
    Leaf->clear();
  else {
    getRoot(Root)->Destroy();
    Root = new RopePieceBTreeLeaf();
  }
}

void RopePieceBTree::insert(unsigned Offset, const RopePiece &R) {
  // #1. Split at Offset.
  if (RopePieceBTreeNode *RHS = getRoot(Root)->split(Offset))
    Root = new RopePieceBTreeInterior(getRoot(Root), RHS);

  // #2. Do the insertion.
  if (RopePieceBTreeNode *RHS = getRoot(Root)->insert(Offset, R))
    Root = new RopePieceBTreeInterior(getRoot(Root), RHS);
}

void RopePieceBTree::erase(unsigned Offset, unsigned NumBytes) {
  // #1. Split at Offset.
  if (RopePieceBTreeNode *RHS = getRoot(Root)->split(Offset))
    Root = new RopePieceBTreeInterior(getRoot(Root), RHS);

  // #2. Do the erasing.
  getRoot(Root)->erase(Offset, NumBytes);
}
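
// Sketch of root growth under the two-phase scheme above (illustrative):
// whenever split() or insert() spills a node at the root, the old root and
// the spilled sibling are wrapped in a fresh two-child interior node, so the
// tree only ever gains height at the top and stays balanced for insertions.
//
//   RopePieceBTree Tree;   // root: a single (empty) leaf
//   // ...enough insert() calls to overflow that leaf, and afterwards:
//   // root: RopePieceBTreeInterior [ old leaf | spilled leaf ]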

//===----------------------------------------------------------------------===//
// RewriteRope Implementation
//===----------------------------------------------------------------------===//

/// MakeRopeString - This copies the specified byte range into some instance of
/// RopeRefCountString, and returns a RopePiece that represents it. This uses
/// the AllocBuffer object to aggregate requests for small strings into one
/// allocation instead of doing tons of tiny allocations.
RopePiece RewriteRope::MakeRopeString(const char *Start, const char *End) {
  unsigned Len = End - Start;
  assert(Len && "Zero length RopePiece is invalid!");

  // If we have space for this string in the current alloc buffer, use it.
  if (AllocOffs + Len <= AllocChunkSize) {
    memcpy(AllocBuffer->Data + AllocOffs, Start, Len);
    AllocOffs += Len;
    return RopePiece(AllocBuffer, AllocOffs - Len, AllocOffs);
  }

  // If we don't have enough room because this specific allocation is huge,
  // just allocate a new rope piece for it alone.
  if (Len > AllocChunkSize) {
    unsigned Size = End - Start + sizeof(RopeRefCountString) - 1;
    auto *Res = reinterpret_cast<RopeRefCountString *>(new char[Size]);
    Res->RefCount = 0;
    memcpy(Res->Data, Start, End - Start);
    return RopePiece(Res, 0, End - Start);
  }

  // Otherwise, this was a small request but we just don't have space for it.
  // Make a new chunk and share it with later allocations.

  unsigned AllocSize = offsetof(RopeRefCountString, Data) + AllocChunkSize;
  auto *Res = reinterpret_cast<RopeRefCountString *>(new char[AllocSize]);
  Res->RefCount = 0;
  memcpy(Res->Data, Start, Len);
  AllocBuffer = Res;
  AllocOffs = Len;

  return RopePiece(AllocBuffer, 0, Len);
}
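
// Worked example of the three cases above (illustrative numbers): with
// AllocOffs == 100 and Len == 50, the bytes are copied to
// AllocBuffer->Data + 100 and the result is RopePiece(AllocBuffer, 100, 150).
// A Len larger than AllocChunkSize gets a dedicated RopeRefCountString sized
// to hold it, and a small request that merely doesn't fit in the remaining
// space starts a fresh shared chunk and returns RopePiece(AllocBuffer, 0, Len).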