// Copyright 2021 The Abseil Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef ABSL_STRINGS_INTERNAL_CORD_REP_BTREE_H_
#define ABSL_STRINGS_INTERNAL_CORD_REP_BTREE_H_

#include <cassert>
#include <cstdint>
#include <iosfwd>

#include "absl/base/config.h"
#include "absl/base/internal/raw_logging.h"
#include "absl/base/optimization.h"
#include "absl/strings/internal/cord_internal.h"
#include "absl/strings/internal/cord_rep_flat.h"
#include "absl/strings/string_view.h"
#include "absl/types/span.h"

namespace absl {
ABSL_NAMESPACE_BEGIN
namespace cord_internal {

class CordRepBtreeNavigator;
// CordRepBtree is, as the name implies, a btree implementation of a CordRep
// tree. Data is stored at the leaf level only; non-leaf nodes contain down
// pointers only. Allowed types of data edges are FLAT, EXTERNAL and SUBSTRINGs
// of FLAT or EXTERNAL nodes. The implementation allows data to be added to
// either end of the tree only; it does not provide any 'insert' logic. This
// has the benefit that we can expect good fill ratios: all nodes except the
// outer 'legs' will have 100% fill ratios for trees built using the
// Append/Prepend methods. Merged trees will typically have a fill ratio well
// above 50% as, in a similar fashion, one side of the merged tree will
// typically have a 100% fill ratio, and the 'open' end will average 50%. All
// operations are O(log(n)) or better, and the tree never needs balancing.
//
// All methods accepting a CordRep* or CordRepBtree* adopt a reference on that
// input unless explicitly stated otherwise. All functions returning a CordRep*
// or CordRepBtree* instance transfer a reference back to the caller.
// Simplified, callers both 'donate' and 'consume' a reference count on each
// call, simplifying the API. An example of building a tree:
//
//   CordRepBtree* tree = CordRepBtree::Create(MakeFlat("Hello"));
//   tree = CordRepBtree::Append(tree, MakeFlat("world"));
//
// In the above example, all inputs are consumed, making each call reference
// count neutral with respect to `tree`. The returned `tree` value can be
// different from the input if the input is shared with other threads, or if
// the tree grows in height, but callers typically never have to concern
// themselves with that and can trust that all methods do the right thing at
// all times.
class CordRepBtree : public CordRep {
 public:
  // EdgeType identifies `front` and `back` enum values.
  // Various implementations in CordRepBtree such as `Add` and `Edge` are
  // generic and templated on operating on either of the boundary edges.
  // For more information on the possible edges contained in a CordRepBtree
  // instance see the documentation for `edges_`.
  enum class EdgeType { kFront, kBack };

  // Convenience constants into `EdgeType`
  static constexpr EdgeType kFront = EdgeType::kFront;
  static constexpr EdgeType kBack = EdgeType::kBack;

  // Maximum number of edges: based on experiments and performance data, we can
  // pick suitable values resulting in optimum cacheline aligned values. The
  // preferred values are based on 64-bit systems where we aim to align this
  // class onto 64 bytes, i.e.: 6 = 64 bytes, 14 = 128 bytes, etc.
  // TODO(b/192061034): experiment with alternative sizes.
  static constexpr size_t kMaxCapacity = 6;

  // Reasonable maximum height of the btree. We can expect a fill ratio of at
  // least 50%: trees are always expanded at the front or back. Concatenating
  // trees will then typically fold at the top most node, where the lower nodes
  // are at least at capacity on one side of joined inputs. At a lower fill
  // rate of 4 edges per node, we have capacity for ~16 million leaf nodes.
  // We will fail / abort if an application ever exceeds this height, which
  // should be extremely rare (near impossible) and be an indication of an
  // application error: we do not assume it reasonable for any application to
  // operate correctly with such monster trees.
  // Another compelling reason for the number `12` is that any contextual stack
  // required for navigation or insertion requires 12 words and 12 bytes, which
  // fits inside 2 cache lines with some room to spare, and is reasonable as a
  // local stack variable compared to Cord's current near 400 bytes stack use.
  // The maximum `height` value of a node is then `kMaxDepth - 1` as node height
  // values start with a value of 0 for leaf nodes.
  static constexpr int kMaxDepth = 12;
  static constexpr int kMaxHeight = kMaxDepth - 1;

  // `Action` defines the action for unwinding changes done at the btree's leaf
  // level that need to be propagated up to the parent node(s). Each operation
  // on a node has an effect / action defined as follows:
  // - kSelf
  //   The operation (add / update, etc) was performed directly on the node as
  //   the node is private to the current thread (i.e.: not shared directly or
  //   indirectly through a refcount > 1). Changes can be propagated directly
  //   to all parent nodes as all parent nodes are also then private to the
  //   current thread.
  // - kCopied
  //   The operation (add / update, etc) was performed on a copy of the
  //   original node, as the node is (potentially) directly or indirectly
  //   shared with other threads. Changes need to be propagated into the parent
  //   nodes where the old down pointer must be unreffed and replaced with this
  //   new copy. Such changes to parent nodes may themselves require a copy if
  //   the parent node is also shared. A kCopied action can propagate all the
  //   way to the top node where we then must unref the `tree` input provided
  //   by the caller, and return the new copy.
  // - kPopped
  //   The operation (typically add) could not be satisfied due to insufficient
  //   capacity in the targeted node, and a new 'leg' was created that needs to
  //   be added into the parent node. For example, adding a FLAT inside a leaf
  //   node that is at capacity will create a new leaf node containing that
  //   FLAT, that needs to be 'popped' up the btree. Such 'pop' actions can
  //   cascade up the tree if parent nodes are also at capacity. A 'Popped'
  //   action propagating all the way to the top of the tree will result in
  //   the tree becoming one level higher than the current tree through a final
  //   `CordRepBtree::New(tree, popped)` call, resulting in a new top node
  //   referencing the old tree and the new (fully popped upwards) 'leg'.
  enum Action { kSelf, kCopied, kPopped };

  // Result of an operation on a node. See the `Action` enum for details.
  struct OpResult {
    CordRepBtree* tree;
    Action action;
  };
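
  // Illustrative sketch only (not part of the API): how an `OpResult` returned
  // from a leaf-level mutation is typically consumed by the level above it,
  // following the `Action` semantics described above. `node`, `owned`, `edge`
  // and `delta` are hypothetical caller variables.
  //
  //   OpResult result = node->AddEdge<kBack>(owned, edge, delta);
  //   switch (result.action) {
  //     case kSelf:    break;  // mutated in place; parents can also be
  //                            // updated in place
  //     case kCopied:  break;  // parent must unref its old child and point
  //                            // at result.tree instead
  //     case kPopped:  break;  // parent must add result.tree as a new edge
  //   }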

  // Return value of the CopyPrefix and CopySuffix methods which can
  // return a node or data edge at any height inside the tree.
  // A height of 0 defines the lowest (leaf) node, a height of -1 identifies
  // `edge` as being a plain data node: EXTERNAL / FLAT or SUBSTRING thereof.
  struct CopyResult {
    CordRep* edge;
    int height;
  };

  // Logical position inside a node:
  // - index: index of the edge.
  // - n: size or offset value depending on context.
  struct Position {
    size_t index;
    size_t n;
  };

  // Creates a btree from the given input. Adopts a ref of `rep`.
  // If the input `rep` is itself a btree, i.e., `IsBtree()`, then this
  // function immediately returns `rep->btree()`. If the input is a valid data
  // edge (see IsDataEdge()), then a new leaf node is returned containing `rep`
  // as the sole data edge. Else, the input is assumed to be a (legacy) concat
  // tree, and the input is consumed and transformed into a btree().
  static CordRepBtree* Create(CordRep* rep);
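
  // Illustrative sketch of `Create()` only. `flat` and `concat` are
  // hypothetical reps (a FLAT data edge and a legacy CONCAT tree) whose
  // references are owned by the caller and consumed by these calls.
  //
  //   CordRepBtree* t1 = CordRepBtree::Create(flat);    // leaf holding `flat`
  //   CordRepBtree* t2 = CordRepBtree::Create(concat);  // converted to btree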

  // Destroys the provided tree. Should only be called by cord internal APIs,
  // typically after a ref_count.Decrement() on the last reference count.
  static void Destroy(CordRepBtree* tree);

  // Use CordRep::Unref() as we overload for absl::Span<CordRep* const>.
  using CordRep::Unref;

  // Unrefs all edges in `edges`, whose reference counts are assumed to be
  // 'likely one'.
  static void Unref(absl::Span<CordRep* const> edges);

  // Appends / Prepends an existing CordRep instance to this tree.
  // The below methods accept three types of input (see also the sketch
  // following the declarations below):
  // 1) `rep` is a data node (see `IsDataEdge` for valid data edges).
  //    `rep` is appended or prepended to this tree 'as is'.
  // 2) `rep` is a BTREE.
  //    `rep` is merged into `tree` respecting the Append/Prepend order.
  // 3) `rep` is some other (legacy) type.
  //    `rep` is converted in place and added to `tree`.
  // Requires `tree` and `rep` to be not null.
  static CordRepBtree* Append(CordRepBtree* tree, CordRep* rep);
  static CordRepBtree* Prepend(CordRepBtree* tree, CordRep* rep);
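
  // Illustrative sketch only. `flat` is a hypothetical FLAT data edge and
  // `other` a hypothetical btree; the caller owns a reference on each, and
  // both calls consume that reference as described in the class comment.
  //
  //   tree = CordRepBtree::Append(tree, flat);    // adds `flat` as a data edge
  //   tree = CordRepBtree::Prepend(tree, other);  // merges `other` in front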

  // Append/Prepend the data in `data` to this tree.
  // The `extra` parameter defines how much extra capacity should be allocated
  // for any additional FLAT being allocated. This is an optimization hint from
  // the caller. For example, a caller may need to add 2 string_views of data
  // "abc" and "defghi" which are not consecutive. The caller can in this case
  // invoke `AddData(tree, "abc", 6)`, and any newly added flat is allocated
  // where possible with at least 6 bytes of extra capacity beyond `length`.
  // This helps avoid data getting fragmented over multiple flats.
  // There is no limit on the size of `data`. If `data` cannot be stored inside
  // a single flat, then the function will iteratively add flats until all data
  // has been consumed and appended or prepended to the tree.
  static CordRepBtree* Append(CordRepBtree* tree, string_view data,
                              size_t extra = 0);
  static CordRepBtree* Prepend(CordRepBtree* tree, string_view data,
                               size_t extra = 0);
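
  // Illustrative sketch of the fragmentation hint described above (caller code
  // only; the string contents are just examples):
  //
  //   // "defghi" (6 bytes) will follow shortly, so hint `extra = 6` so that
  //   // the flat created for "abc" has room to also absorb "defghi".
  //   tree = CordRepBtree::Append(tree, "abc", 6);
  //   tree = CordRepBtree::Append(tree, "defghi");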

  // Returns a new tree, containing `n` bytes of data from this instance
  // starting at offset `offset`. Where possible, the returned tree shares
  // (re-uses) data edges and nodes with this instance to minimize the
  // combined memory footprint of both trees.
  // Requires `offset + n <= length`. Returns `nullptr` if `n` is zero.
  CordRep* SubTree(size_t offset, size_t n);
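
  // Illustrative sketch of `SubTree()` only, assuming `tree` holds the bytes
  // "Hello world" and the caller keeps its own reference on `tree`:
  //
  //   CordRep* sub = tree->SubTree(2, 5);  // `sub` spans "llo w"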

  // Removes `n` trailing bytes from `tree`, and returns the resulting tree
  // or data edge. Returns `tree` if n is zero, and nullptr if n == length.
  // This function is logically identical to:
  //   result = tree->SubTree(0, tree->length - n);
  //   Unref(tree);
  //   return result;
  // However, the actual implementation will, as much as possible, perform
  // 'in place' modifications on all nodes and edges that are mutable.
  // For example, in a fully privately owned tree with the last edge being a
  // flat of length 12, RemoveSuffix(1) will simply set the length of that data
  // edge to 11, and reduce the length of all nodes on the edge path by 1.
  static CordRep* RemoveSuffix(CordRepBtree* tree, size_t n);

  // Returns the character at the given offset.
  char GetCharacter(size_t offset) const;

  // Returns true if this node holds a single data edge, and if so, sets
  // `fragment` to reference the contained data. `fragment` is an optional
  // output parameter and allowed to be null.
  bool IsFlat(absl::string_view* fragment) const;

  // Returns true if the data of `n` bytes starting at offset `offset`
  // is contained in a single data edge, and if so, sets `fragment` to
  // reference the contained data. `fragment` is an optional output parameter
  // and allowed to be null.
  bool IsFlat(size_t offset, size_t n, absl::string_view* fragment) const;
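
  // Illustrative sketch of typical caller code for the `IsFlat()` overloads
  // above (`offset` and `n` are hypothetical caller values):
  //
  //   absl::string_view fragment;
  //   if (tree->IsFlat(&fragment)) {
  //     // All data in `tree` is contained in the single edge `fragment`.
  //   }
  //   if (tree->IsFlat(offset, n, &fragment)) {
  //     // Bytes [offset, offset + n) are contiguously available in `fragment`.
  //   }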

  // Returns a span (mutable range of bytes) of up to `size` bytes into the
  // last FLAT data edge inside this tree under the following conditions:
  // - none of the nodes down into the FLAT node are shared.
  // - the last data edge in this tree is a non-shared FLAT.
  // - the referenced FLAT has additional capacity available.
  // If all these conditions are met, a non-empty span is returned, and the
  // length of the flat node and involved tree nodes have been increased by
  // `span.length()`. The caller is responsible for immediately assigning
  // values to all uninitialized data referenced by the returned span.
  // Requires `this->refcount.IsMutable()`: this function forces the
  // caller to do this fast path check on the top level node, as this is the
  // most commonly shared node of a cord tree.
  Span<char> GetAppendBuffer(size_t size);
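
  // Illustrative sketch of typical caller code only, assuming `data` is an
  // absl::string_view the caller wants to append to `tree`:
  //
  //   if (tree->refcount.IsMutable()) {
  //     absl::Span<char> buffer = tree->GetAppendBuffer(data.size());
  //     if (!buffer.empty()) {
  //       memcpy(buffer.data(), data.data(), buffer.size());
  //       data.remove_prefix(buffer.size());
  //     }
  //   }
  //   if (!data.empty()) tree = CordRepBtree::Append(tree, data);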

  // Returns the `height` of the tree. The height of a tree is limited to
  // kMaxHeight. `height` is implemented as an `int` as in some places we
  // use negative (-1) values for 'data edges'.
  int height() const { return static_cast<int>(storage[0]); }

  // Properties: begin, back, end, front/back boundary indexes.
  size_t begin() const { return static_cast<size_t>(storage[1]); }
  size_t back() const { return static_cast<size_t>(storage[2]) - 1; }
  size_t end() const { return static_cast<size_t>(storage[2]); }
  size_t index(EdgeType edge) const {
    return edge == kFront ? begin() : back();
  }

  // Properties: size and capacity.
  // `capacity` contains the current capacity of this instance, where
  // `kMaxCapacity` contains the maximum capacity of a btree node.
  // For now, `capacity` and `kMaxCapacity` return the same value, but this may
  // change in the future if we see benefit in dynamically sizing 'small' nodes
  // to 'large' nodes for large data trees.
  size_t size() const { return end() - begin(); }
  size_t capacity() const { return kMaxCapacity; }

  // Edge access
  inline CordRep* Edge(size_t index) const;
  inline CordRep* Edge(EdgeType edge_type) const;
  inline absl::Span<CordRep* const> Edges() const;
  inline absl::Span<CordRep* const> Edges(size_t begin, size_t end) const;

  // Returns reference to the data edge at `index`.
  // Requires this instance to be a leaf node, and `index` to be a valid index.
  inline absl::string_view Data(size_t index) const;

  static const char* EdgeDataPtr(const CordRep* r);
  static absl::string_view EdgeData(const CordRep* r);

  // Returns true if the provided rep is a FLAT, EXTERNAL or a SUBSTRING node
  // holding a FLAT or EXTERNAL child rep.
  static bool IsDataEdge(const CordRep* rep);

  // Diagnostics: returns true if `tree` is valid and internally consistent.
  // If `shallow` is false, then the provided top level node and all child
  // nodes below it are recursively checked. If `shallow` is true, only the
  // provided node in `tree` and the cumulative length, type and height of the
  // direct child nodes of `tree` are checked. The value of `shallow` is
  // ignored if the internal `cord_btree_exhaustive_validation` diagnostics
  // variable is true, in which case the performed validation works as if
  // `shallow` were false.
  // This function is intended for debugging and testing purposes only.
  static bool IsValid(const CordRepBtree* tree, bool shallow = false);

  // Diagnostics: asserts that the provided tree is valid.
  // `AssertValid()` performs a shallow validation by default. `shallow` can be
  // set to false in which case an exhaustive validation is performed. This
  // function is implemented in terms of calling `IsValid()` and asserting the
  // return value to be true. See `IsValid()` for more information.
  // This function is intended for debugging and testing purposes only.
  static CordRepBtree* AssertValid(CordRepBtree* tree, bool shallow = true);
  static const CordRepBtree* AssertValid(const CordRepBtree* tree,
                                         bool shallow = true);

  // Diagnostics: dump the contents of this tree to `stream`.
  // This function is intended for debugging and testing purposes only.
  static void Dump(const CordRep* rep, std::ostream& stream);
  static void Dump(const CordRep* rep, absl::string_view label,
                   std::ostream& stream);
  static void Dump(const CordRep* rep, absl::string_view label,
                   bool include_contents, std::ostream& stream);

  // Adds the edge `edge` to this node if possible. `owned` indicates if the
  // current node is potentially shared or not with other threads. Returns:
  // - {kSelf, <this>}
  //   The edge was directly added to this node.
  // - {kCopied, <node>}
  //   The edge was added to a copy of this node.
  // - {kPopped, New(edge, height())}
  //   A new leg with the edge was created as this node has no extra capacity.
  template <EdgeType edge_type>
  inline OpResult AddEdge(bool owned, CordRep* edge, size_t delta);

  // Replaces the front or back edge with the provided new edge. Returns:
  // - {kSelf, <this>}
  //   The edge was directly set in this node. The old edge is unreffed.
  // - {kCopied, <node>}
  //   A copy of this node was created with the new edge value.
  // In both cases, the function adopts a reference on `edge`.
  template <EdgeType edge_type>
  OpResult SetEdge(bool owned, CordRep* edge, size_t delta);

  // Creates a new empty node at the specified height.
  static CordRepBtree* New(int height = 0);

  // Creates a new node containing `rep`, with the height being computed
  // automatically based on the type of `rep`.
  static CordRepBtree* New(CordRep* rep);

  // Creates a new node containing both `front` and `back` at height
  // `front.height() + 1`. Requires `back.height() == front.height()`.
  static CordRepBtree* New(CordRepBtree* front, CordRepBtree* back);

  // Creates a fully balanced tree from the provided tree by rebuilding a new
  // tree from all data edges in the input. This function is automatically
  // invoked internally when the tree exceeds the maximum height.
  static CordRepBtree* Rebuild(CordRepBtree* tree);

 private:
  CordRepBtree() = default;
  ~CordRepBtree() = default;

  // Initializes the main properties `tag`, `begin`, `end`, `height`.
  inline void InitInstance(int height, size_t begin = 0, size_t end = 0);

  // Direct property access begin / end
  void set_begin(size_t begin) { storage[1] = static_cast<uint8_t>(begin); }
  void set_end(size_t end) { storage[2] = static_cast<uint8_t>(end); }

  // Decreases the value of `begin` by `n`, and returns the new value. Notice
  // how this returns the new value unlike atomic::fetch_add which returns the
  // old value. This is because this is used to prepend edges at 'begin - 1'.
  size_t sub_fetch_begin(size_t n) {
    storage[1] -= static_cast<uint8_t>(n);
    return storage[1];
  }

  // Increases the value of `end` by `n`, and returns the previous value. This
  // function is typically used to append edges at 'end'.
  size_t fetch_add_end(size_t n) {
    const uint8_t current = storage[2];
    storage[2] = static_cast<uint8_t>(current + n);
    return current;
  }

  // Returns the index of the last edge starting on, or before `offset`, with
  // `n` containing the relative offset of `offset` inside that edge.
  // Requires `offset` < length.
  Position IndexOf(size_t offset) const;

  // Returns the index of the last edge starting before `offset`, with `n`
  // containing the relative offset of `offset` inside that edge.
  // This function is useful to find the edges for some span of bytes ending at
  // `offset` (i.e., `n` bytes). For example:
  //
  //   Position pos = IndexBefore(n)
  //   edges = Edges(begin(), pos.index)      // All full edges (may be empty)
  //   last = Sub(Edge(pos.index), 0, pos.n)  // Last partial edge (may be empty)
  //
  // Requires 0 < `offset` <= length.
  Position IndexBefore(size_t offset) const;

  // Returns the index of the edge ending at (or on) length `length`, and the
  // number of bytes inside that edge up to `length`. For example, if we have a
  // Node with 2 edges, one of 10 and one of 20 long, then IndexOfLength(27)
  // will return {1, 17}, and IndexOfLength(10) will return {0, 10}.
  Position IndexOfLength(size_t n) const;

  // Identical to the above function except starting from the position `front`.
  // This function is equivalent to `IndexBefore(front.n + offset)`, with
  // the difference that this function is optimized to start at `front.index`.
  Position IndexBefore(Position front, size_t offset) const;

  // Returns the index of the edge directly beyond the edge containing offset
  // `offset`, with `n` containing the distance of that edge from `offset`.
  // This function is useful for iteratively finding suffix nodes and remaining
  // partial bytes in left-most suffix nodes as for example in CopySuffix.
  // Requires `offset` < length.
  Position IndexBeyond(size_t offset) const;

  // Destruction
  static void DestroyLeaf(CordRepBtree* tree, size_t begin, size_t end);
  static void DestroyNonLeaf(CordRepBtree* tree, size_t begin, size_t end);
  static void DestroyTree(CordRepBtree* tree, size_t begin, size_t end);
  static void Delete(CordRepBtree* tree) { delete tree; }

  // Creates a new leaf node containing as much data as possible from `data`.
  // The data is added either forwards or reversed depending on `edge_type`.
  // Callers must check the length of the returned node to determine if all
  // data was copied or not.
  // See the `Append/Prepend` function for the meaning and purpose of `extra`.
  template <EdgeType edge_type>
  static CordRepBtree* NewLeaf(absl::string_view data, size_t extra);

  // Creates a raw copy of this Btree node, copying all properties, but
  // without adding any references to existing edges.
  CordRepBtree* CopyRaw() const;

  // Creates a full copy of this Btree node, adding a reference on all edges.
  CordRepBtree* Copy() const;

  // Creates a partial copy of this Btree node, copying all edges up to `end`,
  // adding a reference on each copied edge, and setting the length of the
  // newly created copy to `new_length`.
  CordRepBtree* CopyBeginTo(size_t end, size_t new_length) const;

  // Returns a tree containing the edges [tree->begin(), end) and length
  // of `new_length`. This method consumes a reference on the provided
  // tree, and logically performs the following operation:
  //   result = tree->CopyBeginTo(end, new_length);
  //   CordRep::Unref(tree);
  //   return result;
  static CordRepBtree* ConsumeBeginTo(CordRepBtree* tree, size_t end,
                                      size_t new_length);

  // Creates a partial copy of this Btree node, copying all edges starting at
  // `begin`, adding a reference on each copied edge, and setting the length of
  // the newly created copy to `new_length`.
  CordRepBtree* CopyToEndFrom(size_t begin, size_t new_length) const;

  // Extracts and returns the front edge from the provided tree.
  // This method consumes a reference on the provided tree, and logically
  // performs the following operation:
  //   edge = CordRep::Ref(tree->Edge(kFront));
  //   CordRep::Unref(tree);
  //   return edge;
  static CordRep* ExtractFront(CordRepBtree* tree);

  // Returns a tree containing the result of appending `right` to `left`.
  static CordRepBtree* MergeTrees(CordRepBtree* left, CordRepBtree* right);

  // Fallback functions for `Create()`, `Append()` and `Prepend()` which
  // deal with legacy / non conforming input, i.e.: CONCAT trees.
  static CordRepBtree* CreateSlow(CordRep* rep);
  static CordRepBtree* AppendSlow(CordRepBtree*, CordRep* rep);
  static CordRepBtree* PrependSlow(CordRepBtree*, CordRep* rep);

  // Recursively rebuilds `tree` into `stack`. If `consume` is set to true, the
  // function will consume a reference on `tree`. `stack` is a null terminated
  // array containing the new tree's state, with the current leaf node at
  // stack[0], and parent nodes above that, or null for 'top of tree'.
  static void Rebuild(CordRepBtree** stack, CordRepBtree* tree, bool consume);

  // Aligns existing edges to start at index 0, to allow for a new edge to be
  // added to the back of the current edges.
  inline void AlignBegin();

  // Aligns existing edges to end at `capacity`, to allow for a new edge to be
  // added in front of the current edges.
  inline void AlignEnd();

  // Adds the provided edge to this node.
  // Requires this node to have capacity for the edge. Realigns / moves
  // existing edges as needed to prepend or append the new edge.
  template <EdgeType edge_type>
  inline void Add(CordRep* rep);

  // Adds the provided edges to this node.
  // Requires this node to have capacity for the edges. Realigns / moves
  // existing edges as needed to prepend or append the new edges.
  template <EdgeType edge_type>
  inline void Add(absl::Span<CordRep* const>);

  // Adds data from `data` to this node until either all data has been
  // consumed, or there is no more capacity for additional flat nodes inside
  // this node. Requires the current node to be a leaf node, data to be
  // non-empty, and the current node to have capacity for at least one more
  // data edge.
  // Returns any remaining data from `data` that was not added, which is,
  // depending on the edge type (front / back), either the remaining prefix or
  // suffix of the input.
  // See the `Append/Prepend` function for the meaning and purpose of `extra`.
  template <EdgeType edge_type>
  absl::string_view AddData(absl::string_view data, size_t extra);

  // Replace the front or back edge with the provided value.
  // Adopts a reference on `edge` and unrefs the old edge.
  template <EdgeType edge_type>
  inline void SetEdge(CordRep* edge);

  // Returns a partial copy of the current tree containing the first `n` bytes
  // of data. `CopyResult` contains both the resulting edge and its height. The
  // resulting tree may be less high than the current tree, or even be a single
  // matching data edge if `allow_folding` is set to true.
  // For example, if `n == 1`, then the result will be the single data edge,
  // and height will be set to -1 (one below the owning leaf node). If n == 0,
  // this function returns null. Requires `n <= length`.
  CopyResult CopyPrefix(size_t n, bool allow_folding = true);

  // Returns a partial copy of the current tree containing all data starting
  // after `offset`. `CopyResult` contains both the resulting edge and its
  // height. The resulting tree may be less high than the current tree, or even
  // be a single matching data edge. For example, if `n == length - 1`, then
  // the result will be a single data edge, and height will be set to -1 (one
  // below the owning leaf node).
  // Requires `offset < length`.
  CopyResult CopySuffix(size_t offset);

  // Returns an OpResult value of {this, kSelf} or {Copy(), kCopied}
  // depending on the value of `owned`.
  inline OpResult ToOpResult(bool owned);

  // Adds `rep` to the specified tree, returning the modified tree.
  template <EdgeType edge_type>
  static CordRepBtree* AddCordRep(CordRepBtree* tree, CordRep* rep);

  // Adds `data` to the specified tree, returning the modified tree.
  // See the `Append/Prepend` function for the meaning and purpose of `extra`.
  template <EdgeType edge_type>
  static CordRepBtree* AddData(CordRepBtree* tree, absl::string_view data,
                               size_t extra = 0);

  // Merges `src` into `dst` with `src` being added either before (kFront) or
  // after (kBack) `dst`. Requires the height of `dst` to be greater than or
  // equal to the height of `src`.
  template <EdgeType edge_type>
  static CordRepBtree* Merge(CordRepBtree* dst, CordRepBtree* src);

  // Fallback version of GetAppendBuffer for large trees: GetAppendBuffer()
  // implements an inlined version for trees of limited height (3 levels),
  // GetAppendBufferSlow implements the logic for large trees.
  Span<char> GetAppendBufferSlow(size_t size);

  // `edges_` contains all edges starting from this instance.
  // These are explicitly `child` edges only; a cord btree (or any cord tree,
  // for that matter) does not store `parent` pointers anywhere: multiple trees
  // / parents can reference the same shared child edge. The type of these
  // edges depends on the height of the node. `Leaf nodes` (height == 0)
  // contain `data edges` (external or flat nodes, or sub-strings thereof). All
  // other nodes (height > 0) contain pointers to BTREE nodes with a height of
  // `height - 1`.
  CordRep* edges_[kMaxCapacity];

  friend class CordRepBtreeTestPeer;
  friend class CordRepBtreeNavigator;
};

inline CordRepBtree* CordRep::btree() {
  assert(IsBtree());
  return static_cast<CordRepBtree*>(this);
}

inline const CordRepBtree* CordRep::btree() const {
  assert(IsBtree());
  return static_cast<const CordRepBtree*>(this);
}

inline void CordRepBtree::InitInstance(int height, size_t begin, size_t end) {
  tag = BTREE;
  storage[0] = static_cast<uint8_t>(height);
  storage[1] = static_cast<uint8_t>(begin);
  storage[2] = static_cast<uint8_t>(end);
}

inline CordRep* CordRepBtree::Edge(size_t index) const {
  assert(index >= begin());
  assert(index < end());
  return edges_[index];
}

inline CordRep* CordRepBtree::Edge(EdgeType edge_type) const {
  return edges_[edge_type == kFront ? begin() : back()];
}

inline absl::Span<CordRep* const> CordRepBtree::Edges() const {
  return {edges_ + begin(), size()};
}

inline absl::Span<CordRep* const> CordRepBtree::Edges(size_t begin,
                                                      size_t end) const {
  assert(begin <= end);
  assert(begin >= this->begin());
  assert(end <= this->end());
  return {edges_ + begin, static_cast<size_t>(end - begin)};
}

inline const char* CordRepBtree::EdgeDataPtr(const CordRep* r) {
  assert(IsDataEdge(r));
  size_t offset = 0;
  if (r->tag == SUBSTRING) {
    offset = r->substring()->start;
    r = r->substring()->child;
  }
  return (r->tag >= FLAT ? r->flat()->Data() : r->external()->base) + offset;
}

inline absl::string_view CordRepBtree::EdgeData(const CordRep* r) {
  return absl::string_view(EdgeDataPtr(r), r->length);
}

inline absl::string_view CordRepBtree::Data(size_t index) const {
  assert(height() == 0);
  return EdgeData(Edge(index));
}

inline bool CordRepBtree::IsDataEdge(const CordRep* rep) {
  // The fast path is that `rep` is an EXTERNAL or FLAT node, making the below
  // if a single, well predicted branch. We then repeat the FLAT or EXTERNAL
  // check in the slow path after the SUBSTRING check to optimize for the hot
  // path.
  if (rep->tag == EXTERNAL || rep->tag >= FLAT) return true;
  if (rep->tag == SUBSTRING) rep = rep->substring()->child;
  return rep->tag == EXTERNAL || rep->tag >= FLAT;
}

inline CordRepBtree* CordRepBtree::New(int height) {
  CordRepBtree* tree = new CordRepBtree;
  tree->length = 0;
  tree->InitInstance(height);
  return tree;
}

inline CordRepBtree* CordRepBtree::New(CordRep* rep) {
  CordRepBtree* tree = new CordRepBtree;
  int height = rep->IsBtree() ? rep->btree()->height() + 1 : 0;
  tree->length = rep->length;
  tree->InitInstance(height, /*begin=*/0, /*end=*/1);
  tree->edges_[0] = rep;
  return tree;
}

inline CordRepBtree* CordRepBtree::New(CordRepBtree* front,
                                       CordRepBtree* back) {
  assert(front->height() == back->height());
  CordRepBtree* tree = new CordRepBtree;
  tree->length = front->length + back->length;
  tree->InitInstance(front->height() + 1, /*begin=*/0, /*end=*/2);
  tree->edges_[0] = front;
  tree->edges_[1] = back;
  return tree;
}

inline void CordRepBtree::DestroyTree(CordRepBtree* tree, size_t begin,
                                      size_t end) {
  if (tree->height() == 0) {
    DestroyLeaf(tree, begin, end);
  } else {
    DestroyNonLeaf(tree, begin, end);
  }
}

inline void CordRepBtree::Destroy(CordRepBtree* tree) {
  DestroyTree(tree, tree->begin(), tree->end());
}

inline void CordRepBtree::Unref(absl::Span<CordRep* const> edges) {
  for (CordRep* edge : edges) {
    if (ABSL_PREDICT_FALSE(!edge->refcount.Decrement())) {
      CordRep::Destroy(edge);
    }
  }
}

inline CordRepBtree* CordRepBtree::CopyRaw() const {
  auto* tree = static_cast<CordRepBtree*>(::operator new(sizeof(CordRepBtree)));
  memcpy(static_cast<void*>(tree), this, sizeof(CordRepBtree));
  new (&tree->refcount) RefcountAndFlags;
  return tree;
}

inline CordRepBtree* CordRepBtree::Copy() const {
  CordRepBtree* tree = CopyRaw();
  for (CordRep* rep : Edges()) CordRep::Ref(rep);
  return tree;
}

inline CordRepBtree* CordRepBtree::CopyToEndFrom(size_t begin,
                                                 size_t new_length) const {
  assert(begin >= this->begin());
  assert(begin <= this->end());
  CordRepBtree* tree = CopyRaw();
  tree->length = new_length;
  tree->set_begin(begin);
  for (CordRep* edge : tree->Edges()) CordRep::Ref(edge);
  return tree;
}

inline CordRepBtree* CordRepBtree::CopyBeginTo(size_t end,
                                               size_t new_length) const {
  assert(end <= capacity());
  assert(end >= this->begin());
  CordRepBtree* tree = CopyRaw();
  tree->length = new_length;
  tree->set_end(end);
  for (CordRep* edge : tree->Edges()) CordRep::Ref(edge);
  return tree;
}

inline void CordRepBtree::AlignBegin() {
  // The below code itself does not need to be fast as typically we have
  // mono-directional append/prepend calls, and `begin` / `end` are typically
  // adjusted no more than once. But we want to avoid potential register clobber
  // effects, making the compiler emit register save/store/spills, and minimize
  // the size of code.
  const size_t delta = begin();
  if (ABSL_PREDICT_FALSE(delta != 0)) {
    const size_t new_end = end() - delta;
    set_begin(0);
    set_end(new_end);
    // TODO(mvels): we can write this using 2 loads / 2 stores depending on
    // total size for the kMaxCapacity = 6 case. I.e., we can branch (switch) on
    // size, and then do overlapping load/store of up to 4 pointers (inlined as
    // XMM, YMM or ZMM load/store) and up to 2 pointers (XMM / YMM), which is a)
    // compact and b) not clobbering any registers.
    ABSL_INTERNAL_ASSUME(new_end <= kMaxCapacity);
#ifdef __clang__
#pragma unroll 1
#endif
    for (size_t i = 0; i < new_end; ++i) {
      edges_[i] = edges_[i + delta];
    }
  }
}

inline void CordRepBtree::AlignEnd() {
  // See comments in `AlignBegin` for motivation on the hand-rolled for loops.
  const size_t delta = capacity() - end();
  if (delta != 0) {
    const size_t new_begin = begin() + delta;
    const size_t new_end = end() + delta;
    set_begin(new_begin);
    set_end(new_end);
    ABSL_INTERNAL_ASSUME(new_end <= kMaxCapacity);
#ifdef __clang__
#pragma unroll 1
#endif
    for (size_t i = new_end - 1; i >= new_begin; --i) {
      edges_[i] = edges_[i - delta];
    }
  }
}

template <>
inline void CordRepBtree::Add<CordRepBtree::kBack>(CordRep* rep) {
  AlignBegin();
  edges_[fetch_add_end(1)] = rep;
}

template <>
inline void CordRepBtree::Add<CordRepBtree::kBack>(
    absl::Span<CordRep* const> edges) {
  AlignBegin();
  size_t new_end = end();
  for (CordRep* edge : edges) edges_[new_end++] = edge;
  set_end(new_end);
}

template <>
inline void CordRepBtree::Add<CordRepBtree::kFront>(CordRep* rep) {
  AlignEnd();
  edges_[sub_fetch_begin(1)] = rep;
}

template <>
inline void CordRepBtree::Add<CordRepBtree::kFront>(
    absl::Span<CordRep* const> edges) {
  AlignEnd();
  size_t new_begin = begin() - edges.size();
  set_begin(new_begin);
  for (CordRep* edge : edges) edges_[new_begin++] = edge;
}

template <CordRepBtree::EdgeType edge_type>
inline void CordRepBtree::SetEdge(CordRep* edge) {
  const int idx = edge_type == kFront ? begin() : back();
  CordRep::Unref(edges_[idx]);
  edges_[idx] = edge;
}

inline CordRepBtree::OpResult CordRepBtree::ToOpResult(bool owned) {
  return owned ? OpResult{this, kSelf} : OpResult{Copy(), kCopied};
}

inline CordRepBtree::Position CordRepBtree::IndexOf(size_t offset) const {
  assert(offset < length);
  size_t index = begin();
  while (offset >= edges_[index]->length) offset -= edges_[index++]->length;
  return {index, offset};
}

inline CordRepBtree::Position CordRepBtree::IndexBefore(size_t offset) const {
  assert(offset > 0);
  assert(offset <= length);
  size_t index = begin();
  while (offset > edges_[index]->length) offset -= edges_[index++]->length;
  return {index, offset};
}

inline CordRepBtree::Position CordRepBtree::IndexBefore(Position front,
                                                        size_t offset) const {
  size_t index = front.index;
  offset = offset + front.n;
  while (offset > edges_[index]->length) offset -= edges_[index++]->length;
  return {index, offset};
}

inline CordRepBtree::Position CordRepBtree::IndexOfLength(size_t n) const {
  assert(n <= length);
  size_t index = back();
  size_t strip = length - n;
  while (strip >= edges_[index]->length) strip -= edges_[index--]->length;
  return {index, edges_[index]->length - strip};
}

inline CordRepBtree::Position CordRepBtree::IndexBeyond(
    const size_t offset) const {
  // We need to find the first edge whose starting offset is at or beyond (>=)
  // `offset`. For this we can't use the `offset -= length` logic of IndexOf.
  // Instead, we track the offset of the `current edge` in `off`, which we
  // increase as we iterate over the edges until we find the matching edge.
  size_t off = 0;
  size_t index = begin();
  while (offset > off) off += edges_[index++]->length;
  return {index, off - offset};
}

inline CordRepBtree* CordRepBtree::Create(CordRep* rep) {
  if (IsDataEdge(rep)) return New(rep);
  return CreateSlow(rep);
}

inline Span<char> CordRepBtree::GetAppendBuffer(size_t size) {
  assert(refcount.IsMutable());
  CordRepBtree* tree = this;
  const int height = this->height();
  CordRepBtree* n1 = tree;
  CordRepBtree* n2 = tree;
  CordRepBtree* n3 = tree;
  switch (height) {
    case 3:
      tree = tree->Edge(kBack)->btree();
      if (!tree->refcount.IsMutable()) return {};
      n2 = tree;
      ABSL_FALLTHROUGH_INTENDED;
    case 2:
      tree = tree->Edge(kBack)->btree();
      if (!tree->refcount.IsMutable()) return {};
      n1 = tree;
      ABSL_FALLTHROUGH_INTENDED;
    case 1:
      tree = tree->Edge(kBack)->btree();
      if (!tree->refcount.IsMutable()) return {};
      ABSL_FALLTHROUGH_INTENDED;
    case 0:
      CordRep* edge = tree->Edge(kBack);
      if (!edge->refcount.IsMutable()) return {};
      if (edge->tag < FLAT) return {};
      size_t avail = edge->flat()->Capacity() - edge->length;
      if (avail == 0) return {};
      size_t delta = (std::min)(size, avail);
      Span<char> span = {edge->flat()->Data() + edge->length, delta};
      edge->length += delta;
      switch (height) {
        case 3:
          n3->length += delta;
          ABSL_FALLTHROUGH_INTENDED;
        case 2:
          n2->length += delta;
          ABSL_FALLTHROUGH_INTENDED;
        case 1:
          n1->length += delta;
          ABSL_FALLTHROUGH_INTENDED;
        case 0:
          tree->length += delta;
          return span;
      }
      break;
  }
  return GetAppendBufferSlow(size);
}

extern template CordRepBtree* CordRepBtree::AddCordRep<CordRepBtree::kBack>(
    CordRepBtree* tree, CordRep* rep);

extern template CordRepBtree* CordRepBtree::AddCordRep<CordRepBtree::kFront>(
    CordRepBtree* tree, CordRep* rep);

inline CordRepBtree* CordRepBtree::Append(CordRepBtree* tree, CordRep* rep) {
  if (ABSL_PREDICT_TRUE(IsDataEdge(rep))) {
    return CordRepBtree::AddCordRep<kBack>(tree, rep);
  }
  return AppendSlow(tree, rep);
}

inline CordRepBtree* CordRepBtree::Prepend(CordRepBtree* tree, CordRep* rep) {
  if (ABSL_PREDICT_TRUE(IsDataEdge(rep))) {
    return CordRepBtree::AddCordRep<kFront>(tree, rep);
  }
  return PrependSlow(tree, rep);
}

#ifdef NDEBUG

inline CordRepBtree* CordRepBtree::AssertValid(CordRepBtree* tree,
                                               bool /* shallow */) {
  return tree;
}

inline const CordRepBtree* CordRepBtree::AssertValid(const CordRepBtree* tree,
                                                     bool /* shallow */) {
  return tree;
}

#endif

}  // namespace cord_internal
ABSL_NAMESPACE_END
}  // namespace absl

#endif  // ABSL_STRINGS_INTERNAL_CORD_REP_BTREE_H_