// Copyright 2018 The Abseil Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// A btree implementation of the STL set and map interfaces. A btree is smaller
// and generally also faster than STL set/map (refer to the benchmarks below).
// The red-black tree implementation of STL set/map has an overhead of 3
// pointers (left, right and parent) plus the node color information for each
// stored value. So a set<int32_t> consumes 40 bytes for each value stored in
// 64-bit mode. This btree implementation stores multiple values on fixed
// size nodes (usually 256 bytes) and doesn't store child pointers for leaf
// nodes. The result is that a btree_set<int32_t> may use much less memory per
// stored value. For the random insertion benchmark in btree_bench.cc, a
// btree_set<int32_t> with node-size of 256 uses 5.1 bytes per stored value.
//
// The packing of multiple values on to each node of a btree has another effect
// besides better space utilization: better cache locality due to fewer cache
// lines being accessed. Better cache locality translates into faster
// operations.
//
// CAVEATS
//
// Insertions and deletions on a btree can cause splitting, merging or
// rebalancing of btree nodes. And even without these operations, insertions
// and deletions on a btree will move values around within a node. In both
// cases, the result is that insertions and deletions can invalidate iterators
// pointing to values other than the one being inserted/deleted. Therefore, this
// container does not provide pointer stability. This is notably different from
// STL set/map which takes care to not invalidate iterators on insert/erase
// except, of course, for iterators pointing to the value being erased.  A
// partial workaround when erasing is available: erase() returns an iterator
// pointing to the item just after the one that was erased (or end() if none
// exists).
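//
// A minimal sketch (not part of this header; it uses the public
// absl::btree_set from absl/container/btree_set.h) of erasing while
// iterating, relying only on the erase() return value described above:
//
//   absl::btree_set<int> s = {1, 2, 3, 4};
//   for (auto it = s.begin(); it != s.end();) {
//     if (*it % 2 == 0) {
//       it = s.erase(it);  // Points just past the erased element.
//     } else {
//       ++it;  // Safe: nothing was mutated since `it` was obtained.
//     }
//   }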

#ifndef ABSL_CONTAINER_INTERNAL_BTREE_H_
#define ABSL_CONTAINER_INTERNAL_BTREE_H_

#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <functional>
#include <iterator>
#include <limits>
#include <new>
#include <string>
#include <type_traits>
#include <utility>

#include "absl/base/internal/raw_logging.h"
#include "absl/base/macros.h"
#include "absl/container/internal/common.h"
#include "absl/container/internal/common_policy_traits.h"
#include "absl/container/internal/compressed_tuple.h"
#include "absl/container/internal/container_memory.h"
#include "absl/container/internal/layout.h"
#include "absl/memory/memory.h"
#include "absl/meta/type_traits.h"
#include "absl/strings/cord.h"
#include "absl/strings/string_view.h"
#include "absl/types/compare.h"
#include "absl/utility/utility.h"

namespace absl {
ABSL_NAMESPACE_BEGIN
namespace container_internal {

#ifdef ABSL_BTREE_ENABLE_GENERATIONS
#error ABSL_BTREE_ENABLE_GENERATIONS cannot be directly set
#elif defined(ABSL_HAVE_ADDRESS_SANITIZER) || \
    defined(ABSL_HAVE_HWADDRESS_SANITIZER) || \
    defined(ABSL_HAVE_MEMORY_SANITIZER)
// When compiled in sanitizer mode, we add generation integers to the nodes and
// iterators. When iterators are used, we validate that the container has not
// been mutated since the iterator was constructed.
#define ABSL_BTREE_ENABLE_GENERATIONS
#endif

#ifdef ABSL_BTREE_ENABLE_GENERATIONS
constexpr bool BtreeGenerationsEnabled() { return true; }
#else
constexpr bool BtreeGenerationsEnabled() { return false; }
#endif

template <typename Compare, typename T, typename U>
using compare_result_t = absl::result_of_t<const Compare(const T &, const U &)>;

// A helper class that indicates if the Compare parameter is a key-compare-to
// comparator.
template <typename Compare, typename T>
using btree_is_key_compare_to =
    std::is_convertible<compare_result_t<Compare, T, T>, absl::weak_ordering>;
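//
// For illustration, a minimal hypothetical comparator (not part of this
// header) that satisfies btree_is_key_compare_to by returning a three-way
// result rather than a bool:
//
//   struct IntCompareTo {
//     absl::weak_ordering operator()(int lhs, int rhs) const {
//       if (lhs < rhs) return absl::weak_ordering::less;
//       if (lhs > rhs) return absl::weak_ordering::greater;
//       return absl::weak_ordering::equivalent;
//     }
//   };
//   static_assert(btree_is_key_compare_to<IntCompareTo, int>::value, "");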

struct StringBtreeDefaultLess {
  using is_transparent = void;

  StringBtreeDefaultLess() = default;

  // Compatibility constructor.
  StringBtreeDefaultLess(std::less<std::string>) {}        // NOLINT
  StringBtreeDefaultLess(std::less<absl::string_view>) {}  // NOLINT

  // Allow converting to std::less for use in key_comp()/value_comp().
  explicit operator std::less<std::string>() const { return {}; }
  explicit operator std::less<absl::string_view>() const { return {}; }
  explicit operator std::less<absl::Cord>() const { return {}; }

  absl::weak_ordering operator()(absl::string_view lhs,
                                 absl::string_view rhs) const {
    return compare_internal::compare_result_as_ordering(lhs.compare(rhs));
  }
  StringBtreeDefaultLess(std::less<absl::Cord>) {}  // NOLINT
  absl::weak_ordering operator()(const absl::Cord &lhs,
                                 const absl::Cord &rhs) const {
    return compare_internal::compare_result_as_ordering(lhs.Compare(rhs));
  }
  absl::weak_ordering operator()(const absl::Cord &lhs,
                                 absl::string_view rhs) const {
    return compare_internal::compare_result_as_ordering(lhs.Compare(rhs));
  }
  absl::weak_ordering operator()(absl::string_view lhs,
                                 const absl::Cord &rhs) const {
    return compare_internal::compare_result_as_ordering(-rhs.Compare(lhs));
  }
};

struct StringBtreeDefaultGreater {
  using is_transparent = void;

  StringBtreeDefaultGreater() = default;

  StringBtreeDefaultGreater(std::greater<std::string>) {}        // NOLINT
  StringBtreeDefaultGreater(std::greater<absl::string_view>) {}  // NOLINT

  // Allow converting to std::greater for use in key_comp()/value_comp().
  explicit operator std::greater<std::string>() const { return {}; }
  explicit operator std::greater<absl::string_view>() const { return {}; }
  explicit operator std::greater<absl::Cord>() const { return {}; }

  absl::weak_ordering operator()(absl::string_view lhs,
                                 absl::string_view rhs) const {
    return compare_internal::compare_result_as_ordering(rhs.compare(lhs));
  }
  StringBtreeDefaultGreater(std::greater<absl::Cord>) {}  // NOLINT
  absl::weak_ordering operator()(const absl::Cord &lhs,
                                 const absl::Cord &rhs) const {
    return compare_internal::compare_result_as_ordering(rhs.Compare(lhs));
  }
  absl::weak_ordering operator()(const absl::Cord &lhs,
                                 absl::string_view rhs) const {
    return compare_internal::compare_result_as_ordering(-lhs.Compare(rhs));
  }
  absl::weak_ordering operator()(absl::string_view lhs,
                                 const absl::Cord &rhs) const {
    return compare_internal::compare_result_as_ordering(rhs.Compare(lhs));
  }
};

// See below comments for checked_compare.
template <typename Compare, bool is_class = std::is_class<Compare>::value>
struct checked_compare_base : Compare {
  using Compare::Compare;
  explicit checked_compare_base(Compare c) : Compare(std::move(c)) {}
  const Compare &comp() const { return *this; }
};
template <typename Compare>
struct checked_compare_base<Compare, false> {
  explicit checked_compare_base(Compare c) : compare(std::move(c)) {}
  const Compare &comp() const { return compare; }
  Compare compare;
};

// A mechanism for opting out of checked_compare for use only in btree_test.cc.
struct BtreeTestOnlyCheckedCompareOptOutBase {};

// A helper class to adapt the specified comparator for two use cases:
// (1) When using common Abseil string types with common comparison functors,
// convert a boolean comparison into a three-way comparison that returns an
// `absl::weak_ordering`. This helper class is specialized for
// less<std::string>, greater<std::string>, less<string_view>,
// greater<string_view>, less<absl::Cord>, and greater<absl::Cord>.
// (2) Adapt the comparator to diagnose cases of non-strict-weak-ordering (see
// https://en.cppreference.com/w/cpp/named_req/Compare) in debug mode. Whenever
// a comparison is made, we will make assertions to verify that the comparator
// is valid.
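//
// As a hypothetical sketch (for illustration only), a comparator that uses
// `<=` instead of `<` is not a strict weak ordering: a key then compares
// "less than" itself, and the debug assertions in checked_compare fire:
//
//   struct BrokenLess {
//     bool operator()(int a, int b) const { return a <= b; }  // Bug: use <.
//   };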
template <typename Compare, typename Key>
struct key_compare_adapter {
  // Inherit from checked_compare_base to support function pointers and also
  // keep empty-base-optimization (EBO) support for classes.
  // Note: we can't use CompressedTuple here because that would interfere
  // with the EBO for `btree::rightmost_`. `btree::rightmost_` is itself a
  // CompressedTuple and nested `CompressedTuple`s don't support EBO.
  // TODO(b/214288561): use CompressedTuple instead once it supports EBO for
  // nested `CompressedTuple`s.
  struct checked_compare : checked_compare_base<Compare> {
   private:
    using Base = typename checked_compare::checked_compare_base;
    using Base::comp;

    // If possible, returns whether `t` is equivalent to itself. We can only do
    // this for `Key`s because we can't be sure that it's safe to call
    // `comp()(k, k)` otherwise. Even if SFINAE allows it, there could be a
    // compilation failure inside the implementation of the comparison operator.
    bool is_self_equivalent(const Key &k) const {
      // Note: this works for both boolean and three-way comparators.
      return comp()(k, k) == 0;
    }
    // If we can't compare `t` with itself, returns true unconditionally.
    template <typename T>
    bool is_self_equivalent(const T &) const {
      return true;
    }

   public:
    using Base::Base;
    checked_compare(Compare comp) : Base(std::move(comp)) {}  // NOLINT

    // Allow converting to Compare for use in key_comp()/value_comp().
    explicit operator Compare() const { return comp(); }

    template <typename T, typename U,
              absl::enable_if_t<
                  std::is_same<bool, compare_result_t<Compare, T, U>>::value,
                  int> = 0>
    bool operator()(const T &lhs, const U &rhs) const {
      // NOTE: if any of these assertions fail, then the comparator does not
      // establish a strict-weak-ordering (see
      // https://en.cppreference.com/w/cpp/named_req/Compare).
      assert(is_self_equivalent(lhs));
      assert(is_self_equivalent(rhs));
      const bool lhs_comp_rhs = comp()(lhs, rhs);
      assert(!lhs_comp_rhs || !comp()(rhs, lhs));
      return lhs_comp_rhs;
    }

    template <
        typename T, typename U,
        absl::enable_if_t<std::is_convertible<compare_result_t<Compare, T, U>,
                                              absl::weak_ordering>::value,
                          int> = 0>
    absl::weak_ordering operator()(const T &lhs, const U &rhs) const {
      // NOTE: if any of these assertions fail, then the comparator does not
      // establish a strict-weak-ordering (see
      // https://en.cppreference.com/w/cpp/named_req/Compare).
      assert(is_self_equivalent(lhs));
      assert(is_self_equivalent(rhs));
      const absl::weak_ordering lhs_comp_rhs = comp()(lhs, rhs);
#ifndef NDEBUG
      const absl::weak_ordering rhs_comp_lhs = comp()(rhs, lhs);
      if (lhs_comp_rhs > 0) {
        assert(rhs_comp_lhs < 0 && "lhs_comp_rhs > 0 -> rhs_comp_lhs < 0");
      } else if (lhs_comp_rhs == 0) {
        assert(rhs_comp_lhs == 0 && "lhs_comp_rhs == 0 -> rhs_comp_lhs == 0");
      } else {
        assert(rhs_comp_lhs > 0 && "lhs_comp_rhs < 0 -> rhs_comp_lhs > 0");
      }
#endif
      return lhs_comp_rhs;
    }
  };
  using type = absl::conditional_t<
      std::is_base_of<BtreeTestOnlyCheckedCompareOptOutBase, Compare>::value,
      Compare, checked_compare>;
};

template <>
struct key_compare_adapter<std::less<std::string>, std::string> {
  using type = StringBtreeDefaultLess;
};

template <>
struct key_compare_adapter<std::greater<std::string>, std::string> {
  using type = StringBtreeDefaultGreater;
};

template <>
struct key_compare_adapter<std::less<absl::string_view>, absl::string_view> {
  using type = StringBtreeDefaultLess;
};

template <>
struct key_compare_adapter<std::greater<absl::string_view>, absl::string_view> {
  using type = StringBtreeDefaultGreater;
};

template <>
struct key_compare_adapter<std::less<absl::Cord>, absl::Cord> {
  using type = StringBtreeDefaultLess;
};

template <>
struct key_compare_adapter<std::greater<absl::Cord>, absl::Cord> {
  using type = StringBtreeDefaultGreater;
};

// Detects an 'absl_btree_prefer_linear_node_search' member. This is
// a protocol used as an opt-in or opt-out of linear search.
//
//  For example, this would be useful for key types that wrap an integer
//  and define their own cheap operator<(). For example:
//
//   class K {
//    public:
//     using absl_btree_prefer_linear_node_search = std::true_type;
//     ...
//    private:
//     friend bool operator<(K a, K b) { return a.k_ < b.k_; }
//     int k_;
//   };
//
//   btree_map<K, V> m;  // Uses linear search
//
// If T has the preference tag, then it has a preference.
// Btree will use the tag's truth value.
template <typename T, typename = void>
struct has_linear_node_search_preference : std::false_type {};
template <typename T, typename = void>
struct prefers_linear_node_search : std::false_type {};
template <typename T>
struct has_linear_node_search_preference<
    T, absl::void_t<typename T::absl_btree_prefer_linear_node_search>>
    : std::true_type {};
template <typename T>
struct prefers_linear_node_search<
    T, absl::void_t<typename T::absl_btree_prefer_linear_node_search>>
    : T::absl_btree_prefer_linear_node_search {};

template <typename Compare, typename Key>
constexpr bool compare_has_valid_result_type() {
  using compare_result_type = compare_result_t<Compare, Key, Key>;
  return std::is_same<compare_result_type, bool>::value ||
         std::is_convertible<compare_result_type, absl::weak_ordering>::value;
}

template <typename original_key_compare, typename value_type>
class map_value_compare {
  template <typename Params>
  friend class btree;

  // Note: this `protected` is part of the API of std::map::value_compare. See
  // https://en.cppreference.com/w/cpp/container/map/value_compare.
 protected:
  explicit map_value_compare(original_key_compare c) : comp(std::move(c)) {}

  original_key_compare comp;  // NOLINT

 public:
  auto operator()(const value_type &lhs, const value_type &rhs) const
      -> decltype(comp(lhs.first, rhs.first)) {
    return comp(lhs.first, rhs.first);
  }
};

template <typename Key, typename Compare, typename Alloc, int TargetNodeSize,
          bool IsMulti, bool IsMap, typename SlotPolicy>
struct common_params : common_policy_traits<SlotPolicy> {
  using original_key_compare = Compare;

  // If Compare is a common comparator for a string-like type, then we adapt it
  // to use heterogeneous lookup and to be a key-compare-to comparator.
  // We also adapt the comparator to diagnose invalid comparators in debug mode.
  // We disable this when `Compare` is invalid in a way that will cause
  // adaptation to fail (having invalid return type) so that we can give a
  // better compilation failure in static_assert_validation. If we don't do
  // this, then there will be cascading compilation failures that are confusing
  // for users.
  using key_compare =
      absl::conditional_t<!compare_has_valid_result_type<Compare, Key>(),
                          Compare,
                          typename key_compare_adapter<Compare, Key>::type>;

  static constexpr bool kIsKeyCompareStringAdapted =
      std::is_same<key_compare, StringBtreeDefaultLess>::value ||
      std::is_same<key_compare, StringBtreeDefaultGreater>::value;
  static constexpr bool kIsKeyCompareTransparent =
      IsTransparent<original_key_compare>::value || kIsKeyCompareStringAdapted;

  // A type which indicates if we have a key-compare-to functor or a plain old
  // key-compare functor.
  using is_key_compare_to = btree_is_key_compare_to<key_compare, Key>;

  using allocator_type = Alloc;
  using key_type = Key;
  using size_type = size_t;
  using difference_type = ptrdiff_t;

  using slot_policy = SlotPolicy;
  using slot_type = typename slot_policy::slot_type;
  using value_type = typename slot_policy::value_type;
  using init_type = typename slot_policy::mutable_value_type;
  using pointer = value_type *;
  using const_pointer = const value_type *;
  using reference = value_type &;
  using const_reference = const value_type &;

  using value_compare =
      absl::conditional_t<IsMap,
                          map_value_compare<original_key_compare, value_type>,
                          original_key_compare>;
  using is_map_container = std::integral_constant<bool, IsMap>;

  // For the given lookup key type, returns whether we can have multiple
  // equivalent keys in the btree. If this is a multi-container, then we can.
  // Otherwise, we can have multiple equivalent keys only if all of the
  // following conditions are met:
  // - The comparator is transparent.
  // - The lookup key type is not the same as key_type.
  // - The comparator is not a StringBtreeDefault{Less,Greater} comparator
  //   that we know has the same equivalence classes for all lookup types.
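  //
  // For illustration, a hypothetical sketch (not part of this header): a
  // transparent comparator that compares stored keys in full but compares a
  // Prefix lookup key against only the leading characters:
  //
  //   struct Prefix { absl::string_view p; };
  //   struct PrefixCompare {
  //     using is_transparent = void;
  //     bool operator()(absl::string_view a, absl::string_view b) const {
  //       return a < b;
  //     }
  //     bool operator()(absl::string_view a, Prefix b) const {
  //       return a.substr(0, b.p.size()) < b.p;
  //     }
  //     bool operator()(Prefix a, absl::string_view b) const {
  //       return a.p < b.substr(0, a.p.size());
  //     }
  //   };
  //
  // Distinct stored keys "apple" and "apricot" are both equivalent to
  // Prefix{"ap"}, so a lookup by Prefix must account for multiple matches
  // even in a non-multi container.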
  template <typename LookupKey>
  constexpr static bool can_have_multiple_equivalent_keys() {
    return IsMulti || (IsTransparent<key_compare>::value &&
                       !std::is_same<LookupKey, Key>::value &&
                       !kIsKeyCompareStringAdapted);
  }

  enum {
    kTargetNodeSize = TargetNodeSize,

    // Upper bound for the available space for slots. This is largest for leaf
    // nodes, which have overhead of at least a pointer + 4 bytes (for storing
    // 3 field_types and an enum).
    kNodeSlotSpace = TargetNodeSize - /*minimum overhead=*/(sizeof(void *) + 4),
  };

  // This is an integral type large enough to hold as many slots as will fit a
  // node of TargetNodeSize bytes.
  using node_count_type =
      absl::conditional_t<(kNodeSlotSpace / sizeof(slot_type) >
                           (std::numeric_limits<uint8_t>::max)()),
                          uint16_t, uint8_t>;  // NOLINT
};

// An adapter class that converts a lower-bound compare into an upper-bound
// compare. Note: there is no need to make a version of this adapter specialized
// for key-compare-to functors because the upper-bound (the first value greater
// than the input) is never an exact match.
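//
// Sketch of why this works: lower_bound with a "less" comparator c returns
// the first position where c(key, k) is false. The adapter below supplies
// c'(key, k) = !(k < key), which is false exactly when k < key, so a
// lower-bound search using c' lands on the first key greater than k, i.e.,
// the upper bound. For keys {1, 2, 2, 3} and k = 2, that is the position of 3.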
template <typename Compare>
struct upper_bound_adapter {
  explicit upper_bound_adapter(const Compare &c) : comp(c) {}
  template <typename K1, typename K2>
  bool operator()(const K1 &a, const K2 &b) const {
    // Returns true when a is not greater than b.
    return !compare_internal::compare_result_as_less_than(comp(b, a));
  }

 private:
  Compare comp;
};

enum class MatchKind : uint8_t { kEq, kNe };

template <typename V, bool IsCompareTo>
struct SearchResult {
  V value;
  MatchKind match;

  static constexpr bool HasMatch() { return true; }
  bool IsEq() const { return match == MatchKind::kEq; }
};

// When we don't use CompareTo, `match` is not present.
// This ensures that callers can't use it accidentally when it provides no
// useful information.
template <typename V>
struct SearchResult<V, false> {
  SearchResult() {}
  explicit SearchResult(V v) : value(v) {}
  SearchResult(V v, MatchKind /*match*/) : value(v) {}

  V value;

  static constexpr bool HasMatch() { return false; }
  static constexpr bool IsEq() { return false; }
};
// A node in the btree. The same node type is used for both internal
// and leaf nodes in the btree, though the nodes are allocated in such a way
// that the children array is only valid in internal nodes.
template <typename Params>
class btree_node {
  using is_key_compare_to = typename Params::is_key_compare_to;
  using field_type = typename Params::node_count_type;
  using allocator_type = typename Params::allocator_type;
  using slot_type = typename Params::slot_type;
  using original_key_compare = typename Params::original_key_compare;

 public:
  using params_type = Params;
  using key_type = typename Params::key_type;
  using value_type = typename Params::value_type;
  using pointer = typename Params::pointer;
  using const_pointer = typename Params::const_pointer;
  using reference = typename Params::reference;
  using const_reference = typename Params::const_reference;
  using key_compare = typename Params::key_compare;
  using size_type = typename Params::size_type;
  using difference_type = typename Params::difference_type;

  // Btree decides whether to use linear node search as follows:
  //   - If the comparator expresses a preference, use that.
  //   - If the key expresses a preference, use that.
  //   - If the key is arithmetic and the comparator is std::less or
  //     std::greater, choose linear.
  //   - Otherwise, choose binary.
  // TODO(ezb): Might make sense to add condition(s) based on node-size.
  using use_linear_search = std::integral_constant<
      bool, has_linear_node_search_preference<original_key_compare>::value
                ? prefers_linear_node_search<original_key_compare>::value
            : has_linear_node_search_preference<key_type>::value
                ? prefers_linear_node_search<key_type>::value
                : std::is_arithmetic<key_type>::value &&
                      (std::is_same<std::less<key_type>,
                                    original_key_compare>::value ||
                       std::is_same<std::greater<key_type>,
                                    original_key_compare>::value)>;
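
  // For example, under these rules a btree_set<int> (arithmetic key compared
  // with std::less<int>) searches within nodes linearly, while a
  // btree_set<std::string> falls back to binary search, absent a preference
  // tag on the comparator or key type.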

  // This class is organized by absl::container_internal::Layout as if it had
  // the following structure:
  //   // A pointer to the node's parent.
  //   btree_node *parent;
  //
  //   // When ABSL_BTREE_ENABLE_GENERATIONS is defined, we also have a
  //   // generation integer in order to check that when iterators are
  //   // used, they haven't been invalidated already. Only the generation on
  //   // the root is used, but we have one on each node because whether a node
  //   // is root or not can change.
  //   uint32_t generation;
  //
  //   // The position of the node in the node's parent.
  //   field_type position;
  //   // The index of the first populated value in `values`.
  //   // TODO(ezb): right now, `start` is always 0. Update insertion/merge
  //   // logic to allow for floating storage within nodes.
  //   field_type start;
  //   // The index after the last populated value in `values`. Currently, this
  //   // is the same as the count of values.
  //   field_type finish;
  //   // The maximum number of values the node can hold. This is an integer in
  //   // [1, kNodeSlots] for root leaf nodes, kNodeSlots for non-root leaf
  //   // nodes, and kInternalNodeMaxCount (as a sentinel value) for internal
  //   // nodes (even though there are still kNodeSlots values in the node).
  //   // TODO(ezb): make max_count use only 4 bits and record log2(capacity)
  //   // to free extra bits for is_root, etc.
  //   field_type max_count;
  //
  //   // The array of values. The capacity is `max_count` for leaf nodes and
  //   // kNodeSlots for internal nodes. Only the values in
  //   // [start, finish) have been initialized and are valid.
  //   slot_type values[max_count];
  //
  //   // The array of child pointers. The keys in children[i] are all less
  //   // than key(i). The keys in children[i + 1] are all greater than key(i).
  //   // There are 0 children for leaf nodes and kNodeSlots + 1 children for
  //   // internal nodes.
  //   btree_node *children[kNodeSlots + 1];
  //
  // This class is only constructed by EmptyNodeType. Normally, pointers to the
  // layout above are allocated, cast to btree_node*, and de-allocated within
  // the btree implementation.
  ~btree_node() = default;
  btree_node(btree_node const &) = delete;
  btree_node &operator=(btree_node const &) = delete;

 protected:
  btree_node() = default;

 private:
  using layout_type =
      absl::container_internal::Layout<btree_node *, uint32_t, field_type,
                                       slot_type, btree_node *>;
  constexpr static size_type SizeWithNSlots(size_type n) {
    return layout_type(
               /*parent*/ 1,
               /*generation*/ BtreeGenerationsEnabled() ? 1 : 0,
               /*position, start, finish, max_count*/ 4,
               /*slots*/ n,
               /*children*/ 0)
        .AllocSize();
  }
  // A lower bound for the overhead of fields other than slots in a leaf node.
  constexpr static size_type MinimumOverhead() {
    return SizeWithNSlots(1) - sizeof(slot_type);
  }

  // Compute how many values we can fit onto a leaf node taking into account
  // padding.
  constexpr static size_type NodeTargetSlots(const size_type begin,
                                             const size_type end) {
    return begin == end ? begin
           : SizeWithNSlots((begin + end) / 2 + 1) >
                   params_type::kTargetNodeSize
               ? NodeTargetSlots(begin, (begin + end) / 2)
               : NodeTargetSlots((begin + end) / 2 + 1, end);
  }

  constexpr static size_type kTargetNodeSize = params_type::kTargetNodeSize;
  constexpr static size_type kNodeTargetSlots =
      NodeTargetSlots(0, kTargetNodeSize);
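  // As a rough illustration (an assumption-laden sketch, not a guarantee):
  // with 64-bit pointers, generations disabled, a one-byte field_type, and
  // int32_t slots, a leaf carries 8 bytes of parent pointer plus 4 bytes of
  // field_type metadata, so for kTargetNodeSize = 256 the search above
  // settles on (256 - 12) / 4 = 61 slots per leaf node.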

  // We need a minimum of 3 slots per internal node in order to perform
  // splitting (1 value for the two nodes involved in the split and 1 value
  // propagated to the parent as the delimiter for the split). For performance
  // reasons, we don't allow 3 slots-per-node due to bad worst case occupancy of
  // 1/3 (for a node, not a b-tree).
  constexpr static size_type kMinNodeSlots = 4;

  constexpr static size_type kNodeSlots =
      kNodeTargetSlots >= kMinNodeSlots ? kNodeTargetSlots : kMinNodeSlots;

  // The node is internal (i.e. is not a leaf node) if and only if `max_count`
  // has this value.
  constexpr static field_type kInternalNodeMaxCount = 0;

  constexpr static layout_type Layout(const size_type slot_count,
                                      const size_type child_count) {
    return layout_type(
        /*parent*/ 1,
        /*generation*/ BtreeGenerationsEnabled() ? 1 : 0,
        /*position, start, finish, max_count*/ 4,
        /*slots*/ slot_count,
        /*children*/ child_count);
  }
  // Leaves can have fewer than kNodeSlots values.
636   constexpr static layout_type LeafLayout(
637       const size_type slot_count = kNodeSlots) {
638     return Layout(slot_count, 0);
639   }
640   constexpr static layout_type InternalLayout() {
641     return Layout(kNodeSlots, kNodeSlots + 1);
642   }
643   constexpr static size_type LeafSize(const size_type slot_count = kNodeSlots) {
644     return LeafLayout(slot_count).AllocSize();
645   }
646   constexpr static size_type InternalSize() {
647     return InternalLayout().AllocSize();
648   }
649 
650   constexpr static size_type Alignment() {
651     static_assert(LeafLayout(1).Alignment() == InternalLayout().Alignment(),
652                   "Alignment of all nodes must be equal.");
653     return InternalLayout().Alignment();
654   }
655 
656   // N is the index of the type in the Layout definition.
657   // ElementType<N> is the Nth type in the Layout definition.
658   template <size_type N>
659   inline typename layout_type::template ElementType<N> *GetField() {
660     // We assert that we don't read from values that aren't there.
661     assert(N < 4 || is_internal());
662     return InternalLayout().template Pointer<N>(reinterpret_cast<char *>(this));
663   }
664   template <size_type N>
665   inline const typename layout_type::template ElementType<N> *GetField() const {
666     assert(N < 4 || is_internal());
667     return InternalLayout().template Pointer<N>(
668         reinterpret_cast<const char *>(this));
669   }
670   void set_parent(btree_node *p) { *GetField<0>() = p; }
671   field_type &mutable_finish() { return GetField<2>()[2]; }
672   slot_type *slot(size_type i) { return &GetField<3>()[i]; }
673   slot_type *start_slot() { return slot(start()); }
674   slot_type *finish_slot() { return slot(finish()); }
675   const slot_type *slot(size_type i) const { return &GetField<3>()[i]; }
676   void set_position(field_type v) { GetField<2>()[0] = v; }
677   void set_start(field_type v) { GetField<2>()[1] = v; }
678   void set_finish(field_type v) { GetField<2>()[2] = v; }
679   // This method is only called by the node init methods.
680   void set_max_count(field_type v) { GetField<2>()[3] = v; }
681 
682  public:
683   // Whether this is a leaf node or not. This value doesn't change after the
684   // node is created.
685   bool is_leaf() const { return GetField<2>()[3] != kInternalNodeMaxCount; }
686   // Whether this is an internal node or not. This value doesn't change after
687   // the node is created.
688   bool is_internal() const { return !is_leaf(); }
689 
690   // Getter for the position of this node in its parent.
691   field_type position() const { return GetField<2>()[0]; }
692 
693   // Getter for the offset of the first value in the `values` array.
694   field_type start() const {
695     // TODO(ezb): when floating storage is implemented, return GetField<2>()[1];
696     assert(GetField<2>()[1] == 0);
697     return 0;
698   }
699 
700   // Getter for the offset after the last value in the `values` array.
701   field_type finish() const { return GetField<2>()[2]; }
702 
703   // Getters for the number of values stored in this node.
704   field_type count() const {
705     assert(finish() >= start());
706     return finish() - start();
707   }
708   field_type max_count() const {
709     // Internal nodes have max_count==kInternalNodeMaxCount.
710     // Leaf nodes have max_count in [1, kNodeSlots].
711     const field_type max_count = GetField<2>()[3];
712     return max_count == field_type{kInternalNodeMaxCount}
713                ? field_type{kNodeSlots}
714                : max_count;
715   }
716 
717   // Getter for the parent of this node.
718   btree_node *parent() const { return *GetField<0>(); }
719   // Getter for whether the node is the root of the tree. The parent of the
720   // root of the tree is the leftmost node in the tree which is guaranteed to
721   // be a leaf.
722   bool is_root() const { return parent()->is_leaf(); }
723   void make_root() {
724     assert(parent()->is_root());
725     set_generation(parent()->generation());
726     set_parent(parent()->parent());
727   }
728 
729   // Gets the root node's generation integer, which is the one used by the tree.
730   uint32_t *get_root_generation() const {
731     assert(BtreeGenerationsEnabled());
732     const btree_node *curr = this;
733     for (; !curr->is_root(); curr = curr->parent()) continue;
734     return const_cast<uint32_t *>(&curr->GetField<1>()[0]);
735   }
736 
737   // Returns the generation for iterator validation.
738   uint32_t generation() const {
739     return BtreeGenerationsEnabled() ? *get_root_generation() : 0;
740   }
741   // Updates generation. Should only be called on a root node or during node
742   // initialization.
743   void set_generation(uint32_t generation) {
744     if (BtreeGenerationsEnabled()) GetField<1>()[0] = generation;
745   }
746   // Updates the generation. We do this whenever the node is mutated.
747   void next_generation() {
748     if (BtreeGenerationsEnabled()) ++*get_root_generation();
749   }
750 
751   // Getters for the key/value at position i in the node.
752   const key_type &key(size_type i) const { return params_type::key(slot(i)); }
753   reference value(size_type i) { return params_type::element(slot(i)); }
754   const_reference value(size_type i) const {
755     return params_type::element(slot(i));
756   }
757 
758   // Getters/setter for the child at position i in the node.
759   btree_node *child(field_type i) const { return GetField<4>()[i]; }
760   btree_node *start_child() const { return child(start()); }
761   btree_node *&mutable_child(field_type i) { return GetField<4>()[i]; }
762   void clear_child(field_type i) {
763     absl::container_internal::SanitizerPoisonObject(&mutable_child(i));
764   }
765   void set_child_noupdate_position(field_type i, btree_node *c) {
766     absl::container_internal::SanitizerUnpoisonObject(&mutable_child(i));
767     mutable_child(i) = c;
768   }
769   void set_child(field_type i, btree_node *c) {
770     set_child_noupdate_position(i, c);
771     c->set_position(i);
772   }
773   void init_child(field_type i, btree_node *c) {
774     set_child(i, c);
775     c->set_parent(this);
776   }
777 
778   // Returns the position of the first value whose key is not less than k.
779   template <typename K>
780   SearchResult<size_type, is_key_compare_to::value> lower_bound(
781       const K &k, const key_compare &comp) const {
782     return use_linear_search::value ? linear_search(k, comp)
783                                     : binary_search(k, comp);
784   }
785   // Returns the position of the first value whose key is greater than k.
786   template <typename K>
787   size_type upper_bound(const K &k, const key_compare &comp) const {
788     auto upper_compare = upper_bound_adapter<key_compare>(comp);
789     return use_linear_search::value ? linear_search(k, upper_compare).value
790                                     : binary_search(k, upper_compare).value;
791   }
792 
793   template <typename K, typename Compare>
794   SearchResult<size_type, btree_is_key_compare_to<Compare, key_type>::value>
795   linear_search(const K &k, const Compare &comp) const {
796     return linear_search_impl(k, start(), finish(), comp,
797                               btree_is_key_compare_to<Compare, key_type>());
798   }
799 
800   template <typename K, typename Compare>
801   SearchResult<size_type, btree_is_key_compare_to<Compare, key_type>::value>
802   binary_search(const K &k, const Compare &comp) const {
803     return binary_search_impl(k, start(), finish(), comp,
804                               btree_is_key_compare_to<Compare, key_type>());
805   }
806 
807   // Returns the position of the first value whose key is not less than k using
808   // linear search performed using plain compare.
809   template <typename K, typename Compare>
810   SearchResult<size_type, false> linear_search_impl(
811       const K &k, size_type s, const size_type e, const Compare &comp,
812       std::false_type /* IsCompareTo */) const {
813     while (s < e) {
814       if (!comp(key(s), k)) {
815         break;
816       }
817       ++s;
818     }
819     return SearchResult<size_type, false>{s};
820   }
821 
822   // Returns the position of the first value whose key is not less than k using
823   // linear search performed using compare-to.
824   template <typename K, typename Compare>
825   SearchResult<size_type, true> linear_search_impl(
826       const K &k, size_type s, const size_type e, const Compare &comp,
827       std::true_type /* IsCompareTo */) const {
828     while (s < e) {
829       const absl::weak_ordering c = comp(key(s), k);
830       if (c == 0) {
831         return {s, MatchKind::kEq};
832       } else if (c > 0) {
833         break;
834       }
835       ++s;
836     }
837     return {s, MatchKind::kNe};
838   }
839 
840   // Returns the position of the first value whose key is not less than k using
841   // binary search performed using plain compare.
842   template <typename K, typename Compare>
843   SearchResult<size_type, false> binary_search_impl(
844       const K &k, size_type s, size_type e, const Compare &comp,
845       std::false_type /* IsCompareTo */) const {
846     while (s != e) {
847       const size_type mid = (s + e) >> 1;
848       if (comp(key(mid), k)) {
849         s = mid + 1;
850       } else {
851         e = mid;
852       }
853     }
854     return SearchResult<size_type, false>{s};
855   }
856 
857   // Returns the position of the first value whose key is not less than k using
858   // binary search performed using compare-to.
859   template <typename K, typename CompareTo>
860   SearchResult<size_type, true> binary_search_impl(
861       const K &k, size_type s, size_type e, const CompareTo &comp,
862       std::true_type /* IsCompareTo */) const {
863     if (params_type::template can_have_multiple_equivalent_keys<K>()) {
864       MatchKind exact_match = MatchKind::kNe;
865       while (s != e) {
866         const size_type mid = (s + e) >> 1;
867         const absl::weak_ordering c = comp(key(mid), k);
868         if (c < 0) {
869           s = mid + 1;
870         } else {
871           e = mid;
872           if (c == 0) {
873             // Need to return the first value whose key is not less than k,
874             // which requires continuing the binary search if there could be
875             // multiple equivalent keys.
876             exact_match = MatchKind::kEq;
877           }
878         }
879       }
880       return {s, exact_match};
881     } else {  // Can't have multiple equivalent keys.
882       while (s != e) {
883         const size_type mid = (s + e) >> 1;
884         const absl::weak_ordering c = comp(key(mid), k);
885         if (c < 0) {
886           s = mid + 1;
887         } else if (c > 0) {
888           e = mid;
889         } else {
890           return {mid, MatchKind::kEq};
891         }
892       }
893       return {s, MatchKind::kNe};
894     }
895   }
896 
897   // Returns whether key i is ordered correctly with respect to the other keys
898   // in the node. The motivation here is to detect comparators that violate
899   // transitivity. Note: we only do comparisons of keys on this node rather than
900   // the whole tree so that this is constant time.
901   template <typename Compare>
902   bool is_ordered_correctly(field_type i, const Compare &comp) const {
903     if (std::is_base_of<BtreeTestOnlyCheckedCompareOptOutBase,
904                         Compare>::value ||
905         params_type::kIsKeyCompareStringAdapted) {
906       return true;
907     }
908 
909     const auto compare = [&](field_type a, field_type b) {
910       const absl::weak_ordering cmp =
911           compare_internal::do_three_way_comparison(comp, key(a), key(b));
912       return cmp < 0 ? -1 : cmp > 0 ? 1 : 0;
913     };
914     int cmp = -1;
915     constexpr bool kCanHaveEquivKeys =
916         params_type::template can_have_multiple_equivalent_keys<key_type>();
917     for (field_type j = start(); j < finish(); ++j) {
918       if (j == i) {
919         if (cmp > 0) return false;
920         continue;
921       }
922       int new_cmp = compare(j, i);
923       if (new_cmp < cmp || (!kCanHaveEquivKeys && new_cmp == 0)) return false;
924       cmp = new_cmp;
925     }
926     return true;
927   }
928 
929   // Emplaces a value at position i, shifting all existing values and
930   // children at positions >= i to the right by 1.
931   template <typename... Args>
932   void emplace_value(field_type i, allocator_type *alloc, Args &&...args);
933 
934   // Removes the values at positions [i, i + to_erase), shifting all existing
935   // values and children after that range to the left by to_erase. Clears all
936   // children between [i, i + to_erase).
937   void remove_values(field_type i, field_type to_erase, allocator_type *alloc);
938 
939   // Rebalances a node with its right sibling.
940   void rebalance_right_to_left(field_type to_move, btree_node *right,
941                                allocator_type *alloc);
942   void rebalance_left_to_right(field_type to_move, btree_node *right,
943                                allocator_type *alloc);
944 
945   // Splits a node, moving a portion of the node's values to its right sibling.
946   void split(int insert_position, btree_node *dest, allocator_type *alloc);
947 
948   // Merges a node with its right sibling, moving all of the values and the
949   // delimiting key in the parent node onto itself, and deleting the src node.
950   void merge(btree_node *src, allocator_type *alloc);
951 
952   // Node allocation/deletion routines.
953   void init_leaf(field_type position, field_type max_count,
954                  btree_node *parent) {
955     set_generation(0);
956     set_parent(parent);
957     set_position(position);
958     set_start(0);
959     set_finish(0);
960     set_max_count(max_count);
961     absl::container_internal::SanitizerPoisonMemoryRegion(
962         start_slot(), max_count * sizeof(slot_type));
963   }
964   void init_internal(field_type position, btree_node *parent) {
965     init_leaf(position, kNodeSlots, parent);
966     // Set `max_count` to a sentinel value to indicate that this node is
967     // internal.
968     set_max_count(kInternalNodeMaxCount);
969     absl::container_internal::SanitizerPoisonMemoryRegion(
970         &mutable_child(start()), (kNodeSlots + 1) * sizeof(btree_node *));
971   }
972 
973   static void deallocate(const size_type size, btree_node *node,
974                          allocator_type *alloc) {
975     absl::container_internal::SanitizerUnpoisonMemoryRegion(node, size);
976     absl::container_internal::Deallocate<Alignment()>(alloc, node, size);
977   }
978 
979   // Deletes a node and all of its children.
980   static void clear_and_delete(btree_node *node, allocator_type *alloc);
981 
982  private:
983   template <typename... Args>
984   void value_init(const field_type i, allocator_type *alloc, Args &&...args) {
985     next_generation();
986     absl::container_internal::SanitizerUnpoisonObject(slot(i));
987     params_type::construct(alloc, slot(i), std::forward<Args>(args)...);
988   }
989   void value_destroy(const field_type i, allocator_type *alloc) {
990     next_generation();
991     params_type::destroy(alloc, slot(i));
992     absl::container_internal::SanitizerPoisonObject(slot(i));
993   }
994   void value_destroy_n(const field_type i, const field_type n,
995                        allocator_type *alloc) {
996     next_generation();
997     for (slot_type *s = slot(i), *end = slot(i + n); s != end; ++s) {
998       params_type::destroy(alloc, s);
999       absl::container_internal::SanitizerPoisonObject(s);
1000     }
1001   }
1002 
1003   static void transfer(slot_type *dest, slot_type *src, allocator_type *alloc) {
1004     absl::container_internal::SanitizerUnpoisonObject(dest);
1005     params_type::transfer(alloc, dest, src);
1006     absl::container_internal::SanitizerPoisonObject(src);
1007   }
1008 
1009   // Transfers value from slot `src_i` in `src_node` to slot `dest_i` in `this`.
1010   void transfer(const size_type dest_i, const size_type src_i,
1011                 btree_node *src_node, allocator_type *alloc) {
1012     next_generation();
1013     transfer(slot(dest_i), src_node->slot(src_i), alloc);
1014   }
1015 
1016   // Transfers `n` values starting at value `src_i` in `src_node` into the
1017   // values starting at value `dest_i` in `this`.
1018   void transfer_n(const size_type n, const size_type dest_i,
1019                   const size_type src_i, btree_node *src_node,
1020                   allocator_type *alloc) {
1021     next_generation();
1022     for (slot_type *src = src_node->slot(src_i), *end = src + n,
1023                    *dest = slot(dest_i);
1024          src != end; ++src, ++dest) {
1025       transfer(dest, src, alloc);
1026     }
1027   }
1028 
1029   // Same as above, except that we start at the end and work our way to the
1030   // beginning.
1031   void transfer_n_backward(const size_type n, const size_type dest_i,
1032                            const size_type src_i, btree_node *src_node,
1033                            allocator_type *alloc) {
1034     next_generation();
1035     for (slot_type *src = src_node->slot(src_i + n), *end = src - n,
1036                    *dest = slot(dest_i + n);
1037          src != end; --src, --dest) {
1038       // If we modified the loop index calculations above to avoid the -1s here,
1039       // it would result in UB in the computation of `end` (and possibly `src`
1040       // as well, if n == 0), since slot() is effectively an array index and it
1041       // is UB to compute the address of any out-of-bounds array element except
1042       // for one-past-the-end.
1043       transfer(dest - 1, src - 1, alloc);
1044     }
1045   }
1046 
1047   template <typename P>
1048   friend class btree;
1049   template <typename N, typename R, typename P>
1050   friend class btree_iterator;
1051   friend class BtreeNodePeer;
1052   friend struct btree_access;
1053 };
1054 
1055 template <typename Node>
1056 bool AreNodesFromSameContainer(const Node *node_a, const Node *node_b) {
1057   // If either node is null, then give up on checking whether they're from the
1058   // same container. (If exactly one is null, then we'll trigger the
1059   // default-constructed assert in Equals.)
1060   if (node_a == nullptr || node_b == nullptr) return true;
1061   while (!node_a->is_root()) node_a = node_a->parent();
1062   while (!node_b->is_root()) node_b = node_b->parent();
1063   return node_a == node_b;
1064 }
1065 
1066 class btree_iterator_generation_info_enabled {
1067  public:
1068   explicit btree_iterator_generation_info_enabled(uint32_t g)
1069       : generation_(g) {}
1070 
1071   // Updates the generation. For use internally right before we return an
1072   // iterator to the user.
1073   template <typename Node>
1074   void update_generation(const Node *node) {
1075     if (node != nullptr) generation_ = node->generation();
1076   }
1077   uint32_t generation() const { return generation_; }
1078 
1079   template <typename Node>
1080   void assert_valid_generation(const Node *node) const {
1081     if (node != nullptr && node->generation() != generation_) {
1082       ABSL_INTERNAL_LOG(
1083           FATAL,
1084           "Attempting to use an invalidated iterator. The corresponding b-tree "
1085           "container has been mutated since this iterator was constructed.");
1086     }
1087   }
1088 
1089  private:
1090   // Used to check that the iterator hasn't been invalidated.
1091   uint32_t generation_;
1092 };
1093 
1094 class btree_iterator_generation_info_disabled {
1095  public:
1096   explicit btree_iterator_generation_info_disabled(uint32_t) {}
1097   static void update_generation(const void *) {}
1098   static uint32_t generation() { return 0; }
1099   static void assert_valid_generation(const void *) {}
1100 };
1101 
1102 #ifdef ABSL_BTREE_ENABLE_GENERATIONS
1103 using btree_iterator_generation_info = btree_iterator_generation_info_enabled;
1104 #else
1105 using btree_iterator_generation_info = btree_iterator_generation_info_disabled;
1106 #endif
1107 
1108 template <typename Node, typename Reference, typename Pointer>
1109 class btree_iterator : private btree_iterator_generation_info {
1110   using field_type = typename Node::field_type;
1111   using key_type = typename Node::key_type;
1112   using size_type = typename Node::size_type;
1113   using params_type = typename Node::params_type;
1114   using is_map_container = typename params_type::is_map_container;
1115 
1116   using node_type = Node;
1117   using normal_node = typename std::remove_const<Node>::type;
1118   using const_node = const Node;
1119   using normal_pointer = typename params_type::pointer;
1120   using normal_reference = typename params_type::reference;
1121   using const_pointer = typename params_type::const_pointer;
1122   using const_reference = typename params_type::const_reference;
1123   using slot_type = typename params_type::slot_type;
1124 
1125   // In sets, all iterators are const.
1126   using iterator = absl::conditional_t<
1127       is_map_container::value,
1128       btree_iterator<normal_node, normal_reference, normal_pointer>,
1129       btree_iterator<normal_node, const_reference, const_pointer>>;
1130   using const_iterator =
1131       btree_iterator<const_node, const_reference, const_pointer>;
1132 
1133  public:
1134   // These aliases are public for std::iterator_traits.
1135   using difference_type = typename Node::difference_type;
1136   using value_type = typename params_type::value_type;
1137   using pointer = Pointer;
1138   using reference = Reference;
1139   using iterator_category = std::bidirectional_iterator_tag;
1140 
1141   btree_iterator() : btree_iterator(nullptr, -1) {}
1142   explicit btree_iterator(Node *n) : btree_iterator(n, n->start()) {}
1143   btree_iterator(Node *n, int p)
1144       : btree_iterator_generation_info(n != nullptr ? n->generation()
1145                                                     : ~uint32_t{}),
1146         node_(n),
1147         position_(p) {}
1148 
1149   // NOTE: this SFINAE allows for implicit conversions from iterator to
1150   // const_iterator, but it specifically avoids hiding the copy constructor so
1151   // that the trivial one will be used when possible.
1152   template <typename N, typename R, typename P,
1153             absl::enable_if_t<
1154                 std::is_same<btree_iterator<N, R, P>, iterator>::value &&
1155                     std::is_same<btree_iterator, const_iterator>::value,
1156                 int> = 0>
1157   btree_iterator(const btree_iterator<N, R, P> other)  // NOLINT
1158       : btree_iterator_generation_info(other),
1159         node_(other.node_),
1160         position_(other.position_) {}
1161 
1162   bool operator==(const iterator &other) const {
1163     return Equals(other);
1164   }
1165   bool operator==(const const_iterator &other) const {
1166     return Equals(other);
1167   }
1168   bool operator!=(const iterator &other) const {
1169     return !Equals(other);
1170   }
1171   bool operator!=(const const_iterator &other) const {
1172     return !Equals(other);
1173   }
1174 
1175   // Returns n such that n calls to ++other yields *this.
1176   // Precondition: n exists.
1177   difference_type operator-(const_iterator other) const {
    if (node_ == other.node_) {
      if (node_->is_leaf()) return position_ - other.position_;
      if (position_ == other.position_) return 0;
    }
    return distance_slow(other);
  }

  // Accessors for the key/value the iterator is pointing at.
  reference operator*() const {
    ABSL_HARDENING_ASSERT(node_ != nullptr);
    assert_valid_generation(node_);
    ABSL_HARDENING_ASSERT(position_ >= node_->start());
    if (position_ >= node_->finish()) {
      ABSL_HARDENING_ASSERT(!IsEndIterator() && "Dereferencing end() iterator");
      ABSL_HARDENING_ASSERT(position_ < node_->finish());
    }
    return node_->value(static_cast<field_type>(position_));
  }
  pointer operator->() const { return &operator*(); }

  btree_iterator &operator++() {
    increment();
    return *this;
  }
  btree_iterator &operator--() {
    decrement();
    return *this;
  }
  btree_iterator operator++(int) {
    btree_iterator tmp = *this;
    ++*this;
    return tmp;
  }
  btree_iterator operator--(int) {
    btree_iterator tmp = *this;
    --*this;
    return tmp;
  }

 private:
  friend iterator;
  friend const_iterator;
  template <typename Params>
  friend class btree;
  template <typename Tree>
  friend class btree_container;
  template <typename Tree>
  friend class btree_set_container;
  template <typename Tree>
  friend class btree_map_container;
  template <typename Tree>
  friend class btree_multiset_container;
  template <typename TreeType, typename CheckerType>
  friend class base_checker;
  friend struct btree_access;

  // This SFINAE allows explicit conversions from const_iterator to
  // iterator, but also avoids hiding the copy constructor.
  // NOTE: the const_cast is safe because this constructor is only called by
  // non-const methods and the container owns the nodes.
  template <typename N, typename R, typename P,
            absl::enable_if_t<
                std::is_same<btree_iterator<N, R, P>, const_iterator>::value &&
                    std::is_same<btree_iterator, iterator>::value,
                int> = 0>
  explicit btree_iterator(const btree_iterator<N, R, P> other)
      : btree_iterator_generation_info(other.generation()),
        node_(const_cast<node_type *>(other.node_)),
        position_(other.position_) {}

  bool Equals(const const_iterator other) const {
    ABSL_HARDENING_ASSERT(((node_ == nullptr && other.node_ == nullptr) ||
                           (node_ != nullptr && other.node_ != nullptr)) &&
                          "Comparing default-constructed iterator with "
                          "non-default-constructed iterator.");
    // Note: we use assert instead of ABSL_HARDENING_ASSERT here because this
    // changes the complexity of Equals from O(1) to O(log(N) + log(M)) where
    // N/M are sizes of the containers containing node_/other.node_.
    assert(AreNodesFromSameContainer(node_, other.node_) &&
           "Comparing iterators from different containers.");
    assert_valid_generation(node_);
    other.assert_valid_generation(other.node_);
    return node_ == other.node_ && position_ == other.position_;
  }

  bool IsEndIterator() const {
    if (position_ != node_->finish()) return false;
    node_type *node = node_;
    while (!node->is_root()) {
      if (node->position() != node->parent()->finish()) return false;
      node = node->parent();
    }
    return true;
  }

  // Returns n such that n calls to ++other yields *this.
  // Precondition: n exists && (this->node_ != other.node_ ||
  // !this->node_->is_leaf() || this->position_ != other.position_).
  difference_type distance_slow(const_iterator other) const;

  // Increment/decrement the iterator.
  void increment() {
    assert_valid_generation(node_);
    if (node_->is_leaf() && ++position_ < node_->finish()) {
      return;
    }
    increment_slow();
  }
  void increment_slow();

  void decrement() {
    assert_valid_generation(node_);
    if (node_->is_leaf() && --position_ >= node_->start()) {
      return;
    }
    decrement_slow();
  }
  void decrement_slow();

  const key_type &key() const {
    return node_->key(static_cast<size_type>(position_));
  }
  decltype(std::declval<Node *>()->slot(0)) slot() {
    return node_->slot(static_cast<size_type>(position_));
  }

  void update_generation() {
    btree_iterator_generation_info::update_generation(node_);
  }

  // The node in the tree the iterator is pointing at.
  Node *node_;
  // The position within the node of the tree the iterator is pointing at.
  // NOTE: this is an int rather than a field_type because iterators can point
  // to invalid positions (such as -1) in certain circumstances.
  int position_;
};
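
// Example (an illustrative sketch, not part of this header): these iterators
// are handed out by the public wrappers in absl/container/btree_{set,map}.h,
// so typical user code exercises increment()/decrement() and their slow paths
// indirectly:
//
//   absl::btree_set<int> s = {1, 2, 3};
//   auto it = s.begin();
//   ++it;         // fast path: moves within a leaf node
//   --it;         // slow path only runs when crossing a node boundary
//   int v = *it;  // hardened dereference via operator*()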

template <typename Params>
class btree {
  using node_type = btree_node<Params>;
  using is_key_compare_to = typename Params::is_key_compare_to;
  using field_type = typename node_type::field_type;

  // We use a static empty node for the root/leftmost/rightmost of empty btrees
  // in order to avoid branching in begin()/end().
  struct EmptyNodeType : node_type {
    using field_type = typename node_type::field_type;
    node_type *parent;
#ifdef ABSL_BTREE_ENABLE_GENERATIONS
    uint32_t generation = 0;
#endif
    field_type position = 0;
    field_type start = 0;
    field_type finish = 0;
    // max_count must be != kInternalNodeMaxCount (so that this node is regarded
    // as a leaf node). max_count() is never called when the tree is empty.
    field_type max_count = node_type::kInternalNodeMaxCount + 1;

    constexpr EmptyNodeType() : parent(this) {}
  };

  static node_type *EmptyNode() {
    alignas(node_type::Alignment()) static constexpr EmptyNodeType empty_node;
    return const_cast<EmptyNodeType *>(&empty_node);
  }

  enum : uint32_t {
    kNodeSlots = node_type::kNodeSlots,
    kMinNodeValues = kNodeSlots / 2,
  };

  struct node_stats {
    using size_type = typename Params::size_type;

    node_stats(size_type l, size_type i) : leaf_nodes(l), internal_nodes(i) {}

    node_stats &operator+=(const node_stats &other) {
      leaf_nodes += other.leaf_nodes;
      internal_nodes += other.internal_nodes;
      return *this;
    }

    size_type leaf_nodes;
    size_type internal_nodes;
  };

 public:
  using key_type = typename Params::key_type;
  using value_type = typename Params::value_type;
  using size_type = typename Params::size_type;
  using difference_type = typename Params::difference_type;
  using key_compare = typename Params::key_compare;
  using original_key_compare = typename Params::original_key_compare;
  using value_compare = typename Params::value_compare;
  using allocator_type = typename Params::allocator_type;
  using reference = typename Params::reference;
  using const_reference = typename Params::const_reference;
  using pointer = typename Params::pointer;
  using const_pointer = typename Params::const_pointer;
  using iterator =
      typename btree_iterator<node_type, reference, pointer>::iterator;
  using const_iterator = typename iterator::const_iterator;
  using reverse_iterator = std::reverse_iterator<iterator>;
  using const_reverse_iterator = std::reverse_iterator<const_iterator>;
  using node_handle_type = node_handle<Params, Params, allocator_type>;

  // Internal types made public for use by btree_container types.
  using params_type = Params;
  using slot_type = typename Params::slot_type;

 private:
  // Copies or moves (depending on the template parameter) the values in
  // other into this btree in their order in other. This btree must be empty
  // before this method is called. This method is used in copy construction,
  // copy assignment, and move assignment.
  template <typename Btree>
  void copy_or_move_values_in_order(Btree &other);

  // Validates that various assumptions/requirements are true at compile time.
  constexpr static bool static_assert_validation();

 public:
  btree(const key_compare &comp, const allocator_type &alloc)
      : root_(EmptyNode()), rightmost_(comp, alloc, EmptyNode()), size_(0) {}

  btree(const btree &other) : btree(other, other.allocator()) {}
  btree(const btree &other, const allocator_type &alloc)
      : btree(other.key_comp(), alloc) {
    copy_or_move_values_in_order(other);
  }
  btree(btree &&other) noexcept
      : root_(absl::exchange(other.root_, EmptyNode())),
        rightmost_(std::move(other.rightmost_)),
        size_(absl::exchange(other.size_, 0u)) {
    other.mutable_rightmost() = EmptyNode();
  }
  btree(btree &&other, const allocator_type &alloc)
      : btree(other.key_comp(), alloc) {
    if (alloc == other.allocator()) {
      swap(other);
    } else {
      // Move values from `other` one at a time when allocators are different.
      copy_or_move_values_in_order(other);
    }
  }

  ~btree() {
    // Put static_asserts in destructor to avoid triggering them before the type
    // is complete.
    static_assert(static_assert_validation(), "This call must be elided.");
    clear();
  }

  // Assign the contents of other to *this.
  btree &operator=(const btree &other);
  btree &operator=(btree &&other) noexcept;

  iterator begin() { return iterator(leftmost()); }
  const_iterator begin() const { return const_iterator(leftmost()); }
  iterator end() { return iterator(rightmost(), rightmost()->finish()); }
  const_iterator end() const {
    return const_iterator(rightmost(), rightmost()->finish());
  }
  reverse_iterator rbegin() { return reverse_iterator(end()); }
  const_reverse_iterator rbegin() const {
    return const_reverse_iterator(end());
  }
  reverse_iterator rend() { return reverse_iterator(begin()); }
  const_reverse_iterator rend() const {
    return const_reverse_iterator(begin());
  }

  // Finds the first element whose key is not less than `key`.
  template <typename K>
  iterator lower_bound(const K &key) {
    return internal_end(internal_lower_bound(key).value);
  }
  template <typename K>
  const_iterator lower_bound(const K &key) const {
    return internal_end(internal_lower_bound(key).value);
  }

  // Finds the first element whose key is not less than `key` and also returns
  // whether that element is equal to `key`.
  template <typename K>
  std::pair<iterator, bool> lower_bound_equal(const K &key) const;

  // Finds the first element whose key is greater than `key`.
  template <typename K>
  iterator upper_bound(const K &key) {
    return internal_end(internal_upper_bound(key));
  }
  template <typename K>
  const_iterator upper_bound(const K &key) const {
    return internal_end(internal_upper_bound(key));
  }

  // Finds the range of values which compare equal to key. The first member of
  // the returned pair is equal to lower_bound(key). The second member of the
  // pair is equal to upper_bound(key).
  template <typename K>
  std::pair<iterator, iterator> equal_range(const K &key);
  template <typename K>
  std::pair<const_iterator, const_iterator> equal_range(const K &key) const {
    return const_cast<btree *>(this)->equal_range(key);
  }

  // Inserts a value into the btree only if it does not already exist. The
  // boolean return value indicates whether insertion succeeded or failed.
  // Requirement: if `key` already exists in the btree, does not consume `args`.
  // Requirement: `key` is never referenced after consuming `args`.
  template <typename K, typename... Args>
  std::pair<iterator, bool> insert_unique(const K &key, Args &&...args);

  // Inserts with hint. Checks to see if the value should be placed immediately
  // before `position` in the tree. If so, then the insertion will take
  // amortized constant time. If not, the insertion will take amortized
  // logarithmic time as if a call to insert_unique() were made.
  // Requirement: if `key` already exists in the btree, does not consume `args`.
  // Requirement: `key` is never referenced after consuming `args`.
  template <typename K, typename... Args>
  std::pair<iterator, bool> insert_hint_unique(iterator position, const K &key,
                                               Args &&...args);

  // Insert a range of values into the btree.
  // Note: the first overload avoids constructing a value_type if the key
  // already exists in the btree.
  template <typename InputIterator,
            typename = decltype(std::declval<const key_compare &>()(
                params_type::key(*std::declval<InputIterator>()),
                std::declval<const key_type &>()))>
  void insert_iterator_unique(InputIterator b, InputIterator e, int);
  // We need the second overload for cases in which we need to construct a
  // value_type in order to compare it with the keys already in the btree.
  template <typename InputIterator>
  void insert_iterator_unique(InputIterator b, InputIterator e, char);

  // Inserts a value into the btree.
  template <typename ValueType>
  iterator insert_multi(const key_type &key, ValueType &&v);

  // Inserts a value into the btree.
  template <typename ValueType>
  iterator insert_multi(ValueType &&v) {
    return insert_multi(params_type::key(v), std::forward<ValueType>(v));
  }

  // Inserts with hint. Checks to see if the value should be placed immediately
  // before `position` in the tree. If so, then the insertion will take
  // amortized constant time. If not, the insertion will take amortized
  // logarithmic time as if a call to insert_multi(v) were made.
  template <typename ValueType>
  iterator insert_hint_multi(iterator position, ValueType &&v);

  // Insert a range of values into the btree.
  template <typename InputIterator>
  void insert_iterator_multi(InputIterator b, InputIterator e);

  // Erase the specified iterator from the btree. The iterator must be valid
  // (i.e. not equal to end()). Returns an iterator pointing to the value just
  // after the one that was erased (or end() if none exists).
  // Requirement: does not read the value at `*iter`.
  iterator erase(iterator iter);

  // Erases range. Returns the number of keys erased and an iterator pointing
  // to the element after the last erased element.
  std::pair<size_type, iterator> erase_range(iterator begin, iterator end);

  // Finds an element with key equivalent to `key` or returns `end()` if `key`
  // is not present.
  template <typename K>
  iterator find(const K &key) {
    return internal_end(internal_find(key));
  }
  template <typename K>
  const_iterator find(const K &key) const {
    return internal_end(internal_find(key));
  }

  // Clear the btree, deleting all of the values it contains.
  void clear();

  // Swaps the contents of `this` and `other`.
  void swap(btree &other);

  const key_compare &key_comp() const noexcept {
    return rightmost_.template get<0>();
  }
  template <typename K1, typename K2>
  bool compare_keys(const K1 &a, const K2 &b) const {
    return compare_internal::compare_result_as_less_than(key_comp()(a, b));
  }

  value_compare value_comp() const {
    return value_compare(original_key_compare(key_comp()));
  }

  // Verifies the structure of the btree.
  void verify() const;

  // Size routines.
  size_type size() const { return size_; }
  size_type max_size() const { return (std::numeric_limits<size_type>::max)(); }
  bool empty() const { return size_ == 0; }

  // The height of the btree. An empty tree will have height 0.
  size_type height() const {
    size_type h = 0;
    if (!empty()) {
      // Count the length of the chain from the leftmost node up to the
      // root. We actually count from the root back around to the level below
      // the root, but the calculation is the same because of the circularity
      // of that traversal.
      const node_type *n = root();
      do {
        ++h;
        n = n->parent();
      } while (n != root());
    }
    return h;
  }

  // The number of internal, leaf and total nodes used by the btree.
  size_type leaf_nodes() const { return internal_stats(root()).leaf_nodes; }
  size_type internal_nodes() const {
    return internal_stats(root()).internal_nodes;
  }
  size_type nodes() const {
    node_stats stats = internal_stats(root());
    return stats.leaf_nodes + stats.internal_nodes;
  }

  // The total number of bytes used by the btree.
  // TODO(b/169338300): update to support node_btree_*.
  size_type bytes_used() const {
    node_stats stats = internal_stats(root());
    if (stats.leaf_nodes == 1 && stats.internal_nodes == 0) {
      return sizeof(*this) + node_type::LeafSize(root()->max_count());
    } else {
      return sizeof(*this) + stats.leaf_nodes * node_type::LeafSize() +
             stats.internal_nodes * node_type::InternalSize();
    }
  }

  // The average number of bytes used per value stored in the btree assuming
  // random insertion order.
  static double average_bytes_per_value() {
    // The expected number of values per node with random insertion order is the
    // average of the maximum and minimum numbers of values per node.
    const double expected_values_per_node = (kNodeSlots + kMinNodeValues) / 2.0;
    return node_type::LeafSize() / expected_values_per_node;
  }
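
  // For example (hypothetical numbers, since the real kNodeSlots depends on
  // the value size and target node size): with kNodeSlots == 25,
  // kMinNodeValues would be 12, the expected number of values per node would
  // be (25 + 12) / 2 == 18.5, and a 256-byte leaf would average
  // 256 / 18.5 ~= 13.8 bytes per value.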

  // The fullness of the btree. Computed as the number of elements in the btree
  // divided by the maximum number of elements a tree with the current number
  // of nodes could hold. A value of 1 indicates perfect space
  // utilization. Smaller values indicate space wastage.
  // Returns 0 for empty trees.
  double fullness() const {
    if (empty()) return 0.0;
    return static_cast<double>(size()) / (nodes() * kNodeSlots);
  }
  // The overhead of the btree structure in bytes per value. Computed as the
  // total number of bytes used by the btree minus the number of bytes used
  // for storing elements, divided by the number of elements.
  // Returns 0 for empty trees.
  double overhead() const {
    if (empty()) return 0.0;
    return (bytes_used() - size() * sizeof(value_type)) /
           static_cast<double>(size());
  }
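
  // For example (hypothetical numbers): a tree whose only node is a leaf root
  // holding 20 of kNodeSlots == 25 slots has fullness() == 20 / 25.0 == 0.8,
  // while overhead() reports the per-value cost of the unused slots plus the
  // node header and container bytes.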

  // The allocator used by the btree.
  allocator_type get_allocator() const { return allocator(); }

 private:
  friend struct btree_access;

  // Internal accessor routines.
  node_type *root() { return root_; }
  const node_type *root() const { return root_; }
  node_type *&mutable_root() noexcept { return root_; }
  node_type *rightmost() { return rightmost_.template get<2>(); }
  const node_type *rightmost() const { return rightmost_.template get<2>(); }
  node_type *&mutable_rightmost() noexcept {
    return rightmost_.template get<2>();
  }
  key_compare *mutable_key_comp() noexcept {
    return &rightmost_.template get<0>();
  }

  // The leftmost node is stored as the parent of the root node.
  node_type *leftmost() { return root()->parent(); }
  const node_type *leftmost() const { return root()->parent(); }

  // Allocator routines.
  allocator_type *mutable_allocator() noexcept {
    return &rightmost_.template get<1>();
  }
  const allocator_type &allocator() const noexcept {
    return rightmost_.template get<1>();
  }

  // Allocates a correctly aligned node of at least size bytes using the
  // allocator.
  node_type *allocate(size_type size) {
    return reinterpret_cast<node_type *>(
        absl::container_internal::Allocate<node_type::Alignment()>(
            mutable_allocator(), size));
  }

  // Node creation/deletion routines.
  node_type *new_internal_node(field_type position, node_type *parent) {
    node_type *n = allocate(node_type::InternalSize());
    n->init_internal(position, parent);
    return n;
  }
  node_type *new_leaf_node(field_type position, node_type *parent) {
    node_type *n = allocate(node_type::LeafSize());
    n->init_leaf(position, kNodeSlots, parent);
    return n;
  }
  node_type *new_leaf_root_node(field_type max_count) {
    node_type *n = allocate(node_type::LeafSize(max_count));
    n->init_leaf(/*position=*/0, max_count, /*parent=*/n);
    return n;
  }

  // Deletion helper routines.
  iterator rebalance_after_delete(iterator iter);

  // Rebalances or splits the node iter points to.
  void rebalance_or_split(iterator *iter);

  // Merges the values of left, right and the delimiting key on their parent
  // onto left, removing the delimiting key and deleting right.
  void merge_nodes(node_type *left, node_type *right);

  // Tries to merge node with its left or right sibling, and failing that,
  // rebalance with its left or right sibling. Returns true if a merge
  // occurred, at which point it is no longer valid to access node. Returns
  // false if no merging took place.
  bool try_merge_or_rebalance(iterator *iter);

  // Tries to shrink the height of the tree by 1.
  void try_shrink();

  iterator internal_end(iterator iter) {
    return iter.node_ != nullptr ? iter : end();
  }
  const_iterator internal_end(const_iterator iter) const {
    return iter.node_ != nullptr ? iter : end();
  }

  // Emplaces a value into the btree immediately before iter. Requires that
  // key(v) <= iter.key() and (--iter).key() <= key(v).
  template <typename... Args>
  iterator internal_emplace(iterator iter, Args &&...args);

  // Returns an iterator pointing to the first value >= the value "iter" is
  // pointing at. Note that "iter" might be pointing to an invalid location such
  // as iter.position_ == iter.node_->finish(). This routine simply moves iter
  // up in the tree to a valid location. Requires: iter.node_ is non-null.
  template <typename IterType>
  static IterType internal_last(IterType iter);

  // Returns an iterator pointing to the leaf position at which key would
  // reside in the tree, unless there is an exact match - in which case, the
  // result may not be on a leaf. When there's a three-way comparator, we can
  // return whether there was an exact match. This allows the caller to avoid a
  // subsequent comparison to determine if an exact match was made, which is
  // important for keys with expensive comparison, such as strings.
  template <typename K>
  SearchResult<iterator, is_key_compare_to::value> internal_locate(
      const K &key) const;

  // Internal routine which implements lower_bound().
  template <typename K>
  SearchResult<iterator, is_key_compare_to::value> internal_lower_bound(
      const K &key) const;

  // Internal routine which implements upper_bound().
  template <typename K>
  iterator internal_upper_bound(const K &key) const;

  // Internal routine which implements find().
  template <typename K>
  iterator internal_find(const K &key) const;

  // Verifies the tree structure of node.
  size_type internal_verify(const node_type *node, const key_type *lo,
                            const key_type *hi) const;

  node_stats internal_stats(const node_type *node) const {
    // The root can be a static empty node.
    if (node == nullptr || (node == root() && empty())) {
      return node_stats(0, 0);
    }
    if (node->is_leaf()) {
      return node_stats(1, 0);
    }
    node_stats res(0, 1);
    for (int i = node->start(); i <= node->finish(); ++i) {
      res += internal_stats(node->child(i));
    }
    return res;
  }

  node_type *root_;

  // A pointer to the rightmost node. Note that the leftmost node is stored as
  // the root's parent. We use compressed tuple in order to save space because
  // key_compare and allocator_type are usually empty.
  absl::container_internal::CompressedTuple<key_compare, allocator_type,
                                            node_type *>
      rightmost_;

  // Number of values.
  size_type size_;
};
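
// Example (an illustrative sketch, not part of this header): this class is
// the engine behind the public containers in absl/container/btree_{set,map}.h,
// so the methods above are normally reached through code like:
//
//   absl::btree_map<std::string, int> m;
//   m.insert({"a", 1});            // -> btree::insert_unique()
//   auto it = m.lower_bound("a");  // -> btree::lower_bound()
//   it = m.erase(it);              // -> btree::erase()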

////
// btree_node methods
template <typename P>
template <typename... Args>
inline void btree_node<P>::emplace_value(const field_type i,
                                         allocator_type *alloc,
                                         Args &&...args) {
  assert(i >= start());
  assert(i <= finish());
  // Shift old values to create space for new value and then construct it in
  // place.
  if (i < finish()) {
    transfer_n_backward(finish() - i, /*dest_i=*/i + 1, /*src_i=*/i, this,
                        alloc);
  }
  value_init(static_cast<field_type>(i), alloc, std::forward<Args>(args)...);
  set_finish(finish() + 1);

  if (is_internal() && finish() > i + 1) {
    for (field_type j = finish(); j > i + 1; --j) {
      set_child(j, child(j - 1));
    }
    clear_child(i + 1);
  }
}
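
// An illustrative trace (hypothetical 4-slot leaf): emplace_value(1, ...) on a
// node holding [A, B, C] shifts B and C right via transfer_n_backward() and
// constructs the new value V in place, yielding [A, V, B, C]. On an internal
// node, the children right of position i + 1 are likewise shifted so that the
// slot for the new value's right child is left clear for the caller to set.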

template <typename P>
inline void btree_node<P>::remove_values(const field_type i,
                                         const field_type to_erase,
                                         allocator_type *alloc) {
  // Transfer values after the removed range into their new places.
  value_destroy_n(i, to_erase, alloc);
  const field_type orig_finish = finish();
  const field_type src_i = i + to_erase;
  transfer_n(orig_finish - src_i, i, src_i, this, alloc);

  if (is_internal()) {
    // Delete all children between begin and end.
    for (field_type j = 0; j < to_erase; ++j) {
      clear_and_delete(child(i + j + 1), alloc);
    }
    // Rotate children after end into new positions.
    for (field_type j = i + to_erase + 1; j <= orig_finish; ++j) {
      set_child(j - to_erase, child(j));
      clear_child(j);
    }
  }
  set_finish(orig_finish - to_erase);
}

template <typename P>
void btree_node<P>::rebalance_right_to_left(field_type to_move,
                                            btree_node *right,
                                            allocator_type *alloc) {
  assert(parent() == right->parent());
  assert(position() + 1 == right->position());
  assert(right->count() >= count());
  assert(to_move >= 1);
  assert(to_move <= right->count());

  // 1) Move the delimiting value in the parent to the left node.
  transfer(finish(), position(), parent(), alloc);

  // 2) Move the (to_move - 1) values from the right node to the left node.
  transfer_n(to_move - 1, finish() + 1, right->start(), right, alloc);

  // 3) Move the new delimiting value to the parent from the right node.
  parent()->transfer(position(), right->start() + to_move - 1, right, alloc);

  // 4) Shift the values in the right node to their correct positions.
  right->transfer_n(right->count() - to_move, right->start(),
                    right->start() + to_move, right, alloc);

  if (is_internal()) {
    // Move the child pointers from the right to the left node.
    for (field_type i = 0; i < to_move; ++i) {
      init_child(finish() + i + 1, right->child(i));
    }
    for (field_type i = right->start(); i <= right->finish() - to_move; ++i) {
      assert(i + to_move <= right->max_count());
      right->init_child(i, right->child(i + to_move));
      right->clear_child(i + to_move);
    }
  }

  // Fixup `finish` on the left and right nodes.
  set_finish(finish() + to_move);
  right->set_finish(right->finish() - to_move);
}
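
// An illustrative trace (hypothetical 5-slot nodes, to_move == 2):
//
//   before:  left = [A]        parent delimiter = P   right = [B, C, D, E]
//   after:   left = [A, P, B]  parent delimiter = C   right = [D, E]
//
// Steps 1-4 above move P down into the left node, move B across, promote C
// into the parent as the new delimiter, and compact the right node.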

template <typename P>
void btree_node<P>::rebalance_left_to_right(field_type to_move,
                                            btree_node *right,
                                            allocator_type *alloc) {
  assert(parent() == right->parent());
  assert(position() + 1 == right->position());
  assert(count() >= right->count());
  assert(to_move >= 1);
  assert(to_move <= count());

  // Values in the right node are shifted to the right to make room for the
  // new to_move values. Then, the delimiting value in the parent and the
  // other (to_move - 1) values in the left node are moved into the right node.
  // Lastly, a new delimiting value is moved from the left node into the
  // parent, and the remaining empty left node entries are destroyed.

  // 1) Shift existing values in the right node to their correct positions.
  right->transfer_n_backward(right->count(), right->start() + to_move,
                             right->start(), right, alloc);

  // 2) Move the delimiting value in the parent to the right node.
  right->transfer(right->start() + to_move - 1, position(), parent(), alloc);

  // 3) Move the (to_move - 1) values from the left node to the right node.
  right->transfer_n(to_move - 1, right->start(), finish() - (to_move - 1), this,
                    alloc);

  // 4) Move the new delimiting value to the parent from the left node.
  parent()->transfer(position(), finish() - to_move, this, alloc);

  if (is_internal()) {
    // Move the child pointers from the left to the right node.
    for (field_type i = right->finish() + 1; i > right->start(); --i) {
      right->init_child(i - 1 + to_move, right->child(i - 1));
      right->clear_child(i - 1);
    }
    for (field_type i = 1; i <= to_move; ++i) {
      right->init_child(i - 1, child(finish() - to_move + i));
      clear_child(finish() - to_move + i);
    }
  }

  // Fixup the counts on the left and right nodes.
  set_finish(finish() - to_move);
  right->set_finish(right->finish() + to_move);
}
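
// The mirror image of rebalance_right_to_left(). An illustrative trace
// (hypothetical 5-slot nodes, to_move == 2):
//
//   before:  left = [A, B, C, D]  parent delimiter = P   right = [E]
//   after:   left = [A, B]        parent delimiter = C   right = [D, P, E]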

template <typename P>
void btree_node<P>::split(const int insert_position, btree_node *dest,
                          allocator_type *alloc) {
  assert(dest->count() == 0);
  assert(max_count() == kNodeSlots);
  assert(position() + 1 == dest->position());
  assert(parent() == dest->parent());

  // We bias the split based on the position being inserted. If we're
  // inserting at the beginning of the left node then bias the split to put
  // more values on the right node. If we're inserting at the end of the
  // right node then bias the split to put more values on the left node.
  if (insert_position == start()) {
    dest->set_finish(dest->start() + finish() - 1);
  } else if (insert_position == kNodeSlots) {
    dest->set_finish(dest->start());
  } else {
    dest->set_finish(dest->start() + count() / 2);
  }
  set_finish(finish() - dest->count());
  assert(count() >= 1);

  // Move values from the left sibling to the right sibling.
  dest->transfer_n(dest->count(), dest->start(), finish(), this, alloc);

  // The split key is the largest value in the left sibling.
  --mutable_finish();
  parent()->emplace_value(position(), alloc, finish_slot());
  value_destroy(finish(), alloc);
  parent()->set_child_noupdate_position(position() + 1, dest);

  if (is_internal()) {
    for (field_type i = dest->start(), j = finish() + 1; i <= dest->finish();
         ++i, ++j) {
      assert(child(j) != nullptr);
      dest->init_child(i, child(j));
      clear_child(j);
    }
  }
}
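
// An illustrative trace (hypothetical full 4-slot node, insert position in
// the middle): splitting [A, B, C, D] keeps [A] in this node, promotes B into
// the parent as the delimiting key, and moves [C, D] into `dest`.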

template <typename P>
void btree_node<P>::merge(btree_node *src, allocator_type *alloc) {
  assert(parent() == src->parent());
  assert(position() + 1 == src->position());

  // Move the delimiting value to the left node.
  value_init(finish(), alloc, parent()->slot(position()));

  // Move the values from the right to the left node.
  transfer_n(src->count(), finish() + 1, src->start(), src, alloc);

  if (is_internal()) {
    // Move the child pointers from the right to the left node.
    for (field_type i = src->start(), j = finish() + 1; i <= src->finish();
         ++i, ++j) {
      init_child(j, src->child(i));
      src->clear_child(i);
    }
  }

  // Fixup `finish` on the src and dest nodes.
  set_finish(start() + 1 + count() + src->count());
  src->set_finish(src->start());

  // Remove the value on the parent node and delete the src node.
  parent()->remove_values(position(), /*to_erase=*/1, alloc);
}
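
// The inverse of split(). An illustrative trace: merging left = [A] with
// src = [C, D] around parent delimiter B yields left = [A, B, C, D]; the
// delimiter is removed from the parent and the src node is deallocated by the
// remove_values() call above (which runs clear_and_delete() on it).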

template <typename P>
void btree_node<P>::clear_and_delete(btree_node *node, allocator_type *alloc) {
  if (node->is_leaf()) {
    node->value_destroy_n(node->start(), node->count(), alloc);
    deallocate(LeafSize(node->max_count()), node, alloc);
    return;
  }
  if (node->count() == 0) {
    deallocate(InternalSize(), node, alloc);
    return;
  }

  // The parent of the root of the subtree we are deleting.
  btree_node *delete_root_parent = node->parent();

  // Navigate to the leftmost leaf under node, and then delete upwards.
  while (node->is_internal()) node = node->start_child();
#ifdef ABSL_BTREE_ENABLE_GENERATIONS
  // When generations are enabled, we delete the leftmost leaf last in case it's
  // the parent of the root and we need to check whether it's a leaf before we
  // can update the root's generation.
  // TODO(ezb): if we change btree_node::is_root to check a bool inside the node
  // instead of checking whether the parent is a leaf, we can remove this logic.
  btree_node *leftmost_leaf = node;
#endif
  // Use `size_type` because `pos` needs to be able to hold `kNodeSlots+1`,
  // which isn't guaranteed to be a valid `field_type`.
  size_type pos = node->position();
  btree_node *parent = node->parent();
  for (;;) {
    // In each iteration of the next loop, we delete one leaf node and go right.
    assert(pos <= parent->finish());
    do {
      node = parent->child(static_cast<field_type>(pos));
      if (node->is_internal()) {
        // Navigate to the leftmost leaf under node.
        while (node->is_internal()) node = node->start_child();
        pos = node->position();
        parent = node->parent();
      }
      node->value_destroy_n(node->start(), node->count(), alloc);
#ifdef ABSL_BTREE_ENABLE_GENERATIONS
      if (leftmost_leaf != node)
#endif
        deallocate(LeafSize(node->max_count()), node, alloc);
      ++pos;
    } while (pos <= parent->finish());

    // Once we've deleted all children of parent, delete parent and go up/right.
    assert(pos > parent->finish());
    do {
      node = parent;
      pos = node->position();
      parent = node->parent();
      node->value_destroy_n(node->start(), node->count(), alloc);
      deallocate(InternalSize(), node, alloc);
      if (parent == delete_root_parent) {
#ifdef ABSL_BTREE_ENABLE_GENERATIONS
        deallocate(LeafSize(leftmost_leaf->max_count()), leftmost_leaf, alloc);
#endif
        return;
      }
      ++pos;
    } while (pos > parent->finish());
  }
}

////
// btree_iterator methods

// Note: the implementation here is based on btree_node::clear_and_delete.
template <typename N, typename R, typename P>
auto btree_iterator<N, R, P>::distance_slow(const_iterator other) const
    -> difference_type {
  const_iterator begin = other;
  const_iterator end = *this;
  assert(begin.node_ != end.node_ || !begin.node_->is_leaf() ||
         begin.position_ != end.position_);

  const node_type *node = begin.node_;
  // We need to compensate for double counting if begin.node_ is a leaf node.
  difference_type count = node->is_leaf() ? -begin.position_ : 0;

  // First navigate to the leftmost leaf node past begin.
  if (node->is_internal()) {
    ++count;
    node = node->child(begin.position_ + 1);
  }
  while (node->is_internal()) node = node->start_child();

  // Use `size_type` because `pos` needs to be able to hold `kNodeSlots+1`,
  // which isn't guaranteed to be a valid `field_type`.
  size_type pos = node->position();
  const node_type *parent = node->parent();
  for (;;) {
    // In each iteration of the next loop, we count one leaf node and go right.
    assert(pos <= parent->finish());
    do {
      node = parent->child(static_cast<field_type>(pos));
      if (node->is_internal()) {
        // Navigate to the leftmost leaf under node.
        while (node->is_internal()) node = node->start_child();
        pos = node->position();
        parent = node->parent();
      }
      if (node == end.node_) return count + end.position_;
      if (parent == end.node_ && pos == static_cast<size_type>(end.position_))
        return count + node->count();
      // +1 is for the next internal node value.
      count += node->count() + 1;
      ++pos;
    } while (pos <= parent->finish());

    // Once we've counted all children of parent, go up/right.
    assert(pos > parent->finish());
    do {
      node = parent;
      pos = node->position();
      parent = node->parent();
      // -1 because we counted the value at end and shouldn't.
      if (parent == end.node_ && pos == static_cast<size_type>(end.position_))
        return count - 1;
      ++pos;
    } while (pos > parent->finish());
  }
}
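
// An illustrative trace on a two-level tree: root = [M] with leaf children
// L = [A, B] and R = [N, O]. For begin = A (position 0 of L) and end = N
// (position 0 of R), leaf L contributes count += 2 + 1 (its two values plus
// the parent value M), and the next pass hits `node == end.node_`, returning
// 3 + 0 == 3 -- i.e. N is three increments past A.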

template <typename N, typename R, typename P>
void btree_iterator<N, R, P>::increment_slow() {
  if (node_->is_leaf()) {
    assert(position_ >= node_->finish());
    btree_iterator save(*this);
    while (position_ == node_->finish() && !node_->is_root()) {
      assert(node_->parent()->child(node_->position()) == node_);
      position_ = node_->position();
      node_ = node_->parent();
    }
    // TODO(ezb): assert we aren't incrementing end() instead of handling.
    if (position_ == node_->finish()) {
      *this = save;
    }
  } else {
    assert(position_ < node_->finish());
    node_ = node_->child(static_cast<field_type>(position_ + 1));
    while (node_->is_internal()) {
      node_ = node_->start_child();
    }
    position_ = node_->start();
  }
}

template <typename N, typename R, typename P>
void btree_iterator<N, R, P>::decrement_slow() {
  if (node_->is_leaf()) {
    assert(position_ <= -1);
    btree_iterator save(*this);
    while (position_ < node_->start() && !node_->is_root()) {
      assert(node_->parent()->child(node_->position()) == node_);
      position_ = node_->position() - 1;
      node_ = node_->parent();
    }
    // TODO(ezb): assert we aren't decrementing begin() instead of handling.
    if (position_ < node_->start()) {
      *this = save;
    }
  } else {
    assert(position_ >= node_->start());
    node_ = node_->child(static_cast<field_type>(position_));
    while (node_->is_internal()) {
      node_ = node_->child(node_->finish());
    }
    position_ = node_->finish() - 1;
  }
}

////
// btree methods
template <typename P>
template <typename Btree>
void btree<P>::copy_or_move_values_in_order(Btree &other) {
  static_assert(std::is_same<btree, Btree>::value ||
                    std::is_same<const btree, Btree>::value,
                "Btree type must be same or const.");
  assert(empty());

  // We can avoid key comparisons because we know the order of the
  // values is the same order we'll store them in.
  auto iter = other.begin();
  if (iter == other.end()) return;
  insert_multi(iter.slot());
  ++iter;
  for (; iter != other.end(); ++iter) {
    // If the btree is not empty, we can just insert the new value at the end
    // of the tree.
    internal_emplace(end(), iter.slot());
  }
}

template <typename P>
constexpr bool btree<P>::static_assert_validation() {
  static_assert(std::is_nothrow_copy_constructible<key_compare>::value,
                "Key comparison must be nothrow copy constructible");
  static_assert(std::is_nothrow_copy_constructible<allocator_type>::value,
                "Allocator must be nothrow copy constructible");
  static_assert(std::is_trivially_copyable<iterator>::value,
                "iterator not trivially copyable.");

  // Note: We assert that kTargetValues, which is computed from
  // Params::kTargetNodeSize, must fit the node_type::field_type.
  static_assert(
      kNodeSlots < (1 << (8 * sizeof(typename node_type::field_type))),
      "target node size too large");

  // Verify that key_compare returns an absl::{weak,strong}_ordering or bool.
  static_assert(
      compare_has_valid_result_type<key_compare, key_type>(),
      "key comparison function must return absl::{weak,strong}_ordering or "
      "bool.");

  // Test the assumption made in setting kNodeSlotSpace.
  static_assert(node_type::MinimumOverhead() >= sizeof(void *) + 4,
                "node space assumption incorrect");

  return true;
}

template <typename P>
template <typename K>
auto btree<P>::lower_bound_equal(const K &key) const
    -> std::pair<iterator, bool> {
  const SearchResult<iterator, is_key_compare_to::value> res =
      internal_lower_bound(key);
  const iterator lower = iterator(internal_end(res.value));
  const bool equal = res.HasMatch()
                         ? res.IsEq()
                         : lower != end() && !compare_keys(key, lower.key());
  return {lower, equal};
}

template <typename P>
template <typename K>
auto btree<P>::equal_range(const K &key) -> std::pair<iterator, iterator> {
  const std::pair<iterator, bool> lower_and_equal = lower_bound_equal(key);
  const iterator lower = lower_and_equal.first;
  if (!lower_and_equal.second) {
    return {lower, lower};
  }

  const iterator next = std::next(lower);
  if (!params_type::template can_have_multiple_equivalent_keys<K>()) {
    // The next iterator after lower must point to a key greater than `key`.
    // Note: if this assert fails, then it may indicate that the comparator does
    // not meet the equivalence requirements for Compare
    // (see https://en.cppreference.com/w/cpp/named_req/Compare).
    assert(next == end() || compare_keys(key, next.key()));
    return {lower, next};
  }
  // Try once more to avoid the call to upper_bound() if there's only one
  // equivalent key. This should prevent all calls to upper_bound() in cases of
  // unique-containers with heterogeneous comparators in which all comparison
  // operators have the same equivalence classes.
  if (next == end() || compare_keys(key, next.key())) return {lower, next};

  // In this case, we need to call upper_bound() to avoid worst case O(N)
  // behavior if we were to iterate over equal keys.
  return {lower, upper_bound(key)};
}
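
// Example (an illustrative sketch via the public wrappers): for an
// absl::btree_multiset<int> holding {1, 2, 2, 2, 3}, equal_range(2) returns
// iterators bracketing the three equivalent keys. The upper_bound() fallback
// above is only reachable for containers that can hold multiple equivalent
// keys; unique containers return after at most one extra comparison.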

template <typename P>
template <typename K, typename... Args>
auto btree<P>::insert_unique(const K &key, Args &&...args)
    -> std::pair<iterator, bool> {
  if (empty()) {
    mutable_root() = mutable_rightmost() = new_leaf_root_node(1);
  }

  SearchResult<iterator, is_key_compare_to::value> res = internal_locate(key);
  iterator iter = res.value;

  if (res.HasMatch()) {
    if (res.IsEq()) {
      // The key already exists in the tree, do nothing.
      return {iter, false};
    }
  } else {
    iterator last = internal_last(iter);
    if (last.node_ && !compare_keys(key, last.key())) {
      // The key already exists in the tree, do nothing.
      return {last, false};
    }
  }
  return {internal_emplace(iter, std::forward<Args>(args)...), true};
}

template <typename P>
template <typename K, typename... Args>
inline auto btree<P>::insert_hint_unique(iterator position, const K &key,
                                         Args &&...args)
    -> std::pair<iterator, bool> {
  if (!empty()) {
    if (position == end() || compare_keys(key, position.key())) {
      if (position == begin() ||
          compare_keys(std::prev(position).key(), key)) {
        // prev.key() < key < position.key()
        return {internal_emplace(position, std::forward<Args>(args)...), true};
      }
    } else if (compare_keys(position.key(), key)) {
      ++position;
      if (position == end() || compare_keys(key, position.key())) {
        // {original `position`}.key() < key < {current `position`}.key()
        return {internal_emplace(position, std::forward<Args>(args)...), true};
      }
    } else {
      // position.key() == key
      return {position, false};
    }
  }
  return insert_unique(key, std::forward<Args>(args)...);
}
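
// Example (an illustrative sketch via the public wrappers): inserting keys in
// sorted order with an end() hint stays on the amortized-constant path above:
//
//   absl::btree_set<int> s;
//   for (int i = 0; i < 1000; ++i) {
//     s.insert(s.end(), i);  // -> insert_hint_unique(end(), i, ...)
//   }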

template <typename P>
template <typename InputIterator, typename>
void btree<P>::insert_iterator_unique(InputIterator b, InputIterator e, int) {
  for (; b != e; ++b) {
    insert_hint_unique(end(), params_type::key(*b), *b);
  }
}

template <typename P>
template <typename InputIterator>
void btree<P>::insert_iterator_unique(InputIterator b, InputIterator e, char) {
  for (; b != e; ++b) {
    // Use a node handle to manage a temp slot.
    auto node_handle =
        CommonAccess::Construct<node_handle_type>(get_allocator(), *b);
    slot_type *slot = CommonAccess::GetSlot(node_handle);
    insert_hint_unique(end(), params_type::key(slot), slot);
  }
}

template <typename P>
template <typename ValueType>
auto btree<P>::insert_multi(const key_type &key, ValueType &&v) -> iterator {
  if (empty()) {
    mutable_root() = mutable_rightmost() = new_leaf_root_node(1);
  }

  iterator iter = internal_upper_bound(key);
  if (iter.node_ == nullptr) {
    iter = end();
  }
  return internal_emplace(iter, std::forward<ValueType>(v));
}

template <typename P>
template <typename ValueType>
auto btree<P>::insert_hint_multi(iterator position, ValueType &&v) -> iterator {
  if (!empty()) {
    const key_type &key = params_type::key(v);
    if (position == end() || !compare_keys(position.key(), key)) {
      if (position == begin() ||
          !compare_keys(key, std::prev(position).key())) {
        // prev.key() <= key <= position.key()
        return internal_emplace(position, std::forward<ValueType>(v));
      }
    } else {
      ++position;
      if (position == end() || !compare_keys(position.key(), key)) {
        // {original `position`}.key() < key <= {current `position`}.key()
        return internal_emplace(position, std::forward<ValueType>(v));
      }
    }
  }
  return insert_multi(std::forward<ValueType>(v));
}

template <typename P>
template <typename InputIterator>
void btree<P>::insert_iterator_multi(InputIterator b, InputIterator e) {
  for (; b != e; ++b) {
    insert_hint_multi(end(), *b);
  }
}

template <typename P>
auto btree<P>::operator=(const btree &other) -> btree & {
  if (this != &other) {
    clear();

    *mutable_key_comp() = other.key_comp();
    if (absl::allocator_traits<
            allocator_type>::propagate_on_container_copy_assignment::value) {
      *mutable_allocator() = other.allocator();
    }

    copy_or_move_values_in_order(other);
  }
  return *this;
}

template <typename P>
auto btree<P>::operator=(btree &&other) noexcept -> btree & {
  if (this != &other) {
    clear();

    using std::swap;
    if (absl::allocator_traits<
            allocator_type>::propagate_on_container_move_assignment::value) {
      swap(root_, other.root_);
      // Note: `rightmost_` also contains the allocator and the key comparator.
      swap(rightmost_, other.rightmost_);
      swap(size_, other.size_);
    } else {
      if (allocator() == other.allocator()) {
        swap(mutable_root(), other.mutable_root());
        swap(*mutable_key_comp(), *other.mutable_key_comp());
        swap(mutable_rightmost(), other.mutable_rightmost());
        swap(size_, other.size_);
      } else {
        // We aren't allowed to propagate the allocator and the allocator is
        // different so we can't take over its memory. We must move each element
        // individually. We need both `other` and `this` to have `other`'s key
        // comparator while moving the values so we can't swap the key
        // comparators.
        *mutable_key_comp() = other.key_comp();
        copy_or_move_values_in_order(other);
      }
    }
  }
  return *this;
}

template <typename P>
auto btree<P>::erase(iterator iter) -> iterator {
  iter.node_->value_destroy(static_cast<field_type>(iter.position_),
                            mutable_allocator());
  iter.update_generation();

  const bool internal_delete = iter.node_->is_internal();
  if (internal_delete) {
    // Deletion of a value on an internal node. First, transfer the largest
    // value from our left child here, then erase/rebalance from that position.
    // We can get to the largest value from our left child by decrementing iter.
    iterator internal_iter(iter);
    --iter;
    assert(iter.node_->is_leaf());
    internal_iter.node_->transfer(
        static_cast<size_type>(internal_iter.position_),
        static_cast<size_type>(iter.position_), iter.node_,
        mutable_allocator());
  } else {
    // Shift values after erased position in leaf. In the internal case, we
    // don't need to do this because the leaf position is the end of the node.
    const field_type transfer_from =
        static_cast<field_type>(iter.position_ + 1);
    const field_type num_to_transfer = iter.node_->finish() - transfer_from;
    iter.node_->transfer_n(num_to_transfer,
                           static_cast<size_type>(iter.position_),
                           transfer_from, iter.node_, mutable_allocator());
  }
  // Update node finish and container size.
  iter.node_->set_finish(iter.node_->finish() - 1);
  --size_;

  // We want to return the next value after the one we just erased. If we
  // erased from an internal node (internal_delete == true), then the next
  // value is ++(++iter). If we erased from a leaf node (internal_delete ==
  // false) then the next value is ++iter. Note that ++iter may point to an
  // internal node and the value in the internal node may move to a leaf node
  // (iter.node_) when rebalancing is performed at the leaf level.

  iterator res = rebalance_after_delete(iter);

  // If we erased from an internal node, advance the iterator.
  if (internal_delete) {
    ++res;
  }
  return res;
}
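
// Example (an illustrative sketch via the public wrappers): because erase()
// returns the iterator following the erased value, erase-while-iterating
// loops remain valid even though this container lacks pointer stability:
//
//   absl::btree_map<int, int> m = ...;
//   for (auto it = m.begin(); it != m.end();) {
//     if (it->second == 0) {
//       it = m.erase(it);  // -> btree::erase(iterator)
//     } else {
//       ++it;
//     }
//   }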
2485 
2486 template <typename P>
2487 auto btree<P>::rebalance_after_delete(iterator iter) -> iterator {
2488   // Merge/rebalance as we walk back up the tree.
2489   iterator res(iter);
2490   bool first_iteration = true;
2491   for (;;) {
2492     if (iter.node_ == root()) {
2493       try_shrink();
2494       if (empty()) {
2495         return end();
2496       }
2497       break;
2498     }
2499     if (iter.node_->count() >= kMinNodeValues) {
2500       break;
2501     }
2502     bool merged = try_merge_or_rebalance(&iter);
2503     // On the first iteration, we should update `res` with `iter` because `res`
2504     // may have been invalidated.
2505     if (first_iteration) {
2506       res = iter;
2507       first_iteration = false;
2508     }
2509     if (!merged) {
2510       break;
2511     }
2512     iter.position_ = iter.node_->position();
2513     iter.node_ = iter.node_->parent();
2514   }
2515   res.update_generation();
2516 
2517   // Adjust our return value. If we're pointing at the end of a node, advance
2518   // the iterator.
2519   if (res.position_ == res.node_->finish()) {
2520     res.position_ = res.node_->finish() - 1;
2521     ++res;
2522   }
2523 
2524   return res;
2525 }
2526 
2527 // Note: we tried implementing this more efficiently by erasing all of the
2528 // elements in [begin, end) at once and then doing rebalancing once at the end
2529 // (rather than interleaving deletion and rebalancing), but that adds a lot of
2530 // complexity, which seems to outweigh the performance win.
2531 template <typename P>
2532 auto btree<P>::erase_range(iterator begin, iterator end)
2533     -> std::pair<size_type, iterator> {
2534   size_type count = static_cast<size_type>(end - begin);
2535   assert(count >= 0);
2536 
2537   if (count == 0) {
2538     return {0, begin};
2539   }
2540 
2541   if (static_cast<size_type>(count) == size_) {
2542     clear();
2543     return {count, this->end()};
2544   }
2545 
2546   if (begin.node_ == end.node_) {
2547     assert(end.position_ > begin.position_);
2548     begin.node_->remove_values(
2549         static_cast<field_type>(begin.position_),
2550         static_cast<field_type>(end.position_ - begin.position_),
2551         mutable_allocator());
2552     size_ -= count;
2553     return {count, rebalance_after_delete(begin)};
2554   }
2555 
2556   const size_type target_size = size_ - count;
2557   while (size_ > target_size) {
2558     if (begin.node_->is_leaf()) {
2559       const size_type remaining_to_erase = size_ - target_size;
2560       const size_type remaining_in_node =
2561           static_cast<size_type>(begin.node_->finish() - begin.position_);
2562       const field_type to_erase = static_cast<field_type>(
2563           (std::min)(remaining_to_erase, remaining_in_node));
2564       begin.node_->remove_values(static_cast<field_type>(begin.position_),
2565                                  to_erase, mutable_allocator());
2566       size_ -= to_erase;
2567       begin = rebalance_after_delete(begin);
2568     } else {
2569       begin = erase(begin);
2570     }
2571   }
2572   begin.update_generation();
2573   return {count, begin};
2574 }
2575 
2576 template <typename P>
2577 void btree<P>::clear() {
2578   if (!empty()) {
2579     node_type::clear_and_delete(root(), mutable_allocator());
2580   }
2581   mutable_root() = mutable_rightmost() = EmptyNode();
2582   size_ = 0;
2583 }
2584 
2585 template <typename P>
2586 void btree<P>::swap(btree &other) {
2587   using std::swap;
2588   if (absl::allocator_traits<
2589           allocator_type>::propagate_on_container_swap::value) {
2590     // Note: `rightmost_` also contains the allocator and the key comparator.
2591     swap(rightmost_, other.rightmost_);
2592   } else {
2593     // It's undefined behavior if the allocators are unequal here.
2594     assert(allocator() == other.allocator());
2595     swap(mutable_rightmost(), other.mutable_rightmost());
2596     swap(*mutable_key_comp(), *other.mutable_key_comp());
2597   }
2598   swap(mutable_root(), other.mutable_root());
2599   swap(size_, other.size_);
2600 }
2601 
2602 template <typename P>
2603 void btree<P>::verify() const {
2604   assert(root() != nullptr);
2605   assert(leftmost() != nullptr);
2606   assert(rightmost() != nullptr);
2607   assert(empty() || size() == internal_verify(root(), nullptr, nullptr));
2608   assert(leftmost() == (++const_iterator(root(), -1)).node_);
2609   assert(rightmost() == (--const_iterator(root(), root()->finish())).node_);
2610   assert(leftmost()->is_leaf());
2611   assert(rightmost()->is_leaf());
2612 }
2613 
template <typename P>
void btree<P>::rebalance_or_split(iterator *iter) {
  node_type *&node = iter->node_;
  int &insert_position = iter->position_;
  assert(node->count() == node->max_count());
  assert(kNodeSlots == node->max_count());

  // First try to make room on the node by rebalancing.
  node_type *parent = node->parent();
  if (node != root()) {
    if (node->position() > parent->start()) {
      // Try rebalancing with our left sibling.
      node_type *left = parent->child(node->position() - 1);
      assert(left->max_count() == kNodeSlots);
      if (left->count() < kNodeSlots) {
        // We bias rebalancing based on the position being inserted. If we're
        // inserting at the end of the right node then we bias rebalancing to
        // fill up the left node.
        field_type to_move =
            (kNodeSlots - left->count()) /
            (1 + (static_cast<field_type>(insert_position) < kNodeSlots));
        to_move = (std::max)(field_type{1}, to_move);

        if (static_cast<field_type>(insert_position) - to_move >=
                node->start() ||
            left->count() + to_move < kNodeSlots) {
          left->rebalance_right_to_left(to_move, node, mutable_allocator());

          assert(node->max_count() - node->count() == to_move);
          insert_position = static_cast<int>(
              static_cast<field_type>(insert_position) - to_move);
          if (insert_position < node->start()) {
            insert_position = insert_position + left->count() + 1;
            node = left;
          }

          assert(node->count() < node->max_count());
          return;
        }
      }
    }

    if (node->position() < parent->finish()) {
      // Try rebalancing with our right sibling.
      node_type *right = parent->child(node->position() + 1);
      assert(right->max_count() == kNodeSlots);
      if (right->count() < kNodeSlots) {
        // We bias rebalancing based on the position being inserted. If we're
        // inserting at the beginning of the left node then we bias rebalancing
        // to fill up the right node.
        field_type to_move = (kNodeSlots - right->count()) /
                             (1 + (insert_position > node->start()));
        to_move = (std::max)(field_type{1}, to_move);

        if (static_cast<field_type>(insert_position) <=
                node->finish() - to_move ||
            right->count() + to_move < kNodeSlots) {
          node->rebalance_left_to_right(to_move, right, mutable_allocator());

          if (insert_position > node->finish()) {
            insert_position = insert_position - node->count() - 1;
            node = right;
          }

          assert(node->count() < node->max_count());
          return;
        }
      }
    }

    // Rebalancing failed, make sure there is room on the parent node for a new
    // value.
    assert(parent->max_count() == kNodeSlots);
    if (parent->count() == kNodeSlots) {
      iterator parent_iter(parent, node->position());
      rebalance_or_split(&parent_iter);
      parent = node->parent();
    }
  } else {
    // Rebalancing not possible because this is the root node.
    // Create a new root node and set the current root node as the child of the
    // new root.
    parent = new_internal_node(/*position=*/0, parent);
    parent->set_generation(root()->generation());
    parent->init_child(parent->start(), node);
    mutable_root() = parent;
    // If the former root was a leaf node, then it's now the rightmost node.
    assert(parent->start_child()->is_internal() ||
           parent->start_child() == rightmost());
  }

  // Split the node.
  node_type *split_node;
  if (node->is_leaf()) {
    split_node = new_leaf_node(node->position() + 1, parent);
    node->split(insert_position, split_node, mutable_allocator());
    if (rightmost() == node) mutable_rightmost() = split_node;
  } else {
    split_node = new_internal_node(node->position() + 1, parent);
    node->split(insert_position, split_node, mutable_allocator());
  }

  if (insert_position > node->finish()) {
    insert_position = insert_position - node->count() - 1;
    node = split_node;
  }
}

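// Merges `right` and the delimiting key on the parent into `left`, deleting
// `right` and updating the rightmost pointer if needed.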
template <typename P>
void btree<P>::merge_nodes(node_type *left, node_type *right) {
  left->merge(right, mutable_allocator());
  if (rightmost() == right) mutable_rightmost() = left;
}

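// Called after a deletion leaves iter->node_ underfull. Tries to merge the
// node with a sibling; failing that, tries to rebalance values from a
// sibling. Returns true iff a merge occurred, in which case *iter points
// into the surviving (left) node of the merge.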
template <typename P>
bool btree<P>::try_merge_or_rebalance(iterator *iter) {
  node_type *parent = iter->node_->parent();
  if (iter->node_->position() > parent->start()) {
    // Try merging with our left sibling.
    node_type *left = parent->child(iter->node_->position() - 1);
    assert(left->max_count() == kNodeSlots);
    if (1U + left->count() + iter->node_->count() <= kNodeSlots) {
      iter->position_ += 1 + left->count();
      merge_nodes(left, iter->node_);
      iter->node_ = left;
      return true;
    }
  }
  if (iter->node_->position() < parent->finish()) {
    // Try merging with our right sibling.
    node_type *right = parent->child(iter->node_->position() + 1);
    assert(right->max_count() == kNodeSlots);
    if (1U + iter->node_->count() + right->count() <= kNodeSlots) {
      merge_nodes(iter->node_, right);
      return true;
    }
    // Try rebalancing with our right sibling. We don't perform rebalancing if
    // we deleted the first element from iter->node_ and the node is not
    // empty. This is a small optimization for the common pattern of deleting
    // from the front of the tree.
    if (right->count() > kMinNodeValues &&
        (iter->node_->count() == 0 || iter->position_ > iter->node_->start())) {
      field_type to_move = (right->count() - iter->node_->count()) / 2;
      to_move =
          (std::min)(to_move, static_cast<field_type>(right->count() - 1));
      iter->node_->rebalance_right_to_left(to_move, right, mutable_allocator());
      return false;
    }
  }
  if (iter->node_->position() > parent->start()) {
    // Try rebalancing with our left sibling. We don't perform rebalancing if
    // we deleted the last element from iter->node_ and the node is not
    // empty. This is a small optimization for the common pattern of deleting
    // from the back of the tree.
    node_type *left = parent->child(iter->node_->position() - 1);
    if (left->count() > kMinNodeValues &&
        (iter->node_->count() == 0 ||
         iter->position_ < iter->node_->finish())) {
      field_type to_move = (left->count() - iter->node_->count()) / 2;
      to_move = (std::min)(to_move, static_cast<field_type>(left->count() - 1));
      left->rebalance_left_to_right(to_move, iter->node_, mutable_allocator());
      iter->position_ += to_move;
      return false;
    }
  }
  return false;
}

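// If the root has become empty, shrinks the height of the tree by one,
// resetting to the shared empty node when the last value has been erased.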
template <typename P>
void btree<P>::try_shrink() {
  node_type *orig_root = root();
  if (orig_root->count() > 0) {
    return;
  }
  // Deleted the last item on the root node, shrink the height of the tree.
  if (orig_root->is_leaf()) {
    assert(size() == 0);
    mutable_root() = mutable_rightmost() = EmptyNode();
  } else {
    node_type *child = orig_root->start_child();
    child->make_root();
    mutable_root() = child;
  }
  node_type::clear_and_delete(orig_root, mutable_allocator());
}

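// Normalizes `iter`, which may point one past the values of its node
// (position_ == finish()), by walking up the tree to the first valid
// position at or after it. Yields a null node_ (i.e. end()) if there is no
// such position.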
template <typename P>
template <typename IterType>
inline IterType btree<P>::internal_last(IterType iter) {
  assert(iter.node_ != nullptr);
  while (iter.position_ == iter.node_->finish()) {
    iter.position_ = iter.node_->position();
    iter.node_ = iter.node_->parent();
    if (iter.node_->is_leaf()) {
      iter.node_ = nullptr;
      break;
    }
  }
  iter.update_generation();
  return iter;
}

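// Emplaces a value at the position `iter`, which must be the correct ordered
// location for it. Grows an undersized root, splits full nodes as needed,
// and, under ASan/HWASan, moves the destination leaf to a freshly allocated
// node so that use of now-invalidated pointers into it can be detected.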
template <typename P>
template <typename... Args>
inline auto btree<P>::internal_emplace(iterator iter, Args &&...args)
    -> iterator {
  if (iter.node_->is_internal()) {
    // We can't insert on an internal node. Instead, we'll insert after the
    // previous value which is guaranteed to be on a leaf node.
    --iter;
    ++iter.position_;
  }
  const field_type max_count = iter.node_->max_count();
  allocator_type *alloc = mutable_allocator();

  const auto transfer_and_delete = [&](node_type *old_node,
                                       node_type *new_node) {
    new_node->transfer_n(old_node->count(), new_node->start(),
                         old_node->start(), old_node, alloc);
    new_node->set_finish(old_node->finish());
    old_node->set_finish(old_node->start());
    new_node->set_generation(old_node->generation());
    node_type::clear_and_delete(old_node, alloc);
  };
  const auto replace_leaf_root_node = [&](field_type new_node_size) {
    assert(iter.node_ == root());
    node_type *old_root = iter.node_;
    node_type *new_root = iter.node_ = new_leaf_root_node(new_node_size);
    transfer_and_delete(old_root, new_root);
    mutable_root() = mutable_rightmost() = new_root;
  };

  bool replaced_node = false;
  if (iter.node_->count() == max_count) {
    // Make room in the leaf for the new item.
    if (max_count < kNodeSlots) {
      // Insertion into the root where the root is smaller than the full node
      // size. Simply grow the size of the root node.
      replace_leaf_root_node(static_cast<field_type>(
          (std::min)(static_cast<int>(kNodeSlots), 2 * max_count)));
      replaced_node = true;
    } else {
      rebalance_or_split(&iter);
    }
  }
  (void)replaced_node;
#if defined(ABSL_HAVE_ADDRESS_SANITIZER) || \
    defined(ABSL_HAVE_HWADDRESS_SANITIZER)
  if (!replaced_node) {
    assert(iter.node_->is_leaf());
    if (iter.node_->is_root()) {
      replace_leaf_root_node(max_count);
    } else {
      node_type *old_node = iter.node_;
      const bool was_rightmost = rightmost() == old_node;
      const bool was_leftmost = leftmost() == old_node;
      node_type *parent = old_node->parent();
      const field_type position = old_node->position();
      node_type *new_node = iter.node_ = new_leaf_node(position, parent);
      parent->set_child_noupdate_position(position, new_node);
      transfer_and_delete(old_node, new_node);
      if (was_rightmost) mutable_rightmost() = new_node;
      // The leftmost node is stored as the parent of the root node.
      if (was_leftmost) root()->set_parent(new_node);
    }
  }
#endif
  iter.node_->emplace_value(static_cast<field_type>(iter.position_), alloc,
                            std::forward<Args>(args)...);
  assert(
      iter.node_->is_ordered_correctly(static_cast<field_type>(iter.position_),
                                       original_key_compare(key_comp())) &&
      "If this assert fails, then either (1) the comparator may violate "
      "transitivity, i.e. comp(a,b) && comp(b,c) -> comp(a,c) (see "
      "https://en.cppreference.com/w/cpp/named_req/Compare), or (2) a "
      "key may have been mutated after it was inserted into the tree.");
  ++size_;
  iter.update_generation();
  return iter;
}

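// Descends from the root toward the position at which `key` resides or would
// be inserted. With a three-way comparator an exact match is reported as
// MatchKind::kEq (possibly on an internal node); otherwise the result is a
// leaf position and MatchKind::kNe may still refer to an equivalent key.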
template <typename P>
template <typename K>
inline auto btree<P>::internal_locate(const K &key) const
    -> SearchResult<iterator, is_key_compare_to::value> {
  iterator iter(const_cast<node_type *>(root()));
  for (;;) {
    SearchResult<size_type, is_key_compare_to::value> res =
        iter.node_->lower_bound(key, key_comp());
    iter.position_ = static_cast<int>(res.value);
    if (res.IsEq()) {
      return {iter, MatchKind::kEq};
    }
    // Note: in the non-key-compare-to case, we don't need to walk all the way
    // down the tree if the keys are equal, but determining equality would
    // require doing an extra comparison on each node on the way down, and we
    // will need to go all the way to the leaf node in the expected case.
    if (iter.node_->is_leaf()) {
      break;
    }
    iter.node_ = iter.node_->child(static_cast<field_type>(iter.position_));
  }
  // Note: in the non-key-compare-to case, the key may actually be equivalent
  // here (and the MatchKind::kNe is ignored).
  return {iter, MatchKind::kNe};
}

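// Finds the first position whose key is not less than `key`. When the tree
// cannot hold multiple equivalent keys this reduces to internal_locate();
// otherwise the search must continue to a leaf to find the first of the
// duplicates, tracking whether an equivalent key was seen on the way down.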
template <typename P>
template <typename K>
auto btree<P>::internal_lower_bound(const K &key) const
    -> SearchResult<iterator, is_key_compare_to::value> {
  if (!params_type::template can_have_multiple_equivalent_keys<K>()) {
    SearchResult<iterator, is_key_compare_to::value> ret = internal_locate(key);
    ret.value = internal_last(ret.value);
    return ret;
  }
  iterator iter(const_cast<node_type *>(root()));
  SearchResult<size_type, is_key_compare_to::value> res;
  bool seen_eq = false;
  for (;;) {
    res = iter.node_->lower_bound(key, key_comp());
    iter.position_ = static_cast<int>(res.value);
    if (iter.node_->is_leaf()) {
      break;
    }
    seen_eq = seen_eq || res.IsEq();
    iter.node_ = iter.node_->child(static_cast<field_type>(iter.position_));
  }
  if (res.IsEq()) return {iter, MatchKind::kEq};
  return {internal_last(iter), seen_eq ? MatchKind::kEq : MatchKind::kNe};
}

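// Finds the first position whose key is greater than `key`.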
template <typename P>
template <typename K>
auto btree<P>::internal_upper_bound(const K &key) const -> iterator {
  iterator iter(const_cast<node_type *>(root()));
  for (;;) {
    iter.position_ = static_cast<int>(iter.node_->upper_bound(key, key_comp()));
    if (iter.node_->is_leaf()) {
      break;
    }
    iter.node_ = iter.node_->child(static_cast<field_type>(iter.position_));
  }
  return internal_last(iter);
}

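// Finds an exact match for `key`, returning an iterator with a null node_ if
// none exists. With a boolean (non-three-way) comparator, equality cannot be
// detected during the descent, so one extra comparison against the
// lower-bound position establishes equivalence.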
template <typename P>
template <typename K>
auto btree<P>::internal_find(const K &key) const -> iterator {
  SearchResult<iterator, is_key_compare_to::value> res = internal_locate(key);
  if (res.HasMatch()) {
    if (res.IsEq()) {
      return res.value;
    }
  } else {
    const iterator iter = internal_last(res.value);
    if (iter.node_ != nullptr && !compare_keys(key, iter.key())) {
      return iter;
    }
  }
  return {nullptr, 0};
}

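// Recursively checks the invariants of the subtree rooted at `node`: value
// counts, key ordering within the optional bounds [lo, hi], and child
// parent/position links. Returns the number of values in the subtree.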
template <typename P>
typename btree<P>::size_type btree<P>::internal_verify(
    const node_type *node, const key_type *lo, const key_type *hi) const {
  assert(node->count() > 0);
  assert(node->count() <= node->max_count());
  if (lo) {
    assert(!compare_keys(node->key(node->start()), *lo));
  }
  if (hi) {
    assert(!compare_keys(*hi, node->key(node->finish() - 1)));
  }
  for (int i = node->start() + 1; i < node->finish(); ++i) {
    assert(!compare_keys(node->key(i), node->key(i - 1)));
  }
  size_type count = node->count();
  if (node->is_internal()) {
    for (field_type i = node->start(); i <= node->finish(); ++i) {
      assert(node->child(i) != nullptr);
      assert(node->child(i)->parent() == node);
      assert(node->child(i)->position() == i);
      count += internal_verify(node->child(i),
                               i == node->start() ? lo : &node->key(i - 1),
                               i == node->finish() ? hi : &node->key(i));
    }
  }
  return count;
}

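// Grants the btree containers' free functions access to tree internals
// without widening the containers' public interface. erase_if batches
// erasures within each leaf node, compacting surviving slots in place and
// deferring rebalancing until the node has been fully scanned, which avoids
// rebalancing work that element-by-element erase would repeat.
//
// Usage sketch (normally reached via the public absl::erase_if overloads for
// the btree containers, e.g. in btree_set.h):
//
//   absl::btree_set<int> s = {1, 2, 3, 4};
//   absl::erase_if(s, [](int v) { return v % 2 == 0; });  // s == {1, 3}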
struct btree_access {
  template <typename BtreeContainer, typename Pred>
  static auto erase_if(BtreeContainer &container, Pred pred) ->
      typename BtreeContainer::size_type {
    const auto initial_size = container.size();
    auto &tree = container.tree_;
    auto *alloc = tree.mutable_allocator();
    for (auto it = container.begin(); it != container.end();) {
      if (!pred(*it)) {
        ++it;
        continue;
      }
      auto *node = it.node_;
      if (node->is_internal()) {
        // Handle internal nodes normally.
        it = container.erase(it);
        continue;
      }
      // If this is a leaf node, then we do all the erases from this node
      // at once before doing rebalancing.

      // The current position to transfer slots to.
      int to_pos = it.position_;
      node->value_destroy(it.position_, alloc);
      while (++it.position_ < node->finish()) {
        it.update_generation();
        if (pred(*it)) {
          node->value_destroy(it.position_, alloc);
        } else {
          node->transfer(node->slot(to_pos++), node->slot(it.position_), alloc);
        }
      }
      const int num_deleted = node->finish() - to_pos;
      tree.size_ -= num_deleted;
      node->set_finish(to_pos);
      it.position_ = to_pos;
      it = tree.rebalance_after_delete(it);
    }
    return initial_size - container.size();
  }
};

#undef ABSL_BTREE_ENABLE_GENERATIONS

}  // namespace container_internal
ABSL_NAMESPACE_END
}  // namespace absl

#endif  // ABSL_CONTAINER_INTERNAL_BTREE_H_