abseil-cpp/absl/container/internal/btree.h
Go to the documentation of this file.
1 // Copyright 2018 The Abseil Authors.
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 // https://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14 
15 // A btree implementation of the STL set and map interfaces. A btree is smaller
16 // and generally also faster than STL set/map (refer to the benchmarks below).
17 // The red-black tree implementation of STL set/map has an overhead of 3
18 // pointers (left, right and parent) plus the node color information for each
19 // stored value. So a set<int32_t> consumes 40 bytes for each value stored in
20 // 64-bit mode. This btree implementation stores multiple values on fixed
21 // size nodes (usually 256 bytes) and doesn't store child pointers for leaf
22 // nodes. The result is that a btree_set<int32_t> may use much less memory per
23 // stored value. For the random insertion benchmark in btree_bench.cc, a
24 // btree_set<int32_t> with node-size of 256 uses 5.1 bytes per stored value.
25 //
26 // The packing of multiple values on to each node of a btree has another effect
27 // besides better space utilization: better cache locality due to fewer cache
28 // lines being accessed. Better cache locality translates into faster
29 // operations.
30 //
31 // CAVEATS
32 //
33 // Insertions and deletions on a btree can cause splitting, merging or
34 // rebalancing of btree nodes. And even without these operations, insertions
35 // and deletions on a btree will move values around within a node. In both
36 // cases, the result is that insertions and deletions can invalidate iterators
37 // pointing to values other than the one being inserted/deleted. Therefore, this
38 // container does not provide pointer stability. This is notably different from
39 // STL set/map which takes care to not invalidate iterators on insert/erase
40 // except, of course, for iterators pointing to the value being erased. A
41 // partial workaround when erasing is available: erase() returns an iterator
42 // pointing to the item just after the one that was erased (or end() if none
43 // exists).
44 
45 #ifndef ABSL_CONTAINER_INTERNAL_BTREE_H_
46 #define ABSL_CONTAINER_INTERNAL_BTREE_H_
47 
48 #include <algorithm>
49 #include <cassert>
50 #include <cstddef>
51 #include <cstdint>
52 #include <cstring>
53 #include <functional>
54 #include <iterator>
55 #include <limits>
56 #include <new>
57 #include <string>
58 #include <type_traits>
59 #include <utility>
60 
61 #include "absl/base/internal/raw_logging.h"
62 #include "absl/base/macros.h"
63 #include "absl/container/internal/common.h"
64 #include "absl/container/internal/compressed_tuple.h"
65 #include "absl/container/internal/container_memory.h"
66 #include "absl/container/internal/layout.h"
67 #include "absl/memory/memory.h"
68 #include "absl/meta/type_traits.h"
69 #include "absl/strings/cord.h"
70 #include "absl/strings/string_view.h"
71 #include "absl/types/compare.h"
72 #include "absl/utility/utility.h"
73 
74 namespace absl {
76 namespace container_internal {
77 
78 #ifdef ABSL_BTREE_ENABLE_GENERATIONS
79 #error ABSL_BTREE_ENABLE_GENERATIONS cannot be directly set
80 #elif defined(ABSL_HAVE_ADDRESS_SANITIZER) || \
81  defined(ABSL_HAVE_MEMORY_SANITIZER)
82 // When compiled in sanitizer mode, we add generation integers to the nodes and
83 // iterators. When iterators are used, we validate that the container has not
84 // been mutated since the iterator was constructed.
85 #define ABSL_BTREE_ENABLE_GENERATIONS
86 #endif
87 
// The type produced by invoking a const-qualified `Compare` functor with a
// `const T&` and a `const U&` argument.
template <typename Compare, typename T, typename U>
using compare_result_t = absl::result_of_t<const Compare(const T &, const U &)>;
90 
91 // A helper class that indicates if the Compare parameter is a key-compare-to
92 // comparator.
93 template <typename Compare, typename T>
95  std::is_convertible<compare_result_t<Compare, T, T>, absl::weak_ordering>;
96 
98  using is_transparent = void;
99 
100  StringBtreeDefaultLess() = default;
101 
102  // Compatibility constructor.
103  StringBtreeDefaultLess(std::less<std::string>) {} // NOLINT
104  StringBtreeDefaultLess(std::less<absl::string_view>) {} // NOLINT
105 
106  // Allow converting to std::less for use in key_comp()/value_comp().
107  explicit operator std::less<std::string>() const { return {}; }
108  explicit operator std::less<absl::string_view>() const { return {}; }
109  explicit operator std::less<absl::Cord>() const { return {}; }
110 
112  absl::string_view rhs) const {
114  }
115  StringBtreeDefaultLess(std::less<absl::Cord>) {} // NOLINT
117  const absl::Cord &rhs) const {
119  }
121  absl::string_view rhs) const {
123  }
125  const absl::Cord &rhs) const {
127  }
128 };
129 
131  using is_transparent = void;
132 
133  StringBtreeDefaultGreater() = default;
134 
135  StringBtreeDefaultGreater(std::greater<std::string>) {} // NOLINT
136  StringBtreeDefaultGreater(std::greater<absl::string_view>) {} // NOLINT
137 
138  // Allow converting to std::greater for use in key_comp()/value_comp().
139  explicit operator std::greater<std::string>() const { return {}; }
140  explicit operator std::greater<absl::string_view>() const { return {}; }
141  explicit operator std::greater<absl::Cord>() const { return {}; }
142 
144  absl::string_view rhs) const {
146  }
147  StringBtreeDefaultGreater(std::greater<absl::Cord>) {} // NOLINT
149  const absl::Cord &rhs) const {
151  }
153  absl::string_view rhs) const {
155  }
157  const absl::Cord &rhs) const {
159  }
160 };
161 
162 // See below comments for checked_compare.
165  using Compare::Compare;
167  const Compare &comp() const { return *this; }
168 };
169 template <typename Compare>
172  const Compare &comp() const { return compare; }
174 };
175 
176 // A mechanism for opting out of checked_compare for use only in btree_test.cc.
178 
179 // A helper class to adapt the specified comparator for two use cases:
180 // (1) When using common Abseil string types with common comparison functors,
181 // convert a boolean comparison into a three-way comparison that returns an
182 // `absl::weak_ordering`. This helper class is specialized for
183 // less<std::string>, greater<std::string>, less<string_view>,
184 // greater<string_view>, less<absl::Cord>, and greater<absl::Cord>.
185 // (2) Adapt the comparator to diagnose cases of non-strict-weak-ordering (see
186 // https://en.cppreference.com/w/cpp/named_req/Compare) in debug mode. Whenever
187 // a comparison is made, we will make assertions to verify that the comparator
188 // is valid.
189 template <typename Compare, typename Key>
191  // Inherit from checked_compare_base to support function pointers and also
192  // keep empty-base-optimization (EBO) support for classes.
193  // Note: we can't use CompressedTuple here because that would interfere
194  // with the EBO for `btree::rightmost_`. `btree::rightmost_` is itself a
195  // CompressedTuple and nested `CompressedTuple`s don't support EBO.
196  // TODO(b/214288561): use CompressedTuple instead once it supports EBO for
197  // nested `CompressedTuple`s.
199  private:
201  using Base::comp;
202 
203  // If possible, returns whether `t` is equivalent to itself. We can only do
204  // this for `Key`s because we can't be sure that it's safe to call
205  // `comp()(k, k)` otherwise. Even if SFINAE allows it, there could be a
206  // compilation failure inside the implementation of the comparison operator.
207  bool is_self_equivalent(const Key &k) const {
208  // Note: this works for both boolean and three-way comparators.
209  return comp()(k, k) == 0;
210  }
211  // If we can't compare `t` with itself, returns true unconditionally.
212  template <typename T>
213  bool is_self_equivalent(const T &) const {
214  return true;
215  }
216 
217  public:
218  using Base::Base;
220 
221  // Allow converting to Compare for use in key_comp()/value_comp().
222  explicit operator Compare() const { return comp(); }
223 
224  template <typename T, typename U,
226  std::is_same<bool, compare_result_t<Compare, T, U>>::value,
227  int> = 0>
228  bool operator()(const T &lhs, const U &rhs) const {
229  // NOTE: if any of these assertions fail, then the comparator does not
230  // establish a strict-weak-ordering (see
231  // https://en.cppreference.com/w/cpp/named_req/Compare).
232  assert(is_self_equivalent(lhs));
233  assert(is_self_equivalent(rhs));
234  const bool lhs_comp_rhs = comp()(lhs, rhs);
235  assert(!lhs_comp_rhs || !comp()(rhs, lhs));
236  return lhs_comp_rhs;
237  }
238 
239  template <
240  typename T, typename U,
243  int> = 0>
244  absl::weak_ordering operator()(const T &lhs, const U &rhs) const {
245  // NOTE: if any of these assertions fail, then the comparator does not
246  // establish a strict-weak-ordering (see
247  // https://en.cppreference.com/w/cpp/named_req/Compare).
248  assert(is_self_equivalent(lhs));
249  assert(is_self_equivalent(rhs));
250  const absl::weak_ordering lhs_comp_rhs = comp()(lhs, rhs);
251 #ifndef NDEBUG
252  const absl::weak_ordering rhs_comp_lhs = comp()(rhs, lhs);
253  if (lhs_comp_rhs > 0) {
254  assert(rhs_comp_lhs < 0 && "lhs_comp_rhs > 0 -> rhs_comp_lhs < 0");
255  } else if (lhs_comp_rhs == 0) {
256  assert(rhs_comp_lhs == 0 && "lhs_comp_rhs == 0 -> rhs_comp_lhs == 0");
257  } else {
258  assert(rhs_comp_lhs > 0 && "lhs_comp_rhs < 0 -> rhs_comp_lhs > 0");
259  }
260 #endif
261  return lhs_comp_rhs;
262  }
263  };
264  using type = absl::conditional_t<
265  std::is_base_of<BtreeTestOnlyCheckedCompareOptOutBase, Compare>::value,
266  Compare, checked_compare>;
267 };
268 
// Specializations of key_compare_adapter for the common string-like
// comparators. These substitute StringBtreeDefault{Less,Greater}, which
// provide heterogeneous lookup across std::string, absl::string_view, and
// absl::Cord and act as key-compare-to (three-way) comparators.
template <>
struct key_compare_adapter<std::less<std::string>, std::string> {
  using type = StringBtreeDefaultLess;
};

template <>
struct key_compare_adapter<std::greater<std::string>, std::string> {
  using type = StringBtreeDefaultGreater;
};

template <>
struct key_compare_adapter<std::less<absl::string_view>, absl::string_view> {
  using type = StringBtreeDefaultLess;
};

template <>
struct key_compare_adapter<std::greater<absl::string_view>, absl::string_view> {
  using type = StringBtreeDefaultGreater;
};

template <>
struct key_compare_adapter<std::less<absl::Cord>, absl::Cord> {
  using type = StringBtreeDefaultLess;
};

template <>
struct key_compare_adapter<std::greater<absl::Cord>, absl::Cord> {
  using type = StringBtreeDefaultGreater;
};
298 
299 // Detects an 'absl_btree_prefer_linear_node_search' member. This is
300 // a protocol used as an opt-in or opt-out of linear search.
301 //
302 // For example, this would be useful for key types that wrap an integer
303 // and define their own cheap operator<(). For example:
304 //
305 // class K {
306 // public:
307 // using absl_btree_prefer_linear_node_search = std::true_type;
308 // ...
309 // private:
310 // friend bool operator<(K a, K b) { return a.k_ < b.k_; }
311 // int k_;
312 // };
313 //
314 // btree_map<K, V> m; // Uses linear search
315 //
316 // If T has the preference tag, then it has a preference.
317 // Btree will use the tag's truth value.
// Primary templates: `T` declares no linear-search preference, so there is no
// preference (and its value defaults to false).
template <typename T, typename = void>
struct has_linear_node_search_preference : std::false_type {};
template <typename T, typename = void>
struct prefers_linear_node_search : std::false_type {};
// `T` declares the `absl_btree_prefer_linear_node_search` tag, so it has a
// preference...
template <typename T>
struct has_linear_node_search_preference<
    T, absl::void_t<typename T::absl_btree_prefer_linear_node_search>>
    : std::true_type {};
// ...and the preference itself is the tag's truth value.
template <typename T>
struct prefers_linear_node_search<
    T, absl::void_t<typename T::absl_btree_prefer_linear_node_search>>
    : T::absl_btree_prefer_linear_node_search {};
330 
331 template <typename Compare, typename Key>
332 constexpr bool compare_has_valid_result_type() {
333  using compare_result_type = compare_result_t<Compare, Key, Key>;
334  return std::is_same<compare_result_type, bool>::value ||
335  std::is_convertible<compare_result_type, absl::weak_ordering>::value;
336 }
337 
// The `value_compare` functor exposed by btree maps: it orders two map values
// (key/mapped pairs) by applying the key comparator to their `.first` members.
template <typename original_key_compare, typename value_type>
class map_value_compare {
  template <typename Params>
  friend class btree;

  // Note: this `protected` is part of the API of std::map::value_compare. See
  // https://en.cppreference.com/w/cpp/container/map/value_compare.
 protected:
  explicit map_value_compare(original_key_compare c) : comp(std::move(c)) {}

  original_key_compare comp;  // NOLINT

 public:
  // Forwards to the key comparator, preserving its result type (which may be
  // bool or a three-way ordering, depending on the comparator).
  auto operator()(const value_type &lhs, const value_type &rhs) const
      -> decltype(comp(lhs.first, rhs.first)) {
    return comp(lhs.first, rhs.first);
  }
};
356 
// The policy bundle shared by btree and btree_node: key/allocator/comparator
// types, node sizing constants, and the slot-policy trampolines used for
// constructing, destroying, and moving stored values.
template <typename Key, typename Compare, typename Alloc, int TargetNodeSize,
          bool IsMulti, bool IsMap, typename SlotPolicy>
struct common_params {
  using original_key_compare = Compare;

  // If Compare is a common comparator for a string-like type, then we adapt it
  // to use heterogeneous lookup and to be a key-compare-to comparator.
  // We also adapt the comparator to diagnose invalid comparators in debug mode.
  // We disable this when `Compare` is invalid in a way that will cause
  // adaptation to fail (having invalid return type) so that we can give a
  // better compilation failure in static_assert_validation. If we don't do
  // this, then there will be cascading compilation failures that are confusing
  // for users.
  using key_compare =
      absl::conditional_t<!compare_has_valid_result_type<Compare, Key>(),
                          Compare,
                          typename key_compare_adapter<Compare, Key>::type>;

  // True iff the comparator was replaced by one of the
  // StringBtreeDefault{Less,Greater} adapters.
  static constexpr bool kIsKeyCompareStringAdapted =
      std::is_same<key_compare, StringBtreeDefaultLess>::value ||
      std::is_same<key_compare, StringBtreeDefaultGreater>::value;
  // The string adapters support heterogeneous lookup even when the original
  // comparator does not declare `is_transparent`.
  static constexpr bool kIsKeyCompareTransparent =
      IsTransparent<original_key_compare>::value ||
      kIsKeyCompareStringAdapted;
  // Whether nodes and iterators carry generation counters for detecting use of
  // invalidated iterators (enabled under ASan/MSan; see
  // ABSL_BTREE_ENABLE_GENERATIONS above).
  static constexpr bool kEnableGenerations =
#ifdef ABSL_BTREE_ENABLE_GENERATIONS
      true;
#else
      false;
#endif

  // A type which indicates if we have a key-compare-to functor or a plain old
  // key-compare functor.
  using is_key_compare_to = btree_is_key_compare_to<key_compare, Key>;

  using allocator_type = Alloc;
  using key_type = Key;
  using size_type = size_t;
  using difference_type = ptrdiff_t;

  using slot_policy = SlotPolicy;
  using slot_type = typename slot_policy::slot_type;
  using value_type = typename slot_policy::value_type;
  using init_type = typename slot_policy::mutable_value_type;
  using pointer = value_type *;
  using const_pointer = const value_type *;
  using reference = value_type &;
  using const_reference = const value_type &;

  // Maps compare whole values via map_value_compare (keys only); sets use the
  // original comparator directly since the value is the key.
  using value_compare =
      absl::conditional_t<IsMap,
                          map_value_compare<original_key_compare, value_type>,
                          original_key_compare>;
  using is_map_container = std::integral_constant<bool, IsMap>;

  // For the given lookup key type, returns whether we can have multiple
  // equivalent keys in the btree. If this is a multi-container, then we can.
  // Otherwise, we can have multiple equivalent keys only if all of the
  // following conditions are met:
  // - The comparator is transparent.
  // - The lookup key type is not the same as key_type.
  // - The comparator is not a StringBtreeDefault{Less,Greater} comparator
  //   that we know has the same equivalence classes for all lookup types.
  template <typename LookupKey>
  constexpr static bool can_have_multiple_equivalent_keys() {
    return IsMulti || (IsTransparent<key_compare>::value &&
                       !std::is_same<LookupKey, Key>::value &&
                       !kIsKeyCompareStringAdapted);
  }

  enum {
    kTargetNodeSize = TargetNodeSize,

    // Upper bound for the available space for slots. This is largest for leaf
    // nodes, which have overhead of at least a pointer + 4 bytes (for storing
    // 3 field_types and an enum).
    kNodeSlotSpace =
        TargetNodeSize - /*minimum overhead=*/(sizeof(void *) + 4),
  };

  // This is an integral type large enough to hold as many slots as will fit a
  // node of TargetNodeSize bytes.
  using node_count_type =
      absl::conditional_t<(kNodeSlotSpace / sizeof(slot_type) >
                           (std::numeric_limits<uint8_t>::max)()),
                          uint16_t, uint8_t>;  // NOLINT

  // The following methods are necessary for passing this struct as PolicyTraits
  // for node_handle and/or are used within btree.
  static value_type &element(slot_type *slot) {
    return slot_policy::element(slot);
  }
  static const value_type &element(const slot_type *slot) {
    return slot_policy::element(slot);
  }
  // Constructs a value in `slot` from the given arguments.
  template <class... Args>
  static void construct(Alloc *alloc, slot_type *slot, Args &&... args) {
    slot_policy::construct(alloc, slot, std::forward<Args>(args)...);
  }
  // Constructs a value in `slot` from another slot, per the slot policy.
  static void construct(Alloc *alloc, slot_type *slot, slot_type *other) {
    slot_policy::construct(alloc, slot, other);
  }
  static void destroy(Alloc *alloc, slot_type *slot) {
    slot_policy::destroy(alloc, slot);
  }
  // Moves the value from `old_slot` into `new_slot`, per the slot policy.
  static void transfer(Alloc *alloc, slot_type *new_slot, slot_type *old_slot) {
    slot_policy::transfer(alloc, new_slot, old_slot);
  }
};
466 
467 // An adapter class that converts a lower-bound compare into an upper-bound
468 // compare. Note: there is no need to make a version of this adapter specialized
469 // for key-compare-to functors because the upper-bound (the first value greater
470 // than the input) is never an exact match.
471 template <typename Compare>
472 struct upper_bound_adapter {
473  explicit upper_bound_adapter(const Compare &c) : comp(c) {}
474  template <typename K1, typename K2>
475  bool operator()(const K1 &a, const K2 &b) const {
476  // Returns true when a is not greater than b.
477  return !compare_internal::compare_result_as_less_than(comp(b, a));
478  }
479 
480  private:
481  Compare comp;
482 };
483 
484 enum class MatchKind : uint8_t { kEq, kNe };
485 
486 template <typename V, bool IsCompareTo>
487 struct SearchResult {
488  V value;
489  MatchKind match;
490 
491  static constexpr bool HasMatch() { return true; }
492  bool IsEq() const { return match == MatchKind::kEq; }
493 };
494 
495 // When we don't use CompareTo, `match` is not present.
496 // This ensures that callers can't use it accidentally when it provides no
497 // useful information.
498 template <typename V>
499 struct SearchResult<V, false> {
500  SearchResult() {}
501  explicit SearchResult(V v) : value(v) {}
502  SearchResult(V v, MatchKind /*match*/) : value(v) {}
503 
504  V value;
505 
506  static constexpr bool HasMatch() { return false; }
507  static constexpr bool IsEq() { return false; }
508 };
509 
510 // A node in the btree holding. The same node type is used for both internal
511 // and leaf nodes in the btree, though the nodes are allocated in such a way
512 // that the children array is only valid in internal nodes.
513 template <typename Params>
514 class btree_node {
515  using is_key_compare_to = typename Params::is_key_compare_to;
516  using field_type = typename Params::node_count_type;
517  using allocator_type = typename Params::allocator_type;
518  using slot_type = typename Params::slot_type;
519  using original_key_compare = typename Params::original_key_compare;
520 
521  public:
522  using params_type = Params;
523  using key_type = typename Params::key_type;
524  using value_type = typename Params::value_type;
525  using pointer = typename Params::pointer;
526  using const_pointer = typename Params::const_pointer;
527  using reference = typename Params::reference;
528  using const_reference = typename Params::const_reference;
529  using key_compare = typename Params::key_compare;
530  using size_type = typename Params::size_type;
531  using difference_type = typename Params::difference_type;
532 
533  // Btree decides whether to use linear node search as follows:
534  // - If the comparator expresses a preference, use that.
535  // - If the key expresses a preference, use that.
536  // - If the key is arithmetic and the comparator is std::less or
537  // std::greater, choose linear.
538  // - Otherwise, choose binary.
539  // TODO(ezb): Might make sense to add condition(s) based on node-size.
540  using use_linear_search = std::integral_constant<
541  bool, has_linear_node_search_preference<original_key_compare>::value
542  ? prefers_linear_node_search<original_key_compare>::value
543  : has_linear_node_search_preference<key_type>::value
544  ? prefers_linear_node_search<key_type>::value
545  : std::is_arithmetic<key_type>::value &&
546  (std::is_same<std::less<key_type>,
547  original_key_compare>::value ||
548  std::is_same<std::greater<key_type>,
549  original_key_compare>::value)>;
550 
551  // This class is organized by absl::container_internal::Layout as if it had
552  // the following structure:
553  // // A pointer to the node's parent.
554  // btree_node *parent;
555  //
556  // // When ABSL_BTREE_ENABLE_GENERATIONS is defined, we also have a
557  // // generation integer in order to check that when iterators are
558  // // used, they haven't been invalidated already. Only the generation on
559  // // the root is used, but we have one on each node because whether a node
560  // // is root or not can change.
561  // uint32_t generation;
562  //
563  // // The position of the node in the node's parent.
564  // field_type position;
565  // // The index of the first populated value in `values`.
566  // // TODO(ezb): right now, `start` is always 0. Update insertion/merge
567  // // logic to allow for floating storage within nodes.
568  // field_type start;
569  // // The index after the last populated value in `values`. Currently, this
570  // // is the same as the count of values.
571  // field_type finish;
572  // // The maximum number of values the node can hold. This is an integer in
573  // // [1, kNodeSlots] for root leaf nodes, kNodeSlots for non-root leaf
574  // // nodes, and kInternalNodeMaxCount (as a sentinel value) for internal
575  // // nodes (even though there are still kNodeSlots values in the node).
576  // // TODO(ezb): make max_count use only 4 bits and record log2(capacity)
577  // // to free extra bits for is_root, etc.
578  // field_type max_count;
579  //
580  // // The array of values. The capacity is `max_count` for leaf nodes and
581  // // kNodeSlots for internal nodes. Only the values in
582  // // [start, finish) have been initialized and are valid.
583  // slot_type values[max_count];
584  //
585  // // The array of child pointers. The keys in children[i] are all less
586  // // than key(i). The keys in children[i + 1] are all greater than key(i).
587  // // There are 0 children for leaf nodes and kNodeSlots + 1 children for
588  // // internal nodes.
589  // btree_node *children[kNodeSlots + 1];
590  //
591  // This class is only constructed by EmptyNodeType. Normally, pointers to the
592  // layout above are allocated, cast to btree_node*, and de-allocated within
593  // the btree implementation.
594  ~btree_node() = default;
595  btree_node(btree_node const &) = delete;
596  btree_node &operator=(btree_node const &) = delete;
597 
  // Public for EmptyNodeType. Returns the alignment required for any node
  // allocation; the static_assert guarantees that leaf and internal layouts
  // agree, so one value suffices for both.
  constexpr static size_type Alignment() {
    static_assert(LeafLayout(1).Alignment() == InternalLayout().Alignment(),
                  "Alignment of all nodes must be equal.");
    return InternalLayout().Alignment();
  }
604 
 protected:
  // Only EmptyNodeType constructs a btree_node directly; ordinary nodes are
  // raw layout-sized allocations cast to btree_node* (see the class comment).
  btree_node() = default;
607 
608  private:
  // The physical layout of a node: parent pointer, generation counter (0 or 1
  // elements), the four field_type fields (position/start/finish/max_count),
  // the slot array, and the child-pointer array (empty for leaves).
  using layout_type =
      absl::container_internal::Layout<btree_node *, uint32_t, field_type,
                                       slot_type, btree_node *>;
  // Allocation size of a leaf node holding `n` slots (no children array).
  constexpr static size_type SizeWithNSlots(size_type n) {
    return layout_type(
               /*parent*/ 1,
               /*generation*/ params_type::kEnableGenerations ? 1 : 0,
               /*position, start, finish, max_count*/ 4,
               /*slots*/ n,
               /*children*/ 0)
        .AllocSize();
  }
  // A lower bound for the overhead of fields other than slots in a leaf node.
  constexpr static size_type MinimumOverhead() {
    return SizeWithNSlots(1) - sizeof(slot_type);
  }

  // Compute how many values we can fit onto a leaf node taking into account
  // padding. Implemented as a constexpr-friendly recursive binary search over
  // [begin, end] for the largest slot count whose allocation size does not
  // exceed kTargetNodeSize.
  constexpr static size_type NodeTargetSlots(const size_type begin,
                                             const size_type end) {
    return begin == end ? begin
           : SizeWithNSlots((begin + end) / 2 + 1) >
                   params_type::kTargetNodeSize
               ? NodeTargetSlots(begin, (begin + end) / 2)
               : NodeTargetSlots((begin + end) / 2 + 1, end);
  }
636 
  enum {
    kTargetNodeSize = params_type::kTargetNodeSize,
    // Slot count that best fills a node of kTargetNodeSize bytes.
    kNodeTargetSlots = NodeTargetSlots(0, params_type::kTargetNodeSize),

    // We need a minimum of 3 slots per internal node in order to perform
    // splitting (1 value for the two nodes involved in the split and 1 value
    // propagated to the parent as the delimiter for the split). For performance
    // reasons, we don't allow 3 slots-per-node due to bad worst case occupancy
    // of 1/3 (for a node, not a b-tree).
    kMinNodeSlots = 4,

    // Actual slots per node: the target, clamped from below by the minimum.
    kNodeSlots =
        kNodeTargetSlots >= kMinNodeSlots ? kNodeTargetSlots : kMinNodeSlots,

    // The node is internal (i.e. is not a leaf node) if and only if `max_count`
    // has this value.
    kInternalNodeMaxCount = 0,
  };
655 
  // Leaves can have less than kNodeSlots values. `slot_count` is
  // parameterized because root leaf nodes may be allocated smaller than
  // full-sized leaves.
  constexpr static layout_type LeafLayout(const int slot_count = kNodeSlots) {
    return layout_type(
        /*parent*/ 1,
        /*generation*/ params_type::kEnableGenerations ? 1 : 0,
        /*position, start, finish, max_count*/ 4,
        /*slots*/ slot_count,
        /*children*/ 0);
  }
  // Internal nodes always have kNodeSlots slots plus kNodeSlots + 1 children.
  constexpr static layout_type InternalLayout() {
    return layout_type(
        /*parent*/ 1,
        /*generation*/ params_type::kEnableGenerations ? 1 : 0,
        /*position, start, finish, max_count*/ 4,
        /*slots*/ kNodeSlots,
        /*children*/ kNodeSlots + 1);
  }
  // Allocation size in bytes for a leaf with `slot_count` slots.
  constexpr static size_type LeafSize(const int slot_count = kNodeSlots) {
    return LeafLayout(slot_count).AllocSize();
  }
  // Allocation size in bytes for an internal node.
  constexpr static size_type InternalSize() {
    return InternalLayout().AllocSize();
  }
679 
  // N is the index of the type in the Layout definition.
  // ElementType<N> is the Nth type in the Layout definition.
  // Field indices: 0 = parent pointer, 1 = generation, 2 = the four
  // field_type fields, 3 = slots, 4 = children (internal nodes only).
  template <size_type N>
  inline typename layout_type::template ElementType<N> *GetField() {
    // We assert that we don't read from values that aren't there.
    assert(N < 4 || is_internal());
    return InternalLayout().template Pointer<N>(reinterpret_cast<char *>(this));
  }
  template <size_type N>
  inline const typename layout_type::template ElementType<N> *GetField() const {
    assert(N < 4 || is_internal());
    return InternalLayout().template Pointer<N>(
        reinterpret_cast<const char *>(this));
  }
  void set_parent(btree_node *p) { *GetField<0>() = p; }
  field_type &mutable_finish() { return GetField<2>()[2]; }
  slot_type *slot(int i) { return &GetField<3>()[i]; }
  slot_type *start_slot() { return slot(start()); }
  slot_type *finish_slot() { return slot(finish()); }
  const slot_type *slot(int i) const { return &GetField<3>()[i]; }
  // Within GetField<2>: [0] = position, [1] = start, [2] = finish,
  // [3] = max_count.
  void set_position(field_type v) { GetField<2>()[0] = v; }
  void set_start(field_type v) { GetField<2>()[1] = v; }
  void set_finish(field_type v) { GetField<2>()[2] = v; }
  // This method is only called by the node init methods.
  void set_max_count(field_type v) { GetField<2>()[3] = v; }
705 
 public:
  // Whether this is a leaf node or not. This value doesn't change after the
  // node is created: leaves store their real capacity in the max_count field,
  // while internal nodes store the kInternalNodeMaxCount sentinel there.
  bool is_leaf() const { return GetField<2>()[3] != kInternalNodeMaxCount; }
  // Whether this is an internal node or not. This value doesn't change after
  // the node is created.
  bool is_internal() const { return !is_leaf(); }

  // Getter for the position of this node in its parent.
  field_type position() const { return GetField<2>()[0]; }

  // Getter for the offset of the first value in the `values` array.
  field_type start() const {
    // TODO(ezb): when floating storage is implemented, return GetField<2>()[1];
    assert(GetField<2>()[1] == 0);
    return 0;
  }

  // Getter for the offset after the last value in the `values` array.
  field_type finish() const { return GetField<2>()[2]; }

  // Getters for the number of values stored in this node.
  field_type count() const {
    assert(finish() >= start());
    return finish() - start();
  }
  field_type max_count() const {
    // Internal nodes have max_count==kInternalNodeMaxCount.
    // Leaf nodes have max_count in [1, kNodeSlots].
    const field_type max_count = GetField<2>()[3];
    return max_count == field_type{kInternalNodeMaxCount}
               ? field_type{kNodeSlots}
               : max_count;
  }
740 
  // Getter for the parent of this node.
  btree_node *parent() const { return *GetField<0>(); }
  // Getter for whether the node is the root of the tree. The parent of the
  // root of the tree is the leftmost node in the tree which is guaranteed to
  // be a leaf.
  bool is_root() const { return parent()->is_leaf(); }
  // Turns this node into the root: copies the generation from the current
  // root and re-parents to the current root's parent (the leftmost leaf).
  void make_root() {
    assert(parent()->is_root());
    set_generation(parent()->generation());
    set_parent(parent()->parent());
  }
752 
  // Gets the root node's generation integer, which is the one used by the
  // tree. Walks up the parent chain to the root, so only valid when
  // generations are enabled.
  uint32_t *get_root_generation() const {
    assert(params_type::kEnableGenerations);
    const btree_node *curr = this;
    for (; !curr->is_root(); curr = curr->parent()) continue;
    return const_cast<uint32_t *>(&curr->GetField<1>()[0]);
  }

  // Returns the generation for iterator validation. Always 0 when generations
  // are disabled, so the checks compile away.
  uint32_t generation() const {
    return params_type::kEnableGenerations ? *get_root_generation() : 0;
  }
  // Updates generation. Should only be called on a root node or during node
  // initialization.
  void set_generation(uint32_t generation) {
    if (params_type::kEnableGenerations) GetField<1>()[0] = generation;
  }
  // Updates the generation. We do this whenever the node is mutated.
  void next_generation() {
    if (params_type::kEnableGenerations) ++*get_root_generation();
  }
774 
  // Getters for the key/value at position i in the node.
  const key_type &key(int i) const { return params_type::key(slot(i)); }
  reference value(int i) { return params_type::element(slot(i)); }
  const_reference value(int i) const { return params_type::element(slot(i)); }

  // Getters/setter for the child at position i in the node.
  btree_node *child(int i) const { return GetField<4>()[i]; }
  btree_node *start_child() const { return child(start()); }
  btree_node *&mutable_child(int i) { return GetField<4>()[i]; }
  // Poisons the vacated child pointer so sanitizers flag any later use.
  void clear_child(int i) {
    absl::container_internal::SanitizerPoisonObject(&mutable_child(i));
  }
  // Stores child `c` at position i and records that position in the child.
  void set_child(int i, btree_node *c) {
    absl::container_internal::SanitizerUnpoisonObject(&mutable_child(i));
    mutable_child(i) = c;
    c->set_position(i);
  }
  // Like set_child, but additionally sets this node as the child's parent.
  void init_child(int i, btree_node *c) {
    set_child(i, c);
    c->set_parent(this);
  }
796 
  // Returns the position of the first value whose key is not less than k.
  // Chooses linear vs. binary search at compile time via use_linear_search.
  template <typename K>
  SearchResult<int, is_key_compare_to::value> lower_bound(
      const K &k, const key_compare &comp) const {
    return use_linear_search::value ? linear_search(k, comp)
                                    : binary_search(k, comp);
  }
  // Returns the position of the first value whose key is greater than k.
  // Implemented by searching with the comparator inverted through
  // upper_bound_adapter; exact-match information is discarded since an upper
  // bound is never an exact match.
  template <typename K>
  int upper_bound(const K &k, const key_compare &comp) const {
    auto upper_compare = upper_bound_adapter<key_compare>(comp);
    return use_linear_search::value ? linear_search(k, upper_compare).value
                                    : binary_search(k, upper_compare).value;
  }

  // Dispatches to the boolean or compare-to linear search implementation
  // depending on the comparator kind.
  template <typename K, typename Compare>
  SearchResult<int, btree_is_key_compare_to<Compare, key_type>::value>
  linear_search(const K &k, const Compare &comp) const {
    return linear_search_impl(k, start(), finish(), comp,
                              btree_is_key_compare_to<Compare, key_type>());
  }

  // Dispatches to the boolean or compare-to binary search implementation
  // depending on the comparator kind.
  template <typename K, typename Compare>
  SearchResult<int, btree_is_key_compare_to<Compare, key_type>::value>
  binary_search(const K &k, const Compare &comp) const {
    return binary_search_impl(k, start(), finish(), comp,
                              btree_is_key_compare_to<Compare, key_type>());
  }
825 
826  // Returns the position of the first value whose key is not less than k using
827  // linear search performed using plain compare.
828  template <typename K, typename Compare>
829  SearchResult<int, false> linear_search_impl(
830  const K &k, int s, const int e, const Compare &comp,
831  std::false_type /* IsCompareTo */) const {
832  while (s < e) {
833  if (!comp(key(s), k)) {
834  break;
835  }
836  ++s;
837  }
838  return SearchResult<int, false>{s};
839  }
840 
841  // Returns the position of the first value whose key is not less than k using
842  // linear search performed using compare-to.
843  template <typename K, typename Compare>
844  SearchResult<int, true> linear_search_impl(
845  const K &k, int s, const int e, const Compare &comp,
846  std::true_type /* IsCompareTo */) const {
847  while (s < e) {
848  const absl::weak_ordering c = comp(key(s), k);
849  if (c == 0) {
850  return {s, MatchKind::kEq};
851  } else if (c > 0) {
852  break;
853  }
854  ++s;
855  }
856  return {s, MatchKind::kNe};
857  }
858 
859  // Returns the position of the first value whose key is not less than k using
860  // binary search performed using plain compare.
861  template <typename K, typename Compare>
862  SearchResult<int, false> binary_search_impl(
863  const K &k, int s, int e, const Compare &comp,
864  std::false_type /* IsCompareTo */) const {
865  while (s != e) {
866  const int mid = (s + e) >> 1;
867  if (comp(key(mid), k)) {
868  s = mid + 1;
869  } else {
870  e = mid;
871  }
872  }
873  return SearchResult<int, false>{s};
874  }
875 
  // Returns the position of the first value whose key is not less than k using
  // binary search performed using compare-to.
  template <typename K, typename CompareTo>
  SearchResult<int, true> binary_search_impl(
      const K &k, int s, int e, const CompareTo &comp,
      std::true_type /* IsCompareTo */) const {
    if (params_type::template can_have_multiple_equivalent_keys<K>()) {
      // Multi-container (or heterogeneous-lookup) case: even after seeing an
      // equal key we must keep narrowing leftward so that the *first*
      // equivalent entry is returned.
      MatchKind exact_match = MatchKind::kNe;
      while (s != e) {
        const int mid = (s + e) >> 1;
        const absl::weak_ordering c = comp(key(mid), k);
        if (c < 0) {
          s = mid + 1;
        } else {
          e = mid;
          if (c == 0) {
            // Need to return the first value whose key is not less than k,
            // which requires continuing the binary search if there could be
            // multiple equivalent keys.
            exact_match = MatchKind::kEq;
          }
        }
      }
      return {s, exact_match};
    } else {  // Can't have multiple equivalent keys.
      // Unique-key case: the search may terminate as soon as an equal key is
      // found because at most one can exist.
      while (s != e) {
        const int mid = (s + e) >> 1;
        const absl::weak_ordering c = comp(key(mid), k);
        if (c < 0) {
          s = mid + 1;
        } else if (c > 0) {
          e = mid;
        } else {
          return {mid, MatchKind::kEq};
        }
      }
      return {s, MatchKind::kNe};
    }
  }
915 
  // Emplaces a value at position i, shifting all existing values and
  // children at positions >= i to the right by 1.
  template <typename... Args>
  void emplace_value(size_type i, allocator_type *alloc, Args &&... args);

  // Removes the values at positions [i, i + to_erase), shifting all existing
  // values and children after that range to the left by to_erase. Clears all
  // children between [i, i + to_erase).
  void remove_values(field_type i, field_type to_erase, allocator_type *alloc);

  // Rebalances a node with its right sibling.
  void rebalance_right_to_left(int to_move, btree_node *right,
                               allocator_type *alloc);
  void rebalance_left_to_right(int to_move, btree_node *right,
                               allocator_type *alloc);

  // Splits a node, moving a portion of the node's values to its right sibling.
  void split(int insert_position, btree_node *dest, allocator_type *alloc);

  // Merges a node with its right sibling, moving all of the values and the
  // delimiting key in the parent node onto itself, and deleting the src node.
  void merge(btree_node *src, allocator_type *alloc);

  // Node allocation/deletion routines.
  // Initializes this node as a leaf: empty value range, generation 0, and all
  // value slots poisoned for sanitizers until values are constructed in them.
  void init_leaf(int max_count, btree_node *parent) {
    set_generation(0);
    set_parent(parent);
    set_position(0);
    set_start(0);
    set_finish(0);
    set_max_count(max_count);
    absl::container_internal::SanitizerPoisonMemoryRegion(
        start_slot(), max_count * sizeof(slot_type));
  }
  // Initializes this node as an internal node: leaf layout plus a poisoned
  // child-pointer array (kNodeSlots + 1 children).
  void init_internal(btree_node *parent) {
    init_leaf(kNodeSlots, parent);
    // Set `max_count` to a sentinel value to indicate that this node is
    // internal.
    set_max_count(kInternalNodeMaxCount);
    absl::container_internal::SanitizerPoisonMemoryRegion(
        &mutable_child(start()), (kNodeSlots + 1) * sizeof(btree_node *));
  }

  // Returns `size` bytes of node memory to the allocator. Does not destroy
  // values; callers handle that separately.
  static void deallocate(const size_type size, btree_node *node,
                         allocator_type *alloc) {
    absl::container_internal::Deallocate<Alignment()>(alloc, node, size);
  }

  // Deletes a node and all of its children.
  static void clear_and_delete(btree_node *node, allocator_type *alloc);
966 
 private:
  // Constructs a value in slot i. Bumps the tree generation first since this
  // is a mutation, and unpoisons the slot before construction.
  template <typename... Args>
  void value_init(const field_type i, allocator_type *alloc, Args &&... args) {
    next_generation();
    absl::container_internal::SanitizerUnpoisonObject(slot(i));
    params_type::construct(alloc, slot(i), std::forward<Args>(args)...);
  }
  // Destroys the value in slot i and re-poisons the slot.
  void value_destroy(const field_type i, allocator_type *alloc) {
    next_generation();
    params_type::destroy(alloc, slot(i));
    absl::container_internal::SanitizerPoisonObject(slot(i));
  }
  // Destroys the n values in slots [i, i + n), re-poisoning each slot.
  void value_destroy_n(const field_type i, const field_type n,
                       allocator_type *alloc) {
    next_generation();
    for (slot_type *s = slot(i), *end = slot(i + n); s != end; ++s) {
      params_type::destroy(alloc, s);
      absl::container_internal::SanitizerPoisonObject(s);
    }
  }

  // Moves the contents of slot `src` into slot `dest` and swaps the sanitizer
  // poisoning accordingly (dest becomes live, src becomes poisoned).
  static void transfer(slot_type *dest, slot_type *src, allocator_type *alloc) {
    absl::container_internal::SanitizerUnpoisonObject(dest);
    params_type::transfer(alloc, dest, src);
    absl::container_internal::SanitizerPoisonObject(src);
  }

  // Transfers value from slot `src_i` in `src_node` to slot `dest_i` in `this`.
  void transfer(const size_type dest_i, const size_type src_i,
                btree_node *src_node, allocator_type *alloc) {
    next_generation();
    transfer(slot(dest_i), src_node->slot(src_i), alloc);
  }

  // Transfers `n` values starting at value `src_i` in `src_node` into the
  // values starting at value `dest_i` in `this`. Iterates front to back; see
  // transfer_n_backward for the reverse iteration order.
  void transfer_n(const size_type n, const size_type dest_i,
                  const size_type src_i, btree_node *src_node,
                  allocator_type *alloc) {
    next_generation();
    for (slot_type *src = src_node->slot(src_i), *end = src + n,
                   *dest = slot(dest_i);
         src != end; ++src, ++dest) {
      transfer(dest, src, alloc);
    }
  }

  // Same as above, except that we start at the end and work our way to the
  // beginning.
  void transfer_n_backward(const size_type n, const size_type dest_i,
                           const size_type src_i, btree_node *src_node,
                           allocator_type *alloc) {
    next_generation();
    for (slot_type *src = src_node->slot(src_i + n - 1), *end = src - n,
                   *dest = slot(dest_i + n - 1);
         src != end; --src, --dest) {
      transfer(dest, src, alloc);
    }
  }
1026 
1027  template <typename P>
1028  friend class btree;
1029  template <typename N, typename R, typename P>
1030  friend class btree_iterator;
1031  friend class BtreeNodePeer;
1032  friend struct btree_access;
1033 };
1034 
// A bidirectional iterator over the values of a btree. Instantiated with
// const-qualified Node/Reference/Pointer template arguments for
// const_iterator. The iterator stores the node it points into and the value
// position within that node; when ABSL_BTREE_ENABLE_GENERATIONS is defined it
// also records a generation counter used to detect invalidation.
template <typename Node, typename Reference, typename Pointer>
class btree_iterator {
  using key_type = typename Node::key_type;
  using size_type = typename Node::size_type;
  using params_type = typename Node::params_type;
  using is_map_container = typename params_type::is_map_container;

  using node_type = Node;
  using normal_node = typename std::remove_const<Node>::type;
  using const_node = const Node;
  using normal_pointer = typename params_type::pointer;
  using normal_reference = typename params_type::reference;
  using const_pointer = typename params_type::const_pointer;
  using const_reference = typename params_type::const_reference;
  using slot_type = typename params_type::slot_type;

  using iterator =
      btree_iterator<normal_node, normal_reference, normal_pointer>;
  using const_iterator =
      btree_iterator<const_node, const_reference, const_pointer>;

 public:
  // These aliases are public for std::iterator_traits.
  using difference_type = typename Node::difference_type;
  using value_type = typename params_type::value_type;
  using pointer = Pointer;
  using reference = Reference;
  using iterator_category = std::bidirectional_iterator_tag;

  btree_iterator() : btree_iterator(nullptr, -1) {}
  explicit btree_iterator(Node *n) : btree_iterator(n, n->start()) {}
  btree_iterator(Node *n, int p) : node_(n), position_(p) {
#ifdef ABSL_BTREE_ENABLE_GENERATIONS
    // Use `~uint32_t{}` as a sentinel value for iterator generations so it
    // doesn't match the initial value for the actual generation.
    generation_ = n != nullptr ? n->generation() : ~uint32_t{};
#endif
  }

  // NOTE: this SFINAE allows for implicit conversions from iterator to
  // const_iterator, but it specifically avoids hiding the copy constructor so
  // that the trivial one will be used when possible.
  template <typename N, typename R, typename P,
            absl::enable_if_t<
                std::is_same<btree_iterator<N, R, P>, iterator>::value &&
                std::is_same<btree_iterator, const_iterator>::value,
                int> = 0>
  btree_iterator(const btree_iterator<N, R, P> other)  // NOLINT
      : node_(other.node_), position_(other.position_) {
#ifdef ABSL_BTREE_ENABLE_GENERATIONS
    generation_ = other.generation_;
#endif
  }

  // Equality compares node and position only; the generation participates in
  // validity checking, not in iterator identity.
  bool operator==(const iterator &other) const {
    return node_ == other.node_ && position_ == other.position_;
  }
  bool operator==(const const_iterator &other) const {
    return node_ == other.node_ && position_ == other.position_;
  }
  bool operator!=(const iterator &other) const {
    return node_ != other.node_ || position_ != other.position_;
  }
  bool operator!=(const const_iterator &other) const {
    return node_ != other.node_ || position_ != other.position_;
  }

  // Accessors for the key/value the iterator is pointing at.
  reference operator*() const {
    ABSL_HARDENING_ASSERT(node_ != nullptr);
    ABSL_HARDENING_ASSERT(node_->start() <= position_);
    ABSL_HARDENING_ASSERT(node_->finish() > position_);
    assert_valid_generation();
    return node_->value(position_);
  }
  pointer operator->() const { return &operator*(); }

  btree_iterator &operator++() {
    increment();
    return *this;
  }
  btree_iterator &operator--() {
    decrement();
    return *this;
  }
  btree_iterator operator++(int) {
    btree_iterator tmp = *this;
    ++*this;
    return tmp;
  }
  btree_iterator operator--(int) {
    btree_iterator tmp = *this;
    --*this;
    return tmp;
  }

 private:
  friend iterator;
  friend const_iterator;
  template <typename Params>
  friend class btree;
  template <typename Tree>
  friend class btree_container;
  template <typename Tree>
  friend class btree_set_container;
  template <typename Tree>
  friend class btree_map_container;
  template <typename Tree>
  friend class btree_multiset_container;
  template <typename TreeType, typename CheckerType>
  friend class base_checker;
  friend struct btree_access;

  // This SFINAE allows explicit conversions from const_iterator to
  // iterator, but also avoids hiding the copy constructor.
  // NOTE: the const_cast is safe because this constructor is only called by
  // non-const methods and the container owns the nodes.
  template <typename N, typename R, typename P,
            absl::enable_if_t<
                std::is_same<btree_iterator<N, R, P>, const_iterator>::value &&
                std::is_same<btree_iterator, iterator>::value,
                int> = 0>
  explicit btree_iterator(const btree_iterator<N, R, P> other)
      : node_(const_cast<node_type *>(other.node_)),
        position_(other.position_) {
#ifdef ABSL_BTREE_ENABLE_GENERATIONS
    generation_ = other.generation_;
#endif
  }

  // Increment/decrement the iterator.
  // Fast path: a plain position bump that stays inside a leaf node. Anything
  // crossing a node boundary is handled out of line by the *_slow methods.
  void increment() {
    assert_valid_generation();
    if (node_->is_leaf() && ++position_ < node_->finish()) {
      return;
    }
    increment_slow();
  }
  void increment_slow();

  void decrement() {
    assert_valid_generation();
    if (node_->is_leaf() && --position_ >= node_->start()) {
      return;
    }
    decrement_slow();
  }
  void decrement_slow();

  // Updates the generation. For use internally right before we return an
  // iterator to the user.
  void update_generation() {
#ifdef ABSL_BTREE_ENABLE_GENERATIONS
    if (node_ != nullptr) generation_ = node_->generation();
#endif
  }

  const key_type &key() const { return node_->key(position_); }
  decltype(std::declval<Node *>()->slot(0)) slot() {
    return node_->slot(position_);
  }

  // Fatal-logs if the tree has been mutated since this iterator last recorded
  // the generation (compiled in only with ABSL_BTREE_ENABLE_GENERATIONS).
  void assert_valid_generation() const {
#ifdef ABSL_BTREE_ENABLE_GENERATIONS
    if (node_ != nullptr && node_->generation() != generation_) {
      ABSL_INTERNAL_LOG(
          FATAL,
          "Attempting to use an invalidated iterator. The corresponding b-tree "
          "container has been mutated since this iterator was constructed.");
    }
#endif
  }

  // The node in the tree the iterator is pointing at.
  Node *node_;
  // The position within the node of the tree the iterator is pointing at.
  // NOTE: this is an int rather than a field_type because iterators can point
  // to invalid positions (such as -1) in certain circumstances.
  int position_;
#ifdef ABSL_BTREE_ENABLE_GENERATIONS
  // Used to check that the iterator hasn't been invalidated.
  uint32_t generation_;
#endif
};
1219 
1220 template <typename Params>
1221 class btree {
1222  using node_type = btree_node<Params>;
1223  using is_key_compare_to = typename Params::is_key_compare_to;
1224  using field_type = typename node_type::field_type;
1225 
  // We use a static empty node for the root/leftmost/rightmost of empty btrees
  // in order to avoid branching in begin()/end().
  struct alignas(node_type::Alignment()) EmptyNodeType : node_type {
    using field_type = typename node_type::field_type;
    node_type *parent;
#ifdef ABSL_BTREE_ENABLE_GENERATIONS
    uint32_t generation = 0;
#endif
    field_type position = 0;
    field_type start = 0;
    field_type finish = 0;
    // max_count must be != kInternalNodeMaxCount (so that this node is regarded
    // as a leaf node). max_count() is never called when the tree is empty.
    field_type max_count = node_type::kInternalNodeMaxCount + 1;

#ifdef _MSC_VER
    // MSVC has constexpr code generations bugs here.
    EmptyNodeType() : parent(this) {}
#else
    constexpr EmptyNodeType(node_type *p) : parent(p) {}
#endif
  };

  // Returns the shared empty node. On MSVC it is heap-allocated once per
  // program (and never deleted here); elsewhere it is a constexpr static whose
  // parent pointer refers to itself.
  static node_type *EmptyNode() {
#ifdef _MSC_VER
    static EmptyNodeType *empty_node = new EmptyNodeType;
    // This assert fails on some other construction methods.
    assert(empty_node->parent == empty_node);
    return empty_node;
#else
    static constexpr EmptyNodeType empty_node(
        const_cast<EmptyNodeType *>(&empty_node));
    return const_cast<EmptyNodeType *>(&empty_node);
#endif
  }

  enum : uint32_t {
    kNodeSlots = node_type::kNodeSlots,
    kMinNodeValues = kNodeSlots / 2,
  };

  // Aggregate counts of leaf vs. internal nodes, produced by internal_stats().
  struct node_stats {
    using size_type = typename Params::size_type;

    node_stats(size_type l, size_type i) : leaf_nodes(l), internal_nodes(i) {}

    node_stats &operator+=(const node_stats &other) {
      leaf_nodes += other.leaf_nodes;
      internal_nodes += other.internal_nodes;
      return *this;
    }

    size_type leaf_nodes;
    size_type internal_nodes;
  };
1281 
1282  public:
1283  using key_type = typename Params::key_type;
1284  using value_type = typename Params::value_type;
1285  using size_type = typename Params::size_type;
1286  using difference_type = typename Params::difference_type;
1287  using key_compare = typename Params::key_compare;
1288  using original_key_compare = typename Params::original_key_compare;
1289  using value_compare = typename Params::value_compare;
1290  using allocator_type = typename Params::allocator_type;
1291  using reference = typename Params::reference;
1292  using const_reference = typename Params::const_reference;
1293  using pointer = typename Params::pointer;
1294  using const_pointer = typename Params::const_pointer;
1295  using iterator =
1296  typename btree_iterator<node_type, reference, pointer>::iterator;
1297  using const_iterator = typename iterator::const_iterator;
1298  using reverse_iterator = std::reverse_iterator<iterator>;
1299  using const_reverse_iterator = std::reverse_iterator<const_iterator>;
1300  using node_handle_type = node_handle<Params, Params, allocator_type>;
1301 
1302  // Internal types made public for use by btree_container types.
1303  using params_type = Params;
1304  using slot_type = typename Params::slot_type;
1305 
 private:
  // Copies or moves (depending on the template parameter) the values in
  // other into this btree in their order in other. This btree must be empty
  // before this method is called. This method is used in copy construction,
  // copy assignment, and move assignment.
  template <typename Btree>
  void copy_or_move_values_in_order(Btree &other);

  // Validates that various assumptions/requirements are true at compile time.
  constexpr static bool static_assert_validation();

 public:
  // NOTE: the comparator, allocator, and rightmost-node pointer are bundled
  // together in the `rightmost_` member (accessed via get<0>/get<1>/get<2>
  // respectively — see the private accessors below).
  btree(const key_compare &comp, const allocator_type &alloc)
      : root_(EmptyNode()), rightmost_(comp, alloc, EmptyNode()), size_(0) {}

  btree(const btree &other) : btree(other, other.allocator()) {}
  btree(const btree &other, const allocator_type &alloc)
      : btree(other.key_comp(), alloc) {
    copy_or_move_values_in_order(other);
  }
  // Move construction steals other's root and size, then resets other to an
  // empty tree (EmptyNode root/rightmost, size 0).
  btree(btree &&other) noexcept
      : root_(absl::exchange(other.root_, EmptyNode())),
        rightmost_(std::move(other.rightmost_)),
        size_(absl::exchange(other.size_, 0)) {
    other.mutable_rightmost() = EmptyNode();
  }
  btree(btree &&other, const allocator_type &alloc)
      : btree(other.key_comp(), alloc) {
    if (alloc == other.allocator()) {
      // Equal allocators: node ownership can be transferred wholesale.
      swap(other);
    } else {
      // Move values from `other` one at a time when allocators are different.
      copy_or_move_values_in_order(other);
    }
  }

  ~btree() {
    // Put static_asserts in destructor to avoid triggering them before the type
    // is complete.
    static_assert(static_assert_validation(), "This call must be elided.");
    clear();
  }

  // Assign the contents of other to *this.
  btree &operator=(const btree &other);
  btree &operator=(btree &&other) noexcept;

  // Iteration. The leftmost node is reachable as root()->parent(), so these
  // require no branching even for empty trees (see EmptyNode()).
  iterator begin() { return iterator(leftmost()); }
  const_iterator begin() const { return const_iterator(leftmost()); }
  iterator end() { return iterator(rightmost(), rightmost()->finish()); }
  const_iterator end() const {
    return const_iterator(rightmost(), rightmost()->finish());
  }
  reverse_iterator rbegin() { return reverse_iterator(end()); }
  const_reverse_iterator rbegin() const {
    return const_reverse_iterator(end());
  }
  reverse_iterator rend() { return reverse_iterator(begin()); }
  const_reverse_iterator rend() const {
    return const_reverse_iterator(begin());
  }
1367 
  // Finds the first element whose key is not less than `key`. A null result
  // from the internal search is translated to end() by internal_end().
  template <typename K>
  iterator lower_bound(const K &key) {
    return internal_end(internal_lower_bound(key).value);
  }
  template <typename K>
  const_iterator lower_bound(const K &key) const {
    return internal_end(internal_lower_bound(key).value);
  }

  // Finds the first element whose key is not less than `key` and also returns
  // whether that element is equal to `key`.
  template <typename K>
  std::pair<iterator, bool> lower_bound_equal(const K &key) const;

  // Finds the first element whose key is greater than `key`.
  template <typename K>
  iterator upper_bound(const K &key) {
    return internal_end(internal_upper_bound(key));
  }
  template <typename K>
  const_iterator upper_bound(const K &key) const {
    return internal_end(internal_upper_bound(key));
  }

  // Finds the range of values which compare equal to key. The first member of
  // the returned pair is equal to lower_bound(key). The second member of the
  // pair is equal to upper_bound(key).
  template <typename K>
  std::pair<iterator, iterator> equal_range(const K &key);
  // Const overload, delegating to the non-const implementation above.
  template <typename K>
  std::pair<const_iterator, const_iterator> equal_range(const K &key) const {
    return const_cast<btree *>(this)->equal_range(key);
  }
1402 
  // Inserts a value into the btree only if it does not already exist. The
  // boolean return value indicates whether insertion succeeded or failed.
  // Requirement: if `key` already exists in the btree, does not consume `args`.
  // Requirement: `key` is never referenced after consuming `args`.
  template <typename K, typename... Args>
  std::pair<iterator, bool> insert_unique(const K &key, Args &&... args);

  // Inserts with hint. Checks to see if the value should be placed immediately
  // before `position` in the tree. If so, then the insertion will take
  // amortized constant time. If not, the insertion will take amortized
  // logarithmic time as if a call to insert_unique() were made.
  // Requirement: if `key` already exists in the btree, does not consume `args`.
  // Requirement: `key` is never referenced after consuming `args`.
  template <typename K, typename... Args>
  std::pair<iterator, bool> insert_hint_unique(iterator position,
                                               const K &key,
                                               Args &&... args);

  // Insert a range of values into the btree.
  // Note: the first overload avoids constructing a value_type if the key
  // already exists in the btree. It is selected (via the trailing int/char
  // parameter) only when the comparator can be applied directly to the
  // iterator's key type.
  template <typename InputIterator,
            typename = decltype(std::declval<const key_compare &>()(
                params_type::key(*std::declval<InputIterator>()),
                std::declval<const key_type &>()))>
  void insert_iterator_unique(InputIterator b, InputIterator e, int);
  // We need the second overload for cases in which we need to construct a
  // value_type in order to compare it with the keys already in the btree.
  template <typename InputIterator>
  void insert_iterator_unique(InputIterator b, InputIterator e, char);

  // Inserts a value into the btree.
  template <typename ValueType>
  iterator insert_multi(const key_type &key, ValueType &&v);

  // Inserts a value into the btree. Convenience overload that extracts the
  // key from `v` itself.
  template <typename ValueType>
  iterator insert_multi(ValueType &&v) {
    return insert_multi(params_type::key(v), std::forward<ValueType>(v));
  }

  // Insert with hint. Check to see if the value should be placed immediately
  // before position in the tree. If it does, then the insertion will take
  // amortized constant time. If not, the insertion will take amortized
  // logarithmic time as if a call to insert_multi(v) were made.
  template <typename ValueType>
  iterator insert_hint_multi(iterator position, ValueType &&v);

  // Insert a range of values into the btree.
  template <typename InputIterator>
  void insert_iterator_multi(InputIterator b, InputIterator e);

  // Erase the specified iterator from the btree. The iterator must be valid
  // (i.e. not equal to end()). Return an iterator pointing to the node after
  // the one that was erased (or end() if none exists).
  // Requirement: does not read the value at `*iter`.
  iterator erase(iterator iter);

  // Erases range. Returns the number of keys erased and an iterator pointing
  // to the element after the last erased element.
  std::pair<size_type, iterator> erase_range(iterator begin, iterator end);

  // Finds an element with key equivalent to `key` or returns `end()` if `key`
  // is not present.
  template <typename K>
  iterator find(const K &key) {
    return internal_end(internal_find(key));
  }
  template <typename K>
  const_iterator find(const K &key) const {
    return internal_end(internal_find(key));
  }

  // Clear the btree, deleting all of the values it contains.
  void clear();

  // Swaps the contents of `this` and `other`.
  void swap(btree &other);
1481 
  // The comparator is stored in slot 0 of the `rightmost_` bundle.
  const key_compare &key_comp() const noexcept {
    return rightmost_.template get<0>();
  }
  // Returns true iff `a` orders strictly before `b` under key_comp(),
  // normalizing a three-way (compare-to) result to a boolean when needed.
  template <typename K1, typename K2>
  bool compare_keys(const K1 &a, const K2 &b) const {
    return compare_internal::compare_result_as_less_than(key_comp()(a, b));
  }

  value_compare value_comp() const {
    return value_compare(original_key_compare(key_comp()));
  }

  // Verifies the structure of the btree.
  void verify() const;

  // Size routines.
  size_type size() const { return size_; }
  size_type max_size() const { return (std::numeric_limits<size_type>::max)(); }
  bool empty() const { return size_ == 0; }
1501 
1502  // The height of the btree. An empty tree will have height 0.
1503  size_type height() const {
1504  size_type h = 0;
1505  if (!empty()) {
1506  // Count the length of the chain from the leftmost node up to the
1507  // root. We actually count from the root back around to the level below
1508  // the root, but the calculation is the same because of the circularity
1509  // of that traversal.
1510  const node_type *n = root();
1511  do {
1512  ++h;
1513  n = n->parent();
1514  } while (n != root());
1515  }
1516  return h;
1517  }
1518 
  // The number of internal, leaf and total nodes used by the btree.
  size_type leaf_nodes() const { return internal_stats(root()).leaf_nodes; }
  size_type internal_nodes() const {
    return internal_stats(root()).internal_nodes;
  }
  size_type nodes() const {
    node_stats stats = internal_stats(root());
    return stats.leaf_nodes + stats.internal_nodes;
  }

  // The total number of bytes used by the btree.
  // TODO(b/169338300): update to support node_btree_*.
  size_type bytes_used() const {
    node_stats stats = internal_stats(root());
    if (stats.leaf_nodes == 1 && stats.internal_nodes == 0) {
      // A single-leaf tree's root may have been allocated with a non-default
      // max_count (see new_leaf_root_node), so size it exactly.
      return sizeof(*this) + node_type::LeafSize(root()->max_count());
    } else {
      return sizeof(*this) + stats.leaf_nodes * node_type::LeafSize() +
             stats.internal_nodes * node_type::InternalSize();
    }
  }

  // The average number of bytes used per value stored in the btree assuming
  // random insertion order.
  static double average_bytes_per_value() {
    // The expected number of values per node with random insertion order is the
    // average of the maximum and minimum numbers of values per node.
    const double expected_values_per_node =
        (kNodeSlots + kMinNodeValues) / 2.0;
    return node_type::LeafSize() / expected_values_per_node;
  }

  // The fullness of the btree. Computed as the number of elements in the btree
  // divided by the maximum number of elements a tree with the current number
  // of nodes could hold. A value of 1 indicates perfect space
  // utilization. Smaller values indicate space wastage.
  // Returns 0 for empty trees.
  double fullness() const {
    if (empty()) return 0.0;
    return static_cast<double>(size()) / (nodes() * kNodeSlots);
  }
  // The overhead of the btree structure in bytes per node. Computed as the
  // total number of bytes used by the btree minus the number of bytes used for
  // storing elements divided by the number of elements.
  // Returns 0 for empty trees.
  double overhead() const {
    if (empty()) return 0.0;
    return (bytes_used() - size() * sizeof(value_type)) /
           static_cast<double>(size());
  }

  // The allocator used by the btree.
  allocator_type get_allocator() const { return allocator(); }
1572 
 private:
  friend struct btree_access;

  // Internal accessor routines.
  node_type *root() { return root_; }
  const node_type *root() const { return root_; }
  node_type *&mutable_root() noexcept { return root_; }
  // The rightmost node pointer is slot 2 of the `rightmost_` bundle
  // (slot 0 is the comparator, slot 1 the allocator).
  node_type *rightmost() { return rightmost_.template get<2>(); }
  const node_type *rightmost() const { return rightmost_.template get<2>(); }
  node_type *&mutable_rightmost() noexcept {
    return rightmost_.template get<2>();
  }
  key_compare *mutable_key_comp() noexcept {
    return &rightmost_.template get<0>();
  }

  // The leftmost node is stored as the parent of the root node.
  node_type *leftmost() { return root()->parent(); }
  const node_type *leftmost() const { return root()->parent(); }

  // Allocator routines.
  allocator_type *mutable_allocator() noexcept {
    return &rightmost_.template get<1>();
  }
  const allocator_type &allocator() const noexcept {
    return rightmost_.template get<1>();
  }

  // Allocates a correctly aligned node of at least size bytes using the
  // allocator.
  node_type *allocate(const size_type size) {
    return reinterpret_cast<node_type *>(
        absl::container_internal::Allocate<node_type::Alignment()>(
            mutable_allocator(), size));
  }

  // Node creation/deletion routines.
  node_type *new_internal_node(node_type *parent) {
    node_type *n = allocate(node_type::InternalSize());
    n->init_internal(parent);
    return n;
  }
  node_type *new_leaf_node(node_type *parent) {
    node_type *n = allocate(node_type::LeafSize());
    n->init_leaf(kNodeSlots, parent);
    return n;
  }
  // A leaf root may be allocated with a smaller-than-default max_count; it is
  // created as its own parent.
  node_type *new_leaf_root_node(const int max_count) {
    node_type *n = allocate(node_type::LeafSize(max_count));
    n->init_leaf(max_count, /*parent=*/n);
    return n;
  }
1625 
1626  // Deletion helper routines.
1627  iterator rebalance_after_delete(iterator iter);
1628 
1629  // Rebalances or splits the node iter points to.
1630  void rebalance_or_split(iterator *iter);
1631 
1632  // Merges the values of left, right and the delimiting key on their parent
1633  // onto left, removing the delimiting key and deleting right.
1634  void merge_nodes(node_type *left, node_type *right);
1635 
1636  // Tries to merge node with its left or right sibling, and failing that,
1637  // rebalance with its left or right sibling. Returns true if a merge
1638  // occurred, at which point it is no longer valid to access node. Returns
1639  // false if no merging took place.
1640  bool try_merge_or_rebalance(iterator *iter);
1641 
1642  // Tries to shrink the height of the tree by 1.
1643  void try_shrink();
1644 
  // Maps the internal "not found" sentinel (an iterator with a null node) to
  // end(); passes any other iterator through unchanged.
  iterator internal_end(iterator iter) {
    return iter.node_ != nullptr ? iter : end();
  }
  const_iterator internal_end(const_iterator iter) const {
    return iter.node_ != nullptr ? iter : end();
  }
1651 
1652  // Emplaces a value into the btree immediately before iter. Requires that
1653  // key(v) <= iter.key() and (--iter).key() <= key(v).
1654  template <typename... Args>
1655  iterator internal_emplace(iterator iter, Args &&... args);
1656 
1657  // Returns an iterator pointing to the first value >= the value "iter" is
1658  // pointing at. Note that "iter" might be pointing to an invalid location such
1659  // as iter.position_ == iter.node_->finish(). This routine simply moves iter
1660  // up in the tree to a valid location. Requires: iter.node_ is non-null.
1661  template <typename IterType>
1662  static IterType internal_last(IterType iter);
1663 
1664  // Returns an iterator pointing to the leaf position at which key would
1665  // reside in the tree, unless there is an exact match - in which case, the
1666  // result may not be on a leaf. When there's a three-way comparator, we can
1667  // return whether there was an exact match. This allows the caller to avoid a
1668  // subsequent comparison to determine if an exact match was made, which is
1669  // important for keys with expensive comparison, such as strings.
1670  template <typename K>
1671  SearchResult<iterator, is_key_compare_to::value> internal_locate(
1672  const K &key) const;
1673 
1674  // Internal routine which implements lower_bound().
1675  template <typename K>
1676  SearchResult<iterator, is_key_compare_to::value> internal_lower_bound(
1677  const K &key) const;
1678 
1679  // Internal routine which implements upper_bound().
1680  template <typename K>
1681  iterator internal_upper_bound(const K &key) const;
1682 
1683  // Internal routine which implements find().
1684  template <typename K>
1685  iterator internal_find(const K &key) const;
1686 
1687  // Verifies the tree structure of node.
1688  int internal_verify(const node_type *node, const key_type *lo,
1689  const key_type *hi) const;
1690 
  // Recursively counts the (leaf, internal) nodes in the subtree rooted at
  // `node`.
  node_stats internal_stats(const node_type *node) const {
    // The root can be a static empty node.
    if (node == nullptr || (node == root() && empty())) {
      return node_stats(0, 0);
    }
    if (node->is_leaf()) {
      return node_stats(1, 0);
    }
    node_stats res(0, 1);
    // Children occupy indices start()..finish() inclusive, hence `<=`.
    for (int i = node->start(); i <= node->finish(); ++i) {
      res += internal_stats(node->child(i));
    }
    return res;
  }
1705 
1706  node_type *root_;
1707 
1708  // A pointer to the rightmost node. Note that the leftmost node is stored as
1709  // the root's parent. We use compressed tuple in order to save space because
1710  // key_compare and allocator_type are usually empty.
1711  absl::container_internal::CompressedTuple<key_compare, allocator_type,
1712  node_type *>
1713  rightmost_;
1714 
1715  // Number of values.
1716  size_type size_;
1717 };
1718 
1720 // btree_node methods
// Constructs a value from `args` at index `i` of this node, shifting the
// existing values at and after `i` (and, for internal nodes, the
// corresponding child pointers) one slot to the right to make room.
template <typename P>
template <typename... Args>
inline void btree_node<P>::emplace_value(const size_type i,
                                         allocator_type *alloc,
                                         Args &&... args) {
  assert(i >= start());
  assert(i <= finish());
  // Shift old values to create space for new value and then construct it in
  // place.
  if (i < finish()) {
    transfer_n_backward(finish() - i, /*dest_i=*/i + 1, /*src_i=*/i, this,
                        alloc);
  }
  value_init(i, alloc, std::forward<Args>(args)...);
  set_finish(finish() + 1);

  // For internal nodes, shift the children to the right of the insertion
  // point as well, leaving the slot for the new value's right child cleared.
  if (is_internal() && finish() > i + 1) {
    for (field_type j = finish(); j > i + 1; --j) {
      set_child(j, child(j - 1));
    }
    clear_child(i + 1);
  }
}
1744 
// Removes the `to_erase` values starting at index `i` from this node,
// destroying them and, for internal nodes, deleting the subtrees between the
// removed values.
template <typename P>
inline void btree_node<P>::remove_values(const field_type i,
                                         const field_type to_erase,
                                         allocator_type *alloc) {
  // Destroy the removed values, then transfer the values after the removed
  // range into their new places.
  value_destroy_n(i, to_erase, alloc);
  const field_type orig_finish = finish();
  const field_type src_i = i + to_erase;
  transfer_n(orig_finish - src_i, i, src_i, this, alloc);

  if (is_internal()) {
    // Delete all children between begin and end.
    for (int j = 0; j < to_erase; ++j) {
      clear_and_delete(child(i + j + 1), alloc);
    }
    // Rotate children after end into new positions.
    for (int j = i + to_erase + 1; j <= orig_finish; ++j) {
      set_child(j - to_erase, child(j));
      clear_child(j);
    }
  }
  set_finish(orig_finish - to_erase);
}
1768 
// Moves `to_move` values from `right` (this node's immediate right sibling)
// into this node, rotating through the delimiting value in the parent. The
// four transfer steps below must run in this order because each one frees the
// slots the next one writes into.
template <typename P>
void btree_node<P>::rebalance_right_to_left(const int to_move,
                                            btree_node *right,
                                            allocator_type *alloc) {
  assert(parent() == right->parent());
  assert(position() + 1 == right->position());
  assert(right->count() >= count());
  assert(to_move >= 1);
  assert(to_move <= right->count());

  // 1) Move the delimiting value in the parent to the left node.
  transfer(finish(), position(), parent(), alloc);

  // 2) Move the (to_move - 1) values from the right node to the left node.
  transfer_n(to_move - 1, finish() + 1, right->start(), right, alloc);

  // 3) Move the new delimiting value to the parent from the right node.
  parent()->transfer(position(), right->start() + to_move - 1, right, alloc);

  // 4) Shift the values in the right node to their correct positions.
  right->transfer_n(right->count() - to_move, right->start(),
                    right->start() + to_move, right, alloc);

  if (is_internal()) {
    // Move the child pointers from the right to the left node.
    for (int i = 0; i < to_move; ++i) {
      init_child(finish() + i + 1, right->child(i));
    }
    for (int i = right->start(); i <= right->finish() - to_move; ++i) {
      assert(i + to_move <= right->max_count());
      right->init_child(i, right->child(i + to_move));
      right->clear_child(i + to_move);
    }
  }

  // Fixup `finish` on the left and right nodes.
  set_finish(finish() + to_move);
  right->set_finish(right->finish() - to_move);
}
1808 
// Moves `to_move` values from this node into `right` (its immediate right
// sibling), rotating through the delimiting value in the parent. Mirror image
// of rebalance_right_to_left; step order matters for the same reason.
template <typename P>
void btree_node<P>::rebalance_left_to_right(const int to_move,
                                            btree_node *right,
                                            allocator_type *alloc) {
  assert(parent() == right->parent());
  assert(position() + 1 == right->position());
  assert(count() >= right->count());
  assert(to_move >= 1);
  assert(to_move <= count());

  // Values in the right node are shifted to the right to make room for the
  // new to_move values. Then, the delimiting value in the parent and the
  // other (to_move - 1) values in the left node are moved into the right node.
  // Lastly, a new delimiting value is moved from the left node into the
  // parent, and the remaining empty left node entries are destroyed.

  // 1) Shift existing values in the right node to their correct positions.
  right->transfer_n_backward(right->count(), right->start() + to_move,
                             right->start(), right, alloc);

  // 2) Move the delimiting value in the parent to the right node.
  right->transfer(right->start() + to_move - 1, position(), parent(), alloc);

  // 3) Move the (to_move - 1) values from the left node to the right node.
  right->transfer_n(to_move - 1, right->start(), finish() - (to_move - 1), this,
                    alloc);

  // 4) Move the new delimiting value to the parent from the left node.
  parent()->transfer(position(), finish() - to_move, this, alloc);

  if (is_internal()) {
    // Move the child pointers from the left to the right node. Iterate
    // downward so earlier children are not overwritten before they move.
    for (int i = right->finish(); i >= right->start(); --i) {
      right->init_child(i + to_move, right->child(i));
      right->clear_child(i);
    }
    for (int i = 1; i <= to_move; ++i) {
      right->init_child(i - 1, child(finish() - to_move + i));
      clear_child(finish() - to_move + i);
    }
  }

  // Fixup the counts on the left and right nodes.
  set_finish(finish() - to_move);
  right->set_finish(right->finish() + to_move);
}
1855 
// Splits this full node, moving the upper portion of its values into `dest`
// (a new, empty right sibling) and pushing the delimiting value up into the
// parent. The split point is biased by `insert_position` so the pending
// insertion lands in the node with more free space.
template <typename P>
void btree_node<P>::split(const int insert_position, btree_node *dest,
                          allocator_type *alloc) {
  assert(dest->count() == 0);
  assert(max_count() == kNodeSlots);

  // We bias the split based on the position being inserted. If we're
  // inserting at the beginning of the left node then bias the split to put
  // more values on the right node. If we're inserting at the end of the
  // right node then bias the split to put more values on the left node.
  if (insert_position == start()) {
    dest->set_finish(dest->start() + finish() - 1);
  } else if (insert_position == kNodeSlots) {
    dest->set_finish(dest->start());
  } else {
    dest->set_finish(dest->start() + count() / 2);
  }
  set_finish(finish() - dest->count());
  assert(count() >= 1);

  // Move values from the left sibling to the right sibling.
  dest->transfer_n(dest->count(), dest->start(), finish(), this, alloc);

  // The split key is the largest value in the left sibling. It is copied up
  // into the parent and then destroyed here.
  --mutable_finish();
  parent()->emplace_value(position(), alloc, finish_slot());
  value_destroy(finish(), alloc);
  parent()->init_child(position() + 1, dest);

  if (is_internal()) {
    for (int i = dest->start(), j = finish() + 1; i <= dest->finish();
         ++i, ++j) {
      assert(child(j) != nullptr);
      dest->init_child(i, child(j));
      clear_child(j);
    }
  }
}
1894 
// Merges `src` (this node's immediate right sibling) into this node: the
// delimiting value is pulled down from the parent, src's values (and, for
// internal nodes, its children) are moved over, and src is deleted via
// parent()->remove_values.
template <typename P>
void btree_node<P>::merge(btree_node *src, allocator_type *alloc) {
  assert(parent() == src->parent());
  assert(position() + 1 == src->position());

  // Move the delimiting value to the left node.
  value_init(finish(), alloc, parent()->slot(position()));

  // Move the values from the right to the left node.
  transfer_n(src->count(), finish() + 1, src->start(), src, alloc);

  if (is_internal()) {
    // Move the child pointers from the right to the left node.
    for (int i = src->start(), j = finish() + 1; i <= src->finish(); ++i, ++j) {
      init_child(j, src->child(i));
      src->clear_child(i);
    }
  }

  // Fixup `finish` on the src and dest nodes: this node gains the delimiting
  // value (+1) plus src's values; src is emptied before deletion.
  set_finish(start() + 1 + count() + src->count());
  src->set_finish(src->start());

  // Remove the value on the parent node and delete the src node.
  parent()->remove_values(position(), /*to_erase=*/1, alloc);
}
1921 
// Destroys all values in the subtree rooted at `node` and deallocates every
// node in it. Deletion is iterative (leftmost-leaf first, then rightward and
// upward) rather than recursive, so tree height does not grow the stack.
template <typename P>
void btree_node<P>::clear_and_delete(btree_node *node, allocator_type *alloc) {
  // Trivial cases: a single leaf, or an internal node with no values.
  if (node->is_leaf()) {
    node->value_destroy_n(node->start(), node->count(), alloc);
    deallocate(LeafSize(node->max_count()), node, alloc);
    return;
  }
  if (node->count() == 0) {
    deallocate(InternalSize(), node, alloc);
    return;
  }

  // The parent of the root of the subtree we are deleting.
  btree_node *delete_root_parent = node->parent();

  // Navigate to the leftmost leaf under node, and then delete upwards.
  while (node->is_internal()) node = node->start_child();
#ifdef ABSL_BTREE_ENABLE_GENERATIONS
  // When generations are enabled, we delete the leftmost leaf last in case it's
  // the parent of the root and we need to check whether it's a leaf before we
  // can update the root's generation.
  // TODO(ezb): if we change btree_node::is_root to check a bool inside the node
  // instead of checking whether the parent is a leaf, we can remove this logic.
  btree_node *leftmost_leaf = node;
#endif
  // Use `int` because `pos` needs to be able to hold `kNodeSlots+1`, which
  // isn't guaranteed to be a valid `field_type`.
  int pos = node->position();
  btree_node *parent = node->parent();
  for (;;) {
    // In each iteration of the next loop, we delete one leaf node and go right.
    assert(pos <= parent->finish());
    do {
      node = parent->child(pos);
      if (node->is_internal()) {
        // Navigate to the leftmost leaf under node.
        while (node->is_internal()) node = node->start_child();
        pos = node->position();
        parent = node->parent();
      }
      node->value_destroy_n(node->start(), node->count(), alloc);
#ifdef ABSL_BTREE_ENABLE_GENERATIONS
      // Defer deallocating the leftmost leaf (see comment above).
      if (leftmost_leaf != node)
#endif
        deallocate(LeafSize(node->max_count()), node, alloc);
      ++pos;
    } while (pos <= parent->finish());

    // Once we've deleted all children of parent, delete parent and go up/right.
    assert(pos > parent->finish());
    do {
      node = parent;
      pos = node->position();
      parent = node->parent();
      node->value_destroy_n(node->start(), node->count(), alloc);
      deallocate(InternalSize(), node, alloc);
      // Reaching delete_root_parent means the whole subtree is gone.
      if (parent == delete_root_parent) {
#ifdef ABSL_BTREE_ENABLE_GENERATIONS
        deallocate(LeafSize(leftmost_leaf->max_count()), leftmost_leaf, alloc);
#endif
        return;
      }
      ++pos;
    } while (pos > parent->finish());
  }
}
1988 
1990 // btree_iterator methods
// Slow path for operator++: called when the next value is not in the current
// leaf. Either walks up from the end of a leaf to the nearest ancestor with a
// following value, or descends from an internal node to the leftmost leaf of
// the next subtree.
template <typename N, typename R, typename P>
void btree_iterator<N, R, P>::increment_slow() {
  if (node_->is_leaf()) {
    assert(position_ >= node_->finish());
    // Save the iterator so we can restore it if we walk off the end (i.e.
    // there is no next value anywhere above us).
    btree_iterator save(*this);
    while (position_ == node_->finish() && !node_->is_root()) {
      assert(node_->parent()->child(node_->position()) == node_);
      position_ = node_->position();
      node_ = node_->parent();
    }
    // TODO(ezb): assert we aren't incrementing end() instead of handling.
    if (position_ == node_->finish()) {
      *this = save;
    }
  } else {
    assert(position_ < node_->finish());
    node_ = node_->child(position_ + 1);
    while (node_->is_internal()) {
      node_ = node_->start_child();
    }
    position_ = node_->start();
  }
}
2014 
// Slow path for operator--: mirror of increment_slow. Either walks up from
// before the start of a leaf to the nearest ancestor with a preceding value,
// or descends from an internal node to the rightmost leaf of the previous
// subtree.
template <typename N, typename R, typename P>
void btree_iterator<N, R, P>::decrement_slow() {
  if (node_->is_leaf()) {
    assert(position_ <= -1);
    // Save the iterator so we can restore it if there is no previous value.
    btree_iterator save(*this);
    while (position_ < node_->start() && !node_->is_root()) {
      assert(node_->parent()->child(node_->position()) == node_);
      position_ = node_->position() - 1;
      node_ = node_->parent();
    }
    // TODO(ezb): assert we aren't decrementing begin() instead of handling.
    if (position_ < node_->start()) {
      *this = save;
    }
  } else {
    assert(position_ >= node_->start());
    node_ = node_->child(position_);
    while (node_->is_internal()) {
      node_ = node_->child(node_->finish());
    }
    position_ = node_->finish() - 1;
  }
}
2038 
2040 // btree methods
2041 template <typename P>
2042 template <typename Btree>
2043 void btree<P>::copy_or_move_values_in_order(Btree &other) {
2044  static_assert(std::is_same<btree, Btree>::value ||
2045  std::is_same<const btree, Btree>::value,
2046  "Btree type must be same or const.");
2047  assert(empty());
2048 
2049  // We can avoid key comparisons because we know the order of the
2050  // values is the same order we'll store them in.
2051  auto iter = other.begin();
2052  if (iter == other.end()) return;
2053  insert_multi(iter.slot());
2054  ++iter;
2055  for (; iter != other.end(); ++iter) {
2056  // If the btree is not empty, we can just insert the new value at the end
2057  // of the tree.
2058  internal_emplace(end(), iter.slot());
2059  }
2060 }
2061 
// Compile-time validation of the btree's type parameters. Always returns
// true; the value exists only so this can be used inside a static_assert or
// constant initializer.
template <typename P>
constexpr bool btree<P>::static_assert_validation() {
  static_assert(std::is_nothrow_copy_constructible<key_compare>::value,
                "Key comparison must be nothrow copy constructible");
  static_assert(std::is_nothrow_copy_constructible<allocator_type>::value,
                "Allocator must be nothrow copy constructible");
  static_assert(type_traits_internal::is_trivially_copyable<iterator>::value,
                "iterator not trivially copyable.");

  // Note: We assert that kTargetValues, which is computed from
  // Params::kTargetNodeSize, must fit the node_type::field_type.
  static_assert(
      kNodeSlots < (1 << (8 * sizeof(typename node_type::field_type))),
      "target node size too large");

  // Verify that key_compare returns an absl::{weak,strong}_ordering or bool.
  static_assert(
      compare_has_valid_result_type<key_compare, key_type>(),
      "key comparison function must return absl::{weak,strong}_ordering or "
      "bool.");

  // Test the assumption made in setting kNodeSlotSpace.
  static_assert(node_type::MinimumOverhead() >= sizeof(void *) + 4,
                "node space assumption incorrect");

  return true;
}
2089 
// Returns the lower-bound iterator for `key` together with whether it points
// to an element equivalent to `key`. With a three-way comparator the search
// already knows equality (res.IsEq()); otherwise equality costs one extra
// key comparison against the lower-bound element.
template <typename P>
template <typename K>
auto btree<P>::lower_bound_equal(const K &key) const
    -> std::pair<iterator, bool> {
  const SearchResult<iterator, is_key_compare_to::value> res =
      internal_lower_bound(key);
  const iterator lower = iterator(internal_end(res.value));
  const bool equal = res.HasMatch()
                         ? res.IsEq()
                         : lower != end() && !compare_keys(key, lower.key());
  return {lower, equal};
}
2102 
// Returns the [first, last) range of elements equivalent to `key`, avoiding
// the upper_bound() search whenever at most one equivalent element can exist.
template <typename P>
template <typename K>
auto btree<P>::equal_range(const K &key) -> std::pair<iterator, iterator> {
  const std::pair<iterator, bool> lower_and_equal = lower_bound_equal(key);
  const iterator lower = lower_and_equal.first;
  // No match: the range is empty at the lower bound.
  if (!lower_and_equal.second) {
    return {lower, lower};
  }

  const iterator next = std::next(lower);
  if (!params_type::template can_have_multiple_equivalent_keys<K>()) {
    // The next iterator after lower must point to a key greater than `key`.
    // Note: if this assert fails, then it may indicate that the comparator does
    // not meet the equivalence requirements for Compare
    // (see https://en.cppreference.com/w/cpp/named_req/Compare).
    assert(next == end() || compare_keys(key, next.key()));
    return {lower, next};
  }
  // Try once more to avoid the call to upper_bound() if there's only one
  // equivalent key. This should prevent all calls to upper_bound() in cases of
  // unique-containers with heterogeneous comparators in which all comparison
  // operators have the same equivalence classes.
  if (next == end() || compare_keys(key, next.key())) return {lower, next};

  // In this case, we need to call upper_bound() to avoid worst case O(N)
  // behavior if we were to iterate over equal keys.
  return {lower, upper_bound(key)};
}
2131 
// Inserts a value constructed from `args` if no element equivalent to `key`
// exists. Returns the iterator to the new or existing element and whether an
// insertion took place.
template <typename P>
template <typename K, typename... Args>
auto btree<P>::insert_unique(const K &key, Args &&... args)
    -> std::pair<iterator, bool> {
  // Lazily create the root node on first insertion.
  if (empty()) {
    mutable_root() = mutable_rightmost() = new_leaf_root_node(1);
  }

  SearchResult<iterator, is_key_compare_to::value> res = internal_locate(key);
  iterator iter = res.value;

  if (res.HasMatch()) {
    if (res.IsEq()) {
      // The key already exists in the tree, do nothing.
      return {iter, false};
    }
  } else {
    // Boolean comparator: internal_locate can't report equality, so check
    // the element before the insertion point explicitly.
    iterator last = internal_last(iter);
    if (last.node_ && !compare_keys(key, last.key())) {
      // The key already exists in the tree, do nothing.
      return {last, false};
    }
  }
  return {internal_emplace(iter, std::forward<Args>(args)...), true};
}
2157 
// Hinted unique insert: if `position` (or the slot just after it) is a valid
// insertion point for `key`, emplace there directly; otherwise fall back to
// the ordinary search-based insert_unique.
template <typename P>
template <typename K, typename... Args>
inline auto btree<P>::insert_hint_unique(iterator position, const K &key,
                                         Args &&... args)
    -> std::pair<iterator, bool> {
  if (!empty()) {
    if (position == end() || compare_keys(key, position.key())) {
      if (position == begin() || compare_keys(std::prev(position).key(), key)) {
        // prev.key() < key < position.key()
        return {internal_emplace(position, std::forward<Args>(args)...), true};
      }
    } else if (compare_keys(position.key(), key)) {
      ++position;
      if (position == end() || compare_keys(key, position.key())) {
        // {original `position`}.key() < key < {current `position`}.key()
        return {internal_emplace(position, std::forward<Args>(args)...), true};
      }
    } else {
      // position.key() == key
      return {position, false};
    }
  }
  return insert_unique(key, std::forward<Args>(args)...);
}
2182 
2183 template <typename P>
2184 template <typename InputIterator, typename>
2185 void btree<P>::insert_iterator_unique(InputIterator b, InputIterator e, int) {
2186  for (; b != e; ++b) {
2187  insert_hint_unique(end(), params_type::key(*b), *b);
2188  }
2189 }
2190 
// Range-insert overload (selected via the `char` tag) for iterators whose
// dereferenced value must first be materialized: each element is constructed
// into a temporary slot owned by a node handle, then inserted from that slot.
template <typename P>
template <typename InputIterator>
void btree<P>::insert_iterator_unique(InputIterator b, InputIterator e, char) {
  for (; b != e; ++b) {
    // Use a node handle to manage a temp slot.
    auto node_handle =
        CommonAccess::Construct<node_handle_type>(get_allocator(), *b);
    slot_type *slot = CommonAccess::GetSlot(node_handle);
    insert_hint_unique(end(), params_type::key(slot), slot);
  }
}
2202 
2203 template <typename P>
2204 template <typename ValueType>
2205 auto btree<P>::insert_multi(const key_type &key, ValueType &&v) -> iterator {
2206  if (empty()) {
2207  mutable_root() = mutable_rightmost() = new_leaf_root_node(1);
2208  }
2209 
2210  iterator iter = internal_upper_bound(key);
2211  if (iter.node_ == nullptr) {
2212  iter = end();
2213  }
2214  return internal_emplace(iter, std::forward<ValueType>(v));
2215 }
2216 
// Hinted multi insert: if `v` can be placed at `position` (or just after it)
// without violating ordering, emplace there directly; otherwise fall back to
// the search-based insert_multi. Note the non-strict comparisons: equivalent
// keys are allowed on either side of the insertion point.
template <typename P>
template <typename ValueType>
auto btree<P>::insert_hint_multi(iterator position, ValueType &&v) -> iterator {
  if (!empty()) {
    const key_type &key = params_type::key(v);
    if (position == end() || !compare_keys(position.key(), key)) {
      if (position == begin() ||
          !compare_keys(key, std::prev(position).key())) {
        // prev.key() <= key <= position.key()
        return internal_emplace(position, std::forward<ValueType>(v));
      }
    } else {
      ++position;
      if (position == end() || !compare_keys(position.key(), key)) {
        // {original `position`}.key() < key < {current `position`}.key()
        return internal_emplace(position, std::forward<ValueType>(v));
      }
    }
  }
  return insert_multi(std::forward<ValueType>(v));
}
2238 
2239 template <typename P>
2240 template <typename InputIterator>
2241 void btree<P>::insert_iterator_multi(InputIterator b, InputIterator e) {
2242  for (; b != e; ++b) {
2243  insert_hint_multi(end(), *b);
2244  }
2245 }
2246 
// Copy assignment: clears this tree, copies the comparator (and, when the
// allocator propagates on copy assignment, the allocator), then copies
// other's values over in order.
template <typename P>
auto btree<P>::operator=(const btree &other) -> btree & {
  if (this != &other) {
    clear();

    *mutable_key_comp() = other.key_comp();
    if (absl::allocator_traits<
            allocator_type>::propagate_on_container_copy_assignment::value) {
      *mutable_allocator() = other.allocator();
    }

    copy_or_move_values_in_order(other);
  }
  return *this;
}
2262 
2263 template <typename P>
2264 auto btree<P>::operator=(btree &&other) noexcept -> btree & {
2265  if (this != &other) {
2266  clear();
2267 
2268  using std::swap;
2269  if (absl::allocator_traits<
2270  allocator_type>::propagate_on_container_copy_assignment::value) {
2271  swap(root_, other.root_);
2272  // Note: `rightmost_` also contains the allocator and the key comparator.
2273  swap(rightmost_, other.rightmost_);
2274  swap(size_, other.size_);
2275  } else {
2276  if (allocator() == other.allocator()) {
2277  swap(mutable_root(), other.mutable_root());
2278  swap(*mutable_key_comp(), *other.mutable_key_comp());
2279  swap(mutable_rightmost(), other.mutable_rightmost());
2280  swap(size_, other.size_);
2281  } else {
2282  // We aren't allowed to propagate the allocator and the allocator is
2283  // different so we can't take over its memory. We must move each element
2284  // individually. We need both `other` and `this` to have `other`s key
2285  // comparator while moving the values so we can't swap the key
2286  // comparators.
2287  *mutable_key_comp() = other.key_comp();
2288  copy_or_move_values_in_order(other);
2289  }
2290  }
2291  }
2292  return *this;
2293 }
2294 
// Erases the value at `iter` and rebalances the tree. Returns an iterator to
// the value following the erased one.
template <typename P>
auto btree<P>::erase(iterator iter) -> iterator {
  iter.node_->value_destroy(iter.position_, mutable_allocator());
  iter.update_generation();

  const bool internal_delete = iter.node_->is_internal();
  if (internal_delete) {
    // Deletion of a value on an internal node. First, transfer the largest
    // value from our left child here, then erase/rebalance from that position.
    // We can get to the largest value from our left child by decrementing iter.
    iterator internal_iter(iter);
    --iter;
    assert(iter.node_->is_leaf());
    internal_iter.node_->transfer(internal_iter.position_, iter.position_,
                                  iter.node_, mutable_allocator());
  } else {
    // Shift values after erased position in leaf. In the internal case, we
    // don't need to do this because the leaf position is the end of the node.
    const field_type transfer_from = iter.position_ + 1;
    const field_type num_to_transfer = iter.node_->finish() - transfer_from;
    iter.node_->transfer_n(num_to_transfer, iter.position_, transfer_from,
                           iter.node_, mutable_allocator());
  }
  // Update node finish and container size.
  iter.node_->set_finish(iter.node_->finish() - 1);
  --size_;

  // We want to return the next value after the one we just erased. If we
  // erased from an internal node (internal_delete == true), then the next
  // value is ++(++iter). If we erased from a leaf node (internal_delete ==
  // false) then the next value is ++iter. Note that ++iter may point to an
  // internal node and the value in the internal node may move to a leaf node
  // (iter.node_) when rebalancing is performed at the leaf level.

  iterator res = rebalance_after_delete(iter);

  // If we erased from an internal node, advance the iterator.
  if (internal_delete) {
    ++res;
  }
  return res;
}
2337 
// After a deletion at `iter`, walks from iter's node up toward the root,
// merging or rebalancing any node that fell below kMinNodeValues. Returns a
// valid iterator positioned at the value that followed the deleted one.
template <typename P>
auto btree<P>::rebalance_after_delete(iterator iter) -> iterator {
  // Merge/rebalance as we walk back up the tree.
  iterator res(iter);
  bool first_iteration = true;
  for (;;) {
    if (iter.node_ == root()) {
      try_shrink();
      if (empty()) {
        return end();
      }
      break;
    }
    // A node with at least kMinNodeValues needs no fixing; stop here.
    if (iter.node_->count() >= kMinNodeValues) {
      break;
    }
    bool merged = try_merge_or_rebalance(&iter);
    // On the first iteration, we should update `res` with `iter` because `res`
    // may have been invalidated.
    if (first_iteration) {
      res = iter;
      first_iteration = false;
    }
    if (!merged) {
      break;
    }
    // A merge removed iter's node; continue fixing up from its parent.
    iter.position_ = iter.node_->position();
    iter.node_ = iter.node_->parent();
  }
  res.update_generation();

  // Adjust our return value. If we're pointing at the end of a node, advance
  // the iterator.
  if (res.position_ == res.node_->finish()) {
    res.position_ = res.node_->finish() - 1;
    ++res;
  }

  return res;
}
2378 
// Erases the range [begin, end). Returns the number of values erased and an
// iterator to the value following the erased range. Fast paths: an empty
// range, erasing the entire tree, and a range confined to a single node.
template <typename P>
auto btree<P>::erase_range(iterator begin, iterator end)
    -> std::pair<size_type, iterator> {
  difference_type count = std::distance(begin, end);
  assert(count >= 0);

  if (count == 0) {
    return {0, begin};
  }

  // Erasing everything: clear() is much cheaper than element-wise erasure.
  if (static_cast<size_type>(count) == size_) {
    clear();
    return {count, this->end()};
  }

  // Whole range within one node: remove it in a single batch.
  if (begin.node_ == end.node_) {
    assert(end.position_ > begin.position_);
    begin.node_->remove_values(begin.position_, end.position_ - begin.position_,
                               mutable_allocator());
    size_ -= count;
    return {count, rebalance_after_delete(begin)};
  }

  // General case: repeatedly erase in batches (from leaves) or one at a time
  // (from internal nodes) until the target size is reached.
  const size_type target_size = size_ - count;
  while (size_ > target_size) {
    if (begin.node_->is_leaf()) {
      const size_type remaining_to_erase = size_ - target_size;
      const size_type remaining_in_node =
          begin.node_->finish() - begin.position_;
      const size_type to_erase =
          (std::min)(remaining_to_erase, remaining_in_node);
      begin.node_->remove_values(begin.position_, to_erase,
                                 mutable_allocator());
      size_ -= to_erase;
      begin = rebalance_after_delete(begin);
    } else {
      begin = erase(begin);
    }
  }
  begin.update_generation();
  return {count, begin};
}
2421 
2422 template <typename P>
2423 void btree<P>::clear() {
2424  if (!empty()) {
2425  node_type::clear_and_delete(root(), mutable_allocator());
2426  }
2427  mutable_root() = mutable_rightmost() = EmptyNode();
2428  size_ = 0;
2429 }
2430 
// Swaps the contents of this tree with `other`. The allocator is exchanged
// only when propagate_on_container_swap allows it; otherwise the allocators
// must compare equal.
template <typename P>
void btree<P>::swap(btree &other) {
  using std::swap;
  if (absl::allocator_traits<
          allocator_type>::propagate_on_container_swap::value) {
    // Note: `rightmost_` also contains the allocator and the key comparator.
    swap(rightmost_, other.rightmost_);
  } else {
    // It's undefined behavior if the allocators are unequal here.
    assert(allocator() == other.allocator());
    swap(mutable_rightmost(), other.mutable_rightmost());
    swap(*mutable_key_comp(), *other.mutable_key_comp());
  }
  swap(mutable_root(), other.mutable_root());
  swap(size_, other.size_);
}
2447 
// Debug check of the tree's structural invariants: non-null root/leftmost/
// rightmost, per-node invariants and total size (via internal_verify), and
// that the cached leftmost/rightmost pointers match the actual extreme
// leaves reached by iteration.
template <typename P>
void btree<P>::verify() const {
  assert(root() != nullptr);
  assert(leftmost() != nullptr);
  assert(rightmost() != nullptr);
  assert(empty() || size() == internal_verify(root(), nullptr, nullptr));
  assert(leftmost() == (++const_iterator(root(), -1)).node_);
  assert(rightmost() == (--const_iterator(root(), root()->finish())).node_);
  assert(leftmost()->is_leaf());
  assert(rightmost()->is_leaf());
}
2459 
2460 template <typename P>
2461 void btree<P>::rebalance_or_split(iterator *iter) {
2462  node_type *&node = iter->node_;
2463  int &insert_position = iter->position_;
2464  assert(node->count() == node->max_count());
2465  assert(kNodeSlots == node->max_count());
2466 
2467  // First try to make room on the node by rebalancing.
2468  node_type *parent = node->parent();
2469  if (node != root()) {
2470  if (node->position() > parent->start()) {
2471  // Try rebalancing with our left sibling.
2472  node_type *left = parent->child(node->position() - 1);
2473  assert(left->max_count() == kNodeSlots);
2474  if (left->count() < kNodeSlots) {
2475  // We bias rebalancing based on the position being inserted. If we're
2476  // inserting at the end of the right node then we bias rebalancing to
2477  // fill up the left node.
2478  int to_move = (kNodeSlots - left->count()) /
2479  (1 + (insert_position < static_cast<int>(kNodeSlots)));
2480  to_move = (std::max)(1, to_move);
2481 
2482  if (insert_position - to_move >= node->start() ||
2483  left->count() + to_move < static_cast<int>(kNodeSlots)) {
2484  left->rebalance_right_to_left(to_move, node, mutable_allocator());
2485 
2486  assert(node->max_count() - node->count() == to_move);
2487  insert_position = insert_position - to_move;
2488  if (insert_position < node->start()) {
2489  insert_position = insert_position + left->count() + 1;
2490  node = left;
2491  }
2492 
2493  assert(node->count() < node->max_count());
2494  return;
2495  }
2496  }
2497  }
2498 
2499  if (node->position() < parent->finish()) {
2500  // Try rebalancing with our right sibling.
2501  node_type *right = parent->child(node->position() + 1);
2502  assert(right->max_count() == kNodeSlots);
2503  if (right->count() < kNodeSlots) {
2504  // We bias rebalancing based on the position being inserted. If we're
2505  // inserting at the beginning of the left node then we bias rebalancing
2506  // to fill up the right node.
2507  int to_move = (static_cast<int>(kNodeSlots) - right->count()) /
2508  (1 + (insert_position > node->start()));
2509  to_move = (std::max)(1, to_move);
2510 
2511  if (insert_position <= node->finish() - to_move ||
2512  right->count() + to_move < static_cast<int>(kNodeSlots)) {
2513  node->rebalance_left_to_right(to_move, right, mutable_allocator());
2514 
2515  if (insert_position > node->finish()) {
2516  insert_position = insert_position - node->count() - 1;
2517  node = right;
2518  }
2519 
2520  assert(node->count() < node->max_count());
2521  return;
2522  }
2523  }
2524  }
2525 
2526  // Rebalancing failed, make sure there is room on the parent node for a new
2527  // value.
2528  assert(parent->max_count() == kNodeSlots);
2529  if (parent->count() == kNodeSlots) {
2530  iterator parent_iter(node->parent(), node->position());
2531  rebalance_or_split(&parent_iter);
2532  }
2533  } else {
2534  // Rebalancing not possible because this is the root node.
2535  // Create a new root node and set the current root node as the child of the
2536  // new root.
2537  parent = new_internal_node(parent);
2538  parent->set_generation(root()->generation());
2539  parent->init_child(parent->start(), root());
2540  mutable_root() = parent;
2541  // If the former root was a leaf node, then it's now the rightmost node.
2542  assert(parent->start_child()->is_internal() ||
2543  parent->start_child() == rightmost());
2544  }
2545 
2546  // Split the node.
2547  node_type *split_node;
2548  if (node->is_leaf()) {
2549  split_node = new_leaf_node(parent);
2550  node->split(insert_position, split_node, mutable_allocator());
2551  if (rightmost() == node) mutable_rightmost() = split_node;
2552  } else {
2553  split_node = new_internal_node(parent);
2554  node->split(insert_position, split_node, mutable_allocator());
2555  }
2556 
2557  if (insert_position > node->finish()) {
2558  insert_position = insert_position - node->count() - 1;
2559  node = split_node;
2560  }
2561 }
2562 
2563 template <typename P>
2564 void btree<P>::merge_nodes(node_type *left, node_type *right) {
2565  left->merge(right, mutable_allocator());
2566  if (rightmost() == right) mutable_rightmost() = left;
2567 }
2568 
// Called when `iter->node_` has become underfull after a deletion. First
// tries to merge the node with a sibling when the combined values fit in one
// node (returns true; `iter` is repositioned into the surviving node), and
// otherwise tries to pull values over from a sibling that can spare them
// (returns false). Returns false when nothing could be done.
template <typename P>
bool btree<P>::try_merge_or_rebalance(iterator *iter) {
  node_type *parent = iter->node_->parent();
  if (iter->node_->position() > parent->start()) {
    // Try merging with our left sibling.
    node_type *left = parent->child(iter->node_->position() - 1);
    assert(left->max_count() == kNodeSlots);
    if (1U + left->count() + iter->node_->count() <= kNodeSlots) {
      // Account for the left sibling's values (plus the parent pivot) that
      // now precede the iterator's position in the merged node.
      iter->position_ += 1 + left->count();
      merge_nodes(left, iter->node_);
      iter->node_ = left;
      return true;
    }
  }
  if (iter->node_->position() < parent->finish()) {
    // Try merging with our right sibling.
    node_type *right = parent->child(iter->node_->position() + 1);
    assert(right->max_count() == kNodeSlots);
    if (1U + iter->node_->count() + right->count() <= kNodeSlots) {
      merge_nodes(iter->node_, right);
      return true;
    }
    // Try rebalancing with our right sibling. We don't perform rebalancing if
    // we deleted the first element from iter->node_ and the node is not
    // empty. This is a small optimization for the common pattern of deleting
    // from the front of the tree.
    if (right->count() > kMinNodeValues &&
        (iter->node_->count() == 0 || iter->position_ > iter->node_->start())) {
      // Move about half the surplus, but never drain the sibling completely.
      int to_move = (right->count() - iter->node_->count()) / 2;
      to_move = (std::min)(to_move, right->count() - 1);
      iter->node_->rebalance_right_to_left(to_move, right, mutable_allocator());
      return false;
    }
  }
  if (iter->node_->position() > parent->start()) {
    // Try rebalancing with our left sibling. We don't perform rebalancing if
    // we deleted the last element from iter->node_ and the node is not
    // empty. This is a small optimization for the common pattern of deleting
    // from the back of the tree.
    node_type *left = parent->child(iter->node_->position() - 1);
    if (left->count() > kMinNodeValues &&
        (iter->node_->count() == 0 ||
         iter->position_ < iter->node_->finish())) {
      // Move about half the surplus, but never drain the sibling completely.
      int to_move = (left->count() - iter->node_->count()) / 2;
      to_move = (std::min)(to_move, left->count() - 1);
      left->rebalance_left_to_right(to_move, iter->node_, mutable_allocator());
      // Values shifted in from the left push the iterator's position right.
      iter->position_ += to_move;
      return false;
    }
  }
  return false;
}
2621 
2622 template <typename P>
2623 void btree<P>::try_shrink() {
2624  node_type *orig_root = root();
2625  if (orig_root->count() > 0) {
2626  return;
2627  }
2628  // Deleted the last item on the root node, shrink the height of the tree.
2629  if (orig_root->is_leaf()) {
2630  assert(size() == 0);
2631  mutable_root() = mutable_rightmost() = EmptyNode();
2632  } else {
2633  node_type *child = orig_root->start_child();
2634  child->make_root();
2635  mutable_root() = child;
2636  }
2637  node_type::clear_and_delete(orig_root, mutable_allocator());
2638 }
2639 
// Normalizes an iterator whose position is one-past-the-end of its node by
// climbing to the ancestor that holds the next value. Returns the end
// iterator (null node) when there is no next value.
template <typename P>
template <typename IterType>
inline IterType btree<P>::internal_last(IterType iter) {
  assert(iter.node_ != nullptr);
  while (iter.position_ == iter.node_->finish()) {
    // Convert the position into this node's slot within its parent, then
    // climb one level.
    iter.position_ = iter.node_->position();
    iter.node_ = iter.node_->parent();
    // A leaf parent means we climbed above the root, i.e. walked off the end
    // of the tree; report the end iterator.
    if (iter.node_->is_leaf()) {
      iter.node_ = nullptr;
      break;
    }
  }
  iter.update_generation();
  return iter;
}
2655 
// Emplaces a value constructed from `args` at the position indicated by
// `iter` (as computed by a prior search). If the target leaf is full, makes
// room first — either by growing an undersized root or by
// rebalancing/splitting. Returns an iterator to the inserted value.
template <typename P>
template <typename... Args>
inline auto btree<P>::internal_emplace(iterator iter, Args &&... args)
    -> iterator {
  if (iter.node_->is_internal()) {
    // We can't insert on an internal node. Instead, we'll insert after the
    // previous value which is guaranteed to be on a leaf node.
    --iter;
    ++iter.position_;
  }
  const field_type max_count = iter.node_->max_count();
  allocator_type *alloc = mutable_allocator();
  if (iter.node_->count() == max_count) {
    // Make room in the leaf for the new item.
    if (max_count < kNodeSlots) {
      // Insertion into the root where the root is smaller than the full node
      // size. Simply grow the size of the root node.
      assert(iter.node_ == root());
      iter.node_ =
          new_leaf_root_node((std::min<int>)(kNodeSlots, 2 * max_count));
      // Transfer the values from the old root to the new root.
      node_type *old_root = root();
      node_type *new_root = iter.node_;
      new_root->transfer_n(old_root->count(), new_root->start(),
                           old_root->start(), old_root, alloc);
      new_root->set_finish(old_root->finish());
      // Mark the old root empty so deleting it doesn't touch the values that
      // were just transferred out of it.
      old_root->set_finish(old_root->start());
      new_root->set_generation(old_root->generation());
      node_type::clear_and_delete(old_root, alloc);
      mutable_root() = mutable_rightmost() = new_root;
    } else {
      rebalance_or_split(&iter);
    }
  }
  iter.node_->emplace_value(iter.position_, alloc, std::forward<Args>(args)...);
  ++size_;
  iter.update_generation();
  return iter;
}
2695 
// Descends from the root toward a leaf, positioning at lower_bound within
// each node. In the key-compare-to case an exact match is reported (kEq) as
// soon as it is seen; otherwise the leaf position is returned with kNe.
template <typename P>
template <typename K>
inline auto btree<P>::internal_locate(const K &key) const
    -> SearchResult<iterator, is_key_compare_to::value> {
  iterator iter(const_cast<node_type *>(root()));
  for (;;) {
    SearchResult<int, is_key_compare_to::value> res =
        iter.node_->lower_bound(key, key_comp());
    iter.position_ = res.value;
    if (res.IsEq()) {
      return {iter, MatchKind::kEq};
    }
    // Note: in the non-key-compare-to case, we don't need to walk all the way
    // down the tree if the keys are equal, but determining equality would
    // require doing an extra comparison on each node on the way down, and we
    // will need to go all the way to the leaf node in the expected case.
    if (iter.node_->is_leaf()) {
      break;
    }
    iter.node_ = iter.node_->child(iter.position_);
  }
  // Note: in the non-key-compare-to case, the key may actually be equivalent
  // here (and the MatchKind::kNe is ignored).
  return {iter, MatchKind::kNe};
}
2721 
// Returns an iterator to the first value not less than `key`. The fast path
// delegates to internal_locate() when equivalent keys are impossible; the
// slow path keeps descending past equal pivots (tracking `seen_eq`) so that
// the *first* of several equivalent keys is found.
template <typename P>
template <typename K>
auto btree<P>::internal_lower_bound(const K &key) const
    -> SearchResult<iterator, is_key_compare_to::value> {
  if (!params_type::template can_have_multiple_equivalent_keys<K>()) {
    SearchResult<iterator, is_key_compare_to::value> ret = internal_locate(key);
    ret.value = internal_last(ret.value);
    return ret;
  }
  iterator iter(const_cast<node_type *>(root()));
  SearchResult<int, is_key_compare_to::value> res;
  bool seen_eq = false;
  for (;;) {
    res = iter.node_->lower_bound(key, key_comp());
    iter.position_ = res.value;
    if (iter.node_->is_leaf()) {
      break;
    }
    // Remember whether any ancestor pivot compared equal; the final leaf
    // comparison alone cannot reveal an equivalent key seen higher up.
    seen_eq = seen_eq || res.IsEq();
    iter.node_ = iter.node_->child(iter.position_);
  }
  if (res.IsEq()) return {iter, MatchKind::kEq};
  // internal_last() normalizes an end-of-node position to the next value.
  return {internal_last(iter), seen_eq ? MatchKind::kEq : MatchKind::kNe};
}
2746 
2747 template <typename P>
2748 template <typename K>
2749 auto btree<P>::internal_upper_bound(const K &key) const -> iterator {
2750  iterator iter(const_cast<node_type *>(root()));
2751  for (;;) {
2752  iter.position_ = iter.node_->upper_bound(key, key_comp());
2753  if (iter.node_->is_leaf()) {
2754  break;
2755  }
2756  iter.node_ = iter.node_->child(iter.position_);
2757  }
2758  return internal_last(iter);
2759 }
2760 
2761 template <typename P>
2762 template <typename K>
2763 auto btree<P>::internal_find(const K &key) const -> iterator {
2764  SearchResult<iterator, is_key_compare_to::value> res = internal_locate(key);
2765  if (res.HasMatch()) {
2766  if (res.IsEq()) {
2767  return res.value;
2768  }
2769  } else {
2770  const iterator iter = internal_last(res.value);
2771  if (iter.node_ != nullptr && !compare_keys(key, iter.key())) {
2772  return iter;
2773  }
2774  }
2775  return {nullptr, 0};
2776 }
2777 
// Recursively checks btree invariants in the subtree rooted at `node`, whose
// keys must all lie within [*lo, *hi] (a null bound means unbounded), and
// returns the number of values stored in the subtree.
template <typename P>
int btree<P>::internal_verify(const node_type *node, const key_type *lo,
                              const key_type *hi) const {
  assert(node->count() > 0);
  assert(node->count() <= node->max_count());
  if (lo) {
    // The node's smallest key must not be less than the lower bound.
    assert(!compare_keys(node->key(node->start()), *lo));
  }
  if (hi) {
    // The node's largest key must not exceed the upper bound.
    assert(!compare_keys(*hi, node->key(node->finish() - 1)));
  }
  // Keys within the node must be in non-decreasing order.
  for (int i = node->start() + 1; i < node->finish(); ++i) {
    assert(!compare_keys(node->key(i), node->key(i - 1)));
  }
  int count = node->count();
  if (node->is_internal()) {
    // Every child must back-reference this node, and its keys must fall
    // between the keys that bracket it here.
    for (int i = node->start(); i <= node->finish(); ++i) {
      assert(node->child(i) != nullptr);
      assert(node->child(i)->parent() == node);
      assert(node->child(i)->position() == i);
      count += internal_verify(node->child(i),
                               i == node->start() ? lo : &node->key(i - 1),
                               i == node->finish() ? hi : &node->key(i));
    }
  }
  return count;
}
2805 
// Grants free-function helpers access to btree internals (the container's
// `tree_` member and iterator fields) without widening the public API.
struct btree_access {
  // Erases every element for which `pred` returns true; returns the number of
  // erased elements. Erasures within a leaf node are batched: matching slots
  // are destroyed and the survivors compacted in a single pass before any
  // rebalancing is performed.
  template <typename BtreeContainer, typename Pred>
  static auto erase_if(BtreeContainer &container, Pred pred)
      -> typename BtreeContainer::size_type {
    const auto initial_size = container.size();
    auto &tree = container.tree_;
    auto *alloc = tree.mutable_allocator();
    for (auto it = container.begin(); it != container.end();) {
      if (!pred(*it)) {
        ++it;
        continue;
      }
      auto *node = it.node_;
      if (node->is_internal()) {
        // Handle internal nodes normally.
        it = container.erase(it);
        continue;
      }
      // If this is a leaf node, then we do all the erases from this node
      // at once before doing rebalancing.

      // The current position to transfer slots to.
      int to_pos = it.position_;
      node->value_destroy(it.position_, alloc);
      while (++it.position_ < node->finish()) {
        // Refresh the iterator's generation so dereferencing `*it` below
        // doesn't trip invalidation checks in generation-checked builds.
        it.update_generation();
        if (pred(*it)) {
          node->value_destroy(it.position_, alloc);
        } else {
          // Keep this survivor: move it left into the next free slot.
          node->transfer(node->slot(to_pos++), node->slot(it.position_), alloc);
        }
      }
      const int num_deleted = node->finish() - to_pos;
      tree.size_ -= num_deleted;
      node->set_finish(to_pos);
      it.position_ = to_pos;
      // Rebalance once, after all of this leaf's deletions are applied.
      it = tree.rebalance_after_delete(it);
    }
    return initial_size - container.size();
  }
};
2847 
2848 #undef ABSL_BTREE_ENABLE_GENERATIONS
2849 
2850 } // namespace container_internal
2851 ABSL_NAMESPACE_END
2852 } // namespace absl
2853 
2854 #endif // ABSL_CONTAINER_INTERNAL_BTREE_H_
absl::container_internal::key_compare_adapter::checked_compare
Definition: abseil-cpp/absl/container/internal/btree.h:198
test_group_name.all
all
Definition: test_group_name.py:241
absl::Cord
Definition: abseil-cpp/absl/strings/cord.h:150
absl::container_internal::StringBtreeDefaultLess::operator()
absl::weak_ordering operator()(absl::string_view lhs, absl::string_view rhs) const
Definition: abseil-cpp/absl/container/internal/btree.h:111
absl::container_internal::StringBtreeDefaultLess::StringBtreeDefaultLess
StringBtreeDefaultLess()=default
absl::container_internal::StringBtreeDefaultLess::operator()
absl::weak_ordering operator()(absl::string_view lhs, const absl::Cord &rhs) const
Definition: abseil-cpp/absl/container/internal/btree.h:124
absl::container_internal::StringBtreeDefaultGreater
Definition: abseil-cpp/absl/container/internal/btree.h:130
absl::container_internal::key_compare_adapter::checked_compare::operator()
absl::weak_ordering operator()(const T &lhs, const U &rhs) const
Definition: abseil-cpp/absl/container/internal/btree.h:244
false
#define false
Definition: setup_once.h:323
absl::container_internal::StringBtreeDefaultGreater::operator()
absl::weak_ordering operator()(absl::string_view lhs, const absl::Cord &rhs) const
Definition: abseil-cpp/absl/container/internal/btree.h:156
absl::container_internal::compare_result_t
absl::result_of_t< const Compare(const T &, const U &)> compare_result_t
Definition: abseil-cpp/absl/container/internal/btree.h:89
absl::container_internal::key_compare_adapter::checked_compare::is_self_equivalent
bool is_self_equivalent(const Key &k) const
Definition: abseil-cpp/absl/container/internal/btree.h:207
copy
static int copy(grpc_slice_buffer *input, grpc_slice_buffer *output)
Definition: message_compress.cc:145
absl::Cord::Compare
int Compare(absl::string_view rhs) const
Definition: abseil-cpp/absl/strings/cord.cc:974
absl::container_internal::StringBtreeDefaultGreater::StringBtreeDefaultGreater
StringBtreeDefaultGreater(std::greater< std::string >)
Definition: abseil-cpp/absl/container/internal/btree.h:135
absl::string_view
Definition: abseil-cpp/absl/strings/string_view.h:167
absl::container_internal::key_compare_adapter::type
absl::conditional_t< std::is_base_of< BtreeTestOnlyCheckedCompareOptOutBase, Compare >::value, Compare, checked_compare > type
Definition: abseil-cpp/absl/container/internal/btree.h:266
absl::container_internal::key_compare_adapter::checked_compare::operator()
bool operator()(const T &lhs, const U &rhs) const
Definition: abseil-cpp/absl/container/internal/btree.h:228
absl::enable_if_t
typename std::enable_if< B, T >::type enable_if_t
Definition: abseil-cpp/absl/meta/type_traits.h:631
to
size_t to
Definition: abseil-cpp/absl/container/internal/layout_test.cc:1385
absl::container_internal::checked_compare_base::comp
const Compare & comp() const
Definition: abseil-cpp/absl/container/internal/btree.h:167
setup.k
k
Definition: third_party/bloaty/third_party/capstone/bindings/python/setup.py:42
absl::container_internal::StringBtreeDefaultLess::operator()
absl::weak_ordering operator()(const absl::Cord &lhs, absl::string_view rhs) const
Definition: abseil-cpp/absl/container/internal/btree.h:120
absl::container_internal::checked_compare_base< Compare, false >::comp
const Compare & comp() const
Definition: abseil-cpp/absl/container/internal/btree.h:172
T
#define T(upbtypeconst, upbtype, ctype, default_value)
call
FilterStackCall * call
Definition: call.cc:750
absl::compare_internal::compare_result_as_ordering
constexpr absl::weak_ordering compare_result_as_ordering(const Int c)
Definition: abseil-cpp/absl/types/compare.h:565
ABSL_NAMESPACE_BEGIN
#define ABSL_NAMESPACE_BEGIN
Definition: third_party/abseil-cpp/absl/base/config.h:170
absl::container_internal::key_compare_adapter::checked_compare::is_self_equivalent
bool is_self_equivalent(const T &) const
Definition: abseil-cpp/absl/container/internal/btree.h:213
absl::move
constexpr absl::remove_reference_t< T > && move(T &&t) noexcept
Definition: abseil-cpp/absl/utility/utility.h:221
absl::container_internal::StringBtreeDefaultLess::is_transparent
void is_transparent
Definition: abseil-cpp/absl/container/internal/btree.h:98
absl::container_internal::StringBtreeDefaultLess
Definition: abseil-cpp/absl/container/internal/btree.h:97
absl::equal
bool equal(InputIter1 first1, InputIter1 last1, InputIter2 first2, InputIter2 last2, Pred &&pred)
Definition: abseil-cpp/absl/algorithm/algorithm.h:104
Base
Definition: bloaty/third_party/googletest/googletest/test/gtest_unittest.cc:5141
b
uint64_t b
Definition: abseil-cpp/absl/container/internal/layout_test.cc:53
absl::container_internal::StringBtreeDefaultGreater::operator()
absl::weak_ordering operator()(const absl::Cord &lhs, absl::string_view rhs) const
Definition: abseil-cpp/absl/container/internal/btree.h:152
absl::container_internal::StringBtreeDefaultGreater::StringBtreeDefaultGreater
StringBtreeDefaultGreater(std::greater< absl::Cord >)
Definition: abseil-cpp/absl/container/internal/btree.h:147
absl::container_internal::btree_iterator
Definition: abseil-cpp/absl/container/internal/btree.h:1036
absl::container_internal::StringBtreeDefaultGreater::StringBtreeDefaultGreater
StringBtreeDefaultGreater()=default
value
const char * value
Definition: hpack_parser_table.cc:165
absl::container_internal::StringBtreeDefaultLess::StringBtreeDefaultLess
StringBtreeDefaultLess(std::less< absl::Cord >)
Definition: abseil-cpp/absl/container/internal/btree.h:115
key
const char * key
Definition: hpack_parser_table.cc:164
absl::container_internal::StringBtreeDefaultLess::operator()
absl::weak_ordering operator()(const absl::Cord &lhs, const absl::Cord &rhs) const
Definition: abseil-cpp/absl/container/internal/btree.h:116
absl::container_internal::key_compare_adapter
Definition: abseil-cpp/absl/container/internal/btree.h:190
testing::Key
internal::KeyMatcher< M > Key(M inner_matcher)
Definition: cares/cares/test/gmock-1.8.0/gmock/gmock.h:9141
absl::string_view::compare
constexpr int compare(string_view x) const noexcept
Definition: abseil-cpp/absl/strings/string_view.h:413
std
Definition: grpcpp/impl/codegen/async_unary_call.h:407
absl::container_internal::StringBtreeDefaultGreater::is_transparent
void is_transparent
Definition: abseil-cpp/absl/container/internal/btree.h:131
compare
static int compare(const TEST_INT **a, const TEST_INT **b)
Definition: stack_test.cc:214
absl::weak_ordering
Definition: abseil-cpp/absl/types/compare.h:347
absl::strings_internal::Compare
int Compare(const BigUnsigned< N > &lhs, const BigUnsigned< M > &rhs)
Definition: abseil-cpp/absl/strings/internal/charconv_bigint.h:353
absl::container_internal::StringBtreeDefaultGreater::StringBtreeDefaultGreater
StringBtreeDefaultGreater(std::greater< absl::string_view >)
Definition: abseil-cpp/absl/container/internal/btree.h:136
absl::container_internal::checked_compare_base< Compare, false >::checked_compare_base
checked_compare_base(Compare c)
Definition: abseil-cpp/absl/container/internal/btree.h:171
absl::container_internal::checked_compare_base< Compare, false >::compare
Compare compare
Definition: abseil-cpp/absl/container/internal/btree.h:173
absl::container_internal::StringBtreeDefaultLess::StringBtreeDefaultLess
StringBtreeDefaultLess(std::less< absl::string_view >)
Definition: abseil-cpp/absl/container/internal/btree.h:104
absl::container_internal::StringBtreeDefaultGreater::operator()
absl::weak_ordering operator()(absl::string_view lhs, absl::string_view rhs) const
Definition: abseil-cpp/absl/container/internal/btree.h:143
iter
Definition: test_winkernel.cpp:47
absl
Definition: abseil-cpp/absl/algorithm/algorithm.h:31
absl::container_internal::BtreeTestOnlyCheckedCompareOptOutBase
Definition: abseil-cpp/absl/container/internal/btree.h:177
absl::container_internal::btree_is_key_compare_to
std::is_convertible< compare_result_t< Compare, T, T >, absl::weak_ordering > btree_is_key_compare_to
Definition: abseil-cpp/absl/container/internal/btree.h:95
absl::container_internal::StringBtreeDefaultGreater::operator()
absl::weak_ordering operator()(const absl::Cord &lhs, const absl::Cord &rhs) const
Definition: abseil-cpp/absl/container/internal/btree.h:148
size
voidpf void uLong size
Definition: bloaty/third_party/zlib/contrib/minizip/ioapi.h:136
absl::container_internal::checked_compare_base
Definition: abseil-cpp/absl/container/internal/btree.h:164
compare
Definition: benchmark/tools/compare.py:1
absl::result_of_t
typename type_traits_internal::result_of< F >::type result_of_t
Definition: abseil-cpp/absl/meta/type_traits.h:658
absl::container_internal::checked_compare_base::checked_compare_base
checked_compare_base(Compare c)
Definition: abseil-cpp/absl/container/internal/btree.h:166
absl::container_internal::key_compare_adapter::checked_compare::checked_compare
checked_compare(Compare comp)
Definition: abseil-cpp/absl/container/internal/btree.h:219
absl::container_internal::StringBtreeDefaultLess::StringBtreeDefaultLess
StringBtreeDefaultLess(std::less< std::string >)
Definition: abseil-cpp/absl/container/internal/btree.h:103
container
static struct async_container * container
Definition: benchmark-million-async.c:33
absl::container_internal::btree< container_internal::set_params< Key, std::less< Key >, std::allocator< Key >, 256, true > >::iterator
typename btree_iterator< node_type, reference, pointer >::iterator iterator
Definition: abseil-cpp/absl/container/internal/btree.h:1296


grpc
Author(s):
autogenerated on Fri May 16 2025 02:57:49