45 #ifndef ABSL_CONTAINER_INTERNAL_BTREE_H_
46 #define ABSL_CONTAINER_INTERNAL_BTREE_H_
58 #include <type_traits>
61 #include "absl/base/internal/raw_logging.h"
62 #include "absl/base/macros.h"
63 #include "absl/container/internal/common.h"
64 #include "absl/container/internal/compressed_tuple.h"
65 #include "absl/container/internal/container_memory.h"
66 #include "absl/container/internal/layout.h"
67 #include "absl/memory/memory.h"
68 #include "absl/meta/type_traits.h"
69 #include "absl/strings/cord.h"
70 #include "absl/strings/string_view.h"
71 #include "absl/types/compare.h"
72 #include "absl/utility/utility.h"
76 namespace container_internal {
78 #ifdef ABSL_BTREE_ENABLE_GENERATIONS
79 #error ABSL_BTREE_ENABLE_GENERATIONS cannot be directly set
80 #elif defined(ABSL_HAVE_ADDRESS_SANITIZER) || \
81 defined(ABSL_HAVE_MEMORY_SANITIZER)
85 #define ABSL_BTREE_ENABLE_GENERATIONS
88 template <typename Compare, typename T, typename U>
93 template <typename Compare, typename T>
107 explicit operator std::less<std::string>() const { return {}; }
108 explicit operator std::less<absl::string_view>() const { return {}; }
109 explicit operator std::less<absl::Cord>() const { return {}; }
139 explicit operator std::greater<std::string>() const { return {}; }
140 explicit operator std::greater<absl::string_view>() const { return {}; }
141 explicit operator std::greater<absl::Cord>() const { return {}; }
169 template <typename Compare>
189 template <typename Compare, typename Key>
212 template <typename T>
224 template <typename T, typename U,
226 std::is_same<bool, compare_result_t<Compare, T, U>>::value,
234 const bool lhs_comp_rhs = comp()(lhs, rhs);
235 assert(!lhs_comp_rhs || !comp()(rhs, lhs));
240 typename T, typename U,
253 if (lhs_comp_rhs > 0) {
254 assert(rhs_comp_lhs < 0 && "lhs_comp_rhs > 0 -> rhs_comp_lhs < 0");
255 } else if (lhs_comp_rhs == 0) {
256 assert(rhs_comp_lhs == 0 && "lhs_comp_rhs == 0 -> rhs_comp_lhs == 0");
258 assert(rhs_comp_lhs > 0 && "lhs_comp_rhs < 0 -> rhs_comp_lhs > 0");
264 using type = absl::conditional_t<
265 std::is_base_of<BtreeTestOnlyCheckedCompareOptOutBase, Compare>::value,
266 Compare, checked_compare>;
270 struct key_compare_adapter<std::less<std::string>, std::string> {
271 using type = StringBtreeDefaultLess;
275 struct key_compare_adapter<std::greater<std::string>, std::string> {
276 using type = StringBtreeDefaultGreater;
280 struct key_compare_adapter<std::less<absl::string_view>, absl::string_view> {
281 using type = StringBtreeDefaultLess;
285 struct key_compare_adapter<std::greater<absl::string_view>, absl::string_view> {
286 using type = StringBtreeDefaultGreater;
290 struct key_compare_adapter<std::less<absl::Cord>, absl::Cord> {
291 using type = StringBtreeDefaultLess;
295 struct key_compare_adapter<std::greater<absl::Cord>, absl::Cord> {
296 using type = StringBtreeDefaultGreater;
299 // Detects an 'absl_btree_prefer_linear_node_search' member. This is
300 // a protocol used as an opt-in or opt-out of linear search.
302 // For example, this is useful for key types that wrap an integer
303 // and define their own cheap operator<():
307 // using absl_btree_prefer_linear_node_search = std::true_type;
310 // friend bool operator<(K a, K b) { return a.k_ < b.k_; }
314 // btree_map<K, V> m; // Uses linear search
316 // If T has the preference tag, then it has a preference.
317 // Btree will use the tag's truth value.
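// Conversely, a type can opt out of linear search (even when the default
// rules below would pick it) by declaring the tag with a false value:
//
//   using absl_btree_prefer_linear_node_search = std::false_type;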
318 template <typename T, typename = void>
319 struct has_linear_node_search_preference : std::false_type {};
320 template <typename T, typename = void>
321 struct prefers_linear_node_search : std::false_type {};
322 template <typename T>
323 struct has_linear_node_search_preference<
324 T, absl::void_t<typename T::absl_btree_prefer_linear_node_search>>
326 template <typename T>
327 struct prefers_linear_node_search<
328 T, absl::void_t<typename T::absl_btree_prefer_linear_node_search>>
329 : T::absl_btree_prefer_linear_node_search {};
331 template <typename Compare, typename Key>
332 constexpr bool compare_has_valid_result_type() {
333 using compare_result_type = compare_result_t<Compare, Key, Key>;
334 return std::is_same<compare_result_type, bool>::value ||
335 std::is_convertible<compare_result_type, absl::weak_ordering>::value;
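// For instance, std::less<int> yields bool and a compare-to functor yields
// absl::weak_ordering, so both pass; a comparator returning, say, int would
// fail the check. Illustrative:
//
//   static_assert(compare_has_valid_result_type<std::less<int>, int>(), "");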
338 template <typename original_key_compare, typename value_type>
339 class map_value_compare {
340 template <typename Params>
343 // Note: this `protected` is part of the API of std::map::value_compare. See
344 // https://en.cppreference.com/w/cpp/container/map/value_compare.
346 explicit map_value_compare(original_key_compare c) : comp(std::move(c)) {}
348 original_key_compare comp; // NOLINT
351 auto operator()(const value_type &lhs, const value_type &rhs) const
352 -> decltype(comp(lhs.first, rhs.first)) {
353 return comp(lhs.first, rhs.first);
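// Sketch of what this adapter does (the constructor is protected, so
// instances are normally obtained from a map's value_comp(); requires
// absl/container/btree_map.h):
//
//   absl::btree_map<std::string, int> m;
//   auto cmp = m.value_comp();
//   cmp(std::make_pair("a", 1), std::make_pair("b", 2));  // true: compares
//                                                         // "a" < "b" only.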
357 template <typename Key, typename Compare, typename Alloc, int TargetNodeSize,
358 bool IsMulti, bool IsMap, typename SlotPolicy>
359 struct common_params {
360 using original_key_compare = Compare;
362 // If Compare is a common comparator for a string-like type, then we adapt it
363 // to use heterogeneous lookup and to be a key-compare-to comparator.
364 // We also adapt the comparator to diagnose invalid comparators in debug mode.
365 // We disable this when `Compare` is invalid in a way that will cause
366 // adaptation to fail (having invalid return type) so that we can give a
367 // better compilation failure in static_assert_validation. If we don't do
368 // this, then there will be cascading compilation failures that are confusing
371 absl::conditional_t<!compare_has_valid_result_type<Compare, Key>(),
373 typename key_compare_adapter<Compare, Key>::type>;
375 static constexpr bool kIsKeyCompareStringAdapted =
376 std::is_same<key_compare, StringBtreeDefaultLess>::value ||
377 std::is_same<key_compare, StringBtreeDefaultGreater>::value;
378 static constexpr bool kIsKeyCompareTransparent =
379 IsTransparent<original_key_compare>::value ||
380 kIsKeyCompareStringAdapted;
381 static constexpr bool kEnableGenerations =
382 #ifdef ABSL_BTREE_ENABLE_GENERATIONS
388 // A type which indicates if we have a key-compare-to functor or a plain old
389 // key-compare functor.
390 using is_key_compare_to = btree_is_key_compare_to<key_compare, Key>;
392 using allocator_type = Alloc;
393 using key_type = Key;
394 using size_type = size_t;
395 using difference_type = ptrdiff_t;
397 using slot_policy = SlotPolicy;
398 using slot_type = typename slot_policy::slot_type;
399 using value_type = typename slot_policy::value_type;
400 using init_type = typename slot_policy::mutable_value_type;
401 using pointer = value_type *;
402 using const_pointer = const value_type *;
403 using reference = value_type &;
404 using const_reference = const value_type &;
406 using value_compare =
407 absl::conditional_t<IsMap,
408 map_value_compare<original_key_compare, value_type>,
409 original_key_compare>;
410 using is_map_container = std::integral_constant<bool, IsMap>;
412 // For the given lookup key type, returns whether we can have multiple
413 // equivalent keys in the btree. If this is a multi-container, then we can.
414 // Otherwise, we can have multiple equivalent keys only if all of the
415 // following conditions are met:
416 // - The comparator is transparent.
417 // - The lookup key type is not the same as key_type.
418 // - The comparator is not a StringBtreeDefault{Less,Greater} comparator
419 // that we know has the same equivalence classes for all lookup types.
420 template <typename LookupKey>
421 constexpr static bool can_have_multiple_equivalent_keys() {
422 return IsMulti || (IsTransparent<key_compare>::value &&
423 !std::is_same<LookupKey, Key>::value &&
424 !kIsKeyCompareStringAdapted);
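// Example (sketch, with a hypothetical prefix-only lookup type): given a
// transparent comparator over std::string that can also compare a stored
// key against a Prefix lookup key, the distinct stored keys "ab" and "ac"
// both compare equivalent to Prefix{"a"}, so
// can_have_multiple_equivalent_keys<Prefix>() must return true even in a
// non-multi container.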
428 kTargetNodeSize = TargetNodeSize,
430 // Upper bound for the available space for slots. This is largest for leaf
431 // nodes, which have overhead of at least a pointer + 4 bytes (for storing
432 // 3 field_types and an enum).
434 TargetNodeSize - /*minimum overhead=*/(sizeof(void *) + 4),
437 // This is an integral type large enough to hold as many slots as will fit a
438 // node of TargetNodeSize bytes.
439 using node_count_type =
440 absl::conditional_t<(kNodeSlotSpace / sizeof(slot_type) >
441 (std::numeric_limits<uint8_t>::max)()),
442 uint16_t, uint8_t>; // NOLINT
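// Worked example (illustrative numbers): with TargetNodeSize = 256 and
// 8-byte slots, at most ~30 slots fit, which is below 255, so uint8_t
// suffices; with TargetNodeSize = 4096 and 8-byte slots, up to
// (4096 - 12) / 8 = 510 slots could fit, so uint16_t is chosen.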
444 // The following methods are necessary for passing this struct as PolicyTraits
445 // for node_handle and/or are used within btree.
446 static value_type &element(slot_type *slot) {
447 return slot_policy::element(slot);
449 static const value_type &element(const slot_type *slot) {
450 return slot_policy::element(slot);
452 template <class... Args>
453 static void construct(Alloc *alloc, slot_type *slot, Args &&... args) {
454 slot_policy::construct(alloc, slot, std::forward<Args>(args)...);
456 static void construct(Alloc *alloc, slot_type *slot, slot_type *other) {
457 slot_policy::construct(alloc, slot, other);
459 static void destroy(Alloc *alloc, slot_type *slot) {
460 slot_policy::destroy(alloc, slot);
462 static void transfer(Alloc *alloc, slot_type *new_slot, slot_type *old_slot) {
463 slot_policy::transfer(alloc, new_slot, old_slot);
467 // An adapter class that converts a lower-bound compare into an upper-bound
468 // compare. Note: there is no need to make a version of this adapter specialized
469 // for key-compare-to functors because the upper-bound (the first value greater
470 // than the input) is never an exact match.
471 template <typename Compare>
472 struct upper_bound_adapter {
473 explicit upper_bound_adapter(const Compare &c) : comp(c) {}
474 template <typename K1, typename K2>
475 bool operator()(const K1 &a, const K2 &b) const {
476 // Returns true when a is not greater than b.
477 return !compare_internal::compare_result_as_less_than(comp(b, a));
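// In other words, a lower-bound search with the adapted comparator finds
// the first element e for which adapter(e, k), i.e. !(k < e), is false --
// the first element with k < e, which is exactly the upper bound. Sketch:
// for node keys {1, 2, 2, 3} and k = 2, it lands on the 3, matching
// std::upper_bound.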
484 enum class MatchKind : uint8_t { kEq, kNe };
486 template <typename V, bool IsCompareTo>
487 struct SearchResult {
491 static constexpr bool HasMatch() { return true; }
492 bool IsEq() const { return match == MatchKind::kEq; }
495 // When we don't use CompareTo, `match` is not present.
496 // This ensures that callers can't use it accidentally when it provides no
497 // useful information.
498 template <typename V>
499 struct SearchResult<V, false> {
501 explicit SearchResult(V v) : value(v) {}
502 SearchResult(V v, MatchKind /*match*/) : value(v) {}
506 static constexpr bool HasMatch() { return false; }
507 static constexpr bool IsEq() { return false; }
510 // A node in the btree. The same node type is used for both internal
511 // and leaf nodes in the btree, though the nodes are allocated in such a way
512 // that the children array is only valid in internal nodes.
513 template <typename Params>
515 using is_key_compare_to = typename Params::is_key_compare_to;
516 using field_type = typename Params::node_count_type;
517 using allocator_type = typename Params::allocator_type;
518 using slot_type = typename Params::slot_type;
519 using original_key_compare = typename Params::original_key_compare;
522 using params_type = Params;
523 using key_type = typename Params::key_type;
524 using value_type = typename Params::value_type;
525 using pointer = typename Params::pointer;
526 using const_pointer = typename Params::const_pointer;
527 using reference = typename Params::reference;
528 using const_reference = typename Params::const_reference;
529 using key_compare = typename Params::key_compare;
530 using size_type = typename Params::size_type;
531 using difference_type = typename Params::difference_type;
533 // Btree decides whether to use linear node search as follows:
534 // - If the comparator expresses a preference, use that.
535 // - If the key expresses a preference, use that.
536 // - If the key is arithmetic and the comparator is std::less or
537 // std::greater, choose linear.
538 // - Otherwise, choose binary.
539 // TODO(ezb): Might make sense to add condition(s) based on node-size.
540 using use_linear_search = std::integral_constant<
541 bool, has_linear_node_search_preference<original_key_compare>::value
542 ? prefers_linear_node_search<original_key_compare>::value
543 : has_linear_node_search_preference<key_type>::value
544 ? prefers_linear_node_search<key_type>::value
545 : std::is_arithmetic<key_type>::value &&
546 (std::is_same<std::less<key_type>,
547 original_key_compare>::value ||
548 std::is_same<std::greater<key_type>,
549 original_key_compare>::value)>;
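// Examples (sketch): btree_set<int> picks linear search (arithmetic key
// with std::less<int>); btree_set<std::string> picks binary search; and a
// key type declaring absl_btree_prefer_linear_node_search overrides both
// rules.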
551 // This class is organized by absl::container_internal::Layout as if it had
552 // the following structure:
553 // // A pointer to the node's parent.
554 // btree_node *parent;
556 // // When ABSL_BTREE_ENABLE_GENERATIONS is defined, we also have a
557 // // generation integer in order to check that when iterators are
558 // // used, they haven't been invalidated already. Only the generation on
559 // // the root is used, but we have one on each node because whether a node
560 // // is root or not can change.
561 // uint32_t generation;
563 // // The position of the node in the node's parent.
564 // field_type position;
565 // // The index of the first populated value in `values`.
566 // // TODO(ezb): right now, `start` is always 0. Update insertion/merge
567 // // logic to allow for floating storage within nodes.
569 // // The index after the last populated value in `values`. Currently, this
570 // // is the same as the count of values.
571 // field_type finish;
572 // // The maximum number of values the node can hold. This is an integer in
573 // // [1, kNodeSlots] for root leaf nodes, kNodeSlots for non-root leaf
574 // // nodes, and kInternalNodeMaxCount (as a sentinel value) for internal
575 // // nodes (even though there are still kNodeSlots values in the node).
576 // // TODO(ezb): make max_count use only 4 bits and record log2(capacity)
577 // // to free extra bits for is_root, etc.
578 // field_type max_count;
580 // // The array of values. The capacity is `max_count` for leaf nodes and
581 // // kNodeSlots for internal nodes. Only the values in
582 // // [start, finish) have been initialized and are valid.
583 // slot_type values[max_count];
585 // // The array of child pointers. The keys in children[i] are all less
586 // // than key(i). The keys in children[i + 1] are all greater than key(i).
587 // // There are 0 children for leaf nodes and kNodeSlots + 1 children for
588 // // internal nodes.
589 // btree_node *children[kNodeSlots + 1];
591 // This class is only constructed by EmptyNodeType. Normally, pointers to the
592 // layout above are allocated, cast to btree_node*, and de-allocated within
593 // the btree implementation.
594 ~btree_node() = default;
595 btree_node(btree_node const &) = delete;
596 btree_node &operator=(btree_node const &) = delete;
598 // Public for EmptyNodeType.
599 constexpr static size_type Alignment() {
600 static_assert(LeafLayout(1).Alignment() == InternalLayout().Alignment(),
601 "Alignment of all nodes must be equal.");
602 return InternalLayout().Alignment();
606 btree_node() = default;
610 absl::container_internal::Layout<btree_node *, uint32_t, field_type,
611 slot_type, btree_node *>;
612 constexpr static size_type SizeWithNSlots(size_type n) {
615 /*generation*/ params_type::kEnableGenerations ? 1 : 0,
616 /*position, start, finish, max_count*/ 4,
621 // A lower bound for the overhead of fields other than slots in a leaf node.
622 constexpr static size_type MinimumOverhead() {
623 return SizeWithNSlots(1) - sizeof(slot_type);
626 // Compute how many values we can fit onto a leaf node taking into account
628 constexpr static size_type NodeTargetSlots(const size_type begin,
629 const size_type end) {
630 return begin == end ? begin
631 : SizeWithNSlots((begin + end) / 2 + 1) >
632 params_type::kTargetNodeSize
633 ? NodeTargetSlots(begin, (begin + end) / 2)
634 : NodeTargetSlots((begin + end) / 2 + 1, end);
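// Worked example (illustrative, assuming a 256-byte target, an 8-byte
// parent pointer, no generation field, and 8-byte slots): the per-node
// overhead is 16 bytes (pointer + 4 one-byte fields + padding), so the
// recursion converges on the largest n with 16 + 8 * n <= 256, i.e. n = 30
// slots.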
638 kTargetNodeSize = params_type::kTargetNodeSize,
639 kNodeTargetSlots = NodeTargetSlots(0, params_type::kTargetNodeSize),
641 // We need a minimum of 3 slots per internal node in order to perform
642 // splitting (1 value for the two nodes involved in the split and 1 value
643 // propagated to the parent as the delimiter for the split). For performance
644 // reasons, we don't allow 3 slots-per-node due to bad worst case occupancy
645 // of 1/3 (for a node, not a b-tree).
649 kNodeTargetSlots >= kMinNodeSlots ? kNodeTargetSlots : kMinNodeSlots,
651 // The node is internal (i.e. is not a leaf node) if and only if `max_count`
653 kInternalNodeMaxCount = 0,
656 // Leaves can have fewer than kNodeSlots values.
657 constexpr static layout_type LeafLayout(const int slot_count = kNodeSlots) {
660 /*generation*/ params_type::kEnableGenerations ? 1 : 0,
661 /*position, start, finish, max_count*/ 4,
662 /*slots*/ slot_count,
665 constexpr static layout_type InternalLayout() {
668 /*generation*/ params_type::kEnableGenerations ? 1 : 0,
669 /*position, start, finish, max_count*/ 4,
670 /*slots*/ kNodeSlots,
671 /*children*/ kNodeSlots + 1);
673 constexpr static size_type LeafSize(const int slot_count = kNodeSlots) {
674 return LeafLayout(slot_count).AllocSize();
676 constexpr static size_type InternalSize() {
677 return InternalLayout().AllocSize();
680 // N is the index of the type in the Layout definition.
681 // ElementType<N> is the Nth type in the Layout definition.
682 template <size_type N>
683 inline typename layout_type::template ElementType<N> *GetField() {
684 // We assert that we don't read from values that aren't there.
685 assert(N < 4 || is_internal());
686 return InternalLayout().template Pointer<N>(reinterpret_cast<char *>(this));
688 template <size_type N>
689 inline const typename layout_type::template ElementType<N> *GetField() const {
690 assert(N < 4 || is_internal());
691 return InternalLayout().template Pointer<N>(
692 reinterpret_cast<const char *>(this));
694 void set_parent(btree_node *p) { *GetField<0>() = p; }
695 field_type &mutable_finish() { return GetField<2>()[2]; }
696 slot_type *slot(int i) { return &GetField<3>()[i]; }
697 slot_type *start_slot() { return slot(start()); }
698 slot_type *finish_slot() { return slot(finish()); }
699 const slot_type *slot(int i) const { return &GetField<3>()[i]; }
700 void set_position(field_type v) { GetField<2>()[0] = v; }
701 void set_start(field_type v) { GetField<2>()[1] = v; }
702 void set_finish(field_type v) { GetField<2>()[2] = v; }
703 // This method is only called by the node init methods.
704 void set_max_count(field_type v) { GetField<2>()[3] = v; }
707 // Whether this is a leaf node or not. This value doesn't change after the
709 bool is_leaf() const { return GetField<2>()[3] != kInternalNodeMaxCount; }
710 // Whether this is an internal node or not. This value doesn't change after
711 // the node is created.
712 bool is_internal() const { return !is_leaf(); }
714 // Getter for the position of this node in its parent.
715 field_type position() const { return GetField<2>()[0]; }
717 // Getter for the offset of the first value in the `values` array.
718 field_type start() const {
719 // TODO(ezb): when floating storage is implemented, return GetField<2>()[1];
720 assert(GetField<2>()[1] == 0);
724 // Getter for the offset after the last value in the `values` array.
725 field_type finish() const { return GetField<2>()[2]; }
727 // Getters for the number of values stored in this node.
728 field_type count() const {
729 assert(finish() >= start());
730 return finish() - start();
732 field_type max_count() const {
733 // Internal nodes have max_count==kInternalNodeMaxCount.
734 // Leaf nodes have max_count in [1, kNodeSlots].
735 const field_type max_count = GetField<2>()[3];
736 return max_count == field_type{kInternalNodeMaxCount}
737 ? field_type{kNodeSlots}
741 // Getter for the parent of this node.
742 btree_node *parent() const { return *GetField<0>(); }
743 // Getter for whether the node is the root of the tree. The parent of the
744 // root of the tree is the leftmost node in the tree which is guaranteed to
746 bool is_root() const { return parent()->is_leaf(); }
748 assert(parent()->is_root());
749 set_generation(parent()->generation());
750 set_parent(parent()->parent());
753 // Gets the root node's generation integer, which is the one used by the tree.
754 uint32_t *get_root_generation() const {
755 assert(params_type::kEnableGenerations);
756 const btree_node *curr = this;
757 for (; !curr->is_root(); curr = curr->parent()) continue;
758 return const_cast<uint32_t *>(&curr->GetField<1>()[0]);
761 // Returns the generation for iterator validation.
762 uint32_t generation() const {
763 return params_type::kEnableGenerations ? *get_root_generation() : 0;
765 // Updates generation. Should only be called on a root node or during node
767 void set_generation(uint32_t generation) {
768 if (params_type::kEnableGenerations) GetField<1>()[0] = generation;
770 // Updates the generation. We do this whenever the node is mutated.
771 void next_generation() {
772 if (params_type::kEnableGenerations) ++*get_root_generation();
775 // Getters for the key/value at position i in the node.
776 const key_type &key(int i) const { return params_type::key(slot(i)); }
777 reference value(int i) { return params_type::element(slot(i)); }
778 const_reference value(int i) const { return params_type::element(slot(i)); }
780 // Getters/setter for the child at position i in the node.
781 btree_node *child(int i) const { return GetField<4>()[i]; }
782 btree_node *start_child() const { return child(start()); }
783 btree_node *&mutable_child(int i) { return GetField<4>()[i]; }
784 void clear_child(int i) {
785 absl::container_internal::SanitizerPoisonObject(&mutable_child(i));
787 void set_child(int i, btree_node *c) {
788 absl::container_internal::SanitizerUnpoisonObject(&mutable_child(i));
789 mutable_child(i) = c;
792 void init_child(int i, btree_node *c) {
797 // Returns the position of the first value whose key is not less than k.
798 template <typename K>
799 SearchResult<int, is_key_compare_to::value> lower_bound(
800 const K &k, const key_compare &comp) const {
801 return use_linear_search::value ? linear_search(k, comp)
802 : binary_search(k, comp);
804 // Returns the position of the first value whose key is greater than k.
805 template <typename K>
806 int upper_bound(const K &k, const key_compare &comp) const {
807 auto upper_compare = upper_bound_adapter<key_compare>(comp);
808 return use_linear_search::value ? linear_search(k, upper_compare).value
809 : binary_search(k, upper_compare).value;
812 template <typename K, typename Compare>
813 SearchResult<int, btree_is_key_compare_to<Compare, key_type>::value>
814 linear_search(const K &k, const Compare &comp) const {
815 return linear_search_impl(k, start(), finish(), comp,
816 btree_is_key_compare_to<Compare, key_type>());
819 template <typename K, typename Compare>
820 SearchResult<int, btree_is_key_compare_to<Compare, key_type>::value>
821 binary_search(const K &k, const Compare &comp) const {
822 return binary_search_impl(k, start(), finish(), comp,
823 btree_is_key_compare_to<Compare, key_type>());
826 // Returns the position of the first value whose key is not less than k using
827 // linear search performed using plain compare.
828 template <typename K, typename Compare>
829 SearchResult<int, false> linear_search_impl(
830 const K &k, int s, const int e, const Compare &comp,
831 std::false_type /* IsCompareTo */) const {
833 if (!comp(key(s), k)) {
838 return SearchResult<int, false>{s};
841 // Returns the position of the first value whose key is not less than k using
842 // linear search performed using compare-to.
843 template <typename K, typename Compare>
844 SearchResult<int, true> linear_search_impl(
845 const K &k, int s, const int e, const Compare &comp,
846 std::true_type /* IsCompareTo */) const {
848 const absl::weak_ordering c = comp(key(s), k);
850 return {s, MatchKind::kEq};
856 return {s, MatchKind::kNe};
859 // Returns the position of the first value whose key is not less than k using
860 // binary search performed using plain compare.
861 template <typename K, typename Compare>
862 SearchResult<int, false> binary_search_impl(
863 const K &k, int s, int e, const Compare &comp,
864 std::false_type /* IsCompareTo */) const {
866 const int mid = (s + e) >> 1;
867 if (comp(key(mid), k)) {
873 return SearchResult<int, false>{s};
876 // Returns the position of the first value whose key is not less than k using
877 // binary search performed using compare-to.
878 template <typename K, typename CompareTo>
879 SearchResult<int, true> binary_search_impl(
880 const K &k, int s, int e, const CompareTo &comp,
881 std::true_type /* IsCompareTo */) const {
882 if (params_type::template can_have_multiple_equivalent_keys<K>()) {
883 MatchKind exact_match = MatchKind::kNe;
885 const int mid = (s + e) >> 1;
886 const absl::weak_ordering c = comp(key(mid), k);
892 // Need to return the first value whose key is not less than k,
893 // which requires continuing the binary search if there could be
894 // multiple equivalent keys.
895 exact_match = MatchKind::kEq;
899 return {s, exact_match};
900 } else { // Can't have multiple equivalent keys.
902 const int mid = (s + e) >> 1;
903 const absl::weak_ordering c = comp(key(mid), k);
909 return {mid, MatchKind::kEq};
912 return {s, MatchKind::kNe};
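// Example (sketch): searching a node holding keys {1, 2, 2, 2, 3} for
// k = 2 in a multi-container: a middle probe may land on any of the 2s, so
// the search records MatchKind::kEq and keeps bisecting left until it
// settles on the first 2 -- the lower bound.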
916 // Emplaces a value at position i, shifting all existing values and
917 // children at positions >= i to the right by 1.
918 template <typename... Args>
919 void emplace_value(size_type i, allocator_type *alloc, Args &&... args);
921 // Removes the values at positions [i, i + to_erase), shifting all existing
922 // values and children after that range to the left by to_erase. Clears all
923 // children between [i, i + to_erase).
924 void remove_values(field_type i, field_type to_erase, allocator_type *alloc);
926 // Rebalances a node with its right sibling.
927 void rebalance_right_to_left(int to_move, btree_node *right,
928 allocator_type *alloc);
929 void rebalance_left_to_right(int to_move, btree_node *right,
930 allocator_type *alloc);
932 // Splits a node, moving a portion of the node's values to its right sibling.
933 void split(int insert_position, btree_node *dest, allocator_type *alloc);
935 // Merges a node with its right sibling, moving all of the values and the
936 // delimiting key in the parent node onto itself, and deleting the src node.
937 void merge(btree_node *src, allocator_type *alloc);
939 // Node allocation/deletion routines.
940 void init_leaf(int max_count, btree_node *parent) {
946 set_max_count(max_count);
947 absl::container_internal::SanitizerPoisonMemoryRegion(
948 start_slot(), max_count * sizeof(slot_type));
950 void init_internal(btree_node *parent) {
951 init_leaf(kNodeSlots, parent);
952 // Set `max_count` to a sentinel value to indicate that this node is
954 set_max_count(kInternalNodeMaxCount);
955 absl::container_internal::SanitizerPoisonMemoryRegion(
956 &mutable_child(start()), (kNodeSlots + 1) * sizeof(btree_node *));
959 static void deallocate(const size_type size, btree_node *node,
960 allocator_type *alloc) {
961 absl::container_internal::Deallocate<Alignment()>(alloc, node, size);
964 // Deletes a node and all of its children.
965 static void clear_and_delete(btree_node *node, allocator_type *alloc);
968 template <typename... Args>
969 void value_init(const field_type i, allocator_type *alloc, Args &&... args) {
971 absl::container_internal::SanitizerUnpoisonObject(slot(i));
972 params_type::construct(alloc, slot(i), std::forward<Args>(args)...);
974 void value_destroy(const field_type i, allocator_type *alloc) {
976 params_type::destroy(alloc, slot(i));
977 absl::container_internal::SanitizerPoisonObject(slot(i));
979 void value_destroy_n(const field_type i, const field_type n,
980 allocator_type *alloc) {
982 for (slot_type *s = slot(i), *end = slot(i + n); s != end; ++s) {
983 params_type::destroy(alloc, s);
984 absl::container_internal::SanitizerPoisonObject(s);
988 static void transfer(slot_type *dest, slot_type *src, allocator_type *alloc) {
989 absl::container_internal::SanitizerUnpoisonObject(dest);
990 params_type::transfer(alloc, dest, src);
991 absl::container_internal::SanitizerPoisonObject(src);
994 // Transfers value from slot `src_i` in `src_node` to slot `dest_i` in `this`.
995 void transfer(const size_type dest_i, const size_type src_i,
996 btree_node *src_node, allocator_type *alloc) {
998 transfer(slot(dest_i), src_node->slot(src_i), alloc);
1001 // Transfers `n` values starting at value `src_i` in `src_node` into the
1002 // values starting at value `dest_i` in `this`.
1003 void transfer_n(const size_type n, const size_type dest_i,
1004 const size_type src_i, btree_node *src_node,
1005 allocator_type *alloc) {
1007 for (slot_type *src = src_node->slot(src_i), *end = src + n,
1008 *dest = slot(dest_i);
1009 src != end; ++src, ++dest) {
1010 transfer(dest, src, alloc);
1014 // Same as above, except that we start at the end and work our way to the
1016 void transfer_n_backward(const size_type n, const size_type dest_i,
1017 const size_type src_i, btree_node *src_node,
1018 allocator_type *alloc) {
1020 for (slot_type *src = src_node->slot(src_i + n - 1), *end = src - n,
1021 *dest = slot(dest_i + n - 1);
1022 src != end; --src, --dest) {
1023 transfer(dest, src, alloc);
1027 template <typename P>
1029 template <typename N, typename R, typename P>
1030 friend class btree_iterator;
1031 friend class BtreeNodePeer;
1032 friend struct btree_access;
1035 template <typename Node, typename Reference, typename Pointer>
1036 class btree_iterator {
1037 using key_type = typename Node::key_type;
1038 using size_type = typename Node::size_type;
1039 using params_type = typename Node::params_type;
1040 using is_map_container = typename params_type::is_map_container;
1042 using node_type = Node;
1043 using normal_node = typename std::remove_const<Node>::type;
1044 using const_node = const Node;
1045 using normal_pointer = typename params_type::pointer;
1046 using normal_reference = typename params_type::reference;
1047 using const_pointer = typename params_type::const_pointer;
1048 using const_reference = typename params_type::const_reference;
1049 using slot_type = typename params_type::slot_type;
1052 btree_iterator<normal_node, normal_reference, normal_pointer>;
1053 using const_iterator =
1054 btree_iterator<const_node, const_reference, const_pointer>;
1057 // These aliases are public for std::iterator_traits.
1058 using difference_type = typename Node::difference_type;
1059 using value_type = typename params_type::value_type;
1060 using pointer = Pointer;
1061 using reference = Reference;
1062 using iterator_category = std::bidirectional_iterator_tag;
1064 btree_iterator() : btree_iterator(nullptr, -1) {}
1065 explicit btree_iterator(Node *n) : btree_iterator(n, n->start()) {}
1066 btree_iterator(Node *n, int p) : node_(n), position_(p) {
1067 #ifdef ABSL_BTREE_ENABLE_GENERATIONS
1068 // Use `~uint32_t{}` as a sentinel value for iterator generations so it
1069 // doesn't match the initial value for the actual generation.
1070 generation_ = n != nullptr ? n->generation() : ~uint32_t{};
1074 // NOTE: this SFINAE allows for implicit conversions from iterator to
1075 // const_iterator, but it specifically avoids hiding the copy constructor so
1076 // that the trivial one will be used when possible.
1077 template <typename N, typename R, typename P,
1079 std::is_same<btree_iterator<N, R, P>, iterator>::value &&
1080 std::is_same<btree_iterator, const_iterator>::value,
1082 btree_iterator(const btree_iterator<N, R, P> other) // NOLINT
1083 : node_(other.node_), position_(other.position_) {
1084 #ifdef ABSL_BTREE_ENABLE_GENERATIONS
1085 generation_ = other.generation_;
1089 bool operator==(const iterator &other) const {
1090 return node_ == other.node_ && position_ == other.position_;
1092 bool operator==(const const_iterator &other) const {
1093 return node_ == other.node_ && position_ == other.position_;
1095 bool operator!=(const iterator &other) const {
1096 return node_ != other.node_ || position_ != other.position_;
1098 bool operator!=(const const_iterator &other) const {
1099 return node_ != other.node_ || position_ != other.position_;
1102 // Accessors for the key/value the iterator is pointing at.
1103 reference operator*() const {
1104 ABSL_HARDENING_ASSERT(node_ != nullptr);
1105 ABSL_HARDENING_ASSERT(node_->start() <= position_);
1106 ABSL_HARDENING_ASSERT(node_->finish() > position_);
1107 assert_valid_generation();
1108 return node_->value(position_);
1110 pointer operator->() const { return &operator*(); }
1112 btree_iterator &operator++() {
1116 btree_iterator &operator--() {
1120 btree_iterator operator++(int) {
1121 btree_iterator tmp = *this;
1125 btree_iterator operator--(int) {
1126 btree_iterator tmp = *this;
1133 friend const_iterator;
1134 template <typename Params>
1136 template <typename Tree>
1137 friend class btree_container;
1138 template <typename Tree>
1139 friend class btree_set_container;
1140 template <typename Tree>
1141 friend class btree_map_container;
1142 template <typename Tree>
1143 friend class btree_multiset_container;
1144 template <typename TreeType, typename CheckerType>
1145 friend class base_checker;
1146 friend struct btree_access;
1148 // This SFINAE allows explicit conversions from const_iterator to
1149 // iterator, but also avoids hiding the copy constructor.
1150 // NOTE: the const_cast is safe because this constructor is only called by
1151 // non-const methods and the container owns the nodes.
1152 template <typename N, typename R, typename P,
1154 std::is_same<btree_iterator<N, R, P>, const_iterator>::value &&
1155 std::is_same<btree_iterator, iterator>::value,
1157 explicit btree_iterator(const btree_iterator<N, R, P> other)
1158 : node_(const_cast<node_type *>(other.node_)),
1159 position_(other.position_) {
1160 #ifdef ABSL_BTREE_ENABLE_GENERATIONS
1161 generation_ = other.generation_;
1165 // Increment/decrement the iterator.
1167 assert_valid_generation();
1168 if (node_->is_leaf() && ++position_ < node_->finish()) {
1173 void increment_slow();
1176 assert_valid_generation();
1177 if (node_->is_leaf() && --position_ >= node_->start()) {
1182 void decrement_slow();
1184 // Updates the generation. For use internally right before we return an
1185 // iterator to the user.
1186 void update_generation() {
1187 #ifdef ABSL_BTREE_ENABLE_GENERATIONS
1188 if (node_ != nullptr) generation_ = node_->generation();
1192 const key_type &key() const { return node_->key(position_); }
1193 decltype(std::declval<Node *>()->slot(0)) slot() {
1194 return node_->slot(position_);
1197 void assert_valid_generation() const {
1198 #ifdef ABSL_BTREE_ENABLE_GENERATIONS
1199 if (node_ != nullptr && node_->generation() != generation_) {
1202 "Attempting
to use an invalidated
iterator. The corresponding
b-tree
"
1208 // The node in the tree the iterator is pointing at.
1210 // The position within the node of the tree the iterator is pointing at.
1211 // NOTE: this is an int rather than a field_type because iterators can point
1212 // to invalid positions (such as -1) in certain circumstances.
1214 #ifdef ABSL_BTREE_ENABLE_GENERATIONS
1215 // Used to check that the iterator hasn't been invalidated.
1216 uint32_t generation_;
1220 template <typename Params>
1222 using node_type = btree_node<Params>;
1223 using is_key_compare_to = typename Params::is_key_compare_to;
1224 using field_type = typename node_type::field_type;
1226 // We use a static empty node for the root/leftmost/rightmost of empty btrees
1227 // in order to avoid branching in begin()/end().
1228 struct alignas(node_type::Alignment()) EmptyNodeType : node_type {
1229 using field_type = typename node_type::field_type;
1231 #ifdef ABSL_BTREE_ENABLE_GENERATIONS
1232 uint32_t generation = 0;
1234 field_type position = 0;
1235 field_type start = 0;
1236 field_type finish = 0;
1237 // max_count must be != kInternalNodeMaxCount (so that this node is regarded
1238 // as a leaf node). max_count() is never called when the tree is empty.
1239 field_type max_count = node_type::kInternalNodeMaxCount + 1;
1242 // MSVC has constexpr code generation bugs here.
1243 EmptyNodeType() : parent(this) {}
1245 constexpr EmptyNodeType(node_type *p) : parent(p) {}
1249 static node_type *EmptyNode() {
1251 static EmptyNodeType *empty_node = new EmptyNodeType;
1252 // This assert fails on some other construction methods.
1253 assert(empty_node->parent == empty_node);
1256 static constexpr EmptyNodeType empty_node(
1257 const_cast<EmptyNodeType *>(&empty_node));
1258 return const_cast<EmptyNodeType *>(&empty_node);
1263 kNodeSlots = node_type::kNodeSlots,
1264 kMinNodeValues = kNodeSlots / 2,
1268 using size_type = typename Params::size_type;
1270 node_stats(size_type l, size_type i) : leaf_nodes(l), internal_nodes(i) {}
1272 node_stats &operator+=(const node_stats &other) {
1273 leaf_nodes += other.leaf_nodes;
1274 internal_nodes += other.internal_nodes;
1278 size_type leaf_nodes;
1279 size_type internal_nodes;
1283 using key_type = typename Params::key_type;
1284 using value_type = typename Params::value_type;
1285 using size_type = typename Params::size_type;
1286 using difference_type = typename Params::difference_type;
1287 using key_compare = typename Params::key_compare;
1288 using original_key_compare = typename Params::original_key_compare;
1289 using value_compare = typename Params::value_compare;
1290 using allocator_type = typename Params::allocator_type;
1291 using reference = typename Params::reference;
1292 using const_reference = typename Params::const_reference;
1293 using pointer = typename Params::pointer;
1294 using const_pointer = typename Params::const_pointer;
1296 typename btree_iterator<node_type, reference, pointer>::iterator;
1297 using const_iterator = typename iterator::const_iterator;
1298 using reverse_iterator = std::reverse_iterator<iterator>;
1299 using const_reverse_iterator = std::reverse_iterator<const_iterator>;
1300 using node_handle_type = node_handle<Params, Params, allocator_type>;
1302 // Internal types made public for use by btree_container types.
1303 using params_type = Params;
1304 using slot_type = typename Params::slot_type;
1307 // Copies or moves (depending on the template parameter) the values in
1308 // other into this btree in their order in other. This btree must be empty
1309 // before this method is called. This method is used in copy construction,
1310 // copy assignment, and move assignment.
1311 template <typename Btree>
1312 void copy_or_move_values_in_order(Btree &other);
1314 // Validates that various assumptions/requirements are true at compile time.
1315 constexpr static bool static_assert_validation();
1318 btree(const key_compare &comp, const allocator_type &alloc)
1319 : root_(EmptyNode()), rightmost_(comp, alloc, EmptyNode()), size_(0) {}
1321 btree(const btree &other) : btree(other, other.allocator()) {}
1322 btree(const btree &other, const allocator_type &alloc)
1323 : btree(other.key_comp(), alloc) {
1324 copy_or_move_values_in_order(other);
1326 btree(btree &&other) noexcept
1327 : root_(absl::exchange(other.root_, EmptyNode())),
1328 rightmost_(std::move(other.rightmost_)),
1329 size_(absl::exchange(other.size_, 0)) {
1330 other.mutable_rightmost() = EmptyNode();
1332 btree(btree &&other, const allocator_type &alloc)
1333 : btree(other.key_comp(), alloc) {
1334 if (alloc == other.allocator()) {
1337 // Move values from `other` one at a time when allocators are different.
1338 copy_or_move_values_in_order(other);
1343 // Put static_asserts in destructor to avoid triggering them before the type
1345 static_assert(static_assert_validation(), "This call must be elided.");
1349 // Assign the contents of other to *this.
1350 btree &operator=(const btree &other);
1351 btree &operator=(btree &&other) noexcept;
1353 iterator begin() { return iterator(leftmost()); }
1354 const_iterator begin() const { return const_iterator(leftmost()); }
1355 iterator end() { return iterator(rightmost(), rightmost()->finish()); }
1356 const_iterator end() const {
1357 return const_iterator(rightmost(), rightmost()->finish());
1359 reverse_iterator rbegin() { return reverse_iterator(end()); }
1360 const_reverse_iterator rbegin() const {
1361 return const_reverse_iterator(end());
1363 reverse_iterator rend() { return reverse_iterator(begin()); }
1364 const_reverse_iterator rend() const {
1365 return const_reverse_iterator(begin());
1368 // Finds the first element whose key is not less than `key`.
1369 template <typename K>
1370 iterator lower_bound(const K &key) {
1371 return internal_end(internal_lower_bound(key).value);
1373 template <typename K>
1374 const_iterator lower_bound(const K &key) const {
1375 return internal_end(internal_lower_bound(key).value);
1378 // Finds the first element whose key is not less than `key` and also returns
1379 // whether that element is equal to `key`.
1380 template <typename K>
1381 std::pair<iterator, bool> lower_bound_equal(const K &key) const;
1383 // Finds the first element whose key is greater than `key`.
1384 template <typename K>
1385 iterator upper_bound(const K &key) {
1386 return internal_end(internal_upper_bound(key));
1388 template <typename K>
1389 const_iterator upper_bound(const K &key) const {
1390 return internal_end(internal_upper_bound(key));
1393 // Finds the range of values which compare equal to key. The first member of
1394 // the returned pair is equal to lower_bound(key). The second member of the
1395 // pair is equal to upper_bound(key).
1396 template <typename K>
1397 std::pair<iterator, iterator> equal_range(const K &key);
1398 template <typename K>
1399 std::pair<const_iterator, const_iterator> equal_range(const K &key) const {
1400 return const_cast<btree *>(this)->equal_range(key);
1403 // Inserts a value into the btree only if it does not already exist. The
1404 // boolean return value indicates whether insertion succeeded or failed.
1405 // Requirement: if `key` already exists in the btree, does not consume `args`.
1406 // Requirement: `key` is never referenced after consuming `args`.
1407 template <typename K, typename... Args>
1408 std::pair<iterator, bool> insert_unique(const K &key, Args &&... args);
1410 // Inserts with hint. Checks to see if the value should be placed immediately
1411 // before `position` in the tree. If so, then the insertion will take
1412 // amortized constant time. If not, the insertion will take amortized
1413 // logarithmic time as if a call to insert_unique() were made.
1414 // Requirement: if `key` already exists in the btree, does not consume `args`.
1415 // Requirement: `key` is never referenced after consuming `args`.
1416 template <typename K, typename... Args>
1417 std::pair<iterator, bool> insert_hint_unique(iterator position,
1421 // Insert a range of values into the btree.
1422 // Note: the first overload avoids constructing a value_type if the key
1423 // already exists in the btree.
1424 template <typename InputIterator,
1425 typename = decltype(std::declval<const key_compare &>()(
1426 params_type::key(*std::declval<InputIterator>()),
1427 std::declval<const key_type &>()))>
1428 void insert_iterator_unique(InputIterator b, InputIterator e, int);
1429 // We need the second overload for cases in which we need to construct a
1430 // value_type in order to compare it with the keys already in the btree.
1431 template <typename InputIterator>
1432 void insert_iterator_unique(InputIterator b, InputIterator e, char);
1434 // Inserts a value into the btree.
1435 template <typename ValueType>
1436 iterator insert_multi(const key_type &key, ValueType &&v);
1438 // Inserts a value into the btree.
1439 template <typename ValueType>
1440 iterator insert_multi(ValueType &&v) {
1441 return insert_multi(params_type::key(v), std::forward<ValueType>(v));
1444 // Insert with hint. Check to see if the value should be placed immediately
1445 // before position in the tree. If it does, then the insertion will take
1446 // amortized constant time. If not, the insertion will take amortized
1447 // logarithmic time as if a call to insert_multi(v) were made.
1448 template <typename ValueType>
1449 iterator insert_hint_multi(iterator position, ValueType &&v);
1451 // Insert a range of values into the btree.
1452 template <typename InputIterator>
1453 void insert_iterator_multi(InputIterator b, InputIterator e);
1455 // Erase the specified iterator from the btree. The iterator must be valid
1456 // (i.e. not equal to end()). Return an iterator pointing to the node after
1457 // the one that was erased (or end() if none exists).
1458 // Requirement: does not read the value at `*iter`.
1459 iterator erase(iterator iter);
1461 // Erases range. Returns the number of keys erased and an iterator pointing
1462 // to the element after the last erased element.
1463 std::pair<size_type, iterator> erase_range(iterator begin, iterator end);
1465 // Finds an element with key equivalent to `key` or returns `end()` if `key`
1467 template <typename K>
1468 iterator find(const K &key) {
1469 return internal_end(internal_find(key));
1471 template <typename K>
1472 const_iterator find(const K &key) const {
1473 return internal_end(internal_find(key));
1476 // Clear the btree, deleting all of the values it contains.
1479 // Swaps the contents of `this` and `other`.
1480 void swap(btree &other);
1482 const key_compare &key_comp() const noexcept {
1483 return rightmost_.template get<0>();
1485 template <typename K1, typename K2>
1486 bool compare_keys(const K1 &a, const K2 &b) const {
1487 return compare_internal::compare_result_as_less_than(key_comp()(a, b));
1490 value_compare value_comp() const {
1491 return value_compare(original_key_compare(key_comp()));
1494 // Verifies the structure of the btree.
1495 void verify() const;
1498 size_type size() const { return size_; }
1499 size_type max_size() const { return (std::numeric_limits<size_type>::max)(); }
1500 bool empty() const { return size_ == 0; }
1502 // The height of the btree. An empty tree will have height 0.
1503 size_type height() const {
1506 // Count the length of the chain from the leftmost node up to the
1507 // root. We actually count from the root back around to the level below
1508 // the root, but the calculation is the same because of the circularity
1509 // of that traversal.
1510 const node_type *n = root();
1514 } while (n != root());
1519 // The number of internal, leaf and total nodes used by the btree.
1520 size_type leaf_nodes() const { return internal_stats(root()).leaf_nodes; }
1521 size_type internal_nodes() const {
1522 return internal_stats(root()).internal_nodes;
1524 size_type nodes() const {
1525 node_stats stats = internal_stats(root());
1526 return stats.leaf_nodes + stats.internal_nodes;
1529 // The total number of bytes used by the btree.
1530 // TODO(b/169338300): update to support node_btree_*.
1531 size_type bytes_used() const {
1532 node_stats stats = internal_stats(root());
1533 if (stats.leaf_nodes == 1 && stats.internal_nodes == 0) {
1534 return sizeof(*this) + node_type::LeafSize(root()->max_count());
1536 return sizeof(*this) + stats.leaf_nodes * node_type::LeafSize() +
1537 stats.internal_nodes * node_type::InternalSize();
1541 // The average number of bytes used per value stored in the btree assuming
1542 // random insertion order.
1543 static double average_bytes_per_value() {
1544 // The expected number of values per node with random insertion order is the
1545 // average of the maximum and minimum numbers of values per node.
1546 const double expected_values_per_node =
1547 (kNodeSlots + kMinNodeValues) / 2.0;
1548 return node_type::LeafSize() / expected_values_per_node;
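// Worked example (using the illustrative 30-slot leaf from above): with
// kNodeSlots = 30 and kMinNodeValues = 15, expected_values_per_node =
// (30 + 15) / 2.0 = 22.5, so a 256-byte leaf averages 256 / 22.5, roughly
// 11.4 bytes per value.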
1551 // The fullness of the btree. Computed as the number of elements in the btree
1552 // divided by the maximum number of elements a tree with the current number
1553 // of nodes could hold. A value of 1 indicates perfect space
1554 // utilization. Smaller values indicate space wastage.
1555 // Returns 0 for empty trees.
1556 double fullness() const {
1557 if (empty()) return 0.0;
1558 return static_cast<double>(size()) / (nodes() * kNodeSlots);
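// Example: 45 values spread over 3 nodes of 30 slots each gives fullness
// 45 / 90 = 0.5.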
1560 // The overhead of the btree structure in bytes per node. Computed as the
1561 // total number of bytes used by the btree minus the number of bytes used for
1562 // storing elements divided by the number of elements.
1563 // Returns 0 for empty trees.
1564 double overhead() const {
1565 if (empty()) return 0.0;
1566 return (bytes_used() - size() * sizeof(value_type)) /
1567 static_cast<double>(size());
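// Example: if bytes_used() is 1024 for 50 values of 8 bytes each, overhead
// is (1024 - 400) / 50 = 12.48 bytes per value.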
1570 // The allocator used by the btree.
1571 allocator_type get_allocator() const { return allocator(); }
1574 friend struct btree_access;
1576 // Internal accessor routines.
1577 node_type *root() { return root_; }
1578 const node_type *root() const { return root_; }
1579 node_type *&mutable_root() noexcept { return root_; }
1580 node_type *rightmost() { return rightmost_.template get<2>(); }
1581 const node_type *rightmost() const { return rightmost_.template get<2>(); }
1582 node_type *&mutable_rightmost() noexcept {
1583 return rightmost_.template get<2>();
1585 key_compare *mutable_key_comp() noexcept {
1586 return &rightmost_.template get<0>();
1589 // The leftmost node is stored as the parent of the root node.
1590 node_type *leftmost() { return root()->parent(); }
1591 const node_type *leftmost() const { return root()->parent(); }
1593 // Allocator routines.
1594 allocator_type *mutable_allocator() noexcept {
1595 return &rightmost_.template get<1>();
1597 const allocator_type &allocator() const noexcept {
1598 return rightmost_.template get<1>();
1601 // Allocates a correctly aligned node of at least size bytes using the
1603 node_type *allocate(const size_type size) {
1604 return reinterpret_cast<node_type *>(
1605 absl::container_internal::Allocate<node_type::Alignment()>(
1606 mutable_allocator(), size));
1609 // Node creation/deletion routines.
1610 node_type *new_internal_node(node_type *parent) {
1611 node_type *n = allocate(node_type::InternalSize());
1612 n->init_internal(parent);
1615 node_type *new_leaf_node(node_type *parent) {
1616 node_type *n = allocate(node_type::LeafSize());
1617 n->init_leaf(kNodeSlots, parent);
1620 node_type *new_leaf_root_node(const int max_count) {
1621 node_type *n = allocate(node_type::LeafSize(max_count));
1622 n->init_leaf(max_count, /*parent=*/n);
1626 // Deletion helper routines.
1627 iterator rebalance_after_delete(iterator iter);
1629 // Rebalances or splits the node iter points to.
1630 void rebalance_or_split(iterator *iter);
1632 // Merges the values of left, right and the delimiting key on their parent
1633 // onto left, removing the delimiting key and deleting right.
1634 void merge_nodes(node_type *left, node_type *right);
1636 // Tries to merge node with its left or right sibling, and failing that,
1637 // rebalance with its left or right sibling. Returns true if a merge
1638 // occurred, at which point it is no longer valid to access node. Returns
1639 // false if no merging took place.
1640 bool try_merge_or_rebalance(iterator *iter);
1642 // Tries to shrink the height of the tree by 1.
1645 iterator internal_end(iterator iter) {
1646 return iter.node_ != nullptr ? iter : end();
1648 const_iterator internal_end(const_iterator iter) const {
1649 return iter.node_ != nullptr ? iter : end();
1652 // Emplaces a value into the btree immediately before iter. Requires that
1653 // key(v) <= iter.key() and (--iter).key() <= key(v).
1654 template <typename... Args>
1655 iterator internal_emplace(iterator iter, Args &&... args);
1657 // Returns an iterator pointing to the first value >= the value "iter" is
1658 // pointing at. Note that "iter" might be pointing to an invalid location such
1659 // as iter.position_ == iter.node_->finish(). This routine simply moves iter
1660 // up in the tree to a valid location. Requires: iter.node_ is non-null.
1661 template <typename IterType>
1662 static IterType internal_last(IterType iter);
1664 // Returns an iterator pointing to the leaf position at which key would
1665 // reside in the tree, unless there is an exact match - in which case, the
1666 // result may not be on a leaf. When there's a three-way comparator, we can
1667 // return whether there was an exact match. This allows the caller to avoid a
1668 // subsequent comparison to determine if an exact match was made, which is
1669 // important for keys with expensive comparison, such as strings.
1670 template <typename K>
1671 SearchResult<iterator, is_key_compare_to::value> internal_locate(
1672 const K &key) const;
1674 // Internal routine which implements lower_bound().
1675 template <typename K>
1676 SearchResult<iterator, is_key_compare_to::value> internal_lower_bound(
1677 const K &key) const;
1679 // Internal routine which implements upper_bound().
1680 template <typename K>
1681 iterator internal_upper_bound(const K &key) const;
1683 // Internal routine which implements find().
1684 template <typename K>
1685 iterator internal_find(const K &key) const;
1687 // Verifies the tree structure of node.
1688 int internal_verify(const node_type *node, const key_type *lo,
1689 const key_type *hi) const;
1691 node_stats internal_stats(const node_type *node) const {
1692 // The root can be a static empty node.
1693 if (node == nullptr || (node == root() && empty())) {
1694 return node_stats(0, 0);
1696 if (node->is_leaf()) {
1697 return node_stats(1, 0);
1699 node_stats res(0, 1);
1700 for (int i = node->start(); i <= node->finish(); ++i) {
1701 res += internal_stats(node->child(i));
1708 // A pointer to the rightmost node. Note that the leftmost node is stored as
1709 // the root's parent. We use compressed tuple in order to save space because
1710 // key_compare and allocator_type are usually empty.
1711 absl::container_internal::CompressedTuple<key_compare, allocator_type,
1715 // Number of values.
1720 // btree_node methods
1721 template <typename P>
1722 template <typename... Args>
1723 inline void btree_node<P>::emplace_value(const size_type i,
1724 allocator_type *alloc,
1726 assert(i >= start());
1727 assert(i <= finish());
1728 // Shift old values to create space for new value and then construct it in
1731 transfer_n_backward(finish() - i, /*dest_i=*/i + 1, /*src_i=*/i, this,
1734 value_init(i, alloc, std::forward<Args>(args)...);
1735 set_finish(finish() + 1);
1737 if (is_internal() && finish() > i + 1) {
1738 for (field_type j = finish(); j > i + 1; --j) {
1739 set_child(j, child(j - 1));
1745 template <typename P>
1746 inline void btree_node<P>::remove_values(const field_type i,
1747 const field_type to_erase,
1748 allocator_type *alloc) {
1749 // Transfer values after the removed range into their new places.
1750 value_destroy_n(i, to_erase, alloc);
1751 const field_type orig_finish = finish();
1752 const field_type src_i = i + to_erase;
1753 transfer_n(orig_finish - src_i, i, src_i, this, alloc);
1755 if (is_internal()) {
1756 // Delete all children between begin and end.
1757 for (int j = 0; j < to_erase; ++j) {
1758 clear_and_delete(child(i + j + 1), alloc);
1760 // Rotate children after end into new positions.
1761 for (int j = i + to_erase + 1; j <= orig_finish; ++j) {
1762 set_child(j - to_erase, child(j));
1766 set_finish(orig_finish - to_erase);
1769 template <typename P>
1770 void btree_node<P>::rebalance_right_to_left(const int to_move,
1772 allocator_type *alloc) {
1773 assert(parent() == right->parent());
1774 assert(position() + 1 == right->position());
1775 assert(right->count() >= count());
1776 assert(to_move >= 1);
1777 assert(to_move <= right->count());
1779 // 1) Move the delimiting value in the parent to the left node.
1780 transfer(finish(), position(), parent(), alloc);
1782 // 2) Move the (to_move - 1) values from the right node to the left node.
1783 transfer_n(to_move - 1, finish() + 1, right->start(), right, alloc);
1785 // 3) Move the new delimiting value to the parent from the right node.
1786 parent()->transfer(position(), right->start() + to_move - 1, right, alloc);
1788 // 4) Shift the values in the right node to their correct positions.
1789 right->transfer_n(right->count() - to_move, right->start(),
1790 right->start() + to_move, right, alloc);
1792 if (is_internal()) {
1793 // Move the child pointers from the right to the left node.
1794 for (int i = 0; i < to_move; ++i) {
1795 init_child(finish() + i + 1, right->child(i));
1797 for (int i = right->start(); i <= right->finish() - to_move; ++i) {
1798 assert(i + to_move <= right->max_count());
1799 right->init_child(i, right->child(i + to_move));
1800 right->clear_child(i + to_move);
1804 // Fixup `finish` on the left and right nodes.
1805 set_finish(finish() + to_move);
1806 right->set_finish(right->finish() - to_move);
1807 }
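// Worked example of the four steps above (illustrative, values hypothetical):
// left = [1], parent delimiter = 3, right = [4, 5, 6, 7], to_move = 2:
//
//   1) left becomes [1, 3]          // delimiter moves down into the left node
//   2) left becomes [1, 3, 4]       // to_move - 1 == 1 value moves over
//   3) parent delimiter becomes 5   // new delimiter moves up from the right
//   4) right becomes [6, 7]         // remaining values shift down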
1809 template <typename P>
1810 void btree_node<P>::rebalance_left_to_right(const int to_move,
1811 btree_node *right,
1812 allocator_type *alloc) {
1813 assert(parent() == right->parent());
1814 assert(position() + 1 == right->position());
1815 assert(count() >= right->count());
1816 assert(to_move >= 1);
1817 assert(to_move <= count());
1819 // Values in the right node are shifted to the right to make room for the
1820 // new to_move values. Then, the delimiting value in the parent and the
1821 // other (to_move - 1) values in the left node are moved into the right node.
1822 // Lastly, a new delimiting value is moved from the left node into the
1823 // parent, and the remaining empty left node entries are destroyed.
1825 // 1) Shift existing values in the right node to their correct positions.
1826 right->transfer_n_backward(right->count(), right->start() + to_move,
1827 right->start(), right, alloc);
1829 // 2) Move the delimiting value in the parent to the right node.
1830 right->transfer(right->start() + to_move - 1, position(), parent(), alloc);
1832 // 3) Move the (to_move - 1) values from the left node to the right node.
1833 right->transfer_n(to_move - 1, right->start(), finish() - (to_move - 1), this,
1834 alloc);
1836 // 4) Move the new delimiting value to the parent from the left node.
1837 parent()->transfer(position(), finish() - to_move, this, alloc);
1839 if (is_internal()) {
1840 // Move the child pointers from the left to the right node.
1841 for (int i = right->finish(); i >= right->start(); --i) {
1842 right->init_child(i + to_move, right->child(i));
1843 right->clear_child(i);
1844 }
1845 for (int i = 1; i <= to_move; ++i) {
1846 right->init_child(i - 1, child(finish() - to_move + i));
1847 clear_child(finish() - to_move + i);
1848 }
1849 }
1851 // Fixup the counts on the left and right nodes.
1852 set_finish(finish() - to_move);
1853 right->set_finish(right->finish() + to_move);
1854 }
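// Worked example, mirroring the steps above (illustrative, values
// hypothetical): left = [1, 2, 3, 4], parent delimiter = 5, right = [6, 7],
// to_move = 2:
//
//   1) right becomes [_, _, 6, 7]   // existing values shift right
//   2) right becomes [_, 5, 6, 7]   // delimiter moves down into the right node
//   3) right becomes [4, 5, 6, 7]   // to_move - 1 == 1 value moves over
//   4) parent delimiter becomes 3; left is trimmed to [1, 2]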
1856 template <typename P>
1857 void btree_node<P>::split(const int insert_position, btree_node *dest,
1858 allocator_type *alloc) {
1859 assert(dest->count() == 0);
1860 assert(max_count() == kNodeSlots);
1862 // We bias the split based on the position being inserted. If we're
1863 // inserting at the beginning of the left node then bias the split to put
1864 // more values on the right node. If we're inserting at the end of the
1865 // right node then bias the split to put more values on the left node.
1866 if (insert_position == start()) {
1867 dest->set_finish(dest->start() + finish() - 1);
1868 } else if (insert_position == kNodeSlots) {
1869 dest->set_finish(dest->start());
1870 } else {
1871 dest->set_finish(dest->start() + count() / 2);
1872 }
1873 set_finish(finish() - dest->count());
1874 assert(count() >= 1);
1876 // Move values from the left sibling to the right sibling.
1877 dest->transfer_n(dest->count(), dest->start(), finish(), this, alloc);
1879 // The split key is the largest value in the left sibling.
1880 --mutable_finish();
1881 parent()->emplace_value(position(), alloc, finish_slot());
1882 value_destroy(finish(), alloc);
1883 parent()->init_child(position() + 1, dest);
1885 if (is_internal()) {
1886 for (int i = dest->start(), j = finish() + 1; i <= dest->finish();
1887 ++i, ++j) {
1888 assert(child(j) != nullptr);
1889 dest->init_child(i, child(j));
1890 clear_child(j);
1891 }
1892 }
1893 }
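// For example (illustrative, not from the original source): with
// kNodeSlots == 4 and a full leaf [1, 2, 3, 4], a split for an insert at the
// end (insert_position == kNodeSlots) creates an empty right sibling, so the
// pending insert becomes a cheap append; a split for an insert at the front
// instead sends 2, 3, 4 to the new right sibling and promotes 1 to the
// parent, leaving room at the front of the left node.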
1895 template <typename P>
1896 void btree_node<P>::merge(btree_node *src, allocator_type *alloc) {
1897 assert(parent() == src->parent());
1898 assert(position() + 1 == src->position());
1900 // Move the delimiting value to the left node.
1901 value_init(finish(), alloc, parent()->slot(position()));
1903 // Move the values from the right to the left node.
1904 transfer_n(src->count(), finish() + 1, src->start(), src, alloc);
1906 if (is_internal()) {
1907 // Move the child pointers from the right to the left node.
1908 for (int i = src->start(), j = finish() + 1; i <= src->finish(); ++i, ++j) {
1909 init_child(j, src->child(i));
1910 src->clear_child(i);
1911 }
1912 }
1914 // Fixup `finish` on the src and dest nodes.
1915 set_finish(start() + 1 + count() + src->count());
1916 src->set_finish(src->start());
1918 // Remove the value on the parent node and delete the src node.
1919 parent()->remove_values(position(), /*to_erase=*/1, alloc);
1922 template <typename P>
1923 void btree_node<P>::clear_and_delete(btree_node *node, allocator_type *alloc) {
1924 if (node->is_leaf()) {
1925 node->value_destroy_n(node->start(), node->count(), alloc);
1926 deallocate(LeafSize(node->max_count()), node, alloc);
1929 if (node->count() == 0) {
1930 deallocate(InternalSize(), node, alloc);
1934 // The parent of the root of the subtree we are deleting.
1935 btree_node *delete_root_parent = node->parent();
1937 // Navigate to the leftmost leaf under node, and then delete upwards.
1938 while (node->is_internal()) node = node->start_child();
1939 #ifdef ABSL_BTREE_ENABLE_GENERATIONS
1940 // When generations are enabled, we delete the leftmost leaf last in case it's
1941 // the parent of the root and we need to check whether it's a leaf before we
1942 // can update the root's generation.
1943 // TODO(ezb): if we change btree_node::is_root to check a bool inside the node
1944 // instead of checking whether the parent is a leaf, we can remove this logic.
1945 btree_node *leftmost_leaf = node;
1946 #endif
1947 // Use `int` because `pos` needs to be able to hold `kNodeSlots+1`, which
1948 // isn't guaranteed to be a valid `field_type`.
1949 int pos = node->position();
1950 btree_node *parent = node->parent();
1952 // In each iteration of the next loop, we delete one leaf node and go right.
1953 assert(pos <= parent->finish());
1954 do {
1955 node = parent->child(pos);
1956 if (node->is_internal()) {
1957 // Navigate to the leftmost leaf under node.
1958 while (node->is_internal()) node = node->start_child();
1959 pos = node->position();
1960 parent = node->parent();
1962 node->value_destroy_n(node->start(), node->count(), alloc);
1963 #ifdef ABSL_BTREE_ENABLE_GENERATIONS
1964 if (leftmost_leaf != node)
1965 #endif
1966 deallocate(LeafSize(node->max_count()), node, alloc);
1967 ++pos;
1968 } while (pos <= parent->finish());
1970 // Once we've deleted all children of parent, delete parent and go up/right.
1971 assert(pos > parent->finish());
1972 do {
1973 node = parent;
1974 pos = node->position();
1975 parent = node->parent();
1976 node->value_destroy_n(node->start(), node->count(), alloc);
1977 deallocate(InternalSize(), node, alloc);
1978 if (parent == delete_root_parent) {
1979 #ifdef ABSL_BTREE_ENABLE_GENERATIONS
1980 deallocate(LeafSize(leftmost_leaf->max_count()), leftmost_leaf, alloc);
1981 #endif
1982 return;
1983 }
1984 pos = node->position() + 1;
1985 } while (pos > parent->finish());
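// Traversal sketch (illustrative, not from the original source): for an
// internal root R with leaf children [A, B, C], the loops above destroy the
// values in A, B and C from left to right, deallocate each leaf, and only
// then deallocate R itself, i.e. an iterative post-order deletion that avoids
// recursion on tall trees.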
1990 // btree_iterator methods
1991 template <typename N, typename R, typename P>
1992 void btree_iterator<N, R, P>::increment_slow() {
1993 if (node_->is_leaf()) {
1994 assert(position_ >= node_->finish());
1995 btree_iterator save(*this);
1996 while (position_ == node_->finish() && !node_->is_root()) {
1997 assert(node_->parent()->child(node_->position()) == node_);
1998 position_ = node_->position();
1999 node_ = node_->parent();
2001 // TODO(ezb): assert we aren't incrementing end() instead of handling.
2002 if (position_ == node_->finish()) {
2003 *this = save;
2004 }
2005 } else {
2006 assert(position_ < node_->finish());
2007 node_ = node_->child(position_ + 1);
2008 while (node_->is_internal()) {
2009 node_ = node_->start_child();
2010 }
2011 position_ = node_->start();
2012 }
2013 }
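// For example (illustrative, not from the original source): incrementing an
// iterator that sits on the last value of a leaf walks up with the first loop
// until the current node is no longer the last child of its parent, then
// descends to the leftmost leaf of the next subtree; when incrementing from
// the rightmost value, the saved copy is restored instead.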
2015 template <typename N, typename R, typename P>
2016 void btree_iterator<N, R, P>::decrement_slow() {
2017 if (node_->is_leaf()) {
2018 assert(position_ <= -1);
2019 btree_iterator save(*this);
2020 while (position_ < node_->start() && !node_->is_root()) {
2021 assert(node_->parent()->child(node_->position()) == node_);
2022 position_ = node_->position() - 1;
2023 node_ = node_->parent();
2025 // TODO(ezb): assert we aren't decrementing begin() instead of handling.
2026 if (position_ < node_->start()) {
2027 *this = save;
2028 }
2029 } else {
2030 assert(position_ >= node_->start());
2031 node_ = node_->child(position_);
2032 while (node_->is_internal()) {
2033 node_ = node_->child(node_->finish());
2034 }
2035 position_ = node_->finish() - 1;
2036 }
2037 }
2041 template <typename P>
2042 template <typename Btree>
2043 void btree<P>::copy_or_move_values_in_order(Btree &other) {
2044 static_assert(std::is_same<btree, Btree>::value ||
2045 std::is_same<const btree, Btree>::value,
2046 "Btree
type must be same or
const.
");
2049 // We can avoid key comparisons because we know the order of the
2050 // values is the same order we'll store them in.
2051 auto iter = other.begin();
2052 if (iter == other.end()) return;
2053 insert_multi(iter.slot());
2054 ++iter;
2055 for (; iter != other.end(); ++iter) {
2056 // If the btree is not empty, we can just insert the new value at the end
2057 // of the tree.
2058 internal_emplace(end(), iter.slot());
2062 template <typename P>
2063 constexpr bool btree<P>::static_assert_validation() {
2064 static_assert(std::is_nothrow_copy_constructible<key_compare>::value,
2065 "Key comparison must be nothrow
copy constructible
");
2066 static_assert(std::is_nothrow_copy_constructible<allocator_type>::value,
2067 "Allocator must be nothrow
copy constructible
");
2068 static_assert(type_traits_internal::is_trivially_copyable<iterator>::value,
2069 "iterator not trivially copyable.
");
2071 // Note: We assert that kTargetValues, which is computed from
2072 // Params::kTargetNodeSize, must fit the node_type::field_type.
2073 static_assert(
2074 kNodeSlots < (1 << (8 * sizeof(typename node_type::field_type))),
2075 "target node size too large");
2077 // Verify that key_compare returns an absl::{weak,strong}_ordering or bool.
2078 static_assert(
2079 compare_has_valid_result_type<key_compare, key_type>(),
2080 "key comparison function must return absl::{weak,strong}_ordering or "
2081 "bool.");
2083 // Test the assumption made in setting kNodeSlotSpace.
2084 static_assert(node_type::MinimumOverhead() >= sizeof(void *) + 4,
2085 "node space assumption incorrect
");
2090 template <typename P>
2091 template <typename K>
2092 auto btree<P>::lower_bound_equal(const K &key) const
2093 -> std::pair<iterator, bool> {
2094 const SearchResult<iterator, is_key_compare_to::value> res =
2095 internal_lower_bound(key);
2096 const iterator lower = iterator(internal_end(res.value));
2097 const bool equal = res.HasMatch()
2098 ? res.IsEq()
2099 : lower != end() && !compare_keys(key, lower.key());
2100 return {lower, equal};
2101 }
2103 template <typename P>
2104 template <typename K>
2105 auto btree<P>::equal_range(const K &key) -> std::pair<iterator, iterator> {
2106 const std::pair<iterator, bool> lower_and_equal = lower_bound_equal(key);
2107 const iterator lower = lower_and_equal.first;
2108 if (!lower_and_equal.second) {
2109 return {lower, lower};
2110 }
2112 const iterator next = std::next(lower);
2113 if (!params_type::template can_have_multiple_equivalent_keys<K>()) {
2114 // The next iterator after lower must point to a key greater than `key`.
2115 // Note: if this assert fails, then it may indicate that the comparator does
2116 // not meet the equivalence requirements for Compare
2117 // (see https://en.cppreference.com/w/cpp/named_req/Compare).
2118 assert(next == end() || compare_keys(key, next.key()));
2119 return {lower, next};
2121 // Try once more to avoid the call to upper_bound() if there's only one
2122 // equivalent key. This should prevent all calls to upper_bound() in cases of
2123 // unique-containers with heterogeneous comparators in which all comparison
2124 // operators have the same equivalence classes.
2125 if (next == end() || compare_keys(key, next.key())) return {lower, next};
2127 // In this case, we need to call upper_bound() to avoid worst case O(N)
2128 // behavior if we were to iterate over equal keys.
2129 return {lower, upper_bound(key)};
2130 }
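// Example through the public containers (illustrative; `m` and its contents
// are hypothetical, not from the original source):
//
//   absl::btree_multimap<int, std::string> m;
//   m.insert({1, "a"});
//   auto r1 = m.equal_range(1);  // one match: resolved without upper_bound()
//   m.insert({1, "b"});
//   auto r2 = m.equal_range(1);  // >1 match: falls back to upper_bound()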
2132 template <typename P>
2133 template <typename K, typename... Args>
2134 auto btree<P>::insert_unique(const K &key, Args &&... args)
2135 -> std::pair<iterator, bool> {
2136 if (empty()) {
2137 mutable_root() = mutable_rightmost() = new_leaf_root_node(1);
2138 }
2140 SearchResult<iterator, is_key_compare_to::value> res = internal_locate(key);
2141 iterator iter = res.value;
2143 if (res.HasMatch()) {
2144 if (res.IsEq()) {
2145 // The key already exists in the tree, do nothing.
2146 return {iter, false};
2147 }
2148 } else {
2149 iterator last = internal_last(iter);
2150 if (last.node_ && !compare_keys(key, last.key())) {
2151 // The key already exists in the tree, do nothing.
2152 return {last, false};
2153 }
2154 }
2155 return {internal_emplace(iter, std::forward<Args>(args)...), true};
2156 }
2158 template <typename P>
2159 template <typename K, typename... Args>
2160 inline auto btree<P>::insert_hint_unique(iterator position, const K &key,
2161 Args &&... args)
2162 -> std::pair<iterator, bool> {
2163 if (!empty()) {
2164 if (position == end() || compare_keys(key, position.key())) {
2165 if (position == begin() || compare_keys(std::prev(position).key(), key)) {
2166 // prev.key() < key < position.key()
2167 return {internal_emplace(position, std::forward<Args>(args)...), true};
2168 }
2169 } else if (compare_keys(position.key(), key)) {
2170 ++position;
2171 if (position == end() || compare_keys(key, position.key())) {
2172 // {original `position`}.key() < key < {current `position`}.key()
2173 return {internal_emplace(position, std::forward<Args>(args)...), true};
2174 }
2175 } else {
2176 // position.key() == key
2177 return {position, false};
2178 }
2179 }
2180 return insert_unique(key, std::forward<Args>(args)...);
2181 }
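// Hint usage through the public containers (illustrative; `s` is
// hypothetical, not from the original source): a correct hint reduces an
// insert to O(1) amortized comparisons, e.g. when appending already-sorted
// data:
//
//   absl::btree_set<int> s;
//   for (int i = 0; i < 100; ++i) s.insert(s.end(), i);  // hint == end()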
2183 template <typename P>
2184 template <typename InputIterator, typename>
2185 void btree<P>::insert_iterator_unique(InputIterator b, InputIterator e, int) {
2186 for (; b != e; ++b) {
2187 insert_hint_unique(end(), params_type::key(*b), *b);
2188 }
2189 }
2191 template <typename P>
2192 template <typename InputIterator>
2193 void btree<P>::insert_iterator_unique(InputIterator b, InputIterator e, char) {
2194 for (; b != e; ++b) {
2195 // Use a node handle to manage a temp slot.
2196 auto node_handle =
2197 CommonAccess::Construct<node_handle_type>(get_allocator(), *b);
2198 slot_type *slot = CommonAccess::GetSlot(node_handle);
2199 insert_hint_unique(end(), params_type::key(slot), slot);
2200 }
2201 }
2203 template <typename P>
2204 template <typename ValueType>
2205 auto btree<P>::insert_multi(const key_type &key, ValueType &&v) -> iterator {
2206 if (empty()) {
2207 mutable_root() = mutable_rightmost() = new_leaf_root_node(1);
2208 }
2210 iterator iter = internal_upper_bound(key);
2211 if (iter.node_ == nullptr) {
2212 iter = end();
2213 }
2214 return internal_emplace(iter, std::forward<ValueType>(v));
2215 }
2217 template <typename P>
2218 template <typename ValueType>
2219 auto btree<P>::insert_hint_multi(iterator position, ValueType &&v) -> iterator {
2220 if (!empty()) {
2221 const key_type &key = params_type::key(v);
2222 if (position == end() || !compare_keys(position.key(), key)) {
2223 if (position == begin() ||
2224 !compare_keys(key, std::prev(position).key())) {
2225 // prev.key() <= key <= position.key()
2226 return internal_emplace(position, std::forward<ValueType>(v));
2227 }
2228 } else {
2229 ++position;
2230 if (position == end() || !compare_keys(position.key(), key)) {
2231 // {original `position`}.key() < key < {current `position`}.key()
2232 return internal_emplace(position, std::forward<ValueType>(v));
2233 }
2234 }
2235 }
2236 return insert_multi(std::forward<ValueType>(v));
2237 }
2239 template <typename P>
2240 template <typename InputIterator>
2241 void btree<P>::insert_iterator_multi(InputIterator b, InputIterator e) {
2242 for (; b != e; ++b) {
2243 insert_hint_multi(end(), *b);
2244 }
2245 }
2247 template <typename P>
2248 auto btree<P>::operator=(const btree &other) -> btree & {
2249 if (this != &other) {
2250 clear();
2252 *mutable_key_comp() = other.key_comp();
2253 if (absl::allocator_traits<
2254 allocator_type>::propagate_on_container_copy_assignment::value) {
2255 *mutable_allocator() = other.allocator();
2256 }
2258 copy_or_move_values_in_order(other);
2259 }
2260 return *this;
2261 }
2263 template <typename P>
2264 auto btree<P>::operator=(btree &&other) noexcept -> btree & {
2265 if (this != &other) {
2266 clear();
2268 using std::swap;
2269 if (absl::allocator_traits<
2270 allocator_type>::propagate_on_container_move_assignment::value) {
2271 swap(root_, other.root_);
2272 // Note: `rightmost_` also contains the allocator and the key comparator.
2273 swap(rightmost_, other.rightmost_);
2274 swap(size_, other.size_);
2275 } else {
2276 if (allocator() == other.allocator()) {
2277 swap(mutable_root(), other.mutable_root());
2278 swap(*mutable_key_comp(), *other.mutable_key_comp());
2279 swap(mutable_rightmost(), other.mutable_rightmost());
2280 swap(size_, other.size_);
2281 } else {
2282 // We aren't allowed to propagate the allocator and the allocator is
2283 // different so we can't take over its memory. We must move each element
2284 // individually. We need both `other` and `this` to have `other`s key
2285 // comparator while moving the values so we can't swap the key
2286 // comparators.
2287 *mutable_key_comp() = other.key_comp();
2288 copy_or_move_values_in_order(other);
2289 }
2290 }
2291 }
2292 return *this;
2293 }
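// For example (illustrative, not from the original source): with an
// allocator that neither propagates on move assignment nor compares equal
// (e.g. two distinct arenas), the final branch above cannot steal `other`'s
// nodes, so the values are moved one at a time via
// copy_or_move_values_in_order().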
2295 template <typename P>
2296 auto btree<P>::erase(iterator iter) -> iterator {
2297 iter.node_->value_destroy(iter.position_, mutable_allocator());
2298 iter.update_generation();
2300 const bool internal_delete = iter.node_->is_internal();
2301 if (internal_delete) {
2302 // Deletion of a value on an internal node. First, transfer the largest
2303 // value from our left child here, then erase/rebalance from that position.
2304 // We can get to the largest value from our left child by decrementing iter.
2305 iterator internal_iter(iter);
2306 --iter;
2307 assert(iter.node_->is_leaf());
2308 internal_iter.node_->transfer(internal_iter.position_, iter.position_,
2309 iter.node_, mutable_allocator());
2310 } else {
2311 // Shift values after erased position in leaf. In the internal case, we
2312 // don't need to do this because the leaf position is the end of the node.
2313 const field_type transfer_from = iter.position_ + 1;
2314 const field_type num_to_transfer = iter.node_->finish() - transfer_from;
2315 iter.node_->transfer_n(num_to_transfer, iter.position_, transfer_from,
2316 iter.node_, mutable_allocator());
2317 }
2318 // Update node finish and container size.
2319 iter.node_->set_finish(iter.node_->finish() - 1);
2320 --size_;
2322 // We want to return the next value after the one we just erased. If we
2323 // erased from an internal node (internal_delete == true), then the next
2324 // value is ++(++iter). If we erased from a leaf node (internal_delete ==
2325 // false) then the next value is ++iter. Note that ++iter may point to an
2326 // internal node and the value in the internal node may move to a leaf node
2327 // (iter.node_) when rebalancing is performed at the leaf level.
2329 iterator res = rebalance_after_delete(iter);
2331 // If we erased from an internal node, advance the iterator.
2332 if (internal_delete) {
2333 ++res;
2334 }
2335 return res;
2336 }
2338 template <typename P>
2339 auto btree<P>::rebalance_after_delete(iterator iter) -> iterator {
2340 // Merge/rebalance as we walk back up the tree.
2341 iterator res(iter);
2342 bool first_iteration = true;
2343 for (;;) {
2344 if (iter.node_ == root()) {
2345 try_shrink();
2346 if (empty()) {
2347 return end();
2348 }
2349 break;
2350 }
2351 if (iter.node_->count() >= kMinNodeValues) {
2352 break;
2353 }
2354 bool merged = try_merge_or_rebalance(&iter);
2355 // On the first iteration, we should update `res` with `iter` because `res`
2356 // may have been invalidated.
2357 if (first_iteration) {
2358 res = iter;
2359 first_iteration = false;
2360 }
2361 if (!merged) {
2362 break;
2363 }
2364 iter.position_ = iter.node_->position();
2365 iter.node_ = iter.node_->parent();
2366 }
2367 res.update_generation();
2369 // Adjust our return value. If we're pointing at the end of a node, advance
2370 // the iterator.
2371 if (res.position_ == res.node_->finish()) {
2372 res.position_ = res.node_->finish() - 1;
2373 ++res;
2374 }
2375 return res;
2376 }
2379 template <typename P>
2380 auto btree<P>::erase_range(iterator begin, iterator end)
2381 -> std::pair<size_type, iterator> {
2382 difference_type count = std::distance(begin, end);
2383 assert(count >= 0);
2385 if (count == 0) {
2386 return {0, begin};
2387 }
2389 if (static_cast<size_type>(count) == size_) {
2390 clear();
2391 return {count, this->end()};
2392 }
2394 if (begin.node_ == end.node_) {
2395 assert(end.position_ > begin.position_);
2396 begin.node_->remove_values(begin.position_, end.position_ - begin.position_,
2397 mutable_allocator());
2398 size_ -= count;
2399 return {count, rebalance_after_delete(begin)};
2400 }
2402 const size_type target_size = size_ - count;
2403 while (size_ > target_size) {
2404 if (begin.node_->is_leaf()) {
2405 const size_type remaining_to_erase = size_ - target_size;
2406 const size_type remaining_in_node =
2407 begin.node_->finish() - begin.position_;
2408 const size_type to_erase =
2409 (std::min)(remaining_to_erase, remaining_in_node);
2410 begin.node_->remove_values(begin.position_, to_erase,
2411 mutable_allocator());
2412 size_ -= to_erase;
2413 begin = rebalance_after_delete(begin);
2414 } else {
2415 begin = erase(begin);
2416 }
2417 }
2418 begin.update_generation();
2419 return {count, begin};
2420 }
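// Example through the public containers (illustrative; `s` and its contents
// are hypothetical, not from the original source):
//
//   absl::btree_set<int> s = {1, 2, 3, 4, 5};
//   s.erase(s.find(2), s.find(5));  // erases 2, 3, 4; same-leaf runs are
//                                   // removed in a single remove_values call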
2422 template <typename P>
2423 void btree<P>::clear() {
2424 if (!empty()) {
2425 node_type::clear_and_delete(root(), mutable_allocator());
2426 }
2427 mutable_root() = mutable_rightmost() = EmptyNode();
2428 size_ = 0;
2429 }
2431 template <typename P>
2432 void btree<P>::swap(btree &other) {
2433 using std::swap;
2434 if (absl::allocator_traits<
2435 allocator_type>::propagate_on_container_swap::value) {
2436 // Note: `rightmost_` also contains the allocator and the key comparator.
2437 swap(rightmost_, other.rightmost_);
2438 } else {
2439 // It's undefined behavior if the allocators are unequal here.
2440 assert(allocator() == other.allocator());
2441 swap(mutable_rightmost(), other.mutable_rightmost());
2442 swap(*mutable_key_comp(), *other.mutable_key_comp());
2443 }
2444 swap(mutable_root(), other.mutable_root());
2445 swap(size_, other.size_);
2446 }
2448 template <typename P>
2449 void btree<P>::verify() const {
2450 assert(root() != nullptr);
2451 assert(leftmost() != nullptr);
2452 assert(rightmost() != nullptr);
2453 assert(empty() || size() == internal_verify(root(), nullptr, nullptr));
2454 assert(leftmost() == (++const_iterator(root(), -1)).node_);
2455 assert(rightmost() == (--const_iterator(root(), root()->finish())).node_);
2456 assert(leftmost()->is_leaf());
2457 assert(rightmost()->is_leaf());
2458 }
2460 template <typename P>
2461 void btree<P>::rebalance_or_split(iterator *iter) {
2462 node_type *&node = iter->node_;
2463 int &insert_position = iter->position_;
2464 assert(node->count() == node->max_count());
2465 assert(kNodeSlots == node->max_count());
2467 // First try to make room on the node by rebalancing.
2468 node_type *parent = node->parent();
2469 if (node != root()) {
2470 if (node->position() > parent->start()) {
2471 // Try rebalancing with our left sibling.
2472 node_type *left = parent->child(node->position() - 1);
2473 assert(left->max_count() == kNodeSlots);
2474 if (left->count() < kNodeSlots) {
2475 // We bias rebalancing based on the position being inserted. If we're
2476 // inserting at the end of the right node then we bias rebalancing to
2477 // fill up the left node.
2478 int to_move = (kNodeSlots - left->count()) /
2479 (1 + (insert_position < static_cast<int>(kNodeSlots)));
2480 to_move = (std::max)(1, to_move);
2482 if (insert_position - to_move >= node->start() ||
2483 left->count() + to_move < static_cast<int>(kNodeSlots)) {
2484 left->rebalance_right_to_left(to_move, node, mutable_allocator());
2486 assert(node->max_count() - node->count() == to_move);
2487 insert_position = insert_position - to_move;
2488 if (insert_position < node->start()) {
2489 insert_position = insert_position + left->count() + 1;
2490 node = left;
2491 }
2493 assert(node->count() < node->max_count());
2494 return;
2495 }
2496 }
2497 }
2499 if (node->position() < parent->finish()) {
2500 // Try rebalancing with our right sibling.
2501 node_type *right = parent->child(node->position() + 1);
2502 assert(right->max_count() == kNodeSlots);
2503 if (right->count() < kNodeSlots) {
2504 // We bias rebalancing based on the position being inserted. If we're
2505 // inserting at the beginning of the left node then we bias rebalancing
2506 // to fill up the right node.
2507 int to_move = (static_cast<int>(kNodeSlots) - right->count()) /
2508 (1 + (insert_position > node->start()));
2509 to_move = (std::max)(1, to_move);
2511 if (insert_position <= node->finish() - to_move ||
2512 right->count() + to_move < static_cast<int>(kNodeSlots)) {
2513 node->rebalance_left_to_right(to_move, right, mutable_allocator());
2515 if (insert_position > node->finish()) {
2516 insert_position = insert_position - node->count() - 1;
2517 node = right;
2518 }
2520 assert(node->count() < node->max_count());
2521 return;
2522 }
2523 }
2524 }
2526 // Rebalancing failed, make sure there is room on the parent node for a new
2527 // value.
2528 assert(parent->max_count() == kNodeSlots);
2529 if (parent->count() == kNodeSlots) {
2530 iterator parent_iter(node->parent(), node->position());
2531 rebalance_or_split(&parent_iter);
2532 }
2533 } else {
2534 // Rebalancing not possible because this is the root node.
2535 // Create a new root node and set the current root node as the child of the
2537 parent = new_internal_node(parent);
2538 parent->set_generation(root()->generation());
2539 parent->init_child(parent->start(), root());
2540 mutable_root() = parent;
2541 // If the former root was a leaf node, then it's now the rightmost node.
2542 assert(parent->start_child()->is_internal() ||
2543 parent->start_child() == rightmost());
2544 }
2546 // Split the node.
2547 node_type *split_node;
2548 if (node->is_leaf()) {
2549 split_node = new_leaf_node(parent);
2550 node->split(insert_position, split_node, mutable_allocator());
2551 if (rightmost() == node) mutable_rightmost() = split_node;
2552 } else {
2553 split_node = new_internal_node(parent);
2554 node->split(insert_position, split_node, mutable_allocator());
2555 }
2557 if (insert_position > node->finish()) {
2558 insert_position = insert_position - node->count() - 1;
2559 node = split_node;
2560 }
2561 }
2563 template <typename P>
2564 void btree<P>::merge_nodes(node_type *left, node_type *right) {
2565 left->merge(right, mutable_allocator());
2566 if (rightmost() == right) mutable_rightmost() = left;
2567 }
2569 template <typename P>
2570 bool btree<P>::try_merge_or_rebalance(iterator *iter) {
2571 node_type *parent = iter->node_->parent();
2572 if (iter->node_->position() > parent->start()) {
2573 // Try merging with our left sibling.
2574 node_type *left = parent->child(iter->node_->position() - 1);
2575 assert(left->max_count() == kNodeSlots);
2576 if (1U + left->count() + iter->node_->count() <= kNodeSlots) {
2577 iter->position_ += 1 + left->count();
2578 merge_nodes(left, iter->node_);
2579 return true;
2580 }
2581 }
2583 if (iter->node_->position() < parent->finish()) {
2584 // Try merging with our right sibling.
2585 node_type *right = parent->child(iter->node_->position() + 1);
2586 assert(right->max_count() == kNodeSlots);
2587 if (1U + iter->node_->count() + right->count() <= kNodeSlots) {
2588 merge_nodes(iter->node_, right);
2589 return true;
2590 }
2591 // Try rebalancing with our right sibling. We don't perform rebalancing if
2592 // we deleted the first element from iter->node_ and the node is not
2593 // empty. This is a small optimization for the common pattern of deleting
2594 // from the front of the tree.
2595 if (right->count() > kMinNodeValues &&
2596 (iter->node_->count() == 0 || iter->position_ > iter->node_->start())) {
2597 int to_move = (right->count() - iter->node_->count()) / 2;
2598 to_move = (std::min)(to_move, right->count() - 1);
2599 iter->node_->rebalance_right_to_left(to_move, right, mutable_allocator());
2600 return false;
2601 }
2602 }
2603 if (iter->node_->position() > parent->start()) {
2604 // Try rebalancing with our left sibling. We don't perform rebalancing if
2605 // we deleted the last element from iter->node_ and the node is not
2606 // empty. This is a small optimization for the common pattern of deleting
2607 // from the back of the tree.
2608 node_type *left = parent->child(iter->node_->position() - 1);
2609 if (left->count() > kMinNodeValues &&
2610 (iter->node_->count() == 0 ||
2611 iter->position_ < iter->node_->finish())) {
2612 int to_move = (left->count() - iter->node_->count()) / 2;
2613 to_move = (std::min)(to_move, left->count() - 1);
2614 left->rebalance_left_to_right(to_move, iter->node_, mutable_allocator());
2615 iter->position_ += to_move;
2616 return false;
2617 }
2618 }
2619 return false;
2620 }
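// For example (illustrative, not from the original source): a loop that
// repeatedly calls s.erase(s.begin()) always deletes the first value of a
// leaf, so the `iter->position_ > iter->node_->start()` condition above
// suppresses rebalancing until the node is empty enough to merge, avoiding
// repeated value shuffling for this common front-deletion pattern.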
2622 template <typename P>
2623 void btree<P>::try_shrink() {
2624 node_type *orig_root = root();
2625 if (orig_root->count() > 0) {
2626 return;
2627 }
2628 // Deleted the last item on the root node, shrink the height of the tree.
2629 if (orig_root->is_leaf()) {
2630 assert(size() == 0);
2631 mutable_root() = mutable_rightmost() = EmptyNode();
2632 } else {
2633 node_type *child = orig_root->start_child();
2634 child->make_root();
2635 mutable_root() = child;
2636 }
2637 node_type::clear_and_delete(orig_root, mutable_allocator());
2638 }
2640 template <typename P>
2641 template <typename IterType>
2642 inline IterType btree<P>::internal_last(IterType iter) {
2643 assert(iter.node_ != nullptr);
2644 while (iter.position_ == iter.node_->finish()) {
2645 iter.position_ = iter.node_->position();
2646 iter.node_ = iter.node_->parent();
2647 if (iter.node_->is_leaf()) {
2648 iter.node_ = nullptr;
2649 break;
2650 }
2651 }
2652 iter.update_generation();
2653 return iter;
2654 }
2656 template <typename P>
2657 template <typename... Args>
2658 inline auto btree<P>::internal_emplace(iterator iter, Args &&... args)
2659 -> iterator {
2660 if (iter.node_->is_internal()) {
2661 // We can't insert on an internal node. Instead, we'll insert after the
2662 // previous value which is guaranteed to be on a leaf node.
2663 --iter;
2664 ++iter.position_;
2665 }
2666 const field_type max_count = iter.node_->max_count();
2667 allocator_type *alloc = mutable_allocator();
2668 if (iter.node_->count() == max_count) {
2669 // Make room in the leaf for the new item.
2670 if (max_count < kNodeSlots) {
2671 // Insertion into the root where the root is smaller than the full node
2672 // size. Simply grow the size of the root node.
2673 assert(iter.node_ == root());
2674 iter.node_ =
2675 new_leaf_root_node((std::min<int>)(kNodeSlots, 2 * max_count));
2676 // Transfer the values from the old root to the new root.
2677 node_type *old_root = root();
2678 node_type *new_root = iter.node_;
2679 new_root->transfer_n(old_root->count(), new_root->start(),
2680 old_root->start(), old_root, alloc);
2681 new_root->set_finish(old_root->finish());
2682 old_root->set_finish(old_root->start());
2683 new_root->set_generation(old_root->generation());
2684 node_type::clear_and_delete(old_root, alloc);
2685 mutable_root() = mutable_rightmost() = new_root;
2686 } else {
2687 rebalance_or_split(&iter);
2688 }
2689 }
2690 iter.node_->emplace_value(iter.position_, alloc, std::forward<Args>(args)...);
2691 ++size_;
2692 iter.update_generation();
2693 return iter;
2694 }
2696 template <typename P>
2697 template <typename K>
2698 inline auto btree<P>::internal_locate(const K &key) const
2699 -> SearchResult<iterator, is_key_compare_to::value> {
2700 iterator iter(const_cast<node_type *>(root()));
2701 for (;;) {
2702 SearchResult<int, is_key_compare_to::value> res =
2703 iter.node_->lower_bound(key, key_comp());
2704 iter.position_ = res.value;
2705 if (res.IsEq()) {
2706 return {iter, MatchKind::kEq};
2707 }
2708 // Note: in the non-key-compare-to case, we don't need to walk all the way
2709 // down the tree if the keys are equal, but determining equality would
2710 // require doing an extra comparison on each node on the way down, and we
2711 // will need to go all the way to the leaf node in the expected case.
2712 if (iter.node_->is_leaf()) {
2713 break;
2714 }
2715 iter.node_ = iter.node_->child(iter.position_);
2716 }
2717 // Note: in the non-key-compare-to case, the key may actually be equivalent
2718 // here (and the MatchKind::kNe is ignored).
2719 return {iter, MatchKind::kNe};
2722 template <typename P>
2723 template <typename K>
2724 auto btree<P>::internal_lower_bound(const K &key) const
2725 -> SearchResult<iterator, is_key_compare_to::value> {
2726 if (!params_type::template can_have_multiple_equivalent_keys<K>()) {
2727 SearchResult<iterator, is_key_compare_to::value> ret = internal_locate(key);
2728 ret.value = internal_last(ret.value);
2729 return ret;
2730 }
2731 iterator iter(const_cast<node_type *>(root()));
2732 SearchResult<int, is_key_compare_to::value> res;
2733 bool seen_eq = false;
2734 for (;;) {
2735 res = iter.node_->lower_bound(key, key_comp());
2736 iter.position_ = res.value;
2737 if (iter.node_->is_leaf()) {
2738 break;
2739 }
2740 seen_eq = seen_eq || res.IsEq();
2741 iter.node_ = iter.node_->child(iter.position_);
2742 }
2743 if (res.IsEq()) return {iter, MatchKind::kEq};
2744 return {internal_last(iter), seen_eq ? MatchKind::kEq : MatchKind::kNe};
2747 template <typename P>
2748 template <typename K>
2749 auto btree<P>::internal_upper_bound(const K &key) const -> iterator {
2750 iterator iter(const_cast<node_type *>(root()));
2751 for (;;) {
2752 iter.position_ = iter.node_->upper_bound(key, key_comp());
2753 if (iter.node_->is_leaf()) {
2754 break;
2755 }
2756 iter.node_ = iter.node_->child(iter.position_);
2757 }
2758 return internal_last(iter);
2761 template <typename P>
2762 template <typename K>
2763 auto btree<P>::internal_find(const K &key) const -> iterator {
2764 SearchResult<iterator, is_key_compare_to::value> res = internal_locate(key);
2765 if (res.HasMatch()) {
2766 if (res.IsEq()) {
2767 return res.value;
2768 }
2769 } else {
2770 const iterator iter = internal_last(res.value);
2771 if (iter.node_ != nullptr && !compare_keys(key, iter.key())) {
2772 return iter;
2773 }
2774 }
2775 return {nullptr, 0};
2776 }
2778 template <typename P>
2779 int btree<P>::internal_verify(const node_type *node, const key_type *lo,
2780 const key_type *hi) const {
2781 assert(node->count() > 0);
2782 assert(node->count() <= node->max_count());
2783 if (lo) {
2784 assert(!compare_keys(node->key(node->start()), *lo));
2785 }
2786 if (hi) {
2787 assert(!compare_keys(*hi, node->key(node->finish() - 1)));
2788 }
2789 for (int i = node->start() + 1; i < node->finish(); ++i) {
2790 assert(!compare_keys(node->key(i), node->key(i - 1)));
2791 }
2792 int count = node->count();
2793 if (node->is_internal()) {
2794 for (int i = node->start(); i <= node->finish(); ++i) {
2795 assert(node->child(i) != nullptr);
2796 assert(node->child(i)->parent() == node);
2797 assert(node->child(i)->position() == i);
2798 count += internal_verify(node->child(i),
2799 i == node->start() ? lo : &node->key(i - 1),
2800 i == node->finish() ? hi : &node->key(i));
2801 }
2802 }
2803 return count;
2804 }
2806 struct btree_access {
2807 template <typename BtreeContainer, typename Pred>
2808 static auto erase_if(BtreeContainer &container, Pred pred)
2809 -> typename BtreeContainer::size_type {
2810 const auto initial_size = container.size();
2811 auto &tree = container.tree_;
2812 auto *alloc = tree.mutable_allocator();
2813 for (auto it = container.begin(); it != container.end();) {
2814 if (!pred(*it)) {
2815 ++it;
2816 continue;
2817 }
2818 auto *node = it.node_;
2819 if (node->is_internal()) {
2820 // Handle internal nodes normally.
2821 it = container.erase(it);
2822 continue;
2823 }
2824 // If this is a leaf node, then we do all the erases from this node
2825 // at once before doing rebalancing.
2827 // The current position to transfer slots to.
2828 int to_pos = it.position_;
2829 node->value_destroy(it.position_, alloc);
2830 while (++it.position_ < node->finish()) {
2831 it.update_generation();
2832 if (pred(*it)) {
2833 node->value_destroy(it.position_, alloc);
2834 } else {
2835 node->transfer(node->slot(to_pos++), node->slot(it.position_), alloc);
2836 }
2837 }
2838 const int num_deleted = node->finish() - to_pos;
2839 tree.size_ -= num_deleted;
2840 node->set_finish(to_pos);
2841 it.position_ = to_pos;
2842 it = tree.rebalance_after_delete(it);
2843 }
2844 return initial_size - container.size();
2845 }
2846 };
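// Example through the public API (illustrative; the container and predicate
// are hypothetical, not from the original source):
//
//   absl::btree_map<int, std::string> m = {{1, "a"}, {2, "b"}, {3, "c"}};
//   absl::erase_if(m, [](const auto &kv) { return kv.first % 2 == 1; });
//   // m now holds only {2, "b"}; erases within a single leaf are batched
//   // above before any rebalancing is performed.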
2848 #undef ABSL_BTREE_ENABLE_GENERATIONS
2850 } // namespace container_internal
2854 #endif // ABSL_CONTAINER_INTERNAL_BTREE_H_