#ifndef ABSL_CONTAINER_INTERNAL_RAW_HASH_SET_H_
#define ABSL_CONTAINER_INTERNAL_RAW_HASH_SET_H_

#include <type_traits>

#include "absl/base/config.h"
#include "absl/base/internal/endian.h"
#include "absl/base/optimization.h"
#include "absl/base/port.h"
#include "absl/container/internal/common.h"
#include "absl/container/internal/compressed_tuple.h"
#include "absl/container/internal/container_memory.h"
#include "absl/container/internal/hash_policy_traits.h"
#include "absl/container/internal/hashtable_debug_hooks.h"
#include "absl/container/internal/hashtablez_sampler.h"
#include "absl/memory/memory.h"
#include "absl/meta/type_traits.h"
#include "absl/numeric/bits.h"
#include "absl/utility/utility.h"
#ifdef ABSL_INTERNAL_HAVE_SSE2
#include <emmintrin.h>
#endif

#ifdef ABSL_INTERNAL_HAVE_SSSE3
#include <tmmintrin.h>
#endif

#ifdef ABSL_INTERNAL_HAVE_ARM_NEON
#include <arm_neon.h>
#endif
namespace absl {
ABSL_NAMESPACE_BEGIN
namespace container_internal {
// Swaps allocators if and only if the allocator's propagation trait says to.
template <typename AllocType>
void SwapAlloc(AllocType& lhs, AllocType& rhs,
               std::true_type /* propagate_on_container_swap */) {
  using std::swap;
  swap(lhs, rhs);
}
template <typename AllocType>
void SwapAlloc(AllocType&, AllocType&,
               std::false_type /* propagate_on_container_swap */) {}
// A triangular probe sequence over group-aligned offsets in a table whose
// capacity (and thus `mask`) is a power of two minus one.
template <size_t Width>
class probe_seq {
 public:
  probe_seq(size_t hash, size_t mask) {
    assert(((mask + 1) & mask) == 0 && "not a mask");
    mask_ = mask;
    offset_ = hash & mask_;
  }
  // ...
};
template <class ContainerKey, class Hash, class Eq>
struct RequireUsableKey {
  template <class PassedKey, class... Args>
  std::pair<
      decltype(std::declval<const Hash&>()(std::declval<const PassedKey&>())),
      decltype(std::declval<const Eq&>()(std::declval<const ContainerKey&>(),
                                         std::declval<const PassedKey&>()))>*
  operator()(const PassedKey&, const Args&...) const;
};
template <class E, class Policy, class Hash, class Eq, class... Ts>
struct IsDecomposable : std::false_type {};

template <class Policy, class Hash, class Eq, class... Ts>
struct IsDecomposable<
    absl::void_t<decltype(Policy::apply(
        RequireUsableKey<typename Policy::key_type, Hash, Eq>(),
        std::declval<Ts>()...))>,
    Policy, Hash, Eq, Ts...> : std::true_type {};
template <class T>
constexpr bool IsNoThrowSwappable(std::true_type = {} /* is_swappable */) {
  using std::swap;
  return noexcept(swap(std::declval<T&>(), std::declval<T&>()));
}
template <typename T>
uint32_t TrailingZeros(T x) {
  ABSL_ASSUME(x != 0);
  return static_cast<uint32_t>(countr_zero(x));
}
// An abstract bitmask, such as that emitted by a SIMD instruction, in which
// each logical slot may occupy more than one physical bit (Shift == 3 means
// one byte per slot).
template <class T, int SignificantBits, int Shift = 0>
class NonIterableBitMask {
 public:
  explicit NonIterableBitMask(T mask) : mask_(mask) {}

  explicit operator bool() const { return this->mask_ != 0; }

  // ...

  uint32_t LeadingZeros() const {
    constexpr int total_significant_bits = SignificantBits << Shift;
    constexpr int extra_bits = sizeof(T) * 8 - total_significant_bits;
    return static_cast<uint32_t>(
               countl_zero(static_cast<T>(mask_ << extra_bits))) >>
           Shift;
  }

  T mask_;
};
// An iterable bitmask: iteration yields the indices of the set abstract bits.
template <class T, int SignificantBits, int Shift = 0>
class BitMask : public NonIterableBitMask<T, SignificantBits, Shift> {
  using Base = NonIterableBitMask<T, SignificantBits, Shift>;
  static_assert(std::is_unsigned<T>::value, "");
  static_assert(Shift == 0 || Shift == 3, "");

  // ...

  friend bool operator==(const BitMask& a, const BitMask& b) {
    return a.mask_ == b.mask_;
  }
  friend bool operator!=(const BitMask& a, const BitMask& b) {
    return a.mask_ != b.mask_;
  }
};
431 "Special markers need to have the MSB to make checking for them efficient");
434 "ctrl_t::kEmpty and ctrl_t::kDeleted must be smaller than "
435 "ctrl_t::kSentinel to make the SIMD test of IsEmptyOrDeleted() efficient");
438 "ctrl_t::kSentinel must be -1 to elide loading it from memory into SIMD "
439 "registers (pcmpeqd xmm, xmm)");
441 "ctrl_t::kEmpty must be -128 to make the SIMD check for its "
442 "existence efficient (psignb xmm, xmm)");
447 "ctrl_t::kEmpty and ctrl_t::kDeleted must share an unset bit that is not "
448 "shared by ctrl_t::kSentinel to make the scalar test for "
449 "MaskEmptyOrDeleted() efficient");
451 "ctrl_t::kDeleted must be -2 to make the implementation of "
452 "ConvertSpecialToEmptyAndFullToDeleted efficient");
// Extracts per-table entropy from the control pointer; the low bits carry
// little entropy because of alignment, so shift them away.
inline size_t PerTableSalt(const ctrl_t* ctrl) {
  return reinterpret_cast<uintptr_t>(ctrl) >> 12;
}

// H1: the probe-position portion of the hash, mixed with the per-table salt.
inline size_t H1(size_t hash, const ctrl_t* ctrl) {
  return (hash >> 7) ^ PerTableSalt(ctrl);
}

// H2: the low 7 bits, stored in the control byte of a full slot.
inline h2_t H2(size_t hash) { return hash & 0x7F; }

inline bool IsEmpty(ctrl_t c) { return c == ctrl_t::kEmpty; }
inline bool IsFull(ctrl_t c) { return c >= static_cast<ctrl_t>(0); }
inline bool IsDeleted(ctrl_t c) { return c == ctrl_t::kDeleted; }
inline bool IsEmptyOrDeleted(ctrl_t c) { return c < ctrl_t::kSentinel; }
#ifdef ABSL_INTERNAL_HAVE_SSE2

// Workaround: under GCC, _mm_cmpgt_epi8 misbehaves when char is unsigned
// (-funsigned-char), so emulate the signed comparison in that configuration.
inline __m128i _mm_cmpgt_epi8_fixed(__m128i a, __m128i b) {
#if defined(__GNUC__) && !defined(__clang__)
  if (std::is_unsigned<char>::value) {
    const __m128i mask = _mm_set1_epi8(static_cast<char>(0x80));
    const __m128i diff = _mm_subs_epi8(b, a);
    return _mm_cmpeq_epi8(_mm_and_si128(diff, mask), mask);
  }
#endif
  return _mm_cmpgt_epi8(a, b);
}
struct GroupSse2Impl {
  static constexpr size_t kWidth = 16;  // the number of slots per group

  explicit GroupSse2Impl(const ctrl_t* pos) {
    ctrl = _mm_loadu_si128(reinterpret_cast<const __m128i*>(pos));
  }

  // Returns a bitmask representing the positions of slots that match `hash`.
  BitMask<uint32_t, kWidth> Match(h2_t hash) const {
    auto match = _mm_set1_epi8(static_cast<char>(hash));
    return BitMask<uint32_t, kWidth>(
        static_cast<uint32_t>(_mm_movemask_epi8(_mm_cmpeq_epi8(match, ctrl))));
  }

  // Returns a bitmask representing the positions of empty slots.
  NonIterableBitMask<uint32_t, kWidth> MaskEmpty() const {
#ifdef ABSL_INTERNAL_HAVE_SSSE3
    // This only works because ctrl_t::kEmpty is -128.
    return NonIterableBitMask<uint32_t, kWidth>(
        static_cast<uint32_t>(_mm_movemask_epi8(_mm_sign_epi8(ctrl, ctrl))));
#else
    auto match = _mm_set1_epi8(static_cast<char>(ctrl_t::kEmpty));
    return NonIterableBitMask<uint32_t, kWidth>(
        static_cast<uint32_t>(_mm_movemask_epi8(_mm_cmpeq_epi8(match, ctrl))));
#endif
  }

  // Returns a bitmask representing the positions of empty or deleted slots.
  NonIterableBitMask<uint32_t, kWidth> MaskEmptyOrDeleted() const {
    auto special = _mm_set1_epi8(static_cast<char>(ctrl_t::kSentinel));
    return NonIterableBitMask<uint32_t, kWidth>(static_cast<uint32_t>(
        _mm_movemask_epi8(_mm_cmpgt_epi8_fixed(special, ctrl))));
  }

  // Returns the number of trailing empty or deleted slots in the group.
  uint32_t CountLeadingEmptyOrDeleted() const {
    auto special = _mm_set1_epi8(static_cast<char>(ctrl_t::kSentinel));
    return TrailingZeros(static_cast<uint32_t>(
        _mm_movemask_epi8(_mm_cmpgt_epi8_fixed(special, ctrl)) + 1));
  }

  void ConvertSpecialToEmptyAndFullToDeleted(ctrl_t* dst) const {
    auto msbs = _mm_set1_epi8(static_cast<char>(-128));
    auto x126 = _mm_set1_epi8(126);
#ifdef ABSL_INTERNAL_HAVE_SSSE3
    auto res = _mm_or_si128(_mm_shuffle_epi8(x126, ctrl), msbs);
#else
    auto zero = _mm_setzero_si128();
    auto special_mask = _mm_cmpgt_epi8_fixed(zero, ctrl);
    auto res = _mm_or_si128(msbs, _mm_andnot_si128(special_mask, x126));
#endif
    _mm_storeu_si128(reinterpret_cast<__m128i*>(dst), res);
  }

  __m128i ctrl;
};
#endif  // ABSL_INTERNAL_HAVE_SSE2
#if defined(ABSL_INTERNAL_HAVE_ARM_NEON) && defined(ABSL_IS_LITTLE_ENDIAN)
struct GroupAArch64Impl {
  static constexpr size_t kWidth = 8;

  explicit GroupAArch64Impl(const ctrl_t* pos) {
    ctrl = vld1_u8(reinterpret_cast<const uint8_t*>(pos));
  }

  BitMask<uint64_t, kWidth, 3> Match(h2_t hash) const {
    uint8x8_t dup = vdup_n_u8(hash);
    auto mask = vceq_u8(ctrl, dup);
    constexpr uint64_t msbs = 0x8080808080808080ULL;
    return BitMask<uint64_t, kWidth, 3>(
        vget_lane_u64(vreinterpret_u64_u8(mask), 0) & msbs);
  }

  NonIterableBitMask<uint64_t, kWidth, 3> MaskEmpty() const {
    uint64_t mask =
        vget_lane_u64(vreinterpret_u64_u8(
                          vceq_s8(vdup_n_s8(static_cast<int8_t>(ctrl_t::kEmpty)),
                                  vreinterpret_s8_u8(ctrl))),
                      0);
    return NonIterableBitMask<uint64_t, kWidth, 3>(mask);
  }

  NonIterableBitMask<uint64_t, kWidth, 3> MaskEmptyOrDeleted() const {
    uint64_t mask =
        vget_lane_u64(vreinterpret_u64_u8(vcgt_s8(
                          vdup_n_s8(static_cast<int8_t>(ctrl_t::kSentinel)),
                          vreinterpret_s8_u8(ctrl))),
                      0);
    return NonIterableBitMask<uint64_t, kWidth, 3>(mask);
  }

  uint32_t CountLeadingEmptyOrDeleted() const {
    uint64_t mask = vget_lane_u64(vreinterpret_u64_u8(ctrl), 0);
    // ctrl | ~(ctrl >> 7) has the lowest bit of each byte cleared only for
    // kEmpty and kDeleted; lower all other bits and count trailing zeros.
    constexpr uint64_t bits = 0x0101010101010101ULL;
    return static_cast<uint32_t>(countr_zero((mask | ~(mask >> 7)) & bits) >>
                                 3);
  }

  void ConvertSpecialToEmptyAndFullToDeleted(ctrl_t* dst) const {
    uint64_t mask = vget_lane_u64(vreinterpret_u64_u8(ctrl), 0);
    constexpr uint64_t msbs = 0x8080808080808080ULL;
    constexpr uint64_t lsbs = 0x0101010101010101ULL;
    auto x = mask & msbs;
    auto res = (~x + (x >> 7)) & ~lsbs;
    little_endian::Store64(dst, res);
  }

  uint8x8_t ctrl;
};
#endif  // ABSL_INTERNAL_HAVE_ARM_NEON && ABSL_IS_LITTLE_ENDIAN
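// Editorial note (illustrative, not original text): unlike SSE2's
// one-bit-per-slot movemask, the NEON and portable groups report one *byte*
// per slot with the MSB set on a match; hence the BitMask Shift of 3. For
// example, a match in slot 2 of 8 sets bit 23, and 23 >> 3 == 2:
static_assert((uint64_t{0x80} << (2 * 8)) == uint64_t{0x800000},
              "slot 2's match bit is bit 23; 23 >> 3 recovers the slot index");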
// GroupPortableImpl (excerpt): the portable fallback treats eight control
// bytes as a single uint64_t.
struct GroupPortableImpl {
  static constexpr size_t kWidth = 8;
  // ...

  void ConvertSpecialToEmptyAndFullToDeleted(ctrl_t* dst) const {
    constexpr uint64_t msbs = 0x8080808080808080ULL;
    constexpr uint64_t lsbs = 0x0101010101010101ULL;
    auto x = ctrl & msbs;
    auto res = (~x + (x >> 7)) & ~lsbs;
    little_endian::Store64(dst, res);
  }

  uint64_t ctrl;
};
#ifdef ABSL_INTERNAL_HAVE_SSE2
using Group = GroupSse2Impl;
#elif defined(ABSL_INTERNAL_HAVE_ARM_NEON) && defined(ABSL_IS_LITTLE_ENDIAN)
using Group = GroupAArch64Impl;
#else
using Group = GroupPortableImpl;
#endif
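// Editorial sketch (illustrative, not original text): how callers consume a
// Group; `seq`, `hash`, and `slots_` refer to names used in find() further
// below.
//
//   Group g{ctrl_ + seq.offset()};
//   for (uint32_t i : g.Match(H2(hash))) {
//     // slot seq.offset(i) has a matching H2 fingerprint; compare keys.
//   }
//   if (g.MaskEmpty()) {
//     // an empty slot ends the probe: the key is absent.
//   }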
// Forward declaration of the table type.
template <class Policy, class Hash, class Eq, class Alloc>
class raw_hash_set;

// ...

// Inverse of CapacityToGrowth: the minimum capacity (before normalization)
// whose 7/8 load-factor budget covers `growth` elements.
inline size_t GrowthToLowerboundCapacity(size_t growth) {
  return growth + static_cast<size_t>((static_cast<int64_t>(growth) - 1) / 7);
}
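// Editorial worked example (illustrative): with the 7/8 maximum load factor,
// a capacity of 15 supports CapacityToGrowth(15) = 15 - 15/8 = 14 elements
// (CapacityToGrowth is defined in an elided portion above). Inverting for
// reserve(14): GrowthToLowerboundCapacity(14) = 14 + 13/7 = 15, which is
// already a valid 2^k - 1 capacity.
static_assert(15 - 15 / 8 == 14 && 14 + (14 - 1) / 7 == 15,
              "growth/capacity round trip at the 7/8 load factor");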
template <class InputIter>
size_t SelectBucketCountForIterRange(InputIter first, InputIter last,
                                     size_t bucket_count) {
  if (bucket_count != 0) {
    return bucket_count;
  }
  using InputIterCategory =
      typename std::iterator_traits<InputIter>::iterator_category;
  if (std::is_base_of<std::random_access_iterator_tag,
                      InputIterCategory>::value) {
    return GrowthToLowerboundCapacity(
        static_cast<size_t>(std::distance(first, last)));
  }
  return 0;
}
#define ABSL_INTERNAL_ASSERT_IS_FULL(ctrl, msg) \
  ABSL_HARDENING_ASSERT((ctrl != nullptr && IsFull(*ctrl)) && msg)

inline void AssertIsValid(ctrl_t* ctrl) {
  ABSL_HARDENING_ASSERT(
      (ctrl == nullptr || IsFull(*ctrl)) &&
      "Invalid operation on iterator. The element might have "
      "been erased, the table might have rehashed, or this may "
      "be an end() iterator.");
}
// Probes `ctrl` for the first empty or deleted slot, which must exist because
// the table is never allowed to become completely full.
template <typename = void>
inline FindInfo find_first_non_full(const ctrl_t* ctrl, size_t hash,
                                    size_t capacity) {
  auto seq = probe(ctrl, hash, capacity);
  while (true) {
    Group g{ctrl + seq.offset()};
    auto mask = g.MaskEmptyOrDeleted();
    if (mask) {
#if !defined(NDEBUG)
      // In debug builds, sometimes insert at the back of the group for extra
      // entropy.
      if (!is_small(capacity) && ShouldInsertBackwards(hash, ctrl)) {
        return {seq.offset(mask.HighestBitSet()), seq.index()};
      }
#endif
      return {seq.offset(mask.LowestBitSet()), seq.index()};
    }
    seq.next();
    assert(seq.index() <= capacity && "full table!");
  }
}
// Sets ctrl to `{kEmpty, ..., kEmpty, kSentinel}`, marking the whole table
// as empty.
inline void ResetCtrl(size_t capacity, ctrl_t* ctrl, const void* slot,
                      size_t slot_size) {
  std::memset(ctrl, static_cast<int8_t>(ctrl_t::kEmpty),
              capacity + 1 + NumClonedBytes());
  ctrl[capacity] = ctrl_t::kSentinel;
  SanitizerPoisonMemoryRegion(slot, slot_size * capacity);
}

// Sets ctrl[i] to `h`, mirroring the write into the cloned control bytes,
// and (un)poisons the slot for sanitizers.
inline void SetCtrl(size_t i, ctrl_t h, size_t capacity, ctrl_t* ctrl,
                    const void* slot, size_t slot_size) {
  assert(i < capacity);
  auto* slot_i = static_cast<const char*>(slot) + i * slot_size;
  if (IsFull(h)) {
    SanitizerUnpoisonMemoryRegion(slot_i, slot_size);
  } else {
    SanitizerPoisonMemoryRegion(slot_i, slot_size);
  }
  ctrl[i] = h;
  ctrl[((i - NumClonedBytes()) & capacity) + (NumClonedBytes() & capacity)] = h;
}

// The byte offset of the slot array within the single allocation that also
// holds the control bytes, rounded up to the slot alignment.
inline size_t SlotOffset(size_t capacity, size_t slot_align) {
  assert(IsValidCapacity(capacity));
  const size_t num_control_bytes = capacity + 1 + NumClonedBytes();
  return (num_control_bytes + slot_align - 1) & (~slot_align + 1);
}
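// Editorial worked example (illustrative): with capacity 15, 8-byte slots,
// and 8-byte slot alignment under a 16-wide group, the allocation holds
// 15 + 1 + 15 = 31 control bytes, the slots begin at offset 32, and
// AllocSize(15, 8, 8) = 32 + 15 * 8 = 152 bytes.
static_assert(((31 + 8 - 1) & ~size_t{8 - 1}) == 32,
              "slot array offset rounds up to slot alignment");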
// A SwissTable: the shared implementation behind the absl hash containers.
template <class Policy, class Hash, class Eq, class Alloc>
class raw_hash_set {
  using PolicyTraits = hash_policy_traits<Policy>;
  using KeyArgImpl =
      KeyArg<IsTransparent<Eq>::value && IsTransparent<Hash>::value>;

 public:
  using init_type = typename PolicyTraits::init_type;
  using key_type = typename PolicyTraits::key_type;
  // ...
  // Checks that the hash and equality functors accept the key type.
  auto KeyTypeCanBeHashed(const Hash& h, const key_type& k) -> decltype(h(k));
  auto KeyTypeCanBeEq(const Eq& eq, const key_type& k) -> decltype(eq(k, k));

  // ...
984 "Policy::element() must return a reference");
986 template <
typename T>
988 : std::is_same<typename std::remove_cv<
989 typename std::remove_reference<reference>::type>::type,
990 typename std::remove_cv<
991 typename std::remove_reference<T>::type>::type> {};
  template <class... Ts>
  using IsDecomposable = IsDecomposable<void, PolicyTraits, Hash, Eq, Ts...>;

  static_assert(std::is_same<pointer, value_type*>::value,
                "Allocators with custom pointer types are not supported");
  static_assert(std::is_same<const_pointer, const value_type*>::value,
                "Allocators with custom pointer types are not supported");
1036 "operator*() called on invalid iterator.");
1043 "operator-> called on invalid iterator.");
1050 "operator++ called on invalid iterator.");
1053 skip_empty_or_deleted();
1066 return a.ctrl_ ==
b.ctrl_;
1124 return a.inner_ ==
b.inner_;
  // Range constructor, plus delegating overloads that take subsets of the
  // trailing arguments.
  template <class InputIter>
  raw_hash_set(InputIter first, InputIter last, size_t bucket_count = 0,
               const hasher& hash = hasher(), const key_equal& eq = key_equal(),
               const allocator_type& alloc = allocator_type())
      : raw_hash_set(SelectBucketCountForIterRange(first, last, bucket_count),
                     hash, eq, alloc) {
    insert(first, last);
  }

  template <class InputIter>
  raw_hash_set(InputIter first, InputIter last, size_t bucket_count,
               const hasher& hash, const allocator_type& alloc)
      : raw_hash_set(first, last, bucket_count, hash, key_equal(), alloc) {}

  template <class InputIter>
  raw_hash_set(InputIter first, InputIter last, size_t bucket_count,
               const allocator_type& alloc)
      : raw_hash_set(first, last, bucket_count, hasher(), key_equal(), alloc) {}

  template <class InputIter>
  raw_hash_set(InputIter first, InputIter last, const allocator_type& alloc)
      : raw_hash_set(first, last, 0, alloc) {}
  // Initializer-list constructors, in T and init_type flavors, with the same
  // set of delegating overloads as the range constructors above.
  template <class T, RequiresNotInit<T> = 0, RequiresInsertable<T> = 0>
  raw_hash_set(std::initializer_list<T> init, size_t bucket_count = 0,
               const hasher& hash = hasher(), const key_equal& eq = key_equal(),
               const allocator_type& alloc = allocator_type())
      : raw_hash_set(init.begin(), init.end(), bucket_count, hash, eq, alloc) {}

  // ... (remaining initializer_list overloads elided)

  raw_hash_set(const raw_hash_set& that)
      : raw_hash_set(that, AllocTraits::select_on_container_copy_construction(
                               that.alloc_ref())) {}
  raw_hash_set(const raw_hash_set& that, const allocator_type& a)
      : raw_hash_set(0, that.hash_ref(), that.eq_ref(), a) {
    reserve(that.size());
    // Because the table is guaranteed to be empty, we can do something faster
    // than a full `insert`.
    for (const auto& v : that) {
      const size_t hash = PolicyTraits::apply(HashElement{hash_ref()}, v);
      auto target = find_first_non_full(ctrl_, hash, capacity_);
      SetCtrl(target.offset, H2(hash), capacity_, ctrl_, slots_,
              sizeof(slot_type));
      emplace_at(target.offset, v);
      infoz().RecordInsert(hash, target.probe_length);
    }
    size_ = that.size();
    growth_left() -= that.size();
  }

  raw_hash_set(raw_hash_set&& that) noexcept(
      std::is_nothrow_copy_constructible<hasher>::value &&
      std::is_nothrow_copy_constructible<key_equal>::value &&
      std::is_nothrow_copy_constructible<allocator_type>::value)
      : // ... members pilfered from `that` ...
        settings_(absl::exchange(that.growth_left(), size_t{0}),
                  absl::exchange(that.infoz(), HashtablezInfoHandle()),
                  that.hash_ref(), that.eq_ref(), that.alloc_ref()) {}
  raw_hash_set(raw_hash_set&& that, const allocator_type& a)
      : ctrl_(EmptyGroup()),
        slots_(nullptr),
        size_(0),
        capacity_(0),
        settings_(0, HashtablezInfoHandle(), that.hash_ref(), that.eq_ref(),
                  a) {
    if (a == that.alloc_ref()) {
      std::swap(ctrl_, that.ctrl_);
      std::swap(slots_, that.slots_);
      std::swap(size_, that.size_);
      std::swap(capacity_, that.capacity_);
      std::swap(growth_left(), that.growth_left());
      std::swap(infoz(), that.infoz());
    } else {
      reserve(that.size());
      for (auto& elem : that) insert(std::move(elem));
    }
  }
  iterator begin() {
    auto it = iterator_at(0);
    it.skip_empty_or_deleted();
    return it;
  }
  iterator end() { return {}; }

  const_iterator begin() const {
    return const_cast<raw_hash_set*>(this)->begin();
  }
  const_iterator end() const { return {}; }

  // ...

  ABSL_ATTRIBUTE_REINITIALIZES void clear() {
    // For larger tables it is faster to deallocate outright; for smaller
    // ones, destroy the elements in place and reuse the backing array.
    if (capacity_ > 127) {
      destroy_slots();
      infoz().RecordClearedReservation();
    } else if (capacity_) {
      for (size_t i = 0; i != capacity_; ++i) {
        if (IsFull(ctrl_[i])) {
          PolicyTraits::destroy(&alloc_ref(), slots_ + i);
        }
      }
      size_ = 0;
      ResetCtrl(capacity_, ctrl_, slots_, sizeof(slot_type));
      reset_growth_left();
    }
    assert(empty());
    infoz().RecordStorageChanged(0, capacity_);
  }
  // Inserts `value` by decomposing it and forwarding to emplace().
  template <class T, RequiresInsertable<T> = 0, class T2 = T,
            typename std::enable_if<IsDecomposable<T2>::value, int>::type = 0,
            T* = nullptr>
  std::pair<iterator, bool> insert(T&& value) {
    return emplace(std::forward<T>(value));
  }

  template <
      class T, RequiresInsertable<T> = 0,
      typename std::enable_if<IsDecomposable<const T&>::value, int>::type = 0>
  std::pair<iterator, bool> insert(const T& value) {
    return emplace(value);
  }

  std::pair<iterator, bool> insert(init_type&& value) {
    return emplace(std::move(value));
  }

  // Hint-taking overloads (the hint is ignored):
  template <class T, RequiresInsertable<T> = 0, class T2 = T,
            typename std::enable_if<IsDecomposable<T2>::value, int>::type = 0,
            T* = nullptr>
  iterator insert(const_iterator, T&& value) {
    return insert(std::forward<T>(value)).first;
  }

  template <
      class T, RequiresInsertable<T> = 0,
      typename std::enable_if<IsDecomposable<const T&>::value, int>::type = 0>
  iterator insert(const_iterator, const T& value) {
    return insert(value).first;
  }
  template <class InputIt>
  void insert(InputIt first, InputIt last) {
    for (; first != last; ++first) emplace(*first);
  }

  template <class T, RequiresNotInit<T> = 0, RequiresInsertable<const T&> = 0>
  void insert(std::initializer_list<T> ilist) {
    insert(ilist.begin(), ilist.end());
  }

  void insert(std::initializer_list<init_type> ilist) {
    insert(ilist.begin(), ilist.end());
  }
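  // Editorial usage sketch (illustrative, not original text): these overloads
  // back the public SwissTable containers, e.g. absl::flat_hash_set:
  //
  //   absl::flat_hash_set<std::string> s;
  //   s.insert("a");                 // insert(T&&), decomposed via emplace()
  //   s.insert({"b", "c"});          // insert(std::initializer_list<T>)
  //   std::vector<std::string> v = {"d"};
  //   s.insert(v.begin(), v.end());  // insert(InputIt, InputIt)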
  insert_return_type insert(node_type&& node) {
    if (!node) return {end(), false, node_type()};
    auto res = PolicyTraits::apply(
        InsertSlot<false>{*this, std::move(*CommonAccess::GetSlot(node))},
        PolicyTraits::element(CommonAccess::GetSlot(node)));
    if (res.second) {
      CommonAccess::Reset(&node);
      return {res.first, true, node_type()};
    }
    return {res.first, false, std::move(node)};
  }

  iterator insert(const_iterator, node_type&& node) {
    auto res = insert(std::move(node));
    node = std::move(res.node);
    return res.position;
  }
  // emplace: when the arguments can be decomposed into a key and a mapped
  // value, the key is looked up before any value_type is constructed.
  template <class... Args, typename std::enable_if<
                               IsDecomposable<Args...>::value, int>::type = 0>
  std::pair<iterator, bool> emplace(Args&&... args) {
    return PolicyTraits::apply(EmplaceDecomposable{*this},
                               std::forward<Args>(args)...);
  }

  // Otherwise, construct a value_type eagerly, then move it in or destroy it.
  template <class... Args, typename std::enable_if<
                               !IsDecomposable<Args...>::value, int>::type = 0>
  std::pair<iterator, bool> emplace(Args&&... args) {
    alignas(slot_type) unsigned char raw[sizeof(slot_type)];
    slot_type* slot = reinterpret_cast<slot_type*>(&raw);

    PolicyTraits::construct(&alloc_ref(), slot, std::forward<Args>(args)...);
    const auto& elem = PolicyTraits::element(slot);
    return PolicyTraits::apply(InsertSlot<true>{*this, std::move(*slot)}, elem);
  }

  template <class... Args>
  iterator emplace_hint(const_iterator, Args&&... args) {
    return emplace(std::forward<Args>(args)...).first;
  }
  // `constructor` is the one-shot callback handed to lazy_emplace's functor;
  // invoking it constructs the new element in the reserved slot.
  class constructor {
    friend class raw_hash_set;

   public:
    template <class... Args>
    void operator()(Args&&... args) const {
      assert(*slot_);
      PolicyTraits::construct(alloc_, *slot_, std::forward<Args>(args)...);
      *slot_ = nullptr;
    }

   private:
    constructor(allocator_type* a, slot_type** slot) : alloc_(a), slot_(slot) {}

    allocator_type* alloc_;
    slot_type** slot_;
  };

  // If `key` is absent, invokes `f` with a `constructor` that must be used to
  // construct the new element.
  template <class K = key_type, class F>
  iterator lazy_emplace(const key_arg<K>& key, F&& f) {
    auto res = find_or_prepare_insert(key);
    if (res.second) {
      slot_type* slot = slots_ + res.first;
      std::forward<F>(f)(constructor(&alloc_ref(), &slot));
      assert(!slot);
    }
    return iterator_at(res.first);
  }
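  // Editorial usage sketch (illustrative, not original text): through
  // absl::flat_hash_set, lazy_emplace defers construction until the key is
  // known to be absent:
  //
  //   absl::flat_hash_set<std::string> s;
  //   s.lazy_emplace("key", [](const auto& ctor) { ctor("key"); });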
  template <class K = key_type>
  size_type erase(const key_arg<K>& key) {
    auto it = find(key);
    if (it == end()) return 0;
    erase(it);
    return 1;
  }

  void erase(const_iterator cit) { erase(cit.inner_); }

  // This overload is necessary because otherwise erase<K>(const K&) would be
  // a better match when a non-const iterator is passed as an argument.
  void erase(iterator it) {
    ABSL_INTERNAL_ASSERT_IS_FULL(it.ctrl_,
                                 "erase() called on invalid iterator.");
    PolicyTraits::destroy(&alloc_ref(), it.slot_);
    erase_meta_only(it);
  }

  iterator erase(const_iterator first, const_iterator last) {
    while (first != last) {
      erase(first++);
    }
    return last.inner_;
  }
  // Moves elements from `src` into `*this`; elements whose keys already exist
  // here stay in `src`.
  template <typename H, typename E>
  void merge(raw_hash_set<Policy, H, E, Alloc>& src) {  // NOLINT
    assert(this != &src);
    for (auto it = src.begin(), e = src.end(); it != e;) {
      auto next = std::next(it);
      if (PolicyTraits::apply(InsertSlot<false>{*this, std::move(*it.slot_)},
                              PolicyTraits::element(it.slot_))
              .second) {
        src.erase_meta_only(it);
      }
      it = next;
    }
  }

  template <typename H, typename E>
  void merge(raw_hash_set<Policy, H, E, Alloc>&& src) {
    merge(src);
  }

  node_type extract(const_iterator position) {
    ABSL_INTERNAL_ASSERT_IS_FULL(position.inner_.ctrl_,
                                 "extract() called on invalid iterator.");
    auto node =
        CommonAccess::Transfer<node_type>(alloc_ref(), position.inner_.slot_);
    erase_meta_only(position);
    return node;
  }
  void swap(raw_hash_set& that) noexcept(
      IsNoThrowSwappable<hasher>() && IsNoThrowSwappable<key_equal>() &&
      IsNoThrowSwappable<allocator_type>(
          typename AllocTraits::propagate_on_container_swap{})) {
    using std::swap;
    swap(ctrl_, that.ctrl_);
    swap(slots_, that.slots_);
    swap(size_, that.size_);
    swap(capacity_, that.capacity_);
    swap(growth_left(), that.growth_left());
    swap(hash_ref(), that.hash_ref());
    swap(eq_ref(), that.eq_ref());
    swap(infoz(), that.infoz());
    SwapAlloc(alloc_ref(), that.alloc_ref(),
              typename AllocTraits::propagate_on_container_swap{});
  }
  void rehash(size_t n) {
    if (n == 0 && capacity_ == 0) return;
    if (n == 0 && size_ == 0) {
      destroy_slots();
      infoz().RecordStorageChanged(0, 0);
      infoz().RecordClearedReservation();
      return;
    }

    // bitor is a faster way of doing `max` here: we round up to the next
    // power-of-2-minus-1 anyway, so bitor is good enough.
    auto m = NormalizeCapacity(n | GrowthToLowerboundCapacity(size()));
    // n == 0 unconditionally rehashes as per the standard.
    if (n == 0 || m > capacity_) {
      resize(m);
      // After resize, to ensure the allocation completed and the table was
      // potentially sampled.
      infoz().RecordReservation(n);
    }
  }

  void reserve(size_t n) {
    if (n > size() + growth_left()) {
      size_t m = GrowthToLowerboundCapacity(n);
      resize(NormalizeCapacity(m));
      infoz().RecordReservation(n);
    }
  }
  template <class K = key_type>
  size_t count(const key_arg<K>& key) const {
    return find(key) == end() ? 0 : 1;
  }

  // Issues CPU prefetch instructions for the memory needed to find `key`.
  template <class K = key_type>
  void prefetch(const key_arg<K>& key) const {
    (void)key;
#ifdef ABSL_INTERNAL_HAVE_PREFETCH
    prefetch_heap_block();
    auto seq = probe(ctrl_, hash_ref()(key), capacity_);
    base_internal::PrefetchT0(ctrl_ + seq.offset());
    base_internal::PrefetchT0(slots_ + seq.offset());
#endif  // ABSL_INTERNAL_HAVE_PREFETCH
  }
  template <class K = key_type>
  iterator find(const key_arg<K>& key, size_t hash) {
    auto seq = probe(ctrl_, hash, capacity_);
    while (true) {
      Group g{ctrl_ + seq.offset()};
      for (uint32_t i : g.Match(H2(hash))) {
        if (ABSL_PREDICT_TRUE(PolicyTraits::apply(
                EqualElement<K>{key, eq_ref()},
                PolicyTraits::element(slots_ + seq.offset(i)))))
          return iterator_at(seq.offset(i));
      }
      if (ABSL_PREDICT_TRUE(g.MaskEmpty())) return end();
      seq.next();
      assert(seq.index() <= capacity_ && "full table!");
    }
  }
  template <class K = key_type>
  iterator find(const key_arg<K>& key) {
    prefetch_heap_block();
    return find(key, hash_ref()(key));
  }

  template <class K = key_type>
  const_iterator find(const key_arg<K>& key, size_t hash) const {
    return const_cast<raw_hash_set*>(this)->find(key, hash);
  }
  template <class K = key_type>
  const_iterator find(const key_arg<K>& key) const {
    prefetch_heap_block();
    return find(key, hash_ref()(key));
  }
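  // Editorial usage sketch (illustrative, not original text): the K parameter
  // enables heterogeneous lookup when hasher and key_equal are transparent,
  // as they are for the default absl functors:
  //
  //   absl::flat_hash_set<std::string> s = {"key"};
  //   s.find(absl::string_view("key"));  // no temporary std::string built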
  template <class K = key_type>
  bool contains(const key_arg<K>& key) const {
    return find(key) != end();
  }

  template <class K = key_type>
  std::pair<iterator, iterator> equal_range(const key_arg<K>& key) {
    auto it = find(key);
    if (it != end()) return {it, std::next(it)};
    return {it, it};
  }
  template <class K = key_type>
  std::pair<const_iterator, const_iterator> equal_range(
      const key_arg<K>& key) const {
    auto it = find(key);
    if (it != end()) return {it, std::next(it)};
    return {it, it};
  }
  friend bool operator==(const raw_hash_set& a, const raw_hash_set& b) {
    if (a.size() != b.size()) return false;
    const raw_hash_set* outer = &a;
    const raw_hash_set* inner = &b;
    if (outer->capacity() > inner->capacity()) std::swap(outer, inner);
    for (const value_type& elem : *outer)
      if (!inner->has_element(elem)) return false;
    return true;
  }

  friend bool operator!=(const raw_hash_set& a, const raw_hash_set& b) {
    return !(a == b);
  }
  template <typename H>
  friend typename std::enable_if<H::template is_hashable<value_type>::value,
                                 H>::type
  AbslHashValue(H h, const raw_hash_set& s) {
    return H::combine(H::combine_unordered(std::move(h), s.begin(), s.end()),
                      s.size());
  }

  friend void swap(raw_hash_set& a,
                   raw_hash_set& b) noexcept(noexcept(a.swap(b))) {
    a.swap(b);
  }
 private:
  template <class Container, typename Enabler>
  friend struct absl::container_internal::hashtable_debug_internal::
      HashtableDebugAccess;
  struct FindElement {
    template <class K, class... Args>
    const_iterator operator()(const K& key, Args&&...) const {
      return s.find(key);
    }
    const raw_hash_set& s;
  };

  struct HashElement {
    template <class K, class... Args>
    size_t operator()(const K& key, Args&&...) const {
      return h(key);
    }
    const hasher& h;
  };

  template <class K1>
  struct EqualElement {
    template <class K2, class... Args>
    bool operator()(const K2& lhs, Args&&...) const {
      return eq(lhs, rhs);
    }
    const K1& rhs;
    const key_equal& eq;
  };
  struct EmplaceDecomposable {
    template <class K, class... Args>
    std::pair<iterator, bool> operator()(const K& key, Args&&... args) const {
      auto res = s.find_or_prepare_insert(key);
      if (res.second) {
        s.emplace_at(res.first, std::forward<Args>(args)...);
      }
      return {s.iterator_at(res.first), res.second};
    }
    raw_hash_set& s;
  };
  template <bool do_destroy>
  struct InsertSlot {
    template <class K, class... Args>
    std::pair<iterator, bool> operator()(const K& key, Args&&...) && {
      auto res = s.find_or_prepare_insert(key);
      if (res.second) {
        PolicyTraits::transfer(&s.alloc_ref(), s.slots_ + res.first, &slot);
      } else if (do_destroy) {
        PolicyTraits::destroy(&s.alloc_ref(), &slot);
      }
      return {s.iterator_at(res.first), res.second};
    }
    raw_hash_set& s;
    // Constructed slot; either transferred into the table or destroyed.
    slot_type&& slot;
  };
  // Removes the element's metadata without destroying the element itself,
  // deciding between a tombstone (kDeleted) and a plain kEmpty.
  void erase_meta_only(const_iterator it) {
    assert(IsFull(*it.inner_.ctrl_) && "erasing a dangling iterator");
    --size_;
    const size_t index = static_cast<size_t>(it.inner_.ctrl_ - ctrl_);
    const size_t index_before = (index - Group::kWidth) & capacity_;
    const auto empty_after = Group(it.inner_.ctrl_).MaskEmpty();
    const auto empty_before = Group(ctrl_ + index_before).MaskEmpty();

    // Count the consecutive non-empties to the left and right of `it`; if the
    // sum reaches kWidth, some probe window may have seen a full group, so a
    // tombstone is required.
    bool was_never_full =
        empty_before && empty_after &&
        static_cast<size_t>(empty_after.TrailingZeros() +
                            empty_before.LeadingZeros()) < Group::kWidth;

    SetCtrl(index, was_never_full ? ctrl_t::kEmpty : ctrl_t::kDeleted,
            capacity_, ctrl_, slots_, sizeof(slot_type));
    growth_left() += was_never_full;
    infoz().RecordErase();
  }
  void initialize_slots() {
    assert(capacity_);
    // Sample for hashtablez only when the default allocator is used and there
    // is no backing array yet.
    if (std::is_same<SlotAlloc, std::allocator<slot_type>>::value &&
        slots_ == nullptr) {
      infoz() = Sample(sizeof(slot_type));
    }

    char* mem = static_cast<char*>(Allocate<alignof(slot_type)>(
        &alloc_ref(),
        AllocSize(capacity_, sizeof(slot_type), alignof(slot_type))));
    ctrl_ = reinterpret_cast<ctrl_t*>(mem);
    slots_ = reinterpret_cast<slot_type*>(
        mem + SlotOffset(capacity_, alignof(slot_type)));
    ResetCtrl(capacity_, ctrl_, slots_, sizeof(slot_type));
    reset_growth_left();
    infoz().RecordStorageChanged(size_, capacity_);
  }
  void destroy_slots() {
    if (!capacity_) return;
    // ... destroy each full slot ...
    Deallocate<alignof(slot_type)>(
        &alloc_ref(), ctrl_,
        AllocSize(capacity_, sizeof(slot_type), alignof(slot_type)));
    // ... reset members to the empty state ...
  }

  void resize(size_t new_capacity) {
    assert(IsValidCapacity(new_capacity));
    auto* old_ctrl = ctrl_;
    auto* old_slots = slots_;
    const size_t old_capacity = capacity_;
    capacity_ = new_capacity;
    initialize_slots();

    size_t total_probe_length = 0;
    for (size_t i = 0; i != old_capacity; ++i) {
      if (IsFull(old_ctrl[i])) {
        size_t hash = PolicyTraits::apply(
            HashElement{hash_ref()}, PolicyTraits::element(old_slots + i));
        auto target = find_first_non_full(ctrl_, hash, capacity_);
        size_t new_i = target.offset;
        total_probe_length += target.probe_length;
        SetCtrl(new_i, H2(hash), capacity_, ctrl_, slots_, sizeof(slot_type));
        PolicyTraits::transfer(&alloc_ref(), slots_ + new_i, old_slots + i);
      }
    }
    if (old_capacity) {
      SanitizerUnpoisonMemoryRegion(old_slots,
                                    sizeof(slot_type) * old_capacity);
      Deallocate<alignof(slot_type)>(
          &alloc_ref(), old_ctrl,
          AllocSize(old_capacity, sizeof(slot_type), alignof(slot_type)));
    }
    infoz().RecordRehash(total_probe_length);
  }
  // Rehashes in place, turning tombstones back into empties without growing.
  void drop_deletes_without_resize() ABSL_ATTRIBUTE_NOINLINE {
    assert(IsValidCapacity(capacity_));
    assert(!is_small(capacity_));
    ConvertDeletedToEmptyAndFullToDeleted(ctrl_, capacity_);
    size_t total_probe_length = 0;
    for (size_t i = 0; i != capacity_; ++i) {
      if (!IsDeleted(ctrl_[i])) continue;
      const size_t hash = PolicyTraits::apply(
          HashElement{hash_ref()}, PolicyTraits::element(slots_ + i));
      const FindInfo target = find_first_non_full(ctrl_, hash, capacity_);
      const size_t new_i = target.offset;
      total_probe_length += target.probe_length;

      // If the old and new positions fall in the same probe group, the
      // element does not need to move.
      const size_t probe_offset = probe(ctrl_, hash, capacity_).offset();
      const auto probe_index = [probe_offset, this](size_t pos) {
        return ((pos - probe_offset) & capacity_) / Group::kWidth;
      };
      if (ABSL_PREDICT_TRUE(probe_index(new_i) == probe_index(i))) {
        SetCtrl(i, H2(hash), capacity_, ctrl_, slots_, sizeof(slot_type));
        continue;
      }
      // ... otherwise transfer into an empty target, or swap with a deleted
      // one and reprocess the displaced element ...
    }
    reset_growth_left();
    infoz().RecordRehash(total_probe_length);
  }
  void rehash_and_grow_if_necessary() {
    if (capacity_ == 0) {
      resize(1);
    } else if (capacity_ > Group::kWidth &&
               size() * uint64_t{32} <= capacity_ * uint64_t{25}) {
      // Squash DELETED without growing if there is enough capacity.
      drop_deletes_without_resize();
    } else {
      // Otherwise grow the container.
      resize(capacity_ * 2 + 1);
    }
  }

  bool has_element(const value_type& elem) const {
    size_t hash = PolicyTraits::apply(HashElement{hash_ref()}, elem);
    auto seq = probe(ctrl_, hash, capacity_);
    while (true) {
      Group g{ctrl_ + seq.offset()};
      for (uint32_t i : g.Match(H2(hash))) {
        if (ABSL_PREDICT_TRUE(PolicyTraits::element(slots_ + seq.offset(i)) ==
                              elem))
          return true;
      }
      if (ABSL_PREDICT_TRUE(g.MaskEmpty())) return false;
      seq.next();
      assert(seq.index() <= capacity_ && "full table!");
    }
    return false;
  }
  template <class K>
  std::pair<size_t, bool> find_or_prepare_insert(const K& key) {
    prefetch_heap_block();
    auto hash = hash_ref()(key);
    auto seq = probe(ctrl_, hash, capacity_);
    while (true) {
      Group g{ctrl_ + seq.offset()};
      for (uint32_t i : g.Match(H2(hash))) {
        if (ABSL_PREDICT_TRUE(PolicyTraits::apply(
                EqualElement<K>{key, eq_ref()},
                PolicyTraits::element(slots_ + seq.offset(i)))))
          return {seq.offset(i), false};
      }
      if (ABSL_PREDICT_TRUE(g.MaskEmpty())) break;
      seq.next();
      assert(seq.index() <= capacity_ && "full table!");
    }
    return {prepare_insert(hash), true};
  }
  size_t prepare_insert(size_t hash) ABSL_ATTRIBUTE_NOINLINE {
    auto target = find_first_non_full(ctrl_, hash, capacity_);
    if (ABSL_PREDICT_FALSE(growth_left() == 0 &&
                           !IsDeleted(ctrl_[target.offset]))) {
      rehash_and_grow_if_necessary();
      target = find_first_non_full(ctrl_, hash, capacity_);
    }
    ++size_;
    growth_left() -= IsEmpty(ctrl_[target.offset]);
    SetCtrl(target.offset, H2(hash), capacity_, ctrl_, slots_,
            sizeof(slot_type));
    infoz().RecordInsert(hash, target.probe_length);
    return target.offset;
  }
  // Constructs the value in the slot at position `i` returned by
  // find_or_prepare_insert.
  template <class... Args>
  void emplace_at(size_t i, Args&&... args) {
    PolicyTraits::construct(&alloc_ref(), slots_ + i,
                            std::forward<Args>(args)...);

    assert(PolicyTraits::apply(FindElement{*this}, *iterator_at(i)) ==
               iterator_at(i) &&
           "constructed value does not match the lookup key");
  }

  // ...

  allocator_type& alloc_ref() { return settings_.template get<4>(); }

  // ... data members (ctrl_, slots_, size_, capacity_, settings_) ...
};
// Erases all elements of `c` that satisfy the predicate `pred`, returning the
// number of erased elements.
template <typename P, typename H, typename E, typename A, typename Predicate>
typename raw_hash_set<P, H, E, A>::size_type EraseIf(
    Predicate& pred, raw_hash_set<P, H, E, A>* c) {
  const auto initial_size = c->size();
  for (auto it = c->begin(), last = c->end(); it != last;) {
    if (pred(*it)) {
      c->erase(it++);
    } else {
      ++it;
    }
  }
  return initial_size - c->size();
}
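// Editorial usage sketch (illustrative, not original text): the public
// absl::erase_if overloads for the SwissTable containers forward to EraseIf:
//
//   absl::flat_hash_set<int> s = {1, 2, 3, 4};
//   absl::erase_if(s, [](int v) { return v % 2 == 0; });  // removes 2 and 4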
namespace hashtable_debug_internal {
template <typename Set>
struct HashtableDebugAccess<Set, absl::void_t<typename Set::raw_hash_set>> {
  using Traits = typename Set::PolicyTraits;
  using Slot = typename Traits::slot_type;

  static size_t GetNumProbes(const Set& set,
                             const typename Set::key_type& key) {
    size_t num_probes = 0;
    size_t hash = set.hash_ref()(key);
    auto seq = probe(set.ctrl_, hash, set.capacity_);
    while (true) {
      container_internal::Group g{set.ctrl_ + seq.offset()};
      for (uint32_t i : g.Match(container_internal::H2(hash))) {
        if (Traits::apply(
                typename Set::template EqualElement<typename Set::key_type>{
                    key, set.eq_ref()},
                Traits::element(set.slots_ + seq.offset(i))))
          return num_probes;
        ++num_probes;
      }
      if (g.MaskEmpty()) return num_probes;
      seq.next();
      ++num_probes;
    }
  }

  static size_t AllocatedByteSize(const Set& c) {
    size_t capacity = c.capacity_;
    if (capacity == 0) return 0;
    size_t m = AllocSize(capacity, sizeof(Slot), alignof(Slot));

    size_t per_slot = Traits::space_used(static_cast<const Slot*>(nullptr));
    if (per_slot != ~size_t{}) {
      m += per_slot * c.size();
    } else {
      for (size_t i = 0; i != capacity; ++i) {
        if (container_internal::IsFull(c.ctrl_[i])) {
          m += Traits::space_used(c.slots_ + i);
        }
      }
    }
    return m;
  }

  static size_t LowerBoundAllocatedByteSize(size_t size) {
    size_t capacity = GrowthToLowerboundCapacity(size);
    if (capacity == 0) return 0;
    size_t m =
        AllocSize(NormalizeCapacity(capacity), sizeof(Slot), alignof(Slot));
    size_t per_slot = Traits::space_used(static_cast<const Slot*>(nullptr));
    if (per_slot != ~size_t{}) {
      m += per_slot * size;
    }
    return m;
  }
};
}  // namespace hashtable_debug_internal
}  // namespace container_internal
ABSL_NAMESPACE_END
}  // namespace absl

#undef ABSL_INTERNAL_ASSERT_IS_FULL

#endif  // ABSL_CONTAINER_INTERNAL_RAW_HASH_SET_H_