91 #ifndef ABSL_CONTAINER_INTERNAL_RAW_HASH_SET_H_ 92 #define ABSL_CONTAINER_INTERNAL_RAW_HASH_SET_H_ 102 #include <type_traits> 121 namespace container_internal {
123 template <
size_t W
idth>
127 assert(((mask + 1) & mask) == 0 &&
"not a mask");
148 template <
class ContainerKey,
class Hash,
class Eq>
150 template <
class PassedKey,
class... Args>
152 decltype(std::declval<const Hash&>()(std::declval<const PassedKey&>())),
153 decltype(std::declval<const Eq&>()(std::declval<const ContainerKey&>(),
154 std::declval<const PassedKey&>()))>*
155 operator()(
const PassedKey&,
const Args&...)
const;
158 template <
class E,
class Policy,
class Hash,
class Eq,
class... Ts>
161 template <
class Policy,
class Hash,
class Eq,
class... Ts>
164 Policy::apply(RequireUsableKey<typename Policy::key_type, Hash, Eq>(),
165 std::declval<Ts>()...))>,
166 Policy, Hash, Eq, Ts...> : std::true_type {};
172 return noexcept(
swap(std::declval<T&>(), std::declval<T&>()));
175 template <
typename T>
178 static_cast<uint64_t>(x))
180 static_cast<uint32_t>(x));
183 template <
typename T>
185 return sizeof(T) == 8
199 template <
class T,
int SignificantBits,
int Shift = 0>
202 static_assert(Shift == 0 || Shift == 3,
"");
234 constexpr
int total_significant_bits = SignificantBits << Shift;
235 constexpr
int extra_bits =
sizeof(T) * 8 - total_significant_bits;
262 "Special markers need to have the MSB to make checking for them efficient");
264 "kEmpty and kDeleted must be smaller than kSentinel to make the " 265 "SIMD test of IsEmptyOrDeleted() efficient");
267 "kSentinel must be -1 to elide loading it from memory into SIMD " 268 "registers (pcmpeqd xmm, xmm)");
269 static_assert(
kEmpty == -128,
270 "kEmpty must be -128 to make the SIMD check for its " 271 "existence efficient (psignb xmm, xmm)");
273 "kEmpty and kDeleted must share an unset bit that is not shared " 274 "by kSentinel to make the scalar test for MatchEmptyOrDeleted() " 277 "kDeleted must be -2 to make the implementation of " 278 "ConvertSpecialToEmptyAndFullToDeleted efficient");
283 alignas(16)
static constexpr
ctrl_t empty_group[] = {
286 return const_cast<ctrl_t*
>(empty_group);
// Fragment: body of HashSeed(ctrl).  Derives a per-table value from the
// address of the control array; `>> 12` discards the low (alignment /
// page-offset) bits, which carry little entropy.  Used by H1 (below) to
// salt the probe sequence.
// NOTE(review): the function signature is not visible in this extraction
// fragment — only the return statement survived.
301 return reinterpret_cast<uintptr_t
>(ctrl) >> 12;
// H1: the high portion of the hash (everything above the low 7 bits, which
// become H2) used to pick the starting probe group.  It is XOR-mixed with a
// per-table seed derived from the ctrl pointer so distinct tables probe in
// different orders for the same key.
// NOTE(review): the closing brace of this definition was dropped by the
// extraction; the body itself is complete.
304 inline size_t H1(
size_t hash,
const ctrl_t* ctrl) {
305 return (hash >> 7) ^
HashSeed(ctrl);
307 inline ctrl_t H2(
size_t hash) {
return hash & 0x7F; }
// Fragment: lane-wise signed comparison a > b over 16 bytes.
// On GCC (non-Clang) the result is computed without _mm_cmpgt_epi8:
// the signed saturating subtraction (b - a) is negative exactly when
// a > b, so masking its sign bit with 0x80 and comparing for equality
// against 0x80 reproduces the all-ones/all-zeros comparison mask.
// NOTE(review): presumably this works around GCC mishandling
// _mm_cmpgt_epi8 when char is unsigned (-funsigned-char) — confirm
// against upstream history.  The #else/#endif lines of this definition
// were partially lost in the extraction.
314 #if SWISSTABLE_HAVE_SSE2 321 inline __m128i _mm_cmpgt_epi8_fixed(__m128i
a, __m128i
b) {
322 #if defined(__GNUC__) && !defined(__clang__) 324 const __m128i mask = _mm_set1_epi8(0x80);
325 const __m128i diff = _mm_subs_epi8(b, a);
326 return _mm_cmpeq_epi8(_mm_and_si128(diff, mask), mask);
// Non-workaround path: use the intrinsic directly.
329 return _mm_cmpgt_epi8(a, b);
332 struct GroupSse2Impl {
333 static constexpr
size_t kWidth = 16;
335 explicit GroupSse2Impl(
const ctrl_t* pos) {
336 ctrl = _mm_loadu_si128(reinterpret_cast<const __m128i*>(pos));
341 auto match = _mm_set1_epi8(hash);
343 _mm_movemask_epi8(_mm_cmpeq_epi8(match, ctrl)));
348 #if SWISSTABLE_HAVE_SSSE3 351 _mm_movemask_epi8(_mm_sign_epi8(ctrl, ctrl)));
361 _mm_movemask_epi8(_mm_cmpgt_epi8_fixed(special, ctrl)));
365 uint32_t CountLeadingEmptyOrDeleted()
const {
368 _mm_movemask_epi8(_mm_cmpgt_epi8_fixed(special, ctrl)) + 1);
371 void ConvertSpecialToEmptyAndFullToDeleted(
ctrl_t* dst)
const {
372 auto msbs = _mm_set1_epi8(static_cast<char>(-128));
373 auto x126 = _mm_set1_epi8(126);
374 #if SWISSTABLE_HAVE_SSSE3 375 auto res = _mm_or_si128(_mm_shuffle_epi8(x126, ctrl), msbs);
377 auto zero = _mm_setzero_si128();
378 auto special_mask = _mm_cmpgt_epi8_fixed(zero, ctrl);
379 auto res = _mm_or_si128(msbs, _mm_andnot_si128(special_mask, x126));
381 _mm_storeu_si128(reinterpret_cast<__m128i*>(dst), res);
386 #endif // SWISSTABLE_HAVE_SSE2 389 static constexpr
size_t kWidth = 8;
392 : ctrl(little_endian::
Load64(pos)) {}
408 constexpr uint64_t msbs = 0x8080808080808080ULL;
409 constexpr uint64_t lsbs = 0x0101010101010101ULL;
410 auto x = ctrl ^ (lsbs *
hash);
415 constexpr uint64_t msbs = 0x8080808080808080ULL;
420 constexpr uint64_t msbs = 0x8080808080808080ULL;
425 constexpr uint64_t gaps = 0x00FEFEFEFEFEFEFEULL;
426 return (
TrailingZeros(((~ctrl & (ctrl >> 7)) | gaps) + 1) + 7) >> 3;
430 constexpr uint64_t msbs = 0x8080808080808080ULL;
431 constexpr uint64_t lsbs = 0x0101010101010101ULL;
432 auto x = ctrl & msbs;
433 auto res = (~x + (x >> 7)) & ~lsbs;
440 #if SWISSTABLE_HAVE_SSE2 441 using Group = GroupSse2Impl;
446 template <
class Policy,
class Hash,
class Eq,
class Alloc>
460 ctrl_t* ctrl,
size_t capacity) {
485 return capacity - capacity / 8;
495 return growth +
static_cast<size_t>((
static_cast<int64_t
>(growth) - 1) / 7);
515 template <
class Policy,
class Hash,
class Eq,
class Alloc>
546 using key_arg =
typename KeyArgImpl::template type<K, key_type>;
550 auto KeyTypeCanBeHashed(
const Hash& h,
const key_type& k) -> decltype(h(k));
551 auto KeyTypeCanBeEq(
const Eq&
eq,
const key_type& k) -> decltype(
eq(k, k));
567 "Policy::element() must return a reference");
569 template <
typename T>
571 : std::is_same<typename std::remove_cv<
572 typename std::remove_reference<reference>::type>::type,
573 typename std::remove_cv<
574 typename std::remove_reference<T>::type>::type> {};
593 template <
class... Ts>
598 "Allocators with custom pointer types are not supported");
600 "Allocators with custom pointer types are not supported");
626 skip_empty_or_deleted();
733 template <
class InputIter>
741 template <
class InputIter>
746 template <
class InputIter>
751 template <
class InputIter>
776 template <
class T, RequiresNotInit<T> = 0, RequiresInsertable<T> = 0>
782 raw_hash_set(std::initializer_list<init_type> init,
size_t bucket_count = 0,
787 template <
class T, RequiresNotInit<T> = 0, RequiresInsertable<T> = 0>
792 raw_hash_set(std::initializer_list<init_type> init,
size_t bucket_count,
796 template <
class T, RequiresNotInit<T> = 0, RequiresInsertable<T> = 0>
801 raw_hash_set(std::initializer_list<init_type> init,
size_t bucket_count,
805 template <
class T, RequiresNotInit<T> = 0, RequiresInsertable<T> = 0>
815 that.alloc_ref())) {}
819 reserve(that.
size());
822 for (
const auto&
v : that) {
824 auto target = find_first_non_full(hash);
825 set_ctrl(target.offset,
H2(hash));
826 emplace_at(target.offset,
v);
827 infoz_.RecordInsert(hash, target.probe_length);
830 growth_left() -= that.size();
838 slots_(absl::
exchange(that.slots_,
nullptr)),
845 settings_(that.settings_) {
847 that.growth_left() = 0;
855 settings_(0, that.hash_ref(), that.eq_ref(), a) {
856 if (a == that.alloc_ref()) {
861 std::swap(growth_left(), that.growth_left());
864 reserve(that.size());
867 for (
auto& elem : that) insert(
std::move(elem));
894 auto it = iterator_at(0);
895 it.skip_empty_or_deleted();
910 size_t max_size()
const {
return (std::numeric_limits<size_t>::max)(); }
933 infoz_.RecordStorageChanged(0,
capacity_);
941 template <
class T, RequiresInsertable<T> = 0,
942 typename std::enable_if<IsDecomposable<T>::value,
int>::type = 0,
945 return emplace(std::forward<T>(
value));
966 return emplace(value);
978 template <
class T, RequiresInsertable<T> = 0,
979 typename std::enable_if<IsDecomposable<T>::value,
int>::type = 0,
982 return insert(std::forward<T>(
value)).first;
989 class T, RequiresInsertable<T> = 0,
992 return insert(value).first;
999 template <
class InputIt>
1001 for (; first != last; ++first) insert(*first);
1004 template <
class T, RequiresNotInit<T> = 0, RequiresInsertable<const T&> = 0>
1005 void insert(std::initializer_list<T> ilist) {
1006 insert(ilist.begin(), ilist.end());
1009 void insert(std::initializer_list<init_type> ilist) {
1010 insert(ilist.begin(), ilist.end());
1023 return {res.first,
false,
std::move(node)};
1040 template <
class... Args,
typename std::enable_if<
1042 std::pair<iterator, bool>
emplace(Args&&... args) {
1044 std::forward<Args>(args)...);
1050 template <
class... Args,
typename std::enable_if<
1052 std::pair<iterator, bool>
emplace(Args&&... args) {
1053 typename std::aligned_storage<sizeof(slot_type), alignof(slot_type)>::type
1055 slot_type* slot =
reinterpret_cast<slot_type*
>(&raw);
1062 template <
class... Args>
1064 return emplace(std::forward<Args>(args)...).first;
1093 template <
class... Args>
1107 template <
class K = key_type,
class F>
1109 auto res = find_or_prepare_insert(key);
1111 slot_type* slot = slots_ + res.first;
1112 std::forward<F>(f)(constructor(&alloc_ref(), &slot));
1115 return iterator_at(res.first);
1127 template <
class K = key_type>
1129 auto it = find(key);
1130 if (it ==
end())
return 0;
1152 assert(it !=
end());
1154 erase_meta_only(it);
1157 iterator
erase(const_iterator first, const_iterator last) {
1158 while (first != last) {
1166 template <
typename H,
typename E>
1168 assert(
this != &src);
1169 for (
auto it = src.
begin(), e = src.
end(); it != e; ++it) {
1178 template <
typename H,
typename E>
1185 CommonAccess::Make<node_type>(alloc_ref(), position.inner_.slot_);
1186 erase_meta_only(position);
1194 auto it = find(key);
1195 return it ==
end() ?
node_type() : extract(const_iterator{it});
1199 IsNoThrowSwappable<hasher>() && IsNoThrowSwappable<key_equal>() &&
1201 IsNoThrowSwappable<allocator_type>())) {
1203 swap(ctrl_, that.ctrl_);
1204 swap(slots_, that.slots_);
1207 swap(growth_left(), that.growth_left());
1208 swap(hash_ref(), that.hash_ref());
1209 swap(eq_ref(), that.eq_ref());
1210 swap(infoz_, that.infoz_);
1212 swap(alloc_ref(), that.alloc_ref());
1221 if (n == 0 &&
size_ == 0) {
1223 infoz_.RecordStorageChanged(0, 0);
1246 template <
class K = key_type>
1248 return find(key) ==
end() ? 0 : 1;
1256 template <
class K = key_type>
1259 #if defined(__GNUC__) 1260 auto seq = probe(hash_ref()(key));
1261 __builtin_prefetch(static_cast<const void*>(ctrl_ + seq.offset()));
1262 __builtin_prefetch(static_cast<const void*>(slots_ + seq.offset()));
1273 template <
class K = key_type>
1275 auto seq = probe(hash);
1277 Group g{ctrl_ + seq.offset()};
1278 for (
int i : g.Match(
H2(hash))) {
1280 EqualElement<K>{key, eq_ref()},
1282 return iterator_at(seq.offset(
i));
1288 template <
class K = key_type>
1290 return find(key, hash_ref()(key));
1293 template <
class K = key_type>
1295 return const_cast<raw_hash_set*
>(
this)->find(key, hash);
1297 template <
class K = key_type>
1299 return find(key, hash_ref()(key));
1302 template <
class K = key_type>
1304 return find(key) !=
end();
1307 template <
class K = key_type>
1309 auto it = find(key);
1313 template <
class K = key_type>
1316 auto it = find(key);
1335 if (a.
size() != b.
size())
return false;
1354 template <
class Container,
typename Enabler>
1356 HashtableDebugAccess;
1359 template <
class K,
class... Args>
1367 template <
class K,
class... Args>
1376 template <
class K2,
class... Args>
1378 return eq(lhs, rhs);
1385 template <
class K,
class... Args>
1386 std::pair<iterator, bool>
operator()(
const K& key, Args&&... args)
const {
1387 auto res = s.find_or_prepare_insert(key);
1389 s.emplace_at(res.first, std::forward<Args>(args)...);
1391 return {s.iterator_at(res.first), res.second};
1396 template <
bool do_destroy>
1398 template <
class K,
class... Args>
1399 std::pair<iterator, bool>
operator()(
const K& key, Args&&...) && {
1400 auto res = s.find_or_prepare_insert(key);
1403 }
else if (do_destroy) {
1406 return {s.iterator_at(res.first), res.second};
1418 assert(
IsFull(*it.inner_.ctrl_) &&
"erasing a dangling iterator");
1420 const size_t index = it.inner_.ctrl_ - ctrl_;
1428 bool was_never_full =
1429 empty_before && empty_after &&
1430 static_cast<size_t>(empty_after.TrailingZeros() +
1434 growth_left() += was_never_full;
1435 infoz_.RecordErase();
1451 slots_ ==
nullptr) {
1456 char* mem =
static_cast<char*
>(
1457 Allocate<Layout::Alignment()>(&alloc_ref(), layout.AllocSize()));
1458 ctrl_ =
reinterpret_cast<ctrl_t*
>(layout.template Pointer<0>(mem));
1459 slots_ = layout.template Pointer<1>(mem);
1461 reset_growth_left();
1475 Deallocate<Layout::Alignment()>(&alloc_ref(), ctrl_, layout.AllocSize());
1485 auto* old_ctrl = ctrl_;
1486 auto* old_slots = slots_;
1491 size_t total_probe_length = 0;
1492 for (
size_t i = 0;
i != old_capacity; ++
i) {
1496 auto target = find_first_non_full(hash);
1497 size_t new_i = target.offset;
1498 total_probe_length += target.probe_length;
1499 set_ctrl(new_i,
H2(hash));
1505 sizeof(slot_type) * old_capacity);
1506 auto layout = MakeLayout(old_capacity);
1507 Deallocate<Layout::Alignment()>(&alloc_ref(), old_ctrl,
1508 layout.AllocSize());
1510 infoz_.RecordRehash(total_probe_length);
1515 assert(!is_small());
1533 typename std::aligned_storage<sizeof(slot_type), alignof(slot_type)>::type
1535 size_t total_probe_length = 0;
1536 slot_type* slot =
reinterpret_cast<slot_type*
>(&raw);
1541 auto target = find_first_non_full(hash);
1542 size_t new_i = target.offset;
1543 total_probe_length += target.probe_length;
1548 const auto probe_index = [&](
size_t pos) {
1554 set_ctrl(i,
H2(hash));
1561 set_ctrl(new_i,
H2(hash));
1566 set_ctrl(new_i,
H2(hash));
1575 reset_growth_left();
1576 infoz_.RecordRehash(total_probe_length);
1584 drop_deletes_without_resize();
1593 auto seq = probe(hash);
1595 Group g{ctrl_ + seq.offset()};
1596 for (
int i : g.Match(
H2(hash))) {
1603 assert(seq.index() <
capacity_ &&
"full table!");
1622 auto seq = probe(hash);
1624 Group g{ctrl_ + seq.offset()};
1627 #if !defined(NDEBUG) 1633 return {seq.offset(mask.HighestBitSet()), seq.index()};
1636 return {seq.offset(mask.LowestBitSet()), seq.index()};
1638 assert(seq.index() <
capacity_ &&
"full table!");
1658 auto hash = hash_ref()(key);
1659 auto seq = probe(hash);
1661 Group g{ctrl_ + seq.offset()};
1662 for (
int i : g.Match(
H2(hash))) {
1664 EqualElement<K>{key, eq_ref()},
1666 return {seq.offset(
i),
false};
1671 return {prepare_insert(hash),
true};
1675 auto target = find_first_non_full(hash);
1678 rehash_and_grow_if_necessary();
1679 target = find_first_non_full(hash);
1682 growth_left() -=
IsEmpty(ctrl_[target.offset]);
1683 set_ctrl(target.offset,
H2(hash));
1684 infoz_.RecordInsert(hash, target.probe_length);
1685 return target.offset;
1696 template <
class... Args>
1699 std::forward<Args>(args)...);
1703 "constructed value does not match the lookup key");
1767 return settings_.template get<3>();
1774 slot_type* slots_ =
nullptr;
1783 namespace hashtable_debug_internal {
1784 template <
typename Set>
1787 using Slot =
typename Traits::slot_type;
1790 const typename Set::key_type& key) {
1791 size_t num_probes = 0;
1792 size_t hash =
set.hash_ref()(key);
1793 auto seq =
set.probe(hash);
1798 typename Set::template EqualElement<typename Set::key_type>{
1804 if (g.MatchEmpty())
return num_probes;
1811 size_t capacity = c.capacity_;
1812 if (capacity == 0)
return 0;
1813 auto layout = Set::MakeLayout(capacity);
1814 size_t m = layout.AllocSize();
1816 size_t per_slot = Traits::space_used(static_cast<const Slot*>(
nullptr));
1817 if (per_slot != ~
size_t{}) {
1818 m += per_slot * c.size();
1820 for (
size_t i = 0;
i != capacity; ++
i) {
1822 m += Traits::space_used(c.slots_ +
i);
1831 if (capacity == 0)
return 0;
1833 size_t m = layout.AllocSize();
1834 size_t per_slot = Traits::space_used(static_cast<const Slot*>(
nullptr));
1835 if (per_slot != ~
size_t{}) {
1836 m += per_slot *
size;
1846 #endif // ABSL_CONTAINER_INTERNAL_RAW_HASH_SET_H_
void erase(const_iterator cit)
void resize(size_t new_capacity)
typename iterator::iterator_category iterator_category
raw_hash_set & operator=(raw_hash_set &&that) noexcept(absl::allocator_traits< allocator_type >::is_always_equal::value &&std::is_nothrow_move_assignable< hasher >::value &&std::is_nothrow_move_assignable< key_equal >::value)
FindInfo find_first_non_full(size_t hash)
const_iterator(iterator i)
size_t prepare_insert(size_t hash) ABSL_ATTRIBUTE_NOINLINE
typename Policy::init_type init_type
size_t bucket_count() const
void erase_meta_only(const_iterator it)
static std::function< Slot &(Slot *)> element
BitMask< uint64_t, kWidth, 3 > MatchEmpty() const
iterator(ctrl_t *ctrl, slot_type *slot)
std::pair< iterator, bool > operator()(const K &key, Args &&...)&&
static auto GetSlot(const Node &node) -> decltype(node.slot())
reference operator*() const
typename absl::allocator_traits< allocator_type >::template rebind_alloc< slot_type > SlotAlloc
static void Reset(Node *node)
bool IsValidCapacity(size_t n)
raw_hash_set & move_assign(raw_hash_set &&that, std::true_type)
const_iterator iterator_at(size_t i) const
uint64_t Load64(const void *p)
float max_load_factor() const
size_t operator()(const K &key, Args &&...) const
raw_hash_set(const allocator_type &alloc)
raw_hash_set() noexcept(std::is_nothrow_default_constructible< hasher >::value &&std::is_nothrow_default_constructible< key_equal >::value &&std::is_nothrow_default_constructible< allocator_type >::value)
std::pair< iterator, bool > operator()(const K &key, Args &&...args) const
typename PolicyTraits::key_type key_type
pointer operator->() const
raw_hash_set(std::initializer_list< T > init, size_t bucket_count, const allocator_type &alloc)
size_t count(const key_arg< K > &key) const
typename std::remove_reference< T >::type remove_reference_t
int TrailingZeros() const
raw_hash_set(const raw_hash_set &that)
void insert(std::initializer_list< init_type > ilist)
#define ABSL_ATTRIBUTE_REINITIALIZES
void ConvertSpecialToEmptyAndFullToDeleted(ctrl_t *dst) const
size_t HashSeed(const ctrl_t *ctrl)
void prefetch(const key_arg< K > &key) const
const hasher & hash_ref() const
typename std::enable_if< absl::disjunction< std::is_convertible< T, init_type >, SameAsElementReference< T >>::value, int >::type RequiresInsertable
raw_hash_set(std::initializer_list< init_type > init, const allocator_type &alloc)
#define ABSL_PREDICT_FALSE(x)
typename raw_hash_set::value_type value_type
std::pair< iterator, bool > emplace(Args &&...args)
friend void swap(raw_hash_set &a, raw_hash_set &b) noexcept(noexcept(a.swap(b)))
IsDecomposable< void, PolicyTraits, Hash, Eq, Ts... > IsDecomposable
const_iterator find(const key_arg< K > &key) const
ABSL_BASE_INTERNAL_FORCEINLINE int CountLeadingZeros32(uint32_t n)
const_iterator begin() const
node_type extract(const_iterator position)
BitMask< uint64_t, kWidth, 3 > MatchEmptyOrDeleted() const
typename absl::allocator_traits< allocator_type >::template rebind_traits< value_type >::const_pointer const_pointer
std::forward_iterator_tag iterator_category
void insert(InputIt first, InputIt last)
const_iterator cend() const
raw_hash_set(InputIter first, InputIter last, const allocator_type &alloc)
void Store64(void *p, uint64_t v)
memory_internal::ExtractOrT< memory_internal::GetPropagateOnContainerMoveAssignment, Alloc, std::false_type > propagate_on_container_move_assignment
void operator()(Args &&...args) const
node_type extract(const key_arg< K > &key)
iterator erase(const_iterator first, const_iterator last)
ABSL_BASE_INTERNAL_FORCEINLINE int CountLeadingZeros64(uint64_t n)
typename raw_hash_set::const_pointer pointer
void emplace_at(size_t i, Args &&...args)
HashtablezInfoHandle Sample()
raw_hash_set(std::initializer_list< init_type > init, size_t bucket_count, const allocator_type &alloc)
iterator find(const key_arg< K > &key)
const value_type & const_reference
uint128 operator*(uint128 lhs, uint128 rhs)
friend bool operator==(const raw_hash_set &a, const raw_hash_set &b)
typename std::conditional< B, T, F >::type conditional_t
raw_hash_set & move_assign(raw_hash_set &&that, std::false_type)
ABSL_BASE_INTERNAL_FORCEINLINE int CountTrailingZerosNonZero32(uint32_t n)
int HighestBitSet() const
uint32_t CountLeadingEmptyOrDeleted() const
bool operator()(const K2 &lhs, Args &&...) const
ABSL_ATTRIBUTE_REINITIALIZES void clear()
typename std::enable_if<!std::is_same< T, init_type >::value, int >::type RequiresNotInit
size_t H1(size_t hash, const ctrl_t *ctrl)
typename PolicyTraits::init_type init_type
typename Set::PolicyTraits Traits
typename raw_hash_set::difference_type difference_type
static std::function< void(void *, Slot *)> destroy
typename Policy::key_type key_type
raw_hash_set(size_t bucket_count, const hasher &hash=hasher(), const key_equal &eq=key_equal(), const allocator_type &alloc=allocator_type())
typename Traits::slot_type Slot
hash_default_hash< T > hasher
iterator lazy_emplace(const key_arg< K > &key, F &&f)
const_iterator(const ctrl_t *ctrl, const slot_type *slot)
bool ShouldInsertBackwards(size_t hash, ctrl_t *ctrl)
size_t GrowthToLowerboundCapacity(size_t growth)
pointer operator->() const
bool IsEmptyOrDeleted(ctrl_t c)
void SanitizerUnpoisonMemoryRegion(const void *m, size_t s)
AllocList * next[kMaxLevel]
void insert(std::initializer_list< T > ilist)
void swap(raw_hash_set &that) noexcept(IsNoThrowSwappable< hasher >()&&IsNoThrowSwappable< key_equal >()&&(!AllocTraits::propagate_on_container_swap::value||IsNoThrowSwappable< allocator_type >()))
reference operator*() const
raw_hash_set(std::initializer_list< T > init, size_t bucket_count, const hasher &hash, const allocator_type &alloc)
void set_ctrl(size_t i, ctrl_t h)
void merge(raw_hash_set< Policy, H, E, Alloc > &&src)
hash_default_hash< typename T::first_type > hash
raw_hash_set(const raw_hash_set &that, const allocator_type &a)
typename absl::allocator_traits< allocator_type >::template rebind_traits< value_type >::pointer pointer
std::pair< std::string, std::string > pair
absl::remove_reference_t< reference > * pointer
std::pair< iterator, bool > insert(const T &value)
probe_seq(size_t hash, size_t mask)
BitMask< uint64_t, kWidth, 3 > Match(h2_t hash) const
void rehash_and_grow_if_necessary()
bool contains(const key_arg< K > &key) const
void merge(raw_hash_set< Policy, H, E, Alloc > &src)
size_t CapacityToGrowth(size_t capacity)
float load_factor() const
void swap(absl::InlinedVector< T, N, A > &a, absl::InlinedVector< T, N, A > &b) noexcept(noexcept(a.swap(b)))
typename KeyArgImpl::template type< K, key_type > key_arg
insert_return_type insert(node_type &&node)
raw_hash_set(InputIter first, InputIter last, size_t bucket_count=0, const hasher &hash=hasher(), const key_equal &eq=key_equal(), const allocator_type &alloc=allocator_type())
allocator_type & alloc_ref()
static std::function< void(void *, Slot *, Slot *)> transfer
static std::function< void(void *, Slot *, Slot)> construct
raw_hash_set(std::initializer_list< T > init, const allocator_type &alloc)
friend bool operator==(const iterator &a, const iterator &b)
ptrdiff_t difference_type
friend bool operator==(const BitMask &a, const BitMask &b)
void ConvertDeletedToEmptyAndFullToDeleted(ctrl_t *ctrl, size_t capacity)
static constexpr size_t kWidth
static size_t AllocatedByteSize(const Set &c)
constexpr bool IsNoThrowSwappable()
allocator_type get_allocator() const
static size_t LowerBoundAllocatedByteSize(size_t size)
void SanitizerPoisonMemoryRegion(const void *m, size_t s)
std::pair< size_t, bool > find_or_prepare_insert(const K &key)
size_t offset(size_t i) const
raw_hash_set(raw_hash_set &&that) noexcept(std::is_nothrow_copy_constructible< hasher >::value &&std::is_nothrow_copy_constructible< key_equal >::value &&std::is_nothrow_copy_constructible< allocator_type >::value)
typename type_traits_internal::VoidTImpl< Ts... >::type void_t
std::pair< const_iterator, const_iterator > equal_range(const key_arg< K > &key) const
raw_hash_set(raw_hash_set &&that, const allocator_type &a)
#define ABSL_ATTRIBUTE_NOINLINE
friend bool operator!=(const const_iterator &a, const const_iterator &b)
const_iterator cbegin() const
void SanitizerPoisonObject(const T *object)
absl::conditional_t< PolicyTraits::constant_iterators::value, const value_type &, value_type & > reference
static size_t GetNumProbes(const Set &set, const typename Set::key_type &key)
constructor(allocator_type *a, slot_type **slot)
#define ABSL_PREDICT_TRUE(x)
raw_hash_set(std::initializer_list< init_type > init, size_t bucket_count=0, const hasher &hash=hasher(), const key_equal &eq=key_equal(), const allocator_type &alloc=allocator_type())
const_iterator operator()(const K &key, Args &&...) const
friend bool operator!=(const raw_hash_set &a, const raw_hash_set &b)
typename raw_hash_set::difference_type difference_type
typename PolicyTraits::slot_type slot_type
typename raw_hash_set::const_reference reference
HashtablezInfoHandle infoz_
probe_seq< Group::kWidth > probe(size_t hash) const
iterator insert(const_iterator, const T &value)
std::pair< iterator, bool > insert(init_type &&value)
raw_hash_set(std::initializer_list< init_type > init, size_t bucket_count, const hasher &hash, const allocator_type &alloc)
iterator insert(const_iterator, init_type &&value)
void drop_deletes_without_resize() ABSL_ATTRIBUTE_NOINLINE
iterator iterator_at(size_t i)
const_iterator & operator++()
friend bool operator!=(const iterator &a, const iterator &b)
typename absl::allocator_traits< allocator_type >::template rebind_traits< slot_type > SlotAllocTraits
raw_hash_set(size_t bucket_count, const allocator_type &alloc)
raw_hash_set(InputIter first, InputIter last, size_t bucket_count, const allocator_type &alloc)
const allocator_type & alloc_ref() const
const_iterator end() const
void max_load_factor(float)
raw_hash_set(std::initializer_list< T > init, size_t bucket_count=0, const hasher &hash=hasher(), const key_equal &eq=key_equal(), const allocator_type &alloc=allocator_type())
raw_hash_set(InputIter first, InputIter last, size_t bucket_count, const hasher &hash, const allocator_type &alloc)
void SanitizerUnpoisonObject(const T *object)
typename PolicyTraits::value_type value_type
std::allocator< int > alloc
size_type erase(const key_arg< K > &key)
iterator insert(const_iterator, T &&value)
std::pair< iterator, bool > insert(T &&value)
constexpr absl::remove_reference_t< T > && move(T &&t) noexcept
typename raw_hash_set::value_type value_type
raw_hash_set(size_t bucket_count, const hasher &hash, const allocator_type &alloc)
iterator find(const key_arg< K > &key, size_t hash)
iterator emplace_hint(const_iterator, Args &&...args)
std::pair< iterator, iterator > equal_range(const key_arg< K > &key)
raw_hash_set & operator=(const raw_hash_set &that)
MockFunction< int(int)> apply
typename Policy::slot_type slot_type
const_iterator operator++(int)
hasher hash_function() const
void skip_empty_or_deleted()
hash_default_eq< typename T::first_type > eq
friend bool operator==(const const_iterator &a, const const_iterator &b)
T exchange(T &obj, U &&new_value)
absl::hash_internal::Hash< T > Hash
bool has_element(const value_type &elem) const
static Layout MakeLayout(size_t capacity)
GroupPortableImpl(const ctrl_t *pos)
friend bool operator!=(const BitMask &a, const BitMask &b)
const key_equal & eq_ref() const
const_iterator find(const key_arg< K > &key, size_t hash) const
size_t NormalizeCapacity(size_t n)
iterator insert(const_iterator, node_type &&node)
ABSL_BASE_INTERNAL_FORCEINLINE int CountTrailingZerosNonZero64(uint64_t n)