#include "absl/container/internal/raw_hash_set.h"

#include <unordered_map>
#include <unordered_set>

#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "absl/base/attributes.h"
#include "absl/base/config.h"
#include "absl/base/internal/cycleclock.h"
#include "absl/base/internal/raw_logging.h"
#include "absl/container/internal/container_memory.h"
#include "absl/container/internal/hash_function_defaults.h"
#include "absl/container/internal/hash_policy_testing.h"
#include "absl/container/internal/hashtable_debug.h"
#include "absl/strings/string_view.h"
namespace container_internal {
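
// RawHashSetTestOnlyAccess reaches into raw_hash_set's private slots_ member
// so tests can inspect raw slot memory directly (used, for example, by the
// sanitizer poisoning checks near the end of this file).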
struct RawHashSetTestOnlyAccess {
  static auto GetSlots(const C& c) -> decltype(c.slots_) {
TEST(Util, GrowthAndCapacity) {
  for (size_t growth = 0; growth < 10000; ++growth) {
TEST(Util, probe_seq) {
  probe_seq<16> seq(0, 127);
  size_t res = seq.offset();
  std::vector<size_t> offsets(8);
  std::generate_n(offsets.begin(), 8, gen);
  seq = probe_seq<16>(128, 127);
  std::generate_n(offsets.begin(), 8, gen);
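  // probe_seq<Width> walks group-aligned offsets in a triangular pattern:
  // each step advances by one more group width than the last (16, 32, 48,
  // ...) and wraps with the mask, so a mask of 127 visits all eight 16-slot
  // groups of a 128-slot table before repeating.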
TEST(BitMask, Smoke) {

TEST(BitMask, WithShift) {
  auto x = ctrl ^ (lsbs * hash);
  BitMask<uint64_t, 8, 3> b(mask);

TEST(BitMask, LeadingTrailing) {
  EXPECT_EQ((BitMask<uint32_t, 16>(0x00001a40).LeadingZeros()), 3);
  EXPECT_EQ((BitMask<uint32_t, 16>(0x00000001).LeadingZeros()), 15);
  EXPECT_EQ((BitMask<uint32_t, 16>(0x00008000).LeadingZeros()), 0);
  EXPECT_EQ((BitMask<uint64_t, 8, 3>(0x0000008080808000).LeadingZeros()), 3);
  EXPECT_EQ((BitMask<uint64_t, 8, 3>(0x0000000000000080).LeadingZeros()), 7);
  EXPECT_EQ((BitMask<uint64_t, 8, 3>(0x8000000000000000).LeadingZeros()), 0);
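
  // BitMask<T, SignificantBits, Shift>: with Shift == 0 each bit is one entry
  // (the 16-lane SSE group layout above); with Shift == 3 each entry occupies
  // a byte and only its high bit matters (the portable 8-byte group layout),
  // so 0x80 in the lowest byte means entry 0 is set and the seven entries
  // above it are clear.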
      CtrlT(7), CtrlT(5), CtrlT(3), CtrlT(1),
      CtrlT(1), CtrlT(1), CtrlT(1), CtrlT(1)};
      CtrlT(7), CtrlT(5), CtrlT(3), CtrlT(1),
      CtrlT(1), CtrlT(1), CtrlT(1), CtrlT(1)};
      CtrlT(7), CtrlT(5), CtrlT(3), CtrlT(1),
      CtrlT(1), CtrlT(1), CtrlT(1), CtrlT(1)};
TEST(Batch, DropDeletes) {
  std::vector<ctrl_t> ctrl(kCapacity + 1 + kGroupWidth);
  std::vector<ctrl_t> pattern = {
  if (i < kGroupWidth - 1)
  for (size_t i = 0; i < kCapacity + kGroupWidth; ++i) {
TEST(Group, CountLeadingEmptyOrDeleted) {
  const std::vector<ctrl_t> full_examples = {
      CtrlT(0), CtrlT(1), CtrlT(2), CtrlT(3),
  for (ctrl_t full : full_examples) {
  template <class Allocator, class... Args>
                                                 std::forward<Args>(args)...);

  template <class Allocator>
  static void destroy(Allocator* alloc, slot_type* slot) {

  template <class Allocator>
                       slot_type* old_slot) {

  static T& element(slot_type* slot) { return *slot; }

  template <class F, class... Args>
          std::declval<F>(), std::declval<Args>()...))
        std::forward<F>(f), std::forward<Args>(args)...);

using IntPolicy = ValuePolicy<int64_t>;
using Uint8Policy = ValuePolicy<uint8_t>;
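
// ValuePolicy<T> is the simplest slot policy: slot, key, and value are all T
// itself, and construct/destroy/transfer delegate to the allocator. IntPolicy
// and Uint8Policy instantiate it so the tests below can build raw_hash_set
// instances over plain integers.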
  template <class F, class K, class V,
            class = typename std::enable_if<
  decltype(std::declval<F>()(
      std::declval<const absl::string_view&>(), std::piecewise_construct,
      std::declval<std::tuple<K>>(),
    return std::forward<F>(f)(key, std::piecewise_construct, std::move(p.first),

    template <class... Ts>
    std::pair<std::string, std::string> pair;

  using init_type = std::pair<std::string, std::string>;
  template <class allocator_type, class... Args>
        *alloc, slot, typename slot_type::ctor(), std::forward<Args>(args)...);

  template <class allocator_type>
  static void destroy(allocator_type* alloc, slot_type* slot) {

  template <class allocator_type>
  static void transfer(allocator_type* alloc, slot_type* new_slot,
                       slot_type* old_slot) {

  static std::pair<std::string, std::string>& element(slot_type* slot) {

  template <class F, class... Args>

struct StringHash : absl::Hash<absl::string_view> {
struct StringEq : std::equal_to<absl::string_view> {
    : raw_hash_set<StringPolicy, StringHash, StringEq, std::allocator<int>> {
  using Base = typename StringTable::raw_hash_set;

    : raw_hash_set<IntPolicy, container_internal::hash_default_hash<int64_t>,
                   std::equal_to<int64_t>, std::allocator<int64_t>> {
  using Base = typename IntTable::raw_hash_set;

    : raw_hash_set<Uint8Policy, container_internal::hash_default_hash<uint8_t>,
                   std::equal_to<uint8_t>, std::allocator<uint8_t>> {
  using Base = typename Uint8Table::raw_hash_set;
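
// StringTable, IntTable, and Uint8Table pin down the policy, hash, equality,
// and allocator parameters once so individual tests can simply declare, e.g.,
// `IntTable t;`.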
template <typename T>
struct CustomAlloc : std::allocator<T> {
  template <typename U>
  CustomAlloc(const CustomAlloc<U>& other) {}

  template <class U>
  struct rebind {
    using other = CustomAlloc<U>;

struct CustomAllocIntTable
    : raw_hash_set<IntPolicy, container_internal::hash_default_hash<int64_t>,
                   std::equal_to<int64_t>, CustomAlloc<int64_t>> {
  using Base = typename CustomAllocIntTable::raw_hash_set;

  size_t operator()(const T&) const {

struct BadTable : raw_hash_set<IntPolicy, BadFastHash, std::equal_to<int>,
                               std::allocator<int>> {
  using Base = typename BadTable::raw_hash_set;
TEST(Table, EmptyFunctorOptimization) {
  static_assert(std::is_empty<std::equal_to<absl::string_view>>::value, "");
  static_assert(std::is_empty<std::allocator<int>>::value, "");

  struct MockTableInfozDisabled {
  struct StatelessHash {
  struct StatefulHash : StatelessHash {

  EXPECT_EQ(sizeof(MockTableInfozDisabled),
            sizeof(raw_hash_set<StringPolicy, StatelessHash,
                                std::equal_to<absl::string_view>,
                                std::allocator<int>>));

  EXPECT_EQ(sizeof(MockTableInfozDisabled) + sizeof(StatefulHash),
            sizeof(raw_hash_set<StringPolicy, StatefulHash,
                                std::equal_to<absl::string_view>,
                                std::allocator<int>>));

            sizeof(raw_hash_set<StringPolicy, StatelessHash,
                                std::equal_to<absl::string_view>,
                                std::allocator<int>>));

  EXPECT_EQ(sizeof(MockTable) + sizeof(StatefulHash),
            sizeof(raw_hash_set<StringPolicy, StatefulHash,
                                std::equal_to<absl::string_view>,
                                std::allocator<int>>));
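
  // The point of the sizeof comparisons: empty (stateless) hashers,
  // comparators, and allocators must not add to raw_hash_set's footprint
  // (empty base optimization), while a stateful hasher should cost exactly
  // its own size.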
TEST(Table, LookupEmpty) {

TEST(Table, Insert1) {
  auto res = t.emplace(0);

TEST(Table, Insert2) {
  auto res = t.emplace(0);

TEST(Table, InsertCollision) {
  auto res = t.emplace(1);

TEST(Table, InsertCollisionAndFindAfterDelete) {
  for (size_t i = 0; i < kNumInserts; ++i) {
    auto res = t.emplace(i);
  for (size_t i = 0; i < kNumInserts; ++i) {
    for (size_t j = i + 1; j < kNumInserts; ++j) {
      auto res = t.emplace(j);
TEST(Table, InsertWithinCapacity) {
  const size_t original_capacity = t.capacity();
  const auto addr = [&](int i) {
    return reinterpret_cast<uintptr_t>(&*t.find(i));
  for (int i = 0; i < 100; ++i) {

  std::vector<int> dup_range;
  for (int i = 0; i < 100; ++i) {
    dup_range.push_back(i % 10);
  t.insert(dup_range.begin(), dup_range.end());
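
// lazy_emplace looks the key up first and invokes the callback only when the
// key is absent; the callback receives a constructor object that must be used
// to build the value in place, so a second lazy_emplace for the same key is a
// plain lookup.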
TEST(Table, LazyEmplace) {
  auto it = t.lazy_emplace("abc", [&](const StringTable::constructor& f) {
  it = t.lazy_emplace("abc", [&](const StringTable::constructor& f) {

TEST(Table, ContainsEmpty) {

TEST(Table, Contains1) {

TEST(Table, Contains2) {
int decompose_constructed;
int decompose_copy_constructed;
int decompose_copy_assigned;
int decompose_move_constructed;
int decompose_move_assigned;

struct DecomposeType {
  DecomposeType(int i = 0) : i(i) {
    ++decompose_constructed;

  explicit DecomposeType(const char* d) : DecomposeType(*d) {}

  DecomposeType(const DecomposeType& other) : i(other.i) {
    ++decompose_copy_constructed;

  DecomposeType& operator=(const DecomposeType& other) {
    ++decompose_copy_assigned;

  DecomposeType(DecomposeType&& other) : i(other.i) {
    ++decompose_move_constructed;

  DecomposeType& operator=(DecomposeType&& other) {
    ++decompose_move_assigned;
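
// The global counters above record every way a DecomposeType can be
// constructed or assigned; TestDecompose() below resets them and then checks
// that insert/emplace construct a value only when the key is actually missing
// from the set.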
struct DecomposeHash {
  using is_transparent = void;
  size_t operator()(const DecomposeType& a) const { return a.i; }
  size_t operator()(int a) const { return a; }
  size_t operator()(const char* a) const { return *a; }

  using is_transparent = void;
  bool operator()(const DecomposeType& a, const DecomposeType& b) const {
  bool operator()(const DecomposeType& a, int b) const { return a.i == b; }
  bool operator()(const DecomposeType& a, const char* b) const {
struct DecomposePolicy {
  using slot_type = DecomposeType;
  using init_type = DecomposeType;

  template <typename T>
  static void construct(void*, DecomposeType* slot, T&& v) {
    ::new (slot) DecomposeType(std::forward<T>(v));
  static void destroy(void*, DecomposeType* slot) { slot->~DecomposeType(); }
  static DecomposeType& element(slot_type* slot) { return *slot; }

  template <class F, class T>
  static auto apply(F&& f, const T& x) -> decltype(std::forward<F>(f)(x, x)) {
    return std::forward<F>(f)(x, x);
template <typename Hash, typename Eq>
void TestDecompose(bool construct_three) {
  DecomposeType elem{0};
  const char* three_p = "3";
  const auto& three = three_p;
  const int elem_vector_count = 256;
  std::vector<DecomposeType> elem_vector(elem_vector_count, DecomposeType{0});
  std::iota(elem_vector.begin(), elem_vector.end(), 0);

      raw_hash_set<DecomposePolicy, Hash, Eq, std::allocator<int>>;

  decompose_constructed = 0;
  int expected_constructed = 0;
  EXPECT_EQ(expected_constructed, decompose_constructed);
  EXPECT_EQ(expected_constructed, decompose_constructed);
  EXPECT_EQ(++expected_constructed, decompose_constructed);
  EXPECT_EQ(++expected_constructed, decompose_constructed);
  EXPECT_EQ(expected_constructed, decompose_constructed);
  EXPECT_EQ(expected_constructed, decompose_constructed);
  EXPECT_EQ(expected_constructed, decompose_constructed);
  set1.insert(set1.begin(), 1);
  EXPECT_EQ(expected_constructed, decompose_constructed);
  set1.insert(set1.begin(), one);
  EXPECT_EQ(expected_constructed, decompose_constructed);

  EXPECT_EQ(expected_constructed, decompose_constructed);
  expected_constructed += construct_three;
  EXPECT_EQ(expected_constructed, decompose_constructed);
  EXPECT_EQ(expected_constructed, decompose_constructed);
  expected_constructed += construct_three;
  EXPECT_EQ(expected_constructed, decompose_constructed);

  set1.emplace_hint(set1.begin(), 1);
  EXPECT_EQ(expected_constructed, decompose_constructed);
  set1.emplace_hint(set1.begin(), "3");
  expected_constructed += construct_three;
  EXPECT_EQ(expected_constructed, decompose_constructed);
  set1.emplace_hint(set1.begin(), one);
  EXPECT_EQ(expected_constructed, decompose_constructed);
  set1.emplace_hint(set1.begin(), three);
  expected_constructed += construct_three;
  EXPECT_EQ(expected_constructed, decompose_constructed);

  decompose_copy_constructed = 0;
  decompose_copy_assigned = 0;
  decompose_move_constructed = 0;
  decompose_move_assigned = 0;
  int expected_copy_constructed = 0;
  int expected_move_constructed = 0;
  DecomposeSet set2(elem_vector.begin(), elem_vector.end());
  expected_copy_constructed += elem_vector_count;
  EXPECT_EQ(expected_copy_constructed, decompose_copy_constructed);
  EXPECT_EQ(expected_move_constructed, decompose_move_constructed);

  std::list<DecomposeType> elem_list(elem_vector.begin(), elem_vector.end());
  expected_copy_constructed = decompose_copy_constructed;
  DecomposeSet set2(elem_list.begin(), elem_list.end());
  expected_copy_constructed += elem_vector_count;
  EXPECT_EQ(expected_copy_constructed, decompose_copy_constructed);
  expected_move_constructed += elem_vector_count;
  EXPECT_LT(expected_move_constructed, decompose_move_constructed);
  expected_move_constructed += elem_vector_count;
  EXPECT_GE(expected_move_constructed, decompose_move_constructed);

  expected_copy_constructed = decompose_copy_constructed;
  expected_move_constructed = decompose_move_constructed;

  set2.insert(elem_vector.begin(), elem_vector.end());
  const int expected_new_elements = elem_vector_count;
  const int expected_max_element_moves = 2 * elem_vector_count;
  expected_copy_constructed += expected_new_elements;
  EXPECT_EQ(expected_copy_constructed, decompose_copy_constructed);
  expected_move_constructed += expected_max_element_moves;
  EXPECT_GE(expected_move_constructed, decompose_move_constructed);

  expected_copy_constructed = decompose_copy_constructed;
  expected_move_constructed = decompose_move_constructed;
TEST(Table, Decompose) {
  TestDecompose<DecomposeHash, DecomposeEq>(false);

  struct TransparentHashIntOverload {
    size_t operator()(const DecomposeType& a) const { return a.i; }
    size_t operator()(int a) const { return a; }
  struct TransparentEqIntOverload {
    bool operator()(const DecomposeType& a, const DecomposeType& b) const {
    bool operator()(const DecomposeType& a, int b) const { return a.i == b; }
  TestDecompose<TransparentHashIntOverload, DecomposeEq>(true);
  TestDecompose<TransparentHashIntOverload, TransparentEqIntOverload>(true);
  TestDecompose<DecomposeHash, TransparentEqIntOverload>(true);
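  // construct_three is true exactly when the hash or the equality functor
  // lacks a const char* overload: a lookup with "3" then has to go through
  // the DecomposeType(const char*) converting constructor.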
size_t MaxDensitySize(size_t n) {
  for (size_t i = 0; i != n; ++i) t.emplace(i);
  const size_t c = t.bucket_count();
  while (c == t.bucket_count()) t.emplace(n++);

struct Modulo1000Hash {
  size_t operator()(int x) const { return x % 1000; }

struct Modulo1000HashTable
    : public raw_hash_set<IntPolicy, Modulo1000Hash, std::equal_to<int>,
                          std::allocator<int>> {};
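
// Modulo1000Hash deliberately maps many keys to the same value so the tests
// below can create long probe chains and deleted-slot patterns on demand.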
TEST(Table, RehashWithNoResize) {
  Modulo1000HashTable t;
  const size_t kMinFullGroups = 7;
  std::vector<int> keys;
  for (size_t i = 0; i < MaxDensitySize(Group::kWidth * kMinFullGroups); ++i) {
  for (size_t i = erase_begin; i < erase_end; ++i) {
  keys.erase(keys.begin() + erase_begin, keys.begin() + erase_end);
  auto last_key = keys.back();
  ASSERT_GT(last_key_num_probes, kMinFullGroups);
  for (const auto& k : keys) {
TEST(Table, InsertEraseStressTest) {
  const size_t kMinElementCount = 250;
  std::deque<int> keys;
  for (; i < MaxDensitySize(kMinElementCount); ++i) {
  const size_t kNumIterations = 1000000;
  for (; i < kNumIterations; ++i) {

TEST(Table, InsertOverloads) {
  t.insert({"ABC", {}});
  t.insert({"DEF", "!!!"});
                           Pair("DEF", "!!!")));

TEST(Table, LargeTable) {
  for (int64_t i = 0; i != 100000; ++i) t.emplace(i << 40);
TEST(Table, EnsureNonQuadraticAsInRust) {
  static const size_t kLargeSize = 1 << 15;
  for (size_t i = 0; i != kLargeSize; ++i) {

  for (const auto& entry : t) t2.insert(entry);
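  // Copying every element of one table into a second, initially empty table
  // replays the source's iteration order; if iteration order correlated with
  // probe order this loop could degrade to quadratic time, the pathology the
  // test name borrows from Rust's hash map.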
TEST(Table, ClearBug) {
  constexpr size_t max_size = capacity / 2 + 1;
  for (size_t i = 0; i < max_size; ++i) {
  for (size_t i = 0; i < max_size; ++i) {

  auto res = t.emplace(0);

TEST(Table, EraseMaintainsValidIterator) {
  const int kNumElements = 100;
  for (int i = 0; i < kNumElements; i++) {
  int num_erase_calls = 0;
  auto it = t.begin();
  while (it != t.end()) {
  EXPECT_EQ(num_erase_calls, kNumElements);
std::vector<int64_t> CollectBadMergeKeys(size_t N) {
  auto topk_range = [](size_t b, size_t e,
                       IntTable* t) -> std::vector<int64_t> {
    for (size_t i = b; i != e; ++i) {
    std::vector<int64_t> res;
    res.reserve(kGroupSize);
    auto it = t->begin();
    for (size_t i = b; i != e && i != b + kGroupSize; ++i, ++it) {

  std::vector<int64_t> bad_keys;
  bad_keys.reserve(N);
  for (size_t b = 0; bad_keys.size() < N; b += N) {
    auto keys = topk_range(b, b + N, &t);
    bad_keys.insert(bad_keys.end(), keys.begin(), keys.end());
    t.erase(t.begin(), t.end());
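  // topk_range inserts the keys [b, e) into a scratch table and returns the
  // first kGroupSize of them in iteration order; repeating this over disjoint
  // ranges yields keys the table happens to place early in its probe
  // sequences, presumably to make the probe-length statistics below as
  // adversarial as possible.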
  friend ProbeStats operator+(const ProbeStats& a, const ProbeStats& b) {
    res.all_probes_histogram.resize(std::max(res.all_probes_histogram.size(),
                                             b.all_probes_histogram.size()));
    std::transform(b.all_probes_histogram.begin(), b.all_probes_histogram.end(),
                   res.all_probes_histogram.begin(),
                   res.all_probes_histogram.begin(), std::plus<size_t>());
    res.single_table_ratios.insert(res.single_table_ratios.end(),
                                   b.single_table_ratios.begin(),
                                   b.single_table_ratios.end());

  double AvgRatio() const {

  double MaxRatio() const {

  double PercentileRatio(double Percentile = 0.95) const {
    auto mid = r.begin() + static_cast<size_t>(r.size() * Percentile);
    if (mid != r.end()) {
      std::nth_element(r.begin(), mid, r.end());

  std::vector<double> ProbeNormalizedHistogram() const {
    std::vector<double> res;
      res.push_back(p / total_elements);
  size_t PercentileProbe(double Percentile = 0.99) const {
    for (double p : ProbeNormalizedHistogram()) {
      if (Percentile > p) {

  friend std::ostream& operator<<(std::ostream& out, const ProbeStats& s) {
    out << "{AvgRatio:" << s.AvgRatio() << ", MaxRatio:" << s.MaxRatio()
        << ", PercentileRatio:" << s.PercentileRatio()
        << ", MaxProbe:" << s.MaxProbe() << ", Probes=[";
    for (double p : s.ProbeNormalizedHistogram()) {
struct ExpectedStats {
  friend std::ostream& operator<<(std::ostream& out, const ExpectedStats& s) {
    out << "{AvgRatio:" << s.avg_ratio << ", MaxRatio:" << s.max_ratio
        << ", PercentileRatios: [";
    for (auto el : s.pecentile_ratios) {
      out << el.first << ":" << el.second << ", ";
    out << "], PercentileProbes: [";
    for (auto el : s.pecentile_probes) {
      out << el.first << ":" << el.second << ", ";

void VerifyStats(size_t size, const ExpectedStats& exp,
                 const ProbeStats& stats) {
  for (auto pr : exp.pecentile_ratios) {
        << size << " " << pr.first << " " << stats;
  for (auto pr : exp.pecentile_probes) {
        << size << " " << pr.first << " " << stats;
using ProbeStatsPerSize = std::map<size_t, ProbeStats>;

ProbeStats CollectProbeStatsOnKeysXoredWithSeed(
    const std::vector<int64_t>& keys, size_t num_iters) {
  const size_t reserve_size = keys.size() * 2;
  while (num_iters--) {
    t1.reserve(reserve_size);
    for (const auto& key : keys) {
    stats.all_probes_histogram.resize(
        std::max(stats.all_probes_histogram.size(), probe_histogram.size()));
    std::transform(probe_histogram.begin(), probe_histogram.end(),
                   stats.all_probes_histogram.begin(),
                   stats.all_probes_histogram.begin(), std::plus<size_t>());

    size_t total_probe_seq_length = 0;
    for (size_t i = 0; i < probe_histogram.size(); ++i) {
      total_probe_seq_length += i * probe_histogram[i];
    stats.single_table_ratios.push_back(total_probe_seq_length * 1.0 /
    t1.erase(t1.begin(), t1.end());
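    // Each iteration rebuilds the table from the same keys (xored with a
    // per-iteration seed, per the function name), folds that table's
    // probe-length histogram into the running histogram, and records its
    // mean probe-sequence length as one ratio sample.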
ExpectedStats XorSeedExpectedStats() {
  constexpr bool kRandomizesInserts =
  if (kRandomizesInserts) {
            {{0.95, 0}, {0.99, 2}, {0.999, 4}, {0.9999, 10}}};
            {{0.95, 0}, {0.99, 2}, {0.999, 4}, {0.9999, 10}}};
  if (kRandomizesInserts) {
            {{0.95, 0}, {0.99, 1}, {0.999, 8}, {0.9999, 15}}};
            {{0.95, 0}, {0.99, 1}, {0.999, 4}, {0.9999, 10}}};

TEST(Table, DISABLED_EnsureNonQuadraticTopNXorSeedByProbeSeqLength) {
  ProbeStatsPerSize stats;
  for (size_t size : sizes) {
        CollectProbeStatsOnKeysXoredWithSeed(CollectBadMergeKeys(size), 200);
  auto expected = XorSeedExpectedStats();
  for (size_t size : sizes) {
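    // For each size, CollectBadMergeKeys() builds an adversarial key set, 200
    // randomized trials accumulate probe statistics, and the collected stats
    // are then checked against the thresholds from XorSeedExpectedStats()
    // (presumably via VerifyStats, defined above).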
ProbeStats CollectProbeStatsOnLinearlyTransformedKeys(
    const std::vector<int64_t>& keys, size_t num_iters) {
  std::random_device rd;
  std::mt19937 rng(rd());
  auto linear_transform = [](size_t x, size_t y) { return x * 17 + y * 13; };
  std::uniform_int_distribution<size_t> dist(0, keys.size() - 1);
  while (num_iters--) {
    size_t num_keys = keys.size() / 10;
    size_t start = dist(rng);
    for (size_t i = 0; i != num_keys; ++i) {
      for (size_t j = 0; j != 10; ++j) {
    stats.all_probes_histogram.resize(
        std::max(stats.all_probes_histogram.size(), probe_histogram.size()));
    std::transform(probe_histogram.begin(), probe_histogram.end(),
                   stats.all_probes_histogram.begin(),
                   stats.all_probes_histogram.begin(), std::plus<size_t>());

    size_t total_probe_seq_length = 0;
    for (size_t i = 0; i < probe_histogram.size(); ++i) {
      total_probe_seq_length += i * probe_histogram[i];
    stats.single_table_ratios.push_back(total_probe_seq_length * 1.0 /
    t1.erase(t1.begin(), t1.end());
ExpectedStats LinearTransformExpectedStats() {
  constexpr bool kRandomizesInserts =
  if (kRandomizesInserts) {
            {{0.95, 0}, {0.99, 1}, {0.999, 8}, {0.9999, 15}}};
            {{0.95, 1}, {0.99, 14}, {0.999, 23}, {0.9999, 26}}};
  if (kRandomizesInserts) {
            {{0.95, 1}, {0.99, 2}, {0.999, 9}, {0.9999, 15}}};
            {{0.95, 0}, {0.99, 1}, {0.999, 6}, {0.9999, 10}}};

TEST(Table, DISABLED_EnsureNonQuadraticTopNLinearTransformByProbeSeqLength) {
  ProbeStatsPerSize stats;
  for (size_t size : sizes) {
    stats[size] = CollectProbeStatsOnLinearlyTransformedKeys(
        CollectBadMergeKeys(size), 300);
  auto expected = LinearTransformExpectedStats();
  for (size_t size : sizes) {
TEST(Table, EraseCollision) {

TEST(Table, EraseInsertProbing) {

  auto res = t.emplace(0);
  auto res = t.emplace(0);

TEST(Table, Rehash) {

TEST(Table, RehashDoesNotRehashWhenNotNecessary) {
  auto* p = &*t.find(0);

TEST(Table, RehashZeroDoesNotAllocateOnEmptyTable) {

TEST(Table, RehashZeroDeallocatesEmptyTable) {

TEST(Table, RehashZeroForcesRehash) {
  auto* p = &*t.find(0);

TEST(Table, ConstructFromInitList) {
  using P = std::pair<std::string, std::string>;
    operator P() const { return {}; }
  StringTable t = {P(), Q(), {}, {{}, {}}};
TEST(Table, CopyConstructWithAlloc) {
  t.emplace("a", "b");
  StringTable u(t, Alloc<std::pair<std::string, std::string>>());

struct ExplicitAllocIntTable
    : raw_hash_set<IntPolicy, container_internal::hash_default_hash<int64_t>,
                   std::equal_to<int64_t>, Alloc<int64_t>> {
  ExplicitAllocIntTable() {}

TEST(Table, AllocWithExplicitCtor) {
  ExplicitAllocIntTable t;
TEST(Table, MoveConstruct) {
  t.emplace("a", "b");
  t.emplace("a", "b");
  t.emplace("a", "b");

TEST(Table, MoveConstructWithAlloc) {
  t.emplace("a", "b");
  StringTable u(std::move(t), Alloc<std::pair<std::string, std::string>>());

TEST(Table, CopyAssign) {
  t.emplace("a", "b");

TEST(Table, CopySelfAssign) {
  t.emplace("a", "b");

TEST(Table, MoveAssign) {
  t.emplace("a", "b");
TEST(Table, Equality) {
  std::vector<std::pair<std::string, std::string>> v = {{"a", "b"},

TEST(Table, Equality2) {
  std::vector<std::pair<std::string, std::string>> v1 = {{"a", "b"},
  std::vector<std::pair<std::string, std::string>> v2 = {{"a", "a"},

TEST(Table, Equality3) {
  std::vector<std::pair<std::string, std::string>> v1 = {{"b", "b"},
  std::vector<std::pair<std::string, std::string>> v2 = {{"a", "a"},
TEST(Table, NumDeletedRegression) {

TEST(Table, FindFullDeletedRegression) {
  for (int i = 0; i < 1000; ++i) {

TEST(Table, ReplacingDeletedSlotDoesNotRehash) {
  size_t c = t.bucket_count();
  for (n = 1; c == t.bucket_count(); ++n) t.emplace(n);
  const size_t c = t.bucket_count();
  for (size_t i = 0; i != n; ++i) t.emplace(i);
  EXPECT_EQ(c, t.bucket_count()) << "rehashing threshold = " << n;
  EXPECT_EQ(c, t.bucket_count()) << "rehashing threshold = " << n;
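  // The first loop discovers n, the smallest insertion count that grows the
  // table; the test then stays below that threshold and checks that erasing
  // and re-inserting reuses deleted slots instead of triggering another
  // rehash (bucket_count stays at c).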
TEST(Table, NoThrowMoveConstruct) {
                  std::equal_to<absl::string_view>>::value);
  ASSERT_TRUE(std::is_nothrow_copy_constructible<std::allocator<int>>::value);

TEST(Table, NoThrowMoveAssign) {
      std::is_nothrow_move_assignable<std::equal_to<absl::string_view>>::value);

TEST(Table, NoThrowSwappable) {
                                           std::equal_to<absl::string_view>>());
  EXPECT_TRUE(container_internal::IsNoThrowSwappable<StringTable>());
TEST(Table, HeterogeneousLookup) {
    size_t operator()(int64_t i) const { return i; }
    size_t operator()(double i) const {

    bool operator()(double a, int64_t b) const {
    bool operator()(int64_t a, double b) const {
    bool operator()(double a, double b) const {

    using is_transparent = void;
    size_t operator()(int64_t i) const { return i; }
    size_t operator()(double i) const { return i; }

    using is_transparent = void;
    bool operator()(double a, int64_t b) const { return a == b; }
    bool operator()(int64_t a, double b) const { return a == b; }
    bool operator()(double a, double b) const { return a == b; }

  raw_hash_set<IntPolicy, Hash, Eq, Alloc<int64_t>> s{0, 1, 2};

  raw_hash_set<IntPolicy, THash, TEq, Alloc<int64_t>> ts{0, 1, 2};
template <class Table>
using CallFind = decltype(std::declval<Table&>().find(17));

template <class Table>
using CallErase = decltype(std::declval<Table&>().erase(17));

template <class Table>
using CallExtract = decltype(std::declval<Table&>().extract(17));

template <class Table>
using CallPrefetch = decltype(std::declval<Table&>().prefetch(17));

template <class Table>
using CallCount = decltype(std::declval<Table&>().count(17));

template <template <typename> class C, class Table, class = void>

template <template <typename> class C, class Table>
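
// This is the detection idiom: CallFind<Table> and friends are well-formed
// only if the table accepts the int literal 17 as a heterogeneous key, so the
// primary template (with class = void) matches non-transparent tables and the
// specialization matches transparent ones, which is what VerifyResultOf
// reports in the test below.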
TEST(Table, HeterogeneousLookupOverloads) {
  using NonTransparentTable =
      raw_hash_set<StringPolicy, absl::Hash<absl::string_view>,
                   std::equal_to<absl::string_view>, std::allocator<int>>;

  EXPECT_FALSE((VerifyResultOf<CallFind, NonTransparentTable>()));
  EXPECT_FALSE((VerifyResultOf<CallErase, NonTransparentTable>()));
  EXPECT_FALSE((VerifyResultOf<CallExtract, NonTransparentTable>()));
  EXPECT_FALSE((VerifyResultOf<CallPrefetch, NonTransparentTable>()));
  EXPECT_FALSE((VerifyResultOf<CallCount, NonTransparentTable>()));

  using TransparentTable = raw_hash_set<
      std::allocator<int>>;

  EXPECT_TRUE((VerifyResultOf<CallFind, TransparentTable>()));
  EXPECT_TRUE((VerifyResultOf<CallErase, TransparentTable>()));
  EXPECT_TRUE((VerifyResultOf<CallExtract, TransparentTable>()));
  EXPECT_TRUE((VerifyResultOf<CallPrefetch, TransparentTable>()));
  EXPECT_TRUE((VerifyResultOf<CallCount, TransparentTable>()));
  StringTable::const_iterator i;

  for (size_t i = 3; i != 6; ++i) EXPECT_TRUE(t.emplace(i).second);

  t1.emplace("0", "-0");
  t1.emplace("1", "-1");
  t2.emplace("0", "~0");
  t2.emplace("2", "~2");
TEST(Table, IteratorEmplaceConstructibleRequirement) {
    size_t operator()(const Value& v) const {

  struct Table : raw_hash_set<ValuePolicy<Value>, H, std::equal_to<Value>,
                              std::allocator<Value>> {
    using Base = typename Table::raw_hash_set;
  using node_type = StringTable::node_type;
  EXPECT_TRUE((std::is_same<node_type::allocator_type,
                            StringTable::allocator_type>::value));

  constexpr char k0[] = "Very long string zero.";
  constexpr char k1[] = "Very long string one.";
  constexpr char k2[] = "Very long string two.";
  StringTable t = {{k0, ""}, {k1, ""}, {k2, ""}};

  auto node = t.extract(k0);
  StringTable::insert_return_type res = t2.insert(std::move(node));
  node = t.extract("Not there!");
  node = t.extract(k0);

  IntTable t = {1, 2, 3};
  auto node = t.extract(1);
  node = t.extract(2);
IntTable MakeSimpleTable(size_t size) {
  while (t.size() < size) t.insert(t.size());

std::vector<int> OrderOfIteration(const IntTable& t) {
  return {t.begin(), t.end()};
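
// These helpers back the iteration-order tests below: raw_hash_set randomizes
// element order per instance and per rehash, so two tables built from the
// same keys (or one table before and after a rehash) are expected to iterate
// in different orders at least occasionally.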
TEST(Table, IterationOrderChangesByInstance) {
  for (size_t size : {2, 6, 12, 20}) {
    const auto reference_table = MakeSimpleTable(size);
    const auto reference = OrderOfIteration(reference_table);

    std::vector<IntTable> tables;
    bool found_difference = false;
    for (int i = 0; !found_difference && i < 5000; ++i) {
      tables.push_back(MakeSimpleTable(size));
      found_difference = OrderOfIteration(tables.back()) != reference;
    if (!found_difference) {
          << "Iteration order remained the same across many attempts with size "

TEST(Table, IterationOrderChangesOnRehash) {
  std::vector<IntTable> garbage;
  for (int i = 0; i < 5000; ++i) {
    auto t = MakeSimpleTable(20);
    const auto reference = OrderOfIteration(t);
    auto trial = OrderOfIteration(t);
    if (trial != reference) {
  FAIL() << "Iteration order remained the same across many attempts.";
TEST(Table, UnstablePointers) {
  const auto addr = [&](int i) {

TEST(TableDeathTest, EraseOfEndAsserts) {
  bool assert_enabled = false;
    assert_enabled = true;
  if (!assert_enabled) return;

  constexpr char kDeathMsg[] = "erase.. called on invalid iterator";
#if defined(ABSL_INTERNAL_HASHTABLEZ_SAMPLE)
  size_t start_size = 0;
  std::unordered_set<const HashtablezInfo*> preexisting_info;
  start_size += sampler.Iterate([&](const HashtablezInfo& info) {
    preexisting_info.insert(&info);

  std::vector<IntTable> tables;
  for (int i = 0; i < 1000000; ++i) {
    tables.emplace_back();

    const bool do_reserve = (i % 10 > 5);
    const bool do_rehash = !do_reserve && (i % 10 > 0);

      tables.back().reserve(10 * (i % 10));

    tables.back().insert(1);
    tables.back().insert(i % 5);

      tables.back().rehash(10 * (i % 10));

  size_t end_size = 0;
  std::unordered_map<size_t, int> observed_checksums;
  std::unordered_map<ssize_t, int> reservations;
  end_size += sampler.Iterate([&](const HashtablezInfo& info) {
    if (preexisting_info.count(&info) == 0) {
      observed_checksums[info.hashes_bitwise_xor.load(
          std::memory_order_relaxed)]++;
      reservations[info.max_reserve.load(std::memory_order_relaxed)]++;

  EXPECT_NEAR((end_size - start_size) / static_cast<double>(tables.size()),
  EXPECT_EQ(observed_checksums.size(), 5);
  for (const auto& [_, count] : observed_checksums) {
    EXPECT_NEAR((100 * count) / static_cast<double>(tables.size()), 0.2, 0.05);
  for (const auto& [reservation, count] : reservations) {

#endif  // ABSL_INTERNAL_HASHTABLEZ_SAMPLE
TEST(RawHashSamplerTest, DoNotSampleCustomAllocators) {
  size_t start_size = 0;
  start_size += sampler.Iterate([&](const HashtablezInfo&) { ++start_size; });

  std::vector<CustomAllocIntTable> tables;
  for (int i = 0; i < 1000000; ++i) {
    tables.emplace_back();
    tables.back().insert(1);
  size_t end_size = 0;
  end_size += sampler.Iterate([&](const HashtablezInfo&) { ++end_size; });

  EXPECT_NEAR((end_size - start_size) / static_cast<double>(tables.size()),
#ifdef ABSL_HAVE_ADDRESS_SANITIZER
TEST(Sanitizer, PoisoningUnused) {
  for (size_t i = 0; i < t.capacity(); ++i) {
    EXPECT_EQ(slots + i != &v1, __asan_address_is_poisoned(slots + i));
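    // Every slot except the one occupied by v1 should still be poisoned:
    // raw_hash_set poisons unused slot memory under ASan, unpoisons a slot
    // when an element is constructed in it, and re-poisons it on erase (the
    // next test covers the erase half).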

TEST(Sanitizer, PoisoningOnErase) {
#endif  // ABSL_HAVE_ADDRESS_SANITIZER

TEST(Table, AlignOne) {
  std::unordered_set<uint8_t> verifier;
  auto it = t.find(u);
  auto verifier_it = verifier.find(u);
  if (it == t.end()) {