30 namespace container_internal {
// Mean sampling stride, default 2^10 = 1024; fed into GetGeometricVariable()
// by SampleSlow() to pick the next sampling interval.
37 ABSL_CONST_INIT std::atomic<int32_t> g_hashtablez_sample_parameter{1 << 10};
// Advances a 48-bit linear-congruential PRNG (the lrand48 recurrence):
//   next = (a * rnd + c) mod 2^48, with a = 0x5DEECE66D, c = 0xB.
// Pure function: the caller owns and threads the state through.
uint64_t NextRandom(uint64_t rnd) {
  constexpr uint64_t kLcgMultiplier = uint64_t{0x5DEECE66D};
  constexpr uint64_t kLcgIncrement = 0xB;
  constexpr uint64_t kStateBits = 48;
  // Low-48-bit mask implements the mod-2^48 reduction.
  constexpr uint64_t kStateMask = ~(~uint64_t{0} << kStateBits);
  return (kLcgMultiplier * rnd + kLcgIncrement) & kStateMask;
}
// Draws a sample from a geometric distribution with the given mean by
// inverting the exponential CDF: interval = log2(q) * (-log_e(2) * mean).
// NOTE(review): extraction elided the thread-local `rng` declaration that
// originally sat between the fused #if/#else/#endif markers below — confirm
// against the upstream file before editing logic here.
63 int64_t GetGeometricVariable(int64_t mean) {
64 #if ABSL_HAVE_THREAD_LOCAL 66 #else // ABSL_HAVE_THREAD_LOCAL 71 #endif // ABSL_HAVE_THREAD_LOCAL 78 uint64_t r =
// Seed mixes this rng's address with a shared counter so distinct threads
// (and reused addresses) start from different states.
reinterpret_cast<uint64_t
>(&rng) +
79 global_rand.fetch_add(1, std::memory_order_relaxed);
// The raw LCG output is not well distributed, so churn the state 20 times.
80 for (
int i = 0;
i < 20; ++
i) {
86 rng = NextRandom(rng);
// State is 48 bits wide; the top 26 bits become q, shifted into [1, 2^26].
91 const uint64_t prng_mod_power = 48;
94 double q =
static_cast<uint32_t
>(rng >> (prng_mod_power - 26)) + 1.0;
// log2(q) - 26 rescales q into the unit interval inside the log; the
// -log_e(2)*mean factor converts the base-2 log to the exponential inverse.
96 double interval = (log2(q) - 26) * (-
std::log(2.0) * mean);
// Clamp oversized intervals so callers adding to them cannot overflow.
101 if (interval > static_cast<double>(std::numeric_limits<int64_t>::max() / 2)) {
102 return std::numeric_limits<int64_t>::max() / 2;
109 return static_cast<int64_t
>(interval);
// Body of HashtablezSampler::SetDisposeCallback (signature elided by
// extraction): atomically installs the new callback and returns the old one.
121 return dispose_.exchange(f, std::memory_order_relaxed);
// Fragment of HashtablezInfo::PrepareForSampling: zeroes the per-table
// statistics before the record is (re)used.
128 capacity.store(0, std::memory_order_relaxed);
129 size.store(0, std::memory_order_relaxed);
130 num_erases.store(0, std::memory_order_relaxed);
131 max_probe_length.store(0, std::memory_order_relaxed);
132 total_probe_length.store(0, std::memory_order_relaxed);
133 hashes_bitwise_or.store(0, std::memory_order_relaxed);
// bitwise_and accumulates via &, so it starts all-ones (~size_t{}), not 0.
134 hashes_bitwise_and.store(~
size_t{}, std::memory_order_relaxed);
// Loop-head fragment: walks an intrusive singly linked list of samples
// until the null terminator. NOTE(review): body lines elided by extraction.
153 while (s !=
nullptr) {
// Fragment of HashtablezSampler::PushNew: lock-free push of `sample` onto
// the head of the all_ list.
// Load the current head as the initial expected value for the CAS.
161 sample->
next =
all_.load(std::memory_order_relaxed);
// compare_exchange_weak refreshes sample->next with the observed head on
// failure, so the loop simply retries until the push lands; release order
// publishes the node's contents to readers of all_.
162 while (!
all_.compare_exchange_weak(sample->
next, sample,
163 std::memory_order_release,
164 std::memory_order_relaxed)) {
// Fragment: if a dispose callback has been registered, it will be invoked
// for the record being retired (callback body elided by extraction).
169 if (
auto* dispose =
dispose_.load(std::memory_order_relaxed)) {
// Fragments of HashtablezSampler::Register.
// Over the configured cap: the dropped-sample path follows (elided).
196 if (size > g_hashtablez_max_samples.load(std::memory_order_relaxed)) {
// No reusable record came back from the graveyard; a fresh one is
// allocated on the path that follows (elided).
203 if (sample ==
nullptr) {
// Fragments of HashtablezSampler::Iterate: walk the all_ list and visit
// only live records.
220 while (s !=
nullptr) {
// dead == nullptr marks a live (non-graveyarded) sample.
222 if (s->dead ==
nullptr) {
// Fragments of SampleSlow.
// A negative stride marks a thread's first pass through sampling.
237 bool first = *next_sample < 0;
// Draw the next sampling interval from a geometric distribution whose mean
// is the configured sample parameter.
238 *next_sample = GetGeometricVariable(
239 g_hashtablez_sample_parameter.load(std::memory_order_relaxed));
// Sampling disabled globally: report no sample.
244 if (!g_hashtablez_enabled.load(std::memory_order_relaxed))
return nullptr;
// Fragments of RecordInsertSlow (preprocessor markers fused by extraction;
// the TLS declaration and SSE2 probe-length scaling bodies are elided).
256 #if ABSL_PER_THREAD_TLS == 1 258 #endif // ABSL_PER_THREAD_TLS == 1 265 size_t distance_from_desired) {
268 size_t probe_length = distance_from_desired;
269 #if SWISSTABLE_HAVE_SSE2 280 std::memory_order_relaxed);
// Count the inserted element.
282 info->
size.fetch_add(1, std::memory_order_relaxed);
// Body of SetHashtablezEnabled: publishes the global on/off flag.
286 g_hashtablez_enabled.store(enabled, std::memory_order_release);
// Fragment of SetHashtablezSampleParameter: accept a valid rate, otherwise
// log and keep the previous value (the validity test itself is elided).
291 g_hashtablez_sample_parameter.store(rate, std::memory_order_release);
293 ABSL_RAW_LOG(ERROR,
"Invalid hashtablez sample rate: %lld",
294 static_cast<long long>(rate));
// Fragment of SetHashtablezMaxSamples: accept a valid cap, otherwise log
// and keep the previous value (the validity test itself is elided).
300 g_hashtablez_max_samples.store(max, std::memory_order_release);
302 ABSL_RAW_LOG(ERROR,
"Invalid hashtablez max samples: %lld",
303 static_cast<long long>(max));
void SetHashtablezSampleParameter(int32_t rate)
HashtablezInfo * SampleSlow(int64_t *next_sample)
void RecordInsertSlow(HashtablezInfo *info, size_t hash, size_t distance_from_desired)
#define ABSL_RAW_LOG(severity,...)
void SetHashtablezEnabled(bool enabled)
HashtablezInfo * PopDead()
HashtablezInfo graveyard_
void PushDead(HashtablezInfo *sample)
void UnsampleSlow(HashtablezInfo *info)
void SetHashtablezMaxSamples(int32_t max)
void PrepareForSampling() EXCLUSIVE_LOCKS_REQUIRED(init_mu)
std::atomic< size_t > max_probe_length
const bool kAbslContainerInternalSampleEverything
std::atomic< size_t > total_probe_length
std::atomic< size_t > hashes_bitwise_and
std::atomic< size_t > hashes_bitwise_or
void(*)(const HashtablezInfo &) DisposeCallback
AllocList * next[kMaxLevel]
hash_default_hash< typename T::first_type > hash
static constexpr int kMaxStackDepth
std::atomic< size_t > dropped_samples_
int64_t Iterate(const std::function< void(const HashtablezInfo &stack)> &f)
std::atomic< DisposeCallback > dispose_
std::atomic< HashtablezInfo * > all_
#define ABSL_PREDICT_TRUE(x)
std::atomic< size_t > size_estimate_
static HashtablezSampler & Global()
void PushNew(HashtablezInfo *sample)
#define ABSL_PER_THREAD_TLS_KEYWORD
DisposeCallback SetDisposeCallback(DisposeCallback f)
HashtablezInfo * Register()
void Unregister(HashtablezInfo *sample)
std::atomic< size_t > size
ABSL_ATTRIBUTE_NOINLINE ABSL_ATTRIBUTE_NO_TAIL_CALL int GetStackTrace(void **result, int max_depth, int skip_count)