#ifndef ABSL_LOW_LEVEL_ALLOC_MISSING

namespace synchronization_internal {

// PerThreadSem::Init(): reset the per-thread bookkeeping fields.
identity->ticker.store(0, std::memory_order_relaxed);
identity->wait_start.store(0, std::memory_order_relaxed);
identity->is_idle.store(false, std::memory_order_relaxed);

// PerThreadSem::Tick(): advance the per-thread ticker and sample the wait state.
const int ticker =
    identity->ticker.fetch_add(1, std::memory_order_relaxed) + 1;
const int wait_start = identity->wait_start.load(std::memory_order_relaxed);
const bool is_idle = identity->is_idle.load(std::memory_order_relaxed);

// AbslInternalPerThreadSemWait(): record a non-zero wait_start before blocking.
int ticker = identity->ticker.load(std::memory_order_relaxed);
identity->wait_start.store(ticker ? ticker : 1, std::memory_order_relaxed);
identity->is_idle.store(false, std::memory_order_relaxed);

// AbslInternalPerThreadSemWait(): clear the wait state after the wait returns.
identity->is_idle.store(false, std::memory_order_relaxed);
identity->wait_start.store(0, std::memory_order_relaxed);

#endif  // ABSL_LOW_LEVEL_ALLOC_MISSING
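Read together, the excerpts implement a simple idleness-detection protocol: Init() zeroes ticker, wait_start, and is_idle; the wait path records a non-zero wait_start before blocking and clears it afterwards; and Tick() advances the ticker so the elided lines can compare ticker - wait_start against kIdlePeriods and wake a thread that has been blocked for too long, letting it mark itself idle. Below is a minimal, self-contained model of that protocol, not the absl implementation: Identity, Poke(), and the kIdlePeriods value of 60 are illustrative stand-ins.

// Minimal model of the ticker/wait_start/is_idle bookkeeping (C++17).
// Identity, Poke(), and kIdlePeriods = 60 are stand-ins, not absl types.
#include <atomic>
#include <cstdio>

namespace {

constexpr int kIdlePeriods = 60;  // assumed threshold, for illustration only

struct Identity {
  std::atomic<int> ticker{0};      // bumped once per Tick()
  std::atomic<int> wait_start{0};  // ticker value at which the thread began waiting
  std::atomic<bool> is_idle{false};
};

// Stand-in for waking the blocked thread so it can mark itself idle.
void Poke() { std::puts("poke: waiter has been blocked long enough to go idle"); }

// Mirrors the Tick() excerpt, plus the comparison the sampled values feed into.
void Tick(Identity &id) {
  const int ticker = id.ticker.fetch_add(1, std::memory_order_relaxed) + 1;
  const int wait_start = id.wait_start.load(std::memory_order_relaxed);
  const bool is_idle = id.is_idle.load(std::memory_order_relaxed);
  if (wait_start != 0 && ticker - wait_start > kIdlePeriods && !is_idle) {
    Poke();
  }
}

}  // namespace

int main() {
  Identity id;
  // As in the wait-path excerpt: record a non-zero wait_start before blocking.
  const int t = id.ticker.load(std::memory_order_relaxed);
  id.wait_start.store(t ? t : 1, std::memory_order_relaxed);
  id.is_idle.store(false, std::memory_order_relaxed);

  // Simulate the periodic ticker: the poke fires once the thread has been
  // "blocked" for more than kIdlePeriods ticks.
  for (int i = 0; i < kIdlePeriods + 2; ++i) Tick(id);

  // As in the post-wait excerpt: clear the wait state.
  id.is_idle.store(false, std::memory_order_relaxed);
  id.wait_start.store(0, std::memory_order_relaxed);
}

The relaxed memory orders fit this sketch because the fields are advisory bookkeeping read by a periodic ticker; presumably the actual wakeup and blocking are synchronized by the Waiter itself rather than by these atomics.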
Declarations referenced above:

bool Wait(KernelTimeout t)
static void Init(base_internal::ThreadIdentity *identity)
base_internal::ThreadIdentity * GetOrCreateCurrentThreadIdentity()
ABSL_ATTRIBUTE_WEAK bool AbslInternalPerThreadSemWait(absl::synchronization_internal::KernelTimeout t)
static void SetThreadBlockedCounter(std::atomic< int > *counter)
std::atomic< bool > is_idle
std::atomic< int > * blocked_count_ptr
static const int kIdlePeriods
ABSL_ATTRIBUTE_WEAK void AbslInternalPerThreadSemPost(absl::base_internal::ThreadIdentity *identity)
std::atomic< int > wait_start
static void Tick(base_internal::ThreadIdentity *identity)
#define ABSL_ATTRIBUTE_WEAK
static std::atomic< int > * GetThreadBlockedCounter()
static Waiter * GetWaiter(base_internal::ThreadIdentity *identity)
std::atomic< int > ticker
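The weak C functions AbslInternalPerThreadSemPost() and AbslInternalPerThreadSemWait(), together with SetThreadBlockedCounter()/GetThreadBlockedCounter() and blocked_count_ptr, suggest the intended usage: a thread pool registers a counter for each of its threads, and the wait path increments it while the thread is blocked so the pool can see how many of its threads are currently waiting. A rough, self-contained sketch of that contract follows (C++20 for std::counting_semaphore, used here as a stand-in for the real Waiter); SemWait(), SemPost(), and pool_blocked_count are illustrative names, not the absl API, and the real wait path also handles KernelTimeout and the wait_start/is_idle bookkeeping shown above.

// Sketch of the Post()/Wait() contract with an optional blocked-thread
// counter; std::counting_semaphore stands in for the real Waiter.
#include <atomic>
#include <chrono>
#include <cstdio>
#include <semaphore>
#include <thread>

namespace {

std::counting_semaphore<> sem{0};        // stand-in for the per-thread Waiter
std::atomic<int> pool_blocked_count{0};  // what blocked_count_ptr would point at

// Rough analogue of the wait path: bump the registered counter around the
// block when a counter has been supplied.
void SemWait(std::atomic<int> *blocked_count_ptr) {
  if (blocked_count_ptr != nullptr) {
    blocked_count_ptr->fetch_add(1, std::memory_order_relaxed);
  }
  sem.acquire();  // the real code blocks on the Waiter with a KernelTimeout
  if (blocked_count_ptr != nullptr) {
    blocked_count_ptr->fetch_sub(1, std::memory_order_relaxed);
  }
}

// Rough analogue of the post path: wake one blocked waiter.
void SemPost() { sem.release(); }

}  // namespace

int main() {
  std::thread waiter([] { SemWait(&pool_blocked_count); });

  // Give the waiter time to block; the counter is then (almost certainly) 1.
  std::this_thread::sleep_for(std::chrono::milliseconds(100));
  std::printf("blocked threads: %d\n",
              pool_blocked_count.load(std::memory_order_relaxed));

  SemPost();     // wake the waiter
  waiter.join();
}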