10 #ifndef EIGEN_CXX11_THREADPOOL_RUNQUEUE_H_
11 #define EIGEN_CXX11_THREADPOOL_RUNQUEUE_H_
// Template parameter list of the (not fully visible) queue class:
// Work is the element type and kSize the fixed capacity -- a power of
// two, judging by the kMask = kSize - 1 definition further down.
// NOTE(review): the stray "37" below looks like an original line number
// fused in by extraction; the class declaration itself is missing here.
37 template <
typename Work,
unsigned kSize>
// Loop over all kSize slots -- presumably the constructor initializing
// each element's state (the loop body is not visible in this chunk).
// NOTE(review): "45" is an extraction artifact (original line number).
45 for (
unsigned i = 0;
i < kSize;
i++)
// Fragment of a PushFront path: insert one element at the front of the
// queue. NOTE(review): the function signature, the element lookup and
// some branches are missing from this chunk, and the leading numerals
// (54, 56, ...) look like original line numbers fused in by extraction --
// this text will not compile as-is.
// Relaxed load: presumably only the owner thread mutates front_ -- confirm.
54 unsigned front =
front_.load(std::memory_order_relaxed);
// Read the slot's state and try to claim it by CAS-ing to kBusy; the
// expected-state check preceding the '!' is not visible here.
56 uint8_t s =
e->state.load(std::memory_order_relaxed);
58 !
e->state.compare_exchange_strong(
s,
kBusy, std::memory_order_acquire))
// Advance front; the counter is kept modulo 2*kSize (cf. kMask2 below),
// which explains the extra (kSize << 1) term.
60 front_.store(front + 1 + (kSize << 1), std::memory_order_relaxed);
// Publish the slot as ready: release pairs with consumers' acquire CAS.
62 e->state.store(
kReady, std::memory_order_release);
// Fragment of a PopFront path: remove one element from the front.
// NOTE(review): signature, element lookup and early-return branches are
// missing from this chunk; the leading numerals are extraction artifacts.
69 unsigned front =
front_.load(std::memory_order_relaxed);
// Claim the slot by CAS to kBusy; the expected-state condition before
// the '!' is not visible in this chunk.
71 uint8_t s =
e->state.load(std::memory_order_relaxed);
73 !
e->state.compare_exchange_strong(
s,
kBusy, std::memory_order_acquire))
// Move the work item out of the slot.
75 Work
w = std::move(
e->w);
// Release the slot back to kEmpty; release pairs with producers' acquire.
76 e->state.store(
kEmpty, std::memory_order_release);
// Publish the updated (presumably decremented, not visible) front index.
78 front_.store(front, std::memory_order_relaxed);
// Fragment of a PushBack path: insert at the back. Unlike the front
// fragments above, this side takes mutex_, so the back end is presumably
// shared between threads -- confirm against the full original.
// NOTE(review): leading numerals (85, 86, ...) are extraction artifacts;
// parts of the function are missing from this chunk.
85 std::unique_lock<std::mutex> lock(
mutex_);
86 unsigned back =
back_.load(std::memory_order_relaxed);
// Claim the target slot via CAS to kBusy (expected state not visible here).
88 uint8_t s =
e->state.load(std::memory_order_relaxed);
90 !
e->state.compare_exchange_strong(
s,
kBusy, std::memory_order_acquire))
// Store the (presumably adjusted) back index; still under the mutex.
93 back_.store(back, std::memory_order_relaxed);
// Publish the stored work item: release pairs with consumers' acquire.
95 e->state.store(
kReady, std::memory_order_release);
// Fragment of a PopBack path: remove one element from the back.
// NOTE(review): leading numerals are extraction artifacts; several
// statements (element lookup, returns) are missing from this chunk.
// Cheap emptiness probe before taking the mutex; returns a
// default-constructed Work when there is nothing to pop.
101 if (
Empty())
return Work();
102 std::unique_lock<std::mutex> lock(
mutex_);
103 unsigned back =
back_.load(std::memory_order_relaxed);
// Claim the slot via CAS to kBusy (expected state not visible here).
105 uint8_t s =
e->state.load(std::memory_order_relaxed);
107 !
e->state.compare_exchange_strong(
s,
kBusy, std::memory_order_acquire))
// Move the work out and release the slot back to kEmpty.
109 Work
w = std::move(
e->w);
110 e->state.store(
kEmpty, std::memory_order_release);
// Advance back; counter kept modulo 2*kSize, hence the (kSize << 1) term.
111 back_.store(back + 1 + (kSize << 1), std::memory_order_relaxed);
// Fragment of a PopBackHalf-style path: move roughly half of the queue's
// elements into *result, returning the count (judging by 'return 0' and
// the result->push_back below). NOTE(review): the signature and several
// statements are missing; leading numerals are extraction artifacts.
// Cheap emptiness probe before locking.
118 if (
Empty())
return 0;
119 std::unique_lock<std::mutex> lock(
mutex_);
120 unsigned back =
back_.load(std::memory_order_relaxed);
// mid marks the last slot to take: about half of the current size
// (size and mid are declared in lines not visible here).
123 if (
size > 1) mid = back + (
size - 1) / 2;
// Walk from mid down to back, claiming each slot in turn.
126 for (;
static_cast<int>(mid - back) >= 0; mid--) {
128 uint8_t s =
e->state.load(std::memory_order_relaxed);
// Skip slots that are not kReady or that another thread claims first.
130 if (
s !=
kReady || !
e->state.compare_exchange_strong(
131 s,
kBusy, std::memory_order_acquire))
// Move each taken work item into the caller's container.
139 result->push_back(std::move(
e->w));
140 e->state.store(
kEmpty, std::memory_order_release);
// Publish the new back position past everything consumed ('start' is
// declared in a line not visible here).
144 back_.store(start + 1 + (kSize << 1), std::memory_order_relaxed);
150 unsigned Size()
const {
return SizeOrNotEmpty<true>(); }
154 bool Empty()
const {
return SizeOrNotEmpty<false>() == 0; }
164 static const unsigned kMask = kSize - 1;
165 static const unsigned kMask2 = (kSize << 1) - 1;
// Fragment of SizeOrNotEmpty<NeedSizeEstimate>: a lock-free size query,
// evidently used by Size() (exact-ish estimate) and Empty() (zero /
// non-zero only) above. NOTE(review): leading numerals are extraction
// artifacts and parts of the function (loop structure, retry path,
// size computation) are missing from this chunk.
190 template<
bool NeedSizeEstimate>
// Snapshot front, then back, then re-read front: if front changed in
// between, the back value may be inconsistent with it (the retry logic
// is not visible here).
194 unsigned front =
front_.load(std::memory_order_acquire);
197 unsigned back =
back_.load(std::memory_order_acquire);
198 unsigned front1 =
front_.load(std::memory_order_relaxed);
199 if (front != front1) {
// Fence upgrades the relaxed re-read before the (not visible) retry.
201 std::atomic_thread_fence(std::memory_order_acquire);
204 if (NeedSizeEstimate) {
// front == back modulo 2*kSize is the signature of a possibly-empty queue.
208 unsigned maybe_zero = ((front ^ back) &
kMask2);
// Clamp the computed size ('size' declared in lines not visible here)
// to the physical capacity before returning it.
226 if (
size >
static_cast<int>(kSize))
size = kSize;
227 return static_cast<unsigned>(
size);
236 #endif // EIGEN_CXX11_THREADPOOL_RUNQUEUE_H_