// NOTE(review): this file is a garbled extraction (doxygen source listing)
// of the header named by its include guard: Eigen's CXX11 ThreadPool
// RunQueue.h. The bare integers fused into the lines below ("10", "11",
// "37", ...) are the ORIGINAL file's line numbers left behind by the
// extraction; many original lines are missing entirely, so nothing in this
// view compiles as-is. Comments describe what the visible fragments show.
10 #ifndef EIGEN_CXX11_THREADPOOL_RUNQUEUE_H_ 11 #define EIGEN_CXX11_THREADPOOL_RUNQUEUE_H_ 37 template <
// Template parameters of the queue: Work is the element type stored;
// kSize is the fixed capacity. kMask = kSize - 1 further down implies
// kSize is expected to be a power of two -- TODO confirm in the full file.
typename Work,
unsigned kSize>
// Fragment of a loop over all kSize slots (original line 45). The loop
// body is missing from this extraction -- presumably the constructor's
// per-slot state initialization; verify against the full header.
45 for (
unsigned i = 0;
i < kSize;
i++)
// Fragment of a push-at-front operation (original lines 54-62). The
// enclosing signature, the slot lookup (the 'e' pointer used below), and
// the early-return path are missing from this extraction.
54 unsigned front =
// Relaxed load of front_; the relaxed orderings on this side suggest
// front_ is modified only by the owning thread -- confirm in full file.
front_.load(std::memory_order_relaxed);
56 uint8_t s = e->state.load(std::memory_order_relaxed);
// Claim the slot by CAS-ing its state to kBusy with acquire ordering.
// The condition that precedes the '!' (a state check, matching the
// pattern visible at original lines 71/73) was dropped by the extraction.
58 !e->state.compare_exchange_strong(s,
kBusy, std::memory_order_acquire))
// Advance front_. The extra (kSize << 1) term interacts with the
// kMask/kMask2 masking declared below (doubled-modulus position
// encoding distinguishing empty from full) -- see kMask2 comment there.
60 front_.store(front + 1 + (kSize << 1), std::memory_order_relaxed);
// Publish the slot: the release store pairs with the acquire CAS that
// a consumer performs on the same state variable.
62 e->state.store(
kReady, std::memory_order_release);
// Fragment of a pop-at-front operation (original lines 69-78). The
// signature, slot lookup and the failure early-return are missing.
69 unsigned front =
front_.load(std::memory_order_relaxed);
71 uint8_t s = e->state.load(std::memory_order_relaxed);
// Claim the slot via CAS to kBusy (acquire pairs with the producer's
// release store of kReady). The state check before the '!' is missing
// from this extraction.
73 !e->state.compare_exchange_strong(s,
kBusy, std::memory_order_acquire))
// Move the work item out of the slot before releasing it.
75 Work
w = std::move(e->w);
// Mark the slot reusable; release ordering publishes the moved-from
// slot to the next producer's acquire CAS.
76 e->state.store(
kEmpty, std::memory_order_release);
// Store the updated front position (the arithmetic computing the new
// 'front' value is missing between lines 76 and 78 of the original).
78 front_.store(front, std::memory_order_relaxed);
// Fragment of a push-at-back operation (original lines 85-95). Unlike
// the front-side fragments above, this side is serialized by mutex_,
// as the visible unique_lock shows.
85 std::unique_lock<std::mutex> lock(
mutex_);
// back_ is read relaxed -- safe under the mutex held above, assuming
// all writers of back_ take the same mutex (the other back_-side
// fragments below also lock it).
86 unsigned back =
back_.load(std::memory_order_relaxed);
88 uint8_t s = e->state.load(std::memory_order_relaxed);
// Claim the target slot by CAS to kBusy; the preceding state check and
// the failure return are missing from this extraction.
90 !e->state.compare_exchange_strong(s,
kBusy, std::memory_order_acquire))
// Store the updated back position (the computation of the new 'back'
// value, between original lines 90 and 93, is missing).
93 back_.store(back, std::memory_order_relaxed);
// Publish the slot as ready; release pairs with consumers' acquire CAS.
95 e->state.store(
kReady, std::memory_order_release);
// Fragment of a pop-at-back operation (original lines 101-111).
// Fast path: bail out without taking the mutex when the queue is empty.
101 if (
Empty())
return Work();
// Back-side operations are serialized by mutex_.
102 std::unique_lock<std::mutex> lock(
mutex_);
103 unsigned back =
back_.load(std::memory_order_relaxed);
105 uint8_t s = e->state.load(std::memory_order_relaxed);
// Claim the slot via CAS to kBusy; the state check before the '!' and
// the failure return are missing from this extraction.
107 !e->state.compare_exchange_strong(s,
kBusy, std::memory_order_acquire))
// Move the item out, then mark the slot empty (release pairs with the
// producer's acquire CAS on the same state).
109 Work
w = std::move(e->w);
110 e->state.store(
kEmpty, std::memory_order_release);
// Advance back_ using the same 1 + (kSize << 1) doubled-modulus step as
// the front-side advance at original line 60.
111 back_.store(back + 1 + (kSize << 1), std::memory_order_relaxed);
// Fragment of PopBackHalf (original lines 118-144): moves a batch of
// elements from the back of the queue into *result (see the index entry
// 'unsigned PopBackHalf(std::vector<Work>* result)' at the file tail).
// Fast path: nothing to do on an empty queue.
118 if (
Empty())
return 0;
// Serialized with the other back-side operations via mutex_.
119 std::unique_lock<std::mutex> lock(
mutex_);
120 unsigned back =
back_.load(std::memory_order_relaxed);
// With more than one element, place 'mid' roughly half-way so about
// half the elements are taken ('size' and 'mid' declarations are
// missing from this extraction).
123 if (size > 1) mid = back + (size - 1) / 2;
// Walk from mid down to back. The cast to int makes the comparison
// robust to unsigned wraparound of the position counters.
126 for (;
static_cast<int>(mid - back) >= 0; mid--) {
128 uint8_t s = e->state.load(std::memory_order_relaxed);
// Only ready slots are taken; claim each via CAS to kBusy (acquire
// pairs with the producer's release of kReady).
130 if (s !=
kReady || !e->state.compare_exchange_strong(
131 s,
kBusy, std::memory_order_acquire))
// Move the element out and free its slot.
139 result->push_back(std::move(e->w));
140 e->state.store(
kEmpty, std::memory_order_release);
// Commit the new back position ('start' is computed in lines missing
// from this extraction); same doubled-modulus step as elsewhere.
144 back_.store(start + 1 + (kSize << 1), std::memory_order_relaxed);
150 unsigned Size()
const {
return SizeOrNotEmpty<true>(); }
154 bool Empty()
const {
return SizeOrNotEmpty<false>() == 0; }
// Bit masks derived from the capacity. kMask extracts a slot index
// (valid only when kSize is a power of two -- TODO confirm); kMask2
// spans TWICE the capacity, letting the front_/back_ position counters
// distinguish a full queue from an empty one (see the (front ^ back) &
// kMask2 test and the doubled-modulus size arithmetic below).
164 static const unsigned kMask = kSize - 1;
165 static const unsigned kMask2 = (kSize << 1) - 1;
// Fragment of SizeOrNotEmpty<NeedSizeEstimate>() (original lines
// 190-227): lock-free size estimation. When NeedSizeEstimate is false,
// callers (Empty above) only need a zero/non-zero answer.
190 template<
bool NeedSizeEstimate>
// Snapshot front, then back, then re-read front to detect a concurrent
// front-side modification between the two loads.
194 unsigned front = front_.load(std::memory_order_acquire);
197 unsigned back = back_.load(std::memory_order_acquire);
198 unsigned front1 = front_.load(std::memory_order_relaxed);
// Inconsistent snapshot: front changed while we were reading back.
// The retry logic inside this branch is missing from this extraction.
199 if (front != front1) {
// Upgrade the relaxed re-read above to acquire semantics.
201 std::atomic_thread_fence(std::memory_order_acquire);
204 if (NeedSizeEstimate) {
// Zero iff front and back coincide in the doubled modulus, i.e. the
// queue is empty (equal positions AND equal wrap epochs).
208 unsigned maybe_zero = ((front ^ back) & kMask2);
// Distance in the doubled modulus; may come out negative as an int...
219 int size = (front &
kMask2) - (back & kMask2);
// ...in which case wrap it back into [0, 2*kSize).
221 if (size < 0) size += 2 * kSize;
// A torn (concurrently-modified) snapshot can transiently overestimate;
// clamp to the physical capacity so the result stays meaningful.
226 if (size > static_cast<int>(kSize)) size = kSize;
227 return static_cast<unsigned>(
size);
236 #endif // EIGEN_CXX11_THREADPOOL_RUNQUEUE_H_
EIGEN_ALWAYS_INLINE unsigned CalculateSize(unsigned front, unsigned back) const
#define EIGEN_ALWAYS_INLINE
std::atomic< unsigned > back_
std::atomic< uint8_t > state
Namespace containing all symbols from the Eigen library.
void operator=(const RunQueue &)=delete
static const unsigned kMask2
std::atomic< unsigned > front_
Array< double, 1, 3 > e(1./3., 0.5, 2.)
#define eigen_plain_assert(x)
static const unsigned kMask
unsigned PopBackHalf(std::vector< Work > *result)
unsigned SizeOrNotEmpty() const