#ifndef __ZMQ_ATOMIC_COUNTER_HPP_INCLUDED__
#define __ZMQ_ATOMIC_COUNTER_HPP_INCLUDED__

#include "stdint.hpp"
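
//  Pick the implementation of the counter: a mutex can be forced with
//  ZMQ_FORCE_MUTEXES; otherwise prefer C++11 atomics, then compiler
//  atomic intrinsics, then per-architecture or per-OS primitives, and
//  fall back to a mutex when nothing else is available.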
#if defined ZMQ_FORCE_MUTEXES
#define ZMQ_ATOMIC_COUNTER_MUTEX
#elif (defined __cplusplus && __cplusplus >= 201103L) \
  || (defined _MSC_VER && _MSC_VER >= 1900)
#define ZMQ_ATOMIC_COUNTER_CXX11
#elif defined ZMQ_HAVE_ATOMIC_INTRINSICS
#define ZMQ_ATOMIC_COUNTER_INTRINSIC
#elif (defined __i386__ || defined __x86_64__) && defined __GNUC__
#define ZMQ_ATOMIC_COUNTER_X86
#elif defined __ARM_ARCH_7A__ && defined __GNUC__
#define ZMQ_ATOMIC_COUNTER_ARM
#elif defined ZMQ_HAVE_WINDOWS
#define ZMQ_ATOMIC_COUNTER_WINDOWS
#elif (defined ZMQ_HAVE_SOLARIS || defined ZMQ_HAVE_NETBSD \
       || defined ZMQ_HAVE_GNU)
#define ZMQ_ATOMIC_COUNTER_ATOMIC_H
#elif defined __tile__
#define ZMQ_ATOMIC_COUNTER_TILE
#else
#define ZMQ_ATOMIC_COUNTER_MUTEX
#endif
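
//  Pull in the header needed by the backend selected above.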
#if defined ZMQ_ATOMIC_COUNTER_MUTEX
#include "mutex.hpp"
#elif defined ZMQ_ATOMIC_COUNTER_CXX11
#include <atomic>
#elif defined ZMQ_ATOMIC_COUNTER_WINDOWS
#include "windows.hpp"
#elif defined ZMQ_ATOMIC_COUNTER_ATOMIC_H
#include <atomic.h>
#elif defined ZMQ_ATOMIC_COUNTER_TILE
#include <arch/atomic.h>
#endif
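
//  This class is an integer that can be incremented and decremented
//  atomically.  It is forced to pointer-size alignment because buffers
//  that start with an atomic_counter_t would otherwise mis-align the
//  pointers stored after it, causing SIGBUS on strict-alignment
//  architectures such as SPARC.  A typical use is reference counting of
//  shared message buffers: add (1) takes a reference, and sub (1)
//  returning false means the last reference is gone.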
#if defined(_MSC_VER) && (defined(_M_X64) || defined(_M_ARM64))
class __declspec(align (8)) atomic_counter_t
#elif defined(_MSC_VER) && (defined(_M_IX86) || defined(_M_ARM_ARMV7VE))
class __declspec(align (4)) atomic_counter_t
#else
class atomic_counter_t
#endif
{
  public:
    typedef uint32_t integer_t;

    //  Atomic addition. Returns the old value.
    integer_t add (integer_t increment_)
    {
        integer_t old_value;

#if defined ZMQ_ATOMIC_COUNTER_WINDOWS
        old_value = InterlockedExchangeAdd ((LONG *) &_value, increment_);
#elif defined ZMQ_ATOMIC_COUNTER_INTRINSIC
        old_value = __atomic_fetch_add (&_value, increment_, __ATOMIC_ACQ_REL);
#elif defined ZMQ_ATOMIC_COUNTER_CXX11
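        //  fetch_add with acquire-release ordering: the read-modify-write
        //  both publishes this thread's prior writes and observes other
        //  threads' updates to the counter.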
        old_value = _value.fetch_add (increment_, std::memory_order_acq_rel);
#elif defined ZMQ_ATOMIC_COUNTER_ATOMIC_H
        integer_t new_value = atomic_add_32_nv (&_value, increment_);
        old_value = new_value - increment_;
#elif defined ZMQ_ATOMIC_COUNTER_TILE
        old_value = arch_atomic_add (&_value, increment_);
#elif defined ZMQ_ATOMIC_COUNTER_X86
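        //  LOCK XADD atomically adds increment_ to the counter and leaves
        //  the previous value in old_value.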
        __asm__ volatile ("lock; xadd %0, %1 \n\t"
                          : "=r"(old_value), "=m"(_value)
                          : "0"(increment_), "m"(_value)
                          : "cc", "memory");
#elif defined ZMQ_ATOMIC_COUNTER_ARM
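        //  Load-linked/store-conditional loop: LDREX/STREX retry until the
        //  store succeeds, with DMB barriers on both sides for ordering.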
        integer_t flag, tmp;
        __asm__ volatile ("       dmb     sy\n\t"
                          "1:     ldrex   %0, [%5]\n\t"
                          "       add     %2, %0, %4\n\t"
                          "       strex   %1, %2, [%5]\n\t"
                          "       teq     %1, #0\n\t"
                          "       bne     1b\n\t"
                          "       dmb     sy\n\t"
                          : "=&r"(old_value), "=&r"(flag), "=&r"(tmp),
                            "+Qo"(_value)
                          : "Ir"(increment_), "r"(&_value)
                          : "cc");
#elif defined ZMQ_ATOMIC_COUNTER_MUTEX
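        //  Portable fallback: serialize the update with a mutex.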
        sync.lock ();
        old_value = _value;
        _value += increment_;
        sync.unlock ();
#else
#error atomic_counter is not implemented for this platform
#endif

        return old_value;
    }

    //  Atomic subtraction. Returns false if the counter drops to zero.
    bool sub (integer_t decrement_)
    {
#if defined ZMQ_ATOMIC_COUNTER_WINDOWS
        LONG delta = -((LONG) decrement_);
        integer_t old = InterlockedExchangeAdd ((LONG *) &_value, delta);
        return old - decrement_ != 0;
#elif defined ZMQ_ATOMIC_COUNTER_INTRINSIC
        integer_t nv =
          __atomic_sub_fetch (&_value, decrement_, __ATOMIC_ACQ_REL);
        return nv != 0;
#elif defined ZMQ_ATOMIC_COUNTER_CXX11
        integer_t old =
          _value.fetch_sub (decrement_, std::memory_order_acq_rel);
        return old - decrement_ != 0;
#elif defined ZMQ_ATOMIC_COUNTER_ATOMIC_H
        int32_t delta = -((int32_t) decrement_);
        integer_t nv = atomic_add_32_nv (&_value, delta);
        return nv != 0;
#elif defined ZMQ_ATOMIC_COUNTER_TILE
        int32_t delta = -((int32_t) decrement_);
        integer_t nv = arch_atomic_add (&_value, delta);
        return nv != 0;
#elif defined ZMQ_ATOMIC_COUNTER_X86
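        //  Add the two's complement of decrement_ with LOCK XADD; the
        //  instruction returns the pre-decrement value, so the counter hit
        //  zero exactly when that value equals decrement_.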
        integer_t oldval = -decrement_;
        volatile integer_t *val = &_value;
        __asm__ volatile ("lock; xaddl %0,%1"
                          : "=r"(oldval), "=m"(*val)
                          : "0"(oldval), "m"(*val)
                          : "cc", "memory");
        return oldval != decrement_;
#elif defined ZMQ_ATOMIC_COUNTER_ARM
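        //  Same LDREX/STREX retry loop as in add (), subtracting instead.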
        integer_t old_value, flag, tmp;
        __asm__ volatile ("       dmb     sy\n\t"
                          "1:     ldrex   %0, [%5]\n\t"
                          "       sub     %2, %0, %4\n\t"
                          "       strex   %1, %2, [%5]\n\t"
                          "       teq     %1, #0\n\t"
                          "       bne     1b\n\t"
                          "       dmb     sy\n\t"
                          : "=&r"(old_value), "=&r"(flag), "=&r"(tmp),
                            "+Qo"(_value)
                          : "Ir"(decrement_), "r"(&_value)
                          : "cc");
        return old_value - decrement_ != 0;
#elif defined ZMQ_ATOMIC_COUNTER_MUTEX
        sync.lock ();
        _value -= decrement_;
        bool result = _value ? true : false;
        sync.unlock ();
        return result;
#else
#error atomic_counter is not implemented for this platform
#endif
    }

  private:
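    //  Counter storage: a std::atomic under C++11, a volatile integer
    //  manipulated by the platform primitives otherwise.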
#if defined ZMQ_ATOMIC_COUNTER_CXX11
    std::atomic<integer_t> _value;
#else
    volatile integer_t _value;
#endif
#if defined ZMQ_ATOMIC_COUNTER_MUTEX
    mutex_t sync;
#endif
#if !defined ZMQ_ATOMIC_COUNTER_CXX11
    ZMQ_NON_COPYABLE_NOR_MOVABLE (atomic_counter_t)
#endif
#if defined(__GNUC__) || defined(__INTEL_COMPILER) \
  || (defined(__SUNPRO_C) && __SUNPRO_C >= 0x590) \
  || (defined(__SUNPRO_CC) && __SUNPRO_CC >= 0x590)
} __attribute__ ((aligned (sizeof (void *))));
#else
};
#endif
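
/*  Illustrative sketch (not part of this header): a hypothetical shared
    buffer that is freed when the last reference is dropped.

    struct shared_buf_t
    {
        zmq::atomic_counter_t refs;
        unsigned char data[256];
    };

    void acquire (shared_buf_t *buf_)
    {
        buf_->refs.add (1);
    }

    void release (shared_buf_t *buf_)
    {
        //  sub () returns false once the counter reaches zero.
        if (!buf_->refs.sub (1))
            delete buf_;
    }
*/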
#undef ZMQ_ATOMIC_COUNTER_MUTEX
#undef ZMQ_ATOMIC_COUNTER_INTRINSIC
#undef ZMQ_ATOMIC_COUNTER_CXX11
#undef ZMQ_ATOMIC_COUNTER_X86
#undef ZMQ_ATOMIC_COUNTER_ARM
#undef ZMQ_ATOMIC_COUNTER_WINDOWS
#undef ZMQ_ATOMIC_COUNTER_ATOMIC_H
#undef ZMQ_ATOMIC_COUNTER_TILE

#endif