boost/atomic/detail/linux-arm.hpp
#ifndef BOOST_DETAIL_ATOMIC_LINUX_ARM_HPP
#define BOOST_DETAIL_ATOMIC_LINUX_ARM_HPP

// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// Copyright (c) 2009, 2011 Helge Bahmann
// Copyright (c) 2009 Phil Endecott
// Linux-specific code by Phil Endecott

// Different ARM processors have different atomic instructions. In particular,
// architecture versions before v6 (which are still in widespread use, e.g. the
// Intel/Marvell XScale chips like the one in the NSLU2) have only atomic swap.
// On Linux the kernel provides some support that lets us abstract away from
// these differences: it provides emulated CAS and barrier functions at special
// addresses that are guaranteed not to be interrupted by the kernel. Using
// this facility is slightly slower than inline assembler would be, but much
// faster than a system call.
//
// While this emulated CAS is "strong" in the sense that it does not fail
// spuriously (i.e. it never fails to perform the exchange when the value
// found equals the value expected), it does not return the found value on
// failure. To satisfy the atomic API, compare_exchange_{weak|strong} must
// return the found value on failure, so we have to load this value manually
// after the emulated CAS reports failure. This introduces a race between the
// CAS failing (because the "wrong" value was found) and the subsequent load
// (which might turn up the "right" value). From the application's point of
// view this looks like spurious failure, and therefore the emulated CAS is
// only good enough to provide compare_exchange_weak semantics.
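//
// For example (an illustrative interleaving, not part of the original
// comment): suppose *ptr == 5 and we call the emulated CAS expecting 5.
// Another thread briefly changes *ptr to 6, so the kernel CAS fails; before
// our follow-up load runs, the other thread restores *ptr to 5. We then
// report failure with a found value of 5, equal to what was expected, which
// the caller can only interpret as a spurious failure.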
#include <boost/memory_order.hpp>
#include <boost/atomic/detail/base.hpp>

#define BOOST_ATOMIC_CHAR_LOCK_FREE 2
#define BOOST_ATOMIC_CHAR16_T_LOCK_FREE 2
#define BOOST_ATOMIC_CHAR32_T_LOCK_FREE 2
#define BOOST_ATOMIC_WCHAR_T_LOCK_FREE 2
#define BOOST_ATOMIC_SHORT_LOCK_FREE 2
#define BOOST_ATOMIC_INT_LOCK_FREE 2
#define BOOST_ATOMIC_LONG_LOCK_FREE 2
#define BOOST_ATOMIC_LLONG_LOCK_FREE 0
#define BOOST_ATOMIC_ADDRESS_LOCK_FREE 2
#define BOOST_ATOMIC_BOOL_LOCK_FREE 2

namespace boost {
namespace detail {
namespace atomic {
// 0xffff0fa0 is the fixed address of the kernel-provided memory barrier
// helper (__kuser_memory_barrier).
static inline void
arm_barrier(void)
{
    void (*kernel_dmb)(void) = (void (*)(void)) 0xffff0fa0;
    kernel_dmb();
}
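
// For illustration only (an assumption about the helper, not part of this
// header): on an ARMv7 core the kernel's barrier helper amounts to a single
// "dmb" instruction, which inline assembler could issue directly:
//
//     static inline void arm_barrier_v7(void)
//     {
//         __asm__ __volatile__ ("dmb" ::: "memory");
//     }
//
// Calling through 0xffff0fa0 instead lets the same binary run on older
// cores, where the kernel substitutes whatever barrier that CPU requires.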

static inline void
platform_fence_before(memory_order order)
{
    switch(order) {
        case memory_order_release:
        case memory_order_acq_rel:
        case memory_order_seq_cst:
            arm_barrier();
        case memory_order_consume:
        default:;
    }
}

static inline void
platform_fence_after(memory_order order)
{
    switch(order) {
        case memory_order_acquire:
        case memory_order_acq_rel:
        case memory_order_seq_cst:
            arm_barrier();
        default:;
    }
}

static inline void
platform_fence_before_store(memory_order order)
{
    platform_fence_before(order);
}

static inline void
platform_fence_after_store(memory_order order)
{
    if (order == memory_order_seq_cst)
        arm_barrier();
}

static inline void
platform_fence_after_load(memory_order order)
{
    platform_fence_after(order);
}

// 0xffff0fc0 is the fixed address of the kernel-provided compare-and-swap
// helper (__kuser_cmpxchg); it returns zero when the exchange was performed.
template<typename T>
bool
platform_cmpxchg32(T & expected, T desired, volatile T * ptr)
{
    typedef T (*kernel_cmpxchg32_t)(T oldval, T newval, volatile T * ptr);

    if (((kernel_cmpxchg32_t) 0xffff0fc0)(expected, desired, ptr) == 0) {
        return true;
    } else {
        // Reload the found value manually; see the race discussion at the
        // top of this file.
        expected = *ptr;
        return false;
    }
}
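
// For illustration only (a sketch, not part of this header): a caller that
// needs strong CAS semantics has to absorb the spurious failures described
// above by retrying until it sees either success or a genuine mismatch:
//
//     template<typename T>
//     bool platform_cmpxchg32_strong(T & expected, T desired, volatile T * ptr)
//     {
//         for (;;) {
//             T found = expected;
//             if (platform_cmpxchg32(found, desired, ptr))
//                 return true;            // exchange performed
//             if (found != expected) {
//                 expected = found;       // genuine mismatch: report found value
//                 return false;
//             }
//             // found == expected: the failure was spurious, so retry
//         }
//     }
//
// The generic code in boost/atomic/detail/cas32weak.hpp, included at the end
// of this header, performs this kind of retry when it builds the full set of
// atomic operations on top of the weak CAS.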

}
}

#define BOOST_ATOMIC_THREAD_FENCE 2
static inline void
atomic_thread_fence(memory_order order)
{
    switch(order) {
        case memory_order_acquire:
        case memory_order_release:
        case memory_order_acq_rel:
        case memory_order_seq_cst:
            detail::atomic::arm_barrier();
        default:;
    }
}

#define BOOST_ATOMIC_SIGNAL_FENCE 2
static inline void
atomic_signal_fence(memory_order)
{
    __asm__ __volatile__ ("" ::: "memory");
}

}

#include <boost/atomic/detail/cas32weak.hpp>

#endif
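
Usage sketch (an illustration assuming Boost.Atomic is available; nothing
below appears in the header itself): on pre-v6 ARM Linux, boost::atomic
ultimately dispatches through the kernel-assisted CAS above, so
compare_exchange_weak can fail spuriously and must be called in a loop:

#include <boost/atomic.hpp>

// Atomically add `delta` to `counter` using only the weak CAS primitive,
// retrying on the spurious failures described in the header comment.
int fetch_add_via_weak_cas(boost::atomic<int> & counter, int delta)
{
    int expected = counter.load(boost::memory_order_relaxed);
    while (!counter.compare_exchange_weak(expected, expected + delta,
                                          boost::memory_order_acq_rel,
                                          boost::memory_order_relaxed))
    {
        // On failure, `expected` has been reloaded with the value found,
        // so the next iteration computes the new desired value from it.
    }
    return expected;  // value before the addition
}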