linux-arm.hpp
Go to the documentation of this file.
00001 #ifndef BOOST_DETAIL_ATOMIC_LINUX_ARM_HPP
00002 #define BOOST_DETAIL_ATOMIC_LINUX_ARM_HPP
00003 
00004 //  Distributed under the Boost Software License, Version 1.0.
00005 //  See accompanying file LICENSE_1_0.txt or copy at
00006 //  http://www.boost.org/LICENSE_1_0.txt)
00007 //
00008 //  Copyright (c) 2009, 2011 Helge Bahmann
00009 //  Copyright (c) 2009 Phil Endecott
00010 //  Linux-specific code by Phil Endecott
00011 
00012 // Different ARM processors have different atomic instructions.  In particular, 
00013 // architecture versions before v6 (which are still in widespread use, e.g. the 
00014 // Intel/Marvell XScale chips like the one in the NSLU2) have only atomic swap.  
00015 // On Linux the kernel provides some support that lets us abstract away from 
00016 // these differences: it provides emulated CAS and barrier functions at special 
// addresses that are guaranteed not to be interrupted by the kernel.  Using 
00018 // this facility is slightly slower than inline assembler would be, but much 
00019 // faster than a system call.
00020 // 
00021 // While this emulated CAS is "strong" in the sense that it does not fail
00022 // "spuriously" (i.e.: it never fails to perform the exchange when the value
00023 // found equals the value expected), it does not return the found value on
00024 // failure. To satisfy the atomic API, compare_exchange_{weak|strong} must
00025 // return the found value on failure, and we have to manually load this value
00026 // after the emulated CAS reports failure. This in turn introduces a race
00027 // between the CAS failing (due to the "wrong" value being found) and subsequently
00028 // loading (which might turn up the "right" value). From an application's
00029 // point of view this looks like "spurious failure", and therefore the
00030 // emulated CAS is only good enough to provide compare_exchange_weak
00031 // semantics.
00032 
00033 #include <boost/memory_order.hpp>
00034 #include <boost/atomic/detail/base.hpp>
00035 
00036 #define BOOST_ATOMIC_CHAR_LOCK_FREE 2
00037 #define BOOST_ATOMIC_CHAR16_T_LOCK_FREE 2
00038 #define BOOST_ATOMIC_CHAR32_T_LOCK_FREE 2
00039 #define BOOST_ATOMIC_WCHAR_T_LOCK_FREE 2
00040 #define BOOST_ATOMIC_SHORT_LOCK_FREE 2
00041 #define BOOST_ATOMIC_INT_LOCK_FREE 2
00042 #define BOOST_ATOMIC_LONG_LOCK_FREE 2
00043 #define BOOST_ATOMIC_LLONG_LOCK_FREE 0
00044 #define BOOST_ATOMIC_ADDRESS_LOCK_FREE 2
00045 #define BOOST_ATOMIC_BOOL_LOCK_FREE 2
00046 
00047 namespace boost {
00048 namespace detail {
00049 namespace atomic {
00050 
// Issue a full hardware memory barrier via the Linux "kuser" helper
// page. The helper at fixed address 0xffff0fa0 (__kernel_dmb) performs
// the strongest barrier available on the running CPU, which is what
// lets this backend work even on pre-ARMv6 cores that have no DMB
// instruction of their own (see file header).
static inline void
arm_barrier(void)
{
        typedef void (*kernel_dmb_t)(void);
        ((kernel_dmb_t) 0xffff0fa0)();
}
00057 
00058 static inline void
00059 platform_fence_before(memory_order order)
00060 {
00061         switch(order) {
00062                 case memory_order_release:
00063                 case memory_order_acq_rel:
00064                 case memory_order_seq_cst:
00065                         arm_barrier();
00066                 case memory_order_consume:
00067                 default:;
00068         }
00069 }
00070 
00071 static inline void
00072 platform_fence_after(memory_order order)
00073 {
00074         switch(order) {
00075                 case memory_order_acquire:
00076                 case memory_order_acq_rel:
00077                 case memory_order_seq_cst:
00078                         arm_barrier();
00079                 default:;
00080         }
00081 }
00082 
00083 static inline void
00084 platform_fence_before_store(memory_order order)
00085 {
00086         platform_fence_before(order);
00087 }
00088 
00089 static inline void
00090 platform_fence_after_store(memory_order order)
00091 {
00092         if (order == memory_order_seq_cst)
00093                 arm_barrier();
00094 }
00095 
00096 static inline void
00097 platform_fence_after_load(memory_order order)
00098 {
00099         platform_fence_after(order);
00100 }
00101 
// 32-bit compare-and-swap built on the Linux kernel's __kernel_cmpxchg
// helper at fixed address 0xffff0fc0. The helper returns zero exactly
// when the exchange was performed. On failure it does not report the
// value it found, so we re-load *ptr by hand; that extra load races
// with concurrent writers and is why this primitive can only back
// compare_exchange_weak (see the file header comment).
//
// Returns true and leaves `expected` untouched on success; returns
// false and stores the freshly loaded value into `expected` on failure.
template<typename T>
bool
platform_cmpxchg32(T & expected, T desired, volatile T * ptr)
{
        typedef T (*kernel_cmpxchg32_t)(T oldval, T newval, volatile T * ptr);
        kernel_cmpxchg32_t kernel_cmpxchg = (kernel_cmpxchg32_t) 0xffff0fc0;

        const bool success = kernel_cmpxchg(expected, desired, ptr) == 0;
        if (!success)
                expected = *ptr;
        return success;
}
00115 
00116 }
00117 }
00118 
00119 #define BOOST_ATOMIC_THREAD_FENCE 2
00120 static inline void
00121 atomic_thread_fence(memory_order order)
00122 {
00123         switch(order) {
00124                 case memory_order_acquire:
00125                 case memory_order_release:
00126                 case memory_order_acq_rel:
00127                 case memory_order_seq_cst:
00128                         detail::atomic::arm_barrier();
00129                 default:;
00130         }
00131 }
00132 
00133 #define BOOST_ATOMIC_SIGNAL_FENCE 2
// Signal fence: an empty asm statement with a "memory" clobber, which
// prevents the compiler from reordering memory accesses across this
// point. No hardware barrier instruction is emitted; the ordering
// argument is ignored because every ordering gets the same
// compiler-only barrier.
static inline void
atomic_signal_fence(memory_order)
{
        __asm__ __volatile__ ("" ::: "memory");
}
00139 
00140 }
00141 
00142 #include <boost/atomic/detail/cas32weak.hpp>
00143 
00144 #endif


rosatomic
Author(s): Josh Faust
autogenerated on Sat Jun 8 2019 20:43:34