#define __HAVE_ARCH_CMPXCHG 1

/* Workaround hook for PPC405 erratum #77; expands to nothing here. */
#define PPC405_ERR77(ra, rb)

/* Atomically compare *p with old and, if equal, store newp.
 * Returns the value of *p observed by the lwarx. */
static __inline__ unsigned long
__cmpxchg_u32(volatile unsigned int *p, unsigned long old, unsigned long newp)
{
        unsigned long prev;

        __asm__ __volatile__ (
"1:     lwarx   %0,0,%2         # __cmpxchg_u32\n\
        cmpw    0,%0,%3\n\
        bne-    2f\n"
        PPC405_ERR77(0,%2)
"       stwcx.  %4,0,%2\n\
        bne-    1b\n\
        sync\n\
2:"
        : "=&r" (prev), "=m" (*p)
        : "r" (p), "r" (old), "r" (newp), "m" (*p)
        : "cc", "memory");

        return prev;
}

/* Size dispatcher; only 32-bit operands are handled. */
static __inline__ unsigned long
__oro_cmpxchg(volatile void *ptr, unsigned long old, unsigned long newp,
              unsigned int size)
{
        switch (size) {
        case 4:
                return __cmpxchg_u32((volatile unsigned int *) ptr, old, newp);
        }
        return old;
}

/* Type-generic wrapper: evaluates to the value *ptr held before the
 * operation, cast back to the operand's type. */
#define oro_cmpxchg(ptr,o,n)                                              \
  ({                                                                      \
        __typeof__(*(ptr)) _o_ = (o);                                     \
        __typeof__(*(ptr)) _n_ = (n);                                     \
        (__typeof__(*(ptr))) __oro_cmpxchg((ptr), (unsigned long)_o_,     \
                           (unsigned long)_n_, sizeof(*(ptr)));           \
  })
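To show how the macro is meant to be used, here is a minimal sketch of a compare-and-exchange retry loop that increments a shared counter. Only oro_cmpxchg() comes from the listing above; the header name "oro_cmpxchg.h" and the counter_inc() helper are illustrative assumptions, and the code builds only for PowerPC targets.

#include "oro_cmpxchg.h"        /* hypothetical header holding the listing above */

unsigned int counter;           /* shared between threads */

void counter_inc(void)
{
        unsigned int old;

        /* Retry until no other thread updates counter between the read
         * and the compare-and-exchange.  The "memory" clobber in
         * __cmpxchg_u32() forces the compiler to reload counter on
         * each pass through the loop. */
        do {
                old = counter;
        } while (oro_cmpxchg(&counter, old, old + 1) != old);
}

The loop succeeds exactly when oro_cmpxchg() returns the value it was told to expect; any other return value means another thread won the race and the increment is retried against the fresh value.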