#ifndef __SYS_ATOMIC_H__
#define __SYS_ATOMIC_H__
static u32 sysAtomicRead(const atomic_t *v)
{
	u32 t;

	__asm__ volatile("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}
static void sysAtomicSet(atomic_t *v, int i)
{
	__asm__ volatile("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}
43"1: lwarx %0,0,%3 # atomic_add\n\
57"1: lwarx %0,0,%2 # atomic_add_return\n\
#define sysAtomicAddNegative(a, v) (sysAtomicAddReturn((a), (v)) < 0)
75"1: lwarx %0,0,%3 # atomic_sub\n\
89"1: lwarx %0,0,%2 # atomic_sub_return\n\
105"1: lwarx %0,0,%2 # atomic_inc\n\
119"1: lwarx %0,0,%1 # atomic_inc_return\n\
	: "cc", "xer", "memory");
#define sysAtomicIncAndTest(v) (sysAtomicIncReturn(v) == 0)
145"1: lwarx %0,0,%2 # atomic_dec\n\
159"1: lwarx %0,0,%1 # atomic_dec_return\n\
	: "cc", "xer", "memory");
	: "=&r" (prev), "+m" (*(volatile unsigned int *)p)
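/* The operand list above is the tail of __xchg_u32. A sketch of the whole
 * routine, assuming the usual unconditional swap loop (barriers assumed as
 * in the other value-returning operations): */
static u32 __xchg_u32(volatile void *p, u32 val)
{
	u32 prev;

	__asm__ volatile(
"	lwsync\n"
"1:	lwarx	%0,0,%2\n"			/* fetch and reserve old value */
"	stwcx.	%3,0,%2\n"			/* unconditionally store new value */
"	bne-	1b\n"
"	isync"
	: "=&r" (prev), "+m" (*(volatile unsigned int *)p)
	: "r" (p), "r" (val)
	: "cc", "memory");

	return prev;
}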
	: "=&r" (prev), "+m" (*(volatile u32 *)p)
static inline u32 __xchg(volatile void *ptr, u32 x, unsigned int size)
#define xchg(ptr, x) \
  ({ \
	__typeof__(*(ptr)) _x_ = (x); \
	(__typeof__(*(ptr))) __xchg((ptr), (u32)_x_, sizeof(*(ptr))); \
  })
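/* Usage sketch (slotTake is a hypothetical helper): xchg() atomically
 * installs a new value and hands back the old one. Note that, as written,
 * the dispatch funnels the value through a u32, so only 32-bit objects
 * round-trip through this macro without truncation. */
static u32 slotTake(volatile u32 *slot)
{
	return xchg(slot, 0);		/* grab the current value, leave 0 behind */
}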
240"1: lwarx %0,0,%2 # __cmpxchg_u32\n\
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (oldv), "r" (newv)
260"1: ldarx %0,0,%2 # __cmpxchg_u64\n\
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (oldv), "r" (newv)
#define cmpxchg(ptr, o, n) \
  ({ \
	__typeof__(*(ptr)) _o_ = (o); \
	__typeof__(*(ptr)) _n_ = (n); \
	(__typeof__(*(ptr))) __cmpxchg((ptr), (u64)_o_, \
			(u64)_n_, sizeof(*(ptr))); \
  })
#define sysAtomicCompareAndSwap(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define sysAtomicSwap(v, newv) (xchg(&((v)->counter), newv))
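/* Usage sketch: cmpxchg-style primitives return the previous value, so a
 * lock-free read-modify-write compares the return against the value it
 * started from and retries on a race. atomicAddClamped is a hypothetical
 * illustration: */
static u32 atomicAddClamped(atomic_t *v, u32 a, u32 limit)
{
	u32 oldv, newv;

	do {
		oldv = sysAtomicRead(v);
		newv = oldv + a;
		if (newv > limit)
			newv = limit;
	} while (sysAtomicCompareAndSwap(v, oldv, newv) != oldv);

	return newv;
}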
317"1: lwarx %0,0,%1 # atomic_add_unless\n\
	: "r" (&v->counter), "r" (a), "r" (u)
#define sysAtomicIncNotZero(v) sysAtomicAddUnless((v), 1, 0)
#define sysAtomicSubAndTest(a, v) (sysAtomicSubReturn((a), (v)) == 0)
#define sysAtomicDecAndTest(v) (sysAtomicDecReturn((v)) == 0)
347"1: lwarx %0,0,%1 # atomic_dec_if_positive\n\
static u64 sysAtomic64Read(const atomic64_t *v)
{
	u64 t;

	__asm__ volatile("ld%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}
static void sysAtomic64Set(atomic64_t *v, u64 i)
{
	__asm__ volatile("std%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}
380"1: ldarx %0,0,%3 # atomic64_add\n\
394"1: ldarx %0,0,%2 # atomic64_add_return\n\
405#define sysAtomic64AddNegative(a, v) (sysAtomic64AddReturn((a), (v)) < 0)
412"1: ldarx %0,0,%3 # atomic64_sub\n\
426"1: ldarx %0,0,%2 # atomic64_sub_return\n\
442"1: ldarx %0,0,%2 # atomic64_inc\n\
456"1: ldarx %0,0,%1 # atomic64_inc_return\n\
	: "cc", "xer", "memory");
#define sysAtomic64IncAndTest(v) (sysAtomic64IncReturn(v) == 0)
"1: ldarx %0,0,%2 # atomic64_dec\n\
"1: ldarx %0,0,%1 # atomic64_dec_return\n\
	: "cc", "xer", "memory");
#define sysAtomic64SubAndTest(a, v) (sysAtomic64SubReturn((a), (v)) == 0)
#define sysAtomic64DecAndTest(v) (sysAtomic64DecReturn((v)) == 0)
519"1: ldarx %0,0,%1 # atomic64_dec_if_positive\n\
	: "cc", "xer", "memory");
#define sysAtomic64CompareAndSwap(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define sysAtomic64Swap(v, newv) (xchg(&((v)->counter), newv))
549"1: ldarx %0,0,%1 # atomic_add_unless\n\
	: "r" (&v->counter), "r" (a), "r" (u)
#define sysAtomic64IncNotZero(v) sysAtomic64AddUnless((v), 1, 0)
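/* Usage sketch (tryRef is hypothetical): the IncNotZero helpers let a
 * reference-count take fail once the count has already dropped to zero,
 * the usual guard against lookup-versus-teardown races. */
static int tryRef(atomic64_t *refcount)
{
	/* Nonzero only if refcount was not 0 when the increment was applied. */
	return sysAtomic64IncNotZero(refcount);
}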
/* Function index (as extracted): 32-bit counter operations */
static u32 sysAtomicRead(const atomic_t *v)
static void sysAtomicSet(atomic_t *v, int i)
static void sysAtomicAdd(u32 a, atomic_t *v)
static u32 sysAtomicAddReturn(u32 a, atomic_t *v)
static void sysAtomicSub(u32 a, atomic_t *v)
static u32 sysAtomicSubReturn(u32 a, atomic_t *v)
static void sysAtomicInc(atomic_t *v)
static u32 sysAtomicIncReturn(atomic_t *v)
static void sysAtomicDec(atomic_t *v)
static u32 sysAtomicDecReturn(atomic_t *v)
static u32 sysAtomicAddUnless(atomic_t *v, u32 a, u32 u)
static u32 sysAtomicDecIfPositive(atomic_t *v)

/* 64-bit counter operations */
static u64 sysAtomic64Read(const atomic64_t *v)
static void sysAtomic64Set(atomic64_t *v, u64 i)
static void sysAtomic64Add(u64 a, atomic64_t *v)
static u64 sysAtomic64AddReturn(u64 a, atomic64_t *v)
static void sysAtomic64Sub(u64 a, atomic64_t *v)
static u64 sysAtomic64SubReturn(u64 a, atomic64_t *v)
static void sysAtomic64Inc(atomic64_t *v)
static u64 sysAtomic64IncReturn(atomic64_t *v)
static void sysAtomic64Dec(atomic64_t *v)
static u64 sysAtomic64DecReturn(atomic64_t *v)
static u32 sysAtomic64AddUnless(atomic64_t *v, u64 a, u64 u)
static u64 sysAtomic64DecIfPositive(atomic64_t *v)

/* Exchange and compare-and-swap helpers */
static u32 __xchg_u32(volatile void *p, u32 val)
static u32 __xchg_u64(volatile void *p, u32 val)
static u32 __xchg(volatile void *ptr, u32 x, unsigned int size)
void __xchg_called_with_bad_pointer(void)
static u64 __cmpxchg_u32(volatile unsigned int *p, u64 oldv, u64 newv)
static u64 __cmpxchg_u64(volatile u64 *p, u64 oldv, u64 newv)
static u64 __cmpxchg(volatile void *ptr, u64 oldv, u64 newv, unsigned int size)
void __cmpxchg_called_with_bad_pointer(void)
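/* End-to-end usage sketch tying the index together: a reference-counted
 * object where exactly one thread performs the cleanup. struct object and
 * the freeObject callback are hypothetical stand-ins. */
struct object {
	atomic_t refs;
};

static void getRef(struct object *o)
{
	sysAtomicInc(&o->refs);
}

static void dropRef(struct object *o, void (*freeObject)(struct object *))
{
	/* sysAtomicDecAndTest() is true only for the caller that takes the
	 * count to zero, so the free runs exactly once. */
	if (sysAtomicDecAndTest(&o->refs))
		freeObject(o);
}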