#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

typedef struct { volatile int counter; } atomic_t;

#ifdef __KERNEL__
#include <asm/synch.h>
#include <asm/asm-compat.h>

#define ATOMIC_INIT(i)		{ (i) }

#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))
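/*
 * Usage sketch (illustrative only; the identifier "example_count" is
 * hypothetical and not part of this header): a statically initialised
 * counter, read and reset without locking.
 *
 *	static atomic_t example_count = ATOMIC_INIT(0);
 *
 *	int snapshot = atomic_read(&example_count);
 *	atomic_set(&example_count, 0);
 *
 * atomic_read()/atomic_set() are plain accesses; only the
 * read-modify-write operations below use lwarx/stwcx. sequences.
 */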
static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n\
	stwcx.	%0,0,%3\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}
static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n\
	stwcx.	%0,0,%2\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
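/*
 * Sketch of a typical atomic_add_return()/atomic_add_negative() caller
 * (hypothetical names "example_balance" and "nr", illustration only):
 *
 *	static atomic_t example_balance = ATOMIC_INIT(0);
 *
 *	if (atomic_add_negative(nr, &example_balance))
 *		;	// the balance just went below zero
 *
 * Unlike atomic_add(), the value-returning form above also acts as a
 * memory barrier (LWSYNC before, ISYNC after the stwcx.).
 */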
static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n\
	stwcx.	%0,0,%3\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}
static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n\
	stwcx.	%0,0,%2\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n\
	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}
static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n\
	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
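/*
 * Illustrative sketch (hypothetical name "example_pending"):
 * atomic_inc_and_test() is useful when a counter is biased to start at
 * -1 and exactly one caller must act when it reaches zero.
 *
 *	static atomic_t example_pending = ATOMIC_INIT(-1);
 *
 *	if (atomic_inc_and_test(&example_pending))
 *		;	// this increment was the one that reached zero
 */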
static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n\
	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}
static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n\
	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
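/*
 * Sketch of a compare-and-swap retry loop built on atomic_cmpxchg()
 * (the helper "example_track_max" is hypothetical, illustration only):
 * record a new maximum value without locking.
 *
 *	static void example_track_max(atomic_t *max, int val)
 *	{
 *		int old = atomic_read(max);
 *
 *		while (old < val) {
 *			int prev = atomic_cmpxchg(max, old, val);
 *			if (prev == old)
 *				break;		// we installed val
 *			old = prev;		// someone else won; retry
 *		}
 *	}
 */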
/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u)			\
({							\
	int c, old;					\
	c = atomic_read(v);				\
	for (;;) {					\
		if (unlikely(c == (u)))			\
			break;				\
		old = atomic_cmpxchg((v), c, c + (a));	\
		if (likely(old == c))			\
			break;				\
		c = old;				\
	}						\
	c != (u);					\
})

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
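/*
 * Typical use of atomic_inc_not_zero() (the structure and helper below
 * are hypothetical, for illustration only): take a reference to an
 * object found in a lookup structure only if its refcount has not
 * already dropped to zero.
 *
 *	struct example_obj { atomic_t refcnt; };
 *
 *	static struct example_obj *example_get(struct example_obj *obj)
 *	{
 *		if (obj && !atomic_inc_not_zero(&obj->refcnt))
 *			obj = NULL;	// already being torn down
 *		return obj;
 *	}
 */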
#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)
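/*
 * Companion sketch for atomic_dec_and_test(), continuing the
 * hypothetical example_obj above: the caller that drops the last
 * reference sees the counter hit zero and may free the object.
 *
 *	static void example_put(struct example_obj *obj)
 *	{
 *		if (atomic_dec_and_test(&obj->refcnt))
 *			example_free(obj);	// hypothetical destructor
 *	}
 */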
/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	cmpwi	%0,1\n\
	addi	%0,%0,-1\n\
	blt-	2f\n\
	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&b" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
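/*
 * The smp_mb__{before,after}_atomic_{dec,inc}() macros order a plain
 * atomic_inc()/atomic_dec() (which by themselves are not barriers)
 * against surrounding memory accesses.  Illustrative pattern, with
 * hypothetical fields:
 *
 *	obj->state = READY;		// hypothetical store
 *	smp_mb__before_atomic_inc();
 *	atomic_inc(&obj->users);	// ordered after the store above
 */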
#ifdef __powerpc64__

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

#define atomic64_read(v)	((v)->counter)
#define atomic64_set(v,i)	(((v)->counter) = (i))
static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}
static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)
static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}
static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}
static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
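/*
 * The atomic64_* operations mirror the 32-bit API for 64-bit counters on
 * ppc64.  Illustrative sketch (hypothetical names): a statistics counter
 * that does not wrap at 4GB.
 *
 *	static atomic64_t example_bytes = ATOMIC64_INIT(0);
 *
 *	atomic64_add(len, &example_bytes);
 *	total = atomic64_read(&example_bytes);
 */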
static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}
static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)
/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#endif /* __powerpc64__ */
#include <asm-generic/atomic.h>
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */