#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

typedef struct { volatile int counter; } atomic_t;

#ifdef __KERNEL__
#include <linux/compiler.h>
#include <asm/synch.h>
#include <asm/asm-compat.h>

#define ATOMIC_INIT(i)		{ (i) }

#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))
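
/*
 * Minimal usage sketch (hypothetical names, not part of the original
 * header): initialise a counter with ATOMIC_INIT, then read and reset it.
 * atomic_read()/atomic_set() are a plain load and store of ->counter, so
 * the read-then-reset pair below is not atomic as a whole.
 */
static atomic_t example_events = ATOMIC_INIT(0);

static __inline__ int example_events_drain(void)
{
	int seen = atomic_read(&example_events);	/* plain load */

	atomic_set(&example_events, 0);			/* plain store */
	return seen;
}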

static __inline__ void atomic_add(int a, atomic_t *v)
"1:	lwarx	%0,0,%3		# atomic_add\n\
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
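
/*
 * Illustrative sketch, not from the original source: the complete
 * lwarx/stwcx. retry loop that atomic_add() above is built around, using the
 * same operand constraints.  The name atomic_add_sketch is hypothetical.
 */
static __inline__ void atomic_add_sketch(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3\n"	/* load word and set a reservation	*/
"	add	%0,%2,%0\n"	/* t = old value + a			*/
"	stwcx.	%0,0,%3\n"	/* store only if reservation still held	*/
"	bne-	1b"		/* reservation lost: retry from lwarx	*/
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}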

static __inline__ int atomic_add_return(int a, atomic_t *v)
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	: "r" (a), "r" (&v->counter)

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
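
/*
 * Sketch only (hypothetical name atomic_add_return_sketch): the
 * value-returning variants keep the updated value in a register and also
 * order memory accesses around the update.  Plain lwsync/isync instructions
 * stand in here for the SMP-conditional barrier macros that the kernel
 * pulls in from <asm/synch.h>.
 */
static __inline__ int atomic_add_return_sketch(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"	lwsync\n"		/* order earlier accesses before the update */
"1:	lwarx	%0,0,%2\n"
"	add	%0,%1,%0\n"
"	stwcx.	%0,0,%2\n"
"	bne-	1b\n"
"	isync"			/* order the update before later accesses   */
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}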

static __inline__ void atomic_sub(int a, atomic_t *v)
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)

static __inline__ int atomic_sub_return(int a, atomic_t *v)
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	: "r" (a), "r" (&v->counter)

static __inline__ void atomic_inc(atomic_t *v)
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)

static __inline__ int atomic_inc_return(atomic_t *v)
	__asm__ __volatile__(
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
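
/*
 * Usage sketch (hypothetical names): completion counting.  The counter is
 * biased to minus the number of workers; the worker whose increment brings
 * it to zero, as reported by atomic_inc_and_test(), is the last to finish.
 */
static __inline__ void example_completion_init(atomic_t *left, int nr_workers)
{
	atomic_set(left, -nr_workers);
}

static __inline__ int example_worker_done_is_last(atomic_t *left)
{
	return atomic_inc_and_test(left);
}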

static __inline__ void atomic_dec(atomic_t *v)
	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)

static __inline__ int atomic_dec_return(atomic_t *v)
	__asm__ __volatile__(
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\

#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
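
/*
 * Sketch (hypothetical helper): the classic compare-and-swap retry loop that
 * atomic_cmpxchg() enables, here clamping a counter at a ceiling.  Generic
 * code builds operations such as atomic_add_unless() in the same way.
 */
static __inline__ int example_inc_below(atomic_t *v, int ceiling)
{
	int old, new, cur;

	cur = atomic_read(v);
	for (;;) {
		old = cur;
		if (old >= ceiling)
			return 0;		/* already at the ceiling */
		new = old + 1;
		cur = atomic_cmpxchg(v, old, new);
		if (cur == old)
			return 1;		/* our update won the race */
		/* someone else changed *v; retry with the fresh value */
	}
}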

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
	__asm__ __volatile__ (
"1:	lwarx	%0,0,%1		# atomic_add_unless\n\
	: "r" (&v->counter), "r" (a), "r" (u)

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
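
/*
 * Usage sketch (hypothetical object and fields): atomic_inc_not_zero() is
 * the usual way to take a reference to an object found under a lock or an
 * RCU lookup, failing cleanly if its refcount has already dropped to zero.
 */
struct example_obj {
	atomic_t refcount;
	/* ... payload ... */
};

static __inline__ int example_obj_tryget(struct example_obj *obj)
{
	/* non-zero only if the count was not zero and has now been raised */
	return atomic_inc_not_zero(&obj->refcount);
}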

#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
	__asm__ __volatile__(
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
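
/*
 * Usage sketch (hypothetical helper): atomic_dec_if_positive() performs the
 * decrement only when the counter is above zero, so it can hand out a
 * bounded pool of tokens without ever letting the count go negative.
 */
static __inline__ int example_try_take_token(atomic_t *tokens)
{
	/* old value minus 1: >= 0 means we got a token, < 0 means none left */
	return atomic_dec_if_positive(tokens) >= 0;
}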

#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
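
/*
 * Usage sketch (hypothetical struct and fields): the non-value-returning
 * atomic_dec() gives no ordering by itself, so a caller that needs its
 * earlier stores visible before the decrement pairs it with
 * smp_mb__before_atomic_dec().  Here that expands to a full smp_mb(), but
 * portable code should still spell it this way.
 */
struct example_work {
	int result;
	atomic_t pending;
};

static __inline__ void example_publish_result(struct example_work *w, int res)
{
	w->result = res;			/* store the payload first	 */
	smp_mb__before_atomic_dec();		/* order it before the decrement */
	atomic_dec(&w->pending);		/* then signal completion	 */
}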

#ifdef __powerpc64__

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

#define atomic64_read(v)	((v)->counter)
#define atomic64_set(v,i)	(((v)->counter) = (i))

static __inline__ void atomic64_add(long a, atomic64_t *v)
	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
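
/*
 * Sketch (hypothetical name): the 64-bit operations mirror the 32-bit ones,
 * substituting ldarx/stdcx. for lwarx/stwcx. and a long-sized counter.
 */
static __inline__ void atomic64_add_sketch(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3\n"	/* load doubleword and set a reservation */
"	add	%0,%2,%0\n"
"	stdcx.	%0,0,%3\n"	/* conditional doubleword store		 */
"	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}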

static __inline__ long atomic64_add_return(long a, atomic64_t *v)
	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	: "r" (a), "r" (&v->counter)

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_sub(long a, atomic64_t *v)
	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)

static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	: "r" (a), "r" (&v->counter)

static __inline__ void atomic64_inc(atomic64_t *v)
	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)

static __inline__ long atomic64_inc_return(atomic64_t *v)
	__asm__ __volatile__(
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)

static __inline__ long atomic64_dec_return(atomic64_t *v)
	__asm__ __volatile__(
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\

#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
	__asm__ __volatile__(
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\

#endif /* __powerpc64__ */

#include <asm-generic/atomic.h>
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */