#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

typedef struct { volatile int counter; } atomic_t;

#ifdef __KERNEL__
#include <linux/compiler.h>
#include <asm/synch.h>
#include <asm/asm-compat.h>
#include <asm/system.h>

#define ATOMIC_INIT(i)		{ (i) }

#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))
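/*
 * Minimal usage sketch (illustration only, not part of the API
 * documentation; "example_refcount" is a hypothetical counter):
 *
 *	static atomic_t example_refcount = ATOMIC_INIT(1);
 *
 *	void example(void)
 *	{
 *		atomic_set(&example_refcount, 5);
 *		if (atomic_read(&example_refcount) == 5)
 *			atomic_inc(&example_refcount);
 *	}
 */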
static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

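/*
 * The lwarx/stwcx. pair above implements a load-reserve/store-conditional
 * retry loop.  A rough C sketch of the idea (illustration only, the two
 * helper names below are made up, not real kernel functions):
 *
 *	do {
 *		old = load_and_reserve(&v->counter);		// lwarx
 *		new = old + a;
 *	} while (!store_conditional(&v->counter, new));		// stwcx. / bne- 1b
 *
 * The conditional store fails whenever another CPU touched the word after
 * the reservation was taken, so the loop simply retries.
 */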
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

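/*
 * Usage sketch (illustration only; "pending_work" is a hypothetical
 * counter).  The macro is true only when the increment brings the
 * counter to zero, i.e. it was -1 beforehand:
 *
 *	if (atomic_inc_and_test(&pending_work))
 *		;	// counter is now zero
 */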
static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

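/*
 * Sketch of the usual compare-and-swap retry pattern built on
 * atomic_cmpxchg() (illustration only; "limit" is a hypothetical counter):
 *
 *	int old, new;
 *	do {
 *		old = atomic_read(&limit);
 *		new = old < 255 ? old + 1 : old;
 *	} while (atomic_cmpxchg(&limit, old, new) != old);
 *
 * atomic_cmpxchg() returns the value actually found in the counter, so
 * the loop retries until no other CPU raced with the update.
 */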
/*
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
{
	int t;

	__asm__ __volatile__ (
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_add_unless\n\
	cmpw	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%1 \n\
	bne-	1b \n"
	ISYNC_ON_SMP
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

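/*
 * atomic_inc_not_zero() is the usual building block for "take a reference
 * only if the object is still live" schemes.  Hedged sketch with a
 * hypothetical object type (illustration only):
 *
 *	if (!atomic_inc_not_zero(&obj->refcount))
 *		return NULL;	// refcount already hit zero, object is dying
 *	return obj;		// we now hold a reference
 */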
#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	cmpwi	%0,1\n\
	addi	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&b" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

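/*
 * Illustration only: because atomic_dec_if_positive() returns the old
 * value minus one even when it does not decrement, a caller that only
 * wants to know whether it got a "token" checks for a non-negative
 * result ("tokens" is a hypothetical semaphore-like counter):
 *
 *	if (atomic_dec_if_positive(&tokens) >= 0)
 *		;	// we consumed one token
 *	else
 *		;	// counter was already 0 or negative, nothing consumed
 */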
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()

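/*
 * These hooks let generic code order an atomic_inc()/atomic_dec() that
 * has no implicit barrier against surrounding accesses; on powerpc they
 * expand to a full smp_mb().  Typical pattern (illustration only,
 * "set_some_flag" and "pending" are hypothetical):
 *
 *	set_some_flag();
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&pending);	// the flag write is visible before the dec
 */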
#ifdef __powerpc64__

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

#define atomic64_read(v)	((v)->counter)
#define atomic64_set(v,i)	(((v)->counter) = (i))

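/*
 * The atomic64_t variants mirror the 32-bit API but operate on a long
 * with ldarx/stdcx., so they are only available on 64-bit kernels (this
 * whole block sits under #ifdef __powerpc64__).  Hypothetical usage
 * sketch, illustration only ("bytes_transferred" and "limit" are made up):
 *
 *	static atomic64_t bytes_transferred = ATOMIC64_INIT(0);
 *
 *	atomic64_add(PAGE_SIZE, &bytes_transferred);
 *	if (atomic64_read(&bytes_transferred) > limit)
 *		;	// throttle
 */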
static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

/*
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long t;

	__asm__ __volatile__ (
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic_add_unless\n\
	cmpd	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n\
	stdcx.	%0,0,%1 \n\
	bne-	1b \n"
	ISYNC_ON_SMP
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#endif /* __powerpc64__ */

#include <asm-generic/atomic.h>
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */