/*
 * PowerPC64 atomic operations
 *
 * Copyright (C) 2001 Paul Mackerras <paulus@au.ibm.com>, IBM
 * Copyright (C) 2001 Anton Blanchard <anton@au.ibm.com>, IBM
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 */

#ifndef _ASM_PPC64_ATOMIC_H_
#define _ASM_PPC64_ATOMIC_H_

#include <asm/memory.h>

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)	{ (i) }

#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))
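
/*
 * Usage sketch (illustrative, not part of the original header; the
 * name nr_events is hypothetical):
 *
 *	static atomic_t nr_events = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_events, 0);
 *	if (atomic_read(&nr_events) == 0)
 *		...
 *
 * atomic_read() and atomic_set() compile to plain loads and stores;
 * only the lwarx/stwcx. routines below are atomic read-modify-write.
 */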

static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	/*
	 * lwarx loads and reserves the word; stwcx. stores only if
	 * the reservation still holds, and bne- 1b retries the whole
	 * sequence if another writer stole it.
	 */
	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n\
	stwcx.	%0,0,%3\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	/* EIEIO_ON_SMP/ISYNC_ON_SMP (<asm/memory.h>) give the
	 * value-returning variants barrier semantics on SMP. */
	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n\
	stwcx.	%0,0,%2\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
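
/*
 * Usage sketch (illustrative, not part of the original header; the
 * names budget and cost are hypothetical): atomic_add_negative()
 * reports whether the new value went below zero:
 *
 *	static atomic_t budget = ATOMIC_INIT(100);
 *
 *	if (atomic_add_negative(-cost, &budget))
 *		printk("budget exhausted\n");
 */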

static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n\
	stwcx.	%0,0,%3\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n\
	stwcx.	%0,0,%2\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n\
	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n\
	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
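
/*
 * Usage sketch (illustrative; the name next_id is hypothetical):
 * atomic_inc_return() is a convenient way to hand out unique,
 * monotonically increasing ids:
 *
 *	static atomic_t next_id = ATOMIC_INIT(0);
 *
 *	int id = atomic_inc_return(&next_id);
 */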

static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n\
	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n\
	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)
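
/*
 * Usage sketch (illustrative; struct foo, foo_put() and foo_free()
 * are hypothetical): the classic refcount "put", where whoever
 * drops the count to zero frees the object:
 *
 *	void foo_put(struct foo *f)
 *	{
 *		if (atomic_dec_and_test(&f->refcount))
 *			foo_free(f);
 *	}
 */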

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
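
/*
 * Usage sketch (illustrative; obj and its fields are hypothetical):
 * the non-value-returning operations above have no barrier
 * semantics of their own (no EIEIO_ON_SMP/ISYNC_ON_SMP), so callers
 * that need ordering ask for it explicitly:
 *
 *	obj->state = OBJ_DEAD;
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&obj->users);
 */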

#endif /* _ASM_PPC64_ATOMIC_H_ */