/*
 * PowerPC atomic operations
 */

#ifndef _ASM_PPC_ATOMIC_H_
#define _ASM_PPC_ATOMIC_H_

typedef struct { volatile int counter; } atomic_t;

#ifdef __KERNEL__

#define ATOMIC_INIT(i)	{ (i) }

#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))
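
/*
 * Added commentary: atomic_read and atomic_set compile to plain loads
 * and stores; only the read-modify-write helpers below need the
 * lwarx/stwcx. reservation pair.  For example:
 *
 *	static atomic_t refs = ATOMIC_INIT(1);
 *	int n = atomic_read(&refs);	-- n is 1
 */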

extern void atomic_clear_mask(unsigned long mask, unsigned long *addr);
extern void atomic_set_mask(unsigned long mask, unsigned long *addr);

#ifdef CONFIG_SMP
#define SMP_SYNC	"sync"
#define SMP_ISYNC	"\n\tisync"
#else
#define SMP_SYNC	""
#define SMP_ISYNC
#endif

/* Erratum #77 on the 405 means we need a sync or dcbt before every stwcx.
 * The old ATOMIC_SYNC_FIX covered some but not all of this.
 */
#ifdef CONFIG_IBM405_ERR77
#define PPC405_ERR77(ra,rb)	"dcbt " #ra "," #rb ";"
#else
#define PPC405_ERR77(ra,rb)
#endif
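
/*
 * Illustrative expansion (added commentary, not in the original): with
 * CONFIG_IBM405_ERR77 set, a sequence such as
 *
 *	PPC405_ERR77(0,%2)
 *	"	stwcx.	%0,0,%2\n"
 *
 * pastes "dcbt 0,%2;" immediately in front of the stwcx., satisfying
 * the erratum's "sync or dcbt before every stwcx." rule; on all other
 * cores the macro expands to nothing and costs nothing.
 */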

static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}
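
/*
 * Added commentary: each helper below follows the same shape as
 * atomic_add.  lwarx loads v->counter and takes out a reservation on
 * the word, the new value is computed in scratch register t, and
 * stwcx. stores it only if the reservation is still held; bne- loops
 * to retry when another CPU has modified the word in between.
 */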

static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	SMP_ISYNC
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	SMP_ISYNC
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	SMP_ISYNC
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
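
/*
 * Illustrative use (not in the original header), with the counter
 * biased to -1 so the first increment reports hitting zero:
 *
 *	static atomic_t pending = ATOMIC_INIT(-1);
 *
 *	if (atomic_inc_and_test(&pending))
 *		complete_request();	-- hypothetical callback
 */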

static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	SMP_ISYNC
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	SMP_ISYNC
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
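
/*
 * Illustrative use (not in the original header): since the store is
 * skipped once the old value is <= 0, this gives a natural "try-down"
 * primitive over a hypothetical count:
 *
 *	if (atomic_dec_if_positive(&count) < 0)
 *		return -EAGAIN;	-- count was <= 0 and is unchanged
 */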

#define __MB	__asm__ __volatile__ (SMP_SYNC : : : "memory")
#define smp_mb__before_atomic_dec()	__MB
#define smp_mb__after_atomic_dec()	__MB
#define smp_mb__before_atomic_inc()	__MB
#define smp_mb__after_atomic_inc()	__MB
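
/*
 * Illustrative use (not in the original header): these barriers order
 * surrounding stores against an atomic reference drop, e.g. for a
 * hypothetical refcounted object:
 *
 *	smp_mb__before_atomic_dec();
 *	if (atomic_dec_and_test(&obj->refcnt))
 *		release_obj(obj);
 *
 * On non-SMP builds SMP_SYNC is the empty string, so __MB reduces to
 * a pure compiler barrier.
 */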

#endif /* __KERNEL__ */
#endif /* _ASM_PPC_ATOMIC_H_ */