#ifndef __ARCH_I386_ATOMIC__
#define __ARCH_I386_ATOMIC__

#include <linux/compiler.h>
#include <asm/processor.h>
#include <asm/cmpxchg.h>
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

/*
 * Make sure gcc doesn't try to be clever and move things around
 * on us. We need to use _exactly_ the address the user gave us,
 * not some alias that contains the same information.
 */
typedef struct { int counter; } atomic_t;

#define ATOMIC_INIT(i)	{ (i) }
/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)
/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)		(((v)->counter) = (i))
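
/*
 * Illustrative usage sketch (not part of the original header; the
 * 'nr_events' name is hypothetical): declaring, resetting and reading
 * a counter with the macros above.
 *
 *	static atomic_t nr_events = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_events, 0);
 *	printk("%d events\n", atomic_read(&nr_events));
 */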
/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "addl %1,%0"
		:"+m" (v->counter)
		:"ir" (i));
}
/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "subl %1,%0"
		:"+m" (v->counter)
		:"ir" (i));
}
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "subl %2,%0; sete %1"
		:"+m" (v->counter), "=qm" (c)
		:"ir" (i) : "memory");
	return c;
}
/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static __inline__ void atomic_inc(atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "incl %0"
		:"+m" (v->counter));
}
/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static __inline__ void atomic_dec(atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "decl %0"
		:"+m" (v->counter));
}
/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __inline__ int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "decl %0; sete %1"
		:"+m" (v->counter), "=qm" (c)
		: : "memory");
	return c != 0;
}
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "incl %0; sete %1"
		:"+m" (v->counter), "=qm" (c)
		: : "memory");
	return c != 0;
}
/**
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __inline__ int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "addl %2,%0; sets %1"
		:"+m" (v->counter), "=qm" (c)
		:"ir" (i) : "memory");
	return c;
}
/**
 * atomic_add_return - add integer and return
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	int __i;
#ifdef CONFIG_M386
	unsigned long flags;
	if (unlikely(boot_cpu_data.x86 <= 3))
		goto no_xadd;
#endif
	/* Modern 486+ processor */
	__i = i;
	__asm__ __volatile__(
		LOCK_PREFIX "xaddl %0, %1"
		:"+r" (i), "+m" (v->counter)
		: : "memory");
	return i + __i;

#ifdef CONFIG_M386
no_xadd: /* Legacy 386 processor */
	local_irq_save(flags);
	__i = atomic_read(v);
	atomic_set(v, i + __i);
	local_irq_restore(flags);
	return i + __i;
#endif
}
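
/*
 * Illustrative sketch (not part of the original header; 'next_id' is
 * hypothetical): because atomic_add_return() hands back the post-add
 * value, it can mint unique, monotonically increasing ids without a
 * lock.
 *
 *	static atomic_t next_id = ATOMIC_INIT(0);
 *
 *	int id = atomic_add_return(1, &next_id);
 */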
/**
 * atomic_sub_return - subtract integer and return
 * @v: pointer of type atomic_t
 * @i: integer value to subtract
 *
 * Atomically subtracts @i from @v and returns @v - @i
 */
static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i,v);
}
#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
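
/*
 * Illustrative sketch (not part of the original header; 'v' and
 * 'limit' are hypothetical): the usual retry loop built on
 * atomic_cmpxchg(): reread, compute, and swap only if nobody changed
 * the counter in between.  atomic_add_unless() below is an in-tree
 * instance of the same pattern.
 *
 *	int old, new;
 *
 *	do {
 *		old = atomic_read(&v);
 *		new = old < limit ? old + 1 : old;
 *	} while (atomic_cmpxchg(&v, old, new) != old);
 */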
/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
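
/*
 * Illustrative sketch (not part of the original header; the obj layout
 * is hypothetical): atomic_inc_not_zero() takes a reference only while
 * the object is still live.  Once the count has hit zero the object is
 * being torn down and must not be revived, e.g. in a lookup that races
 * with the final put.
 *
 *	if (!atomic_inc_not_zero(&obj->refcnt))
 *		return NULL;
 */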
#define atomic_inc_return(v)  (atomic_add_return(1,v))
#define atomic_dec_return(v)  (atomic_sub_return(1,v))
/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr) \
__asm__ __volatile__(LOCK_PREFIX "andl %0,%1" \
: : "r" (~(mask)),"m" (*addr) : "memory")

#define atomic_set_mask(mask, addr) \
__asm__ __volatile__(LOCK_PREFIX "orl %0,%1" \
: : "r" (mask),"m" (*(addr)) : "memory")
/* Atomic operations are already serializing on x86 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
#include <asm-generic/atomic.h>
#endif