#ifndef __ARCH_I386_ATOMIC__
#define __ARCH_I386_ATOMIC__

#include <linux/config.h>
#include <linux/compiler.h>
#include <asm/processor.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

/*
 * Make sure gcc doesn't try to be clever and move things around
 * on us. We need to use _exactly_ the address the user gave us,
 * not some alias that contains the same information.
 */
typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)		(((v)->counter) = (i))

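/*
 * Example (illustrative sketch, not part of the original header):
 * declaring, initializing and updating a counter.  The name
 * "active_users" is hypothetical.
 *
 *	static atomic_t active_users = ATOMIC_INIT(0);
 *
 *	atomic_set(&active_users, 10);
 *	printk("users: %d\n", atomic_read(&active_users));
 */
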
/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "addl %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}

/**
 * atomic_sub - subtract the atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "subl %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "subl %2,%0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static __inline__ void atomic_inc(atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "incl %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static __inline__ void atomic_dec(atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "decl %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __inline__ int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "decl %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "incl %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __inline__ int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "addl %2,%0; sets %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic_add_return - add and return
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	int __i;
#ifdef CONFIG_M386
	unsigned long flags;
	if(unlikely(boot_cpu_data.x86==3))
		goto no_xadd;
#endif
	/* Modern 486+ processor */
	__i = i;
	__asm__ __volatile__(
		LOCK_PREFIX "xaddl %0, %1;"
		:"=r"(i)
		:"m"(v->counter), "0"(i));
	return i + __i;

#ifdef CONFIG_M386
no_xadd: /* Legacy 386 processor */
	local_irq_save(flags);
	__i = atomic_read(v);
	atomic_set(v, i + __i);
	local_irq_restore(flags);
	return i + __i;
#endif
}

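/*
 * Example (illustrative sketch): unlike atomic_add(), the new value
 * is returned, which makes this suitable for handing out unique
 * sequence numbers.  "seq" is a hypothetical counter.
 *
 *	static atomic_t seq = ATOMIC_INIT(0);
 *
 *	int ticket = atomic_add_return(1, &seq);
 *
 * atomic_inc_return() below is defined in exactly these terms.
 */
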
static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i,v);
}

#define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

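/*
 * Example (illustrative sketch): an open-coded compare-and-swap loop
 * built on atomic_cmpxchg(), doubling the counter without losing
 * concurrent updates; retry whenever another CPU changed it first.
 *
 *	int old, new;
 *	do {
 *		old = atomic_read(&v);
 *		new = old * 2;
 *	} while (atomic_cmpxchg(&v, old, new) != old);
 */
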
/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u)				\
({								\
	int c, old;						\
	c = atomic_read(v);					\
	for (;;) {						\
		if (unlikely(c == (u)))				\
			break;					\
		old = atomic_cmpxchg((v), c, c + (a));		\
		if (likely(old == c))				\
			break;					\
		c = old;					\
	}							\
	c != (u);						\
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

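/*
 * Example (illustrative sketch): taking a reference during a lookup
 * only if the object is not already on its way to being freed, i.e.
 * its count has not yet dropped to zero.  "find_object" and the
 * refcnt field are hypothetical.
 *
 *	obj = find_object(key);
 *	if (obj && !atomic_inc_not_zero(&obj->refcnt))
 *		obj = NULL;
 */
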
#define atomic_inc_return(v)  (atomic_add_return(1,v))
#define atomic_dec_return(v)  (atomic_sub_return(1,v))

/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr) \
__asm__ __volatile__(LOCK_PREFIX "andl %0,%1" \
: : "r" (~(mask)),"m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr) \
__asm__ __volatile__(LOCK_PREFIX "orl %0,%1" \
: : "r" (mask),"m" (*(addr)) : "memory")

/* Atomic operations are already serializing on x86 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#include <asm-generic/atomic.h>
#endif