#ifndef ASM_X86__ATOMIC_32_H
#define ASM_X86__ATOMIC_32_H

#include <linux/compiler.h>
#include <asm/processor.h>
#include <asm/cmpxchg.h>

/*
 * Atomic operations that C can't guarantee us. Useful for
 * resource counting etc.
 */

/*
 * Make sure gcc doesn't try to be clever and move things around
 * on us. We need to use _exactly_ the address the user gave us,
 * not some alias that contains the same information.
 */
typedef struct {
	int counter;
} atomic_t;

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	(((v)->counter) = (i))
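
/*
 * Illustrative sketch (caller code, not part of this header): how an
 * atomic_t is typically declared, initialized and accessed with the
 * two accessors above. "nr_events" is a hypothetical name.
 *
 *	static atomic_t nr_events = ATOMIC_INIT(0);
 *
 *	int snapshot = atomic_read(&nr_events);
 *	atomic_set(&nr_events, 0);
 *
 * atomic_read()/atomic_set() are single aligned word accesses; they
 * are atomic on x86 but imply no memory barrier.
 */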

/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic_add(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "addl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic_sub(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "subl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}
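
/*
 * Illustrative sketch, not part of this header: a lock-free statistics
 * counter built on atomic_add()/atomic_sub(). The names "bytes_inflight",
 * "charge" and "uncharge" are hypothetical.
 *
 *	static atomic_t bytes_inflight = ATOMIC_INIT(0);
 *
 *	static void charge(int nbytes)
 *	{
 *		atomic_add(nbytes, &bytes_inflight);
 *	}
 *
 *	static void uncharge(int nbytes)
 *	{
 *		atomic_sub(nbytes, &bytes_inflight);
 *	}
 *
 * Both helpers are safe against concurrent callers without a spinlock,
 * because the LOCK prefix makes the read-modify-write indivisible.
 */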

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_sub_and_test(int i, atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "subl %2,%0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : "ir" (i) : "memory");
	return c;
}

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic_inc(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "incl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic_dec(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "decl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "decl %0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : : "memory");
	return c != 0;
}

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "incl %0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : : "memory");
	return c != 0;
}

/**
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "addl %2,%0; sets %1"
		     : "+m" (v->counter), "=qm" (c)
		     : "ir" (i) : "memory");
	return c;
}

/**
 * atomic_add_return - add integer and return
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
	int __i;
#ifdef CONFIG_M386
	unsigned long flags;
	if (unlikely(boot_cpu_data.x86 <= 3))
		goto no_xadd;
#endif
	/* Modern 486+ processor */
	__i = i;
	asm volatile(LOCK_PREFIX "xaddl %0, %1"
		     : "+r" (i), "+m" (v->counter)
		     : : "memory");
	return i + __i;

#ifdef CONFIG_M386
no_xadd: /* Legacy 386 processor */
	local_irq_save(flags);
	__i = atomic_read(v);
	atomic_set(v, i + __i);
	local_irq_restore(flags);
	return i + __i;
#endif
}
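
/*
 * Illustrative usage (hypothetical caller code): atomic_add_return()
 * returns the new value, so it works as a fetch-and-add for handing
 * out unique ticket numbers. "next_ticket" is assumed.
 *
 *	static atomic_t next_ticket = ATOMIC_INIT(0);
 *
 *	int my_ticket = atomic_add_return(1, &next_ticket);
 *
 * Each caller gets a distinct value even under contention, because the
 * add and the read of the result happen in one locked xaddl.
 */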

/**
 * atomic_sub_return - subtract integer and return
 * @v: pointer of type atomic_t
 * @i: integer value to subtract
 *
 * Atomically subtracts @i from @v and returns @v - @i
 */
static inline int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}

#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
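
/*
 * Illustrative sketch, not part of this header: the standard
 * compare-and-swap retry loop used to build derived atomic operations.
 * This hypothetical helper atomically doubles a counter, saturating
 * at INT_MAX.
 *
 *	static inline void atomic_double_saturate(atomic_t *v)
 *	{
 *		int old, new;
 *
 *		do {
 *			old = atomic_read(v);
 *			new = (old > INT_MAX / 2) ? INT_MAX : old * 2;
 *		} while (atomic_cmpxchg(v, old, new) != old);
 *	}
 *
 * atomic_cmpxchg() returns the value actually found in *v; the loop
 * retries until no other CPU changed it between the read and the swap.
 */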

/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
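
/*
 * Illustrative sketch (hypothetical caller code): atomic_inc_not_zero()
 * is the usual way to take a reference to an object that may be
 * concurrently going away, e.g. during a lookup. "obj" is assumed.
 *
 *	if (!atomic_inc_not_zero(&obj->refcount))
 *		return NULL;
 *	return obj;
 *
 * If the count already dropped to zero, the object is being destroyed
 * and must not be resurrected, so the lookup fails instead.
 */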

#define atomic_inc_return(v)  (atomic_add_return(1, v))
#define atomic_dec_return(v)  (atomic_sub_return(1, v))

/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr)				\
	asm volatile(LOCK_PREFIX "andl %0,%1"			\
		     : : "r" (~(mask)), "m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr)				\
	asm volatile(LOCK_PREFIX "orl %0,%1"			\
		     : : "r" (mask), "m" (*(addr)) : "memory")

/* Atomic operations are already serializing on x86 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
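
/*
 * Illustrative usage (hypothetical caller code): on architectures whose
 * atomics do not serialize, these barriers order memory accesses around
 * the atomic op; on x86 they reduce to a compiler barrier. The portable
 * idiom, with assumed "obj" and "DEAD":
 *
 *	obj->state = DEAD;
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&obj->refcount);
 *
 * ensures the store to obj->state is ordered before the decrement on
 * every architecture.
 */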

#include <asm-generic/atomic.h>

#endif /* ASM_X86__ATOMIC_32_H */