#ifndef __ARCH_X86_64_ATOMIC__
#define __ARCH_X86_64_ATOMIC__

#include <asm/alternative.h>
/* atomic_t should be 32 bit signed type */

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

#define LOCK "lock ; "
/*
 * Make sure gcc doesn't try to be clever and move things around
 * on us. We need to use _exactly_ the address the user gave us,
 * not some alias that contains the same information.
 */
typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)	{ (i) }
/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)	((v)->counter)
/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)	(((v)->counter) = (i))
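/*
 * Usage sketch (illustrative only, not part of this header; the counter
 * name is hypothetical). A counter is initialized statically with
 * ATOMIC_INIT or at runtime with atomic_set; note that atomic_read and
 * atomic_set compile to plain loads and stores, so only the
 * read-modify-write operations below emit a locked instruction.
 *
 *	static atomic_t nr_requests = ATOMIC_INIT(0);
 *
 *	void reset_request_count(void)
 *	{
 *		atomic_set(&nr_requests, 0);
 *	}
 */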
/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "addl %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}
/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "subl %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "subl %2,%0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}
/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static __inline__ void atomic_inc(atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "incl %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}
/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static __inline__ void atomic_dec(atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "decl %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}
/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __inline__ int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "decl %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");
	return c != 0;
}
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "incl %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");
	return c != 0;
}
/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __inline__ int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "addl %2,%0; sets %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}
/**
 * atomic_add_return - add and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	int __i = i;
	__asm__ __volatile__(
		LOCK_PREFIX "xaddl %0, %1;"
		:"=r"(i)
		:"m"(v->counter), "0"(i));
	return i + __i;
}
static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i,v);
}

#define atomic_inc_return(v)	(atomic_add_return(1,v))
#define atomic_dec_return(v)	(atomic_sub_return(1,v))
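/*
 * Usage sketch (hypothetical names): because xaddl hands back the
 * previous value, the *_return forms can allocate unique values without
 * any lock, e.g. sequence or ticket numbers.
 *
 *	static atomic_t next_id = ATOMIC_INIT(0);
 *
 *	int alloc_id(void)
 *	{
 *		return atomic_inc_return(&next_id);
 *	}
 */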
/* A 64-bit atomic type */

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }
/**
 * atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
#define atomic64_read(v)	((v)->counter)
/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v,i)	(((v)->counter) = (i))
/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "addq %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}
/**
 * atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "subq %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}
/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic64_sub_and_test(long i, atomic64_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "subq %2,%0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}
/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static __inline__ void atomic64_inc(atomic64_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "incq %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}
/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static __inline__ void atomic64_dec(atomic64_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "decq %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}
/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __inline__ int atomic64_dec_and_test(atomic64_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "decq %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");
	return c != 0;
}
/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic64_inc_and_test(atomic64_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "incq %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");
	return c != 0;
}
/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __inline__ int atomic64_add_negative(long i, atomic64_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "addq %2,%0; sets %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}
/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __inline__ long atomic64_add_return(long i, atomic64_t *v)
{
	long __i = i;
	__asm__ __volatile__(
		LOCK_PREFIX "xaddq %0, %1;"
		:"=r"(i)
		:"m"(v->counter), "0"(i));
	return i + __i;
}
static __inline__ long atomic64_sub_return(long i, atomic64_t *v)
{
	return atomic64_add_return(-i,v);
}

#define atomic64_inc_return(v)	(atomic64_add_return(1,v))
#define atomic64_dec_return(v)	(atomic64_sub_return(1,v))
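/*
 * Usage sketch (hypothetical names): the atomic64_t variants suit
 * counters that could plausibly overflow 32 bits, such as byte counts.
 *
 *	static atomic64_t bytes_transferred = ATOMIC64_INIT(0);
 *
 *	void account_io(long nbytes)
 *	{
 *		atomic64_add(nbytes, &bytes_transferred);
 *	}
 */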
#define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
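/*
 * Usage sketch (hypothetical helper): atomic_cmpxchg returns the value
 * that was actually found in the counter; the store took effect only if
 * that equals the expected old value. This builds arbitrary
 * read-modify-write loops, e.g. an atomic maximum:
 *
 *	static void atomic_max(atomic_t *v, int new)
 *	{
 *		int cur = atomic_read(v);
 *
 *		while (cur < new) {
 *			int old = atomic_cmpxchg(v, cur, new);
 *			if (old == cur)
 *				break;
 *			cur = old;
 *		}
 *	}
 */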
/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u)			\
({							\
	int c, old;					\
	c = atomic_read(v);				\
	for (;;) {					\
		if (unlikely(c == (u)))			\
			break;				\
		old = atomic_cmpxchg((v), c, c + (a));	\
		if (likely(old == c))			\
			break;				\
		c = old;				\
	}						\
	c != (u);					\
})

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
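/*
 * Usage sketch (hypothetical names): atomic_inc_not_zero is the classic
 * "take a reference only if the object is still alive" primitive, e.g.
 * for lookups that can race with teardown of the last reference. If the
 * count already hit zero, the object is going away and must not be used.
 *
 *	obj = table_lookup(key);
 *	if (obj && !atomic_inc_not_zero(&obj->refcount))
 *		obj = NULL;
 */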
/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr) \
__asm__ __volatile__(LOCK_PREFIX "andl %0,%1" \
: : "r" (~(mask)), "m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr) \
__asm__ __volatile__(LOCK_PREFIX "orl %0,%1" \
: : "r" ((unsigned)(mask)), "m" (*(addr)) : "memory")
/* Atomic operations are already serializing on x86 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
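/*
 * Usage sketch: on x86-64 every LOCK-prefixed instruction is already a
 * full memory barrier, so these macros only need to stop the compiler
 * from reordering. Portable code still spells out the pairing:
 *
 *	obj->done = 1;
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&obj->pending);
 */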
#include <asm-generic/atomic.h>
#endif