#ifndef __ARCH_X86_64_ATOMIC__
#define __ARCH_X86_64_ATOMIC__

#include <asm/alternative.h>
#include <asm/cmpxchg.h>

/* atomic_t should be 32 bit signed type */

/*
 * Atomic operations that C can't guarantee us. Useful for
 * resource counting etc.
 */

#ifdef CONFIG_SMP
#define LOCK "lock ; "
#else
#define LOCK ""
#endif

/*
 * Make sure gcc doesn't try to be clever and move things around
 * on us. We need to use _exactly_ the address the user gave us,
 * not some alias that contains the same information.
 */
typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)	{ (i) }

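/*
 * Illustrative sketch, not part of the original header: declaring and
 * initializing a counter of this type. The name nr_requests is
 * hypothetical.
 *
 *	static atomic_t nr_requests = ATOMIC_INIT(0);
 *
 *	atomic_inc(&nr_requests);
 *	printk("pending: %d\n", atomic_read(&nr_requests));
 */
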
/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	(((v)->counter) = (i))

/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic_add(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "addl %1,%0"
		     : "=m" (v->counter)
		     : "ir" (i), "m" (v->counter));
}

/**
 * atomic_sub - subtract the atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic_sub(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "subl %1,%0"
		     : "=m" (v->counter)
		     : "ir" (i), "m" (v->counter));
}

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_sub_and_test(int i, atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "subl %2,%0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "ir" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic_inc(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "incl %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic_dec(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "decl %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "decl %0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "incl %0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "addl %2,%0; sets %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "ir" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic_add_return - add and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
	int __i = i;
	/* xadd writes the old value back into %0, so old + __i is the result */
	asm volatile(LOCK_PREFIX "xaddl %0, %1"
		     : "+r" (i), "+m" (v->counter)
		     : : "memory");
	return i + __i;
}

static inline int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}

#define atomic_inc_return(v)  (atomic_add_return(1, v))
#define atomic_dec_return(v)  (atomic_sub_return(1, v))

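/*
 * Illustrative sketch, not part of the original header: unlike
 * atomic_inc(), atomic_inc_return() hands back the updated value, so a
 * single lock'd xadd can issue unique ticket numbers. next_ticket is a
 * hypothetical name.
 *
 *	static atomic_t next_ticket = ATOMIC_INIT(0);
 *
 *	int ticket = atomic_inc_return(&next_ticket);
 */
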
/* A 64-bit atomic type */

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

/**
 * atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
#define atomic64_read(v)	((v)->counter)

/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i)	(((v)->counter) = (i))

/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic64_add(long i, atomic64_t *v)
{
	/* "e": only 32-bit sign-extended immediates fit addq's encoding */
	asm volatile(LOCK_PREFIX "addq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}

/**
 * atomic64_sub - subtract the atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic64_sub(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "subq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic64_sub_and_test(long i, atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "subq %2,%0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "er" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic64_inc(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "incq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic64_dec(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "decq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic64_dec_and_test(atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "decq %0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic64_inc_and_test(atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "incq %0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int atomic64_add_negative(long i, atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "addq %2,%0; sets %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "er" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static inline long atomic64_add_return(long i, atomic64_t *v)
{
	long __i = i;
	asm volatile(LOCK_PREFIX "xaddq %0, %1"
		     : "+r" (i), "+m" (v->counter)
		     : : "memory");
	return i + __i;
}

static inline long atomic64_sub_return(long i, atomic64_t *v)
{
	return atomic64_add_return(-i, v);
}

#define atomic64_inc_return(v)  (atomic64_add_return(1, (v)))
#define atomic64_dec_return(v)  (atomic64_sub_return(1, (v)))

#define atomic64_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))

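/*
 * Illustrative sketch, not part of the original header: atomic_cmpxchg()
 * stores the new value only if the counter still holds the old one, and
 * returns the value it actually found, so a return equal to the old
 * value means the caller won any race. The in_use flag below is an
 * assumption for the example.
 *
 *	static atomic_t in_use = ATOMIC_INIT(0);
 *
 *	if (atomic_cmpxchg(&in_use, 0, 1) == 0) {
 *		... the 0 -> 1 transition was ours, resource acquired ...
 *	}
 */
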
/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

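/*
 * Illustrative sketch, not part of the original header:
 * atomic_inc_not_zero() is the usual way to take a reference to an
 * object whose count may already have dropped to zero because it is
 * being torn down. struct foo and foo_get() are hypothetical, matching
 * the earlier sketch; a zero return means the object must not be used.
 *
 *	static int foo_get(struct foo *f)
 *	{
 *		return atomic_inc_not_zero(&f->refcnt);
 *	}
 */
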
/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr) \
	asm volatile(LOCK_PREFIX "andl %0,%1" \
		     : : "r" (~(mask)), "m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr) \
	asm volatile(LOCK_PREFIX "orl %0,%1" \
		     : : "r" ((unsigned)(mask)), "m" (*(addr)) \
		     : "memory")

/* Atomic operations are already serializing on x86 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

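/*
 * Illustrative sketch, not part of the original header: portable code
 * must still mark the ordering it needs, even though the lock prefix
 * already makes these barriers free on x86. The obj fields are
 * assumptions for the example.
 *
 *	obj->status = DONE;
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&obj->pending);
 *
 * On x86 the smp_mb__before_atomic_dec() compiles to a pure compiler
 * barrier; on weaker architectures it emits a real memory barrier so
 * the status store cannot be reordered past the decrement.
 */
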
#include <asm-generic/atomic.h>
#endif	/* __ARCH_X86_64_ATOMIC__ */