2 * Atomic operations that C can't guarantee us. Useful for
3 * resource counting etc.
5 * But use these as seldom as possible since they are much slower
6 * than regular operations.
8 * This file is subject to the terms and conditions of the GNU General Public
9 * License. See the file "COPYING" in the main directory of this archive
12 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
17 #include <linux/irqflags.h>
18 #include <linux/types.h>
19 #include <asm/barrier.h>
20 #include <asm/cpu-features.h>
22 #include <asm/system.h>
24 #define ATOMIC_INIT(i) { (i) }
27 * atomic_read - read atomic variable
28 * @v: pointer of type atomic_t
30 * Atomically reads the value of @v.
32 #define atomic_read(v) ((v)->counter)
35 * atomic_set - set atomic variable
36 * @v: pointer of type atomic_t
39 * Atomically sets the value of @v to @i.
41 #define atomic_set(v, i) ((v)->counter = (i))
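/*
 * Usage sketch (illustrative only, not part of this header): declaring and
 * accessing a simple counter.  "foo_events" and the helpers below are
 * hypothetical example names, not symbols defined by this file.
 *
 *	static atomic_t foo_events = ATOMIC_INIT(0);
 *
 *	void foo_reset_events(void)
 *	{
 *		atomic_set(&foo_events, 0);
 *	}
 *
 *	int foo_read_events(void)
 *	{
 *		return atomic_read(&foo_events);
 *	}
 *
 * Note that atomic_read() and atomic_set() are plain accesses of the
 * counter word; they imply no memory barriers.
 */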
44 * atomic_add - add integer to atomic variable
45 * @i: integer value to add
46 * @v: pointer of type atomic_t
48 * Atomically adds @i to @v.
50 static __inline__ void atomic_add(int i, atomic_t * v)
52 if (cpu_has_llsc && R10000_LLSC_WAR) {
57 "1: ll %0, %1 # atomic_add \n"
62 : "=&r" (temp), "=m" (v->counter)
63 : "Ir" (i), "m" (v->counter));
64 } else if (cpu_has_llsc) {
69 "1: ll %0, %1 # atomic_add \n"
77 : "=&r" (temp), "=m" (v->counter)
78 : "Ir" (i), "m" (v->counter));
82 raw_local_irq_save(flags);
84 raw_local_irq_restore(flags);
89 * atomic_sub - subtract integer from atomic variable
90 * @i: integer value to subtract
91 * @v: pointer of type atomic_t
93 * Atomically subtracts @i from @v.
95 static __inline__ void atomic_sub(int i, atomic_t * v)
97 if (cpu_has_llsc && R10000_LLSC_WAR) {
100 __asm__ __volatile__(
102 "1: ll %0, %1 # atomic_sub \n"
107 : "=&r" (temp), "=m" (v->counter)
108 : "Ir" (i), "m" (v->counter));
109 } else if (cpu_has_llsc) {
112 __asm__ __volatile__(
114 "1: ll %0, %1 # atomic_sub \n"
122 : "=&r" (temp), "=m" (v->counter)
123 : "Ir" (i), "m" (v->counter));
127 raw_local_irq_save(flags);
129 raw_local_irq_restore(flags);
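/*
 * Usage sketch (illustrative only): counting outstanding objects of a
 * hypothetical subsystem.  "foo_nr_objects" is an invented name for the
 * example.
 *
 *	static atomic_t foo_nr_objects = ATOMIC_INIT(0);
 *
 *	void foo_object_created(void)
 *	{
 *		atomic_add(1, &foo_nr_objects);
 *	}
 *
 *	void foo_object_destroyed(void)
 *	{
 *		atomic_sub(1, &foo_nr_objects);
 *	}
 *
 * Neither call returns the new value; when the updated value is needed,
 * use the *_return or *_and_test variants defined below.
 */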
134 * Same as above, but return the result value
136 static __inline__ int atomic_add_return(int i, atomic_t * v)
142 if (cpu_has_llsc && R10000_LLSC_WAR) {
145 __asm__ __volatile__(
147 "1: ll %1, %2 # atomic_add_return \n"
148 " addu %0, %1, %3 \n"
151 " addu %0, %1, %3 \n"
153 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
154 : "Ir" (i), "m" (v->counter)
156 } else if (cpu_has_llsc) {
159 __asm__ __volatile__(
161 "1: ll %1, %2 # atomic_add_return \n"
162 " addu %0, %1, %3 \n"
165 " addu %0, %1, %3 \n"
170 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
171 : "Ir" (i), "m" (v->counter)
176 raw_local_irq_save(flags);
180 raw_local_irq_restore(flags);
188 static __inline__ int atomic_sub_return(int i, atomic_t * v)
194 if (cpu_has_llsc && R10000_LLSC_WAR) {
197 __asm__ __volatile__(
199 "1: ll %1, %2 # atomic_sub_return \n"
200 " subu %0, %1, %3 \n"
203 " subu %0, %1, %3 \n"
205 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
206 : "Ir" (i), "m" (v->counter)
208 } else if (cpu_has_llsc) {
211 __asm__ __volatile__(
213 "1: ll %1, %2 # atomic_sub_return \n"
214 " subu %0, %1, %3 \n"
217 " subu %0, %1, %3 \n"
222 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
223 : "Ir" (i), "m" (v->counter)
228 raw_local_irq_save(flags);
232 raw_local_irq_restore(flags);
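/*
 * Usage sketch (illustrative only): the *_return variants are for cases
 * where the updated value is needed, here to enforce a hypothetical limit
 * FOO_MAX_USERS on concurrent users.  Unlike atomic_add()/atomic_sub(),
 * the *_return variants are serializing (see the barrier note at the end
 * of this file).
 *
 *	static atomic_t foo_users = ATOMIC_INIT(0);
 *
 *	int foo_user_enter(void)
 *	{
 *		if (atomic_add_return(1, &foo_users) > FOO_MAX_USERS) {
 *			atomic_sub(1, &foo_users);
 *			return -EBUSY;
 *		}
 *		return 0;
 *	}
 *
 *	void foo_user_exit(void)
 *	{
 *		atomic_sub(1, &foo_users);
 *	}
 */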
241 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
242 * @i: integer value to subtract
243 * @v: pointer of type atomic_t
245 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
246 * The function returns the old value of @v minus @i.
248 static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
254 if (cpu_has_llsc && R10000_LLSC_WAR) {
257 __asm__ __volatile__(
259 "1: ll %1, %2 # atomic_sub_if_positive\n"
260 " subu %0, %1, %3 \n"
265 " subu %0, %1, %3 \n"
269 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
270 : "Ir" (i), "m" (v->counter)
272 } else if (cpu_has_llsc) {
275 __asm__ __volatile__(
277 "1: ll %1, %2 # atomic_sub_if_positive\n"
278 " subu %0, %1, %3 \n"
283 " subu %0, %1, %3 \n"
290 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
291 : "Ir" (i), "m" (v->counter)
296 raw_local_irq_save(flags);
301 raw_local_irq_restore(flags);
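/*
 * Usage sketch (illustrative only): atomic_sub_if_positive() as a
 * "take one unit if available" primitive.  Since the return value is the
 * old value minus @i, a negative result means the subtraction was not
 * performed.  "foo_free_slots" and the helpers are invented names.
 *
 *	static atomic_t foo_free_slots = ATOMIC_INIT(4);
 *
 *	int foo_try_get_slot(void)
 *	{
 *		return atomic_sub_if_positive(1, &foo_free_slots) >= 0;
 *	}
 *
 *	void foo_put_slot(void)
 *	{
 *		atomic_add(1, &foo_free_slots);
 *	}
 */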
309 #define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
310 #define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
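/*
 * Usage sketch (illustrative only): a retry loop built on atomic_cmpxchg(),
 * here adding to a hypothetical counter "foo_value" while clamping it to a
 * maximum.  The loop restarts whenever another CPU modified the counter
 * between the atomic_read() and the atomic_cmpxchg().
 *
 *	static atomic_t foo_value = ATOMIC_INIT(0);
 *
 *	int foo_add_clamped(int delta, int max)
 *	{
 *		int old, new;
 *
 *		do {
 *			old = atomic_read(&foo_value);
 *			new = old + delta;
 *			if (new > max)
 *				new = max;
 *		} while (atomic_cmpxchg(&foo_value, old, new) != old);
 *
 *		return new;
 *	}
 */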
313 * atomic_add_unless - add unless the number is a given value
314 * @v: pointer of type atomic_t
315 * @a: the amount to add to v...
316 * @u: ...unless v is equal to u.
318 * Atomically adds @a to @v, so long as it was not @u.
319 * Returns non-zero if @v was not @u, and zero otherwise.
321 static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
326 if (unlikely(c == (u)))
328 old = atomic_cmpxchg((v), c, c + (a));
329 if (likely(old == c))
335 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
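/*
 * Usage sketch (illustrative only): atomic_inc_not_zero() lets a reader take
 * a new reference only while the object is still live (refcount not yet
 * zero).  "struct foo" and foo_get() are invented names for the example.
 *
 *	struct foo {
 *		atomic_t refcount;
 *	};
 *
 *	struct foo *foo_get(struct foo *f)
 *	{
 *		if (!atomic_inc_not_zero(&f->refcount))
 *			return NULL;
 *		return f;
 *	}
 */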
337 #define atomic_dec_return(v) atomic_sub_return(1, (v))
338 #define atomic_inc_return(v) atomic_add_return(1, (v))
341 * atomic_sub_and_test - subtract value from variable and test result
342 * @i: integer value to subtract
343 * @v: pointer of type atomic_t
345 * Atomically subtracts @i from @v and returns
346 * true if the result is zero, or false for all other cases.
349 #define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)
352 * atomic_inc_and_test - increment and test
353 * @v: pointer of type atomic_t
355 * Atomically increments @v by 1
356 * and returns true if the result is zero, or false for all other cases.
359 #define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
362 * atomic_dec_and_test - decrement by 1 and test
363 * @v: pointer of type atomic_t
365 * Atomically decrements @v by 1 and
366 * returns true if the result is 0, or false for all other cases.
369 #define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
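/*
 * Usage sketch (illustrative only): the usual reference-count release
 * pattern, pairing with the foo_get() sketch above.  The last holder to
 * drop its reference frees the object; foo_destroy() is an assumed
 * destructor.
 *
 *	void foo_put(struct foo *f)
 *	{
 *		if (atomic_dec_and_test(&f->refcount))
 *			foo_destroy(f);
 *	}
 */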
372 * atomic_dec_if_positive - decrement by 1 if old value positive
373 * @v: pointer of type atomic_t
375 #define atomic_dec_if_positive(v) atomic_sub_if_positive(1, v)
378 * atomic_inc - increment atomic variable
379 * @v: pointer of type atomic_t
381 * Atomically increments @v by 1.
383 #define atomic_inc(v) atomic_add(1, (v))
386 * atomic_dec - decrement atomic variable
387 * @v: pointer of type atomic_t
389 * Atomically decrements @v by 1.
391 #define atomic_dec(v) atomic_sub(1, (v))
394 * atomic_add_negative - add and test if negative
395 * @v: pointer of type atomic_t
396 * @i: integer value to add
398 * Atomically adds @i to @v and returns true
399 * if the result is negative, or false when
400 * result is greater than or equal to zero.
402 #define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)
406 #define ATOMIC64_INIT(i) { (i) }
409 * atomic64_read - read atomic variable
410 * @v: pointer of type atomic64_t
413 #define atomic64_read(v) ((v)->counter)
416 * atomic64_set - set atomic variable
417 * @v: pointer of type atomic64_t
420 #define atomic64_set(v, i) ((v)->counter = (i))
423 * atomic64_add - add integer to atomic variable
424 * @i: integer value to add
425 * @v: pointer of type atomic64_t
427 * Atomically adds @i to @v.
429 static __inline__ void atomic64_add(long i, atomic64_t * v)
431 if (cpu_has_llsc && R10000_LLSC_WAR) {
434 __asm__ __volatile__(
436 "1: lld %0, %1 # atomic64_add \n"
441 : "=&r" (temp), "=m" (v->counter)
442 : "Ir" (i), "m" (v->counter));
443 } else if (cpu_has_llsc) {
446 __asm__ __volatile__(
448 "1: lld %0, %1 # atomic64_add \n"
456 : "=&r" (temp), "=m" (v->counter)
457 : "Ir" (i), "m" (v->counter));
461 raw_local_irq_save(flags);
463 raw_local_irq_restore(flags);
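/*
 * Usage sketch (illustrative only): atomic64_t suits counters that could
 * realistically overflow 32 bits, e.g. a byte counter.  It is only provided
 * under CONFIG_64BIT in this header.  "foo_bytes" is an invented name.
 *
 *	static atomic64_t foo_bytes = ATOMIC64_INIT(0);
 *
 *	void foo_account(long nbytes)
 *	{
 *		atomic64_add(nbytes, &foo_bytes);
 *	}
 *
 *	long foo_bytes_total(void)
 *	{
 *		return atomic64_read(&foo_bytes);
 *	}
 */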
468 * atomic64_sub - subtract integer from atomic variable
469 * @i: integer value to subtract
470 * @v: pointer of type atomic64_t
472 * Atomically subtracts @i from @v.
474 static __inline__ void atomic64_sub(long i, atomic64_t * v)
476 if (cpu_has_llsc && R10000_LLSC_WAR) {
479 __asm__ __volatile__(
481 "1: lld %0, %1 # atomic64_sub \n"
486 : "=&r" (temp), "=m" (v->counter)
487 : "Ir" (i), "m" (v->counter));
488 } else if (cpu_has_llsc) {
491 __asm__ __volatile__(
493 "1: lld %0, %1 # atomic64_sub \n"
501 : "=&r" (temp), "=m" (v->counter)
502 : "Ir" (i), "m" (v->counter));
506 raw_local_irq_save(flags);
508 raw_local_irq_restore(flags);
513 * Same as above, but return the result value
515 static __inline__ long atomic64_add_return(long i, atomic64_t * v)
521 if (cpu_has_llsc && R10000_LLSC_WAR) {
524 __asm__ __volatile__(
526 "1: lld %1, %2 # atomic64_add_return \n"
527 " addu %0, %1, %3 \n"
530 " addu %0, %1, %3 \n"
532 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
533 : "Ir" (i), "m" (v->counter)
535 } else if (cpu_has_llsc) {
538 __asm__ __volatile__(
540 "1: lld %1, %2 # atomic64_add_return \n"
541 " addu %0, %1, %3 \n"
544 " addu %0, %1, %3 \n"
549 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
550 : "Ir" (i), "m" (v->counter)
555 raw_local_irq_save(flags);
559 raw_local_irq_restore(flags);
567 static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
573 if (cpu_has_llsc && R10000_LLSC_WAR) {
576 __asm__ __volatile__(
578 "1: lld %1, %2 # atomic64_sub_return \n"
579 " subu %0, %1, %3 \n"
582 " subu %0, %1, %3 \n"
584 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
585 : "Ir" (i), "m" (v->counter)
587 } else if (cpu_has_llsc) {
590 __asm__ __volatile__(
592 "1: lld %1, %2 # atomic64_sub_return \n"
593 " subu %0, %1, %3 \n"
596 " subu %0, %1, %3 \n"
601 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
602 : "Ir" (i), "m" (v->counter)
607 raw_local_irq_save(flags);
611 raw_local_irq_restore(flags);
620 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
621 * @i: integer value to subtract
622 * @v: pointer of type atomic64_t
624 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
625 * The function returns the old value of @v minus @i.
627 static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
633 if (cpu_has_llsc && R10000_LLSC_WAR) {
636 __asm__ __volatile__(
638 "1: lld %1, %2 # atomic64_sub_if_positive\n"
639 " dsubu %0, %1, %3 \n"
644 " dsubu %0, %1, %3 \n"
648 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
649 : "Ir" (i), "m" (v->counter)
651 } else if (cpu_has_llsc) {
654 __asm__ __volatile__(
656 "1: lld %1, %2 # atomic64_sub_if_positive\n"
657 " dsubu %0, %1, %3 \n"
662 " dsubu %0, %1, %3 \n"
669 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
670 : "Ir" (i), "m" (v->counter)
675 raw_local_irq_save(flags);
680 raw_local_irq_restore(flags);
688 #define atomic64_cmpxchg(v, o, n) \
689 ((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
690 #define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))
693 * atomic64_add_unless - add unless the number is a given value
694 * @v: pointer of type atomic64_t
695 * @a: the amount to add to v...
696 * @u: ...unless v is equal to u.
698 * Atomically adds @a to @v, so long as it was not @u.
699 * Returns non-zero if @v was not @u, and zero otherwise.
701 static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
704 c = atomic64_read(v);
706 if (unlikely(c == (u)))
708 old = atomic64_cmpxchg((v), c, c + (a));
709 if (likely(old == c))
716 #define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
718 #define atomic64_dec_return(v) atomic64_sub_return(1, (v))
719 #define atomic64_inc_return(v) atomic64_add_return(1, (v))
722 * atomic64_sub_and_test - subtract value from variable and test result
723 * @i: integer value to subtract
724 * @v: pointer of type atomic64_t
726 * Atomically subtracts @i from @v and returns
727 * true if the result is zero, or false for all other cases.
730 #define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)
733 * atomic64_inc_and_test - increment and test
734 * @v: pointer of type atomic64_t
736 * Atomically increments @v by 1
737 * and returns true if the result is zero, or false for all other cases.
740 #define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
743 * atomic64_dec_and_test - decrement by 1 and test
744 * @v: pointer of type atomic64_t
746 * Atomically decrements @v by 1 and
747 * returns true if the result is 0, or false for all other cases.
750 #define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)
753 * atomic64_dec_if_positive - decrement by 1 if old value positive
754 * @v: pointer of type atomic64_t
756 #define atomic64_dec_if_positive(v) atomic64_sub_if_positive(1, v)
759 * atomic64_inc - increment atomic variable
760 * @v: pointer of type atomic64_t
762 * Atomically increments @v by 1.
764 #define atomic64_inc(v) atomic64_add(1, (v))
767 * atomic64_dec - decrement atomic variable
768 * @v: pointer of type atomic64_t
770 * Atomically decrements @v by 1.
772 #define atomic64_dec(v) atomic64_sub(1, (v))
775 * atomic64_add_negative - add and test if negative
776 * @v: pointer of type atomic64_t
777 * @i: integer value to add
779 * Atomically adds @i to @v and returns true
780 * if the result is negative, or false when
781 * result is greater than or equal to zero.
783 #define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)
785 #endif /* CONFIG_64BIT */
788 * atomic*_return operations are serializing but not the non-*_return ones.
791 #define smp_mb__before_atomic_dec() smp_llsc_mb()
792 #define smp_mb__after_atomic_dec() smp_llsc_mb()
793 #define smp_mb__before_atomic_inc() smp_llsc_mb()
794 #define smp_mb__after_atomic_inc() smp_llsc_mb()
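/*
 * Usage sketch (illustrative only): the smp_mb__before/after_atomic_*
 * macros supply the ordering that plain atomic_inc()/atomic_dec() lack.
 * A hypothetical producer that publishes data before decrementing a
 * pending count:
 *
 *	foo->data = value;
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&foo->pending);
 *
 * Without the barrier, another CPU that sees the decremented count is not
 * guaranteed to also see the store to foo->data.
 */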
796 #include <asm-generic/atomic.h>
798 #endif /* _ASM_ATOMIC_H */