 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <asm/barrier.h>
#include <asm/cpu-features.h>

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)    { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)
/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)		((v)->counter = (i))
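
/*
 * Usage sketch (illustrative only, not part of the original header),
 * e.g. during initialisation.  Note that atomic_read() and atomic_set()
 * are plain accesses of the volatile counter and imply no memory
 * barrier.
 *
 *	static atomic_t nr_users = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_users, 5);
 *	if (atomic_read(&nr_users) != 5)
 *		BUG();
 */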
/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		"1:	ll	%0, %1		# atomic_add		\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		"1:	ll	%0, %1		# atomic_add		\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
		raw_local_irq_save(flags);
		raw_local_irq_restore(flags);
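
/*
 * Note on the three code paths above (and in the LL/SC routines that
 * follow); the asm bodies are abridged in this excerpt:
 *
 *   - cpu_has_llsc && R10000_LLSC_WAR: an ll/sc retry loop whose retry
 *     branch uses the branch-likely form (beqzl) to work around an
 *     R10000 erratum.
 *   - cpu_has_llsc: the same ll/sc loop with an ordinary beqz retry.
 *   - neither: fall back to disabling interrupts around a plain
 *     read-modify-write using raw_local_irq_save()/restore().
 *
 * A sketch of the full loop that the elided asm implements (operand
 * numbering as in atomic_add above):
 *
 *	1:	ll	%0, %1		load-linked old value
 *		addu	%0, %2		add the increment
 *		sc	%0, %1		store-conditional back
 *		beqz	%0, 1b		retry if the store failed
 */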
/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1:	ll	%0, %1		# atomic_sub		\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1:	ll	%0, %1		# atomic_sub		\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
		raw_local_irq_save(flags);
		raw_local_irq_restore(flags);
/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	addu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	addu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		raw_local_irq_save(flags);
		raw_local_irq_restore(flags);
static __inline__ int atomic_sub_return(int i, atomic_t * v)
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	subu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	subu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		raw_local_irq_save(flags);
		raw_local_irq_restore(flags);
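
/*
 * Illustrative only (not part of the original header): unlike
 * atomic_add()/atomic_sub(), the *_return variants hand back the new
 * value and are serializing (see the barrier note near the end of this
 * file), so the result can be used for decisions.  A hypothetical
 * throttle, with PENDING_LIMIT as a placeholder constant:
 *
 *	static atomic_t nr_pending = ATOMIC_INIT(0);
 *
 *	if (atomic_add_return(1, &nr_pending) > PENDING_LIMIT) {
 *		atomic_sub(1, &nr_pending);
 *		return -EBUSY;
 *	}
 */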
/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	subu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	subu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		raw_local_irq_save(flags);
		raw_local_irq_restore(flags);
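
/*
 * Illustrative only: atomic_sub_if_positive() is handy for consuming
 * from a budget without letting it go negative.  It returns the old
 * value minus @i, and performs the store only when that result is not
 * negative.  With a hypothetical 'credits' counter:
 *
 *	static atomic_t credits = ATOMIC_INIT(10);
 *
 *	if (atomic_sub_if_positive(3, &credits) >= 0)
 *		proceed, three credits were taken
 *	else
 *		not enough credits, counter left untouched
 */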
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
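
/*
 * Illustrative only: atomic_cmpxchg() compares the counter with 'o'
 * and, only if they match, replaces it with 'n'; in either case it
 * returns the value it found.  A typical compare-and-swap retry loop
 * over a hypothetical 'state' counter, setting bit 0:
 *
 *	static atomic_t state = ATOMIC_INIT(0);
 *	int old, cur;
 *
 *	cur = atomic_read(&state);
 *	do {
 *		old = cur;
 *		cur = atomic_cmpxchg(&state, old, old | 0x1);
 *	} while (cur != old);
 */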
/*
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u)				\
({								\
	int c, old;						\
	c = atomic_read(v);					\
	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
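
/*
 * Illustrative only: atomic_inc_not_zero() is the usual way to take a
 * reference on an object whose refcount may already have dropped to
 * zero because the object is being torn down.  With a hypothetical
 * object type:
 *
 *	struct obj { atomic_t refcnt; ... };
 *
 *	struct obj *obj_tryget(struct obj *p)
 *	{
 *		return atomic_inc_not_zero(&p->refcnt) ? p : NULL;
 *	}
 */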
#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))
/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1,(v))
/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1,(v))
/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)
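
/*
 * Illustrative only: the classic reference-counting pattern built from
 * the helpers above (struct obj and obj_free() are hypothetical):
 *
 *	void obj_get(struct obj *p)
 *	{
 *		atomic_inc(&p->refcnt);
 *	}
 *
 *	void obj_put(struct obj *p)
 *	{
 *		if (atomic_dec_and_test(&p->refcnt))
 *			obj_free(p);
 *	}
 */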
#ifdef CONFIG_64BIT

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 */
#define atomic64_read(v)	((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 */
#define atomic64_set(v,i)	((v)->counter = (i))
/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1:	lld	%0, %1		# atomic64_add		\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1:	lld	%0, %1		# atomic64_add		\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
		raw_local_irq_save(flags);
		raw_local_irq_restore(flags);
/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1:	lld	%0, %1		# atomic64_sub		\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1:	lld	%0, %1		# atomic64_sub		\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
		raw_local_irq_save(flags);
		raw_local_irq_restore(flags);
/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	daddu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	daddu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		raw_local_irq_save(flags);
		raw_local_irq_restore(flags);
static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	dsubu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	dsubu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		raw_local_irq_save(flags);
		raw_local_irq_restore(flags);
/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	dsubu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	dsubu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		raw_local_irq_save(flags);
		raw_local_irq_restore(flags);
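
/*
 * Illustrative only: the atomic64_* variants mirror the 32-bit API but
 * operate on a 64-bit counter, which suits statistics that would soon
 * wrap 32 bits (byte counts, for example), and are only provided on
 * 64-bit kernels here (see the CONFIG_64BIT guard).  A hypothetical
 * traffic counter, with len and total as placeholder variables:
 *
 *	static atomic64_t tx_bytes = ATOMIC64_INIT(0);
 *
 *	atomic64_add(len, &tx_bytes);
 *	total = atomic64_read(&tx_bytes);
 */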
#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))
/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)
/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)
/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1,(v))
/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1,(v))
/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */
/*
 * atomic*_return operations are serializing but not the non-*_return
 * operations.
 */
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
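
/*
 * Illustrative only: since the plain atomic_inc()/atomic_dec() are not
 * serializing on this architecture, callers that need ordering wrap
 * them with the macros above, e.g. to publish updates to an object
 * before dropping a hypothetical 'pending' count:
 *
 *	...update the object...
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&obj->pending);
 */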

#include <asm-generic/atomic.h>
#endif /* _ASM_ATOMIC_H */