/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04 by Ralf Baechle
 */

/*
 * As a workaround for the ATOMIC_DEC_AND_LOCK / atomic_dec_and_lock mess in
 * <linux/spinlock.h> we have to include <linux/spinlock.h> outside the
 * main big wrapper ...
 */
#include <linux/config.h>
#include <linux/spinlock.h>

#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <asm/cpu-features.h>
#include <asm/interrupt.h>
#include <asm/war.h>

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)		{ (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)		((v)->counter = (i))

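/*
 * Illustrative usage sketch (not part of this header's API; the variable and
 * function names below are invented for the example):
 *
 *	static atomic_t example_count = ATOMIC_INIT(0);
 *
 *	static inline void example_reset(void)
 *	{
 *		atomic_set(&example_count, 0);
 *	}
 *
 *	static inline int example_value(void)
 *	{
 *		return atomic_read(&example_count);
 *	}
 */
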
/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.  Uses a load-linked/store-conditional (ll/sc)
 * retry loop where available, otherwise falls back to disabling interrupts.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter += i;
		local_irq_restore(flags);
	}
}

/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter -= i;
		local_irq_restore(flags);
	}
}

/*
 * Same as above, but return the result value.
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}

static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	int result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"	sync						\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"	sync						\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}

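/*
 * Illustrative usage sketch (not part of this header's API; the names are
 * invented): atomic_sub_if_positive() can back a simple credit pool where a
 * caller only proceeds when enough credits remain:
 *
 *	static atomic_t example_credits = ATOMIC_INIT(16);
 *
 *	static inline int example_get_credit(void)
 *	{
 *		return atomic_sub_if_positive(1, &example_credits) >= 0;
 *	}
 *
 *	static inline void example_put_credit(void)
 *	{
 *		atomic_add(1, &example_credits);
 *	}
 */
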
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

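/*
 * Illustrative usage sketch (not part of this header's API; example_max and
 * example_record_max are invented names): atomic_cmpxchg() is normally used
 * in a read/compute/compare-and-swap loop that retries while another CPU
 * races with us, here keeping a running maximum:
 *
 *	static atomic_t example_max = ATOMIC_INIT(0);
 *
 *	static inline void example_record_max(int value)
 *	{
 *		int old = atomic_read(&example_max);
 *
 *		while (old < value) {
 *			int seen = atomic_cmpxchg(&example_max, old, value);
 *			if (seen == old)
 *				break;
 *			old = seen;
 *		}
 *	}
 */
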
/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u)				\
({								\
	int c, old;						\
	c = atomic_read(v);					\
	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

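/*
 * Illustrative usage sketch (not part of this header's API; struct example_obj
 * and example_get are invented names): atomic_inc_not_zero() is the usual way
 * to take a reference to an object that may concurrently be dropping its last
 * reference, succeeding only while the count is still non-zero:
 *
 *	struct example_obj {
 *		atomic_t refcount;
 *	};
 *
 *	static inline struct example_obj *example_get(struct example_obj *obj)
 *	{
 *		if (obj && !atomic_inc_not_zero(&obj->refcount))
 *			obj = NULL;
 *		return obj;
 *	}
 */
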
#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1,(v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1,(v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)

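/*
 * Illustrative usage sketch (not part of this header's API; struct example_buf
 * and the helpers are invented names): a minimal reference count built from
 * the primitives above, releasing the object when the last reference goes:
 *
 *	struct example_buf {
 *		atomic_t refcount;
 *		void (*release)(struct example_buf *buf);
 *	};
 *
 *	static inline void example_buf_get(struct example_buf *buf)
 *	{
 *		atomic_inc(&buf->refcount);
 *	}
 *
 *	static inline void example_buf_put(struct example_buf *buf)
 *	{
 *		if (atomic_dec_and_test(&buf->refcount))
 *			buf->release(buf);
 *	}
 */
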
#ifdef CONFIG_64BIT

typedef struct { volatile __s64 counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v,i)	((v)->counter = (i))

/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_add		\n"
		"	daddu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_add		\n"
		"	daddu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter += i;
		local_irq_restore(flags);
	}
}

/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_sub		\n"
		"	dsubu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_sub		\n"
		"	dsubu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter -= i;
		local_irq_restore(flags);
	}
}

/*
 * Same as above, but return the result value.
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	daddu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	daddu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}

static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"	sync						\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"	sync						\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}

#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1,(v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1,(v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)

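/*
 * Illustrative usage sketch (not part of this header's API; the names are
 * invented): atomic64_t is the natural choice for statistics that can exceed
 * 32 bits, e.g. a byte counter updated concurrently from several CPUs:
 *
 *	static atomic64_t example_rx_bytes = ATOMIC64_INIT(0);
 *
 *	static inline void example_account_rx(long bytes)
 *	{
 *		atomic64_add(bytes, &example_rx_bytes);
 *	}
 *
 *	static inline long example_rx_total(void)
 *	{
 *		return atomic64_read(&example_rx_bytes);
 *	}
 */
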
#endif /* CONFIG_64BIT */

/*
 * atomic*_return operations are serializing but not the non-*_return
 * ones.
 */
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()

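/*
 * Illustrative usage sketch (not part of this header's API; struct example_work
 * and example_complete are invented names): because the non-value-returning
 * atomics above are not serializing, code that needs earlier stores to be
 * visible before a decrement must pair the operation with these barriers:
 *
 *	struct example_work {
 *		int done;
 *		atomic_t pending;
 *	};
 *
 *	static inline void example_complete(struct example_work *w)
 *	{
 *		w->done = 1;
 *		smp_mb__before_atomic_dec();
 *		atomic_dec(&w->pending);
 *	}
 */
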
#include <asm-generic/atomic.h>
#endif /* _ASM_ATOMIC_H */