 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04 by Ralf Baechle
 * As a workaround for the ATOMIC_DEC_AND_LOCK / atomic_dec_and_lock mess in
 * <linux/spinlock.h> we have to include <linux/spinlock.h> outside the
 * main big wrapper ...
#include <linux/spinlock.h>

#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <asm/cpu-features.h>

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)    { (i) }
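/*
 * Illustrative sketch, not part of the original header: ATOMIC_INIT is the
 * static initializer for an atomic_t counter.  The name "pkt_count" below
 * is hypothetical.
 *
 *	static atomic_t pkt_count = ATOMIC_INIT(0);
 */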
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 * Atomically reads the value of @v.
#define atomic_read(v)		((v)->counter)

 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * Atomically sets the value of @v to @i.
#define atomic_set(v,i)		((v)->counter = (i))
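/*
 * Illustrative sketch, not part of the original header: in this
 * implementation atomic_read() and atomic_set() are plain accesses of the
 * counter; they are atomic only in the sense that the load or store itself
 * cannot be torn, and they imply no memory ordering.  The names below are
 * hypothetical.
 *
 *	static atomic_t users = ATOMIC_INIT(0);
 *
 *	void reset_users(void)
 *	{
 *		atomic_set(&users, 0);
 *	}
 *
 *	int current_users(void)
 *	{
 *		return atomic_read(&users);
 *	}
 */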
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 * Atomically adds @i to @v.
static __inline__ void atomic_add(int i, atomic_t * v)
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		"1:	ll	%0, %1		# atomic_add		\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		"1:	ll	%0, %1		# atomic_add		\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
		local_irq_save(flags);
		local_irq_restore(flags);
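/*
 * Illustrative sketch, not part of the original header: the ll/sc sequence
 * above is a retry loop.  ll loads the counter and tags the word, the add
 * is done in a register, and sc stores the result only if nothing else
 * wrote the word in between; on failure the store writes 0 and the branch
 * restarts at label 1.  In plain C the same idea looks roughly like the
 * loop below, written against a hypothetical compare_and_swap() helper
 * that returns non-zero on success.
 *
 *	static void atomic_add_sketch(int i, atomic_t *v)
 *	{
 *		int old;
 *
 *		do {
 *			old = v->counter;
 *		} while (!compare_and_swap(&v->counter, old, old + i));
 *	}
 */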
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 * Atomically subtracts @i from @v.
static __inline__ void atomic_sub(int i, atomic_t * v)
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1:	ll	%0, %1		# atomic_sub		\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1:	ll	%0, %1		# atomic_sub		\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
		local_irq_save(flags);
		local_irq_restore(flags);
 * Same as above, but return the result value
static __inline__ int atomic_add_return(int i, atomic_t * v)
	unsigned long result;
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	addu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	addu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		local_irq_save(flags);
		local_irq_restore(flags);
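/*
 * Illustrative sketch, not part of the original header: the *_return
 * variants hand back the new value of the counter, which is what makes
 * tests such as "did this transition from 0 to 1?" race-free.  The counter
 * and helper below are hypothetical.
 *
 *	static atomic_t pending = ATOMIC_INIT(0);
 *
 *	static int queue_work_once(void)
 *	{
 *		// true only for the caller that moved the count 0 -> 1
 *		return atomic_add_return(1, &pending) == 1;
 *	}
 */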
static __inline__ int atomic_sub_return(int i, atomic_t * v)
	unsigned long result;
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	subu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	subu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		local_irq_save(flags);
		local_irq_restore(flags);
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
	unsigned long result;
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	subu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	subu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		local_irq_save(flags);
		local_irq_restore(flags);
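/*
 * Illustrative sketch, not part of the original header: a simple
 * counting-permit scheme built on atomic_sub_if_positive().  The counter
 * and helper names are hypothetical.
 *
 *	static atomic_t permits = ATOMIC_INIT(4);
 *
 *	static int try_take_permit(void)
 *	{
 *		// result is the old value minus 1; negative means none were left
 *		return atomic_sub_if_positive(1, &permits) >= 0;
 *	}
 */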
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
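/*
 * Illustrative sketch, not part of the original header: atomic_cmpxchg()
 * returns the value that was in the counter before the exchange, so the
 * usual pattern is a read / compute / compare-and-swap retry loop.  The
 * "atomic_max" helper below is hypothetical.
 *
 *	static void atomic_max(atomic_t *v, int new)
 *	{
 *		int old = atomic_read(v);
 *
 *		while (old < new) {
 *			int seen = atomic_cmpxchg(v, old, new);
 *			if (seen == old)
 *				break;		// we installed the new maximum
 *			old = seen;		// lost the race, re-check
 *		}
 *	}
 */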
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
#define atomic_add_unless(v, a, u)				\
({								\
	int c, old;						\
	c = atomic_read(v);					\
	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
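/*
 * Illustrative sketch, not part of the original header: the classic use of
 * atomic_inc_not_zero() is taking a reference to an object that may be
 * concurrently freed once its count reaches zero.  "struct obj" and
 * obj_get() are hypothetical.
 *
 *	struct obj {
 *		atomic_t refcnt;
 *	};
 *
 *	static int obj_get(struct obj *o)
 *	{
 *		// fails (returns 0) if the count already dropped to zero
 *		return atomic_inc_not_zero(&o->refcnt);
 *	}
 */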
#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
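/*
 * Illustrative sketch, not part of the original header: reference counting
 * with atomic_dec_and_test(), continuing the hypothetical struct obj from
 * the sketch above.  Only the caller that drops the last reference sees
 * true and may free the object; obj_free() is hypothetical.
 *
 *	static void obj_put(struct obj *o)
 *	{
 *		if (atomic_dec_and_test(&o->refcnt))
 *			obj_free(o);
 *	}
 */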
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
#define atomic_dec_if_positive(v) atomic_sub_if_positive(1, v)

 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 * Atomically increments @v by 1.
#define atomic_inc(v) atomic_add(1,(v))
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 * Atomically decrements @v by 1.
#define atomic_dec(v) atomic_sub(1,(v))
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)
#ifdef CONFIG_64BIT

typedef struct { volatile __s64 counter; } atomic64_t;

#define ATOMIC64_INIT(i)    { (i) }
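/*
 * Illustrative sketch, not part of the original header: the atomic64_*
 * interface mirrors the 32-bit one, only with a 64-bit counter.  The
 * "bytes_total" counter and account_bytes() helper are hypothetical.
 *
 *	static atomic64_t bytes_total = ATOMIC64_INIT(0);
 *
 *	static void account_bytes(long n)
 *	{
 *		atomic64_add(n, &bytes_total);
 *	}
 */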
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
#define atomic64_read(v)	((v)->counter)

 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
#define atomic64_set(v,i)	((v)->counter = (i))
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 * Atomically adds @i to @v.
static __inline__ void atomic64_add(long i, atomic64_t * v)
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1:	lld	%0, %1		# atomic64_add		\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1:	lld	%0, %1		# atomic64_add		\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
		local_irq_save(flags);
		local_irq_restore(flags);
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 * Atomically subtracts @i from @v.
static __inline__ void atomic64_sub(long i, atomic64_t * v)
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1:	lld	%0, %1		# atomic64_sub		\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1:	lld	%0, %1		# atomic64_sub		\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
		local_irq_save(flags);
		local_irq_restore(flags);
 * Same as above, but return the result value
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
	unsigned long result;
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	daddu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	daddu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		local_irq_save(flags);
		local_irq_restore(flags);
static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
	unsigned long result;
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	dsubu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	dsubu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		local_irq_save(flags);
		local_irq_restore(flags);
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
	unsigned long result;
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	dsubu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	dsubu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		local_irq_save(flags);
		local_irq_restore(flags);
#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
#define atomic64_dec_if_positive(v) atomic64_sub_if_positive(1, v)

 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 * Atomically increments @v by 1.
#define atomic64_inc(v) atomic64_add(1,(v))
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 * Atomically decrements @v by 1.
#define atomic64_dec(v) atomic64_sub(1,(v))
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
#define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)
#endif /* CONFIG_64BIT */
 * atomic*_return operations are serializing but not the non-*_return
 * atomic operations.
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
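/*
 * Illustrative sketch, not part of the original header: the plain
 * atomic_dec()/atomic_inc() do not order surrounding memory accesses, so
 * code that needs ordering brackets them with these barriers.  The flag,
 * counter and helper below are hypothetical.
 *
 *	static int data_ready;
 *	static atomic_t outstanding = ATOMIC_INIT(1);
 *
 *	static void producer_done(void)
 *	{
 *		data_ready = 1;
 *		smp_mb__before_atomic_dec();	// publish the store before the dec
 *		atomic_dec(&outstanding);
 *	}
 */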
#include <asm-generic/atomic.h>

#endif /* _ASM_ATOMIC_H */