/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04 by Ralf Baechle
 */

/*
 * As a workaround for the ATOMIC_DEC_AND_LOCK / atomic_dec_and_lock mess in
 * <linux/spinlock.h> we have to include <linux/spinlock.h> outside the
 * main big wrapper ...
 */
#include <linux/config.h>
#include <linux/spinlock.h>

#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <asm/cpu-features.h>
#include <asm/war.h>

extern spinlock_t atomic_lock;

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)		{ (i) }

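/*
 * Illustrative usage only (not part of this header): a counter is declared
 * and initialized with ATOMIC_INIT() and is then manipulated exclusively
 * through the atomic_* operations defined below, e.g.
 *
 *	static atomic_t example_refcount = ATOMIC_INIT(1);
 *
 *	atomic_inc(&example_refcount);
 *	if (atomic_dec_and_test(&example_refcount))
 *		release_the_resource();		(hypothetical helper)
 *
 * example_refcount and release_the_resource() are made-up names used only
 * for this sketch.
 */
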
/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)		((v)->counter = (i))

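/*
 * Each arithmetic operation below is implemented three ways and the
 * variant is chosen at runtime:
 *
 *  - cpu_has_llsc && R10000_LLSC_WAR: an ll/sc loop whose retry branch is
 *    the branch-likely instruction beqzl, as required by the R10000 ll/sc
 *    errata workaround;
 *  - cpu_has_llsc: the same ll/sc loop with an ordinary beqz;
 *  - otherwise: a fallback that protects the plain C read-modify-write with
 *    atomic_lock and spin_lock_irqsave()/spin_unlock_irqrestore().
 */
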
/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		spin_lock_irqsave(&atomic_lock, flags);
		v->counter += i;
		spin_unlock_irqrestore(&atomic_lock, flags);
	}
}

/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		spin_lock_irqsave(&atomic_lock, flags);
		v->counter -= i;
		spin_unlock_irqrestore(&atomic_lock, flags);
	}
}

/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	sync						\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	sync						\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		spin_lock_irqsave(&atomic_lock, flags);
		result = v->counter;
		result += i;
		v->counter = result;
		spin_unlock_irqrestore(&atomic_lock, flags);
	}

	return result;
}

static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	sync						\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	sync						\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		spin_lock_irqsave(&atomic_lock, flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		spin_unlock_irqrestore(&atomic_lock, flags);
	}

	return result;
}

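/*
 * Note on the *_return variants above and below: a successful sc overwrites
 * the result register with the store-conditional success flag, so the new
 * value is recomputed after the retry branch, and the trailing sync makes
 * these operations serializing (see the barrier comment at the end of this
 * file).
 */
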
/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if the result is greater than or
 * equal to zero.  The function returns @v - @i; the new value is only
 * stored back to @v when it is not negative.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	int result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	sync						\n"
		"1:							\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	sync						\n"
		"1:							\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		spin_lock_irqsave(&atomic_lock, flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		spin_unlock_irqrestore(&atomic_lock, flags);
	}

	return result;
}

#define atomic_dec_return(v)	atomic_sub_return(1,(v))
#define atomic_inc_return(v)	atomic_add_return(1,(v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v)	(atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v)	(atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v)	(atomic_sub_return(1, (v)) == 0)

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)

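/*
 * Illustrative use of atomic_dec_if_positive() (example only, the names are
 * made up): take one unit from a counting resource without letting the count
 * go negative.
 *
 *	if (atomic_dec_if_positive(&pool->available) < 0)
 *		return -EBUSY;		(no unit was taken)
 */
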
/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v)	atomic_add(1,(v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v)	atomic_sub(1,(v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v)	(atomic_add_return(i, (v)) < 0)

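/*
 * The CONFIG_MIPS64 block below mirrors the 32-bit API above for 64-bit
 * counters: atomic64_t wraps a volatile __s64 and the ll/sc loops use the
 * doubleword instructions lld/scd and daddu/dsubu instead of ll/sc and
 * addu/subu.
 */
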
#ifdef CONFIG_MIPS64

typedef struct { volatile __s64 counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v,i)	((v)->counter = (i))

/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%0, %1		# atomic64_add		\n"
		"	daddu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%0, %1		# atomic64_add		\n"
		"	daddu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		spin_lock_irqsave(&atomic_lock, flags);
		v->counter += i;
		spin_unlock_irqrestore(&atomic_lock, flags);
	}
}

/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%0, %1		# atomic64_sub		\n"
		"	dsubu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%0, %1		# atomic64_sub		\n"
		"	dsubu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		spin_lock_irqsave(&atomic_lock, flags);
		v->counter -= i;
		spin_unlock_irqrestore(&atomic_lock, flags);
	}
}

/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	daddu	%0, %1, %3				\n"
		"	sync						\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	daddu	%0, %1, %3				\n"
		"	sync						\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		spin_lock_irqsave(&atomic_lock, flags);
		result = v->counter;
		result += i;
		v->counter = result;
		spin_unlock_irqrestore(&atomic_lock, flags);
	}

	return result;
}

static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	sync						\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	sync						\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		spin_lock_irqsave(&atomic_lock, flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		spin_unlock_irqrestore(&atomic_lock, flags);
	}

	return result;
}

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if the result is greater than or
 * equal to zero.  The function returns @v - @i; the new value is only
 * stored back to @v when it is not negative.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	sync						\n"
		"1:							\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	sync						\n"
		"1:							\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		spin_lock_irqsave(&atomic_lock, flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		spin_unlock_irqrestore(&atomic_lock, flags);
	}

	return result;
}

#define atomic64_dec_return(v)	atomic64_sub_return(1,(v))
#define atomic64_inc_return(v)	atomic64_add_return(1,(v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i,v)	(atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v)	(atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v)	(atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v)	atomic64_add(1,(v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v)	atomic64_sub(1,(v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i,v)	(atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_MIPS64 */

/*
 * atomic*_return operations are serializing but not the non-*_return
 * ones.
 */
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()

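/*
 * Illustrative use of the barrier macros above (example only, obj, dead and
 * ref are made-up names): order a plain store against a following
 * atomic_dec() on SMP.
 *
 *	obj->dead = 1;
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&obj->ref);
 */
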
#endif /* _ASM_ATOMIC_H */