linux-2.6: include/asm-mips/atomic.h
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <asm/barrier.h>
#include <asm/cpu-features.h>
#include <asm/war.h>
#include <asm/system.h>

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)    { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)          ((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)                ((v)->counter = (i))
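
/*
 * Illustrative sketch, not part of the original header: a counter declared
 * with ATOMIC_INIT and accessed with atomic_read()/atomic_set().  Both are
 * plain, unserialized accesses; only the read-modify-write operations below
 * use ll/sc, so a read-then-set sequence is not atomic as a whole.  The
 * names pkt_count and pkt_count_drain are made up for this example.
 */
static atomic_t pkt_count = ATOMIC_INIT(0);

static inline int pkt_count_drain(void)
{
        int old = atomic_read(&pkt_count);      /* plain load of ->counter  */

        atomic_set(&pkt_count, 0);              /* plain store; an increment
                                                   racing between the read
                                                   and this set is lost     */
        return old;
}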

/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_add            \n"
                "       addu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_add            \n"
                "       addu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqz    %0, 2f                                  \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                v->counter += i;
                raw_local_irq_restore(flags);
        }
}
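
/*
 * For reference, the ll/sc retry loop above behaves roughly like the
 * following C sketch built on cmpxchg() from <asm/system.h>.  This is a
 * sketch to show the structure of the loop, not a replacement: the real
 * ll/sc version needs no separate compare, because sc itself fails if
 * the location was written since the ll.
 */
static inline void atomic_add_sketch(int i, atomic_t *v)
{
        int old;

        do {
                old = atomic_read(v);   /* like ll: sample the old value   */
        } while (cmpxchg(&v->counter, old, old + i) != old);
                                        /* like sc: the store only succeeds
                                           if the counter was unchanged    */
}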

/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_sub            \n"
                "       subu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_sub            \n"
                "       subu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqz    %0, 2f                                  \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                v->counter -= i;
                raw_local_irq_restore(flags);
        }
}

/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
        unsigned long result;

        smp_llsc_mb();

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_add_return     \n"
                "       addu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       addu    %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_add_return     \n"
                "       addu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqz    %0, 2f                                  \n"
                "       addu    %0, %1, %3                              \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result += i;
                v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
        unsigned long result;

        smp_llsc_mb();

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_return     \n"
                "       subu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_return     \n"
                "       subu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqz    %0, 2f                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}
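
/*
 * Illustrative sketch, not part of the original header: the *_return
 * variants are serializing and hand back the new value, which is what
 * reference counting needs.  Exactly one caller observes the count
 * drop to zero.  The names refobj and refobj_put are made up for this
 * example.
 */
struct refobj {
        atomic_t refcount;
};

static inline void refobj_put(struct refobj *r,
                              void (*release)(struct refobj *))
{
        if (atomic_sub_return(1, &r->refcount) == 0)
                release(r);             /* last reference gone */
}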

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
        int result;

        smp_llsc_mb();

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqzl   %0, 1b                                  \n"
                "        subu   %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqz    %0, 2f                                  \n"
                "        subu   %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}
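
/*
 * Illustrative sketch, not part of the original header: using
 * atomic_sub_if_positive() as a token bucket.  The subtraction only
 * happens when the result stays non-negative; a negative return value
 * means the counter was left untouched.  The names tokens and
 * tokens_try_take are made up for this example.
 */
static atomic_t tokens = ATOMIC_INIT(16);

static inline int tokens_try_take(int n)
{
        return atomic_sub_if_positive(n, &tokens) >= 0;
}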

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
{
        int c, old;
        c = atomic_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c != (u);
}
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
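
/*
 * Illustrative sketch, not part of the original header: the classic
 * lookup-then-take-reference pattern atomic_inc_not_zero() exists for.
 * An object whose count already reached zero is on its way to being
 * freed, so a racing lookup must not revive it.  The names obj and
 * obj_tryget are made up for this example.
 */
struct obj {
        atomic_t refcount;
};

static inline struct obj *obj_tryget(struct obj *o)
{
        if (o && atomic_inc_not_zero(&o->refcount))
                return o;               /* count was non-zero: ref taken */
        return NULL;                    /* object already dying          */
}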

#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)       atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)

#ifdef CONFIG_64BIT

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)        ((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i)      ((v)->counter = (i))

/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_add          \n"
                "       daddu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_add          \n"
                "       daddu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqz    %0, 2f                                  \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                v->counter += i;
                raw_local_irq_restore(flags);
        }
}

/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_sub          \n"
                "       dsubu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_sub          \n"
                "       dsubu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqz    %0, 2f                                  \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                v->counter -= i;
                raw_local_irq_restore(flags);
        }
}

/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
        unsigned long result;

        smp_llsc_mb();

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_add_return   \n"
                "       daddu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       daddu   %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_add_return   \n"
                "       daddu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqz    %0, 2f                                  \n"
                "       daddu   %0, %1, %3                              \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result += i;
                v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
        unsigned long result;

        smp_llsc_mb();

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_return   \n"
                "       dsubu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       dsubu   %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_return   \n"
                "       dsubu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqz    %0, 2f                                  \n"
                "       dsubu   %0, %1, %3                              \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
        long result;

        smp_llsc_mb();

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqzl   %0, 1b                                  \n"
                "        dsubu  %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqz    %0, 2f                                  \n"
                "        dsubu  %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

#define atomic64_cmpxchg(v, o, n) \
        ((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
        long c, old;
        c = atomic64_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic64_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)     atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

/*
 * atomic*_return operations are serializing; the plain (non-*_return)
 * versions are not.
 */
#define smp_mb__before_atomic_dec()     smp_llsc_mb()
#define smp_mb__after_atomic_dec()      smp_llsc_mb()
#define smp_mb__before_atomic_inc()     smp_llsc_mb()
#define smp_mb__after_atomic_inc()      smp_llsc_mb()
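
/*
 * Illustrative sketch, not part of the original header: since plain
 * atomic_dec() is not serializing, ordering against surrounding memory
 * accesses must be added explicitly with the barriers above.  The names
 * flag and pending are made up for this example.
 */
static inline void publish_then_dec(int *flag, atomic_t *pending)
{
        *flag = 1;                      /* publish the update ...          */
        smp_mb__before_atomic_dec();    /* ... before dropping the count   */
        atomic_dec(pending);            /* whoever sees pending hit 0 is
                                           guaranteed to see *flag == 1    */
}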

#include <asm-generic/atomic.h>

#endif /* _ASM_ATOMIC_H */