linux-2.6: include/asm-mips/atomic.h
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04 by Ralf Baechle
 */

/*
 * As a workaround for the ATOMIC_DEC_AND_LOCK / atomic_dec_and_lock mess in
 * <linux/spinlock.h> we have to include <linux/spinlock.h> outside the
 * main big wrapper ...
 */
#include <linux/config.h>
#include <linux/spinlock.h>

#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <asm/cpu-features.h>
#include <asm/interrupt.h>
#include <asm/war.h>

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)    { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)          ((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)         ((v)->counter = (i))
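
/*
 * Usage sketch (illustrative only, not part of the original header;
 * the helper name is hypothetical): atomic_read() and atomic_set()
 * are each atomic on their own, but a read-modify-write built from
 * the pair is not; another CPU may update the counter between the
 * two calls.
 */
static __inline__ int example_read_and_clear(atomic_t *counter)
{
        int old = atomic_read(counter);         /* atomic read ... */

        atomic_set(counter, 0);                 /* ... but not atomic as a pair */
        return old;
}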

/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_add            \n"
                "       addu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_add            \n"
                "       addu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                local_irq_save(flags);
                v->counter += i;
                local_irq_restore(flags);
        }
}
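
/*
 * Note on the three code paths above, a pattern every operation in
 * this file repeats: the first uses the branch-likely beqzl to work
 * around an ll/sc erratum on early R10000 CPUs, the second is the
 * plain ll/sc retry loop for other CPUs with load-linked /
 * store-conditional, and the last disables interrupts for CPUs
 * without ll/sc, which Linux supports only as uniprocessors, so the
 * irq-off section is sufficient there.
 */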

/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_sub            \n"
                "       subu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_sub            \n"
                "       subu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                local_irq_save(flags);
                v->counter -= i;
                local_irq_restore(flags);
        }
}

/*
 * Same as above, but return the new value.
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_add_return     \n"
                "       addu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       addu    %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_add_return     \n"
                "       addu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       addu    %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = v->counter;
                result += i;
                v->counter = result;
                local_irq_restore(flags);
        }

        return result;
}

static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_return     \n"
                "       subu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_return     \n"
                "       subu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = v->counter;
                result -= i;
                v->counter = result;
                local_irq_restore(flags);
        }

        return result;
}

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqzl   %0, 1b                                  \n"
                "        subu   %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "       sync                                            \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqz    %0, 1b                                  \n"
                "        subu   %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "       sync                                            \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                local_irq_restore(flags);
        }

        return result;
}
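
/*
 * Usage sketch (illustrative only; the token-bucket idea and helper
 * name are hypothetical, not part of this header): consume one unit
 * of a resource only while some remain.
 */
static __inline__ int example_take_token(atomic_t *tokens)
{
        /* old value minus 1; a negative result means nothing was taken */
        return atomic_sub_if_positive(1, tokens) >= 0;
}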

#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
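
/*
 * Usage sketch (illustrative only; example_atomic_max() is a
 * hypothetical helper): the classic cmpxchg retry loop, here raising
 * @v to @new unless @v is already at least that large.
 */
static __inline__ void example_atomic_max(atomic_t *v, int new)
{
        int old = atomic_read(v);

        while (old < new) {
                int seen = atomic_cmpxchg(v, old, new);

                if (seen == old)
                        break;          /* our update won the race */
                old = seen;             /* lost the race; retry with the fresh value */
        }
}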

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u)                              \
({                                                              \
        int c, old;                                             \
        c = atomic_read(v);                                     \
        while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
                c = old;                                        \
        c != (u);                                               \
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
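
/*
 * Usage sketch (illustrative only; the refcount parameter and helper
 * name are hypothetical): atomic_inc_not_zero() takes a reference
 * only if at least one is still held, so an object whose count has
 * already dropped to zero is never resurrected.
 */
static __inline__ int example_get_ref(atomic_t *refcount)
{
        /* non-zero on success; zero means the object is being torn down */
        return atomic_inc_not_zero(refcount);
}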

#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))
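
/*
 * Usage sketch (illustrative only; the counter is a hypothetical
 * example variable): atomic_inc_return() as a ticket dispenser, so
 * each caller gets a distinct, monotonically increasing number.
 */
static atomic_t example_next_ticket = ATOMIC_INIT(0);

static __inline__ int example_take_ticket(void)
{
        return atomic_inc_return(&example_next_ticket);
}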

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
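
/*
 * Usage sketch (illustrative only; the release callback is
 * hypothetical): the canonical reference-count "put", freeing the
 * object exactly once, when the last reference is dropped.
 */
static __inline__ void example_put_ref(atomic_t *refcount,
        void (*release)(atomic_t *refcount))
{
        if (atomic_dec_and_test(refcount))
                release(refcount);      /* we dropped the last reference */
}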

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)       atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1,(v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1,(v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)

#ifdef CONFIG_64BIT

typedef struct { volatile __s64 counter; } atomic64_t;

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)        ((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v,i)       ((v)->counter = (i))

/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_add          \n"
                "       daddu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_add          \n"
                "       daddu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                local_irq_save(flags);
                v->counter += i;
                local_irq_restore(flags);
        }
}

/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_sub          \n"
                "       dsubu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_sub          \n"
                "       dsubu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                local_irq_save(flags);
                v->counter -= i;
                local_irq_restore(flags);
        }
}

/*
 * Same as above, but return the new value.
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_add_return   \n"
                "       daddu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       daddu   %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_add_return   \n"
                "       daddu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       daddu   %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = v->counter;
                result += i;
                v->counter = result;
                local_irq_restore(flags);
        }

        return result;
}

static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_return   \n"
                "       dsubu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       dsubu   %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_return   \n"
                "       dsubu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       dsubu   %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = v->counter;
                result -= i;
                v->counter = result;
                local_irq_restore(flags);
        }

        return result;
}

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqzl   %0, 1b                                  \n"
                "        dsubu  %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "       sync                                            \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqz    %0, 1b                                  \n"
                "        dsubu  %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "       sync                                            \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                local_irq_restore(flags);
        }

        return result;
}

#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)     atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1,(v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1,(v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

/*
 * The atomic*_return operations are serializing; the plain
 * (non-returning) versions are not.
 */
#define smp_mb__before_atomic_dec()     smp_mb()
#define smp_mb__after_atomic_dec()      smp_mb()
#define smp_mb__before_atomic_inc()     smp_mb()
#define smp_mb__after_atomic_inc()      smp_mb()
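
/*
 * Usage sketch (illustrative only; the helper name is hypothetical):
 * since the non-returning atomics above imply no ordering, pair them
 * with these barriers when earlier memory accesses must be visible
 * before the counter change.
 */
static __inline__ void example_ordered_dec(atomic_t *v)
{
        smp_mb__before_atomic_dec();    /* order earlier stores first */
        atomic_dec(v);
}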

#include <asm-generic/atomic.h>
#endif /* _ASM_ATOMIC_H */