#ifndef _ASM_IA64_ATOMIC_H
#define _ASM_IA64_ATOMIC_H

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * NOTE: don't mess with the types below!  The "unsigned long" and
 * "int" types were carefully placed so as to ensure proper operation
 * of the macros.
 *
 * Copyright (C) 1998, 1999, 2002-2003 Hewlett-Packard Co
 *	David Mosberger-Tang <davidm@hpl.hp.com>
 */
#include <linux/types.h>

#include <asm/intrinsics.h>

/*
 * On IA-64, the counter must always be volatile to ensure that the
 * memory accesses are ordered.
 */
typedef struct { volatile __s32 counter; } atomic_t;
typedef struct { volatile __s64 counter; } atomic64_t;

#define ATOMIC_INIT(i)		((atomic_t) { (i) })
#define ATOMIC64_INIT(i)	((atomic64_t) { (i) })

#define atomic_read(v)		((v)->counter)
#define atomic64_read(v)	((v)->counter)

#define atomic_set(v,i)		(((v)->counter) = (i))
#define atomic64_set(v,i)	(((v)->counter) = (i))
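
/*
 * Generic fallback for add/sub when the operand cannot be handled by the
 * IA-64 fetchadd instruction: read the counter, compute the new value,
 * and retry with an acquire compare-and-exchange until no other CPU has
 * modified the counter in the meantime.
 */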
static __inline__ int
ia64_atomic_add (int i, atomic_t *v)
{
	__s32 old, new;
	CMPXCHG_BUGCHECK_DECL

	do {
		CMPXCHG_BUGCHECK(v);
		old = atomic_read(v);
		new = old + i;
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic_t)) != old);
	return new;
}

static __inline__ __s64
ia64_atomic64_add (__s64 i, atomic64_t *v)
{
	__s64 old, new;
	CMPXCHG_BUGCHECK_DECL

	do {
		CMPXCHG_BUGCHECK(v);
		old = atomic64_read(v);
		new = old + i;
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic64_t)) != old);
	return new;
}

static __inline__ int
ia64_atomic_sub (int i, atomic_t *v)
{
	__s32 old, new;
	CMPXCHG_BUGCHECK_DECL

	do {
		CMPXCHG_BUGCHECK(v);
		old = atomic_read(v);
		new = old - i;
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic_t)) != old);
	return new;
}

static __inline__ __s64
ia64_atomic64_sub (__s64 i, atomic64_t *v)
{
	__s64 old, new;
	CMPXCHG_BUGCHECK_DECL

	do {
		CMPXCHG_BUGCHECK(v);
		old = atomic64_read(v);
		new = old - i;
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic64_t)) != old);
	return new;
}

#define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
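
/*
 * atomic_add_unless(v, a, u): add a to v unless v currently equals u.
 * Returns non-zero if the addition was performed.  atomic_inc_not_zero()
 * is the common special case: take a reference unless the object is
 * already dead (count == 0).
 */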
#define atomic_add_unless(v, a, u)				\
({								\
	int c, old;						\
	c = atomic_read(v);					\
	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
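
/*
 * The *_return macros below pick between two implementations: when the
 * operand is a compile-time constant that the IA-64 fetchadd instruction
 * can encode as an immediate (+/-1, 4, 8, or 16), a single fetchadd is
 * issued; any other operand falls back to the cmpxchg loops above.
 */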
#define atomic_add_return(i,v)						\
({									\
	int __ia64_aar_i = (i);						\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))	\
		? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter)	\
		: ia64_atomic_add(__ia64_aar_i, v);			\
})

#define atomic64_add_return(i,v)					\
({									\
	long __ia64_aar_i = (i);					\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))	\
		? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter)	\
		: ia64_atomic64_add(__ia64_aar_i, v);			\
})

/*
 * Atomically add I to V and return TRUE if the resulting value is
 * negative.
 */
static __inline__ int
atomic_add_negative (int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}

static __inline__ int
atomic64_add_negative (__s64 i, atomic64_t *v)
{
	return atomic64_add_return(i, v) < 0;
}
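
/*
 * Subtraction reuses the fetchadd fast path by negating the constant:
 * subtracting 4, for example, becomes a fetchadd of -4.  Non-constant
 * or non-encodable operands fall back to the cmpxchg-based helpers.
 */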
#define atomic_sub_return(i,v)						\
({									\
	int __ia64_asr_i = (i);						\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_asr_i ==  1) || (__ia64_asr_i ==   4)		\
	     || (__ia64_asr_i ==  8) || (__ia64_asr_i ==  16)		\
	     || (__ia64_asr_i == -1) || (__ia64_asr_i ==  -4)		\
	     || (__ia64_asr_i == -8) || (__ia64_asr_i == -16)))	\
		? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter)	\
		: ia64_atomic_sub(__ia64_asr_i, v);			\
})

#define atomic64_sub_return(i,v)					\
({									\
	long __ia64_asr_i = (i);					\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_asr_i ==  1) || (__ia64_asr_i ==   4)		\
	     || (__ia64_asr_i ==  8) || (__ia64_asr_i ==  16)		\
	     || (__ia64_asr_i == -1) || (__ia64_asr_i ==  -4)		\
	     || (__ia64_asr_i == -8) || (__ia64_asr_i == -16)))	\
		? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter)	\
		: ia64_atomic64_sub(__ia64_asr_i, v);			\
})

#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_inc_return(v)		atomic_add_return(1, (v))
#define atomic64_dec_return(v)		atomic64_sub_return(1, (v))
#define atomic64_inc_return(v)		atomic64_add_return(1, (v))

#define atomic_sub_and_test(i,v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
#define atomic_inc_and_test(v)		(atomic_add_return(1, (v)) == 0)
#define atomic64_sub_and_test(i,v)	(atomic64_sub_return((i), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_sub_return(1, (v)) == 0)
#define atomic64_inc_and_test(v)	(atomic64_add_return(1, (v)) == 0)

#define atomic_add(i,v)			atomic_add_return((i), (v))
#define atomic_sub(i,v)			atomic_sub_return((i), (v))
#define atomic_inc(v)			atomic_add(1, (v))
#define atomic_dec(v)			atomic_sub(1, (v))

#define atomic64_add(i,v)		atomic64_add_return((i), (v))
#define atomic64_sub(i,v)		atomic64_sub_return((i), (v))
#define atomic64_inc(v)			atomic64_add(1, (v))
#define atomic64_dec(v)			atomic64_sub(1, (v))
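
/*
 * Usage sketch (hypothetical caller, not part of this header): a simple
 * reference count built on these primitives.  my_obj_get(), my_obj_put()
 * and free_my_obj() are illustrative names only.
 *
 *	static atomic_t my_obj_refcount = ATOMIC_INIT(1);
 *
 *	void my_obj_get(void)
 *	{
 *		atomic_inc(&my_obj_refcount);
 *	}
 *
 *	void my_obj_put(void)
 *	{
 *		if (atomic_dec_and_test(&my_obj_refcount))
 *			free_my_obj();	// last reference dropped
 *	}
 */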

/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#include <asm-generic/atomic.h>
#endif /* _ASM_IA64_ATOMIC_H */