1 #ifndef _ASM_GENERIC_ATOMIC_H
2 #define _ASM_GENERIC_ATOMIC_H
/*
 * Copyright (C) 2005 Silicon Graphics, Inc.
 * Christoph Lameter <clameter@sgi.com>
 *
 * Allows to provide arch independent atomic definitions without the need to
 * edit all arch specific atomic.h files.
 */
11 #include <asm/types.h>
/*
 * Support for atomic_long_t
 *
 * Casts for parameters are avoided for existing atomic functions in order to
 * avoid issues with cast-as-lval under gcc 4.x and other limitations that the
 * macros of a platform may have.
 */
21 #if BITS_PER_LONG == 64
23 typedef atomic64_t atomic_long_t;
25 #define ATOMIC_LONG_INIT(i) ATOMIC64_INIT(i)
27 static inline long atomic_long_read(atomic_long_t *l)
29 atomic64_t *v = (atomic64_t *)l;
31 return (long)atomic64_read(v);
34 static inline void atomic_long_set(atomic_long_t *l, long i)
36 atomic64_t *v = (atomic64_t *)l;
41 static inline void atomic_long_inc(atomic_long_t *l)
43 atomic64_t *v = (atomic64_t *)l;
48 static inline void atomic_long_dec(atomic_long_t *l)
50 atomic64_t *v = (atomic64_t *)l;
55 static inline void atomic_long_add(long i, atomic_long_t *l)
57 atomic64_t *v = (atomic64_t *)l;
62 static inline void atomic_long_sub(long i, atomic_long_t *l)
64 atomic64_t *v = (atomic64_t *)l;
69 static inline int atomic_long_sub_and_test(long i, atomic_long_t *l)
71 atomic64_t *v = (atomic64_t *)l;
73 return atomic64_sub_and_test(i, v);
76 static inline int atomic_long_dec_and_test(atomic_long_t *l)
78 atomic64_t *v = (atomic64_t *)l;
80 return atomic64_dec_and_test(v);
83 static inline int atomic_long_inc_and_test(atomic_long_t *l)
85 atomic64_t *v = (atomic64_t *)l;
87 return atomic64_inc_and_test(v);
90 static inline int atomic_long_add_negative(long i, atomic_long_t *l)
92 atomic64_t *v = (atomic64_t *)l;
94 return atomic64_add_negative(i, v);
97 static inline long atomic_long_add_return(long i, atomic_long_t *l)
99 atomic64_t *v = (atomic64_t *)l;
101 return (long)atomic64_add_return(i, v);
104 static inline long atomic_long_sub_return(long i, atomic_long_t *l)
106 atomic64_t *v = (atomic64_t *)l;
108 return (long)atomic64_sub_return(i, v);
111 static inline long atomic_long_inc_return(atomic_long_t *l)
113 atomic64_t *v = (atomic64_t *)l;
115 return (long)atomic64_inc_return(v);
118 static inline long atomic_long_dec_return(atomic_long_t *l)
120 atomic64_t *v = (atomic64_t *)l;
122 return (long)atomic64_dec_return(v);
125 static inline long atomic_long_add_unless(atomic_long_t *l, long a, long u)
127 atomic64_t *v = (atomic64_t *)l;
129 return (long)atomic64_add_unless(v, a, u);
/* Forwards to atomic64_inc_not_zero() on the cast counter. */
#define atomic_long_inc_not_zero(l) atomic64_inc_not_zero((atomic64_t *)(l))
/*
 * Fix: on 64-bit these must forward to the 64-bit primitives. The
 * originals called atomic_cmpxchg()/atomic_xchg() on an atomic64_t,
 * and atomic_long_xchg() named its parameter 'v' while expanding 'l'
 * (an unbound name at the expansion site).
 */
#define atomic_long_cmpxchg(l, old, new) \
	(atomic64_cmpxchg((atomic64_t *)(l), (old), (new)))
#define atomic_long_xchg(l, new) \
	(atomic64_xchg((atomic64_t *)(l), (new)))
139 #else /* BITS_PER_LONG == 64 */
141 typedef atomic_t atomic_long_t;
143 #define ATOMIC_LONG_INIT(i) ATOMIC_INIT(i)
144 static inline long atomic_long_read(atomic_long_t *l)
146 atomic_t *v = (atomic_t *)l;
148 return (long)atomic_read(v);
151 static inline void atomic_long_set(atomic_long_t *l, long i)
153 atomic_t *v = (atomic_t *)l;
158 static inline void atomic_long_inc(atomic_long_t *l)
160 atomic_t *v = (atomic_t *)l;
165 static inline void atomic_long_dec(atomic_long_t *l)
167 atomic_t *v = (atomic_t *)l;
172 static inline void atomic_long_add(long i, atomic_long_t *l)
174 atomic_t *v = (atomic_t *)l;
179 static inline void atomic_long_sub(long i, atomic_long_t *l)
181 atomic_t *v = (atomic_t *)l;
186 static inline int atomic_long_sub_and_test(long i, atomic_long_t *l)
188 atomic_t *v = (atomic_t *)l;
190 return atomic_sub_and_test(i, v);
193 static inline int atomic_long_dec_and_test(atomic_long_t *l)
195 atomic_t *v = (atomic_t *)l;
197 return atomic_dec_and_test(v);
200 static inline int atomic_long_inc_and_test(atomic_long_t *l)
202 atomic_t *v = (atomic_t *)l;
204 return atomic_inc_and_test(v);
207 static inline int atomic_long_add_negative(long i, atomic_long_t *l)
209 atomic_t *v = (atomic_t *)l;
211 return atomic_add_negative(i, v);
214 static inline long atomic_long_add_return(long i, atomic_long_t *l)
216 atomic_t *v = (atomic_t *)l;
218 return (long)atomic_add_return(i, v);
221 static inline long atomic_long_sub_return(long i, atomic_long_t *l)
223 atomic_t *v = (atomic_t *)l;
225 return (long)atomic_sub_return(i, v);
228 static inline long atomic_long_inc_return(atomic_long_t *l)
230 atomic_t *v = (atomic_t *)l;
232 return (long)atomic_inc_return(v);
235 static inline long atomic_long_dec_return(atomic_long_t *l)
237 atomic_t *v = (atomic_t *)l;
239 return (long)atomic_dec_return(v);
242 static inline long atomic_long_add_unless(atomic_long_t *l, long a, long u)
244 atomic_t *v = (atomic_t *)l;
246 return (long)atomic_add_unless(v, a, u);
/* Forwards to atomic_inc_not_zero() on the cast counter. */
#define atomic_long_inc_not_zero(l) atomic_inc_not_zero((atomic_t *)(l))
251 #define atomic_long_cmpxchg(l, old, new) \
252 (atomic_cmpxchg((atomic_t *)(l), (old), (new)))
253 #define atomic_long_xchg(v, new) \
254 (atomic_xchg((atomic_t *)(l), (new)))
256 #endif /* BITS_PER_LONG == 64 */
258 #endif /* _ASM_GENERIC_ATOMIC_H */