#ifndef _ARCH_MIPS_LOCAL_H
#define _ARCH_MIPS_LOCAL_H

#include <linux/percpu.h>
#include <linux/bitops.h>
#include <asm/atomic.h>
#include <asm/cmpxchg.h>
#include <asm/war.h>

typedef struct
{
	atomic_long_t a;
} local_t;

#define LOCAL_INIT(i)	{ ATOMIC_LONG_INIT(i) }

#define local_read(l)	atomic_long_read(&(l)->a)
#define local_set(l, i)	atomic_long_set(&(l)->a, (i))

#define local_add(i, l)	atomic_long_add((i), (&(l)->a))
#define local_sub(i, l)	atomic_long_sub((i), (&(l)->a))
#define local_inc(l)	atomic_long_inc(&(l)->a)
#define local_dec(l)	atomic_long_dec(&(l)->a)
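
/*
 * Usage sketch (not part of the original header; the variable name
 * "pkt_count" is illustrative): a per-cpu statistics counter driven by
 * the wrappers above.
 *
 *	static DEFINE_PER_CPU(local_t, pkt_count) = LOCAL_INIT(0);
 *
 *	local_inc(&get_cpu_var(pkt_count));	count one packet
 *	put_cpu_var(pkt_count);
 *
 *	long seen = local_read(&per_cpu(pkt_count, cpu));	reader side
 */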

/*
 * Same as above, but return the result value
 */
static __inline__ long local_add_return(long i, local_t * l)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:"	__LL	"%1, %2		# local_add_return	\n"
		"	addu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:"	__LL	"%1, %2		# local_add_return	\n"
		"	addu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = l->a.counter;
		result += i;
		l->a.counter = result;
		local_irq_restore(flags);
	}

	return result;
}
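
/*
 * Usage sketch (illustrative names only): the returned value lets the
 * caller act on the post-addition total in the same atomic step, with no
 * separate local_read() that another context on this CPU could race.
 *
 *	if (local_add_return(len, &get_cpu_var(tx_bytes)) > TX_LIMIT)
 *		throttle();
 *	put_cpu_var(tx_bytes);
 */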

static __inline__ long local_sub_return(long i, local_t * l)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:"	__LL	"%1, %2		# local_sub_return	\n"
		"	subu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:"	__LL	"%1, %2		# local_sub_return	\n"
		"	subu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = l->a.counter;
		result -= i;
		l->a.counter = result;
		local_irq_restore(flags);
	}

	return result;
}

/*
 * local_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @l: pointer of type local_t
 *
 * Atomically test @l and subtract @i if @l is greater than or equal to @i.
 * The function returns the old value of @l minus @i.
 */
static __inline__ long local_sub_if_positive(long i, local_t * l)
{
	long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:"	__LL	"%1, %2		# local_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
			__SC	"%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:"	__LL	"%1, %2		# local_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
			__SC	"%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = l->a.counter;
		result -= i;
		if (result >= 0)
			l->a.counter = result;
		local_irq_restore(flags);
	}

	return result;
}
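
/*
 * Usage sketch (illustrative names only): consuming a credit only while
 * enough remain. Since the old value minus @i is returned, a negative
 * result means the subtraction was refused and nothing was stored.
 *
 *	if (local_sub_if_positive(1, &get_cpu_var(credits)) < 0)
 *		back off, this CPU is out of credits
 *	put_cpu_var(credits);
 */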

#define local_cmpxchg(l, o, n) \
	((long)cmpxchg_local(&((l)->a.counter), (o), (n)))
#define local_xchg(l, n) (xchg_local(&((l)->a.counter), (n)))

/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
#define local_add_unless(l, a, u)				\
({								\
	long c, old;						\
	c = local_read(l);					\
	while (c != (u) && (old = local_cmpxchg((l), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})
#define local_inc_not_zero(l) local_add_unless((l), 1, 0)
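
/*
 * Usage sketch (illustrative names only): a per-cpu reference count where
 * zero means "draining, take no new references". The check and increment
 * happen as one atomic step via local_cmpxchg(), so a drained counter can
 * never be resurrected.
 *
 *	if (!local_inc_not_zero(&get_cpu_var(slot_refs))) {
 *		put_cpu_var(slot_refs);
 *		return -EBUSY;
 *	}
 *	put_cpu_var(slot_refs);
 */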

#define local_dec_return(l) local_sub_return(1, (l))
#define local_inc_return(l) local_add_return(1, (l))

/*
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer of type local_t
 *
 * Atomically subtracts @i from @l and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define local_sub_and_test(i, l) (local_sub_return((i), (l)) == 0)

/*
 * local_inc_and_test - increment and test
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define local_inc_and_test(l) (local_inc_return(l) == 0)

/*
 * local_dec_and_test - decrement by 1 and test
 * @l: pointer of type local_t
 *
 * Atomically decrements @l by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define local_dec_and_test(l) (local_sub_return(1, (l)) == 0)
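
/*
 * Usage sketch for the *_and_test family (illustrative names only): the
 * atomic op and the zero test happen in one step, so "the count just hit
 * zero" is observed by exactly one context.
 *
 *	if (local_dec_and_test(&get_cpu_var(pending)))
 *		last pending item on this CPU just completed
 *	put_cpu_var(pending);
 */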

/*
 * local_dec_if_positive - decrement by 1 if old value positive
 * @l: pointer of type local_t
 */
#define local_dec_if_positive(l) local_sub_if_positive(1, l)

/*
 * local_add_negative - add and test if negative
 * @l: pointer of type local_t
 * @i: integer value to add
 *
 * Atomically adds @i to @l and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define local_add_negative(i, l) (local_add_return(i, (l)) < 0)

/* Use these for per-cpu local_t variables: on some archs they are
 * much more efficient than these naive implementations. Note they take
 * a variable, not an address.
 */
#define __local_inc(l)		((l)->a.counter++)
#define __local_dec(l)		((l)->a.counter--)
#define __local_add(i, l)	((l)->a.counter += (i))
#define __local_sub(i, l)	((l)->a.counter -= (i))

/* Need to disable preemption for the cpu local counters otherwise we could
   still access a variable of a previous CPU in a non-atomic way. */
#define cpu_local_wrap_v(l)		\
	({ long res__;			\
	   preempt_disable();		\
	   res__ = (l);			\
	   preempt_enable();		\
	   res__; })
#define cpu_local_wrap(l)		\
	({ preempt_disable();		\
	   l;				\
	   preempt_enable(); })

#define cpu_local_read(l)	cpu_local_wrap_v(local_read(&__get_cpu_var(l)))
#define cpu_local_set(l, i)	cpu_local_wrap(local_set(&__get_cpu_var(l), (i)))
#define cpu_local_inc(l)	cpu_local_wrap(local_inc(&__get_cpu_var(l)))
#define cpu_local_dec(l)	cpu_local_wrap(local_dec(&__get_cpu_var(l)))
#define cpu_local_add(i, l)	cpu_local_wrap(local_add((i), &__get_cpu_var(l)))
#define cpu_local_sub(i, l)	cpu_local_wrap(local_sub((i), &__get_cpu_var(l)))

#define __cpu_local_inc(l)	cpu_local_inc(l)
#define __cpu_local_dec(l)	cpu_local_dec(l)
#define __cpu_local_add(i, l)	cpu_local_add((i), (l))
#define __cpu_local_sub(i, l)	cpu_local_sub((i), (l))
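
/*
 * Usage sketch (illustrative name "nr_faults"): the cpu_local_* forms take
 * the per-cpu variable itself, not an address, and handle the preemption
 * disable/enable internally, so no get_cpu_var()/put_cpu_var() pair is
 * needed at the call site.
 *
 *	static DEFINE_PER_CPU(local_t, nr_faults) = LOCAL_INIT(0);
 *
 *	cpu_local_inc(nr_faults);
 *	printk("faults on this cpu so far: %ld\n", cpu_local_read(nr_faults));
 */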

#endif /* _ARCH_MIPS_LOCAL_H */