#ifndef _ARCH_MIPS_LOCAL_H
#define _ARCH_MIPS_LOCAL_H

#include <linux/percpu.h>
#include <linux/bitops.h>
#include <asm/atomic.h>
#include <asm/cmpxchg.h>
#include <asm/war.h>

typedef struct
{
	atomic_long_t a;
} local_t;

#define LOCAL_INIT(i)	{ ATOMIC_LONG_INIT(i) }

#define local_read(l)	atomic_long_read(&(l)->a)
#define local_set(l, i)	atomic_long_set(&(l)->a, (i))

#define local_add(i, l)	atomic_long_add((i), (&(l)->a))
#define local_sub(i, l)	atomic_long_sub((i), (&(l)->a))
#define local_inc(l)	atomic_long_inc(&(l)->a)
#define local_dec(l)	atomic_long_dec(&(l)->a)
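
/*
 * Usage sketch (illustrative only; "hits" is a hypothetical counter, not
 * part of this header): the operations above mirror their atomic_long_*
 * counterparts, but are only required to be atomic with respect to the
 * CPU that owns the counter.
 *
 *	local_t hits = LOCAL_INIT(0);
 *
 *	local_inc(&hits);
 *	local_add(3, &hits);
 *	local_sub(1, &hits);
 *	printk("hits = %ld\n", local_read(&hits));	=> prints "hits = 3"
 */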

/*
 * Same as above, but return the result value
 */
static __inline__ long local_add_return(long i, local_t * l)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		" .set mips3 \n"
		"1:" __LL "%1, %2 # local_add_return \n"
		" addu %0, %1, %3 \n"
		__SC "%0, %2 \n"
		" beqzl %0, 1b \n"
		" addu %0, %1, %3 \n"
		" .set mips0 \n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		" .set mips3 \n"
		"1:" __LL "%1, %2 # local_add_return \n"
		" addu %0, %1, %3 \n"
		__SC "%0, %2 \n"
		" beqz %0, 1b \n"
		" addu %0, %1, %3 \n"
		" .set mips0 \n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = l->a.counter;
		result += i;
		l->a.counter = result;
		local_irq_restore(flags);
	}

	return result;
}

static __inline__ long local_sub_return(long i, local_t * l)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		" .set mips3 \n"
		"1:" __LL "%1, %2 # local_sub_return \n"
		" subu %0, %1, %3 \n"
		__SC "%0, %2 \n"
		" beqzl %0, 1b \n"
		" subu %0, %1, %3 \n"
		" .set mips0 \n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		" .set mips3 \n"
		"1:" __LL "%1, %2 # local_sub_return \n"
		" subu %0, %1, %3 \n"
		__SC "%0, %2 \n"
		" beqz %0, 1b \n"
		" subu %0, %1, %3 \n"
		" .set mips0 \n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = l->a.counter;
		result -= i;
		l->a.counter = result;
		local_irq_restore(flags);
	}

	return result;
}
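
/*
 * Usage sketch (illustrative only; enter_section()/exit_section() are
 * hypothetical helpers): unlike local_add()/local_sub(), the *_return
 * variants hand back the new value, so a caller can modify and test in
 * one atomic step instead of performing a separate, racy local_read().
 *
 *	local_t depth = LOCAL_INIT(0);
 *
 *	if (local_add_return(1, &depth) == 1)
 *		enter_section();	first user on this CPU
 *	if (local_sub_return(1, &depth) == 0)
 *		exit_section();		last user on this CPU
 */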

/*
 * local_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @l: pointer of type local_t
 *
 * Atomically test @l and subtract @i if @l is greater than or equal to @i.
 * The function returns the old value of @l minus @i.
 */
static __inline__ long local_sub_if_positive(long i, local_t * l)
{
	long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		" .set mips3 \n"
		"1:" __LL "%1, %2 # local_sub_if_positive\n"
		" dsubu %0, %1, %3 \n"
		" bltz %0, 1f \n"
		__SC "%0, %2 \n"
		" .set noreorder \n"
		" beqzl %0, 1b \n"
		"  dsubu %0, %1, %3 \n"
		" .set reorder \n"
		"1: \n"
		" .set mips0 \n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		" .set mips3 \n"
		"1:" __LL "%1, %2 # local_sub_if_positive\n"
		" dsubu %0, %1, %3 \n"
		" bltz %0, 1f \n"
		__SC "%0, %2 \n"
		" .set noreorder \n"
		" beqz %0, 1b \n"
		"  dsubu %0, %1, %3 \n"
		" .set reorder \n"
		"1: \n"
		" .set mips0 \n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = l->a.counter;
		result -= i;
		if (result >= 0)
			l->a.counter = result;
		local_irq_restore(flags);
	}

	return result;
}
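
/*
 * Usage sketch (illustrative only; "budget", send_one() and drop_one()
 * are hypothetical): because the return value is the old value minus @i,
 * a negative result signals that the subtraction was refused and the
 * counter was left untouched.
 *
 *	local_t budget = LOCAL_INIT(8);
 *
 *	if (local_sub_if_positive(1, &budget) >= 0)
 *		send_one();		a token was available and consumed
 *	else
 *		drop_one();		budget exhausted, counter unchanged
 */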

#define local_cmpxchg(l, o, n) \
	((long)cmpxchg_local(&((l)->a.counter), (o), (n)))
#define local_xchg(l, n) (xchg_local(&((l)->a.counter), (n)))
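
/*
 * Usage sketch (illustrative only; "val" and CEILING are hypothetical):
 * local_cmpxchg() supports open-coded read-modify-write loops, e.g.
 * incrementing a counter while clamping it to an upper bound.
 *
 *	long old, new;
 *
 *	do {
 *		old = local_read(&val);
 *		new = old < CEILING ? old + 1 : old;
 *	} while (local_cmpxchg(&val, old, new) != old);
 */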

/*
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
#define local_add_unless(l, a, u)				\
({								\
	long c, old;						\
	c = local_read(l);					\
	while (c != (u) && (old = local_cmpxchg((l), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})

#define local_inc_not_zero(l) local_add_unless((l), 1, 0)
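
/*
 * Usage sketch (illustrative only; "obj" and its "refs" member are
 * hypothetical): local_inc_not_zero() is the usual "take a reference
 * only if the object is still live" idiom, where a count of zero means
 * the object is already being torn down.
 *
 *	if (!local_inc_not_zero(&obj->refs))
 *		return NULL;		object already on its way out
 *	return obj;
 */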

#define local_dec_return(l) local_sub_return(1, (l))
#define local_inc_return(l) local_add_return(1, (l))

/*
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer of type local_t
 *
 * Atomically subtracts @i from @l and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define local_sub_and_test(i, l) (local_sub_return((i), (l)) == 0)

/*
 * local_inc_and_test - increment and test
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define local_inc_and_test(l) (local_inc_return(l) == 0)

/*
 * local_dec_and_test - decrement by 1 and test
 * @l: pointer of type local_t
 *
 * Atomically decrements @l by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define local_dec_and_test(l) (local_sub_return(1, (l)) == 0)

/*
 * local_dec_if_positive - decrement by 1 if old value positive
 * @l: pointer of type local_t
 */
#define local_dec_if_positive(l) local_sub_if_positive(1, l)

/*
 * local_add_negative - add and test if negative
 * @l: pointer of type local_t
 * @i: integer value to add
 *
 * Atomically adds @i to @l and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */
#define local_add_negative(i, l) (local_add_return(i, (l)) < 0)
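
/*
 * Usage sketch (illustrative only; release_object() is hypothetical):
 * the *_and_test macros fold the update and the zero test into a single
 * atomic step, which is exactly what a reference-count release needs.
 *
 *	local_t refs = LOCAL_INIT(1);
 *
 *	if (local_dec_and_test(&refs))
 *		release_object();	count reached zero, tear down
 */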

/* Use these for per-cpu local_t variables: on some archs they are
 * much more efficient than these naive implementations.  Note they take
 * a variable, not an address.
 */

#define __local_inc(l)		((l)->a.counter++)
#define __local_dec(l)		((l)->a.counter--)
#define __local_add(i, l)	((l)->a.counter += (i))
#define __local_sub(i, l)	((l)->a.counter -= (i))
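
/*
 * Usage sketch (illustrative only; "stats" is a hypothetical local_t):
 * the non-atomic __local_* forms are only safe when nothing else on this
 * CPU, including interrupts, can touch the counter concurrently.
 *
 *	unsigned long flags;
 *
 *	local_irq_save(flags);
 *	__local_inc(&stats);
 *	local_irq_restore(flags);
 */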

/* Need to disable preemption for the cpu local counters, otherwise we could
   still access a variable of a previous CPU in a non-atomic way. */
#define cpu_local_wrap_v(l)		\
	({ local_t res__;		\
	   preempt_disable();		\
	   res__ = (l);			\
	   preempt_enable();		\
	   res__; })
#define cpu_local_wrap(l)		\
	({ preempt_disable();		\
	   l;				\
	   preempt_enable(); })

#define cpu_local_read(l)	cpu_local_wrap_v(local_read(&__get_cpu_var(l)))
#define cpu_local_set(l, i)	cpu_local_wrap(local_set(&__get_cpu_var(l), (i)))
#define cpu_local_inc(l)	cpu_local_wrap(local_inc(&__get_cpu_var(l)))
#define cpu_local_dec(l)	cpu_local_wrap(local_dec(&__get_cpu_var(l)))
#define cpu_local_add(i, l)	cpu_local_wrap(local_add((i), &__get_cpu_var(l)))
#define cpu_local_sub(i, l)	cpu_local_wrap(local_sub((i), &__get_cpu_var(l)))

#define __cpu_local_inc(l)	cpu_local_inc(l)
#define __cpu_local_dec(l)	cpu_local_dec(l)
#define __cpu_local_add(i, l)	cpu_local_add((i), (l))
#define __cpu_local_sub(i, l)	cpu_local_sub((i), (l))
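
/*
 * Usage sketch (illustrative only; "pkt_count" is a hypothetical per-cpu
 * variable): cpu_local_* take the name of a per-cpu local_t, not an
 * address, and bracket the access with preempt_disable()/preempt_enable()
 * so the task cannot migrate between picking this CPU's variable and
 * updating it.
 *
 *	DEFINE_PER_CPU(local_t, pkt_count) = LOCAL_INIT(0);
 *
 *	cpu_local_inc(pkt_count);
 *	printk("%ld\n", cpu_local_read(pkt_count));
 */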

#endif /* _ARCH_MIPS_LOCAL_H */