#ifndef _ARCH_I386_LOCAL_H
#define _ARCH_I386_LOCAL_H

#include <linux/percpu.h>
#include <asm/system.h>
#include <asm/atomic.h>

typedef struct
{
	atomic_long_t a;
} local_t;

#define LOCAL_INIT(i)	{ ATOMIC_LONG_INIT(i) }

#define local_read(l)	atomic_long_read(&(l)->a)
#define local_set(l,i)	atomic_long_set(&(l)->a, (i))
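/*
 * Illustrative sketch (pkt_count is a hypothetical counter, not part of
 * this API):
 *
 *	local_t pkt_count = LOCAL_INIT(0);
 *	long count;
 *
 *	local_set(&pkt_count, 10);
 *	count = local_read(&pkt_count);
 */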
static __inline__ void local_inc(local_t *l)
{
	__asm__ __volatile__(
		"incl %0"
		:"+m" (l->a.counter));
}
static __inline__ void local_dec(local_t *l)
{
	__asm__ __volatile__(
		"decl %0"
		:"+m" (l->a.counter));
}
static __inline__ void local_add(long i, local_t *l)
{
	__asm__ __volatile__(
		"addl %1,%0"
		:"+m" (l->a.counter)
		:"ir" (i));
}
static __inline__ void local_sub(long i, local_t *l)
{
	__asm__ __volatile__(
		"subl %1,%0"
		:"+m" (l->a.counter)
		:"ir" (i));
}
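/*
 * Constraint notes: "+m" makes the counter a read-write memory operand;
 * "ir" lets gcc pass the addend either as an immediate or in a register.
 */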
/**
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer of type local_t
 *
 * Atomically subtracts @i from @l and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int local_sub_and_test(long i, local_t *l)
{
	unsigned char c;

	__asm__ __volatile__(
		"subl %2,%0; sete %1"
		:"+m" (l->a.counter), "=qm" (c)
		:"ir" (i) : "memory");
	return c;
}
/**
 * local_dec_and_test - decrement and test
 * @l: pointer of type local_t
 *
 * Atomically decrements @l by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __inline__ int local_dec_and_test(local_t *l)
{
	unsigned char c;

	__asm__ __volatile__(
		"decl %0; sete %1"
		:"+m" (l->a.counter), "=qm" (c)
		: : "memory");
	return c != 0;
}
/**
 * local_inc_and_test - increment and test
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int local_inc_and_test(local_t *l)
{
	unsigned char c;

	__asm__ __volatile__(
		"incl %0; sete %1"
		:"+m" (l->a.counter), "=qm" (c)
		: : "memory");
	return c != 0;
}
/**
 * local_add_negative - add and test if negative
 * @l: pointer of type local_t
 * @i: integer value to add
 *
 * Atomically adds @i to @l and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __inline__ int local_add_negative(long i, local_t *l)
{
	unsigned char c;

	__asm__ __volatile__(
		"addl %2,%0; sets %1"
		:"+m" (l->a.counter), "=qm" (c)
		:"ir" (i) : "memory");
	return c;
}
/**
 * local_add_return - add and return
 * @l: pointer of type local_t
 * @i: integer value to add
 *
 * Atomically adds @i to @l and returns @i + @l
 */
static __inline__ long local_add_return(long i, local_t *l)
{
	long __i;
#ifdef CONFIG_M386
	unsigned long flags;
	if (unlikely(boot_cpu_data.x86 <= 3))
		goto no_xadd;
#endif
	/* Modern 486+ processor */
	__i = i;
	__asm__ __volatile__(
		"xaddl %0, %1;"
		:"+r" (i), "+m" (l->a.counter)
		: : "memory");
	return i + __i;

#ifdef CONFIG_M386
no_xadd: /* Legacy 386 processor */
	local_irq_save(flags);
	__i = local_read(l);
	local_set(l, i + __i);
	local_irq_restore(flags);
	return i + __i;
#endif
}
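/*
 * xaddl exchanges the register with the memory operand and then adds, so
 * after the asm @i holds the counter's old value; old + __i is the new
 * value.  The 386 lacks xadd, hence the irq-disabled fallback above.
 */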
static __inline__ long local_sub_return(long i, local_t *l)
{
	return local_add_return(-i,l);
}
#define local_inc_return(l) (local_add_return(1,l))
#define local_dec_return(l) (local_sub_return(1,l))
#define local_cmpxchg(l, o, n) \
	(cmpxchg_local(&((l)->a.counter), (o), (n)))
/* Always has a lock prefix */
#define local_xchg(l, n) (xchg(&((l)->a.counter), (n)))
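/*
 * local_cmpxchg uses cmpxchg_local, which omits the lock prefix; xchg
 * with a memory operand is implicitly locked on x86, which is why
 * local_xchg cannot avoid it.
 */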
/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
#define local_add_unless(l, a, u)				\
({								\
	long c, old;						\
	c = local_read(l);					\
	for (;;) {						\
		if (unlikely(c == (u)))				\
			break;					\
		old = local_cmpxchg((l), c, c + (a));		\
		if (likely(old == c))				\
			break;					\
		c = old;					\
	}							\
	c != (u);						\
})
#define local_inc_not_zero(l) local_add_unless((l), 1, 0)
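/*
 * Illustrative sketch (obj->refs is a hypothetical counter): take a
 * reference only if the object has not already dropped to zero:
 *
 *	if (!local_inc_not_zero(&obj->refs))
 *		return NULL;
 */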
/* On x86, these are no better than the atomic variants. */
#define __local_inc(l)		local_inc(l)
#define __local_dec(l)		local_dec(l)
#define __local_add(i,l)	local_add((i),(l))
#define __local_sub(i,l)	local_sub((i),(l))
/* Use these for per-cpu local_t variables: on some archs they are
 * much more efficient than these naive implementations.  Note they take
 * a variable, not an address.
 */

/* Need to disable preemption for the cpu local counters otherwise we could
   still access a variable of a previous CPU in a non-atomic way. */
#define cpu_local_wrap_v(l)		\
	({ typeof(l) res__;		\
	   preempt_disable();		\
	   res__ = (l);			\
	   preempt_enable();		\
	   res__; })
#define cpu_local_wrap(l)		\
	({ preempt_disable();		\
	   l;				\
	   preempt_enable(); })
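/*
 * The wrappers pin the task to one CPU for the duration of the access:
 * e.g. cpu_local_inc(x) expands roughly to
 *
 *	preempt_disable();
 *	local_inc(&__get_cpu_var(x));
 *	preempt_enable();
 *
 * so the per-cpu variable looked up and the one modified are guaranteed
 * to belong to the same CPU.
 */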
#define cpu_local_read(l)	cpu_local_wrap_v(local_read(&__get_cpu_var(l)))
#define cpu_local_set(l, i)	cpu_local_wrap(local_set(&__get_cpu_var(l), (i)))
#define cpu_local_inc(l)	cpu_local_wrap(local_inc(&__get_cpu_var(l)))
#define cpu_local_dec(l)	cpu_local_wrap(local_dec(&__get_cpu_var(l)))
#define cpu_local_add(i, l)	cpu_local_wrap(local_add((i), &__get_cpu_var(l)))
#define cpu_local_sub(i, l)	cpu_local_wrap(local_sub((i), &__get_cpu_var(l)))
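/*
 * Illustrative sketch, assuming a hypothetical per-cpu event counter:
 *
 *	static DEFINE_PER_CPU(local_t, nr_events);
 *
 *	cpu_local_inc(nr_events);
 *	cpu_local_add(16, nr_events);
 *
 * Note the macros take the variable itself, not its address.
 */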
#define __cpu_local_inc(l)	cpu_local_inc(l)
#define __cpu_local_dec(l)	cpu_local_dec(l)
#define __cpu_local_add(i, l)	cpu_local_add((i), (l))
#define __cpu_local_sub(i, l)	cpu_local_sub((i), (l))
#endif /* _ARCH_I386_LOCAL_H */