#ifndef __ARCH_BLACKFIN_ATOMIC__
#define __ARCH_BLACKFIN_ATOMIC__

#include <linux/types.h>
#include <asm/system.h>	/* local_irq_XXX() */
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * Generally we do not concern ourselves with SMP BFIN systems, so we
 * don't have to deal with that here.
 *
 * Tony Kou (tonyko@lineo.ca)	Lineo Inc.	2001
 */
#define ATOMIC_INIT(i)	{ (i) }
#define atomic_set(v, i)	(((v)->counter) = i)
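
/*
 * Two implementations follow.  With CONFIG_SMP the operations are handed
 * off to out-of-line assembly helpers (the __raw_*_asm prototypes below)
 * so they are atomic across cores; without CONFIG_SMP they simply run a
 * plain read-modify-write with hardware interrupts disabled.
 */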
#ifdef CONFIG_SMP

#define atomic_read(v)	__raw_uncached_fetch_asm(&(v)->counter)

asmlinkage int __raw_uncached_fetch_asm(const volatile int *ptr);
asmlinkage int __raw_atomic_update_asm(volatile int *ptr, int value);
asmlinkage int __raw_atomic_clear_asm(volatile int *ptr, int value);
asmlinkage int __raw_atomic_set_asm(volatile int *ptr, int value);
asmlinkage int __raw_atomic_xor_asm(volatile int *ptr, int value);
asmlinkage int __raw_atomic_test_asm(const volatile int *ptr, int value);
static inline void atomic_add(int i, atomic_t *v)
{
	__raw_atomic_update_asm(&v->counter, i);
}

static inline void atomic_sub(int i, atomic_t *v)
{
	__raw_atomic_update_asm(&v->counter, -i);
}

static inline int atomic_add_return(int i, atomic_t *v)
{
	return __raw_atomic_update_asm(&v->counter, i);
}

static inline int atomic_sub_return(int i, atomic_t *v)
{
	return __raw_atomic_update_asm(&v->counter, -i);
}

static inline void atomic_inc(volatile atomic_t *v)
{
	__raw_atomic_update_asm(&v->counter, 1);
}

static inline void atomic_dec(volatile atomic_t *v)
{
	__raw_atomic_update_asm(&v->counter, -1);
}

static inline void atomic_clear_mask(int mask, atomic_t *v)
{
	__raw_atomic_clear_asm(&v->counter, mask);
}

static inline void atomic_set_mask(int mask, atomic_t *v)
{
	__raw_atomic_set_asm(&v->counter, mask);
}

static inline int atomic_test_mask(int mask, atomic_t *v)
{
	return __raw_atomic_test_asm(&v->counter, mask);
}

/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
#else /* !CONFIG_SMP */

#define atomic_read(v)	((v)->counter)
static inline void atomic_add(int i, atomic_t *v)
{
	unsigned long flags;

	local_irq_save_hw(flags);
	v->counter += i;
	local_irq_restore_hw(flags);
}

static inline void atomic_sub(int i, atomic_t *v)
{
	unsigned long flags;

	local_irq_save_hw(flags);
	v->counter -= i;
	local_irq_restore_hw(flags);
}

static inline int atomic_add_return(int i, atomic_t *v)
{
	int __temp;
	unsigned long flags;

	local_irq_save_hw(flags);
	v->counter += i;
	__temp = v->counter;
	local_irq_restore_hw(flags);

	return __temp;
}

static inline int atomic_sub_return(int i, atomic_t *v)
{
	int __temp;
	unsigned long flags;

	local_irq_save_hw(flags);
	v->counter -= i;
	__temp = v->counter;
	local_irq_restore_hw(flags);

	return __temp;
}

static inline void atomic_inc(volatile atomic_t *v)
{
	unsigned long flags;

	local_irq_save_hw(flags);
	v->counter++;
	local_irq_restore_hw(flags);
}

static inline void atomic_dec(volatile atomic_t *v)
{
	unsigned long flags;

	local_irq_save_hw(flags);
	v->counter--;
	local_irq_restore_hw(flags);
}

static inline void atomic_clear_mask(unsigned int mask, atomic_t *v)
{
	unsigned long flags;

	local_irq_save_hw(flags);
	v->counter &= ~mask;
	local_irq_restore_hw(flags);
}

static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
{
	unsigned long flags;

	local_irq_save_hw(flags);
	v->counter |= mask;
	local_irq_restore_hw(flags);
}

/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#endif /* !CONFIG_SMP */
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
#define atomic_dec_return(v)	atomic_sub_return(1, (v))
#define atomic_inc_return(v)	atomic_add_return(1, (v))

#define atomic_cmpxchg(v, o, n)	((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new)	(xchg(&((v)->counter), new))
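
/*
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns non-zero if the add was performed, zero otherwise.
 */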
#define atomic_add_unless(v, a, u)				\
({								\
	int c, old;						\
	c = atomic_read(v);					\
	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v)	(atomic_inc_return(v) == 0)

#define atomic_sub_and_test(i, v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)	(atomic_sub_return(1, (v)) == 0)
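
/*
 * Example: the *_and_test helpers are typically used for reference
 * counting, where the caller that drops the last reference frees the
 * object.  The struct and helper names below are purely illustrative
 * (kfree() comes from <linux/slab.h>):
 *
 *	struct my_obj {
 *		atomic_t refcnt;
 *	};
 *
 *	static void my_obj_put(struct my_obj *obj)
 *	{
 *		if (atomic_dec_and_test(&obj->refcnt))
 *			kfree(obj);
 *	}
 */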
#include <asm-generic/atomic.h>

#endif	/* __ARCH_BLACKFIN_ATOMIC__ */