#ifndef __ARCH_H8300_ATOMIC__
#define __ARCH_H8300_ATOMIC__

#include <linux/types.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

#define ATOMIC_INIT(i)	{ (i) }

#define atomic_read(v)		((v)->counter)
#define atomic_set(v, i)	(((v)->counter) = i)

#include <asm/system.h>
#include <linux/kernel.h>

static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	unsigned long flags;
	int ret;

	/* No SMP on H8/300: masking interrupts makes the update atomic. */
	local_irq_save(flags);
	ret = v->counter += i;
	local_irq_restore(flags);
	return ret;
}

#define atomic_add(i, v) atomic_add_return(i, v)
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	unsigned long flags;
	int ret;

	local_irq_save(flags);
	ret = v->counter -= i;
	local_irq_restore(flags);
	return ret;
}

#define atomic_sub(i, v) atomic_sub_return(i, v)
#define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0)

static __inline__ int atomic_inc_return(atomic_t *v)
{
	unsigned long flags;
	int ret;

	local_irq_save(flags);
	v->counter++;
	ret = v->counter;
	local_irq_restore(flags);
	return ret;
}

#define atomic_inc(v) atomic_inc_return(v)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

static __inline__ int atomic_dec_return(atomic_t *v)
{
	unsigned long flags;
	int ret;

	local_irq_save(flags);
	--v->counter;
	ret = v->counter;
	local_irq_restore(flags);
	return ret;
}

#define atomic_dec(v) atomic_dec_return(v)

static __inline__ int atomic_dec_and_test(atomic_t *v)
{
	unsigned long flags;
	int ret;

	local_irq_save(flags);
	--v->counter;
	ret = v->counter;
	local_irq_restore(flags);
	return ret == 0;
}
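
/*
 * Illustrative sketch, not part of the original header: the common
 * reference-count "put" pattern built on atomic_dec_and_test().  The type
 * struct example and the helpers example_put()/example_release() are
 * hypothetical names used only for this sketch; when the count reaches
 * zero, the last holder frees the object.
 *
 *	static inline void example_put(struct example *obj)
 *	{
 *		if (atomic_dec_and_test(&obj->refcount))
 *			example_release(obj);
 *	}
 */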

static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	unsigned long flags;
	int ret;

	local_irq_save(flags);
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;
	local_irq_restore(flags);
	return ret;
}

#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
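
/*
 * Illustrative sketch, not part of the original header: a typical
 * compare-and-exchange retry loop on top of atomic_cmpxchg().  The function
 * name example_add_clamped() and its max argument are hypothetical; the loop
 * retries until no other path has changed the counter between the read and
 * the exchange.
 *
 *	static inline void example_add_clamped(atomic_t *v, int a, int max)
 *	{
 *		int old, new;
 *
 *		do {
 *			old = atomic_read(v);
 *			new = (old + a > max) ? max : old + a;
 *		} while (atomic_cmpxchg(v, old, new) != old);
 *	}
 */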

static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	unsigned long flags;
	int ret;

	local_irq_save(flags);
	ret = v->counter;
	if (ret != u)
		v->counter += a;
	local_irq_restore(flags);
	return ret != u;
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
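
/*
 * Illustrative sketch, not part of the original header: atomic_inc_not_zero()
 * takes a reference only while the count is still non-zero, the usual way to
 * look up an object whose last reference may be dropped concurrently.  The
 * names struct example and example_tryget() are hypothetical.
 *
 *	static inline int example_tryget(struct example *obj)
 *	{
 *		return atomic_inc_not_zero(&obj->refcount);
 *	}
 */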

static __inline__ void atomic_clear_mask(unsigned long mask, unsigned long *v)
{
	/* Save CCR, set the I bit to mask interrupts, AND out the bits, restore CCR. */
	__asm__ __volatile__("stc ccr,r1l\n\t"
			     "orc #0x80,ccr\n\t"
			     "mov.l %0,er0\n\t"
			     "and.l %1,er0\n\t"
			     "mov.l er0,%0\n\t"
			     "ldc r1l,ccr"
			     : "=m" (*v) : "g" (~(mask)) : "er0", "er1");
}

static __inline__ void atomic_set_mask(unsigned long mask, unsigned long *v)
{
	/* Same interrupt-masking sequence as atomic_clear_mask(), but ORs the bits in. */
	__asm__ __volatile__("stc ccr,r1l\n\t"
			     "orc #0x80,ccr\n\t"
			     "mov.l %0,er0\n\t"
			     "or.l %1,er0\n\t"
			     "mov.l er0,%0\n\t"
			     "ldc r1l,ccr"
			     : "=m" (*v) : "g" (mask) : "er0", "er1");
}

/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#include <asm-generic/atomic.h>
#endif /* __ARCH_H8300_ATOMIC__ */