#ifndef __ARCH_H8300_ATOMIC__
#define __ARCH_H8300_ATOMIC__

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */
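
/*
 * This port is uniprocessor, so the operations below are made atomic
 * by masking interrupts around each read-modify-write: an IRQ handler
 * on the same CPU can never observe a half-done update.
 */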

typedef struct { int counter; } atomic_t;
#define ATOMIC_INIT(i)	{ (i) }

#define atomic_read(v)		((v)->counter)
#define atomic_set(v, i)	(((v)->counter) = (i))
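
/*
 * Example usage (illustrative only; release_foo() is a hypothetical
 * helper, not part of this header):
 *
 *	static atomic_t refcnt = ATOMIC_INIT(1);
 *
 *	atomic_inc(&refcnt);
 *	if (atomic_dec_and_test(&refcnt))
 *		release_foo();
 */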

#include <asm/system.h>
#include <linux/kernel.h>
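
/*
 * Add @i to @v and return the new value.  The local_irq_save()/
 * local_irq_restore() pair keeps the read-modify-write from being
 * torn by an interrupt handler.
 */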
static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	unsigned long flags;
	int ret;

	local_irq_save(flags);
	ret = v->counter += i;
	local_irq_restore(flags);
	return ret;
}

#define atomic_add(i, v) atomic_add_return((i), (v))
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
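
/*
 * Subtract @i from @v and return the new value, again with interrupts
 * masked around the update.
 */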
static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	unsigned long flags;
	int ret;

	local_irq_save(flags);
	ret = v->counter -= i;
	local_irq_restore(flags);
	return ret;
}

#define atomic_sub(i, v) atomic_sub_return((i), (v))
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)
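
/*
 * Increment @v by 1 and return the new value.
 */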
static __inline__ int atomic_inc_return(atomic_t *v)
{
	unsigned long flags;
	int ret;

	local_irq_save(flags);
	ret = ++v->counter;
	local_irq_restore(flags);
	return ret;
}

#define atomic_inc(v) atomic_inc_return(v)

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
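
/*
 * Decrement @v by 1 and return the new value.
 */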
static __inline__ int atomic_dec_return(atomic_t *v)
{
	unsigned long flags;
	int ret;

	local_irq_save(flags);
	ret = --v->counter;
	local_irq_restore(flags);
	return ret;
}

#define atomic_dec(v) atomic_dec_return(v)
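
/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and returns true if the result is
 * zero, or false for all other cases.
 */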
static __inline__ int atomic_dec_and_test(atomic_t *v)
{
	unsigned long flags;
	int ret;

	local_irq_save(flags);
	ret = --v->counter;
	local_irq_restore(flags);
	return ret == 0;
}
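
/**
 * atomic_cmpxchg - compare and exchange
 * @v: pointer of type atomic_t
 * @old: expected value
 * @new: replacement value
 *
 * Atomically sets @v to @new if it currently holds @old.
 * Returns the value of @v before the operation.
 */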
static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;
	local_irq_restore(flags);
	return ret;
}

#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
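
/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to @v
 * @u: the value @v must not be for the add to happen
 *
 * Atomically adds @a to @v, so long as @v was not @u.
 * Returns non-zero if the add happened, zero otherwise.
 */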
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (ret != u)
		v->counter += a;
	local_irq_restore(flags);
	return ret != u;
}
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
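
/*
 * The mask operations below are H8/300H assembly: "stc ccr,r1l" saves
 * the condition code register, "orc #0x80,ccr" sets the I bit to mask
 * interrupts, the value is updated through the er0 scratch register,
 * and "ldc r1l,ccr" restores the saved interrupt state.  r1l is part
 * of er1, hence the er0/er1 clobbers.
 */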
static __inline__ void atomic_clear_mask(unsigned long mask, unsigned long *v)
{
	__asm__ __volatile__("stc ccr,r1l\n\t"
			     "orc #0x80,ccr\n\t"
			     "mov.l %0,er0\n\t"
			     "and.l %1,er0\n\t"
			     "mov.l er0,%0\n\t"
			     "ldc r1l,ccr"
			     : "+m" (*v) : "g" (~(mask)) : "er0", "er1");
}

static __inline__ void atomic_set_mask(unsigned long mask, unsigned long *v)
{
	__asm__ __volatile__("stc ccr,r1l\n\t"
			     "orc #0x80,ccr\n\t"
			     "mov.l %0,er0\n\t"
			     "or.l %1,er0\n\t"
			     "mov.l er0,%0\n\t"
			     "ldc r1l,ccr"
			     : "+m" (*v) : "g" (mask) : "er0", "er1");
}
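
/*
 * Example (illustrative; hw_flags is a hypothetical flag word): set,
 * then clear, bit 0 atomically.
 *
 *	static unsigned long hw_flags;
 *
 *	atomic_set_mask(0x01, &hw_flags);
 *	atomic_clear_mask(0x01, &hw_flags);
 */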

/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#include <asm-generic/atomic.h>
#endif /* __ARCH_H8300_ATOMIC__ */