#ifndef _ALPHA_SPINLOCK_H
#define _ALPHA_SPINLOCK_H

#include <linux/config.h>
#include <asm/system.h>
#include <linux/kernel.h>
#include <asm/current.h>

/*
 * Simple spin lock operations.  There are two variants, one clears IRQs
 * on the local processor, one does not.
 *
 * We make no fairness assumptions.  They have a cost.
 */
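
/*
 * The lock word is zero when the lock is free and nonzero while it is
 * held.  The extra fields below exist only under CONFIG_DEBUG_SPINLOCK,
 * to record which task took the lock and from where.
 */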
typedef struct {
	volatile unsigned int lock;
#ifdef CONFIG_DEBUG_SPINLOCK
	int on_cpu;
	int line_no;
	void *previous;
	struct task_struct * task;
	const char *base_file;
#endif
} spinlock_t;

#ifdef CONFIG_DEBUG_SPINLOCK
#define SPIN_LOCK_UNLOCKED	(spinlock_t){ 0, -1, 0, NULL, NULL, NULL }
#else
#define SPIN_LOCK_UNLOCKED	(spinlock_t){ 0 }
#endif

#define spin_lock_init(x)	do { *(x) = SPIN_LOCK_UNLOCKED; } while(0)
#define spin_is_locked(x)	((x)->lock != 0)
#define spin_unlock_wait(x)	do { barrier(); } while ((x)->lock)

#ifdef CONFIG_DEBUG_SPINLOCK
extern void _raw_spin_unlock(spinlock_t * lock);
extern void debug_spin_lock(spinlock_t * lock, const char *, int);
extern int debug_spin_trylock(spinlock_t * lock, const char *, int);

#define _raw_spin_lock(LOCK) \
	debug_spin_lock(LOCK, __BASE_FILE__, __LINE__)

#define _raw_spin_trylock(LOCK) \
	debug_spin_trylock(LOCK, __BASE_FILE__, __LINE__)

#else
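
/* Release: the barrier orders the critical section before the store
   that drops the lock. */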
static inline void _raw_spin_unlock(spinlock_t * lock)
{
	mb();
	lock->lock = 0;
}
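
/* Acquire with a load-locked/store-conditional (ldl_l/stl_c) retry
   loop; the contended spin is kept out of line in .subsection 2 so
   the uncontended path stays short. */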
static inline void _raw_spin_lock(spinlock_t * lock)
{
	long tmp;

	__asm__ __volatile__(
	"1:	ldl_l	%0,%1\n"
	"	bne	%0,2f\n"
	"	lda	%0,1\n"
	"	stl_c	%0,%1\n"
	"	beq	%0,2f\n"
	"	mb\n"
	".subsection 2\n"
	"2:	ldl	%0,%1\n"
	"	bne	%0,2b\n"
	"	br	1b\n"
	".previous"
	: "=&r" (tmp), "=m" (lock->lock)
	: "m"(lock->lock) : "memory");
}
static inline int _raw_spin_trylock(spinlock_t *lock)
{
	return !test_and_set_bit(0, &lock->lock);
}
#endif /* CONFIG_DEBUG_SPINLOCK */

#define _raw_spin_lock_flags(lock, flags) _raw_spin_lock(lock)

/***********************************************************/
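
/*
 * Read-write locks.  Bit 0 of the lock word is set while a writer
 * holds the lock; each reader subtracts 2, so the word is zero only
 * when the lock is completely free.
 */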
typedef struct {
	volatile unsigned int lock;
} rwlock_t;

#define RW_LOCK_UNLOCKED	(rwlock_t){ 0 }

#define rwlock_init(x)	do { *(x) = RW_LOCK_UNLOCKED; } while(0)

static inline int read_can_lock(rwlock_t *lock)
{
	return (lock->lock & 1) == 0;
}

static inline int write_can_lock(rwlock_t *lock)
{
	return lock->lock == 0;
}

#ifdef CONFIG_DEBUG_RWLOCK
extern void _raw_write_lock(rwlock_t * lock);
extern void _raw_read_lock(rwlock_t * lock);
#else
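
/* Writer acquire: wait for the whole lock word to reach zero, then
   claim it by storing 1 via ldl_l/stl_c. */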
static inline void _raw_write_lock(rwlock_t * lock)
{
	long regx;

	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"
	"	bne	%1,6f\n"
	"	lda	%1,1\n"
	"	stl_c	%1,%0\n"
	"	beq	%1,6f\n"
	"	mb\n"
	".subsection 2\n"
	"6:	ldl	%1,%0\n"
	"	bne	%1,6b\n"
	"	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx)
	: "m" (*lock) : "memory");
}
static inline void _raw_read_lock(rwlock_t * lock)
{
	long regx;

	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"
	"	blbs	%1,6f\n"
	"	subl	%1,2,%1\n"
	"	stl_c	%1,%0\n"
	"	beq	%1,6f\n"
	"	mb\n"
	".subsection 2\n"
	"6:	ldl	%1,%0\n"
	"	blbs	%1,6b\n"
	"	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx)
	: "m" (*lock) : "memory");
}

#endif /* CONFIG_DEBUG_RWLOCK */
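
/* One-shot reader acquire: fails only when a writer holds the lock;
   a failed store-conditional is simply retried. */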
static inline int _raw_read_trylock(rwlock_t * lock)
{
	long regx;
	int success;

	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"
	"	lda	%2,0\n"
	"	blbs	%1,2f\n"
	"	subl	%1,2,%2\n"
	"	stl_c	%2,%0\n"
	"	beq	%2,6f\n"
	"2:	mb\n"
	".subsection 2\n"
	"6:	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx), "=&r" (success)
	: "m" (*lock) : "memory");

	return success;
}
static inline int _raw_write_trylock(rwlock_t * lock)
{
	long regx;
	int success;

	__asm__ __volatile__(
	"1:	ldl_l	%1,%0\n"
	"	lda	%2,0\n"
	"	bne	%1,2f\n"
	"	lda	%2,1\n"
	"	stl_c	%2,%0\n"
	"	beq	%2,6f\n"
	"2:	mb\n"
	".subsection 2\n"
	"6:	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx), "=&r" (success)
	: "m" (*lock) : "memory");

	return success;
}
static inline void _raw_write_unlock(rwlock_t * lock)
{
	mb();
	lock->lock = 0;
}
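
/* Reader release: barrier, then add 2 back to the lock word. */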
static inline void _raw_read_unlock(rwlock_t * lock)
{
	long regx;
	__asm__ __volatile__(
	"	mb\n"
	"1:	ldl_l	%1,%0\n"
	"	addl	%1,2,%1\n"
	"	stl_c	%1,%0\n"
	"	beq	%1,6f\n"
	".subsection 2\n"
	"6:	br	1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx)
	: "m" (*lock) : "memory");
}

#endif /* _ALPHA_SPINLOCK_H */