[PATCH] vmalloc: optimization, cleanup, bugfixes
[linux-2.6] include/asm-alpha/spinlock.h
#ifndef _ALPHA_SPINLOCK_H
#define _ALPHA_SPINLOCK_H

#include <asm/system.h>
#include <linux/kernel.h>
#include <asm/current.h>

/*
 * Simple spin lock operations.  There are two variants: one disables
 * IRQs on the local processor, the other does not.
 *
 * We make no fairness guarantees; fairness has a cost.
 */

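/*
 * A raw spinlock word is 0 when free; the lock routines below store 1
 * to take it.  __raw_spin_lock_flags ignores the saved flags, i.e. this
 * port does not re-enable interrupts while waiting for a contended
 * lock.  The __raw_* operations are not called directly; the generic
 * <linux/spinlock.h> layer wraps them.
 *
 * Illustrative sketch only (not part of the upstream header); the lock
 * 'example_lock' and example_usage() are hypothetical names:
 */
#if 0
static DEFINE_SPINLOCK(example_lock);

static void example_usage(void)
{
        unsigned long flags;

        spin_lock_irqsave(&example_lock, flags);  /* IRQ-disabling variant */
        /* ... critical section ... */
        spin_unlock_irqrestore(&example_lock, flags);

        spin_lock(&example_lock);                 /* non-IRQ variant */
        /* ... critical section ... */
        spin_unlock(&example_lock);
}
#endif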
#define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock)
#define __raw_spin_is_locked(x) ((x)->lock != 0)
#define __raw_spin_unlock_wait(x) \
                do { cpu_relax(); } while ((x)->lock)

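/*
 * Release: the mb() orders every access made inside the critical
 * section before the plain store that marks the lock free.
 */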
static inline void __raw_spin_unlock(raw_spinlock_t * lock)
{
        mb();
        lock->lock = 0;
}

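/*
 * Acquire with a load-locked/store-conditional (ldl_l/stl_c) retry
 * loop.  The contended path lives out of line in .subsection 2 and
 * spins with plain loads until the lock looks free, then branches back
 * to retry the locked sequence; the trailing mb orders the critical
 * section after the acquisition.
 */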
static inline void __raw_spin_lock(raw_spinlock_t * lock)
{
        long tmp;

        __asm__ __volatile__(
        "1:     ldl_l   %0,%1\n"
        "       bne     %0,2f\n"
        "       lda     %0,1\n"
        "       stl_c   %0,%1\n"
        "       beq     %0,2f\n"
        "       mb\n"
        ".subsection 2\n"
        "2:     ldl     %0,%1\n"
        "       bne     %0,2b\n"
        "       br      1b\n"
        ".previous"
        : "=&r" (tmp), "=m" (lock->lock)
        : "m"(lock->lock) : "memory");
}

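/*
 * Trylock: atomically set bit 0 of the lock word; the attempt succeeds
 * (returns nonzero) only if the bit was previously clear.
 */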
static inline int __raw_spin_trylock(raw_spinlock_t *lock)
{
        return !test_and_set_bit(0, &lock->lock);
}

/***********************************************************/

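/*
 * rwlock encoding: bit 0 is owned by a writer; each active reader
 * subtracts 2 from the lock word.  The lock is completely free only
 * when the word is 0, and readers may enter whenever bit 0 is clear.
 */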
static inline int __raw_read_can_lock(raw_rwlock_t *lock)
{
        return (lock->lock & 1) == 0;
}

static inline int __raw_write_can_lock(raw_rwlock_t *lock)
{
        return lock->lock == 0;
}

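/*
 * Reader acquire: spin out of line while a writer owns bit 0, then
 * account this reader by subtracting 2 with an ldl_l/stl_c sequence,
 * retrying if the store-conditional fails.
 */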
static inline void __raw_read_lock(raw_rwlock_t *lock)
{
        long regx;

        __asm__ __volatile__(
        "1:     ldl_l   %1,%0\n"
        "       blbs    %1,6f\n"
        "       subl    %1,2,%1\n"
        "       stl_c   %1,%0\n"
        "       beq     %1,6f\n"
        "       mb\n"
        ".subsection 2\n"
        "6:     ldl     %1,%0\n"
        "       blbs    %1,6b\n"
        "       br      1b\n"
        ".previous"
        : "=m" (*lock), "=&r" (regx)
        : "m" (*lock) : "memory");
}

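/*
 * Writer acquire: wait until the word is 0 (no readers, no writer),
 * then claim it by storing 1 with an ldl_l/stl_c sequence.
 */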
static inline void __raw_write_lock(raw_rwlock_t *lock)
{
        long regx;

        __asm__ __volatile__(
        "1:     ldl_l   %1,%0\n"
        "       bne     %1,6f\n"
        "       lda     %1,1\n"
        "       stl_c   %1,%0\n"
        "       beq     %1,6f\n"
        "       mb\n"
        ".subsection 2\n"
        "6:     ldl     %1,%0\n"
        "       bne     %1,6b\n"
        "       br      1b\n"
        ".previous"
        : "=m" (*lock), "=&r" (regx)
        : "m" (*lock) : "memory");
}

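/*
 * Reader trylock: a single ldl_l/stl_c attempt.  Returns 0 if a writer
 * owns bit 0; a failed store-conditional is not a contention failure,
 * so it simply branches back and tries again.
 */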
static inline int __raw_read_trylock(raw_rwlock_t * lock)
{
        long regx;
        int success;

        __asm__ __volatile__(
        "1:     ldl_l   %1,%0\n"
        "       lda     %2,0\n"
        "       blbs    %1,2f\n"
        "       subl    %1,2,%2\n"
        "       stl_c   %2,%0\n"
        "       beq     %2,6f\n"
        "2:     mb\n"
        ".subsection 2\n"
        "6:     br      1b\n"
        ".previous"
        : "=m" (*lock), "=&r" (regx), "=&r" (success)
        : "m" (*lock) : "memory");

        return success;
}

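/*
 * Writer trylock: returns 0 if the word is nonzero (readers or a
 * writer are present); otherwise stores 1, retrying only when the
 * store-conditional itself fails.
 */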
static inline int __raw_write_trylock(raw_rwlock_t * lock)
{
        long regx;
        int success;

        __asm__ __volatile__(
        "1:     ldl_l   %1,%0\n"
        "       lda     %2,0\n"
        "       bne     %1,2f\n"
        "       lda     %2,1\n"
        "       stl_c   %2,%0\n"
        "       beq     %2,6f\n"
        "2:     mb\n"
        ".subsection 2\n"
        "6:     br      1b\n"
        ".previous"
        : "=m" (*lock), "=&r" (regx), "=&r" (success)
        : "m" (*lock) : "memory");

        return success;
}

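/*
 * Reader release: the leading mb orders the critical section before
 * the release, then the reader's contribution is undone by adding 2
 * back, retrying on store-conditional failure.
 */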
static inline void __raw_read_unlock(raw_rwlock_t * lock)
{
        long regx;
        __asm__ __volatile__(
        "       mb\n"
        "1:     ldl_l   %1,%0\n"
        "       addl    %1,2,%1\n"
        "       stl_c   %1,%0\n"
        "       beq     %1,6f\n"
        ".subsection 2\n"
        "6:     br      1b\n"
        ".previous"
        : "=m" (*lock), "=&r" (regx)
        : "m" (*lock) : "memory");
}

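/*
 * Writer release: as with the spinlock, a barrier followed by a plain
 * store of 0.
 */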
static inline void __raw_write_unlock(raw_rwlock_t * lock)
{
        mb();
        lock->lock = 0;
}

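/*
 * Busy-wait hints used by the generic lock code while it spins.
 */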
#define _raw_spin_relax(lock)   cpu_relax()
#define _raw_read_relax(lock)   cpu_relax()
#define _raw_write_relax(lock)  cpu_relax()

#endif /* _ALPHA_SPINLOCK_H */