/*
 * (extraction residue from web viewer — unrelated commit headline kept for
 *  provenance: "V4L/DVB (5846): Clean up setting state and scheduling timeouts")
 * [linux-2.6] include/asm-mips/hazards.h
 */
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 2004 Ralf Baechle <ralf@linux-mips.org>
 * Copyright (C) MIPS Technologies, Inc.
 *   written by Ralf Baechle <ralf@linux-mips.org>
 */
10 #ifndef _ASM_HAZARDS_H
11 #define _ASM_HAZARDS_H
12
13
14 #ifdef __ASSEMBLY__
15 #define ASMMACRO(name, code...) .macro name; code; .endm
16 #else
17
18 #define ASMMACRO(name, code...)                                         \
19 __asm__(".macro " #name "; " #code "; .endm");                          \
20                                                                         \
21 static inline void name(void)                                           \
22 {                                                                       \
23         __asm__ __volatile__ (#name);                                   \
24 }
25
26 #endif
27
28 ASMMACRO(_ssnop,
29          sll    $0, $0, 1
30         )
31
32 ASMMACRO(_ehb,
33          sll    $0, $0, 3
34         )
35
36 /*
37  * TLB hazards
38  */
39 #if defined(CONFIG_CPU_MIPSR2)
40
41 /*
42  * MIPSR2 defines ehb for hazard avoidance
43  */
44
45 ASMMACRO(mtc0_tlbw_hazard,
46          _ehb
47         )
48 ASMMACRO(tlbw_use_hazard,
49          _ehb
50         )
51 ASMMACRO(tlb_probe_hazard,
52          _ehb
53         )
54 ASMMACRO(irq_enable_hazard,
55          _ehb
56         )
57 ASMMACRO(irq_disable_hazard,
58         _ehb
59         )
60 ASMMACRO(back_to_back_c0_hazard,
61          _ehb
62         )
63 /*
64  * gcc has a tradition of misscompiling the previous construct using the
65  * address of a label as argument to inline assembler.  Gas otoh has the
66  * annoying difference between la and dla which are only usable for 32-bit
67  * rsp. 64-bit code, so can't be used without conditional compilation.
68  * The alterantive is switching the assembler to 64-bit code which happens
69  * to work right even for 32-bit code ...
70  */
71 #define instruction_hazard()                                            \
72 do {                                                                    \
73         unsigned long tmp;                                              \
74                                                                         \
75         __asm__ __volatile__(                                           \
76         "       .set    mips64r2                                \n"     \
77         "       dla     %0, 1f                                  \n"     \
78         "       jr.hb   %0                                      \n"     \
79         "       .set    mips0                                   \n"     \
80         "1:                                                     \n"     \
81         : "=r" (tmp));                                                  \
82 } while (0)
83
84 #elif defined(CONFIG_CPU_R10000)
85
86 /*
87  * R10000 rocks - all hazards handled in hardware, so this becomes a nobrainer.
88  */
89
90 ASMMACRO(mtc0_tlbw_hazard,
91         )
92 ASMMACRO(tlbw_use_hazard,
93         )
94 ASMMACRO(tlb_probe_hazard,
95         )
96 ASMMACRO(irq_enable_hazard,
97         )
98 ASMMACRO(irq_disable_hazard,
99         )
100 ASMMACRO(back_to_back_c0_hazard,
101         )
102 #define instruction_hazard() do { } while (0)
103
104 #elif defined(CONFIG_CPU_RM9000)
105
106 /*
107  * RM9000 hazards.  When the JTLB is updated by tlbwi or tlbwr, a subsequent
108  * use of the JTLB for instructions should not occur for 4 cpu cycles and use
109  * for data translations should not occur for 3 cpu cycles.
110  */
111
112 ASMMACRO(mtc0_tlbw_hazard,
113          _ssnop; _ssnop; _ssnop; _ssnop
114         )
115 ASMMACRO(tlbw_use_hazard,
116          _ssnop; _ssnop; _ssnop; _ssnop
117         )
118 ASMMACRO(tlb_probe_hazard,
119          _ssnop; _ssnop; _ssnop; _ssnop
120         )
121 ASMMACRO(irq_enable_hazard,
122         )
123 ASMMACRO(irq_disable_hazard,
124         )
125 ASMMACRO(back_to_back_c0_hazard,
126         )
127 #define instruction_hazard() do { } while (0)
128
129 #elif defined(CONFIG_CPU_SB1)
130
131 /*
132  * Mostly like R4000 for historic reasons
133  */
134 ASMMACRO(mtc0_tlbw_hazard,
135         )
136 ASMMACRO(tlbw_use_hazard,
137         )
138 ASMMACRO(tlb_probe_hazard,
139         )
140 ASMMACRO(irq_enable_hazard,
141         )
142 ASMMACRO(irq_disable_hazard,
143          _ssnop; _ssnop; _ssnop
144         )
145 ASMMACRO(back_to_back_c0_hazard,
146         )
147 #define instruction_hazard() do { } while (0)
148
149 #else
150
151 /*
152  * Finally the catchall case for all other processors including R4000, R4400,
153  * R4600, R4700, R5000, RM7000, NEC VR41xx etc.
154  *
155  * The taken branch will result in a two cycle penalty for the two killed
156  * instructions on R4000 / R4400.  Other processors only have a single cycle
157  * hazard so this is nice trick to have an optimal code for a range of
158  * processors.
159  */
160 ASMMACRO(mtc0_tlbw_hazard,
161         nop; nop
162         )
163 ASMMACRO(tlbw_use_hazard,
164         nop; nop; nop
165         )
166 ASMMACRO(tlb_probe_hazard,
167          nop; nop; nop
168         )
169 ASMMACRO(irq_enable_hazard,
170         )
171 ASMMACRO(irq_disable_hazard,
172         nop; nop; nop
173         )
174 ASMMACRO(back_to_back_c0_hazard,
175          _ssnop; _ssnop; _ssnop;
176         )
177 #define instruction_hazard() do { } while (0)
178
179 #endif
180
181
182 /* FPU hazards */
183
184 #if defined(CONFIG_CPU_SB1)
185 ASMMACRO(enable_fpu_hazard,
186          .set   push;
187          .set   mips64;
188          .set   noreorder;
189          _ssnop;
190          bnezl  $0,.+4;
191          _ssnop;
192          .set   pop
193 )
194 ASMMACRO(disable_fpu_hazard,
195 )
196
197 #elif defined(CONFIG_CPU_MIPSR2)
198 ASMMACRO(enable_fpu_hazard,
199          _ehb
200 )
201 ASMMACRO(disable_fpu_hazard,
202          _ehb
203 )
204 #else
205 ASMMACRO(enable_fpu_hazard,
206          nop; nop; nop; nop
207 )
208 ASMMACRO(disable_fpu_hazard,
209          _ehb
210 )
211 #endif
212
213 #endif /* _ASM_HAZARDS_H */