/* linux-2.6: arch/m68k/include/asm/uaccess_mm.h */
#ifndef __M68K_UACCESS_H
#define __M68K_UACCESS_H

/*
 * User space memory access functions
 */
#include <linux/compiler.h>
#include <linux/errno.h>
#include <linux/types.h>
#include <linux/sched.h>
#include <asm/segment.h>

#define VERIFY_READ     0
#define VERIFY_WRITE    1

/* We let the MMU do all checking */
static inline int access_ok(int type, const void __user *addr,
                            unsigned long size)
{
        return 1;
}

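/*
 * Usage sketch (illustrative only, not part of this header): generic code
 * typically checks a range before doing raw user accesses, e.g.
 *
 *      if (!access_ok(VERIFY_READ, ubuf, len))
 *              return -EFAULT;
 *
 * On m68k with an MMU this always succeeds; bad accesses are caught by the
 * exception table mechanism described below instead.
 */
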
/*
 * The exception table consists of pairs of addresses: the first is the
 * address of an instruction that is allowed to fault, and the second is
 * the address at which the program should continue.  No registers are
 * modified, so it is entirely up to the continuation code to figure out
 * what to do.
 *
 * All the routines below use bits of fixup code that are out of line
 * with the main instruction path.  This means when everything is well,
 * we don't even have to jump over them.  Further, they do not intrude
 * on our cache or tlb entries.
 */

struct exception_table_entry
{
        unsigned long insn, fixup;
};

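/*
 * Illustrative sketch (assumed flow, not part of this header): when one of
 * the marked instructions faults, the fault handler can look up the
 * faulting PC and continue at the recorded fixup address, roughly:
 *
 *      const struct exception_table_entry *e;
 *
 *      e = search_exception_tables(faulting_pc);
 *      if (e)
 *              resume execution at e->fixup;
 */
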
extern int __put_user_bad(void);
extern int __get_user_bad(void);

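/*
 * __put_user_asm(res, x, ptr, bwl, reg, err): store x to the user pointer
 * ptr with a single moves.<bwl> instruction.  res is left untouched on
 * success (callers pre-set it to 0) and set to err if the access faults;
 * bwl is the size suffix (b/w/l) and reg the constraint ("d" or "r") used
 * for x.
 */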
#define __put_user_asm(res, x, ptr, bwl, reg, err)      \
asm volatile ("\n"                                      \
        "1:     moves."#bwl"    %2,%1\n"                \
        "2:\n"                                          \
        "       .section .fixup,\"ax\"\n"               \
        "       .even\n"                                \
        "10:    moveq.l %3,%0\n"                        \
        "       jra 2b\n"                               \
        "       .previous\n"                            \
        "\n"                                            \
        "       .section __ex_table,\"a\"\n"            \
        "       .align  4\n"                            \
        "       .long   1b,10b\n"                       \
        "       .long   2b,10b\n"                       \
        "       .previous"                              \
        : "+d" (res), "=m" (*(ptr))                     \
        : #reg (x), "i" (err))

/*
 * These are the main single-value transfer routines.  They automatically
 * use the right size if we just have the right pointer type.
 */

#define __put_user(x, ptr)                                              \
({                                                                      \
        typeof(*(ptr)) __pu_val = (x);                                  \
        int __pu_err = 0;                                               \
        __chk_user_ptr(ptr);                                            \
        switch (sizeof (*(ptr))) {                                      \
        case 1:                                                         \
                __put_user_asm(__pu_err, __pu_val, ptr, b, d, -EFAULT); \
                break;                                                  \
        case 2:                                                         \
                __put_user_asm(__pu_err, __pu_val, ptr, w, d, -EFAULT); \
                break;                                                  \
        case 4:                                                         \
                __put_user_asm(__pu_err, __pu_val, ptr, l, r, -EFAULT); \
                break;                                                  \
        case 8:                                                         \
            {                                                           \
                const void __user *__pu_ptr = (ptr);                    \
                asm volatile ("\n"                                      \
                        "1:     moves.l %2,(%1)+\n"                     \
                        "2:     moves.l %R2,(%1)\n"                     \
                        "3:\n"                                          \
                        "       .section .fixup,\"ax\"\n"               \
                        "       .even\n"                                \
                        "10:    movel %3,%0\n"                          \
                        "       jra 3b\n"                               \
                        "       .previous\n"                            \
                        "\n"                                            \
                        "       .section __ex_table,\"a\"\n"            \
                        "       .align 4\n"                             \
                        "       .long 1b,10b\n"                         \
                        "       .long 2b,10b\n"                         \
                        "       .long 3b,10b\n"                         \
                        "       .previous"                              \
                        : "+d" (__pu_err), "+a" (__pu_ptr)              \
                        : "r" (__pu_val), "i" (-EFAULT)                 \
                        : "memory");                                    \
                break;                                                  \
            }                                                           \
        default:                                                        \
                __pu_err = __put_user_bad();                            \
                break;                                                  \
        }                                                               \
        __pu_err;                                                       \
})
#define put_user(x, ptr)        __put_user(x, ptr)
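
/*
 * Usage sketch (hypothetical caller, not part of this header):
 *
 *      int __user *uptr = ...;         // pointer supplied by user space
 *      int err = put_user(42, uptr);   // 0 on success, -EFAULT on fault
 */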
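/*
 * __get_user_asm(res, x, ptr, type, bwl, reg, err): load one value of the
 * given type from the user pointer ptr with a single moves.<bwl>
 * instruction and assign it to x.  On a fault, res is set to err and the
 * destination is zeroed; reg is the constraint ("d" or "r") for the
 * temporary register.
 */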
#define __get_user_asm(res, x, ptr, type, bwl, reg, err) ({     \
        type __gu_val;                                          \
        asm volatile ("\n"                                      \
                "1:     moves."#bwl"    %2,%1\n"                \
                "2:\n"                                          \
                "       .section .fixup,\"ax\"\n"               \
                "       .even\n"                                \
                "10:    move.l  %3,%0\n"                        \
                "       sub."#bwl"      %1,%1\n"                \
                "       jra     2b\n"                           \
                "       .previous\n"                            \
                "\n"                                            \
                "       .section __ex_table,\"a\"\n"            \
                "       .align  4\n"                            \
                "       .long   1b,10b\n"                       \
                "       .previous"                              \
                : "+d" (res), "=&" #reg (__gu_val)              \
                : "m" (*(ptr)), "i" (err));                     \
        (x) = (typeof(*(ptr)))(unsigned long)__gu_val;          \
})

#define __get_user(x, ptr)                                              \
({                                                                      \
        int __gu_err = 0;                                               \
        __chk_user_ptr(ptr);                                            \
        switch (sizeof(*(ptr))) {                                       \
        case 1:                                                         \
                __get_user_asm(__gu_err, x, ptr, u8, b, d, -EFAULT);    \
                break;                                                  \
        case 2:                                                         \
                __get_user_asm(__gu_err, x, ptr, u16, w, d, -EFAULT);   \
                break;                                                  \
        case 4:                                                         \
                __get_user_asm(__gu_err, x, ptr, u32, l, r, -EFAULT);   \
                break;                                                  \
/*      case 8: disabled because gcc-4.1 has a broken typeof            \
            {                                                           \
                const void *__gu_ptr = (ptr);                           \
                u64 __gu_val;                                           \
                asm volatile ("\n"                                      \
                        "1:     moves.l (%2)+,%1\n"                     \
                        "2:     moves.l (%2),%R1\n"                     \
                        "3:\n"                                          \
                        "       .section .fixup,\"ax\"\n"               \
                        "       .even\n"                                \
                        "10:    move.l  %3,%0\n"                        \
                        "       sub.l   %1,%1\n"                        \
                        "       sub.l   %R1,%R1\n"                      \
                        "       jra     3b\n"                           \
                        "       .previous\n"                            \
                        "\n"                                            \
                        "       .section __ex_table,\"a\"\n"            \
                        "       .align  4\n"                            \
                        "       .long   1b,10b\n"                       \
                        "       .long   2b,10b\n"                       \
                        "       .previous"                              \
                        : "+d" (__gu_err), "=&r" (__gu_val),            \
                          "+a" (__gu_ptr)                               \
                        : "i" (-EFAULT)                                 \
                        : "memory");                                    \
                (x) = (typeof(*(ptr)))__gu_val;                         \
                break;                                                  \
            }   */                                                      \
        default:                                                        \
                __gu_err = __get_user_bad();                            \
                break;                                                  \
        }                                                               \
        __gu_err;                                                       \
})
#define get_user(x, ptr) __get_user(x, ptr)
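
/*
 * Usage sketch (hypothetical caller, not part of this header):
 *
 *      u32 val;
 *      u32 __user *uptr = ...;         // pointer supplied by user space
 *      int err = get_user(val, uptr);  // 0 on success, -EFAULT on fault
 */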

unsigned long __generic_copy_from_user(void *to, const void __user *from, unsigned long n);
unsigned long __generic_copy_to_user(void __user *to, const void *from, unsigned long n);

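/*
 * __constant_copy_from_user_asm(res, to, from, tmp, n, s1, s2, s3): copy a
 * small constant-sized block from user space as two or three moves of the
 * sizes s1/s2/s3 (s3 may be empty).  On a fault the untouched part of the
 * destination is cleared and res is set to n, the total number of bytes
 * requested.
 */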
#define __constant_copy_from_user_asm(res, to, from, tmp, n, s1, s2, s3)\
        asm volatile ("\n"                                              \
                "1:     moves."#s1"     (%2)+,%3\n"                     \
                "       move."#s1"      %3,(%1)+\n"                     \
                "2:     moves."#s2"     (%2)+,%3\n"                     \
                "       move."#s2"      %3,(%1)+\n"                     \
                "       .ifnc   \""#s3"\",\"\"\n"                       \
                "3:     moves."#s3"     (%2)+,%3\n"                     \
                "       move."#s3"      %3,(%1)+\n"                     \
                "       .endif\n"                                       \
                "4:\n"                                                  \
                "       .section __ex_table,\"a\"\n"                    \
                "       .align  4\n"                                    \
                "       .long   1b,10f\n"                               \
                "       .long   2b,20f\n"                               \
                "       .ifnc   \""#s3"\",\"\"\n"                       \
                "       .long   3b,30f\n"                               \
                "       .endif\n"                                       \
                "       .previous\n"                                    \
                "\n"                                                    \
                "       .section .fixup,\"ax\"\n"                       \
                "       .even\n"                                        \
                "10:    clr."#s1"       (%1)+\n"                        \
                "20:    clr."#s2"       (%1)+\n"                        \
                "       .ifnc   \""#s3"\",\"\"\n"                       \
                "30:    clr."#s3"       (%1)+\n"                        \
                "       .endif\n"                                       \
                "       moveq.l #"#n",%0\n"                             \
                "       jra     4b\n"                                   \
                "       .previous\n"                                    \
                : "+d" (res), "+&a" (to), "+a" (from), "=&d" (tmp)      \
                : : "memory")

static __always_inline unsigned long
__constant_copy_from_user(void *to, const void __user *from, unsigned long n)
{
        unsigned long res = 0, tmp;

        switch (n) {
        case 1:
                __get_user_asm(res, *(u8 *)to, (u8 __user *)from, u8, b, d, 1);
                break;
        case 2:
                __get_user_asm(res, *(u16 *)to, (u16 __user *)from, u16, w, d, 2);
                break;
        case 3:
                __constant_copy_from_user_asm(res, to, from, tmp, 3, w, b,);
                break;
        case 4:
                __get_user_asm(res, *(u32 *)to, (u32 __user *)from, u32, l, r, 4);
                break;
        case 5:
                __constant_copy_from_user_asm(res, to, from, tmp, 5, l, b,);
                break;
        case 6:
                __constant_copy_from_user_asm(res, to, from, tmp, 6, l, w,);
                break;
        case 7:
                __constant_copy_from_user_asm(res, to, from, tmp, 7, l, w, b);
                break;
        case 8:
                __constant_copy_from_user_asm(res, to, from, tmp, 8, l, l,);
                break;
        case 9:
                __constant_copy_from_user_asm(res, to, from, tmp, 9, l, l, b);
                break;
        case 10:
                __constant_copy_from_user_asm(res, to, from, tmp, 10, l, l, w);
                break;
        case 12:
                __constant_copy_from_user_asm(res, to, from, tmp, 12, l, l, l);
                break;
        default:
                /* we limit the inlined version to 3 moves */
                return __generic_copy_from_user(to, from, n);
        }

        return res;
}

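/*
 * __constant_copy_to_user_asm(res, to, from, tmp, n, s1, s2, s3): the
 * mirror image of the macro above, copying a small constant-sized block to
 * user space as two or three moves of the sizes s1/s2/s3 (s3 may be
 * empty).  On a fault res is set to n, the total number of bytes
 * requested.
 */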
#define __constant_copy_to_user_asm(res, to, from, tmp, n, s1, s2, s3)  \
        asm volatile ("\n"                                              \
                "       move."#s1"      (%2)+,%3\n"                     \
                "11:    moves."#s1"     %3,(%1)+\n"                     \
                "12:    move."#s2"      (%2)+,%3\n"                     \
                "21:    moves."#s2"     %3,(%1)+\n"                     \
                "22:\n"                                                 \
                "       .ifnc   \""#s3"\",\"\"\n"                       \
                "       move."#s3"      (%2)+,%3\n"                     \
                "31:    moves."#s3"     %3,(%1)+\n"                     \
                "32:\n"                                                 \
                "       .endif\n"                                       \
                "4:\n"                                                  \
                "\n"                                                    \
                "       .section __ex_table,\"a\"\n"                    \
                "       .align  4\n"                                    \
                "       .long   11b,5f\n"                               \
                "       .long   12b,5f\n"                               \
                "       .long   21b,5f\n"                               \
                "       .long   22b,5f\n"                               \
                "       .ifnc   \""#s3"\",\"\"\n"                       \
                "       .long   31b,5f\n"                               \
                "       .long   32b,5f\n"                               \
                "       .endif\n"                                       \
                "       .previous\n"                                    \
                "\n"                                                    \
                "       .section .fixup,\"ax\"\n"                       \
                "       .even\n"                                        \
                "5:     moveq.l #"#n",%0\n"                             \
                "       jra     4b\n"                                   \
                "       .previous\n"                                    \
                : "+d" (res), "+a" (to), "+a" (from), "=&d" (tmp)       \
                : : "memory")

static __always_inline unsigned long
__constant_copy_to_user(void __user *to, const void *from, unsigned long n)
{
        unsigned long res = 0, tmp;

        switch (n) {
        case 1:
                __put_user_asm(res, *(u8 *)from, (u8 __user *)to, b, d, 1);
                break;
        case 2:
                __put_user_asm(res, *(u16 *)from, (u16 __user *)to, w, d, 2);
                break;
        case 3:
                __constant_copy_to_user_asm(res, to, from, tmp, 3, w, b,);
                break;
        case 4:
                __put_user_asm(res, *(u32 *)from, (u32 __user *)to, l, r, 4);
                break;
        case 5:
                __constant_copy_to_user_asm(res, to, from, tmp, 5, l, b,);
                break;
        case 6:
                __constant_copy_to_user_asm(res, to, from, tmp, 6, l, w,);
                break;
        case 7:
                __constant_copy_to_user_asm(res, to, from, tmp, 7, l, w, b);
                break;
        case 8:
                __constant_copy_to_user_asm(res, to, from, tmp, 8, l, l,);
                break;
        case 9:
                __constant_copy_to_user_asm(res, to, from, tmp, 9, l, l, b);
                break;
        case 10:
                __constant_copy_to_user_asm(res, to, from, tmp, 10, l, l, w);
                break;
        case 12:
                __constant_copy_to_user_asm(res, to, from, tmp, 12, l, l, l);
                break;
        default:
                /* limit the inlined version to 3 moves */
                return __generic_copy_to_user(to, from, n);
        }

        return res;
}

#define __copy_from_user(to, from, n)           \
(__builtin_constant_p(n) ?                      \
 __constant_copy_from_user(to, from, n) :       \
 __generic_copy_from_user(to, from, n))

#define __copy_to_user(to, from, n)             \
(__builtin_constant_p(n) ?                      \
 __constant_copy_to_user(to, from, n) :         \
 __generic_copy_to_user(to, from, n))

#define __copy_to_user_inatomic         __copy_to_user
#define __copy_from_user_inatomic       __copy_from_user

#define copy_from_user(to, from, n)     __copy_from_user(to, from, n)
#define copy_to_user(to, from, n)       __copy_to_user(to, from, n)

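/*
 * Usage sketch (hypothetical caller, not part of this header):
 *
 *      struct foo karg;                // hypothetical kernel-side struct
 *      const void __user *uarg = ...;  // pointer supplied by user space
 *
 *      if (copy_from_user(&karg, uarg, sizeof(karg)))
 *              return -EFAULT;         // nonzero: not all bytes were copied
 */
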
long strncpy_from_user(char *dst, const char __user *src, long count);
long strnlen_user(const char __user *src, long n);
unsigned long __clear_user(void __user *to, unsigned long n);

#define clear_user      __clear_user

#define strlen_user(str) strnlen_user(str, 32767)

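/*
 * Usage sketch (hypothetical caller, not part of this header):
 *
 *      char name[32];
 *      long len = strncpy_from_user(name, uname, sizeof(name));
 *      if (len < 0)
 *              return len;             // -EFAULT on a faulting access
 */
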
#endif /* __M68K_UACCESS_H */