#ifndef _ASM_X86_UACCESS_64_H
#define _ASM_X86_UACCESS_64_H

/*
 * User space memory access functions
 */
#include <linux/compiler.h>
#include <linux/errno.h>
#include <linux/prefetch.h>
#include <linux/lockdep.h>
#include <asm/page.h>

/*
 * Copy To/From Userspace
 */

/* Handles exceptions in both to and from, but doesn't do access_ok */
__must_check unsigned long
copy_user_generic(void *to, const void *from, unsigned len);

__must_check unsigned long
copy_to_user(void __user *to, const void *from, unsigned len);
__must_check unsigned long
copy_from_user(void *to, const void __user *from, unsigned len);
__must_check unsigned long
copy_in_user(void __user *to, const void __user *from, unsigned len);
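
/*
 * Illustrative usage sketch (not part of this header): a character
 * driver's read() method is a typical caller; 'dev_buf' here is a
 * hypothetical driver buffer:
 *
 *	static ssize_t dev_read(struct file *file, char __user *buf,
 *				size_t len, loff_t *ppos)
 *	{
 *		if (copy_to_user(buf, dev_buf, len))
 *			return -EFAULT;
 *		return len;
 *	}
 *
 * All of the copy routines return the number of bytes that could NOT
 * be copied, so 0 means complete success.
 */

/*
 * __copy_from_user() assumes the caller has already checked 'src' with
 * access_ok().  Constant sizes up to 16 bytes are open-coded as one or
 * two mov instructions; the error argument handed to __get_user_asm()
 * is the byte count still outstanding at that point, so the return
 * value is the number of bytes left uncopied (0 on success).
 */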
static __always_inline __must_check
int __copy_from_user(void *dst, const void __user *src, unsigned size)
{
	int ret = 0;

	might_fault();
	if (!__builtin_constant_p(size))
		return copy_user_generic(dst, (__force void *)src, size);
	switch (size) {
	case 1:__get_user_asm(*(u8 *)dst, (u8 __user *)src,
			      ret, "b", "b", "=q", 1);
		return ret;
	case 2:__get_user_asm(*(u16 *)dst, (u16 __user *)src,
			      ret, "w", "w", "=r", 2);
		return ret;
	case 4:__get_user_asm(*(u32 *)dst, (u32 __user *)src,
			      ret, "l", "k", "=r", 4);
		return ret;
	case 8:__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			      ret, "q", "", "=r", 8);
		return ret;
	case 10:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 10);
		if (unlikely(ret))
			return ret;
		__get_user_asm(*(u16 *)(8 + (char *)dst),
			       (u16 __user *)(8 + (char __user *)src),
			       ret, "w", "w", "=r", 2);
		return ret;
	case 16:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 16);
		if (unlikely(ret))
			return ret;
		__get_user_asm(*(u64 *)(8 + (char *)dst),
			       (u64 __user *)(8 + (char __user *)src),
			       ret, "q", "", "=r", 8);
		return ret;
	default:
		return copy_user_generic(dst, (__force void *)src, size);
	}
}
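
/*
 * __copy_to_user() mirrors __copy_from_user() above: no access_ok()
 * check, constant sizes up to 16 bytes open-coded, and the return
 * value is the number of bytes left uncopied (0 on success).  The
 * 64-bit stores use the "er" operand constraint: movq accepts only a
 * sign-extended 32-bit immediate, which "i" alone would not enforce.
 */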
static __always_inline __must_check
int __copy_to_user(void __user *dst, const void *src, unsigned size)
{
	int ret = 0;

	might_fault();
	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst, src, size);
	switch (size) {
	case 1:__put_user_asm(*(u8 *)src, (u8 __user *)dst,
			      ret, "b", "b", "iq", 1);
		return ret;
	case 2:__put_user_asm(*(u16 *)src, (u16 __user *)dst,
			      ret, "w", "w", "ir", 2);
		return ret;
	case 4:__put_user_asm(*(u32 *)src, (u32 __user *)dst,
			      ret, "l", "k", "ir", 4);
		return ret;
	case 8:__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			      ret, "q", "", "er", 8);
		return ret;
	case 10:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "er", 10);
		if (unlikely(ret))
			return ret;
		asm("":::"memory");
		__put_user_asm(4[(u16 *)src], 4 + (u16 __user *)dst,
			       ret, "w", "w", "ir", 2);
		return ret;
	case 16:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "er", 16);
		if (unlikely(ret))
			return ret;
		asm("":::"memory");
		__put_user_asm(1[(u64 *)src], 1 + (u64 __user *)dst,
			       ret, "q", "", "er", 8);
		return ret;
	default:
		return copy_user_generic((__force void *)dst, src, size);
	}
}
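
/*
 * __copy_in_user() copies between two user-space buffers, again without
 * access_ok().  Small constant sizes bounce through an on-stack kernel
 * temporary with a __get_user_asm()/__put_user_asm() pair; the put is
 * skipped if the get already faulted.
 */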
static __always_inline __must_check
int __copy_in_user(void __user *dst, const void __user *src, unsigned size)
{
	int ret = 0;

	might_fault();
	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	switch (size) {
	case 1: {
		u8 tmp;
		__get_user_asm(tmp, (u8 __user *)src,
			       ret, "b", "b", "=q", 1);
		if (likely(!ret))
			__put_user_asm(tmp, (u8 __user *)dst,
				       ret, "b", "b", "iq", 1);
		return ret;
	}
	case 2: {
		u16 tmp;
		__get_user_asm(tmp, (u16 __user *)src,
			       ret, "w", "w", "=r", 2);
		if (likely(!ret))
			__put_user_asm(tmp, (u16 __user *)dst,
				       ret, "w", "w", "ir", 2);
		return ret;
	}
	case 4: {
		u32 tmp;
		__get_user_asm(tmp, (u32 __user *)src,
			       ret, "l", "k", "=r", 4);
		if (likely(!ret))
			__put_user_asm(tmp, (u32 __user *)dst,
				       ret, "l", "k", "ir", 4);
		return ret;
	}
	case 8: {
		u64 tmp;
		__get_user_asm(tmp, (u64 __user *)src,
			       ret, "q", "", "=r", 8);
		if (likely(!ret))
			__put_user_asm(tmp, (u64 __user *)dst,
				       ret, "q", "", "er", 8);
		return ret;
	}
	default:
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	}
}
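
/*
 * String and bulk-clearing primitives; the double-underscore variants
 * skip the access_ok() check.
 */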
__must_check long
strncpy_from_user(char *dst, const char __user *src, long count);
__must_check long
__strncpy_from_user(char *dst, const char __user *src, long count);
__must_check long strnlen_user(const char __user *str, long n);
__must_check long __strnlen_user(const char __user *str, long n);
__must_check long strlen_user(const char __user *str);
__must_check unsigned long clear_user(void __user *mem, unsigned long len);
__must_check unsigned long __clear_user(void __user *mem, unsigned long len);
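
/*
 * The _inatomic variants drop the might_fault() annotation and may be
 * called with pagefaults disabled; a fault then simply ends the copy
 * early, and the caller must cope with the partial result.
 */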
__must_check long __copy_from_user_inatomic(void *dst, const void __user *src,
					    unsigned size);

static __must_check __always_inline int
__copy_to_user_inatomic(void __user *dst, const void *src, unsigned size)
{
	return copy_user_generic((__force void *)dst, src, size);
}
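
/*
 * Non-caching copies: __copy_user_nocache() moves data with
 * non-temporal stores so that a large copy does not blow the CPU
 * caches.  'zerorest' selects whether the remaining destination bytes
 * are zero-filled when the copy faults part-way.
 */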
extern long __copy_user_nocache(void *dst, const void __user *src,
				unsigned size, int zerorest);

static inline int __copy_from_user_nocache(void *dst, const void __user *src,
					   unsigned size)
{
	might_sleep();
	return __copy_user_nocache(dst, src, size, 1);
}

static inline int __copy_from_user_inatomic_nocache(void *dst,
						    const void __user *src,
						    unsigned size)
{
	return __copy_user_nocache(dst, src, size, 0);
}
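
/*
 * copy_user_handle_tail() is the slow-path fixup run after a bulk copy
 * faults: it retries the remainder a byte at a time and, depending on
 * 'zerorest', zero-fills whatever still cannot be copied.
 */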
unsigned long
copy_user_handle_tail(char *to, char *from, unsigned len, unsigned zerorest);

#endif /* _ASM_X86_UACCESS_64_H */