1 #ifndef __X86_64_UACCESS_H
2 #define __X86_64_UACCESS_H
/*
 * User space memory access functions
 */
7 #include <linux/compiler.h>
8 #include <linux/errno.h>
9 #include <linux/prefetch.h>
12 #define ARCH_HAS_SEARCH_EXTABLE
/*
 * Out-of-line put_user stubs, one per access size; implemented in
 * assembly elsewhere. __put_user_bad is deliberately undefined so a
 * bad size fails at link time.
 * NOTE(review): interior continuation lines of the macros below appear
 * to have been dropped by the extraction -- verify against the
 * original x86_64 uaccess.h before relying on them.
 */
14 extern void __put_user_1(void);
15 extern void __put_user_2(void);
16 extern void __put_user_4(void);
17 extern void __put_user_8(void);
18 extern void __put_user_bad(void);
/* Dispatch to the size-specific __put_user_N assembly stub. */
20 #define __put_user_x(size, ret, x, ptr) \
21 asm volatile("call __put_user_" #size \
/* put_user(): store a simple value to user space, with address check. */
26 #define put_user(x, ptr) \
27 __put_user_check((__typeof__(*(ptr)))(x), (ptr), sizeof(*(ptr)))
/* __get_user()/__put_user(): same as the checked forms, but the caller
 * must already have validated the pointer (e.g. via access_ok()). */
29 #define __get_user(x, ptr) \
30 __get_user_nocheck((x), (ptr), sizeof(*(ptr)))
31 #define __put_user(x, ptr) \
32 __put_user_nocheck((__typeof__(*(ptr)))(x), (ptr), sizeof(*(ptr)))
/* x86-64 handles unaligned user accesses in hardware, so the unaligned
 * variants are plain aliases. */
34 #define __get_user_unaligned __get_user
35 #define __put_user_unaligned __put_user
/* Checked store: dispatch on object size to the matching
 * __put_user_x() stub (1/2/4/8 bytes). */
37 #define __put_user_check(x, ptr, size) \
40 typeof(*(ptr)) __user *__pu_addr = (ptr); \
43 __put_user_x(1, __pu_err, x, __pu_addr); \
46 __put_user_x(2, __pu_err, x, __pu_addr); \
49 __put_user_x(4, __pu_err, x, __pu_addr); \
52 __put_user_x(8, __pu_err, x, __pu_addr); \
/*
 * __get_user_nocheck(): fetch a value from user space without an
 * access_ok() check; the result lands in (x), and the expression
 * evaluates to 0 or -EFAULT.
 * NOTE(review): the do/while wrappers and several continuation lines
 * appear to be missing from this extraction -- verify against the
 * original header.
 */
60 #define __get_user_nocheck(x, ptr, size) \
63 unsigned long __gu_val; \
64 __get_user_size(__gu_val, (ptr), (size), __gu_err); \
65 (x) = (__force typeof(*(ptr)))__gu_val; \
/* Size dispatcher: choose mov width ("b"/"w"/"l"/"q") and register
 * constraint for 1/2/4/8-byte loads; any other size is an error
 * (__get_user_bad is undefined, so it fails at link time). */
69 #define __get_user_size(x, ptr, size, retval) \
72 __chk_user_ptr(ptr); \
75 __get_user_asm(x, ptr, retval, "b", "b", "=q", -EFAULT);\
78 __get_user_asm(x, ptr, retval, "w", "w", "=r", -EFAULT);\
81 __get_user_asm(x, ptr, retval, "l", "k", "=r", -EFAULT);\
84 __get_user_asm(x, ptr, retval, "q", "", "=r", -EFAULT); \
87 (x) = __get_user_bad(); \
/* One guarded load. On a fault, the exception table (_ASM_EXTABLE)
 * redirects to the .fixup stub, which zeroes the destination register,
 * sets err to `errno`, and resumes after the mov. */
91 #define __get_user_asm(x, addr, err, itype, rtype, ltype, errno) \
92 asm volatile("1: mov"itype" %2,%"rtype"1\n" \
94 ".section .fixup, \"ax\"\n" \
96 " xor"itype" %"rtype"1,%"rtype"1\n" \
99 _ASM_EXTABLE(1b, 3b) \
100 : "=r" (err), ltype (x) \
101 : "m" (__m(addr)), "i"(errno), "0"(err))
/*
 * Copy To/From Userspace
 */
107 /* Handles exceptions in both to and from, but doesn't do access_ok */
108 __must_check unsigned long
109 copy_user_generic(void *to, const void *from, unsigned len);
/* Checked copy routines (these perform the access_ok() check); each
 * returns the number of bytes that could NOT be copied (0 on success). */
111 __must_check unsigned long
112 copy_to_user(void __user *to, const void *from, unsigned len);
113 __must_check unsigned long
114 copy_from_user(void *to, const void __user *from, unsigned len);
115 __must_check unsigned long
116 copy_in_user(void __user *to, const void __user *from, unsigned len);
/*
 * __copy_from_user(): kernel <- user copy without access_ok().
 * For compile-time-constant sizes 1/2/4/8/10/16 it expands to inline
 * guarded loads; everything else falls through to copy_user_generic().
 * NOTE(review): the switch skeleton (braces, `int ret`, case labels for
 * 10/16, break/return lines) appears to have been dropped by the
 * extraction -- compare with the original header before editing.
 */
118 static __always_inline __must_check
119 int __copy_from_user(void *dst, const void __user *src, unsigned size)
122 if (!__builtin_constant_p(size))
123 return copy_user_generic(dst, (__force void *)src, size);
/* 1/2/4/8 bytes: a single guarded load each. */
125 case 1:__get_user_asm(*(u8 *)dst, (u8 __user *)src,
126 ret, "b", "b", "=q", 1);
128 case 2:__get_user_asm(*(u16 *)dst, (u16 __user *)src,
129 ret, "w", "w", "=r", 2);
131 case 4:__get_user_asm(*(u32 *)dst, (u32 __user *)src,
132 ret, "l", "k", "=r", 4);
134 case 8:__get_user_asm(*(u64 *)dst, (u64 __user *)src,
135 ret, "q", "", "=r", 8);
/* 10 bytes: one 8-byte load, then the trailing u16 at offset 8. */
138 __get_user_asm(*(u64 *)dst, (u64 __user *)src,
139 ret, "q", "", "=r", 16);
142 __get_user_asm(*(u16 *)(8 + (char *)dst),
143 (u16 __user *)(8 + (char __user *)src),
144 ret, "w", "w", "=r", 2);
/* 16 bytes: two 8-byte loads. */
147 __get_user_asm(*(u64 *)dst, (u64 __user *)src,
148 ret, "q", "", "=r", 16);
151 __get_user_asm(*(u64 *)(8 + (char *)dst),
152 (u64 __user *)(8 + (char __user *)src),
153 ret, "q", "", "=r", 8);
156 return copy_user_generic(dst, (__force void *)src, size);
/*
 * __copy_to_user(): user <- kernel copy without access_ok().
 * Inline guarded stores for constant sizes 1/2/4/8/10/16; otherwise
 * falls through to copy_user_generic().
 * NOTE(review): the switch skeleton (braces, `int ret`, case labels for
 * 10/16, break/return lines) appears to have been dropped by the
 * extraction -- compare with the original header before editing.
 */
160 static __always_inline __must_check
161 int __copy_to_user(void __user *dst, const void *src, unsigned size)
164 if (!__builtin_constant_p(size))
165 return copy_user_generic((__force void *)dst, src, size);
167 case 1:__put_user_asm(*(u8 *)src, (u8 __user *)dst,
168 ret, "b", "b", "iq", 1);
170 case 2:__put_user_asm(*(u16 *)src, (u16 __user *)dst,
171 ret, "w", "w", "ir", 2);
173 case 4:__put_user_asm(*(u32 *)src, (u32 __user *)dst,
174 ret, "l", "k", "ir", 4);
176 case 8:__put_user_asm(*(u64 *)src, (u64 __user *)dst,
177 ret, "q", "", "ir", 8);
/* 10 bytes: one 8-byte store, then the trailing u16 at offset 8
 * (4[(u16 *)src] is just ((u16 *)src)[4]). */
180 __put_user_asm(*(u64 *)src, (u64 __user *)dst,
181 ret, "q", "", "ir", 10);
185 __put_user_asm(4[(u16 *)src], 4 + (u16 __user *)dst,
186 ret, "w", "w", "ir", 2);
/* 16 bytes: two 8-byte stores. */
189 __put_user_asm(*(u64 *)src, (u64 __user *)dst,
190 ret, "q", "", "ir", 16);
194 __put_user_asm(1[(u64 *)src], 1 + (u64 __user *)dst,
195 ret, "q", "", "ir", 8);
198 return copy_user_generic((__force void *)dst, src, size);
/*
 * __copy_in_user(): user -> user copy without access_ok().
 * For constant sizes 1/2/4/8 each datum is bounced through a kernel
 * temporary with one guarded load and one guarded store; otherwise it
 * falls through to copy_user_generic().
 * NOTE(review): the switch skeleton, temporary declarations, and the
 * `if (!ret)` guards between load and store appear to have been dropped
 * by the extraction -- compare with the original header before editing.
 */
202 static __always_inline __must_check
203 int __copy_in_user(void __user *dst, const void __user *src, unsigned size)
206 if (!__builtin_constant_p(size))
207 return copy_user_generic((__force void *)dst,
208 (__force void *)src, size);
/* 1 byte: guarded load into tmp, then guarded store. */
212 __get_user_asm(tmp, (u8 __user *)src,
213 ret, "b", "b", "=q", 1);
215 __put_user_asm(tmp, (u8 __user *)dst,
216 ret, "b", "b", "iq", 1);
/* 2 bytes. */
221 __get_user_asm(tmp, (u16 __user *)src,
222 ret, "w", "w", "=r", 2);
224 __put_user_asm(tmp, (u16 __user *)dst,
225 ret, "w", "w", "ir", 2);
/* 4 bytes. */
231 __get_user_asm(tmp, (u32 __user *)src,
232 ret, "l", "k", "=r", 4);
234 __put_user_asm(tmp, (u32 __user *)dst,
235 ret, "l", "k", "ir", 4);
/* 8 bytes. */
240 __get_user_asm(tmp, (u64 __user *)src,
241 ret, "q", "", "=r", 8);
243 __put_user_asm(tmp, (u64 __user *)dst,
244 ret, "q", "", "ir", 8);
248 return copy_user_generic((__force void *)dst,
249 (__force void *)src, size);
/*
 * User-space string and memory-clearing helpers. By kernel convention
 * the double-underscore variants skip the access_ok() check; the
 * clear_user routines return the number of bytes NOT cleared.
 * NOTE(review): some return-type lines appear to have been dropped by
 * the extraction (e.g. before strncpy_from_user) -- verify against the
 * original header.
 */
254 strncpy_from_user(char *dst, const char __user *src, long count);
256 __strncpy_from_user(char *dst, const char __user *src, long count);
257 __must_check long strnlen_user(const char __user *str, long n);
258 __must_check long __strnlen_user(const char __user *str, long n);
259 __must_check long strlen_user(const char __user *str);
260 __must_check unsigned long clear_user(void __user *mem, unsigned long len);
261 __must_check unsigned long __clear_user(void __user *mem, unsigned long len);
/* Atomic-context user->kernel copy (declaration continues past this
 * line in the original; parameter list is truncated here). */
263 __must_check long __copy_from_user_inatomic(void *dst, const void __user *src,
/* Atomic-context kernel->user copy: delegates straight to
 * copy_user_generic() (no access_ok, no sleeping checks). Returns the
 * number of bytes not copied.
 * NOTE(review): the function's braces appear to have been dropped by
 * the extraction. */
266 static __must_check __always_inline int
267 __copy_to_user_inatomic(void __user *dst, const void *src, unsigned size)
269 return copy_user_generic((__force void *)dst, src, size);
/*
 * Cache-bypassing (non-temporal) user->kernel copies. The `zerorest`
 * flag selects whether the remaining destination bytes are zeroed after
 * a partial fault.
 * NOTE(review): function braces and parameter-list continuation lines
 * appear to have been dropped by the extraction.
 */
272 #define ARCH_HAS_NOCACHE_UACCESS 1
273 extern long __copy_user_nocache(void *dst, const void __user *src,
274 unsigned size, int zerorest);
276 static inline int __copy_from_user_nocache(void *dst, const void __user *src,
/* zerorest=1: zero the tail of dst if the copy faults partway. */
280 return __copy_user_nocache(dst, src, size, 1);
/* In-atomic variant: zerorest=0, the caller handles partial copies. */
283 static inline int __copy_from_user_inatomic_nocache(void *dst,
284 const void __user *src,
287 return __copy_user_nocache(dst, src, size, 0);
290 #endif /* __X86_64_UACCESS_H */