#ifndef __X86_64_UACCESS_H
#define __X86_64_UACCESS_H

/*
 * User space memory access functions
 */
#include <linux/compiler.h>
#include <linux/errno.h>
#include <linux/prefetch.h>

#define ARCH_HAS_SEARCH_EXTABLE

/*
 * Copy To/From Userspace
 */

/* Handles exceptions in both to and from, but doesn't do access_ok */
__must_check unsigned long
copy_user_generic(void *to, const void *from, unsigned len);

__must_check unsigned long
copy_to_user(void __user *to, const void *from, unsigned len);
__must_check unsigned long
copy_from_user(void *to, const void __user *from, unsigned len);
__must_check unsigned long
copy_in_user(void __user *to, const void __user *from, unsigned len);
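
/*
 * Usage sketch (illustrative only; the names kbuf, ubuf and len are
 * hypothetical): the checked copy_{to,from,in}_user() routines above
 * verify access_ok() themselves and return the number of bytes that
 * could NOT be copied, so 0 means success.
 *
 *	char kbuf[64];
 *
 *	if (len > sizeof(kbuf))
 *		return -EINVAL;
 *	if (copy_from_user(kbuf, ubuf, len))
 *		return -EFAULT;
 *
 * The __-prefixed variants below skip the access_ok() check; callers
 * must validate the user range first.  __copy_from_user() inlines
 * compile-time-constant sizes of 1, 2, 4, 8, 10 and 16 bytes via
 * __get_user_asm() and falls back to copy_user_generic() otherwise.
 */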

static __always_inline __must_check
int __copy_from_user(void *dst, const void __user *src, unsigned size)
{
	int ret = 0;
	if (!__builtin_constant_p(size))
		return copy_user_generic(dst, (__force void *)src, size);
	switch (size) {
	case 1:__get_user_asm(*(u8 *)dst, (u8 __user *)src,
			      ret, "b", "b", "=q", 1);
		return ret;
	case 2:__get_user_asm(*(u16 *)dst, (u16 __user *)src,
			      ret, "w", "w", "=r", 2);
		return ret;
	case 4:__get_user_asm(*(u32 *)dst, (u32 __user *)src,
			      ret, "l", "k", "=r", 4);
		return ret;
	case 8:__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			      ret, "q", "", "=r", 8);
		return ret;
	case 10:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 16);
		if (unlikely(ret))
			return ret;
		__get_user_asm(*(u16 *)(8 + (char *)dst),
			       (u16 __user *)(8 + (char __user *)src),
			       ret, "w", "w", "=r", 2);
		return ret;
	case 16:
		__get_user_asm(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 16);
		if (unlikely(ret))
			return ret;
		__get_user_asm(*(u64 *)(8 + (char *)dst),
			       (u64 __user *)(8 + (char __user *)src),
			       ret, "q", "", "=r", 8);
		return ret;
	default:
		return copy_user_generic(dst, (__force void *)src, size);
	}
}
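
/*
 * Unchecked copy from kernel to user space.  Compile-time-constant
 * sizes of 1, 2, 4, 8, 10 and 16 bytes are inlined as one or two
 * __put_user_asm() stores; everything else goes through
 * copy_user_generic().
 */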
static __always_inline __must_check
int __copy_to_user(void __user *dst, const void *src, unsigned size)
{
	int ret = 0;
	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst, src, size);
	switch (size) {
	case 1:__put_user_asm(*(u8 *)src, (u8 __user *)dst,
			      ret, "b", "b", "iq", 1);
		return ret;
	case 2:__put_user_asm(*(u16 *)src, (u16 __user *)dst,
			      ret, "w", "w", "ir", 2);
		return ret;
	case 4:__put_user_asm(*(u32 *)src, (u32 __user *)dst,
			      ret, "l", "k", "ir", 4);
		return ret;
	case 8:__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			      ret, "q", "", "ir", 8);
		return ret;
	case 10:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "ir", 10);
		if (unlikely(ret))
			return ret;
		asm("":::"memory");
		__put_user_asm(4[(u16 *)src], 4 + (u16 __user *)dst,
			       ret, "w", "w", "ir", 2);
		return ret;
	case 16:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "ir", 16);
		if (unlikely(ret))
			return ret;
		asm("":::"memory");
		__put_user_asm(1[(u64 *)src], 1 + (u64 __user *)dst,
			       ret, "q", "", "ir", 8);
		return ret;
	default:
		return copy_user_generic((__force void *)dst, src, size);
	}
}
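
/*
 * Copy between two user-space buffers without access_ok() checks.
 * Each constant-size case bounces the data through a kernel temporary:
 * a __get_user_asm() load followed, only if the load succeeded, by a
 * __put_user_asm() store.
 */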
static __always_inline __must_check
int __copy_in_user(void __user *dst, const void __user *src, unsigned size)
{
	int ret = 0;
	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	switch (size) {
	case 1: {
		u8 tmp;
		__get_user_asm(tmp, (u8 __user *)src,
			       ret, "b", "b", "=q", 1);
		if (likely(!ret))
			__put_user_asm(tmp, (u8 __user *)dst,
				       ret, "b", "b", "iq", 1);
		return ret;
	}
	case 2: {
		u16 tmp;
		__get_user_asm(tmp, (u16 __user *)src,
			       ret, "w", "w", "=r", 2);
		if (likely(!ret))
			__put_user_asm(tmp, (u16 __user *)dst,
				       ret, "w", "w", "ir", 2);
		return ret;
	}
	case 4: {
		u32 tmp;
		__get_user_asm(tmp, (u32 __user *)src,
			       ret, "l", "k", "=r", 4);
		if (likely(!ret))
			__put_user_asm(tmp, (u32 __user *)dst,
				       ret, "l", "k", "ir", 4);
		return ret;
	}
	case 8: {
		u64 tmp;
		__get_user_asm(tmp, (u64 __user *)src,
			       ret, "q", "", "=r", 8);
		if (likely(!ret))
			__put_user_asm(tmp, (u64 __user *)dst,
				       ret, "q", "", "ir", 8);
		return ret;
	}
	default:
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	}
}
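
/*
 * String and memory-clearing helpers.  As above, the __-prefixed forms
 * skip access_ok().  strnlen_user() counts the terminating NUL, and
 * clear_user() returns the number of bytes that could not be cleared
 * (0 on success).
 */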
__must_check long
strncpy_from_user(char *dst, const char __user *src, long count);
__must_check long
__strncpy_from_user(char *dst, const char __user *src, long count);
__must_check long strnlen_user(const char __user *str, long n);
__must_check long __strnlen_user(const char __user *str, long n);
__must_check long strlen_user(const char __user *str);
__must_check unsigned long clear_user(void __user *mem, unsigned long len);
__must_check unsigned long __clear_user(void __user *mem, unsigned long len);

__must_check long __copy_from_user_inatomic(void *dst, const void __user *src,
					    unsigned size);
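
/*
 * Variant of __copy_to_user() for atomic context: no size-specific
 * inlining, just the out-of-line copy_user_generic() path.
 */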
static __must_check __always_inline int
__copy_to_user_inatomic(void __user *dst, const void *src, unsigned size)
{
	return copy_user_generic((__force void *)dst, src, size);
}
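
/*
 * Non-temporal ("nocache") copies move data to the destination while
 * bypassing the CPU cache.  A nonzero zerorest asks __copy_user_nocache()
 * to zero the remaining destination bytes if a fault cuts the copy short.
 */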
#define ARCH_HAS_NOCACHE_UACCESS 1
extern long __copy_user_nocache(void *dst, const void __user *src,
				unsigned size, int zerorest);
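
/* Blocking context: may sleep; zeroes the tail of dst on a fault. */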
static inline int __copy_from_user_nocache(void *dst, const void __user *src,
					   unsigned size)
{
	might_sleep();
	return __copy_user_nocache(dst, src, size, 1);
}
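
/* Atomic context: no might_sleep(), no tail zeroing on a fault. */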
static inline int __copy_from_user_inatomic_nocache(void *dst,
						    const void __user *src,
						    unsigned size)
{
	return __copy_user_nocache(dst, src, size, 0);
}

#endif /* __X86_64_UACCESS_H */