1 #ifndef __X86_64_UACCESS_H
2 #define __X86_64_UACCESS_H
/*
 * User space memory access functions
 */
7 #include <linux/compiler.h>
8 #include <linux/errno.h>
9 #include <linux/prefetch.h>
12 #define ARCH_HAS_SEARCH_EXTABLE
/*
 * Single-item user access without an access_ok() check; the caller is
 * responsible for validating the pointer.  The access width is taken
 * from the pointed-to type via sizeof(*(ptr)).
 */
#define __get_user(x, ptr) \
	__get_user_nocheck((x), (ptr), sizeof(*(ptr)))
#define __put_user(x, ptr) \
	__put_user_nocheck((__typeof__(*(ptr)))(x), (ptr), sizeof(*(ptr)))

/* Plain aliases: x86-64 hardware tolerates unaligned accesses. */
#define __get_user_unaligned __get_user
#define __put_user_unaligned __put_user
/*
 * Copy To/From Userspace
 */
26 /* Handles exceptions in both to and from, but doesn't do access_ok */
/* Out-of-line worker used by all the inlined fast paths below. */
__must_check unsigned long
copy_user_generic(void *to, const void *from, unsigned len);

/*
 * Checked copy routines; each returns the number of bytes NOT copied
 * (0 on full success).  NOTE(review): presumably these perform
 * access_ok() before copying, unlike copy_user_generic() — confirm
 * against the out-of-line definitions.
 */
__must_check unsigned long
copy_to_user(void __user *to, const void *from, unsigned len);
__must_check unsigned long
copy_from_user(void *to, const void __user *from, unsigned len);
__must_check unsigned long
copy_in_user(void __user *to, const void __user *from, unsigned len);
37 static __always_inline __must_check
38 int __copy_from_user(void *dst, const void __user *src, unsigned size)
41 if (!__builtin_constant_p(size))
42 return copy_user_generic(dst, (__force void *)src, size);
44 case 1:__get_user_asm(*(u8 *)dst, (u8 __user *)src,
45 ret, "b", "b", "=q", 1);
47 case 2:__get_user_asm(*(u16 *)dst, (u16 __user *)src,
48 ret, "w", "w", "=r", 2);
50 case 4:__get_user_asm(*(u32 *)dst, (u32 __user *)src,
51 ret, "l", "k", "=r", 4);
53 case 8:__get_user_asm(*(u64 *)dst, (u64 __user *)src,
54 ret, "q", "", "=r", 8);
57 __get_user_asm(*(u64 *)dst, (u64 __user *)src,
58 ret, "q", "", "=r", 16);
61 __get_user_asm(*(u16 *)(8 + (char *)dst),
62 (u16 __user *)(8 + (char __user *)src),
63 ret, "w", "w", "=r", 2);
66 __get_user_asm(*(u64 *)dst, (u64 __user *)src,
67 ret, "q", "", "=r", 16);
70 __get_user_asm(*(u64 *)(8 + (char *)dst),
71 (u64 __user *)(8 + (char __user *)src),
72 ret, "q", "", "=r", 8);
75 return copy_user_generic(dst, (__force void *)src, size);
79 static __always_inline __must_check
80 int __copy_to_user(void __user *dst, const void *src, unsigned size)
83 if (!__builtin_constant_p(size))
84 return copy_user_generic((__force void *)dst, src, size);
86 case 1:__put_user_asm(*(u8 *)src, (u8 __user *)dst,
87 ret, "b", "b", "iq", 1);
89 case 2:__put_user_asm(*(u16 *)src, (u16 __user *)dst,
90 ret, "w", "w", "ir", 2);
92 case 4:__put_user_asm(*(u32 *)src, (u32 __user *)dst,
93 ret, "l", "k", "ir", 4);
95 case 8:__put_user_asm(*(u64 *)src, (u64 __user *)dst,
96 ret, "q", "", "ir", 8);
99 __put_user_asm(*(u64 *)src, (u64 __user *)dst,
100 ret, "q", "", "ir", 10);
104 __put_user_asm(4[(u16 *)src], 4 + (u16 __user *)dst,
105 ret, "w", "w", "ir", 2);
108 __put_user_asm(*(u64 *)src, (u64 __user *)dst,
109 ret, "q", "", "ir", 16);
113 __put_user_asm(1[(u64 *)src], 1 + (u64 __user *)dst,
114 ret, "q", "", "ir", 8);
117 return copy_user_generic((__force void *)dst, src, size);
121 static __always_inline __must_check
122 int __copy_in_user(void __user *dst, const void __user *src, unsigned size)
125 if (!__builtin_constant_p(size))
126 return copy_user_generic((__force void *)dst,
127 (__force void *)src, size);
131 __get_user_asm(tmp, (u8 __user *)src,
132 ret, "b", "b", "=q", 1);
134 __put_user_asm(tmp, (u8 __user *)dst,
135 ret, "b", "b", "iq", 1);
140 __get_user_asm(tmp, (u16 __user *)src,
141 ret, "w", "w", "=r", 2);
143 __put_user_asm(tmp, (u16 __user *)dst,
144 ret, "w", "w", "ir", 2);
150 __get_user_asm(tmp, (u32 __user *)src,
151 ret, "l", "k", "=r", 4);
153 __put_user_asm(tmp, (u32 __user *)dst,
154 ret, "l", "k", "ir", 4);
159 __get_user_asm(tmp, (u64 __user *)src,
160 ret, "q", "", "=r", 8);
162 __put_user_asm(tmp, (u64 __user *)dst,
163 ret, "q", "", "ir", 8);
167 return copy_user_generic((__force void *)dst,
168 (__force void *)src, size);
173 strncpy_from_user(char *dst, const char __user *src, long count);
175 __strncpy_from_user(char *dst, const char __user *src, long count);
176 __must_check long strnlen_user(const char __user *str, long n);
177 __must_check long __strnlen_user(const char __user *str, long n);
178 __must_check long strlen_user(const char __user *str);
179 __must_check unsigned long clear_user(void __user *mem, unsigned long len);
180 __must_check unsigned long __clear_user(void __user *mem, unsigned long len);
182 __must_check long __copy_from_user_inatomic(void *dst, const void __user *src,
185 static __must_check __always_inline int
186 __copy_to_user_inatomic(void __user *dst, const void *src, unsigned size)
188 return copy_user_generic((__force void *)dst, src, size);
/* Advertise that this arch provides non-caching uaccess primitives. */
#define ARCH_HAS_NOCACHE_UACCESS 1
/*
 * Out-of-line non-temporal copy from user space.  NOTE(review): the
 * callers below pass 1/0 for 'zerorest', which apparently selects
 * whether the destination tail is zeroed after a partial fault —
 * confirm against the assembly implementation.
 */
extern long __copy_user_nocache(void *dst, const void __user *src,
				unsigned size, int zerorest);
195 static inline int __copy_from_user_nocache(void *dst, const void __user *src,
199 return __copy_user_nocache(dst, src, size, 1);
202 static inline int __copy_from_user_inatomic_nocache(void *dst,
203 const void __user *src,
206 return __copy_user_nocache(dst, src, size, 0);
209 #endif /* __X86_64_UACCESS_H */