/* Copyright 2002 Andi Kleen, SuSE Labs.
 * Subject to the GNU Public License v2.
 *
 * Functions to copy from and to user space.
 */

#include <linux/linkage.h>
#include <asm/dwarf2.h>
#define FIX_ALIGNMENT 1

#include <asm/current.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
#include <asm/cpufeature.h>
/*
 * copy_user_nocache - Uncached memory copy with exception handling
 * This will force destination/source out of cache for more performance.
 *
 * Input:
 * rdi destination
 * rsi source
 * rdx count
 * rcx zero flag	when 1, zero the rest of the destination on an exception
 *
 * Output:
 * eax uncopied bytes or 0 if successful.
 */
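/*
 * Roughly the C-level contract, as a sketch (the kernel declares this
 * routine along these lines for C callers, e.g. in <asm/uaccess.h>;
 * treat the exact header location as an assumption):
 *
 *	long __copy_user_nocache(void *dst, const void __user *src,
 *				 unsigned size, int zerorest);
 *
 * It returns the number of bytes that could NOT be copied, i.e. 0 on
 * complete success, like the other copy_user variants.
 */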
ENTRY(__copy_user_nocache)
	CFI_STARTPROC
	pushq %rbx
	CFI_ADJUST_CFA_OFFSET 8
	CFI_REL_OFFSET rbx, 0
	pushq %rcx			/* save zero flag */
	CFI_ADJUST_CFA_OFFSET 8
	CFI_REL_OFFSET rcx, 0

	xorl %eax,%eax			/* zero for the exception handler */
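	/*
	 * %eax counts uncopied bytes: the fixup code below accumulates
	 * into it on a fault, so it must start out as zero.
	 */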
#ifdef FIX_ALIGNMENT
	/* check for bad alignment of destination */
	movl %edi,%ecx
	andl $7,%ecx
	jnz  .Lbad_alignment
.Lafter_bad_alignment:
#endif

	movq %rdx,%rcx

	movl $64,%ebx
	shrq $6,%rdx			/* rdx = number of 64-byte blocks */
	decq %rdx
	js   .Lhandle_tail		/* less than one full block */

	.p2align 4
.Lloop:
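	/*
	 * Main loop: one 64-byte block per iteration.  Quadwords are
	 * loaded into scratch registers in groups of four, then written
	 * with movnti, a non-temporal store that minimizes cache
	 * pollution from the destination.
	 */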
.Ls1:	movq (%rsi),%r11
.Ls2:	movq 1*8(%rsi),%r8
.Ls3:	movq 2*8(%rsi),%r9
.Ls4:	movq 3*8(%rsi),%r10
.Ld1:	movnti %r11,(%rdi)
.Ld2:	movnti %r8,1*8(%rdi)
.Ld3:	movnti %r9,2*8(%rdi)
.Ld4:	movnti %r10,3*8(%rdi)
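	/* same load/store pattern for the second 32 bytes of the block */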
.Ls5:	movq 4*8(%rsi),%r11
.Ls6:	movq 5*8(%rsi),%r8
.Ls7:	movq 6*8(%rsi),%r9
.Ls8:	movq 7*8(%rsi),%r10
.Ld5:	movnti %r11,4*8(%rdi)
.Ld6:	movnti %r8,5*8(%rdi)
.Ld7:	movnti %r9,6*8(%rdi)
.Ld8:	movnti %r10,7*8(%rdi)

	decq %rdx

	leaq 64(%rsi),%rsi
	leaq 64(%rdi),%rdi

	jns  .Lloop

	.p2align 4
.Lhandle_tail:
	movl %ecx,%edx
	andl $63,%ecx
	shrl $3,%ecx
	jz   .Lhandle_7
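	/* quadword tail: copy the remaining whole 8-byte words, still
	   with non-temporal stores */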
.Lloop_8:
.Ls9:	movq (%rsi),%r8
.Ld9:	movnti %r8,(%rdi)
	decl %ecx
	leaq 8(%rdi),%rdi
	leaq 8(%rsi),%rsi
	jnz  .Lloop_8

.Lhandle_7:
	movl %edx,%ecx
	andl $7,%ecx
	jz   .Lende
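	/* final 1..7 bytes, copied one byte at a time through %bl
	   (the reason %rbx is saved in the prologue) */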
.Lloop_1:
.Ls10:	movb (%rsi),%bl
.Ld10:	movb %bl,(%rdi)
	incq %rdi
	incq %rsi
	decl %ecx
	jnz  .Lloop_1

	CFI_REMEMBER_STATE
.Lende:
	popq %rcx
	CFI_ADJUST_CFA_OFFSET -8
	CFI_RESTORE rcx
	popq %rbx
	CFI_ADJUST_CFA_OFFSET -8
	CFI_RESTORE rbx
	ret
	CFI_RESTORE_STATE
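	/*
	 * Slow path for a destination that is not 8-byte aligned: copy
	 * single bytes until it is, then rejoin the fast path above,
	 * presumably so the quadword movnti stores stay naturally aligned.
	 */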
#ifdef FIX_ALIGNMENT
	/* align destination */
	.p2align 4
.Lbad_alignment:
	movl $8,%r9d
	subl %ecx,%r9d
	movl %r9d,%ecx			/* ecx = bytes needed to align rdi */
	cmpq %r9,%rdx
	jz   .Lhandle_7
	js   .Lhandle_7			/* count < alignment fix: byte-copy it all */
.Lalign_1:
.Ls11:	movb (%rsi),%bl
.Ld11:	movb %bl,(%rdi)
	incq %rsi
	incq %rdi
	decl %ecx
	jnz  .Lalign_1
	subq %r9,%rdx
	jmp  .Lafter_bad_alignment
#endif
	/* table sorted by exception address */
	.section __ex_table,"a"
	.align 8
	.quad .Ls1,.Ls1e	/* .Ls[1-4] - 0 bytes copied */
	.quad .Ls2,.Ls2e
	.quad .Ls3,.Ls3e
	.quad .Ls4,.Ls4e
	.quad .Ld1,.Ls1e	/* .Ld[1-4] - 0..24 bytes copied */
	.quad .Ld2,.Ls2e
	.quad .Ld3,.Ls3e
	.quad .Ld4,.Ls4e
	.quad .Ls5,.Ls5e	/* .Ls[5-8] - 32 bytes copied */
	.quad .Ls6,.Ls6e
	.quad .Ls7,.Ls7e
	.quad .Ls8,.Ls8e
	.quad .Ld5,.Ls5e	/* .Ld[5-8] - 32..56 bytes copied */
	.quad .Ld6,.Ls6e
	.quad .Ld7,.Ls7e
	.quad .Ld8,.Ls8e
	.quad .Ls9,.Le_quad
	.quad .Ld9,.Le_quad
	.quad .Ls10,.Le_byte
	.quad .Ld10,.Le_byte
#ifdef FIX_ALIGNMENT
	.quad .Ls11,.Lzero_rest
	.quad .Ld11,.Lzero_rest
#endif
	.quad .Le5,.Le_zero
	.previous
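	/*
	 * Fixup code.  On a fault inside the copy, the page fault handler
	 * looks the faulting address up in __ex_table (see the kernel's
	 * search_exception_tables()/fixup_exception()) and resumes
	 * execution at the paired label below.
	 */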
	/* eax: zero, ebx: 64 */
.Ls1e:	addl $8,%eax		/* eax: bytes left uncopied: Ls1e: 64 .. Ls8e: 8 */
.Ls2e:	addl $8,%eax
.Ls3e:	addl $8,%eax
.Ls4e:	addl $8,%eax
.Ls5e:	addl $8,%eax
.Ls6e:	addl $8,%eax
.Ls7e:	addl $8,%eax
.Ls8e:	addl $8,%eax
	addq %rbx,%rdi		/* +64 */
	subq %rax,%rdi		/* correct destination with computed offset */
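	/*
	 * The handlers above fall through: a fault at .Ls1 runs all eight
	 * addl's (64 bytes of the block uncopied), a fault at .Ls8 only the
	 * last one (8 bytes).  %rdi now points at the first uncopied byte.
	 */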
	shlq $6,%rdx		/* loop counter * 64 (stride length) */
	addq %rax,%rdx		/* add offset to loopcnt */
	andl $63,%ecx		/* remaining bytes */
	addq %rcx,%rdx		/* add them */
	jmp  .Lzero_rest	/* rdx: total bytes not copied */
	/* exception on quad word loop in tail handling */
	/* ecx: loopcnt/8, %edx: length, rdi: correct */
.Le_quad:
	shll $3,%ecx		/* remaining quadwords -> bytes */
	andl $7,%edx
	addl %ecx,%edx
	/* edx: bytes to zero, rdi: dest, eax: zero */
.Lzero_rest:
	cmpl $0,(%rsp)		/* zero flag set? */
	jz   .Le_zero
	movq %rdx,%rcx
.Le_byte:
	xorl %eax,%eax
.Le5:	rep
	stosb
	/* when there is another exception while zeroing the rest just return */
.Le_zero:
	movq %rdx,%rax
	jmp  .Lende
	CFI_ENDPROC
ENDPROC(__copy_user_nocache)