Merge tag 'wireless-drivers-for-davem-2015-06-01' of git://git.kernel.org/pub/scm...
[deliverable/linux.git] / arch / x86 / lib / copy_user_nocache_64.S
1 /*
2 * Copyright 2008 Vitaly Mayatskikh <vmayatsk@redhat.com>
3 * Copyright 2002 Andi Kleen, SuSE Labs.
4 * Subject to the GNU Public License v2.
5 *
6 * Functions to copy from and to user space.
7 */
8
9 #include <linux/linkage.h>
10 #include <asm/dwarf2.h>
11
12 #define FIX_ALIGNMENT 1
13
14 #include <asm/current.h>
15 #include <asm/asm-offsets.h>
16 #include <asm/thread_info.h>
17 #include <asm/asm.h>
18 #include <asm/smap.h>
19
/*
 * ALIGN_DESTINATION: byte-copy from (%rsi) to (%rdi) until %rdi is
 * 8-byte aligned, decrementing the remaining byte count in %edx as it
 * goes.  The copy may fault on a user-space access; the .fixup entry
 * (103) restores the not-yet-copied alignment bytes into %edx and
 * tails into copy_user_handle_tail.
 * Clobbers: %al, %ecx; advances %rsi/%rdi; adjusts %edx.
 */
20 .macro ALIGN_DESTINATION
21 #ifdef FIX_ALIGNMENT
22 /* check for bad alignment of destination */
23 movl %edi,%ecx /* low bits of the destination address */
24 andl $7,%ecx /* ecx = misalignment within an 8-byte quad */
25 jz 102f /* already aligned */
26 subl $8,%ecx /* ecx = misalignment - 8 */
27 negl %ecx /* ecx = bytes to copy to reach alignment (1..7) */
28 subl %ecx,%edx /* charge the alignment bytes against the count */
29 100: movb (%rsi),%al /* faults here or at 101 resume at 103 below */
30 101: movb %al,(%rdi)
31 incq %rsi
32 incq %rdi
33 decl %ecx
34 jnz 100b
35 102:
36 .section .fixup,"ax"
37 103: addl %ecx,%edx /* ecx is zerorest also */
38 jmp copy_user_handle_tail
39 .previous
40
41 _ASM_EXTABLE(100b,103b)
42 _ASM_EXTABLE(101b,103b)
43 #endif
44 .endm
45
46 /*
47 * copy_user_nocache - Uncached memory copy with exception handling
48 * This will force destination/source out of cache for more performance.
49 */
/*
 * __copy_user_nocache - copy to/from user space using non-temporal
 * stores (movnti) so the destination is not pulled into the cache.
 *
 * In:  %rdi = destination, %rsi = source, %edx = byte count
 * Out: %rax = 0 on success.  On a fault, the .fixup code computes the
 *      number of bytes not yet copied into %edx and jumps to
 *      copy_user_handle_tail (defined elsewhere; NOTE(review): the
 *      exact tail/zeroing contract lives in that helper).
 *
 * Structure: 64-byte unrolled movnti loop (labels 1-16), then an
 * 8-byte movnti loop (18-19), then a byte loop (21-22).  Every
 * numbered label below is paired with an _ASM_EXTABLE entry mapping
 * a fault at that instruction to the matching fixup (30/40/50).
 */
50 ENTRY(__copy_user_nocache)
51 CFI_STARTPROC
52 ASM_STAC /* open the SMAP window for user accesses */
53 cmpl $8,%edx
54 jb 20f /* less than 8 bytes, go to byte copy loop */
55 ALIGN_DESTINATION /* may fault; adjusts %edx, advances %rsi/%rdi */
56 movl %edx,%ecx
57 andl $63,%edx /* edx = leftover bytes after whole 64-byte blocks */
58 shrl $6,%ecx /* ecx = number of 64-byte blocks */
59 jz 17f /* no full block: fall through to 8-byte loop */
60 1: movq (%rsi),%r8
61 2: movq 1*8(%rsi),%r9
62 3: movq 2*8(%rsi),%r10
63 4: movq 3*8(%rsi),%r11
64 5: movnti %r8,(%rdi) /* non-temporal: bypass the cache */
65 6: movnti %r9,1*8(%rdi)
66 7: movnti %r10,2*8(%rdi)
67 8: movnti %r11,3*8(%rdi)
68 9: movq 4*8(%rsi),%r8
69 10: movq 5*8(%rsi),%r9
70 11: movq 6*8(%rsi),%r10
71 12: movq 7*8(%rsi),%r11
72 13: movnti %r8,4*8(%rdi)
73 14: movnti %r9,5*8(%rdi)
74 15: movnti %r10,6*8(%rdi)
75 16: movnti %r11,7*8(%rdi)
76 leaq 64(%rsi),%rsi
77 leaq 64(%rdi),%rdi
78 decl %ecx
79 jnz 1b
80 17: movl %edx,%ecx /* leftover bytes from above */
81 andl $7,%edx /* edx = trailing bytes (< 8) */
82 shrl $3,%ecx /* ecx = number of whole 8-byte words */
83 jz 20f
84 18: movq (%rsi),%r8
85 19: movnti %r8,(%rdi)
86 leaq 8(%rsi),%rsi
87 leaq 8(%rdi),%rdi
88 decl %ecx
89 jnz 18b
90 20: andl %edx,%edx /* any trailing bytes left? */
91 jz 23f
92 movl %edx,%ecx
93 21: movb (%rsi),%al
94 22: movb %al,(%rdi)
95 incq %rsi
96 incq %rdi
97 decl %ecx
98 jnz 21b
99 23: xorl %eax,%eax /* success: return 0 */
100 ASM_CLAC /* close the SMAP window */
101 sfence /* order the non-temporal stores before returning */
102 ret
103
104 .section .fixup,"ax"
/*
 * Fault recovery: rebuild the "bytes not copied" count in %edx, fence
 * the movnti stores already issued, and hand off to the tail handler.
 */
105 30: shll $6,%ecx /* remaining 64-byte blocks -> bytes */
106 addl %ecx,%edx /* plus the sub-block leftover */
107 jmp 60f
108 40: lea (%rdx,%rcx,8),%rdx /* trailing bytes + remaining words*8 */
109 jmp 60f
110 50: movl %ecx,%edx /* bytes left in the byte loop */
111 60: sfence /* drain non-temporal stores before the tail */
112 jmp copy_user_handle_tail
113 .previous
114
115 _ASM_EXTABLE(1b,30b)
116 _ASM_EXTABLE(2b,30b)
117 _ASM_EXTABLE(3b,30b)
118 _ASM_EXTABLE(4b,30b)
119 _ASM_EXTABLE(5b,30b)
120 _ASM_EXTABLE(6b,30b)
121 _ASM_EXTABLE(7b,30b)
122 _ASM_EXTABLE(8b,30b)
123 _ASM_EXTABLE(9b,30b)
124 _ASM_EXTABLE(10b,30b)
125 _ASM_EXTABLE(11b,30b)
126 _ASM_EXTABLE(12b,30b)
127 _ASM_EXTABLE(13b,30b)
128 _ASM_EXTABLE(14b,30b)
129 _ASM_EXTABLE(15b,30b)
130 _ASM_EXTABLE(16b,30b)
131 _ASM_EXTABLE(18b,40b)
132 _ASM_EXTABLE(19b,40b)
133 _ASM_EXTABLE(21b,50b)
134 _ASM_EXTABLE(22b,50b)
135 CFI_ENDPROC
136 ENDPROC(__copy_user_nocache)
This page took 0.05154 seconds and 5 git commands to generate.