Commit | Line | Data |
---|---|---|
3f50dbc1 | 1 | /* |
1da177e4 LT |
2 | * User address space access functions. |
3 | * The non inlined parts of asm-i386/uaccess.h are here. | |
4 | * | |
5 | * Copyright 1997 Andi Kleen <ak@muc.de> | |
6 | * Copyright 1997 Linus Torvalds | |
7 | */ | |
1da177e4 LT |
8 | #include <linux/mm.h> |
9 | #include <linux/highmem.h> | |
10 | #include <linux/blkdev.h> | |
11 | #include <linux/module.h> | |
3fcfab16 | 12 | #include <linux/backing-dev.h> |
b6a8b316 | 13 | #include <linux/interrupt.h> |
1da177e4 LT |
14 | #include <asm/uaccess.h> |
15 | #include <asm/mmx.h> | |
16 | ||
/*
 * __movsl_is_ok - decide whether a rep;movsl-based copy may be used for an
 * @n-byte transfer between addresses @a1 and @a2.
 *
 * With CONFIG_X86_INTEL_USERCOPY, large copies (>= 64 bytes) are rejected
 * when source and destination are not mutually aligned with respect to
 * movsl_mask.mask; rejected copies fall back to the unrolled Intel
 * variants below.  Otherwise movsl is always acceptable.
 */
static inline int __movsl_is_ok(unsigned long a1, unsigned long a2, unsigned long n)
{
#ifdef CONFIG_X86_INTEL_USERCOPY
	return !(n >= 64 && ((a1 ^ a2) & movsl_mask.mask));
#else
	return 1;
#endif
}

/* Convenience wrapper: accepts pointers, casts to unsigned long. */
#define movsl_is_ok(a1, a2, n) \
	__movsl_is_ok((unsigned long)(a1), (unsigned long)(a2), (n))
1da177e4 LT |
27 | |
/*
 * Copy a null terminated string from userspace.
 */

/*
 * __do_strncpy_from_user(dst, src, count, res) - asm core shared by
 * strncpy_from_user() and __strncpy_from_user().
 *
 * Copies bytes from user pointer @src into kernel buffer @dst until a NUL
 * byte has been copied or @count bytes have been transferred.  On return,
 * @res holds the number of bytes copied before the NUL (i.e. the string
 * length), @count if no NUL was seen within @count bytes, or -EFAULT if a
 * user-space access faulted (some data may already have been copied).
 *
 * Comments cannot be interleaved with the continuation lines below, so the
 * asm is annotated here:
 *   - the testl/jz pair skips the loop entirely when count is zero;
 *   - 0: lodsb/stosb copies one byte per iteration, exiting at 1: on NUL
 *     or falling through when the count in %1 is exhausted;
 *   - 1: subl %1,%0 turns the remaining count into the copied length;
 *   - 3: is the fault fixup: it stores -EFAULT (%5) into res and resumes
 *     at 2:, wired up by the _ASM_EXTABLE entry for the load at 0:.
 * might_sleep()/might_lock_read() document that this may fault and take
 * mmap_sem, so it must be called from sleepable context.
 */
#define __do_strncpy_from_user(dst, src, count, res)			\
do {									\
	int __d0, __d1, __d2;						\
	might_sleep();							\
	if (current->mm)						\
		might_lock_read(&current->mm->mmap_sem);		\
	__asm__ __volatile__(						\
		" testl %1,%1\n"					\
		" jz 2f\n"						\
		"0: lodsb\n"						\
		" stosb\n"						\
		" testb %%al,%%al\n"					\
		" jz 1f\n"						\
		" decl %1\n"						\
		" jnz 0b\n"						\
		"1: subl %1,%0\n"					\
		"2:\n"							\
		".section .fixup,\"ax\"\n"				\
		"3: movl %5,%0\n"					\
		" jmp 2b\n"						\
		".previous\n"						\
		_ASM_EXTABLE(0b,3b)					\
		: "=d"(res), "=c"(count), "=&a" (__d0), "=&S" (__d1),	\
		  "=&D" (__d2)						\
		: "i"(-EFAULT), "0"(count), "1"(count), "3"(src), "4"(dst) \
		: "memory");						\
} while (0)
59 | ||
60 | /** | |
61 | * __strncpy_from_user: - Copy a NUL terminated string from userspace, with less checking. | |
62 | * @dst: Destination address, in kernel space. This buffer must be at | |
63 | * least @count bytes long. | |
64 | * @src: Source address, in user space. | |
65 | * @count: Maximum number of bytes to copy, including the trailing NUL. | |
3f50dbc1 | 66 | * |
1da177e4 LT |
67 | * Copies a NUL-terminated string from userspace to kernel space. |
68 | * Caller must check the specified block with access_ok() before calling | |
69 | * this function. | |
70 | * | |
71 | * On success, returns the length of the string (not including the trailing | |
72 | * NUL). | |
73 | * | |
74 | * If access to userspace fails, returns -EFAULT (some data may have been | |
75 | * copied). | |
76 | * | |
77 | * If @count is smaller than the length of the string, copies @count bytes | |
78 | * and returns @count. | |
79 | */ | |
80 | long | |
81 | __strncpy_from_user(char *dst, const char __user *src, long count) | |
82 | { | |
83 | long res; | |
84 | __do_strncpy_from_user(dst, src, count, res); | |
85 | return res; | |
86 | } | |
129f6946 | 87 | EXPORT_SYMBOL(__strncpy_from_user); |
1da177e4 LT |
88 | |
89 | /** | |
90 | * strncpy_from_user: - Copy a NUL terminated string from userspace. | |
91 | * @dst: Destination address, in kernel space. This buffer must be at | |
92 | * least @count bytes long. | |
93 | * @src: Source address, in user space. | |
94 | * @count: Maximum number of bytes to copy, including the trailing NUL. | |
3f50dbc1 | 95 | * |
1da177e4 LT |
96 | * Copies a NUL-terminated string from userspace to kernel space. |
97 | * | |
98 | * On success, returns the length of the string (not including the trailing | |
99 | * NUL). | |
100 | * | |
101 | * If access to userspace fails, returns -EFAULT (some data may have been | |
102 | * copied). | |
103 | * | |
104 | * If @count is smaller than the length of the string, copies @count bytes | |
105 | * and returns @count. | |
106 | */ | |
107 | long | |
108 | strncpy_from_user(char *dst, const char __user *src, long count) | |
109 | { | |
110 | long res = -EFAULT; | |
111 | if (access_ok(VERIFY_READ, src, 1)) | |
112 | __do_strncpy_from_user(dst, src, count, res); | |
113 | return res; | |
114 | } | |
129f6946 | 115 | EXPORT_SYMBOL(strncpy_from_user); |
1da177e4 LT |
116 | |
/*
 * Zero Userspace
 */

/*
 * __do_clear_user(addr, size) - asm core shared by clear_user() and
 * __clear_user().
 *
 * Zeroes @size bytes at user address @addr: a rep;stosl for the whole
 * dwords (size / 4, preloaded into %ecx) followed by a rep;stosb for the
 * trailing size & 3 bytes (moved into %ecx via %2).  On return the macro
 * argument @size has been rewritten with the number of bytes NOT cleared:
 * 0 on success, or a residue computed by the fixup at 3: (remaining
 * dwords * 4 plus the tail bytes) if the dword loop faulted.  A fault in
 * the byte loop at 1: simply resumes at 2: with the remaining byte count.
 * might_sleep()/might_lock_read() document that this may fault and take
 * mmap_sem, so it must be called from sleepable context.
 */
#define __do_clear_user(addr,size)					\
do {									\
	int __d0;							\
	might_sleep();							\
	if (current->mm)						\
		might_lock_read(&current->mm->mmap_sem);		\
	__asm__ __volatile__(						\
		"0: rep; stosl\n"					\
		" movl %2,%0\n"						\
		"1: rep; stosb\n"					\
		"2:\n"							\
		".section .fixup,\"ax\"\n"				\
		"3: lea 0(%2,%0,4),%0\n"				\
		" jmp 2b\n"						\
		".previous\n"						\
		_ASM_EXTABLE(0b,3b)					\
		_ASM_EXTABLE(1b,2b)					\
		: "=&c"(size), "=&D" (__d0)				\
		: "r"(size & 3), "0"(size / 4), "1"(addr), "a"(0));	\
} while (0)
141 | ||
142 | /** | |
143 | * clear_user: - Zero a block of memory in user space. | |
144 | * @to: Destination address, in user space. | |
145 | * @n: Number of bytes to zero. | |
146 | * | |
147 | * Zero a block of memory in user space. | |
148 | * | |
149 | * Returns number of bytes that could not be cleared. | |
150 | * On success, this will be zero. | |
151 | */ | |
152 | unsigned long | |
153 | clear_user(void __user *to, unsigned long n) | |
154 | { | |
1da177e4 LT |
155 | if (access_ok(VERIFY_WRITE, to, n)) |
156 | __do_clear_user(to, n); | |
157 | return n; | |
158 | } | |
129f6946 | 159 | EXPORT_SYMBOL(clear_user); |
1da177e4 LT |
160 | |
/**
 * __clear_user: - Zero a block of memory in user space, with less checking.
 * @to:   Destination address, in user space.
 * @n:    Number of bytes to zero.
 *
 * Zero a block of memory in user space.  Caller must check
 * the specified block with access_ok() before calling this function.
 *
 * Returns number of bytes that could not be cleared.
 * On success, this will be zero.
 */
unsigned long
__clear_user(void __user *to, unsigned long n)
{
	/* __do_clear_user() rewrites n with the number of bytes left. */
	__do_clear_user(to, n);
	return n;
}
EXPORT_SYMBOL(__clear_user);
1da177e4 LT |
179 | |
180 | /** | |
35e38a6e | 181 | * strnlen_user: - Get the size of a string in user space. |
1da177e4 LT |
182 | * @s: The string to measure. |
183 | * @n: The maximum valid length | |
184 | * | |
185 | * Get the size of a NUL-terminated string in user space. | |
186 | * | |
187 | * Returns the size of the string INCLUDING the terminating NUL. | |
188 | * On exception, returns 0. | |
189 | * If the string is too long, returns a value greater than @n. | |
190 | */ | |
191 | long strnlen_user(const char __user *s, long n) | |
192 | { | |
193 | unsigned long mask = -__addr_ok(s); | |
194 | unsigned long res, tmp; | |
195 | ||
196 | might_sleep(); | |
c10d38dd NP |
197 | if (current->mm) |
198 | might_lock_read(¤t->mm->mmap_sem); | |
1da177e4 LT |
199 | |
200 | __asm__ __volatile__( | |
201 | " testl %0, %0\n" | |
202 | " jz 3f\n" | |
203 | " andl %0,%%ecx\n" | |
204 | "0: repne; scasb\n" | |
205 | " setne %%al\n" | |
206 | " subl %%ecx,%0\n" | |
207 | " addl %0,%%eax\n" | |
208 | "1:\n" | |
209 | ".section .fixup,\"ax\"\n" | |
210 | "2: xorl %%eax,%%eax\n" | |
211 | " jmp 1b\n" | |
212 | "3: movb $1,%%al\n" | |
213 | " jmp 1b\n" | |
214 | ".previous\n" | |
215 | ".section __ex_table,\"a\"\n" | |
216 | " .align 4\n" | |
217 | " .long 0b,2b\n" | |
218 | ".previous" | |
219 | :"=r" (n), "=D" (s), "=a" (res), "=c" (tmp) | |
220 | :"0" (n), "1" (s), "2" (0), "3" (mask) | |
221 | :"cc"); | |
222 | return res & mask; | |
223 | } | |
129f6946 | 224 | EXPORT_SYMBOL(strnlen_user); |
1da177e4 LT |
225 | |
226 | #ifdef CONFIG_X86_INTEL_USERCOPY | |
/*
 * __copy_user_intel - unrolled kernel-to-user copy for CPUs where
 * rep;movsl is slow on mutually misaligned buffers (see movsl_is_ok()).
 *
 * Copies @size bytes from kernel @from to user @to in 64-byte chunks
 * (eight eax/edx dword pairs per iteration, with the loads at labels
 * 1:/2: touching 32/64 bytes ahead before each chunk), then finishes the
 * sub-64-byte tail with rep;movsl + rep;movsb.  Returns the number of
 * bytes NOT copied: 0 on success, or a residue computed by the fixup at
 * 101: (remaining dwords * 4 + tail bytes) when the rep;movsl at 99:
 * faults; every numbered label has an __ex_table entry landing at 100:.
 * Only the store side (%3 = user destination) can legitimately fault here,
 * but all accesses are covered.
 */
static unsigned long
__copy_user_intel(void __user *to, const void *from, unsigned long size)
{
	int d0, d1;
	__asm__ __volatile__(
		" .align 2,0x90\n"
		"1: movl 32(%4), %%eax\n"	/* touch a cache line ahead */
		" cmpl $67, %0\n"
		" jbe 3f\n"
		"2: movl 64(%4), %%eax\n"	/* touch one more line ahead */
		" .align 2,0x90\n"
		"3: movl 0(%4), %%eax\n"
		"4: movl 4(%4), %%edx\n"
		"5: movl %%eax, 0(%3)\n"
		"6: movl %%edx, 4(%3)\n"
		"7: movl 8(%4), %%eax\n"
		"8: movl 12(%4),%%edx\n"
		"9: movl %%eax, 8(%3)\n"
		"10: movl %%edx, 12(%3)\n"
		"11: movl 16(%4), %%eax\n"
		"12: movl 20(%4), %%edx\n"
		"13: movl %%eax, 16(%3)\n"
		"14: movl %%edx, 20(%3)\n"
		"15: movl 24(%4), %%eax\n"
		"16: movl 28(%4), %%edx\n"
		"17: movl %%eax, 24(%3)\n"
		"18: movl %%edx, 28(%3)\n"
		"19: movl 32(%4), %%eax\n"
		"20: movl 36(%4), %%edx\n"
		"21: movl %%eax, 32(%3)\n"
		"22: movl %%edx, 36(%3)\n"
		"23: movl 40(%4), %%eax\n"
		"24: movl 44(%4), %%edx\n"
		"25: movl %%eax, 40(%3)\n"
		"26: movl %%edx, 44(%3)\n"
		"27: movl 48(%4), %%eax\n"
		"28: movl 52(%4), %%edx\n"
		"29: movl %%eax, 48(%3)\n"
		"30: movl %%edx, 52(%3)\n"
		"31: movl 56(%4), %%eax\n"
		"32: movl 60(%4), %%edx\n"
		"33: movl %%eax, 56(%3)\n"
		"34: movl %%edx, 60(%3)\n"
		" addl $-64, %0\n"
		" addl $64, %4\n"
		" addl $64, %3\n"
		" cmpl $63, %0\n"
		" ja 1b\n"
		/* tail: size/4 dwords then size%3 bytes */
		"35: movl %0, %%eax\n"
		" shrl $2, %0\n"
		" andl $3, %%eax\n"
		" cld\n"
		"99: rep; movsl\n"
		"36: movl %%eax, %0\n"
		"37: rep; movsb\n"
		"100:\n"
		".section .fixup,\"ax\"\n"
		"101: lea 0(%%eax,%0,4),%0\n"	/* bytes left = ecx*4 + tail */
		" jmp 100b\n"
		".previous\n"
		".section __ex_table,\"a\"\n"
		" .align 4\n"
		" .long 1b,100b\n"
		" .long 2b,100b\n"
		" .long 3b,100b\n"
		" .long 4b,100b\n"
		" .long 5b,100b\n"
		" .long 6b,100b\n"
		" .long 7b,100b\n"
		" .long 8b,100b\n"
		" .long 9b,100b\n"
		" .long 10b,100b\n"
		" .long 11b,100b\n"
		" .long 12b,100b\n"
		" .long 13b,100b\n"
		" .long 14b,100b\n"
		" .long 15b,100b\n"
		" .long 16b,100b\n"
		" .long 17b,100b\n"
		" .long 18b,100b\n"
		" .long 19b,100b\n"
		" .long 20b,100b\n"
		" .long 21b,100b\n"
		" .long 22b,100b\n"
		" .long 23b,100b\n"
		" .long 24b,100b\n"
		" .long 25b,100b\n"
		" .long 26b,100b\n"
		" .long 27b,100b\n"
		" .long 28b,100b\n"
		" .long 29b,100b\n"
		" .long 30b,100b\n"
		" .long 31b,100b\n"
		" .long 32b,100b\n"
		" .long 33b,100b\n"
		" .long 34b,100b\n"
		" .long 35b,100b\n"
		" .long 36b,100b\n"
		" .long 37b,100b\n"
		" .long 99b,101b\n"
		".previous"
		: "=&c"(size), "=&D" (d0), "=&S" (d1)
		: "1"(to), "2"(from), "0"(size)
		: "eax", "edx", "memory");
	return size;
}
333 | ||
/*
 * __copy_user_zeroing_intel - unrolled user-to-kernel copy that zero-fills
 * the destination tail on a fault.
 *
 * Same 64-byte unrolled loop + rep;movsl/movsb tail as __copy_user_intel(),
 * but copying FROM user space; only the loads (numbered labels) can fault.
 * On a fault the fixup at 9:/16: first converts the remaining count back
 * to bytes (lea, for a fault in the rep;movsl at 6:), then rep;stosb
 * zeroes the rest of the kernel buffer so no uninitialized kernel memory
 * is exposed, preserving eax and the byte count across the stosb via
 * push/pop.  Returns the number of bytes not copied (0 on success).
 */
static unsigned long
__copy_user_zeroing_intel(void *to, const void __user *from, unsigned long size)
{
	int d0, d1;
	__asm__ __volatile__(
		" .align 2,0x90\n"
		"0: movl 32(%4), %%eax\n"	/* touch a cache line ahead */
		" cmpl $67, %0\n"
		" jbe 2f\n"
		"1: movl 64(%4), %%eax\n"	/* touch one more line ahead */
		" .align 2,0x90\n"
		"2: movl 0(%4), %%eax\n"
		"21: movl 4(%4), %%edx\n"
		" movl %%eax, 0(%3)\n"
		" movl %%edx, 4(%3)\n"
		"3: movl 8(%4), %%eax\n"
		"31: movl 12(%4),%%edx\n"
		" movl %%eax, 8(%3)\n"
		" movl %%edx, 12(%3)\n"
		"4: movl 16(%4), %%eax\n"
		"41: movl 20(%4), %%edx\n"
		" movl %%eax, 16(%3)\n"
		" movl %%edx, 20(%3)\n"
		"10: movl 24(%4), %%eax\n"
		"51: movl 28(%4), %%edx\n"
		" movl %%eax, 24(%3)\n"
		" movl %%edx, 28(%3)\n"
		"11: movl 32(%4), %%eax\n"
		"61: movl 36(%4), %%edx\n"
		" movl %%eax, 32(%3)\n"
		" movl %%edx, 36(%3)\n"
		"12: movl 40(%4), %%eax\n"
		"71: movl 44(%4), %%edx\n"
		" movl %%eax, 40(%3)\n"
		" movl %%edx, 44(%3)\n"
		"13: movl 48(%4), %%eax\n"
		"81: movl 52(%4), %%edx\n"
		" movl %%eax, 48(%3)\n"
		" movl %%edx, 52(%3)\n"
		"14: movl 56(%4), %%eax\n"
		"91: movl 60(%4), %%edx\n"
		" movl %%eax, 56(%3)\n"
		" movl %%edx, 60(%3)\n"
		" addl $-64, %0\n"
		" addl $64, %4\n"
		" addl $64, %3\n"
		" cmpl $63, %0\n"
		" ja 0b\n"
		/* tail: size/4 dwords then size%3 bytes */
		"5: movl %0, %%eax\n"
		" shrl $2, %0\n"
		" andl $3, %%eax\n"
		" cld\n"
		"6: rep; movsl\n"
		" movl %%eax,%0\n"
		"7: rep; movsb\n"
		"8:\n"
		".section .fixup,\"ax\"\n"
		"9: lea 0(%%eax,%0,4),%0\n"	/* bytes left = ecx*4 + tail */
		"16: pushl %0\n"
		" pushl %%eax\n"
		" xorl %%eax,%%eax\n"
		" rep; stosb\n"			/* zero the untouched tail */
		" popl %%eax\n"
		" popl %0\n"
		" jmp 8b\n"
		".previous\n"
		".section __ex_table,\"a\"\n"
		" .align 4\n"
		" .long 0b,16b\n"
		" .long 1b,16b\n"
		" .long 2b,16b\n"
		" .long 21b,16b\n"
		" .long 3b,16b\n"
		" .long 31b,16b\n"
		" .long 4b,16b\n"
		" .long 41b,16b\n"
		" .long 10b,16b\n"
		" .long 51b,16b\n"
		" .long 11b,16b\n"
		" .long 61b,16b\n"
		" .long 12b,16b\n"
		" .long 71b,16b\n"
		" .long 13b,16b\n"
		" .long 81b,16b\n"
		" .long 14b,16b\n"
		" .long 91b,16b\n"
		" .long 6b,9b\n"
		" .long 7b,16b\n"
		".previous"
		: "=&c"(size), "=&D" (d0), "=&S" (d1)
		: "1"(to), "2"(from), "0"(size)
		: "eax", "edx", "memory");
	return size;
}
c22ce143 HY |
/*
 * Non Temporal Hint version of __copy_user_zeroing_intel.  It is cache aware.
 * hyoshiok@miraclelinux.com
 *
 * Identical structure to __copy_user_zeroing_intel() except that the
 * stores to the kernel buffer use movnti (non-temporal hint, bypassing
 * the cache) and an sfence is issued after the unrolled loop to order the
 * non-temporal stores before the rep;movsl/movsb tail.  On a fault the
 * fixup at 9:/16: zero-fills the remaining destination bytes, same as the
 * cached variant.  Returns the number of bytes not copied (0 on success).
 * Callers gate this on cpu_has_xmm2 (movnti requires SSE2).
 */
static unsigned long __copy_user_zeroing_intel_nocache(void *to,
				const void __user *from, unsigned long size)
{
	int d0, d1;

	__asm__ __volatile__(
		" .align 2,0x90\n"
		"0: movl 32(%4), %%eax\n"	/* touch a cache line ahead */
		" cmpl $67, %0\n"
		" jbe 2f\n"
		"1: movl 64(%4), %%eax\n"	/* touch one more line ahead */
		" .align 2,0x90\n"
		"2: movl 0(%4), %%eax\n"
		"21: movl 4(%4), %%edx\n"
		" movnti %%eax, 0(%3)\n"
		" movnti %%edx, 4(%3)\n"
		"3: movl 8(%4), %%eax\n"
		"31: movl 12(%4),%%edx\n"
		" movnti %%eax, 8(%3)\n"
		" movnti %%edx, 12(%3)\n"
		"4: movl 16(%4), %%eax\n"
		"41: movl 20(%4), %%edx\n"
		" movnti %%eax, 16(%3)\n"
		" movnti %%edx, 20(%3)\n"
		"10: movl 24(%4), %%eax\n"
		"51: movl 28(%4), %%edx\n"
		" movnti %%eax, 24(%3)\n"
		" movnti %%edx, 28(%3)\n"
		"11: movl 32(%4), %%eax\n"
		"61: movl 36(%4), %%edx\n"
		" movnti %%eax, 32(%3)\n"
		" movnti %%edx, 36(%3)\n"
		"12: movl 40(%4), %%eax\n"
		"71: movl 44(%4), %%edx\n"
		" movnti %%eax, 40(%3)\n"
		" movnti %%edx, 44(%3)\n"
		"13: movl 48(%4), %%eax\n"
		"81: movl 52(%4), %%edx\n"
		" movnti %%eax, 48(%3)\n"
		" movnti %%edx, 52(%3)\n"
		"14: movl 56(%4), %%eax\n"
		"91: movl 60(%4), %%edx\n"
		" movnti %%eax, 56(%3)\n"
		" movnti %%edx, 60(%3)\n"
		" addl $-64, %0\n"
		" addl $64, %4\n"
		" addl $64, %3\n"
		" cmpl $63, %0\n"
		" ja 0b\n"
		" sfence \n"			/* order NT stores before tail */
		"5: movl %0, %%eax\n"
		" shrl $2, %0\n"
		" andl $3, %%eax\n"
		" cld\n"
		"6: rep; movsl\n"
		" movl %%eax,%0\n"
		"7: rep; movsb\n"
		"8:\n"
		".section .fixup,\"ax\"\n"
		"9: lea 0(%%eax,%0,4),%0\n"	/* bytes left = ecx*4 + tail */
		"16: pushl %0\n"
		" pushl %%eax\n"
		" xorl %%eax,%%eax\n"
		" rep; stosb\n"			/* zero the untouched tail */
		" popl %%eax\n"
		" popl %0\n"
		" jmp 8b\n"
		".previous\n"
		".section __ex_table,\"a\"\n"
		" .align 4\n"
		" .long 0b,16b\n"
		" .long 1b,16b\n"
		" .long 2b,16b\n"
		" .long 21b,16b\n"
		" .long 3b,16b\n"
		" .long 31b,16b\n"
		" .long 4b,16b\n"
		" .long 41b,16b\n"
		" .long 10b,16b\n"
		" .long 51b,16b\n"
		" .long 11b,16b\n"
		" .long 61b,16b\n"
		" .long 12b,16b\n"
		" .long 71b,16b\n"
		" .long 13b,16b\n"
		" .long 81b,16b\n"
		" .long 14b,16b\n"
		" .long 91b,16b\n"
		" .long 6b,9b\n"
		" .long 7b,16b\n"
		".previous"
		: "=&c"(size), "=&D" (d0), "=&S" (d1)
		: "1"(to), "2"(from), "0"(size)
		: "eax", "edx", "memory");
	return size;
}
530 | ||
7c12d811 N |
531 | static unsigned long __copy_user_intel_nocache(void *to, |
532 | const void __user *from, unsigned long size) | |
533 | { | |
3f50dbc1 | 534 | int d0, d1; |
7c12d811 N |
535 | |
536 | __asm__ __volatile__( | |
537 | " .align 2,0x90\n" | |
538 | "0: movl 32(%4), %%eax\n" | |
539 | " cmpl $67, %0\n" | |
540 | " jbe 2f\n" | |
541 | "1: movl 64(%4), %%eax\n" | |
542 | " .align 2,0x90\n" | |
543 | "2: movl 0(%4), %%eax\n" | |
544 | "21: movl 4(%4), %%edx\n" | |
545 | " movnti %%eax, 0(%3)\n" | |
546 | " movnti %%edx, 4(%3)\n" | |
547 | "3: movl 8(%4), %%eax\n" | |
548 | "31: movl 12(%4),%%edx\n" | |
549 | " movnti %%eax, 8(%3)\n" | |
550 | " movnti %%edx, 12(%3)\n" | |
551 | "4: movl 16(%4), %%eax\n" | |
552 | "41: movl 20(%4), %%edx\n" | |
553 | " movnti %%eax, 16(%3)\n" | |
554 | " movnti %%edx, 20(%3)\n" | |
555 | "10: movl 24(%4), %%eax\n" | |
556 | "51: movl 28(%4), %%edx\n" | |
557 | " movnti %%eax, 24(%3)\n" | |
558 | " movnti %%edx, 28(%3)\n" | |
559 | "11: movl 32(%4), %%eax\n" | |
560 | "61: movl 36(%4), %%edx\n" | |
561 | " movnti %%eax, 32(%3)\n" | |
562 | " movnti %%edx, 36(%3)\n" | |
563 | "12: movl 40(%4), %%eax\n" | |
564 | "71: movl 44(%4), %%edx\n" | |
565 | " movnti %%eax, 40(%3)\n" | |
566 | " movnti %%edx, 44(%3)\n" | |
567 | "13: movl 48(%4), %%eax\n" | |
568 | "81: movl 52(%4), %%edx\n" | |
569 | " movnti %%eax, 48(%3)\n" | |
570 | " movnti %%edx, 52(%3)\n" | |
571 | "14: movl 56(%4), %%eax\n" | |
572 | "91: movl 60(%4), %%edx\n" | |
573 | " movnti %%eax, 56(%3)\n" | |
574 | " movnti %%edx, 60(%3)\n" | |
575 | " addl $-64, %0\n" | |
576 | " addl $64, %4\n" | |
577 | " addl $64, %3\n" | |
578 | " cmpl $63, %0\n" | |
579 | " ja 0b\n" | |
580 | " sfence \n" | |
581 | "5: movl %0, %%eax\n" | |
582 | " shrl $2, %0\n" | |
583 | " andl $3, %%eax\n" | |
584 | " cld\n" | |
585 | "6: rep; movsl\n" | |
586 | " movl %%eax,%0\n" | |
587 | "7: rep; movsb\n" | |
588 | "8:\n" | |
589 | ".section .fixup,\"ax\"\n" | |
590 | "9: lea 0(%%eax,%0,4),%0\n" | |
591 | "16: jmp 8b\n" | |
592 | ".previous\n" | |
593 | ".section __ex_table,\"a\"\n" | |
594 | " .align 4\n" | |
595 | " .long 0b,16b\n" | |
596 | " .long 1b,16b\n" | |
597 | " .long 2b,16b\n" | |
598 | " .long 21b,16b\n" | |
599 | " .long 3b,16b\n" | |
600 | " .long 31b,16b\n" | |
601 | " .long 4b,16b\n" | |
602 | " .long 41b,16b\n" | |
603 | " .long 10b,16b\n" | |
604 | " .long 51b,16b\n" | |
605 | " .long 11b,16b\n" | |
606 | " .long 61b,16b\n" | |
607 | " .long 12b,16b\n" | |
608 | " .long 71b,16b\n" | |
609 | " .long 13b,16b\n" | |
610 | " .long 81b,16b\n" | |
611 | " .long 14b,16b\n" | |
612 | " .long 91b,16b\n" | |
613 | " .long 6b,9b\n" | |
614 | " .long 7b,16b\n" | |
615 | ".previous" | |
616 | : "=&c"(size), "=&D" (d0), "=&S" (d1) | |
617 | : "1"(to), "2"(from), "0"(size) | |
618 | : "eax", "edx", "memory"); | |
619 | return size; | |
620 | } | |
621 | ||
1da177e4 | 622 | #else |
c22ce143 | 623 | |
1da177e4 LT |
624 | /* |
625 | * Leave these declared but undefined. They should not be any references to | |
626 | * them | |
627 | */ | |
c22ce143 HY |
628 | unsigned long __copy_user_zeroing_intel(void *to, const void __user *from, |
629 | unsigned long size); | |
630 | unsigned long __copy_user_intel(void __user *to, const void *from, | |
631 | unsigned long size); | |
632 | unsigned long __copy_user_zeroing_intel_nocache(void *to, | |
633 | const void __user *from, unsigned long size); | |
1da177e4 LT |
634 | #endif /* CONFIG_X86_INTEL_USERCOPY */ |
635 | ||
/*
 * Generic arbitrary sized copy.
 *
 * __copy_user(to, from, size) - rep;movs-based copy used when
 * movsl_is_ok() says the alignment is acceptable.  For copies larger than
 * 7 bytes it first copies enough single bytes (4:) to align the
 * destination to 8, then copies dwords (0:) and the remaining 0-3 bytes
 * (1:).  On return the macro argument @size has been rewritten with the
 * number of bytes NOT copied: the fixup at 5: handles a fault in the
 * alignment bytes (remaining bytes + pending dword bytes in %3) and 3:
 * handles a fault in the dword loop (remaining dwords * 4 + tail bytes).
 * The destination is left as-is on a fault — no zero-fill (contrast
 * __copy_user_zeroing below).
 */
#define __copy_user(to, from, size)					\
do {									\
	int __d0, __d1, __d2;						\
	__asm__ __volatile__(						\
		" cmp $7,%0\n"						\
		" jbe 1f\n"						\
		" movl %1,%0\n"						\
		" negl %0\n"						\
		" andl $7,%0\n"						\
		" subl %0,%3\n"						\
		"4: rep; movsb\n"					\
		" movl %3,%0\n"						\
		" shrl $2,%0\n"						\
		" andl $3,%3\n"						\
		" .align 2,0x90\n"					\
		"0: rep; movsl\n"					\
		" movl %3,%0\n"						\
		"1: rep; movsb\n"					\
		"2:\n"							\
		".section .fixup,\"ax\"\n"				\
		"5: addl %3,%0\n"					\
		" jmp 2b\n"						\
		"3: lea 0(%3,%0,4),%0\n"				\
		" jmp 2b\n"						\
		".previous\n"						\
		".section __ex_table,\"a\"\n"				\
		" .align 4\n"						\
		" .long 4b,5b\n"					\
		" .long 0b,3b\n"					\
		" .long 1b,2b\n"					\
		".previous"						\
		: "=&c"(size), "=&D" (__d0), "=&S" (__d1), "=r"(__d2)	\
		: "3"(size), "0"(size), "1"(to), "2"(from)		\
		: "memory");						\
} while (0)
672 | ||
3f50dbc1 | 673 | #define __copy_user_zeroing(to, from, size) \ |
1da177e4 LT |
674 | do { \ |
675 | int __d0, __d1, __d2; \ | |
676 | __asm__ __volatile__( \ | |
677 | " cmp $7,%0\n" \ | |
678 | " jbe 1f\n" \ | |
679 | " movl %1,%0\n" \ | |
680 | " negl %0\n" \ | |
681 | " andl $7,%0\n" \ | |
682 | " subl %0,%3\n" \ | |
683 | "4: rep; movsb\n" \ | |
684 | " movl %3,%0\n" \ | |
685 | " shrl $2,%0\n" \ | |
686 | " andl $3,%3\n" \ | |
687 | " .align 2,0x90\n" \ | |
688 | "0: rep; movsl\n" \ | |
689 | " movl %3,%0\n" \ | |
690 | "1: rep; movsb\n" \ | |
691 | "2:\n" \ | |
692 | ".section .fixup,\"ax\"\n" \ | |
693 | "5: addl %3,%0\n" \ | |
694 | " jmp 6f\n" \ | |
695 | "3: lea 0(%3,%0,4),%0\n" \ | |
696 | "6: pushl %0\n" \ | |
697 | " pushl %%eax\n" \ | |
698 | " xorl %%eax,%%eax\n" \ | |
699 | " rep; stosb\n" \ | |
700 | " popl %%eax\n" \ | |
701 | " popl %0\n" \ | |
702 | " jmp 2b\n" \ | |
703 | ".previous\n" \ | |
704 | ".section __ex_table,\"a\"\n" \ | |
705 | " .align 4\n" \ | |
706 | " .long 4b,5b\n" \ | |
707 | " .long 0b,3b\n" \ | |
708 | " .long 1b,6b\n" \ | |
709 | ".previous" \ | |
710 | : "=&c"(size), "=&D" (__d0), "=&S" (__d1), "=r"(__d2) \ | |
711 | : "3"(size), "0"(size), "1"(to), "2"(from) \ | |
712 | : "memory"); \ | |
713 | } while (0) | |
714 | ||
c22ce143 HY |
715 | unsigned long __copy_to_user_ll(void __user *to, const void *from, |
716 | unsigned long n) | |
1da177e4 | 717 | { |
1da177e4 LT |
718 | #ifndef CONFIG_X86_WP_WORKS_OK |
719 | if (unlikely(boot_cpu_data.wp_works_ok == 0) && | |
3f50dbc1 | 720 | ((unsigned long)to) < TASK_SIZE) { |
b6a8b316 TG |
721 | /* |
722 | * When we are in an atomic section (see | |
723 | * mm/filemap.c:file_read_actor), return the full | |
724 | * length to take the slow path. | |
725 | */ | |
726 | if (in_atomic()) | |
727 | return n; | |
728 | ||
3f50dbc1 | 729 | /* |
1da177e4 LT |
730 | * CPU does not honor the WP bit when writing |
731 | * from supervisory mode, and due to preemption or SMP, | |
732 | * the page tables can change at any time. | |
733 | * Do it manually. Manfred <manfred@colorfullife.com> | |
734 | */ | |
735 | while (n) { | |
3f50dbc1 | 736 | unsigned long offset = ((unsigned long)to)%PAGE_SIZE; |
1da177e4 LT |
737 | unsigned long len = PAGE_SIZE - offset; |
738 | int retval; | |
739 | struct page *pg; | |
740 | void *maddr; | |
3f50dbc1 | 741 | |
1da177e4 LT |
742 | if (len > n) |
743 | len = n; | |
744 | ||
745 | survive: | |
746 | down_read(¤t->mm->mmap_sem); | |
747 | retval = get_user_pages(current, current->mm, | |
3f50dbc1 | 748 | (unsigned long)to, 1, 1, 0, &pg, NULL); |
1da177e4 | 749 | |
b460cbc5 | 750 | if (retval == -ENOMEM && is_global_init(current)) { |
1da177e4 | 751 | up_read(¤t->mm->mmap_sem); |
3fcfab16 | 752 | congestion_wait(WRITE, HZ/50); |
1da177e4 LT |
753 | goto survive; |
754 | } | |
755 | ||
756 | if (retval != 1) { | |
757 | up_read(¤t->mm->mmap_sem); | |
3f50dbc1 PC |
758 | break; |
759 | } | |
1da177e4 LT |
760 | |
761 | maddr = kmap_atomic(pg, KM_USER0); | |
762 | memcpy(maddr + offset, from, len); | |
763 | kunmap_atomic(maddr, KM_USER0); | |
764 | set_page_dirty_lock(pg); | |
765 | put_page(pg); | |
766 | up_read(¤t->mm->mmap_sem); | |
767 | ||
768 | from += len; | |
769 | to += len; | |
770 | n -= len; | |
771 | } | |
772 | return n; | |
773 | } | |
774 | #endif | |
775 | if (movsl_is_ok(to, from, n)) | |
776 | __copy_user(to, from, n); | |
777 | else | |
778 | n = __copy_user_intel(to, from, n); | |
779 | return n; | |
780 | } | |
129f6946 | 781 | EXPORT_SYMBOL(__copy_to_user_ll); |
1da177e4 | 782 | |
c22ce143 HY |
783 | unsigned long __copy_from_user_ll(void *to, const void __user *from, |
784 | unsigned long n) | |
1da177e4 | 785 | { |
1da177e4 LT |
786 | if (movsl_is_ok(to, from, n)) |
787 | __copy_user_zeroing(to, from, n); | |
788 | else | |
789 | n = __copy_user_zeroing_intel(to, from, n); | |
790 | return n; | |
791 | } | |
129f6946 | 792 | EXPORT_SYMBOL(__copy_from_user_ll); |
1da177e4 | 793 | |
7c12d811 N |
794 | unsigned long __copy_from_user_ll_nozero(void *to, const void __user *from, |
795 | unsigned long n) | |
796 | { | |
7c12d811 N |
797 | if (movsl_is_ok(to, from, n)) |
798 | __copy_user(to, from, n); | |
799 | else | |
800 | n = __copy_user_intel((void __user *)to, | |
801 | (const void *)from, n); | |
802 | return n; | |
803 | } | |
804 | EXPORT_SYMBOL(__copy_from_user_ll_nozero); | |
805 | ||
c22ce143 HY |
806 | unsigned long __copy_from_user_ll_nocache(void *to, const void __user *from, |
807 | unsigned long n) | |
808 | { | |
c22ce143 | 809 | #ifdef CONFIG_X86_INTEL_USERCOPY |
3f50dbc1 PC |
810 | if (n > 64 && cpu_has_xmm2) |
811 | n = __copy_user_zeroing_intel_nocache(to, from, n); | |
c22ce143 HY |
812 | else |
813 | __copy_user_zeroing(to, from, n); | |
814 | #else | |
3f50dbc1 | 815 | __copy_user_zeroing(to, from, n); |
c22ce143 HY |
816 | #endif |
817 | return n; | |
818 | } | |
914c8269 | 819 | EXPORT_SYMBOL(__copy_from_user_ll_nocache); |
c22ce143 | 820 | |
7c12d811 N |
821 | unsigned long __copy_from_user_ll_nocache_nozero(void *to, const void __user *from, |
822 | unsigned long n) | |
823 | { | |
7c12d811 | 824 | #ifdef CONFIG_X86_INTEL_USERCOPY |
3f50dbc1 PC |
825 | if (n > 64 && cpu_has_xmm2) |
826 | n = __copy_user_intel_nocache(to, from, n); | |
7c12d811 N |
827 | else |
828 | __copy_user(to, from, n); | |
829 | #else | |
3f50dbc1 | 830 | __copy_user(to, from, n); |
7c12d811 N |
831 | #endif |
832 | return n; | |
833 | } | |
914c8269 | 834 | EXPORT_SYMBOL(__copy_from_user_ll_nocache_nozero); |
7c12d811 | 835 | |
1da177e4 LT |
836 | /** |
837 | * copy_to_user: - Copy a block of data into user space. | |
838 | * @to: Destination address, in user space. | |
839 | * @from: Source address, in kernel space. | |
840 | * @n: Number of bytes to copy. | |
841 | * | |
842 | * Context: User context only. This function may sleep. | |
843 | * | |
844 | * Copy data from kernel space to user space. | |
845 | * | |
846 | * Returns number of bytes that could not be copied. | |
847 | * On success, this will be zero. | |
848 | */ | |
849 | unsigned long | |
850 | copy_to_user(void __user *to, const void *from, unsigned long n) | |
851 | { | |
1da177e4 LT |
852 | if (access_ok(VERIFY_WRITE, to, n)) |
853 | n = __copy_to_user(to, from, n); | |
854 | return n; | |
855 | } | |
856 | EXPORT_SYMBOL(copy_to_user); | |
857 | ||
858 | /** | |
859 | * copy_from_user: - Copy a block of data from user space. | |
860 | * @to: Destination address, in kernel space. | |
861 | * @from: Source address, in user space. | |
862 | * @n: Number of bytes to copy. | |
863 | * | |
864 | * Context: User context only. This function may sleep. | |
865 | * | |
866 | * Copy data from user space to kernel space. | |
867 | * | |
868 | * Returns number of bytes that could not be copied. | |
869 | * On success, this will be zero. | |
870 | * | |
871 | * If some data could not be copied, this function will pad the copied | |
872 | * data to the requested size using zero bytes. | |
873 | */ | |
874 | unsigned long | |
875 | copy_from_user(void *to, const void __user *from, unsigned long n) | |
876 | { | |
1da177e4 LT |
877 | if (access_ok(VERIFY_READ, from, n)) |
878 | n = __copy_from_user(to, from, n); | |
879 | else | |
880 | memset(to, 0, n); | |
881 | return n; | |
882 | } | |
883 | EXPORT_SYMBOL(copy_from_user); |