/*
 * User address space access functions.
 * The non-inlined parts of asm-i386/uaccess.h are here.
 *
 * Copyright 1997 Andi Kleen <ak@muc.de>
 * Copyright 1997 Linus Torvalds
 */
#include <linux/mm.h>
#include <linux/highmem.h>
#include <linux/blkdev.h>
#include <linux/module.h>
#include <linux/backing-dev.h>
#include <linux/interrupt.h>
#include <asm/uaccess.h>
#include <asm/mmx.h>

#ifdef CONFIG_X86_INTEL_USERCOPY
/*
 * Alignment at which movsl is preferred for bulk memory copies.
 */
struct movsl_mask movsl_mask __read_mostly;
#endif

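/*
 * movsl is only a win for bulk copies when source and destination are
 * mutually aligned with respect to movsl_mask.mask (set up per-CPU at
 * boot).  For copies of 64 bytes or more whose low address bits differ
 * under the mask, report "not ok" so the callers below can use the
 * unrolled Intel routines instead.
 */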
static inline int __movsl_is_ok(unsigned long a1, unsigned long a2, unsigned long n)
{
#ifdef CONFIG_X86_INTEL_USERCOPY
        if (n >= 64 && ((a1 ^ a2) & movsl_mask.mask))
                return 0;
#endif
        return 1;
}
#define movsl_is_ok(a1, a2, n) \
        __movsl_is_ok((unsigned long)(a1), (unsigned long)(a2), (n))

/*
 * Copy a null terminated string from userspace.
 */

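/*
 * The asm below copies bytes with lodsb/stosb until either the NUL
 * terminator has been copied or @count bytes have been moved.  On
 * success the result is the string length (count minus what is left);
 * a fault lands in the fixup at 3:, which stores -EFAULT instead.
 */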
#define __do_strncpy_from_user(dst, src, count, res)                       \
do {                                                                       \
        int __d0, __d1, __d2;                                              \
        might_fault();                                                     \
        __asm__ __volatile__(                                              \
                "       testl %1,%1\n"                                     \
                "       jz 2f\n"                                           \
                "0:     lodsb\n"                                           \
                "       stosb\n"                                           \
                "       testb %%al,%%al\n"                                 \
                "       jz 1f\n"                                           \
                "       decl %1\n"                                         \
                "       jnz 0b\n"                                          \
                "1:     subl %1,%0\n"                                      \
                "2:\n"                                                     \
                ".section .fixup,\"ax\"\n"                                 \
                "3:     movl %5,%0\n"                                      \
                "       jmp 2b\n"                                          \
                ".previous\n"                                              \
                _ASM_EXTABLE(0b,3b)                                        \
                : "=&d"(res), "=&c"(count), "=&a" (__d0), "=&S" (__d1),    \
                  "=&D" (__d2)                                             \
                : "i"(-EFAULT), "0"(count), "1"(count), "3"(src), "4"(dst) \
                : "memory");                                               \
} while (0)

/**
 * __strncpy_from_user: - Copy a NUL terminated string from userspace, with less checking.
 * @dst:   Destination address, in kernel space.  This buffer must be at
 *         least @count bytes long.
 * @src:   Source address, in user space.
 * @count: Maximum number of bytes to copy, including the trailing NUL.
 *
 * Copies a NUL-terminated string from userspace to kernel space.
 * Caller must check the specified block with access_ok() before calling
 * this function.
 *
 * On success, returns the length of the string (not including the trailing
 * NUL).
 *
 * If access to userspace fails, returns -EFAULT (some data may have been
 * copied).
 *
 * If @count is smaller than the length of the string, copies @count bytes
 * and returns @count.
 */
long
__strncpy_from_user(char *dst, const char __user *src, long count)
{
        long res;
        __do_strncpy_from_user(dst, src, count, res);
        return res;
}
EXPORT_SYMBOL(__strncpy_from_user);

/**
 * strncpy_from_user: - Copy a NUL terminated string from userspace.
 * @dst:   Destination address, in kernel space.  This buffer must be at
 *         least @count bytes long.
 * @src:   Source address, in user space.
 * @count: Maximum number of bytes to copy, including the trailing NUL.
 *
 * Copies a NUL-terminated string from userspace to kernel space.
 *
 * On success, returns the length of the string (not including the trailing
 * NUL).
 *
 * If access to userspace fails, returns -EFAULT (some data may have been
 * copied).
 *
 * If @count is smaller than the length of the string, copies @count bytes
 * and returns @count.
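 *
 * A typical call site (illustrative only; "uname" and "name" are
 * placeholder identifiers, not taken from this file):
 *
 *        char name[32];
 *        long len = strncpy_from_user(name, uname, sizeof(name));
 *
 *        if (len < 0)
 *                return len;             /* -EFAULT */
 *        if (len == sizeof(name))
 *                return -ENAMETOOLONG;   /* truncated, not NUL-terminated */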
 */
long
strncpy_from_user(char *dst, const char __user *src, long count)
{
        long res = -EFAULT;
        if (access_ok(VERIFY_READ, src, 1))
                __do_strncpy_from_user(dst, src, count, res);
        return res;
}
EXPORT_SYMBOL(strncpy_from_user);

/*
 * Zero Userspace
 */

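/*
 * Zero size/4 dwords with rep stosl, then the trailing size%4 bytes
 * with rep stosb.  The fixup for a fault in the dword loop converts
 * the remaining dword count back into a byte count, so callers always
 * see the number of bytes left unzeroed.
 */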
#define __do_clear_user(addr,size)                                      \
do {                                                                    \
        int __d0;                                                       \
        might_fault();                                                  \
        __asm__ __volatile__(                                           \
                "0:     rep; stosl\n"                                   \
                "       movl %2,%0\n"                                   \
                "1:     rep; stosb\n"                                   \
                "2:\n"                                                  \
                ".section .fixup,\"ax\"\n"                              \
                "3:     lea 0(%2,%0,4),%0\n"                            \
                "       jmp 2b\n"                                       \
                ".previous\n"                                           \
                _ASM_EXTABLE(0b,3b)                                     \
                _ASM_EXTABLE(1b,2b)                                     \
                : "=&c"(size), "=&D" (__d0)                             \
                : "r"(size & 3), "0"(size / 4), "1"(addr), "a"(0));     \
} while (0)

/**
 * clear_user: - Zero a block of memory in user space.
 * @to:   Destination address, in user space.
 * @n:    Number of bytes to zero.
 *
 * Zero a block of memory in user space.
 *
 * Returns number of bytes that could not be cleared.
 * On success, this will be zero.
 */
unsigned long
clear_user(void __user *to, unsigned long n)
{
        might_fault();
        if (access_ok(VERIFY_WRITE, to, n))
                __do_clear_user(to, n);
        return n;
}
EXPORT_SYMBOL(clear_user);

/**
 * __clear_user: - Zero a block of memory in user space, with less checking.
 * @to:   Destination address, in user space.
 * @n:    Number of bytes to zero.
 *
 * Zero a block of memory in user space.  Caller must check
 * the specified block with access_ok() before calling this function.
 *
 * Returns number of bytes that could not be cleared.
 * On success, this will be zero.
 */
unsigned long
__clear_user(void __user *to, unsigned long n)
{
        __do_clear_user(to, n);
        return n;
}
EXPORT_SYMBOL(__clear_user);

/**
 * strnlen_user: - Get the size of a string in user space.
 * @s: The string to measure.
 * @n: The maximum valid length
 *
 * Get the size of a NUL-terminated string in user space.
 *
 * Returns the size of the string INCLUDING the terminating NUL.
 * On exception, returns 0.
 * If the string is too long, returns a value greater than @n.
 */
long strnlen_user(const char __user *s, long n)
{
        unsigned long mask = -__addr_ok(s);
        unsigned long res, tmp;

        might_fault();

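        /*
         * mask is all ones when __addr_ok(s) holds and zero otherwise,
         * so the final "res & mask" folds the address-range check into
         * the result without a branch.  repne scasb scans for the NUL;
         * the fixup at 2: returns 0 if the scan faults.
         */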
        __asm__ __volatile__(
                "       testl %0, %0\n"
                "       jz 3f\n"
                "       andl %0,%%ecx\n"
                "0:     repne; scasb\n"
                "       setne %%al\n"
                "       subl %%ecx,%0\n"
                "       addl %0,%%eax\n"
                "1:\n"
                ".section .fixup,\"ax\"\n"
                "2:     xorl %%eax,%%eax\n"
                "       jmp 1b\n"
                "3:     movb $1,%%al\n"
                "       jmp 1b\n"
                ".previous\n"
                ".section __ex_table,\"a\"\n"
                "       .align 4\n"
                "       .long 0b,2b\n"
                ".previous"
                :"=&r" (n), "=&D" (s), "=&a" (res), "=&c" (tmp)
                :"0" (n), "1" (s), "2" (0), "3" (mask)
                :"cc");
        return res & mask;
}
EXPORT_SYMBOL(strnlen_user);

#ifdef CONFIG_X86_INTEL_USERCOPY
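/*
 * Unrolled copy loop tuned for Intel CPUs: each pass moves 64 bytes
 * with paired 32-bit loads and stores, and the movl reads at offsets
 * 32 and 64 ahead of the loop body serve as software prefetches.  The
 * sub-64-byte tail is finished with rep movsl / rep movsb, and the
 * fixup converts what remains back into a byte count.
 */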
static unsigned long
__copy_user_intel(void __user *to, const void *from, unsigned long size)
{
        int d0, d1;
        __asm__ __volatile__(
                       "       .align 2,0x90\n"
                       "1:     movl 32(%4), %%eax\n"
                       "       cmpl $67, %0\n"
                       "       jbe 3f\n"
                       "2:     movl 64(%4), %%eax\n"
                       "       .align 2,0x90\n"
                       "3:     movl 0(%4), %%eax\n"
                       "4:     movl 4(%4), %%edx\n"
                       "5:     movl %%eax, 0(%3)\n"
                       "6:     movl %%edx, 4(%3)\n"
                       "7:     movl 8(%4), %%eax\n"
                       "8:     movl 12(%4),%%edx\n"
                       "9:     movl %%eax, 8(%3)\n"
                       "10:    movl %%edx, 12(%3)\n"
                       "11:    movl 16(%4), %%eax\n"
                       "12:    movl 20(%4), %%edx\n"
                       "13:    movl %%eax, 16(%3)\n"
                       "14:    movl %%edx, 20(%3)\n"
                       "15:    movl 24(%4), %%eax\n"
                       "16:    movl 28(%4), %%edx\n"
                       "17:    movl %%eax, 24(%3)\n"
                       "18:    movl %%edx, 28(%3)\n"
                       "19:    movl 32(%4), %%eax\n"
                       "20:    movl 36(%4), %%edx\n"
                       "21:    movl %%eax, 32(%3)\n"
                       "22:    movl %%edx, 36(%3)\n"
                       "23:    movl 40(%4), %%eax\n"
                       "24:    movl 44(%4), %%edx\n"
                       "25:    movl %%eax, 40(%3)\n"
                       "26:    movl %%edx, 44(%3)\n"
                       "27:    movl 48(%4), %%eax\n"
                       "28:    movl 52(%4), %%edx\n"
                       "29:    movl %%eax, 48(%3)\n"
                       "30:    movl %%edx, 52(%3)\n"
                       "31:    movl 56(%4), %%eax\n"
                       "32:    movl 60(%4), %%edx\n"
                       "33:    movl %%eax, 56(%3)\n"
                       "34:    movl %%edx, 60(%3)\n"
                       "       addl $-64, %0\n"
                       "       addl $64, %4\n"
                       "       addl $64, %3\n"
                       "       cmpl $63, %0\n"
                       "       ja  1b\n"
                       "35:    movl  %0, %%eax\n"
                       "       shrl  $2, %0\n"
                       "       andl  $3, %%eax\n"
                       "       cld\n"
                       "99:    rep; movsl\n"
                       "36:    movl %%eax, %0\n"
                       "37:    rep; movsb\n"
                       "100:\n"
                       ".section .fixup,\"ax\"\n"
                       "101:   lea 0(%%eax,%0,4),%0\n"
                       "       jmp 100b\n"
                       ".previous\n"
                       ".section __ex_table,\"a\"\n"
                       "       .align 4\n"
                       "       .long 1b,100b\n"
                       "       .long 2b,100b\n"
                       "       .long 3b,100b\n"
                       "       .long 4b,100b\n"
                       "       .long 5b,100b\n"
                       "       .long 6b,100b\n"
                       "       .long 7b,100b\n"
                       "       .long 8b,100b\n"
                       "       .long 9b,100b\n"
                       "       .long 10b,100b\n"
                       "       .long 11b,100b\n"
                       "       .long 12b,100b\n"
                       "       .long 13b,100b\n"
                       "       .long 14b,100b\n"
                       "       .long 15b,100b\n"
                       "       .long 16b,100b\n"
                       "       .long 17b,100b\n"
                       "       .long 18b,100b\n"
                       "       .long 19b,100b\n"
                       "       .long 20b,100b\n"
                       "       .long 21b,100b\n"
                       "       .long 22b,100b\n"
                       "       .long 23b,100b\n"
                       "       .long 24b,100b\n"
                       "       .long 25b,100b\n"
                       "       .long 26b,100b\n"
                       "       .long 27b,100b\n"
                       "       .long 28b,100b\n"
                       "       .long 29b,100b\n"
                       "       .long 30b,100b\n"
                       "       .long 31b,100b\n"
                       "       .long 32b,100b\n"
                       "       .long 33b,100b\n"
                       "       .long 34b,100b\n"
                       "       .long 35b,100b\n"
                       "       .long 36b,100b\n"
                       "       .long 37b,100b\n"
                       "       .long 99b,101b\n"
                       ".previous"
                       : "=&c"(size), "=&D" (d0), "=&S" (d1)
                       :  "1"(to), "2"(from), "0"(size)
                       : "eax", "edx", "memory");
        return size;
}

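/*
 * Like __copy_user_intel, but for the user-to-kernel direction: if a
 * load from userspace faults, the fixup zero-fills the rest of the
 * kernel destination buffer before returning the number of uncopied
 * bytes, so the caller never sees stale kernel memory.
 */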
static unsigned long
__copy_user_zeroing_intel(void *to, const void __user *from, unsigned long size)
{
        int d0, d1;
        __asm__ __volatile__(
                       "        .align 2,0x90\n"
                       "0:      movl 32(%4), %%eax\n"
                       "        cmpl $67, %0\n"
                       "        jbe 2f\n"
                       "1:      movl 64(%4), %%eax\n"
                       "        .align 2,0x90\n"
                       "2:      movl 0(%4), %%eax\n"
                       "21:     movl 4(%4), %%edx\n"
                       "        movl %%eax, 0(%3)\n"
                       "        movl %%edx, 4(%3)\n"
                       "3:      movl 8(%4), %%eax\n"
                       "31:     movl 12(%4),%%edx\n"
                       "        movl %%eax, 8(%3)\n"
                       "        movl %%edx, 12(%3)\n"
                       "4:      movl 16(%4), %%eax\n"
                       "41:     movl 20(%4), %%edx\n"
                       "        movl %%eax, 16(%3)\n"
                       "        movl %%edx, 20(%3)\n"
                       "10:     movl 24(%4), %%eax\n"
                       "51:     movl 28(%4), %%edx\n"
                       "        movl %%eax, 24(%3)\n"
                       "        movl %%edx, 28(%3)\n"
                       "11:     movl 32(%4), %%eax\n"
                       "61:     movl 36(%4), %%edx\n"
                       "        movl %%eax, 32(%3)\n"
                       "        movl %%edx, 36(%3)\n"
                       "12:     movl 40(%4), %%eax\n"
                       "71:     movl 44(%4), %%edx\n"
                       "        movl %%eax, 40(%3)\n"
                       "        movl %%edx, 44(%3)\n"
                       "13:     movl 48(%4), %%eax\n"
                       "81:     movl 52(%4), %%edx\n"
                       "        movl %%eax, 48(%3)\n"
                       "        movl %%edx, 52(%3)\n"
                       "14:     movl 56(%4), %%eax\n"
                       "91:     movl 60(%4), %%edx\n"
                       "        movl %%eax, 56(%3)\n"
                       "        movl %%edx, 60(%3)\n"
                       "        addl $-64, %0\n"
                       "        addl $64, %4\n"
                       "        addl $64, %3\n"
                       "        cmpl $63, %0\n"
                       "        ja  0b\n"
                       "5:      movl  %0, %%eax\n"
                       "        shrl  $2, %0\n"
                       "        andl $3, %%eax\n"
                       "        cld\n"
                       "6:      rep; movsl\n"
                       "        movl %%eax,%0\n"
                       "7:      rep; movsb\n"
                       "8:\n"
                       ".section .fixup,\"ax\"\n"
                       "9:      lea 0(%%eax,%0,4),%0\n"
                       "16:     pushl %0\n"
                       "        pushl %%eax\n"
                       "        xorl %%eax,%%eax\n"
                       "        rep; stosb\n"
                       "        popl %%eax\n"
                       "        popl %0\n"
                       "        jmp 8b\n"
                       ".previous\n"
                       ".section __ex_table,\"a\"\n"
                       "        .align 4\n"
                       "        .long 0b,16b\n"
                       "        .long 1b,16b\n"
                       "        .long 2b,16b\n"
                       "        .long 21b,16b\n"
                       "        .long 3b,16b\n"
                       "        .long 31b,16b\n"
                       "        .long 4b,16b\n"
                       "        .long 41b,16b\n"
                       "        .long 10b,16b\n"
                       "        .long 51b,16b\n"
                       "        .long 11b,16b\n"
                       "        .long 61b,16b\n"
                       "        .long 12b,16b\n"
                       "        .long 71b,16b\n"
                       "        .long 13b,16b\n"
                       "        .long 81b,16b\n"
                       "        .long 14b,16b\n"
                       "        .long 91b,16b\n"
                       "        .long 6b,9b\n"
                       "        .long 7b,16b\n"
                       ".previous"
                       : "=&c"(size), "=&D" (d0), "=&S" (d1)
                       :  "1"(to), "2"(from), "0"(size)
                       : "eax", "edx", "memory");
        return size;
}

/*
 * Non-temporal-hint version of __copy_user_zeroing_intel.  It avoids
 * polluting the cache with data that will not be reused soon.
 * hyoshiok@miraclelinux.com
 */

static unsigned long __copy_user_zeroing_intel_nocache(void *to,
                                const void __user *from, unsigned long size)
{
        int d0, d1;

        __asm__ __volatile__(
               "        .align 2,0x90\n"
               "0:      movl 32(%4), %%eax\n"
               "        cmpl $67, %0\n"
               "        jbe 2f\n"
               "1:      movl 64(%4), %%eax\n"
               "        .align 2,0x90\n"
               "2:      movl 0(%4), %%eax\n"
               "21:     movl 4(%4), %%edx\n"
               "        movnti %%eax, 0(%3)\n"
               "        movnti %%edx, 4(%3)\n"
               "3:      movl 8(%4), %%eax\n"
               "31:     movl 12(%4),%%edx\n"
               "        movnti %%eax, 8(%3)\n"
               "        movnti %%edx, 12(%3)\n"
               "4:      movl 16(%4), %%eax\n"
               "41:     movl 20(%4), %%edx\n"
               "        movnti %%eax, 16(%3)\n"
               "        movnti %%edx, 20(%3)\n"
               "10:     movl 24(%4), %%eax\n"
               "51:     movl 28(%4), %%edx\n"
               "        movnti %%eax, 24(%3)\n"
               "        movnti %%edx, 28(%3)\n"
               "11:     movl 32(%4), %%eax\n"
               "61:     movl 36(%4), %%edx\n"
               "        movnti %%eax, 32(%3)\n"
               "        movnti %%edx, 36(%3)\n"
               "12:     movl 40(%4), %%eax\n"
               "71:     movl 44(%4), %%edx\n"
               "        movnti %%eax, 40(%3)\n"
               "        movnti %%edx, 44(%3)\n"
               "13:     movl 48(%4), %%eax\n"
               "81:     movl 52(%4), %%edx\n"
               "        movnti %%eax, 48(%3)\n"
               "        movnti %%edx, 52(%3)\n"
               "14:     movl 56(%4), %%eax\n"
               "91:     movl 60(%4), %%edx\n"
               "        movnti %%eax, 56(%3)\n"
               "        movnti %%edx, 60(%3)\n"
               "        addl $-64, %0\n"
               "        addl $64, %4\n"
               "        addl $64, %3\n"
               "        cmpl $63, %0\n"
               "        ja  0b\n"
               "        sfence \n"
               "5:      movl  %0, %%eax\n"
               "        shrl  $2, %0\n"
               "        andl $3, %%eax\n"
               "        cld\n"
               "6:      rep; movsl\n"
               "        movl %%eax,%0\n"
               "7:      rep; movsb\n"
               "8:\n"
               ".section .fixup,\"ax\"\n"
               "9:      lea 0(%%eax,%0,4),%0\n"
               "16:     pushl %0\n"
               "        pushl %%eax\n"
               "        xorl %%eax,%%eax\n"
               "        rep; stosb\n"
               "        popl %%eax\n"
               "        popl %0\n"
               "        jmp 8b\n"
               ".previous\n"
               ".section __ex_table,\"a\"\n"
               "        .align 4\n"
               "        .long 0b,16b\n"
               "        .long 1b,16b\n"
               "        .long 2b,16b\n"
               "        .long 21b,16b\n"
               "        .long 3b,16b\n"
               "        .long 31b,16b\n"
               "        .long 4b,16b\n"
               "        .long 41b,16b\n"
               "        .long 10b,16b\n"
               "        .long 51b,16b\n"
               "        .long 11b,16b\n"
               "        .long 61b,16b\n"
               "        .long 12b,16b\n"
               "        .long 71b,16b\n"
               "        .long 13b,16b\n"
               "        .long 81b,16b\n"
               "        .long 14b,16b\n"
               "        .long 91b,16b\n"
               "        .long 6b,9b\n"
               "        .long 7b,16b\n"
               ".previous"
               : "=&c"(size), "=&D" (d0), "=&S" (d1)
               :  "1"(to), "2"(from), "0"(size)
               : "eax", "edx", "memory");
        return size;
}

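/*
 * Non-zeroing counterpart of the function above: the same movnti
 * (non-temporal) store loop with a trailing sfence, but a fault only
 * computes the remaining byte count and returns; the destination is
 * not zero-filled.
 */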
static unsigned long __copy_user_intel_nocache(void *to,
                                const void __user *from, unsigned long size)
{
        int d0, d1;

        __asm__ __volatile__(
               "        .align 2,0x90\n"
               "0:      movl 32(%4), %%eax\n"
               "        cmpl $67, %0\n"
               "        jbe 2f\n"
               "1:      movl 64(%4), %%eax\n"
               "        .align 2,0x90\n"
               "2:      movl 0(%4), %%eax\n"
               "21:     movl 4(%4), %%edx\n"
               "        movnti %%eax, 0(%3)\n"
               "        movnti %%edx, 4(%3)\n"
               "3:      movl 8(%4), %%eax\n"
               "31:     movl 12(%4),%%edx\n"
               "        movnti %%eax, 8(%3)\n"
               "        movnti %%edx, 12(%3)\n"
               "4:      movl 16(%4), %%eax\n"
               "41:     movl 20(%4), %%edx\n"
               "        movnti %%eax, 16(%3)\n"
               "        movnti %%edx, 20(%3)\n"
               "10:     movl 24(%4), %%eax\n"
               "51:     movl 28(%4), %%edx\n"
               "        movnti %%eax, 24(%3)\n"
               "        movnti %%edx, 28(%3)\n"
               "11:     movl 32(%4), %%eax\n"
               "61:     movl 36(%4), %%edx\n"
               "        movnti %%eax, 32(%3)\n"
               "        movnti %%edx, 36(%3)\n"
               "12:     movl 40(%4), %%eax\n"
               "71:     movl 44(%4), %%edx\n"
               "        movnti %%eax, 40(%3)\n"
               "        movnti %%edx, 44(%3)\n"
               "13:     movl 48(%4), %%eax\n"
               "81:     movl 52(%4), %%edx\n"
               "        movnti %%eax, 48(%3)\n"
               "        movnti %%edx, 52(%3)\n"
               "14:     movl 56(%4), %%eax\n"
               "91:     movl 60(%4), %%edx\n"
               "        movnti %%eax, 56(%3)\n"
               "        movnti %%edx, 60(%3)\n"
               "        addl $-64, %0\n"
               "        addl $64, %4\n"
               "        addl $64, %3\n"
               "        cmpl $63, %0\n"
               "        ja  0b\n"
               "        sfence \n"
               "5:      movl  %0, %%eax\n"
               "        shrl  $2, %0\n"
               "        andl $3, %%eax\n"
               "        cld\n"
               "6:      rep; movsl\n"
               "        movl %%eax,%0\n"
               "7:      rep; movsb\n"
               "8:\n"
               ".section .fixup,\"ax\"\n"
               "9:      lea 0(%%eax,%0,4),%0\n"
               "16:     jmp 8b\n"
               ".previous\n"
               ".section __ex_table,\"a\"\n"
               "        .align 4\n"
               "        .long 0b,16b\n"
               "        .long 1b,16b\n"
               "        .long 2b,16b\n"
               "        .long 21b,16b\n"
               "        .long 3b,16b\n"
               "        .long 31b,16b\n"
               "        .long 4b,16b\n"
               "        .long 41b,16b\n"
               "        .long 10b,16b\n"
               "        .long 51b,16b\n"
               "        .long 11b,16b\n"
               "        .long 61b,16b\n"
               "        .long 12b,16b\n"
               "        .long 71b,16b\n"
               "        .long 13b,16b\n"
               "        .long 81b,16b\n"
               "        .long 14b,16b\n"
               "        .long 91b,16b\n"
               "        .long 6b,9b\n"
               "        .long 7b,16b\n"
               ".previous"
               : "=&c"(size), "=&D" (d0), "=&S" (d1)
               :  "1"(to), "2"(from), "0"(size)
               : "eax", "edx", "memory");
        return size;
}

#else

/*
 * Leave these declared but undefined.  There should not be any
 * references to them.
 */
unsigned long __copy_user_zeroing_intel(void *to, const void __user *from,
                                        unsigned long size);
unsigned long __copy_user_intel(void __user *to, const void *from,
                                        unsigned long size);
unsigned long __copy_user_zeroing_intel_nocache(void *to,
                                const void __user *from, unsigned long size);
#endif /* CONFIG_X86_INTEL_USERCOPY */

/* Generic arbitrary sized copy.  */
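/*
 * First align the destination to an 8-byte boundary with movsb, then
 * bulk-copy with rep movsl and finish the tail with rep movsb.  The
 * fixups convert whatever remains (whole dwords plus trailing bytes)
 * back into a byte count.
 */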
#define __copy_user(to, from, size)                                     \
do {                                                                    \
        int __d0, __d1, __d2;                                           \
        __asm__ __volatile__(                                           \
                "       cmp  $7,%0\n"                                   \
                "       jbe  1f\n"                                      \
                "       movl %1,%0\n"                                   \
                "       negl %0\n"                                      \
                "       andl $7,%0\n"                                   \
                "       subl %0,%3\n"                                   \
                "4:     rep; movsb\n"                                   \
                "       movl %3,%0\n"                                   \
                "       shrl $2,%0\n"                                   \
                "       andl $3,%3\n"                                   \
                "       .align 2,0x90\n"                                \
                "0:     rep; movsl\n"                                   \
                "       movl %3,%0\n"                                   \
                "1:     rep; movsb\n"                                   \
                "2:\n"                                                  \
                ".section .fixup,\"ax\"\n"                              \
                "5:     addl %3,%0\n"                                   \
                "       jmp 2b\n"                                       \
                "3:     lea 0(%3,%0,4),%0\n"                            \
                "       jmp 2b\n"                                       \
                ".previous\n"                                           \
                ".section __ex_table,\"a\"\n"                           \
                "       .align 4\n"                                     \
                "       .long 4b,5b\n"                                  \
                "       .long 0b,3b\n"                                  \
                "       .long 1b,2b\n"                                  \
                ".previous"                                             \
                : "=&c"(size), "=&D" (__d0), "=&S" (__d1), "=r"(__d2)   \
                : "3"(size), "0"(size), "1"(to), "2"(from)              \
                : "memory");                                            \
} while (0)

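/*
 * As __copy_user, but for user-to-kernel copies: on a fault the fixup
 * zero-fills the remainder of the kernel buffer (rep stosb with
 * %eax = 0) before returning the number of bytes not copied.
 */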
#define __copy_user_zeroing(to, from, size)                             \
do {                                                                    \
        int __d0, __d1, __d2;                                           \
        __asm__ __volatile__(                                           \
                "       cmp  $7,%0\n"                                   \
                "       jbe  1f\n"                                      \
                "       movl %1,%0\n"                                   \
                "       negl %0\n"                                      \
                "       andl $7,%0\n"                                   \
                "       subl %0,%3\n"                                   \
                "4:     rep; movsb\n"                                   \
                "       movl %3,%0\n"                                   \
                "       shrl $2,%0\n"                                   \
                "       andl $3,%3\n"                                   \
                "       .align 2,0x90\n"                                \
                "0:     rep; movsl\n"                                   \
                "       movl %3,%0\n"                                   \
                "1:     rep; movsb\n"                                   \
                "2:\n"                                                  \
                ".section .fixup,\"ax\"\n"                              \
                "5:     addl %3,%0\n"                                   \
                "       jmp 6f\n"                                       \
                "3:     lea 0(%3,%0,4),%0\n"                            \
                "6:     pushl %0\n"                                     \
                "       pushl %%eax\n"                                  \
                "       xorl %%eax,%%eax\n"                             \
                "       rep; stosb\n"                                   \
                "       popl %%eax\n"                                   \
                "       popl %0\n"                                      \
                "       jmp 2b\n"                                       \
                ".previous\n"                                           \
                ".section __ex_table,\"a\"\n"                           \
                "       .align 4\n"                                     \
                "       .long 4b,5b\n"                                  \
                "       .long 0b,3b\n"                                  \
                "       .long 1b,6b\n"                                  \
                ".previous"                                             \
                : "=&c"(size), "=&D" (__d0), "=&S" (__d1), "=r"(__d2)   \
                : "3"(size), "0"(size), "1"(to), "2"(from)              \
                : "memory");                                            \
} while (0)

unsigned long __copy_to_user_ll(void __user *to, const void *from,
                                unsigned long n)
{
#ifndef CONFIG_X86_WP_WORKS_OK
        if (unlikely(boot_cpu_data.wp_works_ok == 0) &&
                        ((unsigned long)to) < TASK_SIZE) {
                /*
                 * When we are in an atomic section (see
                 * mm/filemap.c:file_read_actor), return the full
                 * length to take the slow path.
                 */
                if (in_atomic())
                        return n;

                /*
                 * CPU does not honor the WP bit when writing
                 * from supervisory mode, and due to preemption or SMP,
                 * the page tables can change at any time.
                 * Do it manually.  Manfred <manfred@colorfullife.com>
                 */
                while (n) {
                        unsigned long offset = ((unsigned long)to)%PAGE_SIZE;
                        unsigned long len = PAGE_SIZE - offset;
                        int retval;
                        struct page *pg;
                        void *maddr;

                        if (len > n)
                                len = n;

survive:
                        down_read(&current->mm->mmap_sem);
                        retval = get_user_pages(current, current->mm,
                                        (unsigned long)to, 1, 1, 0, &pg, NULL);

                        if (retval == -ENOMEM && is_global_init(current)) {
                                up_read(&current->mm->mmap_sem);
                                congestion_wait(WRITE, HZ/50);
                                goto survive;
                        }

                        if (retval != 1) {
                                up_read(&current->mm->mmap_sem);
                                break;
                        }

                        maddr = kmap_atomic(pg, KM_USER0);
                        memcpy(maddr + offset, from, len);
                        kunmap_atomic(maddr, KM_USER0);
                        set_page_dirty_lock(pg);
                        put_page(pg);
                        up_read(&current->mm->mmap_sem);

                        from += len;
                        to += len;
                        n -= len;
                }
                return n;
        }
#endif
        if (movsl_is_ok(to, from, n))
                __copy_user(to, from, n);
        else
                n = __copy_user_intel(to, from, n);
        return n;
}
EXPORT_SYMBOL(__copy_to_user_ll);

unsigned long __copy_from_user_ll(void *to, const void __user *from,
                                        unsigned long n)
{
        if (movsl_is_ok(to, from, n))
                __copy_user_zeroing(to, from, n);
        else
                n = __copy_user_zeroing_intel(to, from, n);
        return n;
}
EXPORT_SYMBOL(__copy_from_user_ll);

unsigned long __copy_from_user_ll_nozero(void *to, const void __user *from,
                                         unsigned long n)
{
        if (movsl_is_ok(to, from, n))
                __copy_user(to, from, n);
        else
                n = __copy_user_intel((void __user *)to,
                                      (const void *)from, n);
        return n;
}
EXPORT_SYMBOL(__copy_from_user_ll_nozero);

unsigned long __copy_from_user_ll_nocache(void *to, const void __user *from,
                                        unsigned long n)
{
#ifdef CONFIG_X86_INTEL_USERCOPY
        if (n > 64 && cpu_has_xmm2)
                n = __copy_user_zeroing_intel_nocache(to, from, n);
        else
                __copy_user_zeroing(to, from, n);
#else
        __copy_user_zeroing(to, from, n);
#endif
        return n;
}
EXPORT_SYMBOL(__copy_from_user_ll_nocache);

unsigned long __copy_from_user_ll_nocache_nozero(void *to, const void __user *from,
                                        unsigned long n)
{
#ifdef CONFIG_X86_INTEL_USERCOPY
        if (n > 64 && cpu_has_xmm2)
                n = __copy_user_intel_nocache(to, from, n);
        else
                __copy_user(to, from, n);
#else
        __copy_user(to, from, n);
#endif
        return n;
}
EXPORT_SYMBOL(__copy_from_user_ll_nocache_nozero);

/**
 * copy_to_user: - Copy a block of data into user space.
 * @to:   Destination address, in user space.
 * @from: Source address, in kernel space.
 * @n:    Number of bytes to copy.
 *
 * Context: User context only.  This function may sleep.
 *
 * Copy data from kernel space to user space.
 *
 * Returns number of bytes that could not be copied.
 * On success, this will be zero.
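 *
 * Typical use (illustrative only; "ubuf" and "kdata" are placeholder
 * identifiers, not taken from this file):
 *
 *        if (copy_to_user(ubuf, &kdata, sizeof(kdata)))
 *                return -EFAULT;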
 */
unsigned long
copy_to_user(void __user *to, const void *from, unsigned long n)
{
        if (access_ok(VERIFY_WRITE, to, n))
                n = __copy_to_user(to, from, n);
        return n;
}
EXPORT_SYMBOL(copy_to_user);

/**
 * copy_from_user: - Copy a block of data from user space.
 * @to:   Destination address, in kernel space.
 * @from: Source address, in user space.
 * @n:    Number of bytes to copy.
 *
 * Context: User context only.  This function may sleep.
 *
 * Copy data from user space to kernel space.
 *
 * Returns number of bytes that could not be copied.
 * On success, this will be zero.
 *
 * If some data could not be copied, this function will pad the copied
 * data to the requested size using zero bytes.
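 *
 * Typical use (illustrative only; "uptr" and "karg" are placeholder
 * identifiers, not taken from this file):
 *
 *        struct foo karg;
 *
 *        if (copy_from_user(&karg, uptr, sizeof(karg)))
 *                return -EFAULT;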
 */
unsigned long
copy_from_user(void *to, const void __user *from, unsigned long n)
{
        if (access_ok(VERIFY_READ, from, n))
                n = __copy_from_user(to, from, n);
        else
                memset(to, 0, n);
        return n;
}
EXPORT_SYMBOL(copy_from_user);