/*
 * User address space access functions.
 * The non-inlined parts of asm-i386/uaccess.h are here.
 *
 * Copyright 1997 Andi Kleen <ak@muc.de>
 * Copyright 1997 Linus Torvalds
 */
#include <linux/mm.h>
#include <linux/highmem.h>
#include <linux/blkdev.h>
#include <linux/module.h>
#include <linux/backing-dev.h>
#include <linux/interrupt.h>
#include <asm/uaccess.h>
#include <asm/mmx.h>
#include <asm/asm.h>

#ifdef CONFIG_X86_INTEL_USERCOPY
/*
 * Alignment at which movsl is preferred for bulk memory copies.
 */
struct movsl_mask movsl_mask __read_mostly;
#endif

static inline int __movsl_is_ok(unsigned long a1, unsigned long a2, unsigned long n)
{
#ifdef CONFIG_X86_INTEL_USERCOPY
        if (n >= 64 && ((a1 ^ a2) & movsl_mask.mask))
                return 0;
#endif
        return 1;
}
#define movsl_is_ok(a1, a2, n) \
        __movsl_is_ok((unsigned long)(a1), (unsigned long)(a2), (n))
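
/*
 * Illustrative only, not part of the original file: a minimal sketch of the
 * alignment heuristic above.  The mask value 7 is an assumption (it is set
 * elsewhere at boot on Intel CPUs); copies of 64 bytes or more take the
 * movsl path only when source and destination agree in their masked low
 * address bits.
 */
#if 0
static void __init movsl_is_ok_example(void)
{
        /* assume movsl_mask.mask == 7 for this example */
        BUG_ON(!__movsl_is_ok(0x1000, 0x2008, 128)); /* low 3 bits agree */
        BUG_ON( __movsl_is_ok(0x1000, 0x2004, 128)); /* bit 2 differs */
        BUG_ON(!__movsl_is_ok(0x1000, 0x2004, 32));  /* n < 64: always ok */
}
#endif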

/*
 * Zero Userspace
 */

#define __do_clear_user(addr,size)                                      \
do {                                                                    \
        int __d0;                                                       \
        might_fault();                                                  \
        __asm__ __volatile__(                                           \
                "0:     rep; stosl\n"                                   \
                "       movl %2,%0\n"                                   \
                "1:     rep; stosb\n"                                   \
                "2:\n"                                                  \
                ".section .fixup,\"ax\"\n"                              \
                "3:     lea 0(%2,%0,4),%0\n"                            \
                "       jmp 2b\n"                                       \
                ".previous\n"                                           \
                _ASM_EXTABLE(0b,3b)                                     \
                _ASM_EXTABLE(1b,2b)                                     \
                : "=&c"(size), "=&D" (__d0)                             \
                : "r"(size & 3), "0"(size / 4), "1"(addr), "a"(0));     \
} while (0)
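
/*
 * Illustrative only, not part of the original file: a plain-C model of what
 * __do_clear_user() computes.  The dword loop mirrors "rep; stosl", the byte
 * loop mirrors "rep; stosb", and the two return paths mirror the fixup at
 * label 3 and the fall-through at label 2.  store_ok() is a hypothetical
 * stand-in for a store that may fault.
 */
#if 0
static unsigned long do_clear_user_model(char *addr, unsigned long size)
{
        unsigned long dwords = size / 4, tail = size & 3, i;

        for (i = 0; i < dwords; i++)                    /* "rep; stosl" */
                if (!store_ok(addr + i * 4, 4))
                        return tail + (dwords - i) * 4; /* fixup: lea 0(%2,%0,4) */
        for (i = 0; i < tail; i++)                      /* "rep; stosb" */
                if (!store_ok(addr + dwords * 4 + i, 1))
                        return tail - i;                /* leftover byte count */
        return 0;                                       /* everything cleared */
}
#endif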

/**
 * clear_user: - Zero a block of memory in user space.
 * @to:   Destination address, in user space.
 * @n:    Number of bytes to zero.
 *
 * Zero a block of memory in user space.
 *
 * Returns number of bytes that could not be cleared.
 * On success, this will be zero.
 */
unsigned long
clear_user(void __user *to, unsigned long n)
{
        might_fault();
        if (access_ok(VERIFY_WRITE, to, n))
                __do_clear_user(to, n);
        return n;
}
EXPORT_SYMBOL(clear_user);
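
/*
 * Illustrative only, not part of the original file: a minimal sketch of how
 * a caller might use clear_user().  The function name and the -EFAULT policy
 * are assumptions for the example, not kernel API.
 */
#if 0
static int example_zero_user_range(void __user *ubuf, unsigned long len)
{
        unsigned long uncleared = clear_user(ubuf, len);

        return uncleared ? -EFAULT : 0; /* nonzero return = bytes left */
}
#endif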

/**
 * __clear_user: - Zero a block of memory in user space, with less checking.
 * @to:   Destination address, in user space.
 * @n:    Number of bytes to zero.
 *
 * Zero a block of memory in user space.  Caller must check
 * the specified block with access_ok() before calling this function.
 *
 * Returns number of bytes that could not be cleared.
 * On success, this will be zero.
 */
unsigned long
__clear_user(void __user *to, unsigned long n)
{
        __do_clear_user(to, n);
        return n;
}
EXPORT_SYMBOL(__clear_user);

/**
 * strnlen_user: - Get the size of a string in user space.
 * @s: The string to measure.
 * @n: The maximum valid length
 *
 * Get the size of a NUL-terminated string in user space.
 *
 * Returns the size of the string INCLUDING the terminating NUL.
 * On exception, returns 0.
 * If the string is too long, returns a value greater than @n.
 */
long strnlen_user(const char __user *s, long n)
{
        unsigned long mask = -__addr_ok(s);
        unsigned long res, tmp;

        might_fault();

        __asm__ __volatile__(
                "       testl %0, %0\n"
                "       jz 3f\n"
                "       andl %0,%%ecx\n"
                "0:     repne; scasb\n"
                "       setne %%al\n"
                "       subl %%ecx,%0\n"
                "       addl %0,%%eax\n"
                "1:\n"
                ".section .fixup,\"ax\"\n"
                "2:     xorl %%eax,%%eax\n"
                "       jmp 1b\n"
                "3:     movb $1,%%al\n"
                "       jmp 1b\n"
                ".previous\n"
                _ASM_EXTABLE(0b,2b)
                :"=&r" (n), "=&D" (s), "=&a" (res), "=&c" (tmp)
                :"0" (n), "1" (s), "2" (0), "3" (mask)
                :"cc");
        return res & mask;
}
EXPORT_SYMBOL(strnlen_user);
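
/*
 * Illustrative only, not part of the original file: a sketch of the three
 * strnlen_user() outcomes a caller has to distinguish, per the kernel-doc
 * above.  The error-code policy is an assumption for the example.
 */
#if 0
static long example_measure_user_string(const char __user *ustr, long max)
{
        long len = strnlen_user(ustr, max);

        if (len == 0)           /* exception: bad user pointer */
                return -EFAULT;
        if (len > max)          /* no NUL within max bytes */
                return -EINVAL;
        return len;             /* length INCLUDING the NUL */
}
#endif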

#ifdef CONFIG_X86_INTEL_USERCOPY
static unsigned long
__copy_user_intel(void __user *to, const void *from, unsigned long size)
{
        int d0, d1;
        __asm__ __volatile__(
                       "       .align 2,0x90\n"
                       "1:     movl 32(%4), %%eax\n"
                       "       cmpl $67, %0\n"
                       "       jbe 3f\n"
                       "2:     movl 64(%4), %%eax\n"
                       "       .align 2,0x90\n"
                       "3:     movl 0(%4), %%eax\n"
                       "4:     movl 4(%4), %%edx\n"
                       "5:     movl %%eax, 0(%3)\n"
                       "6:     movl %%edx, 4(%3)\n"
                       "7:     movl 8(%4), %%eax\n"
                       "8:     movl 12(%4),%%edx\n"
                       "9:     movl %%eax, 8(%3)\n"
                       "10:    movl %%edx, 12(%3)\n"
                       "11:    movl 16(%4), %%eax\n"
                       "12:    movl 20(%4), %%edx\n"
                       "13:    movl %%eax, 16(%3)\n"
                       "14:    movl %%edx, 20(%3)\n"
                       "15:    movl 24(%4), %%eax\n"
                       "16:    movl 28(%4), %%edx\n"
                       "17:    movl %%eax, 24(%3)\n"
                       "18:    movl %%edx, 28(%3)\n"
                       "19:    movl 32(%4), %%eax\n"
                       "20:    movl 36(%4), %%edx\n"
                       "21:    movl %%eax, 32(%3)\n"
                       "22:    movl %%edx, 36(%3)\n"
                       "23:    movl 40(%4), %%eax\n"
                       "24:    movl 44(%4), %%edx\n"
                       "25:    movl %%eax, 40(%3)\n"
                       "26:    movl %%edx, 44(%3)\n"
                       "27:    movl 48(%4), %%eax\n"
                       "28:    movl 52(%4), %%edx\n"
                       "29:    movl %%eax, 48(%3)\n"
                       "30:    movl %%edx, 52(%3)\n"
                       "31:    movl 56(%4), %%eax\n"
                       "32:    movl 60(%4), %%edx\n"
                       "33:    movl %%eax, 56(%3)\n"
                       "34:    movl %%edx, 60(%3)\n"
                       "       addl $-64, %0\n"
                       "       addl $64, %4\n"
                       "       addl $64, %3\n"
                       "       cmpl $63, %0\n"
                       "       ja  1b\n"
                       "35:    movl  %0, %%eax\n"
                       "       shrl  $2, %0\n"
                       "       andl  $3, %%eax\n"
                       "       cld\n"
                       "99:    rep; movsl\n"
                       "36:    movl %%eax, %0\n"
                       "37:    rep; movsb\n"
                       "100:\n"
                       ".section .fixup,\"ax\"\n"
                       "101:   lea 0(%%eax,%0,4),%0\n"
                       "       jmp 100b\n"
                       ".previous\n"
                       _ASM_EXTABLE(1b,100b)
                       _ASM_EXTABLE(2b,100b)
                       _ASM_EXTABLE(3b,100b)
                       _ASM_EXTABLE(4b,100b)
                       _ASM_EXTABLE(5b,100b)
                       _ASM_EXTABLE(6b,100b)
                       _ASM_EXTABLE(7b,100b)
                       _ASM_EXTABLE(8b,100b)
                       _ASM_EXTABLE(9b,100b)
                       _ASM_EXTABLE(10b,100b)
                       _ASM_EXTABLE(11b,100b)
                       _ASM_EXTABLE(12b,100b)
                       _ASM_EXTABLE(13b,100b)
                       _ASM_EXTABLE(14b,100b)
                       _ASM_EXTABLE(15b,100b)
                       _ASM_EXTABLE(16b,100b)
                       _ASM_EXTABLE(17b,100b)
                       _ASM_EXTABLE(18b,100b)
                       _ASM_EXTABLE(19b,100b)
                       _ASM_EXTABLE(20b,100b)
                       _ASM_EXTABLE(21b,100b)
                       _ASM_EXTABLE(22b,100b)
                       _ASM_EXTABLE(23b,100b)
                       _ASM_EXTABLE(24b,100b)
                       _ASM_EXTABLE(25b,100b)
                       _ASM_EXTABLE(26b,100b)
                       _ASM_EXTABLE(27b,100b)
                       _ASM_EXTABLE(28b,100b)
                       _ASM_EXTABLE(29b,100b)
                       _ASM_EXTABLE(30b,100b)
                       _ASM_EXTABLE(31b,100b)
                       _ASM_EXTABLE(32b,100b)
                       _ASM_EXTABLE(33b,100b)
                       _ASM_EXTABLE(34b,100b)
                       _ASM_EXTABLE(35b,100b)
                       _ASM_EXTABLE(36b,100b)
                       _ASM_EXTABLE(37b,100b)
                       _ASM_EXTABLE(99b,101b)
                       : "=&c"(size), "=&D" (d0), "=&S" (d1)
                       :  "1"(to), "2"(from), "0"(size)
                       : "eax", "edx", "memory");
        return size;
}
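
/*
 * Illustrative only, not part of the original file: a plain-C model of the
 * loop structure above.  Each 64-byte block is copied with an unrolled
 * dword loop after touching one cache line ahead (the reads at labels 1 and
 * 2), and the sub-64-byte tail is finished with rep;movsl / rep;movsb.
 * This model ignores faulting; the real code returns the uncopied residue
 * via the fixup at label 101.
 */
#if 0
static unsigned long copy_user_intel_model(char *to, const char *from,
                                           unsigned long size)
{
        while (size > 63) {             /* "cmpl $63, %0; ja 1b" */
                memcpy(to, from, 64);   /* the 16 movl load/store pairs */
                to += 64;
                from += 64;
                size -= 64;
        }
        memcpy(to, from, size);         /* rep;movsl then rep;movsb */
        return 0;                       /* residue on fault, else 0 */
}
#endif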

static unsigned long
__copy_user_zeroing_intel(void *to, const void __user *from, unsigned long size)
{
        int d0, d1;
        __asm__ __volatile__(
                       "        .align 2,0x90\n"
                       "0:      movl 32(%4), %%eax\n"
                       "        cmpl $67, %0\n"
                       "        jbe 2f\n"
                       "1:      movl 64(%4), %%eax\n"
                       "        .align 2,0x90\n"
                       "2:      movl 0(%4), %%eax\n"
                       "21:     movl 4(%4), %%edx\n"
                       "        movl %%eax, 0(%3)\n"
                       "        movl %%edx, 4(%3)\n"
                       "3:      movl 8(%4), %%eax\n"
                       "31:     movl 12(%4),%%edx\n"
                       "        movl %%eax, 8(%3)\n"
                       "        movl %%edx, 12(%3)\n"
                       "4:      movl 16(%4), %%eax\n"
                       "41:     movl 20(%4), %%edx\n"
                       "        movl %%eax, 16(%3)\n"
                       "        movl %%edx, 20(%3)\n"
                       "10:     movl 24(%4), %%eax\n"
                       "51:     movl 28(%4), %%edx\n"
                       "        movl %%eax, 24(%3)\n"
                       "        movl %%edx, 28(%3)\n"
                       "11:     movl 32(%4), %%eax\n"
                       "61:     movl 36(%4), %%edx\n"
                       "        movl %%eax, 32(%3)\n"
                       "        movl %%edx, 36(%3)\n"
                       "12:     movl 40(%4), %%eax\n"
                       "71:     movl 44(%4), %%edx\n"
                       "        movl %%eax, 40(%3)\n"
                       "        movl %%edx, 44(%3)\n"
                       "13:     movl 48(%4), %%eax\n"
                       "81:     movl 52(%4), %%edx\n"
                       "        movl %%eax, 48(%3)\n"
                       "        movl %%edx, 52(%3)\n"
                       "14:     movl 56(%4), %%eax\n"
                       "91:     movl 60(%4), %%edx\n"
                       "        movl %%eax, 56(%3)\n"
                       "        movl %%edx, 60(%3)\n"
                       "        addl $-64, %0\n"
                       "        addl $64, %4\n"
                       "        addl $64, %3\n"
                       "        cmpl $63, %0\n"
                       "        ja  0b\n"
                       "5:      movl  %0, %%eax\n"
                       "        shrl  $2, %0\n"
                       "        andl $3, %%eax\n"
                       "        cld\n"
                       "6:      rep; movsl\n"
                       "        movl %%eax,%0\n"
                       "7:      rep; movsb\n"
                       "8:\n"
                       ".section .fixup,\"ax\"\n"
                       "9:      lea 0(%%eax,%0,4),%0\n"
                       "16:     pushl %0\n"
                       "        pushl %%eax\n"
                       "        xorl %%eax,%%eax\n"
                       "        rep; stosb\n"
                       "        popl %%eax\n"
                       "        popl %0\n"
                       "        jmp 8b\n"
                       ".previous\n"
                       _ASM_EXTABLE(0b,16b)
                       _ASM_EXTABLE(1b,16b)
                       _ASM_EXTABLE(2b,16b)
                       _ASM_EXTABLE(21b,16b)
                       _ASM_EXTABLE(3b,16b)
                       _ASM_EXTABLE(31b,16b)
                       _ASM_EXTABLE(4b,16b)
                       _ASM_EXTABLE(41b,16b)
                       _ASM_EXTABLE(10b,16b)
                       _ASM_EXTABLE(51b,16b)
                       _ASM_EXTABLE(11b,16b)
                       _ASM_EXTABLE(61b,16b)
                       _ASM_EXTABLE(12b,16b)
                       _ASM_EXTABLE(71b,16b)
                       _ASM_EXTABLE(13b,16b)
                       _ASM_EXTABLE(81b,16b)
                       _ASM_EXTABLE(14b,16b)
                       _ASM_EXTABLE(91b,16b)
                       _ASM_EXTABLE(6b,9b)
                       _ASM_EXTABLE(7b,16b)
                       : "=&c"(size), "=&D" (d0), "=&S" (d1)
                       :  "1"(to), "2"(from), "0"(size)
                       : "eax", "edx", "memory");
        return size;
}

/*
 * Non-temporal-hint version of __copy_user_zeroing_intel: it is cache aware,
 * using movnti stores so the copied data bypasses the CPU cache.
 * hyoshiok@miraclelinux.com
 */

static unsigned long __copy_user_zeroing_intel_nocache(void *to,
                                const void __user *from, unsigned long size)
{
        int d0, d1;

        __asm__ __volatile__(
               "        .align 2,0x90\n"
               "0:      movl 32(%4), %%eax\n"
               "        cmpl $67, %0\n"
               "        jbe 2f\n"
               "1:      movl 64(%4), %%eax\n"
               "        .align 2,0x90\n"
               "2:      movl 0(%4), %%eax\n"
               "21:     movl 4(%4), %%edx\n"
               "        movnti %%eax, 0(%3)\n"
               "        movnti %%edx, 4(%3)\n"
               "3:      movl 8(%4), %%eax\n"
               "31:     movl 12(%4),%%edx\n"
               "        movnti %%eax, 8(%3)\n"
               "        movnti %%edx, 12(%3)\n"
               "4:      movl 16(%4), %%eax\n"
               "41:     movl 20(%4), %%edx\n"
               "        movnti %%eax, 16(%3)\n"
               "        movnti %%edx, 20(%3)\n"
               "10:     movl 24(%4), %%eax\n"
               "51:     movl 28(%4), %%edx\n"
               "        movnti %%eax, 24(%3)\n"
               "        movnti %%edx, 28(%3)\n"
               "11:     movl 32(%4), %%eax\n"
               "61:     movl 36(%4), %%edx\n"
               "        movnti %%eax, 32(%3)\n"
               "        movnti %%edx, 36(%3)\n"
               "12:     movl 40(%4), %%eax\n"
               "71:     movl 44(%4), %%edx\n"
               "        movnti %%eax, 40(%3)\n"
               "        movnti %%edx, 44(%3)\n"
               "13:     movl 48(%4), %%eax\n"
               "81:     movl 52(%4), %%edx\n"
               "        movnti %%eax, 48(%3)\n"
               "        movnti %%edx, 52(%3)\n"
               "14:     movl 56(%4), %%eax\n"
               "91:     movl 60(%4), %%edx\n"
               "        movnti %%eax, 56(%3)\n"
               "        movnti %%edx, 60(%3)\n"
               "        addl $-64, %0\n"
               "        addl $64, %4\n"
               "        addl $64, %3\n"
               "        cmpl $63, %0\n"
               "        ja  0b\n"
               "        sfence \n"
               "5:      movl  %0, %%eax\n"
               "        shrl  $2, %0\n"
               "        andl $3, %%eax\n"
               "        cld\n"
               "6:      rep; movsl\n"
               "        movl %%eax,%0\n"
               "7:      rep; movsb\n"
               "8:\n"
               ".section .fixup,\"ax\"\n"
               "9:      lea 0(%%eax,%0,4),%0\n"
               "16:     pushl %0\n"
               "        pushl %%eax\n"
               "        xorl %%eax,%%eax\n"
               "        rep; stosb\n"
               "        popl %%eax\n"
               "        popl %0\n"
               "        jmp 8b\n"
               ".previous\n"
               _ASM_EXTABLE(0b,16b)
               _ASM_EXTABLE(1b,16b)
               _ASM_EXTABLE(2b,16b)
               _ASM_EXTABLE(21b,16b)
               _ASM_EXTABLE(3b,16b)
               _ASM_EXTABLE(31b,16b)
               _ASM_EXTABLE(4b,16b)
               _ASM_EXTABLE(41b,16b)
               _ASM_EXTABLE(10b,16b)
               _ASM_EXTABLE(51b,16b)
               _ASM_EXTABLE(11b,16b)
               _ASM_EXTABLE(61b,16b)
               _ASM_EXTABLE(12b,16b)
               _ASM_EXTABLE(71b,16b)
               _ASM_EXTABLE(13b,16b)
               _ASM_EXTABLE(81b,16b)
               _ASM_EXTABLE(14b,16b)
               _ASM_EXTABLE(91b,16b)
               _ASM_EXTABLE(6b,9b)
               _ASM_EXTABLE(7b,16b)
               : "=&c"(size), "=&D" (d0), "=&S" (d1)
               :  "1"(to), "2"(from), "0"(size)
               : "eax", "edx", "memory");
        return size;
}

static unsigned long __copy_user_intel_nocache(void *to,
                                const void __user *from, unsigned long size)
{
        int d0, d1;

        __asm__ __volatile__(
               "        .align 2,0x90\n"
               "0:      movl 32(%4), %%eax\n"
               "        cmpl $67, %0\n"
               "        jbe 2f\n"
               "1:      movl 64(%4), %%eax\n"
               "        .align 2,0x90\n"
               "2:      movl 0(%4), %%eax\n"
               "21:     movl 4(%4), %%edx\n"
               "        movnti %%eax, 0(%3)\n"
               "        movnti %%edx, 4(%3)\n"
               "3:      movl 8(%4), %%eax\n"
               "31:     movl 12(%4),%%edx\n"
               "        movnti %%eax, 8(%3)\n"
               "        movnti %%edx, 12(%3)\n"
               "4:      movl 16(%4), %%eax\n"
               "41:     movl 20(%4), %%edx\n"
               "        movnti %%eax, 16(%3)\n"
               "        movnti %%edx, 20(%3)\n"
               "10:     movl 24(%4), %%eax\n"
               "51:     movl 28(%4), %%edx\n"
               "        movnti %%eax, 24(%3)\n"
               "        movnti %%edx, 28(%3)\n"
               "11:     movl 32(%4), %%eax\n"
               "61:     movl 36(%4), %%edx\n"
               "        movnti %%eax, 32(%3)\n"
               "        movnti %%edx, 36(%3)\n"
               "12:     movl 40(%4), %%eax\n"
               "71:     movl 44(%4), %%edx\n"
               "        movnti %%eax, 40(%3)\n"
               "        movnti %%edx, 44(%3)\n"
               "13:     movl 48(%4), %%eax\n"
               "81:     movl 52(%4), %%edx\n"
               "        movnti %%eax, 48(%3)\n"
               "        movnti %%edx, 52(%3)\n"
               "14:     movl 56(%4), %%eax\n"
               "91:     movl 60(%4), %%edx\n"
               "        movnti %%eax, 56(%3)\n"
               "        movnti %%edx, 60(%3)\n"
               "        addl $-64, %0\n"
               "        addl $64, %4\n"
               "        addl $64, %3\n"
               "        cmpl $63, %0\n"
               "        ja  0b\n"
               "        sfence \n"
               "5:      movl  %0, %%eax\n"
               "        shrl  $2, %0\n"
               "        andl $3, %%eax\n"
               "        cld\n"
               "6:      rep; movsl\n"
               "        movl %%eax,%0\n"
               "7:      rep; movsb\n"
               "8:\n"
               ".section .fixup,\"ax\"\n"
               "9:      lea 0(%%eax,%0,4),%0\n"
               "16:     jmp 8b\n"
               ".previous\n"
               _ASM_EXTABLE(0b,16b)
               _ASM_EXTABLE(1b,16b)
               _ASM_EXTABLE(2b,16b)
               _ASM_EXTABLE(21b,16b)
               _ASM_EXTABLE(3b,16b)
               _ASM_EXTABLE(31b,16b)
               _ASM_EXTABLE(4b,16b)
               _ASM_EXTABLE(41b,16b)
               _ASM_EXTABLE(10b,16b)
               _ASM_EXTABLE(51b,16b)
               _ASM_EXTABLE(11b,16b)
               _ASM_EXTABLE(61b,16b)
               _ASM_EXTABLE(12b,16b)
               _ASM_EXTABLE(71b,16b)
               _ASM_EXTABLE(13b,16b)
               _ASM_EXTABLE(81b,16b)
               _ASM_EXTABLE(14b,16b)
               _ASM_EXTABLE(91b,16b)
               _ASM_EXTABLE(6b,9b)
               _ASM_EXTABLE(7b,16b)
               : "=&c"(size), "=&D" (d0), "=&S" (d1)
               :  "1"(to), "2"(from), "0"(size)
               : "eax", "edx", "memory");
        return size;
}

#else

/*
 * Leave these declared but undefined.  There should not be any
 * references to them.
 */
unsigned long __copy_user_zeroing_intel(void *to, const void __user *from,
                                        unsigned long size);
unsigned long __copy_user_intel(void __user *to, const void *from,
                                        unsigned long size);
unsigned long __copy_user_zeroing_intel_nocache(void *to,
                                const void __user *from, unsigned long size);
#endif /* CONFIG_X86_INTEL_USERCOPY */

/* Generic arbitrary sized copy.  */
#define __copy_user(to, from, size)                                     \
do {                                                                    \
        int __d0, __d1, __d2;                                           \
        __asm__ __volatile__(                                           \
                "       cmp  $7,%0\n"                                   \
                "       jbe  1f\n"                                      \
                "       movl %1,%0\n"                                   \
                "       negl %0\n"                                      \
                "       andl $7,%0\n"                                   \
                "       subl %0,%3\n"                                   \
                "4:     rep; movsb\n"                                   \
                "       movl %3,%0\n"                                   \
                "       shrl $2,%0\n"                                   \
                "       andl $3,%3\n"                                   \
                "       .align 2,0x90\n"                                \
                "0:     rep; movsl\n"                                   \
                "       movl %3,%0\n"                                   \
                "1:     rep; movsb\n"                                   \
                "2:\n"                                                  \
                ".section .fixup,\"ax\"\n"                              \
                "5:     addl %3,%0\n"                                   \
                "       jmp 2b\n"                                       \
                "3:     lea 0(%3,%0,4),%0\n"                            \
                "       jmp 2b\n"                                       \
                ".previous\n"                                           \
                _ASM_EXTABLE(4b,5b)                                     \
                _ASM_EXTABLE(0b,3b)                                     \
                _ASM_EXTABLE(1b,2b)                                     \
                : "=&c"(size), "=&D" (__d0), "=&S" (__d1), "=r"(__d2)   \
                : "3"(size), "0"(size), "1"(to), "2"(from)              \
                : "memory");                                            \
} while (0)

#define __copy_user_zeroing(to, from, size)                             \
do {                                                                    \
        int __d0, __d1, __d2;                                           \
        __asm__ __volatile__(                                           \
                "       cmp  $7,%0\n"                                   \
                "       jbe  1f\n"                                      \
                "       movl %1,%0\n"                                   \
                "       negl %0\n"                                      \
                "       andl $7,%0\n"                                   \
                "       subl %0,%3\n"                                   \
                "4:     rep; movsb\n"                                   \
                "       movl %3,%0\n"                                   \
                "       shrl $2,%0\n"                                   \
                "       andl $3,%3\n"                                   \
                "       .align 2,0x90\n"                                \
                "0:     rep; movsl\n"                                   \
                "       movl %3,%0\n"                                   \
                "1:     rep; movsb\n"                                   \
                "2:\n"                                                  \
                ".section .fixup,\"ax\"\n"                              \
                "5:     addl %3,%0\n"                                   \
                "       jmp 6f\n"                                       \
                "3:     lea 0(%3,%0,4),%0\n"                            \
                "6:     pushl %0\n"                                     \
                "       pushl %%eax\n"                                  \
                "       xorl %%eax,%%eax\n"                             \
                "       rep; stosb\n"                                   \
                "       popl %%eax\n"                                   \
                "       popl %0\n"                                      \
                "       jmp 2b\n"                                       \
                ".previous\n"                                           \
                _ASM_EXTABLE(4b,5b)                                     \
                _ASM_EXTABLE(0b,3b)                                     \
                _ASM_EXTABLE(1b,6b)                                     \
                : "=&c"(size), "=&D" (__d0), "=&S" (__d1), "=r"(__d2)   \
                : "3"(size), "0"(size), "1"(to), "2"(from)              \
                : "memory");                                            \
} while (0)
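
/*
 * Illustrative only, not part of the original file: the difference between
 * the two macros above, modeled in plain C.  On a fault, __copy_user() just
 * reports the residue, while __copy_user_zeroing() additionally zero-fills
 * the destination bytes it could not copy (the rep;stosb in its fixup path),
 * so kernel buffers never hold stale data.  raw_copy_model() is a
 * hypothetical stand-in for the faulting copy that returns the residue.
 */
#if 0
static unsigned long copy_user_zeroing_model(char *to, const char __user *from,
                                             unsigned long size)
{
        unsigned long residue = raw_copy_model(to, from, size);

        if (residue)
                memset(to + (size - residue), 0, residue);
        return residue;
}
#endif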

unsigned long __copy_to_user_ll(void __user *to, const void *from,
                                unsigned long n)
{
#ifndef CONFIG_X86_WP_WORKS_OK
        if (unlikely(boot_cpu_data.wp_works_ok == 0) &&
                        ((unsigned long)to) < TASK_SIZE) {
                /*
                 * When we are in an atomic section (see
                 * mm/filemap.c:file_read_actor), return the full
                 * length to take the slow path.
                 */
                if (in_atomic())
                        return n;

                /*
                 * CPU does not honor the WP bit when writing
                 * from supervisory mode, and due to preemption or SMP,
                 * the page tables can change at any time.
                 * Do it manually.      Manfred <manfred@colorfullife.com>
                 */
                while (n) {
                        unsigned long offset = ((unsigned long)to)%PAGE_SIZE;
                        unsigned long len = PAGE_SIZE - offset;
                        int retval;
                        struct page *pg;
                        void *maddr;

                        if (len > n)
                                len = n;

survive:
                        down_read(&current->mm->mmap_sem);
                        retval = get_user_pages(current, current->mm,
                                        (unsigned long)to, 1, 1, 0, &pg, NULL);

                        if (retval == -ENOMEM && is_global_init(current)) {
                                up_read(&current->mm->mmap_sem);
                                congestion_wait(BLK_RW_ASYNC, HZ/50);
                                goto survive;
                        }

                        if (retval != 1) {
                                up_read(&current->mm->mmap_sem);
                                break;
                        }

                        maddr = kmap_atomic(pg);
                        memcpy(maddr + offset, from, len);
                        kunmap_atomic(maddr);
                        set_page_dirty_lock(pg);
                        put_page(pg);
                        up_read(&current->mm->mmap_sem);

                        from += len;
                        to += len;
                        n -= len;
                }
                return n;
        }
#endif
        if (movsl_is_ok(to, from, n))
                __copy_user(to, from, n);
        else
                n = __copy_user_intel(to, from, n);
        return n;
}
EXPORT_SYMBOL(__copy_to_user_ll);

unsigned long __copy_from_user_ll(void *to, const void __user *from,
                                        unsigned long n)
{
        if (movsl_is_ok(to, from, n))
                __copy_user_zeroing(to, from, n);
        else
                n = __copy_user_zeroing_intel(to, from, n);
        return n;
}
EXPORT_SYMBOL(__copy_from_user_ll);

unsigned long __copy_from_user_ll_nozero(void *to, const void __user *from,
                                         unsigned long n)
{
        if (movsl_is_ok(to, from, n))
                __copy_user(to, from, n);
        else
                n = __copy_user_intel((void __user *)to,
                                      (const void *)from, n);
        return n;
}
EXPORT_SYMBOL(__copy_from_user_ll_nozero);

unsigned long __copy_from_user_ll_nocache(void *to, const void __user *from,
                                        unsigned long n)
{
#ifdef CONFIG_X86_INTEL_USERCOPY
        if (n > 64 && cpu_has_xmm2)
                n = __copy_user_zeroing_intel_nocache(to, from, n);
        else
                __copy_user_zeroing(to, from, n);
#else
        __copy_user_zeroing(to, from, n);
#endif
        return n;
}
EXPORT_SYMBOL(__copy_from_user_ll_nocache);

unsigned long __copy_from_user_ll_nocache_nozero(void *to, const void __user *from,
                                        unsigned long n)
{
#ifdef CONFIG_X86_INTEL_USERCOPY
        if (n > 64 && cpu_has_xmm2)
                n = __copy_user_intel_nocache(to, from, n);
        else
                __copy_user(to, from, n);
#else
        __copy_user(to, from, n);
#endif
        return n;
}
EXPORT_SYMBOL(__copy_from_user_ll_nocache_nozero);
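
/*
 * Illustrative only, not part of the original file: a sketch of when the
 * _nocache entry points pay off.  For large, read-once streaming copies
 * (e.g. filling a buffer that will be written straight to disk), the movnti
 * stores keep the destination out of the CPU cache; the cutoff above
 * (n > 64 and SSE2 present) falls back to the cached path for small copies.
 * The caller and its buffer here are assumptions for the example.
 */
#if 0
static unsigned long example_fill_page_from_user(void *page_buf,
                                                 const void __user *ubuf)
{
        /* destination won't be re-read soon, so skip cache allocation */
        return __copy_from_user_ll_nocache_nozero(page_buf, ubuf, PAGE_SIZE);
}
#endif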

/**
 * copy_to_user: - Copy a block of data into user space.
 * @to:   Destination address, in user space.
 * @from: Source address, in kernel space.
 * @n:    Number of bytes to copy.
 *
 * Context: User context only.  This function may sleep.
 *
 * Copy data from kernel space to user space.
 *
 * Returns number of bytes that could not be copied.
 * On success, this will be zero.
 */
unsigned long
copy_to_user(void __user *to, const void *from, unsigned long n)
{
        if (access_ok(VERIFY_WRITE, to, n))
                n = __copy_to_user(to, from, n);
        return n;
}
EXPORT_SYMBOL(copy_to_user);

/**
 * copy_from_user: - Copy a block of data from user space.
 * @to:   Destination address, in kernel space.
 * @from: Source address, in user space.
 * @n:    Number of bytes to copy.
 *
 * Context: User context only.  This function may sleep.
 *
 * Copy data from user space to kernel space.
 *
 * Returns number of bytes that could not be copied.
 * On success, this will be zero.
 *
 * If some data could not be copied, this function will pad the copied
 * data to the requested size using zero bytes.
 */
unsigned long
_copy_from_user(void *to, const void __user *from, unsigned long n)
{
        if (access_ok(VERIFY_READ, from, n))
                n = __copy_from_user(to, from, n);
        else
                memset(to, 0, n);
        return n;
}
EXPORT_SYMBOL(_copy_from_user);
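
/*
 * Illustrative only, not part of the original file: a minimal sketch of the
 * usual caller pattern for the two exported entry points above, as an
 * ioctl-style round trip.  The struct, handler name, and -EFAULT policy are
 * assumptions for the example.
 */
#if 0
struct example_args {
        int in;
        int out;
};

static long example_ioctl(void __user *uarg)
{
        struct example_args args;

        if (copy_from_user(&args, uarg, sizeof(args)))
                return -EFAULT;         /* nonzero = bytes not copied */
        args.out = args.in * 2;
        if (copy_to_user(uarg, &args, sizeof(args)))
                return -EFAULT;
        return 0;
}
#endif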

void copy_from_user_overflow(void)
{
        WARN(1, "Buffer overflow detected!\n");
}
EXPORT_SYMBOL(copy_from_user_overflow);