Commit 090cd1b4 authored by Andrew Morton, committed by Linus Torvalds

[PATCH] fix intel copy_to_user()

A `cat /dev/kmem' oopses the kernel.  This is because the new Intel
copy_to_user() doesn't handle faults on the kernel-side source address
of the copy.

It is not obvious how to fix read_kmem(), and handling faults on either
the source or the destination is traditional behaviour, so fix it in the
copy function by adding exception table entries for the reads as well as
the writes.
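
For reference, the recovery relies on the kernel's exception-table mechanism:
each instruction that may fault carries a local label, and a matching
__ex_table entry maps that instruction's address to a fixup address, so the
page-fault handler resumes there instead of oopsing.  The sketch below is
illustrative only (copy_one_word() is a hypothetical helper, not part of this
patch); it shows the i386 pattern with fixup entries for both the read and the
write, which is what the hunk below adds for the read side:

/* Minimal sketch of the i386 exception-table pattern (hypothetical helper,
 * not from this patch).  Copies one 32-bit word and returns the number of
 * bytes NOT copied: 0 on success, 4 if either access faulted.
 */
static inline unsigned long copy_one_word(void *to, const void *from)
{
	unsigned long left = 4;
	__asm__ __volatile__(
		"1:	movl (%2), %%eax\n"	/* read: may fault on a bad source */
		"2:	movl %%eax, (%1)\n"	/* write: may fault on a bad dest  */
		"	xorl %0, %0\n"		/* both accesses survived          */
		"3:\n"
		".section .fixup,\"ax\"\n"
		"4:	jmp 3b\n"		/* recover, leaving %0 == 4        */
		".previous\n"
		".section __ex_table,\"a\"\n"
		"	.align 4\n"
		"	.long 1b,4b\n"		/* fault at 1: resume at 4         */
		"	.long 2b,4b\n"		/* fault at 2: resume at 4         */
		".previous"
		: "+r" (left)
		: "r" (to), "r" (from)
		: "eax", "memory");
	return left;
}

With only the write entry present (the pre-patch state), a fault on the
labelled load has no fixup and the kernel oopses, which is what
`cat /dev/kmem' triggered.
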
parent 895b7792
@@ -222,85 +222,105 @@ __copy_user_intel(void *to, const void *from,unsigned long size)
 {
 	int d0, d1;
 	__asm__ __volatile__(
		"       .align 2,0x90\n"
-		"0:     movl 32(%4), %%eax\n"
+		"1:     movl 32(%4), %%eax\n"
		"       cmpl $67, %0\n"
-		"       jbe 1f\n"
-		"       movl 64(%4), %%eax\n"
+		"       jbe 3f\n"
+		"2:     movl 64(%4), %%eax\n"
		"       .align 2,0x90\n"
-		"1:     movl 0(%4), %%eax\n"
-		"       movl 4(%4), %%edx\n"
-		"2:     movl %%eax, 0(%3)\n"
-		"21:    movl %%edx, 4(%3)\n"
-		"       movl 8(%4), %%eax\n"
-		"       movl 12(%4),%%edx\n"
-		"3:     movl %%eax, 8(%3)\n"
-		"31:    movl %%edx, 12(%3)\n"
-		"       movl 16(%4), %%eax\n"
-		"       movl 20(%4), %%edx\n"
-		"4:     movl %%eax, 16(%3)\n"
-		"41:    movl %%edx, 20(%3)\n"
-		"       movl 24(%4), %%eax\n"
-		"       movl 28(%4), %%edx\n"
-		"10:    movl %%eax, 24(%3)\n"
-		"51:    movl %%edx, 28(%3)\n"
-		"       movl 32(%4), %%eax\n"
-		"       movl 36(%4), %%edx\n"
-		"11:    movl %%eax, 32(%3)\n"
-		"61:    movl %%edx, 36(%3)\n"
-		"       movl 40(%4), %%eax\n"
-		"       movl 44(%4), %%edx\n"
-		"12:    movl %%eax, 40(%3)\n"
-		"71:    movl %%edx, 44(%3)\n"
-		"       movl 48(%4), %%eax\n"
-		"       movl 52(%4), %%edx\n"
-		"13:    movl %%eax, 48(%3)\n"
-		"81:    movl %%edx, 52(%3)\n"
-		"       movl 56(%4), %%eax\n"
-		"       movl 60(%4), %%edx\n"
-		"14:    movl %%eax, 56(%3)\n"
-		"91:    movl %%edx, 60(%3)\n"
+		"3:     movl 0(%4), %%eax\n"
+		"4:     movl 4(%4), %%edx\n"
+		"5:     movl %%eax, 0(%3)\n"
+		"6:     movl %%edx, 4(%3)\n"
+		"7:     movl 8(%4), %%eax\n"
+		"8:     movl 12(%4),%%edx\n"
+		"9:     movl %%eax, 8(%3)\n"
+		"10:    movl %%edx, 12(%3)\n"
+		"11:    movl 16(%4), %%eax\n"
+		"12:    movl 20(%4), %%edx\n"
+		"13:    movl %%eax, 16(%3)\n"
+		"14:    movl %%edx, 20(%3)\n"
+		"15:    movl 24(%4), %%eax\n"
+		"16:    movl 28(%4), %%edx\n"
+		"17:    movl %%eax, 24(%3)\n"
+		"18:    movl %%edx, 28(%3)\n"
+		"19:    movl 32(%4), %%eax\n"
+		"20:    movl 36(%4), %%edx\n"
+		"21:    movl %%eax, 32(%3)\n"
+		"22:    movl %%edx, 36(%3)\n"
+		"23:    movl 40(%4), %%eax\n"
+		"24:    movl 44(%4), %%edx\n"
+		"25:    movl %%eax, 40(%3)\n"
+		"26:    movl %%edx, 44(%3)\n"
+		"27:    movl 48(%4), %%eax\n"
+		"28:    movl 52(%4), %%edx\n"
+		"29:    movl %%eax, 48(%3)\n"
+		"30:    movl %%edx, 52(%3)\n"
+		"31:    movl 56(%4), %%eax\n"
+		"32:    movl 60(%4), %%edx\n"
+		"33:    movl %%eax, 56(%3)\n"
+		"34:    movl %%edx, 60(%3)\n"
		"       addl $-64, %0\n"
		"       addl $64, %4\n"
		"       addl $64, %3\n"
		"       cmpl $63, %0\n"
-		"       ja 0b\n"
-		"5:     movl %0, %%eax\n"
+		"       ja 1b\n"
+		"35:    movl %0, %%eax\n"
		"       shrl $2, %0\n"
		"       andl $3, %%eax\n"
		"       cld\n"
-		"6:     rep; movsl\n"
-		"       movl %%eax, %0\n"
-		"7:     rep; movsb\n"
-		"8:\n"
+		"99:    rep; movsl\n"
+		"36:    movl %%eax, %0\n"
+		"37:    rep; movsb\n"
+		"100:\n"
		".section .fixup,\"ax\"\n"
-		"9:     lea 0(%%eax,%0,4),%0\n"
-		"       jmp 8b\n"
+		"101:   lea 0(%%eax,%0,4),%0\n"
+		"       jmp 100b\n"
		".previous\n"
		".section __ex_table,\"a\"\n"
		"       .align 4\n"
-		"       .long 2b,8b\n"
-		"       .long 21b,8b\n"
-		"       .long 3b,8b\n"
-		"       .long 31b,8b\n"
-		"       .long 4b,8b\n"
-		"       .long 41b,8b\n"
-		"       .long 10b,8b\n"
-		"       .long 51b,8b\n"
-		"       .long 11b,8b\n"
-		"       .long 61b,8b\n"
-		"       .long 12b,8b\n"
-		"       .long 71b,8b\n"
-		"       .long 13b,8b\n"
-		"       .long 81b,8b\n"
-		"       .long 14b,8b\n"
-		"       .long 91b,8b\n"
-		"       .long 6b,9b\n"
-		"       .long 7b,8b\n"
+		"       .long 1b,100b\n"
+		"       .long 2b,100b\n"
+		"       .long 3b,100b\n"
+		"       .long 4b,100b\n"
+		"       .long 5b,100b\n"
+		"       .long 6b,100b\n"
+		"       .long 7b,100b\n"
+		"       .long 8b,100b\n"
+		"       .long 9b,100b\n"
+		"       .long 10b,100b\n"
+		"       .long 11b,100b\n"
+		"       .long 12b,100b\n"
+		"       .long 13b,100b\n"
+		"       .long 14b,100b\n"
+		"       .long 15b,100b\n"
+		"       .long 16b,100b\n"
+		"       .long 17b,100b\n"
+		"       .long 18b,100b\n"
+		"       .long 19b,100b\n"
+		"       .long 20b,100b\n"
+		"       .long 21b,100b\n"
+		"       .long 22b,100b\n"
+		"       .long 23b,100b\n"
+		"       .long 24b,100b\n"
+		"       .long 25b,100b\n"
+		"       .long 26b,100b\n"
+		"       .long 27b,100b\n"
+		"       .long 28b,100b\n"
+		"       .long 29b,100b\n"
+		"       .long 30b,100b\n"
+		"       .long 31b,100b\n"
+		"       .long 32b,100b\n"
+		"       .long 33b,100b\n"
+		"       .long 34b,100b\n"
+		"       .long 35b,100b\n"
+		"       .long 36b,100b\n"
+		"       .long 37b,100b\n"
+		"       .long 99b,101b\n"
		".previous"
		: "=&c"(size), "=&D" (d0), "=&S" (d1)
		: "1"(to), "2"(from), "0"(size)
		: "eax", "edx", "memory");
 	return size;
 }