Commit 278b917f authored by Youquan Song, committed by Borislav Petkov

x86/mce: Add _ASM_EXTABLE_CPY for copy user access

_ASM_EXTABLE_UA is the general exception table entry used to record the
exception fixup for all the places where the kernel accesses user space.

To enable recovery from machine checks while copying data from user
addresses it is necessary to be able to distinguish the places that loop
copying data from those that copy a single byte/word/etc.

Add a new macro _ASM_EXTABLE_CPY and use it in place of _ASM_EXTABLE_UA
in the copy functions.

The new fixup routine ex_handler_copy() is almost an exact copy of
ex_handler_uaccess(). The difference is that it also records the exception
reason, i.e. the trap number, in regs->ax, so that it can later be checked
whether the fixup was triggered by a machine check. Following patches use
this to avoid trying to copy the remaining bytes from the tail of the copy
and possibly hitting the poison again.
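
To illustrate the intent, here is a rough, hypothetical C sketch (not part
of this patch; the real copy routines and their tail handling are in
assembly, and the helper name below is made up) of how the trap number
recorded in regs->ax could be consumed: on a machine check the remaining
bytes are not retried, on an ordinary fault the tail is retried byte by
byte.

#include <linux/uaccess.h>
#include <asm/trapnr.h>

/*
 * Illustrative only: a hypothetical helper showing how the trap number
 * recorded by ex_handler_copy() could steer the tail handling of a
 * failed copy. On #MC the poisoned source is not touched again; on an
 * ordinary fault the tail is retried one byte at a time.
 */
static unsigned long copy_tail_sketch(char *dst, const char __user *src,
				      unsigned long remaining, int trapnr)
{
	unsigned long i;

	if (trapnr == X86_TRAP_MC)
		return remaining;	/* report everything left as uncopied */

	for (i = 0; i < remaining; i++) {
		if (__get_user(dst[i], src + i))
			break;
	}
	return remaining - i;		/* bytes that could not be copied */
}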

The new mce.kflags bit MCE_IN_KERNEL_COPYIN will be used by the
mce_severity() calculation to indicate that a machine check is recoverable
because the kernel was copying from user space.
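
As a rough, hypothetical sketch of that intent (the severity-table change
itself comes in later patches and is not shown here; the function name
below is made up), the new kflags bit could be consulted along these
lines:

#include <asm/mce.h>

/*
 * Hypothetical illustration only: how severity/recovery code might
 * consult the new kflags bit. The actual mce_severity() change is not
 * part of this patch.
 */
static bool mce_kernel_copy_recoverable(struct mce *m)
{
	/*
	 * Machine check hit while copying from user space: the copy's
	 * error exit returns a failure to the caller, so this can be
	 * treated much like a fault taken in user mode.
	 */
	return m->kflags & MCE_IN_KERNEL_COPYIN;
}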
Signed-off-by: Youquan Song <youquan.song@intel.com>
Signed-off-by: Tony Luck <tony.luck@intel.com>
Signed-off-by: Borislav Petkov <bp@suse.de>
Link: https://lkml.kernel.org/r/20201006210910.21062-4-tony.luck@intel.com
parent a05d54c4
arch/x86/include/asm/asm.h
@@ -135,6 +135,9 @@
 # define _ASM_EXTABLE_UA(from, to)				\
 	_ASM_EXTABLE_HANDLE(from, to, ex_handler_uaccess)
+# define _ASM_EXTABLE_CPY(from, to)				\
+	_ASM_EXTABLE_HANDLE(from, to, ex_handler_copy)
 # define _ASM_EXTABLE_FAULT(from, to)				\
 	_ASM_EXTABLE_HANDLE(from, to, ex_handler_fault)
@@ -160,6 +163,9 @@
 # define _ASM_EXTABLE_UA(from, to)				\
 	_ASM_EXTABLE_HANDLE(from, to, ex_handler_uaccess)
+# define _ASM_EXTABLE_CPY(from, to)				\
+	_ASM_EXTABLE_HANDLE(from, to, ex_handler_copy)
 # define _ASM_EXTABLE_FAULT(from, to)				\
 	_ASM_EXTABLE_HANDLE(from, to, ex_handler_fault)
arch/x86/include/asm/mce.h
@@ -136,8 +136,23 @@
 #define MCE_HANDLED_NFIT	BIT_ULL(3)
 #define MCE_HANDLED_EDAC	BIT_ULL(4)
 #define MCE_HANDLED_MCELOG	BIT_ULL(5)
+/*
+ * Indicates an MCE which has happened in kernel space but from
+ * which the kernel can recover simply by executing fixup_exception()
+ * so that an error is returned to the caller of the function that
+ * hit the machine check.
+ */
 #define MCE_IN_KERNEL_RECOV	BIT_ULL(6)
+/*
+ * Indicates an MCE that happened in kernel space while copying data
+ * from user. In this case fixup_exception() gets the kernel to the
+ * error exit for the copy function. Machine check handler can then
+ * treat it like a fault taken in user mode.
+ */
+#define MCE_IN_KERNEL_COPYIN	BIT_ULL(7)
 /*
  * This structure contains all data related to the MCE log. Also
  * carries a signature to make it easier to find from external
arch/x86/lib/copy_user_64.S
@@ -36,8 +36,8 @@
 	jmp .Lcopy_user_handle_tail
 	.previous
-	_ASM_EXTABLE_UA(100b, 103b)
-	_ASM_EXTABLE_UA(101b, 103b)
+	_ASM_EXTABLE_CPY(100b, 103b)
+	_ASM_EXTABLE_CPY(101b, 103b)
 	.endm
 /*
@@ -116,26 +116,26 @@ SYM_FUNC_START(copy_user_generic_unrolled)
 60:	jmp .Lcopy_user_handle_tail /* ecx is zerorest also */
 	.previous
-	_ASM_EXTABLE_UA(1b, 30b)
-	_ASM_EXTABLE_UA(2b, 30b)
-	_ASM_EXTABLE_UA(3b, 30b)
-	_ASM_EXTABLE_UA(4b, 30b)
-	_ASM_EXTABLE_UA(5b, 30b)
-	_ASM_EXTABLE_UA(6b, 30b)
-	_ASM_EXTABLE_UA(7b, 30b)
-	_ASM_EXTABLE_UA(8b, 30b)
-	_ASM_EXTABLE_UA(9b, 30b)
-	_ASM_EXTABLE_UA(10b, 30b)
-	_ASM_EXTABLE_UA(11b, 30b)
-	_ASM_EXTABLE_UA(12b, 30b)
-	_ASM_EXTABLE_UA(13b, 30b)
-	_ASM_EXTABLE_UA(14b, 30b)
-	_ASM_EXTABLE_UA(15b, 30b)
-	_ASM_EXTABLE_UA(16b, 30b)
-	_ASM_EXTABLE_UA(18b, 40b)
-	_ASM_EXTABLE_UA(19b, 40b)
-	_ASM_EXTABLE_UA(21b, 50b)
-	_ASM_EXTABLE_UA(22b, 50b)
+	_ASM_EXTABLE_CPY(1b, 30b)
+	_ASM_EXTABLE_CPY(2b, 30b)
+	_ASM_EXTABLE_CPY(3b, 30b)
+	_ASM_EXTABLE_CPY(4b, 30b)
+	_ASM_EXTABLE_CPY(5b, 30b)
+	_ASM_EXTABLE_CPY(6b, 30b)
+	_ASM_EXTABLE_CPY(7b, 30b)
+	_ASM_EXTABLE_CPY(8b, 30b)
+	_ASM_EXTABLE_CPY(9b, 30b)
+	_ASM_EXTABLE_CPY(10b, 30b)
+	_ASM_EXTABLE_CPY(11b, 30b)
+	_ASM_EXTABLE_CPY(12b, 30b)
+	_ASM_EXTABLE_CPY(13b, 30b)
+	_ASM_EXTABLE_CPY(14b, 30b)
+	_ASM_EXTABLE_CPY(15b, 30b)
+	_ASM_EXTABLE_CPY(16b, 30b)
+	_ASM_EXTABLE_CPY(18b, 40b)
+	_ASM_EXTABLE_CPY(19b, 40b)
+	_ASM_EXTABLE_CPY(21b, 50b)
+	_ASM_EXTABLE_CPY(22b, 50b)
 SYM_FUNC_END(copy_user_generic_unrolled)
 EXPORT_SYMBOL(copy_user_generic_unrolled)
@@ -180,8 +180,8 @@ SYM_FUNC_START(copy_user_generic_string)
 	jmp .Lcopy_user_handle_tail
 	.previous
-	_ASM_EXTABLE_UA(1b, 11b)
-	_ASM_EXTABLE_UA(3b, 12b)
+	_ASM_EXTABLE_CPY(1b, 11b)
+	_ASM_EXTABLE_CPY(3b, 12b)
 SYM_FUNC_END(copy_user_generic_string)
 EXPORT_SYMBOL(copy_user_generic_string)
@@ -213,7 +213,7 @@ SYM_FUNC_START(copy_user_enhanced_fast_string)
 	jmp .Lcopy_user_handle_tail
 	.previous
-	_ASM_EXTABLE_UA(1b, 12b)
+	_ASM_EXTABLE_CPY(1b, 12b)
 SYM_FUNC_END(copy_user_enhanced_fast_string)
 EXPORT_SYMBOL(copy_user_enhanced_fast_string)
@@ -237,7 +237,7 @@ SYM_CODE_START_LOCAL(.Lcopy_user_handle_tail)
 	ASM_CLAC
 	ret
-	_ASM_EXTABLE_UA(1b, 2b)
+	_ASM_EXTABLE_CPY(1b, 2b)
 SYM_CODE_END(.Lcopy_user_handle_tail)
 /*
@@ -366,27 +366,27 @@ SYM_FUNC_START(__copy_user_nocache)
 	jmp .Lcopy_user_handle_tail
 	.previous
-	_ASM_EXTABLE_UA(1b, .L_fixup_4x8b_copy)
-	_ASM_EXTABLE_UA(2b, .L_fixup_4x8b_copy)
-	_ASM_EXTABLE_UA(3b, .L_fixup_4x8b_copy)
-	_ASM_EXTABLE_UA(4b, .L_fixup_4x8b_copy)
-	_ASM_EXTABLE_UA(5b, .L_fixup_4x8b_copy)
-	_ASM_EXTABLE_UA(6b, .L_fixup_4x8b_copy)
-	_ASM_EXTABLE_UA(7b, .L_fixup_4x8b_copy)
-	_ASM_EXTABLE_UA(8b, .L_fixup_4x8b_copy)
-	_ASM_EXTABLE_UA(9b, .L_fixup_4x8b_copy)
-	_ASM_EXTABLE_UA(10b, .L_fixup_4x8b_copy)
-	_ASM_EXTABLE_UA(11b, .L_fixup_4x8b_copy)
-	_ASM_EXTABLE_UA(12b, .L_fixup_4x8b_copy)
-	_ASM_EXTABLE_UA(13b, .L_fixup_4x8b_copy)
-	_ASM_EXTABLE_UA(14b, .L_fixup_4x8b_copy)
-	_ASM_EXTABLE_UA(15b, .L_fixup_4x8b_copy)
-	_ASM_EXTABLE_UA(16b, .L_fixup_4x8b_copy)
-	_ASM_EXTABLE_UA(20b, .L_fixup_8b_copy)
-	_ASM_EXTABLE_UA(21b, .L_fixup_8b_copy)
-	_ASM_EXTABLE_UA(30b, .L_fixup_4b_copy)
-	_ASM_EXTABLE_UA(31b, .L_fixup_4b_copy)
-	_ASM_EXTABLE_UA(40b, .L_fixup_1b_copy)
-	_ASM_EXTABLE_UA(41b, .L_fixup_1b_copy)
+	_ASM_EXTABLE_CPY(1b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(2b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(3b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(4b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(5b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(6b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(7b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(8b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(9b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(10b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(11b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(12b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(13b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(14b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(15b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(16b, .L_fixup_4x8b_copy)
+	_ASM_EXTABLE_CPY(20b, .L_fixup_8b_copy)
+	_ASM_EXTABLE_CPY(21b, .L_fixup_8b_copy)
+	_ASM_EXTABLE_CPY(30b, .L_fixup_4b_copy)
+	_ASM_EXTABLE_CPY(31b, .L_fixup_4b_copy)
+	_ASM_EXTABLE_CPY(40b, .L_fixup_1b_copy)
+	_ASM_EXTABLE_CPY(41b, .L_fixup_1b_copy)
 SYM_FUNC_END(__copy_user_nocache)
 EXPORT_SYMBOL(__copy_user_nocache)
arch/x86/mm/extable.c
@@ -80,6 +80,18 @@ __visible bool ex_handler_uaccess(const struct exception_table_entry *fixup,
 }
 EXPORT_SYMBOL(ex_handler_uaccess);
+__visible bool ex_handler_copy(const struct exception_table_entry *fixup,
+			       struct pt_regs *regs, int trapnr,
+			       unsigned long error_code,
+			       unsigned long fault_addr)
+{
+	WARN_ONCE(trapnr == X86_TRAP_GP, "General protection fault in user access. Non-canonical address?");
+	regs->ip = ex_fixup_addr(fixup);
+	regs->ax = trapnr;
+	return true;
+}
+EXPORT_SYMBOL(ex_handler_copy);
 __visible bool ex_handler_rdmsr_unsafe(const struct exception_table_entry *fixup,
 				       struct pt_regs *regs, int trapnr,
 				       unsigned long error_code,
@@ -136,7 +148,7 @@ enum handler_type ex_get_fault_handler_type(unsigned long ip)
 	handler = ex_fixup_handler(e);
 	if (handler == ex_handler_fault)
 		return EX_HANDLER_FAULT;
-	else if (handler == ex_handler_uaccess)
+	else if (handler == ex_handler_uaccess || handler == ex_handler_copy)
 		return EX_HANDLER_UACCESS;
 	else
 		return EX_HANDLER_OTHER;