Commit b19b74bc authored by Kirill A. Shutemov, committed by Dave Hansen

x86/mm: Rework address range check in get_user() and put_user()

The functions get_user() and put_user() check that the target address
range resides in the user space portion of the virtual address space.
In order to perform this check, the functions compare the end of the
range against TASK_SIZE_MAX.
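
For context, a typical call site looks something like the sketch below
(uptr stands in for an int __user * argument; get_user() returns 0 on
success and -EFAULT on failure):

	/* Illustrative only: copy one int in from user space. */
	int value;

	if (get_user(value, uptr))
		return -EFAULT;	/* range check failed or the access faulted */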

For kernels compiled with CONFIG_X86_5LEVEL, this process requires some
additional trickery using ALTERNATIVE, as TASK_SIZE_MAX depends on the
paging mode in use.

Linus suggested that this check could be simplified for 64-bit kernels.
It is sufficient to check bit 63 of the address to ensure that the range
belongs to user space. Additionally, the use of branches can be avoided
by setting the target address to all ones if bit 63 is set.
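
In C terms, the suggested check is roughly the following (a minimal
sketch with a hypothetical helper name; the kernel does this in
assembly, as three instructions on the register holding the address):

	#include <stdint.h>

	/*
	 * Branchless sketch: an arithmetic right shift copies bit 63 into
	 * every bit of the mask. A kernel address (bit 63 set) is thereby
	 * forced to all ones, which faults on access; a user address
	 * (bit 63 clear) passes through unchanged.
	 */
	static inline uint64_t force_user_addr(uint64_t addr)
	{
		uint64_t mask = (uint64_t)((int64_t)addr >> 63);

		return addr | mask;
	}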

There's no need to check the end of the access range, as there is a
huge gap between the end of the userspace range and the start of the
kernel range. The gap consists of the canonical hole plus unused
ranges on both the kernel and userspace sides.
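
For reference, the 4-level paging layout behind this argument (with
5-level paging the user half grows to just below 2^56 and the kernel
half starts at 0xff00000000000000, but the shape is the same):

	0x0000000000000000 - 0x00007fffffffffff   user space (bit 63 clear)
	0x0000800000000000 - 0xffff7fffffffffff   non-canonical hole
	0xffff800000000000 - 0xffffffffffffffff   kernel space (bit 63 set)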

If an address with bit 63 set is passed down, it will trigger a #GP
exception. _ASM_EXTABLE_UA() complains about such accesses, so replace
it with plain _ASM_EXTABLE(): faulting on these addresses is expected
behaviour now.

The updated get_user() and put_user() checks are also compatible with
Linear Address Masking (LAM), which lets user space encode metadata in
the upper bits of pointers: since only bit 63 is examined, there is no
need to untag the address before handling it.
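
As an illustration, a tagged pointer still passes the bit 63 check
(user-space sketch reusing force_user_addr() from the sketch above;
the tag value is made up, assuming LAM_U57, which places metadata in
bits 62:57):

	#include <assert.h>
	#include <stdint.h>

	/* Same masking trick as in the sketch above. */
	static uint64_t force_user_addr(uint64_t addr)
	{
		return addr | (uint64_t)((int64_t)addr >> 63);
	}

	int main(void)
	{
		uint64_t ptr = 0x00007f1234567890ULL;	/* plain user pointer */
		uint64_t tag = 0x2bULL << 57;		/* made-up LAM_U57 tag */

		/* Tagged user pointer: bit 63 still clear, passes untouched. */
		assert(force_user_addr(ptr | tag) == (ptr | tag));

		/* Kernel pointer: bit 63 set, forced to all ones. */
		assert(force_user_addr(0xffff800000000000ULL) == ~0ULL);
		return 0;
	}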

Suggested-by: Linus Torvalds <torvalds@linux-foundation.org>
Signed-off-by: Kirill A. Shutemov <kirill.shutemov@linux.intel.com>
Signed-off-by: Dave Hansen <dave.hansen@linux.intel.com>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Link: https://lore.kernel.org/all/20230312112612.31869-2-kirill.shutemov%40linux.intel.com
parent eeac8ede
arch/x86/lib/getuser.S
@@ -37,22 +37,22 @@
 #define ASM_BARRIER_NOSPEC ALTERNATIVE "", "lfence", X86_FEATURE_LFENCE_RDTSC
 
-#ifdef CONFIG_X86_5LEVEL
-#define LOAD_TASK_SIZE_MINUS_N(n) \
-	ALTERNATIVE __stringify(mov $((1 << 47) - 4096 - (n)),%rdx), \
-		    __stringify(mov $((1 << 56) - 4096 - (n)),%rdx), X86_FEATURE_LA57
-#else
-#define LOAD_TASK_SIZE_MINUS_N(n) \
-	mov $(TASK_SIZE_MAX - (n)),%_ASM_DX
-#endif
+.macro check_range size:req
+.if IS_ENABLED(CONFIG_X86_64)
+	mov %rax, %rdx
+	sar $63, %rdx
+	or %rdx, %rax
+.else
+	cmp $TASK_SIZE_MAX-\size+1, %eax
+	jae .Lbad_get_user
+	sbb %edx, %edx		/* array_index_mask_nospec() */
+	and %edx, %eax
+.endif
+.endm
 
 	.text
 SYM_FUNC_START(__get_user_1)
-	LOAD_TASK_SIZE_MINUS_N(0)
-	cmp %_ASM_DX,%_ASM_AX
-	jae bad_get_user
-	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
-	and %_ASM_DX, %_ASM_AX
+	check_range size=1
 	ASM_STAC
 1:	movzbl (%_ASM_AX),%edx
 	xor %eax,%eax
@@ -62,11 +62,7 @@ SYM_FUNC_END(__get_user_1)
 EXPORT_SYMBOL(__get_user_1)
 
 SYM_FUNC_START(__get_user_2)
-	LOAD_TASK_SIZE_MINUS_N(1)
-	cmp %_ASM_DX,%_ASM_AX
-	jae bad_get_user
-	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
-	and %_ASM_DX, %_ASM_AX
+	check_range size=2
 	ASM_STAC
 2:	movzwl (%_ASM_AX),%edx
 	xor %eax,%eax
@@ -76,11 +72,7 @@ SYM_FUNC_END(__get_user_2)
 EXPORT_SYMBOL(__get_user_2)
 
 SYM_FUNC_START(__get_user_4)
-	LOAD_TASK_SIZE_MINUS_N(3)
-	cmp %_ASM_DX,%_ASM_AX
-	jae bad_get_user
-	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
-	and %_ASM_DX, %_ASM_AX
+	check_range size=4
 	ASM_STAC
 3:	movl (%_ASM_AX),%edx
 	xor %eax,%eax
@@ -90,30 +82,17 @@ SYM_FUNC_END(__get_user_4)
 EXPORT_SYMBOL(__get_user_4)
 
 SYM_FUNC_START(__get_user_8)
-#ifdef CONFIG_X86_64
-	LOAD_TASK_SIZE_MINUS_N(7)
-	cmp %_ASM_DX,%_ASM_AX
-	jae bad_get_user
-	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
-	and %_ASM_DX, %_ASM_AX
+	check_range size=8
 	ASM_STAC
+#ifdef CONFIG_X86_64
 4:	movq (%_ASM_AX),%rdx
-	xor %eax,%eax
-	ASM_CLAC
-	RET
 #else
-	LOAD_TASK_SIZE_MINUS_N(7)
-	cmp %_ASM_DX,%_ASM_AX
-	jae bad_get_user_8
-	sbb %_ASM_DX, %_ASM_DX		/* array_index_mask_nospec() */
-	and %_ASM_DX, %_ASM_AX
-	ASM_STAC
 4:	movl (%_ASM_AX),%edx
 5:	movl 4(%_ASM_AX),%ecx
+#endif
 	xor %eax,%eax
 	ASM_CLAC
 	RET
-#endif
 SYM_FUNC_END(__get_user_8)
 EXPORT_SYMBOL(__get_user_8)
@@ -166,7 +145,7 @@ EXPORT_SYMBOL(__get_user_nocheck_8)
 SYM_CODE_START_LOCAL(.Lbad_get_user_clac)
 	ASM_CLAC
-bad_get_user:
+.Lbad_get_user:
 	xor %edx,%edx
 	mov $(-EFAULT),%_ASM_AX
 	RET
@@ -184,23 +163,23 @@ SYM_CODE_END(.Lbad_get_user_8_clac)
 #endif
 
 /* get_user */
-	_ASM_EXTABLE_UA(1b, .Lbad_get_user_clac)
-	_ASM_EXTABLE_UA(2b, .Lbad_get_user_clac)
-	_ASM_EXTABLE_UA(3b, .Lbad_get_user_clac)
+	_ASM_EXTABLE(1b, .Lbad_get_user_clac)
+	_ASM_EXTABLE(2b, .Lbad_get_user_clac)
+	_ASM_EXTABLE(3b, .Lbad_get_user_clac)
 #ifdef CONFIG_X86_64
-	_ASM_EXTABLE_UA(4b, .Lbad_get_user_clac)
+	_ASM_EXTABLE(4b, .Lbad_get_user_clac)
 #else
-	_ASM_EXTABLE_UA(4b, .Lbad_get_user_8_clac)
-	_ASM_EXTABLE_UA(5b, .Lbad_get_user_8_clac)
+	_ASM_EXTABLE(4b, .Lbad_get_user_8_clac)
+	_ASM_EXTABLE(5b, .Lbad_get_user_8_clac)
 #endif
 
 /* __get_user */
-	_ASM_EXTABLE_UA(6b, .Lbad_get_user_clac)
-	_ASM_EXTABLE_UA(7b, .Lbad_get_user_clac)
-	_ASM_EXTABLE_UA(8b, .Lbad_get_user_clac)
+	_ASM_EXTABLE(6b, .Lbad_get_user_clac)
+	_ASM_EXTABLE(7b, .Lbad_get_user_clac)
+	_ASM_EXTABLE(8b, .Lbad_get_user_clac)
 #ifdef CONFIG_X86_64
-	_ASM_EXTABLE_UA(9b, .Lbad_get_user_clac)
+	_ASM_EXTABLE(9b, .Lbad_get_user_clac)
 #else
-	_ASM_EXTABLE_UA(9b, .Lbad_get_user_8_clac)
-	_ASM_EXTABLE_UA(10b, .Lbad_get_user_8_clac)
+	_ASM_EXTABLE(9b, .Lbad_get_user_8_clac)
+	_ASM_EXTABLE(10b, .Lbad_get_user_8_clac)
 #endif
arch/x86/lib/putuser.S
@@ -33,20 +33,20 @@
  * as they get called from within inline assembly.
  */
 
-#ifdef CONFIG_X86_5LEVEL
-#define LOAD_TASK_SIZE_MINUS_N(n) \
-	ALTERNATIVE __stringify(mov $((1 << 47) - 4096 - (n)),%rbx), \
-		    __stringify(mov $((1 << 56) - 4096 - (n)),%rbx), X86_FEATURE_LA57
-#else
-#define LOAD_TASK_SIZE_MINUS_N(n) \
-	mov $(TASK_SIZE_MAX - (n)),%_ASM_BX
-#endif
+.macro check_range size:req
+.if IS_ENABLED(CONFIG_X86_64)
+	mov %rcx, %rbx
+	sar $63, %rbx
+	or %rbx, %rcx
+.else
+	cmp $TASK_SIZE_MAX-\size+1, %ecx
+	jae .Lbad_put_user
+.endif
+.endm
 
 	.text
 SYM_FUNC_START(__put_user_1)
-	LOAD_TASK_SIZE_MINUS_N(0)
-	cmp %_ASM_BX,%_ASM_CX
-	jae .Lbad_put_user
+	check_range size=1
 	ASM_STAC
 1:	movb %al,(%_ASM_CX)
 	xor %ecx,%ecx
@@ -66,9 +66,7 @@ SYM_FUNC_END(__put_user_nocheck_1)
 EXPORT_SYMBOL(__put_user_nocheck_1)
 
 SYM_FUNC_START(__put_user_2)
-	LOAD_TASK_SIZE_MINUS_N(1)
-	cmp %_ASM_BX,%_ASM_CX
-	jae .Lbad_put_user
+	check_range size=2
 	ASM_STAC
 3:	movw %ax,(%_ASM_CX)
 	xor %ecx,%ecx
@@ -88,9 +86,7 @@ SYM_FUNC_END(__put_user_nocheck_2)
 EXPORT_SYMBOL(__put_user_nocheck_2)
 
 SYM_FUNC_START(__put_user_4)
-	LOAD_TASK_SIZE_MINUS_N(3)
-	cmp %_ASM_BX,%_ASM_CX
-	jae .Lbad_put_user
+	check_range size=4
 	ASM_STAC
 5:	movl %eax,(%_ASM_CX)
 	xor %ecx,%ecx
@@ -110,9 +106,7 @@ SYM_FUNC_END(__put_user_nocheck_4)
 EXPORT_SYMBOL(__put_user_nocheck_4)
 
 SYM_FUNC_START(__put_user_8)
-	LOAD_TASK_SIZE_MINUS_N(7)
-	cmp %_ASM_BX,%_ASM_CX
-	jae .Lbad_put_user
+	check_range size=8
 	ASM_STAC
 7:	mov %_ASM_AX,(%_ASM_CX)
 #ifdef CONFIG_X86_32
@@ -144,15 +138,15 @@ SYM_CODE_START_LOCAL(.Lbad_put_user_clac)
 	RET
 SYM_CODE_END(.Lbad_put_user_clac)
 
-	_ASM_EXTABLE_UA(1b, .Lbad_put_user_clac)
-	_ASM_EXTABLE_UA(2b, .Lbad_put_user_clac)
-	_ASM_EXTABLE_UA(3b, .Lbad_put_user_clac)
-	_ASM_EXTABLE_UA(4b, .Lbad_put_user_clac)
-	_ASM_EXTABLE_UA(5b, .Lbad_put_user_clac)
-	_ASM_EXTABLE_UA(6b, .Lbad_put_user_clac)
-	_ASM_EXTABLE_UA(7b, .Lbad_put_user_clac)
-	_ASM_EXTABLE_UA(9b, .Lbad_put_user_clac)
+	_ASM_EXTABLE(1b, .Lbad_put_user_clac)
+	_ASM_EXTABLE(2b, .Lbad_put_user_clac)
+	_ASM_EXTABLE(3b, .Lbad_put_user_clac)
+	_ASM_EXTABLE(4b, .Lbad_put_user_clac)
+	_ASM_EXTABLE(5b, .Lbad_put_user_clac)
+	_ASM_EXTABLE(6b, .Lbad_put_user_clac)
+	_ASM_EXTABLE(7b, .Lbad_put_user_clac)
+	_ASM_EXTABLE(9b, .Lbad_put_user_clac)
 #ifdef CONFIG_X86_32
-	_ASM_EXTABLE_UA(8b, .Lbad_put_user_clac)
-	_ASM_EXTABLE_UA(10b, .Lbad_put_user_clac)
+	_ASM_EXTABLE(8b, .Lbad_put_user_clac)
+	_ASM_EXTABLE(10b, .Lbad_put_user_clac)
 #endif