Commit 82e844a6 authored by Josh Poimboeuf, committed by Thomas Gleixner

x86/uaccess: Remove redundant CLACs in getuser/putuser error paths

The same getuser/putuser error paths are used regardless of whether AC
is set.  In non-exception failure cases, this results in an unnecessary
CLAC.

Fixes the following warnings:

  arch/x86/lib/getuser.o: warning: objtool: .altinstr_replacement+0x18: redundant UACCESS disable
  arch/x86/lib/putuser.o: warning: objtool: .altinstr_replacement+0x18: redundant UACCESS disable
Signed-off-by: Josh Poimboeuf <jpoimboe@redhat.com>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Link: https://lkml.kernel.org/r/bc14ded2755ae75bd9010c446079e113dbddb74b.1563413318.git.jpoimboe@redhat.com
parent 5e307a6b
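
For context, the pattern being fixed can be sketched as follows (a minimal paraphrase of __get_user_1 from getuser.S before this patch; the Spectre index-masking instructions are omitted and the comments are added here, not taken from upstream):

	mov PER_CPU_VAR(current_task), %_ASM_DX
	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
	jae bad_get_user		/* direct jump: AC was never set */
	ASM_STAC			/* AC is set only on the access path */
1:	movzbl (%_ASM_AX),%edx		/* a fault here reaches the error path
					   via the exception table instead */
	xor %eax,%eax
	ASM_CLAC
	ret

Both routes previously landed on the same bad_get_user error path, which always executed CLAC; for the direct-jump (non-exception) case that CLAC is the redundant one objtool flags. The diff below splits each error path in two: exception fixups are retargeted to a new bad_*_clac label that performs the CLAC, while the direct jumps keep targeting the plain label, which no longer does.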
--- a/arch/x86/lib/getuser.S
+++ b/arch/x86/lib/getuser.S
@@ -115,29 +115,29 @@ ENDPROC(__get_user_8)
 EXPORT_SYMBOL(__get_user_8)
 
 
+bad_get_user_clac:
+	ASM_CLAC
 bad_get_user:
 	xor %edx,%edx
 	mov $(-EFAULT),%_ASM_AX
-	ASM_CLAC
 	ret
-END(bad_get_user)
 
 #ifdef CONFIG_X86_32
+bad_get_user_8_clac:
+	ASM_CLAC
 bad_get_user_8:
 	xor %edx,%edx
 	xor %ecx,%ecx
 	mov $(-EFAULT),%_ASM_AX
-	ASM_CLAC
 	ret
-END(bad_get_user_8)
 #endif
 
-	_ASM_EXTABLE_UA(1b, bad_get_user)
-	_ASM_EXTABLE_UA(2b, bad_get_user)
-	_ASM_EXTABLE_UA(3b, bad_get_user)
+	_ASM_EXTABLE_UA(1b, bad_get_user_clac)
+	_ASM_EXTABLE_UA(2b, bad_get_user_clac)
+	_ASM_EXTABLE_UA(3b, bad_get_user_clac)
 #ifdef CONFIG_X86_64
-	_ASM_EXTABLE_UA(4b, bad_get_user)
+	_ASM_EXTABLE_UA(4b, bad_get_user_clac)
 #else
-	_ASM_EXTABLE_UA(4b, bad_get_user_8)
-	_ASM_EXTABLE_UA(5b, bad_get_user_8)
+	_ASM_EXTABLE_UA(4b, bad_get_user_8_clac)
+	_ASM_EXTABLE_UA(5b, bad_get_user_8_clac)
 #endif
--- a/arch/x86/lib/putuser.S
+++ b/arch/x86/lib/putuser.S
@@ -32,8 +32,6 @@
  */
 
 #define ENTER	mov PER_CPU_VAR(current_task), %_ASM_BX
-#define EXIT	ASM_CLAC ;	\
-		ret
 
 .text
 ENTRY(__put_user_1)
@@ -43,7 +41,8 @@ ENTRY(__put_user_1)
 	ASM_STAC
 1:	movb %al,(%_ASM_CX)
 	xor %eax,%eax
-	EXIT
+	ASM_CLAC
+	ret
 ENDPROC(__put_user_1)
 EXPORT_SYMBOL(__put_user_1)
 
@@ -56,7 +55,8 @@ ENTRY(__put_user_2)
 	ASM_STAC
 2:	movw %ax,(%_ASM_CX)
 	xor %eax,%eax
-	EXIT
+	ASM_CLAC
+	ret
 ENDPROC(__put_user_2)
 EXPORT_SYMBOL(__put_user_2)
 
@@ -69,7 +69,8 @@ ENTRY(__put_user_4)
 	ASM_STAC
 3:	movl %eax,(%_ASM_CX)
 	xor %eax,%eax
-	EXIT
+	ASM_CLAC
+	ret
 ENDPROC(__put_user_4)
 EXPORT_SYMBOL(__put_user_4)
 
@@ -85,19 +86,21 @@ ENTRY(__put_user_8)
 5:	movl %edx,4(%_ASM_CX)
 #endif
 	xor %eax,%eax
-	EXIT
+	ASM_CLAC
+	ret
 ENDPROC(__put_user_8)
 EXPORT_SYMBOL(__put_user_8)
 
+bad_put_user_clac:
+	ASM_CLAC
 bad_put_user:
 	movl $-EFAULT,%eax
-	EXIT
-END(bad_put_user)
+	ret
 
-	_ASM_EXTABLE_UA(1b, bad_put_user)
-	_ASM_EXTABLE_UA(2b, bad_put_user)
-	_ASM_EXTABLE_UA(3b, bad_put_user)
-	_ASM_EXTABLE_UA(4b, bad_put_user)
+	_ASM_EXTABLE_UA(1b, bad_put_user_clac)
+	_ASM_EXTABLE_UA(2b, bad_put_user_clac)
+	_ASM_EXTABLE_UA(3b, bad_put_user_clac)
+	_ASM_EXTABLE_UA(4b, bad_put_user_clac)
 #ifdef CONFIG_X86_32
-	_ASM_EXTABLE_UA(5b, bad_put_user)
+	_ASM_EXTABLE_UA(5b, bad_put_user_clac)
 #endif