Commit 2e77a62c authored by Mark Rutland, committed by Will Deacon

arm64: extable: add a dedicated uaccess handler

For inline assembly, we place exception fixups out-of-line in the
`.fixup` section such that these are out of the way of the fast path.
This has a few drawbacks:

* Since the fixup code is anonymous, backtraces will symbolize fixups as
  offsets from the nearest prior symbol, currently
  `__entry_tramp_text_end`. This is confusing, and painful to debug
  without access to the relevant vmlinux.

* Since the exception handler adjusts the PC to execute the fixup, and
  the fixup uses a direct branch back into the function it fixes,
  backtraces of fixups miss the original function. This is confusing,
  and violates requirements for RELIABLE_STACKTRACE (and therefore
  LIVEPATCH).

* Inline assembly and associated fixups are generated from templates,
  and we have many copies of logically identical fixups which only
  differ in which specific registers are written to and which address is
  branched to at the end of the fixup. This is potentially wasteful of
  I-cache resources, and makes it hard to add additional logic to fixups
  without significant bloat (the removed pattern is sketched just after
  this list).

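For reference, the anonymous pattern being removed looks like the
following, distilled from the __get_mem_asm hunk in the diff below
(`ldtr` stands in for the templated load instruction; err/x/addr are
the operands used there). Every instantiation emits its own copy of the
out-of-line fixup stub:

	asm volatile(
	"1:	ldtr	%1, [%2]\n"		/* user access; may fault */
	"2:\n"
	"	.section .fixup, \"ax\"\n"
	"	.align	2\n"
	"3:	mov	%w0, %3\n"		/* err = -EFAULT */
	"	mov	%1, #0\n"		/* zero the destination */
	"	b	2b\n"			/* branch back into the function */
	"	.previous\n"
	_ASM_EXTABLE(1b, 3b)
	: "+r" (err), "=&r" (x)
	: "r" (addr), "i" (-EFAULT));
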
This patch addresses all three concerns for inline uaccess fixups by
adding a dedicated exception handler which updates registers in
exception context and subsequently returns to the function which
faulted, removing the need for fixups specialized to each faulting
instruction.

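With the dedicated handler, the same access needs no out-of-line code:
the extable entry records which registers to update, and the handler
writes them in exception context before resuming at the in-line
continuation label. A minimal sketch of the new-style equivalent, again
modelled on the __get_mem_asm hunk below:

	asm volatile(
	"1:	ldtr	%1, [%2]\n"		/* user access; may fault */
	"2:\n"					/* execution resumes here on fault */
	_ASM_EXTABLE_UACCESS_ERR_ZERO(1b, 2b, %w0, %w1)
	: "+r" (err), "=&r" (x)
	: "r" (addr));
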
Other than backtracing, there should be no functional change as a result
of this patch.
Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Reviewed-by: Ard Biesheuvel <ardb@kernel.org>
Cc: Catalin Marinas <catalin.marinas@arm.com>
Cc: James Morse <james.morse@arm.com>
Cc: Robin Murphy <robin.murphy@arm.com>
Cc: Will Deacon <will@kernel.org>
Link: https://lore.kernel.org/r/20211019160219.5202-12-mark.rutland@arm.com
Signed-off-by: Will Deacon <will@kernel.org>
parent d6e2cc56
@@ -5,6 +5,7 @@
 #define EX_TYPE_NONE			0
 #define EX_TYPE_FIXUP			1
 #define EX_TYPE_BPF			2
+#define EX_TYPE_UACCESS_ERR_ZERO	3
 
 #ifdef __ASSEMBLY__
@@ -37,8 +38,11 @@
 #else /* __ASSEMBLY__ */
 
+#include <linux/bits.h>
 #include <linux/stringify.h>
 
+#include <asm/gpr-num.h>
+
 #define __ASM_EXTABLE_RAW(insn, fixup, type, data)	\
 	".pushsection	__ex_table, \"a\"\n"		\
 	".align		2\n"				\
@@ -51,6 +55,26 @@
 #define _ASM_EXTABLE(insn, fixup) \
 	__ASM_EXTABLE_RAW(#insn, #fixup, __stringify(EX_TYPE_FIXUP), "0")
 
+#define EX_DATA_REG_ERR_SHIFT	0
+#define EX_DATA_REG_ERR		GENMASK(4, 0)
+#define EX_DATA_REG_ZERO_SHIFT	5
+#define EX_DATA_REG_ZERO	GENMASK(9, 5)
+
+#define EX_DATA_REG(reg, gpr)						\
+	"((.L__gpr_num_" #gpr ") << " __stringify(EX_DATA_REG_##reg##_SHIFT) ")"
+
+#define _ASM_EXTABLE_UACCESS_ERR_ZERO(insn, fixup, err, zero)		\
+	__DEFINE_ASM_GPR_NUMS						\
+	__ASM_EXTABLE_RAW(#insn, #fixup,				\
+			  __stringify(EX_TYPE_UACCESS_ERR_ZERO),	\
+			  "("						\
+			    EX_DATA_REG(ERR, err) " | "			\
+			    EX_DATA_REG(ZERO, zero)			\
+			  ")")
+
+#define _ASM_EXTABLE_UACCESS_ERR(insn, fixup, err)			\
+	_ASM_EXTABLE_UACCESS_ERR_ZERO(insn, fixup, err, wzr)
+
 #endif /* __ASSEMBLY__ */
 
 #endif /* __ASM_ASM_EXTABLE_H */
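
As an aside, the data field of each extable entry packs the two GPR
numbers into bits [4:0] (error register) and [9:5] (zero register),
with 31 naming wzr/xzr via asm/gpr-num.h. A standalone C sketch of the
round trip the handler performs, with the masks open-coded in place of
the kernel's GENMASK()/FIELD_GET() helpers:

	#include <stdio.h>

	#define EX_DATA_REG_ERR_SHIFT	0
	#define EX_DATA_REG_ERR_MASK	0x1f	/* GENMASK(4, 0) */
	#define EX_DATA_REG_ZERO_SHIFT	5
	#define EX_DATA_REG_ZERO_MASK	0x3e0	/* GENMASK(9, 5) */

	int main(void)
	{
		/* encode: err in x0, zero in x1 (as the macros would) */
		unsigned int data = (0u << EX_DATA_REG_ERR_SHIFT) |
				    (1u << EX_DATA_REG_ZERO_SHIFT);

		/* decode: what ex_handler_uaccess_err_zero() recovers */
		unsigned int reg_err  = (data & EX_DATA_REG_ERR_MASK)
						>> EX_DATA_REG_ERR_SHIFT;
		unsigned int reg_zero = (data & EX_DATA_REG_ZERO_MASK)
						>> EX_DATA_REG_ZERO_SHIFT;

		printf("err -> x%u, zero -> x%u\n", reg_err, reg_zero);
		return 0;
	}
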
@@ -25,19 +25,14 @@ do {									\
 "	cbz	%w0, 3f\n"						\
 "	sub	%w4, %w4, %w0\n"					\
 "	cbnz	%w4, 1b\n"						\
-"	mov	%w0, %w7\n"						\
+"	mov	%w0, %w6\n"						\
 "3:\n"									\
 "	dmb	ish\n"							\
-"	.pushsection .fixup,\"ax\"\n"					\
-"	.align	2\n"							\
-"4:	mov	%w0, %w6\n"						\
-"	b	3b\n"							\
-"	.popsection\n"							\
-	_ASM_EXTABLE(1b, 4b)						\
-	_ASM_EXTABLE(2b, 4b)						\
+	_ASM_EXTABLE_UACCESS_ERR(1b, 3b, %w0)				\
+	_ASM_EXTABLE_UACCESS_ERR(2b, 3b, %w0)				\
 	: "=&r" (ret), "=&r" (oldval), "+Q" (*uaddr), "=&r" (tmp),	\
 	  "+r" (loops)							\
-	: "r" (oparg), "Ir" (-EFAULT), "Ir" (-EAGAIN)			\
+	: "r" (oparg), "Ir" (-EAGAIN)					\
 	: "memory");							\
 	uaccess_disable_privileged();					\
 } while (0)
@@ -105,18 +100,14 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *_uaddr,
 "	cbz	%w3, 3f\n"
 "	sub	%w4, %w4, %w3\n"
 "	cbnz	%w4, 1b\n"
-"	mov	%w0, %w8\n"
+"	mov	%w0, %w7\n"
 "3:\n"
 "	dmb	ish\n"
 "4:\n"
-"	.pushsection .fixup,\"ax\"\n"
-"5:	mov	%w0, %w7\n"
-"	b	4b\n"
-"	.popsection\n"
-	_ASM_EXTABLE(1b, 5b)
-	_ASM_EXTABLE(2b, 5b)
+	_ASM_EXTABLE_UACCESS_ERR(1b, 4b, %w0)
+	_ASM_EXTABLE_UACCESS_ERR(2b, 4b, %w0)
 	: "+r" (ret), "=&r" (val), "+Q" (*uaddr), "=&r" (tmp), "+r" (loops)
-	: "r" (oldval), "r" (newval), "Ir" (-EFAULT), "Ir" (-EAGAIN)
+	: "r" (oldval), "r" (newval), "Ir" (-EAGAIN)
 	: "memory");
 	uaccess_disable_privileged();
...
@@ -255,15 +255,9 @@ static inline void __user *__uaccess_mask_ptr(const void __user *ptr)
 	asm volatile(							\
 	"1:	" load "	" reg "1, [%2]\n"			\
 	"2:\n"								\
-	"	.section .fixup, \"ax\"\n"				\
-	"	.align	2\n"						\
-	"3:	mov	%w0, %3\n"					\
-	"	mov	%1, #0\n"					\
-	"	b	2b\n"						\
-	"	.previous\n"						\
-	_ASM_EXTABLE(1b, 3b)						\
+	_ASM_EXTABLE_UACCESS_ERR_ZERO(1b, 2b, %w0, %w1)			\
 	: "+r" (err), "=&r" (x)						\
-	: "r" (addr), "i" (-EFAULT))
+	: "r" (addr))
 
 #define __raw_get_mem(ldr, x, ptr, err)					\
 do {									\
@@ -332,14 +326,9 @@ do {									\
 	asm volatile(							\
 	"1:	" store "	" reg "1, [%2]\n"			\
 	"2:\n"								\
-	"	.section .fixup,\"ax\"\n"				\
-	"	.align	2\n"						\
-	"3:	mov	%w0, %3\n"					\
-	"	b	2b\n"						\
-	"	.previous\n"						\
-	_ASM_EXTABLE(1b, 3b)						\
+	_ASM_EXTABLE_UACCESS_ERR(1b, 2b, %w0)				\
 	: "+r" (err)							\
-	: "r" (x), "r" (addr), "i" (-EFAULT))
+	: "r" (x), "r" (addr))
 
 #define __raw_put_mem(str, x, ptr, err)					\
 do {									\
...
@@ -279,7 +279,7 @@ static void __init register_insn_emulation_sysctl(void)
 do {									\
 	uaccess_enable_privileged();					\
 	__asm__ __volatile__(						\
-	"	mov		%w3, %w7\n"				\
+	"	mov		%w3, %w6\n"				\
 	"0:	ldxr"B"		%w2, [%4]\n"				\
 	"1:	stxr"B"		%w0, %w1, [%4]\n"			\
 	"	cbz		%w0, 2f\n"				\
@@ -290,16 +290,10 @@ do {									\
 	"2:\n"								\
 	"	mov		%w1, %w2\n"				\
 	"3:\n"								\
-	"	.pushsection	.fixup,\"ax\"\n"			\
-	"	.align		2\n"					\
-	"4:	mov		%w0, %w6\n"				\
-	"	b		3b\n"					\
-	"	.popsection"						\
-	_ASM_EXTABLE(0b, 4b)						\
-	_ASM_EXTABLE(1b, 4b)						\
+	_ASM_EXTABLE_UACCESS_ERR(0b, 3b, %w0)				\
+	_ASM_EXTABLE_UACCESS_ERR(1b, 3b, %w0)				\
 	: "=&r" (res), "+r" (data), "=&r" (temp), "=&r" (temp2)	\
 	: "r" ((unsigned long)addr), "i" (-EAGAIN),			\
-	  "i" (-EFAULT),						\
 	  "i" (__SWP_LL_SC_LOOPS)					\
 	: "memory");							\
 	uaccess_disable_privileged();					\
...
@@ -527,14 +527,9 @@ NOKPROBE_SYMBOL(do_ptrauth_fault);
 	"1:	" insn ", %1\n"						\
 	"	mov	%w0, #0\n"					\
 	"2:\n"								\
-	"	.pushsection .fixup,\"ax\"\n"				\
-	"	.align	2\n"						\
-	"3:	mov	%w0, %w2\n"					\
-	"	b	2b\n"						\
-	"	.popsection\n"						\
-	_ASM_EXTABLE(1b, 3b)						\
+	_ASM_EXTABLE_UACCESS_ERR(1b, 2b, %w0)				\
 	: "=r" (res)							\
-	: "r" (address), "i" (-EFAULT));				\
+	: "r" (address));						\
 	uaccess_ttbr0_disable();					\
 }
...
@@ -3,10 +3,12 @@
  * Based on arch/arm/mm/extable.c
  */
 
+#include <linux/bitfield.h>
 #include <linux/extable.h>
 #include <linux/uaccess.h>
 
 #include <asm/asm-extable.h>
+#include <asm/ptrace.h>
 
 typedef bool (*ex_handler_t)(const struct exception_table_entry *,
 			     struct pt_regs *);
@@ -24,6 +26,19 @@ static bool ex_handler_fixup(const struct exception_table_entry *ex,
 	return true;
 }
 
+static bool ex_handler_uaccess_err_zero(const struct exception_table_entry *ex,
+					struct pt_regs *regs)
+{
+	int reg_err = FIELD_GET(EX_DATA_REG_ERR, ex->data);
+	int reg_zero = FIELD_GET(EX_DATA_REG_ZERO, ex->data);
+
+	pt_regs_write_reg(regs, reg_err, -EFAULT);
+	pt_regs_write_reg(regs, reg_zero, 0);
+
+	regs->pc = get_ex_fixup(ex);
+	return true;
+}
+
 bool fixup_exception(struct pt_regs *regs)
 {
 	const struct exception_table_entry *ex;
@@ -37,6 +52,8 @@ bool fixup_exception(struct pt_regs *regs)
 		return ex_handler_fixup(ex, regs);
 	case EX_TYPE_BPF:
 		return ex_handler_bpf(ex, regs);
+	case EX_TYPE_UACCESS_ERR_ZERO:
+		return ex_handler_uaccess_err_zero(ex, regs);
 	}
 
 	BUG();
...