Commit 01eb0187 authored by Nicholas Piggin, committed by Michael Ellerman

powerpc/64s: Fix restore_math unnecessarily changing MSR

Before returning to user, if there are missing FP/VEC/VSX bits from the
user MSR then those registers had been saved and must be restored again
before use. restore_math will decide whether to restore immediately, or
skip the restore and let fp/vec/vsx unavailable faults demand load the
registers.

Each time restore_math restores one of the FP/VSX or VEC register sets, an
8-bit counter is incremented (load_fp and load_vec). When these wrap to
zero, restore_math no longer restores that register set until after it is
next demand faulted.
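
For illustration only, a minimal userspace sketch of that back-off behaviour,
assuming the counter is re-armed to 1 by a demand fault; this is not the
kernel code itself:

#include <stdio.h>

int main(void)
{
        /* stand-in for the kernel's 8-bit load_fp/load_vec counter */
        unsigned char load_fp = 1;      /* assumed re-armed by a demand fault */
        int eager_restores = 0;

        while (load_fp) {       /* non-zero: keep restoring eagerly */
                load_fp++;      /* eventually wraps 255 -> 0 */
                eager_restores++;
        }

        printf("eager restores before backing off: %d\n", eager_restores);
        return 0;
}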

It's quite usual for those counters to have different values, so one may
wrap to zero while the other has not yet done so. When that happens,
restore_math no longer restores the wrapped set's registers or user MSR
bit, and the other set does not actually need restoring (because the
kernel is not frequently using the FPU), yet restore_math is still called
and does not take the early exit check. The result is that
msr_check_and_set tests and sets the MSR at every kernel exit despite
having no work to do.
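
A rough standalone model of that wasted path, again not kernel code: the
starting values and the loop bound are assumptions, chosen so that one
counter has already wrapped while the other facility is still live in the
user MSR:

#include <stdbool.h>
#include <stdio.h>

int main(void)
{
        unsigned char load_fp = 0;      /* FP counter already wrapped to zero */
        unsigned char load_vec = 3;     /* VEC counter non-zero... */
        bool user_msr_vec = true;       /* ...but VEC regs still live, no restore needed */
        long msr_toggles = 0, restores = 0;

        for (long exits = 0; exits < 1000000; exits++) {
                if (!load_fp && !load_vec)
                        continue;       /* old early exit: needs BOTH counters zero */

                msr_toggles++;          /* models the msr_check_and_set()/_clear() pair */

                if (load_fp) {          /* models the old restore_fp() */
                        load_fp++;
                        restores++;
                }
                if (!user_msr_vec && load_vec) {        /* models the old restore_altivec() path */
                        load_vec++;
                        restores++;
                }
        }

        printf("MSR round-trips: %ld, registers actually restored: %ld\n",
               msr_toggles, restores);
        return 0;
}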

This can cause workloads (e.g., a NULL syscall microbenchmark) to run
fast for a time while both counters are non-zero, then slow down when
one of the counters reaches zero, then speed up again after the second
counter reaches zero. The cost is significant, about 10% slowdown on a
NULL syscall benchmark, and the jittery behaviour is very undesirable.

Fix this by having restore_math test all conditions first, and only
update MSR if we will be loading registers.
Signed-off-by: Nicholas Piggin <npiggin@gmail.com>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Link: https://lore.kernel.org/r/20200623234139.2262227-2-npiggin@gmail.com
parent 891b4fe8
@@ -471,49 +471,58 @@ EXPORT_SYMBOL(giveup_all);
 
 #ifdef CONFIG_PPC_BOOK3S_64
 #ifdef CONFIG_PPC_FPU
-static int restore_fp(struct task_struct *tsk)
+static bool should_restore_fp(void)
 {
-	if (tsk->thread.load_fp) {
-		load_fp_state(&current->thread.fp_state);
+	if (current->thread.load_fp) {
 		current->thread.load_fp++;
-		return 1;
+		return true;
 	}
-	return 0;
+	return false;
+}
+
+static void do_restore_fp(void)
+{
+	load_fp_state(&current->thread.fp_state);
 }
 #else
-static int restore_fp(struct task_struct *tsk) { return 0; }
+static bool should_restore_fp(void) { return false; }
+static void do_restore_fp(void) { }
 #endif /* CONFIG_PPC_FPU */
 
 #ifdef CONFIG_ALTIVEC
-#define loadvec(thr) ((thr).load_vec)
-static int restore_altivec(struct task_struct *tsk)
+static bool should_restore_altivec(void)
 {
-	if (cpu_has_feature(CPU_FTR_ALTIVEC) && (tsk->thread.load_vec)) {
-		load_vr_state(&tsk->thread.vr_state);
-		tsk->thread.used_vr = 1;
-		tsk->thread.load_vec++;
-		return 1;
+	if (cpu_has_feature(CPU_FTR_ALTIVEC) && (current->thread.load_vec)) {
+		current->thread.load_vec++;
+		return true;
 	}
-	return 0;
+	return false;
+}
+
+static void do_restore_altivec(void)
+{
+	load_vr_state(&current->thread.vr_state);
+	current->thread.used_vr = 1;
 }
 #else
-#define loadvec(thr) 0
-static inline int restore_altivec(struct task_struct *tsk) { return 0; }
+static bool should_restore_altivec(void) { return false; }
+static void do_restore_altivec(void) { }
 #endif /* CONFIG_ALTIVEC */
 
 #ifdef CONFIG_VSX
-static int restore_vsx(struct task_struct *tsk)
+static bool should_restore_vsx(void)
 {
-	if (cpu_has_feature(CPU_FTR_VSX)) {
-		tsk->thread.used_vsr = 1;
-		return 1;
-	}
-
-	return 0;
+	if (cpu_has_feature(CPU_FTR_VSX))
+		return true;
+	return false;
+}
+
+static void do_restore_vsx(void)
+{
+	current->thread.used_vsr = 1;
 }
 #else
-static inline int restore_vsx(struct task_struct *tsk) { return 0; }
+static bool should_restore_vsx(void) { return false; }
+static void do_restore_vsx(void) { }
 #endif /* CONFIG_VSX */
 
 /*
@@ -529,31 +538,42 @@ static inline int restore_vsx(struct task_struct *tsk) { return 0; }
 void notrace restore_math(struct pt_regs *regs)
 {
 	unsigned long msr;
-
-	if (!current->thread.load_fp && !loadvec(current->thread))
-		return;
+	unsigned long new_msr = 0;
 
 	msr = regs->msr;
-	msr_check_and_set(msr_all_available);
 
 	/*
-	 * Only reload if the bit is not set in the user MSR, the bit BEING set
-	 * indicates that the registers are hot
+	 * new_msr tracks the facilities that are to be restored. Only reload
+	 * if the bit is not set in the user MSR (if it is set, the registers
+	 * are live for the user thread).
 	 */
-	if ((!(msr & MSR_FP)) && restore_fp(current))
-		msr |= MSR_FP | current->thread.fpexc_mode;
+	if ((!(msr & MSR_FP)) && should_restore_fp())
+		new_msr |= MSR_FP | current->thread.fpexc_mode;
 
-	if ((!(msr & MSR_VEC)) && restore_altivec(current))
-		msr |= MSR_VEC;
+	if ((!(msr & MSR_VEC)) && should_restore_altivec())
+		new_msr |= MSR_VEC;
 
-	if ((msr & (MSR_FP | MSR_VEC)) == (MSR_FP | MSR_VEC) &&
-			restore_vsx(current)) {
-		msr |= MSR_VSX;
+	if ((!(msr & MSR_VSX)) && should_restore_vsx()) {
+		if (((msr | new_msr) & (MSR_FP | MSR_VEC)) == (MSR_FP | MSR_VEC))
+			new_msr |= MSR_VSX;
 	}
 
-	msr_check_and_clear(msr_all_available);
+	if (new_msr) {
+		msr_check_and_set(new_msr);
+
+		if (new_msr & MSR_FP)
+			do_restore_fp();
+
+		if (new_msr & MSR_VEC)
+			do_restore_altivec();
 
-	regs->msr = msr;
+		if (new_msr & MSR_VSX)
+			do_restore_vsx();
+
+		msr_check_and_clear(new_msr);
+
+		regs->msr |= new_msr;
+	}
 }
 #endif
@@ -206,6 +206,13 @@ notrace unsigned long syscall_exit_prepare(unsigned long r3,
 		else if (cpu_has_feature(CPU_FTR_ALTIVEC))
 			mathflags |= MSR_VEC;
 
+		/*
+		 * If userspace MSR has all available FP bits set,
+		 * then they are live and no need to restore. If not,
+		 * it means the regs were given up and restore_math
+		 * may decide to restore them (to avoid taking an FP
+		 * fault).
+		 */
 		if ((regs->msr & mathflags) != mathflags)
 			restore_math(regs);
 	}
@@ -277,6 +284,7 @@ notrace unsigned long interrupt_exit_user_prepare(struct pt_regs *regs, unsigned
 		else if (cpu_has_feature(CPU_FTR_ALTIVEC))
 			mathflags |= MSR_VEC;
 
+		/* See above restore_math comment */
 		if ((regs->msr & mathflags) != mathflags)
 			restore_math(regs);
 	}