Commit c28ac96b authored by Russell King

[ARM] Optimise kernel->userspace exit code a little.

This improves the return to user space from interrupts and exceptions
by a few cycles.  We change the requirements for *_restore_user_regs
slightly, which lets us remove a few instructions.  We also re-shuffle
the code that handles pending work.
parent 2dc559c8
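The win comes from two things, both visible in the diff below: interrupt
masking is centralised in small disable_irq/enable_irq helpers built on a
set_cpsr_c macro, and the fast/slow restore_user_regs paths now require the
caller to have disabled IRQs already, so the mov/msr pair disappears from the
register-restore sequences.  Roughly, the new helpers look like this:

        .macro  disable_irq, temp
        set_cpsr_c \temp, #PSR_I_BIT | MODE_SVC   @ mask IRQs, stay in SVC mode
        .endm

        .macro  enable_irq, temp
        set_cpsr_c \temp, #MODE_SVC               @ unmask IRQs, stay in SVC mode
        .endm

set_cpsr_c itself works around a binutils limitation by moving the immediate
into a scratch register before the msr, rather than using msr with an
immediate operand.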
@@ -725,7 +725,7 @@ __dabt_svc:     sub     sp, sp, #S_FRAME_SIZE
        msr     cpsr_c, r9
        mov     r2, sp
        bl      do_DataAbort
-       set_cpsr_c r0, #PSR_I_BIT | MODE_SVC
+       disable_irq r0
        ldr     r0, [sp, #S_PSR]
        msr     spsr, r0
        ldmia   sp, {r0 - pc}^                  @ load r0 - pc, cpsr
@@ -776,9 +776,9 @@ svc_preempt:     teq     r9, #0                  @ was preempt count = 0
        movne   pc, lr
        mov     r7, #PREEMPT_ACTIVE
        str     r7, [r8, #TI_PREEMPT]           @ set PREEMPT_ACTIVE
-1:     set_cpsr_c r2, #MODE_SVC                @ enable IRQs
+1:     enable_irq r2                           @ enable IRQs
        bl      schedule
-       set_cpsr_c r0, #PSR_I_BIT | MODE_SVC    @ disable IRQs
+       disable_irq r0                          @ disable IRQs
        ldr     r0, [r8, #TI_FLAGS]             @ get new tasks TI_FLAGS
        tst     r0, #_TIF_NEED_RESCHED
        beq     preempt_return                  @ go again
@@ -801,7 +801,7 @@ __und_svc:       sub     sp, sp, #S_FRAME_SIZE
        mov     r0, sp                          @ struct pt_regs *regs
        bl      do_undefinstr
-1:     set_cpsr_c r0, #PSR_I_BIT | MODE_SVC
+1:     disable_irq r0
        ldr     lr, [sp, #S_PSR]                @ Get SVC cpsr
        msr     spsr, lr
        ldmia   sp, {r0 - pc}^                  @ Restore SVC registers
@@ -822,7 +822,7 @@ __pabt_svc:      sub     sp, sp, #S_FRAME_SIZE
        mov     r0, r2                          @ address (pc)
        mov     r1, sp                          @ regs
        bl      do_PrefetchAbort                @ call abort handler
-       set_cpsr_c r0, #PSR_I_BIT | MODE_SVC
+       disable_irq r0
        ldr     r0, [sp, #S_PSR]
        msr     spsr, r0
        ldmia   sp, {r0 - pc}^                  @ load r0 - pc, cpsr
@@ -861,7 +861,7 @@ __dabt_usr:      sub     sp, sp, #S_FRAME_SIZE   @ Allocate frame size in one go
 #else
        bl      CPU_ABORT_HANDLER
 #endif
-       set_cpsr_c r2, #MODE_SVC                @ Enable interrupts
+       enable_irq r2                           @ Enable interrupts
        mov     r2, sp
        adrsvc  al, lr, ret_from_exception
        b       do_DataAbort
@@ -916,7 +916,7 @@ __und_usr:       sub     sp, sp, #S_FRAME_SIZE   @ Allocate frame size in one go
        adrsvc  al, r9, ret_from_exception      @ r9 = normal FP return
        adrsvc  al, lr, fpundefinstr            @ lr = undefined instr return
-call_fpe:      set_cpsr_c r0, #MODE_SVC        @ Enable interrupts
+call_fpe:      enable_irq r0                   @ Enable interrupts
        get_thread_info r10                     @ get current thread
        ldr     r4, [r10, #TI_TASK]             @ get current task
        mov     r8, #1
@@ -939,7 +939,7 @@ __pabt_usr:      sub     sp, sp, #S_FRAME_SIZE   @ Allocate frame size in one go
        stmdb   r8, {sp, lr}^                   @ Save sp_usr lr_usr
        alignment_trap r4, r7, __temp_abt
        zero_fp
-       set_cpsr_c r0, #MODE_SVC                @ Enable interrupts
+       enable_irq r0                           @ Enable interrupts
        mov     r0, r5                          @ address (pc)
        mov     r1, sp                          @ regs
        bl      do_PrefetchAbort                @ call abort handler
...
@@ -35,18 +35,27 @@ ENTRY(__do_softirq)
  * stack.
  */
 ret_fast_syscall:
-       set_cpsr_c r1, #PSR_I_BIT | MODE_SVC    @ disable interrupts
+       disable_irq r1                          @ disable interrupts
        ldr     r1, [tsk, #TI_FLAGS]
        tst     r1, #_TIF_WORK_MASK
-       bne     ret_fast_work
+       bne     fast_work_pending
        fast_restore_user_regs

 /*
  * Ok, we need to do extra processing, enter the slow path.
  */
-ret_fast_work:
+fast_work_pending:
        str     r0, [sp, #S_R0+S_OFF]!          @ returned r0
-       b       work_pending
+work_pending:
+       tst     r1, #_TIF_NEED_RESCHED
+       bne     work_resched
+       tst     r1, #_TIF_NOTIFY_RESUME | _TIF_SIGPENDING
+       beq     no_work_pending
+       mov     r0, sp                          @ 'regs'
+       mov     r2, why                         @ 'syscall'
+       bl      do_notify_resume
+       disable_irq r1                          @ disable interrupts
+       b       no_work_pending

 work_resched:
        bl      schedule
@@ -55,22 +64,12 @@ work_resched:
  */
 ENTRY(ret_to_user)
 ret_slow_syscall:
-       set_cpsr_c r1, #PSR_I_BIT | MODE_SVC    @ disable interrupts
+       disable_irq r1                          @ disable interrupts
        ldr     r1, [tsk, #TI_FLAGS]
        tst     r1, #_TIF_WORK_MASK
-       beq     no_work_pending
-work_pending:
-       tst     r1, #_TIF_NEED_RESCHED
-       bne     work_resched
-       tst     r1, #_TIF_NOTIFY_RESUME | _TIF_SIGPENDING
-       blne    __do_notify_resume
+       bne     work_pending
 no_work_pending:
-       restore_user_regs
-
-__do_notify_resume:
-       mov     r0, sp                          @ 'regs'
-       mov     r2, why                         @ 'syscall'
-       b       do_notify_resume                @ note the bl above sets lr
+       slow_restore_user_regs

 /*
  * This is how we return from a fork.
@@ -80,9 +79,9 @@ ENTRY(ret_from_fork)
        bl      schedule_tail
 #endif
        get_thread_info tsk
-       ldr     ip, [tsk, #TI_FLAGS]            @ check for syscall tracing
+       ldr     r1, [tsk, #TI_FLAGS]            @ check for syscall tracing
        mov     why, #1
-       tst     ip, #_TIF_SYSCALL_TRACE         @ are we tracing syscalls?
+       tst     r1, #_TIF_SYSCALL_TRACE         @ are we tracing syscalls?
        beq     ret_slow_syscall
        mov     r1, sp
        mov     r0, #1                          @ trace exit [IP = 1]
@@ -134,7 +133,7 @@ ENTRY(vector_swi)
        ldr     ip, [ip]
        mcr     p15, 0, ip, c1, c0              @ update control register
 #endif
-       enable_irqs ip
+       enable_irq ip

        str     r4, [sp, #-S_OFF]!              @ push fifth arg
...
@@ -11,13 +11,13 @@
 #define MODE_SVC 0x13
 #endif

        .macro  zero_fp
 #ifndef CONFIG_NO_FRAME_POINTER
        mov     fp, #0
 #endif
        .endm

        .text

 @ Bad Abort numbers
 @ -----------------
@@ -69,119 +69,146 @@
 #define S_OFF          8

 #ifdef CONFIG_CPU_32
-       .macro  save_user_regs
-       sub     sp, sp, #S_FRAME_SIZE
-       stmia   sp, {r0 - r12}                  @ Calling r0 - r12
-       add     r8, sp, #S_PC
-       stmdb   r8, {sp, lr}^                   @ Calling sp, lr
-       mrs     r8, spsr                        @ called from non-FIQ mode, so ok.
-       str     lr, [sp, #S_PC]                 @ Save calling PC
-       str     r8, [sp, #S_PSR]                @ Save CPSR
-       str     r0, [sp, #S_OLD_R0]             @ Save OLD_R0
-       .endm
-
-       .macro  restore_user_regs
-       ldr     r0, [sp, #S_PSR]                @ Get calling cpsr
-       mov     ip, #PSR_I_BIT | MODE_SVC
-       msr     cpsr_c, ip                      @ disable IRQs
-       msr     spsr, r0                        @ save in spsr_svc
-       ldr     lr, [sp, #S_PC]                 @ Get PC
-       ldmia   sp, {r0 - lr}^                  @ Get calling r0 - lr
-       mov     r0, r0
-       add     sp, sp, #S_FRAME_SIZE
-       movs    pc, lr                          @ return & move spsr_svc into cpsr
-       .endm
-
-       .macro  fast_restore_user_regs
-       mov     ip, #PSR_I_BIT | MODE_SVC
-       msr     cpsr_c, ip                      @ disable IRQs
-       ldr     r1, [sp, #S_OFF + S_PSR]        @ get calling cpsr
-       ldr     lr, [sp, #S_OFF + S_PC]!        @ get pc
-       msr     spsr, r1                        @ save in spsr_svc
-       ldmdb   sp, {r1 - lr}^                  @ get calling r1 - lr
-       mov     r0, r0
-       add     sp, sp, #S_FRAME_SIZE - S_PC
-       movs    pc, lr                          @ return & move spsr_svc into cpsr
-       .endm
-
-       .macro  mask_pc, rd, rm
-       .endm
-
-       .macro  enable_irqs, temp
-       mov     \temp, #MODE_SVC
-       msr     cpsr_c, \temp
-       .endm
-
-       .macro  get_thread_info, rd
-       mov     \rd, sp, lsr #13
-       mov     \rd, \rd, lsl #13
-       .endm
-
-/*
- * Like adr, but force SVC mode (if required)
- */
-       .macro  adrsvc, cond, reg, label
-       adr\cond        \reg, \label
-       .endm
+       .macro  set_cpsr_c, reg, mode
+#if 1
+       /* broken binutils */
+       mov     \reg, \mode
+       msr     cpsr_c, \reg
+#else
+       msr     cpsr_c, \mode
+#endif
+       .endm
+
+       .macro  disable_irq, temp
+       set_cpsr_c \temp, #PSR_I_BIT | MODE_SVC
+       .endm
+
+       .macro  enable_irq, temp
+       set_cpsr_c \temp, #MODE_SVC
+       .endm
+
+       .macro  save_user_regs
+       sub     sp, sp, #S_FRAME_SIZE
+       stmia   sp, {r0 - r12}                  @ Calling r0 - r12
+       add     r8, sp, #S_PC
+       stmdb   r8, {sp, lr}^                   @ Calling sp, lr
+       mrs     r8, spsr                        @ called from non-FIQ mode, so ok.
+       str     lr, [sp, #S_PC]                 @ Save calling PC
+       str     r8, [sp, #S_PSR]                @ Save CPSR
+       str     r0, [sp, #S_OLD_R0]             @ Save OLD_R0
+       .endm
+
+       .macro  restore_user_regs
+       ldr     r1, [sp, #S_PSR]                @ Get calling cpsr
+       disable_irq ip                          @ disable IRQs
+       ldr     lr, [sp, #S_PC]!                @ Get PC
+       msr     spsr, r1                        @ save in spsr_svc
+       ldmdb   sp, {r0 - lr}^                  @ Get calling r0 - lr
+       mov     r0, r0
+       add     sp, sp, #S_FRAME_SIZE - S_PC
+       movs    pc, lr                          @ return & move spsr_svc into cpsr
+       .endm
+
+/*
+ * Must be called with IRQs already disabled.
+ */
+       .macro  fast_restore_user_regs
+       ldr     r1, [sp, #S_OFF + S_PSR]        @ get calling cpsr
+       ldr     lr, [sp, #S_OFF + S_PC]!        @ get pc
+       msr     spsr, r1                        @ save in spsr_svc
+       ldmdb   sp, {r1 - lr}^                  @ get calling r1 - lr
+       mov     r0, r0
+       add     sp, sp, #S_FRAME_SIZE - S_PC
+       movs    pc, lr                          @ return & move spsr_svc into cpsr
+       .endm
+
+/*
+ * Must be called with IRQs already disabled.
+ */
+       .macro  slow_restore_user_regs
+       ldr     r1, [sp, #S_PSR]                @ get calling cpsr
+       ldr     lr, [sp, #S_PC]!                @ get pc
+       msr     spsr, r1                        @ save in spsr_svc
+       ldmdb   sp, {r0 - lr}^                  @ get calling r1 - lr
+       mov     r0, r0
+       add     sp, sp, #S_FRAME_SIZE - S_PC
+       movs    pc, lr                          @ return & move spsr_svc into cpsr
+       .endm
+
+       .macro  mask_pc, rd, rm
+       .endm
+
+       .macro  get_thread_info, rd
+       mov     \rd, sp, lsr #13
+       mov     \rd, \rd, lsl #13
+       .endm
+
+/*
+ * Like adr, but force SVC mode (if required)
+ */
+       .macro  adrsvc, cond, reg, label
+       adr\cond        \reg, \label
+       .endm

        .macro  alignment_trap, rbase, rtemp, sym
 #ifdef CONFIG_ALIGNMENT_TRAP
 #define OFF_CR_ALIGNMENT(x)    cr_alignment - x

        ldr     \rtemp, [\rbase, #OFF_CR_ALIGNMENT(\sym)]
        mcr     p15, 0, \rtemp, c1, c0
 #endif
        .endm

 #else
        .macro  save_user_regs
        str     r0, [sp, #-4]!
        str     lr, [sp, #-4]!
        sub     sp, sp, #15*4
        stmia   sp, {r0 - lr}^
        mov     r0, r0
        .endm

        .macro  restore_user_regs
        ldmia   sp, {r0 - lr}^
        mov     r0, r0
        ldr     lr, [sp, #15*4]
        add     sp, sp, #15*4+8
        movs    pc, lr
        .endm

        .macro  fast_restore_user_regs
        add     sp, sp, #S_OFF
        ldmib   sp, {r1 - lr}^
        mov     r0, r0
        ldr     lr, [sp, #15*4]
        add     sp, sp, #15*4+8
        movs    pc, lr
        .endm

        .macro  mask_pc, rd, rm
        bic     \rd, \rm, #PCMASK
        .endm

        .macro  enable_irqs, temp
        teqp    pc, #0x00000003
        .endm

        .macro  initialise_traps_extra
        .endm

        .macro  get_thread_info, rd
        mov     \rd, sp, lsr #13
        mov     \rd, \rd, lsl #13
        .endm

 /*
  * Like adr, but force SVC mode (if required)
  */
        .macro  adrsvc, cond, reg, label
        adr\cond        \reg, \label
        orr\cond        \reg, \reg, #0x08000003
        .endm
 #endif
@@ -215,13 +242,3 @@ tsk .req r9 @ current thread_info
        ldr     scno, [lr, #-4]                 @ get SWI instruction
 #endif
        .endm
-
-       .macro  set_cpsr_c, reg, mode
-#if 1
-       mov     \reg, \mode
-       msr     cpsr_c, \reg
-#else
-       msr     cpsr_c, \mode
-#endif
-       .endm