Commit 31991332 authored by Greg Ungerer's avatar Greg Ungerer Committed by Linus Torvalds

[PATCH] move common macros into m68knommu entry.h

This overhauls asm-m68knommu/entry.h. It contains much cruft,
and is out of date relative to the underlying entry.S code.

More specifically, this brings the SAVE_ALL and RESTORE_ALL macros
from the various m68knommu entry.S files into this header.
parent e57d9261
#ifndef __M68K_ENTRY_H #ifndef __M68KNOMMU_ENTRY_H
#define __M68K_ENTRY_H #define __M68KNOMMU_ENTRY_H
#include <linux/config.h> #include <linux/config.h>
#ifndef CONFIG_COLDFIRE
#include <asm/setup.h> #include <asm/setup.h>
#include <asm/page.h> #include <asm/page.h>
/* /*
* Stack layout in 'ret_from_exception': * Stack layout in 'ret_from_exception':
* *
* This allows access to the syscall arguments in registers d1-d5 * This allows access to the syscall arguments in registers d1-d5
* *
* 0(sp) - d1 * 0(sp) - d1
* 4(sp) - d2 * 4(sp) - d2
...@@ -22,34 +21,17 @@ ...@@ -22,34 +21,17 @@
* 20(sp) - d0 * 20(sp) - d0
* 24(sp) - orig_d0 * 24(sp) - orig_d0
* 28(sp) - stack adjustment * 28(sp) - stack adjustment
* 2C(sp) - sr * 2C(sp) - [ sr ] [ format & vector ]
* 2E(sp) - pc * 2E(sp) - [ pc ] [ sr ]
* 32(sp) - format & vector * 30(sp) - [ format & vector ] [ pc ]
* ^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^
* M68K COLDFIRE
*/ */
/*
* 97/05/14 Andreas: Register %a2 is now set to the current task throughout
* the whole kernel.
*/
/* the following macro is used when enabling interrupts */
#if defined(MACH_ATARI_ONLY) && !defined(CONFIG_HADES)
/* block out HSYNC on the atari */
#define ALLOWINT 0xfbff
#define MAX_NOINT_IPL 3
#else
/* portable version */
#define ALLOWINT 0xf8ff #define ALLOWINT 0xf8ff
#define MAX_NOINT_IPL 0
#endif /* machine compilation types */
#ifdef __ASSEMBLY__ #ifdef __ASSEMBLY__
#define curptr a2
LFLUSH_I_AND_D = 0x00000808
LSIGTRAP = 5
/* process bits for task_struct.flags */ /* process bits for task_struct.flags */
PF_TRACESYS_OFF = 3 PF_TRACESYS_OFF = 3
PF_TRACESYS_BIT = 5 PF_TRACESYS_BIT = 5
...@@ -58,72 +40,149 @@ PF_PTRACED_BIT = 4 ...@@ -58,72 +40,149 @@ PF_PTRACED_BIT = 4
PF_DTRACE_OFF = 1 PF_DTRACE_OFF = 1
PF_DTRACE_BIT = 5 PF_DTRACE_BIT = 5
#define SAVE_ALL_INT save_all_int LENOSYS = 38
#define SAVE_ALL_SYS save_all_sys
#define RESTORE_ALL restore_all LD0 = 0x20
LORIG_D0 = 0x24
LFORMATVEC = 0x2c
LSR = 0x2e
LPC = 0x30
#define SWITCH_STACK_SIZE (6*4+4) /* Includes return address */
/* /*
* This defines the normal kernel pt-regs layout. * This defines the normal kernel pt-regs layout.
* *
* regs a3-a6 and d6-d7 are preserved by C code * regs are a2-a6 and d6-d7 preserved by C code
* the kernel doesn't mess with usp unless it needs to * the kernel doesn't mess with usp unless it needs to
*/ */
#ifdef CONFIG_COLDFIRE
/* /*
* a -1 in the orig_d0 field signifies * This is made a little more tricky on the ColdFire. There is no
* that the stack frame is NOT for syscall * separate kernel and user stack pointers. Need to artificially
* construct a usp in software... When doing this we need to disable
* interrupts, otherwise bad things could happen.
*/ */
.macro save_all_int VEC=0 .macro SAVE_ALL
oriw #0x0700,%sr /* disable interrupts */ move #0x2700,%sr /* disable intrs */
clrl %sp@- /* stk_adj */ btst #5,%sp@(2) /* from user? */
pea -1:w /* orig d0 */ bnes 6f /* no, skip */
movel %d0,%sp@- /* d0 */ movel %sp,sw_usp /* save user sp */
moveml %d1-%d5/%a0-%a1/%curptr,%sp@- addql #8,sw_usp /* remove exception */
movel sw_ksp,%sp /* kernel sp */
subql #8,%sp /* room for exception */
clrl %sp@- /* stk_adj */
movel %d0,%sp@- /* orig d0 */
movel %d0,%sp@- /* d0 */
subl #32,%sp /* space for 8 regs */
moveml %d1-%d5/%a0-%a2,%sp@
movel sw_usp,%a0 /* get usp */
moveml %a0@(-8),%d1-%d2 /* get exception */
moveml %d1-%d2,%sp@(LFORMATVEC) /* copy exception */
bra 7f
6:
clrl %sp@- /* stk_adj */
movel %d0,%sp@- /* orig d0 */
movel %d0,%sp@- /* d0 */
subl #32,%sp /* space for 7 regs */
moveml %d1-%d5/%a0-%a2,%sp@
7:
.endm .endm
.macro save_all_sys VEC=0 .macro RESTORE_ALL
clrl %sp@- /* stk_adj */ btst #5,%sp@(LSR) /* going user? */
movel %d0,%sp@- /* orig d0 */ bnes 8f /* no, skip */
movel %d0,%sp@- /* d0 */ move #0x2700,%sr /* disable intrs */
moveml %d1-%d5/%a0-%a1/%curptr,%sp@- movel sw_usp,%a0 /* get usp */
.endm moveml %sp@(LFORMATVEC),%d1-%d2 /* copy exception */
moveml %d1-%d2,%a0@(-8)
.macro restore_all moveml %sp@,%d1-%d5/%a0-%a2
moveml %sp@+,%a0-%a1/%curptr/%d1-%d5 addl #32,%sp /* space for 8 regs */
movel %sp@+,%d0
addql #4,%sp /* orig d0 */
addl %sp@+,%sp /* stk adj */
addql #8,%sp /* remove exception */
movel %sp,sw_ksp /* save ksp */
subql #8,sw_usp /* set exception */
movel sw_usp,%sp /* restore usp */
rte
8:
moveml %sp@,%d1-%d5/%a0-%a2
addl #32,%sp /* space for 8 regs */
movel %sp@+,%d0 movel %sp@+,%d0
addql #4,%sp /* orig d0 */ addql #4,%sp /* orig d0 */
addl %sp@+,%sp /* stk adj */ addl %sp@+,%sp /* stk adj */
rte rte
.endm .endm
#define SWITCH_STACK_SIZE (6*4+4) /* includes return address */ /*
* Quick exception save, use current stack only.
*/
.macro SAVE_LOCAL
move #0x2700,%sr /* disable intrs */
clrl %sp@- /* stk_adj */
movel %d0,%sp@- /* orig d0 */
movel %d0,%sp@- /* d0 */
subl #32,%sp /* space for 8 regs */
moveml %d1-%d5/%a0-%a2,%sp@
.endm
#define SAVE_SWITCH_STACK save_switch_stack .macro RESTORE_LOCAL
#define RESTORE_SWITCH_STACK restore_switch_stack moveml %sp@,%d1-%d5/%a0-%a2
#define GET_CURRENT(tmp) addl #32,%sp /* space for 8 regs */
movel %sp@+,%d0
addql #4,%sp /* orig d0 */
addl %sp@+,%sp /* stk adj */
rte
.endm
.macro save_switch_stack .macro SAVE_SWITCH_STACK
moveml %a3-%a6/%d6-%d7,%sp@- subl #24,%sp /* 6 regs */
moveml %a3-%a6/%d6-%d7,%sp@
.endm .endm
.macro restore_switch_stack .macro RESTORE_SWITCH_STACK
moveml %sp@+,%a3-%a6/%d6-%d7 moveml %sp@,%a3-%a6/%d6-%d7
addl #24,%sp /* 6 regs */
.endm .endm
#else /* C source */ /*
* Software copy of the user and kernel stack pointers... Ugh...
* Need these to get around ColdFire not having separate kernel
* and user stack pointers.
*/
.globl sw_usp
.globl sw_ksp
#define STR(X) STR1(X) #else /* !CONFIG_COLDFIRE */
#define STR1(X) #X
#define PT_OFF_ORIG_D0 0x24 /*
#define PT_OFF_FORMATVEC 0x32 * Standard 68k interrupt entry and exit macros.
#define PT_OFF_SR 0x2C */
#define SAVE_ALL_INT \ .macro SAVE_ALL
"clrl %%sp@-;" /* stk_adj */ \ clrl %sp@- /* stk_adj */
"pea -1:w;" /* orig d0 = -1 */ \ movel %d0,%sp@- /* orig d0 */
"movel %%d0,%%sp@-;" /* d0 */ \ movel %d0,%sp@- /* d0 */
"moveml %%d1-%%d5/%%a0-%%a2,%%sp@-" moveml %d1-%d5/%a0-%a2,%sp@-
.endm
#endif .macro RESTORE_ALL
moveml %sp@+,%a0-%a2/%d1-%d5
movel %sp@+,%d0
addql #4,%sp /* orig d0 */
addl %sp@+,%sp /* stk adj */
rte
.endm
.macro SAVE_SWITCH_STACK
moveml %a3-%a6/%d6-%d7,%sp@-
.endm
.macro RESTORE_SWITCH_STACK
moveml %sp@+,%a3-%a6/%d6-%d7
.endm
#endif /* CONFIG_COLDFIRE */ #endif /* !CONFIG_COLDFIRE */
#endif /* __M68K_ENTRY_H */ #endif /* __ASSEMBLY__ */
#endif /* __M68KNOMMU_ENTRY_H */
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment