Commit aed40e01 authored by Mark Rutland

arm64: move to ESR_ELx macros

Now that we have common ESR_ELx_* macros, move the core arm64 code over
to them.

There should be no functional change as a result of this patch.
Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Acked-by: Catalin Marinas <catalin.marinas@arm.com>
Reviewed-by: Christoffer Dall <christoffer.dall@linaro.org>
Cc: Marc Zyngier <marc.zyngier@arm.com>
Cc: Peter Maydell <peter.maydell@linaro.org>
Cc: Will Deacon <will.deacon@arm.com>
parent cf99a48d
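
The hunks below are a mechanical rename: each ESR_EL1_* user in the core arm64 code moves to the corresponding common ESR_ELx_* macro, with the old _EL1/_EL0 suffixes becoming the EL-agnostic _CUR/_LOW. As a rough illustration only (not part of the patch), here is a standalone C sketch of the same exception-class dispatch that the entry.S hunks perform in assembly; the constant values are assumptions mirroring the ARMv8 EC encoding that the kernel provides via the ESR_ELx_EC_* macros in <asm/esr.h>.

/*
 * Hypothetical userspace sketch, not kernel code: classify an ESR value by
 * its exception class using the EL-agnostic ESR_ELx_* naming scheme.
 * Numeric values are assumed to match the ARMv8 syndrome encoding.
 */
#include <stdint.h>
#include <stdio.h>

#define ESR_ELx_EC_SHIFT	26	/* EC lives in ESR[31:26] */
#define ESR_ELx_EC_UNKNOWN	0x00
#define ESR_ELx_EC_SVC64	0x15	/* SVC from AArch64 */
#define ESR_ELx_EC_DABT_LOW	0x24	/* data abort from a lower EL (was ..._DABT_EL0) */
#define ESR_ELx_EC_DABT_CUR	0x25	/* data abort from the current EL (was ..._DABT_EL1) */

static const char *classify(uint64_t esr)
{
	switch (esr >> ESR_ELx_EC_SHIFT) {	/* same shift the assembly applies to get x24 */
	case ESR_ELx_EC_SVC64:    return "SVC from 64-bit state";
	case ESR_ELx_EC_DABT_LOW: return "data abort from EL0";
	case ESR_ELx_EC_DABT_CUR: return "data abort from EL1";
	default:                  return "other/unknown";
	}
}

int main(void)
{
	uint64_t esr = (uint64_t)ESR_ELx_EC_DABT_LOW << ESR_ELx_EC_SHIFT;

	printf("%s\n", classify(esr));	/* prints "data abort from EL0" */
	return 0;
}
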
@@ -269,18 +269,18 @@ ENDPROC(el1_error_invalid)
 el1_sync:
 	kernel_entry 1
 	mrs	x1, esr_el1			// read the syndrome register
-	lsr	x24, x1, #ESR_EL1_EC_SHIFT	// exception class
-	cmp	x24, #ESR_EL1_EC_DABT_EL1	// data abort in EL1
+	lsr	x24, x1, #ESR_ELx_EC_SHIFT	// exception class
+	cmp	x24, #ESR_ELx_EC_DABT_CUR	// data abort in EL1
 	b.eq	el1_da
-	cmp	x24, #ESR_EL1_EC_SYS64		// configurable trap
+	cmp	x24, #ESR_ELx_EC_SYS64		// configurable trap
 	b.eq	el1_undef
-	cmp	x24, #ESR_EL1_EC_SP_ALIGN	// stack alignment exception
+	cmp	x24, #ESR_ELx_EC_SP_ALIGN	// stack alignment exception
 	b.eq	el1_sp_pc
-	cmp	x24, #ESR_EL1_EC_PC_ALIGN	// pc alignment exception
+	cmp	x24, #ESR_ELx_EC_PC_ALIGN	// pc alignment exception
 	b.eq	el1_sp_pc
-	cmp	x24, #ESR_EL1_EC_UNKNOWN	// unknown exception in EL1
+	cmp	x24, #ESR_ELx_EC_UNKNOWN	// unknown exception in EL1
 	b.eq	el1_undef
-	cmp	x24, #ESR_EL1_EC_BREAKPT_EL1	// debug exception in EL1
+	cmp	x24, #ESR_ELx_EC_BREAKPT_CUR	// debug exception in EL1
 	b.ge	el1_dbg
 	b	el1_inv
 el1_da:
@@ -318,7 +318,7 @@ el1_dbg:
 	/*
 	 * Debug exception handling
 	 */
-	cmp	x24, #ESR_EL1_EC_BRK64		// if BRK64
+	cmp	x24, #ESR_ELx_EC_BRK64		// if BRK64
 	cinc	x24, x24, eq			// set bit '0'
 	tbz	x24, #0, el1_inv		// EL1 only
 	mrs	x0, far_el1
@@ -375,26 +375,26 @@ el1_preempt:
 el0_sync:
 	kernel_entry 0
 	mrs	x25, esr_el1			// read the syndrome register
-	lsr	x24, x25, #ESR_EL1_EC_SHIFT	// exception class
-	cmp	x24, #ESR_EL1_EC_SVC64		// SVC in 64-bit state
+	lsr	x24, x25, #ESR_ELx_EC_SHIFT	// exception class
+	cmp	x24, #ESR_ELx_EC_SVC64		// SVC in 64-bit state
 	b.eq	el0_svc
-	cmp	x24, #ESR_EL1_EC_DABT_EL0	// data abort in EL0
+	cmp	x24, #ESR_ELx_EC_DABT_LOW	// data abort in EL0
 	b.eq	el0_da
-	cmp	x24, #ESR_EL1_EC_IABT_EL0	// instruction abort in EL0
+	cmp	x24, #ESR_ELx_EC_IABT_LOW	// instruction abort in EL0
 	b.eq	el0_ia
-	cmp	x24, #ESR_EL1_EC_FP_ASIMD	// FP/ASIMD access
+	cmp	x24, #ESR_ELx_EC_FP_ASIMD	// FP/ASIMD access
 	b.eq	el0_fpsimd_acc
-	cmp	x24, #ESR_EL1_EC_FP_EXC64	// FP/ASIMD exception
+	cmp	x24, #ESR_ELx_EC_FP_EXC64	// FP/ASIMD exception
 	b.eq	el0_fpsimd_exc
-	cmp	x24, #ESR_EL1_EC_SYS64		// configurable trap
+	cmp	x24, #ESR_ELx_EC_SYS64		// configurable trap
 	b.eq	el0_undef
-	cmp	x24, #ESR_EL1_EC_SP_ALIGN	// stack alignment exception
+	cmp	x24, #ESR_ELx_EC_SP_ALIGN	// stack alignment exception
 	b.eq	el0_sp_pc
-	cmp	x24, #ESR_EL1_EC_PC_ALIGN	// pc alignment exception
+	cmp	x24, #ESR_ELx_EC_PC_ALIGN	// pc alignment exception
 	b.eq	el0_sp_pc
-	cmp	x24, #ESR_EL1_EC_UNKNOWN	// unknown exception in EL0
+	cmp	x24, #ESR_ELx_EC_UNKNOWN	// unknown exception in EL0
 	b.eq	el0_undef
-	cmp	x24, #ESR_EL1_EC_BREAKPT_EL0	// debug exception in EL0
+	cmp	x24, #ESR_ELx_EC_BREAKPT_LOW	// debug exception in EL0
 	b.ge	el0_dbg
 	b	el0_inv
@@ -403,30 +403,30 @@ el0_sync:
 el0_sync_compat:
 	kernel_entry 0, 32
 	mrs	x25, esr_el1			// read the syndrome register
-	lsr	x24, x25, #ESR_EL1_EC_SHIFT	// exception class
-	cmp	x24, #ESR_EL1_EC_SVC32		// SVC in 32-bit state
+	lsr	x24, x25, #ESR_ELx_EC_SHIFT	// exception class
+	cmp	x24, #ESR_ELx_EC_SVC32		// SVC in 32-bit state
 	b.eq	el0_svc_compat
-	cmp	x24, #ESR_EL1_EC_DABT_EL0	// data abort in EL0
+	cmp	x24, #ESR_ELx_EC_DABT_LOW	// data abort in EL0
 	b.eq	el0_da
-	cmp	x24, #ESR_EL1_EC_IABT_EL0	// instruction abort in EL0
+	cmp	x24, #ESR_ELx_EC_IABT_LOW	// instruction abort in EL0
 	b.eq	el0_ia
-	cmp	x24, #ESR_EL1_EC_FP_ASIMD	// FP/ASIMD access
+	cmp	x24, #ESR_ELx_EC_FP_ASIMD	// FP/ASIMD access
 	b.eq	el0_fpsimd_acc
-	cmp	x24, #ESR_EL1_EC_FP_EXC32	// FP/ASIMD exception
+	cmp	x24, #ESR_ELx_EC_FP_EXC32	// FP/ASIMD exception
 	b.eq	el0_fpsimd_exc
-	cmp	x24, #ESR_EL1_EC_UNKNOWN	// unknown exception in EL0
+	cmp	x24, #ESR_ELx_EC_UNKNOWN	// unknown exception in EL0
 	b.eq	el0_undef
-	cmp	x24, #ESR_EL1_EC_CP15_32	// CP15 MRC/MCR trap
+	cmp	x24, #ESR_ELx_EC_CP15_32	// CP15 MRC/MCR trap
 	b.eq	el0_undef
-	cmp	x24, #ESR_EL1_EC_CP15_64	// CP15 MRRC/MCRR trap
+	cmp	x24, #ESR_ELx_EC_CP15_64	// CP15 MRRC/MCRR trap
 	b.eq	el0_undef
-	cmp	x24, #ESR_EL1_EC_CP14_MR	// CP14 MRC/MCR trap
+	cmp	x24, #ESR_ELx_EC_CP14_MR	// CP14 MRC/MCR trap
 	b.eq	el0_undef
-	cmp	x24, #ESR_EL1_EC_CP14_LS	// CP14 LDC/STC trap
+	cmp	x24, #ESR_ELx_EC_CP14_LS	// CP14 LDC/STC trap
 	b.eq	el0_undef
-	cmp	x24, #ESR_EL1_EC_CP14_64	// CP14 MRRC/MCRR trap
+	cmp	x24, #ESR_ELx_EC_CP14_64	// CP14 MRRC/MCRR trap
 	b.eq	el0_undef
-	cmp	x24, #ESR_EL1_EC_BREAKPT_EL0	// debug exception in EL0
+	cmp	x24, #ESR_ELx_EC_BREAKPT_LOW	// debug exception in EL0
 	b.ge	el0_dbg
 	b	el0_inv
 el0_svc_compat:
@@ -501,7 +501,7 @@ static int compat_setup_sigframe(struct compat_sigframe __user *sf,
 	__put_user_error((compat_ulong_t)0, &sf->uc.uc_mcontext.trap_no, err);
 	/* set the compat FSR WnR */
-	__put_user_error(!!(current->thread.fault_code & ESR_EL1_WRITE) <<
+	__put_user_error(!!(current->thread.fault_code & ESR_ELx_WNR) <<
 			 FSR_WRITE_SHIFT, &sf->uc.uc_mcontext.error_code, err);
 	__put_user_error(current->thread.fault_address, &sf->uc.uc_mcontext.fault_address, err);
 	__put_user_error(set->sig[0], &sf->uc.uc_mcontext.oldmask, err);
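
The compat_setup_sigframe() hunk above only renames ESR_EL1_WRITE to ESR_ELx_WNR; the logic is unchanged: the WnR flag from the 64-bit syndrome is collapsed to 0 or 1 and shifted into the write bit a 32-bit task expects in its FSR-format error_code. A hypothetical standalone sketch of that translation (the FSR_WRITE_SHIFT value of 11 is an assumption for illustration):

/*
 * Illustration-only constants: ESR WnR assumed at bit 6, FSR write bit
 * assumed at bit 11. The !! turns the masked flag into exactly 0 or 1
 * before it is shifted into place.
 */
#include <stdint.h>
#include <stdio.h>

#define ESR_ELx_WNR	(1UL << 6)	/* write-not-read bit in the ESR ISS (assumed) */
#define FSR_WRITE_SHIFT	11		/* write bit in the 32-bit FSR layout (assumed) */

static uint32_t compat_fsr_wnr(uint64_t fault_code)
{
	return (uint32_t)(!!(fault_code & ESR_ELx_WNR) << FSR_WRITE_SHIFT);
}

int main(void)
{
	printf("read fault : %#x\n", compat_fsr_wnr(0));		/* 0     */
	printf("write fault: %#x\n", compat_fsr_wnr(ESR_ELx_WNR));	/* 0x800 */
	return 0;
}
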
@@ -219,7 +219,7 @@ static int __kprobes do_page_fault(unsigned long addr, unsigned int esr,
 	if (esr & ESR_LNX_EXEC) {
 		vm_flags = VM_EXEC;
-	} else if ((esr & ESR_EL1_WRITE) && !(esr & ESR_EL1_CM)) {
+	} else if ((esr & ESR_ELx_WNR) && !(esr & ESR_ELx_CM)) {
 		vm_flags = VM_WRITE;
 		mm_flags |= FAULT_FLAG_WRITE;
 	}
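
In the do_page_fault() hunk above, the renamed bits keep their meaning: a data abort is treated as a write fault only when WnR is set and CM is clear, so cache maintenance operations (which report WnR=1 without being real stores) are not forced down the write-fault path. A standalone sketch of that check, with bit positions assumed to mirror <asm/esr.h>:

/*
 * Illustration only: WnR assumed at ESR ISS bit 6, CM at bit 8.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define ESR_ELx_WNR	(1UL << 6)	/* Write-not-Read: the faulting access was a write */
#define ESR_ELx_CM	(1UL << 8)	/* fault was caused by a cache maintenance op */

static bool is_write_fault(uint64_t esr)
{
	return (esr & ESR_ELx_WNR) && !(esr & ESR_ELx_CM);
}

int main(void)
{
	printf("store fault:       %d\n", is_write_fault(ESR_ELx_WNR));			/* 1 */
	printf("cache maintenance: %d\n", is_write_fault(ESR_ELx_WNR | ESR_ELx_CM));	/* 0 */
	return 0;
}
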