Commit 10f1ea0d authored by Markos Chandras, committed by Greg Kroah-Hartman

MIPS: asm: asm-eva: Introduce kernel load/store variants

commit 60cd7e08 upstream.

Introduce new macros for kernel load/store variants which will be
used to perform regular kernel space load/store operations in EVA
mode.
Signed-off-by: Markos Chandras <markos.chandras@imgtec.com>
Cc: linux-mips@linux-mips.org
Patchwork: https://patchwork.linux-mips.org/patch/9500/
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
parent 3fea9d6a
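For context, the string-form macros added below expand to instruction text that is pasted into inline-asm templates, with the register and address operands filled in by the compiler. A minimal sketch of that pattern follows, assuming a !CONFIG_EVA build; kernel_lw() is copied from the patch, while example_kernel_load_word() is a hypothetical helper name, not part of this commit.

/*
 * Illustrative only: mirrors the string-form kernel_lw() definition from
 * this patch and shows how it is stitched into an asm() template.
 */
#define kernel_lw(reg, addr)	"lw " reg ", " addr "\n"

/* Hypothetical helper: load a 32-bit word from a kernel-space pointer. */
static inline unsigned int example_kernel_load_word(const unsigned int *p)
{
	unsigned int val;

	/* Expands to: asm volatile("lw %0, %1\n" : "=r" (val) : "m" (*p)); */
	asm volatile(kernel_lw("%0", "%1")
		     : "=r" (val)
		     : "m" (*p));
	return val;
}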
@@ -11,6 +11,36 @@
 #define __ASM_ASM_EVA_H
 #ifndef __ASSEMBLY__
+/* Kernel variants */
+#define kernel_cache(op, base) "cache " op ", " base "\n"
+#define kernel_ll(reg, addr) "ll " reg ", " addr "\n"
+#define kernel_sc(reg, addr) "sc " reg ", " addr "\n"
+#define kernel_lw(reg, addr) "lw " reg ", " addr "\n"
+#define kernel_lwl(reg, addr) "lwl " reg ", " addr "\n"
+#define kernel_lwr(reg, addr) "lwr " reg ", " addr "\n"
+#define kernel_lh(reg, addr) "lh " reg ", " addr "\n"
+#define kernel_lb(reg, addr) "lb " reg ", " addr "\n"
+#define kernel_lbu(reg, addr) "lbu " reg ", " addr "\n"
+#define kernel_sw(reg, addr) "sw " reg ", " addr "\n"
+#define kernel_swl(reg, addr) "swl " reg ", " addr "\n"
+#define kernel_swr(reg, addr) "swr " reg ", " addr "\n"
+#define kernel_sh(reg, addr) "sh " reg ", " addr "\n"
+#define kernel_sb(reg, addr) "sb " reg ", " addr "\n"
+#ifdef CONFIG_32BIT
+/*
+ * No 'sd' or 'ld' instructions in 32-bit but the code will
+ * do the correct thing
+ */
+#define kernel_sd(reg, addr) user_sw(reg, addr)
+#define kernel_ld(reg, addr) user_lw(reg, addr)
+#else
+#define kernel_sd(reg, addr) "sd " reg", " addr "\n"
+#define kernel_ld(reg, addr) "ld " reg", " addr "\n"
+#endif /* CONFIG_32BIT */
 #ifdef CONFIG_EVA
 #define __BUILD_EVA_INSN(insn, reg, addr) \
@@ -41,37 +71,60 @@
 #else
-#define user_cache(op, base) "cache " op ", " base "\n"
-#define user_ll(reg, addr) "ll " reg ", " addr "\n"
-#define user_sc(reg, addr) "sc " reg ", " addr "\n"
-#define user_lw(reg, addr) "lw " reg ", " addr "\n"
-#define user_lwl(reg, addr) "lwl " reg ", " addr "\n"
-#define user_lwr(reg, addr) "lwr " reg ", " addr "\n"
-#define user_lh(reg, addr) "lh " reg ", " addr "\n"
-#define user_lb(reg, addr) "lb " reg ", " addr "\n"
-#define user_lbu(reg, addr) "lbu " reg ", " addr "\n"
-#define user_sw(reg, addr) "sw " reg ", " addr "\n"
-#define user_swl(reg, addr) "swl " reg ", " addr "\n"
-#define user_swr(reg, addr) "swr " reg ", " addr "\n"
-#define user_sh(reg, addr) "sh " reg ", " addr "\n"
-#define user_sb(reg, addr) "sb " reg ", " addr "\n"
+#define user_cache(op, base) kernel_cache(op, base)
+#define user_ll(reg, addr) kernel_ll(reg, addr)
+#define user_sc(reg, addr) kernel_sc(reg, addr)
+#define user_lw(reg, addr) kernel_lw(reg, addr)
+#define user_lwl(reg, addr) kernel_lwl(reg, addr)
+#define user_lwr(reg, addr) kernel_lwr(reg, addr)
+#define user_lh(reg, addr) kernel_lh(reg, addr)
+#define user_lb(reg, addr) kernel_lb(reg, addr)
+#define user_lbu(reg, addr) kernel_lbu(reg, addr)
+#define user_sw(reg, addr) kernel_sw(reg, addr)
+#define user_swl(reg, addr) kernel_swl(reg, addr)
+#define user_swr(reg, addr) kernel_swr(reg, addr)
+#define user_sh(reg, addr) kernel_sh(reg, addr)
+#define user_sb(reg, addr) kernel_sb(reg, addr)
 #ifdef CONFIG_32BIT
-/*
- * No 'sd' or 'ld' instructions in 32-bit but the code will
- * do the correct thing
- */
-#define user_sd(reg, addr) user_sw(reg, addr)
-#define user_ld(reg, addr) user_lw(reg, addr)
+#define user_sd(reg, addr) kernel_sw(reg, addr)
+#define user_ld(reg, addr) kernel_lw(reg, addr)
 #else
-#define user_sd(reg, addr) "sd " reg", " addr "\n"
-#define user_ld(reg, addr) "ld " reg", " addr "\n"
+#define user_sd(reg, addr) kernel_sd(reg, addr)
+#define user_ld(reg, addr) kernel_ld(reg, addr)
 #endif /* CONFIG_32BIT */
 #endif /* CONFIG_EVA */
 #else /* __ASSEMBLY__ */
+#define kernel_cache(op, base) cache op, base
+#define kernel_ll(reg, addr) ll reg, addr
+#define kernel_sc(reg, addr) sc reg, addr
+#define kernel_lw(reg, addr) lw reg, addr
+#define kernel_lwl(reg, addr) lwl reg, addr
+#define kernel_lwr(reg, addr) lwr reg, addr
+#define kernel_lh(reg, addr) lh reg, addr
+#define kernel_lb(reg, addr) lb reg, addr
+#define kernel_lbu(reg, addr) lbu reg, addr
+#define kernel_sw(reg, addr) sw reg, addr
+#define kernel_swl(reg, addr) swl reg, addr
+#define kernel_swr(reg, addr) swr reg, addr
+#define kernel_sh(reg, addr) sh reg, addr
+#define kernel_sb(reg, addr) sb reg, addr
+#ifdef CONFIG_32BIT
+/*
+ * No 'sd' or 'ld' instructions in 32-bit but the code will
+ * do the correct thing
+ */
+#define kernel_sd(reg, addr) user_sw(reg, addr)
+#define kernel_ld(reg, addr) user_lw(reg, addr)
+#else
+#define kernel_sd(reg, addr) sd reg, addr
+#define kernel_ld(reg, addr) ld reg, addr
+#endif /* CONFIG_32BIT */
 #ifdef CONFIG_EVA
 #define __BUILD_EVA_INSN(insn, reg, addr) \
@@ -101,31 +154,27 @@
 #define user_sd(reg, addr) user_sw(reg, addr)
 #else
-#define user_cache(op, base) cache op, base
-#define user_ll(reg, addr) ll reg, addr
-#define user_sc(reg, addr) sc reg, addr
-#define user_lw(reg, addr) lw reg, addr
-#define user_lwl(reg, addr) lwl reg, addr
-#define user_lwr(reg, addr) lwr reg, addr
-#define user_lh(reg, addr) lh reg, addr
-#define user_lb(reg, addr) lb reg, addr
-#define user_lbu(reg, addr) lbu reg, addr
-#define user_sw(reg, addr) sw reg, addr
-#define user_swl(reg, addr) swl reg, addr
-#define user_swr(reg, addr) swr reg, addr
-#define user_sh(reg, addr) sh reg, addr
-#define user_sb(reg, addr) sb reg, addr
+#define user_cache(op, base) kernel_cache(op, base)
+#define user_ll(reg, addr) kernel_ll(reg, addr)
+#define user_sc(reg, addr) kernel_sc(reg, addr)
+#define user_lw(reg, addr) kernel_lw(reg, addr)
+#define user_lwl(reg, addr) kernel_lwl(reg, addr)
+#define user_lwr(reg, addr) kernel_lwr(reg, addr)
+#define user_lh(reg, addr) kernel_lh(reg, addr)
+#define user_lb(reg, addr) kernel_lb(reg, addr)
+#define user_lbu(reg, addr) kernel_lbu(reg, addr)
+#define user_sw(reg, addr) kernel_sw(reg, addr)
+#define user_swl(reg, addr) kernel_swl(reg, addr)
+#define user_swr(reg, addr) kernel_swr(reg, addr)
+#define user_sh(reg, addr) kernel_sh(reg, addr)
+#define user_sb(reg, addr) kernel_sb(reg, addr)
 #ifdef CONFIG_32BIT
-/*
- * No 'sd' or 'ld' instructions in 32-bit but the code will
- * do the correct thing
- */
-#define user_sd(reg, addr) user_sw(reg, addr)
-#define user_ld(reg, addr) user_lw(reg, addr)
+#define user_sd(reg, addr) kernel_sw(reg, addr)
+#define user_ld(reg, addr) kernel_lw(reg, addr)
 #else
-#define user_sd(reg, addr) sd reg, addr
-#define user_ld(reg, addr) ld reg, addr
+#define user_sd(reg, addr) kernel_sd(reg, addr)
+#define user_ld(reg, addr) kernel_sd(reg, addr)
 #endif /* CONFIG_32BIT */
 #endif /* CONFIG_EVA */
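As a usage note, the value of the kernel_* names is that they keep emitting plain kernel-segment instructions even when CONFIG_EVA redefines the user_* variants to their EVA counterparts. Below is a hedged sketch of the kind of inline-asm ll/sc loop these string macros feed, modelled on the kernel's existing atomic helpers; kernel_ll()/kernel_sc() are copied from the patch, while example_kernel_xchg() is an illustrative name, not something this commit adds.

/*
 * Illustrative sketch only (not part of this commit): an ll/sc exchange
 * loop built from the new string-form macros.
 */
#define kernel_ll(reg, addr)	"ll " reg ", " addr "\n"
#define kernel_sc(reg, addr)	"sc " reg ", " addr "\n"

/* Hypothetical helper: atomically exchange *p with 'val', return old value. */
static inline unsigned int example_kernel_xchg(unsigned int *p,
					       unsigned int val)
{
	unsigned int old, tmp;

	asm volatile(
	"1:\t" kernel_ll("%0", "%2")	/* old = *p, start linked sequence   */
	"\tmove\t%1, %3\n"		/* tmp = val                         */
	"\t" kernel_sc("%1", "%2")	/* try *p = tmp; tmp = 1 on success  */
	"\tbeqz\t%1, 1b\n"		/* retry if the store-conditional
					 * failed; the assembler fills the
					 * delay slot in reorder mode, as the
					 * kernel's own ll/sc loops rely on  */
	: "=&r" (old), "=&r" (tmp), "+m" (*p)
	: "r" (val)
	: "memory");

	return old;
}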