Commit dab3b8f4 authored by Nicholas Piggin, committed by Michael Ellerman

powerpc/64: asm use consistent global variable declaration and access

Use helper macros to access global variables, and place them in .data
sections rather than in .toc. Putting addresses in TOC is not required
because the kernel is linked with a single TOC.
Signed-off-by: Nicholas Piggin <npiggin@gmail.com>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Link: https://lore.kernel.org/r/20220926034057.2360083-3-npiggin@gmail.com
parent 17773afd
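The pattern applied throughout the diff below is, roughly: drop the per-variable TOC entry and address the ordinary .data symbol through the LOAD_REG_ADDR() helper instead. The sketch that follows is illustrative only, not lifted from the patch: my_var is a hypothetical global, and the helper is assumed to materialise the address as a @got load off r2, as in the boot-wrapper definition of LOAD_REG_ADDR added by this patch.

	/* Before: address kept in a dedicated TOC entry */
	.section ".toc","aw"
my_var_ptr:
	.tc	my_var[TC],my_var		/* my_var: hypothetical global */
	.section ".text"
	ld	r11,my_var_ptr@toc(r2)		/* r11 = &my_var */
	ld	r12,0(r11)			/* r12 = my_var */

	/* After: my_var is a plain .data symbol; the helper macro builds
	 * its address (e.g. ld r11,my_var@got(r2) on 64-bit) */
	.data
my_var:
	.quad	0
	.text
	LOAD_REG_ADDR(r11, my_var)		/* r11 = &my_var */
	ld	r12,0(r11)			/* r12 = my_var */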
@@ -16,7 +16,7 @@ opal_kentry:
 	li	r5, 0
 	li	r6, 0
 	li	r7, 0
-	ld	r11,opal@got(r2)
+	LOAD_REG_ADDR(r11, opal)
 	ld	r8,0(r11)
 	ld	r9,8(r11)
 	bctr
@@ -35,7 +35,7 @@ opal_call:
 	mr	r13,r2
 
 	/* Set opal return address */
-	ld	r11,opal_return@got(r2)
+	LOAD_REG_ADDR(r11, opal_return)
 	mtlr	r11
 	mfmsr	r12
@@ -45,7 +45,7 @@ opal_call:
 	mtspr	SPRN_HSRR1,r12
 
 	/* load the opal call entry point and base */
-	ld	r11,opal@got(r2)
+	LOAD_REG_ADDR(r11, opal)
 	ld	r12,8(r11)
 	ld	r2,0(r11)
 	mtspr	SPRN_HSRR0,r12
...
@@ -84,4 +84,13 @@
 #define MFTBU(dest)			mfspr dest, SPRN_TBRU
 #endif
 
+#ifdef CONFIG_PPC64_BOOT_WRAPPER
+#define LOAD_REG_ADDR(reg,name)			\
+	ld	reg,name@got(r2)
+#else
+#define LOAD_REG_ADDR(reg,name)			\
+	lis	reg,name@ha;			\
+	addi	reg,reg,name@l
+#endif
+
 #endif /* _PPC64_PPC_ASM_H */
@@ -76,16 +76,10 @@
 swsusp_save_area:
 	.space SL_SIZE
 
-	.section ".toc","aw"
-swsusp_save_area_ptr:
-	.tc	swsusp_save_area[TC],swsusp_save_area
-restore_pblist_ptr:
-	.tc	restore_pblist[TC],restore_pblist
-
 	.section .text
 	.align	5
 _GLOBAL(swsusp_arch_suspend)
-	ld	r11,swsusp_save_area_ptr@toc(r2)
+	LOAD_REG_ADDR(r11, swsusp_save_area)
 	SAVE_SPECIAL(LR)
 	SAVE_REGISTER(r1)
 	SAVE_SPECIAL(CR)
@@ -131,7 +125,7 @@ END_FW_FTR_SECTION_IFCLR(FW_FEATURE_LPAR)
 	bl swsusp_save
 
 	/* restore LR */
-	ld	r11,swsusp_save_area_ptr@toc(r2)
+	LOAD_REG_ADDR(r11, swsusp_save_area)
 	RESTORE_SPECIAL(LR)
 
 	addi	r1,r1,128
@@ -145,7 +139,7 @@ BEGIN_FTR_SECTION
 END_FTR_SECTION_IFSET(CPU_FTR_ALTIVEC)
 	sync
 
-	ld	r12,restore_pblist_ptr@toc(r2)
+	LOAD_REG_ADDR(r11, restore_pblist)
 	ld	r12,0(r12)
 
 	cmpdi	r12,0
@@ -187,7 +181,7 @@ nothing_to_copy:
 	tlbia
 #endif
 
-	ld	r11,swsusp_save_area_ptr@toc(r2)
+	LOAD_REG_ADDR(r11, swsusp_save_area)
 
 	RESTORE_SPECIAL(CR)
@@ -265,7 +259,7 @@ END_FW_FTR_SECTION_IFCLR(FW_FEATURE_LPAR)
 	bl do_after_copyback
 	addi	r1,r1,128
 
-	ld	r11,swsusp_save_area_ptr@toc(r2)
+	LOAD_REG_ADDR(r11, swsusp_save_area)
 	RESTORE_SPECIAL(LR)
 
 	li	r3, 0
...
@@ -85,8 +85,7 @@
 	std	r2, STK_GOT(r1)
 	ld	r2,PACATOC(r13)	/* get kernel TOC in r2 */
 
-	addis	r3,r2,function_trace_op@toc@ha
-	addi	r3,r3,function_trace_op@toc@l
+	LOAD_REG_ADDR(r3, function_trace_op)
 	ld	r5,0(r3)
 #else
 	lis	r3,function_trace_op@ha
...
@@ -155,8 +155,8 @@ _GLOBAL(load_up_vsx)
  * usage of floating-point registers. These routines must be called
  * with preempt disabled.
  */
-#ifdef CONFIG_PPC32
 	.data
+#ifdef CONFIG_PPC32
 fpzero:
 	.long	0
 fpone:
@@ -169,18 +169,17 @@ fphalf:
 	lfs	fr,name@l(r11)
 #else
-	.section ".toc","aw"
 fpzero:
-	.tc	FD_0_0[TC],0
+	.quad	0
 fpone:
-	.tc	FD_3ff00000_0[TC],0x3ff0000000000000	/* 1.0 */
+	.quad	0x3ff0000000000000	/* 1.0 */
 fphalf:
-	.tc	FD_3fe00000_0[TC],0x3fe0000000000000	/* 0.5 */
+	.quad	0x3fe0000000000000	/* 0.5 */
 
 #define LDCONST(fr, name)	\
-	lfd	fr,name@toc(r2)
+	addis	r11,r2,name@toc@ha;	\
+	lfd	fr,name@toc@l(r11)
 #endif
 	.text
 
 /*
  * Internal routine to enable floating point and set FPSCR to 0.
...
@@ -9,11 +9,6 @@
 #include <asm/export.h>
 #include <asm/feature-fixups.h>
 
-	.section	".toc","aw"
-PPC64_CACHES:
-	.tc		ppc64_caches[TC],ppc64_caches
-	.section	".text"
-
 _GLOBAL_TOC(copy_page)
 BEGIN_FTR_SECTION
 	lis	r5,PAGE_SIZE@h
@@ -24,7 +19,7 @@ FTR_SECTION_ELSE
 ALT_FTR_SECTION_END_IFCLR(CPU_FTR_VMX_COPY)
 	ori	r5,r5,PAGE_SIZE@l
 BEGIN_FTR_SECTION
-	ld	r10,PPC64_CACHES@toc(r2)
+	LOAD_REG_ADDR(r10, ppc64_caches)
 	lwz	r11,DCACHEL1LOGBLOCKSIZE(r10)	/* log2 of cache block size */
 	lwz	r12,DCACHEL1BLOCKSIZE(r10)	/* get cache block size */
 	li	r9,0
...
@@ -11,11 +11,6 @@
 #include <asm/asm-offsets.h>
 #include <asm/export.h>
 
-	.section	".toc","aw"
-PPC64_CACHES:
-	.tc		ppc64_caches[TC],ppc64_caches
-	.section	".text"
-
 /**
  * __arch_clear_user: - Zero a block of memory in user space, with less checking.
  * @to:   Destination address, in user space.
@@ -133,7 +128,7 @@ err1;	stb	r0,0(r3)
 	blr
 
 .Llong_clear:
-	ld	r5,PPC64_CACHES@toc(r2)
+	LOAD_REG_ADDR(r5, ppc64_caches)
 
 	bf	cr7*4+0,11f
 err2;	std	r0,0(r3)
...
@@ -21,7 +21,7 @@
 _GLOBAL(read_bhrb)
 	cmpldi	r3,31
 	bgt	1f
-	ld	r4,bhrb_table@got(r2)
+	LOAD_REG_ADDR(r4, bhrb_table)
 	sldi	r3,r3,3
 	add	r3,r4,r3
 	mtctr	r3
...
@@ -4,12 +4,12 @@
 
 /* unsigned long xmon_mfspr(sprn, default_value) */
 _GLOBAL(xmon_mfspr)
-	PPC_LL	r5, .Lmfspr_table@got(r2)
+	LOAD_REG_ADDR(r5, .Lmfspr_table)
 	b	xmon_mxspr
 
 /* void xmon_mtspr(sprn, new_value) */
 _GLOBAL(xmon_mtspr)
-	PPC_LL	r5, .Lmtspr_table@got(r2)
+	LOAD_REG_ADDR(r5, .Lmtspr_table)
 	b	xmon_mxspr
 
 /*
...