Commit 4e991e3c authored by Nicholas Piggin, committed by Michael Ellerman

powerpc: add CFUNC assembly label annotation

This macro is to be used in assembly where C functions are called.
pcrel addressing mode requires branches to functions with a
localentry value of 1 to have either a trailing nop or @notoc.
This macro permits the latter without changing callers.
Signed-off-by: Nicholas Piggin <npiggin@gmail.com>
[mpe: Add dummy definitions to fix selftests build]
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Link: https://msgid.link/20230408021752.862660-5-npiggin@gmail.com
parent dc5dac74
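Note: a minimal sketch of how the macro described above is expected to work. This commit only adds the plain pass-through definition; the pcrel variant using @notoc and the CONFIG_PPC_KERNEL_PCREL guard are assumptions drawn from the commit message (they land in a later patch of the series), not part of this change:

    /* Hypothetical final form once pcrel addressing is supported;
     * this commit itself only adds the plain pass-through definition.
     */
    #ifdef CONFIG_PPC_KERNEL_PCREL
    #define CFUNC(name)	name@notoc
    #else
    #define CFUNC(name)	name
    #endif

    /* Call sites only gain the annotation, e.g.:
     *	bl	CFUNC(system_call_exception)
     * instead of:
     *	bl	system_call_exception
     */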
@@ -180,6 +180,11 @@
 #ifdef __KERNEL__
 
+/*
+ * Used to name C functions called from asm
+ */
+#define CFUNC(name) name
+
 /*
  * We use __powerpc64__ here because we want the compat VDSO to use the 32-bit
  * version below in the else case of the ifdef.
@@ -608,7 +608,7 @@ __boot_from_prom:
 /* Do all of the interaction with OF client interface */
 mr r8,r26
-bl prom_init
+bl CFUNC(prom_init)
 #endif /* #CONFIG_PPC_OF_BOOT_TRAMPOLINE */
 /* We never return. We also hit that trap if trying to boot
@@ -836,7 +836,7 @@ __secondary_start:
 * can turn it on below. This is a call to C, which is OK, we're still
 * running on the emergency stack.
 */
-bl early_setup_secondary
+bl CFUNC(early_setup_secondary)
 /*
 * The primary has initialized our kernel stack for us in the paca, grab
@@ -875,7 +875,7 @@ start_secondary_prolog:
 LOAD_PACA_TOC()
 li r3,0
 std r3,0(r1) /* Zero the stack frame pointer */
-bl start_secondary
+bl CFUNC(start_secondary)
 b .
 /*
 * Reset stack pointer and call start_secondary
@@ -886,7 +886,7 @@ _GLOBAL(start_secondary_resume)
 ld r1,PACAKSAVE(r13) /* Reload kernel stack pointer */
 li r3,0
 std r3,0(r1) /* Zero the stack frame pointer */
-bl start_secondary
+bl CFUNC(start_secondary)
 b .
 #endif
@@ -991,7 +991,7 @@ start_here_multiplatform:
 */
 #ifdef CONFIG_KASAN
-bl kasan_early_init
+bl CFUNC(kasan_early_init)
 #endif
 /* Restore parameters passed from prom_init/kexec */
 mr r3,r31
@@ -1024,7 +1024,7 @@ start_here_common:
 stb r0,PACAIRQHAPPENED(r13)
 /* Generic kernel entry */
-bl start_kernel
+bl CFUNC(start_kernel)
 /* Not reached */
 0: trap
@@ -101,12 +101,12 @@ END_FTR_SECTION_IFSET(CPU_FTR_HAS_PPR)
 * state of kernel code.
 */
 SANITIZE_SYSCALL_GPRS()
-bl system_call_exception
+bl CFUNC(system_call_exception)
 .Lsyscall_vectored_\name\()_exit:
 addi r4,r1,STACK_INT_FRAME_REGS
 li r5,1 /* scv */
-bl syscall_exit_prepare
+bl CFUNC(syscall_exit_prepare)
 std r1,PACA_EXIT_SAVE_R1(r13) /* save r1 for restart */
 .Lsyscall_vectored_\name\()_rst_start:
 lbz r11,PACAIRQHAPPENED(r13)
@@ -185,7 +185,7 @@ _ASM_NOKPROBE_SYMBOL(syscall_vectored_\name\()_restart)
 addi r4,r1,STACK_INT_FRAME_REGS
 li r11,IRQS_ALL_DISABLED
 stb r11,PACAIRQSOFTMASK(r13)
-bl syscall_exit_restart
+bl CFUNC(syscall_exit_restart)
 std r1,PACA_EXIT_SAVE_R1(r13) /* save r1 for restart */
 b .Lsyscall_vectored_\name\()_rst_start
 1:
@@ -286,12 +286,12 @@ END_BTB_FLUSH_SECTION
 * state of kernel code.
 */
 SANITIZE_SYSCALL_GPRS()
-bl system_call_exception
+bl CFUNC(system_call_exception)
 .Lsyscall_exit:
 addi r4,r1,STACK_INT_FRAME_REGS
 li r5,0 /* !scv */
-bl syscall_exit_prepare
+bl CFUNC(syscall_exit_prepare)
 std r1,PACA_EXIT_SAVE_R1(r13) /* save r1 for restart */
 #ifdef CONFIG_PPC_BOOK3S
 .Lsyscall_rst_start:
@@ -372,7 +372,7 @@ _ASM_NOKPROBE_SYMBOL(syscall_restart)
 addi r4,r1,STACK_INT_FRAME_REGS
 li r11,IRQS_ALL_DISABLED
 stb r11,PACAIRQSOFTMASK(r13)
-bl syscall_exit_restart
+bl CFUNC(syscall_exit_restart)
 std r1,PACA_EXIT_SAVE_R1(r13) /* save r1 for restart */
 b .Lsyscall_rst_start
 1:
@@ -401,7 +401,7 @@ _ASM_NOKPROBE_SYMBOL(fast_interrupt_return_srr)
 li r3,0 /* 0 return value, no EMULATE_STACK_STORE */
 bne+ .Lfast_kernel_interrupt_return_srr
 addi r3,r1,STACK_INT_FRAME_REGS
-bl unrecoverable_exception
+bl CFUNC(unrecoverable_exception)
 b . /* should not get here */
 #else
 bne .Lfast_user_interrupt_return_srr
@@ -419,7 +419,7 @@ _ASM_NOKPROBE_SYMBOL(interrupt_return_\srr\())
 interrupt_return_\srr\()_user: /* make backtraces match the _kernel variant */
 _ASM_NOKPROBE_SYMBOL(interrupt_return_\srr\()_user)
 addi r3,r1,STACK_INT_FRAME_REGS
-bl interrupt_exit_user_prepare
+bl CFUNC(interrupt_exit_user_prepare)
 #ifndef CONFIG_INTERRUPT_SANITIZE_REGISTERS
 cmpdi r3,0
 bne- .Lrestore_nvgprs_\srr
@@ -523,7 +523,7 @@ _ASM_NOKPROBE_SYMBOL(interrupt_return_\srr\()_user_restart)
 addi r3,r1,STACK_INT_FRAME_REGS
 li r11,IRQS_ALL_DISABLED
 stb r11,PACAIRQSOFTMASK(r13)
-bl interrupt_exit_user_restart
+bl CFUNC(interrupt_exit_user_restart)
 std r1,PACA_EXIT_SAVE_R1(r13) /* save r1 for restart */
 b .Linterrupt_return_\srr\()_user_rst_start
 1:
@@ -536,7 +536,7 @@ RESTART_TABLE(.Linterrupt_return_\srr\()_user_rst_start, .Linterrupt_return_\srr
 interrupt_return_\srr\()_kernel:
 _ASM_NOKPROBE_SYMBOL(interrupt_return_\srr\()_kernel)
 addi r3,r1,STACK_INT_FRAME_REGS
-bl interrupt_exit_kernel_prepare
+bl CFUNC(interrupt_exit_kernel_prepare)
 std r1,PACA_EXIT_SAVE_R1(r13) /* save r1 for restart */
 .Linterrupt_return_\srr\()_kernel_rst_start:
@@ -705,7 +705,7 @@ _ASM_NOKPROBE_SYMBOL(interrupt_return_\srr\()_kernel_restart)
 addi r3,r1,STACK_INT_FRAME_REGS
 li r11,IRQS_ALL_DISABLED
 stb r11,PACAIRQSOFTMASK(r13)
-bl interrupt_exit_kernel_restart
+bl CFUNC(interrupt_exit_kernel_restart)
 std r1,PACA_EXIT_SAVE_R1(r13) /* save r1 for restart */
 b .Linterrupt_return_\srr\()_kernel_rst_start
 1:
@@ -727,20 +727,20 @@ DEFINE_FIXED_SYMBOL(__end_soft_masked, text)
 #ifdef CONFIG_PPC_BOOK3S
 _GLOBAL(ret_from_fork_scv)
-bl schedule_tail
+bl CFUNC(schedule_tail)
 HANDLER_RESTORE_NVGPRS()
 li r3,0 /* fork() return value */
 b .Lsyscall_vectored_common_exit
 #endif
 _GLOBAL(ret_from_fork)
-bl schedule_tail
+bl CFUNC(schedule_tail)
 HANDLER_RESTORE_NVGPRS()
 li r3,0 /* fork() return value */
 b .Lsyscall_exit
 _GLOBAL(ret_from_kernel_user_thread)
-bl schedule_tail
+bl CFUNC(schedule_tail)
 mtctr r14
 mr r3,r15
 #ifdef CONFIG_PPC64_ELF_ABI_V2
@@ -432,7 +432,7 @@ _GLOBAL(kexec_sequence)
 1:
 /* copy dest pages, flush whole dest image */
 mr r3,r29
-bl kexec_copy_flush /* (image) */
+bl CFUNC(kexec_copy_flush) /* (image) */
 /* turn off mmu now if not done earlier */
 cmpdi r26,0
@@ -38,7 +38,11 @@
 .else
 addi r4, r5, VDSO_DATA_OFFSET
 .endif
-bl DOTSYM(\funct)
+#ifdef __powerpc64__
+bl CFUNC(DOTSYM(\funct))
+#else
+bl \funct
+#endif
 PPC_LL r0, PPC_MIN_STKFRM + PPC_LR_STKOFF(r1)
 #ifdef __powerpc64__
 PPC_LL r2, PPC_MIN_STKFRM + STK_GOT(r1)
@@ -381,7 +381,7 @@ kvm_secondary_got_guest:
 bne kvm_no_guest
 li r3,0 /* NULL argument */
-bl hmi_exception_realmode
+bl CFUNC(hmi_exception_realmode)
 /*
 * At this point we have finished executing in the guest.
 * We need to wait for hwthread_req to become zero, since
@@ -458,7 +458,7 @@ kvm_unsplit_nap:
 cmpwi r12, BOOK3S_INTERRUPT_HMI
 bne 55f
 li r3, 0 /* NULL argument */
-bl hmi_exception_realmode
+bl CFUNC(hmi_exception_realmode)
 55:
 /*
 * Ensure that secondary doesn't nap when it has
@@ -858,7 +858,7 @@ deliver_guest_interrupt: /* r4 = vcpu, r13 = paca */
 cmpdi r0, 0
 beq 71f
 mr r3, r4
-bl kvmppc_guest_entry_inject_int
+bl CFUNC(kvmppc_guest_entry_inject_int)
 ld r4, HSTATE_KVM_VCPU(r13)
 71:
 ld r6, VCPU_SRR0(r4)
@@ -1544,7 +1544,7 @@ kvmppc_guest_external:
 /* External interrupt, first check for host_ipi. If this is
 * set, we know the host wants us out so let's do it now
 */
-bl kvmppc_read_intr
+bl CFUNC(kvmppc_read_intr)
 /*
 * Restore the active volatile registers after returning from
@@ -1626,7 +1626,7 @@ kvmppc_hdsi:
 /* Search the hash table. */
 mr r3, r9 /* vcpu pointer */
 li r7, 1 /* data fault */
-bl kvmppc_hpte_hv_fault
+bl CFUNC(kvmppc_hpte_hv_fault)
 ld r9, HSTATE_KVM_VCPU(r13)
 ld r10, VCPU_PC(r9)
 ld r11, VCPU_MSR(r9)
@@ -1702,7 +1702,7 @@ kvmppc_hisi:
 mr r4, r10
 mr r6, r11
 li r7, 0 /* instruction fault */
-bl kvmppc_hpte_hv_fault
+bl CFUNC(kvmppc_hpte_hv_fault)
 ld r9, HSTATE_KVM_VCPU(r13)
 ld r10, VCPU_PC(r9)
 ld r11, VCPU_MSR(r9)
@@ -2342,7 +2342,7 @@ hmi_realmode:
 lbz r0, HSTATE_PTID(r13)
 cmpwi r0, 0
 bne guest_exit_cont
-bl kvmppc_realmode_hmi_handler
+bl CFUNC(kvmppc_realmode_hmi_handler)
 ld r9, HSTATE_KVM_VCPU(r13)
 li r12, BOOK3S_INTERRUPT_HMI
 b guest_exit_cont
@@ -2413,7 +2413,7 @@ END_FTR_SECTION_IFSET(CPU_FTR_ARCH_207S)
 7: mflr r0
 std r0, PPC_LR_STKOFF(r1)
 stdu r1, -PPC_MIN_STKFRM(r1)
-bl kvmppc_read_intr
+bl CFUNC(kvmppc_read_intr)
 nop
 li r12, BOOK3S_INTERRUPT_EXTERNAL
 cmpdi r3, 1
@@ -45,7 +45,7 @@ _GLOBAL(copypage_power7)
 std r4,-STACKFRAMESIZE+STK_REG(R30)(r1)
 std r0,16(r1)
 stdu r1,-STACKFRAMESIZE(r1)
-bl enter_vmx_ops
+bl CFUNC(enter_vmx_ops)
 cmpwi r3,0
 ld r0,STACKFRAMESIZE+16(r1)
 ld r3,STK_REG(R31)(r1)
@@ -88,7 +88,7 @@ _GLOBAL(copypage_power7)
 addi r3,r3,128
 bdnz 1b
-b exit_vmx_ops /* tail call optimise */
+b CFUNC(exit_vmx_ops) /* tail call optimise */
 #else
 li r0,(PAGE_SIZE/128)
@@ -47,7 +47,7 @@
 ld r15,STK_REG(R15)(r1)
 ld r14,STK_REG(R14)(r1)
 .Ldo_err3:
-bl exit_vmx_usercopy
+bl CFUNC(exit_vmx_usercopy)
 ld r0,STACKFRAMESIZE+16(r1)
 mtlr r0
 b .Lexit
@@ -272,7 +272,7 @@ err1; stb r0,0(r3)
 mflr r0
 std r0,16(r1)
 stdu r1,-STACKFRAMESIZE(r1)
-bl enter_vmx_usercopy
+bl CFUNC(enter_vmx_usercopy)
 cmpwi cr1,r3,0
 ld r0,STACKFRAMESIZE+16(r1)
 ld r3,STK_REG(R31)(r1)
@@ -488,7 +488,7 @@ err3; lbz r0,0(r4)
 err3; stb r0,0(r3)
 15: addi r1,r1,STACKFRAMESIZE
-b exit_vmx_usercopy /* tail call optimise */
+b CFUNC(exit_vmx_usercopy) /* tail call optimise */
 .Lvmx_unaligned_copy:
 /* Get the destination 16B aligned */
@@ -691,5 +691,5 @@ err3; lbz r0,0(r4)
 err3; stb r0,0(r3)
 15: addi r1,r1,STACKFRAMESIZE
-b exit_vmx_usercopy /* tail call optimise */
+b CFUNC(exit_vmx_usercopy) /* tail call optimise */
 #endif /* CONFIG_ALTIVEC */
@@ -14,7 +14,7 @@
 _GLOBAL(__arch_hweight8)
 BEGIN_FTR_SECTION
-b __sw_hweight8
+b CFUNC(__sw_hweight8)
 nop
 nop
 FTR_SECTION_ELSE
@@ -26,7 +26,7 @@ EXPORT_SYMBOL(__arch_hweight8)
 _GLOBAL(__arch_hweight16)
 BEGIN_FTR_SECTION
-b __sw_hweight16
+b CFUNC(__sw_hweight16)
 nop
 nop
 nop
@@ -49,7 +49,7 @@ EXPORT_SYMBOL(__arch_hweight16)
 _GLOBAL(__arch_hweight32)
 BEGIN_FTR_SECTION
-b __sw_hweight32
+b CFUNC(__sw_hweight32)
 nop
 nop
 nop
@@ -75,7 +75,7 @@ EXPORT_SYMBOL(__arch_hweight32)
 _GLOBAL(__arch_hweight64)
 BEGIN_FTR_SECTION
-b __sw_hweight64
+b CFUNC(__sw_hweight64)
 nop
 nop
 nop
@@ -44,7 +44,7 @@
 std r5,-STACKFRAMESIZE+STK_REG(R29)(r1); \
 std r0,16(r1); \
 stdu r1,-STACKFRAMESIZE(r1); \
-bl enter_vmx_ops; \
+bl CFUNC(enter_vmx_ops); \
 cmpwi cr1,r3,0; \
 ld r0,STACKFRAMESIZE+16(r1); \
 ld r3,STK_REG(R31)(r1); \
@@ -60,7 +60,7 @@
 std r5,-STACKFRAMESIZE+STK_REG(R29)(r1); \
 std r0,16(r1); \
 stdu r1,-STACKFRAMESIZE(r1); \
-bl exit_vmx_ops; \
+bl CFUNC(exit_vmx_ops); \
 ld r0,STACKFRAMESIZE+16(r1); \
 ld r3,STK_REG(R31)(r1); \
 ld r4,STK_REG(R30)(r1); \
@@ -218,7 +218,7 @@ END_FTR_SECTION_IFSET(CPU_FTR_ALTIVEC)
 std r5,-STACKFRAMESIZE+STK_REG(R29)(r1)
 std r0,16(r1)
 stdu r1,-STACKFRAMESIZE(r1)
-bl enter_vmx_ops
+bl CFUNC(enter_vmx_ops)
 cmpwi cr1,r3,0
 ld r0,STACKFRAMESIZE+16(r1)
 ld r3,STK_REG(R31)(r1)
@@ -433,7 +433,7 @@ END_FTR_SECTION_IFSET(CPU_FTR_ALTIVEC)
 15: addi r1,r1,STACKFRAMESIZE
 ld r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
-b exit_vmx_ops /* tail call optimise */
+b CFUNC(exit_vmx_ops) /* tail call optimise */
 .Lvmx_unaligned_copy:
 /* Get the destination 16B aligned */
@@ -637,5 +637,5 @@ END_FTR_SECTION_IFSET(CPU_FTR_ALTIVEC)
 15: addi r1,r1,STACKFRAMESIZE
 ld r3,-STACKFRAMESIZE+STK_REG(R31)(r1)
-b exit_vmx_ops /* tail call optimise */
+b CFUNC(exit_vmx_ops) /* tail call optimise */
 #endif /* CONFIG_ALTIVEC */
@@ -44,7 +44,7 @@ hcall_tracepoint_refcount:
 std r0,16(r1); \
 addi r4,r1,STK_PARAM(FIRST_REG); \
 stdu r1,-STACK_FRAME_MIN_SIZE(r1); \
-bl __trace_hcall_entry; \
+bl CFUNC(__trace_hcall_entry); \
 ld r3,STACK_FRAME_MIN_SIZE+STK_PARAM(R3)(r1); \
 ld r4,STACK_FRAME_MIN_SIZE+STK_PARAM(R4)(r1); \
 ld r5,STACK_FRAME_MIN_SIZE+STK_PARAM(R5)(r1); \
@@ -63,7 +63,7 @@ hcall_tracepoint_refcount:
 std r3,STACK_FRAME_MIN_SIZE+STK_PARAM(R3)(r1); \
 mr r4,r3; \
 mr r3,r0; \
-bl __trace_hcall_exit; \
+bl CFUNC(__trace_hcall_exit); \
 ld r0,STACK_FRAME_MIN_SIZE+16(r1); \
 addi r1,r1,STACK_FRAME_MIN_SIZE; \
 ld r3,STK_PARAM(R3)(r1); \
@@ -27,6 +27,7 @@
 #define _GLOBAL_TOC(A) _GLOBAL(A)
 #define _GLOBAL_TOC_KASAN(A) _GLOBAL(A)
 #define _GLOBAL_KASAN(A) _GLOBAL(A)
+#define CFUNC(name) name
 #define PPC_MTOCRF(A, B) mtocrf A, B
@@ -9,6 +9,7 @@
 #define _GLOBAL(A) FUNC_START(test_ ## A)
 #define _GLOBAL_TOC(A) FUNC_START(test_ ## A)
+#define CFUNC(name) name
 #define CONFIG_ALTIVEC