Commit deff8a24 authored by Jiri Slaby, committed by Borislav Petkov

x86/boot: Annotate local functions

.Lrelocated, .Lpaging_enabled, .Lno_longmode, and .Lin_pm32 are
self-standing local functions, annotate them as such and preserve "no
alignment".

The annotations do not generate anything yet.
Signed-off-by: Jiri Slaby <jslaby@suse.cz>
Signed-off-by: Borislav Petkov <bp@suse.de>
Cc: Cao jin <caoj.fnst@cn.fujitsu.com>
Cc: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
Cc: "H. Peter Anvin" <hpa@zytor.com>
Cc: Ingo Molnar <mingo@redhat.com>
Cc: Kate Stewart <kstewart@linuxfoundation.org>
Cc: "Kirill A. Shutemov" <kirill.shutemov@linux.intel.com>
Cc: linux-arch@vger.kernel.org
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: Wei Huang <wei@redhat.com>
Cc: x86-ml <x86@kernel.org>
Cc: Xiaoyao Li <xiaoyao.li@linux.intel.com>
Link: https://lkml.kernel.org/r/20191011115108.12392-8-jslaby@suse.cz
parent 74d8b90a
...@@ -209,7 +209,7 @@ ENDPROC(efi32_stub_entry) ...@@ -209,7 +209,7 @@ ENDPROC(efi32_stub_entry)
#endif #endif
.text .text
.Lrelocated: SYM_FUNC_START_LOCAL_NOALIGN(.Lrelocated)
/* /*
* Clear BSS (stack is currently empty) * Clear BSS (stack is currently empty)
...@@ -260,6 +260,7 @@ ENDPROC(efi32_stub_entry) ...@@ -260,6 +260,7 @@ ENDPROC(efi32_stub_entry)
*/ */
xorl %ebx, %ebx xorl %ebx, %ebx
jmp *%eax jmp *%eax
SYM_FUNC_END(.Lrelocated)
#ifdef CONFIG_EFI_STUB #ifdef CONFIG_EFI_STUB
.data .data
......
...@@ -511,7 +511,7 @@ ENDPROC(efi64_stub_entry) ...@@ -511,7 +511,7 @@ ENDPROC(efi64_stub_entry)
#endif #endif
.text .text
.Lrelocated: SYM_FUNC_START_LOCAL_NOALIGN(.Lrelocated)
/* /*
* Clear BSS (stack is currently empty) * Clear BSS (stack is currently empty)
...@@ -540,6 +540,7 @@ ENDPROC(efi64_stub_entry) ...@@ -540,6 +540,7 @@ ENDPROC(efi64_stub_entry)
* Jump to the decompressed kernel. * Jump to the decompressed kernel.
*/ */
jmp *%rax jmp *%rax
SYM_FUNC_END(.Lrelocated)
/* /*
* Adjust the global offset table * Adjust the global offset table
...@@ -635,9 +636,10 @@ ENTRY(trampoline_32bit_src) ...@@ -635,9 +636,10 @@ ENTRY(trampoline_32bit_src)
lret lret
.code64 .code64
.Lpaging_enabled: SYM_FUNC_START_LOCAL_NOALIGN(.Lpaging_enabled)
/* Return from the trampoline */ /* Return from the trampoline */
jmp *%rdi jmp *%rdi
SYM_FUNC_END(.Lpaging_enabled)
/* /*
* The trampoline code has a size limit. * The trampoline code has a size limit.
...@@ -647,11 +649,12 @@ ENTRY(trampoline_32bit_src) ...@@ -647,11 +649,12 @@ ENTRY(trampoline_32bit_src)
.org trampoline_32bit_src + TRAMPOLINE_32BIT_CODE_SIZE .org trampoline_32bit_src + TRAMPOLINE_32BIT_CODE_SIZE
.code32 .code32
.Lno_longmode: SYM_FUNC_START_LOCAL_NOALIGN(.Lno_longmode)
/* This isn't an x86-64 CPU, so hang intentionally, we cannot continue */ /* This isn't an x86-64 CPU, so hang intentionally, we cannot continue */
1: 1:
hlt hlt
jmp 1b jmp 1b
SYM_FUNC_END(.Lno_longmode)
#include "../../kernel/verify_cpu.S" #include "../../kernel/verify_cpu.S"
......
...@@ -46,7 +46,7 @@ ENDPROC(protected_mode_jump) ...@@ -46,7 +46,7 @@ ENDPROC(protected_mode_jump)
.code32 .code32
.section ".text32","ax" .section ".text32","ax"
.Lin_pm32: SYM_FUNC_START_LOCAL_NOALIGN(.Lin_pm32)
# Set up data segments for flat 32-bit mode # Set up data segments for flat 32-bit mode
movl %ecx, %ds movl %ecx, %ds
movl %ecx, %es movl %ecx, %es
...@@ -72,4 +72,4 @@ ENDPROC(protected_mode_jump) ...@@ -72,4 +72,4 @@ ENDPROC(protected_mode_jump)
lldt %cx lldt %cx
jmpl *%eax # Jump to the 32-bit entrypoint jmpl *%eax # Jump to the 32-bit entrypoint
ENDPROC(.Lin_pm32) SYM_FUNC_END(.Lin_pm32)
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment