Commit aa246c05 authored by Will Deacon

Merge branch 'for-next/asm-annotations' into for-next/core

* for-next/asm-annotations: (6 commits)
  arm64: kernel: Correct annotation of end of el0_sync
  ...
parents 4f6cdf29 73d6890f
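
Note for readers: the branch converts arm64 assembly from the old ENTRY/ENDPROC/ENDPIPROC/WEAK annotations to the generic SYM_* macros. The substitution pattern applied throughout the diff below is, roughly (illustrative; `func` stands for any function name):

	ENTRY(func)      ->  SYM_FUNC_START(func)
	WEAK(func)       ->  SYM_FUNC_START_WEAK(func)
	                     (or SYM_FUNC_START_WEAK_PI where ENDPIPROC was used)
	ENDPROC(func)    ->  SYM_FUNC_END(func)
	ENDPIPROC(func)  ->  SYM_FUNC_END_PI(func)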
@@ -448,17 +448,6 @@ USER(\label, ic	ivau, \tmp2)	// invalidate I line PoU
 	b.ne	9998b
 	.endm
 
-/*
- * Annotate a function as position independent, i.e., safe to be called before
- * the kernel virtual mapping is activated.
- */
-#define ENDPIPROC(x)			\
-	.globl	__pi_##x;		\
-	.type	__pi_##x, %function;	\
-	.set	__pi_##x, x;		\
-	.size	__pi_##x, . - x;	\
-	ENDPROC(x)
-
 /*
  * Annotate a function as being unsuitable for kprobes.
  */
......
@@ -4,4 +4,20 @@
 #define __ALIGN		.align 2
 #define __ALIGN_STR	".align 2"
 
+/*
+ * Annotate a function as position independent, i.e., safe to be called before
+ * the kernel virtual mapping is activated.
+ */
+#define SYM_FUNC_START_PI(x)			\
+		SYM_FUNC_START_ALIAS(__pi_##x);	\
+		SYM_FUNC_START(x)
+
+#define SYM_FUNC_START_WEAK_PI(x)		\
+		SYM_FUNC_START_ALIAS(__pi_##x);	\
+		SYM_FUNC_START_WEAK(x)
+
+#define SYM_FUNC_END_PI(x)			\
+		SYM_FUNC_END(x);		\
+		SYM_FUNC_END_ALIAS(__pi_##x)
+
 #endif
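
Aside: SYM_FUNC_START_ALIAS/SYM_FUNC_END_ALIAS come from the generic include/linux/linkage.h and emit a second global label over the same code, so SYM_FUNC_START_PI(memcpy) expands to roughly the following (a sketch under that assumption, not the exact preprocessor output):

	.globl	__pi_memcpy
	.align	2
__pi_memcpy:			// position-independent alias
	.globl	memcpy
	.align	2
memcpy:				// primary entry, same address

SYM_FUNC_END_PI(memcpy) then emits the matching .type/.size for both symbols, replacing what ENDPIPROC used to do by hand with .set/.size.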
@@ -650,6 +650,7 @@ el0_sync:
 	mov	x0, sp
 	bl	el0_sync_handler
 	b	ret_to_user
+ENDPROC(el0_sync)
 
 #ifdef CONFIG_COMPAT
 	.align	6
@@ -658,16 +659,18 @@ el0_sync_compat:
 	mov	x0, sp
 	bl	el0_sync_compat_handler
 	b	ret_to_user
-ENDPROC(el0_sync)
+ENDPROC(el0_sync_compat)
 
 	.align	6
 el0_irq_compat:
 	kernel_entry 0, 32
 	b	el0_irq_naked
+ENDPROC(el0_irq_compat)
 
 el0_error_compat:
 	kernel_entry 0, 32
 	b	el0_error_naked
+ENDPROC(el0_error_compat)
 #endif
 
 	.align	6
......
@@ -14,7 +14,7 @@
  * Parameters:
  *	x0 - dest
  */
-ENTRY(clear_page)
+SYM_FUNC_START(clear_page)
 	mrs	x1, dczid_el0
 	and	w1, w1, #0xf
 	mov	x2, #4
@@ -25,5 +25,5 @@ ENTRY(clear_page)
 	tst	x0, #(PAGE_SIZE - 1)
 	b.ne	1b
 	ret
-ENDPROC(clear_page)
+SYM_FUNC_END(clear_page)
 EXPORT_SYMBOL(clear_page)
@@ -19,7 +19,7 @@
  *
  * Alignment fixed up by hardware.
  */
-ENTRY(__arch_clear_user)
+SYM_FUNC_START(__arch_clear_user)
 	mov	x2, x1			// save the size for fixup return
 	subs	x1, x1, #8
 	b.mi	2f
@@ -40,7 +40,7 @@ uao_user_alternative 9f, strh, sttrh, wzr, x0, 2
 uao_user_alternative 9f, strb, sttrb, wzr, x0, 0
 5:	mov	x0, #0
 	ret
-ENDPROC(__arch_clear_user)
+SYM_FUNC_END(__arch_clear_user)
 EXPORT_SYMBOL(__arch_clear_user)
 
 	.section .fixup,"ax"
......
@@ -53,12 +53,12 @@
 	.endm
 
 end	.req	x5
-ENTRY(__arch_copy_from_user)
+SYM_FUNC_START(__arch_copy_from_user)
 	add	end, x0, x2
 #include "copy_template.S"
 	mov	x0, #0				// Nothing to copy
 	ret
-ENDPROC(__arch_copy_from_user)
+SYM_FUNC_END(__arch_copy_from_user)
 EXPORT_SYMBOL(__arch_copy_from_user)
 
 	.section .fixup,"ax"
......
@@ -55,12 +55,12 @@
 end	.req	x5
 
-ENTRY(__arch_copy_in_user)
+SYM_FUNC_START(__arch_copy_in_user)
 	add	end, x0, x2
 #include "copy_template.S"
 	mov	x0, #0
 	ret
-ENDPROC(__arch_copy_in_user)
+SYM_FUNC_END(__arch_copy_in_user)
 EXPORT_SYMBOL(__arch_copy_in_user)
 
 	.section .fixup,"ax"
......
@@ -17,7 +17,7 @@
  *	x0 - dest
  *	x1 - src
  */
-ENTRY(copy_page)
+SYM_FUNC_START(copy_page)
 alternative_if ARM64_HAS_NO_HW_PREFETCH
 	// Prefetch three cache lines ahead.
 	prfm	pldl1strm, [x1, #128]
@@ -75,5 +75,5 @@ alternative_else_nop_endif
 	stnp	x16, x17, [x0, #112 - 256]
 
 	ret
-ENDPROC(copy_page)
+SYM_FUNC_END(copy_page)
 EXPORT_SYMBOL(copy_page)
@@ -52,12 +52,12 @@
 	.endm
 
 end	.req	x5
-ENTRY(__arch_copy_to_user)
+SYM_FUNC_START(__arch_copy_to_user)
 	add	end, x0, x2
 #include "copy_template.S"
 	mov	x0, #0
 	ret
-ENDPROC(__arch_copy_to_user)
+SYM_FUNC_END(__arch_copy_to_user)
 EXPORT_SYMBOL(__arch_copy_to_user)
 
 	.section .fixup,"ax"
......
@@ -85,17 +85,17 @@ CPU_BE(	rev16	w3, w3	)
 	.endm
 
 	.align	5
-ENTRY(crc32_le)
+SYM_FUNC_START(crc32_le)
 alternative_if_not ARM64_HAS_CRC32
 	b	crc32_le_base
 alternative_else_nop_endif
 	__crc32
-ENDPROC(crc32_le)
+SYM_FUNC_END(crc32_le)
 
 	.align	5
-ENTRY(__crc32c_le)
+SYM_FUNC_START(__crc32c_le)
 alternative_if_not ARM64_HAS_CRC32
 	b	__crc32c_le_base
 alternative_else_nop_endif
 	__crc32	c
-ENDPROC(__crc32c_le)
+SYM_FUNC_END(__crc32c_le)
@@ -19,7 +19,7 @@
  * Returns:
  *	x0 - address of first occurrence of 'c' or 0
  */
-WEAK(memchr)
+SYM_FUNC_START_WEAK_PI(memchr)
 	and	w1, w1, #0xff
 1:	subs	x2, x2, #1
 	b.mi	2f
@@ -30,5 +30,5 @@ WEAK(memchr)
 	ret
 2:	mov	x0, #0
 	ret
-ENDPIPROC(memchr)
+SYM_FUNC_END_PI(memchr)
 EXPORT_SYMBOL_NOKASAN(memchr)
@@ -46,7 +46,7 @@ pos		.req	x11
 limit_wd	.req	x12
 mask		.req	x13
 
-WEAK(memcmp)
+SYM_FUNC_START_WEAK_PI(memcmp)
 	cbz	limit, .Lret0
 	eor	tmp1, src1, src2
 	tst	tmp1, #7
@@ -243,5 +243,5 @@ CPU_LE( rev	data2, data2 )
 .Lret0:
 	mov	result, #0
 	ret
-ENDPIPROC(memcmp)
+SYM_FUNC_END_PI(memcmp)
 EXPORT_SYMBOL_NOKASAN(memcmp)
@@ -57,11 +57,11 @@
 	.endm
 
 	.weak memcpy
-ENTRY(__memcpy)
-ENTRY(memcpy)
+SYM_FUNC_START_ALIAS(__memcpy)
+SYM_FUNC_START_PI(memcpy)
 #include "copy_template.S"
 	ret
-ENDPIPROC(memcpy)
+SYM_FUNC_END_PI(memcpy)
 EXPORT_SYMBOL(memcpy)
-ENDPROC(__memcpy)
+SYM_FUNC_END_ALIAS(__memcpy)
 EXPORT_SYMBOL(__memcpy)
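
The ordering in this hunk is deliberate: `.weak memcpy` leaves the C-visible symbol overridable (an instrumented implementation such as KASAN's can take over at link time), `__memcpy` stays strong for callers that must bypass instrumentation, and `__pi_memcpy` is callable before the kernel virtual mapping is up. Schematically (illustrative sketch):

	.weak	memcpy
SYM_FUNC_START_ALIAS(__memcpy)		// strong, uninstrumented entry
SYM_FUNC_START_PI(memcpy)		// weak memcpy plus __pi_memcpy alias
	/* __memcpy, memcpy and __pi_memcpy all label the first insn */
	...
	ret
SYM_FUNC_END_PI(memcpy)			// closes memcpy and __pi_memcpy
SYM_FUNC_END_ALIAS(__memcpy)		// closes __memcpy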
@@ -46,8 +46,8 @@ D_l	.req	x13
 D_h	.req	x14
 
 	.weak memmove
-ENTRY(__memmove)
-ENTRY(memmove)
+SYM_FUNC_START_ALIAS(__memmove)
+SYM_FUNC_START_PI(memmove)
 	cmp	dstin, src
 	b.lo	__memcpy
 	add	tmp1, src, count
@@ -184,7 +184,7 @@ ENTRY(memmove)
 	tst	count, #0x3f
 	b.ne	.Ltail63
 	ret
-ENDPIPROC(memmove)
+SYM_FUNC_END_PI(memmove)
 EXPORT_SYMBOL(memmove)
-ENDPROC(__memmove)
+SYM_FUNC_END_ALIAS(__memmove)
 EXPORT_SYMBOL(__memmove)
@@ -43,8 +43,8 @@ tmp3w		.req	w9
 tmp3		.req	x9
 
 	.weak memset
-ENTRY(__memset)
-ENTRY(memset)
+SYM_FUNC_START_ALIAS(__memset)
+SYM_FUNC_START_PI(memset)
 	mov	dst, dstin	/* Preserve return value.  */
 	and	A_lw, val, #255
 	orr	A_lw, A_lw, A_lw, lsl #8
@@ -203,7 +203,7 @@ ENTRY(memset)
 	ands	count, count, zva_bits_x
 	b.ne	.Ltail_maybe_long
 	ret
-ENDPIPROC(memset)
+SYM_FUNC_END_PI(memset)
 EXPORT_SYMBOL(memset)
-ENDPROC(__memset)
+SYM_FUNC_END_ALIAS(__memset)
 EXPORT_SYMBOL(__memset)
@@ -18,7 +18,7 @@
  * Returns:
  *	x0 - address of first occurrence of 'c' or 0
  */
-WEAK(strchr)
+SYM_FUNC_START_WEAK(strchr)
 	and	w1, w1, #0xff
 1:	ldrb	w2, [x0], #1
 	cmp	w2, w1
@@ -28,5 +28,5 @@ WEAK(strchr)
 	cmp	w2, w1
 	csel	x0, x0, xzr, eq
 	ret
-ENDPROC(strchr)
+SYM_FUNC_END(strchr)
 EXPORT_SYMBOL_NOKASAN(strchr)
@@ -48,7 +48,7 @@ tmp3		.req	x9
 zeroones	.req	x10
 pos		.req	x11
 
-WEAK(strcmp)
+SYM_FUNC_START_WEAK_PI(strcmp)
 	eor	tmp1, src1, src2
 	mov	zeroones, #REP8_01
 	tst	tmp1, #7
@@ -219,5 +219,5 @@ CPU_BE(	orr	syndrome, diff, has_nul )
 	lsr	data1, data1, #56
 	sub	result, data1, data2, lsr #56
 	ret
-ENDPIPROC(strcmp)
+SYM_FUNC_END_PI(strcmp)
 EXPORT_SYMBOL_NOKASAN(strcmp)
@@ -44,7 +44,7 @@ pos		.req	x12
 #define REP8_7f	0x7f7f7f7f7f7f7f7f
 #define REP8_80	0x8080808080808080
 
-WEAK(strlen)
+SYM_FUNC_START_WEAK_PI(strlen)
 	mov	zeroones, #REP8_01
 	bic	src, srcin, #15
 	ands	tmp1, srcin, #15
@@ -111,5 +111,5 @@ CPU_LE( lsr	tmp2, tmp2, tmp1 )	/* Shift (tmp1 & 63).  */
 	csinv	data1, data1, xzr, le
 	csel	data2, data2, data2a, le
 	b	.Lrealigned
-ENDPIPROC(strlen)
+SYM_FUNC_END_PI(strlen)
 EXPORT_SYMBOL_NOKASAN(strlen)
@@ -52,7 +52,7 @@ limit_wd	.req	x13
 mask		.req	x14
 endloop		.req	x15
 
-WEAK(strncmp)
+SYM_FUNC_START_WEAK_PI(strncmp)
 	cbz	limit, .Lret0
 	eor	tmp1, src1, src2
 	mov	zeroones, #REP8_01
@@ -295,5 +295,5 @@ CPU_BE( orr	syndrome, diff, has_nul )
 .Lret0:
 	mov	result, #0
 	ret
-ENDPIPROC(strncmp)
+SYM_FUNC_END_PI(strncmp)
 EXPORT_SYMBOL_NOKASAN(strncmp)
@@ -47,7 +47,7 @@ limit_wd	.req	x14
 #define REP8_7f	0x7f7f7f7f7f7f7f7f
 #define REP8_80	0x8080808080808080
 
-WEAK(strnlen)
+SYM_FUNC_START_WEAK_PI(strnlen)
 	cbz	limit, .Lhit_limit
 	mov	zeroones, #REP8_01
 	bic	src, srcin, #15
@@ -156,5 +156,5 @@ CPU_LE( lsr	tmp2, tmp2, tmp4 )	/* Shift (tmp1 & 63).  */
 .Lhit_limit:
 	mov	len, limit
 	ret
-ENDPIPROC(strnlen)
+SYM_FUNC_END_PI(strnlen)
 EXPORT_SYMBOL_NOKASAN(strnlen)
@@ -18,7 +18,7 @@
  * Returns:
  *	x0 - address of last occurrence of 'c' or 0
  */
-WEAK(strrchr)
+SYM_FUNC_START_WEAK_PI(strrchr)
 	mov	x3, #0
 	and	w1, w1, #0xff
 1:	ldrb	w2, [x0], #1
@@ -29,5 +29,5 @@ WEAK(strrchr)
 	b	1b
 2:	mov	x0, x3
 	ret
-ENDPIPROC(strrchr)
+SYM_FUNC_END_PI(strrchr)
 EXPORT_SYMBOL_NOKASAN(strrchr)
@@ -7,7 +7,7 @@
 
 #include <asm/assembler.h>
 
-ENTRY(__ashlti3)
+SYM_FUNC_START(__ashlti3)
 	cbz	x2, 1f
 	mov	x3, #64
 	sub	x3, x3, x2
@@ -26,10 +26,10 @@ ENTRY(__ashlti3)
 	lsl	x1, x0, x1
 	mov	x0, x2
 	ret
-ENDPROC(__ashlti3)
+SYM_FUNC_END(__ashlti3)
 EXPORT_SYMBOL(__ashlti3)
 
-ENTRY(__ashrti3)
+SYM_FUNC_START(__ashrti3)
 	cbz	x2, 1f
 	mov	x3, #64
 	sub	x3, x3, x2
@@ -48,10 +48,10 @@ ENTRY(__ashrti3)
 	asr	x0, x1, x0
 	mov	x1, x2
 	ret
-ENDPROC(__ashrti3)
+SYM_FUNC_END(__ashrti3)
 EXPORT_SYMBOL(__ashrti3)
 
-ENTRY(__lshrti3)
+SYM_FUNC_START(__lshrti3)
 	cbz	x2, 1f
 	mov	x3, #64
 	sub	x3, x3, x2
@@ -70,5 +70,5 @@ ENTRY(__lshrti3)
 	lsr	x0, x1, x0
 	mov	x1, x2
 	ret
-ENDPROC(__lshrti3)
+SYM_FUNC_END(__lshrti3)
 EXPORT_SYMBOL(__lshrti3)
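
These are the libgcc-style 128-bit shift helpers the compiler emits calls to: x0/x1 carry the low/high halves of the 128-bit operand and x2 the shift count, with the result returned in the same register pair. A worked call (illustrative; assumes the standard AAPCS64 register layout for __int128):

	mov	x0, #0xf000000000000000	// low 64 bits of the operand
	mov	x1, #1			// high 64 bits of the operand
	mov	x2, #4			// shift amount
	bl	__ashlti3		// 128-bit left shift by 4
	// result: x1 = 0x1f, x0 = 0 (the top nibble of the low
	// half carries into the high half)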
@@ -24,7 +24,7 @@
  *	- start   - virtual start address of region
  *	- end     - virtual end address of region
  */
-ENTRY(__flush_icache_range)
+SYM_FUNC_START(__flush_icache_range)
 	/* FALLTHROUGH */
 
 /*
@@ -37,7 +37,7 @@ ENTRY(__flush_icache_range)
  *	- start   - virtual start address of region
  *	- end     - virtual end address of region
  */
-ENTRY(__flush_cache_user_range)
+SYM_FUNC_START(__flush_cache_user_range)
 	uaccess_ttbr0_enable x2, x3, x4
 alternative_if ARM64_HAS_CACHE_IDC
 	dsb	ishst
@@ -66,8 +66,8 @@ alternative_else_nop_endif
 9:
 	mov	x0, #-EFAULT
 	b	1b
-ENDPROC(__flush_icache_range)
-ENDPROC(__flush_cache_user_range)
+SYM_FUNC_END(__flush_icache_range)
+SYM_FUNC_END(__flush_cache_user_range)
 
 /*
  * invalidate_icache_range(start,end)
@@ -77,7 +77,7 @@ ENDPROC(__flush_cache_user_range)
  *	- start   - virtual start address of region
  *	- end     - virtual end address of region
  */
-ENTRY(invalidate_icache_range)
+SYM_FUNC_START(invalidate_icache_range)
 alternative_if ARM64_HAS_CACHE_DIC
 	mov	x0, xzr
 	isb
@@ -94,7 +94,7 @@ alternative_else_nop_endif
 2:
 	mov	x0, #-EFAULT
 	b	1b
-ENDPROC(invalidate_icache_range)
+SYM_FUNC_END(invalidate_icache_range)
 
 /*
  * __flush_dcache_area(kaddr, size)
@@ -105,10 +105,10 @@ ENDPROC(invalidate_icache_range)
  *	- kaddr   - kernel address
  *	- size    - size in question
  */
-ENTRY(__flush_dcache_area)
+SYM_FUNC_START_PI(__flush_dcache_area)
 	dcache_by_line_op civac, sy, x0, x1, x2, x3
 	ret
-ENDPIPROC(__flush_dcache_area)
+SYM_FUNC_END_PI(__flush_dcache_area)
 
 /*
  * __clean_dcache_area_pou(kaddr, size)
@@ -119,14 +119,14 @@ ENDPIPROC(__flush_dcache_area)
  *	- kaddr   - kernel address
  *	- size    - size in question
  */
-ENTRY(__clean_dcache_area_pou)
+SYM_FUNC_START(__clean_dcache_area_pou)
 alternative_if ARM64_HAS_CACHE_IDC
 	dsb	ishst
 	ret
 alternative_else_nop_endif
 	dcache_by_line_op cvau, ish, x0, x1, x2, x3
 	ret
-ENDPROC(__clean_dcache_area_pou)
+SYM_FUNC_END(__clean_dcache_area_pou)
 
 /*
  * __inval_dcache_area(kaddr, size)
@@ -138,7 +138,8 @@ ENDPROC(__clean_dcache_area_pou)
  *	- kaddr   - kernel address
  *	- size    - size in question
  */
-ENTRY(__inval_dcache_area)
+SYM_FUNC_START_LOCAL(__dma_inv_area)
+SYM_FUNC_START_PI(__inval_dcache_area)
 	/* FALLTHROUGH */
 
 /*
@@ -146,7 +147,6 @@ ENTRY(__inval_dcache_area)
  *	- start   - virtual start address of region
  *	- size    - size in question
  */
-__dma_inv_area:
 	add	x1, x1, x0
 	dcache_line_size x2, x3
 	sub	x3, x2, #1
@@ -165,8 +165,8 @@ __dma_inv_area:
 	b.lo	2b
 	dsb	sy
 	ret
-ENDPIPROC(__inval_dcache_area)
-ENDPROC(__dma_inv_area)
+SYM_FUNC_END_PI(__inval_dcache_area)
+SYM_FUNC_END(__dma_inv_area)
 
 /*
  * __clean_dcache_area_poc(kaddr, size)
@@ -177,7 +177,8 @@ ENDPROC(__dma_inv_area)
  *	- kaddr   - kernel address
  *	- size    - size in question
  */
-ENTRY(__clean_dcache_area_poc)
+SYM_FUNC_START_LOCAL(__dma_clean_area)
+SYM_FUNC_START_PI(__clean_dcache_area_poc)
 	/* FALLTHROUGH */
 
 /*
@@ -185,11 +186,10 @@ ENTRY(__clean_dcache_area_poc)
  *	- start   - virtual start address of region
  *	- size    - size in question
  */
-__dma_clean_area:
 	dcache_by_line_op cvac, sy, x0, x1, x2, x3
 	ret
-ENDPIPROC(__clean_dcache_area_poc)
-ENDPROC(__dma_clean_area)
+SYM_FUNC_END_PI(__clean_dcache_area_poc)
+SYM_FUNC_END(__dma_clean_area)
 
 /*
  * __clean_dcache_area_pop(kaddr, size)
@@ -200,13 +200,13 @@ ENDPROC(__dma_clean_area)
  *	- kaddr   - kernel address
  *	- size    - size in question
  */
-ENTRY(__clean_dcache_area_pop)
+SYM_FUNC_START_PI(__clean_dcache_area_pop)
 alternative_if_not ARM64_HAS_DCPOP
 	b	__clean_dcache_area_poc
 alternative_else_nop_endif
 	dcache_by_line_op cvap, sy, x0, x1, x2, x3
 	ret
-ENDPIPROC(__clean_dcache_area_pop)
+SYM_FUNC_END_PI(__clean_dcache_area_pop)
 
 /*
  * __dma_flush_area(start, size)
@@ -216,10 +216,10 @@ ENDPIPROC(__clean_dcache_area_pop)
  *	- start   - virtual start address of region
  *	- size    - size in question
  */
-ENTRY(__dma_flush_area)
+SYM_FUNC_START_PI(__dma_flush_area)
 	dcache_by_line_op civac, sy, x0, x1, x2, x3
 	ret
-ENDPIPROC(__dma_flush_area)
+SYM_FUNC_END_PI(__dma_flush_area)
 
 /*
  * __dma_map_area(start, size, dir)
@@ -227,11 +227,11 @@ ENDPIPROC(__dma_flush_area)
  *	- size    - size of region
  *	- dir     - DMA direction
  */
-ENTRY(__dma_map_area)
+SYM_FUNC_START_PI(__dma_map_area)
 	cmp	w2, #DMA_FROM_DEVICE
 	b.eq	__dma_inv_area
 	b	__dma_clean_area
-ENDPIPROC(__dma_map_area)
+SYM_FUNC_END_PI(__dma_map_area)
 
 /*
  * __dma_unmap_area(start, size, dir)
@@ -239,8 +239,8 @@ ENDPIPROC(__dma_map_area)
  *	- size    - size of region
  *	- dir     - DMA direction
  */
-ENTRY(__dma_unmap_area)
+SYM_FUNC_START_PI(__dma_unmap_area)
 	cmp	w2, #DMA_TO_DEVICE
 	b.ne	__dma_inv_area
 	ret
-ENDPIPROC(__dma_unmap_area)
+SYM_FUNC_END_PI(__dma_unmap_area)
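
One structural change worth noting above: __dma_inv_area and __dma_clean_area used to be bare labels dropped partway into the public routines; the conversion hoists each to a SYM_FUNC_START_LOCAL placed immediately before the corresponding SYM_FUNC_START_PI, so the file-local helper and the public entry share one body and every START gets a properly paired END. Schematically (illustrative sketch):

SYM_FUNC_START_LOCAL(__dma_inv_area)	// file-local symbol, no .globl emitted
SYM_FUNC_START_PI(__inval_dcache_area)	// global entry plus __pi_ alias
	/* shared body: all three symbols resolve to this address */
	...
	ret
SYM_FUNC_END_PI(__inval_dcache_area)
SYM_FUNC_END(__dma_inv_area)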
@@ -57,7 +57,7 @@
  *
  * x0: virtual address of context pointer
  */
-ENTRY(cpu_do_suspend)
+SYM_FUNC_START(cpu_do_suspend)
 	mrs	x2, tpidr_el0
 	mrs	x3, tpidrro_el0
 	mrs	x4, contextidr_el1
@@ -81,7 +81,7 @@ alternative_endif
 	stp	x10, x11, [x0, #64]
 	stp	x12, x13, [x0, #80]
 	ret
-ENDPROC(cpu_do_suspend)
+SYM_FUNC_END(cpu_do_suspend)
 
 /**
  * cpu_do_resume - restore CPU register context
@@ -89,7 +89,7 @@ ENDPROC(cpu_do_suspend)
  * x0: Address of context pointer
  */
 	.pushsection ".idmap.text", "awx"
-ENTRY(cpu_do_resume)
+SYM_FUNC_START(cpu_do_resume)
 	ldp	x2, x3, [x0]
 	ldp	x4, x5, [x0, #16]
 	ldp	x6, x8, [x0, #32]
@@ -138,7 +138,7 @@ alternative_else_nop_endif
 	isb
 	ret
-ENDPROC(cpu_do_resume)
+SYM_FUNC_END(cpu_do_resume)
 	.popsection
 #endif
@@ -149,7 +149,7 @@ ENDPROC(cpu_do_resume)
  *
  *	- pgd_phys - physical address of new TTB
  */
-ENTRY(cpu_do_switch_mm)
+SYM_FUNC_START(cpu_do_switch_mm)
 	mrs	x2, ttbr1_el1
 	mmid	x1, x1				// get mm->context.id
 	phys_to_ttbr x3, x0
@@ -168,7 +168,7 @@ alternative_else_nop_endif
 	msr	ttbr0_el1, x3			// now update TTBR0
 	isb
 	b	post_ttbr_update_workaround	// Back to C code...
-ENDPROC(cpu_do_switch_mm)
+SYM_FUNC_END(cpu_do_switch_mm)
 
 	.pushsection ".idmap.text", "awx"
@@ -189,7 +189,7 @@ ENDPROC(cpu_do_switch_mm)
  * This is the low-level counterpart to cpu_replace_ttbr1, and should not be
  * called by anything else. It can only be executed from a TTBR0 mapping.
  */
-ENTRY(idmap_cpu_replace_ttbr1)
+SYM_FUNC_START(idmap_cpu_replace_ttbr1)
 	save_and_disable_daif flags=x2
 	__idmap_cpu_set_reserved_ttbr1 x1, x3
@@ -201,7 +201,7 @@ ENTRY(idmap_cpu_replace_ttbr1)
 	restore_daif x2
 	ret
-ENDPROC(idmap_cpu_replace_ttbr1)
+SYM_FUNC_END(idmap_cpu_replace_ttbr1)
 	.popsection
 
 #ifdef CONFIG_UNMAP_KERNEL_AT_EL0
@@ -229,7 +229,7 @@ ENDPROC(idmap_cpu_replace_ttbr1)
  */
 __idmap_kpti_flag:
 	.long	1
-ENTRY(idmap_kpti_install_ng_mappings)
+SYM_FUNC_START(idmap_kpti_install_ng_mappings)
 	cpu		.req	w0
 	num_cpus	.req	w1
 	swapper_pa	.req	x2
@@ -401,7 +401,7 @@ __idmap_kpti_secondary:
 	.unreq	swapper_ttb
 	.unreq	flag_ptr
-ENDPROC(idmap_kpti_install_ng_mappings)
+SYM_FUNC_END(idmap_kpti_install_ng_mappings)
 	.popsection
 #endif
@@ -412,7 +412,7 @@ ENDPROC(idmap_kpti_install_ng_mappings)
  * value of the SCTLR_EL1 register.
  */
 	.pushsection ".idmap.text", "awx"
-ENTRY(__cpu_setup)
+SYM_FUNC_START(__cpu_setup)
 	tlbi	vmalle1				// Invalidate local TLB
 	dsb	nsh
@@ -469,4 +469,4 @@ ENTRY(__cpu_setup)
 #endif	/* CONFIG_ARM64_HW_AFDBM */
 	msr	tcr_el1, x10
 	ret					// return to head.S
-ENDPROC(__cpu_setup)
+SYM_FUNC_END(__cpu_setup)
@@ -56,11 +56,11 @@
 #define XEN_IMM 0xEA1
 
 #define HYPERCALL_SIMPLE(hypercall)		\
-ENTRY(HYPERVISOR_##hypercall)			\
+SYM_FUNC_START(HYPERVISOR_##hypercall)		\
 	mov x16, #__HYPERVISOR_##hypercall;	\
 	hvc XEN_IMM;				\
 	ret;					\
-ENDPROC(HYPERVISOR_##hypercall)
+SYM_FUNC_END(HYPERVISOR_##hypercall)
 
 #define HYPERCALL0 HYPERCALL_SIMPLE
 #define HYPERCALL1 HYPERCALL_SIMPLE
@@ -86,7 +86,7 @@ HYPERCALL2(multicall);
 HYPERCALL2(vm_assist);
 HYPERCALL3(dm_op);
 
-ENTRY(privcmd_call)
+SYM_FUNC_START(privcmd_call)
 	mov x16, x0
 	mov x0, x1
 	mov x1, x2
@@ -109,4 +109,4 @@ ENTRY(privcmd_call)
  */
 	uaccess_ttbr0_disable x6, x7
 	ret
-ENDPROC(privcmd_call);
+SYM_FUNC_END(privcmd_call);
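
For reference, HYPERCALL_SIMPLE only loads the hypercall number and traps; the arguments are already in the right registers per the calling convention. HYPERCALL2(multicall) from this file therefore expands to roughly:

SYM_FUNC_START(HYPERVISOR_multicall)
	mov	x16, #__HYPERVISOR_multicall	// hypercall number in x16
	hvc	XEN_IMM				// trap to Xen (immediate 0xEA1)
	ret
SYM_FUNC_END(HYPERVISOR_multicall)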