Commit 163d3f80 authored by Fuad Tabba, committed by Will Deacon

arm64: dcache_by_line_op to take end parameter instead of size

To be consistent with other functions with similar names and
functionality in cacheflush.h, cache.S, and cachetlb.rst, change
dcache_by_line_op to specify the range in terms of start and end
addresses, as opposed to start and size.

No functional change intended.
Reported-by: Will Deacon <will@kernel.org>
Acked-by: Mark Rutland <mark.rutland@arm.com>
Signed-off-by: Fuad Tabba <tabba@google.com>
Reviewed-by: Ard Biesheuvel <ardb@kernel.org>
Link: https://lore.kernel.org/r/20210524083001.2586635-12-tabba@google.com
Signed-off-by: Will Deacon <will@kernel.org>
parent e3974adb
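For reference, a minimal C sketch of the semantics the reworked macro
implements: the caller now passes a half-open [start, end) range, the
start address is aligned down to the data cache line size, and the
maintenance operation is applied one line at a time until end is
reached. This is illustrative only, not kernel code; the names
dcache_by_line_op_sketch, dc_line_size and dc_op are hypothetical
stand-ins for the dcache_line_size macro and the per-line dc
instruction.

        #include <stdint.h>

        /*
         * Illustrative sketch of dcache_by_line_op's loop over [start, end).
         * dc_op stands in for the "dc <op>, Xt" instruction issued per line.
         */
        static void dcache_by_line_op_sketch(uintptr_t start, uintptr_t end,
                                             uintptr_t dc_line_size,
                                             void (*dc_op)(uintptr_t))
        {
                start &= ~(dc_line_size - 1);   /* bic start, start, (line - 1) */
                do {
                        dc_op(start);           /* dc <op>, start */
                        start += dc_line_size;  /* add start, start, line */
                } while (start < end);          /* cmp start, end; b.lo loop */
        }

Callers that previously passed a size in the second argument now compute
the end address up front, which is what the "add x1, x0, x1" lines added
throughout the diff below do.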
@@ -397,40 +397,39 @@ alternative_endif
 /*
  * Macro to perform a data cache maintenance for the interval
- * [addr, addr + size)
+ * [start, end)
  *
  * op:          operation passed to dc instruction
  * domain:      domain used in dsb instruciton
- * addr:        starting virtual address of the region
- * size:        size of the region
+ * start:       starting virtual address of the region
+ * end:         end virtual address of the region
  * fixup:       optional label to branch to on user fault
- * Corrupts:    addr, size, tmp1, tmp2
+ * Corrupts:    start, end, tmp1, tmp2
  */
-        .macro dcache_by_line_op op, domain, addr, size, tmp1, tmp2, fixup
+        .macro dcache_by_line_op op, domain, start, end, tmp1, tmp2, fixup
         dcache_line_size \tmp1, \tmp2
-        add     \size, \addr, \size
         sub     \tmp2, \tmp1, #1
-        bic     \addr, \addr, \tmp2
+        bic     \start, \start, \tmp2
 .Ldcache_op\@:
         .ifc    \op, cvau
-        __dcache_op_workaround_clean_cache \op, \addr
+        __dcache_op_workaround_clean_cache \op, \start
         .else
         .ifc    \op, cvac
-        __dcache_op_workaround_clean_cache \op, \addr
+        __dcache_op_workaround_clean_cache \op, \start
         .else
         .ifc    \op, cvap
-        sys     3, c7, c12, 1, \addr    // dc cvap
+        sys     3, c7, c12, 1, \start   // dc cvap
         .else
         .ifc    \op, cvadp
-        sys     3, c7, c13, 1, \addr    // dc cvadp
+        sys     3, c7, c13, 1, \start   // dc cvadp
         .else
-        dc      \op, \addr
+        dc      \op, \start
         .endif
         .endif
         .endif
         .endif
-        add     \addr, \addr, \tmp1
-        cmp     \addr, \size
+        add     \start, \start, \tmp1
+        cmp     \start, \end
         b.lo    .Ldcache_op\@
         dsb     \domain
@@ -8,6 +8,7 @@
 #include <asm/alternative.h>
 SYM_FUNC_START_PI(__flush_dcache_area)
+        add     x1, x0, x1
         dcache_by_line_op civac, sy, x0, x1, x2, x3
         ret
 SYM_FUNC_END_PI(__flush_dcache_area)
@@ -31,7 +31,7 @@ alternative_if ARM64_HAS_CACHE_IDC
         b       .Ldc_skip_\@
 alternative_else_nop_endif
         mov     x2, x0
-        sub     x3, x1, x0
+        mov     x3, x1
         dcache_by_line_op cvau, ish, x2, x3, x4, x5, \fixup
 .Ldc_skip_\@:
 alternative_if ARM64_HAS_CACHE_DIC
@@ -108,6 +108,7 @@ SYM_FUNC_END(invalidate_icache_range)
  * - size  - size in question
  */
 SYM_FUNC_START_PI(__flush_dcache_area)
+        add     x1, x0, x1
         dcache_by_line_op civac, sy, x0, x1, x2, x3
         ret
 SYM_FUNC_END_PI(__flush_dcache_area)
@@ -126,6 +127,7 @@ alternative_if ARM64_HAS_CACHE_IDC
         dsb     ishst
         ret
 alternative_else_nop_endif
+        add     x1, x0, x1
         dcache_by_line_op cvau, ish, x0, x1, x2, x3
         ret
 SYM_FUNC_END(__clean_dcache_area_pou)
@@ -187,6 +189,7 @@ SYM_FUNC_START_PI(__clean_dcache_area_poc)
  * - start - virtual start address of region
  * - size  - size in question
  */
+        add     x1, x0, x1
         dcache_by_line_op cvac, sy, x0, x1, x2, x3
         ret
 SYM_FUNC_END_PI(__clean_dcache_area_poc)
@@ -205,6 +208,7 @@ SYM_FUNC_START_PI(__clean_dcache_area_pop)
 alternative_if_not ARM64_HAS_DCPOP
         b       __clean_dcache_area_poc
 alternative_else_nop_endif
+        add     x1, x0, x1
         dcache_by_line_op cvap, sy, x0, x1, x2, x3
         ret
 SYM_FUNC_END_PI(__clean_dcache_area_pop)
@@ -218,6 +222,7 @@ SYM_FUNC_END_PI(__clean_dcache_area_pop)
  * - size  - size in question
  */
 SYM_FUNC_START_PI(__dma_flush_area)
+        add     x1, x0, x1
         dcache_by_line_op civac, sy, x0, x1, x2, x3
         ret
 SYM_FUNC_END_PI(__dma_flush_area)