@@ -75,8 +75,22 @@ extern void sync_icache_aliases(void *kaddr, unsigned long len);
static inline void __flush_cache_user_range(unsigned long start,
unsigned long end)
{
+ if (cpus_have_const_cap(ARM64_HAS_CACHE_IDC)) {
+ dsb(ishst);
+ if (cpus_have_const_cap(ARM64_HAS_CACHE_DIC)) {
+ isb();
+ return;
+ }
+ }
+
uaccess_ttbr0_enable();
__asm_flush_cache_user_range(start, end);
+
+ if (cpus_have_const_cap(ARM64_HAS_CACHE_DIC))
+ isb();
+ else
+ __asm_invalidate_icache_range(start, end);
+
uaccess_ttbr0_disable();
}
@@ -90,6 +104,11 @@ static inline int invalidate_icache_range(unsigned long start,
{
int ret;
+ if (cpus_have_const_cap(ARM64_HAS_CACHE_DIC)) {
+ isb();
+ return 0;
+ }
+
uaccess_ttbr0_enable();
ret = __asm_invalidate_icache_range(start, end);
uaccess_ttbr0_disable();
@@ -25,30 +25,18 @@
* - end - virtual end address of region
*/
ENTRY(__asm_flush_cache_user_range)
-alternative_if ARM64_HAS_CACHE_IDC
- dsb ishst
- b 7f
-alternative_else_nop_endif
dcache_line_size x2, x3
sub x3, x2, #1
bic x4, x0, x3
-1:
-user_alt 9f, "dc cvau, x4", "dc civac, x4", ARM64_WORKAROUND_CLEAN_CACHE
+1: user_alt 3f, "dc cvau, x4", "dc civac, x4", ARM64_WORKAROUND_CLEAN_CACHE
add x4, x4, x2
cmp x4, x1
b.lo 1b
dsb ish
-
-7:
-alternative_if ARM64_HAS_CACHE_DIC
- isb
- b 8f
-alternative_else_nop_endif
- invalidate_icache_by_line x0, x1, x2, x3, 9f
-8: mov x0, #0
-1: ret
-9: mov x0, #-EFAULT
- b 1b
+ mov x0, #0
+2: ret
+3: mov x0, #-EFAULT
+ b 2b
ENDPROC(__asm_flush_cache_user_range)
/*
@@ -60,11 +48,6 @@ ENDPROC(__asm_flush_cache_user_range)
* - end - virtual end address of region
*/
ENTRY(__asm_invalidate_icache_range)
-alternative_if ARM64_HAS_CACHE_DIC
- mov x0, xzr
- isb
- ret
-alternative_else_nop_endif
invalidate_icache_by_line x0, x1, x2, x3, 2f
mov x0, xzr
1: ret
@@ -76,6 +76,7 @@ EXPORT_SYMBOL(flush_dcache_page);
* Additional functions defined in assembly.
*/
EXPORT_SYMBOL(__asm_flush_cache_user_range);
+EXPORT_SYMBOL(__asm_invalidate_icache_range);
#ifdef CONFIG_ARCH_HAS_PMEM_API
void arch_wb_cache_pmem(void *addr, size_t size)
The assembly functions __asm_flush_cache_user_range and __asm_invalidate_icache_range have alternatives: alternative_if ARM64_HAS_CACHE_DIC ... alternative_if ARM64_HAS_CACHE_IDC ... But the implementation of those alternatives is trivial and therefore can be done in the C inline wrappers. Signed-off-by: Pavel Tatashin <pasha.tatashin@soleen.com> --- arch/arm64/include/asm/cacheflush.h | 19 +++++++++++++++++++ arch/arm64/mm/cache.S | 27 +++++---------------------- arch/arm64/mm/flush.c | 1 + 3 files changed, 25 insertions(+), 22 deletions(-)