From ace9429bb58fd418f0c81d4c2835699bddf6bde6 Mon Sep 17 00:00:00 2001
From: Daniel Baumann
Date: Thu, 11 Apr 2024 10:27:49 +0200
Subject: Adding upstream version 6.6.15.

Signed-off-by: Daniel Baumann
---
 arch/arm/mm/cache-v7m.S | 456 ++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 456 insertions(+)
 create mode 100644 arch/arm/mm/cache-v7m.S

diff --git a/arch/arm/mm/cache-v7m.S b/arch/arm/mm/cache-v7m.S
new file mode 100644
index 0000000000..eb60b5e5e2
--- /dev/null
+++ b/arch/arm/mm/cache-v7m.S
@@ -0,0 +1,456 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+/*
+ * linux/arch/arm/mm/cache-v7m.S
+ *
+ * Based on linux/arch/arm/mm/cache-v7.S
+ *
+ * Copyright (C) 2001 Deep Blue Solutions Ltd.
+ * Copyright (C) 2005 ARM Ltd.
+ *
+ * This is the "shell" of the ARMv7M processor support.
+ */
+#include <linux/linkage.h>
+#include <linux/init.h>
+#include <asm/assembler.h>
+#include <asm/errno.h>
+#include <asm/unwind.h>
+#include <asm/v7m.h>
+
+#include "proc-macros.S"
+
+.arch armv7-m
+
+/* Generic V7M read/write macros for memory mapped cache operations */
+.macro v7m_cache_read, rt, reg
+	movw	\rt, #:lower16:BASEADDR_V7M_SCB + \reg
+	movt	\rt, #:upper16:BASEADDR_V7M_SCB + \reg
+	ldr	\rt, [\rt]
+.endm
+
+.macro v7m_cacheop, rt, tmp, op, c = al
+	movw\c	\tmp, #:lower16:BASEADDR_V7M_SCB + \op
+	movt\c	\tmp, #:upper16:BASEADDR_V7M_SCB + \op
+	str\c	\rt, [\tmp]
+.endm
+
+
+.macro read_ccsidr, rt
+	v7m_cache_read \rt, V7M_SCB_CCSIDR
+.endm
+
+.macro read_clidr, rt
+	v7m_cache_read \rt, V7M_SCB_CLIDR
+.endm
+
+.macro write_csselr, rt, tmp
+	v7m_cacheop \rt, \tmp, V7M_SCB_CSSELR
+.endm
+
+/*
+ * dcisw: Invalidate data cache by set/way
+ */
+.macro dcisw, rt, tmp
+	v7m_cacheop \rt, \tmp, V7M_SCB_DCISW
+.endm
+
+/*
+ * dccisw: Clean and invalidate data cache by set/way
+ */
+.macro dccisw, rt, tmp
+	v7m_cacheop \rt, \tmp, V7M_SCB_DCCISW
+.endm
+
+/*
+ * dccimvac: Clean and invalidate data cache line by MVA to PoC.
+ */
+.irp c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
+.macro dccimvac\c, rt, tmp
+	v7m_cacheop \rt, \tmp, V7M_SCB_DCCIMVAC, \c
+.endm
+.endr
+
+/*
+ * dcimvac: Invalidate data cache line by MVA to PoC
+ */
+.irp c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
+.macro dcimvac\c, rt, tmp
+	v7m_cacheop \rt, \tmp, V7M_SCB_DCIMVAC, \c
+.endm
+.endr
+
+/*
+ * dccmvau: Clean data cache line by MVA to PoU
+ */
+.macro dccmvau, rt, tmp
+	v7m_cacheop \rt, \tmp, V7M_SCB_DCCMVAU
+.endm
+
+/*
+ * dccmvac: Clean data cache line by MVA to PoC
+ */
+.macro dccmvac, rt, tmp
+	v7m_cacheop \rt, \tmp, V7M_SCB_DCCMVAC
+.endm
+
+/*
+ * icimvau: Invalidate instruction caches by MVA to PoU
+ */
+.macro icimvau, rt, tmp
+	v7m_cacheop \rt, \tmp, V7M_SCB_ICIMVAU
+.endm
+
+/*
+ * Invalidate the icache, inner shareable if SMP, invalidate BTB for UP.
+ * rt data ignored by ICIALLU(IS), so can be used for the address
+ */
+.macro invalidate_icache, rt
+	v7m_cacheop \rt, \rt, V7M_SCB_ICIALLU
+	mov \rt, #0
+.endm
+
+/*
+ * Invalidate the BTB, inner shareable if SMP.
+ * rt data ignored by BPIALL, so it can be used for the address
+ */
+.macro invalidate_bp, rt
+	v7m_cacheop \rt, \rt, V7M_SCB_BPIALL
+	mov \rt, #0
+.endm
+
+ENTRY(v7m_invalidate_l1)
+	mov	r0, #0
+
+	write_csselr r0, r1
+	read_ccsidr r0
+
+	movw	r1, #0x7fff
+	and	r2, r1, r0, lsr #13
+
+	movw	r1, #0x3ff
+
+	and	r3, r1, r0, lsr #3	@ NumWays - 1
+	add	r2, r2, #1		@ NumSets
+
+	and	r0, r0, #0x7
+	add	r0, r0, #4		@ SetShift
+
+	clz	r1, r3			@ WayShift
+	add	r4, r3, #1		@ NumWays
+1:	sub	r2, r2, #1		@ NumSets--
+	mov	r3, r4			@ Temp = NumWays
+2:	subs	r3, r3, #1		@ Temp--
+	mov	r5, r3, lsl r1
+	mov	r6, r2, lsl r0
+	orr	r5, r5, r6		@ Reg = (Temp<<WayShift)|(NumSets<<SetShift)
+	dcisw	r5, r6
+	bgt	2b
+	cmp	r2, #0
+	bgt	1b
+	dsb	st
+	isb
+	ret	lr
+ENDPROC(v7m_invalidate_l1)
+
+	@ define struct cpu_cache_fns (see <asm/cacheflush.h> and proc-macros.S)
+	define_cache_functions v7m
--
cgit v1.2.3
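
A note on the memory-mapped cache operations above: ARMv7-M has no CP15
coprocessor, so the v7m_cacheop macro performs maintenance by storing an
address (or a set/way word) to a register in the System Control Block,
via a movw/movt pair plus str. The C sketch below shows the same idea for
a clean-and-invalidate-by-MVA loop. It is illustrative only: the register
address (0xE000EF70 for DCCIMVAC) and the 32-byte line size are assumptions
typical of Cortex-M7 parts, not taken from this patch, which gets both from
asm/v7m.h and the cache geometry registers.

/*
 * Sketch only (not from the patch): clean+invalidate a buffer to the
 * Point of Coherency by MVA, the C analogue of the dccimvac macro.
 * The register address and line size below are assumptions.
 */
#include <stdint.h>

#define SCB_DCCIMVAC  (*(volatile uint32_t *)0xE000EF70UL) /* assumed DCCIMVAC address */
#define DCACHE_LINE   32u                                  /* assumed D-cache line size */

static void dcache_clean_inv_range(uintptr_t start, uintptr_t end)
{
	uintptr_t mva = start & ~(uintptr_t)(DCACHE_LINE - 1); /* align down to a line */

	__asm__ volatile ("dsb" ::: "memory"); /* drain writes before maintenance */
	for (; mva < end; mva += DCACHE_LINE)
		SCB_DCCIMVAC = mva;            /* the store is the cache operation */
	__asm__ volatile ("dsb" ::: "memory"); /* wait for maintenance to complete */
	__asm__ volatile ("isb" ::: "memory");
}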
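
Similarly, the arithmetic in v7m_invalidate_l1 is easier to follow outside
register form. The sketch below (illustrative only; the CCSIDR field layout
is assumed from the ARMv7-M architecture manual, and the helper name is
invented here) shows how the set/way word written through the dcisw macro is
built: SetShift comes from the encoded line size, and WayShift from counting
leading zeros of the way count, matching the clz in the assembly.

/*
 * Illustrative only: the set/way word that v7m_invalidate_l1 writes via
 * the dcisw macro. CCSIDR layout assumed: LineSize in bits [2:0],
 * (Associativity - 1) in [12:3], (NumSets - 1) in [27:13].
 */
#include <stdint.h>

static inline uint32_t dcisw_word(uint32_t ccsidr, uint32_t set, uint32_t way)
{
	uint32_t ways_m1   = (ccsidr >> 3) & 0x3ff;  /* "NumWays - 1" in the asm */
	uint32_t set_shift = (ccsidr & 0x7) + 4;     /* "SetShift": log2(line bytes) */
	/* "WayShift": ways live in the top bits, hence the clz (GCC/Clang builtin). */
	uint32_t way_bits  = ways_m1 ? way << __builtin_clz(ways_m1) : 0;

	return way_bits | (set << set_shift);        /* Reg = (Temp<<WayShift)|(NumSets<<SetShift) */
}

The assembly then loops way and set down to zero, writing each such word to
the DCISW register before moving to the next set.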