1/* 2 * Copyright (c) 2024, Altera Corporation. All rights reserved. 3 * 4 * SPDX-License-Identifier: BSD-3-Clause 5 */ 6#include <arch.h> 7#include <asm_macros.S> 8#include <cpu_macros.S> 9#include <plat_macros.S> 10 11 .globl invalidate_dcache_all 12 .globl invalidate_cache_low_el 13 /* -------------------------------------------------------- 14 * Invalidate for NS EL2 and EL1 15 * -------------------------------------------------------- 16 */ 17func invalidate_cache_low_el 18 mrs x0,SCR_EL3 19 orr x1,x0,#SCR_NS_BIT 20 msr SCR_EL3, x1 21 isb 22 tlbi ALLE2 23 dsb sy 24 tlbi ALLE1 25 dsb sy 26endfunc invalidate_cache_low_el 27 28.pushsection .text.asm_dcache_level, "ax" 29func asm_dcache_level 30 lsl x12, x0, #1 31 msr csselr_el1, x12 /* select cache level */ 32 isb /* sync change of cssidr_el1 */ 33 mrs x6, ccsidr_el1 /* read the new cssidr_el1 */ 34 ubfx x2, x6, #0, #3 /* x2 <- log2(cache line size)-4 */ 35 ubfx x3, x6, #3, #10 /* x3 <- number of cache ways - 1 */ 36 ubfx x4, x6, #13, #15 /* x4 <- number of cache sets - 1 */ 37 add x2, x2, #4 /* x2 <- log2(cache line size) */ 38 clz w5, w3 /* bit position of #ways */ 39 /* x12 <- cache level << 1 */ 40 /* x2 <- line length offset */ 41 /* x3 <- number of cache ways - 1 */ 42 /* x4 <- number of cache sets - 1 */ 43 /* x5 <- bit position of #ways */ 44 45loop_set: 46 mov x6, x3 /* x6 <- working copy of #ways */ 47loop_way: 48 lsl x7, x6, x5 49 orr x9, x12, x7 /* map way and level to cisw value */ 50 lsl x7, x4, x2 51 orr x9, x9, x7 /* map set number to cisw value */ 52 tbz w1, #0, 1f 53 dc isw, x9 54 b 2f 551: dc cisw, x9 /* clean & invalidate by set/way */ 562: subs x6, x6, #1 /* decrement the way */ 57 b.ge loop_way 58 subs x4, x4, #1 /* decrement the set */ 59 b.ge loop_set 60 61 ret 62endfunc asm_dcache_level 63.popsection 64 65/* 66 * void __asm_flush_dcache_all(int invalidate_only) 67 * 68 * x0: 0 clean & invalidate, 1 invalidate only 69 * 70 * flush or invalidate all data cache by SET/WAY. 71 */ 72.pushsection .text.asm_dcache_all, "ax" 73func asm_dcache_all 74 mov x1, x0 75 dsb sy 76 mrs x10, clidr_el1 /* read clidr_el1 */ 77 ubfx x11, x10, #24, #3 /* x11 <- loc */ 78 cbz x11, finished /* if loc is 0, exit */ 79 mov x15, x30 80 mov x0, #0 /* start flush at cache level 0 */ 81 /* x0 <- cache level */ 82 /* x10 <- clidr_el1 */ 83 /* x11 <- loc */ 84 /* x15 <- return address */ 85 86loop_level: 87 add x12, x0, x0, lsl #1 /* x12 <- tripled cache level */ 88 lsr x12, x10, x12 89 and x12, x12, #7 /* x12 <- cache type */ 90 cmp x12, #2 91 b.lt skip /* skip if no cache or icache */ 92 bl asm_dcache_level /* x1 = 0 flush, 1 invalidate */ 93skip: 94 add x0, x0, #1 /* increment cache level */ 95 cmp x11, x0 96 b.gt loop_level 97 98 mov x0, #0 99 msr csselr_el1, x0 /* restore csselr_el1 */ 100 dsb sy 101 isb 102 mov x30, x15 103 104finished: 105 ret 106endfunc asm_dcache_all 107.popsection 108 109.pushsection .text.invalidate_dcache_all, "ax" 110func invalidate_dcache_all 111 mov x0, #0x1 112 b asm_dcache_all 113endfunc invalidate_dcache_all 114.popsection 115