/*
 * Copyright (c) 2017-2024, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <cortex_a17.h>
#include <cpu_macros.S>

	/* Assert that the data cache is enabled (assertion builds only). */
	.macro assert_cache_enabled
#if ENABLE_ASSERTIONS
		ldcopr	r0, SCTLR
		tst	r0, #SCTLR_C_BIT
		ASM_ASSERT(eq)
#endif
	.endm

	/* ----------------------------------------------------
	 * Take the calling core out of coherency by clearing
	 * the SMP bit in ACTLR.
	 * ----------------------------------------------------
	 */
func cortex_a17_disable_smp
	ldcopr	r0, ACTLR
	bic	r0, #CORTEX_A17_ACTLR_SMP_BIT
	stcopr	r0, ACTLR
	isb
	dsb	sy
	bx	lr
endfunc cortex_a17_disable_smp

	/* ----------------------------------------------------
	 * Bring the calling core into coherency by setting
	 * the SMP bit in ACTLR.
	 * ----------------------------------------------------
	 */
func cortex_a17_enable_smp
	ldcopr	r0, ACTLR
	orr	r0, #CORTEX_A17_ACTLR_SMP_BIT
	stcopr	r0, ACTLR
	isb
	bx	lr
endfunc cortex_a17_enable_smp

	/* ----------------------------------------------------
	 * Errata Workaround for Cortex A17 Errata #852421.
	 * This applies only to revision <= r1p2 of Cortex A17.
	 * Inputs:
	 * r0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: r0-r3
	 * ----------------------------------------------------
	 */
func errata_a17_852421_wa
	/*
	 * Compare r0 against revision r1p2
	 */
	mov	r2, lr
	bl	check_errata_852421
	cmp	r0, #ERRATA_NOT_APPLIES
	beq	1f
	ldcopr	r0, CORTEX_A17_IMP_DEF_REG1
	orr	r0, r0, #(1<<24)
	stcopr	r0, CORTEX_A17_IMP_DEF_REG1
1:
	bx	r2
endfunc errata_a17_852421_wa

func check_errata_852421
	mov	r1, #0x12
	b	cpu_rev_var_ls
endfunc check_errata_852421

add_erratum_entry cortex_a17, ERRATUM(852421), ERRATA_A17_852421

	/* ----------------------------------------------------
	 * Errata Workaround for Cortex A17 Errata #852423.
	 * This applies only to revision <= r1p2 of Cortex A17.
	 * Inputs:
	 * r0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: r0-r3
	 * ----------------------------------------------------
	 */
func errata_a17_852423_wa
	/*
	 * Compare r0 against revision r1p2
	 */
	mov	r2, lr
	bl	check_errata_852423
	cmp	r0, #ERRATA_NOT_APPLIES
	beq	1f
	ldcopr	r0, CORTEX_A17_IMP_DEF_REG1
	orr	r0, r0, #(1<<12)
	stcopr	r0, CORTEX_A17_IMP_DEF_REG1
1:
	bx	r2
endfunc errata_a17_852423_wa

func check_errata_852423
	mov	r1, #0x12
	b	cpu_rev_var_ls
endfunc check_errata_852423

add_erratum_entry cortex_a17, ERRATUM(852423), ERRATA_A17_852423

func check_errata_cve_2017_5715
#if WORKAROUND_CVE_2017_5715
	mov	r0, #ERRATA_APPLIES
#else
	mov	r0, #ERRATA_MISSING
#endif
	bx	lr
endfunc check_errata_cve_2017_5715

add_erratum_entry cortex_a17, CVE(2017, 5715), WORKAROUND_CVE_2017_5715

	/* ----------------------------------------------------
	 * Reset handler: apply errata workarounds, install the
	 * CVE-2017-5715 workaround vectors if enabled, and
	 * enable SMP coherency.
	 * ----------------------------------------------------
	 */
func cortex_a17_reset_func
	mov	r5, lr
	bl	cpu_get_rev_var
	mov	r4, r0

#if ERRATA_A17_852421
	mov	r0, r4
	bl	errata_a17_852421_wa
#endif

#if ERRATA_A17_852423
	mov	r0, r4
	bl	errata_a17_852423_wa
#endif

#if IMAGE_BL32 && WORKAROUND_CVE_2017_5715
	ldr	r0, =wa_cve_2017_5715_bpiall_vbar
	stcopr	r0, VBAR
	stcopr	r0, MVBAR
	/* isb will be applied in the course of the reset func */
#endif

	mov	lr, r5
	b	cortex_a17_enable_smp
endfunc cortex_a17_reset_func

func cortex_a17_core_pwr_dwn
	push	{r12, lr}

	assert_cache_enabled

	/* Flush L1 cache */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	/* Exit cluster coherency */
	pop	{r12, lr}
	b	cortex_a17_disable_smp
endfunc cortex_a17_core_pwr_dwn

func cortex_a17_cluster_pwr_dwn
	push	{r12, lr}

	assert_cache_enabled

	/* Flush L1 caches */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	bl	plat_disable_acp

	/* Flush L2 caches */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level2

	/* Exit cluster coherency */
	pop	{r12, lr}
	b	cortex_a17_disable_smp
endfunc cortex_a17_cluster_pwr_dwn

declare_cpu_ops cortex_a17, CORTEX_A17_MIDR, \
	cortex_a17_reset_func, \
	cortex_a17_core_pwr_dwn, \
	cortex_a17_cluster_pwr_dwn