/*
 * Copyright (c) 2017-2022, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <common/debug.h>
#include <cortex_a72.h>
#include <cpu_macros.S>

	/* ---------------------------------------------
	 * Disable all types of L2 prefetches.
	 * ---------------------------------------------
	 */
func cortex_a72_disable_l2_prefetch
	ldcopr16	r0, r1, CORTEX_A72_ECTLR
	orr64_imm	r0, r1, CORTEX_A72_ECTLR_DIS_TWD_ACC_PFTCH_BIT
	bic64_imm	r0, r1, (CORTEX_A72_ECTLR_L2_IPFTCH_DIST_MASK | \
			CORTEX_A72_ECTLR_L2_DPFTCH_DIST_MASK)
	stcopr16	r0, r1, CORTEX_A72_ECTLR
	isb
	bx	lr
endfunc cortex_a72_disable_l2_prefetch

	/* ---------------------------------------------
	 * Disable the load-store hardware prefetcher.
	 * ---------------------------------------------
	 */
func cortex_a72_disable_hw_prefetcher
	ldcopr16	r0, r1, CORTEX_A72_CPUACTLR
	orr64_imm	r0, r1, CORTEX_A72_CPUACTLR_DISABLE_L1_DCACHE_HW_PFTCH
	stcopr16	r0, r1, CORTEX_A72_CPUACTLR
	isb
	dsb	ish
	bx	lr
endfunc cortex_a72_disable_hw_prefetcher

	/* ---------------------------------------------
	 * Disable intra-cluster coherency
	 * Clobbers: r0-r1
	 * ---------------------------------------------
	 */
func cortex_a72_disable_smp
	ldcopr16	r0, r1, CORTEX_A72_ECTLR
	bic64_imm	r0, r1, CORTEX_A72_ECTLR_SMP_BIT
	stcopr16	r0, r1, CORTEX_A72_ECTLR
	bx	lr
endfunc cortex_a72_disable_smp

	/* ---------------------------------------------
	 * Disable debug interfaces
	 * ---------------------------------------------
	 */
func cortex_a72_disable_ext_debug
	mov	r0, #1
	stcopr	r0, DBGOSDLR
	isb
	dsb	sy
	bx	lr
endfunc cortex_a72_disable_ext_debug

	/* ---------------------------------------------------
	 * Errata Workaround for Cortex A72 Errata #859971.
	 * This applies only to revision <= r0p3 of Cortex A72.
	 * Inputs:
	 * r0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: r0-r3
	 * ---------------------------------------------------
	 */
func errata_a72_859971_wa
	mov	r2, lr
	bl	check_errata_859971
	mov	lr, r2
	cmp	r0, #ERRATA_NOT_APPLIES
	beq	1f
	ldcopr16	r0, r1, CORTEX_A72_CPUACTLR
	orr64_imm	r1, r1, CORTEX_A72_CPUACTLR_DIS_INSTR_PREFETCH
	stcopr16	r0, r1, CORTEX_A72_CPUACTLR
1:
	bx	lr
endfunc errata_a72_859971_wa

func check_errata_859971
	mov	r1, #0x03
	b	cpu_rev_var_ls
endfunc check_errata_859971

func check_errata_cve_2017_5715
	mov	r0, #ERRATA_MISSING
	bx	lr
endfunc check_errata_cve_2017_5715

func check_errata_cve_2018_3639
#if WORKAROUND_CVE_2018_3639
	mov	r0, #ERRATA_APPLIES
#else
	mov	r0, #ERRATA_MISSING
#endif
	bx	lr
endfunc check_errata_cve_2018_3639

func check_errata_cve_2022_23960
	mov	r0, #ERRATA_MISSING
	bx	lr
endfunc check_errata_cve_2022_23960
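
	/* ---------------------------------------------------
	 * The check_errata_* helpers above return a status in
	 * r0 (ERRATA_APPLIES, ERRATA_NOT_APPLIES or
	 * ERRATA_MISSING) for the report_errata framework.
	 * The CVE helpers only report status: the CVE-2018-3639
	 * mitigation itself is applied in the reset function
	 * below when WORKAROUND_CVE_2018_3639 is enabled, while
	 * the CVE-2017-5715 and CVE-2022-23960 workarounds are
	 * reported as missing by this CPU file.
	 * ---------------------------------------------------
	 */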

	/* -------------------------------------------------
	 * The CPU Ops reset function for Cortex-A72.
	 * -------------------------------------------------
	 */
func cortex_a72_reset_func
	mov	r5, lr
	bl	cpu_get_rev_var
	mov	r4, r0

#if ERRATA_A72_859971
	mov	r0, r4
	bl	errata_a72_859971_wa
#endif

#if WORKAROUND_CVE_2018_3639
	ldcopr16	r0, r1, CORTEX_A72_CPUACTLR
	orr64_imm	r0, r1, CORTEX_A72_CPUACTLR_DIS_LOAD_PASS_STORE
	stcopr16	r0, r1, CORTEX_A72_CPUACTLR
	isb
	dsb	sy
#endif

	/* ---------------------------------------------
	 * Enable the SMP bit.
	 * ---------------------------------------------
	 */
	ldcopr16	r0, r1, CORTEX_A72_ECTLR
	orr64_imm	r0, r1, CORTEX_A72_ECTLR_SMP_BIT
	stcopr16	r0, r1, CORTEX_A72_ECTLR
	isb
	bx	r5
endfunc cortex_a72_reset_func

	/* ----------------------------------------------------
	 * The CPU Ops core power down function for Cortex-A72.
	 * ----------------------------------------------------
	 */
func cortex_a72_core_pwr_dwn
	push	{r12, lr}

	/* Assert if cache is enabled */
#if ENABLE_ASSERTIONS
	ldcopr	r0, SCTLR
	tst	r0, #SCTLR_C_BIT
	ASM_ASSERT(eq)
#endif

	/* ---------------------------------------------
	 * Disable the L2 prefetches.
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_l2_prefetch

	/* ---------------------------------------------
	 * Disable the load-store hardware prefetcher.
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_hw_prefetcher

	/* ---------------------------------------------
	 * Flush L1 caches.
	 * ---------------------------------------------
	 */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	/* ---------------------------------------------
	 * Come out of intra cluster coherency
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_smp

	/* ---------------------------------------------
	 * Force the debug interfaces to be quiescent
	 * ---------------------------------------------
	 */
	pop	{r12, lr}
	b	cortex_a72_disable_ext_debug
endfunc cortex_a72_core_pwr_dwn

	/* -------------------------------------------------------
	 * The CPU Ops cluster power down function for Cortex-A72.
	 * -------------------------------------------------------
	 */
func cortex_a72_cluster_pwr_dwn
	push	{r12, lr}

	/* Assert if cache is enabled */
#if ENABLE_ASSERTIONS
	ldcopr	r0, SCTLR
	tst	r0, #SCTLR_C_BIT
	ASM_ASSERT(eq)
#endif

	/* ---------------------------------------------
	 * Disable the L2 prefetches.
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_l2_prefetch

	/* ---------------------------------------------
	 * Disable the load-store hardware prefetcher.
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_hw_prefetcher

#if !SKIP_A72_L1_FLUSH_PWR_DWN
	/* ---------------------------------------------
	 * Flush L1 caches.
	 * ---------------------------------------------
	 */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1
#endif

	/* ---------------------------------------------
	 * Disable the optional ACP.
	 * ---------------------------------------------
	 */
	bl	plat_disable_acp
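
	/*
	 * plat_disable_acp is a platform hook: platforms with an
	 * Accelerator Coherency Port are expected to override the
	 * (typically empty) default implementation so the ACP is
	 * quiesced before the L2 flush below.
	 */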

	/* -------------------------------------------------
	 * Flush the L2 caches.
	 * -------------------------------------------------
	 */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level2

	/* ---------------------------------------------
	 * Come out of intra cluster coherency
	 * ---------------------------------------------
	 */
	bl	cortex_a72_disable_smp

	/* ---------------------------------------------
	 * Force the debug interfaces to be quiescent
	 * ---------------------------------------------
	 */
	pop	{r12, lr}
	b	cortex_a72_disable_ext_debug
endfunc cortex_a72_cluster_pwr_dwn

#if REPORT_ERRATA
/*
 * Errata printing function for Cortex A72. Must follow AAPCS.
 */
func cortex_a72_errata_report
	push	{r12, lr}

	bl	cpu_get_rev_var
	mov	r4, r0

	/*
	 * Report all errata. The revision-variant information is passed to
	 * checking functions of each errata.
	 */
	report_errata ERRATA_A72_859971, cortex_a72, 859971
	report_errata WORKAROUND_CVE_2017_5715, cortex_a72, cve_2017_5715
	report_errata WORKAROUND_CVE_2018_3639, cortex_a72, cve_2018_3639
	report_errata WORKAROUND_CVE_2022_23960, cortex_a72, cve_2022_23960

	pop	{r12, lr}
	bx	lr
endfunc cortex_a72_errata_report
#endif

declare_cpu_ops cortex_a72, CORTEX_A72_MIDR, \
	cortex_a72_reset_func, \
	cortex_a72_core_pwr_dwn, \
	cortex_a72_cluster_pwr_dwn
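
/*
 * declare_cpu_ops binds the reset and power-down handlers above to
 * CORTEX_A72_MIDR, so the generic CPU support code can select them at
 * runtime from the running core's MIDR value.
 */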