/*
* Copyright (c) 2016-2023, Arm Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <cortex_a15.h>
#include <cpu_macros.S>
/*
 * The Cortex-A15 supports LPAE and the Virtualization Extensions, but it
 * does not matter whether a given configuration actually uses them.
 * Therefore this file does not check ARCH_IS_ARMV7_WITH_LPAE/VE.
 */
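
/*
 * Assertion helper used by the power-down handlers below when
 * ENABLE_ASSERTIONS is set. Note that TST followed by ASM_ASSERT(eq)
 * passes when SCTLR.C is clear, i.e. it checks that the data cache has
 * already been turned off before the cache maintenance is performed.
 */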
.macro assert_cache_enabled
#if ENABLE_ASSERTIONS
ldcopr r0, SCTLR
tst r0, #SCTLR_C_BIT
ASM_ASSERT(eq)
#endif
.endm
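
	/*
	 * Take the calling core out of coherency: clear ACTLR.SMP, apply
	 * the erratum 816470 workaround if it is enabled, and complete
	 * everything with a DSB before returning.
	 */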
func cortex_a15_disable_smp
ldcopr r0, ACTLR
bic r0, #CORTEX_A15_ACTLR_SMP_BIT
stcopr r0, ACTLR
isb
#if ERRATA_A15_816470
	/*
	 * Erratum 816470 workaround: issue a dummy TLB invalidate by MVA
	 * before the DSB below.
	 */
mov r0, #0
stcopr r0, TLBIMVA
#endif
dsb sy
bx lr
endfunc cortex_a15_disable_smp
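
	/*
	 * Enter coherency by setting ACTLR.SMP. The Cortex-A15 TRM requires
	 * this bit to be set before the caches and MMU are enabled.
	 */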
func cortex_a15_enable_smp
ldcopr r0, ACTLR
orr r0, #CORTEX_A15_ACTLR_SMP_BIT
stcopr r0, ACTLR
isb
bx lr
endfunc cortex_a15_enable_smp
	/* ----------------------------------------------------
	 * Errata workaround for Cortex-A15 Erratum 816470.
	 * This applies only to revisions >= r3p0 of Cortex-A15.
	 * ----------------------------------------------------
	 */
func check_errata_816470
/*
* Even though this is only needed for revision >= r3p0, it is always
* applied because of the low cost of the workaround.
*/
mov r0, #ERRATA_APPLIES
bx lr
endfunc check_errata_816470
add_erratum_entry cortex_a15, ERRATUM(816470), ERRATA_A15_816470
	/* ----------------------------------------------------
	 * Errata workaround for Cortex-A15 Erratum 827671.
	 * This applies only to revisions >= r3p0 of Cortex-A15.
	 * Inputs:
	 * r0: variant[4:7] and revision[0:3] of the current CPU.
	 * Shall clobber: r0-r3
	 * ----------------------------------------------------
	 */
func errata_a15_827671_wa
	/*
	 * Check the revision-variant in r0; apply the workaround only if
	 * the erratum is reported as present.
	 */
mov r2, lr
bl check_errata_827671
cmp r0, #ERRATA_NOT_APPLIES
beq 1f
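	/* Apply the workaround by setting the INV DCC bit in ACTLR2 */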
ldcopr r0, CORTEX_A15_ACTLR2
orr r0, #CORTEX_A15_ACTLR2_INV_DCC_BIT
stcopr r0, CORTEX_A15_ACTLR2
isb
1:
bx r2
endfunc errata_a15_827671_wa
func check_errata_827671
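	/*
	 * cpu_rev_var_hs returns ERRATA_APPLIES when the revision-variant
	 * value passed in r0 is higher than or the same as the reference
	 * value in r1; 0x30 corresponds to r3p0.
	 */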
mov r1, #0x30
b cpu_rev_var_hs
endfunc check_errata_827671
add_erratum_entry cortex_a15, ERRATUM(827671), ERRATA_A15_827671
func check_errata_cve_2017_5715
#if WORKAROUND_CVE_2017_5715
mov r0, #ERRATA_APPLIES
#else
mov r0, #ERRATA_MISSING
#endif
bx lr
endfunc check_errata_cve_2017_5715
add_erratum_entry cortex_a15, CVE(2017, 5715), WORKAROUND_CVE_2017_5715
func check_errata_cve_2022_23960
#if WORKAROUND_CVE_2022_23960
mov r0, #ERRATA_APPLIES
#else
mov r0, #ERRATA_MISSING
#endif
bx lr
endfunc check_errata_cve_2022_23960
add_erratum_entry cortex_a15, CVE(2022, 23960), WORKAROUND_CVE_2022_23960
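
	/*
	 * Reset handler: apply the enabled errata workarounds, install the
	 * CVE-2017-5715 / CVE-2022-23960 mitigation when built for BL32
	 * with one of the workarounds enabled, and finally enable SMP
	 * coherency.
	 */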
func cortex_a15_reset_func
mov r5, lr
bl cpu_get_rev_var
#if ERRATA_A15_827671
bl errata_a15_827671_wa
#endif
#if IMAGE_BL32 && (WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960)
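	/*
	 * Branch-predictor hardening: the workaround vector table
	 * (wa_cve_2017_5715_icache_inv_vbar) invalidates the I-cache on
	 * exception entry, and setting the INV BTB bit in ACTLR is intended
	 * to make that invalidation also flush the branch predictor.
	 */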
ldcopr r0, ACTLR
orr r0, #CORTEX_A15_ACTLR_INV_BTB_BIT
stcopr r0, ACTLR
ldr r0, =wa_cve_2017_5715_icache_inv_vbar
stcopr r0, VBAR
stcopr r0, MVBAR
/* isb will be applied in the course of the reset func */
#endif
mov lr, r5
b cortex_a15_enable_smp
endfunc cortex_a15_reset_func
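
	/*
	 * Core power-down handler: flush the L1 data cache, then exit
	 * coherency. r12 is saved alongside lr only to keep the stack
	 * 8-byte aligned.
	 */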
func cortex_a15_core_pwr_dwn
push {r12, lr}
assert_cache_enabled
/* Flush L1 cache */
mov r0, #DC_OP_CISW
bl dcsw_op_level1
/* Exit cluster coherency */
pop {r12, lr}
b cortex_a15_disable_smp
endfunc cortex_a15_core_pwr_dwn
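
	/*
	 * Cluster power-down handler: flush the L1 data cache, let the
	 * platform disable the ACP if it uses one, flush the L2 cache and
	 * then exit coherency.
	 */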
func cortex_a15_cluster_pwr_dwn
push {r12, lr}
assert_cache_enabled
/* Flush L1 caches */
mov r0, #DC_OP_CISW
bl dcsw_op_level1
bl plat_disable_acp
/* Flush L2 caches */
mov r0, #DC_OP_CISW
bl dcsw_op_level2
/* Exit cluster coherency */
pop {r12, lr}
b cortex_a15_disable_smp
endfunc cortex_a15_cluster_pwr_dwn
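
/*
 * errata_report_shim generates the runtime errata reporting function for
 * this CPU from the erratum entries above, and declare_cpu_ops registers
 * the MIDR together with the reset and power-down handlers in the cpu_ops
 * framework.
 */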
errata_report_shim cortex_a15
declare_cpu_ops cortex_a15, CORTEX_A15_MIDR, \
cortex_a15_reset_func, \
cortex_a15_core_pwr_dwn, \
cortex_a15_cluster_pwr_dwn