Diffstat (limited to 'lib/cpus/aarch32')
-rw-r--r--   lib/cpus/aarch32/aem_generic.S     55
-rw-r--r--   lib/cpus/aarch32/cortex_a12.S      84
-rw-r--r--   lib/cpus/aarch32/cortex_a15.S     191
-rw-r--r--   lib/cpus/aarch32/cortex_a17.S     185
-rw-r--r--   lib/cpus/aarch32/cortex_a32.S     132
-rw-r--r--   lib/cpus/aarch32/cortex_a5.S       84
-rw-r--r--   lib/cpus/aarch32/cortex_a53.S     316
-rw-r--r--   lib/cpus/aarch32/cortex_a57.S     618
-rw-r--r--   lib/cpus/aarch32/cortex_a7.S       84
-rw-r--r--   lib/cpus/aarch32/cortex_a72.S     278
-rw-r--r--   lib/cpus/aarch32/cortex_a9.S      121
-rw-r--r--   lib/cpus/aarch32/cpu_helpers.S    264
12 files changed, 2412 insertions, 0 deletions
diff --git a/lib/cpus/aarch32/aem_generic.S b/lib/cpus/aarch32/aem_generic.S
new file mode 100644
index 0000000..7bd586a
--- /dev/null
+++ b/lib/cpus/aarch32/aem_generic.S
@@ -0,0 +1,55 @@
+/*
+ * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+#include <aem_generic.h>
+#include <arch.h>
+#include <asm_macros.S>
+#include <assert_macros.S>
+#include <cpu_macros.S>
+
+func aem_generic_core_pwr_dwn
+ /* Assert if cache is enabled */
+#if ENABLE_ASSERTIONS
+ ldcopr r0, SCTLR
+ tst r0, #SCTLR_C_BIT
+ ASM_ASSERT(eq)
+#endif
+ /* ---------------------------------------------
+ * Flush L1 cache to PoU.
+ * ---------------------------------------------
+ */
+ mov r0, #DC_OP_CISW
+ b dcsw_op_louis
+endfunc aem_generic_core_pwr_dwn
+
+
+func aem_generic_cluster_pwr_dwn
+ /* Assert if cache is enabled */
+#if ENABLE_ASSERTIONS
+ ldcopr r0, SCTLR
+ tst r0, #SCTLR_C_BIT
+ ASM_ASSERT(eq)
+#endif
+ /* ---------------------------------------------
+ * Flush L1 and L2 caches to PoC.
+ * ---------------------------------------------
+ */
+ mov r0, #DC_OP_CISW
+ b dcsw_op_all
+endfunc aem_generic_cluster_pwr_dwn
+
+#if REPORT_ERRATA
+/*
+ * Errata printing function for AEM. Must follow AAPCS.
+ */
+func aem_generic_errata_report
+ bx lr
+endfunc aem_generic_errata_report
+#endif
+
+/* cpu_ops for Base AEM FVP */
+declare_cpu_ops aem_generic, BASE_AEM_MIDR, CPU_NO_RESET_FUNC, \
+ aem_generic_core_pwr_dwn, \
+ aem_generic_cluster_pwr_dwn
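+
+/*
+ * For reference: declare_cpu_ops registers a cpu_ops descriptor that the
+ * helpers in cpu_helpers.S locate by MIDR. As a rough sketch (the
+ * authoritative field list lives in cpu_macros.S and may differ), the
+ * macro emits roughly:
+ *
+ *	.section cpu_ops, "a"
+ *	.word	BASE_AEM_MIDR			@ CPU_MIDR, the lookup key
+ *	.word	0				@ CPU_RESET_FUNC (CPU_NO_RESET_FUNC)
+ *	.word	aem_generic_core_pwr_dwn	@ CPU_PWR_DWN_OPS, core level
+ *	.word	aem_generic_cluster_pwr_dwn	@ CPU_PWR_DWN_OPS, cluster level
+ *
+ * get_cpu_ops_ptr in cpu_helpers.S walks every such entry between
+ * __CPU_OPS_START__ and __CPU_OPS_END__ and compares the CPU_MIDR word
+ * against the running core's MIDR.
+ */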
diff --git a/lib/cpus/aarch32/cortex_a12.S b/lib/cpus/aarch32/cortex_a12.S
new file mode 100644
index 0000000..5300fe0
--- /dev/null
+++ b/lib/cpus/aarch32/cortex_a12.S
@@ -0,0 +1,84 @@
+/*
+ * Copyright (c) 2017, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#include <arch.h>
+#include <asm_macros.S>
+#include <assert_macros.S>
+#include <cortex_a12.h>
+#include <cpu_macros.S>
+
+ .macro assert_cache_enabled
+#if ENABLE_ASSERTIONS
+ ldcopr r0, SCTLR
+ tst r0, #SCTLR_C_BIT
+ ASM_ASSERT(eq)
+#endif
+ .endm
+
+func cortex_a12_disable_smp
+ ldcopr r0, ACTLR
+ bic r0, #CORTEX_A12_ACTLR_SMP_BIT
+ stcopr r0, ACTLR
+ isb
+ dsb sy
+ bx lr
+endfunc cortex_a12_disable_smp
+
+func cortex_a12_enable_smp
+ ldcopr r0, ACTLR
+ orr r0, #CORTEX_A12_ACTLR_SMP_BIT
+ stcopr r0, ACTLR
+ isb
+ bx lr
+endfunc cortex_a12_enable_smp
+
+func cortex_a12_reset_func
+ b cortex_a12_enable_smp
+endfunc cortex_a12_reset_func
+
+func cortex_a12_core_pwr_dwn
+ push {r12, lr}
+
+ assert_cache_enabled
+
+ /* Flush L1 cache */
+ mov r0, #DC_OP_CISW
+ bl dcsw_op_level1
+
+ /* Exit cluster coherency */
+ pop {r12, lr}
+ b cortex_a12_disable_smp
+endfunc cortex_a12_core_pwr_dwn
+
+func cortex_a12_cluster_pwr_dwn
+ push {r12, lr}
+
+ assert_cache_enabled
+
+ /* Flush L1 caches */
+ mov r0, #DC_OP_CISW
+ bl dcsw_op_level1
+
+ bl plat_disable_acp
+
+ /* Exit cluster coherency */
+ pop {r12, lr}
+ b cortex_a12_disable_smp
+endfunc cortex_a12_cluster_pwr_dwn
+
+#if REPORT_ERRATA
+/*
+ * Errata printing function for Cortex-A12. Must follow AAPCS.
+ */
+func cortex_a12_errata_report
+ bx lr
+endfunc cortex_a12_errata_report
+#endif
+
+declare_cpu_ops cortex_a12, CORTEX_A12_MIDR, \
+ cortex_a12_reset_func, \
+ cortex_a12_core_pwr_dwn, \
+ cortex_a12_cluster_pwr_dwn
diff --git a/lib/cpus/aarch32/cortex_a15.S b/lib/cpus/aarch32/cortex_a15.S
new file mode 100644
index 0000000..1143e9b
--- /dev/null
+++ b/lib/cpus/aarch32/cortex_a15.S
@@ -0,0 +1,191 @@
+/*
+ * Copyright (c) 2016-2022, Arm Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#include <arch.h>
+#include <asm_macros.S>
+#include <assert_macros.S>
+#include <cortex_a15.h>
+#include <cpu_macros.S>
+
+/*
+ * Cortex-A15 supports LPAE and the Virtualization Extensions, whether or
+ * not the build configuration makes use of them. Therefore, this file
+ * does not check ARCH_IS_ARMV7_WITH_LPAE/VE.
+ */
+
+ .macro assert_cache_enabled
+#if ENABLE_ASSERTIONS
+ ldcopr r0, SCTLR
+ tst r0, #SCTLR_C_BIT
+ ASM_ASSERT(eq)
+#endif
+ .endm
+
+func cortex_a15_disable_smp
+ ldcopr r0, ACTLR
+ bic r0, #CORTEX_A15_ACTLR_SMP_BIT
+ stcopr r0, ACTLR
+ isb
+#if ERRATA_A15_816470
+ /*
+ * Invalidate any TLB address
+ */
+ mov r0, #0
+ stcopr r0, TLBIMVA
+#endif
+ dsb sy
+ bx lr
+endfunc cortex_a15_disable_smp
+
+func cortex_a15_enable_smp
+ ldcopr r0, ACTLR
+ orr r0, #CORTEX_A15_ACTLR_SMP_BIT
+ stcopr r0, ACTLR
+ isb
+ bx lr
+endfunc cortex_a15_enable_smp
+
+ /* ----------------------------------------------------
+ * Errata Workaround for Cortex A15 Errata #816470.
+ * This applies only to revision >= r3p0 of Cortex A15.
+ * ----------------------------------------------------
+ */
+func check_errata_816470
+ /*
+ * Even though this is only needed for revision >= r3p0, it is always
+ * applied because of the low cost of the workaround.
+ */
+ mov r0, #ERRATA_APPLIES
+ bx lr
+endfunc check_errata_816470
+
+ /* ----------------------------------------------------
+ * Errata Workaround for Cortex A15 Errata #827671.
+ * This applies only to revision >= r3p0 of Cortex A15.
+ * Inputs:
+ * r0: variant[4:7] and revision[0:3] of current cpu.
+ * Shall clobber: r0-r3
+ * ----------------------------------------------------
+ */
+func errata_a15_827671_wa
+ /*
+ * Compare r0 against revision r3p0
+ */
+ mov r2, lr
+ bl check_errata_827671
+ cmp r0, #ERRATA_NOT_APPLIES
+ beq 1f
+ ldcopr r0, CORTEX_A15_ACTLR2
+ orr r0, #CORTEX_A15_ACTLR2_INV_DCC_BIT
+ stcopr r0, CORTEX_A15_ACTLR2
+ isb
+1:
+ bx r2
+endfunc errata_a15_827671_wa
+
+func check_errata_827671
+ mov r1, #0x30
+ b cpu_rev_var_hs
+endfunc check_errata_827671
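+
+/*
+ * A note on the pattern used for every erratum in these files: a
+ * check_errata_<id> routine classifies the running revision (via
+ * cpu_rev_var_ls/cpu_rev_var_hs), and an errata_<cpu>_<id>_wa routine
+ * applies the fix only when the check returns ERRATA_APPLIES. Reset-time
+ * code may run before a stack exists, so the link register is preserved
+ * in a scratch register instead of on the stack. A generic sketch, with a
+ * hypothetical erratum id NNNNNN:
+ *
+ *	func errata_aNN_NNNNNN_wa
+ *		mov	r2, lr			@ save lr; no stack this early
+ *		bl	check_errata_NNNNNN	@ r0: rev-var in, verdict out
+ *		cmp	r0, #ERRATA_NOT_APPLIES
+ *		beq	1f
+ *		@ read-modify-write the relevant implementation defined register
+ *	1:
+ *		bx	r2
+ *	endfunc errata_aNN_NNNNNN_wa
+ */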
+
+func check_errata_cve_2017_5715
+#if WORKAROUND_CVE_2017_5715
+ mov r0, #ERRATA_APPLIES
+#else
+ mov r0, #ERRATA_MISSING
+#endif
+ bx lr
+endfunc check_errata_cve_2017_5715
+
+func check_errata_cve_2022_23960
+#if WORKAROUND_CVE_2022_23960
+ mov r0, #ERRATA_APPLIES
+#else
+ mov r0, #ERRATA_MISSING
+#endif
+ bx lr
+endfunc check_errata_cve_2022_23960
+
+#if REPORT_ERRATA
+/*
+ * Errata printing function for Cortex A15. Must follow AAPCS.
+ */
+func cortex_a15_errata_report
+ push {r12, lr}
+
+ bl cpu_get_rev_var
+ mov r4, r0
+
+ /*
+ * Report all errata. The revision-variant information is passed to
+	 * the checking function of each erratum.
+ */
+ report_errata ERRATA_A15_816470, cortex_a15, 816470
+ report_errata ERRATA_A15_827671, cortex_a15, 827671
+ report_errata WORKAROUND_CVE_2017_5715, cortex_a15, cve_2017_5715
+ report_errata WORKAROUND_CVE_2022_23960, cortex_a15, cve_2022_23960
+
+ pop {r12, lr}
+ bx lr
+endfunc cortex_a15_errata_report
+#endif
+
+func cortex_a15_reset_func
+ mov r5, lr
+ bl cpu_get_rev_var
+
+#if ERRATA_A15_827671
+ bl errata_a15_827671_wa
+#endif
+
+#if IMAGE_BL32 && (WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960)
+ ldcopr r0, ACTLR
+ orr r0, #CORTEX_A15_ACTLR_INV_BTB_BIT
+ stcopr r0, ACTLR
+ ldr r0, =wa_cve_2017_5715_icache_inv_vbar
+ stcopr r0, VBAR
+ stcopr r0, MVBAR
+ /* isb will be applied in the course of the reset func */
+#endif
+
+ mov lr, r5
+ b cortex_a15_enable_smp
+endfunc cortex_a15_reset_func
+
+func cortex_a15_core_pwr_dwn
+ push {r12, lr}
+
+ assert_cache_enabled
+
+ /* Flush L1 cache */
+ mov r0, #DC_OP_CISW
+ bl dcsw_op_level1
+
+ /* Exit cluster coherency */
+ pop {r12, lr}
+ b cortex_a15_disable_smp
+endfunc cortex_a15_core_pwr_dwn
+
+func cortex_a15_cluster_pwr_dwn
+ push {r12, lr}
+
+ assert_cache_enabled
+
+ /* Flush L1 caches */
+ mov r0, #DC_OP_CISW
+ bl dcsw_op_level1
+
+ bl plat_disable_acp
+
+ /* Exit cluster coherency */
+ pop {r12, lr}
+ b cortex_a15_disable_smp
+endfunc cortex_a15_cluster_pwr_dwn
+
+declare_cpu_ops cortex_a15, CORTEX_A15_MIDR, \
+ cortex_a15_reset_func, \
+ cortex_a15_core_pwr_dwn, \
+ cortex_a15_cluster_pwr_dwn
diff --git a/lib/cpus/aarch32/cortex_a17.S b/lib/cpus/aarch32/cortex_a17.S
new file mode 100644
index 0000000..b8abd33
--- /dev/null
+++ b/lib/cpus/aarch32/cortex_a17.S
@@ -0,0 +1,185 @@
+/*
+ * Copyright (c) 2017-2019, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#include <arch.h>
+#include <asm_macros.S>
+#include <assert_macros.S>
+#include <cortex_a17.h>
+#include <cpu_macros.S>
+
+ .macro assert_cache_enabled
+#if ENABLE_ASSERTIONS
+ ldcopr r0, SCTLR
+ tst r0, #SCTLR_C_BIT
+ ASM_ASSERT(eq)
+#endif
+ .endm
+
+func cortex_a17_disable_smp
+ ldcopr r0, ACTLR
+ bic r0, #CORTEX_A17_ACTLR_SMP_BIT
+ stcopr r0, ACTLR
+ isb
+ dsb sy
+ bx lr
+endfunc cortex_a17_disable_smp
+
+func cortex_a17_enable_smp
+ ldcopr r0, ACTLR
+ orr r0, #CORTEX_A17_ACTLR_SMP_BIT
+ stcopr r0, ACTLR
+ isb
+ bx lr
+endfunc cortex_a17_enable_smp
+
+ /* ----------------------------------------------------
+ * Errata Workaround for Cortex A17 Errata #852421.
+ * This applies only to revision <= r1p2 of Cortex A17.
+ * Inputs:
+ * r0: variant[4:7] and revision[0:3] of current cpu.
+ * Shall clobber: r0-r3
+ * ----------------------------------------------------
+ */
+func errata_a17_852421_wa
+ /*
+ * Compare r0 against revision r1p2
+ */
+ mov r2, lr
+ bl check_errata_852421
+ cmp r0, #ERRATA_NOT_APPLIES
+ beq 1f
+ ldcopr r0, CORTEX_A17_IMP_DEF_REG1
+ orr r0, r0, #(1<<24)
+ stcopr r0, CORTEX_A17_IMP_DEF_REG1
+1:
+ bx r2
+endfunc errata_a17_852421_wa
+
+func check_errata_852421
+ mov r1, #0x12
+ b cpu_rev_var_ls
+endfunc check_errata_852421
+
+ /* ----------------------------------------------------
+ * Errata Workaround for Cortex A17 Errata #852423.
+ * This applies only to revision <= r1p2 of Cortex A17.
+ * Inputs:
+ * r0: variant[4:7] and revision[0:3] of current cpu.
+ * Shall clobber: r0-r3
+ * ----------------------------------------------------
+ */
+func errata_a17_852423_wa
+ /*
+ * Compare r0 against revision r1p2
+ */
+ mov r2, lr
+ bl check_errata_852423
+ cmp r0, #ERRATA_NOT_APPLIES
+ beq 1f
+ ldcopr r0, CORTEX_A17_IMP_DEF_REG1
+ orr r0, r0, #(1<<12)
+ stcopr r0, CORTEX_A17_IMP_DEF_REG1
+1:
+ bx r2
+endfunc errata_a17_852423_wa
+
+func check_errata_852423
+ mov r1, #0x12
+ b cpu_rev_var_ls
+endfunc check_errata_852423
+
+func check_errata_cve_2017_5715
+#if WORKAROUND_CVE_2017_5715
+ mov r0, #ERRATA_APPLIES
+#else
+ mov r0, #ERRATA_MISSING
+#endif
+ bx lr
+endfunc check_errata_cve_2017_5715
+
+#if REPORT_ERRATA
+/*
+ * Errata printing function for Cortex A17. Must follow AAPCS.
+ */
+func cortex_a17_errata_report
+ push {r12, lr}
+
+ bl cpu_get_rev_var
+ mov r4, r0
+
+ /*
+ * Report all errata. The revision-variant information is passed to
+	 * the checking function of each erratum.
+ */
+ report_errata ERRATA_A17_852421, cortex_a17, 852421
+ report_errata ERRATA_A17_852423, cortex_a17, 852423
+ report_errata WORKAROUND_CVE_2017_5715, cortex_a17, cve_2017_5715
+
+ pop {r12, lr}
+ bx lr
+endfunc cortex_a17_errata_report
+#endif
+
+func cortex_a17_reset_func
+ mov r5, lr
+ bl cpu_get_rev_var
+ mov r4, r0
+
+#if ERRATA_A17_852421
+ mov r0, r4
+ bl errata_a17_852421_wa
+#endif
+
+#if ERRATA_A17_852423
+ mov r0, r4
+ bl errata_a17_852423_wa
+#endif
+
+#if IMAGE_BL32 && WORKAROUND_CVE_2017_5715
+ ldr r0, =workaround_bpiall_runtime_exceptions
+ stcopr r0, VBAR
+ stcopr r0, MVBAR
+ /* isb will be applied in the course of the reset func */
+#endif
+
+ mov lr, r5
+ b cortex_a17_enable_smp
+endfunc cortex_a17_reset_func
+
+func cortex_a17_core_pwr_dwn
+ push {r12, lr}
+
+ assert_cache_enabled
+
+ /* Flush L1 cache */
+ mov r0, #DC_OP_CISW
+ bl dcsw_op_level1
+
+ /* Exit cluster coherency */
+ pop {r12, lr}
+ b cortex_a17_disable_smp
+endfunc cortex_a17_core_pwr_dwn
+
+func cortex_a17_cluster_pwr_dwn
+ push {r12, lr}
+
+ assert_cache_enabled
+
+ /* Flush L1 caches */
+ mov r0, #DC_OP_CISW
+ bl dcsw_op_level1
+
+ bl plat_disable_acp
+
+ /* Exit cluster coherency */
+ pop {r12, lr}
+ b cortex_a17_disable_smp
+endfunc cortex_a17_cluster_pwr_dwn
+
+declare_cpu_ops cortex_a17, CORTEX_A17_MIDR, \
+ cortex_a17_reset_func, \
+ cortex_a17_core_pwr_dwn, \
+ cortex_a17_cluster_pwr_dwn
diff --git a/lib/cpus/aarch32/cortex_a32.S b/lib/cpus/aarch32/cortex_a32.S
new file mode 100644
index 0000000..c262276
--- /dev/null
+++ b/lib/cpus/aarch32/cortex_a32.S
@@ -0,0 +1,132 @@
+/*
+ * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#include <arch.h>
+#include <asm_macros.S>
+#include <assert_macros.S>
+#include <cortex_a32.h>
+#include <cpu_macros.S>
+
+
+ /* ---------------------------------------------
+ * Disable intra-cluster coherency
+ * Clobbers: r0-r1
+ * ---------------------------------------------
+ */
+func cortex_a32_disable_smp
+ ldcopr16 r0, r1, CORTEX_A32_CPUECTLR_EL1
+ bic r0, r0, #CORTEX_A32_CPUECTLR_SMPEN_BIT
+ stcopr16 r0, r1, CORTEX_A32_CPUECTLR_EL1
+ isb
+ dsb sy
+ bx lr
+endfunc cortex_a32_disable_smp
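+
+/*
+ * CPUECTLR is a 64-bit register in the AArch32 system register space, so
+ * it is accessed through the ldcopr16/stcopr16 helpers rather than plain
+ * mrc/mcr. Assuming the definitions in asm_macros.S, they wrap the
+ * MRRC/MCRR encodings, roughly:
+ *
+ *	@ ldcopr16 r0, r1, CORTEX_A32_CPUECTLR_EL1 is approximately:
+ *	mrrc	p15, 1, r0, r1, c15	@ low word -> r0, high word -> r1
+ *	@ and stcopr16 is the matching store:
+ *	mcrr	p15, 1, r0, r1, c15
+ */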
+
+ /* -------------------------------------------------
+ * The CPU Ops reset function for Cortex-A32.
+ * Clobbers: r0-r1
+ * -------------------------------------------------
+ */
+func cortex_a32_reset_func
+ /* ---------------------------------------------
+ * Enable the SMP bit.
+ * ---------------------------------------------
+ */
+ ldcopr16 r0, r1, CORTEX_A32_CPUECTLR_EL1
+ orr r0, r0, #CORTEX_A32_CPUECTLR_SMPEN_BIT
+ stcopr16 r0, r1, CORTEX_A32_CPUECTLR_EL1
+ isb
+ bx lr
+endfunc cortex_a32_reset_func
+
+ /* ----------------------------------------------------
+ * The CPU Ops core power down function for Cortex-A32.
+ * Clobbers: r0-r3
+ * ----------------------------------------------------
+ */
+func cortex_a32_core_pwr_dwn
+ /* r12 is pushed to meet the 8 byte stack alignment requirement */
+ push {r12, lr}
+
+ /* Assert if cache is enabled */
+#if ENABLE_ASSERTIONS
+ ldcopr r0, SCTLR
+ tst r0, #SCTLR_C_BIT
+ ASM_ASSERT(eq)
+#endif
+
+ /* ---------------------------------------------
+ * Flush L1 caches.
+ * ---------------------------------------------
+ */
+ mov r0, #DC_OP_CISW
+ bl dcsw_op_level1
+
+ /* ---------------------------------------------
+ * Come out of intra cluster coherency
+ * ---------------------------------------------
+ */
+ pop {r12, lr}
+ b cortex_a32_disable_smp
+endfunc cortex_a32_core_pwr_dwn
+
+ /* -------------------------------------------------------
+ * The CPU Ops cluster power down function for Cortex-A32.
+ * Clobbers: r0-r3
+ * -------------------------------------------------------
+ */
+func cortex_a32_cluster_pwr_dwn
+ /* r12 is pushed to meet the 8 byte stack alignment requirement */
+ push {r12, lr}
+
+ /* Assert if cache is enabled */
+#if ENABLE_ASSERTIONS
+ ldcopr r0, SCTLR
+ tst r0, #SCTLR_C_BIT
+ ASM_ASSERT(eq)
+#endif
+
+ /* ---------------------------------------------
+ * Flush L1 cache.
+ * ---------------------------------------------
+ */
+ mov r0, #DC_OP_CISW
+ bl dcsw_op_level1
+
+ /* ---------------------------------------------
+ * Disable the optional ACP.
+ * ---------------------------------------------
+ */
+ bl plat_disable_acp
+
+ /* ---------------------------------------------
+ * Flush L2 cache.
+ * ---------------------------------------------
+ */
+ mov r0, #DC_OP_CISW
+ bl dcsw_op_level2
+
+ /* ---------------------------------------------
+ * Come out of intra cluster coherency
+ * ---------------------------------------------
+ */
+ pop {r12, lr}
+ b cortex_a32_disable_smp
+endfunc cortex_a32_cluster_pwr_dwn
+
+#if REPORT_ERRATA
+/*
+ * Errata printing function for Cortex-A32. Must follow AAPCS.
+ */
+func cortex_a32_errata_report
+ bx lr
+endfunc cortex_a32_errata_report
+#endif
+
+declare_cpu_ops cortex_a32, CORTEX_A32_MIDR, \
+ cortex_a32_reset_func, \
+ cortex_a32_core_pwr_dwn, \
+ cortex_a32_cluster_pwr_dwn
diff --git a/lib/cpus/aarch32/cortex_a5.S b/lib/cpus/aarch32/cortex_a5.S
new file mode 100644
index 0000000..8abb66f
--- /dev/null
+++ b/lib/cpus/aarch32/cortex_a5.S
@@ -0,0 +1,84 @@
+/*
+ * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#include <arch.h>
+#include <asm_macros.S>
+#include <assert_macros.S>
+#include <cortex_a5.h>
+#include <cpu_macros.S>
+
+ .macro assert_cache_enabled
+#if ENABLE_ASSERTIONS
+ ldcopr r0, SCTLR
+ tst r0, #SCTLR_C_BIT
+ ASM_ASSERT(eq)
+#endif
+ .endm
+
+func cortex_a5_disable_smp
+ ldcopr r0, ACTLR
+ bic r0, #CORTEX_A5_ACTLR_SMP_BIT
+ stcopr r0, ACTLR
+ isb
+ dsb sy
+ bx lr
+endfunc cortex_a5_disable_smp
+
+func cortex_a5_enable_smp
+ ldcopr r0, ACTLR
+ orr r0, #CORTEX_A5_ACTLR_SMP_BIT
+ stcopr r0, ACTLR
+ isb
+ bx lr
+endfunc cortex_a5_enable_smp
+
+func cortex_a5_reset_func
+ b cortex_a5_enable_smp
+endfunc cortex_a5_reset_func
+
+func cortex_a5_core_pwr_dwn
+ push {r12, lr}
+
+ assert_cache_enabled
+
+ /* Flush L1 cache */
+ mov r0, #DC_OP_CISW
+ bl dcsw_op_level1
+
+ /* Exit cluster coherency */
+ pop {r12, lr}
+ b cortex_a5_disable_smp
+endfunc cortex_a5_core_pwr_dwn
+
+func cortex_a5_cluster_pwr_dwn
+ push {r12, lr}
+
+ assert_cache_enabled
+
+ /* Flush L1 caches */
+ mov r0, #DC_OP_CISW
+ bl dcsw_op_level1
+
+ bl plat_disable_acp
+
+ /* Exit cluster coherency */
+ pop {r12, lr}
+ b cortex_a5_disable_smp
+endfunc cortex_a5_cluster_pwr_dwn
+
+#if REPORT_ERRATA
+/*
+ * Errata printing function for Cortex-A5. Must follow AAPCS.
+ */
+func cortex_a5_errata_report
+ bx lr
+endfunc cortex_a5_errata_report
+#endif
+
+declare_cpu_ops cortex_a5, CORTEX_A5_MIDR, \
+ cortex_a5_reset_func, \
+ cortex_a5_core_pwr_dwn, \
+ cortex_a5_cluster_pwr_dwn
diff --git a/lib/cpus/aarch32/cortex_a53.S b/lib/cpus/aarch32/cortex_a53.S
new file mode 100644
index 0000000..6e3ff81
--- /dev/null
+++ b/lib/cpus/aarch32/cortex_a53.S
@@ -0,0 +1,316 @@
+/*
+ * Copyright (c) 2017-2019, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#include <arch.h>
+#include <asm_macros.S>
+#include <assert_macros.S>
+#include <common/debug.h>
+#include <cortex_a53.h>
+#include <cpu_macros.S>
+
+#if A53_DISABLE_NON_TEMPORAL_HINT
+#undef ERRATA_A53_836870
+#define ERRATA_A53_836870 1
+#endif
+
+ /* ---------------------------------------------
+ * Disable intra-cluster coherency
+ * ---------------------------------------------
+ */
+func cortex_a53_disable_smp
+ ldcopr16 r0, r1, CORTEX_A53_ECTLR
+ bic64_imm r0, r1, CORTEX_A53_ECTLR_SMP_BIT
+ stcopr16 r0, r1, CORTEX_A53_ECTLR
+ isb
+ dsb sy
+ bx lr
+endfunc cortex_a53_disable_smp
+
+ /* ---------------------------------------------------
+ * Errata Workaround for Cortex A53 Errata #819472.
+ * This applies only to revision <= r0p1 of Cortex A53.
+ * ---------------------------------------------------
+ */
+func check_errata_819472
+ /*
+ * Even though this is only needed for revision <= r0p1, it
+ * is always applied due to limitations of the current
+ * errata framework.
+ */
+ mov r0, #ERRATA_APPLIES
+ bx lr
+endfunc check_errata_819472
+
+ /* ---------------------------------------------------
+ * Errata Workaround for Cortex A53 Errata #824069.
+ * This applies only to revision <= r0p2 of Cortex A53.
+ * ---------------------------------------------------
+ */
+func check_errata_824069
+ /*
+ * Even though this is only needed for revision <= r0p2, it
+ * is always applied due to limitations of the current
+ * errata framework.
+ */
+ mov r0, #ERRATA_APPLIES
+ bx lr
+endfunc check_errata_824069
+
+ /* --------------------------------------------------
+ * Errata Workaround for Cortex A53 Errata #826319.
+ * This applies only to revision <= r0p2 of Cortex A53.
+ * Inputs:
+ * r0: variant[4:7] and revision[0:3] of current cpu.
+ * Shall clobber: r0-r3
+ * --------------------------------------------------
+ */
+func errata_a53_826319_wa
+ /*
+ * Compare r0 against revision r0p2
+ */
+ mov r2, lr
+ bl check_errata_826319
+ mov lr, r2
+ cmp r0, #ERRATA_NOT_APPLIES
+ beq 1f
+ ldcopr r0, CORTEX_A53_L2ACTLR
+ bic r0, #CORTEX_A53_L2ACTLR_ENABLE_UNIQUECLEAN
+ orr r0, #CORTEX_A53_L2ACTLR_DISABLE_CLEAN_PUSH
+ stcopr r0, CORTEX_A53_L2ACTLR
+1:
+ bx lr
+endfunc errata_a53_826319_wa
+
+func check_errata_826319
+ mov r1, #0x02
+ b cpu_rev_var_ls
+endfunc check_errata_826319
+
+ /* ---------------------------------------------------
+ * Errata Workaround for Cortex A53 Errata #827319.
+ * This applies only to revision <= r0p2 of Cortex A53.
+ * ---------------------------------------------------
+ */
+func check_errata_827319
+ /*
+ * Even though this is only needed for revision <= r0p2, it
+ * is always applied due to limitations of the current
+ * errata framework.
+ */
+ mov r0, #ERRATA_APPLIES
+ bx lr
+endfunc check_errata_827319
+
+ /* ---------------------------------------------------------------------
+ * Disable the cache non-temporal hint.
+ *
+ * This ignores the Transient allocation hint in the MAIR and treats
+ * allocations the same as non-transient allocation types. As a result,
+ * the LDNP and STNP instructions in AArch64 behave the same as the
+ * equivalent LDP and STP instructions.
+ *
+ * This is relevant only for revisions <= r0p3 of Cortex-A53.
+ * From r0p4 and onwards, the bit to disable the hint is enabled by
+ * default at reset.
+ *
+ * Inputs:
+ * r0: variant[4:7] and revision[0:3] of current cpu.
+ * Shall clobber: r0-r3
+ * ---------------------------------------------------------------------
+ */
+func a53_disable_non_temporal_hint
+ /*
+ * Compare r0 against revision r0p3
+ */
+ mov r2, lr
+ bl check_errata_disable_non_temporal_hint
+ mov lr, r2
+ cmp r0, #ERRATA_NOT_APPLIES
+ beq 1f
+ ldcopr16 r0, r1, CORTEX_A53_CPUACTLR
+ orr64_imm r0, r1, CORTEX_A53_CPUACTLR_DTAH
+ stcopr16 r0, r1, CORTEX_A53_CPUACTLR
+1:
+ bx lr
+endfunc a53_disable_non_temporal_hint
+
+func check_errata_disable_non_temporal_hint
+ mov r1, #0x03
+ b cpu_rev_var_ls
+endfunc check_errata_disable_non_temporal_hint
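+
+/*
+ * For illustration (AArch64 state): with CPUACTLR.DTAH set, the
+ * non-temporal hint is ignored, so the first instruction below behaves
+ * exactly like the second:
+ *
+ *	ldnp	x0, x1, [x2]	@ non-temporal load pair, hint ignored
+ *	ldp	x0, x1, [x2]	@ plain load pair
+ */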
+
+ /* --------------------------------------------------
+ * Errata Workaround for Cortex A53 Errata #855873.
+ *
+ * This applies only to revisions >= r0p3 of Cortex A53.
+ * Earlier revisions of the core are affected as well, but don't
+ * have the chicken bit in the CPUACTLR register. It is expected that
+ * the rich OS takes care of that, especially as the workaround is
+	 * shared with other errata in those revisions of the CPU.
+ * Inputs:
+ * r0: variant[4:7] and revision[0:3] of current cpu.
+ * Shall clobber: r0-r3
+ * --------------------------------------------------
+ */
+func errata_a53_855873_wa
+ /*
+ * Compare r0 against revision r0p3 and higher
+ */
+ mov r2, lr
+ bl check_errata_855873
+ mov lr, r2
+ cmp r0, #ERRATA_NOT_APPLIES
+ beq 1f
+ ldcopr16 r0, r1, CORTEX_A53_CPUACTLR
+ orr64_imm r0, r1, CORTEX_A53_CPUACTLR_ENDCCASCI
+ stcopr16 r0, r1, CORTEX_A53_CPUACTLR
+1:
+ bx lr
+endfunc errata_a53_855873_wa
+
+func check_errata_855873
+ mov r1, #0x03
+ b cpu_rev_var_hs
+endfunc check_errata_855873
+
+ /* -------------------------------------------------
+ * The CPU Ops reset function for Cortex-A53.
+ * Shall clobber: r0-r6
+ * -------------------------------------------------
+ */
+func cortex_a53_reset_func
+ mov r5, lr
+ bl cpu_get_rev_var
+ mov r4, r0
+
+#if ERRATA_A53_826319
+ mov r0, r4
+ bl errata_a53_826319_wa
+#endif
+
+#if ERRATA_A53_836870
+ mov r0, r4
+ bl a53_disable_non_temporal_hint
+#endif
+
+#if ERRATA_A53_855873
+ mov r0, r4
+ bl errata_a53_855873_wa
+#endif
+
+ /* ---------------------------------------------
+ * Enable the SMP bit.
+ * ---------------------------------------------
+ */
+ ldcopr16 r0, r1, CORTEX_A53_ECTLR
+ orr64_imm r0, r1, CORTEX_A53_ECTLR_SMP_BIT
+ stcopr16 r0, r1, CORTEX_A53_ECTLR
+ isb
+ bx r5
+endfunc cortex_a53_reset_func
+
+ /* ----------------------------------------------------
+ * The CPU Ops core power down function for Cortex-A53.
+ * ----------------------------------------------------
+ */
+func cortex_a53_core_pwr_dwn
+ push {r12, lr}
+
+ /* Assert if cache is enabled */
+#if ENABLE_ASSERTIONS
+ ldcopr r0, SCTLR
+ tst r0, #SCTLR_C_BIT
+ ASM_ASSERT(eq)
+#endif
+
+ /* ---------------------------------------------
+ * Flush L1 caches.
+ * ---------------------------------------------
+ */
+ mov r0, #DC_OP_CISW
+ bl dcsw_op_level1
+
+ /* ---------------------------------------------
+ * Come out of intra cluster coherency
+ * ---------------------------------------------
+ */
+ pop {r12, lr}
+ b cortex_a53_disable_smp
+endfunc cortex_a53_core_pwr_dwn
+
+ /* -------------------------------------------------------
+ * The CPU Ops cluster power down function for Cortex-A53.
+ * Clobbers: r0-r3
+ * -------------------------------------------------------
+ */
+func cortex_a53_cluster_pwr_dwn
+ push {r12, lr}
+
+ /* Assert if cache is enabled */
+#if ENABLE_ASSERTIONS
+ ldcopr r0, SCTLR
+ tst r0, #SCTLR_C_BIT
+ ASM_ASSERT(eq)
+#endif
+
+ /* ---------------------------------------------
+ * Flush L1 caches.
+ * ---------------------------------------------
+ */
+ mov r0, #DC_OP_CISW
+ bl dcsw_op_level1
+
+ /* ---------------------------------------------
+ * Disable the optional ACP.
+ * ---------------------------------------------
+ */
+ bl plat_disable_acp
+
+ /* ---------------------------------------------
+ * Flush L2 caches.
+ * ---------------------------------------------
+ */
+ mov r0, #DC_OP_CISW
+ bl dcsw_op_level2
+
+ /* ---------------------------------------------
+ * Come out of intra cluster coherency
+ * ---------------------------------------------
+ */
+ pop {r12, lr}
+ b cortex_a53_disable_smp
+endfunc cortex_a53_cluster_pwr_dwn
+
+#if REPORT_ERRATA
+/*
+ * Errata printing function for Cortex A53. Must follow AAPCS.
+ */
+func cortex_a53_errata_report
+ push {r12, lr}
+
+ bl cpu_get_rev_var
+ mov r4, r0
+
+ /*
+ * Report all errata. The revision-variant information is passed to
+	 * the checking function of each erratum.
+ */
+ report_errata ERRATA_A53_819472, cortex_a53, 819472
+ report_errata ERRATA_A53_824069, cortex_a53, 824069
+ report_errata ERRATA_A53_826319, cortex_a53, 826319
+ report_errata ERRATA_A53_827319, cortex_a53, 827319
+ report_errata ERRATA_A53_836870, cortex_a53, disable_non_temporal_hint
+ report_errata ERRATA_A53_855873, cortex_a53, 855873
+
+ pop {r12, lr}
+ bx lr
+endfunc cortex_a53_errata_report
+#endif
+
+declare_cpu_ops cortex_a53, CORTEX_A53_MIDR, \
+ cortex_a53_reset_func, \
+ cortex_a53_core_pwr_dwn, \
+ cortex_a53_cluster_pwr_dwn
diff --git a/lib/cpus/aarch32/cortex_a57.S b/lib/cpus/aarch32/cortex_a57.S
new file mode 100644
index 0000000..18ee1f9
--- /dev/null
+++ b/lib/cpus/aarch32/cortex_a57.S
@@ -0,0 +1,618 @@
+/*
+ * Copyright (c) 2017-2022, Arm Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+#include <arch.h>
+#include <asm_macros.S>
+#include <assert_macros.S>
+#include <common/debug.h>
+#include <cortex_a57.h>
+#include <cpu_macros.S>
+
+ /* ---------------------------------------------
+ * Disable intra-cluster coherency
+ * Clobbers: r0-r1
+ * ---------------------------------------------
+ */
+func cortex_a57_disable_smp
+ ldcopr16 r0, r1, CORTEX_A57_ECTLR
+ bic64_imm r0, r1, CORTEX_A57_ECTLR_SMP_BIT
+ stcopr16 r0, r1, CORTEX_A57_ECTLR
+ bx lr
+endfunc cortex_a57_disable_smp
+
+ /* ---------------------------------------------
+ * Disable all types of L2 prefetches.
+ * Clobbers: r0-r2
+ * ---------------------------------------------
+ */
+func cortex_a57_disable_l2_prefetch
+ ldcopr16 r0, r1, CORTEX_A57_ECTLR
+ orr64_imm r0, r1, CORTEX_A57_ECTLR_DIS_TWD_ACC_PFTCH_BIT
+ bic64_imm r0, r1, (CORTEX_A57_ECTLR_L2_IPFTCH_DIST_MASK | \
+ CORTEX_A57_ECTLR_L2_DPFTCH_DIST_MASK)
+ stcopr16 r0, r1, CORTEX_A57_ECTLR
+ isb
+ dsb ish
+ bx lr
+endfunc cortex_a57_disable_l2_prefetch
+
+ /* ---------------------------------------------
+ * Disable debug interfaces
+ * ---------------------------------------------
+ */
+func cortex_a57_disable_ext_debug
+ mov r0, #1
+ stcopr r0, DBGOSDLR
+ isb
+#if ERRATA_A57_817169
+ /*
+ * Invalidate any TLB address
+ */
+ mov r0, #0
+ stcopr r0, TLBIMVA
+#endif
+ dsb sy
+ bx lr
+endfunc cortex_a57_disable_ext_debug
+
+ /* --------------------------------------------------
+ * Errata Workaround for Cortex A57 Errata #806969.
+ * This applies only to revision r0p0 of Cortex A57.
+ * Inputs:
+ * r0: variant[4:7] and revision[0:3] of current cpu.
+ * Shall clobber: r0-r3
+ * --------------------------------------------------
+ */
+func errata_a57_806969_wa
+ /*
+ * Compare r0 against revision r0p0
+ */
+ mov r2, lr
+ bl check_errata_806969
+ mov lr, r2
+ cmp r0, #ERRATA_NOT_APPLIES
+ beq 1f
+ ldcopr16 r0, r1, CORTEX_A57_CPUACTLR
+ orr64_imm r0, r1, CORTEX_A57_CPUACTLR_NO_ALLOC_WBWA
+ stcopr16 r0, r1, CORTEX_A57_CPUACTLR
+1:
+ bx lr
+endfunc errata_a57_806969_wa
+
+func check_errata_806969
+ mov r1, #0x00
+ b cpu_rev_var_ls
+endfunc check_errata_806969
+
+ /* ---------------------------------------------------
+ * Errata Workaround for Cortex A57 Errata #813419.
+ * This applies only to revision r0p0 of Cortex A57.
+ * ---------------------------------------------------
+ */
+func check_errata_813419
+ /*
+ * Even though this is only needed for revision r0p0, it
+ * is always applied due to limitations of the current
+ * errata framework.
+ */
+ mov r0, #ERRATA_APPLIES
+ bx lr
+endfunc check_errata_813419
+
+ /* ---------------------------------------------------
+ * Errata Workaround for Cortex A57 Errata #813420.
+ * This applies only to revision r0p0 of Cortex A57.
+ * Inputs:
+ * r0: variant[4:7] and revision[0:3] of current cpu.
+ * Shall clobber: r0-r3
+ * ---------------------------------------------------
+ */
+func errata_a57_813420_wa
+ /*
+ * Compare r0 against revision r0p0
+ */
+ mov r2, lr
+ bl check_errata_813420
+ mov lr, r2
+ cmp r0, #ERRATA_NOT_APPLIES
+ beq 1f
+ ldcopr16 r0, r1, CORTEX_A57_CPUACTLR
+ orr64_imm r0, r1, CORTEX_A57_CPUACTLR_DCC_AS_DCCI
+ stcopr16 r0, r1, CORTEX_A57_CPUACTLR
+1:
+ bx lr
+endfunc errata_a57_813420_wa
+
+func check_errata_813420
+ mov r1, #0x00
+ b cpu_rev_var_ls
+endfunc check_errata_813420
+
+ /* ---------------------------------------------------
+ * Errata Workaround for Cortex A57 Errata #814670.
+ * This applies only to revision r0p0 of Cortex A57.
+ * Inputs:
+ * r0: variant[4:7] and revision[0:3] of current cpu.
+ * Shall clobber: r0-r3
+ * ---------------------------------------------------
+ */
+func errata_a57_814670_wa
+ /*
+ * Compare r0 against revision r0p0
+ */
+ mov r2, lr
+ bl check_errata_814670
+ cmp r0, #ERRATA_NOT_APPLIES
+ beq 1f
+ ldcopr16 r0, r1, CORTEX_A57_CPUACTLR
+ orr64_imm r0, r1, CORTEX_A57_CPUACTLR_DIS_DMB_NULLIFICATION
+ stcopr16 r0, r1, CORTEX_A57_CPUACTLR
+ isb
+1:
+ bx r2
+endfunc errata_a57_814670_wa
+
+func check_errata_814670
+ mov r1, #0x00
+ b cpu_rev_var_ls
+endfunc check_errata_814670
+
+ /* ----------------------------------------------------
+ * Errata Workaround for Cortex A57 Errata #817169.
+ * This applies only to revision <= r0p1 of Cortex A57.
+ * ----------------------------------------------------
+ */
+func check_errata_817169
+ /*
+ * Even though this is only needed for revision <= r0p1, it
+ * is always applied because of the low cost of the workaround.
+ */
+ mov r0, #ERRATA_APPLIES
+ bx lr
+endfunc check_errata_817169
+
+ /* --------------------------------------------------------------------
+ * Disable the over-read from the LDNP instruction.
+ *
+ * This applies to all revisions <= r1p2. The performance degradation
+ * observed with LDNP/STNP has been fixed on r1p3 and onwards.
+ *
+ * Inputs:
+ * r0: variant[4:7] and revision[0:3] of current cpu.
+ * Shall clobber: r0-r3
+ * ---------------------------------------------------------------------
+ */
+func a57_disable_ldnp_overread
+ /*
+ * Compare r0 against revision r1p2
+ */
+ mov r2, lr
+ bl check_errata_disable_ldnp_overread
+ mov lr, r2
+ cmp r0, #ERRATA_NOT_APPLIES
+ beq 1f
+ ldcopr16 r0, r1, CORTEX_A57_CPUACTLR
+ orr64_imm r0, r1, CORTEX_A57_CPUACTLR_DIS_OVERREAD
+ stcopr16 r0, r1, CORTEX_A57_CPUACTLR
+1:
+ bx lr
+endfunc a57_disable_ldnp_overread
+
+func check_errata_disable_ldnp_overread
+ mov r1, #0x12
+ b cpu_rev_var_ls
+endfunc check_errata_disable_ldnp_overread
+
+ /* ---------------------------------------------------
+ * Errata Workaround for Cortex A57 Errata #826974.
+ * This applies only to revision <= r1p1 of Cortex A57.
+ * Inputs:
+ * r0: variant[4:7] and revision[0:3] of current cpu.
+ * Shall clobber: r0-r3
+ * ---------------------------------------------------
+ */
+func errata_a57_826974_wa
+ /*
+ * Compare r0 against revision r1p1
+ */
+ mov r2, lr
+ bl check_errata_826974
+ mov lr, r2
+ cmp r0, #ERRATA_NOT_APPLIES
+ beq 1f
+ ldcopr16 r0, r1, CORTEX_A57_CPUACTLR
+ orr64_imm r0, r1, CORTEX_A57_CPUACTLR_DIS_LOAD_PASS_DMB
+ stcopr16 r0, r1, CORTEX_A57_CPUACTLR
+1:
+ bx lr
+endfunc errata_a57_826974_wa
+
+func check_errata_826974
+ mov r1, #0x11
+ b cpu_rev_var_ls
+endfunc check_errata_826974
+
+ /* ---------------------------------------------------
+ * Errata Workaround for Cortex A57 Errata #826977.
+ * This applies only to revision <= r1p1 of Cortex A57.
+ * Inputs:
+ * r0: variant[4:7] and revision[0:3] of current cpu.
+ * Shall clobber: r0-r3
+ * ---------------------------------------------------
+ */
+func errata_a57_826977_wa
+ /*
+ * Compare r0 against revision r1p1
+ */
+ mov r2, lr
+ bl check_errata_826977
+ mov lr, r2
+ cmp r0, #ERRATA_NOT_APPLIES
+ beq 1f
+ ldcopr16 r0, r1, CORTEX_A57_CPUACTLR
+ orr64_imm r0, r1, CORTEX_A57_CPUACTLR_GRE_NGRE_AS_NGNRE
+ stcopr16 r0, r1, CORTEX_A57_CPUACTLR
+1:
+ bx lr
+endfunc errata_a57_826977_wa
+
+func check_errata_826977
+ mov r1, #0x11
+ b cpu_rev_var_ls
+endfunc check_errata_826977
+
+ /* ---------------------------------------------------
+ * Errata Workaround for Cortex A57 Errata #828024.
+ * This applies only to revision <= r1p1 of Cortex A57.
+ * Inputs:
+ * r0: variant[4:7] and revision[0:3] of current cpu.
+ * Shall clobber: r0-r3
+ * ---------------------------------------------------
+ */
+func errata_a57_828024_wa
+ /*
+ * Compare r0 against revision r1p1
+ */
+ mov r2, lr
+ bl check_errata_828024
+ mov lr, r2
+ cmp r0, #ERRATA_NOT_APPLIES
+ beq 1f
+ ldcopr16 r0, r1, CORTEX_A57_CPUACTLR
+ /*
+ * Setting the relevant bits in CORTEX_A57_CPUACTLR has to be done in 2
+ * instructions here because the resulting bitmask doesn't fit in a
+ * 16-bit value so it cannot be encoded in a single instruction.
+ */
+ orr64_imm r0, r1, CORTEX_A57_CPUACTLR_NO_ALLOC_WBWA
+ orr64_imm r0, r1, (CORTEX_A57_CPUACTLR_DIS_L1_STREAMING | CORTEX_A57_CPUACTLR_DIS_STREAMING)
+ stcopr16 r0, r1, CORTEX_A57_CPUACTLR
+1:
+ bx lr
+endfunc errata_a57_828024_wa
+
+func check_errata_828024
+ mov r1, #0x11
+ b cpu_rev_var_ls
+endfunc check_errata_828024
+
+ /* ---------------------------------------------------
+ * Errata Workaround for Cortex A57 Errata #829520.
+ * This applies only to revision <= r1p2 of Cortex A57.
+ * Inputs:
+ * r0: variant[4:7] and revision[0:3] of current cpu.
+ * Shall clobber: r0-r3
+ * ---------------------------------------------------
+ */
+func errata_a57_829520_wa
+ /*
+ * Compare r0 against revision r1p2
+ */
+ mov r2, lr
+ bl check_errata_829520
+ mov lr, r2
+ cmp r0, #ERRATA_NOT_APPLIES
+ beq 1f
+ ldcopr16 r0, r1, CORTEX_A57_CPUACTLR
+ orr64_imm r0, r1, CORTEX_A57_CPUACTLR_DIS_INDIRECT_PREDICTOR
+ stcopr16 r0, r1, CORTEX_A57_CPUACTLR
+1:
+ bx lr
+endfunc errata_a57_829520_wa
+
+func check_errata_829520
+ mov r1, #0x12
+ b cpu_rev_var_ls
+endfunc check_errata_829520
+
+ /* ---------------------------------------------------
+ * Errata Workaround for Cortex A57 Errata #833471.
+ * This applies only to revision <= r1p2 of Cortex A57.
+ * Inputs:
+ * r0: variant[4:7] and revision[0:3] of current cpu.
+ * Shall clobber: r0-r3
+ * ---------------------------------------------------
+ */
+func errata_a57_833471_wa
+ /*
+ * Compare r0 against revision r1p2
+ */
+ mov r2, lr
+ bl check_errata_833471
+ mov lr, r2
+ cmp r0, #ERRATA_NOT_APPLIES
+ beq 1f
+ ldcopr16 r0, r1, CORTEX_A57_CPUACTLR
+ orr64_imm r1, r1, CORTEX_A57_CPUACTLR_FORCE_FPSCR_FLUSH
+ stcopr16 r0, r1, CORTEX_A57_CPUACTLR
+1:
+ bx lr
+endfunc errata_a57_833471_wa
+
+func check_errata_833471
+ mov r1, #0x12
+ b cpu_rev_var_ls
+endfunc check_errata_833471
+
+ /* ---------------------------------------------------
+ * Errata Workaround for Cortex A57 Errata #859972.
+ * This applies only to revision <= r1p3 of Cortex A57.
+ * Inputs:
+ * r0: variant[4:7] and revision[0:3] of current cpu.
+ * Shall clobber: r0-r3
+ * ---------------------------------------------------
+ */
+func errata_a57_859972_wa
+ mov r2, lr
+ bl check_errata_859972
+ mov lr, r2
+ cmp r0, #ERRATA_NOT_APPLIES
+ beq 1f
+ ldcopr16 r0, r1, CORTEX_A57_CPUACTLR
+ orr64_imm r1, r1, CORTEX_A57_CPUACTLR_DIS_INSTR_PREFETCH
+ stcopr16 r0, r1, CORTEX_A57_CPUACTLR
+1:
+ bx lr
+endfunc errata_a57_859972_wa
+
+func check_errata_859972
+ mov r1, #0x13
+ b cpu_rev_var_ls
+endfunc check_errata_859972
+
+func check_errata_cve_2017_5715
+ mov r0, #ERRATA_MISSING
+ bx lr
+endfunc check_errata_cve_2017_5715
+
+func check_errata_cve_2018_3639
+#if WORKAROUND_CVE_2018_3639
+ mov r0, #ERRATA_APPLIES
+#else
+ mov r0, #ERRATA_MISSING
+#endif
+ bx lr
+endfunc check_errata_cve_2018_3639
+
+func check_errata_cve_2022_23960
+ mov r0, #ERRATA_MISSING
+ bx lr
+endfunc check_errata_cve_2022_23960
+
+ /* -------------------------------------------------
+ * The CPU Ops reset function for Cortex-A57.
+ * Shall clobber: r0-r6
+ * -------------------------------------------------
+ */
+func cortex_a57_reset_func
+ mov r5, lr
+ bl cpu_get_rev_var
+ mov r4, r0
+
+#if ERRATA_A57_806969
+ mov r0, r4
+ bl errata_a57_806969_wa
+#endif
+
+#if ERRATA_A57_813420
+ mov r0, r4
+ bl errata_a57_813420_wa
+#endif
+
+#if ERRATA_A57_814670
+ mov r0, r4
+ bl errata_a57_814670_wa
+#endif
+
+#if A57_DISABLE_NON_TEMPORAL_HINT
+ mov r0, r4
+ bl a57_disable_ldnp_overread
+#endif
+
+#if ERRATA_A57_826974
+ mov r0, r4
+ bl errata_a57_826974_wa
+#endif
+
+#if ERRATA_A57_826977
+ mov r0, r4
+ bl errata_a57_826977_wa
+#endif
+
+#if ERRATA_A57_828024
+ mov r0, r4
+ bl errata_a57_828024_wa
+#endif
+
+#if ERRATA_A57_829520
+ mov r0, r4
+ bl errata_a57_829520_wa
+#endif
+
+#if ERRATA_A57_833471
+ mov r0, r4
+ bl errata_a57_833471_wa
+#endif
+
+#if ERRATA_A57_859972
+ mov r0, r4
+ bl errata_a57_859972_wa
+#endif
+
+#if WORKAROUND_CVE_2018_3639
+ ldcopr16 r0, r1, CORTEX_A57_CPUACTLR
+ orr64_imm r0, r1, CORTEX_A57_CPUACTLR_DIS_LOAD_PASS_STORE
+ stcopr16 r0, r1, CORTEX_A57_CPUACTLR
+ isb
+ dsb sy
+#endif
+
+ /* ---------------------------------------------
+ * Enable the SMP bit.
+ * ---------------------------------------------
+ */
+ ldcopr16 r0, r1, CORTEX_A57_ECTLR
+ orr64_imm r0, r1, CORTEX_A57_ECTLR_SMP_BIT
+ stcopr16 r0, r1, CORTEX_A57_ECTLR
+ isb
+ bx r5
+endfunc cortex_a57_reset_func
+
+ /* ----------------------------------------------------
+ * The CPU Ops core power down function for Cortex-A57.
+ * ----------------------------------------------------
+ */
+func cortex_a57_core_pwr_dwn
+ push {r12, lr}
+
+ /* Assert if cache is enabled */
+#if ENABLE_ASSERTIONS
+ ldcopr r0, SCTLR
+ tst r0, #SCTLR_C_BIT
+ ASM_ASSERT(eq)
+#endif
+
+ /* ---------------------------------------------
+ * Disable the L2 prefetches.
+ * ---------------------------------------------
+ */
+ bl cortex_a57_disable_l2_prefetch
+
+ /* ---------------------------------------------
+ * Flush L1 caches.
+ * ---------------------------------------------
+ */
+ mov r0, #DC_OP_CISW
+ bl dcsw_op_level1
+
+ /* ---------------------------------------------
+ * Come out of intra cluster coherency
+ * ---------------------------------------------
+ */
+ bl cortex_a57_disable_smp
+
+ /* ---------------------------------------------
+ * Force the debug interfaces to be quiescent
+ * ---------------------------------------------
+ */
+ pop {r12, lr}
+ b cortex_a57_disable_ext_debug
+endfunc cortex_a57_core_pwr_dwn
+
+ /* -------------------------------------------------------
+ * The CPU Ops cluster power down function for Cortex-A57.
+ * Clobbers: r0-r3
+ * -------------------------------------------------------
+ */
+func cortex_a57_cluster_pwr_dwn
+ push {r12, lr}
+
+ /* Assert if cache is enabled */
+#if ENABLE_ASSERTIONS
+ ldcopr r0, SCTLR
+ tst r0, #SCTLR_C_BIT
+ ASM_ASSERT(eq)
+#endif
+
+ /* ---------------------------------------------
+ * Disable the L2 prefetches.
+ * ---------------------------------------------
+ */
+ bl cortex_a57_disable_l2_prefetch
+
+ /* ---------------------------------------------
+ * Flush L1 caches.
+ * ---------------------------------------------
+ */
+ mov r0, #DC_OP_CISW
+ bl dcsw_op_level1
+
+ /* ---------------------------------------------
+ * Disable the optional ACP.
+ * ---------------------------------------------
+ */
+ bl plat_disable_acp
+
+ /* ---------------------------------------------
+ * Flush L2 caches.
+ * ---------------------------------------------
+ */
+ mov r0, #DC_OP_CISW
+ bl dcsw_op_level2
+
+ /* ---------------------------------------------
+ * Come out of intra cluster coherency
+ * ---------------------------------------------
+ */
+ bl cortex_a57_disable_smp
+
+ /* ---------------------------------------------
+ * Force the debug interfaces to be quiescent
+ * ---------------------------------------------
+ */
+ pop {r12, lr}
+ b cortex_a57_disable_ext_debug
+endfunc cortex_a57_cluster_pwr_dwn
+
+#if REPORT_ERRATA
+/*
+ * Errata printing function for Cortex A57. Must follow AAPCS.
+ */
+func cortex_a57_errata_report
+ push {r12, lr}
+
+ bl cpu_get_rev_var
+ mov r4, r0
+
+ /*
+ * Report all errata. The revision-variant information is passed to
+	 * the checking function of each erratum.
+ */
+ report_errata ERRATA_A57_806969, cortex_a57, 806969
+ report_errata ERRATA_A57_813419, cortex_a57, 813419
+ report_errata ERRATA_A57_813420, cortex_a57, 813420
+ report_errata ERRATA_A57_814670, cortex_a57, 814670
+ report_errata ERRATA_A57_817169, cortex_a57, 817169
+ report_errata A57_DISABLE_NON_TEMPORAL_HINT, cortex_a57, \
+ disable_ldnp_overread
+ report_errata ERRATA_A57_826974, cortex_a57, 826974
+ report_errata ERRATA_A57_826977, cortex_a57, 826977
+ report_errata ERRATA_A57_828024, cortex_a57, 828024
+ report_errata ERRATA_A57_829520, cortex_a57, 829520
+ report_errata ERRATA_A57_833471, cortex_a57, 833471
+ report_errata ERRATA_A57_859972, cortex_a57, 859972
+ report_errata WORKAROUND_CVE_2017_5715, cortex_a57, cve_2017_5715
+ report_errata WORKAROUND_CVE_2018_3639, cortex_a57, cve_2018_3639
+ report_errata WORKAROUND_CVE_2022_23960, cortex_a57, cve_2022_23960
+
+ pop {r12, lr}
+ bx lr
+endfunc cortex_a57_errata_report
+#endif
+
+declare_cpu_ops cortex_a57, CORTEX_A57_MIDR, \
+ cortex_a57_reset_func, \
+ cortex_a57_core_pwr_dwn, \
+ cortex_a57_cluster_pwr_dwn
diff --git a/lib/cpus/aarch32/cortex_a7.S b/lib/cpus/aarch32/cortex_a7.S
new file mode 100644
index 0000000..4d4bb77
--- /dev/null
+++ b/lib/cpus/aarch32/cortex_a7.S
@@ -0,0 +1,84 @@
+/*
+ * Copyright (c) 2017, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#include <arch.h>
+#include <asm_macros.S>
+#include <assert_macros.S>
+#include <cortex_a7.h>
+#include <cpu_macros.S>
+
+ .macro assert_cache_enabled
+#if ENABLE_ASSERTIONS
+ ldcopr r0, SCTLR
+ tst r0, #SCTLR_C_BIT
+ ASM_ASSERT(eq)
+#endif
+ .endm
+
+func cortex_a7_disable_smp
+ ldcopr r0, ACTLR
+ bic r0, #CORTEX_A7_ACTLR_SMP_BIT
+ stcopr r0, ACTLR
+ isb
+ dsb sy
+ bx lr
+endfunc cortex_a7_disable_smp
+
+func cortex_a7_enable_smp
+ ldcopr r0, ACTLR
+ orr r0, #CORTEX_A7_ACTLR_SMP_BIT
+ stcopr r0, ACTLR
+ isb
+ bx lr
+endfunc cortex_a7_enable_smp
+
+func cortex_a7_reset_func
+ b cortex_a7_enable_smp
+endfunc cortex_a7_reset_func
+
+func cortex_a7_core_pwr_dwn
+ push {r12, lr}
+
+ assert_cache_enabled
+
+ /* Flush L1 cache */
+ mov r0, #DC_OP_CISW
+ bl dcsw_op_level1
+
+ /* Exit cluster coherency */
+ pop {r12, lr}
+ b cortex_a7_disable_smp
+endfunc cortex_a7_core_pwr_dwn
+
+func cortex_a7_cluster_pwr_dwn
+ push {r12, lr}
+
+ assert_cache_enabled
+
+ /* Flush L1 caches */
+ mov r0, #DC_OP_CISW
+ bl dcsw_op_level1
+
+ bl plat_disable_acp
+
+ /* Exit cluster coherency */
+ pop {r12, lr}
+ b cortex_a7_disable_smp
+endfunc cortex_a7_cluster_pwr_dwn
+
+#if REPORT_ERRATA
+/*
+ * Errata printing function for Cortex-A7. Must follow AAPCS.
+ */
+func cortex_a7_errata_report
+ bx lr
+endfunc cortex_a7_errata_report
+#endif
+
+declare_cpu_ops cortex_a7, CORTEX_A7_MIDR, \
+ cortex_a7_reset_func, \
+ cortex_a7_core_pwr_dwn, \
+ cortex_a7_cluster_pwr_dwn
diff --git a/lib/cpus/aarch32/cortex_a72.S b/lib/cpus/aarch32/cortex_a72.S
new file mode 100644
index 0000000..03914b2
--- /dev/null
+++ b/lib/cpus/aarch32/cortex_a72.S
@@ -0,0 +1,278 @@
+/*
+ * Copyright (c) 2017-2022, Arm Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+#include <arch.h>
+#include <asm_macros.S>
+#include <assert_macros.S>
+#include <common/debug.h>
+#include <cortex_a72.h>
+#include <cpu_macros.S>
+
+ /* ---------------------------------------------
+ * Disable all types of L2 prefetches.
+ * ---------------------------------------------
+ */
+func cortex_a72_disable_l2_prefetch
+ ldcopr16 r0, r1, CORTEX_A72_ECTLR
+ orr64_imm r0, r1, CORTEX_A72_ECTLR_DIS_TWD_ACC_PFTCH_BIT
+ bic64_imm r0, r1, (CORTEX_A72_ECTLR_L2_IPFTCH_DIST_MASK | \
+ CORTEX_A72_ECTLR_L2_DPFTCH_DIST_MASK)
+ stcopr16 r0, r1, CORTEX_A72_ECTLR
+ isb
+ bx lr
+endfunc cortex_a72_disable_l2_prefetch
+
+ /* ---------------------------------------------
+ * Disable the load-store hardware prefetcher.
+ * ---------------------------------------------
+ */
+func cortex_a72_disable_hw_prefetcher
+ ldcopr16 r0, r1, CORTEX_A72_CPUACTLR
+ orr64_imm r0, r1, CORTEX_A72_CPUACTLR_DISABLE_L1_DCACHE_HW_PFTCH
+ stcopr16 r0, r1, CORTEX_A72_CPUACTLR
+ isb
+ dsb ish
+ bx lr
+endfunc cortex_a72_disable_hw_prefetcher
+
+ /* ---------------------------------------------
+ * Disable intra-cluster coherency
+ * Clobbers: r0-r1
+ * ---------------------------------------------
+ */
+func cortex_a72_disable_smp
+ ldcopr16 r0, r1, CORTEX_A72_ECTLR
+ bic64_imm r0, r1, CORTEX_A72_ECTLR_SMP_BIT
+ stcopr16 r0, r1, CORTEX_A72_ECTLR
+ bx lr
+endfunc cortex_a72_disable_smp
+
+ /* ---------------------------------------------
+ * Disable debug interfaces
+ * ---------------------------------------------
+ */
+func cortex_a72_disable_ext_debug
+ mov r0, #1
+ stcopr r0, DBGOSDLR
+ isb
+ dsb sy
+ bx lr
+endfunc cortex_a72_disable_ext_debug
+
+ /* ---------------------------------------------------
+ * Errata Workaround for Cortex A72 Errata #859971.
+ * This applies only to revision <= r0p3 of Cortex A72.
+ * Inputs:
+ * r0: variant[4:7] and revision[0:3] of current cpu.
+ * Shall clobber: r0-r3
+ * ---------------------------------------------------
+ */
+func errata_a72_859971_wa
+	mov	r2, lr
+ bl check_errata_859971
+ mov lr, r2
+ cmp r0, #ERRATA_NOT_APPLIES
+ beq 1f
+ ldcopr16 r0, r1, CORTEX_A72_CPUACTLR
+ orr64_imm r1, r1, CORTEX_A72_CPUACTLR_DIS_INSTR_PREFETCH
+ stcopr16 r0, r1, CORTEX_A72_CPUACTLR
+1:
+ bx lr
+endfunc errata_a72_859971_wa
+
+func check_errata_859971
+ mov r1, #0x03
+ b cpu_rev_var_ls
+endfunc check_errata_859971
+
+func check_errata_cve_2017_5715
+ mov r0, #ERRATA_MISSING
+ bx lr
+endfunc check_errata_cve_2017_5715
+
+func check_errata_cve_2018_3639
+#if WORKAROUND_CVE_2018_3639
+ mov r0, #ERRATA_APPLIES
+#else
+ mov r0, #ERRATA_MISSING
+#endif
+ bx lr
+endfunc check_errata_cve_2018_3639
+
+func check_errata_cve_2022_23960
+ mov r0, #ERRATA_MISSING
+ bx lr
+endfunc check_errata_cve_2022_23960
+
+ /* -------------------------------------------------
+ * The CPU Ops reset function for Cortex-A72.
+ * -------------------------------------------------
+ */
+func cortex_a72_reset_func
+ mov r5, lr
+ bl cpu_get_rev_var
+ mov r4, r0
+
+#if ERRATA_A72_859971
+ mov r0, r4
+ bl errata_a72_859971_wa
+#endif
+
+#if WORKAROUND_CVE_2018_3639
+ ldcopr16 r0, r1, CORTEX_A72_CPUACTLR
+ orr64_imm r0, r1, CORTEX_A72_CPUACTLR_DIS_LOAD_PASS_STORE
+ stcopr16 r0, r1, CORTEX_A72_CPUACTLR
+ isb
+ dsb sy
+#endif
+
+ /* ---------------------------------------------
+ * Enable the SMP bit.
+ * ---------------------------------------------
+ */
+ ldcopr16 r0, r1, CORTEX_A72_ECTLR
+ orr64_imm r0, r1, CORTEX_A72_ECTLR_SMP_BIT
+ stcopr16 r0, r1, CORTEX_A72_ECTLR
+ isb
+ bx r5
+endfunc cortex_a72_reset_func
+
+ /* ----------------------------------------------------
+ * The CPU Ops core power down function for Cortex-A72.
+ * ----------------------------------------------------
+ */
+func cortex_a72_core_pwr_dwn
+ push {r12, lr}
+
+ /* Assert if cache is enabled */
+#if ENABLE_ASSERTIONS
+ ldcopr r0, SCTLR
+ tst r0, #SCTLR_C_BIT
+ ASM_ASSERT(eq)
+#endif
+
+ /* ---------------------------------------------
+ * Disable the L2 prefetches.
+ * ---------------------------------------------
+ */
+ bl cortex_a72_disable_l2_prefetch
+
+ /* ---------------------------------------------
+ * Disable the load-store hardware prefetcher.
+ * ---------------------------------------------
+ */
+ bl cortex_a72_disable_hw_prefetcher
+
+ /* ---------------------------------------------
+ * Flush L1 caches.
+ * ---------------------------------------------
+ */
+ mov r0, #DC_OP_CISW
+ bl dcsw_op_level1
+
+ /* ---------------------------------------------
+ * Come out of intra cluster coherency
+ * ---------------------------------------------
+ */
+ bl cortex_a72_disable_smp
+
+ /* ---------------------------------------------
+ * Force the debug interfaces to be quiescent
+ * ---------------------------------------------
+ */
+ pop {r12, lr}
+ b cortex_a72_disable_ext_debug
+endfunc cortex_a72_core_pwr_dwn
+
+ /* -------------------------------------------------------
+ * The CPU Ops cluster power down function for Cortex-A72.
+ * -------------------------------------------------------
+ */
+func cortex_a72_cluster_pwr_dwn
+ push {r12, lr}
+
+ /* Assert if cache is enabled */
+#if ENABLE_ASSERTIONS
+ ldcopr r0, SCTLR
+ tst r0, #SCTLR_C_BIT
+ ASM_ASSERT(eq)
+#endif
+
+ /* ---------------------------------------------
+ * Disable the L2 prefetches.
+ * ---------------------------------------------
+ */
+ bl cortex_a72_disable_l2_prefetch
+
+ /* ---------------------------------------------
+ * Disable the load-store hardware prefetcher.
+ * ---------------------------------------------
+ */
+ bl cortex_a72_disable_hw_prefetcher
+
+#if !SKIP_A72_L1_FLUSH_PWR_DWN
+ /* ---------------------------------------------
+ * Flush L1 caches.
+ * ---------------------------------------------
+ */
+ mov r0, #DC_OP_CISW
+ bl dcsw_op_level1
+#endif
+
+ /* ---------------------------------------------
+ * Disable the optional ACP.
+ * ---------------------------------------------
+ */
+ bl plat_disable_acp
+
+ /* -------------------------------------------------
+ * Flush the L2 caches.
+ * -------------------------------------------------
+ */
+ mov r0, #DC_OP_CISW
+ bl dcsw_op_level2
+
+ /* ---------------------------------------------
+ * Come out of intra cluster coherency
+ * ---------------------------------------------
+ */
+ bl cortex_a72_disable_smp
+
+ /* ---------------------------------------------
+ * Force the debug interfaces to be quiescent
+ * ---------------------------------------------
+ */
+ pop {r12, lr}
+ b cortex_a72_disable_ext_debug
+endfunc cortex_a72_cluster_pwr_dwn
+
+#if REPORT_ERRATA
+/*
+ * Errata printing function for Cortex A72. Must follow AAPCS.
+ */
+func cortex_a72_errata_report
+ push {r12, lr}
+
+ bl cpu_get_rev_var
+ mov r4, r0
+
+ /*
+ * Report all errata. The revision-variant information is passed to
+	 * the checking function of each erratum.
+ */
+ report_errata ERRATA_A72_859971, cortex_a72, 859971
+ report_errata WORKAROUND_CVE_2017_5715, cortex_a72, cve_2017_5715
+ report_errata WORKAROUND_CVE_2018_3639, cortex_a72, cve_2018_3639
+ report_errata WORKAROUND_CVE_2022_23960, cortex_a72, cve_2022_23960
+
+ pop {r12, lr}
+ bx lr
+endfunc cortex_a72_errata_report
+#endif
+
+declare_cpu_ops cortex_a72, CORTEX_A72_MIDR, \
+ cortex_a72_reset_func, \
+ cortex_a72_core_pwr_dwn, \
+ cortex_a72_cluster_pwr_dwn
diff --git a/lib/cpus/aarch32/cortex_a9.S b/lib/cpus/aarch32/cortex_a9.S
new file mode 100644
index 0000000..7200343
--- /dev/null
+++ b/lib/cpus/aarch32/cortex_a9.S
@@ -0,0 +1,121 @@
+/*
+ * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#include <arch.h>
+#include <asm_macros.S>
+#include <assert_macros.S>
+#include <cortex_a9.h>
+#include <cpu_macros.S>
+
+ .macro assert_cache_enabled
+#if ENABLE_ASSERTIONS
+ ldcopr r0, SCTLR
+ tst r0, #SCTLR_C_BIT
+ ASM_ASSERT(eq)
+#endif
+ .endm
+
+func cortex_a9_disable_smp
+ ldcopr r0, ACTLR
+ bic r0, #CORTEX_A9_ACTLR_SMP_BIT
+ stcopr r0, ACTLR
+ isb
+ dsb sy
+ bx lr
+endfunc cortex_a9_disable_smp
+
+func cortex_a9_enable_smp
+ ldcopr r0, ACTLR
+ orr r0, #CORTEX_A9_ACTLR_SMP_BIT
+ stcopr r0, ACTLR
+ isb
+ bx lr
+endfunc cortex_a9_enable_smp
+
+func check_errata_a9_794073
+#if ERRATA_A9_794073
+ mov r0, #ERRATA_APPLIES
+#else
+ mov r0, #ERRATA_MISSING
+#endif
+ bx lr
+endfunc check_errata_a9_794073
+
+func check_errata_cve_2017_5715
+#if WORKAROUND_CVE_2017_5715
+ mov r0, #ERRATA_APPLIES
+#else
+ mov r0, #ERRATA_MISSING
+#endif
+ bx lr
+endfunc check_errata_cve_2017_5715
+
+#if REPORT_ERRATA
+/*
+ * Errata printing function for Cortex A9. Must follow AAPCS.
+ */
+func cortex_a9_errata_report
+ push {r12, lr}
+
+ bl cpu_get_rev_var
+ mov r4, r0
+
+ /*
+ * Report all errata. The revision-variant information is passed to
+	 * the checking function of each erratum.
+ */
+ report_errata WORKAROUND_CVE_2017_5715, cortex_a9, cve_2017_5715
+	report_errata ERRATA_A9_794073, cortex_a9, a9_794073
+
+ pop {r12, lr}
+ bx lr
+endfunc cortex_a9_errata_report
+#endif
+
+func cortex_a9_reset_func
+#if IMAGE_BL32 && WORKAROUND_CVE_2017_5715
+ ldr r0, =workaround_bpiall_runtime_exceptions
+ stcopr r0, VBAR
+ stcopr r0, MVBAR
+ /* isb will be applied in the course of the reset func */
+#endif
+ b cortex_a9_enable_smp
+endfunc cortex_a9_reset_func
+
+func cortex_a9_core_pwr_dwn
+ push {r12, lr}
+
+ assert_cache_enabled
+
+ /* Flush L1 cache */
+ mov r0, #DC_OP_CISW
+ bl dcsw_op_level1
+
+ /* Exit cluster coherency */
+ pop {r12, lr}
+ b cortex_a9_disable_smp
+endfunc cortex_a9_core_pwr_dwn
+
+func cortex_a9_cluster_pwr_dwn
+ push {r12, lr}
+
+ assert_cache_enabled
+
+ /* Flush L1 caches */
+ mov r0, #DC_OP_CISW
+ bl dcsw_op_level1
+
+ bl plat_disable_acp
+
+ /* Exit cluster coherency */
+ pop {r12, lr}
+ b cortex_a9_disable_smp
+endfunc cortex_a9_cluster_pwr_dwn
+
+declare_cpu_ops cortex_a9, CORTEX_A9_MIDR, \
+ cortex_a9_reset_func, \
+ cortex_a9_core_pwr_dwn, \
+ cortex_a9_cluster_pwr_dwn
diff --git a/lib/cpus/aarch32/cpu_helpers.S b/lib/cpus/aarch32/cpu_helpers.S
new file mode 100644
index 0000000..6ed800c
--- /dev/null
+++ b/lib/cpus/aarch32/cpu_helpers.S
@@ -0,0 +1,264 @@
+/*
+ * Copyright (c) 2016-2019, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#include <arch.h>
+#include <asm_macros.S>
+#include <assert_macros.S>
+#include <cpu_macros.S>
+#include <common/bl_common.h>
+#include <lib/el3_runtime/cpu_data.h>
+
+#if defined(IMAGE_BL1) || defined(IMAGE_BL32) || (defined(IMAGE_BL2) && BL2_AT_EL3)
+ /*
+ * The reset handler common to all platforms. After a matching
+	 * cpu_ops structure entry is found, the corresponding reset_handler
+ * in the cpu_ops is invoked. The reset handler is invoked very early
+ * in the boot sequence and it is assumed that we can clobber r0 - r10
+ * without the need to follow AAPCS.
+ * Clobbers: r0 - r10
+ */
+ .globl reset_handler
+func reset_handler
+ mov r8, lr
+
+ /* The plat_reset_handler can clobber r0 - r7 */
+ bl plat_reset_handler
+
+ /* Get the matching cpu_ops pointer (clobbers: r0 - r5) */
+ bl get_cpu_ops_ptr
+
+#if ENABLE_ASSERTIONS
+ cmp r0, #0
+ ASM_ASSERT(ne)
+#endif
+
+ /* Get the cpu_ops reset handler */
+ ldr r1, [r0, #CPU_RESET_FUNC]
+ cmp r1, #0
+ mov lr, r8
+ bxne r1
+ bx lr
+endfunc reset_handler
+
+#endif
+
+#ifdef IMAGE_BL32 /* The power down core and cluster is needed only in BL32 */
+ /*
+ * void prepare_cpu_pwr_dwn(unsigned int power_level)
+ *
+ * Prepare CPU power down function for all platforms. The function takes
+ * a domain level to be powered down as its parameter. After the cpu_ops
+ * pointer is retrieved from cpu_data, the handler for requested power
+ * level is called.
+ */
+ .globl prepare_cpu_pwr_dwn
+func prepare_cpu_pwr_dwn
+ /*
+ * If the given power level exceeds CPU_MAX_PWR_DWN_OPS, we call the
+ * power down handler for the last power level
+ */
+ mov r2, #(CPU_MAX_PWR_DWN_OPS - 1)
+ cmp r0, r2
+ movhi r0, r2
+
+ push {r0, lr}
+ bl _cpu_data
+ pop {r2, lr}
+
+ ldr r0, [r0, #CPU_DATA_CPU_OPS_PTR]
+#if ENABLE_ASSERTIONS
+ cmp r0, #0
+ ASM_ASSERT(ne)
+#endif
+
+ /* Get the appropriate power down handler */
+ mov r1, #CPU_PWR_DWN_OPS
+ add r1, r1, r2, lsl #2
+ ldr r1, [r0, r1]
+#if ENABLE_ASSERTIONS
+ cmp r1, #0
+ ASM_ASSERT(ne)
+#endif
+ bx r1
+endfunc prepare_cpu_pwr_dwn
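+
+	/*
+	 * An illustrative call sequence (the power level numbering is an
+	 * assumption; level 0 is typically the core, level 1 the cluster):
+	 *
+	 *	mov	r0, #1			@ power down up to cluster level
+	 *	bl	prepare_cpu_pwr_dwn	@ branches to CPU_PWR_DWN_OPS[1]
+	 */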
+
+ /*
+ * Initializes the cpu_ops_ptr if not already initialized
+ * in cpu_data. This must only be called after the data cache
+ * is enabled. AAPCS is followed.
+ */
+ .globl init_cpu_ops
+func init_cpu_ops
+ push {r4 - r6, lr}
+ bl _cpu_data
+ mov r6, r0
+ ldr r1, [r0, #CPU_DATA_CPU_OPS_PTR]
+ cmp r1, #0
+ bne 1f
+ bl get_cpu_ops_ptr
+#if ENABLE_ASSERTIONS
+ cmp r0, #0
+ ASM_ASSERT(ne)
+#endif
+ str r0, [r6, #CPU_DATA_CPU_OPS_PTR]!
+1:
+ pop {r4 - r6, pc}
+endfunc init_cpu_ops
+
+#endif /* IMAGE_BL32 */
+
+ /*
+	 * The function below returns the cpu_ops structure matching the
+	 * MIDR of the core. It reads the MIDR and searches the registered
+	 * cpu_ops entries for a match. Only the implementer and part number
+	 * fields are used for matching.
+ * Return :
+ * r0 - The matching cpu_ops pointer on Success
+ * r0 - 0 on failure.
+ * Clobbers: r0 - r5
+ */
+ .globl get_cpu_ops_ptr
+func get_cpu_ops_ptr
+ /* Get the cpu_ops start and end locations */
+ ldr r4, =(__CPU_OPS_START__ + CPU_MIDR)
+ ldr r5, =(__CPU_OPS_END__ + CPU_MIDR)
+
+ /* Initialize the return parameter */
+ mov r0, #0
+
+ /* Read the MIDR_EL1 */
+ ldcopr r2, MIDR
+ ldr r3, =CPU_IMPL_PN_MASK
+
+ /* Retain only the implementation and part number using mask */
+ and r2, r2, r3
+1:
+ /* Check if we have reached end of list */
+ cmp r4, r5
+ bhs error_exit
+
+ /* load the midr from the cpu_ops */
+ ldr r1, [r4], #CPU_OPS_SIZE
+ and r1, r1, r3
+
+ /* Check if midr matches to midr of this core */
+ cmp r1, r2
+ bne 1b
+
+ /* Subtract the increment and offset to get the cpu-ops pointer */
+ sub r0, r4, #(CPU_OPS_SIZE + CPU_MIDR)
+#if ENABLE_ASSERTIONS
+ cmp r0, #0
+ ASM_ASSERT(ne)
+#endif
+error_exit:
+ bx lr
+endfunc get_cpu_ops_ptr
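+
+/*
+ * Worked example of the masking above (values assumed for illustration):
+ * a Cortex-A53 r0p4 core reads MIDR = 0x410fd034. If CPU_IMPL_PN_MASK
+ * keeps only implementer[31:24] and part number[15:4], i.e. 0xff00fff0:
+ *
+ *	0x410fd034 & 0xff00fff0 = 0x4100d030
+ *
+ * The stored CORTEX_A53_MIDR reduces to the same key, so variant and
+ * revision differences never affect the match.
+ */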
+
+/*
+ * Extract CPU revision and variant, and combine them into a single numeric for
+ * easier comparison.
+ */
+ .globl cpu_get_rev_var
+func cpu_get_rev_var
+ ldcopr r1, MIDR
+
+ /*
+	 * Extract the variant[23:20] and revision[3:0] fields from r1 and pack
+	 * them into r0[7:0] as variant[7:4] and revision[3:0]:
+ *
+ * First extract r1[23:16] to r0[7:0] and zero fill the rest. Then
+ * extract r1[3:0] into r0[3:0] retaining other bits.
+ */
+ ubfx r0, r1, #(MIDR_VAR_SHIFT - MIDR_REV_BITS), #(MIDR_REV_BITS + MIDR_VAR_BITS)
+ bfi r0, r1, #MIDR_REV_SHIFT, #MIDR_REV_BITS
+ bx lr
+endfunc cpu_get_rev_var
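+
+/*
+ * Worked example (MIDR value assumed): a Cortex-A15 r2p2 reads
+ * MIDR = 0x412fc0f2, and the two instructions above compute:
+ *
+ *	ubfx	r0, r1, #16, #8		@ r0 = 0x2f (variant 2, arch nibble 0xf)
+ *	bfi	r0, r1, #0, #4		@ r0 = 0x22 (variant 2, revision 2)
+ *
+ * This 0xVR packing is what cpu_rev_var_ls/cpu_rev_var_hs below compare
+ * against literals such as #0x12 (r1p2) or #0x30 (r3p0).
+ */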
+
+/*
+ * Compare the CPU's revision-variant (r0) with a given value (r1), for errata
+ * application purposes. If the revision-variant is lower than or the same as
+ * the given value, the erratum applies; otherwise it does not.
+ */
+ .globl cpu_rev_var_ls
+func cpu_rev_var_ls
+ cmp r0, r1
+ movls r0, #ERRATA_APPLIES
+ movhi r0, #ERRATA_NOT_APPLIES
+ bx lr
+endfunc cpu_rev_var_ls
+
+/*
+ * Compare the CPU's revision-variant (r0) with a given value (r1), for errata
+ * application purposes. If the revision-variant is higher than or the same as
+ * the given value, the erratum applies; otherwise it does not.
+ */
+ .globl cpu_rev_var_hs
+func cpu_rev_var_hs
+ cmp r0, r1
+ movge r0, #ERRATA_APPLIES
+ movlt r0, #ERRATA_NOT_APPLIES
+ bx lr
+endfunc cpu_rev_var_hs
+
+#if REPORT_ERRATA
+/*
+ * void print_errata_status(void);
+ *
+ * Function to print errata status for CPUs of its class. Must be called only:
+ *
+ * - with the MMU and data caches enabled;
+ * - after cpu_ops have been initialized in per-CPU data.
+ */
+ .globl print_errata_status
+func print_errata_status
+ /* r12 is pushed only for the sake of 8-byte stack alignment */
+ push {r4, r5, r12, lr}
+#ifdef IMAGE_BL1
+ /*
+ * BL1 doesn't have per-CPU data. So retrieve the CPU operations
+ * directly.
+ */
+ bl get_cpu_ops_ptr
+ ldr r0, [r0, #CPU_ERRATA_FUNC]
+ cmp r0, #0
+ blxne r0
+#else
+ /*
+ * Retrieve pointer to cpu_ops, and further, the errata printing
+ * function. If it's non-NULL, jump to the function in turn.
+ */
+ bl _cpu_data
+#if ENABLE_ASSERTIONS
+ cmp r0, #0
+ ASM_ASSERT(ne)
+#endif
+ ldr r1, [r0, #CPU_DATA_CPU_OPS_PTR]
+#if ENABLE_ASSERTIONS
+ cmp r1, #0
+ ASM_ASSERT(ne)
+#endif
+ ldr r0, [r1, #CPU_ERRATA_FUNC]
+ cmp r0, #0
+ beq 1f
+
+ mov r4, r0
+
+ /*
+ * Load pointers to errata lock and printed flag. Call
+ * errata_needs_reporting to check whether this CPU needs to report
+ * errata status pertaining to its class.
+ */
+ ldr r0, [r1, #CPU_ERRATA_LOCK]
+ ldr r1, [r1, #CPU_ERRATA_PRINTED]
+ bl errata_needs_reporting
+ cmp r0, #0
+ blxne r4
+1:
+#endif
+ pop {r4, r5, r12, pc}
+endfunc print_errata_status
+#endif