Merge "fix(a3k): change fatal error to warning when CM3 reset is not implemented" into integration
diff --git a/include/lib/cpus/aarch64/cortex_a76.h b/include/lib/cpus/aarch64/cortex_a76.h
index a61825f..74fb6e9 100644
--- a/include/lib/cpus/aarch64/cortex_a76.h
+++ b/include/lib/cpus/aarch64/cortex_a76.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2017-2020, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2022, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -10,38 +10,41 @@
#include <lib/utils_def.h>
/* Cortex-A76 MIDR for revision 0 */
-#define CORTEX_A76_MIDR U(0x410fd0b0)
+#define CORTEX_A76_MIDR U(0x410fd0b0)
+
+/* Cortex-A76 loop count for CVE-2022-23960 mitigation */
+#define CORTEX_A76_BHB_LOOP_COUNT U(24)
/*******************************************************************************
* CPU Extended Control register specific definitions.
******************************************************************************/
-#define CORTEX_A76_CPUPWRCTLR_EL1 S3_0_C15_C2_7
-#define CORTEX_A76_CPUECTLR_EL1 S3_0_C15_C1_4
+#define CORTEX_A76_CPUPWRCTLR_EL1 S3_0_C15_C2_7
+#define CORTEX_A76_CPUECTLR_EL1 S3_0_C15_C1_4
-#define CORTEX_A76_CPUECTLR_EL1_WS_THR_L2 (ULL(3) << 24)
-#define CORTEX_A76_CPUECTLR_EL1_BIT_51 (ULL(1) << 51)
+#define CORTEX_A76_CPUECTLR_EL1_WS_THR_L2 (ULL(3) << 24)
+#define CORTEX_A76_CPUECTLR_EL1_BIT_51 (ULL(1) << 51)
/*******************************************************************************
* CPU Auxiliary Control register specific definitions.
******************************************************************************/
-#define CORTEX_A76_CPUACTLR_EL1 S3_0_C15_C1_0
+#define CORTEX_A76_CPUACTLR_EL1 S3_0_C15_C1_0
#define CORTEX_A76_CPUACTLR_EL1_DISABLE_STATIC_PREDICTION (ULL(1) << 6)
-#define CORTEX_A76_CPUACTLR_EL1_BIT_13 (ULL(1) << 13)
+#define CORTEX_A76_CPUACTLR_EL1_BIT_13 (ULL(1) << 13)
-#define CORTEX_A76_CPUACTLR2_EL1 S3_0_C15_C1_1
+#define CORTEX_A76_CPUACTLR2_EL1 S3_0_C15_C1_1
-#define CORTEX_A76_CPUACTLR2_EL1_BIT_2 (ULL(1) << 2)
+#define CORTEX_A76_CPUACTLR2_EL1_BIT_2 (ULL(1) << 2)
#define CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE (ULL(1) << 16)
-#define CORTEX_A76_CPUACTLR3_EL1 S3_0_C15_C1_2
+#define CORTEX_A76_CPUACTLR3_EL1 S3_0_C15_C1_2
-#define CORTEX_A76_CPUACTLR3_EL1_BIT_10 (ULL(1) << 10)
+#define CORTEX_A76_CPUACTLR3_EL1_BIT_10 (ULL(1) << 10)
/* Definitions of register field mask in CORTEX_A76_CPUPWRCTLR_EL1 */
-#define CORTEX_A76_CORE_PWRDN_EN_MASK U(0x1)
+#define CORTEX_A76_CORE_PWRDN_EN_MASK U(0x1)
#endif /* CORTEX_A76_H */
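
The CORTEX_A76_MIDR value above follows the architectural MIDR_EL1 layout: implementer[31:24], variant[23:20], architecture[19:16], partnum[15:4], revision[3:0]. A standalone C sketch (illustration only, not part of the patch) decodes U(0x410fd0b0) to show why it denotes Cortex-A76 r0p0; the cpu_ops framework matches on this value with the variant and revision fields masked off.

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint32_t midr = 0x410fd0b0u;	/* CORTEX_A76_MIDR, r0p0 */

	printf("implementer 0x%02x\n", (midr >> 24) & 0xffu);	/* 0x41: Arm */
	printf("variant     r%u\n", (midr >> 20) & 0xfu);	/* r0 */
	printf("partnum     0x%03x\n", (midr >> 4) & 0xfffu);	/* 0xd0b: Cortex-A76 */
	printf("revision    p%u\n", midr & 0xfu);		/* p0 */
	return 0;
}
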
diff --git a/lib/cpus/aarch64/cortex_a76.S b/lib/cpus/aarch64/cortex_a76.S
index 4f7f4bb..114d0f5 100644
--- a/lib/cpus/aarch64/cortex_a76.S
+++ b/lib/cpus/aarch64/cortex_a76.S
@@ -7,11 +7,11 @@
#include <arch.h>
#include <asm_macros.S>
#include <common/bl_common.h>
-#include <context.h>
#include <cortex_a76.h>
#include <cpu_macros.S>
#include <plat_macros.S>
#include <services/arm_arch_svc.h>
+#include "wa_cve_2022_23960_bhb.S"
/* Hardware handled coherency */
#if HW_ASSISTED_COHERENCY == 0
@@ -35,59 +35,17 @@
*
* The macro saves x2-x3 to the context. In the fast path
* x0-x3 registers do not need to be restored as the calling
- * context will have saved them.
+ * context will have saved them. The macro also saves
+ * x29-x30 to the context in the sync_exception path.
*/
.macro apply_cve_2018_3639_wa _is_sync_exception _esr_el3_val
stp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
-
.if \_is_sync_exception
- /*
- * Ensure SMC is coming from A64/A32 state on #0
- * with W0 = SMCCC_ARCH_WORKAROUND_2
- *
- * This sequence evaluates as:
- * (W0==SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3==SMC#0) : (NE)
- * allowing use of a single branch operation
- */
- orr w2, wzr, #SMCCC_ARCH_WORKAROUND_2
- cmp x0, x2
- mrs x3, esr_el3
- mov_imm w2, \_esr_el3_val
- ccmp w2, w3, #0, eq
- /*
- * Static predictor will predict a fall-through, optimizing
- * the `SMCCC_ARCH_WORKAROUND_2` fast path.
- */
- bne 1f
-
- /*
- * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
- * fast path.
- */
- cmp x1, xzr /* enable/disable check */
-
- /*
- * When the calling context wants mitigation disabled,
- * we program the mitigation disable function in the
- * CPU context, which gets invoked on subsequent exits from
- * EL3 via the `el3_exit` function. Otherwise NULL is
- * programmed in the CPU context, which results in caller's
- * inheriting the EL3 mitigation state (enabled) on subsequent
- * `el3_exit`.
- */
- mov x0, xzr
- adr x1, cortex_a76_disable_wa_cve_2018_3639
- csel x1, x1, x0, eq
- str x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
-
- mrs x2, CORTEX_A76_CPUACTLR2_EL1
- orr x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
- bic x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
- csel x3, x3, x1, eq
- msr CORTEX_A76_CPUACTLR2_EL1, x3
- exception_return /* exception_return contains ISB */
+ stp x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
+ mov_imm w2, \_esr_el3_val
+ bl apply_cve_2018_3639_sync_wa
+ ldp x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
.endif
-1:
/*
* Always enable v4 mitigation during EL3 execution. This is not
* required for the fast path above because it does not perform any
@@ -105,8 +63,10 @@
*/
ldp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
.endm
+#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
-vector_base cortex_a76_wa_cve_2018_3639_a76_vbar
+#if DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960
+vector_base cortex_a76_wa_cve_vbar
/* ---------------------------------------------------------------------
* Current EL with SP_EL0 : 0x0 - 0x200
@@ -153,22 +113,54 @@
* ---------------------------------------------------------------------
*/
vector_entry cortex_a76_sync_exception_aarch64
+
+#if WORKAROUND_CVE_2022_23960
+ apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
+#endif /* WORKAROUND_CVE_2022_23960 */
+
+#if DYNAMIC_WORKAROUND_CVE_2018_3639
apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
+#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
+
b sync_exception_aarch64
end_vector_entry cortex_a76_sync_exception_aarch64
vector_entry cortex_a76_irq_aarch64
+
+#if WORKAROUND_CVE_2022_23960
+ apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
+#endif /* WORKAROUND_CVE_2022_23960 */
+
+#if DYNAMIC_WORKAROUND_CVE_2018_3639
apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
+#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
+
b irq_aarch64
end_vector_entry cortex_a76_irq_aarch64
vector_entry cortex_a76_fiq_aarch64
+
+#if WORKAROUND_CVE_2022_23960
+ apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
+#endif /* WORKAROUND_CVE_2022_23960 */
+
+#if DYNAMIC_WORKAROUND_CVE_2018_3639
apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
+#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
+
b fiq_aarch64
end_vector_entry cortex_a76_fiq_aarch64
vector_entry cortex_a76_serror_aarch64
+
+#if WORKAROUND_CVE_2022_23960
+ apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
+#endif /* WORKAROUND_CVE_2022_23960 */
+
+#if DYNAMIC_WORKAROUND_CVE_2018_3639
apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
+#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
+
b serror_aarch64
end_vector_entry cortex_a76_serror_aarch64
@@ -177,24 +169,130 @@
* ---------------------------------------------------------------------
*/
vector_entry cortex_a76_sync_exception_aarch32
+
+#if WORKAROUND_CVE_2022_23960
+ apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
+#endif /* WORKAROUND_CVE_2022_23960 */
+
+#if DYNAMIC_WORKAROUND_CVE_2018_3639
apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
+#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
+
b sync_exception_aarch32
end_vector_entry cortex_a76_sync_exception_aarch32
vector_entry cortex_a76_irq_aarch32
+
+#if WORKAROUND_CVE_2022_23960
+ apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
+#endif /* WORKAROUND_CVE_2022_23960 */
+
+#if DYNAMIC_WORKAROUND_CVE_2018_3639
apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
+#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
+
b irq_aarch32
end_vector_entry cortex_a76_irq_aarch32
vector_entry cortex_a76_fiq_aarch32
+
+#if WORKAROUND_CVE_2022_23960
+ apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
+#endif /* WORKAROUND_CVE_2022_23960 */
+
+#if DYNAMIC_WORKAROUND_CVE_2018_3639
apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
+#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
+
b fiq_aarch32
end_vector_entry cortex_a76_fiq_aarch32
vector_entry cortex_a76_serror_aarch32
+
+#if WORKAROUND_CVE_2022_23960
+ apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
+#endif /* WORKAROUND_CVE_2022_23960 */
+
+#if DYNAMIC_WORKAROUND_CVE_2018_3639
apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
+#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
+
b serror_aarch32
end_vector_entry cortex_a76_serror_aarch32
+#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960 */
+
+#if DYNAMIC_WORKAROUND_CVE_2018_3639
+ /*
+ * -----------------------------------------------------------------
+ * This function applies the mitigation for CVE-2018-3639
+ * specifically for sync exceptions. It implements a fast path
+ * where `SMCCC_ARCH_WORKAROUND_2` SMC calls from a lower EL
+ * running in AArch64 will go through the fast path and return early.
+ *
+ * In the fast path x0-x3 registers do not need to be restored as the
+ * calling context will have saved them.
+ *
+ * The caller must pass the esr_el3 value to compare against in x2.
+ * Clobbered registers are saved to and restored from the context
+ * outside of this function, before jumping to the runtime vector table.
+ *
+ * Shall clobber: x0-x3, x30
+ * -----------------------------------------------------------------
+ */
+func apply_cve_2018_3639_sync_wa
+ /*
+ * Ensure SMC is coming from A64/A32 state on #0
+ * with W0 = SMCCC_ARCH_WORKAROUND_2
+ *
+ * This sequence evaluates as:
+ * (W0==SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3==SMC#0) : (NE)
+ * allowing use of a single branch operation
+ * X2 is populated outside this function with the expected esr_el3 value.
+ */
+ orr w3, wzr, #SMCCC_ARCH_WORKAROUND_2
+ cmp x0, x3
+ mrs x3, esr_el3
+
+ ccmp w2, w3, #0, eq
+ /*
+ * Static predictor will predict a fall-through, optimizing
+ * the `SMCCC_ARCH_WORKAROUND_2` fast path.
+ */
+ bne 1f
+
+ /*
+ * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
+ * fast path.
+ */
+ cmp x1, xzr /* enable/disable check */
+
+ /*
+ * When the calling context wants mitigation disabled,
+ * we program the mitigation disable function in the
+ * CPU context, which gets invoked on subsequent exits from
+ * EL3 via the `el3_exit` function. Otherwise NULL is
+ * programmed in the CPU context, which results in caller's
+ * inheriting the EL3 mitigation state (enabled) on subsequent
+ * `el3_exit`.
+ */
+ mov x0, xzr
+ adr x1, cortex_a76_disable_wa_cve_2018_3639
+ csel x1, x1, x0, eq
+ str x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
+
+ mrs x2, CORTEX_A76_CPUACTLR2_EL1
+ orr x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
+ bic x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
+ csel x3, x3, x1, eq
+ msr CORTEX_A76_CPUACTLR2_EL1, x3
+ ldp x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
+ /*
+ * `SMCCC_ARCH_WORKAROUND_2` fast path return to lower EL.
+ */
+ exception_return /* exception_return contains ISB */
+1:
+ ret
+endfunc apply_cve_2018_3639_sync_wa
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
/* --------------------------------------------------
@@ -519,6 +617,15 @@
#endif
endfunc check_errata_1165522
+func check_errata_cve_2022_23960
+#if WORKAROUND_CVE_2022_23960
+ mov x0, #ERRATA_APPLIES
+#else
+ mov x0, #ERRATA_MISSING
+#endif /* WORKAROUND_CVE_2022_23960 */
+ ret
+endfunc check_errata_cve_2022_23960
+
/* -------------------------------------------------
* The CPU Ops reset function for Cortex-A76.
* Shall clobber: x0-x19
@@ -590,16 +697,31 @@
* The Cortex-A76 generic vectors are overwritten to use the vectors
* defined above. This is required in order to apply mitigation
* against CVE-2018-3639 on exception entry from lower ELs.
+ * If this vector table is installed here, skip installing it again
+ * for CVE-2022-23960, as both workarounds share the same vbar.
*/
- adr x0, cortex_a76_wa_cve_2018_3639_a76_vbar
+ adr x0, cortex_a76_wa_cve_vbar
msr vbar_el3, x0
isb
+ b 2f
#endif /* IMAGE_BL31 */
1:
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
#endif /* WORKAROUND_CVE_2018_3639 */
+#if IMAGE_BL31 && WORKAROUND_CVE_2022_23960
+ /*
+ * The Cortex-A76 generic vectors are overridden to apply errata
+ * mitigation on exception entry from lower ELs. This will be bypassed
+ * if DYNAMIC_WORKAROUND_CVE_2018_3639 has overridden the vectors.
+ */
+ adr x0, cortex_a76_wa_cve_vbar
+ msr vbar_el3, x0
+ isb
+#endif /* IMAGE_BL31 && WORKAROUND_CVE_2022_23960 */
+2:
+
#if ERRATA_DSU_798953
bl errata_dsu_798953_wa
#endif
@@ -656,6 +778,7 @@
report_errata WORKAROUND_CVE_2018_3639, cortex_a76, cve_2018_3639
report_errata ERRATA_DSU_798953, cortex_a76, dsu_798953
report_errata ERRATA_DSU_936184, cortex_a76, dsu_936184
+ report_errata WORKAROUND_CVE_2022_23960, cortex_a76, cve_2022_23960
ldp x8, x30, [sp], #16
ret
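
The cmp/ccmp/bne sequence in apply_cve_2018_3639_sync_wa above folds two comparisons into a single conditional branch: ccmp performs the second compare only when the first one matched, and otherwise forces the flags to the immediate #0 (all clear, hence NE). A C model of the predicate it computes — a sketch for illustration; 0x80007fff is the SMCCC-defined SMCCC_ARCH_WORKAROUND_2 function ID:

#include <stdbool.h>
#include <stdint.h>

#define SMCCC_ARCH_WORKAROUND_2	0x80007fffu	/* SMCCC function ID */

/*
 * Models:
 *   orr  w3, wzr, #SMCCC_ARCH_WORKAROUND_2
 *   cmp  x0, x3		// x0: SMC FID passed by the caller
 *   mrs  x3, esr_el3
 *   ccmp w2, w3, #0, eq	// w2: expected ESR (SMC #0), checked only on FID match
 *   bne  1f			// any mismatch skips the fast path
 */
static bool wa2_fast_path(uint64_t x0, uint32_t esr_el3, uint32_t expected_esr)
{
	return (x0 == SMCCC_ARCH_WORKAROUND_2) && (esr_el3 == expected_esr);
}

When the predicate holds, x1 selects whether cortex_a76_disable_wa_cve_2018_3639 is recorded in the CPU context for the next el3_exit, CPUACTLR2_EL1 bit 16 is programmed to match, and the function leaves via exception_return instead of returning to its caller.
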
diff --git a/plat/arm/css/sgi/include/sgi_base_platform_def.h b/plat/arm/css/sgi/include/sgi_base_platform_def.h
index 93609b9..c9c8c04 100644
--- a/plat/arm/css/sgi/include/sgi_base_platform_def.h
+++ b/plat/arm/css/sgi/include/sgi_base_platform_def.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2018-2022, Arm Limited and Contributors. All rights reserved.
+ * Copyright (c) 2018-2021, Arm Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -35,8 +35,8 @@
# if SPM_MM
# define PLAT_ARM_MMAP_ENTRIES (9 + ((CSS_SGI_CHIP_COUNT - 1) * 3))
# define MAX_XLAT_TABLES (7 + ((CSS_SGI_CHIP_COUNT - 1) * 3))
-# define PLAT_SP_IMAGE_MMAP_REGIONS 10
-# define PLAT_SP_IMAGE_MAX_XLAT_TABLES 12
+# define PLAT_SP_IMAGE_MMAP_REGIONS 9
+# define PLAT_SP_IMAGE_MAX_XLAT_TABLES 11
# else
# define PLAT_ARM_MMAP_ENTRIES (5 + ((CSS_SGI_CHIP_COUNT - 1) * 3))
# define MAX_XLAT_TABLES (6 + ((CSS_SGI_CHIP_COUNT - 1) * 3))
@@ -130,21 +130,6 @@
# define PLATFORM_STACK_SIZE 0x440
#endif
-/* PL011 UART related constants */
-#define SOC_CSS_SEC_UART_BASE UL(0x2A410000)
-#define SOC_CSS_NSEC_UART_BASE UL(0x2A400000)
-#define SOC_CSS_UART_SIZE UL(0x10000)
-#define SOC_CSS_UART_CLK_IN_HZ UL(7372800)
-
-/* UART related constants */
-#define PLAT_ARM_BOOT_UART_BASE SOC_CSS_SEC_UART_BASE
-#define PLAT_ARM_BOOT_UART_CLK_IN_HZ SOC_CSS_UART_CLK_IN_HZ
-
-#define PLAT_ARM_RUN_UART_BASE SOC_CSS_SEC_UART_BASE
-#define PLAT_ARM_RUN_UART_CLK_IN_HZ SOC_CSS_UART_CLK_IN_HZ
-
-#define PLAT_ARM_CRASH_UART_BASE SOC_CSS_SEC_UART_BASE
-#define PLAT_ARM_CRASH_UART_CLK_IN_HZ SOC_CSS_UART_CLK_IN_HZ
#define PLAT_ARM_NSTIMER_FRAME_ID 0
@@ -273,18 +258,4 @@
CSS_SGI_REMOTE_CHIP_MEM_OFFSET(n) + ARM_DRAM2_END, \
ARM_TZC_NS_DRAM_S_ACCESS, PLAT_ARM_TZC_NS_DEV_ACCESS}
-#if SPM_MM
-
-/*
- * Stand-alone MM logs would be routed via secure UART. Define page table
- * entry for secure UART which would be common to all platforms.
- */
-#define SOC_PLATFORM_SECURE_UART MAP_REGION_FLAT( \
- SOC_CSS_SEC_UART_BASE, \
- SOC_CSS_UART_SIZE, \
- MT_DEVICE | MT_RW | \
- MT_SECURE | MT_USER)
-
-#endif
-
#endif /* SGI_BASE_PLATFORM_DEF_H */
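
The SPM_MM block removed above supplied the page-table entry that let StandaloneMM reach the secure UART from S-EL0; dropping it is what allows PLAT_SP_IMAGE_MMAP_REGIONS to shrink from 10 to 9 earlier in this file. A minimal sketch, assuming TF-A's xlat tables v2 API, of what the deleted macro expressed — MAP_REGION_FLAT maps VA == PA, and MT_USER grants EL0 access:

#include <lib/xlat_tables/xlat_tables_v2.h>

/*
 * Identity (VA == PA) device mapping of the secure UART, read/write
 * from S-EL0 (MT_USER) so the StandaloneMM partition can emit logs.
 * Base and size mirror the SOC_CSS_SEC_UART_BASE and SOC_CSS_UART_SIZE
 * constants deleted above.
 */
static const mmap_region_t secure_uart_mmap =
	MAP_REGION_FLAT(0x2A410000UL, 0x10000UL,
			MT_DEVICE | MT_RW | MT_SECURE | MT_USER);
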
diff --git a/plat/arm/css/sgi/include/sgi_soc_css_def.h b/plat/arm/css/sgi/include/sgi_soc_css_def.h
deleted file mode 100644
index f78b45a..0000000
--- a/plat/arm/css/sgi/include/sgi_soc_css_def.h
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright (c) 2022, ARM Limited and Contributors. All rights reserved.
- *
- * SPDX-License-Identifier: BSD-3-Clause
- */
-
-#ifndef SGI_SOC_CSS_DEF_H
-#define SGI_SOC_CSS_DEF_H
-
-#include <lib/utils_def.h>
-#include <plat/arm/board/common/v2m_def.h>
-#include <plat/arm/soc/common/soc_css_def.h>
-#include <plat/common/common_def.h>
-
-/*
- * Definitions common to all ARM CSSv1-based development platforms
- */
-
-/* Platform ID address */
-#define BOARD_CSS_PLAT_ID_REG_ADDR UL(0x7ffe00e0)
-
-/* Platform ID related accessors */
-#define BOARD_CSS_PLAT_ID_REG_ID_MASK 0x0f
-#define BOARD_CSS_PLAT_ID_REG_ID_SHIFT 0x0
-#define BOARD_CSS_PLAT_TYPE_EMULATOR 0x02
-
-#ifndef __ASSEMBLER__
-
-#include <lib/mmio.h>
-
-#define BOARD_CSS_GET_PLAT_TYPE(addr) \
- ((mmio_read_32(addr) & BOARD_CSS_PLAT_ID_REG_ID_MASK) \
- >> BOARD_CSS_PLAT_ID_REG_ID_SHIFT)
-
-#endif /* __ASSEMBLER__ */
-
-#define MAX_IO_DEVICES 3
-#define MAX_IO_HANDLES 4
-
-/* Reserve the last block of flash for PSCI MEM PROTECT flag */
-#define PLAT_ARM_FLASH_IMAGE_BASE V2M_FLASH0_BASE
-#define PLAT_ARM_FLASH_IMAGE_MAX_SIZE (V2M_FLASH0_SIZE - V2M_FLASH_BLOCK_SIZE)
-
-#define PLAT_ARM_NVM_BASE V2M_FLASH0_BASE
-#define PLAT_ARM_NVM_SIZE (V2M_FLASH0_SIZE - V2M_FLASH_BLOCK_SIZE)
-
-#endif /* SGI_SOC_CSS_DEF_H */
diff --git a/plat/arm/css/sgi/include/sgi_soc_css_def_v2.h b/plat/arm/css/sgi/include/sgi_soc_css_def_v2.h
index acf31eb..639b687 100644
--- a/plat/arm/css/sgi/include/sgi_soc_css_def_v2.h
+++ b/plat/arm/css/sgi/include/sgi_soc_css_def_v2.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2021-2022, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2021, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -24,10 +24,17 @@
#define SOC_CSS_PCIE_CONTROL_BASE UL(0x0ef20000)
+/* PL011 UART related constants */
+#define SOC_CSS_UART1_BASE UL(0x0ef80000)
+#define SOC_CSS_UART0_BASE UL(0x0ef70000)
+
/* Memory controller */
#define SOC_MEMCNTRL_BASE UL(0x10000000)
#define SOC_MEMCNTRL_SIZE UL(0x10000000)
+#define SOC_CSS_UART0_CLK_IN_HZ UL(7372800)
+#define SOC_CSS_UART1_CLK_IN_HZ UL(7372800)
+
/* SoC NIC-400 Global Programmers View (GPV) */
#define SOC_CSS_NIC400_BASE UL(0x0ED00000)
@@ -199,4 +206,17 @@
#define PLAT_ARM_NVM_BASE V2M_FLASH0_BASE
#define PLAT_ARM_NVM_SIZE (V2M_FLASH0_SIZE - V2M_FLASH_BLOCK_SIZE)
+/* UART related constants */
+#define PLAT_ARM_BOOT_UART_BASE SOC_CSS_UART0_BASE
+#define PLAT_ARM_BOOT_UART_CLK_IN_HZ SOC_CSS_UART0_CLK_IN_HZ
+
+#define PLAT_ARM_RUN_UART_BASE SOC_CSS_UART1_BASE
+#define PLAT_ARM_RUN_UART_CLK_IN_HZ SOC_CSS_UART1_CLK_IN_HZ
+
+#define PLAT_ARM_SP_MIN_RUN_UART_BASE SOC_CSS_UART1_BASE
+#define PLAT_ARM_SP_MIN_RUN_UART_CLK_IN_HZ SOC_CSS_UART1_CLK_IN_HZ
+
+#define PLAT_ARM_CRASH_UART_BASE PLAT_ARM_RUN_UART_BASE
+#define PLAT_ARM_CRASH_UART_CLK_IN_HZ PLAT_ARM_RUN_UART_CLK_IN_HZ
+
#endif /* SGI_SOC_CSS_DEF_V2_H */
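
The 7372800 Hz value chosen for both UART clocks is a convenient PL011 reference: the PL011 divisor is BAUDDIV = UARTCLK / (16 * baudrate), split into the integer IBRD and the 6-bit fraction FBRD, and at 115200 baud this clock divides exactly. A self-contained C sketch of the standard computation (rounding omitted since the example divides evenly):

#include <stdint.h>
#include <stdio.h>

/* BAUDDIV scaled by 64 so the low 6 bits are FBRD and the rest IBRD. */
static void pl011_divisors(uint32_t clk_hz, uint32_t baud,
			   uint32_t *ibrd, uint32_t *fbrd)
{
	uint32_t div64 = (clk_hz * 4u) / baud;	/* clk * 64 / (16 * baud) */

	*ibrd = div64 >> 6;
	*fbrd = div64 & 0x3fu;
}

int main(void)
{
	uint32_t i, f;

	pl011_divisors(7372800u, 115200u, &i, &f);
	printf("IBRD=%u FBRD=%u\n", i, f);	/* IBRD=4 FBRD=0 */
	return 0;
}
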
diff --git a/plat/arm/css/sgi/include/sgi_soc_platform_def.h b/plat/arm/css/sgi/include/sgi_soc_platform_def.h
index 3b8d9c6..405d62f 100644
--- a/plat/arm/css/sgi/include/sgi_soc_platform_def.h
+++ b/plat/arm/css/sgi/include/sgi_soc_platform_def.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2020-2022, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2020-2021, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -7,10 +7,10 @@
#ifndef SGI_SOC_PLATFORM_DEF_H
#define SGI_SOC_PLATFORM_DEF_H
+#include <sgi_base_platform_def.h>
+#include <plat/arm/board/common/board_css_def.h>
#include <plat/arm/board/common/v2m_def.h>
#include <plat/arm/soc/common/soc_css_def.h>
-#include <sgi_base_platform_def.h>
-#include <sgi_soc_css_def.h>
/* Map the System registers to access from S-EL0 */
#define CSS_SYSTEMREG_DEVICE_BASE (0x1C010000)
diff --git a/plat/arm/css/sgi/sgi_plat.c b/plat/arm/css/sgi/sgi_plat.c
index a0199c3..20c52e9 100644
--- a/plat/arm/css/sgi/sgi_plat.c
+++ b/plat/arm/css/sgi/sgi_plat.c
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2018-2022, Arm Limited and Contributors. All rights reserved.
+ * Copyright (c) 2018-2021, Arm Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -89,7 +89,6 @@
const mmap_region_t plat_arm_secure_partition_mmap[] = {
PLAT_ARM_SECURE_MAP_SYSTEMREG,
PLAT_ARM_SECURE_MAP_NOR2,
- SOC_PLATFORM_SECURE_UART,
PLAT_ARM_SECURE_MAP_DEVICE,
ARM_SP_IMAGE_MMAP,
ARM_SP_IMAGE_NS_BUF_MMAP,
diff --git a/plat/arm/css/sgi/sgi_plat_v2.c b/plat/arm/css/sgi/sgi_plat_v2.c
index cef5345..1a2a966 100644
--- a/plat/arm/css/sgi/sgi_plat_v2.c
+++ b/plat/arm/css/sgi/sgi_plat_v2.c
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2021-2022, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2021, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -83,7 +83,6 @@
const mmap_region_t plat_arm_secure_partition_mmap[] = {
PLAT_ARM_SECURE_MAP_SYSTEMREG,
PLAT_ARM_SECURE_MAP_NOR2,
- SOC_PLATFORM_SECURE_UART,
SOC_PLATFORM_PERIPH_MAP_DEVICE_USER,
ARM_SP_IMAGE_MMAP,
ARM_SP_IMAGE_NS_BUF_MMAP,
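
Removing SOC_PLATFORM_SECURE_UART from both plat_arm_secure_partition_mmap[] arrays is what backs the PLAT_SP_IMAGE_MMAP_REGIONS drop from 10 to 9 in sgi_base_platform_def.h. A hypothetical compile-time guard, using TF-A's existing CASSERT and ARRAY_SIZE helpers and placed next to the array definition, would keep the macro and the array in sync; the -1 assumes the array ends with the usual {0} sentinel:

#include <lib/cassert.h>
#include <lib/utils_def.h>

/* Hypothetical guard: regions registered must fit the SP xlat context. */
CASSERT((ARRAY_SIZE(plat_arm_secure_partition_mmap) - 1U) <=
	PLAT_SP_IMAGE_MMAP_REGIONS,
	assert_sp_image_mmap_regions_too_small);
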