fix(mpam): run-time checks for mpam save/restore routines

With "ENABLE_MPAM_FOR_LOWER_ELS" and "CTX_INCLUDE_EL2_REGS" build
options enabled, MPAM EL2 registers would be saved/restored as part of
context management. Context save/restore routines as of now would
proceed to access all of MPAM EL2 registers without any runtime checks.
MPAM specification states that MPAMHCR_EL2 should only be accessed if
MPAMIDR_EL1.HAS_HCR is "1". Likewise, MPAMIDR_EL1.VPMR_MAX has to be
probed to obtain the maximum supported MPAMVPM<x>_EL2 before accessing
corresponding MPAMVPM<x>_EL2 registers. Since runtime checks are not
being made, an exception would be raised if the platform under test
doesn't support one of the registers. On Neoverse reference design
platforms, an exception is being raised while MPAMVPM2_EL2 or above are
accessed. Neoverse reference design platforms support only registers
till MPAMVPM1_EL2 at this point.
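
For reference, the required checks can be summarised by the following
minimal C sketch (illustrative only, not part of the patch:
read_mpamidr_el1() follows TF-A's sysreg accessor naming,
save_mpamvpm_el2() is a hypothetical helper, and the MPAMIDR_EL1_*
macros are the ones this patch adds to arch.h):

    uint64_t mpamidr = read_mpamidr_el1();

    /* MPAMHCR_EL2/MPAMVPM<x>_EL2 are only present if HAS_HCR == 1 */
    if ((mpamidr >> MPAMIDR_EL1_HAS_HCR_SHIFT) & 1ULL) {
        /* ... save MPAMHCR_EL2, MPAMVPMV_EL2 and MPAMVPM0_EL2 ... */

        /* VPMR_MAX gives the highest implemented MPAMVPM<x>_EL2 */
        uint64_t vpmr_max =
            (mpamidr >> MPAMIDR_EL1_VPMR_MAX_SHIFT) &
            ((UINT64_C(1) << MPAMIDR_EL1_VPMR_MAX_WIDTH) - 1U);

        for (uint64_t x = 1U; x <= vpmr_max; x++)
            save_mpamvpm_el2(x);    /* hypothetical helper */
    }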

To resolve this, add the necessary runtime checks to the MPAM EL2
context save/restore routines. The new routines probe
MPAMIDR_EL1.HAS_HCR and MPAMIDR_EL1.VPMR_MAX to determine the exact
set of registers to be saved and restored.
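
For example, on a platform that reports MPAMIDR_EL1.VPMR_MAX = 1, as
the Neoverse reference designs do, the computed branch in the new
routines resolves to

    <label "1"> + (VPMR_MAX_POSSIBLE - VPMR_MAX) * <block size>
        = <label "1"> + (7 - 1) * <block size>

which skips the six blocks for MPAMVPM7_EL2..MPAMVPM2_EL2 and lands
on the MPAMVPM1_EL2 block, so only implemented registers are
accessed.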

CC: Davidson Kumaresan <davidson.kumaresan@arm.com>
Signed-off-by: Rohit Mathew <rohit.mathew@arm.com>
Change-Id: I2e3affd23091023b287b2bd5057a4a549037b611
diff --git a/include/arch/aarch64/arch.h b/include/arch/aarch64/arch.h
index 9e13c3d..9e4a3b7 100644
--- a/include/arch/aarch64/arch.h
+++ b/include/arch/aarch64/arch.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2013-2022, Arm Limited and Contributors. All rights reserved.
+ * Copyright (c) 2013-2023, Arm Limited and Contributors. All rights reserved.
  * Copyright (c) 2020-2022, NVIDIA Corporation. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
@@ -1063,13 +1063,17 @@
 #define PMBLIMITR_EL1		S3_0_C9_C10_0
 
 /*******************************************************************************
- * Definitions for system register interface to MPAM
+ * Definitions for system register interface, shifts and masks for MPAM
  ******************************************************************************/
 #define MPAMIDR_EL1		S3_0_C10_C4_4
 #define MPAM2_EL2		S3_4_C10_C5_0
 #define MPAMHCR_EL2		S3_4_C10_C4_0
 #define MPAM3_EL3		S3_6_C10_C5_0
 
+#define MPAMIDR_EL1_HAS_HCR_SHIFT	ULL(0x11)
+#define MPAMIDR_EL1_VPMR_MAX_SHIFT	ULL(0x12)
+#define MPAMIDR_EL1_VPMR_MAX_WIDTH	ULL(0x3)
+#define MPAMIDR_EL1_VPMR_MAX_POSSIBLE	ULL(0x7)
 /*******************************************************************************
  * Definitions for system register interface to AMU for FEAT_AMUv1
  ******************************************************************************/
diff --git a/lib/el3_runtime/aarch64/context.S b/lib/el3_runtime/aarch64/context.S
index b5d61ff..722b8ae 100644
--- a/lib/el3_runtime/aarch64/context.S
+++ b/lib/el3_runtime/aarch64/context.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2013-2022, Arm Limited and Contributors. All rights reserved.
+ * Copyright (c) 2013-2023, Arm Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -257,52 +257,200 @@
 	mrs	x10, MPAM2_EL2
 	str	x10, [x0, #CTX_MPAM2_EL2]
 
+	mrs	x10, MPAMIDR_EL1
+
+	/*
+	 * The context registers that we intend to save would be part of the
+	 * PE's system register frame only if MPAMIDR_EL1.HAS_HCR == 1.
+	 */
+	tbz	w10, #MPAMIDR_EL1_HAS_HCR_SHIFT, 3f
+
+	/*
+	 * MPAMHCR_EL2, MPAMVPMV_EL2 and MPAMVPM0_EL2 would be present in the
+	 * system register frame if MPAMIDR_EL1.HAS_HCR == 1. Proceed to save
+	 * the context of these registers.
+	 */
 	mrs	x11, MPAMHCR_EL2
 	mrs	x12, MPAMVPM0_EL2
 	stp	x11, x12, [x0, #CTX_MPAMHCR_EL2]
 
-	mrs	x13, MPAMVPM1_EL2
-	mrs	x14, MPAMVPM2_EL2
-	stp	x13, x14, [x0, #CTX_MPAMVPM1_EL2]
+	mrs	x13, MPAMVPMV_EL2
+	str	x13, [x0, #CTX_MPAMVPMV_EL2]
 
-	mrs	x15, MPAMVPM3_EL2
-	mrs	x16, MPAMVPM4_EL2
-	stp	x15, x16, [x0, #CTX_MPAMVPM3_EL2]
+	/*
+	 * MPAMIDR_EL1.VPMR_MAX has to be probed to obtain the maximum supported
+	 * VPMR value. Proceed to save the context of registers from
+	 * MPAMVPM1_EL2 to MPAMVPM<x>_EL2 where x is VPMR_MAX. From MPAM spec,
+	 * VPMR_MAX should not be zero if HAS_HCR == 1.
+	 */
+	ubfx	x10, x10, #MPAMIDR_EL1_VPMR_MAX_SHIFT, \
+		#MPAMIDR_EL1_VPMR_MAX_WIDTH
 
-	mrs	x9, MPAMVPM5_EL2
-	mrs	x10, MPAMVPM6_EL2
-	stp	x9, x10, [x0, #CTX_MPAMVPM5_EL2]
+	/*
+	 * Once VPMR_MAX has been identified, calculate the offset relative to
+	 * PC to jump to so that the relevant context can be saved. The offset
+	 * is calculated as (VPMR_MAX_POSSIBLE - VPMR_MAX) * (size of one
+	 * MPAMVPM<x>_EL2 save block) + (absolute address of label "1").
+	 */
+	mov	w11, #MPAMIDR_EL1_VPMR_MAX_POSSIBLE
+	sub	w10, w11, w10
 
-	mrs	x11, MPAMVPM7_EL2
-	mrs	x12, MPAMVPMV_EL2
-	stp	x11, x12, [x0, #CTX_MPAMVPM7_EL2]
-	ret
+	/* Calculate the size of one block of MPAMVPM*_EL2 save */
+	adr	x11, 1f
+	adr	x12, 2f
+	sub	x12, x12, x11
+
+	madd	x10, x10, x12, x11
+	br	x10
+
+	/*
+	 * The branch above would land properly on one of the blocks following
+	 * label "1". Make sure that the order of save is retained.
+	 */
+1:
+#if ENABLE_BTI
+	bti	j
+#endif
+	mrs	x10, MPAMVPM7_EL2
+	str	x10, [x0, #CTX_MPAMVPM7_EL2]
+2:
+#if ENABLE_BTI
+	bti	j
+#endif
+	mrs	x11, MPAMVPM6_EL2
+	str	x11, [x0, #CTX_MPAMVPM6_EL2]
+
+#if ENABLE_BTI
+	bti	j
+#endif
+	mrs	x12, MPAMVPM5_EL2
+	str	x12, [x0, #CTX_MPAMVPM5_EL2]
+
+#if ENABLE_BTI
+	bti	j
+#endif
+	mrs	x13, MPAMVPM4_EL2
+	str	x13, [x0, #CTX_MPAMVPM4_EL2]
+
+#if ENABLE_BTI
+	bti	j
+#endif
+	mrs	x14, MPAMVPM3_EL2
+	str	x14, [x0, #CTX_MPAMVPM3_EL2]
+
+#if ENABLE_BTI
+	bti	j
+#endif
+	mrs	x15, MPAMVPM2_EL2
+	str	x15, [x0, #CTX_MPAMVPM2_EL2]
+
+#if ENABLE_BTI
+	bti	j
+#endif
+	mrs	x16, MPAMVPM1_EL2
+	str	x16, [x0, #CTX_MPAMVPM1_EL2]
+
+3:	ret
 endfunc el2_sysregs_context_save_mpam
 
 func el2_sysregs_context_restore_mpam
 	ldr	x10, [x0, #CTX_MPAM2_EL2]
 	msr	MPAM2_EL2, x10
 
+	mrs	x10, MPAMIDR_EL1
+	/*
+	 * The context registers that we intend to restore would be part of the
+	 * PE's system register frame only if MPAMIDR_EL1.HAS_HCR == 1.
+	 */
+	tbz	w10, #MPAMIDR_EL1_HAS_HCR_SHIFT, 3f
+
+	/*
+	 * MPAMHCR_EL2, MPAMVPMV_EL2 and MPAMVPM0_EL2 would be present in the
+	 * system register frame if MPAMIDR_EL1.HAS_HCR == 1. Proceed to restore
+	 * the context of these registers.
+	 */
 	ldp	x11, x12, [x0, #CTX_MPAMHCR_EL2]
 	msr	MPAMHCR_EL2, x11
 	msr	MPAMVPM0_EL2, x12
 
-	ldp	x13, x14, [x0, #CTX_MPAMVPM1_EL2]
-	msr	MPAMVPM1_EL2, x13
-	msr	MPAMVPM2_EL2, x14
+	ldr	x13, [x0, #CTX_MPAMVPMV_EL2]
+	msr	MPAMVPMV_EL2, x13
 
-	ldp	x15, x16, [x0, #CTX_MPAMVPM3_EL2]
-	msr	MPAMVPM3_EL2, x15
-	msr	MPAMVPM4_EL2, x16
+	/*
+	 * MPAMIDR_EL1.VPMR_MAX has to be probed to obtain the maximum supported
+	 * VPMR value. Proceed to restore the context of registers from
+	 * MPAMVPM1_EL2 to MPAMVPM<x>_EL2 where x is VPMR_MAX. From MPAM spec,
+	 * VPMR_MAX should not be zero if HAS_HCR == 1.
+	 */
+	ubfx	x10, x10, #MPAMIDR_EL1_VPMR_MAX_SHIFT,	\
+		#MPAMIDR_EL1_VPMR_MAX_WIDTH
 
-	ldp	x9, x10, [x0, #CTX_MPAMVPM5_EL2]
-	msr	MPAMVPM5_EL2, x9
-	msr	MPAMVPM6_EL2, x10
+	/*
+	 * Once VPMR_MAX has been identified, calculate the offset relative to
+	 * PC to jump to so that the relevant context can be restored. The
+	 * offset is calculated as (VPMR_MAX_POSSIBLE - VPMR_MAX) * (size of
+	 * one MPAMVPM<x>_EL2 restore block) + (absolute address of label "1").
+	 */
+	mov	w11, #MPAMIDR_EL1_VPMR_MAX_POSSIBLE
+	sub	w10, w11, w10
 
-	ldp	x11, x12, [x0, #CTX_MPAMVPM7_EL2]
-	msr	MPAMVPM7_EL2, x11
-	msr	MPAMVPMV_EL2, x12
-	ret
+	/* Calculate the size of one block of MPAMVPM*_EL2 restore */
+	adr	x11, 1f
+	adr	x12, 2f
+	sub	x12, x12, x11
+
+	madd	x10, x10, x12, x11
+	br	x10
+
+	/*
+	 * The branch above would land properly on one of the blocks following
+	 * label "1". Make sure that the order of restore is retained.
+	 */
+1:
+
+#if ENABLE_BTI
+	bti	j
+#endif
+	ldr	x10, [x0, #CTX_MPAMVPM7_EL2]
+	msr	MPAMVPM7_EL2, x10
+2:
+#if ENABLE_BTI
+	bti	j
+#endif
+	ldr	x11, [x0, #CTX_MPAMVPM6_EL2]
+	msr	MPAMVPM6_EL2, x11
+
+#if ENABLE_BTI
+	bti	j
+#endif
+	ldr	x12, [x0, #CTX_MPAMVPM5_EL2]
+	msr	MPAMVPM5_EL2, x12
+
+#if ENABLE_BTI
+	bti	j
+#endif
+	ldr	x13, [x0, #CTX_MPAMVPM4_EL2]
+	msr	MPAMVPM4_EL2, x13
+
+#if ENABLE_BTI
+	bti	j
+#endif
+	ldr	x14, [x0, #CTX_MPAMVPM3_EL2]
+	msr	MPAMVPM3_EL2, x14
+
+#if ENABLE_BTI
+	bti	j
+#endif
+	ldr	x15, [x0, #CTX_MPAMVPM2_EL2]
+	msr	MPAMVPM2_EL2, x15
+
+#if ENABLE_BTI
+	bti	j
+#endif
+	ldr	x16, [x0, #CTX_MPAMVPM1_EL2]
+	msr	MPAMVPM1_EL2, x16
+
+3:	ret
 endfunc el2_sysregs_context_restore_mpam
 #endif /* ENABLE_MPAM_FOR_LOWER_ELS */