SPMD: save/restore EL2 system registers.

NOTE: Not all EL2 system registers are saved/restored.
This subset includes the baseline Armv8.0 registers as well as
registers introduced by later extensions (e.g. MPAM, SVE's ZCR_EL2,
the fine-grained trap registers and CNTPOFF_EL2).

Change-Id: I9993c7d78d8f5f8e72d1c6c8d6fd871283aa3ce0
Signed-off-by: Jose Marinho <jose.marinho@arm.com>
Signed-off-by: Olivier Deprez <olivier.deprez@arm.com>
Signed-off-by: Artsem Artsemenka <artsem.artsemenka@arm.com>
Signed-off-by: Max Shvetsov <maksims.svecovs@arm.com>
diff --git a/lib/el3_runtime/aarch64/context.S b/lib/el3_runtime/aarch64/context.S
index 9bd25ba..bcc7eef 100644
--- a/lib/el3_runtime/aarch64/context.S
+++ b/lib/el3_runtime/aarch64/context.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2013-2019, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -9,6 +9,11 @@
 #include <assert_macros.S>
 #include <context.h>
 
+#if CTX_INCLUDE_EL2_REGS
+	.global	el2_sysregs_context_save
+	.global	el2_sysregs_context_restore
+#endif
+
 	.global	el1_sysregs_context_save
 	.global	el1_sysregs_context_restore
 #if CTX_INCLUDE_FPREGS
@@ -19,6 +24,390 @@
 	.global	restore_gp_pmcr_pauth_regs
 	.global	el3_exit
 
+#if CTX_INCLUDE_EL2_REGS
+
+/* -----------------------------------------------------
+ * The following function strictly follows the AArch64
+ * PCS to use x9-x17 (temporary caller-saved registers)
+ * to save EL2 system register context. It assumes that
+ * 'x0' is pointing to a 'el2_sys_regs' structure where
+ * the register context will be saved.
+ * -----------------------------------------------------
+ */
+func el2_sysregs_context_save
+
+	mrs	x9, actlr_el2
+	str	x9, [x0, #CTX_ACTLR_EL2]
+
+	mrs	x9, afsr0_el2
+	str	x9, [x0, #CTX_AFSR0_EL2]
+
+	mrs	x9, afsr1_el2
+	str	x9, [x0, #CTX_AFSR1_EL2]
+
+	mrs	x9, amair_el2
+	str	x9, [x0, #CTX_AMAIR_EL2]
+
+	mrs	x9, cnthctl_el2
+	str	x9, [x0, #CTX_CNTHCTL_EL2]
+
+	mrs	x9, cnthp_ctl_el2
+	str	x9, [x0, #CTX_CNTHP_CTL_EL2]
+
+	mrs	x9, cnthp_cval_el2
+	str	x9, [x0, #CTX_CNTHP_CVAL_EL2]
+
+	mrs	x9, cnthp_tval_el2
+	str	x9, [x0, #CTX_CNTHP_TVAL_EL2]
+
+	mrs	x9, CNTPOFF_EL2
+	str	x9, [x0, #CTX_CNTPOFF_EL2]
+
+	mrs	x9, cntvoff_el2
+	str	x9, [x0, #CTX_CNTVOFF_EL2]
+
+	mrs	x9, cptr_el2
+	str	x9, [x0, #CTX_CPTR_EL2]
+
+	mrs	x9, dbgvcr32_el2
+	str	x9, [x0, #CTX_DBGVCR32_EL2]
+
+	mrs	x9, elr_el2
+	str	x9, [x0, #CTX_ELR_EL2]
+
+	mrs	x9, esr_el2
+	str	x9, [x0, #CTX_ESR_EL2]
+
+	mrs	x9, far_el2
+	str	x9, [x0, #CTX_FAR_EL2]
+
+	mrs	x9, fpexc32_el2
+	str	x9, [x0, #CTX_FPEXC32_EL2]
+
+	mrs	x9, hacr_el2
+	str	x9, [x0, #CTX_HACR_EL2]
+
+	mrs	x9, HAFGRTR_EL2
+	str	x9, [x0, #CTX_HAFGRTR_EL2]
+
+	mrs	x9, hcr_el2
+	str	x9, [x0, #CTX_HCR_EL2]
+
+	mrs	x9, HDFGRTR_EL2
+	str	x9, [x0, #CTX_HDFGRTR_EL2]
+
+	mrs	x9, HDFGWTR_EL2
+	str	x9, [x0, #CTX_HDFGWTR_EL2]
+
+	mrs	x9, HFGITR_EL2
+	str	x9, [x0, #CTX_HFGITR_EL2]
+
+	mrs	x9, HFGRTR_EL2
+	str	x9, [x0, #CTX_HFGRTR_EL2]
+
+	mrs	x9, HFGWTR_EL2
+	str	x9, [x0, #CTX_HFGWTR_EL2]
+
+	mrs	x9, hpfar_el2
+	str	x9, [x0, #CTX_HPFAR_EL2]
+
+	mrs	x9, hstr_el2
+	str	x9, [x0, #CTX_HSTR_EL2]
+
+	mrs	x9, ICC_SRE_EL2
+	str	x9, [x0, #CTX_ICC_SRE_EL2]
+
+	mrs	x9, ICH_EISR_EL2
+	str	x9, [x0, #CTX_ICH_EISR_EL2]
+
+	mrs	x9, ICH_ELRSR_EL2
+	str	x9, [x0, #CTX_ICH_ELRSR_EL2]
+
+	mrs	x9, ICH_HCR_EL2
+	str	x9, [x0, #CTX_ICH_HCR_EL2]
+
+	mrs	x9, ICH_MISR_EL2
+	str	x9, [x0, #CTX_ICH_MISR_EL2]
+
+	mrs	x9, ICH_VMCR_EL2
+	str	x9, [x0, #CTX_ICH_VMCR_EL2]
+
+	mrs	x9, ICH_VTR_EL2
+	str	x9, [x0, #CTX_ICH_VTR_EL2]
+
+	mrs	x9, mair_el2
+	str	x9, [x0, #CTX_MAIR_EL2]
+
+	mrs	x9, mdcr_el2
+	str	x9, [x0, #CTX_MDCR_EL2]
+
+	mrs	x9, MPAM2_EL2
+	str	x9, [x0, #CTX_MPAM2_EL2]
+
+	mrs	x9, MPAMHCR_EL2
+	str	x9, [x0, #CTX_MPAMHCR_EL2]
+
+	mrs	x9, MPAMVPM0_EL2
+	str	x9, [x0, #CTX_MPAMVPM0_EL2]
+
+	mrs	x9, MPAMVPM1_EL2
+	str	x9, [x0, #CTX_MPAMVPM1_EL2]
+
+	mrs	x9, MPAMVPM2_EL2
+	str	x9, [x0, #CTX_MPAMVPM2_EL2]
+
+	mrs	x9, MPAMVPM3_EL2
+	str	x9, [x0, #CTX_MPAMVPM3_EL2]
+
+	mrs	x9, MPAMVPM4_EL2
+	str	x9, [x0, #CTX_MPAMVPM4_EL2]
+
+	mrs	x9, MPAMVPM5_EL2
+	str	x9, [x0, #CTX_MPAMVPM5_EL2]
+
+	mrs	x9, MPAMVPM6_EL2
+	str	x9, [x0, #CTX_MPAMVPM6_EL2]
+
+	mrs	x9, MPAMVPM7_EL2
+	str	x9, [x0, #CTX_MPAMVPM7_EL2]
+
+	mrs	x9, MPAMVPMV_EL2
+	str	x9, [x0, #CTX_MPAMVPMV_EL2]
+
+	mrs	x9, rmr_el2
+	str	x9, [x0, #CTX_RMR_EL2]
+
+	mrs	x9, sctlr_el2
+	str	x9, [x0, #CTX_SCTLR_EL2]
+
+	mrs	x9, spsr_el2
+	str	x9, [x0, #CTX_SPSR_EL2]
+
+	mrs	x9, sp_el2
+	str	x9, [x0, #CTX_SP_EL2]
+
+	mrs	x9, tcr_el2
+	str	x9, [x0, #CTX_TCR_EL2]
+
+	mrs	x9, tpidr_el2
+	str	x9, [x0, #CTX_TPIDR_EL2]
+
+	mrs	x9, ttbr0_el2
+	str	x9, [x0, #CTX_TTBR0_EL2]
+
+	mrs	x9, vbar_el2
+	str	x9, [x0, #CTX_VBAR_EL2]
+
+	mrs	x9, vmpidr_el2
+	str	x9, [x0, #CTX_VMPIDR_EL2]
+
+	mrs	x9, vpidr_el2
+	str	x9, [x0, #CTX_VPIDR_EL2]
+
+	mrs	x9, vtcr_el2
+	str	x9, [x0, #CTX_VTCR_EL2]
+
+	mrs	x9, vttbr_el2
+	str	x9, [x0, #CTX_VTTBR_EL2]
+
+	mrs	x9, ZCR_EL2
+	str	x9, [x0, #CTX_ZCR_EL2]
+
+	ret
+endfunc el2_sysregs_context_save
+
+/* -----------------------------------------------------
+ * The following function strictly follows the AArch64
+ * PCS to use x9-x17 (temporary caller-saved registers)
+ * to restore EL2 system register context.  It assumes
+ * that 'x0' is pointing to a 'el2_sys_regs' structure
+ * from where the register context will be restored.
+ * -----------------------------------------------------
+ */
+func el2_sysregs_context_restore
+
+	ldr	x9, [x0, #CTX_ACTLR_EL2]
+	msr	actlr_el2, x9
+
+	ldr	x9, [x0, #CTX_AFSR0_EL2]
+	msr	afsr0_el2, x9
+
+	ldr	x9, [x0, #CTX_AFSR1_EL2]
+	msr	afsr1_el2, x9
+
+	ldr	x9, [x0, #CTX_AMAIR_EL2]
+	msr	amair_el2, x9
+
+	ldr	x9, [x0, #CTX_CNTHCTL_EL2]
+	msr	cnthctl_el2, x9
+
+	ldr	x9, [x0, #CTX_CNTHP_CTL_EL2]
+	msr	cnthp_ctl_el2, x9
+
+	ldr	x9, [x0, #CTX_CNTHP_CVAL_EL2]
+	msr	cnthp_cval_el2, x9
+
+	ldr	x9, [x0, #CTX_CNTHP_TVAL_EL2]
+	msr	cnthp_tval_el2, x9
+
+	ldr	x9, [x0, #CTX_CNTPOFF_EL2]
+	msr	CNTPOFF_EL2, x9
+
+	ldr	x9, [x0, #CTX_CNTVOFF_EL2]
+	msr	cntvoff_el2, x9
+
+	ldr	x9, [x0, #CTX_CPTR_EL2]
+	msr	cptr_el2, x9
+
+	ldr	x9, [x0, #CTX_DBGVCR32_EL2]
+	msr	dbgvcr32_el2, x9
+
+	ldr	x9, [x0, #CTX_ELR_EL2]
+	msr	elr_el2, x9
+
+	ldr	x9, [x0, #CTX_ESR_EL2]
+	msr	esr_el2, x9
+
+	ldr	x9, [x0, #CTX_FAR_EL2]
+	msr	far_el2, x9
+
+	ldr	x9, [x0, #CTX_FPEXC32_EL2]
+	msr	fpexc32_el2, x9
+
+	ldr	x9, [x0, #CTX_HACR_EL2]
+	msr	hacr_el2, x9
+
+	ldr	x9, [x0, #CTX_HAFGRTR_EL2]
+	msr	HAFGRTR_EL2, x9
+
+	ldr	x9, [x0, #CTX_HCR_EL2]
+	msr	hcr_el2, x9
+
+	ldr	x9, [x0, #CTX_HDFGRTR_EL2]
+	msr	HDFGRTR_EL2, x9
+
+	ldr	x9, [x0, #CTX_HDFGWTR_EL2]
+	msr	HDFGWTR_EL2, x9
+
+	ldr	x9, [x0, #CTX_HFGITR_EL2]
+	msr	HFGITR_EL2, x9
+
+	ldr	x9, [x0, #CTX_HFGRTR_EL2]
+	msr	HFGRTR_EL2, x9
+
+	ldr	x9, [x0, #CTX_HFGWTR_EL2]
+	msr	HFGWTR_EL2, x9
+
+	ldr	x9, [x0, #CTX_HPFAR_EL2]
+	msr	hpfar_el2, x9
+
+	ldr	x9, [x0, #CTX_HSTR_EL2]
+	msr	hstr_el2, x9
+
+	ldr	x9, [x0, #CTX_ICC_SRE_EL2]
+	msr	ICC_SRE_EL2, x9
+
+	ldr	x9, [x0, #CTX_ICH_EISR_EL2]
+	msr	ICH_EISR_EL2, x9
+
+	ldr	x9, [x0, #CTX_ICH_ELRSR_EL2]
+	msr	ICH_ELRSR_EL2, x9
+
+	ldr	x9, [x0, #CTX_ICH_HCR_EL2]
+	msr	ICH_HCR_EL2, x9
+
+	ldr	x9, [x0, #CTX_ICH_MISR_EL2]
+	msr	ICH_MISR_EL2, x9
+
+	ldr	x9, [x0, #CTX_ICH_VMCR_EL2]
+	msr	ICH_VMCR_EL2, x9
+
+	ldr	x9, [x0, #CTX_ICH_VTR_EL2]
+	msr	ICH_VTR_EL2, x9
+
+	ldr	x9, [x0, #CTX_MAIR_EL2]
+	msr	mair_el2, x9
+
+	ldr	x9, [x0, #CTX_MDCR_EL2]
+	msr	mdcr_el2, x9
+
+	ldr	x9, [x0, #CTX_MPAM2_EL2]
+	msr	MPAM2_EL2, x9
+
+	ldr	x9, [x0, #CTX_MPAMHCR_EL2]
+	msr	MPAMHCR_EL2, x9
+
+	ldr	x9, [x0, #CTX_MPAMVPM0_EL2]
+	msr	MPAMVPM0_EL2, x9
+
+	ldr	x9, [x0, #CTX_MPAMVPM1_EL2]
+	msr	MPAMVPM1_EL2, x9
+
+	ldr	x9, [x0, #CTX_MPAMVPM2_EL2]
+	msr	MPAMVPM2_EL2, x9
+
+	ldr	x9, [x0, #CTX_MPAMVPM3_EL2]
+	msr	MPAMVPM3_EL2, x9
+
+	ldr	x9, [x0, #CTX_MPAMVPM4_EL2]
+	msr	MPAMVPM4_EL2, x9
+
+	ldr	x9, [x0, #CTX_MPAMVPM5_EL2]
+	msr	MPAMVPM5_EL2, x9
+
+	ldr	x9, [x0, #CTX_MPAMVPM6_EL2]
+	msr	MPAMVPM6_EL2, x9
+
+	ldr	x9, [x0, #CTX_MPAMVPM7_EL2]
+	msr	MPAMVPM7_EL2, x9
+
+	ldr	x9, [x0, #CTX_MPAMVPMV_EL2]
+	msr	MPAMVPMV_EL2, x9
+
+	ldr	x9, [x0, #CTX_RMR_EL2]
+	msr	rmr_el2, x9
+
+	ldr	x9, [x0, #CTX_SCTLR_EL2]
+	msr	sctlr_el2, x9
+
+	ldr	x9, [x0, #CTX_SPSR_EL2]
+	msr	spsr_el2, x9
+
+	ldr	x9, [x0, #CTX_SP_EL2]
+	msr	sp_el2, x9
+
+	ldr	x9, [x0, #CTX_TCR_EL2]
+	msr	tcr_el2, x9
+
+	ldr	x9, [x0, #CTX_TPIDR_EL2]
+	msr	tpidr_el2, x9
+
+	ldr	x9, [x0, #CTX_TTBR0_EL2]
+	msr	ttbr0_el2, x9
+
+	ldr	x9, [x0, #CTX_VBAR_EL2]
+	msr	vbar_el2, x9
+
+	ldr	x9, [x0, #CTX_VMPIDR_EL2]
+	msr	vmpidr_el2, x9
+
+	ldr	x9, [x0, #CTX_VPIDR_EL2]
+	msr	vpidr_el2, x9
+
+	ldr	x9, [x0, #CTX_VTCR_EL2]
+	msr	vtcr_el2, x9
+
+	ldr	x9, [x0, #CTX_VTTBR_EL2]
+	msr	vttbr_el2, x9
+
+	ldr	x9, [x0, #CTX_ZCR_EL2]
+	msr	ZCR_EL2, x9
+
+	ret
+endfunc el2_sysregs_context_restore
+
+#endif /* CTX_INCLUDE_EL2_REGS */
+
 /* ------------------------------------------------------------------
  * The following function strictly follows the AArch64 PCS to use
  * x9-x17 (temporary caller-saved registers) to save EL1 system
diff --git a/lib/el3_runtime/aarch64/context_mgmt.c b/lib/el3_runtime/aarch64/context_mgmt.c
index 546e39e..f59bcfc 100644
--- a/lib/el3_runtime/aarch64/context_mgmt.c
+++ b/lib/el3_runtime/aarch64/context_mgmt.c
@@ -530,6 +530,52 @@
 	cm_set_next_eret_context(security_state);
 }
 
+#if CTX_INCLUDE_EL2_REGS
+/*******************************************************************************
+ * Save EL2 sysreg context
+ ******************************************************************************/
+void cm_el2_sysregs_context_save(uint32_t security_state)
+{
+	u_register_t scr_el3 = read_scr();
+
+	/*
+	 * Always save the non-secure EL2 context, only save the
+	 * S-EL2 context if S-EL2 is enabled.
+	 */
+	if ((security_state == NON_SECURE) ||
+	    ((scr_el3 & SCR_EEL2_BIT) != 0U)) {
+		cpu_context_t *ctx;
+
+		ctx = cm_get_context(security_state);
+		assert(ctx != NULL);
+
+		el2_sysregs_context_save(get_sysregs_ctx(ctx));
+	}
+}
+
+/*******************************************************************************
+ * Restore EL2 sysreg context
+ ******************************************************************************/
+void cm_el2_sysregs_context_restore(uint32_t security_state)
+{
+	u_register_t scr_el3 = read_scr();
+
+	/*
+	 * Always restore the non-secure EL2 context, only restore the
+	 * S-EL2 context if S-EL2 is enabled.
+	 */
+	if ((security_state == NON_SECURE) ||
+	    ((scr_el3 & SCR_EEL2_BIT) != 0U)) {
+		cpu_context_t *ctx;
+
+		ctx = cm_get_context(security_state);
+		assert(ctx != NULL);
+
+		el2_sysregs_context_restore(get_sysregs_ctx(ctx));
+	}
+}
+#endif /* CTX_INCLUDE_EL2_REGS */
+
 /*******************************************************************************
  * The next four functions are used by runtime services to save and restore
  * EL1 context on the 'cpu_context' structure for the specified security