AArch64: Disable Secure Cycle Counter

This patch fixes an issue where secure world timing information
could be leaked because the Secure Cycle Counter was not disabled.
For ARMv8.5 the counter gets disabled by setting the MDCR_EL3.SCCD
bit on CPU cold/warm boot.
For earlier architectures the PMCR_EL0 register is saved/restored
on Secure world entry from, and exit to, Non-secure state, and
cycle counting gets disabled by setting the PMCR_EL0.DP bit.
The 'include/aarch64/arch.h' header file was tidied up and new
ARMv8.5-PMU related definitions were added.
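
For illustration, the cold/warm boot write referred to above could
look like the minimal sketch below, using the MDCR_SCCD_BIT
definition added to arch.h (SCCD is bit [23] of MDCR_EL3); the
exact reset-time sequence in this patch may differ:

	/*
	 * SCCD is RES0 when ARMv8.5-PMU is not implemented, so
	 * this write is silently ignored on earlier architectures
	 * and the PMCR_EL0.DP fallback takes effect instead.
	 */
	mrs	x0, mdcr_el3
	orr	x0, x0, #MDCR_SCCD_BIT
	msr	mdcr_el3, x0
	isb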

Change-Id: I6f56db6bc77504634a352388990ad925a69ebbfa
Signed-off-by: Alexei Fedorov <Alexei.Fedorov@arm.com>
diff --git a/lib/el3_runtime/aarch64/context.S b/lib/el3_runtime/aarch64/context.S
index e6ab19b..53dc02e 100644
--- a/lib/el3_runtime/aarch64/context.S
+++ b/lib/el3_runtime/aarch64/context.S
@@ -24,9 +24,45 @@
 	.global	save_gp_registers
 	.global	restore_gp_registers
 	.global	restore_gp_registers_eret
+	.global	save_pmcr_disable_pmu
 	.global	el3_exit
 
 /* -----------------------------------------------------
+ * If ARMv8.5-PMU is implemented, cycle counting has already
+ * been disabled by setting MDCR_EL3.SCCD to 1 at boot.
+ * -----------------------------------------------------
+ */
+func save_pmcr_disable_pmu
+	/* -----------------------------------------------------
+	 * Check if the earlier initialization of MDCR_EL3.SCCD
+	 * to 1 failed, meaning that ARMv8.5-PMU is not implemented
+	 * and PMCR_EL0 should be saved in the Non-secure context.
+	 * -----------------------------------------------------
+	 */
+	mrs	x9, mdcr_el3
+	tst	x9, #MDCR_SCCD_BIT
+	bne	1f
+
+	/* Secure Cycle Counter is not disabled */
+	mrs	x9, pmcr_el0
+
+	/* Check caller's security state */
+	mrs	x10, scr_el3
+	tst	x10, #SCR_NS_BIT
+	beq	2f
+
+	/* Save PMCR_EL0 if called from Non-secure state */
+	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
+
+	/* Disable cycle counter when event counting is prohibited */
+2:	orr	x9, x9, #PMCR_EL0_DP_BIT
+	msr	pmcr_el0, x9
+
+	isb
+1:	ret
+endfunc save_pmcr_disable_pmu
+
+/* -----------------------------------------------------
  * The following function strictly follows the AArch64
  * PCS to use x9-x17 (temporary caller-saved registers)
  * to save EL1 system register context. It assumes that
@@ -80,9 +116,6 @@
 	mrs	x9, vbar_el1
 	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
 
-	mrs	x10, pmcr_el0
-	str	x10, [x0, #CTX_PMCR_EL0]
-
 	/* Save AArch32 system registers if the build has instructed so */
 #if CTX_INCLUDE_AARCH32_REGS
 	mrs	x11, spsr_abt
@@ -169,9 +202,6 @@
 	msr	contextidr_el1, x17
 	msr	vbar_el1, x9
 
-	ldr	x10, [x0, #CTX_PMCR_EL0]
-	msr	pmcr_el0, x10
-
 	/* Restore AArch32 system registers if the build has instructed so */
 #if CTX_INCLUDE_AARCH32_REGS
 	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
@@ -503,6 +533,29 @@
 	msr	spsr_el3, x16
 	msr	elr_el3, x17
 
+	/* -----------------------------------------------------
+	 * Restore PMCR_EL0 when returning to Non-secure state
+	 * if the Secure Cycle Counter was not disabled via
+	 * MDCR_EL3.SCCD (i.e. ARMv8.5-PMU is not implemented).
+	 * -----------------------------------------------------
+	 */
+	tst	x18, #SCR_NS_BIT
+	beq	2f
+
+	/* -----------------------------------------------------
+	 * Back to Non-secure state.
+	 * Check if the earlier initialization of MDCR_EL3.SCCD
+	 * to 1 failed, meaning that ARMv8.5-PMU is not implemented
+	 * and PMCR_EL0 should be restored from the Non-secure context.
+	 * -----------------------------------------------------
+	 */
+	mrs	x17, mdcr_el3
+	tst	x17, #MDCR_SCCD_BIT
+	bne	2f
+	ldr	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
+	msr	pmcr_el0, x17
+2:
+
 #if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
 	/* Restore mitigation state as it was on entry to EL3 */
 	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]