AArch64: Disable Secure Cycle Counter

This patch fixes an issue where Secure world timing information
could be leaked because the Secure Cycle Counter was not disabled.
For ARMv8.5 and later, the counter is disabled by setting the
MDCR_EL3.SCCD bit on CPU cold/warm boot.
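
As a minimal sketch (not part of this patch) of how ARMv8.5-PMU
support could be detected before relying on SCCD: the PMUVer field
encoding and the read_mdcr_el3()/write_mdcr_el3() accessor names
below are assumptions for illustration, in TF-A style.

  /* Sketch only: accessors and the PMUVer encoding are assumed. */
  #include <stdbool.h>
  #include <stdint.h>

  #define ID_AA64DFR0_PMUVER_SHIFT    8
  #define ID_AA64DFR0_PMUVER_MASK     0xfULL
  #define ID_AA64DFR0_PMUVER_8_5      0x6ULL  /* assumed ARMv8.5-PMU ID */

  static bool pmu_is_armv8_5(uint64_t id_aa64dfr0)
  {
      uint64_t pmuver = (id_aa64dfr0 >> ID_AA64DFR0_PMUVER_SHIFT) &
                        ID_AA64DFR0_PMUVER_MASK;

      return pmuver == ID_AA64DFR0_PMUVER_8_5;
  }

  static void disable_secure_cycle_counter(uint64_t id_aa64dfr0)
  {
      if (pmu_is_armv8_5(id_aa64dfr0)) {
          /* SCCD == 1: PMCCNTR_EL0 does not count in Secure state. */
          write_mdcr_el3(read_mdcr_el3() | MDCR_SCCD_BIT);
      }
      /* Pre-v8.5 parts fall back to the PMCR_EL0.DP approach below. */
  }

Note that SCCD is RES0 before ARMv8.5, so setting it unconditionally
at reset (as this patch does) is harmless on older parts.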
For earlier architectures, the PMCR_EL0 register is saved/restored
on Secure world entry/exit from/to Non-secure state, and cycle
counting is disabled by setting the PMCR_EL0.DP bit.
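
A minimal sketch of that save/restore path, assuming TF-A-style
context helpers (get_el3state_ctx(), write_ctx_reg()) and PMCR_EL0
accessors; the function names are illustrative, not part of this
patch:

  /* Sketch only: helper names are assumed (TF-A context helpers). */
  static void pmcr_el0_on_secure_entry(cpu_context_t *ctx)
  {
      uint64_t pmcr = read_pmcr_el0();

      /* Preserve the Non-secure world's PMCR_EL0 in the EL3 context. */
      write_ctx_reg(get_el3state_ctx(ctx), CTX_PMCR_EL0, pmcr);

      /*
       * PMCR_EL0.DP == 1: PMCCNTR_EL0 does not count when event
       * counting is prohibited, which (with MDCR_EL3.SPME == 0)
       * covers execution in Secure state.
       */
      write_pmcr_el0(pmcr | PMCR_EL0_DP_BIT);
  }

  static void pmcr_el0_on_secure_exit(cpu_context_t *ctx)
  {
      /* Restore the saved Non-secure PMCR_EL0 on the way out. */
      write_pmcr_el0(read_ctx_reg(get_el3state_ctx(ctx), CTX_PMCR_EL0));
  }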
The 'include/arch/aarch64/arch.h' header file was tidied up and new
ARMv8.5-PMU related definitions were added.

Change-Id: I6f56db6bc77504634a352388990ad925a69ebbfa
Signed-off-by: Alexei Fedorov <Alexei.Fedorov@arm.com>
diff --git a/include/arch/aarch64/arch.h b/include/arch/aarch64/arch.h
index fa857fb..98a8590 100644
--- a/include/arch/aarch64/arch.h
+++ b/include/arch/aarch64/arch.h
@@ -132,12 +132,13 @@
 #define ID_AA64PFR0_EL2_SHIFT	U(8)
 #define ID_AA64PFR0_EL3_SHIFT	U(12)
 #define ID_AA64PFR0_AMU_SHIFT	U(44)
-#define ID_AA64PFR0_AMU_LENGTH	U(4)
 #define ID_AA64PFR0_AMU_MASK	ULL(0xf)
 #define ID_AA64PFR0_ELX_MASK	ULL(0xf)
+#define ID_AA64PFR0_GIC_SHIFT	U(24)
+#define ID_AA64PFR0_GIC_WIDTH	U(4)
+#define ID_AA64PFR0_GIC_MASK	ULL(0xf)
 #define ID_AA64PFR0_SVE_SHIFT	U(32)
 #define ID_AA64PFR0_SVE_MASK	ULL(0xf)
-#define ID_AA64PFR0_SVE_LENGTH	U(4)
 #define ID_AA64PFR0_MPAM_SHIFT	U(40)
 #define ID_AA64PFR0_MPAM_MASK	ULL(0xf)
 #define ID_AA64PFR0_DIT_SHIFT	U(48)
@@ -148,18 +149,14 @@
 #define ID_AA64PFR0_CSV2_MASK	ULL(0xf)
 #define ID_AA64PFR0_CSV2_LENGTH	U(4)
 
-/* ID_AA64DFR0_EL1.PMS definitions (for ARMv8.2+) */
-#define ID_AA64DFR0_PMS_SHIFT	U(32)
-#define ID_AA64DFR0_PMS_LENGTH	U(4)
-#define ID_AA64DFR0_PMS_MASK	ULL(0xf)
-
+/* Exception level handling */
 #define EL_IMPL_NONE		ULL(0)
 #define EL_IMPL_A64ONLY		ULL(1)
 #define EL_IMPL_A64_A32		ULL(2)
 
-#define ID_AA64PFR0_GIC_SHIFT	U(24)
-#define ID_AA64PFR0_GIC_WIDTH	U(4)
-#define ID_AA64PFR0_GIC_MASK	ULL(0xf)
+/* ID_AA64DFR0_EL1.PMS definitions (for ARMv8.2+) */
+#define ID_AA64DFR0_PMS_SHIFT	U(32)
+#define ID_AA64DFR0_PMS_MASK	ULL(0xf)
 
 /* ID_AA64ISAR1_EL1 definitions */
 #define ID_AA64ISAR1_EL1	S3_0_C0_C6_1
@@ -304,20 +301,25 @@
 #define SCR_RESET_VAL		SCR_RES1_BITS
 
 /* MDCR_EL3 definitions */
+#define MDCR_SCCD_BIT		(ULL(1) << 23)
+#define MDCR_SPME_BIT		(ULL(1) << 17)
+#define MDCR_SDD_BIT		(ULL(1) << 16)
 #define MDCR_SPD32(x)		((x) << 14)
 #define MDCR_SPD32_LEGACY	ULL(0x0)
 #define MDCR_SPD32_DISABLE	ULL(0x2)
 #define MDCR_SPD32_ENABLE	ULL(0x3)
-#define MDCR_SDD_BIT		(ULL(1) << 16)
 #define MDCR_NSPB(x)		((x) << 12)
 #define MDCR_NSPB_EL1		ULL(0x3)
 #define MDCR_TDOSA_BIT		(ULL(1) << 10)
 #define MDCR_TDA_BIT		(ULL(1) << 9)
 #define MDCR_TPM_BIT		(ULL(1) << 6)
-#define MDCR_SCCD_BIT		(ULL(1) << 23)
 #define MDCR_EL3_RESET_VAL	ULL(0x0)
 
 /* MDCR_EL2 definitions */
+#define MDCR_EL2_HLP		(U(1) << 26)
+#define MDCR_EL2_HCCD		(U(1) << 23)
+#define MDCR_EL2_TTRF		(U(1) << 19)
+#define MDCR_EL2_HPMD		(U(1) << 17)
 #define MDCR_EL2_TPMS		(U(1) << 14)
 #define MDCR_EL2_E2PB(x)	((x) << 12)
 #define MDCR_EL2_E2PB_EL1	U(0x3)
@@ -677,10 +679,14 @@
 #define PMCR_EL0_N_SHIFT	U(11)
 #define PMCR_EL0_N_MASK		U(0x1f)
 #define PMCR_EL0_N_BITS		(PMCR_EL0_N_MASK << PMCR_EL0_N_SHIFT)
+#define PMCR_EL0_LP_BIT		(U(1) << 7)
 #define PMCR_EL0_LC_BIT		(U(1) << 6)
 #define PMCR_EL0_DP_BIT		(U(1) << 5)
 #define PMCR_EL0_X_BIT		(U(1) << 4)
 #define PMCR_EL0_D_BIT		(U(1) << 3)
+#define PMCR_EL0_C_BIT		(U(1) << 2)
+#define PMCR_EL0_P_BIT		(U(1) << 1)
+#define PMCR_EL0_E_BIT		(U(1) << 0)
 
 /*******************************************************************************
  * Definitions for system register interface to SVE
diff --git a/include/arch/aarch64/el3_common_macros.S b/include/arch/aarch64/el3_common_macros.S
index 22b32b4..a36b7da 100644
--- a/include/arch/aarch64/el3_common_macros.S
+++ b/include/arch/aarch64/el3_common_macros.S
@@ -116,12 +116,42 @@
 	 * ---------------------------------------------------------------------
 	 */
 	mov_imm	x0, ((MDCR_EL3_RESET_VAL | MDCR_SDD_BIT | \
-		      MDCR_SPD32(MDCR_SPD32_DISABLE) | MDCR_SCCD_BIT) \
-		    & ~(MDCR_TDOSA_BIT | MDCR_TDA_BIT | MDCR_TPM_BIT))
+		      MDCR_SPD32(MDCR_SPD32_DISABLE) | MDCR_SCCD_BIT) & \
+		    ~(MDCR_TDOSA_BIT | MDCR_TDA_BIT | MDCR_TPM_BIT))
 
 	msr	mdcr_el3, x0
 
 	/* ---------------------------------------------------------------------
+	 * Initialise PMCR_EL0, setting all fields rather than relying on
+	 * hw, since some fields are architecturally UNKNOWN on reset.
+	 *
+	 * PMCR_EL0.LP: Set to one so that event counter overflow, which
+	 *  is recorded in PMOVSCLR_EL0[0-30], occurs on the increment
+	 *  that changes PMEVCNTR<n>_EL0[63] from 1 to 0, when ARMv8.5-PMU
+	 *  is implemented. This bit is RES0 in versions of the
+	 *  architecture earlier than ARMv8.5, so setting it to 1 has no
+	 *  effect on them.
+	 *
+	 * PMCR_EL0.LC: Set to one so that cycle counter overflow, which
+	 *  is recorded in PMOVSCLR_EL0[31], occurs on the increment
+	 *  that changes PMCCNTR_EL0[63] from 1 to 0.
+	 *
+	 * PMCR_EL0.DP: Set to one so that the cycle counter,
+	 *  PMCCNTR_EL0, does not count when event counting is prohibited.
+	 *
+	 * PMCR_EL0.X: Set to zero to disable export of events.
+	 *
+	 * PMCR_EL0.D: Set to zero so that, when enabled, PMCCNTR_EL0
+	 *  counts on every clock cycle.
+	 * ---------------------------------------------------------------------
+	 */
+	mov_imm	x0, ((PMCR_EL0_RESET_VAL | PMCR_EL0_LP_BIT | \
+		      PMCR_EL0_LC_BIT | PMCR_EL0_DP_BIT) & \
+		    ~(PMCR_EL0_X_BIT | PMCR_EL0_D_BIT))
+
+	msr	pmcr_el0, x0
+
+	/* ---------------------------------------------------------------------
 	 * Enable External Aborts and SError Interrupts now that the exception
 	 * vectors have been setup.
 	 * ---------------------------------------------------------------------
diff --git a/include/lib/el3_runtime/aarch64/context.h b/include/lib/el3_runtime/aarch64/context.h
index a76a59b..64fa8a9 100644
--- a/include/lib/el3_runtime/aarch64/context.h
+++ b/include/lib/el3_runtime/aarch64/context.h
@@ -59,7 +59,7 @@
 #define CTX_RUNTIME_SP		U(0x10)
 #define CTX_SPSR_EL3		U(0x18)
 #define CTX_ELR_EL3		U(0x20)
-#define CTX_UNUSED		U(0x28)
+#define CTX_PMCR_EL0		U(0x28)
 #define CTX_EL3STATE_END	U(0x30)
 
 /*******************************************************************************
@@ -91,22 +91,21 @@
 #define CTX_AFSR1_EL1		U(0x98)
 #define CTX_CONTEXTIDR_EL1	U(0xa0)
 #define CTX_VBAR_EL1		U(0xa8)
-#define CTX_PMCR_EL0		U(0xb0)
 
 /*
  * If the platform is AArch64-only, there is no need to save and restore these
  * AArch32 registers.
  */
 #if CTX_INCLUDE_AARCH32_REGS
-#define CTX_SPSR_ABT		U(0xc0)  /* Align to the next 16 byte boundary */
-#define CTX_SPSR_UND		U(0xc8)
-#define CTX_SPSR_IRQ		U(0xd0)
-#define CTX_SPSR_FIQ		U(0xd8)
-#define CTX_DACR32_EL2		U(0xe0)
-#define CTX_IFSR32_EL2		U(0xe8)
-#define CTX_AARCH32_END		U(0xf0) /* Align to the next 16 byte boundary */
+#define CTX_SPSR_ABT		U(0xb0)	/* Align to the next 16 byte boundary */
+#define CTX_SPSR_UND		U(0xb8)
+#define CTX_SPSR_IRQ		U(0xc0)
+#define CTX_SPSR_FIQ		U(0xc8)
+#define CTX_DACR32_EL2		U(0xd0)
+#define CTX_IFSR32_EL2		U(0xd8)
+#define CTX_AARCH32_END		U(0xe0) /* Align to the next 16 byte boundary */
 #else
-#define CTX_AARCH32_END		U(0xc0)  /* Align to the next 16 byte boundary */
+#define CTX_AARCH32_END		U(0xb0)	/* Align to the next 16 byte boundary */
 #endif /* CTX_INCLUDE_AARCH32_REGS */
 
 /*