/*
 * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save
	.global	el2_sysregs_context_restore
#endif

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif
	.global	save_gp_pmcr_pauth_regs
	.global	restore_gp_pmcr_pauth_regs
	.global	save_and_update_ptw_el1_sys_regs
	.global	el3_exit

#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS, using x9-x17 (temporary caller-saved registers)
 * to save the EL2 system register context. It assumes
 * that 'x0' points to an 'el2_sys_regs' structure where
 * the register context will be saved.
 *
 * The following registers are not saved:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */

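/* -----------------------------------------------------
 * Illustrative usage (a sketch, not code from this
 * file): a context-management caller holding a
 * 'cpu_context_t' base in, say, x1 would pass the EL2
 * sysregs block using CTX_EL2_SYSREGS_OFFSET from
 * context.h:
 *
 *	add	x0, x1, #CTX_EL2_SYSREGS_OFFSET
 *	bl	el2_sysregs_context_save
 *
 * The restore function further below takes the same
 * argument.
 * -----------------------------------------------------
 */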
func el2_sysregs_context_save
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cnthp_ctl_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cnthp_cval_el2
	mrs	x16, cnthp_tval_el2
	stp	x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]

	mrs	x17, cntvoff_el2
	mrs	x9, cptr_el2
	stp	x17, x9, [x0, #CTX_CNTVOFF_EL2]

	mrs	x11, elr_el2
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x10, dbgvcr32_el2
	stp	x10, x11, [x0, #CTX_DBGVCR32_EL2]
#else
	str	x11, [x0, #CTX_ELR_EL2]
#endif

	mrs	x14, esr_el2
	mrs	x15, far_el2
	stp	x14, x15, [x0, #CTX_ESR_EL2]

	mrs	x16, hacr_el2
	mrs	x17, hcr_el2
	stp	x16, x17, [x0, #CTX_HACR_EL2]

	mrs	x9, hpfar_el2
	mrs	x10, hstr_el2
	stp	x9, x10, [x0, #CTX_HPFAR_EL2]

	mrs	x11, ICC_SRE_EL2
	mrs	x12, ICH_HCR_EL2
	stp	x11, x12, [x0, #CTX_ICC_SRE_EL2]

	mrs	x13, ICH_VMCR_EL2
	mrs	x14, mair_el2
	stp	x13, x14, [x0, #CTX_ICH_VMCR_EL2]

	mrs	x15, mdcr_el2
	mrs	x16, PMSCR_EL2
	stp	x15, x16, [x0, #CTX_MDCR_EL2]

	mrs	x17, sctlr_el2
	mrs	x9, spsr_el2
	stp	x17, x9, [x0, #CTX_SCTLR_EL2]

	mrs	x10, sp_el2
	mrs	x11, tcr_el2
	stp	x10, x11, [x0, #CTX_SP_EL2]

	mrs	x12, tpidr_el2
	mrs	x13, ttbr0_el2
	stp	x12, x13, [x0, #CTX_TPIDR_EL2]

	mrs	x14, vbar_el2
	mrs	x15, vmpidr_el2
	stp	x14, x15, [x0, #CTX_VBAR_EL2]

	mrs	x16, vpidr_el2
	mrs	x17, vtcr_el2
	stp	x16, x17, [x0, #CTX_VPIDR_EL2]

	mrs	x9, vttbr_el2
	str	x9, [x0, #CTX_VTTBR_EL2]

#if CTX_INCLUDE_MTE_REGS
	mrs	x10, TFSR_EL2
	str	x10, [x0, #CTX_TFSR_EL2]
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
	mrs	x9, MPAM2_EL2
	mrs	x10, MPAMHCR_EL2
	stp	x9, x10, [x0, #CTX_MPAM2_EL2]

	mrs	x11, MPAMVPM0_EL2
	mrs	x12, MPAMVPM1_EL2
	stp	x11, x12, [x0, #CTX_MPAMVPM0_EL2]

	mrs	x13, MPAMVPM2_EL2
	mrs	x14, MPAMVPM3_EL2
	stp	x13, x14, [x0, #CTX_MPAMVPM2_EL2]

	mrs	x15, MPAMVPM4_EL2
	mrs	x16, MPAMVPM5_EL2
	stp	x15, x16, [x0, #CTX_MPAMVPM4_EL2]

	mrs	x17, MPAMVPM6_EL2
	mrs	x9, MPAMVPM7_EL2
	stp	x17, x9, [x0, #CTX_MPAMVPM6_EL2]

	mrs	x10, MPAMVPMV_EL2
	str	x10, [x0, #CTX_MPAMVPMV_EL2]
#endif

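	/* Armv8.6 fine-grained trap controls (FEAT_FGT) and the
	 * self-synchronized physical counter offset (FEAT_ECV) */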
#if ARM_ARCH_AT_LEAST(8, 6)
	mrs	x11, HAFGRTR_EL2
	mrs	x12, HDFGRTR_EL2
	stp	x11, x12, [x0, #CTX_HAFGRTR_EL2]

	mrs	x13, HDFGWTR_EL2
	mrs	x14, HFGITR_EL2
	stp	x13, x14, [x0, #CTX_HDFGWTR_EL2]

	mrs	x15, HFGRTR_EL2
	mrs	x16, HFGWTR_EL2
	stp	x15, x16, [x0, #CTX_HFGRTR_EL2]

	mrs	x17, CNTPOFF_EL2
	str	x17, [x0, #CTX_CNTPOFF_EL2]
#endif

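	/* Registers only saved from Armv8.4 onwards, e.g. the Secure
	 * EL2 timers (FEAT_SEL2) and trace filter control (FEAT_TRF) */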
#if ARM_ARCH_AT_LEAST(8, 4)
	mrs	x9, cnthps_ctl_el2
	mrs	x10, cnthps_cval_el2
	stp	x9, x10, [x0, #CTX_CNTHPS_CTL_EL2]

	mrs	x11, cnthps_tval_el2
	mrs	x12, cnthvs_ctl_el2
	stp	x11, x12, [x0, #CTX_CNTHPS_TVAL_EL2]

	mrs	x13, cnthvs_cval_el2
	mrs	x14, cnthvs_tval_el2
	stp	x13, x14, [x0, #CTX_CNTHVS_CVAL_EL2]

	mrs	x15, cnthv_ctl_el2
	mrs	x16, cnthv_cval_el2
	stp	x15, x16, [x0, #CTX_CNTHV_CTL_EL2]

	mrs	x17, cnthv_tval_el2
	mrs	x9, contextidr_el2
	stp	x17, x9, [x0, #CTX_CNTHV_TVAL_EL2]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x10, sder32_el2
	str	x10, [x0, #CTX_SDER32_EL2]
#endif

	mrs	x11, ttbr1_el2
	str	x11, [x0, #CTX_TTBR1_EL2]

	mrs	x12, vdisr_el2
	str	x12, [x0, #CTX_VDISR_EL2]

	mrs	x13, vncr_el2
	str	x13, [x0, #CTX_VNCR_EL2]

	mrs	x14, vsesr_el2
	str	x14, [x0, #CTX_VSESR_EL2]

	mrs	x15, vstcr_el2
	str	x15, [x0, #CTX_VSTCR_EL2]

	mrs	x16, vsttbr_el2
	str	x16, [x0, #CTX_VSTTBR_EL2]

	mrs	x17, TRFCR_EL2
	str	x17, [x0, #CTX_TRFCR_EL2]
#endif

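	/* SCXTNUM_EL2 is only present from Armv8.5 onwards */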
#if ARM_ARCH_AT_LEAST(8, 5)
	mrs	x9, scxtnum_el2
	str	x9, [x0, #CTX_SCXTNUM_EL2]
#endif

	ret
endfunc el2_sysregs_context_save

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS, using x9-x17 (temporary caller-saved registers)
 * to restore the EL2 system register context. It
 * assumes that 'x0' points to an 'el2_sys_regs'
 * structure from where the register context will be
 * restored.
 *
 * The following registers are not restored:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cnthp_ctl_el2, x14

	ldp	x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]
	msr	cnthp_cval_el2, x15
	msr	cnthp_tval_el2, x16

	ldp	x17, x9, [x0, #CTX_CNTVOFF_EL2]
	msr	cntvoff_el2, x17
	msr	cptr_el2, x9

#if CTX_INCLUDE_AARCH32_REGS
	ldp	x10, x11, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x10
#else
	ldr	x11, [x0, #CTX_ELR_EL2]
#endif
	msr	elr_el2, x11

	ldp	x14, x15, [x0, #CTX_ESR_EL2]
	msr	esr_el2, x14
	msr	far_el2, x15

	ldp	x16, x17, [x0, #CTX_HACR_EL2]
	msr	hacr_el2, x16
	msr	hcr_el2, x17

	ldp	x9, x10, [x0, #CTX_HPFAR_EL2]
	msr	hpfar_el2, x9
	msr	hstr_el2, x10

	ldp	x11, x12, [x0, #CTX_ICC_SRE_EL2]
	msr	ICC_SRE_EL2, x11
	msr	ICH_HCR_EL2, x12

	ldp	x13, x14, [x0, #CTX_ICH_VMCR_EL2]
	msr	ICH_VMCR_EL2, x13
	msr	mair_el2, x14

	ldp	x15, x16, [x0, #CTX_MDCR_EL2]
	msr	mdcr_el2, x15
	msr	PMSCR_EL2, x16

	ldp	x17, x9, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x17
	msr	spsr_el2, x9

	ldp	x10, x11, [x0, #CTX_SP_EL2]
	msr	sp_el2, x10
	msr	tcr_el2, x11

	ldp	x12, x13, [x0, #CTX_TPIDR_EL2]
	msr	tpidr_el2, x12
	msr	ttbr0_el2, x13

	ldp	x13, x14, [x0, #CTX_VBAR_EL2]
	msr	vbar_el2, x13
	msr	vmpidr_el2, x14

	ldp	x15, x16, [x0, #CTX_VPIDR_EL2]
	msr	vpidr_el2, x15
	msr	vtcr_el2, x16

	ldr	x17, [x0, #CTX_VTTBR_EL2]
	msr	vttbr_el2, x17

#if CTX_INCLUDE_MTE_REGS
	ldr	x9, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x9
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
	ldp	x10, x11, [x0, #CTX_MPAM2_EL2]
	msr	MPAM2_EL2, x10
	msr	MPAMHCR_EL2, x11

	ldp	x12, x13, [x0, #CTX_MPAMVPM0_EL2]
	msr	MPAMVPM0_EL2, x12
	msr	MPAMVPM1_EL2, x13

	ldp	x14, x15, [x0, #CTX_MPAMVPM2_EL2]
	msr	MPAMVPM2_EL2, x14
	msr	MPAMVPM3_EL2, x15

	ldp	x16, x17, [x0, #CTX_MPAMVPM4_EL2]
	msr	MPAMVPM4_EL2, x16
	msr	MPAMVPM5_EL2, x17

	ldp	x9, x10, [x0, #CTX_MPAMVPM6_EL2]
	msr	MPAMVPM6_EL2, x9
	msr	MPAMVPM7_EL2, x10

	ldr	x11, [x0, #CTX_MPAMVPMV_EL2]
	msr	MPAMVPMV_EL2, x11
#endif

#if ARM_ARCH_AT_LEAST(8, 6)
	ldp	x12, x13, [x0, #CTX_HAFGRTR_EL2]
	msr	HAFGRTR_EL2, x12
	msr	HDFGRTR_EL2, x13

	ldp	x14, x15, [x0, #CTX_HDFGWTR_EL2]
	msr	HDFGWTR_EL2, x14
	msr	HFGITR_EL2, x15

	ldp	x16, x17, [x0, #CTX_HFGRTR_EL2]
	msr	HFGRTR_EL2, x16
	msr	HFGWTR_EL2, x17

	ldr	x9, [x0, #CTX_CNTPOFF_EL2]
	msr	CNTPOFF_EL2, x9
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
	ldp	x10, x11, [x0, #CTX_CNTHPS_CTL_EL2]
	msr	cnthps_ctl_el2, x10
	msr	cnthps_cval_el2, x11

	ldp	x12, x13, [x0, #CTX_CNTHPS_TVAL_EL2]
	msr	cnthps_tval_el2, x12
	msr	cnthvs_ctl_el2, x13

	ldp	x14, x15, [x0, #CTX_CNTHVS_CVAL_EL2]
	msr	cnthvs_cval_el2, x14
	msr	cnthvs_tval_el2, x15

	ldp	x16, x17, [x0, #CTX_CNTHV_CTL_EL2]
	msr	cnthv_ctl_el2, x16
	msr	cnthv_cval_el2, x17

	ldp	x9, x10, [x0, #CTX_CNTHV_TVAL_EL2]
	msr	cnthv_tval_el2, x9
	msr	contextidr_el2, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_SDER32_EL2]
	msr	sder32_el2, x11
#endif

	ldr	x12, [x0, #CTX_TTBR1_EL2]
	msr	ttbr1_el2, x12

	ldr	x13, [x0, #CTX_VDISR_EL2]
	msr	vdisr_el2, x13

	ldr	x14, [x0, #CTX_VNCR_EL2]
	msr	vncr_el2, x14

	ldr	x15, [x0, #CTX_VSESR_EL2]
	msr	vsesr_el2, x15

	ldr	x16, [x0, #CTX_VSTCR_EL2]
	msr	vstcr_el2, x16

	ldr	x17, [x0, #CTX_VSTTBR_EL2]
	msr	vsttbr_el2, x17

	ldr	x9, [x0, #CTX_TRFCR_EL2]
	msr	TRFCR_EL2, x9
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
	ldr	x10, [x0, #CTX_SCXTNUM_EL2]
	msr	scxtnum_el2, x10
#endif

	ret
endfunc el2_sysregs_context_restore

#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to save the EL1 system
 * register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
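/* ------------------------------------------------------------------
 * Illustrative usage (a sketch, not code from this file): with a
 * 'cpu_context_t' base in x1, callers pass the EL1 sysregs block
 * using CTX_EL1_SYSREGS_OFFSET from context.h:
 *
 *	add	x0, x1, #CTX_EL1_SYSREGS_OFFSET
 *	bl	el1_sysregs_context_save
 *
 * The matching restore function below takes the same argument.
 * ------------------------------------------------------------------
 */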
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

#if !ERRATA_SPECULATIVE_AT
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif

	ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to restore the EL1
 * system register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

#if !ERRATA_SPECULATIVE_AT
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif
	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif
	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function strictly follows the AAPCS64, using
 * x9-x17 (temporary caller-saved registers according to the AArch64
 * PCS) to save the floating point register context. It assumes that
 * 'x0' points to a 'fp_regs' structure where the register context
 * will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, currently we don't use VFP registers nor set traps in
 * Trusted Firmware, so we assume it's cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
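/* ------------------------------------------------------------------
 * Illustrative usage (a sketch, not code from this file): with a
 * 'cpu_context_t' base in x1, CTX_FPREGS_OFFSET (defined in
 * context.h when CTX_INCLUDE_FPREGS=1) selects the FP block:
 *
 *	add	x0, x1, #CTX_FPREGS_OFFSET
 *	bl	fpregs_context_save
 * ------------------------------------------------------------------
 */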
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AAPCS64, using x9-x17
 * (temporary caller-saved registers according to the AArch64 PCS)
 * to restore the floating point register context. It assumes that
 * 'x0' points to a 'fp_regs' structure from where the register
 * context will be restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, currently we don't use VFP registers nor set traps in
 * Trusted Firmware, so we assume it's cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif
	/*
	 * No explicit ISB required here as the ERET to
	 * switch to secure EL1 or the non-secure world
	 * covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

/* ------------------------------------------------------------------
 * The following function is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks whether the Secure Cycle Counter is disabled in
 * MDCR_EL3 when ARMv8.5-PMU is implemented; if called from the
 * Non-secure state, it saves PMCR_EL0 and disables the Cycle
 * Counter.
 *
 * Ideally we would only save and restore the callee-saved registers
 * when a world switch occurs, but that type of implementation is
 * more complex. So currently we will always save and restore these
 * registers on entry to and exit from EL3.
 * These are not macros, to ensure their invocation fits within the
 * 32 instructions per exception vector.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
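/* ------------------------------------------------------------------
 * Illustrative usage (a sketch, not code from this file): an EL3
 * exception vector entry, running with SP_EL3 pointing at the
 * current 'cpu_context_t', would simply do:
 *
 *	bl	save_gp_pmcr_pauth_regs
 * ------------------------------------------------------------------
 */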
func save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check if the earlier initialization of MDCR_EL3.SCCD to 1
	 * failed, meaning that ARMv8.5-PMU is not implemented and
	 * PMCR_EL0 should be saved in the Non-secure context.
	 * ----------------------------------------------------------
	 */
	mrs	x9, mdcr_el3
	tst	x9, #MDCR_SCCD_BIT
	bne	1f

	/* Secure Cycle Counter is not disabled */
	mrs	x9, pmcr_el0

	/* Check the caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */

	ret
endfunc save_gp_pmcr_pauth_regs

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * The x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to Non-secure state if the
	 * Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f

	/* ----------------------------------------------------------
	 * Back to Non-secure state.
	 * Check if the earlier initialization of MDCR_EL3.SCCD to 1
	 * failed, meaning that ARMv8.5-PMU is not implemented and
	 * PMCR_EL0 should be restored from the Non-secure context.
	 * ----------------------------------------------------------
	 */
	mrs	x0, mdcr_el3
	tst	x0, #MDCR_SCCD_BIT
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/*
 * In case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and TCR_EL1
 * registers and update the EL1 registers so that the stage 1 and
 * stage 2 page table walks are disabled.
 */
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only the sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The steps below must be performed in this order to disable
	 * the page table walk for the lower ELs (EL1 and EL0). The
	 * first step ensures that the stage 1 page table walk is
	 * disabled, and the second ensures that the page table walker
	 * uses the TCR_EL1.EPDx bits to perform address translation.
	 * The ISB ensures that the CPU performs the two steps in
	 * order.
	 *
	 * 1. Update the TCR_EL1.EPDx bits to disable the stage 1
	 *    page table walk.
	 * 2. Enable the MMU bit to avoid identity mapping via
	 *    stage 2 and force the TCR_EL1.EPDx bits to be used by
	 *    the page table walker.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs

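/*
 * A note on the counterpart (an assumption based on the include list
 * above, not code from this file): the inverse operation, restoring the
 * saved SCTLR_EL1 and TCR_EL1 values, is performed by the
 * restore_ptw_el1_sys_regs macro provided by el3_common_macros.S and
 * invoked on the el3_exit path below.
 */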
/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 points to a valid context
 * structure from where the gp regs and other special registers can
 * be retrieved.
 * ------------------------------------------------------------------
 */
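/* ------------------------------------------------------------------
 * Illustrative usage (a sketch, not code from this file): after a
 * handler has written its return values into the context, the
 * exception-handling code tail-calls this routine:
 *
 *	b	el3_exit
 * ------------------------------------------------------------------
 */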
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif
	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#if IMAGE_BL31 && RAS_EXTENSION
	/* ----------------------------------------------------------
	 * Issue Error Synchronization Barrier to synchronize SErrors
	 * before exiting EL3. We're running with EAs unmasked, so
	 * any synchronized errors would be taken immediately;
	 * therefore no need to inspect the DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#endif
	exception_return

endfunc el3_exit