/*
 * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save
	.global	el2_sysregs_context_restore
#endif
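/*
 * Note: the EL2 context routines are only built when the build option
 * CTX_INCLUDE_EL2_REGS is enabled, which platforms typically set when a
 * secure partition manager runs at S-EL2 (an assumption based on the
 * build options documentation, not stated in this file).
 */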
| 16 | |
Yatharth Kochar | bbf8f6f | 2015-10-02 17:56:48 +0100 | [diff] [blame] | 17 | .global el1_sysregs_context_save |
| 18 | .global el1_sysregs_context_restore |
| 19 | #if CTX_INCLUDE_FPREGS |
| 20 | .global fpregs_context_save |
| 21 | .global fpregs_context_restore |
| 22 | #endif |
Alexei Fedorov | ed108b5 | 2019-09-13 14:11:59 +0100 | [diff] [blame] | 23 | .global save_gp_pmcr_pauth_regs |
| 24 | .global restore_gp_pmcr_pauth_regs |
Yatharth Kochar | bbf8f6f | 2015-10-02 17:56:48 +0100 | [diff] [blame] | 25 | .global el3_exit |
| 26 | |
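/*
 * A sketch of how these routines are typically driven from C (an
 * assumption for illustration: the context management code in
 * lib/el3_runtime/context_mgmt.c passes the per-world context pointers):
 *
 *	void cm_el1_sysregs_context_save(uint32_t security_state)
 *	{
 *		cpu_context_t *ctx = cm_get_context(security_state);
 *
 *		assert(ctx != NULL);
 *		el1_sysregs_context_save(get_sysregs_ctx(ctx));
 *	}
 */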
#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to save EL2 system register context. It assumes that
 * 'x0' is pointing to an 'el2_sysregs' structure where
 * the register context will be saved.
 * -----------------------------------------------------
 */
func el2_sysregs_context_save

	mrs	x9, actlr_el2
	str	x9, [x0, #CTX_ACTLR_EL2]

	mrs	x9, afsr0_el2
	str	x9, [x0, #CTX_AFSR0_EL2]

	mrs	x9, afsr1_el2
	str	x9, [x0, #CTX_AFSR1_EL2]

	mrs	x9, amair_el2
	str	x9, [x0, #CTX_AMAIR_EL2]

	mrs	x9, cnthctl_el2
	str	x9, [x0, #CTX_CNTHCTL_EL2]

	mrs	x9, cnthp_ctl_el2
	str	x9, [x0, #CTX_CNTHP_CTL_EL2]

	mrs	x9, cnthp_cval_el2
	str	x9, [x0, #CTX_CNTHP_CVAL_EL2]

	mrs	x9, cnthp_tval_el2
	str	x9, [x0, #CTX_CNTHP_TVAL_EL2]

	mrs	x9, CNTPOFF_EL2
	str	x9, [x0, #CTX_CNTPOFF_EL2]

	mrs	x9, cntvoff_el2
	str	x9, [x0, #CTX_CNTVOFF_EL2]

	mrs	x9, cptr_el2
	str	x9, [x0, #CTX_CPTR_EL2]

	mrs	x9, dbgvcr32_el2
	str	x9, [x0, #CTX_DBGVCR32_EL2]

	mrs	x9, elr_el2
	str	x9, [x0, #CTX_ELR_EL2]

	mrs	x9, esr_el2
	str	x9, [x0, #CTX_ESR_EL2]

	mrs	x9, far_el2
	str	x9, [x0, #CTX_FAR_EL2]

	mrs	x9, fpexc32_el2
	str	x9, [x0, #CTX_FPEXC32_EL2]

	mrs	x9, hacr_el2
	str	x9, [x0, #CTX_HACR_EL2]

	mrs	x9, HAFGRTR_EL2
	str	x9, [x0, #CTX_HAFGRTR_EL2]

	mrs	x9, hcr_el2
	str	x9, [x0, #CTX_HCR_EL2]

	mrs	x9, HDFGRTR_EL2
	str	x9, [x0, #CTX_HDFGRTR_EL2]

	mrs	x9, HDFGWTR_EL2
	str	x9, [x0, #CTX_HDFGWTR_EL2]

	mrs	x9, HFGITR_EL2
	str	x9, [x0, #CTX_HFGITR_EL2]

	mrs	x9, HFGRTR_EL2
	str	x9, [x0, #CTX_HFGRTR_EL2]

	mrs	x9, HFGWTR_EL2
	str	x9, [x0, #CTX_HFGWTR_EL2]

	mrs	x9, hpfar_el2
	str	x9, [x0, #CTX_HPFAR_EL2]

	mrs	x9, hstr_el2
	str	x9, [x0, #CTX_HSTR_EL2]

	mrs	x9, ICC_SRE_EL2
	str	x9, [x0, #CTX_ICC_SRE_EL2]

	mrs	x9, ICH_EISR_EL2
	str	x9, [x0, #CTX_ICH_EISR_EL2]

	mrs	x9, ICH_ELRSR_EL2
	str	x9, [x0, #CTX_ICH_ELRSR_EL2]

	mrs	x9, ICH_HCR_EL2
	str	x9, [x0, #CTX_ICH_HCR_EL2]

	mrs	x9, ICH_MISR_EL2
	str	x9, [x0, #CTX_ICH_MISR_EL2]

	mrs	x9, ICH_VMCR_EL2
	str	x9, [x0, #CTX_ICH_VMCR_EL2]

	mrs	x9, ICH_VTR_EL2
	str	x9, [x0, #CTX_ICH_VTR_EL2]

	mrs	x9, mair_el2
	str	x9, [x0, #CTX_MAIR_EL2]

	mrs	x9, mdcr_el2
	str	x9, [x0, #CTX_MDCR_EL2]

	mrs	x9, MPAM2_EL2
	str	x9, [x0, #CTX_MPAM2_EL2]

	mrs	x9, MPAMHCR_EL2
	str	x9, [x0, #CTX_MPAMHCR_EL2]

	mrs	x9, MPAMVPM0_EL2
	str	x9, [x0, #CTX_MPAMVPM0_EL2]

	mrs	x9, MPAMVPM1_EL2
	str	x9, [x0, #CTX_MPAMVPM1_EL2]

	mrs	x9, MPAMVPM2_EL2
	str	x9, [x0, #CTX_MPAMVPM2_EL2]

	mrs	x9, MPAMVPM3_EL2
	str	x9, [x0, #CTX_MPAMVPM3_EL2]

	mrs	x9, MPAMVPM4_EL2
	str	x9, [x0, #CTX_MPAMVPM4_EL2]

	mrs	x9, MPAMVPM5_EL2
	str	x9, [x0, #CTX_MPAMVPM5_EL2]

	mrs	x9, MPAMVPM6_EL2
	str	x9, [x0, #CTX_MPAMVPM6_EL2]

	mrs	x9, MPAMVPM7_EL2
	str	x9, [x0, #CTX_MPAMVPM7_EL2]

	mrs	x9, MPAMVPMV_EL2
	str	x9, [x0, #CTX_MPAMVPMV_EL2]

	mrs	x9, rmr_el2
	str	x9, [x0, #CTX_RMR_EL2]

	mrs	x9, sctlr_el2
	str	x9, [x0, #CTX_SCTLR_EL2]

	mrs	x9, spsr_el2
	str	x9, [x0, #CTX_SPSR_EL2]

	mrs	x9, sp_el2
	str	x9, [x0, #CTX_SP_EL2]

	mrs	x9, tcr_el2
	str	x9, [x0, #CTX_TCR_EL2]

	mrs	x9, tpidr_el2
	str	x9, [x0, #CTX_TPIDR_EL2]

	mrs	x9, ttbr0_el2
	str	x9, [x0, #CTX_TTBR0_EL2]

	mrs	x9, vbar_el2
	str	x9, [x0, #CTX_VBAR_EL2]

	mrs	x9, vmpidr_el2
	str	x9, [x0, #CTX_VMPIDR_EL2]

	mrs	x9, vpidr_el2
	str	x9, [x0, #CTX_VPIDR_EL2]

	mrs	x9, vtcr_el2
	str	x9, [x0, #CTX_VTCR_EL2]

	mrs	x9, vttbr_el2
	str	x9, [x0, #CTX_VTTBR_EL2]

	mrs	x9, ZCR_EL2
	str	x9, [x0, #CTX_ZCR_EL2]

	ret
endfunc el2_sysregs_context_save

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to restore EL2 system register context. It assumes
 * that 'x0' is pointing to an 'el2_sysregs' structure
 * from where the register context will be restored.
 * -----------------------------------------------------
 */
func el2_sysregs_context_restore

	ldr	x9, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9

	ldr	x9, [x0, #CTX_AFSR0_EL2]
	msr	afsr0_el2, x9

	ldr	x9, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x9

	ldr	x9, [x0, #CTX_AMAIR_EL2]
	msr	amair_el2, x9

	ldr	x9, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x9

	ldr	x9, [x0, #CTX_CNTHP_CTL_EL2]
	msr	cnthp_ctl_el2, x9

	ldr	x9, [x0, #CTX_CNTHP_CVAL_EL2]
	msr	cnthp_cval_el2, x9

	ldr	x9, [x0, #CTX_CNTHP_TVAL_EL2]
	msr	cnthp_tval_el2, x9

	ldr	x9, [x0, #CTX_CNTPOFF_EL2]
	msr	CNTPOFF_EL2, x9

	ldr	x9, [x0, #CTX_CNTVOFF_EL2]
	msr	cntvoff_el2, x9

	ldr	x9, [x0, #CTX_CPTR_EL2]
	msr	cptr_el2, x9

	ldr	x9, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x9

	ldr	x9, [x0, #CTX_ELR_EL2]
	msr	elr_el2, x9

	ldr	x9, [x0, #CTX_ESR_EL2]
	msr	esr_el2, x9

	ldr	x9, [x0, #CTX_FAR_EL2]
	msr	far_el2, x9

	ldr	x9, [x0, #CTX_FPEXC32_EL2]
	msr	fpexc32_el2, x9

	ldr	x9, [x0, #CTX_HACR_EL2]
	msr	hacr_el2, x9

	ldr	x9, [x0, #CTX_HAFGRTR_EL2]
	msr	HAFGRTR_EL2, x9

	ldr	x9, [x0, #CTX_HCR_EL2]
	msr	hcr_el2, x9

	ldr	x9, [x0, #CTX_HDFGRTR_EL2]
	msr	HDFGRTR_EL2, x9

	ldr	x9, [x0, #CTX_HDFGWTR_EL2]
	msr	HDFGWTR_EL2, x9

	ldr	x9, [x0, #CTX_HFGITR_EL2]
	msr	HFGITR_EL2, x9

	ldr	x9, [x0, #CTX_HFGRTR_EL2]
	msr	HFGRTR_EL2, x9

	ldr	x9, [x0, #CTX_HFGWTR_EL2]
	msr	HFGWTR_EL2, x9

	ldr	x9, [x0, #CTX_HPFAR_EL2]
	msr	hpfar_el2, x9

	ldr	x9, [x0, #CTX_HSTR_EL2]
	msr	hstr_el2, x9

	ldr	x9, [x0, #CTX_ICC_SRE_EL2]
	msr	ICC_SRE_EL2, x9

	ldr	x9, [x0, #CTX_ICH_EISR_EL2]
	msr	ICH_EISR_EL2, x9

	ldr	x9, [x0, #CTX_ICH_ELRSR_EL2]
	msr	ICH_ELRSR_EL2, x9

	ldr	x9, [x0, #CTX_ICH_HCR_EL2]
	msr	ICH_HCR_EL2, x9

	ldr	x9, [x0, #CTX_ICH_MISR_EL2]
	msr	ICH_MISR_EL2, x9

	ldr	x9, [x0, #CTX_ICH_VMCR_EL2]
	msr	ICH_VMCR_EL2, x9

	ldr	x9, [x0, #CTX_ICH_VTR_EL2]
	msr	ICH_VTR_EL2, x9

	ldr	x9, [x0, #CTX_MAIR_EL2]
	msr	mair_el2, x9

	ldr	x9, [x0, #CTX_MDCR_EL2]
	msr	mdcr_el2, x9

	ldr	x9, [x0, #CTX_MPAM2_EL2]
	msr	MPAM2_EL2, x9

	ldr	x9, [x0, #CTX_MPAMHCR_EL2]
	msr	MPAMHCR_EL2, x9

	ldr	x9, [x0, #CTX_MPAMVPM0_EL2]
	msr	MPAMVPM0_EL2, x9

	ldr	x9, [x0, #CTX_MPAMVPM1_EL2]
	msr	MPAMVPM1_EL2, x9

	ldr	x9, [x0, #CTX_MPAMVPM2_EL2]
	msr	MPAMVPM2_EL2, x9

	ldr	x9, [x0, #CTX_MPAMVPM3_EL2]
	msr	MPAMVPM3_EL2, x9

	ldr	x9, [x0, #CTX_MPAMVPM4_EL2]
	msr	MPAMVPM4_EL2, x9

	ldr	x9, [x0, #CTX_MPAMVPM5_EL2]
	msr	MPAMVPM5_EL2, x9

	ldr	x9, [x0, #CTX_MPAMVPM6_EL2]
	msr	MPAMVPM6_EL2, x9

	ldr	x9, [x0, #CTX_MPAMVPM7_EL2]
	msr	MPAMVPM7_EL2, x9

	ldr	x9, [x0, #CTX_MPAMVPMV_EL2]
	msr	MPAMVPMV_EL2, x9

	ldr	x9, [x0, #CTX_RMR_EL2]
	msr	rmr_el2, x9

	ldr	x9, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x9

	ldr	x9, [x0, #CTX_SPSR_EL2]
	msr	spsr_el2, x9

	ldr	x9, [x0, #CTX_SP_EL2]
	msr	sp_el2, x9

	ldr	x9, [x0, #CTX_TCR_EL2]
	msr	tcr_el2, x9

	ldr	x9, [x0, #CTX_TPIDR_EL2]
	msr	tpidr_el2, x9

	ldr	x9, [x0, #CTX_TTBR0_EL2]
	msr	ttbr0_el2, x9

	ldr	x9, [x0, #CTX_VBAR_EL2]
	msr	vbar_el2, x9

	ldr	x9, [x0, #CTX_VMPIDR_EL2]
	msr	vmpidr_el2, x9

	ldr	x9, [x0, #CTX_VPIDR_EL2]
	msr	vpidr_el2, x9

	ldr	x9, [x0, #CTX_VTCR_EL2]
	msr	vtcr_el2, x9

	ldr	x9, [x0, #CTX_VTTBR_EL2]
	msr	vttbr_el2, x9

	ldr	x9, [x0, #CTX_ZCR_EL2]
	msr	ZCR_EL2, x9

	ret
endfunc el2_sysregs_context_restore

#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to save EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

	mrs	x15, sctlr_el1
	mrs	x16, actlr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, tcr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_TCR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build is configured to do so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif

	/* Save NS timer registers if the build is configured to do so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif

	/* Save MTE system registers if the build is configured to do so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif

	ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to restore EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	actlr_el1, x16

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_TCR_EL1]
	msr	tcr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build is configured to do so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif
	/* Restore NS timer registers if the build is configured to do so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif
	/* Restore MTE system registers if the build is configured to do so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function follows the aapcs_64 strictly to use
 * x9-x17 (temporary caller-saved registers according to AArch64 PCS)
 * to save floating point register context. It assumes that 'x0' is
 * pointing to a 'fp_regs' structure where the register context will
 * be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, we currently neither use VFP registers nor set traps in
 * Trusted Firmware, and assume it is cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
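/*
 * A minimal sketch (an assumption for illustration, not part of this
 * file) of how a caller could ensure CPTR_EL3.TFP is clear before using
 * these routines, relying on the TFP_BIT definition from arch.h:
 *
 *	mrs	x9, cptr_el3
 *	bic	x9, x9, #TFP_BIT	// stop trapping FP/SIMD accesses
 *	msr	cptr_el3, x9
 *	isb
 */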
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function follows the aapcs_64 strictly to use x9-x17
 * (temporary caller-saved registers according to AArch64 PCS) to
 * restore floating point register context. It assumes that 'x0' is
 * pointing to a 'fp_regs' structure from where the register context
 * will be restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, we currently neither use VFP registers nor set traps in
 * Trusted Firmware, and assume it is cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif
	/*
	 * No explicit ISB required here as ERET to
	 * switch to secure EL1 or non-secure world
	 * covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

/* ------------------------------------------------------------------
 * The following function is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks whether the Secure Cycle Counter is disabled in
 * MDCR_EL3 when ARMv8.5-PMU is implemented; if it is not, it saves
 * PMCR_EL0 when called from Non-secure state and then disables the
 * Cycle Counter.
 *
 * Ideally we would only save and restore the callee saved registers
 * when a world switch occurs but that type of implementation is more
 * complex. So currently we will always save and restore these
 * registers on entry and exit of EL3.
 * These are not macros to ensure their invocation fits within the 32
 * instructions per exception vector.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
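/*
 * The PMCR_EL0 handling below, summarized as a C-like sketch (for
 * illustration only; the names mirror the assembly and the bit
 * definitions in arch.h, nothing new is assumed):
 *
 *	if (!(read_mdcr_el3() & MDCR_SCCD_BIT)) {
 *		uint64_t pmcr = read_pmcr_el0();
 *		if (read_scr_el3() & SCR_NS_BIT)
 *			ctx->pmcr_el0 = pmcr;		// save for NS world
 *		write_pmcr_el0(pmcr | PMCR_EL0_DP_BIT);	// prohibit cycle counting
 *	}
 */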
func save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check if the earlier initialization of MDCR_EL3.SCCD to 1
	 * failed, meaning that ARMv8.5-PMU is not implemented and
	 * PMCR_EL0 should be saved in the non-secure context.
	 * ----------------------------------------------------------
	 */
	mrs	x9, mdcr_el3
	tst	x9, #MDCR_SCCD_BIT
	bne	1f

	/* Secure Cycle Counter is not disabled */
	mrs	x9, pmcr_el0

	/* Check caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */

	ret
endfunc save_gp_pmcr_pauth_regs

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * The x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to Non-secure state if the
	 * Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f

	/* ----------------------------------------------------------
	 * Back to Non-secure state.
	 * Check if the earlier initialization of MDCR_EL3.SCCD to 1
	 * failed, meaning that ARMv8.5-PMU is not implemented and
	 * PMCR_EL0 should be restored from the non-secure context.
	 * ----------------------------------------------------------
	 */
	mrs	x0, mdcr_el3
	tst	x0, #MDCR_SCCD_BIT
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/* ------------------------------------------------------------------
 * This routine assumes that the SP_EL3 is pointing to a valid
 * context structure from where the gp regs and other special
 * registers can be retrieved.
 * ------------------------------------------------------------------
 */
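/*
 * Usage sketch (an assumption for illustration): the EL3 exception
 * vectors and SMC handlers typically finish with a branch to this
 * routine, e.g.
 *
 *	b	el3_exit
 *
 * after which execution resumes in the lower exception level selected
 * by the SPSR_EL3/ELR_EL3 values held in the context.
 */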
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif
	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#if IMAGE_BL31 && RAS_EXTENSION
	/* ----------------------------------------------------------
	 * Issue Error Synchronization Barrier to synchronize SErrors
	 * before exiting EL3. We're running with EAs unmasked, so
	 * any synchronized errors would be taken immediately;
	 * therefore there is no need to inspect the DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#endif
	exception_return

endfunc el3_exit