/*
 * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save
	.global	el2_sysregs_context_restore
#endif

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif
	.global	save_gp_pmcr_pauth_regs
	.global	restore_gp_pmcr_pauth_regs
	.global	save_and_update_ptw_el1_sys_regs
	.global	el3_exit
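/* ------------------------------------------------------------------
 * Illustrative usage only (a sketch, not the actual call sites; the
 * real callers live in the BL31 exception vector and context
 * management code):
 *
 *	bl	save_gp_pmcr_pauth_regs
 *	...EL3 handles the SMC or exception...
 *	b	el3_exit
 *
 * The EL1/EL2/FP helpers below are similarly invoked around world
 * switches by the context management code.
 * ------------------------------------------------------------------
 */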

#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to save EL2 system register context. It assumes that
 * 'x0' is pointing to an 'el2_sys_regs' structure where
 * the register context will be saved.
 *
 * The following registers are not saved:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */

func el2_sysregs_context_save
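	/*
	 * Registers are read into x9-x17 and stored in adjacent pairs
	 * with stp; each CTX_* offset below is defined in context.h
	 * (included above) and the second register of a pair lands at
	 * offset + 8.
	 */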
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cnthp_ctl_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cnthp_cval_el2
	mrs	x16, cnthp_tval_el2
	stp	x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]

	mrs	x17, cntvoff_el2
	mrs	x9, cptr_el2
	stp	x17, x9, [x0, #CTX_CNTVOFF_EL2]

	mrs	x11, elr_el2
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x10, dbgvcr32_el2
	stp	x10, x11, [x0, #CTX_DBGVCR32_EL2]
#else
	str	x11, [x0, #CTX_ELR_EL2]
#endif

	mrs	x14, esr_el2
	mrs	x15, far_el2
	stp	x14, x15, [x0, #CTX_ESR_EL2]

	mrs	x16, hacr_el2
	mrs	x17, hcr_el2
	stp	x16, x17, [x0, #CTX_HACR_EL2]

	mrs	x9, hpfar_el2
	mrs	x10, hstr_el2
	stp	x9, x10, [x0, #CTX_HPFAR_EL2]

	mrs	x11, ICC_SRE_EL2
	mrs	x12, ICH_HCR_EL2
	stp	x11, x12, [x0, #CTX_ICC_SRE_EL2]

	mrs	x13, ICH_VMCR_EL2
	mrs	x14, mair_el2
	stp	x13, x14, [x0, #CTX_ICH_VMCR_EL2]

	mrs	x15, mdcr_el2
#if ENABLE_SPE_FOR_LOWER_ELS
	mrs	x16, PMSCR_EL2
	stp	x15, x16, [x0, #CTX_MDCR_EL2]
#else
	str	x15, [x0, #CTX_MDCR_EL2]
#endif

	mrs	x17, sctlr_el2
	mrs	x9, spsr_el2
	stp	x17, x9, [x0, #CTX_SCTLR_EL2]

	mrs	x10, sp_el2
	mrs	x11, tcr_el2
	stp	x10, x11, [x0, #CTX_SP_EL2]

	mrs	x12, tpidr_el2
	mrs	x13, ttbr0_el2
	stp	x12, x13, [x0, #CTX_TPIDR_EL2]

	mrs	x14, vbar_el2
	mrs	x15, vmpidr_el2
	stp	x14, x15, [x0, #CTX_VBAR_EL2]

	mrs	x16, vpidr_el2
	mrs	x17, vtcr_el2
	stp	x16, x17, [x0, #CTX_VPIDR_EL2]

	mrs	x9, vttbr_el2
	str	x9, [x0, #CTX_VTTBR_EL2]

#if CTX_INCLUDE_MTE_REGS
	mrs	x10, TFSR_EL2
	str	x10, [x0, #CTX_TFSR_EL2]
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
	mrs	x9, MPAM2_EL2
	mrs	x10, MPAMHCR_EL2
	stp	x9, x10, [x0, #CTX_MPAM2_EL2]

	mrs	x11, MPAMVPM0_EL2
	mrs	x12, MPAMVPM1_EL2
	stp	x11, x12, [x0, #CTX_MPAMVPM0_EL2]

	mrs	x13, MPAMVPM2_EL2
	mrs	x14, MPAMVPM3_EL2
	stp	x13, x14, [x0, #CTX_MPAMVPM2_EL2]

	mrs	x15, MPAMVPM4_EL2
	mrs	x16, MPAMVPM5_EL2
	stp	x15, x16, [x0, #CTX_MPAMVPM4_EL2]

	mrs	x17, MPAMVPM6_EL2
	mrs	x9, MPAMVPM7_EL2
	stp	x17, x9, [x0, #CTX_MPAMVPM6_EL2]

	mrs	x10, MPAMVPMV_EL2
	str	x10, [x0, #CTX_MPAMVPMV_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 6)
	mrs	x11, HAFGRTR_EL2
	mrs	x12, HDFGRTR_EL2
	stp	x11, x12, [x0, #CTX_HAFGRTR_EL2]

	mrs	x13, HDFGWTR_EL2
	mrs	x14, HFGITR_EL2
	stp	x13, x14, [x0, #CTX_HDFGWTR_EL2]

	mrs	x15, HFGRTR_EL2
	mrs	x16, HFGWTR_EL2
	stp	x15, x16, [x0, #CTX_HFGRTR_EL2]

	mrs	x17, CNTPOFF_EL2
	str	x17, [x0, #CTX_CNTPOFF_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
	mrs	x9, cnthps_ctl_el2
	mrs	x10, cnthps_cval_el2
	stp	x9, x10, [x0, #CTX_CNTHPS_CTL_EL2]

	mrs	x11, cnthps_tval_el2
	mrs	x12, cnthvs_ctl_el2
	stp	x11, x12, [x0, #CTX_CNTHPS_TVAL_EL2]

	mrs	x13, cnthvs_cval_el2
	mrs	x14, cnthvs_tval_el2
	stp	x13, x14, [x0, #CTX_CNTHVS_CVAL_EL2]

	mrs	x15, cnthv_ctl_el2
	mrs	x16, cnthv_cval_el2
	stp	x15, x16, [x0, #CTX_CNTHV_CTL_EL2]

	mrs	x17, cnthv_tval_el2
	mrs	x9, contextidr_el2
	stp	x17, x9, [x0, #CTX_CNTHV_TVAL_EL2]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x10, sder32_el2
	str	x10, [x0, #CTX_SDER32_EL2]
#endif

	mrs	x11, ttbr1_el2
	str	x11, [x0, #CTX_TTBR1_EL2]

	mrs	x12, vdisr_el2
	str	x12, [x0, #CTX_VDISR_EL2]

#if CTX_INCLUDE_NEVE_REGS
	mrs	x13, vncr_el2
	str	x13, [x0, #CTX_VNCR_EL2]
#endif

	mrs	x14, vsesr_el2
	str	x14, [x0, #CTX_VSESR_EL2]

	mrs	x15, vstcr_el2
	str	x15, [x0, #CTX_VSTCR_EL2]

	mrs	x16, vsttbr_el2
	str	x16, [x0, #CTX_VSTTBR_EL2]

	mrs	x17, TRFCR_EL2
	str	x17, [x0, #CTX_TRFCR_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
	mrs	x9, scxtnum_el2
	str	x9, [x0, #CTX_SCXTNUM_EL2]
#endif

	ret
endfunc el2_sysregs_context_save

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to restore EL2 system register context. It assumes
 * that 'x0' is pointing to an 'el2_sys_regs' structure
 * from where the register context will be restored.
 *
 * The following registers are not restored:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_restore
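	/*
	 * Mirror image of el2_sysregs_context_save: register pairs are
	 * loaded with ldp from the same CTX_* offsets and written back
	 * with msr.
	 */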

	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cnthp_ctl_el2, x14

	ldp	x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]
	msr	cnthp_cval_el2, x15
	msr	cnthp_tval_el2, x16

	ldp	x17, x9, [x0, #CTX_CNTVOFF_EL2]
	msr	cntvoff_el2, x17
	msr	cptr_el2, x9

#if CTX_INCLUDE_AARCH32_REGS
	ldp	x10, x11, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x10
#else
	ldr	x11, [x0, #CTX_ELR_EL2]
#endif
	msr	elr_el2, x11

	ldp	x14, x15, [x0, #CTX_ESR_EL2]
	msr	esr_el2, x14
	msr	far_el2, x15

	ldp	x16, x17, [x0, #CTX_HACR_EL2]
	msr	hacr_el2, x16
	msr	hcr_el2, x17

	ldp	x9, x10, [x0, #CTX_HPFAR_EL2]
	msr	hpfar_el2, x9
	msr	hstr_el2, x10

	ldp	x11, x12, [x0, #CTX_ICC_SRE_EL2]
	msr	ICC_SRE_EL2, x11
	msr	ICH_HCR_EL2, x12

	ldp	x13, x14, [x0, #CTX_ICH_VMCR_EL2]
	msr	ICH_VMCR_EL2, x13
	msr	mair_el2, x14

#if ENABLE_SPE_FOR_LOWER_ELS
	ldp	x15, x16, [x0, #CTX_MDCR_EL2]
	msr	PMSCR_EL2, x16
#else
	ldr	x15, [x0, #CTX_MDCR_EL2]
#endif
	msr	mdcr_el2, x15

	ldp	x17, x9, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x17
	msr	spsr_el2, x9

	ldp	x10, x11, [x0, #CTX_SP_EL2]
	msr	sp_el2, x10
	msr	tcr_el2, x11

	ldp	x12, x13, [x0, #CTX_TPIDR_EL2]
	msr	tpidr_el2, x12
	msr	ttbr0_el2, x13

	ldp	x13, x14, [x0, #CTX_VBAR_EL2]
	msr	vbar_el2, x13
	msr	vmpidr_el2, x14

	ldp	x15, x16, [x0, #CTX_VPIDR_EL2]
	msr	vpidr_el2, x15
	msr	vtcr_el2, x16

	ldr	x17, [x0, #CTX_VTTBR_EL2]
	msr	vttbr_el2, x17

#if CTX_INCLUDE_MTE_REGS
	ldr	x9, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x9
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
	ldp	x10, x11, [x0, #CTX_MPAM2_EL2]
	msr	MPAM2_EL2, x10
	msr	MPAMHCR_EL2, x11

	ldp	x12, x13, [x0, #CTX_MPAMVPM0_EL2]
	msr	MPAMVPM0_EL2, x12
	msr	MPAMVPM1_EL2, x13

	ldp	x14, x15, [x0, #CTX_MPAMVPM2_EL2]
	msr	MPAMVPM2_EL2, x14
	msr	MPAMVPM3_EL2, x15

	ldp	x16, x17, [x0, #CTX_MPAMVPM4_EL2]
	msr	MPAMVPM4_EL2, x16
	msr	MPAMVPM5_EL2, x17

	ldp	x9, x10, [x0, #CTX_MPAMVPM6_EL2]
	msr	MPAMVPM6_EL2, x9
	msr	MPAMVPM7_EL2, x10

	ldr	x11, [x0, #CTX_MPAMVPMV_EL2]
	msr	MPAMVPMV_EL2, x11
#endif

#if ARM_ARCH_AT_LEAST(8, 6)
	ldp	x12, x13, [x0, #CTX_HAFGRTR_EL2]
	msr	HAFGRTR_EL2, x12
	msr	HDFGRTR_EL2, x13

	ldp	x14, x15, [x0, #CTX_HDFGWTR_EL2]
	msr	HDFGWTR_EL2, x14
	msr	HFGITR_EL2, x15

	ldp	x16, x17, [x0, #CTX_HFGRTR_EL2]
	msr	HFGRTR_EL2, x16
	msr	HFGWTR_EL2, x17

	ldr	x9, [x0, #CTX_CNTPOFF_EL2]
	msr	CNTPOFF_EL2, x9
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
	ldp	x10, x11, [x0, #CTX_CNTHPS_CTL_EL2]
	msr	cnthps_ctl_el2, x10
	msr	cnthps_cval_el2, x11

	ldp	x12, x13, [x0, #CTX_CNTHPS_TVAL_EL2]
	msr	cnthps_tval_el2, x12
	msr	cnthvs_ctl_el2, x13

	ldp	x14, x15, [x0, #CTX_CNTHVS_CVAL_EL2]
	msr	cnthvs_cval_el2, x14
	msr	cnthvs_tval_el2, x15

	ldp	x16, x17, [x0, #CTX_CNTHV_CTL_EL2]
	msr	cnthv_ctl_el2, x16
	msr	cnthv_cval_el2, x17

	ldp	x9, x10, [x0, #CTX_CNTHV_TVAL_EL2]
	msr	cnthv_tval_el2, x9
	msr	contextidr_el2, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_SDER32_EL2]
	msr	sder32_el2, x11
#endif

	ldr	x12, [x0, #CTX_TTBR1_EL2]
	msr	ttbr1_el2, x12

	ldr	x13, [x0, #CTX_VDISR_EL2]
	msr	vdisr_el2, x13

#if CTX_INCLUDE_NEVE_REGS
	ldr	x14, [x0, #CTX_VNCR_EL2]
	msr	vncr_el2, x14
#endif

	ldr	x15, [x0, #CTX_VSESR_EL2]
	msr	vsesr_el2, x15

	ldr	x16, [x0, #CTX_VSTCR_EL2]
	msr	vstcr_el2, x16

	ldr	x17, [x0, #CTX_VSTTBR_EL2]
	msr	vsttbr_el2, x17

	ldr	x9, [x0, #CTX_TRFCR_EL2]
	msr	TRFCR_EL2, x9
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
	ldr	x10, [x0, #CTX_SCXTNUM_EL2]
	msr	scxtnum_el2, x10
#endif

	ret
endfunc el2_sysregs_context_restore

#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to save EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save
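	/*
	 * Note: with ERRATA_SPECULATIVE_AT, SCTLR_EL1 and TCR_EL1 are
	 * saved by save_and_update_ptw_el1_sys_regs instead, so they
	 * are skipped below.
	 */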
	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

#if !ERRATA_SPECULATIVE_AT
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif

	ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to restore EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore
	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

#if !ERRATA_SPECULATIVE_AT
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif
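	/*
	 * With ERRATA_SPECULATIVE_AT, SCTLR_EL1 and TCR_EL1 are
	 * restored on the el3_exit path by the
	 * restore_ptw_el1_sys_regs macro instead.
	 */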

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif
	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif
	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS (aapcs64)
 * to use x9-x17 (temporary caller-saved registers) to save the
 * floating point register context. It assumes that 'x0' is pointing
 * to a 'fp_regs' structure where the register context will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, we don't currently use VFP registers or set traps in
 * Trusted Firmware, so TFP is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
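	/*
	 * Q0-Q31 are 128-bit registers, so each stp below stores 32
	 * bytes and the CTX_FP_Q<n> offsets advance by 32 bytes
	 * accordingly.
	 */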
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS (aapcs64)
 * to use x9-x17 (temporary caller-saved registers) to restore the
 * floating point register context. It assumes that 'x0' is pointing
 * to a 'fp_regs' structure from where the register context will be
 * restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, we don't currently use VFP registers or set traps in
 * Trusted Firmware, so TFP is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif
	/*
	 * No explicit ISB required here as ERET to
	 * switch to secure EL1 or non-secure world
	 * covers it
	 */
	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

/* ------------------------------------------------------------------
 * The following function is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks whether the Secure Cycle Counter is disabled in
 * MDCR_EL3 when ARMv8.5-PMU is implemented; if it is not, and the
 * function was called from Non-secure state, it saves PMCR_EL0 and
 * disables the Cycle Counter.
 *
 * Ideally we would only save and restore the callee saved registers
 * when a world switch occurs but that type of implementation is more
 * complex. So currently we will always save and restore these
 * registers on entry and exit of EL3.
 * These are not macros to ensure their invocation fits within the 32
 * instructions per exception vector.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
func save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
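	/*
	 * x18 was stored to the context above, so it can be clobbered
	 * here (hence "clobbers: x18" in the function header).
	 */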
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check if the earlier initialization of MDCR_EL3.SCCD to 1
	 * failed, meaning that ARMv8.5-PMU is not implemented and
	 * PMCR_EL0 should be saved in the non-secure context.
	 * ----------------------------------------------------------
	 */
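	/*
	 * Flow: if MDCR_EL3.SCCD is set, the branch to 1f skips both
	 * the save and the disable below. Otherwise PMCR_EL0 is saved
	 * only for a Non-secure caller (the beq 2f skips the str when
	 * SCR_EL3.NS == 0), and PMCR_EL0.DP is then set in either case.
	 */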
	mrs	x9, mdcr_el3
	tst	x9, #MDCR_SCCD_BIT
	bne	1f

	/* Secure Cycle Counter is not disabled */
	mrs	x9, pmcr_el0

	/* Check caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */

	ret
endfunc save_gp_pmcr_pauth_regs

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to Non-secure state if
	 * Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f

	/* ----------------------------------------------------------
	 * Back to Non-secure state.
	 * Check if the earlier initialization of MDCR_EL3.SCCD to 1
	 * failed, meaning that ARMv8.5-PMU is not implemented and
	 * PMCR_EL0 should be restored from the non-secure context.
	 * ----------------------------------------------------------
	 */
	mrs	x0, mdcr_el3
	tst	x0, #MDCR_SCCD_BIT
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/*
 * In case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and TCR_EL1
 * registers and update the EL1 registers so that the stage 1 and
 * stage 2 page table walks are disabled.
 */
func save_and_update_ptw_el1_sys_regs
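	/*
	 * Note: x29 is used as the scratch register here, unlike the
	 * x9-x17 convention followed by the other helpers in this
	 * file.
	 */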
	/* ----------------------------------------------------------
	 * Save only sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The steps below must be followed in this order to disable
	 * the page table walk for lower ELs (EL1 and EL0). The first
	 * step disables the stage 1 page table walk and the second
	 * step forces the page table walker to honour the
	 * TCR_EL1.EPDx bits during address translation. The ISB
	 * ensures the CPU performs these two steps in order.
	 *
	 * 1. Update TCR_EL1.EPDx bits to disable page table walk by
	 *    stage1.
	 * 2. Enable MMU bit to avoid identity mapping via stage2
	 *    and force TCR_EL1.EPDx to be used by the page table
	 *    walker.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs

/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 is pointing to a valid context
 * structure from where the gp regs and other special registers can
 * be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif
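	/*
	 * restore_ptw_el1_sys_regs is an assembler macro (pulled in
	 * via the includes above) that undoes
	 * save_and_update_ptw_el1_sys_regs when ERRATA_SPECULATIVE_AT
	 * is enabled and expands to nothing otherwise.
	 */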
	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#if IMAGE_BL31 && RAS_EXTENSION
	/* ----------------------------------------------------------
	 * Issue Error Synchronization Barrier to synchronize SErrors
	 * before exiting EL3. We're running with EAs unmasked, so
	 * any synchronized errors would be taken immediately;
	 * therefore no need to inspect DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#endif
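	/*
	 * exception_return is a macro (see asm_macros.S) rather than a
	 * bare eret: it expands to ERET plus whatever speculation
	 * barrier sequence the build requires.
	 */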
	exception_return
endfunc el3_exit