/*
 * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save
	.global	el2_sysregs_context_restore
#endif

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif

	.global	save_gp_pmcr_pauth_regs
	.global	restore_gp_pmcr_pauth_regs

	.global	el3_exit

#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to save EL2 system register context. It assumes that
 * 'x0' is pointing to an 'el2_sys_regs' structure where
 * the register context will be saved.
 *
 * The following registers are not saved:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_save
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cnthp_ctl_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cnthp_cval_el2
	mrs	x16, cnthp_tval_el2
	stp	x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]

	mrs	x17, cntvoff_el2
	mrs	x9, cptr_el2
	stp	x17, x9, [x0, #CTX_CNTVOFF_EL2]

	mrs	x10, dbgvcr32_el2
	mrs	x11, elr_el2
	stp	x10, x11, [x0, #CTX_DBGVCR32_EL2]

	mrs	x14, esr_el2
	mrs	x15, far_el2
	stp	x14, x15, [x0, #CTX_ESR_EL2]

	mrs	x16, fpexc32_el2
	mrs	x17, hacr_el2
	stp	x16, x17, [x0, #CTX_FPEXC32_EL2]

	mrs	x9, hcr_el2
	mrs	x10, hpfar_el2
	stp	x9, x10, [x0, #CTX_HCR_EL2]

	mrs	x11, hstr_el2
	mrs	x12, ICC_SRE_EL2
	stp	x11, x12, [x0, #CTX_HSTR_EL2]

	mrs	x13, ICH_HCR_EL2
	mrs	x14, ICH_VMCR_EL2
	stp	x13, x14, [x0, #CTX_ICH_HCR_EL2]

	mrs	x15, mair_el2
	mrs	x16, mdcr_el2
	stp	x15, x16, [x0, #CTX_MAIR_EL2]

	mrs	x17, PMSCR_EL2
	mrs	x9, sctlr_el2
	stp	x17, x9, [x0, #CTX_PMSCR_EL2]

	mrs	x10, spsr_el2
	mrs	x11, sp_el2
	stp	x10, x11, [x0, #CTX_SPSR_EL2]

	mrs	x12, tcr_el2
	mrs	x13, tpidr_el2
	stp	x12, x13, [x0, #CTX_TCR_EL2]

	mrs	x14, ttbr0_el2
	mrs	x15, vbar_el2
	stp	x14, x15, [x0, #CTX_TTBR0_EL2]

	mrs	x16, vmpidr_el2
	mrs	x17, vpidr_el2
	stp	x16, x17, [x0, #CTX_VMPIDR_EL2]

	mrs	x9, vtcr_el2
	mrs	x10, vttbr_el2
	stp	x9, x10, [x0, #CTX_VTCR_EL2]

#if CTX_INCLUDE_MTE_REGS
	mrs	x11, TFSR_EL2
	str	x11, [x0, #CTX_TFSR_EL2]
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
	mrs	x9, MPAM2_EL2
	mrs	x10, MPAMHCR_EL2
	stp	x9, x10, [x0, #CTX_MPAM2_EL2]

	mrs	x11, MPAMVPM0_EL2
	mrs	x12, MPAMVPM1_EL2
	stp	x11, x12, [x0, #CTX_MPAMVPM0_EL2]

	mrs	x13, MPAMVPM2_EL2
	mrs	x14, MPAMVPM3_EL2
	stp	x13, x14, [x0, #CTX_MPAMVPM2_EL2]

	mrs	x15, MPAMVPM4_EL2
	mrs	x16, MPAMVPM5_EL2
	stp	x15, x16, [x0, #CTX_MPAMVPM4_EL2]

	mrs	x17, MPAMVPM6_EL2
	mrs	x9, MPAMVPM7_EL2
	stp	x17, x9, [x0, #CTX_MPAMVPM6_EL2]

	mrs	x10, MPAMVPMV_EL2
	str	x10, [x0, #CTX_MPAMVPMV_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 6)
	mrs	x11, HAFGRTR_EL2
	mrs	x12, HDFGRTR_EL2
	stp	x11, x12, [x0, #CTX_HAFGRTR_EL2]

	mrs	x13, HDFGWTR_EL2
	mrs	x14, HFGITR_EL2
	stp	x13, x14, [x0, #CTX_HDFGWTR_EL2]

	mrs	x15, HFGRTR_EL2
	mrs	x16, HFGWTR_EL2
	stp	x15, x16, [x0, #CTX_HFGRTR_EL2]

	mrs	x17, CNTPOFF_EL2
	str	x17, [x0, #CTX_CNTPOFF_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
	mrs	x9, cnthps_ctl_el2
	mrs	x10, cnthps_cval_el2
	stp	x9, x10, [x0, #CTX_CNTHPS_CTL_EL2]

	mrs	x11, cnthps_tval_el2
	mrs	x12, cnthvs_ctl_el2
	stp	x11, x12, [x0, #CTX_CNTHPS_TVAL_EL2]

	mrs	x13, cnthvs_cval_el2
	mrs	x14, cnthvs_tval_el2
	stp	x13, x14, [x0, #CTX_CNTHVS_CVAL_EL2]

	mrs	x15, cnthv_ctl_el2
	mrs	x16, cnthv_cval_el2
	stp	x15, x16, [x0, #CTX_CNTHV_CTL_EL2]

	mrs	x17, cnthv_tval_el2
	mrs	x9, contextidr_el2
	stp	x17, x9, [x0, #CTX_CNTHV_TVAL_EL2]

	mrs	x10, sder32_el2
	str	x10, [x0, #CTX_SDER32_EL2]

	mrs	x11, ttbr1_el2
	str	x11, [x0, #CTX_TTBR1_EL2]

	mrs	x12, vdisr_el2
	str	x12, [x0, #CTX_VDISR_EL2]

	mrs	x13, vncr_el2
	str	x13, [x0, #CTX_VNCR_EL2]

	mrs	x14, vsesr_el2
	str	x14, [x0, #CTX_VSESR_EL2]

	mrs	x15, vstcr_el2
	str	x15, [x0, #CTX_VSTCR_EL2]

	mrs	x16, vsttbr_el2
	str	x16, [x0, #CTX_VSTTBR_EL2]

	mrs	x17, TRFCR_EL2
	str	x17, [x0, #CTX_TRFCR_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
	mrs	x9, scxtnum_el2
	str	x9, [x0, #CTX_SCXTNUM_EL2]
#endif

	ret
endfunc el2_sysregs_context_save
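
/* -----------------------------------------------------
 * Illustrative sketch only, not part of the original
 * call graph: a caller passes the base of the EL2
 * system-register block in x0. Assuming that block
 * lives at some CTX_EL2_SYSREGS_OFFSET within the CPU
 * context pointed to by x1 (an assumed name, not
 * defined in this file), the call would look like:
 *
 *	add	x0, x1, #CTX_EL2_SYSREGS_OFFSET
 *	bl	el2_sysregs_context_save
 * -----------------------------------------------------
 */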

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to restore EL2 system register context. It assumes
 * that 'x0' is pointing to an 'el2_sys_regs' structure
 * from where the register context will be restored.
 *
 * The following registers are not restored:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cnthp_ctl_el2, x14

	ldp	x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]
	msr	cnthp_cval_el2, x15
	msr	cnthp_tval_el2, x16

	ldp	x17, x9, [x0, #CTX_CNTVOFF_EL2]
	msr	cntvoff_el2, x17
	msr	cptr_el2, x9

	ldp	x10, x11, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x10
	msr	elr_el2, x11

	ldp	x14, x15, [x0, #CTX_ESR_EL2]
	msr	esr_el2, x14
	msr	far_el2, x15

	ldp	x16, x17, [x0, #CTX_FPEXC32_EL2]
	msr	fpexc32_el2, x16
	msr	hacr_el2, x17

	ldp	x9, x10, [x0, #CTX_HCR_EL2]
	msr	hcr_el2, x9
	msr	hpfar_el2, x10

	ldp	x11, x12, [x0, #CTX_HSTR_EL2]
	msr	hstr_el2, x11
	msr	ICC_SRE_EL2, x12

	ldp	x13, x14, [x0, #CTX_ICH_HCR_EL2]
	msr	ICH_HCR_EL2, x13
	msr	ICH_VMCR_EL2, x14

	ldp	x15, x16, [x0, #CTX_MAIR_EL2]
	msr	mair_el2, x15
	msr	mdcr_el2, x16

	ldp	x17, x9, [x0, #CTX_PMSCR_EL2]
	msr	PMSCR_EL2, x17
	msr	sctlr_el2, x9

	ldp	x10, x11, [x0, #CTX_SPSR_EL2]
	msr	spsr_el2, x10
	msr	sp_el2, x11

	ldp	x12, x13, [x0, #CTX_TCR_EL2]
	msr	tcr_el2, x12
	msr	tpidr_el2, x13

	ldp	x14, x15, [x0, #CTX_TTBR0_EL2]
	msr	ttbr0_el2, x14
	msr	vbar_el2, x15

	ldp	x16, x17, [x0, #CTX_VMPIDR_EL2]
	msr	vmpidr_el2, x16
	msr	vpidr_el2, x17

	ldp	x9, x10, [x0, #CTX_VTCR_EL2]
	msr	vtcr_el2, x9
	msr	vttbr_el2, x10

#if CTX_INCLUDE_MTE_REGS
	ldr	x11, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x11
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
	ldp	x9, x10, [x0, #CTX_MPAM2_EL2]
	msr	MPAM2_EL2, x9
	msr	MPAMHCR_EL2, x10

	ldp	x11, x12, [x0, #CTX_MPAMVPM0_EL2]
	msr	MPAMVPM0_EL2, x11
	msr	MPAMVPM1_EL2, x12

	ldp	x13, x14, [x0, #CTX_MPAMVPM2_EL2]
	msr	MPAMVPM2_EL2, x13
	msr	MPAMVPM3_EL2, x14

	ldp	x15, x16, [x0, #CTX_MPAMVPM4_EL2]
	msr	MPAMVPM4_EL2, x15
	msr	MPAMVPM5_EL2, x16

	ldp	x17, x9, [x0, #CTX_MPAMVPM6_EL2]
	msr	MPAMVPM6_EL2, x17
	msr	MPAMVPM7_EL2, x9

	ldr	x10, [x0, #CTX_MPAMVPMV_EL2]
	msr	MPAMVPMV_EL2, x10
#endif

#if ARM_ARCH_AT_LEAST(8, 6)
	ldp	x11, x12, [x0, #CTX_HAFGRTR_EL2]
	msr	HAFGRTR_EL2, x11
	msr	HDFGRTR_EL2, x12

	ldp	x13, x14, [x0, #CTX_HDFGWTR_EL2]
	msr	HDFGWTR_EL2, x13
	msr	HFGITR_EL2, x14

	ldp	x15, x16, [x0, #CTX_HFGRTR_EL2]
	msr	HFGRTR_EL2, x15
	msr	HFGWTR_EL2, x16

	ldr	x17, [x0, #CTX_CNTPOFF_EL2]
	msr	CNTPOFF_EL2, x17
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
	ldp	x9, x10, [x0, #CTX_CNTHPS_CTL_EL2]
	msr	cnthps_ctl_el2, x9
	msr	cnthps_cval_el2, x10

	ldp	x11, x12, [x0, #CTX_CNTHPS_TVAL_EL2]
	msr	cnthps_tval_el2, x11
	msr	cnthvs_ctl_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHVS_CVAL_EL2]
	msr	cnthvs_cval_el2, x13
	msr	cnthvs_tval_el2, x14

	ldp	x15, x16, [x0, #CTX_CNTHV_CTL_EL2]
	msr	cnthv_ctl_el2, x15
	msr	cnthv_cval_el2, x16

	ldp	x17, x9, [x0, #CTX_CNTHV_TVAL_EL2]
	msr	cnthv_tval_el2, x17
	msr	contextidr_el2, x9

	ldr	x10, [x0, #CTX_SDER32_EL2]
	msr	sder32_el2, x10

	ldr	x11, [x0, #CTX_TTBR1_EL2]
	msr	ttbr1_el2, x11

	ldr	x12, [x0, #CTX_VDISR_EL2]
	msr	vdisr_el2, x12

	ldr	x13, [x0, #CTX_VNCR_EL2]
	msr	vncr_el2, x13

	ldr	x14, [x0, #CTX_VSESR_EL2]
	msr	vsesr_el2, x14

	ldr	x15, [x0, #CTX_VSTCR_EL2]
	msr	vstcr_el2, x15

	ldr	x16, [x0, #CTX_VSTTBR_EL2]
	msr	vsttbr_el2, x16

	ldr	x17, [x0, #CTX_TRFCR_EL2]
	msr	TRFCR_EL2, x17
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
	ldr	x9, [x0, #CTX_SCXTNUM_EL2]
	msr	scxtnum_el2, x9
#endif

	ret
endfunc el2_sysregs_context_restore
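
/* -----------------------------------------------------
 * Editorial note: every 'stp'/'ldp' pair above accesses
 * two adjacent 8-byte slots, so it relies on the
 * corresponding CTX_* offsets in <context.h> being laid
 * out consecutively. For example,
 *	stp	x9, x10, [x0, #CTX_ACTLR_EL2]
 * assumes CTX_AFSR0_EL2 == CTX_ACTLR_EL2 + 8.
 * -----------------------------------------------------
 */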

#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to save EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

	mrs	x15, sctlr_el1
	mrs	x16, actlr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, tcr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_TCR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif

	ret
endfunc el1_sysregs_context_save
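
/* ------------------------------------------------------------------
 * Illustrative sketch only: from C, these helpers are typically
 * driven by the context-management code along the lines of
 *
 *	void el1_sysregs_context_save(el1_sys_regs_t *regs);
 *	el1_sysregs_context_save(get_el1_sysregs_ctx(ctx));
 *
 * where 'el1_sys_regs_t', 'get_el1_sysregs_ctx' and 'ctx' are
 * assumed names from the surrounding firmware, not defined here.
 * ------------------------------------------------------------------
 */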

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to restore EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	actlr_el1, x16

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_TCR_EL1]
	msr	tcr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif
	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif

	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to save the floating
 * point register context. It assumes that 'x0' is pointing to a
 * 'fp_regs' structure where the register context will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers
 * nor sets this trap, so TFP is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to restore the floating
 * point register context. It assumes that 'x0' is pointing to a
 * 'fp_regs' structure from where the register context will be
 * restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers
 * nor sets this trap, so TFP is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif

	/*
	 * No explicit ISB required here as the ERET used to
	 * switch to secure EL1 or non-secure world covers it
	 */
	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

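/* ------------------------------------------------------------------
 * Illustrative sketch only: the FP save/restore functions above
 * assume CPTR_EL3.TFP is clear. A hypothetical caller that could
 * not rely on this would first disable the trap, e.g.:
 *
 *	mrs	x9, cptr_el3
 *	bic	x9, x9, #TFP_BIT
 *	msr	cptr_el3, x9
 *	isb
 *
 * (TFP_BIT is assumed to be provided by <arch.h>.)
 * ------------------------------------------------------------------
 */
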
/* ------------------------------------------------------------------
 * The following function is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks whether the Secure Cycle Counter is disabled in
 * MDCR_EL3 when ARMv8.5-PMU is implemented and, if called from the
 * Non-secure state, saves PMCR_EL0 and disables the Cycle Counter.
 *
 * Ideally we would only save and restore the callee saved registers
 * when a world switch occurs but that type of implementation is more
 * complex. So currently we will always save and restore these
 * registers on entry and exit of EL3.
 * These are not macros to ensure their invocation fits within the 32
 * instructions per exception vector.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
func save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check if earlier initialization of MDCR_EL3.SCCD to 1
	 * failed, meaning that ARMv8.5-PMU is not implemented and
	 * PMCR_EL0 should be saved in the non-secure context.
	 * ----------------------------------------------------------
	 */
	mrs	x9, mdcr_el3
	tst	x9, #MDCR_SCCD_BIT
	bne	1f

	/* Secure Cycle Counter is not disabled */
	mrs	x9, pmcr_el0

	/* Check caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */

	ret
endfunc save_gp_pmcr_pauth_regs
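
/* ------------------------------------------------------------------
 * Illustrative sketch only: a hypothetical exception-vector entry
 * would invoke the function above with SP_EL3 already pointing at
 * the interrupted world's CPU context, along the lines of:
 *
 *	bl	save_gp_pmcr_pauth_regs
 *	...handle the exception...
 *	b	el3_exit
 * ------------------------------------------------------------------
 */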

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * The x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3-PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to Non-secure state if
	 * Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f

	/* ----------------------------------------------------------
	 * Back to Non-secure state.
	 * Check if earlier initialization of MDCR_EL3.SCCD to 1
	 * failed, meaning that ARMv8.5-PMU is not implemented and
	 * PMCR_EL0 should be restored from the non-secure context.
	 * ----------------------------------------------------------
	 */
	mrs	x0, mdcr_el3
	tst	x0, #MDCR_SCCD_BIT
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

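/* ------------------------------------------------------------------
 * Illustrative sketch only: a runtime service would typically write
 * its return value(s) into the saved GP-register slots and branch
 * to el3_exit, e.g. (hypothetical):
 *
 *	str	x0, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
 *	b	el3_exit
 * ------------------------------------------------------------------
 */
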
/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 is pointing to a valid context
 * structure from where the gp regs and other special registers can
 * be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#if IMAGE_BL31 && RAS_EXTENSION
	/* ----------------------------------------------------------
	 * Issue Error Synchronization Barrier to synchronize SErrors
	 * before exiting EL3. We're running with EAs unmasked, so
	 * any synchronized errors would be taken immediately;
	 * therefore no need to inspect DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#endif
	exception_return

endfunc el3_exit