/*
 * Copyright (c) 2017-2019, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <common/bl_common.h>
#include <context.h>
#include <cortex_a76.h>
#include <cpu_macros.S>
#include <plat_macros.S>
#include <services/arm_arch_svc.h>

#if !DYNAMIC_WORKAROUND_CVE_2018_3639
#error Cortex A76 requires DYNAMIC_WORKAROUND_CVE_2018_3639=1
#endif

#define ESR_EL3_A64_SMC0	0x5e000000	/* SMC #0 from AArch64 state: EC = 0x17, ISS = 0 */
#define ESR_EL3_A32_SMC0	0x4e000000	/* SMC #0 from AArch32 state: EC = 0x13, ISS = 0 */

	/*
	 * This macro applies the mitigation for CVE-2018-3639.
	 * It implements a fast path where `SMCCC_ARCH_WORKAROUND_2`
	 * SMC calls from a lower EL running in AArch32 or AArch64
	 * are handled immediately and return early.
	 *
	 * The macro saves x2-x3 to the context.  In the fast path
	 * x0-x3 registers do not need to be restored as the calling
	 * context will have saved them.
	 */
	.macro apply_cve_2018_3639_wa _is_sync_exception _esr_el3_val
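	/*
	 * Macro arguments:
	 *   \_is_sync_exception : non-zero for the synchronous exception vectors
	 *   \_esr_el3_val       : ESR_EL3 value expected for an SMC #0 from the
	 *                         originating execution state
	 */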
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]

	.if \_is_sync_exception
		/*
		 * Ensure SMC is coming from A64/A32 state on #0
		 * with W0 = SMCCC_ARCH_WORKAROUND_2
		 *
		 * This sequence evaluates as:
		 *    (W0==SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3==SMC#0) : (NE)
		 * allowing use of a single branch operation
		 */
		orr	w2, wzr, #SMCCC_ARCH_WORKAROUND_2
		cmp	x0, x2
		mrs	x3, esr_el3
		mov_imm	w2, \_esr_el3_val
		ccmp	w2, w3, #0, eq
		/*
		 * Static predictor will predict a fall-through, optimizing
		 * the `SMCCC_ARCH_WORKAROUND_2` fast path.
		 */
		bne	1f

		/*
		 * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
		 * fast path.
		 */
		cmp	x1, xzr /* enable/disable check */

		/*
		 * When the calling context wants mitigation disabled,
		 * we program the mitigation disable function in the
		 * CPU context, which gets invoked on subsequent exits from
		 * EL3 via the `el3_exit` function.  Otherwise NULL is
		 * programmed in the CPU context, which results in caller's
		 * inheriting the EL3 mitigation state (enabled) on subsequent
		 * `el3_exit`.
		 */
		mov	x0, xzr
		adr	x1, cortex_a76_disable_wa_cve_2018_3639
		csel	x1, x1, x0, eq
		str	x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]

		/*
		 * Toggle the mitigation in CPUACTLR2_EL1: clear the
		 * DISABLE_LOAD_PASS_STORE bit if the caller requested disable
		 * (EQ), set it otherwise.
		 */
		mrs	x2, CORTEX_A76_CPUACTLR2_EL1
		orr	x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
		bic	x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
		csel	x3, x3, x1, eq
		msr	CORTEX_A76_CPUACTLR2_EL1, x3
		eret	/* ERET implies ISB */
	.endif
1:
	/*
	 * Always enable the variant 4 (CVE-2018-3639) mitigation during EL3
	 * execution.  This is not required for the fast path above because it
	 * does not perform any memory loads.
	 */
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x2, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x2
	isb

	/*
	 * The caller may have passed arguments to EL3 via x2-x3.
	 * Restore these registers from the context before jumping to the
	 * main runtime vector table entry.
	 */
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.endm

vector_base cortex_a76_wa_cve_2018_3639_a76_vbar
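	/*
	 * These vectors mirror the default runtime exception vectors.  Entries
	 * taken from lower ELs apply the CVE-2018-3639 mitigation before
	 * branching to the corresponding default handler; entries taken from
	 * EL3 itself branch straight to the default handler.
	 */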

	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry cortex_a76_sync_exception_sp_el0

vector_entry cortex_a76_irq_sp_el0
	b	irq_sp_el0
end_vector_entry cortex_a76_irq_sp_el0

vector_entry cortex_a76_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry cortex_a76_fiq_sp_el0

vector_entry cortex_a76_serror_sp_el0
	b	serror_sp_el0
end_vector_entry cortex_a76_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry cortex_a76_sync_exception_sp_elx

vector_entry cortex_a76_irq_sp_elx
	b	irq_sp_elx
end_vector_entry cortex_a76_irq_sp_elx

vector_entry cortex_a76_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry cortex_a76_fiq_sp_elx

vector_entry cortex_a76_serror_sp_elx
	b	serror_sp_elx
end_vector_entry cortex_a76_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
	b	sync_exception_aarch64
end_vector_entry cortex_a76_sync_exception_aarch64

vector_entry cortex_a76_irq_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	irq_aarch64
end_vector_entry cortex_a76_irq_aarch64

vector_entry cortex_a76_fiq_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	fiq_aarch64
end_vector_entry cortex_a76_fiq_aarch64

vector_entry cortex_a76_serror_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	serror_aarch64
end_vector_entry cortex_a76_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
	b	sync_exception_aarch32
end_vector_entry cortex_a76_sync_exception_aarch32

vector_entry cortex_a76_irq_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	irq_aarch32
end_vector_entry cortex_a76_irq_aarch32

vector_entry cortex_a76_fiq_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	fiq_aarch32
end_vector_entry cortex_a76_fiq_aarch32

vector_entry cortex_a76_serror_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	serror_aarch32
end_vector_entry cortex_a76_serror_aarch32

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1073348.
	 * This applies only to revision <= r1p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1073348_wa
	/*
	 * Compare x0 against revision r1p0
	 */
	mov	x17, x30
	bl	check_errata_1073348
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUACTLR_EL1_DISABLE_STATIC_PREDICTION
	msr	CORTEX_A76_CPUACTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1073348_wa

func check_errata_1073348
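	/* Applies to revisions up to and including r1p0 (0x10 = variant 1, revision 0) */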
	mov	x1, #0x10
	b	cpu_rev_var_ls
endfunc check_errata_1073348

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1130799.
	 * This applies only to revision <= r2p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1130799_wa
	/*
	 * Compare x0 against revision r2p0
	 */
	mov	x17, x30
	bl	check_errata_1130799
	cbz	x0, 1f
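	/* Set bit 59 of CPUACTLR2_EL1 to work around this erratum */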
	mrs	x1, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x1, #(1 << 59)
	msr	CORTEX_A76_CPUACTLR2_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1130799_wa

func check_errata_1130799
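	/* Applies to revisions up to and including r2p0 (0x20 = variant 2, revision 0) */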
	mov	x1, #0x20
	b	cpu_rev_var_ls
endfunc check_errata_1130799

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1220197.
	 * This applies only to revision <= r2p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1220197_wa
	/*
	 * Compare x0 against revision r2p0
	 */
	mov	x17, x30
	bl	check_errata_1220197
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUECTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUECTLR_EL1_WS_THR_L2
	msr	CORTEX_A76_CPUECTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1220197_wa

func check_errata_1220197
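	/* Erratum 1220197 also applies to revisions up to and including r2p0 */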
	mov	x1, #0x20
	b	cpu_rev_var_ls
endfunc check_errata_1220197

func check_errata_cve_2018_3639
#if WORKAROUND_CVE_2018_3639
	mov	x0, #ERRATA_APPLIES
#else
	mov	x0, #ERRATA_MISSING
#endif
	ret
endfunc check_errata_cve_2018_3639

func cortex_a76_disable_wa_cve_2018_3639
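	/*
	 * Clear the CVE-2018-3639 mitigation bit.  This function is programmed
	 * into the CPU context by the fast path above and is invoked via
	 * `el3_exit` when a lower EL has requested the mitigation to be
	 * disabled.
	 */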
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	bic	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb
	ret
endfunc cortex_a76_disable_wa_cve_2018_3639

	/* -------------------------------------------------
	 * The CPU Ops reset function for Cortex-A76.
	 * Shall clobber: x0-x19
	 * -------------------------------------------------
	 */
func cortex_a76_reset_func
	mov	x19, x30
	bl	cpu_get_rev_var
	mov	x18, x0

#if ERRATA_A76_1073348
	mov	x0, x18
	bl	errata_a76_1073348_wa
#endif

#if ERRATA_A76_1130799
	mov	x0, x18
	bl	errata_a76_1130799_wa
#endif

#if ERRATA_A76_1220197
	mov	x0, x18
	bl	errata_a76_1220197_wa
#endif

#if WORKAROUND_CVE_2018_3639
	/* If the PE implements SSBS, we don't need the dynamic workaround */
	mrs	x0, id_aa64pfr1_el1
	lsr	x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
	and	x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
	cbnz	x0, 1f

	/* No SSBS support: enable the CVE-2018-3639 mitigation by default */
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	orr	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb

#ifdef IMAGE_BL31
	/*
	 * The Cortex-A76 generic vectors are overwritten to use the vectors
	 * defined above.  This is required in order to apply mitigation
	 * against CVE-2018-3639 on exception entry from lower ELs.
	 */
	adr	x0, cortex_a76_wa_cve_2018_3639_a76_vbar
	msr	vbar_el3, x0
	isb
#endif

1:
#endif

#if ERRATA_DSU_936184
	bl	errata_dsu_936184_wa
#endif
	ret	x19
endfunc cortex_a76_reset_func

	/* ---------------------------------------------
	 * HW will do the cache maintenance while powering down
	 * ---------------------------------------------
	 */
func cortex_a76_core_pwr_dwn
	/* ---------------------------------------------
	 * Enable CPU power down bit in power control register
	 * ---------------------------------------------
	 */
	mrs	x0, CORTEX_A76_CPUPWRCTLR_EL1
	orr	x0, x0, #CORTEX_A76_CORE_PWRDN_EN_MASK
	msr	CORTEX_A76_CPUPWRCTLR_EL1, x0
	isb
	ret
endfunc cortex_a76_core_pwr_dwn

#if REPORT_ERRATA
/*
 * Errata printing function for Cortex-A76. Must follow AAPCS.
 */
func cortex_a76_errata_report
	stp	x8, x30, [sp, #-16]!

	bl	cpu_get_rev_var
	mov	x8, x0

	/*
	 * Report all errata. The revision-variant information is passed to
	 * checking functions of each errata.
	 */
	report_errata ERRATA_A76_1073348, cortex_a76, 1073348
	report_errata ERRATA_A76_1130799, cortex_a76, 1130799
	report_errata ERRATA_A76_1220197, cortex_a76, 1220197
	report_errata WORKAROUND_CVE_2018_3639, cortex_a76, cve_2018_3639
	report_errata ERRATA_DSU_936184, cortex_a76, dsu_936184

	ldp	x8, x30, [sp], #16
	ret
endfunc cortex_a76_errata_report
#endif

	/* ---------------------------------------------
	 * This function provides cortex_a76 specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ascii and
	 * x8 - x15 having values of registers to be
	 * reported.
	 * ---------------------------------------------
	 */
.section .rodata.cortex_a76_regs, "aS"
cortex_a76_regs:  /* The ascii list of register names to be reported */
	.asciz	"cpuectlr_el1", ""

func cortex_a76_cpu_reg_dump
	adr	x6, cortex_a76_regs
	mrs	x8, CORTEX_A76_CPUECTLR_EL1
	ret
endfunc cortex_a76_cpu_reg_dump

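	/*
	 * Register the Cortex-A76 CPU operations: the reset handler, the
	 * CVE-2018-3639 dynamic disable hook and the core power-down handler.
	 */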
declare_cpu_ops_wa cortex_a76, CORTEX_A76_MIDR, \
	cortex_a76_reset_func, \
	CPU_NO_EXTRA1_FUNC, \
	cortex_a76_disable_wa_cve_2018_3639, \
	cortex_a76_core_pwr_dwn