/*
 * Copyright (c) 2014-2019, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <aem_generic.h>
#include <arch.h>
#include <asm_macros.S>
#include <cpu_macros.S>
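
/*
 * cpu_ops for the Arm Architecture Envelope Model (AEM) FVPs. The generic
 * AEM has no errata or reset-time workarounds to apply, so only the core
 * and cluster power-down hooks below do any real work.
 */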

func aem_generic_core_pwr_dwn
	/* ---------------------------------------------
	 * Disable the Data Cache.
	 * ---------------------------------------------
	 */
	mrs	x1, sctlr_el3
	bic	x1, x1, #SCTLR_C_BIT
	msr	sctlr_el3, x1
	isb
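
	/* ---------------------------------------------
	 * With SCTLR_EL3.C cleared, no new lines are
	 * allocated into the data cache while it is
	 * flushed below.
	 * ---------------------------------------------
	 */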

	/* ---------------------------------------------
	 * The AEM model can implement an L3 cache, in
	 * which case L2 is a private per-core cache and
	 * a flush from L1 to L2 is not sufficient.
	 * ---------------------------------------------
	 */
	mrs	x1, clidr_el1

	/* ---------------------------------------------
	 * Check if L3 cache is implemented.
	 * ---------------------------------------------
	 */
	tst	x1, ((1 << CLIDR_FIELD_WIDTH) - 1) << CTYPE_SHIFT(3)
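	/* ---------------------------------------------
	 * Ctype3 (CLIDR_EL1[8:6]) reads as zero when no
	 * level 3 cache is implemented, so the EQ branch
	 * below is taken only when there is no L3.
	 * ---------------------------------------------
	 */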

	/* ---------------------------------------------
	 * If there is no L3 cache, flush L1 to L2 only.
	 * ---------------------------------------------
	 */
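	/* ---------------------------------------------
	 * DCCISW selects the "clean and invalidate by
	 * set/way" operation; the dcsw_op_* cache
	 * helpers expect this operation type in x0.
	 * ---------------------------------------------
	 */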
	mov	x0, #DCCISW
	b.eq	dcsw_op_level1

	mov	x18, x30
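	/* ---------------------------------------------
	 * The bl below overwrites x30, so the return
	 * address is preserved in x18, a temporary the
	 * cache helpers are not expected to clobber.
	 * x30 is restored afterwards so that the final
	 * branch to dcsw_op_level2 returns straight to
	 * this function's caller.
	 * ---------------------------------------------
	 */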

	/* ---------------------------------------------
	 * Flush L1 cache to L2.
	 * ---------------------------------------------
	 */
	bl	dcsw_op_level1
	mov	x30, x18

	/* ---------------------------------------------
	 * Flush L2 cache to L3.
	 * ---------------------------------------------
	 */
	mov	x0, #DCCISW
	b	dcsw_op_level2
endfunc aem_generic_core_pwr_dwn

func aem_generic_cluster_pwr_dwn
	/* ---------------------------------------------
	 * Disable the Data Cache.
	 * ---------------------------------------------
	 */
	mrs	x1, sctlr_el3
	bic	x1, x1, #SCTLR_C_BIT
	msr	sctlr_el3, x1
	isb

	/* ---------------------------------------------
	 * Flush all caches to PoC.
	 * ---------------------------------------------
	 */
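	/* ---------------------------------------------
	 * dcsw_op_all walks every cache level up to the
	 * Level of Coherence reported by CLIDR_EL1,
	 * applying the operation type passed in x0.
	 * ---------------------------------------------
	 */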
	mov	x0, #DCCISW
	b	dcsw_op_all
endfunc aem_generic_cluster_pwr_dwn

#if REPORT_ERRATA
/*
 * Errata printing function for AEM. Must follow AAPCS.
 */
func aem_generic_errata_report
	ret
endfunc aem_generic_errata_report
#endif

	/* ---------------------------------------------
	 * This function provides cpu specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ascii and
	 * x8 - x15 having values of registers to be
	 * reported.
	 * ---------------------------------------------
	 */
.section .rodata.aem_generic_regs, "aS"
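/* Section flags: "a" = allocatable, "S" = contains NUL-terminated strings. */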
aem_generic_regs:  /* The ascii list of register names to be reported */
	.asciz	"" /* no registers to report */

func aem_generic_cpu_reg_dump
	adr	x6, aem_generic_regs
	ret
endfunc aem_generic_cpu_reg_dump


/* cpu_ops for Base AEM FVP */
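/*
 * declare_cpu_ops (from cpu_macros.S) emits a cpu_ops descriptor that the
 * CPU framework matches against MIDR_EL1 at runtime. CPU_NO_RESET_FUNC
 * indicates that the AEM models need no CPU-specific reset handling.
 */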
declare_cpu_ops aem_generic, BASE_AEM_MIDR, CPU_NO_RESET_FUNC, \
	aem_generic_core_pwr_dwn, \
	aem_generic_cluster_pwr_dwn

/* cpu_ops for Foundation FVP */
declare_cpu_ops aem_generic, FOUNDATION_AEM_MIDR, CPU_NO_RESET_FUNC, \
	aem_generic_core_pwr_dwn, \
	aem_generic_cluster_pwr_dwn