enable_mmu.S
/*
 * Copyright (c) 2018, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <asm_macros.S>
#include <assert_macros.S>
#include <xlat_tables_v2.h>
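
/*
 * AArch32 helpers that enable the MMU directly, one per translation
 * regime: SVC/Monitor (PL1) and Hyp (PL2). Callers are expected to have
 * populated the mmu_cfg_params array (via the xlat tables library) with
 * the MAIR, TCR and TTBR values beforehand.
 */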

	.global	enable_mmu_direct_svc_mon
	.global	enable_mmu_direct_hyp

	/* void enable_mmu_direct_svc_mon(unsigned int flags) */
func enable_mmu_direct_svc_mon
	/* Assert that MMU is turned off */
#if ENABLE_ASSERTIONS
	ldcopr  r1, SCTLR
	tst	r1, #SCTLR_M_BIT
	ASM_ASSERT(eq)
#endif

	/* Invalidate TLB entries */
	TLB_INVALIDATE(r0, TLBIALL)

	/* Preserve the flags argument; r0 is about to be clobbered */
	mov	r3, r0
	/*
	 * mmu_cfg_params is an array of 64-bit entries, filled in by the
	 * xlat tables library and indexed by the MMU_CFG_* constants
	 * (hence the "<< 3" offset scaling below).
	 */
	ldr	r0, =mmu_cfg_params

	/* MAIR0. Only the lower 32 bits are used. */
	ldr	r1, [r0, #(MMU_CFG_MAIR << 3)]
	stcopr	r1, MAIR0

	/* TTBCR. Only the lower 32 bits are used. */
	ldr	r2, [r0, #(MMU_CFG_TCR << 3)]
	stcopr	r2, TTBCR

	/* TTBR0 */
	ldr	r1, [r0, #(MMU_CFG_TTBR0 << 3)]
	ldr	r2, [r0, #((MMU_CFG_TTBR0 << 3) + 4)]
	/* stcopr16 performs the 64-bit (MCRR) write TTBR0 requires */
	stcopr16	r1, r2, TTBR0_64

	/* TTBR1 is unused right now; set it to 0. */
	mov	r1, #0
	mov	r2, #0
	stcopr16	r1, r2, TTBR1_64

	/*
	 * Ensure all translation table writes have drained into memory, the TLB
	 * invalidation is complete, and translation register writes are
	 * committed before enabling the MMU
	 */
	dsb	ish
	isb

	/* Enable the MMU, honoring the flags argument */
	ldcopr  r1, SCTLR
	ldr	r2, =(SCTLR_WXN_BIT | SCTLR_C_BIT | SCTLR_M_BIT)
	orr	r1, r1, r2

	/* Clear C bit if requested */
	tst	r3, #DISABLE_DCACHE
	bicne	r1, r1, #SCTLR_C_BIT

	stcopr	r1, SCTLR
	isb

	bx	lr
endfunc enable_mmu_direct_svc_mon
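
/*
 * The Hyp-mode variant below follows the same sequence as above, using
 * the PL2 translation regime registers (HSCTLR, HMAIR0, HTCR, HTTBR).
 * Unlike the PL1&0 regime, Hyp mode has a single base register, so there
 * is no TTBR1 to clear.
 */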


	/* void enable_mmu_direct_hyp(unsigned int flags) */
func enable_mmu_direct_hyp
	/* Assert that MMU is turned off */
#if ENABLE_ASSERTIONS
	ldcopr  r1, HSCTLR
	tst	r1, #HSCTLR_M_BIT
	ASM_ASSERT(eq)
#endif

	/* Invalidate TLB entries */
	TLB_INVALIDATE(r0, TLBIALL)

	/* Preserve the flags argument; r0 is about to be clobbered */
	mov	r3, r0
	/* As above, entries in mmu_cfg_params are 64 bits wide */
	ldr	r0, =mmu_cfg_params

	/* HMAIR0 */
	ldr	r1, [r0, #(MMU_CFG_MAIR << 3)]
	stcopr	r1, HMAIR0

	/* HTCR */
	ldr	r2, [r0, #(MMU_CFG_TCR << 3)]
	stcopr	r2, HTCR

	/* HTTBR */
	ldr	r1, [r0, #(MMU_CFG_TTBR0 << 3)]
	ldr	r2, [r0, #((MMU_CFG_TTBR0 << 3) + 4)]
	stcopr16	r1, r2, HTTBR_64

	/*
	 * Ensure all translation table writes have drained into memory, the TLB
	 * invalidation is complete, and translation register writes are
	 * committed before enabling the MMU
	 */
	dsb	ish
	isb

	/* Enable the MMU, honoring the flags argument */
	ldcopr  r1, HSCTLR
	ldr	r2, =(HSCTLR_WXN_BIT | HSCTLR_C_BIT | HSCTLR_M_BIT)
	orr	r1, r1, r2

	/* Clear C bit if requested */
	tst	r3, #DISABLE_DCACHE
	bicne	r1, r1, #HSCTLR_C_BIT

	stcopr	r1, HSCTLR
	isb

	bx	lr
endfunc enable_mmu_direct_hyp
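
/*
 * Typical call flow from C, as a sketch (the exact setup API depends on
 * the xlat tables v2 version in use):
 *
 *	setup_mmu_cfg(...);			populates mmu_cfg_params
 *	enable_mmu_direct_svc_mon(flags);	flags: 0 or DISABLE_DCACHE
 */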