/*
 * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <common/bl_common.ld.h>
#include <lib/xlat_tables/xlat_tables_defs.h>
#include <platform_def.h>

OUTPUT_FORMAT(PLATFORM_LINKER_FORMAT)
OUTPUT_ARCH(PLATFORM_LINKER_ARCH)
ENTRY(tsp_entrypoint)

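/*
 * The whole image is linked into a single secure RAM region whose base and
 * size (TSP_SEC_MEM_BASE, TSP_SEC_MEM_SIZE) are provided by the platform.
 */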
MEMORY {
    RAM (rwx): ORIGIN = TSP_SEC_MEM_BASE, LENGTH = TSP_SEC_MEM_SIZE
}


SECTIONS
{
    . = BL32_BASE;
    ASSERT(. == ALIGN(PAGE_SIZE),
           "BL32_BASE address is not aligned on a page boundary.")

#if SEPARATE_CODE_AND_RODATA
    .text . : {
        __TEXT_START__ = .;
        *tsp_entrypoint.o(.text*)
        *(.text*)
        *(.vectors)
        . = ALIGN(PAGE_SIZE);
        __TEXT_END__ = .;
    } >RAM

    .rodata . : {
        __RODATA_START__ = .;
        *(.rodata*)

        /*
         * Keep the .got section in the RO section as it is patched
         * prior to enabling the MMU, and having the .got in RO is better for
         * security. GOT is a table of addresses, so ensure 8-byte alignment.
         */
        . = ALIGN(8);
        __GOT_START__ = .;
        *(.got)
        __GOT_END__ = .;

        . = ALIGN(PAGE_SIZE);
        __RODATA_END__ = .;
    } >RAM
#else
    ro . : {
        __RO_START__ = .;
        *tsp_entrypoint.o(.text*)
        *(.text*)
        *(.rodata*)

        /*
         * Keep the .got section in the RO section as it is patched
         * prior to enabling the MMU, and having the .got in RO is better for
         * security. GOT is a table of addresses, so ensure 8-byte alignment.
         */
        . = ALIGN(8);
        __GOT_START__ = .;
        *(.got)
        __GOT_END__ = .;

        *(.vectors)

        __RO_END_UNALIGNED__ = .;
        /*
         * Memory page(s) mapped to this section will be marked as
         * read-only, executable.  No RW data from the next section must
         * creep in.  Ensure the rest of the current memory page is unused.
         */
        . = ALIGN(PAGE_SIZE);
        __RO_END__ = .;
    } >RAM
#endif

    /*
     * Define a linker symbol to mark start of the RW memory area for this
     * image.
     */
    __RW_START__ = . ;

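    /* Writable, initialised data. */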
    .data . : {
        __DATA_START__ = .;
        *(.data*)
        __DATA_END__ = .;
    } >RAM

    /*
     * .rela.dyn needs to come after .data for the readelf utility to parse
     * this section correctly. Ensure 8-byte alignment so that the fields of
     * the RELA data structure are aligned.
     */
    . = ALIGN(8);
    __RELA_START__ = .;
    .rela.dyn . : {
    } >RAM
    __RELA_END__ = .;

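    /*
     * TSP_PROGBITS_LIMIT, when defined by the platform, caps the loadable
     * (progbits) portion of the image.
     */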
#ifdef TSP_PROGBITS_LIMIT
    ASSERT(. <= TSP_PROGBITS_LIMIT, "TSP progbits has exceeded its limit.")
#endif

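    /*
     * Runtime stacks, collected from the tzfw_normal_stacks input section.
     * NOLOAD: the stacks occupy RAM but take no space in the image file.
     */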
    stacks (NOLOAD) : {
        __STACKS_START__ = .;
        *(tzfw_normal_stacks)
        __STACKS_END__ = .;
    } >RAM

    /*
     * The .bss section gets initialised to 0 at runtime.
     * Its base address should be 16-byte aligned for better performance of the
     * zero-initialisation code.
     */
    .bss : ALIGN(16) {
        __BSS_START__ = .;
        *(SORT_BY_ALIGNMENT(.bss*))
        *(COMMON)
        __BSS_END__ = .;
    } >RAM

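    /*
     * Translation tables; XLAT_TABLE_SECTION is provided by
     * common/bl_common.ld.h, included above.
     */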
    XLAT_TABLE_SECTION >RAM

#if USE_COHERENT_MEM
    /*
     * The base address of the coherent memory section must be page-aligned (4K)
     * to guarantee that the coherent data are stored on their own pages and
     * are not mixed with normal data.  This is required to set up the correct
     * memory attributes for the coherent data page tables.
     */
    coherent_ram (NOLOAD) : ALIGN(PAGE_SIZE) {
        __COHERENT_RAM_START__ = .;
        *(tzfw_coherent_mem)
        __COHERENT_RAM_END_UNALIGNED__ = .;
        /*
         * Memory page(s) mapped to this section will be marked
         * as device memory.  No other unexpected data must creep in.
         * Ensure the rest of the current memory page is unused.
         */
        . = ALIGN(PAGE_SIZE);
        __COHERENT_RAM_END__ = .;
    } >RAM
#endif

    /*
     * Define a linker symbol to mark the end of the RW memory area for this
     * image.
     */
    __RW_END__ = .;
    __BL32_END__ = .;

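    /*
     * Dynamic-linking metadata is not needed at runtime (relocations are
     * applied from .rela.dyn above), so discard it.
     */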
    /DISCARD/ : {
        *(.dynsym .dynstr .hash .gnu.hash)
    }

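    /*
     * Sizes exported for the startup code, e.g. when zero-initialising .bss
     * and, if present, the coherent memory region.
     */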
    __BSS_SIZE__ = SIZEOF(.bss);
#if USE_COHERENT_MEM
    __COHERENT_RAM_UNALIGNED_SIZE__ =
        __COHERENT_RAM_END_UNALIGNED__ - __COHERENT_RAM_START__;
#endif

    ASSERT(. <= BL32_LIMIT, "BL32 image has exceeded its limit.")
}