Unverified Commit 9a93d8cc authored by Dimitris Papastamos, committed by GitHub

Merge pull request #1460 from robertovargas-arm/clang

Make TF compatible with Clang assembler and linker
Showing with 268 additions and 215 deletions
...@@ -85,7 +85,13 @@ $(eval $(call add_define,DEBUG)) ...@@ -85,7 +85,13 @@ $(eval $(call add_define,DEBUG))
ifneq (${DEBUG}, 0) ifneq (${DEBUG}, 0)
BUILD_TYPE := debug BUILD_TYPE := debug
TF_CFLAGS += -g TF_CFLAGS += -g
ASFLAGS += -g -Wa,--gdwarf-2
ifneq ($(findstring clang,$(notdir $(CC))),)
ASFLAGS += -g
else
ASFLAGS += -g -Wa,--gdwarf-2
endif
# Use LOG_LEVEL_INFO by default for debug builds # Use LOG_LEVEL_INFO by default for debug builds
LOG_LEVEL := 40 LOG_LEVEL := 40
else else
...@@ -119,7 +125,7 @@ CC := ${CROSS_COMPILE}gcc ...@@ -119,7 +125,7 @@ CC := ${CROSS_COMPILE}gcc
CPP := ${CROSS_COMPILE}cpp CPP := ${CROSS_COMPILE}cpp
AS := ${CROSS_COMPILE}gcc AS := ${CROSS_COMPILE}gcc
AR := ${CROSS_COMPILE}ar AR := ${CROSS_COMPILE}ar
LD := ${CROSS_COMPILE}ld LINKER := ${CROSS_COMPILE}ld
OC := ${CROSS_COMPILE}objcopy OC := ${CROSS_COMPILE}objcopy
OD := ${CROSS_COMPILE}objdump OD := ${CROSS_COMPILE}objdump
NM := ${CROSS_COMPILE}nm NM := ${CROSS_COMPILE}nm
...@@ -128,8 +134,8 @@ DTC := dtc ...@@ -128,8 +134,8 @@ DTC := dtc
# Use ${LD}.bfd instead if it exists (as absolute path or together with $PATH). # Use ${LD}.bfd instead if it exists (as absolute path or together with $PATH).
ifneq ($(strip $(wildcard ${LD}.bfd) \ ifneq ($(strip $(wildcard ${LD}.bfd) \
$(foreach dir,$(subst :, ,${PATH}),$(wildcard ${dir}/${LD}.bfd))),) $(foreach dir,$(subst :, ,${PATH}),$(wildcard ${dir}/${LINKER}.bfd))),)
LD := ${LD}.bfd LINKER := ${LINKER}.bfd
endif endif
ifeq (${ARM_ARCH_MAJOR},7) ifeq (${ARM_ARCH_MAJOR},7)
...@@ -143,12 +149,21 @@ endif ...@@ -143,12 +149,21 @@ endif
ifeq ($(notdir $(CC)),armclang) ifeq ($(notdir $(CC)),armclang)
TF_CFLAGS_aarch32 = -target arm-arm-none-eabi $(march32-directive) TF_CFLAGS_aarch32 = -target arm-arm-none-eabi $(march32-directive)
TF_CFLAGS_aarch64 = -target aarch64-arm-none-eabi -march=armv8-a TF_CFLAGS_aarch64 = -target aarch64-arm-none-eabi -march=armv8-a
LD = $(LINKER)
AS = $(CC) -c -x assembler-with-cpp $(TF_CFLAGS_$(ARCH))
CPP = $(CC) -E $(TF_CFLAGS_$(ARCH))
PP = $(CC) -E $(TF_CFLAGS_$(ARCH))
else ifneq ($(findstring clang,$(notdir $(CC))),) else ifneq ($(findstring clang,$(notdir $(CC))),)
TF_CFLAGS_aarch32 = $(target32-directive) TF_CFLAGS_aarch32 = $(target32-directive)
TF_CFLAGS_aarch64 = -target aarch64-elf TF_CFLAGS_aarch64 = -target aarch64-elf
LD = $(LINKER)
AS = $(CC) -c -x assembler-with-cpp $(TF_CFLAGS_$(ARCH))
CPP = $(CC) -E
PP = $(CC) -E
else else
TF_CFLAGS_aarch32 = $(march32-directive) TF_CFLAGS_aarch32 = $(march32-directive)
TF_CFLAGS_aarch64 = -march=armv8-a TF_CFLAGS_aarch64 = -march=armv8-a
LD = $(LINKER)
endif endif
TF_CFLAGS_aarch32 += -mno-unaligned-access TF_CFLAGS_aarch32 += -mno-unaligned-access
......
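For context on the toolchain hunk above: setting AS to "$(CC) -c -x assembler-with-cpp" keeps assembly sources going through the C preprocessor whichever compiler drives the build. A minimal, illustrative GNU-assembler fragment of the kind this supports is sketched below; the label is hypothetical and not part of this change, and DEBUG is assumed to arrive as a command-line define (the Makefile adds it via add_define).

    /*
     * Illustrative-only .S fragment: it relies on C-preprocessor
     * conditionals, which is why AS is invoked as
     * "$(CC) -c -x assembler-with-cpp" for both GCC and Clang.
     */
    .text
    .global demo_entry
    .type   demo_entry, %function
demo_entry:
#if DEBUG                       /* DEBUG comes from add_define in the Makefile */
    mov x0, #1
#else
    mov x0, #0
#endif
    ret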
...@@ -26,25 +26,25 @@ vector_entry SynchronousExceptionSP0 ...@@ -26,25 +26,25 @@ vector_entry SynchronousExceptionSP0
mov x0, #SYNC_EXCEPTION_SP_EL0 mov x0, #SYNC_EXCEPTION_SP_EL0
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size SynchronousExceptionSP0 end_vector_entry SynchronousExceptionSP0
vector_entry IrqSP0 vector_entry IrqSP0
mov x0, #IRQ_SP_EL0 mov x0, #IRQ_SP_EL0
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size IrqSP0 end_vector_entry IrqSP0
vector_entry FiqSP0 vector_entry FiqSP0
mov x0, #FIQ_SP_EL0 mov x0, #FIQ_SP_EL0
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size FiqSP0 end_vector_entry FiqSP0
vector_entry SErrorSP0 vector_entry SErrorSP0
mov x0, #SERROR_SP_EL0 mov x0, #SERROR_SP_EL0
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size SErrorSP0 end_vector_entry SErrorSP0
/* ----------------------------------------------------- /* -----------------------------------------------------
* Current EL with SPx: 0x200 - 0x400 * Current EL with SPx: 0x200 - 0x400
...@@ -54,25 +54,25 @@ vector_entry SynchronousExceptionSPx ...@@ -54,25 +54,25 @@ vector_entry SynchronousExceptionSPx
mov x0, #SYNC_EXCEPTION_SP_ELX mov x0, #SYNC_EXCEPTION_SP_ELX
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size SynchronousExceptionSPx end_vector_entry SynchronousExceptionSPx
vector_entry IrqSPx vector_entry IrqSPx
mov x0, #IRQ_SP_ELX mov x0, #IRQ_SP_ELX
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size IrqSPx end_vector_entry IrqSPx
vector_entry FiqSPx vector_entry FiqSPx
mov x0, #FIQ_SP_ELX mov x0, #FIQ_SP_ELX
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size FiqSPx end_vector_entry FiqSPx
vector_entry SErrorSPx vector_entry SErrorSPx
mov x0, #SERROR_SP_ELX mov x0, #SERROR_SP_ELX
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size SErrorSPx end_vector_entry SErrorSPx
/* ----------------------------------------------------- /* -----------------------------------------------------
* Lower EL using AArch64 : 0x400 - 0x600 * Lower EL using AArch64 : 0x400 - 0x600
...@@ -91,25 +91,25 @@ vector_entry SynchronousExceptionA64 ...@@ -91,25 +91,25 @@ vector_entry SynchronousExceptionA64
b.ne unexpected_sync_exception b.ne unexpected_sync_exception
b smc_handler64 b smc_handler64
check_vector_size SynchronousExceptionA64 end_vector_entry SynchronousExceptionA64
vector_entry IrqA64 vector_entry IrqA64
mov x0, #IRQ_AARCH64 mov x0, #IRQ_AARCH64
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size IrqA64 end_vector_entry IrqA64
vector_entry FiqA64 vector_entry FiqA64
mov x0, #FIQ_AARCH64 mov x0, #FIQ_AARCH64
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size FiqA64 end_vector_entry FiqA64
vector_entry SErrorA64 vector_entry SErrorA64
mov x0, #SERROR_AARCH64 mov x0, #SERROR_AARCH64
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size SErrorA64 end_vector_entry SErrorA64
/* ----------------------------------------------------- /* -----------------------------------------------------
* Lower EL using AArch32 : 0x600 - 0x800 * Lower EL using AArch32 : 0x600 - 0x800
...@@ -119,25 +119,25 @@ vector_entry SynchronousExceptionA32 ...@@ -119,25 +119,25 @@ vector_entry SynchronousExceptionA32
mov x0, #SYNC_EXCEPTION_AARCH32 mov x0, #SYNC_EXCEPTION_AARCH32
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size SynchronousExceptionA32 end_vector_entry SynchronousExceptionA32
vector_entry IrqA32 vector_entry IrqA32
mov x0, #IRQ_AARCH32 mov x0, #IRQ_AARCH32
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size IrqA32 end_vector_entry IrqA32
vector_entry FiqA32 vector_entry FiqA32
mov x0, #FIQ_AARCH32 mov x0, #FIQ_AARCH32
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size FiqA32 end_vector_entry FiqA32
vector_entry SErrorA32 vector_entry SErrorA32
mov x0, #SERROR_AARCH32 mov x0, #SERROR_AARCH32
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size SErrorA32 end_vector_entry SErrorA32
func smc_handler64 func smc_handler64
......
...@@ -28,10 +28,19 @@ SECTIONS ...@@ -28,10 +28,19 @@ SECTIONS
*bl1_entrypoint.o(.text*) *bl1_entrypoint.o(.text*)
*(.text*) *(.text*)
*(.vectors) *(.vectors)
. = NEXT(PAGE_SIZE); . = ALIGN(PAGE_SIZE);
__TEXT_END__ = .; __TEXT_END__ = .;
} >ROM } >ROM
/* .ARM.extab and .ARM.exidx are only added because Clang needs them */
.ARM.extab . : {
*(.ARM.extab* .gnu.linkonce.armextab.*)
} >ROM
.ARM.exidx . : {
*(.ARM.exidx* .gnu.linkonce.armexidx.*)
} >ROM
.rodata . : { .rodata . : {
__RODATA_START__ = .; __RODATA_START__ = .;
*(.rodata*) *(.rodata*)
...@@ -152,7 +161,7 @@ SECTIONS ...@@ -152,7 +161,7 @@ SECTIONS
* as device memory. No other unexpected data must creep in. * as device memory. No other unexpected data must creep in.
* Ensure the rest of the current memory page is unused. * Ensure the rest of the current memory page is unused.
*/ */
. = NEXT(PAGE_SIZE); . = ALIGN(PAGE_SIZE);
__COHERENT_RAM_END__ = .; __COHERENT_RAM_END__ = .;
} >RAM } >RAM
#endif #endif
......
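A note on the NEXT(PAGE_SIZE) to ALIGN(PAGE_SIZE) substitution made here and in the other linker scripts in this change: the GNU ld manual describes NEXT(exp) as equivalent to ALIGN(exp) unless the MEMORY command is used to define discontiguous memory, so the page-alignment guarantees these scripts rely on are preserved, while ALIGN is the spelling that non-GNU linkers are more likely to accept.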
...@@ -26,25 +26,25 @@ vector_entry SynchronousExceptionSP0 ...@@ -26,25 +26,25 @@ vector_entry SynchronousExceptionSP0
mov x0, #SYNC_EXCEPTION_SP_EL0 mov x0, #SYNC_EXCEPTION_SP_EL0
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size SynchronousExceptionSP0 end_vector_entry SynchronousExceptionSP0
vector_entry IrqSP0 vector_entry IrqSP0
mov x0, #IRQ_SP_EL0 mov x0, #IRQ_SP_EL0
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size IrqSP0 end_vector_entry IrqSP0
vector_entry FiqSP0 vector_entry FiqSP0
mov x0, #FIQ_SP_EL0 mov x0, #FIQ_SP_EL0
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size FiqSP0 end_vector_entry FiqSP0
vector_entry SErrorSP0 vector_entry SErrorSP0
mov x0, #SERROR_SP_EL0 mov x0, #SERROR_SP_EL0
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size SErrorSP0 end_vector_entry SErrorSP0
/* ----------------------------------------------------- /* -----------------------------------------------------
* Current EL with SPx: 0x200 - 0x400 * Current EL with SPx: 0x200 - 0x400
...@@ -54,25 +54,25 @@ vector_entry SynchronousExceptionSPx ...@@ -54,25 +54,25 @@ vector_entry SynchronousExceptionSPx
mov x0, #SYNC_EXCEPTION_SP_ELX mov x0, #SYNC_EXCEPTION_SP_ELX
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size SynchronousExceptionSPx end_vector_entry SynchronousExceptionSPx
vector_entry IrqSPx vector_entry IrqSPx
mov x0, #IRQ_SP_ELX mov x0, #IRQ_SP_ELX
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size IrqSPx end_vector_entry IrqSPx
vector_entry FiqSPx vector_entry FiqSPx
mov x0, #FIQ_SP_ELX mov x0, #FIQ_SP_ELX
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size FiqSPx end_vector_entry FiqSPx
vector_entry SErrorSPx vector_entry SErrorSPx
mov x0, #SERROR_SP_ELX mov x0, #SERROR_SP_ELX
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size SErrorSPx end_vector_entry SErrorSPx
/* ----------------------------------------------------- /* -----------------------------------------------------
* Lower EL using AArch64 : 0x400 - 0x600 * Lower EL using AArch64 : 0x400 - 0x600
...@@ -82,25 +82,25 @@ vector_entry SynchronousExceptionA64 ...@@ -82,25 +82,25 @@ vector_entry SynchronousExceptionA64
mov x0, #SYNC_EXCEPTION_AARCH64 mov x0, #SYNC_EXCEPTION_AARCH64
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size SynchronousExceptionA64 end_vector_entry SynchronousExceptionA64
vector_entry IrqA64 vector_entry IrqA64
mov x0, #IRQ_AARCH64 mov x0, #IRQ_AARCH64
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size IrqA64 end_vector_entry IrqA64
vector_entry FiqA64 vector_entry FiqA64
mov x0, #FIQ_AARCH64 mov x0, #FIQ_AARCH64
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size FiqA64 end_vector_entry FiqA64
vector_entry SErrorA64 vector_entry SErrorA64
mov x0, #SERROR_AARCH64 mov x0, #SERROR_AARCH64
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size SErrorA64 end_vector_entry SErrorA64
/* ----------------------------------------------------- /* -----------------------------------------------------
* Lower EL using AArch32 : 0x600 - 0x800 * Lower EL using AArch32 : 0x600 - 0x800
...@@ -110,22 +110,22 @@ vector_entry SynchronousExceptionA32 ...@@ -110,22 +110,22 @@ vector_entry SynchronousExceptionA32
mov x0, #SYNC_EXCEPTION_AARCH32 mov x0, #SYNC_EXCEPTION_AARCH32
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size SynchronousExceptionA32 end_vector_entry SynchronousExceptionA32
vector_entry IrqA32 vector_entry IrqA32
mov x0, #IRQ_AARCH32 mov x0, #IRQ_AARCH32
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size IrqA32 end_vector_entry IrqA32
vector_entry FiqA32 vector_entry FiqA32
mov x0, #FIQ_AARCH32 mov x0, #FIQ_AARCH32
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size FiqA32 end_vector_entry FiqA32
vector_entry SErrorA32 vector_entry SErrorA32
mov x0, #SERROR_AARCH32 mov x0, #SERROR_AARCH32
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size SErrorA32 end_vector_entry SErrorA32
...@@ -28,10 +28,19 @@ SECTIONS ...@@ -28,10 +28,19 @@ SECTIONS
*bl2_entrypoint.o(.text*) *bl2_entrypoint.o(.text*)
*(.text*) *(.text*)
*(.vectors) *(.vectors)
. = NEXT(PAGE_SIZE); . = ALIGN(PAGE_SIZE);
__TEXT_END__ = .; __TEXT_END__ = .;
} >RAM } >RAM
/* .ARM.extab and .ARM.exidx are only added because Clang needs them */
.ARM.extab . : {
*(.ARM.extab* .gnu.linkonce.armextab.*)
} >RAM
.ARM.exidx . : {
*(.ARM.exidx* .gnu.linkonce.armexidx.*)
} >RAM
.rodata . : { .rodata . : {
__RODATA_START__ = .; __RODATA_START__ = .;
*(.rodata*) *(.rodata*)
...@@ -42,7 +51,7 @@ SECTIONS ...@@ -42,7 +51,7 @@ SECTIONS
KEEP(*(.img_parser_lib_descs)) KEEP(*(.img_parser_lib_descs))
__PARSER_LIB_DESCS_END__ = .; __PARSER_LIB_DESCS_END__ = .;
. = NEXT(PAGE_SIZE); . = ALIGN(PAGE_SIZE);
__RODATA_END__ = .; __RODATA_END__ = .;
} >RAM } >RAM
#else #else
...@@ -65,7 +74,7 @@ SECTIONS ...@@ -65,7 +74,7 @@ SECTIONS
* read-only, executable. No RW data from the next section must * read-only, executable. No RW data from the next section must
* creep in. Ensure the rest of the current memory page is unused. * creep in. Ensure the rest of the current memory page is unused.
*/ */
. = NEXT(PAGE_SIZE); . = ALIGN(PAGE_SIZE);
__RO_END__ = .; __RO_END__ = .;
} >RAM } >RAM
#endif #endif
...@@ -131,7 +140,7 @@ SECTIONS ...@@ -131,7 +140,7 @@ SECTIONS
* as device memory. No other unexpected data must creep in. * as device memory. No other unexpected data must creep in.
* Ensure the rest of the current memory page is unused. * Ensure the rest of the current memory page is unused.
*/ */
. = NEXT(PAGE_SIZE); . = ALIGN(PAGE_SIZE);
__COHERENT_RAM_END__ = .; __COHERENT_RAM_END__ = .;
} >RAM } >RAM
#endif #endif
......
...@@ -42,7 +42,7 @@ SECTIONS ...@@ -42,7 +42,7 @@ SECTIONS
__TEXT_RESIDENT_END__ = .; __TEXT_RESIDENT_END__ = .;
*(.text*) *(.text*)
*(.vectors) *(.vectors)
. = NEXT(PAGE_SIZE); . = ALIGN(PAGE_SIZE);
__TEXT_END__ = .; __TEXT_END__ = .;
#if BL2_IN_XIP_MEM #if BL2_IN_XIP_MEM
} >ROM } >ROM
...@@ -69,7 +69,7 @@ SECTIONS ...@@ -69,7 +69,7 @@ SECTIONS
KEEP(*(cpu_ops)) KEEP(*(cpu_ops))
__CPU_OPS_END__ = .; __CPU_OPS_END__ = .;
. = NEXT(PAGE_SIZE); . = ALIGN(PAGE_SIZE);
__RODATA_END__ = .; __RODATA_END__ = .;
#if BL2_IN_XIP_MEM #if BL2_IN_XIP_MEM
} >ROM } >ROM
...@@ -111,7 +111,7 @@ SECTIONS ...@@ -111,7 +111,7 @@ SECTIONS
* read-only, executable. No RW data from the next section must * read-only, executable. No RW data from the next section must
* creep in. Ensure the rest of the current memory page is unused. * creep in. Ensure the rest of the current memory page is unused.
*/ */
. = NEXT(PAGE_SIZE); . = ALIGN(PAGE_SIZE);
__RO_END__ = .; __RO_END__ = .;
#if BL2_IN_XIP_MEM #if BL2_IN_XIP_MEM
...@@ -195,7 +195,7 @@ SECTIONS ...@@ -195,7 +195,7 @@ SECTIONS
* as device memory. No other unexpected data must creep in. * as device memory. No other unexpected data must creep in.
* Ensure the rest of the current memory page is unused. * Ensure the rest of the current memory page is unused.
*/ */
. = NEXT(PAGE_SIZE); . = ALIGN(PAGE_SIZE);
__COHERENT_RAM_END__ = .; __COHERENT_RAM_END__ = .;
} >RAM } >RAM
#endif #endif
......
...@@ -28,14 +28,23 @@ SECTIONS ...@@ -28,14 +28,23 @@ SECTIONS
*bl2u_entrypoint.o(.text*) *bl2u_entrypoint.o(.text*)
*(.text*) *(.text*)
*(.vectors) *(.vectors)
. = NEXT(PAGE_SIZE); . = ALIGN(PAGE_SIZE);
__TEXT_END__ = .; __TEXT_END__ = .;
} >RAM } >RAM
/* .ARM.extab and .ARM.exidx are only added because Clang needs them */
.ARM.extab . : {
*(.ARM.extab* .gnu.linkonce.armextab.*)
} >RAM
.ARM.exidx . : {
*(.ARM.exidx* .gnu.linkonce.armexidx.*)
} >RAM
.rodata . : { .rodata . : {
__RODATA_START__ = .; __RODATA_START__ = .;
*(.rodata*) *(.rodata*)
. = NEXT(PAGE_SIZE); . = ALIGN(PAGE_SIZE);
__RODATA_END__ = .; __RODATA_END__ = .;
} >RAM } >RAM
#else #else
...@@ -52,7 +61,7 @@ SECTIONS ...@@ -52,7 +61,7 @@ SECTIONS
* read-only, executable. No RW data from the next section must * read-only, executable. No RW data from the next section must
* creep in. Ensure the rest of the current memory page is unused. * creep in. Ensure the rest of the current memory page is unused.
*/ */
. = NEXT(PAGE_SIZE); . = ALIGN(PAGE_SIZE);
__RO_END__ = .; __RO_END__ = .;
} >RAM } >RAM
#endif #endif
...@@ -118,7 +127,7 @@ SECTIONS ...@@ -118,7 +127,7 @@ SECTIONS
* as device memory. No other unexpected data must creep in. * as device memory. No other unexpected data must creep in.
* Ensure the rest of the current memory page is unused. * Ensure the rest of the current memory page is unused.
*/ */
. = NEXT(PAGE_SIZE); . = ALIGN(PAGE_SIZE);
__COHERENT_RAM_END__ = .; __COHERENT_RAM_END__ = .;
} >RAM } >RAM
#endif #endif
......
...@@ -233,7 +233,7 @@ vector_base runtime_exceptions ...@@ -233,7 +233,7 @@ vector_base runtime_exceptions
vector_entry sync_exception_sp_el0 vector_entry sync_exception_sp_el0
/* We don't expect any synchronous exceptions from EL3 */ /* We don't expect any synchronous exceptions from EL3 */
b report_unhandled_exception b report_unhandled_exception
check_vector_size sync_exception_sp_el0 end_vector_entry sync_exception_sp_el0
vector_entry irq_sp_el0 vector_entry irq_sp_el0
/* /*
...@@ -241,17 +241,17 @@ vector_entry irq_sp_el0 ...@@ -241,17 +241,17 @@ vector_entry irq_sp_el0
* error. Loop infinitely. * error. Loop infinitely.
*/ */
b report_unhandled_interrupt b report_unhandled_interrupt
check_vector_size irq_sp_el0 end_vector_entry irq_sp_el0
vector_entry fiq_sp_el0 vector_entry fiq_sp_el0
b report_unhandled_interrupt b report_unhandled_interrupt
check_vector_size fiq_sp_el0 end_vector_entry fiq_sp_el0
vector_entry serror_sp_el0 vector_entry serror_sp_el0
b report_unhandled_exception b report_unhandled_exception
check_vector_size serror_sp_el0 end_vector_entry serror_sp_el0
/* --------------------------------------------------------------------- /* ---------------------------------------------------------------------
* Current EL with SP_ELx: 0x200 - 0x400 * Current EL with SP_ELx: 0x200 - 0x400
...@@ -265,19 +265,19 @@ vector_entry sync_exception_sp_elx ...@@ -265,19 +265,19 @@ vector_entry sync_exception_sp_elx
* corrupted. * corrupted.
*/ */
b report_unhandled_exception b report_unhandled_exception
check_vector_size sync_exception_sp_elx end_vector_entry sync_exception_sp_elx
vector_entry irq_sp_elx vector_entry irq_sp_elx
b report_unhandled_interrupt b report_unhandled_interrupt
check_vector_size irq_sp_elx end_vector_entry irq_sp_elx
vector_entry fiq_sp_elx vector_entry fiq_sp_elx
b report_unhandled_interrupt b report_unhandled_interrupt
check_vector_size fiq_sp_elx end_vector_entry fiq_sp_elx
vector_entry serror_sp_elx vector_entry serror_sp_elx
b report_unhandled_exception b report_unhandled_exception
check_vector_size serror_sp_elx end_vector_entry serror_sp_elx
/* --------------------------------------------------------------------- /* ---------------------------------------------------------------------
* Lower EL using AArch64 : 0x400 - 0x600 * Lower EL using AArch64 : 0x400 - 0x600
...@@ -292,17 +292,17 @@ vector_entry sync_exception_aarch64 ...@@ -292,17 +292,17 @@ vector_entry sync_exception_aarch64
*/ */
check_and_unmask_ea check_and_unmask_ea
handle_sync_exception handle_sync_exception
check_vector_size sync_exception_aarch64 end_vector_entry sync_exception_aarch64
vector_entry irq_aarch64 vector_entry irq_aarch64
check_and_unmask_ea check_and_unmask_ea
handle_interrupt_exception irq_aarch64 handle_interrupt_exception irq_aarch64
check_vector_size irq_aarch64 end_vector_entry irq_aarch64
vector_entry fiq_aarch64 vector_entry fiq_aarch64
check_and_unmask_ea check_and_unmask_ea
handle_interrupt_exception fiq_aarch64 handle_interrupt_exception fiq_aarch64
check_vector_size fiq_aarch64 end_vector_entry fiq_aarch64
vector_entry serror_aarch64 vector_entry serror_aarch64
msr daifclr, #DAIF_ABT_BIT msr daifclr, #DAIF_ABT_BIT
...@@ -313,7 +313,7 @@ vector_entry serror_aarch64 ...@@ -313,7 +313,7 @@ vector_entry serror_aarch64
*/ */
str x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR] str x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
handle_ea #ERROR_EA_ASYNC handle_ea #ERROR_EA_ASYNC
check_vector_size serror_aarch64 end_vector_entry serror_aarch64
/* --------------------------------------------------------------------- /* ---------------------------------------------------------------------
* Lower EL using AArch32 : 0x600 - 0x800 * Lower EL using AArch32 : 0x600 - 0x800
...@@ -328,17 +328,17 @@ vector_entry sync_exception_aarch32 ...@@ -328,17 +328,17 @@ vector_entry sync_exception_aarch32
*/ */
check_and_unmask_ea check_and_unmask_ea
handle_sync_exception handle_sync_exception
check_vector_size sync_exception_aarch32 end_vector_entry sync_exception_aarch32
vector_entry irq_aarch32 vector_entry irq_aarch32
check_and_unmask_ea check_and_unmask_ea
handle_interrupt_exception irq_aarch32 handle_interrupt_exception irq_aarch32
check_vector_size irq_aarch32 end_vector_entry irq_aarch32
vector_entry fiq_aarch32 vector_entry fiq_aarch32
check_and_unmask_ea check_and_unmask_ea
handle_interrupt_exception fiq_aarch32 handle_interrupt_exception fiq_aarch32
check_vector_size fiq_aarch32 end_vector_entry fiq_aarch32
vector_entry serror_aarch32 vector_entry serror_aarch32
msr daifclr, #DAIF_ABT_BIT msr daifclr, #DAIF_ABT_BIT
...@@ -349,7 +349,7 @@ vector_entry serror_aarch32 ...@@ -349,7 +349,7 @@ vector_entry serror_aarch32
*/ */
str x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR] str x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
handle_ea #ERROR_EA_ASYNC handle_ea #ERROR_EA_ASYNC
check_vector_size serror_aarch32 end_vector_entry serror_aarch32
/* --------------------------------------------------------------------- /* ---------------------------------------------------------------------
......
...@@ -32,7 +32,7 @@ SECTIONS ...@@ -32,7 +32,7 @@ SECTIONS
*bl31_entrypoint.o(.text*) *bl31_entrypoint.o(.text*)
*(.text*) *(.text*)
*(.vectors) *(.vectors)
. = NEXT(PAGE_SIZE); . = ALIGN(PAGE_SIZE);
__TEXT_END__ = .; __TEXT_END__ = .;
} >RAM } >RAM
...@@ -67,7 +67,7 @@ SECTIONS ...@@ -67,7 +67,7 @@ SECTIONS
. = ALIGN(8); . = ALIGN(8);
#include <pubsub_events.h> #include <pubsub_events.h>
. = NEXT(PAGE_SIZE); . = ALIGN(PAGE_SIZE);
__RODATA_END__ = .; __RODATA_END__ = .;
} >RAM } >RAM
#else #else
...@@ -111,7 +111,7 @@ SECTIONS ...@@ -111,7 +111,7 @@ SECTIONS
* executable. No RW data from the next section must creep in. * executable. No RW data from the next section must creep in.
* Ensure the rest of the current memory page is unused. * Ensure the rest of the current memory page is unused.
*/ */
. = NEXT(PAGE_SIZE); . = ALIGN(PAGE_SIZE);
__RO_END__ = .; __RO_END__ = .;
} >RAM } >RAM
#endif #endif
...@@ -131,7 +131,7 @@ SECTIONS ...@@ -131,7 +131,7 @@ SECTIONS
spm_shim_exceptions : ALIGN(PAGE_SIZE) { spm_shim_exceptions : ALIGN(PAGE_SIZE) {
__SPM_SHIM_EXCEPTIONS_START__ = .; __SPM_SHIM_EXCEPTIONS_START__ = .;
*(.spm_shim_exceptions) *(.spm_shim_exceptions)
. = NEXT(PAGE_SIZE); . = ALIGN(PAGE_SIZE);
__SPM_SHIM_EXCEPTIONS_END__ = .; __SPM_SHIM_EXCEPTIONS_END__ = .;
} >RAM } >RAM
#endif #endif
...@@ -246,7 +246,7 @@ SECTIONS ...@@ -246,7 +246,7 @@ SECTIONS
* as device memory. No other unexpected data must creep in. * as device memory. No other unexpected data must creep in.
* Ensure the rest of the current memory page is unused. * Ensure the rest of the current memory page is unused.
*/ */
. = NEXT(PAGE_SIZE); . = ALIGN(PAGE_SIZE);
__COHERENT_RAM_END__ = .; __COHERENT_RAM_END__ = .;
} >RAM } >RAM
#endif #endif
......
...@@ -28,10 +28,19 @@ SECTIONS ...@@ -28,10 +28,19 @@ SECTIONS
*entrypoint.o(.text*) *entrypoint.o(.text*)
*(.text*) *(.text*)
*(.vectors) *(.vectors)
. = NEXT(PAGE_SIZE); . = ALIGN(PAGE_SIZE);
__TEXT_END__ = .; __TEXT_END__ = .;
} >RAM } >RAM
/* .ARM.extab and .ARM.exidx are only added because Clang needs them */
.ARM.extab . : {
*(.ARM.extab* .gnu.linkonce.armextab.*)
} >RAM
.ARM.exidx . : {
*(.ARM.exidx* .gnu.linkonce.armexidx.*)
} >RAM
.rodata . : { .rodata . : {
__RODATA_START__ = .; __RODATA_START__ = .;
*(.rodata*) *(.rodata*)
...@@ -55,7 +64,7 @@ SECTIONS ...@@ -55,7 +64,7 @@ SECTIONS
. = ALIGN(8); . = ALIGN(8);
#include <pubsub_events.h> #include <pubsub_events.h>
. = NEXT(PAGE_SIZE); . = ALIGN(PAGE_SIZE);
__RODATA_END__ = .; __RODATA_END__ = .;
} >RAM } >RAM
#else #else
...@@ -92,7 +101,7 @@ SECTIONS ...@@ -92,7 +101,7 @@ SECTIONS
* read-only, executable. No RW data from the next section must * read-only, executable. No RW data from the next section must
* creep in. Ensure the rest of the current memory block is unused. * creep in. Ensure the rest of the current memory block is unused.
*/ */
. = NEXT(PAGE_SIZE); . = ALIGN(PAGE_SIZE);
__RO_END__ = .; __RO_END__ = .;
} >RAM } >RAM
#endif #endif
...@@ -207,7 +216,7 @@ SECTIONS ...@@ -207,7 +216,7 @@ SECTIONS
* as device memory. No other unexpected data must creep in. * as device memory. No other unexpected data must creep in.
* Ensure the rest of the current memory page is unused. * Ensure the rest of the current memory page is unused.
*/ */
. = NEXT(PAGE_SIZE); . = ALIGN(PAGE_SIZE);
__COHERENT_RAM_END__ = .; __COHERENT_RAM_END__ = .;
} >RAM } >RAM
......
...@@ -82,19 +82,19 @@ vector_base tsp_exceptions ...@@ -82,19 +82,19 @@ vector_base tsp_exceptions
*/ */
vector_entry sync_exception_sp_el0 vector_entry sync_exception_sp_el0
b plat_panic_handler b plat_panic_handler
check_vector_size sync_exception_sp_el0 end_vector_entry sync_exception_sp_el0
vector_entry irq_sp_el0 vector_entry irq_sp_el0
b plat_panic_handler b plat_panic_handler
check_vector_size irq_sp_el0 end_vector_entry irq_sp_el0
vector_entry fiq_sp_el0 vector_entry fiq_sp_el0
b plat_panic_handler b plat_panic_handler
check_vector_size fiq_sp_el0 end_vector_entry fiq_sp_el0
vector_entry serror_sp_el0 vector_entry serror_sp_el0
b plat_panic_handler b plat_panic_handler
check_vector_size serror_sp_el0 end_vector_entry serror_sp_el0
/* ----------------------------------------------------- /* -----------------------------------------------------
...@@ -104,19 +104,19 @@ vector_entry serror_sp_el0 ...@@ -104,19 +104,19 @@ vector_entry serror_sp_el0
*/ */
vector_entry sync_exception_sp_elx vector_entry sync_exception_sp_elx
b plat_panic_handler b plat_panic_handler
check_vector_size sync_exception_sp_elx end_vector_entry sync_exception_sp_elx
vector_entry irq_sp_elx vector_entry irq_sp_elx
handle_tsp_interrupt irq_sp_elx handle_tsp_interrupt irq_sp_elx
check_vector_size irq_sp_elx end_vector_entry irq_sp_elx
vector_entry fiq_sp_elx vector_entry fiq_sp_elx
handle_tsp_interrupt fiq_sp_elx handle_tsp_interrupt fiq_sp_elx
check_vector_size fiq_sp_elx end_vector_entry fiq_sp_elx
vector_entry serror_sp_elx vector_entry serror_sp_elx
b plat_panic_handler b plat_panic_handler
check_vector_size serror_sp_elx end_vector_entry serror_sp_elx
/* ----------------------------------------------------- /* -----------------------------------------------------
...@@ -126,19 +126,19 @@ vector_entry serror_sp_elx ...@@ -126,19 +126,19 @@ vector_entry serror_sp_elx
*/ */
vector_entry sync_exception_aarch64 vector_entry sync_exception_aarch64
b plat_panic_handler b plat_panic_handler
check_vector_size sync_exception_aarch64 end_vector_entry sync_exception_aarch64
vector_entry irq_aarch64 vector_entry irq_aarch64
b plat_panic_handler b plat_panic_handler
check_vector_size irq_aarch64 end_vector_entry irq_aarch64
vector_entry fiq_aarch64 vector_entry fiq_aarch64
b plat_panic_handler b plat_panic_handler
check_vector_size fiq_aarch64 end_vector_entry fiq_aarch64
vector_entry serror_aarch64 vector_entry serror_aarch64
b plat_panic_handler b plat_panic_handler
check_vector_size serror_aarch64 end_vector_entry serror_aarch64
/* ----------------------------------------------------- /* -----------------------------------------------------
...@@ -148,16 +148,16 @@ vector_entry serror_aarch64 ...@@ -148,16 +148,16 @@ vector_entry serror_aarch64
*/ */
vector_entry sync_exception_aarch32 vector_entry sync_exception_aarch32
b plat_panic_handler b plat_panic_handler
check_vector_size sync_exception_aarch32 end_vector_entry sync_exception_aarch32
vector_entry irq_aarch32 vector_entry irq_aarch32
b plat_panic_handler b plat_panic_handler
check_vector_size irq_aarch32 end_vector_entry irq_aarch32
vector_entry fiq_aarch32 vector_entry fiq_aarch32
b plat_panic_handler b plat_panic_handler
check_vector_size fiq_aarch32 end_vector_entry fiq_aarch32
vector_entry serror_aarch32 vector_entry serror_aarch32
b plat_panic_handler b plat_panic_handler
check_vector_size serror_aarch32 end_vector_entry serror_aarch32
...@@ -29,14 +29,14 @@ SECTIONS ...@@ -29,14 +29,14 @@ SECTIONS
*tsp_entrypoint.o(.text*) *tsp_entrypoint.o(.text*)
*(.text*) *(.text*)
*(.vectors) *(.vectors)
. = NEXT(PAGE_SIZE); . = ALIGN(PAGE_SIZE);
__TEXT_END__ = .; __TEXT_END__ = .;
} >RAM } >RAM
.rodata . : { .rodata . : {
__RODATA_START__ = .; __RODATA_START__ = .;
*(.rodata*) *(.rodata*)
. = NEXT(PAGE_SIZE); . = ALIGN(PAGE_SIZE);
__RODATA_END__ = .; __RODATA_END__ = .;
} >RAM } >RAM
#else #else
...@@ -52,7 +52,7 @@ SECTIONS ...@@ -52,7 +52,7 @@ SECTIONS
* read-only, executable. No RW data from the next section must * read-only, executable. No RW data from the next section must
* creep in. Ensure the rest of the current memory page is unused. * creep in. Ensure the rest of the current memory page is unused.
*/ */
. = NEXT(PAGE_SIZE); . = ALIGN(PAGE_SIZE);
__RO_END__ = .; __RO_END__ = .;
} >RAM } >RAM
#endif #endif
...@@ -117,7 +117,7 @@ SECTIONS ...@@ -117,7 +117,7 @@ SECTIONS
* as device memory. No other unexpected data must creep in. * as device memory. No other unexpected data must creep in.
* Ensure the rest of the current memory page is unused. * Ensure the rest of the current memory page is unused.
*/ */
. = NEXT(PAGE_SIZE); . = ALIGN(PAGE_SIZE);
__COHERENT_RAM_END__ = .; __COHERENT_RAM_END__ = .;
} >RAM } >RAM
#endif #endif
......
...@@ -24,25 +24,25 @@ vector_entry SynchronousExceptionSP0 ...@@ -24,25 +24,25 @@ vector_entry SynchronousExceptionSP0
mov x0, #SYNC_EXCEPTION_SP_EL0 mov x0, #SYNC_EXCEPTION_SP_EL0
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size SynchronousExceptionSP0 end_vector_entry SynchronousExceptionSP0
vector_entry IrqSP0 vector_entry IrqSP0
mov x0, #IRQ_SP_EL0 mov x0, #IRQ_SP_EL0
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size IrqSP0 end_vector_entry IrqSP0
vector_entry FiqSP0 vector_entry FiqSP0
mov x0, #FIQ_SP_EL0 mov x0, #FIQ_SP_EL0
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size FiqSP0 end_vector_entry FiqSP0
vector_entry SErrorSP0 vector_entry SErrorSP0
mov x0, #SERROR_SP_EL0 mov x0, #SERROR_SP_EL0
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size SErrorSP0 end_vector_entry SErrorSP0
/* ----------------------------------------------------- /* -----------------------------------------------------
* Current EL with SPx: 0x200 - 0x400 * Current EL with SPx: 0x200 - 0x400
...@@ -52,25 +52,25 @@ vector_entry SynchronousExceptionSPx ...@@ -52,25 +52,25 @@ vector_entry SynchronousExceptionSPx
mov x0, #SYNC_EXCEPTION_SP_ELX mov x0, #SYNC_EXCEPTION_SP_ELX
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size SynchronousExceptionSPx end_vector_entry SynchronousExceptionSPx
vector_entry IrqSPx vector_entry IrqSPx
mov x0, #IRQ_SP_ELX mov x0, #IRQ_SP_ELX
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size IrqSPx end_vector_entry IrqSPx
vector_entry FiqSPx vector_entry FiqSPx
mov x0, #FIQ_SP_ELX mov x0, #FIQ_SP_ELX
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size FiqSPx end_vector_entry FiqSPx
vector_entry SErrorSPx vector_entry SErrorSPx
mov x0, #SERROR_SP_ELX mov x0, #SERROR_SP_ELX
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size SErrorSPx end_vector_entry SErrorSPx
/* ----------------------------------------------------- /* -----------------------------------------------------
* Lower EL using AArch64 : 0x400 - 0x600 * Lower EL using AArch64 : 0x400 - 0x600
...@@ -80,25 +80,25 @@ vector_entry SynchronousExceptionA64 ...@@ -80,25 +80,25 @@ vector_entry SynchronousExceptionA64
mov x0, #SYNC_EXCEPTION_AARCH64 mov x0, #SYNC_EXCEPTION_AARCH64
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size SynchronousExceptionA64 end_vector_entry SynchronousExceptionA64
vector_entry IrqA64 vector_entry IrqA64
mov x0, #IRQ_AARCH64 mov x0, #IRQ_AARCH64
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size IrqA64 end_vector_entry IrqA64
vector_entry FiqA64 vector_entry FiqA64
mov x0, #FIQ_AARCH64 mov x0, #FIQ_AARCH64
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size FiqA64 end_vector_entry FiqA64
vector_entry SErrorA64 vector_entry SErrorA64
mov x0, #SERROR_AARCH64 mov x0, #SERROR_AARCH64
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size SErrorA64 end_vector_entry SErrorA64
/* ----------------------------------------------------- /* -----------------------------------------------------
* Lower EL using AArch32 : 0x600 - 0x800 * Lower EL using AArch32 : 0x600 - 0x800
...@@ -108,22 +108,22 @@ vector_entry SynchronousExceptionA32 ...@@ -108,22 +108,22 @@ vector_entry SynchronousExceptionA32
mov x0, #SYNC_EXCEPTION_AARCH32 mov x0, #SYNC_EXCEPTION_AARCH32
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size SynchronousExceptionA32 end_vector_entry SynchronousExceptionA32
vector_entry IrqA32 vector_entry IrqA32
mov x0, #IRQ_AARCH32 mov x0, #IRQ_AARCH32
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size IrqA32 end_vector_entry IrqA32
vector_entry FiqA32 vector_entry FiqA32
mov x0, #FIQ_AARCH32 mov x0, #FIQ_AARCH32
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size FiqA32 end_vector_entry FiqA32
vector_entry SErrorA32 vector_entry SErrorA32
mov x0, #SERROR_AARCH32 mov x0, #SERROR_AARCH32
bl plat_report_exception bl plat_report_exception
no_ret plat_panic_handler no_ret plat_panic_handler
check_vector_size SErrorA32 end_vector_entry SErrorA32
...@@ -62,8 +62,8 @@ given Linaro Release. Also, these `Linaro instructions`_ provide further ...@@ -62,8 +62,8 @@ given Linaro Release. Also, these `Linaro instructions`_ provide further
guidance and a script, which can be used to download Linaro deliverables guidance and a script, which can be used to download Linaro deliverables
automatically. automatically.
Optionally, TF-A can be built using clang or Arm Compiler 6. Optionally, TF-A can be built using clang version 4.0 or newer or Arm
See instructions below on how to switch the default compiler. Compiler 6. See instructions below on how to switch the default compiler.
In addition, the following optional packages and tools may be needed: In addition, the following optional packages and tools may be needed:
...@@ -103,10 +103,14 @@ Building TF-A ...@@ -103,10 +103,14 @@ Building TF-A
export CROSS_COMPILE=<path-to-aarch32-gcc>/bin/arm-linux-gnueabihf- export CROSS_COMPILE=<path-to-aarch32-gcc>/bin/arm-linux-gnueabihf-
It is possible to build TF-A using clang or Arm Compiler 6. To do so It is possible to build TF-A using Clang or Arm Compiler 6. To do so
``CC`` needs to point to the clang or armclang binary. Only the compiler ``CC`` needs to point to the clang or armclang binary, which will
is switched; the assembler and linker need to be provided by the GNU also select the clang or armclang assembler. Be aware that the
toolchain, thus ``CROSS_COMPILE`` should be set as described above. GNU linker is used by default. If needed, the linker
can be overridden using the ``LD`` variable. Clang linker version 6 is
known to work with TF-A.
In both cases ``CROSS_COMPILE`` should be set as described above.
Arm Compiler 6 will be selected when the base name of the path assigned Arm Compiler 6 will be selected when the base name of the path assigned
to ``CC`` matches the string 'armclang'. to ``CC`` matches the string 'armclang'.
......
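As a usage sketch only (the platform, toolchain path and options are illustrative and not taken from this change), a Clang build following the description above could be invoked as ``make CROSS_COMPILE=aarch64-linux-gnu- CC=clang PLAT=fvp DEBUG=1 all``, with ``LD`` additionally set on the command line if the default GNU linker needs to be overridden.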
...@@ -83,11 +83,20 @@ ...@@ -83,11 +83,20 @@
.section \section_name, "ax" .section \section_name, "ax"
.align 7, 0 .align 7, 0
.type \label, %function .type \label, %function
.func \label
.cfi_startproc .cfi_startproc
\label: \label:
.endm .endm
/*
* Add padding bytes until the full exception vector is filled; its size is always
* 32 instructions. If there are more than 32 instructions in the
* exception vector then an error is emitted.
*/
.macro end_vector_entry label
.cfi_endproc
.fill \label + (32 * 4) - .
.endm
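To make the padding arithmetic concrete: a vector slot is 32 A64 instructions, i.e. 32 * 4 = 128 bytes, so ".fill \label + (32 * 4) - ." emits exactly the number of zero bytes left between the current location counter and 128 bytes past the entry's label, and a negative count (a handler longer than 32 instructions) makes the assembler stop with an error. A minimal, illustrative pairing of the two macros (the handler name is hypothetical) might look like:

    /*
     * Illustrative only: a slot that uses a couple of its 32 available
     * instructions; end_vector_entry zero-fills the rest of the 128-byte
     * slot and fails the build if the slot is overrun.
     */
vector_entry demo_serror
    mov x0, #0
    b   plat_panic_handler
end_vector_entry demo_serror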
/* /*
* This macro verifies that the given vector doesn't exceed the * This macro verifies that the given vector doesn't exceed the
* architectural limit of 32 instructions. This is meant to be placed * architectural limit of 32 instructions. This is meant to be placed
...@@ -95,11 +104,10 @@ ...@@ -95,11 +104,10 @@
* vector entry as the parameter * vector entry as the parameter
*/ */
.macro check_vector_size since .macro check_vector_size since
.endfunc #if ERROR_DEPRECATED
.cfi_endproc .error "check_vector_size must not be used. Use end_vector_entry instead"
.if (. - \since) > (32 * 4) #endif
.error "Vector exceeds 32 instructions" end_vector_entry \since
.endif
.endm .endm
#if ENABLE_PLAT_COMPAT #if ENABLE_PLAT_COMPAT
......
/* /*
* Copyright (c) 2013-2017, ARM Limited and Contributors. All rights reserved. * Copyright (c) 2013-2018, ARM Limited and Contributors. All rights reserved.
* *
* SPDX-License-Identifier: BSD-3-Clause * SPDX-License-Identifier: BSD-3-Clause
*/ */
...@@ -31,7 +31,6 @@ ...@@ -31,7 +31,6 @@
.cfi_sections .debug_frame .cfi_sections .debug_frame
.section .text.asm.\_name, "ax" .section .text.asm.\_name, "ax"
.type \_name, %function .type \_name, %function
.func \_name
/* /*
* .cfi_startproc and .cfi_endproc are needed to output entries in * .cfi_startproc and .cfi_endproc are needed to output entries in
* .debug_frame * .debug_frame
...@@ -45,7 +44,6 @@ ...@@ -45,7 +44,6 @@
* This macro is used to mark the end of a function. * This macro is used to mark the end of a function.
*/ */
.macro endfunc _name .macro endfunc _name
.endfunc
.cfi_endproc .cfi_endproc
.size \_name, . - \_name .size \_name, . - \_name
.endm .endm
......
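The .func/.endfunc directives dropped above are GNU-as extensions removed here for Clang-assembler compatibility; the .cfi_startproc/.cfi_endproc pair and the .size bookkeeping in endfunc already carry the unwind and symbol-size information. Callers of the macros are unchanged; a minimal, illustrative function written with them (the name is hypothetical) would be:

    /*
     * Illustrative only: func/endfunc still emit .type, the CFI start/end
     * markers and .size; only the GNU-specific .func/.endfunc are gone.
     */
func demo_return_zero
    mov x0, xzr
    ret
endfunc demo_return_zero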
...@@ -35,38 +35,47 @@ ...@@ -35,38 +35,47 @@
# define REPORT_ERRATA 0 # define REPORT_ERRATA 0
#endif #endif
/*
* Define the offsets to the fields in cpu_ops structure. .equ CPU_MIDR_SIZE, CPU_WORD_SIZE
*/ .equ CPU_RESET_FUNC_SIZE, CPU_WORD_SIZE
.struct 0 .equ CPU_PWR_DWN_OPS_SIZE, CPU_WORD_SIZE * CPU_MAX_PWR_DWN_OPS
CPU_MIDR: /* cpu_ops midr */ .equ CPU_ERRATA_FUNC_SIZE, CPU_WORD_SIZE
.space 4 .equ CPU_ERRATA_LOCK_SIZE, CPU_WORD_SIZE
/* Reset fn is needed during reset */ .equ CPU_ERRATA_PRINTED_SIZE, CPU_WORD_SIZE
#if defined(IMAGE_AT_EL3)
CPU_RESET_FUNC: /* cpu_ops reset_func */ #ifndef IMAGE_AT_EL3
.space 4 .equ CPU_RESET_FUNC_SIZE, 0
#endif #endif
#ifdef IMAGE_BL32 /* The power down core and cluster is needed only in BL32 */
CPU_PWR_DWN_OPS: /* cpu_ops power down functions */ /* The power down core and cluster is needed only in BL32 */
.space (4 * CPU_MAX_PWR_DWN_OPS) #ifndef IMAGE_BL32
.equ CPU_PWR_DWN_OPS_SIZE, 0
#endif #endif
/* /* Fields required to print errata status */
* Fields required to print errata status. Only in BL32 that the printing #if !REPORT_ERRATA
* require mutual exclusion and printed flag. .equ CPU_ERRATA_FUNC_SIZE, 0
*/
#if REPORT_ERRATA
CPU_ERRATA_FUNC: /* CPU errata status printing function */
.space 4
#if defined(IMAGE_BL32)
CPU_ERRATA_LOCK:
.space 4
CPU_ERRATA_PRINTED:
.space 4
#endif #endif
/* Only BL32 requires mutual exclusion and printed flag. */
#if !(REPORT_ERRATA && defined(IMAGE_BL32))
.equ CPU_ERRATA_LOCK_SIZE, 0
.equ CPU_ERRATA_PRINTED_SIZE, 0
#endif #endif
CPU_OPS_SIZE = .
/*
* Define the offsets to the fields in cpu_ops structure.
* Every offset is defined based on the offset and size of the previous
* field.
*/
.equ CPU_MIDR, 0
.equ CPU_RESET_FUNC, CPU_MIDR + CPU_MIDR_SIZE
.equ CPU_PWR_DWN_OPS, CPU_RESET_FUNC + CPU_RESET_FUNC_SIZE
.equ CPU_ERRATA_FUNC, CPU_PWR_DWN_OPS + CPU_PWR_DWN_OPS_SIZE
.equ CPU_ERRATA_LOCK, CPU_ERRATA_FUNC + CPU_ERRATA_FUNC_SIZE
.equ CPU_ERRATA_PRINTED, CPU_ERRATA_LOCK + CPU_ERRATA_LOCK_SIZE
.equ CPU_OPS_SIZE, CPU_ERRATA_PRINTED + CPU_ERRATA_PRINTED_SIZE
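To see how the size-then-offset scheme collapses unused fields, here is a hedged worked example for the AArch32 layout above. It assumes CPU_WORD_SIZE is 4 and CPU_MAX_PWR_DWN_OPS is 2 (both assumptions about values defined elsewhere), and a BL32 image built with REPORT_ERRATA enabled and without IMAGE_AT_EL3:

    /*
     * Assumed configuration: IMAGE_BL32, REPORT_ERRATA=1, no IMAGE_AT_EL3,
     * CPU_WORD_SIZE = 4 and CPU_MAX_PWR_DWN_OPS = 2 (assumptions).
     *
     *   CPU_RESET_FUNC_SIZE  = 0            (no IMAGE_AT_EL3)
     *   CPU_PWR_DWN_OPS_SIZE = 4 * 2 = 8
     *
     *   CPU_MIDR           = 0
     *   CPU_RESET_FUNC     = 0 + 4  = 4
     *   CPU_PWR_DWN_OPS    = 4 + 0  = 4    (reset field takes no space)
     *   CPU_ERRATA_FUNC    = 4 + 8  = 12
     *   CPU_ERRATA_LOCK    = 12 + 4 = 16
     *   CPU_ERRATA_PRINTED = 16 + 4 = 20
     *   CPU_OPS_SIZE       = 20 + 4 = 24
     */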
/* /*
* Write given expressions as words * Write given expressions as words
...@@ -128,21 +137,8 @@ CPU_OPS_SIZE = . ...@@ -128,21 +137,8 @@ CPU_OPS_SIZE = .
.word \_resetfunc .word \_resetfunc
#endif #endif
#ifdef IMAGE_BL32 #ifdef IMAGE_BL32
1:
/* Insert list of functions */ /* Insert list of functions */
fill_constants CPU_MAX_PWR_DWN_OPS, \_power_down_ops fill_constants CPU_MAX_PWR_DWN_OPS, \_power_down_ops
2:
/*
* Error if no or more than CPU_MAX_PWR_DWN_OPS were specified in the
* list
*/
.ifeq 2b - 1b
.error "At least one power down function must be specified"
.else
.iflt 2b - 1b - (CPU_MAX_PWR_DWN_OPS * CPU_WORD_SIZE)
.error "More than CPU_MAX_PWR_DWN_OPS functions specified"
.endif
.endif
#endif #endif
#if REPORT_ERRATA #if REPORT_ERRATA
......
...@@ -38,46 +38,56 @@ ...@@ -38,46 +38,56 @@
# define REPORT_ERRATA 0 # define REPORT_ERRATA 0
#endif #endif
/*
* Define the offsets to the fields in cpu_ops structure. .equ CPU_MIDR_SIZE, CPU_WORD_SIZE
*/ .equ CPU_EXTRA1_FUNC_SIZE, CPU_WORD_SIZE
.struct 0 .equ CPU_EXTRA2_FUNC_SIZE, CPU_WORD_SIZE
CPU_MIDR: /* cpu_ops midr */ .equ CPU_RESET_FUNC_SIZE, CPU_WORD_SIZE
.space 8 .equ CPU_PWR_DWN_OPS_SIZE, CPU_WORD_SIZE * CPU_MAX_PWR_DWN_OPS
/* Reset fn is needed in BL at reset vector */ .equ CPU_ERRATA_FUNC_SIZE, CPU_WORD_SIZE
#if defined(IMAGE_AT_EL3) .equ CPU_ERRATA_LOCK_SIZE, CPU_WORD_SIZE
CPU_RESET_FUNC: /* cpu_ops reset_func */ .equ CPU_ERRATA_PRINTED_SIZE, CPU_WORD_SIZE
.space 8 .equ CPU_REG_DUMP_SIZE, CPU_WORD_SIZE
#ifndef IMAGE_AT_EL3
.equ CPU_RESET_FUNC_SIZE, 0
#endif #endif
CPU_EXTRA1_FUNC:
.space 8 /* The power down core and cluster is needed only in BL31 */
CPU_EXTRA2_FUNC: #ifndef IMAGE_BL31
.space 8 .equ CPU_PWR_DWN_OPS_SIZE, 0
#ifdef IMAGE_BL31 /* The power down core and cluster is needed only in BL31 */
CPU_PWR_DWN_OPS: /* cpu_ops power down functions */
.space (8 * CPU_MAX_PWR_DWN_OPS)
#endif #endif
/* /* Fields required to print errata status. */
* Fields required to print errata status. Only in BL31 that the printing #if !REPORT_ERRATA
* require mutual exclusion and printed flag. .equ CPU_ERRATA_FUNC_SIZE, 0
*/
#if REPORT_ERRATA
CPU_ERRATA_FUNC:
.space 8
#if defined(IMAGE_BL31)
CPU_ERRATA_LOCK:
.space 8
CPU_ERRATA_PRINTED:
.space 8
#endif #endif
/* Only BL31 requires mutual exclusion and printed flag. */
#if !(REPORT_ERRATA && defined(IMAGE_BL31))
.equ CPU_ERRATA_LOCK_SIZE, 0
.equ CPU_ERRATA_PRINTED_SIZE, 0
#endif #endif
#if defined(IMAGE_BL31) && CRASH_REPORTING #if !defined(IMAGE_BL31) || !CRASH_REPORTING
CPU_REG_DUMP: /* cpu specific register dump for crash reporting */ .equ CPU_REG_DUMP_SIZE, 0
.space 8
#endif #endif
CPU_OPS_SIZE = .
/*
* Define the offsets to the fields in cpu_ops structure.
* Every offset is defined based on the offset and size of the previous
* field.
*/
.equ CPU_MIDR, 0
.equ CPU_RESET_FUNC, CPU_MIDR + CPU_MIDR_SIZE
.equ CPU_EXTRA1_FUNC, CPU_RESET_FUNC + CPU_RESET_FUNC_SIZE
.equ CPU_EXTRA2_FUNC, CPU_EXTRA1_FUNC + CPU_EXTRA1_FUNC_SIZE
.equ CPU_PWR_DWN_OPS, CPU_EXTRA2_FUNC + CPU_EXTRA2_FUNC_SIZE
.equ CPU_ERRATA_FUNC, CPU_PWR_DWN_OPS + CPU_PWR_DWN_OPS_SIZE
.equ CPU_ERRATA_LOCK, CPU_ERRATA_FUNC + CPU_ERRATA_FUNC_SIZE
.equ CPU_ERRATA_PRINTED, CPU_ERRATA_LOCK + CPU_ERRATA_LOCK_SIZE
.equ CPU_REG_DUMP, CPU_ERRATA_PRINTED + CPU_ERRATA_PRINTED_SIZE
.equ CPU_OPS_SIZE, CPU_REG_DUMP + CPU_REG_DUMP_SIZE
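Because the offset symbols keep their original names, consumers are unaffected by the move from .struct to .equ. A hedged sketch of the usual access pattern (register choice is illustrative; it assumes the address of a cpu_ops entry is already in x0 and that the reset field is populated in this image):

    /*
     * Illustrative only: fetch and call the reset handler of a cpu_ops
     * entry whose address is in x0, using the CPU_RESET_FUNC offset.
     */
    ldr x2, [x0, #CPU_RESET_FUNC]
    blr x2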
/* /*
* Write given expressions as quad words * Write given expressions as quad words
...@@ -149,21 +159,8 @@ CPU_OPS_SIZE = . ...@@ -149,21 +159,8 @@ CPU_OPS_SIZE = .
.quad \_extra1 .quad \_extra1
.quad \_extra2 .quad \_extra2
#ifdef IMAGE_BL31 #ifdef IMAGE_BL31
1:
/* Insert list of functions */ /* Insert list of functions */
fill_constants CPU_MAX_PWR_DWN_OPS, \_power_down_ops fill_constants CPU_MAX_PWR_DWN_OPS, \_power_down_ops
2:
/*
* Error if no or more than CPU_MAX_PWR_DWN_OPS were specified in the
* list
*/
.ifeq 2b - 1b
.error "At least one power down function must be specified"
.else
.iflt 2b - 1b - (CPU_MAX_PWR_DWN_OPS * CPU_WORD_SIZE)
.error "More than CPU_MAX_PWR_DWN_OPS functions specified"
.endif
.endif
#endif #endif
#if REPORT_ERRATA #if REPORT_ERRATA
......
...@@ -22,8 +22,7 @@ icc_regs: ...@@ -22,8 +22,7 @@ icc_regs:
/* Registers common to both GICv2 and GICv3 */ /* Registers common to both GICv2 and GICv3 */
gicd_pend_reg: gicd_pend_reg:
.asciz "gicd_ispendr regs (Offsets 0x200 - 0x278)\n" \ .asciz "gicd_ispendr regs (Offsets 0x200 - 0x278)\n Offset:\t\t\tvalue\n"
" Offset:\t\t\tvalue\n"
newline: newline:
.asciz "\n" .asciz "\n"
spacer: spacer:
......
/* /*
* Copyright (c) 2017, ARM Limited and Contributors. All rights reserved. * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
* *
* SPDX-License-Identifier: BSD-3-Clause * SPDX-License-Identifier: BSD-3-Clause
*/ */
...@@ -22,7 +22,7 @@ SECTIONS ...@@ -22,7 +22,7 @@ SECTIONS
*(arm_el3_tzc_dram) *(arm_el3_tzc_dram)
__EL3_SEC_DRAM_UNALIGNED_END__ = .; __EL3_SEC_DRAM_UNALIGNED_END__ = .;
. = NEXT(PAGE_SIZE); . = ALIGN(PAGE_SIZE);
__EL3_SEC_DRAM_END__ = .; __EL3_SEC_DRAM_END__ = .;
} >EL3_SEC_DRAM } >EL3_SEC_DRAM
} }
......