Unverified Commit 9a93d8cc authored by Dimitris Papastamos's avatar Dimitris Papastamos Committed by GitHub
Browse files

Merge pull request #1460 from robertovargas-arm/clang

Make TF compatible with Clang assembler and linker
Showing with 268 additions and 215 deletions
+268 -215
......@@ -85,7 +85,13 @@ $(eval $(call add_define,DEBUG))
ifneq (${DEBUG}, 0)
BUILD_TYPE := debug
TF_CFLAGS += -g
ASFLAGS += -g -Wa,--gdwarf-2
ifneq ($(findstring clang,$(notdir $(CC))),)
ASFLAGS += -g
else
ASFLAGS += -g -Wa,--gdwarf-2
endif
# Use LOG_LEVEL_INFO by default for debug builds
LOG_LEVEL := 40
else
......@@ -119,7 +125,7 @@ CC := ${CROSS_COMPILE}gcc
CPP := ${CROSS_COMPILE}cpp
AS := ${CROSS_COMPILE}gcc
AR := ${CROSS_COMPILE}ar
LD := ${CROSS_COMPILE}ld
LINKER := ${CROSS_COMPILE}ld
OC := ${CROSS_COMPILE}objcopy
OD := ${CROSS_COMPILE}objdump
NM := ${CROSS_COMPILE}nm
......@@ -128,8 +134,8 @@ DTC := dtc
# Use ${LD}.bfd instead if it exists (as absolute path or together with $PATH).
ifneq ($(strip $(wildcard ${LD}.bfd) \
$(foreach dir,$(subst :, ,${PATH}),$(wildcard ${dir}/${LD}.bfd))),)
LD := ${LD}.bfd
$(foreach dir,$(subst :, ,${PATH}),$(wildcard ${dir}/${LINKER}.bfd))),)
LINKER := ${LINKER}.bfd
endif
ifeq (${ARM_ARCH_MAJOR},7)
......@@ -143,12 +149,21 @@ endif
ifeq ($(notdir $(CC)),armclang)
TF_CFLAGS_aarch32 = -target arm-arm-none-eabi $(march32-directive)
TF_CFLAGS_aarch64 = -target aarch64-arm-none-eabi -march=armv8-a
LD = $(LINKER)
AS = $(CC) -c -x assembler-with-cpp $(TF_CFLAGS_$(ARCH))
CPP = $(CC) -E $(TF_CFLAGS_$(ARCH))
PP = $(CC) -E $(TF_CFLAGS_$(ARCH))
else ifneq ($(findstring clang,$(notdir $(CC))),)
TF_CFLAGS_aarch32 = $(target32-directive)
TF_CFLAGS_aarch64 = -target aarch64-elf
LD = $(LINKER)
AS = $(CC) -c -x assembler-with-cpp $(TF_CFLAGS_$(ARCH))
CPP = $(CC) -E
PP = $(CC) -E
else
TF_CFLAGS_aarch32 = $(march32-directive)
TF_CFLAGS_aarch64 = -march=armv8-a
LD = $(LINKER)
endif
TF_CFLAGS_aarch32 += -mno-unaligned-access
......
......@@ -26,25 +26,25 @@ vector_entry SynchronousExceptionSP0
mov x0, #SYNC_EXCEPTION_SP_EL0
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size SynchronousExceptionSP0
end_vector_entry SynchronousExceptionSP0
vector_entry IrqSP0
mov x0, #IRQ_SP_EL0
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size IrqSP0
end_vector_entry IrqSP0
vector_entry FiqSP0
mov x0, #FIQ_SP_EL0
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size FiqSP0
end_vector_entry FiqSP0
vector_entry SErrorSP0
mov x0, #SERROR_SP_EL0
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size SErrorSP0
end_vector_entry SErrorSP0
/* -----------------------------------------------------
* Current EL with SPx: 0x200 - 0x400
......@@ -54,25 +54,25 @@ vector_entry SynchronousExceptionSPx
mov x0, #SYNC_EXCEPTION_SP_ELX
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size SynchronousExceptionSPx
end_vector_entry SynchronousExceptionSPx
vector_entry IrqSPx
mov x0, #IRQ_SP_ELX
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size IrqSPx
end_vector_entry IrqSPx
vector_entry FiqSPx
mov x0, #FIQ_SP_ELX
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size FiqSPx
end_vector_entry FiqSPx
vector_entry SErrorSPx
mov x0, #SERROR_SP_ELX
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size SErrorSPx
end_vector_entry SErrorSPx
/* -----------------------------------------------------
* Lower EL using AArch64 : 0x400 - 0x600
......@@ -91,25 +91,25 @@ vector_entry SynchronousExceptionA64
b.ne unexpected_sync_exception
b smc_handler64
check_vector_size SynchronousExceptionA64
end_vector_entry SynchronousExceptionA64
vector_entry IrqA64
mov x0, #IRQ_AARCH64
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size IrqA64
end_vector_entry IrqA64
vector_entry FiqA64
mov x0, #FIQ_AARCH64
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size FiqA64
end_vector_entry FiqA64
vector_entry SErrorA64
mov x0, #SERROR_AARCH64
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size SErrorA64
end_vector_entry SErrorA64
/* -----------------------------------------------------
* Lower EL using AArch32 : 0x600 - 0x800
......@@ -119,25 +119,25 @@ vector_entry SynchronousExceptionA32
mov x0, #SYNC_EXCEPTION_AARCH32
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size SynchronousExceptionA32
end_vector_entry SynchronousExceptionA32
vector_entry IrqA32
mov x0, #IRQ_AARCH32
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size IrqA32
end_vector_entry IrqA32
vector_entry FiqA32
mov x0, #FIQ_AARCH32
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size FiqA32
end_vector_entry FiqA32
vector_entry SErrorA32
mov x0, #SERROR_AARCH32
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size SErrorA32
end_vector_entry SErrorA32
func smc_handler64
......
......@@ -28,10 +28,19 @@ SECTIONS
*bl1_entrypoint.o(.text*)
*(.text*)
*(.vectors)
. = NEXT(PAGE_SIZE);
. = ALIGN(PAGE_SIZE);
__TEXT_END__ = .;
} >ROM
/* .ARM.extab and .ARM.exidx are only added because Clang needs them */
.ARM.extab . : {
*(.ARM.extab* .gnu.linkonce.armextab.*)
} >ROM
.ARM.exidx . : {
*(.ARM.exidx* .gnu.linkonce.armexidx.*)
} >ROM
.rodata . : {
__RODATA_START__ = .;
*(.rodata*)
......@@ -152,7 +161,7 @@ SECTIONS
* as device memory. No other unexpected data must creep in.
* Ensure the rest of the current memory page is unused.
*/
. = NEXT(PAGE_SIZE);
. = ALIGN(PAGE_SIZE);
__COHERENT_RAM_END__ = .;
} >RAM
#endif
......
......@@ -26,25 +26,25 @@ vector_entry SynchronousExceptionSP0
mov x0, #SYNC_EXCEPTION_SP_EL0
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size SynchronousExceptionSP0
end_vector_entry SynchronousExceptionSP0
vector_entry IrqSP0
mov x0, #IRQ_SP_EL0
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size IrqSP0
end_vector_entry IrqSP0
vector_entry FiqSP0
mov x0, #FIQ_SP_EL0
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size FiqSP0
end_vector_entry FiqSP0
vector_entry SErrorSP0
mov x0, #SERROR_SP_EL0
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size SErrorSP0
end_vector_entry SErrorSP0
/* -----------------------------------------------------
* Current EL with SPx: 0x200 - 0x400
......@@ -54,25 +54,25 @@ vector_entry SynchronousExceptionSPx
mov x0, #SYNC_EXCEPTION_SP_ELX
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size SynchronousExceptionSPx
end_vector_entry SynchronousExceptionSPx
vector_entry IrqSPx
mov x0, #IRQ_SP_ELX
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size IrqSPx
end_vector_entry IrqSPx
vector_entry FiqSPx
mov x0, #FIQ_SP_ELX
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size FiqSPx
end_vector_entry FiqSPx
vector_entry SErrorSPx
mov x0, #SERROR_SP_ELX
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size SErrorSPx
end_vector_entry SErrorSPx
/* -----------------------------------------------------
* Lower EL using AArch64 : 0x400 - 0x600
......@@ -82,25 +82,25 @@ vector_entry SynchronousExceptionA64
mov x0, #SYNC_EXCEPTION_AARCH64
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size SynchronousExceptionA64
end_vector_entry SynchronousExceptionA64
vector_entry IrqA64
mov x0, #IRQ_AARCH64
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size IrqA64
end_vector_entry IrqA64
vector_entry FiqA64
mov x0, #FIQ_AARCH64
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size FiqA64
end_vector_entry FiqA64
vector_entry SErrorA64
mov x0, #SERROR_AARCH64
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size SErrorA64
end_vector_entry SErrorA64
/* -----------------------------------------------------
* Lower EL using AArch32 : 0x600 - 0x800
......@@ -110,22 +110,22 @@ vector_entry SynchronousExceptionA32
mov x0, #SYNC_EXCEPTION_AARCH32
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size SynchronousExceptionA32
end_vector_entry SynchronousExceptionA32
vector_entry IrqA32
mov x0, #IRQ_AARCH32
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size IrqA32
end_vector_entry IrqA32
vector_entry FiqA32
mov x0, #FIQ_AARCH32
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size FiqA32
end_vector_entry FiqA32
vector_entry SErrorA32
mov x0, #SERROR_AARCH32
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size SErrorA32
end_vector_entry SErrorA32
......@@ -28,10 +28,19 @@ SECTIONS
*bl2_entrypoint.o(.text*)
*(.text*)
*(.vectors)
. = NEXT(PAGE_SIZE);
. = ALIGN(PAGE_SIZE);
__TEXT_END__ = .;
} >RAM
/* .ARM.extab and .ARM.exidx are only added because Clang needs them */
.ARM.extab . : {
*(.ARM.extab* .gnu.linkonce.armextab.*)
} >RAM
.ARM.exidx . : {
*(.ARM.exidx* .gnu.linkonce.armexidx.*)
} >RAM
.rodata . : {
__RODATA_START__ = .;
*(.rodata*)
......@@ -42,7 +51,7 @@ SECTIONS
KEEP(*(.img_parser_lib_descs))
__PARSER_LIB_DESCS_END__ = .;
. = NEXT(PAGE_SIZE);
. = ALIGN(PAGE_SIZE);
__RODATA_END__ = .;
} >RAM
#else
......@@ -65,7 +74,7 @@ SECTIONS
* read-only, executable. No RW data from the next section must
* creep in. Ensure the rest of the current memory page is unused.
*/
. = NEXT(PAGE_SIZE);
. = ALIGN(PAGE_SIZE);
__RO_END__ = .;
} >RAM
#endif
......@@ -131,7 +140,7 @@ SECTIONS
* as device memory. No other unexpected data must creep in.
* Ensure the rest of the current memory page is unused.
*/
. = NEXT(PAGE_SIZE);
. = ALIGN(PAGE_SIZE);
__COHERENT_RAM_END__ = .;
} >RAM
#endif
......
......@@ -42,7 +42,7 @@ SECTIONS
__TEXT_RESIDENT_END__ = .;
*(.text*)
*(.vectors)
. = NEXT(PAGE_SIZE);
. = ALIGN(PAGE_SIZE);
__TEXT_END__ = .;
#if BL2_IN_XIP_MEM
} >ROM
......@@ -69,7 +69,7 @@ SECTIONS
KEEP(*(cpu_ops))
__CPU_OPS_END__ = .;
. = NEXT(PAGE_SIZE);
. = ALIGN(PAGE_SIZE);
__RODATA_END__ = .;
#if BL2_IN_XIP_MEM
} >ROM
......@@ -111,7 +111,7 @@ SECTIONS
* read-only, executable. No RW data from the next section must
* creep in. Ensure the rest of the current memory page is unused.
*/
. = NEXT(PAGE_SIZE);
. = ALIGN(PAGE_SIZE);
__RO_END__ = .;
#if BL2_IN_XIP_MEM
......@@ -195,7 +195,7 @@ SECTIONS
* as device memory. No other unexpected data must creep in.
* Ensure the rest of the current memory page is unused.
*/
. = NEXT(PAGE_SIZE);
. = ALIGN(PAGE_SIZE);
__COHERENT_RAM_END__ = .;
} >RAM
#endif
......
......@@ -28,14 +28,23 @@ SECTIONS
*bl2u_entrypoint.o(.text*)
*(.text*)
*(.vectors)
. = NEXT(PAGE_SIZE);
. = ALIGN(PAGE_SIZE);
__TEXT_END__ = .;
} >RAM
/* .ARM.extab and .ARM.exidx are only added because Clang needs them */
.ARM.extab . : {
*(.ARM.extab* .gnu.linkonce.armextab.*)
} >RAM
.ARM.exidx . : {
*(.ARM.exidx* .gnu.linkonce.armexidx.*)
} >RAM
.rodata . : {
__RODATA_START__ = .;
*(.rodata*)
. = NEXT(PAGE_SIZE);
. = ALIGN(PAGE_SIZE);
__RODATA_END__ = .;
} >RAM
#else
......@@ -52,7 +61,7 @@ SECTIONS
* read-only, executable. No RW data from the next section must
* creep in. Ensure the rest of the current memory page is unused.
*/
. = NEXT(PAGE_SIZE);
. = ALIGN(PAGE_SIZE);
__RO_END__ = .;
} >RAM
#endif
......@@ -118,7 +127,7 @@ SECTIONS
* as device memory. No other unexpected data must creep in.
* Ensure the rest of the current memory page is unused.
*/
. = NEXT(PAGE_SIZE);
. = ALIGN(PAGE_SIZE);
__COHERENT_RAM_END__ = .;
} >RAM
#endif
......
......@@ -233,7 +233,7 @@ vector_base runtime_exceptions
vector_entry sync_exception_sp_el0
/* We don't expect any synchronous exceptions from EL3 */
b report_unhandled_exception
check_vector_size sync_exception_sp_el0
end_vector_entry sync_exception_sp_el0
vector_entry irq_sp_el0
/*
......@@ -241,17 +241,17 @@ vector_entry irq_sp_el0
* error. Loop infinitely.
*/
b report_unhandled_interrupt
check_vector_size irq_sp_el0
end_vector_entry irq_sp_el0
vector_entry fiq_sp_el0
b report_unhandled_interrupt
check_vector_size fiq_sp_el0
end_vector_entry fiq_sp_el0
vector_entry serror_sp_el0
b report_unhandled_exception
check_vector_size serror_sp_el0
end_vector_entry serror_sp_el0
/* ---------------------------------------------------------------------
* Current EL with SP_ELx: 0x200 - 0x400
......@@ -265,19 +265,19 @@ vector_entry sync_exception_sp_elx
* corrupted.
*/
b report_unhandled_exception
check_vector_size sync_exception_sp_elx
end_vector_entry sync_exception_sp_elx
vector_entry irq_sp_elx
b report_unhandled_interrupt
check_vector_size irq_sp_elx
end_vector_entry irq_sp_elx
vector_entry fiq_sp_elx
b report_unhandled_interrupt
check_vector_size fiq_sp_elx
end_vector_entry fiq_sp_elx
vector_entry serror_sp_elx
b report_unhandled_exception
check_vector_size serror_sp_elx
end_vector_entry serror_sp_elx
/* ---------------------------------------------------------------------
* Lower EL using AArch64 : 0x400 - 0x600
......@@ -292,17 +292,17 @@ vector_entry sync_exception_aarch64
*/
check_and_unmask_ea
handle_sync_exception
check_vector_size sync_exception_aarch64
end_vector_entry sync_exception_aarch64
vector_entry irq_aarch64
check_and_unmask_ea
handle_interrupt_exception irq_aarch64
check_vector_size irq_aarch64
end_vector_entry irq_aarch64
vector_entry fiq_aarch64
check_and_unmask_ea
handle_interrupt_exception fiq_aarch64
check_vector_size fiq_aarch64
end_vector_entry fiq_aarch64
vector_entry serror_aarch64
msr daifclr, #DAIF_ABT_BIT
......@@ -313,7 +313,7 @@ vector_entry serror_aarch64
*/
str x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
handle_ea #ERROR_EA_ASYNC
check_vector_size serror_aarch64
end_vector_entry serror_aarch64
/* ---------------------------------------------------------------------
* Lower EL using AArch32 : 0x600 - 0x800
......@@ -328,17 +328,17 @@ vector_entry sync_exception_aarch32
*/
check_and_unmask_ea
handle_sync_exception
check_vector_size sync_exception_aarch32
end_vector_entry sync_exception_aarch32
vector_entry irq_aarch32
check_and_unmask_ea
handle_interrupt_exception irq_aarch32
check_vector_size irq_aarch32
end_vector_entry irq_aarch32
vector_entry fiq_aarch32
check_and_unmask_ea
handle_interrupt_exception fiq_aarch32
check_vector_size fiq_aarch32
end_vector_entry fiq_aarch32
vector_entry serror_aarch32
msr daifclr, #DAIF_ABT_BIT
......@@ -349,7 +349,7 @@ vector_entry serror_aarch32
*/
str x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
handle_ea #ERROR_EA_ASYNC
check_vector_size serror_aarch32
end_vector_entry serror_aarch32
/* ---------------------------------------------------------------------
......
......@@ -32,7 +32,7 @@ SECTIONS
*bl31_entrypoint.o(.text*)
*(.text*)
*(.vectors)
. = NEXT(PAGE_SIZE);
. = ALIGN(PAGE_SIZE);
__TEXT_END__ = .;
} >RAM
......@@ -67,7 +67,7 @@ SECTIONS
. = ALIGN(8);
#include <pubsub_events.h>
. = NEXT(PAGE_SIZE);
. = ALIGN(PAGE_SIZE);
__RODATA_END__ = .;
} >RAM
#else
......@@ -111,7 +111,7 @@ SECTIONS
* executable. No RW data from the next section must creep in.
* Ensure the rest of the current memory page is unused.
*/
. = NEXT(PAGE_SIZE);
. = ALIGN(PAGE_SIZE);
__RO_END__ = .;
} >RAM
#endif
......@@ -131,7 +131,7 @@ SECTIONS
spm_shim_exceptions : ALIGN(PAGE_SIZE) {
__SPM_SHIM_EXCEPTIONS_START__ = .;
*(.spm_shim_exceptions)
. = NEXT(PAGE_SIZE);
. = ALIGN(PAGE_SIZE);
__SPM_SHIM_EXCEPTIONS_END__ = .;
} >RAM
#endif
......@@ -246,7 +246,7 @@ SECTIONS
* as device memory. No other unexpected data must creep in.
* Ensure the rest of the current memory page is unused.
*/
. = NEXT(PAGE_SIZE);
. = ALIGN(PAGE_SIZE);
__COHERENT_RAM_END__ = .;
} >RAM
#endif
......
......@@ -28,10 +28,19 @@ SECTIONS
*entrypoint.o(.text*)
*(.text*)
*(.vectors)
. = NEXT(PAGE_SIZE);
. = ALIGN(PAGE_SIZE);
__TEXT_END__ = .;
} >RAM
/* .ARM.extab and .ARM.exidx are only added because Clang needs them */
.ARM.extab . : {
*(.ARM.extab* .gnu.linkonce.armextab.*)
} >RAM
.ARM.exidx . : {
*(.ARM.exidx* .gnu.linkonce.armexidx.*)
} >RAM
.rodata . : {
__RODATA_START__ = .;
*(.rodata*)
......@@ -55,7 +64,7 @@ SECTIONS
. = ALIGN(8);
#include <pubsub_events.h>
. = NEXT(PAGE_SIZE);
. = ALIGN(PAGE_SIZE);
__RODATA_END__ = .;
} >RAM
#else
......@@ -92,7 +101,7 @@ SECTIONS
* read-only, executable. No RW data from the next section must
* creep in. Ensure the rest of the current memory block is unused.
*/
. = NEXT(PAGE_SIZE);
. = ALIGN(PAGE_SIZE);
__RO_END__ = .;
} >RAM
#endif
......@@ -207,7 +216,7 @@ SECTIONS
* as device memory. No other unexpected data must creep in.
* Ensure the rest of the current memory page is unused.
*/
. = NEXT(PAGE_SIZE);
. = ALIGN(PAGE_SIZE);
__COHERENT_RAM_END__ = .;
} >RAM
......
......@@ -82,19 +82,19 @@ vector_base tsp_exceptions
*/
vector_entry sync_exception_sp_el0
b plat_panic_handler
check_vector_size sync_exception_sp_el0
end_vector_entry sync_exception_sp_el0
vector_entry irq_sp_el0
b plat_panic_handler
check_vector_size irq_sp_el0
end_vector_entry irq_sp_el0
vector_entry fiq_sp_el0
b plat_panic_handler
check_vector_size fiq_sp_el0
end_vector_entry fiq_sp_el0
vector_entry serror_sp_el0
b plat_panic_handler
check_vector_size serror_sp_el0
end_vector_entry serror_sp_el0
/* -----------------------------------------------------
......@@ -104,19 +104,19 @@ vector_entry serror_sp_el0
*/
vector_entry sync_exception_sp_elx
b plat_panic_handler
check_vector_size sync_exception_sp_elx
end_vector_entry sync_exception_sp_elx
vector_entry irq_sp_elx
handle_tsp_interrupt irq_sp_elx
check_vector_size irq_sp_elx
end_vector_entry irq_sp_elx
vector_entry fiq_sp_elx
handle_tsp_interrupt fiq_sp_elx
check_vector_size fiq_sp_elx
end_vector_entry fiq_sp_elx
vector_entry serror_sp_elx
b plat_panic_handler
check_vector_size serror_sp_elx
end_vector_entry serror_sp_elx
/* -----------------------------------------------------
......@@ -126,19 +126,19 @@ vector_entry serror_sp_elx
*/
vector_entry sync_exception_aarch64
b plat_panic_handler
check_vector_size sync_exception_aarch64
end_vector_entry sync_exception_aarch64
vector_entry irq_aarch64
b plat_panic_handler
check_vector_size irq_aarch64
end_vector_entry irq_aarch64
vector_entry fiq_aarch64
b plat_panic_handler
check_vector_size fiq_aarch64
end_vector_entry fiq_aarch64
vector_entry serror_aarch64
b plat_panic_handler
check_vector_size serror_aarch64
end_vector_entry serror_aarch64
/* -----------------------------------------------------
......@@ -148,16 +148,16 @@ vector_entry serror_aarch64
*/
vector_entry sync_exception_aarch32
b plat_panic_handler
check_vector_size sync_exception_aarch32
end_vector_entry sync_exception_aarch32
vector_entry irq_aarch32
b plat_panic_handler
check_vector_size irq_aarch32
end_vector_entry irq_aarch32
vector_entry fiq_aarch32
b plat_panic_handler
check_vector_size fiq_aarch32
end_vector_entry fiq_aarch32
vector_entry serror_aarch32
b plat_panic_handler
check_vector_size serror_aarch32
end_vector_entry serror_aarch32
......@@ -29,14 +29,14 @@ SECTIONS
*tsp_entrypoint.o(.text*)
*(.text*)
*(.vectors)
. = NEXT(PAGE_SIZE);
. = ALIGN(PAGE_SIZE);
__TEXT_END__ = .;
} >RAM
.rodata . : {
__RODATA_START__ = .;
*(.rodata*)
. = NEXT(PAGE_SIZE);
. = ALIGN(PAGE_SIZE);
__RODATA_END__ = .;
} >RAM
#else
......@@ -52,7 +52,7 @@ SECTIONS
* read-only, executable. No RW data from the next section must
* creep in. Ensure the rest of the current memory page is unused.
*/
. = NEXT(PAGE_SIZE);
. = ALIGN(PAGE_SIZE);
__RO_END__ = .;
} >RAM
#endif
......@@ -117,7 +117,7 @@ SECTIONS
* as device memory. No other unexpected data must creep in.
* Ensure the rest of the current memory page is unused.
*/
. = NEXT(PAGE_SIZE);
. = ALIGN(PAGE_SIZE);
__COHERENT_RAM_END__ = .;
} >RAM
#endif
......
......@@ -24,25 +24,25 @@ vector_entry SynchronousExceptionSP0
mov x0, #SYNC_EXCEPTION_SP_EL0
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size SynchronousExceptionSP0
end_vector_entry SynchronousExceptionSP0
vector_entry IrqSP0
mov x0, #IRQ_SP_EL0
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size IrqSP0
end_vector_entry IrqSP0
vector_entry FiqSP0
mov x0, #FIQ_SP_EL0
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size FiqSP0
end_vector_entry FiqSP0
vector_entry SErrorSP0
mov x0, #SERROR_SP_EL0
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size SErrorSP0
end_vector_entry SErrorSP0
/* -----------------------------------------------------
* Current EL with SPx: 0x200 - 0x400
......@@ -52,25 +52,25 @@ vector_entry SynchronousExceptionSPx
mov x0, #SYNC_EXCEPTION_SP_ELX
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size SynchronousExceptionSPx
end_vector_entry SynchronousExceptionSPx
vector_entry IrqSPx
mov x0, #IRQ_SP_ELX
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size IrqSPx
end_vector_entry IrqSPx
vector_entry FiqSPx
mov x0, #FIQ_SP_ELX
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size FiqSPx
end_vector_entry FiqSPx
vector_entry SErrorSPx
mov x0, #SERROR_SP_ELX
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size SErrorSPx
end_vector_entry SErrorSPx
/* -----------------------------------------------------
* Lower EL using AArch64 : 0x400 - 0x600
......@@ -80,25 +80,25 @@ vector_entry SynchronousExceptionA64
mov x0, #SYNC_EXCEPTION_AARCH64
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size SynchronousExceptionA64
end_vector_entry SynchronousExceptionA64
vector_entry IrqA64
mov x0, #IRQ_AARCH64
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size IrqA64
end_vector_entry IrqA64
vector_entry FiqA64
mov x0, #FIQ_AARCH64
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size FiqA64
end_vector_entry FiqA64
vector_entry SErrorA64
mov x0, #SERROR_AARCH64
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size SErrorA64
end_vector_entry SErrorA64
/* -----------------------------------------------------
* Lower EL using AArch32 : 0x600 - 0x800
......@@ -108,22 +108,22 @@ vector_entry SynchronousExceptionA32
mov x0, #SYNC_EXCEPTION_AARCH32
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size SynchronousExceptionA32
end_vector_entry SynchronousExceptionA32
vector_entry IrqA32
mov x0, #IRQ_AARCH32
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size IrqA32
end_vector_entry IrqA32
vector_entry FiqA32
mov x0, #FIQ_AARCH32
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size FiqA32
end_vector_entry FiqA32
vector_entry SErrorA32
mov x0, #SERROR_AARCH32
bl plat_report_exception
no_ret plat_panic_handler
check_vector_size SErrorA32
end_vector_entry SErrorA32
......@@ -62,8 +62,8 @@ given Linaro Release. Also, these `Linaro instructions`_ provide further
guidance and a script, which can be used to download Linaro deliverables
automatically.
Optionally, TF-A can be built using clang or Arm Compiler 6.
See instructions below on how to switch the default compiler.
Optionally, TF-A can be built using clang version 4.0 or newer or Arm
Compiler 6. See instructions below on how to switch the default compiler.
In addition, the following optional packages and tools may be needed:
......@@ -103,10 +103,14 @@ Building TF-A
export CROSS_COMPILE=<path-to-aarch32-gcc>/bin/arm-linux-gnueabihf-
It is possible to build TF-A using clang or Arm Compiler 6. To do so
``CC`` needs to point to the clang or armclang binary. Only the compiler
is switched; the assembler and linker need to be provided by the GNU
toolchain, thus ``CROSS_COMPILE`` should be set as described above.
It is possible to build TF-A using Clang or Arm Compiler 6. To do so
``CC`` needs to point to the clang or armclang binary, which will
also select the clang or armclang assembler. Be aware that the
GNU linker is used by default. If needed, the linker
can be overridden using the ``LD`` variable. Clang linker version 6 is
known to work with TF-A.
In both cases ``CROSS_COMPILE`` should be set as described above.
Arm Compiler 6 will be selected when the base name of the path assigned
to ``CC`` matches the string 'armclang'.
......
......@@ -83,11 +83,20 @@
.section \section_name, "ax"
.align 7, 0
.type \label, %function
.func \label
.cfi_startproc
\label:
.endm
/*
 * Pad with bytes until the full exception vector is filled; its size is always
* 32 instructions. If there are more than 32 instructions in the
* exception vector then an error is emitted.
*/
.macro end_vector_entry label
.cfi_endproc
.fill \label + (32 * 4) - .
.endm
/*
* This macro verifies that the given vector doesn't exceed the
* architectural limit of 32 instructions. This is meant to be placed
......@@ -95,11 +104,10 @@
* vector entry as the parameter
*/
.macro check_vector_size since
.endfunc
.cfi_endproc
.if (. - \since) > (32 * 4)
.error "Vector exceeds 32 instructions"
.endif
#if ERROR_DEPRECATED
.error "check_vector_size must not be used. Use end_vector_entry instead"
#endif
end_vector_entry \since
.endm
#if ENABLE_PLAT_COMPAT
......
/*
* Copyright (c) 2013-2017, ARM Limited and Contributors. All rights reserved.
* Copyright (c) 2013-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
......@@ -31,7 +31,6 @@
.cfi_sections .debug_frame
.section .text.asm.\_name, "ax"
.type \_name, %function
.func \_name
/*
* .cfi_startproc and .cfi_endproc are needed to output entries in
* .debug_frame
......@@ -45,7 +44,6 @@
* This macro is used to mark the end of a function.
*/
.macro endfunc _name
.endfunc
.cfi_endproc
.size \_name, . - \_name
.endm
......
......@@ -35,38 +35,47 @@
# define REPORT_ERRATA 0
#endif
/*
* Define the offsets to the fields in cpu_ops structure.
*/
.struct 0
CPU_MIDR: /* cpu_ops midr */
.space 4
/* Reset fn is needed during reset */
#if defined(IMAGE_AT_EL3)
CPU_RESET_FUNC: /* cpu_ops reset_func */
.space 4
.equ CPU_MIDR_SIZE, CPU_WORD_SIZE
.equ CPU_RESET_FUNC_SIZE, CPU_WORD_SIZE
.equ CPU_PWR_DWN_OPS_SIZE, CPU_WORD_SIZE * CPU_MAX_PWR_DWN_OPS
.equ CPU_ERRATA_FUNC_SIZE, CPU_WORD_SIZE
.equ CPU_ERRATA_LOCK_SIZE, CPU_WORD_SIZE
.equ CPU_ERRATA_PRINTED_SIZE, CPU_WORD_SIZE
#ifndef IMAGE_AT_EL3
.equ CPU_RESET_FUNC_SIZE, 0
#endif
#ifdef IMAGE_BL32 /* The power down core and cluster is needed only in BL32 */
CPU_PWR_DWN_OPS: /* cpu_ops power down functions */
.space (4 * CPU_MAX_PWR_DWN_OPS)
/* The power down core and cluster is needed only in BL32 */
#ifndef IMAGE_BL32
.equ CPU_PWR_DWN_OPS_SIZE, 0
#endif
/*
 * Fields required to print errata status. Only in BL32 does the printing
 * require mutual exclusion and a printed flag.
*/
#if REPORT_ERRATA
CPU_ERRATA_FUNC: /* CPU errata status printing function */
.space 4
#if defined(IMAGE_BL32)
CPU_ERRATA_LOCK:
.space 4
CPU_ERRATA_PRINTED:
.space 4
/* Fields required to print errata status */
#if !REPORT_ERRATA
.equ CPU_ERRATA_FUNC_SIZE, 0
#endif
/* Only BL32 requires mutual exclusion and printed flag. */
#if !(REPORT_ERRATA && defined(IMAGE_BL32))
.equ CPU_ERRATA_LOCK_SIZE, 0
.equ CPU_ERRATA_PRINTED_SIZE, 0
#endif
CPU_OPS_SIZE = .
/*
* Define the offsets to the fields in cpu_ops structure.
* Every offset is defined based on the offset and size of the previous
* field.
*/
.equ CPU_MIDR, 0
.equ CPU_RESET_FUNC, CPU_MIDR + CPU_MIDR_SIZE
.equ CPU_PWR_DWN_OPS, CPU_RESET_FUNC + CPU_RESET_FUNC_SIZE
.equ CPU_ERRATA_FUNC, CPU_PWR_DWN_OPS + CPU_PWR_DWN_OPS_SIZE
.equ CPU_ERRATA_LOCK, CPU_ERRATA_FUNC + CPU_ERRATA_FUNC_SIZE
.equ CPU_ERRATA_PRINTED, CPU_ERRATA_LOCK + CPU_ERRATA_LOCK_SIZE
.equ CPU_OPS_SIZE, CPU_ERRATA_PRINTED + CPU_ERRATA_PRINTED_SIZE
/*
* Write given expressions as words
......@@ -128,21 +137,8 @@ CPU_OPS_SIZE = .
.word \_resetfunc
#endif
#ifdef IMAGE_BL32
1:
/* Insert list of functions */
fill_constants CPU_MAX_PWR_DWN_OPS, \_power_down_ops
2:
/*
* Error if no or more than CPU_MAX_PWR_DWN_OPS were specified in the
* list
*/
.ifeq 2b - 1b
.error "At least one power down function must be specified"
.else
.iflt 2b - 1b - (CPU_MAX_PWR_DWN_OPS * CPU_WORD_SIZE)
.error "More than CPU_MAX_PWR_DWN_OPS functions specified"
.endif
.endif
#endif
#if REPORT_ERRATA
......
......@@ -38,46 +38,56 @@
# define REPORT_ERRATA 0
#endif
/*
* Define the offsets to the fields in cpu_ops structure.
*/
.struct 0
CPU_MIDR: /* cpu_ops midr */
.space 8
/* Reset fn is needed in BL at reset vector */
#if defined(IMAGE_AT_EL3)
CPU_RESET_FUNC: /* cpu_ops reset_func */
.space 8
.equ CPU_MIDR_SIZE, CPU_WORD_SIZE
.equ CPU_EXTRA1_FUNC_SIZE, CPU_WORD_SIZE
.equ CPU_EXTRA2_FUNC_SIZE, CPU_WORD_SIZE
.equ CPU_RESET_FUNC_SIZE, CPU_WORD_SIZE
.equ CPU_PWR_DWN_OPS_SIZE, CPU_WORD_SIZE * CPU_MAX_PWR_DWN_OPS
.equ CPU_ERRATA_FUNC_SIZE, CPU_WORD_SIZE
.equ CPU_ERRATA_LOCK_SIZE, CPU_WORD_SIZE
.equ CPU_ERRATA_PRINTED_SIZE, CPU_WORD_SIZE
.equ CPU_REG_DUMP_SIZE, CPU_WORD_SIZE
#ifndef IMAGE_AT_EL3
.equ CPU_RESET_FUNC_SIZE, 0
#endif
CPU_EXTRA1_FUNC:
.space 8
CPU_EXTRA2_FUNC:
.space 8
#ifdef IMAGE_BL31 /* The power down core and cluster is needed only in BL31 */
CPU_PWR_DWN_OPS: /* cpu_ops power down functions */
.space (8 * CPU_MAX_PWR_DWN_OPS)
/* The power down core and cluster is needed only in BL31 */
#ifndef IMAGE_BL31
.equ CPU_PWR_DWN_OPS_SIZE, 0
#endif
/*
 * Fields required to print errata status. Only in BL31 does the printing
 * require mutual exclusion and a printed flag.
*/
#if REPORT_ERRATA
CPU_ERRATA_FUNC:
.space 8
#if defined(IMAGE_BL31)
CPU_ERRATA_LOCK:
.space 8
CPU_ERRATA_PRINTED:
.space 8
/* Fields required to print errata status. */
#if !REPORT_ERRATA
.equ CPU_ERRATA_FUNC_SIZE, 0
#endif
/* Only BL31 requires mutual exclusion and a printed flag. */
#if !(REPORT_ERRATA && defined(IMAGE_BL31))
.equ CPU_ERRATA_LOCK_SIZE, 0
.equ CPU_ERRATA_PRINTED_SIZE, 0
#endif
#if defined(IMAGE_BL31) && CRASH_REPORTING
CPU_REG_DUMP: /* cpu specific register dump for crash reporting */
.space 8
#if !defined(IMAGE_BL31) || !CRASH_REPORTING
.equ CPU_REG_DUMP_SIZE, 0
#endif
CPU_OPS_SIZE = .
/*
* Define the offsets to the fields in cpu_ops structure.
 * Every offset is defined based on the offset and size of the previous
* field.
*/
.equ CPU_MIDR, 0
.equ CPU_RESET_FUNC, CPU_MIDR + CPU_MIDR_SIZE
.equ CPU_EXTRA1_FUNC, CPU_RESET_FUNC + CPU_RESET_FUNC_SIZE
.equ CPU_EXTRA2_FUNC, CPU_EXTRA1_FUNC + CPU_EXTRA1_FUNC_SIZE
.equ CPU_PWR_DWN_OPS, CPU_EXTRA2_FUNC + CPU_EXTRA2_FUNC_SIZE
.equ CPU_ERRATA_FUNC, CPU_PWR_DWN_OPS + CPU_PWR_DWN_OPS_SIZE
.equ CPU_ERRATA_LOCK, CPU_ERRATA_FUNC + CPU_ERRATA_FUNC_SIZE
.equ CPU_ERRATA_PRINTED, CPU_ERRATA_LOCK + CPU_ERRATA_LOCK_SIZE
.equ CPU_REG_DUMP, CPU_ERRATA_PRINTED + CPU_ERRATA_PRINTED_SIZE
.equ CPU_OPS_SIZE, CPU_REG_DUMP + CPU_REG_DUMP_SIZE
/*
* Write given expressions as quad words
......@@ -149,21 +159,8 @@ CPU_OPS_SIZE = .
.quad \_extra1
.quad \_extra2
#ifdef IMAGE_BL31
1:
/* Insert list of functions */
fill_constants CPU_MAX_PWR_DWN_OPS, \_power_down_ops
2:
/*
* Error if no or more than CPU_MAX_PWR_DWN_OPS were specified in the
* list
*/
.ifeq 2b - 1b
.error "At least one power down function must be specified"
.else
.iflt 2b - 1b - (CPU_MAX_PWR_DWN_OPS * CPU_WORD_SIZE)
.error "More than CPU_MAX_PWR_DWN_OPS functions specified"
.endif
.endif
#endif
#if REPORT_ERRATA
......
......@@ -22,8 +22,7 @@ icc_regs:
/* Registers common to both GICv2 and GICv3 */
gicd_pend_reg:
.asciz "gicd_ispendr regs (Offsets 0x200 - 0x278)\n" \
" Offset:\t\t\tvalue\n"
.asciz "gicd_ispendr regs (Offsets 0x200 - 0x278)\n Offset:\t\t\tvalue\n"
newline:
.asciz "\n"
spacer:
......
/*
* Copyright (c) 2017, ARM Limited and Contributors. All rights reserved.
* Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
......@@ -22,7 +22,7 @@ SECTIONS
*(arm_el3_tzc_dram)
__EL3_SEC_DRAM_UNALIGNED_END__ = .;
. = NEXT(PAGE_SIZE);
. = ALIGN(PAGE_SIZE);
__EL3_SEC_DRAM_END__ = .;
} >EL3_SEC_DRAM
}
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment