arch: aarch64: Catch early errors in EL3 and EL1

Set up the stack as early as possible to catch any possible errors in
the reset routine, and also handle EL3 fatal errors.

Signed-off-by: Carlo Caione <ccaione@baylibre.com>
Author:    Carlo Caione <ccaione@baylibre.com> (2020-10-06 12:51:33 +02:00)
Committer: Anas Nashif
Commit:    645082791b

2 changed files with 25 additions and 8 deletions

arch/arm/core/aarch64/fatal.c

@@ -157,16 +157,28 @@ static void esf_dump(const z_arch_esf_t *esf)
 void z_arm64_fatal_error(unsigned int reason, const z_arch_esf_t *esf)
 {
-	uint64_t el, esr, elr, far;
+	uint64_t esr = 0;
+	uint64_t elr = 0;
+	uint64_t far = 0;
+	uint64_t el;
 
 	if (reason != K_ERR_SPURIOUS_IRQ) {
 		__asm__ volatile("mrs %0, CurrentEL" : "=r" (el));
 
-		if (GET_EL(el) != MODE_EL0) {
+		switch (GET_EL(el)) {
+		case MODE_EL1:
 			__asm__ volatile("mrs %0, esr_el1" : "=r" (esr));
 			__asm__ volatile("mrs %0, far_el1" : "=r" (far));
 			__asm__ volatile("mrs %0, elr_el1" : "=r" (elr));
+			break;
+		case MODE_EL3:
+			__asm__ volatile("mrs %0, esr_el3" : "=r" (esr));
+			__asm__ volatile("mrs %0, far_el3" : "=r" (far));
+			__asm__ volatile("mrs %0, elr_el3" : "=r" (elr));
+			break;
+		}
 
+		if (GET_EL(el) != MODE_EL0) {
 			LOG_ERR("ESR_ELn: 0x%016llx", esr);
 			LOG_ERR("FAR_ELn: 0x%016llx", far);
 			LOG_ERR("ELR_ELn: 0x%016llx", elr);

arch/arm/core/aarch64/reset.S

@@ -62,6 +62,11 @@ SECTION_SUBSEC_FUNC(TEXT,_reset_section,__start)
 	msr	vbar_el3, x19
 	isb
 
+	/* Setup a stack for EL3 (SP_ELx) */
+	ldr	x0, =(z_interrupt_stacks)
+	add	x0, x0, #(CONFIG_ISR_STACK_SIZE)
+	mov	sp, x0
+
 	/* Initialize sctlr_el3 to reset value */
 	mov_imm	x1, SCTLR_EL3_RES1
 	mrs	x0, sctlr_el3
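The added instructions point SP_EL3 at the top of the interrupt stack area: AArch64 stacks grow downward, so the usable region runs from z_interrupt_stacks up to z_interrupt_stacks + CONFIG_ISR_STACK_SIZE. A rough C equivalent of the address computation, as a sketch only (the real setup must be done in assembly because no stack exists yet; early_sp_value() is a hypothetical helper, not part of the patch):

#include <stdint.h>

extern uint8_t z_interrupt_stacks[];	/* interrupt stack area defined by the kernel */

/* Compute the initial SP loaded above; the stack grows down from here. */
static inline uintptr_t early_sp_value(void)
{
	return (uintptr_t)z_interrupt_stacks + CONFIG_ISR_STACK_SIZE;
}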
@@ -121,6 +126,12 @@ SECTION_SUBSEC_FUNC(TEXT,_reset_section,__start)
 1:
 	/* Initialize VBAR */
 	msr	vbar_el1, x19
+	isb
+
+	/* Setup the stack (SP_ELx) */
+	ldr	x0, =(z_interrupt_stacks)
+	add	x0, x0, #(CONFIG_ISR_STACK_SIZE)
+	mov	sp, x0
 
 	/* Disable access trapping in EL1 for NEON/FP */
 	mov	x0, #(CPACR_EL1_FPEN_NOTRAP)
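Two details in this hunk: the added isb ensures the vbar_el1 write and the new stack setup take effect before execution continues, and CPACR_EL1_FPEN_NOTRAP programs the CPACR_EL1.FPEN field so FP/Advanced SIMD instructions are not trapped at EL0 and EL1. A sketch of what that constant presumably encodes (FPEN is bits [21:20]; 0b11 means no trapping):

/* Presumed layout: CPACR_EL1.FPEN occupies bits [21:20]; writing 0b11
 * disables trapping of FP/SIMD accesses at EL0 and EL1.
 */
#define CPACR_EL1_FPEN_SHIFT	20
#define CPACR_EL1_FPEN_NOTRAP	(0x3 << CPACR_EL1_FPEN_SHIFT)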
@@ -140,10 +151,4 @@ SECTION_SUBSEC_FUNC(TEXT,_reset_section,__start)
 	/* Enable the SError interrupt */
 	msr	daifclr, #(DAIFSET_ABT)
 
-	/* Switch to SP_ELn and setup the stack */
-	msr	spsel, #1
-	ldr	x0, =(z_interrupt_stacks)
-	add	x0, x0, #(CONFIG_ISR_STACK_SIZE)
-	mov	sp, x0
-
 	bl	z_arm64_prep_c
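Dropping the explicit msr spsel, #1 works because on reset into AArch64 the core starts with PSTATE.SP set to 1, i.e. the dedicated SP_ELx stack pointer is already selected, and nothing earlier in the reset path switches to SP_EL0, so the mov sp, x0 sequences added above already target SP_ELx. A small sketch for sanity-checking which stack pointer is selected (spsel_selected() is a hypothetical helper, not part of this patch):

#include <stdint.h>

/* Returns 1 when the dedicated SP_ELx stack pointer is in use,
 * 0 when SP_EL0 is selected (PSTATE.SP, readable via SPSel).
 */
static inline uint64_t spsel_selected(void)
{
	uint64_t spsel;

	__asm__ volatile("mrs %0, SPSel" : "=r" (spsel));
	return spsel & 1;
}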