aarch64: Rewrite reset code using C

There is no strict reason to use assembly for the reset routine. Move as
much code as possible to C code using the proper helpers.

Signed-off-by: Carlo Caione <ccaione@baylibre.com>
This commit is contained in:
commit 9d908c78fa by Carlo Caione, 2021-02-12 14:44:18 +01:00; committed by Anas Nashif
10 changed files with 346 additions and 203 deletions

View file

@ -14,6 +14,7 @@ zephyr_library_sources(
irq_manage.c
prep_c.c
reset.S
reset.c
switch.S
thread.c
vector_table.S

View file

@ -4,12 +4,6 @@
* SPDX-License-Identifier: Apache-2.0
*/
/*
* Reset handler
*
* Reset handler that prepares the system for running C code.
*/
#include <toolchain.h>
#include <linker/sections.h>
#include <arch/cpu.h>
@ -19,14 +13,74 @@
_ASM_FILE_PROLOGUE
/*
 * Platform-specific pre-C init hooks (weak defaults, overridable).
 *
 * NOTE(review): this span interleaves the pre-rewrite comment and stub
 * (z_arch_el3_plat_init, which had to preserve callee-saved registers
 * per the AArch64 PCS) with the post-rewrite per-EL hooks that replace
 * it — the lines below are diff residue shown without +/- markers.
 *
 * Constraints for the z_arm64_el*_plat_prep_c hooks:
 * - Stack is not yet available
 * - x23 must be preserved (it holds __reset_prep_c's return address)
 */
WTEXT(z_arch_el3_plat_init)
SECTION_FUNC(TEXT,z_arch_el3_plat_init)
ret
/* Weak default: no EL3 platform pre-C init */
WTEXT(z_arm64_el3_plat_prep_c)
SECTION_FUNC(TEXT,z_arm64_el3_plat_prep_c)
ret
/* Weak default: no EL2 platform pre-C init */
WTEXT(z_arm64_el2_plat_prep_c)
SECTION_FUNC(TEXT,z_arm64_el2_plat_prep_c)
ret
/* Weak default: no EL1 platform pre-C init */
WTEXT(z_arm64_el1_plat_prep_c)
SECTION_FUNC(TEXT,z_arm64_el1_plat_prep_c)
ret
/*
 * Set the minimum necessary to safely call C code:
 * - sanitize SCTLR at the current EL
 * - run the (weak) platform pre-C hook for that EL
 * - select SP_EL0 and point it at the interrupt stack
 *
 * No stack exists yet, so the return address is parked in x23 instead
 * of being pushed; everything called from here must preserve x23.
 */
GTEXT(__reset_prep_c)
SECTION_SUBSEC_FUNC(TEXT,_reset_section,__reset_prep_c)
/* return address: x23 */
mov x23, x30
/* Labels 3:/2:/1: correspond to the current EL found by switch_el */
switch_el x0, 3f, 2f, 1f
3:
/* Reinitialize SCTLR from scratch in EL3 */
ldr w0, =(SCTLR_EL3_RES1 | SCTLR_I_BIT | SCTLR_SA_BIT)
msr sctlr_el3, x0
/* Custom plat prep_c init */
bl z_arm64_el3_plat_prep_c
b out
2:
/* Disable alignment fault checking */
mrs x0, sctlr_el2
bic x0, x0, SCTLR_A_BIT
msr sctlr_el2, x0
/* Custom plat prep_c init */
bl z_arm64_el2_plat_prep_c
b out
1:
/* Disable alignment fault checking */
mrs x0, sctlr_el1
bic x0, x0, SCTLR_A_BIT
msr sctlr_el1, x0
/* Custom plat prep_c init */
bl z_arm64_el1_plat_prep_c
out:
isb
/* Select SP_EL0 */
msr SPSel, #0
/* Initialize stack: sp = top of the interrupt stack area */
ldr x0, =(z_interrupt_stacks)
add x0, x0, #(CONFIG_ISR_STACK_SIZE)
mov sp, x0
/* Return via the address saved in x23 */
ret x23
/*
* Reset vector
@ -34,9 +88,6 @@ ret
* Ran when the system comes out of reset. The processor is in thread mode with
* privileged level. At this point, neither SP_EL0 nor SP_ELx point to a valid
* area in SRAM.
*
* When these steps are completed, jump to z_arm64_prep_c(), which will finish
* setting up the system for running C code.
*/
/*
 * __reset / __start: reset vector entry point.
 *
 * NOTE(review): this span interleaves pre-rewrite and post-rewrite
 * lines of a diff without +/- markers (e.g. the old inline EL3 init
 * appears alongside "bl z_arm64_el3_init", there are two switch_el
 * dispatches, and the routine ends with both "bl z_arm64_prep_c" and
 * "b z_arm64_prep_c"). Confirm against the applied source tree before
 * treating the flow below as one coherent routine.
 */
GTEXT(__reset)
@ -44,103 +95,44 @@ SECTION_SUBSEC_FUNC(TEXT,_reset_section,__reset)
GTEXT(__start)
SECTION_SUBSEC_FUNC(TEXT,_reset_section,__start)
/* Mask all exceptions */
msr DAIFSet, #0xf
/* Setup vector table: x19 = &_vector_table, consumed by msr vbar below */
adr x19, _vector_table
/* Prepare for calling C code */
bl __reset_prep_c
/* NOTE(review): pre-rewrite EL dispatch (x1 scratch); superseded below */
switch_el x1, 3f, 2f, 1f
/* Platform hook for highest EL */
bl z_arm64_el_highest_init
/* "switch_el" label doubles as the eret target taken via adr below */
switch_el:
switch_el x0, 3f, 2f, 1f
3:
/*
 * Zephyr entry happened in EL3. Do EL3 specific init before
 * dropping to lower EL.
 */
/* EL3 init */
bl z_arm64_el3_init
/* NOTE(review): the inline EL3 init below is the pre-rewrite code
 * that z_arm64_el3_init (reset.c) replaces.
 */
/* Initialize VBAR */
msr vbar_el3, x19
isb
/* Switch to SP_EL0 and setup the stack */
msr spsel, #0
ldr x0, =(z_interrupt_stacks)
add x0, x0, #(CONFIG_ISR_STACK_SIZE)
mov sp, x0
/* Initialize SCTLR_EL3 to reset value */
mov_imm x1, SCTLR_EL3_RES1
mrs x0, sctlr_el3
orr x0, x0, x1
msr sctlr_el3, x0
isb
/*
 * Disable access traps to EL3 for CPACR, Trace, FP, ASIMD,
 * SVE from lower EL.
 */
mov_imm x0, CPTR_EL3_RES0
mov_imm x1, (CPTR_EL3_TTA_BIT | CPTR_EL3_TFP_BIT | CPTR_EL3_TCPAC_BIT)
bic x0, x0, x1
orr x0, x0, #(CPTR_EL3_EZ_BIT)
msr cptr_el3, x0
isb
/* Platform specific configurations needed in EL3 */
bl z_arch_el3_plat_init
/* Enable access control configuration from lower EL */
mrs x0, actlr_el3
orr x0, x0, #(ACTLR_EL3_L2ACTLR_BIT | ACTLR_EL3_L2ECTLR_BIT \
| ACTLR_EL3_L2CTLR_BIT)
orr x0, x0, #(ACTLR_EL3_CPUACTLR_BIT | ACTLR_EL3_CPUECTLR_BIT)
msr actlr_el3, x0
/* Initialize SCTLR_EL1 to reset value */
mov_imm x0, SCTLR_EL1_RES1
msr sctlr_el1, x0
/* Disable EA/IRQ/FIQ routing to EL3 and set EL1 to AArch64 */
mov x0, xzr
orr x0, x0, #(SCR_RW_BIT)
msr scr_el3, x0
/* On eret return to secure EL1h with DAIF masked */
mov_imm x0, (SPSR_DAIF_MASK | SPSR_MODE_EL1H)
msr spsr_el3, x0
adr x0, 1f
msr elr_el3, x0
/* Get next EL: pass the switch_el label address to C */
adr x0, switch_el
bl z_arm64_el3_get_next_el
eret
2:
/* Booting from EL2 is not supported */
b .
/* NOTE(review): unreachable after "b ." above — post-rewrite EL2 path */
/* EL2 init */
bl z_arm64_el2_init
/* Move to EL1 with all exceptions masked */
mov_imm x0, (SPSR_DAIF_MASK | SPSR_MODE_EL1T)
msr spsr_el2, x0
adr x0, 1f
msr elr_el2, x0
eret
1:
/* Initialize VBAR */
msr vbar_el1, x19
/* EL1 init */
bl z_arm64_el1_init
/* Enable SError interrupts */
msr DAIFClr, #(DAIFCLR_ABT_BIT)
isb
/* Switch to SP_EL0 and setup the stack */
msr spsel, #0
ldr x0, =(z_interrupt_stacks)
add x0, x0, #(CONFIG_ISR_STACK_SIZE)
mov sp, x0
/* Disable access trapping in EL1 for NEON/FP */
mov_imm x0, CPACR_EL1_FPEN_NOTRAP
msr cpacr_el1, x0
/* Enable the instruction cache and EL1 stack alignment check. */
mov_imm x1, (SCTLR_I_BIT | SCTLR_SA_BIT)
mrs x0, sctlr_el1
orr x0, x0, x1
msr sctlr_el1, x0
0:
isb
/* Enable the SError interrupt */
msr daifclr, #(DAIFCLR_ABT_BIT)
/* NOTE(review): both the old "bl" and new "b" entry to C remain here */
bl z_arm64_prep_c
b z_arm64_prep_c

View file

@ -0,0 +1,146 @@
/*
* Copyright (c) 2021 Carlo Caione <ccaione@baylibre.com>
*
* SPDX-License-Identifier: Apache-2.0
*/
#include <kernel_internal.h>
#include "vector_table.h"
/*
 * Weak, do-nothing defaults for the per-EL platform init hooks.
 * SoC/board code may provide strong overrides (see e.g. the
 * z_arm64_el3_plat_init implementations in the soc plat_core.c files).
 */
void __weak z_arm64_el_highest_plat_init(void)
{
/* do nothing */
}
void __weak z_arm64_el3_plat_init(void)
{
/* do nothing */
}
void __weak z_arm64_el2_plat_init(void)
{
/* do nothing */
}
void __weak z_arm64_el1_plat_init(void)
{
/* do nothing */
}
/*
 * Init performed at the highest EL the image starts in: program the
 * system counter frequency, then run the (weak) platform hook and
 * synchronize the context.
 */
void z_arm64_el_highest_init(void)
{
/* Generic timer frequency, read back via CNTFRQ_EL0 by lower ELs */
write_cntfrq_el0(CONFIG_SYS_CLOCK_HW_CYCLES_PER_SEC);
z_arm64_el_highest_plat_init();
isb();
}
/*
 * EL3 one-time init: install the vector table, open the EL3 trap
 * controls so lower ELs can use FP/SIMD and CPACR/CPTR, program
 * SCR_EL3, then run the platform hook.
 */
void z_arm64_el3_init(void)
{
	uint64_t cptr;
	uint64_t scr;

	/* Install the EL3 vector table */
	write_vbar_el3((uint64_t)_vector_table);
	isb();

	/*
	 * CPTR_EL3 is mostly RES0. Keeping TTA/TFP/TCPAC clear means:
	 * no trapping of sysreg accesses, of SVE/SIMD/FP, or of
	 * CPTR_EL2 / CPACR_EL1 accesses from lower ELs.
	 */
	cptr = 0U;
	cptr &= ~(CPTR_TTA_BIT | CPTR_TFP_BIT | CPTR_TCPAC_BIT);
	write_cptr_el3(cptr);

	scr = SCR_RES1;			/* RES1 */
#ifdef CONFIG_ARMV8_A_NS
	scr |= SCR_NS_BIT;		/* EL2 / EL3 non-secure */
#endif
	scr |= (SCR_RW_BIT |		/* EL2 execution state is AArch64 */
		SCR_ST_BIT |		/* Do not trap EL1 timer accesses */
		SCR_HCE_BIT |		/* Do not trap HVC */
		SCR_SMD_BIT);		/* Do not trap SMC */
	write_scr_el3(scr);

	/* Platform hook, then synchronize the context */
	z_arm64_el3_plat_init();
	isb();
}
/*
 * EL2 one-time init: enable the i-cache and SP alignment checking,
 * run EL1 in AArch64, avoid FP/SIMD and CPACR trapping, and zero the
 * virtual timer offset and hypervisor timer controls, then run the
 * platform hook.
 */
void z_arm64_el2_init(void)
{
	uint64_t val;

	val = read_sctlr_el2();
	val |= (SCTLR_EL2_RES1 |	/* RES1 */
		SCTLR_I_BIT |		/* Enable i-cache */
		SCTLR_SA_BIT);		/* Enable SP alignment check */
	write_sctlr_el2(val);

	/* EL1 execution state is AArch64 */
	write_hcr_el2(read_hcr_el2() | HCR_RW_BIT);

	/* CPTR_EL2: keep RES1 bits; do not trap SVE/SIMD/FP or CPACR_EL1 */
	val = CPTR_EL2_RES1 & ~(CPTR_TFP_BIT | CPTR_TCPAC_BIT);
	write_cptr_el2(val);

	zero_cntvoff_el2();		/* 64-bit virtual timer offset = 0 */
	zero_cnthctl_el2();
	zero_cnthp_ctl_el2();

	z_arm64_el2_plat_init();
	isb();
}
/*
 * EL1 one-time init: install the vector table, allow NEON/SIMD/FP use
 * without trapping, enable the i-cache and SP alignment checking, then
 * run the platform hook.
 */
void z_arm64_el1_init(void)
{
	uint64_t val;

	/* Install the EL1 vector table */
	write_vbar_el1((uint64_t)_vector_table);
	isb();

	/* CPACR_EL1 is otherwise RES0; do not trap NEON/SIMD/FP */
	/* TODO: CONFIG_FLOAT_*_FORBIDDEN */
	write_cpacr_el1(CPACR_EL1_FPEN_NOTRAP);

	val = read_sctlr_el1();
	val |= (SCTLR_EL1_RES1 |	/* RES1 */
		SCTLR_I_BIT |		/* Enable i-cache */
		SCTLR_SA_BIT);		/* Enable SP alignment check */
	write_sctlr_el1(val);

	z_arm64_el1_plat_init();
	isb();
}
/*
 * Program ELR_EL3/SPSR_EL3 so that the next eret drops from EL3 into
 * the highest legally reachable lower EL at @switch_addr, with all
 * DAIF exceptions masked.
 */
void z_arm64_el3_get_next_el(uint64_t switch_addr)
{
	uint64_t spsr = SPSR_DAIF_MASK;	/* Mask the DAIF on eret */

	write_elr_el3(switch_addr);

	/*
	 * D1.11.2 (ARM DDI 0487E.a): a return to EL2 is illegal when EL3
	 * is implemented, SCR_EL3.NS == 0 and ARMv8.4-SecEL2 is not
	 * implemented. So drop to EL2 only when it exists and, if we are
	 * in secure state, only when Secure EL2 is supported.
	 */
	if (is_el_implemented(2) &&
	    (!is_in_secure_state() || is_el2_sec_supported())) {
		spsr |= SPSR_MODE_EL2T;		/* Dropping into EL2 */
	} else {
		spsr |= SPSR_MODE_EL1T;		/* Dropping into EL1 */
	}

	write_spsr_el3(spsr);
}

View file

@ -136,6 +136,31 @@ static ALWAYS_INLINE void disable_fiq(void)
#define __DMB() dmb()
#define __DSB() dsb()
/*
 * Report whether exception level @el (0..3) is implemented on this
 * CPU, based on the per-EL 4-bit fields of ID_AA64PFR0_EL1.
 */
static inline bool is_el_implemented(unsigned int el)
{
	if (el > 3) {
		return false;
	}

	/* Field for ELx sits at (4 * x); nonzero means implemented */
	return ((read_id_aa64pfr0_el1() >> (ID_AA64PFR0_EL1_SHIFT * el)) &
		ID_AA64PFR0_ELX_MASK) != 0U;
}
/*
 * Report whether Secure EL2 (ARMv8.4-SecEL2) is supported, from the
 * SEL2 field of ID_AA64PFR0_EL1.
 */
static inline bool is_el2_sec_supported(void)
{
	uint64_t pfr0 = read_id_aa64pfr0_el1();

	return ((pfr0 >> ID_AA64PFR0_SEL2_SHIFT) &
		ID_AA64PFR0_SEL2_MASK) != 0U;
}
/*
 * Report whether the image runs in the secure state. SCR_EL3.NS is
 * the authoritative source, but it is only readable from EL3, so the
 * state is inferred from the build configuration instead:
 * CONFIG_ARMV8_A_NS means the image is built to run non-secure.
 */
static inline bool is_in_secure_state(void)
{
/* We cannot read SCR_EL3 from EL2 or EL1 */
return !IS_ENABLED(CONFIG_ARMV8_A_NS);
}
#endif /* !_ASMLANGUAGE */
#endif /* ZEPHYR_INCLUDE_ARCH_ARM_AARCH64_LIB_HELPERS_H_ */

View file

@ -4,6 +4,6 @@ zephyr_include_directories(.)
zephyr_sources(
soc.c
)
zephyr_sources_ifdef(CONFIG_SOC_BCM58402_A72 plat_core.S)
zephyr_sources_ifdef(CONFIG_SOC_BCM58402_A72 plat_core.c)
zephyr_sources_ifdef(CONFIG_ARM_MMU mmu_regions.c)

View file

@ -1,65 +0,0 @@
/*
* Copyright 2020 Broadcom
*
* SPDX-License-Identifier: Apache-2.0
*/
/**
*@file
*@brief plat/core specific init
*/
#include <toolchain.h>
#include <linker/sections.h>
#include <arch/cpu.h>
/*
 * NOTE(review): pre-rewrite Broadcom Cortex-A72 assembly, deleted in
 * this commit and replaced by the C z_arm64_el3_plat_init in the new
 * plat_core.c.
 */
_ASM_FILE_PROLOGUE
GTEXT(z_arch_el3_plat_init)
GTEXT(plat_l2_init)
SECTION_FUNC(TEXT, z_arch_el3_plat_init)
/* Save the link register in callee-saved x20 across the bl below */
mov x20, x30
/* Enable GIC v3 system interface */
mov_imm x0, (ICC_SRE_ELx_DFB_BIT | ICC_SRE_ELx_DIB_BIT | \
ICC_SRE_ELx_SRE_BIT | ICC_SRE_EL3_EN_BIT)
msr ICC_SRE_EL3, x0
/* L2 config */
bl plat_l2_init
mov x30, x20
ret
SECTION_FUNC(TEXT,plat_l2_init)
/*
 * Set L2 Auxiliary Control Register of Cortex-A72
 */
/* Disable cluster coherency */
mrs x0, CORTEX_A72_L2ACTLR_EL1
orr x0, x0, #CORTEX_A72_L2ACTLR_DISABLE_ACE_SH_OR_CHI_BIT
msr CORTEX_A72_L2ACTLR_EL1, x0
/* Set L2 Control Register */
/* NOTE(review): x0 still holds the L2ACTLR_EL1 value here; the
 * bic/orr below build the L2CTLR value on top of it. Confirm whether
 * an mrs of CORTEX_A72_L2CTLR_EL1 was intended first.
 */
mov_imm x1, ((CORTEX_A72_L2_DATA_RAM_LATENCY_MASK << \
CORTEX_A72_L2CTLR_DATA_RAM_LATENCY_SHIFT) | \
(CORTEX_A72_L2_TAG_RAM_LATENCY_MASK << \
CORTEX_A72_L2CTLR_TAG_RAM_LATENCY_SHIFT) | \
(CORTEX_A72_L2_TAG_RAM_SETUP_1_CYCLE << \
CORTEX_A72_L2CTLR_TAG_RAM_SETUP_SHIFT) | \
(CORTEX_A72_L2_DATA_RAM_SETUP_1_CYCLE << \
CORTEX_A72_L2CTLR_DATA_RAM_SETUP_SHIFT))
bic x0, x0, x1
mov_imm x1, ((CORTEX_A72_L2_DATA_RAM_LATENCY_3_CYCLES << \
CORTEX_A72_L2CTLR_DATA_RAM_LATENCY_SHIFT) | \
(CORTEX_A72_L2_TAG_RAM_SETUP_1_CYCLE << \
CORTEX_A72_L2CTLR_TAG_RAM_SETUP_SHIFT) | \
(CORTEX_A72_L2_DATA_RAM_SETUP_1_CYCLE << \
CORTEX_A72_L2CTLR_DATA_RAM_SETUP_SHIFT))
orr x0, x0, x1
msr CORTEX_A72_L2CTLR_EL1, x0
dsb sy
isb
ret

View file

@ -0,0 +1,57 @@
/*
* Copyright 2020 Broadcom
*
* SPDX-License-Identifier: Apache-2.0
*/
#include <toolchain.h>
#include <linker/sections.h>
#include <arch/cpu.h>
/*
 * EL3 platform init for the Broadcom Cortex-A72 SoC: open up lower-EL
 * access-control configuration, enable the GICv3 system-register
 * interface, and program the Cortex-A72 L2 control registers.
 */
void z_arm64_el3_plat_init(void)
{
uint64_t reg, val;
/* Enable access control configuration from lower EL */
reg = read_actlr_el3();
reg |= (ACTLR_EL3_L2ACTLR_BIT |
ACTLR_EL3_L2ECTLR_BIT |
ACTLR_EL3_L2CTLR_BIT |
ACTLR_EL3_CPUACTLR_BIT |
ACTLR_EL3_CPUECTLR_BIT);
write_actlr_el3(reg);
/* Enable the GIC v3 CPU system-register interface at EL3 */
reg = (ICC_SRE_ELx_DFB_BIT | ICC_SRE_ELx_DIB_BIT |
ICC_SRE_ELx_SRE_BIT | ICC_SRE_EL3_EN_BIT);
write_sysreg(reg, ICC_SRE_EL3);
/* Disable ACE/CHI cluster coherency in L2ACTLR */
reg = read_sysreg(CORTEX_A72_L2ACTLR_EL1);
reg |= CORTEX_A72_L2ACTLR_DISABLE_ACE_SH_OR_CHI_BIT;
write_sysreg(reg, CORTEX_A72_L2ACTLR_EL1);
/*
 * NOTE(review): from here on `reg` still holds the L2ACTLR_EL1 value
 * just written, yet it is masked and OR'ed with L2CTLR field values
 * and written to L2CTLR_EL1. The assembly this replaces did the same
 * (bic on the x0 that held L2ACTLR), so behavior is preserved, but
 * confirm whether a read of CORTEX_A72_L2CTLR_EL1 was intended as the
 * base value.
 */
val = ((CORTEX_A72_L2_DATA_RAM_LATENCY_MASK <<
CORTEX_A72_L2CTLR_DATA_RAM_LATENCY_SHIFT) |
(CORTEX_A72_L2_TAG_RAM_LATENCY_MASK <<
CORTEX_A72_L2CTLR_TAG_RAM_LATENCY_SHIFT) |
(CORTEX_A72_L2_TAG_RAM_SETUP_1_CYCLE <<
CORTEX_A72_L2CTLR_TAG_RAM_SETUP_SHIFT) |
(CORTEX_A72_L2_DATA_RAM_SETUP_1_CYCLE <<
CORTEX_A72_L2CTLR_DATA_RAM_SETUP_SHIFT));
reg &= ~val;
val = ((CORTEX_A72_L2_DATA_RAM_LATENCY_3_CYCLES <<
CORTEX_A72_L2CTLR_DATA_RAM_LATENCY_SHIFT) |
(CORTEX_A72_L2_TAG_RAM_SETUP_1_CYCLE <<
CORTEX_A72_L2CTLR_TAG_RAM_SETUP_SHIFT) |
(CORTEX_A72_L2_DATA_RAM_SETUP_1_CYCLE <<
CORTEX_A72_L2CTLR_DATA_RAM_SETUP_SHIFT));
reg |= val;
write_sysreg(reg, CORTEX_A72_L2CTLR_EL1);
dsb();
isb();
}

View file

@ -3,4 +3,4 @@
zephyr_library_sources_ifdef(CONFIG_ARM_MMU mmu_regions.c)
zephyr_sources_ifdef(CONFIG_SOC_QEMU_CORTEX_A53 plat_core.S)
zephyr_sources_ifdef(CONFIG_SOC_QEMU_CORTEX_A53 plat_core.c)

View file

@ -1,32 +0,0 @@
/*
* Copyright 2020 Carlo Caione <ccaione@baylibre.com>
*
* SPDX-License-Identifier: Apache-2.0
*/
/**
*@file
*@brief plat/core specific init
*/
#include <toolchain.h>
#include <linker/sections.h>
#include <arch/cpu.h>
/*
 * NOTE(review): pre-rewrite qemu_cortex_a53 assembly, deleted in this
 * commit and replaced by a C z_arm64_el3_plat_init. Note the
 * CONFIG_GIC_V3 guard here.
 */
_ASM_FILE_PROLOGUE
GTEXT(z_arch_el3_plat_init)
SECTION_FUNC(TEXT, z_arch_el3_plat_init)
/* Save the link register in callee-saved x20 */
mov x20, x30
#ifdef CONFIG_GIC_V3
/* Enable GIC v3 system interface */
mov_imm x0, (ICC_SRE_ELx_DFB_BIT | ICC_SRE_ELx_DIB_BIT | \
ICC_SRE_ELx_SRE_BIT | ICC_SRE_EL3_EN_BIT)
msr ICC_SRE_EL3, x0
#endif
mov x30, x20
ret

View file

@ -0,0 +1,19 @@
/*
* Copyright 2020 Carlo Caione <ccaione@baylibre.com>
*
* SPDX-License-Identifier: Apache-2.0
*/
#include <toolchain.h>
#include <linker/sections.h>
#include <arch/cpu.h>
/*
 * EL3 platform hook for qemu_cortex_a53.
 *
 * Mirrors the assembly routine this file replaces: when the GICv3
 * driver is enabled, switch the GIC CPU interface to system-register
 * access (ICC_* registers) and enable it at EL3.
 *
 * Fix: the straight C translation dropped the CONFIG_GIC_V3 guard that
 * the assembly version had; restore it so non-GICv3 configurations do
 * not touch ICC_SRE_EL3. Also drop the redundant `= 0` initializer
 * that was immediately overwritten.
 */
void z_arm64_el3_plat_init(void)
{
#ifdef CONFIG_GIC_V3
	uint64_t reg;

	/* Enable GIC v3 system interface */
	reg = (ICC_SRE_ELx_DFB_BIT | ICC_SRE_ELx_DIB_BIT |
	       ICC_SRE_ELx_SRE_BIT | ICC_SRE_EL3_EN_BIT);
	write_sysreg(reg, ICC_SRE_EL3);
#endif
}