ARM: Fix push/pop alignment on ARM platforms
This patch adjusts the way we are doing push/pop operations before making function calls inside of assembly routines. ARM requires 8-byte-aligned stack operations across public interfaces, which means that we need to be sure to push multiples of 2 registers.

Fixes #2108

Signed-off-by: Andy Gross <andy.gross@linaro.org>
This commit is contained in:
parent
4d2459e515
commit
a468c15eca
6 changed files with 67 additions and 54 deletions
|
@ -114,11 +114,15 @@ SECTION_FUNC(TEXT, _NanoIdleValClear)
|
|||
|
||||
SECTION_FUNC(TEXT, k_cpu_idle)
|
||||
#ifdef CONFIG_TRACING
|
||||
push {lr}
|
||||
push {r0, lr}
|
||||
bl z_sys_trace_idle
|
||||
pop {r0}
|
||||
mov lr, r0
|
||||
#endif
|
||||
#if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE)
|
||||
pop {r0, r1}
|
||||
mov lr, r1
|
||||
#else
|
||||
pop {r0, lr}
|
||||
#endif /* CONFIG_ARMV6_M_ARMV8_M_BASELINE */
|
||||
#endif /* CONFIG_TRACING */
|
||||
|
||||
#if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE)
|
||||
cpsie i
|
||||
|
@ -158,11 +162,15 @@ SECTION_FUNC(TEXT, k_cpu_idle)
|
|||
|
||||
SECTION_FUNC(TEXT, k_cpu_atomic_idle)
|
||||
#ifdef CONFIG_TRACING
|
||||
push {lr}
|
||||
push {r0, lr}
|
||||
bl z_sys_trace_idle
|
||||
pop {r1}
|
||||
#if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE)
|
||||
pop {r0, r1}
|
||||
mov lr, r1
|
||||
#endif
|
||||
#else
|
||||
pop {r0, lr}
|
||||
#endif /* CONFIG_ARMV6_M_ARMV8_M_BASELINE */
|
||||
#endif /* CONFIG_TRACING */
|
||||
|
||||
/*
|
||||
* Lock PRIMASK while sleeping: wfe will still get interrupted by
|
||||
|
|
|
@ -87,14 +87,13 @@ _EXIT_EXC:
|
|||
#endif /* CONFIG_PREEMPT_ENABLED */
|
||||
|
||||
#ifdef CONFIG_STACK_SENTINEL
|
||||
push {lr}
|
||||
push {r0, lr}
|
||||
bl _check_stack_sentinel
|
||||
#if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE)
|
||||
pop {r0}
|
||||
mov lr, r0
|
||||
pop {r0, r1}
|
||||
mov lr, r1
|
||||
#else
|
||||
pop {lr}
|
||||
pop {r0, lr}
|
||||
#endif /* CONFIG_ARMV6_M_ARMV8_M_BASELINE */
|
||||
#endif /* CONFIG_STACK_SENTINEL */
|
||||
|
||||
bx lr
|
||||
|
|
|
@ -139,9 +139,9 @@ _s_stack_frame_endif:
|
|||
*/
|
||||
mov r1, lr
|
||||
#endif /* CONFIG_ARM_SECURE_FIRMWARE */
|
||||
push {lr}
|
||||
push {r0, lr}
|
||||
bl _Fault
|
||||
|
||||
pop {pc}
|
||||
pop {r0, pc}
|
||||
|
||||
.end
|
||||
|
|
|
@ -41,7 +41,7 @@ GTEXT(_IntExit)
|
|||
*/
|
||||
SECTION_FUNC(TEXT, _isr_wrapper)
|
||||
|
||||
push {lr} /* lr is now the first item on the stack */
|
||||
push {r0,lr} /* r0, lr are now the first items on the stack */
|
||||
|
||||
#ifdef CONFIG_EXECUTION_BENCHMARKING
|
||||
bl read_timer_start_of_isr
|
||||
|
@ -106,17 +106,17 @@ _idle_state_cleared:
|
|||
|
||||
ldm r1!,{r0,r3} /* arg in r0, ISR in r3 */
|
||||
#ifdef CONFIG_EXECUTION_BENCHMARKING
|
||||
stm sp!,{r0-r3} /* Save r0 to r4 into stack */
|
||||
push {lr}
|
||||
stm sp!,{r0-r3} /* Save r0 to r3 into stack */
|
||||
push {r0, lr}
|
||||
bl read_timer_end_of_isr
|
||||
#if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE)
|
||||
pop {r3}
|
||||
pop {r0, r3}
|
||||
mov lr,r3
|
||||
#else
|
||||
pop {lr}
|
||||
pop {r0, lr}
|
||||
#endif /* CONFIG_ARMV6_M_ARMV8_M_BASELINE */
|
||||
ldm sp!,{r0-r3} /* Restore r0 to r4 regs */
|
||||
#endif
|
||||
ldm sp!,{r0-r3} /* Restore r0 to r3 regs */
|
||||
#endif /* CONFIG_EXECUTION_BENCHMARKING */
|
||||
blx r3 /* call ISR */
|
||||
|
||||
#ifdef CONFIG_TRACING
|
||||
|
@ -124,10 +124,10 @@ _idle_state_cleared:
|
|||
#endif
|
||||
|
||||
#if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE)
|
||||
pop {r3}
|
||||
pop {r0, r3}
|
||||
mov lr, r3
|
||||
#elif defined(CONFIG_ARMV7_M_ARMV8_M_MAINLINE)
|
||||
pop {lr}
|
||||
pop {r0, lr}
|
||||
#else
|
||||
#error Unknown ARM architecture
|
||||
#endif /* CONFIG_ARMV6_M_ARMV8_M_BASELINE */
|
||||
|
|
|
@ -45,14 +45,14 @@ SECTION_FUNC(TEXT, __pendsv)
|
|||
|
||||
#ifdef CONFIG_TRACING
|
||||
/* Register the context switch */
|
||||
push {lr}
|
||||
push {r0, lr}
|
||||
bl z_sys_trace_thread_switched_out
|
||||
#if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE)
|
||||
pop {r0}
|
||||
mov lr, r0
|
||||
pop {r0, r1}
|
||||
mov lr, r1
|
||||
#else
|
||||
pop {lr}
|
||||
#endif /* CONFIG_ARMV6_M_ARMV8_M_BASELINE */
|
||||
pop {r0, lr}
|
||||
#endif /* CONFIG_ARMV6_M_ARMV8M_M_BASELINE */
|
||||
#endif /* CONFIG_TRACING */
|
||||
|
||||
/* protect the kernel state while we play with the thread lists */
|
||||
|
@ -233,29 +233,29 @@ _thread_irq_disabled:
|
|||
#endif /* CONFIG_BUILTIN_STACK_GUARD */
|
||||
|
||||
#ifdef CONFIG_EXECUTION_BENCHMARKING
|
||||
stm sp!,{r0-r3} /* Save regs r0 to r4 on stack */
|
||||
push {lr}
|
||||
stm sp!,{r0-r3} /* Save regs r0 to r3 on stack */
|
||||
push {r0, lr}
|
||||
bl read_timer_end_of_swap
|
||||
|
||||
#if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE)
|
||||
pop {r3}
|
||||
mov lr,r3
|
||||
pop {r0, r1}
|
||||
mov lr,r1
|
||||
#else
|
||||
pop {lr}
|
||||
pop {r0, lr}
|
||||
#endif /* CONFIG_ARMV6_M_ARMV8_M_BASELINE */
|
||||
ldm sp!,{r0-r3} /* Load back regs ro to r4 */
|
||||
ldm sp!,{r0-r3} /* Load back regs r0 to r3 */
|
||||
|
||||
#endif /* CONFIG_EXECUTION_BENCHMARKING */
|
||||
|
||||
#ifdef CONFIG_TRACING
|
||||
/* Register the context switch */
|
||||
push {lr}
|
||||
push {r0, lr}
|
||||
bl z_sys_trace_thread_switched_in
|
||||
#if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE)
|
||||
pop {r0}
|
||||
mov lr, r0
|
||||
pop {r0, r1}
|
||||
mov lr, r1
|
||||
#else
|
||||
pop {lr}
|
||||
#endif /* CONFIG_ARMV6_M_ARMV8_M_BASELINE */
|
||||
pop {r0, lr}
|
||||
#endif
|
||||
#endif /* CONFIG_TRACING */
|
||||
|
||||
/* exc return */
|
||||
|
@ -294,19 +294,19 @@ _stack_frame_endif:
|
|||
beq _oops
|
||||
|
||||
#if CONFIG_IRQ_OFFLOAD
|
||||
push {lr}
|
||||
push {r0, lr}
|
||||
blx _irq_do_offload /* call C routine which executes the offload */
|
||||
pop {r3}
|
||||
mov lr, r3
|
||||
#endif
|
||||
pop {r0, r1}
|
||||
mov lr, r1
|
||||
#endif /* CONFIG_IRQ_OFFLOAD */
|
||||
|
||||
/* exception return is done in _IntExit() */
|
||||
b _IntExit
|
||||
|
||||
_oops:
|
||||
push {lr}
|
||||
push {r0, lr}
|
||||
blx _do_kernel_oops
|
||||
pop {pc}
|
||||
pop {r0, pc}
|
||||
|
||||
#elif defined(CONFIG_ARMV7_M_ARMV8_M_MAINLINE)
|
||||
/**
|
||||
|
@ -358,18 +358,18 @@ SECTION_FUNC(TEXT, __svc)
|
|||
beq _oops
|
||||
|
||||
#if CONFIG_IRQ_OFFLOAD
|
||||
push {lr}
|
||||
push {r0, lr}
|
||||
blx _irq_do_offload /* call C routine which executes the offload */
|
||||
pop {lr}
|
||||
pop {r0, lr}
|
||||
|
||||
/* exception return is done in _IntExit() */
|
||||
b _IntExit
|
||||
#endif
|
||||
|
||||
_oops:
|
||||
push {lr}
|
||||
push {r0, lr}
|
||||
blx _do_kernel_oops
|
||||
pop {pc}
|
||||
pop {r0, pc}
|
||||
|
||||
#if CONFIG_USERSPACE
|
||||
/*
|
||||
|
|
|
@ -118,10 +118,16 @@ SECTION_FUNC(TEXT,_arm_userspace_enter)
|
|||
|
||||
#ifdef CONFIG_EXECUTION_BENCHMARKING
|
||||
stm sp!,{r0-r3} /* Save regs r0 to r4 on stack */
|
||||
push {lr}
|
||||
push {r0, lr}
|
||||
bl read_timer_end_of_userspace_enter
|
||||
pop {lr}
|
||||
ldm sp!,{r0-r3} /* Load back regs ro to r4 */
|
||||
#if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE)
|
||||
pop {r0, r3}
|
||||
mov lr,r3
|
||||
#else
|
||||
pop {r0, lr}
|
||||
#endif /* CONFIG_ARMV6_M_ARMV8_M_BASELINE */
|
||||
ldm sp!,{r0-r3} /* Restore r0 to r3 regs */
|
||||
|
||||
#endif /* CONFIG_EXECUTION_BENCHMARKING */
|
||||
|
||||
/* change processor mode to unprivileged */
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue