ARM: Fix push/pop alignment on ARM platforms

This patch adjusts the way we are doing push/pop operations before
making function calls inside of assembly routines.  ARM requires 8
byte aligned stack operations across public interfaces.  This means
that we need to be sure to push multiples of 2 registers.

Fixes #2108

Signed-off-by: Andy Gross <andy.gross@linaro.org>
This commit is contained in:
Andy Gross 2019-01-31 16:31:01 -06:00 committed by Kumar Gala
commit a468c15eca
6 changed files with 67 additions and 54 deletions

View file

@@ -114,11 +114,15 @@ SECTION_FUNC(TEXT, _NanoIdleValClear)
SECTION_FUNC(TEXT, k_cpu_idle) SECTION_FUNC(TEXT, k_cpu_idle)
#ifdef CONFIG_TRACING #ifdef CONFIG_TRACING
push {lr} push {r0, lr}
bl z_sys_trace_idle bl z_sys_trace_idle
pop {r0} #if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE)
mov lr, r0 pop {r0, r1}
#endif mov lr, r1
#else
pop {r0, lr}
#endif /* CONFIG_ARMV6_M_ARMV8_M_BASELINE */
#endif /* CONFIG_TRACING */
#if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE) #if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE)
cpsie i cpsie i
@@ -158,11 +162,15 @@ SECTION_FUNC(TEXT, k_cpu_idle)
SECTION_FUNC(TEXT, k_cpu_atomic_idle) SECTION_FUNC(TEXT, k_cpu_atomic_idle)
#ifdef CONFIG_TRACING #ifdef CONFIG_TRACING
push {lr} push {r0, lr}
bl z_sys_trace_idle bl z_sys_trace_idle
pop {r1} #if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE)
mov lr, r1 pop {r0, r1}
#endif mov lr, r1
#else
pop {r0, lr}
#endif /* CONFIG_ARMV6_M_ARMV8_M_BASELINE */
#endif /* CONFIG_TRACING */
/* /*
* Lock PRIMASK while sleeping: wfe will still get interrupted by * Lock PRIMASK while sleeping: wfe will still get interrupted by

View file

@@ -87,14 +87,13 @@ _EXIT_EXC:
#endif /* CONFIG_PREEMPT_ENABLED */ #endif /* CONFIG_PREEMPT_ENABLED */
#ifdef CONFIG_STACK_SENTINEL #ifdef CONFIG_STACK_SENTINEL
push {lr} push {r0, lr}
bl _check_stack_sentinel bl _check_stack_sentinel
#if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE) #if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE)
pop {r0} pop {r0, r1}
mov lr, r0 mov lr, r1
#else #else
pop {lr} pop {r0, lr}
#endif /* CONFIG_ARMV6_M_ARMV8_M_BASELINE */ #endif /* CONFIG_ARMV6_M_ARMV8_M_BASELINE */
#endif /* CONFIG_STACK_SENTINEL */ #endif /* CONFIG_STACK_SENTINEL */
bx lr bx lr

View file

@@ -139,9 +139,9 @@ _s_stack_frame_endif:
*/ */
mov r1, lr mov r1, lr
#endif /* CONFIG_ARM_SECURE_FIRMWARE */ #endif /* CONFIG_ARM_SECURE_FIRMWARE */
push {lr} push {r0, lr}
bl _Fault bl _Fault
pop {pc} pop {r0, pc}
.end .end

View file

@@ -41,7 +41,7 @@ GTEXT(_IntExit)
*/ */
SECTION_FUNC(TEXT, _isr_wrapper) SECTION_FUNC(TEXT, _isr_wrapper)
push {lr} /* lr is now the first item on the stack */ push {r0,lr} /* r0, lr are now the first items on the stack */
#ifdef CONFIG_EXECUTION_BENCHMARKING #ifdef CONFIG_EXECUTION_BENCHMARKING
bl read_timer_start_of_isr bl read_timer_start_of_isr
@@ -106,17 +106,17 @@ _idle_state_cleared:
ldm r1!,{r0,r3} /* arg in r0, ISR in r3 */ ldm r1!,{r0,r3} /* arg in r0, ISR in r3 */
#ifdef CONFIG_EXECUTION_BENCHMARKING #ifdef CONFIG_EXECUTION_BENCHMARKING
stm sp!,{r0-r3} /* Save r0 to r4 into stack */ stm sp!,{r0-r3} /* Save r0 to r3 into stack */
push {lr} push {r0, lr}
bl read_timer_end_of_isr bl read_timer_end_of_isr
#if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE) #if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE)
pop {r3} pop {r0, r3}
mov lr,r3 mov lr,r3
#else #else
pop {lr} pop {r0, lr}
#endif /* CONFIG_ARMV6_M_ARMV8_M_BASELINE */ #endif /* CONFIG_ARMV6_M_ARMV8_M_BASELINE */
ldm sp!,{r0-r3} /* Restore r0 to r4 regs */ ldm sp!,{r0-r3} /* Restore r0 to r3 regs */
#endif #endif /* CONFIG_EXECUTION_BENCHMARKING */
blx r3 /* call ISR */ blx r3 /* call ISR */
#ifdef CONFIG_TRACING #ifdef CONFIG_TRACING
@@ -124,10 +124,10 @@ _idle_state_cleared:
#endif #endif
#if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE) #if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE)
pop {r3} pop {r0, r3}
mov lr, r3 mov lr, r3
#elif defined(CONFIG_ARMV7_M_ARMV8_M_MAINLINE) #elif defined(CONFIG_ARMV7_M_ARMV8_M_MAINLINE)
pop {lr} pop {r0, lr}
#else #else
#error Unknown ARM architecture #error Unknown ARM architecture
#endif /* CONFIG_ARMV6_M_ARMV8_M_BASELINE */ #endif /* CONFIG_ARMV6_M_ARMV8_M_BASELINE */

View file

@@ -45,14 +45,14 @@ SECTION_FUNC(TEXT, __pendsv)
#ifdef CONFIG_TRACING #ifdef CONFIG_TRACING
/* Register the context switch */ /* Register the context switch */
push {lr} push {r0, lr}
bl z_sys_trace_thread_switched_out bl z_sys_trace_thread_switched_out
#if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE) #if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE)
pop {r0} pop {r0, r1}
mov lr, r0 mov lr, r1
#else #else
pop {lr} pop {r0, lr}
#endif /* CONFIG_ARMV6_M_ARMV8_M_BASELINE */ #endif /* CONFIG_ARMV6_M_ARMV8_M_BASELINE */
#endif /* CONFIG_TRACING */ #endif /* CONFIG_TRACING */
/* protect the kernel state while we play with the thread lists */ /* protect the kernel state while we play with the thread lists */
@@ -233,29 +233,29 @@ _thread_irq_disabled:
#endif /* CONFIG_BUILTIN_STACK_GUARD */ #endif /* CONFIG_BUILTIN_STACK_GUARD */
#ifdef CONFIG_EXECUTION_BENCHMARKING #ifdef CONFIG_EXECUTION_BENCHMARKING
stm sp!,{r0-r3} /* Save regs r0 to r4 on stack */ stm sp!,{r0-r3} /* Save regs r0 to r3 on stack */
push {lr} push {r0, lr}
bl read_timer_end_of_swap bl read_timer_end_of_swap
#if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE) #if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE)
pop {r3} pop {r0, r1}
mov lr,r3 mov lr,r1
#else #else
pop {lr} pop {r0, lr}
#endif /* CONFIG_ARMV6_M_ARMV8_M_BASELINE */ #endif /* CONFIG_ARMV6_M_ARMV8_M_BASELINE */
ldm sp!,{r0-r3} /* Load back regs ro to r4 */ ldm sp!,{r0-r3} /* Load back regs r0 to r3 */
#endif /* CONFIG_EXECUTION_BENCHMARKING */ #endif /* CONFIG_EXECUTION_BENCHMARKING */
#ifdef CONFIG_TRACING #ifdef CONFIG_TRACING
/* Register the context switch */ /* Register the context switch */
push {lr} push {r0, lr}
bl z_sys_trace_thread_switched_in bl z_sys_trace_thread_switched_in
#if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE) #if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE)
pop {r0} pop {r0, r1}
mov lr, r0 mov lr, r1
#else #else
pop {lr} pop {r0, lr}
#endif /* CONFIG_ARMV6_M_ARMV8_M_BASELINE */ #endif
#endif /* CONFIG_TRACING */ #endif /* CONFIG_TRACING */
/* exc return */ /* exc return */
@@ -294,19 +294,19 @@ _stack_frame_endif:
beq _oops beq _oops
#if CONFIG_IRQ_OFFLOAD #if CONFIG_IRQ_OFFLOAD
push {lr} push {r0, lr}
blx _irq_do_offload /* call C routine which executes the offload */ blx _irq_do_offload /* call C routine which executes the offload */
pop {r3} pop {r0, r1}
mov lr, r3 mov lr, r1
#endif #endif /* CONFIG_IRQ_OFFLOAD */
/* exception return is done in _IntExit() */ /* exception return is done in _IntExit() */
b _IntExit b _IntExit
_oops: _oops:
push {lr} push {r0, lr}
blx _do_kernel_oops blx _do_kernel_oops
pop {pc} pop {r0, pc}
#elif defined(CONFIG_ARMV7_M_ARMV8_M_MAINLINE) #elif defined(CONFIG_ARMV7_M_ARMV8_M_MAINLINE)
/** /**
@@ -358,18 +358,18 @@ SECTION_FUNC(TEXT, __svc)
beq _oops beq _oops
#if CONFIG_IRQ_OFFLOAD #if CONFIG_IRQ_OFFLOAD
push {lr} push {r0, lr}
blx _irq_do_offload /* call C routine which executes the offload */ blx _irq_do_offload /* call C routine which executes the offload */
pop {lr} pop {r0, lr}
/* exception return is done in _IntExit() */ /* exception return is done in _IntExit() */
b _IntExit b _IntExit
#endif #endif
_oops: _oops:
push {lr} push {r0, lr}
blx _do_kernel_oops blx _do_kernel_oops
pop {pc} pop {r0, pc}
#if CONFIG_USERSPACE #if CONFIG_USERSPACE
/* /*

View file

@@ -118,10 +118,16 @@ SECTION_FUNC(TEXT,_arm_userspace_enter)
#ifdef CONFIG_EXECUTION_BENCHMARKING #ifdef CONFIG_EXECUTION_BENCHMARKING
stm sp!,{r0-r3} /* Save regs r0 to r4 on stack */ stm sp!,{r0-r3} /* Save regs r0 to r4 on stack */
push {lr} push {r0, lr}
bl read_timer_end_of_userspace_enter bl read_timer_end_of_userspace_enter
pop {lr} #if defined(CONFIG_ARMV6_M_ARMV8_M_BASELINE)
ldm sp!,{r0-r3} /* Load back regs ro to r4 */ pop {r0, r3}
mov lr,r3
#else
pop {r0, lr}
#endif /* CONFIG_ARMV6_M_ARMV8_M_BASELINE */
ldm sp!,{r0-r3} /* Restore r0 to r3 regs */
#endif /* CONFIG_EXECUTION_BENCHMARKING */ #endif /* CONFIG_EXECUTION_BENCHMARKING */
/* change processor mode to unprivileged */ /* change processor mode to unprivileged */