arch: arm: Optimise Cortex-R exception return function.

z_arm_exc_exit (z_arm_int_exit) currently requires the caller to specify the
current execution mode as a parameter (in r0). This is unnecessary because
the mode can be read directly from the CPSR.

This commit modifies the exception return function to retrieve the
current execution mode from CPSR and removes all provisions for passing
the execution mode parameter.
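
For context: on Arm, the current mode is encoded in CPSR bits [4:0], so the
exit path can recover it with a single MRS instead of relying on a value
passed in r0. A minimal sketch of the idea (register choice and the literal
mode values are illustrative, not the exact instructions in this patch):

	mrs r0, cpsr        /* read the current program status register */
	and r0, r0, #0x1f   /* M[4:0] holds the execution mode          */
	cmp r0, #0x13       /* e.g. 0x13 = SVC, 0x12 = IRQ, 0x1f = SYS  */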

Signed-off-by: Stephanos Ioannidis <root@stephanos.io>
commit cd90d49a86
Authored by Stephanos Ioannidis on 2019-10-15 15:57:04 +09:00; committed by Ioannis Glaropoulos
4 changed files with 4 additions and 27 deletions

@@ -71,10 +71,6 @@ SECTION_SUBSEC_FUNC(TEXT, _HandlerModeExit, z_arm_int_exit)
  */
 SECTION_SUBSEC_FUNC(TEXT, _HandlerModeExit, z_arm_exc_exit)
-#if defined(CONFIG_CPU_CORTEX_R)
-	/* r0 contains the caller mode */
-	push {r0, lr}
-#endif
 #ifdef CONFIG_PREEMPT_ENABLED
 	ldr r0, =_kernel
@@ -117,9 +113,6 @@ _EXIT_EXC:
 #if defined(CONFIG_CPU_CORTEX_M)
 	bx lr
 #elif defined(CONFIG_CPU_CORTEX_R)
-	/* Restore the caller mode to r0 */
-	pop {r0, lr}
 	/*
 	 * Restore r0-r3, r12 and lr stored into the process stack by the mode
 	 * entry function. These registers are saved by _isr_wrapper for IRQ mode
@@ -128,22 +121,15 @@ _EXIT_EXC:
 	 * r0-r3 are either the values from the thread before it was switched out
 	 * or they are the args to _new_thread for a new thread.
 	 */
-	push {r4, r5}
-	cmp r0, #RET_FROM_SVC
+	push {r4-r6}
+	mrs r6, cpsr
 	cps #MODE_SYS
 	ldmia sp!, {r0-r5}
-	beq _svc_exit
-	cps #MODE_IRQ
-	b _exc_exit
-_svc_exit:
-	cps #MODE_SVC
-_exc_exit:
+	msr cpsr_c, r6
 	mov r12, r4
 	mov lr, r5
-	pop {r4, r5}
+	pop {r4-r6}
 	movs pc, lr
 #endif
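
Reassembled from the hunks above, the new Cortex-R exit sequence reads as
follows (comments added here for explanation; they are not part of the patch):

	push {r4-r6}        /* scratch registers; r6 will hold the saved CPSR  */
	mrs r6, cpsr        /* capture the current exception mode from CPSR    */
	cps #MODE_SYS       /* switch to SYS mode to reach the process stack   */
	ldmia sp!, {r0-r5}  /* pop the r0-r3, r12, lr values saved on entry    */
	msr cpsr_c, r6      /* switch back to whichever mode we came from      */
	mov r12, r4         /* r4/r5 temporarily carried r12 and lr            */
	mov lr, r5
	pop {r4-r6}         /* restore the scratch registers                   */
	movs pc, lr         /* exception return; SPSR is copied back into CPSR */

This removes the RET_FROM_SVC/RET_FROM_IRQ compare-and-branch: instead of
selecting cps #MODE_SVC or cps #MODE_IRQ based on r0, a single msr cpsr_c
restores whatever mode was active on entry.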

@@ -195,10 +195,6 @@ _idle_state_cleared:
 #error Unknown ARM architecture
 #endif /* CONFIG_ARMV6_M_ARMV8_M_BASELINE */
-#if defined(CONFIG_CPU_CORTEX_R)
-	mov r0, #RET_FROM_IRQ
-#endif
 	/* Use 'bx' instead of 'b' because 'bx' can jump further, and use
 	 * 'bx' instead of 'blx' because exception return is done in
 	 * z_arm_int_exit() */

@@ -625,7 +625,6 @@ demux:
 	blx z_irq_do_offload	/* call C routine which executes the offload */
 	/* exception return is done in z_arm_int_exit() */
-	mov r0, #RET_FROM_SVC
 	b z_arm_int_exit
 #endif
@@ -633,7 +632,6 @@ _context_switch:
 	/* handler mode exit, to PendSV */
 	bl z_arm_pendsv
-	mov r0, #RET_FROM_SVC
 	b z_arm_int_exit
 _oops:

@@ -31,9 +31,6 @@
 #define FPEXC_EN (1 << 30)
-#define RET_FROM_SVC 0
-#define RET_FROM_IRQ 1
 #define __ISB() __asm__ volatile ("isb sy" : : : "memory")
 #define __DMB() __asm__ volatile ("dmb sy" : : : "memory")