From 9ec1c1a793b18af35d558e5830c076de93f6a6d5 Mon Sep 17 00:00:00 2001 From: Carlo Caione Date: Wed, 11 Nov 2020 12:41:37 +0100 Subject: [PATCH] aarch64: userspace: Introduce arch_user_string_nlen Introduce the arch_user_string_nlen() assembly routine and the necessary C code bits. Signed-off-by: Carlo Caione Signed-off-by: Nicolas Pitre --- arch/arm/core/aarch64/fatal.c | 22 ++++++++++++++++++- arch/arm/core/aarch64/userspace.S | 36 +++++++++++++++++++++++++++++++ 2 files changed, 57 insertions(+), 1 deletion(-) diff --git a/arch/arm/core/aarch64/fatal.c b/arch/arm/core/aarch64/fatal.c index 674de6842f1..a16d5219c3a 100644 --- a/arch/arm/core/aarch64/fatal.c +++ b/arch/arm/core/aarch64/fatal.c @@ -15,9 +15,18 @@ #include #include +#include LOG_MODULE_DECLARE(os, CONFIG_KERNEL_LOG_LEVEL); +#ifdef CONFIG_USERSPACE +Z_EXC_DECLARE(z_arm64_user_string_nlen); + +static const struct z_exc_handle exceptions[] = { + Z_EXC_HANDLE(z_arm64_user_string_nlen), +}; +#endif /* CONFIG_USERSPACE */ + #ifdef CONFIG_EXCEPTION_DEBUG static void dump_esr(uint64_t esr, bool *dump_far) { @@ -168,7 +177,18 @@ static bool is_recoverable(z_arch_esf_t *esf, uint64_t esr, uint64_t far, if (!esf) return false; +#ifdef CONFIG_USERSPACE + for (int i = 0; i < ARRAY_SIZE(exceptions); i++) { + /* Bounds of the recoverable (fault-tolerant) code range */ + uint64_t start = (uint64_t)exceptions[i].start; + uint64_t end = (uint64_t)exceptions[i].end; + + if (esf->elr >= start && esf->elr < end) { + esf->elr = (uint64_t)(exceptions[i].fixup); + return true; + } + } +#endif return false; } diff --git a/arch/arm/core/aarch64/userspace.S b/arch/arm/core/aarch64/userspace.S index 4b3371393e0..ae42734a722 100644 --- a/arch/arm/core/aarch64/userspace.S +++ b/arch/arm/core/aarch64/userspace.S @@ -13,6 +13,42 @@ _ASM_FILE_PROLOGUE +/* * size_t arch_user_string_nlen(const char *s, size_t maxsize, int *err_arg) */ +GTEXT(z_arm64_user_string_nlen_fault_start) +GTEXT(z_arm64_user_string_nlen_fault_end) 
+GTEXT(z_arm64_user_string_nlen_fixup) + +GTEXT(arch_user_string_nlen) +SECTION_FUNC(TEXT, arch_user_string_nlen) + + mov x3, x0 + mov x0, #0 + mov x4, #0 + +strlen_loop: + + cmp x0, x1 + beq strlen_done + +z_arm64_user_string_nlen_fault_start: + ldrb w5, [x3, x0] +z_arm64_user_string_nlen_fault_end: + cbz x5, strlen_done + + add x0, x0, #1 + b strlen_loop + +z_arm64_user_string_nlen_fixup: + mov x4, #-1 + mov x0, #0 + +strlen_done: + str w4, [x2] + ret + /* * Routine to jump into userspace *