diff --git a/arch/arm64/kernel/Makefile b/arch/arm64/kernel/Makefile
index 30618ec8715c..dccca8af71da 100644
--- a/arch/arm64/kernel/Makefile
+++ b/arch/arm64/kernel/Makefile
@@ -30,8 +30,10 @@ OBJCOPYFLAGS := --prefix-symbols=__efistub_
 $(obj)/%.stub.o: $(obj)/%.o FORCE
 	$(call if_changed,objcopy)
 
-arm64-obj-$(CONFIG_COMPAT)		+= sys32.o kuser32.o signal32.o		\
+arm64-obj-$(CONFIG_COMPAT)		+= sys32.o signal32.o			\
 					   sys_compat.o entry32.o
+arm64-obj-$(CONFIG_COMPAT)		+= sigreturn32.o
+arm64-obj-$(CONFIG_COMPAT)		+= kuser32.o
 arm64-obj-$(CONFIG_FUNCTION_TRACER)	+= ftrace.o entry-ftrace.o
 arm64-obj-$(CONFIG_MODULES)		+= arm64ksyms.o module.o
 arm64-obj-$(CONFIG_ARM64_MODULE_PLTS)	+= module-plts.o
diff --git a/arch/arm64/kernel/kuser32.S b/arch/arm64/kernel/kuser32.S
index 997e6b27ff6a..d15b5c2935b3 100644
--- a/arch/arm64/kernel/kuser32.S
+++ b/arch/arm64/kernel/kuser32.S
@@ -20,16 +20,13 @@
  *
  * AArch32 user helpers.
  *
- * Each segment is 32-byte aligned and will be moved to the top of the high
- * vector page. New segments (if ever needed) must be added in front of
- * existing ones. This mechanism should be used only for things that are
- * really small and justified, and not be abused freely.
+ * These helpers are provided for compatibility with AArch32 binaries that
+ * still need them. They are installed at a fixed address by
+ * aarch32_setup_additional_pages().
  *
  * See Documentation/arm/kernel_user_helpers.txt for formal definitions.
  */
 
-#include <asm/unistd.h>
-
 	.align	5
 	.globl	__kuser_helper_start
 __kuser_helper_start:
@@ -77,42 +74,3 @@ __kuser_helper_version:			// 0xffff0ffc
 	.word	((__kuser_helper_end - __kuser_helper_start) >> 5)
 	.globl	__kuser_helper_end
 __kuser_helper_end:
-
-/*
- * AArch32 sigreturn code
- *
- * For ARM syscalls, the syscall number has to be loaded into r7.
- * We do not support an OABI userspace.
- *
- * For Thumb syscalls, we also pass the syscall number via r7. We therefore
- * need two 16-bit instructions.
- */
-	.globl __aarch32_sigret_code_start
-__aarch32_sigret_code_start:
-
-	/*
-	 * ARM Code
-	 */
-	.byte	__NR_compat_sigreturn, 0x70, 0xa0, 0xe3		// mov	r7, #__NR_compat_sigreturn
-	.byte	__NR_compat_sigreturn, 0x00, 0x00, 0xef		// svc	#__NR_compat_sigreturn
-
-	/*
-	 * Thumb code
-	 */
-	.byte	__NR_compat_sigreturn, 0x27			// svc	#__NR_compat_sigreturn
-	.byte	__NR_compat_sigreturn, 0xdf			// mov	r7, #__NR_compat_sigreturn
-
-	/*
-	 * ARM code
-	 */
-	.byte	__NR_compat_rt_sigreturn, 0x70, 0xa0, 0xe3	// mov	r7, #__NR_compat_rt_sigreturn
-	.byte	__NR_compat_rt_sigreturn, 0x00, 0x00, 0xef	// svc	#__NR_compat_rt_sigreturn
-
-	/*
-	 * Thumb code
-	 */
-	.byte	__NR_compat_rt_sigreturn, 0x27			// svc	#__NR_compat_rt_sigreturn
-	.byte	__NR_compat_rt_sigreturn, 0xdf			// mov	r7, #__NR_compat_rt_sigreturn
-
-	.globl __aarch32_sigret_code_end
-__aarch32_sigret_code_end:
diff --git a/arch/arm64/kernel/sigreturn32.S b/arch/arm64/kernel/sigreturn32.S
new file mode 100644
index 000000000000..6ecda4d84cd5
--- /dev/null
+++ b/arch/arm64/kernel/sigreturn32.S
@@ -0,0 +1,67 @@
+/*
+ * sigreturn trampolines for AArch32.
+ *
+ * Copyright (C) 2005-2011 Nicolas Pitre
+ * Copyright (C) 2012 ARM Ltd.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <http://www.gnu.org/licenses/>.
+ *
+ *
+ * AArch32 sigreturn code
+ *
+ * For ARM syscalls, the syscall number has to be loaded into r7.
+ * We do not support an OABI userspace.
+ *
+ * For Thumb syscalls, we also pass the syscall number via r7. We therefore
+ * need two 16-bit instructions.
+ */
+
+#include <asm/unistd.h>
+
+	.globl __aarch32_sigret_code_start
+__aarch32_sigret_code_start:
+
+	/*
+	 * ARM Code
+	 */
+	// mov r7, #__NR_compat_sigreturn
+	.byte __NR_compat_sigreturn, 0x70, 0xa0, 0xe3
+	// svc #__NR_compat_sigreturn
+	.byte __NR_compat_sigreturn, 0x00, 0x00, 0xef
+
+	/*
+	 * Thumb code
+	 */
+	// svc #__NR_compat_sigreturn
+	.byte __NR_compat_sigreturn, 0x27
+	// mov r7, #__NR_compat_sigreturn
+	.byte __NR_compat_sigreturn, 0xdf
+
+	/*
+	 * ARM code
+	 */
+	// mov r7, #__NR_compat_rt_sigreturn
+	.byte __NR_compat_rt_sigreturn, 0x70, 0xa0, 0xe3
+	// svc #__NR_compat_rt_sigreturn
+	.byte __NR_compat_rt_sigreturn, 0x00, 0x00, 0xef
+
+	/*
+	 * Thumb code
+	 */
+	// svc #__NR_compat_rt_sigreturn
+	.byte __NR_compat_rt_sigreturn, 0x27
+	// mov r7, #__NR_compat_rt_sigreturn
+	.byte __NR_compat_rt_sigreturn, 0xdf
+
+	.globl __aarch32_sigret_code_end
+__aarch32_sigret_code_end:
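For reference, the .byte directives above are simply the little-endian byte layouts of the underlying instruction words: the A32 mov r7/svc pair and the two 16-bit Thumb encodings. The standalone C sketch below is not part of the patch; it uses hard-coded stand-ins for the __NR_compat_* constants that the kernel normally takes from <asm/unistd.h>, and only prints those byte layouts so they can be compared against the trampolines.

/*
 * Illustrative sketch only: decompose the A32/T32 instruction words used by
 * the AArch32 sigreturn trampolines into the little-endian byte sequences
 * that the .byte directives in sigreturn32.S spell out.  The syscall numbers
 * below are placeholders for __NR_compat_sigreturn / __NR_compat_rt_sigreturn.
 */
#include <stdint.h>
#include <stdio.h>

#define NR_SIGRETURN		119	/* placeholder for __NR_compat_sigreturn */
#define NR_RT_SIGRETURN		173	/* placeholder for __NR_compat_rt_sigreturn */

/* Print an instruction word as the byte sequence a .byte directive would emit. */
static void dump_bytes(const char *what, uint32_t word, int nbytes)
{
	printf("%-20s", what);
	for (int i = 0; i < nbytes; i++)
		printf(" 0x%02x", (unsigned int)((word >> (8 * i)) & 0xff));
	printf("\n");
}

static void dump_trampoline(const char *name, unsigned int nr)
{
	printf("%s (nr = %u):\n", name, nr);

	/* A32: "mov r7, #nr" is 0xe3a07000 | nr, "svc #nr" is 0xef000000 | nr */
	dump_bytes("  A32 mov r7, #nr", 0xe3a07000u | nr, 4);
	dump_bytes("  A32 svc #nr", 0xef000000u | nr, 4);

	/* T16: "movs r7, #nr" is 0x2700 | nr, "svc #nr" is 0xdf00 | nr */
	dump_bytes("  T16 movs r7, #nr", 0x2700u | nr, 2);
	dump_bytes("  T16 svc #nr", 0xdf00u | nr, 2);
}

int main(void)
{
	dump_trampoline("sigreturn", NR_SIGRETURN);
	dump_trampoline("rt_sigreturn", NR_RT_SIGRETURN);
	return 0;
}

Building this with any C compiler and running it reproduces the byte sequences written by the .byte lines, with the syscall number appearing as the first byte of each A32 word and of each 16-bit Thumb halfword.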