path: root/arch/arm64
author     Mark Rutland <mark.rutland@arm.com>    2018-12-07 18:08:20 +0000
committer  Will Deacon <will.deacon@arm.com>      2018-12-10 11:50:11 +0000
commit     56c08ec5162c7cf87632c59714ac625e7d54106a (patch)
tree       2a9604991e807eab6ab57a16069d31a4382fb0c0 /arch/arm64
parent     50fdecb292e0b2d98b0b2bf319a4faa1aa7d666f (diff)
arm64: uaccess: use asm EXPORT_SYMBOL()
For a while now it's been possible to use EXPORT_SYMBOL() in assembly
files, which allows us to place exports immediately after assembly
functions, as we do for C functions.

As a step towards removing arm64ksyms.c, let's move the uaccess exports
to the assembly files the functions are defined in.

As we have to include <asm/assembler.h>, the existing includes are fixed
to follow the usual ordering conventions.

There should be no functional change as a result of this patch.

Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Cc: Will Deacon <will.deacon@arm.com>
Cc: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: Will Deacon <will.deacon@arm.com>
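For reference, a minimal sketch of the pattern this patch applies, not part of
the patch itself: my_asm_helper is a hypothetical function, and the asm
EXPORT_SYMBOL() definition is assumed to come in via <asm/assembler.h>, as the
commit message implies.

    /* Hypothetical .S file: export a symbol next to its definition. */
    #include <linux/linkage.h>
    #include <asm/assembler.h>      /* assumed to provide asm EXPORT_SYMBOL(), per the commit message */

            .text
    ENTRY(my_asm_helper)            // hypothetical assembly function
            mov     x0, #0
            ret
    ENDPROC(my_asm_helper)
    EXPORT_SYMBOL(my_asm_helper)    // replaces the equivalent EXPORT_SYMBOL() line in arm64ksyms.c

With the export placed in the .S file, the corresponding EXPORT_SYMBOL() entry
in arm64ksyms.c can be dropped, which is exactly what the diff below does for
the uaccess routines.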
Diffstat (limited to 'arch/arm64')
-rw-r--r--  arch/arm64/kernel/arm64ksyms.c    7
-rw-r--r--  arch/arm64/lib/clear_user.S       2
-rw-r--r--  arch/arm64/lib/copy_from_user.S   4
-rw-r--r--  arch/arm64/lib/copy_in_user.S     4
-rw-r--r--  arch/arm64/lib/copy_to_user.S     4
5 files changed, 11 insertions(+), 10 deletions(-)
diff --git a/arch/arm64/kernel/arm64ksyms.c b/arch/arm64/kernel/arm64ksyms.c
index 784b257d9643..02019667e319 100644
--- a/arch/arm64/kernel/arm64ksyms.c
+++ b/arch/arm64/kernel/arm64ksyms.c
@@ -24,18 +24,11 @@
#include <linux/delay.h>
#include <linux/in6.h>
#include <linux/syscalls.h>
-#include <linux/uaccess.h>
#include <linux/io.h>
#include <linux/kprobes.h>
#include <asm/checksum.h>
- /* user mem (segment) */
-EXPORT_SYMBOL(__arch_copy_from_user);
-EXPORT_SYMBOL(__arch_copy_to_user);
-EXPORT_SYMBOL(__arch_clear_user);
-EXPORT_SYMBOL(__arch_copy_in_user);
-
/* string / mem functions */
#ifndef CONFIG_KASAN
EXPORT_SYMBOL(strchr);
diff --git a/arch/arm64/lib/clear_user.S b/arch/arm64/lib/clear_user.S
index 21ba0b29621b..feb225bd4b80 100644
--- a/arch/arm64/lib/clear_user.S
+++ b/arch/arm64/lib/clear_user.S
@@ -18,6 +18,7 @@
#include <linux/linkage.h>
#include <asm/asm-uaccess.h>
+#include <asm/assembler.h>
.text
@@ -53,6 +54,7 @@ uao_user_alternative 9f, strb, sttrb, wzr, x0, 0
uaccess_disable_not_uao x2, x3
ret
ENDPROC(__arch_clear_user)
+EXPORT_SYMBOL(__arch_clear_user)
.section .fixup,"ax"
.align 2
diff --git a/arch/arm64/lib/copy_from_user.S b/arch/arm64/lib/copy_from_user.S
index 20305d485046..dea6c762d52f 100644
--- a/arch/arm64/lib/copy_from_user.S
+++ b/arch/arm64/lib/copy_from_user.S
@@ -16,8 +16,9 @@
#include <linux/linkage.h>
-#include <asm/cache.h>
#include <asm/asm-uaccess.h>
+#include <asm/assembler.h>
+#include <asm/cache.h>
/*
* Copy from user space to a kernel buffer (alignment handled by the hardware)
@@ -71,6 +72,7 @@ ENTRY(__arch_copy_from_user)
mov x0, #0 // Nothing to copy
ret
ENDPROC(__arch_copy_from_user)
+EXPORT_SYMBOL(__arch_copy_from_user)
.section .fixup,"ax"
.align 2
diff --git a/arch/arm64/lib/copy_in_user.S b/arch/arm64/lib/copy_in_user.S
index 54b75deb1d16..a84227fbf716 100644
--- a/arch/arm64/lib/copy_in_user.S
+++ b/arch/arm64/lib/copy_in_user.S
@@ -18,8 +18,9 @@
#include <linux/linkage.h>
-#include <asm/cache.h>
#include <asm/asm-uaccess.h>
+#include <asm/assembler.h>
+#include <asm/cache.h>
/*
* Copy from user space to user space (alignment handled by the hardware)
@@ -73,6 +74,7 @@ ENTRY(__arch_copy_in_user)
mov x0, #0
ret
ENDPROC(__arch_copy_in_user)
+EXPORT_SYMBOL(__arch_copy_in_user)
.section .fixup,"ax"
.align 2
diff --git a/arch/arm64/lib/copy_to_user.S b/arch/arm64/lib/copy_to_user.S
index fda6172d6b88..ef44c7ca3ffb 100644
--- a/arch/arm64/lib/copy_to_user.S
+++ b/arch/arm64/lib/copy_to_user.S
@@ -16,8 +16,9 @@
#include <linux/linkage.h>
-#include <asm/cache.h>
#include <asm/asm-uaccess.h>
+#include <asm/assembler.h>
+#include <asm/cache.h>
/*
* Copy to user space from a kernel buffer (alignment handled by the hardware)
@@ -70,6 +71,7 @@ ENTRY(__arch_copy_to_user)
mov x0, #0
ret
ENDPROC(__arch_copy_to_user)
+EXPORT_SYMBOL(__arch_copy_to_user)
.section .fixup,"ax"
.align 2