author		Ralf Baechle <ralf@linux-mips.org>	2006-05-03 20:42:39 +0100
committer	Ralf Baechle <ralf@linux-mips.org>	2006-06-01 00:28:31 +0100
commit		6ee1da94c5fed95bacce3eda8e6d9e69324ecab7 (patch)
tree		730d377f59f66434b479f83b24d2ebad74945705 /include/asm-mips
parent		235a9d3eee9a9588c17d39efff8373d0513549b5 (diff)
[MIPS] Update/fix futex assembly
o Implement the futex_atomic_op_inuser() operation.
o Don't use the R10000 ll/sc bug workaround version for every processor:
  branch-likely is deprecated, some historic ll/sc processors don't
  implement it, and in any case it is slow.

Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
Diffstat (limited to 'include/asm-mips')
-rw-r--r--	include/asm-mips/futex.h	141
1 file changed, 116 insertions(+), 25 deletions(-)
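For readers less used to MIPS ll/sc, the retry loop that __futex_atomic_op() emits in the patch below has, in plain C terms, roughly the semantics of this stand-alone sketch (illustration only: it uses a GCC atomic builtin for the FUTEX_OP_ADD case instead of the ll/sc pair, and futex_add_illustration is a hypothetical name, not part of the patch):

	/* Illustration: the effect of __futex_atomic_op("addu $1, %1, %z4", ...)
	 * expressed as a compare-and-swap retry loop instead of ll/sc. */
	static int futex_add_illustration(int *uaddr, int oparg)
	{
		int oldval = *uaddr;

		/* Keep retrying until the store "sticks", like sc + beqz(l) 1b. */
		while (!__atomic_compare_exchange_n(uaddr, &oldval, oldval + oparg,
						    0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST))
			;

		return oldval;	/* the value observed before the update, i.e. %1/oldval */
	}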
diff --git a/include/asm-mips/futex.h b/include/asm-mips/futex.h
index a554089991f2..12d118f1bc9c 100644
--- a/include/asm-mips/futex.h
+++ b/include/asm-mips/futex.h
@@ -7,6 +7,7 @@
#include <linux/futex.h>
#include <asm/errno.h>
#include <asm/uaccess.h>
+#include <asm/war.h>
#ifdef CONFIG_SMP
#define __FUTEX_SMP_SYNC " sync \n"
@@ -16,30 +17,58 @@
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \
{ \
- __asm__ __volatile__( \
- " .set push \n" \
- " .set noat \n" \
- " .set mips3 \n" \
- "1: ll %1, (%3) # __futex_atomic_op1 \n" \
- " .set mips0 \n" \
- " " insn " \n" \
- " .set mips3 \n" \
- "2: sc $1, (%3) \n" \
- " beqzl $1, 1b \n" \
- __FUTEX_SMP_SYNC \
- "3: \n" \
- " .set pop \n" \
- " .set mips0 \n" \
- " .section .fixup,\"ax\" \n" \
- "4: li %0, %5 \n" \
- " j 2b \n" \
- " .previous \n" \
- " .section __ex_table,\"a\" \n" \
- " "__UA_ADDR "\t1b, 4b \n" \
- " "__UA_ADDR "\t2b, 4b \n" \
- " .previous \n" \
- : "=r" (ret), "=r" (oldval) \
- : "0" (0), "r" (uaddr), "Jr" (oparg), "i" (-EFAULT)); \
+ if (cpu_has_llsc && R10000_LLSC_WAR) { \
+ __asm__ __volatile__( \
+ " .set push \n" \
+ " .set noat \n" \
+ " .set mips3 \n" \
+ "1: ll %1, (%3) # __futex_atomic_op \n" \
+ " .set mips0 \n" \
+ " " insn " \n" \
+ " .set mips3 \n" \
+ "2: sc $1, (%3) \n" \
+ " beqzl $1, 1b \n" \
+ __FUTEX_SMP_SYNC \
+ "3: \n" \
+ " .set pop \n" \
+ " .set mips0 \n" \
+ " .section .fixup,\"ax\" \n" \
+ "4: li %0, %5 \n" \
+ " j 2b \n" \
+ " .previous \n" \
+ " .section __ex_table,\"a\" \n" \
+ " "__UA_ADDR "\t1b, 4b \n" \
+ " "__UA_ADDR "\t2b, 4b \n" \
+ " .previous \n" \
+ : "=r" (ret), "=r" (oldval) \
+ : "0" (0), "r" (uaddr), "Jr" (oparg), "i" (-EFAULT)); \
+ } else if (cpu_has_llsc) { \
+ __asm__ __volatile__( \
+ " .set push \n" \
+ " .set noat \n" \
+ " .set mips3 \n" \
+ "1: ll %1, (%3) # __futex_atomic_op \n" \
+ " .set mips0 \n" \
+ " " insn " \n" \
+ " .set mips3 \n" \
+ "2: sc $1, (%3) \n" \
+ " beqz $1, 1b \n" \
+ __FUTEX_SMP_SYNC \
+ "3: \n" \
+ " .set pop \n" \
+ " .set mips0 \n" \
+ " .section .fixup,\"ax\" \n" \
+ "4: li %0, %5 \n" \
+ " j 2b \n" \
+ " .previous \n" \
+ " .section __ex_table,\"a\" \n" \
+ " "__UA_ADDR "\t1b, 4b \n" \
+ " "__UA_ADDR "\t2b, 4b \n" \
+ " .previous \n" \
+ : "=r" (ret), "=r" (oldval) \
+ : "0" (0), "r" (uaddr), "Jr" (oparg), "i" (-EFAULT)); \
+ } else \
+ ret = -ENOSYS; \
}
static inline int
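The body of futex_atomic_op_inuser() itself falls outside the hunks shown here. On top of the macro above, such a function typically decodes encoded_op and dispatches one MIPS instruction per futex opcode, along these lines (a sketch following the generic futex pattern of the period, with the %1/%z4 operand numbers taken from the macro's constraint list; not necessarily the exact code of this commit):

	static inline int
	futex_atomic_op_inuser(int encoded_op, int __user *uaddr)
	{
		int op = (encoded_op >> 28) & 7;
		int cmp = (encoded_op >> 24) & 15;
		int oparg = (encoded_op << 8) >> 20;
		int cmparg = (encoded_op << 20) >> 20;
		int oldval = 0, ret;

		if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
			oparg = 1 << oparg;

		if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
			return -EFAULT;

		inc_preempt_count();

		switch (op) {
		case FUTEX_OP_SET:
			__futex_atomic_op("move $1, %z4", ret, oldval, uaddr, oparg);
			break;
		case FUTEX_OP_ADD:
			__futex_atomic_op("addu $1, %1, %z4", ret, oldval, uaddr, oparg);
			break;
		case FUTEX_OP_OR:
			__futex_atomic_op("or $1, %1, %z4", ret, oldval, uaddr, oparg);
			break;
		case FUTEX_OP_ANDN:
			__futex_atomic_op("and $1, %1, %z4", ret, oldval, uaddr, ~oparg);
			break;
		case FUTEX_OP_XOR:
			__futex_atomic_op("xor $1, %1, %z4", ret, oldval, uaddr, oparg);
			break;
		default:
			ret = -ENOSYS;
		}

		dec_preempt_count();

		if (!ret) {
			/* Compare the pre-update value against cmparg as requested. */
			switch (cmp) {
			case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
			case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
			case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
			case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
			case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
			case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
			default: ret = -ENOSYS;
			}
		}
		return ret;
	}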
@@ -102,7 +131,69 @@ futex_atomic_op_inuser (int encoded_op, int __user *uaddr)
static inline int
futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
{
- return -ENOSYS;
+ int retval;
+
+ if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
+ return -EFAULT;
+
+ if (cpu_has_llsc && R10000_LLSC_WAR) {
+ __asm__ __volatile__(
+ "# futex_atomic_cmpxchg_inatomic \n"
+ " .set push \n"
+ " .set noat \n"
+ " .set mips3 \n"
+ "1: ll %0, %2 \n"
+ " bne %0, %z3, 3f \n"
+ " .set mips0 \n"
+ " move $1, %z4 \n"
+ " .set mips3 \n"
+ "2: sc $1, %1 \n"
+ " beqzl $1, 1b \n"
+ __FUTEX_SMP_SYNC
+ "3: \n"
+ " .set pop \n"
+ " .section .fixup,\"ax\" \n"
+ "4: li %0, %5 \n"
+ " j 3b \n"
+ " .previous \n"
+ " .section __ex_table,\"a\" \n"
+ " "__UA_ADDR "\t1b, 4b \n"
+ " "__UA_ADDR "\t2b, 4b \n"
+ " .previous \n"
+ : "=&r" (retval), "=R" (*uaddr)
+ : "R" (*uaddr), "Jr" (oldval), "Jr" (newval), "i" (-EFAULT)
+ : "memory");
+ } else if (cpu_has_llsc) {
+ __asm__ __volatile__(
+ "# futex_atomic_cmpxchg_inatomic \n"
+ " .set push \n"
+ " .set noat \n"
+ " .set mips3 \n"
+ "1: ll %0, %2 \n"
+ " bne %0, %z3, 3f \n"
+ " .set mips0 \n"
+ " move $1, %z4 \n"
+ " .set mips3 \n"
+ "2: sc $1, %1 \n"
+ " beqz $1, 1b \n"
+ __FUTEX_SMP_SYNC
+ "3: \n"
+ " .set pop \n"
+ " .section .fixup,\"ax\" \n"
+ "4: li %0, %5 \n"
+ " j 3b \n"
+ " .previous \n"
+ " .section __ex_table,\"a\" \n"
+ " "__UA_ADDR "\t1b, 4b \n"
+ " "__UA_ADDR "\t2b, 4b \n"
+ " .previous \n"
+ : "=&r" (retval), "=R" (*uaddr)
+ : "R" (*uaddr), "Jr" (oldval), "Jr" (newval), "i" (-EFAULT)
+ : "memory");
+ } else
+ return -ENOSYS;
+
+ return retval;
}
#endif
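With the -ENOSYS stub gone, futex_atomic_cmpxchg_inatomic() returns the value previously held at *uaddr, with -EFAULT folded into the same return value when the access_ok() check or the fixup path fires. A caller in the generic futex code would use it roughly as follows (a hedged sketch with hypothetical names, not the actual kernel/futex.c source):

	/* Sketch: try to claim an unowned futex word for thread 'tid'. */
	static int try_claim_futex(int __user *uaddr, int tid)
	{
		int curval;

		/* Install tid only if the word still reads 0 (unowned). */
		curval = futex_atomic_cmpxchg_inatomic(uaddr, 0, tid);
		if (curval == -EFAULT)
			return -EFAULT;		/* the user page went away under us */

		return curval == 0 ? 0 : -EAGAIN;	/* lost the race to another task */
	}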