#ifndef _ASM_HEXAGON_FUTEX_H
#define _ASM_HEXAGON_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>
/* XXX TODO-- need to add sync barriers! */
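
/*
 * __futex_atomic_op() does a read-modify-write on a word in user memory
 * using Hexagon's load-locked/store-conditional pair: memw_locked(Rs)
 * loads the word and takes out a reservation, and memw_locked(Rs,Pd)=Rt
 * stores only if the reservation is still held, setting predicate p2 on
 * success; on failure the sequence retries from label 1.  The accesses
 * at labels 1 and 2 can fault on a bad user address, so the __ex_table
 * entries send such faults to the fixup code at label 4, which makes
 * "ret" -EFAULT.
 */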
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \
	__asm__ __volatile__( \
	"1: %0 = memw_locked(%3);\n" \
	    /* For example: %1 = %4 */ \
	    insn \
	"2: memw_locked(%3,p2) = %1;\n" \
	"   if !p2 jump 1b;\n" \
	"   %1 = #0;\n" \
	"3:\n" \
	".section .fixup,\"ax\"\n" \
	"4: %1 = #%5;\n" \
	"   jump 3b\n" \
	".previous\n" \
	".section __ex_table,\"a\"\n" \
	".long 1b,4b,2b,4b\n" \
	".previous\n" \
	: "=&r" (oldval), "=&r" (ret), "+m" (*uaddr) \
	: "r" (uaddr), "r" (oparg), "i" (-EFAULT) \
	: "p2", "memory")
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
{
	int oldval = 0, ret;

	pagefault_disable();

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("%1 = %4\n", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("%1 = add(%0,%4)\n", ret, oldval, uaddr,
				  oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("%1 = or(%0,%4)\n", ret, oldval, uaddr,
				  oparg);
		break;
	case FUTEX_OP_ANDN:
		/* newval = oldval & ~oparg: invert oparg, then and */
		__futex_atomic_op("%1 = not(%4); %1 = and(%0,%1)\n", ret,
				  oldval, uaddr, oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("%1 = xor(%0,%4)\n", ret, oldval, uaddr,
				  oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();

	if (!ret)
		*oval = oldval;

	return ret;
}
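
/*
 * Atomically replace the user word at uaddr with newval only if it
 * currently equals oldval.  The value actually observed is always
 * passed back through *uval, so the caller detects success by checking
 * *uval == oldval.  Returns 0, or -EFAULT on a bad user address.
 */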
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, u32 oldval,
			      u32 newval)
{
	int prev;
	int ret = 0;	/* only the fixup path below writes this */

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;
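
	/*
	 * access_ok() only range-checks the pointer; the page may still
	 * be missing, in which case the exception-table fixup below
	 * turns the fault into -EFAULT.
	 */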
	__asm__ __volatile__ (
83 "1: %1 = memw_locked(%3)\n"
85 " p2 = cmp.eq(%1,%4)\n"
86 " if !p2.new jump:NT 3f\n"
88 "2: memw_locked(%3,p2) = %5\n"
91 ".section .fixup,\"ax\"\n"
95 ".section __ex_table,\"a\"\n"
98 : "+r" (ret), "=&r" (prev), "+m" (*uaddr)
99 : "r" (uaddr), "r" (oldval), "r" (newval), "i"(-EFAULT)
#endif /* __KERNEL__ */
#endif /* _ASM_HEXAGON_FUTEX_H */