include/asm-x86/futex.h: checkpatch cleanups - formatting only
author Joe Perches <joe@perches.com>
Sun, 23 Mar 2008 08:02:12 +0000 (01:02 -0700)
committer Ingo Molnar <mingo@elte.hu>
Thu, 17 Apr 2008 15:41:23 +0000 (17:41 +0200)
Signed-off-by: Joe Perches <joe@perches.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
include/asm-x86/futex.h

index c9952ea9f6980cdb3620470ff285c59e28f8acd2..ac0fbf24d722a36f3f0ee20e7060b5aace6bced1 100644 (file)
 #include <asm/uaccess.h>
 
 #define __futex_atomic_op1(insn, ret, oldval, uaddr, oparg)    \
-  __asm__ __volatile(                                          \
-"1:    " insn "\n"                                             \
-"2:    .section .fixup,\"ax\"\n                                \
-3:     mov     %3, %1\n                                        \
-       jmp     2b\n                                            \
-       .previous\n"                                            \
-       _ASM_EXTABLE(1b,3b)                                     \
-       : "=r" (oldval), "=r" (ret), "+m" (*uaddr)              \
-       : "i" (-EFAULT), "0" (oparg), "1" (0))
+       asm volatile("1:\t" insn "\n"                           \
+                    "2:\t.section .fixup,\"ax\"\n"             \
+                    "3:\tmov\t%3, %1\n"                        \
+                    "\tjmp\t2b\n"                              \
+                    "\t.previous\n"                            \
+                    _ASM_EXTABLE(1b, 3b)                       \
+                    : "=r" (oldval), "=r" (ret), "+m" (*uaddr) \
+                    : "i" (-EFAULT), "0" (oparg), "1" (0))
 
 #define __futex_atomic_op2(insn, ret, oldval, uaddr, oparg)    \
-  __asm__ __volatile(                                          \
-"1:    movl    %2, %0\n                                        \
-       movl    %0, %3\n"                                       \
-       insn "\n"                                               \
-"2:    lock; cmpxchgl %3, %2\n                                 \
-       jnz     1b\n                                            \
-3:     .section .fixup,\"ax\"\n                                \
-4:     mov     %5, %1\n                                        \
-       jmp     3b\n                                            \
-       .previous\n"                                            \
-       _ASM_EXTABLE(1b,4b)                                     \
-       _ASM_EXTABLE(2b,4b)                                     \
-       : "=&a" (oldval), "=&r" (ret), "+m" (*uaddr),           \
-         "=&r" (tem)                                           \
-       : "r" (oparg), "i" (-EFAULT), "1" (0))
-
-static inline int
-futex_atomic_op_inuser(int encoded_op, int __user *uaddr)
+       asm volatile("1:\tmovl  %2, %0\n"                       \
+                    "\tmovl\t%0, %3\n"                         \
+                    "\t" insn "\n"                             \
+                    "2:\tlock; cmpxchgl %3, %2\n"              \
+                    "\tjnz\t1b\n"                              \
+                    "3:\t.section .fixup,\"ax\"\n"             \
+                    "4:\tmov\t%5, %1\n"                        \
+                    "\tjmp\t3b\n"                              \
+                    "\t.previous\n"                            \
+                    _ASM_EXTABLE(1b, 4b)                       \
+                    _ASM_EXTABLE(2b, 4b)                       \
+                    : "=&a" (oldval), "=&r" (ret),             \
+                      "+m" (*uaddr), "=&r" (tem)               \
+                    : "r" (oparg), "i" (-EFAULT), "1" (0))
+
+static inline int futex_atomic_op_inuser(int encoded_op, int __user *uaddr)
 {
        int op = (encoded_op >> 28) & 7;
        int cmp = (encoded_op >> 24) & 15;
@@ -87,20 +84,33 @@ futex_atomic_op_inuser(int encoded_op, int __user *uaddr)
 
        if (!ret) {
                switch (cmp) {
-               case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
-               case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
-               case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
-               case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
-               case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
-               case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
-               default: ret = -ENOSYS;
+               case FUTEX_OP_CMP_EQ:
+                       ret = (oldval == cmparg);
+                       break;
+               case FUTEX_OP_CMP_NE:
+                       ret = (oldval != cmparg);
+                       break;
+               case FUTEX_OP_CMP_LT:
+                       ret = (oldval < cmparg);
+                       break;
+               case FUTEX_OP_CMP_GE:
+                       ret = (oldval >= cmparg);
+                       break;
+               case FUTEX_OP_CMP_LE:
+                       ret = (oldval <= cmparg);
+                       break;
+               case FUTEX_OP_CMP_GT:
+                       ret = (oldval > cmparg);
+                       break;
+               default:
+                       ret = -ENOSYS;
                }
        }
        return ret;
 }
 
-static inline int
-futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
+static inline int futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval,
+                                               int newval)
 {
 
 #if defined(CONFIG_X86_32) && !defined(CONFIG_X86_BSWAP)
@@ -112,16 +122,15 @@ futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
        if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
                return -EFAULT;
 
-       __asm__ __volatile__(
-               "1:     lock; cmpxchgl %3, %1                   \n"
-               "2:     .section .fixup, \"ax\"                 \n"
-               "3:     mov     %2, %0                          \n"
-               "       jmp     2b                              \n"
-               "       .previous                               \n"
-               _ASM_EXTABLE(1b,3b)
-               : "=a" (oldval), "+m" (*uaddr)
-               : "i" (-EFAULT), "r" (newval), "0" (oldval)
-               : "memory"
+       asm volatile("1:\tlock; cmpxchgl %3, %1\n"
+                    "2:\t.section .fixup, \"ax\"\n"
+                    "3:\tmov     %2, %0\n"
+                    "\tjmp     2b\n"
+                    "\t.previous\n"
+                    _ASM_EXTABLE(1b, 3b)
+                    : "=a" (oldval), "+m" (*uaddr)
+                    : "i" (-EFAULT), "r" (newval), "0" (oldval)
+                    : "memory"
        );
 
        return oldval;