Merge tag 'for-linus' of git://git.kernel.org/pub/scm/virt/kvm/kvm
[sfrench/cifs-2.6.git] / arch / arm / include / asm / futex.h
1 /* SPDX-License-Identifier: GPL-2.0 */
2 #ifndef _ASM_ARM_FUTEX_H
3 #define _ASM_ARM_FUTEX_H
4
5 #ifdef __KERNEL__
6
7 #include <linux/futex.h>
8 #include <linux/uaccess.h>
9 #include <asm/errno.h>
10
/*
 * Exception-table boilerplate shared by the futex assembly sequences below.
 *
 * Label 3 is the common exit point.  The __ex_table entries pair the
 * user-access instructions at local labels 1 and 2 with the fixup code at
 * label 4, which loads err_reg (callers pass "%5", bound to -EFAULT) into
 * operand %0 and branches back to label 3.  Users of this macro must
 * therefore place their faultable load at "1:", their faultable store at
 * "2:", and use %0 as the status/return operand.
 */
#define __futex_atomic_ex_table(err_reg)                        \
        "3:\n"                                                  \
        "       .pushsection __ex_table,\"a\"\n"                \
        "       .align  3\n"                                    \
        "       .long   1b, 4f, 2b, 4f\n"                       \
        "       .popsection\n"                                  \
        "       .pushsection .text.fixup,\"ax\"\n"              \
        "       .align  2\n"                                    \
        "4:     mov     %0, " err_reg "\n"                      \
        "       b       3b\n"                                   \
        "       .popsection"
22
23 #ifdef CONFIG_SMP
24
/*
 * SMP read-modify-write on the user futex word at uaddr, built on an
 * LDREX/STREX retry loop.
 *
 * "insn" is an assembly template that computes the new value into %0 (ret)
 * from the old value in %1 (oldval) and the operand in %4 (oparg).  On a
 * successful store, ret is forced to 0; if the load at "1:" or the store at
 * "2:" faults, the __futex_atomic_ex_table fixup sets ret to -EFAULT (%5).
 * oldval receives the value read from the user word.  User access is
 * enabled only for the duration of the asm sequence.
 */
#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg) \
({                                                              \
        unsigned int __ua_flags;                                \
        smp_mb();                                               \
        prefetchw(uaddr);       /* prefetch for write; cannot fault */  \
        __ua_flags = uaccess_save_and_enable();                 \
        __asm__ __volatile__(                                   \
        "1:     ldrex   %1, [%3]\n"                             \
        "       " insn "\n"                                     \
        "2:     strex   %2, %0, [%3]\n"                         \
        "       teq     %2, #0\n"                               \
        "       bne     1b\n"      /* strex failed: retry */    \
        "       mov     %0, #0\n"  /* success: ret = 0 */       \
        __futex_atomic_ex_table("%5")                           \
        : "=&r" (ret), "=&r" (oldval), "=&r" (tmp)              \
        : "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)              \
        : "cc", "memory");                                      \
        uaccess_restore(__ua_flags);                            \
})
44
/*
 * futex_atomic_cmpxchg_inatomic - compare-and-exchange a user futex word (SMP)
 * @uval:   out: value read from *uaddr (always written on the non-fault path)
 * @uaddr:  user address of the futex word
 * @oldval: value the word is expected to hold
 * @newval: value stored iff *uaddr == @oldval
 *
 * Implemented as an LDREX/STREX loop: the store only executes when the
 * comparison succeeds, and the loop retries while the STREX status is
 * non-zero (either the exclusive store failed or, via movne, the retry
 * path was not taken).  Returns 0 on completion or -EFAULT if the user
 * access faults (fixed up by __futex_atomic_ex_table).
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
                              u32 oldval, u32 newval)
{
        unsigned int __ua_flags;
        int ret;
        u32 val;

        if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
                return -EFAULT;

        smp_mb();       /* full barrier before the atomic sequence */
        /* Prefetching cannot fault */
        prefetchw(uaddr);
        __ua_flags = uaccess_save_and_enable();
        __asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
        "1:     ldrex   %1, [%4]\n"
        "       teq     %1, %2\n"
        "       ite     eq      @ explicit IT needed for the 2b label\n"
        "2:     strexeq %0, %3, [%4]\n"
        "       movne   %0, #0\n"
        "       teq     %0, #0\n"
        "       bne     1b\n"
        __futex_atomic_ex_table("%5")
        : "=&r" (ret), "=&r" (val)
        : "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
        : "cc", "memory");
        uaccess_restore(__ua_flags);
        smp_mb();       /* full barrier after the atomic sequence */

        *uval = val;
        return ret;
}
78
79 #else /* !SMP, we can work around lack of atomic ops by disabling preemption */
80
81 #include <linux/preempt.h>
82 #include <asm/domain.h>
83
/*
 * UP read-modify-write on the user futex word at uaddr.
 *
 * No exclusive instructions are needed here: the callers in this file
 * (see arch_futex_atomic_op_inuser) wrap the sequence in
 * preempt_disable()/preempt_enable() on !SMP, which is what makes it
 * atomic.  The TUSER() variants perform the load/store with user-mode
 * privileges.  Operand usage matches the SMP variant: "insn" computes
 * the new value into %0 (ret) from %1 (oldval) and %4 (oparg); a fault
 * at "1:" or "2:" is fixed up to ret = -EFAULT (%5), otherwise ret = 0.
 */
#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg) \
({                                                              \
        unsigned int __ua_flags = uaccess_save_and_enable();    \
        __asm__ __volatile__(                                   \
        "1:     " TUSER(ldr) "  %1, [%3]\n"                     \
        "       " insn "\n"                                     \
        "2:     " TUSER(str) "  %0, [%3]\n"                     \
        "       mov     %0, #0\n"  /* success: ret = 0 */       \
        __futex_atomic_ex_table("%5")                           \
        : "=&r" (ret), "=&r" (oldval), "=&r" (tmp)              \
        : "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)              \
        : "cc", "memory");                                      \
        uaccess_restore(__ua_flags);                            \
})
98
/*
 * futex_atomic_cmpxchg_inatomic - compare-and-exchange a user futex word (UP)
 * @uval:   out: value read from *uaddr (always written on the non-fault path)
 * @uaddr:  user address of the futex word
 * @oldval: value the word is expected to hold
 * @newval: value stored iff *uaddr == @oldval
 *
 * On !SMP, disabling preemption around the load/compare/store sequence is
 * sufficient for atomicity, so plain TUSER() user accesses are used
 * instead of LDREX/STREX.  ret starts at 0 and is only overwritten (with
 * -EFAULT) by the __futex_atomic_ex_table fixup if a user access faults;
 * the conditional streq simply does not execute on a compare mismatch.
 * Returns 0 on success or -EFAULT on a faulting access.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
                              u32 oldval, u32 newval)
{
        unsigned int __ua_flags;
        int ret = 0;    /* only set non-zero by the fault fixup */
        u32 val;

        if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
                return -EFAULT;

        preempt_disable();
        __ua_flags = uaccess_save_and_enable();
        __asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
        "1:     " TUSER(ldr) "  %1, [%4]\n"
        "       teq     %1, %2\n"
        "       it      eq      @ explicit IT needed for the 2b label\n"
        "2:     " TUSER(streq) "        %3, [%4]\n"
        __futex_atomic_ex_table("%5")
        : "+r" (ret), "=&r" (val)
        : "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
        : "cc", "memory");
        uaccess_restore(__ua_flags);

        *uval = val;
        preempt_enable();

        return ret;
}
128
129 #endif /* !SMP */
130
131 static inline int
132 arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
133 {
134         int oldval = 0, ret, tmp;
135
136 #ifndef CONFIG_SMP
137         preempt_disable();
138 #endif
139         pagefault_disable();
140
141         switch (op) {
142         case FUTEX_OP_SET:
143                 __futex_atomic_op("mov  %0, %4", ret, oldval, tmp, uaddr, oparg);
144                 break;
145         case FUTEX_OP_ADD:
146                 __futex_atomic_op("add  %0, %1, %4", ret, oldval, tmp, uaddr, oparg);
147                 break;
148         case FUTEX_OP_OR:
149                 __futex_atomic_op("orr  %0, %1, %4", ret, oldval, tmp, uaddr, oparg);
150                 break;
151         case FUTEX_OP_ANDN:
152                 __futex_atomic_op("and  %0, %1, %4", ret, oldval, tmp, uaddr, ~oparg);
153                 break;
154         case FUTEX_OP_XOR:
155                 __futex_atomic_op("eor  %0, %1, %4", ret, oldval, tmp, uaddr, oparg);
156                 break;
157         default:
158                 ret = -ENOSYS;
159         }
160
161         pagefault_enable();
162 #ifndef CONFIG_SMP
163         preempt_enable();
164 #endif
165
166         if (!ret)
167                 *oval = oldval;
168
169         return ret;
170 }
171
172 #endif /* __KERNEL__ */
173 #endif /* _ASM_ARM_FUTEX_H */