/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Atomic futex routines
 *
 * Based on the PowerPC implementation
 *
 * Copyright (C) 2013 TangoTec Ltd.
 *
 * Baruch Siach <baruch@tkos.co.il>
 */

#ifndef _ASM_XTENSA_FUTEX_H
#define _ASM_XTENSA_FUTEX_H

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <linux/errno.h>

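/*
 * __futex_atomic_op() wraps "insn" in a load-exclusive/store-exclusive
 * (l32ex/s32ex) or compare-and-swap (s32c1i) retry loop.  "insn" must
 * compute the new value in %[newval] from %[oldval] and %[oparg].
 * On success "ret" is 0 and "old" holds the previous value of *uaddr;
 * if the user access faults, the fixup code sets "ret" to -EFAULT.
 */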
#if XCHAL_HAVE_EXCLUSIVE
#define __futex_atomic_op(insn, ret, old, uaddr, arg)   \
        __asm__ __volatile(                             \
        "1:     l32ex   %[oldval], %[addr]\n"           \
                insn "\n"                               \
        "2:     s32ex   %[newval], %[addr]\n"           \
        "       getex   %[newval]\n"                    \
        "       beqz    %[newval], 1b\n"                \
        "       movi    %[newval], 0\n"                 \
        "3:\n"                                          \
        "       .section .fixup,\"ax\"\n"               \
        "       .align 4\n"                             \
        "       .literal_position\n"                    \
        "5:     movi    %[oldval], 3b\n"                \
        "       movi    %[newval], %[fault]\n"          \
        "       jx      %[oldval]\n"                    \
        "       .previous\n"                            \
        "       .section __ex_table,\"a\"\n"            \
        "       .long 1b, 5b, 2b, 5b\n"                 \
        "       .previous\n"                            \
        : [oldval] "=&r" (old), [newval] "=&r" (ret)    \
        : [addr] "r" (uaddr), [oparg] "r" (arg),        \
          [fault] "I" (-EFAULT)                         \
        : "memory")
#elif XCHAL_HAVE_S32C1I
#define __futex_atomic_op(insn, ret, old, uaddr, arg)   \
        __asm__ __volatile(                             \
        "1:     l32i    %[oldval], %[addr], 0\n"        \
                insn "\n"                               \
        "       wsr     %[oldval], scompare1\n"         \
        "2:     s32c1i  %[newval], %[addr], 0\n"        \
        "       bne     %[newval], %[oldval], 1b\n"     \
        "       movi    %[newval], 0\n"                 \
        "3:\n"                                          \
        "       .section .fixup,\"ax\"\n"               \
        "       .align 4\n"                             \
        "       .literal_position\n"                    \
        "5:     movi    %[oldval], 3b\n"                \
        "       movi    %[newval], %[fault]\n"          \
        "       jx      %[oldval]\n"                    \
        "       .previous\n"                            \
        "       .section __ex_table,\"a\"\n"            \
        "       .long 1b, 5b, 2b, 5b\n"                 \
        "       .previous\n"                            \
        : [oldval] "=&r" (old), [newval] "=&r" (ret)    \
        : [addr] "r" (uaddr), [oparg] "r" (arg),        \
          [fault] "I" (-EFAULT)                         \
        : "memory")
#endif

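/*
 * arch_futex_atomic_op_inuser() atomically applies "op" with "oparg" to the
 * user word at "uaddr" and, on success, returns the old value through *oval.
 * Page faults are disabled around the access, so an invalid user pointer
 * causes the exception fixup to return -EFAULT rather than fault in.
 */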
static inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
                u32 __user *uaddr)
{
#if XCHAL_HAVE_S32C1I || XCHAL_HAVE_EXCLUSIVE
        int oldval = 0, ret;

        pagefault_disable();

        switch (op) {
        case FUTEX_OP_SET:
                __futex_atomic_op("mov %[newval], %[oparg]",
                                  ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_ADD:
                __futex_atomic_op("add %[newval], %[oldval], %[oparg]",
                                  ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_OR:
                __futex_atomic_op("or %[newval], %[oldval], %[oparg]",
                                  ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_ANDN:
                __futex_atomic_op("and %[newval], %[oldval], %[oparg]",
                                  ret, oldval, uaddr, ~oparg);
                break;
        case FUTEX_OP_XOR:
                __futex_atomic_op("xor %[newval], %[oldval], %[oparg]",
                                  ret, oldval, uaddr, oparg);
                break;
        default:
                ret = -ENOSYS;
        }

        pagefault_enable();

        if (!ret)
                *oval = oldval;

        return ret;
#else
        return -ENOSYS;
#endif
}

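/*
 * futex_atomic_cmpxchg_inatomic() atomically replaces the user word at
 * "uaddr" with "newval" if it currently equals "oldval".  The value that
 * was observed at *uaddr is stored through "uval" in either case.
 * Returns 0 on success, or -EFAULT if the user access is invalid or faults.
 */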
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
                              u32 oldval, u32 newval)
{
#if XCHAL_HAVE_S32C1I || XCHAL_HAVE_EXCLUSIVE
        unsigned long tmp;
        int ret = 0;

        if (!access_ok(uaddr, sizeof(u32)))
                return -EFAULT;

        __asm__ __volatile__ (
        "       # futex_atomic_cmpxchg_inatomic\n"
#if XCHAL_HAVE_EXCLUSIVE
        "1:     l32ex   %[tmp], %[addr]\n"
        "       s32i    %[tmp], %[uval], 0\n"
        "       bne     %[tmp], %[oldval], 2f\n"
        "       mov     %[tmp], %[newval]\n"
        "3:     s32ex   %[tmp], %[addr]\n"
        "       getex   %[tmp]\n"
        "       beqz    %[tmp], 1b\n"
#elif XCHAL_HAVE_S32C1I
        "       wsr     %[oldval], scompare1\n"
        "1:     s32c1i  %[newval], %[addr], 0\n"
        "       s32i    %[newval], %[uval], 0\n"
#endif
        "2:\n"
        "       .section .fixup,\"ax\"\n"
        "       .align 4\n"
        "       .literal_position\n"
        "4:     movi    %[tmp], 2b\n"
        "       movi    %[ret], %[fault]\n"
        "       jx      %[tmp]\n"
        "       .previous\n"
        "       .section __ex_table,\"a\"\n"
        "       .long 1b, 4b\n"
#if XCHAL_HAVE_EXCLUSIVE
        "       .long 3b, 4b\n"
#endif
        "       .previous\n"
        : [ret] "+r" (ret), [newval] "+r" (newval), [tmp] "=&r" (tmp)
        : [addr] "r" (uaddr), [oldval] "r" (oldval), [uval] "r" (uval),
          [fault] "I" (-EFAULT)
        : "memory");

        return ret;
#else
        return -ENOSYS;
#endif
}

#endif /* _ASM_XTENSA_FUTEX_H */