arch/x86/include/asm/cmpxchg.h
#ifndef ASM_X86_CMPXCHG_H
#define ASM_X86_CMPXCHG_H

#include <linux/compiler.h>
#include <asm/cpufeatures.h>
#include <asm/alternative.h> /* Provides LOCK_PREFIX */

/*
 * Non-existent functions to indicate usage errors at link time
 * (or compile-time if the compiler implements __compiletime_error()).
 */
extern void __xchg_wrong_size(void)
        __compiletime_error("Bad argument size for xchg");
extern void __cmpxchg_wrong_size(void)
        __compiletime_error("Bad argument size for cmpxchg");
extern void __xadd_wrong_size(void)
        __compiletime_error("Bad argument size for xadd");
extern void __add_wrong_size(void)
        __compiletime_error("Bad argument size for add");

/*
 * Constants for operation sizes. On 32-bit, the 64-bit size is set to
 * -1 because sizeof will never return -1, thereby making those switch
 * case statements guaranteed dead code which the compiler will
 * eliminate, and allowing the "missing symbol in the default case" to
 * indicate a usage error.
 */
#define __X86_CASE_B    1
#define __X86_CASE_W    2
#define __X86_CASE_L    4
#ifdef CONFIG_64BIT
#define __X86_CASE_Q    8
#else
#define __X86_CASE_Q    -1              /* sizeof will never return -1 */
#endif

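/*
 * Illustrative sketch, not part of this header: an operand whose size
 * matches none of the __X86_CASE_* constants leaves only the default
 * branch of the switch statements below alive, so the reference to the
 * corresponding *_wrong_size() stub survives and the build fails with
 * the __compiletime_error() message. The struct and function names in
 * this sketch are hypothetical.
 *
 *      struct three_bytes { char b[3]; };
 *      static struct three_bytes blob;
 *
 *      void broken(struct three_bytes v)
 *      {
 *              xchg(&blob, v);         // fails: "Bad argument size for xchg"
 *      }
 */
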
/*
 * An exchange-type operation, which takes a value and a pointer, and
 * returns the old value.
 */
#define __xchg_op(ptr, arg, op, lock)                                   \
        ({                                                              \
                __typeof__ (*(ptr)) __ret = (arg);                      \
                switch (sizeof(*(ptr))) {                               \
                case __X86_CASE_B:                                      \
                        asm volatile (lock #op "b %b0, %1\n"            \
                                      : "+q" (__ret), "+m" (*(ptr))     \
                                      : : "memory", "cc");              \
                        break;                                          \
                case __X86_CASE_W:                                      \
                        asm volatile (lock #op "w %w0, %1\n"            \
                                      : "+r" (__ret), "+m" (*(ptr))     \
                                      : : "memory", "cc");              \
                        break;                                          \
                case __X86_CASE_L:                                      \
                        asm volatile (lock #op "l %0, %1\n"             \
                                      : "+r" (__ret), "+m" (*(ptr))     \
                                      : : "memory", "cc");              \
                        break;                                          \
                case __X86_CASE_Q:                                      \
                        asm volatile (lock #op "q %q0, %1\n"            \
                                      : "+r" (__ret), "+m" (*(ptr))     \
                                      : : "memory", "cc");              \
                        break;                                          \
                default:                                                \
                        __ ## op ## _wrong_size();                      \
                }                                                       \
                __ret;                                                  \
        })

/*
 * Note: no "lock" prefix even on SMP: xchg always implies lock anyway.
 * Since this is generally used to protect other memory information, we
 * use "asm volatile" and "memory" clobbers to prevent gcc from moving
 * information around.
 */
#define xchg(ptr, v)    __xchg_op((ptr), (v), xchg, "")

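/*
 * Illustrative sketch, not part of this header: xchg() atomically installs
 * a new value and hands back the previous one, e.g. to drain a word of
 * pending work. The variable and function names are hypothetical.
 *
 *      static unsigned long pending_mask;
 *
 *      unsigned long drain_pending(void)
 *      {
 *              // Read and clear in one atomic step; concurrent setters
 *              // either land before (and are returned here) or after
 *              // (and are seen by the next drain).
 *              return xchg(&pending_mask, 0UL);
 *      }
 */
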
/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
#define __raw_cmpxchg(ptr, old, new, size, lock)                        \
({                                                                      \
        __typeof__(*(ptr)) __ret;                                       \
        __typeof__(*(ptr)) __old = (old);                               \
        __typeof__(*(ptr)) __new = (new);                               \
        switch (size) {                                                 \
        case __X86_CASE_B:                                              \
        {                                                               \
                volatile u8 *__ptr = (volatile u8 *)(ptr);              \
                asm volatile(lock "cmpxchgb %2,%1"                      \
                             : "=a" (__ret), "+m" (*__ptr)              \
                             : "q" (__new), "0" (__old)                 \
                             : "memory");                               \
                break;                                                  \
        }                                                               \
        case __X86_CASE_W:                                              \
        {                                                               \
                volatile u16 *__ptr = (volatile u16 *)(ptr);            \
                asm volatile(lock "cmpxchgw %2,%1"                      \
                             : "=a" (__ret), "+m" (*__ptr)              \
                             : "r" (__new), "0" (__old)                 \
                             : "memory");                               \
                break;                                                  \
        }                                                               \
        case __X86_CASE_L:                                              \
        {                                                               \
                volatile u32 *__ptr = (volatile u32 *)(ptr);            \
                asm volatile(lock "cmpxchgl %2,%1"                      \
                             : "=a" (__ret), "+m" (*__ptr)              \
                             : "r" (__new), "0" (__old)                 \
                             : "memory");                               \
                break;                                                  \
        }                                                               \
        case __X86_CASE_Q:                                              \
        {                                                               \
                volatile u64 *__ptr = (volatile u64 *)(ptr);            \
                asm volatile(lock "cmpxchgq %2,%1"                      \
                             : "=a" (__ret), "+m" (*__ptr)              \
                             : "r" (__new), "0" (__old)                 \
                             : "memory");                               \
                break;                                                  \
        }                                                               \
        default:                                                        \
                __cmpxchg_wrong_size();                                 \
        }                                                               \
        __ret;                                                          \
})

#define __cmpxchg(ptr, old, new, size)                                  \
        __raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)

#define __sync_cmpxchg(ptr, old, new, size)                             \
        __raw_cmpxchg((ptr), (old), (new), (size), "lock; ")

#define __cmpxchg_local(ptr, old, new, size)                            \
        __raw_cmpxchg((ptr), (old), (new), (size), "")

#ifdef CONFIG_X86_32
# include <asm/cmpxchg_32.h>
#else
# include <asm/cmpxchg_64.h>
#endif

#define cmpxchg(ptr, old, new)                                          \
        __cmpxchg(ptr, old, new, sizeof(*(ptr)))

#define sync_cmpxchg(ptr, old, new)                                     \
        __sync_cmpxchg(ptr, old, new, sizeof(*(ptr)))

#define cmpxchg_local(ptr, old, new)                                    \
        __cmpxchg_local(ptr, old, new, sizeof(*(ptr)))

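/*
 * Illustrative sketch, not part of this header: the classic cmpxchg()
 * retry loop. The return value is compared with the expected old value to
 * detect success; on failure the freshly observed value is used for the
 * next attempt. Names are hypothetical.
 *
 *      static unsigned int counter;
 *
 *      void inc_unless_saturated(void)
 *      {
 *              unsigned int old;
 *
 *              do {
 *                      old = READ_ONCE(counter);
 *                      if (old == UINT_MAX)
 *                              return;         // already saturated
 *              } while (cmpxchg(&counter, old, old + 1) != old);
 *      }
 */
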
#define __raw_try_cmpxchg(_ptr, _pold, _new, size, lock)                \
({                                                                      \
        bool success;                                                   \
        __typeof__(_ptr) _old = (__typeof__(_ptr))(_pold);              \
        __typeof__(*(_ptr)) __old = *_old;                              \
        __typeof__(*(_ptr)) __new = (_new);                             \
        switch (size) {                                                 \
        case __X86_CASE_B:                                              \
        {                                                               \
                volatile u8 *__ptr = (volatile u8 *)(_ptr);             \
                asm volatile(lock "cmpxchgb %[new], %[ptr]"             \
                             CC_SET(z)                                  \
                             : CC_OUT(z) (success),                     \
                               [ptr] "+m" (*__ptr),                     \
                               [old] "+a" (__old)                       \
                             : [new] "q" (__new)                        \
                             : "memory");                               \
                break;                                                  \
        }                                                               \
        case __X86_CASE_W:                                              \
        {                                                               \
                volatile u16 *__ptr = (volatile u16 *)(_ptr);           \
                asm volatile(lock "cmpxchgw %[new], %[ptr]"             \
                             CC_SET(z)                                  \
                             : CC_OUT(z) (success),                     \
                               [ptr] "+m" (*__ptr),                     \
                               [old] "+a" (__old)                       \
                             : [new] "r" (__new)                        \
                             : "memory");                               \
                break;                                                  \
        }                                                               \
        case __X86_CASE_L:                                              \
        {                                                               \
                volatile u32 *__ptr = (volatile u32 *)(_ptr);           \
                asm volatile(lock "cmpxchgl %[new], %[ptr]"             \
                             CC_SET(z)                                  \
                             : CC_OUT(z) (success),                     \
                               [ptr] "+m" (*__ptr),                     \
                               [old] "+a" (__old)                       \
                             : [new] "r" (__new)                        \
                             : "memory");                               \
                break;                                                  \
        }                                                               \
        case __X86_CASE_Q:                                              \
        {                                                               \
                volatile u64 *__ptr = (volatile u64 *)(_ptr);           \
                asm volatile(lock "cmpxchgq %[new], %[ptr]"             \
                             CC_SET(z)                                  \
                             : CC_OUT(z) (success),                     \
                               [ptr] "+m" (*__ptr),                     \
                               [old] "+a" (__old)                       \
                             : [new] "r" (__new)                        \
                             : "memory");                               \
                break;                                                  \
        }                                                               \
        default:                                                        \
                __cmpxchg_wrong_size();                                 \
        }                                                               \
        if (unlikely(!success))                                         \
                *_old = __old;                                          \
        likely(success);                                                \
})

#define __try_cmpxchg(ptr, pold, new, size)                             \
        __raw_try_cmpxchg((ptr), (pold), (new), (size), LOCK_PREFIX)

#define try_cmpxchg(ptr, pold, new)                                     \
        __try_cmpxchg((ptr), (pold), (new), sizeof(*(ptr)))

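/*
 * Illustrative sketch, not part of this header: try_cmpxchg() returns a
 * boolean and, on failure, writes the value actually found in memory back
 * through the "old" pointer, so the retry loop need not re-read the
 * location itself. Names are hypothetical.
 *
 *      static unsigned long refs;
 *
 *      bool get_ref_unless_zero(void)
 *      {
 *              unsigned long old = READ_ONCE(refs);
 *
 *              do {
 *                      if (!old)
 *                              return false;   // object already released
 *              } while (!try_cmpxchg(&refs, &old, old + 1));
 *
 *              return true;
 *      }
 */
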
/*
 * xadd() adds "inc" to "*ptr" and atomically returns the previous
 * value of "*ptr".
 *
 * xadd() is locked when multiple CPUs are online
 */
#define __xadd(ptr, inc, lock)  __xchg_op((ptr), (inc), xadd, lock)
#define xadd(ptr, inc)          __xadd((ptr), (inc), LOCK_PREFIX)

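/*
 * Illustrative sketch, not part of this header: xadd() is the
 * fetch-and-add building block, returning the value *ptr held before the
 * addition, e.g. for handing out monotonically increasing tickets.
 * Names are hypothetical.
 *
 *      static unsigned int next_ticket;
 *
 *      unsigned int take_ticket(void)
 *      {
 *              // Atomically bump next_ticket and return its old value.
 *              return xadd(&next_ticket, 1);
 *      }
 */
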
#define __cmpxchg_double(pfx, p1, p2, o1, o2, n1, n2)                   \
({                                                                      \
        bool __ret;                                                     \
        __typeof__(*(p1)) __old1 = (o1), __new1 = (n1);                 \
        __typeof__(*(p2)) __old2 = (o2), __new2 = (n2);                 \
        BUILD_BUG_ON(sizeof(*(p1)) != sizeof(long));                    \
        BUILD_BUG_ON(sizeof(*(p2)) != sizeof(long));                    \
        VM_BUG_ON((unsigned long)(p1) % (2 * sizeof(long)));            \
        VM_BUG_ON((unsigned long)((p1) + 1) != (unsigned long)(p2));    \
        asm volatile(pfx "cmpxchg%c4b %2; sete %0"                      \
                     : "=a" (__ret), "+d" (__old2),                     \
                       "+m" (*(p1)), "+m" (*(p2))                       \
                     : "i" (2 * sizeof(long)), "a" (__old1),            \
                       "b" (__new1), "c" (__new2));                     \
        __ret;                                                          \
})

#define cmpxchg_double(p1, p2, o1, o2, n1, n2) \
        __cmpxchg_double(LOCK_PREFIX, p1, p2, o1, o2, n1, n2)

#define cmpxchg_double_local(p1, p2, o1, o2, n1, n2) \
        __cmpxchg_double(, p1, p2, o1, o2, n1, n2)

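/*
 * Illustrative sketch, not part of this header: cmpxchg_double() updates
 * two adjacent machine words as one unit (the pair must be aligned to
 * 2 * sizeof(long)), which allows e.g. a pointer to travel with a
 * generation tag to defeat ABA. Names are hypothetical.
 *
 *      struct tagged_ptr {
 *              void            *ptr;
 *              unsigned long   tag;
 *      } __aligned(2 * sizeof(long));
 *
 *      static struct tagged_ptr head;
 *
 *      bool replace_head(void *old_p, unsigned long old_t, void *new_p)
 *      {
 *              // Succeeds only if both words still hold the expected
 *              // values; the tag is bumped on every successful update.
 *              return cmpxchg_double(&head.ptr, &head.tag,
 *                                    old_p, old_t, new_p, old_t + 1);
 *      }
 */
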
#endif  /* ASM_X86_CMPXCHG_H */