/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>
#include <asm/war.h>

#define ATOMIC_INIT(i)    { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)          READ_ONCE((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)        WRITE_ONCE((v)->counter, (i))

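/*
 * Illustrative use of the accessors above (the counter name is hypothetical,
 * shown only as an example):
 *
 *	static atomic_t refs = ATOMIC_INIT(1);
 *
 *	atomic_set(&refs, 0);
 *	if (atomic_read(&refs) == 0)
 *		...
 */

/*
 * ATOMIC_OP(op, c_op, asm_op) generates atomic_##op(), e.g. atomic_add(),
 * which updates v->counter in place and returns nothing.  Three variants
 * are selected below: an LL/SC loop using the branch-likely beqzl form for
 * CPUs needing R10000_LLSC_WAR, a plain LL/SC retry loop for other LL/SC
 * capable CPUs, and an interrupt-disabling fallback for CPUs without LL/SC.
 */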
#define ATOMIC_OP(op, c_op, asm_op)                                           \
static __inline__ void atomic_##op(int i, atomic_t * v)                       \
{                                                                             \
        if (kernel_uses_llsc && R10000_LLSC_WAR) {                            \
                int temp;                                                     \
                                                                              \
                __asm__ __volatile__(                                         \
                "       .set    arch=r4000                              \n"   \
                "1:     ll      %0, %1          # atomic_" #op "        \n"   \
                "       " #asm_op " %0, %2                              \n"   \
                "       sc      %0, %1                                  \n"   \
                "       beqzl   %0, 1b                                  \n"   \
                "       .set    mips0                                   \n"   \
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)          \
                : "Ir" (i));                                                  \
        } else if (kernel_uses_llsc) {                                        \
                int temp;                                                     \
                                                                              \
                do {                                                          \
                        __asm__ __volatile__(                                 \
                        "       .set    "MIPS_ISA_LEVEL"                \n"   \
                        "       ll      %0, %1          # atomic_" #op "\n"   \
                        "       " #asm_op " %0, %2                      \n"   \
                        "       sc      %0, %1                          \n"   \
                        "       .set    mips0                           \n"   \
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)  \
                        : "Ir" (i));                                          \
                } while (unlikely(!temp));                                    \
        } else {                                                              \
                unsigned long flags;                                          \
                                                                              \
                raw_local_irq_save(flags);                                    \
                v->counter c_op i;                                            \
                raw_local_irq_restore(flags);                                 \
        }                                                                     \
}

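/*
 * ATOMIC_OP_RETURN(op, c_op, asm_op) generates atomic_##op##_return_relaxed(),
 * which performs the same update as atomic_##op() but also returns the new
 * value of the counter.  Only the _relaxed form is defined here; these
 * routines imply no memory barriers of their own.
 */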
#define ATOMIC_OP_RETURN(op, c_op, asm_op)                                    \
static __inline__ int atomic_##op##_return_relaxed(int i, atomic_t * v)       \
{                                                                             \
        int result;                                                           \
                                                                              \
        if (kernel_uses_llsc && R10000_LLSC_WAR) {                            \
                int temp;                                                     \
                                                                              \
                __asm__ __volatile__(                                         \
                "       .set    arch=r4000                              \n"   \
                "1:     ll      %1, %2          # atomic_" #op "_return \n"   \
                "       " #asm_op " %0, %1, %3                          \n"   \
                "       sc      %0, %2                                  \n"   \
                "       beqzl   %0, 1b                                  \n"   \
                "       " #asm_op " %0, %1, %3                          \n"   \
                "       .set    mips0                                   \n"   \
                : "=&r" (result), "=&r" (temp),                               \
                  "+" GCC_OFF_SMALL_ASM() (v->counter)                        \
                : "Ir" (i));                                                  \
        } else if (kernel_uses_llsc) {                                        \
                int temp;                                                     \
                                                                              \
                do {                                                          \
                        __asm__ __volatile__(                                 \
                        "       .set    "MIPS_ISA_LEVEL"                \n"   \
                        "       ll      %1, %2  # atomic_" #op "_return \n"   \
                        "       " #asm_op " %0, %1, %3                  \n"   \
                        "       sc      %0, %2                          \n"   \
                        "       .set    mips0                           \n"   \
                        : "=&r" (result), "=&r" (temp),                       \
                          "+" GCC_OFF_SMALL_ASM() (v->counter)                \
                        : "Ir" (i));                                          \
                } while (unlikely(!result));                                  \
                                                                              \
                result = temp; result c_op i;                                 \
        } else {                                                              \
                unsigned long flags;                                          \
                                                                              \
                raw_local_irq_save(flags);                                    \
                result = v->counter;                                          \
                result c_op i;                                                \
                v->counter = result;                                          \
                raw_local_irq_restore(flags);                                 \
        }                                                                     \
                                                                              \
        return result;                                                        \
}

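/*
 * ATOMIC_FETCH_OP(op, c_op, asm_op) generates atomic_fetch_##op##_relaxed(),
 * which performs the update but returns the value the counter held *before*
 * the operation.  As with the _return variants, ordering is relaxed.
 */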
#define ATOMIC_FETCH_OP(op, c_op, asm_op)                                     \
static __inline__ int atomic_fetch_##op##_relaxed(int i, atomic_t * v)        \
{                                                                             \
        int result;                                                           \
                                                                              \
        if (kernel_uses_llsc && R10000_LLSC_WAR) {                            \
                int temp;                                                     \
                                                                              \
                __asm__ __volatile__(                                         \
                "       .set    arch=r4000                              \n"   \
                "1:     ll      %1, %2          # atomic_fetch_" #op "  \n"   \
                "       " #asm_op " %0, %1, %3                          \n"   \
                "       sc      %0, %2                                  \n"   \
                "       beqzl   %0, 1b                                  \n"   \
                "       move    %0, %1                                  \n"   \
                "       .set    mips0                                   \n"   \
                : "=&r" (result), "=&r" (temp),                               \
                  "+" GCC_OFF_SMALL_ASM() (v->counter)                        \
                : "Ir" (i));                                                  \
        } else if (kernel_uses_llsc) {                                        \
                int temp;                                                     \
                                                                              \
                do {                                                          \
                        __asm__ __volatile__(                                 \
                        "       .set    "MIPS_ISA_LEVEL"                \n"   \
                        "       ll      %1, %2  # atomic_fetch_" #op "  \n"   \
                        "       " #asm_op " %0, %1, %3                  \n"   \
                        "       sc      %0, %2                          \n"   \
                        "       .set    mips0                           \n"   \
                        : "=&r" (result), "=&r" (temp),                       \
                          "+" GCC_OFF_SMALL_ASM() (v->counter)                \
                        : "Ir" (i));                                          \
                } while (unlikely(!result));                                  \
                                                                              \
                result = temp;                                                \
        } else {                                                              \
                unsigned long flags;                                          \
                                                                              \
                raw_local_irq_save(flags);                                    \
                result = v->counter;                                          \
                v->counter c_op i;                                            \
                raw_local_irq_restore(flags);                                 \
        }                                                                     \
                                                                              \
        return result;                                                        \
}

#define ATOMIC_OPS(op, c_op, asm_op)                                          \
        ATOMIC_OP(op, c_op, asm_op)                                           \
        ATOMIC_OP_RETURN(op, c_op, asm_op)                                    \
        ATOMIC_FETCH_OP(op, c_op, asm_op)

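/*
 * Each ATOMIC_OPS() invocation supplies the operation name, the C operator
 * used by the interrupt-disabling fallback, and the assembler instruction
 * used inside the LL/SC sequences.  E.g. ATOMIC_OPS(add, +=, addu) expands
 * to atomic_add(), atomic_add_return_relaxed() and atomic_fetch_add_relaxed().
 */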
ATOMIC_OPS(add, +=, addu)
ATOMIC_OPS(sub, -=, subu)

#define atomic_add_return_relaxed       atomic_add_return_relaxed
#define atomic_sub_return_relaxed       atomic_sub_return_relaxed
#define atomic_fetch_add_relaxed        atomic_fetch_add_relaxed
#define atomic_fetch_sub_relaxed        atomic_fetch_sub_relaxed
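
/*
 * Only the _relaxed forms are defined above; the generic wrappers in
 * <linux/atomic.h> are expected to construct the acquire/release and fully
 * ordered variants (atomic_add_return() etc.) from them by adding the
 * appropriate barriers.
 */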

#undef ATOMIC_OPS
#define ATOMIC_OPS(op, c_op, asm_op)                                          \
        ATOMIC_OP(op, c_op, asm_op)                                           \
        ATOMIC_FETCH_OP(op, c_op, asm_op)

ATOMIC_OPS(and, &=, and)
ATOMIC_OPS(or, |=, or)
ATOMIC_OPS(xor, ^=, xor)

#define atomic_fetch_and_relaxed        atomic_fetch_and_relaxed
#define atomic_fetch_or_relaxed         atomic_fetch_or_relaxed
#define atomic_fetch_xor_relaxed        atomic_fetch_xor_relaxed

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
        int result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                int temp;

                __asm__ __volatile__(
                "       .set    arch=r4000                              \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqzl   %0, 1b                                  \n"
                "        subu   %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp),
                  "+" GCC_OFF_SMALL_ASM() (v->counter)
                : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                int temp;

                __asm__ __volatile__(
                "       .set    "MIPS_ISA_LEVEL"                        \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqz    %0, 1b                                  \n"
                "        subu   %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp),
                  "+" GCC_OFF_SMALL_ASM() (v->counter)
                : "Ir" (i));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
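
/*
 * Illustrative sketch (not part of this file): an "add unless" style loop
 * built from the primitives above, assuming hypothetical values a and u.
 * The loop retries until the cmpxchg() succeeds or the counter equals u:
 *
 *	int c = atomic_read(v);
 *	while (c != u) {
 *		int old = atomic_cmpxchg(v, c, c + a);
 *		if (old == c)
 *			break;
 *		c = old;
 *	}
 */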

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)       atomic_sub_if_positive(1, v)

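/*
 * The atomic64_t operations below are only provided on 64-bit kernels;
 * 32-bit MIPS kernels are expected to use the generic, spinlock-based
 * atomic64 implementation instead (CONFIG_GENERIC_ATOMIC64).
 */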
#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)        READ_ONCE((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 */
#define atomic64_set(v, i)      WRITE_ONCE((v)->counter, (i))

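/*
 * The ATOMIC64_* generator macros below mirror the 32-bit ones above, using
 * lld/scd instead of ll/sc and the 64-bit daddu/dsubu arithmetic instructions.
 */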
#define ATOMIC64_OP(op, c_op, asm_op)                                         \
static __inline__ void atomic64_##op(long i, atomic64_t * v)                  \
{                                                                             \
        if (kernel_uses_llsc && R10000_LLSC_WAR) {                            \
                long temp;                                                    \
                                                                              \
                __asm__ __volatile__(                                         \
                "       .set    arch=r4000                              \n"   \
                "1:     lld     %0, %1          # atomic64_" #op "      \n"   \
                "       " #asm_op " %0, %2                              \n"   \
                "       scd     %0, %1                                  \n"   \
                "       beqzl   %0, 1b                                  \n"   \
                "       .set    mips0                                   \n"   \
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)          \
                : "Ir" (i));                                                  \
        } else if (kernel_uses_llsc) {                                        \
                long temp;                                                    \
                                                                              \
                do {                                                          \
                        __asm__ __volatile__(                                 \
                        "       .set    "MIPS_ISA_LEVEL"                \n"   \
                        "       lld     %0, %1          # atomic64_" #op "\n" \
                        "       " #asm_op " %0, %2                      \n"   \
                        "       scd     %0, %1                          \n"   \
                        "       .set    mips0                           \n"   \
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)  \
                        : "Ir" (i));                                          \
                } while (unlikely(!temp));                                    \
        } else {                                                              \
                unsigned long flags;                                          \
                                                                              \
                raw_local_irq_save(flags);                                    \
                v->counter c_op i;                                            \
                raw_local_irq_restore(flags);                                 \
        }                                                                     \
}

#define ATOMIC64_OP_RETURN(op, c_op, asm_op)                                  \
static __inline__ long atomic64_##op##_return_relaxed(long i, atomic64_t * v) \
{                                                                             \
        long result;                                                          \
                                                                              \
        if (kernel_uses_llsc && R10000_LLSC_WAR) {                            \
                long temp;                                                    \
                                                                              \
                __asm__ __volatile__(                                         \
                "       .set    arch=r4000                              \n"   \
                "1:     lld     %1, %2          # atomic64_" #op "_return\n"  \
                "       " #asm_op " %0, %1, %3                          \n"   \
                "       scd     %0, %2                                  \n"   \
                "       beqzl   %0, 1b                                  \n"   \
                "       " #asm_op " %0, %1, %3                          \n"   \
                "       .set    mips0                                   \n"   \
                : "=&r" (result), "=&r" (temp),                               \
                  "+" GCC_OFF_SMALL_ASM() (v->counter)                        \
                : "Ir" (i));                                                  \
        } else if (kernel_uses_llsc) {                                        \
                long temp;                                                    \
                                                                              \
                do {                                                          \
                        __asm__ __volatile__(                                 \
                        "       .set    "MIPS_ISA_LEVEL"                \n"   \
                        "       lld     %1, %2  # atomic64_" #op "_return\n"  \
                        "       " #asm_op " %0, %1, %3                  \n"   \
                        "       scd     %0, %2                          \n"   \
                        "       .set    mips0                           \n"   \
                        : "=&r" (result), "=&r" (temp),                       \
                          "=" GCC_OFF_SMALL_ASM() (v->counter)                \
                        : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)          \
                        : "memory");                                          \
                } while (unlikely(!result));                                  \
                                                                              \
                result = temp; result c_op i;                                 \
        } else {                                                              \
                unsigned long flags;                                          \
                                                                              \
                raw_local_irq_save(flags);                                    \
                result = v->counter;                                          \
                result c_op i;                                                \
                v->counter = result;                                          \
                raw_local_irq_restore(flags);                                 \
        }                                                                     \
                                                                              \
        return result;                                                        \
}

#define ATOMIC64_FETCH_OP(op, c_op, asm_op)                                   \
static __inline__ long atomic64_fetch_##op##_relaxed(long i, atomic64_t * v)  \
{                                                                             \
        long result;                                                          \
                                                                              \
        if (kernel_uses_llsc && R10000_LLSC_WAR) {                            \
                long temp;                                                    \
                                                                              \
                __asm__ __volatile__(                                         \
                "       .set    arch=r4000                              \n"   \
                "1:     lld     %1, %2          # atomic64_fetch_" #op "\n"   \
                "       " #asm_op " %0, %1, %3                          \n"   \
                "       scd     %0, %2                                  \n"   \
                "       beqzl   %0, 1b                                  \n"   \
                "       move    %0, %1                                  \n"   \
                "       .set    mips0                                   \n"   \
                : "=&r" (result), "=&r" (temp),                               \
                  "+" GCC_OFF_SMALL_ASM() (v->counter)                        \
                : "Ir" (i));                                                  \
        } else if (kernel_uses_llsc) {                                        \
                long temp;                                                    \
                                                                              \
                do {                                                          \
                        __asm__ __volatile__(                                 \
                        "       .set    "MIPS_ISA_LEVEL"                \n"   \
                        "       lld     %1, %2  # atomic64_fetch_" #op "\n"   \
                        "       " #asm_op " %0, %1, %3                  \n"   \
                        "       scd     %0, %2                          \n"   \
                        "       .set    mips0                           \n"   \
                        : "=&r" (result), "=&r" (temp),                       \
                          "=" GCC_OFF_SMALL_ASM() (v->counter)                \
                        : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)          \
                        : "memory");                                          \
                } while (unlikely(!result));                                  \
                                                                              \
                result = temp;                                                \
        } else {                                                              \
                unsigned long flags;                                          \
                                                                              \
                raw_local_irq_save(flags);                                    \
                result = v->counter;                                          \
                v->counter c_op i;                                            \
                raw_local_irq_restore(flags);                                 \
        }                                                                     \
                                                                              \
        return result;                                                        \
}

#define ATOMIC64_OPS(op, c_op, asm_op)                                        \
        ATOMIC64_OP(op, c_op, asm_op)                                         \
        ATOMIC64_OP_RETURN(op, c_op, asm_op)                                  \
        ATOMIC64_FETCH_OP(op, c_op, asm_op)

ATOMIC64_OPS(add, +=, daddu)
ATOMIC64_OPS(sub, -=, dsubu)

#define atomic64_add_return_relaxed     atomic64_add_return_relaxed
#define atomic64_sub_return_relaxed     atomic64_sub_return_relaxed
#define atomic64_fetch_add_relaxed      atomic64_fetch_add_relaxed
#define atomic64_fetch_sub_relaxed      atomic64_fetch_sub_relaxed

#undef ATOMIC64_OPS
#define ATOMIC64_OPS(op, c_op, asm_op)                                        \
        ATOMIC64_OP(op, c_op, asm_op)                                         \
        ATOMIC64_FETCH_OP(op, c_op, asm_op)

ATOMIC64_OPS(and, &=, and)
ATOMIC64_OPS(or, |=, or)
ATOMIC64_OPS(xor, ^=, xor)

#define atomic64_fetch_and_relaxed      atomic64_fetch_and_relaxed
#define atomic64_fetch_or_relaxed       atomic64_fetch_or_relaxed
#define atomic64_fetch_xor_relaxed      atomic64_fetch_xor_relaxed

#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic
 *                            variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
        long result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                long temp;

                __asm__ __volatile__(
                "       .set    arch=r4000                              \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqzl   %0, 1b                                  \n"
                "        dsubu  %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp),
                  "=" GCC_OFF_SMALL_ASM() (v->counter)
                : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                long temp;

                __asm__ __volatile__(
                "       .set    "MIPS_ISA_LEVEL"                        \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqz    %0, 1b                                  \n"
                "        dsubu  %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp),
                  "+" GCC_OFF_SMALL_ASM() (v->counter)
                : "Ir" (i));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

#define atomic64_cmpxchg(v, o, n) \
        ((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)     atomic64_sub_if_positive(1, v)

#endif /* CONFIG_64BIT */

#endif /* _ASM_ATOMIC_H */