/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/cpu-features.h>
#include <asm/war.h>
#include <asm/system.h>

#define ATOMIC_INIT(i)    { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		(*(volatile int *)&(v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	((v)->counter = (i))
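
/*
 * Illustrative use (not part of the original file): declare, initialise
 * and access a counter.  'nr_events' is a hypothetical variable.
 *
 *	static atomic_t nr_events = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_events, 10);
 *	pr_info("events: %d\n", atomic_read(&nr_events));
 */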

/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (kernel_uses_llsc) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 2f					\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter += i;
		raw_local_irq_restore(flags);
	}
}
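
/*
 * Illustrative use (not part of the original file): bump a hypothetical
 * statistics counter from any context without taking a lock.
 *
 *	static atomic_t rx_packets = ATOMIC_INIT(0);
 *
 *	atomic_add(nr, &rx_packets);
 *
 * (atomic_inc(&rx_packets) would do the same for an increment of 1.)
 */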

/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (kernel_uses_llsc) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 2f					\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter -= i;
		raw_local_irq_restore(flags);
	}
}

/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 2f					\n"
		"	addu	%0, %1, %3				\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
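
/*
 * Illustrative use (not part of the original file): the *_return variants
 * are fully ordered and hand back the new value, so it can be tested
 * directly.  'active_users' and 'max_users' are hypothetical.
 *
 *	if (atomic_add_return(1, &active_users) > max_users)
 *		atomic_sub(1, &active_users);
 *
 * If the new count exceeds the limit, the caller backs the increment out.
 */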

static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 2f					\n"
		"	subu	%0, %1, %3				\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 2f					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
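
/*
 * Illustrative use (not part of the original file): consume one unit of a
 * hypothetical resource budget only if the counter would not go negative;
 * on failure the counter is left untouched and the (negative) would-be
 * result is returned.
 *
 *	if (atomic_sub_if_positive(1, &budget) < 0)
 *		return -EBUSY;
 */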

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
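
/*
 * Illustrative use (not part of the original file): claim a one-shot
 * initialisation flag with atomic_cmpxchg().  'init_done' and
 * do_one_time_setup() are hypothetical; only the caller that observes
 * the 0 -> 1 transition runs the setup.
 *
 *	if (atomic_cmpxchg(&init_done, 0, 1) == 0)
 *		do_one_time_setup();
 */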

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
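
/*
 * Illustrative use (not part of the original file): take a reference on a
 * hypothetical object only while its reference count is still non-zero,
 * i.e. while it is not already being torn down.
 *
 *	if (!atomic_inc_not_zero(&obj->refcnt))
 *		return NULL;
 */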

#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
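
/*
 * Illustrative use (not part of the original file): the "put" side of a
 * reference count.  'obj' and obj_destroy() are hypothetical.
 *
 *	if (atomic_dec_and_test(&obj->refcnt))
 *		obj_destroy(obj);
 */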

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)

#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	(*(volatile long *)&(v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 */
#define atomic64_set(v, i)	((v)->counter = (i))

/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_add		\n"
		"	daddu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (kernel_uses_llsc) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_add		\n"
		"	daddu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqz	%0, 2f					\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter += i;
		raw_local_irq_restore(flags);
	}
}

/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_sub		\n"
		"	dsubu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (kernel_uses_llsc) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_sub		\n"
		"	dsubu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqz	%0, 2f					\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter -= i;
		raw_local_irq_restore(flags);
	}
}

/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	daddu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqz	%0, 2f					\n"
		"	daddu	%0, %1, %3				\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}

static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqz	%0, 2f					\n"
		"	dsubu	%0, %1, %3				\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 2f					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}

#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))
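
/*
 * Illustrative use (not part of the original file): lock-free update of a
 * hypothetical 64-bit high-water mark with atomic64_cmpxchg().
 *
 *	long old = atomic64_read(&max_seen);
 *
 *	while (val > old) {
 *		long prev = atomic64_cmpxchg(&max_seen, old, val);
 *
 *		if (prev == old)
 *			break;
 *		old = prev;
 *	}
 */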

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

/*
 * atomic*_return operations are serializing but not the non-*_return
 * versions.
 */
#define smp_mb__before_atomic_dec()	smp_mb__before_llsc()
#define smp_mb__after_atomic_dec()	smp_llsc_mb()
#define smp_mb__before_atomic_inc()	smp_mb__before_llsc()
#define smp_mb__after_atomic_inc()	smp_llsc_mb()
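
/*
 * Illustrative use (not part of the original file): order a hypothetical
 * flag store before a plain atomic_dec() that another CPU polls on; the
 * non-returning atomics above are not serializing by themselves.
 *
 *	done = 1;
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&pending);
 */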

#include <asm-generic/atomic-long.h>

#endif /* _ASM_ATOMIC_H */