/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>

#define ATOMIC_INIT(i)	  { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		READ_ONCE((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	WRITE_ONCE((v)->counter, (i))
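
/*
 * Illustrative usage only (not part of the original header); the
 * identifiers 'refs' and 'do_something' are made-up example names:
 *
 *	static atomic_t refs = ATOMIC_INIT(0);
 *
 *	atomic_set(&refs, 1);
 *	if (atomic_read(&refs) > 0)
 *		do_something();
 */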

#define ATOMIC_OP(op, c_op, asm_op) \
static __inline__ void atomic_##op(int i, atomic_t * v) \
	if (kernel_uses_llsc && R10000_LLSC_WAR) { \
		__asm__ __volatile__( \
		"	.set	arch=r4000				\n" \
		"1:	ll	%0, %1		# atomic_" #op "	\n" \
		"	" #asm_op " %0, %2				\n" \
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
	} else if (kernel_uses_llsc) { \
			__asm__ __volatile__( \
			"	.set	"MIPS_ISA_LEVEL"		\n" \
			"	ll	%0, %1		# atomic_" #op "\n" \
			"	" #asm_op " %0, %2			\n" \
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
		} while (unlikely(!temp)); \
		unsigned long flags; \
		raw_local_irq_save(flags); \
		raw_local_irq_restore(flags); \
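
/*
 * Sketch of what the final fallback branch above amounts to (editorial
 * illustration, not generated code): on CPUs without LL/SC the operation
 * is a plain read-modify-write carried out with local interrupts
 * disabled.  For ATOMIC_OP(add, +=, addu) that is roughly:
 *
 *	unsigned long flags;
 *
 *	raw_local_irq_save(flags);
 *	v->counter += i;
 *	raw_local_irq_restore(flags);
 */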

#define ATOMIC_OP_RETURN(op, c_op, asm_op) \
static __inline__ int atomic_##op##_return_relaxed(int i, atomic_t * v) \
	if (kernel_uses_llsc && R10000_LLSC_WAR) { \
		__asm__ __volatile__( \
		"	.set	arch=r4000				\n" \
		"1:	ll	%1, %2		# atomic_" #op "_return	\n" \
		"	" #asm_op " %0, %1, %3				\n" \
		"	" #asm_op " %0, %1, %3				\n" \
		: "=&r" (result), "=&r" (temp), \
		  "+" GCC_OFF_SMALL_ASM() (v->counter) \
	} else if (kernel_uses_llsc) { \
			__asm__ __volatile__( \
			"	.set	"MIPS_ISA_LEVEL"		\n" \
			"	ll	%1, %2	# atomic_" #op "_return	\n" \
			"	" #asm_op " %0, %1, %3			\n" \
			: "=&r" (result), "=&r" (temp), \
			  "+" GCC_OFF_SMALL_ASM() (v->counter) \
		} while (unlikely(!result)); \
		result = temp; result c_op i; \
		unsigned long flags; \
		raw_local_irq_save(flags); \
		result = v->counter; \
		v->counter = result; \
		raw_local_irq_restore(flags); \

#define ATOMIC_FETCH_OP(op, c_op, asm_op) \
static __inline__ int atomic_fetch_##op##_relaxed(int i, atomic_t * v) \
	if (kernel_uses_llsc && R10000_LLSC_WAR) { \
		__asm__ __volatile__( \
		"	.set	arch=r4000				\n" \
		"1:	ll	%1, %2		# atomic_fetch_" #op "	\n" \
		"	" #asm_op " %0, %1, %3				\n" \
		: "=&r" (result), "=&r" (temp), \
		  "+" GCC_OFF_SMALL_ASM() (v->counter) \
	} else if (kernel_uses_llsc) { \
			__asm__ __volatile__( \
			"	.set	"MIPS_ISA_LEVEL"		\n" \
			"	ll	%1, %2	# atomic_fetch_" #op "	\n" \
			"	" #asm_op " %0, %1, %3			\n" \
			: "=&r" (result), "=&r" (temp), \
			  "+" GCC_OFF_SMALL_ASM() (v->counter) \
		} while (unlikely(!result)); \
		unsigned long flags; \
		raw_local_irq_save(flags); \
		result = v->counter; \
		raw_local_irq_restore(flags); \
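
/*
 * Editorial note on the two macro families above: atomic_*_return_relaxed()
 * returns the *new* value of the counter (the "result = temp; result c_op i"
 * step re-applies the operation to the value loaded by ll), while
 * atomic_fetch_*_relaxed() returns the value the counter held *before* the
 * operation.  Once instantiated below, starting from a counter of 5:
 *
 *	atomic_add_return_relaxed(3, &v);	returns 8, counter is now 8
 *	atomic_fetch_add_relaxed(3, &v);	returns 8, counter is now 11
 */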

#define ATOMIC_OPS(op, c_op, asm_op) \
	ATOMIC_OP(op, c_op, asm_op) \
	ATOMIC_OP_RETURN(op, c_op, asm_op) \
	ATOMIC_FETCH_OP(op, c_op, asm_op)

ATOMIC_OPS(add, +=, addu)
ATOMIC_OPS(sub, -=, subu)

#define atomic_add_return_relaxed	atomic_add_return_relaxed
#define atomic_sub_return_relaxed	atomic_sub_return_relaxed
#define atomic_fetch_add_relaxed	atomic_fetch_add_relaxed
#define atomic_fetch_sub_relaxed	atomic_fetch_sub_relaxed
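
/*
 * Only the _relaxed forms are implemented in this file; the #defines above
 * advertise them to the generic atomic code.  Assuming the usual
 * <linux/atomic.h> fallback machinery, the fully ordered variants are then
 * generated by bracketing the relaxed op with barriers, roughly:
 *
 *	smp_mb__before_atomic();
 *	ret = atomic_add_return_relaxed(i, v);
 *	smp_mb__after_atomic();
 */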

#undef ATOMIC_OPS
#define ATOMIC_OPS(op, c_op, asm_op) \
	ATOMIC_OP(op, c_op, asm_op) \
	ATOMIC_FETCH_OP(op, c_op, asm_op)

ATOMIC_OPS(and, &=, and)
ATOMIC_OPS(or, |=, or)
ATOMIC_OPS(xor, ^=, xor)

#define atomic_fetch_and_relaxed	atomic_fetch_and_relaxed
#define atomic_fetch_or_relaxed		atomic_fetch_or_relaxed
#define atomic_fetch_xor_relaxed	atomic_fetch_xor_relaxed

#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	subu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
	} else if (kernel_uses_llsc) {
		__asm__ __volatile__(
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	subu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		raw_local_irq_save(flags);
		raw_local_irq_restore(flags);

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
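
/*
 * Illustration only: the usual compare-and-swap retry loop built on
 * atomic_cmpxchg(), which returns the value actually found in the counter.
 * Here 'a' is added unless the counter already equals 'u' (all names are
 * example placeholders):
 *
 *	int old = atomic_read(v);
 *
 *	for (;;) {
 *		int prev;
 *
 *		if (old == u)
 *			break;
 *		prev = atomic_cmpxchg(v, old, old + a);
 *		if (prev == old)
 *			break;
 *		old = prev;
 *	}
 */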

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)
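
/*
 * Example use (illustration only, 'count' is a placeholder name): since
 * atomic_sub_if_positive() returns the old value minus @i and only performs
 * the subtraction when that result is not negative, a caller can refuse to
 * proceed once the counter has reached zero:
 *
 *	if (atomic_dec_if_positive(&count) < 0)
 *		return -EAGAIN;
 */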

#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 */
#define atomic64_read(v)	READ_ONCE((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 */
#define atomic64_set(v, i)	WRITE_ONCE((v)->counter, (i))
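
/*
 * The 64-bit API below mirrors the 32-bit one, using the doubleword
 * LL/SC and ALU instructions (lld/scd, daddu/dsubu) on the atomic64_t
 * type, and is only provided under CONFIG_64BIT.  Usage is otherwise
 * identical (illustration only, 'bytes_rx' and 'len' are made-up names):
 *
 *	static atomic64_t bytes_rx = ATOMIC64_INIT(0);
 *
 *	atomic64_add(len, &bytes_rx);
 */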

#define ATOMIC64_OP(op, c_op, asm_op) \
static __inline__ void atomic64_##op(long i, atomic64_t * v) \
	if (kernel_uses_llsc && R10000_LLSC_WAR) { \
		__asm__ __volatile__( \
		"	.set	arch=r4000				\n" \
		"1:	lld	%0, %1		# atomic64_" #op "	\n" \
		"	" #asm_op " %0, %2				\n" \
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
	} else if (kernel_uses_llsc) { \
			__asm__ __volatile__( \
			"	.set	"MIPS_ISA_LEVEL"		\n" \
			"	lld	%0, %1		# atomic64_" #op "\n" \
			"	" #asm_op " %0, %2			\n" \
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
		} while (unlikely(!temp)); \
		unsigned long flags; \
		raw_local_irq_save(flags); \
		raw_local_irq_restore(flags); \

#define ATOMIC64_OP_RETURN(op, c_op, asm_op) \
static __inline__ long atomic64_##op##_return_relaxed(long i, atomic64_t * v) \
	if (kernel_uses_llsc && R10000_LLSC_WAR) { \
		__asm__ __volatile__( \
		"	.set	arch=r4000				\n" \
		"1:	lld	%1, %2		# atomic64_" #op "_return\n" \
		"	" #asm_op " %0, %1, %3				\n" \
		"	" #asm_op " %0, %1, %3				\n" \
		: "=&r" (result), "=&r" (temp), \
		  "+" GCC_OFF_SMALL_ASM() (v->counter) \
	} else if (kernel_uses_llsc) { \
			__asm__ __volatile__( \
			"	.set	"MIPS_ISA_LEVEL"		\n" \
			"	lld	%1, %2	# atomic64_" #op "_return\n" \
			"	" #asm_op " %0, %1, %3			\n" \
			: "=&r" (result), "=&r" (temp), \
			  "=" GCC_OFF_SMALL_ASM() (v->counter) \
			: "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter) \
		} while (unlikely(!result)); \
		result = temp; result c_op i; \
		unsigned long flags; \
		raw_local_irq_save(flags); \
		result = v->counter; \
		v->counter = result; \
		raw_local_irq_restore(flags); \

#define ATOMIC64_FETCH_OP(op, c_op, asm_op) \
static __inline__ long atomic64_fetch_##op##_relaxed(long i, atomic64_t * v) \
	if (kernel_uses_llsc && R10000_LLSC_WAR) { \
		__asm__ __volatile__( \
		"	.set	arch=r4000				\n" \
		"1:	lld	%1, %2		# atomic64_fetch_" #op "\n" \
		"	" #asm_op " %0, %1, %3				\n" \
		: "=&r" (result), "=&r" (temp), \
		  "+" GCC_OFF_SMALL_ASM() (v->counter) \
	} else if (kernel_uses_llsc) { \
			__asm__ __volatile__( \
			"	.set	"MIPS_ISA_LEVEL"		\n" \
			"	lld	%1, %2	# atomic64_fetch_" #op "\n" \
			"	" #asm_op " %0, %1, %3			\n" \
			: "=&r" (result), "=&r" (temp), \
			  "=" GCC_OFF_SMALL_ASM() (v->counter) \
			: "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter) \
		} while (unlikely(!result)); \
		unsigned long flags; \
		raw_local_irq_save(flags); \
		result = v->counter; \
		raw_local_irq_restore(flags); \

#define ATOMIC64_OPS(op, c_op, asm_op) \
	ATOMIC64_OP(op, c_op, asm_op) \
	ATOMIC64_OP_RETURN(op, c_op, asm_op) \
	ATOMIC64_FETCH_OP(op, c_op, asm_op)

ATOMIC64_OPS(add, +=, daddu)
ATOMIC64_OPS(sub, -=, dsubu)

#define atomic64_add_return_relaxed	atomic64_add_return_relaxed
#define atomic64_sub_return_relaxed	atomic64_sub_return_relaxed
#define atomic64_fetch_add_relaxed	atomic64_fetch_add_relaxed
#define atomic64_fetch_sub_relaxed	atomic64_fetch_sub_relaxed

#undef ATOMIC64_OPS
#define ATOMIC64_OPS(op, c_op, asm_op) \
	ATOMIC64_OP(op, c_op, asm_op) \
	ATOMIC64_FETCH_OP(op, c_op, asm_op)

ATOMIC64_OPS(and, &=, and)
ATOMIC64_OPS(or, |=, or)
ATOMIC64_OPS(xor, ^=, xor)

#define atomic64_fetch_and_relaxed	atomic64_fetch_and_relaxed
#define atomic64_fetch_or_relaxed	atomic64_fetch_or_relaxed
#define atomic64_fetch_xor_relaxed	atomic64_fetch_xor_relaxed

#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic
 *			      variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	dsubu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp),
		  "=" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
	} else if (kernel_uses_llsc) {
		__asm__ __volatile__(
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	dsubu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		raw_local_irq_save(flags);
		raw_local_irq_restore(flags);

#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

#endif /* CONFIG_64BIT */

#endif /* _ASM_ATOMIC_H */