1 // SPDX-License-Identifier: GPL-2.0
3 // Generated by scripts/atomic/gen-atomic-fallback.sh
4 // DO NOT MODIFY THIS FILE DIRECTLY
6 #ifndef _LINUX_ATOMIC_FALLBACK_H
7 #define _LINUX_ATOMIC_FALLBACK_H
9 #include <linux/compiler.h>
/*
 * xchg: if the architecture only provides a fully-ordered xchg, alias all
 * ordering variants to it; otherwise build acquire/release/fence variants
 * from the relaxed primitive via the __atomic_op_* wrappers.
 */
#ifndef xchg_relaxed
#define xchg_relaxed xchg
#define xchg_acquire xchg
#define xchg_release xchg
#else /* xchg_relaxed */

#ifndef xchg_acquire
#define xchg_acquire(...) \
	__atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define xchg_release(...) \
	__atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define xchg(...) \
	__atomic_op_fence(xchg, __VA_ARGS__)
#endif

#endif /* xchg_relaxed */
/* cmpxchg: same fallback scheme as xchg above. */
#ifndef cmpxchg_relaxed
#define cmpxchg_relaxed cmpxchg
#define cmpxchg_acquire cmpxchg
#define cmpxchg_release cmpxchg
#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define cmpxchg_acquire(...) \
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define cmpxchg_release(...) \
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define cmpxchg(...) \
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif

#endif /* cmpxchg_relaxed */
/* cmpxchg64: same fallback scheme as cmpxchg above, for 64-bit values. */
#ifndef cmpxchg64_relaxed
#define cmpxchg64_relaxed cmpxchg64
#define cmpxchg64_acquire cmpxchg64
#define cmpxchg64_release cmpxchg64
#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define cmpxchg64_acquire(...) \
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define cmpxchg64_release(...) \
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define cmpxchg64(...) \
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif

#endif /* cmpxchg64_relaxed */
80 #ifndef atomic_read_acquire
81 static __always_inline int
82 atomic_read_acquire(const atomic_t *v)
84 return smp_load_acquire(&(v)->counter);
86 #define atomic_read_acquire atomic_read_acquire
89 #ifndef atomic_set_release
90 static __always_inline void
91 atomic_set_release(atomic_t *v, int i)
93 smp_store_release(&(v)->counter, i);
95 #define atomic_set_release atomic_set_release
/*
 * atomic_add_return: if only the relaxed form exists, build the acquire,
 * release and fully-ordered variants from it with explicit fences;
 * otherwise alias every ordering to the fully-ordered implementation.
 */
#ifndef atomic_add_return_relaxed
#define atomic_add_return_acquire atomic_add_return
#define atomic_add_return_release atomic_add_return
#define atomic_add_return_relaxed atomic_add_return
#else /* atomic_add_return_relaxed */

#ifndef atomic_add_return_acquire
static __always_inline int
atomic_add_return_acquire(int i, atomic_t *v)
{
	int ret = atomic_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_add_return_acquire atomic_add_return_acquire
#endif

#ifndef atomic_add_return_release
static __always_inline int
atomic_add_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_add_return_relaxed(i, v);
}
#define atomic_add_return_release atomic_add_return_release
#endif

#ifndef atomic_add_return
static __always_inline int
atomic_add_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_add_return atomic_add_return
#endif

#endif /* atomic_add_return_relaxed */
/* atomic_fetch_add: ordering fallbacks, same scheme as atomic_add_return. */
#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_acquire atomic_fetch_add
#define atomic_fetch_add_release atomic_fetch_add
#define atomic_fetch_add_relaxed atomic_fetch_add
#else /* atomic_fetch_add_relaxed */

#ifndef atomic_fetch_add_acquire
static __always_inline int
atomic_fetch_add_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_add_acquire atomic_fetch_add_acquire
#endif

#ifndef atomic_fetch_add_release
static __always_inline int
atomic_fetch_add_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_add_relaxed(i, v);
}
#define atomic_fetch_add_release atomic_fetch_add_release
#endif

#ifndef atomic_fetch_add
static __always_inline int
atomic_fetch_add(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_add atomic_fetch_add
#endif

#endif /* atomic_fetch_add_relaxed */
/* atomic_sub_return: ordering fallbacks, same scheme as atomic_add_return. */
#ifndef atomic_sub_return_relaxed
#define atomic_sub_return_acquire atomic_sub_return
#define atomic_sub_return_release atomic_sub_return
#define atomic_sub_return_relaxed atomic_sub_return
#else /* atomic_sub_return_relaxed */

#ifndef atomic_sub_return_acquire
static __always_inline int
atomic_sub_return_acquire(int i, atomic_t *v)
{
	int ret = atomic_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_sub_return_acquire atomic_sub_return_acquire
#endif

#ifndef atomic_sub_return_release
static __always_inline int
atomic_sub_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_sub_return_relaxed(i, v);
}
#define atomic_sub_return_release atomic_sub_return_release
#endif

#ifndef atomic_sub_return
static __always_inline int
atomic_sub_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_sub_return atomic_sub_return
#endif

#endif /* atomic_sub_return_relaxed */
/* atomic_fetch_sub: ordering fallbacks, same scheme as atomic_fetch_add. */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_acquire atomic_fetch_sub
#define atomic_fetch_sub_release atomic_fetch_sub
#define atomic_fetch_sub_relaxed atomic_fetch_sub
#else /* atomic_fetch_sub_relaxed */

#ifndef atomic_fetch_sub_acquire
static __always_inline int
atomic_fetch_sub_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_sub_acquire atomic_fetch_sub_acquire
#endif

#ifndef atomic_fetch_sub_release
static __always_inline int
atomic_fetch_sub_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_sub_relaxed(i, v);
}
#define atomic_fetch_sub_release atomic_fetch_sub_release
#endif

#ifndef atomic_fetch_sub
static __always_inline int
atomic_fetch_sub(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_sub atomic_fetch_sub
#endif

#endif /* atomic_fetch_sub_relaxed */
267 static __always_inline void
268 atomic_inc(atomic_t *v)
272 #define atomic_inc atomic_inc
275 #ifndef atomic_inc_return_relaxed
276 #ifdef atomic_inc_return
277 #define atomic_inc_return_acquire atomic_inc_return
278 #define atomic_inc_return_release atomic_inc_return
279 #define atomic_inc_return_relaxed atomic_inc_return
280 #endif /* atomic_inc_return */
282 #ifndef atomic_inc_return
283 static __always_inline int
284 atomic_inc_return(atomic_t *v)
286 return atomic_add_return(1, v);
288 #define atomic_inc_return atomic_inc_return
291 #ifndef atomic_inc_return_acquire
292 static __always_inline int
293 atomic_inc_return_acquire(atomic_t *v)
295 return atomic_add_return_acquire(1, v);
297 #define atomic_inc_return_acquire atomic_inc_return_acquire
300 #ifndef atomic_inc_return_release
301 static __always_inline int
302 atomic_inc_return_release(atomic_t *v)
304 return atomic_add_return_release(1, v);
306 #define atomic_inc_return_release atomic_inc_return_release
309 #ifndef atomic_inc_return_relaxed
310 static __always_inline int
311 atomic_inc_return_relaxed(atomic_t *v)
313 return atomic_add_return_relaxed(1, v);
315 #define atomic_inc_return_relaxed atomic_inc_return_relaxed
318 #else /* atomic_inc_return_relaxed */
320 #ifndef atomic_inc_return_acquire
321 static __always_inline int
322 atomic_inc_return_acquire(atomic_t *v)
324 int ret = atomic_inc_return_relaxed(v);
325 __atomic_acquire_fence();
328 #define atomic_inc_return_acquire atomic_inc_return_acquire
331 #ifndef atomic_inc_return_release
332 static __always_inline int
333 atomic_inc_return_release(atomic_t *v)
335 __atomic_release_fence();
336 return atomic_inc_return_relaxed(v);
338 #define atomic_inc_return_release atomic_inc_return_release
341 #ifndef atomic_inc_return
342 static __always_inline int
343 atomic_inc_return(atomic_t *v)
346 __atomic_pre_full_fence();
347 ret = atomic_inc_return_relaxed(v);
348 __atomic_post_full_fence();
351 #define atomic_inc_return atomic_inc_return
354 #endif /* atomic_inc_return_relaxed */
356 #ifndef atomic_fetch_inc_relaxed
357 #ifdef atomic_fetch_inc
358 #define atomic_fetch_inc_acquire atomic_fetch_inc
359 #define atomic_fetch_inc_release atomic_fetch_inc
360 #define atomic_fetch_inc_relaxed atomic_fetch_inc
361 #endif /* atomic_fetch_inc */
363 #ifndef atomic_fetch_inc
364 static __always_inline int
365 atomic_fetch_inc(atomic_t *v)
367 return atomic_fetch_add(1, v);
369 #define atomic_fetch_inc atomic_fetch_inc
372 #ifndef atomic_fetch_inc_acquire
373 static __always_inline int
374 atomic_fetch_inc_acquire(atomic_t *v)
376 return atomic_fetch_add_acquire(1, v);
378 #define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
381 #ifndef atomic_fetch_inc_release
382 static __always_inline int
383 atomic_fetch_inc_release(atomic_t *v)
385 return atomic_fetch_add_release(1, v);
387 #define atomic_fetch_inc_release atomic_fetch_inc_release
390 #ifndef atomic_fetch_inc_relaxed
391 static __always_inline int
392 atomic_fetch_inc_relaxed(atomic_t *v)
394 return atomic_fetch_add_relaxed(1, v);
396 #define atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
399 #else /* atomic_fetch_inc_relaxed */
401 #ifndef atomic_fetch_inc_acquire
402 static __always_inline int
403 atomic_fetch_inc_acquire(atomic_t *v)
405 int ret = atomic_fetch_inc_relaxed(v);
406 __atomic_acquire_fence();
409 #define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
412 #ifndef atomic_fetch_inc_release
413 static __always_inline int
414 atomic_fetch_inc_release(atomic_t *v)
416 __atomic_release_fence();
417 return atomic_fetch_inc_relaxed(v);
419 #define atomic_fetch_inc_release atomic_fetch_inc_release
422 #ifndef atomic_fetch_inc
423 static __always_inline int
424 atomic_fetch_inc(atomic_t *v)
427 __atomic_pre_full_fence();
428 ret = atomic_fetch_inc_relaxed(v);
429 __atomic_post_full_fence();
432 #define atomic_fetch_inc atomic_fetch_inc
435 #endif /* atomic_fetch_inc_relaxed */
438 static __always_inline void
439 atomic_dec(atomic_t *v)
443 #define atomic_dec atomic_dec
446 #ifndef atomic_dec_return_relaxed
447 #ifdef atomic_dec_return
448 #define atomic_dec_return_acquire atomic_dec_return
449 #define atomic_dec_return_release atomic_dec_return
450 #define atomic_dec_return_relaxed atomic_dec_return
451 #endif /* atomic_dec_return */
453 #ifndef atomic_dec_return
454 static __always_inline int
455 atomic_dec_return(atomic_t *v)
457 return atomic_sub_return(1, v);
459 #define atomic_dec_return atomic_dec_return
462 #ifndef atomic_dec_return_acquire
463 static __always_inline int
464 atomic_dec_return_acquire(atomic_t *v)
466 return atomic_sub_return_acquire(1, v);
468 #define atomic_dec_return_acquire atomic_dec_return_acquire
471 #ifndef atomic_dec_return_release
472 static __always_inline int
473 atomic_dec_return_release(atomic_t *v)
475 return atomic_sub_return_release(1, v);
477 #define atomic_dec_return_release atomic_dec_return_release
480 #ifndef atomic_dec_return_relaxed
481 static __always_inline int
482 atomic_dec_return_relaxed(atomic_t *v)
484 return atomic_sub_return_relaxed(1, v);
486 #define atomic_dec_return_relaxed atomic_dec_return_relaxed
489 #else /* atomic_dec_return_relaxed */
491 #ifndef atomic_dec_return_acquire
492 static __always_inline int
493 atomic_dec_return_acquire(atomic_t *v)
495 int ret = atomic_dec_return_relaxed(v);
496 __atomic_acquire_fence();
499 #define atomic_dec_return_acquire atomic_dec_return_acquire
502 #ifndef atomic_dec_return_release
503 static __always_inline int
504 atomic_dec_return_release(atomic_t *v)
506 __atomic_release_fence();
507 return atomic_dec_return_relaxed(v);
509 #define atomic_dec_return_release atomic_dec_return_release
512 #ifndef atomic_dec_return
513 static __always_inline int
514 atomic_dec_return(atomic_t *v)
517 __atomic_pre_full_fence();
518 ret = atomic_dec_return_relaxed(v);
519 __atomic_post_full_fence();
522 #define atomic_dec_return atomic_dec_return
525 #endif /* atomic_dec_return_relaxed */
527 #ifndef atomic_fetch_dec_relaxed
528 #ifdef atomic_fetch_dec
529 #define atomic_fetch_dec_acquire atomic_fetch_dec
530 #define atomic_fetch_dec_release atomic_fetch_dec
531 #define atomic_fetch_dec_relaxed atomic_fetch_dec
532 #endif /* atomic_fetch_dec */
534 #ifndef atomic_fetch_dec
535 static __always_inline int
536 atomic_fetch_dec(atomic_t *v)
538 return atomic_fetch_sub(1, v);
540 #define atomic_fetch_dec atomic_fetch_dec
543 #ifndef atomic_fetch_dec_acquire
544 static __always_inline int
545 atomic_fetch_dec_acquire(atomic_t *v)
547 return atomic_fetch_sub_acquire(1, v);
549 #define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
552 #ifndef atomic_fetch_dec_release
553 static __always_inline int
554 atomic_fetch_dec_release(atomic_t *v)
556 return atomic_fetch_sub_release(1, v);
558 #define atomic_fetch_dec_release atomic_fetch_dec_release
561 #ifndef atomic_fetch_dec_relaxed
562 static __always_inline int
563 atomic_fetch_dec_relaxed(atomic_t *v)
565 return atomic_fetch_sub_relaxed(1, v);
567 #define atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
570 #else /* atomic_fetch_dec_relaxed */
572 #ifndef atomic_fetch_dec_acquire
573 static __always_inline int
574 atomic_fetch_dec_acquire(atomic_t *v)
576 int ret = atomic_fetch_dec_relaxed(v);
577 __atomic_acquire_fence();
580 #define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
583 #ifndef atomic_fetch_dec_release
584 static __always_inline int
585 atomic_fetch_dec_release(atomic_t *v)
587 __atomic_release_fence();
588 return atomic_fetch_dec_relaxed(v);
590 #define atomic_fetch_dec_release atomic_fetch_dec_release
593 #ifndef atomic_fetch_dec
594 static __always_inline int
595 atomic_fetch_dec(atomic_t *v)
598 __atomic_pre_full_fence();
599 ret = atomic_fetch_dec_relaxed(v);
600 __atomic_post_full_fence();
603 #define atomic_fetch_dec atomic_fetch_dec
606 #endif /* atomic_fetch_dec_relaxed */
/* atomic_fetch_and: ordering fallbacks, same scheme as atomic_fetch_add. */
#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_acquire atomic_fetch_and
#define atomic_fetch_and_release atomic_fetch_and
#define atomic_fetch_and_relaxed atomic_fetch_and
#else /* atomic_fetch_and_relaxed */

#ifndef atomic_fetch_and_acquire
static __always_inline int
atomic_fetch_and_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_and_acquire atomic_fetch_and_acquire
#endif

#ifndef atomic_fetch_and_release
static __always_inline int
atomic_fetch_and_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_and_relaxed(i, v);
}
#define atomic_fetch_and_release atomic_fetch_and_release
#endif

#ifndef atomic_fetch_and
static __always_inline int
atomic_fetch_and(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_and atomic_fetch_and
#endif

#endif /* atomic_fetch_and_relaxed */
650 #ifndef atomic_andnot
651 static __always_inline void
652 atomic_andnot(int i, atomic_t *v)
656 #define atomic_andnot atomic_andnot
659 #ifndef atomic_fetch_andnot_relaxed
660 #ifdef atomic_fetch_andnot
661 #define atomic_fetch_andnot_acquire atomic_fetch_andnot
662 #define atomic_fetch_andnot_release atomic_fetch_andnot
663 #define atomic_fetch_andnot_relaxed atomic_fetch_andnot
664 #endif /* atomic_fetch_andnot */
666 #ifndef atomic_fetch_andnot
667 static __always_inline int
668 atomic_fetch_andnot(int i, atomic_t *v)
670 return atomic_fetch_and(~i, v);
672 #define atomic_fetch_andnot atomic_fetch_andnot
675 #ifndef atomic_fetch_andnot_acquire
676 static __always_inline int
677 atomic_fetch_andnot_acquire(int i, atomic_t *v)
679 return atomic_fetch_and_acquire(~i, v);
681 #define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
684 #ifndef atomic_fetch_andnot_release
685 static __always_inline int
686 atomic_fetch_andnot_release(int i, atomic_t *v)
688 return atomic_fetch_and_release(~i, v);
690 #define atomic_fetch_andnot_release atomic_fetch_andnot_release
693 #ifndef atomic_fetch_andnot_relaxed
694 static __always_inline int
695 atomic_fetch_andnot_relaxed(int i, atomic_t *v)
697 return atomic_fetch_and_relaxed(~i, v);
699 #define atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
702 #else /* atomic_fetch_andnot_relaxed */
704 #ifndef atomic_fetch_andnot_acquire
705 static __always_inline int
706 atomic_fetch_andnot_acquire(int i, atomic_t *v)
708 int ret = atomic_fetch_andnot_relaxed(i, v);
709 __atomic_acquire_fence();
712 #define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
715 #ifndef atomic_fetch_andnot_release
716 static __always_inline int
717 atomic_fetch_andnot_release(int i, atomic_t *v)
719 __atomic_release_fence();
720 return atomic_fetch_andnot_relaxed(i, v);
722 #define atomic_fetch_andnot_release atomic_fetch_andnot_release
725 #ifndef atomic_fetch_andnot
726 static __always_inline int
727 atomic_fetch_andnot(int i, atomic_t *v)
730 __atomic_pre_full_fence();
731 ret = atomic_fetch_andnot_relaxed(i, v);
732 __atomic_post_full_fence();
735 #define atomic_fetch_andnot atomic_fetch_andnot
738 #endif /* atomic_fetch_andnot_relaxed */
/* atomic_fetch_or: ordering fallbacks, same scheme as atomic_fetch_add. */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_acquire atomic_fetch_or
#define atomic_fetch_or_release atomic_fetch_or
#define atomic_fetch_or_relaxed atomic_fetch_or
#else /* atomic_fetch_or_relaxed */

#ifndef atomic_fetch_or_acquire
static __always_inline int
atomic_fetch_or_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_or_acquire atomic_fetch_or_acquire
#endif

#ifndef atomic_fetch_or_release
static __always_inline int
atomic_fetch_or_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_or_relaxed(i, v);
}
#define atomic_fetch_or_release atomic_fetch_or_release
#endif

#ifndef atomic_fetch_or
static __always_inline int
atomic_fetch_or(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_or atomic_fetch_or
#endif

#endif /* atomic_fetch_or_relaxed */
/* atomic_fetch_xor: ordering fallbacks, same scheme as atomic_fetch_add. */
#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_acquire atomic_fetch_xor
#define atomic_fetch_xor_release atomic_fetch_xor
#define atomic_fetch_xor_relaxed atomic_fetch_xor
#else /* atomic_fetch_xor_relaxed */

#ifndef atomic_fetch_xor_acquire
static __always_inline int
atomic_fetch_xor_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_xor_acquire atomic_fetch_xor_acquire
#endif

#ifndef atomic_fetch_xor_release
static __always_inline int
atomic_fetch_xor_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_xor_relaxed(i, v);
}
#define atomic_fetch_xor_release atomic_fetch_xor_release
#endif

#ifndef atomic_fetch_xor
static __always_inline int
atomic_fetch_xor(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_xor atomic_fetch_xor
#endif

#endif /* atomic_fetch_xor_relaxed */
/* atomic_xchg: ordering fallbacks; note the (v, i) argument order. */
#ifndef atomic_xchg_relaxed
#define atomic_xchg_acquire atomic_xchg
#define atomic_xchg_release atomic_xchg
#define atomic_xchg_relaxed atomic_xchg
#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
static __always_inline int
atomic_xchg_acquire(atomic_t *v, int i)
{
	int ret = atomic_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_xchg_acquire atomic_xchg_acquire
#endif

#ifndef atomic_xchg_release
static __always_inline int
atomic_xchg_release(atomic_t *v, int i)
{
	__atomic_release_fence();
	return atomic_xchg_relaxed(v, i);
}
#define atomic_xchg_release atomic_xchg_release
#endif

#ifndef atomic_xchg
static __always_inline int
atomic_xchg(atomic_t *v, int i)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_xchg atomic_xchg
#endif

#endif /* atomic_xchg_relaxed */
/* atomic_cmpxchg: ordering fallbacks, same scheme as atomic_xchg. */
#ifndef atomic_cmpxchg_relaxed
#define atomic_cmpxchg_acquire atomic_cmpxchg
#define atomic_cmpxchg_release atomic_cmpxchg
#define atomic_cmpxchg_relaxed atomic_cmpxchg
#else /* atomic_cmpxchg_relaxed */

#ifndef atomic_cmpxchg_acquire
static __always_inline int
atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
{
	int ret = atomic_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_cmpxchg_acquire atomic_cmpxchg_acquire
#endif

#ifndef atomic_cmpxchg_release
static __always_inline int
atomic_cmpxchg_release(atomic_t *v, int old, int new)
{
	__atomic_release_fence();
	return atomic_cmpxchg_relaxed(v, old, new);
}
#define atomic_cmpxchg_release atomic_cmpxchg_release
#endif

#ifndef atomic_cmpxchg
static __always_inline int
atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_cmpxchg atomic_cmpxchg
#endif

#endif /* atomic_cmpxchg_relaxed */
908 #ifndef atomic_try_cmpxchg_relaxed
909 #ifdef atomic_try_cmpxchg
910 #define atomic_try_cmpxchg_acquire atomic_try_cmpxchg
911 #define atomic_try_cmpxchg_release atomic_try_cmpxchg
912 #define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg
913 #endif /* atomic_try_cmpxchg */
915 #ifndef atomic_try_cmpxchg
916 static __always_inline bool
917 atomic_try_cmpxchg(atomic_t *v, int *old, int new)
920 r = atomic_cmpxchg(v, o, new);
921 if (unlikely(r != o))
923 return likely(r == o);
925 #define atomic_try_cmpxchg atomic_try_cmpxchg
928 #ifndef atomic_try_cmpxchg_acquire
929 static __always_inline bool
930 atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
933 r = atomic_cmpxchg_acquire(v, o, new);
934 if (unlikely(r != o))
936 return likely(r == o);
938 #define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
941 #ifndef atomic_try_cmpxchg_release
942 static __always_inline bool
943 atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
946 r = atomic_cmpxchg_release(v, o, new);
947 if (unlikely(r != o))
949 return likely(r == o);
951 #define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
954 #ifndef atomic_try_cmpxchg_relaxed
955 static __always_inline bool
956 atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
959 r = atomic_cmpxchg_relaxed(v, o, new);
960 if (unlikely(r != o))
962 return likely(r == o);
964 #define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
967 #else /* atomic_try_cmpxchg_relaxed */
969 #ifndef atomic_try_cmpxchg_acquire
970 static __always_inline bool
971 atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
973 bool ret = atomic_try_cmpxchg_relaxed(v, old, new);
974 __atomic_acquire_fence();
977 #define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
980 #ifndef atomic_try_cmpxchg_release
981 static __always_inline bool
982 atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
984 __atomic_release_fence();
985 return atomic_try_cmpxchg_relaxed(v, old, new);
987 #define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
990 #ifndef atomic_try_cmpxchg
991 static __always_inline bool
992 atomic_try_cmpxchg(atomic_t *v, int *old, int new)
995 __atomic_pre_full_fence();
996 ret = atomic_try_cmpxchg_relaxed(v, old, new);
997 __atomic_post_full_fence();
1000 #define atomic_try_cmpxchg atomic_try_cmpxchg
1003 #endif /* atomic_try_cmpxchg_relaxed */
1005 #ifndef atomic_sub_and_test
1007 * atomic_sub_and_test - subtract value from variable and test result
1008 * @i: integer value to subtract
1009 * @v: pointer of type atomic_t
1011 * Atomically subtracts @i from @v and returns
1012 * true if the result is zero, or false for all
1015 static __always_inline bool
1016 atomic_sub_and_test(int i, atomic_t *v)
1018 return atomic_sub_return(i, v) == 0;
1020 #define atomic_sub_and_test atomic_sub_and_test
1023 #ifndef atomic_dec_and_test
1025 * atomic_dec_and_test - decrement and test
1026 * @v: pointer of type atomic_t
1028 * Atomically decrements @v by 1 and
1029 * returns true if the result is 0, or false for all other
1032 static __always_inline bool
1033 atomic_dec_and_test(atomic_t *v)
1035 return atomic_dec_return(v) == 0;
1037 #define atomic_dec_and_test atomic_dec_and_test
1040 #ifndef atomic_inc_and_test
1042 * atomic_inc_and_test - increment and test
1043 * @v: pointer of type atomic_t
1045 * Atomically increments @v by 1
1046 * and returns true if the result is zero, or false for all
1049 static __always_inline bool
1050 atomic_inc_and_test(atomic_t *v)
1052 return atomic_inc_return(v) == 0;
1054 #define atomic_inc_and_test atomic_inc_and_test
1057 #ifndef atomic_add_negative
1059 * atomic_add_negative - add and test if negative
1060 * @i: integer value to add
1061 * @v: pointer of type atomic_t
1063 * Atomically adds @i to @v and returns true
1064 * if the result is negative, or false when
1065 * result is greater than or equal to zero.
1067 static __always_inline bool
1068 atomic_add_negative(int i, atomic_t *v)
1070 return atomic_add_return(i, v) < 0;
1072 #define atomic_add_negative atomic_add_negative
1075 #ifndef atomic_fetch_add_unless
1077 * atomic_fetch_add_unless - add unless the number is already a given value
1078 * @v: pointer of type atomic_t
1079 * @a: the amount to add to v...
1080 * @u: ...unless v is equal to u.
1082 * Atomically adds @a to @v, so long as @v was not already @u.
1083 * Returns original value of @v
1085 static __always_inline int
1086 atomic_fetch_add_unless(atomic_t *v, int a, int u)
1088 int c = atomic_read(v);
1091 if (unlikely(c == u))
1093 } while (!atomic_try_cmpxchg(v, &c, c + a));
1097 #define atomic_fetch_add_unless atomic_fetch_add_unless
1100 #ifndef atomic_add_unless
1102 * atomic_add_unless - add unless the number is already a given value
1103 * @v: pointer of type atomic_t
1104 * @a: the amount to add to v...
1105 * @u: ...unless v is equal to u.
1107 * Atomically adds @a to @v, if @v was not already @u.
1108 * Returns true if the addition was done.
1110 static __always_inline bool
1111 atomic_add_unless(atomic_t *v, int a, int u)
1113 return atomic_fetch_add_unless(v, a, u) != u;
1115 #define atomic_add_unless atomic_add_unless
1118 #ifndef atomic_inc_not_zero
1120 * atomic_inc_not_zero - increment unless the number is zero
1121 * @v: pointer of type atomic_t
1123 * Atomically increments @v by 1, if @v is non-zero.
1124 * Returns true if the increment was done.
1126 static __always_inline bool
1127 atomic_inc_not_zero(atomic_t *v)
1129 return atomic_add_unless(v, 1, 0);
1131 #define atomic_inc_not_zero atomic_inc_not_zero
1134 #ifndef atomic_inc_unless_negative
1135 static __always_inline bool
1136 atomic_inc_unless_negative(atomic_t *v)
1138 int c = atomic_read(v);
1141 if (unlikely(c < 0))
1143 } while (!atomic_try_cmpxchg(v, &c, c + 1));
1147 #define atomic_inc_unless_negative atomic_inc_unless_negative
1150 #ifndef atomic_dec_unless_positive
1151 static __always_inline bool
1152 atomic_dec_unless_positive(atomic_t *v)
1154 int c = atomic_read(v);
1157 if (unlikely(c > 0))
1159 } while (!atomic_try_cmpxchg(v, &c, c - 1));
1163 #define atomic_dec_unless_positive atomic_dec_unless_positive
1166 #ifndef atomic_dec_if_positive
1167 static __always_inline int
1168 atomic_dec_if_positive(atomic_t *v)
1170 int dec, c = atomic_read(v);
1174 if (unlikely(dec < 0))
1176 } while (!atomic_try_cmpxchg(v, &c, dec));
1180 #define atomic_dec_if_positive atomic_dec_if_positive
/* Spin-wait on v->counter until condition @c holds, forwarding to smp_cond_load_*(). */
#define atomic_cond_read_acquire(v, c) smp_cond_load_acquire(&(v)->counter, (c))
#define atomic_cond_read_relaxed(v, c) smp_cond_load_relaxed(&(v)->counter, (c))
/* Pull in the spinlock-based generic 64-bit atomics where selected. */
#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif
1190 #ifndef atomic64_read_acquire
1191 static __always_inline s64
1192 atomic64_read_acquire(const atomic64_t *v)
1194 return smp_load_acquire(&(v)->counter);
1196 #define atomic64_read_acquire atomic64_read_acquire
1199 #ifndef atomic64_set_release
1200 static __always_inline void
1201 atomic64_set_release(atomic64_t *v, s64 i)
1203 smp_store_release(&(v)->counter, i);
1205 #define atomic64_set_release atomic64_set_release
/* atomic64_add_return: ordering fallbacks, same scheme as atomic_add_return. */
#ifndef atomic64_add_return_relaxed
#define atomic64_add_return_acquire atomic64_add_return
#define atomic64_add_return_release atomic64_add_return
#define atomic64_add_return_relaxed atomic64_add_return
#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
static __always_inline s64
atomic64_add_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_add_return_acquire atomic64_add_return_acquire
#endif

#ifndef atomic64_add_return_release
static __always_inline s64
atomic64_add_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_add_return_relaxed(i, v);
}
#define atomic64_add_return_release atomic64_add_return_release
#endif

#ifndef atomic64_add_return
static __always_inline s64
atomic64_add_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_add_return atomic64_add_return
#endif

#endif /* atomic64_add_return_relaxed */
/* atomic64_fetch_add: ordering fallbacks, same scheme as atomic_fetch_add. */
#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_acquire atomic64_fetch_add
#define atomic64_fetch_add_release atomic64_fetch_add
#define atomic64_fetch_add_relaxed atomic64_fetch_add
#else /* atomic64_fetch_add_relaxed */

#ifndef atomic64_fetch_add_acquire
static __always_inline s64
atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_add_acquire atomic64_fetch_add_acquire
#endif

#ifndef atomic64_fetch_add_release
static __always_inline s64
atomic64_fetch_add_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_add_relaxed(i, v);
}
#define atomic64_fetch_add_release atomic64_fetch_add_release
#endif

#ifndef atomic64_fetch_add
static __always_inline s64
atomic64_fetch_add(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_add atomic64_fetch_add
#endif

#endif /* atomic64_fetch_add_relaxed */
/* atomic64_sub_return: ordering fallbacks, same scheme as atomic_sub_return. */
#ifndef atomic64_sub_return_relaxed
#define atomic64_sub_return_acquire atomic64_sub_return
#define atomic64_sub_return_release atomic64_sub_return
#define atomic64_sub_return_relaxed atomic64_sub_return
#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
static __always_inline s64
atomic64_sub_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_sub_return_acquire atomic64_sub_return_acquire
#endif

#ifndef atomic64_sub_return_release
static __always_inline s64
atomic64_sub_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_sub_return_relaxed(i, v);
}
#define atomic64_sub_return_release atomic64_sub_return_release
#endif

#ifndef atomic64_sub_return
static __always_inline s64
atomic64_sub_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_sub_return atomic64_sub_return
#endif

#endif /* atomic64_sub_return_relaxed */
/* atomic64_fetch_sub: ordering fallbacks, same scheme as atomic_fetch_sub. */
#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_acquire atomic64_fetch_sub
#define atomic64_fetch_sub_release atomic64_fetch_sub
#define atomic64_fetch_sub_relaxed atomic64_fetch_sub
#else /* atomic64_fetch_sub_relaxed */

#ifndef atomic64_fetch_sub_acquire
static __always_inline s64
atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
#endif

#ifndef atomic64_fetch_sub_release
static __always_inline s64
atomic64_fetch_sub_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_sub_relaxed(i, v);
}
#define atomic64_fetch_sub_release atomic64_fetch_sub_release
#endif

#ifndef atomic64_fetch_sub
static __always_inline s64
atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_sub atomic64_fetch_sub
#endif

#endif /* atomic64_fetch_sub_relaxed */
/* Fallback definition of atomic64_inc() for architectures lacking one. */
1376 #ifndef atomic64_inc
1377 static __always_inline void
1378 atomic64_inc(atomic64_t *v)
1382 #define atomic64_inc atomic64_inc
/*
 * atomic64_inc_return() ordering variants.
 *
 * Without a _relaxed primitive: if a plain atomic64_inc_return() exists
 * it serves for every ordering; any variant still missing falls back to
 * the corresponding atomic64_add_return*(1, v).  With a _relaxed
 * primitive: acquire/release/full are synthesized via fences.
 */
1385 #ifndef atomic64_inc_return_relaxed
1386 #ifdef atomic64_inc_return
1387 #define atomic64_inc_return_acquire atomic64_inc_return
1388 #define atomic64_inc_return_release atomic64_inc_return
1389 #define atomic64_inc_return_relaxed atomic64_inc_return
1390 #endif /* atomic64_inc_return */
1392 #ifndef atomic64_inc_return
1393 static __always_inline s64
1394 atomic64_inc_return(atomic64_t *v)
1396 return atomic64_add_return(1, v);
1398 #define atomic64_inc_return atomic64_inc_return
1401 #ifndef atomic64_inc_return_acquire
1402 static __always_inline s64
1403 atomic64_inc_return_acquire(atomic64_t *v)
1405 return atomic64_add_return_acquire(1, v);
1407 #define atomic64_inc_return_acquire atomic64_inc_return_acquire
1410 #ifndef atomic64_inc_return_release
1411 static __always_inline s64
1412 atomic64_inc_return_release(atomic64_t *v)
1414 return atomic64_add_return_release(1, v);
1416 #define atomic64_inc_return_release atomic64_inc_return_release
1419 #ifndef atomic64_inc_return_relaxed
1420 static __always_inline s64
1421 atomic64_inc_return_relaxed(atomic64_t *v)
1423 return atomic64_add_return_relaxed(1, v);
1425 #define atomic64_inc_return_relaxed atomic64_inc_return_relaxed
1428 #else /* atomic64_inc_return_relaxed */
/* acquire: fence after the relaxed RMW */
1430 #ifndef atomic64_inc_return_acquire
1431 static __always_inline s64
1432 atomic64_inc_return_acquire(atomic64_t *v)
1434 s64 ret = atomic64_inc_return_relaxed(v);
1435 __atomic_acquire_fence();
1438 #define atomic64_inc_return_acquire atomic64_inc_return_acquire
/* release: fence before the relaxed RMW */
1441 #ifndef atomic64_inc_return_release
1442 static __always_inline s64
1443 atomic64_inc_return_release(atomic64_t *v)
1445 __atomic_release_fence();
1446 return atomic64_inc_return_relaxed(v);
1448 #define atomic64_inc_return_release atomic64_inc_return_release
/* fully ordered: relaxed RMW bracketed by full fences */
1451 #ifndef atomic64_inc_return
1452 static __always_inline s64
1453 atomic64_inc_return(atomic64_t *v)
1456 __atomic_pre_full_fence();
1457 ret = atomic64_inc_return_relaxed(v);
1458 __atomic_post_full_fence();
1461 #define atomic64_inc_return atomic64_inc_return
1464 #endif /* atomic64_inc_return_relaxed */
/*
 * atomic64_fetch_inc() ordering variants: same scheme as inc_return
 * above, but the missing-primitive fallbacks delegate to
 * atomic64_fetch_add*(1, v).
 */
1466 #ifndef atomic64_fetch_inc_relaxed
1467 #ifdef atomic64_fetch_inc
1468 #define atomic64_fetch_inc_acquire atomic64_fetch_inc
1469 #define atomic64_fetch_inc_release atomic64_fetch_inc
1470 #define atomic64_fetch_inc_relaxed atomic64_fetch_inc
1471 #endif /* atomic64_fetch_inc */
1473 #ifndef atomic64_fetch_inc
1474 static __always_inline s64
1475 atomic64_fetch_inc(atomic64_t *v)
1477 return atomic64_fetch_add(1, v);
1479 #define atomic64_fetch_inc atomic64_fetch_inc
1482 #ifndef atomic64_fetch_inc_acquire
1483 static __always_inline s64
1484 atomic64_fetch_inc_acquire(atomic64_t *v)
1486 return atomic64_fetch_add_acquire(1, v);
1488 #define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
1491 #ifndef atomic64_fetch_inc_release
1492 static __always_inline s64
1493 atomic64_fetch_inc_release(atomic64_t *v)
1495 return atomic64_fetch_add_release(1, v);
1497 #define atomic64_fetch_inc_release atomic64_fetch_inc_release
1500 #ifndef atomic64_fetch_inc_relaxed
1501 static __always_inline s64
1502 atomic64_fetch_inc_relaxed(atomic64_t *v)
1504 return atomic64_fetch_add_relaxed(1, v);
1506 #define atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
1509 #else /* atomic64_fetch_inc_relaxed */
/* acquire: fence after the relaxed RMW */
1511 #ifndef atomic64_fetch_inc_acquire
1512 static __always_inline s64
1513 atomic64_fetch_inc_acquire(atomic64_t *v)
1515 s64 ret = atomic64_fetch_inc_relaxed(v);
1516 __atomic_acquire_fence();
1519 #define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
/* release: fence before the relaxed RMW */
1522 #ifndef atomic64_fetch_inc_release
1523 static __always_inline s64
1524 atomic64_fetch_inc_release(atomic64_t *v)
1526 __atomic_release_fence();
1527 return atomic64_fetch_inc_relaxed(v);
1529 #define atomic64_fetch_inc_release atomic64_fetch_inc_release
/* fully ordered: relaxed RMW bracketed by full fences */
1532 #ifndef atomic64_fetch_inc
1533 static __always_inline s64
1534 atomic64_fetch_inc(atomic64_t *v)
1537 __atomic_pre_full_fence();
1538 ret = atomic64_fetch_inc_relaxed(v);
1539 __atomic_post_full_fence();
1542 #define atomic64_fetch_inc atomic64_fetch_inc
1545 #endif /* atomic64_fetch_inc_relaxed */
/* Fallback definition of atomic64_dec() for architectures lacking one. */
1547 #ifndef atomic64_dec
1548 static __always_inline void
1549 atomic64_dec(atomic64_t *v)
1553 #define atomic64_dec atomic64_dec
/*
 * atomic64_dec_return() ordering variants: mirrors inc_return, with the
 * missing-primitive fallbacks delegating to atomic64_sub_return*(1, v).
 */
1556 #ifndef atomic64_dec_return_relaxed
1557 #ifdef atomic64_dec_return
1558 #define atomic64_dec_return_acquire atomic64_dec_return
1559 #define atomic64_dec_return_release atomic64_dec_return
1560 #define atomic64_dec_return_relaxed atomic64_dec_return
1561 #endif /* atomic64_dec_return */
1563 #ifndef atomic64_dec_return
1564 static __always_inline s64
1565 atomic64_dec_return(atomic64_t *v)
1567 return atomic64_sub_return(1, v);
1569 #define atomic64_dec_return atomic64_dec_return
1572 #ifndef atomic64_dec_return_acquire
1573 static __always_inline s64
1574 atomic64_dec_return_acquire(atomic64_t *v)
1576 return atomic64_sub_return_acquire(1, v);
1578 #define atomic64_dec_return_acquire atomic64_dec_return_acquire
1581 #ifndef atomic64_dec_return_release
1582 static __always_inline s64
1583 atomic64_dec_return_release(atomic64_t *v)
1585 return atomic64_sub_return_release(1, v);
1587 #define atomic64_dec_return_release atomic64_dec_return_release
1590 #ifndef atomic64_dec_return_relaxed
1591 static __always_inline s64
1592 atomic64_dec_return_relaxed(atomic64_t *v)
1594 return atomic64_sub_return_relaxed(1, v);
1596 #define atomic64_dec_return_relaxed atomic64_dec_return_relaxed
1599 #else /* atomic64_dec_return_relaxed */
/* acquire: fence after the relaxed RMW */
1601 #ifndef atomic64_dec_return_acquire
1602 static __always_inline s64
1603 atomic64_dec_return_acquire(atomic64_t *v)
1605 s64 ret = atomic64_dec_return_relaxed(v);
1606 __atomic_acquire_fence();
1609 #define atomic64_dec_return_acquire atomic64_dec_return_acquire
/* release: fence before the relaxed RMW */
1612 #ifndef atomic64_dec_return_release
1613 static __always_inline s64
1614 atomic64_dec_return_release(atomic64_t *v)
1616 __atomic_release_fence();
1617 return atomic64_dec_return_relaxed(v);
1619 #define atomic64_dec_return_release atomic64_dec_return_release
/* fully ordered: relaxed RMW bracketed by full fences */
1622 #ifndef atomic64_dec_return
1623 static __always_inline s64
1624 atomic64_dec_return(atomic64_t *v)
1627 __atomic_pre_full_fence();
1628 ret = atomic64_dec_return_relaxed(v);
1629 __atomic_post_full_fence();
1632 #define atomic64_dec_return atomic64_dec_return
1635 #endif /* atomic64_dec_return_relaxed */
/*
 * atomic64_fetch_dec() ordering variants: mirrors fetch_inc, with the
 * missing-primitive fallbacks delegating to atomic64_fetch_sub*(1, v).
 */
1637 #ifndef atomic64_fetch_dec_relaxed
1638 #ifdef atomic64_fetch_dec
1639 #define atomic64_fetch_dec_acquire atomic64_fetch_dec
1640 #define atomic64_fetch_dec_release atomic64_fetch_dec
1641 #define atomic64_fetch_dec_relaxed atomic64_fetch_dec
1642 #endif /* atomic64_fetch_dec */
1644 #ifndef atomic64_fetch_dec
1645 static __always_inline s64
1646 atomic64_fetch_dec(atomic64_t *v)
1648 return atomic64_fetch_sub(1, v);
1650 #define atomic64_fetch_dec atomic64_fetch_dec
1653 #ifndef atomic64_fetch_dec_acquire
1654 static __always_inline s64
1655 atomic64_fetch_dec_acquire(atomic64_t *v)
1657 return atomic64_fetch_sub_acquire(1, v);
1659 #define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
1662 #ifndef atomic64_fetch_dec_release
1663 static __always_inline s64
1664 atomic64_fetch_dec_release(atomic64_t *v)
1666 return atomic64_fetch_sub_release(1, v);
1668 #define atomic64_fetch_dec_release atomic64_fetch_dec_release
1671 #ifndef atomic64_fetch_dec_relaxed
1672 static __always_inline s64
1673 atomic64_fetch_dec_relaxed(atomic64_t *v)
1675 return atomic64_fetch_sub_relaxed(1, v);
1677 #define atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
1680 #else /* atomic64_fetch_dec_relaxed */
/* acquire: fence after the relaxed RMW */
1682 #ifndef atomic64_fetch_dec_acquire
1683 static __always_inline s64
1684 atomic64_fetch_dec_acquire(atomic64_t *v)
1686 s64 ret = atomic64_fetch_dec_relaxed(v);
1687 __atomic_acquire_fence();
1690 #define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
/* release: fence before the relaxed RMW */
1693 #ifndef atomic64_fetch_dec_release
1694 static __always_inline s64
1695 atomic64_fetch_dec_release(atomic64_t *v)
1697 __atomic_release_fence();
1698 return atomic64_fetch_dec_relaxed(v);
1700 #define atomic64_fetch_dec_release atomic64_fetch_dec_release
/* fully ordered: relaxed RMW bracketed by full fences */
1703 #ifndef atomic64_fetch_dec
1704 static __always_inline s64
1705 atomic64_fetch_dec(atomic64_t *v)
1708 __atomic_pre_full_fence();
1709 ret = atomic64_fetch_dec_relaxed(v);
1710 __atomic_post_full_fence();
1713 #define atomic64_fetch_dec atomic64_fetch_dec
1716 #endif /* atomic64_fetch_dec_relaxed */
/*
 * atomic64_fetch_and() ordering variants: alias the fully-ordered op,
 * or synthesize acquire/release/full from the _relaxed primitive.
 */
1718 #ifndef atomic64_fetch_and_relaxed
1719 #define atomic64_fetch_and_acquire atomic64_fetch_and
1720 #define atomic64_fetch_and_release atomic64_fetch_and
1721 #define atomic64_fetch_and_relaxed atomic64_fetch_and
1722 #else /* atomic64_fetch_and_relaxed */
/* acquire: fence after the relaxed RMW */
1724 #ifndef atomic64_fetch_and_acquire
1725 static __always_inline s64
1726 atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
1728 s64 ret = atomic64_fetch_and_relaxed(i, v);
1729 __atomic_acquire_fence();
1732 #define atomic64_fetch_and_acquire atomic64_fetch_and_acquire
/* release: fence before the relaxed RMW */
1735 #ifndef atomic64_fetch_and_release
1736 static __always_inline s64
1737 atomic64_fetch_and_release(s64 i, atomic64_t *v)
1739 __atomic_release_fence();
1740 return atomic64_fetch_and_relaxed(i, v);
1742 #define atomic64_fetch_and_release atomic64_fetch_and_release
/* fully ordered: relaxed RMW bracketed by full fences */
1745 #ifndef atomic64_fetch_and
1746 static __always_inline s64
1747 atomic64_fetch_and(s64 i, atomic64_t *v)
1750 __atomic_pre_full_fence();
1751 ret = atomic64_fetch_and_relaxed(i, v);
1752 __atomic_post_full_fence();
1755 #define atomic64_fetch_and atomic64_fetch_and
1758 #endif /* atomic64_fetch_and_relaxed */
/* atomic64_andnot(): fallback implemented as AND with the complement. */
1760 #ifndef atomic64_andnot
1761 static __always_inline void
1762 atomic64_andnot(s64 i, atomic64_t *v)
1764 atomic64_and(~i, v);
1766 #define atomic64_andnot atomic64_andnot
/*
 * atomic64_fetch_andnot() ordering variants: missing-primitive fallbacks
 * delegate to the matching atomic64_fetch_and*(~i, v); otherwise the
 * usual fence synthesis from the _relaxed primitive applies.
 */
1769 #ifndef atomic64_fetch_andnot_relaxed
1770 #ifdef atomic64_fetch_andnot
1771 #define atomic64_fetch_andnot_acquire atomic64_fetch_andnot
1772 #define atomic64_fetch_andnot_release atomic64_fetch_andnot
1773 #define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot
1774 #endif /* atomic64_fetch_andnot */
1776 #ifndef atomic64_fetch_andnot
1777 static __always_inline s64
1778 atomic64_fetch_andnot(s64 i, atomic64_t *v)
1780 return atomic64_fetch_and(~i, v);
1782 #define atomic64_fetch_andnot atomic64_fetch_andnot
1785 #ifndef atomic64_fetch_andnot_acquire
1786 static __always_inline s64
1787 atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
1789 return atomic64_fetch_and_acquire(~i, v);
1791 #define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
1794 #ifndef atomic64_fetch_andnot_release
1795 static __always_inline s64
1796 atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
1798 return atomic64_fetch_and_release(~i, v);
1800 #define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
1803 #ifndef atomic64_fetch_andnot_relaxed
1804 static __always_inline s64
1805 atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
1807 return atomic64_fetch_and_relaxed(~i, v);
1809 #define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
1812 #else /* atomic64_fetch_andnot_relaxed */
/* acquire: fence after the relaxed RMW */
1814 #ifndef atomic64_fetch_andnot_acquire
1815 static __always_inline s64
1816 atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
1818 s64 ret = atomic64_fetch_andnot_relaxed(i, v);
1819 __atomic_acquire_fence();
1822 #define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
/* release: fence before the relaxed RMW */
1825 #ifndef atomic64_fetch_andnot_release
1826 static __always_inline s64
1827 atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
1829 __atomic_release_fence();
1830 return atomic64_fetch_andnot_relaxed(i, v);
1832 #define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
/* fully ordered: relaxed RMW bracketed by full fences */
1835 #ifndef atomic64_fetch_andnot
1836 static __always_inline s64
1837 atomic64_fetch_andnot(s64 i, atomic64_t *v)
1840 __atomic_pre_full_fence();
1841 ret = atomic64_fetch_andnot_relaxed(i, v);
1842 __atomic_post_full_fence();
1845 #define atomic64_fetch_andnot atomic64_fetch_andnot
1848 #endif /* atomic64_fetch_andnot_relaxed */
/*
 * atomic64_fetch_or() ordering variants: alias the fully-ordered op,
 * or synthesize acquire/release/full from the _relaxed primitive.
 */
1850 #ifndef atomic64_fetch_or_relaxed
1851 #define atomic64_fetch_or_acquire atomic64_fetch_or
1852 #define atomic64_fetch_or_release atomic64_fetch_or
1853 #define atomic64_fetch_or_relaxed atomic64_fetch_or
1854 #else /* atomic64_fetch_or_relaxed */
/* acquire: fence after the relaxed RMW */
1856 #ifndef atomic64_fetch_or_acquire
1857 static __always_inline s64
1858 atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
1860 s64 ret = atomic64_fetch_or_relaxed(i, v);
1861 __atomic_acquire_fence();
1864 #define atomic64_fetch_or_acquire atomic64_fetch_or_acquire
/* release: fence before the relaxed RMW */
1867 #ifndef atomic64_fetch_or_release
1868 static __always_inline s64
1869 atomic64_fetch_or_release(s64 i, atomic64_t *v)
1871 __atomic_release_fence();
1872 return atomic64_fetch_or_relaxed(i, v);
1874 #define atomic64_fetch_or_release atomic64_fetch_or_release
/* fully ordered: relaxed RMW bracketed by full fences */
1877 #ifndef atomic64_fetch_or
1878 static __always_inline s64
1879 atomic64_fetch_or(s64 i, atomic64_t *v)
1882 __atomic_pre_full_fence();
1883 ret = atomic64_fetch_or_relaxed(i, v);
1884 __atomic_post_full_fence();
1887 #define atomic64_fetch_or atomic64_fetch_or
1890 #endif /* atomic64_fetch_or_relaxed */
/*
 * atomic64_fetch_xor() ordering variants: alias the fully-ordered op,
 * or synthesize acquire/release/full from the _relaxed primitive.
 */
1892 #ifndef atomic64_fetch_xor_relaxed
1893 #define atomic64_fetch_xor_acquire atomic64_fetch_xor
1894 #define atomic64_fetch_xor_release atomic64_fetch_xor
1895 #define atomic64_fetch_xor_relaxed atomic64_fetch_xor
1896 #else /* atomic64_fetch_xor_relaxed */
/* acquire: fence after the relaxed RMW */
1898 #ifndef atomic64_fetch_xor_acquire
1899 static __always_inline s64
1900 atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
1902 s64 ret = atomic64_fetch_xor_relaxed(i, v);
1903 __atomic_acquire_fence();
1906 #define atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
/* release: fence before the relaxed RMW */
1909 #ifndef atomic64_fetch_xor_release
1910 static __always_inline s64
1911 atomic64_fetch_xor_release(s64 i, atomic64_t *v)
1913 __atomic_release_fence();
1914 return atomic64_fetch_xor_relaxed(i, v);
1916 #define atomic64_fetch_xor_release atomic64_fetch_xor_release
/* fully ordered: relaxed RMW bracketed by full fences */
1919 #ifndef atomic64_fetch_xor
1920 static __always_inline s64
1921 atomic64_fetch_xor(s64 i, atomic64_t *v)
1924 __atomic_pre_full_fence();
1925 ret = atomic64_fetch_xor_relaxed(i, v);
1926 __atomic_post_full_fence();
1929 #define atomic64_fetch_xor atomic64_fetch_xor
1932 #endif /* atomic64_fetch_xor_relaxed */
/*
 * atomic64_xchg() ordering variants.  Note the (v, i) argument order,
 * unlike the arithmetic ops above.
 */
1934 #ifndef atomic64_xchg_relaxed
1935 #define atomic64_xchg_acquire atomic64_xchg
1936 #define atomic64_xchg_release atomic64_xchg
1937 #define atomic64_xchg_relaxed atomic64_xchg
1938 #else /* atomic64_xchg_relaxed */
/* acquire: fence after the relaxed exchange */
1940 #ifndef atomic64_xchg_acquire
1941 static __always_inline s64
1942 atomic64_xchg_acquire(atomic64_t *v, s64 i)
1944 s64 ret = atomic64_xchg_relaxed(v, i);
1945 __atomic_acquire_fence();
1948 #define atomic64_xchg_acquire atomic64_xchg_acquire
/* release: fence before the relaxed exchange */
1951 #ifndef atomic64_xchg_release
1952 static __always_inline s64
1953 atomic64_xchg_release(atomic64_t *v, s64 i)
1955 __atomic_release_fence();
1956 return atomic64_xchg_relaxed(v, i);
1958 #define atomic64_xchg_release atomic64_xchg_release
/* fully ordered: relaxed exchange bracketed by full fences */
1961 #ifndef atomic64_xchg
1962 static __always_inline s64
1963 atomic64_xchg(atomic64_t *v, s64 i)
1966 __atomic_pre_full_fence();
1967 ret = atomic64_xchg_relaxed(v, i);
1968 __atomic_post_full_fence();
1971 #define atomic64_xchg atomic64_xchg
1974 #endif /* atomic64_xchg_relaxed */
/*
 * atomic64_cmpxchg() ordering variants, synthesized from the _relaxed
 * compare-and-exchange with the usual fence placement.
 */
1976 #ifndef atomic64_cmpxchg_relaxed
1977 #define atomic64_cmpxchg_acquire atomic64_cmpxchg
1978 #define atomic64_cmpxchg_release atomic64_cmpxchg
1979 #define atomic64_cmpxchg_relaxed atomic64_cmpxchg
1980 #else /* atomic64_cmpxchg_relaxed */
/* acquire: fence after the relaxed cmpxchg */
1982 #ifndef atomic64_cmpxchg_acquire
1983 static __always_inline s64
1984 atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
1986 s64 ret = atomic64_cmpxchg_relaxed(v, old, new);
1987 __atomic_acquire_fence();
1990 #define atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
/* release: fence before the relaxed cmpxchg */
1993 #ifndef atomic64_cmpxchg_release
1994 static __always_inline s64
1995 atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
1997 __atomic_release_fence();
1998 return atomic64_cmpxchg_relaxed(v, old, new);
2000 #define atomic64_cmpxchg_release atomic64_cmpxchg_release
/* fully ordered: relaxed cmpxchg bracketed by full fences */
2003 #ifndef atomic64_cmpxchg
2004 static __always_inline s64
2005 atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
2008 __atomic_pre_full_fence();
2009 ret = atomic64_cmpxchg_relaxed(v, old, new);
2010 __atomic_post_full_fence();
2013 #define atomic64_cmpxchg atomic64_cmpxchg
2016 #endif /* atomic64_cmpxchg_relaxed */
/*
 * atomic64_try_cmpxchg() ordering variants.
 *
 * Without a _relaxed primitive, each variant is emulated with the
 * corresponding plain cmpxchg: it returns true iff the observed value
 * matched the expected one.  With a _relaxed primitive, the ordered
 * variants are synthesized via fences as for the other ops.
 */
2018 #ifndef atomic64_try_cmpxchg_relaxed
2019 #ifdef atomic64_try_cmpxchg
2020 #define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg
2021 #define atomic64_try_cmpxchg_release atomic64_try_cmpxchg
2022 #define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg
2023 #endif /* atomic64_try_cmpxchg */
2025 #ifndef atomic64_try_cmpxchg
2026 static __always_inline bool
2027 atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
2030 r = atomic64_cmpxchg(v, o, new);
2031 if (unlikely(r != o))
2033 return likely(r == o);
2035 #define atomic64_try_cmpxchg atomic64_try_cmpxchg
2038 #ifndef atomic64_try_cmpxchg_acquire
2039 static __always_inline bool
2040 atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
2043 r = atomic64_cmpxchg_acquire(v, o, new);
2044 if (unlikely(r != o))
2046 return likely(r == o);
2048 #define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
2051 #ifndef atomic64_try_cmpxchg_release
2052 static __always_inline bool
2053 atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
2056 r = atomic64_cmpxchg_release(v, o, new);
2057 if (unlikely(r != o))
2059 return likely(r == o);
2061 #define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
2064 #ifndef atomic64_try_cmpxchg_relaxed
2065 static __always_inline bool
2066 atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
2069 r = atomic64_cmpxchg_relaxed(v, o, new);
2070 if (unlikely(r != o))
2072 return likely(r == o);
2074 #define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed
2077 #else /* atomic64_try_cmpxchg_relaxed */
/* acquire: fence after the relaxed try_cmpxchg */
2079 #ifndef atomic64_try_cmpxchg_acquire
2080 static __always_inline bool
2081 atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
2083 bool ret = atomic64_try_cmpxchg_relaxed(v, old, new);
2084 __atomic_acquire_fence();
2087 #define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
/* release: fence before the relaxed try_cmpxchg */
2090 #ifndef atomic64_try_cmpxchg_release
2091 static __always_inline bool
2092 atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
2094 __atomic_release_fence();
2095 return atomic64_try_cmpxchg_relaxed(v, old, new);
2097 #define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
/* fully ordered: relaxed try_cmpxchg bracketed by full fences */
2100 #ifndef atomic64_try_cmpxchg
2101 static __always_inline bool
2102 atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
2105 __atomic_pre_full_fence();
2106 ret = atomic64_try_cmpxchg_relaxed(v, old, new);
2107 __atomic_post_full_fence();
2110 #define atomic64_try_cmpxchg atomic64_try_cmpxchg
2113 #endif /* atomic64_try_cmpxchg_relaxed */
/* Fallback: subtract and test for zero via the fully-ordered sub_return. */
2115 #ifndef atomic64_sub_and_test
2117 * atomic64_sub_and_test - subtract value from variable and test result
2118 * @i: integer value to subtract
2119 * @v: pointer of type atomic64_t
2121 * Atomically subtracts @i from @v and returns
2122 * true if the result is zero, or false for all
2125 static __always_inline bool
2126 atomic64_sub_and_test(s64 i, atomic64_t *v)
2128 return atomic64_sub_return(i, v) == 0;
2130 #define atomic64_sub_and_test atomic64_sub_and_test
/* Fallback: decrement and test for zero via the fully-ordered dec_return. */
2133 #ifndef atomic64_dec_and_test
2135 * atomic64_dec_and_test - decrement and test
2136 * @v: pointer of type atomic64_t
2138 * Atomically decrements @v by 1 and
2139 * returns true if the result is 0, or false for all other
2142 static __always_inline bool
2143 atomic64_dec_and_test(atomic64_t *v)
2145 return atomic64_dec_return(v) == 0;
2147 #define atomic64_dec_and_test atomic64_dec_and_test
/* Fallback: increment and test for zero via the fully-ordered inc_return. */
2150 #ifndef atomic64_inc_and_test
2152 * atomic64_inc_and_test - increment and test
2153 * @v: pointer of type atomic64_t
2155 * Atomically increments @v by 1
2156 * and returns true if the result is zero, or false for all
2159 static __always_inline bool
2160 atomic64_inc_and_test(atomic64_t *v)
2162 return atomic64_inc_return(v) == 0;
2164 #define atomic64_inc_and_test atomic64_inc_and_test
/* Fallback: add and test sign of the result via the fully-ordered add_return. */
2167 #ifndef atomic64_add_negative
2169 * atomic64_add_negative - add and test if negative
2170 * @i: integer value to add
2171 * @v: pointer of type atomic64_t
2173 * Atomically adds @i to @v and returns true
2174 * if the result is negative, or false when
2175 * result is greater than or equal to zero.
2177 static __always_inline bool
2178 atomic64_add_negative(s64 i, atomic64_t *v)
2180 return atomic64_add_return(i, v) < 0;
2182 #define atomic64_add_negative atomic64_add_negative
/*
 * Fallback: read the current value, then retry try_cmpxchg(c, c + a)
 * until it succeeds or the value equals @u.
 */
2185 #ifndef atomic64_fetch_add_unless
2187 * atomic64_fetch_add_unless - add unless the number is already a given value
2188 * @v: pointer of type atomic64_t
2189 * @a: the amount to add to v...
2190 * @u: ...unless v is equal to u.
2192 * Atomically adds @a to @v, so long as @v was not already @u.
2193 * Returns original value of @v
2195 static __always_inline s64
2196 atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
2198 s64 c = atomic64_read(v);
2201 if (unlikely(c == u))
2203 } while (!atomic64_try_cmpxchg(v, &c, c + a));
2207 #define atomic64_fetch_add_unless atomic64_fetch_add_unless
/* Fallback: boolean wrapper around fetch_add_unless(). */
2210 #ifndef atomic64_add_unless
2212 * atomic64_add_unless - add unless the number is already a given value
2213 * @v: pointer of type atomic64_t
2214 * @a: the amount to add to v...
2215 * @u: ...unless v is equal to u.
2217 * Atomically adds @a to @v, if @v was not already @u.
2218 * Returns true if the addition was done.
2220 static __always_inline bool
2221 atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
2223 return atomic64_fetch_add_unless(v, a, u) != u;
2225 #define atomic64_add_unless atomic64_add_unless
/* Fallback: increment unless zero, via add_unless(v, 1, 0). */
2228 #ifndef atomic64_inc_not_zero
2230 * atomic64_inc_not_zero - increment unless the number is zero
2231 * @v: pointer of type atomic64_t
2233 * Atomically increments @v by 1, if @v is non-zero.
2234 * Returns true if the increment was done.
2236 static __always_inline bool
2237 atomic64_inc_not_zero(atomic64_t *v)
2239 return atomic64_add_unless(v, 1, 0);
2241 #define atomic64_inc_not_zero atomic64_inc_not_zero
/*
 * Fallback: try_cmpxchg loop that increments only while the observed
 * value is non-negative; bails out once a negative value is seen.
 */
2244 #ifndef atomic64_inc_unless_negative
2245 static __always_inline bool
2246 atomic64_inc_unless_negative(atomic64_t *v)
2248 s64 c = atomic64_read(v);
2251 if (unlikely(c < 0))
2253 } while (!atomic64_try_cmpxchg(v, &c, c + 1));
2257 #define atomic64_inc_unless_negative atomic64_inc_unless_negative
/*
 * Fallback: try_cmpxchg loop that decrements only while the observed
 * value is non-positive; bails out once a positive value is seen.
 */
2260 #ifndef atomic64_dec_unless_positive
2261 static __always_inline bool
2262 atomic64_dec_unless_positive(atomic64_t *v)
2264 s64 c = atomic64_read(v);
2267 if (unlikely(c > 0))
2269 } while (!atomic64_try_cmpxchg(v, &c, c - 1));
2273 #define atomic64_dec_unless_positive atomic64_dec_unless_positive
/*
 * Fallback: try_cmpxchg loop that installs the decremented value only
 * when it would not go negative; bails out when the decrement would.
 */
2276 #ifndef atomic64_dec_if_positive
2277 static __always_inline s64
2278 atomic64_dec_if_positive(atomic64_t *v)
2280 s64 dec, c = atomic64_read(v);
2284 if (unlikely(dec < 0))
2286 } while (!atomic64_try_cmpxchg(v, &c, dec))

2290 #define atomic64_dec_if_positive atomic64_dec_if_positive
/* Conditional-read wrappers mapped onto smp_cond_load_*() on v->counter. */
2293 #define atomic64_cond_read_acquire(v, c) smp_cond_load_acquire(&(v)->counter, (c))
2294 #define atomic64_cond_read_relaxed(v, c) smp_cond_load_relaxed(&(v)->counter, (c))
2296 #endif /* _LINUX_ATOMIC_FALLBACK_H */
2297 // baaf45f4c24ed88ceae58baca39d7fd80bb8101b