atomics/treewide: Remove redundant atomic_inc_not_zero() definitions
author Mark Rutland <mark.rutland@arm.com>
Thu, 21 Jun 2018 12:13:05 +0000 (13:13 +0100)
committer Ingo Molnar <mingo@kernel.org>
Thu, 21 Jun 2018 12:22:33 +0000 (14:22 +0200)
When atomic_inc_not_zero(v) isn't defined, <linux/atomic.h> will define
it as falling back to atomic_add_unless((v), 1, 0), so there's no need
for arch code to do so.
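
For context, a minimal sketch of what that generic fallback looks like
(the exact guard and spelling in <linux/atomic.h> may differ):

	#ifndef atomic_inc_not_zero
	/*
	 * Increment *v by 1 unless it is currently 0; returns true if the
	 * increment was performed.
	 */
	#define atomic_inc_not_zero(v)	atomic_add_unless((v), 1, 0)
	#endif

Since this is exactly what the removed per-arch definitions expanded to,
dropping them leaves behaviour unchanged.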

There should be no functional change as a result of this patch.

Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Reviewed-by: Will Deacon <will.deacon@arm.com>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Acked-by: Palmer Dabbelt <palmer@sifive.com>
Cc: Boqun Feng <boqun.feng@gmail.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Link: https://lore.kernel.org/lkml/20180621121321.4761-3-mark.rutland@arm.com
Signed-off-by: Ingo Molnar <mingo@kernel.org>
arch/arc/include/asm/atomic.h
arch/hexagon/include/asm/atomic.h
arch/riscv/include/asm/atomic.h

diff --git a/arch/arc/include/asm/atomic.h b/arch/arc/include/asm/atomic.h
index 67121b5ff3a36c1783cc5de0f4ca4518ef6fab0e..cecdf3403cafaeb35d5d5aeb13557a20c2a1186b 100644
--- a/arch/arc/include/asm/atomic.h
+++ b/arch/arc/include/asm/atomic.h
@@ -336,8 +336,6 @@ ATOMIC_OPS(xor, ^=, CTOP_INST_AXOR_DI_R2_R2_R3)
        c;                                                              \
 })
 
-#define atomic_inc_not_zero(v)         atomic_add_unless((v), 1, 0)
-
 #define atomic_inc(v)                  atomic_add(1, v)
 #define atomic_dec(v)                  atomic_sub(1, v)
 
diff --git a/arch/hexagon/include/asm/atomic.h b/arch/hexagon/include/asm/atomic.h
index 287aa9f394f302d5b2e8f2db22068f2639225e5d..d2feeba93c4457bbd6a7163d94c8e7594d5391a1 100644
--- a/arch/hexagon/include/asm/atomic.h
+++ b/arch/hexagon/include/asm/atomic.h
@@ -197,8 +197,6 @@ static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
        return __oldval;
 }
 
-#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
-
 #define atomic_inc(v) atomic_add(1, (v))
 #define atomic_dec(v) atomic_sub(1, (v))
 
diff --git a/arch/riscv/include/asm/atomic.h b/arch/riscv/include/asm/atomic.h
index 739e810c857e4ae654b32ea3d8afbca0575e7cb9..0e27e050ba149bd1913d3ac0294afedc31fa93cc 100644
--- a/arch/riscv/include/asm/atomic.h
+++ b/arch/riscv/include/asm/atomic.h
@@ -375,15 +375,6 @@ static __always_inline int atomic64_add_unless(atomic64_t *v, long a, long u)
 }
 #endif
 
-/*
- * The extra atomic operations that are constructed from one of the core
- * LR/SC-based operations above.
- */
-static __always_inline int atomic_inc_not_zero(atomic_t *v)
-{
-        return atomic_fetch_add_unless(v, 1, 0);
-}
-
 #ifndef CONFIG_GENERIC_ATOMIC64
 static __always_inline long atomic64_inc_not_zero(atomic64_t *v)
 {