powerpc: Rename LWSYNC_ON_SMP to PPC_RELEASE_BARRIER, ISYNC_ON_SMP to PPC_ACQUIRE_BARRIER
author Anton Blanchard <anton@samba.org>
Wed, 10 Feb 2010 01:04:06 +0000 (01:04 +0000)
committer Benjamin Herrenschmidt <benh@kernel.crashing.org>
Wed, 17 Feb 2010 03:03:15 +0000 (14:03 +1100)
For performance reasons we are about to change ISYNC_ON_SMP to sometimes be
lwsync. Since the macro name will then no longer make sense, rename it and
LWSYNC_ON_SMP to better describe what the barriers are doing.

Signed-off-by: Anton Blanchard <anton@samba.org>
Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
arch/powerpc/include/asm/atomic.h
arch/powerpc/include/asm/bitops.h
arch/powerpc/include/asm/futex.h
arch/powerpc/include/asm/mutex.h
arch/powerpc/include/asm/spinlock.h
arch/powerpc/include/asm/synch.h
arch/powerpc/include/asm/system.h
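
For context, a minimal sketch (not part of the patch) of the acquire/release
pattern the renamed macros implement around a load-reserve/store-conditional
loop. The helper name below is made up for illustration; it mirrors
__xchg_u32 in system.h with the PPC405_ERR77 workaround omitted.
PPC_RELEASE_BARRIER (lwsync on SMP) orders prior accesses before the
reservation is taken; PPC_ACQUIRE_BARRIER (isync here, possibly lwsync after
this series) keeps later accesses from being performed until the stwcx.
has succeeded.

static inline unsigned long example_acquire_release_xchg(volatile unsigned int *p,
							  unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
	PPC_RELEASE_BARRIER		/* release: order prior accesses       */
"1:	lwarx	%0,0,%2\n"		/* load-reserve the old value          */
"	stwcx.	%3,0,%2\n"		/* try to store the new value          */
"	bne-	1b"			/* reservation lost: retry             */
	PPC_ACQUIRE_BARRIER		/* acquire: fence later accesses       */
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (val)
	: "cc", "memory");

	return prev;
}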

index 4012483b1899d32a84097bbbdb543c80f025f0d0..b8f152ece02572be5a15acb75ec5ac312d9ec0d9 100644
@@ -49,13 +49,13 @@ static __inline__ int atomic_add_return(int a, atomic_t *v)
        int t;
 
        __asm__ __volatile__(
-       LWSYNC_ON_SMP
+       PPC_RELEASE_BARRIER
 "1:    lwarx   %0,0,%2         # atomic_add_return\n\
        add     %0,%1,%0\n"
        PPC405_ERR77(0,%2)
 "      stwcx.  %0,0,%2 \n\
        bne-    1b"
-       ISYNC_ON_SMP
+       PPC_ACQUIRE_BARRIER
        : "=&r" (t)
        : "r" (a), "r" (&v->counter)
        : "cc", "memory");
@@ -85,13 +85,13 @@ static __inline__ int atomic_sub_return(int a, atomic_t *v)
        int t;
 
        __asm__ __volatile__(
-       LWSYNC_ON_SMP
+       PPC_RELEASE_BARRIER
 "1:    lwarx   %0,0,%2         # atomic_sub_return\n\
        subf    %0,%1,%0\n"
        PPC405_ERR77(0,%2)
 "      stwcx.  %0,0,%2 \n\
        bne-    1b"
-       ISYNC_ON_SMP
+       PPC_ACQUIRE_BARRIER
        : "=&r" (t)
        : "r" (a), "r" (&v->counter)
        : "cc", "memory");
@@ -119,13 +119,13 @@ static __inline__ int atomic_inc_return(atomic_t *v)
        int t;
 
        __asm__ __volatile__(
-       LWSYNC_ON_SMP
+       PPC_RELEASE_BARRIER
 "1:    lwarx   %0,0,%1         # atomic_inc_return\n\
        addic   %0,%0,1\n"
        PPC405_ERR77(0,%1)
 "      stwcx.  %0,0,%1 \n\
        bne-    1b"
-       ISYNC_ON_SMP
+       PPC_ACQUIRE_BARRIER
        : "=&r" (t)
        : "r" (&v->counter)
        : "cc", "xer", "memory");
@@ -163,13 +163,13 @@ static __inline__ int atomic_dec_return(atomic_t *v)
        int t;
 
        __asm__ __volatile__(
-       LWSYNC_ON_SMP
+       PPC_RELEASE_BARRIER
 "1:    lwarx   %0,0,%1         # atomic_dec_return\n\
        addic   %0,%0,-1\n"
        PPC405_ERR77(0,%1)
 "      stwcx.  %0,0,%1\n\
        bne-    1b"
-       ISYNC_ON_SMP
+       PPC_ACQUIRE_BARRIER
        : "=&r" (t)
        : "r" (&v->counter)
        : "cc", "xer", "memory");
@@ -194,7 +194,7 @@ static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
        int t;
 
        __asm__ __volatile__ (
-       LWSYNC_ON_SMP
+       PPC_RELEASE_BARRIER
 "1:    lwarx   %0,0,%1         # atomic_add_unless\n\
        cmpw    0,%0,%3 \n\
        beq-    2f \n\
@@ -202,7 +202,7 @@ static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
        PPC405_ERR77(0,%2)
 "      stwcx.  %0,0,%1 \n\
        bne-    1b \n"
-       ISYNC_ON_SMP
+       PPC_ACQUIRE_BARRIER
 "      subf    %0,%2,%0 \n\
 2:"
        : "=&r" (t)
@@ -227,7 +227,7 @@ static __inline__ int atomic_dec_if_positive(atomic_t *v)
        int t;
 
        __asm__ __volatile__(
-       LWSYNC_ON_SMP
+       PPC_RELEASE_BARRIER
 "1:    lwarx   %0,0,%1         # atomic_dec_if_positive\n\
        cmpwi   %0,1\n\
        addi    %0,%0,-1\n\
@@ -235,7 +235,7 @@ static __inline__ int atomic_dec_if_positive(atomic_t *v)
        PPC405_ERR77(0,%1)
 "      stwcx.  %0,0,%1\n\
        bne-    1b"
-       ISYNC_ON_SMP
+       PPC_ACQUIRE_BARRIER
        "\n\
 2:"    : "=&b" (t)
        : "r" (&v->counter)
@@ -286,12 +286,12 @@ static __inline__ long atomic64_add_return(long a, atomic64_t *v)
        long t;
 
        __asm__ __volatile__(
-       LWSYNC_ON_SMP
+       PPC_RELEASE_BARRIER
 "1:    ldarx   %0,0,%2         # atomic64_add_return\n\
        add     %0,%1,%0\n\
        stdcx.  %0,0,%2 \n\
        bne-    1b"
-       ISYNC_ON_SMP
+       PPC_ACQUIRE_BARRIER
        : "=&r" (t)
        : "r" (a), "r" (&v->counter)
        : "cc", "memory");
@@ -320,12 +320,12 @@ static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
        long t;
 
        __asm__ __volatile__(
-       LWSYNC_ON_SMP
+       PPC_RELEASE_BARRIER
 "1:    ldarx   %0,0,%2         # atomic64_sub_return\n\
        subf    %0,%1,%0\n\
        stdcx.  %0,0,%2 \n\
        bne-    1b"
-       ISYNC_ON_SMP
+       PPC_ACQUIRE_BARRIER
        : "=&r" (t)
        : "r" (a), "r" (&v->counter)
        : "cc", "memory");
@@ -352,12 +352,12 @@ static __inline__ long atomic64_inc_return(atomic64_t *v)
        long t;
 
        __asm__ __volatile__(
-       LWSYNC_ON_SMP
+       PPC_RELEASE_BARRIER
 "1:    ldarx   %0,0,%1         # atomic64_inc_return\n\
        addic   %0,%0,1\n\
        stdcx.  %0,0,%1 \n\
        bne-    1b"
-       ISYNC_ON_SMP
+       PPC_ACQUIRE_BARRIER
        : "=&r" (t)
        : "r" (&v->counter)
        : "cc", "xer", "memory");
@@ -394,12 +394,12 @@ static __inline__ long atomic64_dec_return(atomic64_t *v)
        long t;
 
        __asm__ __volatile__(
-       LWSYNC_ON_SMP
+       PPC_RELEASE_BARRIER
 "1:    ldarx   %0,0,%1         # atomic64_dec_return\n\
        addic   %0,%0,-1\n\
        stdcx.  %0,0,%1\n\
        bne-    1b"
-       ISYNC_ON_SMP
+       PPC_ACQUIRE_BARRIER
        : "=&r" (t)
        : "r" (&v->counter)
        : "cc", "xer", "memory");
@@ -419,13 +419,13 @@ static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
        long t;
 
        __asm__ __volatile__(
-       LWSYNC_ON_SMP
+       PPC_RELEASE_BARRIER
 "1:    ldarx   %0,0,%1         # atomic64_dec_if_positive\n\
        addic.  %0,%0,-1\n\
        blt-    2f\n\
        stdcx.  %0,0,%1\n\
        bne-    1b"
-       ISYNC_ON_SMP
+       PPC_ACQUIRE_BARRIER
        "\n\
 2:"    : "=&r" (t)
        : "r" (&v->counter)
@@ -451,14 +451,14 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
        long t;
 
        __asm__ __volatile__ (
-       LWSYNC_ON_SMP
+       PPC_RELEASE_BARRIER
 "1:    ldarx   %0,0,%1         # atomic_add_unless\n\
        cmpd    0,%0,%3 \n\
        beq-    2f \n\
        add     %0,%2,%0 \n"
 "      stdcx.  %0,0,%1 \n\
        bne-    1b \n"
-       ISYNC_ON_SMP
+       PPC_ACQUIRE_BARRIER
 "      subf    %0,%2,%0 \n\
 2:"
        : "=&r" (t)
index 3c7c37bd92e38baf590561b2dae7b4286e065976..30964ae2d096a9c754407d7fd33a6e96ab9c9692 100644
@@ -78,7 +78,7 @@ static __inline__ void fn(unsigned long mask, \
 
 DEFINE_BITOP(set_bits, or, "", "")
 DEFINE_BITOP(clear_bits, andc, "", "")
-DEFINE_BITOP(clear_bits_unlock, andc, LWSYNC_ON_SMP, "")
+DEFINE_BITOP(clear_bits_unlock, andc, PPC_RELEASE_BARRIER, "")
 DEFINE_BITOP(change_bits, xor, "", "")
 
 static __inline__ void set_bit(int nr, volatile unsigned long *addr)
@@ -124,10 +124,14 @@ static __inline__ unsigned long fn(                       \
        return (old & mask);                            \
 }
 
-DEFINE_TESTOP(test_and_set_bits, or, LWSYNC_ON_SMP, ISYNC_ON_SMP, 0)
-DEFINE_TESTOP(test_and_set_bits_lock, or, "", ISYNC_ON_SMP, 1)
-DEFINE_TESTOP(test_and_clear_bits, andc, LWSYNC_ON_SMP, ISYNC_ON_SMP, 0)
-DEFINE_TESTOP(test_and_change_bits, xor, LWSYNC_ON_SMP, ISYNC_ON_SMP, 0)
+DEFINE_TESTOP(test_and_set_bits, or, PPC_RELEASE_BARRIER,
+             PPC_ACQUIRE_BARRIER, 0)
+DEFINE_TESTOP(test_and_set_bits_lock, or, "",
+             PPC_ACQUIRE_BARRIER, 1)
+DEFINE_TESTOP(test_and_clear_bits, andc, PPC_RELEASE_BARRIER,
+             PPC_ACQUIRE_BARRIER, 0)
+DEFINE_TESTOP(test_and_change_bits, xor, PPC_RELEASE_BARRIER,
+             PPC_ACQUIRE_BARRIER, 0)
 
 static __inline__ int test_and_set_bit(unsigned long nr,
                                       volatile unsigned long *addr)
@@ -158,7 +162,7 @@ static __inline__ int test_and_change_bit(unsigned long nr,
 
 static __inline__ void __clear_bit_unlock(int nr, volatile unsigned long *addr)
 {
-       __asm__ __volatile__(LWSYNC_ON_SMP "" ::: "memory");
+       __asm__ __volatile__(PPC_RELEASE_BARRIER "" ::: "memory");
        __clear_bit(nr, addr);
 }
 
index 9696cc36d2dcf50ae3cb33784cf2a2362edf942a..7c589ef81fb0eec42eee5f17cfbfc25563bb66fa 100644
@@ -11,7 +11,7 @@
 
 #define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \
   __asm__ __volatile ( \
-       LWSYNC_ON_SMP \
+       PPC_RELEASE_BARRIER \
 "1:    lwarx   %0,0,%2\n" \
        insn \
        PPC405_ERR77(0, %2) \
@@ -90,14 +90,14 @@ futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
                return -EFAULT;
 
         __asm__ __volatile__ (
-        LWSYNC_ON_SMP
+        PPC_RELEASE_BARRIER
 "1:     lwarx   %0,0,%2         # futex_atomic_cmpxchg_inatomic\n\
         cmpw    0,%0,%3\n\
         bne-    3f\n"
         PPC405_ERR77(0,%2)
 "2:     stwcx.  %4,0,%2\n\
         bne-    1b\n"
-        ISYNC_ON_SMP
+        PPC_ACQUIRE_BARRIER
 "3:    .section .fixup,\"ax\"\n\
 4:     li      %0,%5\n\
        b       3b\n\
index dabc01c727b86efb4b35c1d8f30e58e0ce95ede4..5399f7e18102f7144bcb1464ff5f13c2fac4468c 100644
@@ -15,7 +15,7 @@ static inline int __mutex_cmpxchg_lock(atomic_t *v, int old, int new)
        PPC405_ERR77(0,%1)
 "      stwcx.  %3,0,%1\n\
        bne-    1b"
-       ISYNC_ON_SMP
+       PPC_ACQUIRE_BARRIER
        "\n\
 2:"
        : "=&r" (t)
@@ -35,7 +35,7 @@ static inline int __mutex_dec_return_lock(atomic_t *v)
        PPC405_ERR77(0,%1)
 "      stwcx.  %0,0,%1\n\
        bne-    1b"
-       ISYNC_ON_SMP
+       PPC_ACQUIRE_BARRIER
        : "=&r" (t)
        : "r" (&v->counter)
        : "cc", "memory");
@@ -48,7 +48,7 @@ static inline int __mutex_inc_return_unlock(atomic_t *v)
        int t;
 
        __asm__ __volatile__(
-       LWSYNC_ON_SMP
+       PPC_RELEASE_BARRIER
 "1:    lwarx   %0,0,%1         # mutex unlock\n\
        addic   %0,%0,1\n"
        PPC405_ERR77(0,%1)
index 1c35b59f6f30c0f528e6538483f764a30218d761..f9611bd69ed2a102b38e67da0451be7694c70a5b 100644
@@ -65,9 +65,10 @@ static inline unsigned long __arch_spin_trylock(arch_spinlock_t *lock)
        cmpwi           0,%0,0\n\
        bne-            2f\n\
        stwcx.          %1,0,%2\n\
-       bne-            1b\n\
-       isync\n\
-2:"    : "=&r" (tmp)
+       bne-            1b\n"
+       PPC_ACQUIRE_BARRIER
+"2:"
+       : "=&r" (tmp)
        : "r" (token), "r" (&lock->slock)
        : "cr0", "memory");
 
@@ -145,7 +146,7 @@ static inline void arch_spin_unlock(arch_spinlock_t *lock)
 {
        SYNC_IO;
        __asm__ __volatile__("# arch_spin_unlock\n\t"
-                               LWSYNC_ON_SMP: : :"memory");
+                               PPC_RELEASE_BARRIER: : :"memory");
        lock->slock = 0;
 }
 
@@ -193,9 +194,9 @@ static inline long __arch_read_trylock(arch_rwlock_t *rw)
        ble-            2f\n"
        PPC405_ERR77(0,%1)
 "      stwcx.          %0,0,%1\n\
-       bne-            1b\n\
-       isync\n\
-2:"    : "=&r" (tmp)
+       bne-            1b\n"
+       PPC_ACQUIRE_BARRIER
+"2:"   : "=&r" (tmp)
        : "r" (&rw->lock)
        : "cr0", "xer", "memory");
 
@@ -217,9 +218,9 @@ static inline long __arch_write_trylock(arch_rwlock_t *rw)
        bne-            2f\n"
        PPC405_ERR77(0,%1)
 "      stwcx.          %1,0,%2\n\
-       bne-            1b\n\
-       isync\n\
-2:"    : "=&r" (tmp)
+       bne-            1b\n"
+       PPC_ACQUIRE_BARRIER
+"2:"   : "=&r" (tmp)
        : "r" (token), "r" (&rw->lock)
        : "cr0", "memory");
 
@@ -270,7 +271,7 @@ static inline void arch_read_unlock(arch_rwlock_t *rw)
 
        __asm__ __volatile__(
        "# read_unlock\n\t"
-       LWSYNC_ON_SMP
+       PPC_RELEASE_BARRIER
 "1:    lwarx           %0,0,%1\n\
        addic           %0,%0,-1\n"
        PPC405_ERR77(0,%1)
@@ -284,7 +285,7 @@ static inline void arch_read_unlock(arch_rwlock_t *rw)
 static inline void arch_write_unlock(arch_rwlock_t *rw)
 {
        __asm__ __volatile__("# write_unlock\n\t"
-                               LWSYNC_ON_SMP: : :"memory");
+                               PPC_RELEASE_BARRIER: : :"memory");
        rw->lock = 0;
 }
 
index 28f6ddbff4cf31d6e9fa8d420563dc8228e1db5a..5db1f0d5ea82944554a747d2be6371483977f4a8 100644
@@ -37,11 +37,11 @@ static inline void isync(void)
 #endif
 
 #ifdef CONFIG_SMP
-#define ISYNC_ON_SMP   "\n\tisync\n"
-#define LWSYNC_ON_SMP  stringify_in_c(LWSYNC) "\n"
+#define PPC_ACQUIRE_BARRIER    "\n\tisync\n"
+#define PPC_RELEASE_BARRIER    stringify_in_c(LWSYNC) "\n"
 #else
-#define ISYNC_ON_SMP
-#define LWSYNC_ON_SMP
+#define PPC_ACQUIRE_BARRIER
+#define PPC_RELEASE_BARRIER
 #endif
 
 #endif /* __KERNEL__ */
index bb8e006a47c67655d9f68d2608abd438e0e21d48..aa59f5b794e89d6407cad1157d21cce035c84e1a 100644
@@ -232,12 +232,12 @@ __xchg_u32(volatile void *p, unsigned long val)
        unsigned long prev;
 
        __asm__ __volatile__(
-       LWSYNC_ON_SMP
+       PPC_RELEASE_BARRIER
 "1:    lwarx   %0,0,%2 \n"
        PPC405_ERR77(0,%2)
 "      stwcx.  %3,0,%2 \n\
        bne-    1b"
-       ISYNC_ON_SMP
+       PPC_ACQUIRE_BARRIER
        : "=&r" (prev), "+m" (*(volatile unsigned int *)p)
        : "r" (p), "r" (val)
        : "cc", "memory");
@@ -275,12 +275,12 @@ __xchg_u64(volatile void *p, unsigned long val)
        unsigned long prev;
 
        __asm__ __volatile__(
-       LWSYNC_ON_SMP
+       PPC_RELEASE_BARRIER
 "1:    ldarx   %0,0,%2 \n"
        PPC405_ERR77(0,%2)
 "      stdcx.  %3,0,%2 \n\
        bne-    1b"
-       ISYNC_ON_SMP
+       PPC_ACQUIRE_BARRIER
        : "=&r" (prev), "+m" (*(volatile unsigned long *)p)
        : "r" (p), "r" (val)
        : "cc", "memory");
@@ -366,14 +366,14 @@ __cmpxchg_u32(volatile unsigned int *p, unsigned long old, unsigned long new)
        unsigned int prev;
 
        __asm__ __volatile__ (
-       LWSYNC_ON_SMP
+       PPC_RELEASE_BARRIER
 "1:    lwarx   %0,0,%2         # __cmpxchg_u32\n\
        cmpw    0,%0,%3\n\
        bne-    2f\n"
        PPC405_ERR77(0,%2)
 "      stwcx.  %4,0,%2\n\
        bne-    1b"
-       ISYNC_ON_SMP
+       PPC_ACQUIRE_BARRIER
        "\n\
 2:"
        : "=&r" (prev), "+m" (*p)
@@ -412,13 +412,13 @@ __cmpxchg_u64(volatile unsigned long *p, unsigned long old, unsigned long new)
        unsigned long prev;
 
        __asm__ __volatile__ (
-       LWSYNC_ON_SMP
+       PPC_RELEASE_BARRIER
 "1:    ldarx   %0,0,%2         # __cmpxchg_u64\n\
        cmpd    0,%0,%3\n\
        bne-    2f\n\
        stdcx.  %4,0,%2\n\
        bne-    1b"
-       ISYNC_ON_SMP
+       PPC_ACQUIRE_BARRIER
        "\n\
 2:"
        : "=&r" (prev), "+m" (*p)