Merge tag 'fscrypt_for_linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tytso/fscrypt
[sfrench/cifs-2.6.git] / arch / arc / include / asm / bitops.h
index 0352fb8d21b998223b03e2bc327c3f4554df6ec5..ee9246184033b3138f8d09878fc7763502b11e5c 100644 (file)
@@ -22,7 +22,7 @@
 #include <asm/smp.h>
 #endif
 
-#if defined(CONFIG_ARC_HAS_LLSC)
+#ifdef CONFIG_ARC_HAS_LLSC
 
 /*
  * Hardware assisted Atomic-R-M-W
@@ -88,7 +88,7 @@ static inline int test_and_##op##_bit(unsigned long nr, volatile unsigned long *
        return (old & (1 << nr)) != 0;                                  \
 }
 
-#else  /* !CONFIG_ARC_HAS_LLSC */
+#elif !defined(CONFIG_ARC_PLAT_EZNPS)
 
 /*
  * Non hardware assisted Atomic-R-M-W
@@ -139,7 +139,55 @@ static inline int test_and_##op##_bit(unsigned long nr, volatile unsigned long *
        return (old & (1UL << (nr & 0x1f))) != 0;                       \
 }
 
-#endif /* CONFIG_ARC_HAS_LLSC */
+#else /* CONFIG_ARC_PLAT_EZNPS */
+
+#define BIT_OP(op, c_op, asm_op)                                       \
+static inline void op##_bit(unsigned long nr, volatile unsigned long *m)\
+{                                                                      \
+       m += nr >> 5;                                                   \
+                                                                       \
+       nr = (1UL << (nr & 0x1f));                                      \
+       if (asm_op == CTOP_INST_AAND_DI_R2_R2_R3)                       \
+               nr = ~nr;                                               \
+                                                                       \
+       __asm__ __volatile__(                                           \
+       "       mov r2, %0\n"                                           \
+       "       mov r3, %1\n"                                           \
+       "       .word %2\n"                                             \
+       :                                                               \
+       : "r"(nr), "r"(m), "i"(asm_op)                                  \
+       : "r2", "r3", "memory");                                        \
+}
+
+#define TEST_N_BIT_OP(op, c_op, asm_op)                                        \
+static inline int test_and_##op##_bit(unsigned long nr, volatile unsigned long *m)\
+{                                                                      \
+       unsigned long old;                                              \
+                                                                       \
+       m += nr >> 5;                                                   \
+                                                                       \
+       nr = old = (1UL << (nr & 0x1f));                                \
+       if (asm_op == CTOP_INST_AAND_DI_R2_R2_R3)                       \
+               old = ~old;                                             \
+                                                                       \
+       /* Explicit full memory barrier needed before/after */          \
+       smp_mb();                                                       \
+                                                                       \
+       __asm__ __volatile__(                                           \
+       "       mov r2, %0\n"                                           \
+       "       mov r3, %1\n"                                           \
+       "       .word %2\n"                                             \
+       "       mov %0, r2"                                             \
+       : "+r"(old)                                                     \
+       : "r"(m), "i"(asm_op)                                           \
+       : "r2", "r3", "memory");                                        \
+                                                                       \
+       smp_mb();                                                       \
+                                                                       \
+       return (old & nr) != 0;                                 \
+}
+
+#endif /* CONFIG_ARC_PLAT_EZNPS */
 
 /***************************************
  * Non atomic variants
@@ -181,9 +229,15 @@ static inline int __test_and_##op##_bit(unsigned long nr, volatile unsigned long
        /* __test_and_set_bit(), __test_and_clear_bit(), __test_and_change_bit() */\
        __TEST_N_BIT_OP(op, c_op, asm_op)
 
+#ifndef CONFIG_ARC_PLAT_EZNPS
 BIT_OPS(set, |, bset)
 BIT_OPS(clear, & ~, bclr)
 BIT_OPS(change, ^, bxor)
+#else
+BIT_OPS(set, |, CTOP_INST_AOR_DI_R2_R2_R3)
+BIT_OPS(clear, & ~, CTOP_INST_AAND_DI_R2_R2_R3)
+BIT_OPS(change, ^, CTOP_INST_AXOR_DI_R2_R2_R3)
+#endif
 
 /*
  * This routine doesn't need to be atomic.
@@ -224,7 +278,7 @@ static inline __attribute__ ((const)) int clz(unsigned int x)
        return res;
 }
 
-static inline int constant_fls(int x)
+static inline int constant_fls(unsigned int x)
 {
        int r = 32;
 
@@ -258,7 +312,7 @@ static inline int constant_fls(int x)
  * @result: [1-32]
  * fls(1) = 1, fls(0x80000000) = 32, fls(0) = 0
  */
-static inline __attribute__ ((const)) int fls(unsigned long x)
+static inline __attribute__ ((const)) int fls(unsigned int x)
 {
        if (__builtin_constant_p(x))
               return constant_fls(x);