locking/atomics/arm64: Replace our atomic/lock bitop implementations with asm-generic
author	Will Deacon <will.deacon@arm.com>
Tue, 19 Jun 2018 12:53:13 +0000 (13:53 +0100)
committer	Ingo Molnar <mingo@kernel.org>
Thu, 21 Jun 2018 10:52:12 +0000 (12:52 +0200)
The <asm-generic/bitops/{atomic,lock}.h> implementations are built around
the atomic-fetch ops, which we implement efficiently for both LSE and
LL/SC systems. Use that instead of our hand-rolled, out-of-line bitops.S.

Signed-off-by: Will Deacon <will.deacon@arm.com>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: linux-arm-kernel@lists.infradead.org
Cc: yamada.masahiro@socionext.com
Link: https://lore.kernel.org/lkml/1529412794-17720-9-git-send-email-will.deacon@arm.com
Signed-off-by: Ingo Molnar <mingo@kernel.org>
arch/arm64/include/asm/bitops.h
arch/arm64/lib/Makefile
arch/arm64/lib/bitops.S [deleted file]

diff --git a/arch/arm64/include/asm/bitops.h b/arch/arm64/include/asm/bitops.h
index 9c19594ce7cb9932ede27894fb81b3467a43aa58..13501460be6b77d5af5e533683106019800d9742 100644
--- a/arch/arm64/include/asm/bitops.h
+++ b/arch/arm64/include/asm/bitops.h
 #define __ASM_BITOPS_H
 
 #include <linux/compiler.h>
-#include <asm/barrier.h>
 
 #ifndef _LINUX_BITOPS_H
 #error only <linux/bitops.h> can be included directly
 #endif
 
-/*
- * Little endian assembly atomic bitops.
- */
-extern void set_bit(int nr, volatile unsigned long *p);
-extern void clear_bit(int nr, volatile unsigned long *p);
-extern void change_bit(int nr, volatile unsigned long *p);
-extern int test_and_set_bit(int nr, volatile unsigned long *p);
-extern int test_and_clear_bit(int nr, volatile unsigned long *p);
-extern int test_and_change_bit(int nr, volatile unsigned long *p);
-
 #include <asm-generic/bitops/builtin-__ffs.h>
 #include <asm-generic/bitops/builtin-ffs.h>
 #include <asm-generic/bitops/builtin-__fls.h>
@@ -44,8 +33,9 @@ extern int test_and_change_bit(int nr, volatile unsigned long *p);
 
 #include <asm-generic/bitops/sched.h>
 #include <asm-generic/bitops/hweight.h>
-#include <asm-generic/bitops/lock.h>
 
+#include <asm-generic/bitops/atomic.h>
+#include <asm-generic/bitops/lock.h>
 #include <asm-generic/bitops/non-atomic.h>
 #include <asm-generic/bitops/le.h>
 
diff --git a/arch/arm64/lib/Makefile b/arch/arm64/lib/Makefile
index 137710f4dac30ac01c5c17856b730a996628a2e3..68755fd70dcf4c4164cb1453fbe2695a1bc8ff33 100644
--- a/arch/arm64/lib/Makefile
+++ b/arch/arm64/lib/Makefile
@@ -1,5 +1,5 @@
 # SPDX-License-Identifier: GPL-2.0
-lib-y          := bitops.o clear_user.o delay.o copy_from_user.o       \
+lib-y          := clear_user.o delay.o copy_from_user.o                \
                   copy_to_user.o copy_in_user.o copy_page.o            \
                   clear_page.o memchr.o memcpy.o memmove.o memset.o    \
                   memcmp.o strcmp.o strncmp.o strlen.o strnlen.o       \
diff --git a/arch/arm64/lib/bitops.S b/arch/arm64/lib/bitops.S
deleted file mode 100644
index 43ac736..0000000
--- a/arch/arm64/lib/bitops.S
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Based on arch/arm/lib/bitops.h
- *
- * Copyright (C) 2013 ARM Ltd.
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License version 2 as
- * published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program.  If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include <linux/linkage.h>
-#include <asm/assembler.h>
-#include <asm/lse.h>
-
-/*
- * x0: bits 5:0  bit offset
- *     bits 31:6 word offset
- * x1: address
- */
-       .macro  bitop, name, llsc, lse
-ENTRY( \name   )
-       and     w3, w0, #63             // Get bit offset
-       eor     w0, w0, w3              // Clear low bits
-       mov     x2, #1
-       add     x1, x1, x0, lsr #3      // Get word offset
-alt_lse "      prfm    pstl1strm, [x1]",       "nop"
-       lsl     x3, x2, x3              // Create mask
-
-alt_lse        "1:     ldxr    x2, [x1]",              "\lse   x3, [x1]"
-alt_lse        "       \llsc   x2, x2, x3",            "nop"
-alt_lse        "       stxr    w0, x2, [x1]",          "nop"
-alt_lse        "       cbnz    w0, 1b",                "nop"
-
-       ret
-ENDPROC(\name  )
-       .endm
-
-       .macro  testop, name, llsc, lse
-ENTRY( \name   )
-       and     w3, w0, #63             // Get bit offset
-       eor     w0, w0, w3              // Clear low bits
-       mov     x2, #1
-       add     x1, x1, x0, lsr #3      // Get word offset
-alt_lse "      prfm    pstl1strm, [x1]",       "nop"
-       lsl     x4, x2, x3              // Create mask
-
-alt_lse        "1:     ldxr    x2, [x1]",              "\lse   x4, x2, [x1]"
-       lsr     x0, x2, x3
-alt_lse        "       \llsc   x2, x2, x4",            "nop"
-alt_lse        "       stlxr   w5, x2, [x1]",          "nop"
-alt_lse        "       cbnz    w5, 1b",                "nop"
-alt_lse        "       dmb     ish",                   "nop"
-
-       and     x0, x0, #1
-       ret
-ENDPROC(\name  )
-       .endm
-
-/*
- * Atomic bit operations.
- */
-       bitop   change_bit, eor, steor
-       bitop   clear_bit, bic, stclr
-       bitop   set_bit, orr, stset
-
-       testop  test_and_change_bit, eor, ldeoral
-       testop  test_and_clear_bit, bic, ldclral
-       testop  test_and_set_bit, orr, ldsetal