arch/sh/include/asm/atomic-llsc.h
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_SH_ATOMIC_LLSC_H
#define __ASM_SH_ATOMIC_LLSC_H

/*
 * SH-4A note:
 *
 * We basically get atomic_xxx_return() for free compared with
 * atomic_xxx(). movli.l/movco.l require r0 due to the instruction
 * encoding, so the retval is automatically set without having to
 * do any special work.
 */
/*
 * To get proper branch prediction for the main line, we must branch
 * forward to code at the end of this object's .text section, then
 * branch back to restart the operation.
 */
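/*
 * How the loops below work (descriptive note, not in the original
 * source): movli.l loads v->counter into r0 and arms the CPU's
 * link/lock flag; movco.l writes r0 back only if that flag is still
 * set, recording success or failure in the T bit.  "bf 1b" restarts
 * the whole sequence whenever the conditional store lost the race.
 * The "=&z" constraint pins the temporary to r0, which both
 * instructions require, and "t" is listed as clobbered because
 * movco.l updates the T bit.
 */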

#define ATOMIC_OP(op)                                                   \
static inline void atomic_##op(int i, atomic_t *v)                      \
{                                                                       \
        unsigned long tmp;                                              \
                                                                        \
        __asm__ __volatile__ (                                          \
"1:     movli.l @%2, %0         ! atomic_" #op "\n"                     \
"       " #op " %1, %0                          \n"                     \
"       movco.l %0, @%2                         \n"                     \
"       bf      1b                              \n"                     \
        : "=&z" (tmp)                                                   \
        : "r" (i), "r" (&v->counter)                                    \
        : "t");                                                         \
}
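/*
 * A rough sketch (not part of the original file) of what
 * ATOMIC_OP(add) expands to once the preprocessor has pasted and
 * stringified the op name:
 *
 *      static inline void atomic_add(int i, atomic_t *v)
 *      {
 *              unsigned long tmp;
 *
 *              __asm__ __volatile__ (
 *      "1:     movli.l @%2, %0         ! atomic_add\n"
 *      "       add %1, %0                          \n"
 *      "       movco.l %0, @%2                     \n"
 *      "       bf      1b                          \n"
 *              : "=&z" (tmp)
 *              : "r" (i), "r" (&v->counter)
 *              : "t");
 *      }
 *
 * Only the op mnemonic changes between the generated functions; the
 * load-linked/store-conditional retry loop is the same for all.
 */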

#define ATOMIC_OP_RETURN(op)                                            \
static inline int atomic_##op##_return(int i, atomic_t *v)              \
{                                                                       \
        unsigned long temp;                                             \
                                                                        \
        __asm__ __volatile__ (                                          \
"1:     movli.l @%2, %0         ! atomic_" #op "_return \n"             \
"       " #op " %1, %0                                  \n"             \
"       movco.l %0, @%2                                 \n"             \
"       bf      1b                                      \n"             \
"       synco                                           \n"             \
        : "=&z" (temp)                                                  \
        : "r" (i), "r" (&v->counter)                                    \
        : "t");                                                         \
                                                                        \
        return temp;                                                    \
}
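/*
 * Note (editorial, not in the original source): the value-returning
 * form returns "temp", i.e. the new value after the operation, and
 * ends with "synco", SH-4A's synchronization instruction, so the
 * generated atomic_add_return()/atomic_sub_return() also provide the
 * ordering callers of value-returning atomics expect.  The plain
 * ATOMIC_OP() variants above omit it, since non-value-returning
 * atomics are not required to imply a barrier.
 */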

#define ATOMIC_FETCH_OP(op)                                             \
static inline int atomic_fetch_##op(int i, atomic_t *v)                 \
{                                                                       \
        unsigned long res, temp;                                        \
                                                                        \
        __asm__ __volatile__ (                                          \
"1:     movli.l @%3, %0         ! atomic_fetch_" #op "  \n"             \
"       mov %0, %1                                      \n"             \
"       " #op " %2, %0                                  \n"             \
"       movco.l %0, @%3                                 \n"             \
"       bf      1b                                      \n"             \
"       synco                                           \n"             \
        : "=&z" (temp), "=&r" (res)                                     \
        : "r" (i), "r" (&v->counter)                                    \
        : "t");                                                         \
                                                                        \
        return res;                                                     \
}
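/*
 * Note (editorial): "mov %0, %1" snapshots the just-loaded counter
 * into "res" before the operation is applied, so atomic_fetch_add()
 * and friends return the old value, while the updated value is what
 * movco.l stores back.
 */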

#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)

#undef ATOMIC_OPS
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(and)
ATOMIC_OPS(or)
ATOMIC_OPS(xor)

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
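/*
 * Usage sketch (illustrative only, assuming the usual atomic_t API
 * from <linux/atomic.h>; not part of this header):
 *
 *      atomic_t refs = ATOMIC_INIT(1);
 *
 *      atomic_add(1, &refs);                   // no return value, no barrier
 *      if (atomic_sub_return(1, &refs) == 0)   // new value, ordered
 *              release_the_object();           // hypothetical callback
 *      old = atomic_fetch_or(0x4, &refs);      // returns the old value
 */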

#endif /* __ASM_SH_ATOMIC_LLSC_H */