/*
 * Copyright (C) 2013 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#ifndef __ASM_PERCPU_H
#define __ASM_PERCPU_H

#include <asm/stack_pointer.h>

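/*
 * The current CPU's per-cpu offset is stashed in the TPIDR_EL1 system
 * register, which is otherwise unused by the kernel at EL1.
 */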
static inline void set_my_cpu_offset(unsigned long off)
{
        asm volatile("msr tpidr_el1, %0" :: "r" (off) : "memory");
}

static inline unsigned long __my_cpu_offset(void)
{
        unsigned long off;

        /*
         * We want to allow caching the value, so avoid using volatile and
         * instead use a fake stack read to hazard against barrier().
         */
        asm("mrs %0, tpidr_el1" : "=r" (off) :
                "Q" (*(const unsigned long *)current_stack_pointer));

        return off;
}
#define __my_cpu_offset __my_cpu_offset()

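/*
 * PERCPU_OP(op, asm_op) generates __percpu_<op>(): a read-modify-write
 * on a per-cpu variable, implemented as a load-exclusive/store-exclusive
 * retry loop for each operand size. The updated value is returned.
 */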
#define PERCPU_OP(op, asm_op)                                           \
static inline unsigned long __percpu_##op(void *ptr,                    \
                        unsigned long val, int size)                    \
{                                                                       \
        unsigned long loop, ret;                                        \
                                                                        \
        switch (size) {                                                 \
        case 1:                                                         \
                asm ("//__per_cpu_" #op "_1\n"                          \
                "1:     ldxrb     %w[ret], %[ptr]\n"                    \
                        #asm_op " %w[ret], %w[ret], %w[val]\n"          \
                "       stxrb     %w[loop], %w[ret], %[ptr]\n"          \
                "       cbnz      %w[loop], 1b"                         \
                : [loop] "=&r" (loop), [ret] "=&r" (ret),               \
                  [ptr] "+Q"(*(u8 *)ptr)                                \
                : [val] "Ir" (val));                                    \
                break;                                                  \
        case 2:                                                         \
                asm ("//__per_cpu_" #op "_2\n"                          \
                "1:     ldxrh     %w[ret], %[ptr]\n"                    \
                        #asm_op " %w[ret], %w[ret], %w[val]\n"          \
                "       stxrh     %w[loop], %w[ret], %[ptr]\n"          \
                "       cbnz      %w[loop], 1b"                         \
                : [loop] "=&r" (loop), [ret] "=&r" (ret),               \
                  [ptr] "+Q"(*(u16 *)ptr)                               \
                : [val] "Ir" (val));                                    \
                break;                                                  \
        case 4:                                                         \
                asm ("//__per_cpu_" #op "_4\n"                          \
                "1:     ldxr      %w[ret], %[ptr]\n"                    \
                        #asm_op " %w[ret], %w[ret], %w[val]\n"          \
                "       stxr      %w[loop], %w[ret], %[ptr]\n"          \
                "       cbnz      %w[loop], 1b"                         \
                : [loop] "=&r" (loop), [ret] "=&r" (ret),               \
                  [ptr] "+Q"(*(u32 *)ptr)                               \
                : [val] "Ir" (val));                                    \
                break;                                                  \
        case 8:                                                         \
                asm ("//__per_cpu_" #op "_8\n"                          \
                "1:     ldxr      %[ret], %[ptr]\n"                     \
                        #asm_op " %[ret], %[ret], %[val]\n"             \
                "       stxr      %w[loop], %[ret], %[ptr]\n"           \
                "       cbnz      %w[loop], 1b"                         \
                : [loop] "=&r" (loop), [ret] "=&r" (ret),               \
                  [ptr] "+Q"(*(u64 *)ptr)                               \
                : [val] "Ir" (val));                                    \
                break;                                                  \
        default:                                                        \
                BUILD_BUG();                                            \
        }                                                               \
                                                                        \
        return ret;                                                     \
}

PERCPU_OP(add, add)
PERCPU_OP(and, and)
PERCPU_OP(or, orr)
#undef PERCPU_OP

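/*
 * Plain per-cpu reads and writes: the callers disable preemption, and
 * READ_ONCE()/WRITE_ONCE() prevent the compiler from tearing or caching
 * the access.
 */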
static inline unsigned long __percpu_read(void *ptr, int size)
{
        unsigned long ret;

        switch (size) {
        case 1:
                ret = READ_ONCE(*(u8 *)ptr);
                break;
        case 2:
                ret = READ_ONCE(*(u16 *)ptr);
                break;
        case 4:
                ret = READ_ONCE(*(u32 *)ptr);
                break;
        case 8:
                ret = READ_ONCE(*(u64 *)ptr);
                break;
        default:
                BUILD_BUG();
        }

        return ret;
}

static inline void __percpu_write(void *ptr, unsigned long val, int size)
{
        switch (size) {
        case 1:
                WRITE_ONCE(*(u8 *)ptr, (u8)val);
                break;
        case 2:
                WRITE_ONCE(*(u16 *)ptr, (u16)val);
                break;
        case 4:
                WRITE_ONCE(*(u32 *)ptr, (u32)val);
                break;
        case 8:
                WRITE_ONCE(*(u64 *)ptr, (u64)val);
                break;
        default:
                BUILD_BUG();
        }
}

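/*
 * Unconditional exchange: returns the previous value, again via an
 * exclusive-access retry loop.
 */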
static inline unsigned long __percpu_xchg(void *ptr, unsigned long val,
                                                int size)
{
        unsigned long ret, loop;

        switch (size) {
        case 1:
                asm ("//__percpu_xchg_1\n"
                "1:     ldxrb   %w[ret], %[ptr]\n"
                "       stxrb   %w[loop], %w[val], %[ptr]\n"
                "       cbnz    %w[loop], 1b"
                : [loop] "=&r"(loop), [ret] "=&r"(ret),
                  [ptr] "+Q"(*(u8 *)ptr)
                : [val] "r" (val));
                break;
        case 2:
                asm ("//__percpu_xchg_2\n"
                "1:     ldxrh   %w[ret], %[ptr]\n"
                "       stxrh   %w[loop], %w[val], %[ptr]\n"
                "       cbnz    %w[loop], 1b"
                : [loop] "=&r"(loop), [ret] "=&r"(ret),
                  [ptr] "+Q"(*(u16 *)ptr)
                : [val] "r" (val));
                break;
        case 4:
                asm ("//__percpu_xchg_4\n"
                "1:     ldxr    %w[ret], %[ptr]\n"
                "       stxr    %w[loop], %w[val], %[ptr]\n"
                "       cbnz    %w[loop], 1b"
                : [loop] "=&r"(loop), [ret] "=&r"(ret),
                  [ptr] "+Q"(*(u32 *)ptr)
                : [val] "r" (val));
                break;
        case 8:
                asm ("//__percpu_xchg_8\n"
                "1:     ldxr    %[ret], %[ptr]\n"
                "       stxr    %w[loop], %[val], %[ptr]\n"
                "       cbnz    %w[loop], 1b"
                : [loop] "=&r"(loop), [ret] "=&r"(ret),
                  [ptr] "+Q"(*(u64 *)ptr)
                : [val] "r" (val));
                break;
        default:
                BUILD_BUG();
        }

        return ret;
}

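/*
 * The _percpu_*() wrappers disable preemption around the raw access, so
 * the task cannot migrate between evaluating raw_cpu_ptr() and
 * completing the operation.
 */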
#define _percpu_read(pcp)                                               \
({                                                                      \
        typeof(pcp) __retval;                                           \
        preempt_disable_notrace();                                      \
        __retval = (typeof(pcp))__percpu_read(raw_cpu_ptr(&(pcp)),      \
                                              sizeof(pcp));             \
        preempt_enable_notrace();                                       \
        __retval;                                                       \
})

#define _percpu_write(pcp, val)                                         \
do {                                                                    \
        preempt_disable_notrace();                                      \
        __percpu_write(raw_cpu_ptr(&(pcp)), (unsigned long)(val),       \
                                sizeof(pcp));                           \
        preempt_enable_notrace();                                       \
} while (0)

#define _pcp_protect(operation, pcp, val)                       \
({                                                              \
        typeof(pcp) __retval;                                   \
        preempt_disable();                                      \
        __retval = (typeof(pcp))operation(raw_cpu_ptr(&(pcp)),  \
                                          (val), sizeof(pcp));  \
        preempt_enable();                                       \
        __retval;                                               \
})

#define _percpu_add(pcp, val) \
        _pcp_protect(__percpu_add, pcp, val)

#define _percpu_add_return(pcp, val) _percpu_add(pcp, val)

#define _percpu_and(pcp, val) \
        _pcp_protect(__percpu_and, pcp, val)

#define _percpu_or(pcp, val) \
        _pcp_protect(__percpu_or, pcp, val)

#define _percpu_xchg(pcp, val) (typeof(pcp)) \
        _pcp_protect(__percpu_xchg, pcp, (unsigned long)(val))

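/*
 * Size-suffixed hooks picked up by the generic this_cpu_*() machinery
 * in <asm-generic/percpu.h>.
 */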
#define this_cpu_add_1(pcp, val) _percpu_add(pcp, val)
#define this_cpu_add_2(pcp, val) _percpu_add(pcp, val)
#define this_cpu_add_4(pcp, val) _percpu_add(pcp, val)
#define this_cpu_add_8(pcp, val) _percpu_add(pcp, val)

#define this_cpu_add_return_1(pcp, val) _percpu_add_return(pcp, val)
#define this_cpu_add_return_2(pcp, val) _percpu_add_return(pcp, val)
#define this_cpu_add_return_4(pcp, val) _percpu_add_return(pcp, val)
#define this_cpu_add_return_8(pcp, val) _percpu_add_return(pcp, val)

#define this_cpu_and_1(pcp, val) _percpu_and(pcp, val)
#define this_cpu_and_2(pcp, val) _percpu_and(pcp, val)
#define this_cpu_and_4(pcp, val) _percpu_and(pcp, val)
#define this_cpu_and_8(pcp, val) _percpu_and(pcp, val)

#define this_cpu_or_1(pcp, val) _percpu_or(pcp, val)
#define this_cpu_or_2(pcp, val) _percpu_or(pcp, val)
#define this_cpu_or_4(pcp, val) _percpu_or(pcp, val)
#define this_cpu_or_8(pcp, val) _percpu_or(pcp, val)

#define this_cpu_read_1(pcp) _percpu_read(pcp)
#define this_cpu_read_2(pcp) _percpu_read(pcp)
#define this_cpu_read_4(pcp) _percpu_read(pcp)
#define this_cpu_read_8(pcp) _percpu_read(pcp)

#define this_cpu_write_1(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_2(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_4(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_8(pcp, val) _percpu_write(pcp, val)

#define this_cpu_xchg_1(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_2(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_4(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_8(pcp, val) _percpu_xchg(pcp, val)

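/*
 * Illustrative usage (a sketch; "foo" is a hypothetical variable):
 *
 *      DEFINE_PER_CPU(unsigned long, foo);
 *      ...
 *      this_cpu_add(foo, 1);           - safe against preemption/migration
 *      val = this_cpu_read(foo);
 */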
#include <asm-generic/percpu.h>

#endif /* __ASM_PERCPU_H */