#ifndef __ARCH_S390_PERCPU__
#define __ARCH_S390_PERCPU__

#include <linux/preempt.h>
#include <asm/cmpxchg.h>

/*
 * s390 uses its own implementation for per cpu data; the offset of
 * the cpu local data area is cached in the cpu's lowcore memory.
 */
#define __my_cpu_offset S390_lowcore.percpu_offset
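
/*
 * Illustrative sketch, not part of this header: the generic layer in
 * <asm-generic/percpu.h> computes the address of a per cpu variable
 * as its link time address plus __my_cpu_offset. With the override
 * above that amounts to a single load from the lowcore,
 *
 *	ptr = (void *)&var + S390_lowcore.percpu_offset;
 *
 * instead of the generic __per_cpu_offset[raw_smp_processor_id()]
 * array lookup.
 */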

#ifdef CONFIG_64BIT

/*
 * For 64 bit module code, the module may be more than 4G above the
 * per cpu area, so use weak definitions to force the compiler to
 * generate external references.
 */
#if defined(CONFIG_SMP) && defined(MODULE)
#define ARCH_NEEDS_WEAK_PER_CPU
#endif
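
/*
 * Hypothetical example, not part of this header: with
 * ARCH_NEEDS_WEAK_PER_CPU defined, a module definition such as
 *
 *	DEFINE_PER_CPU(unsigned long, example_counter);
 *
 * is emitted as a weak symbol by <linux/percpu-defs.h>, so the
 * compiler cannot assume the symbol is reachable with a 32 bit
 * relative offset and generates a full external reference instead.
 */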

/*
 * We use a compare-and-swap loop since that uses fewer cpu cycles than
 * disabling and enabling interrupts like the generic variant would do.
 */
#define arch_this_cpu_to_op_simple(pcp, val, op)			\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ old__, new__, prev__;				\
	pcp_op_T__ *ptr__;						\
	preempt_disable();						\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	prev__ = *ptr__;						\
	do {								\
		old__ = prev__;						\
		new__ = old__ op (val);					\
		prev__ = cmpxchg(ptr__, old__, new__);			\
	} while (prev__ != old__);					\
	preempt_enable();						\
	new__;								\
})
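
/*
 * Usage sketch with a hypothetical variable: for 1 and 2 byte types
 * there is no interlocked-access instruction, so a generic update like
 *
 *	DEFINE_PER_CPU(u8, example_byte);
 *	this_cpu_add(example_byte, 1);
 *
 * resolves to this_cpu_add_1() and therefore to the cmpxchg loop
 * defined above.
 */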

#define this_cpu_add_1(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_2(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_1(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_2(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_and_1(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_and_2(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_or_1(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, |)
#define this_cpu_or_2(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, |)

#ifndef CONFIG_HAVE_MARCH_Z196_FEATURES

#define this_cpu_add_4(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_8(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_4(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_8(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_and_4(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_and_8(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_or_4(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, |)
#define this_cpu_or_8(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, |)

#else /* CONFIG_HAVE_MARCH_Z196_FEATURES */

#define arch_this_cpu_add(pcp, val, op1, op2, szcast)			\
{									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ val__ = (val);					\
	pcp_op_T__ old__, *ptr__;					\
	preempt_disable();						\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	if (__builtin_constant_p(val__) &&				\
	    ((szcast)val__ > -129) && ((szcast)val__ < 128)) {		\
		asm volatile(						\
			op2 "	%[ptr__],%[val__]\n"			\
			: [ptr__] "+Q" (*ptr__)				\
			: [val__] "i" ((szcast)val__)			\
			: "cc");					\
	} else {							\
		asm volatile(						\
			op1 "	%[old__],%[val__],%[ptr__]\n"		\
			: [old__] "=d" (old__), [ptr__] "+Q" (*ptr__)	\
			: [val__] "d" (val__)				\
			: "cc");					\
	}								\
	preempt_enable();						\
}
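
/*
 * Note on the instruction selection above: "asi"/"agsi" add a signed
 * 8 bit immediate directly to storage, hence the -128..127 range check
 * for the constant case. "laa"/"laag" (load and add, part of the z196
 * interlocked-access facility) add a register operand and return the
 * previous value in old__, which is simply discarded here.
 */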

#define this_cpu_add_4(pcp, val) arch_this_cpu_add(pcp, val, "laa", "asi", int)
#define this_cpu_add_8(pcp, val) arch_this_cpu_add(pcp, val, "laag", "agsi", long)

#define arch_this_cpu_add_return(pcp, val, op)				\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ val__ = (val);					\
	pcp_op_T__ old__, *ptr__;					\
	preempt_disable();						\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	asm volatile(							\
		op "	%[old__],%[val__],%[ptr__]\n"			\
		: [old__] "=d" (old__), [ptr__] "+Q" (*ptr__)		\
		: [val__] "d" (val__)					\
		: "cc");						\
	preempt_enable();						\
	old__ + val__;							\
})
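
/*
 * "laa"/"laag" return the pre-add value in old__, so old__ + val__
 * above is the post-add value that this_cpu_add_return() callers
 * expect.
 */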

#define this_cpu_add_return_4(pcp, val) arch_this_cpu_add_return(pcp, val, "laa")
#define this_cpu_add_return_8(pcp, val) arch_this_cpu_add_return(pcp, val, "laag")

#define arch_this_cpu_to_op(pcp, val, op)				\
{									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ val__ = (val);					\
	pcp_op_T__ old__, *ptr__;					\
	preempt_disable();						\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	asm volatile(							\
		op "	%[old__],%[val__],%[ptr__]\n"			\
		: [old__] "=d" (old__), [ptr__] "+Q" (*ptr__)		\
		: [val__] "d" (val__)					\
		: "cc");						\
	preempt_enable();						\
}

#define this_cpu_and_4(pcp, val)	arch_this_cpu_to_op(pcp, val, "lan")
#define this_cpu_and_8(pcp, val)	arch_this_cpu_to_op(pcp, val, "lang")
#define this_cpu_or_4(pcp, val)		arch_this_cpu_to_op(pcp, val, "lao")
#define this_cpu_or_8(pcp, val)		arch_this_cpu_to_op(pcp, val, "laog")
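
/*
 * "lan"/"lang" (load and and) and "lao"/"laog" (load and or) are the
 * interlocked bitwise counterparts of "laa"; the previous value loaded
 * into old__ is discarded, only the storage update is of interest.
 */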

#endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */

#define arch_this_cpu_cmpxchg(pcp, oval, nval)				\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ ret__;						\
	pcp_op_T__ *ptr__;						\
	preempt_disable();						\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	ret__ = cmpxchg(ptr__, oval, nval);				\
	preempt_enable();						\
	ret__;								\
})
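
/*
 * Usage sketch with a hypothetical variable: all four sizes funnel
 * into the generic cmpxchg() on the cpu local slot, e.g.
 *
 *	DEFINE_PER_CPU(int, example_state);
 *	old = this_cpu_cmpxchg(example_state, EXPECTED, NEW);
 */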

#define this_cpu_cmpxchg_1(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_2(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_4(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_8(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)

#define arch_this_cpu_xchg(pcp, nval)					\
({									\
	typeof(pcp) *ptr__;						\
	typeof(pcp) ret__;						\
	preempt_disable();						\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	ret__ = xchg(ptr__, nval);					\
	preempt_enable();						\
	ret__;								\
})

#define this_cpu_xchg_1(pcp, nval) arch_this_cpu_xchg(pcp, nval)
#define this_cpu_xchg_2(pcp, nval) arch_this_cpu_xchg(pcp, nval)
#define this_cpu_xchg_4(pcp, nval) arch_this_cpu_xchg(pcp, nval)
#define this_cpu_xchg_8(pcp, nval) arch_this_cpu_xchg(pcp, nval)

#define arch_this_cpu_cmpxchg_double(pcp1, pcp2, o1, o2, n1, n2)	\
({									\
	typeof(pcp1) o1__ = (o1), n1__ = (n1);				\
	typeof(pcp2) o2__ = (o2), n2__ = (n2);				\
	typeof(pcp1) *p1__;						\
	typeof(pcp2) *p2__;						\
	int ret__;							\
	preempt_disable();						\
	p1__ = raw_cpu_ptr(&(pcp1));					\
	p2__ = raw_cpu_ptr(&(pcp2));					\
	ret__ = __cmpxchg_double(p1__, p2__, o1__, o2__, n1__, n2__);	\
	preempt_enable();						\
	ret__;								\
})
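
/*
 * As with the generic this_cpu_cmpxchg_double(), the two per cpu
 * variables are expected to form an adjacent, suitably aligned pair;
 * __cmpxchg_double() from <asm/cmpxchg.h> then performs the compare
 * and swap on the combined double word.
 */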

#define this_cpu_cmpxchg_double_4 arch_this_cpu_cmpxchg_double
#define this_cpu_cmpxchg_double_8 arch_this_cpu_cmpxchg_double

#endif /* CONFIG_64BIT */

#include <asm-generic/percpu.h>

#endif /* __ARCH_S390_PERCPU__ */