#ifndef _ASM_M32R_CMPXCHG_H
#define _ASM_M32R_CMPXCHG_H

/*
 *  M32R version:
 *    Copyright (C) 2001, 2002  Hitoshi Yamamoto
 *    Copyright (C) 2004  Hirokazu Takata <takata at linux-m32r.org>
 */

#include <linux/irqflags.h>
#include <asm/assembler.h>
#include <asm/dcache_clear.h>

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an xchg() on an unsupported size.  */
extern void __xchg_called_with_bad_pointer(void);

static __always_inline unsigned long
__xchg(unsigned long x, volatile void *ptr, int size)
{
	unsigned long flags;
	unsigned long tmp = 0;

	local_irq_save(flags);

	switch (size) {
#ifndef CONFIG_SMP
	case 1:
		__asm__ __volatile__ (
			"ldb	%0, @%2 \n\t"
			"stb	%1, @%2 \n\t"
			: "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
		break;
	case 2:
		__asm__ __volatile__ (
			"ldh	%0, @%2 \n\t"
			"sth	%1, @%2 \n\t"
			: "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
		break;
	case 4:
		__asm__ __volatile__ (
			"ld	%0, @%2 \n\t"
			"st	%1, @%2 \n\t"
			: "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
		break;
#else  /* CONFIG_SMP */
	case 4:
		__asm__ __volatile__ (
			DCACHE_CLEAR("%0", "r4", "%2")
			"lock	%0, @%2;	\n\t"
			"unlock	%1, @%2;	\n\t"
			: "=&r" (tmp) : "r" (x), "r" (ptr)
			: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
			, "r4"
#endif  /* CONFIG_CHIP_M32700_TS1 */
		);
		break;
#endif  /* CONFIG_SMP */
	default:
		__xchg_called_with_bad_pointer();
	}

	local_irq_restore(flags);

	return tmp;
}

#define xchg(ptr, x) ({							\
	((__typeof__(*(ptr)))__xchg((unsigned long)(x), (ptr),		\
				    sizeof(*(ptr))));			\
})
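
/*
 * Usage sketch (illustrative only; 'ready_flag' is a hypothetical variable):
 *
 *	static unsigned int ready_flag;
 *	unsigned int old = xchg(&ready_flag, 1);
 *
 * The new value is stored and the previously visible value is returned,
 * with the whole exchange performed atomically.
 */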

static __always_inline unsigned long
__xchg_local(unsigned long x, volatile void *ptr, int size)
{
	unsigned long flags;
	unsigned long tmp = 0;

	local_irq_save(flags);

	switch (size) {
	case 1:
		__asm__ __volatile__ (
			"ldb	%0, @%2 \n\t"
			"stb	%1, @%2 \n\t"
			: "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
		break;
	case 2:
		__asm__ __volatile__ (
			"ldh	%0, @%2 \n\t"
			"sth	%1, @%2 \n\t"
			: "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
		break;
	case 4:
		__asm__ __volatile__ (
			"ld	%0, @%2 \n\t"
			"st	%1, @%2 \n\t"
			: "=&r" (tmp) : "r" (x), "r" (ptr) : "memory");
		break;
	default:
		__xchg_called_with_bad_pointer();
	}

	local_irq_restore(flags);

	return tmp;
}

#define xchg_local(ptr, x)						\
	((__typeof__(*(ptr)))__xchg_local((unsigned long)(x), (ptr),	\
			sizeof(*(ptr))))
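
/*
 * Unlike xchg(), xchg_local() only disables interrupts and never uses the
 * SMP lock/unlock sequence, so it is atomic only with respect to the
 * current CPU (e.g. for data that other processors never touch).
 */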

/*
 * Compare the 32-bit word at @p with @old and, if they match, store @new;
 * the value originally read from @p is returned.  The sequence runs with
 * interrupts disabled and uses the M32R_LOCK/M32R_UNLOCK load/store pair
 * (on CONFIG_CHIP_M32700_TS1, DCACHE_CLEAR additionally clobbers r4).
 */
static inline unsigned long
__cmpxchg_u32(volatile unsigned int *p, unsigned int old, unsigned int new)
{
	unsigned long flags;
	unsigned int retval;

	local_irq_save(flags);
	__asm__ __volatile__ (
			DCACHE_CLEAR("%0", "r4", "%1")
			M32R_LOCK" %0, @%1;	\n"
		"	bne	%0, %2, 1f;	\n"
			M32R_UNLOCK" %3, @%1;	\n"
		"	bra	2f;		\n"
		"	.fillinsn		\n"
		"1:"
			M32R_UNLOCK" %0, @%1;	\n"
		"	.fillinsn		\n"
		"2:"
			: "=&r" (retval)
			: "r" (p), "r" (old), "r" (new)
			: "cbit", "memory"
#ifdef CONFIG_CHIP_M32700_TS1
			, "r4"
#endif  /* CONFIG_CHIP_M32700_TS1 */
		);
	local_irq_restore(flags);

	return retval;
}

static inline unsigned long
__cmpxchg_local_u32(volatile unsigned int *p, unsigned int old,
			unsigned int new)
{
	unsigned long flags;
	unsigned int retval;

	local_irq_save(flags);
	__asm__ __volatile__ (
			DCACHE_CLEAR("%0", "r4", "%1")
			"ld %0, @%1;		\n"
		"	bne	%0, %2, 1f;	\n"
			"st %3, @%1;		\n"
		"	bra	2f;		\n"
		"	.fillinsn		\n"
		"1:"
			"st %0, @%1;		\n"
		"	.fillinsn		\n"
		"2:"
			: "=&r" (retval)
			: "r" (p), "r" (old), "r" (new)
			: "cbit", "memory"
#ifdef CONFIG_CHIP_M32700_TS1
			, "r4"
#endif  /* CONFIG_CHIP_M32700_TS1 */
		);
	local_irq_restore(flags);

	return retval;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid cmpxchg().  */
extern void __cmpxchg_called_with_bad_pointer(void);

static inline unsigned long
__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
{
	switch (size) {
	case 4:
		return __cmpxchg_u32(ptr, old, new);
#if 0	/* we don't have __cmpxchg_u64 */
	case 8:
		return __cmpxchg_u64(ptr, old, new);
#endif /* 0 */
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}

#define cmpxchg(ptr, o, n) ({				\
	((__typeof__(*(ptr)))				\
		 __cmpxchg((ptr), (unsigned long)(o),	\
			   (unsigned long)(n),		\
			   sizeof(*(ptr))));		\
})
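
/*
 * Usage sketch (illustrative only; 'refcount' is a hypothetical variable).
 * A retry loop that decrements refcount unless it is already zero:
 *
 *	static unsigned int refcount;
 *	unsigned int cur = refcount;
 *
 *	while (cur != 0) {
 *		unsigned int seen = cmpxchg(&refcount, cur, cur - 1);
 *		if (seen == cur)
 *			break;
 *		cur = seen;
 *	}
 *
 * cmpxchg() returns the value it found at the location; the new value was
 * stored only if that return value equals the 'old' argument.
 */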

#include <asm-generic/cmpxchg-local.h>

static inline unsigned long __cmpxchg_local(volatile void *ptr,
				      unsigned long old,
				      unsigned long new, int size)
{
	switch (size) {
	case 4:
		return __cmpxchg_local_u32(ptr, old, new);
	default:
		return __cmpxchg_local_generic(ptr, old, new, size);
	}

	return old;
}

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n)					    \
	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o),    \
			(unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
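
/*
 * Usage sketch (illustrative only; 'softirq_stat' is a hypothetical counter
 * that is never written from another processor):
 *
 *	unsigned int seen = cmpxchg_local(&softirq_stat, 0, 1);
 *
 * This is cheaper than cmpxchg() because no cross-CPU atomicity is provided;
 * the operation is only guaranteed to be atomic wrt the current CPU.
 */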

#endif /* _ASM_M32R_CMPXCHG_H */