/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Inline assembly cache operations.
 *
 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
 * Copyright (C) 1997 - 2002 Ralf Baechle (ralf@gnu.org)
 * Copyright (C) 2004 Ralf Baechle (ralf@linux-mips.org)
 */
#ifndef _ASM_R4KCACHE_H
#define _ASM_R4KCACHE_H

#include <linux/stringify.h>

#include <asm/asm.h>
#include <asm/cacheops.h>
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/cpu-type.h>
#include <asm/mipsmtregs.h>
#include <asm/mmzone.h>
#include <linux/uaccess.h> /* for uaccess_kernel() */

extern void (*r4k_blast_dcache)(void);
extern void (*r4k_blast_icache)(void);

/*
 * This macro returns a properly sign-extended address suitable as a base
 * address for indexed cache operations.  Two issues here:
 *
 *  - The MIPS32 and MIPS64 specs permit an implementation to directly derive
 *    the index bits from the virtual address.  This breaks with the tradition
 *    set by the R4000.  To keep unpleasant surprises from happening we pick
 *    an address in KSEG0 / CKSEG0.
 *  - We need a properly sign-extended address for 64-bit code.  To get away
 *    without ifdefs we let the compiler do it by a type cast.
 */
#define INDEX_BASE      CKSEG0
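/*
 * Usage note: the blast helpers below seed their index walks with
 * start = INDEX_BASE, so every indexed cache op is issued against a
 * KSEG0 / CKSEG0 address as described above.
 */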

#define cache_op(op,addr)                                               \
        __asm__ __volatile__(                                           \
        "       .set    push                                    \n"     \
        "       .set    noreorder                               \n"     \
        "       .set "MIPS_ISA_ARCH_LEVEL"                      \n"     \
        "       cache   %0, %1                                  \n"     \
        "       .set    pop                                     \n"     \
        :                                                               \
        : "i" (op), "R" (*(unsigned char *)(addr)))
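/*
 * Example (illustrative): cache_op(Hit_Writeback_Inv_D, addr) issues a
 * single CACHE instruction that writes back and invalidates the D-cache
 * line containing addr.  The "R" constraint exposes the target line as a
 * memory operand so the compiler orders it correctly without loading it.
 */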

static inline void flush_icache_line_indexed(unsigned long addr)
{
        cache_op(Index_Invalidate_I, addr);
}

static inline void flush_dcache_line_indexed(unsigned long addr)
{
        cache_op(Index_Writeback_Inv_D, addr);
}

static inline void flush_scache_line_indexed(unsigned long addr)
{
        cache_op(Index_Writeback_Inv_SD, addr);
}

static inline void flush_icache_line(unsigned long addr)
{
        switch (boot_cpu_type()) {
        case CPU_LOONGSON2:
                cache_op(Hit_Invalidate_I_Loongson2, addr);
                break;

        default:
                cache_op(Hit_Invalidate_I, addr);
                break;
        }
}

static inline void flush_dcache_line(unsigned long addr)
{
        cache_op(Hit_Writeback_Inv_D, addr);
}

static inline void invalidate_dcache_line(unsigned long addr)
{
        cache_op(Hit_Invalidate_D, addr);
}

static inline void invalidate_scache_line(unsigned long addr)
{
        cache_op(Hit_Invalidate_SD, addr);
}

static inline void flush_scache_line(unsigned long addr)
{
        cache_op(Hit_Writeback_Inv_SD, addr);
}

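/*
 * protected_cache_op() wraps a single CACHE instruction with an
 * exception-table fixup: if the op faults (e.g. on an unmapped address),
 * the fixup at 3: loads -EFAULT into __err and resumes at 2:, so callers
 * get an error code back instead of an oops.
 */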
#define protected_cache_op(op,addr)                             \
({                                                              \
        int __err = 0;                                          \
        __asm__ __volatile__(                                   \
        "       .set    push                    \n"             \
        "       .set    noreorder               \n"             \
        "       .set "MIPS_ISA_ARCH_LEVEL"      \n"             \
        "1:     cache   %1, (%2)                \n"             \
        "2:     .insn                           \n"             \
        "       .set    pop                     \n"             \
        "       .section .fixup,\"ax\"          \n"             \
        "3:     li      %0, %3                  \n"             \
        "       j       2b                      \n"             \
        "       .previous                       \n"             \
        "       .section __ex_table,\"a\"       \n"             \
        "       "STR(PTR)" 1b, 3b               \n"             \
        "       .previous"                                      \
        : "+r" (__err)                                          \
        : "i" (op), "r" (addr), "i" (-EFAULT));                 \
        __err;                                                  \
})


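/*
 * EVA variant of the above: CACHEE is the EVA form of CACHE and operates
 * on a user virtual address while in kernel mode, with the same
 * exception-table fixup.
 */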
#define protected_cachee_op(op,addr)                            \
({                                                              \
        int __err = 0;                                          \
        __asm__ __volatile__(                                   \
        "       .set    push                    \n"             \
        "       .set    noreorder               \n"             \
        "       .set    mips0                   \n"             \
        "       .set    eva                     \n"             \
        "1:     cachee  %1, (%2)                \n"             \
        "2:     .insn                           \n"             \
        "       .set    pop                     \n"             \
        "       .section .fixup,\"ax\"          \n"             \
        "3:     li      %0, %3                  \n"             \
        "       j       2b                      \n"             \
        "       .previous                       \n"             \
        "       .section __ex_table,\"a\"       \n"             \
        "       "STR(PTR)" 1b, 3b               \n"             \
        "       .previous"                                      \
        : "+r" (__err)                                          \
        : "i" (op), "r" (addr), "i" (-EFAULT));                 \
        __err;                                                  \
})

/*
 * The next two are for badland addresses like signal trampolines.
 */
static inline int protected_flush_icache_line(unsigned long addr)
{
        switch (boot_cpu_type()) {
        case CPU_LOONGSON2:
                return protected_cache_op(Hit_Invalidate_I_Loongson2, addr);

        default:
#ifdef CONFIG_EVA
                return protected_cachee_op(Hit_Invalidate_I, addr);
#else
                return protected_cache_op(Hit_Invalidate_I, addr);
#endif
        }
}

/*
 * R10000 / R12000 hazard - these processors don't support the Hit_Writeback_D
 * cacheop so we use Hit_Writeback_Inv_D which is supported by all R4000-style
 * caches.  Only a single cache line gets invalidated unnecessarily, so the
 * penalty is small.
 */
static inline int protected_writeback_dcache_line(unsigned long addr)
{
#ifdef CONFIG_EVA
        return protected_cachee_op(Hit_Writeback_Inv_D, addr);
#else
        return protected_cache_op(Hit_Writeback_Inv_D, addr);
#endif
}

static inline int protected_writeback_scache_line(unsigned long addr)
{
#ifdef CONFIG_EVA
        return protected_cachee_op(Hit_Writeback_Inv_SD, addr);
#else
        return protected_cache_op(Hit_Writeback_Inv_SD, addr);
#endif
}

/*
 * This one is RM7000-specific
 */
static inline void invalidate_tcache_page(unsigned long addr)
{
        cache_op(Page_Invalidate_T, addr);
}

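/*
 * The cacheN_unroll32 macros below issue 32 CACHE ops on 32 consecutive
 * lines of N bytes, covering 32 * N bytes per invocation (512 bytes for
 * 16-byte lines up to 4 KiB for 128-byte lines).  Pre-R6 CACHE encodings
 * have a 16-bit offset field, so all 32 ops can share one base register.
 */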
#ifndef CONFIG_CPU_MIPSR6
#define cache16_unroll32(base,op)                                       \
        __asm__ __volatile__(                                           \
        "       .set push                                       \n"     \
        "       .set noreorder                                  \n"     \
        "       .set mips3                                      \n"     \
        "       cache %1, 0x000(%0); cache %1, 0x010(%0)        \n"     \
        "       cache %1, 0x020(%0); cache %1, 0x030(%0)        \n"     \
        "       cache %1, 0x040(%0); cache %1, 0x050(%0)        \n"     \
        "       cache %1, 0x060(%0); cache %1, 0x070(%0)        \n"     \
        "       cache %1, 0x080(%0); cache %1, 0x090(%0)        \n"     \
        "       cache %1, 0x0a0(%0); cache %1, 0x0b0(%0)        \n"     \
        "       cache %1, 0x0c0(%0); cache %1, 0x0d0(%0)        \n"     \
        "       cache %1, 0x0e0(%0); cache %1, 0x0f0(%0)        \n"     \
        "       cache %1, 0x100(%0); cache %1, 0x110(%0)        \n"     \
        "       cache %1, 0x120(%0); cache %1, 0x130(%0)        \n"     \
        "       cache %1, 0x140(%0); cache %1, 0x150(%0)        \n"     \
        "       cache %1, 0x160(%0); cache %1, 0x170(%0)        \n"     \
        "       cache %1, 0x180(%0); cache %1, 0x190(%0)        \n"     \
        "       cache %1, 0x1a0(%0); cache %1, 0x1b0(%0)        \n"     \
        "       cache %1, 0x1c0(%0); cache %1, 0x1d0(%0)        \n"     \
        "       cache %1, 0x1e0(%0); cache %1, 0x1f0(%0)        \n"     \
        "       .set pop                                        \n"     \
                :                                                       \
                : "r" (base),                                           \
                  "i" (op));

#define cache32_unroll32(base,op)                                       \
        __asm__ __volatile__(                                           \
        "       .set push                                       \n"     \
        "       .set noreorder                                  \n"     \
        "       .set mips3                                      \n"     \
        "       cache %1, 0x000(%0); cache %1, 0x020(%0)        \n"     \
        "       cache %1, 0x040(%0); cache %1, 0x060(%0)        \n"     \
        "       cache %1, 0x080(%0); cache %1, 0x0a0(%0)        \n"     \
        "       cache %1, 0x0c0(%0); cache %1, 0x0e0(%0)        \n"     \
        "       cache %1, 0x100(%0); cache %1, 0x120(%0)        \n"     \
        "       cache %1, 0x140(%0); cache %1, 0x160(%0)        \n"     \
        "       cache %1, 0x180(%0); cache %1, 0x1a0(%0)        \n"     \
        "       cache %1, 0x1c0(%0); cache %1, 0x1e0(%0)        \n"     \
        "       cache %1, 0x200(%0); cache %1, 0x220(%0)        \n"     \
        "       cache %1, 0x240(%0); cache %1, 0x260(%0)        \n"     \
        "       cache %1, 0x280(%0); cache %1, 0x2a0(%0)        \n"     \
        "       cache %1, 0x2c0(%0); cache %1, 0x2e0(%0)        \n"     \
        "       cache %1, 0x300(%0); cache %1, 0x320(%0)        \n"     \
        "       cache %1, 0x340(%0); cache %1, 0x360(%0)        \n"     \
        "       cache %1, 0x380(%0); cache %1, 0x3a0(%0)        \n"     \
        "       cache %1, 0x3c0(%0); cache %1, 0x3e0(%0)        \n"     \
        "       .set pop                                        \n"     \
                :                                                       \
                : "r" (base),                                           \
                  "i" (op));

#define cache64_unroll32(base,op)                                       \
        __asm__ __volatile__(                                           \
        "       .set push                                       \n"     \
        "       .set noreorder                                  \n"     \
        "       .set mips3                                      \n"     \
        "       cache %1, 0x000(%0); cache %1, 0x040(%0)        \n"     \
        "       cache %1, 0x080(%0); cache %1, 0x0c0(%0)        \n"     \
        "       cache %1, 0x100(%0); cache %1, 0x140(%0)        \n"     \
        "       cache %1, 0x180(%0); cache %1, 0x1c0(%0)        \n"     \
        "       cache %1, 0x200(%0); cache %1, 0x240(%0)        \n"     \
        "       cache %1, 0x280(%0); cache %1, 0x2c0(%0)        \n"     \
        "       cache %1, 0x300(%0); cache %1, 0x340(%0)        \n"     \
        "       cache %1, 0x380(%0); cache %1, 0x3c0(%0)        \n"     \
        "       cache %1, 0x400(%0); cache %1, 0x440(%0)        \n"     \
        "       cache %1, 0x480(%0); cache %1, 0x4c0(%0)        \n"     \
        "       cache %1, 0x500(%0); cache %1, 0x540(%0)        \n"     \
        "       cache %1, 0x580(%0); cache %1, 0x5c0(%0)        \n"     \
        "       cache %1, 0x600(%0); cache %1, 0x640(%0)        \n"     \
        "       cache %1, 0x680(%0); cache %1, 0x6c0(%0)        \n"     \
        "       cache %1, 0x700(%0); cache %1, 0x740(%0)        \n"     \
        "       cache %1, 0x780(%0); cache %1, 0x7c0(%0)        \n"     \
        "       .set pop                                        \n"     \
                :                                                       \
                : "r" (base),                                           \
                  "i" (op));

#define cache128_unroll32(base,op)                                      \
        __asm__ __volatile__(                                           \
        "       .set push                                       \n"     \
        "       .set noreorder                                  \n"     \
        "       .set mips3                                      \n"     \
        "       cache %1, 0x000(%0); cache %1, 0x080(%0)        \n"     \
        "       cache %1, 0x100(%0); cache %1, 0x180(%0)        \n"     \
        "       cache %1, 0x200(%0); cache %1, 0x280(%0)        \n"     \
        "       cache %1, 0x300(%0); cache %1, 0x380(%0)        \n"     \
        "       cache %1, 0x400(%0); cache %1, 0x480(%0)        \n"     \
        "       cache %1, 0x500(%0); cache %1, 0x580(%0)        \n"     \
        "       cache %1, 0x600(%0); cache %1, 0x680(%0)        \n"     \
        "       cache %1, 0x700(%0); cache %1, 0x780(%0)        \n"     \
        "       cache %1, 0x800(%0); cache %1, 0x880(%0)        \n"     \
        "       cache %1, 0x900(%0); cache %1, 0x980(%0)        \n"     \
        "       cache %1, 0xa00(%0); cache %1, 0xa80(%0)        \n"     \
        "       cache %1, 0xb00(%0); cache %1, 0xb80(%0)        \n"     \
        "       cache %1, 0xc00(%0); cache %1, 0xc80(%0)        \n"     \
        "       cache %1, 0xd00(%0); cache %1, 0xd80(%0)        \n"     \
        "       cache %1, 0xe00(%0); cache %1, 0xe80(%0)        \n"     \
        "       cache %1, 0xf00(%0); cache %1, 0xf80(%0)        \n"     \
        "       .set pop                                        \n"     \
                :                                                       \
                : "r" (base),                                           \
                  "i" (op));

#else
/*
 * MIPS R6 changed the cache opcode and moved to an 8-bit offset field.
 * This means we now need to increment the base register before we flush
 * more cache lines.
 */
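/*
 * The R6 variants below use $1 (the assembler temporary) as the bumped
 * base register, hence ".set noat" to tell the assembler we own it.
 */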
#define cache16_unroll32(base,op)                               \
        __asm__ __volatile__(                                   \
        "       .set push\n"                                    \
        "       .set noreorder\n"                               \
        "       .set mips64r6\n"                                \
        "       .set noat\n"                                    \
        "       cache %1, 0x000(%0); cache %1, 0x010(%0)\n"     \
        "       cache %1, 0x020(%0); cache %1, 0x030(%0)\n"     \
        "       cache %1, 0x040(%0); cache %1, 0x050(%0)\n"     \
        "       cache %1, 0x060(%0); cache %1, 0x070(%0)\n"     \
        "       cache %1, 0x080(%0); cache %1, 0x090(%0)\n"     \
        "       cache %1, 0x0a0(%0); cache %1, 0x0b0(%0)\n"     \
        "       cache %1, 0x0c0(%0); cache %1, 0x0d0(%0)\n"     \
        "       cache %1, 0x0e0(%0); cache %1, 0x0f0(%0)\n"     \
        "       "__stringify(LONG_ADDIU)" $1, %0, 0x100 \n"     \
        "       cache %1, 0x000($1); cache %1, 0x010($1)\n"     \
        "       cache %1, 0x020($1); cache %1, 0x030($1)\n"     \
        "       cache %1, 0x040($1); cache %1, 0x050($1)\n"     \
        "       cache %1, 0x060($1); cache %1, 0x070($1)\n"     \
        "       cache %1, 0x080($1); cache %1, 0x090($1)\n"     \
        "       cache %1, 0x0a0($1); cache %1, 0x0b0($1)\n"     \
        "       cache %1, 0x0c0($1); cache %1, 0x0d0($1)\n"     \
        "       cache %1, 0x0e0($1); cache %1, 0x0f0($1)\n"     \
        "       .set pop\n"                                     \
                :                                               \
                : "r" (base),                                   \
                  "i" (op));

#define cache32_unroll32(base,op)                               \
        __asm__ __volatile__(                                   \
        "       .set push\n"                                    \
        "       .set noreorder\n"                               \
        "       .set mips64r6\n"                                \
        "       .set noat\n"                                    \
        "       cache %1, 0x000(%0); cache %1, 0x020(%0)\n"     \
        "       cache %1, 0x040(%0); cache %1, 0x060(%0)\n"     \
        "       cache %1, 0x080(%0); cache %1, 0x0a0(%0)\n"     \
        "       cache %1, 0x0c0(%0); cache %1, 0x0e0(%0)\n"     \
        "       "__stringify(LONG_ADDIU)" $1, %0, 0x100 \n"     \
        "       cache %1, 0x000($1); cache %1, 0x020($1)\n"     \
        "       cache %1, 0x040($1); cache %1, 0x060($1)\n"     \
        "       cache %1, 0x080($1); cache %1, 0x0a0($1)\n"     \
        "       cache %1, 0x0c0($1); cache %1, 0x0e0($1)\n"     \
        "       "__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"     \
        "       cache %1, 0x000($1); cache %1, 0x020($1)\n"     \
        "       cache %1, 0x040($1); cache %1, 0x060($1)\n"     \
        "       cache %1, 0x080($1); cache %1, 0x0a0($1)\n"     \
        "       cache %1, 0x0c0($1); cache %1, 0x0e0($1)\n"     \
        "       "__stringify(LONG_ADDIU)" $1, $1, 0x100\n"      \
        "       cache %1, 0x000($1); cache %1, 0x020($1)\n"     \
        "       cache %1, 0x040($1); cache %1, 0x060($1)\n"     \
        "       cache %1, 0x080($1); cache %1, 0x0a0($1)\n"     \
        "       cache %1, 0x0c0($1); cache %1, 0x0e0($1)\n"     \
        "       .set pop\n"                                     \
                :                                               \
                : "r" (base),                                   \
                  "i" (op));

#define cache64_unroll32(base,op)                               \
        __asm__ __volatile__(                                   \
        "       .set push\n"                                    \
        "       .set noreorder\n"                               \
        "       .set mips64r6\n"                                \
        "       .set noat\n"                                    \
        "       cache %1, 0x000(%0); cache %1, 0x040(%0)\n"     \
        "       cache %1, 0x080(%0); cache %1, 0x0c0(%0)\n"     \
        "       "__stringify(LONG_ADDIU)" $1, %0, 0x100 \n"     \
        "       cache %1, 0x000($1); cache %1, 0x040($1)\n"     \
        "       cache %1, 0x080($1); cache %1, 0x0c0($1)\n"     \
        "       "__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"     \
        "       cache %1, 0x000($1); cache %1, 0x040($1)\n"     \
        "       cache %1, 0x080($1); cache %1, 0x0c0($1)\n"     \
        "       "__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"     \
        "       cache %1, 0x000($1); cache %1, 0x040($1)\n"     \
        "       cache %1, 0x080($1); cache %1, 0x0c0($1)\n"     \
        "       "__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"     \
        "       cache %1, 0x000($1); cache %1, 0x040($1)\n"     \
        "       cache %1, 0x080($1); cache %1, 0x0c0($1)\n"     \
        "       "__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"     \
        "       cache %1, 0x000($1); cache %1, 0x040($1)\n"     \
        "       cache %1, 0x080($1); cache %1, 0x0c0($1)\n"     \
        "       "__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"     \
        "       cache %1, 0x000($1); cache %1, 0x040($1)\n"     \
        "       cache %1, 0x080($1); cache %1, 0x0c0($1)\n"     \
        "       "__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"     \
        "       cache %1, 0x000($1); cache %1, 0x040($1)\n"     \
        "       cache %1, 0x080($1); cache %1, 0x0c0($1)\n"     \
        "       .set pop\n"                                     \
                :                                               \
                : "r" (base),                                   \
                  "i" (op));

#define cache128_unroll32(base,op)                              \
        __asm__ __volatile__(                                   \
        "       .set push\n"                                    \
        "       .set noreorder\n"                               \
        "       .set mips64r6\n"                                \
        "       .set noat\n"                                    \
        "       cache %1, 0x000(%0); cache %1, 0x080(%0)\n"     \
        "       "__stringify(LONG_ADDIU)" $1, %0, 0x100 \n"     \
        "       cache %1, 0x000($1); cache %1, 0x080($1)\n"     \
        "       "__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"     \
        "       cache %1, 0x000($1); cache %1, 0x080($1)\n"     \
        "       "__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"     \
        "       cache %1, 0x000($1); cache %1, 0x080($1)\n"     \
        "       "__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"     \
        "       cache %1, 0x000($1); cache %1, 0x080($1)\n"     \
        "       "__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"     \
        "       cache %1, 0x000($1); cache %1, 0x080($1)\n"     \
        "       "__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"     \
        "       cache %1, 0x000($1); cache %1, 0x080($1)\n"     \
        "       "__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"     \
        "       cache %1, 0x000($1); cache %1, 0x080($1)\n"     \
        "       "__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"     \
        "       cache %1, 0x000($1); cache %1, 0x080($1)\n"     \
        "       "__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"     \
        "       cache %1, 0x000($1); cache %1, 0x080($1)\n"     \
        "       "__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"     \
        "       cache %1, 0x000($1); cache %1, 0x080($1)\n"     \
        "       "__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"     \
        "       cache %1, 0x000($1); cache %1, 0x080($1)\n"     \
        "       "__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"     \
        "       cache %1, 0x000($1); cache %1, 0x080($1)\n"     \
        "       "__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"     \
        "       cache %1, 0x000($1); cache %1, 0x080($1)\n"     \
        "       "__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"     \
        "       cache %1, 0x000($1); cache %1, 0x080($1)\n"     \
        "       "__stringify(LONG_ADDIU)" $1, $1, 0x100 \n"     \
        "       cache %1, 0x000($1); cache %1, 0x080($1)\n"     \
        "       .set pop\n"                                     \
                :                                               \
                : "r" (base),                                   \
                  "i" (op));
#endif /* CONFIG_CPU_MIPSR6 */

/*
 * Perform the cache operation specified by op using a user mode virtual
 * address while in kernel mode.
 */
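/*
 * These rely on the EVA CACHEE instruction (".set eva"), so they are
 * only usable on EVA-capable cores.
 */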
#define cache16_unroll32_user(base,op)                                  \
        __asm__ __volatile__(                                           \
        "       .set push                                       \n"     \
        "       .set noreorder                                  \n"     \
        "       .set mips0                                      \n"     \
        "       .set eva                                        \n"     \
        "       cachee %1, 0x000(%0); cachee %1, 0x010(%0)      \n"     \
        "       cachee %1, 0x020(%0); cachee %1, 0x030(%0)      \n"     \
        "       cachee %1, 0x040(%0); cachee %1, 0x050(%0)      \n"     \
        "       cachee %1, 0x060(%0); cachee %1, 0x070(%0)      \n"     \
        "       cachee %1, 0x080(%0); cachee %1, 0x090(%0)      \n"     \
        "       cachee %1, 0x0a0(%0); cachee %1, 0x0b0(%0)      \n"     \
        "       cachee %1, 0x0c0(%0); cachee %1, 0x0d0(%0)      \n"     \
        "       cachee %1, 0x0e0(%0); cachee %1, 0x0f0(%0)      \n"     \
        "       cachee %1, 0x100(%0); cachee %1, 0x110(%0)      \n"     \
        "       cachee %1, 0x120(%0); cachee %1, 0x130(%0)      \n"     \
        "       cachee %1, 0x140(%0); cachee %1, 0x150(%0)      \n"     \
        "       cachee %1, 0x160(%0); cachee %1, 0x170(%0)      \n"     \
        "       cachee %1, 0x180(%0); cachee %1, 0x190(%0)      \n"     \
        "       cachee %1, 0x1a0(%0); cachee %1, 0x1b0(%0)      \n"     \
        "       cachee %1, 0x1c0(%0); cachee %1, 0x1d0(%0)      \n"     \
        "       cachee %1, 0x1e0(%0); cachee %1, 0x1f0(%0)      \n"     \
        "       .set pop                                        \n"     \
                :                                                       \
                : "r" (base),                                           \
                  "i" (op));

#define cache32_unroll32_user(base, op)                                 \
        __asm__ __volatile__(                                           \
        "       .set push                                       \n"     \
        "       .set noreorder                                  \n"     \
        "       .set mips0                                      \n"     \
        "       .set eva                                        \n"     \
        "       cachee %1, 0x000(%0); cachee %1, 0x020(%0)      \n"     \
        "       cachee %1, 0x040(%0); cachee %1, 0x060(%0)      \n"     \
        "       cachee %1, 0x080(%0); cachee %1, 0x0a0(%0)      \n"     \
        "       cachee %1, 0x0c0(%0); cachee %1, 0x0e0(%0)      \n"     \
        "       cachee %1, 0x100(%0); cachee %1, 0x120(%0)      \n"     \
        "       cachee %1, 0x140(%0); cachee %1, 0x160(%0)      \n"     \
        "       cachee %1, 0x180(%0); cachee %1, 0x1a0(%0)      \n"     \
        "       cachee %1, 0x1c0(%0); cachee %1, 0x1e0(%0)      \n"     \
        "       cachee %1, 0x200(%0); cachee %1, 0x220(%0)      \n"     \
        "       cachee %1, 0x240(%0); cachee %1, 0x260(%0)      \n"     \
        "       cachee %1, 0x280(%0); cachee %1, 0x2a0(%0)      \n"     \
        "       cachee %1, 0x2c0(%0); cachee %1, 0x2e0(%0)      \n"     \
        "       cachee %1, 0x300(%0); cachee %1, 0x320(%0)      \n"     \
        "       cachee %1, 0x340(%0); cachee %1, 0x360(%0)      \n"     \
        "       cachee %1, 0x380(%0); cachee %1, 0x3a0(%0)      \n"     \
        "       cachee %1, 0x3c0(%0); cachee %1, 0x3e0(%0)      \n"     \
        "       .set pop                                        \n"     \
                :                                                       \
                : "r" (base),                                           \
                  "i" (op));

#define cache64_unroll32_user(base, op)                                 \
        __asm__ __volatile__(                                           \
        "       .set push                                       \n"     \
        "       .set noreorder                                  \n"     \
        "       .set mips0                                      \n"     \
        "       .set eva                                        \n"     \
        "       cachee %1, 0x000(%0); cachee %1, 0x040(%0)      \n"     \
        "       cachee %1, 0x080(%0); cachee %1, 0x0c0(%0)      \n"     \
        "       cachee %1, 0x100(%0); cachee %1, 0x140(%0)      \n"     \
        "       cachee %1, 0x180(%0); cachee %1, 0x1c0(%0)      \n"     \
        "       cachee %1, 0x200(%0); cachee %1, 0x240(%0)      \n"     \
        "       cachee %1, 0x280(%0); cachee %1, 0x2c0(%0)      \n"     \
        "       cachee %1, 0x300(%0); cachee %1, 0x340(%0)      \n"     \
        "       cachee %1, 0x380(%0); cachee %1, 0x3c0(%0)      \n"     \
        "       cachee %1, 0x400(%0); cachee %1, 0x440(%0)      \n"     \
        "       cachee %1, 0x480(%0); cachee %1, 0x4c0(%0)      \n"     \
        "       cachee %1, 0x500(%0); cachee %1, 0x540(%0)      \n"     \
        "       cachee %1, 0x580(%0); cachee %1, 0x5c0(%0)      \n"     \
        "       cachee %1, 0x600(%0); cachee %1, 0x640(%0)      \n"     \
        "       cachee %1, 0x680(%0); cachee %1, 0x6c0(%0)      \n"     \
        "       cachee %1, 0x700(%0); cachee %1, 0x740(%0)      \n"     \
        "       cachee %1, 0x780(%0); cachee %1, 0x7c0(%0)      \n"     \
        "       .set pop                                        \n"     \
                :                                                       \
                : "r" (base),                                           \
                  "i" (op));

/* build blast_xxx, blast_xxx_page, blast_xxx_page_indexed */
#define __BUILD_BLAST_CACHE(pfx, desc, indexop, hitop, lsize, extra)    \
static inline void extra##blast_##pfx##cache##lsize(void)               \
{                                                                       \
        unsigned long start = INDEX_BASE;                               \
        unsigned long end = start + current_cpu_data.desc.waysize;      \
        unsigned long ws_inc = 1UL << current_cpu_data.desc.waybit;     \
        unsigned long ws_end = current_cpu_data.desc.ways <<            \
                               current_cpu_data.desc.waybit;            \
        unsigned long ws, addr;                                         \
                                                                        \
        for (ws = 0; ws < ws_end; ws += ws_inc)                         \
                for (addr = start; addr < end; addr += lsize * 32)      \
                        cache##lsize##_unroll32(addr|ws, indexop);      \
}                                                                       \
                                                                        \
static inline void extra##blast_##pfx##cache##lsize##_page(unsigned long page) \
{                                                                       \
        unsigned long start = page;                                     \
        unsigned long end = page + PAGE_SIZE;                           \
                                                                        \
        do {                                                            \
                cache##lsize##_unroll32(start, hitop);                  \
                start += lsize * 32;                                    \
        } while (start < end);                                          \
}                                                                       \
                                                                        \
static inline void extra##blast_##pfx##cache##lsize##_page_indexed(unsigned long page) \
{                                                                       \
        unsigned long indexmask = current_cpu_data.desc.waysize - 1;    \
        unsigned long start = INDEX_BASE + (page & indexmask);          \
        unsigned long end = start + PAGE_SIZE;                          \
        unsigned long ws_inc = 1UL << current_cpu_data.desc.waybit;     \
        unsigned long ws_end = current_cpu_data.desc.ways <<            \
                               current_cpu_data.desc.waybit;            \
        unsigned long ws, addr;                                         \
                                                                        \
        for (ws = 0; ws < ws_end; ws += ws_inc)                         \
                for (addr = start; addr < end; addr += lsize * 32)      \
                        cache##lsize##_unroll32(addr|ws, indexop);      \
}
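
/*
 * Example expansion (illustrative):
 * __BUILD_BLAST_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D, 32, )
 * generates blast_dcache32(), blast_dcache32_page() and
 * blast_dcache32_page_indexed().
 */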

__BUILD_BLAST_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D, 16, )
__BUILD_BLAST_CACHE(i, icache, Index_Invalidate_I, Hit_Invalidate_I, 16, )
__BUILD_BLAST_CACHE(s, scache, Index_Writeback_Inv_SD, Hit_Writeback_Inv_SD, 16, )
__BUILD_BLAST_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D, 32, )
__BUILD_BLAST_CACHE(i, icache, Index_Invalidate_I, Hit_Invalidate_I, 32, )
__BUILD_BLAST_CACHE(i, icache, Index_Invalidate_I, Hit_Invalidate_I_Loongson2, 32, loongson2_)
__BUILD_BLAST_CACHE(s, scache, Index_Writeback_Inv_SD, Hit_Writeback_Inv_SD, 32, )
__BUILD_BLAST_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D, 64, )
__BUILD_BLAST_CACHE(i, icache, Index_Invalidate_I, Hit_Invalidate_I, 64, )
__BUILD_BLAST_CACHE(s, scache, Index_Writeback_Inv_SD, Hit_Writeback_Inv_SD, 64, )
__BUILD_BLAST_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D, 128, )
__BUILD_BLAST_CACHE(i, icache, Index_Invalidate_I, Hit_Invalidate_I, 128, )
__BUILD_BLAST_CACHE(s, scache, Index_Writeback_Inv_SD, Hit_Writeback_Inv_SD, 128, )

__BUILD_BLAST_CACHE(inv_d, dcache, Index_Writeback_Inv_D, Hit_Invalidate_D, 16, )
__BUILD_BLAST_CACHE(inv_d, dcache, Index_Writeback_Inv_D, Hit_Invalidate_D, 32, )
__BUILD_BLAST_CACHE(inv_s, scache, Index_Writeback_Inv_SD, Hit_Invalidate_SD, 16, )
__BUILD_BLAST_CACHE(inv_s, scache, Index_Writeback_Inv_SD, Hit_Invalidate_SD, 32, )
__BUILD_BLAST_CACHE(inv_s, scache, Index_Writeback_Inv_SD, Hit_Invalidate_SD, 64, )
__BUILD_BLAST_CACHE(inv_s, scache, Index_Writeback_Inv_SD, Hit_Invalidate_SD, 128, )

#define __BUILD_BLAST_USER_CACHE(pfx, desc, indexop, hitop, lsize) \
static inline void blast_##pfx##cache##lsize##_user_page(unsigned long page) \
{                                                                       \
        unsigned long start = page;                                     \
        unsigned long end = page + PAGE_SIZE;                           \
                                                                        \
        do {                                                            \
                cache##lsize##_unroll32_user(start, hitop);             \
                start += lsize * 32;                                    \
        } while (start < end);                                          \
}
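
/*
 * Note that only the hit-type _user_page helper is generated here; the
 * desc and indexop parameters are accepted for symmetry with
 * __BUILD_BLAST_CACHE but are unused.
 */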

__BUILD_BLAST_USER_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D,
                         16)
__BUILD_BLAST_USER_CACHE(i, icache, Index_Invalidate_I, Hit_Invalidate_I, 16)
__BUILD_BLAST_USER_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D,
                         32)
__BUILD_BLAST_USER_CACHE(i, icache, Index_Invalidate_I, Hit_Invalidate_I, 32)
__BUILD_BLAST_USER_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D,
                         64)
__BUILD_BLAST_USER_CACHE(i, icache, Index_Invalidate_I, Hit_Invalidate_I, 64)

/* build blast_xxx_range, protected_blast_xxx_range */
#define __BUILD_BLAST_CACHE_RANGE(pfx, desc, hitop, prot, extra)        \
static inline void prot##extra##blast_##pfx##cache##_range(unsigned long start, \
                                                    unsigned long end)  \
{                                                                       \
        unsigned long lsize = cpu_##desc##_line_size();                 \
        unsigned long addr = start & ~(lsize - 1);                      \
        unsigned long aend = (end - 1) & ~(lsize - 1);                  \
                                                                        \
        while (1) {                                                     \
                prot##cache_op(hitop, addr);                            \
                if (addr == aend)                                       \
                        break;                                          \
                addr += lsize;                                          \
        }                                                               \
}
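
/*
 * aend is the address of the *last* line in the range, and the loop
 * tests it after each op, so at least one line is always flushed and
 * addr never steps past aend (avoiding wraparound for ranges that end
 * at the top of the address space).
 */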

#ifndef CONFIG_EVA

__BUILD_BLAST_CACHE_RANGE(d, dcache, Hit_Writeback_Inv_D, protected_, )
__BUILD_BLAST_CACHE_RANGE(i, icache, Hit_Invalidate_I, protected_, )

#else

#define __BUILD_PROT_BLAST_CACHE_RANGE(pfx, desc, hitop)                \
static inline void protected_blast_##pfx##cache##_range(unsigned long start,\
                                                        unsigned long end) \
{                                                                       \
        unsigned long lsize = cpu_##desc##_line_size();                 \
        unsigned long addr = start & ~(lsize - 1);                      \
        unsigned long aend = (end - 1) & ~(lsize - 1);                  \
                                                                        \
        if (!uaccess_kernel()) {                                        \
                while (1) {                                             \
                        protected_cachee_op(hitop, addr);               \
                        if (addr == aend)                               \
                                break;                                  \
                        addr += lsize;                                  \
                }                                                       \
        } else {                                                        \
                while (1) {                                             \
                        protected_cache_op(hitop, addr);                \
                        if (addr == aend)                               \
                                break;                                  \
                        addr += lsize;                                  \
                }                                                       \
                                                                        \
        }                                                               \
}

__BUILD_PROT_BLAST_CACHE_RANGE(d, dcache, Hit_Writeback_Inv_D)
__BUILD_PROT_BLAST_CACHE_RANGE(i, icache, Hit_Invalidate_I)

#endif
__BUILD_BLAST_CACHE_RANGE(s, scache, Hit_Writeback_Inv_SD, protected_, )
__BUILD_BLAST_CACHE_RANGE(i, icache, Hit_Invalidate_I_Loongson2, \
        protected_, loongson2_)
__BUILD_BLAST_CACHE_RANGE(d, dcache, Hit_Writeback_Inv_D, , )
__BUILD_BLAST_CACHE_RANGE(i, icache, Hit_Invalidate_I, , )
__BUILD_BLAST_CACHE_RANGE(s, scache, Hit_Writeback_Inv_SD, , )
/* blast_inv_dcache_range */
__BUILD_BLAST_CACHE_RANGE(inv_d, dcache, Hit_Invalidate_D, , )
__BUILD_BLAST_CACHE_RANGE(inv_s, scache, Hit_Invalidate_SD, , )

/* Currently, this is very specific to Loongson-3 */
#define __BUILD_BLAST_CACHE_NODE(pfx, desc, indexop, hitop, lsize)      \
static inline void blast_##pfx##cache##lsize##_node(long node)          \
{                                                                       \
        unsigned long start = CAC_BASE | nid_to_addrbase(node);         \
        unsigned long end = start + current_cpu_data.desc.waysize;      \
        unsigned long ws_inc = 1UL << current_cpu_data.desc.waybit;     \
        unsigned long ws_end = current_cpu_data.desc.ways <<            \
                               current_cpu_data.desc.waybit;            \
        unsigned long ws, addr;                                         \
                                                                        \
        for (ws = 0; ws < ws_end; ws += ws_inc)                         \
                for (addr = start; addr < end; addr += lsize * 32)      \
                        cache##lsize##_unroll32(addr|ws, indexop);      \
}
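
/*
 * On Loongson-3 NUMA systems nid_to_addrbase() offsets the cached
 * CAC_BASE window into the given node's local memory, so the indexed
 * ops above target that node's scache.
 */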

__BUILD_BLAST_CACHE_NODE(s, scache, Index_Writeback_Inv_SD, Hit_Writeback_Inv_SD, 16)
__BUILD_BLAST_CACHE_NODE(s, scache, Index_Writeback_Inv_SD, Hit_Writeback_Inv_SD, 32)
__BUILD_BLAST_CACHE_NODE(s, scache, Index_Writeback_Inv_SD, Hit_Writeback_Inv_SD, 64)
__BUILD_BLAST_CACHE_NODE(s, scache, Index_Writeback_Inv_SD, Hit_Writeback_Inv_SD, 128)

#endif /* _ASM_R4KCACHE_H */