treewide: Replace GPLv2 boilerplate/reference with SPDX - rule 500
arch/c6x/platforms/cache.c (sfrench/cifs-2.6.git)
// SPDX-License-Identifier: GPL-2.0-only
/*
 *  Copyright (C) 2011 Texas Instruments Incorporated
 *  Author: Mark Salter <msalter@redhat.com>
 */
#include <linux/of.h>
#include <linux/of_address.h>
#include <linux/io.h>

#include <asm/cache.h>
#include <asm/soc.h>

/*
 * Internal Memory Control Registers for caches
 */
#define IMCR_CCFG         0x0000
#define IMCR_L1PCFG       0x0020
#define IMCR_L1PCC        0x0024
#define IMCR_L1DCFG       0x0040
#define IMCR_L1DCC        0x0044
#define IMCR_L2ALLOC0     0x2000
#define IMCR_L2ALLOC1     0x2004
#define IMCR_L2ALLOC2     0x2008
#define IMCR_L2ALLOC3     0x200c
#define IMCR_L2WBAR       0x4000
#define IMCR_L2WWC        0x4004
#define IMCR_L2WIBAR      0x4010
#define IMCR_L2WIWC       0x4014
#define IMCR_L2IBAR       0x4018
#define IMCR_L2IWC        0x401c
#define IMCR_L1PIBAR      0x4020
#define IMCR_L1PIWC       0x4024
#define IMCR_L1DWIBAR     0x4030
#define IMCR_L1DWIWC      0x4034
#define IMCR_L1DWBAR      0x4040
#define IMCR_L1DWWC       0x4044
#define IMCR_L1DIBAR      0x4048
#define IMCR_L1DIWC       0x404c
#define IMCR_L2WB         0x5000
#define IMCR_L2WBINV      0x5004
#define IMCR_L2INV        0x5008
#define IMCR_L1PINV       0x5028
#define IMCR_L1DWB        0x5040
#define IMCR_L1DWBINV     0x5044
#define IMCR_L1DINV       0x5048
#define IMCR_MAR_BASE     0x8000
#define IMCR_MAR96_111    0x8180
#define IMCR_MAR128_191   0x8200
#define IMCR_MAR224_239   0x8380
#define IMCR_L2MPFAR      0xa000
#define IMCR_L2MPFSR      0xa004
#define IMCR_L2MPFCR      0xa008
#define IMCR_L2MPLK0      0xa100
#define IMCR_L2MPLK1      0xa104
#define IMCR_L2MPLK2      0xa108
#define IMCR_L2MPLK3      0xa10c
#define IMCR_L2MPLKCMD    0xa110
#define IMCR_L2MPLKSTAT   0xa114
#define IMCR_L2MPPA_BASE  0xa200
#define IMCR_L1PMPFAR     0xa400
#define IMCR_L1PMPFSR     0xa404
#define IMCR_L1PMPFCR     0xa408
#define IMCR_L1PMPLK0     0xa500
#define IMCR_L1PMPLK1     0xa504
#define IMCR_L1PMPLK2     0xa508
#define IMCR_L1PMPLK3     0xa50c
#define IMCR_L1PMPLKCMD   0xa510
#define IMCR_L1PMPLKSTAT  0xa514
#define IMCR_L1PMPPA_BASE 0xa600
#define IMCR_L1DMPFAR     0xac00
#define IMCR_L1DMPFSR     0xac04
#define IMCR_L1DMPFCR     0xac08
#define IMCR_L1DMPLK0     0xad00
#define IMCR_L1DMPLK1     0xad04
#define IMCR_L1DMPLK2     0xad08
#define IMCR_L1DMPLK3     0xad0c
#define IMCR_L1DMPLKCMD   0xad10
#define IMCR_L1DMPLKSTAT  0xad14
#define IMCR_L1DMPPA_BASE 0xae00
#define IMCR_L2PDWAKE0    0xc040
#define IMCR_L2PDWAKE1    0xc044
#define IMCR_L2PDSLEEP0   0xc050
#define IMCR_L2PDSLEEP1   0xc054
#define IMCR_L2PDSTAT0    0xc060
#define IMCR_L2PDSTAT1    0xc064

/*
 * CCFG register values and bits
 */
#define L2MODE_0K_CACHE   0x0
#define L2MODE_32K_CACHE  0x1
#define L2MODE_64K_CACHE  0x2
#define L2MODE_128K_CACHE 0x3
#define L2MODE_256K_CACHE 0x7

#define L2PRIO_URGENT     0x0
#define L2PRIO_HIGH       0x1
#define L2PRIO_MEDIUM     0x2
#define L2PRIO_LOW        0x3

#define CCFG_ID           0x100   /* Invalidate L1P bit */
#define CCFG_IP           0x200   /* Invalidate L1D bit */

static void __iomem *cache_base;

/*
 * L1 & L2 caches generic functions
 */
#define imcr_get(reg) soc_readl(cache_base + (reg))
#define imcr_set(reg, value)				\
do {							\
	soc_writel((value), cache_base + (reg));	\
	soc_readl(cache_base + (reg));			\
} while (0)

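/*
 * Note on imcr_set(): the read-back after the write is deliberate; it
 * makes sure the value has actually reached the IMCR block (and has not
 * merely been posted) before the caller goes on to poll a status bit or
 * start another operation.
 */
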
static void cache_block_operation_wait(unsigned int wc_reg)
{
	/* Wait for completion */
	while (imcr_get(wc_reg))
		cpu_relax();
}

static DEFINE_SPINLOCK(cache_lock);

/*
 * Generic function to perform a block cache operation such as
 * invalidate or writeback/invalidate
 */
static void cache_block_operation(unsigned int *start,
				  unsigned int *end,
				  unsigned int bar_reg,
				  unsigned int wc_reg)
{
	unsigned long flags;
	unsigned int wcnt =
		(L2_CACHE_ALIGN_CNT((unsigned int) end)
		 - L2_CACHE_ALIGN_LOW((unsigned int) start)) >> 2;
	unsigned int wc = 0;

	for (; wcnt; wcnt -= wc, start += wc) {
loop:
		spin_lock_irqsave(&cache_lock, flags);

		/*
		 * If another block cache operation is still in progress
		 */
		if (unlikely(imcr_get(wc_reg))) {
			spin_unlock_irqrestore(&cache_lock, flags);

			/* Wait for previous operation completion */
			cache_block_operation_wait(wc_reg);

			/* Try again */
			goto loop;
		}

		imcr_set(bar_reg, L2_CACHE_ALIGN_LOW((unsigned int) start));

		if (wcnt > 0xffff)
			wc = 0xffff;
		else
			wc = wcnt;

		/* Set word count value in the WC register */
		imcr_set(wc_reg, wc & 0xffff);

		spin_unlock_irqrestore(&cache_lock, flags);

		/* Wait for completion */
		cache_block_operation_wait(wc_reg);
	}
}

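/*
 * The WC word-count registers hold a 16-bit count of 32-bit words,
 * which is why cache_block_operation() above (and the _nowait variant
 * below) splits a large range into chunks of at most 0xffff words and
 * programs one BAR/WC block command per chunk.
 */
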
static void cache_block_operation_nowait(unsigned int *start,
					 unsigned int *end,
					 unsigned int bar_reg,
					 unsigned int wc_reg)
{
	unsigned long flags;
	unsigned int wcnt =
		(L2_CACHE_ALIGN_CNT((unsigned int) end)
		 - L2_CACHE_ALIGN_LOW((unsigned int) start)) >> 2;
	unsigned int wc = 0;

	for (; wcnt; wcnt -= wc, start += wc) {

		spin_lock_irqsave(&cache_lock, flags);

		imcr_set(bar_reg, L2_CACHE_ALIGN_LOW((unsigned int) start));

		if (wcnt > 0xffff)
			wc = 0xffff;
		else
			wc = wcnt;

		/* Set word count value in the WC register */
		imcr_set(wc_reg, wc & 0xffff);

		spin_unlock_irqrestore(&cache_lock, flags);

		/* Don't wait for completion on the last cache operation */
		if (wcnt > 0xffff)
			cache_block_operation_wait(wc_reg);
	}
}

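/*
 * In the _nowait variant only the final chunk is left to complete in
 * the background; intermediate chunks (wcnt > 0xffff) are still waited
 * for, since the BAR/WC pair can only describe one block command at a
 * time.
 */
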
/*
 * L1 caches management
 */

/*
 * Disable L1 caches
 */
void L1_cache_off(void)
{
	unsigned int dummy;

	imcr_set(IMCR_L1PCFG, 0);
	dummy = imcr_get(IMCR_L1PCFG);

	imcr_set(IMCR_L1DCFG, 0);
	dummy = imcr_get(IMCR_L1DCFG);
}

/*
 * Enable L1 caches
 */
void L1_cache_on(void)
{
	unsigned int dummy;

	imcr_set(IMCR_L1PCFG, 7);
	dummy = imcr_get(IMCR_L1PCFG);

	imcr_set(IMCR_L1DCFG, 7);
	dummy = imcr_get(IMCR_L1DCFG);
}

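/*
 * Writing 0 to L1PCFG/L1DCFG disables the cache; writing 7 requests the
 * largest L1 mode the part supports (all of L1 operating as cache).
 * The dummy read-backs are there to make sure the new mode has been
 * latched before returning.
 */
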
/*
 *  L1P global-invalidate all
 */
void L1P_cache_global_invalidate(void)
{
	unsigned int set = 1;
	imcr_set(IMCR_L1PINV, set);
	while (imcr_get(IMCR_L1PINV) & 1)
		cpu_relax();
}

/*
 *  L1D global-invalidate all
 *
 * Warning: this operation causes all updated data in L1D to
 * be discarded rather than written back to the lower levels of
 * memory
 */
void L1D_cache_global_invalidate(void)
{
	unsigned int set = 1;
	imcr_set(IMCR_L1DINV, set);
	while (imcr_get(IMCR_L1DINV) & 1)
		cpu_relax();
}

void L1D_cache_global_writeback(void)
{
	unsigned int set = 1;
	imcr_set(IMCR_L1DWB, set);
	while (imcr_get(IMCR_L1DWB) & 1)
		cpu_relax();
}

void L1D_cache_global_writeback_invalidate(void)
{
	unsigned int set = 1;
	imcr_set(IMCR_L1DWBINV, set);
	while (imcr_get(IMCR_L1DWBINV) & 1)
		cpu_relax();
}

/*
 * L2 caches management
 */

/*
 * Set L2 operation mode
 */
void L2_cache_set_mode(unsigned int mode)
{
	unsigned int ccfg = imcr_get(IMCR_CCFG);

	/* Clear and set the L2MODE bits in CCFG */
	ccfg &= ~7;
	ccfg |= (mode & 7);
	imcr_set(IMCR_CCFG, ccfg);
	ccfg = imcr_get(IMCR_CCFG);
}

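/*
 * The low three bits of CCFG form the L2 mode field, matching the
 * L2MODE_* values defined above; the final read-back of CCFG ensures
 * the new mode has been latched before L2_cache_set_mode() returns.
 */
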
/*
 *  L2 global-writeback and global-invalidate all
 */
void L2_cache_global_writeback_invalidate(void)
{
	imcr_set(IMCR_L2WBINV, 1);
	while (imcr_get(IMCR_L2WBINV))
		cpu_relax();
}

/*
 *  L2 global-writeback all
 */
void L2_cache_global_writeback(void)
{
	imcr_set(IMCR_L2WB, 1);
	while (imcr_get(IMCR_L2WB))
		cpu_relax();
}

/*
 * Cacheability controls
 */
void enable_caching(unsigned long start, unsigned long end)
{
	unsigned int mar = IMCR_MAR_BASE + ((start >> 24) << 2);
	unsigned int mar_e = IMCR_MAR_BASE + ((end >> 24) << 2);

	for (; mar <= mar_e; mar += 4)
		imcr_set(mar, imcr_get(mar) | 1);
}

void disable_caching(unsigned long start, unsigned long end)
{
	unsigned int mar = IMCR_MAR_BASE + ((start >> 24) << 2);
	unsigned int mar_e = IMCR_MAR_BASE + ((end >> 24) << 2);

	for (; mar <= mar_e; mar += 4)
		imcr_set(mar, imcr_get(mar) & ~1);
}


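/*
 * Each MAR register controls the cacheability of one 16MB window of the
 * address space, hence the "address >> 24" indexing above.  As an
 * illustration (addresses are examples only), enable_caching(0xe0000000,
 * 0xe0ffffff) sets the cacheable bit in the single MAR covering that
 * window, while a range spanning several windows updates one MAR per
 * window.
 */
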
/*
 *  L1 block operations
 */
void L1P_cache_block_invalidate(unsigned int start, unsigned int end)
{
	cache_block_operation((unsigned int *) start,
			      (unsigned int *) end,
			      IMCR_L1PIBAR, IMCR_L1PIWC);
}
EXPORT_SYMBOL(L1P_cache_block_invalidate);

void L1D_cache_block_invalidate(unsigned int start, unsigned int end)
{
	cache_block_operation((unsigned int *) start,
			      (unsigned int *) end,
			      IMCR_L1DIBAR, IMCR_L1DIWC);
}

void L1D_cache_block_writeback_invalidate(unsigned int start, unsigned int end)
{
	cache_block_operation((unsigned int *) start,
			      (unsigned int *) end,
			      IMCR_L1DWIBAR, IMCR_L1DWIWC);
}

void L1D_cache_block_writeback(unsigned int start, unsigned int end)
{
	cache_block_operation((unsigned int *) start,
			      (unsigned int *) end,
			      IMCR_L1DWBAR, IMCR_L1DWWC);
}
EXPORT_SYMBOL(L1D_cache_block_writeback);

/*
 *  L2 block operations
 */
void L2_cache_block_invalidate(unsigned int start, unsigned int end)
{
	cache_block_operation((unsigned int *) start,
			      (unsigned int *) end,
			      IMCR_L2IBAR, IMCR_L2IWC);
}

void L2_cache_block_writeback(unsigned int start, unsigned int end)
{
	cache_block_operation((unsigned int *) start,
			      (unsigned int *) end,
			      IMCR_L2WBAR, IMCR_L2WWC);
}

void L2_cache_block_writeback_invalidate(unsigned int start, unsigned int end)
{
	cache_block_operation((unsigned int *) start,
			      (unsigned int *) end,
			      IMCR_L2WIBAR, IMCR_L2WIWC);
}

void L2_cache_block_invalidate_nowait(unsigned int start, unsigned int end)
{
	cache_block_operation_nowait((unsigned int *) start,
				     (unsigned int *) end,
				     IMCR_L2IBAR, IMCR_L2IWC);
}

void L2_cache_block_writeback_nowait(unsigned int start, unsigned int end)
{
	cache_block_operation_nowait((unsigned int *) start,
				     (unsigned int *) end,
				     IMCR_L2WBAR, IMCR_L2WWC);
}

void L2_cache_block_writeback_invalidate_nowait(unsigned int start,
						unsigned int end)
{
	cache_block_operation_nowait((unsigned int *) start,
				     (unsigned int *) end,
				     IMCR_L2WIBAR, IMCR_L2WIWC);
}

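/*
 * Typical (illustrative) use of the block operations from driver code:
 * write back a buffer before handing it to a device for reading, or
 * invalidate it before the CPU reads data the device has written, e.g.
 *
 *	L2_cache_block_writeback((unsigned int) buf,
 *				 (unsigned int) buf + len);
 *
 * where buf/len describe the DMA buffer in question.
 */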

/*
 * L1 and L2 caches configuration
 */
void __init c6x_cache_init(void)
{
	struct device_node *node;

	node = of_find_compatible_node(NULL, NULL, "ti,c64x+cache");
	if (!node)
		return;

	cache_base = of_iomap(node, 0);

	of_node_put(node);

	if (!cache_base)
		return;

	/* Set L2 cache on the whole L2 SRAM memory */
	L2_cache_set_mode(L2MODE_SIZE);

	/* Enable L1 */
	L1_cache_on();
}
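
/*
 * For reference, c6x_cache_init() looks up a device tree node of this
 * shape (values here are illustrative; the real reg property comes from
 * the SoC .dtsi):
 *
 *	cache-controller@1840000 {
 *		compatible = "ti,c64x+cache";
 *		reg = <0x01840000 0x20000>;
 *	};
 */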