1 /* SPDX-License-Identifier: GPL-2.0 */
2 #ifndef _ASM_POWERPC_CACHE_H
3 #define _ASM_POWERPC_CACHE_H
/* bytes per L1 cache line */
#if defined(CONFIG_PPC_8xx) || defined(CONFIG_403GCX)
#define L1_CACHE_SHIFT		4
#define MAX_COPY_PREFETCH	1
#define IFETCH_ALIGN_SHIFT	2
#elif defined(CONFIG_PPC_E500MC)
#define L1_CACHE_SHIFT		6
#define MAX_COPY_PREFETCH	4
#define IFETCH_ALIGN_SHIFT	3
#elif defined(CONFIG_PPC32)
#define MAX_COPY_PREFETCH	4
#define IFETCH_ALIGN_SHIFT	3	/* 603 fetches 2 insn at a time */
#if defined(CONFIG_PPC_47x)
#define L1_CACHE_SHIFT		7
#else
/* fix: missing #else — without it the 47x and generic PPC32 values of
 * L1_CACHE_SHIFT (7 and 5) were both defined in the same branch */
#define L1_CACHE_SHIFT		5
#endif
#else /* CONFIG_PPC64 */
#define L1_CACHE_SHIFT		7
#define IFETCH_ALIGN_SHIFT	4	/* POWER8,9 */
#endif
/* fix: the inner CONFIG_PPC_47x #if and the outer platform #if were both
 * left unterminated; both #endif lines restored above so the unconditional
 * derived macros below are always visible */

#define L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)

#define SMP_CACHE_BYTES		L1_CACHE_BYTES

#define IFETCH_ALIGN_BYTES	(1 << IFETCH_ALIGN_SHIFT)
36 #if !defined(__ASSEMBLY__)
/*
 * Per-level cache geometry descriptor.
 * NOTE(review): this struct is visibly truncated in this chunk — only one
 * field and no closing brace are present; confirm the full field list
 * against the complete file before editing.
 */
struct ppc_cache_info {
	u32 block_size; /* L1 only */
	/* NOTE(review): the four members below look like they belong to a
	 * separate `struct ppc64_caches { ... }` whose opening line is not
	 * visible in this chunk — verify before relying on this layout. */
	struct ppc_cache_info l1d;	/* L1 data cache */
	struct ppc_cache_info l1i;	/* L1 instruction cache */
	struct ppc_cache_info l2;
	struct ppc_cache_info l3;

/* Single global instance; defined elsewhere (populated at boot on PPC64). */
extern struct ppc64_caches ppc64_caches;
58 static inline u32 l1_cache_shift(void)
60 return ppc64_caches.l1d.log_block_size;
63 static inline u32 l1_cache_bytes(void)
65 return ppc64_caches.l1d.block_size;
68 static inline u32 l1_cache_shift(void)
70 return L1_CACHE_SHIFT;
73 static inline u32 l1_cache_bytes(void)
75 return L1_CACHE_BYTES;
78 #endif /* ! __ASSEMBLY__ */
80 #if defined(__ASSEMBLY__)
/*
 * For a snooping icache, we still need a dummy icbi to purge all the
 * prefetched instructions from the ifetch buffers. We also need a sync
 * before the icbi to order the actual stores to memory that might
 * have modified instructions with the icbi.
 */
87 #define PURGE_PREFETCHED_INS \
/* Place rarely-written data in its own section so it does not share cache
 * lines with frequently-written data.
 * NOTE(review): this define presumably belongs in the !__ASSEMBLY__ branch;
 * the #else matching the "#if defined(__ASSEMBLY__)" above is not visible
 * in this chunk — confirm against the full file. */
#define __read_mostly __attribute__((__section__(".data..read_mostly")))
/*
 * L2CR/L3CR (L2/L3 cache control register) accessors, implemented in
 * assembly on Book3S 32-bit.
 * fix: the extern declarations were immediately followed by same-named
 * macro stubs with no conditional between them (redefinition), and the
 * #ifdef was unterminated — restore the missing #else/#endif so other
 * platforms get the no-op stubs instead.
 */
#ifdef CONFIG_PPC_BOOK3S_32
extern long _get_L2CR(void);
extern long _get_L3CR(void);
extern void _set_L2CR(unsigned long);
extern void _set_L3CR(unsigned long);
#else
#define _get_L2CR()	0L
#define _get_L3CR()	0L
#define _set_L2CR(val)	do { } while(0)
#define _set_L3CR(val)	do { } while(0)
#endif
108 static inline void dcbz(void *addr)
110 __asm__ __volatile__ ("dcbz %y0" : : "Z"(*(u8 *)addr) : "memory");
113 static inline void dcbi(void *addr)
115 __asm__ __volatile__ ("dcbi %y0" : : "Z"(*(u8 *)addr) : "memory");
118 static inline void dcbf(void *addr)
120 __asm__ __volatile__ ("dcbf %y0" : : "Z"(*(u8 *)addr) : "memory");
123 static inline void dcbst(void *addr)
125 __asm__ __volatile__ ("dcbst %y0" : : "Z"(*(u8 *)addr) : "memory");
127 #endif /* !__ASSEMBLY__ */
128 #endif /* __KERNEL__ */
129 #endif /* _ASM_POWERPC_CACHE_H */