/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_POWERPC_CACHE_H
#define _ASM_POWERPC_CACHE_H

#ifdef __KERNEL__


/* bytes per L1 cache line */
#if defined(CONFIG_PPC_8xx) || defined(CONFIG_403GCX)
#define L1_CACHE_SHIFT		4
#define MAX_COPY_PREFETCH	1
#define IFETCH_ALIGN_SHIFT	2
#elif defined(CONFIG_PPC_E500MC)
#define L1_CACHE_SHIFT		6
#define MAX_COPY_PREFETCH	4
#define IFETCH_ALIGN_SHIFT	3
#elif defined(CONFIG_PPC32)
#define MAX_COPY_PREFETCH	4
#define IFETCH_ALIGN_SHIFT	3	/* 603 fetches 2 insn at a time */
#if defined(CONFIG_PPC_47x)
#define L1_CACHE_SHIFT		7
#else
#define L1_CACHE_SHIFT		5
#endif
#else /* CONFIG_PPC64 */
#define L1_CACHE_SHIFT		7
#define IFETCH_ALIGN_SHIFT	4 /* POWER8,9 */
#endif

#define	L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)

#define	SMP_CACHE_BYTES		L1_CACHE_BYTES

#define IFETCH_ALIGN_BYTES	(1 << IFETCH_ALIGN_SHIFT)
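
/*
 * Illustrative sketch, not part of the original header: a common use of
 * L1_CACHE_BYTES is padding a structure to a full cache line so that
 * objects updated by different CPUs never false-share a line. The
 * struct name is hypothetical.
 */
#ifndef __ASSEMBLY__
struct __example_percpu_stat {
	unsigned long count;
} __attribute__((__aligned__(L1_CACHE_BYTES)));
#endif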
#if defined(__powerpc64__) && !defined(__ASSEMBLY__)

struct ppc_cache_info {
	u32 size;
	u32 line_size;
	u32 block_size;	/* L1 only */
	u32 log_block_size;
	u32 blocks_per_page;
	u32 sets;
	u32 assoc;
};

struct ppc64_caches {
	struct ppc_cache_info l1d;
	struct ppc_cache_info l1i;
	struct ppc_cache_info l2;
	struct ppc_cache_info l3;
};

extern struct ppc64_caches ppc64_caches;
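
/*
 * Illustrative sketch, not part of the original header: write back the
 * data cache lines covering a buffer using the geometry probed at boot.
 * Assumes ppc64_caches has been initialized and that 'start' is
 * block-aligned; a raw dcbf is used here because the dcbf() helper is
 * only defined further down in this file.
 */
static inline void __example_flush_dcache_buf(void *start, unsigned long len)
{
	void *end = start + len;
	void *p;

	for (p = start; p < end; p += ppc64_caches.l1d.block_size)
		__asm__ __volatile__ ("dcbf 0, %0" : : "r"(p) : "memory");
	__asm__ __volatile__ ("sync" : : : "memory");	/* order the flushes */
}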
#endif /* __powerpc64__ && ! __ASSEMBLY__ */

#if defined(__ASSEMBLY__)
/*
 * For a snooping icache, we still need a dummy icbi to purge all the
 * prefetched instructions from the ifetch buffers. We also need a sync
 * before the icbi to order the actual stores to memory that might
 * have modified instructions with the icbi.
 */
#define PURGE_PREFETCHED_INS	\
	sync;			\
	icbi	0,r3;		\
	sync;			\
	isync
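
/*
 * Typical use, shown for illustration only: in a .S file, after storing
 * a patched instruction to the address held in r3 (the register the
 * macro's icbi hard-codes), e.g.
 *
 *	stw	r4,0(r3)
 *	PURGE_PREFETCHED_INS
 */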
#else
#define __read_mostly __attribute__((__section__(".data..read_mostly")))
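
/*
 * Illustrative use, not from the original header: data written rarely
 * (e.g. set once at boot) but read on hot paths can be placed in
 * .data..read_mostly so it does not share cache lines with frequently
 * written data. The variable name is hypothetical:
 *
 *	static int example_tunable __read_mostly = 1;
 */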

#ifdef CONFIG_6xx
extern long _get_L2CR(void);
extern long _get_L3CR(void);
extern void _set_L2CR(unsigned long);
extern void _set_L3CR(unsigned long);
#else
#define _get_L2CR()	0L
#define _get_L3CR()	0L
#define _set_L2CR(val)	do { } while(0)
#define _set_L3CR(val)	do { } while(0)
#endif

/* Zero one data cache block; commonly used to clear whole pages. */
static inline void dcbz(void *addr)
{
	__asm__ __volatile__ ("dcbz 0, %0" : : "r"(addr) : "memory");
}

/* Invalidate one data cache block without writing it back. */
static inline void dcbi(void *addr)
{
	__asm__ __volatile__ ("dcbi 0, %0" : : "r"(addr) : "memory");
}

/* Write one data cache block back to memory and invalidate it. */
static inline void dcbf(void *addr)
{
	__asm__ __volatile__ ("dcbf 0, %0" : : "r"(addr) : "memory");
}

/* Write one data cache block back to memory, leaving it valid. */
static inline void dcbst(void *addr)
{
	__asm__ __volatile__ ("dcbst 0, %0" : : "r"(addr) : "memory");
}
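
/*
 * Illustrative sketch, not part of the original header: the classic
 * PowerPC sequence for exposing a modified instruction to the fetcher,
 * the C-side counterpart of the PURGE_PREFETCHED_INS macro above. The
 * real kernel uses flush_icache_range(); this assumes 'addr' is
 * cache-block aligned.
 */
static inline void __example_sync_icache_line(void *addr)
{
	__asm__ __volatile__ (
		"dcbst 0, %0\n\t"	/* push the modified line to memory */
		"sync\n\t"		/* order the store before the icbi */
		"icbi 0, %0\n\t"	/* invalidate the stale icache line */
		"sync\n\t"
		"isync"			/* drop already-fetched instructions */
		: : "r"(addr) : "memory");
}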
#endif /* !__ASSEMBLY__ */
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_CACHE_H */