/* arch/powerpc/include/asm/cache.h (revision 04eeb606a8383b306f4bc6991da8231b5f3924b0) */
#ifndef _ASM_POWERPC_CACHE_H
#define _ASM_POWERPC_CACHE_H

#ifdef __KERNEL__

#include <asm/reg.h>

/* bytes per L1 cache line */
#if defined(CONFIG_8xx) || defined(CONFIG_403GCX)
#define L1_CACHE_SHIFT		4
#define MAX_COPY_PREFETCH	1
#elif defined(CONFIG_PPC_E500MC)
#define L1_CACHE_SHIFT		6
#define MAX_COPY_PREFETCH	4
#elif defined(CONFIG_PPC32)
#define MAX_COPY_PREFETCH	4
#if defined(CONFIG_PPC_47x)
#define L1_CACHE_SHIFT		7
#else
#define L1_CACHE_SHIFT		5
#endif
#else /* CONFIG_PPC64 */
#define L1_CACHE_SHIFT		7
#endif

#define	L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)

#define	SMP_CACHE_BYTES		L1_CACHE_BYTES
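
/*
 * For example: on a CONFIG_PPC64 (or CONFIG_PPC_47x) build, L1_CACHE_SHIFT
 * is 7, so L1_CACHE_BYTES and SMP_CACHE_BYTES evaluate to (1 << 7) == 128;
 * an 8xx build gets (1 << 4) == 16-byte lines instead.
 */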

#if defined(__powerpc64__) && !defined(__ASSEMBLY__)
struct ppc64_caches {
	u32	dsize;			/* L1 d-cache size */
	u32	dline_size;		/* L1 d-cache line size	*/
	u32	log_dline_size;
	u32	dlines_per_page;
	u32	isize;			/* L1 i-cache size */
	u32	iline_size;		/* L1 i-cache line size	*/
	u32	log_iline_size;
	u32	ilines_per_page;
};

extern struct ppc64_caches ppc64_caches;
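
/*
 * Illustrative sketch only (not a definition from this header): consumers
 * typically walk a buffer one cache line at a time using the reported
 * geometry, roughly:
 *
 *	unsigned long line = ppc64_caches.dline_size;
 *	for (addr = start & ~(line - 1); addr < stop; addr += line)
 *		asm volatile("dcbf 0,%0" : : "r" (addr) : "memory");
 *	asm volatile("sync" : : : "memory");
 *
 * The real helpers (e.g. flush_dcache_range()) live elsewhere.
 */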
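
/*
 * logmpp issues the POWER8 "log micro-partition prefetch" instruction,
 * which logs cache contents to a buffer so they can be prefetched back
 * later; x carries the buffer address and control bits. Used by KVM HV
 * for the micro-partition prefetch engine.
 */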
static inline void logmpp(u64 x)
{
	asm volatile(PPC_LOGMPP(R1) : : "r" (x));
}

#endif /* __powerpc64__ && ! __ASSEMBLY__ */

#if defined(__ASSEMBLY__)
/*
 * For a snooping icache, we still need a dummy icbi to purge all the
 * prefetched instructions from the ifetch buffers. We also need a sync
 * before the icbi to order the actual stores to memory that might
 * have modified instructions with the icbi.
 */
#define PURGE_PREFETCHED_INS	\
	sync;			\
	icbi	0,r3;		\
	sync;			\
	isync
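
/*
 * Illustrative use from assembly (a sketch, not part of this header): an
 * icache-flush routine can take an early exit on CPUs whose icache snoops,
 * with a cacheable address in r3, e.g.:
 *
 *	BEGIN_FTR_SECTION
 *		PURGE_PREFETCHED_INS
 *		blr
 *	END_FTR_SECTION_IFSET(CPU_FTR_COHERENT_ICACHE)
 */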

#else
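/*
 * __read_mostly places rarely-written data in the .data..read_mostly
 * section so it does not share cache lines with frequently written data.
 */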
#define __read_mostly __attribute__((__section__(".data..read_mostly")))

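/*
 * L2CR/L3CR accessors for 6xx-class (e.g. 750/74xx) CPUs; the
 * implementations are in assembly (arch/powerpc/kernel/l2cr_6xx.S).
 * Elsewhere they compile away to the no-ops below.
 */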
#ifdef CONFIG_6xx
extern long _get_L2CR(void);
extern long _get_L3CR(void);
extern void _set_L2CR(unsigned long);
extern void _set_L3CR(unsigned long);
#else
#define _get_L2CR()	0L
#define _get_L3CR()	0L
#define _set_L2CR(val)	do { } while(0)
#define _set_L3CR(val)	do { } while(0)
#endif

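/*
 * Cache-line-based clear/copy; both are implemented with dcbz and so are
 * only safe on cacheable memory (dcbz traps on cache-inhibited mappings).
 */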
extern void cacheable_memzero(void *p, unsigned int nb);
extern void *cacheable_memcpy(void *, const void *, unsigned int);

#endif /* !__ASSEMBLY__ */
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_CACHE_H */