/*
 * This file contains low-level cache management functions
 * used for sleep and CPU speed changes on Apple machines.
 * (In fact the only thing that is Apple-specific is that we assume
 * that we can read from ROM at physical address 0xfff00000.)
 *
 *    Copyright (C) 2004 Paul Mackerras (paulus@samba.org) and
 *                       Benjamin Herrenschmidt (benh@kernel.crashing.org)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 *
 */

#include <asm/processor.h>
#include <asm/ppc_asm.h>
#include <asm/cputable.h>

/*
 * Flush and disable all data caches (dL1, L2, L3). This is used
 * when going to sleep, when doing a PMU based cpufreq transition,
 * or when "offlining" a CPU on SMP machines. This code is
 * over-paranoid, but I've had enough issues with various CPU revs
 * and bugs that I decided it was worth being over-cautious.
 */

_GLOBAL(flush_disable_caches)
#ifndef CONFIG_6xx
	blr
#else
BEGIN_FTR_SECTION
	b	flush_disable_745x
END_FTR_SECTION_IFSET(CPU_FTR_SPEC7450)
BEGIN_FTR_SECTION
	b	flush_disable_75x
END_FTR_SECTION_IFSET(CPU_FTR_L2CR)
	b	__flush_disable_L1
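
/*
 * The feature sections above are resolved at boot: the cputable
 * fixup code patches out whichever branches don't apply to the
 * running CPU. A rough C-level sketch of the resulting dispatch
 * (illustrative only, not the actual fixup mechanism):
 *
 *	if (cpu_has_feature(CPU_FTR_SPEC7450))
 *		return flush_disable_745x();
 *	if (cpu_has_feature(CPU_FTR_L2CR))
 *		return flush_disable_75x();
 *	return __flush_disable_L1();
 */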

/* This is the code for G3 and 74[01]0 */
flush_disable_75x:
	mflr	r10

	/* Turn off EE and DR in MSR */
	mfmsr	r11
	rlwinm	r0,r11,0,~MSR_EE
	rlwinm	r0,r0,0,~MSR_DR
	sync
	mtmsr	r0
	isync
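
	/*
	 * Note: rlwinm rD,rS,0,~MASK is a rotate by zero under a mask
	 * that excludes the MASK bits, i.e. rD = rS & ~MASK. Clearing
	 * MSR[DR] puts data accesses in real mode, so the flush loops
	 * below hit physical addresses directly; clearing MSR[EE]
	 * masks external interrupts for the duration.
	 */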

	/* Stop DST streams */
BEGIN_FTR_SECTION
	DSSALL
	sync
END_FTR_SECTION_IFSET(CPU_FTR_ALTIVEC)

	/* Stop DPM */
	mfspr	r8,SPRN_HID0		/* Save SPRN_HID0 in r8 */
	rlwinm	r4,r8,0,12,10		/* Turn off HID0[DPM] */
	sync
	mtspr	SPRN_HID0,r4		/* Disable DPM */
	sync
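
	/*
	 * Note: with MB > ME, rlwinm uses a wrap-around mask, so the
	 * 12,10 mask above keeps every bit except bit 11, which is
	 * HID0[DPM]. Dynamic power management is switched off for the
	 * duration of the flush sequence.
	 */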

	/* Disp-flush L1. We have a weird problem here that I never
	 * totally figured out. On 750FX, using the ROM for the flush
	 * results in a non-working flush, so we use RAM instead as a
	 * workaround until I finally understand what's going on. --BenH
	 */

	/* ROM base by default */
	lis	r4,0xfff0
	mfpvr	r3
	srwi	r3,r3,16
	cmplwi	cr0,r3,0x7000
	bne+	1f
	/* RAM base on 750FX */
	li	r4,0
1:	li	r0,0x4000		/* 0x4000 lines * 32 bytes = 512kB */
	mtctr	r0			/* count in r0 so the base in r4 survives */
1:	lwz	r0,0(r4)
	addi	r4,r4,32
	bdnz	1b
	sync
	isync
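
	/*
	 * A displacement flush relies on ordinary loads: reading a
	 * large enough stretch of cacheable lines forces every dirty
	 * dL1 line out through the replacement policy. 512kB is read
	 * here against a 32kB dL1, which is the over-cautious part.
	 */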

	/* Disable / invalidate / enable L1 data */
	mfspr	r3,SPRN_HID0
	rlwinm	r3,r3,0,~(HID0_DCE | HID0_ICE)
	mtspr	SPRN_HID0,r3
	sync
	isync
	ori	r3,r3,(HID0_DCE|HID0_DCI|HID0_ICE|HID0_ICFI)
	sync
	isync
	mtspr	SPRN_HID0,r3
	xori	r3,r3,(HID0_DCI|HID0_ICFI)
	mtspr	SPRN_HID0,r3
	sync
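
	/*
	 * The write above sets the enable and invalidate bits in one
	 * go (DCI/ICFI invalidate while DCE/ICE re-enable), then the
	 * xori clears just the two invalidate bits. The L1 caches are
	 * left enabled here on purpose: the mtspr trampolines below
	 * rely on the L2CR writes executing from L1-resident code, and
	 * the dL1 is only disabled again at the end of this function.
	 */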

	/* Get the current enable bit of the L2CR into r5 */
	mfspr	r5,SPRN_L2CR
	/* Set to data-only (pre-745x bit) */
	oris	r3,r5,L2CR_L2DO@h
	b	2f
	/* When disabling L2, code must be in L1 */
	.balign 32
1:	mtspr	SPRN_L2CR,r3
3:	sync
	isync
	b	1f
2:	b	3f
3:	sync
	isync
	b	1b
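
	/*
	 * The branch dance above is deliberate: "b 2f" skips over the
	 * 32-byte-aligned block, "2: b 3f" lands on the second 3:
	 * label, whose sync/isync/b 1b pass fetches the aligned block
	 * into the icache before jumping back to the mtspr. The L2CR
	 * write therefore executes from lines already resident in L1,
	 * with no instruction fetch from L2 or memory while the cache
	 * is being reconfigured; "b 1f" then continues below.
	 */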
1:	/* disp-flush L2. The interesting thing here is that the L2 can be
	 * up to 2MB ... so using the ROM, we'll end up wrapping back to memory
	 * but that is probably fine. We disp-flush over 4MB to be safe
	 */
	lis	r4,2
	mtctr	r4
	lis	r4,0xfff0
1:	lwz	r0,0(r4)
	addi	r4,r4,32
	bdnz	1b
	sync
	isync
	lis	r4,2
	mtctr	r4
	lis	r4,0xfff0
1:	dcbf	0,r4
	addi	r4,r4,32
	bdnz	1b
	sync
	isync
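
	/*
	 * Two passes over the same 4MB window (0x20000 lines of 32
	 * bytes, twice the maximum 2MB L2): plain loads first, to
	 * displace the current L2 contents, then dcbf over the same
	 * range to flush out whatever the loads themselves brought in.
	 */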

	/* now disable L2 */
	rlwinm	r5,r5,0,~L2CR_L2E
	b	2f
	/* When disabling L2, code must be in L1 */
	.balign 32
1:	mtspr	SPRN_L2CR,r5
3:	sync
	isync
	b	1f
2:	b	3f
3:	sync
	isync
	b	1b
1:	sync
	isync
	/* Invalidate L2. This is pre-745x, we clear the L2I bit ourselves */
	oris	r4,r5,L2CR_L2I@h
	mtspr	SPRN_L2CR,r4
	sync
	isync

	/* Wait for the invalidation to complete */
1:	mfspr	r3,SPRN_L2CR
	rlwinm.	r0,r3,0,31,31		/* test L2CR[L2IP] (bit 31) */
	bne	1b

	/* Clear L2I */
	xoris	r4,r4,L2CR_L2I@h
	sync
	mtspr	SPRN_L2CR,r4
	sync

	/* now disable the L1 caches */
	mfspr	r0,SPRN_HID0
	rlwinm	r0,r0,0,~(HID0_DCE|HID0_ICE)
	mtspr	SPRN_HID0,r0
	sync
	isync

	/* Restore HID0[DPM] to whatever it was before */
	sync
	mfspr	r0,SPRN_HID0
	rlwimi	r0,r8,0,11,11		/* restore the DPM bit saved in r8 */
	mtspr	SPRN_HID0,r0
	sync

	/* restore DR and EE */
	sync
	mtmsr	r11
	isync

	mtlr	r10
	blr

/* This code is for 745x processors */
flush_disable_745x:
	/* Turn off EE and DR in MSR */
	mfmsr	r11
	rlwinm	r0,r11,0,~MSR_EE
	rlwinm	r0,r0,0,~MSR_DR
	sync
	mtmsr	r0
	isync

	/* Stop prefetch streams */
	DSSALL
	sync

	/* Disable L2 prefetching */
	mfspr	r0,SPRN_MSSCR0
	rlwinm	r0,r0,0,0,29		/* clear the L2PFE field (bits 30-31) */
	mtspr	SPRN_MSSCR0,r0
	sync
	isync
	lis	r4,0
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4
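
	/*
	 * The eight back-to-back dcbf to address zero come right after
	 * L2 prefetching is disabled; they presumably drain whatever
	 * the prefetch engines still had in flight before the flush
	 * loops below start.
	 */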

	/* Due to a bug with the HW flush on some CPU revs, we occasionally
	 * experience data corruption. I'm adding a displacement flush along
	 * with a dcbf loop over a few MB to "help". The problem isn't totally
	 * fixed by this in theory, but at least, in practice, I couldn't
	 * reproduce it even with a big hammer...
	 */

	lis	r4,0x0002
	mtctr	r4
	li	r4,0
1:
	lwz	r0,0(r4)
	addi	r4,r4,32		/* Go to start of next cache line */
	bdnz	1b
	isync

	/* Now, flush the first 4MB of memory */
	lis	r4,0x0002
	mtctr	r4
	li	r4,0
	sync
1:
	dcbf	0,r4
	addi	r4,r4,32		/* Go to start of next cache line */
	bdnz	1b

	/* Flush and disable the L1 data cache */
	mfspr	r6,SPRN_LDSTCR
	lis	r3,0xfff0	/* read from ROM for displacement flush */
	li	r4,0xfe		/* start with only way 0 unlocked */
	li	r5,128		/* 128 lines in each way */
1:	mtctr	r5
	rlwimi	r6,r4,0,24,31
	mtspr	SPRN_LDSTCR,r6
	sync
	isync
2:	lwz	r0,0(r3)	/* touch each cache line */
	addi	r3,r3,32
	bdnz	2b
	rlwinm	r4,r4,1,24,30	/* move on to the next way */
	ori	r4,r4,1
	cmpwi	r4,0xff		/* all done? */
	bne	1b
	/* now unlock the L1 data cache */
	li	r4,0
	rlwimi	r6,r4,0,24,31
	sync
	mtspr	SPRN_LDSTCR,r6
	sync
	isync
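
	/*
	 * What the loop above does: the low byte of LDSTCR is the data
	 * cache way-lock mask, so 0xfe locks ways 1-7 and leaves only
	 * way 0 unlocked. Reading 128 lines * 32 bytes = 4kB of ROM
	 * then displaces everything held in that one way (8 ways of
	 * 4kB make up the 32kB dL1). The rlwinm/ori pair walks the
	 * single zero bit left (0xfe, 0xfd, 0xfb, ... 0x7f) until all
	 * eight ways have been flushed.
	 */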

	/* Flush the L2 cache using the hardware assist */
	mfspr	r3,SPRN_L2CR
	cmpwi	r3,0		/* check if it is enabled first (L2E is the sign bit) */
	bge	4f
	oris	r0,r3,(L2CR_L2IO_745x|L2CR_L2DO_745x)@h
	b	2f
	/* When disabling/locking L2, code must be in L1 */
	.balign 32
1:	mtspr	SPRN_L2CR,r0	/* lock the L2 cache */
3:	sync
	isync
	b	1f
2:	b	3f
3:	sync
	isync
	b	1b
1:	sync
	isync
	ori	r0,r3,L2CR_L2HWF_745x
	sync
	mtspr	SPRN_L2CR,r0	/* set the hardware flush bit */
3:	mfspr	r0,SPRN_L2CR	/* wait for it to go to 0 */
	andi.	r0,r0,L2CR_L2HWF_745x
	bne	3b
	sync
	rlwinm	r3,r3,0,~L2CR_L2E
	b	2f
	/* When disabling L2, code must be in L1 */
	.balign 32
1:	mtspr	SPRN_L2CR,r3	/* disable the L2 cache */
3:	sync
	isync
	b	1f
2:	b	3f
3:	sync
	isync
	b	1b
1:	sync
	isync
	oris	r4,r3,L2CR_L2I@h
	mtspr	SPRN_L2CR,r4
	sync
	isync
1:	mfspr	r4,SPRN_L2CR
	andis.	r0,r4,L2CR_L2I@h
	bne	1b
	sync
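
	/*
	 * Summary of the 745x L2 sequence above: lock the cache for
	 * both instructions and data (L2IO|L2DO) so nothing new can
	 * allocate, start the hardware flush (L2HWF) and poll until
	 * the bit self-clears, disable the cache (clear L2E, again
	 * from L1-resident code), then invalidate it (set L2I and
	 * poll until the hardware clears it).
	 */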

BEGIN_FTR_SECTION
	/* Flush the L3 cache using the hardware assist */
4:	mfspr	r3,SPRN_L3CR
	cmpwi	r3,0		/* check if it is enabled (L3E is the sign bit) */
	bge	6f
	oris	r0,r3,L3CR_L3IO@h
	ori	r0,r0,L3CR_L3DO
	sync
	mtspr	SPRN_L3CR,r0	/* lock the L3 cache */
	sync
	isync
	ori	r0,r0,L3CR_L3HWF
	sync
	mtspr	SPRN_L3CR,r0	/* set the hardware flush bit */
5:	mfspr	r0,SPRN_L3CR	/* wait for it to go to zero */
	andi.	r0,r0,L3CR_L3HWF
	bne	5b
	rlwinm	r3,r3,0,~L3CR_L3E
	sync
	mtspr	SPRN_L3CR,r3	/* disable the L3 cache */
	sync
	ori	r4,r3,L3CR_L3I
	mtspr	SPRN_L3CR,r4
1:	mfspr	r4,SPRN_L3CR
	andi.	r0,r4,L3CR_L3I
	bne	1b
	sync
END_FTR_SECTION_IFSET(CPU_FTR_L3CR)

6:	mfspr	r0,SPRN_HID0	/* now disable the L1 data cache */
	rlwinm	r0,r0,0,~HID0_DCE
	mtspr	SPRN_HID0,r0
	sync
	isync
	mtmsr	r11		/* restore DR and EE */
	isync
	blr
#endif	/* CONFIG_6xx */
