#include <asm/reg.h>
#include <asm/ppc_asm.h>
#include <asm/processor.h>
#include <asm/cache.h>


#define SDRAM_CTRL	0x104
#define SC_MODE_EN	(1<<31)
#define SC_CKE		(1<<30)
#define SC_REF_EN	(1<<28)
#define SC_SOFT_PRE	(1<<1)

#define GPIOW_GPIOE	0xc00
#define GPIOW_DDR	0xc08
#define GPIOW_DVO	0xc0c

#define CDM_CE		0x214
#define CDM_SDRAM	(1<<3)


/* helpers... beware: r10 and r4 are overwritten */
#define SAVE_SPRN(reg, addr)		\
	mfspr	r10, SPRN_##reg;	\
	stw	r10, ((addr)*4)(r4);

#define LOAD_SPRN(reg, addr)		\
	lwz	r10, ((addr)*4)(r4);	\
	mtspr	SPRN_##reg, r10;	\
	sync;				\
	isync;
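
/* note: each LOAD_SPRN ends with sync/isync so the mtspr is fully
 * context-synchronized before the next instruction executes */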

	.data
registers:
	.space 0x5c*4
	.text

/* ---------------------------------------------------------------------- */
/* low-power mode with help of M68HLC908QT1 */

	.globl lite5200_low_power
lite5200_low_power:

	mr	r7, r3	/* save SRAM va */
	mr	r8, r4	/* save MBAR va */

	/* set up the wakeup address for U-Boot at physical location 0x0 */
	lis	r3, CONFIG_KERNEL_START@h
	lis	r4, lite5200_wakeup@h
	ori	r4, r4, lite5200_wakeup@l
	sub	r4, r4, r3
	stw	r4, 0(r3)
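	/* lite5200_wakeup is linked at a kernel virtual address; subtracting
	 * CONFIG_KERNEL_START yields its physical address, which U-Boot
	 * reads from physical 0x0 (virtual CONFIG_KERNEL_START) on resume */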


	/*
	 * save stuff the BDI overwrites
	 * 0xf0 (the range 0xe0->0x100 gets overwritten when a BDI is connected,
	 *   even when CONFIG_BDI* is disabled and MMU XLAT is commented out;
	 *   heisenbug?)
	 * WARNING: self-refresh doesn't seem to work when a BDI2000 is connected,
	 *   possibly because the BDI sets SDRAM registers before the wakeup code does
	 */
	lis	r4, registers@h
	ori	r4, r4, registers@l
	lwz	r10, 0xf0(r3)
	stw	r10, (0x1d*4)(r4)

	/* save registers to the buffer at r4 [destroys r10] */
	SAVE_SPRN(LR, 0x1c)
	bl	save_regs

	/* flush caches [destroys r3, r4] */
	bl	flush_data_cache


	/* copy code to sram */
	mr	r4, r7
	li	r3, (sram_code_end - sram_code)/4
	mtctr	r3
	lis	r3, sram_code@h
	ori	r3, r3, sram_code@l
1:
	lwz	r5, 0(r3)
	stw	r5, 0(r4)
	addi	r3, r3, 4
	addi	r4, r4, 4
	bdnz	1b
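
	/* sram_code now lives in on-chip SRAM; it has to run from there
	 * because SDRAM is unreachable once it enters self-refresh */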

	/* get tb_ticks_per_usec */
	lis	r3, tb_ticks_per_usec@h
	lwz	r11, tb_ticks_per_usec@l(r3)

	/* disable I and D caches */
	mfspr	r3, SPRN_HID0
	ori	r3, r3, HID0_ICE | HID0_DCE
	xori	r3, r3, HID0_ICE | HID0_DCE
	sync; isync;
	mtspr	SPRN_HID0, r3
	sync; isync;
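	/* the ori/xori pair above clears ICE and DCE while leaving the
	 * other HID0 bits untouched */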

	/* jump to sram */
	mtlr	r7
	blrl
	/* doesn't return */


sram_code:
	/* self refresh */
	lwz	r4, SDRAM_CTRL(r8)

	/* send NOP (precharge) */
	oris	r4, r4, SC_MODE_EN@h	/* mode_en */
	stw	r4, SDRAM_CTRL(r8)
	sync

	ori	r4, r4, SC_SOFT_PRE	/* soft_pre */
	stw	r4, SDRAM_CTRL(r8)
	sync
	xori	r4, r4, SC_SOFT_PRE

	xoris	r4, r4, SC_MODE_EN@h	/* !mode_en */
	stw	r4, SDRAM_CTRL(r8)
	sync

	/* delay (for NOP to finish) */
	li	r12, 1
	bl	udelay

	/*
	 * mode_en must not be set when enabling self-refresh
	 * send AR with CKE low (self-refresh)
	 */
	oris	r4, r4, (SC_REF_EN | SC_CKE)@h
	xoris	r4, r4, (SC_CKE)@h	/* ref_en !cke */
	stw	r4, SDRAM_CTRL(r8)
	sync
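	/* the oris/xoris pair leaves REF_EN set and CKE clear: oris sets
	 * both bits, xoris then flips CKE back off */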

	/* delay (after !CKE there should be two cycles) */
	li	r12, 1
	bl	udelay

	/* disable clock */
	lwz	r4, CDM_CE(r8)
	ori	r4, r4, CDM_SDRAM
	xori	r4, r4, CDM_SDRAM
	stw	r4, CDM_CE(r8)
	sync
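	/* the ori/xori pair clears CDM_SDRAM, gating the SDRAM clock in
	 * the MPC5200's Clock Distribution Module */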

	/* delay a bit */
	li	r12, 1
	bl	udelay


	/* turn off with QT chip */
	li	r4, 0x02
	stb	r4, GPIOW_GPIOE(r8)	/* enable gpio_wkup1 */
	sync

	stb	r4, GPIOW_DVO(r8)	/* "output" high */
	sync
	stb	r4, GPIOW_DDR(r8)	/* output */
	sync
	stb	r4, GPIOW_DVO(r8)	/* output high */
	sync
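	/* driving gpio_wkup1 high tells the M68HLC908QT1 companion chip to
	 * cut power; DVO is written both before and after DDR, presumably
	 * so the pin never drives low while being switched to an output */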

	/* 10us delay */
	li	r12, 10
	bl	udelay

	/* turn off */
	li	r4, 0
	stb	r4, GPIOW_DVO(r8)	/* output low */
	sync

	/* wait until we're offline */
  1:
	b	1b


	/* local udelay in sram is needed */
  udelay: /* r11 - tb_ticks_per_usec, r12 - usecs, overwrites r13 */
	mullw	r12, r12, r11
	mftb	r13	/* start */
	add	r12, r13, r12 /* end */
    1:
	mftb	r13	/* current */
	cmp	cr0, r13, r12
	blt	1b
	blr
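	/* note: the end-time compare is signed and ignores timebase
	 * wrap-around, which could cut a delay short; harmless for the
	 * one-shot microsecond delays used above */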

sram_code_end:



/* U-Boot jumps here on resume */
lite5200_wakeup:
	bl	restore_regs


	/* HIDs, MSR */
	LOAD_SPRN(HID1, 0x19)
	LOAD_SPRN(HID2, 0x1a)


	/* address translation is tricky (see turn_on_mmu) */
	mfmsr	r10
	ori	r10, r10, MSR_DR | MSR_IR

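	/* rfi loads the MSR from SRR1 and jumps to the address in SRR0,
	 * so translation comes on and control lands at mmu_on atomically */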
	mtspr	SPRN_SRR1, r10
	lis	r10, mmu_on@h
	ori	r10, r10, mmu_on@l
	mtspr	SPRN_SRR0, r10
	sync
	rfi
mmu_on:
	/* kernel offset (r4 is still set from restore_regs) */
	addis	r4, r4, CONFIG_KERNEL_START@h


	/* restore MSR */
	lwz	r10, (4*0x1b)(r4)
	mtmsr	r10
	sync; isync;

	/* invalidate caches */
	mfspr	r10, SPRN_HID0
	ori	r5, r10, HID0_ICFI | HID0_DCI
	mtspr	SPRN_HID0, r5	/* invalidate caches */
	sync; isync;
	mtspr	SPRN_HID0, r10
	sync; isync;
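	/* writing HID0 with ICFI/DCI set and then restoring the old value
	 * pulses the one-shot invalidate bits for both caches */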

	/* enable caches */
	lwz	r10, (4*0x18)(r4)
	mtspr	SPRN_HID0, r10	/* restore (enable caches, DPM) */
	/* ^ this has to happen after address translation is enabled in the MSR */
	sync
	isync


	/* restore 0xf0 (BDI2000) */
	lis	r3, CONFIG_KERNEL_START@h
	lwz	r10, (0x1d*4)(r4)
	stw	r10, 0xf0(r3)

	LOAD_SPRN(LR, 0x1c)


	blr


/* ---------------------------------------------------------------------- */
/* boring code: helpers */

/* save registers */
#define SAVE_BAT(n, addr)		\
	SAVE_SPRN(DBAT##n##L, addr);	\
	SAVE_SPRN(DBAT##n##U, addr+1);	\
	SAVE_SPRN(IBAT##n##L, addr+2);	\
	SAVE_SPRN(IBAT##n##U, addr+3);

#define SAVE_SR(n, addr)		\
	mfsr	r10, n;			\
	stw	r10, ((addr)*4)(r4);

#define SAVE_4SR(n, addr)	\
	SAVE_SR(n, addr);	\
	SAVE_SR(n+1, addr+1);	\
	SAVE_SR(n+2, addr+2);	\
	SAVE_SR(n+3, addr+3);

save_regs:
	stw	r0, 0(r4)
	stw	r1, 0x4(r4)
	stw	r2, 0x8(r4)
	stmw	r11, 0xc(r4) /* r11..r31 fill 0xc -> 0x5f ((0x18*4)-1) */

	SAVE_SPRN(HID0, 0x18)
	SAVE_SPRN(HID1, 0x19)
	SAVE_SPRN(HID2, 0x1a)
	mfmsr	r10
	stw	r10, (4*0x1b)(r4)
	/* LR (0x1c) is saved by the caller, before the bl save_regs */
	/* 0x1d is reserved for the saved word from 0xf0 (BDI2000) */
	SAVE_SPRN(RPA,   0x1e)
	SAVE_SPRN(SDR1,  0x1f)

	/* save MMU regs */
	SAVE_BAT(0, 0x20)
	SAVE_BAT(1, 0x24)
	SAVE_BAT(2, 0x28)
	SAVE_BAT(3, 0x2c)
	SAVE_BAT(4, 0x30)
	SAVE_BAT(5, 0x34)
	SAVE_BAT(6, 0x38)
	SAVE_BAT(7, 0x3c)

	SAVE_4SR(0, 0x40)
	SAVE_4SR(4, 0x44)
	SAVE_4SR(8, 0x48)
	SAVE_4SR(12, 0x4c)

	SAVE_SPRN(SPRG0, 0x50)
	SAVE_SPRN(SPRG1, 0x51)
	SAVE_SPRN(SPRG2, 0x52)
	SAVE_SPRN(SPRG3, 0x53)
	SAVE_SPRN(SPRG4, 0x54)
	SAVE_SPRN(SPRG5, 0x55)
	SAVE_SPRN(SPRG6, 0x56)
	SAVE_SPRN(SPRG7, 0x57)

	SAVE_SPRN(IABR,  0x58)
	SAVE_SPRN(DABR,  0x59)
	SAVE_SPRN(TBRL,  0x5a)
	SAVE_SPRN(TBRU,  0x5b)

	blr


/* restore registers */
#define LOAD_BAT(n, addr)		\
	LOAD_SPRN(DBAT##n##L, addr);	\
	LOAD_SPRN(DBAT##n##U, addr+1);	\
	LOAD_SPRN(IBAT##n##L, addr+2);	\
	LOAD_SPRN(IBAT##n##U, addr+3);

#define LOAD_SR(n, addr)		\
	lwz	r10, ((addr)*4)(r4);	\
	mtsr	n, r10;

#define LOAD_4SR(n, addr)	\
	LOAD_SR(n, addr);	\
	LOAD_SR(n+1, addr+1);	\
	LOAD_SR(n+2, addr+2);	\
	LOAD_SR(n+3, addr+3);

restore_regs:
	lis	r4, registers@h
	ori	r4, r4, registers@l

	/* MMU is not up yet */
	subis	r4, r4, CONFIG_KERNEL_START@h
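	/* 'registers' is a kernel virtual address; subtracting
	 * CONFIG_KERNEL_START gives its physical address, which is what
	 * we need while translation is still off */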

	lwz	r0, 0(r4)
	lwz	r1, 0x4(r4)
	lwz	r2, 0x8(r4)
	lmw	r11, 0xc(r4)

	/*
	 * these are a bit tricky
	 *
	 * 0x18 - HID0
	 * 0x19 - HID1
	 * 0x1a - HID2
	 * 0x1b - MSR
	 * 0x1c - LR
	 * 0x1d - reserved by 0xf0 (BDI2000)
	 */
	LOAD_SPRN(RPA,   0x1e);
	LOAD_SPRN(SDR1,  0x1f);

	/* restore MMU regs */
	LOAD_BAT(0, 0x20)
	LOAD_BAT(1, 0x24)
	LOAD_BAT(2, 0x28)
	LOAD_BAT(3, 0x2c)
	LOAD_BAT(4, 0x30)
	LOAD_BAT(5, 0x34)
	LOAD_BAT(6, 0x38)
	LOAD_BAT(7, 0x3c)

	LOAD_4SR(0, 0x40)
	LOAD_4SR(4, 0x44)
	LOAD_4SR(8, 0x48)
	LOAD_4SR(12, 0x4c)

	/* rest of regs */
	LOAD_SPRN(SPRG0, 0x50);
	LOAD_SPRN(SPRG1, 0x51);
	LOAD_SPRN(SPRG2, 0x52);
	LOAD_SPRN(SPRG3, 0x53);
	LOAD_SPRN(SPRG4, 0x54);
	LOAD_SPRN(SPRG5, 0x55);
	LOAD_SPRN(SPRG6, 0x56);
	LOAD_SPRN(SPRG7, 0x57);

	LOAD_SPRN(IABR,  0x58);
	LOAD_SPRN(DABR,  0x59);
	LOAD_SPRN(TBWL,  0x5a);	/* these two have separate R/W regs */
	LOAD_SPRN(TBWU,  0x5b);

	blr



/* cache flushing code. copied from arch/ppc/boot/util.S */
#define NUM_CACHE_LINES (128*8)
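/* 128*8 lines of L1_CACHE_BYTES (32) each cover 32 KiB, well beyond
 * the MPC5200 core's 16 KiB data cache, so the reads below displace
 * (and thus cast out) every dirty line */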

/*
 * Flush data cache
 * Do this by just reading lots of stuff into the cache.
 */
flush_data_cache:
	lis	r3,CONFIG_KERNEL_START@h
	ori	r3,r3,CONFIG_KERNEL_START@l
	li	r4,NUM_CACHE_LINES
	mtctr	r4
1:
	lwz	r4,0(r3)
	addi	r3,r3,L1_CACHE_BYTES	/* Next line, please */
	bdnz	1b
	blr
413