xref: /linux/arch/powerpc/platforms/52xx/lite5200_sleep.S (revision 8a922b7728a93d837954315c98b84f6b78de0c4f)
1/* SPDX-License-Identifier: GPL-2.0 */
2#include <linux/linkage.h>
3
4#include <asm/reg.h>
5#include <asm/ppc_asm.h>
6#include <asm/processor.h>
7#include <asm/cache.h>
8
9
/* MPC5200 register offsets (from MBAR) and bits used below */
#define SDRAM_CTRL	0x104		/* SDRAM controller control register */
#define SC_MODE_EN	(1<<31)		/* "mode_en": allow soft commands */
#define SC_CKE		(1<<30)		/* clock enable to the SDRAM */
#define SC_REF_EN	(1<<28)		/* refresh enable */
#define SC_SOFT_PRE	(1<<1)		/* "soft_pre": software precharge */

/* wakeup-GPIO group registers (drive gpio_wkup1 to the QT chip) */
#define GPIOW_GPIOE	0xc00		/* pin enable */
#define GPIOW_DDR	0xc08		/* data direction (1 = output) */
#define GPIOW_DVO	0xc0c		/* data value out */

/* Clock Distribution Module: clock-enable register, SDRAM clock bit */
#define CDM_CE		0x214
#define CDM_SDRAM	(1<<3)


/*
 * helpers... beware: r10 and r4 are overwritten
 * r4 must hold the base of the save area; addr is a word (4-byte) index
 * into it, matching the slot layout documented in restore_regs.
 */
#define SAVE_SPRN(reg, addr)		\
	mfspr	r10, SPRN_##reg;	\
	stw	r10, ((addr)*4)(r4);

/* reload one SPR from slot addr; sync/isync orders the mtspr */
#define LOAD_SPRN(reg, addr)		\
	lwz	r10, ((addr)*4)(r4);	\
	mtspr	SPRN_##reg, r10;	\
	sync;				\
	isync;
34
35
	.data
/* 0x5c-word scratch area used by save_regs/restore_regs below */
registers:
	.space 0x5c*4
	.text
40
41/* ---------------------------------------------------------------------- */
42/* low-power mode with help of M68HLC908QT1 */
43
/*
 * lite5200_low_power(sram_va, mbar_va)
 *   r3 = virtual address of on-chip SRAM (sram_code is copied and run there)
 *   r4 = virtual address of MBAR (MPC5200 internal register block)
 *
 * Saves CPU state, flushes and disables the caches, then jumps to the
 * SRAM copy of sram_code, which enters SDRAM self-refresh and powers
 * the board off.  Does not return here -- on resume, u-boot enters at
 * lite5200_wakeup (physical address stored at 0x0 below), which
 * eventually blr's back to our caller via the saved LR.
 */
	.globl lite5200_low_power
lite5200_low_power:

	mr	r7, r3	/* save SRAM va */
	mr	r8, r4	/* save MBAR va */

	/*
	 * setup wakeup address for u-boot at physical location 0x0
	 * (store the PHYSICAL address of lite5200_wakeup through the
	 * kernel's virtual mapping of physical 0)
	 */
	lis	r3, CONFIG_KERNEL_START@h
	lis	r4, lite5200_wakeup@h
	ori	r4, r4, lite5200_wakeup@l
	sub	r4, r4, r3	/* virt -> phys */
	stw	r4, 0(r3)


	/*
	 * save stuff BDI overwrites
	 * 0xf0 (0xe0->0x100 gets overwritten when BDI connected;
	 *   even when CONFIG_BDI_SWITCH is disabled and MMU XLAT commented; heisenbug?))
	 * WARNING: self-refresh doesn't seem to work when BDI2000 is connected,
	 *   possibly because BDI sets SDRAM registers before wakeup code does
	 */
	lis	r4, registers@h
	ori	r4, r4, registers@l
	lwz	r10, 0xf0(r3)		/* word at kernel va 0xf0 */
	stw	r10, (0x1d*4)(r4)	/* into slot 0x1d of the save area */

	/* save registers to r4 [destroys r10] */
	SAVE_SPRN(LR, 0x1c)	/* LR first -- the bl below clobbers it */
	bl	save_regs

	/* flush caches [destroys r3, r4] */
	bl	flush_data_cache


	/* copy code to sram (word by word; ctr = length in words) */
	mr	r4, r7
	li	r3, (sram_code_end - sram_code)/4
	mtctr	r3
	lis	r3, sram_code@h
	ori	r3, r3, sram_code@l
1:
	lwz	r5, 0(r3)
	stw	r5, 0(r4)
	addi	r3, r3, 4
	addi	r4, r4, 4
	bdnz	1b

	/* get tb_ticks_per_usec (r11 is the input to udelay in sram_code) */
	lis	r3, tb_ticks_per_usec@h
	lwz	r11, tb_ticks_per_usec@l(r3)

	/* disable I and D caches (ori then xori clears ICE|DCE in HID0) */
	mfspr	r3, SPRN_HID0
	ori	r3, r3, HID0_ICE | HID0_DCE
	xori	r3, r3, HID0_ICE | HID0_DCE
	sync; isync;
	mtspr	SPRN_HID0, r3
	sync; isync;

	/* jump to the SRAM copy of sram_code (r7 = SRAM va) */
	mtlr	r7
	blrl
	/* doesn't return */
107
108
/*
 * Runs from on-chip SRAM with the caches off.
 * In: r8 = MBAR va, r11 = tb_ticks_per_usec (set by lite5200_low_power).
 * Puts the SDRAM into self-refresh, gates its clock, then asks the QT
 * chip (via gpio_wkup1) to cut power.  Never returns.
 */
sram_code:
	/* self refresh */
	lwz	r4, SDRAM_CTRL(r8)

	/* send NOP (precharge) */
	oris	r4, r4, SC_MODE_EN@h	/* mode_en */
	stw	r4, SDRAM_CTRL(r8)
	sync

	ori	r4, r4, SC_SOFT_PRE	/* soft_pre */
	stw	r4, SDRAM_CTRL(r8)
	sync
	xori	r4, r4, SC_SOFT_PRE	/* clear the bit again in r4 */

	xoris	r4, r4, SC_MODE_EN@h	/* !mode_en */
	stw	r4, SDRAM_CTRL(r8)
	sync

	/* delay (for NOP to finish) */
	li	r12, 1
	bl	udelay

	/*
	 * mode_en must not be set when enabling self-refresh
	 * send AR with CKE low (self-refresh)
	 */
	oris	r4, r4, (SC_REF_EN | SC_CKE)@h
	xoris	r4, r4, (SC_CKE)@h	/* ref_en !cke */
	stw	r4, SDRAM_CTRL(r8)
	sync

	/* delay (after !CKE there should be two cycles) */
	li	r12, 1
	bl	udelay

	/* disable clock (ori+xori: write CDM_CE with the SDRAM bit clear) */
	lwz	r4, CDM_CE(r8)
	ori	r4, r4, CDM_SDRAM
	xori	r4, r4, CDM_SDRAM
	stw	r4, CDM_CE(r8)
	sync

	/* delay a bit */
	li	r12, 1
	bl	udelay


	/* turn off with QT chip */
	li	r4, 0x02
	stb	r4, GPIOW_GPIOE(r8)	/* enable gpio_wkup1 */
	sync

	stb	r4, GPIOW_DVO(r8)	/* "output" high */
	sync
	stb	r4, GPIOW_DDR(r8)	/* output */
	sync
	stb	r4, GPIOW_DVO(r8)	/* output high */
	sync

	/* 10uS delay */
	li	r12, 10
	bl	udelay

	/* turn off */
	li	r4, 0
	stb	r4, GPIOW_DVO(r8)	/* output low */
	sync

	/* wait until we're offline (power drops while we spin here) */
  1:
	b	1b
181
	/*
	 * local udelay in sram is needed: busy-wait r12 microseconds on
	 * the timebase.  In: r11 = tb_ticks_per_usec, r12 = usecs.
	 * Clobbers r12, r13 and cr0.
	 * NOTE(review): signed cmp/blt -- assumes the timebase stays well
	 * below 2^31 across the wait; fine for these microsecond delays,
	 * but not a general-purpose udelay.
	 */
SYM_FUNC_START_LOCAL(udelay)
	/* r11 - tb_ticks_per_usec, r12 - usecs, overwrites r13 */
	mullw	r12, r12, r11	/* ticks to wait */
	mftb	r13	/* start */
	add	r12, r13, r12 /* end */
    1:
	mftb	r13	/* current */
	cmp	cr0, r13, r12
	blt	1b
	blr
SYM_FUNC_END(udelay)

sram_code_end:
196
197
198
/*
 * uboot jumps here on resume, with the MMU off, at the physical address
 * that lite5200_low_power stored at physical 0x0.  Restores the state
 * saved by save_regs, turns address translation back on via rfi, then
 * returns to lite5200_low_power's caller through the saved LR.
 */
lite5200_wakeup:
	bl	restore_regs	/* leaves r4 = PHYSICAL addr of save area */


	/* HIDs, MSR */
	LOAD_SPRN(HID1, 0x19)
	LOAD_SPRN(HID2, 0x1a)


	/* address translation is tricky (see turn_on_mmu) */
	mfmsr	r10
	ori	r10, r10, MSR_DR | MSR_IR	/* enable data+instr translation */


	mtspr	SPRN_SRR1, r10
	lis	r10, mmu_on@h
	ori	r10, r10, mmu_on@l
	mtspr	SPRN_SRR0, r10
	sync
	rfi	/* "returns" to mmu_on with the MMU enabled */
mmu_on:
	/* kernel offset (r4 is still set from restore_registers) */
	addis	r4, r4, CONFIG_KERNEL_START@h	/* phys -> virt */


	/* restore MSR (slot 0x1b, saved by save_regs) */
	lwz	r10, (4*0x1b)(r4)
	mtmsr	r10
	sync; isync;

	/* invalidate caches: pulse ICFI|DCI in HID0, then drop them again */
	mfspr	r10, SPRN_HID0
	ori	r5, r10, HID0_ICFI | HID0_DCI
	mtspr	SPRN_HID0, r5	/* invalidate caches */
	sync; isync;
	mtspr	SPRN_HID0, r10
	sync; isync;

	/* enable caches */
	lwz	r10, (4*0x18)(r4)	/* saved HID0 */
	mtspr	SPRN_HID0, r10	/* restore (enable caches, DPM) */
	/* ^ this has to be after address translation set in MSR */
	sync
	isync


	/* restore 0xf0 (BDI2000) -- counterpart of the save in low_power */
	lis	r3, CONFIG_KERNEL_START@h
	lwz	r10, (0x1d*4)(r4)
	stw	r10, 0xf0(r3)

	LOAD_SPRN(LR, 0x1c)	/* LR of the original lite5200_low_power call */


	blr
_ASM_NOKPROBE_SYMBOL(lite5200_wakeup)
256
257
258/* ---------------------------------------------------------------------- */
259/* boring code: helpers */
260
/* save registers */
/* save BAT set n (data+instr, lower+upper) into 4 consecutive slots */
#define SAVE_BAT(n, addr)		\
	SAVE_SPRN(DBAT##n##L, addr);	\
	SAVE_SPRN(DBAT##n##U, addr+1);	\
	SAVE_SPRN(IBAT##n##L, addr+2);	\
	SAVE_SPRN(IBAT##n##U, addr+3);

/* save segment register n into word slot addr (clobbers r10) */
#define SAVE_SR(n, addr)		\
	mfsr	r10, n;			\
	stw	r10, ((addr)*4)(r4);

/* save segment registers n..n+3 into slots addr..addr+3 */
#define SAVE_4SR(n, addr)	\
	SAVE_SR(n, addr);	\
	SAVE_SR(n+1, addr+1);	\
	SAVE_SR(n+2, addr+2);	\
	SAVE_SR(n+3, addr+3);
277
/*
 * save_regs -- dump GPRs, HID0-2/MSR, MMU state (BATs, SRs, SDR1) and
 * misc SPRs into the save area.  In: r4 = save area base.  Clobbers r10.
 * Word-slot layout matches the comment block in restore_regs; LR (0x1c)
 * is saved by the caller before the bl here clobbers it.
 */
SYM_FUNC_START_LOCAL(save_regs)
	stw	r0, 0(r4)
	stw	r1, 0x4(r4)
	stw	r2, 0x8(r4)
	stmw	r11, 0xc(r4) /* 0xc -> 0x5f, (0x18*4-1) */

	SAVE_SPRN(HID0, 0x18)
	SAVE_SPRN(HID1, 0x19)
	SAVE_SPRN(HID2, 0x1a)
	mfmsr	r10
	stw	r10, (4*0x1b)(r4)
	/*SAVE_SPRN(LR, 0x1c) have to save it before the call */
	/* 0x1d reserved by 0xf0 */
	SAVE_SPRN(RPA,   0x1e)
	SAVE_SPRN(SDR1,  0x1f)

	/* save MMU regs */
	SAVE_BAT(0, 0x20)
	SAVE_BAT(1, 0x24)
	SAVE_BAT(2, 0x28)
	SAVE_BAT(3, 0x2c)
	SAVE_BAT(4, 0x30)
	SAVE_BAT(5, 0x34)
	SAVE_BAT(6, 0x38)
	SAVE_BAT(7, 0x3c)

	SAVE_4SR(0, 0x40)
	SAVE_4SR(4, 0x44)
	SAVE_4SR(8, 0x48)
	SAVE_4SR(12, 0x4c)

	SAVE_SPRN(SPRG0, 0x50)
	SAVE_SPRN(SPRG1, 0x51)
	SAVE_SPRN(SPRG2, 0x52)
	SAVE_SPRN(SPRG3, 0x53)
	SAVE_SPRN(SPRG4, 0x54)
	SAVE_SPRN(SPRG5, 0x55)
	SAVE_SPRN(SPRG6, 0x56)
	SAVE_SPRN(SPRG7, 0x57)

	SAVE_SPRN(IABR,  0x58)
	SAVE_SPRN(DABR,  0x59)
	SAVE_SPRN(TBRL,  0x5a)	/* timebase reads via TBR... */
	SAVE_SPRN(TBRU,  0x5b)	/* ...writes go via TBW (see restore) */

	blr
SYM_FUNC_END(save_regs)
325
326
/* restore registers */
/* restore BAT set n from 4 consecutive slots (layout as SAVE_BAT) */
#define LOAD_BAT(n, addr)		\
	LOAD_SPRN(DBAT##n##L, addr);	\
	LOAD_SPRN(DBAT##n##U, addr+1);	\
	LOAD_SPRN(IBAT##n##L, addr+2);	\
	LOAD_SPRN(IBAT##n##U, addr+3);

/* restore segment register n from word slot addr (clobbers r10) */
#define LOAD_SR(n, addr)		\
	lwz	r10, ((addr)*4)(r4);	\
	mtsr	n, r10;

/* restore segment registers n..n+3 from slots addr..addr+3 */
#define LOAD_4SR(n, addr)	\
	LOAD_SR(n, addr);	\
	LOAD_SR(n+1, addr+1);	\
	LOAD_SR(n+2, addr+2);	\
	LOAD_SR(n+3, addr+3);
343
/*
 * restore_regs -- counterpart of save_regs.  Runs with the MMU OFF, so
 * it reaches the save area through its physical address.
 * Out: r4 = PHYSICAL address of the save area (lite5200_wakeup relies
 * on this to re-read slots after turning the MMU on).  Clobbers r10;
 * reloads r0-r2 and r11-r31.
 */
SYM_FUNC_START_LOCAL(restore_regs)
	lis	r4, registers@h
	ori	r4, r4, registers@l

	/* MMU is not up yet */
	subis	r4, r4, CONFIG_KERNEL_START@h	/* virt -> phys */

	lwz	r0, 0(r4)
	lwz	r1, 0x4(r4)
	lwz	r2, 0x8(r4)
	lmw	r11, 0xc(r4)

	/*
	 * these are a bit tricky -- left for lite5200_wakeup to restore
	 * at the right points in the MMU-on transition:
	 *
	 * 0x18 - HID0
	 * 0x19 - HID1
	 * 0x1a - HID2
	 * 0x1b - MSR
	 * 0x1c - LR
	 * 0x1d - reserved by 0xf0 (BDI2000)
	 */
	LOAD_SPRN(RPA,   0x1e);
	LOAD_SPRN(SDR1,  0x1f);

	/* restore MMU regs */
	LOAD_BAT(0, 0x20)
	LOAD_BAT(1, 0x24)
	LOAD_BAT(2, 0x28)
	LOAD_BAT(3, 0x2c)
	LOAD_BAT(4, 0x30)
	LOAD_BAT(5, 0x34)
	LOAD_BAT(6, 0x38)
	LOAD_BAT(7, 0x3c)

	LOAD_4SR(0, 0x40)
	LOAD_4SR(4, 0x44)
	LOAD_4SR(8, 0x48)
	LOAD_4SR(12, 0x4c)

	/* rest of regs */
	LOAD_SPRN(SPRG0, 0x50);
	LOAD_SPRN(SPRG1, 0x51);
	LOAD_SPRN(SPRG2, 0x52);
	LOAD_SPRN(SPRG3, 0x53);
	LOAD_SPRN(SPRG4, 0x54);
	LOAD_SPRN(SPRG5, 0x55);
	LOAD_SPRN(SPRG6, 0x56);
	LOAD_SPRN(SPRG7, 0x57);

	LOAD_SPRN(IABR,  0x58);
	LOAD_SPRN(DABR,  0x59);
	LOAD_SPRN(TBWL,  0x5a);	/* these two have separate R/W regs */
	LOAD_SPRN(TBWU,  0x5b);

	blr
_ASM_NOKPROBE_SYMBOL(restore_regs)
SYM_FUNC_END(restore_regs)
402
403
404
/* cache flushing code. copied from arch/ppc/boot/util.S */
/* NOTE(review): 128*8 lines of L1_CACHE_BYTES each -- presumably sized
 * to cover (displace) the whole L1 D-cache on this core; confirm
 * against the MPC5200's e300 cache geometry. */
#define NUM_CACHE_LINES (128*8)

/*
 * Flush data cache
 * Do this by just reading lots of stuff into the cache: touching
 * NUM_CACHE_LINES lines from the kernel base evicts (and so writes
 * back) everything previously dirty.  Clobbers r3, r4 and ctr.
 */
SYM_FUNC_START_LOCAL(flush_data_cache)
	lis	r3,CONFIG_KERNEL_START@h
	ori	r3,r3,CONFIG_KERNEL_START@l
	li	r4,NUM_CACHE_LINES
	mtctr	r4
1:
	lwz	r4,0(r3)
	addi	r3,r3,L1_CACHE_BYTES	/* Next line, please */
	bdnz	1b
	blr
SYM_FUNC_END(flush_data_cache)
423