xref: /linux/arch/arm/mach-omap2/sleep34xx.S (revision 0795a75a369b931150074a14473f024359b7f25c)
/*
 * linux/arch/arm/mach-omap2/sleep.S
 *
 * (C) Copyright 2007
 * Texas Instruments
 * Karthik Dasu <karthik-dp@ti.com>
 *
 * (C) Copyright 2004
 * Texas Instruments, <www.ti.com>
 * Richard Woodruff <r-woodruff2@ti.com>
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */
#include <linux/linkage.h>
#include <asm/assembler.h>
#include <mach/io.h>
#include <plat/control.h>

#include "prm.h"
#include "sdrc.h"

#define PM_PREPWSTST_CORE_V	OMAP34XX_PRM_REGADDR(CORE_MOD, \
				OMAP3430_PM_PREPWSTST)
#define PM_PREPWSTST_CORE_P	0x48306AE8
#define PM_PREPWSTST_MPU_V	OMAP34XX_PRM_REGADDR(MPU_MOD, \
				OMAP3430_PM_PREPWSTST)
#define PM_PWSTCTRL_MPU_P	OMAP3430_PRM_BASE + MPU_MOD + PM_PWSTCTRL
#define SRAM_BASE_P		0x40200000
#define CONTROL_STAT		0x480022F0
#define SCRATCHPAD_MEM_OFFS	0x310 /* Move this once a proper location
				       * becomes available */
#define SCRATCHPAD_BASE_P	(OMAP343X_CTRL_BASE + OMAP343X_CONTROL_MEM_WKUP\
						+ SCRATCHPAD_MEM_OFFS)
#define SDRC_POWER_V		OMAP34XX_SDRC_REGADDR(SDRC_POWER)
#define SDRC_SYSCONFIG_P	(OMAP343X_SDRC_BASE + SDRC_SYSCONFIG)
#define SDRC_MR_0_P		(OMAP343X_SDRC_BASE + SDRC_MR_0)
#define SDRC_EMR2_0_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_0)
#define SDRC_MANUAL_0_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_0)
#define SDRC_MR_1_P		(OMAP343X_SDRC_BASE + SDRC_MR_1)
#define SDRC_EMR2_1_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_1)
#define SDRC_MANUAL_1_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_1)

	.text
/* Function call to get the restore pointer for resume from OFF */
ENTRY(get_restore_pointer)
	stmfd	sp!, {lr}	@ save registers on stack
	adr	r0, restore
	ldmfd	sp!, {pc}	@ restore regs and return
ENTRY(get_restore_pointer_sz)
	.word	. - get_restore_pointer

	.text
/* Function call to get the restore pointer for ES3 to resume from OFF */
ENTRY(get_es3_restore_pointer)
	stmfd	sp!, {lr}	@ save registers on stack
	adr	r0, restore_es3
	ldmfd	sp!, {pc}	@ restore regs and return
ENTRY(get_es3_restore_pointer_sz)
	.word	. - get_es3_restore_pointer
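/*
 * The two helpers above return (in r0) the address of the 'restore' /
 * 'restore_es3' entry points further down in this file.  The platform PM
 * code is expected to place that address in the scratchpad so the boot ROM
 * branches back here on wake-up from OFF; the *_sz entries export the size
 * of the preceding routine.
 */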

ENTRY(es3_sdrc_fix)
	ldr	r4, sdrc_syscfg		@ get config addr
	ldr	r5, [r4]		@ get value
	tst	r5, #0x100		@ is part access blocked
	it	eq
	biceq	r5, r5, #0x100		@ clear bit if set
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_mr_0		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_emr2_0		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_manual_0	@ get config addr
	mov	r5, #0x2		@ autorefresh command
	str	r5, [r4]		@ kick off refreshes
	ldr	r4, sdrc_mr_1		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_emr2_1		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_manual_1	@ get config addr
	mov	r5, #0x2		@ autorefresh command
	str	r5, [r4]		@ kick off refreshes
	bx	lr
sdrc_syscfg:
	.word	SDRC_SYSCONFIG_P
sdrc_mr_0:
	.word	SDRC_MR_0_P
sdrc_emr2_0:
	.word	SDRC_EMR2_0_P
sdrc_manual_0:
	.word	SDRC_MANUAL_0_P
sdrc_mr_1:
	.word	SDRC_MR_1_P
sdrc_emr2_1:
	.word	SDRC_EMR2_1_P
sdrc_manual_1:
	.word	SDRC_MANUAL_1_P
ENTRY(es3_sdrc_fix_sz)
	.word	. - es3_sdrc_fix
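/*
 * es3_sdrc_fix above appears to work around an SDRC problem seen on OMAP3
 * ES3.x silicon after a wake-up from CORE OFF: it clears the bit in
 * SDRC_SYSCONFIG that blocks access, rewrites MR/EMR2 for both chip
 * selects and issues manual autorefresh commands.  It is copied into SRAM
 * by restore_es3 and runs from there, since SDRAM cannot be used until the
 * fix has completed (see restore_es3 below).
 */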

/* Function to call rom code to save secure ram context */
ENTRY(save_secure_ram_context)
	stmfd	sp!, {r1-r12, lr}	@ save registers on stack
save_secure_ram_debug:
	/* b save_secure_ram_debug */	@ enable to debug save code
	adr	r3, api_params		@ r3 points to parameters
	str	r0, [r3,#0x4]		@ r0 has sdram address
	ldr	r12, high_mask
	and	r3, r3, r12
	ldr	r12, sram_phy_addr_mask
	orr	r3, r3, r12
	mov	r0, #25			@ set service ID for PPA
	mov	r12, r0			@ copy secure service ID in r12
	mov	r1, #0			@ set task id for ROM code in r1
	mov	r2, #4			@ set some flags in r2, r6
	mov	r6, #0xff
	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
	.word	0xE1600071		@ call SMI monitor (smi #1)
	nop
	nop
	nop
	nop
	ldmfd	sp!, {r1-r12, pc}
sram_phy_addr_mask:
	.word	SRAM_BASE_P
high_mask:
	.word	0xffff
api_params:
	.word	0x4, 0x0, 0x0, 0x1, 0x1
ENTRY(save_secure_ram_context_sz)
	.word	. - save_secure_ram_context
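/*
 * Calling convention used for the secure monitor calls in this file
 * (save_secure_ram_context above and the PPA calls in the restore path):
 * r12 carries the PPA service ID, r1 the ROM task ID, r2/r6 flag values
 * and r3 the *physical* address of the parameter list.  The
 * '.word 0xE1600071' is a hand-encoded SMC #1 (historically "smi" on
 * OMAP), emitted as data because older assemblers reject the mnemonic.
 * Note how r3 is translated to a physical address by keeping its low
 * 16 bits and OR-ing in SRAM_BASE_P: this routine is expected to run
 * from its SRAM copy.
 */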

/*
 * Forces OMAP into idle state
 *
 * omap34xx_cpu_suspend() - This bit of code just executes the WFI
 * for normal idles.
 *
 * Note: This code gets copied to internal SRAM at boot.  When the OMAP
 *	 wakes up it continues execution at the point it went to sleep.
 */
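/*
 * Rough usage sketch (not part of this file): the platform PM code copies
 * this routine into SRAM and then calls the SRAM copy.  The names below
 * are the usual OMAP3 PM helpers of this era and are shown only for
 * orientation; the exact caller lives in pm34xx.c.
 *
 *	// the SRAM copy gets r0 = context save area, r1 = save_state (0..3)
 *	_omap_sram_idle = omap_sram_push(omap34xx_cpu_suspend,
 *					 omap34xx_cpu_suspend_sz);
 *	...
 *	_omap_sram_idle(omap3_arm_context, save_state);
 */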
ENTRY(omap34xx_cpu_suspend)
	stmfd	sp!, {r0-r12, lr}		@ save registers on stack
loop:
	/*b	loop*/	@ Enable to debug by stepping through code
	/* r0 contains restore pointer in sdram */
	/* r1 contains information about saving context */
	ldr	r4, sdrc_power		@ read the SDRC_POWER register
	ldr	r5, [r4]		@ read the contents of SDRC_POWER
	orr	r5, r5, #0x40		@ enable self refresh on idle req
	str	r5, [r4]		@ write back to SDRC_POWER register

	cmp	r1, #0x0
	/* If context save is required, do that and execute wfi */
	bne	save_context_wfi
	/* Data memory barrier and Data sync barrier */
	mov	r1, #0
	mcr	p15, 0, r1, c7, c10, 4
	mcr	p15, 0, r1, c7, c10, 5

	wfi				@ wait for interrupt

	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	bl	i_dll_wait

	ldmfd	sp!, {r0-r12, pc}		@ restore regs and return
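/*
 * Wake-up entry points.  On ES3.x parts the scratchpad restore pointer is
 * set to restore_es3 (handed out by get_es3_restore_pointer): if the
 * previous CORE power state was OFF, it first copies es3_sdrc_fix into
 * SRAM and runs it from there (SDRAM is not yet usable), then falls
 * through to the common 'restore' path.
 */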
restore_es3:
	/*b restore_es3*/		@ Enable to debug restore code
	ldr	r5, pm_prepwstst_core_p
	ldr	r4, [r5]
	and	r4, r4, #0x3
	cmp	r4, #0x0	@ Check if previous power state of CORE is OFF
	bne	restore
	adr	r0, es3_sdrc_fix
	ldr	r1, sram_base
	ldr	r2, es3_sdrc_fix_sz
	mov	r2, r2, ror #2	@ size in bytes -> number of words
copy_to_sram:
	ldmia	r0!, {r3}	@ val = *src
	stmia	r1!, {r3}	@ *dst = val
	subs	r2, r2, #0x1	@ num_words--
	bne	copy_to_sram
	ldr	r1, sram_base
	blx	r1
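/*
 * Common restore path, entered with the MMU off.  It determines whether
 * the MPU actually hit OFF (logic + L1 + L2 lost) or only RET, invalidates
 * L2 -- through the PPA services on HS devices, or a plain SMI on GP
 * devices (CONTROL_STATUS device-type field == 0x3) -- and then restores
 * the CP15 state that save_context_wfi stored in SDRAM (the scratchpad
 * holds a pointer to that save area at offset 0xBC).
 */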
restore:
	/* b restore*/	@ Enable to debug restore code
	/* Check the reason for the MPU reset and store it in r9: */
	/* 1 - Only L1 and logic lost */
	/* 2 - Only L2 lost - In this case, we won't be here */
	/* 3 - Both L1 and L2 lost */
	ldr	r1, pm_pwstctrl_mpu
	ldr	r2, [r1]
	and	r2, r2, #0x3
	cmp	r2, #0x0	@ Check if target power state was OFF or RET
	moveq	r9, #0x3	@ MPU OFF => L1 and L2 lost
	movne	r9, #0x1	@ Only L1 lost => avoid L2 invalidation
	bne	logic_l1_restore
	ldr	r0, control_stat
	ldr	r1, [r0]
	and	r1, #0x700
	cmp	r1, #0x300
	beq	l2_inv_gp
	mov	r0, #40		@ set service ID for PPA
	mov	r12, r0		@ copy secure Service ID in r12
	mov	r1, #0		@ set task id for ROM code in r1
	mov	r2, #4		@ set some flags in r2, r6
	mov	r6, #0xff
	adr	r3, l2_inv_api_params	@ r3 points to dummy parameters
	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
	.word	0xE1600071		@ call SMI monitor (smi #1)
	/* Write to Aux control register to set some bits */
	mov	r0, #42		@ set service ID for PPA
	mov	r12, r0		@ copy secure Service ID in r12
	mov	r1, #0		@ set task id for ROM code in r1
	mov	r2, #4		@ set some flags in r2, r6
	mov	r6, #0xff
	adr	r3, write_aux_control_params	@ r3 points to parameters
	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
	.word	0xE1600071		@ call SMI monitor (smi #1)

	b	logic_l1_restore
l2_inv_api_params:
	.word	0x1, 0x00
write_aux_control_params:
	.word	0x1, 0x72
l2_inv_gp:
	/* Execute smi to invalidate L2 cache */
	mov	r12, #0x1		@ set up to invalidate L2
smi:	.word	0xE1600070		@ Call SMI monitor (smieq)
	/* Write to Aux control register to set some bits */
	mov	r0, #0x72
	mov	r12, #0x3
	.word	0xE1600070	@ Call SMI monitor (smieq)
logic_l1_restore:
	mov	r1, #0
	/* Invalidate all instruction caches to PoU
	 * and flush branch target cache */
	mcr	p15, 0, r1, c7, c5, 0

	ldr	r4, scratchpad_base
	ldr	r3, [r4,#0xBC]
	ldmia	r3!, {r4-r6}
	mov	sp, r4
	msr	spsr_cxsf, r5
	mov	lr, r6

	ldmia	r3!, {r4-r9}
	/* Coprocessor access Control Register */
	mcr	p15, 0, r4, c1, c0, 2

	/* TTBR0 */
	MCR	p15, 0, r5, c2, c0, 0
	/* TTBR1 */
	MCR	p15, 0, r6, c2, c0, 1
	/* Translation table base control register */
	MCR	p15, 0, r7, c2, c0, 2
	/* Domain access Control Register */
	MCR	p15, 0, r8, c3, c0, 0
	/* Data fault status Register */
	MCR	p15, 0, r9, c5, c0, 0

	ldmia	r3!, {r4-r8}
	/* Instruction fault status Register */
	MCR	p15, 0, r4, c5, c0, 1
	/* Data Auxiliary Fault Status Register */
	MCR	p15, 0, r5, c5, c1, 0
	/* Instruction Auxiliary Fault Status Register */
	MCR	p15, 0, r6, c5, c1, 1
	/* Data Fault Address Register */
	MCR	p15, 0, r7, c6, c0, 0
	/* Instruction Fault Address Register */
	MCR	p15, 0, r8, c6, c0, 2
	ldmia	r3!, {r4-r7}

	/* user r/w thread and process ID */
	MCR	p15, 0, r4, c13, c0, 2
	/* user ro thread and process ID */
	MCR	p15, 0, r5, c13, c0, 3
	/* Privileged only thread and process ID */
	MCR	p15, 0, r6, c13, c0, 4
	/* cache size selection */
	MCR	p15, 2, r7, c0, c0, 0
	ldmia	r3!, {r4-r8}
	/* Data TLB lockdown registers */
	MCR	p15, 0, r4, c10, c0, 0
	/* Instruction TLB lockdown registers */
	MCR	p15, 0, r5, c10, c0, 1
	/* Secure or Nonsecure Vector Base Address */
	MCR	p15, 0, r6, c12, c0, 0
	/* FCSE PID */
	MCR	p15, 0, r7, c13, c0, 0
	/* Context PID */
	MCR	p15, 0, r8, c13, c0, 1

	ldmia	r3!, {r4-r5}
	/* primary memory remap register */
	MCR	p15, 0, r4, c10, c2, 0
	/* normal memory remap register */
	MCR	p15, 0, r5, c10, c2, 1

	/* Restore cpsr */
	ldmia	r3!, {r4}	/* load CPSR from SDRAM */
	msr	cpsr, r4	/* store cpsr */

	/* Enabling MMU here */
	mrc	p15, 0, r7, c2, c0, 2	/* Read TTBRControl */
	/* Extract N (0:2) bits and decide whether to use TTBR0 or TTBR1 */
	and	r7, #0x7
	cmp	r7, #0x0
	beq	usettbr0
ttbr_error:
	/* More work needs to be done to support N[0:2] values other than 0,
	 * so loop here so that the error can be detected
	 */
	b	ttbr_error
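/*
 * usettbr0 below builds a temporary 1 MB identity (VA == PA) section
 * mapping for the section containing the current physical PC, so that
 * execution can continue across the MMU being re-enabled.  The previous
 * page-table entry and its address are parked in the scratchpad (offsets
 * 0xC0/0xC4, with the saved control register at 0xC8) so that the original
 * entry can be restored once the MMU is back on.
 */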
usettbr0:
	mrc	p15, 0, r2, c2, c0, 0
	ldr	r5, ttbrbit_mask
	and	r2, r5
	mov	r4, pc
	ldr	r5, table_index_mask
	and	r4, r5	/* r4 = bits 31:20 of pc */
	/* Extract the value to be written to the table entry */
	ldr	r1, table_entry
	add	r1, r1, r4	/* r1 has the value to be written to the table entry */
	/* Get the address of the table entry to modify */
	lsr	r4, #18
	add	r2, r4	/* r2 has the location which needs to be modified */
	/* Store the previous entry of the location being modified */
	ldr	r5, scratchpad_base
	ldr	r4, [r2]
	str	r4, [r5, #0xC0]
	/* Modify the table entry */
	str	r1, [r2]
	/* Store the address of the entry being modified
	 * - will be restored after enabling MMU */
	ldr	r5, scratchpad_base
	str	r2, [r5, #0xC4]

	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 4	@ Flush prefetch buffer
	mcr	p15, 0, r0, c7, c5, 6	@ Invalidate branch predictor array
	mcr	p15, 0, r0, c8, c5, 0	@ Invalidate instruction TLB
	mcr	p15, 0, r0, c8, c6, 0	@ Invalidate data TLB
	/* Restore the control register but don't enable caches here.
	 * Caches will be enabled after restoring the MMU table entry. */
	ldmia	r3!, {r4}
	/* Store the previous value of the control register in the scratchpad */
	str	r4, [r5, #0xC8]
	ldr	r2, cache_pred_disable_mask
	and	r4, r2
	mcr	p15, 0, r4, c1, c0, 0

	ldmfd	sp!, {r0-r12, pc}		@ restore regs and return
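/*
 * save_context_wfi: r0 points at the SDRAM save area, r1 encodes the
 * target sleep state (see the list below).  sp/spsr/lr, the CP15 context,
 * the CPSR and the control register are stored in exactly the order
 * logic_l1_restore reads them back; the caches are then cleaned as
 * required and WFI is executed.
 */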
save_context_wfi:
	/*b	save_context_wfi*/	@ enable to debug save code
	mov	r8, r0		/* Store SDRAM address in r8 */
	/* Check what the target sleep state is (stored in r1): */
	/* 1 - Only L1 and logic lost */
	/* 2 - Only L2 lost */
	/* 3 - Both L1 and L2 lost */
	cmp	r1, #0x2	/* Only L2 lost */
	beq	clean_l2
	cmp	r1, #0x1	/* L2 retained */
	/* r9 stores whether to clean L2 or not */
	moveq	r9, #0x0	/* Don't clean L2 */
	movne	r9, #0x1	/* Clean L2 */
l1_logic_lost:
	/* Store sp and spsr to SDRAM */
	mov	r4, sp
	mrs	r5, spsr
	mov	r6, lr
	stmia	r8!, {r4-r6}
	/* Save all ARM registers */
	/* Coprocessor access control register */
	mrc	p15, 0, r6, c1, c0, 2
	stmia	r8!, {r6}
	/* TTBR0, TTBR1 and Translation table base control */
	mrc	p15, 0, r4, c2, c0, 0
	mrc	p15, 0, r5, c2, c0, 1
	mrc	p15, 0, r6, c2, c0, 2
	stmia	r8!, {r4-r6}
	/* Domain access control register, data fault status register,
	 * and instruction fault status register */
	mrc	p15, 0, r4, c3, c0, 0
	mrc	p15, 0, r5, c5, c0, 0
	mrc	p15, 0, r6, c5, c0, 1
	stmia	r8!, {r4-r6}
	/* Data aux fault status register, instruction aux fault status,
	 * data fault address register and instruction fault address register */
	mrc	p15, 0, r4, c5, c1, 0
	mrc	p15, 0, r5, c5, c1, 1
	mrc	p15, 0, r6, c6, c0, 0
	mrc	p15, 0, r7, c6, c0, 2
	stmia	r8!, {r4-r7}
	/* user r/w thread and process ID, user r/o thread and process ID,
	 * priv only thread and process ID, cache size selection */
	mrc	p15, 0, r4, c13, c0, 2
	mrc	p15, 0, r5, c13, c0, 3
	mrc	p15, 0, r6, c13, c0, 4
	mrc	p15, 2, r7, c0, c0, 0
	stmia	r8!, {r4-r7}
	/* Data TLB lockdown, instruction TLB lockdown registers */
	mrc	p15, 0, r5, c10, c0, 0
	mrc	p15, 0, r6, c10, c0, 1
	stmia	r8!, {r5-r6}
	/* Secure or non secure vector base address, FCSE PID, Context PID */
	mrc	p15, 0, r4, c12, c0, 0
	mrc	p15, 0, r5, c13, c0, 0
	mrc	p15, 0, r6, c13, c0, 1
	stmia	r8!, {r4-r6}
	/* Primary remap, normal remap registers */
	mrc	p15, 0, r4, c10, c2, 0
	mrc	p15, 0, r5, c10, c2, 1
	stmia	r8!, {r4-r5}

	/* Store current cpsr */
	mrs	r2, cpsr
	stmia	r8!, {r2}

	mrc	p15, 0, r4, c1, c0, 0
	/* save control register */
	stmia	r8!, {r4}
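/*
 * The set/way walk below mirrors the standard ARMv7 clean-by-set/way loop
 * (compare v7_flush_dcache_all in arch/arm/mm/cache-v7.S): read CLIDR,
 * iterate over every data/unified cache level up to the Level of
 * Coherency, and clean each set and way at that level.
 */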
clean_caches:
	/* Clean data or unified cache up to the Level of Coherency */
	/* How to invalidate only L1 cache???? - #FIX_ME# */
	/* mcr	p15, 0, r11, c7, c11, 1 */
	cmp	r9, #1		/* Check whether L2 inval is required or not */
	bne	skip_l2_inval
clean_l2:
	/* read clidr */
	mrc	p15, 1, r0, c0, c0, 1
	/* extract loc from clidr */
	ands	r3, r0, #0x7000000
	/* left align loc bit field */
	mov	r3, r3, lsr #23
	/* if loc is 0, then no need to clean */
	beq	finished
	/* start clean at cache level 0 */
	mov	r10, #0
loop1:
	/* work out 3x current cache level */
	add	r2, r10, r10, lsr #1
	/* extract cache type bits from clidr */
	mov	r1, r0, lsr r2
	/* mask off the bits for the current cache only */
	and	r1, r1, #7
	/* see what cache we have at this level */
	cmp	r1, #2
	/* skip if no cache, or just i-cache */
	blt	skip
	/* select current cache level in cssr */
	mcr	p15, 2, r10, c0, c0, 0
	/* isb to sync the new cssr & csidr */
	isb
	/* read the new csidr */
	mrc	p15, 1, r1, c0, c0, 0
	/* extract the length of the cache lines */
	and	r2, r1, #7
	/* add 4 (line length offset) */
	add	r2, r2, #4
	ldr	r4, assoc_mask
	/* find maximum number of the way size */
	ands	r4, r4, r1, lsr #3
	/* find bit position of way size increment */
	clz	r5, r4
	ldr	r7, numset_mask
	/* extract max number of the index size */
	ands	r7, r7, r1, lsr #13
loop2:
	/* create working copy of max way size */
	mov	r9, r4
loop3:
	/* factor way and cache number into r11 */
	orr	r11, r10, r9, lsl r5
	/* factor index number into r11 */
	orr	r11, r11, r7, lsl r2
	/* clean by set/way */
	mcr	p15, 0, r11, c7, c10, 2
	/* decrement the way */
	subs	r9, r9, #1
	bge	loop3
	/* decrement the index */
	subs	r7, r7, #1
	bge	loop2
skip:
	add	r10, r10, #2
	/* increment cache number */
	cmp	r3, r10
	bgt	loop1
finished:
	/* switch back to cache level 0 */
	mov	r10, #0
	/* select current cache level in cssr */
	mcr	p15, 2, r10, c0, c0, 0
	isb
skip_l2_inval:
	/* Data memory barrier and Data sync barrier */
	mov	r1, #0
	mcr	p15, 0, r1, c7, c10, 4
	mcr	p15, 0, r1, c7, c10, 5

	wfi				@ wait for interrupt
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	bl	i_dll_wait
	/* restore regs and return */
	ldmfd	sp!, {r0-r12, pc}

i_dll_wait:
	ldr	r4, clk_stabilize_delay

i_dll_delay:
	subs	r4, r4, #0x1
	bne	i_dll_delay
	ldr	r4, sdrc_power
	ldr	r5, [r4]
	bic	r5, r5, #0x40
	str	r5, [r4]
	bx	lr
pm_prepwstst_core:
	.word	PM_PREPWSTST_CORE_V
pm_prepwstst_core_p:
	.word	PM_PREPWSTST_CORE_P
pm_prepwstst_mpu:
	.word	PM_PREPWSTST_MPU_V
pm_pwstctrl_mpu:
	.word	PM_PWSTCTRL_MPU_P
scratchpad_base:
	.word	SCRATCHPAD_BASE_P
sram_base:
	.word	SRAM_BASE_P + 0x8000
sdrc_power:
	.word	SDRC_POWER_V
clk_stabilize_delay:
	.word	0x000001FF
assoc_mask:
	.word	0x3ff
numset_mask:
	.word	0x7fff
ttbrbit_mask:
	.word	0xFFFFC000
table_index_mask:
	.word	0xFFF00000
table_entry:
	.word	0x00000C02
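	/* 0xC02: first-level section descriptor - 1 MB section, AP = 0b11
	 * (full access), domain 0, non-cacheable; used for the temporary
	 * identity mapping set up in usettbr0 */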
cache_pred_disable_mask:
	.word	0xFFFFE7FB
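	/* mask clears SCTLR.I (bit 12), SCTLR.Z (bit 11) and SCTLR.C (bit 2)
	 * so caches and branch prediction stay off until the original page
	 * table entry has been restored */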
control_stat:
	.word	CONTROL_STAT
ENTRY(omap34xx_cpu_suspend_sz)
	.word	. - omap34xx_cpu_suspend