/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Low level suspend code for AM43XX SoCs
 *
 * Copyright (C) 2013-2018 Texas Instruments Incorporated - http://www.ti.com/
 *	Dave Gerlach, Vaibhav Bedia
 */

#include <linux/linkage.h>
#include <linux/ti-emif-sram.h>

#include <asm/assembler.h>
#include <asm/hardware/cache-l2x0.h>
#include <asm/memory.h>

#include "cm33xx.h"
#include "common.h"
#include "iomap.h"
#include "omap-secure.h"
#include "omap44xx.h"
#include "prm33xx.h"
#include "prcm43xx.h"

#define AM33XX_CM_CLKCTRL_MODULESTATE_DISABLED		0x00030000
#define AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE		0x0003
#define AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE		0x0002

#define AM43XX_EMIF_POWEROFF_ENABLE			0x1
#define AM43XX_EMIF_POWEROFF_DISABLE			0x0

#define AM43XX_CM_CLKSTCTRL_CLKTRCTRL_SW_SLEEP		0x1
#define AM43XX_CM_CLKSTCTRL_CLKTRCTRL_HW_AUTO		0x3

#define AM43XX_CM_BASE					0x44DF0000

#define AM43XX_CM_REGADDR(inst, reg)				\
	AM33XX_L4_WK_IO_ADDRESS(AM43XX_CM_BASE + (inst) + (reg))

#define AM43XX_CM_MPU_CLKSTCTRL AM43XX_CM_REGADDR(AM43XX_CM_MPU_INST, \
					AM43XX_CM_MPU_MPU_CDOFFS)
#define AM43XX_CM_MPU_MPU_CLKCTRL AM43XX_CM_REGADDR(AM43XX_CM_MPU_INST, \
					AM43XX_CM_MPU_MPU_CLKCTRL_OFFSET)
#define AM43XX_CM_PER_EMIF_CLKCTRL  AM43XX_CM_REGADDR(AM43XX_CM_PER_INST, \
					AM43XX_CM_PER_EMIF_CLKCTRL_OFFSET)
#define AM43XX_PRM_EMIF_CTRL_OFFSET			0x0030

	.arm
	.align 3

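/*
 * am43xx_do_wfi: executed from a copy in internal SRAM, since the EMIF
 * is disabled and DDR is in self-refresh while the MPU is in WFI.
 */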
ENTRY(am43xx_do_wfi)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack

	/* Retrieve l2 cache virt address BEFORE we shut off EMIF */
	ldr	r1, get_l2cache_base
	blx	r1
	mov	r8, r0

	/*
	 * Flush all data from the L1 and L2 data cache before disabling
	 * SCTLR.C bit.
	 */
	ldr	r1, kernel_flush
	blx	r1

	/*
	 * Clear the SCTLR.C bit to prevent further data cache
	 * allocation. With SCTLR.C cleared, all data accesses become
	 * strongly ordered and no longer hit the cache.
	 */
	mrc	p15, 0, r0, c1, c0, 0
	bic	r0, r0, #(1 << 2)	@ Disable the C bit
	mcr	p15, 0, r0, c1, c0, 0
	isb
	dsb

	/*
	 * Clean and invalidate the L1 data cache again now that
	 * SCTLR.C is clear; the outer L2 (PL310) is handled separately
	 * below.
	 */
	ldr	r1, kernel_flush
	blx	r1

#ifdef CONFIG_CACHE_L2X0
	/*
	 * Clean and invalidate the L2 cache.
	 */
#ifdef CONFIG_PL310_ERRATA_727915
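	/*
	 * Errata 727915 workaround: set the PL310 debug control
	 * register to 0x3 through the secure monitor (disables
	 * write-back and line fills) around the clean and invalidate
	 * by way operation; it is cleared again once that completes.
	 */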
	mov	r0, #0x03
	mov	r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
	dsb
	smc	#0
	dsb
#endif
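	/*
	 * Save the L2 AUX_CTRL and PREFETCH_CTRL values to SRAM so they
	 * can be restored on resume from deep sleep.
	 */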
	mov	r0, r8
	adr	r4, am43xx_pm_ro_sram_data
	ldr	r3, [r4, #AMX3_PM_RO_SRAM_DATA_VIRT_OFFSET]

	mov	r2, r0
	ldr	r0, [r2, #L2X0_AUX_CTRL]
	str	r0, [r3, #AMX3_PM_L2_AUX_CTRL_VAL_OFFSET]
	ldr	r0, [r2, #L310_PREFETCH_CTRL]
	str	r0, [r3, #AMX3_PM_L2_PREFETCH_CTRL_VAL_OFFSET]

	ldr	r0, l2_val
	str	r0, [r2, #L2X0_CLEAN_INV_WAY]
wait:
	ldr	r0, [r2, #L2X0_CLEAN_INV_WAY]
	ldr	r1, l2_val
	ands	r0, r0, r1
	bne	wait
#ifdef CONFIG_PL310_ERRATA_727915
	mov	r0, #0x00
	mov	r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
	dsb
	smc	#0
	dsb
#endif
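	/* Issue a PL310 cache sync and wait for the write buffers to drain */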
l2x_sync:
	mov	r0, r8
	mov	r2, r0
	mov	r0, #0x0
	str	r0, [r2, #L2X0_CACHE_SYNC]
sync:
	ldr	r0, [r2, #L2X0_CACHE_SYNC]
	ands	r0, r0, #0x1
	bne	sync
#endif

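	/*
	 * Call the EMIF PM functions copied to SRAM: put DDR into
	 * self-refresh and save the EMIF context before the EMIF
	 * module is disabled.
	 */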
	adr	r9, am43xx_emif_sram_table

	ldr	r3, [r9, #EMIF_PM_ENTER_SR_OFFSET]
	blx	r3

	ldr	r3, [r9, #EMIF_PM_SAVE_CONTEXT_OFFSET]
	blx	r3

	/* Disable EMIF */
	ldr	r1, am43xx_virt_emif_clkctrl
	ldr	r2, [r1]
	bic	r2, r2, #AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE
	str	r2, [r1]

wait_emif_disable:
	ldr	r2, [r1]
	mov	r3, #AM33XX_CM_CLKCTRL_MODULESTATE_DISABLED
	cmp	r2, r3
	bne	wait_emif_disable

	/*
	 * For the MPU WFI to be registered as an interrupt
	 * to WKUP_M3, MPU_CLKCTRL.MODULEMODE needs to be set
	 * to DISABLED
	 */
	ldr	r1, am43xx_virt_mpu_clkctrl
	ldr	r2, [r1]
	bic	r2, r2, #AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE
	str	r2, [r1]

	/*
	 * Put MPU CLKDM to SW_SLEEP
	 */
	ldr	r1, am43xx_virt_mpu_clkstctrl
	mov	r2, #AM43XX_CM_CLKSTCTRL_CLKTRCTRL_SW_SLEEP
	str	r2, [r1]

	/*
	 * Execute a barrier instruction to ensure that all cache,
	 * TLB and branch predictor maintenance operations issued
	 * have completed.
	 */
	dsb
	dmb

	/*
	 * Execute a WFI instruction and wait until the
	 * STANDBYWFI output is asserted to indicate that the
	 * CPU is in an idle and low power state. The CPU can
	 * speculatively prefetch instructions, so add NOPs after the
	 * WFI. Sixteen NOPs as per the Cortex-A9 pipeline.
	 */
	wfi

	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop

	/* We come here in case of an abort due to a late interrupt */
	ldr	r1, am43xx_virt_mpu_clkstctrl
	mov	r2, #AM43XX_CM_CLKSTCTRL_CLKTRCTRL_HW_AUTO
	str	r2, [r1]

	/* Set MPU_CLKCTRL.MODULEMODE back to ENABLE */
	ldr	r1, am43xx_virt_mpu_clkctrl
	mov	r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r2, [r1]

	/* Re-enable EMIF */
	ldr	r1, am43xx_virt_emif_clkctrl
	mov	r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r2, [r1]
wait_emif_enable:
	ldr	r3, [r1]
	cmp	r2, r3
	bne	wait_emif_enable

	/*
	 * Set SCTLR.C bit to allow data cache allocation
	 */
	mrc	p15, 0, r0, c1, c0, 0
	orr	r0, r0, #(1 << 2)	@ Enable the C bit
	mcr	p15, 0, r0, c1, c0, 0
	isb

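	/* Take DDR back out of self-refresh since the suspend was aborted */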
	ldr	r1, [r9, #EMIF_PM_ABORT_SR_OFFSET]
	blx	r1

	/* Let the suspend code know about the abort */
	mov	r0, #1
	ldmfd	sp!, {r4 - r11, pc}	@ restore regs and return
ENDPROC(am43xx_do_wfi)

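/*
 * Offset of the resume entry point from am43xx_do_wfi, used by the
 * platform PM code to locate am43xx_resume_from_deep_sleep within the
 * SRAM copy of this code.
 */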
	.align
ENTRY(am43xx_resume_offset)
	.word . - am43xx_do_wfi

ENTRY(am43xx_resume_from_deep_sleep)
	/* Set MPU CLKSTCTRL to HW AUTO so that CPUidle works properly */
	ldr	r1, am43xx_virt_mpu_clkstctrl
	mov	r2, #AM43XX_CM_CLKSTCTRL_CLKTRCTRL_HW_AUTO
	str	r2, [r1]

	/* For AM43xx, use EMIF power down until context is restored */
	ldr	r2, am43xx_phys_emif_poweroff
	mov	r1, #AM43XX_EMIF_POWEROFF_ENABLE
	str	r1, [r2, #0x0]

	/* Re-enable EMIF */
	ldr	r1, am43xx_phys_emif_clkctrl
	mov	r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r2, [r1]
wait_emif_enable1:
	ldr	r3, [r1]
	cmp	r2, r3
	bne	wait_emif_enable1

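	/* Restore the EMIF context and take DDR back out of self-refresh */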
	adr	r9, am43xx_emif_sram_table

	ldr	r1, [r9, #EMIF_PM_RESTORE_CONTEXT_OFFSET]
	blx	r1

	ldr	r1, [r9, #EMIF_PM_EXIT_SR_OFFSET]
	blx	r1

	ldr	r2, am43xx_phys_emif_poweroff
	mov	r1, #AM43XX_EMIF_POWEROFF_DISABLE
	str	r1, [r2, #0x0]

#ifdef CONFIG_CACHE_L2X0
	ldr	r2, l2_cache_base
	ldr	r0, [r2, #L2X0_CTRL]
	and	r0, #0x0f
	cmp	r0, #1
	beq	skip_l2en			@ Skip if already enabled

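	/*
	 * Restore the saved L2 prefetch control value through the
	 * secure monitor, reading it back from SRAM via its physical
	 * address since the MMU is still off at this point.
	 */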
	adr	r4, am43xx_pm_ro_sram_data
	ldr	r3, [r4, #AMX3_PM_RO_SRAM_DATA_PHYS_OFFSET]
	ldr	r0, [r3, #AMX3_PM_L2_PREFETCH_CTRL_VAL_OFFSET]

	ldr	r12, l2_smc1
	dsb
	smc	#0
	dsb
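	/* Restore the saved L2 AUX_CTRL value through the secure monitor */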
set_aux_ctrl:
	ldr	r0, [r3, #AMX3_PM_L2_AUX_CTRL_VAL_OFFSET]
	ldr	r12, l2_smc2
	dsb
	smc	#0
	dsb

	/* L2 invalidate on resume */
	ldr	r0, l2_val
	ldr	r2, l2_cache_base
	str	r0, [r2, #L2X0_INV_WAY]
wait2:
	ldr	r0, [r2, #L2X0_INV_WAY]
	ldr	r1, l2_val
	ands	r0, r0, r1
	bne	wait2
#ifdef CONFIG_PL310_ERRATA_727915
	mov	r0, #0x00
	mov	r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
	dsb
	smc	#0
	dsb
#endif
l2x_sync2:
	ldr	r2, l2_cache_base
	mov	r0, #0x0
	str	r0, [r2, #L2X0_CACHE_SYNC]
sync2:
	ldr	r0, [r2, #L2X0_CACHE_SYNC]
	ands	r0, r0, #0x1
	bne	sync2

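	/* Enable the L2 cache controller through the secure monitor */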
	mov	r0, #0x1
	ldr	r12, l2_smc3
	dsb
	smc	#0
	dsb
#endif
skip_l2en:
	/* We are back. Branch to the common CPU resume routine */
	mov	r0, #0
	ldr	pc, resume_addr
ENDPROC(am43xx_resume_from_deep_sleep)

/*
 * Local variables
 */
	.align
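/*
 * Physical address of cpu_resume: the kernel virtual address converted
 * to physical assuming DDR at 0x80000000, branched to with the MMU off.
 */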
resume_addr:
	.word	cpu_resume - PAGE_OFFSET + 0x80000000
get_l2cache_base:
	.word	omap4_get_l2cache_base
kernel_flush:
	.word	v7_flush_dcache_all
ddr_start:
	.word	PAGE_OFFSET

am43xx_phys_emif_poweroff:
	.word	(AM43XX_CM_BASE + AM43XX_PRM_DEVICE_INST + \
		 AM43XX_PRM_EMIF_CTRL_OFFSET)
am43xx_virt_mpu_clkstctrl:
	.word	(AM43XX_CM_MPU_CLKSTCTRL)
am43xx_virt_mpu_clkctrl:
	.word	(AM43XX_CM_MPU_MPU_CLKCTRL)
am43xx_virt_emif_clkctrl:
	.word	(AM43XX_CM_PER_EMIF_CLKCTRL)
am43xx_phys_emif_clkctrl:
	.word	(AM43XX_CM_BASE + AM43XX_CM_PER_INST + \
		 AM43XX_CM_PER_EMIF_CLKCTRL_OFFSET)

/* L2 cache related defines for AM437x */
l2_cache_base:
	.word	OMAP44XX_L2CACHE_BASE
l2_smc1:
	.word	OMAP4_MON_L2X0_PREFETCH_INDEX
l2_smc2:
	.word	OMAP4_MON_L2X0_AUXCTRL_INDEX
l2_smc3:
	.word	OMAP4_MON_L2X0_CTRL_INDEX
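/* Way mask covering all 16 ways of the PL310 */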
l2_val:
	.word	0xffff

.align 3
/* DDR related defines */
ENTRY(am43xx_emif_sram_table)
	.space EMIF_PM_FUNCTIONS_SIZE

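/*
 * Table of addresses used by the platform PM code to copy this routine
 * and the EMIF PM functions into SRAM and to locate the data above.
 */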
ENTRY(am43xx_pm_sram)
	.word am43xx_do_wfi
	.word am43xx_do_wfi_sz
	.word am43xx_resume_offset
	.word am43xx_emif_sram_table
	.word am43xx_pm_ro_sram_data

.align 3

ENTRY(am43xx_pm_ro_sram_data)
	.space AMX3_PM_RO_SRAM_DATA_SIZE

ENTRY(am43xx_do_wfi_sz)
	.word	. - am43xx_do_wfi