/*
 * linux/arch/arm/mach-omap2/sleep.S
 *
 * (C) Copyright 2007
 * Texas Instruments
 * Karthik Dasu <karthik-dp@ti.com>
 *
 * (C) Copyright 2004
 * Texas Instruments, <www.ti.com>
 * Richard Woodruff <r-woodruff2@ti.com>
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */
#include <linux/linkage.h>
#include <asm/assembler.h>
#include <mach/io.h>
#include <plat/control.h>

#include "cm.h"
#include "prm.h"
#include "sdrc.h"

#define SDRC_SCRATCHPAD_SEM_V	0xfa00291c

#define PM_PREPWSTST_CORE_V	OMAP34XX_PRM_REGADDR(CORE_MOD, \
				OMAP3430_PM_PREPWSTST)
#define PM_PREPWSTST_CORE_P	0x48306AE8
#define PM_PREPWSTST_MPU_V	OMAP34XX_PRM_REGADDR(MPU_MOD, \
				OMAP3430_PM_PREPWSTST)
#define PM_PWSTCTRL_MPU_P	OMAP3430_PRM_BASE + MPU_MOD + OMAP2_PM_PWSTCTRL
#define CM_IDLEST1_CORE_V	OMAP34XX_CM_REGADDR(CORE_MOD, CM_IDLEST1)
#define SRAM_BASE_P		0x40200000
#define CONTROL_STAT		0x480022F0
#define SCRATCHPAD_MEM_OFFS	0x310 /* Move this as correct place is
				       * available */
#define SCRATCHPAD_BASE_P	(OMAP343X_CTRL_BASE + OMAP343X_CONTROL_MEM_WKUP\
				+ SCRATCHPAD_MEM_OFFS)
#define SDRC_POWER_V		OMAP34XX_SDRC_REGADDR(SDRC_POWER)
#define SDRC_SYSCONFIG_P	(OMAP343X_SDRC_BASE + SDRC_SYSCONFIG)
#define SDRC_MR_0_P		(OMAP343X_SDRC_BASE + SDRC_MR_0)
#define SDRC_EMR2_0_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_0)
#define SDRC_MANUAL_0_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_0)
#define SDRC_MR_1_P		(OMAP343X_SDRC_BASE + SDRC_MR_1)
#define SDRC_EMR2_1_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_1)
#define SDRC_MANUAL_1_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_1)
#define SDRC_DLLA_STATUS_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_STATUS)
#define SDRC_DLLA_CTRL_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_CTRL)
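
/*
 * Naming note: the *_V macros above are virtual register addresses (via
 * the OMAP34XX_*_REGADDR helpers) for use while the MMU is on, while the
 * *_P macros are raw physical addresses, mainly for the code paths below
 * that run from SRAM with the MMU off (the off-mode restore path and the
 * ES3 SDRC fix).
 */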

	.text
/* Function to acquire the semaphore in scratchpad */
ENTRY(lock_scratchpad_sem)
	stmfd	sp!, {lr}	@ save registers on stack
wait_sem:
	mov	r0, #1
	ldr	r1, sdrc_scratchpad_sem
wait_loop:
	ldr	r2, [r1]	@ load the lock value
	cmp	r2, r0		@ is the lock free ?
	beq	wait_loop	@ not free...
	swp	r2, r0, [r1]	@ semaphore free so lock it and proceed
	cmp	r2, r0		@ did we succeed ?
	beq	wait_sem	@ no - try again
	ldmfd	sp!, {pc}	@ restore regs and return
sdrc_scratchpad_sem:
	.word	SDRC_SCRATCHPAD_SEM_V
ENTRY(lock_scratchpad_sem_sz)
	.word	. - lock_scratchpad_sem

	.text
/* Function to release the scratchpad semaphore */
ENTRY(unlock_scratchpad_sem)
	stmfd	sp!, {lr}	@ save registers on stack
	ldr	r3, sdrc_scratchpad_sem
	mov	r2, #0
	str	r2, [r3]
	ldmfd	sp!, {pc}	@ restore regs and return
ENTRY(unlock_scratchpad_sem_sz)
	.word	. - unlock_scratchpad_sem

	.text
/* Function call to get the restore pointer for resume from OFF */
ENTRY(get_restore_pointer)
	stmfd	sp!, {lr}	@ save registers on stack
	adr	r0, restore
	ldmfd	sp!, {pc}	@ restore regs and return
ENTRY(get_restore_pointer_sz)
	.word	. - get_restore_pointer

	.text
/* Function call to get the restore pointer for ES3 to resume from OFF */
ENTRY(get_es3_restore_pointer)
	stmfd	sp!, {lr}	@ save registers on stack
	adr	r0, restore_es3
	ldmfd	sp!, {pc}	@ restore regs and return
ENTRY(get_es3_restore_pointer_sz)
	.word	. - get_es3_restore_pointer
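
/*
 * es3_sdrc_fix below is used on the ES3.x off-mode restore path (see
 * restore_es3): it is copied to SRAM and run with the MMU off.  It tests
 * and writes back a bit in SDRC_SYSCONFIG, rewrites the MR/EMR2 registers
 * of both chip-selects and issues a manual autorefresh to each, presumably
 * to work around an SDRC wakeup problem on those silicon revisions.
 */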
ENTRY(es3_sdrc_fix)
	ldr	r4, sdrc_syscfg		@ get config addr
	ldr	r5, [r4]		@ get value
	tst	r5, #0x100		@ is part access blocked
	it	eq
	biceq	r5, r5, #0x100		@ clear bit if set
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_mr_0		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_emr2_0		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_manual_0	@ get config addr
	mov	r5, #0x2		@ autorefresh command
	str	r5, [r4]		@ kick off refreshes
	ldr	r4, sdrc_mr_1		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_emr2_1		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_manual_1	@ get config addr
	mov	r5, #0x2		@ autorefresh command
	str	r5, [r4]		@ kick off refreshes
	bx	lr
sdrc_syscfg:
	.word	SDRC_SYSCONFIG_P
sdrc_mr_0:
	.word	SDRC_MR_0_P
sdrc_emr2_0:
	.word	SDRC_EMR2_0_P
sdrc_manual_0:
	.word	SDRC_MANUAL_0_P
sdrc_mr_1:
	.word	SDRC_MR_1_P
sdrc_emr2_1:
	.word	SDRC_EMR2_1_P
sdrc_manual_1:
	.word	SDRC_MANUAL_1_P
ENTRY(es3_sdrc_fix_sz)
	.word	. - es3_sdrc_fix
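
/*
 * save_secure_ram_context is entered with r0 holding the SDRAM address
 * that receives the secure RAM contents; it requests secure service 25
 * from the ROM/PPA code via the SMI instruction below.  The _sz symbol
 * suggests that the PM code copies this routine to SRAM before calling it
 * on HS/EMU devices, but that is an inference about the callers, not
 * something this file defines.
 */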
/* Function to call rom code to save secure ram context */
ENTRY(save_secure_ram_context)
	stmfd	sp!, {r1-r12, lr}	@ save registers on stack
save_secure_ram_debug:
	/* b save_secure_ram_debug */	@ enable to debug save code
	adr	r3, api_params		@ r3 points to parameters
	str	r0, [r3,#0x4]		@ r0 has sdram address
	ldr	r12, high_mask
	and	r3, r3, r12
	ldr	r12, sram_phy_addr_mask
	orr	r3, r3, r12
	mov	r0, #25			@ set service ID for PPA
	mov	r12, r0			@ copy secure service ID in r12
	mov	r1, #0			@ set task id for ROM code in r1
	mov	r2, #4			@ set some flags in r2, r6
	mov	r6, #0xff
	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
	.word	0xE1600071		@ call SMI monitor (smi #1)
	nop
	nop
	nop
	nop
	ldmfd	sp!, {r1-r12, pc}
sram_phy_addr_mask:
	.word	SRAM_BASE_P
high_mask:
	.word	0xffff
api_params:
	.word	0x4, 0x0, 0x0, 0x1, 0x1
ENTRY(save_secure_ram_context_sz)
	.word	. - save_secure_ram_context
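
/*
 * The suspend entry point below is reached with r0 = restore pointer in
 * SDRAM and r1 = context-save flag.  The matching C-side declaration in
 * the mach-omap2 PM code is assumed to be roughly
 *	void omap34xx_cpu_suspend(u32 *addr, int save_state);
 * -- an inference from the register usage here, not a prototype defined
 * in this file.
 */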
/*
 * Forces OMAP into idle state
 *
 * omap34xx_suspend() - This bit of code just executes the WFI
 * for normal idles.
 *
 * Note: This code gets copied to internal SRAM at boot.  When the OMAP
 * wakes up it continues execution at the point it went to sleep.
 */
ENTRY(omap34xx_cpu_suspend)
	stmfd	sp!, {r0-r12, lr}	@ save registers on stack
loop:
	/*b	loop*/		@ Enable to debug by stepping through code
	/* r0 contains restore pointer in sdram */
	/* r1 contains information about saving context */
	ldr	r4, sdrc_power		@ read the SDRC_POWER register
	ldr	r5, [r4]		@ read the contents of SDRC_POWER
	orr	r5, r5, #0x40		@ enable self refresh on idle req
	str	r5, [r4]		@ write back to SDRC_POWER register

	cmp	r1, #0x0
	/* If context save is required, do that and execute wfi */
	bne	save_context_wfi
	/* Data memory barrier and Data sync barrier */
	mov	r1, #0
	mcr	p15, 0, r1, c7, c10, 4
	mcr	p15, 0, r1, c7, c10, 5

	wfi				@ wait for interrupt

	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	bl	wait_sdrc_ok

	ldmfd	sp!, {r0-r12, pc}	@ restore regs and return
restore_es3:
	/*b	restore_es3*/		@ Enable to debug restore code
	ldr	r5, pm_prepwstst_core_p
	ldr	r4, [r5]
	and	r4, r4, #0x3
	cmp	r4, #0x0	@ Check if previous power state of CORE is OFF
	bne	restore
	adr	r0, es3_sdrc_fix
	ldr	r1, sram_base
	ldr	r2, es3_sdrc_fix_sz
	mov	r2, r2, ror #2
copy_to_sram:
	ldmia	r0!, {r3}	@ val = *src
	stmia	r1!, {r3}	@ *dst = val
	subs	r2, r2, #0x1	@ num_words--
	bne	copy_to_sram
	ldr	r1, sram_base
	blx	r1
restore:
	/* b restore*/		@ Enable to debug restore code
	/* Check what was the reason for mpu reset and store the reason in r9 */
	/* 1 - Only L1 and logic lost */
	/* 2 - Only L2 lost - In this case, we won't be here */
	/* 3 - Both L1 and L2 lost */
	ldr	r1, pm_pwstctrl_mpu
	ldr	r2, [r1]
	and	r2, r2, #0x3
	cmp	r2, #0x0	@ Check if target power state was OFF or RET
	moveq	r9, #0x3	@ MPU OFF => L1 and L2 lost
	movne	r9, #0x1	@ Only L1 and L2 lost => avoid L2 invalidation
	bne	logic_l1_restore
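	/*
	 * CONTROL_STATUS bits [10:8] hold the device type: 0x300 means a GP
	 * device, which takes the generic SMI L2-invalidate path at
	 * l2_inv_gp; HS/EMU devices go through the secure PPA services
	 * below instead.
	 */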
	ldr	r0, control_stat
	ldr	r1, [r0]
	and	r1, #0x700
	cmp	r1, #0x300
	beq	l2_inv_gp
	mov	r0, #40			@ set service ID for PPA
	mov	r12, r0			@ copy secure Service ID in r12
	mov	r1, #0			@ set task id for ROM code in r1
	mov	r2, #4			@ set some flags in r2, r6
	mov	r6, #0xff
	adr	r3, l2_inv_api_params	@ r3 points to dummy parameters
	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
	.word	0xE1600071		@ call SMI monitor (smi #1)
	/* Write to Aux control register to set some bits */
	mov	r0, #42			@ set service ID for PPA
	mov	r12, r0			@ copy secure Service ID in r12
	mov	r1, #0			@ set task id for ROM code in r1
	mov	r2, #4			@ set some flags in r2, r6
	mov	r6, #0xff
	ldr	r4, scratchpad_base
	ldr	r3, [r4, #0xBC]		@ r3 points to parameters
	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
	.word	0xE1600071		@ call SMI monitor (smi #1)

	b	logic_l1_restore
l2_inv_api_params:
	.word	0x1, 0x00
l2_inv_gp:
	/* Execute smi to invalidate L2 cache */
	mov	r12, #0x1		@ set up to invalidate L2
smi:	.word	0xE1600070		@ Call SMI monitor (smieq)
	/* Write to Aux control register to set some bits */
	ldr	r4, scratchpad_base
	ldr	r3, [r4,#0xBC]
	ldr	r0, [r3,#4]
	mov	r12, #0x3
	.word	0xE1600070		@ Call SMI monitor (smieq)
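/*
 * logic_l1_restore: invalidate the I-cache and branch predictor, then
 * reload the banked registers and CP15 state that were saved to the
 * area whose address is kept in the scratchpad at offset 0xBC.
 */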
logic_l1_restore:
	mov	r1, #0
	/* Invalidate all instruction caches to PoU
	 * and flush branch target cache */
	mcr	p15, 0, r1, c7, c5, 0

	ldr	r4, scratchpad_base
	ldr	r3, [r4,#0xBC]
	adds	r3, r3, #8
	ldmia	r3!, {r4-r6}
	mov	sp, r4
	msr	spsr_cxsf, r5
	mov	lr, r6

	ldmia	r3!, {r4-r9}
	/* Coprocessor access Control Register */
	mcr	p15, 0, r4, c1, c0, 2

	/* TTBR0 */
	MCR	p15, 0, r5, c2, c0, 0
	/* TTBR1 */
	MCR	p15, 0, r6, c2, c0, 1
	/* Translation table base control register */
	MCR	p15, 0, r7, c2, c0, 2
	/* Domain access Control Register */
	MCR	p15, 0, r8, c3, c0, 0
	/* Data fault status Register */
	MCR	p15, 0, r9, c5, c0, 0

	ldmia	r3!, {r4-r8}
	/* Instruction fault status Register */
	MCR	p15, 0, r4, c5, c0, 1
	/* Data Auxiliary Fault Status Register */
	MCR	p15, 0, r5, c5, c1, 0
	/* Instruction Auxiliary Fault Status Register */
	MCR	p15, 0, r6, c5, c1, 1
	/* Data Fault Address Register */
	MCR	p15, 0, r7, c6, c0, 0
	/* Instruction Fault Address Register */
	MCR	p15, 0, r8, c6, c0, 2
	ldmia	r3!, {r4-r7}

	/* User r/w thread and process ID */
	MCR	p15, 0, r4, c13, c0, 2
	/* User ro thread and process ID */
	MCR	p15, 0, r5, c13, c0, 3
	/* Privileged only thread and process ID */
	MCR	p15, 0, r6, c13, c0, 4
	/* Cache size selection */
	MCR	p15, 2, r7, c0, c0, 0
	ldmia	r3!, {r4-r8}
	/* Data TLB lockdown registers */
	MCR	p15, 0, r4, c10, c0, 0
	/* Instruction TLB lockdown registers */
	MCR	p15, 0, r5, c10, c0, 1
	/* Secure or Nonsecure Vector Base Address */
	MCR	p15, 0, r6, c12, c0, 0
	/* FCSE PID */
	MCR	p15, 0, r7, c13, c0, 0
	/* Context PID */
	MCR	p15, 0, r8, c13, c0, 1

	ldmia	r3!, {r4-r5}
	/* Primary memory remap register */
	MCR	p15, 0, r4, c10, c2, 0
	/* Normal memory remap register */
	MCR	p15, 0, r5, c10, c2, 1

	/* Restore cpsr */
	ldmia	r3!, {r4}	/* load CPSR from SDRAM */
	msr	cpsr, r4	/* store cpsr */

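	/*
	 * The MMU is about to be re-enabled while executing from physical
	 * addresses, so the code below temporarily rewrites the first-level
	 * descriptor covering the current PC with a 1:1 section mapping
	 * (table_entry).  The original descriptor and its address are
	 * stashed in the scratchpad (offsets 0xC0/0xC4) so they can be put
	 * back later -- presumably by the C-side restore code, which lives
	 * outside this file.
	 */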
	/* Enabling MMU here */
	mrc	p15, 0, r7, c2, c0, 2	/* Read TTBRControl */
	/* Extract N (0:2) bits and decide whether to use TTBR0 or TTBR1 */
	and	r7, #0x7
	cmp	r7, #0x0
	beq	usettbr0
ttbr_error:
	/* More work needs to be done to support N[0:2] value other than 0
	 * So looping here so that the error can be detected
	 */
	b	ttbr_error
usettbr0:
	mrc	p15, 0, r2, c2, c0, 0
	ldr	r5, ttbrbit_mask
	and	r2, r5
	mov	r4, pc
	ldr	r5, table_index_mask
	and	r4, r5		/* r4 = 31 to 20 bits of pc */
	/* Extract the value to be written to table entry */
	ldr	r1, table_entry
	add	r1, r1, r4	/* r1 has value to be written to table entry */
	/* Getting the address of table entry to modify */
	lsr	r4, #18
	add	r2, r4		/* r2 has the location which needs to be modified */
	/* Storing previous entry of location being modified */
	ldr	r5, scratchpad_base
	ldr	r4, [r2]
	str	r4, [r5, #0xC0]
	/* Modify the table entry */
	str	r1, [r2]
	/* Storing address of entry being modified
	 * - will be restored after enabling MMU */
	ldr	r5, scratchpad_base
	str	r2, [r5, #0xC4]

	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 4	@ Flush prefetch buffer
	mcr	p15, 0, r0, c7, c5, 6	@ Invalidate branch predictor array
	mcr	p15, 0, r0, c8, c5, 0	@ Invalidate instruction TLB
	mcr	p15, 0, r0, c8, c6, 0	@ Invalidate data TLB
	/* Restore control register, but don't enable caches here */
	/* Caches will be enabled after restoring MMU table entry */
	ldmia	r3!, {r4}
	/* Store previous value of control register in scratchpad */
	str	r4, [r5, #0xC8]
	ldr	r2, cache_pred_disable_mask
	and	r4, r2
	mcr	p15, 0, r4, c1, c0, 0

	ldmfd	sp!, {r0-r12, pc}	@ restore regs and return
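/*
 * save_context_wfi: r0 holds the SDRAM save-area address and r1 the
 * target sleep state.  The current sp, spsr and lr and the CP15 state
 * are written out to the save area, the caches are cleaned as the
 * target state requires, and then WFI is executed.
 */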
save_context_wfi:
	/*b	save_context_wfi*/	@ enable to debug save code
	mov	r8, r0			/* Store SDRAM address in r8 */
	mrc	p15, 0, r5, c1, c0, 1	@ Read Auxiliary Control Register
	mov	r4, #0x1		@ Number of parameters for restore call
	stmia	r8!, {r4-r5}
	/* Check what the target sleep state is: stored in r1 */
	/* 1 - Only L1 and logic lost */
	/* 2 - Only L2 lost */
	/* 3 - Both L1 and L2 lost */
	cmp	r1, #0x2		/* Only L2 lost */
	beq	clean_l2
	cmp	r1, #0x1		/* L2 retained */
	/* r9 stores whether to clean L2 or not */
	moveq	r9, #0x0		/* Don't clean L2 */
	movne	r9, #0x1		/* Clean L2 */
l1_logic_lost:
	/* Store sp and spsr to SDRAM */
	mov	r4, sp
	mrs	r5, spsr
	mov	r6, lr
	stmia	r8!, {r4-r6}
	/* Save all ARM registers */
	/* Coprocessor access control register */
	mrc	p15, 0, r6, c1, c0, 2
	stmia	r8!, {r6}
	/* TTBR0, TTBR1 and Translation table base control */
	mrc	p15, 0, r4, c2, c0, 0
	mrc	p15, 0, r5, c2, c0, 1
	mrc	p15, 0, r6, c2, c0, 2
	stmia	r8!, {r4-r6}
	/* Domain access control register, data fault status register,
	   and instruction fault status register */
	mrc	p15, 0, r4, c3, c0, 0
	mrc	p15, 0, r5, c5, c0, 0
	mrc	p15, 0, r6, c5, c0, 1
	stmia	r8!, {r4-r6}
	/* Data aux fault status register, instruction aux fault status,
	   data fault address register and instruction fault address register */
	mrc	p15, 0, r4, c5, c1, 0
	mrc	p15, 0, r5, c5, c1, 1
	mrc	p15, 0, r6, c6, c0, 0
	mrc	p15, 0, r7, c6, c0, 2
	stmia	r8!, {r4-r7}
	/* User r/w thread and process ID, user r/o thread and process ID,
	   priv only thread and process ID, cache size selection */
	mrc	p15, 0, r4, c13, c0, 2
	mrc	p15, 0, r5, c13, c0, 3
	mrc	p15, 0, r6, c13, c0, 4
	mrc	p15, 2, r7, c0, c0, 0
	stmia	r8!, {r4-r7}
	/* Data TLB lockdown, instruction TLB lockdown registers */
	mrc	p15, 0, r5, c10, c0, 0
	mrc	p15, 0, r6, c10, c0, 1
	stmia	r8!, {r5-r6}
	/* Secure or non secure vector base address, FCSE PID, Context PID */
	mrc	p15, 0, r4, c12, c0, 0
	mrc	p15, 0, r5, c13, c0, 0
	mrc	p15, 0, r6, c13, c0, 1
	stmia	r8!, {r4-r6}
	/* Primary remap, normal remap registers */
	mrc	p15, 0, r4, c10, c2, 0
	mrc	p15, 0, r5, c10, c2, 1
	stmia	r8!, {r4-r5}

	/* Store current cpsr */
	mrs	r2, cpsr
	stmia	r8!, {r2}

	mrc	p15, 0, r4, c1, c0, 0
	/* Save control register */
	stmia	r8!, {r4}
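/*
 * clean_caches/clean_l2: the ARMv7 clean-by-set/way walk -- CLIDR is read
 * to find the level of coherency, then every data/unified cache level
 * below it is cleaned set by set and way by way.  r9 (set up above from
 * the target state) decides whether the walk is needed at all.
 */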
clean_caches:
	/* Clean Data or unified cache to PoU */
	/* How to invalidate only L1 cache???? - #FIX_ME# */
	/* mcr	p15, 0, r11, c7, c11, 1 */
	cmp	r9, #1		/* Check whether L2 inval is required or not */
	bne	skip_l2_inval
clean_l2:
	/* Read clidr */
	mrc	p15, 1, r0, c0, c0, 1
	/* Extract loc from clidr */
	ands	r3, r0, #0x7000000
	/* Left align loc bit field */
	mov	r3, r3, lsr #23
	/* If loc is 0, then no need to clean */
	beq	finished
	/* Start clean at cache level 0 */
	mov	r10, #0
loop1:
	/* Work out 3x current cache level */
	add	r2, r10, r10, lsr #1
	/* Extract cache type bits from clidr */
	mov	r1, r0, lsr r2
	/* Mask the bits for current cache only */
	and	r1, r1, #7
	/* See what cache we have at this level */
	cmp	r1, #2
	/* Skip if no cache, or just i-cache */
	blt	skip
	/* Select current cache level in cssr */
	mcr	p15, 2, r10, c0, c0, 0
	/* isb to sync the new cssr & csidr */
	isb
	/* Read the new csidr */
	mrc	p15, 1, r1, c0, c0, 0
	/* Extract the length of the cache lines */
	and	r2, r1, #7
	/* Add 4 (line length offset) */
	add	r2, r2, #4
	ldr	r4, assoc_mask
	/* Find maximum number of the way size */
	ands	r4, r4, r1, lsr #3
	/* Find bit position of way size increment */
	clz	r5, r4
	ldr	r7, numset_mask
	/* Extract max number of the index size */
	ands	r7, r7, r1, lsr #13
loop2:
	/* Create working copy of max way size */
	mov	r9, r4
loop3:
	/* Factor way and cache number into r11 */
	orr	r11, r10, r9, lsl r5
	/* Factor index number into r11 */
	orr	r11, r11, r7, lsl r2
	/* Clean & invalidate by set/way */
	mcr	p15, 0, r11, c7, c10, 2
	/* Decrement the way */
	subs	r9, r9, #1
	bge	loop3
	/* Decrement the index */
	subs	r7, r7, #1
	bge	loop2
skip:
	add	r10, r10, #2
	/* Increment cache number */
	cmp	r3, r10
	bgt	loop1
finished:
	/* Switch back to cache level 0 */
	mov	r10, #0
	/* Select current cache level in cssr */
	mcr	p15, 2, r10, c0, c0, 0
	isb
skip_l2_inval:
	/* Data memory barrier and Data sync barrier */
	mov	r1, #0
	mcr	p15, 0, r1, c7, c10, 4
	mcr	p15, 0, r1, c7, c10, 5

	wfi				@ wait for interrupt
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	bl	wait_sdrc_ok
	/* restore regs and return */
	ldmfd	sp!, {r0-r12, pc}
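
/*
 * wait_sdrc_ok runs after wakeup, before any SDRAM access: it polls
 * CM_IDLEST1_CORE until the SDRC is accessible again, clears the
 * self-refresh-on-idle bit that was set before WFI, and waits for the
 * SDRC DLL to report lock when that is needed.
 */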

/* Make sure SDRC accesses are ok */
wait_sdrc_ok:
	ldr	r4, cm_idlest1_core
	ldr	r5, [r4]
	and	r5, r5, #0x2
	cmp	r5, #0
	bne	wait_sdrc_ok
	ldr	r4, sdrc_power
	ldr	r5, [r4]
	bic	r5, r5, #0x40
	str	r5, [r4]
wait_dll_lock:
	/* Is dll in lock mode? */
	ldr	r4, sdrc_dlla_ctrl
	ldr	r5, [r4]
	tst	r5, #0x4
	bxne	lr
	/* Wait till dll locks */
	ldr	r4, sdrc_dlla_status
	ldr	r5, [r4]
	and	r5, r5, #0x4
	cmp	r5, #0x4
	bne	wait_dll_lock
	bx	lr

cm_idlest1_core:
	.word	CM_IDLEST1_CORE_V
sdrc_dlla_status:
	.word	SDRC_DLLA_STATUS_V
sdrc_dlla_ctrl:
	.word	SDRC_DLLA_CTRL_V
pm_prepwstst_core:
	.word	PM_PREPWSTST_CORE_V
pm_prepwstst_core_p:
	.word	PM_PREPWSTST_CORE_P
pm_prepwstst_mpu:
	.word	PM_PREPWSTST_MPU_V
pm_pwstctrl_mpu:
	.word	PM_PWSTCTRL_MPU_P
scratchpad_base:
	.word	SCRATCHPAD_BASE_P
sram_base:
	.word	SRAM_BASE_P + 0x8000
sdrc_power:
	.word	SDRC_POWER_V
clk_stabilize_delay:
	.word	0x000001FF
assoc_mask:
	.word	0x3ff
numset_mask:
	.word	0x7fff
ttbrbit_mask:
	.word	0xFFFFC000
table_index_mask:
	.word	0xFFF00000
table_entry:
	.word	0x00000C02
cache_pred_disable_mask:
	.word	0xFFFFE7FB
control_stat:
	.word	CONTROL_STAT
ENTRY(omap34xx_cpu_suspend_sz)
	.word	. - omap34xx_cpu_suspend