/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Low level suspend code for AM43XX SoCs
 *
 * Copyright (C) 2013-2018 Texas Instruments Incorporated - http://www.ti.com/
 *	Dave Gerlach, Vaibhav Bedia
 */

#include <generated/ti-pm-asm-offsets.h>
#include <linux/linkage.h>
#include <linux/ti-emif-sram.h>

#include <asm/assembler.h>
#include <asm/hardware/cache-l2x0.h>
#include <asm/memory.h>

#include "cm33xx.h"
#include "common.h"
#include "iomap.h"
#include "omap-secure.h"
#include "omap44xx.h"
#include "prm33xx.h"
#include "prcm43xx.h"

#define AM33XX_CM_CLKCTRL_MODULESTATE_DISABLED		0x00030000
#define AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE		0x0003
#define AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE		0x0002

#define AM43XX_EMIF_POWEROFF_ENABLE			0x1
#define AM43XX_EMIF_POWEROFF_DISABLE			0x0

#define AM43XX_CM_CLKSTCTRL_CLKTRCTRL_SW_SLEEP		0x1
#define AM43XX_CM_CLKSTCTRL_CLKTRCTRL_HW_AUTO		0x3

#define AM43XX_CM_BASE					0x44DF0000

#define AM43XX_CM_REGADDR(inst, reg)			\
	AM33XX_L4_WK_IO_ADDRESS(AM43XX_CM_BASE + (inst) + (reg))

#define AM43XX_CM_MPU_CLKSTCTRL AM43XX_CM_REGADDR(AM43XX_CM_MPU_INST, \
					AM43XX_CM_MPU_MPU_CDOFFS)
#define AM43XX_CM_MPU_MPU_CLKCTRL AM43XX_CM_REGADDR(AM43XX_CM_MPU_INST, \
					AM43XX_CM_MPU_MPU_CLKCTRL_OFFSET)
#define AM43XX_CM_PER_EMIF_CLKCTRL AM43XX_CM_REGADDR(AM43XX_CM_PER_INST, \
					AM43XX_CM_PER_EMIF_CLKCTRL_OFFSET)
#define AM43XX_PRM_EMIF_CTRL_OFFSET			0x0030

	.arm
	.align 3

ENTRY(am43xx_do_wfi)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack

#ifdef CONFIG_CACHE_L2X0
	/* Retrieve l2 cache virt address BEFORE we shut off EMIF */
	ldr	r1, get_l2cache_base
	blx	r1
	mov	r8, r0
#endif

	/*
	 * Flush all data from the L1 and L2 data cache before disabling
	 * SCTLR.C bit.
	 */
	ldr	r1, kernel_flush
	blx	r1

	/*
	 * Clear the SCTLR.C bit to prevent further data cache
	 * allocation. Clearing SCTLR.C makes all data accesses
	 * strongly ordered, so they no longer hit the cache.
	 */
	mrc	p15, 0, r0, c1, c0, 0
	bic	r0, r0, #(1 << 2)	@ Disable the C bit
	mcr	p15, 0, r0, c1, c0, 0
	isb
	dsb

	/*
	 * Invalidate L1 and L2 data cache.
	 */
	ldr	r1, kernel_flush
	blx	r1

#ifdef CONFIG_CACHE_L2X0
	/*
	 * Clean and invalidate the L2 cache.
	 */
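	/*
	 * PL310 errata 727915 workaround (descriptive note): a background
	 * clean/invalidate by way can corrupt data, so the blocks below
	 * program the L2 debug control register through the secure monitor
	 * (OMAP4_MON_L2X0_DBG_CTRL_INDEX) before starting the operation and
	 * clear it again once the by-way operation has completed.
	 */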
#ifdef CONFIG_PL310_ERRATA_727915
	mov	r0, #0x03
	mov	r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
	dsb
	smc	#0
	dsb
#endif
	mov	r0, r8
	adr	r4, am43xx_pm_ro_sram_data
	ldr	r3, [r4, #AMX3_PM_RO_SRAM_DATA_VIRT_OFFSET]

	mov	r2, r0
	ldr	r0, [r2, #L2X0_AUX_CTRL]
	str	r0, [r3, #AMX3_PM_L2_AUX_CTRL_VAL_OFFSET]
	ldr	r0, [r2, #L310_PREFETCH_CTRL]
	str	r0, [r3, #AMX3_PM_L2_PREFETCH_CTRL_VAL_OFFSET]

	ldr	r0, l2_val
	str	r0, [r2, #L2X0_CLEAN_INV_WAY]
wait:
	ldr	r0, [r2, #L2X0_CLEAN_INV_WAY]
	ldr	r1, l2_val
	ands	r0, r0, r1
	bne	wait
#ifdef CONFIG_PL310_ERRATA_727915
	mov	r0, #0x00
	mov	r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
	dsb
	smc	#0
	dsb
#endif
l2x_sync:
	mov	r0, r8
	mov	r2, r0
	mov	r0, #0x0
	str	r0, [r2, #L2X0_CACHE_SYNC]
sync:
	ldr	r0, [r2, #L2X0_CACHE_SYNC]
	ands	r0, r0, #0x1
	bne	sync
#endif

	adr	r9, am43xx_emif_sram_table

	ldr	r3, [r9, #EMIF_PM_ENTER_SR_OFFSET]
	blx	r3

	ldr	r3, [r9, #EMIF_PM_SAVE_CONTEXT_OFFSET]
	blx	r3

	/* Disable EMIF */
	ldr	r1, am43xx_virt_emif_clkctrl
	ldr	r2, [r1]
	bic	r2, r2, #AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE
	str	r2, [r1]

wait_emif_disable:
	ldr	r2, [r1]
	mov	r3, #AM33XX_CM_CLKCTRL_MODULESTATE_DISABLED
	cmp	r2, r3
	bne	wait_emif_disable

	/*
	 * For the MPU WFI to be registered as an interrupt
	 * to WKUP_M3, MPU_CLKCTRL.MODULEMODE needs to be set
	 * to DISABLED
	 */
	ldr	r1, am43xx_virt_mpu_clkctrl
	ldr	r2, [r1]
	bic	r2, r2, #AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE
	str	r2, [r1]

	/*
	 * Put MPU CLKDM to SW_SLEEP
	 */
	ldr	r1, am43xx_virt_mpu_clkstctrl
	mov	r2, #AM43XX_CM_CLKSTCTRL_CLKTRCTRL_SW_SLEEP
	str	r2, [r1]

	/*
	 * Execute a barrier instruction to ensure that all cache,
	 * TLB and branch predictor maintenance operations issued
	 * have completed.
	 */
	dsb
	dmb

	/*
	 * Execute a WFI instruction and wait until the
	 * STANDBYWFI output is asserted to indicate that the
	 * CPU is in idle and low power state. CPU can speculatively
	 * prefetch the instructions so add NOPs after WFI. Sixteen
	 * NOPs as per Cortex-A9 pipeline.
	 */
	wfi

	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop

	/* We come here in case of an abort due to a late interrupt */
	ldr	r1, am43xx_virt_mpu_clkstctrl
	mov	r2, #AM43XX_CM_CLKSTCTRL_CLKTRCTRL_HW_AUTO
	str	r2, [r1]

	/* Set MPU_CLKCTRL.MODULEMODE back to ENABLE */
	ldr	r1, am43xx_virt_mpu_clkctrl
	mov	r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r2, [r1]

	/* Re-enable EMIF */
	ldr	r1, am43xx_virt_emif_clkctrl
	mov	r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r2, [r1]
wait_emif_enable:
	ldr	r3, [r1]
	cmp	r2, r3
	bne	wait_emif_enable

	/*
	 * Set SCTLR.C bit to allow data cache allocation
	 */
	mrc	p15, 0, r0, c1, c0, 0
	orr	r0, r0, #(1 << 2)	@ Enable the C bit
	mcr	p15, 0, r0, c1, c0, 0
	isb

	ldr	r1, [r9, #EMIF_PM_ABORT_SR_OFFSET]
	blx	r1

	/* Let the suspend code know about the abort */
	mov	r0, #1
	ldmfd	sp!, {r4 - r11, pc}	@ restore regs and return
ENDPROC(am43xx_do_wfi)

	.align
ENTRY(am43xx_resume_offset)
	.word . - am43xx_do_wfi
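
/*
 * Descriptive note: am43xx_resume_from_deep_sleep is entered with the
 * MMU still off, which is why it uses the physical PRCM/EMIF addresses
 * below rather than the virtual ones. It re-enables the EMIF clock and
 * restores the EMIF context before any DDR access is possible, brings
 * the L2 cache back up if it is not already enabled, and only then
 * jumps to the generic cpu_resume entry point.
 */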
ENTRY(am43xx_resume_from_deep_sleep)
	/* Set MPU CLKSTCTRL to HW AUTO so that CPUidle works properly */
	ldr	r1, am43xx_virt_mpu_clkstctrl
	mov	r2, #AM43XX_CM_CLKSTCTRL_CLKTRCTRL_HW_AUTO
	str	r2, [r1]

	/* For AM43xx, use EMIF power down until context is restored */
	ldr	r2, am43xx_phys_emif_poweroff
	mov	r1, #AM43XX_EMIF_POWEROFF_ENABLE
	str	r1, [r2, #0x0]

	/* Re-enable EMIF */
	ldr	r1, am43xx_phys_emif_clkctrl
	mov	r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r2, [r1]
wait_emif_enable1:
	ldr	r3, [r1]
	cmp	r2, r3
	bne	wait_emif_enable1

	adr	r9, am43xx_emif_sram_table

	ldr	r1, [r9, #EMIF_PM_RESTORE_CONTEXT_OFFSET]
	blx	r1

	ldr	r1, [r9, #EMIF_PM_EXIT_SR_OFFSET]
	blx	r1

	ldr	r2, am43xx_phys_emif_poweroff
	mov	r1, #AM43XX_EMIF_POWEROFF_DISABLE
	str	r1, [r2, #0x0]

#ifdef CONFIG_CACHE_L2X0
	ldr	r2, l2_cache_base
	ldr	r0, [r2, #L2X0_CTRL]
	and	r0, #0x0f
	cmp	r0, #1
	beq	skip_l2en		@ Skip if already enabled

	adr	r4, am43xx_pm_ro_sram_data
	ldr	r3, [r4, #AMX3_PM_RO_SRAM_DATA_PHYS_OFFSET]
	ldr	r0, [r3, #AMX3_PM_L2_PREFETCH_CTRL_VAL_OFFSET]

	ldr	r12, l2_smc1
	dsb
	smc	#0
	dsb
set_aux_ctrl:
	ldr	r0, [r3, #AMX3_PM_L2_AUX_CTRL_VAL_OFFSET]
	ldr	r12, l2_smc2
	dsb
	smc	#0
	dsb

	/* L2 invalidate on resume */
	ldr	r0, l2_val
	ldr	r2, l2_cache_base
	str	r0, [r2, #L2X0_INV_WAY]
wait2:
	ldr	r0, [r2, #L2X0_INV_WAY]
	ldr	r1, l2_val
	ands	r0, r0, r1
	bne	wait2
#ifdef CONFIG_PL310_ERRATA_727915
	mov	r0, #0x00
	mov	r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
	dsb
	smc	#0
	dsb
#endif
l2x_sync2:
	ldr	r2, l2_cache_base
	mov	r0, #0x0
	str	r0, [r2, #L2X0_CACHE_SYNC]
sync2:
	ldr	r0, [r2, #L2X0_CACHE_SYNC]
	ands	r0, r0, #0x1
	bne	sync2

	mov	r0, #0x1
	ldr	r12, l2_smc3
	dsb
	smc	#0
	dsb
#endif
skip_l2en:
	/* We are back. Branch to the common CPU resume routine */
	mov	r0, #0
	ldr	pc, resume_addr
ENDPROC(am43xx_resume_from_deep_sleep)

/*
 * Local variables
 */
	.align
resume_addr:
	.word	cpu_resume - PAGE_OFFSET + 0x80000000
kernel_flush:
	.word	v7_flush_dcache_all
ddr_start:
	.word	PAGE_OFFSET

am43xx_phys_emif_poweroff:
	.word	(AM43XX_CM_BASE + AM43XX_PRM_DEVICE_INST + \
		 AM43XX_PRM_EMIF_CTRL_OFFSET)
am43xx_virt_mpu_clkstctrl:
	.word	(AM43XX_CM_MPU_CLKSTCTRL)
am43xx_virt_mpu_clkctrl:
	.word	(AM43XX_CM_MPU_MPU_CLKCTRL)
am43xx_virt_emif_clkctrl:
	.word	(AM43XX_CM_PER_EMIF_CLKCTRL)
am43xx_phys_emif_clkctrl:
	.word	(AM43XX_CM_BASE + AM43XX_CM_PER_INST + \
		 AM43XX_CM_PER_EMIF_CLKCTRL_OFFSET)

#ifdef CONFIG_CACHE_L2X0
/* L2 cache related defines for AM437x */
get_l2cache_base:
	.word	omap4_get_l2cache_base
l2_cache_base:
	.word	OMAP44XX_L2CACHE_BASE
l2_smc1:
	.word	OMAP4_MON_L2X0_PREFETCH_INDEX
l2_smc2:
	.word	OMAP4_MON_L2X0_AUXCTRL_INDEX
l2_smc3:
	.word	OMAP4_MON_L2X0_CTRL_INDEX
l2_val:
	.word	0xffff
#endif

.align 3
/* DDR related defines */
ENTRY(am43xx_emif_sram_table)
	.space EMIF_PM_FUNCTIONS_SIZE

ENTRY(am43xx_pm_sram)
	.word am43xx_do_wfi
	.word am43xx_do_wfi_sz
	.word am43xx_resume_offset
	.word am43xx_emif_sram_table
	.word am43xx_pm_ro_sram_data

.align 3

ENTRY(am43xx_pm_ro_sram_data)
	.space AMX3_PM_RO_SRAM_DATA_SIZE

ENTRY(am43xx_do_wfi_sz)
	.word . - am43xx_do_wfi
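
/*
 * Descriptive note: am43xx_do_wfi_sz gives the size of the am43xx_do_wfi
 * text so the platform PM code can copy this routine into on-chip SRAM;
 * am43xx_pm_sram collects the symbols that copy needs (entry point, size,
 * resume offset, EMIF function table and read-only SRAM data).
 */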