/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Low level suspend code for AM43XX SoCs
 *
 * Copyright (C) 2013-2018 Texas Instruments Incorporated - http://www.ti.com/
 *	Dave Gerlach, Vaibhav Bedia
 */

#include <generated/ti-pm-asm-offsets.h>
#include <linux/linkage.h>
#include <linux/ti-emif-sram.h>
#include <linux/platform_data/pm33xx.h>
#include <asm/assembler.h>
#include <asm/hardware/cache-l2x0.h>
#include <asm/memory.h>

#include "cm33xx.h"
#include "common.h"
#include "iomap.h"
#include "omap-secure.h"
#include "omap44xx.h"
#include "prm33xx.h"
#include "prcm43xx.h"

/* replicated define because linux/bitops.h cannot be included in assembly */
#define BIT(nr)					(1 << (nr))

#define AM33XX_CM_CLKCTRL_MODULESTATE_DISABLED	0x00030000
#define AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE	0x0003
#define AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE	0x0002

#define AM43XX_EMIF_POWEROFF_ENABLE		0x1
#define AM43XX_EMIF_POWEROFF_DISABLE		0x0

#define AM43XX_CM_CLKSTCTRL_CLKTRCTRL_SW_SLEEP	0x1
#define AM43XX_CM_CLKSTCTRL_CLKTRCTRL_HW_AUTO	0x3

#define AM43XX_CM_BASE				0x44DF0000

#define AM43XX_CM_REGADDR(inst, reg)				\
	AM33XX_L4_WK_IO_ADDRESS(AM43XX_CM_BASE + (inst) + (reg))

#define AM43XX_CM_MPU_CLKSTCTRL AM43XX_CM_REGADDR(AM43XX_CM_MPU_INST, \
					AM43XX_CM_MPU_MPU_CDOFFS)
#define AM43XX_CM_MPU_MPU_CLKCTRL AM43XX_CM_REGADDR(AM43XX_CM_MPU_INST, \
					AM43XX_CM_MPU_MPU_CLKCTRL_OFFSET)
#define AM43XX_CM_PER_EMIF_CLKCTRL AM43XX_CM_REGADDR(AM43XX_CM_PER_INST, \
					AM43XX_CM_PER_EMIF_CLKCTRL_OFFSET)
#define AM43XX_PRM_EMIF_CTRL_OFFSET		0x0030

#define RTC_SECONDS_REG				0x0
#define RTC_PMIC_REG				0x98
#define RTC_PMIC_POWER_EN			BIT(16)
#define RTC_PMIC_EXT_WAKEUP_STS			BIT(12)
#define RTC_PMIC_EXT_WAKEUP_POL			BIT(4)
#define RTC_PMIC_EXT_WAKEUP_EN			BIT(0)

	.arm
	.align 3

ENTRY(am43xx_do_wfi)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack

	/* Save wfi_flags arg to data space */
	mov	r4, r0
	adr	r3, am43xx_pm_ro_sram_data
	ldr	r2, [r3, #AMX3_PM_RO_SRAM_DATA_VIRT_OFFSET]
	str	r4, [r2, #AMX3_PM_WFI_FLAGS_OFFSET]

#ifdef CONFIG_CACHE_L2X0
	/* Retrieve l2 cache virt address BEFORE we shut off EMIF */
	ldr	r1, get_l2cache_base
	blx	r1
	mov	r8, r0
#endif

	/* Only flush the cache if we know we are losing MPU context */
	tst	r4, #WFI_FLAG_FLUSH_CACHE
	beq	cache_skip_flush

	/*
	 * Flush all data from the L1 and L2 data cache before disabling
	 * SCTLR.C bit.
	 */
	ldr	r1, kernel_flush
	blx	r1

	/*
	 * Clear the SCTLR.C bit to prevent further data cache
	 * allocation. Clearing SCTLR.C makes all data accesses
	 * strongly ordered; they no longer hit the cache.
	 */
	mrc	p15, 0, r0, c1, c0, 0
	bic	r0, r0, #(1 << 2)	@ Disable the C bit
	mcr	p15, 0, r0, c1, c0, 0
	isb
	dsb

	/*
	 * Invalidate L1 and L2 data cache.
	 */
	ldr	r1, kernel_flush
	blx	r1
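	/*
	 * Note: the two kernel_flush calls are deliberate. The first
	 * pass cleans dirty lines while the cache is still enabled;
	 * the second pass, run with SCTLR.C already clear, evicts any
	 * lines the core may have speculatively allocated in between,
	 * so no dirty data remains once EMIF/DDR becomes unavailable.
	 */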
#ifdef CONFIG_CACHE_L2X0
	/*
	 * Clean and invalidate the L2 cache.
	 */
#ifdef CONFIG_PL310_ERRATA_727915
	mov	r0, #0x03
	mov	r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
	dsb
	smc	#0
	dsb
#endif
	mov	r0, r8
	adr	r4, am43xx_pm_ro_sram_data
	ldr	r3, [r4, #AMX3_PM_RO_SRAM_DATA_VIRT_OFFSET]

	mov	r2, r0
	ldr	r0, [r2, #L2X0_AUX_CTRL]
	str	r0, [r3, #AMX3_PM_L2_AUX_CTRL_VAL_OFFSET]
	ldr	r0, [r2, #L310_PREFETCH_CTRL]
	str	r0, [r3, #AMX3_PM_L2_PREFETCH_CTRL_VAL_OFFSET]

	ldr	r0, l2_val
	str	r0, [r2, #L2X0_CLEAN_INV_WAY]
wait:
	ldr	r0, [r2, #L2X0_CLEAN_INV_WAY]
	ldr	r1, l2_val
	ands	r0, r0, r1
	bne	wait
#ifdef CONFIG_PL310_ERRATA_727915
	mov	r0, #0x00
	mov	r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
	dsb
	smc	#0
	dsb
#endif
l2x_sync:
	mov	r0, r8
	mov	r2, r0
	mov	r0, #0x0
	str	r0, [r2, #L2X0_CACHE_SYNC]
sync:
	ldr	r0, [r2, #L2X0_CACHE_SYNC]
	ands	r0, r0, #0x1
	bne	sync
#endif

	/* Restore wfi_flags */
	adr	r3, am43xx_pm_ro_sram_data
	ldr	r2, [r3, #AMX3_PM_RO_SRAM_DATA_VIRT_OFFSET]
	ldr	r4, [r2, #AMX3_PM_WFI_FLAGS_OFFSET]

cache_skip_flush:
	/*
	 * If we are trying to enter RTC+DDR mode we must perform
	 * a read from the rtc address space to ensure the translation
	 * is present in the TLB, avoiding a page table walk after DDR
	 * becomes unavailable.
	 */
	tst	r4, #WFI_FLAG_RTC_ONLY
	beq	skip_rtc_va_refresh

	adr	r3, am43xx_pm_ro_sram_data
	ldr	r1, [r3, #AMX3_PM_RTC_BASE_VIRT_OFFSET]
	ldr	r0, [r1]

skip_rtc_va_refresh:
	/* Check if we want self refresh */
	tst	r4, #WFI_FLAG_SELF_REFRESH
	beq	emif_skip_enter_sr

	adr	r9, am43xx_emif_sram_table

	ldr	r3, [r9, #EMIF_PM_ENTER_SR_OFFSET]
	blx	r3

emif_skip_enter_sr:
	/* Only necessary if PER is losing context */
	tst	r4, #WFI_FLAG_SAVE_EMIF
	beq	emif_skip_save

	ldr	r3, [r9, #EMIF_PM_SAVE_CONTEXT_OFFSET]
	blx	r3

emif_skip_save:
	/* EMIF can only be disabled if we have entered self refresh */
	tst	r4, #WFI_FLAG_SELF_REFRESH
	beq	emif_skip_disable

	/* Disable EMIF */
	ldr	r1, am43xx_virt_emif_clkctrl
	ldr	r2, [r1]
	bic	r2, r2, #AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE
	str	r2, [r1]

wait_emif_disable:
	ldr	r2, [r1]
	mov	r3, #AM33XX_CM_CLKCTRL_MODULESTATE_DISABLED
	cmp	r2, r3
	bne	wait_emif_disable

emif_skip_disable:
	tst	r4, #WFI_FLAG_RTC_ONLY
	beq	skip_rtc_only

	adr	r3, am43xx_pm_ro_sram_data
	ldr	r1, [r3, #AMX3_PM_RTC_BASE_VIRT_OFFSET]

	ldr	r0, [r1, #RTC_PMIC_REG]
	orr	r0, r0, #RTC_PMIC_POWER_EN
	orr	r0, r0, #RTC_PMIC_EXT_WAKEUP_STS
	orr	r0, r0, #RTC_PMIC_EXT_WAKEUP_EN
	orr	r0, r0, #RTC_PMIC_EXT_WAKEUP_POL
	str	r0, [r1, #RTC_PMIC_REG]
	ldr	r0, [r1, #RTC_PMIC_REG]
	/* Wait for 2 seconds to lose power */
	mov	r3, #2
	ldr	r2, [r1, #RTC_SECONDS_REG]
rtc_loop:
	ldr	r0, [r1, #RTC_SECONDS_REG]
	cmp	r0, r2
	beq	rtc_loop
	mov	r2, r0
	subs	r3, r3, #1
	bne	rtc_loop

	b	re_enable_emif

skip_rtc_only:

	tst	r4, #WFI_FLAG_WAKE_M3
	beq	wkup_m3_skip

	/*
	 * For the MPU WFI to be registered as an interrupt
	 * to WKUP_M3, MPU_CLKCTRL.MODULEMODE needs to be set
	 * to DISABLED
	 */
	ldr	r1, am43xx_virt_mpu_clkctrl
	ldr	r2, [r1]
	bic	r2, r2, #AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE
	str	r2, [r1]

	/*
	 * Put MPU CLKDM to SW_SLEEP
	 */
	ldr	r1, am43xx_virt_mpu_clkstctrl
	mov	r2, #AM43XX_CM_CLKSTCTRL_CLKTRCTRL_SW_SLEEP
	str	r2, [r1]
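	/*
	 * With MODULEMODE cleared (the MODULEMODE_DISABLE constant,
	 * 0x3, acts as the field mask for the bic above, leaving the
	 * field at 0/DISABLED) and the MPU clockdomain forced to
	 * SW_SLEEP, the WFI that follows is what hands the power
	 * transition over to the WKUP_M3 firmware.
	 */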
wkup_m3_skip:
	/*
	 * Execute a barrier instruction to ensure that all cache,
	 * TLB and branch predictor maintenance operations issued
	 * have completed.
	 */
	dsb
	dmb

	/*
	 * Execute a WFI instruction and wait until the
	 * STANDBYWFI output is asserted to indicate that the
	 * CPU is in idle and low power state. The CPU can
	 * speculatively prefetch instructions past the WFI, so
	 * add NOPs after it: sixteen, as per the Cortex-A9
	 * pipeline depth.
	 */
	wfi

	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop

	/* We come here in case of an abort due to a late interrupt */
	ldr	r1, am43xx_virt_mpu_clkstctrl
	mov	r2, #AM43XX_CM_CLKSTCTRL_CLKTRCTRL_HW_AUTO
	str	r2, [r1]

	/* Set MPU_CLKCTRL.MODULEMODE back to ENABLE */
	ldr	r1, am43xx_virt_mpu_clkctrl
	mov	r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r2, [r1]

re_enable_emif:
	/* Re-enable EMIF */
	ldr	r1, am43xx_virt_emif_clkctrl
	mov	r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r2, [r1]
wait_emif_enable:
	ldr	r3, [r1]
	cmp	r2, r3
	bne	wait_emif_enable

	tst	r4, #WFI_FLAG_FLUSH_CACHE
	beq	cache_skip_restore

	/*
	 * Set SCTLR.C bit to allow data cache allocation
	 */
	mrc	p15, 0, r0, c1, c0, 0
	orr	r0, r0, #(1 << 2)	@ Enable the C bit
	mcr	p15, 0, r0, c1, c0, 0
	isb

cache_skip_restore:
	/* Only necessary if we entered self refresh */
	tst	r4, #WFI_FLAG_SELF_REFRESH
	beq	emif_skip_exit_sr_abt

	adr	r9, am43xx_emif_sram_table
	ldr	r1, [r9, #EMIF_PM_ABORT_SR_OFFSET]
	blx	r1

emif_skip_exit_sr_abt:
	/* Let the suspend code know about the abort */
	mov	r0, #1
	ldmfd	sp!, {r4 - r11, pc}	@ restore regs and return
ENDPROC(am43xx_do_wfi)

	.align
ENTRY(am43xx_resume_offset)
	.word . - am43xx_do_wfi
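/*
 * am43xx_resume_offset records how far the resume entry point below
 * sits from the start of am43xx_do_wfi, so the platform PM code can
 * compute the SRAM address to jump to when resuming from deep sleep.
 */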
ENTRY(am43xx_resume_from_deep_sleep)
	/* Set MPU CLKSTCTRL to HW AUTO so that CPUidle works properly */
	ldr	r1, am43xx_virt_mpu_clkstctrl
	mov	r2, #AM43XX_CM_CLKSTCTRL_CLKTRCTRL_HW_AUTO
	str	r2, [r1]

	/* For AM43xx, use EMIF power down until context is restored */
	ldr	r2, am43xx_phys_emif_poweroff
	mov	r1, #AM43XX_EMIF_POWEROFF_ENABLE
	str	r1, [r2, #0x0]

	/* Re-enable EMIF */
	ldr	r1, am43xx_phys_emif_clkctrl
	mov	r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r2, [r1]
wait_emif_enable1:
	ldr	r3, [r1]
	cmp	r2, r3
	bne	wait_emif_enable1

	adr	r9, am43xx_emif_sram_table

	ldr	r1, [r9, #EMIF_PM_RESTORE_CONTEXT_OFFSET]
	blx	r1

	ldr	r1, [r9, #EMIF_PM_EXIT_SR_OFFSET]
	blx	r1

	ldr	r2, am43xx_phys_emif_poweroff
	mov	r1, #AM43XX_EMIF_POWEROFF_DISABLE
	str	r1, [r2, #0x0]

	ldr	r1, [r9, #EMIF_PM_RUN_HW_LEVELING]
	blx	r1

#ifdef CONFIG_CACHE_L2X0
	ldr	r2, l2_cache_base
	ldr	r0, [r2, #L2X0_CTRL]
	and	r0, #0x0f
	cmp	r0, #1
	beq	skip_l2en		@ Skip if already enabled

	adr	r4, am43xx_pm_ro_sram_data
	ldr	r3, [r4, #AMX3_PM_RO_SRAM_DATA_PHYS_OFFSET]
	ldr	r0, [r3, #AMX3_PM_L2_PREFETCH_CTRL_VAL_OFFSET]

	ldr	r12, l2_smc1
	dsb
	smc	#0
	dsb
set_aux_ctrl:
	ldr	r0, [r3, #AMX3_PM_L2_AUX_CTRL_VAL_OFFSET]
	ldr	r12, l2_smc2
	dsb
	smc	#0
	dsb

	/* L2 invalidate on resume */
	ldr	r0, l2_val
	ldr	r2, l2_cache_base
	str	r0, [r2, #L2X0_INV_WAY]
wait2:
	ldr	r0, [r2, #L2X0_INV_WAY]
	ldr	r1, l2_val
	ands	r0, r0, r1
	bne	wait2
#ifdef CONFIG_PL310_ERRATA_727915
	mov	r0, #0x00
	mov	r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
	dsb
	smc	#0
	dsb
#endif
l2x_sync2:
	ldr	r2, l2_cache_base
	mov	r0, #0x0
	str	r0, [r2, #L2X0_CACHE_SYNC]
sync2:
	ldr	r0, [r2, #L2X0_CACHE_SYNC]
	ands	r0, r0, #0x1
	bne	sync2

	mov	r0, #0x1
	ldr	r12, l2_smc3
	dsb
	smc	#0
	dsb
#endif
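	/*
	 * Note: the L2 restore above goes through ROM monitor SMC
	 * calls (the OMAP4_MON_L2X0_* service indexes loaded into
	 * r12) rather than direct MMIO writes, because the PL310
	 * prefetch, auxiliary control and enable registers are
	 * secure-only on this SoC.
	 */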
skip_l2en:
	/* We are back. Branch to the common CPU resume routine */
	mov	r0, #0
	ldr	pc, resume_addr
ENDPROC(am43xx_resume_from_deep_sleep)

/*
 * Local variables
 */
	.align
kernel_flush:
	.word	v7_flush_dcache_all
ddr_start:
	.word	PAGE_OFFSET

am43xx_phys_emif_poweroff:
	.word	(AM43XX_CM_BASE + AM43XX_PRM_DEVICE_INST + \
		 AM43XX_PRM_EMIF_CTRL_OFFSET)
am43xx_virt_mpu_clkstctrl:
	.word	(AM43XX_CM_MPU_CLKSTCTRL)
am43xx_virt_mpu_clkctrl:
	.word	(AM43XX_CM_MPU_MPU_CLKCTRL)
am43xx_virt_emif_clkctrl:
	.word	(AM43XX_CM_PER_EMIF_CLKCTRL)
am43xx_phys_emif_clkctrl:
	.word	(AM43XX_CM_BASE + AM43XX_CM_PER_INST + \
		 AM43XX_CM_PER_EMIF_CLKCTRL_OFFSET)

#ifdef CONFIG_CACHE_L2X0
/* L2 cache related defines for AM437x */
get_l2cache_base:
	.word	omap4_get_l2cache_base
l2_cache_base:
	.word	OMAP44XX_L2CACHE_BASE
l2_smc1:
	.word	OMAP4_MON_L2X0_PREFETCH_INDEX
l2_smc2:
	.word	OMAP4_MON_L2X0_AUXCTRL_INDEX
l2_smc3:
	.word	OMAP4_MON_L2X0_CTRL_INDEX
l2_val:
	.word	0xffff
#endif

.align 3
/* DDR related defines */
ENTRY(am43xx_emif_sram_table)
	.space EMIF_PM_FUNCTIONS_SIZE

ENTRY(am43xx_pm_sram)
	.word am43xx_do_wfi
	.word am43xx_do_wfi_sz
	.word am43xx_resume_offset
	.word am43xx_emif_sram_table
	.word am43xx_pm_ro_sram_data

resume_addr:
	.word cpu_resume - PAGE_OFFSET + 0x80000000
.align 3

ENTRY(am43xx_pm_ro_sram_data)
	.space AMX3_PM_RO_SRAM_DATA_SIZE

ENTRY(am43xx_do_wfi_sz)
	.word . - am43xx_do_wfi
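/*
 * Everything from am43xx_do_wfi up to here is copied into on-chip
 * SRAM and executed from there by the platform PM code, which is why
 * EMIF/DDR can be taken down while it runs: am43xx_do_wfi_sz gives
 * the size of the block to copy, and the am43xx_pm_sram table
 * publishes the symbols needed to relocate into and index the copy.
 */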