/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Low level suspend code for AM43XX SoCs
 *
 * Copyright (C) 2013-2018 Texas Instruments Incorporated - https://www.ti.com/
 * Dave Gerlach, Vaibhav Bedia
 */

#include <linux/linkage.h>
#include <linux/ti-emif-sram.h>
#include <linux/platform_data/pm33xx.h>
#include <asm/assembler.h>
#include <asm/hardware/cache-l2x0.h>
#include <asm/memory.h>

#include "cm33xx.h"
#include "common.h"
#include "iomap.h"
#include "omap-secure.h"
#include "omap44xx.h"
#include "pm-asm-offsets.h"
#include "prm33xx.h"
#include "prcm43xx.h"

/* replicated define because linux/bitops.h cannot be included in assembly */
#define BIT(nr)			(1 << (nr))

/* CM_xxx_CLKCTRL MODULEMODE / MODULESTATE field values */
#define AM33XX_CM_CLKCTRL_MODULESTATE_DISABLED	0x00030000
#define AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE	0x0003
#define AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE	0x0002

#define AM43XX_EMIF_POWEROFF_ENABLE		0x1
#define AM43XX_EMIF_POWEROFF_DISABLE		0x0

/* CM_xxx_CLKSTCTRL CLKTRCTRL field values */
#define AM43XX_CM_CLKSTCTRL_CLKTRCTRL_SW_SLEEP	0x1
#define AM43XX_CM_CLKSTCTRL_CLKTRCTRL_HW_AUTO	0x3

#define AM43XX_CM_BASE				0x44DF0000

/* Build a virtual address for a PRCM register from instance + offset */
#define AM43XX_CM_REGADDR(inst, reg)				\
	AM33XX_L4_WK_IO_ADDRESS(AM43XX_CM_BASE + (inst) + (reg))
#define AM43XX_CM_MPU_CLKSTCTRL AM43XX_CM_REGADDR(AM43XX_CM_MPU_INST, \
					AM43XX_CM_MPU_MPU_CDOFFS)
#define AM43XX_CM_MPU_MPU_CLKCTRL AM43XX_CM_REGADDR(AM43XX_CM_MPU_INST, \
					AM43XX_CM_MPU_MPU_CLKCTRL_OFFSET)
#define AM43XX_CM_PER_EMIF_CLKCTRL AM43XX_CM_REGADDR(AM43XX_CM_PER_INST, \
					AM43XX_CM_PER_EMIF_CLKCTRL_OFFSET)
#define AM43XX_PRM_EMIF_CTRL_OFFSET		0x0030

/* RTC register offsets and PMIC control bits used for RTC+DDR power off */
#define RTC_SECONDS_REG				0x0
#define RTC_PMIC_REG				0x98
#define RTC_PMIC_POWER_EN			BIT(16)
#define RTC_PMIC_EXT_WAKEUP_STS			BIT(12)
#define RTC_PMIC_EXT_WAKEUP_POL			BIT(4)
#define RTC_PMIC_EXT_WAKEUP_EN			BIT(0)

	.arm
	.arch armv7-a
	.arch_extension sec
	.align 3

/*
 * int am43xx_do_wfi(unsigned long wfi_flags)
 *
 * Runs from SRAM. Optionally flushes/disables caches, puts the EMIF in
 * self refresh, disables the EMIF clock, and executes WFI according to
 * the WFI_FLAG_* bits in r0. On a true power loss execution resumes in
 * am43xx_resume_from_deep_sleep instead; falling out of the WFI means a
 * late interrupt aborted the transition, so the suspend state is undone
 * and 1 is returned in r0 to report the abort.
 *
 * In:    r0 = wfi_flags (WFI_FLAG_* bitmask)
 * Out:   r0 = 1 (abort path only)
 * Locals: r4 = wfi_flags, r8 = L2 cache controller virtual base,
 *         r9 = EMIF SRAM function table
 */
ENTRY(am43xx_do_wfi)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack

	/* Save wfi_flags arg to data space */
	mov	r4, r0
	adr	r3, am43xx_pm_ro_sram_data
	ldr	r2, [r3, #AMX3_PM_RO_SRAM_DATA_VIRT_OFFSET]
	str	r4, [r2, #AMX3_PM_WFI_FLAGS_OFFSET]

#ifdef CONFIG_CACHE_L2X0
	/* Retrieve l2 cache virt address BEFORE we shut off EMIF */
	ldr	r1, get_l2cache_base
	blx	r1
	mov	r8, r0
#endif

	/* Only flush cache if we know we are losing MPU context */
	tst	r4, #WFI_FLAG_FLUSH_CACHE
	beq	cache_skip_flush

	/*
	 * Flush all data from the L1 and L2 data cache before disabling
	 * SCTLR.C bit.
	 */
	ldr	r1, kernel_flush
	blx	r1

	/*
	 * Clear the SCTLR.C bit to prevent further data cache
	 * allocation. Clearing SCTLR.C would make all the data accesses
	 * strongly ordered and would not hit the cache.
	 */
	mrc	p15, 0, r0, c1, c0, 0
	bic	r0, r0, #(1 << 2)	@ Disable the C bit
	mcr	p15, 0, r0, c1, c0, 0
	isb
	dsb

	/*
	 * Invalidate L1 and L2 data cache.
	 */
	ldr	r1, kernel_flush
	blx	r1

#ifdef CONFIG_CACHE_L2X0
	/*
	 * Clean and invalidate the L2 cache.
	 */
#ifdef CONFIG_PL310_ERRATA_727915
	/* Set the PL310 debug control register via secure monitor call */
	mov	r0, #0x03
	mov	r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
	dsb
	smc	#0
	dsb
#endif
	mov	r0, r8
	adr	r4, am43xx_pm_ro_sram_data
	ldr	r3, [r4, #AMX3_PM_RO_SRAM_DATA_VIRT_OFFSET]

	/* Save L2 aux ctrl and prefetch ctrl so resume can restore them */
	mov	r2, r0
	ldr	r0, [r2, #L2X0_AUX_CTRL]
	str	r0, [r3, #AMX3_PM_L2_AUX_CTRL_VAL_OFFSET]
	ldr	r0, [r2, #L310_PREFETCH_CTRL]
	str	r0, [r3, #AMX3_PM_L2_PREFETCH_CTRL_VAL_OFFSET]

	/* Clean and invalidate all ways, then poll until done */
	ldr	r0, l2_val
	str	r0, [r2, #L2X0_CLEAN_INV_WAY]
wait:
	ldr	r0, [r2, #L2X0_CLEAN_INV_WAY]
	ldr	r1, l2_val
	ands	r0, r0, r1
	bne	wait
#ifdef CONFIG_PL310_ERRATA_727915
	/* Restore the PL310 debug control register */
	mov	r0, #0x00
	mov	r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
	dsb
	smc	#0
	dsb
#endif
l2x_sync:
	/* Drain the L2 controller with a cache sync and poll completion */
	mov	r0, r8
	mov	r2, r0
	mov	r0, #0x0
	str	r0, [r2, #L2X0_CACHE_SYNC]
sync:
	ldr	r0, [r2, #L2X0_CACHE_SYNC]
	ands	r0, r0, #0x1
	bne	sync
#endif

	/* Restore wfi_flags (r4 was reused as scratch above) */
	adr	r3, am43xx_pm_ro_sram_data
	ldr	r2, [r3, #AMX3_PM_RO_SRAM_DATA_VIRT_OFFSET]
	ldr	r4, [r2, #AMX3_PM_WFI_FLAGS_OFFSET]

cache_skip_flush:
	/*
	 * If we are trying to enter RTC+DDR mode we must perform
	 * a read from the rtc address space to ensure translation
	 * presence in the TLB to avoid page table walk after DDR
	 * is unavailable.
	 */
	tst	r4, #WFI_FLAG_RTC_ONLY
	beq	skip_rtc_va_refresh

	adr	r3, am43xx_pm_ro_sram_data
	ldr	r1, [r3, #AMX3_PM_RTC_BASE_VIRT_OFFSET]
	ldr	r0, [r1]

skip_rtc_va_refresh:
	/* Check if we want self refresh */
	tst	r4, #WFI_FLAG_SELF_REFRESH
	beq	emif_skip_enter_sr

	adr	r9, am43xx_emif_sram_table

	ldr	r3, [r9, #EMIF_PM_ENTER_SR_OFFSET]
	blx	r3

emif_skip_enter_sr:
	/* Only necessary if PER is losing context */
	tst	r4, #WFI_FLAG_SAVE_EMIF
	beq	emif_skip_save

	ldr	r3, [r9, #EMIF_PM_SAVE_CONTEXT_OFFSET]
	blx	r3

emif_skip_save:
	/* Only can disable EMIF if we have entered self refresh */
	tst	r4, #WFI_FLAG_SELF_REFRESH
	beq	emif_skip_disable

	/* Disable EMIF (clear MODULEMODE bits) */
	ldr	r1, am43xx_virt_emif_clkctrl
	ldr	r2, [r1]
	bic	r2, r2, #AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE
	str	r2, [r1]

wait_emif_disable:
	ldr	r2, [r1]
	mov	r3, #AM33XX_CM_CLKCTRL_MODULESTATE_DISABLED
	cmp	r2, r3
	bne	wait_emif_disable

emif_skip_disable:
	tst	r4, #WFI_FLAG_RTC_ONLY
	beq	skip_rtc_only

	/*
	 * RTC+DDR: arm the RTC PMIC power-enable and external wakeup
	 * controls, then wait for the power cut by watching the RTC
	 * seconds register tick.
	 */
	adr	r3, am43xx_pm_ro_sram_data
	ldr	r1, [r3, #AMX3_PM_RTC_BASE_VIRT_OFFSET]

	ldr	r0, [r1, #RTC_PMIC_REG]
	orr	r0, r0, #RTC_PMIC_POWER_EN
	orr	r0, r0, #RTC_PMIC_EXT_WAKEUP_STS
	orr	r0, r0, #RTC_PMIC_EXT_WAKEUP_EN
	orr	r0, r0, #RTC_PMIC_EXT_WAKEUP_POL
	str	r0, [r1, #RTC_PMIC_REG]
	ldr	r0, [r1, #RTC_PMIC_REG]	@ read back to post the write
	/* Wait for 2 seconds to lose power */
	mov	r3, #2
	ldr	r2, [r1, #RTC_SECONDS_REG]
rtc_loop:
	ldr	r0, [r1, #RTC_SECONDS_REG]
	cmp	r0, r2
	beq	rtc_loop
	mov	r2, r0
	subs	r3, r3, #1
	bne	rtc_loop

	/* Still powered after 2s: treat as abort and re-enable EMIF */
	b	re_enable_emif

skip_rtc_only:

	tst	r4, #WFI_FLAG_WAKE_M3
	beq	wkup_m3_skip

	/*
	 * For the MPU WFI to be registered as an interrupt
	 * to WKUP_M3, MPU_CLKCTRL.MODULEMODE needs to be set
	 * to DISABLED
	 */
	ldr	r1, am43xx_virt_mpu_clkctrl
	ldr	r2, [r1]
	bic	r2, r2, #AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE
	str	r2, [r1]

	/*
	 * Put MPU CLKDM to SW_SLEEP
	 */
	ldr	r1, am43xx_virt_mpu_clkstctrl
	mov	r2, #AM43XX_CM_CLKSTCTRL_CLKTRCTRL_SW_SLEEP
	str	r2, [r1]

wkup_m3_skip:
	/*
	 * Execute a barrier instruction to ensure that all cache,
	 * TLB and branch predictor maintenance operations issued
	 * have completed.
	 */
	dsb
	dmb

	/*
	 * Execute a WFI instruction and wait until the
	 * STANDBYWFI output is asserted to indicate that the
	 * CPU is in idle and low power state. CPU can speculatively
	 * prefetch the instructions so add NOPs after WFI. Sixteen
	 * NOPs as per Cortex-A9 pipeline.
	 */
	wfi

	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop

	/* We come here in case of an abort due to a late interrupt */

	/* Set MPU CLKSTCTRL back to HW_AUTO */
	ldr	r1, am43xx_virt_mpu_clkstctrl
	mov	r2, #AM43XX_CM_CLKSTCTRL_CLKTRCTRL_HW_AUTO
	str	r2, [r1]

	/* Set MPU_CLKCTRL.MODULEMODE back to ENABLE */
	ldr	r1, am43xx_virt_mpu_clkctrl
	mov	r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r2, [r1]

re_enable_emif:
	/* Re-enable EMIF */
	ldr	r1, am43xx_virt_emif_clkctrl
	mov	r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r2, [r1]
wait_emif_enable:
	ldr	r3, [r1]
	cmp	r2, r3
	bne	wait_emif_enable

	tst	r4, #WFI_FLAG_FLUSH_CACHE
	beq	cache_skip_restore

	/*
	 * Set SCTLR.C bit to allow data cache allocation
	 */
	mrc	p15, 0, r0, c1, c0, 0
	orr	r0, r0, #(1 << 2)	@ Enable the C bit
	mcr	p15, 0, r0, c1, c0, 0
	isb

cache_skip_restore:
	/* Only necessary if PER is losing context */
	tst	r4, #WFI_FLAG_SELF_REFRESH
	beq	emif_skip_exit_sr_abt

	adr	r9, am43xx_emif_sram_table
	ldr	r1, [r9, #EMIF_PM_ABORT_SR_OFFSET]
	blx	r1

emif_skip_exit_sr_abt:
	/* Let the suspend code know about the abort */
	mov	r0, #1
	ldmfd	sp!, {r4 - r11, pc}	@ restore regs and return
ENDPROC(am43xx_do_wfi)

	.align
/* Offset of the resume entry point from the start of am43xx_do_wfi */
ENTRY(am43xx_resume_offset)
	.word . - am43xx_do_wfi

/*
 * am43xx_resume_from_deep_sleep
 *
 * Entered from ROM/SRAM after a deep-sleep wakeup, with the MMU off.
 * Re-enables the EMIF clock, restores EMIF context and exits self
 * refresh via the EMIF SRAM function table, restores the L2 cache
 * controller state saved by am43xx_do_wfi, then jumps to the common
 * cpu_resume routine (physical address) with r0 = 0.
 */
ENTRY(am43xx_resume_from_deep_sleep)
	/* Set MPU CLKSTCTRL to HW AUTO so that CPUidle works properly */
	ldr	r1, am43xx_virt_mpu_clkstctrl
	mov	r2, #AM43XX_CM_CLKSTCTRL_CLKTRCTRL_HW_AUTO
	str	r2, [r1]

	/* For AM43xx, use EMIF power down until context is restored */
	ldr	r2, am43xx_phys_emif_poweroff
	mov	r1, #AM43XX_EMIF_POWEROFF_ENABLE
	str	r1, [r2, #0x0]

	/* Re-enable EMIF */
	ldr	r1, am43xx_phys_emif_clkctrl
	mov	r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r2, [r1]
wait_emif_enable1:
	ldr	r3, [r1]
	cmp	r2, r3
	bne	wait_emif_enable1

	adr	r9, am43xx_emif_sram_table

	ldr	r1, [r9, #EMIF_PM_RESTORE_CONTEXT_OFFSET]
	blx	r1

	ldr	r1, [r9, #EMIF_PM_EXIT_SR_OFFSET]
	blx	r1

	ldr	r2, am43xx_phys_emif_poweroff
	mov	r1, #AM43XX_EMIF_POWEROFF_DISABLE
	str	r1, [r2, #0x0]

	ldr	r1, [r9, #EMIF_PM_RUN_HW_LEVELING]
	blx	r1

#ifdef CONFIG_CACHE_L2X0
	ldr	r2, l2_cache_base
	ldr	r0, [r2, #L2X0_CTRL]
	and	r0, #0x0f
	cmp	r0, #1
	beq	skip_l2en			@ Skip if already enabled

	/* Restore the saved L2 prefetch ctrl via secure monitor call */
	adr	r4, am43xx_pm_ro_sram_data
	ldr	r3, [r4, #AMX3_PM_RO_SRAM_DATA_PHYS_OFFSET]
	ldr	r0, [r3, #AMX3_PM_L2_PREFETCH_CTRL_VAL_OFFSET]

	ldr	r12, l2_smc1
	dsb
	smc	#0
	dsb
set_aux_ctrl:
	/* Restore the saved L2 aux ctrl via secure monitor call */
	ldr	r0, [r3, #AMX3_PM_L2_AUX_CTRL_VAL_OFFSET]
	ldr	r12, l2_smc2
	dsb
	smc	#0
	dsb

	/* L2 invalidate on resume */
	ldr	r0, l2_val
	ldr	r2, l2_cache_base
	str	r0, [r2, #L2X0_INV_WAY]
wait2:
	ldr	r0, [r2, #L2X0_INV_WAY]
	ldr	r1, l2_val
	ands	r0, r0, r1
	bne	wait2
#ifdef CONFIG_PL310_ERRATA_727915
	mov	r0, #0x00
	mov	r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
	dsb
	smc	#0
	dsb
#endif
l2x_sync2:
	/* Drain the L2 controller with a cache sync and poll completion */
	ldr	r2, l2_cache_base
	mov	r0, #0x0
	str	r0, [r2, #L2X0_CACHE_SYNC]
sync2:
	ldr	r0, [r2, #L2X0_CACHE_SYNC]
	ands	r0, r0, #0x1
	bne	sync2

	/* Re-enable the L2 cache controller via secure monitor call */
	mov	r0, #0x1
	ldr	r12, l2_smc3
	dsb
	smc	#0
	dsb
#endif
skip_l2en:
	/* We are back. Branch to the common CPU resume routine */
	mov	r0, #0
	ldr	pc, resume_addr
ENDPROC(am43xx_resume_from_deep_sleep)

/*
 * Local variables
 */
	.align
kernel_flush:
	.word	v7_flush_dcache_all
ddr_start:
	.word	PAGE_OFFSET

am43xx_phys_emif_poweroff:
	.word	(AM43XX_CM_BASE + AM43XX_PRM_DEVICE_INST + \
		 AM43XX_PRM_EMIF_CTRL_OFFSET)
am43xx_virt_mpu_clkstctrl:
	.word	(AM43XX_CM_MPU_CLKSTCTRL)
am43xx_virt_mpu_clkctrl:
	.word	(AM43XX_CM_MPU_MPU_CLKCTRL)
am43xx_virt_emif_clkctrl:
	.word	(AM43XX_CM_PER_EMIF_CLKCTRL)
am43xx_phys_emif_clkctrl:
	.word	(AM43XX_CM_BASE + AM43XX_CM_PER_INST + \
		 AM43XX_CM_PER_EMIF_CLKCTRL_OFFSET)

#ifdef CONFIG_CACHE_L2X0
/* L2 cache related defines for AM437x */
get_l2cache_base:
	.word	omap4_get_l2cache_base
l2_cache_base:
	.word	OMAP44XX_L2CACHE_BASE
l2_smc1:
	.word	OMAP4_MON_L2X0_PREFETCH_INDEX
l2_smc2:
	.word	OMAP4_MON_L2X0_AUXCTRL_INDEX
l2_smc3:
	.word	OMAP4_MON_L2X0_CTRL_INDEX
l2_val:
	.word	0xffff
#endif

.align 3
/* DDR related defines */
ENTRY(am43xx_emif_sram_table)
	.space EMIF_PM_FUNCTIONS_SIZE

ENTRY(am43xx_pm_sram)
	.word am43xx_do_wfi
	.word am43xx_do_wfi_sz
	.word am43xx_resume_offset
	.word am43xx_emif_sram_table
	.word am43xx_pm_ro_sram_data

resume_addr:
	.word cpu_resume - PAGE_OFFSET + 0x80000000
.align 3

ENTRY(am43xx_pm_ro_sram_data)
	.space AMX3_PM_RO_SRAM_DATA_SIZE

/* Size of the am43xx_do_wfi code to be copied into SRAM */
ENTRY(am43xx_do_wfi_sz)
	.word . - am43xx_do_wfi