/*
 * Low level PM code for TI EMIF
 *
 * Copyright (C) 2016-2017 Texas Instruments Incorporated - http://www.ti.com/
 *	Dave Gerlach
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation version 2.
 *
 * This program is distributed "as is" WITHOUT ANY WARRANTY of any
 * kind, whether express or implied; without even the implied warranty
 * of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 */

#include <linux/linkage.h>
#include <asm/assembler.h>
#include <asm/memory.h>

#include "emif.h"
#include "ti-emif-asm-offsets.h"

#define EMIF_POWER_MGMT_WAIT_SELF_REFRESH_8192_CYCLES	0x00a0
#define EMIF_POWER_MGMT_SR_TIMER_MASK			0x00f0
#define EMIF_POWER_MGMT_SELF_REFRESH_MODE		0x0200
#define EMIF_POWER_MGMT_SELF_REFRESH_MODE_MASK		0x0700

#define EMIF_SDCFG_TYPE_DDR2		(0x2 << SDRAM_TYPE_SHIFT)
#define EMIF_SDCFG_TYPE_DDR3		(0x3 << SDRAM_TYPE_SHIFT)
#define EMIF_STATUS_READY		0x4

#define AM43XX_EMIF_PHY_CTRL_REG_COUNT	0x120

#define EMIF_AM437X_REGISTERS		0x1
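
/*
 * Field decode for the EMIF_POWER_MGMT_* values above (descriptive note):
 * the self-refresh mode mask (0x0700) covers the PWR_MGMT_CTRL LP_MODE
 * field in bits 8-10, with 0x0200 programming LP_MODE = 0x2, i.e.
 * self-refresh. The SR timer mask (0x00f0) covers the SR_TIM field in
 * bits 4-7; 0x00a0 selects the 8192-cycle self-refresh entry delay the
 * first macro is named after.
 */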

	.arm
	.align 3

ENTRY(ti_emif_sram)

/*
 * void ti_emif_save_context(void)
 *
 * Used during suspend to save the context of all required EMIF registers
 * to local memory if the EMIF is going to lose context during the sleep
 * transition. Operates on the VIRTUAL address of the EMIF.
 */
ENTRY(ti_emif_save_context)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack

	adr	r4, ti_emif_pm_sram_data
	ldr	r0, [r4, #EMIF_PM_BASE_ADDR_VIRT_OFFSET]
	ldr	r2, [r4, #EMIF_PM_REGS_VIRT_OFFSET]

	/* Save EMIF configuration */
	ldr	r1, [r0, #EMIF_SDRAM_CONFIG]
	str	r1, [r2, #EMIF_SDCFG_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_SDRAM_REFRESH_CONTROL]
	str	r1, [r2, #EMIF_REF_CTRL_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_SDRAM_TIMING_1]
	str	r1, [r2, #EMIF_TIMING1_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_SDRAM_TIMING_2]
	str	r1, [r2, #EMIF_TIMING2_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_SDRAM_TIMING_3]
	str	r1, [r2, #EMIF_TIMING3_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_POWER_MANAGEMENT_CONTROL]
	str	r1, [r2, #EMIF_PMCR_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_POWER_MANAGEMENT_CTRL_SHDW]
	str	r1, [r2, #EMIF_PMCR_SHDW_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_SDRAM_OUTPUT_IMPEDANCE_CALIBRATION_CONFIG]
	str	r1, [r2, #EMIF_ZQCFG_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_DDR_PHY_CTRL_1]
	str	r1, [r2, #EMIF_DDR_PHY_CTLR_1_OFFSET]

	ldr	r1, [r0, #EMIF_COS_CONFIG]
	str	r1, [r2, #EMIF_COS_CONFIG_OFFSET]

	ldr	r1, [r0, #EMIF_PRIORITY_TO_CLASS_OF_SERVICE_MAPPING]
	str	r1, [r2, #EMIF_PRIORITY_TO_COS_MAPPING_OFFSET]

	ldr	r1, [r0, #EMIF_CONNECTION_ID_TO_CLASS_OF_SERVICE_1_MAPPING]
	str	r1, [r2, #EMIF_CONNECT_ID_SERV_1_MAP_OFFSET]

	ldr	r1, [r0, #EMIF_CONNECTION_ID_TO_CLASS_OF_SERVICE_2_MAPPING]
	str	r1, [r2, #EMIF_CONNECT_ID_SERV_2_MAP_OFFSET]

	ldr	r1, [r0, #EMIF_OCP_CONFIG]
	str	r1, [r2, #EMIF_OCP_CONFIG_VAL_OFFSET]
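
	/*
	 * On AM437x (EMIF_SRAM_AM43_REG_LAYOUT) additional state must also
	 * be saved: read/write leveling control, LPDDR2 NVM timings, DLL
	 * calibration control and the external PHY control register block.
	 */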
	ldr	r5, [r4, #EMIF_PM_CONFIG_OFFSET]
	cmp	r5, #EMIF_SRAM_AM43_REG_LAYOUT
	bne	emif_skip_save_extra_regs

	ldr	r1, [r0, #EMIF_READ_WRITE_LEVELING_RAMP_CONTROL]
	str	r1, [r2, #EMIF_RD_WR_LEVEL_RAMP_CTRL_OFFSET]

	ldr	r1, [r0, #EMIF_READ_WRITE_EXECUTION_THRESHOLD]
	str	r1, [r2, #EMIF_RD_WR_EXEC_THRESH_OFFSET]

	ldr	r1, [r0, #EMIF_LPDDR2_NVM_TIMING]
	str	r1, [r2, #EMIF_LPDDR2_NVM_TIM_OFFSET]

	ldr	r1, [r0, #EMIF_LPDDR2_NVM_TIMING_SHDW]
	str	r1, [r2, #EMIF_LPDDR2_NVM_TIM_SHDW_OFFSET]

	ldr	r1, [r0, #EMIF_DLL_CALIB_CTRL]
	str	r1, [r2, #EMIF_DLL_CALIB_CTRL_VAL_OFFSET]

	ldr	r1, [r0, #EMIF_DLL_CALIB_CTRL_SHDW]
	str	r1, [r2, #EMIF_DLL_CALIB_CTRL_VAL_SHDW_OFFSET]

	/* Loop and save entire block of emif phy regs */
	mov	r5, #0x0
	add	r4, r2, #EMIF_EXT_PHY_CTRL_VALS_OFFSET
	add	r3, r0, #EMIF_EXT_PHY_CTRL_1
ddr_phy_ctrl_save:
	ldr	r1, [r3, r5]
	str	r1, [r4, r5]
	add	r5, r5, #0x4
	cmp	r5, #AM43XX_EMIF_PHY_CTRL_REG_COUNT
	bne	ddr_phy_ctrl_save

emif_skip_save_extra_regs:
	ldmfd	sp!, {r4 - r11, pc}	@ restore regs and return
ENDPROC(ti_emif_save_context)

/*
 * void ti_emif_restore_context(void)
 *
 * Used during resume to restore the context of all required EMIF registers
 * from local memory after the EMIF has lost context during a sleep transition.
 * Operates on the PHYSICAL address of the EMIF.
 */
ENTRY(ti_emif_restore_context)
	adr	r4, ti_emif_pm_sram_data
	ldr	r0, [r4, #EMIF_PM_BASE_ADDR_PHYS_OFFSET]
	ldr	r2, [r4, #EMIF_PM_REGS_PHYS_OFFSET]

	/* Config EMIF Timings */
	ldr	r1, [r2, #EMIF_DDR_PHY_CTLR_1_OFFSET]
	str	r1, [r0, #EMIF_DDR_PHY_CTRL_1]
	str	r1, [r0, #EMIF_DDR_PHY_CTRL_1_SHDW]

	ldr	r1, [r2, #EMIF_TIMING1_VAL_OFFSET]
	str	r1, [r0, #EMIF_SDRAM_TIMING_1]
	str	r1, [r0, #EMIF_SDRAM_TIMING_1_SHDW]

	ldr	r1, [r2, #EMIF_TIMING2_VAL_OFFSET]
	str	r1, [r0, #EMIF_SDRAM_TIMING_2]
	str	r1, [r0, #EMIF_SDRAM_TIMING_2_SHDW]

	ldr	r1, [r2, #EMIF_TIMING3_VAL_OFFSET]
	str	r1, [r0, #EMIF_SDRAM_TIMING_3]
	str	r1, [r0, #EMIF_SDRAM_TIMING_3_SHDW]

	ldr	r1, [r2, #EMIF_REF_CTRL_VAL_OFFSET]
	str	r1, [r0, #EMIF_SDRAM_REFRESH_CONTROL]
	str	r1, [r0, #EMIF_SDRAM_REFRESH_CTRL_SHDW]

	ldr	r1, [r2, #EMIF_PMCR_VAL_OFFSET]
	str	r1, [r0, #EMIF_POWER_MANAGEMENT_CONTROL]

	ldr	r1, [r2, #EMIF_PMCR_SHDW_VAL_OFFSET]
	str	r1, [r0, #EMIF_POWER_MANAGEMENT_CTRL_SHDW]

	ldr	r1, [r2, #EMIF_COS_CONFIG_OFFSET]
	str	r1, [r0, #EMIF_COS_CONFIG]

	ldr	r1, [r2, #EMIF_PRIORITY_TO_COS_MAPPING_OFFSET]
	str	r1, [r0, #EMIF_PRIORITY_TO_CLASS_OF_SERVICE_MAPPING]

	ldr	r1, [r2, #EMIF_CONNECT_ID_SERV_1_MAP_OFFSET]
	str	r1, [r0, #EMIF_CONNECTION_ID_TO_CLASS_OF_SERVICE_1_MAPPING]

	ldr	r1, [r2, #EMIF_CONNECT_ID_SERV_2_MAP_OFFSET]
	str	r1, [r0, #EMIF_CONNECTION_ID_TO_CLASS_OF_SERVICE_2_MAPPING]

	ldr	r1, [r2, #EMIF_OCP_CONFIG_VAL_OFFSET]
	str	r1, [r0, #EMIF_OCP_CONFIG]

	ldr	r5, [r4, #EMIF_PM_CONFIG_OFFSET]
	cmp	r5, #EMIF_SRAM_AM43_REG_LAYOUT
	bne	emif_skip_restore_extra_regs

	ldr	r1, [r2, #EMIF_RD_WR_LEVEL_RAMP_CTRL_OFFSET]
	str	r1, [r0, #EMIF_READ_WRITE_LEVELING_RAMP_CONTROL]

	ldr	r1, [r2, #EMIF_RD_WR_EXEC_THRESH_OFFSET]
	str	r1, [r0, #EMIF_READ_WRITE_EXECUTION_THRESHOLD]

	ldr	r1, [r2, #EMIF_LPDDR2_NVM_TIM_OFFSET]
	str	r1, [r0, #EMIF_LPDDR2_NVM_TIMING]

	ldr	r1, [r2, #EMIF_LPDDR2_NVM_TIM_SHDW_OFFSET]
	str	r1, [r0, #EMIF_LPDDR2_NVM_TIMING_SHDW]

	ldr	r1, [r2, #EMIF_DLL_CALIB_CTRL_VAL_OFFSET]
	str	r1, [r0, #EMIF_DLL_CALIB_CTRL]

	ldr	r1, [r2, #EMIF_DLL_CALIB_CTRL_VAL_SHDW_OFFSET]
	str	r1, [r0, #EMIF_DLL_CALIB_CTRL_SHDW]

	ldr	r1, [r2, #EMIF_ZQCFG_VAL_OFFSET]
	str	r1, [r0, #EMIF_SDRAM_OUTPUT_IMPEDANCE_CALIBRATION_CONFIG]

	/* Loop and restore entire block of emif phy regs */
	mov	r5, #0x0
	/*
	 * Load ti_emif_regs_amx3 + EMIF_EXT_PHY_CTRL_VALS_OFFSET for the
	 * address of the phy register save space.
	 */
	add	r3, r2, #EMIF_EXT_PHY_CTRL_VALS_OFFSET
	add	r4, r0, #EMIF_EXT_PHY_CTRL_1
ddr_phy_ctrl_restore:
	ldr	r1, [r3, r5]
	str	r1, [r4, r5]
	add	r5, r5, #0x4
	cmp	r5, #AM43XX_EMIF_PHY_CTRL_REG_COUNT
	bne	ddr_phy_ctrl_restore

emif_skip_restore_extra_regs:
	/*
	 * Output impedance calibration is needed only for DDR3, but since
	 * its initial state is disabled for DDR2, there is no harm in
	 * restoring the old configuration.
	 */
	ldr	r1, [r2, #EMIF_ZQCFG_VAL_OFFSET]
	str	r1, [r0, #EMIF_SDRAM_OUTPUT_IMPEDANCE_CALIBRATION_CONFIG]

	/* Write to sdcfg last for DDR2 only */
	ldr	r1, [r2, #EMIF_SDCFG_VAL_OFFSET]
	and	r2, r1, #SDRAM_TYPE_MASK
	cmp	r2, #EMIF_SDCFG_TYPE_DDR2
	streq	r1, [r0, #EMIF_SDRAM_CONFIG]

	mov	pc, lr
ENDPROC(ti_emif_restore_context)
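
/*
 * Resume ordering note: ti_emif_restore_context() above rewrites SDRAM_CONFIG
 * only for DDR2; for DDR3 the PHY is instead brought back by re-running
 * hardware leveling, see ti_emif_run_hw_leveling() below.
 */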

/*
 * void ti_emif_run_hw_leveling(void)
 *
 * Used during resume to run hardware leveling again and restore the
 * configuration of the EMIF PHY, only for DDR3.
 */
ENTRY(ti_emif_run_hw_leveling)
	adr	r4, ti_emif_pm_sram_data
	ldr	r0, [r4, #EMIF_PM_BASE_ADDR_PHYS_OFFSET]

	ldr	r3, [r0, #EMIF_READ_WRITE_LEVELING_CONTROL]
	orr	r3, r3, #RDWRLVLFULL_START
	ldr	r2, [r0, #EMIF_SDRAM_CONFIG]
	and	r2, r2, #SDRAM_TYPE_MASK
	cmp	r2, #EMIF_SDCFG_TYPE_DDR3
	bne	skip_hwlvl

	str	r3, [r0, #EMIF_READ_WRITE_LEVELING_CONTROL]

	/*
	 * If EMIF registers are touched during the initial stage of the HW
	 * leveling sequence, an L3 NOC timeout error is issued because the
	 * EMIF does not respond. This is not fatal, but it is avoidable.
	 * This small wait loop gives the condition enough time to clear,
	 * even in the worst case of the CPU running at its maximum speed
	 * of 1 GHz.
	 */
	mov	r2, #0x2000
1:
	subs	r2, r2, #0x1
	bne	1b

	/* Bit clears when operation is complete */
2:	ldr	r1, [r0, #EMIF_READ_WRITE_LEVELING_CONTROL]
	tst	r1, #RDWRLVLFULL_START
	bne	2b

skip_hwlvl:
	mov	pc, lr
ENDPROC(ti_emif_run_hw_leveling)

/*
 * void ti_emif_enter_sr(void)
 *
 * Programs the EMIF to tell the SDRAM to enter into self-refresh
 * mode during a sleep transition. Operates on the VIRTUAL address
 * of the EMIF.
 */
ENTRY(ti_emif_enter_sr)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack

	adr	r4, ti_emif_pm_sram_data
	ldr	r0, [r4, #EMIF_PM_BASE_ADDR_VIRT_OFFSET]
	ldr	r2, [r4, #EMIF_PM_REGS_VIRT_OFFSET]

	ldr	r1, [r0, #EMIF_POWER_MANAGEMENT_CONTROL]
	bic	r1, r1, #EMIF_POWER_MGMT_SELF_REFRESH_MODE_MASK
	orr	r1, r1, #EMIF_POWER_MGMT_SELF_REFRESH_MODE
	str	r1, [r0, #EMIF_POWER_MANAGEMENT_CONTROL]

	ldmfd	sp!, {r4 - r11, pc}	@ restore regs and return
ENDPROC(ti_emif_enter_sr)
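
/*
 * Note: ti_emif_save_context(), ti_emif_enter_sr() and ti_emif_abort_sr()
 * take the EMIF's virtual address, while ti_emif_restore_context(),
 * ti_emif_run_hw_leveling() and ti_emif_exit_sr() take the physical address,
 * presumably because the latter are meant for the early resume path where
 * kernel mappings are not yet usable.
 */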

/*
 * void ti_emif_exit_sr(void)
 *
 * Programs the EMIF to tell the SDRAM to exit self-refresh mode
 * after a sleep transition. Operates on the PHYSICAL address of
 * the EMIF.
 */
ENTRY(ti_emif_exit_sr)
	adr	r4, ti_emif_pm_sram_data
	ldr	r0, [r4, #EMIF_PM_BASE_ADDR_PHYS_OFFSET]
	ldr	r2, [r4, #EMIF_PM_REGS_PHYS_OFFSET]

	/*
	 * Toggle EMIF to exit refresh mode:
	 * if the EMIF lost context, PWR_MGT_CTRL is currently 0, so writing
	 * disable (0x0) won't do diddly squat! Do a toggle from self-refresh
	 * (0x2) to disable (0x0) here instead.
	 * *If* the EMIF did not lose context, nothing is broken, as we write
	 * the same value (0x2) to the register before we write a disable (0x0).
	 */
	ldr	r1, [r2, #EMIF_PMCR_VAL_OFFSET]
	bic	r1, r1, #EMIF_POWER_MGMT_SELF_REFRESH_MODE_MASK
	orr	r1, r1, #EMIF_POWER_MGMT_SELF_REFRESH_MODE
	str	r1, [r0, #EMIF_POWER_MANAGEMENT_CONTROL]
	bic	r1, r1, #EMIF_POWER_MGMT_SELF_REFRESH_MODE_MASK
	str	r1, [r0, #EMIF_POWER_MANAGEMENT_CONTROL]

	/* Wait for EMIF to become ready */
1:	ldr	r1, [r0, #EMIF_STATUS]
	tst	r1, #EMIF_STATUS_READY
	beq	1b

	mov	pc, lr
ENDPROC(ti_emif_exit_sr)

/*
 * void ti_emif_abort_sr(void)
 *
 * Disables self-refresh after a failed transition to a low-power
 * state so the kernel can jump back to DDR and follow the abort path.
 * Operates on the VIRTUAL address of the EMIF.
 */
ENTRY(ti_emif_abort_sr)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack

	adr	r4, ti_emif_pm_sram_data
	ldr	r0, [r4, #EMIF_PM_BASE_ADDR_VIRT_OFFSET]
	ldr	r2, [r4, #EMIF_PM_REGS_VIRT_OFFSET]

	ldr	r1, [r2, #EMIF_PMCR_VAL_OFFSET]
	bic	r1, r1, #EMIF_POWER_MGMT_SELF_REFRESH_MODE_MASK
	str	r1, [r0, #EMIF_POWER_MANAGEMENT_CONTROL]

	/* Wait for EMIF to become ready */
1:	ldr	r1, [r0, #EMIF_STATUS]
	tst	r1, #EMIF_STATUS_READY
	beq	1b

	ldmfd	sp!, {r4 - r11, pc}	@ restore regs and return
ENDPROC(ti_emif_abort_sr)

	.align 3
ENTRY(ti_emif_pm_sram_data)
	.space EMIF_PM_DATA_SIZE
ENTRY(ti_emif_sram_sz)
	.word	. - ti_emif_save_context
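
/*
 * Consumption sketch (comment only, nothing here is assembled): this code
 * and ti_emif_pm_sram_data are expected to be copied into on-chip SRAM and
 * called through function pointers resolved against that copy. Assuming the
 * interface declared in include/linux/ti-emif-sram.h, a platform PM driver
 * would do roughly:
 *
 *	struct ti_emif_pm_functions fns;
 *	void (*emif_enter_sr)(void);
 *	void (*emif_abort_sr)(void);
 *	int ret;
 *
 *	ret = ti_emif_copy_pm_function_table(sram_pool, &fns);
 *	if (ret)
 *		return ret;
 *
 *	emif_enter_sr = (void *)fns.enter_sr;
 *	emif_abort_sr = (void *)fns.abort_sr;
 *
 *	emif_enter_sr();
 *	// ... attempt the low-power transition ...
 *	if (transition_failed)
 *		emif_abort_sr();
 *
 * The struct layout, helper name, and the sram_pool/transition_failed
 * variables above are assumptions for illustration; see the ti-emif-pm
 * driver for the real API.
 */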