1 /* 2 * Copyright 2021-2024 NXP 3 * 4 * SPDX-License-Identifier: BSD-3-Clause 5 */ 6 7 #include <assert.h> 8 #include <stdbool.h> 9 10 #include <arch_helpers.h> 11 #include <bl31/interrupt_mgmt.h> 12 #include <common/runtime_svc.h> 13 #include <lib/mmio.h> 14 #include <lib/spinlock.h> 15 #include <plat/common/platform.h> 16 17 #include <platform_def.h> 18 19 #include <dram.h> 20 #include <upower_api.h> 21 22 #define PHY_FREQ_SEL_INDEX(x) ((x) << 16) 23 #define PHY_FREQ_MULTICAST_EN(x) ((x) << 8) 24 #define DENALI_PHY_1537 U(0x5804) 25 26 #define IMX_DDRC_BASE U(0x2E060000) 27 #define SAVED_DRAM_DATA_BASE U(0x20055000) 28 #define IMX_DRAM_BASE U(0x80000000) 29 #define DENALI_CTL_143 U(0x23C) 30 #define DENALI_CTL_144 U(0x240) 31 #define DENALI_CTL_146 U(0x248) 32 #define LP_STATE_CS_IDLE U(0x404000) 33 #define LP_STATE_CS_PD_CG U(0x4F4F00) 34 #define LPI_WAKEUP_EN_SHIFT U(8) 35 #define IMX_LPAV_SIM_BASE 0x2DA50000 36 #define LPDDR_CTRL 0x14 37 #define LPDDR_AUTO_LP_MODE_DISABLE BIT(24) 38 #define SOC_LP_CMD_SHIFT U(15) 39 #define LPDDR_CTRL2 0x18 40 #define LPDDR_EN_CLKGATE (0x1<<17) 41 #define LPDDR_MAX_CLKDIV_EN (0x1 << 16) 42 #define LP_AUTO_ENTRY_EN 0x4 43 #define LP_AUTO_EXIT_EN 0xF 44 45 #define DENALI_CTL_00 U(0x0) 46 #define DENALI_CTL_23 U(0x5c) 47 #define DFIBUS_FREQ_INIT_SHIFT U(24) 48 #define TSREF2PHYMSTR_SHIFT U(8) 49 #define TSREF2PHYMSTR_MASK GENMASK(13, 8) 50 51 #define DENALI_CTL_24 U(0x60) 52 #define DENALI_CTL_25 U(0x64) 53 54 #define DENALI_CTL_93 U(0x174) 55 #define PWRUP_SREFRESH_EXIT BIT(0) 56 57 #define DENALI_CTL_127 U(0x1fc) 58 #define PHYMSTR_TRAIN_AFTER_INIT_COMPLETE BIT(16) 59 60 #define DENALI_CTL_147 U(0x24c) 61 #define DENALI_CTL_153 U(0x264) 62 #define PCPCS_PD_EN BIT(8) 63 64 #define DENALI_CTL_249 U(0x3E4) 65 #define DENALI_CTL_266 U(0x428) 66 67 #define DENALI_PHY_1547 U(0x582c) 68 #define PHY_LP4_BOOT_DISABLE BIT(8) 69 70 #define DENALI_PHY_1559 U(0x585c) 71 #define DENALI_PHY_1590 U(0x58D8) 72 73 #define DENALI_PI_00 U(0x2000) 74 #define DENALI_PI_04 U(0x2010) 75 #define DENALI_PI_52 U(0x20D0) 76 #define DENALI_PI_26 U(0x2068) 77 #define DENALI_PI_33 U(0x2084) 78 #define DENALI_PI_65 U(0x2104) 79 #define DENALI_PI_77 U(0x2134) 80 #define DENALI_PI_134 U(0x2218) 81 #define DENALI_PI_131 U(0x220C) 82 #define DENALI_PI_132 U(0x2210) 83 #define DENALI_PI_134 U(0x2218) 84 #define DENALI_PI_137 U(0x2224) 85 #define DENALI_PI_174 U(0x22B8) 86 #define DENALI_PI_175 U(0x22BC) 87 #define DENALI_PI_181 U(0x22D4) 88 #define DENALI_PI_182 U(0x22D8) 89 #define DENALI_PI_191 U(0x22FC) 90 #define DENALI_PI_192 U(0x2300) 91 #define DENALI_PI_212 U(0x2350) 92 #define DENALI_PI_214 U(0x2358) 93 #define DENALI_PI_217 U(0x2364) 94 95 #define LPDDR3_TYPE U(0x7) 96 #define LPDDR4_TYPE U(0xB) 97 98 extern void upower_wait_resp(void); 99 100 struct dram_cfg_param { 101 uint32_t reg; 102 uint32_t val; 103 }; 104 105 struct dram_timing_info { 106 /* ddr controller config */ 107 struct dram_cfg_param *ctl_cfg; 108 unsigned int ctl_cfg_num; 109 /* pi config */ 110 struct dram_cfg_param *pi_cfg; 111 unsigned int pi_cfg_num; 112 /* phy freq1 config */ 113 struct dram_cfg_param *phy_f1_cfg; 114 unsigned int phy_f1_cfg_num; 115 /* phy freq2 config */ 116 struct dram_cfg_param *phy_f2_cfg; 117 unsigned int phy_f2_cfg_num; 118 /* automatic low power config */ 119 struct dram_cfg_param *auto_lp_cfg; 120 unsigned int auto_lp_cfg_num; 121 /* initialized drate table */ 122 unsigned int fsp_table[3]; 123 }; 124 125 #define CTL_NUM U(680) 126 #define PI_NUM U(298) 127 #define PHY_NUM U(1654) 128 #define PHY_DIFF_NUM U(49) 129 #define AUTO_LP_NUM U(3) 130 struct dram_cfg { 131 uint32_t ctl_cfg[CTL_NUM]; 132 uint32_t pi_cfg[PI_NUM]; 133 uint32_t phy_full[PHY_NUM]; 134 uint32_t phy_diff[PHY_DIFF_NUM]; 135 uint32_t auto_lp_cfg[AUTO_LP_NUM]; 136 }; 137 138 struct dram_timing_info *info; 139 struct dram_cfg *dram_timing_cfg; 140 141 /* mark if dram cfg is already saved */ 142 static bool dram_cfg_saved; 143 static bool dram_auto_lp_true; 144 static uint32_t dram_class, dram_ctl_143; 145 146 /* PHY register index for frequency diff */ 147 uint32_t freq_specific_reg_array[PHY_DIFF_NUM] = { 148 90, 92, 93, 96, 97, 100, 101, 102, 103, 104, 114, 149 346, 348, 349, 352, 353, 356, 357, 358, 359, 360, 150 370, 602, 604, 605, 608, 609, 612, 613, 614, 615, 151 616, 626, 858, 860, 861, 864, 865, 868, 869, 870, 152 871, 872, 882, 1063, 1319, 1566, 1624, 1625 153 }; 154 155 /* lock used for DDR DVFS */ 156 spinlock_t dfs_lock; 157 static volatile uint32_t core_count; 158 static volatile bool in_progress; 159 static volatile bool sys_dvfs; 160 static int num_fsp; 161 162 static void ddr_init(void) 163 { 164 unsigned int i; 165 166 /* restore the ddr ctl config */ 167 for (i = 0U; i < CTL_NUM; i++) { 168 mmio_write_32(IMX_DDRC_BASE + i * 4, dram_timing_cfg->ctl_cfg[i]); 169 } 170 171 /* load the PI registers */ 172 for (i = 0U; i < PI_NUM; i++) { 173 mmio_write_32(IMX_DDRC_BASE + 0x2000 + i * 4, dram_timing_cfg->pi_cfg[i]); 174 } 175 176 177 /* restore all PHY registers for all the fsp. */ 178 mmio_write_32(IMX_DDRC_BASE + DENALI_PHY_1537, 0x100); 179 /* restore all the phy configs */ 180 for (i = 0U; i < PHY_NUM; i++) { 181 /* skip the reserved registers space */ 182 if (i >= 121U && i <= 255U) { 183 continue; 184 } 185 if (i >= 377U && i <= 511U) { 186 continue; 187 } 188 if (i >= 633U && i <= 767U) { 189 continue; 190 } 191 if (i >= 889U && i <= 1023U) { 192 continue; 193 } 194 if (i >= 1065U && i <= 1279U) { 195 continue; 196 } 197 if (i >= 1321U && i <= 1535U) { 198 continue; 199 } 200 mmio_write_32(IMX_DDRC_BASE + 0x4000 + i * 4, dram_timing_cfg->phy_full[i]); 201 } 202 203 if (dram_class == LPDDR4_TYPE) { 204 /* restore only the diff. */ 205 mmio_write_32(IMX_DDRC_BASE + DENALI_PHY_1537, 0x0); 206 for (i = 0U; i < PHY_DIFF_NUM; i++) { 207 mmio_write_32(IMX_DDRC_BASE + 0x4000 + freq_specific_reg_array[i] * 4, 208 dram_timing_cfg->phy_diff[i]); 209 } 210 } 211 212 /* Re-enable MULTICAST mode */ 213 mmio_write_32(IMX_DDRC_BASE + DENALI_PHY_1537, PHY_FREQ_MULTICAST_EN(1)); 214 } 215 216 void dram_lp_auto_disable(void) 217 { 218 uint32_t lp_auto_en; 219 220 dram_timing_cfg = (struct dram_cfg *)(SAVED_DRAM_DATA_BASE + 221 sizeof(struct dram_timing_info)); 222 lp_auto_en = (mmio_read_32(IMX_DDRC_BASE + DENALI_CTL_146) & (LP_AUTO_ENTRY_EN << 24)); 223 /* Save initial config */ 224 dram_ctl_143 = mmio_read_32(IMX_DDRC_BASE + DENALI_CTL_143); 225 226 if (lp_auto_en && !dram_auto_lp_true) { 227 /* 0.a Save DDRC auto low-power mode parameter */ 228 dram_timing_cfg->auto_lp_cfg[0] = mmio_read_32(IMX_DDRC_BASE + DENALI_CTL_144); 229 dram_timing_cfg->auto_lp_cfg[1] = mmio_read_32(IMX_DDRC_BASE + DENALI_CTL_147); 230 dram_timing_cfg->auto_lp_cfg[2] = mmio_read_32(IMX_DDRC_BASE + DENALI_CTL_146); 231 /* Set LPI_SRPD_LONG_MCCLK_GATE_WAKEUP_F2 to Maximum */ 232 mmio_setbits_32(IMX_DDRC_BASE + DENALI_CTL_143, 0xF << 24); 233 /* 0.b Disable DDRC auto low-power mode interface */ 234 mmio_clrbits_32(IMX_DDRC_BASE + DENALI_CTL_146, LP_AUTO_ENTRY_EN << 24); 235 /* 0.c Read any location to get DRAM out of Self-refresh */ 236 mmio_read_32(IMX_DRAM_BASE); 237 /* 0.d Confirm DRAM is out of Self-refresh */ 238 while ((mmio_read_32(IMX_DDRC_BASE + DENALI_CTL_146) & 239 LP_STATE_CS_PD_CG) != LP_STATE_CS_IDLE) { 240 ; 241 } 242 /* 0.e Disable DDRC auto low-power exit */ 243 mmio_clrbits_32(IMX_DDRC_BASE + DENALI_CTL_147, LP_AUTO_EXIT_EN); 244 /* dram low power mode flag */ 245 dram_auto_lp_true = true; 246 } 247 } 248 249 void dram_lp_auto_enable(void) 250 { 251 /* Switch back to Auto Low-power mode */ 252 if (dram_auto_lp_true) { 253 /* 12.a Confirm DRAM is out of Self-refresh */ 254 while ((mmio_read_32(IMX_DDRC_BASE + DENALI_CTL_146) & 255 LP_STATE_CS_PD_CG) != LP_STATE_CS_IDLE) { 256 ; 257 } 258 /* 12.b Enable DDRC auto low-power exit */ 259 /* 260 * 12.c TBC! : Set DENALI_CTL_144 [LPI_CTRL_REQ_EN[24]] and 261 * [DFI_LP_VERSION[16]] back to default settings = 1b'1. 262 */ 263 /* 264 * 12.d Reconfigure DENALI_CTL_144 [LPI_WAKEUP_EN[5:0]] bit 265 * LPI_WAKEUP_EN[3] = 1b'1. 266 */ 267 mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_144, dram_timing_cfg->auto_lp_cfg[0]); 268 mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_147, dram_timing_cfg->auto_lp_cfg[1]); 269 /* 12.e Re-enable DDRC auto low-power mode interface */ 270 mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_146, dram_timing_cfg->auto_lp_cfg[2]); 271 /* restore ctl config */ 272 mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_143, dram_ctl_143); 273 /* dram low power mode flag */ 274 dram_auto_lp_true = false; 275 } 276 } 277 278 void dram_enter_self_refresh(void) 279 { 280 /* disable auto low power interface */ 281 dram_lp_auto_disable(); 282 /* 1. config the PCC_LPDDR4[SSADO] to 2b'11 for ACK domain 0/1's STOP */ 283 mmio_setbits_32(IMX_PCC5_BASE + 0x108, 0x2 << 22); 284 /* 1.a Clock gate PCC_LPDDR4[CGC] and no software reset PCC_LPDDR4[SWRST] */ 285 mmio_setbits_32(IMX_PCC5_BASE + 0x108, (BIT(30) | BIT(28))); 286 287 /* 288 * 2. Make sure the DENALI_CTL_144[LPI_WAKEUP_EN[5:0]] has the bit 289 * LPI_WAKEUP_EN[3] = 1b'1. This enables the option 'self-refresh 290 * long with mem and ctlr clk gating or self-refresh power-down long 291 * with mem and ctlr clk gating' 292 */ 293 mmio_setbits_32(IMX_DDRC_BASE + DENALI_CTL_144, BIT(3) << LPI_WAKEUP_EN_SHIFT); 294 /* TODO: Needed ? 2.a DENALI_CTL_144[LPI_TIMER_WAKEUP_F2] */ 295 //mmio_setbits_32(IMX_DDRC_BASE + DENALI_CTL_144, BIT(0)); 296 297 /* 298 * 3a. Config SIM_LPAV LPDDR_CTRL[LPDDR_AUTO_LP_MODE_DISABLE] to 1b'0(enable 299 * the logic to automatic handles low power entry/exit. This is the recommended 300 * option over handling through software. 301 * 3b. Config the SIM_LPAV LPDDR_CTRL[SOC_LP_CMD] to 6b'101001(encoding for 302 * self_refresh with both DDR controller and DRAM clock gate. THis is mandatory 303 * since LPPDR logic will be power gated). 304 */ 305 mmio_clrbits_32(IMX_LPAV_SIM_BASE + LPDDR_CTRL, LPDDR_AUTO_LP_MODE_DISABLE); 306 mmio_clrsetbits_32(IMX_LPAV_SIM_BASE + LPDDR_CTRL, 307 0x3f << SOC_LP_CMD_SHIFT, 0x29 << SOC_LP_CMD_SHIFT); 308 /* 3.c clock gate ddr controller */ 309 mmio_setbits_32(IMX_LPAV_SIM_BASE + LPDDR_CTRL2, LPDDR_EN_CLKGATE); 310 /* 3.d lpddr max clk div en */ 311 mmio_clrbits_32(IMX_LPAV_SIM_BASE + LPDDR_CTRL2, LPDDR_MAX_CLKDIV_EN); 312 } 313 314 void dram_exit_self_refresh(void) 315 { 316 dram_lp_auto_enable(); 317 } 318 319 void dram_enter_retention(void) 320 { 321 unsigned int i; 322 323 dram_lp_auto_disable(); 324 325 /* 1. config the PCC_LPDDR4[SSADO] to 2b'11 for ACK domain 0/1's STOP */ 326 mmio_setbits_32(IMX_PCC5_BASE + 0x108, 0x2 << 22); 327 328 /* 329 * 2. Make sure the DENALI_CTL_144[LPI_WAKEUP_EN[5:0]] has the bit 330 * LPI_WAKEUP_EN[3] = 1b'1. This enables the option 'self-refresh 331 * long with mem and ctlr clk gating or self-refresh power-down 332 * long with mem and ctlr clk gating' 333 */ 334 mmio_setbits_32(IMX_DDRC_BASE + DENALI_CTL_144, BIT(3) << LPI_WAKEUP_EN_SHIFT); 335 336 /* 337 * 3a. Config SIM_LPAV LPDDR_CTRL[LPDDR_AUTO_LP_MODE_DISABLE] to 1b'0(enable 338 * the logic to automatic handles low power entry/exit. This is the recommended 339 * option over handling through software. 340 * 3b. Config the SIM_LPAV LPDDR_CTRL[SOC_LP_CMD] to 6b'101001(encoding for 341 * self_refresh with both DDR controller and DRAM clock gate. THis is mandatory 342 * since LPPDR logic will be power gated). 343 */ 344 mmio_clrbits_32(IMX_LPAV_SIM_BASE + LPDDR_CTRL, LPDDR_AUTO_LP_MODE_DISABLE); 345 mmio_clrsetbits_32(IMX_LPAV_SIM_BASE + LPDDR_CTRL, 346 0x3f << SOC_LP_CMD_SHIFT, 0x29 << SOC_LP_CMD_SHIFT); 347 348 /* Save DDR Controller & PHY config. 349 * Set PHY_FREQ_SEL_MULTICAST_EN=0 & PHY_FREQ_SEL_INDEX=1. Read and store all 350 * the PHY registers for F2 into phy_f1_cfg, then read/store the diff between 351 * F1 & F2 into phy_f2_cfg. 352 */ 353 if (!dram_cfg_saved) { 354 info = (struct dram_timing_info *)SAVED_DRAM_DATA_BASE; 355 dram_timing_cfg = (struct dram_cfg *)(SAVED_DRAM_DATA_BASE + 356 sizeof(struct dram_timing_info)); 357 358 /* get the dram type */ 359 dram_class = mmio_read_32(IMX_DDRC_BASE + DENALI_CTL_00); 360 dram_class = (dram_class >> 8) & 0xf; 361 362 /* save the ctl registers */ 363 for (i = 0U; i < CTL_NUM; i++) { 364 dram_timing_cfg->ctl_cfg[i] = mmio_read_32(IMX_DDRC_BASE + i * 4); 365 } 366 dram_timing_cfg->ctl_cfg[0] = dram_timing_cfg->ctl_cfg[0] & 0xFFFFFFFE; 367 368 /* save the PI registers */ 369 for (i = 0U; i < PI_NUM; i++) { 370 dram_timing_cfg->pi_cfg[i] = mmio_read_32(IMX_DDRC_BASE + 0x2000 + i * 4); 371 } 372 dram_timing_cfg->pi_cfg[0] = dram_timing_cfg->pi_cfg[0] & 0xFFFFFFFE; 373 374 /* 375 * Read and store all PHY registers. full array is a full 376 * copy for all the setpoint 377 */ 378 if (dram_class == LPDDR4_TYPE) { 379 mmio_write_32(IMX_DDRC_BASE + DENALI_PHY_1537, 0x10000); 380 for (i = 0U; i < PHY_NUM; i++) { 381 /* Make sure MULTICASE is enabled */ 382 if (i == 1537U) { 383 dram_timing_cfg->phy_full[i] = 0x100; 384 } else { 385 dram_timing_cfg->phy_full[i] = mmio_read_32(IMX_DDRC_BASE + 0x4000 + i * 4); 386 } 387 } 388 389 /* 390 * set PHY_FREQ_SEL_MULTICAST_EN=0 & PHY_FREQ_SEL_INDEX=0. 391 * Read and store only the diff. 392 */ 393 mmio_write_32(IMX_DDRC_BASE + DENALI_PHY_1537, 0x0); 394 /* save only the frequency based diff config to save memory */ 395 for (i = 0U; i < PHY_DIFF_NUM; i++) { 396 dram_timing_cfg->phy_diff[i] = mmio_read_32(IMX_DDRC_BASE + 0x4000 + 397 freq_specific_reg_array[i] * 4); 398 } 399 } else { 400 /* LPDDR3, only f1 need to save */ 401 for (i = 0U; i < info->phy_f1_cfg_num; i++) { 402 info->phy_f1_cfg[i].val = mmio_read_32(info->phy_f1_cfg[i].reg); 403 } 404 } 405 406 dram_cfg_saved = true; 407 } 408 } 409 410 void dram_exit_retention(void) 411 { 412 uint32_t val; 413 414 /* 1. Config the LPAV PLL4 and DDR clock for the desired LPDDR operating frequency. */ 415 mmio_setbits_32(IMX_PCC5_BASE + 0x108, BIT(30)); 416 417 /* 2. Write PCC5.PCC_LPDDR4[SWRST] to 1b'1 to release LPDDR from reset. */ 418 mmio_setbits_32(IMX_PCC5_BASE + 0x108, BIT(28)); 419 420 /* 3. Reload the LPDDR CTL/PI/PHY register */ 421 ddr_init(); 422 423 if (dram_class == LPDDR4_TYPE) { 424 /* 4a. FIXME Set PHY_SET_DFI_INPUT_N parameters to 4'h1. LPDDR4 only */ 425 mmio_write_32(IMX_DDRC_BASE + DENALI_PHY_1559, 0x01010101); 426 427 /* 428 * 4b. CTL PWRUP_SREFRESH_EXIT=1'b0 for disabling self refresh exit 429 * from controller. 430 */ 431 /* 432 * 4c. PI_PWRUP_SELF_REF_EXIT=1, PI_MC_PWRUP_SELF_REF_EXIT=0 for enabling 433 * self refresh exit from PI 434 */ 435 /* 4c. PI_INT_LVL_EN=0 to skip Initialization trainings. */ 436 /* 437 * 4d. PI_WRLVL_EN_F0/1/2= PI_CALVL_EN_F0/1/2= PI_RDLVL_EN_F0/1/2= 438 * PI_RDLVL_GATE_EN_F0/1/2= PI_WDQLVL_EN_F0/1/2=0x2. 439 * Enable non initialization trainings. 440 */ 441 /* 4e. PI_PWRUP_SREFRESH_EXIT_CS=0xF */ 442 /* 4f. PI_DLL_RESET=0x1 */ 443 mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_137, 0x1); 444 /* PI_PWRUP_SELF_REF_EXIT = 1 */ 445 mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_132, 0x01000000); 446 /* PI_MC_PWRUP_SELF_REF_EXIT = 0 */ 447 mmio_clrbits_32(IMX_DDRC_BASE + DENALI_PI_132, BIT(16)); 448 /* PI_INT_LVL_EN = 0 */ 449 mmio_clrbits_32(IMX_DDRC_BASE + DENALI_PI_04, BIT(0)); 450 /* PI_WRLVL_EN_F0 = 3, PI_WRLVL_EN_F1 = 3 */ 451 mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_174, 0x03030000); 452 /* PI_WRLVL_EN_F2 = 3 */ 453 mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_175, 0x03); 454 /* PI_CALVL_EN_F0 = 3, PI_CALVL_EN_F1 = 3 */ 455 mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_191, 0x03030000); 456 /* PI_CALVL_EN_F2 = 3 */ 457 mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_192, 0x03); 458 /* PI_WDQLVL_EN_F0 = 3 */ 459 mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_212, 0x300); 460 /* PI_WDQLVL_EN_F1 = 3 */ 461 mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_214, 0x03000000); 462 /* PI_WDQLVL_EN_F2 = 3 */ 463 mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_217, 0x300); 464 /* PI_EDLVL_EN_F0 = 3, PI_EDLVL_GATE_EN_F0 = 3 */ 465 mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_181, 0x03030000); 466 /* 467 * PI_RDLVL_EN_F1 = 3, PI_RDLVL_GATE_EN_F1 = 3, 468 * PI_RDLVL_EN_F2 = 3, PI_RDLVL_GATE_EN_F2 = 3 469 */ 470 mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_182, 0x03030303); 471 /* PI_PWRUP_SREFRESH_EXIT_CS = 0xF */ 472 mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_134, 0x000F0000); 473 } else { 474 /* PI_DLL_RESET=1 */ 475 mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_137, 0x1); 476 /* PI_PWRUP_SELF_REF_EXIT=1 */ 477 mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_132, 0x01000000); 478 /* PI_MC_PWRUP_SELF_REF_EXIT=0 */ 479 mmio_clrbits_32(IMX_DDRC_BASE + DENALI_PI_132, BIT(16)); 480 /* PI_INT_LVL_EN=0 */ 481 mmio_clrbits_32(IMX_DDRC_BASE + DENALI_PI_04, BIT(0)); 482 /* PI_WRLVL_EN_F0=3 */ 483 mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_174, 0x00030000); 484 /* PI_CALVL_EN_F0=3 */ 485 mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_191, 0x00030000); 486 /* PI_RDLVL_EN_F0=3,PI_RDLVL_GATE_EN_F0=3 */ 487 mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_181, 0x03030000); 488 /* PI_PWRUP_SREFRESH_EXIT_CS=0xF */ 489 mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_134, 0x000F0000); 490 } 491 492 mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_144, 0x00002D00); 493 494 /* Force in-order AXI read data */ 495 mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_144, 0x1); 496 497 /* 498 * Disable special R/W group switches so that R/W group placement 499 * is always at END of R/W group. 500 */ 501 mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_249, 0x0); 502 503 /* Reduce time for IO pad calibration */ 504 mmio_write_32(IMX_DDRC_BASE + DENALI_PHY_1590, 0x01000000); 505 506 mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_25, 0x00020100); 507 508 /* PD disable */ 509 mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_153, 0x04040000); 510 /* 511 * 5. Disable automatic LP entry and PCPCS modes LP_AUTO_ENTRY_EN 512 * to 1b'0, PCPCS_PD_EN to 1b'0 513 */ 514 515 upwr_xcp_set_ddr_retention(APD_DOMAIN, 0, NULL); 516 upower_wait_resp(); 517 518 if (dram_class == LPDDR4_TYPE) { 519 /* 7. Write PI START parameter to 1'b1 */ 520 mmio_write_32(IMX_DDRC_BASE + DENALI_PI_00, 0x00000b01); 521 522 /* 8. Write CTL START parameter to 1'b1 */ 523 mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_00, 0x00000b01); 524 } else { 525 /* 7. Write PI START parameter to 1'b1 */ 526 mmio_write_32(IMX_DDRC_BASE + DENALI_PI_00, 0x00000701); 527 528 /* 8. Write CTL START parameter to 1'b1 */ 529 mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_00, 0x00000701); 530 } 531 532 /* 9. DENALI_CTL_266: Wait for INT_STATUS_INIT=0x2 */ 533 do { 534 val = (mmio_read_32(IMX_DDRC_BASE + DENALI_CTL_266) >> 8) & 0xFF; 535 } while (val != 0x2); 536 537 /* 538 * 10. Run SW trainings by setting PI_CALVL_REQ,PI_WRLVL_REQ,PI_RDLVL_GATE_REQ, 539 * PI_RDLVL_REQ,PI_WDQLVL_REQ(NA for LPDDR3) in same order. 540 */ 541 if (dram_class == LPDDR4_TYPE) { 542 mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_52, 0x10000); /* CALVL */ 543 mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_26, 0x100); /* WRLVL */ 544 mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_33, 0x10000); /* RDGATE */ 545 mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_33, 0x100); /* RDQLVL */ 546 mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_65, 0x10000); /* WDQLVL */ 547 548 /* 11. Wait for trainings to get complete by polling PI_INT_STATUS */ 549 while ((mmio_read_32(IMX_DDRC_BASE + DENALI_PI_77) & 0x07E00000) != 0x07E00000) { 550 ; 551 } 552 } else { 553 mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_52, 0x10000); /* CALVL */ 554 mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_26, 0x100); /* WRLVL */ 555 mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_33, 0x10000); /* RDGATE */ 556 mmio_setbits_32(IMX_DDRC_BASE + DENALI_PI_33, 0x100); /* RDQLVL */ 557 while ((mmio_read_32(IMX_DDRC_BASE + DENALI_PI_77) & 0x05E00000) != 0x05E00000) { 558 ; 559 } 560 } 561 562 dram_lp_auto_enable(); 563 } 564 565 #define LPDDR_DONE (0x1<<4) 566 #define SOC_FREQ_CHG_ACK (0x1<<6) 567 #define SOC_FREQ_CHG_REQ (0x1<<7) 568 #define LPI_WAKEUP_EN (0x4<<8) 569 #define SOC_FREQ_REQ (0x1<<11) 570 571 static void set_cgc2_ddrclk(uint8_t src, uint8_t div) 572 { 573 574 /* Wait until the reg is unlocked for writing */ 575 while (mmio_read_32(IMX_CGC2_BASE + 0x40) & BIT(31)) 576 ; 577 578 mmio_write_32(IMX_CGC2_BASE + 0x40, (src << 28) | (div << 21)); 579 /* Wait for the clock switching done */ 580 while (!(mmio_read_32(IMX_CGC2_BASE + 0x40) & BIT(27))) 581 ; 582 } 583 static void set_ddr_clk(uint32_t ddr_freq) 584 { 585 /* Disable DDR clock */ 586 mmio_clrbits_32(IMX_PCC5_BASE + 0x108, BIT(30)); 587 switch (ddr_freq) { 588 /* boot frequency ? */ 589 case 48: 590 set_cgc2_ddrclk(2, 0); 591 break; 592 /* default bypass frequency for fsp 1 */ 593 case 192: 594 set_cgc2_ddrclk(0, 1); 595 break; 596 case 384: 597 set_cgc2_ddrclk(0, 0); 598 break; 599 case 264: 600 set_cgc2_ddrclk(4, 3); 601 break; 602 case 528: 603 set_cgc2_ddrclk(4, 1); 604 break; 605 default: 606 break; 607 } 608 /* Enable DDR clock */ 609 mmio_setbits_32(IMX_PCC5_BASE + 0x108, BIT(30)); 610 611 /* Wait until the reg is unlocked for writing */ 612 while (mmio_read_32(IMX_CGC2_BASE + 0x40) & BIT(31)) { 613 ; 614 } 615 } 616 617 #define AVD_SIM_LPDDR_CTRL (IMX_LPAV_SIM_BASE + 0x14) 618 #define AVD_SIM_LPDDR_CTRL2 (IMX_LPAV_SIM_BASE + 0x18) 619 #define MAX_FSP_NUM U(3) 620 #define DDR_DFS_GET_FSP_COUNT 0x10 621 #define DDR_BYPASS_DRATE U(400) 622 623 extern int upower_pmic_i2c_write(uint32_t reg_addr, uint32_t reg_val); 624 625 /* Normally, we only switch frequency between 1(bypass) and 2(highest) */ 626 int lpddr4_dfs(uint32_t freq_index) 627 { 628 uint32_t lpddr_ctrl, lpddr_ctrl2; 629 uint32_t ddr_ctl_144; 630 631 /* 632 * Valid index: 0 to 2 633 * index 0: boot frequency 634 * index 1: bypass frequency 635 * index 2: highest frequency 636 */ 637 if (freq_index > 2U) { 638 return -1; 639 } 640 641 /* 642 * increase the voltage to 1.1V firstly before increase frequency 643 * and APD enter OD mode 644 */ 645 if (freq_index == 2U && sys_dvfs) { 646 upower_pmic_i2c_write(0x22, 0x28); 647 } 648 649 /* Enable LPI_WAKEUP_EN */ 650 ddr_ctl_144 = mmio_read_32(IMX_DDRC_BASE + DENALI_CTL_144); 651 mmio_setbits_32(IMX_DDRC_BASE + DENALI_CTL_144, LPI_WAKEUP_EN); 652 653 /* put DRAM into long self-refresh & clock gating */ 654 lpddr_ctrl = mmio_read_32(AVD_SIM_LPDDR_CTRL); 655 lpddr_ctrl = (lpddr_ctrl & ~((0x3f << 15) | (0x3 << 9))) | (0x28 << 15) | (freq_index << 9); 656 mmio_write_32(AVD_SIM_LPDDR_CTRL, lpddr_ctrl); 657 658 /* Gating the clock */ 659 lpddr_ctrl2 = mmio_read_32(AVD_SIM_LPDDR_CTRL2); 660 mmio_setbits_32(AVD_SIM_LPDDR_CTRL2, LPDDR_EN_CLKGATE); 661 662 /* Request frequency change */ 663 mmio_setbits_32(AVD_SIM_LPDDR_CTRL, SOC_FREQ_REQ); 664 665 do { 666 lpddr_ctrl = mmio_read_32(AVD_SIM_LPDDR_CTRL); 667 if (lpddr_ctrl & SOC_FREQ_CHG_REQ) { 668 /* Bypass mode */ 669 if (info->fsp_table[freq_index] < DDR_BYPASS_DRATE) { 670 /* Change to PLL bypass mode */ 671 mmio_write_32(IMX_LPAV_SIM_BASE, 0x1); 672 /* change the ddr clock source & frequency */ 673 set_ddr_clk(info->fsp_table[freq_index]); 674 } else { 675 /* Change to PLL unbypass mode */ 676 mmio_write_32(IMX_LPAV_SIM_BASE, 0x0); 677 /* change the ddr clock source & frequency */ 678 set_ddr_clk(info->fsp_table[freq_index] >> 1); 679 } 680 681 mmio_clrsetbits_32(AVD_SIM_LPDDR_CTRL, SOC_FREQ_CHG_REQ, SOC_FREQ_CHG_ACK); 682 continue; 683 } 684 } while ((lpddr_ctrl & LPDDR_DONE) != 0); /* several try? */ 685 686 /* restore the original setting */ 687 mmio_write_32(IMX_DDRC_BASE + DENALI_CTL_144, ddr_ctl_144); 688 mmio_write_32(AVD_SIM_LPDDR_CTRL2, lpddr_ctrl2); 689 690 /* Check the DFS result */ 691 lpddr_ctrl = mmio_read_32(AVD_SIM_LPDDR_CTRL) & 0xF; 692 if (lpddr_ctrl != 0U) { 693 /* Must be something wrong, return failure */ 694 return -1; 695 } 696 697 /* decrease the BUCK3 voltage after frequency changed to lower 698 * and APD in ND_MODE 699 */ 700 if (freq_index == 1U && sys_dvfs) { 701 upower_pmic_i2c_write(0x22, 0x20); 702 } 703 704 /* DFS done successfully */ 705 return 0; 706 } 707 708 /* for the non-primary core, waiting for DFS done */ 709 static uint64_t waiting_dvfs(uint32_t id, uint32_t flags, 710 void *handle, void *cookie) 711 { 712 uint32_t irq; 713 714 irq = plat_ic_acknowledge_interrupt(); 715 if (irq < 1022U) { 716 plat_ic_end_of_interrupt(irq); 717 } 718 719 /* set the WFE done status */ 720 spin_lock(&dfs_lock); 721 core_count++; 722 dsb(); 723 spin_unlock(&dfs_lock); 724 725 while (in_progress) { 726 wfe(); 727 } 728 729 return 0; 730 } 731 732 int dram_dvfs_handler(uint32_t smc_fid, void *handle, 733 u_register_t x1, u_register_t x2, u_register_t x3) 734 { 735 unsigned int fsp_index = x1; 736 uint32_t online_cpus = x2 - 1; 737 uint64_t mpidr = read_mpidr_el1(); 738 unsigned int cpu_id = MPIDR_AFFLVL0_VAL(mpidr); 739 740 /* Get the number of FSPs */ 741 if (x1 == DDR_DFS_GET_FSP_COUNT) { 742 SMC_RET2(handle, num_fsp, info->fsp_table[1]); 743 } 744 745 /* start lpddr frequency scaling */ 746 in_progress = true; 747 sys_dvfs = x3 ? true : false; 748 dsb(); 749 750 /* notify other core wait for scaling done */ 751 for (unsigned int i = 0; i < PLATFORM_CORE_COUNT; i++) 752 /* Skip raise SGI for current CPU */ 753 if (i != cpu_id) { 754 plat_ic_raise_el3_sgi(0x8, i); 755 } 756 757 /* Make sure all the cpu in WFE */ 758 while (online_cpus != core_count) { 759 ; 760 } 761 762 /* Flush the L1/L2 cache */ 763 dcsw_op_all(DCCSW); 764 765 lpddr4_dfs(fsp_index); 766 767 in_progress = false; 768 core_count = 0; 769 dsb(); 770 sev(); 771 isb(); 772 773 SMC_RET1(handle, 0); 774 } 775 776 void dram_init(void) 777 { 778 uint32_t flags = 0; 779 uint32_t rc; 780 unsigned int i; 781 782 /* Register the EL3 handler for DDR DVFS */ 783 set_interrupt_rm_flag(flags, NON_SECURE); 784 rc = register_interrupt_type_handler(INTR_TYPE_EL3, waiting_dvfs, flags); 785 if (rc) { 786 panic(); 787 } 788 789 info = (struct dram_timing_info *)SAVED_DRAM_DATA_BASE; 790 791 /* Get the num of the supported Fsp */ 792 for (i = 0; i < MAX_FSP_NUM; i++) { 793 if (!info->fsp_table[i]) { 794 break; 795 } 796 } 797 798 num_fsp = (i > MAX_FSP_NUM) ? MAX_FSP_NUM : i; 799 } 800