/*
 * Copyright (C) 2018-2024, STMicroelectronics - All Rights Reserved
 *
 * SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
 */

#include <errno.h>
#include <stddef.h>

#include <arch.h>
#include <arch_helpers.h>
#include <common/debug.h>
#include <drivers/clk.h>
#include <drivers/delay_timer.h>
#include <drivers/st/stm32mp1_ddr.h>
#include <drivers/st/stm32mp1_ddr_regs.h>
#include <drivers/st/stm32mp1_pwr.h>
#include <drivers/st/stm32mp1_ram.h>
#include <drivers/st/stm32mp_ddr.h>
#include <lib/mmio.h>
#include <plat/common/platform.h>

#include <platform_def.h>

/*
 * Describe one DDR controller (DDRCTL) register: offset of field x inside
 * the memory-mapped register block, and offset of the matching parameter
 * inside the device-tree parameter structure y.
 */
#define DDRCTL_REG(x, y) \
	{ \
		.offset = offsetof(struct stm32mp_ddrctl, x), \
		.par_offset = offsetof(struct y, x) \
	}

/* Same as DDRCTL_REG, but for a DDR PHY (DDRPHYC) register. */
#define DDRPHY_REG(x, y) \
	{ \
		.offset = offsetof(struct stm32mp_ddrphy, x), \
		.par_offset = offsetof(struct y, x) \
	}

/*
 * PARAMETERS: values are read from the device tree:
 * size / order need to be aligned with the binding,
 * modification NOT ALLOWED !!!
 */
#define DDRCTL_REG_REG_SIZE	25	/* st,ctl-reg */
#define DDRCTL_REG_TIMING_SIZE	12	/* st,ctl-timing */
#define DDRCTL_REG_MAP_SIZE	9	/* st,ctl-map */
#if STM32MP_DDR_DUAL_AXI_PORT
#define DDRCTL_REG_PERF_SIZE	17	/* st,ctl-perf */
#else
#define DDRCTL_REG_PERF_SIZE	11	/* st,ctl-perf */
#endif

#if STM32MP_DDR_32BIT_INTERFACE
#define DDRPHY_REG_REG_SIZE	11	/* st,phy-reg */
#else
#define DDRPHY_REG_REG_SIZE	9	/* st,phy-reg */
#endif
#define DDRPHY_REG_TIMING_SIZE	10	/* st,phy-timing */

#define DDRCTL_REG_REG(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_reg)
/* Static DDRCTL registers, in device-tree "st,ctl-reg" binding order. */
static const struct stm32mp_ddr_reg_desc ddr_reg[DDRCTL_REG_REG_SIZE] = {
	DDRCTL_REG_REG(mstr),
	DDRCTL_REG_REG(mrctrl0),
	DDRCTL_REG_REG(mrctrl1),
	DDRCTL_REG_REG(derateen),
	DDRCTL_REG_REG(derateint),
	DDRCTL_REG_REG(pwrctl),
	DDRCTL_REG_REG(pwrtmg),
	DDRCTL_REG_REG(hwlpctl),
	DDRCTL_REG_REG(rfshctl0),
	DDRCTL_REG_REG(rfshctl3),
	DDRCTL_REG_REG(crcparctl0),
	DDRCTL_REG_REG(zqctl0),
	DDRCTL_REG_REG(dfitmg0),
	DDRCTL_REG_REG(dfitmg1),
	DDRCTL_REG_REG(dfilpcfg0),
	DDRCTL_REG_REG(dfiupd0),
	DDRCTL_REG_REG(dfiupd1),
	DDRCTL_REG_REG(dfiupd2),
	DDRCTL_REG_REG(dfiphymstr),
	DDRCTL_REG_REG(odtmap),
	DDRCTL_REG_REG(dbg0),
	DDRCTL_REG_REG(dbg1),
	DDRCTL_REG_REG(dbgcmd),
	DDRCTL_REG_REG(poisoncfg),
	DDRCTL_REG_REG(pccfg),
};

#define DDRCTL_REG_TIMING(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_timing)
/* DDRCTL timing registers, in device-tree "st,ctl-timing" binding order. */
static const struct stm32mp_ddr_reg_desc ddr_timing[DDRCTL_REG_TIMING_SIZE] = {
	DDRCTL_REG_TIMING(rfshtmg),
	DDRCTL_REG_TIMING(dramtmg0),
	DDRCTL_REG_TIMING(dramtmg1),
	DDRCTL_REG_TIMING(dramtmg2),
	DDRCTL_REG_TIMING(dramtmg3),
	DDRCTL_REG_TIMING(dramtmg4),
	DDRCTL_REG_TIMING(dramtmg5),
	DDRCTL_REG_TIMING(dramtmg6),
	DDRCTL_REG_TIMING(dramtmg7),
	DDRCTL_REG_TIMING(dramtmg8),
	DDRCTL_REG_TIMING(dramtmg14),
	DDRCTL_REG_TIMING(odtcfg),
};

#define DDRCTL_REG_MAP(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_map)
/* DDRCTL address-map registers, in device-tree "st,ctl-map" binding order. */
static const struct stm32mp_ddr_reg_desc ddr_map[DDRCTL_REG_MAP_SIZE] = {
	DDRCTL_REG_MAP(addrmap1),
	DDRCTL_REG_MAP(addrmap2),
	DDRCTL_REG_MAP(addrmap3),
	DDRCTL_REG_MAP(addrmap4),
	DDRCTL_REG_MAP(addrmap5),
	DDRCTL_REG_MAP(addrmap6),
	DDRCTL_REG_MAP(addrmap9),
	DDRCTL_REG_MAP(addrmap10),
	DDRCTL_REG_MAP(addrmap11),
};

#define DDRCTL_REG_PERF(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_perf)
/*
 * DDRCTL performance/QoS registers, "st,ctl-perf" binding order.
 * Port-1 entries are only present on dual-AXI-port platforms.
 */
static const struct stm32mp_ddr_reg_desc ddr_perf[DDRCTL_REG_PERF_SIZE] = {
	DDRCTL_REG_PERF(sched),
	DDRCTL_REG_PERF(sched1),
	DDRCTL_REG_PERF(perfhpr1),
	DDRCTL_REG_PERF(perflpr1),
	DDRCTL_REG_PERF(perfwr1),
	DDRCTL_REG_PERF(pcfgr_0),
	DDRCTL_REG_PERF(pcfgw_0),
	DDRCTL_REG_PERF(pcfgqos0_0),
	DDRCTL_REG_PERF(pcfgqos1_0),
	DDRCTL_REG_PERF(pcfgwqos0_0),
	DDRCTL_REG_PERF(pcfgwqos1_0),
#if STM32MP_DDR_DUAL_AXI_PORT
	DDRCTL_REG_PERF(pcfgr_1),
	DDRCTL_REG_PERF(pcfgw_1),
	DDRCTL_REG_PERF(pcfgqos0_1),
	DDRCTL_REG_PERF(pcfgqos1_1),
	DDRCTL_REG_PERF(pcfgwqos0_1),
	DDRCTL_REG_PERF(pcfgwqos1_1),
#endif
};

#define DDRPHY_REG_REG(x)	DDRPHY_REG(x, stm32mp1_ddrphy_reg)
/*
 * Static DDRPHYC registers, "st,phy-reg" binding order.
 * Byte lanes 2/3 only exist with a 32-bit DDR interface.
 */
static const struct stm32mp_ddr_reg_desc ddrphy_reg[DDRPHY_REG_REG_SIZE] = {
	DDRPHY_REG_REG(pgcr),
	DDRPHY_REG_REG(aciocr),
	DDRPHY_REG_REG(dxccr),
	DDRPHY_REG_REG(dsgcr),
	DDRPHY_REG_REG(dcr),
	DDRPHY_REG_REG(odtcr),
	DDRPHY_REG_REG(zq0cr1),
	DDRPHY_REG_REG(dx0gcr),
	DDRPHY_REG_REG(dx1gcr),
#if STM32MP_DDR_32BIT_INTERFACE
	DDRPHY_REG_REG(dx2gcr),
	DDRPHY_REG_REG(dx3gcr),
#endif
};

#define DDRPHY_REG_TIMING(x)	DDRPHY_REG(x, stm32mp1_ddrphy_timing)
/* DDRPHYC timing registers, "st,phy-timing" binding order. */
static const struct stm32mp_ddr_reg_desc ddrphy_timing[DDRPHY_REG_TIMING_SIZE] = {
	DDRPHY_REG_TIMING(ptr0),
	DDRPHY_REG_TIMING(ptr1),
	DDRPHY_REG_TIMING(ptr2),
	DDRPHY_REG_TIMING(dtpr0),
	DDRPHY_REG_TIMING(dtpr1),
	DDRPHY_REG_TIMING(dtpr2),
	DDRPHY_REG_TIMING(mr0),
DDRPHY_REG_TIMING(mr1), 166 DDRPHY_REG_TIMING(mr2), 167 DDRPHY_REG_TIMING(mr3), 168 }; 169 170 /* 171 * REGISTERS ARRAY: used to parse device tree and interactive mode 172 */ 173 static const struct stm32mp_ddr_reg_info ddr_registers[REG_TYPE_NB] = { 174 [REG_REG] = { 175 .name = "static", 176 .desc = ddr_reg, 177 .size = DDRCTL_REG_REG_SIZE, 178 .base = DDR_BASE 179 }, 180 [REG_TIMING] = { 181 .name = "timing", 182 .desc = ddr_timing, 183 .size = DDRCTL_REG_TIMING_SIZE, 184 .base = DDR_BASE 185 }, 186 [REG_PERF] = { 187 .name = "perf", 188 .desc = ddr_perf, 189 .size = DDRCTL_REG_PERF_SIZE, 190 .base = DDR_BASE 191 }, 192 [REG_MAP] = { 193 .name = "map", 194 .desc = ddr_map, 195 .size = DDRCTL_REG_MAP_SIZE, 196 .base = DDR_BASE 197 }, 198 [REGPHY_REG] = { 199 .name = "static", 200 .desc = ddrphy_reg, 201 .size = DDRPHY_REG_REG_SIZE, 202 .base = DDRPHY_BASE 203 }, 204 [REGPHY_TIMING] = { 205 .name = "timing", 206 .desc = ddrphy_timing, 207 .size = DDRPHY_REG_TIMING_SIZE, 208 .base = DDRPHY_BASE 209 }, 210 }; 211 212 static void stm32mp1_ddrphy_idone_wait(struct stm32mp_ddrphy *phy) 213 { 214 uint32_t pgsr; 215 int error = 0; 216 uint64_t timeout = timeout_init_us(DDR_TIMEOUT_US_1S); 217 218 do { 219 pgsr = mmio_read_32((uintptr_t)&phy->pgsr); 220 221 VERBOSE(" > [0x%lx] pgsr = 0x%x &\n", 222 (uintptr_t)&phy->pgsr, pgsr); 223 224 if (timeout_elapsed(timeout)) { 225 panic(); 226 } 227 228 if ((pgsr & DDRPHYC_PGSR_DTERR) != 0U) { 229 VERBOSE("DQS Gate Trainig Error\n"); 230 error++; 231 } 232 233 if ((pgsr & DDRPHYC_PGSR_DTIERR) != 0U) { 234 VERBOSE("DQS Gate Trainig Intermittent Error\n"); 235 error++; 236 } 237 238 if ((pgsr & DDRPHYC_PGSR_DFTERR) != 0U) { 239 VERBOSE("DQS Drift Error\n"); 240 error++; 241 } 242 243 if ((pgsr & DDRPHYC_PGSR_RVERR) != 0U) { 244 VERBOSE("Read Valid Training Error\n"); 245 error++; 246 } 247 248 if ((pgsr & DDRPHYC_PGSR_RVEIRR) != 0U) { 249 VERBOSE("Read Valid Training Intermittent Error\n"); 250 error++; 251 } 252 } while (((pgsr & 
DDRPHYC_PGSR_IDONE) == 0U) && (error == 0)); 253 VERBOSE("\n[0x%lx] pgsr = 0x%x\n", 254 (uintptr_t)&phy->pgsr, pgsr); 255 } 256 257 static void stm32mp1_ddrphy_init(struct stm32mp_ddrphy *phy, uint32_t pir) 258 { 259 uint32_t pir_init = pir | DDRPHYC_PIR_INIT; 260 261 mmio_write_32((uintptr_t)&phy->pir, pir_init); 262 VERBOSE("[0x%lx] pir = 0x%x -> 0x%x\n", 263 (uintptr_t)&phy->pir, pir_init, 264 mmio_read_32((uintptr_t)&phy->pir)); 265 266 /* Need to wait 10 configuration clock before start polling */ 267 udelay(DDR_DELAY_10US); 268 269 /* Wait DRAM initialization and Gate Training Evaluation complete */ 270 stm32mp1_ddrphy_idone_wait(phy); 271 } 272 273 /* Wait quasi dynamic register update */ 274 static void stm32mp1_wait_operating_mode(struct stm32mp_ddr_priv *priv, uint32_t mode) 275 { 276 uint64_t timeout; 277 uint32_t stat; 278 int break_loop = 0; 279 280 timeout = timeout_init_us(DDR_TIMEOUT_US_1S); 281 for ( ; ; ) { 282 uint32_t operating_mode; 283 uint32_t selref_type; 284 285 stat = mmio_read_32((uintptr_t)&priv->ctl->stat); 286 operating_mode = stat & DDRCTRL_STAT_OPERATING_MODE_MASK; 287 selref_type = stat & DDRCTRL_STAT_SELFREF_TYPE_MASK; 288 VERBOSE("[0x%lx] stat = 0x%x\n", 289 (uintptr_t)&priv->ctl->stat, stat); 290 if (timeout_elapsed(timeout)) { 291 panic(); 292 } 293 294 if (mode == DDRCTRL_STAT_OPERATING_MODE_SR) { 295 /* 296 * Self-refresh due to software 297 * => checking also STAT.selfref_type. 
			 */
			if ((operating_mode ==
			     DDRCTRL_STAT_OPERATING_MODE_SR) &&
			    (selref_type == DDRCTRL_STAT_SELFREF_TYPE_SR)) {
				break_loop = 1;
			}
		} else if (operating_mode == mode) {
			break_loop = 1;
		} else if ((mode == DDRCTRL_STAT_OPERATING_MODE_NORMAL) &&
			   (operating_mode == DDRCTRL_STAT_OPERATING_MODE_SR) &&
			   (selref_type == DDRCTRL_STAT_SELFREF_TYPE_ASR)) {
			/* Normal mode: handle also automatic self refresh */
			break_loop = 1;
		}

		if (break_loop == 1) {
			break;
		}
	}

	VERBOSE("[0x%lx] stat = 0x%x\n",
		(uintptr_t)&priv->ctl->stat, stat);
}

/* Mode Register Writes (MRW or MRS): issue one MR write transaction to the
 * SDRAM through MRCTRL0/MRCTRL1, following the uMCTL2 programming sequence. */
static void stm32mp1_mode_register_write(struct stm32mp_ddr_priv *priv, uint8_t addr,
					 uint32_t data)
{
	uint32_t mrctrl0;

	VERBOSE("MRS: %d = %x\n", addr, data);

	/*
	 * 1. Poll MRSTAT.mr_wr_busy until it is '0'.
	 *    This checks that there is no outstanding MR transaction.
	 *    No write should be performed to MRCTRL0 and MRCTRL1
	 *    if MRSTAT.mr_wr_busy = 1.
	 */
	while ((mmio_read_32((uintptr_t)&priv->ctl->mrstat) &
		DDRCTRL_MRSTAT_MR_WR_BUSY) != 0U) {
		;
	}

	/*
	 * 2. Write the MRCTRL0.mr_type, MRCTRL0.mr_addr, MRCTRL0.mr_rank
	 *    and (for MRWs) MRCTRL1.mr_data to define the MR transaction.
	 */
	mrctrl0 = DDRCTRL_MRCTRL0_MR_TYPE_WRITE |
		  DDRCTRL_MRCTRL0_MR_RANK_ALL |
		  (((uint32_t)addr << DDRCTRL_MRCTRL0_MR_ADDR_SHIFT) &
		   DDRCTRL_MRCTRL0_MR_ADDR_MASK);
	mmio_write_32((uintptr_t)&priv->ctl->mrctrl0, mrctrl0);
	VERBOSE("[0x%lx] mrctrl0 = 0x%x (0x%x)\n",
		(uintptr_t)&priv->ctl->mrctrl0,
		mmio_read_32((uintptr_t)&priv->ctl->mrctrl0), mrctrl0);
	mmio_write_32((uintptr_t)&priv->ctl->mrctrl1, data);
	VERBOSE("[0x%lx] mrctrl1 = 0x%x\n",
		(uintptr_t)&priv->ctl->mrctrl1,
		mmio_read_32((uintptr_t)&priv->ctl->mrctrl1));

	/*
	 * 3. In a separate APB transaction, write the MRCTRL0.mr_wr to 1.
	 *    This bit is self-clearing, and triggers the MR transaction.
	 *    The uMCTL2 then asserts the MRSTAT.mr_wr_busy while it performs
	 *    the MR transaction to SDRAM, and no further access can be
	 *    initiated until it is deasserted.
	 */
	mrctrl0 |= DDRCTRL_MRCTRL0_MR_WR;
	mmio_write_32((uintptr_t)&priv->ctl->mrctrl0, mrctrl0);

	while ((mmio_read_32((uintptr_t)&priv->ctl->mrstat) &
		DDRCTRL_MRSTAT_MR_WR_BUSY) != 0U) {
		;
	}

	VERBOSE("[0x%lx] mrctrl0 = 0x%x\n",
		(uintptr_t)&priv->ctl->mrctrl0, mrctrl0);
}

/* Switch DDR3 from DLL-on to DLL-off, following the uMCTL2 documented
 * 15-step sequence (steps are numbered in the comments below). */
static void stm32mp1_ddr3_dll_off(struct stm32mp_ddr_priv *priv)
{
	uint32_t mr1 = mmio_read_32((uintptr_t)&priv->phy->mr1);
	uint32_t mr2 = mmio_read_32((uintptr_t)&priv->phy->mr2);
	uint32_t dbgcam;

	VERBOSE("mr1: 0x%x\n", mr1);
	VERBOSE("mr2: 0x%x\n", mr2);

	/*
	 * 1. Set the DBG1.dis_hif = 1.
	 *    This prevents further reads/writes being received on the HIF.
	 */
	mmio_setbits_32((uintptr_t)&priv->ctl->dbg1, DDRCTRL_DBG1_DIS_HIF);
	VERBOSE("[0x%lx] dbg1 = 0x%x\n",
		(uintptr_t)&priv->ctl->dbg1,
		mmio_read_32((uintptr_t)&priv->ctl->dbg1));

	/*
	 * 2. Ensure all commands have been flushed from the uMCTL2 by polling
	 *    DBGCAM.wr_data_pipeline_empty = 1,
	 *    DBGCAM.rd_data_pipeline_empty = 1,
	 *    DBGCAM.dbg_wr_q_depth = 0,
	 *    DBGCAM.dbg_lpr_q_depth = 0, and
	 *    DBGCAM.dbg_hpr_q_depth = 0.
	 */
	do {
		dbgcam = mmio_read_32((uintptr_t)&priv->ctl->dbgcam);
		VERBOSE("[0x%lx] dbgcam = 0x%x\n",
			(uintptr_t)&priv->ctl->dbgcam, dbgcam);
	} while ((((dbgcam & DDRCTRL_DBGCAM_DATA_PIPELINE_EMPTY) ==
		   DDRCTRL_DBGCAM_DATA_PIPELINE_EMPTY)) &&
		 ((dbgcam & DDRCTRL_DBGCAM_DBG_Q_DEPTH) == 0U));

	/*
	 * 3. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 *    to disable RTT_NOM:
	 *    a. DDR3: Write to MR1[9], MR1[6] and MR1[2]
	 *    b.
DDR4: Write to MR1[10:8]
	 */
	mr1 &= ~(BIT(9) | BIT(6) | BIT(2));
	stm32mp1_mode_register_write(priv, 1, mr1);

	/*
	 * 4. For DDR4 only: Perform an MRS command
	 *    (using MRCTRL0 and MRCTRL1 registers) to write to MR5[8:6]
	 *    to disable RTT_PARK
	 */

	/*
	 * 5. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 *    to write to MR2[10:9], to disable RTT_WR
	 *    (and therefore disable dynamic ODT).
	 *    This applies for both DDR3 and DDR4.
	 */
	mr2 &= ~GENMASK(10, 9);
	stm32mp1_mode_register_write(priv, 2, mr2);

	/*
	 * 6. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
	 *    to disable the DLL. The timing of this MRS is automatically
	 *    handled by the uMCTL2.
	 *    a. DDR3: Write to MR1[0]
	 *    b. DDR4: Write to MR1[0]
	 */
	mr1 |= BIT(0);
	stm32mp1_mode_register_write(priv, 1, mr1);

	/*
	 * 7. Put the SDRAM into self-refresh mode by setting
	 *    PWRCTL.selfref_sw = 1, and polling STAT.operating_mode to ensure
	 *    the DDRC has entered self-refresh.
	 */
	mmio_setbits_32((uintptr_t)&priv->ctl->pwrctl,
			DDRCTRL_PWRCTL_SELFREF_SW);
	VERBOSE("[0x%lx] pwrctl = 0x%x\n",
		(uintptr_t)&priv->ctl->pwrctl,
		mmio_read_32((uintptr_t)&priv->ctl->pwrctl));

	/*
	 * 8. Wait until STAT.operating_mode[1:0]==11 indicating that the
	 *    DWC_ddr_umctl2 core is in self-refresh mode.
	 *    Ensure transition to self-refresh was due to software
	 *    by checking that STAT.selfref_type[1:0]=2.
	 */
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_SR);

	/*
	 * 9. Set the MSTR.dll_off_mode = 1.
	 *    warning: MSTR.dll_off_mode is a quasi-dynamic type 2 field,
	 *    hence the sw_done handshake around the write below.
	 */
	stm32mp_ddr_start_sw_done(priv->ctl);

	mmio_setbits_32((uintptr_t)&priv->ctl->mstr, DDRCTRL_MSTR_DLL_OFF_MODE);
	VERBOSE("[0x%lx] mstr = 0x%x\n",
		(uintptr_t)&priv->ctl->mstr,
		mmio_read_32((uintptr_t)&priv->ctl->mstr));

	stm32mp_ddr_wait_sw_done_ack(priv->ctl);

	/* 10. Change the clock frequency to the desired value. */

	/*
	 * 11. Update any registers which may be required to change for the new
	 *     frequency. This includes static and dynamic registers.
	 *     This includes both uMCTL2 registers and PHY registers.
	 */

	/* Change Bypass Mode Frequency Range */
	if (clk_get_rate(DDRPHYC) < 100000000U) {
		mmio_clrbits_32((uintptr_t)&priv->phy->dllgcr,
				DDRPHYC_DLLGCR_BPS200);
	} else {
		mmio_setbits_32((uintptr_t)&priv->phy->dllgcr,
				DDRPHYC_DLLGCR_BPS200);
	}

	/* Disable the AC and byte-lane DLLs for DLL-off operation */
	mmio_setbits_32((uintptr_t)&priv->phy->acdllcr, DDRPHYC_ACDLLCR_DLLDIS);

	mmio_setbits_32((uintptr_t)&priv->phy->dx0dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
	mmio_setbits_32((uintptr_t)&priv->phy->dx1dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
#if STM32MP_DDR_32BIT_INTERFACE
	mmio_setbits_32((uintptr_t)&priv->phy->dx2dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
	mmio_setbits_32((uintptr_t)&priv->phy->dx3dllcr,
			DDRPHYC_DXNDLLCR_DLLDIS);
#endif

	/* 12. Exit the self-refresh state by setting PWRCTL.selfref_sw = 0. */
	mmio_clrbits_32((uintptr_t)&priv->ctl->pwrctl,
			DDRCTRL_PWRCTL_SELFREF_SW);
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);

	/*
	 * 13. If ZQCTL0.dis_srx_zqcl = 0, the uMCTL2 performs a ZQCL command
	 *     at this point.
	 */

	/*
	 * 14. Perform MRS commands as required to re-program timing registers
	 *     in the SDRAM for the new frequency
	 *     (in particular, CL, CWL and WR may need to be changed).
	 */

	/* 15. Write DBG1.dis_hif = 0 to re-enable reads and writes.
 */
	mmio_clrbits_32((uintptr_t)&priv->ctl->dbg1, DDRCTRL_DBG1_DIS_HIF);
	VERBOSE("[0x%lx] dbg1 = 0x%x\n",
		(uintptr_t)&priv->ctl->dbg1,
		mmio_read_32((uintptr_t)&priv->ctl->dbg1));
}

/*
 * Disable auto-refresh, power-down and the DFI init-complete handshake
 * before PHY training (DDR init sequence step 8).
 */
static void stm32mp1_refresh_disable(struct stm32mp_ddrctl *ctl)
{
	stm32mp_ddr_start_sw_done(ctl);
	/* Quasi-dynamic register update */
	mmio_setbits_32((uintptr_t)&ctl->rfshctl3,
			DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	mmio_clrbits_32((uintptr_t)&ctl->pwrctl, DDRCTRL_PWRCTL_POWERDOWN_EN);
	mmio_clrbits_32((uintptr_t)&ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	stm32mp_ddr_wait_sw_done_ack(ctl);
}

/*
 * Restore the refresh/power-down configuration to the device-tree values
 * (rfshctl3, pwrctl) after training (DDR init sequence step 12).
 */
static void stm32mp1_refresh_restore(struct stm32mp_ddrctl *ctl,
				     uint32_t rfshctl3, uint32_t pwrctl)
{
	stm32mp_ddr_start_sw_done(ctl);
	if ((rfshctl3 & DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH) == 0U) {
		mmio_clrbits_32((uintptr_t)&ctl->rfshctl3,
				DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	}
	if ((pwrctl & DDRCTRL_PWRCTL_POWERDOWN_EN) != 0U) {
		mmio_setbits_32((uintptr_t)&ctl->pwrctl,
				DDRCTRL_PWRCTL_POWERDOWN_EN);
	}
	mmio_setbits_32((uintptr_t)&ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	stm32mp_ddr_wait_sw_done_ack(ctl);
}

/*
 * Main DDR initialization sequence: power the DDR rails for the memory type
 * read from MSTR, reset and clock the controller and PHY, program the
 * registers from the device-tree configuration, run PHY initialization and
 * DQS training, then enable the AXI port(s).
 */
void stm32mp1_ddr_init(struct stm32mp_ddr_priv *priv,
		       struct stm32mp_ddr_config *config)
{
	uint32_t pir;
	int ret = -EINVAL;

	/* Board-level DDR power init depends on the memory technology */
	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) != 0U) {
		ret = stm32mp_board_ddr_power_init(STM32MP_DDR3);
	} else if ((config->c_reg.mstr & DDRCTRL_MSTR_LPDDR2) != 0U) {
		ret = stm32mp_board_ddr_power_init(STM32MP_LPDDR2);
	} else if ((config->c_reg.mstr & DDRCTRL_MSTR_LPDDR3) != 0U) {
		ret = stm32mp_board_ddr_power_init(STM32MP_LPDDR3);
	} else {
		ERROR("DDR type not supported\n");
	}

	if (ret != 0) {
		panic();
	}

	VERBOSE("name = %s\n", config->info.name);
	VERBOSE("speed = %u kHz\n", config->info.speed);
	VERBOSE("size = 0x%x\n", config->info.size);

	/* DDR INIT SEQUENCE */

	/*
	 * 1. Program the DWC_ddr_umctl2 registers
	 * nota: check DFIMISC.dfi_init_complete = 0
	 */

	/* 1.1 RESETS: presetn, core_ddrc_rstn, aresetn */
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);

	/* 1.2. start CLOCK */
	if (stm32mp1_ddr_clk_enable(priv, config->info.speed) != 0) {
		panic();
	}

	/* 1.3. deassert reset */
	/* De-assert PHY rstn and ctl_rstn via DPHYRST and DPHYCTLRST. */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);
	/*
	 * De-assert presetn once the clocks are active
	 * and stable via DDRCAPBRST bit.
	 */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);

	/* 1.4. wait 128 cycles to permit initialization of end logic */
	udelay(DDR_DELAY_2US);
	/* For PCLK = 133MHz => 1 us is enough, 2 to allow lower frequency */

	/* 1.5.
initialize registers ddr_umctl2 */
	/* Stop uMCTL2 before PHY is ready */
	mmio_clrbits_32((uintptr_t)&priv->ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	VERBOSE("[0x%lx] dfimisc = 0x%x\n",
		(uintptr_t)&priv->ctl->dfimisc,
		mmio_read_32((uintptr_t)&priv->ctl->dfimisc));

	stm32mp_ddr_set_reg(priv, REG_REG, &config->c_reg, ddr_registers);

	/* DDR3 = don't set DLLOFF for init mode */
	if ((config->c_reg.mstr &
	     (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE))
	    == (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE)) {
		VERBOSE("deactivate DLL OFF in mstr\n");
		mmio_clrbits_32((uintptr_t)&priv->ctl->mstr,
				DDRCTRL_MSTR_DLL_OFF_MODE);
		VERBOSE("[0x%lx] mstr = 0x%x\n",
			(uintptr_t)&priv->ctl->mstr,
			mmio_read_32((uintptr_t)&priv->ctl->mstr));
	}

	stm32mp_ddr_set_reg(priv, REG_TIMING, &config->c_timing, ddr_registers);
	stm32mp_ddr_set_reg(priv, REG_MAP, &config->c_map, ddr_registers);

	/* Skip CTRL init, SDRAM init is done by PHY PUBL */
	mmio_clrsetbits_32((uintptr_t)&priv->ctl->init0,
			   DDRCTRL_INIT0_SKIP_DRAM_INIT_MASK,
			   DDRCTRL_INIT0_SKIP_DRAM_INIT_NORMAL);
	VERBOSE("[0x%lx] init0 = 0x%x\n",
		(uintptr_t)&priv->ctl->init0,
		mmio_read_32((uintptr_t)&priv->ctl->init0));

	stm32mp_ddr_set_reg(priv, REG_PERF, &config->c_perf, ddr_registers);

	/* 2. deassert reset signal core_ddrc_rstn, aresetn and presetn */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);

	/*
	 * 3. start PHY init by accessing relevant PUBL registers
	 *    (DXGCR, DCR, PTR*, MR*, DTPR*)
	 */
	stm32mp_ddr_set_reg(priv, REGPHY_REG, &config->p_reg, ddr_registers);
	stm32mp_ddr_set_reg(priv, REGPHY_TIMING, &config->p_timing, ddr_registers);

	/* DDR3 = don't set DLLOFF for init mode */
	if ((config->c_reg.mstr &
	     (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE))
	    == (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE)) {
		VERBOSE("deactivate DLL OFF in mr1\n");
		mmio_clrbits_32((uintptr_t)&priv->phy->mr1, BIT(0));
		VERBOSE("[0x%lx] mr1 = 0x%x\n",
			(uintptr_t)&priv->phy->mr1,
			mmio_read_32((uintptr_t)&priv->phy->mr1));
	}

	/*
	 * 4. Monitor PHY init status by polling PUBL register PGSR.IDONE
	 *    Perform DDR PHY DRAM initialization and Gate Training Evaluation
	 */
	stm32mp1_ddrphy_idone_wait(priv->phy);

	/*
	 * 5. Indicate to PUBL that controller performs SDRAM initialization
	 *    by setting PIR.INIT and PIR.CTLDINIT and poll PGSR.IDONE
	 *    DRAM init is done by PHY, init0.skip_dram.init = 1
	 */
	pir = DDRPHYC_PIR_DLLSRST | DDRPHYC_PIR_DLLLOCK | DDRPHYC_PIR_ZCAL |
	      DDRPHYC_PIR_ITMSRST | DDRPHYC_PIR_DRAMINIT | DDRPHYC_PIR_ICPC;

	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) != 0U) {
		pir |= DDRPHYC_PIR_DRAMRST; /* Only for DDR3 */
	}

	stm32mp1_ddrphy_init(priv->phy, pir);

	/*
	 * 6. SET DFIMISC.dfi_init_complete_en to 1
	 *    Enable quasi-dynamic register programming.
	 */
	stm32mp_ddr_start_sw_done(priv->ctl);

	mmio_setbits_32((uintptr_t)&priv->ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	VERBOSE("[0x%lx] dfimisc = 0x%x\n",
		(uintptr_t)&priv->ctl->dfimisc,
		mmio_read_32((uintptr_t)&priv->ctl->dfimisc));

	stm32mp_ddr_wait_sw_done_ack(priv->ctl);

	/*
	 * 7. Wait for DWC_ddr_umctl2 to move to normal operation mode
	 *    by monitoring STAT.operating_mode signal
	 */

	/* Wait uMCTL2 ready */
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);

	/* Switch to DLL OFF mode */
	if ((config->c_reg.mstr & DDRCTRL_MSTR_DLL_OFF_MODE) != 0U) {
		stm32mp1_ddr3_dll_off(priv);
	}

	VERBOSE("DDR DQS training : ");

	/*
	 * 8. Disable Auto refresh and power down by setting
	 *    - RFSHCTL3.dis_au_refresh = 1
	 *    - PWRCTL.powerdown_en = 0
	 *    - DFIMISC.dfiinit_complete_en = 0
	 */
	stm32mp1_refresh_disable(priv->ctl);

	/*
	 * 9. Program PUBL PGCR to enable refresh during training
	 *    and rank to train
	 *    not done => keep the programmed value in PGCR
	 */

	/*
	 * 10. configure PUBL PIR register to specify which training step
	 *     to run
	 *     RVTRN is executed only on LPDDR2/LPDDR3
	 */
	pir = DDRPHYC_PIR_QSTRN;
	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) == 0U) {
		pir |= DDRPHYC_PIR_RVTRN;
	}

	stm32mp1_ddrphy_init(priv->phy, pir);

	/* 11. monitor PUB PGSR.IDONE to poll completion of training sequence */
	stm32mp1_ddrphy_idone_wait(priv->phy);

	/*
	 * 12. set back registers in step 8 to the original values if desired
	 */
	stm32mp1_refresh_restore(priv->ctl, config->c_reg.rfshctl3,
				 config->c_reg.pwrctl);

	stm32mp_ddr_enable_axi_port(priv->ctl);
}